Commit a9e41399 authored by aknecht2

Added ih-write-sql script; updated ih-seed script to optionally accept a threshold value.

parent cd6384d1
@@ -9,7 +9,7 @@ To obtain access to a larger set of rice images (RGB, FLUO, NIR, IR), please con
 * Publications:
     * Campbell MT, Knecht AC, Berger B, Brien CJ, Wang D, Walia H. Integrating Image-Based Phenomics and Association Analysis to Dissect the Genetic Architecture of Temporal Salinity Responses in Rice. 2015, Plant Physiology. `Link <http://www.plantphysiol.org/content/early/2015/06/25/pp.15.00450.short>`_.
-    * Knecht AC, Campbell MT, Caprez A, Swanson DR, Walia H. Image Harvest: An open source platform for high-throughput plant image processing and analysis. 2016, JXB. (In press).
+    * Knecht AC, Campbell MT, Caprez A, Swanson DR, Walia H. Image Harvest: An open source platform for high-throughput plant image processing and analysis. 2016, JXB. (In prep).
 * `Developing a processing workflow on a local computer <http://cropstressgenomics.org/data/html/ex_script_camera2.html>`_
 * `Executing processing workflows on a computing cluster <http://cropstressgenomics.org/data/html/ex_workflow_1.html>`_
...
@@ -765,10 +765,6 @@ class Stats:
         self.conn.commit()
         return
-
-    def SOMANYGRAPHINGFUNCTIONS(self):
-        return
-
     def export(self, table, processed = True, group = None, fname = None):
         """
         :param table: The table to write to csv
...
@@ -9,6 +9,7 @@ parser.add_argument("--input", dest="input", help="Path to input seed scan image
 parser.add_argument("--output", dest="output", help="Path to output csv file.", required = True)
 parser.add_argument("--dpi", dest="dpi", help="dpi of the image.", type = float, default = 600)
 parser.add_argument("--roi", dest="roi", help="roi file")
+parser.add_argument("--thresh", dest="thresh", type=int, default=230, help="Threshold value.")
 parser.add_argument("--ystart", dest="ystart", default=-1, help="Minimum Y of the roi.")
 parser.add_argument("--yend", dest="yend", default=-1, help="Maximum Y of the roi.")
 parser.add_argument("--xstart", dest="xstart", default=-1, help="Minimum X of the roi.")
@@ -29,7 +30,8 @@ if args.writesteps:
 seed.convertColor("bgr", "gray")
 seed.equalizeHist()
-seed.threshold(230)
+seed.threshold(args.thresh)
+seed.save("thresh")
 if args.writesteps:
     seed.write(prefix + "_thresh.png")
...
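The second hunk replaces the hard-coded threshold of 230 with the new --thresh argument, so the binarization cutoff can be tuned per scan; the added seed.save("thresh") call presumably records the intermediate image state under that name. A sketch of an invocation using the new flag (the input and output paths here are hypothetical):

> ih-seed --input scan_0001.png --output seeds.csv --dpi 600 --thresh 200

Omitting --thresh keeps the default of 230, which matches the script's previous behavior.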
#!python
"""
A helper script used as a workaround for sqlite joins of more than
1000 columns. The script walks through the current directory and
creates an sql file that merges all matching database files into one.
"""
import os
import argparse
parser = argparse.ArgumentParser(description = "Creates an sql script to join many database files.")
parser.add_argument("--prefix", dest="prefix", help="Optional prefix to match for db names.")
parser.add_argument("--output", dest="output", default="combine.sql", help="Output file name.")
args = parser.parse_args()
# The statements to write for each database
lines = [
    "begin;",
    "insert into images select * from to_merge.images;",
    "commit;",
    "detach to_merge;"
]
# Write the resulting sql file (combine.sql by default)
with open(args.output, "w") as wh:
    for root, dirs, files in os.walk("."):
        for f in files:
            # Match all .db files, optionally restricted to the given prefix
            if f.endswith(".db"):
                if not args.prefix or f.startswith(args.prefix):
                    first_line = ["attach \"%s\" as to_merge;" % (f,)]
                    write_lines = first_line + lines
                    for line in write_lines:
                        wh.write(line + "\n")
                    wh.write("\n")
        # Only the top level of the current directory is scanned
        break
"""
You may optionally want to filter based on the type of the data if
different imtypes have different processing results.
After creating the combine.sql file, you can aggregate data the following way:
1. Copy a sample database to maintain the original structure.
> cp fluosv0.db fluosv_all.db
2. Load the final database in sqlite:
> sqlite3 fluosv_all.db
3. Within sqlite, read in the created sql file:
> .read combine.sql
Note: generate combine.sql before step 1 so that fluosv_all.db is not
itself matched as an input, and clear the copied rows first
(delete from images;) if you do not want the sample database counted twice.
"""
@@ -34,6 +34,7 @@ setup(
         "scripts/ih-flood-fill",
         "scripts/ih-seed",
+        "scripts/ih-write-sql",
         "scripts/ih-setup",
         "scripts/ih-crawl",
...
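For reference, given a directory containing two hypothetical databases fluosv0.db and fluosv1.db, the combine.sql generated by the new ih-write-sql script would contain one attach/insert/detach block per file:

attach "fluosv0.db" as to_merge;
begin;
insert into images select * from to_merge.images;
commit;
detach to_merge;

attach "fluosv1.db" as to_merge;
begin;
insert into images select * from to_merge.images;
commit;
detach to_merge;

Each block attaches a single database, copies its images rows into the target database, and detaches it again, so no more than one extra database is attached at a time and the wide multi-database join is avoided entirely.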