Commit 18da1ec1 authored by Christopher Schmied

Added resaving of output: define xml

parent 455f7ce2
@@ -16,8 +16,10 @@ xml_merge_in = produce_xml_merge_job_files(datasets)

 rule done:
     #input: [ ds+"_fusion" for ds in datasets ]
-    input: [ ds + "_" + config["common"]["fusion_switch"] for ds in datasets ]
+    #input: [ ds + "_" + config["common"]["fusion_switch"] for ds in datasets ]
+    input: config["hdf5_output"]["output_xml"] + ".xml"

 rule resave_prepared:
     input: expand("{dataset}.{suffix}", dataset=[ config["common"]["hdf5_xml_filename"] ], suffix=["xml","h5"])
@@ -374,77 +376,42 @@ rule deconvolution:
             )
         cmd_string += " > {log} 2>&1 && touch {output}"
         shell(cmd_string)

-rule hdf5_output_define_xml:
-    input: glob.glob('*.tif'), "{xml_base}-{file_id,\d+}-00.h5_" + config["common"]["fusion_switch"]
+rule define_output:
+    input: glob.glob('TP*')
     output: config["hdf5_output"]["output_xml"] + ".xml"
-    message: "Execute define_xml_tif on the following files {input}"
-    log: "hdf5_output_define_xml.log"
+    log: "define_xml_tif.log"
     run:
         cmd_string = produce_string(
             """{fiji-prefix} {fiji-app} \
             -Dimage_file_directory={jdir} \
-            -Dtimepoints={output_timepoints} \
-            -Dchannels={output_channels} \
-            -Dimage_file_pattern={output_image_file_pattern} \
-            -Dpixel_distance_x={output_pixel_distance_x} \
-            -Dpixel_distance_y={output_pixel_distance_y} \
-            -Dpixel_distance_z={output_pixel_distance_z} \
-            -Dpixel_unit={output_pixel_unit} \
-            -Dxml_filename={output_xml} \
-            -Dtype_of_dataset={output_type_of_dataset} \
-            -Dmultiple_timepoints={output_multiple_timepoints} \
-            -Dmultiple_channels={output_multiple_channels} \
-            -Dmultiple_illumination_directions={output_illumination_directions} \
-            -Dmultiple_angles={output_multiple_angles} \
-            -Dimglib_container={output_imglib_container} \
+            -Dtimepoints={timepoints} \
+            -Dacquisition_angles={acquisition_angles} \
+            -Dchannels={channels} \
+            -Dimage_file_pattern={image_file_pattern} \
+            -Dpixel_distance_x={pixel_distance_x} \
+            -Dpixel_distance_y={pixel_distance_y} \
+            -Dpixel_distance_z={pixel_distance_z} \
+            -Dpixel_unit={pixel_unit} \
+            -Dxml_filename={first_xml_filename} \
+            -Dtype_of_dataset={type_of_dataset} \
+            -Dmultiple_timepoints={multiple_timepoints} \
+            -Dmultiple_channels={multiple_channels} \
+            -Dmultiple_illumination_directions={multiple_illumination_directions} \
+            -Dmultiple_angles={multiple_angles} \
+            -Dimglib_container={imglib_container} \
             -- --no-splash {path_bsh}""",
             config["common"],
-            config["hdf5_output"],
+            config["define_xml_tif"],
             jdir=JOBDIR,
             path_bsh=config["common"]["bsh_directory"] + config["define_xml_tif"]["bsh_file"])
-        cmd_string += "> {log} 2>&1 && touch {output}"
+        cmd_string += " > {log} 2>&1"
         shell(cmd_string)

-ruleorder: define_xml_tif > define_xml_czi
-
-# create mother .xml/.h5
-rule hdf5_output_define_hdf5:
-    input: config["hdf5_output"]["output_xml"] + ".xml"
-    output: expand("{dataset}.{suffix}", dataset=[ config["hdf5_output"]["output_hdf5_xml"].strip('\"') ], suffix=["xml","h5"]),
-            expand("{xml_base}-{file_id}-00.h5_empty", xml_base=[ config["hdf5_output"]["output_hdf5_xml"].strip('\"') ], file_id=range(int(config["common"]["ntimepoints"]))) # problematic, needs padding of file_id
-    log: "hdf5_output_define_hdf5.log"
-    run:
-        part_string = produce_string(
-            """{fiji-prefix} {fiji-app} \
-            -Dimage_file_directory={jdir} \
-            -Dfirst_xml_filename={output_xml} \
-            -Dhdf5_xml_filename={output_hdf5_xml} \
-            -Dresave_angle={resave_angle} \
-            -Dresave_channel={resave_channel} \
-            -Dresave_illumination={resave_illumination} \
-            -Dresave_timepoint={resave_timepoint} \
-            -Dsubsampling_factors={subsampling_factors} \
-            -Dhdf5_chunk_sizes={hdf5_chunk_sizes} \
-            -Dtimepoints_per_partition={timepoints_per_partition} \
-            -Dsetups_per_partition={setups_per_partition} \
-            -Drun_only_job_number=0 \
-            -Doutput_data_type={output_data_type} \
-            -- --no-splash {path_bsh}""", # the & submits everything at once
-            config["common"],
-            config["define_xml_czi"],
-            config["resave_hdf5"],
-            config["hdf5_output"],
-            jdir=JOBDIR,
-            path_bsh=config["common"]["bsh_directory"] + config["resave_hdf5"]["bsh_file"])
-        part_string += "> {log} 2>&1"
-        shell(part_string)
-
-        # create dummy files according to the number of timepoints found
-        for index in range(int(config["common"]["ntimepoints"])):
-            shell("touch {basename}-{file_id}-00.h5_empty".format(basename=config["common"]["hdf5_xml_filename"], file_id="%02d" % index)) # problematic, needs padding of file_id

 rule distclean:
     params : glob.glob(config["common"]["hdf5_xml_filename"].strip('\"')+"*"), glob.glob(config["common"]["first_xml_filename"].strip('\"')+"*"), glob.glob("*registered"), glob.glob("*log"), glob.glob("*_deconvolved"), glob.glob("*.xml~*"), "interestpoints", glob.glob("*empty"), expand("{dataset}.{suffix}", dataset=[ config["common"]["hdf5_xml_filename"].strip('\"') ], suffix=["xml","h5"]) # xml_merge_in,
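The define_output rule above relies on the repository's produce_string helper to splice the "common" and "define_xml_tif" config sections plus keyword arguments into the Fiji command line. As a rough orientation, a minimal Python sketch of such a helper is given below; it is an assumption for illustration, not the actual implementation in this Snakefile.

def produce_string(template, *dicts, **kwargs):
    # Hypothetical sketch: merge the passed config dicts (later ones override
    # earlier keys) and let explicit keyword arguments override everything.
    fields = {}
    for d in dicts:
        fields.update(d)
    fields.update(kwargs)
    # format_map also resolves placeholders such as {fiji-prefix} whose names
    # are not valid Python identifiers.
    return template.format_map(fields)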
@@ -58,21 +58,6 @@ System.out.println( "setups_per_partition=" + setups_per_partition );
 int run_only_job_number = Integer.parseInt( System.getProperty( "run_only_job_number" ) );
 System.out.println( "run_only_job_number=" + run_only_job_number );

-job_type = System.getProperty( "job_type" );
-System.out.println( "Job type = " + job_type );
-
-//
-//if ( job_type.equalsIgnoreCase( "xml" ) )
-//	{
-//	job_number = 0;
-//	}
-//else if ( job_type.equalsIgnoreCase( "hdf5" ) )
-//	{
-//	job_number = run_only_job_number + 1;
-//	}
-//
-//System.out.println( "Job Number = " + job_number );

 // Activate cluster processing
 System.out.println("=========================================================");
 try{
@@ -55,14 +55,41 @@ System.out.println( "run_only_job_number=" + run_only_job_number );

 // Add a switch for choosing between 16 or 32 bit
+output_data_type = System.getProperty( "output_data_type" );
+System.out.println( "Output Datatype = " + output_data_type );
+
+if ( output_data_type.equalsIgnoreCase( "16Bit" ) )
+	{
+	data_string = "convert_32bit=convert_32bit";
+	}
+else if ( output_data_type.equalsIgnoreCase( "32Bit" ) )
+	{
+	data_string = "";
+	}
+
+System.out.println( "Data string = " + data_string );

 // Activate cluster processing
 System.out.println("=========================================================");
 System.out.println("Cluster setting:");
+try{
 IJ.run("Toggle Cluster Processing", "display_cluster");
+}
+catch ( e ) {
+	print( "[deconvolution-GPU] caught exception: "+e );
+	// important to fail the process if an exception occurs
+	runtime.exit(1);
+}

 // Executes Fiji plugin
 System.out.println("=========================================================");
 System.out.println("Start plugin:");
+try{
 IJ.run("As HDF5",
 	"select_xml=" + image_file_directory + first_xml_filename + ".xml " +
 	"resave_angle=[" + resave_angle + "] " +

@@ -77,8 +104,16 @@ IJ.run("As HDF5",
 	"setups_per_partition=" + setups_per_partition + " " +
 	"run_only_job_number=" + run_only_job_number + " " +
 	"use_deflate_compression " +
-	"export_path=" + image_file_directory + hdf5_xml_filename );
+	"export_path=" + image_file_directory + hdf5_xml_filename + " " +
+	data_string
+);
+}
+catch ( e ) {
+	print( "[deconvolution-GPU] caught exception: "+e );
+	// important to fail the process if an exception occurs
+	runtime.exit(1);
+}

 /* shutdown */
 System.exit(0);
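The added BeanShell block selects an extra option token based on output_data_type and appends it to the "As HDF5" option string. A short Python rendering of that decision, given purely for illustration (the pipeline itself does this in BeanShell), could look like:

def convert_option(output_data_type):
    # Mirrors the switch added above: 16-bit output appends the convert_32bit
    # key to the plugin options, 32-bit output appends nothing.
    if output_data_type.lower() == "16bit":
        return "convert_32bit=convert_32bit"
    if output_data_type.lower() == "32bit":
        return ""
    raise ValueError("unsupported output_data_type: " + output_data_type)

# Example result: "use_deflate_compression convert_32bit=convert_32bit"
options = "use_deflate_compression " + convert_option("16Bit")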
@@ -176,7 +176,7 @@
 "hdf5_output" :
 {
     "output_image_file_pattern" : "TP{t}_Ch{c}_Ill0_Ang0,72,144,216,288.tif",
-    "output_data_type" : "16Bit",
+    "output_data_type" : "32Bit",
     "output_xml" : "\"fused_Dual_Channel\"",
     "output_hdf5_xml" : "\"hdf5_fused_Stock68\"",
     "output_multiple_channels" : "\"NO (one channel)\"",

@@ -191,7 +191,7 @@
     "output_multiple_angles" : "\"NO (one angle)\"",
     "output_type_of_dataset" : "\"Image Stacks (ImageJ Opener)\"",
     "output_imglib_container" : "\"ArrayImg (faster)\"",
-    "fusion_output_export" : "/export_output.bsh",
+    "bsh_file" : "/export_output.bsh",
     "convert_32bit" : "\"[Use min/max of first image (might saturate intenities over time)]\""
 }
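Several values in this config section are wrapped in escaped quotes so they survive the trip through the shell into Fiji; on the Snakemake side they are unwrapped with .strip('\"') before being used as file names. A minimal sketch, assuming a config dict shaped like the JSON above:

config = {
    "hdf5_output": {
        "output_xml": "\"fused_Dual_Channel\"",
        "output_hdf5_xml": "\"hdf5_fused_Stock68\"",
    }
}
# Strip the embedded quotes before building the expected file name,
# as the rules in the Snakefile do.
output_xml_file = config["hdf5_output"]["output_xml"].strip('"') + ".xml"
print(output_xml_file)  # -> fused_Dual_Channel.xml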