From 18da1ec182ddc5b2a2b18c44d4b61dfb0ca550c0 Mon Sep 17 00:00:00 2001
From: Christopher Schmied <schmied@mpi-cbg.de>
Date: Wed, 27 May 2015 19:06:14 +0200
Subject: [PATCH] Added resaving of output: define xml

---
 spim_registration/timelapse/Snakefile         | 85 ++++++-------------
 spim_registration/timelapse/export.bsh        | 15 ----
 spim_registration/timelapse/export_output.bsh | 41 ++++++++-
 spim_registration/timelapse/tomancak_czi.json |  4 +-
 4 files changed, 66 insertions(+), 79 deletions(-)

diff --git a/spim_registration/timelapse/Snakefile b/spim_registration/timelapse/Snakefile
index 778c1e2..b912717 100755
--- a/spim_registration/timelapse/Snakefile
+++ b/spim_registration/timelapse/Snakefile
@@ -16,8 +16,10 @@ xml_merge_in = produce_xml_merge_job_files(datasets)
 
 rule done:
     #input: [ ds+"_fusion" for ds in datasets ]
-    input: [ ds + "_" + config["common"]["fusion_switch"] for ds in datasets ]
-
+    #input: [ ds + "_" + config["common"]["fusion_switch"] for ds in datasets ]
+    #input: [ ds + "_" + config["common"]["fusion_switch"] for ds in datasets ]
+    input: config["hdf5_output"]["output_xml"] + ".xml"
+    
 rule resave_prepared:
     input: expand("{dataset}.{suffix}",dataset=[ config["common"]["hdf5_xml_filename"] ], suffix=["xml","h5"])
 
@@ -374,77 +376,42 @@ rule deconvolution:
         )
         cmd_string += " > {log} 2>&1 && touch {output}"
         shell(cmd_string)
-        
-rule hdf5_output_define_xml:
-    input: glob.glob('*.tif'), "{xml_base}-{file_id,\d+}-00.h5_" + config["common"]["fusion_switch"]
+
+rule define_output:
+    input: glob.glob('TP*')
     output: config["hdf5_output"]["output_xml"] + ".xml"
-    message: "Execute define_xml_tif on the following files {input}"
-    log: "hdf5_output_define_xml.log"
+    log: "define_xml_tif.log"
     run:
         cmd_string = produce_string(
         	"""{fiji-prefix} {fiji-app} \
         -Dimage_file_directory={jdir} \
-        -Dtimepoints={output_timepoints} \
-        -Dchannels={output_channels} \
-        -Dimage_file_pattern={output_image_file_pattern} \
-        -Dpixel_distance_x={output_pixel_distance_x} \
-        -Dpixel_distance_y={output_pixel_distance_y} \
-        -Dpixel_distance_z={output_pixel_distance_z} \
-        -Dpixel_unit={output_pixel_unit} \
-       	-Dxml_filename={output_xml} \
-        -Dtype_of_dataset={output_type_of_dataset} \
-        -Dmultiple_timepoints={output_multiple_timepoints} \
-        -Dmultiple_channels={output_multiple_channels} \
-        -Dmultiple_illumination_directions={output_illumination_directions} \
-        -Dmultiple_angles={output_multiple_angles} \
-        -Dimglib_container={output_imglib_container} \
+        -Dtimepoints={timepoints} \
+        -Dacquisition_angles={acquisition_angles} \
+        -Dchannels={channels} \
+        -Dimage_file_pattern={image_file_pattern} \
+        -Dpixel_distance_x={pixel_distance_x} \
+        -Dpixel_distance_y={pixel_distance_y} \
+        -Dpixel_distance_z={pixel_distance_z} \
+        -Dpixel_unit={pixel_unit} \
+       	-Dxml_filename={first_xml_filename} \
+        -Dtype_of_dataset={type_of_dataset} \
+        -Dmultiple_timepoints={multiple_timepoints} \
+        -Dmultiple_channels={multiple_channels} \
+        -Dmultiple_illumination_directions={multiple_illumination_directions} \
+        -Dmultiple_angles={multiple_angles} \
+        -Dimglib_container={imglib_container} \
 	-- --no-splash {path_bsh}""",
 	config["common"],
-	config["hdf5_output"],
+	config["define_xml_tif"],
 	jdir=JOBDIR,
 	path_bsh=config["common"]["bsh_directory"] + config["define_xml_tif"]["bsh_file"])
 	
-	cmd_string +="> {log} 2>&1 && touch {output}"
+	cmd_string +=" > {log} 2>&1"
 	shell(cmd_string)
 
-ruleorder: define_xml_tif > define_xml_czi 
-
-# create mother .xml/.h5
-rule hdf5_output_define_hdf5:
-    input: config["hdf5_output"]["output_xml"] + ".xml" 
-    output: expand("{dataset}.{suffix}",dataset=[ config["hdf5_output"]["output_hdf5_xml"].strip('\"')], suffix=["xml","h5"]),
-            expand("{xml_base}-{file_id}-00.h5_empty", xml_base=[ config["hdf5_output"]["output_hdf5_xml"].strip('\"') ],file_id=range(int(config["common"]["ntimepoints"]))) # problematic needs padding of file_id
-    log: "hdf5_output_define_hdf5.log"
-    run:
-        part_string = produce_string(
-        	"""{fiji-prefix} {fiji-app} \
-                -Dimage_file_directory={jdir} \
-                -Dfirst_xml_filename={output_xml} \
-                -Dhdf5_xml_filename={output_hdf5_xml} \
-                -Dresave_angle={resave_angle} \
-                -Dresave_channel={resave_channel} \
-                -Dresave_illumination={resave_illumination} \
-                -Dresave_timepoint={resave_timepoint} \
-                -Dsubsampling_factors={subsampling_factors} \
-                -Dhdf5_chunk_sizes={hdf5_chunk_sizes} \
-                -Dtimepoints_per_partition={timepoints_per_partition} \
-                -Dsetups_per_partition={setups_per_partition} \
-                -Drun_only_job_number=0 \
-                -Doutput_data_type={output_data_type} \
-                -- --no-splash {path_bsh}""", # the & submits everyting at once
-           config["common"],
-           config["define_xml_czi"],
-           config["resave_hdf5"],
-           config["hdf5_output"],
-           jdir=JOBDIR,
-           path_bsh=config["common"]["bsh_directory"] + config["resave_hdf5"]["bsh_file"])
 
-        part_string += "> {log} 2>&1"
-        shell(part_string)
 
-        #create dummy files according to the number of timepoints found
-        for index in range(int(config["common"]["ntimepoints"])):
-           shell("touch {basename}-{file_id}-00.h5_empty".format(basename=config["common"]["hdf5_xml_filename"], file_id="%02d" % index)) # problematic needs padding of file_id
+       	       
 
 rule distclean:
     params : glob.glob(config["common"]["hdf5_xml_filename"].strip('\"')+"*"), glob.glob(config["common"]["first_xml_filename"].strip('\"')+"*"), glob.glob("*registered"), glob.glob("*log"), glob.glob("*_deconvolved"), glob.glob("*.xml~*"),"interestpoints", glob.glob("*empty"), expand("{dataset}.{suffix}",dataset=[ config["common"]["hdf5_xml_filename"].strip('\"')], suffix=["xml","h5"])# xml_merge_in, 
diff --git a/spim_registration/timelapse/export.bsh b/spim_registration/timelapse/export.bsh
index 9905b6e..8666721 100755
--- a/spim_registration/timelapse/export.bsh
+++ b/spim_registration/timelapse/export.bsh
@@ -58,21 +58,6 @@ System.out.println( "setups_per_partition=" + setups_per_partition );
 int run_only_job_number =  Integer.parseInt( System.getProperty( "run_only_job_number" ) );
 System.out.println( "run_only_job_number=" + run_only_job_number );
 
-job_type = System.getProperty( "job_type" );
-System.out.println( "Job type = " + job_type );
-
-//
-//if ( job_type.equalsIgnoreCase( "xml" ) )
-//	{
-//		job_number = 0;
-//	}
-//else if ( job_type.equalsIgnoreCase( "hdf5" ) )
-//	{
-//		job_number = run_only_job_number + 1;
-//	}
-//
-//System.out.println( "Job Number = " + job_number );
-
 // Activate cluster processing
 System.out.println("=========================================================");
 try{
diff --git a/spim_registration/timelapse/export_output.bsh b/spim_registration/timelapse/export_output.bsh
index e43fe32..af20191 100755
--- a/spim_registration/timelapse/export_output.bsh
+++ b/spim_registration/timelapse/export_output.bsh
@@ -55,14 +55,41 @@ System.out.println( "run_only_job_number=" + run_only_job_number );
 
 // Add a switch for choosing between 16 or 32 bit
 
+
+output_data_type = System.getProperty( "output_data_type" );
+
+System.out.println( "Output Datatype = " + output_data_type );
+
+
+if ( output_data_type.equalsIgnoreCase( "16Bit" ) )
+	{
+		data_string = "convert_32bit=convert_32bit";
+	}
+else if ( output_data_type.equalsIgnoreCase( "32Bit" ) )
+	{
+		data_string = "";
+	}
+
+System.out.println( "Data string = " + data_string );
+
+
+
 // Activate cluster processing
 System.out.println("=========================================================");
 System.out.println("Cluster setting:");
+try{
 IJ.run("Toggle Cluster Processing", "display_cluster");
-
+}
+catch ( e ) { 
+	print( "[export_output] caught exception: "+e );
+	//important to fail the process if exception occurs
+	runtime.exit(1);
+}
+	
 // Executes Fiji plugin
 System.out.println("=========================================================");
 System.out.println("Start plugin:");
+try{
 IJ.run("As HDF5",
 	"select_xml=" + image_file_directory + first_xml_filename + ".xml " +
 	"resave_angle=[" + resave_angle + "] " +
@@ -77,8 +104,16 @@ IJ.run("As HDF5",
 	"setups_per_partition=" + setups_per_partition + " " +
 	"run_only_job_number=" + run_only_job_number + " " +
 	"use_deflate_compression " +
-	"export_path=" + image_file_directory + hdf5_xml_filename );
-
+	"export_path=" + image_file_directory + hdf5_xml_filename + " " +
+	data_string
+	);
+}
+catch ( e ) { 
+	print( "[export_output] caught exception: "+e );
+	//important to fail the process if exception occurs
+	runtime.exit(1);
+}
+	
 /* shutdown */
 System.exit(0);
 
diff --git a/spim_registration/timelapse/tomancak_czi.json b/spim_registration/timelapse/tomancak_czi.json
index b1dbb8e..a21c0bc 100755
--- a/spim_registration/timelapse/tomancak_czi.json
+++ b/spim_registration/timelapse/tomancak_czi.json
@@ -176,7 +176,7 @@
     "hdf5_output" :
     {
     	    "output_image_file_pattern" : "TP{t}_Ch{c}_Ill0_Ang0,72,144,216,288.tif",
-    	    "output_data_type" : "16Bit",
+    	    "output_data_type" : "32Bit",
     	    "output_xml" : "\"fused_Dual_Channel\"",
     	    "output_hdf5_xml" : "\"hdf5_fused_Stock68\"",
     	    "output_multiple_channels" : "\"NO (one channel)\"",	
@@ -191,7 +191,7 @@
     	    "output_multiple_angles" : "\"NO (one angle)\"",					
     	    "output_type_of_dataset" : "\"Image Stacks (ImageJ Opener)\"", 		
     	    "output_imglib_container" : "\"ArrayImg (faster)\"",
-    	    "fusion_output_export" : "/export_output.bsh", 	
+    	    "bsh_file" : "/export_output.bsh", 	
     	    "convert_32bit" : "\"[Use min/max of first image (might saturate intenities over time)]\""
     	    
     }
-- 
GitLab