From 32427ebaa227c74fc99a218a2cb996c177fe9e49 Mon Sep 17 00:00:00 2001
From: Christopher Schmied <schmied@mpi-cbg.de>
Date: Wed, 10 Jun 2015 15:32:20 +0200
Subject: [PATCH] Updated fusion.bsh and fixed bug in export_output.bsh

Bug in export_output.bsh: the script took the subsampling factors and
chunk sizes from the resave_hdf5 settings, which are not suitable for
the fused output. Added separate subsampling and chunk size options
(subsampling_output, chunk_sizes_output) in the hdf5_output section.
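
The new options live in the hdf5_output section of the yaml config and
are passed through by the hdf5_xml_output / resave_hdf5_output rules.
A minimal sketch, with the values used in tomancak_test_cluster.yaml
below:

    hdf5_output: {
      # other hdf5_output keys unchanged
      subsampling_output: '"{{ {{1,1,1}}, {{2,2,2}} }}"',
      chunk_sizes_output: '"{{ {{16,16,16}}, {{16,16,16}} }}"'
      }

which the Snakefile hands to export_output.bsh as

    -Dsubsampling_factors={subsampling_output} \
    -Dhdf5_chunk_sizes={chunk_sizes_output} \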
---
 spim_registration/timelapse/Snakefile         | 15 +++++++-------
 spim_registration/timelapse/export_output.bsh | 19 +++++++-----------
 spim_registration/timelapse/fusion.bsh        | 20 +++++++++++++------
 .../timelapse/tomancak_test_cluster.yaml      | 11 ++++++----
 4 files changed, 36 insertions(+), 29 deletions(-)

diff --git a/spim_registration/timelapse/Snakefile b/spim_registration/timelapse/Snakefile
index a0a6858..7b11770 100755
--- a/spim_registration/timelapse/Snakefile
+++ b/spim_registration/timelapse/Snakefile
@@ -21,6 +21,7 @@ xml_merge_in = produce_xml_merge_job_files(datasets)
 
 rule done:
     input: [ ds + "_output_hdf5" for ds in datasets ]
+    #input: [ ds + "_fusion" for ds in datasets ]
     
 rule resave_prepared:
     input: expand("{dataset}.{suffix}",dataset=[ config["common"]["hdf5_xml_filename"] ], suffix=["xml","h5"])
@@ -288,7 +289,7 @@ rule fusion:
     		-- --no-splash {path_bsh}""",
                                     config["common"],
                                     config["fusion"],
-                                    config["resave_czi_hdf5"],
+                                    config["resave_hdf5"],
                                     path_bsh=config["common"]["bsh_directory"] + config["fusion"]["bsh_file"],
                                     jdir=JOBDIR,
                                     file_id_w="{wildcards.file_id}",
@@ -417,8 +418,8 @@ rule hdf5_xml_output:
                 -Dresave_channel={resave_channel} \
                 -Dresave_illumination={resave_illumination} \
                 -Dresave_timepoint={resave_timepoint} \
-                -Dsubsampling_factors={subsampling_factors} \
-                -Dhdf5_chunk_sizes={hdf5_chunk_sizes} \
+                -Dsubsampling_factors={subsampling_output} \
+                -Dhdf5_chunk_sizes={chunk_sizes_output} \
                 -Dtimepoints_per_partition={timepoints_per_partition} \
                 -Dsetups_per_partition={setups_per_partition} \
                 -Drun_only_job_number=0 \
@@ -429,7 +430,7 @@ rule hdf5_xml_output:
            config["hdf5_output"],
            config["resave_hdf5"],
            jdir=JOBDIR,
-           path_bsh=config["common"]["bsh_directory"] + config["resave_hdf5"]["bsh_file"])
+           path_bsh=config["common"]["bsh_directory"] + config["hdf5_output"]["bsh_file_hdf5"])
 
         part_string += " > {log} 2>&1 && touch {output}"
         shell(part_string)
@@ -448,8 +449,8 @@ rule resave_hdf5_output:
         -Dresave_channel={resave_channel} \
         -Dresave_illumination={resave_illumination} \
         -Dresave_timepoint={resave_timepoint} \
-        -Dsubsampling_factors={subsampling_factors} \
-        -Dhdf5_chunk_sizes={hdf5_chunk_sizes} \
+        -Dsubsampling_factors={subsampling_output} \
+        -Dhdf5_chunk_sizes={chunk_sizes_output} \
         -Dtimepoints_per_partition={timepoints_per_partition} \
         -Dsetups_per_partition={setups_per_partition} \
         -Drun_only_job_number={job_number} \
@@ -460,7 +461,7 @@ rule resave_hdf5_output:
            config["hdf5_output"],
            config["resave_hdf5"],
            jdir=JOBDIR,
-           path_bsh=config["common"]["bsh_directory"] + config["resave_hdf5"]["bsh_file"],
+           path_bsh=config["common"]["bsh_directory"] + config["hdf5_output"]["bsh_file_hdf5"],
            input_xml_base="{wildcards.xml_base}",
            job_number=int(wildcards.file_id)+1) 
    	part_string += " > {log} 2>&1 && touch {output}"
diff --git a/spim_registration/timelapse/export_output.bsh b/spim_registration/timelapse/export_output.bsh
index af20191..2a16aed 100755
--- a/spim_registration/timelapse/export_output.bsh
+++ b/spim_registration/timelapse/export_output.bsh
@@ -54,26 +54,22 @@ System.out.println( "setups_per_partition=" + setups_per_partition );
 System.out.println( "run_only_job_number=" + run_only_job_number );
 
 // Add a switch for choosing between 16 or 32 bit
-
-
 output_data_type = System.getProperty( "output_data_type" );
-
 System.out.println( "Output Datatype = " + output_data_type );
 
+convert_32bit = System.getProperty( "convert_32bit" );
 
-if ( output_data_type.equalsIgnoreCase( "16Bit" ) )
+if ( output_data_type.equalsIgnoreCase( "32Bit" ) )
 	{
-		data_string = "convert_32bit=convert_32bit";
+		data_string = "convert_32bit=" + convert_32bit;
 	}
-else if ( output_data_type.equalsIgnoreCase( "32Bit" ) )
+else if ( output_data_type.equalsIgnoreCase( "16Bit" ) )
 	{
 		data_string = "";
 	}
 
 System.out.println( "Data string = " + data_string );
 
-
-
 // Activate cluster processing
 System.out.println("=========================================================");
 System.out.println("Cluster setting:");
@@ -81,7 +77,7 @@ try{
 IJ.run("Toggle Cluster Processing", "display_cluster");
 }
 catch ( e ) { 
-	print( "[deconvolution-GPU] caught exception: "+e );
+	print( "[export output: cluster setting] caught exception: "+e );
 	//important to fail the process if exception occurs
 	runtime.exit(1);
 }
@@ -105,11 +101,10 @@ IJ.run("As HDF5",
 	"run_only_job_number=" + run_only_job_number + " " +
 	"use_deflate_compression " +
 	"export_path=" + image_file_directory + hdf5_xml_filename + " " +
-	data_string +
-	);
+	data_string + "");
 }
 catch ( e ) { 
-	print( "[deconvolution-GPU] caught exception: "+e );
+	print( "[export output: resaving caught exception: "+e );
 	//important to fail the process if exception occurs
 	runtime.exit(1);
 }
diff --git a/spim_registration/timelapse/fusion.bsh b/spim_registration/timelapse/fusion.bsh
index f88eff8..79f9d54 100755
--- a/spim_registration/timelapse/fusion.bsh
+++ b/spim_registration/timelapse/fusion.bsh
@@ -19,14 +19,17 @@ System.out.println( "Start to load Parameters:" );
 System.out.println( "-------------------------------------------------------" );
 System.out.println( "Load xml file: " );
 image_file_directory = System.getProperty( "image_file_directory" );
+
 merged_xml = System.getProperty( "merged_xml" );
+if ( ! merged_xml.endsWith(".xml") )
+    merged_xml = merged_xml + ".xml";
 
 System.out.println( "xml_path=" + image_file_directory + merged_xml + ".xml" );
 
 // Load general Parameters
 System.out.println( "-------------------------------------------------------" );
 System.out.println( "General parameters: " );
-parallel_timepoints = System.getProperty( "parallel_timepoints" );
+int parallel_timepoints = Integer.parseInt( System.getProperty( "parallel_timepoints" ) );
 process_timepoint = System.getProperty( "process_timepoint" );
 process_channel = System.getProperty( "process_channel" );
 process_illumination = System.getProperty( "process_illumination" );
@@ -72,9 +75,9 @@ System.out.println( "imglib2_data_container=" + imglib2_data_container );
 // Execute Fiji Plugin
 System.out.println( "=======================================================" );
 System.out.println( "Starting Fusion" );
-
+try{
 IJ.run("Fuse/Deconvolve Dataset", 
-	"select_xml=" + image_file_directory + merged_xml + ".xml " +
+	"select_xml=" + image_file_directory + merged_xml + " " +
 	"process_angle=[" + process_angle + "] " +	
         "process_illumination=[" + process_illumination + "] " +
         "process_timepoint=[" + process_timepoint + "] " +
@@ -95,11 +98,16 @@ IJ.run("Fuse/Deconvolve Dataset",
 	"imglib2_container=" + imglib2_container_fusion + " " + 
 	"process_views_in_paralell=All " +
 	"interpolation=[" + interpolation + "] " + 
-	"blend " +
-	"content-based " +
+	//"blend " +
+	//"content-based " +
 	"output_file_directory=" + image_file_directory + " " +
 	"lossless " +
 	"imglib2_data_container=[" + imglib2_data_container + "]");
-
+}
+catch ( e ) {
+	    print( "[Contentbased multiview fusion] caught exception: "+e );
+	    //important to fail the process if exception occurs
+	    runtime.exit(1);
+}
 /* shutdown */
 runtime.exit(0);
diff --git a/spim_registration/timelapse/tomancak_test_cluster.yaml b/spim_registration/timelapse/tomancak_test_cluster.yaml
index bab766e..817e3c1 100755
--- a/spim_registration/timelapse/tomancak_test_cluster.yaml
+++ b/spim_registration/timelapse/tomancak_test_cluster.yaml
@@ -15,7 +15,8 @@ common: {
   pixel_distance_y: '0.28590106964',
   pixel_distance_z: '1.50000',
   pixel_unit: "um",
-  fusion_switch: "deconvolution"
+  # fusion_switch: "deconvolution"
+  fusion_switch: "fusion"
   }
               
 define_xml_czi: {
@@ -96,7 +97,7 @@ Dublicate_transformations: {
   
 fusion: {
   bsh_file: "fusion.bsh",
-  downsample: '1',
+  downsample: '4',
   fused_image: '"Append to current XML Project"',
   imglib2_container_fusion: '"ArrayImg"',
   imglib2_data_container: '"ArrayImg (faster)"',
@@ -160,7 +161,7 @@ deconvolution: {
 hdf5_output: {
   output_image_file_pattern: 'TP{{t}}_Chgreen_Ill0_Ang0,72,144,216,288.tif',
   output_xml: '"fused_Single_Channel"',
-  output_hdf5_xml: '"hdf5_fused_Stock68"',
+  output_hdf5_xml: '"hdf5_fused_Single_Channel"',
   output_multiple_channels: '"NO (one channel)"',
   output_timepoints: '0-4',
   output_pixel_distance_x: 0.5718,
@@ -168,13 +169,15 @@ hdf5_output: {
   output_pixel_distance_z: 0.5718,
   output_pixel_unit: 'um',
   output_channels: "green",
-  output_data_type: "32Bit",
+  output_data_type: "16Bit",
   convert_32bit: '"[Use min/max of first image (might saturate intenities over time)]"',
   output_type_of_dataset: '"Image Stacks (ImageJ Opener)"',
   output_multiple_timepoints: '"YES (one file per time-point)"',
   output_multiple_angles: '"NO (one angle)"',
   output_illumination_directions: '"NO (one illumination direction)"',
   output_imglib_container: '"ArrayImg (faster)"',
+  subsampling_output: '"{{ {{1,1,1}}, {{2,2,2}} }}"',
+  chunk_sizes_output: '"{{ {{16,16,16}}, {{16,16,16}} }}"',
   bsh_file_define: "define_output.bsh",
   bsh_file_hdf5: "export_output.bsh"
   }
-- 
GitLab