From f27d91374154aabad35b8b91497c69c852bc905a Mon Sep 17 00:00:00 2001
From: Christopher Schmied <schmied@mpi-cbg.de>
Date: Tue, 26 May 2015 19:40:10 +0200
Subject: [PATCH] hdf5_xml works: strip quoted filenames, fix resave_hdf5 job numbering

The hdf5_xml rule now strips the surrounding quotes from hdf5_xml_filename when
expanding its outputs and when touching the per-timepoint .h5_empty dummy files,
so the declared outputs and the files actually created match. resave_hdf5 drops
the threads declaration based on parallel_timepoints, consumes the dummy files,
and passes the raw {wildcards.file_id} to export.bsh, which parses it and adds 1
to obtain the 1-based run_only_job_number. The IJ.run calls in export.bsh are
wrapped in try/catch and exit non-zero on failure, so the process fails visibly
when the plugin throws.

---
 spim_registration/timelapse/Snakefile  | 17 +++++++----------
 spim_registration/timelapse/export.bsh | 19 ++++++++++++++++---
 2 files changed, 23 insertions(+), 13 deletions(-)
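Note below the scissors: a rough Python sketch of the naming and job-number
handoff this patch settles on. The values are hypothetical placeholders for the
config entries; the snippet only mirrors what the hdf5_xml rule and export.bsh
now do and is not part of the workflow.

    # stand-ins for config["common"]["ntimepoints"] and a quoted
    # config["common"]["hdf5_xml_filename"] (both values are made up here)
    ntimepoints = 3
    hdf5_xml_filename = '"dataset_one"'

    basename = hdf5_xml_filename.strip('"')  # same .strip('\"') the Snakefile applies
    dummies = ["{0}-{1}-00.h5_empty".format(basename, i) for i in range(ntimepoints)]
    # resave_hdf5 picks up each dummy through the {file_id} wildcard; export.bsh
    # parses that 0-based id and adds 1 to get the 1-based run_only_job_number.
    job_numbers = [i + 1 for i in range(ntimepoints)]
    print(dummies)      # ['dataset_one-0-00.h5_empty', 'dataset_one-1-00.h5_empty', ...]
    print(job_numbers)  # [1, 2, 3]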

diff --git a/spim_registration/timelapse/Snakefile b/spim_registration/timelapse/Snakefile
index 0e55826..ed64a52 100755
--- a/spim_registration/timelapse/Snakefile
+++ b/spim_registration/timelapse/Snakefile
@@ -111,9 +111,8 @@ ruleorder: define_xml_tif > define_xml_czi
 # create mother .xml/.h5
 rule hdf5_xml:
     input: config["common"]["first_xml_filename"] + ".xml" 
-    output: expand("{dataset}.{suffix}",dataset=[ config["common"]["hdf5_xml_filename"] ], suffix=["xml","h5"]),
+    output: expand("{dataset}.{suffix}", dataset=[ config["common"]["hdf5_xml_filename"].strip('\"') ], suffix=["xml","h5"]),
             expand("{xml_base}-{file_id}-00.h5_empty", xml_base=[ config["common"]["hdf5_xml_filename"].strip('\"') ],file_id=range(int(config["common"]["ntimepoints"])))
-    
     log: "hdf5_xml.log"
     run:
         part_string = produce_string(
@@ -142,15 +141,14 @@ rule hdf5_xml:
 
         #create dummy files according to the number of timepoints found
         for index in range(int(config["common"]["ntimepoints"])):
-           shell("touch {basename}-{file_id}-00.h5_empty".format(basename=config["common"]["hdf5_xml_filename"],file_id=index))
+           shell("touch {basename}-{file_id}-00.h5_empty".format(basename=config["common"]["hdf5_xml_filename"], file_id=index))
         
 
 # resave .czi dataset as hdf5	
 rule resave_hdf5:
-    input: "{xml_base}-{file_id}-00.h5_empty"
+    input: "{xml_base}-{file_id}-00.h5_empty" # rules.hdf5_xml.output
     output: "{xml_base}-{file_id}-00.h5"
     message: "Execute resave_hdf5 on {input}"
-    threads: int(config["resave_hdf5"]["parallel_timepoints"]) # parallel timepoints should tell me how many timepoints to expect 
     log: "resave_hdf5-{file_id}.log"
     run:
         part_string = produce_string(
@@ -167,20 +165,19 @@ rule resave_hdf5:
         -Dtimepoints_per_partition={timepoints_per_partition} \
         -Dsetups_per_partition={setups_per_partition} \
         -Drun_only_job_number={job_number} \
-        -- --no-splash {path_bsh} > {log}-{job_number}.log 2>&1 &\n""", # the & submits everyting at once
+        -- --no-splash {path_bsh}""", # log redirection is appended to part_string below
            config["common"],
            config["define_xml_czi"],
            config["resave_hdf5"],
            jdir=JOBDIR,
            path_bsh=config["common"]["bsh_directory"] + config["resave_hdf5"]["bsh_file"],
-           intput_xml_base="{wildcards.xml_base}",
-           job_number=int("{wildcards.file_id}")+1
-        )
+           input_xml_base="{wildcards.xml_base}",
+           job_number="{wildcards.file_id}")
    	part_string += "> {log} 2>&1"
         shell(part_string) 
        	       	
 rule registration:
-    input: "{xml_base}-{file_id}-00.h5", rules.resave_hdf5.output
+    input:  "{xml_base}-{file_id}-00.h5" # rules.resave_hdf5.output 
     #input: rules.resave_hdf5.output, "{xml_base}-{file_id}-00.h5"
     output: "{xml_base}-{file_id,\d+}-00.h5_registered", #"{xml_base}.job_{file_id,\d+}.xml"
     log: "{xml_base}-{file_id}-registration.log"
diff --git a/spim_registration/timelapse/export.bsh b/spim_registration/timelapse/export.bsh
index a3f77f1..81b754f 100755
--- a/spim_registration/timelapse/export.bsh
+++ b/spim_registration/timelapse/export.bsh
@@ -45,7 +45,8 @@ subsampling_factors = System.getProperty( "subsampling_factors" );
 hdf5_chunk_sizes = System.getProperty( "hdf5_chunk_sizes" );
 timepoints_per_partition = System.getProperty( "timepoints_per_partition" );
 setups_per_partition = System.getProperty( "setups_per_partition" );
-run_only_job_number = System.getProperty( "run_only_job_number" );
+int run_only_job_number = Integer.parseInt( System.getProperty( "run_only_job_number" ) );
+run_only_job_number = run_only_job_number + 1; // file_id is 0-based, job numbers start at 1
 
 System.out.println( "subsampling_factors=" + subsampling_factors);
 System.out.println( "hdf5_chunk_sizes=" + hdf5_chunk_sizes );
@@ -55,12 +56,19 @@ System.out.println( "run_only_job_number=" + run_only_job_number );
 
 // Activate cluster processing
 System.out.println("=========================================================");
+try {
 System.out.println("Cluster setting:");
 IJ.run("Toggle Cluster Processing", "display_cluster");
-
+}
+catch ( e ) {
+	print( "[export.bsh] caught exception: " + e );
+	// important to fail the process if an exception occurs
+	runtime.exit(1);
+}
 // Executes Fiji plugin
 System.out.println("=========================================================");
 System.out.println("Start plugin:");
+try {
 IJ.run("As HDF5",
 	"select_xml=" + image_file_directory + first_xml_filename + ".xml " +
 	"resave_angle=[" + resave_angle + "] " +
@@ -76,7 +84,12 @@ IJ.run("As HDF5",
 	"run_only_job_number=" + run_only_job_number + " " +
 	"use_deflate_compression " +
 	"export_path=" + image_file_directory + hdf5_xml_filename );
-
+}
+catch ( e ) {
+	print( "[export.bsh] caught exception: " + e );
+	// important to fail the process if an exception occurs
+	runtime.exit(1);
+}
 /* shutdown */
 System.exit(0);
 
-- 
GitLab