From 7d7dbd9583171ffb0254b41cbbeaab9864742878 Mon Sep 17 00:00:00 2001
From: Christopher Schmied <schmied@mpi-cbg.de>
Date: Fri, 22 May 2015 18:49:31 +0200
Subject: [PATCH] Add rule to resave .czi dataset as hdf5

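The resave_hdf5 rule is renamed to resave_czi_hdf5. It now builds one
Fiji export call per timepoint from a single template, fills in
run_only_job_number for each call and launches all of them in the
background. parallel_timepoints in tomancak_czi.json holds the number
of timepoints instead of a range expression, and the braces in
subsampling_factors and hdf5_chunk_sizes are escaped so that they
survive the second format pass.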
---
 spim_registration/timelapse/Snakefile         | 39 ++++++++++++-------
 spim_registration/timelapse/tomancak_czi.json |  8 ++--
 2 files changed, 28 insertions(+), 19 deletions(-)

diff --git a/spim_registration/timelapse/Snakefile b/spim_registration/timelapse/Snakefile
index d98baea..149341a 100755
--- a/spim_registration/timelapse/Snakefile
+++ b/spim_registration/timelapse/Snakefile
@@ -67,14 +67,15 @@ rule define_xml_czi:
         shell(cmd_string)
 
 # resave .czi dataset as hdf5	
-rule resave_hdf5:
+rule resave_czi_hdf5:
     input: rules.define_xml_czi.output, glob.glob('*.czi')
-    output: glob.glob('*h5')
+    output: "hdf5_Stock68.h5", "hdf5_Stock68.xml" 
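+    # must match the hdf5_xml_filename value in tomancak_czi.json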
     message: "Execute resave_hdf5 on {input}"
+    threads: int(config["resave_czi_hdf5"]["parallel_timepoints"]) # parallel_timepoints gives the number of timepoints exported at once
     log: "export.log"
-    run: 
-        for i in config["resave_hdf5"]["parallel_timepoints"]:
-    	    cmd_string = produce_string("""{fiji-prefix} {fiji-app} \
+    run:
+        part_string = produce_string("""{fiji-prefix} {fiji-app} \
         -Dimage_file_directory={jdir} \
         -Dfirst_xml_filename={first_xml_filename} \
         -Dhdf5_xml_filename={hdf5_xml_filename} \
@@ -86,17 +87,25 @@ rule resave_hdf5:
         -Dhdf5_chunk_sizes={hdf5_chunk_sizes} \
         -Dtimepoints_per_partition={timepoints_per_partition} \
         -Dsetups_per_partition={setups_per_partition} \
-        -Drun_only_job_number=%i \
-        -- --no-splash {path_bsh}""",
-        config["common"],
-        config["define_xml_czi"],
-        config["resave_hdf5"],
-        jdir=JOBDIR,
-        path_bsh=config["common"]["bsh_directory"] + config["resave_hdf5"]["bsh_file"]) % i 
-            i +=1
+        -Drun_only_job_number={job_number} \
+        -- --no-splash {path_bsh} > {log}-{job_number}.log 2>&1 &\n""", # the trailing & launches all per-timepoint jobs in the background at once
+           config["common"],
+           config["define_xml_czi"],
+           config["resave_czi_hdf5"],
+           jdir=JOBDIR,
+           path_bsh=config["common"]["bsh_directory"] + config["resave_czi_hdf5"]["bsh_file"])
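+        # {job_number} stays unresolved until the .format() call below; literal braces in
+        # the json values are escaped as {{ }} so that .format() reduces them to { }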
+        cmd_string = ""
+        for i in range(int(config["resave_czi_hdf5"]["parallel_timepoints"])):
+            cmd_string += part_string.format(job_number=i)
+        
+        shell(cmd_string + "wait")  # wait for the backgrounded export jobs to finish
+
+# TODO: define .tif dataset and resave it as hdf5, analogous to the czi rules above
+# rule define_xml_tif:
+# rule resave_tif_hdf5:
         
-        cmd_string += "> {log} 2>&1"
-        shell(cmd_string)
 
 rule registration:
     # input: "{xml_base}-{file_id}-00.h5"
diff --git a/spim_registration/timelapse/tomancak_czi.json b/spim_registration/timelapse/tomancak_czi.json
index 358cdff..b98a0ec 100755
--- a/spim_registration/timelapse/tomancak_czi.json
+++ b/spim_registration/timelapse/tomancak_czi.json
@@ -28,16 +28,16 @@
     	    "bsh_file" : "define_czi.bsh" 
     },
     
-    "resave_hdf5" :
+    "resave_czi_hdf5" :
     {
-    	    "parallel_timepoints" : "range(0,2)",
+    	    "parallel_timepoints" : "2",
     	    "hdf5_xml_filename" : "\"hdf5_Stock68\"",
     	    "resave_angle" : "\"All angles\"",
     	    "resave_channel" : "\"All channels\"",
     	    "resave_illumination" : "\"All illuminations\"",
     	    "resave_timepoint" : "\"All Timepoints\"",
-    	    "subsampling_factors" : "\"{ {1,1,1}, {2,2,1}, {4,4,1}, {8,8,1} }\"",
-    	    "hdf5_chunk_sizes" : "\"{ {32,32,4}, {32,32,4}, {16,16,16}, {16,16,16} }\"",
+    	    "subsampling_factors" : "\"{{ {{1,1,1}}, {{2,2,1}}, {{4,4,1}}, {{8,8,1}} }}\"",
+    	    "hdf5_chunk_sizes" : "\"{{ {{32,32,4}}, {{32,32,4}}, {{16,16,16}}, {{16,16,16}} }}\"",
     	    "timepoints_per_partition" : "1",
     	    "setups_per_partition" : "0",
     	    "bsh_file" : "export.bsh"
-- 
GitLab