From 6f3e8c958f95f81b0b4009bbf4181cc4eda25701 Mon Sep 17 00:00:00 2001
From: Christopher Schmied <schmied@mpi-cbg.de>
Date: Tue, 7 Jul 2015 22:08:39 +0200
Subject: [PATCH] Mark intermediate workflow outputs as temp() files

---
 spim_registration/timelapse/Snakefile  | 24 ++++++++++++------------
 spim_registration/timelapse/fusion.bsh |  2 +-
 2 files changed, 13 insertions(+), 13 deletions(-)
 mode change 100644 => 100755 spim_registration/timelapse/Snakefile

diff --git a/spim_registration/timelapse/Snakefile b/spim_registration/timelapse/Snakefile
old mode 100644
new mode 100755
index 50f2e0a..20eccb8
--- a/spim_registration/timelapse/Snakefile
+++ b/spim_registration/timelapse/Snakefile
@@ -105,7 +105,7 @@ ruleorder: define_xml_czi > define_xml_tif
 rule hdf5_xml:
     input: config["common"]["first_xml_filename"] + ".xml" 
     output: expand("{dataset}.{suffix}",dataset=[ config["common"]["hdf5_xml_filename"].strip('\"')], suffix=["xml","h5"]),
-            [ item+"_xml" for item in datasets ]
+            temp([ item+"_xml" for item in datasets ])
     log: "logs/b1_hdf5_xml.log"
     run:
         part_string = produce_string(
@@ -135,7 +135,7 @@ rule hdf5_xml:
 # resave  .czi/.tif dataset as hdf5	
 rule resave_hdf5:
     input: rules.hdf5_xml.output # "{xml_base}-{file_id,\d+}-00.h5_xml"
-    output: "{xml_base}-{file_id,\d+}-00.h5", "{xml_base}-{file_id,\d+}-00.h5_hdf5"
+    output: "{xml_base}-{file_id,\d+}-00.h5", temp("{xml_base}-{file_id,\d+}-00.h5_hdf5")
     log: "logs/b2_resave_hdf5-{file_id}.log"
     run:
         part_string = produce_string(
@@ -164,7 +164,7 @@ rule resave_hdf5:
        	       	
 rule registration:
     input:  "{xml_base}-{file_id}-00.h5" 
-    output: "{xml_base}.job_{file_id,\d+}.xml"#, "{xml_base}-{file_id,\d+}-00.h5_registered", 
+    output: temp("{xml_base}.job_{file_id,\d+}.xml")#, "{xml_base}-{file_id,\d+}-00.h5_registered", 
     log: "logs/c_{xml_base}-{file_id}-registration.log"
     run:
         cmd_string = produce_string(
@@ -236,7 +236,7 @@ rule xml_merge:
 
 rule timelapse:
     input: rules.xml_merge.output
-    output: rules.xml_merge.output[0] + "_timelapse"
+    output: temp(rules.xml_merge.output[0] + "_timelapse")
     log: "logs/d2_{xml_base}_timelapse.log"
     run:
         cmd_string = produce_string(
@@ -296,7 +296,7 @@ rule duplicate_transformations:
 
 rule fusion:
     input: [ str("{xml_base}_merge.xml_" + config["common"]["transformation_switch"] ) ], "{xml_base}-{file_id,\d+}-00.h5", merged_xml="{xml_base}_merge.xml" # rules.timelapse.output, "{xml_base}-{file_id,\d+}-00.h5", merged_xml="{xml_base}_merge.xml"
-    output: "{xml_base}-{file_id,\d+}-00.h5_fusion"
+    output: temp("{xml_base}-{file_id,\d+}-00.h5_fusion")
     log: "logs/e1_{xml_base}-{file_id,\d+}-00-fusion.log"
     run:
         cmd_string = produce_string(
@@ -367,7 +367,7 @@ rule external_transform:
 
 rule deconvolution:
     input: [ str("{xml_base}_merge.xml_" + config["common"]["transformation_switch"] ) ], "{xml_base}-{file_id,\d+}-00.h5", merged_xml="{xml_base}_merge.xml" # rules.timelapse.output, "{xml_base}-{file_id,\d+}-00.h5", merged_xml="{xml_base}_merge.xml" # rules.external_transform.output, "{xml_base}-{file_id,\d+}-00.h5", merged_xml="{xml_base}_merge.xml"
-    output: "{xml_base}-{file_id,\d+}-00.h5_deconvolution"
+    output: temp("{xml_base}-{file_id,\d+}-00.h5_deconvolution")
     log: "logs/e2_{xml_base}-{file_id,\d+}-00-deconvolution.log"
     run:
         cmd_string = produce_string(
@@ -414,7 +414,7 @@ rule deconvolution:
 
 rule define_output:
     input: [ item + "_" + config["common"]["fusion_switch"] for item in datasets ], glob.glob('TP*')
-    output: config["common"]["output_xml"].strip('\"') + ".xml"
+    output: temp(config["common"]["output_xml"].strip('\"') + ".xml")
     log: "logs/f1_define_output.log"
     run:
         cmd_string = produce_string(
@@ -447,7 +447,7 @@ rule define_output:
 rule hdf5_xml_output:
     input: config["common"]["output_xml"].strip('\"') + ".xml"
     output: expand("{dataset}.{suffix}",dataset=[ config["common"]["output_hdf5_xml"].strip('\"')], suffix=["xml","h5"]),
-    	    [ item+"_output" for item in datasets ] 
+    	    temp([ item+"_output" for item in datasets ])
     log: "logs/f2_output_hdf5_xml.log"
     run:
         part_string = produce_string(
@@ -478,7 +478,7 @@ rule hdf5_xml_output:
 
 rule resave_hdf5_output:
     input: rules.hdf5_xml_output.output
-    output: "{xml_base}-{file_id,\d+}-00.h5_output_hdf5"
+    output: temp("{xml_base}-{file_id,\d+}-00.h5_output_hdf5")
     log: "logs/f3_resave_output-{file_id}.log"
     run:
         part_string = produce_string(
@@ -518,7 +518,7 @@ rule distclean:
     
 # NOTE! The following enables mailing, which will send out a mail once an entire workflow is done (the below does not include anything in the message body, redirect from /dev/null)
 # onsuccess:
-#     shell("mail -s \"[SUCCESS] our_cluster:{jdir} finished \" xxx@mpi-cbg.de < /dev/null".format(jdir=JOBDIR))
+#    shell("mail -s \"[SUCCESS] our_cluster:{jdir} finished \" schmied@mpi-cbg.de < /dev/null".format(jdir=JOBDIR))
 
-# onerror:
-#     shell("mail -s \"[ERROR] out_cluster:{jdir}\" xxx@mpi-cbg.de < /dev/null".format(jdir=JOBDIR))
+onerror:
+    shell("mail -s \"[ERROR] out_cluster:{jdir}\" schmied@mpi-cbg.de < /dev/null".format(jdir=JOBDIR))
diff --git a/spim_registration/timelapse/fusion.bsh b/spim_registration/timelapse/fusion.bsh
index 945556c..cdc86a0 100644
--- a/spim_registration/timelapse/fusion.bsh
+++ b/spim_registration/timelapse/fusion.bsh
@@ -99,7 +99,7 @@ IJ.run("Fuse/Deconvolve Dataset",
 	"process_views_in_paralell=All " +
 	"interpolation=[" + interpolation + "] " + 
 	"blend " +
-	"content-based " +
+	//"content-based " +
 	"output_file_directory=" + image_file_directory + " " +
 	"lossless " +
 	"imglib2_data_container=[" + imglib2_data_container + "]");
-- 
GitLab