diff --git a/spim_registration/timelapse/Snakefile b/spim_registration/timelapse/Snakefile
index 4a41cdee566275be5ef5ad1e0433bbf7fe9e3a0f..a5ebd42518aa56accd699958797eacaa87d58e55 100755
--- a/spim_registration/timelapse/Snakefile
+++ b/spim_registration/timelapse/Snakefile
@@ -38,7 +38,7 @@ rule resave_prepared:
 # defining xml for czi dataset
 rule define_xml_czi:
     input: glob_and_escape("*.czi"), config["common"]["first_czi"]
-    output: temp (config["common"]["first_xml_filename"] + ".xml")
+    output: temp(config["common"]["first_xml_filename"] + ".xml")
     log: "logs/a1_define_xml_czi.log"
     run: 
         cmd_string = produce_string("""{fiji-prefix} {fiji-app} \
@@ -105,7 +105,7 @@ ruleorder: define_xml_czi > define_xml_tif
 rule hdf5_xml:
     input: config["common"]["first_xml_filename"] + ".xml" 
     output: expand("{dataset}.{suffix}",dataset=[ config["common"]["hdf5_xml_filename"].strip('\"')], suffix=["xml","h5"]),
-            temp([ item+"_xml" for item in datasets ])
+            [ item+"_xml" for item in datasets ]
     log: "logs/b1_hdf5_xml.log"
     run:
         part_string = produce_string(
@@ -135,7 +135,7 @@ rule hdf5_xml:
 # resave  .czi/.tif dataset as hdf5	
 rule resave_hdf5:
     input: rules.hdf5_xml.output, config["common"]["first_xml_filename"] + ".xml" # "{xml_base}-{file_id,\d+}-00.h5_xml"
-    output: "{xml_base}-{file_id,\d+}-00.h5", temp("{xml_base}-{file_id,\d+}-00.h5_hdf5")
+    output: "{xml_base}-{file_id,\d+}-00.h5", "{xml_base}-{file_id,\d+}-00.h5_hdf5"
     log: "logs/b2_resave_hdf5-{file_id}.log"
     run:
         part_string = produce_string(
@@ -164,7 +164,7 @@ rule resave_hdf5:
        	       	
 rule registration:
     input:  "{xml_base}-{file_id}-00.h5", expand("{dataset}.{suffix}",dataset=[ config["common"]["hdf5_xml_filename"].strip('\"')], suffix=["xml","h5"])
-    output: temp("{xml_base}.job_{file_id,\d+}.xml")  #, "{xml_base}-{file_id,\d+}-00.h5_registered", 
+    output: "{xml_base}.job_{file_id,\d+}.xml"  #, "{xml_base}-{file_id,\d+}-00.h5_registered", 
     log: "logs/c_{xml_base}-{file_id}-registration.log"
     run:
         cmd_string = produce_string(
@@ -236,7 +236,7 @@ rule xml_merge:
 
 rule timelapse:
     input: rules.xml_merge.output
-    output: temp(rules.xml_merge.output[0] + "_timelapse")
+    output: rules.xml_merge.output[0] + "_timelapse"
     log: "logs/d2_{xml_base}_timelapse.log"
     run:
         cmd_string = produce_string(
@@ -296,7 +296,7 @@ rule duplicate_transformations:
 
 rule fusion:
     input: [ str("{xml_base}_merge.xml_" + config["common"]["transformation_switch"] ) ], "{xml_base}-{file_id,\d+}-00.h5", merged_xml="{xml_base}_merge.xml" # rules.timelapse.output, "{xml_base}-{file_id,\d+}-00.h5", merged_xml="{xml_base}_merge.xml"
-    output: temp("{xml_base}-{file_id,\d+}-00.h5_fusion")
+    output: "{xml_base}-{file_id,\d+}-00.h5_fusion"
     log: "logs/e1_{xml_base}-{file_id,\d+}-00-fusion.log"
     run:
         cmd_string = produce_string(
@@ -367,7 +367,7 @@ rule external_transform:
 
 rule deconvolution:
     input: [ str("{xml_base}_merge.xml_" + config["common"]["transformation_switch"] ) ], "{xml_base}-{file_id,\d+}-00.h5", merged_xml="{xml_base}_merge.xml" # rules.timelapse.output, "{xml_base}-{file_id,\d+}-00.h5", merged_xml="{xml_base}_merge.xml" # rules.external_transform.output, "{xml_base}-{file_id,\d+}-00.h5", merged_xml="{xml_base}_merge.xml"
-    output: temp("{xml_base}-{file_id,\d+}-00.h5_deconvolution")
+    output: "{xml_base}-{file_id,\d+}-00.h5_deconvolution"
     log: "logs/e2_{xml_base}-{file_id,\d+}-00-deconvolution.log"
     run:
         cmd_string = produce_string(
@@ -447,7 +447,7 @@ rule define_output:
 rule hdf5_xml_output:
     input: config["common"]["output_xml"].strip('\"') + ".xml"
     output: expand("{dataset}.{suffix}",dataset=[ config["common"]["output_hdf5_xml"].strip('\"')], suffix=["xml","h5"]),
-    	    temp([ item+"_output" for item in datasets ]), 
+    	    [ item+"_output" for item in datasets ], 
     log: "logs/f2_output_hdf5_xml.log"
     run:
         part_string = produce_string(
@@ -478,7 +478,7 @@ rule hdf5_xml_output:
 
 rule resave_hdf5_output:
     input: rules.hdf5_xml_output.output, rules.define_output.output
-    output: temp("{xml_base}-{file_id,\d+}-00.h5_output_hdf5")
+    output: "{xml_base}-{file_id,\d+}-00.h5_output_hdf5"
     log: "logs/f3_resave_output-{file_id}.log"
     run:
         part_string = produce_string(
@@ -520,5 +520,5 @@ rule distclean:
 # onsuccess:
 #    shell("mail -s \"[SUCCESS] our_cluster:{jdir} finished \" schmied@mpi-cbg.de < /dev/null".format(jdir=JOBDIR))
 
-onerror:
-    shell("mail -s \"[ERROR] out_cluster:{jdir}\" schmied@mpi-cbg.de < /dev/null".format(jdir=JOBDIR))
+#onerror:
+#   shell("mail -s \"[ERROR] our_cluster:{jdir}\" schmied@mpi-cbg.de < /dev/null".format(jdir=JOBDIR))