diff --git a/spim_registration/timelapse/Snakefile b/spim_registration/timelapse/Snakefile
index 27a48bf546cb55f91f2f1a85aa5f663ee61bd8e8..8b4aa87b6df3b251e308c2a5ed285435036b20cf 100755
--- a/spim_registration/timelapse/Snakefile
+++ b/spim_registration/timelapse/Snakefile
@@ -9,9 +9,10 @@ if JOBDIR[-1] != "/": # this checks if jobdir ends with slash if not it adds a s
 #data specific config file, expected to be inside JOBDIR
 configfile: "tomancak_czi.json"
 
-padding_format = "-{0:0"+str(padding_of_file_id(int(config["common"]["ntimepoints"])))+"d}-00.h5"
+padding_format = "{0:0"+str(padding_of_file_id(int(config["common"]["ntimepoints"])))+"d}"
+ds_format = "-"+padding_format+"-00.h5"
 # problematic: needs padding of file_id
-datasets = [ config["common"]["hdf5_xml_filename"].strip('\"')+padding_format.format(item) for item in range(int(config["common"]["ntimepoints"])) ]
+datasets = [ config["common"]["hdf5_xml_filename"].strip('\"') + ds_format.format(item) for item in range(int(config["common"]["ntimepoints"])) ]
 
 xml_merge_in = produce_xml_merge_job_files(datasets)
 
@@ -31,32 +32,30 @@ rule define_xml_czi:
     output: config["common"]["first_xml_filename"] + ".xml"
     log: "define_xml_czi.log"
     run: 
-        cmd_string = produce_string(
-        	"""{fiji-prefix} {fiji-app} \
+        cmd_string = produce_string("""{fiji-prefix} {fiji-app} \
         -Dimage_file_directory={jdir} \
-	-Dfirst_czi={first_czi} \
-	-Dangle_1={angle_1} \
-	-Dangle_2={angle_2} \
-	-Dangle_3={angle_3} \
-	-Dangle_4={angle_4} \
-	-Dangle_5={angle_5} \
-	-Dchannel_1={channel_1} \
-	-Dchannel_2={channel_2} \
-	-Dillumination_1={illumination_1} \
-	-Drotation_around={rotation_around} \
-	-Dpixel_distance_x={pixel_distance_x} \
-	-Dpixel_distance_y={pixel_distance_y} \
-	-Dpixel_distance_z={pixel_distance_z} \
-	-Dpixel_unit={pixel_unit} \
-	-Dfirst_xml_filename={first_xml_filename} \
-	-- --no-splash {path_bsh}""",
-	config["common"],
-	config["define_xml_czi"],
-	jdir=JOBDIR,
-	path_bsh=config["common"]["bsh_directory"] + config["define_xml_czi"]["bsh_file"])
-	
-	cmd_string += " > {log} 2>&1"
+           -Dfirst_czi={first_czi} \
+           -Dangle_1={angle_1} \
+           -Dangle_2={angle_2} \
+           -Dangle_3={angle_3} \
+           -Dangle_4={angle_4} \
+           -Dangle_5={angle_5} \
+           -Dchannel_1={channel_1} \
+           -Dchannel_2={channel_2} \
+           -Dillumination_1={illumination_1} \
+           -Drotation_around={rotation_around} \
+           -Dpixel_distance_x={pixel_distance_x} \
+           -Dpixel_distance_y={pixel_distance_y} \
+           -Dpixel_distance_z={pixel_distance_z} \
+           -Dpixel_unit={pixel_unit} \
+           -Dfirst_xml_filename={first_xml_filename} \
+           -- --no-splash {path_bsh}""",
+                                       config["common"],
+                                       config["define_xml_czi"],
+                                       jdir=JOBDIR,
+                                       path_bsh=config["common"]["bsh_directory"] + config["define_xml_czi"]["bsh_file"])
+        cmd_string += " > {log} 2>&1"
         shell(cmd_string)
         
 # defining xml for tif dataset
 rule define_xml_tif:
@@ -83,10 +84,12 @@ rule define_xml_tif:
         -Dmultiple_angles={multiple_angles} \
         -Dimglib_container={imglib_container} \
 	-- --no-splash {path_bsh}""",
-	config["common"],
-	config["define_xml_tif"],
-	jdir=JOBDIR,
-	path_bsh=config["common"]["bsh_directory"] + config["define_xml_tif"]["bsh_file"])
+	   config["common"],
+	   config["define_xml_tif"],
+	   jdir=JOBDIR,
+	   path_bsh=config["common"]["bsh_directory"] + config["define_xml_tif"]["bsh_file"],
+           timepoints="1-"+str(config["common"]["ntimepoints"])
+        )
 	
 	cmd_string +=" > {log} 2>&1"
 	shell(cmd_string)
@@ -158,7 +161,7 @@ rule resave_hdf5:
        	       	
 rule registration:
     input:  "{xml_base}-{file_id}-00.h5" 
-    output: "{xml_base}-{file_id,\d+}-00.h5_registered", #"{xml_base}.job_{file_id,\d+}.xml"
+    output: "{xml_base}.job_{file_id,\d+}.xml"#, "{xml_base}-{file_id,\d+}-00.h5_registered", 
     log: "{xml_base}-{file_id}-registration.log"
     run:
         cmd_string = produce_string(
@@ -204,7 +207,7 @@ rule registration:
         #shell("touch {output}")
 
 rule xml_merge:
-    input: [ item+"_registered" for item in datasets ] #xml_merge_in 
+    input: [ config["common"]["hdf5_xml_filename"].strip('\"') + ".job_" + padding_format.format(item) + ".xml" for item in range(int(config["common"]["ntimepoints"])) ] # [ item+"_registered" for item in datasets ]
     output: "{xml_base}_merge.xml"
     log: "{xml_base}_merge.log"
     run:
@@ -215,7 +218,6 @@ rule xml_merge:
         -- --no-splash {path_bsh}""",
                                     config["common"],
                                     config["xml_merge"],
-                                    log="{log}",
                                     path_bsh=config["common"]["bsh_directory"] + config["xml_merge"]["bsh_file"],
                                     jdir=JOBDIR,
                                     output="{output}")
@@ -470,7 +472,7 @@ rule resave_hdf5_output:
        	       
 
 rule distclean:
-    params : glob.glob(config["common"]["hdf5_xml_filename"].strip('\"')+"*"), glob.glob(config["common"]["first_xml_filename"].strip('\"')+"*"), glob.glob("*registered"), glob.glob("*log"), glob.glob("*_deconvolved"), glob.glob("*.xml~*"),"interestpoints", glob.glob("*empty"), expand("{dataset}.{suffix}",dataset=[ config["common"]["hdf5_xml_filename"].strip('\"')], suffix=["xml","h5"])# xml_merge_in, 
+    params : glob.glob(config["common"]["hdf5_xml_filename"].strip('\"')+"*"), glob.glob(config["common"]["first_xml_filename"].strip('\"')+"*"), glob.glob("*registered"), glob.glob("*_fusion"), glob.glob("*_timelapse"), glob.glob("*log"), glob.glob("*_deconvolved"), glob.glob("*.xml~*"),"interestpoints", glob.glob("*empty"), expand("{dataset}.{suffix}",dataset=[ config["common"]["hdf5_xml_filename"].strip('\"')], suffix=["xml","h5"])# xml_merge_in, 
    
-    message : "removing {params}"
+    message : os.path.abspath(os.path.curdir) + ": rm -rf {params}"
     shell : "rm -rf {params}"
diff --git a/spim_registration/timelapse/registration.bsh b/spim_registration/timelapse/registration.bsh
index f7c6b26bb1edb96765c2da801b2c3f42da75e7e6..849808589f1891bbadc20f0f08f98454e90a5b8f 100644
--- a/spim_registration/timelapse/registration.bsh
+++ b/spim_registration/timelapse/registration.bsh
@@ -24,6 +24,8 @@ System.out.println( "-------------------------------------------------------" );
 System.out.println( "General parameters: " );
 
 parallel_timepoints = Integer.parseInt(System.getProperty( "parallel_timepoints" ));
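+// keep the raw, zero-padded string as well: Integer.parseInt("02") yields 2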
+unique_id = System.getProperty( "parallel_timepoints" );
 process_timepoint = System.getProperty( "process_timepoint" );
 process_channel = System.getProperty( "process_channel" );
 process_illumination = System.getProperty( "process_illumination" );
@@ -120,7 +121,7 @@ System.out.println( "Starting Detection of Interest Points" );
 try {
 IJ.run("Detect Interest Points for Registration", 
 	"select_xml=" + xml_path + xml_filename + ".xml " +  
-	"unique_id=" + parallel_timepoints + " " +
+	"unique_id=" + unique_id + " " +
 	"process_angle=[" + process_angle + "] " + 
 	"process_channel=[" + process_channel + "] " +
 	"process_illumination=[" + process_illumination + "] " + 
@@ -151,7 +152,7 @@ System.out.println( "Starting Registration" );
 
 try {
     IJ.run("Register Dataset based on Interest Points",
-	   "select_xml=" + xml_path + xml_filename + ".job_" + parallel_timepoints + ".xml " +
+	   "select_xml=" + xml_path + xml_filename + ".job_" + unique_id + ".xml " +
 	   "process_angle=[" + process_angle + "] " +
 	   "process_channel=[" + process_channel + "] " +
 	   "process_illumination=[" + process_illumination + "] " +
diff --git a/spim_registration/timelapse/timelaps_utils.py b/spim_registration/timelapse/timelaps_utils.py
index 353bd094593d54558975149ef5867aaf56053a77..6a2bebe5395e172fe6c6cb394da4e3efff4ee3e1 100644
--- a/spim_registration/timelapse/timelaps_utils.py
+++ b/spim_registration/timelapse/timelaps_utils.py
@@ -25,4 +25,5 @@ def produce_string(_fstring, *args, **kwargs):
    return _fstring.format(**contents)
 
 def padding_of_file_id(_n_timepoints):
-   return math.ceil(math.log10(_n_timepoints))
+   # clamp to a minimum width of 2 so file ids format as "00", "01", ...
+   return max(math.ceil(math.log10(_n_timepoints)), 2)
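
A quick sanity check of the clamped padding width, assuming the helper as
rewritten above:

    import math

    def padding_of_file_id(n):                # mirrors the patched helper
        return max(math.ceil(math.log10(n)), 2)

    assert padding_of_file_id(2) == 2         # was 1 before the clamp
    assert padding_of_file_id(3) == 2         # ceil(log10(3)) == 1, clamped up
    assert padding_of_file_id(101) == 3       # larger widths pass through
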
diff --git a/spim_registration/timelapse/tomancak_czi.json b/spim_registration/timelapse/tomancak_czi.json
index 317466fd5fcb72f8cb5a0ee4b98721a3b5477d4d..7c14583d1bd6ecc7515a1f8f586b8365ab8f38f1 100755
--- a/spim_registration/timelapse/tomancak_czi.json
+++ b/spim_registration/timelapse/tomancak_czi.json
@@ -5,11 +5,11 @@
 	"fiji-prefix" : "/sw/bin/xvfb-run -a",
 	"directory_cuda" : "/lustre/projects/hpcsupport/steinbac/unicore/christopher/unicore_jobs/Fiji.app.cuda_new/lib/",
 	"merged_xml" : "hdf5_test_unicore_merge",
-	"bsh_directory" : "/projects/pilot_spim/Christopher/snakemake-workflows/spim_registration/timelapse/",
+	"bsh_directory" : "/home/steinbac/development/cschmied-snakemake-workflows/spim_registration/timelapse/",
 	"first_xml_filename" : "test_unicore",
 	"hdf5_xml_filename" : "\"hdf5_test_unicore\"",
 	"fusion_switch" : "deconvolution",
-	"ntimepoints" : 2
+	"ntimepoints" : 3
     },
     
     "define_xml_czi" :