From 004093f58cb0bc478f57e7ca7949e77bac17e484 Mon Sep 17 00:00:00 2001
From: Christopher Schmied <schmied@mpi-cbg.de>
Date: Sun, 5 Jul 2015 19:06:36 +0200
Subject: [PATCH] Log files are written into log directory in data directory

Log files are ordered
---
 spim_registration/timelapse/Snakefile | 28 +++++++++++++--------------
 1 file changed, 14 insertions(+), 14 deletions(-)

diff --git a/spim_registration/timelapse/Snakefile b/spim_registration/timelapse/Snakefile
index 6c6b744..4aa60fa 100755
--- a/spim_registration/timelapse/Snakefile
+++ b/spim_registration/timelapse/Snakefile
@@ -34,7 +34,7 @@ rule resave_prepared:
 rule define_xml_czi:
     input:glob.glob('*.czi'), config["common"]["first_czi"]
     output: config["common"]["first_xml_filename"] + ".xml"
-    log: "define_xml_czi.log"
+    log: "logs/a1_define_xml_czi.log"
     run:
         cmd_string = produce_string("""{fiji-prefix} {fiji-app} \
         -Dimage_file_directory={jdir} \
@@ -62,7 +62,7 @@ rule define_xml_czi:
 rule define_xml_tif:
     input: glob.glob(re.sub("{{.}}","*",config["common"]['image_file_pattern'])) #replaces all occurrences of {{a}} (a can be any character) by * to use the string for globbing
     output: config["common"]["first_xml_filename"] + ".xml"
-    log: "define_xml_tif.log"
+    log: "logs/a2_define_xml_tif.log"
     run:
         cmd_string = produce_string(
             """{fiji-prefix} {fiji-app} \
@@ -101,7 +101,7 @@ rule hdf5_xml:
     input: config["common"]["first_xml_filename"] + ".xml"
     output: expand("{dataset}.{suffix}",dataset=[ config["common"]["hdf5_xml_filename"].strip('\"')], suffix=["xml","h5"]),
             [ item+"_xml" for item in datasets ]
-    log: "hdf5_xml.log"
+    log: "logs/b1_hdf5_xml.log"
     run:
         part_string = produce_string(
             """{fiji-prefix} {fiji-app} \
@@ -131,7 +131,7 @@ rule hdf5_xml:
 rule resave_hdf5:
     input: rules.hdf5_xml.output # "{xml_base}-{file_id,\d+}-00.h5_xml"
     output: "{xml_base}-{file_id,\d+}-00.h5", "{xml_base}-{file_id,\d+}-00.h5_hdf5"
-    log: "resave_hdf5-{file_id}.log"
+    log: "logs/b2_resave_hdf5-{file_id}.log"
     run:
         part_string = produce_string(
             """{fiji-prefix} {fiji-app} \
@@ -160,7 +160,7 @@ rule resave_hdf5:
 rule registration:
     input: "{xml_base}-{file_id}-00.h5"
     output: "{xml_base}.job_{file_id,\d+}.xml"#, "{xml_base}-{file_id,\d+}-00.h5_registered",
-    log: "{xml_base}-{file_id}-registration.log"
+    log: "logs/c_{xml_base}-{file_id}-registration.log"
     run:
         cmd_string = produce_string(
             """{fiji-prefix} {fiji-app} \
@@ -214,7 +214,7 @@ rule registration:
 rule xml_merge:
     input: [ str(config["common"]["hdf5_xml_filename"].strip('\"')+".job_"+(padding_format.format(item))+".xml") for item in range(int(config["common"]["ntimepoints"])) ] #[ item+"_registered" for item in datasets ]
     output: "{xml_base}_merge.xml"
-    log: "{xml_base}_merge.log"
+    log: "logs/d1_{xml_base}_merge.log"
     run:
         cmd_string = produce_string(
             """{fiji-prefix} {fiji-app} \
@@ -232,7 +232,7 @@ rule xml_merge:
 rule timelapse:
     input: rules.xml_merge.output
     output: rules.xml_merge.output[0] + "_timelapse"
-    log: "{xml_base}_timelapse.log"
+    log: "logs/d2_{xml_base}_timelapse.log"
     run:
         cmd_string = produce_string(
             """{fiji-prefix} {fiji-app} \
@@ -265,7 +265,7 @@ rule timelapse:
 rule duplicate_transformations:
     input: rules.timelapse.output, merged_xml="{xml_base}_merge.xml"
     output: rules.timelapse.output[0] + "_duplicate"
-    log: "{xml_base}_duplicate_transformations.log"
+    log: "logs/d3_{xml_base}_duplicate_transformations.log"
     run:
         cmd_string = produce_string(
             """{fiji-prefix} {fiji-app} \
@@ -292,7 +292,7 @@ rule duplicate_transformations:
 rule fusion:
     input: [ str("{xml_base}_merge.xml_" + config["common"]["transformation_switch"] ) ], "{xml_base}-{file_id,\d+}-00.h5", merged_xml="{xml_base}_merge.xml" # rules.timelapse.output, "{xml_base}-{file_id,\d+}-00.h5", merged_xml="{xml_base}_merge.xml"
     output: "{xml_base}-{file_id,\d+}-00.h5_fusion"
-    log: "{xml_base}-{file_id,\d+}-00-fusion.log"
+    log: "logs/e1_{xml_base}-{file_id,\d+}-00-fusion.log"
     run:
         cmd_string = produce_string(
             """{fiji-prefix} {fiji-app} \
@@ -335,7 +335,7 @@ rule fusion:
 rule external_transform:
     input: rules.timelapse.output, merged_xml="{xml_base}_merge.xml"
     output: rules.timelapse.output[0] + "_external_trafo"
-    log: "external_transform.log"
+    log: "logs/e2_external_transform.log"
     run:
         cmd_string = produce_string(
             """{fiji-prefix} {fiji-app} \
@@ -363,7 +363,7 @@ rule external_transform:
 rule deconvolution:
     input: [ str("{xml_base}_merge.xml_" + config["common"]["transformation_switch"] ) ], "{xml_base}-{file_id,\d+}-00.h5", merged_xml="{xml_base}_merge.xml" # rules.timelapse.output, "{xml_base}-{file_id,\d+}-00.h5", merged_xml="{xml_base}_merge.xml" # rules.external_transform.output, "{xml_base}-{file_id,\d+}-00.h5", merged_xml="{xml_base}_merge.xml"
     output: "{xml_base}-{file_id,\d+}-00.h5_deconvolution"
-    log: "{xml_base}-{file_id,\d+}-00-deconvolution.log"
+    log: "logs/e2_{xml_base}-{file_id,\d+}-00-deconvolution.log"
     run:
         cmd_string = produce_string(
             """{fiji-prefix} {fiji-app} \
@@ -410,7 +410,7 @@ rule deconvolution:
 rule define_output:
     input: [ item + "_" + config["common"]["fusion_switch"] for item in datasets ], glob.glob('TP*')
     output: config["common"]["output_xml"].strip('\"') + ".xml"
-    log: "define_output.log"
+    log: "logs/f1_define_output.log"
     run:
         cmd_string = produce_string(
             """{fiji-prefix} {fiji-app} \
@@ -443,7 +443,7 @@ rule hdf5_xml_output:
     input: config["common"]["output_xml"].strip('\"') + ".xml"
     output: expand("{dataset}.{suffix}",dataset=[ config["common"]["hdf5_xml_filename"].strip('\"')], suffix=["xml","h5"]),
             [ item+"_output" for item in datasets ]
-    log: "output_hdf5_xml.log"
+    log: "logs/f2_output_hdf5_xml.log"
     run:
         part_string = produce_string(
             """{fiji-prefix} {fiji-app} \
@@ -474,7 +474,7 @@ rule hdf5_xml_output:
 rule resave_hdf5_output:
     input: rules.hdf5_xml_output.output
     output: "{xml_base}-{file_id,\d+}-00.h5_output_hdf5"
-    log: "resave_output-{file_id}.log"
+    log: "logs/f3_resave_output-{file_id}.log"
     run:
         part_string = produce_string(
             """{fiji-prefix} {fiji-app} \
--
GitLab