From 455f7ce274ca49affa98dd7cdd42c755efc09e5c Mon Sep 17 00:00:00 2001
From: Christopher Schmied <schmied@mpi-cbg.de>
Date: Wed, 27 May 2015 17:34:38 +0200
Subject: [PATCH] 1. successful trial run

---
 spim_registration/timelapse/Snakefile         | 10 ++++----
 spim_registration/timelapse/export.bsh        | 24 ++++++++++---------
 spim_registration/timelapse/tomancak_czi.json | 10 ++++----
 3 files changed, 23 insertions(+), 21 deletions(-)

diff --git a/spim_registration/timelapse/Snakefile b/spim_registration/timelapse/Snakefile
index a40074c..778c1e2 100755
--- a/spim_registration/timelapse/Snakefile
+++ b/spim_registration/timelapse/Snakefile
@@ -93,7 +93,7 @@ ruleorder: define_xml_tif > define_xml_czi
 rule hdf5_xml:
     input: config["common"]["first_xml_filename"] + ".xml"
     output: expand("{dataset}.{suffix}",dataset=[ config["common"]["hdf5_xml_filename"].strip('\"')], suffix=["xml","h5"]),
-           [ item+"_empty" for item in datasets ]
+           [ item+"_xml" for item in datasets ]
     log: "hdf5_xml.log"
     run:
         part_string = produce_string(
@@ -128,8 +128,8 @@ rule hdf5_xml:
 
 # resave .czi/.tif dataset as hdf5
 rule resave_hdf5:
-    input: "{xml_base}-{file_id,\d+}-00.h5_empty" # rules.hdf5_xml.output
-    output: "{xml_base}-{file_id,\d+}-00.h5"
+    input: rules.hdf5_xml.output # "{xml_base}-{file_id,\d+}-00.h5_xml"
+    output: "{xml_base}-{file_id,\d+}-00.h5", "{xml_base}-{file_id,\d+}-00.h5_hdf5"
     log: "resave_hdf5-{file_id}.log"
     run:
         part_string = produce_string(
@@ -154,8 +154,8 @@ rule resave_hdf5:
            jdir=JOBDIR,
            path_bsh=config["common"]["bsh_directory"] + config["resave_hdf5"]["bsh_file"],
            input_xml_base="{wildcards.xml_base}",
-           job_number=int(wildcards.file_id)+1) # problematic calculation not possible in cannot deal wiht wildcards file_id
-        part_string += " > {log} 2>&1"
+           job_number=int(wildcards.file_id)+1)
+        part_string += " > {log} 2>&1 && touch {output}"
         shell(part_string)
 
 rule registration:
diff --git a/spim_registration/timelapse/export.bsh b/spim_registration/timelapse/export.bsh
index 9722eb6..9905b6e 100755
--- a/spim_registration/timelapse/export.bsh
+++ b/spim_registration/timelapse/export.bsh
@@ -61,16 +61,18 @@ System.out.println( "run_only_job_number=" + run_only_job_number );
 job_type = System.getProperty( "job_type" );
 System.out.println( "Job type = " + job_type );
 
-if ( job_type.equalsIgnoreCase( "xml" ) )
-	{
-	job_number = 0;
-	}
-else if ( job_type.equalsIgnoreCase( "hdf5" ) )
-	{
-	job_number = run_only_job_number + 1;
-	}
-
-System.out.println( "Job Number = " + job_number );
+//
+//if ( job_type.equalsIgnoreCase( "xml" ) )
+//	{
+//	job_number = 0;
+//	}
+//else if ( job_type.equalsIgnoreCase( "hdf5" ) )
+//	{
+//	job_number = run_only_job_number + 1;
+//	}
+//
+//System.out.println( "Job Number = " + job_number );
+
 // Activate cluster processing
 System.out.println("=========================================================");
 try{
@@ -98,7 +100,7 @@ IJ.run("As HDF5",
 	"split_hdf5 " +
 	"timepoints_per_partition=" + timepoints_per_partition + " " +
 	"setups_per_partition=" + setups_per_partition + " " +
-	"run_only_job_number=" + job_number + " " +
+	"run_only_job_number=" + run_only_job_number + " " +
 	"use_deflate_compression " +
 	"export_path=" + image_file_directory + hdf5_xml_filename );
 }
diff --git a/spim_registration/timelapse/tomancak_czi.json b/spim_registration/timelapse/tomancak_czi.json
index 5a46e1c..b1dbb8e 100755
--- a/spim_registration/timelapse/tomancak_czi.json
+++ b/spim_registration/timelapse/tomancak_czi.json
@@ -33,10 +33,10 @@
 
     "define_xml_tif" :
     {
-	"timepoints" : "1-2",
-	"acquisition_angles" : "1,2,3,4,5",
-	"channels" : "0,1",
-	"image_file_pattern" : "spim_TL{{tt}}_Angle{{a}}.tif",
+	"timepoints" : "0-1",
+	"acquisition_angles" : "0,72,144,216,288",
+	"channels" : "0",
+	"image_file_pattern" : "img_TL{{t}}_Angle{{a}}.tif",
 	"pixel_distance_x" : "0.2875535786151886",
 	"pixel_distance_y" : "0.2875535786151886",
 	"pixel_distance_z" : "1.50000",
@@ -70,7 +70,7 @@
 	"illuminations" : "\"All illuminations\"",
 	"angle" : "\"All angles\"",
 	"channel" : "\"All channels\"",
-	"proc-ch" : "\"channel 1\"",
+	"proc-ch" : "\"channel 0\"",
 	"algorithm" : "\"Fast 3d geometric hashing (rotation invariant)\"",
 	"label_interest_points" : "\"beads\"",
 	"type_of_registration" : "\"Register timepoints individually\"",
-- 
GitLab