Commit f27d9137 authored by Christopher Schmied

hdf5_xml works

parent 8b9b17a8
@@ -111,9 +111,8 @@ ruleorder: define_xml_tif > define_xml_czi
# create mother .xml/.h5
rule hdf5_xml:
input: config["common"]["first_xml_filename"] + ".xml"
- output: expand("{dataset}.{suffix}",dataset=[ config["common"]["hdf5_xml_filename"] ], suffix=["xml","h5"]),
+ output: expand("{dataset}.{suffix}",dataset=[ config["common"]["hdf5_xml_filename"].strip('\"')], suffix=["xml","h5"]),
expand("{xml_base}-{file_id}-00.h5_empty", xml_base=[ config["common"]["hdf5_xml_filename"].strip('\"') ],file_id=range(int(config["common"]["ntimepoints"])))
log: "hdf5_xml.log"
run:
part_string = produce_string(
@@ -142,15 +141,14 @@ rule hdf5_xml:
# create dummy files according to the number of timepoints found
for index in range(int(config["common"]["ntimepoints"])):
- shell("touch {basename}-{file_id}-00.h5_empty".format(basename=config["common"]["hdf5_xml_filename"],file_id=index))
+ shell("touch {basename}-{file_id}-00.h5_empty".format(basename=config["common"]["hdf5_xml_filename"], file_id=index))
# resave .czi dataset as hdf5
rule resave_hdf5:
input: "{xml_base}-{file_id}-00.h5_empty"
input: "{xml_base}-{file_id}-00.h5_empty" # rules.hdf5_xml.output
output: "{xml_base}-{file_id}-00.h5"
message: "Execute resave_hdf5 on {input}"
threads: int(config["resave_hdf5"]["parallel_timepoints"]) # parallel timepoints should tell me how many timepoints to expect
log: "resave_hdf5-{file_id}.log"
run:
part_string = produce_string(
@@ -167,20 +165,19 @@ rule resave_hdf5:
-Dtimepoints_per_partition={timepoints_per_partition} \
-Dsetups_per_partition={setups_per_partition} \
-Drun_only_job_number={job_number} \
- -- --no-splash {path_bsh} > {log}-{job_number}.log 2>&1 &\n""", # the & submits everything at once
+ -- --no-splash {path_bsh}""", # log redirect is appended below; jobs are no longer backgrounded with &
config["common"],
config["define_xml_czi"],
config["resave_hdf5"],
jdir=JOBDIR,
path_bsh=config["common"]["bsh_directory"] + config["resave_hdf5"]["bsh_file"],
- intput_xml_base="{wildcards.xml_base}",
- job_number=int("{wildcards.file_id}")+1
- )
+ input_xml_base="{wildcards.xml_base}",
+ job_number="{wildcards.file_id}")
+ part_string += "> {log} 2>&1"
shell(part_string)
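The dropped job_number=int("{wildcards.file_id}")+1 is the substantive fix in this rule: the int() call is evaluated when the run block executes, before Snakemake has substituted the {wildcards.file_id} placeholder, so it operates on the literal placeholder text and always raises ValueError. The fixed version passes the placeholder through unchanged and moves the increment into the BeanShell script (the Integer.parseInt change below). A minimal sketch of the old failure, assuming produce_string() ultimately behaves like str.format():

    # minimal sketch: int() runs on the literal placeholder text, long
    # before Snakemake substitutes the wildcard value into the command
    placeholder = "{wildcards.file_id}"

    try:
        job_number = int(placeholder) + 1  # the old code path: always raises
    except ValueError as err:
        print("old approach fails:", err)

    # fixed approach: keep the placeholder intact and let the consumer of
    # the formatted command parse and increment it after substitution
    template = "-Drun_only_job_number={job_number}"
    print(template.format(job_number=placeholder))  # -Drun_only_job_number={wildcards.file_id}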
rule registration:
input: "{xml_base}-{file_id}-00.h5", rules.resave_hdf5.output
input: "{xml_base}-{file_id}-00.h5" # rules.resave_hdf5.output
#input: rules.resave_hdf5.output, "{xml_base}-{file_id}-00.h5"
output: "{xml_base}-{file_id,\d+}-00.h5_registered", #"{xml_base}.job_{file_id,\d+}.xml"
log: "{xml_base}-{file_id}-registration.log"
(The remaining hunks are in the BeanShell script that resave_hdf5 launches via {path_bsh}.)

@@ -45,7 +45,8 @@ subsampling_factors = System.getProperty( "subsampling_factors" );
hdf5_chunk_sizes = System.getProperty( "hdf5_chunk_sizes" );
timepoints_per_partition = System.getProperty( "timepoints_per_partition" );
setups_per_partition = System.getProperty( "setups_per_partition" );
- run_only_job_number = System.getProperty( "run_only_job_number" );
+ int run_only_job_number = Integer.parseInt( System.getProperty( "run_only_job_number" ) );
+ run_only_job_number = run_only_job_number + 1;
System.out.println( "subsampling_factors=" + subsampling_factors);
System.out.println( "hdf5_chunk_sizes=" + hdf5_chunk_sizes );
@@ -55,12 +56,19 @@ System.out.println( "run_only_job_number=" + run_only_job_number );
// Activate cluster processing
System.out.println("=========================================================");
+ try{
System.out.println("Cluster setting:");
IJ.run("Toggle Cluster Processing", "display_cluster");
+ }
+ catch ( e ) {
+ print( "[deconvolution-GPU] caught exception: "+e );
+ // important to fail the process if an exception occurs
+ runtime.exit(1);
+ }
// Executes Fiji plugin
System.out.println("=========================================================");
System.out.println("Start plugin:");
+ try{
IJ.run("As HDF5",
"select_xml=" + image_file_directory + first_xml_filename + ".xml " +
"resave_angle=[" + resave_angle + "] " +
@@ -76,7 +84,12 @@ IJ.run("As HDF5",
"run_only_job_number=" + run_only_job_number + " " +
"use_deflate_compression " +
"export_path=" + image_file_directory + hdf5_xml_filename );
+ }
+ catch ( e ) {
+ print( "[deconvolution-GPU] caught exception: "+e );
+ // important to fail the process if an exception occurs
+ runtime.exit(1);
+ }
/* shutdown */
System.exit(0);
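The try/catch blocks added around both IJ.run calls implement a fail-fast policy: on the cluster, a swallowed exception would let the script fall through to System.exit(0), and the job would be reported as successful with no HDF5 written. Exiting nonzero is what lets Snakemake and the scheduler notice the failure. The same wrapper pattern, sketched in Python with a made-up step function:

    # the same fail-fast pattern, sketched in Python with a made-up step;
    # any exception must become a nonzero exit code, otherwise the pipeline
    # would treat the job as successful
    import sys
    import traceback

    def run_step(step, *args):
        try:
            step(*args)
        except Exception:
            traceback.print_exc()
            sys.exit(1)  # important: fail the process so Snakemake notices

    run_step(print, "Start plugin:")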