Commit b1d34bb7 authored by Christopher Schmied

Added content-based multiview fusion.

Built in a switch to decide which fusion method should be used.
Tested the workflow; it works.
parent c6b675fb
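
The switch itself lives in config["common"]["fusion_switch"] (see the config change below) and simply decides which per-dataset marker files the terminal rule done asks for: "<dataset>_fusion" for the fusion branch or "<dataset>_deconvolution" for the deconvolution branch. A minimal sketch of the idea in Python, with an illustrative dataset name:

    # Sketch of the fusion_switch mechanism; the dataset name is illustrative only.
    config = {"common": {"fusion_switch": "fusion"}}      # or "deconvolution"
    datasets = ["hdf5_test_unicore-01-00.h5"]             # hypothetical dataset

    # rule done requests one "<dataset>_<switch>" target per dataset, which is
    # produced either by the fusion rule or by the deconvolution rule.
    targets = [ds + "_" + config["common"]["fusion_switch"] for ds in datasets]
    print(targets)                                        # ['hdf5_test_unicore-01-00.h5_fusion']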
@@ -30,7 +30,8 @@ def produce_string(_fstring, *args, **kwargs):
return _fstring.format(**contents)
rule done:
- input: [ ds+"_deconvolved" for ds in datasets ]
+ #input: [ ds+"_fusion" for ds in datasets ]
+ input: [ ds + "_" + config["common"]["fusion_switch"] for ds in datasets ]
# defining xml for czi dataset
rule define_xml_czi:
@@ -236,6 +237,50 @@ rule timelapse:
cmd_string += "> {log} 2>&1 && touch {output}"
shell(cmd_string)
rule fusion:
input: rules.timelapse.output, "{xml_base}-{file_id,\d+}-00.h5", merged_xml="{xml_base}_merge.xml"
output: "{xml_base}-{file_id,\d+}-00.h5_fusion"
log: "{xml_base}-{file_id,\d+}-00-fusion.log"
run:
cmd_string = produce_string(
"""{fiji-prefix} {fiji-app} \
-Dimage_file_directory={jdir} \
-Dparallel_timepoints={file_id_w} \
-Dmerged_xml={merged_xml_file} \
-Dprocess_timepoint={process_timepoint} \
-Dprocess_channel={process_channel} \
-Dprocess_illumination={process_illumination} \
-Dprocess_angle={process_angle} \
-Dxml_output={xml_output} \
-Dfused_image={fused_image} \
-Dminimal_x={minimal_x} \
-Dminimal_y={minimal_y} \
-Dminimal_z={minimal_z} \
-Dmaximal_x={maximal_x} \
-Dmaximal_y={maximal_y} \
-Dmaximal_z={maximal_z} \
-Ddownsample={downsample} \
-Dpixel_type={pixel_type} \
-Dimglib2_container_fusion={imglib2_container_fusion} \
-Dprocess_views_in_paralell={process_views_in_paralell} \
-Dinterpolation={interpolation} \
-Dimglib2_data_container={imglib2_data_container} \
-Dsubsampling_factors={subsampling_factors} \
-Dhdf5_chunk_sizes={hdf5_chunk_sizes} \
-Dtimepoints_per_partition={timepoints_per_partition} \
-Dsetups_per_partition={setups_per_partition} \
-- --no-splash {path_bsh}""",
config["common"],
config["fusion"],
config["resave_czi_hdf5"],
path_bsh=config["common"]["bsh_directory"] + config["fusion"]["bsh_file"],
jdir=JOBDIR,
file_id_w="{wildcards.file_id}",
merged_xml_file="{input.merged_xml}"
)
cmd_string += "> {log} 2>&1 && touch {output}"
shell(cmd_string)
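
The produce_string helper used above is only partially visible in this diff (its signature in the first hunk header and its return line). From those two lines it appears to merge the positional config sections and the keyword arguments into a single mapping and format the command template with it; the following is a sketch under that assumption, with stand-in values:

    # Presumed behaviour of produce_string, reconstructed from its visible
    # signature and "return _fstring.format(**contents)"; merge order assumed.
    def produce_string(_fstring, *args, **kwargs):
        contents = {}
        for section in args:        # e.g. config["common"], config["fusion"], ...
            contents.update(section)
        contents.update(kwargs)     # e.g. path_bsh=..., jdir=..., file_id_w=...
        return _fstring.format(**contents)

    # Stand-in example mirroring the fusion rule above:
    cmd = produce_string("-Ddownsample={downsample} -- --no-splash {path_bsh}",
                         {"downsample": "1"},              # stand-in for config["fusion"]
                         path_bsh="/path/to/fusion.bsh")   # hypothetical path
    # -> "-Ddownsample=1 -- --no-splash /path/to/fusion.bsh"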
rule external_transform:
input: rules.timelapse.output, merged_xml="{xml_base}_merge.xml"
output: rules.timelapse.output[0] + "_external_trafo"
@@ -258,13 +303,14 @@ rule external_transform:
config["external_transform"],
path_bsh=config["common"]["bsh_directory"] + config["external_transform"]["bsh_file"],
jdir=JOBDIR,
- merged_xml_file="{input.merged_xml}")
+ merged_xml_file="{input.merged_xml}"
+ )
cmd_string += "> {log} 2>&1 && touch {output}"
shell(cmd_string)
rule deconvolution:
input: rules.external_transform.output, "{xml_base}-{file_id,\d+}-00.h5", merged_xml="{xml_base}_merge.xml"
- output: "{xml_base}-{file_id,\d+}-00.h5_deconvolved"
+ output: "{xml_base}-{file_id,\d+}-00.h5_deconvolution"
log: "{xml_base}-{file_id,\d+}-00-deconvolution.log"
run:
cmd_string = produce_string(
......
@@ -21,7 +21,7 @@ System.out.println( "Load xml file: " );
System.out.println( "selected_xml = " + image_file_directory + merged_xml);
// Load general Parameters
- parallel_timepoints = Integer.parseInt(System.getProperty( "parallel_timepoints" ));
+ int parallel_timepoints = Integer.parseInt(System.getProperty( "parallel_timepoints" ));
process_timepoint = System.getProperty( "process_timepoint" );
process_channel = System.getProperty( "process_channel" );
process_illumination = System.getProperty( "process_illumination" );
@@ -134,7 +134,6 @@ IJ.run("Fuse/Deconvolve Dataset",
"directory=[" + directory_cuda + "] " +
"select_native_library_for_cudafourierconvolution=libFourierConvolutionCUDALib.so " +
"gpu_1 " +
- // "gpu_2 " +
"detections_to_extract_psf_for_channel_0=" + detections_to_extract_psf_for_channel_0 + " " +
// "detections_to_extract_psf_for_channel_1=" + detections_to_extract_psf_for_channel_1 + " " + // Dual Channel
"psf_size_x=" + psf_size_x + " " +
......
// Load Fiji dependencies
import ij.IJ; // calls imagej
import ij.Prefs; // calls imagej settings
import ij.ImagePlus;
import java.lang.Runtime;
import java.io.File;
import java.io.FilenameFilter;
runtime = Runtime.getRuntime();
System.out.println(runtime.availableProcessors() + " cores available for multi-threading");
System.out.println( "Start to load Parameters:" );
Prefs.setThreads(6); // defines the number of threads allowed
print("Threads: "+Prefs.getThreads()); // prints thread setting in output
System.out.println( "Start to load Parameters:" );
// select xml
System.out.println( "-------------------------------------------------------" );
System.out.println( "Load xml file: " );
image_file_directory = System.getProperty( "image_file_directory" );
merged_xml = System.getProperty( "merged_xml" );
if ( ! merged_xml.endsWith(".xml") )
merged_xml = merged_xml + ".xml";
System.out.println( "xml_path=" + image_file_directory + merged_xml );
// Load general Parameters
System.out.println( "-------------------------------------------------------" );
System.out.println( "General parameters: " );
int parallel_timepoints = Integer.parseInt(System.getProperty( "parallel_timepoints" ));
process_timepoint = System.getProperty( "process_timepoint" );
process_channel = System.getProperty( "process_channel" );
process_illumination = System.getProperty( "process_illumination" );
process_angle = System.getProperty( "process_angle" );
System.out.println( "timepoints = " + process_timepoint );
System.out.println( "timepoint = " + parallel_timepoints);
System.out.println( "illuminations = " + process_illumination );
System.out.println( "angles = " + process_angle );
//Load bounding box
System.out.println( "-------------------------------------------------------" );
System.out.println( "Load bounding box: " );
minimal_x = System.getProperty( "minimal_x" );
minimal_y = System.getProperty( "minimal_y" );
minimal_z = System.getProperty( "minimal_z" );
maximal_x = System.getProperty( "maximal_x" );
maximal_y = System.getProperty( "maximal_y" );
maximal_z = System.getProperty( "maximal_z" );
downsample = System.getProperty( "downsample" );
System.out.println( "minimal_x=" + minimal_x );
System.out.println( "minimal_y=" + minimal_y );
System.out.println( "minimal_z=" + minimal_z );
System.out.println( "maximal_x=" + maximal_x );
System.out.println( "maximal_y=" + maximal_y );
System.out.println( "maximal_z=" + maximal_z );
System.out.println( "downsample=" + downsample );
// Load fusion parameters
System.out.println( "-------------------------------------------------------" );
System.out.println( "Fusion parameters: " );
pixel_type = System.getProperty( "pixel_type" );
imglib2_container_fusion = System.getProperty( "imglib2_container_fusion" );
interpolation = System.getProperty( "interpolation" );
imglib2_data_container = System.getProperty( "imglib2_data_container" );
System.out.println( "pixel_type=" + pixel_type );
System.out.println( "imglib2_container_fusion=" + imglib2_container_fusion );
System.out.println( "interpolation=" + interpolation );
System.out.println( "imglib2_data_container=" + imglib2_data_container );
// Execute Fiji Plugin
System.out.println( "=======================================================" );
System.out.println( "Starting Fusion" );
try{
IJ.run("Fuse/Deconvolve Dataset",
"select_xml=" + image_file_directory + merged_xml + " " +
"process_angle=[" + process_angle + "] " +
"process_illumination=[" + process_illumination + "] " +
"process_timepoint=[" + process_timepoint + "] " +
"processing_timepoint=[Timepoint " + parallel_timepoints + "] " +
// "xml_output=[Do not process on cluster] " +
"processing_timepoint=[Timepoint "+ parallel_timepoints +"] " +
"type_of_image_fusion=[Weighted-average fusion] " +
"bounding_box=[Define manually] " +
"fused_image=[Save as TIFF stack] " + // works but does not create xml file
"minimal_x=" + minimal_x + " " +
"minimal_y=" + minimal_y + " " +
"minimal_z=" + minimal_z + " " +
"maximal_x=" + maximal_x + " " +
"maximal_y=" + maximal_y + " " +
"maximal_z=" + maximal_z + " " +
"downsample=" + downsample + " " +
"pixel_type=[" + pixel_type + "] " +
"imglib2_container=" + imglib2_container_fusion + " " +
"process_views_in_paralell=All " +
"interpolation=[" + interpolation + "] " +
"blend " +
"content-based " +
"output_file_directory=" + image_file_directory + " " +
"lossless " +
"imglib2_data_container=[" + imglib2_data_container + "]");
}
catch ( e ) {
print( "[fusion] caught exception: "+e );
//important to fail the process if exception occurs
runtime.exit(1);
}
/* shutdown */
runtime.exit(0);
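
The fusion rule hands its parameters to this script exclusively through Java system properties (-Dname=value on the Fiji command line, read back with System.getProperty("name")). A few properties set by the rule (for example xml_output, fused_image, subsampling_factors, hdf5_chunk_sizes, timepoints_per_partition and setups_per_partition) are not read anywhere above. A rough cross-check along these lines can help keep the two sides in sync; file names are placeholders:

    import re

    # Hypothetical file names; point these at the actual Snakefile and fusion.bsh.
    snakefile_text = open("Snakefile").read()
    bsh_text = open("fusion.bsh").read()

    # Property names passed via -Dname=... anywhere in the Snakefile ...
    passed = set(re.findall(r"-D(\w+)=", snakefile_text))
    # ... versus property names read via System.getProperty("name") in the script.
    read = set(re.findall(r'System\.getProperty\(\s*"(\w+)"\s*\)', bsh_text))

    print("passed but never read:", sorted(passed - read))
    print("read but never passed:", sorted(read - passed))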
@@ -7,7 +7,8 @@
"merged_xml" : "hdf5_test_unicore_merge",
"bsh_directory" : "/projects/pilot_spim/Christopher/snakemake-workflows/spim_registration/timelapse/",
"first_xml_filename" : "test_unicore",
- "hdf5_xml_filename" : "\"hdf5_test_unicore\""
+ "hdf5_xml_filename" : "\"hdf5_test_unicore\"",
+ "fusion_switch" : "fusion"
},
"define_xml_czi" : "define_xml_czi" :
...@@ -118,6 +119,29 @@ ...@@ -118,6 +119,29 @@
"define_mode_transform" : "\"Matrix\"", "define_mode_transform" : "\"Matrix\"",
"matrix_transform" : "\"0.5, 0.0, 0.0, 0.0, 0.0, 0.5, 0.0, 0.0, 0.0, 0.0, 0.5, 0.0\"" "matrix_transform" : "\"0.5, 0.0, 0.0, 0.0, 0.0, 0.5, 0.0, 0.0, 0.0, 0.0, 0.5, 0.0\""
}, },
"fusion" :
{
"minimal_x" : "128",
"minimal_y" : "-13",
"minimal_z" : "-407",
"maximal_x" : "986",
"maximal_y" : "1927",
"maximal_z" : "498",
"downsample" : "1",
"process_timepoint" : "\"Single Timepoint (Select from List)\"",
"process_channel" : "\"All channels\"",
"process_illumination" : "\"All illuminations\"",
"process_angle" : "\"All angles\"",
"xml_output" : "\"Save every XML with user-provided unique id\"",
"fused_image" : "\"Append to current XML Project\"",
"pixel_type" : "\"16-bit unsigned integer\"",
"imglib2_container_fusion" : "\"ArrayImg\"",
"process_views_in_paralell" : "\"All\"",
"interpolation" : "\"Linear Interpolation\"",
"imglib2_data_container" : "\"ArrayImg (faster)\"",
"bsh_file" : "fusion.bsh"
},
"deconvolution" : "deconvolution" :
{ {
......
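
The bounding box in the new "fusion" block determines the size of each fused stack, which is worth estimating before launching cluster jobs. A back-of-the-envelope calculation for the values above, assuming the coordinates are pixels, downsample 1 and the configured 16-bit output:

    # Rough size estimate for one fused stack from config["fusion"].
    minimal = {"x": 128, "y": -13, "z": -407}
    maximal = {"x": 986, "y": 1927, "z": 498}
    downsample = 1
    bytes_per_voxel = 2                      # "16-bit unsigned integer"

    dims = {k: (maximal[k] - minimal[k]) // downsample for k in "xyz"}
    voxels = dims["x"] * dims["y"] * dims["z"]
    print(dims)                              # {'x': 858, 'y': 1940, 'z': 905}
    print(f"~{voxels * bytes_per_voxel / 1e9:.1f} GB per fused stack")   # ~3.0 GB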