import os, glob, sys, re
#where are we (can be configured through -d/--directory flag)
JOBDIR=os.path.abspath(os.path.curdir)
if not JOBDIR.endswith("/"): # ensure JOBDIR carries a trailing slash
    JOBDIR += "/"
# data-specific config file, expected to be inside JOBDIR
# problematic: file_id needs zero-padding (a possible fix is sketched below)
datasets = expand("{xml_base}-0{file_id}-00.h5", xml_base=[ config["common"]["hdf5_xml_filename"].strip('\"') ], file_id=range(int(config["common"]["ntimepoints"]))) # builds the expected .h5 file names (expand generates names, it does not search JOBDIR)
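# A zero-padded alternative (a sketch, not wired in): handing expand()
# pre-formatted two-digit ids removes the literal "0" from the pattern and
# keeps working past timepoint 9.
# padded_ids = [ "%02d" % t for t in range(int(config["common"]["ntimepoints"])) ]
# datasets = expand("{xml_base}-{file_id}-00.h5",
#                   xml_base=[ config["common"]["hdf5_xml_filename"].strip('\"') ],
#                   file_id=padded_ids)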
#TODO: this should go into a python module in this path
fre = re.compile(r'(?P<xml_base>\w+)-(?P<file_id>\d+)-00\.h5')
xml_merge_in = []
for ds in datasets:
bn = os.path.basename(ds)
bn_res = fre.search(bn)
if bn_res:
xml_base,file_id = bn_res.group('xml_base'),bn_res.group('file_id')
xml_merge_in.append("{xbase}.job_{fid}.xml".format(xbase=xml_base, fid=int(file_id)))
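# Example of the mapping above: "dataset-03-00.h5" gives xml_base="dataset" and
# file_id="03", which becomes "dataset.job_3.xml" (int() drops the zero-padding).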
#TODO: this should go into a python module in this path
def produce_string(_fstring, *args, **kwargs):
contents = dict()
for item in args:
        if isinstance(item, dict): # merge any dict passed positionally
contents.update(item)
contents.update(kwargs)
return _fstring.format(**contents)
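# Usage sketch for produce_string (hypothetical values): dicts merge in order,
# keyword arguments win on key collisions, and CPython's str.format resolves
# hyphenated keys such as "fiji-prefix":
#   produce_string("{fiji-prefix} -Dx={x}", {"fiji-prefix": "xvfb-run fiji", "x": 0}, x=1)
#   returns "xvfb-run fiji -Dx=1"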
rule done:
#input: [ ds+"_fusion" for ds in datasets ]
input: [ ds + "_" + config["common"]["fusion_switch"] for ds in datasets ]
rule resave_prepared:
input: expand("{dataset}.{suffix}",dataset=[ config["common"]["hdf5_xml_filename"] ], suffix=["xml","h5"])
rule define_xml_czi:
    input: config["define_xml_czi"]["first_czi"]
output: config["common"]["first_xml_filename"] + ".xml"
    log: "define_xml_czi.log" # log name assumed, patterned after define_xml_tif.log
    run:
        cmd_string = produce_string(
"""{fiji-prefix} {fiji-app} \
-Dimage_file_directory={jdir} \
-Dfirst_czi={first_czi} \
-Dangle_1={angle_1} \
-Dangle_2={angle_2} \
-Dangle_3={angle_3} \
-Dangle_4={angle_4} \
-Dangle_5={angle_5} \
-Dchannel_1={channel_1} \
-Dchannel_2={channel_2} \
-Dillumination_1={illumination_1} \
-Drotation_around={rotation_around} \
-Dpixel_distance_x={pixel_distance_x} \
-Dpixel_distance_y={pixel_distance_y} \
-Dpixel_distance_z={pixel_distance_z} \
-Dpixel_unit={pixel_unit} \
-Dfirst_xml_filename={first_xml_filename} \
-- --no-splash {path_bsh}""",
config["common"],
config["define_xml_czi"],
jdir=JOBDIR,
path_bsh=config["common"]["bsh_directory"] + config["define_xml_czi"]["bsh_file"])
        cmd_string += " > {log} 2>&1"
        shell(cmd_string)
rule define_xml_tif:
    input: glob.glob('*.tif')
output: config["common"]["first_xml_filename"] + ".xml"
log: "define_xml_tif.log"
run:
cmd_string = produce_string(
"""{fiji-prefix} {fiji-app} \
-Dimage_file_directory={jdir} \
-Dtimepoints={timepoints} \
-Dacquisition_angles={acquisition_angles} \
-Dchannels={channels} \
-Dimage_file_pattern={image_file_pattern} \
-Dpixel_distance_x={pixel_distance_x} \
-Dpixel_distance_y={pixel_distance_y} \
-Dpixel_distance_z={pixel_distance_z} \
-Dpixel_unit={pixel_unit} \
-Dxml_filename={first_xml_filename} \
-Dtype_of_dataset={type_of_dataset} \
-Dmultiple_timepoints={multiple_timepoints} \
-Dmultiple_channels={multiple_channels} \
-Dmultiple_illumination_directions={multiple_illumination_directions} \
-Dmultiple_angles={multiple_angles} \
-Dimglib_container={imglib_container} \
-- --no-splash {path_bsh}""",
config["common"],
config["define_xml_tif"],
jdir=JOBDIR,
path_bsh=config["common"]["bsh_directory"] + config["define_xml_tif"]["bsh_file"])
        cmd_string += " > {log} 2>&1"
        shell(cmd_string)
ruleorder: define_xml_tif > define_xml_czi
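# Both define_xml_* rules emit the same first .xml (compare their output lines),
# so the ruleorder above makes Snakemake prefer the tif variant when both apply.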
# create mother .xml/.h5
rule hdf5_xml:
input: config["common"]["first_xml_filename"] + ".xml"
output: expand("{dataset}.{suffix}",dataset=[ config["common"]["hdf5_xml_filename"].strip('\"')], suffix=["xml","h5"]),
            expand("{xml_base}-0{file_id}-00.h5_empty", xml_base=[ config["common"]["hdf5_xml_filename"].strip('\"') ], file_id=range(int(config["common"]["ntimepoints"]))) # problematic: file_id needs zero-padding
log: "hdf5_xml.log"
run:
part_string = produce_string(
"""{fiji-prefix} {fiji-app} \
-Dimage_file_directory={jdir} \
-Dfirst_xml_filename={first_xml_filename} \
-Dhdf5_xml_filename={hdf5_xml_filename} \
-Dresave_angle={resave_angle} \
-Dresave_channel={resave_channel} \
-Dresave_illumination={resave_illumination} \
-Dresave_timepoint={resave_timepoint} \
-Dsubsampling_factors={subsampling_factors} \
-Dhdf5_chunk_sizes={hdf5_chunk_sizes} \
-Dtimepoints_per_partition={timepoints_per_partition} \
-Dsetups_per_partition={setups_per_partition} \
-Drun_only_job_number=0 \
-- --no-splash {path_bsh}""",
config["common"],
config["define_xml_czi"],
config["resave_hdf5"],
jdir=JOBDIR,
path_bsh=config["common"]["bsh_directory"] + config["resave_hdf5"]["bsh_file"])
part_string += " > {log} 2>&1 && touch {output}"
shell(part_string)
#create dummy files according to the number of timepoints found
# for index in range(int(config["common"]["ntimepoints"])):
        # shell("touch {basename}-0{file_id}-00.h5_empty".format(basename=config["common"]["hdf5_xml_filename"], file_id=index)) # problematic: file_id needs zero-padding
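        # A zero-padded variant of the commented loop above (a sketch):
        # for index in range(int(config["common"]["ntimepoints"])):
        #     shell("touch {basename}-{file_id:02d}-00.h5_empty".format(
        #         basename=config["common"]["hdf5_xml_filename"].strip('\"'), file_id=index))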
input: "{xml_base}-{file_id,\d+}-00.h5_empty" # rules.hdf5_xml.output
output: "{xml_base}-{file_id,\d+}-00.h5"
log: "resave_hdf5-{file_id}.log"
    run:
        part_string = produce_string(
"""{fiji-prefix} {fiji-app} \
-Dimage_file_directory={jdir} \
-Dfirst_xml_filename={first_xml_filename} \
-Dhdf5_xml_filename={input_xml_base} \
-Dresave_angle={resave_angle} \
-Dresave_channel={resave_channel} \
-Dresave_illumination={resave_illumination} \
-Dresave_timepoint={resave_timepoint} \
-Dsubsampling_factors={subsampling_factors} \
-Dhdf5_chunk_sizes={hdf5_chunk_sizes} \
-Dtimepoints_per_partition={timepoints_per_partition} \
-Dsetups_per_partition={setups_per_partition} \
-Drun_only_job_number={job_number} \
-- --no-splash {path_bsh}""",
config["common"],
config["define_xml_czi"],
config["resave_hdf5"],
jdir=JOBDIR,
path_bsh=config["common"]["bsh_directory"] + config["resave_hdf5"]["bsh_file"],
job_number=int(wildcards.file_id)+1) # problematic: the +1 has to happen here, format strings cannot do arithmetic on the file_id wildcard
part_string += " > {log} 2>&1"
shell(part_string)
input: "{xml_base}-{file_id}-00.h5" # rules.resave_hdf5.output
#input: rules.resave_hdf5.output, "{xml_base}-{file_id}-00.h5"
output: "{xml_base}-{file_id,\d+}-00.h5_registered", #"{xml_base}.job_{file_id,\d+}.xml"
log: "{xml_base}-{file_id}-registration.log"
run:
cmd_string = produce_string(
"""{fiji-prefix} {fiji-app} \
-Dparallel_timepoints={file_id_w} \
-Dimage_file_directory={jdir} \
-Dxml_filename={input_xml} \
-Dprocess_timepoint={timepoint} \
-Dprocess_channel={channel} \
-Dprocess_illumination={illuminations} \
-Dprocess_angle={angle} \
-Dprocessing_channel={proc-ch} \
-Dlabel_interest_points={label_interest_points} \
-Dtype_of_registration={type_of_registration} \
-Dtype_of_detection={type_of_detection} \
-Dsubpixel_localization={subpixel_localization} \
-Dimglib_container={imglib_container} \
-Dradius_1={radius_1} \
-Dradius_2={radius_2} \
-Dthreshold={threshold} \
-Dinitial_sigma={initial_sigma} \
-Dthreshold_gaussian={threshold_gaussian} \
-Dregistration_algorithm={algorithm} \
-Dinterest_points_channel_0={interest_points_channel_0} \
-Dinterest_points_channel_1={interest_points_channel_1} \
-Dfix_tiles={fix_tiles} \
-Dmap_back_tiles={map_back_tiles} \
-Dtransformation_model={transformation_model} \
-Dmodel_to_regularize_with={model_to_regularize_with} \
-Dlambda={lambda} \
-Dallowed_error_for_ransac={allowed_error_for_ransac} \
-Ddetection_min_max={detection_min_max} \
-- --no-splash {path_bsh}""",
config["common"],
config["registration"],
file_id_w="{wildcards.file_id}",
path_bsh=config["common"]["bsh_directory"] + config["registration"]["bsh_file"],
jdir=JOBDIR,
input_xml="{wildcards.xml_base}")
cmd_string += "> {log} 2>&1 && touch {output}"
shell(cmd_string)
#shell("touch {output}")
rule xml_merge:
input: [ item+"_registered" for item in datasets ] #xml_merge_in
output: "{xml_base}_merge.xml"
log: "{xml_base}_merge.log"
run:
cmd_string = produce_string(
"""{fiji-prefix} {fiji-app} \
-Dimage_file_directory={jdir} \
-Dmerged_xml={output} \
-- --no-splash {path_bsh}""",
config["common"],
config["xml_merge"],
log="{log}",
path_bsh=config["common"]["bsh_directory"] + config["xml_merge"]["bsh_file"],
jdir=JOBDIR,
output="{output}")
cmd_string += "> {log} 2>&1 && touch {output}"
shell(cmd_string)
rule timelapse:
    input: rules.xml_merge.output
output: rules.xml_merge.output[0] + "_timelapse"
log: "{xml_base}_timelapse.log"
run:
cmd_string = produce_string(
"""{fiji-prefix} {fiji-app} \
-Dimage_file_directory={jdir} \
-Dmerged_xml={input} \
-Dtimelapse_process_timepoints={timelapse_process_timepoints} \
-Dprocess_channel_timelapse={channel} \
-Dprocess_illumination={illuminations} \
-Dprocess_angle={angle} \
-Dreference_timepoint={reference_timepoint} \
-Dchannel_1={proc-ch} \
-Dtype_of_registration_timelapse={type_of_registration_timelapse} \
-Dregistration_algorithm={algorithm} \
-Dreg_1_interest_points_channel={interest_points_channel_0} \
-Dreg_2_interest_points_channel={interest_points_channel_1} \
-Dtransformation_model={transformation_model} \
-Dmodel_to_regularize_with={model_to_regularize_with} \
-Dlambda={lambda} \
-Dallowed_error_for_ransac={allowed_error_for_ransac} \
-Dsignificance={significance} \
-- --no-splash {path_bsh}""",
config["common"],
config["registration"],
config["timelapse"],
input="{input}",
path_bsh=config["common"]["bsh_directory"] + config["timelapse"]["bsh_file"],
jdir=JOBDIR)
cmd_string += "> {log} 2>&1 && touch {output}"
        shell(cmd_string)
rule fusion:
input: rules.timelapse.output, "{xml_base}-{file_id,\d+}-00.h5", merged_xml="{xml_base}_merge.xml"
output: "{xml_base}-{file_id,\d+}-00.h5_fusion"
log: "{xml_base}-{file_id,\d+}-00-fusion.log"
run:
cmd_string = produce_string(
"""{fiji-prefix} {fiji-app} \
-Dimage_file_directory={jdir} \
-Dparallel_timepoints={file_id_w} \
-Dmerged_xml={merged_xml_file} \
-Dprocess_timepoint={process_timepoint} \
-Dprocess_channel={process_channel} \
-Dprocess_illumination={process_illumination} \
-Dprocess_angle={process_angle} \
-Dxml_output={xml_output} \
-Dfused_image={fused_image} \
-Dminimal_x={minimal_x} \
-Dminimal_y={minimal_y} \
-Dminimal_z={minimal_z} \
-Dmaximal_x={maximal_x} \
-Dmaximal_y={maximal_y} \
-Dmaximal_z={maximal_z} \
-Ddownsample={downsample} \
-Dpixel_type={pixel_type} \
-Dimglib2_container_fusion={imglib2_container_fusion} \
-Dprocess_views_in_paralell={process_views_in_paralell} \
-Dinterpolation={interpolation} \
-Dimglib2_data_container={imglib2_data_container} \
-Dsubsampling_factors={subsampling_factors} \
-Dhdf5_chunk_sizes={hdf5_chunk_sizes} \
-Dtimepoints_per_partition={timepoints_per_partition} \
-Dsetups_per_partition={setups_per_partition} \
-- --no-splash {path_bsh}""",
config["common"],
config["fusion"],
config["resave_czi_hdf5"],
path_bsh=config["common"]["bsh_directory"] + config["fusion"]["bsh_file"],
jdir=JOBDIR,
file_id_w="{wildcards.file_id}",
merged_xml_file="{input.merged_xml}"
)
cmd_string += "> {log} 2>&1 && touch {output}"
shell(cmd_string)
rule external_transform:
input: rules.timelapse.output, merged_xml="{xml_base}_merge.xml"
output: rules.timelapse.output[0] + "_external_trafo"
log: "external_transform.log"
    run:
        cmd_string = produce_string(
"""{fiji-prefix} {fiji-app} \
-Dimage_file_directory={jdir} \
-Dmerged_xml={merged_xml_file} \
-Dtransform_angle={angle} \
-Dtransform_channel={channel} \
-Dtransform_illumination={illumination} \
-Dtransform_timepoint={timepoint} \
-Dtransformation={transformation} \
-Dapply_transformation={apply_transformation} \
-Ddefine_mode_transform={define_mode_transform} \
-Dmatrix_transform={matrix_transform} \
-- --no-splash {path_bsh}""",
config["common"],
config["external_transform"],
path_bsh=config["common"]["bsh_directory"] + config["external_transform"]["bsh_file"],
jdir=JOBDIR,
merged_xml_file="{input.merged_xml}"
)
cmd_string += "> {log} 2>&1 && touch {output}"
shell(cmd_string)
rule deconvolution:
input: rules.external_transform.output, "{xml_base}-{file_id,\d+}-00.h5", merged_xml="{xml_base}_merge.xml"
output: "{xml_base}-{file_id,\d+}-00.h5_deconvolution"
log: "{xml_base}-{file_id,\d+}-00-deconvolution.log"
    run:
        cmd_string = produce_string(
"""{fiji-prefix} {fiji-app} \
-Dimage_file_directory={jdir} \
-Ddeco_output_file_directory={jdir} \
-Dmerged_xml={merged_xml_file} \
-Dparallel_timepoints={file_id_w} \
-Dprocess_timepoint={process_timepoint} \
-Dprocess_channel={process_channel} \
-Dprocess_illumination={process_illumination} \
-Dprocess_angle={process_angle} \
-Dminimal_x_deco={minimal_x} \
-Dminimal_y_deco={minimal_y} \
-Dminimal_z_deco={minimal_z} \
-Dmaximal_x_deco={maximal_x} \
-Dmaximal_y_deco={maximal_y} \
-Dmaximal_z_deco={maximal_z} \
-Dimglib2_container_deco={imglib2_container} \
-Dtype_of_iteration={type_of_iteration} \
-Dosem_acceleration={osem_acceleration} \
-DTikhonov_parameter={Tikhonov_parameter} \
-Dcompute={compute} \
-Dpsf_estimation={psf_estimation} \
-Ddirectory_cuda={directory_cuda} \
-Ddetections_to_extract_psf_for_channel_0={detections_to_extract_psf_for_channel_0} \
-Ddetections_to_extract_psf_for_channel_1={detections_to_extract_psf_for_channel_1} \
-Dpsf_size_x={psf_size_x} \
-Dpsf_size_y={psf_size_y} \
-Dpsf_size_z={psf_size_z} \
-Diterations={iterations} \
-- --no-splash {path_bsh}""",
config["common"],
config["deconvolution"],
file_id_w="{wildcards.file_id}",
path_bsh=config["common"]["bsh_directory"] + config["deconvolution"]["bsh_file"],
jdir=JOBDIR,
merged_xml_file="{input.merged_xml}"
)
cmd_string += "> {log} 2>&1 && touch {output}"
shell(cmd_string)
rule distclean:
    params : glob.glob(config["common"]["hdf5_xml_filename"].strip('\"')+"*"),
             glob.glob(config["common"]["first_xml_filename"].strip('\"')+"*"),
             glob.glob("*registered"), glob.glob("*log"), glob.glob("*_deconvolved"),
             glob.glob("*.xml~*"), "interestpoints", glob.glob("*empty"),
             expand("{dataset}.{suffix}", dataset=[ config["common"]["hdf5_xml_filename"].strip('\"') ], suffix=["xml","h5"]) # xml_merge_in,
message : "removing {params}"
shell : "rm -rf {params}"