diff --git a/haas-imagej-client/src/main/java/cz/it4i/fiji/haas/BenchmarkJobManager.java b/haas-imagej-client/src/main/java/cz/it4i/fiji/haas/BenchmarkJobManager.java
index 2194178f17777205dc32db6ed9987df6e2d82a04..34e41d1aeb51d2d2e797ac038ab15985b2977b5a 100644
--- a/haas-imagej-client/src/main/java/cz/it4i/fiji/haas/BenchmarkJobManager.java
+++ b/haas-imagej-client/src/main/java/cz/it4i/fiji/haas/BenchmarkJobManager.java
@@ -5,9 +5,10 @@ import java.io.ByteArrayOutputStream;
 import java.io.IOException;
 import java.nio.file.Path;
 import java.time.Instant;
-import java.util.Collections;
+import java.util.stream.Stream;
 
 import cz.it4i.fiji.haas.JobManager.JobInfo;
+import cz.it4i.fiji.haas_java_client.HaaSClient;
 import cz.it4i.fiji.haas_java_client.JobState;
 import net.imagej.updater.util.Progress;
 
@@ -26,7 +27,8 @@ public class BenchmarkJobManager {
 	}
 
 	public JobInfo startJob() throws IOException {
-		JobInfo jobInfo = jobManager.startJob(Collections.emptyList(), null);
+		
+		JobInfo jobInfo = jobManager.startJob(Stream.of(getUploadingFile()), progress);
 		jobInfo.waitForStart();
 		if (jobInfo.getState() != JobState.Running) {
 			throw new IllegalStateException("start of job: " + jobInfo + " failed");
@@ -40,6 +42,11 @@
 		return jobInfo;
 	}
 
+	private HaaSClient.UploadingFile getUploadingFile() {
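+		// config.yaml is bundled as a classpath resource under haas-imagej-client/src/main/resources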
+		return new UploadingFileFromResource("", "config.yaml");
+	}
+
 	public JobState getState(long jobId) {
 		return jobManager.getState(jobId);
 	}
diff --git a/haas-imagej-client/src/main/java/cz/it4i/fiji/haas/Job.java b/haas-imagej-client/src/main/java/cz/it4i/fiji/haas/Job.java
index ec931362fabdfa10c8027679aa362e1161e85d62..8249e8bc55f7f136be597ef6d45b37bac1814e0c 100644
--- a/haas-imagej-client/src/main/java/cz/it4i/fiji/haas/Job.java
+++ b/haas-imagej-client/src/main/java/cz/it4i/fiji/haas/Job.java
@@ -7,16 +7,17 @@ import java.nio.file.Files;
 import java.nio.file.Path;
 import java.nio.file.StandardOpenOption;
 import java.util.Calendar;
-import java.util.Collection;
 import java.util.Collections;
 import java.util.EnumSet;
 import java.util.Properties;
 import java.util.function.Supplier;
+import java.util.stream.Stream;
 
 import org.scijava.log.LogService;
 import org.scijava.plugin.Parameter;
 
 import cz.it4i.fiji.haas_java_client.HaaSClient;
+import cz.it4i.fiji.haas_java_client.HaaSClient.UploadingFile;
 import cz.it4i.fiji.haas_java_client.JobInfo;
 import cz.it4i.fiji.haas_java_client.JobState;
 import cz.it4i.fiji.haas_java_client.ProgressNotifier;
@@ -71,7 +72,7 @@ public class Job {
 		}
 	};
 
-	public Job(Path basePath, Collection<Path> files, Supplier<HaaSClient> haasClientSupplier, Progress progress)
+	public Job(Path basePath, Stream<UploadingFile> files, Supplier<HaaSClient> haasClientSupplier, Progress progress)
 			throws IOException {
 		this(haasClientSupplier);
 		HaaSClient client = this.haasClientSupplier.get();
diff --git a/haas-imagej-client/src/main/java/cz/it4i/fiji/haas/JobManager.java b/haas-imagej-client/src/main/java/cz/it4i/fiji/haas/JobManager.java
index 952d1b0fa6438ca7048d4265337ab0f5119d834b..92a040643657519fa60454d2c7cddae4930cfa8f 100644
--- a/haas-imagej-client/src/main/java/cz/it4i/fiji/haas/JobManager.java
+++ b/haas-imagej-client/src/main/java/cz/it4i/fiji/haas/JobManager.java
@@ -10,11 +10,13 @@ import java.util.Collection;
 import java.util.Iterator;
 import java.util.LinkedList;
 import java.util.stream.Collectors;
+import java.util.stream.Stream;
 
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import cz.it4i.fiji.haas_java_client.HaaSClient;
+import cz.it4i.fiji.haas_java_client.HaaSClient.UploadingFile;
 import cz.it4i.fiji.haas_java_client.JobState;
 import cz.it4i.fiji.haas_java_client.Settings;
 import javafx.beans.value.ObservableValueBase;
@@ -45,7 +47,7 @@ public class JobManager {
 
 	}
 
-	public JobInfo startJob(Collection<Path> files, Progress progress) throws IOException {
+	public JobInfo startJob(Stream<UploadingFile> files, Progress progress) throws IOException {
 		Job job;
 		jobs.add(job = new Job(workDirectory, files, this::getHaasClient, progress));
 		return new JobInfo(job) {
diff --git a/haas-imagej-client/src/main/java/cz/it4i/fiji/haas/RunWithHaaS.java b/haas-imagej-client/src/main/java/cz/it4i/fiji/haas/RunWithHaaS.java
index 13199d040f44358ac5617d64b2ebe036db469880..e057315cf14ef6d2b12682b71d8a1e524fdbe8fd 100644
--- a/haas-imagej-client/src/main/java/cz/it4i/fiji/haas/RunWithHaaS.java
+++ b/haas-imagej-client/src/main/java/cz/it4i/fiji/haas/RunWithHaaS.java
@@ -19,6 +19,7 @@ import org.scijava.ui.UIService;
 import org.scijava.widget.UIComponent;
 
 import cz.it4i.fiji.haas.ui.ProgressDialog;
+import cz.it4i.fiji.haas_java_client.HaaSClient;
 import net.imagej.ImageJ;
 
 /**
@@ -50,7 +51,7 @@ public class RunWithHaaS implements Command {
 	public void run() {
 		try {
 			jobManager = new JobManager(getWorkingDirectoryPath(), TestingConstants.getSettings());
-			jobManager.startJob(getContent(dataDirectory), ModalDialogs.doModal(new ProgressDialog(getFrame())));
+			jobManager.startJob(getContent(dataDirectory).stream().map(HaaSClient::getUploadingFile), ModalDialogs.doModal(new ProgressDialog(getFrame())));
 		} catch (IOException e) {
 			log.error(e);
 		}
diff --git a/haas-imagej-client/src/main/java/cz/it4i/fiji/haas/UploadingFileFromResource.java b/haas-imagej-client/src/main/java/cz/it4i/fiji/haas/UploadingFileFromResource.java
new file mode 100644
index 0000000000000000000000000000000000000000..429ab37d49a6b3143be2e07f55fd28d60af3700b
--- /dev/null
+++ b/haas-imagej-client/src/main/java/cz/it4i/fiji/haas/UploadingFileFromResource.java
@@ -0,0 +1,62 @@
+package cz.it4i.fiji.haas;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.time.Instant;
+
+import cz.it4i.fiji.haas_java_client.HaaSClient.UploadingFile;
+
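+/**
+ * {@link UploadingFile} implementation backed by a classpath resource (e.g. the bundled
+ * config.yaml); the resource is resolved as base + "/" + fileName via getResourceAsStream.
+ */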
+public class UploadingFileFromResource implements UploadingFile {
+
+	private String fileName;
+	private String base;
+	private Long length;
+	private long lastTime;
+	
+	public UploadingFileFromResource(String base, String fileName) {
+		this.base = base;
+		this.fileName = fileName;
+		this.lastTime = Instant.now().toEpochMilli();
+	}
+	
+	@Override
+	public InputStream getInputStream() {
+		return this.getClass().getResourceAsStream(base + "/" + fileName);
+	}
+
+	@Override
+	public String getName() {
+		return fileName;
+	}
+
+	@Override
+	public long getLength() {
+		if(length == null) {
+			length = computeLength();
+		}
+		return length;
+	}
+
+	private Long computeLength() {
+		try (InputStream is = getInputStream()) {
+			long result = 0;
+			byte[] buffer = new byte[8192];
+			for (int read; (read = is.read(buffer)) != -1;) { // read to EOF; available() is unreliable
+				result += read;
+			}
+			return result;
+		} catch (IOException e) {
+			throw new RuntimeException(e);
+		}
+	}
+
+	@Override
+	public long getLastTime() {
+		return lastTime;
+	}
+
+}
diff --git a/haas-imagej-client/src/main/resources/config.yaml b/haas-imagej-client/src/main/resources/config.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..575754f7df30f29c4e3ed0f72a8d7eb870ddb3f6
--- /dev/null
+++ b/haas-imagej-client/src/main/resources/config.yaml
@@ -0,0 +1,456 @@
+common: {
+  # ============================================================================
+  # ============================================================================
+  # yaml example file 
+  #
+  # DESCRIPTION: source file for cluster processing scripts
+  #
+  #      AUTHOR: Christopher Schmied, schmied@mpi-cbg.de
+  #   INSTITUTE: Max Planck Institute for Molecular Cell Biology and Genetics
+  #        BUGS:
+  #       NOTES:
+  #     Version: 3.3
+  #     CREATED: 2015-06-01
+  #    REVISION: 2015-07-19
+  # ============================================================================
+  # ============================================================================
+  # 1. Software directories
+  # 
+  # Description: paths to software dependencies of processing
+  # Options: Fiji location
+  #          beanshell and snakefile directory
+  #          directory for cuda libraries
+  #          xvfb setting
+  # ============================================================================
+  # current working Fiji
+  fiji-app: "/scratch/work/project/dd-17-31/haas/spim/Fiji.app/ImageJ-linux64 -Xss4m",
+  # bean shell scripts and Snakefile
+  bsh_directory: "/scratch/work/project/dd-17-31/haas/spim/snakemake-workflows/spim_registration/timelapse/",
+  # Directory that contains the cuda libraries
+  directory_cuda: "/sw/users/schmied/cuda/",
+  # xvfb 
+  fiji-prefix: "",
+  sysconfcpus: "/scratch/work/project/dd-17-31/haas/spim/bin/sysconfcpus -n",
+  memory-prefix: "-Xmx",
+  # ============================================================================
+  # 2. Processing switches
+  #
+  # Description: Use switches to decide which processing steps you need:
+  # Options:  transformation_switch: "timelapse",
+  #           goes directly into fusion after timelapse registration
+  #
+  #           transformation_switch: "timelapse_duplicate",
+  #           for dual channel processing one channel contains the beads
+  #           duplicates the transformation from the source channel to the
+  #           target channel
+  #
+  #           Switches between content-based fusion and deconvolution
+  #           fusion_switch: "deconvolution", > for deconvolution
+  #           fusion_switch: "fusion", > for content based fusion
+  # ============================================================================
+  # Transformation switch:
+  transformation_switch: "timelapse",
+  # Fusion switch:
+  fusion_switch: "fusion",
+  # ============================================================================
+  # 3. Define dataset
+  #
+  # Description: key parameters for processing
+  # Options: General Settings
+  #          Settings for .czi files
+  #          Settings for .tif datasets
+  # ============================================================================
+  # 3.1. General Settings -------------------------------------------------------
+  #
+  # Description: applies to both .czi and tif datasets
+  # Options: xml file name
+  #          number of timepoints
+  #          angles
+  #          channels
+  #          illuminations
+  # ----------------------------------------------------------------------------
+  hdf5_xml_filename: '"HisRFP_test"', 
+  ntimepoints: 2,        # number of timepoints of dataset
+  angles: "1,2,3,4,5,6",   # format e.g.: "0,72,144,216,288",
+  channels: "0",     # format e.g.: "green,red", IMPORTANT: for tif numeric!
+  illumination: "0",     # format e.g.: "0,1",
+  #
+  # 3.2. Settings for .czi files -----------------------------------------------
+  #
+  # Description: applies only to .czi dataset
+  # Options: name of first czi file
+  # ----------------------------------------------------------------------------
+  first_czi: "2015-04-21_LZ2_Stock32.czi", 
+  #
+  # 3.3. Settings for .tif datasets --------------------------------------------
+  #
+  # Description: applies only to .tif dataset
+  # Options: file pattern of .tif files:
+  #          multi channel with one file per channel: 
+  #          spim_TL{tt}_Angle{a}_Channel{c}.tif
+  #          for padded zeros use tt 
+  # ----------------------------------------------------------------------------
+  image_file_pattern: 'spim_TL{{t}}_Angle{{a}}.tif',
+  multiple_channels: '"NO (one channel)"',         # '"YES (all channels in one file)"' or '"YES (one file per channel)"' or '"NO (one channel)"'
+  # ============================================================================
+  # 4. Detection and registration
+  #
+  # Description: settings for interest point detection and registration
+  # Options: Single channel and dual channel processing
+  #          Source and target for dual channel where one channel contains the beads
+  #          Interestpoints label
+  #          Difference-of-mean or difference-of-gaussian detection
+  # ============================================================================
+  # reg_process_channel:
+  # Single Channel: '"All channels"'
+  # Dual Channel: '"All channels"'
+  # Dual Channel one Channel contains beads: '"Single channel (Select from List)"'
+  reg_process_channel: '"All channels"',
+  #
+  # Dual channel 1 Channel contains the beads: which channel contains the beads?
+  # Ignore if Single Channel or Dual Channel both channels contain beads
+  source_channel: "red", # channel that contains the beads
+  target_channel: "green", # channel without beads
+  # reg_interest_points_channel:
+  # Single Channel: '"beads"'
+  # Dual Channel: '"beads,beads"'
+  # Dual Channel: Channel does not contain the beads '"[DO NOT register this channel],beads"'
+  reg_interest_points_channel: '"beads"',
+  #
+  # type of detection: '"Difference-of-Mean (Integral image based)"' or '"Difference-of-Gaussian"'
+  type_of_detection: '"Difference-of-Gaussian"',
+  # Settings for Difference-of-Mean
+  # For multiple channels 'value1,value2' delimiter is ,
+  reg_radius_1: '2',
+  reg_radius_2: '3',
+  reg_threshold: '0.005',
+  # Settings for Difference-of-Gaussian
+  # For multiple channels 'value1,value2' delimiter is ,
+  sigma: '1.3',
+  threshold_gaussian: '0.025',
+  # ============================================================================
+  # 5. Timelapse registration
+  #
+  # Description: settings for timelapse registration
+  # Options: reference timepoint
+  # ============================================================================
+  reference_timepoint: '1',   # Reference timepoint
+  # ============================================================================
+  # 6. Weighted-average fusion
+  #
+  # Description: settings for content-based multiview fusion
+  # Options: downsampling
+  #          Cropping parameters based on full resolution
+  # ============================================================================
+  downsample: '1',    # set downsampling
+  minimal_x: '23',   # Cropping parameters of full resolution
+  minimal_y: '97',
+  minimal_z: '-402',
+  maximal_x: '1880',
+  maximal_y: '935',
+  maximal_z: '483',
+  # ============================================================================
+  # 7. Multiview deconvolution
+  #
+  # Description: settings for multiview deconvolution
+  # Options: External transformation
+  #          Deconvolution settings
+  #
+  # ============================================================================
+  # 7.1. External transformation -----------------------------------------------
+  #
+  # Description: Allows downsampling prior to deconvolution
+  # Options: no downsampling: 
+  #          external_trafo_switch: "_transform",
+  #
+  #          downsampling:
+  #          external_trafo_switch: "external_trafo",
+  #          IMPORTANT: boundingbox needs to reflect this downsampling. 
+  #
+  #          Matrix for downsampling
+  # ----------------------------------------------------------------------------
+  external_trafo_switch: "",
+  #
+  # Matrix for downsampling
+  matrix_transform: '"0.5, 0.0, 0.0, 0.0, 0.0, 0.5, 0.0, 0.0, 0.0, 0.0, 0.5, 0.0"',
+  #
+  # 7.2. Deconvolution settings ------------------------------------------------
+  # 
+  # Description: core settings for multiview deconvolution
+  # Options: number of iterations
+  #          Cropping parameters taking downsampling into account!
+  #          Channel settings for deconvolution
+  # ----------------------------------------------------------------------------
+  iterations: '15',        # number of iterations
+  minimal_x_deco: '23',  # Cropping parameters: take downsampling into account
+  minimal_y_deco: '97',
+  minimal_z_deco: '-402',
+  maximal_x_deco: '1880',
+  maximal_y_deco: '935',
+  maximal_z_deco: '483',
+  # Channel settings for deconvolution
+  # Single Channel: '"beads"'
+  # Dual Channel: '"beads,beads"'
+  # Dual Channel one channel contains beads: '"[Same PSF as channel red],beads"'
+  detections_to_extract_psf_for_channel: '"beads"',
+  # ============================================================================
+  # Resave output
+  #
+  # Description: writes new hdf5 dataset for fusion output
+  # Options: Naming pattern of output based on channel number
+  #          Channel settings
+  #          File name for resaving output into hdf5
+  #          Pixel size > isotropic resolution
+  #          Image type (16Bit from content-based fusion, 32Bit from deconvolution)
+  # ============================================================================
+  # Calibration
+  manual_calibration_output: "Yes", # calibration override: No or Yes
+  # pixel size of output: take downsampling into account!
+  output_pixel_distance_x: 0.2859371, 
+  output_pixel_distance_y: 0.2859371,
+  output_pixel_distance_z: 0.2859371,
+  output_pixel_unit: 'um'
+  }
+  
+  # ============================================================================
+  # 8. Advanced settings
+  # 
+  # Description: advanced and manual settings for each processing step
+  #              corresponds to each rule in the snakefile
+  #     Options: define_xml_czi
+  #              define_xml_tif
+  #              resave_hdf5
+  #              registration
+  #              xml_merge
+  #              timelapse
+  #              dublicate_transformations
+  #              fusion
+  #              external_transform
+  #              deconvolution
+  #              hdf5_output
+  # ============================================================================
+  # ----------------------------------------------------------------------------
+  # 8.1. define_xml_czi
+  # ----------------------------------------------------------------------------
+define_xml_czi: {
+  manual_calibration_czi: "No", # calibration override: No or Yes
+  czi_pixel_distance_x: '0.285901069641113',  # Manual calibration x
+  czi_pixel_distance_y: '0.285901069641113',  # Manual calibration y
+  czi_pixel_distance_z: '1.500000000000000',  # Manual calibration z
+  czi_pixel_unit: "um",             # unit of manual calibration
+  rotation_around: "X-Axis",       # axis of acquisition
+  bsh_file: "define_czi.bsh"       # .bsh script for defining .czi file
+  }
+  
+  # ----------------------------------------------------------------------------
+  # 8.2. define_xml_tif
+  # ----------------------------------------------------------------------------
+define_xml_tif: {
+  # Settings for ImageJ Opener
+  manual_calibration_tif: "Yes", # calibration override: No or Yes
+  pixel_distance_x: '0.2859371',  # Manual calibration x
+  pixel_distance_y: '0.2859371',  # Manual calibration y
+  pixel_distance_z: '1.0000000',  # Manual calibration z
+  pixel_unit: "um",             # unit of manual calibration
+  type_of_dataset: '"Image Stacks (ImageJ Opener)"', # '"Image Stacks (ImageJ Opener)"' or '"Image Stacks (LOCI Bioformats)"'
+  multiple_timepoints: '"YES (one file per time-point)"', # or NO (one time-point)
+  multiple_angles: '"YES (one file per angle)"',          # or NO (one angle)
+  multiple_illumination_directions: '"NO (one illumination direction)"', # or YES (one file per illumination direction)
+  imglib_container: '"ArrayImg (faster)"',        # '"ArrayImg (faster)"'
+  bsh_file: "define_tif_zip.bsh"
+  }
+  
+  # ----------------------------------------------------------------------------
+  # 8.3. resave_hdf5
+  # ----------------------------------------------------------------------------
+resave_hdf5: {
+  # Resaves .tif or .czi data into hdf5
+  # Subsampling and resolution settings for hdf5: data dependent
+  hdf5_chunk_sizes: '"{{ {{32,32,4}}, {{32,32,4}}, {{16,16,16}}, {{16,16,16}} }}"',
+  subsampling_factors: '"{{ {{1,1,1}}, {{2,2,1}}, {{4,4,1}}, {{8,8,1}} }}"',
+  # Standard settings for cluster processing
+  setups_per_partition: '0',
+  timepoints_per_partition: '1',
+  resave_timepoint: '"All Timepoints"',
+  resave_angle: '"All angles"',
+  resave_channel: '"All channels"',
+  resave_illumination: '"All illuminations"',
+  num_cores_hdf5_xml: 3,
+  num_cores_resave_hdf5: 3,
+  bsh_file: "export.bsh"
+  }
+  
+  # ----------------------------------------------------------------------------
+  # 8.4. registration
+  # ----------------------------------------------------------------------------
+registration: {
+  # Processing setting for Difference-of-Gaussian detection
+  # compute_on:'"GPU accurate (Nvidia CUDA via JNA)"'
+  compute_on: '"CPU (Java)"',
+  separableconvolution: '"libSeparableConvolutionCUDALib.so"',
+  # Downsampling settings
+  downsample_detection: "Yes", # "No" or "Yes"
+  downsample_xy: '"Match Z Resolution (less downsampling)"',
+  downsample_z: "1x",
+  # Standard Settings for bead based registration
+  label_interest_points: '"beads"',              
+  reg_process_timepoint: '"Single Timepoint (Select from List)"',
+  reg_process_angle: '"All angles"',
+  reg_process_illumination: '"All illuminations"',
+  subpixel_localization: '"3-dimensional quadratic fit"',
+  detection_min_max: "find_maxima",
+  type_of_registration: '"Register timepoints individually"',
+  algorithm: '"Fast 3d geometric hashing (rotation invariant)"',
+  transformation_model: "Affine",
+  allowed_error_for_ransac: '5',
+  significance: '10',
+  fix_tiles: '"Fix first tile"',
+  map_back_tiles: '"Map back to first tile using rigid model"',
+  model_to_regularize_with: "Rigid",
+  lambda: '0.10',
+  imglib_container: '"ArrayImg (faster)"',
+  num_cores_reg: 4,
+  bsh_file: "registration.bsh"  # .bsh script for registration
+  }
+
+  # ----------------------------------------------------------------------------
+  # 8.5. xml_merge
+  # ----------------------------------------------------------------------------
+xml_merge: {
+  bsh_file: "xml_merge.bsh"
+  }
+  
+  # ----------------------------------------------------------------------------
+  # 8.6. timelapse
+  # ---------------------------------------------------------------------------- 
+timelapse: {
+  # Standard settings for timelapse registration
+  type_of_registration_timelapse: '"Match against one reference timepoint (no global optimization)"',
+  timelapse_process_timepoints: '"All Timepoints"',
+  num_cores_time: 6,
+  bsh_file: "timelapse_registration.bsh"
+  }
+  
+  # ----------------------------------------------------------------------------
+  # 8.7. dublicate_transformations
+  # ----------------------------------------------------------------------------
+dublicate_transformations: {
+  # For dual channel processing where only one channel contains beads
+  # this allows you to duplicate the transformation for the
+  # channel that does not contain beads
+  duplicate_which_transformations: '"Replace all transformations"', # mode of duplication
+  bsh_file: "dublicate_transformations.bsh" # .bsh script for duplication
+  }
+  
+  # ----------------------------------------------------------------------------
+  # 8.8. fusion
+  # ----------------------------------------------------------------------------
+fusion: {
+  # fused_image: '"Append to current XML Project"', does not work yet
+  process_timepoint: '"Single Timepoint (Select from List)"',
+  process_angle: '"All angles"',
+  process_channel: '"All channels"',
+  process_illumination: '"All illuminations"',
+  imglib2_container_fusion: '"ArrayImg"',
+  interpolation: '"Linear Interpolation"',
+  pixel_type: '"16-bit unsigned integer"',
+  imglib2_data_container: '"ArrayImg (faster)"',
+  process_views_in_paralell: '"All"',
+  fused_image: '"Save as TIFF stack"', 
+  num_cores_fusion: 6,
+  bsh_file: "fusion.bsh"
+  }
+  # ----------------------------------------------------------------------------
+  # 8.9. external_transform
+  # ----------------------------------------------------------------------------
+external_transform: {
+  # Downsamples for deconvolution
+  # channel setting: '"all_channels"'
+  # channel_setting: '"green"',
+  transform_timepoint: '"All Timepoints"',
+  transform_angle: '"All angles"',
+  transform_channel: '"All channels"',
+  transform_illumination: '"All illuminations"',
+  apply_transformation: '"Current view transformations (appends to current transforms)"',
+  define_mode_transform: '"Matrix"',
+  transformation: '"Rigid"',
+  bsh_file: "transform.bsh"
+  }
+  
+  # ----------------------------------------------------------------------------
+  # 8.10. deconvolution
+  # ----------------------------------------------------------------------------
+deconvolution: {
+  # Settings for GPU or CPU processing 
+  # '"CPU (Java)"' or '"GPU (Nvidia CUDA via JNA)"'
+  compute_on: '"GPU (Nvidia CUDA via JNA)"',
+  cudafourierconvolution: "libFourierConvolutionCUDALib.so", # GPU processing name of cuda library
+  # Standard settings for deconvolution
+  process_timepoint: '"Single Timepoint (Select from List)"',
+  process_angle: '"All angles"',
+  process_channel: '"All channels"',
+  process_illumination: '"All illuminations"',
+  type_of_iteration: '"Efficient Bayesian - Optimization I (fast, precise)"',
+  Tikhonov_parameter: '0.0006',
+  compute: '"in 512x512x512 blocks"',
+  osem_acceleration: '"1 (balanced)"',
+  psf_estimation: '"Extract from beads"',
+  psf_size_x: '19',
+  psf_size_y: '19',
+  psf_size_z: '25',
+  imglib2_container: '"ArrayImg"',
+  fused_image: '"Save as TIFF stack"',
+  num_cores_deco: 7,
+  bsh_file: "deconvolution.bsh"
+  }
+  
+  # ----------------------------------------------------------------------------
+  # 8.11. hdf5_output
+  # ----------------------------------------------------------------------------
+hdf5_output: {
+  # if data is 32Bit then the data is converted into 16Bit data
+  convert_32bit: '"[Use min/max of first image (might saturate intenities over time)]"',
+  # subsampling and chunk size settings: dataset dependent
+  subsampling_output: '"{{ {{1,1,1}}, {{2,2,2}}, {{4,4,4}}, {{8,8,8}} }}"', # data dependent
+  chunk_sizes_output: '"{{ {{16,16,16}}, {{16,16,16}}, {{16,16,16}}, {{16,16,16}} }}"', # data dependent
+  # subsampling_output: '"{{ {{1,1,1}}, {{2,2,2}} }}"',
+  # chunk_sizes_output: '"{{ {{16,16,16}}, {{16,16,16}} }}"',
+  # Standard settings for hdf5_output
+  output_type_of_dataset: '"Image Stacks (ImageJ Opener)"', # '"Image Stacks (ImageJ Opener)"' or '"Image Stacks (LOCI Bioformats)"'
+  output_multiple_timepoints: '"YES (one file per time-point)"',
+  output_multiple_angles: '"NO (one angle)"',
+  output_illumination_directions: '"NO (one illumination direction)"',
+  output_imglib_container: '"ArrayImg (faster)"',
+  num_cores_hdf5_xml_output: 4,
+  num_cores_resave_hdf5_output: 4,
+  bsh_file_define: "define_output.bsh", # .bsh script for defining the dataset
+  bsh_file_hdf5: "export_output.bsh"    # .bsh script for resaving into hdf5
+  }
+
+  # ============================================================================
+  # 9. Fiji Resource settings
+  # 
+  # Description: number of cores and memory for Fiji
+  # Options: number of cores
+  #          memory in GB
+  # ============================================================================
+Fiji_resources: {
+  # setting for hdf5 resave:
+  num_cores_hdf5: 3,
+  mem_hdf5: "20g",
+  # setting for registration:
+  num_cores_reg: 4,
+  mem_reg: "40g",
+  # setting for timelapse registration:
+  num_cores_time: 3,
+  mem_time: "50g",
+  # settings for average fusion:
+  num_cores_fusion: 6,
+  mem_fusion: "50g", 
+  # settings for deconvolution:
+  num_cores_deco: 12,
+  mem_deco: "110g",
+  # settings for resaving of output:
+  num_cores_output: 3,
+  mem_output: "20g"
+  }
diff --git a/haas-imagej-client/src/test/java/TestUploadingData.java b/haas-imagej-client/src/test/java/TestUploadingData.java
new file mode 100644
index 0000000000000000000000000000000000000000..0611d77f67609f91177ac9fdd2247787377bb497
--- /dev/null
+++ b/haas-imagej-client/src/test/java/TestUploadingData.java
@@ -0,0 +1,17 @@
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import cz.it4i.fiji.haas.UploadingFileFromResource;
+import cz.it4i.fiji.haas_java_client.HaaSClient.UploadingFile;
+
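+/**
+ * Manual smoke test: resolves the bundled config.yaml from the classpath via
+ * {@link UploadingFileFromResource} and logs its computed size.
+ */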
+public class TestUploadingData {
+	private static Logger log = LoggerFactory.getLogger(TestUploadingData.class);
+	public static void main(String[] args) {
+		UploadingFile uf = new UploadingFileFromResource("", "config.yaml");
+		log.info("size: {}", uf.getLength());
+	}
+}
diff --git a/haas-java-client/pom.xml b/haas-java-client/pom.xml
index 58f203c44b4b9d7dd419c5c01579f1092030489b..83d22c1df2320400430853242d8f0111f058167a 100644
--- a/haas-java-client/pom.xml
+++ b/haas-java-client/pom.xml
@@ -84,7 +84,18 @@
 			<artifactId>commons-logging</artifactId>
 			<version>1.1.1</version>
 		</dependency>
-
+		<!-- https://mvnrepository.com/artifact/org.slf4j/slf4j-api -->
+		<dependency>
+			<groupId>org.slf4j</groupId>
+			<artifactId>slf4j-api</artifactId>
+			<version>1.7.25</version>
+		</dependency>
+		<!-- https://mvnrepository.com/artifact/org.slf4j/slf4j-jdk14 -->
+		<dependency>
+			<groupId>org.slf4j</groupId>
+			<artifactId>slf4j-jdk14</artifactId>
+			<version>1.7.25</version>
+		</dependency>
 
 	</dependencies>
 </project>
diff --git a/haas-java-client/src/main/java/cz/it4i/fiji/haas_java_client/HaaSClient.java b/haas-java-client/src/main/java/cz/it4i/fiji/haas_java_client/HaaSClient.java
index ec345a94e900a6423a7169b8bb450c04a696a1e5..1511d72efe9c07a488247a989545f5e65c083944 100644
--- a/haas-java-client/src/main/java/cz/it4i/fiji/haas_java_client/HaaSClient.java
+++ b/haas-java-client/src/main/java/cz/it4i/fiji/haas_java_client/HaaSClient.java
@@ -19,9 +19,13 @@ import java.util.Map.Entry;
 import java.util.function.Predicate;
 import java.util.stream.Collectors;
 import java.util.stream.Stream;
+import java.util.stream.StreamSupport;
 
 import javax.xml.rpc.ServiceException;
 
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 import com.jcraft.jsch.JSchException;
 
 import cz.it4i.fiji.haas_java_client.proxy.CommandTemplateParameterValueExt;
@@ -45,13 +49,19 @@ import cz.it4i.fiji.haas_java_client.proxy.UserAndLimitationManagementWsSoap;
 import cz.it4i.fiji.scpclient.ScpClient;
 
 public class HaaSClient {
-	interface UploadingFile {
+
+	private static final Logger log = LoggerFactory.getLogger(HaaSClient.class);
+
+	public interface UploadingFile {
 		InputStream getInputStream();
+
 		String getName();
+
 		long getLength();
+
 		long getLastTime();
 	}
-	
+
 	static public class SynchronizableFiles {
 
 		private Collection<TaskFileOffsetExt> files = new LinkedList<>();
@@ -157,30 +167,41 @@ public class HaaSClient {
 		return start(files, name, templateParameters, dummyNotifier);
 	}
 
-	public long start(Iterable<Path> files, String name, Collection<Entry<String, String>> templateParameters,
+	private long start(Iterable<Path> files, String name, Collection<Entry<String, String>> templateParameters,
+			ProgressNotifier notifier) {
+		Stream<UploadingFile> fileStream = StreamSupport.stream(files.spliterator(), false)
+				.map(file -> getUploadingFile(file));
+		return start(fileStream, name, templateParameters, notifier);
+	}
+
+	public long start(Stream<UploadingFile> files, String name, Collection<Entry<String, String>> templateParameters,
 			ProgressNotifier notifier) {
 		notifier.setTitle("Starting job");
-		TaskSpecificationExt taskSpec = createTaskSpecification(name, templateId, templateParameters);
-		JobSpecificationExt jobSpecification = createJobSpecification(name, Arrays.asList(taskSpec));
 		try {
-			String item;
+			TaskSpecificationExt taskSpec = createTaskSpecification(name, templateId, templateParameters);
+			JobSpecificationExt jobSpecification = createJobSpecification(name, Arrays.asList(taskSpec));
 			String jobItem;
 			SubmittedJobInfoExt job = getJobManagement().createJob(jobSpecification, getSessionID());
 			notifier.addItem(jobItem = String.format("Created job: %d\n", job.getId()));
+
 			FileTransferMethodExt fileTransfer = getFileTransfer().getFileTransferMethod(job.getId(), getSessionID());
-			List<Long> totalSizes = getSizes(files);
+			List<UploadingFile> fileList = files.collect(Collectors.toList()); // materialize: used for sizes and upload
+			List<Long> totalSizes = fileList.stream().map(UploadingFile::getLength).collect(Collectors.toList());
 			long totalSize = totalSizes.stream().mapToLong(l -> l.longValue()).sum();
 			TransferFileProgressForHaaSClient progress = new TransferFileProgressForHaaSClient(totalSize, notifier);
 			try (ScpClient scpClient = getScpClient(fileTransfer)) {
 				int index = 0;
-				for (Path file : files) {
+				for (UploadingFile file : fileList) {
+					String item;
 					progress.startNewFile(totalSizes.get(index));
-					notifier.addItem(item = "Uploading file: " + file.getFileName());
-					String destFile = "'" + fileTransfer.getSharedBasepath() + "/" + file.getFileName() + "'";
-					boolean result = scpClient.upload(file, destFile, progress);
-					notifier.itemDone(item);
-					if (!result) {
-						throw new HaaSClientException("Uploading of " + file + " to " + destFile + " failed");
+					notifier.addItem(item = "Uploading file: " + file.getName());
+					String destFile = "'" + fileTransfer.getSharedBasepath() + "/" + file.getName() + "'";
+					try (InputStream is = file.getInputStream()) {
+						boolean result = scpClient.upload(is, destFile, file.getLength(), file.getLastTime(), progress);
+						notifier.itemDone(item);
+						if (!result) {
+							throw new HaaSClientException("Uploading of " + file + " to " + destFile + " failed");
+						}
 					}
 					index++;
 				}
@@ -297,13 +318,14 @@ public class HaaSClient {
 			throw new HaaSClientException(e);
 		}
 	}
-	
+
 	public void uploadFileData(Long jobId, InputStream inputStream, String fileName, long length,
 			long lastModification) {
 		try {
 			FileTransferMethodExt ft = getFileTransfer().getFileTransferMethod(jobId, getSessionID());
 			try (ScpClient scpClient = getScpClient(ft)) {
-				scpClient.upload(inputStream,fileName,length,lastModification, new TransferFileProgressForHaaSClient(0, dummyNotifier));
+				scpClient.upload(inputStream, fileName, length, lastModification,
+						new TransferFileProgressForHaaSClient(0, dummyNotifier));
 				getFileTransfer().endFileTransfer(jobId, ft, getSessionID());
 			}
 		} catch (IOException | JSchException | ServiceException e) {
@@ -311,6 +333,47 @@ public class HaaSClient {
 		}
 	}
 
+	public static UploadingFile getUploadingFile(Path file) {
+		return new UploadingFile() {
+
+			@Override
+			public InputStream getInputStream() {
+				try {
+					return Files.newInputStream(file);
+				} catch (IOException e) {
+					log.error(e.getMessage(), e);
+					throw new RuntimeException(e);
+				}
+			}
+
+			@Override
+			public String getName() {
+				return file.getFileName().toString();
+			}
+
+			@Override
+			public long getLength() {
+				try {
+					return Files.size(file);
+				} catch (IOException e) {
+					log.error(e.getMessage(), e);
+					throw new RuntimeException(e);
+				}
+			}
+
+			@Override
+			public long getLastTime() {
+				try {
+					return Files.getLastModifiedTime(file).toMillis();
+				} catch (IOException e) {
+					log.error(e.getMessage(), e);
+					throw new RuntimeException(e);
+				}
+			}
+
+		};
+	}
+
 	private List<Long> getSizes(List<String> asList, ScpClient scpClient, ProgressNotifier notifier)
 			throws JSchException, IOException {
 		List<Long> result = new LinkedList<>();
@@ -405,14 +468,6 @@ public class HaaSClient {
 		return fileTransfer;
 	}
 
-	private List<Long> getSizes(Iterable<Path> files) throws IOException {
-		List<Long> result = new LinkedList<>();
-		for (Path path : files) {
-			result.add(Files.size(path));
-		}
-		return result;
-	}
-
 	private String getSessionID() throws RemoteException, ServiceException {
 		if (sessionID == null) {
 			sessionID = authenticate();
@@ -469,6 +524,4 @@ public class HaaSClient {
 		}
 	}
 
-	
-
 }