diff --git a/haas-java-client/src/main/java/cz/it4i/fiji/haas_java_client/JobState.java b/haas-java-client/src/main/java/cz/it4i/fiji/haas_java_client/JobState.java
index 515310a6cdf1587d0e3b2f95fe38f7c6d187c4cb..374481004f6c13fb6f4dcccee151dc97b4cb0ab3 100644
--- a/haas-java-client/src/main/java/cz/it4i/fiji/haas_java_client/JobState.java
+++ b/haas-java-client/src/main/java/cz/it4i/fiji/haas_java_client/JobState.java
@@ -1,11 +1,12 @@
 package cz.it4i.fiji.haas_java_client;
 
 public enum JobState {
-	Configuring ,
-    Submitted ,
-    Queued ,
-    Running ,
-    Finished ,
-    Failed ,
+	Unknown,
+	Configuring,
+    Submitted,
+    Queued,
+    Running,
+    Finished,
+    Failed,
     Canceled;
 }
diff --git a/haas-spim-benchmark/src/main/java/cz/it4i/fiji/haas_spim_benchmark/core/BenchmarkJobManager.java b/haas-spim-benchmark/src/main/java/cz/it4i/fiji/haas_spim_benchmark/core/BenchmarkJobManager.java
index eeb218aa001c44cdd7fc7cc0655df445de1c33de..473f25510165e0f98d88f9fcc7aeb0a347a9b4dd 100644
--- a/haas-spim-benchmark/src/main/java/cz/it4i/fiji/haas_spim_benchmark/core/BenchmarkJobManager.java
+++ b/haas-spim-benchmark/src/main/java/cz/it4i/fiji/haas_spim_benchmark/core/BenchmarkJobManager.java
@@ -1,6 +1,7 @@
 package cz.it4i.fiji.haas_spim_benchmark.core;
 
 import java.io.BufferedReader;
+import java.io.File;
 import java.io.FileNotFoundException;
 import java.io.FileWriter;
 import java.io.IOException;
@@ -9,12 +10,14 @@ import java.nio.file.Files;
 import java.nio.file.InvalidPathException;
 import java.nio.file.Path;
 import java.nio.file.Paths;
+import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Calendar;
 import java.util.Collection;
 import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
+import java.util.Scanner;
 import java.util.function.Predicate;
 import java.util.stream.Collectors;
 
@@ -49,20 +52,22 @@ public class BenchmarkJobManager {
 		
 		private Job job;
 		
-		private HaaSOutputHolder outputOfSnakemake;
-
 		private List<Task> tasks;
 
 		private SPIMComputationAccessor computationAccessor = new SPIMComputationAccessor() {
+			
+			private HaaSOutputHolder outputOfSnakemake =
+					new HaaSOutputHolderImpl(BenchmarkJob.this, SynchronizableFileType.StandardErrorFile);
+			
 			@Override
 			public String getActualOutput() {
 				return outputOfSnakemake.getActualOutput();
 			}
 			
 			@Override
-			public boolean fileExists(String fileName) {
-				// TASK 1011 modify interface of job for checking of file existence
-				return false;
+			public boolean fileExists(Path filePath) {
+				File f = new File(filePath.toString());
+				return f.exists() && !f.isDirectory();
 			}
 		};
 		
@@ -70,7 +75,6 @@ public class BenchmarkJobManager {
 		public BenchmarkJob(Job job) {
 			super();
 			this.job = job;
-			outputOfSnakemake = new HaaSOutputHolderImpl(this, SynchronizableFileType.StandardErrorFile);
 		}
 
 		public void startJob(Progress progress) throws IOException {
@@ -96,7 +100,7 @@ public class BenchmarkJobManager {
 			setDownloaded(true);
 		}
 
-		public void downloadStatistics(Progress progress) throws IOException {			
+		public void downloadStatistics(Progress progress) throws IOException {
 			job.download(BenchmarkJobManager.downloadStatistics(), progress);
 			Path resultFile = job.getDirectory().resolve(Constants.BENCHMARK_RESULT_FILE);
 			if (resultFile != null)
@@ -135,8 +139,7 @@ public class BenchmarkJobManager {
 		@Override
 		public boolean equals(Object obj) {
 			if (obj instanceof BenchmarkJob) {
-				BenchmarkJob job = (BenchmarkJob) obj;
-				return job.getId() == getId();
+				return ((BenchmarkJob) obj).getId() == getId();
 			}
 			return false;
 		}
@@ -146,9 +149,8 @@ public class BenchmarkJobManager {
 			return getDownloaded();
 		}
 
-		public BenchmarkJob update() {
+		public void update() {
 			job.updateInfo();
-			return this;
 		}
 	
 		public Path getDirectory() {
@@ -156,33 +158,39 @@ public class BenchmarkJobManager {
 		}
 		
 		public List<Task> getTasks() {
-			if(tasks == null) {
+			if (tasks == null) {
 				fillTasks();
 			}
 			return tasks;
-		}
-		
+		}
 
 		private void fillTasks() {
-			SPIMComputationAccessor accessor = computationAccessor;
-			String snakeMakeoutput = outputOfSnakemake.getActualOutput();
-			//TASK 1011 parse snakeOutput, create tasks base part:
-//Job counts:
-//			count	jobs
-//			1	define_output
-//			1	define_xml_tif
-//			1	done
-//			2	fusion
-//			1	hdf5_xml
-//			1	hdf5_xml_output
-//			2	registration
-//			2	resave_hdf5
-//			2	resave_hdf5_output
-//			1	timelapse
-//			1	xml_merge
-//			15
 			
+			final String OUTPUT_PARSING_JOB_COUNTS = "Job counts:";
+			final String OUTPUT_PARSING_TAB_DELIMITER = "\\t";
+			final int OUTPUT_PARSING_EXPECTED_NUMBER_OF_WORDS_PER_LINE = 2;
+			
+			tasks = new ArrayList<>();
+			Scanner scanner = new Scanner(computationAccessor.getActualOutput());
+			while (scanner.hasNextLine()) {
+				if (!scanner.nextLine().equals(OUTPUT_PARSING_JOB_COUNTS)) {
+					continue;
+				}				
+				if (scanner.hasNextLine()) scanner.nextLine(); // skip the "count jobs" header line
+				
+				while (scanner.hasNextLine()) {
+					List<String> lineWords = Arrays.stream(scanner.nextLine().split(OUTPUT_PARSING_TAB_DELIMITER))
+							.filter(word -> word.length() > 0).collect(Collectors.toList());
+					if (lineWords.size() != OUTPUT_PARSING_EXPECTED_NUMBER_OF_WORDS_PER_LINE) {
+						break;
+					}
+					tasks.add(new Task(computationAccessor, lineWords.get(1), Integer.parseInt(lineWords.get(0))));
+				}
+				break;
+			}
+			scanner.close();
 		}
+		
 		private void setDownloaded(boolean b) {
 			job.setProperty(JOB_HAS_DATA_TO_DOWNLOAD_PROPERTY, b + "");
 		}
diff --git a/haas-spim-benchmark/src/main/java/cz/it4i/fiji/haas_spim_benchmark/core/BenchmarkJobManager.java.orig b/haas-spim-benchmark/src/main/java/cz/it4i/fiji/haas_spim_benchmark/core/BenchmarkJobManager.java.orig
deleted file mode 100644
index 4dbef62b7c09997dcd9d4da19b0bcc8a421cd616..0000000000000000000000000000000000000000
--- a/haas-spim-benchmark/src/main/java/cz/it4i/fiji/haas_spim_benchmark/core/BenchmarkJobManager.java.orig
+++ /dev/null
@@ -1,420 +0,0 @@
-package cz.it4i.fiji.haas_spim_benchmark.core;
-
-import java.io.BufferedReader;
-import java.io.FileNotFoundException;
-import java.io.FileWriter;
-import java.io.IOException;
-import java.io.InputStream;
-import java.nio.file.Files;
-import java.nio.file.InvalidPathException;
-import java.nio.file.Path;
-import java.nio.file.Paths;
-import java.util.Arrays;
-import java.util.Calendar;
-import java.util.Collection;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Map;
-import java.util.function.Predicate;
-import java.util.stream.Collectors;
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.yaml.snakeyaml.Yaml;
-
-<<<<<<< HEAD
-import cz.it4i.fiji.haas.HaaSOutputHolder;
-import cz.it4i.fiji.haas.HaaSOutputSource;
-=======
-import cz.it4i.fiji.haas.Job;
->>>>>>> master
-import cz.it4i.fiji.haas.JobManager;
-import cz.it4i.fiji.haas.JobManager.JobSynchronizableFile;
-import cz.it4i.fiji.haas.UploadingFileFromResource;
-import cz.it4i.fiji.haas_java_client.HaaSClient;
-import cz.it4i.fiji.haas_java_client.JobState;
-import cz.it4i.fiji.haas_java_client.Settings;
-import cz.it4i.fiji.haas_java_client.SynchronizableFileType;
-import javafx.beans.value.ObservableValueBase;
-import net.imagej.updater.util.Progress;
-
-public class BenchmarkJobManager {
-
-	private static final String JOB_HAS_DATA_TO_DOWNLOAD_PROPERTY = "job.needDownload";
-	
-	private static Logger log = LoggerFactory
-			.getLogger(cz.it4i.fiji.haas_spim_benchmark.core.BenchmarkJobManager.class);
-
-<<<<<<< HEAD
-	public final class Job extends ObservableValueBase<Job> implements HaaSOutputSource {
-		private JobInfo jobInfo;
-
-=======
-	public final class BenchmarkJob extends ObservableValueBase<BenchmarkJob> {
-		
-		private Job job;
->>>>>>> master
-		private JobState oldState;
-		
-		private HaaSOutputHolder outputOfSnakemake;
-
-		private Collection<Task> tasks;
-
-		public BenchmarkJob(Job job) {
-			super();
-<<<<<<< HEAD
-			this.jobInfo = ji;
-			outputOfSnakemake = new HaaSOutputHolder(getValue(), SynchronizableFileType.StandardErrorFile);
-=======
-			this.job = job;
->>>>>>> master
-		}
-
-		public void startJob(Progress progress) throws IOException {
-			job.uploadFilesByName(Arrays.asList(Constants.CONFIG_YAML), progress);
-			String outputName = getOutputName(job.openLocalFile(Constants.CONFIG_YAML));
-			job.submit();
-			job.setProperty(Constants.SPIM_OUTPUT_FILENAME_PATTERN, outputName);
-			fireValueChangedEvent();
-			setDownloaded(false);
-		}
-
-		public JobState getState() {
-			return oldState = job.getState();
-		}
-
-		public void downloadData(Progress progress) throws IOException {
-			if (job.getState() == JobState.Finished) {
-				String filePattern = job.getProperty(Constants.SPIM_OUTPUT_FILENAME_PATTERN);
-				job.download(downloadFinishedData(filePattern), progress);
-			} else if (job.getState() == JobState.Failed) {
-				job.download(downloadFailedData(), progress);
-			}
-			fireValueChangedEvent();
-			setDownloaded(true);
-		}
-
-		public void downloadStatistics(Progress progress) throws IOException {			
-			job.download(BenchmarkJobManager.downloadStatistics(), progress);
-			fireValueChangedEvent();
-			Path resultFile = job.getDirectory().resolve(Constants.BENCHMARK_RESULT_FILE);
-			if (resultFile != null)
-				BenchmarkJobManager.formatResultFile(resultFile);
-		}
-
-		public List<String> getOutput(List<JobSynchronizableFile> files) {
-			return job.getOutput(files);
-		}
-
-		public long getId() {
-			return job.getId();
-		}
-
-		public String getCreationTime() {
-			return getStringFromTimeSafely(job.getCreationTime());
-		}
-
-		public String getStartTime() {
-			return getStringFromTimeSafely(job.getStartTime());
-		}
-
-		public String getEndTime() {
-			return getStringFromTimeSafely(job.getEndTime());
-		}
-		
-		private String getStringFromTimeSafely(Calendar time) {
-			return time != null ? time.getTime().toString() : "N/A";
-		}
-
-		@Override
-		public BenchmarkJob getValue() {
-			return this;
-		}
-
-		@Override
-		public int hashCode() {
-			return Long.hashCode(job.getId());
-		}
-
-		@Override
-		public boolean equals(Object obj) {
-			if (obj instanceof BenchmarkJob) {
-				BenchmarkJob job = (BenchmarkJob) obj;
-				return job.getId() == getId();
-			}
-			return false;
-		}
-
-		public void update(BenchmarkJob benchmarkJob) {
-			job = benchmarkJob.job;
-			if (benchmarkJob.job.getState() != oldState)
-				fireValueChangedEvent();
-		}
-		
-		public boolean downloaded() {
-			return getDownloaded();
-		}
-
-		public BenchmarkJob update() {
-			job.updateInfo();
-			if (!job.getState().equals(oldState))
-				fireValueChangedEvent();
-			return this;
-		}
-
-		public Path getDirectory() {
-			return job.getDirectory();
-		}
-		
-<<<<<<< HEAD
-		public Collection<Task> getTasks() {
-			if(tasks == null) {
-				fillTasks();
-			}
-			return tasks;
-		}
-		
-
-		private void fillTasks() {
-			String snakeMakeoutput = outputOfSnakemake.getActualOutput();
-			
-		}
-
-=======
->>>>>>> master
-		private void setDownloaded(boolean b) {
-			job.setProperty(JOB_HAS_DATA_TO_DOWNLOAD_PROPERTY, b + "");
-		}
-		
-		private boolean getDownloaded() {
-			String downloadedStr = job.getProperty(JOB_HAS_DATA_TO_DOWNLOAD_PROPERTY);
-			return downloadedStr != null && Boolean.parseBoolean(downloadedStr);
-		}
-	}
-
-	private JobManager jobManager;
-
-	public BenchmarkJobManager(BenchmarkSPIMParameters params) throws IOException {
-		jobManager = new JobManager(params.workingDirectory(), constructSettingsFromParams(params));
-	}
-
-	public BenchmarkJob createJob() throws IOException {
-		Job job = jobManager.createJob();
-		job.storeDataInWorkdirectory(getUploadingFile());
-		return convertJob(job);
-	}
-
-	public Collection<BenchmarkJob> getJobs() throws IOException {
-		return jobManager.getJobs().stream().map(this::convertJob).collect(Collectors.toList());
-	}
-
-
-	private HaaSClient.UploadingFile getUploadingFile() {
-		return new UploadingFileFromResource("", Constants.CONFIG_YAML);
-	}
-
-	private BenchmarkJob convertJob(Job job) {
-		return new BenchmarkJob(job);
-	}
-
-	private String getOutputName(InputStream openLocalFile) throws IOException {
-		try (InputStream is = openLocalFile) {
-			Yaml yaml = new Yaml();
-
-			Map<String, Map<String, String>> map = yaml.load(is);
-			String result = map.get("common").get("hdf5_xml_filename");
-			if (result == null) {
-				throw new IllegalArgumentException("hdf5_xml_filename not found");
-			}
-			if (result.charAt(0) == '"' || result.charAt(0) == '\'') {
-				if (result.charAt(result.length() - 1) != result.charAt(0)) {
-					throw new IllegalArgumentException(result);
-				}
-				result = result.substring(1, result.length() - 1);
-			}
-
-			return result;
-		}
-
-	}
-
-	private static Predicate<String> downloadFinishedData(String filePattern) {
-		return name -> {
-			Path path = getPathSafely(name);
-			if (path == null)
-				return false;
-			
-			String fileName = path.getFileName().toString();
-			return fileName.startsWith(filePattern) && fileName.endsWith("h5") || fileName.equals(filePattern + ".xml")
-					|| fileName.equals(Constants.BENCHMARK_RESULT_FILE);
-		};
-	}
-
-	private static Predicate<String> downloadStatistics() {
-		return name -> {
-			Path path = getPathSafely(name);
-			if (path == null)
-				return false;
-			
-			String fileName = path.getFileName().toString();
-			return fileName.equals(Constants.BENCHMARK_RESULT_FILE);
-		};
-	}
-
-	private static Predicate<String> downloadFailedData() {
-		return name -> {
-			Path path = getPathSafely(name);
-			if (path == null)
-				return false;
-			
-			return path.getFileName().toString().startsWith("snakejob.")
-					|| path.getParent().getFileName().toString().equals("logs");
-		};
-	}
-	
-	private static Path getPathSafely(String name) {
-		try {
-			return Paths.get(name);
-		} catch(InvalidPathException ex) {
-			return null;
-		}
-	}
-	
-	private static void formatResultFile(Path filename) throws FileNotFoundException {
-		
-		List<ResultFileTask> identifiedTasks = new LinkedList<ResultFileTask>();
-		
-		final String newLineSeparator = "\n";
-		final String delimiter = ";";
-		final String summaryFileHeader = "Task;MemoryUsage;WallTime;JobCount";
-		
-		try {
-			String line = null;
-			
-			ResultFileTask processedTask = null;			
-			List<ResultFileJob> jobs = new LinkedList<>();
-			
-			BufferedReader reader = Files.newBufferedReader(filename);
-			while (null != (line = reader.readLine())) {
-				
-				line = line.trim();
-				if (line.isEmpty()) {
-					continue;
-				}
-				
-				String[] columns = line.split(delimiter);
-				
-				if (columns[0].equals(Constants.STATISTICS_TASK_NAME)) {
-					
-					// If there is a task being processed, add all cached jobs to it and wrap it up
-					if (null != processedTask ) {
-						processedTask.jobs.addAll(jobs);
-						identifiedTasks.add(processedTask);
-					}
-					
-					// Start processing a new task
-					processedTask = new ResultFileTask(columns[1]);
-					jobs.clear();
-					
-				} else if (columns[0].equals(Constants.STATISTICS_JOB_IDS)) {
-					
-					// Cache all found jobs
-					for (int i = 1; i < columns.length; i++) {
-						jobs.add(new ResultFileJob(columns[i]));
-					}
-					
-				} else if (!columns[0].equals(Constants.STATISTICS_JOB_COUNT)) {
-					
-					// Save values of a given property to cached jobs
-					for (int i = 1; i < columns.length; i++) {
-						jobs.get(i - 1).setValue(columns[0], columns[i]);
-					}
-					
-				} 
-			}
-			
-			// If there is a task being processed, add all cached jobs to it and wrap it up
-			if (null != processedTask ) {
-				processedTask.jobs.addAll(jobs);
-				identifiedTasks.add(processedTask);
-			}
-			
-		} catch (IOException e) {
-			log.error(e.getMessage(), e);
-		}
-		
-		FileWriter fileWriter = null;		
-		try {			
-			fileWriter = new FileWriter(filename.getParent().toString() + "/" + Constants.STATISTICS_SUMMARY_FILENAME);
-			fileWriter.append(summaryFileHeader).append(newLineSeparator);
-			
-			for (ResultFileTask task : identifiedTasks) {
-				fileWriter.append(Constants.STATISTICS_TASK_NAME_MAP.get(task.name)).append(delimiter);
-				fileWriter.append(Double.toString(task.getAverageMemoryUsage())).append(delimiter);
-				fileWriter.append(Double.toString(task.getAverageWallTime())).append(delimiter);
-				fileWriter.append(Integer.toString(task.getJobCount()));
-				fileWriter.append(newLineSeparator);
-			}
-		} catch (Exception e) {
-			log.error(e.getMessage(), e);
-		} finally {
-			try {
-				fileWriter.flush();
-				fileWriter.close();
-			} catch (Exception e) {
-				log.error(e.getMessage(), e);
-			}
-		}
-	}
-
-	private static Settings constructSettingsFromParams(BenchmarkSPIMParameters params) {
-		// TODO Auto-generated method stub
-		return new Settings() {
-
-			@Override
-			public String getUserName() {
-				return params.username();
-			}
-
-			@Override
-			public int getTimeout() {
-				return Constants.HAAS_TIMEOUT;
-			}
-
-			@Override
-			public long getTemplateId() {
-				return Constants.HAAS_TEMPLATE_ID;
-			}
-
-			@Override
-			public String getProjectId() {
-				return Constants.HAAS_PROJECT_ID;
-			}
-
-			@Override
-			public String getPhone() {
-				return params.phone();
-			}
-
-			@Override
-			public String getPassword() {
-				return params.password();
-			}
-
-			@Override
-			public String getJobName() {
-				return Constants.HAAS_JOB_NAME;
-			}
-
-			@Override
-			public String getEmail() {
-				return params.email();
-			}
-
-			@Override
-			public long getClusterNodeType() {
-				return Constants.HAAS_CLUSTER_NODE_TYPE;
-			}
-		};
-	}
-}
diff --git a/haas-spim-benchmark/src/main/java/cz/it4i/fiji/haas_spim_benchmark/core/SPIMComputationAccessor.java b/haas-spim-benchmark/src/main/java/cz/it4i/fiji/haas_spim_benchmark/core/SPIMComputationAccessor.java
index 0ba6590109e4406f0d99e247ac8757d25af4e908..39151ca434b506f873741dae844360934863ffbb 100644
--- a/haas-spim-benchmark/src/main/java/cz/it4i/fiji/haas_spim_benchmark/core/SPIMComputationAccessor.java
+++ b/haas-spim-benchmark/src/main/java/cz/it4i/fiji/haas_spim_benchmark/core/SPIMComputationAccessor.java
@@ -1,7 +1,9 @@
 package cz.it4i.fiji.haas_spim_benchmark.core;
 
+import java.nio.file.Path;
+
 import cz.it4i.fiji.haas.HaaSOutputHolder;
 
 public interface SPIMComputationAccessor extends HaaSOutputHolder {
-	boolean fileExists(String fileName);
+	boolean fileExists(Path fileName);
 }
diff --git a/haas-spim-benchmark/src/main/java/cz/it4i/fiji/haas_spim_benchmark/core/Task.java b/haas-spim-benchmark/src/main/java/cz/it4i/fiji/haas_spim_benchmark/core/Task.java
index 285a579ff2e7d0d91dbdebb594485e0ece3f3965..38f42ad8a8f74d6a6172399c814fd2852e1445b4 100644
--- a/haas-spim-benchmark/src/main/java/cz/it4i/fiji/haas_spim_benchmark/core/Task.java
+++ b/haas-spim-benchmark/src/main/java/cz/it4i/fiji/haas_spim_benchmark/core/Task.java
@@ -4,38 +4,28 @@ import java.util.LinkedList;
 import java.util.List;
 
 public class Task {
-	private SPIMComputationAccessor outputHolder;
-	private String description;
-	private List<TaskComputation> computations;
-	private int numComputations;
+	private final String description;
+	private final List<TaskComputation> computations;
 
 	public Task(SPIMComputationAccessor outputHolder, String description, int numComputations) {
 		this.description = description;
-		this.outputHolder = outputHolder;
-		this.numComputations = numComputations;
-	}
-
-	public List<TaskComputation> getComputations() {
-		if (computations == null) {
-			fillComputations();
+		this.computations = new LinkedList<>();
+		
+		for (int i = 0; i < numComputations; i++) {
+			computations.add(new TaskComputation(outputHolder, this, i + 1));
 		}
-		return computations;
 	}
 
 	public String getDescription() {
 		return description;
 	}
-
-	private void fillComputations() {
-		computations = new LinkedList<>();
-		for (int i = 0; i < numComputations; i++) {
-			computations.add(new TaskComputation(outputHolder, this, i + 1));
-		}
+	
+	public List<TaskComputation> getComputations() {
+		return computations;
 	}
 
 	public void update() {
 		// TODO Auto-generated method stub
 		
 	}
-
 }
diff --git a/haas-spim-benchmark/src/main/java/cz/it4i/fiji/haas_spim_benchmark/core/TaskComputation.java b/haas-spim-benchmark/src/main/java/cz/it4i/fiji/haas_spim_benchmark/core/TaskComputation.java
index c0fc812c89d4e0601ec441af204ecedc53458156..af869e56765636d16a861800e6a58341e7449fe5 100644
--- a/haas-spim-benchmark/src/main/java/cz/it4i/fiji/haas_spim_benchmark/core/TaskComputation.java
+++ b/haas-spim-benchmark/src/main/java/cz/it4i/fiji/haas_spim_benchmark/core/TaskComputation.java
@@ -1,78 +1,119 @@
 package cz.it4i.fiji.haas_spim_benchmark.core;
 
+import java.util.Arrays;
 import java.util.Collection;
 import java.util.LinkedList;
+import java.util.Scanner;
 
 import cz.it4i.fiji.haas_java_client.JobState;
 
-
 public class TaskComputation {
+	
+	// A single-purpose class dedicated to help with TaskComputation members initialization
+	private class ParsedTaskComputationValues {
+		private final Collection<String> logs;
+		private final Collection<String> inputs;
+		private final Collection<String> outputs;
+		private final Long id;
+		
+		public ParsedTaskComputationValues(Collection<String> inputs, Collection<String> outputs, Collection<String> logs, Long id) {
+			this.inputs = inputs;
+			this.outputs = outputs;
+			this.logs = logs;
+			this.id = id;
+		}
+	}
 
-	private SPIMComputationAccessor outputHolder;
-	private int timepoint;
-	private Long id;
+	private final Task task;
+	private final int timepoint;
+	private final Collection<String> inputs;
+	private final Collection<String> outputs;
+	private final Collection<String> logs;
+	private final Long id;
+	
 	//TASK 1011 what states will be defined and how it will be defined
-	private JobState state;
-	private Task task;
-	private Collection<String> logs = new LinkedList<>();
-	private Collection<String> outputs = new LinkedList<>();
-	private Collection<String> inputs = new LinkedList<>();
-
+	private JobState state = JobState.Unknown;
 	
-	public TaskComputation(SPIMComputationAccessor outputHolder,Task task, int timepoint) {
-		this.outputHolder = outputHolder;
-		this.timepoint = timepoint;
+	public TaskComputation(SPIMComputationAccessor outputHolder, Task task, int timepoint) {
 		this.task = task;
+		this.timepoint = timepoint;		
+		ParsedTaskComputationValues parsedValues = parseStuff(outputHolder);
+		this.inputs = parsedValues.inputs;
+		this.outputs = parsedValues.outputs;
+		this.logs = parsedValues.logs;
+		this.id = parsedValues.id;
 	}
 
 	public JobState getState() {
 		updateState();//TASK 1011 it is not good idea update every time when state is requested 
-		return state != null?state:JobState.Configuring;
+		return state;
 	}
 	
+	public void update() {
+	}
+
 	public int getTimepoint() {
 		return timepoint;
 	}
 
 	private void updateState() {
-		String snakeOutput = outputHolder.getActualOutput();
-		Long id = getId();
-		if(id == null) {
+		//TASK 1011 This should never happen, add some error handling to resolveId()
+		if (id == null) {
 			return;
 		}
+		
+		//String snakeOutput = outputHolder.getActualOutput();
+
 		//TASK 1011 
 		//resolve if job is queued (defined id), started (exists log file), finished (in log is Finished job 10.) or
 		//or failed (some error in log)
 	}
 
 	private Long getId() {
-		if(id == null) {
-			fillId();
-		}
 		return id;
 	}
 
-	private void fillId() {
-		//TASK 1011 
-		//find timepoint-th occurence of
-//rule resave_hdf5:
-//    input: HisRFP_test-01-00.h5_xml, HisRFP_test_first.xml
-//    output: HisRFP_test-01-00.h5, HisRFP_test-01-00.h5_hdf5
-//    log: logs/b2_resave_hdf5-01.log
-//    jobid: 7
-//    wildcards: xml_base=HisRFP_test, file_id=01
+	private ParsedTaskComputationValues parseStuff(SPIMComputationAccessor outputHolder) {
 		
-	//resave_hdf5 == task.getDescription()
-	//jobid -> id
-	//input->inputs
-	//...
-	//
-	//or return
-	}
-
-	public void update() {
-		// TODO Auto-generated method stub
+		final String OUTPUT_PARSING_RULE = "rule ";
+		final String OUTPUT_PARSING_COLON = ":";
+		final String OUTPUT_PARSING_COMMA_SPACE = ", ";
+		final String desiredPattern = OUTPUT_PARSING_RULE + task.getDescription() + OUTPUT_PARSING_COLON;
 		
-	}
-	
+		final String OUTPUT_PARSING_INPUTS = "input: ";
+		final String OUTPUT_PARSING_OUTPUTS = "output: ";
+		final String OUTPUT_PARSING_LOGS = "log: ";
+		final String OUTPUT_PARSING_JOB_ID = "jobid: ";
+		
+		Scanner scanner = new Scanner(outputHolder.getActualOutput());
+		int jobsToSkip = timepoint;
+		while (scanner.hasNextLine() && jobsToSkip > 0) {
+			if (scanner.nextLine().equals(desiredPattern)) {
+				jobsToSkip--;
+			}
+		}
+		
+		String currentLine;
+		Collection<String> resolvedInputs = new LinkedList<>();
+		Collection<String> resolvedOutputs = new LinkedList<>();
+		Collection<String> resolvedLogs = new LinkedList<>();
+		Long resolvedId = null;
+		while (scanner.hasNextLine()) {
+			currentLine = scanner.nextLine();
+			if (currentLine.contains(OUTPUT_PARSING_INPUTS)) {
+				resolvedInputs = Arrays.asList(currentLine.split(OUTPUT_PARSING_INPUTS)[1].split(OUTPUT_PARSING_COMMA_SPACE));
+			} else if (currentLine.contains(OUTPUT_PARSING_OUTPUTS)) {
+				resolvedOutputs = Arrays.asList(currentLine.split(OUTPUT_PARSING_OUTPUTS)[1].split(OUTPUT_PARSING_COMMA_SPACE));
+			} else if (currentLine.contains(OUTPUT_PARSING_LOGS)) {
+				resolvedLogs = Arrays.asList(currentLine.split(OUTPUT_PARSING_LOGS)[1].split(OUTPUT_PARSING_COMMA_SPACE));
+			} else if (currentLine.contains(OUTPUT_PARSING_JOB_ID)) {
+				resolvedId = Long.parseLong(currentLine.split(OUTPUT_PARSING_JOB_ID)[1].trim());
+			} else if (currentLine.trim().isEmpty()) {
+				break;				
+			}
+		}
+		scanner.close();
+		
+		return new ParsedTaskComputationValues(resolvedInputs, resolvedOutputs, resolvedLogs, resolvedId);
+	}	
 }