Commit fd47e635 authored by Jan Kožusznik

basic definitions

parent 863ac5ef
1 merge request: !5 Iss1011
HaaSOutputHolder.java
 package cz.it4i.fiji.haas;
-import java.util.Arrays;
-import cz.it4i.fiji.haas.JobManager.JobSynchronizableFile;
-import cz.it4i.fiji.haas_java_client.SynchronizableFileType;
-public class HaaSOutputHolder {
-	private StringBuilder result = new StringBuilder();
-	private HaaSOutputSource source;
-	private SynchronizableFileType type;
-	public HaaSOutputHolder(HaaSOutputSource source, SynchronizableFileType typeForHold) {
-		super();
-		this.source = source;
-		this.type = typeForHold;
-	}
-	public String getActualOutput() {
-		updateData();
-		return result.toString();
-	}
-	private void updateData() {
-		JobSynchronizableFile file = new JobSynchronizableFile(type, result.length());
-		result.append(source.getOutput(Arrays.asList(file)).get(0));
-	}
-}
+public interface HaaSOutputHolder {
+	String getActualOutput();
+}
\ No newline at end of file
HaaSOutputHolderImpl.java
package cz.it4i.fiji.haas;

import java.util.Arrays;

import cz.it4i.fiji.haas.JobManager.JobSynchronizableFile;
import cz.it4i.fiji.haas_java_client.SynchronizableFileType;

public class HaaSOutputHolderImpl implements HaaSOutputHolder {

	private StringBuilder result = new StringBuilder();
	private HaaSOutputSource source;
	private SynchronizableFileType type;

	public HaaSOutputHolderImpl(HaaSOutputSource source, SynchronizableFileType typeForHold) {
		super();
		this.source = source;
		this.type = typeForHold;
	}

	/* (non-Javadoc)
	 * @see cz.it4i.fiji.haas.HaaSOutputHolder#getActualOutput()
	 */
	@Override
	public String getActualOutput() {
		updateData();
		return result.toString();
	}

	private void updateData() {
		JobSynchronizableFile file = new JobSynchronizableFile(type, result.length());
		result.append(source.getOutput(Arrays.asList(file)).get(0));
	}
}
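HaaSOutputHolderImpl fetches output incrementally: each call to getActualOutput() asks the source for the held file starting at offset result.length(), so only text that appeared since the last call is appended. A minimal usage sketch, assuming HaaSOutputSource declares List<String> getOutput(List<JobSynchronizableFile>) as used by BenchmarkJob below; the anonymous source is a stand-in that ignores the requested offset, which a real source (such as a running job) would honour:

import java.util.Arrays;
import java.util.List;

import cz.it4i.fiji.haas.HaaSOutputHolder;
import cz.it4i.fiji.haas.HaaSOutputHolderImpl;
import cz.it4i.fiji.haas.HaaSOutputSource;
import cz.it4i.fiji.haas.JobManager.JobSynchronizableFile;
import cz.it4i.fiji.haas_java_client.SynchronizableFileType;

public class OutputHolderExample {
	public static void main(String[] args) {
		// Stand-in source; a real one would return only the content starting at the
		// offset carried by each JobSynchronizableFile.
		HaaSOutputSource source = new HaaSOutputSource() {
			@Override
			public List<String> getOutput(List<JobSynchronizableFile> files) {
				return Arrays.asList("Building DAG of jobs...\n");
			}
		};
		HaaSOutputHolder holder =
				new HaaSOutputHolderImpl(source, SynchronizableFileType.StandardErrorFile);
		System.out.println(holder.getActualOutput());
	}
}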
BenchmarkJobManager.java
@@ -22,8 +22,10 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.yaml.snakeyaml.Yaml;
import cz.it4i.fiji.haas.Job;
import cz.it4i.fiji.haas.HaaSOutputHolder;
import cz.it4i.fiji.haas.HaaSOutputHolderImpl;
import cz.it4i.fiji.haas.HaaSOutputSource;
import cz.it4i.fiji.haas.Job;
import cz.it4i.fiji.haas.JobManager;
import cz.it4i.fiji.haas.JobManager.JobSynchronizableFile;
import cz.it4i.fiji.haas.UploadingFileFromResource;
@@ -41,6 +43,9 @@ public class BenchmarkJobManager {
private static Logger log = LoggerFactory
.getLogger(cz.it4i.fiji.haas_spim_benchmark.core.BenchmarkJobManager.class);
+private JobManager jobManager;
public final class BenchmarkJob extends ObservableValueBase<BenchmarkJob> implements HaaSOutputSource {
private Job job;
@@ -48,12 +53,25 @@
private HaaSOutputHolder outputOfSnakemake;
-private Collection<Task> tasks;
+private List<Task> tasks;
private SPIMComputationAccessor computationAccessor = new SPIMComputationAccessor() {
@Override
public String getActualOutput() {
return outputOfSnakemake.getActualOutput();
}
@Override
public boolean fileExists(String fileName) {
// TASK 1011 modify the Job interface to allow checking whether a file exists (a possible sketch follows the SPIMComputationAccessor interface below)
return false;
}
};
public BenchmarkJob(Job job) {
super();
this.job = job;
-outputOfSnakemake = new HaaSOutputHolder(getValue(), SynchronizableFileType.StandardErrorFile);
+outputOfSnakemake = new HaaSOutputHolderImpl(getValue(), SynchronizableFileType.StandardErrorFile);
}
public void startJob(Progress progress) throws IOException {
@@ -91,7 +109,7 @@
public List<String> getOutput(List<JobSynchronizableFile> files) {
return job.getOutput(files);
}
public long getId() {
return job.getId();
}
@@ -152,7 +170,7 @@
return job.getDirectory();
}
-public Collection<Task> getTasks() {
+public List<Task> getTasks() {
if(tasks == null) {
fillTasks();
}
@@ -161,7 +179,23 @@
private void fillTasks() {
SPIMComputationAccessor accessor = computationAccessor;
String snakeMakeoutput = outputOfSnakemake.getActualOutput();
//TASK 1011 parse the Snakemake output and create tasks from the "Job counts" part below (a parsing sketch follows this file's diff):
//Job counts:
// count jobs
// 1 define_output
// 1 define_xml_tif
// 1 done
// 2 fusion
// 1 hdf5_xml
// 1 hdf5_xml_output
// 2 registration
// 2 resave_hdf5
// 2 resave_hdf5_output
// 1 timelapse
// 1 xml_merge
// 15
}
private void setDownloaded(boolean b) {
@@ -174,8 +208,6 @@
}
}
-private JobManager jobManager;
public BenchmarkJobManager(BenchmarkSPIMParameters params) throws IOException {
jobManager = new JobManager(params.workingDirectory(), constructSettingsFromParams(params));
}
@@ -350,7 +382,6 @@
}
private static Settings constructSettingsFromParams(BenchmarkSPIMParameters params) {
// TODO Auto-generated method stub
return new Settings() {
@Override
......
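The fillTasks TODO above amounts to parsing the "Job counts" table that Snakemake prints (one row per rule with the number of planned jobs) into one Task per rule. The sketch below is only an illustration of that TODO under the format shown in the comment; the helper name parseTasks is made up and the real implementation may parse differently. It assumes java.util.List and java.util.LinkedList are imported.

// Hypothetical helper (not part of the project) sketching the fillTasks TODO.
private List<Task> parseTasks(SPIMComputationAccessor accessor, String snakemakeOutput) {
	List<Task> result = new LinkedList<>();
	boolean inTable = false;
	for (String line : snakemakeOutput.split("\n")) {
		line = line.trim();
		if (line.equals("Job counts:")) {
			inTable = true;                    // the table follows this header
			continue;
		}
		if (!inTable || line.isEmpty() || line.startsWith("count")) {
			continue;                          // skip everything before the table and the "count jobs" row
		}
		String[] columns = line.split("\\s+"); // rows look like "2 registration"
		if (columns.length < 2) {
			break;                             // the trailing total ("15") ends the table
		}
		result.add(new Task(accessor, columns[1], Integer.parseInt(columns[0])));
	}
	return result;
}

fillTasks could then assign something like tasks = parseTasks(computationAccessor, outputOfSnakemake.getActualOutput()), but that wiring is equally hypothetical.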
PipelineBase.java
package cz.it4i.fiji.haas_spim_benchmark.core;

import java.util.Collection;

abstract public class PipelineBase<T extends PipelineBase<?, ?>, S> {

	private Collection<T> successors;
	private S id;

	public PipelineBase(S id) {
		super();
		this.id = id;
	}

	public Collection<T> getSuccessors() {
		if (successors == null) {
			successors = fillSuccesors();
		}
		return successors;
	}

	public S getId() {
		return id;
	}

	abstract protected Collection<T> fillSuccesors();
}
SPIMComputationAccessor.java
package cz.it4i.fiji.haas_spim_benchmark.core;

import cz.it4i.fiji.haas.HaaSOutputHolder;

public interface SPIMComputationAccessor extends HaaSOutputHolder {
	boolean fileExists(String fileName);
}
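The anonymous SPIMComputationAccessor inside BenchmarkJob currently returns false from fileExists (the TASK 1011 note above). One possible backing, assuming the files of interest are synchronized into the directory returned by Job.getDirectory() and that this method returns a java.nio.file.Path, is a plain filesystem check; this is a sketch of one option, not the planned change to the Job interface:

// Sketch only: a real implementation may instead query the remote HaaS storage.
@Override
public boolean fileExists(String fileName) {
	return java.nio.file.Files.exists(job.getDirectory().resolve(fileName));
}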
Task.java
 package cz.it4i.fiji.haas_spim_benchmark.core;
 import java.util.Collection;
-import cz.it4i.fiji.haas_spim_benchmark.core.BenchmarkJobManager.Job;
+import java.util.LinkedList;
 public class Task {
+	private SPIMComputationAccessor outputHolder;
 	private String description;
-	private Job job;
 	private Collection<TaskComputation> computations;
-	public Task(Job job, String description) {
+	private int numComputations;
+	public Task(SPIMComputationAccessor outputHolder, String description, int numComputations) {
 		this.description = description;
-		this.job = job;
+		this.outputHolder = outputHolder;
+		this.numComputations = numComputations;
 	}
 	public Collection<TaskComputation> getComputations() {
 		if (computations == null) {
 			fillComputations();
 		}
 		return computations;
 	}
 	public String getDescription() {
 		return description;
 	}
-	public Collection<Task> getPredecessors() {
-		return null;
+	private void fillComputations() {
+		computations = new LinkedList<>();
+		for (int i = 0; i < numComputations; i++) {
+			computations.add(new TaskComputation(outputHolder, this, i + 1));
+		}
 	}
 }
TaskComputation.java
package cz.it4i.fiji.haas_spim_benchmark.core;

import java.util.Collection;
import java.util.LinkedList;

import cz.it4i.fiji.haas_java_client.JobState;

public class TaskComputation {

	private SPIMComputationAccessor outputHolder;
	private int timepoint;
	private Long id;
	//TASK 1011: which states will be defined and how they will be determined
	private JobState state;
	private Task task;
	private Collection<String> logs = new LinkedList<>();
	private Collection<String> outputs = new LinkedList<>();
	private Collection<String> inputs = new LinkedList<>();

	public TaskComputation(SPIMComputationAccessor outputHolder, Task task, int timepoint) {
		this.outputHolder = outputHolder;
		this.timepoint = timepoint;
		this.task = task;
	}

	public JobState getState() {
		updateState(); //TASK 1011: it is not a good idea to update every time the state is requested
		return state != null ? state : JobState.Configuring;
	}

	private void updateState() {
		String snakeOutput = outputHolder.getActualOutput();
		Long id = getId();
		if (id == null) {
			return;
		}
		//TASK 1011
		//resolve whether the job is queued (id defined), started (log file exists),
		//finished ("Finished job 10." in the log) or failed (an error in the log)
		//(a hedged sketch follows this class)
	}

	private Long getId() {
		if (id == null) {
			fillId();
		}
		return id;
	}

	private void fillId() {
		//TASK 1011
		//find the timepoint-th occurrence of
		//rule resave_hdf5:
		//    input: HisRFP_test-01-00.h5_xml, HisRFP_test_first.xml
		//    output: HisRFP_test-01-00.h5, HisRFP_test-01-00.h5_hdf5
		//    log: logs/b2_resave_hdf5-01.log
		//    jobid: 7
		//    wildcards: xml_base=HisRFP_test, file_id=01
		//resave_hdf5 == task.getDescription()
		//jobid -> id
		//input -> inputs
		//...
		//
		//or return
	}
}
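The fillId TODO above describes scanning the accumulated Snakemake output for the timepoint-th block of this task's rule and pulling the jobid (and, later, inputs, outputs and logs) out of it. A rough sketch of that scan follows; the regular expressions are assumptions about the output format, the code is illustrative only, and it presumes java.util.regex.Pattern and Matcher imports:

// Illustrative only: parsing rules are assumptions, not the project's code.
private void fillId() {
	String output = outputHolder.getActualOutput();
	// Blocks look like:
	//   rule resave_hdf5:
	//       input: ...
	//       jobid: 7
	Pattern rulePattern = Pattern.compile(
			"rule " + Pattern.quote(task.getDescription()) + ":\\R(.*?)(?=\\Rrule |\\z)",
			Pattern.DOTALL);
	Matcher block = rulePattern.matcher(output);
	int occurrence = 0;
	while (block.find()) {
		if (++occurrence < timepoint) {
			continue;                // keep going until the timepoint-th occurrence of the rule
		}
		Matcher jobId = Pattern.compile("jobid: (\\d+)").matcher(block.group(1));
		if (jobId.find()) {
			id = Long.parseLong(jobId.group(1));
		}
		return;                      // the same block is a natural place to fill inputs, outputs and logs
	}
}

updateState would then read the same output again: the computation counts as queued once the jobid is known, started once its log file exists, finished when "Finished job <id>." appears, and failed when an error for that job shows up; the concrete JobState values for those cases are left to TASK 1011.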