Skip to content
GitLab
Explore
Sign in
Primary navigation
Search or go to…
Project
H
hpc-workflow-manager
Manage
Activity
Members
Labels
Plan
Issues
Issue boards
Milestones
Wiki
Code
Merge requests
Repository
Branches
Commits
Tags
Repository graph
Compare revisions
Snippets
Build
Pipelines
Jobs
Pipeline schedules
Artifacts
Deploy
Releases
Container Registry
Model registry
Operate
Environments
Monitor
Incidents
Analyze
Value stream analytics
Contributor analytics
CI/CD analytics
Repository analytics
Model experiments
Help
Help
Support
GitLab documentation
Compare GitLab plans
Community forum
Contribute to GitLab
Provide feedback
Keyboard shortcuts
?
Snippets
Groups
Projects
Show more breadcrumbs
FIJI
hpc-workflow-manager
Commits
775b1deb
Commit
775b1deb
authored
7 years ago
by
Petr Bainar
Browse files
Options
Downloads
Patches
Plain Diff
iss1011: removing a committed .java.orig file
parent
fd47e635
No related branches found
No related tags found
1 merge request
!5
Iss1011
Changes
1
Hide whitespace changes
Inline
Side-by-side
Showing
1 changed file
haas-spim-benchmark/src/main/java/cz/it4i/fiji/haas_spim_benchmark/core/BenchmarkJobManager.java.orig
+0
-420
0 additions, 420 deletions
...ji/haas_spim_benchmark/core/BenchmarkJobManager.java.orig
with
0 additions
and
420 deletions
haas-spim-benchmark/src/main/java/cz/it4i/fiji/haas_spim_benchmark/core/BenchmarkJobManager.java.orig
deleted
100644 → 0
+
0
−
420
View file @
fd47e635
package cz.it4i.fiji.haas_spim_benchmark.core;
import java.io.BufferedReader;
import java.io.FileNotFoundException;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.InvalidPathException;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Arrays;
import java.util.Calendar;
import java.util.Collection;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.function.Predicate;
import java.util.stream.Collectors;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.yaml.snakeyaml.Yaml;

import cz.it4i.fiji.haas.HaaSOutputHolder;
import cz.it4i.fiji.haas.HaaSOutputSource;
import cz.it4i.fiji.haas.Job;
import cz.it4i.fiji.haas.JobManager;
import cz.it4i.fiji.haas.JobManager.JobSynchronizableFile;
import cz.it4i.fiji.haas.UploadingFileFromResource;
import cz.it4i.fiji.haas_java_client.HaaSClient;
import cz.it4i.fiji.haas_java_client.JobState;
import cz.it4i.fiji.haas_java_client.Settings;
import cz.it4i.fiji.haas_java_client.SynchronizableFileType;

import javafx.beans.value.ObservableValueBase;
import net.imagej.updater.util.Progress;
public class BenchmarkJobManager {
private static final String JOB_HAS_DATA_TO_DOWNLOAD_PROPERTY = "job.needDownload";
private static Logger log = LoggerFactory
.getLogger(cz.it4i.fiji.haas_spim_benchmark.core.BenchmarkJobManager.class);
<<<<<<< HEAD
public final class Job extends ObservableValueBase<Job> implements HaaSOutputSource {
private JobInfo jobInfo;
=======
public final class BenchmarkJob extends ObservableValueBase<BenchmarkJob> {
private Job job;
>>>>>>> master
private JobState oldState;
private HaaSOutputHolder outputOfSnakemake;
private Collection<Task> tasks;
public BenchmarkJob(Job job) {
super();
<<<<<<< HEAD
this.jobInfo = ji;
outputOfSnakemake = new HaaSOutputHolder(getValue(), SynchronizableFileType.StandardErrorFile);
=======
this.job = job;
>>>>>>> master
}
public void startJob(Progress progress) throws IOException {
job.uploadFilesByName(Arrays.asList(Constants.CONFIG_YAML), progress);
String outputName = getOutputName(job.openLocalFile(Constants.CONFIG_YAML));
job.submit();
job.setProperty(Constants.SPIM_OUTPUT_FILENAME_PATTERN, outputName);
fireValueChangedEvent();
setDownloaded(false);
}
public JobState getState() {
return oldState = job.getState();
}
public void downloadData(Progress progress) throws IOException {
if (job.getState() == JobState.Finished) {
String filePattern = job.getProperty(Constants.SPIM_OUTPUT_FILENAME_PATTERN);
job.download(downloadFinishedData(filePattern), progress);
} else if (job.getState() == JobState.Failed) {
job.download(downloadFailedData(), progress);
}
fireValueChangedEvent();
setDownloaded(true);
}
public void downloadStatistics(Progress progress) throws IOException {
job.download(BenchmarkJobManager.downloadStatistics(), progress);
fireValueChangedEvent();
Path resultFile = job.getDirectory().resolve(Constants.BENCHMARK_RESULT_FILE);
if (resultFile != null)
BenchmarkJobManager.formatResultFile(resultFile);
}
public List<String> getOutput(List<JobSynchronizableFile> files) {
return job.getOutput(files);
}
public long getId() {
return job.getId();
}
public String getCreationTime() {
return getStringFromTimeSafely(job.getCreationTime());
}
public String getStartTime() {
return getStringFromTimeSafely(job.getStartTime());
}
public String getEndTime() {
return getStringFromTimeSafely(job.getEndTime());
}
private String getStringFromTimeSafely(Calendar time) {
return time != null ? time.getTime().toString() : "N/A";
}
@Override
public BenchmarkJob getValue() {
return this;
}
@Override
public int hashCode() {
return Long.hashCode(job.getId());
}
@Override
public boolean equals(Object obj) {
if (obj instanceof BenchmarkJob) {
BenchmarkJob job = (BenchmarkJob) obj;
return job.getId() == getId();
}
return false;
}
public void update(BenchmarkJob benchmarkJob) {
job = benchmarkJob.job;
if (benchmarkJob.job.getState() != oldState)
fireValueChangedEvent();
}
public boolean downloaded() {
return getDownloaded();
}
public BenchmarkJob update() {
job.updateInfo();
if (!job.getState().equals(oldState))
fireValueChangedEvent();
return this;
}
public Path getDirectory() {
return job.getDirectory();
}
<<<<<<< HEAD
public Collection<Task> getTasks() {
if(tasks == null) {
fillTasks();
}
return tasks;
}
private void fillTasks() {
String snakeMakeoutput = outputOfSnakemake.getActualOutput();
}
=======
>>>>>>> master
private void setDownloaded(boolean b) {
job.setProperty(JOB_HAS_DATA_TO_DOWNLOAD_PROPERTY, b + "");
}
private boolean getDownloaded() {
String downloadedStr = job.getProperty(JOB_HAS_DATA_TO_DOWNLOAD_PROPERTY);
return downloadedStr != null && Boolean.parseBoolean(downloadedStr);
}
}
private JobManager jobManager;
public BenchmarkJobManager(BenchmarkSPIMParameters params) throws IOException {
jobManager = new JobManager(params.workingDirectory(), constructSettingsFromParams(params));
}
public BenchmarkJob createJob() throws IOException {
Job job = jobManager.createJob();
job.storeDataInWorkdirectory(getUploadingFile());
return convertJob(job);
}
public Collection<BenchmarkJob> getJobs() throws IOException {
return jobManager.getJobs().stream().map(this::convertJob).collect(Collectors.toList());
}
private HaaSClient.UploadingFile getUploadingFile() {
return new UploadingFileFromResource("", Constants.CONFIG_YAML);
}
private BenchmarkJob convertJob(Job job) {
return new BenchmarkJob(job);
}
private String getOutputName(InputStream openLocalFile) throws IOException {
try (InputStream is = openLocalFile) {
Yaml yaml = new Yaml();
Map<String, Map<String, String>> map = yaml.load(is);
String result = map.get("common").get("hdf5_xml_filename");
if (result == null) {
throw new IllegalArgumentException("hdf5_xml_filename not found");
}
if (result.charAt(0) == '"' || result.charAt(0) == '\'') {
if (result.charAt(result.length() - 1) != result.charAt(0)) {
throw new IllegalArgumentException(result);
}
result = result.substring(1, result.length() - 1);
}
return result;
}
}
private static Predicate<String> downloadFinishedData(String filePattern) {
return name -> {
Path path = getPathSafely(name);
if (path == null)
return false;
String fileName = path.getFileName().toString();
return fileName.startsWith(filePattern) && fileName.endsWith("h5") || fileName.equals(filePattern + ".xml")
|| fileName.equals(Constants.BENCHMARK_RESULT_FILE);
};
}
private static Predicate<String> downloadStatistics() {
return name -> {
Path path = getPathSafely(name);
if (path == null)
return false;
String fileName = path.getFileName().toString();
return fileName.equals(Constants.BENCHMARK_RESULT_FILE);
};
}
private static Predicate<String> downloadFailedData() {
return name -> {
Path path = getPathSafely(name);
if (path == null)
return false;
return path.getFileName().toString().startsWith("snakejob.")
|| path.getParent().getFileName().toString().equals("logs");
};
}
private static Path getPathSafely(String name) {
try {
return Paths.get(name);
} catch(InvalidPathException ex) {
return null;
}
}
private static void formatResultFile(Path filename) throws FileNotFoundException {
List<ResultFileTask> identifiedTasks = new LinkedList<ResultFileTask>();
final String newLineSeparator = "\n";
final String delimiter = ";";
final String summaryFileHeader = "Task;MemoryUsage;WallTime;JobCount";
try {
String line = null;
ResultFileTask processedTask = null;
List<ResultFileJob> jobs = new LinkedList<>();
BufferedReader reader = Files.newBufferedReader(filename);
while (null != (line = reader.readLine())) {
line = line.trim();
if (line.isEmpty()) {
continue;
}
String[] columns = line.split(delimiter);
if (columns[0].equals(Constants.STATISTICS_TASK_NAME)) {
// If there is a task being processed, add all cached jobs to it and wrap it up
if (null != processedTask ) {
processedTask.jobs.addAll(jobs);
identifiedTasks.add(processedTask);
}
// Start processing a new task
processedTask = new ResultFileTask(columns[1]);
jobs.clear();
} else if (columns[0].equals(Constants.STATISTICS_JOB_IDS)) {
// Cache all found jobs
for (int i = 1; i < columns.length; i++) {
jobs.add(new ResultFileJob(columns[i]));
}
} else if (!columns[0].equals(Constants.STATISTICS_JOB_COUNT)) {
// Save values of a given property to cached jobs
for (int i = 1; i < columns.length; i++) {
jobs.get(i - 1).setValue(columns[0], columns[i]);
}
}
}
// If there is a task being processed, add all cached jobs to it and wrap it up
if (null != processedTask ) {
processedTask.jobs.addAll(jobs);
identifiedTasks.add(processedTask);
}
} catch (IOException e) {
log.error(e.getMessage(), e);
}
FileWriter fileWriter = null;
try {
fileWriter = new FileWriter(filename.getParent().toString() + "/" + Constants.STATISTICS_SUMMARY_FILENAME);
fileWriter.append(summaryFileHeader).append(newLineSeparator);
for (ResultFileTask task : identifiedTasks) {
fileWriter.append(Constants.STATISTICS_TASK_NAME_MAP.get(task.name)).append(delimiter);
fileWriter.append(Double.toString(task.getAverageMemoryUsage())).append(delimiter);
fileWriter.append(Double.toString(task.getAverageWallTime())).append(delimiter);
fileWriter.append(Integer.toString(task.getJobCount()));
fileWriter.append(newLineSeparator);
}
} catch (Exception e) {
log.error(e.getMessage(), e);
} finally {
try {
fileWriter.flush();
fileWriter.close();
} catch (Exception e) {
log.error(e.getMessage(), e);
}
}
}
private static Settings constructSettingsFromParams(BenchmarkSPIMParameters params) {
// TODO Auto-generated method stub
return new Settings() {
@Override
public String getUserName() {
return params.username();
}
@Override
public int getTimeout() {
return Constants.HAAS_TIMEOUT;
}
@Override
public long getTemplateId() {
return Constants.HAAS_TEMPLATE_ID;
}
@Override
public String getProjectId() {
return Constants.HAAS_PROJECT_ID;
}
@Override
public String getPhone() {
return params.phone();
}
@Override
public String getPassword() {
return params.password();
}
@Override
public String getJobName() {
return Constants.HAAS_JOB_NAME;
}
@Override
public String getEmail() {
return params.email();
}
@Override
public long getClusterNodeType() {
return Constants.HAAS_CLUSTER_NODE_TYPE;
}
};
}
}
This diff is collapsed.
Click to expand it.
Preview
0%
Loading
Try again
or
attach a new file
.
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Save comment
Cancel
Please
register
or
sign in
to comment