Commit 2976e361 authored by Vojtech Moravec

Benchmarking stuff.

parent 8cdb8890
<?xml version="1.0" encoding="UTF-8"?>
<module org.jetbrains.idea.maven.project.MavenProjectsManager.isMavenModule="true" type="JAVA_MODULE" version="4">
<component name="NewModuleRootManager" LANGUAGE_LEVEL="JDK_1_9">
<component name="NewModuleRootManager" LANGUAGE_LEVEL="JDK_10">
<output url="file://$MODULE_DIR$/target/classes" />
<output-test url="file://$MODULE_DIR$/target/test-classes" />
<content url="file://$MODULE_DIR$">
......
import compression.benchmark.ScalarQuantizationBenchmark;
import compression.benchmark.VectorQuantizationBenchmark;
import compression.data.*;
@@ -11,24 +12,24 @@ public class DataCompressor {
public static void main(String[] args) throws IOException {
// test2DChunking();
// test3DChunking();
// test2DVectorChunking();
//
// new ScalarQuantizationBenchmark("D:\\biology\\tiff_data\\benchmark\\fused_tp_10_ch_0_16bit_edited
// .raw",
// "D:\\biology\\benchmark\\tmp",
// new int[]{351},
// new V3i(1041, 996, 946)).startBenchmark();
VectorQuantizationBenchmark vqBench = new VectorQuantizationBenchmark(
"D:\\biology\\tiff_data\\benchmark\\fused_tp_10_ch_0_16bit.raw",
"D:\\biology\\benchmark\\vectorBench3x3",
new int[]{351},
new V3i(1041, 996, 946));
vqBench.startBenchmark(new V2i(3,3));
if (false) {
new ScalarQuantizationBenchmark("D:\\biology\\tiff_data\\benchmark\\fused_tp_10_ch_0_16bit.raw",
"D:\\biology\\benchmark\\fused_tp_10_ch_0_16bit\\scalar",
new int[]{351},
new V3i(1041, 996, 946)).startBenchmark();
} else {
VectorQuantizationBenchmark vqBench = new VectorQuantizationBenchmark(
"D:\\biology\\tiff_data\\benchmark\\fused_tp_10_ch_0_16bit.raw",
"D:\\biology\\benchmark\\fused_tp_10_ch_0_16bit\\vector3x3",
new int[]{351},
new V3i(1041, 996, 946));
vqBench.startBenchmark(new V2i(3,3));
}
}
static void test2DVectorChunking() {
......
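The main method above switches between the scalar and the vector benchmark through a hard-coded `if (false)` toggle. A minimal sketch of the same switch driven by a command-line flag is shown below; the `--scalar` flag name and the shortened input/output paths are assumptions for illustration, while the constructor arguments mirror the calls in the diff and rely on the project's benchmark classes.
// Sketch only, not part of the commit: command-line toggle instead of `if (false)`.
// Assumes the ScalarQuantizationBenchmark / VectorQuantizationBenchmark classes
// imported at the top of DataCompressor; paths here are placeholders.
public static void main(String[] args) throws IOException {
    final boolean runScalar = (args.length > 0) && args[0].equals("--scalar");
    if (runScalar) {
        new ScalarQuantizationBenchmark("input_16bit.raw",
                                        "out/scalar",
                                        new int[]{351},
                                        new V3i(1041, 996, 946)).startBenchmark();
    } else {
        VectorQuantizationBenchmark vqBench = new VectorQuantizationBenchmark(
                "input_16bit.raw",
                "out/vector3x3",
                new int[]{351},
                new V3i(1041, 996, 946));
        // 3x3 quantization vector -> 2D vector quantization.
        vqBench.startBenchmark(new V2i(3, 3));
    }
}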
@@ -4,13 +4,15 @@ import compression.U16;
import compression.data.V3i;
import compression.de.DeException;
import compression.de.shade.ILShadeSolver;
import compression.io.RawDataIO;
import compression.quantization.QTrainIteration;
import compression.quantization.scalar.LloydMaxU16ScalarQuantization;
import compression.quantization.scalar.ScalarQuantizer;
import compression.utilities.TypeConverter;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStreamWriter;
public class ScalarQuantizationBenchmark extends BenchmarkBase {
private boolean useDiffEvolution = false;
@@ -53,16 +55,12 @@ public class ScalarQuantizationBenchmark extends BenchmarkBase {
System.out.println("Scalar quantizer ready.");
final String method = useDiffEvolution ? "ilshade" : "lloyd";
final String centroidsFile = getFileNamePathIntoOutDir(String.format("p%d_cb%d%s_centroids.raw",
(planeIndex + 1),
codebookSize,
method));
// NOTE(Moravec): Centroids are saved in little endian order.
if (!RawDataIO.writeDataI32(centroidsFile, quantizer.getCentroids(), true)) {
System.err.println("Failed to save quantizer centroids.");
return;
}
final String centroidsFile = String.format("p%d_cb%d%s_centroids.txt",
planeIndex,
codebookSize,
method);
saveCentroids(quantizer.getCentroids(), centroidsFile);
final String quantizedFile = String.format("p%d_cb%d%s.raw", planeIndex, codebookSize, method);
@@ -81,6 +79,31 @@ public class ScalarQuantizationBenchmark extends BenchmarkBase {
}
}
private void saveCentroids(final int[] centroids, final String centroidsFile) {
final String outFile = getFileNamePathIntoOutDir(centroidsFile);
// Write the centroids as plain text, one value per line.
try (OutputStreamWriter writer = new OutputStreamWriter(new FileOutputStream(outFile))) {
StringBuilder sb = new StringBuilder();
for (final var entry : centroids) {
sb.append(entry);
sb.append('\n');
}
writer.write(sb.toString());
} catch (IOException ioE) {
ioE.printStackTrace();
System.err.println("Failed to save quantizer centroids.");
}
}
private ScalarQuantizer trainLloydMaxQuantizer(final short[] data, final int codebookSize, final int planeIndex) {
LloydMaxU16ScalarQuantization lloydMax = new LloydMaxU16ScalarQuantization(data, codebookSize);
QTrainIteration[] trainingReport = lloydMax.train();
......
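With this change the scalar quantizer's centroids are written as plain text, one integer per line, instead of a little-endian raw i32 file. Below is a minimal sketch of reading such a file back, assuming only that format; the CentroidsReader class and loadCentroids method are illustrative and not part of the project.
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Paths;

public class CentroidsReader {
    // Reads one centroid value per line, in codebook order, as written by saveCentroids.
    public static int[] loadCentroids(final String centroidsFile) throws IOException {
        return Files.readAllLines(Paths.get(centroidsFile)).stream()
                    .filter(line -> !line.isEmpty())
                    .mapToInt(Integer::parseInt)
                    .toArray();
    }
}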
@@ -28,13 +28,14 @@ public class VectorQuantizationBenchmark extends BenchmarkBase {
final int[][] vectors,
final V2i qVector) {
Chunk2D reconstructedChunk = new Chunk2D(new V2i(rawImageDims.getX(), rawImageDims.getY()), new V2l(0, 0));
if (qVector.getY() == 1) {
// 1D vector
reconstructedChunk.reconstructFromVectors(vectors);
} else {
if (qVector.getY() > 1) {
var chunks = plane.as2dChunk().divideIntoChunks(qVector);
Chunk2D.updateChunkData(chunks, vectors);
reconstructedChunk.reconstructFromChunks(chunks);
} else {
// 1D vector
reconstructedChunk.reconstructFromVectors(vectors);
}
return reconstructedChunk.asImageU16();
}
@@ -49,6 +50,12 @@ public class VectorQuantizationBenchmark extends BenchmarkBase {
public void startBenchmark(final V2i qVector) {
if (qVector.getY() > 1) {
System.out.println("2D qVector");
} else {
System.out.println("1D qVector");
}
boolean dirCreated = new File(this.outputDirectory).mkdirs();
for (final int planeIndex : planes) {
......
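startBenchmark distinguishes a 1D quantization vector (qVector.getY() == 1) from a 2D one used for block-based vector quantization. The sketch below shows the block count this implies for a plane, assuming divideIntoChunks covers the plane with ceil-divided blocks; the helper is illustrative and Chunk2D's actual chunking is not shown in the diff.
public class ChunkCountSketch {
    // For a quantization vector (qx, qy), a width x height plane is covered by
    // ceil(width/qx) * ceil(height/qy) blocks; with qy == 1 the blocks degenerate
    // into 1D row segments.
    static int chunkCount(final int width, final int height, final int qx, final int qy) {
        final int chunksPerRow = (width + qx - 1) / qx;   // ceil(width / qx)
        final int chunksPerCol = (height + qy - 1) / qy;  // ceil(height / qy)
        return chunksPerRow * chunksPerCol;
    }

    public static void main(String[] args) {
        // Plane size taken from the V3i(1041, 996, 946) dimensions used above.
        System.out.println(chunkCount(1041, 996, 3, 3)); // 347 * 332 = 115204 blocks
        System.out.println(chunkCount(1041, 996, 9, 1)); // 116 * 996 = 115536 row segments
    }
}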