Skip to content
Snippets Groups Projects
Commit 9fedfb6e authored by Vojtech Moravec's avatar Vojtech Moravec
Browse files

Add verbose option.

Also removed unused test code.
parent f009a29f
No related branches found
No related tags found
No related merge requests found
...@@ -198,6 +198,7 @@ public class BigDataServer { ...@@ -198,6 +198,7 @@ public class BigDataServer {
options.addOption(new OptionWithOrder(CliConstants.createSQOption(), ++optionOrder)); options.addOption(new OptionWithOrder(CliConstants.createSQOption(), ++optionOrder));
options.addOption(new OptionWithOrder(CliConstants.createVQOption(), ++optionOrder)); options.addOption(new OptionWithOrder(CliConstants.createVQOption(), ++optionOrder));
options.addOption(new OptionWithOrder(CliConstants.createBitsOption(), ++optionOrder)); options.addOption(new OptionWithOrder(CliConstants.createBitsOption(), ++optionOrder));
options.addOption(new OptionWithOrder(CliConstants.createVerboseOption(false), ++optionOrder));
if (Constants.ENABLE_EXPERIMENTAL_FEATURES) { if (Constants.ENABLE_EXPERIMENTAL_FEATURES) {
...@@ -225,19 +226,19 @@ public class BigDataServer { ...@@ -225,19 +226,19 @@ public class BigDataServer {
final boolean enableQcmpCompression = cmd.hasOption(ENABLE_COMPRESSION); final boolean enableQcmpCompression = cmd.hasOption(ENABLE_COMPRESSION);
CompressionOptions compressionOptions = new CompressionOptions(); final CompressionOptions compressionOptions = new CompressionOptions();
if (enableQcmpCompression) { if (enableQcmpCompression) {
compressionOptions.setQuantizationType(QuantizationType.Invalid); compressionOptions.setQuantizationType(QuantizationType.Invalid);
if (cmd.hasOption(CliConstants.SCALAR_QUANTIZATION_LONG)) if (cmd.hasOption(CliConstants.SCALAR_QUANTIZATION_LONG))
compressionOptions.setQuantizationType(QuantizationType.Scalar); compressionOptions.setQuantizationType(QuantizationType.Scalar);
else if (cmd.hasOption(CliConstants.VECTOR_QUANTIZATION_LONG)) { else if (cmd.hasOption(CliConstants.VECTOR_QUANTIZATION_LONG)) {
final String vqValue = cmd.getOptionValue(CliConstants.VECTOR_QUANTIZATION_LONG); final String vqValue = cmd.getOptionValue(CliConstants.VECTOR_QUANTIZATION_LONG);
Optional<V2i> maybeV2 = ParseUtils.tryParseV2i(vqValue, 'x'); final Optional<V2i> maybeV2 = ParseUtils.tryParseV2i(vqValue, 'x');
if (maybeV2.isPresent()) { if (maybeV2.isPresent()) {
compressionOptions.setQuantizationType(QuantizationType.Vector2D); compressionOptions.setQuantizationType(QuantizationType.Vector2D);
compressionOptions.setQuantizationVector(new V3i(maybeV2.get().getX(), maybeV2.get().getY(), 1)); compressionOptions.setQuantizationVector(new V3i(maybeV2.get().getX(), maybeV2.get().getY(), 1));
} else { } else {
Optional<V3i> maybeV3 = ParseUtils.tryParseV3i(vqValue, 'x'); final Optional<V3i> maybeV3 = ParseUtils.tryParseV3i(vqValue, 'x');
if (maybeV3.isPresent()) { if (maybeV3.isPresent()) {
compressionOptions.setQuantizationType(QuantizationType.Vector3D); compressionOptions.setQuantizationType(QuantizationType.Vector3D);
compressionOptions.setQuantizationVector(maybeV3.get()); compressionOptions.setQuantizationVector(maybeV3.get());
...@@ -248,11 +249,14 @@ public class BigDataServer { ...@@ -248,11 +249,14 @@ public class BigDataServer {
throw new ParseException("Invalid quantization type."); throw new ParseException("Invalid quantization type.");
} }
// NOTE(Moravec): Test if using more workers make any sense. Since the server is already handling multiple requests.
compressionOptions.setWorkerCount(1);
compressionOptions.setCodebookType(CompressionOptions.CodebookType.Global); compressionOptions.setCodebookType(CompressionOptions.CodebookType.Global);
compressionOptions.setCodebookCacheFolder(cmd.getOptionValue(CliConstants.CODEBOOK_CACHE_FOLDER_LONG)); compressionOptions.setCodebookCacheFolder(cmd.getOptionValue(CliConstants.CODEBOOK_CACHE_FOLDER_LONG));
compressionOptions.setBitsPerCodebookIndex(Integer.parseInt(cmd.getOptionValue(CliConstants.BITS_LONG))); compressionOptions.setBitsPerCodebookIndex(Integer.parseInt(cmd.getOptionValue(CliConstants.BITS_LONG)));
compressionOptions.setVerbose(cmd.hasOption(CliConstants.VERBOSE_LONG));
StringBuilder compressionReport = new StringBuilder(); final StringBuilder compressionReport = new StringBuilder();
compressionReport.append("\u001b[33m"); compressionReport.append("\u001b[33m");
compressionReport.append("Quantization type: "); compressionReport.append("Quantization type: ");
switch (compressionOptions.getQuantizationType()) { switch (compressionOptions.getQuantizationType()) {
...@@ -273,6 +277,7 @@ public class BigDataServer { ...@@ -273,6 +277,7 @@ public class BigDataServer {
compressionReport.append('\n'); compressionReport.append('\n');
compressionReport.append("Bits per codebook index: ").append(compressionOptions.getBitsPerCodebookIndex()).append('\n'); compressionReport.append("Bits per codebook index: ").append(compressionOptions.getBitsPerCodebookIndex()).append('\n');
compressionReport.append("Codebook cache folder: ").append(compressionOptions.getCodebookCacheFolder()).append('\n'); compressionReport.append("Codebook cache folder: ").append(compressionOptions.getCodebookCacheFolder()).append('\n');
compressionReport.append("Verbose mode: ").append(compressionOptions.isVerbose() ? "ON" : "OFF").append('\n');
compressionReport.append("\u001b[0m"); compressionReport.append("\u001b[0m");
System.out.println(compressionReport.toString()); System.out.println(compressionReport.toString());
...@@ -343,8 +348,8 @@ public class BigDataServer { ...@@ -343,8 +348,8 @@ public class BigDataServer {
} else if (y instanceof OptionWithOrder) { } else if (y instanceof OptionWithOrder) {
return -1; return -1;
} else { } else {
Option opt1 = (Option) x; final Option opt1 = (Option) x;
Option opt2 = (Option) y; final Option opt2 = (Option) y;
return opt1.getOpt().compareToIgnoreCase(opt2.getOpt()); return opt1.getOpt().compareToIgnoreCase(opt2.getOpt());
} }
}); });
......
...@@ -9,6 +9,7 @@ import azgracompress.io.FileInputData; ...@@ -9,6 +9,7 @@ import azgracompress.io.FileInputData;
import azgracompress.io.FlatBufferInputData; import azgracompress.io.FlatBufferInputData;
import azgracompress.io.InputData; import azgracompress.io.InputData;
import azgracompress.io.MemoryOutputStream; import azgracompress.io.MemoryOutputStream;
import azgracompress.utilities.Stopwatch;
import bdv.BigDataViewer; import bdv.BigDataViewer;
import bdv.cache.CacheHints; import bdv.cache.CacheHints;
import bdv.cache.LoadingStrategy; import bdv.cache.LoadingStrategy;
...@@ -105,6 +106,7 @@ public class CellHandler extends ContextHandler { ...@@ -105,6 +106,7 @@ public class CellHandler extends ContextHandler {
private ICacheFile cachedCodebook = null; private ICacheFile cachedCodebook = null;
private final int INITIAL_BUFFER_SIZE = 2048; private final int INITIAL_BUFFER_SIZE = 2048;
private long accumulation = 0; private long accumulation = 0;
private long uncompressedAccumulation = 0;
private synchronized long addToAccumulation(final int value) { private synchronized long addToAccumulation(final int value) {
...@@ -112,6 +114,11 @@ public class CellHandler extends ContextHandler { ...@@ -112,6 +114,11 @@ public class CellHandler extends ContextHandler {
return accumulation; return accumulation;
} }
/**
 * Atomically adds {@code value} to the running total of bytes that would have been
 * transferred if no compression were applied, and returns the updated total.
 * Synchronized so concurrent request handlers update the counter safely.
 *
 * @param value number of uncompressed bytes to account for
 * @return the accumulated uncompressed byte count after this addition
 */
private synchronized long addToUncompressedAccumulation(final int value) {
    this.uncompressedAccumulation = this.uncompressedAccumulation + value;
    return this.uncompressedAccumulation;
}
public CellHandler(final String baseUrl, final String xmlFilename, final String datasetName, final String thumbnailsDirectory, public CellHandler(final String baseUrl, final String xmlFilename, final String datasetName, final String thumbnailsDirectory,
final CompressionOptions compressionParams) throws SpimDataException, IOException { final CompressionOptions compressionParams) throws SpimDataException, IOException {
...@@ -253,45 +260,21 @@ public class CellHandler extends ContextHandler { ...@@ -253,45 +260,21 @@ public class CellHandler extends ContextHandler {
baseRequest.setHandled(true); baseRequest.setHandled(true);
} else if (parts[0].equals("cell_qcmp")) { } else if (parts[0].equals("cell_qcmp")) {
final Stopwatch stopwatch = Stopwatch.startNew();
final int[] cellDims = new int[]{Integer.parseInt(parts[5]), Integer.parseInt(parts[6]), Integer.parseInt(parts[7])}; final int[] cellDims = new int[]{Integer.parseInt(parts[5]), Integer.parseInt(parts[6]), Integer.parseInt(parts[7])};
final short[] data = getCachedVolatileCellData(parts, cellDims); final short[] data = getCachedVolatileCellData(parts, cellDims);
assert (compressor != null); assert (compressor != null);
final FlatBufferInputData inputData = createInputDataObject(data, cellDims); final FlatBufferInputData inputData = createInputDataObject(data, cellDims);
final MemoryOutputStream cellCompressionStream = getCachedCompressionBuffer(); final MemoryOutputStream cellCompressionStream = getCachedCompressionBuffer();
final int compressedContentLength = compressor.streamCompressChunk(cellCompressionStream, inputData); final int compressedContentLength = compressor.streamCompressChunk(cellCompressionStream, inputData);
// // DEBUG decompress in place.
// if (true) {
// final byte[] buffer = cellCompressionStream.getBuffer();
// final int bufferLength = cellCompressionStream.getCurrentBufferLength();
// ImageDecompressor decompressor = new ImageDecompressor(cachedCodebook);
// short[] decompressedData = null;
// try (InputStream is = new BufferedInputStream(new ByteArrayInputStream(buffer, 0, bufferLength))) {
// decompressedData = decompressor.decompressStream(is, bufferLength);
// } catch (ImageDecompressionException e) {
// e.printStackTrace();
// }
// assert (decompressedData != null);
// responseWithShortArray(response, decompressedData);
// return;
// }
response.setContentLength(compressedContentLength); response.setContentLength(compressedContentLength);
try (final OutputStream responseStream = response.getOutputStream()) { try (final OutputStream responseStream = response.getOutputStream()) {
responseStream.write(cellCompressionStream.getBuffer(), 0, cellCompressionStream.getCurrentBufferLength()); responseStream.write(cellCompressionStream.getBuffer(), 0, cellCompressionStream.getCurrentBufferLength());
} }
final long currentlySent = addToAccumulation(compressedContentLength);
LOG.info(String.format("Sending %dB instead of %dB. Currently sent %dB",
compressedContentLength,
(data.length * 2),
currentlySent));
assert (cellCompressionStream.getCurrentBufferLength() == compressedContentLength) : assert (cellCompressionStream.getCurrentBufferLength() == compressedContentLength) :
"compressor.streamCompressChunk() is not equal to cachedCompressionStream.getCurrentBufferLength()"; "compressor.streamCompressChunk() is not equal to cachedCompressionStream.getCurrentBufferLength()";
...@@ -306,6 +289,18 @@ public class CellHandler extends ContextHandler { ...@@ -306,6 +289,18 @@ public class CellHandler extends ContextHandler {
response.setStatus(HttpServletResponse.SC_OK); response.setStatus(HttpServletResponse.SC_OK);
baseRequest.setHandled(true); baseRequest.setHandled(true);
returnBufferForReuse(cellCompressionStream); returnBufferForReuse(cellCompressionStream);
stopwatch.stop();
if (compressionParams.isVerbose()) {
final long currentlySent = addToAccumulation(compressedContentLength);
final long uncompressedWouldSent = addToUncompressedAccumulation(data.length * 2);
LOG.info(String.format("Sending %dB instead of %dB. Currently sent %dB instead of %dB. Handler finished in %s",
compressedContentLength,
(data.length * 2),
currentlySent,
uncompressedWouldSent,
stopwatch.getElapsedTimeString()));
}
} else if (parts[0].equals("init")) { } else if (parts[0].equals("init")) {
respondWithString(baseRequest, response, "application/json", metadataJson); respondWithString(baseRequest, response, "application/json", metadataJson);
} else if (parts[0].equals("init_qcmp")) { } else if (parts[0].equals("init_qcmp")) {
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please register or sign in to comment