diff --git a/src/main/java/bdv/server/BigDataServer.java b/src/main/java/bdv/server/BigDataServer.java
index 11576c8eee1921594ea09dcb0483d88fd83e3d7e..bc7adaa9c6c1ca482cc81013ed4bcc4a0e2456a1 100644
--- a/src/main/java/bdv/server/BigDataServer.java
+++ b/src/main/java/bdv/server/BigDataServer.java
@@ -198,6 +198,7 @@ public class BigDataServer {
         options.addOption(new OptionWithOrder(CliConstants.createSQOption(), ++optionOrder));
         options.addOption(new OptionWithOrder(CliConstants.createVQOption(), ++optionOrder));
         options.addOption(new OptionWithOrder(CliConstants.createBitsOption(), ++optionOrder));
+        options.addOption(new OptionWithOrder(CliConstants.createVerboseOption(false), ++optionOrder));
 
         if (Constants.ENABLE_EXPERIMENTAL_FEATURES) {
@@ -225,19 +226,19 @@ public class BigDataServer {
         final boolean enableQcmpCompression = cmd.hasOption(ENABLE_COMPRESSION);
 
-        CompressionOptions compressionOptions = new CompressionOptions();
+        final CompressionOptions compressionOptions = new CompressionOptions();
         if (enableQcmpCompression) {
             compressionOptions.setQuantizationType(QuantizationType.Invalid);
             if (cmd.hasOption(CliConstants.SCALAR_QUANTIZATION_LONG))
                 compressionOptions.setQuantizationType(QuantizationType.Scalar);
             else if (cmd.hasOption(CliConstants.VECTOR_QUANTIZATION_LONG)) {
                 final String vqValue = cmd.getOptionValue(CliConstants.VECTOR_QUANTIZATION_LONG);
-                Optional<V2i> maybeV2 = ParseUtils.tryParseV2i(vqValue, 'x');
+                final Optional<V2i> maybeV2 = ParseUtils.tryParseV2i(vqValue, 'x');
                 if (maybeV2.isPresent()) {
                     compressionOptions.setQuantizationType(QuantizationType.Vector2D);
                     compressionOptions.setQuantizationVector(new V3i(maybeV2.get().getX(), maybeV2.get().getY(), 1));
                 } else {
-                    Optional<V3i> maybeV3 = ParseUtils.tryParseV3i(vqValue, 'x');
+                    final Optional<V3i> maybeV3 = ParseUtils.tryParseV3i(vqValue, 'x');
                     if (maybeV3.isPresent()) {
                         compressionOptions.setQuantizationType(QuantizationType.Vector3D);
                         compressionOptions.setQuantizationVector(maybeV3.get());
@@ -248,11 +249,14 @@ public class BigDataServer {
                 throw new ParseException("Invalid quantization type.");
             }
 
+            // NOTE(Moravec): Test whether using more workers makes any sense, since the server is already handling multiple requests.
+            compressionOptions.setWorkerCount(1);
             compressionOptions.setCodebookType(CompressionOptions.CodebookType.Global);
             compressionOptions.setCodebookCacheFolder(cmd.getOptionValue(CliConstants.CODEBOOK_CACHE_FOLDER_LONG));
             compressionOptions.setBitsPerCodebookIndex(Integer.parseInt(cmd.getOptionValue(CliConstants.BITS_LONG)));
+            compressionOptions.setVerbose(cmd.hasOption(CliConstants.VERBOSE_LONG));
 
-            StringBuilder compressionReport = new StringBuilder();
+            final StringBuilder compressionReport = new StringBuilder();
             compressionReport.append("\u001b[33m");
             compressionReport.append("Quantization type: ");
             switch (compressionOptions.getQuantizationType()) {
@@ -273,6 +277,7 @@ public class BigDataServer {
             compressionReport.append('\n');
             compressionReport.append("Bits per codebook index: ").append(compressionOptions.getBitsPerCodebookIndex()).append('\n');
             compressionReport.append("Codebook cache folder: ").append(compressionOptions.getCodebookCacheFolder()).append('\n');
+            compressionReport.append("Verbose mode: ").append(compressionOptions.isVerbose() ? "ON" : "OFF").append('\n');
             compressionReport.append("\u001b[0m");
 
             System.out.println(compressionReport.toString());
@@ -343,8 +348,8 @@ public class BigDataServer {
             } else if (y instanceof OptionWithOrder) {
                 return -1;
             } else {
-                Option opt1 = (Option) x;
-                Option opt2 = (Option) y;
+                final Option opt1 = (Option) x;
+                final Option opt2 = (Option) y;
                 return opt1.getOpt().compareToIgnoreCase(opt2.getOpt());
             }
         });
diff --git a/src/main/java/bdv/server/CellHandler.java b/src/main/java/bdv/server/CellHandler.java
index 69f37a38580af716621b0c69e37a1f933350cef9..c41fcccaa6922bfad11d5d6865a7aedc8738a08b 100644
--- a/src/main/java/bdv/server/CellHandler.java
+++ b/src/main/java/bdv/server/CellHandler.java
@@ -9,6 +9,7 @@ import azgracompress.io.FileInputData;
 import azgracompress.io.FlatBufferInputData;
 import azgracompress.io.InputData;
 import azgracompress.io.MemoryOutputStream;
+import azgracompress.utilities.Stopwatch;
 import bdv.BigDataViewer;
 import bdv.cache.CacheHints;
 import bdv.cache.LoadingStrategy;
@@ -105,6 +106,7 @@ public class CellHandler extends ContextHandler {
     private ICacheFile cachedCodebook = null;
     private final int INITIAL_BUFFER_SIZE = 2048;
     private long accumulation = 0;
+    private long uncompressedAccumulation = 0;
 
     private synchronized long addToAccumulation(final int value) {
@@ -112,6 +114,11 @@ public class CellHandler extends ContextHandler {
         return accumulation;
     }
 
+    private synchronized long addToUncompressedAccumulation(final int value) {
+        uncompressedAccumulation += value;
+        return uncompressedAccumulation;
+    }
+
     public CellHandler(final String baseUrl, final String xmlFilename, final String datasetName, final String thumbnailsDirectory,
                        final CompressionOptions compressionParams) throws SpimDataException, IOException {
@@ -253,45 +260,21 @@ public class CellHandler extends ContextHandler {
             baseRequest.setHandled(true);
         } else if (parts[0].equals("cell_qcmp")) {
+            final Stopwatch stopwatch = Stopwatch.startNew();
             final int[] cellDims = new int[]{Integer.parseInt(parts[5]), Integer.parseInt(parts[6]), Integer.parseInt(parts[7])};
             final short[] data = getCachedVolatileCellData(parts, cellDims);
             assert (compressor != null);
-
             final FlatBufferInputData inputData = createInputDataObject(data, cellDims);
-
             final MemoryOutputStream cellCompressionStream = getCachedCompressionBuffer();
             final int compressedContentLength = compressor.streamCompressChunk(cellCompressionStream, inputData);
 
-            // // DEBUG decompress in place.
-            // if (true) {
-            //     final byte[] buffer = cellCompressionStream.getBuffer();
-            //     final int bufferLength = cellCompressionStream.getCurrentBufferLength();
-            //     ImageDecompressor decompressor = new ImageDecompressor(cachedCodebook);
-            //     short[] decompressedData = null;
-            //     try (InputStream is = new BufferedInputStream(new ByteArrayInputStream(buffer, 0, bufferLength))) {
-            //         decompressedData = decompressor.decompressStream(is, bufferLength);
-            //     } catch (ImageDecompressionException e) {
-            //         e.printStackTrace();
-            //     }
-            //     assert (decompressedData != null);
-            //     responseWithShortArray(response, decompressedData);
-            //     return;
-            // }
-
             response.setContentLength(compressedContentLength);
             try (final OutputStream responseStream = response.getOutputStream()) {
                 responseStream.write(cellCompressionStream.getBuffer(), 0, cellCompressionStream.getCurrentBufferLength());
             }
-            final long currentlySent = addToAccumulation(compressedContentLength);
-            LOG.info(String.format("Sending %dB instead of %dB. Currently sent %dB",
-                    compressedContentLength,
-                    (data.length * 2),
-                    currentlySent));
-
             assert (cellCompressionStream.getCurrentBufferLength() == compressedContentLength) : "compressor.streamCompressChunk() is not equal to cachedCompressionStream.getCurrentBufferLength()";
@@ -306,6 +289,18 @@ public class CellHandler extends ContextHandler {
             response.setStatus(HttpServletResponse.SC_OK);
             baseRequest.setHandled(true);
             returnBufferForReuse(cellCompressionStream);
+            stopwatch.stop();
+            if (compressionParams.isVerbose()) {
+                final long currentlySent = addToAccumulation(compressedContentLength);
+                final long uncompressedWouldSent = addToUncompressedAccumulation(data.length * 2);
+                LOG.info(String.format("Sending %dB instead of %dB. Currently sent %dB instead of %dB. Handler finished in %s",
+                        compressedContentLength,
+                        (data.length * 2),
+                        currentlySent,
+                        uncompressedWouldSent,
+                        stopwatch.getElapsedTimeString()));
+
+            }
         } else if (parts[0].equals("init")) {
             respondWithString(baseRequest, response, "application/json", metadataJson);
         } else if (parts[0].equals("init_qcmp")) {
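
Note on the vector-quantization argument handling in BigDataServer above: the -vq value is first
tried as a 2D vector ("3x3", which becomes a V3i with depth 1) and only on failure as a 3D vector
("3x3x3"). ParseUtils.tryParseV2i/tryParseV3i are not part of this patch; the following is only a
sketch of the kind of parsing they presumably perform (the helper tryParseDims is hypothetical):

    import java.util.Optional;
    import java.util.regex.Pattern;

    final class VqDimensionParsing {
        // Hypothetical helper: parses "3x3" (count = 2) or "3x3x3" (count = 3)
        // into its integer components; returns empty on malformed input.
        static Optional<int[]> tryParseDims(final String value, final char delimiter, final int count) {
            final String[] parts = value.split(Pattern.quote(String.valueOf(delimiter)));
            if (parts.length != count)
                return Optional.empty();
            final int[] dims = new int[count];
            for (int i = 0; i < count; i++) {
                try {
                    dims[i] = Integer.parseInt(parts[i].trim());
                } catch (final NumberFormatException e) {
                    return Optional.empty();
                }
            }
            return Optional.of(dims);
        }
    }

With this shape, tryParseDims("3x3", 'x', 2) yields {3, 3}, and a failed 2-component parse falls
through to the 3-component attempt, mirroring the control flow in the patch.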
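
The second accumulator added to CellHandler lets verbose mode report the running compressed byte
total alongside the total that would have been sent uncompressed (each short is 2 bytes, hence
data.length * 2). Both counters are updated through synchronized methods because Jetty serves
requests from multiple worker threads. A lock-free alternative, sketched here with a hypothetical
TrafficAccumulator class (not part of this codebase), would use java.util.concurrent.atomic.AtomicLong:

    import java.util.concurrent.atomic.AtomicLong;

    // Sketch only: a thread-safe replacement for the two synchronized accumulator methods.
    final class TrafficAccumulator {
        private final AtomicLong compressedBytes = new AtomicLong();
        private final AtomicLong uncompressedBytes = new AtomicLong();

        // Returns the running total of compressed bytes sent so far.
        long addCompressed(final int byteCount) {
            return compressedBytes.addAndGet(byteCount);
        }

        // Returns the running total of bytes that would have been sent uncompressed.
        long addUncompressed(final int byteCount) {
            return uncompressedBytes.addAndGet(byteCount);
        }
    }

addAndGet updates and reads atomically, so the totals that end up in the log line stay consistent
under concurrent cell_qcmp requests, just as with the synchronized variant in the patch.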
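
The cell_qcmp timing relies on azgracompress.utilities.Stopwatch, whose implementation is outside
this patch. Assuming the obvious semantics of the three calls used above (startNew, stop,
getElapsedTimeString), a minimal System.nanoTime()-based equivalent might look like:

    // Sketch with the same surface as the calls in the patch;
    // the real azgracompress.utilities.Stopwatch may differ.
    final class StopwatchSketch {
        private long startNs;
        private long elapsedNs;

        // Creates a stopwatch that starts measuring immediately.
        static StopwatchSketch startNew() {
            final StopwatchSketch sw = new StopwatchSketch();
            sw.startNs = System.nanoTime();
            return sw;
        }

        void stop() {
            elapsedNs = System.nanoTime() - startNs;
        }

        String getElapsedTimeString() {
            return String.format("%.3f ms", elapsedNs / 1_000_000.0);
        }
    }

Because stopwatch.stop() runs only after the response has been written and the buffer returned for
reuse, the logged duration covers the whole handler (cache lookup, compression, and I/O), not just
the compression step.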