diff --git a/src/main/java/bdv/server/BigDataServer.java b/src/main/java/bdv/server/BigDataServer.java
index bc7adaa9c6c1ca482cc81013ed4bcc4a0e2456a1..12e3e875b0967d60fe43c8757f9dca3a8a1b8e04 100644
--- a/src/main/java/bdv/server/BigDataServer.java
+++ b/src/main/java/bdv/server/BigDataServer.java
@@ -75,7 +75,19 @@ public class BigDataServer {
                 new HashMap<String, String>(),
                 thumbnailDirectory,
                 enableManagerContext,
-                new CompressionOptions());
+                new ExtendedCompressionOptions());
+    }
+
+    public static class ExtendedCompressionOptions extends CompressionOptions {
+        private int compressFromMipmapLevel;
+
+        public int getCompressFromMipmapLevel() {
+            return compressFromMipmapLevel;
+        }
+
+        public void setCompressFromMipmapLevel(final int compressFromMipmapLevel) {
+            this.compressFromMipmapLevel = compressFromMipmapLevel;
+        }
     }
 
     /**
@@ -93,14 +105,13 @@ public class BigDataServer {
 
         private final String thumbnailDirectory;
 
-
-        private final CompressionOptions compressionParam;
+        private final ExtendedCompressionOptions compressionParam;
 
         private final boolean enableManagerContext;
 
         Parameters(final int port, final String hostname, final Map<String, String> datasetNameToXml,
                    final String thumbnailDirectory, final boolean enableManagerContext,
-                   final CompressionOptions customCompressionParameters) {
+                   final ExtendedCompressionOptions customCompressionParameters) {
             this.port = port;
             this.hostname = hostname;
             this.datasetNameToXml = datasetNameToXml;
@@ -134,7 +145,7 @@ public class BigDataServer {
             return enableManagerContext;
         }
 
-        public CompressionOptions getCompressionParams() {
+        public ExtendedCompressionOptions getCompressionParams() {
             return compressionParam;
        }
    }
@@ -146,6 +157,7 @@ public class BigDataServer {
         final Options options = new Options();
 
         final String cmdLineSyntax = "BigDataServer [OPTIONS] [NAME XML] ...\n";
+        final String CompressFromKey = "compressFrom";
 
         final String description =
                 "Serves one or more XML/HDF5 datasets for remote access over HTTP.\n" +
@@ -197,8 +209,9 @@ public class BigDataServer {
         options.addOption(new OptionWithOrder(CliConstants.createCBCMethod(), ++optionOrder));
         options.addOption(new OptionWithOrder(CliConstants.createSQOption(), ++optionOrder));
         options.addOption(new OptionWithOrder(CliConstants.createVQOption(), ++optionOrder));
-        options.addOption(new OptionWithOrder(CliConstants.createBitsOption(), ++optionOrder));
         options.addOption(new OptionWithOrder(CliConstants.createVerboseOption(false), ++optionOrder));
+        options.addOption(new OptionWithOrder(new Option(CompressFromKey, true, "Mipmap level from which compression is enabled."),
+                ++optionOrder));
 
 
         if (Constants.ENABLE_EXPERIMENTAL_FEATURES) {
@@ -226,7 +239,7 @@ public class BigDataServer {
 
         final boolean enableQcmpCompression = cmd.hasOption(ENABLE_COMPRESSION);
 
-        final CompressionOptions compressionOptions = new CompressionOptions();
+        final ExtendedCompressionOptions compressionOptions = new ExtendedCompressionOptions();
         if (enableQcmpCompression) {
             compressionOptions.setQuantizationType(QuantizationType.Invalid);
             if (cmd.hasOption(CliConstants.SCALAR_QUANTIZATION_LONG))
@@ -253,9 +266,13 @@ public class BigDataServer {
             compressionOptions.setWorkerCount(1);
             compressionOptions.setCodebookType(CompressionOptions.CodebookType.Global);
             compressionOptions.setCodebookCacheFolder(cmd.getOptionValue(CliConstants.CODEBOOK_CACHE_FOLDER_LONG));
-            compressionOptions.setBitsPerCodebookIndex(Integer.parseInt(cmd.getOptionValue(CliConstants.BITS_LONG)));
 
             compressionOptions.setVerbose(cmd.hasOption(CliConstants.VERBOSE_LONG));
+
+            if (cmd.hasOption(CompressFromKey)) {
+                compressionOptions.setCompressFromMipmapLevel(Integer.parseInt(cmd.getOptionValue(CompressFromKey)));
+            }
+
             final StringBuilder compressionReport = new StringBuilder();
             compressionReport.append("\u001b[33m");
             compressionReport.append("Quantization type: ");
@@ -275,9 +292,9 @@ public class BigDataServer {
             }
             compressionReport.append(compressionOptions.getQuantizationVector().toString());
             compressionReport.append('\n');
-            compressionReport.append("Bits per codebook index: ").append(compressionOptions.getBitsPerCodebookIndex()).append('\n');
             compressionReport.append("Codebook cache folder: ").append(compressionOptions.getCodebookCacheFolder()).append('\n');
             compressionReport.append("Verbose mode: ").append(compressionOptions.isVerbose() ? "ON" : "OFF").append('\n');
+            compressionReport.append("Compress from mipmap level: ").append(compressionOptions.getCompressFromMipmapLevel()).append('\n');
             compressionReport.append("\u001b[0m");
 
             System.out.println(compressionReport.toString());
diff --git a/src/main/java/bdv/server/CellHandler.java b/src/main/java/bdv/server/CellHandler.java
index efa97a34e7d68441d06ddca1b9191aa2838e467c..b96500544a6de1399dcde2f396b9c3be8b03d203 100644
--- a/src/main/java/bdv/server/CellHandler.java
+++ b/src/main/java/bdv/server/CellHandler.java
@@ -48,6 +48,7 @@ import java.nio.file.Files;
 import java.nio.file.Path;
 import java.nio.file.Paths;
 import java.util.ArrayList;
+import java.util.Comparator;
 import java.util.HashMap;
 import java.util.Stack;
 
@@ -102,10 +103,11 @@ public class CellHandler extends ContextHandler {
     /**
      * Compression stuff.
     */
-    private final CompressionOptions compressionParams;
+    private final BigDataServer.ExtendedCompressionOptions compressionParams;
     private ArrayList<ICacheFile> cachedCodebooks = null;
     private HashMap<Integer, ImageCompressor> compressors = null;
+    private ImageCompressor lowestResCompressor = null;
 
     private Stack<MemoryOutputStream> cachedBuffers = null;
     private final int INITIAL_BUFFER_SIZE = 2048;
@@ -124,11 +126,12 @@ public class CellHandler extends ContextHandler {
     }
 
     public CellHandler(final String baseUrl, final String xmlFilename, final String datasetName, final String thumbnailsDirectory,
-                       final CompressionOptions compressionParams) throws SpimDataException, IOException {
+                       final BigDataServer.ExtendedCompressionOptions compressionParams) throws SpimDataException, IOException {
         final XmlIoSpimDataMinimal io = new XmlIoSpimDataMinimal();
         final SpimDataMinimal spimData = io.load(xmlFilename);
         final SequenceDescriptionMinimal seq = spimData.getSequenceDescription();
+
         final Hdf5ImageLoader imgLoader = (Hdf5ImageLoader) seq.getImgLoader();
 
         this.compressionParams = compressionParams;
 
@@ -142,16 +145,26 @@ public class CellHandler extends ContextHandler {
         baseFilename = xmlFilename.endsWith(".xml") ?
                 xmlFilename.substring(0, xmlFilename.length() - ".xml".length()) : xmlFilename;
         dataSetURL = baseUrl;
 
+        final int numberOfMipmapLevels = imgLoader.getSetupImgLoader(0).numMipmapLevels();
+
         datasetXmlString = buildRemoteDatasetXML(io, spimData, baseUrl);
         metadataJson = buildMetadataJsonString(imgLoader, seq);
         settingsXmlString = buildSettingsXML(baseFilename);
         thumbnailFilename = createThumbnail(spimData, baseFilename, datasetName, thumbnailsDirectory);
 
-        initializeCompression();
+        initializeCompression(numberOfMipmapLevels);
+    }
+
+    private ImageCompressor getCompressorForMipmapLevel(final int mipmapLevel) {
+        assert (compressors != null && !compressors.isEmpty());
+        if (compressors.containsKey(mipmapLevel)) {
+            return compressors.get(mipmapLevel);
+        }
+        return lowestResCompressor;
     }
 
-    private void initializeCompression() {
+    private void initializeCompression(final int numberOfMipmapLevels) {
         if (compressionParams == null)
             return;
         this.compressionParams.setInputDataInfo(new FileInputData(this.baseFilename));
@@ -163,17 +176,28 @@ public class CellHandler extends ContextHandler {
             return;
         }
         LOG.info(String.format("Found %d codebooks for %s.", cachedCodebooks.size(), this.baseFilename));
-        compressors = new HashMap<>(cachedCodebooks.size());
-        for (final ICacheFile cacheFile : cachedCodebooks) {
-            LOG.info(String.format("  Loaded codebook of size %d. '%s'", cacheFile.getHeader().getCodebookSize(), cacheFile));
+        final int numberOfCompressors = Math.min((numberOfMipmapLevels - compressionParams.getCompressFromMipmapLevel()),
+                                                 cachedCodebooks.size());
+
+        cachedCodebooks.sort(Comparator.comparingInt(obj -> obj.getHeader().getBitsPerCodebookIndex()));
+        compressors = new HashMap<>(numberOfCompressors);
+        for (int compressorIndex = 0; compressorIndex < numberOfCompressors; compressorIndex++) {
+            final ICacheFile levelCacheFile = cachedCodebooks.get((cachedCodebooks.size() - 1) - compressorIndex);
+            final int bitsPerCodebookIndex = levelCacheFile.getHeader().getBitsPerCodebookIndex();
 
-            final int bitsPerCodebookIndex = cacheFile.getHeader().getBitsPerCodebookIndex();
             final CompressionOptions compressorOptions = compressionParams.createClone();
             assert (compressorOptions != compressionParams);
-            compressorOptions.setBitsPerCodebookIndex(bitsPerCodebookIndex);
-            compressors.put(bitsPerCodebookIndex, new ImageCompressor(compressorOptions, cacheFile));
+
+            final ImageCompressor compressor = new ImageCompressor(compressorOptions, levelCacheFile);
+            final int actualKey = compressorIndex + compressionParams.getCompressFromMipmapLevel();
+            compressors.put(actualKey, compressor);
+            LOG.info(String.format("  Loaded codebook of size %d for mipmap level %d. '%s'",
+                                   levelCacheFile.getHeader().getCodebookSize(),
+                                   actualKey,
+                                   levelCacheFile));
+            lowestResCompressor = compressor;
         }
 
         final int initialCompressionCacheSize = 10;
@@ -197,11 +221,10 @@ public class CellHandler extends ContextHandler {
             cachedBuffers.push(buffer);
     }
 
-    private short[] getCachedVolatileCellData(final String[] parts, final int[] cellDims) {
+    private short[] getCachedVolatileCellData(final String[] parts, final int[] cellDims, final int level) {
         final int index = Integer.parseInt(parts[1]);
         final int timepoint = Integer.parseInt(parts[2]);
         final int setup = Integer.parseInt(parts[3]);
-        final int level = Integer.parseInt(parts[4]);
 
         final Key key = new VolatileGlobalCellCache.Key(timepoint, setup, level, index);
         VolatileCell<?> cell = cache.getLoadingVolatileCache().getIfPresent(key, cacheHints);
@@ -260,13 +283,13 @@ public class CellHandler extends ContextHandler {
         }
 
         final String[] parts = cellString.split("/");
         if (parts[0].equals("cell")) {
-
+            final int level = Integer.parseInt(parts[4]);
             final int[] cellDims = new int[]{
                     Integer.parseInt(parts[5]),
                     Integer.parseInt(parts[6]),
                     Integer.parseInt(parts[7])};
 
-            final short[] data = getCachedVolatileCellData(parts, cellDims);
+            final short[] data = getCachedVolatileCellData(parts, cellDims, level);
 
             responseWithShortArray(response, data);
@@ -276,15 +299,17 @@ public class CellHandler extends ContextHandler {
         } else if (parts[0].equals("cell_qcmp")) {
             final Stopwatch stopwatch = Stopwatch.startNew();
+            final int mipmapLevel = Integer.parseInt(parts[4]);
             final int[] cellDims = new int[]{Integer.parseInt(parts[5]),
                                              Integer.parseInt(parts[6]),
                                              Integer.parseInt(parts[7])};
-            final short[] data = getCachedVolatileCellData(parts, cellDims);
+            final short[] data = getCachedVolatileCellData(parts, cellDims, mipmapLevel);
             assert (compressors != null && !compressors.isEmpty());
 
             final FlatBufferInputData inputData = createInputDataObject(data, cellDims);
             final MemoryOutputStream cellCompressionStream = getCachedCompressionBuffer();
-            // TODO(Moravec): Choose compressor based on `level`.
-            final int compressedContentLength = compressors.get(8).streamCompressChunk(cellCompressionStream, inputData);
+
+            final int compressedContentLength = getCompressorForMipmapLevel(mipmapLevel).streamCompressChunk(cellCompressionStream,
+                                                                                                             inputData);
 
             response.setContentLength(compressedContentLength);
             try (final OutputStream responseStream = response.getOutputStream()) {