Skip to content
Snippets Groups Projects
Commit 1c1b5d7e authored by Vojtech Moravec's avatar Vojtech Moravec
Browse files

Multiple changes to support stream compression/decompression.

parent cd1470f8
Branches
No related tags found
No related merge requests found
Showing with 282 additions and 28 deletions
package azgracompress.compression;
import azgracompress.cache.ICacheFile;
import azgracompress.compression.exception.ImageCompressionException;
import java.io.DataOutputStream;
......@@ -10,14 +11,33 @@ public interface IImageCompressor extends IListenable {
* Compress the image planes.
*
* @param compressStream Compressed data stream.
* @return Size of compressed chunks.
* @throws ImageCompressionException when compression fails.
*/
long[] compress(DataOutputStream compressStream) throws ImageCompressionException;
/**
 * Compress image planes in stream mode. The QCMP header is not written to the stream —
 * only the compressed image data, without any additional META information.
 *
 * @param compressStream Compressed data stream.
 * @return Sizes of the compressed chunks.
 * @throws ImageCompressionException when compression fails.
 */
long[] compressStreamMode(DataOutputStream compressStream) throws ImageCompressionException;
/**
 * Train a codebook from the selected frames and save the learned codebook to a cache file.
 *
 * @throws ImageCompressionException when training or saving fails.
 */
void trainAndSaveCodebook() throws ImageCompressionException;
/**
 * Preload the compressor codebook and Huffman tree for the stream compressor
 * from the provided cache file.
 *
 * @param codebookCacheFile Codebook cache file.
 */
void preloadGlobalCodebook(final ICacheFile codebookCacheFile);
}
......@@ -40,6 +40,9 @@ public interface IImageDecompressor extends IListenable {
short[][] buffer,
final QCMPFileHeader header) throws ImageDecompressionException;
/**
 * Decompress image data sent in stream mode (the stream carries no QCMP header,
 * only compressed image data).
 *
 * @param compressedStream Stream with stream-mode compressed data.
 * @param header           Header pre-filled by the caller with decompression parameters.
 * @return Decompressed image samples.
 * @throws ImageDecompressionException when decompression fails.
 */
short[] decompressStreamMode(final DataInputStream compressedStream,
                             final QCMPFileHeader header) throws ImageDecompressionException;
/**
* Preload decompressor codebook and Huffman tree for stream decompressor from provided cache file.
*
......
package azgracompress.compression;
import azgracompress.cache.ICacheFile;
import azgracompress.compression.exception.ImageCompressionException;
import azgracompress.data.Range;
import azgracompress.fileformat.QCMPFileHeader;
import azgracompress.io.InputData;
import org.scijava.util.ArrayUtils;
import java.io.*;
import java.util.Arrays;
public class ImageCompressor extends CompressorDecompressorBase {
final int PLANE_DATA_SIZES_OFFSET = 23;
......@@ -16,6 +20,20 @@ public class ImageCompressor extends CompressorDecompressorBase {
imageCompressor = getImageCompressor();
}
/**
 * Create compressor with a preloaded global codebook for stream compression.
 *
 * @param options           Compression options.
 * @param codebookCacheFile Cache file used to preload the codebook and Huffman tree.
 */
public ImageCompressor(final CompressionOptions options, final ICacheFile codebookCacheFile) {
    this(options);
    // NOTE(review): the delegated constructor assigns imageCompressor via getImageCompressor(),
    // which may return null for unsupported options — confirm non-null before this call.
    imageCompressor.preloadGlobalCodebook(codebookCacheFile);
}
/**
 * Set the input data object this compressor will read from.
 *
 * @param inputData Current input data information.
 */
public void setInputData(final InputData inputData) {
    this.options.setInputDataInfo(inputData);
}
/**
* Create compressor based on set options.
*
......@@ -67,9 +85,27 @@ public class ImageCompressor extends CompressorDecompressorBase {
return true;
}
public int streamCompress(final OutputStream outputStream) {
assert (false) : "Not implemented!";
return -1;
/**
 * Compress the image in stream (chunk) mode and append the chunk-size table.
 * The compressor writes four unsigned shorts (dims X, Y, Z and chunk count)
 * followed by the compressed chunk data; this method then appends one int
 * per chunk with that chunk's byte size.
 *
 * @param outputStream Stream to write the compressed data to.
 * @return Total number of bytes written to the stream, or -1 on failure.
 */
public int streamCompressChunk(final OutputStream outputStream) {
    assert (imageCompressor != null);
    try (DataOutputStream compressStream = new DataOutputStream(new BufferedOutputStream(outputStream, 8192))) {
        final long[] chunkSizes = imageCompressor.compressStreamMode(compressStream);

        // Write each chunk size exactly once — ImageDecompressor.decompressStream
        // reads exactly one int per chunk from this table.
        for (final long chunkSize : chunkSizes) {
            assert (chunkSize < Integer.MAX_VALUE);
            compressStream.writeInt((int) chunkSize);
        }
        // Bytes written = compressed chunk data + four header shorts (X, Y, Z, chunk count)
        // + four bytes per chunk-size table entry. (4 * 2) matches the header size the
        // decompressor subtracts via `contentLength - (4 * 2)`.
        return (int) Arrays.stream(chunkSizes).sum() + (4 * 2) + (chunkSizes.length * 4);
    } catch (ImageCompressionException ice) {
        System.err.println(ice.getMessage());
        return -1;
    } catch (Exception e) {
        System.err.println(e.getMessage());
        e.printStackTrace();
        return -1;
    }
}
public boolean compress() {
......
......@@ -6,6 +6,7 @@ import azgracompress.data.ImageU16Dataset;
import azgracompress.fileformat.QCMPFileHeader;
import azgracompress.fileformat.QuantizationType;
import azgracompress.utilities.Stopwatch;
import azgracompress.utilities.Utils;
import org.jetbrains.annotations.Nullable;
import java.io.*;
......@@ -16,6 +17,7 @@ import java.util.Optional;
public class ImageDecompressor extends CompressorDecompressorBase {
private IImageDecompressor cachedDecompressor = null;
private QCMPFileHeader cachedHeader = null;
public ImageDecompressor(final CompressionOptions passedOptions) {
super(passedOptions);
......@@ -25,7 +27,13 @@ public class ImageDecompressor extends CompressorDecompressorBase {
/**
 * Create a stream-mode decompressor with a preloaded global codebook.
 * Caches both the decompressor and a partially filled header (quantization type,
 * bits per codebook index, vector dimensions) so repeated stream chunks can be
 * decompressed without re-reading the codebook.
 *
 * @param codebookCacheFile Codebook cache file.
 */
public ImageDecompressor(final ICacheFile codebookCacheFile) {
    this(new CompressionOptions(codebookCacheFile));
    cachedDecompressor = getImageDecompressor(options.getQuantizationType());

    assert (cachedDecompressor != null);
    cachedDecompressor.preloadGlobalCodebook(codebookCacheFile);

    cachedHeader = new QCMPFileHeader();
    cachedHeader.setQuantizationType(codebookCacheFile.getHeader().getQuantizationType());
    // log2(codebookSize) = bits per index; assumes the codebook size is a power of two — TODO confirm.
    cachedHeader.setBitsPerCodebookIndex((byte) ((int) Utils.log2(codebookCacheFile.getHeader().getCodebookSize())));
    cachedHeader.setVectorDimension(codebookCacheFile.getHeader().getVectorDim());
}
/**
......@@ -280,8 +288,42 @@ public class ImageDecompressor extends CompressorDecompressorBase {
}
}
public short[] decompressStream(final InputStream compressedStream) {
return new short[0];
/**
 * Decompress one chunk of image data sent in stream mode (no QCMP header in the stream).
 * Expected stream layout: four unsigned shorts (sizeX, sizeY, sizeZ, chunkCount),
 * the compressed chunk data, then a table of chunkCount int chunk sizes.
 *
 * @param compressedStream Stream with stream-mode compressed data.
 * @param contentLength    Byte length used to locate the trailing chunk-size table —
 *                         presumably header + compressed payload; TODO confirm whether
 *                         the size table itself is included.
 * @return Decompressed image samples.
 * @throws ImageDecompressionException when reading or decompression fails.
 */
public short[] decompressStream(final InputStream compressedStream, final int contentLength) throws ImageDecompressionException {
    try (DataInputStream dis = new DataInputStream(new BufferedInputStream(compressedStream))) {
        // mark/reset is required so we can jump ahead to the chunk-size table and come back.
        assert (dis.markSupported());

        cachedHeader.setImageSizeX(dis.readUnsignedShort());
        cachedHeader.setImageSizeY(dis.readUnsignedShort());
        cachedHeader.setImageSizeZ(dis.readUnsignedShort());
        final int chunkCount = dis.readUnsignedShort();

        final long[] chunkSizes = new long[chunkCount];
        dis.mark(contentLength);
        {
            // Skip over the compressed payload to reach the chunk-size table;
            // (4 * 2) bytes = the four unsigned shorts already consumed above.
            int toSkip = contentLength - (4 * 2);
            // skipBytes may skip fewer bytes than requested, so loop until done.
            while (toSkip > 0) {
                int skipped = dis.skipBytes(toSkip);
                assert (skipped > 0);
                toSkip -= skipped;
            }
            assert (toSkip == 0);
            for (int i = 0; i < chunkCount; i++) {
                chunkSizes[i] = dis.readInt();
            }
        }
        // Rewind to the start of the compressed payload.
        dis.reset();
        cachedHeader.setPlaneDataSizes(chunkSizes);

        return cachedDecompressor.decompressStreamMode(dis, cachedHeader);
    } catch (IOException e) {
        throw new ImageDecompressionException("Unable to decompress chunk of image from stream.", e);
    }
}
@Nullable
......
package azgracompress.compression;
import azgracompress.U16;
import azgracompress.cache.ICacheFile;
import azgracompress.cache.QuantizationCacheManager;
import azgracompress.cache.SQCacheFile;
import azgracompress.compression.exception.ImageCompressionException;
import azgracompress.huffman.Huffman;
import azgracompress.io.InputData;
......@@ -17,6 +19,9 @@ import java.io.IOException;
public class SQImageCompressor extends CompressorDecompressorBase implements IImageCompressor {
private ScalarQuantizer cachedQuantizer;
private Huffman cachedHuffman;
public SQImageCompressor(CompressionOptions options) {
super(options);
}
......@@ -30,12 +35,19 @@ public class SQImageCompressor extends CompressorDecompressorBase implements IIm
private ScalarQuantizer trainScalarQuantizerFromData(final int[] planeData) {
LloydMaxU16ScalarQuantization lloydMax = new LloydMaxU16ScalarQuantization(planeData,
getCodebookSize(),
options.getWorkerCount());
getCodebookSize(),
options.getWorkerCount());
lloydMax.train();
return new ScalarQuantizer(U16.Min, U16.Max, lloydMax.getCodebook());
}
/**
 * Preload the scalar-quantization codebook and Huffman tree from the provided cache file.
 *
 * @param codebookCacheFile Scalar-quantization cache file.
 */
@Override
public void preloadGlobalCodebook(ICacheFile codebookCacheFile) {
    final SQCodebook codebook = ((SQCacheFile) codebookCacheFile).getCodebook();
    cachedHuffman = createHuffmanCoder(createHuffmanSymbols(codebook.getCodebookSize()),
                                       codebook.getSymbolFrequencies());
    cachedQuantizer = new ScalarQuantizer(codebook);
}
/**
* Writes the scalar quantizer to the compressed stream.
*
......@@ -77,7 +89,7 @@ public class SQImageCompressor extends CompressorDecompressorBase implements IIm
}
final SQCodebook codebook = cacheManager.loadSQCodebook(options.getInputDataInfo().getCacheFileName(),
getCodebookSize());
getCodebookSize());
if (codebook == null) {
throw new ImageCompressionException("Failed to read quantization values from cache file.");
}
......@@ -164,7 +176,7 @@ public class SQImageCompressor extends CompressorDecompressorBase implements IIm
stopwatch.stop();
reportProgressToListeners(planeIndex, planeIndices.length,
"Compressed plane %d in %s.", planeIndex, stopwatch.getElapsedTimeString());
"Compressed plane %d in %s.", planeIndex, stopwatch.getElapsedTimeString());
}
return planeDataSizes;
}
......@@ -211,8 +223,8 @@ public class SQImageCompressor extends CompressorDecompressorBase implements IIm
int[] trainData = loadConfiguredPlanesData();
LloydMaxU16ScalarQuantization lloydMax = new LloydMaxU16ScalarQuantization(trainData,
getCodebookSize(),
options.getWorkerCount());
getCodebookSize(),
options.getWorkerCount());
reportStatusToListeners("Starting LloydMax training.");
lloydMax.setStatusListener(this::reportStatusToListeners);
......@@ -229,4 +241,9 @@ public class SQImageCompressor extends CompressorDecompressorBase implements IIm
}
reportStatusToListeners("Operation completed.");
}
/**
 * Stream-mode compression entry point for scalar quantization.
 *
 * @param compressStream Compressed data stream.
 * @return Never returns normally.
 * @throws ImageCompressionException always — stream mode is not implemented for SQ yet.
 */
@Override
public long[] compressStreamMode(DataOutputStream compressStream) throws ImageCompressionException {
    throw new ImageCompressionException("Not implemented yet");
}
}
......@@ -189,7 +189,10 @@ public class SQImageDecompressor extends CompressorDecompressorBase implements I
ex);
}
}
}
/**
 * Stream-mode decompression entry point for scalar quantization.
 *
 * @param compressedStream Stream with compressed data.
 * @param header           Header with decompression parameters.
 * @return Never returns normally.
 * @throws ImageDecompressionException always — stream mode is not implemented for SQ yet.
 */
@Override
public short[] decompressStreamMode(DataInputStream compressedStream, QCMPFileHeader header) throws ImageDecompressionException {
    throw new ImageDecompressionException("Not implemented yet.");
}
}
package azgracompress.compression;
import azgracompress.cache.ICacheFile;
import azgracompress.cache.QuantizationCacheManager;
import azgracompress.cache.VQCacheFile;
import azgracompress.compression.exception.ImageCompressionException;
import azgracompress.data.Range;
import azgracompress.data.V3i;
import azgracompress.fileformat.QuantizationType;
import azgracompress.huffman.Huffman;
import azgracompress.io.InputData;
......@@ -18,10 +21,20 @@ import java.io.IOException;
public class VQImageCompressor extends CompressorDecompressorBase implements IImageCompressor {
private VectorQuantizer cachedQuantizer = null;
private Huffman cachedHuffman = null;
public VQImageCompressor(CompressionOptions options) {
super(options);
}
/**
 * Preload the vector-quantization codebook and Huffman tree from the provided cache file.
 *
 * @param codebookCacheFile Vector-quantization cache file.
 */
@Override
public void preloadGlobalCodebook(final ICacheFile codebookCacheFile) {
    final VQCodebook codebook = ((VQCacheFile) codebookCacheFile).getCodebook();
    cachedHuffman = createHuffmanCoder(createHuffmanSymbols(codebook.getCodebookSize()),
                                       codebook.getVectorFrequencies());
    cachedQuantizer = new VectorQuantizer(codebook);
}
/**
* Train vector quantizer from plane vectors.
*
......@@ -73,7 +86,6 @@ public class VQImageCompressor extends CompressorDecompressorBase implements IIm
* @throws ImageCompressionException when fails to read cached codebook.
*/
private VectorQuantizer loadQuantizerFromCache() throws ImageCompressionException {
QuantizationCacheManager cacheManager = new QuantizationCacheManager(options.getCodebookCacheFolder());
if (!cacheManager.doesVQCacheExists(options.getInputDataInfo().getCacheFileName(),
......@@ -101,14 +113,24 @@ public class VQImageCompressor extends CompressorDecompressorBase implements IIm
/**
 * Compress the image planes (normal, non-stream mode: codebook/header data is
 * written into the compressed stream). The scraped diff left both the pre-change
 * and post-change call lines in place; only the two-argument calls are kept.
 *
 * @param compressStream Compressed data stream.
 * @return Sizes of the compressed plane/voxel-layer chunks.
 * @throws ImageCompressionException when compression fails.
 */
@Override
public long[] compress(DataOutputStream compressStream) throws ImageCompressionException {
    if (options.getQuantizationType() == QuantizationType.Vector3D) {
        return compressVoxels(compressStream, false);
    }
    // 1D/2D vector quantization requires a flat (Z == 1) quantization vector.
    assert (options.getQuantizationVector().getZ() == 1);
    return compress1D2DVectors(compressStream, false);
}
/**
 * Compress the image planes in stream mode — no QCMP header or codebook is
 * written, only the stream-mode dimensions and compressed data.
 *
 * @param compressStream Compressed data stream.
 * @return Sizes of the compressed chunks.
 * @throws ImageCompressionException when compression fails.
 */
@Override
public long[] compressStreamMode(DataOutputStream compressStream) throws ImageCompressionException {
    if (options.getQuantizationType() != QuantizationType.Vector3D) {
        // 1D/2D vector quantization requires a flat (Z == 1) quantization vector.
        assert (options.getQuantizationVector().getZ() == 1);
        return compress1D2DVectors(compressStream, true);
    }
    return compressVoxels(compressStream, true);
}
@NotNull
private long[] compress1D2DVectors(DataOutputStream compressStream) throws ImageCompressionException {
private long[] compress1D2DVectors(final DataOutputStream compressStream, final boolean streamMode) throws ImageCompressionException {
final InputData inputDataInfo = options.getInputDataInfo();
Stopwatch stopwatch = new Stopwatch();
final boolean hasGeneralQuantizer = options.getCodebookType() != CompressionOptions.CodebookType.Individual;
......@@ -127,19 +149,35 @@ public class VQImageCompressor extends CompressorDecompressorBase implements IIm
quantizer = loadQuantizerFromCache();
huffman = createHuffmanCoder(huffmanSymbols, quantizer.getFrequencies());
reportStatusToListeners("Cached quantizer with huffman coder created.");
writeQuantizerToCompressStream(quantizer, compressStream);
if (!streamMode)
writeQuantizerToCompressStream(quantizer, compressStream);
} else if (options.getCodebookType() == CompressionOptions.CodebookType.MiddlePlane) {
stopwatch.restart();
reportStatusToListeners("Training vector quantizer from middle plane.");
final int[][] refPlaneVectors = planeLoader.loadVectorsFromPlaneRange(options, Utils.singlePlaneRange(getMiddlePlaneIndex()));
quantizer = trainVectorQuantizerFromPlaneVectors(refPlaneVectors);
huffman = createHuffmanCoder(huffmanSymbols, quantizer.getFrequencies());
writeQuantizerToCompressStream(quantizer, compressStream);
if (!streamMode)
writeQuantizerToCompressStream(quantizer, compressStream);
stopwatch.stop();
reportStatusToListeners("Middle plane codebook created in: " + stopwatch.getElapsedTimeString());
}
final int[] planeIndices = getPlaneIndicesForCompression();
if (streamMode) {
try {
final V3i imageDims = options.getInputDataInfo().getDimensions();
// Image dimensions
compressStream.writeShort(imageDims.getX());
compressStream.writeShort(imageDims.getY());
compressStream.writeShort(imageDims.getZ());
// Write voxel layer in stream mode.
compressStream.writeShort(planeIndices.length);
} catch (IOException e) {
throw new ImageCompressionException("Failed to write short value to compression stream.", e);
}
}
long[] planeDataSizes = new long[planeIndices.length];
int planeCounter = 0;
......@@ -153,7 +191,8 @@ public class VQImageCompressor extends CompressorDecompressorBase implements IIm
reportStatusToListeners(String.format("Training vector quantizer from plane %d.", planeIndex));
quantizer = trainVectorQuantizerFromPlaneVectors(planeVectors);
huffman = createHuffmanCoder(huffmanSymbols, quantizer.getFrequencies());
writeQuantizerToCompressStream(quantizer, compressStream);
if (!streamMode)
writeQuantizerToCompressStream(quantizer, compressStream);
}
assert (quantizer != null);
......@@ -234,7 +273,7 @@ public class VQImageCompressor extends CompressorDecompressorBase implements IIm
return (datasetPlaneCount / voxelDepth);
}
public long[] compressVoxels(DataOutputStream compressStream) throws ImageCompressionException {
public long[] compressVoxels(final DataOutputStream compressStream, final boolean streamMode) throws ImageCompressionException {
assert (options.getCodebookType() == CompressionOptions.CodebookType.Global);
final IPlaneLoader planeLoader;
final int[] huffmanSymbols = createHuffmanSymbols(getCodebookSize());
......@@ -247,11 +286,26 @@ public class VQImageCompressor extends CompressorDecompressorBase implements IIm
final int voxelLayerDepth = options.getQuantizationVector().getZ();
final int voxelLayerCount = calculateVoxelLayerCount(options.getInputDataInfo().getDimensions().getZ(), voxelLayerDepth);
if (streamMode) {
try {
final V3i imageDims = options.getInputDataInfo().getDimensions();
// Image dimensions
compressStream.writeShort(imageDims.getX());
compressStream.writeShort(imageDims.getY());
compressStream.writeShort(imageDims.getZ());
// Write voxel layer in stream mode.
compressStream.writeShort(voxelLayerCount);
} catch (IOException e) {
throw new ImageCompressionException("Failed to write short value to compression stream.", e);
}
}
long[] voxelLayersSizes = new long[voxelLayerCount];
final VectorQuantizer quantizer = loadQuantizerFromCache();
final Huffman huffman = createHuffmanCoder(huffmanSymbols, quantizer.getFrequencies());
writeQuantizerToCompressStream(quantizer, compressStream);
final VectorQuantizer quantizer = (cachedQuantizer != null) ? cachedQuantizer : loadQuantizerFromCache();
final Huffman huffman = (cachedHuffman != null) ? cachedHuffman : createHuffmanCoder(huffmanSymbols, quantizer.getFrequencies());
if (!streamMode)
writeQuantizerToCompressStream(quantizer, compressStream);
int[][] voxelData;
Stopwatch stopwatch = new Stopwatch();
......
......@@ -44,11 +44,6 @@ public class VQImageDecompressor extends CompressorDecompressorBase implements I
return (vectorXCount * vectorYCount);
}
/**
 * Compute the byte size of a single plane's index data: ceil((vectorCount * bpp) / 8).
 * Uses exact integer ceiling division instead of Math.ceil on a double, so the
 * result stays precise for arbitrarily large vector counts.
 *
 * @param planeVectorCount Number of quantization vectors in the plane.
 * @param bpp              Bits per codebook index.
 * @return Plane index data size in bytes.
 */
private long calculatePlaneDataSize(final long planeVectorCount, final int bpp) {
    return ((planeVectorCount * bpp) + 7L) / 8L;
}
private VQCodebook readCodebook(DataInputStream compressedStream,
final int codebookSize,
final int vectorSize) throws ImageDecompressionException {
......@@ -275,6 +270,64 @@ public class VQImageDecompressor extends CompressorDecompressorBase implements I
}
}
/**
 * Decompress voxel layers in stream mode using the preloaded (cached) codebook and
 * Huffman tree, invoking the callback with each reconstructed voxel layer.
 * Chunk sizes must already be present in {@code header.getPlaneDataSizes()}.
 *
 * @param compressedStream Stream with stream-mode compressed voxel data.
 * @param header           Header with image/vector dimensions and per-layer data sizes.
 * @param callback         Receives each decompressed voxel layer and its z-plane offset.
 * @throws ImageDecompressionException when reading indices from the bit stream fails.
 */
@SuppressWarnings("DuplicatedCode")
private void decompressVoxelsStreamModeImpl(DataInputStream compressedStream,
                                            QCMPFileHeader header,
                                            DecompressVoxelCallback callback) throws ImageDecompressionException {
    // Stream mode only supports 3D vector quantization with one global codebook.
    assert (header.getQuantizationType() == QuantizationType.Vector3D);
    assert (!header.isCodebookPerPlane()); // SHOULD ALWAYS BE GLOBAL.

    final V3i voxelDims = new V3i(header.getVectorSizeX(), header.getVectorSizeY(), header.getVectorSizeZ());
    final int vectorSize = (int) voxelDims.multiplyTogether();
    final int voxelLayerDepth = voxelDims.getZ();
    final int voxelLayerCount = VQImageCompressor.calculateVoxelLayerCount(header.getImageSizeZ(), header.getVectorSizeZ());

    Stopwatch stopwatch = new Stopwatch();
    for (int voxelLayerIndex = 0; voxelLayerIndex < voxelLayerCount; voxelLayerIndex++) {
        stopwatch.restart();

        final int fromZ = (voxelLayerIndex * voxelLayerDepth);
        // The last layer may be shallower when the image depth is not divisible by the voxel depth.
        final int toZ = (voxelLayerIndex == voxelLayerCount - 1)
                ? header.getImageSizeZ()
                : (voxelLayerDepth + (voxelLayerIndex * voxelLayerDepth));
        final V3i currentVoxelLayerDims = new V3i(header.getImageSizeX(), header.getImageSizeY(), toZ - fromZ);
        final int voxelLayerDataSize = (int) header.getPlaneDataSizes()[voxelLayerIndex];
        final int voxelLayerVoxelCount = Voxel.calculateRequiredVoxelCount(currentVoxelLayerDims, voxelDims);

        int[][] decompressedVoxels = new int[voxelLayerVoxelCount][vectorSize];

        try (InBitStream inBitStream = new InBitStream(compressedStream, header.getBitsPerCodebookIndex(), voxelLayerDataSize)) {
            // Buffer the whole layer up front so Huffman decoding never reads past this layer's data.
            inBitStream.readToBuffer();
            inBitStream.setAllowReadFromUnderlyingStream(false);

            for (int voxelIndex = 0; voxelIndex < voxelLayerVoxelCount; voxelIndex++) {
                final int huffmanSymbol = decodeHuffmanSymbol(cachedHuffman, inBitStream);
                // Expand each decoded index into its codebook vector.
                System.arraycopy(cachedCodebook.getVectors()[huffmanSymbol], 0, decompressedVoxels[voxelIndex], 0, vectorSize);
            }
        } catch (Exception e) {
            throw new ImageDecompressionException("VQImageDecompressor::decompressVoxels() - Unable to read indices from InBitStream.",
                                                  e);
        }

        final Voxel currentVoxel = new Voxel(currentVoxelLayerDims);
        callback.process(currentVoxel, decompressedVoxels, (voxelLayerIndex * voxelLayerDepth));

        stopwatch.stop();
        if (options.isConsoleApplication()) {
            reportStatusToListeners("Decompressed voxel layer %d/%d in %s",
                                    voxelLayerIndex, voxelLayerCount, stopwatch.getElapsedTimeString());
        } else {
            reportProgressToListeners(voxelLayerIndex, voxelLayerCount,
                                      "Decompressed voxel layer %d/%d in %s",
                                      voxelLayerIndex, voxelLayerCount, stopwatch.getElapsedTimeString());
        }
    }
}
private void decompressVoxelsToBuffer(DataInputStream compressedStream,
short[][] buffer,
......@@ -312,4 +365,26 @@ public class VQImageDecompressor extends CompressorDecompressorBase implements I
}
return currentHuffmanNode.getSymbol();
}
/**
 * Decompress stream-mode compressed image data into a flat sample buffer.
 *
 * @param compressedStream Stream with stream-mode compressed data.
 * @param header           Header with image/vector dimensions and chunk sizes.
 * @return Flat buffer of decompressed samples, one image plane after another.
 * @throws ImageDecompressionException when decompression fails.
 */
@Override
public short[] decompressStreamMode(final DataInputStream compressedStream,
                                    final QCMPFileHeader header) throws ImageDecompressionException {
    // TODO(Moravec): Implement missing quantization type.
    assert (header.getQuantizationType() == QuantizationType.Vector3D);

    final short[] buffer = new short[(int) header.getImageDims().multiplyTogether()];
    final V3i voxelDim = new V3i(header.getVectorSizeX(), header.getVectorSizeY(), header.getVectorSizeZ());
    // One image plane (z-slice) of the full image holds imageSizeX * imageSizeY samples.
    final int imagePlaneSize = header.getImageSizeX() * header.getImageSizeY();

    decompressVoxelsStreamModeImpl(compressedStream, header, (voxel, voxelData, planeOffset) -> {
        final ImageU16Dataset currentVoxelLayer = voxel.reconstructFromVoxelsToDataset(voxelDim, voxelData);
        // FIX: planeOffset is a z-plane index into the full image, so the starting
        // buffer offset must use the image plane size, not the voxel X/Y dimensions
        // (the original `voxelDim.getX() * voxelDim.getY()` placed layers wrongly
        // whenever the voxel footprint differs from the plane size).
        int offset = planeOffset * imagePlaneSize;
        for (int layer = 0; layer < voxel.getDims().getZ(); layer++) {
            final short[] voxelLayerData = currentVoxelLayer.getPlaneData(layer);
            System.arraycopy(voxelLayerData, 0, buffer, offset, voxelLayerData.length);
            offset += voxelLayerData.length;
        }
    });
    return buffer;
}
}
......@@ -221,6 +221,10 @@ public class QCMPFileHeader {
return planeDataSizes;
}
/**
 * Set the byte sizes of the compressed plane (or voxel-layer) data chunks.
 *
 * @param sizes Compressed chunk sizes in bytes.
 */
public void setPlaneDataSizes(final long[] sizes) {
    this.planeDataSizes = sizes;
}
public long getHeaderSize() {
final int chunkCount = (quantizationType != QuantizationType.Vector3D)
? imageSizeZ
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please register or to comment