diff --git a/src/main/java/azgracompress/compression/ImageDecompressor.java b/src/main/java/azgracompress/compression/ImageDecompressor.java index 6a552ea1beb2daabb70453464f4c137c8f9bd38a..38f28ec0144c10b56750e8ead5b5e3ee19f52ab8 100644 --- a/src/main/java/azgracompress/compression/ImageDecompressor.java +++ b/src/main/java/azgracompress/compression/ImageDecompressor.java @@ -1,8 +1,6 @@ package azgracompress.compression; import azgracompress.compression.exception.ImageDecompressionException; -import azgracompress.compression.listeners.IProgressListener; -import azgracompress.compression.listeners.IStatusListener; import azgracompress.data.ImageU16Dataset; import azgracompress.fileformat.QCMPFileHeader; import azgracompress.utilities.Stopwatch; @@ -47,9 +45,9 @@ public class ImageDecompressor extends CompressorDecompressorBase { break; case Vector1D: case Vector2D: + case Vector3D: decompressor = new VQImageDecompressor(options); break; - case Vector3D: case Invalid: default: return null; @@ -70,7 +68,7 @@ public class ImageDecompressor extends CompressorDecompressorBase { StringBuilder logBuilder = new StringBuilder(); boolean validFile = true; - QCMPFileHeader header = null; + QCMPFileHeader header; try (FileInputStream fileInputStream = new FileInputStream(options.getInputDataInfo().getFilePath()); DataInputStream dataInputStream = new DataInputStream(fileInputStream)) { header = readQCMPFileHeader(dataInputStream); diff --git a/src/main/java/azgracompress/compression/VQImageCompressor.java b/src/main/java/azgracompress/compression/VQImageCompressor.java index 68d8b49e8a187c3a59d430365605f64b0170a513..a68bf572a5677d35460b1a0798339aa6a8f46318 100644 --- a/src/main/java/azgracompress/compression/VQImageCompressor.java +++ b/src/main/java/azgracompress/compression/VQImageCompressor.java @@ -1,16 +1,18 @@ package azgracompress.compression; import azgracompress.cache.QuantizationCacheManager; -import azgracompress.fileformat.QuantizationType; -import 
azgracompress.io.InputData; import azgracompress.compression.exception.ImageCompressionException; import azgracompress.data.Chunk2D; import azgracompress.data.ImageU16; +import azgracompress.data.Range; +import azgracompress.fileformat.QuantizationType; import azgracompress.huffman.Huffman; +import azgracompress.io.InputData; import azgracompress.io.loader.IPlaneLoader; import azgracompress.io.loader.PlaneLoaderFactory; import azgracompress.quantization.vector.*; import azgracompress.utilities.Stopwatch; +import org.jetbrains.annotations.NotNull; import java.io.DataOutputStream; import java.io.IOException; @@ -104,7 +106,11 @@ public class VQImageCompressor extends CompressorDecompressorBase implements IIm return compressVoxels(compressStream); } assert (options.getQuantizationVector().getZ() == 1); + return compress1D2DVectors(compressStream); + } + @NotNull + private long[] compress1D2DVectors(DataOutputStream compressStream) throws ImageCompressionException { final InputData inputDataInfo = options.getInputDataInfo(); Stopwatch stopwatch = new Stopwatch(); final boolean hasGeneralQuantizer = options.getCodebookType() != CompressionOptions.CodebookType.Individual; @@ -113,7 +119,7 @@ public class VQImageCompressor extends CompressorDecompressorBase implements IIm try { planeLoader = PlaneLoaderFactory.getPlaneLoaderForInputFile(inputDataInfo); } catch (Exception e) { - throw new ImageCompressionException("Unable to create SCIFIO reader. " + e.getMessage()); + throw new ImageCompressionException("Unable to create plane reader. " + e.getMessage()); } VectorQuantizer quantizer = null; Huffman huffman = null; @@ -285,19 +291,67 @@ public class VQImageCompressor extends CompressorDecompressorBase implements IIm reportStatusToListeners("Operation completed."); } + /** + * Calculate the number of voxel layers needed for dataset of plane count. + * + * @param datasetPlaneCount Dataset plane count + * @param voxelDepth Z dimension of voxel. 
+ * @return Number of voxel layers. + */ + public static int calculateVoxelLayerCount(final int datasetPlaneCount, final int voxelDepth) { + return (datasetPlaneCount / voxelDepth); + } public long[] compressVoxels(DataOutputStream compressStream) throws ImageCompressionException { - // int[][] voxels; - // try { - // IPlaneLoader loader = PlaneLoaderFactory.getPlaneLoaderForInputFile(options.getInputDataInfo()); - // final int[] data = loader.loadAllPlanesU16Data(); - // Chunk3D bigVoxel = new Chunk3D(options.getInputDataInfo().getDimensions(), data); - // voxels = bigVoxel.divideInto3DVectors(options.getQuantizationVector()); - // } catch (Exception e) { - // throw new ImageCompressionException("Unable to create data loader or load image data.", e); - // } - return null; + assert (options.getCodebookType() == CompressionOptions.CodebookType.Global); + final IPlaneLoader planeLoader; + final int[] huffmanSymbols = createHuffmanSymbols(getCodebookSize()); + try { + planeLoader = PlaneLoaderFactory.getPlaneLoaderForInputFile(options.getInputDataInfo()); + } catch (Exception e) { + throw new ImageCompressionException("Unable to create plane reader. " + e.getMessage()); + } + + final int voxelLayerDepth = options.getQuantizationVector().getZ(); + final int voxelLayerCount = calculateVoxelLayerCount(options.getInputDataInfo().getDimensions().getZ(), voxelLayerDepth); + long[] voxelLayersSizes = new long[voxelLayerCount]; + + final VectorQuantizer quantizer = loadQuantizerFromCache(); + final Huffman huffman = createHuffmanCoder(huffmanSymbols, quantizer.getFrequencies()); + writeQuantizerToCompressStream(quantizer, compressStream); + + int[][] voxelData; + Stopwatch stopwatch = new Stopwatch(); + for (int voxelLayerIndex = 0; voxelLayerIndex < voxelLayerCount; voxelLayerIndex++) { + stopwatch.restart(); + final int fromZ = (voxelLayerIndex * voxelLayerDepth); + + // TODO(Moravec): There is a problem! 
+ // If dataset.Z is not divisible by voxel.Z we end up creating a lot of stupid voxels. + // Those stupid voxels have only one or two layers of actual data and the rest are zeros. + // This ends up increasing the file size because they have quite long Huffman codes. + final int toZ = (voxelLayerIndex == voxelLayerCount - 1) + ? options.getInputDataInfo().getDimensions().getZ() + : (voxelLayerDepth + (voxelLayerIndex * voxelLayerDepth)); + + final Range<Integer> voxelLayerRange = new Range<>(fromZ, toZ); + + try { + voxelData = planeLoader.loadVoxels(options.getQuantizationVector(), voxelLayerRange); + System.out.println("voxelData.length=" + voxelData.length); + } catch (IOException e) { + throw new ImageCompressionException("Unable to load voxels from voxel layer " + voxelLayerRange, e); + } + + final int[] indices = quantizer.quantizeIntoIndices(voxelData, options.getWorkerCount()); + voxelLayersSizes[voxelLayerIndex] = writeHuffmanEncodedIndices(compressStream, huffman, indices); + stopwatch.stop(); + reportProgressToListeners(voxelLayerIndex, voxelLayerCount, + "%d/%d Finished voxel layer %s compression pass in %s", + voxelLayerIndex, voxelLayerCount, voxelLayerRange.toString(), stopwatch.getElapsedTimeString()); + } + return voxelLayersSizes; + } } diff --git a/src/main/java/azgracompress/compression/VQImageDecompressor.java b/src/main/java/azgracompress/compression/VQImageDecompressor.java index 3f5d5784e3f5584832c4f619d3354ccb6769713d..a1c1e50e6ef46eef6db86ccd6d92252ec9bc064d 100644 --- a/src/main/java/azgracompress/compression/VQImageDecompressor.java +++ b/src/main/java/azgracompress/compression/VQImageDecompressor.java @@ -76,14 +76,16 @@ public class VQImageDecompressor extends CompressorDecompressorBase implements I public long getExpectedDataSize(QCMPFileHeader header) { // Vector count in codebook final int codebookSize = (int) Math.pow(2, header.getBitsPerCodebookIndex()); + System.out.println("codebookSize=" + codebookSize); // Single vector size in
bytes. - assert (header.getVectorSizeZ() == 1); final int vectorDataSize = 2 * header.getVectorSizeX() * header.getVectorSizeY() * header.getVectorSizeZ(); + System.out.println("vectorDataSize=" + vectorDataSize); // Total codebook size in bytes. final long codebookDataSize = ((codebookSize * vectorDataSize) + (codebookSize * LONG_BYTES)) * (header.isCodebookPerPlane() ? header.getImageSizeZ() : 1); + System.out.println("codebookDataSize=" + codebookDataSize); // Indices are encoded using huffman. Plane data size is written in the header. long[] planeDataSizes = header.getPlaneDataSizes(); @@ -91,6 +93,8 @@ public class VQImageDecompressor extends CompressorDecompressorBase implements I for (final long planeDataSize : planeDataSizes) { totalPlaneDataSize += planeDataSize; } + System.out.println("totalPlaneDataSize=" + totalPlaneDataSize); + System.out.println("TOTAL=" + (codebookDataSize + totalPlaneDataSize)); return (codebookDataSize + totalPlaneDataSize); } @@ -132,8 +136,8 @@ public class VQImageDecompressor extends CompressorDecompressorBase implements I final int planeDataSize = (int) header.getPlaneDataSizes()[planeIndex]; try (InBitStream inBitStream = new InBitStream(compressedStream, - header.getBitsPerCodebookIndex(), - planeDataSize)) { + header.getBitsPerCodebookIndex(), + planeDataSize)) { inBitStream.readToBuffer(); inBitStream.setAllowReadFromUnderlyingStream(false); @@ -146,16 +150,16 @@ public class VQImageDecompressor extends CompressorDecompressorBase implements I currentHuffmanNode = currentHuffmanNode.traverse(bit); } System.arraycopy(codebook.getVectors()[currentHuffmanNode.getSymbol()].getVector(), - 0, - decompressedVectors[vecIndex], - 0, - vectorSize); + 0, + decompressedVectors[vecIndex], + 0, + vectorSize); } final ImageU16 decompressedPlane = reconstructImageFromQuantizedVectors(decompressedVectors, - qVector, - header.getImageDims()); + qVector, + header.getImageDims()); decompressedPlaneData = 
TypeConverter.unsignedShortArrayToByteArray(decompressedPlane.getData(), false); } catch (Exception ex) { @@ -171,12 +175,14 @@ public class VQImageDecompressor extends CompressorDecompressorBase implements I stopwatch.stop(); reportProgressToListeners(planeIndex, planeCountForDecompression, - "Decompressed plane %d in %s", planeIndex, stopwatch.getElapsedTimeString()); + "Decompressed plane %d in %s", planeIndex, stopwatch.getElapsedTimeString()); } } @Override - public void decompressToBuffer(DataInputStream compressedStream, short[][] buffer, QCMPFileHeader header) throws ImageDecompressionException { + public void decompressToBuffer(DataInputStream compressedStream, + short[][] buffer, + QCMPFileHeader header) throws ImageDecompressionException { // TODO: Think how to remove the duplicate code. final int codebookSize = (int) Math.pow(2, header.getBitsPerCodebookIndex()); assert (header.getVectorSizeZ() == 1); @@ -205,8 +211,8 @@ public class VQImageDecompressor extends CompressorDecompressorBase implements I final int planeDataSize = (int) header.getPlaneDataSizes()[planeIndex]; try (InBitStream inBitStream = new InBitStream(compressedStream, - header.getBitsPerCodebookIndex(), - planeDataSize)) { + header.getBitsPerCodebookIndex(), + planeDataSize)) { inBitStream.readToBuffer(); inBitStream.setAllowReadFromUnderlyingStream(false); @@ -219,23 +225,23 @@ public class VQImageDecompressor extends CompressorDecompressorBase implements I currentHuffmanNode = currentHuffmanNode.traverse(bit); } System.arraycopy(codebook.getVectors()[currentHuffmanNode.getSymbol()].getVector(), - 0, - decompressedVectors[vecIndex], - 0, - vectorSize); + 0, + decompressedVectors[vecIndex], + 0, + vectorSize); } final ImageU16 decompressedPlane = reconstructImageFromQuantizedVectors(decompressedVectors, - qVector, - header.getImageDims()); + qVector, + header.getImageDims()); buffer[planeIndex] = TypeConverter.intArrayToShortArray(decompressedPlane.getData()); } catch (Exception ex) { 
throw new ImageDecompressionException("Unable to read indices from InBitStream.", ex); } reportProgressToListeners(planeIndex, planeCountForDecompression, - "Decompressed plane %d.", planeIndex); + "Decompressed plane %d.", planeIndex); } } }