diff --git a/src/main/java/azgracompress/DataCompressor.java b/src/main/java/azgracompress/DataCompressor.java
index 91921cd8d356b0769454752556184cc51944951c..c13921e600965df0ef4ed6d8b614a47be16a5c61 100644
--- a/src/main/java/azgracompress/DataCompressor.java
+++ b/src/main/java/azgracompress/DataCompressor.java
@@ -12,8 +12,6 @@ import org.apache.commons.cli.*;
 import java.io.IOException;
 
 public class DataCompressor {
-
-
     public static void main(String[] args) {
         Options options = CliConstants.getOptions();
diff --git a/src/main/java/azgracompress/compression/CompressorDecompressorBase.java b/src/main/java/azgracompress/compression/CompressorDecompressorBase.java
index 15a17901951dcd8452141335adc03014deb67a28..f80ba046a1d3d4e808df6329c78c7be4dac271b5 100644
--- a/src/main/java/azgracompress/compression/CompressorDecompressorBase.java
+++ b/src/main/java/azgracompress/compression/CompressorDecompressorBase.java
@@ -3,6 +3,7 @@ package azgracompress.compression;
 import azgracompress.cli.ParsedCliOptions;
 
 public abstract class CompressorDecompressorBase {
+    public static final int LONG_BYTES = 8;
     public static final String EXTENSION = ".QCMP";
 
     protected final ParsedCliOptions options;
diff --git a/src/main/java/azgracompress/compression/ImageDecompressor.java b/src/main/java/azgracompress/compression/ImageDecompressor.java
index 40ac32bc89ba20039ec779cde30df82b7db7b045..a12f6c83919c36145872e086146cbeaedd560f2e 100644
--- a/src/main/java/azgracompress/compression/ImageDecompressor.java
+++ b/src/main/java/azgracompress/compression/ImageDecompressor.java
@@ -109,8 +109,10 @@ public class ImageDecompressor extends CompressorDecompressorBase {
         logBuilder.append("Vector size Z:\t\t").append(header.getVectorSizeZ()).append('\n');
 
         final long fileSize = new File(options.getInputFile()).length();
-        final long dataSize = fileSize - QCMPFileHeader.QCMP_HEADER_SIZE;
+        final long dataSize = fileSize - header.getHeaderSize();
+
         final IImageDecompressor decompressor = getImageDecompressor(header);
+
         if (decompressor != null) {
             final long expectedDataSize = decompressor.getExpectedDataSize(header);
             validFile = (dataSize == expectedDataSize);
@@ -149,7 +151,7 @@ public class ImageDecompressor extends CompressorDecompressorBase {
         }
 
         final long fileSize = new File(options.getInputFile()).length();
-        final long dataSize = fileSize - QCMPFileHeader.QCMP_HEADER_SIZE;
+        final long dataSize = fileSize - header.getHeaderSize();
         final long expectedDataSize = imageDecompressor.getExpectedDataSize(header);
         if (dataSize != expectedDataSize) {
             System.err.println("Invalid file size.");
diff --git a/src/main/java/azgracompress/compression/SQImageCompressor.java b/src/main/java/azgracompress/compression/SQImageCompressor.java
index 7c4258ec4a1c322517cba0dd415d1c8860af4da0..82d8eb88cf307811dfca1270eefac7782901d441 100644
--- a/src/main/java/azgracompress/compression/SQImageCompressor.java
+++ b/src/main/java/azgracompress/compression/SQImageCompressor.java
@@ -90,7 +90,7 @@ public class SQImageCompressor extends CompressorDecompressorBase implements IIm
      */
     public long[] compress(DataOutputStream compressStream) throws ImageCompressionException {
         Stopwatch stopwatch = new Stopwatch();
-        long[] planeDataSizes = new long[options.getImageDimension().getZ()];
+
         final boolean hasGeneralQuantizer = options.hasCodebookCacheFolder() || options.hasReferencePlaneIndex();
         ScalarQuantizer quantizer = null;
@@ -126,6 +126,8 @@ public class SQImageCompressor extends CompressorDecompressorBase implements IIm
         }
 
         final int[] planeIndices = getPlaneIndicesForCompression();
+        long[] planeDataSizes = new long[planeIndices.length];
+        int planeCounter = 0;
         for (final int planeIndex : planeIndices) {
             stopwatch.restart();
             Log(String.format("Loading plane %d.", planeIndex));
@@ -148,20 +150,38 @@ public class SQImageCompressor extends CompressorDecompressorBase implements IIm
                 huffman = new Huffman(huffmanSymbols, quantizer.getCodebook().getSymbolFrequencies());
                 huffman.buildHuffmanTree();
             }
+            assert (quantizer != null) : "Scalar Quantizer wasn't initialized.";
             assert (huffman != null) : "Huffman wasn't initialized.";
 
             Log("Compressing plane...");
             final int[] indices = quantizer.quantizeIntoIndices(plane.getData(), 1);
+//            ////////////////////////
+//            for (int i = 0; i < indices.length; i++) {
+//                final boolean[] huffmanCode = huffman.getCode(indices[i]);
+//                HuffmanNode currentHuffmanNode = huffman.getRoot();
+//                boolean bit;
+//                int index = 0;
+//                while (!currentHuffmanNode.isLeaf()) {
+//                    bit = huffmanCode[index++];
+//                    currentHuffmanNode = currentHuffmanNode.traverse(bit);
+//                }
+//                assert (indices[i] == currentHuffmanNode.getSymbol());
+//            }
+//            ////////////////////////////////
+
+
             try (OutBitStream outBitStream = new OutBitStream(compressStream, options.getBitsPerPixel(), 2048)) {
                 for (final int index : indices) {
                     outBitStream.write(huffman.getCode(index));
                 }
+                planeDataSizes[planeCounter++] = outBitStream.getBytesWritten();
                 //outBitStream.write(indices);
             } catch (Exception ex) {
                 throw new ImageCompressionException("Unable to write indices to OutBitStream.", ex);
             }
+            // TODO: Fill plane data size
 
             stopwatch.stop();
             Log("Plane time: " + stopwatch.getElapsedTimeString());
diff --git a/src/main/java/azgracompress/compression/SQImageDecompressor.java b/src/main/java/azgracompress/compression/SQImageDecompressor.java
index ead979a372e7c91939eacbe385fda805ed4655a5..75eb8e2925960f3d1b65acd2671ad14eb1de147d 100644
--- a/src/main/java/azgracompress/compression/SQImageDecompressor.java
+++ b/src/main/java/azgracompress/compression/SQImageDecompressor.java
@@ -4,6 +4,7 @@ import azgracompress.cli.ParsedCliOptions;
 import azgracompress.compression.exception.ImageDecompressionException;
 import azgracompress.fileformat.QCMPFileHeader;
 import azgracompress.huffman.Huffman;
+import azgracompress.huffman.HuffmanNode;
 import azgracompress.io.InBitStream;
 import azgracompress.quantization.scalar.ScalarQuantizationCodebook;
 import azgracompress.utilities.Stopwatch;
@@ -39,17 +40,26 @@ public class SQImageDecompressor extends CompressorDecompressorBase implements I
         // Quantization value count.
         final int codebookSize = (int) Math.pow(2, header.getBitsPerPixel());
 
-        // Total codebook size in bytes.
-        long codebookDataSize = (2 * codebookSize) * (header.isCodebookPerPlane() ? header.getImageSizeZ() : 1);
+        // Total codebook size in bytes. Also symbol frequencies for Huffman.
+        long codebookDataSize = ((2 * codebookSize) + (LONG_BYTES * codebookSize)) *
+                (header.isCodebookPerPlane() ? header.getImageSizeZ() : 1);
 
-        // Data size of single plane indices.
-        final long planeIndicesDataSize =
-                (long) Math.ceil(((header.getImageSizeX() * header.getImageSizeY()) * header.getBitsPerPixel()) / 8.0);
+        // Indices are encoded using huffman. Plane data size is written in the header.
+        long[] planeDataSizes = header.getPlaneDataSizes();
+        long totalPlaneDataSize = 0;
+        for (final long planeDataSize : planeDataSizes) {
+            totalPlaneDataSize += planeDataSize;
+        }
 
-        // All planes data size.
-        final long allPlaneIndicesDataSize = planeIndicesDataSize * header.getImageSizeZ();
+        //        // Data size of single plane indices.
+        //        final long planeIndicesDataSize =
+        //                (long) Math.ceil(((header.getImageSizeX() * header.getImageSizeY()) * header
+        //                        .getBitsPerPixel()) / 8.0);
+        //
+        //        // All planes data size.
+        //        final long allPlaneIndicesDataSize = planeIndicesDataSize * header.getImageSizeZ();
 
-        return (codebookDataSize + allPlaneIndicesDataSize);
+        return (codebookDataSize + totalPlaneDataSize);
     }
 
     @Override
@@ -89,17 +99,24 @@ public class SQImageDecompressor extends CompressorDecompressorBase implements I
             Log(String.format("Decompressing plane %d...", planeIndex));
 
             byte[] decompressedPlaneData = null;
+            final int planeDataSize = (int) header.getPlaneDataSizes()[planeIndex];
             try (InBitStream inBitStream = new InBitStream(compressedStream,
                                                            header.getBitsPerPixel(),
-                                                           planeIndicesDataSize)) {
+                                                           planeDataSize)) {
                 inBitStream.readToBuffer();
                 inBitStream.setAllowReadFromUnderlyingStream(false);
-                final int[] indices = inBitStream.readNValues(planePixelCount);
 
                 int[] decompressedValues = new int[planePixelCount];
-                for (int i = 0; i < planePixelCount; i++) {
-                    decompressedValues[i] = quantizationValues[indices[i]];
+                for (int pixel = 0; pixel < planePixelCount; pixel++) {
+                    HuffmanNode currentHuffmanNode = huffman.getRoot();
+                    boolean bit;
+                    while (!currentHuffmanNode.isLeaf()) {
+                        bit = inBitStream.readBit();
+                        currentHuffmanNode = currentHuffmanNode.traverse(bit);
+                    }
+                    decompressedValues[pixel] = quantizationValues[currentHuffmanNode.getSymbol()];
                 }
+
                 decompressedPlaneData = TypeConverter.unsignedShortArrayToByteArray(decompressedValues, false);
diff --git a/src/main/java/azgracompress/fileformat/QCMPFileHeader.java b/src/main/java/azgracompress/fileformat/QCMPFileHeader.java
index 0912f7e5dd85b4fc89e0f27dad29663c1d487bdd..3359e62faa2500d7608e4626882c739110c6663e 100644
--- a/src/main/java/azgracompress/fileformat/QCMPFileHeader.java
+++ b/src/main/java/azgracompress/fileformat/QCMPFileHeader.java
@@ -9,7 +9,7 @@ import java.io.DataOutputStream;
 import java.io.IOException;
 
 public class QCMPFileHeader {
-    public static final int QCMP_HEADER_SIZE = 23;
+    public static final int BASE_QCMP_HEADER_SIZE = 23;
     public static final String QCMP_MAGIC_VALUE = "QCMPFILE";
 
     private String magicValue = QCMP_MAGIC_VALUE;
@@ -25,6 +25,8 @@ public class QCMPFileHeader {
     private int vectorSizeY;
     private int vectorSizeZ;
 
+    private long[] planeDataSizes;
+
     /**
      * Validate that all header values are in their valid range.
@@ -79,7 +81,7 @@ public class QCMPFileHeader {
 
     public boolean readHeader(DataInputStream inputStream) throws IOException {
-        if (inputStream.available() < QCMP_HEADER_SIZE) {
+        if (inputStream.available() < BASE_QCMP_HEADER_SIZE) {
             return false;
         }
 
@@ -107,6 +109,12 @@ public class QCMPFileHeader {
         vectorSizeY = inputStream.readUnsignedShort();
         vectorSizeZ = inputStream.readUnsignedShort();
 
+        planeDataSizes = new long[imageSizeZ];
+        for (int i = 0; i < imageSizeZ; i++) {
+            final long readValue = inputStream.readInt();
+            planeDataSizes[i] = (readValue & 0x00000000FFFFFFFFL);
+        }
+
         return true;
     }
 
@@ -201,4 +209,12 @@ public class QCMPFileHeader {
         vectorSizeY = vectorDims.getY();
         vectorSizeZ = 1;
     }
+
+    public long[] getPlaneDataSizes() {
+        return planeDataSizes;
+    }
+
+    public long getHeaderSize() {
+        return BASE_QCMP_HEADER_SIZE + (imageSizeZ * 4);
+    }
 }
\ No newline at end of file
diff --git a/src/main/java/azgracompress/huffman/Huffman.java b/src/main/java/azgracompress/huffman/Huffman.java
index 5586088afc3915be7cd426965ff94bbfe1e6072d..8a13ebfbef2cf84a04b0a671b52222d013b5dde9 100644
--- a/src/main/java/azgracompress/huffman/Huffman.java
+++ b/src/main/java/azgracompress/huffman/Huffman.java
@@ -1,56 +1,11 @@
 package azgracompress.huffman;
 
-import org.jetbrains.annotations.NotNull;
-
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.PriorityQueue;
 
 public class Huffman {
-
-    class Node implements Comparable<Node> {
-        private int symbol = -1;
-        private long symbolFrequency = -1;
-
-        private boolean bit;
-        private boolean leaf = false;
-        private double probability = 0.0;
-
-        final Node subNodeA;
-        final Node subNodeB;
-
-        public Node(final int symbol, final double probability, final long frequency) {
-            this.symbol = symbol;
-            this.probability = probability;
-            this.symbolFrequency = frequency;
-            subNodeA = null;
-            subNodeB = null;
-            this.leaf = true;
-        }
-
-        public Node(final double probability, Node parentA, Node parentB) {
-            this.probability = probability;
-            this.subNodeA = parentA;
-            this.subNodeB = parentB;
-        }
-
-        Node traverse(final boolean bit) {
-            if (subNodeA != null && subNodeA.bit == bit)
-                return subNodeA;
-            if (subNodeB != null && subNodeB.bit == bit)
-                return subNodeB;
-
-            assert (false) : "Corrupted huffman tree";
-            return null;
-        }
-
-        @Override
-        public int compareTo(@NotNull Huffman.Node otherNode) {
-            return Double.compare(probability, otherNode.probability);
-        }
-    }
-
-    Node root = null;
+    HuffmanNode root = null;
     HashMap<Integer, boolean[]> symbolCodes;
     final int[] symbols;
     final long[] symbolFrequencies;
@@ -62,20 +17,26 @@ public class Huffman {
     }
 
     public void buildHuffmanTree() {
-        PriorityQueue<Node> queue = buildPriorityQueue();
+        PriorityQueue<HuffmanNode> queue = buildPriorityQueue();
 
         while (queue.size() != 1) {
-            final Node parentA = queue.poll();
-            final Node parentB = queue.poll();
-            assert (parentA.probability <= parentB.probability);
+            final HuffmanNode parentA = queue.poll();
+            final HuffmanNode parentB = queue.poll();
+            if (!(parentA.getProbability() <= parentB.getProbability())) {
+                System.err.println(String.format("Parent A prob: %.6f\nParent B prob: %.6f",
+                                                 parentA.getProbability(),
+                                                 parentB.getProbability()));
+                assert (parentA.getProbability() <= parentB.getProbability());
+            }
             assert (parentA != null && parentB != null);
 
-            parentA.bit = true;
-            parentB.bit = false;
-            final double mergedProbabilities = parentA.probability + parentB.probability;
-            final Node mergedNode = new Node(mergedProbabilities, parentA, parentB);
+            parentA.setBit(1);
+            parentB.setBit(0);
+
+            final double mergedProbabilities = parentA.getProbability() + parentB.getProbability();
+            final HuffmanNode mergedNode = new HuffmanNode(mergedProbabilities, parentA, parentB);
             queue.add(mergedNode);
         }
         root = queue.poll();
@@ -88,10 +49,11 @@ public class Huffman {
         traverseSymbolCodes(root, new ArrayList<Boolean>());
     }
 
-    private void traverseSymbolCodes(Node currentNode, ArrayList<Boolean> currentCode) {
+    private void traverseSymbolCodes(HuffmanNode currentNode, ArrayList<Boolean> currentCode) {
         boolean inLeaf = true;
-        if (!currentNode.leaf) {
-            currentCode.add(currentNode.bit);
+        final int bit = currentNode.getBit();
+        if (bit != -1) {
+            currentCode.add(bit == 1);
         }
 
         if (currentNode.subNodeA != null) {
@@ -106,28 +68,29 @@ public class Huffman {
         }
 
         if (inLeaf) {
-            assert (currentNode.leaf);
+            assert (currentNode.isLeaf());
+            //currentNode.setIsLeaf(true);
             boolean[] finalSymbolCode = new boolean[currentCode.size()];
             for (int i = 0; i < finalSymbolCode.length; i++) {
                 finalSymbolCode[i] = currentCode.get(i);
             }
-            symbolCodes.put(currentNode.symbol, finalSymbolCode);
+            symbolCodes.put(currentNode.getSymbol(), finalSymbolCode);
         }
     }
 
-    private PriorityQueue<Node> buildPriorityQueue() {
+    private PriorityQueue<HuffmanNode> buildPriorityQueue() {
        double totalFrequency = 0.0;
        for (final long symbolFrequency : symbolFrequencies) {
            totalFrequency += symbolFrequency;
        }
 
-        PriorityQueue<Node> queue = new PriorityQueue<>(symbols.length);
+        PriorityQueue<HuffmanNode> queue = new PriorityQueue<>(symbols.length);
        for (int sIndex = 0; sIndex < symbols.length; sIndex++) {
            final double symbolProbability = (double) symbolFrequencies[sIndex] / totalFrequency;
-            queue.add(new Node(symbols[sIndex], symbolProbability, symbolFrequencies[sIndex]));
+            queue.add(new HuffmanNode(symbols[sIndex], symbolProbability, symbolFrequencies[sIndex]));
        }
 
        return queue;
@@ -138,7 +101,7 @@ public class Huffman {
         return symbolCodes.get(symbol);
     }
 
-    public Node getRoot() {
+    public HuffmanNode getRoot() {
         return root;
     }
 }
diff --git a/src/main/java/azgracompress/huffman/HuffmanNode.java b/src/main/java/azgracompress/huffman/HuffmanNode.java
new file mode 100644
index 0000000000000000000000000000000000000000..4b0ea724055980ebf3770dfcd103bb841f6b8705
--- /dev/null
+++ b/src/main/java/azgracompress/huffman/HuffmanNode.java
@@ -0,0 +1,77 @@
+package azgracompress.huffman;
+
+import org.jetbrains.annotations.NotNull;
+
+public class HuffmanNode implements Comparable<HuffmanNode> {
+    private int symbol = -1;
+    private long symbolFrequency = -1;
+
+    private int bit = -1;
+    private boolean leaf = false;
+    private double probability = 0.0;
+
+    final HuffmanNode subNodeA;
+    final HuffmanNode subNodeB;
+
+    public HuffmanNode(final int symbol, final double probability, final long frequency) {
+        this.symbol = symbol;
+        this.probability = probability;
+        this.symbolFrequency = frequency;
+        subNodeA = null;
+        subNodeB = null;
+        this.leaf = true;
+    }
+
+    public HuffmanNode(final double probability, HuffmanNode parentA, HuffmanNode parentB) {
+        this.probability = probability;
+        this.subNodeA = parentA;
+        this.subNodeB = parentB;
+    }
+
+    public HuffmanNode traverse(final boolean queryBit) {
+        if (subNodeA != null && subNodeA.bit == (queryBit ? 1 : 0))
+            return subNodeA;
+        if (subNodeB != null && subNodeB.bit == (queryBit ? 1 : 0))
+            return subNodeB;
+
+        assert (false) : "Corrupted huffman tree";
+        return null;
+    }
+
+    @Override
+    public int compareTo(@NotNull HuffmanNode otherNode) {
+        return Double.compare(probability, otherNode.probability);
+    }
+
+    public void setBit(int bit) {
+        this.bit = bit;
+    }
+
+    public int getSymbol() {
+        return symbol;
+    }
+
+    public long getSymbolFrequency() {
+        return symbolFrequency;
+    }
+
+    public int getBit() {
+        return bit;
+    }
+
+    public boolean isLeaf() {
+        return leaf;
+    }
+
+    public double getProbability() {
+        return probability;
+    }
+
+    public HuffmanNode getSubNodeA() {
+        return subNodeA;
+    }
+
+    public HuffmanNode getSubNodeB() {
+        return subNodeB;
+    }
+}
\ No newline at end of file
diff --git a/src/main/java/azgracompress/io/InBitStream.java b/src/main/java/azgracompress/io/InBitStream.java
index c64c120652212e616db550ab8db86df394e3dad7..7945e4e5fc4595a141cfdd4ab868f517c96555dd 100644
--- a/src/main/java/azgracompress/io/InBitStream.java
+++ b/src/main/java/azgracompress/io/InBitStream.java
@@ -52,7 +52,12 @@ public class InBitStream implements AutoCloseable {
         }
     }
 
-    private int readBit() throws IOException {
+
+    public boolean readBit() throws IOException {
+        return (readBitFromBuffer() == 1);
+    }
+
+    private int readBitFromBuffer() throws IOException {
         if (bitBufferSize == 0) {
             readByteToBitBuffer();
         }
@@ -67,7 +72,7 @@ public class InBitStream implements AutoCloseable {
         //writing => bit = (value & (1 << shift));
         for (int shift = 0; shift < bitsPerValue; shift++) {
-            bit = readBit();
+            bit = readBitFromBuffer();
             result |= (bit << shift);
         }
         return result;
diff --git a/src/main/java/azgracompress/io/OutBitStream.java b/src/main/java/azgracompress/io/OutBitStream.java
index 6a840e6d3f9d3d73667b43280153ec7427d5575d..2d3b88625902701f6b1d5ba793ebd6dc68a396ea 100644
--- a/src/main/java/azgracompress/io/OutBitStream.java
+++ b/src/main/java/azgracompress/io/OutBitStream.java
@@ -14,6 +14,8 @@ public class OutBitStream implements AutoCloseable {
 
     private final int bitsPerValue;
 
+    private long bytesWritten = 0;
+
     public OutBitStream(OutputStream outputStream, final int bitsPerValue, final int bufferSize) {
         outStream = outputStream;
 
@@ -31,6 +33,7 @@ public class OutBitStream implements AutoCloseable {
      */
     private void flushBuffer() throws IOException {
         outStream.write(buffer, 0, bufferPosition);
+        bytesWritten += bufferPosition;
         bufferPosition = 0;
     }
 
@@ -108,4 +111,14 @@ public class OutBitStream implements AutoCloseable {
     public void close() throws Exception {
         flush();
     }
+
+    /**
+     * Get the number of bytes written to this stream so far.
+     *
+     * @return Bytes written.
+     */
+    public long getBytesWritten() {
+        // Bytes written to the underlying stream + bytes count in this stream buffer.
+        return bytesWritten + bufferPosition + ((bitBufferSize > 0) ? 1 : 0);
+    }
 }
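
Note on the resulting file layout (a reviewer's sketch, not part of the patch): after this change the QCMP header is no longer a fixed 23 bytes. readHeader() now also consumes one unsigned 32-bit Huffman-coded data size per plane, so getHeaderSize() returns BASE_QCMP_HEADER_SIZE + 4 * imageSizeZ, and getExpectedDataSize() adds the codebook bytes (2 per quantization value plus LONG_BYTES per symbol frequency) to the sum of those per-plane sizes. The helper below only restates that arithmetic in one place; the class and method names are invented for illustration.

    import java.util.Arrays;

    // Hypothetical helper (not part of the patch): recomputes the expected size of a
    // scalar-quantized QCMP file from the values the new header carries.
    final class QcmpExpectedSizeSketch {
        static final int BASE_QCMP_HEADER_SIZE = 23; // fixed part of the header
        static final int LONG_BYTES = 8;             // bytes per Huffman symbol frequency

        static long expectedFileSize(final int bitsPerPixel,
                                     final int imageSizeZ,
                                     final boolean codebookPerPlane,
                                     final long[] planeDataSizes) {
            // Header: 23 fixed bytes plus one unsigned 32-bit plane data size per plane.
            final long headerSize = BASE_QCMP_HEADER_SIZE + (4L * imageSizeZ);

            // Codebook: 2 bytes per quantization value plus 8 bytes per symbol frequency,
            // stored once per plane or once for the whole stack.
            final int codebookSize = (int) Math.pow(2, bitsPerPixel);
            final long codebookDataSize =
                    ((2L * codebookSize) + ((long) LONG_BYTES * codebookSize)) * (codebookPerPlane ? imageSizeZ : 1);

            // Huffman-coded indices: the per-plane byte counts that the compressor records
            // via OutBitStream.getBytesWritten() and readHeader() loads back.
            final long totalPlaneDataSize = Arrays.stream(planeDataSizes).sum();

            return headerSize + codebookDataSize + totalPlaneDataSize;
        }
    }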