Skip to content
Snippets Groups Projects
Commit cf76dd71 authored by Vojtech Moravec's avatar Vojtech Moravec
Browse files

First working version of SQ with huffman coding.

parent 5b16f45b
No related branches found
No related tags found
No related merge requests found
Showing with 196 additions and 84 deletions
......@@ -12,8 +12,6 @@ import org.apache.commons.cli.*;
import java.io.IOException;
public class DataCompressor {
public static void main(String[] args) {
Options options = CliConstants.getOptions();
......
......@@ -3,6 +3,7 @@ package azgracompress.compression;
import azgracompress.cli.ParsedCliOptions;
public abstract class CompressorDecompressorBase {
public static final int LONG_BYTES = 8;
public static final String EXTENSION = ".QCMP";
protected final ParsedCliOptions options;
......
......@@ -109,8 +109,10 @@ public class ImageDecompressor extends CompressorDecompressorBase {
logBuilder.append("Vector size Z:\t\t").append(header.getVectorSizeZ()).append('\n');
final long fileSize = new File(options.getInputFile()).length();
final long dataSize = fileSize - QCMPFileHeader.QCMP_HEADER_SIZE;
final long dataSize = fileSize - header.getHeaderSize();
final IImageDecompressor decompressor = getImageDecompressor(header);
if (decompressor != null) {
final long expectedDataSize = decompressor.getExpectedDataSize(header);
validFile = (dataSize == expectedDataSize);
......@@ -149,7 +151,7 @@ public class ImageDecompressor extends CompressorDecompressorBase {
}
final long fileSize = new File(options.getInputFile()).length();
final long dataSize = fileSize - QCMPFileHeader.QCMP_HEADER_SIZE;
final long dataSize = fileSize - header.getHeaderSize();
final long expectedDataSize = imageDecompressor.getExpectedDataSize(header);
if (dataSize != expectedDataSize) {
System.err.println("Invalid file size.");
......
......@@ -90,7 +90,7 @@ public class SQImageCompressor extends CompressorDecompressorBase implements IIm
*/
public long[] compress(DataOutputStream compressStream) throws ImageCompressionException {
Stopwatch stopwatch = new Stopwatch();
long[] planeDataSizes = new long[options.getImageDimension().getZ()];
final boolean hasGeneralQuantizer = options.hasCodebookCacheFolder() || options.hasReferencePlaneIndex();
ScalarQuantizer quantizer = null;
......@@ -126,6 +126,8 @@ public class SQImageCompressor extends CompressorDecompressorBase implements IIm
}
final int[] planeIndices = getPlaneIndicesForCompression();
long[] planeDataSizes = new long[planeIndices.length];
int planeCounter = 0;
for (final int planeIndex : planeIndices) {
stopwatch.restart();
Log(String.format("Loading plane %d.", planeIndex));
......@@ -148,20 +150,38 @@ public class SQImageCompressor extends CompressorDecompressorBase implements IIm
huffman = new Huffman(huffmanSymbols, quantizer.getCodebook().getSymbolFrequencies());
huffman.buildHuffmanTree();
}
assert (quantizer != null) : "Scalar Quantizer wasn't initialized.";
assert (huffman != null) : "Huffman wasn't initialized.";
Log("Compressing plane...");
final int[] indices = quantizer.quantizeIntoIndices(plane.getData(), 1);
// ////////////////////////
// for (int i = 0; i < indices.length; i++) {
// final boolean[] huffmanCode = huffman.getCode(indices[i]);
// HuffmanNode currentHuffmanNode = huffman.getRoot();
// boolean bit;
// int index = 0;
// while (!currentHuffmanNode.isLeaf()) {
// bit = huffmanCode[index++];
// currentHuffmanNode = currentHuffmanNode.traverse(bit);
// }
// assert (indices[i] == currentHuffmanNode.getSymbol());
// }
// ////////////////////////////////
try (OutBitStream outBitStream = new OutBitStream(compressStream, options.getBitsPerPixel(), 2048)) {
for (final int index : indices) {
outBitStream.write(huffman.getCode(index));
}
planeDataSizes[planeCounter++] = outBitStream.getBytesWritten();
//outBitStream.write(indices);
} catch (Exception ex) {
throw new ImageCompressionException("Unable to write indices to OutBitStream.", ex);
}
// TODO: Fill plane data size
stopwatch.stop();
Log("Plane time: " + stopwatch.getElapsedTimeString());
......
......@@ -4,6 +4,7 @@ import azgracompress.cli.ParsedCliOptions;
import azgracompress.compression.exception.ImageDecompressionException;
import azgracompress.fileformat.QCMPFileHeader;
import azgracompress.huffman.Huffman;
import azgracompress.huffman.HuffmanNode;
import azgracompress.io.InBitStream;
import azgracompress.quantization.scalar.ScalarQuantizationCodebook;
import azgracompress.utilities.Stopwatch;
......@@ -39,17 +40,26 @@ public class SQImageDecompressor extends CompressorDecompressorBase implements I
// Quantization value count.
final int codebookSize = (int) Math.pow(2, header.getBitsPerPixel());
// Total codebook size in bytes.
long codebookDataSize = (2 * codebookSize) * (header.isCodebookPerPlane() ? header.getImageSizeZ() : 1);
// Total codebook size in bytes. Also symbol frequencies for Huffman.
long codebookDataSize = ((2 * codebookSize) + (LONG_BYTES * codebookSize)) *
(header.isCodebookPerPlane() ? header.getImageSizeZ() : 1);
// Data size of single plane indices.
final long planeIndicesDataSize =
(long) Math.ceil(((header.getImageSizeX() * header.getImageSizeY()) * header.getBitsPerPixel()) / 8.0);
// Indices are encoded using huffman. Plane data size is written in the header.
long[] planeDataSizes = header.getPlaneDataSizes();
long totalPlaneDataSize = 0;
for (final long planeDataSize : planeDataSizes) {
totalPlaneDataSize += planeDataSize;
}
// All planes data size.
final long allPlaneIndicesDataSize = planeIndicesDataSize * header.getImageSizeZ();
// // Data size of single plane indices.
// final long planeIndicesDataSize =
// (long) Math.ceil(((header.getImageSizeX() * header.getImageSizeY()) * header
// .getBitsPerPixel()) / 8.0);
//
// // All planes data size.
// final long allPlaneIndicesDataSize = planeIndicesDataSize * header.getImageSizeZ();
return (codebookDataSize + allPlaneIndicesDataSize);
return (codebookDataSize + totalPlaneDataSize);
}
@Override
......@@ -89,17 +99,24 @@ public class SQImageDecompressor extends CompressorDecompressorBase implements I
Log(String.format("Decompressing plane %d...", planeIndex));
byte[] decompressedPlaneData = null;
final int planeDataSize = (int) header.getPlaneDataSizes()[planeIndex];
try (InBitStream inBitStream = new InBitStream(compressedStream,
header.getBitsPerPixel(),
planeIndicesDataSize)) {
planeDataSize)) {
inBitStream.readToBuffer();
inBitStream.setAllowReadFromUnderlyingStream(false);
final int[] indices = inBitStream.readNValues(planePixelCount);
int[] decompressedValues = new int[planePixelCount];
for (int i = 0; i < planePixelCount; i++) {
decompressedValues[i] = quantizationValues[indices[i]];
for (int pixel = 0; pixel < planePixelCount; pixel++) {
HuffmanNode currentHuffmanNode = huffman.getRoot();
boolean bit;
while (!currentHuffmanNode.isLeaf()) {
bit = inBitStream.readBit();
currentHuffmanNode = currentHuffmanNode.traverse(bit);
}
decompressedValues[pixel] = quantizationValues[currentHuffmanNode.getSymbol()];
}
decompressedPlaneData =
TypeConverter.unsignedShortArrayToByteArray(decompressedValues, false);
......
......@@ -9,7 +9,7 @@ import java.io.DataOutputStream;
import java.io.IOException;
public class QCMPFileHeader {
public static final int QCMP_HEADER_SIZE = 23;
public static final int BASE_QCMP_HEADER_SIZE = 23;
public static final String QCMP_MAGIC_VALUE = "QCMPFILE";
private String magicValue = QCMP_MAGIC_VALUE;
......@@ -25,6 +25,8 @@ public class QCMPFileHeader {
private int vectorSizeY;
private int vectorSizeZ;
private long[] planeDataSizes;
/**
* Validate that all header values are in their valid range.
......@@ -79,7 +81,7 @@ public class QCMPFileHeader {
public boolean readHeader(DataInputStream inputStream) throws IOException {
if (inputStream.available() < QCMP_HEADER_SIZE) {
if (inputStream.available() < BASE_QCMP_HEADER_SIZE) {
return false;
}
......@@ -107,6 +109,12 @@ public class QCMPFileHeader {
vectorSizeY = inputStream.readUnsignedShort();
vectorSizeZ = inputStream.readUnsignedShort();
planeDataSizes = new long[imageSizeZ];
for (int i = 0; i < imageSizeZ; i++) {
final long readValue = inputStream.readInt();
planeDataSizes[i] = (readValue & 0x00000000FFFFFFFFL);
}
return true;
}
......@@ -201,4 +209,12 @@ public class QCMPFileHeader {
vectorSizeY = vectorDims.getY();
vectorSizeZ = 1;
}
/**
 * Get the compressed data size (in bytes) of every plane, indexed by plane (Z) index.
 * Populated by readHeader() from the per-plane 32-bit size fields appended after the base header.
 *
 * NOTE(review): returns the internal array without a defensive copy — callers can mutate
 * header state through it; confirm whether a clone is warranted.
 *
 * @return Array of per-plane compressed data sizes in bytes.
 */
public long[] getPlaneDataSizes() {
return planeDataSizes;
}
/**
 * Total on-disk header size in bytes: the fixed base header plus one 4-byte
 * (unsigned int) plane-data-size entry per plane, matching what readHeader() consumes.
 *
 * @return Header size in bytes for this file's plane count.
 */
public long getHeaderSize() {
// 4 bytes per plane: readHeader() reads each plane size with readInt().
return BASE_QCMP_HEADER_SIZE + (imageSizeZ * 4);
}
}
\ No newline at end of file
package azgracompress.huffman;
import org.jetbrains.annotations.NotNull;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.PriorityQueue;
public class Huffman {
// Legacy inner Huffman-tree node (this is the removed side of the diff; it was
// superseded by the standalone HuffmanNode class with an int bit field).
class Node implements Comparable<Node> {
// Symbol stored in a leaf; -1 for internal (merged) nodes.
private int symbol = -1;
// Occurrence count of the leaf symbol; -1 for internal nodes.
private long symbolFrequency = -1;
// Code bit assigned by the tree builder when this node is merged under a parent.
// NOTE(review): defaults to false, so traverse() on a not-yet-labeled tree can
// match the wrong child — the replacement class uses -1 as "unassigned" instead.
private boolean bit;
// True only for leaf nodes carrying a symbol.
private boolean leaf = false;
// Probability mass of this subtree; priority-queue ordering key.
private double probability = 0.0;
// Children of an internal node; both null in leaves.
final Node subNodeA;
final Node subNodeB;
// Leaf constructor: stores the symbol, its probability and frequency.
public Node(final int symbol, final double probability, final long frequency) {
this.symbol = symbol;
this.probability = probability;
this.symbolFrequency = frequency;
subNodeA = null;
subNodeB = null;
this.leaf = true;
}
// Internal-node constructor: merges two subtrees under the summed probability.
public Node(final double probability, Node parentA, Node parentB) {
this.probability = probability;
this.subNodeA = parentA;
this.subNodeB = parentB;
}
// Follow the child whose assigned code bit equals the query bit.
Node traverse(final boolean bit) {
if (subNodeA != null && subNodeA.bit == bit)
return subNodeA;
if (subNodeB != null && subNodeB.bit == bit)
return subNodeB;
// Reached only if neither child carries the requested bit.
assert (false) : "Corrupted huffman tree";
return null;
}
@Override
public int compareTo(@NotNull Huffman.Node otherNode) {
// Min-heap ordering by subtree probability (lowest merged first).
return Double.compare(probability, otherNode.probability);
}
}
Node root = null;
HuffmanNode root = null;
HashMap<Integer, boolean[]> symbolCodes;
final int[] symbols;
final long[] symbolFrequencies;
......@@ -62,20 +17,26 @@ public class Huffman {
}
public void buildHuffmanTree() {
PriorityQueue<Node> queue = buildPriorityQueue();
PriorityQueue<HuffmanNode> queue = buildPriorityQueue();
while (queue.size() != 1) {
final Node parentA = queue.poll();
final Node parentB = queue.poll();
assert (parentA.probability <= parentB.probability);
final HuffmanNode parentA = queue.poll();
final HuffmanNode parentB = queue.poll();
if (!(parentA.getProbability() <= parentB.getProbability())) {
System.err.println(String.format("Parent A prob: %.6f\nParent B prob: %.6f",
parentA.getProbability(),
parentB.getProbability()));
assert (parentA.getProbability() <= parentB.getProbability());
}
assert (parentA != null && parentB != null);
parentA.bit = true;
parentB.bit = false;
final double mergedProbabilities = parentA.probability + parentB.probability;
final Node mergedNode = new Node(mergedProbabilities, parentA, parentB);
parentA.setBit(1);
parentB.setBit(0);
final double mergedProbabilities = parentA.getProbability() + parentB.getProbability();
final HuffmanNode mergedNode = new HuffmanNode(mergedProbabilities, parentA, parentB);
queue.add(mergedNode);
}
root = queue.poll();
......@@ -88,10 +49,11 @@ public class Huffman {
traverseSymbolCodes(root, new ArrayList<Boolean>());
}
private void traverseSymbolCodes(Node currentNode, ArrayList<Boolean> currentCode) {
private void traverseSymbolCodes(HuffmanNode currentNode, ArrayList<Boolean> currentCode) {
boolean inLeaf = true;
if (!currentNode.leaf) {
currentCode.add(currentNode.bit);
final int bit = currentNode.getBit();
if (bit != -1) {
currentCode.add(bit == 1);
}
if (currentNode.subNodeA != null) {
......@@ -106,28 +68,29 @@ public class Huffman {
}
if (inLeaf) {
assert (currentNode.leaf);
assert (currentNode.isLeaf());
//currentNode.setIsLeaf(true);
boolean[] finalSymbolCode = new boolean[currentCode.size()];
for (int i = 0; i < finalSymbolCode.length; i++) {
finalSymbolCode[i] = currentCode.get(i);
}
symbolCodes.put(currentNode.symbol, finalSymbolCode);
symbolCodes.put(currentNode.getSymbol(), finalSymbolCode);
}
}
private PriorityQueue<Node> buildPriorityQueue() {
private PriorityQueue<HuffmanNode> buildPriorityQueue() {
double totalFrequency = 0.0;
for (final long symbolFrequency : symbolFrequencies) {
totalFrequency += symbolFrequency;
}
PriorityQueue<Node> queue = new PriorityQueue<>(symbols.length);
PriorityQueue<HuffmanNode> queue = new PriorityQueue<>(symbols.length);
for (int sIndex = 0; sIndex < symbols.length; sIndex++) {
final double symbolProbability = (double) symbolFrequencies[sIndex] / totalFrequency;
queue.add(new Node(symbols[sIndex], symbolProbability, symbolFrequencies[sIndex]));
queue.add(new HuffmanNode(symbols[sIndex], symbolProbability, symbolFrequencies[sIndex]));
}
return queue;
......@@ -138,7 +101,7 @@ public class Huffman {
return symbolCodes.get(symbol);
}
public Node getRoot() {
public HuffmanNode getRoot() {
return root;
}
}
package azgracompress.huffman;
import org.jetbrains.annotations.NotNull;
/**
 * A single node of the Huffman coding tree.
 *
 * Leaves carry a quantization symbol and its occurrence frequency; internal nodes
 * only aggregate the probability mass of their two subtrees. The code bit of a
 * node is assigned externally (via {@link #setBit(int)}) while the tree is being
 * built and stays -1 until then.
 */
public class HuffmanNode implements Comparable<HuffmanNode> {

    /** Symbol stored in a leaf; -1 for internal nodes. */
    private int symbol = -1;
    /** Occurrence count of the leaf symbol; -1 for internal nodes. */
    private long symbolFrequency = -1;
    /** Code bit (0 or 1) assigned by the tree builder; -1 while unassigned. */
    private int bit = -1;
    /** True only for leaf nodes carrying a symbol. */
    private boolean leaf = false;
    /** Probability mass of this subtree; ordering key for the build queue. */
    private double probability = 0.0;

    /** Children of an internal node; both null in leaves. */
    final HuffmanNode subNodeA;
    final HuffmanNode subNodeB;

    /**
     * Create a leaf node for one symbol.
     *
     * @param symbol      Symbol value stored in this leaf.
     * @param probability Symbol probability.
     * @param frequency   Symbol occurrence count.
     */
    public HuffmanNode(final int symbol, final double probability, final long frequency) {
        this.symbol = symbol;
        this.probability = probability;
        this.symbolFrequency = frequency;
        this.subNodeA = null;
        this.subNodeB = null;
        this.leaf = true;
    }

    /**
     * Create an internal node merging two subtrees.
     *
     * @param probability Combined probability of both subtrees.
     * @param parentA     First subtree.
     * @param parentB     Second subtree.
     */
    public HuffmanNode(final double probability, HuffmanNode parentA, HuffmanNode parentB) {
        this.probability = probability;
        this.subNodeA = parentA;
        this.subNodeB = parentB;
    }

    /**
     * Descend one level, choosing the child whose assigned code bit matches.
     *
     * @param queryBit Code bit read from the stream (true = 1, false = 0).
     * @return Matching child node, or null after an assertion failure if the tree is corrupted.
     */
    public HuffmanNode traverse(final boolean queryBit) {
        final int wantedBit = queryBit ? 1 : 0;
        if (subNodeA != null && subNodeA.bit == wantedBit) {
            return subNodeA;
        }
        if (subNodeB != null && subNodeB.bit == wantedBit) {
            return subNodeB;
        }
        // Neither child carries the requested bit.
        assert (false) : "Corrupted huffman tree";
        return null;
    }

    @Override
    public int compareTo(@NotNull HuffmanNode otherNode) {
        // Order nodes by subtree probability (smallest merged first by the builder).
        return Double.compare(this.probability, otherNode.probability);
    }

    /** Assign this node's code bit (0 or 1); called by the tree builder. */
    public void setBit(int bit) {
        this.bit = bit;
    }

    /** @return Leaf symbol, or -1 for internal nodes. */
    public int getSymbol() {
        return this.symbol;
    }

    /** @return Leaf symbol frequency, or -1 for internal nodes. */
    public long getSymbolFrequency() {
        return this.symbolFrequency;
    }

    /** @return Assigned code bit, or -1 if not yet assigned. */
    public int getBit() {
        return this.bit;
    }

    /** @return True when this node is a leaf. */
    public boolean isLeaf() {
        return this.leaf;
    }

    /** @return Probability mass of this subtree. */
    public double getProbability() {
        return this.probability;
    }

    /** @return First child, or null in leaves. */
    public HuffmanNode getSubNodeA() {
        return this.subNodeA;
    }

    /** @return Second child, or null in leaves. */
    public HuffmanNode getSubNodeB() {
        return this.subNodeB;
    }
}
\ No newline at end of file
......@@ -52,7 +52,12 @@ public class InBitStream implements AutoCloseable {
}
}
private int readBit() throws IOException {
/**
 * Read a single bit from the stream as a boolean (1 -> true, 0 -> false).
 * Exposed publicly so Huffman decoding can walk the tree bit by bit.
 *
 * @return True when the next bit is 1.
 * @throws IOException When the underlying stream cannot be read.
 */
public boolean readBit() throws IOException {
return (readBitFromBuffer() == 1);
}
private int readBitFromBuffer() throws IOException {
if (bitBufferSize == 0) {
readByteToBitBuffer();
}
......@@ -67,7 +72,7 @@ public class InBitStream implements AutoCloseable {
//writing => bit = (value & (1 << shift));
for (int shift = 0; shift < bitsPerValue; shift++) {
bit = readBit();
bit = readBitFromBuffer();
result |= (bit << shift);
}
return result;
......
......@@ -14,6 +14,8 @@ public class OutBitStream implements AutoCloseable {
private final int bitsPerValue;
private long bytesWritten = 0;
public OutBitStream(OutputStream outputStream, final int bitsPerValue, final int bufferSize) {
outStream = outputStream;
......@@ -31,6 +33,7 @@ public class OutBitStream implements AutoCloseable {
*/
private void flushBuffer() throws IOException {
// Write out the filled portion of the byte buffer and track the running
// total so getBytesWritten() can report the compressed plane size.
outStream.write(buffer, 0, bufferPosition);
bytesWritten += bufferPosition;
bufferPosition = 0;
}
......@@ -108,4 +111,14 @@ public class OutBitStream implements AutoCloseable {
public void close() throws Exception {
flush();
}
/**
 * Get the number of bytes written to this stream so far, including data still
 * sitting in the internal buffers (not yet flushed to the underlying stream).
 *
 * @return Bytes written.
 */
public long getBytesWritten() {
// Bytes written to the underlying stream + bytes count in this stream buffer.
// A partially filled bit buffer counts as one extra byte, since it will be
// padded and emitted as a full byte on flush.
return bytesWritten + bufferPosition + ((bitBufferSize > 0) ? 1 : 0);
}
}
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please register or sign in to comment