Commit c1d45d0d authored by Vojtech Moravec

Huffman encoding test.

parent 980b7cac
@@ -2,8 +2,7 @@ package azgracompress.benchmark;
 import azgracompress.U16;
 import azgracompress.cli.ParsedCliOptions;
-import azgracompress.io.IPlaneLoader;
-import azgracompress.io.PlaneLoaderFactory;
+import azgracompress.data.V3i;
 import azgracompress.quantization.QTrainIteration;
 import azgracompress.quantization.QuantizationValueCache;
 import azgracompress.quantization.scalar.LloydMaxU16ScalarQuantization;
...
@@ -13,6 +13,14 @@ public abstract class CompressorDecompressorBase {
         this.codebookSize = (int) Math.pow(2, this.options.getBitsPerPixel());
     }
 
+    protected int[] createHuffmanSymbols() {
+        int[] symbols = new int[codebookSize];
+        for (int i = 0; i < codebookSize; i++) {
+            symbols[i] = i;
+        }
+        return symbols;
+    }
+
     protected int[] getPlaneIndicesForCompression() {
         if (options.hasPlaneIndexSet()) {
             return new int[]{options.getPlaneIndex()};
...
@@ -4,10 +4,12 @@ import azgracompress.U16;
 import azgracompress.cli.ParsedCliOptions;
 import azgracompress.compression.exception.ImageCompressionException;
 import azgracompress.data.ImageU16;
+import azgracompress.huffman.Huffman;
 import azgracompress.io.OutBitStream;
 import azgracompress.io.RawDataIO;
 import azgracompress.quantization.QuantizationValueCache;
 import azgracompress.quantization.scalar.LloydMaxU16ScalarQuantization;
+import azgracompress.quantization.scalar.ScalarQuantizationCodebook;
 import azgracompress.quantization.scalar.ScalarQuantizer;
 import azgracompress.utilities.Stopwatch;
@@ -43,11 +45,16 @@ public class SQImageCompressor extends CompressorDecompressorBase implements IIm
      */
     private void writeCodebookToOutputStream(final ScalarQuantizer quantizer,
                                              DataOutputStream compressStream) throws ImageCompressionException {
-        final int[] centroids = quantizer.getCentroids();
+        final ScalarQuantizationCodebook codebook = quantizer.getCodebook();
+        final int[] centroids = codebook.getCentroids();
+        final long[] frequencies = codebook.getSymbolFrequencies();
         try {
             for (final int quantizationValue : centroids) {
                 compressStream.writeShort(quantizationValue);
             }
+            for (final long symbolFrequency : frequencies) {
+                compressStream.writeLong(symbolFrequency);
+            }
         } catch (IOException ioEx) {
             throw new ImageCompressionException("Unable to write codebook to compress stream.", ioEx);
         }
@@ -65,7 +72,8 @@ public class SQImageCompressor extends CompressorDecompressorBase implements IIm
     private ScalarQuantizer loadQuantizerFromCache() throws ImageCompressionException {
         QuantizationValueCache cache = new QuantizationValueCache(options.getCodebookCacheFolder());
         try {
-            final int[] quantizationValues = cache.readCachedValues(options.getInputFileInfo().getFilePath(),
+            final int[] quantizationValues = cache.readCachedValues(options.getInputFile(),
                                                                     codebookSize);
             // TODO(Moravec): FIXME the null value.
             return new ScalarQuantizer(U16.Min, U16.Max, null);
@@ -85,7 +93,10 @@ public class SQImageCompressor extends CompressorDecompressorBase implements IIm
         final boolean hasGeneralQuantizer = options.hasCodebookCacheFolder() || options.hasReferencePlaneIndex();
         ScalarQuantizer quantizer = null;
+        Huffman huffman = null;
+        final int[] huffmanSymbols = createHuffmanSymbols();
         if (options.hasCodebookCacheFolder()) {
+            // TODO(Moravec): Create huffman.
             Log("Loading codebook from cache file.");
             quantizer = loadQuantizerFromCache();
             Log("Cached quantizer created.");
@@ -98,6 +109,7 @@ public class SQImageCompressor extends CompressorDecompressorBase implements IIm
                 referencePlane = RawDataIO.loadImageU16(options.getInputFile(),
                                                         options.getImageDimension(),
                                                         options.getReferencePlaneIndex());
+                // TODO(Moravec): Create huffman.
             } catch (Exception ex) {
                 throw new ImageCompressionException("Unable to load reference plane data.", ex);
             }
@@ -131,15 +143,21 @@ public class SQImageCompressor extends CompressorDecompressorBase implements IIm
                 Log(String.format("Training scalar quantizer from plane %d.", planeIndex));
                 quantizer = trainScalarQuantizerFromData(plane.getData());
                 writeCodebookToOutputStream(quantizer, compressStream);
-            }
 
-            assert (quantizer != null);
+                huffman = new Huffman(huffmanSymbols, quantizer.getCodebook().getSymbolFrequencies());
+                huffman.buildHuffmanTree();
+            }
+
+            assert (quantizer != null) : "Scalar Quantizer wasn't initialized.";
+            assert (huffman != null) : "Huffman wasn't initialized.";
 
             Log("Compressing plane...");
             final int[] indices = quantizer.quantizeIntoIndices(plane.getData(), 1);
             try (OutBitStream outBitStream = new OutBitStream(compressStream, options.getBitsPerPixel(), 2048)) {
-                outBitStream.write(indices);
+                for (final int index : indices) {
+                    outBitStream.write(huffman.getCode(index));
+                }
+                //outBitStream.write(indices);
             } catch (Exception ex) {
                 throw new ImageCompressionException("Unable to write indices to OutBitStream.", ex);
             }
...
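After this change, each compressed plane starts with the codebook: codebookSize centroid values written as 16-bit shorts, followed by codebookSize symbol frequencies written as 64-bit longs. The frequencies are what lets a decompressor rebuild the exact Huffman tree the encoder used. The decoder side is not part of this commit; below is a minimal sketch of the read-back, assuming a DataInputStream (here named inputStream) positioned at the codebook and the codebookSize field inherited from CompressorDecompressorBase.

    // Hypothetical decoder-side counterpart of writeCodebookToOutputStream
    // (not in this commit); inputStream is an assumed DataInputStream over
    // the compressed file.
    final int[] centroids = new int[codebookSize];
    for (int i = 0; i < codebookSize; i++) {
        centroids[i] = inputStream.readUnsignedShort();
    }
    final long[] frequencies = new long[codebookSize];
    for (int i = 0; i < codebookSize; i++) {
        frequencies[i] = inputStream.readLong();
    }
    // With the frequencies restored, the decoder can rebuild the identical
    // Huffman tree (see the Huffman class below).
    Huffman huffman = new Huffman(createHuffmanSymbols(), frequencies);
    huffman.buildHuffmanTree();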
package azgracompress.huffman;

import org.jetbrains.annotations.NotNull;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.PriorityQueue;

public class Huffman {

    class Node implements Comparable<Node> {
        private int symbol = -1;
        private long symbolFrequency = -1;
        private boolean bit;
        private boolean leaf = false;
        private double probability = 0.0;

        final Node subNodeA;
        final Node subNodeB;

        /**
         * Leaf node holding a single symbol.
         */
        public Node(final int symbol, final double probability, final long frequency) {
            this.symbol = symbol;
            this.probability = probability;
            this.symbolFrequency = frequency;
            subNodeA = null;
            subNodeB = null;
            this.leaf = true;
        }

        /**
         * Internal node created by merging the two least probable nodes.
         */
        public Node(final double probability, Node parentA, Node parentB) {
            this.probability = probability;
            this.subNodeA = parentA;
            this.subNodeB = parentB;
        }

        Node traverse(final boolean bit) {
            if (subNodeA != null && subNodeA.bit == bit)
                return subNodeA;
            if (subNodeB != null && subNodeB.bit == bit)
                return subNodeB;
            assert (false) : "Corrupted Huffman tree";
            return null;
        }

        @Override
        public int compareTo(@NotNull Huffman.Node otherNode) {
            return Double.compare(probability, otherNode.probability);
        }
    }

    Node root = null;
    HashMap<Integer, boolean[]> symbolCodes;
    final int[] symbols;
    final long[] symbolFrequencies;

    public Huffman(int[] symbols, long[] symbolFrequencies) {
        assert (symbols.length == symbolFrequencies.length) : "Array lengths mismatch";
        this.symbols = symbols;
        this.symbolFrequencies = symbolFrequencies;
    }

    public void buildHuffmanTree() {
        PriorityQueue<Node> queue = buildPriorityQueue();
        while (queue.size() != 1) {
            // Merge the two least probable nodes until only the root remains.
            final Node parentA = queue.poll();
            final Node parentB = queue.poll();
            assert (parentA != null && parentB != null);
            assert (parentA.probability <= parentB.probability);

            parentA.bit = true;
            parentB.bit = false;

            final double mergedProbabilities = parentA.probability + parentB.probability;
            final Node mergedNode = new Node(mergedProbabilities, parentA, parentB);
            queue.add(mergedNode);
        }
        root = queue.poll();
        buildHuffmanCodes();
    }

    private void buildHuffmanCodes() {
        symbolCodes = new HashMap<>(symbols.length);
        traverseSymbolCodes(root, new ArrayList<Boolean>());
    }

    private void traverseSymbolCodes(Node currentNode, ArrayList<Boolean> currentCode) {
        boolean inLeaf = true;
        // Every node except the root contributes its edge bit to the code.
        if (currentNode != root) {
            currentCode.add(currentNode.bit);
        }

        if (currentNode.subNodeA != null) {
            ArrayList<Boolean> codeCopy = new ArrayList<Boolean>(currentCode);
            traverseSymbolCodes(currentNode.subNodeA, codeCopy);
            inLeaf = false;
        }
        if (currentNode.subNodeB != null) {
            ArrayList<Boolean> codeCopy = new ArrayList<Boolean>(currentCode);
            traverseSymbolCodes(currentNode.subNodeB, codeCopy);
            inLeaf = false;
        }

        if (inLeaf) {
            assert (currentNode.leaf);
            boolean[] finalSymbolCode = new boolean[currentCode.size()];
            for (int i = 0; i < finalSymbolCode.length; i++) {
                finalSymbolCode[i] = currentCode.get(i);
            }
            symbolCodes.put(currentNode.symbol, finalSymbolCode);
        }
    }

    private PriorityQueue<Node> buildPriorityQueue() {
        double totalFrequency = 0.0;
        for (final long symbolFrequency : symbolFrequencies) {
            totalFrequency += symbolFrequency;
        }
        PriorityQueue<Node> queue = new PriorityQueue<>(symbols.length);
        for (int sIndex = 0; sIndex < symbols.length; sIndex++) {
            final double symbolProbability = (double) symbolFrequencies[sIndex] / totalFrequency;
            queue.add(new Node(symbols[sIndex], symbolProbability, symbolFrequencies[sIndex]));
        }
        return queue;
    }

    public boolean[] getCode(final int symbol) {
        return symbolCodes.get(symbol);
    }

    public Node getRoot() {
        return root;
    }
}
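A small usage sketch of the class above, with illustrative frequencies (not values from the commit); decoding replays a code bit by bit with Node.traverse:

    // Four-symbol alphabet with skewed frequencies (illustrative only).
    final int[] symbols = {0, 1, 2, 3};
    final long[] frequencies = {12, 5, 2, 1};
    Huffman huffman = new Huffman(symbols, frequencies);
    huffman.buildHuffmanTree();

    // The most frequent symbol gets the shortest code (here a single bit).
    final boolean[] code = huffman.getCode(0);

    // Decode by walking the tree from the root down to a leaf.
    Huffman.Node node = huffman.getRoot();
    for (final boolean bit : code) {
        node = node.traverse(bit);
    }
    // node is now the leaf for symbol 0.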
@@ -62,8 +62,13 @@ public class OutBitStream implements AutoCloseable {
      * @param bit True for 1
      */
     private void writeBit(final int bit) throws IOException {
+        writeBit(bit > 0);
+    }
+
+    private void writeBit(final boolean bit) throws IOException {
         ++bitBufferSize;
-        if (bit > 0) {
+        if (bit) {
             bitBuffer |= (1 << (8 - bitBufferSize));
         }
@@ -76,10 +81,14 @@ public class OutBitStream implements AutoCloseable {
         int bit;
         for (int shift = 0; shift < bitsPerValue; shift++) {
             bit = (value & (1 << shift));
+            //bit = (value & (1 << (31 - shift)));
             writeBit(bit);
         }
     }
 
+    public void write(final boolean[] bits) throws IOException {
+        for (final boolean bit : bits) {
+            writeBit(bit);
+        }
+    }
@@ -92,6 +101,7 @@ public class OutBitStream implements AutoCloseable {
     /**
      * Flush the bitstream on close.
+     *
      * @throws Exception when flush fails.
      */
     @Override
...
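Note the bit order in OutBitStream: write(int, ...) emits fixed-width values least significant bit first, the new write(boolean[]) emits a Huffman code in array order, and writeBit packs bits into each buffered byte most significant bit first. A standalone illustration of that packing (plain Java mirroring the buffer logic above, not repo code):

    // Writing the bits 1, 0, 1 into an empty byte buffer sets bit
    // positions 7 and 5, leaving the buffer at 0b1010_0000 (160).
    int bitBuffer = 0;
    int bitBufferSize = 0;
    final boolean[] bits = {true, false, true};
    for (final boolean bit : bits) {
        ++bitBufferSize;
        if (bit) {
            bitBuffer |= (1 << (8 - bitBufferSize));
        }
    }
    // bitBuffer == 0b10100000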
@@ -28,7 +28,7 @@ public class ScalarQuantizationCodebook {
         return centroids;
     }
 
-    public long[] getIndicesFrequency() {
+    public long[] getSymbolFrequencies() {
         return indexFrequencies;
     }
...
@@ -93,7 +93,7 @@ public class ScalarQuantizer {
         return mse;
     }
 
-    public int[] getCentroids() {
-        return codebook.getCentroids();
+    public ScalarQuantizationCodebook getCodebook() {
+        return codebook;
     }
 }
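With getCentroids() replaced by getCodebook(), call sites reach both the centroids and the new symbol frequencies through the codebook object. For example (a sketch, assuming an existing ScalarQuantizer instance named quantizer):

    final ScalarQuantizationCodebook codebook = quantizer.getCodebook();
    final int[] centroids = codebook.getCentroids();   // previously quantizer.getCentroids()
    final long[] frequencies = codebook.getSymbolFrequencies();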