Skip to content
Snippets Groups Projects
Commit 9ffbddbd authored by Vojtech Moravec's avatar Vojtech Moravec
Browse files

Refactored the decompression.

parent a379acbb
No related branches found
No related tags found
No related merge requests found
...@@ -48,12 +48,8 @@ public class DataCompressor { ...@@ -48,12 +48,8 @@ public class DataCompressor {
} }
case Decompress: { case Decompress: {
ImageDecompressor decompressor = new ImageDecompressor(parsedCliOptions); ImageDecompressor decompressor = new ImageDecompressor(parsedCliOptions);
try { if (!decompressor.decompress()) {
decompressor.decompress();
} catch (Exception e) {
System.err.println("Errors occurred during decompression."); System.err.println("Errors occurred during decompression.");
System.err.println(e.getMessage());
e.printStackTrace();
} }
return; return;
} }
......
...@@ -5,5 +5,11 @@ import java.io.DataOutputStream; ...@@ -5,5 +5,11 @@ import java.io.DataOutputStream;
public interface IImageCompressor { public interface IImageCompressor {
// TODO(Moravec): Replace default Exception with better Exception type. // TODO(Moravec): Replace default Exception with better Exception type.
/**
* Compress the image planes.
* @param compressStream Compressed data stream.
* @throws Exception when compression fails.
*/
void compress(DataOutputStream compressStream) throws Exception; void compress(DataOutputStream compressStream) throws Exception;
} }
package azgracompress.compression;
import azgracompress.fileformat.QCMPFileHeader;
import java.io.DataInputStream;
import java.io.DataOutputStream;
public interface IImageDecompressor {
    /**
     * Calculate the expected size of the compressed data block described by the header.
     *
     * @param header QCMPFile header with information about the compressed file.
     * @return Expected size of the compressed data in bytes.
     */
    long getExpectedDataSize(final QCMPFileHeader header);

    /**
     * Decompress all image planes from the compressed input stream and write the
     * raw plane data to the output stream.
     *
     * @param compressedStream Input stream with compressed data.
     * @param decompressStream Output stream receiving decompressed plane data.
     * @param header           QCMPFile header describing the compressed file.
     * @throws Exception when decompression fails.
     */
    void decompress(DataInputStream compressedStream,
                    DataOutputStream decompressStream,
                    final QCMPFileHeader header) throws Exception;
}
package azgracompress.compression; package azgracompress.compression;
import azgracompress.cli.ParsedCliOptions; import azgracompress.cli.ParsedCliOptions;
import azgracompress.data.*;
import azgracompress.fileformat.QCMPFileHeader; import azgracompress.fileformat.QCMPFileHeader;
import azgracompress.io.InBitStream;
import azgracompress.utilities.TypeConverter;
import java.io.*; import java.io.*;
...@@ -15,84 +12,48 @@ public class ImageDecompressor extends CompressorDecompressorBase { ...@@ -15,84 +12,48 @@ public class ImageDecompressor extends CompressorDecompressorBase {
super(options); super(options);
} }
private long getExpectedDataSizeForScalarQuantization(final QCMPFileHeader header) {
// Quantization value count.
final int codebookSize = (int) Math.pow(2, header.getBitsPerPixel());
// Total codebook size in bytes. /**
long codebookDataSize = (2 * codebookSize) * (header.isCodebookPerPlane() ? header.getImageSizeZ() : 1); * Read compressed QCMP file header.
*
// Data size of single plane indices. * @param inputStream Compressed data stream.
final long planeIndicesDataSize = * @return Decompressed file header.
(long) Math.ceil(((header.getImageSizeX() * header.getImageSizeY()) * header.getBitsPerPixel()) / 8.0); * @throws IOException when failed to read header.
*/
// All planes data size. private QCMPFileHeader readQCMPFileHeader(DataInputStream inputStream) throws IOException {
final long allPlaneIndicesDataSize = planeIndicesDataSize * header.getImageSizeZ(); QCMPFileHeader header = new QCMPFileHeader();
if (!header.readHeader(inputStream)) {
return (codebookDataSize + allPlaneIndicesDataSize); // Invalid QCMPFile header.
} return null;
private long calculatePlaneVectorCount(final QCMPFileHeader header) {
final int vectorXCount = (int) Math.ceil((double) header.getImageSizeX() / (double) header.getVectorSizeX());
final int vectorYCount = (int) Math.ceil((double) header.getImageSizeY() / (double) header.getVectorSizeY());
// Number of vectors per plane.
return (vectorXCount * vectorYCount);
}
private long calculatePlaneDataSize(final long planeVectorCount, final int bpp) {
// Data size of single plane indices.
return (long) Math.ceil((planeVectorCount * bpp) / 8.0);
} }
return header;
private long getExpectedDataSizeForVectorQuantization(final QCMPFileHeader header) {
// Vector count in codebook
final int codebookSize = (int) Math.pow(2, header.getBitsPerPixel());
// Single vector size in bytes.
assert (header.getVectorSizeZ() == 1);
final int vectorDataSize = 2 * header.getVectorSizeX() * header.getVectorSizeY() * header.getVectorSizeZ();
// Total codebook size in bytes.
final long codebookDataSize = (codebookSize * vectorDataSize) * (header.isCodebookPerPlane() ?
header.getImageSizeZ() : 1);
// Number of vectors per plane.
final long planeVectorCount = calculatePlaneVectorCount(header);
// Data size of single plane indices.
final long planeDataSize = calculatePlaneDataSize(planeVectorCount, header.getBitsPerPixel());
// All planes data size.
final long allPlanesDataSize = planeDataSize * header.getImageSizeZ();
return (codebookDataSize + allPlanesDataSize);
} }
/**
private long getExpectedDataSize(final QCMPFileHeader header) { * Get image plane decompressor for set quantization type.
switch (header.getQuantizationType()) { *
case Scalar: { * @return Correct implementation of image decompressor.
return getExpectedDataSizeForScalarQuantization(header); */
} private IImageDecompressor getImageDecompressor() {
switch (options.getQuantizationType()) {
case Scalar:
return new SQImageDecompressor(options);
case Vector1D: case Vector1D:
case Vector2D: case Vector2D:
return new VQImageDecompressor(options);
case Vector3D: case Vector3D:
return getExpectedDataSizeForVectorQuantization(header);
case Invalid: case Invalid:
return -1; default:
}
return -1;
}
private QCMPFileHeader readQCMPFileHeader(DataInputStream inputStream) throws IOException {
QCMPFileHeader header = new QCMPFileHeader();
if (!header.readHeader(inputStream)) {
// Invalid QCMPFile header.
return null; return null;
} }
return header;
} }
/**
* Inspect the compressed file by returning information contained in its header.
*
* @return Information from header.
* @throws IOException When fails to read the header.
*/
public String inspectCompressedFile() throws IOException { public String inspectCompressedFile() throws IOException {
StringBuilder logBuilder = new StringBuilder(); StringBuilder logBuilder = new StringBuilder();
boolean validFile = true; boolean validFile = true;
...@@ -148,205 +109,61 @@ public class ImageDecompressor extends CompressorDecompressorBase { ...@@ -148,205 +109,61 @@ public class ImageDecompressor extends CompressorDecompressorBase {
final long fileSize = new File(options.getInputFile()).length(); final long fileSize = new File(options.getInputFile()).length();
final long dataSize = fileSize - QCMPFileHeader.QCMP_HEADER_SIZE; final long dataSize = fileSize - QCMPFileHeader.QCMP_HEADER_SIZE;
final long expectedDataSize = getExpectedDataSize(header); final var decompressor = getImageDecompressor();
if (decompressor != null) {
final long expectedDataSize = getImageDecompressor().getExpectedDataSize(header);
validFile = (dataSize == expectedDataSize); validFile = (dataSize == expectedDataSize);
logBuilder.append("Data size:\t\t").append(dataSize).append(" Bytes ").append(dataSize == expectedDataSize ? "(correct)\n" : "(INVALID)\n"); logBuilder.append("Data size:\t\t").append(dataSize).append(" Bytes ").append(dataSize == expectedDataSize ? "(correct)\n" : "(INVALID)\n");
} }
}
logBuilder.append("\n=== Input file is ").append(validFile ? "VALID" : "INVALID").append(" ===\n"); logBuilder.append("\n=== Input file is ").append(validFile ? "VALID" : "INVALID").append(" ===\n");
return logBuilder.toString(); return logBuilder.toString();
} }
public void decompress() throws Exception { public boolean decompress() {
var fileInputStream = new FileInputStream(options.getInputFile()); try (FileInputStream fileInputStream = new FileInputStream(options.getInputFile());
var dataInputStream = new DataInputStream(fileInputStream); DataInputStream dataInputStream = new DataInputStream(fileInputStream)) {
final QCMPFileHeader header = readQCMPFileHeader(dataInputStream); final QCMPFileHeader header = readQCMPFileHeader(dataInputStream);
if (header == null) { if (header == null) {
throw new Exception("Failed to read QCMPFile header"); System.err.println("Failed to read QCMPFile header");
return false;
} }
if (!header.validateHeader()) { if (!header.validateHeader()) {
throw new Exception("QCMPFile header is invalid"); System.err.println("QCMPFile header is invalid");
return false;
}
IImageDecompressor imageDecompressor = getImageDecompressor();
if (imageDecompressor == null) {
System.err.println("Unable to create correct decompressor.");
return false;
} }
final long fileSize = new File(options.getInputFile()).length(); final long fileSize = new File(options.getInputFile()).length();
final long dataSize = fileSize - QCMPFileHeader.QCMP_HEADER_SIZE; final long dataSize = fileSize - QCMPFileHeader.QCMP_HEADER_SIZE;
final long expectedDataSize = getExpectedDataSize(header); final long expectedDataSize = imageDecompressor.getExpectedDataSize(header);
if (dataSize != expectedDataSize) { if (dataSize != expectedDataSize) {
throw new Exception("Invalid file size."); System.err.println("Invalid file size.");
return false;
} }
FileOutputStream fos = new FileOutputStream(options.getOutputFile(), false); try (FileOutputStream fos = new FileOutputStream(options.getOutputFile(), false);
DataOutputStream decompressStream = new DataOutputStream(fos); DataOutputStream decompressStream = new DataOutputStream(fos)) {
imageDecompressor.decompress(dataInputStream, decompressStream, header);
switch (header.getQuantizationType()) { } catch (Exception ex) {
case Scalar: ex.printStackTrace();
decompressUsingScalarQuantization(dataInputStream, decompressStream, header); return false;
break;
case Vector1D:
case Vector2D:
case Vector3D:
decompressUsingVectorQuantization(dataInputStream, decompressStream, header);
break;
case Invalid:
throw new Exception("Invalid quantization type;");
}
dataInputStream.close();
fileInputStream.close();
decompressStream.flush();
decompressStream.close();
fos.flush();
fos.close();
}
private int[] readScalarQuantizationValues(DataInputStream compressedStream, final int n) throws IOException {
int[] quantizationValues = new int[n];
for (int i = 0; i < n; i++) {
quantizationValues[i] = compressedStream.readUnsignedShort();
}
return quantizationValues;
}
private int[][] readCodebookVectors(DataInputStream compressedStream,
final int codebookSize,
final int vectorSize) throws IOException {
int[][] codebook = new int[codebookSize][vectorSize];
for (int codebookIndex = 0; codebookIndex < codebookSize; codebookIndex++) {
for (int vecIndex = 0; vecIndex < vectorSize; vecIndex++) {
codebook[codebookIndex][vecIndex] = compressedStream.readUnsignedShort();
} }
}
return codebook;
}
private ImageU16 reconstructImageFromQuantizedVectors(final int[][] vectors,
final V2i qVector,
final V3i imageDims) {
Chunk2D reconstructedChunk = new Chunk2D(new V2i(imageDims.getX(), imageDims.getY()), new V2l(0, 0));
if (qVector.getY() > 1) {
// FIXME
// Chunk2D new Chunk2D(new V2i(width, height), new V2l(0, 0), data);
// var chunks = plane.as2dChunk().divideIntoChunks(qVector);
var chunks = reconstructedChunk.divideIntoChunks(qVector);
Chunk2D.updateChunkData(chunks, vectors);
reconstructedChunk.reconstructFromChunks(chunks);
} else {
// 1D vector
reconstructedChunk.reconstructFromVectors(vectors);
}
return reconstructedChunk.asImageU16();
}
private void decompressUsingVectorQuantization(DataInputStream compressedStream,
DataOutputStream decompressStream,
final QCMPFileHeader header) throws Exception {
final int codebookSize = (int) Math.pow(2, header.getBitsPerPixel());
assert (header.getVectorSizeZ() == 1);
final int vectorSize = header.getVectorSizeX() * header.getVectorSizeY() * header.getVectorSizeZ();
final int planeCountForDecompression = header.getImageSizeZ();
final int planePixelCount = header.getImageSizeX() * header.getImageSizeY();
final long planeVectorCount = calculatePlaneVectorCount(header);
final long planeDataSize = calculatePlaneDataSize(planeVectorCount, header.getBitsPerPixel());
final V2i qVector = new V2i(header.getVectorSizeX(), header.getVectorSizeY());
int[][] quantizationVectors = null; } catch (IOException ioEx) {
if (!header.isCodebookPerPlane()) { ioEx.printStackTrace();
// There is only one codebook. return false;
Log("Loading reference codebook...");
quantizationVectors = readCodebookVectors(compressedStream, codebookSize, vectorSize);
} }
return true;
for (int planeIndex = 0; planeIndex < planeCountForDecompression; planeIndex++) {
if (header.isCodebookPerPlane()) {
Log("Loading plane codebook...");
quantizationVectors = readCodebookVectors(compressedStream, codebookSize, vectorSize);
}
assert (quantizationVectors != null);
Log(String.format("Decompressing plane %d...", planeIndex));
InBitStream inBitStream = new InBitStream(compressedStream, header.getBitsPerPixel(), (int) planeDataSize);
inBitStream.readToBuffer();
inBitStream.setAllowReadFromUnderlyingStream(false);
final int[] indices = inBitStream.readNValues((int) planeVectorCount);
int[][] decompressedVectors = new int[(int) planeVectorCount][vectorSize];
for (int vecIndex = 0; vecIndex < planeVectorCount; vecIndex++) {
System.arraycopy(quantizationVectors[indices[vecIndex]],
0,
decompressedVectors[vecIndex],
0,
vectorSize);
}
// int[] decompressedValues = new int[planePixelCount];
// for (int vecIndex = 0; vecIndex < planeVectorCount; vecIndex++) {
// System.arraycopy(quantizationVectors[indices[vecIndex]],
// 0,
// decompressedValues,
// (vecIndex * vectorSize),
// vectorSize);
// }
final ImageU16 decompressedPlane = reconstructImageFromQuantizedVectors(decompressedVectors,
qVector,
header.getImageDims());
final byte[] decompressedPlaneData = TypeConverter.shortArrayToByteArray(decompressedPlane.getData(),
false);
decompressStream.write(decompressedPlaneData);
Log(String.format("Decompressed plane %d.", planeIndex));
}
}
private void decompressUsingScalarQuantization(DataInputStream compressedStream,
DataOutputStream decompressStream,
final QCMPFileHeader header) throws Exception {
final int codebookSize = (int) Math.pow(2, header.getBitsPerPixel());
final int planeCountForDecompression = header.getImageSizeZ();
final int planePixelCount = header.getImageSizeX() * header.getImageSizeY();
final int planeIndicesDataSize = (int) Math.ceil((planePixelCount * header.getBitsPerPixel()) / 8.0);
int[] quantizationValues = null;
if (!header.isCodebookPerPlane()) {
// There is only one codebook.
Log("Loading reference codebook...");
quantizationValues = readScalarQuantizationValues(compressedStream, codebookSize);
}
for (int planeIndex = 0; planeIndex < planeCountForDecompression; planeIndex++) {
if (header.isCodebookPerPlane()) {
Log("Loading plane codebook...");
quantizationValues = readScalarQuantizationValues(compressedStream, codebookSize);
}
assert (quantizationValues != null);
Log(String.format("Decompressing plane %d...", planeIndex));
InBitStream inBitStream = new InBitStream(compressedStream, header.getBitsPerPixel(), planeIndicesDataSize);
inBitStream.readToBuffer();
inBitStream.setAllowReadFromUnderlyingStream(false);
final int[] indices = inBitStream.readNValues(planePixelCount);
short[] decompressedValues = new short[planePixelCount];
for (int i = 0; i < planePixelCount; i++) {
decompressedValues[i] = TypeConverter.intToShort(quantizationValues[indices[i]]);
}
final byte[] decompressedPlaneData = TypeConverter.shortArrayToByteArray(decompressedValues, false);
decompressStream.write(decompressedPlaneData);
Log(String.format("Decompressed plane %d.", planeIndex));
}
} }
} }
\ No newline at end of file
package azgracompress.compression;
import azgracompress.cli.ParsedCliOptions;
import azgracompress.fileformat.QCMPFileHeader;
import azgracompress.io.InBitStream;
import azgracompress.utilities.TypeConverter;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
/**
 * Decompressor for scalar-quantized QCMP files.
 */
public class SQImageDecompressor extends CompressorDecompressorBase implements IImageDecompressor {
    public SQImageDecompressor(ParsedCliOptions options) {
        super(options);
    }

    /**
     * Read the scalar quantization codebook (n unsigned 16-bit values) from the stream.
     *
     * @param compressedStream Stream with compressed data.
     * @param n                Number of quantization values to read.
     * @return Array of quantization values.
     * @throws IOException when the stream cannot be read.
     */
    private int[] readScalarQuantizationValues(DataInputStream compressedStream, final int n) throws IOException {
        int[] quantizationValues = new int[n];
        for (int i = 0; i < n; i++) {
            quantizationValues[i] = compressedStream.readUnsignedShort();
        }
        return quantizationValues;
    }

    /**
     * Size in bytes of the bit-packed indices of one plane.
     * Uses long arithmetic throughout: {@code sizeX * sizeY * bpp} overflows int
     * for large planes (e.g. 20000x20000 at 16 bpp).
     *
     * @param header QCMPFile header with plane dimensions and bits per pixel.
     * @return Plane indices data size in bytes.
     */
    private long calculatePlaneIndicesDataSize(final QCMPFileHeader header) {
        final long planePixelCount = (long) header.getImageSizeX() * (long) header.getImageSizeY();
        // Integer ceiling of (pixelCount * bpp) / 8, equivalent to Math.ceil for non-negative values.
        return ((planePixelCount * header.getBitsPerPixel()) + 7) / 8;
    }

    @Override
    public long getExpectedDataSize(QCMPFileHeader header) {
        // Quantization value count.
        final int codebookSize = (int) Math.pow(2, header.getBitsPerPixel());

        // Total codebook size in bytes (2 bytes per value); one codebook per plane when requested.
        final long codebookDataSize = (2L * codebookSize) * (header.isCodebookPerPlane() ? header.getImageSizeZ() : 1);

        // All planes data size.
        final long allPlaneIndicesDataSize = calculatePlaneIndicesDataSize(header) * header.getImageSizeZ();

        return (codebookDataSize + allPlaneIndicesDataSize);
    }

    @Override
    public void decompress(DataInputStream compressedStream,
                           DataOutputStream decompressStream,
                           QCMPFileHeader header) throws Exception {
        final int codebookSize = (int) Math.pow(2, header.getBitsPerPixel());
        final int planeCountForDecompression = header.getImageSizeZ();
        final int planePixelCount = header.getImageSizeX() * header.getImageSizeY();
        // Fail loudly instead of silently truncating if the plane data size exceeds int range.
        final int planeIndicesDataSize = Math.toIntExact(calculatePlaneIndicesDataSize(header));

        int[] quantizationValues = null;
        if (!header.isCodebookPerPlane()) {
            // There is only one codebook shared by all planes.
            Log("Loading reference codebook...");
            quantizationValues = readScalarQuantizationValues(compressedStream, codebookSize);
        }

        for (int planeIndex = 0; planeIndex < planeCountForDecompression; planeIndex++) {
            if (header.isCodebookPerPlane()) {
                Log("Loading plane codebook...");
                quantizationValues = readScalarQuantizationValues(compressedStream, codebookSize);
            }
            assert (quantizationValues != null);

            Log(String.format("Decompressing plane %d...", planeIndex));
            // Buffer the whole plane's packed indices, then forbid further stream reads
            // so a malformed file cannot read past the plane boundary.
            InBitStream inBitStream = new InBitStream(compressedStream, header.getBitsPerPixel(), planeIndicesDataSize);
            inBitStream.readToBuffer();
            inBitStream.setAllowReadFromUnderlyingStream(false);
            final int[] indices = inBitStream.readNValues(planePixelCount);

            // Map each index back to its quantization value.
            short[] decompressedValues = new short[planePixelCount];
            for (int i = 0; i < planePixelCount; i++) {
                decompressedValues[i] = TypeConverter.intToShort(quantizationValues[indices[i]]);
            }
            final byte[] decompressedPlaneData = TypeConverter.shortArrayToByteArray(decompressedValues, false);

            decompressStream.write(decompressedPlaneData);
            Log(String.format("Decompressed plane %d.", planeIndex));
        }
    }
}
package azgracompress.compression;
import azgracompress.cli.ParsedCliOptions;
import azgracompress.data.*;
import azgracompress.fileformat.QCMPFileHeader;
import azgracompress.io.InBitStream;
import azgracompress.utilities.TypeConverter;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
/**
 * Decompressor for vector-quantized QCMP files.
 */
public class VQImageDecompressor extends CompressorDecompressorBase implements IImageDecompressor {
    public VQImageDecompressor(ParsedCliOptions options) {
        super(options);
    }

    /**
     * Number of quantization vectors needed to cover a single plane.
     *
     * @param header QCMPFile header with plane and vector dimensions.
     * @return Vector count per plane.
     */
    private long calculatePlaneVectorCount(final QCMPFileHeader header) {
        final int xVectorCount = (int) Math.ceil((double) header.getImageSizeX() / (double) header.getVectorSizeX());
        final int yVectorCount = (int) Math.ceil((double) header.getImageSizeY() / (double) header.getVectorSizeY());
        return (xVectorCount * yVectorCount);
    }

    /**
     * Size in bytes of the bit-packed vector indices of one plane.
     *
     * @param planeVectorCount Number of vectors per plane.
     * @param bpp              Bits per pixel (index bit width).
     * @return Plane indices data size in bytes.
     */
    private long calculatePlaneDataSize(final long planeVectorCount, final int bpp) {
        return (long) Math.ceil((planeVectorCount * bpp) / 8.0);
    }

    /**
     * Read the vector codebook (codebookSize vectors of vectorSize unsigned 16-bit
     * values each) from the compressed stream.
     *
     * @param compressedStream Stream with compressed data.
     * @param codebookSize     Number of vectors in the codebook.
     * @param vectorSize       Component count of each vector.
     * @return Codebook matrix.
     * @throws IOException when the stream cannot be read.
     */
    private int[][] readCodebookVectors(DataInputStream compressedStream,
                                        final int codebookSize,
                                        final int vectorSize) throws IOException {
        int[][] codebook = new int[codebookSize][vectorSize];
        for (int entry = 0; entry < codebookSize; entry++) {
            for (int component = 0; component < vectorSize; component++) {
                codebook[entry][component] = compressedStream.readUnsignedShort();
            }
        }
        return codebook;
    }

    /**
     * Rebuild a plane image from its dequantized vectors.
     *
     * @param vectors   Dequantized vectors of the plane.
     * @param qVector   Quantization vector dimensions.
     * @param imageDims Plane dimensions.
     * @return Reconstructed plane image.
     */
    private ImageU16 reconstructImageFromQuantizedVectors(final int[][] vectors,
                                                          final V2i qVector,
                                                          final V3i imageDims) {
        Chunk2D assembled = new Chunk2D(new V2i(imageDims.getX(), imageDims.getY()), new V2l(0, 0));
        if (qVector.getY() > 1) {
            // 2D vectors: split the plane into chunks, fill them, then stitch back together.
            // FIXME
            var chunks = assembled.divideIntoChunks(qVector);
            Chunk2D.updateChunkData(chunks, vectors);
            assembled.reconstructFromChunks(chunks);
        } else {
            // 1D (row) vectors map straight back into the plane.
            assembled.reconstructFromVectors(vectors);
        }
        return assembled.asImageU16();
    }

    @Override
    public long getExpectedDataSize(QCMPFileHeader header) {
        // Vector count in codebook.
        final int codebookSize = (int) Math.pow(2, header.getBitsPerPixel());

        // Single vector size in bytes (2 bytes per component); only 2D vectors supported.
        assert (header.getVectorSizeZ() == 1);
        final int vectorDataSize = 2 * header.getVectorSizeX() * header.getVectorSizeY() * header.getVectorSizeZ();

        // Total codebook size in bytes; one codebook per plane when requested.
        final long codebookDataSize = (codebookSize * vectorDataSize) * (header.isCodebookPerPlane() ?
                header.getImageSizeZ() : 1);

        // Indices data size of a single plane, then of all planes.
        final long planeVectorCount = calculatePlaneVectorCount(header);
        final long planeDataSize = calculatePlaneDataSize(planeVectorCount, header.getBitsPerPixel());
        final long allPlanesDataSize = planeDataSize * header.getImageSizeZ();

        return (codebookDataSize + allPlanesDataSize);
    }

    @Override
    public void decompress(DataInputStream compressedStream,
                           DataOutputStream decompressStream,
                           QCMPFileHeader header) throws Exception {
        final int codebookSize = (int) Math.pow(2, header.getBitsPerPixel());
        assert (header.getVectorSizeZ() == 1);
        final int vectorSize = header.getVectorSizeX() * header.getVectorSizeY() * header.getVectorSizeZ();
        final int planeCount = header.getImageSizeZ();
        final long planeVectorCount = calculatePlaneVectorCount(header);
        final long planeDataSize = calculatePlaneDataSize(planeVectorCount, header.getBitsPerPixel());
        final V2i qVector = new V2i(header.getVectorSizeX(), header.getVectorSizeY());

        int[][] codebook = null;
        if (!header.isCodebookPerPlane()) {
            // There is only one codebook.
            Log("Loading reference codebook...");
            codebook = readCodebookVectors(compressedStream, codebookSize, vectorSize);
        }

        for (int planeIndex = 0; planeIndex < planeCount; planeIndex++) {
            if (header.isCodebookPerPlane()) {
                Log("Loading plane codebook...");
                codebook = readCodebookVectors(compressedStream, codebookSize, vectorSize);
            }
            assert (codebook != null);

            Log(String.format("Decompressing plane %d...", planeIndex));
            // Buffer the whole plane's packed indices, then forbid further stream reads.
            InBitStream inBitStream = new InBitStream(compressedStream, header.getBitsPerPixel(), (int) planeDataSize);
            inBitStream.readToBuffer();
            inBitStream.setAllowReadFromUnderlyingStream(false);
            final int[] indices = inBitStream.readNValues((int) planeVectorCount);

            // Dequantize: look up each index in the codebook.
            int[][] planeVectors = new int[(int) planeVectorCount][vectorSize];
            for (int vec = 0; vec < planeVectorCount; vec++) {
                System.arraycopy(codebook[indices[vec]], 0, planeVectors[vec], 0, vectorSize);
            }

            final ImageU16 plane = reconstructImageFromQuantizedVectors(planeVectors, qVector, header.getImageDims());
            final byte[] planeBytes = TypeConverter.shortArrayToByteArray(plane.getData(), false);

            decompressStream.write(planeBytes);
            Log(String.format("Decompressed plane %d.", planeIndex));
        }
    }
}
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please register or to comment