Skip to content
Snippets Groups Projects
Commit ea4c44c6 authored by Vojtech Moravec's avatar Vojtech Moravec
Browse files

Merge branch 'master' into compression_experiment

parents 4deb3a3f 368cc7c0
No related branches found
No related tags found
No related merge requests found
......@@ -8,7 +8,7 @@
.project
.settings
/target
classes/
# IntelliJ
.idea/
*.iml
......
......@@ -201,5 +201,11 @@
<artifactId>junit</artifactId>
<scope>test</scope>
</dependency>
</dependencies>
<dependency>
<groupId>org.azgra</groupId>
<artifactId>DataCompressor</artifactId>
<version>1.0-SNAPSHOT</version>
<scope>compile</scope>
</dependency>
</dependencies>
</project>
package azgracompress;
/**
 * Client-side settings controlling compressed data transfer from the remote
 * image server: whether compression is requested at all, and the first mipmap
 * level it applies to.
 */
public class ViewerCompressionOptions {

    /** Whether the client asks the server for compressed data. Off by default. */
    private boolean enabled = false;

    /** First mipmap level (inclusive) for which compressed transfer is used. */
    private int compressFromMipmapLevel = 0;

    /**
     * @return true when compressed data transfer is requested.
     */
    public boolean isEnabled() {
        return enabled;
    }

    /**
     * Turns compressed data transfer on or off.
     *
     * @param enable true to request compressed data from the server.
     */
    public void setEnabled(final boolean enable) {
        this.enabled = enable;
    }

    /**
     * @return the first mipmap level (inclusive) compressed transfer applies to.
     */
    public int getCompressFromMipmapLevel() {
        return compressFromMipmapLevel;
    }

    /**
     * Sets the first mipmap level (inclusive) compressed transfer applies to.
     *
     * @param compressFrom lowest mipmap level to request compressed.
     */
    public void setCompressFromMipmapLevel(final int compressFrom) {
        this.compressFromMipmapLevel = compressFrom;
    }
}
This diff is collapsed.
......@@ -7,13 +7,13 @@
* %%
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
......@@ -29,13 +29,11 @@
*/
package bdv.img.remote;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.URL;
import java.util.HashMap;
import com.google.gson.GsonBuilder;
import azgracompress.ViewerCompressionOptions;
import azgracompress.cache.ICacheFile;
import azgracompress.cache.QuantizationCacheManager;
import azgracompress.compression.ImageDecompressor;
import azgracompress.utilities.ColorConsole;
import bdv.AbstractViewerSetupImgLoader;
import bdv.ViewerImgLoader;
import bdv.img.cache.VolatileCachedCellImg;
......@@ -44,6 +42,7 @@ import bdv.img.hdf5.DimsAndExistence;
import bdv.img.hdf5.MipmapInfo;
import bdv.img.hdf5.ViewLevelId;
import bdv.util.ConstantRandomAccessible;
import com.google.gson.GsonBuilder;
import mpicbg.spim.data.generic.sequence.ImgLoaderHint;
import net.imglib2.FinalInterval;
import net.imglib2.RandomAccessibleInterval;
......@@ -57,211 +56,245 @@ import net.imglib2.type.volatiles.VolatileUnsignedShortType;
import net.imglib2.util.IntervalIndexer;
import net.imglib2.view.Views;
public class RemoteImageLoader implements ViewerImgLoader
{
protected String baseUrl;
protected RemoteImageLoaderMetaData metadata;
protected HashMap< ViewLevelId, int[] > cellsDimensions;
protected VolatileGlobalCellCache cache;
protected RemoteVolatileShortArrayLoader shortLoader;
/**
* TODO
*/
protected final HashMap< Integer, SetupImgLoader > setupImgLoaders;
public RemoteImageLoader( final String baseUrl ) throws IOException
{
this( baseUrl, true );
}
public RemoteImageLoader( final String baseUrl, final boolean doOpen ) throws IOException
{
this.baseUrl = baseUrl;
setupImgLoaders = new HashMap<>();
if ( doOpen )
open();
}
@Override
public SetupImgLoader getSetupImgLoader( final int setupId )
{
tryopen();
return setupImgLoaders.get( setupId );
}
private boolean isOpen = false;
private void open() throws IOException
{
if ( ! isOpen )
{
synchronized ( this )
{
if ( isOpen )
return;
isOpen = true;
final URL url = new URL( baseUrl + "?p=init" );
final GsonBuilder gsonBuilder = new GsonBuilder();
gsonBuilder.registerTypeAdapter( AffineTransform3D.class, new AffineTransform3DJsonSerializer() );
metadata = gsonBuilder.create().fromJson(
new InputStreamReader( url.openStream() ),
RemoteImageLoaderMetaData.class );
shortLoader = new RemoteVolatileShortArrayLoader( this );
cache = new VolatileGlobalCellCache( metadata.maxNumLevels, 10 );
cellsDimensions = metadata.createCellsDimensions();
for ( final int setupId : metadata.perSetupMipmapInfo.keySet() )
setupImgLoaders.put( setupId, new SetupImgLoader( setupId ) );
}
}
}
private void tryopen()
{
try
{
open();
}
catch ( final IOException e )
{
throw new RuntimeException( e );
}
}
@Override
public VolatileGlobalCellCache getCacheControl()
{
tryopen();
return cache;
}
public MipmapInfo getMipmapInfo( final int setupId )
{
tryopen();
return metadata.perSetupMipmapInfo.get( setupId );
}
/**
* Checks whether the given image data is present on the server.
*
* @return true, if the given image data is present.
*/
public boolean existsImageData( final ViewLevelId id )
{
return getDimsAndExistence( id ).exists();
}
/**
* For images that are missing in the hdf5, a constant image is created. If
* the dimension of the missing image is known (see
* {@link #getDimsAndExistence(ViewLevelId)}) then use that. Otherwise
* create a 1x1x1 image.
*/
protected < T > RandomAccessibleInterval< T > getMissingDataImage( final ViewLevelId id, final T constant )
{
final long[] d = getDimsAndExistence( id ).getDimensions();
return Views.interval( new ConstantRandomAccessible<>( constant, 3 ), new FinalInterval( d ) );
}
public DimsAndExistence getDimsAndExistence( final ViewLevelId id )
{
tryopen();
return metadata.dimsAndExistence.get( id );
}
int getCellIndex( final int timepoint, final int setup, final int level, final long[] globalPosition )
{
final int[] cellDims = cellsDimensions.get( new ViewLevelId( timepoint, setup, level ) );
final int[] cellSize = getMipmapInfo( setup ).getSubdivisions()[ level ];
final int[] cellPos = new int[] {
( int ) globalPosition[ 0 ] / cellSize[ 0 ],
( int ) globalPosition[ 1 ] / cellSize[ 1 ],
( int ) globalPosition[ 2 ] / cellSize[ 2 ] };
return IntervalIndexer.positionToIndex( cellPos, cellDims );
}
/**
* Create a {@link VolatileCachedCellImg} backed by the cache. The
* {@code type} should be either {@link UnsignedShortType} and
* {@link VolatileUnsignedShortType}.
*/
protected < T extends NativeType< T > > RandomAccessibleInterval< T > prepareCachedImage(
final ViewLevelId id,
final LoadingStrategy loadingStrategy,
final T type )
{
tryopen();
if ( cache == null )
throw new RuntimeException( "no connection open" );
// final ViewLevelId id = new ViewLevelId( timepointId, setupId, level );
if ( ! existsImageData( id ) )
{
System.err.println( String.format(
"image data for timepoint %d setup %d level %d could not be found.",
id.getTimePointId(), id.getViewSetupId(), id.getLevel() ) );
return getMissingDataImage( id, type );
}
final int timepointId = id.getTimePointId();
final int setupId = id.getViewSetupId();
final int level = id.getLevel();
final MipmapInfo mipmapInfo = metadata.perSetupMipmapInfo.get( setupId );
final long[] dimensions = metadata.dimsAndExistence.get( id ).getDimensions();
final int[] cellDimensions = mipmapInfo.getSubdivisions()[ level ];
final CellGrid grid = new CellGrid( dimensions, cellDimensions );
final int priority = mipmapInfo.getMaxLevel() - level;
final CacheHints cacheHints = new CacheHints( loadingStrategy, priority, false );
return cache.createImg( grid, timepointId, setupId, level, cacheHints, shortLoader, type );
}
public class SetupImgLoader extends AbstractViewerSetupImgLoader< UnsignedShortType, VolatileUnsignedShortType >
{
private final int setupId;
protected SetupImgLoader( final int setupId )
{
super( new UnsignedShortType(), new VolatileUnsignedShortType() );
this.setupId = setupId;
}
@Override
public RandomAccessibleInterval< UnsignedShortType > getImage( final int timepointId, final int level, final ImgLoaderHint... hints )
{
final ViewLevelId id = new ViewLevelId( timepointId, setupId, level );
return prepareCachedImage( id, LoadingStrategy.BLOCKING, type );
}
@Override
public RandomAccessibleInterval< VolatileUnsignedShortType > getVolatileImage( final int timepointId, final int level, final ImgLoaderHint... hints )
{
final ViewLevelId id = new ViewLevelId( timepointId, setupId, level );
return prepareCachedImage( id, LoadingStrategy.BUDGETED, volatileType );
}
@Override
public double[][] getMipmapResolutions()
{
return getMipmapInfo( setupId ).getResolutions();
}
@Override
public AffineTransform3D[] getMipmapTransforms()
{
return getMipmapInfo( setupId ).getTransforms();
}
@Override
public int numMipmapLevels()
{
return getMipmapInfo( setupId ).getNumLevels();
}
}
import java.io.DataInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.HashMap;
public class RemoteImageLoader implements ViewerImgLoader {
protected String baseUrl;
protected RemoteImageLoaderMetaData metadata;
protected HashMap<ViewLevelId, int[]> cellsDimensions;
protected VolatileGlobalCellCache cache;
protected RemoteVolatileShortArrayLoader shortLoader;
/**
* Flag whether we allow the server to send us compressed data.
*/
private ViewerCompressionOptions viewerCompressionOptions;
/**
* TODO
*/
protected final HashMap<Integer, SetupImgLoader> setupImgLoaders;
public RemoteImageLoader(final String baseUrl) throws IOException {
this(baseUrl, true);
}
public RemoteImageLoader(final String baseUrl,
final boolean doOpen) throws IOException {
this.baseUrl = baseUrl;
setupImgLoaders = new HashMap<>();
if (doOpen)
open();
}
@Override
public SetupImgLoader getSetupImgLoader(final int setupId) {
tryopen();
return setupImgLoaders.get(setupId);
}
private boolean isOpen = false;
private void open() throws IOException {
if (!isOpen) {
synchronized (this) {
if (isOpen)
return;
isOpen = true;
final URL url = new URL(baseUrl + "?p=init");
final GsonBuilder gsonBuilder = new GsonBuilder();
gsonBuilder.registerTypeAdapter(AffineTransform3D.class, new AffineTransform3DJsonSerializer());
metadata = gsonBuilder.create().fromJson(
new InputStreamReader(url.openStream()),
RemoteImageLoaderMetaData.class);
shortLoader = new RemoteVolatileShortArrayLoader(this);
cache = new VolatileGlobalCellCache(metadata.maxNumLevels, 10);
cellsDimensions = metadata.createCellsDimensions();
for (final int setupId : metadata.perSetupMipmapInfo.keySet())
setupImgLoaders.put(setupId, new SetupImgLoader(setupId));
if (viewerCompressionOptions.isEnabled()) {
setupCompression();
}
}
}
}
public void setViewerCompressionOptions(final ViewerCompressionOptions ops) {
this.viewerCompressionOptions = ops;
}
private void setupCompression() throws IOException {
final URL url = new URL(baseUrl + "?p=init_qcmp");
final HttpURLConnection connection = (HttpURLConnection) url.openConnection();
connection.setRequestMethod("GET");
connection.connect();
if (connection.getResponseCode() != HttpURLConnection.HTTP_OK) {
System.out.println("\u001b[33mRemoteImageLoader::setupCompression() - Server doesn't provide compressed data.\u001b[0m");
return;
}
final ArrayList<ICacheFile> cacheFiles = new ArrayList<>();
try (final DataInputStream dis = new DataInputStream(connection.getInputStream())) {
final int codebookCount = dis.readByte();
for (int cbIndex = 0; cbIndex < codebookCount; cbIndex++) {
final ICacheFile readCacheFile = QuantizationCacheManager.readCacheFile(dis);
if (readCacheFile == null) {
ColorConsole.fprintf(ColorConsole.Target.stderr,
ColorConsole.Color.Red,
"Failed to read codebook from input stream. Compression can't be used.");
return;
}
cacheFiles.add(readCacheFile);
}
}
ColorConsole.fprintf(ColorConsole.Target.stdout, ColorConsole.Color.Yellow, "Received %d cache files.", cacheFiles.size());
final ImageDecompressor[] decompressors = new ImageDecompressor[cacheFiles.size()];
for (int i = 0; i < cacheFiles.size(); i++) {
decompressors[i] = new ImageDecompressor(cacheFiles.get(i));
}
shortLoader.setDataDecompressors(decompressors, metadata.maxNumLevels, viewerCompressionOptions.getCompressFromMipmapLevel());
}
private void tryopen() {
try {
open();
} catch (final IOException e) {
throw new RuntimeException(e);
}
}
@Override
public VolatileGlobalCellCache getCacheControl() {
tryopen();
return cache;
}
public MipmapInfo getMipmapInfo(final int setupId) {
tryopen();
return metadata.perSetupMipmapInfo.get(setupId);
}
/**
* Checks whether the given image data is present on the server.
*
* @return true, if the given image data is present.
*/
public boolean existsImageData(final ViewLevelId id) {
return getDimsAndExistence(id).exists();
}
/**
* For images that are missing in the hdf5, a constant image is created. If
* the dimension of the missing image is known (see
* {@link #getDimsAndExistence(ViewLevelId)}) then use that. Otherwise
* create a 1x1x1 image.
*/
protected <T> RandomAccessibleInterval<T> getMissingDataImage(final ViewLevelId id, final T constant) {
final long[] d = getDimsAndExistence(id).getDimensions();
return Views.interval(new ConstantRandomAccessible<>(constant, 3), new FinalInterval(d));
}
public DimsAndExistence getDimsAndExistence(final ViewLevelId id) {
tryopen();
return metadata.dimsAndExistence.get(id);
}
int getCellIndex(final int timepoint, final int setup, final int level, final long[] globalPosition) {
final int[] cellDims = cellsDimensions.get(new ViewLevelId(timepoint, setup, level));
final int[] cellSize = getMipmapInfo(setup).getSubdivisions()[level];
final int[] cellPos = new int[]{
(int) globalPosition[0] / cellSize[0],
(int) globalPosition[1] / cellSize[1],
(int) globalPosition[2] / cellSize[2]};
return IntervalIndexer.positionToIndex(cellPos, cellDims);
}
/**
* Create a {@link VolatileCachedCellImg} backed by the cache. The
* {@code type} should be either {@link UnsignedShortType} and
* {@link VolatileUnsignedShortType}.
*/
protected <T extends NativeType<T>> RandomAccessibleInterval<T> prepareCachedImage(
final ViewLevelId id,
final LoadingStrategy loadingStrategy,
final T type) {
tryopen();
if (cache == null)
throw new RuntimeException("no connection open");
// final ViewLevelId id = new ViewLevelId( timepointId, setupId, level );
if (!existsImageData(id)) {
System.err.println(String.format(
"image data for timepoint %d setup %d level %d could not be found.",
id.getTimePointId(), id.getViewSetupId(), id.getLevel()));
return getMissingDataImage(id, type);
}
final int timepointId = id.getTimePointId();
final int setupId = id.getViewSetupId();
final int level = id.getLevel();
final MipmapInfo mipmapInfo = metadata.perSetupMipmapInfo.get(setupId);
final long[] dimensions = metadata.dimsAndExistence.get(id).getDimensions();
final int[] cellDimensions = mipmapInfo.getSubdivisions()[level];
final CellGrid grid = new CellGrid(dimensions, cellDimensions);
final int priority = mipmapInfo.getMaxLevel() - level;
final CacheHints cacheHints = new CacheHints(loadingStrategy, priority, false);
return cache.createImg(grid, timepointId, setupId, level, cacheHints, shortLoader, type);
}
public class SetupImgLoader extends AbstractViewerSetupImgLoader<UnsignedShortType, VolatileUnsignedShortType> {
private final int setupId;
protected SetupImgLoader(final int setupId) {
super(new UnsignedShortType(), new VolatileUnsignedShortType());
this.setupId = setupId;
}
@Override
public RandomAccessibleInterval<UnsignedShortType> getImage(final int timepointId, final int level, final ImgLoaderHint... hints) {
final ViewLevelId id = new ViewLevelId(timepointId, setupId, level);
return prepareCachedImage(id, LoadingStrategy.BLOCKING, type);
}
@Override
public RandomAccessibleInterval<VolatileUnsignedShortType> getVolatileImage(final int timepointId,
final int level,
final ImgLoaderHint... hints) {
final ViewLevelId id = new ViewLevelId(timepointId, setupId, level);
return prepareCachedImage(id, LoadingStrategy.BUDGETED, volatileType);
}
@Override
public double[][] getMipmapResolutions() {
return getMipmapInfo(setupId).getResolutions();
}
@Override
public AffineTransform3D[] getMipmapTransforms() {
return getMipmapInfo(setupId).getTransforms();
}
@Override
public int numMipmapLevels() {
return getMipmapInfo(setupId).getNumLevels();
}
}
}
......@@ -7,13 +7,13 @@
* %%
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
......@@ -29,62 +29,146 @@
*/
package bdv.img.remote;
import azgracompress.compression.CompressorDecompressorBase;
import azgracompress.compression.ImageDecompressor;
import azgracompress.utilities.ColorConsole;
import bdv.img.cache.CacheArrayLoader;
import net.imglib2.img.basictypeaccess.volatiles.array.VolatileShortArray;
import java.io.IOException;
import java.io.InputStream;
import java.net.HttpURLConnection;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.Arrays;
import java.util.Comparator;
import java.util.HashMap;
import bdv.img.cache.CacheArrayLoader;
import net.imglib2.img.basictypeaccess.volatiles.array.VolatileShortArray;
public class RemoteVolatileShortArrayLoader implements CacheArrayLoader<VolatileShortArray> {
private final RemoteImageLoader imgLoader;
private boolean requestCompressedData = false;
private HashMap<Integer, ImageDecompressor> decompressors;
private ImageDecompressor lowestResDecompressor;
private int compressFromMipmapLevel = 0;
/**
 * Creates a loader that fetches cell data from the server of the given image
 * loader (supplies the base URL and cell indexing).
 *
 * @param imgLoader the owning {@code RemoteImageLoader}.
 */
public RemoteVolatileShortArrayLoader(final RemoteImageLoader imgLoader) {
this.imgLoader = imgLoader;
}
/**
 * Builds the server request URL for a single cell:
 * {@code <baseUrl>?p=<baseParam>/<cellIndex>/<timepoint>/<setup>/<level>/<dx>/<dy>/<dz>/<minx>/<miny>/<minz>}.
 *
 * @param baseParam  request kind ("cell" or "cell_qcmp").
 * @param dimensions cell dimensions (x, y, z).
 * @param min        cell minimum in global coordinates (x, y, z).
 * @return the fully assembled request URL string.
 */
private String constructRequestUrl(final String baseParam,
                                   final int timepoint,
                                   final int setup,
                                   final int level,
                                   final int[] dimensions,
                                   final long[] min) {
    final int cellIndex = imgLoader.getCellIndex(timepoint, setup, level, min);
    final StringBuilder requestUrl = new StringBuilder(imgLoader.baseUrl);
    requestUrl.append("?p=").append(baseParam);
    final long[] pathParts = {
            cellIndex, timepoint, setup, level,
            dimensions[0], dimensions[1], dimensions[2],
            min[0], min[1], min[2]};
    for (final long part : pathParts) {
        requestUrl.append('/').append(part);
    }
    return requestUrl.toString();
}
@Override
public VolatileShortArray loadArray(final int timepoint,
                                    final int setup,
                                    final int level,
                                    final int[] dimensions,
                                    final long[] min) {
    // Delegate to the compressed endpoint when decompressors were configured
    // for this mipmap level.
    if (requestCompressedData && level >= compressFromMipmapLevel) {
        return loadArrayFromCompressedDataStream(timepoint, setup, level, dimensions, min);
    }
    final short[] data = new short[dimensions[0] * dimensions[1] * dimensions[2]];
    try {
        final URL url = new URL(constructRequestUrl("cell", timepoint, setup, level, dimensions, min));
        final byte[] buf = new byte[data.length * 2];
        // try-with-resources: the original leaked the stream when read() threw.
        try (final InputStream urlStream = url.openStream()) {
            // Read until the buffer is full or the stream ends.
            int read = 0;
            int l;
            while (read < buf.length && (l = urlStream.read(buf, read, buf.length - read)) > 0) {
                read += l;
            }
            // Decode big-endian 16-bit samples.
            for (int i = 0, j = 0; i < data.length; ++i, j += 2) {
                data[i] = (short) (((buf[j] & 0xff) << 8) | (buf[j + 1] & 0xff));
            }
        }
    } catch (final IOException e) {
        // Best effort, matching original behavior: log and return zero-filled data.
        // (MalformedURLException is an IOException subclass.)
        e.printStackTrace();
    }
    return new VolatileShortArray(data, true);
}
/**
 * Fetches and decompresses one cell from the QCMP endpoint.
 * On any failure a zero-filled array of the requested size is returned
 * (the original passed {@code null} into {@code VolatileShortArray},
 * which breaks downstream consumers).
 *
 * @param mipmapLevel mipmap level selecting the decompressor/codebook.
 * @return decompressed cell data, valid flag set.
 */
public VolatileShortArray loadArrayFromCompressedDataStream(final int timepoint,
                                                            final int setup,
                                                            final int mipmapLevel,
                                                            final int[] dimensions,
                                                            final long[] min) {
    short[] data = null;
    try {
        final URL url = new URL(constructRequestUrl("cell_qcmp", timepoint, setup, mipmapLevel, dimensions, min));
        final HttpURLConnection connection = (HttpURLConnection) url.openConnection();
        connection.setRequestMethod("GET");
        connection.connect();
        final int contentLength = connection.getContentLength();
        // try-with-resources closes the stream even when decompression throws;
        // the original leaked it on failure.
        try (final InputStream urlStream = connection.getInputStream()) {
            data = getDecompressorForMipmapLevel(mipmapLevel).decompressStream(urlStream, contentLength);
        } finally {
            connection.disconnect();
        }
    } catch (final Exception e) {
        e.printStackTrace();
    }
    if (data == null) {
        // Fall back to zero-filled data, mirroring loadArray's best-effort behavior.
        data = new short[dimensions[0] * dimensions[1] * dimensions[2]];
    }
    return new VolatileShortArray(data, true);
}
/**
 * Each element is one 16-bit sample.
 *
 * @return size of one element in bytes (2).
 */
@Override
public int getBytesPerElement() {
    return Short.BYTES;
}
/**
 * Picks the decompressor configured for the given mipmap level; levels
 * without a dedicated codebook fall back to the lowest-resolution one.
 */
private ImageDecompressor getDecompressorForMipmapLevel(final int mipmapLevel) {
    assert (decompressors != null && !decompressors.isEmpty());
    return decompressors.getOrDefault(mipmapLevel, lowestResDecompressor);
}
public void setDataDecompressors(final ImageDecompressor[] imageDecompressors,
final int levelCount,
final int compressFromMipmapLevel) {
Arrays.sort(imageDecompressors, Comparator.comparingInt(CompressorDecompressorBase::getBitsPerCodebookIndex));
final int numberOfDecompressionLevels = Math.min((levelCount - compressFromMipmapLevel), imageDecompressors.length);
decompressors = new HashMap<>(numberOfDecompressionLevels);
for (int mipmapLevel = 0; mipmapLevel < numberOfDecompressionLevels; mipmapLevel++) {
final ImageDecompressor decompressor = imageDecompressors[(imageDecompressors.length - 1) - mipmapLevel];
final int cbSize = (int) Math.pow(2, decompressor.getBitsPerCodebookIndex());
final int actualKey = mipmapLevel + compressFromMipmapLevel;
decompressors.put(actualKey, decompressor);
public class RemoteVolatileShortArrayLoader implements CacheArrayLoader< VolatileShortArray >
{
private final RemoteImageLoader imgLoader;
public RemoteVolatileShortArrayLoader( final RemoteImageLoader imgLoader )
{
this.imgLoader = imgLoader;
}
@Override
public VolatileShortArray loadArray( final int timepoint, final int setup, final int level, final int[] dimensions, final long[] min ) throws InterruptedException
{
final int index = imgLoader.getCellIndex( timepoint, setup, level, min );
final short[] data = new short[ dimensions[ 0 ] * dimensions[ 1 ] * dimensions[ 2 ] ];
try
{
final URL url = new URL( String.format( "%s?p=cell/%d/%d/%d/%d/%d/%d/%d/%d/%d/%d",
imgLoader.baseUrl,
index,
timepoint,
setup,
level,
dimensions[ 0 ],
dimensions[ 1 ],
dimensions[ 2 ],
min[ 0 ],
min[ 1 ],
min[ 2 ] ) );
final InputStream s = url.openStream();
final byte[] buf = new byte[ data.length * 2 ];
for ( int i = 0, l = s.read( buf, 0, buf.length ); l > 0; i += l, l = s.read( buf, i, buf.length - i ) );
for ( int i = 0, j = 0; i < data.length; ++i, j += 2 )
data[ i ] = ( short ) ( ( ( buf[ j ] & 0xff ) << 8 ) | ( buf[ j + 1 ] & 0xff ) );
s.close();
}
catch ( final MalformedURLException e )
{
e.printStackTrace();
}
catch ( final IOException e )
{
e.printStackTrace();
}
return new VolatileShortArray( data, true );
}
@Override
public int getBytesPerElement() {
return 2;
}
ColorConsole.fprintf(ColorConsole.Target.stdout, ColorConsole.Color.Yellow,
"Created decompressor for mipmap level %d with codebook of size %d.",
actualKey, cbSize);
lowestResDecompressor = decompressor;
}
this.compressFromMipmapLevel = compressFromMipmapLevel;
requestCompressedData = !decompressors.isEmpty();
}
}
......@@ -7,13 +7,13 @@
* %%
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
......@@ -41,31 +41,36 @@ import mpicbg.spim.data.generic.sequence.XmlIoBasicImgLoader;
import org.jdom2.Element;
@ImgLoaderIo( format = "bdv.remote", type = RemoteImageLoader.class )
public class XmlIoRemoteImageLoader implements XmlIoBasicImgLoader< RemoteImageLoader >
{
@ImgLoaderIo(format = "bdv.remote", type = RemoteImageLoader.class)
public class XmlIoRemoteImageLoader implements XmlIoBasicImgLoader<RemoteImageLoader> {
@Override
public Element toXml( final RemoteImageLoader imgLoader, final File basePath )
{
final Element elem = new Element( "ImageLoader" );
elem.setAttribute( IMGLOADER_FORMAT_ATTRIBUTE_NAME, "bdv.remote" );
elem.addContent( XmlHelpers.textElement( "baseUrl", imgLoader.baseUrl ) );
return elem;
}
private boolean allowCompression = false;
@Override
public RemoteImageLoader fromXml( final Element elem, final File basePath, final AbstractSequenceDescription< ?, ?, ? > sequenceDescription )
{
final String baseUrl = elem.getChildText( "baseUrl" );
try
{
return new RemoteImageLoader( baseUrl );
}
catch ( final IOException e )
{
throw new RuntimeException( e );
}
}
/**
 * Serializes the loader: an {@code <ImageLoader>} element carrying the
 * "bdv.remote" format attribute and a {@code baseUrl} child element.
 */
@Override
public Element toXml(final RemoteImageLoader imgLoader, final File basePath) {
    final Element imgLoaderElement = new Element("ImageLoader");
    imgLoaderElement.addContent(XmlHelpers.textElement("baseUrl", imgLoader.baseUrl));
    imgLoaderElement.setAttribute(IMGLOADER_FORMAT_ATTRIBUTE_NAME, "bdv.remote");
    return imgLoaderElement;
}
/**
 * Builds a {@link RemoteImageLoader} from the XML element's {@code baseUrl}.
 * IOExceptions are rethrown as unchecked, preserving the cause.
 */
@Override
public RemoteImageLoader fromXml(final Element elem,
                                 final File basePath,
                                 final AbstractSequenceDescription<?, ?, ?> sequenceDescription) {
    try {
        // NOTE(review): allowCompression is passed as the constructor's second
        // (doOpen) argument — confirm this coupling is intended.
        return new RemoteImageLoader(elem.getChildText("baseUrl"), allowCompression);
    } catch (final IOException e) {
        throw new RuntimeException(e);
    }
}
/** @return whether loaders created by {@code fromXml} will have compression allowed. */
public boolean shouldAllowCompression() {
return allowCompression;
}
/** Enables/disables compression for loaders created by subsequent {@code fromXml} calls. */
public void setAllowCompression(boolean allowCompression) {
this.allowCompression = allowCompression;
}
}
......@@ -7,13 +7,13 @@
* %%
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
......@@ -29,10 +29,9 @@
*/
package bdv.spimdata;
import static mpicbg.spim.data.XmlKeys.SPIMDATA_TAG;
import java.io.File;
import azgracompress.ViewerCompressionOptions;
import bdv.img.remote.RemoteImageLoader;
import bdv.spimdata.legacy.XmlIoSpimDataMinimalLegacy;
import mpicbg.spim.data.SpimDataException;
import mpicbg.spim.data.SpimDataIOException;
import mpicbg.spim.data.generic.XmlIoAbstractSpimData;
......@@ -42,47 +41,53 @@ import mpicbg.spim.data.generic.sequence.XmlIoBasicViewSetups;
import mpicbg.spim.data.registration.XmlIoViewRegistrations;
import mpicbg.spim.data.sequence.XmlIoMissingViews;
import mpicbg.spim.data.sequence.XmlIoTimePoints;
import org.jdom2.Document;
import org.jdom2.Element;
import org.jdom2.input.SAXBuilder;
import bdv.spimdata.legacy.XmlIoSpimDataMinimalLegacy;
import java.io.File;
import static mpicbg.spim.data.XmlKeys.SPIMDATA_TAG;
public class XmlIoSpimDataMinimal extends XmlIoAbstractSpimData<SequenceDescriptionMinimal, SpimDataMinimal> {
private ViewerCompressionOptions compressionOptions;
/**
 * Configures the abstract SpimData XML I/O with the minimal sequence
 * description (time points, basic view setups, missing views) and view
 * registrations serializers.
 */
public XmlIoSpimDataMinimal() {
super(SpimDataMinimal.class,
new XmlIoAbstractSequenceDescription<>(
SequenceDescriptionMinimal.class,
new XmlIoTimePoints(),
new XmlIoBasicViewSetups<>(BasicViewSetup.class),
new XmlIoMissingViews()),
new XmlIoViewRegistrations());
}
public class XmlIoSpimDataMinimal extends XmlIoAbstractSpimData< SequenceDescriptionMinimal, SpimDataMinimal >
{
public XmlIoSpimDataMinimal()
{
super( SpimDataMinimal.class,
new XmlIoAbstractSequenceDescription<>(
SequenceDescriptionMinimal.class,
new XmlIoTimePoints(),
new XmlIoBasicViewSetups<>( BasicViewSetup.class ),
new XmlIoMissingViews() ),
new XmlIoViewRegistrations() );
}
/**
 * Stores compression options to be forwarded to a {@code RemoteImageLoader}
 * when one is produced by a later load. Call before loading for the options
 * to take effect.
 *
 * @param ops viewer compression options (may be null to leave unconfigured).
 */
public void setViewerCompressionOptions(final ViewerCompressionOptions ops) {
this.compressionOptions = ops;
}
@Override
public SpimDataMinimal load( final String xmlFilename ) throws SpimDataException
{
final SAXBuilder sax = new SAXBuilder();
Document doc;
try
{
doc = sax.build( xmlFilename );
}
catch ( final Exception e )
{
throw new SpimDataIOException( e );
}
final Element root = doc.getRootElement();
@Override
public SpimDataMinimal load(final String xmlFilename) throws SpimDataException {
final SAXBuilder sax = new SAXBuilder();
final Document doc;
try {
doc = sax.build(xmlFilename);
} catch (final Exception e) {
throw new SpimDataIOException(e);
}
final Element root = doc.getRootElement();
if ( root.getName().equals( "SequenceDescription" ) )
return XmlIoSpimDataMinimalLegacy.fromXml( root, new File( xmlFilename ) );
if (root.getName().equals("SequenceDescription"))
return XmlIoSpimDataMinimalLegacy.fromXml(root, new File(xmlFilename));
if ( root.getName() != SPIMDATA_TAG )
throw new RuntimeException( "expected <" + SPIMDATA_TAG + "> root element. wrong file?" );
if (root.getName() != SPIMDATA_TAG)
throw new RuntimeException("expected <" + SPIMDATA_TAG + "> root element. wrong file?");
return fromXml( root, new File( xmlFilename ) );
}
final SpimDataMinimal spimDataMinimal = fromXml(root, new File(xmlFilename));
if (spimDataMinimal.getSequenceDescription().getImgLoader() instanceof RemoteImageLoader) {
final RemoteImageLoader remoteImageLoader = (RemoteImageLoader) spimDataMinimal.getSequenceDescription().getImgLoader();
remoteImageLoader.setViewerCompressionOptions(compressionOptions);
}
return spimDataMinimal;
}
}
......@@ -165,6 +165,7 @@ public class XmlIoSpimDataMinimalLegacy
{
final Element elem = sequenceDescriptionElem.getChild( "ImageLoader" );
final String classn = elem.getAttributeValue( "class" );
if ( classn.equals( "viewer.hdf5.Hdf5ImageLoader" ) || classn.equals( "bdv.img.hdf5.Hdf5ImageLoader" ) )
{
final String path = loadPath( elem, "hdf5", basePath ).toString();
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please register or sign in to comment