Commit e9b5073b authored by Tobias Pietzsch

N5 export and ImgLoader

parent 12f59331
@@ -197,6 +197,18 @@
<artifactId>scijava-listeners</artifactId>
<version>1.0.0-beta-2</version>
</dependency>
<dependency>
<groupId>org.janelia.saalfeldlab</groupId>
<artifactId>n5-imglib2</artifactId>
<version>3.4.1</version>
</dependency>
<dependency>
<groupId>org.janelia.saalfeldlab</groupId>
<artifactId>n5</artifactId>
<version>2.1.1</version>
</dependency>
<!-- test dependencies -->
<dependency>
<groupId>junit</groupId>
......
package bdv.export.n5;
import bdv.export.ExportMipmapInfo;
import bdv.export.ExportScalePyramid;
import bdv.export.ProgressWriter;
import bdv.export.ProgressWriterNull;
import bdv.export.SubTaskProgressWriter;
import bdv.export.ExportScalePyramid.AfterEachPlane;
import bdv.export.ExportScalePyramid.LoopbackHeuristic;
import bdv.img.cache.SimpleCacheArrayLoader;
import bdv.img.n5.N5ImageLoader;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.function.Function;
import java.util.stream.Collectors;
import mpicbg.spim.data.generic.sequence.AbstractSequenceDescription;
import mpicbg.spim.data.generic.sequence.BasicImgLoader;
import mpicbg.spim.data.generic.sequence.BasicSetupImgLoader;
import mpicbg.spim.data.generic.sequence.BasicViewSetup;
import mpicbg.spim.data.sequence.TimePoint;
import mpicbg.spim.data.sequence.ViewId;
import net.imglib2.RandomAccessibleInterval;
import net.imglib2.cache.img.ReadOnlyCachedCellImgFactory;
import net.imglib2.img.cell.Cell;
import net.imglib2.img.cell.CellGrid;
import net.imglib2.type.NativeType;
import net.imglib2.type.numeric.RealType;
import net.imglib2.util.Cast;
import org.janelia.saalfeldlab.n5.ByteArrayDataBlock;
import org.janelia.saalfeldlab.n5.Compression;
import org.janelia.saalfeldlab.n5.DataBlock;
import org.janelia.saalfeldlab.n5.DataType;
import org.janelia.saalfeldlab.n5.DatasetAttributes;
import org.janelia.saalfeldlab.n5.DoubleArrayDataBlock;
import org.janelia.saalfeldlab.n5.FloatArrayDataBlock;
import org.janelia.saalfeldlab.n5.IntArrayDataBlock;
import org.janelia.saalfeldlab.n5.LongArrayDataBlock;
import org.janelia.saalfeldlab.n5.N5FSWriter;
import org.janelia.saalfeldlab.n5.N5Writer;
import org.janelia.saalfeldlab.n5.ShortArrayDataBlock;
import org.janelia.saalfeldlab.n5.imglib2.N5Utils;
import static bdv.img.n5.BdvN5Format.DATA_TYPE_KEY;
import static bdv.img.n5.BdvN5Format.DOWNSAMPLING_FACTORS_KEY;
import static bdv.img.n5.BdvN5Format.getPathName;
import static net.imglib2.cache.img.ReadOnlyCachedCellImgOptions.options;
/**
* @author Tobias Pietzsch
* @author John Bogovic
*/
public class WriteSequenceToN5
{
/**
* Create an n5 group containing image data from all views and all
* timepoints in a chunked, mipmapped representation.
*
* @param seq
*            description of the sequence to be stored as n5. (The
*            {@link AbstractSequenceDescription} contains the number of
*            setups and timepoints as well as a {@link BasicImgLoader}
*            that provides the image data. Registration information is not
*            needed here; it goes into the accompanying xml.)
* @param perSetupMipmapInfo
*            maps from setup {@link BasicViewSetup#getId() id} to the
*            {@link ExportMipmapInfo} for that setup. The
*            {@link ExportMipmapInfo} contains, for each mipmap level, the
*            subsampling factors and subdivision block sizes.
* @param compression
*            n5 compression scheme and settings to use for the written
*            data blocks.
* @param n5File
*            n5 root.
* @param loopbackHeuristic
*            heuristic to decide whether to create each resolution level by
*            reading pixels from the original image or by reading back a
*            finer resolution level already written to the n5. May be null
*            (in that case the original image is always used).
* @param afterEachPlane
*            called after each "plane of chunks" is written, giving the
*            opportunity to clear caches, etc.
* @param numCellCreatorThreads
*            the number of threads that will be instantiated to generate
*            cell data. Must be at least 1. (In addition to the cell
*            creator threads there is one writer thread that saves the
*            generated data to n5.)
* @param progressWriter
*            completion ratio and status output will be directed here.
*/
public static void writeN5File(
final AbstractSequenceDescription< ?, ?, ? > seq,
final Map< Integer, ExportMipmapInfo > perSetupMipmapInfo,
final Compression compression,
final File n5File,
final LoopbackHeuristic loopbackHeuristic,
final AfterEachPlane afterEachPlane,
final int numCellCreatorThreads,
ProgressWriter progressWriter ) throws IOException
{
if ( progressWriter == null )
progressWriter = new ProgressWriterNull();
progressWriter.setProgress( 0 );
final BasicImgLoader imgLoader = seq.getImgLoader();
for ( final BasicViewSetup setup : seq.getViewSetupsOrdered() )
{
final Object type = imgLoader.getSetupImgLoader( setup.getId() ).getImageType();
if ( !( type instanceof RealType &&
type instanceof NativeType &&
N5Utils.dataType( Cast.unchecked( type ) ) != null ) )
throw new IllegalArgumentException( "Unsupported pixel type: " + type.getClass().getSimpleName() );
}
final List< Integer > timepointIds = seq.getTimePoints().getTimePointsOrdered().stream()
.map( TimePoint::getId )
.collect( Collectors.toList() );
final List< Integer > setupIds = seq.getViewSetupsOrdered().stream()
.map( BasicViewSetup::getId )
.collect( Collectors.toList() );
N5Writer n5 = new N5FSWriter( n5File.getAbsolutePath() );
// write Mipmap descriptions
for ( final int setupId : setupIds )
{
final String pathName = getPathName( setupId );
final int[][] downsamplingFactors = perSetupMipmapInfo.get( setupId ).getExportResolutions();
final DataType dataType = N5Utils.dataType( Cast.unchecked( imgLoader.getSetupImgLoader( setupId ).getImageType() ) );
n5.createGroup( pathName );
n5.setAttribute( pathName, DOWNSAMPLING_FACTORS_KEY, downsamplingFactors );
n5.setAttribute( pathName, DATA_TYPE_KEY, dataType );
}
// calculate number of tasks for progressWriter
int numTasks = 0;
for ( final int timepointIdSequence : timepointIds )
for ( final int setupIdSequence : setupIds )
if ( seq.getViewDescriptions().get( new ViewId( timepointIdSequence, setupIdSequence ) ).isPresent() )
numTasks++;
int numCompletedTasks = 0;
final ExecutorService executorService = Executors.newFixedThreadPool( numCellCreatorThreads );
try
{
// write image data for all views
final int numTimepoints = timepointIds.size();
int timepointIndex = 0;
for ( final int timepointId : timepointIds )
{
progressWriter.out().printf( "processing timepoint %d / %d\n", ++timepointIndex, numTimepoints );
// assemble the viewsetups that are present in this timepoint
final ArrayList< Integer > setupsTimePoint = new ArrayList<>();
for ( final int setupId : setupIds )
if ( seq.getViewDescriptions().get( new ViewId( timepointId, setupId ) ).isPresent() )
setupsTimePoint.add( setupId );
final int numSetups = setupsTimePoint.size();
int setupIndex = 0;
for ( final int setupId : setupsTimePoint )
{
progressWriter.out().printf( "processing setup %d / %d\n", ++setupIndex, numSetups );
final ExportMipmapInfo mipmapInfo = perSetupMipmapInfo.get( setupId );
final double startCompletionRatio = ( double ) numCompletedTasks++ / numTasks;
final double endCompletionRatio = ( double ) numCompletedTasks / numTasks;
final ProgressWriter subProgressWriter = new SubTaskProgressWriter( progressWriter, startCompletionRatio, endCompletionRatio );
writeScalePyramid(
n5, compression,
imgLoader, setupId, timepointId, mipmapInfo,
executorService, numCellCreatorThreads,
loopbackHeuristic, afterEachPlane, subProgressWriter );
}
}
}
finally
{
executorService.shutdown();
}
progressWriter.setProgress( 1.0 );
}
static < T extends RealType< T > & NativeType< T > > void writeScalePyramid(
final N5Writer n5,
final Compression compression,
final BasicImgLoader imgLoader,
final int setupId,
final int timepointId,
final ExportMipmapInfo mipmapInfo,
final ExecutorService executorService,
final int numThreads,
final LoopbackHeuristic loopbackHeuristic,
final AfterEachPlane afterEachPlane,
ProgressWriter progressWriter ) throws IOException
{
final BasicSetupImgLoader< T > setupImgLoader = Cast.unchecked( imgLoader.getSetupImgLoader( setupId ) );
final RandomAccessibleInterval< T > img = setupImgLoader.getImage( timepointId );
final T type = setupImgLoader.getImageType();
final N5DatasetIO< T > io = new N5DatasetIO<>( n5, compression, setupId, timepointId, type );
ExportScalePyramid.writeScalePyramid(
img, type, mipmapInfo, io,
executorService, numThreads,
loopbackHeuristic, afterEachPlane, progressWriter );
}
static class N5Dataset
{
final String pathName;
final DatasetAttributes attributes;
public N5Dataset( final String pathName, final DatasetAttributes attributes )
{
this.pathName = pathName;
this.attributes = attributes;
}
}
static class N5DatasetIO< T extends RealType< T > & NativeType< T > > implements ExportScalePyramid.DatasetIO< N5Dataset, T >
{
private final N5Writer n5;
private final Compression compression;
private final int setupId;
private final int timepointId;
private final DataType dataType;
private final T type;
private final Function< ExportScalePyramid.Block< T >, DataBlock< ? > > getDataBlock;
public N5DatasetIO( final N5Writer n5, final Compression compression, final int setupId, final int timepointId, final T type )
{
this.n5 = n5;
this.compression = compression;
this.setupId = setupId;
this.timepointId = timepointId;
this.dataType = N5Utils.dataType( type );
this.type = type;
switch ( dataType )
{
case UINT8:
getDataBlock = b -> new ByteArrayDataBlock( b.getSize(), b.getGridPosition(), Cast.unchecked( b.getData().getStorageArray() ) );
break;
case UINT16:
getDataBlock = b -> new ShortArrayDataBlock( b.getSize(), b.getGridPosition(), Cast.unchecked( b.getData().getStorageArray() ) );
break;
case UINT32:
getDataBlock = b -> new IntArrayDataBlock( b.getSize(), b.getGridPosition(), Cast.unchecked( b.getData().getStorageArray() ) );
break;
case UINT64:
getDataBlock = b -> new LongArrayDataBlock( b.getSize(), b.getGridPosition(), Cast.unchecked( b.getData().getStorageArray() ) );
break;
case INT8:
getDataBlock = b -> new ByteArrayDataBlock( b.getSize(), b.getGridPosition(), Cast.unchecked( b.getData().getStorageArray() ) );
break;
case INT16:
getDataBlock = b -> new ShortArrayDataBlock( b.getSize(), b.getGridPosition(), Cast.unchecked( b.getData().getStorageArray() ) );
break;
case INT32:
getDataBlock = b -> new IntArrayDataBlock( b.getSize(), b.getGridPosition(), Cast.unchecked( b.getData().getStorageArray() ) );
break;
case INT64:
getDataBlock = b -> new LongArrayDataBlock( b.getSize(), b.getGridPosition(), Cast.unchecked( b.getData().getStorageArray() ) );
break;
case FLOAT32:
getDataBlock = b -> new FloatArrayDataBlock( b.getSize(), b.getGridPosition(), Cast.unchecked( b.getData().getStorageArray() ) );
break;
case FLOAT64:
getDataBlock = b -> new DoubleArrayDataBlock( b.getSize(), b.getGridPosition(), Cast.unchecked( b.getData().getStorageArray() ) );
break;
default:
throw new IllegalArgumentException();
}
}
@Override
public N5Dataset createDataset( final int level, final long[] dimensions, final int[] blockSize ) throws IOException
{
final String pathName = getPathName( setupId, timepointId, level );
n5.createDataset( pathName, dimensions, blockSize, dataType, compression );
final DatasetAttributes attributes = n5.getDatasetAttributes( pathName );
return new N5Dataset( pathName, attributes );
}
@Override
public void writeBlock( final N5Dataset dataset, final ExportScalePyramid.Block< T > dataBlock ) throws IOException
{
n5.writeBlock( dataset.pathName, dataset.attributes, getDataBlock.apply( dataBlock ) );
}
@Override
public void flush( final N5Dataset dataset )
{}
@Override
public RandomAccessibleInterval< T > getImage( final int level ) throws IOException
{
final String pathName = getPathName( setupId, timepointId, level );
final DatasetAttributes attributes = n5.getDatasetAttributes( pathName );
final long[] dimensions = attributes.getDimensions();
final int[] cellDimensions = attributes.getBlockSize();
final CellGrid grid = new CellGrid( dimensions, cellDimensions );
final SimpleCacheArrayLoader< ? > cacheArrayLoader = N5ImageLoader.createCacheArrayLoader( n5, pathName );
return new ReadOnlyCachedCellImgFactory().createWithCacheLoader(
dimensions, type,
key -> {
final int n = grid.numDimensions();
final long[] cellMin = new long[ n ];
final int[] cellDims = new int[ n ];
final long[] cellGridPosition = new long[ n ];
grid.getCellDimensions( key, cellMin, cellDims );
grid.getCellGridPositionFlat( key, cellGridPosition );
return new Cell<>( cellDims, cellMin, cacheArrayLoader.loadArray( cellGridPosition ) );
},
options().cellDimensions( cellDimensions ) );
}
}
}
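
A minimal usage sketch for the exporter above (illustration only, not part of this commit): it assumes a sequence and per-setup mipmap info are already available, uses GzipCompression as an example compression scheme, and relies on the imports of the file above.

// Hypothetical helper: export an already-loaded sequence to an n5 container.
static void exportToN5(
		final AbstractSequenceDescription< ?, ?, ? > seq,
		final Map< Integer, ExportMipmapInfo > perSetupMipmapInfo,
		final File n5File ) throws IOException
{
	WriteSequenceToN5.writeN5File(
			seq,
			perSetupMipmapInfo,
			new GzipCompression(), // any org.janelia.saalfeldlab.n5.Compression
			n5File,
			null, // loopbackHeuristic: null = always read from the original image
			null, // afterEachPlane: assumed here to accept null; otherwise pass a no-op callback
			4,    // numCellCreatorThreads, must be at least 1
			null ); // progressWriter: null falls back to ProgressWriterNull
}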
package bdv.img.n5;
public class BdvN5Format
{
public static final String DOWNSAMPLING_FACTORS_KEY = "downsamplingFactors";
public static final String DATA_TYPE_KEY = "dataType";
public static String getPathName( final int setupId )
{
return String.format( "setup%02d", setupId );
}
public static String getPathName( final int setupId, final int timepointId )
{
return String.format( "setup%02d/timepoint%05d", setupId, timepointId );
}
public static String getPathName( final int setupId, final int timepointId, final int level )
{
return String.format( "setup%02d/timepoint%05d/s%d", setupId, timepointId, level );
}
}
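
For illustration, the dataset paths produced by the helpers above (the ids are example values):

BdvN5Format.getPathName( 1 );         // "setup01"
BdvN5Format.getPathName( 1, 12 );     // "setup01/timepoint00012"
BdvN5Format.getPathName( 1, 12, 3 );  // "setup01/timepoint00012/s3"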
/*
* #%L
* BigDataViewer core classes with minimal dependencies
* %%
* Copyright (C) 2012 - 2016 Tobias Pietzsch, Stephan Saalfeld, Stephan Preibisch,
* Jean-Yves Tinevez, HongKee Moon, Johannes Schindelin, Curtis Rueden, John Bogovic
* %%
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
* #L%
*/
package bdv.img.n5;
import bdv.AbstractViewerSetupImgLoader;
import bdv.ViewerImgLoader;
import bdv.cache.CacheControl;
import bdv.img.cache.SimpleCacheArrayLoader;
import bdv.img.cache.VolatileGlobalCellCache;
import bdv.util.ConstantRandomAccessible;
import bdv.util.MipmapTransforms;
import java.io.File;
import java.io.IOException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.Callable;
import java.util.function.Function;
import mpicbg.spim.data.generic.sequence.AbstractSequenceDescription;
import mpicbg.spim.data.generic.sequence.BasicViewSetup;
import mpicbg.spim.data.generic.sequence.ImgLoaderHint;
import mpicbg.spim.data.sequence.MultiResolutionImgLoader;
import mpicbg.spim.data.sequence.MultiResolutionSetupImgLoader;
import mpicbg.spim.data.sequence.VoxelDimensions;
import net.imglib2.Dimensions;
import net.imglib2.FinalDimensions;
import net.imglib2.FinalInterval;
import net.imglib2.RandomAccessibleInterval;
import net.imglib2.Volatile;
import net.imglib2.cache.queue.BlockingFetchQueues;
import net.imglib2.cache.queue.FetcherThreads;
import net.imglib2.cache.volatiles.CacheHints;
import net.imglib2.cache.volatiles.LoadingStrategy;
import net.imglib2.img.basictypeaccess.volatiles.array.VolatileByteArray;
import net.imglib2.img.basictypeaccess.volatiles.array.VolatileDoubleArray;
import net.imglib2.img.basictypeaccess.volatiles.array.VolatileFloatArray;
import net.imglib2.img.basictypeaccess.volatiles.array.VolatileIntArray;
import net.imglib2.img.basictypeaccess.volatiles.array.VolatileLongArray;
import net.imglib2.img.basictypeaccess.volatiles.array.VolatileShortArray;
import net.imglib2.img.cell.CellGrid;
import net.imglib2.img.cell.CellImg;
import net.imglib2.realtransform.AffineTransform3D;
import net.imglib2.type.NativeType;
import net.imglib2.type.numeric.integer.ByteType;
import net.imglib2.type.numeric.integer.IntType;
import net.imglib2.type.numeric.integer.LongType;
import net.imglib2.type.numeric.integer.ShortType;
import net.imglib2.type.numeric.integer.UnsignedByteType;
import net.imglib2.type.numeric.integer.UnsignedIntType;
import net.imglib2.type.numeric.integer.UnsignedLongType;
import net.imglib2.type.numeric.integer.UnsignedShortType;
import net.imglib2.type.numeric.real.DoubleType;
import net.imglib2.type.numeric.real.FloatType;
import net.imglib2.type.volatiles.VolatileByteType;
import net.imglib2.type.volatiles.VolatileDoubleType;
import net.imglib2.type.volatiles.VolatileFloatType;
import net.imglib2.type.volatiles.VolatileIntType;
import net.imglib2.type.volatiles.VolatileLongType;
import net.imglib2.type.volatiles.VolatileShortType;
import net.imglib2.type.volatiles.VolatileUnsignedByteType;
import net.imglib2.type.volatiles.VolatileUnsignedIntType;
import net.imglib2.type.volatiles.VolatileUnsignedLongType;
import net.imglib2.type.volatiles.VolatileUnsignedShortType;
import net.imglib2.util.Cast;
import net.imglib2.view.Views;
import org.janelia.saalfeldlab.n5.DataBlock;
import org.janelia.saalfeldlab.n5.DataType;
import org.janelia.saalfeldlab.n5.DatasetAttributes;
import org.janelia.saalfeldlab.n5.N5FSReader;
import org.janelia.saalfeldlab.n5.N5Reader;
import static bdv.img.n5.BdvN5Format.DATA_TYPE_KEY;
import static bdv.img.n5.BdvN5Format.DOWNSAMPLING_FACTORS_KEY;
import static bdv.img.n5.BdvN5Format.getPathName;
public class N5ImageLoader implements ViewerImgLoader, MultiResolutionImgLoader
{
private final File n5File;
// TODO: it would be good if this were not needed
// (the available setups could be found from the n5 instead)
private final AbstractSequenceDescription< ?, ?, ? > seq;
/**
* Maps setup id to {@link SetupImgLoader}.
*/
private final Map< Integer, SetupImgLoader > setupImgLoaders = new HashMap<>();
public N5ImageLoader( final File n5File, final AbstractSequenceDescription< ?, ?, ? > sequenceDescription )
{
this.n5File = n5File;
this.seq = sequenceDescription;
}
public File getN5File()
{
return n5File;
}
private volatile boolean isOpen = false;
private FetcherThreads fetchers;
private VolatileGlobalCellCache cache;
private N5Reader n5;
private void open()
{
if ( !isOpen )
{
synchronized ( this )
{
if ( isOpen )
return;
try
{
this.n5 = new N5FSReader( n5File.getAbsolutePath() );
int maxNumLevels = 0;
final List< ? extends BasicViewSetup > setups = seq.getViewSetupsOrdered();
for ( final BasicViewSetup setup : setups )
{
final int setupId = setup.getId();
final SetupImgLoader setupImgLoader = createSetupImgLoader( setupId );
setupImgLoaders.put( setupId, setupImgLoader );
maxNumLevels = Math.max( maxNumLevels, setupImgLoader.numMipmapLevels() );
}
final int numFetcherThreads = 1;
final BlockingFetchQueues< Callable< ? > > queue = new BlockingFetchQueues<>( maxNumLevels, numFetcherThreads );
fetchers = new FetcherThreads( queue, numFetcherThreads );
cache = new VolatileGlobalCellCache( queue );
}
catch ( IOException e )
{
throw new RuntimeException( e );
}
isOpen = true;
}
}
}
/**
* Clear the cache. Images that were obtained from
* this loader before {@link #close()} will stop working. Requesting images
* after {@link #close()} will cause the n5 to be reopened (with a
* new cache).
*/
public void close()
{
if ( isOpen )
{
synchronized ( this )
{
if ( !isOpen )
return;
fetchers.shutdown();
cache.clearCache();
isOpen = false;
}
}
}
@Override
public SetupImgLoader getSetupImgLoader( final int setupId )
{
open();
return setupImgLoaders.get( setupId );
}
private < T extends NativeType< T >, V extends Volatile< T > & NativeType< V > > SetupImgLoader< T, V > createSetupImgLoader( final int setupId ) throws IOException
{
final String pathName = getPathName( setupId );
final DataType dataType = n5.getAttribute( pathName, DATA_TYPE_KEY, DataType.class );
switch ( dataType )
{
case UINT8:
return Cast.unchecked( new SetupImgLoader<>( setupId, new UnsignedByteType(), new VolatileUnsignedByteType() ) );
case UINT16:
return Cast.unchecked( new SetupImgLoader<>( setupId, new UnsignedShortType(), new VolatileUnsignedShortType() ) );
case UINT32:
return Cast.unchecked( new SetupImgLoader<>( setupId, new UnsignedIntType(), new VolatileUnsignedIntType() ) );
case UINT64:
return Cast.unchecked( new SetupImgLoader<>( setupId, new UnsignedLongType(), new VolatileUnsignedLongType() ) );
case INT8:
return Cast.unchecked( new SetupImgLoader<>( setupId, new ByteType(), new VolatileByteType() ) );
case INT16:
return Cast.unchecked( new SetupImgLoader<>( setupId, new ShortType(), new VolatileShortType() ) );
case INT32:
return Cast.unchecked( new SetupImgLoader<>( setupId, new IntType(), new VolatileIntType() ) );
case INT64:
return Cast.unchecked( new SetupImgLoader<>( setupId, new LongType(), new VolatileLongType() ) );
case FLOAT32:
return Cast.unchecked( new SetupImgLoader<>( setupId, new FloatType(), new VolatileFloatType() ) );
case FLOAT64:
return Cast.unchecked( new SetupImgLoader<>( setupId, new DoubleType(), new VolatileDoubleType() ) );
}
return null;
}
@Override
public CacheControl getCacheControl()
{
open();
return cache;
}
public class SetupImgLoader< T extends NativeType< T >, V extends Volatile< T > & NativeType< V > >
extends AbstractViewerSetupImgLoader< T, V >
implements MultiResolutionSetupImgLoader< T >
{
private final int setupId;
private final double[][] mipmapResolutions;
private final AffineTransform3D[] mipmapTransforms;
public SetupImgLoader( final int setupId, final T type, final V volatileType ) throws IOException
{
super( type, volatileType );
this.setupId = setupId;
final String pathName = getPathName( setupId );
mipmapResolutions = n5.getAttribute( pathName, DOWNSAMPLING_FACTORS_KEY, double[][].class );
mipmapTransforms = new AffineTransform3D[ mipmapResolutions.length ];
for ( int level = 0; level < mipmapResolutions.length; level++ )
mipmapTransforms[ level ] = MipmapTransforms.getMipmapTransformDefault( mipmapResolutions[ level ] );
}
@Override
public RandomAccessibleInterval< V > getVolatileImage( final int timepointId, final int level, final ImgLoaderHint... hints )
{
return prepareCachedImage( timepointId, level, LoadingStrategy.BUDGETED, volatileType );
}
@Override
public RandomAccessibleInterval< T > getImage( final int timepointId, final int level, final ImgLoaderHint... hints )
{
return prepareCachedImage( timepointId, level, LoadingStrategy.BLOCKING, type );
}
@Override
public Dimensions getImageSize( final int timepointId, final int level )
{
try
{
final String pathName = getPathName( setupId, timepointId, level );
final DatasetAttributes attributes = n5.getDatasetAttributes( pathName );
return new FinalDimensions( attributes.getDimensions() );
}
catch( Exception e )
{
return null;
}
}
@Override
public double[][] getMipmapResolutions()
{
return mipmapResolutions;
}
@Override
public AffineTransform3D[] getMipmapTransforms()
{
return mipmapTransforms;
}
@Override
public int numMipmapLevels()
{
return mipmapResolutions.length;
}
@Override
public VoxelDimensions getVoxelSize( final int timepointId )
{
return null;
}
/**
* Create a {@link CellImg} backed by the cache.
*/
private < T extends NativeType< T > > RandomAccessibleInterval< T > prepareCachedImage( final int timepointId, final int level, final LoadingStrategy loadingStrategy, final T type )
{
try
{
final String pathName = getPathName( setupId, timepointId, level );
final DatasetAttributes attributes = n5.getDatasetAttributes( pathName );
final long[] dimensions = attributes.getDimensions();
final int[] cellDimensions = attributes.getBlockSize();
final CellGrid grid = new CellGrid( dimensions, cellDimensions );
final int priority = numMipmapLevels() - 1 - level;
final CacheHints cacheHints = new CacheHints( loadingStrategy, priority, false );
final SimpleCacheArrayLoader< ? > loader = createCacheArrayLoader( n5, pathName );
return cache.createImg( grid, timepointId, setupId, level, cacheHints, loader, type );
}
catch ( IOException e )
{
System.err.println( String.format(
"image data for timepoint %d setup %d level %d could not be found.",
timepointId, setupId, level ) );
return Views.interval(
new ConstantRandomAccessible<>( type.createVariable(), 3 ),
new FinalInterval( 1, 1, 1 ) );
}
}
}
private static class N5CacheArrayLoader< A > implements SimpleCacheArrayLoader< A >
{
private final N5Reader n5;
private final String pathName;
private final DatasetAttributes attributes;
private final Function< DataBlock< ? >, A > createArray;
N5CacheArrayLoader( final N5Reader n5, final String pathName, final DatasetAttributes attributes, final Function< DataBlock< ? >, A > createArray )
{
this.n5 = n5;
this.pathName = pathName;
this.attributes = attributes;
this.createArray = createArray;
}
@Override
public A loadArray( final long[] gridPosition ) throws IOException
{
return createArray.apply( n5.readBlock( pathName, attributes, gridPosition ) );
}
}
public static SimpleCacheArrayLoader< ? > createCacheArrayLoader( final N5Reader n5, final String pathName ) throws IOException
{
final DatasetAttributes attributes = n5.getDatasetAttributes( pathName );
switch ( attributes.getDataType() )
{
case UINT8:
case INT8:
return new N5CacheArrayLoader<>( n5, pathName, attributes,
dataBlock -> new VolatileByteArray( Cast.unchecked( dataBlock.getData() ), true ) );
case UINT16:
case INT16:
return new N5CacheArrayLoader<>( n5, pathName, attributes,
dataBlock -> new VolatileShortArray( Cast.unchecked( dataBlock.getData() ), true ) );
case UINT32:
case INT32:
return new N5CacheArrayLoader<>( n5, pathName, attributes,
dataBlock -> new VolatileIntArray( Cast.unchecked( dataBlock.getData() ), true ) );
case UINT64:
case INT64:
return new N5CacheArrayLoader<>( n5, pathName, attributes,
dataBlock -> new VolatileLongArray( Cast.unchecked( dataBlock.getData() ), true ) );
case FLOAT32:
return new N5CacheArrayLoader<>( n5, pathName, attributes,
dataBlock -> new VolatileFloatArray( Cast.unchecked( dataBlock.getData() ), true ) );
case FLOAT64:
return new N5CacheArrayLoader<>( n5, pathName, attributes,
dataBlock -> new VolatileDoubleArray( Cast.unchecked( dataBlock.getData() ), true ) );
default:
throw new IllegalArgumentException();
}
}
}
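
A minimal sketch of using the loader above directly (illustration only; in practice it is created by XmlIoN5ImageLoader below when a "bdv.n5" XML is opened). The setup and timepoint ids are placeholders.

// Hypothetical helper: open an exported n5 container and fetch the
// full-resolution image of setup 0 at timepoint 0.
static RandomAccessibleInterval< ? > loadFirstImage(
		final File n5File,
		final AbstractSequenceDescription< ?, ?, ? > seq )
{
	final N5ImageLoader imgLoader = new N5ImageLoader( n5File, seq );
	// getSetupImgLoader() lazily opens the container and sets up the cache
	final N5ImageLoader.SetupImgLoader< ?, ? > sil = imgLoader.getSetupImgLoader( 0 );
	return sil.getImage( 0, 0 ); // timepoint 0, mipmap level 0
}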
/*
* #%L
* BigDataViewer core classes with minimal dependencies
* %%
* Copyright (C) 2012 - 2016 Tobias Pietzsch, Stephan Saalfeld, Stephan Preibisch,
* Jean-Yves Tinevez, HongKee Moon, Johannes Schindelin, Curtis Rueden, John Bogovic
* %%
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
* #L%
*/
package bdv.img.n5;
import java.io.File;
import mpicbg.spim.data.XmlHelpers;
import mpicbg.spim.data.generic.sequence.AbstractSequenceDescription;
import mpicbg.spim.data.generic.sequence.ImgLoaderIo;
import mpicbg.spim.data.generic.sequence.XmlIoBasicImgLoader;
import org.jdom2.Element;
import static mpicbg.spim.data.XmlHelpers.loadPath;
import static mpicbg.spim.data.XmlKeys.IMGLOADER_FORMAT_ATTRIBUTE_NAME;
@ImgLoaderIo( format = "bdv.n5", type = N5ImageLoader.class )
public class XmlIoN5ImageLoader implements XmlIoBasicImgLoader< N5ImageLoader >
{
@Override
public Element toXml( final N5ImageLoader imgLoader, final File basePath )
{
final Element elem = new Element( "ImageLoader" );
elem.setAttribute( IMGLOADER_FORMAT_ATTRIBUTE_NAME, "bdv.n5" );
elem.setAttribute( "version", "1.0" );
elem.addContent( XmlHelpers.pathElement( "n5", imgLoader.getN5File(), basePath ) );
return elem;
}
@Override
public N5ImageLoader fromXml( final Element elem, final File basePath, final AbstractSequenceDescription< ?, ?, ? > sequenceDescription )
{
// final String version = elem.getAttributeValue( "version" );
final File path = loadPath( elem, "n5", basePath );
return new N5ImageLoader( path, sequenceDescription );
}
}
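
A hedged round-trip sketch for the XML I/O above (illustration only): the expected element shape is inferred from toXml, and the exact form of the <n5> child is whatever XmlHelpers.pathElement produces.

// Hypothetical helper: serialize an N5ImageLoader and re-create it from the
// resulting element. The element is expected to look roughly like
//   <ImageLoader format="bdv.n5" version="1.0">
//     <n5 type="relative">export.n5</n5>
//   </ImageLoader>
static N5ImageLoader roundTrip(
		final N5ImageLoader imgLoader,
		final File basePath,
		final AbstractSequenceDescription< ?, ?, ? > seq )
{
	final XmlIoN5ImageLoader io = new XmlIoN5ImageLoader();
	final Element elem = io.toXml( imgLoader, basePath );
	return io.fromXml( elem, basePath, seq );
}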