Commit 93f64136 authored by Vojtech Moravec

Merge remote-tracking branch 'upstream/master' into fork_sync

parents 368cc7c0 c698350b
Showing 1615 additions and 457 deletions
/*-
* #%L
* BigDataViewer core classes with minimal dependencies.
* %%
* Copyright (C) 2012 - 2020 BigDataViewer developers.
* %%
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
* #L%
*/
package bdv.export;
import java.util.Arrays;
import net.imglib2.Cursor;
import net.imglib2.RandomAccess;
import net.imglib2.img.array.ArrayImgs;
import net.imglib2.loops.ClassCopyProvider;
import net.imglib2.type.numeric.RealType;
import net.imglib2.type.numeric.real.DoubleType;
import net.imglib2.util.Intervals;
public interface DownsampleBlock< T extends RealType< T > >
{
void downsampleBlock( final RandomAccess< T > in, final Cursor< T > out, final int[] dimensions );
static < T extends RealType< T > > DownsampleBlock< T > create(
final int[] blockDimensions,
final int[] downsamplingFactors,
final Class< ? > pixelTypeClass,
final Class< ? > inAccessClass )
{
return DownsampleBlockInstances.create( blockDimensions, downsamplingFactors, pixelTypeClass, inAccessClass );
}
}
class DownsampleBlockInstances
{
@SuppressWarnings( "rawtypes" )
private static ClassCopyProvider< DownsampleBlock > provider;
@SuppressWarnings( "unchecked" )
public static < T extends RealType< T > > DownsampleBlock< T > create(
final int[] blockDimensions,
final int[] downsamplingFactors,
final Class< ? > pixelTypeClass,
final Class< ? > inAccessClass )
{
if ( provider == null )
{
synchronized ( DownsampleBlockInstances.class )
{
if ( provider == null )
provider = new ClassCopyProvider<>( Imp.class, DownsampleBlock.class, int[].class, int[].class );
}
}
final int numDimensions = blockDimensions.length;
Object key = Arrays.asList( numDimensions, pixelTypeClass, inAccessClass );
return provider.newInstanceForKey( key, blockDimensions, downsamplingFactors );
}
public static class Imp< T extends RealType< T > > implements DownsampleBlock< T >
{
private final int n;
private final int[] downsamplingFactors;
private final double scale;
private final double[] accumulator;
private final RandomAccess< DoubleType > acc;
public Imp(
final int[] blockDimensions,
final int[] downsamplingFactors )
{
n = blockDimensions.length;
if ( n < 1 || n > 3 )
throw new IllegalArgumentException();
this.downsamplingFactors = downsamplingFactors;
scale = 1.0 / Intervals.numElements( downsamplingFactors );
accumulator = new double[ ( int ) Intervals.numElements( blockDimensions ) ];
final long[] dims = new long[ n ];
Arrays.setAll( dims, d -> blockDimensions[ d ] );
acc = ArrayImgs.doubles( accumulator, dims ).randomAccess();
}
@Override
public void downsampleBlock(
final RandomAccess< T > in,
final Cursor< T > out, // must be flat iteration order
final int[] dimensions )
{
clearAccumulator();
if ( n == 3 )
{
downsampleBlock3D( acc, dimensions[ 0 ], dimensions[ 1 ], dimensions[ 2 ], in );
writeOutput3D( out, dimensions[ 0 ], dimensions[ 1 ], dimensions[ 2 ], acc );
}
else if ( n == 2 )
{
downsampleBlock2D( acc, dimensions[ 0 ], dimensions[ 1 ], in );
writeOutput2D( out, dimensions[ 0 ], dimensions[ 1 ], acc );
}
else
{
downsampleBlock1D( acc, dimensions[ 0 ], in );
writeOutput1D( out, dimensions[ 0 ], acc );
}
}
private void clearAccumulator()
{
Arrays.fill( accumulator, 0, accumulator.length, 0 );
}
private void downsampleBlock3D(
final RandomAccess< DoubleType > acc,
final int asx, // size of output (resp accumulator) image
final int asy,
final int asz,
final RandomAccess< T > in )
{
final int bsz = downsamplingFactors[ 2 ];
final int sz = asz * bsz;
for ( int z = 0, bz = 0; z < sz; ++z )
{
downsampleBlock2D( acc, asx, asy, in );
in.fwd( 2 );
if ( ++bz == bsz )
{
bz = 0;
acc.fwd( 2 );
}
}
in.move( -sz, 2 );
acc.move( -asz, 2 );
}
private void downsampleBlock2D(
final RandomAccess< DoubleType > acc,
final int asx, // size of output (resp accumulator) image
final int asy,
final RandomAccess< T > in )
{
final int bsy = downsamplingFactors[ 1 ];
final int sy = asy * bsy;
for ( int y = 0, by = 0; y < sy; ++y )
{
downsampleBlock1D( acc, asx, in );
in.fwd( 1 );
if ( ++by == bsy )
{
by = 0;
acc.fwd( 1 );
}
}
in.move( -sy, 1 );
acc.move( -asy, 1 );
}
private void downsampleBlock1D(
final RandomAccess< DoubleType > acc,
final int asx, // size of output (resp accumulator) image
final RandomAccess< T > in )
{
final int bsx = downsamplingFactors[ 0 ];
final int sx = asx * bsx;
for ( int x = 0, bx = 0; x < sx; ++x )
{
acc.get().set( acc.get().get() + in.get().getRealDouble() );
in.fwd( 0 );
if ( ++bx == bsx )
{
bx = 0;
acc.fwd( 0 );
}
}
in.move( -sx, 0 );
acc.move( -asx, 0 );
}
private void writeOutput3D(
final Cursor< T > out, // must be flat iteration order
final int asx, // size of output (resp accumulator) image
final int asy,
final int asz,
final RandomAccess< DoubleType > acc )
{
for ( int z = 0; z < asz; ++z )
{
writeOutput2D( out, asx, asy, acc );
acc.fwd( 2 );
}
acc.move( -asz, 2 );
}
private void writeOutput2D(
final Cursor< T > out, // must be flat iteration order
final int asx, // size of output (resp accumulator) image
final int asy,
final RandomAccess< DoubleType > acc )
{
for ( int y = 0; y < asy; ++y )
{
writeOutput1D( out, asx, acc );
acc.fwd( 1 );
}
acc.move( -asy, 1 );
}
private void writeOutput1D(
final Cursor< T > out, // must be flat iteration order
final int asx, // size of output (resp accumulator) image
final RandomAccess< DoubleType > acc )
{
final double scale = this.scale;
for ( int x = 0; x < asx; ++x )
{
out.next().setReal( acc.get().get() * scale );
acc.fwd( 0 );
}
acc.move( -asx, 0 );
}
}
}
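Illustrative usage sketch (not part of this commit; the class name, image sizes, and pixel type below are invented): downsampling one 16x16x16 output block from a 32x32x32 input region with factors {2,2,2}, using the DownsampleBlock API above.
// Illustrative sketch only -- not part of bdv-core.
import bdv.export.DownsampleBlock;
import net.imglib2.Cursor;
import net.imglib2.RandomAccess;
import net.imglib2.img.array.ArrayImg;
import net.imglib2.img.array.ArrayImgs;
import net.imglib2.type.numeric.integer.UnsignedShortType;

class DownsampleBlockSketch
{
	public static void main( final String[] args )
	{
		final int[] outputBlockSize = { 16, 16, 16 }; // size of one output block
		final int[] factors = { 2, 2, 2 };            // downsampling factor per dimension

		// a 32x32x32 input region and the 16x16x16 output block it is averaged into
		final ArrayImg< UnsignedShortType, ? > input = ArrayImgs.unsignedShorts( 32, 32, 32 );
		final ArrayImg< UnsignedShortType, ? > output = ArrayImgs.unsignedShorts( 16, 16, 16 );

		final RandomAccess< UnsignedShortType > in = input.randomAccess();
		in.setPosition( new long[] { 0, 0, 0 } ); // min of the input region to downsample

		// create() is keyed on dimensionality, pixel type class and input access class
		final DownsampleBlock< UnsignedShortType > downsampler = DownsampleBlock.create(
				outputBlockSize, factors, UnsignedShortType.class, in.getClass() );

		// the output cursor must be in flat iteration order; ArrayImg cursors are
		final Cursor< UnsignedShortType > out = output.cursor();
		downsampler.downsampleBlock( in, out, outputBlockSize );
	}
}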
/*
* #%L
* BigDataViewer core classes with minimal dependencies
* BigDataViewer core classes with minimal dependencies.
* %%
* Copyright (C) 2012 - 2016 Tobias Pietzsch, Stephan Saalfeld, Stephan Preibisch,
* Jean-Yves Tinevez, HongKee Moon, Johannes Schindelin, Curtis Rueden, John Bogovic
* Copyright (C) 2012 - 2020 BigDataViewer developers.
* %%
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
......
/*
* #%L
* BigDataViewer core classes with minimal dependencies.
* %%
* Copyright (C) 2012 - 2020 BigDataViewer developers.
* %%
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
* #L%
*/
package bdv.export;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
import java.util.concurrent.atomic.AtomicInteger;
import net.imglib2.FinalInterval;
import net.imglib2.RandomAccess;
import net.imglib2.RandomAccessibleInterval;
import net.imglib2.cache.img.SingleCellArrayImg;
import net.imglib2.img.basictypeaccess.ArrayDataAccessFactory;
import net.imglib2.img.basictypeaccess.array.ArrayDataAccess;
import net.imglib2.img.cell.CellGrid;
import net.imglib2.type.NativeType;
import net.imglib2.type.NativeTypeFactory;
import net.imglib2.type.numeric.RealType;
import net.imglib2.util.Cast;
import net.imglib2.util.Intervals;
import net.imglib2.view.Views;
/**
* Write an image to a chunked mipmap representation.
*/
public class ExportScalePyramid
{
/**
* A heuristic to decide for a given resolution level whether the source
* pixels should be taken from the original image or read from a previously
* written resolution level in the output dataset.
*/
public interface LoopbackHeuristic
{
/**
* @return {@code true} if source pixels should be read back from the
* dataset, {@code false} if source pixels should be taken from the
* original image.
*/
boolean decide(
final RandomAccessibleInterval< ? > originalImg,
final int[] factorsToOriginalImg,
final int previousLevel,
final int[] factorsToPreviousLevel,
final int[] chunkSize );
}
/**
* Simple heuristic: use the loopback image loader if it saves a factor of 8
* or more in the number of pixel accesses with respect to the original image.
*/
public static class DefaultLoopbackHeuristic implements LoopbackHeuristic
{
@Override
public boolean decide( final RandomAccessibleInterval< ? > originalImg, final int[] factorsToOriginalImg, final int previousLevel, final int[] factorsToPreviousLevel, final int[] chunkSize )
{
if ( previousLevel < 0 )
return false;
if ( Intervals.numElements( factorsToOriginalImg ) / Intervals.numElements( factorsToPreviousLevel ) >= 8 )
return true;
return false;
}
}
/**
* Callback that is called after each "plane of blocks" is written, giving
* the opportunity to clear caches, etc.
*/
public interface AfterEachPlane
{
/**
* Called after a "plane of blocks" is written.
*
* @param usedLoopBack
* {@code true}, if the source was a previously written resolution
* level in the output dataset. {@code false}, if the source was
* the original image.
*/
void afterEachPlane( final boolean usedLoopBack );
}
/**
* A block to be written. See {@link DatasetIO#writeBlock(Object, Block)
* DatasetIO.writeBlock()}.
*/
public static class Block< T extends NativeType< T > >
{
final SingleCellArrayImg< T, ? > data;
final int[] size;
final long[] position;
Block( final SingleCellArrayImg< T, ? > data, final int[] size, final long[] position )
{
this.data = data;
this.size = size;
this.position = position;
}
public SingleCellArrayImg< T, ? > getData()
{
return data;
}
public int[] getSize()
{
return size;
}
public long[] getGridPosition()
{
return position;
}
}
/**
* Writing and reading back data for each resolution level.
*
* @param <D>
* Dataset handle
* @param <T>
* Pixel type
*/
public interface DatasetIO< D, T extends NativeType< T > >
{
/**
* Create a dataset for the image of the given resolution {@code level}.
*
* @return a handle to the dataset.
*/
D createDataset(
final int level,
final long[] dimensions,
final int[] blockSize ) throws IOException;
/**
* Write the given {@code dataBlock} to the {@code dataset}.
*/
void writeBlock(
final D dataset,
final Block< T > dataBlock ) throws IOException;
/**
* Blocks until all pending data has been written to {@code dataset}.
*/
void flush( D dataset ) throws IOException;
/**
* Opens a dataset that was already written as a
* {@code RandomAccessibleInterval}.
*/
default RandomAccessibleInterval< T > getImage( final int level ) throws IOException
{
return null;
}
}
/**
* Write an image to a chunked mipmap representation.
*
* @param img
* the image to be written.
* @param type
* instance of the pixel type of the image.
* @param mipmapInfo
* contains for each mipmap level of the setup, the subsampling
* factors and block sizes.
* @param io
* writer for image blocks.
* @param executorService
* ExecutorService where block-creator tasks are submitted.
* @param numThreads
* How many block-creator tasks to run in parallel. (This many
* tasks are submitted to the {@code executorService}.)
* @param loopbackHeuristic
* heuristic to decide whether to create each resolution level by
* reading pixels from the original image or by reading back a
* finer resolution level already written to the output dataset. may be
* null (in this case always use the original image).
* @param afterEachPlane
* this is called after each "plane of blocks" is written, giving
* the opportunity to clear caches, etc. may be null.
* @param progressWriter
* completion ratio and status output will be directed here. may
* be null.
*
* @param <T>
* Pixel type
* @param <D>
* Dataset handle
*
* @throws IOException
*/
public static < T extends RealType< T > & NativeType< T >, D > void writeScalePyramid(
final RandomAccessibleInterval< T > img,
final T type,
final ExportMipmapInfo mipmapInfo,
final DatasetIO< D, T > io,
final ExecutorService executorService,
final int numThreads,
final LoopbackHeuristic loopbackHeuristic,
final AfterEachPlane afterEachPlane,
ProgressWriter progressWriter ) throws IOException
{
final BlockCreator< T > blockCreator = BlockCreator.forType( type );
if ( progressWriter == null )
progressWriter = new ProgressWriterNull();
// for progressWriter
final int numTasks = mipmapInfo.getNumLevels();
int numCompletedTasks = 0;
progressWriter.setProgress( 0.0 );
// write image data for all views to the HDF5 file
final int n = 3; // TODO checkNumDimensions( img.numDimensions() );
final long[] dimensions = new long[ n ];
final int[][] resolutions = mipmapInfo.getExportResolutions();
final int[][] subdivisions = mipmapInfo.getSubdivisions();
final int numLevels = mipmapInfo.getNumLevels();
for ( int level = 0; level < numLevels; ++level )
{
progressWriter.out().println( "writing level " + level );
boolean useLoopBack = false;
int[] factorsToPreviousLevel = null;
RandomAccessibleInterval< T > loopbackImg = null;
if ( loopbackHeuristic != null )
{
// Are downsampling factors a multiple of a level that we have
// already written?
int previousLevel = -1;
A:
for ( int l = level - 1; l >= 0; --l )
{
final int[] f = new int[ n ];
for ( int d = 0; d < n; ++d )
{
f[ d ] = resolutions[ level ][ d ] / resolutions[ l ][ d ];
if ( f[ d ] * resolutions[ l ][ d ] != resolutions[ level ][ d ] )
continue A;
}
factorsToPreviousLevel = f;
previousLevel = l;
break;
}
// Now, if previousLevel >= 0 we can use loopback ImgLoader on
// previousLevel and downsample with factorsToPreviousLevel.
//
// whether it makes sense to actually do so is determined by a
// heuristic based on the following considerations:
// * if downsampling a lot over original image, the cost of
// reading images back from hdf5 outweighs the cost of
// accessing and averaging original pixels.
// * original image may already be cached (for example when
// exporting an ImageJ virtual stack). To compute blocks
// that downsample a lot in Z, many planes of the virtual
// stack need to be accessed leading to cache thrashing if
// individual planes are very large.
if ( previousLevel >= 0 )
useLoopBack = loopbackHeuristic.decide( img, resolutions[ level ], previousLevel, factorsToPreviousLevel, subdivisions[ level ] );
if ( useLoopBack )
loopbackImg = io.getImage( previousLevel );
if ( loopbackImg == null )
useLoopBack = false;
}
final RandomAccessibleInterval< T > sourceImg;
final int[] factor;
if ( useLoopBack )
{
sourceImg = loopbackImg;
factor = factorsToPreviousLevel;
}
else
{
sourceImg = img;
factor = resolutions[ level ];
}
sourceImg.dimensions( dimensions );
final long size = Intervals.numElements( factor );
final boolean fullResolution = size == 1;
if ( !fullResolution )
{
for ( int d = 0; d < n; ++d )
dimensions[ d ] = Math.max( dimensions[ d ] / factor[ d ], 1 );
}
final long[] minRequiredInput = new long[ n ];
final long[] maxRequiredInput = new long[ n ];
sourceImg.min( minRequiredInput );
for ( int d = 0; d < n; ++d )
maxRequiredInput[ d ] = minRequiredInput[ d ] + dimensions[ d ] * factor[ d ] - 1;
// TODO: pass OutOfBoundsFactory
final RandomAccessibleInterval< T > extendedImg = Views.interval( Views.extendBorder( sourceImg ), new FinalInterval( minRequiredInput, maxRequiredInput ) );
final int[] cellDimensions = subdivisions[ level ];
final D dataset = io.createDataset( level, dimensions, cellDimensions );
final ProgressWriter subProgressWriter = new SubTaskProgressWriter(
progressWriter, ( double ) numCompletedTasks / numTasks,
( double ) ( numCompletedTasks + 1 ) / numTasks );
// generate one "plane" of cells after the other to avoid cache thrashing when exporting from virtual stacks
final CellGrid grid = new CellGrid( dimensions, cellDimensions );
final long[] numCells = grid.getGridDimensions();
final long numBlocksPerPlane = numElements( numCells, 0, 2 );
final long numPlanes = numElements( numCells, 2, n );
for ( int plane = 0; plane < numPlanes; ++plane )
{
final long planeBaseIndex = numBlocksPerPlane * plane;
final AtomicInteger nextCellInPlane = new AtomicInteger();
final List< Callable< Void > > tasks = new ArrayList<>();
for ( int threadNum = 0; threadNum < numThreads; ++threadNum )
{
tasks.add( () -> {
final long[] currentCellMin = new long[ n ];
final int[] currentCellDim = new int[ n ];
final long[] currentCellPos = new long[ n ];
final long[] blockMin = new long[ n ];
final RandomAccess< T > in = extendedImg.randomAccess();
final Class< ? extends RealType > kl1 = type.getClass();
final Class< ? extends RandomAccess > kl2 = in.getClass();
final CopyBlock< T > copyBlock = fullResolution ? CopyBlock.create( n, kl1, kl2 ) : null;
final DownsampleBlock< T > downsampleBlock = fullResolution ? null : DownsampleBlock.create( cellDimensions, factor, kl1, kl2 );
for ( int i = nextCellInPlane.getAndIncrement(); i < numBlocksPerPlane; i = nextCellInPlane.getAndIncrement() )
{
final long index = planeBaseIndex + i;
grid.getCellDimensions( index, currentCellMin, currentCellDim );
grid.getCellGridPositionFlat( index, currentCellPos );
final Block< T > block = blockCreator.create( currentCellDim, currentCellMin, currentCellPos );
if ( fullResolution )
{
final RandomAccess< T > out = block.getData().randomAccess();
in.setPosition( currentCellMin );
out.setPosition( currentCellMin );
copyBlock.copyBlock( in, out, currentCellDim );
}
else
{
for ( int d = 0; d < n; ++d )
blockMin[ d ] = currentCellMin[ d ] * factor[ d ];
in.setPosition( blockMin );
downsampleBlock.downsampleBlock( in, block.getData().cursor(), currentCellDim );
}
io.writeBlock( dataset, block );
}
return null;
} );
}
try
{
final List< Future< Void > > futures = executorService.invokeAll( tasks );
for ( final Future< Void > future : futures )
future.get();
}
catch ( final InterruptedException | ExecutionException e )
{
// TODO...
e.printStackTrace();
throw new IOException( e );
}
if ( afterEachPlane != null )
afterEachPlane.afterEachPlane( useLoopBack );
subProgressWriter.setProgress( ( double ) plane / numPlanes );
}
io.flush( dataset );
progressWriter.setProgress( ( double ) ++numCompletedTasks / numTasks );
}
}
private static long numElements( final long[] size, final int mind, final int maxd )
{
long numElements = 1;
for ( int d = mind; d < maxd; ++d )
numElements *= size[ d ];
return numElements;
}
private interface BlockCreator< T extends NativeType< T > >
{
Block< T > create( final int[] blockSize, final long[] blockMin, final long[] gridPosition );
static < T extends NativeType< T > & RealType< T >, A extends ArrayDataAccess< A > > BlockCreator< T > forType( final T type )
{
final A accessFactory = Cast.unchecked( ArrayDataAccessFactory.get( type ) );
final NativeTypeFactory< T, A > nativeTypeFactory = Cast.unchecked( type.getNativeTypeFactory() );
return ( blockSize, blockMin, gridPosition ) -> {
final A data = accessFactory.createArray( ( int ) Intervals.numElements( blockSize ) );
final SingleCellArrayImg< T, A > img = new SingleCellArrayImg<>( blockSize, blockMin, data, null );
img.setLinkedType( nativeTypeFactory.createLinkedType( img ) );
return new Block<>( img, blockSize, gridPosition );
};
}
}
}
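Illustrative sketch (not part of this commit; the in-memory DatasetIO and all names below are invented, and the two-array ExportMipmapInfo constructor is assumed): wiring writeScalePyramid to a trivial DatasetIO that keeps each resolution level as an ArrayImg. The real back-end added by this commit is WriteSequenceToN5.N5DatasetIO further below.
// Illustrative sketch only -- not part of bdv-core.
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

import bdv.export.ExportMipmapInfo;
import bdv.export.ExportScalePyramid;
import bdv.export.ExportScalePyramid.Block;
import bdv.export.ExportScalePyramid.DatasetIO;
import net.imglib2.RandomAccessibleInterval;
import net.imglib2.img.Img;
import net.imglib2.img.array.ArrayImgs;
import net.imglib2.loops.LoopBuilder;
import net.imglib2.type.numeric.integer.UnsignedShortType;
import net.imglib2.view.Views;

class InMemoryExportSketch
{
	// keeps each written resolution level as a plain in-memory image
	static class InMemoryIO implements DatasetIO< Img< UnsignedShortType >, UnsignedShortType >
	{
		final List< Img< UnsignedShortType > > levels = new ArrayList<>();

		@Override
		public Img< UnsignedShortType > createDataset( final int level, final long[] dimensions, final int[] blockSize )
		{
			final Img< UnsignedShortType > img = ArrayImgs.unsignedShorts( dimensions );
			levels.add( img );
			return img;
		}

		@Override
		public void writeBlock( final Img< UnsignedShortType > dataset, final Block< UnsignedShortType > block )
		{
			// copy the block into the matching region of the level image
			LoopBuilder.setImages( block.getData(), Views.interval( dataset, block.getData() ) )
					.forEachPixel( ( s, t ) -> t.set( s ) );
		}

		@Override
		public void flush( final Img< UnsignedShortType > dataset )
		{}

		@Override
		public RandomAccessibleInterval< UnsignedShortType > getImage( final int level )
		{
			return levels.get( level ); // enables the loopback path
		}
	}

	public static void main( final String[] args ) throws IOException
	{
		final RandomAccessibleInterval< UnsignedShortType > img = ArrayImgs.unsignedShorts( 256, 256, 64 );
		final int[][] resolutions = { { 1, 1, 1 }, { 2, 2, 1 }, { 4, 4, 2 } };
		final int[][] subdivisions = { { 32, 32, 4 }, { 16, 16, 16 }, { 16, 16, 16 } };
		final ExportMipmapInfo mipmapInfo = new ExportMipmapInfo( resolutions, subdivisions );

		final ExecutorService executor = Executors.newFixedThreadPool( 4 );
		try
		{
			ExportScalePyramid.writeScalePyramid(
					img, new UnsignedShortType(), mipmapInfo, new InMemoryIO(),
					executor, 4,
					new ExportScalePyramid.DefaultLoopbackHeuristic(),
					null,   // afterEachPlane callback not needed here
					null ); // default (null) progress writer
		}
		finally
		{
			executor.shutdown();
		}
	}
}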
/*
* #%L
* BigDataViewer core classes with minimal dependencies
* BigDataViewer core classes with minimal dependencies.
* %%
* Copyright (C) 2012 - 2016 Tobias Pietzsch, Stephan Saalfeld, Stephan Preibisch,
* Jean-Yves Tinevez, HongKee Moon, Johannes Schindelin, Curtis Rueden, John Bogovic
* Copyright (C) 2012 - 2020 BigDataViewer developers.
* %%
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
......
/*
* #%L
* BigDataViewer core classes with minimal dependencies
* BigDataViewer core classes with minimal dependencies.
* %%
* Copyright (C) 2012 - 2016 Tobias Pietzsch, Stephan Saalfeld, Stephan Preibisch,
* Jean-Yves Tinevez, HongKee Moon, Johannes Schindelin, Curtis Rueden, John Bogovic
* Copyright (C) 2012 - 2020 BigDataViewer developers.
* %%
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
......
/*
* #%L
* BigDataViewer core classes with minimal dependencies
* BigDataViewer core classes with minimal dependencies.
* %%
* Copyright (C) 2012 - 2016 Tobias Pietzsch, Stephan Saalfeld, Stephan Preibisch,
* Jean-Yves Tinevez, HongKee Moon, Johannes Schindelin, Curtis Rueden, John Bogovic
* Copyright (C) 2012 - 2020 BigDataViewer developers.
* %%
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
......@@ -42,9 +41,9 @@ class Hdf5BlockWriterThread extends Thread implements IHDF5Access
{
private final IHDF5Access hdf5Access;
private static interface Hdf5Task
private interface Hdf5Task
{
public void run( final IHDF5Access hdf5Access );
void run( final IHDF5Access hdf5Access );
}
private final BlockingQueue< Hdf5BlockWriterThread.Hdf5Task > queue;
......@@ -163,6 +162,7 @@ class Hdf5BlockWriterThread extends Thread implements IHDF5Access
public void closeDataset()
{
put( new CloseDatasetTask() );
waitUntilEmpty();
}
private boolean put( final Hdf5BlockWriterThread.Hdf5Task task )
......
/*
* #%L
* BigDataViewer core classes with minimal dependencies
* BigDataViewer core classes with minimal dependencies.
* %%
* Copyright (C) 2012 - 2016 Tobias Pietzsch, Stephan Saalfeld, Stephan Preibisch,
* Jean-Yves Tinevez, HongKee Moon, Johannes Schindelin, Curtis Rueden, John Bogovic
* Copyright (C) 2012 - 2020 BigDataViewer developers.
* %%
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
......@@ -34,16 +33,16 @@ import ch.systemsx.cisd.hdf5.IHDF5Writer;
interface IHDF5Access
{
public void writeMipmapDescription( final int setupIdPartition, final ExportMipmapInfo mipmapInfo );
void writeMipmapDescription( final int setupIdPartition, final ExportMipmapInfo mipmapInfo );
public void createAndOpenDataset( final String path, long[] dimensions, int[] cellDimensions, HDF5IntStorageFeatures features );
void createAndOpenDataset( final String path, long[] dimensions, int[] cellDimensions, HDF5IntStorageFeatures features );
public void writeBlockWithOffset( final short[] data, final long[] blockDimensions, final long[] offset );
void writeBlockWithOffset( final short[] data, final long[] blockDimensions, final long[] offset );
public void closeDataset();
void closeDataset();
public void close();
void close();
// this is for sharing with Hdf5ImageLoader for loopback loader when exporting
public IHDF5Writer getIHDF5Writer();
IHDF5Writer getIHDF5Writer();
}
/*
* #%L
* BigDataViewer core classes with minimal dependencies
* BigDataViewer core classes with minimal dependencies.
* %%
* Copyright (C) 2012 - 2016 Tobias Pietzsch, Stephan Saalfeld, Stephan Preibisch,
* Jean-Yves Tinevez, HongKee Moon, Johannes Schindelin, Curtis Rueden, John Bogovic
* Copyright (C) 2012 - 2020 BigDataViewer developers.
* %%
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
......@@ -33,9 +32,9 @@ import java.io.PrintStream;
public interface ProgressWriter
{
public PrintStream out();
PrintStream out();
public PrintStream err();
PrintStream err();
public void setProgress( double completionRatio );
void setProgress( double completionRatio );
}
/*
* #%L
* BigDataViewer core classes with minimal dependencies
* BigDataViewer core classes with minimal dependencies.
* %%
* Copyright (C) 2012 - 2016 Tobias Pietzsch, Stephan Saalfeld, Stephan Preibisch,
* Jean-Yves Tinevez, HongKee Moon, Johannes Schindelin, Curtis Rueden, John Bogovic
* Copyright (C) 2012 - 2020 BigDataViewer developers.
* %%
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
......
/*-
* #%L
* BigDataViewer core classes with minimal dependencies.
* %%
* Copyright (C) 2012 - 2020 BigDataViewer developers.
* %%
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
* #L%
*/
package bdv.export;
import java.io.OutputStream;
import java.io.PrintStream;
public class ProgressWriterNull implements ProgressWriter
{
private final PrintStream blackhole;
public ProgressWriterNull()
{
blackhole = new PrintStream( new OutputStream() {
@Override
public void write( final int b )
{}
@Override
public void write( final byte[] b )
{}
@Override
public void write( final byte[] b, final int off, final int len )
{}
} );
}
@Override
public PrintStream out()
{
return blackhole;
}
@Override
public PrintStream err()
{
return blackhole;
}
@Override
public void setProgress( final double completionRatio )
{}
}
/*
* #%L
* BigDataViewer core classes with minimal dependencies
* BigDataViewer core classes with minimal dependencies.
* %%
* Copyright (C) 2012 - 2016 Tobias Pietzsch, Stephan Saalfeld, Stephan Preibisch,
* Jean-Yves Tinevez, HongKee Moon, Johannes Schindelin, Curtis Rueden, John Bogovic
* Copyright (C) 2012 - 2020 BigDataViewer developers.
* %%
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
......@@ -30,6 +29,7 @@
package bdv.export;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
......@@ -43,11 +43,13 @@ import mpicbg.spim.data.sequence.VoxelDimensions;
*
* <p>
* Choice of proposed chunksize is not based on any hard benchmark data
* currently. Chunksize is set as either 16x16x16 or 32x32x4 depending on which
* one is closer to isotropic. It is very likely that more efficient choices can
* be found by manual tuning, depending on hardware and use case.
* currently. Chunk sizes are proposed such that chunks have power-of-two side
* lengths, are roughly square in world space, and contain close to (but not
* more than) a specified number of elements (4096 by default). It is very
* likely that more efficient choices can be found by manual tuning, depending
* on hardware and use case.
*
* @author Tobias Pietzsch &lt;tobias.pietzsch@gmail.com&gt;
* @author Tobias Pietzsch
*/
public class ProposeMipmaps
{
......@@ -69,12 +71,28 @@ public class ProposeMipmaps
/**
* Propose number of mipmap levels as well as subsampling factors and chunk
* size for each level, based on the image and voxel size of the given
* setup.
* setup. Chunks contain close to (but not more than) 4096 elements.
*
* @param setup
* @return proposed mipmap settings
*/
public static ExportMipmapInfo proposeMipmaps( final BasicViewSetup setup )
{
return proposeMipmaps( setup, 4096 );
}
/**
* Propose number of mipmap levels as well as subsampling factors and chunk
* size for each level, based on the image and voxel size of the given
* setup. Chunk sizes are proposed such that chunks have power-of-two side
* lengths, are roughly square in world space, and contain close to (but not
* more than) {@code maxNumElements}.
*
* @param setup
* @param maxNumElements
* @return proposed mipmap settings
*/
public static ExportMipmapInfo proposeMipmaps( final BasicViewSetup setup, final int maxNumElements )
{
final VoxelDimensions voxelSize = setup.getVoxelSize();
final double[] voxelScale = new double[ 3 ];
......@@ -102,10 +120,7 @@ public class ProposeMipmaps
dmax = d;
}
}
if ( ( 4 * vmax / 32 ) > ( 1 / vmax ) )
subdivisions.add( subdiv_32_32_4[ dmax ] );
else
subdivisions.add( subdiv_16_16_16 );
subdivisions.add( suggestPoTBlockSize( voxelScale, maxNumElements ) );
setup.getSize().dimensions( size );
long maxSize = 0;
......@@ -175,7 +190,75 @@ public class ProposeMipmaps
size[ d ] /= minVoxelDim;
}
private static int[] subdiv_16_16_16 = new int[] { 16, 16, 16 };
/**
* Propose block size such that
* <ol>
* <li>each dimension is power-of-two,</li>
* <li>number of elements is as big as possible, but not larger than
* {@code maxNumElements}</li>
* <li>and the block (scaled by the {@code voxelSize}) is as close to square
* as possible given constraints 1 and 2.</li>
* </ol>
*/
public static int[] suggestPoTBlockSize( final double[] voxelSize, final int maxNumElements )
{
final int n = voxelSize.length;
final double[] bias = new double[ n ];
Arrays.setAll( bias, d -> 0.01 * ( n - d ) );
return suggestPoTBlockSize( voxelSize, maxNumElements, bias );
}
private static int[][] subdiv_32_32_4 = new int[][] { { 4, 32, 32 }, { 32, 4, 32 }, { 32, 32, 4 } };
/**
* Propose block size such that
* <ol>
* <li>each dimension is power-of-two,</li>
* <li>number of elements is as big as possible, but not larger than
* {@code maxNumElements}</li>
* <li>and the block (scaled by the {@code voxelSize}) is as close to square
* as possible given constraints 1 and 2.</li>
* </ol>
*
* Determination works by computing the real-valued power-of-two exponent for
* each dimension, rounding down, and then incrementing the exponents one by
* one for as long as the block stays within maxNumElements. Dimensions are
* ordered by decreasing fractional remainder of the real exponent plus some
* per-dimension bias (usually set such that X is enlarged before Y before Z...)
*/
private static int[] suggestPoTBlockSize( final double[] voxelSize, final int maxNumElements, final double[] bias )
{
final int n = voxelSize.length;
final double[] shape = new double[ n ];
double shapeVol = 1;
for ( int d = 0; d < n; ++d )
{
shape[ d ] = 1 / voxelSize[ d ];
shapeVol *= shape[ d ];
}
final double m = Math.pow( maxNumElements / shapeVol, 1. / n );
final double sumNumBits = Math.log( maxNumElements ) / Math.log( 2 );
final double[] numBits = new double[ n ];
Arrays.setAll( numBits, d -> Math.log( m * shape[ d ] ) / Math.log( 2 ) );
final int[] intNumBits = new int[ n ];
Arrays.setAll( intNumBits, d -> Math.max( 0, ( int ) numBits[ d ] ) );
for ( int sumIntNumBits = Arrays.stream( intNumBits ).sum(); sumIntNumBits + 1 <= sumNumBits; ++sumIntNumBits )
{
double maxDiff = 0;
int maxDiffDim = 0;
for ( int d = 0; d < n; ++d )
{
final double diff = numBits[ d ] - intNumBits[ d ] + bias[ d ];
if ( diff > maxDiff )
{
maxDiff = diff;
maxDiffDim = d;
}
}
++intNumBits[ maxDiffDim ];
}
final int[] blockSize = new int[ n ];
for ( int d = 0; d < n; ++d )
blockSize[ d ] = 1 << intNumBits[ d ];
return blockSize;
}
}
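A rough worked trace of suggestPoTBlockSize (not part of this commit): for isotropic voxels the proposal is a 16x16x16 block; for voxels four times coarser in Z, rounding the real exponents (roughly {4.67, 4.67, 2.67}) down and handing the two remaining bits to X and then Y (via the bias) yields the familiar 32x32x4 shape.
// Illustrative sketch only -- not part of bdv-core.
import java.util.Arrays;
import bdv.export.ProposeMipmaps;

class BlockSizeSketch
{
	public static void main( final String[] args )
	{
		// isotropic voxels: m = 4096^(1/3) = 16, so the proposal is 16x16x16
		System.out.println( Arrays.toString(
				ProposeMipmaps.suggestPoTBlockSize( new double[] { 1, 1, 1 }, 4096 ) ) );
		// Z voxels 4x coarser: real exponents ~{4.67, 4.67, 2.67} round down to {4, 4, 2};
		// the two remaining bits go to X then Y, giving 32x32x4
		System.out.println( Arrays.toString(
				ProposeMipmaps.suggestPoTBlockSize( new double[] { 1, 1, 4 }, 4096 ) ) );
		// expected output: [16, 16, 16] and [32, 32, 4]
	}
}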
/*
* #%L
* BigDataViewer core classes with minimal dependencies
* BigDataViewer core classes with minimal dependencies.
* %%
* Copyright (C) 2012 - 2016 Tobias Pietzsch, Stephan Saalfeld, Stephan Preibisch,
* Jean-Yves Tinevez, HongKee Moon, Johannes Schindelin, Curtis Rueden, John Bogovic
* Copyright (C) 2012 - 2020 BigDataViewer developers.
* %%
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
......
This diff is collapsed.
/*-
* #%L
* BigDataViewer core classes with minimal dependencies.
* %%
* Copyright (C) 2012 - 2020 BigDataViewer developers.
* %%
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
* #L%
*/
package bdv.export.n5;
import bdv.export.ExportMipmapInfo;
import bdv.export.ExportScalePyramid;
import bdv.export.ProgressWriter;
import bdv.export.ProgressWriterNull;
import bdv.export.SubTaskProgressWriter;
import bdv.export.ExportScalePyramid.AfterEachPlane;
import bdv.export.ExportScalePyramid.LoopbackHeuristic;
import bdv.img.cache.SimpleCacheArrayLoader;
import bdv.img.n5.N5ImageLoader;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.function.Function;
import java.util.stream.Collectors;
import mpicbg.spim.data.generic.sequence.AbstractSequenceDescription;
import mpicbg.spim.data.generic.sequence.BasicImgLoader;
import mpicbg.spim.data.generic.sequence.BasicSetupImgLoader;
import mpicbg.spim.data.generic.sequence.BasicViewSetup;
import mpicbg.spim.data.sequence.TimePoint;
import mpicbg.spim.data.sequence.ViewId;
import mpicbg.spim.data.sequence.VoxelDimensions;
import net.imglib2.RandomAccessibleInterval;
import net.imglib2.cache.img.ReadOnlyCachedCellImgFactory;
import net.imglib2.img.cell.Cell;
import net.imglib2.img.cell.CellGrid;
import net.imglib2.type.NativeType;
import net.imglib2.type.numeric.RealType;
import net.imglib2.util.Cast;
import org.janelia.saalfeldlab.n5.ByteArrayDataBlock;
import org.janelia.saalfeldlab.n5.Compression;
import org.janelia.saalfeldlab.n5.DataBlock;
import org.janelia.saalfeldlab.n5.DataType;
import org.janelia.saalfeldlab.n5.DatasetAttributes;
import org.janelia.saalfeldlab.n5.DoubleArrayDataBlock;
import org.janelia.saalfeldlab.n5.FloatArrayDataBlock;
import org.janelia.saalfeldlab.n5.IntArrayDataBlock;
import org.janelia.saalfeldlab.n5.LongArrayDataBlock;
import org.janelia.saalfeldlab.n5.N5FSWriter;
import org.janelia.saalfeldlab.n5.N5Writer;
import org.janelia.saalfeldlab.n5.ShortArrayDataBlock;
import org.janelia.saalfeldlab.n5.imglib2.N5Utils;
import static bdv.img.n5.BdvN5Format.DATA_TYPE_KEY;
import static bdv.img.n5.BdvN5Format.DOWNSAMPLING_FACTORS_KEY;
import static bdv.img.n5.BdvN5Format.getPathName;
import static net.imglib2.cache.img.ReadOnlyCachedCellImgOptions.options;
/**
* @author Tobias Pietzsch
* @author John Bogovic
*/
public class WriteSequenceToN5
{
private static final String MULTI_SCALE_KEY = "multiScale";
private static final String RESOLUTION_KEY = "resolution";
/**
* Create an N5 group containing image data from all views and all
* timepoints in a chunked, mipmapped representation.
*
* @param seq
* description of the sequence to be stored as N5. (The
* {@link AbstractSequenceDescription} contains the number of
* setups and timepoints as well as a {@link BasicImgLoader}
* that provides the image data. Registration information is not
* needed here; it will go into the accompanying XML.)
* @param perSetupMipmapInfo
* this maps from setup {@link BasicViewSetup#getId() id} to
* {@link ExportMipmapInfo} for that setup. The
* {@link ExportMipmapInfo} contains for each mipmap level, the
* subsampling factors and subdivision block sizes.
* @param compression
* n5 compression scheme.
* @param n5File
* n5 root.
* @param loopbackHeuristic
* heuristic to decide whether to create each resolution level by
* reading pixels from the original image or by reading back a
* finer resolution level already written to the N5 dataset. may be
* null (in this case always use the original image).
* @param afterEachPlane
* this is called after each "plane of chunks" is written, giving
* the opportunity to clear caches, etc.
* @param numCellCreatorThreads
* The number of threads that will be instantiated to generate
* cell data. Must be at least 1. (In addition to the cell-creator
* threads there is one writer thread that saves the generated
* data to HDF5.)
* @param progressWriter
* completion ratio and status output will be directed here.
*/
public static void writeN5File(
final AbstractSequenceDescription< ?, ?, ? > seq,
final Map< Integer, ExportMipmapInfo > perSetupMipmapInfo,
final Compression compression,
final File n5File,
final LoopbackHeuristic loopbackHeuristic,
final AfterEachPlane afterEachPlane,
final int numCellCreatorThreads,
ProgressWriter progressWriter ) throws IOException
{
if ( progressWriter == null )
progressWriter = new ProgressWriterNull();
progressWriter.setProgress( 0 );
final BasicImgLoader imgLoader = seq.getImgLoader();
for ( final BasicViewSetup setup : seq.getViewSetupsOrdered() )
{
final Object type = imgLoader.getSetupImgLoader( setup.getId() ).getImageType();
if ( !( type instanceof RealType &&
type instanceof NativeType &&
N5Utils.dataType( Cast.unchecked( type ) ) != null ) )
throw new IllegalArgumentException( "Unsupported pixel type: " + type.getClass().getSimpleName() );
}
final List< Integer > timepointIds = seq.getTimePoints().getTimePointsOrdered().stream()
.map( TimePoint::getId )
.collect( Collectors.toList() );
final List< Integer > setupIds = seq.getViewSetupsOrdered().stream()
.map( BasicViewSetup::getId )
.collect( Collectors.toList() );
N5Writer n5 = new N5FSWriter( n5File.getAbsolutePath() );
// write Mipmap descriptions
for ( final int setupId : setupIds )
{
final String pathName = getPathName( setupId );
final int[][] downsamplingFactors = perSetupMipmapInfo.get( setupId ).getExportResolutions();
final DataType dataType = N5Utils.dataType( Cast.unchecked( imgLoader.getSetupImgLoader( setupId ).getImageType() ) );
n5.createGroup( pathName );
n5.setAttribute( pathName, DOWNSAMPLING_FACTORS_KEY, downsamplingFactors );
n5.setAttribute( pathName, DATA_TYPE_KEY, dataType );
}
// calculate number of tasks for progressWriter
int numTasks = 0; // first task is for writing mipmap descriptions etc...
for ( final int timepointIdSequence : timepointIds )
for ( final int setupIdSequence : setupIds )
if ( seq.getViewDescriptions().get( new ViewId( timepointIdSequence, setupIdSequence ) ).isPresent() )
numTasks++;
int numCompletedTasks = 0;
final ExecutorService executorService = Executors.newFixedThreadPool( numCellCreatorThreads );
try
{
// write image data for all views
final int numTimepoints = timepointIds.size();
int timepointIndex = 0;
for ( final int timepointId : timepointIds )
{
progressWriter.out().printf( "processing timepoint %d / %d\n", ++timepointIndex, numTimepoints );
// assemble the viewsetups that are present in this timepoint
final ArrayList< Integer > setupsTimePoint = new ArrayList<>();
for ( final int setupId : setupIds )
if ( seq.getViewDescriptions().get( new ViewId( timepointId, setupId ) ).isPresent() )
setupsTimePoint.add( setupId );
final int numSetups = setupsTimePoint.size();
int setupIndex = 0;
for ( final int setupId : setupsTimePoint )
{
progressWriter.out().printf( "processing setup %d / %d\n", ++setupIndex, numSetups );
final ExportMipmapInfo mipmapInfo = perSetupMipmapInfo.get( setupId );
final double startCompletionRatio = ( double ) numCompletedTasks++ / numTasks;
final double endCompletionRatio = ( double ) numCompletedTasks / numTasks;
final ProgressWriter subProgressWriter = new SubTaskProgressWriter( progressWriter, startCompletionRatio, endCompletionRatio );
writeScalePyramid(
n5, compression,
imgLoader, setupId, timepointId, mipmapInfo,
executorService, numCellCreatorThreads,
loopbackHeuristic, afterEachPlane, subProgressWriter );
// additional attributes for paintera compatibility
final String pathName = getPathName( setupId, timepointId );
n5.createGroup( pathName );
n5.setAttribute( pathName, MULTI_SCALE_KEY, true );
final VoxelDimensions voxelSize = seq.getViewSetups().get( setupId ).getVoxelSize();
if ( voxelSize != null )
{
final double[] resolution = new double[ voxelSize.numDimensions() ];
voxelSize.dimensions( resolution );
n5.setAttribute( pathName, RESOLUTION_KEY, resolution );
}
final int[][] downsamplingFactors = perSetupMipmapInfo.get( setupId ).getExportResolutions();
for( int l = 0; l < downsamplingFactors.length; ++l )
n5.setAttribute( getPathName( setupId, timepointId, l ), DOWNSAMPLING_FACTORS_KEY, downsamplingFactors[ l ] );
}
}
}
finally
{
executorService.shutdown();
}
progressWriter.setProgress( 1.0 );
}
static < T extends RealType< T > & NativeType< T > > void writeScalePyramid(
final N5Writer n5,
final Compression compression,
final BasicImgLoader imgLoader,
final int setupId,
final int timepointId,
final ExportMipmapInfo mipmapInfo,
final ExecutorService executorService,
final int numThreads,
final LoopbackHeuristic loopbackHeuristic,
final AfterEachPlane afterEachPlane,
ProgressWriter progressWriter ) throws IOException
{
final BasicSetupImgLoader< T > setupImgLoader = Cast.unchecked( imgLoader.getSetupImgLoader( setupId ) );
final RandomAccessibleInterval< T > img = setupImgLoader.getImage( timepointId );
final T type = setupImgLoader.getImageType();
final N5DatasetIO< T > io = new N5DatasetIO<>( n5, compression, setupId, timepointId, type );
ExportScalePyramid.writeScalePyramid(
img, type, mipmapInfo, io,
executorService, numThreads,
loopbackHeuristic, afterEachPlane, progressWriter );
}
static class N5Dataset
{
final String pathName;
final DatasetAttributes attributes;
public N5Dataset( final String pathName, final DatasetAttributes attributes )
{
this.pathName = pathName;
this.attributes = attributes;
}
}
static class N5DatasetIO< T extends RealType< T > & NativeType< T > > implements ExportScalePyramid.DatasetIO< N5Dataset, T >
{
private final N5Writer n5;
private final Compression compression;
private final int setupId;
private final int timepointId;
private final DataType dataType;
private final T type;
private final Function< ExportScalePyramid.Block< T >, DataBlock< ? > > getDataBlock;
public N5DatasetIO( final N5Writer n5, final Compression compression, final int setupId, final int timepointId, final T type )
{
this.n5 = n5;
this.compression = compression;
this.setupId = setupId;
this.timepointId = timepointId;
this.dataType = N5Utils.dataType( type );
this.type = type;
switch ( dataType )
{
case UINT8:
getDataBlock = b -> new ByteArrayDataBlock( b.getSize(), b.getGridPosition(), Cast.unchecked( b.getData().getStorageArray() ) );
break;
case UINT16:
getDataBlock = b -> new ShortArrayDataBlock( b.getSize(), b.getGridPosition(), Cast.unchecked( b.getData().getStorageArray() ) );
break;
case UINT32:
getDataBlock = b -> new IntArrayDataBlock( b.getSize(), b.getGridPosition(), Cast.unchecked( b.getData().getStorageArray() ) );
break;
case UINT64:
getDataBlock = b -> new LongArrayDataBlock( b.getSize(), b.getGridPosition(), Cast.unchecked( b.getData().getStorageArray() ) );
break;
case INT8:
getDataBlock = b -> new ByteArrayDataBlock( b.getSize(), b.getGridPosition(), Cast.unchecked( b.getData().getStorageArray() ) );
break;
case INT16:
getDataBlock = b -> new ShortArrayDataBlock( b.getSize(), b.getGridPosition(), Cast.unchecked( b.getData().getStorageArray() ) );
break;
case INT32:
getDataBlock = b -> new IntArrayDataBlock( b.getSize(), b.getGridPosition(), Cast.unchecked( b.getData().getStorageArray() ) );
break;
case INT64:
getDataBlock = b -> new LongArrayDataBlock( b.getSize(), b.getGridPosition(), Cast.unchecked( b.getData().getStorageArray() ) );
break;
case FLOAT32:
getDataBlock = b -> new FloatArrayDataBlock( b.getSize(), b.getGridPosition(), Cast.unchecked( b.getData().getStorageArray() ) );
break;
case FLOAT64:
getDataBlock = b -> new DoubleArrayDataBlock( b.getSize(), b.getGridPosition(), Cast.unchecked( b.getData().getStorageArray() ) );
break;
default:
throw new IllegalArgumentException();
}
}
@Override
public N5Dataset createDataset( final int level, final long[] dimensions, final int[] blockSize ) throws IOException
{
final String pathName = getPathName( setupId, timepointId, level );
n5.createDataset( pathName, dimensions, blockSize, dataType, compression );
final DatasetAttributes attributes = n5.getDatasetAttributes( pathName );
return new N5Dataset( pathName, attributes );
}
@Override
public void writeBlock( final N5Dataset dataset, final ExportScalePyramid.Block< T > dataBlock ) throws IOException
{
n5.writeBlock( dataset.pathName, dataset.attributes, getDataBlock.apply( dataBlock ) );
}
@Override
public void flush( final N5Dataset dataset )
{}
@Override
public RandomAccessibleInterval< T > getImage( final int level ) throws IOException
{
final String pathName = getPathName( setupId, timepointId, level );
final DatasetAttributes attributes = n5.getDatasetAttributes( pathName );
final long[] dimensions = attributes.getDimensions();
final int[] cellDimensions = attributes.getBlockSize();
final CellGrid grid = new CellGrid( dimensions, cellDimensions );
final SimpleCacheArrayLoader< ? > cacheArrayLoader = N5ImageLoader.createCacheArrayLoader( n5, pathName );
return new ReadOnlyCachedCellImgFactory().createWithCacheLoader(
dimensions, type,
key -> {
final int n = grid.numDimensions();
final long[] cellMin = new long[ n ];
final int[] cellDims = new int[ n ];
final long[] cellGridPosition = new long[ n ];
grid.getCellDimensions( key, cellMin, cellDims );
grid.getCellGridPositionFlat( key, cellGridPosition );
return new Cell<>( cellDims, cellMin, cacheArrayLoader.loadArray( cellGridPosition ) );
},
options().cellDimensions( cellDimensions ) );
}
}
}
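Illustrative caller sketch (not part of this commit; the method, the thread count, and the assumption that seq comes from an already loaded SpimData/XML sequence are invented for the example): proposing per-setup mipmap settings with ProposeMipmaps and exporting the whole sequence to an N5 root.
// Illustrative sketch only -- not part of bdv-core.
import java.io.File;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;

import bdv.export.ExportMipmapInfo;
import bdv.export.ExportScalePyramid;
import bdv.export.ProposeMipmaps;
import bdv.export.n5.WriteSequenceToN5;
import mpicbg.spim.data.generic.sequence.AbstractSequenceDescription;
import mpicbg.spim.data.generic.sequence.BasicViewSetup;
import org.janelia.saalfeldlab.n5.GzipCompression;

class ExportToN5Sketch
{
	static void export( final AbstractSequenceDescription< ?, ?, ? > seq, final File n5Root ) throws IOException
	{
		// propose mipmap levels and chunk sizes per setup (close to 4096 elements per chunk by default)
		final Map< Integer, ExportMipmapInfo > perSetupMipmapInfo = new HashMap<>();
		for ( final BasicViewSetup setup : seq.getViewSetupsOrdered() )
			perSetupMipmapInfo.put( setup.getId(), ProposeMipmaps.proposeMipmaps( setup ) );

		WriteSequenceToN5.writeN5File(
				seq, perSetupMipmapInfo,
				new GzipCompression(),
				n5Root,
				new ExportScalePyramid.DefaultLoopbackHeuristic(),
				null,   // afterEachPlane callback not needed here
				4,      // cell-creator threads
				null ); // default (null) progress writer
	}
}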
/*
* #%L
* BigDataViewer core classes with minimal dependencies
* BigDataViewer core classes with minimal dependencies.
* %%
* Copyright (C) 2012 - 2016 Tobias Pietzsch, Stephan Saalfeld, Stephan Preibisch,
* Jean-Yves Tinevez, HongKee Moon, Johannes Schindelin, Curtis Rueden, John Bogovic
* Copyright (C) 2012 - 2020 BigDataViewer developers.
* %%
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
......@@ -30,7 +29,6 @@
package bdv.img.cache;
import bdv.ViewerImgLoader;
import bdv.img.catmaid.CatmaidImageLoader;
import net.imglib2.img.basictypeaccess.volatiles.VolatileAccess;
import net.imglib2.img.basictypeaccess.volatiles.VolatileArrayDataAccess;
import net.imglib2.img.basictypeaccess.volatiles.array.DirtyVolatileByteArray;
......@@ -58,7 +56,7 @@ import net.imglib2.type.NativeType;
* type of access to cell data, currently always a
* {@link VolatileAccess}.
*
* @author Tobias Pietzsch &lt;tobias.pietzsch@gmail.com&gt;
* @author Tobias Pietzsch
*/
public interface CacheArrayLoader< A >
{
......@@ -69,7 +67,7 @@ public interface CacheArrayLoader< A >
*
* @return number of bytes required to store one element.
*/
public default int getBytesPerElement()
default int getBytesPerElement()
{
return 1;
}
......@@ -103,7 +101,7 @@ public interface CacheArrayLoader< A >
*
* @return an {@link EmptyArrayCreator} for {@code A} or null.
*/
public default EmptyArrayCreator< A > getEmptyArrayCreator()
default EmptyArrayCreator< A > getEmptyArrayCreator()
{
return null;
}
......@@ -128,10 +126,10 @@ public interface CacheArrayLoader< A >
* back-end. You do not need to be able to load blocks of arbitrary sizes
* and offsets here -- just the ones that you will use from the images
* returned by your {@link ViewerImgLoader}. For an example, look at
* {@link CatmaidImageLoader}. There, the blockDimensions are defined in the
* {@code CatmaidImageLoader}. There, the blockDimensions are defined in the
* constructor, according to the tile size of the data set. These
* blockDimensions are then used for every image that the
* {@link CatmaidImageLoader} provides. Therefore, all calls to
* {@code CatmaidImageLoader} provides. Therefore, all calls to
* {@link #loadArray(int, int, int, int[], long[])} will have predictable
* {@code dimensions} (corresponding to tile size of the data set) and
* {@code min} offsets (multiples of the tile size).
......@@ -156,5 +154,6 @@ public interface CacheArrayLoader< A >
* the min coordinate of the block in the stack (in voxels).
* @return loaded cell data.
*/
public A loadArray( final int timepoint, final int setup, final int level, int[] dimensions, long[] min ) throws InterruptedException;
// TODO: It would make more sense to throw IOException here. Declare both IOException and InterruptedException. Throw IOException in bdv-core implementations.
A loadArray( final int timepoint, final int setup, final int level, int[] dimensions, long[] min ) throws InterruptedException;
}
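Illustrative sketch (not part of this commit; the class is invented): a trivial CacheArrayLoader back-end that serves empty, valid short blocks. A real loader would read the requested (timepoint, setup, level) block from its storage format instead.
// Illustrative sketch only -- not part of bdv-core.
import bdv.img.cache.CacheArrayLoader;
import net.imglib2.img.basictypeaccess.volatiles.array.VolatileShortArray;

class ConstantCacheArrayLoader implements CacheArrayLoader< VolatileShortArray >
{
	@Override
	public int getBytesPerElement()
	{
		return 2; // short-valued blocks
	}

	@Override
	public VolatileShortArray loadArray( final int timepoint, final int setup, final int level,
			final int[] dimensions, final long[] min ) throws InterruptedException
	{
		int numEntities = 1;
		for ( final int d : dimensions )
			numEntities *= d;
		// a real implementation would fill this block from the underlying data set
		return new VolatileShortArray( numEntities, true );
	}
}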
/*-
* #%L
* BigDataViewer core classes with minimal dependencies.
* %%
* Copyright (C) 2012 - 2020 BigDataViewer developers.
* %%
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
* #L%
*/
package bdv.img.cache;
import net.imglib2.cache.volatiles.CreateInvalid;
......
/*-
* #%L
* BigDataViewer core classes with minimal dependencies.
* %%
* Copyright (C) 2012 - 2020 BigDataViewer developers.
* %%
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
* #L%
*/
package bdv.img.cache;
import net.imglib2.img.basictypeaccess.volatiles.VolatileArrayDataAccess;
......
/*-
* #%L
* BigDataViewer core classes with minimal dependencies.
* %%
* Copyright (C) 2012 - 2020 BigDataViewer developers.
* %%
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
* #L%
*/
package bdv.img.cache;
import net.imglib2.img.basictypeaccess.AccessFlags;
......@@ -21,9 +49,9 @@ import net.imglib2.type.PrimitiveType;
*/
public interface EmptyArrayCreator< A >
{
public A getEmptyArray( final long numEntities );
A getEmptyArray( final long numEntities );
public static < A extends VolatileArrayDataAccess< A > > EmptyArrayCreator< A > get(
static < A extends VolatileArrayDataAccess< A > > EmptyArrayCreator< A > get(
final PrimitiveType primitiveType,
final boolean dirty )
{
......
/*
* #%L
* BigDataViewer core classes with minimal dependencies.
* %%
* Copyright (C) 2012 - 2020 BigDataViewer developers.
* %%
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
* #L%
*/
package bdv.img.cache;
import java.io.IOException;
import net.imglib2.img.basictypeaccess.volatiles.VolatileAccess;
import net.imglib2.img.basictypeaccess.volatiles.VolatileArrayDataAccess;
import net.imglib2.img.basictypeaccess.volatiles.array.DirtyVolatileByteArray;
import net.imglib2.img.basictypeaccess.volatiles.array.DirtyVolatileCharArray;
import net.imglib2.img.basictypeaccess.volatiles.array.DirtyVolatileDoubleArray;
import net.imglib2.img.basictypeaccess.volatiles.array.DirtyVolatileFloatArray;
import net.imglib2.img.basictypeaccess.volatiles.array.DirtyVolatileIntArray;
import net.imglib2.img.basictypeaccess.volatiles.array.DirtyVolatileLongArray;
import net.imglib2.img.basictypeaccess.volatiles.array.DirtyVolatileShortArray;
import net.imglib2.img.basictypeaccess.volatiles.array.VolatileByteArray;
import net.imglib2.img.basictypeaccess.volatiles.array.VolatileCharArray;
import net.imglib2.img.basictypeaccess.volatiles.array.VolatileDoubleArray;
import net.imglib2.img.basictypeaccess.volatiles.array.VolatileFloatArray;
import net.imglib2.img.basictypeaccess.volatiles.array.VolatileIntArray;
import net.imglib2.img.basictypeaccess.volatiles.array.VolatileLongArray;
import net.imglib2.img.basictypeaccess.volatiles.array.VolatileShortArray;
import net.imglib2.img.cell.CellGrid;
import net.imglib2.type.NativeType;
/**
* Provider of volatile {@link net.imglib2.img.cell.Cell} data. This is
* implemented by data back-ends to the {@link VolatileGlobalCellCache}.
* <p>
 * {@code SimpleCacheArrayLoader} is supposed to load data for one specific image.
 * {@code loadArray()} will not get information about which timepoint,
 * resolution level, etc. a requested block belongs to, and the appropriate
 * block size is also supposed to be known to the loader.
* <p>
* This is in contrast to {@link CacheArrayLoader}, where all information to
* identify a particular block in a whole dataset is provided. Whether it makes
* more sense to implement {@code CacheArrayLoader} or
* {@code SimpleCacheArrayLoader} depends on the particular back-end.
*
* @param <A>
* type of access to cell data, currently always a
* {@link VolatileAccess}.
*
* @author Tobias Pietzsch
*/
public interface SimpleCacheArrayLoader< A >
{
/**
* Implementing classes must override this if {@code A} is not a standard
* {@link VolatileArrayDataAccess} type. The default implementation returns
* {@code null}, which will let
* {@link CreateInvalidVolatileCell#get(CellGrid, NativeType, boolean)
* CreateInvalidVolatileCell.get(...)} try to figure out the appropriate
* {@link DefaultEmptyArrayCreator}.
* <p>
* Default access types are
* </p>
* <ul>
* <li>{@link DirtyVolatileByteArray}</li>
* <li>{@link VolatileByteArray}</li>
* <li>{@link DirtyVolatileCharArray}</li>
* <li>{@link VolatileCharArray}</li>
* <li>{@link DirtyVolatileDoubleArray}</li>
* <li>{@link VolatileDoubleArray}</li>
* <li>{@link DirtyVolatileFloatArray}</li>
* <li>{@link VolatileFloatArray}</li>
* <li>{@link DirtyVolatileIntArray}</li>
* <li>{@link VolatileIntArray}</li>
* <li>{@link DirtyVolatileLongArray}</li>
* <li>{@link VolatileLongArray}</li>
* <li>{@link DirtyVolatileShortArray}</li>
* <li>{@link VolatileShortArray}</li>
* </ul>
*
* @return an {@link EmptyArrayCreator} for {@code A} or null.
*/
default EmptyArrayCreator< A > getEmptyArrayCreator()
{
return null;
}
/**
* Load cell data into memory. This method blocks until data is successfully
* loaded. If it completes normally, the returned data is always valid. If
* anything goes wrong, an {@link IOException} is thrown.
* <p>
 * {@code SimpleCacheArrayLoader} is supposed to load data for one specific
 * image. {@code loadArray()} will not get information about which
* timepoint, resolution level, etc a requested block belongs to. Also the
* appropriate block size is supposed to be known to the
* {@code SimpleCacheArrayLoader}.
* <p>
* This is in contrast to
* {@link CacheArrayLoader#loadArray(int, int, int, int[], long[])}, where
* all information to identify a particular block in a whole dataset is
* provided.
*
* @param gridPosition
* the coordinate of the cell in the cell grid.
*
* @return loaded cell data.
*/
A loadArray( long[] gridPosition ) throws IOException;
}
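To make the contract above concrete, here is a minimal implementation sketch (illustration only, not part of the commit). The 64x64x64 block size and the readBlock(...) helper are hypothetical stand-ins for whatever a real back-end knows and does; because VolatileByteArray is one of the standard access types listed above, getEmptyArrayCreator() is left at its default.

import java.io.IOException;

import bdv.img.cache.SimpleCacheArrayLoader;
import net.imglib2.img.basictypeaccess.volatiles.array.VolatileByteArray;

// Hypothetical loader for one image whose blocks are 64x64x64 unsigned byte voxels.
public class ExampleByteArrayLoader implements SimpleCacheArrayLoader< VolatileByteArray >
{
	private static final int BLOCK_NUM_ENTITIES = 64 * 64 * 64;

	@Override
	public VolatileByteArray loadArray( final long[] gridPosition ) throws IOException
	{
		// readBlock(...) stands in for the back-end I/O; per the contract above it
		// either returns valid data or throws an IOException.
		final byte[] data = readBlock( gridPosition );
		// 'true' marks the returned access as valid.
		return new VolatileByteArray( data, true );
	}

	// Stub for illustration only; a real loader would read from a file, HDF5, N5, etc.
	private byte[] readBlock( final long[] gridPosition ) throws IOException
	{
		return new byte[ BLOCK_NUM_ENTITIES ];
	}
}

Note that loadArray(...) receives only the grid position, so the mapping to timepoint, setup and resolution level is fixed when the loader is constructed, which is exactly the trade-off against CacheArrayLoader described in the class comment.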
/*-
* #%L
* BigDataViewer core classes with minimal dependencies.
* %%
* Copyright (C) 2012 - 2020 BigDataViewer developers.
* %%
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
* #L%
*/
package bdv.img.cache;
import java.util.function.Function;
......