Commit 90a62c22 authored by Tobias Pietzsch

Merge branch 'master' into hongkee

Conflicts:
	src/main/resources/plugins.config
parents 86aaf393 b1b70bbc
@@ -5,11 +5,11 @@
<parent>
<groupId>sc.fiji</groupId>
<artifactId>pom-bigdataviewer</artifactId>
<version>1.0.5</version>
<version>1.0.7</version>
</parent>
<artifactId>bigdataviewer_fiji</artifactId>
<version>1.0.4-SNAPSHOT</version>
<version>1.0.7-SNAPSHOT</version>
<name>BigDataViewer Fiji</name>
<description>Fiji plugins for starting BigDataViewer and exporting data.</description>
...
@@ -19,7 +19,6 @@ import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
import mpicbg.spim.data.generic.sequence.BasicImgLoader;
import mpicbg.spim.data.generic.sequence.BasicViewSetup;
import mpicbg.spim.data.registration.ViewRegistration;
import mpicbg.spim.data.registration.ViewRegistrations;
@@ -28,13 +27,15 @@ import mpicbg.spim.data.sequence.FinalVoxelDimensions;
import mpicbg.spim.data.sequence.TimePoint;
import mpicbg.spim.data.sequence.TimePoints;
import net.imglib2.FinalDimensions;
import net.imglib2.RandomAccessibleInterval;
import net.imglib2.realtransform.AffineTransform3D;
import net.imglib2.type.numeric.integer.UnsignedShortType;
import bdv.export.ExportMipmapInfo;
import bdv.export.ProgressWriter;
import bdv.export.ProposeMipmaps;
import bdv.export.SubTaskProgressWriter;
import bdv.export.WriteSequenceToHdf5;
import bdv.export.WriteSequenceToHdf5.AfterEachPlane;
import bdv.export.WriteSequenceToHdf5.LoopbackHeuristic;
import bdv.ij.export.imgloader.ImagePlusImgLoader;
import bdv.ij.export.imgloader.ImagePlusImgLoader.MinMaxOption;
import bdv.ij.util.PluginHelper;
@@ -116,7 +117,7 @@ public class ExportImagePlusPlugIn implements PlugIn
progressWriter.out().println( "starting export..." );
// create ImgLoader wrapping the image
final BasicImgLoader< UnsignedShortType > imgLoader;
final ImagePlusImgLoader< ? > imgLoader;
switch ( imp.getType() )
{
case ImagePlus.GRAY8:
@@ -157,6 +158,57 @@ public class ExportImagePlusPlugIn implements PlugIn
for ( final BasicViewSetup setup : seq.getViewSetupsOrdered() )
perSetupExportMipmapInfo.put( setup.getId(), mipmapInfo );
// LoopbackHeuristic:
// - If saving more than 8x on pixel reads, use the loopback image instead
// of the original image.
// - For virtual stacks, also consider the cache size that would be
// required for all original planes contributing to a "plane of
// blocks" at the current level. If this is more than 1/4 of the
// available memory, use the loopback image.
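// (Example for the first rule: with downsampling factors {4,4,2} to the
// original image and {2,2,1} to the previous level, reading from the
// previous level touches 32 / 4 = 8 times fewer pixels, so the loopback
// image would be used.)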
final boolean isVirtual = imp.getStack().isVirtual();
final long planeSizeInBytes = imp.getWidth() * imp.getHeight() * imp.getBytesPerPixel();
final long ijMaxMemory = IJ.maxMemory();
final LoopbackHeuristic loopbackHeuristic = new LoopbackHeuristic()
{
@Override
public boolean decide( final RandomAccessibleInterval< ? > originalImg, final int[] factorsToOriginalImg, final int previousLevel, final int[] factorsToPreviousLevel, final int[] chunkSize )
{
if ( previousLevel < 0 )
return false;
if ( WriteSequenceToHdf5.numElements( factorsToOriginalImg ) / WriteSequenceToHdf5.numElements( factorsToPreviousLevel ) >= 8 )
return true;
if ( isVirtual )
{
final long requiredCacheSize = planeSizeInBytes * factorsToOriginalImg[ 2 ] * chunkSize[ 2 ];
if ( requiredCacheSize > ijMaxMemory / 4 )
return true;
}
return false;
}
};
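// AfterEachPlane callback: when exporting from a virtual stack without using
// the loopback image, clear the ImagePlusImgLoader cache whenever less than
// half of the maximum heap remains free.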
final AfterEachPlane afterEachPlane = new AfterEachPlane()
{
@Override
public void afterEachPlane( final boolean usedLoopBack )
{
if ( !usedLoopBack && isVirtual )
{
final long free = Runtime.getRuntime().freeMemory();
final long total = Runtime.getRuntime().totalMemory();
final long max = Runtime.getRuntime().maxMemory();
final long actuallyFree = max - total + free;
if ( actuallyFree < max / 2 )
imgLoader.clearCache();
}
}
};
final ArrayList< Partition > partitions;
if ( params.split )
{
@@ -168,14 +220,14 @@ public class ExportImagePlusPlugIn implements PlugIn
{
final Partition partition = partitions.get( i );
final ProgressWriter p = new SubTaskProgressWriter( progressWriter, 0, 0.95 * i / partitions.size() );
WriteSequenceToHdf5.writeHdf5PartitionFile( seq, perSetupExportMipmapInfo, params.deflate, partition, p );
WriteSequenceToHdf5.writeHdf5PartitionFile( seq, perSetupExportMipmapInfo, params.deflate, partition, loopbackHeuristic, afterEachPlane, p );
}
WriteSequenceToHdf5.writeHdf5PartitionLinkFile( seq, perSetupExportMipmapInfo, partitions, params.hdf5File );
}
else
{
partitions = null;
WriteSequenceToHdf5.writeHdf5File( seq, perSetupExportMipmapInfo, params.deflate, params.hdf5File, new SubTaskProgressWriter( progressWriter, 0, 0.95 ) );
WriteSequenceToHdf5.writeHdf5File( seq, perSetupExportMipmapInfo, params.deflate, params.hdf5File, loopbackHeuristic, afterEachPlane, new SubTaskProgressWriter( progressWriter, 0, 0.95 ) );
}
// write xml sequence description
@@ -283,16 +335,16 @@ public class ExportImagePlusPlugIn implements PlugIn
{
final GenericDialogPlus gd = new GenericDialogPlus( "Export for BigDataViewer" );
gd.addCheckbox( "manual mipmap setup", lastSetMipmapManual );
gd.addCheckbox( "manual_mipmap_setup", lastSetMipmapManual );
final Checkbox cManualMipmap = ( Checkbox ) gd.getCheckboxes().lastElement();
gd.addStringField( "Subsampling factors", lastSubsampling, 25 );
gd.addStringField( "Subsampling_factors", lastSubsampling, 25 );
final TextField tfSubsampling = ( TextField ) gd.getStringFields().lastElement();
gd.addStringField( "Hdf5 chunk sizes", lastChunkSizes, 25 );
gd.addStringField( "Hdf5_chunk_sizes", lastChunkSizes, 25 );
final TextField tfChunkSizes = ( TextField ) gd.getStringFields().lastElement();
gd.addMessage( "" );
final String[] minMaxChoices = new String[] { "Use ImageJ's current min/max setting", "Compute min/max of the (hyper-)stack", "Use values specified below" };
gd.addChoice( "Value range", minMaxChoices, minMaxChoices[ lastMinMaxChoice ] );
gd.addChoice( "Value_range", minMaxChoices, minMaxChoices[ lastMinMaxChoice ] );
final Choice cMinMaxChoices = (Choice) gd.getChoices().lastElement();
gd.addNumericField( "Min", lastMin, 0 );
final TextField tfMin = (TextField) gd.getNumericFields().lastElement();
@@ -300,18 +352,18 @@ public class ExportImagePlusPlugIn implements PlugIn
final TextField tfMax = (TextField) gd.getNumericFields().lastElement();
gd.addMessage( "" );
gd.addCheckbox( "split hdf5", lastSplit );
gd.addCheckbox( "split_hdf5", lastSplit );
final Checkbox cSplit = ( Checkbox ) gd.getCheckboxes().lastElement();
gd.addNumericField( "timepoints per partition", lastTimepointsPerPartition, 0, 25, "" );
gd.addNumericField( "timepoints_per_partition", lastTimepointsPerPartition, 0, 25, "" );
final TextField tfSplitTimepoints = ( TextField ) gd.getNumericFields().lastElement();
gd.addNumericField( "setups per partition", lastSetupsPerPartition, 0, 25, "" );
gd.addNumericField( "setups_per_partition", lastSetupsPerPartition, 0, 25, "" );
final TextField tfSplitSetups = ( TextField ) gd.getNumericFields().lastElement();
gd.addMessage( "" );
gd.addCheckbox( "use deflate compression", lastDeflate );
gd.addCheckbox( "use_deflate_compression", lastDeflate );
gd.addMessage( "" );
PluginHelper.addSaveAsFileField( gd, "Export path", lastExportPath, 25 );
PluginHelper.addSaveAsFileField( gd, "Export_path", lastExportPath, 25 );
// gd.addMessage( "" );
// gd.addMessage( "This Plugin is developed by Tobias Pietzsch (pietzsch@mpi-cbg.de)\n" );
...
@@ -294,7 +294,7 @@ public class ExportSpimFusionPlugIn implements PlugIn
for ( final Partition partition : newPartitions )
{
final SubTaskProgressWriter subtaskProgress = new SubTaskProgressWriter( progress, complete, complete + completionStep );
WriteSequenceToHdf5.writeHdf5PartitionFile( fusionSeq, perSetupExportMipmapInfo, params.deflate, partition, subtaskProgress );
WriteSequenceToHdf5.writeHdf5PartitionFile( fusionSeq, perSetupExportMipmapInfo, params.deflate, partition, null, null, subtaskProgress );
complete += completionStep;
}
@@ -346,13 +346,13 @@ public class ExportSpimFusionPlugIn implements PlugIn
{
final Partition partition = partitions.get( i );
final ProgressWriter p = new SubTaskProgressWriter( progress, 0, 0.95 * i / partitions.size() );
WriteSequenceToHdf5.writeHdf5PartitionFile( desc, perSetupExportMipmapInfo, params.deflate, partition, p );
WriteSequenceToHdf5.writeHdf5PartitionFile( desc, perSetupExportMipmapInfo, params.deflate, partition, null, null, p );
}
WriteSequenceToHdf5.writeHdf5PartitionLinkFile( desc, perSetupExportMipmapInfo, partitions, params.hdf5File );
}
else
{
WriteSequenceToHdf5.writeHdf5File( desc, perSetupExportMipmapInfo, params.deflate, params.hdf5File, new SubTaskProgressWriter( progress, 0, 0.95 ) );
WriteSequenceToHdf5.writeHdf5File( desc, perSetupExportMipmapInfo, params.deflate, params.hdf5File, null, null, new SubTaskProgressWriter( progress, 0, 0.95 ) );
}
// write xml file
...
@@ -86,14 +86,14 @@ public class ExportSpimSequencePlugIn implements PlugIn
{
final Partition partition = partitions.get( i );
final ProgressWriter p = new SubTaskProgressWriter( progress, 0, 0.95 * i / partitions.size() );
WriteSequenceToHdf5.writeHdf5PartitionFile( desc, perSetupExportMipmapInfo, params.deflate, partition, p );
WriteSequenceToHdf5.writeHdf5PartitionFile( desc, perSetupExportMipmapInfo, params.deflate, partition, null, null, p );
}
WriteSequenceToHdf5.writeHdf5PartitionLinkFile( desc, perSetupExportMipmapInfo, partitions, params.hdf5File );
}
else
{
partitions = null;
WriteSequenceToHdf5.writeHdf5File( desc, perSetupExportMipmapInfo, params.deflate, params.hdf5File, new SubTaskProgressWriter( progress, 0, 0.95 ) );
WriteSequenceToHdf5.writeHdf5File( desc, perSetupExportMipmapInfo, params.deflate, params.hdf5File, null, null, new SubTaskProgressWriter( progress, 0, 0.95 ) );
}
final Hdf5ImageLoader loader = new Hdf5ImageLoader( params.hdf5File, partitions, null, false );
...
@@ -25,6 +25,8 @@ import mpicbg.spim.data.sequence.ViewId;
import mpicbg.spim.data.sequence.VoxelDimensions;
import net.imglib2.RandomAccessibleInterval;
import net.imglib2.type.numeric.integer.UnsignedShortType;
import bdv.ViewerImgLoader;
import bdv.img.hdf5.Hdf5ImageLoader;
import bdv.spimdata.SequenceDescriptionMinimal;
import bdv.spimdata.SpimDataMinimal;
import bdv.spimdata.XmlIoSpimDataMinimal;
@@ -39,6 +41,7 @@ public class ImportPlugIn implements PlugIn
public static String xmlFile = "";
public static int timepoint = 0;
public static int setup = 0;
public static int mipmap = 0;
public static boolean openAsVirtualStack = false;
private static SequenceDescriptionMinimal openSequence( final String xmlFilename ) throws SpimDataException
@@ -65,6 +68,9 @@ public class ImportPlugIn implements PlugIn
gd.addSlider( "setup index", 0, 0, setup );
final Scrollbar slSetup = (Scrollbar) gd.getSliders().lastElement();
final TextField tfSetup = (TextField) gd.getNumericFields().lastElement();
gd.addSlider( "resolution level", 0, 0, setup );
final Scrollbar slMipmap = (Scrollbar) gd.getSliders().lastElement();
final TextField tfMipmap = (TextField) gd.getNumericFields().lastElement();
gd.addCheckbox( "open as virtual stack", openAsVirtualStack );
final Checkbox cVirtual = (Checkbox) gd.getCheckboxes().lastElement();
@@ -73,6 +79,7 @@ public class ImportPlugIn implements PlugIn
void check( final String xmlFilename )
{
boolean enable = false;
boolean enableMipmap = false;
try
{
final SequenceDescriptionMinimal seq = openSequence( xmlFilename );
@@ -84,6 +91,19 @@ public class ImportPlugIn implements PlugIn
slTimepoint.setMaximum( numTimepoints );
slSetup.setMaximum( numSetups );
enable = true;
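// enable the resolution-level slider only for multi-resolution loaders and
// take the number of levels from the first view setup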
if ( seq.getImgLoader() instanceof ViewerImgLoader )
{
final ViewerImgLoader< ?, ? > vil = ( ViewerImgLoader< ?, ? > ) seq.getImgLoader();
final int numMipmapLevels = vil.numMipmapLevels( seq.getViewSetupsOrdered().get( 0 ).getId() );
slMipmap.setMaximum( numMipmapLevels );
enableMipmap = true;
}
else
{
enableMipmap = false;
}
}
}
catch ( final Exception ex )
@@ -95,6 +115,8 @@ public class ImportPlugIn implements PlugIn
tfTimepoint.setEnabled( enable );
slSetup.setEnabled( enable );
tfSetup.setEnabled( enable );
slMipmap.setEnabled( enableMipmap );
tfMipmap.setEnabled( enableMipmap );
cVirtual.setEnabled( enable );
}
}
@@ -109,6 +131,7 @@ public class ImportPlugIn implements PlugIn
gd.getNextString();
gd.getNextNumber();
gd.getNextNumber();
gd.getNextNumber();
gd.getNextBoolean();
if ( e instanceof TextEvent && e.getID() == TextEvent.TEXT_VALUE_CHANGED && e.getSource() == tfXmlFile )
{
@@ -127,6 +150,7 @@ public class ImportPlugIn implements PlugIn
xmlFile = gd.getNextString();
timepoint = ( int ) gd.getNextNumber();
setup = ( int ) gd.getNextNumber();
mipmap = ( int ) gd.getNextNumber();
openAsVirtualStack = gd.getNextBoolean();
System.out.println( xmlFile + " " + timepoint + " " + setup );
@@ -143,22 +167,32 @@ public class ImportPlugIn implements PlugIn
setup = Math.max( Math.min( setup, numSetups - 1 ), 0 );
final int timepointId = timepointsOrdered.get( timepoint ).getId();
final int setupId = setupsOrdered.get( setup ).getId();
@SuppressWarnings( "unchecked" )
final BasicImgLoader< UnsignedShortType > il = ( BasicImgLoader< UnsignedShortType > ) seq.getImgLoader();
final RandomAccessibleInterval< UnsignedShortType > img = il.getImage( new ViewId( timepointId, setupId ) );
// final UnsignedShortType t = new UnsignedShortType();
// final Img< UnsignedShortType > copy = net.imglib2.util.Util.getArrayOrCellImgFactory( img, t ).create( img, t );
// final Cursor< UnsignedShortType > in = Views.flatIterable( img ).cursor();
// final Cursor< UnsignedShortType > out = Views.flatIterable( copy ).cursor();
// final long t0 = System.currentTimeMillis();
// while( in.hasNext() )
// out.next().set( in.next() );
// final long t1 = System.currentTimeMillis();
// System.out.println( t1 - t0 );
BasicImgLoader< UnsignedShortType > il = ( BasicImgLoader< UnsignedShortType > ) seq.getImgLoader();
boolean duplicateImp = !openAsVirtualStack;
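// when not opening as a virtual stack, replace an HDF5-backed loader by its
// monolithic (fully loaded) variant so the wrapped ImagePlus needs no duplication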
if ( !openAsVirtualStack && il instanceof Hdf5ImageLoader )
{
final Hdf5ImageLoader h5il = ( Hdf5ImageLoader ) il;
il = h5il.getMonolithicImageLoader();
duplicateImp = false;
}
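// for multi-resolution loaders, clamp the requested level and load that mipmap;
// otherwise load the single full-resolution image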
final RandomAccessibleInterval< UnsignedShortType > img;
if ( il instanceof ViewerImgLoader )
{
final ViewerImgLoader< UnsignedShortType, ? > vil = ( ViewerImgLoader< UnsignedShortType, ? > ) seq.getImgLoader();
final int numMipmapLevels = vil.numMipmapLevels( setupId );
if ( mipmap >= numMipmapLevels )
mipmap = numMipmapLevels - 1;
img = vil.getImage( new ViewId( timepointId, setupId ), mipmap );
}
else
img = il.getImage( new ViewId( timepointId, setupId ) );
ImagePlus imp = net.imglib2.img.display.imagej.ImageJFunctions.wrap( img, "" );
if ( !openAsVirtualStack )
imp.setDimensions( 1, imp.getImageStackSize(), 1 );
if ( duplicateImp )
imp = imp.duplicate();
imp.setTitle( new File( xmlFile ).getName() + " " + timepoint + " " + setup );
final VoxelDimensions voxelSize = setupsOrdered.get( setup ).getVoxelSize();
...
package bdv.ij;
import java.io.File;
import java.util.ArrayList;
import java.util.HashMap;
import bdv.BigDataViewer;
import bdv.ij.util.ProgressWriterIJ;
import bdv.img.imagestack.ImageStackImageLoader;
import bdv.img.virtualstack.VirtualStackImageLoader;
import bdv.spimdata.SequenceDescriptionMinimal;
import bdv.spimdata.SpimDataMinimal;
import bdv.spimdata.WrapBasicImgLoader;
import bdv.tools.brightness.ConverterSetup;
import bdv.tools.brightness.SetupAssignments;
import bdv.viewer.DisplayMode;
import bdv.viewer.VisibilityAndGrouping;
import ij.CompositeImage;
import ij.IJ;
import ij.ImageJ;
import ij.ImagePlus;
import ij.WindowManager;
import ij.plugin.PlugIn;
import ij.process.LUT;
import mpicbg.spim.data.SpimDataException;
import mpicbg.spim.data.generic.sequence.BasicImgLoader;
import mpicbg.spim.data.generic.sequence.BasicViewSetup;
import mpicbg.spim.data.registration.ViewRegistration;
import mpicbg.spim.data.registration.ViewRegistrations;
import mpicbg.spim.data.sequence.Channel;
import mpicbg.spim.data.sequence.FinalVoxelDimensions;
import mpicbg.spim.data.sequence.TimePoint;
import mpicbg.spim.data.sequence.TimePoints;
import net.imglib2.FinalDimensions;
import net.imglib2.realtransform.AffineTransform3D;
import net.imglib2.type.numeric.ARGBType;
/**
* ImageJ plugin to show the current image in BigDataViewer.
*
* @author Tobias Pietzsch <tobias.pietzsch@gmail.com>
*/
public class OpenImagePlusPlugIn implements PlugIn
{
public static void main( final String[] args )
{
System.setProperty( "apple.laf.useScreenMenuBar", "true" );
new ImageJ();
IJ.run("Confocal Series (2.2MB)");
// IJ.run("Fly Brain (1MB)");
new OpenImagePlusPlugIn().run( null );
}
@Override
public void run( final String arg )
{
// get the current image
final ImagePlus imp = WindowManager.getCurrentImage();
// make sure there is one
if ( imp == null )
{
IJ.showMessage( "Please open an image first." );
return;
}
// check the image type
switch ( imp.getType() )
{
case ImagePlus.GRAY8:
case ImagePlus.GRAY16:
case ImagePlus.GRAY32:
case ImagePlus.COLOR_RGB:
break;
default:
IJ.showMessage( "Only 8, 16, 32-bit images and RGB images are supported currently!" );
return;
}
// check the image dimensionality
if ( imp.getNDimensions() < 3 )
{
IJ.showMessage( "Image must be at least 3-dimensional!" );
return;
}
// get calibration and image size
final double pw = imp.getCalibration().pixelWidth;
final double ph = imp.getCalibration().pixelHeight;
final double pd = imp.getCalibration().pixelDepth;
String punit = imp.getCalibration().getUnit();
if ( punit == null || punit.isEmpty() )
punit = "px";
final FinalVoxelDimensions voxelSize = new FinalVoxelDimensions( punit, pw, ph, pd );
final int w = imp.getWidth();
final int h = imp.getHeight();
final int d = imp.getNSlices();
final FinalDimensions size = new FinalDimensions( new int[] { w, h, d } );
// propose reasonable mipmap settings
// final ExportMipmapInfo autoMipmapSettings = ProposeMipmaps.proposeMipmaps( new BasicViewSetup( 0, "", size, voxelSize ) );
// imp.getDisplayRangeMin();
// imp.getDisplayRangeMax();
// create ImgLoader wrapping the image
final BasicImgLoader< ? > imgLoader;
if ( imp.getStack().isVirtual() )
{
switch ( imp.getType() )
{
case ImagePlus.GRAY8:
imgLoader = VirtualStackImageLoader.createUnsignedByteInstance( imp );
break;
case ImagePlus.GRAY16:
imgLoader = VirtualStackImageLoader.createUnsignedShortInstance( imp );
break;
case ImagePlus.GRAY32:
imgLoader = VirtualStackImageLoader.createFloatInstance( imp );
break;
case ImagePlus.COLOR_RGB:
default:
imgLoader = VirtualStackImageLoader.createARGBInstance( imp );
break;
}
}
else
{
switch ( imp.getType() )
{
case ImagePlus.GRAY8:
imgLoader = ImageStackImageLoader.createUnsignedByteInstance( imp );
break;
case ImagePlus.GRAY16:
imgLoader = ImageStackImageLoader.createUnsignedShortInstance( imp );
break;
case ImagePlus.GRAY32:
imgLoader = ImageStackImageLoader.createFloatInstance( imp );
break;
case ImagePlus.COLOR_RGB:
default:
imgLoader = ImageStackImageLoader.createARGBInstance( imp );
break;
}
}
final int numTimepoints = imp.getNFrames();
final int numSetups = imp.getNChannels();
// create setups from channels
final HashMap< Integer, BasicViewSetup > setups = new HashMap< Integer, BasicViewSetup >( numSetups );
for ( int s = 0; s < numSetups; ++s )
{
final BasicViewSetup setup = new BasicViewSetup( s, String.format( "channel %d", s + 1 ), size, voxelSize );
setup.setAttribute( new Channel( s + 1 ) );
setups.put( s, setup );
}
// create timepoints
final ArrayList< TimePoint > timepoints = new ArrayList< TimePoint >( numTimepoints );
for ( int t = 0; t < numTimepoints; ++t )
timepoints.add( new TimePoint( t ) );
final SequenceDescriptionMinimal seq = new SequenceDescriptionMinimal( new TimePoints( timepoints ), setups, imgLoader, null );
// create ViewRegistrations from the image's calibration
final AffineTransform3D sourceTransform = new AffineTransform3D();
sourceTransform.set( pw, 0, 0, 0, 0, ph, 0, 0, 0, 0, pd, 0 );
final ArrayList< ViewRegistration > registrations = new ArrayList< ViewRegistration >();
for ( int t = 0; t < numTimepoints; ++t )
for ( int s = 0; s < numSetups; ++s )
registrations.add( new ViewRegistration( t, s, sourceTransform ) );
final File basePath = new File(".");
final SpimDataMinimal spimData = new SpimDataMinimal( basePath, seq, new ViewRegistrations( registrations ) );
WrapBasicImgLoader.wrapImgLoaderIfNecessary( spimData );
try
{
final BigDataViewer bdv = new BigDataViewer( spimData, "BigDataViewer", new ProgressWriterIJ() );
final SetupAssignments sa = bdv.getSetupAssignments();
final VisibilityAndGrouping vg = bdv.getViewer().getVisibilityAndGrouping();
if ( imp.isComposite() )
transferChannelSettings( ( CompositeImage ) imp, sa, vg );
else if ( imp.getType() == ImagePlus.COLOR_RGB )
transferSettingsRGB( imp, sa );
}
catch ( final SpimDataException e )
{
throw new RuntimeException( e );
}
}
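// copies per-channel LUT colors and display ranges from the CompositeImage to
// the corresponding BDV converter setups and mirrors the composite/single
// display mode and channel visibility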
protected void transferChannelSettings( final CompositeImage ci, final SetupAssignments setupAssignments, final VisibilityAndGrouping visibility )
{
final int nChannels = ci.getNChannels();
final int mode = ci.getCompositeMode();
final boolean transferColor = mode == IJ.COMPOSITE || mode == IJ.COLOR;
for ( int c = 0; c < nChannels; ++c )
{
final LUT lut = ci.getChannelLut( c + 1 );
final ConverterSetup setup = setupAssignments.getConverterSetups().get( c );
if ( transferColor )
setup.setColor( new ARGBType( lut.getRGB( 255 ) ) );
setup.setDisplayRange( lut.min, lut.max );
}
if ( mode == IJ.COMPOSITE )
{
final boolean[] activeChannels = ci.getActiveChannels();
visibility.setDisplayMode( DisplayMode.FUSED );
for ( int i = 0; i < activeChannels.length; ++i )
visibility.setSourceActive( i, activeChannels[ i ] );
}
else
visibility.setDisplayMode( DisplayMode.SINGLE );
visibility.setCurrentSource( ci.getChannel() - 1 );
}
protected void transferSettingsRGB( final ImagePlus imp, final SetupAssignments setupAssignments )
{
final ConverterSetup setup = setupAssignments.getConverterSetups().get( 0 );
setup.setDisplayRange( imp.getDisplayRangeMin(), imp.getDisplayRangeMax() );
}
}
@@ -203,7 +203,7 @@ public class Scripting
public void writePartition( final int index )
{
if ( index >= 0 && index < partitions.size() )
WriteSequenceToHdf5.writeHdf5PartitionFile( spimData.getSequenceDescription(), perSetupMipmapInfo, deflate, partitions.get( index ), null );
WriteSequenceToHdf5.writeHdf5PartitionFile( spimData.getSequenceDescription(), perSetupMipmapInfo, deflate, partitions.get( index ), null, null, null );
}
public void writeXmlAndLinks() throws SpimDataException
...
@@ -12,6 +12,8 @@ import net.imglib2.type.numeric.RealType;
import net.imglib2.type.numeric.integer.UnsignedByteType;
import net.imglib2.type.numeric.integer.UnsignedShortType;
import net.imglib2.type.numeric.real.FloatType;
import bdv.img.cache.VolatileGlobalCellCache;
import bdv.img.imagestack.ImageStackImageLoader;
import bdv.img.virtualstack.VirtualStackImageLoader;
/**
@@ -40,33 +42,45 @@ public class ImagePlusImgLoader< T extends RealType< T > & NativeType< T > > imp
{
if( imp.getType() != ImagePlus.GRAY8 )
throw new RuntimeException( "expected ImagePlus type GRAY8" );
return new ImagePlusImgLoader< UnsignedByteType >( imp, VirtualStackImageLoader.createUnsignedByteInstance( imp ), minMaxOption, min, max );
if ( imp.getStack() != null && imp.getStack().isVirtual() )
return new ImagePlusImgLoader< UnsignedByteType >( imp, VirtualStackImageLoader.createUnsignedByteInstance( imp ), minMaxOption, min, max );
else
return new ImagePlusImgLoader< UnsignedByteType >( imp, ImageStackImageLoader.createUnsignedByteInstance( imp ), minMaxOption, min, max );