Commit 653a63a4 authored by Stephan Preibisch
Browse files

Merge branch 'master' of github.com:tpietzsch/spimviewer

parents 090063bb b94899e0
......@@ -20,7 +20,7 @@
<dependency>
<groupId>sc.fiji</groupId>
<artifactId>bigdataviewer-core</artifactId>
<version>1.0.0-SNAPSHOT</version>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>net.imglib2</groupId>
......@@ -37,12 +37,10 @@
<dependency>
<groupId>sc.fiji</groupId>
<artifactId>SPIM_Registration</artifactId>
<version>2.0.0-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>java3d</groupId>
<artifactId>vecmath</artifactId>
<version>1.3.1</version>
</dependency>
<!--
The following dependencies are added to deal with old SPIM_Registration version currently deployed in maven.imagej.net.
......
......@@ -17,6 +17,7 @@ import java.awt.event.ItemEvent;
import java.io.File;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
import mpicbg.spim.data.generic.sequence.BasicImgLoader;
import mpicbg.spim.data.generic.sequence.BasicViewSetup;
......@@ -39,6 +40,7 @@ import bdv.ij.export.imgloader.ImagePlusImgLoader.MinMaxOption;
import bdv.ij.util.PluginHelper;
import bdv.ij.util.ProgressWriterIJ;
import bdv.img.hdf5.Hdf5ImageLoader;
import bdv.img.hdf5.Partition;
import bdv.spimdata.SequenceDescriptionMinimal;
import bdv.spimdata.SpimDataMinimal;
import bdv.spimdata.XmlIoSpimDataMinimal;
......@@ -137,12 +139,6 @@ public class ExportImagePlusPlugIn implements PlugIn
sourceTransform.set( pw, 0, 0, 0, 0, ph, 0, 0, 0, 0, pd, 0 );
// write hdf5
final File seqFile = params.seqFile;
final File hdf5File = params.hdf5File;
final int[][] resolutions = params.resolutions;
final int[][] subdivisions = params.subdivisions;
final boolean deflate = params.deflate;
final HashMap< Integer, BasicViewSetup > setups = new HashMap< Integer, BasicViewSetup >( numSetups );
for ( int s = 0; s < numSetups; ++s )
{
......@@ -155,10 +151,35 @@ public class ExportImagePlusPlugIn implements PlugIn
timepoints.add( new TimePoint( t ) );
final SequenceDescriptionMinimal seq = new SequenceDescriptionMinimal( new TimePoints( timepoints ), setups, imgLoader, null );
WriteSequenceToHdf5.writeHdf5File( seq, resolutions, subdivisions, deflate, hdf5File, new SubTaskProgressWriter( progressWriter, 0, 0.95 ) );
Map< Integer, ExportMipmapInfo > perSetupExportMipmapInfo;
perSetupExportMipmapInfo = new HashMap< Integer, ExportMipmapInfo >();
final ExportMipmapInfo mipmapInfo = new ExportMipmapInfo( params.resolutions, params.subdivisions );
for ( final BasicViewSetup setup : seq.getViewSetupsOrdered() )
perSetupExportMipmapInfo.put( setup.getId(), mipmapInfo );
final ArrayList< Partition > partitions;
if ( params.split )
{
final String xmlFilename = params.seqFile.getAbsolutePath();
final String basename = xmlFilename.endsWith( ".xml" ) ? xmlFilename.substring( 0, xmlFilename.length() - 4 ) : xmlFilename;
partitions = Partition.split( timepoints, seq.getViewSetupsOrdered(), params.timepointsPerPartition, params.setupsPerPartition, basename );
for ( int i = 0; i < partitions.size(); ++i )
{
final Partition partition = partitions.get( i );
final ProgressWriter p = new SubTaskProgressWriter( progressWriter, 0, 0.95 * i / partitions.size() );
WriteSequenceToHdf5.writeHdf5PartitionFile( seq, perSetupExportMipmapInfo, params.deflate, partition, p );
}
WriteSequenceToHdf5.writeHdf5PartitionLinkFile( seq, perSetupExportMipmapInfo, partitions, params.hdf5File );
}
else
{
partitions = null;
WriteSequenceToHdf5.writeHdf5File( seq, perSetupExportMipmapInfo, params.deflate, params.hdf5File, new SubTaskProgressWriter( progressWriter, 0, 0.95 ) );
}
// write xml sequence description
final Hdf5ImageLoader hdf5Loader = new Hdf5ImageLoader( hdf5File, null, null, false );
final Hdf5ImageLoader hdf5Loader = new Hdf5ImageLoader( params.hdf5File, partitions, null, false );
final SequenceDescriptionMinimal seqh5 = new SequenceDescriptionMinimal( seq, hdf5Loader );
final ArrayList< ViewRegistration > registrations = new ArrayList< ViewRegistration >();
......@@ -166,12 +187,12 @@ public class ExportImagePlusPlugIn implements PlugIn
for ( int s = 0; s < numSetups; ++s )
registrations.add( new ViewRegistration( t, s, sourceTransform ) );
final File basePath = seqFile.getParentFile();
final File basePath = params.seqFile.getParentFile();
final SpimDataMinimal spimData = new SpimDataMinimal( basePath, seqh5, new ViewRegistrations( registrations ) );
try
{
new XmlIoSpimDataMinimal().save( spimData, seqFile.getAbsolutePath() );
new XmlIoSpimDataMinimal().save( spimData, params.seqFile.getAbsolutePath() );
progressWriter.setProgress( 1.0 );
}
catch ( final Exception e )
......@@ -201,7 +222,17 @@ public class ExportImagePlusPlugIn implements PlugIn
final boolean deflate;
public Parameters( final boolean setMipmapManual, final int[][] resolutions, final int[][] subdivisions, final File seqFile, final File hdf5File, final MinMaxOption minMaxOption, final double rangeMin, final double rangeMax, final boolean deflate )
final boolean split;
final int timepointsPerPartition;
final int setupsPerPartition;
public Parameters(
final boolean setMipmapManual, final int[][] resolutions, final int[][] subdivisions,
final File seqFile, final File hdf5File,
final MinMaxOption minMaxOption, final double rangeMin, final double rangeMax, final boolean deflate,
final boolean split, final int timepointsPerPartition, final int setupsPerPartition )
{
this.setMipmapManual = setMipmapManual;
this.resolutions = resolutions;
......@@ -212,6 +243,9 @@ public class ExportImagePlusPlugIn implements PlugIn
this.rangeMin = rangeMin;
this.rangeMax = rangeMax;
this.deflate = deflate;
this.split = split;
this.timepointsPerPartition = timepointsPerPartition;
this.setupsPerPartition = setupsPerPartition;
}
}
......@@ -227,6 +261,12 @@ public class ExportImagePlusPlugIn implements PlugIn
static double lastMax = 65535;
static boolean lastSplit = false;
static int lastTimepointsPerPartition = 0;
static int lastSetupsPerPartition = 0;
static boolean lastDeflate = true;
static String lastExportPath = "./export.xml";
......@@ -259,6 +299,14 @@ public class ExportImagePlusPlugIn implements PlugIn
gd.addNumericField( "Max", lastMax, 0 );
final TextField tfMax = (TextField) gd.getNumericFields().lastElement();
gd.addMessage( "" );
gd.addCheckbox( "split hdf5", lastSplit );
final Checkbox cSplit = ( Checkbox ) gd.getCheckboxes().lastElement();
gd.addNumericField( "timepoints per partition", lastTimepointsPerPartition, 0, 25, "" );
final TextField tfSplitTimepoints = ( TextField ) gd.getNumericFields().lastElement();
gd.addNumericField( "setups per partition", lastSetupsPerPartition, 0, 25, "" );
final TextField tfSplitSetups = ( TextField ) gd.getNumericFields().lastElement();
gd.addMessage( "" );
gd.addCheckbox( "use deflate compression", lastDeflate );
......@@ -283,6 +331,9 @@ public class ExportImagePlusPlugIn implements PlugIn
gd.getNextNumber();
gd.getNextNumber();
gd.getNextBoolean();
gd.getNextNumber();
gd.getNextNumber();
gd.getNextBoolean();
gd.getNextString();
if ( e instanceof ItemEvent && e.getID() == ItemEvent.ITEM_STATE_CHANGED && e.getSource() == cMinMaxChoices )
{
......@@ -301,6 +352,12 @@ public class ExportImagePlusPlugIn implements PlugIn
tfChunkSizes.setText( autoChunkSizes );
}
}
else if ( e instanceof ItemEvent && e.getID() == ItemEvent.ITEM_STATE_CHANGED && e.getSource() == cSplit )
{
final boolean split = cSplit.getState();
tfSplitTimepoints.setEnabled( split );
tfSplitSetups.setEnabled( split );
}
return true;
}
} );
......@@ -317,6 +374,9 @@ public class ExportImagePlusPlugIn implements PlugIn
tfChunkSizes.setText( autoChunkSizes );
}
tfSplitTimepoints.setEnabled( lastSplit );
tfSplitSetups.setEnabled( lastSplit );
gd.showDialog();
if ( gd.wasCanceled() )
return null;
......@@ -327,6 +387,9 @@ public class ExportImagePlusPlugIn implements PlugIn
lastMinMaxChoice = gd.getNextChoiceIndex();
lastMin = gd.getNextNumber();
lastMax = gd.getNextNumber();
lastSplit = gd.getNextBoolean();
lastTimepointsPerPartition = ( int ) gd.getNextNumber();
lastSetupsPerPartition = ( int ) gd.getNextNumber();
lastDeflate = gd.getNextBoolean();
lastExportPath = gd.getNextString();
......@@ -370,7 +433,6 @@ public class ExportImagePlusPlugIn implements PlugIn
final String hdf5Filename = seqFilename.substring( 0, seqFilename.length() - 4 ) + ".h5";
final File hdf5File = new File( hdf5Filename );
return new Parameters( lastSetMipmapManual, resolutions, subdivisions, seqFile, hdf5File, minMaxOption, lastMin, lastMax, lastDeflate );
}
return new Parameters( lastSetMipmapManual, resolutions, subdivisions, seqFile, hdf5File, minMaxOption, lastMin, lastMax, lastDeflate, lastSplit, lastTimepointsPerPartition, lastSetupsPerPartition ); }
}
}
......@@ -19,8 +19,12 @@ import java.io.File;
import java.io.FilenameFilter;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import mpicbg.spim.data.generic.sequence.BasicViewSetup;
import mpicbg.spim.data.sequence.TimePoint;
import mpicbg.spim.io.ConfigurationParserException;
import mpicbg.spim.io.IOFunctions;
import mpicbg.spim.io.SPIMConfiguration;
......@@ -35,6 +39,7 @@ import bdv.ij.export.SpimRegistrationSequence;
import bdv.ij.util.PluginHelper;
import bdv.ij.util.ProgressWriterIJ;
import bdv.img.hdf5.Hdf5ImageLoader;
import bdv.img.hdf5.Partition;
import bdv.spimdata.SequenceDescriptionMinimal;
import bdv.spimdata.SpimDataMinimal;
import bdv.spimdata.XmlIoSpimDataMinimal;
......@@ -55,16 +60,43 @@ public class ExportSpimSequencePlugIn implements PlugIn
final SpimRegistrationSequence sequence = new SpimRegistrationSequence( params.conf );
final SequenceDescriptionMinimal desc = sequence.getSequenceDescription();
final boolean setMipmapManual = params.setMipmapManual;
if ( setMipmapManual )
WriteSequenceToHdf5.writeHdf5File( desc, params.resolutions, params.subdivisions, params.deflate, params.hdf5File, new SubTaskProgressWriter( progress, 0, 0.95 ) );
Map< Integer, ExportMipmapInfo > perSetupExportMipmapInfo;
if ( params.setMipmapManual )
{
perSetupExportMipmapInfo = new HashMap< Integer, ExportMipmapInfo >();
final ExportMipmapInfo mipmapInfo = new ExportMipmapInfo( params.resolutions, params.subdivisions );
for ( final BasicViewSetup setup : desc.getViewSetupsOrdered() )
perSetupExportMipmapInfo.put( setup.getId(), mipmapInfo );
}
else
{
perSetupExportMipmapInfo = ProposeMipmaps.proposeMipmaps( desc );
}
final ArrayList< Partition > partitions;
if ( params.split )
{
final String xmlFilename = params.seqFile.getAbsolutePath();
final String basename = xmlFilename.endsWith( ".xml" ) ? xmlFilename.substring( 0, xmlFilename.length() - 4 ) : xmlFilename;
final List< TimePoint > timepoints = desc.getTimePoints().getTimePointsOrdered();
final List< BasicViewSetup > setups = desc.getViewSetupsOrdered();
partitions = Partition.split( timepoints, setups, params.timepointsPerPartition, params.setupsPerPartition, basename );
for ( int i = 0; i < partitions.size(); ++i )
{
final Partition partition = partitions.get( i );
final ProgressWriter p = new SubTaskProgressWriter( progress, 0, 0.95 * i / partitions.size() );
WriteSequenceToHdf5.writeHdf5PartitionFile( desc, perSetupExportMipmapInfo, params.deflate, partition, p );
}
WriteSequenceToHdf5.writeHdf5PartitionLinkFile( desc, perSetupExportMipmapInfo, partitions, params.hdf5File );
}
else
{
final Map< Integer, ExportMipmapInfo > perSetupExportMipmapInfo = ProposeMipmaps.proposeMipmaps( desc );
partitions = null;
WriteSequenceToHdf5.writeHdf5File( desc, perSetupExportMipmapInfo, params.deflate, params.hdf5File, new SubTaskProgressWriter( progress, 0, 0.95 ) );
}
final Hdf5ImageLoader loader = new Hdf5ImageLoader( params.hdf5File, null, null, false );
final Hdf5ImageLoader loader = new Hdf5ImageLoader( params.hdf5File, partitions, null, false );
final SequenceDescriptionMinimal sequenceDescription = new SequenceDescriptionMinimal( desc, loader );
final File basePath = params.seqFile.getParentFile();
......@@ -88,6 +120,12 @@ public class ExportSpimSequencePlugIn implements PlugIn
static String lastChunkSizes = "{16,16,16}, {16,16,16}, {16,16,16}";
static boolean lastSplit = false;
static int lastTimepointsPerPartition = 0;
static int lastSetupsPerPartition = 0;
static boolean lastDeflate = true;
public static String fusionType[] = new String[] { "Single-channel", "Multi-channel" };
......@@ -110,7 +148,17 @@ public class ExportSpimSequencePlugIn implements PlugIn
final boolean deflate;
public Parameters( final SPIMConfiguration conf, final boolean setMipmapManual, final int[][] resolutions, final int[][] subdivisions, final File seqFile, final File hdf5File, final boolean deflate )
final boolean split;
final int timepointsPerPartition;
final int setupsPerPartition;
public Parameters(
final SPIMConfiguration conf,
final boolean setMipmapManual, final int[][] resolutions, final int[][] subdivisions,
final File seqFile, final File hdf5File, final boolean deflate,
final boolean split, final int timepointsPerPartition, final int setupsPerPartition )
{
this.conf = conf;
this.setMipmapManual = setMipmapManual;
......@@ -119,6 +167,9 @@ public class ExportSpimSequencePlugIn implements PlugIn
this.seqFile = seqFile;
this.hdf5File = hdf5File;
this.deflate = deflate;
this.split = split;
this.timepointsPerPartition = timepointsPerPartition;
this.setupsPerPartition = setupsPerPartition;
}
}
......@@ -447,6 +498,14 @@ public class ExportSpimSequencePlugIn implements PlugIn
gd2.addStringField( "Hdf5 chunk sizes", lastChunkSizes, 25 );
final TextField tfChunkSizes = ( TextField ) gd2.getStringFields().lastElement();
gd2.addMessage( "" );
gd2.addCheckbox( "split hdf5", lastSplit );
final Checkbox cSplit = ( Checkbox ) gd2.getCheckboxes().lastElement();
gd2.addNumericField( "timepoints per partition", lastTimepointsPerPartition, 0, 25, "" );
final TextField tfSplitTimepoints = ( TextField ) gd2.getNumericFields().lastElement();
gd2.addNumericField( "setups per partition", lastSetupsPerPartition, 0, 25, "" );
final TextField tfSplitSetups = ( TextField ) gd2.getNumericFields().lastElement();
gd2.addMessage( "" );
gd2.addCheckbox( "use deflate compression", lastDeflate );
......@@ -479,6 +538,20 @@ public class ExportSpimSequencePlugIn implements PlugIn
tfChunkSizes.setText( autoChunkSizes );
}
cSplit.addItemListener( new ItemListener()
{
@Override
public void itemStateChanged( final ItemEvent arg0 )
{
final boolean split = cSplit.getState();
tfSplitTimepoints.setEnabled( split );
tfSplitSetups.setEnabled( split );
}
} );
tfSplitTimepoints.setEnabled( lastSplit );
tfSplitSetups.setEnabled( lastSplit );
// gd.addMessage("");
// gd.addMessage("This Plugin is developed by Tobias Pietzsch (pietzsch@mpi-cbg.de)\n");
// Bead_Registration.addHyperLinkListener( (MultiLineLabel) gd.getMessage(), "mailto:pietzsch@mpi-cbg.de");
......@@ -559,6 +632,10 @@ public class ExportSpimSequencePlugIn implements PlugIn
return null;
}
lastSplit = gd2.getNextBoolean();
lastTimepointsPerPartition = ( int ) gd2.getNextNumber();
lastSetupsPerPartition = ( int ) gd2.getNextNumber();
lastDeflate = gd2.getNextBoolean();
String seqFilename = gd2.getNextString();
......@@ -574,7 +651,7 @@ public class ExportSpimSequencePlugIn implements PlugIn
final String hdf5Filename = seqFilename.substring( 0, seqFilename.length() - 4 ) + ".h5";
final File hdf5File = new File( hdf5Filename );
return new Parameters( conf, lastSetMipmapManual, resolutions, subdivisions, seqFile, hdf5File, lastDeflate );
return new Parameters( conf, lastSetMipmapManual, resolutions, subdivisions, seqFile, hdf5File, lastDeflate, lastSplit, lastTimepointsPerPartition, lastSetupsPerPartition );
}
protected static double loadZStretching( final String file )
......
......@@ -7,7 +7,6 @@ import java.util.List;
import java.util.Map;
import mpicbg.spim.data.generic.base.Entity;
import mpicbg.spim.data.generic.sequence.AbstractSequenceDescription;
import mpicbg.spim.data.generic.sequence.BasicViewSetup;
import mpicbg.spim.data.registration.ViewRegistration;
import mpicbg.spim.data.registration.ViewRegistrations;
......@@ -26,7 +25,7 @@ import bdv.spimdata.SequenceDescriptionMinimal;
public class FusionResult
{
private final AbstractSequenceDescription< ?, ?, ? > desc;
private final SequenceDescriptionMinimal desc;
private final ViewRegistrations regs;
......@@ -111,7 +110,7 @@ public class FusionResult
regs = new ViewRegistrations( registrations );
}
public AbstractSequenceDescription< ?, ?, ? > getSequenceDescription()
public SequenceDescriptionMinimal getSequenceDescription()
{
return desc;
}
......
......@@ -19,7 +19,7 @@ public class ViewSetupWrapper extends BasicViewSetup
private final int sourceSetupId;
protected ViewSetupWrapper( final int id, final AbstractSequenceDescription< ?, ?, ? > sourceSequence, final BasicViewSetup sourceSetup )
public ViewSetupWrapper( final int id, final AbstractSequenceDescription< ?, ?, ? > sourceSequence, final BasicViewSetup sourceSetup )
{
super( id, sourceSetup.getName(), sourceSetup.getSize(), sourceSetup.getVoxelSize() );
this.sourceSequence = sourceSequence;
......
......@@ -159,19 +159,21 @@ public class PluginHelper
return resolutions;
}
public static File createNewPartitionFile( final File xmlSequenceFile ) throws IOException
public static File createNewPartitionFile( final String baseFilename ) throws IOException
{
final String seqFilename = xmlSequenceFile.getAbsolutePath();
if ( !seqFilename.endsWith( ".xml" ) )
throw new IllegalArgumentException();
final String baseFilename = seqFilename.substring( 0, seqFilename.length() - 4 );
File hdf5File = new File( String.format( "%s.h5", baseFilename ) );
if ( ! hdf5File.exists() )
if ( hdf5File.createNewFile() )
return hdf5File;
for ( int i = 0; i < Integer.MAX_VALUE; ++i )
{
final File hdf5File = new File( String.format( "%s-%d.h5", baseFilename, i ) );
hdf5File = new File( String.format( "%s-%d.h5", baseFilename, i ) );
if ( ! hdf5File.exists() )
if ( hdf5File.createNewFile() )
return hdf5File;
}
throw new RuntimeException( "could not generate new partition filename" );
}
}
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment