diff --git a/install b/install
index c5de74ee..047c57df 100755
--- a/install
+++ b/install
@@ -129,6 +129,8 @@ install_command solve-intensities "net.preibisch.bigstitcher.spark.IntensitySolv
install_command create-fusion-container "net.preibisch.bigstitcher.spark.CreateFusionContainer"
install_command affine-fusion "net.preibisch.bigstitcher.spark.SparkAffineFusion"
install_command nonrigid-fusion "net.preibisch.bigstitcher.spark.SparkNonRigidFusion"
+install_command create-dataset "net.preibisch.bigstitcher.spark.CreateDataset"
+install_command chain-commands "net.preibisch.bigstitcher.spark.ChainCommands"
echo 'Installing utils ...'
diff --git a/pom.xml b/pom.xml
index 8e2e35b4..79b1d43b 100644
--- a/pom.xml
+++ b/pom.xml
@@ -103,8 +103,8 @@
1.0.2
2.3.5
- 8.0.0
- 2.5.0
+ 8.1.3
+ 2.6.1-SNAPSHOT
2.3.0
3.5.0
diff --git a/src/main/java/net/preibisch/bigstitcher/spark/ChainCommands.java b/src/main/java/net/preibisch/bigstitcher/spark/ChainCommands.java
new file mode 100644
index 00000000..62e21c97
--- /dev/null
+++ b/src/main/java/net/preibisch/bigstitcher/spark/ChainCommands.java
@@ -0,0 +1,129 @@
+package net.preibisch.bigstitcher.spark;
+
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Stack;
+import java.util.concurrent.Callable;
+
+import mpicbg.spim.data.SpimDataException;
+import net.preibisch.bigstitcher.spark.abstractcmdline.AbstractInfrastructure;
+import picocli.CommandLine;
+
+public class ChainCommands extends AbstractInfrastructure implements Callable<Void>, Serializable
+{
+ private static final long serialVersionUID = 1584686229152127469L;
+
+ static class CommandWithArguments {
+ final List<String> cmdArgs;
+
+ CommandWithArguments(List<String> cmdArgs) {
+ this.cmdArgs = cmdArgs;
+ }
+ }
+
+ /**
+ * Custom converter to collect all arguments after --command until a separator
+ * (either ';' or '+') or the end of the input.
+ */
+ static class CommandArgsConverter implements CommandLine.IParameterConsumer {
+ @Override
+ public void consumeParameters(Stack<String> args, CommandLine.Model.ArgSpec argSpec, CommandLine.Model.CommandSpec commandSpec) {
+ List<CommandWithArguments> currentCommands = argSpec.getValue();
+ List<String> commandArgs = new ArrayList<>();
+ while (!args.isEmpty()) {
+ String arg = args.pop();
+
+ if (";".equals(arg) || "+".equals(arg)) {
+ break;
+ }
+ if (arg.equals("-h") || arg.equals("--help")) {
+ // add back the help flag at the bottom of the stack
+ // but first check whether anything is left; if nothing was, stop after this command
+ boolean done = args.isEmpty();
+ args.add(0, arg);
+ if (done) break;
+ } else
+ commandArgs.add(arg);
+ }
+ currentCommands.add(new CommandWithArguments(commandArgs));
+ }
+ }
+
+ @CommandLine.Option(names = { "-h", "--help" }, description = "display this help message", usageHelp = true)
+ boolean helpFlag;
+
+ @CommandLine.Option(names = { "--command" }, parameterConsumer = CommandArgsConverter.class,
+ description = "Command to execute with its arguments. Multiple commands can be chained using ';' or '+'.\n"
+ + "Example: --command create-dataset --input-path /data/images/ --input-pattern '*.tif' ; "
+ + "--command detect-interestpoints --detector SIFT --descriptor SIFT ; "
+ + "--command match-interestpoints --matcher FLANN ; stitching --stitchingModel Affine")
+ List<CommandWithArguments> commands = new ArrayList<>();
+
+ @Override
+ public Void call() throws Exception {
+ for (CommandWithArguments commandArgs : commands) {
+ if (commandArgs.cmdArgs.isEmpty())
+ continue;
+
+ String cmdName = commandArgs.cmdArgs.get(0);
+ List<String> cmdArgs = new ArrayList<>(commandArgs.cmdArgs.subList(1, commandArgs.cmdArgs.size()));
+ addCommonOptions(cmdArgs);
+
+ AbstractInfrastructure cmdInstance = getCmdInstance(cmdName);
+ CommandLine currentCmdLine = new CommandLine(cmdInstance);
+ System.out.println("Execute command: " + cmdName + " with args: " + cmdArgs);
+ int exitCode = currentCmdLine.execute(cmdArgs.toArray(new String[0]));
+ if (exitCode != 0) {
+ System.err.println("Command " + cmdName + " failed with exit code " + exitCode);
+ System.exit(exitCode);
+ }
+ }
+ return null;
+ }
+
+ private AbstractInfrastructure getCmdInstance(String name) {
+ switch (name) {
+ case "clear-interestpoints": return new ClearInterestPoints();
+ case "clear-registrations": return new ClearRegistrations();
+ case "create-container": return new CreateFusionContainer();
+ case "detect-interestpoints": return new SparkInterestPointDetection();
+ case "match-interestpoints": return new SparkGeometricDescriptorMatching();
+ case "nonrigid-fusion": return new SparkNonRigidFusion();
+ case "create-dataset": return new CreateDataset();
+ case "stitching": return new SparkPairwiseStitching();
+ case "resave": return new SparkResaveN5();
+ case "downsample": return new SparkDownsample();
+ case "affine-fusion": return new SparkAffineFusion();
+ case "solver": return new Solver();
+ default: throw new IllegalArgumentException("Unknown command: " + name);
+ }
+ }
+
+ private void addCommonOptions(List<String> cmdArgs) {
+ if (this.dryRun) {
+ cmdArgs.add("--dryRun");
+ }
+ if (this.localSparkBindAddress) {
+ cmdArgs.add("--localSparkBindAddress");
+ }
+ if (this.s3Region != null && !this.s3Region.isEmpty()) {
+ cmdArgs.add("--s3Region");
+ cmdArgs.add(this.s3Region);
+ }
+ }
+
+ public static void main(final String... args) throws SpimDataException {
+ System.out.println(Arrays.toString(args));
+
+ ChainCommands chainedCommands = new ChainCommands();
+ CommandLine commandLine = new CommandLine(chainedCommands)
+ .setUnmatchedOptionsArePositionalParams(true)
+ ;
+
+
+ System.exit(commandLine.execute(args));
+ }
+
+}
diff --git a/src/main/java/net/preibisch/bigstitcher/spark/Compressions.java b/src/main/java/net/preibisch/bigstitcher/spark/Compressions.java
new file mode 100644
index 00000000..847912a1
--- /dev/null
+++ b/src/main/java/net/preibisch/bigstitcher/spark/Compressions.java
@@ -0,0 +1,5 @@
+package net.preibisch.bigstitcher.spark;
+
+public enum Compressions {
+ Lz4, Gzip, Zstandard, Blosc, Bzip2, Xz, Raw
+}
diff --git a/src/main/java/net/preibisch/bigstitcher/spark/CreateDataset.java b/src/main/java/net/preibisch/bigstitcher/spark/CreateDataset.java
new file mode 100644
index 00000000..1f758765
--- /dev/null
+++ b/src/main/java/net/preibisch/bigstitcher/spark/CreateDataset.java
@@ -0,0 +1,68 @@
+package net.preibisch.bigstitcher.spark;
+
+import java.io.Serializable;
+import java.net.URI;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.util.Arrays;
+import java.util.concurrent.Callable;
+
+import mpicbg.spim.data.SpimDataException;
+import net.preibisch.bigstitcher.spark.abstractcmdline.AbstractBasic;
+import net.preibisch.mvrecon.dataset.SpimDatasetBuilder;
+import net.preibisch.mvrecon.fiji.spimdata.SpimData2;
+import net.preibisch.mvrecon.fiji.spimdata.XmlIoSpimData2;
+import picocli.CommandLine;
+import picocli.CommandLine.Option;
+import util.URITools;
+
+public class CreateDataset extends AbstractBasic implements Callable<Void>, Serializable
+{
+ private static final long serialVersionUID = -5155338208494730656L;
+
+ @Option(names = {"--input-path"}, required = true, description = "Path to the input images, e.g. /data/images/")
+ private String inputPath = null;
+
+ @Option(names = {"--input-pattern"}, description = "Glob pattern for input images, e.g. /data/images/*.tif")
+ private String inputPattern = "*";
+
+ @Override
+ public Void call() throws Exception {
+ this.setRegion();
+
+ SpimData2 spimData = createDataset();
+
+ URI xmlURI = URITools.toURI(xmlURIString);
+
+ System.out.println("Save spimData with original tiles to " + xmlURI);
+ prepareSaveLocation(xmlURI);
+ new XmlIoSpimData2().save(spimData, xmlURI);
+
+ return null;
+ }
+
+ private SpimData2 createDataset() {
+ SpimDatasetBuilder spimDatasetBuilder = new SpimDatasetBuilder(inputPattern);
+ return spimDatasetBuilder.createDataset(URITools.toURI(inputPath));
+ }
+
+ private void prepareSaveLocation(URI xmlURI) {
+ if (URITools.isFile( xmlURI )) {
+ Path xmlPath = Paths.get(xmlURI);
+ // create parent directories if necessary
+ if ( !xmlPath.getParent().toFile().exists() ) {
+ if (!xmlPath.getParent().toFile().mkdirs()) {
+ // log the error but continue
+ // if the directory wasn't created, it will fail later when trying to write the file
+ System.out.println("Failed to create parent directory for " + xmlURI);
+ }
+ }
+ }
+ }
+
+ public static void main(final String... args) throws SpimDataException {
+ System.out.println(Arrays.toString(args));
+
+ System.exit(new CommandLine(new CreateDataset()).execute(args));
+ }
+}
diff --git a/src/main/java/net/preibisch/bigstitcher/spark/CreateFusionContainer.java b/src/main/java/net/preibisch/bigstitcher/spark/CreateFusionContainer.java
index f83178de..893bdc8d 100644
--- a/src/main/java/net/preibisch/bigstitcher/spark/CreateFusionContainer.java
+++ b/src/main/java/net/preibisch/bigstitcher/spark/CreateFusionContainer.java
@@ -7,11 +7,18 @@
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
+import java.util.HashSet;
import java.util.List;
import java.util.Map;
+import java.util.Set;
import java.util.concurrent.Callable;
import java.util.function.Function;
+import mpicbg.spim.data.generic.base.Entity;
+import mpicbg.spim.data.registration.ViewRegistrations;
+import net.imglib2.util.Pair;
+import net.imglib2.util.ValuePair;
+import net.preibisch.mvrecon.process.interestpointregistration.pairwise.constellation.grouping.Group;
import org.janelia.saalfeldlab.n5.Compression;
import org.janelia.saalfeldlab.n5.DataType;
import org.janelia.saalfeldlab.n5.N5Writer;
@@ -59,8 +66,6 @@ public class CreateFusionContainer extends AbstractBasic implements Callable numTimepointsXML )
System.out.println( "Fusion target: " + boundingBox.getTitle() + ": " + Util.printInterval( boundingBox ) + " with blocksize " + Util.printCoordinates( blockSize ) );
// compression and data type
- final Compression compression = N5Util.getCompression( this.compression, this.compressionLevel );
+ final Compression compression = N5Util.getCompression( this.compressionType, this.compressionLevel );
- System.out.println( "Compression: " + this.compression );
+ System.out.println( "Compression: " + this.compressionType );
System.out.println( "Compression level: " + ( compressionLevel == null ? "default" : compressionLevel ) );
final DataType dt;
@@ -299,24 +330,27 @@ else if ( storageType == StorageFormat.N5 || storageType == StorageFormat.ZARR )
return null;
}
- driverVolumeWriter.setAttribute( "/", "Bigstitcher-Spark/InputXML", xmlURI );
+ // if there is a group different from the root, create it
+ if ( ! getContainerGroupPath().equals("/") ) driverVolumeWriter.createGroup( getContainerGroupPath() );
- driverVolumeWriter.setAttribute( "/", "Bigstitcher-Spark/NumTimepoints", numTimepoints );
- driverVolumeWriter.setAttribute( "/", "Bigstitcher-Spark/NumChannels", numChannels );
+ driverVolumeWriter.setAttribute( getContainerGroupPath(), "Bigstitcher-Spark/InputXML", xmlURI );
- driverVolumeWriter.setAttribute( "/", "Bigstitcher-Spark/Boundingbox_min", boundingBox.minAsLongArray() );
- driverVolumeWriter.setAttribute( "/", "Bigstitcher-Spark/Boundingbox_max", boundingBox.maxAsLongArray() );
+ driverVolumeWriter.setAttribute( getContainerGroupPath(), "Bigstitcher-Spark/NumTimepoints", numTimepoints );
+ driverVolumeWriter.setAttribute( getContainerGroupPath(), "Bigstitcher-Spark/NumChannels", numChannels );
- driverVolumeWriter.setAttribute( "/", "Bigstitcher-Spark/PreserveAnisotropy", preserveAnisotropy );
+ driverVolumeWriter.setAttribute( getContainerGroupPath(), "Bigstitcher-Spark/Boundingbox_min", boundingBox.minAsLongArray() );
+ driverVolumeWriter.setAttribute( getContainerGroupPath(), "Bigstitcher-Spark/Boundingbox_max", boundingBox.maxAsLongArray() );
+
+ driverVolumeWriter.setAttribute( getContainerGroupPath(), "Bigstitcher-Spark/PreserveAnisotropy", preserveAnisotropy );
if (preserveAnisotropy) // cannot write Double.NaN into JSON
- driverVolumeWriter.setAttribute( "/", "Bigstitcher-Spark/AnisotropyFactor", anisotropyFactor );
- driverVolumeWriter.setAttribute( "/", "Bigstitcher-Spark/DataType", dt );
- driverVolumeWriter.setAttribute( "/", "Bigstitcher-Spark/BlockSize", blockSize );
+ driverVolumeWriter.setAttribute( getContainerGroupPath(), "Bigstitcher-Spark/AnisotropyFactor", anisotropyFactor );
+ driverVolumeWriter.setAttribute( getContainerGroupPath(), "Bigstitcher-Spark/DataType", dt );
+ driverVolumeWriter.setAttribute( getContainerGroupPath(), "Bigstitcher-Spark/BlockSize", blockSize );
if ( minIntensity != null && maxIntensity != null )
{
- driverVolumeWriter.setAttribute( "/", "Bigstitcher-Spark/MinIntensity", minIntensity );
- driverVolumeWriter.setAttribute( "/", "Bigstitcher-Spark/MaxIntensity", maxIntensity );
+ driverVolumeWriter.setAttribute( getContainerGroupPath(), "Bigstitcher-Spark/MinIntensity", minIntensity );
+ driverVolumeWriter.setAttribute( getContainerGroupPath(), "Bigstitcher-Spark/MaxIntensity", maxIntensity );
}
// setup datasets and metadata
@@ -333,7 +367,7 @@ else if ( storageType == StorageFormat.N5 || storageType == StorageFormat.ZARR )
System.out.println( "Creating 5D OME-ZARR metadata for '" + outPathURI + "' ... " );
if ( !bdv )
- driverVolumeWriter.setAttribute( "/", "Bigstitcher-Spark/FusionFormat", "OME-ZARR" );
+ driverVolumeWriter.setAttribute( getContainerGroupPath(), "Bigstitcher-Spark/FusionFormat", "OME-ZARR" );
final long[] dim3d = boundingBox.dimensionsAsLongArray();
@@ -343,14 +377,12 @@ else if ( storageType == StorageFormat.N5 || storageType == StorageFormat.ZARR )
for ( int d = 0; d < ds.length; ++d )
ds[ d ] = new int[] { downsamplings[ d ][ 0 ], downsamplings[ d ][ 1 ], downsamplings[ d ][ 2 ], 1, 1 };
- final Function levelToName = (level) -> "/" + level;
-
mrInfos = new MultiResolutionLevelInfo[ 1 ][];
// all is 5d now
mrInfos[ 0 ] = N5ApiTools.setupMultiResolutionPyramid(
driverVolumeWriter,
- levelToName,
+ (level) -> getContainerGroupPath() + level, // multiscale pyramid will be created for the entire provided group
dt,
dim, //5d
compression,
@@ -362,44 +394,37 @@ else if ( storageType == StorageFormat.N5 || storageType == StorageFormat.ZARR )
final Function levelToMipmapTransform =
(level) -> MipmapTransforms.getMipmapTransformDefault( mrInfo[level].absoluteDownsamplingDouble() );
+ updateAnisotropyAndCalibration(dataGlobal, viewIdsGlobal);
// extract the resolution of the s0 export
- // TODO: this is inaccurate, we should actually estimate it from the final transformn that is applied
- // TODO: this is a hack (returns 1,1,1) so the export downsampling pyramid is working
- final VoxelDimensions vx = new FinalVoxelDimensions( "micrometer", new double[] { 1, 1, 1 } );// dataGlobal.getSequenceDescription().getViewSetupsOrdered().iterator().next().getVoxelSize();
- final double[] resolutionS0 = OMEZarrAttibutes.getResolutionS0( vx, anisotropyFactor, Double.NaN );
+ final double[] resolutionS0 = OMEZarrAttibutes.getResolutionS0( cal, avgAnisotropy, Double.NaN );
- System.out.println( "Resolution of level 0: " + Util.printCoordinates( resolutionS0 ) + " " + "micrometer" ); //vx.unit() might not be OME-ZARR compatiblevx.unit() );
+ System.out.println( "Resolution of level 0: " + Util.printCoordinates( resolutionS0 ) + " " + calUnit );
// create metadata
final OmeNgffMultiScaleMetadata[] meta = OMEZarrAttibutes.createOMEZarrMetadata(
5, // int n
- "/", // String name, I also saw "/"
+ getContainerGroupPath(), // String name, I also saw "/"
resolutionS0, // double[] resolutionS0,
- "micrometer", //vx.unit() might not be OME-ZARR compatible // String unitXYZ, // e.g micrometer
+ calUnit, //vx.unit() might not be OME-ZARR compatible // String unitXYZ, // e.g micrometer
mrInfos[ 0 ].length, // int numResolutionLevels,
- levelToName,
+ (level) -> "/" + level, // OME-ZARR metadata will be created relative to the provided group
levelToMipmapTransform );
// save metadata
-
- //org.janelia.saalfeldlab.n5.universe.metadata.ome.ngff.v04.OmeNgffMetadata
- // for this to work you need to register an adapter in the N5Factory class
- // final GsonBuilder builder = new GsonBuilder().registerTypeAdapter( CoordinateTransformation.class, new CoordinateTransformationAdapter() );
- driverVolumeWriter.setAttribute( "/", "multiscales", meta );
+ driverVolumeWriter.setAttribute( getContainerGroupPath(), "multiscales", meta );
}
if ( bdv )
{
System.out.println( "Creating BDV compatible container at '" + outPathURI + "' ... " );
-
if ( storageType == StorageFormat.N5 )
- driverVolumeWriter.setAttribute( "/", "Bigstitcher-Spark/FusionFormat", "BDV/N5" );
+ driverVolumeWriter.setAttribute( getContainerGroupPath(), "Bigstitcher-Spark/FusionFormat", "BDV/N5" );
else if ( storageType == StorageFormat.ZARR )
- driverVolumeWriter.setAttribute( "/", "Bigstitcher-Spark/FusionFormat", "BDV/OME-ZARR" );
+ driverVolumeWriter.setAttribute( getContainerGroupPath(), "Bigstitcher-Spark/FusionFormat", "BDV/OME-ZARR" );
else
- driverVolumeWriter.setAttribute( "/", "Bigstitcher-Spark/FusionFormat", "BDV/HDF5" );
+ driverVolumeWriter.setAttribute( getContainerGroupPath(), "Bigstitcher-Spark/FusionFormat", "BDV/HDF5" );
- driverVolumeWriter.setAttribute( "/", "Bigstitcher-Spark/OutputXML", xmlOutURI );
+ driverVolumeWriter.setAttribute( getContainerGroupPath(), "Bigstitcher-Spark/OutputXML", xmlOutURI );
final long[] bb = boundingBox.dimensionsAsLongArray();
@@ -410,14 +435,12 @@ else if ( storageType == StorageFormat.ZARR )
tps.add( new TimePoint( t ) );
// extract the resolution of the s0 export
- // TODO: this is inaccurate, we should actually estimate it from the final transformn that is applied
- // TODO: this is a hack (returns 1,1,1) so the export downsampling pyramid is working
- final VoxelDimensions vx = new FinalVoxelDimensions( "micrometer", new double[] { 1, 1, 1 } );// dataGlobal.getSequenceDescription().getViewSetupsOrdered().iterator().next().getVoxelSize();
- final double[] resolutionS0 = OMEZarrAttibutes.getResolutionS0( vx, anisotropyFactor, Double.NaN );
+
+ final double[] resolutionS0 = OMEZarrAttibutes.getResolutionS0( cal, avgAnisotropy, Double.NaN );
System.out.println( "Resolution of level 0: " + Util.printCoordinates( resolutionS0 ) + " " + "m" ); //vx.unit() might not be OME-ZARR compatiblevx.unit() );
- final VoxelDimensions vxNew = new FinalVoxelDimensions( "micrometer", resolutionS0 );
+ final VoxelDimensions vxNew = new FinalVoxelDimensions( calUnit, resolutionS0 );
for ( int c = 0; c < numChannels; ++c )
{
@@ -443,7 +466,7 @@ else if ( storageType == StorageFormat.ZARR )
for ( int t = 0; t < numTimepoints; ++t )
{
final OMEZARREntry omeZarrEntry = new OMEZARREntry(
- mrInfos[ 0 ][ 0 ].dataset.substring(0, mrInfos[ 0 ][ 0 ].dataset.lastIndexOf( "/" ) ),
+ mrInfos[ t ][ c ].dataset.substring(0, mrInfos[ t ][ c ].dataset.lastIndexOf( "/" ) ),
new int[] { c, t } );
viewIdToPath.put( new ViewId( t, c ), omeZarrEntry );
@@ -476,7 +499,7 @@ else if ( storageType == StorageFormat.ZARR )
myMrInfo[ c + t*c ] = N5ApiTools.setupBdvDatasetsN5(
driverVolumeWriter, vd, dt, bb, compression, blockSize, downsamplings);
- driverVolumeWriter.setAttribute( "/", "Bigstitcher-Spark/FusionFormat", "BDV/N5" );
+ driverVolumeWriter.setAttribute( getContainerGroupPath(), "Bigstitcher-Spark/FusionFormat", "BDV/N5" );
}
else // HDF5
{
@@ -492,9 +515,9 @@ else if ( storageType == StorageFormat.N5 || storageType == StorageFormat.HDF5 )
mrInfos = new MultiResolutionLevelInfo[ numChannels * numTimepoints ][];
if ( storageType == StorageFormat.N5 )
- driverVolumeWriter.setAttribute( "/", "Bigstitcher-Spark/FusionFormat", "N5" );
+ driverVolumeWriter.setAttribute( getContainerGroupPath(), "Bigstitcher-Spark/FusionFormat", "N5" );
else
- driverVolumeWriter.setAttribute( "/", "Bigstitcher-Spark/FusionFormat", "HDF5" );
+ driverVolumeWriter.setAttribute( getContainerGroupPath(), "Bigstitcher-Spark/FusionFormat", "HDF5" );
for ( int c = 0; c < numChannels; ++c )
for ( int t = 0; t < numTimepoints; ++t )
@@ -516,23 +539,42 @@ else if ( storageType == StorageFormat.N5 || storageType == StorageFormat.HDF5 )
}
// TODO: set extra attributes to load the state
- driverVolumeWriter.setAttribute( "/", "Bigstitcher-Spark/MultiResolutionInfos", mrInfos );
+ driverVolumeWriter.setAttribute( getContainerGroupPath(), "Bigstitcher-Spark/MultiResolutionInfos", mrInfos );
driverVolumeWriter.close();
return null;
}
- public static void main(final String... args) throws SpimDataException
+ private void updateAnisotropyAndCalibration( SpimData2 dataGlobal, List<ViewId> viewIdsGlobal )
{
+ ViewRegistrations registrations = dataGlobal.getViewRegistrations();
+ // get all view descriptions
+ List<ViewDescription> vds = SpimData2.getAllViewDescriptionsSorted(dataGlobal, viewIdsGlobal);
+ // group by timepoint and channel
+ Set<Class<? extends Entity>> groupingFactors = new HashSet<>(Arrays.asList(TimePoint.class, Channel.class));
+ List<Group<ViewDescription>> fusionGroups = Group.splitBy( vds, groupingFactors );
+ Pair<double[], String> calAndUnit = fusionGroups.stream().findFirst()
+ .map(group -> TransformationTools.computeAverageCalibration(group, registrations))
+ .orElse(new ValuePair<>(new double[]{ 1, 1, 1 }, "micrometer"));
+ cal = calAndUnit.getA();
+ calUnit = calAndUnit.getB();
+
+ if (preserveAnisotropy) {
+ if (!Double.isNaN(this.anisotropyFactor)) {
+ avgAnisotropy = this.anisotropyFactor;
+ } else {
+ avgAnisotropy = TransformationTools.getAverageAnisotropyFactor(dataGlobal, viewIdsGlobal);
+ }
+ } else {
+ avgAnisotropy = Double.NaN;
+ }
+ }
- //final XmlIoSpimData io = new XmlIoSpimData();
- //final SpimData spimData = io.load( "/Users/preibischs/Documents/Microscopy/Stitching/Truman/standard/output/dataset.xml" );
- //BdvFunctions.show( spimData );
- //SimpleMultiThreading.threadHaltUnClean();
-
+ public static void main(final String... args) throws SpimDataException
+ {
System.out.println(Arrays.toString(args));
System.exit(new CommandLine(new CreateFusionContainer()).execute(args));
}
-}
\ No newline at end of file
+}
diff --git a/src/main/java/net/preibisch/bigstitcher/spark/Solver.java b/src/main/java/net/preibisch/bigstitcher/spark/Solver.java
index b27594b2..e9a0f236 100644
--- a/src/main/java/net/preibisch/bigstitcher/spark/Solver.java
+++ b/src/main/java/net/preibisch/bigstitcher/spark/Solver.java
@@ -289,7 +289,7 @@ public Void call() throws Exception
return null;
}
- final GlobalOptimizationParameters globalOptParameters = new GlobalOptimizationParameters(relativeThreshold, absoluteThreshold, globalOptType, false );
+ final GlobalOptimizationParameters globalOptParameters = new GlobalOptimizationParameters(relativeThreshold, absoluteThreshold, globalOptType, false, false );
final Collection< Pair< Group< ViewId >, Group< ViewId > > > removedInconsistentPairs = new ArrayList<>();
final HashMap models;
final Model> model = createModelInstance(transformationModel, regularizationModel, regularizationLambda);
@@ -299,7 +299,8 @@ public Void call() throws Exception
final ConvergenceStrategy cs = new ConvergenceStrategy( maxError, maxIterations, maxPlateauwidth );
models = (HashMap)GlobalOpt.computeTiles(
- (Model)(Object)model,
+ (Model)model,
+ globalOptParameters.preAlign,
pmc,
cs,
fixedViewIds,
@@ -308,7 +309,8 @@ public Void call() throws Exception
else if ( globalOptParameters.method == GlobalOptType.ONE_ROUND_ITERATIVE )
{
models = (HashMap)GlobalOptIterative.computeTiles(
- (Model)(Object)model,
+ (Model)model,
+ globalOptParameters.preAlign,
pmc,
new SimpleIterativeConvergenceStrategy( Double.MAX_VALUE, maxIterations, maxPlateauwidth, globalOptParameters.relativeThreshold, globalOptParameters.absoluteThreshold ),
new MaxErrorLinkRemoval(),
@@ -322,7 +324,8 @@ else if ( globalOptParameters.method == GlobalOptType.ONE_ROUND_ITERATIVE )
globalOptParameters.relativeThreshold = globalOptParameters.absoluteThreshold = Double.MAX_VALUE;
models = (HashMap)GlobalOptTwoRound.computeTiles(
- (Model & Affine3D)(Object)model,
+ (Model & Affine3D)model,
+ globalOptParameters.preAlign,
pmc,
new SimpleIterativeConvergenceStrategy( Double.MAX_VALUE, maxIterations, maxPlateauwidth, globalOptParameters.relativeThreshold, globalOptParameters.absoluteThreshold ), // if it's simple, both will be Double.MAX
new MaxErrorLinkRemoval(),
@@ -512,18 +515,18 @@ public static InterestPointMatchCreator setupPointMatchesFromInterestPoints(
pairResult.setLabelA( labelA );
pairResult.setLabelB( labelB );
- final List cpA = dataGlobal.getViewInterestPoints().getViewInterestPointLists( vA ).getInterestPointList( labelA ).getCorrespondingInterestPointsCopy();
+ final Collection<CorrespondingInterestPoints> cpA = dataGlobal.getViewInterestPoints().getViewInterestPointLists( vA ).getInterestPointList( labelA ).getCorrespondingInterestPointsCopy();
//List cpB = dataGlobal.getViewInterestPoints().getViewInterestPointLists( vB ).getInterestPointList( label ).getCorrespondingInterestPointsCopy();
- final List ipListA = dataGlobal.getViewInterestPoints().getViewInterestPointLists( vA ).getInterestPointList( labelA ).getInterestPointsCopy();
- final List ipListB = dataGlobal.getViewInterestPoints().getViewInterestPointLists( vB ).getInterestPointList( labelB ).getInterestPointsCopy();
+ final Map<Integer, InterestPoint> ipMapA = dataGlobal.getViewInterestPoints().getViewInterestPointLists( vA ).getInterestPointList( labelA ).getInterestPointsCopy();
+ final Map<Integer, InterestPoint> ipMapB = dataGlobal.getViewInterestPoints().getViewInterestPointLists( vB ).getInterestPointList( labelB ).getInterestPointsCopy();
for ( final CorrespondingInterestPoints p : cpA )
{
if ( p.getCorrespodingLabel().equals( labelB ) && p.getCorrespondingViewId().equals( vB ) )
{
- InterestPoint ipA = ipListA.get( p.getDetectionId() );
- InterestPoint ipB = ipListB.get( p.getCorrespondingDetectionId() );
+ InterestPoint ipA = ipMapA.get( p.getDetectionId() );
+ InterestPoint ipB = ipMapB.get( p.getCorrespondingDetectionId() );
// we need to copy the array because it might not be bijective
// (some points in one list might correspond with the same point in the other list)
diff --git a/src/main/java/net/preibisch/bigstitcher/spark/SparkAffineFusion.java b/src/main/java/net/preibisch/bigstitcher/spark/SparkAffineFusion.java
index dc62e119..7460f0df 100644
--- a/src/main/java/net/preibisch/bigstitcher/spark/SparkAffineFusion.java
+++ b/src/main/java/net/preibisch/bigstitcher/spark/SparkAffineFusion.java
@@ -33,7 +33,6 @@
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
-import java.util.stream.Collectors;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
@@ -153,7 +152,6 @@ public enum DataTypeFusion
@Option(names = { "--prefetch" }, description = "prefetch all blocks required for fusion in each Spark job using unlimited threads, useful in cloud environments (default: false)")
protected boolean prefetch = false;
-
// TODO: add support for loading coefficients during fusion
@CommandLine.Option(names = { "--intensityN5Path" }, description = "N5/ZARR/HDF5 base path for loading coefficients (e.g. s3://myBucket/coefficients.n5)")
private String intensityN5PathURIString = null;
@@ -167,6 +165,9 @@ public enum DataTypeFusion
@CommandLine.Option(names = { "--intensityN5Dataset" }, description = "dataset name for each coefficient dataset (default: \"intensity\"). The coefficients for view(s,t) are stored in dataset \"{-n5Group}/setup{s}/timepoint{t}/{n5Dataset}\"")
private String intensityN5Dataset = "intensity";
+ @Option(names = { "--group" }, description = "Container group path")
+ private String groupPath = "";
+
URI outPathURI = null;
/**
* Prefetching now works with a Executors.newCachedThreadPool();
@@ -175,6 +176,18 @@ public enum DataTypeFusion
URI intensityN5PathURI = null;
+ /**
+ * @return container group path always terminated with a '/'
+ */
+ private String getContainerGroupPath()
+ {
+ if (!groupPath.endsWith("/")) {
+ return groupPath + "/";
+ } else {
+ return groupPath;
+ }
+ }
+
@Override
public Void call() throws Exception
{
@@ -238,7 +251,7 @@ else if ( outputPathURIString.toLowerCase().endsWith( ".h5" ) || outPathURI.toSt
final N5Writer driverVolumeWriter = N5Util.createN5Writer( outPathURI, storageType );
- final String fusionFormat = driverVolumeWriter.getAttribute( "/", "Bigstitcher-Spark/FusionFormat", String.class );
+ final String fusionFormat = driverVolumeWriter.getAttribute( getContainerGroupPath(), "Bigstitcher-Spark/FusionFormat", String.class );
if ( fusionFormat == null )
{
@@ -248,14 +261,14 @@ else if ( outputPathURIString.toLowerCase().endsWith( ".h5" ) || outPathURI.toSt
}
final boolean bdv = fusionFormat.toLowerCase().contains( "BDV" );
- final URI xmlURI = driverVolumeWriter.getAttribute( "/", "Bigstitcher-Spark/InputXML", URI.class );
+ final URI xmlURI = driverVolumeWriter.getAttribute( getContainerGroupPath(), "Bigstitcher-Spark/InputXML", URI.class );
final int numTimepoints, numChannels;
if ( timepointIndex == null )
{
- numTimepoints = driverVolumeWriter.getAttribute( "/", "Bigstitcher-Spark/NumTimepoints", int.class );
- numChannels = driverVolumeWriter.getAttribute( "/", "Bigstitcher-Spark/NumChannels", int.class );
+ numTimepoints = driverVolumeWriter.getAttribute( getContainerGroupPath(), "Bigstitcher-Spark/NumTimepoints", int.class );
+ numChannels = driverVolumeWriter.getAttribute( getContainerGroupPath(), "Bigstitcher-Spark/NumChannels", int.class );
}
else
{
@@ -263,16 +276,16 @@ else if ( outputPathURIString.toLowerCase().endsWith( ".h5" ) || outPathURI.toSt
numTimepoints = numChannels = 1;
}
- final long[] bbMin = driverVolumeWriter.getAttribute( "/", "Bigstitcher-Spark/Boundingbox_min", long[].class );
- final long[] bbMax = driverVolumeWriter.getAttribute( "/", "Bigstitcher-Spark/Boundingbox_max", long[].class );
-
+ final long[] bbMin = driverVolumeWriter.getAttribute( getContainerGroupPath(), "Bigstitcher-Spark/Boundingbox_min", long[].class );
+ final long[] bbMax = driverVolumeWriter.getAttribute( getContainerGroupPath(), "Bigstitcher-Spark/Boundingbox_max", long[].class );
+
final BoundingBox boundingBox = new BoundingBox( new FinalInterval( bbMin, bbMax ) );
- final boolean preserveAnisotropy = driverVolumeWriter.getAttribute( "/", "Bigstitcher-Spark/PreserveAnisotropy", boolean.class );
- final double anisotropyFactor = preserveAnisotropy ? driverVolumeWriter.getAttribute( "/", "Bigstitcher-Spark/AnisotropyFactor", double.class ) : Double.NaN;
- final int[] blockSize = driverVolumeWriter.getAttribute( "/", "Bigstitcher-Spark/BlockSize", int[].class );
+ final boolean preserveAnisotropy = driverVolumeWriter.getAttribute( getContainerGroupPath(), "Bigstitcher-Spark/PreserveAnisotropy", boolean.class );
+ final double anisotropyFactor = preserveAnisotropy ? driverVolumeWriter.getAttribute( getContainerGroupPath(), "Bigstitcher-Spark/AnisotropyFactor", double.class ) : Double.NaN;
+ final int[] blockSize = driverVolumeWriter.getAttribute( getContainerGroupPath(), "Bigstitcher-Spark/BlockSize", int[].class );
- final DataType dataType = driverVolumeWriter.getAttribute( "/", "Bigstitcher-Spark/DataType", DataType.class );
+ final DataType dataType = driverVolumeWriter.getAttribute( getContainerGroupPath(), "Bigstitcher-Spark/DataType", DataType.class );
System.out.println( "FusionFormat: " + fusionFormat );
System.out.println( "FusionType: " + fusionType );
@@ -289,8 +302,8 @@ else if ( outputPathURIString.toLowerCase().endsWith( ".h5" ) || outPathURI.toSt
double minI = Double.NaN, maxI = Double.NaN;
try
{
- minI = driverVolumeWriter.getAttribute( "/", "Bigstitcher-Spark/MinIntensity", double.class );
- maxI = driverVolumeWriter.getAttribute( "/", "Bigstitcher-Spark/MaxIntensity", double.class );
+ minI = driverVolumeWriter.getAttribute( getContainerGroupPath(), "Bigstitcher-Spark/MinIntensity", double.class );
+ maxI = driverVolumeWriter.getAttribute( getContainerGroupPath(), "Bigstitcher-Spark/MaxIntensity", double.class );
}
catch ( Exception e )
{
@@ -304,7 +317,7 @@ else if ( outputPathURIString.toLowerCase().endsWith( ".h5" ) || outPathURI.toSt
System.out.println( "maxIntensity: " + maxI );
final MultiResolutionLevelInfo[][] mrInfos =
- driverVolumeWriter.getAttribute( "/", "Bigstitcher-Spark/MultiResolutionInfos", MultiResolutionLevelInfo[][].class );
+ driverVolumeWriter.getAttribute( getContainerGroupPath(), "Bigstitcher-Spark/MultiResolutionInfos", MultiResolutionLevelInfo[][].class );
System.out.println( "Loaded " + mrInfos.length + " metadata object for fused " + storageType + " volume(s)" );
@@ -462,8 +475,6 @@ else if ( intensityN5PathURIString.toLowerCase().endsWith( ".h5" ) || intensityN
System.out.println( "numJobs = " + grid.size() );
- //driverVolumeWriter.setAttribute( n5Dataset, "offset", minBB );
-
final RetryTrackerSpark retryTracker =
RetryTrackerSpark.forGridBlocks("s0 block processing", grid.size());
@@ -540,11 +551,7 @@ else if ( dataType == DataType.UINT16 )
return gridBlock;
// load intensity correction coefficients for all overlapping views
-
-
final Map< ViewId, Coefficients > coefficients;
-
-
if ( intensityN5PathURI != null )
{
coefficients = new HashMap<>();
@@ -598,7 +605,6 @@ else if ( dataType == DataType.UINT16 )
System.out.println( "Fusing block: offset=" + Util.printCoordinates( gridBlock[0] ) + ", dimension=" + Util.printCoordinates( gridBlock[1] ) );
// returns a zero-min interval
- //blockSupplier = BlkAffineFusion.init(
blockSupplier = BlkAffineFusion.initWithIntensityCoefficients(
conv,
dataLocal.getSequenceDescription().getImgLoader(),
@@ -760,18 +766,18 @@ else if ( dataType == DataType.UINT16 )
rddDSResult.cache();
rddDSResult.count();
-
+
// extract all blocks that failed
final Set<long[][]> failedBlocksSet =
retryTrackerDS.processWithSpark( rddDSResult, grid );
-
+
// Use RetryTracker to handle retry counting and removal
if (!retryTrackerDS.processFailures(failedBlocksSet))
{
System.out.println( "Stopping." );
System.exit( 1 );
}
-
+
// Update grid for next iteration with remaining failed blocks
grid.clear();
grid.addAll(failedBlocksSet);
@@ -785,13 +791,6 @@ else if ( dataType == DataType.UINT16 )
// close main writer (is shared over Spark-threads if it's HDF5, thus just closing it here)
driverVolumeWriter.close();
- /*
- if ( multiRes )
- System.out.println( "Saved, e.g. view with './n5-view -i " + n5PathURI + " -d " + n5Dataset.substring( 0, n5Dataset.length() - 3) + "'" );
- else
- System.out.println( "Saved, e.g. view with './n5-view -i " + n5PathURI + " -d " + n5Dataset + "'" );
- */
-
System.out.println( "done, took: " + (System.currentTimeMillis() - totalTime ) + " ms." );
sc.close();
diff --git a/src/main/java/net/preibisch/bigstitcher/spark/SparkGeometricDescriptorMatching.java b/src/main/java/net/preibisch/bigstitcher/spark/SparkGeometricDescriptorMatching.java
index ad4103cf..ea859837 100644
--- a/src/main/java/net/preibisch/bigstitcher/spark/SparkGeometricDescriptorMatching.java
+++ b/src/main/java/net/preibisch/bigstitcher/spark/SparkGeometricDescriptorMatching.java
@@ -24,6 +24,7 @@
import java.net.URI;
import java.util.ArrayList;
import java.util.Arrays;
+import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
@@ -283,7 +284,7 @@ else if ( ransacIterations == null )
} );
// load & transform all interest points
- final Map< ViewId, HashMap< String, List< InterestPoint > > > interestpoints =
+ final Map< ViewId, HashMap< String, Collection< InterestPoint > > > interestpoints =
TransformationTools.getAllTransformedInterestPoints(
views,
data.getViewRegistrations().getViewRegistrations(),
@@ -299,8 +300,8 @@ else if ( ransacIterations == null )
interestpoints, groups, data.getViewRegistrations().getViewRegistrations(), data.getSequenceDescription().getViewDescriptions() );
System.out.println( Group.pvid( task.vA ) + " (" + task.labelA + ") <=> " + Group.pvid( task.vB ) + " (" + task.labelB + "): Remaining interest points for alignment: " );
- for ( final Entry< ViewId, HashMap< String, List< InterestPoint > > > element: interestpoints.entrySet() )
- for ( final Entry< String, List< InterestPoint > > subElement : element.getValue().entrySet() )
+ for ( final Entry< ViewId, HashMap< String, Collection< InterestPoint > > > element: interestpoints.entrySet() )
+ for ( final Entry< String, Collection< InterestPoint > > subElement : element.getValue().entrySet() )
System.out.println( Group.pvid( element.getKey() ) + ", '" + subElement.getKey() + "' : " + subElement.getValue().size() );
}
@@ -390,7 +391,7 @@ else if ( ransacIterations == null )
} );
// load & transform all interest points
- final Map< ViewId, HashMap< String, List< InterestPoint > > > interestpoints =
+ final Map< ViewId, HashMap< String, Collection< InterestPoint >> > interestpoints =
TransformationTools.getAllTransformedInterestPoints(
views,
data.getViewRegistrations().getViewRegistrations(),
@@ -410,12 +411,12 @@ else if ( ransacIterations == null )
TransformationTools.filterForOverlappingInterestPoints( interestpoints, groups, data.getViewRegistrations().getViewRegistrations(), data.getSequenceDescription().getViewDescriptions() );
System.out.println( task.vA + " (" + task.labelA + ") <=> " + task.vB + " (" + task.labelB + "): Remaining interest points for alignment: " );
- for ( final Entry< ViewId, HashMap< String, List< InterestPoint > > > element: interestpoints.entrySet() )
- for ( final Entry< String, List< InterestPoint > > subElement : element.getValue().entrySet() )
+ for ( final Entry< ViewId, HashMap< String, Collection< InterestPoint > > > element: interestpoints.entrySet() )
+ for ( final Entry< String, Collection< InterestPoint > > subElement : element.getValue().entrySet() )
System.out.println( Group.pvid( element.getKey() ) + ", '" + subElement.getKey() + "' : " + subElement.getValue().size() );
}
- final Map< Group< ViewId >, HashMap< String, List< GroupedInterestPoint< ViewId > > > > groupedInterestpoints = new HashMap<>();
+ final Map< Group< ViewId >, HashMap< String, Collection< GroupedInterestPoint< ViewId > > > > groupedInterestpoints = new HashMap<>();
final InterestPointGroupingMinDistance< ViewId > ipGrouping
= new InterestPointGroupingMinDistance<>( interestPointMergeDistance, interestpoints );
diff --git a/src/main/java/net/preibisch/bigstitcher/spark/SparkPairwiseStitching.java b/src/main/java/net/preibisch/bigstitcher/spark/SparkPairwiseStitching.java
index 0a0b8162..68dcbed6 100644
--- a/src/main/java/net/preibisch/bigstitcher/spark/SparkPairwiseStitching.java
+++ b/src/main/java/net/preibisch/bigstitcher/spark/SparkPairwiseStitching.java
@@ -30,6 +30,8 @@
import java.util.concurrent.Executors;
import java.util.stream.Collectors;
+import mpicbg.spim.data.registration.ViewTransform;
+import mpicbg.spim.data.registration.ViewTransformAffine;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
@@ -331,13 +333,19 @@ public Void call() throws Exception
System.out.println( new Date( System.currentTimeMillis() ) + ": Remaining pairs: " + results.size() );
-
// update StitchingResults with Results
for ( final PairwiseStitchingResult< ViewId > psr : results )
{
if (psr == null)
continue;
+ // update the registrations transformations
+ psr.pair().getA().getViews().forEach( viewId -> {
+ dataGlobal.getViewRegistrations().getViewRegistration(viewId)
+ .preconcatenateTransform(new ViewTransformAffine(
+ "Stitching Transform",
+ new AffineTransform3D().concatenate(psr.getInverseTransform())));
+ });
dataGlobal.getStitchingResults().setPairwiseResultForPair(psr.pair(), psr );
}
diff --git a/src/main/java/net/preibisch/bigstitcher/spark/SparkResaveN5.java b/src/main/java/net/preibisch/bigstitcher/spark/SparkResaveN5.java
index 45c64233..32f710ac 100644
--- a/src/main/java/net/preibisch/bigstitcher/spark/SparkResaveN5.java
+++ b/src/main/java/net/preibisch/bigstitcher/spark/SparkResaveN5.java
@@ -32,20 +32,11 @@
import java.util.concurrent.Callable;
import java.util.stream.Collectors;
-import org.apache.spark.SparkConf;
-import org.apache.spark.api.java.JavaRDD;
-import org.apache.spark.api.java.JavaSparkContext;
-import org.bigdataviewer.n5.N5CloudImageLoader;
-import org.janelia.saalfeldlab.n5.Compression;
-import org.janelia.saalfeldlab.n5.DataType;
-import org.janelia.saalfeldlab.n5.N5Writer;
-import org.janelia.saalfeldlab.n5.universe.StorageFormat;
-
import bdv.img.n5.N5ImageLoader;
import mpicbg.spim.data.sequence.ViewId;
+import mpicbg.spim.data.sequence.VoxelDimensions;
import net.imglib2.util.Util;
import net.imglib2.util.ValuePair;
-import net.preibisch.bigstitcher.spark.CreateFusionContainer.Compressions;
import net.preibisch.bigstitcher.spark.abstractcmdline.AbstractBasic;
import net.preibisch.bigstitcher.spark.util.Import;
import net.preibisch.bigstitcher.spark.util.N5Util;
@@ -58,6 +49,14 @@
import net.preibisch.mvrecon.fiji.spimdata.imgloaders.AllenOMEZarrLoader.OMEZARREntry;
import net.preibisch.mvrecon.process.n5api.N5ApiTools;
import net.preibisch.mvrecon.process.n5api.N5ApiTools.MultiResolutionLevelInfo;
+import org.apache.spark.SparkConf;
+import org.apache.spark.api.java.JavaRDD;
+import org.apache.spark.api.java.JavaSparkContext;
+import org.bigdataviewer.n5.N5CloudImageLoader;
+import org.janelia.saalfeldlab.n5.Compression;
+import org.janelia.saalfeldlab.n5.DataType;
+import org.janelia.saalfeldlab.n5.N5Writer;
+import org.janelia.saalfeldlab.n5.universe.StorageFormat;
import picocli.CommandLine;
import picocli.CommandLine.Option;
import util.URITools;
@@ -96,7 +95,7 @@ public class SparkResaveN5 extends AbstractBasic implements Callable, Seri
@Option(names = {"-c", "--compression"}, defaultValue = "Zstandard", showDefaultValue = CommandLine.Help.Visibility.ALWAYS,
description = "Dataset compression")
- private Compressions compression = null;
+ private Compressions compressionType = null;
@Option(names = {"-cl", "--compressionLevel" }, description = "compression level, if supported by the codec (default: gzip 1, Zstandard 3, xz 6)")
private Integer compressionLevel = null;
@@ -109,33 +108,8 @@ public Void call() throws Exception
{
this.setRegion();
- /*
- Exception in thread "main" java.lang.IllegalAccessError: tried to access method com.google.common.collect.ImmutableList$Builder.(I)V from class com.google.common.collect.Streams
- at com.google.common.collect.Streams.concat(Streams.java:204)
- at org.janelia.saalfeldlab.n5.universe.metadata.ome.ngff.v04.coordinateTransformations.TransformationUtils.tranformsToAffine(TransformationUtils.java:27)
- at org.janelia.saalfeldlab.n5.universe.metadata.ome.ngff.v04.OmeNgffMultiScaleMetadata.buildMetadata(OmeNgffMultiScaleMetadata.java:159)
- at org.janelia.saalfeldlab.n5.universe.metadata.ome.ngff.v04.OmeNgffMultiScaleMetadata.(OmeNgffMultiScaleMetadata.java:101)
- at org.janelia.saalfeldlab.n5.universe.metadata.ome.ngff.v04.OmeNgffMultiScaleMetadata.(OmeNgffMultiScaleMetadata.java:91)
- at net.preibisch.mvrecon.fiji.spimdata.imgloaders.OMEZarrAttibutes.createOMEZarrMetadata(OMEZarrAttibutes.java:128)
- at net.preibisch.mvrecon.process.n5api.N5ApiTools.setupBdvDatasetsOMEZARR(N5ApiTools.java:422)
- at net.preibisch.bigstitcher.spark.SparkResaveN5.lambda$call$1(SparkResaveN5.java:219)
- */
-
- /*
- local:
- com.google.common.collect.ImmutableList: file:/home/preibischs@hhmi.org/.m2/repository/com/google/guava/guava/33.3.1-jre/guava-33.3.1-jre.jar
- com.google.common.collect.Streams: file:/home/preibischs@hhmi.org/.m2/repository/com/google/guava/guava/33.3.1-jre/guava-33.3.1-jre.jar
- */
-
- /*
- cluster:
- com.google.common.collect.ImmutableList: file:/misc/local/spark-3.4.1/jars/guava-14.0.1.jar
- com.google.common.collect.Streams: file:/groups/scicompsoft/home/preibischs/Keller/BigStitcher-Spark-0.1.0-SNAPSHOT.jar
- */
-
System.out.println( "com.google.common.collect.ImmutableList: " + com.google.common.collect.ImmutableList.class.getProtectionDomain().getCodeSource().getLocation() );
System.out.println( "com.google.common.collect.Streams: " + com.google.common.collect.Streams.class.getProtectionDomain().getCodeSource().getLocation() );
- //System.exit( 0 );
final SpimData2 dataGlobal = this.loadSpimData2();
@@ -164,7 +138,7 @@ public Void call() throws Exception
}
final URI n5PathURI = URITools.toURI( this.n5PathURIString == null ? URITools.appendName( URITools.getParentURI( xmlOutURI ), (useN5 ? "dataset.n5" : "dataset.ome.zarr") ) : n5PathURIString );
- final Compression compression = N5Util.getCompression( this.compression, this.compressionLevel );
+ final Compression compression = N5Util.getCompression( this.compressionType, this.compressionLevel );
final int[] blockSize = Import.csvStringToIntArray(blockSizeString);
final int[] blockScale = Import.csvStringToIntArray(blockScaleString);
@@ -174,10 +148,9 @@ public Void call() throws Exception
blockSize[1] * blockScale[ 1 ],
blockSize[2] * blockScale[ 2 ] };
- //final N5Writer n5 = new N5FSWriter(n5Path);
final N5Writer n5Writer = URITools.instantiateN5Writer( useN5 ? StorageFormat.N5 : StorageFormat.ZARR, n5PathURI );
- System.out.println( "Compression: " + this.compression );
+ System.out.println( "Compression: " + this.compressionType );
System.out.println( "Compression level: " + ( compressionLevel == null ? "default" : compressionLevel ) );
System.out.println( "N5 block size=" + Util.printCoordinates( blockSize ) );
System.out.println( "Compute block size=" + Util.printCoordinates( computeBlockSize ) );
@@ -242,13 +215,14 @@ public Void call() throws Exception
else
{
System.out.println( Arrays.toString( blockSize ) );
-
+ VoxelDimensions vx = dataGlobal.getSequenceDescription().getViewDescription( viewId ).getViewSetup().getVoxelSize();
mrInfo = N5ApiTools.setupBdvDatasetsOMEZARR(
n5Writer,
viewId,
dataTypes.get( viewId.getViewSetupId() ),
dimensions.get( viewId.getViewSetupId() ),
- //dataGlobal.getSequenceDescription().getViewDescription( viewId ).getViewSetup().getVoxelSize().dimensionsAsDoubleArray(), // TODO: this is a hack for now
+ vx.dimensionsAsDoubleArray(),
+ vx.unit(),
compression,
blockSize,
downsamplings);
@@ -267,8 +241,6 @@ public Void call() throws Exception
if ( localSparkBindAddress )
conf.set("spark.driver.bindAddress", "127.0.0.1");
- //System.exit( 0 );
-
final JavaSparkContext sc = new JavaSparkContext(conf);
sc.setLogLevel("ERROR");
@@ -361,8 +333,7 @@ public Void call() throws Exception
}
final JavaRDD rddsN = sc.parallelize(allBlocks, Math.min( Spark.maxPartitions, allBlocks.size() ) );
-
-
+
final JavaRDD rdds0Result = rddsN.map( gridBlock ->
{
final N5Writer n5Lcl = URITools.instantiateN5Writer( useN5 ? StorageFormat.N5 : StorageFormat.ZARR, n5PathURI );
diff --git a/src/main/java/net/preibisch/bigstitcher/spark/SplitDatasets.java b/src/main/java/net/preibisch/bigstitcher/spark/SplitDatasets.java
index 84893355..f2fff98b 100644
--- a/src/main/java/net/preibisch/bigstitcher/spark/SplitDatasets.java
+++ b/src/main/java/net/preibisch/bigstitcher/spark/SplitDatasets.java
@@ -41,7 +41,7 @@ public class SplitDatasets extends AbstractBasic
private boolean disableOptimization = false;
@Option(names = { "-fip", "--fakeInterestPoints" }, description = "add fake interest points to overlapping regions of split images/views")
- private boolean fakeInterestPoints = false;
+ private Split_Views.InterestPointAdding fakeInterestPoints = Split_Views.InterestPointAdding.NONE;
@Option(names = { "--fipDensity" }, description = "density of fake interest points; number of points per 100x100x100 px volume (default: 100.0)")
private double fipDensity = 100.0;
diff --git a/src/main/java/net/preibisch/bigstitcher/spark/util/N5Util.java b/src/main/java/net/preibisch/bigstitcher/spark/util/N5Util.java
index 7d5b52c4..a93a81f1 100644
--- a/src/main/java/net/preibisch/bigstitcher/spark/util/N5Util.java
+++ b/src/main/java/net/preibisch/bigstitcher/spark/util/N5Util.java
@@ -36,7 +36,7 @@
import org.janelia.saalfeldlab.n5.universe.StorageFormat;
import org.janelia.scicomp.n5.zstandard.ZstandardCompression;
-import net.preibisch.bigstitcher.spark.CreateFusionContainer.Compressions;
+import net.preibisch.bigstitcher.spark.Compressions;
import net.preibisch.legacy.io.IOFunctions;
import util.URITools;
diff --git a/src/main/java/net/preibisch/mvrecon/dataset/N5MultichannelLoader.java b/src/main/java/net/preibisch/mvrecon/dataset/N5MultichannelLoader.java
new file mode 100644
index 00000000..2b07631a
--- /dev/null
+++ b/src/main/java/net/preibisch/mvrecon/dataset/N5MultichannelLoader.java
@@ -0,0 +1,62 @@
+/*-
+ * #%L
+ * Software for the reconstruction of multi-view microscopic acquisitions
+ * like Selective Plane Illumination Microscopy (SPIM) Data.
+ * %%
+ * Copyright (C) 2012 - 2025 Multiview Reconstruction developers.
+ * %%
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as
+ * published by the Free Software Foundation, either version 2 of the
+ * License, or (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public
+ * License along with this program. If not, see
+ * <http://www.gnu.org/licenses/gpl-2.0.html>.
+ * #L%
+ */
+package net.preibisch.mvrecon.dataset;
+
+import java.net.URI;
+import java.util.Map;
+
+import bdv.img.n5.N5ImageLoader;
+import bdv.img.n5.N5Properties;
+import mpicbg.spim.data.generic.sequence.AbstractSequenceDescription;
+import mpicbg.spim.data.sequence.ViewId;
+import org.janelia.saalfeldlab.n5.universe.StorageFormat;
+import util.URITools;
+
+public class N5MultichannelLoader extends N5ImageLoader
+{
+ private final AbstractSequenceDescription< ?, ?, ? > sequenceDescription;
+
+ private final Map< ViewId, String > viewIdToPath;
+
+ public N5MultichannelLoader(
+ final URI n5URI,
+ final StorageFormat storageFormat,
+ final AbstractSequenceDescription< ?, ?, ? > sequenceDescription,
+ final Map< ViewId, String > viewIdToPath )
+ {
+ super( URITools.instantiateN5Reader( storageFormat, n5URI ), n5URI, sequenceDescription );
+ this.sequenceDescription = sequenceDescription;
+
+ this.viewIdToPath = viewIdToPath;
+ }
+
+ @Override
+ protected N5Properties createN5PropertiesInstance()
+ {
+ return new N5MultichannelProperties( sequenceDescription, viewIdToPath );
+ }
+
+ public Map<ViewId, String> getViewIdToPath() {
+ return viewIdToPath;
+ }
+}
diff --git a/src/main/java/net/preibisch/mvrecon/dataset/N5MultichannelProperties.java b/src/main/java/net/preibisch/mvrecon/dataset/N5MultichannelProperties.java
new file mode 100644
index 00000000..2556741c
--- /dev/null
+++ b/src/main/java/net/preibisch/mvrecon/dataset/N5MultichannelProperties.java
@@ -0,0 +1,153 @@
+/*-
+ * #%L
+ * Software for the reconstruction of multi-view microscopic acquisitions
+ * like Selective Plane Illumination Microscopy (SPIM) Data.
+ * %%
+ * Copyright (C) 2012 - 2025 Multiview Reconstruction developers.
+ * %%
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as
+ * published by the Free Software Foundation, either version 2 of the
+ * License, or (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public
+ * License along with this program. If not, see
+ * <http://www.gnu.org/licenses/gpl-2.0.html>.
+ * #L%
+ */
+package net.preibisch.mvrecon.dataset;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Map;
+
+import bdv.img.n5.N5Properties;
+import mpicbg.spim.data.generic.sequence.AbstractSequenceDescription;
+import mpicbg.spim.data.sequence.TimePoint;
+import mpicbg.spim.data.sequence.ViewId;
+import org.janelia.saalfeldlab.n5.DataType;
+import org.janelia.saalfeldlab.n5.N5Reader;
+
+public class N5MultichannelProperties implements N5Properties
+{
+ private final AbstractSequenceDescription< ?, ?, ? > sequenceDescription;
+
+ private final Map< ViewId, String > viewIdToPath;
+ private int numMipmapLevels;
+
+ public N5MultichannelProperties(
+ final AbstractSequenceDescription< ?, ?, ? > sequenceDescription,
+ final Map< ViewId, String > viewIdToPath )
+ {
+ this.sequenceDescription = sequenceDescription;
+ this.viewIdToPath = viewIdToPath;
+ this.numMipmapLevels = -1;
+ }
+
+ private String getPath( final int setupId, final int timepointId )
+ {
+ return viewIdToPath.get( new ViewId( timepointId, setupId ) );
+ }
+
+ @Override
+ public String getDatasetPath( final int setupId, final int timepointId, final int level )
+ {
+ return String.format( getPath( setupId, timepointId )+ "/s%d", level );
+ }
+
+ @Override
+ public DataType getDataType( final N5Reader n5, final int setupId )
+ {
+ return N5MultichannelProperties.getDataType( this, n5, setupId );
+ }
+
+ @Override
+ public double[][] getMipmapResolutions( final N5Reader n5, final int setupId )
+ {
+ return getMipMapResolutions( this, n5, setupId );
+ }
+
+ @Override
+ public long[] getDimensions( final N5Reader n5, final int setupId, final int timepointId, final int level )
+ {
+ final String path = getDatasetPath( setupId, timepointId, level );
+ final long[] dimensions = n5.getDatasetAttributes( path ).getDimensions();
+ return Arrays.copyOf( dimensions, 3 );
+ }
+
+ public <T> T getRootAttribute( N5Reader n5, String attributeKey, Class<T> attributeType )
+ {
+ return n5.getAttribute("", attributeKey, attributeType);
+ }
+
+ public <T> T getAttribute( N5Reader n5, int setupId, int timepointId, int level, String attributeKey, Class<T> attributeType )
+ {
+ String path;
+ if (level >= 0) {
+ path = getDatasetPath( setupId, timepointId, level );
+ } else {
+ path = getPath( setupId, timepointId );
+ }
+ return n5.getAttribute(path, attributeKey, attributeType);
+ }
+
+ private int getNumMipmapLevels( final N5Reader n5, final int setupId, final int timepointId )
+ {
+ if ( numMipmapLevels >=0 )
+ return numMipmapLevels;
+
+ final String path = getPath( setupId, timepointId );
+ String[] subgroups = n5.list(path);
+ numMipmapLevels = subgroups != null ? subgroups.length : 0;
+ return numMipmapLevels;
+ }
+
+ //
+ // static methods
+ //
+
+ private static int getFirstAvailableTimepointId( final AbstractSequenceDescription< ?, ?, ? > seq, final int setupId )
+ {
+ for ( final TimePoint tp : seq.getTimePoints().getTimePointsOrdered() )
+ {
+ if ( seq.getMissingViews() == null || seq.getMissingViews().getMissingViews() == null || !seq.getMissingViews().getMissingViews().contains( new ViewId( tp.getId(), setupId ) ) )
+ return tp.getId();
+ }
+
+ throw new RuntimeException( "All timepoints for setupId " + setupId + " are declared missing. Stopping." );
+ }
+
+ private static DataType getDataType(final N5MultichannelProperties n5properties, final N5Reader n5, final int setupId )
+ {
+ final int timePointId = getFirstAvailableTimepointId( n5properties.sequenceDescription, setupId );
+ return n5.getDatasetAttributes( n5properties.getDatasetPath( setupId, timePointId, 0 ) ).getDataType();
+ }
+
+ private static double[][] getMipMapResolutions(final N5MultichannelProperties n5properties, final N5Reader n5, final int setupId )
+ {
+ final int timePointId = getFirstAvailableTimepointId( n5properties.sequenceDescription, setupId );
+
+ // read scales and pixelResolution attributes from the base container and build the mipmap resolutions from that
+ List<double[]> scales = new ArrayList<>();
+ int numLevels = n5properties.getNumMipmapLevels(n5, setupId, timePointId);
+ for (int level = 0; level < numLevels; level++ ) {
+ double[] pixelResolution = n5properties.getAttribute(n5, setupId, timePointId, level, "pixelResolution", double[].class);
+ double[] downSamplingFactors = n5properties.getAttribute(n5, setupId, timePointId, level, "downsamplingFactors", double[].class);
+ if (pixelResolution != null) {
+ if (downSamplingFactors != null) {
+ for (int d = 0; d < pixelResolution.length && d < downSamplingFactors.length; d++) {
+ pixelResolution[d] *= downSamplingFactors[d];
+ }
+ }
+ scales.add(pixelResolution);
+ }
+ }
+ return !scales.isEmpty() ? scales.toArray( new double[0][]) : new double[][] { { 1, 1, 1 } };
+ }
+}
diff --git a/src/main/java/net/preibisch/mvrecon/dataset/SpimDatasetBuilder.java b/src/main/java/net/preibisch/mvrecon/dataset/SpimDatasetBuilder.java
new file mode 100644
index 00000000..5aa27006
--- /dev/null
+++ b/src/main/java/net/preibisch/mvrecon/dataset/SpimDatasetBuilder.java
@@ -0,0 +1,615 @@
+package net.preibisch.mvrecon.dataset;
+
+import java.io.File;
+import java.io.IOException;
+import java.io.UncheckedIOException;
+import java.net.URI;
+import java.nio.file.FileSystems;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.PathMatcher;
+import java.nio.file.Paths;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+import java.util.Set;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+import java.util.stream.Collectors;
+
+import loci.formats.IFormatReader;
+import loci.formats.ImageReader;
+import loci.formats.meta.MetadataRetrieve;
+import mpicbg.spim.data.generic.sequence.BasicViewDescription;
+import mpicbg.spim.data.registration.ViewRegistrations;
+import mpicbg.spim.data.sequence.Angle;
+import mpicbg.spim.data.sequence.Channel;
+import mpicbg.spim.data.sequence.FinalVoxelDimensions;
+import mpicbg.spim.data.sequence.Illumination;
+import mpicbg.spim.data.sequence.SequenceDescription;
+import mpicbg.spim.data.sequence.Tile;
+import mpicbg.spim.data.sequence.TimePoint;
+import mpicbg.spim.data.sequence.TimePoints;
+import mpicbg.spim.data.sequence.ViewDescription;
+import mpicbg.spim.data.sequence.ViewId;
+import mpicbg.spim.data.sequence.ViewSetup;
+import mpicbg.spim.data.sequence.VoxelDimensions;
+import net.imglib2.Dimensions;
+import net.imglib2.FinalDimensions;
+import net.preibisch.mvrecon.fiji.datasetmanager.DatasetCreationUtils;
+import net.preibisch.mvrecon.fiji.spimdata.SpimData2;
+import net.preibisch.mvrecon.fiji.spimdata.boundingbox.BoundingBoxes;
+import net.preibisch.mvrecon.fiji.spimdata.imgloaders.FileMapImgLoaderLOCI;
+import net.preibisch.mvrecon.fiji.spimdata.imgloaders.LegacyStackImgLoaderLOCI;
+import net.preibisch.mvrecon.fiji.spimdata.imgloaders.filemap2.FileMapEntry;
+import net.preibisch.mvrecon.fiji.spimdata.intensityadjust.IntensityAdjustments;
+import net.preibisch.mvrecon.fiji.spimdata.interestpoints.ViewInterestPoints;
+import net.preibisch.mvrecon.fiji.spimdata.pointspreadfunctions.PointSpreadFunctions;
+import net.preibisch.mvrecon.fiji.spimdata.stitchingresults.StitchingResults;
+import ome.units.UNITS;
+import ome.units.quantity.Length;
+import org.apache.commons.lang3.builder.ToStringBuilder;
+import org.janelia.saalfeldlab.n5.N5FSReader;
+import org.janelia.saalfeldlab.n5.N5Reader;
+import org.janelia.saalfeldlab.n5.universe.StorageFormat;
+
+public class SpimDatasetBuilder {
+
+ static class ViewIndex {
+ final int tp, ch, il, ang;
+
+ ViewIndex(int tp, int ch, int il, int ang) {
+ this.tp = tp;
+ this.ch = ch;
+ this.il = il;
+ this.ang = ang;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (o == null || getClass() != o.getClass()) return false;
+ ViewIndex tileIndex = (ViewIndex) o;
+ return tp == tileIndex.tp && ch == tileIndex.ch && il == tileIndex.il && ang == tileIndex.ang;
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(tp, ch, il, ang);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringBuilder(this)
+ .append("tp", tp)
+ .append("ch", ch)
+ .append("il", il)
+ .append("ang", ang)
+ .toString();
+ }
+ }
+
+ static class StackFile {
+ final ViewIndex viewIndex;
+ final int ti;
+ final URI baseURI;
+ final String relativeFilePath;
+ int nImages = -1;
+ int nTp = -1;
+ int nCh = -1;
+ int sizeZ = -1;
+ int sizeY = -1;
+ int sizeX = -1;
+
+ StackFile(int tp, int ch, int il, int ang, int ti, URI baseURI, String relativeFilePath )
+ {
+ this.viewIndex = new ViewIndex(tp, ch, il, ang);
+ this.ti = ti;
+ this.baseURI = baseURI;
+ this.relativeFilePath = relativeFilePath;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (o == null || getClass() != o.getClass()) return false;
+ StackFile stackFile = (StackFile) o;
+ return ti == stackFile.ti &&
+ Objects.equals(viewIndex, stackFile.viewIndex) &&
+ Objects.equals(baseURI, stackFile.baseURI) &&
+ Objects.equals(relativeFilePath, stackFile.relativeFilePath);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(viewIndex, ti, baseURI, relativeFilePath);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringBuilder(this)
+ .append("view", viewIndex)
+ .append("ti", ti)
+ .append("baseURI", baseURI)
+ .append("relativeFilePath", relativeFilePath)
+ .toString();
+ }
+
+ Path getFilePath() {
+ return Paths.get(baseURI).resolve(relativeFilePath);
+ }
+
+ int getTp() {
+ return viewIndex.tp;
+ }
+
+ int getTi() {
+ return ti;
+ }
+
+ public int getCh() {
+ return viewIndex.ch;
+ }
+
+ public int getAng() {
+ return viewIndex.ang;
+ }
+
+ public int getIl() {
+ return viewIndex.il;
+ }
+ }
+
+ interface ViewSetupBuilder {
+ SequenceDescription getSequenceDescription();
+ ViewSetupBuilder setImgLoader();
+ ViewSetupBuilder createViewSetups(List<StackFile> stackFiles);
+ }
+
+
+ /**
+ * LOCIViewSetupBuilder is based on LegacyStackImgLoaderLOCI from multiview-reconstruction library
+ */
+ static class LOCIViewSetupBuilder implements ViewSetupBuilder {
+
+ private final SequenceDescription sequenceDescription;
+ private final Map<Integer, StackFile> viewToStackFileMap = new HashMap<>();
+
+ LOCIViewSetupBuilder(TimePoints timePoints) {
+ this.sequenceDescription = new SequenceDescription(
+ timePoints,
+ /*view setups*/Collections.emptyList()
+ );
+ }
+
+ @Override
+ public SequenceDescription getSequenceDescription() {
+ return sequenceDescription;
+ }
+
+ @Override
+ public LOCIViewSetupBuilder setImgLoader() {
+ Map<BasicViewDescription<?>, FileMapEntry> fileMap = new HashMap<>();
+ for (ViewSetup vs : sequenceDescription.getViewSetupsOrdered()) {
+ StackFile stackFile = viewToStackFileMap.get(vs.getId());
+ ViewDescription vdI = sequenceDescription.getViewDescription( stackFile.getTp(), vs.getId() );
+ FileMapEntry fileMapEntry = new FileMapEntry(
+ stackFile.getFilePath().toFile(),
+ vs.getTile().getId() - (stackFile.getTi() * stackFile.nImages), // recreate the image index within the file
+ vs.getChannel().getId());
+ fileMap.put( vdI, fileMapEntry);
+ }
+
+ sequenceDescription.setImgLoader(new FileMapImgLoaderLOCI(
+ fileMap,
+ sequenceDescription,
+ false
+ ));
+ return this;
+ }
+
+ @SuppressWarnings("unchecked")
+ @Override
+ public LOCIViewSetupBuilder createViewSetups(List<StackFile> stackFiles) {
+ int nfiles = stackFiles.size();
+ for (int sfi = 0; sfi < nfiles; sfi++) {
+ StackFile stackFile = stackFiles.get(sfi);
+ File tileFile = stackFile.getFilePath().toFile();
+ if ( !tileFile.exists() )
+ {
+ continue;
+ }
+ IFormatReader formatReader = new ImageReader();
+ try {
+ if ( !LegacyStackImgLoaderLOCI.createOMEXMLMetadata( formatReader ) ) {
+ try {
+ formatReader.close();
+ } catch (IOException e) {
+ e.printStackTrace();
+ }
+ continue;
+ }
+
+ formatReader.setId( tileFile.toString() );
+
+ MetadataRetrieve retrieve = (MetadataRetrieve)formatReader.getMetadataStore();
+
+ int seriesCount = formatReader.getSeriesCount();
+ Map<String, Object> globalMetadata = formatReader.getGlobalMetadata();
+
+ for (int series = 0; series < seriesCount; series++) {
+ formatReader.setSeries(series);
+ stackFile.nImages = seriesCount;
+ stackFile.nTp = formatReader.getSizeT();
+ stackFile.nCh = formatReader.getSizeC();
+ stackFile.sizeZ = formatReader.getSizeZ();
+ stackFile.sizeY = formatReader.getSizeY();
+ stackFile.sizeX = formatReader.getSizeX();
+
+ double oX = getOffsetX(globalMetadata, series);
+ double oY = getOffsetY(globalMetadata, series);
+ double oZ = getOffsetZ(globalMetadata, series);
+ Length resX = retrieve.getPixelsPhysicalSizeX(series);
+ Length resY = retrieve.getPixelsPhysicalSizeY(series);
+ Length resZ = retrieve.getPixelsPhysicalSizeZ(series);
+
+ double rX = resX != null ? resX.value(UNITS.MICROMETER).doubleValue() : 1;
+ double rY = resY != null ? resY.value(UNITS.MICROMETER).doubleValue() : 1;
+ double rZ = resZ != null ? resZ.value(UNITS.MICROMETER).doubleValue() : 1;
+
+ VoxelDimensions voxelDimensions = new FinalVoxelDimensions("um", rX, rY, rZ);
+ System.out.println("Voxel dimensions: " + voxelDimensions);
+ int metadataChannels = retrieve.getChannelCount(series);
+
+ for (int metadataChIndex = 0; metadataChIndex < metadataChannels; metadataChIndex++) {
+ String chName = retrieve.getChannelName(series, metadataChIndex);
+ // currently viewIndex is only based on the number of images and channels
+ // but a correct implementation would also consider timepoints, illuminations and angles
+ // for now I am ignoring them because so far we never needed them.
+ int viewIndex = metadataChIndex * nfiles * seriesCount + sfi * seriesCount + series;
+ Tile tile = new Tile(stackFile.getTi() * seriesCount + series);
+ tile.setLocation(new double[]{oX, oY, oZ});
+ Channel channel = new Channel(metadataChIndex, chName);
+ ViewSetup vs = new ViewSetup(
+ viewIndex,
+ String.valueOf(viewIndex),
+ new FinalDimensions(stackFile.sizeX, stackFile.sizeY, stackFile.sizeZ),
+ voxelDimensions,
+ tile,
+ channel,
+ new Angle(stackFile.getAng()),
+ new Illumination(stackFile.getIl())
+ );
+ viewToStackFileMap.put(viewIndex, stackFile);
+ ((Map<Integer, ViewSetup>) sequenceDescription.getViewSetups()).put(viewIndex, vs);
+ }
+ }
+ } catch (Exception e) {
+ throw new IllegalStateException("Could not read " + stackFile, e);
+ }
+ }
+ return this;
+ }
+
+ private double getOffsetX(Map<String, Object> globalMetadata, int series) {
+ // get position from global metadata (image number in metadata is 1-based in global metadata)
+ Double val = findPositionInGlobalMeta(
+ globalMetadata,
+ series + 1,
+ Arrays.asList("PositionX", "Position|X"));
+
+ if (val != null) {
+ return val;
+ }
+
+ return 0;
+ }
+
+ private double getOffsetY(Map<String, Object> globalMetadata, int series) {
+ // get position from global metadata (image number in metadata is 1-based in global metadata)
+ Double val = findPositionInGlobalMeta(
+ globalMetadata,
+ series + 1,
+ Arrays.asList("PositionY", "Position|Y"));
+
+ if (val != null) {
+ return val;
+ }
+
+ return 0;
+ }
+
+ private double getOffsetZ(Map<String, Object> globalMetadata, int series) {
+ // get position from global metadata (image number in metadata is 1-based in global metadata)
+ Double val = findPositionInGlobalMeta(
+ globalMetadata,
+ series + 1,
+ Arrays.asList("PositionZ", "Position|Z"));
+
+ if (val != null) {
+ return val;
+ }
+
+ return 0;
+ }
+
+ private Double findPositionInGlobalMeta(Map<String, Object> globalMeta, int imageNumber, List<String> patterns) {
+ // Preferred key patterns
+ String[] prefixes = {
+ "Information|Image|V|View|",
+ "Information|Image|S|Scene|",
+ ""
+ };
+ for (String prefix : prefixes) {
+ for (String pattern : patterns) {
+ String searchPattern = prefix + pattern;
+ for (String key : globalMeta.keySet()) {
+ if (key.contains(searchPattern) && key.contains("#")) {
+ int imageNumberFromKey = extractImageNumberFromKey(key);
+ if (imageNumberFromKey == imageNumber) {
+ Object val = globalMeta.get(key);
+ Double dval = null;
+ if (val instanceof Double) {
+ dval = (Double) val;
+ } else if (val instanceof String) {
+ dval = Double.parseDouble((String) val);
+ }
+ if (dval != null) {
+ // Convert from meters to micrometers if needed
+ if (Math.abs(dval) < 1 && Math.abs(dval) > 0)
+ dval *= 1e6;
+ return dval;
+ }
+ }
+ }
+ }
+ }
+ }
+ return null;
+ }
+
+ private int extractImageNumberFromKey(String key) {
+ int hashIdx = key.lastIndexOf('#');
+ if (hashIdx >= 0 && hashIdx < key.length() - 1) {
+ try {
+ return Integer.parseInt(key.substring(hashIdx + 1).trim());
+ } catch (NumberFormatException e) {
+ return -1;
+ }
+ }
+ return -1;
+ }
+ }
+
+
+ static class N5MultichannelViewSetupBuilder implements ViewSetupBuilder {
+
+ private final URI n5ContainerURI;
+ private final SequenceDescription sequenceDescription;
+ private final Map<ViewId, String> viewIdToPath;
+ private final N5Reader n5Reader;
+ private final N5MultichannelProperties n5MultichannelProperties;
+
+ public N5MultichannelViewSetupBuilder(URI n5ContainerURI, TimePoints timePoints) {
+ this.n5ContainerURI = n5ContainerURI;
+ this.sequenceDescription = new SequenceDescription(
+ timePoints,
+ /*view setups*/Collections.emptyList()
+ );
+ this.viewIdToPath = new HashMap<>();
+ n5Reader = new N5FSReader(n5ContainerURI.toString());
+ n5MultichannelProperties = new N5MultichannelProperties(sequenceDescription, viewIdToPath);
+ }
+
+ @Override
+ public SequenceDescription getSequenceDescription() {
+ return sequenceDescription;
+ }
+
+ @Override
+ public N5MultichannelViewSetupBuilder setImgLoader() {
+ sequenceDescription.setImgLoader(
+ new N5MultichannelLoader( n5ContainerURI, StorageFormat.N5, sequenceDescription, viewIdToPath )
+ );
+ return this;
+ }
+
+ @SuppressWarnings("unchecked")
+ @Override
+ public N5MultichannelViewSetupBuilder createViewSetups(List<StackFile> stackFiles) {
+ for (int i = 0; i < stackFiles.size(); i++) {
+ StackFile stackFile = stackFiles.get(i);
+ if ( Files.notExists(stackFile.getFilePath()) )
+ {
+ continue;
+ }
+ viewIdToPath.put(new ViewId(stackFile.getTp(), i), stackFile.relativeFilePath);
+
+ Map pixelResolutions = n5MultichannelProperties.getRootAttribute(n5Reader, "pixelResolution", Map.class);
+ VoxelDimensions voxelDimensions;
+ if (pixelResolutions != null) {
+ double[] res = ((List<Double>) pixelResolutions.getOrDefault("dimensions", Arrays.asList(1., 1., 1.)))
+ .stream()
+ .mapToDouble(d -> d)
+ .toArray();
+ String resUnits = (String) pixelResolutions.getOrDefault("unit", "voxel");
+ voxelDimensions = new FinalVoxelDimensions(resUnits, res);
+ } else {
+ voxelDimensions = new FinalVoxelDimensions("voxel", 1., 1., 1.);
+ }
+ System.out.println("Voxel dimensions: " + voxelDimensions);
+ long[] dims = n5MultichannelProperties.getDimensions(n5Reader, i, stackFile.getTp(), 0);
+ Dimensions size = new FinalDimensions(dims[0], dims[1], dims[2]);
+ ViewSetup vs = new ViewSetup(
+ i, // in this case view index coincides with stack file index
+ stackFile.relativeFilePath,
+ size,
+ voxelDimensions,
+ new Tile(stackFile.getTi()),
+ new Channel(stackFile.getCh()),
+ new Angle(stackFile.getAng()),
+ new Illumination(stackFile.getIl())
+ );
+ ((Map<Integer, ViewSetup>) sequenceDescription.getViewSetups()).put(i, vs);
+ }
+
+ return this;
+ }
+
+ }
+
+ static class StackPattern {
+ final String sourcePattern;
+ // globPattern is used for file lookup
+ final String globPattern;
+ // regexPattern is used for creating tile metadata based on t|c|i|a|x
+ final Pattern regexPattern;
+ final Set<String> keys;
+
+ StackPattern( String sourcePattern ) {
+ this.sourcePattern = sourcePattern;
+ this.globPattern = sourcePattern
+ .replaceAll("\\{t\\}", "*")
+ .replaceAll("\\{c\\}", "*")
+ .replaceAll("\\{i\\}", "*")
+ .replaceAll("\\{a\\}", "*")
+ .replaceAll("\\{x\\}", "*");
+ this.regexPattern = Pattern.compile( sourcePattern
+ .replaceAll("\\.", "\\\\.") // escape dot
+ .replaceAll("\\*", ".*")
+ .replaceAll("\\{t\\}", "(?<tp>\\\\D*?\\\\d+)") // timepoint
+ .replaceAll("\\{c\\}", "(?<ch>\\\\D*?\\\\d+)") // channel
+ .replaceAll("\\{i\\}", "(?<il>\\\\D*?\\\\d+)") // illumination
+ .replaceAll("\\{a\\}", "(?<ang>\\\\D*?\\\\d+)") // angle
+ .replaceAll("\\{x\\}", "(?<ti>\\\\D*?\\\\d+)") ); // tile
+
+ this.keys = initializeKeys(sourcePattern);
+ }
+
+ private Set<String> initializeKeys(String sourcePattern) {
+ Set<String> patternKeys = new HashSet<>();
+ String regexStr = Pattern.quote(sourcePattern);
+ Matcher m = Pattern.compile("\\{(t|c|i|a|x)\\}").matcher(regexStr);
+ while (m.find()) {
+ String key = m.group(1);
+ patternKeys.add(key);
+ }
+ return patternKeys;
+ }
+
+ String getGlobPattern() {
+ return "glob:" + globPattern;
+ }
+
+ int getSearchDepth() {
+ return (int) sourcePattern.chars().filter(c -> c == File.separatorChar).count();
+ }
+
+ boolean hasKey(String key) {
+ return keys.contains(key);
+ }
+ }
+
+ private final StackPattern fileNamePattern;
+
+ public SpimDatasetBuilder(String fileNamePattern )
+ {
+ this.fileNamePattern = new StackPattern(fileNamePattern);
+ }
+
+ public SpimData2 createDataset(URI imagePath) {
+ List<StackFile> stackFiles = getStackFiles(imagePath);
+
+ // collect timepoints from stack files
+ Set<TimePoint> timePoints = stackFiles.stream()
+ .map(si -> new TimePoint(si.viewIndex.tp))
+ .collect(Collectors.toSet());
+
+ SequenceDescription sequenceDescription = createViewSetupBuilder(imagePath, new TimePoints(timePoints))
+ .createViewSetups(stackFiles)
+ .setImgLoader()
+ .getSequenceDescription();
+
+ // get the min resolution from all calibrations
+ double minResolution = DatasetCreationUtils.minResolution(
+ sequenceDescription,
+ sequenceDescription.getViewDescriptions().values()
+ );
+
+ ViewRegistrations viewRegistrations = DatasetCreationUtils.createViewRegistrations(
+ sequenceDescription.getViewDescriptions(),
+ minResolution
+ );
+
+ ViewInterestPoints viewInterestPoints = new ViewInterestPoints();
+
+ return new SpimData2(
+ imagePath,
+ sequenceDescription,
+ viewRegistrations,
+ viewInterestPoints,
+ new BoundingBoxes(),
+ new PointSpreadFunctions(),
+ new StitchingResults(),
+ new IntensityAdjustments()
+ );
+ }
+
+ /**
+ * So far only local paths are supported.
+ *
+ * @param imageURI
+ * @return
+ */
+ private List<StackFile> getStackFiles(URI imageURI)
+ {
+ int searchDepth = fileNamePattern.getSearchDepth();
+ try {
+ Path imagePath = Paths.get(imageURI);
+ // get the files
+ PathMatcher matcher = FileSystems.getDefault().getPathMatcher(fileNamePattern.getGlobPattern());
+ List<StackFile> fs = Files.walk( imagePath , searchDepth+1)
+ .filter(path -> matcher.matches(imagePath.relativize(path)))
+ .map(p -> getStackFile(imageURI, imagePath.relativize(p).toString()))
+ .collect(Collectors.toList());
+ System.out.println(fs);
+ return fs;
+ } catch (IOException e) {
+ throw new UncheckedIOException(e);
+ }
+ }
+
+ private StackFile getStackFile(URI imageURI, String imageRelativePath)
+ {
+ Matcher m = fileNamePattern.regexPattern.matcher(imageRelativePath);
+ if ( m.matches() ) {
+ int tp = extractInt(fileNamePattern.hasKey("t") ? m.group("tp") : "0"); // timepoint
+ int ch = extractInt(fileNamePattern.hasKey("c") ? m.group("ch") : "0"); // channel
+ int il = extractInt(fileNamePattern.hasKey("i") ? m.group("il") : "0"); // illumination
+ int ang = extractInt(fileNamePattern.hasKey("a") ? m.group("ang") : "0"); // angle
+ int ti = extractInt(fileNamePattern.hasKey("x") ? m.group("ti") : "0"); // tile
+ return new StackFile(tp, ch, il, ang, ti, imageURI, imageRelativePath);
+ } else {
+ throw new IllegalArgumentException(imageRelativePath + " does not match " + fileNamePattern.sourcePattern + ". Refine the pattern and try again");
+ }
+ }
+
+ int extractInt(String input) {
+ Matcher m = Pattern.compile("\\D*(\\d+)").matcher(input);
+ if (m.matches()) {
+ return Integer.parseInt(m.group(1));
+ } else {
+ return 0;
+ }
+ }
+
+ private ViewSetupBuilder createViewSetupBuilder(URI imageURI, TimePoints timePoints) {
+ if ( imageURI.getScheme().equals("n5") || imageURI.getScheme().equals("file") && imageURI.getPath().contains(".n5") ) {
+ return new N5MultichannelViewSetupBuilder(imageURI, timePoints);
+ } else {
+ return new LOCIViewSetupBuilder(timePoints);
+ }
+ }
+}
diff --git a/src/main/java/net/preibisch/mvrecon/dataset/XmlToN5MultichannelLoader.java b/src/main/java/net/preibisch/mvrecon/dataset/XmlToN5MultichannelLoader.java
new file mode 100644
index 00000000..1c07745c
--- /dev/null
+++ b/src/main/java/net/preibisch/mvrecon/dataset/XmlToN5MultichannelLoader.java
@@ -0,0 +1,63 @@
+package net.preibisch.mvrecon.dataset;
+
+import java.io.File;
+import java.net.URI;
+import java.util.HashMap;
+import java.util.Map;
+
+import mpicbg.spim.data.XmlHelpers;
+import mpicbg.spim.data.generic.sequence.AbstractSequenceDescription;
+import mpicbg.spim.data.generic.sequence.ImgLoaderIo;
+import mpicbg.spim.data.generic.sequence.XmlIoBasicImgLoader;
+import mpicbg.spim.data.sequence.ViewId;
+import net.preibisch.mvrecon.fiji.spimdata.imgloaders.AllenOMEZarrLoader;
+import org.janelia.saalfeldlab.n5.universe.StorageFormat;
+import org.jdom2.Element;
+
+import static mpicbg.spim.data.XmlKeys.IMGLOADER_FORMAT_ATTRIBUTE_NAME;
+
+@ImgLoaderIo( format = "bdv.multchimg.n5", type = N5MultichannelLoader.class )
+public class XmlToN5MultichannelLoader implements XmlIoBasicImgLoader<N5MultichannelLoader> {
+ @Override
+ public Element toXml(N5MultichannelLoader imgLoader, File basePath) {
+ final Element imgLoaderElement = new Element( "ImageLoader" );
+ imgLoaderElement.setAttribute( IMGLOADER_FORMAT_ATTRIBUTE_NAME, "bdv.multchimg.n5" );
+ imgLoaderElement.setAttribute( "version", "1.0" );
+
+ imgLoaderElement.addContent( XmlHelpers.pathElementURI( "n5", imgLoader.getN5URI(), basePath.toURI() ));
+
+ final Element zgroupsElement = new Element( "n5groups" );
+
+ for ( final Map.Entry<ViewId, String> entry : imgLoader.getViewIdToPath().entrySet() )
+ {
+ final Element n5groupElement = new Element("n5group");
+ n5groupElement.setAttribute( "setup", String.valueOf( entry.getKey().getViewSetupId() ) );
+ n5groupElement.setAttribute( "tp", String.valueOf( entry.getKey().getTimePointId() ) );
+ n5groupElement.setAttribute( "path", String.valueOf( entry.getValue() ) );
+
+ zgroupsElement.addContent( n5groupElement );
+ }
+
+ imgLoaderElement.addContent( zgroupsElement );
+
+ return imgLoaderElement;
+ }
+
+ @Override
+ public N5MultichannelLoader fromXml(Element elem, File basePath, AbstractSequenceDescription<?, ?, ?> sequenceDescription) {
+ final Map<ViewId, String> n5groups = new HashMap<>();
+
+ URI uri = XmlHelpers.loadPathURI( elem, "n5", basePath.toURI() );
+
+ final Element n5groupsElem = elem.getChild( "n5groups" );
+ for ( final Element c : n5groupsElem.getChildren( "n5group" ) )
+ {
+ final int timepointId = Integer.parseInt( c.getAttributeValue( "tp" ) );
+ final int setupId = Integer.parseInt( c.getAttributeValue( "setup" ) );
+ final String path = c.getAttributeValue( "path" );
+ n5groups.put( new ViewId( timepointId, setupId ), path );
+ }
+
+ return new N5MultichannelLoader(uri, StorageFormat.N5, sequenceDescription, n5groups);
+ }
+}
diff --git a/src/test/java/net/preibisch/mvrecon/dataset/CZIMetadataDebug.java b/src/test/java/net/preibisch/mvrecon/dataset/CZIMetadataDebug.java
new file mode 100644
index 00000000..0b80240c
--- /dev/null
+++ b/src/test/java/net/preibisch/mvrecon/dataset/CZIMetadataDebug.java
@@ -0,0 +1,474 @@
+package net.preibisch.mvrecon.dataset;
+
+import java.io.File;
+import java.util.ArrayList;
+import java.util.List;
+
+import javax.xml.parsers.DocumentBuilder;
+import javax.xml.parsers.DocumentBuilderFactory;
+
+import loci.formats.in.DefaultMetadataOptions;
+import loci.formats.in.MetadataLevel;
+import org.w3c.dom.Document;
+import org.w3c.dom.Element;
+import org.w3c.dom.NodeList;
+
+import loci.common.services.ServiceFactory;
+import loci.formats.ChannelSeparator;
+import loci.formats.IFormatReader;
+import loci.formats.meta.MetadataRetrieve;
+import loci.formats.ome.OMEXMLMetadataImpl;
+import loci.formats.services.OMEXMLService;
+import ome.units.UNITS;
+import ome.units.quantity.Length;
+
+import java.util.Arrays;
+import java.util.Hashtable;
+import java.util.Map;
+
+public class CZIMetadataDebug {
+
+ /**
+ * Simple class to hold tile position from MVL file
+ */
+ public static class MvlTilePosition {
+ public final int index;
+ public final double x;
+ public final double y;
+ public final double z;
+
+ public MvlTilePosition(int index, double x, double y, double z) {
+ this.index = index;
+ this.x = x;
+ this.y = y;
+ this.z = z;
+ }
+
+ @Override
+ public String toString() {
+ return String.format("MvlTile[%d: (%.2f, %.2f, %.2f)]", index, x, y, z);
+ }
+ }
+
+ /**
+ * Parses an MVL file and extracts tile positions from Entry elements.
+ * Entry elements have attributes like PositionX, PositionY, PositionZ.
+ */
+ public static List<MvlTilePosition> parseMvlFile(File mvlFile) throws Exception {
+ List<MvlTilePosition> positions = new ArrayList<>();
+
+ DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
+ factory.setNamespaceAware(false);
+ DocumentBuilder builder = factory.newDocumentBuilder();
+ Document doc = builder.parse(mvlFile);
+
+ // Look for Entry elements (Entry1, Entry2, etc.) which contain position data
+ NodeList allElements = doc.getElementsByTagName("*");
+ int entryIndex = 0;
+ for (int i = 0; i < allElements.getLength(); i++) {
+ Element el = (Element) allElements.item(i);
+ String tagName = el.getTagName();
+
+ // Match Entry1, Entry2, Entry3, etc.
+ if (tagName.startsWith("Entry") && el.hasAttribute("PositionX")) {
+ double posX = Double.parseDouble(el.getAttribute("PositionX"));
+ double posY = el.hasAttribute("PositionY") ? Double.parseDouble(el.getAttribute("PositionY")) : 0;
+ double posZ = el.hasAttribute("PositionZ") ? Double.parseDouble(el.getAttribute("PositionZ")) : 0;
+
+ positions.add(new MvlTilePosition(entryIndex, posX, posY, posZ));
+ entryIndex++;
+ }
+ }
+
+ System.out.println("MVL: Found " + positions.size() + " Entry elements with positions");
+
+ return positions;
+ }
+
+ /**
+ * Extracts the index number from a metadata key like "Something #3" -> 3
+ */
+ private static int extractImageNumberFromKey(String key) {
+ int hashIdx = key.lastIndexOf('#');
+ if (hashIdx >= 0 && hashIdx < key.length() - 1) {
+ try {
+ return Integer.parseInt(key.substring(hashIdx + 1).trim());
+ } catch (NumberFormatException e) {
+ return -1;
+ }
+ }
+ return -1;
+ }
+
+ /**
+ * Formats a position value, converting from meters to micrometers if needed.
+ */
+ private static String formatPositionValue(Object val) {
+ if (val == null) return "null";
+ try {
+ double d = Double.parseDouble(val.toString());
+ // If value is very small (looks like meters), convert to micrometers
+ if (Math.abs(d) < 1 && Math.abs(d) > 0) {
+ return String.format("%.6f m = %.2f µm", d, d * 1e6);
+ } else {
+ return String.format("%.2f µm", d);
+ }
+ } catch (NumberFormatException e) {
+ return val.toString();
+ }
+ }
+
+ public static void main(String[] args) throws Exception {
+ String filePath = args.length > 0
+ ? args[0]
+// : "/Users/goinac/Work/HHMI/stitching/datasets/medium/t1/LHA3_R3_medium.czi";
+ : "/nrs/scicompsoft/goinac/multifish/testlaura/results/stitching/r0/Gel1_04x_GFP_Tpbg_633_Ribo550_nDapi.czi";
+
+ String mvlFileName = args.length > 1
+ ? args[1]
+ : null; // "Gel1_04x_4x6.mvl";
+
+ File cziFile = new File(filePath);
+ if (!cziFile.exists()) {
+ System.err.println("File not found: " + filePath);
+ return;
+ }
+
+ // Try to find MVL file
+ File mvlFile = null;
+ if (mvlFileName != null) {
+ mvlFile = new File(cziFile.getParentFile(), mvlFileName);
+ } else {
+ // Try same name with .mvl extension
+ String baseName = cziFile.getName().replaceAll("\\.czi$", "");
+ mvlFile = new File(cziFile.getParentFile(), baseName + ".mvl");
+ }
+
+ System.out.println("=== CZI/MVL Metadata Debug ===");
+ System.out.println("CZI File: " + cziFile.getAbsolutePath());
+ System.out.println("MVL File: " + mvlFile.getAbsolutePath());
+ System.out.println("MVL exists: " + mvlFile.exists());
+ System.out.println();
+
+ // Parse MVL file if it exists
+ List<MvlTilePosition> mvlPositions = null;
+ if (mvlFile.exists()) {
+ System.out.println("=== Parsing MVL file ===");
+ mvlPositions = parseMvlFile(mvlFile);
+ System.out.println();
+ System.out.println("=== Tile positions from MVL ===");
+ for (MvlTilePosition pos : mvlPositions) {
+ System.out.println(pos);
+ }
+ System.out.println();
+ }
+
+ // Use IFormatReader with ChannelSeparator (auto-detects format)
+ IFormatReader formatReader = new ChannelSeparator();
+
+ formatReader.setMetadataOptions(new DefaultMetadataOptions(MetadataLevel.ALL));
+
+ // Set up OME-XML metadata store
+ ServiceFactory factory = new ServiceFactory();
+ OMEXMLService service = factory.getInstance(OMEXMLService.class);
+ OMEXMLMetadataImpl meta = (OMEXMLMetadataImpl) service.createOMEXMLMetadata();
+ formatReader.setMetadataStore(meta);
+
+ System.out.println("Set IFormatReader ID: " + cziFile);
+ long startTime = System.currentTimeMillis();
+ formatReader.setId(cziFile.toString());
+ System.out.println("setId() took: " + (System.currentTimeMillis() - startTime) + " ms");
+
+ MetadataRetrieve retrieve = (MetadataRetrieve) formatReader.getMetadataStore();
+
+ int seriesCount = formatReader.getSeriesCount();
+
+ // Dump original metadata looking for MVL-like entry data
+ // MVL Entry elements have: PositionX, PositionY, PositionZ, PositionR, StackRange, StackSlices, etc.
+ System.out.println("=== CZI Global Metadata (MVL-like Entry data) ===");
+ Hashtable<String, Object> globalMeta = formatReader.getGlobalMetadata();
+
+ // Find all view/tile indices by looking for Position keys
+ // Look for patterns like: "Information|Image|V|View|Position|X #N"
+ List<Integer> viewIndices = new ArrayList<>();
+ for (String key : globalMeta.keySet()) {
+ if (key.contains("Position|X") || key.contains("PositionX")) {
+ int idx = extractImageNumberFromKey(key);
+ if (idx >= 0 && !viewIndices.contains(idx)) {
+ viewIndices.add(idx);
+ }
+ }
+ }
+ java.util.Collections.sort(viewIndices);
+
+ System.out.println("Found " + viewIndices.size() + " views/entries with position data in CZI metadata");
+ System.out.println();
+
+ // For each view, print position attributes
+ for (int idx : viewIndices) {
+ System.out.println(" View/Entry " + idx + ":");
+
+ // Look for various position key patterns
+ String[] positionPatterns = {
+ "Information|Image|V|View|Position|X",
+ "Information|Image|V|View|Position|Y",
+ "Information|Image|V|View|Position|Z",
+ "Position|X", "Position|Y", "Position|Z",
+ "PositionX", "PositionY", "PositionZ"
+ };
+
+ for (String pattern : positionPatterns) {
+ String searchKey = pattern + " #" + idx;
+ // Try exact match first
+ Object val = globalMeta.get(searchKey);
+ if (val != null) {
+ String displayVal = formatPositionValue(val);
+ System.out.println(" " + pattern + " = " + displayVal);
+ } else {
+ // Try partial match
+ for (String key : globalMeta.keySet()) {
+ if (key.contains(pattern) && key.endsWith("#" + idx)) {
+ val = globalMeta.get(key);
+ String displayVal = formatPositionValue(val);
+ System.out.println(" " + pattern + " = " + displayVal + " (key: " + key + ")");
+ break;
+ }
+ }
+ }
+ }
+ }
+ System.out.println();
+
+ // Also try getStageLabelX/Y/Z
+ System.out.println("=== StageLabel positions ===");
+ for (int s = 0; s < Math.min(5, seriesCount); s++) {
+ try {
+ Length stageLabelX = retrieve.getStageLabelX(s);
+ Length stageLabelY = retrieve.getStageLabelY(s);
+ Length stageLabelZ = retrieve.getStageLabelZ(s);
+ double slX = stageLabelX != null && stageLabelX.value(UNITS.MICROMETER) != null ? stageLabelX.value(UNITS.MICROMETER).doubleValue() : 0;
+ double slY = stageLabelY != null && stageLabelY.value(UNITS.MICROMETER) != null ? stageLabelY.value(UNITS.MICROMETER).doubleValue() : 0;
+ double slZ = stageLabelZ != null && stageLabelZ.value(UNITS.MICROMETER) != null ? stageLabelZ.value(UNITS.MICROMETER).doubleValue() : 0;
+ System.out.println(" Series " + s + " StageLabel: (" + slX + ", " + slY + ", " + slZ + ")");
+ } catch (Exception e) {
+ System.out.println(" Series " + s + " StageLabel: not available (" + e.getMessage() + ")");
+ }
+ }
+ System.out.println();
+
+ System.out.println("=== CZI File Metadata Debug ===");
+ System.out.println("File: " + filePath);
+ System.out.println("Series count (from IFormatReader): " + seriesCount);
+ System.out.println();
+
+ // Display ViewSetups as created by SpimDatasetBuilder.LOCIViewSetupBuilder.createViewSetups()
+ System.out.println("=== ViewSetups (as created by SpimDatasetBuilder) ===");
+ System.out.println();
+ System.out.printf("%-8s | %-8s | %-20s | %-8s | %-30s | %-30s | %-25s%n",
+ "ViewIdx", "TileID", "Tile Location (um)", "Channel", "Dimensions (X,Y,Z)", "Voxel Size (um)", "Channel Name");
+ System.out.println("--------------------------------------------------------------------------------------------------------------------------------------");
+
+ int viewIndex = 0;
+ for (int series = 0; series < seriesCount; series++) {
+ formatReader.setSeries(series);
+
+ // Get dimensions from formatReader (after setSeries)
+ int sizeX = formatReader.getSizeX();
+ int sizeY = formatReader.getSizeY();
+ int sizeZ = formatReader.getSizeZ();
+
+ // Get position from global metadata or MetadataRetrieve
+ double oX = getOffsetX(globalMeta, series);
+ double oY = getOffsetY(globalMeta, series);
+ double oZ = getOffsetZ(globalMeta, series);
+
+ // Get voxel/pixel size from MetadataRetrieve
+ Length physSizeX = retrieve.getPixelsPhysicalSizeX(series);
+ Length physSizeY = retrieve.getPixelsPhysicalSizeY(series);
+ Length physSizeZ = retrieve.getPixelsPhysicalSizeZ(series);
+
+ double voxelX = physSizeX != null ? physSizeX.value(UNITS.MICROMETER).doubleValue() : 1;
+ double voxelY = physSizeY != null ? physSizeY.value(UNITS.MICROMETER).doubleValue() : 1;
+ double voxelZ = physSizeZ != null ? physSizeZ.value(UNITS.MICROMETER).doubleValue() : 1;
+
+ // Get channel count from metadata (this is what SpimDatasetBuilder uses)
+ int metadataChannels = retrieve.getChannelCount(series);
+
+ // Tile ID = series (for single file case)
+ int tileId = series;
+
+ String tileLocation = String.format("(%.2f, %.2f, %.2f)", oX, oY, oZ);
+ String dims = String.format("(%d, %d, %d)", sizeX, sizeY, sizeZ);
+ String voxel = String.format("(%.4f, %.4f, %.4f)", voxelX, voxelY, voxelZ);
+
+ // Iterate over channels (as SpimDatasetBuilder does)
+ for (int chIndex = 0; chIndex < metadataChannels; chIndex++) {
+ String chName = retrieve.getChannelName(series, chIndex);
+ if (chName == null) chName = "(unnamed)";
+
+ System.out.printf("%-8d | %-8d | %-20s | %-8d | %-30s | %-30s | %-25s%n",
+ viewIndex, tileId, tileLocation, chIndex, dims, voxel, chName);
+
+ viewIndex++;
+ }
+ }
+
+ System.out.println();
+ System.out.println("Total ViewSetups created: " + viewIndex);
+ System.out.println();
+
+ // Summary
+ System.out.println("=== Summary ===");
+ System.out.println("Series (tiles): " + seriesCount);
+ formatReader.setSeries(0);
+ int channelsPerTile = retrieve.getChannelCount(0);
+ System.out.println("Channels per tile: " + channelsPerTile);
+ System.out.println("Expected ViewSetups: " + (seriesCount * channelsPerTile));
+ System.out.println("Actual ViewSetups: " + viewIndex);
+
+ formatReader.close();
+
+ // If we have MVL positions, show comparison
+ if (mvlPositions != null && !mvlPositions.isEmpty()) {
+ System.out.println();
+ System.out.println("=== COMPARISON: MVL vs CZI positions ===");
+ System.out.println("(This shows if MVL file has the correct positions that CZI is missing)");
+ System.out.println();
+ System.out.printf("%-6s | %-40s | %-40s%n", "Image", "CZI Position (um)", "MVL Position (um)");
+ System.out.println("-----------------------------------------------------------------------------------------");
+
+ // Re-open to get CZI positions for comparison (using seriesCount)
+ IFormatReader formatReader2 = new ChannelSeparator();
+ ServiceFactory factory2 = new ServiceFactory();
+ OMEXMLService service2 = factory2.getInstance(OMEXMLService.class);
+ formatReader2.setMetadataStore(service2.createOMEXMLMetadata());
+ formatReader2.setId(cziFile.toString());
+ MetadataRetrieve retrieve2 = (MetadataRetrieve) formatReader2.getMetadataStore();
+
+ int seriesCount2 = formatReader2.getSeriesCount();
+ int compareCount = Math.min(seriesCount2, mvlPositions.size());
+
+ for (int series = 0; series < compareCount; series++) {
+ formatReader2.setSeries(series);
+
+ Length cziX = retrieve2.getPlanePositionX(series, 0);
+ Length cziY = retrieve2.getPlanePositionY(series, 0);
+ Length cziZ = retrieve2.getPlanePositionZ(series, 0);
+
+ double cx = cziX != null && cziX.value(UNITS.MICROMETER) != null ? cziX.value(UNITS.MICROMETER).doubleValue() : 0;
+ double cy = cziY != null && cziY.value(UNITS.MICROMETER) != null ? cziY.value(UNITS.MICROMETER).doubleValue() : 0;
+ double cz = cziZ != null && cziZ.value(UNITS.MICROMETER) != null ? cziZ.value(UNITS.MICROMETER).doubleValue() : 0;
+
+ MvlTilePosition mvl = mvlPositions.get(series);
+
+ String cziPos = String.format("(%.2f, %.2f, %.2f)", cx, cy, cz);
+ String mvlPos = String.format("(%.2f, %.2f, %.2f)", mvl.x, mvl.y, mvl.z);
+
+ // Mark if CZI position is (0,0,0) but MVL has real values
+ String marker = (cx == 0 && cy == 0 && (mvl.x != 0 || mvl.y != 0)) ? " <-- MVL has data!" : "";
+
+ System.out.printf("%-6d | %-40s | %-40s%s%n", series, cziPos, mvlPos, marker);
+ }
+
+ formatReader2.close();
+
+ System.out.println();
+ System.out.println("CONCLUSION: If MVL has positions but CZI shows (0,0,0), use MVL file for tile positions!");
+ }
+
+ System.out.println();
+ System.out.println("=== Done ===");
+ }
+
+ /**
+ * Gets X offset for a series, trying multiple sources:
+ * 1. PlanePosition with plane index 0
+ * 2. Global metadata: Information|Image|V|View|Position|X or Information|Image|S|Scene|Position|X
+ */
+ private static double getOffsetX(Map<String, Object> globalMetadata, int series) {
+ // Try global metadata keys (series is 1-based in global metadata)
+ Double val = findPositionInGlobalMeta(globalMetadata, series + 1, Arrays.asList("PositionX", "Position|X"));
+ if (val != null) {
+ String dir = (String) globalMetadata.get("Experiment|Axis|X|Direction");
+ return "IncreasingLeft".equals(dir) ? -val : val;
+ }
+
+ System.out.println("Series " + series + ": No x offset found");
+ return 0;
+ }
+
+ /**
+ * Gets Y offset for a series, trying multiple sources.
+ */
+ private static double getOffsetY(Map<String, Object> globalMetadata, int series) {
+ // Try global metadata keys (series is 1-based in global metadata)
+ Double val = findPositionInGlobalMeta(globalMetadata, series + 1, Arrays.asList("PositionY", "Position|Y"));
+ if (val != null) {
+ String dir = (String) globalMetadata.get("Experiment|Axis|Y|Direction");
+ return "IncreasingLeft".equals(dir) ? -val : val;
+ }
+
+ System.out.println("Series " + series + ": No y offset found");
+ return 0;
+ }
+
+ /**
+ * Gets Z offset for a series, trying multiple sources.
+ */
+ private static double getOffsetZ(Map<String, Object> globalMetadata, int series) {
+ // Try global metadata keys (series is 1-based in global metadata)
+ Double val = findPositionInGlobalMeta(globalMetadata, series + 1, Arrays.asList("PositionZ", "Position|Z"));
+ if (val != null) {
+ String dir = (String) globalMetadata.get("Experiment|Axis|Z|Direction");
+ return "IncreasingLeft".equals(dir) ? -val : val;
+ }
+
+ System.out.println("Series " + series + ": No z offset found");
+ return 0;
+ }
+
+ /**
+ * Searches global metadata for a position value matching the image number.
+ * Handles different number formats like "#1", "#01", "#001".
+ */
+ private static Double findPositionInGlobalMeta(Map<String, Object> globalMeta, int imageNumber, List<String> patterns) {
+ // Preferred key patterns in order of priority
+ String[] prefixes = {
+ "Information|Image|V|View|",
+ "Information|Image|S|Scene|",
+ ""
+ };
+
+ for (String prefix : prefixes) {
+ for (String pattern : patterns) {
+ String searchPattern = prefix + pattern;
+ for (String key : globalMeta.keySet()) {
+ if (key.contains(searchPattern) && key.contains("#")) {
+ int imageNumberFromKey = extractImageNumberFromKey(key);
+ if (imageNumberFromKey == imageNumber) {
+ Object val = globalMeta.get(key);
+ Double dval = null;
+ if (val instanceof Double) {
+ dval = (Double) val;
+ } else if (val instanceof String) {
+ dval = Double.parseDouble((String) val);
+ } else if (val != null) {
+ dval = Double.parseDouble(val.toString());
+ }
+ if (dval != null) {
+ // Convert from meters to micrometers if needed
+ if (Math.abs(dval) < 1 && Math.abs(dval) > 0)
+ dval *= 1e6;
+ System.out.println("Image " + imageNumber + ": Found " + pattern + " offset: " + dval + " µm using " + key);
+ return dval;
+ }
+ }
+ }
+ }
+ }
+ }
+
+ return null;
+ }
+
+}