diff --git a/README.md b/README.md
index ad89dc5b..0ead51eb 100644
--- a/README.md
+++ b/README.md
@@ -65,7 +65,7 @@ If the input file has multiple series, a subset of the series can be converted b
 
     bioformats2raw /path/to/file.scn /path/to/zarr-pyramid --series 0,2,3,4
 
-By default, three additional readers (MiraxReader, PyramidTiffReader, and BioTekReader) are added to the beginning of Bio-Formats' list of reader classes.
+By default, four additional readers (MiraxReader, PyramidTiffReader, BioTekReader, and ND2PlateReader) are added to the beginning of Bio-Formats' list of reader classes.
 These readers are considered to be experimental and as a result only a limited range of input data is supported.
 Any of these readers can be excluded with the `--extra-readers` option:
 
diff --git a/build.gradle b/build.gradle
index 071caa15..61db2b29 100644
--- a/build.gradle
+++ b/build.gradle
@@ -31,7 +31,11 @@ repositories {
     url 'https://artifacts.glencoesoftware.com/artifactory/scijava-thirdparty'
   }
   maven {
-    url 'https://nexus.senbox.net/nexus/content/groups/public'
+    url 'https://artifacts.glencoesoftware.com/artifactory/jzarr-releases'
+  }
+  maven {
+    name 'Unidata'
+    url 'https://artifacts.glencoesoftware.com/artifactory/unidata-releases'
   }
 }
@@ -42,7 +46,7 @@ configurations.all {
 
 dependencies {
   implementation 'net.java.dev.jna:jna:5.10.0'
-  implementation 'ome:formats-gpl:6.8.0'
+  implementation 'ome:formats-gpl:6.9.1'
   implementation 'info.picocli:picocli:4.6.1'
   implementation 'com.univocity:univocity-parsers:2.8.4'
   implementation 'com.bc.zarr:jzarr:0.3.3-gs-SNAPSHOT'
diff --git a/src/main/java/com/glencoesoftware/bioformats2raw/Converter.java b/src/main/java/com/glencoesoftware/bioformats2raw/Converter.java
index bd4441e4..a3de1a9c 100644
--- a/src/main/java/com/glencoesoftware/bioformats2raw/Converter.java
+++ b/src/main/java/com/glencoesoftware/bioformats2raw/Converter.java
@@ -19,7 +19,6 @@
 import java.nio.file.Path;
 import java.nio.file.Paths;
 import java.util.ArrayList;
-import java.util.Collections;
 import java.util.Comparator;
 import java.util.HashMap;
 import java.util.List;
@@ -56,6 +55,7 @@
 import loci.formats.ome.OMEXMLMetadata;
 import loci.formats.services.OMEXMLService;
 import loci.formats.services.OMEXMLServiceImpl;
+import ome.units.quantity.Quantity;
 import ome.xml.meta.OMEXMLMetadataRoot;
 import ome.xml.model.enums.DimensionOrder;
 import ome.xml.model.enums.EnumerationException;
@@ -239,7 +239,8 @@ public class Converter implements Callable<Integer> {
     "(default: ${DEFAULT-VALUE})"
   )
   private volatile Class<?>[] extraReaders = new Class[] {
-    PyramidTiffReader.class, MiraxReader.class, BioTekReader.class
+    PyramidTiffReader.class, MiraxReader.class,
+    BioTekReader.class, ND2PlateReader.class
   };
 
   @Option(
@@ -562,6 +563,12 @@ public void convert()
 
     if (!noHCS) {
       noHCS = !hasValidPlate(meta);
+      int plateCount = meta.getPlateCount();
+      if (!noHCS && plateCount > 1) {
+        throw new IllegalArgumentException(
+          "Found " + plateCount + " plates; only one can be converted. " +
+          "Use --no-hcs as a workaround.");
+      }
     }
     else {
       ((OMEXMLMetadata) meta).resolveReferences();
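The guard added above refuses multi-plate inputs at the start of conversion. A minimal standalone sketch of the same plate-count check using stock Bio-Formats calls; the input path is hypothetical:

    import loci.formats.ImageReader;
    import loci.formats.MetadataTools;
    import loci.formats.meta.IMetadata;

    public class PlateCountCheck {
      public static void main(String[] args) throws Exception {
        ImageReader reader = new ImageReader();
        IMetadata meta = MetadataTools.createOMEXMLMetadata();
        reader.setMetadataStore(meta);
        reader.setId("/path/to/plate.nd2");  // hypothetical input file
        try {
          // mirrors the new convert() guard: more than one plate is an
          // error unless --no-hcs is passed
          if (meta.getPlateCount() > 1) {
            throw new IllegalArgumentException(
              "Found " + meta.getPlateCount() + " plates");
          }
        }
        finally {
          reader.close();
        }
      }
    }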
"0.4" : "0.1"); multiscales.add(multiscale); - List> datasets = new ArrayList>(); + + IFormatReader v = null; + IMetadata meta = null; + String axisOrder = null; + try { + v = readers.take(); + meta = (IMetadata) v.getMetadataStore(); + + if (dimensionOrder != null) { + axisOrder = dimensionOrder.toString(); + } + else { + axisOrder = v.getDimensionOrder(); + } + } + finally { + readers.put(v); + } + + List> datasets = new ArrayList>(); for (int r = 0; r < resolutions; r++) { resolutionString = String.format( scaleFormatString, getScaleFormatStringArgs(series, r)); String lastPath = resolutionString.substring( resolutionString.lastIndexOf('/') + 1); - datasets.add(Collections.singletonMap("path", lastPath)); - } - multiscale.put("datasets", datasets); - String axisOrder = null; - if (dimensionOrder != null) { - axisOrder = dimensionOrder.toString(); - } - else { - IFormatReader reader = readers.take(); - try { - axisOrder = reader.getDimensionOrder(); - } - finally { - readers.put(reader); + List> transforms = + new ArrayList>(); + Map scale = new HashMap(); + scale.put("type", "scale"); + List axisValues = new ArrayList(); + double resolutionScale = Math.pow(PYRAMID_SCALE, r); + for (int i=axisOrder.length()-1; i>=0; i--) { + Quantity axisScale = getScale(meta, series, axisOrder, i); + String axisChar = axisOrder.substring(i, i + 1).toLowerCase(); + + if (axisScale != null) { + // if physical dimension information is defined, + // use it directly for dimensions that aren't scaled (Z and T) + // increase it according to the resolution number for dimensions that + // are scaled (X and Y) + if (axisChar.equals("x") || axisChar.equals("y")) { + axisValues.add(axisScale.value().doubleValue() * resolutionScale); + } + else { + axisValues.add(axisScale.value().doubleValue()); + } + } + else { + // if physical dimension information is not defined, + // store the scale factor for the dimension in the current resolution, + // i.e. 
@@ -1560,6 +1616,9 @@ else if (axis.equals("c")) {
       Map<String, Object> thisAxis = new HashMap<String, Object>();
       thisAxis.put("name", axis);
       thisAxis.put("type", type);
+      if (scale != null) {
+        thisAxis.put("unit", scale.unit().getSymbol());
+      }
       axes.add(thisAxis);
     }
     multiscale.put("axes", axes);
@@ -1573,6 +1632,33 @@ else if (axis.equals("c")) {
     LOGGER.debug("  finished writing subgroup attributes");
   }
 
+  private Quantity getScale(
+      IMetadata meta, int series, String axisOrder, int axis)
+  {
+    if (meta == null) {
+      return null;
+    }
+    int seriesIndex = seriesList.indexOf(series);
+
+    if (seriesIndex < 0 || seriesIndex >= meta.getImageCount()) {
+      return null;
+    }
+
+    String axisChar = axisOrder.substring(axis, axis + 1).toLowerCase();
+    switch (axisChar.charAt(0)) {
+      case 'x':
+        return meta.getPixelsPhysicalSizeX(seriesIndex);
+      case 'y':
+        return meta.getPixelsPhysicalSizeY(seriesIndex);
+      case 'z':
+        return meta.getPixelsPhysicalSizeZ(seriesIndex);
+      case 't':
+        return meta.getPixelsTimeIncrement(seriesIndex);
+      default:
+        return null;
+    }
+  }
+
   /**
    * Takes exception from asynchronous execution and re-throw known exception
    * types. If the end is reached with no known exception detected, either the
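Both the new getScale helper and the axis "unit" field above lean on the ome.units Quantity API. A small sketch of how a physical size round-trips through that API; the 0.65 µm value is made up for illustration:

    import loci.formats.MetadataTools;
    import loci.formats.meta.IMetadata;
    import ome.units.UNITS;
    import ome.units.quantity.Length;
    import ome.units.quantity.Quantity;

    public class UnitSymbolDemo {
      public static void main(String[] args) throws Exception {
        IMetadata meta = MetadataTools.createOMEXMLMetadata();
        MetadataTools.populateMetadata(
          meta, 0, "demo", false, "XYZCT", "uint8", 64, 64, 1, 1, 1, 1);
        meta.setPixelsPhysicalSizeX(new Length(0.65, UNITS.MICROMETER), 0);

        // what getScale returns for the X axis of series 0
        Quantity x = meta.getPixelsPhysicalSizeX(0);
        System.out.println(x.value().doubleValue()); // 0.65 -> "scale" entry
        System.out.println(x.unit().getSymbol());    // µm   -> axis "unit"
      }
    }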
diff --git a/src/main/java/com/glencoesoftware/bioformats2raw/MiraxReader.java b/src/main/java/com/glencoesoftware/bioformats2raw/MiraxReader.java
index f6e8ac7f..d5618a46 100644
--- a/src/main/java/com/glencoesoftware/bioformats2raw/MiraxReader.java
+++ b/src/main/java/com/glencoesoftware/bioformats2raw/MiraxReader.java
@@ -273,7 +273,11 @@ public byte[] openBytes(int no, byte[] buf, int x, int y, int w, int h)
     TilePointer thisOffset = lookupTile(index, col, row, no / MAX_CHANNELS);
     if (thisOffset != null) {
       int channel = no % MAX_CHANNELS;
-      if (fluorescence && getSizeC() != 2) {
+      // 2 channel JPEG data needs to have the channel index inverted
+      // 2 channel JPEG-2000 data should not have the channel index inverted
+      if (fluorescence &&
+        (getSizeC() != 2 || format.get(index).equals("JPEG")))
+      {
         channel = MAX_CHANNELS - channel - 1;
       }
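The new ND2PlateReader below stitches one plate from many per-well .nd2 files, grouping them by a filename pattern. A sketch of how that pattern decomposes a hypothetical file name:

    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    public class WellNameDemo {
      // same pattern as ND2PlateReader.PLATE_REGEX below
      private static final Pattern PLATE = Pattern.compile(
        "(.*_?)Well([A-Z])(\\d{2})_Channel(.*)_Seq(\\d{4}).nd2");

      public static void main(String[] args) {
        // hypothetical file name, for illustration only
        Matcher m = PLATE.matcher("MyPlate_WellB03_ChannelDAPI_Seq0007.nd2");
        if (m.matches()) {
          System.out.println(m.group(1)); // MyPlate_ (shared plate name)
          System.out.println(m.group(2)); // B        (well row)
          System.out.println(m.group(3)); // 03       (well column)
        }
      }
    }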
diff --git a/src/main/java/com/glencoesoftware/bioformats2raw/ND2PlateReader.java b/src/main/java/com/glencoesoftware/bioformats2raw/ND2PlateReader.java
new file mode 100644
index 00000000..39d9effe
--- /dev/null
+++ b/src/main/java/com/glencoesoftware/bioformats2raw/ND2PlateReader.java
@@ -0,0 +1,273 @@
+/**
+ * Copyright (c) 2022 Glencoe Software, Inc. All rights reserved.
+ *
+ * This software is distributed under the terms described by the LICENSE.txt
+ * file you can find at the root of the distribution bundle.  If the file is
+ * missing please request a copy by contacting info@glencoesoftware.com
+ */
+
+package com.glencoesoftware.bioformats2raw;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Comparator;
+import java.util.List;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import loci.common.Location;
+
+import loci.formats.CoreMetadata;
+import loci.formats.FormatException;
+import loci.formats.FormatReader;
+import loci.formats.FormatTools;
+import loci.formats.MetadataTools;
+
+import loci.formats.in.NativeND2Reader;
+
+import loci.formats.meta.MetadataStore;
+import loci.formats.ome.OMEXMLMetadata;
+
+import ome.xml.meta.MetadataConverter;
+import ome.xml.meta.OMEXMLMetadataRoot;
+import ome.xml.model.Image;
+import ome.xml.model.primitives.NonNegativeInteger;
+import ome.xml.model.primitives.PositiveInteger;
+
+/**
+ *
+ * @see NativeND2Reader
+ */
+public class ND2PlateReader extends FormatReader {
+
+  private static final String PLATE_REGEX =
+    "(.*_?)Well([A-Z])(\\d{2})_Channel(.*)_Seq(\\d{4}).nd2";
+
+  private transient NativeND2Reader reader = new NativeND2Reader();
+  private transient Pattern platePattern;
+  private String[] files;
+  private Integer[] fieldCount;
+
+  // -- Constructor --
+
+  /** Constructs a new ND2 reader. */
+  public ND2PlateReader() {
+    super("Nikon ND2 Plate", "nd2");
+    suffixSufficient = false;
+    domains = new String[] {FormatTools.HCS_DOMAIN};
+  }
+
+  @Override
+  public boolean isThisType(String name, boolean open) {
+    if (!isGroupFiles() || !reader.isThisType(name, open)) {
+      return false;
+    }
+    Pattern p = Pattern.compile(PLATE_REGEX);
+    return p.matcher(name).matches();
+  }
+
+  @Override
+  public void close(boolean fileOnly) throws IOException {
+    super.close(fileOnly);
+    if (reader != null) {
+      reader.close(fileOnly);
+      files = null;
+      fieldCount = null;
+      platePattern = null;
+    }
+  }
+
+  @Override
+  public String[] getSeriesUsedFiles(boolean noPixels) {
+    if (noPixels) {
+      return super.getSeriesUsedFiles(noPixels);
+    }
+    return new String[] {files[getFileIndex(getSeries())]};
+  }
+
+  @Override
+  public byte[] openBytes(int no, byte[] buf, int x, int y, int w, int h)
+    throws FormatException, IOException
+  {
+    Arrays.fill(buf, (byte) 0);
+
+    int fileIndex = getFileIndex(getSeries());
+    if (fileIndex >= 0 && fileIndex < files.length) {
+      if (files[fileIndex] != null) {
+        try {
+          reader.setId(files[fileIndex]);
+          reader.setSeries(getFieldIndex(getSeries()));
+          return reader.openBytes(no, buf, x, y, w, h);
+        }
+        catch (Exception e) {
+          LOGGER.error(
+            files[fileIndex] + " could not be read; returning blank planes",
+            e);
+        }
+      }
+    }
+    return buf;
+  }
+
+  @Override
+  protected void initFile(String id) throws FormatException, IOException {
+    super.initFile(id);
+
+    // use an OMEXMLMetadataStore independent of this reader's MetadataStore
+    // so that Images can for sure be copied
+    reader.setMetadataStore(MetadataTools.createOMEXMLMetadata());
+
+    // look for all files in the directory that match the regex
+    // and have the same plate name
+
+    platePattern = Pattern.compile(PLATE_REGEX);
+    Location currentFile = new Location(id).getAbsoluteFile();
+    Location parentDir = currentFile.getParentFile();
+    Matcher currentMatcher = platePattern.matcher(currentFile.getName());
+    String plateName = null;
+    if (currentMatcher.matches()) {
+      plateName = currentMatcher.group(1);
+    }
+
+    List<String> allFiles = new ArrayList<String>();
+    for (String s : parentDir.list(true)) {
+      Matcher m = platePattern.matcher(s);
+      if (m.matches() && m.group(1).equals(plateName)) {
+        allFiles.add(new Location(parentDir, s).getAbsolutePath());
+      }
+    }
+
+    // sort list of files by well row and column
+
+    allFiles.sort(new Comparator<String>() {
+      @Override
+      public int compare(String s1, String s2) {
+        int[] well1 = getWellCoordinates(s1);
+        int[] well2 = getWellCoordinates(s2);
+
+        if (well1[0] != well2[0]) {
+          return well1[0] - well2[0];
+        }
+        return well1[1] - well2[1];
+      }
+    });
+
+    // populate MetadataStore Plate data based on min/max well row and column
+    // copy each well file's metadata to core metadata and MetadataStore
+
+    core.clear();
+
+    OMEXMLMetadataRoot tmpRoot = new OMEXMLMetadataRoot();
+
+    int minRow = Integer.MAX_VALUE;
+    int maxRow = 0;
+    int minCol = Integer.MAX_VALUE;
+    int maxCol = 0;
+    List<int[]> wells = new ArrayList<int[]>();
+    List<Integer> fieldCounts = new ArrayList<Integer>();
+    for (int f=0; f<allFiles.size(); f++) {
+      reader.setId(allFiles.get(f));
+      fieldCounts.add(reader.getSeriesCount());
+      List<CoreMetadata> wellCores = reader.getCoreMetadataList();
+      for (CoreMetadata c : wellCores) {
+        core.add(new CoreMetadata(c));
+      }
+
+      OMEXMLMetadata omeMeta = (OMEXMLMetadata) reader.getMetadataStore();
+      OMEXMLMetadataRoot wellRoot = (OMEXMLMetadataRoot) omeMeta.getRoot();
+      if (f == 0) {
+        // only need one copy of the Instrument data
+        tmpRoot.addInstrument(wellRoot.copyInstrumentList().get(0));
+      }
+
+      List<Image> images = wellRoot.copyImageList();
+      for (Image img : images) {
+        tmpRoot.addImage(img);
+      }
+
+      int[] rowColumn = getWellCoordinates(allFiles.get(f));
+      minRow = (int) Math.min(minRow, rowColumn[0]);
+      maxRow = (int) Math.max(maxRow, rowColumn[0]);
+      minCol = (int) Math.min(minCol, rowColumn[1]);
+      maxCol = (int) Math.max(maxCol, rowColumn[1]);
+      wells.add(rowColumn);
+    }
+    files = allFiles.toArray(new String[allFiles.size()]);
+    fieldCount = fieldCounts.toArray(new Integer[fieldCounts.size()]);
+
+    OMEXMLMetadata tmpMeta =
+      (OMEXMLMetadata) MetadataTools.createOMEXMLMetadata();
+    tmpMeta.setRoot(tmpRoot);
+    MetadataStore store = makeFilterMetadata();
+    MetadataConverter.convertMetadata(tmpMeta, store);
+    MetadataTools.populatePixels(store, this);
+
+    store.setPlateID(MetadataTools.createLSID("Plate", 0), 0);
+
+    // remove trailing underscore
+    if (plateName != null && plateName.endsWith("_")) {
+      plateName = plateName.substring(0, plateName.length() - 1);
+    }
+    store.setPlateName(plateName, 0);
+
+    store.setPlateRows(new PositiveInteger(maxRow + 1), 0);
+    store.setPlateColumns(new PositiveInteger(maxCol + 1), 0);
+
+    int imageIndex = 0;
+    for (int f=0; f<files.length; f++) {
+      int[] rowColumn = wells.get(f);
+      String wellID = MetadataTools.createLSID("Well", 0, f);
+      store.setWellID(wellID, 0, f);
+      store.setWellRow(new NonNegativeInteger(rowColumn[0]), 0, f);
+      store.setWellColumn(new NonNegativeInteger(rowColumn[1]), 0, f);
+
+      for (int field=0; field<fieldCount[f]; field++) {
+        String wellSampleID =
+          MetadataTools.createLSID("WellSample", 0, f, field);
+        store.setWellSampleID(wellSampleID, 0, f, field);
+        store.setWellSampleIndex(
+          new NonNegativeInteger(imageIndex), 0, f, field);
+        String imageID = MetadataTools.createLSID("Image", imageIndex);
+        store.setImageID(imageID, imageIndex);
+        store.setWellSampleImageRef(imageID, 0, f, field);
+        imageIndex++;
+      }
+    }
+  }
+
+  private int getFileIndex(int series) {
+    int firstSeries = 0;
+    for (int f=0; f<fieldCount.length; f++) {
+      if (series < firstSeries + fieldCount[f]) {
+        return f;
+      }
+      firstSeries += fieldCount[f];
+    }
+    return -1;
+  }
+
+  private int getFieldIndex(int series) {
+    int fileIndex = getFileIndex(series);
+    int firstSeries = 0;
+    for (int f=0; f<fileIndex; f++) {
+      firstSeries += fieldCount[f];
+    }
+    return series - firstSeries;
+  }
+
+  private int[] getWellCoordinates(String file) {
+    if (platePattern == null) {
+      platePattern = Pattern.compile(PLATE_REGEX);
+    }
+    Matcher m = platePattern.matcher(new Location(file).getName());
+    if (m.matches()) {
+      int row = m.group(2).charAt(0) - 'A';
+      int col = Integer.parseInt(m.group(3)) - 1;
+      return new int[] {row, col};
+    }
+    return new int[] {0, 0};
+  }
+}
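openBytes above maps a flat series index onto (file, field) pairs by walking the per-file field counts. A sketch of that mapping, with made-up field counts for two hypothetical well files:

    public class SeriesIndexDemo {
      public static void main(String[] args) {
        int[] fieldCount = {2, 3};  // fields per well file (made-up values)
        for (int series = 0; series < 5; series++) {
          int file = 0;
          int firstSeries = 0;
          // walk the files until the one containing this series is found
          while (series >= firstSeries + fieldCount[file]) {
            firstSeries += fieldCount[file];
            file++;
          }
          int field = series - firstSeries;
          System.out.println("series " + series +
            " -> file " + file + ", field " + field);
        }
        // series 0 -> file 0, field 0 ... series 4 -> file 1, field 2
      }
    }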
diff --git a/src/test/java/com/glencoesoftware/bioformats2raw/test/ZarrTest.java b/src/test/java/com/glencoesoftware/bioformats2raw/test/ZarrTest.java
--- a/src/test/java/com/glencoesoftware/bioformats2raw/test/ZarrTest.java
+++ b/src/test/java/com/glencoesoftware/bioformats2raw/test/ZarrTest.java
@@ -262,6 +262,6 @@ public void testMultiscalesMetadata() throws Exception {
     Map<String, Object> multiscale = multiscales.get(0);
-    assertEquals("0.2", multiscale.get("version"));
+    assertEquals("0.4", multiscale.get("version"));
     List<Map<String, Object>> datasets =
             (List<Map<String, Object>>) multiscale.get("datasets");
     assertTrue(datasets.size() > 0);
@@ -271,6 +271,23 @@ public void testMultiscalesMetadata() throws Exception {
 
     List<Map<String, Object>> axes =
             (List<Map<String, Object>>) multiscale.get("axes");
     checkAxes(axes, "TCZYX");
+
+    for (int r=0; r<datasets.size(); r++) {
+      Map<String, Object> dataset = datasets.get(r);
+      List<Map<String, Object>> transforms =
+        (List<Map<String, Object>>) dataset.get("coordinateTransformations");
+      assertEquals(1, transforms.size());
+      Map<String, Object> scale = transforms.get(0);
+      assertEquals("scale", scale.get("type"));
+      List<Double> axisValues = (List<Double>) scale.get("scale");
+
+      assertEquals(5, axisValues.size());
+      double factor = Math.pow(2, r);
+      // X and Y are the only dimensions that are downsampled,
+      // so the TCZ physical scales remain the same across all resolutions
+      assertEquals(axisValues, Arrays.asList(new Double[] {
+        1.0, 1.0, 1.0, factor, factor}));
+    }
   }
 
@@ -327,6 +344,47 @@ public void testSetOriginalDimensionOrder() throws Exception {
     checkAxes(axes, "TZCYX");
   }
 
+  /**
+   * Test that physical sizes are saved in axes/transformations metadata.
+   */
+  @Test
+  public void testPhysicalSizes() throws Exception {
+    input = fake("physicalSizeX", "1.0mm",
+                 "physicalSizeY", "0.5mm",
+                 "physicalSizeZ", "2cm");
+    assertTool();
+
+    ZarrGroup z = ZarrGroup.open(output.resolve("0").toString());
+    List<Map<String, Object>> multiscales = (List<Map<String, Object>>)
+            z.getAttributes().get("multiscales");
+    assertEquals(1, multiscales.size());
+    Map<String, Object> multiscale = multiscales.get(0);
+    List<Map<String, Object>> axes =
+      (List<Map<String, Object>>) multiscale.get("axes");
+    checkAxes(axes, "TCZYX");
+
+    List<Map<String, Object>> datasets =
+      (List<Map<String, Object>>) multiscale.get("datasets");
+    assertEquals(2, datasets.size());
+
+    for (int r=0; r<datasets.size(); r++) {
+      Map<String, Object> dataset = datasets.get(r);
+      List<Map<String, Object>> transforms =
+        (List<Map<String, Object>>) dataset.get("coordinateTransformations");
+      assertEquals(1, transforms.size());
+      Map<String, Object> scale = transforms.get(0);
+      assertEquals("scale", scale.get("type"));
+      List<Double> axisValues = (List<Double>) scale.get("scale");
+
+      assertEquals(5, axisValues.size());
+      double factor = Math.pow(2, r);
+      // X and Y are the only dimensions that are downsampled,
+      // so the TCZ physical scales remain the same across all resolutions
+      assertEquals(axisValues, Arrays.asList(new Double[] {
+        1.0, 1.0, 2.0, 0.5 * factor, factor}));
+    }
+  }
+
   /**
    * Test using a different tile size from the default (1024).
    */
@@ -787,7 +845,7 @@ public void testDownsampleTypes(Downsampling type) throws IOException {
       (List<Map<String, Object>>) z.getAttributes().get("multiscales");
     assertEquals(1, multiscales.size());
     Map<String, Object> multiscale = multiscales.get(0);
-    assertEquals("0.2", multiscale.get("version"));
+    assertEquals("0.4", multiscale.get("version"));
 
     Map<String, String> metadata =
       (Map<String, String>) multiscale.get("metadata");
@@ -846,6 +904,19 @@ public void testNoHCSOption() throws Exception {
     assertEquals(0, ome.sizeOfPlateList());
   }
 
+  /**
+   * Make sure conversion fails when multiple plates are present.
+   */
+  @Test
+  public void testMultiPlates() throws Exception {
+    input = fake(
+      "plates", "2", "plateAcqs", "1",
+      "plateRows", "2", "plateCols", "3", "fields", "2");
+    assertThrows(ExecutionException.class, () -> {
+      assertTool();
+    });
+  }
+
   /**
    * Convert a plate with default options.
    * The output should be compliant with OME Zarr HCS.