Merge branch 'master' of github.com:glencoesoftware/bioformats2raw into jna-bump
melissalinkert committed May 5, 2022
2 parents 1e47087 + 1959ddd commit 4143ee3
Showing 6 changed files with 462 additions and 24 deletions.
2 changes: 1 addition & 1 deletion README.md
@@ -65,7 +65,7 @@ If the input file has multiple series, a subset of the series can be converted b

bioformats2raw /path/to/file.scn /path/to/zarr-pyramid --series 0,2,3,4

By default, three additional readers (MiraxReader, PyramidTiffReader, and BioTekReader) are added to the beginning of Bio-Formats' list of reader classes.
By default, four additional readers (MiraxReader, PyramidTiffReader, BioTekReader, and ND2PlateReader) are added to the beginning of Bio-Formats' list of reader classes.
These readers are considered to be experimental and as a result only a limited range of input data is supported.

Any of these readers can be excluded with the `--extra-readers` option:
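For example, a hypothetical invocation that keeps only two of the extra readers might look like the following (the fully qualified class names and the comma-separated list syntax are assumptions; see `bioformats2raw --help` for the exact form):

bioformats2raw /path/to/file.scn /path/to/zarr-pyramid --extra-readers com.glencoesoftware.bioformats2raw.PyramidTiffReader,com.glencoesoftware.bioformats2raw.BioTekReader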
8 changes: 6 additions & 2 deletions build.gradle
@@ -31,7 +31,11 @@ repositories {
url 'https://artifacts.glencoesoftware.com/artifactory/scijava-thirdparty'
}
maven {
url 'https://nexus.senbox.net/nexus/content/groups/public'
url 'https://artifacts.glencoesoftware.com/artifactory/jzarr-releases'
}
maven {
name 'Unidata'
url 'https://artifacts.glencoesoftware.com/artifactory/unidata-releases'
}
}

@@ -42,7 +46,7 @@ configurations.all {

dependencies {
implementation 'net.java.dev.jna:jna:5.10.0'
implementation 'ome:formats-gpl:6.8.0'
implementation 'ome:formats-gpl:6.9.1'
implementation 'info.picocli:picocli:4.6.1'
implementation 'com.univocity:univocity-parsers:2.8.4'
implementation 'com.bc.zarr:jzarr:0.3.3-gs-SNAPSHOT'
122 changes: 104 additions & 18 deletions src/main/java/com/glencoesoftware/bioformats2raw/Converter.java
@@ -19,7 +19,6 @@
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
@@ -56,6 +55,7 @@
import loci.formats.ome.OMEXMLMetadata;
import loci.formats.services.OMEXMLService;
import loci.formats.services.OMEXMLServiceImpl;
import ome.units.quantity.Quantity;
import ome.xml.meta.OMEXMLMetadataRoot;
import ome.xml.model.enums.DimensionOrder;
import ome.xml.model.enums.EnumerationException;
@@ -239,7 +239,8 @@ public class Converter implements Callable<Void> {
"(default: ${DEFAULT-VALUE})"
)
private volatile Class<?>[] extraReaders = new Class[] {
PyramidTiffReader.class, MiraxReader.class, BioTekReader.class
PyramidTiffReader.class, MiraxReader.class,
BioTekReader.class, ND2PlateReader.class
};

@Option(
@@ -562,6 +563,12 @@ public void convert()

if (!noHCS) {
noHCS = !hasValidPlate(meta);
int plateCount = meta.getPlateCount();
if (!noHCS && plateCount > 1) {
throw new IllegalArgumentException(
"Found " + plateCount + " plates; only one can be converted. " +
"Use --no-hcs to as a work-around.");
}
}
else {
((OMEXMLMetadata) meta).resolveReferences();
@@ -1522,35 +1529,84 @@ private void setSeriesLevelMetadata(int series, int resolutions)
multiscale.put("type", downsampling.getName());
}
multiscale.put("metadata", metadata);
multiscale.put("version", nested ? "0.2" : "0.1");
multiscale.put("version", nested ? "0.4" : "0.1");
multiscales.add(multiscale);
List<Map<String, String>> datasets = new ArrayList<Map<String, String>>();

IFormatReader v = null;
IMetadata meta = null;
String axisOrder = null;
try {
v = readers.take();
meta = (IMetadata) v.getMetadataStore();

if (dimensionOrder != null) {
axisOrder = dimensionOrder.toString();
}
else {
axisOrder = v.getDimensionOrder();
}
}
finally {
readers.put(v);
}

List<Map<String, Object>> datasets = new ArrayList<Map<String, Object>>();
for (int r = 0; r < resolutions; r++) {
resolutionString = String.format(
scaleFormatString, getScaleFormatStringArgs(series, r));
String lastPath = resolutionString.substring(
resolutionString.lastIndexOf('/') + 1);
datasets.add(Collections.singletonMap("path", lastPath));
}
multiscale.put("datasets", datasets);

String axisOrder = null;
if (dimensionOrder != null) {
axisOrder = dimensionOrder.toString();
}
else {
IFormatReader reader = readers.take();
try {
axisOrder = reader.getDimensionOrder();
}
finally {
readers.put(reader);
List<Map<String, Object>> transforms =
new ArrayList<Map<String, Object>>();
Map<String, Object> scale = new HashMap<String, Object>();
scale.put("type", "scale");
List<Double> axisValues = new ArrayList<Double>();
double resolutionScale = Math.pow(PYRAMID_SCALE, r);
for (int i=axisOrder.length()-1; i>=0; i--) {
Quantity axisScale = getScale(meta, series, axisOrder, i);
String axisChar = axisOrder.substring(i, i + 1).toLowerCase();

if (axisScale != null) {
// if physical dimension information is defined,
// use it directly for dimensions that aren't scaled (Z and T)
// increase it according to the resolution number for dimensions that
// are scaled (X and Y)
if (axisChar.equals("x") || axisChar.equals("y")) {
axisValues.add(axisScale.value().doubleValue() * resolutionScale);
}
else {
axisValues.add(axisScale.value().doubleValue());
}
}
else {
// if physical dimension information is not defined,
// store the scale factor for the dimension in the current resolution,
// i.e. 1.0 for everything other than X and Y
if (axisChar.equals("x") || axisChar.equals("y")) {
axisValues.add(resolutionScale);
}
else {
axisValues.add(1.0);
}
}
}
scale.put("scale", axisValues);

transforms.add(scale);

Map<String, Object> dataset = new HashMap<String, Object>();
dataset.put("path", lastPath);
dataset.put("coordinateTransformations", transforms);
datasets.add(dataset);
}
multiscale.put("datasets", datasets);

List<Map<String, String>> axes = new ArrayList<Map<String, String>>();
for (int i=axisOrder.length()-1; i>=0; i--) {
String axis = axisOrder.substring(i, i + 1).toLowerCase();
String type = "space";
Quantity scale = getScale(meta, series, axisOrder, i);
if (axis.equals("t")) {
type = "time";
}
@@ -1560,6 +1616,9 @@ else if (axis.equals("c")) {
Map<String, String> thisAxis = new HashMap<String, String>();
thisAxis.put("name", axis);
thisAxis.put("type", type);
if (scale != null) {
thisAxis.put("unit", scale.unit().getSymbol());
}
axes.add(thisAxis);
}
multiscale.put("axes", axes);
@@ -1573,6 +1632,33 @@ else if (axis.equals("c")) {
LOGGER.debug(" finished writing subgroup attributes");
}

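/**
 * Get the physical scale for one axis of an image series: the physical
 * pixel size for the X, Y, and Z axes, the time increment for the T axis,
 * or null if no calibration is defined for that axis.
 *
 * @param meta metadata store containing physical size information
 * @param series series index in the input file
 * @param axisOrder dimension order string, e.g. "XYCZT"
 * @param axis index into axisOrder of the axis to look up
 * @return physical scale as a Quantity, or null if undefined
 */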
private Quantity getScale(
IMetadata meta, int series, String axisOrder, int axis)
{
if (meta == null) {
return null;
}
int seriesIndex = seriesList.indexOf(series);

if (seriesIndex < 0 || seriesIndex >= meta.getImageCount()) {
return null;
}

String axisChar = axisOrder.substring(axis, axis + 1).toLowerCase();
switch (axisChar.charAt(0)) {
case 'x':
return meta.getPixelsPhysicalSizeX(seriesIndex);
case 'y':
return meta.getPixelsPhysicalSizeY(seriesIndex);
case 'z':
return meta.getPixelsPhysicalSizeZ(seriesIndex);
case 't':
return meta.getPixelsTimeIncrement(seriesIndex);
default:
return null;
}
}

/**
* Takes exception from asynchronous execution and re-throw known exception
* types. If the end is reached with no known exception detected, either the
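For orientation, here is a minimal sketch of the multiscales group attributes (the series group's `.zattrs`) that the updated setSeriesLevelMetadata would write. The values are illustrative only: this assumes the default nested layout (hence version "0.4"), the default resolution path format, a downsampling factor of 2 per level, dimension order XYZCT, a physical pixel size of 0.5 µm in X and Y, no physical calibration for Z or T, and two resolution levels; other fields such as the downsampling "metadata" block are omitted.

{
  "multiscales": [
    {
      "version": "0.4",
      "axes": [
        {"name": "t", "type": "time"},
        {"name": "c", "type": "channel"},
        {"name": "z", "type": "space"},
        {"name": "y", "type": "space", "unit": "µm"},
        {"name": "x", "type": "space", "unit": "µm"}
      ],
      "datasets": [
        {"path": "0", "coordinateTransformations": [{"type": "scale", "scale": [1.0, 1.0, 1.0, 0.5, 0.5]}]},
        {"path": "1", "coordinateTransformations": [{"type": "scale", "scale": [1.0, 1.0, 1.0, 1.0, 1.0]}]}
      ]
    }
  ]
}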
@@ -273,7 +273,11 @@ public byte[] openBytes(int no, byte[] buf, int x, int y, int w, int h)
TilePointer thisOffset = lookupTile(index, col, row, no / MAX_CHANNELS);
if (thisOffset != null) {
int channel = no % MAX_CHANNELS;
if (fluorescence && getSizeC() != 2) {
// 2 channel JPEG data needs to have the channel index inverted
// 2 channel JPEG-2000 data should not have the channel index inverted
if (fluorescence &&
(getSizeC() != 2 || format.get(index).equals("JPEG")))
{
channel = MAX_CHANNELS - channel - 1;
}
