
Commit 4143ee3

Merge branch 'master' of github.com:glencoesoftware/bioformats2raw into jna-bump
2 parents: 1e47087 + 1959ddd

6 files changed: +462 -24 lines

README.md  (+1 -1)

@@ -65,7 +65,7 @@ If the input file has multiple series, a subset of the series can be converted b
 
 bioformats2raw /path/to/file.scn /path/to/zarr-pyramid --series 0,2,3,4
 
-By default, three additional readers (MiraxReader, PyramidTiffReader, and BioTekReader) are added to the beginning of Bio-Formats' list of reader classes.
+By default, four additional readers (MiraxReader, PyramidTiffReader, BioTekReader, and ND2PlateReader) are added to the beginning of Bio-Formats' list of reader classes.
 These readers are considered to be experimental and as a result only a limited range of input data is supported.
 
 Any of these readers can be excluded with the `--extra-readers` option:
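For context, the README goes on to show how the option is used; the hunk above cuts off before that example. A plausible invocation, assuming `--extra-readers` accepts fully qualified reader class names (the class names are the ones listed above; the input path is hypothetical), would keep only MiraxReader among the extra readers:

    bioformats2raw /path/to/file.mrxs /path/to/zarr-pyramid --extra-readers com.glencoesoftware.bioformats2raw.MiraxReader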

build.gradle  (+6 -2)

@@ -31,7 +31,11 @@ repositories {
     url 'https://artifacts.glencoesoftware.com/artifactory/scijava-thirdparty'
   }
   maven {
-    url 'https://nexus.senbox.net/nexus/content/groups/public'
+    url 'https://artifacts.glencoesoftware.com/artifactory/jzarr-releases'
+  }
+  maven {
+    name 'Unidata'
+    url 'https://artifacts.glencoesoftware.com/artifactory/unidata-releases'
   }
 }
 
@@ -42,7 +46,7 @@ configurations.all {
 
 dependencies {
   implementation 'net.java.dev.jna:jna:5.10.0'
-  implementation 'ome:formats-gpl:6.8.0'
+  implementation 'ome:formats-gpl:6.9.1'
   implementation 'info.picocli:picocli:4.6.1'
   implementation 'com.univocity:univocity-parsers:2.8.4'
   implementation 'com.bc.zarr:jzarr:0.3.3-gs-SNAPSHOT'
src/main/java/com/glencoesoftware/bioformats2raw/Converter.java  (+104 -18)

@@ -19,7 +19,6 @@
 import java.nio.file.Path;
 import java.nio.file.Paths;
 import java.util.ArrayList;
-import java.util.Collections;
 import java.util.Comparator;
 import java.util.HashMap;
 import java.util.List;
@@ -56,6 +55,7 @@
 import loci.formats.ome.OMEXMLMetadata;
 import loci.formats.services.OMEXMLService;
 import loci.formats.services.OMEXMLServiceImpl;
+import ome.units.quantity.Quantity;
 import ome.xml.meta.OMEXMLMetadataRoot;
 import ome.xml.model.enums.DimensionOrder;
 import ome.xml.model.enums.EnumerationException;
@@ -239,7 +239,8 @@ public class Converter implements Callable<Void> {
     "(default: ${DEFAULT-VALUE})"
   )
   private volatile Class<?>[] extraReaders = new Class[] {
-    PyramidTiffReader.class, MiraxReader.class, BioTekReader.class
+    PyramidTiffReader.class, MiraxReader.class,
+    BioTekReader.class, ND2PlateReader.class
   };
 
   @Option(
@@ -562,6 +563,12 @@ public void convert()
 
     if (!noHCS) {
       noHCS = !hasValidPlate(meta);
+      int plateCount = meta.getPlateCount();
+      if (!noHCS && plateCount > 1) {
+        throw new IllegalArgumentException(
+          "Found " + plateCount + " plates; only one can be converted. " +
+          "Use --no-hcs as a workaround.");
+      }
     }
     else {
      ((OMEXMLMetadata) meta).resolveReferences();
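The new check rejects inputs that describe more than one plate unless HCS layout is disabled. A plausible workaround invocation, following the error message above (the input path is hypothetical):

    bioformats2raw /path/to/multi-plate-file /path/to/zarr-pyramid --no-hcs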
@@ -1522,35 +1529,84 @@ private void setSeriesLevelMetadata(int series, int resolutions)
       multiscale.put("type", downsampling.getName());
     }
     multiscale.put("metadata", metadata);
-    multiscale.put("version", nested ? "0.2" : "0.1");
+    multiscale.put("version", nested ? "0.4" : "0.1");
     multiscales.add(multiscale);
-    List<Map<String, String>> datasets = new ArrayList<Map<String, String>>();
+
+    IFormatReader v = null;
+    IMetadata meta = null;
+    String axisOrder = null;
+    try {
+      v = readers.take();
+      meta = (IMetadata) v.getMetadataStore();
+
+      if (dimensionOrder != null) {
+        axisOrder = dimensionOrder.toString();
+      }
+      else {
+        axisOrder = v.getDimensionOrder();
+      }
+    }
+    finally {
+      readers.put(v);
+    }
+
+    List<Map<String, Object>> datasets = new ArrayList<Map<String, Object>>();
     for (int r = 0; r < resolutions; r++) {
       resolutionString = String.format(
         scaleFormatString, getScaleFormatStringArgs(series, r));
       String lastPath = resolutionString.substring(
         resolutionString.lastIndexOf('/') + 1);
-      datasets.add(Collections.singletonMap("path", lastPath));
-    }
-    multiscale.put("datasets", datasets);
 
-    String axisOrder = null;
-    if (dimensionOrder != null) {
-      axisOrder = dimensionOrder.toString();
-    }
-    else {
-      IFormatReader reader = readers.take();
-      try {
-        axisOrder = reader.getDimensionOrder();
-      }
-      finally {
-        readers.put(reader);
+      List<Map<String, Object>> transforms =
+        new ArrayList<Map<String, Object>>();
+      Map<String, Object> scale = new HashMap<String, Object>();
+      scale.put("type", "scale");
+      List<Double> axisValues = new ArrayList<Double>();
+      double resolutionScale = Math.pow(PYRAMID_SCALE, r);
+      for (int i=axisOrder.length()-1; i>=0; i--) {
+        Quantity axisScale = getScale(meta, series, axisOrder, i);
+        String axisChar = axisOrder.substring(i, i + 1).toLowerCase();
+
+        if (axisScale != null) {
+          // if physical dimension information is defined,
+          // use it directly for dimensions that aren't scaled (Z and T)
+          // increase it according to the resolution number for dimensions that
+          // are scaled (X and Y)
+          if (axisChar.equals("x") || axisChar.equals("y")) {
+            axisValues.add(axisScale.value().doubleValue() * resolutionScale);
+          }
+          else {
+            axisValues.add(axisScale.value().doubleValue());
+          }
+        }
+        else {
+          // if physical dimension information is not defined,
+          // store the scale factor for the dimension in the current resolution,
+          // i.e. 1.0 for everything other than X and Y
+          if (axisChar.equals("x") || axisChar.equals("y")) {
+            axisValues.add(resolutionScale);
+          }
+          else {
+            axisValues.add(1.0);
+          }
+        }
       }
+      scale.put("scale", axisValues);
+
+      transforms.add(scale);
+
+      Map<String, Object> dataset = new HashMap<String, Object>();
+      dataset.put("path", lastPath);
+      dataset.put("coordinateTransformations", transforms);
+      datasets.add(dataset);
     }
+    multiscale.put("datasets", datasets);
+
     List<Map<String, String>> axes = new ArrayList<Map<String, String>>();
     for (int i=axisOrder.length()-1; i>=0; i--) {
       String axis = axisOrder.substring(i, i + 1).toLowerCase();
       String type = "space";
+      Quantity scale = getScale(meta, series, axisOrder, i);
       if (axis.equals("t")) {
         type = "time";
       }
@@ -1560,6 +1616,9 @@ else if (axis.equals("c")) {
       Map<String, String> thisAxis = new HashMap<String, String>();
       thisAxis.put("name", axis);
       thisAxis.put("type", type);
+      if (scale != null) {
+        thisAxis.put("unit", scale.unit().getSymbol());
+      }
       axes.add(thisAxis);
     }
     multiscale.put("axes", axes);
@@ -1573,6 +1632,33 @@ else if (axis.equals("c")) {
     LOGGER.debug(" finished writing subgroup attributes");
   }
 
+  private Quantity getScale(
+    IMetadata meta, int series, String axisOrder, int axis)
+  {
+    if (meta == null) {
+      return null;
+    }
+    int seriesIndex = seriesList.indexOf(series);
+
+    if (seriesIndex < 0 || seriesIndex >= meta.getImageCount()) {
+      return null;
+    }
+
+    String axisChar = axisOrder.substring(axis, axis + 1).toLowerCase();
+    switch (axisChar.charAt(0)) {
+      case 'x':
+        return meta.getPixelsPhysicalSizeX(seriesIndex);
+      case 'y':
+        return meta.getPixelsPhysicalSizeY(seriesIndex);
+      case 'z':
+        return meta.getPixelsPhysicalSizeZ(seriesIndex);
+      case 't':
+        return meta.getPixelsTimeIncrement(seriesIndex);
+      default:
+        return null;
+    }
+  }
+
   /**
    * Takes exception from asynchronous execution and re-throw known exception
    * types. If the end is reached with no known exception detected, either the
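The coordinateTransformations logic above reduces to one rule per axis: X and Y are multiplied by PYRAMID_SCALE^resolution (starting from the physical pixel size when it is known), while all other axes keep their physical value or fall back to 1.0. A minimal standalone sketch of that rule, not part of the commit, assuming PYRAMID_SCALE is 2 and an illustrative t/c/z/y/x axis order with 0.25 micron pixels:

import java.util.ArrayList;
import java.util.List;

public class ScaleSketch {
  // assumed downsampling factor between consecutive resolutions (PYRAMID_SCALE in Converter)
  static final int PYRAMID_SCALE = 2;

  // physicalSize is null when the input carries no physical calibration for the axis
  static double axisValue(char axis, Double physicalSize, int resolution) {
    double resolutionScale = Math.pow(PYRAMID_SCALE, resolution);
    boolean scaledAxis = axis == 'x' || axis == 'y';
    if (physicalSize != null) {
      return scaledAxis ? physicalSize * resolutionScale : physicalSize;
    }
    return scaledAxis ? resolutionScale : 1.0;
  }

  public static void main(String[] args) {
    // assumed example: t, c, z, y, x axis order with 0.25 micron pixels in X and Y only
    char[] axes = {'t', 'c', 'z', 'y', 'x'};
    Double[] physical = {null, null, null, 0.25, 0.25};
    for (int r = 0; r < 3; r++) {
      List<Double> values = new ArrayList<Double>();
      for (int i = 0; i < axes.length; i++) {
        values.add(axisValue(axes[i], physical[i], r));
      }
      // prints [1.0, 1.0, 1.0, 0.25, 0.25], then [1.0, 1.0, 1.0, 0.5, 0.5],
      // then [1.0, 1.0, 1.0, 1.0, 1.0]
      System.out.println("resolution " + r + " scale = " + values);
    }
  }
}

When getScale returns a calibrated Quantity, the matching axis entry in the axes list also gains a "unit" key taken from the unit symbol (scale.unit().getSymbol() in the hunk above).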

src/main/java/com/glencoesoftware/bioformats2raw/MiraxReader.java  (+5 -1)

@@ -273,7 +273,11 @@ public byte[] openBytes(int no, byte[] buf, int x, int y, int w, int h)
     TilePointer thisOffset = lookupTile(index, col, row, no / MAX_CHANNELS);
     if (thisOffset != null) {
       int channel = no % MAX_CHANNELS;
-      if (fluorescence && getSizeC() != 2) {
+      // 2 channel JPEG data needs to have the channel index inverted
+      // 2 channel JPEG-2000 data should not have the channel index inverted
+      if (fluorescence &&
+        (getSizeC() != 2 || format.get(index).equals("JPEG")))
+      {
         channel = MAX_CHANNELS - channel - 1;
       }
 
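To make the new predicate easier to scan, here is a minimal standalone sketch, not part of the commit, that mirrors the condition; the "JPEG-2000" string below is only an illustrative stand-in for any non-JPEG value returned by format.get(index):

public class MiraxChannelSketch {
  // mirrors the new check: invert the channel index unless the data is 2 channel non-JPEG
  static boolean shouldInvert(boolean fluorescence, int sizeC, String format) {
    return fluorescence && (sizeC != 2 || format.equals("JPEG"));
  }

  public static void main(String[] args) {
    System.out.println(shouldInvert(true, 2, "JPEG"));      // true: 2 channel JPEG is inverted
    System.out.println(shouldInvert(true, 2, "JPEG-2000")); // false: 2 channel JPEG-2000 is not
    System.out.println(shouldInvert(true, 3, "JPEG-2000")); // true: other channel counts invert as before
    System.out.println(shouldInvert(false, 2, "JPEG"));     // false: non-fluorescence data is never inverted
  }
}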