Skip to content

Commit

Permalink
Create a layer for each channel of NGFF-Zarr datasets (#6609)
Browse files Browse the repository at this point in the history
Co-authored-by: Florian M <[email protected]>
  • Loading branch information
frcroth and fm3 authored Nov 10, 2022
1 parent a2a524f commit 2fb50d6
Show file tree
Hide file tree
Showing 15 changed files with 90 additions and 39 deletions.
1 change: 1 addition & 0 deletions CHANGELOG.unreleased.md
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@ For upgrade instructions, please check the [migration guide](MIGRATIONS.released
- Button for switching organizations for Voxelytics workflows. [#6572](https://github.com/scalableminds/webknossos/pull/6572)
- Added ability to shuffle / set colors for a whole tree group. [#6586](https://github.com/scalableminds/webknossos/pull/6586)
- Annotation layers can now be removed. [#6593](https://github.com/scalableminds/webknossos/pull/6593)
- When adding remote Zarr datasets with multiple channels, channels are converted into layers. [#6609](https://github.com/scalableminds/webknossos/pull/6609)

### Changed
- The log viewer in the Voxelytics workflow reporting now uses a virtualized list. [#6579](https://github.com/scalableminds/webknossos/pull/6579)
Expand Down
2 changes: 1 addition & 1 deletion app/models/binary/explore/N5ArrayExplorer.scala
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@ class N5ArrayExplorer extends RemoteLayerExplorer {
elementClass <- n5Header.elementClass ?~> "failed to read element class from n5 header"
guessedAxisOrder = AxisOrder.asZyxFromRank(n5Header.rank)
boundingBox <- n5Header.boundingBox(guessedAxisOrder) ?~> "failed to read bounding box from zarr header. Make sure data is in (T/C)ZYX format"
magLocator = MagLocator(Vec3Int.ones, Some(remotePath.toString), credentials, Some(guessedAxisOrder))
magLocator = MagLocator(Vec3Int.ones, Some(remotePath.toString), credentials, Some(guessedAxisOrder), None)
layer: N5Layer = if (looksLikeSegmentationLayer(name, elementClass)) {
N5SegmentationLayer(name, boundingBox, elementClass, List(magLocator), largestSegmentId = None)
} else N5DataLayer(name, Category.color, boundingBox, elementClass, List(magLocator))
Expand Down
2 changes: 1 addition & 1 deletion app/models/binary/explore/N5MultiscalesExplorer.scala
Original file line number Diff line number Diff line change
Expand Up @@ -115,7 +115,7 @@ class N5MultiscalesExplorer extends RemoteLayerExplorer with FoxImplicits {
elementClass <- n5Header.elementClass ?~> s"failed to read element class from n5 header at $headerPath"
boundingBox <- n5Header.boundingBox(axisOrder) ?~> s"failed to read bounding box from n5 header at $headerPath"
} yield
MagWithAttributes(MagLocator(mag, Some(magPath.toString), credentials, Some(axisOrder)),
MagWithAttributes(MagLocator(mag, Some(magPath.toString), credentials, Some(axisOrder), None),
magPath,
elementClass,
boundingBox)
Expand Down
59 changes: 43 additions & 16 deletions app/models/binary/explore/NgffExplorer.scala
Original file line number Diff line number Diff line change
Expand Up @@ -25,36 +25,63 @@ class NgffExplorer extends RemoteLayerExplorer {
for {
zattrsPath <- Fox.successful(remotePath.resolve(NgffMetadata.FILENAME_DOT_ZATTRS))
ngffHeader <- parseJsonFromPath[NgffMetadata](zattrsPath) ?~> s"Failed to read OME NGFF header at $zattrsPath"
layers <- Fox.serialCombined(ngffHeader.multiscales)(layerFromNgffMultiscale(_, remotePath, credentials))

layerLists: List[List[(ZarrLayer, Vec3Double)]] <- Fox.serialCombined(ngffHeader.multiscales)(multiScale => {
for {
channelCount: Int <- getNgffMultiscaleChannelCount(multiScale, remotePath)
layers <- layersFromNgffMultiscale(multiScale, remotePath, credentials, channelCount)
} yield layers
})
layers: List[(ZarrLayer, Vec3Double)] = layerLists.flatten
} yield layers

private def layerFromNgffMultiscale(multiscale: NgffMultiscalesItem,
remotePath: Path,
credentials: Option[FileSystemCredentials]): Fox[(ZarrLayer, Vec3Double)] =
/** Determines how many channels this multiscale has, by reading the zarr header of its
  * first (highest-resolution) dataset and looking up the size of the channel axis.
  * Used so that one webknossos layer can be created per channel.
  *
  * @param multiscale the NGFF multiscales entry whose channel count is requested
  * @param remotePath base path of the NGFF dataset; dataset paths are resolved relative to it
  * @return channel count wrapped in Fox; fails if there are no datasets or the zarr header cannot be read
  */
private def getNgffMultiscaleChannelCount(multiscale: NgffMultiscalesItem, remotePath: Path): Fox[Int] =
  for {
    firstDataset <- multiscale.datasets.headOption.toFox
    magPath = remotePath.resolve(firstDataset.path)
    zarrayPath = magPath.resolve(ZarrHeader.FILENAME_DOT_ZARRAY)
    zarrHeader <- parseJsonFromPath[ZarrHeader](zarrayPath) ?~> s"failed to read zarr header at $zarrayPath"
    axisOrder <- extractAxisOrder(multiscale.axes) ?~> "Could not extract XYZ axis order mapping. Does the data have x, y and z axes, stated in multiscales metadata?"
    // The NGFF spec does not require a channel axis. Treat datasets without one as
    // single-channel instead of failing the whole exploration (axisOrder.c.toFox
    // would yield an empty Fox and abort layer creation for valid xyz-only data).
  } yield axisOrder.c.map(channelAxisIndex => zarrHeader.shape(channelAxisIndex)).getOrElse(1)

private def layersFromNgffMultiscale(multiscale: NgffMultiscalesItem,
remotePath: Path,
credentials: Option[FileSystemCredentials],
channelCount: Int): Fox[List[(ZarrLayer, Vec3Double)]] =
for {
axisOrder <- extractAxisOrder(multiscale.axes) ?~> "Could not extract XYZ axis order mapping. Does the data have x, y and z axes, stated in multiscales metadata?"
axisUnitFactors <- extractAxisUnitFactors(multiscale.axes, axisOrder) ?~> "Could not extract axis unit-to-nm factors"
voxelSizeInAxisUnits <- extractVoxelSizeInAxisUnits(
multiscale.datasets.map(_.coordinateTransformations),
axisOrder) ?~> "Could not extract voxel size from scale transforms"
magsWithAttributes <- Fox.serialCombined(multiscale.datasets)(d =>
zarrMagFromNgffDataset(d, remotePath, voxelSizeInAxisUnits, axisOrder, credentials))
_ <- bool2Fox(magsWithAttributes.nonEmpty) ?~> "zero mags in layer"
elementClass <- elementClassFromMags(magsWithAttributes) ?~> "Could not extract element class from mags"
boundingBox = boundingBoxFromMags(magsWithAttributes)
voxelSizeNanometers = voxelSizeInAxisUnits * axisUnitFactors
nameFromPath <- guessNameFromPath(remotePath)
name = multiscale.name.getOrElse(nameFromPath)
voxelSizeNanometers = voxelSizeInAxisUnits * axisUnitFactors
layer: ZarrLayer = if (looksLikeSegmentationLayer(name, elementClass)) {
ZarrSegmentationLayer(name, boundingBox, elementClass, magsWithAttributes.map(_.mag), largestSegmentId = None)
} else ZarrDataLayer(name, Category.color, boundingBox, elementClass, magsWithAttributes.map(_.mag))
} yield (layer, voxelSizeNanometers)
layerTuples <- Fox.serialCombined((0 until channelCount).toList)({ channelIndex: Int =>
for {
magsWithAttributes <- Fox.serialCombined(multiscale.datasets)(d =>
zarrMagFromNgffDataset(d, remotePath, voxelSizeInAxisUnits, axisOrder, credentials, Some(channelIndex)))
_ <- bool2Fox(magsWithAttributes.nonEmpty) ?~> "zero mags in layer"
elementClass <- elementClassFromMags(magsWithAttributes) ?~> "Could not extract element class from mags"
boundingBox = boundingBoxFromMags(magsWithAttributes)
layer: ZarrLayer = if (looksLikeSegmentationLayer(name, elementClass)) {
ZarrSegmentationLayer(name,
boundingBox,
elementClass,
magsWithAttributes.map(_.mag),
largestSegmentId = None)
} else ZarrDataLayer(name, Category.color, boundingBox, elementClass, magsWithAttributes.map(_.mag))
} yield (layer, voxelSizeNanometers)
})
} yield layerTuples

private def zarrMagFromNgffDataset(ngffDataset: NgffDataset,
layerPath: Path,
voxelSizeInAxisUnits: Vec3Double,
axisOrder: AxisOrder,
credentials: Option[FileSystemCredentials]): Fox[MagWithAttributes] =
credentials: Option[FileSystemCredentials],
channelIndex: Option[Int]): Fox[MagWithAttributes] =
for {
mag <- magFromTransforms(ngffDataset.coordinateTransformations, voxelSizeInAxisUnits, axisOrder) ?~> "Could not extract mag from scale transforms"
magPath = layerPath.resolve(ngffDataset.path)
Expand All @@ -63,7 +90,7 @@ class NgffExplorer extends RemoteLayerExplorer {
elementClass <- zarrHeader.elementClass ?~> s"failed to read element class from zarr header at $zarrayPath"
boundingBox <- zarrHeader.boundingBox(axisOrder) ?~> s"failed to read bounding box from zarr header at $zarrayPath"
} yield
MagWithAttributes(MagLocator(mag, Some(magPath.toString), credentials, Some(axisOrder)),
MagWithAttributes(MagLocator(mag, Some(magPath.toString), credentials, Some(axisOrder), channelIndex),
magPath,
elementClass,
boundingBox)
Expand Down
2 changes: 1 addition & 1 deletion app/models/binary/explore/ZarrArrayExplorer.scala
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,7 @@ class ZarrArrayExplorer extends RemoteLayerExplorer {
elementClass <- zarrHeader.elementClass ?~> "failed to read element class from zarr header"
guessedAxisOrder = AxisOrder.asZyxFromRank(zarrHeader.rank)
boundingBox <- zarrHeader.boundingBox(guessedAxisOrder) ?~> "failed to read bounding box from zarr header. Make sure data is in (T/C)ZYX format"
magLocator = MagLocator(Vec3Int.ones, Some(remotePath.toString), credentials, Some(guessedAxisOrder))
magLocator = MagLocator(Vec3Int.ones, Some(remotePath.toString), credentials, Some(guessedAxisOrder), None)
layer: ZarrLayer = if (looksLikeSegmentationLayer(name, elementClass)) {
ZarrSegmentationLayer(name, boundingBox, elementClass, List(magLocator), largestSegmentId = None)
} else ZarrDataLayer(name, Category.color, boundingBox, elementClass, List(magLocator))
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -117,15 +117,15 @@ class ZarrStreamingController @Inject()(
d.category,
d.boundingBox,
d.elementClass,
d.resolutions.map(x => MagLocator(x, None, None, Some(AxisOrder.cxyz))),
d.resolutions.map(x => MagLocator(x, None, None, Some(AxisOrder.cxyz), None)),
numChannels = Some(if (d.elementClass == ElementClass.uint24) 3 else 1)
)
case s: WKWSegmentationLayer =>
ZarrSegmentationLayer(
s.name,
s.boundingBox,
s.elementClass,
s.resolutions.map(x => MagLocator(x, None, None, Some(AxisOrder.cxyz))),
s.resolutions.map(x => MagLocator(x, None, None, Some(AxisOrder.cxyz), None)),
mappings = s.mappings,
largestSegmentId = s.largestSegmentId,
numChannels = Some(if (s.elementClass == ElementClass.uint24) 3 else 1)
Expand All @@ -136,15 +136,15 @@ class ZarrStreamingController @Inject()(
z.category,
z.boundingBox,
z.elementClass,
z.resolutions.map(x => MagLocator(x, None, None, Some(AxisOrder.cxyz))),
z.resolutions.map(x => MagLocator(x, None, None, Some(AxisOrder.cxyz), None)),
numChannels = Some(if (z.elementClass == ElementClass.uint24) 3 else 1)
)
case zs: ZarrSegmentationLayer =>
ZarrSegmentationLayer(
zs.name,
zs.boundingBox,
zs.elementClass,
zs.resolutions.map(x => MagLocator(x, None, None, Some(AxisOrder.cxyz))),
zs.resolutions.map(x => MagLocator(x, None, None, Some(AxisOrder.cxyz), None)),
mappings = zs.mappings,
largestSegmentId = zs.largestSegmentId,
numChannels = Some(if (zs.elementClass == ElementClass.uint24) 3 else 1)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,8 @@ import java.net.URI
case class MagLocator(mag: Vec3Int,
path: Option[String],
credentials: Option[FileSystemCredentials],
axisOrder: Option[AxisOrder]) {
axisOrder: Option[AxisOrder],
channelIndex: Option[Int]) {

lazy val pathWithFallback: String = path.getOrElse(mag.toMagLiteral(allowScalar = true))
private lazy val uri: URI = new URI(pathWithFallback)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -46,7 +46,8 @@ class N5BucketProvider(layer: N5Layer) extends BucketProvider with LazyLogging w
magPathOpt match {
case None => Empty
case Some(magPath) =>
tryo(onError = e => logError(e))(N5Array.open(magPath, n5Mag.axisOrder)).map(new N5CubeHandle(_))
tryo(onError = e => logError(e))(N5Array.open(magPath, n5Mag.axisOrder, n5Mag.channelIndex))
.map(new N5CubeHandle(_))
}
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,6 @@ trait N5Layer extends DataLayer {
def lengthOfUnderlyingCubes(resolution: Vec3Int): Int = Int.MaxValue // Prevents the wkw-shard-specific handle caching

def numChannels: Option[Int] = Some(if (elementClass == ElementClass.uint24) 3 else 1)

}

case class N5DataLayer(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -45,7 +45,8 @@ class ZarrBucketProvider(layer: ZarrLayer) extends BucketProvider with LazyLoggi
magPathOpt match {
case None => Empty
case Some(magPath) =>
tryo(onError = e => logError(e))(ZarrArray.open(magPath, zarrMag.axisOrder)).map(new ZarrCubeHandle(_))
tryo(onError = e => logError(e))(ZarrArray.open(magPath, zarrMag.axisOrder, zarrMag.channelIndex))
.map(new ZarrCubeHandle(_))
}
}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,12 @@ import play.api.libs.json.{Json, OFormat}

case class AxisOrder(x: Int, y: Int, z: Int, c: Option[Int] = None, t: Option[Int] = None) {
def permutation(rank: Int): Array[Int] =
((0 until (rank - 3)).toList :+ x :+ y :+ z).toArray
c match {
case Some(channel) =>
((0 until (rank - 4)).toList :+ channel :+ x :+ y :+ z).toArray
case None =>
((0 until (rank - 3)).toList :+ x :+ y :+ z).toArray
}

def inversePermutation(rank: Int): Array[Int] = {
val permutationMutable: Array[Int] = Array.fill(rank)(0)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,11 @@ import java.nio.ByteOrder
import java.util
import scala.concurrent.{ExecutionContext, Future}

class DatasetArray(relativePath: DatasetPath, store: FileSystemStore, header: DatasetHeader, axisOrder: AxisOrder)
class DatasetArray(relativePath: DatasetPath,
store: FileSystemStore,
header: DatasetHeader,
axisOrder: AxisOrder,
channelIndex: Option[Int])
extends LazyLogging {

protected val chunkReader: ChunkReader =
Expand All @@ -32,7 +36,11 @@ class DatasetArray(relativePath: DatasetPath, store: FileSystemStore, header: Da
@throws[InvalidRangeException]
def readBytesXYZ(shape: Vec3Int, offset: Vec3Int)(implicit ec: ExecutionContext): Fox[Array[Byte]] = {
val paddingDimensionsCount = header.rank - 3
val offsetArray = Array.fill(paddingDimensionsCount)(0) :+ offset.x :+ offset.y :+ offset.z
val offsetArray = channelIndex match {
case Some(c) if header.rank >= 4 =>
Array.fill(paddingDimensionsCount - 1)(0) :+ c :+ offset.x :+ offset.y :+ offset.z
case _ => Array.fill(paddingDimensionsCount)(0) :+ offset.x :+ offset.y :+ offset.z
}
val shapeArray = Array.fill(paddingDimensionsCount)(1) :+ shape.x :+ shape.y :+ shape.z

readBytes(shapeArray, offsetArray)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@ import java.nio.file.Path

object N5Array extends LazyLogging {
@throws[IOException]
def open(path: Path, axisOrderOpt: Option[AxisOrder]): N5Array = {
def open(path: Path, axisOrderOpt: Option[AxisOrder], channelIndex: Option[Int]): N5Array = {
val store = new FileSystemStore(path)
val rootPath = new DatasetPath("")
val headerPath = rootPath.resolve(N5Header.FILENAME_ATTRIBUTES_JSON)
Expand All @@ -37,12 +37,16 @@ object N5Array extends LazyLogging {
throw new IllegalArgumentException(
f"Chunk size of this N5 Array exceeds limit of ${DatasetArray.chunkSizeLimitBytes}, got ${header.bytesPerChunk}")
}
new N5Array(rootPath, store, header, axisOrderOpt.getOrElse(AxisOrder.asZyxFromRank(header.rank)))
new N5Array(rootPath, store, header, axisOrderOpt.getOrElse(AxisOrder.asZyxFromRank(header.rank)), channelIndex)
}
}

class N5Array(relativePath: DatasetPath, store: FileSystemStore, header: DatasetHeader, axisOrder: AxisOrder)
extends DatasetArray(relativePath, store, header, axisOrder)
class N5Array(relativePath: DatasetPath,
store: FileSystemStore,
header: DatasetHeader,
axisOrder: AxisOrder,
channelIndex: Option[Int])
extends DatasetArray(relativePath, store, header, axisOrder, channelIndex)
with LazyLogging {

override protected val chunkReader: ChunkReader =
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ import play.api.libs.json.{JsError, JsSuccess, Json}

object ZarrArray extends LazyLogging {
@throws[IOException]
def open(path: Path, axisOrderOpt: Option[AxisOrder]): ZarrArray = {
def open(path: Path, axisOrderOpt: Option[AxisOrder], channelIndex: Option[Int]): ZarrArray = {
val store = new FileSystemStore(path)
val rootPath = new DatasetPath("")
val headerPath = rootPath.resolve(ZarrHeader.FILENAME_DOT_ZARRAY)
Expand All @@ -36,13 +36,17 @@ object ZarrArray extends LazyLogging {
throw new IllegalArgumentException(
f"Chunk size of this Zarr Array exceeds limit of ${DatasetArray.chunkSizeLimitBytes}, got ${header.bytesPerChunk}")
}
new ZarrArray(rootPath, store, header, axisOrderOpt.getOrElse(AxisOrder.asZyxFromRank(header.rank)))
new ZarrArray(rootPath, store, header, axisOrderOpt.getOrElse(AxisOrder.asZyxFromRank(header.rank)), channelIndex)
}

}

class ZarrArray(relativePath: DatasetPath, store: FileSystemStore, header: DatasetHeader, axisOrder: AxisOrder)
extends DatasetArray(relativePath, store, header, axisOrder)
class ZarrArray(relativePath: DatasetPath,
store: FileSystemStore,
header: DatasetHeader,
axisOrder: AxisOrder,
channelIndex: Option[Int])
extends DatasetArray(relativePath, store, header, axisOrder, channelIndex)
with LazyLogging {

override protected val chunkReader: ChunkReader =
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -171,7 +171,7 @@ class VolumeTracingZarrStreamingController @Inject()(
largestSegmentId = tracing.largestSegmentId,
boundingBox = tracing.boundingBox,
elementClass = tracing.elementClass,
mags = tracing.resolutions.toList.map(x => MagLocator(x, None, None, Some(AxisOrder.cxyz))),
mags = tracing.resolutions.toList.map(x => MagLocator(x, None, None, Some(AxisOrder.cxyz), None)),
mappings = None,
numChannels = Some(if (tracing.elementClass.isuint24) 3 else 1)
)
Expand Down

0 comments on commit 2fb50d6

Please sign in to comment.