Skip to content

Commit

Permalink
Support 2d ome zarr (#7349)
Browse files Browse the repository at this point in the history
  • Loading branch information
frcroth authored Oct 16, 2023
1 parent dec2382 commit d15170f
Show file tree
Hide file tree
Showing 7 changed files with 101 additions and 35 deletions.
1 change: 1 addition & 0 deletions CHANGELOG.unreleased.md
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@ For upgrade instructions, please check the [migration guide](MIGRATIONS.released
- Added a new tool that allows either measuring the distance of a path or a non-self-crossing area. [#7258](https://github.com/scalableminds/webknossos/pull/7258)
- Added social media link previews for links to datasets and annotations (only if they are public or if the links contain sharing tokens). [#7331](https://github.com/scalableminds/webknossos/pull/7331)
- Loading sharded zarr3 datasets is now significantly faster. [#7363](https://github.com/scalableminds/webknossos/pull/7363) and [#7370](https://github.com/scalableminds/webknossos/pull/7370)
- OME-NGFF datasets with only 2 dimensions can now be imported and viewed. [#7349](https://github.com/scalableminds/webknossos/pull/7349)
- Higher-dimension coordinates (e.g., for the t axis) are now encoded in the URL, too, so that reloading the page will keep you at your current position. Only relevant for 4D datasets. [#7328](https://github.com/scalableminds/webknossos/pull/7328)
- WEBKNOSSOS can now also explore datasets on the local file system if enabled in the new config key `datastore.localFolderWhitelist`. [#7389](https://github.com/scalableminds/webknossos/pull/7389)

Expand Down
6 changes: 3 additions & 3 deletions app/models/dataset/explore/N5MultiscalesExplorer.scala
Original file line number Diff line number Diff line change
Expand Up @@ -48,7 +48,7 @@ class N5MultiscalesExplorer(implicit val ec: ExecutionContext) extends RemoteLay
val cOpt = if (c == -1) None else Some(c)
for {
_ <- bool2Fox(x >= 0 && y >= 0 && z >= 0) ?~> s"invalid xyz axis order: $x,$y,$z."
} yield AxisOrder(x, y, z, cOpt)
} yield AxisOrder(x, y, Some(z), cOpt)
}

private def extractAxisUnitFactors(unitsOpt: Option[List[String]], axisOrder: AxisOrder): Fox[Vec3Double] =
Expand All @@ -57,7 +57,7 @@ class N5MultiscalesExplorer(implicit val ec: ExecutionContext) extends RemoteLay
for {
xUnitFactor <- spaceUnitToNmFactor(units(axisOrder.x))
yUnitFactor <- spaceUnitToNmFactor(units(axisOrder.y))
zUnitFactor <- spaceUnitToNmFactor(units(axisOrder.z))
zUnitFactor <- spaceUnitToNmFactor(units(axisOrder.zWithFallback))
} yield Vec3Double(xUnitFactor, yUnitFactor, zUnitFactor)
case None => Fox.successful(Vec3Double(1e3, 1e3, 1e3)) // assume default micrometers
}
Expand Down Expand Up @@ -93,7 +93,7 @@ class N5MultiscalesExplorer(implicit val ec: ExecutionContext) extends RemoteLay
} yield voxelSizeInAxisUnits * axisUnitFactors

private def extractVoxelSizeInAxisUnits(scale: List[Double], axisOrder: AxisOrder): Fox[Vec3Double] =
tryo(Vec3Double(scale(axisOrder.x), scale(axisOrder.y), scale(axisOrder.z)))
tryo(Vec3Double(scale(axisOrder.x), scale(axisOrder.y), scale(axisOrder.zWithFallback)))

private def n5MagFromDataset(n5Dataset: N5MultiscalesDataset,
layerPath: VaultPath,
Expand Down
26 changes: 19 additions & 7 deletions app/models/dataset/explore/NgffExplorer.scala
Original file line number Diff line number Diff line change
Expand Up @@ -188,7 +188,14 @@ class NgffExplorer(implicit val ec: ExecutionContext) extends RemoteLayerExplore
private def getZarrHeader(ngffDataset: NgffDataset, layerPath: VaultPath) = {
val magPath = layerPath / ngffDataset.path
val zarrayPath = magPath / ZarrHeader.FILENAME_DOT_ZARRAY
parseJsonFromPath[ZarrHeader](zarrayPath) ?~> s"failed to read zarr header at $zarrayPath"
for {
parsedHeader <- parseJsonFromPath[ZarrHeader](zarrayPath) ?~> s"failed to read zarr header at $zarrayPath"
header = parsedHeader.shape.length match {
case 2 =>
parsedHeader.copy(shape = parsedHeader.shape ++ Array(1), chunks = parsedHeader.chunks ++ Array(1))
case _ => parsedHeader
}
} yield header
}

private def zarrMagFromNgffDataset(ngffDataset: NgffDataset,
Expand Down Expand Up @@ -220,16 +227,21 @@ class NgffExplorer(implicit val ec: ExecutionContext) extends RemoteLayerExplore
val c = axes.indexWhere(_.`type` == "channel")
val cOpt = if (c == -1) None else Some(c)
for {
_ <- bool2Fox(x >= 0 && y >= 0 && z >= 0) ?~> s"invalid xyz axis order: $x,$y,$z."
} yield AxisOrder(x, y, z, cOpt)
_ <- bool2Fox(x >= 0 && y >= 0) ?~> s"invalid xyz axis order: $x,$y,$z. ${x >= 0 && y >= 0}"
} yield
if (z >= 0) {
AxisOrder(x, y, Some(z), cOpt)
} else {
AxisOrder(x, y, None, cOpt)
}
}

private def extractAxisUnitFactors(axes: List[NgffAxis], axisOrder: AxisOrder): Fox[Vec3Double] =
for {
xUnitFactor <- axes(axisOrder.x).spaceUnitToNmFactor
yUnitFactor <- axes(axisOrder.y).spaceUnitToNmFactor
zUnitFactor <- axes(axisOrder.z).spaceUnitToNmFactor
} yield Vec3Double(xUnitFactor, yUnitFactor, zUnitFactor)
zUnitFactor <- Fox.runIf(axisOrder.hasZAxis)(axes(axisOrder.zWithFallback).spaceUnitToNmFactor)
} yield Vec3Double(xUnitFactor, yUnitFactor, zUnitFactor.getOrElse(1))

private def magFromTransforms(coordinateTransforms: List[NgffCoordinateTransformation],
voxelSizeInAxisUnits: Vec3Double,
Expand All @@ -240,7 +252,7 @@ class NgffExplorer(implicit val ec: ExecutionContext) extends RemoteLayerExplore
val combinedScale = extractAndCombineScaleTransforms(coordinateTransforms, axisOrder)
val mag = (combinedScale / voxelSizeInAxisUnits).round.toVec3Int
for {
_ <- bool2Fox(isPowerOfTwo(mag.x) && isPowerOfTwo(mag.x) && isPowerOfTwo(mag.x)) ?~> s"invalid mag: $mag. Must all be powers of two"
_ <- bool2Fox(isPowerOfTwo(mag.x) && isPowerOfTwo(mag.y) && isPowerOfTwo(mag.z)) ?~> s"invalid mag: $mag. Must all be powers of two"
} yield mag
}

Expand All @@ -267,7 +279,7 @@ class NgffExplorer(implicit val ec: ExecutionContext) extends RemoteLayerExplore
val scalesFromTransforms = filtered.flatMap(_.scale)
val xFactors = scalesFromTransforms.map(_(axisOrder.x))
val yFactors = scalesFromTransforms.map(_(axisOrder.y))
val zFactors = scalesFromTransforms.map(_(axisOrder.z))
val zFactors = if (axisOrder.hasZAxis) scalesFromTransforms.map(_(axisOrder.zWithFallback)) else Seq(1.0, 1.0)
Vec3Double(xFactors.product, yFactors.product, zFactors.product)
}
}
2 changes: 1 addition & 1 deletion app/models/dataset/explore/PrecomputedExplorer.scala
Original file line number Diff line number Diff line change
Expand Up @@ -64,7 +64,7 @@ class PrecomputedExplorer(implicit val ec: ExecutionContext) extends RemoteLayer

// Neuroglancer precomputed specification does not specify axis order, but uses x,y,z implicitly.
// https://github.com/google/neuroglancer/blob/master/src/neuroglancer/datasource/precomputed/volume.md#unsharded-chunk-storage
axisOrder = AxisOrder(0, 1, 2)
axisOrder = AxisOrder.xyz(0, 1, 2)
} yield MagLocator(mag, Some(path.toString), None, Some(axisOrder), channelIndex = None, credentialId)
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -2,13 +2,22 @@ package com.scalableminds.webknossos.datastore.datareaders

import play.api.libs.json.{Json, OFormat}

case class AxisOrder(x: Int, y: Int, z: Int, c: Option[Int] = None) {
case class AxisOrder(x: Int, y: Int, z: Option[Int], c: Option[Int] = None) {

def hasZAxis: Boolean = z.isDefined

def zWithFallback: Int = z match {
case Some(value) => value
// z is appended to the end of the array (this is reflected in DatasetArray adding 1 at the end of header shape and chunksize)
case None => Math.max(Math.max(x, y), c.getOrElse(-1)) + 1
}

def permutation(rank: Int): Array[Int] =
c match {
case Some(channel) =>
((0 until (rank - 4)).toList :+ channel :+ x :+ y :+ z).toArray
((0 until (rank - 4)).toList :+ channel :+ x :+ y :+ zWithFallback).toArray
case None =>
((0 until (rank - 3)).toList :+ x :+ y :+ z).toArray
((0 until (rank - 3)).toList :+ x :+ y :+ zWithFallback).toArray
}

def inversePermutation(rank: Int): Array[Int] = {
Expand All @@ -29,17 +38,19 @@ case class AxisOrder(x: Int, y: Int, z: Int, c: Option[Int] = None) {
}

object AxisOrder {

// assumes that the last three elements of the shape are z,y,x (standard in OME NGFF)
def asZyxFromRank(rank: Int): AxisOrder = AxisOrder(rank - 1, rank - 2, rank - 3)
def asZyxFromRank(rank: Int): AxisOrder = AxisOrder.xyz(rank - 1, rank - 2, rank - 3)

def cxyz: AxisOrder = asCxyzFromRank(rank = 4)
def xyz(x: Int, y: Int, z: Int): AxisOrder = AxisOrder(x, y, Some(z))

// assumes that the last three elements of the shapre are (c),x,y,z (which is what webKnossos sends to the frontend)
// assumes that the last three elements of the shape are (c),x,y,z (which is what WEBKNOSSOS sends to the frontend)
def asCxyzFromRank(rank: Int): AxisOrder =
if (rank == 3)
AxisOrder(rank - 3, rank - 2, rank - 1)
AxisOrder.xyz(rank - 3, rank - 2, rank - 1)
else
AxisOrder(rank - 3, rank - 2, rank - 1, Some(rank - 4))
AxisOrder(rank - 3, rank - 2, Some(rank - 1), Some(rank - 4))

def cxyz: AxisOrder = asCxyzFromRank(rank = 4)
implicit val jsonFormat: OFormat[AxisOrder] = Json.format[AxisOrder]
}
Original file line number Diff line number Diff line change
Expand Up @@ -30,11 +30,36 @@ class DatasetArray(vaultPath: VaultPath,

protected lazy val chunkReader: ChunkReader = new ChunkReader(header)

// Helper variables to allow reading 2d datasets as 3d datasets with depth 1

lazy val rank: Int = if (axisOrder.hasZAxis) {
header.rank
} else {
header.rank + 1
}

lazy val datasetShape: Array[Int] = if (axisOrder.hasZAxis) {
header.datasetShape
} else {
header.datasetShape :+ 1
}

lazy val chunkSize: Array[Int] = if (axisOrder.hasZAxis) {
header.chunkSize
} else {
header.chunkSize :+ 1
}

private def chunkSizeAtIndex(index: Array[Int]) =
if (axisOrder.hasZAxis) { header.chunkSizeAtIndex(index) } else {
chunkSize // irregular sized chunk indexes are currently not supported for 2d datasets
}

// Returns byte array in fortran-order with little-endian values
def readBytesXYZ(shape: Vec3Int, offset: Vec3Int)(implicit ec: ExecutionContext): Fox[Array[Byte]] = {
val paddingDimensionsCount = header.rank - 3
val paddingDimensionsCount = rank - 3
val offsetArray = channelIndex match {
case Some(c) if header.rank >= 4 =>
case Some(c) if rank >= 4 =>
Array.fill(paddingDimensionsCount - 1)(0) :+ c :+ offset.x :+ offset.y :+ offset.z
case _ => Array.fill(paddingDimensionsCount)(0) :+ offset.x :+ offset.y :+ offset.z
}
Expand Down Expand Up @@ -89,11 +114,12 @@ class DatasetArray(vaultPath: VaultPath,

// Read from array. Note that shape and offset should be passed in XYZ order, left-padded with 0 and 1 respectively.
// This function will internally adapt to the array's axis order so that XYZ data in fortran-order is returned.

private def readAsFortranOrder(shape: Array[Int], offset: Array[Int])(
implicit ec: ExecutionContext): Fox[MultiArray] = {
val totalOffset: Array[Int] = offset.zip(header.voxelOffset).map { case (o, v) => o - v }
val chunkIndices = ChunkUtils.computeChunkIndices(axisOrder.permuteIndicesReverse(header.datasetShape),
axisOrder.permuteIndicesReverse(header.chunkSize),
val totalOffset: Array[Int] = offset.zip(header.voxelOffset).map { case (o, v) => o - v }.padTo(offset.length, 0)
val chunkIndices = ChunkUtils.computeChunkIndices(axisOrder.permuteIndicesReverse(datasetShape),
axisOrder.permuteIndicesReverse(chunkSize),
shape,
totalOffset)
if (partialCopyingIsNotNeeded(shape, totalOffset, chunkIndices)) {
Expand Down Expand Up @@ -145,17 +171,21 @@ class DatasetArray(vaultPath: VaultPath,
if (header.isSharded) {
for {
(shardPath, chunkRange) <- getShardedChunkPathAndRange(chunkIndex) ?~> "chunk.getShardedPathAndRange.failed"
chunkShape = header.chunkSizeAtIndex(chunkIndex)
chunkShape = chunkSizeAtIndex(chunkIndex)
multiArray <- chunkReader.read(shardPath, chunkShape, Some(chunkRange), useSkipTypingShortcut)
} yield multiArray
} else {
val chunkPath = vaultPath / getChunkFilename(chunkIndex)
val chunkShape = header.chunkSizeAtIndex(chunkIndex)
val chunkShape = chunkSizeAtIndex(chunkIndex)
chunkReader.read(chunkPath, chunkShape, None, useSkipTypingShortcut)
}

protected def getChunkFilename(chunkIndex: Array[Int]): String =
chunkIndex.mkString(header.dimension_separator.toString)
if (axisOrder.hasZAxis) {
chunkIndex.mkString(header.dimension_separator.toString)
} else {
chunkIndex.drop(1).mkString(header.dimension_separator.toString) // (c),x,y,z -> z is dropped in 2d case
}

private def partialCopyingIsNotNeeded(bufferShape: Array[Int],
globalOffset: Array[Int],
Expand All @@ -166,19 +196,19 @@ class DatasetArray(vaultPath: VaultPath,
header.order == ArrayOrder.F &&
isZeroOffset(offsetInChunk) &&
isBufferShapeEqualChunkShape(bufferShape) &&
axisOrder == AxisOrder.asCxyzFromRank(header.rank)
axisOrder == AxisOrder.asCxyzFromRank(rank)
case _ => false
}

private def isBufferShapeEqualChunkShape(bufferShape: Array[Int]): Boolean =
util.Arrays.equals(bufferShape, header.chunkSize)
util.Arrays.equals(bufferShape, chunkSize)

private def isZeroOffset(offset: Array[Int]): Boolean =
util.Arrays.equals(offset, new Array[Int](offset.length))

private def computeOffsetInChunk(chunkIndex: Array[Int], globalOffset: Array[Int]): Array[Int] =
chunkIndex.indices.map { dim =>
globalOffset(dim) - (chunkIndex(dim) * axisOrder.permuteIndicesReverse(header.chunkSize)(dim))
globalOffset(dim) - (chunkIndex(dim) * axisOrder.permuteIndicesReverse(chunkSize)(dim))
}.toArray

override def toString: String =
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -8,9 +8,10 @@ import ArrayDataType.{ArrayDataType, bytesPerElementFor}
import java.nio.ByteOrder

trait DatasetHeader {
def datasetShape: Array[Int] // shape of the entire array

def chunkSize: Array[Int] // shape of each chunk
// Note that in DatasetArray, datasetShape and chunkSize are adapted for 2d datasets
def datasetShape: Array[Int] // shape of the entire array
def chunkSize: Array[Int] // shape of each chunk,

def dimension_separator: DimensionSeparator

Expand All @@ -37,11 +38,22 @@ trait DatasetHeader {
}

def boundingBox(axisOrder: AxisOrder): Option[BoundingBox] =
if (Math.max(Math.max(axisOrder.x, axisOrder.y), axisOrder.z) >= rank)
if (Math.max(Math.max(axisOrder.x, axisOrder.y), axisOrder.zWithFallback) >= rank && axisOrder.hasZAxis)
None
else
Some(BoundingBox(Vec3Int.zeros, datasetShape(axisOrder.x), datasetShape(axisOrder.y), datasetShape(axisOrder.z)))
else {
if (axisOrder.hasZAxis) {
Some(
BoundingBox(Vec3Int.zeros,
datasetShape(axisOrder.x),
datasetShape(axisOrder.y),
datasetShape(axisOrder.zWithFallback)))
} else {
Some(BoundingBox(Vec3Int.zeros, datasetShape(axisOrder.x), datasetShape(axisOrder.y), 1))
}

}

// Note that in DatasetArray, this is adapted for 2d datasets
lazy val rank: Int = datasetShape.length

def chunkSizeAtIndex(chunkIndex: Array[Int]): Array[Int] = chunkSize
Expand Down

0 comments on commit d15170f

Please sign in to comment.