N5 support (#6466)
* add N5 layer classes

* first working version

* refactor zarr and n5 with common superclasses, and refactor datareader in simpler composition instead of inheritance

* add PaddedChunkReader

* rename zarrMagOpt to n5magopt

* add CompressionOption

* update changelog

* Update webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/n5/ChunkReaderN5.scala

Co-authored-by: Florian M <[email protected]>

* incorporate pr feedback

* reformat

* Fix JsonImplicits import, rename DatasetLocatorMag to MagLocator

Co-authored-by: leowe <[email protected]>
Co-authored-by: Florian M <[email protected]>
Co-authored-by: Florian M <[email protected]>
4 people authored Sep 22, 2022
1 parent 0f57cc3 commit 6d688e9
Showing 44 changed files with 1,208 additions and 681 deletions.
1 change: 1 addition & 0 deletions CHANGELOG.unreleased.md
@@ -19,6 +19,7 @@ For upgrade instructions, please check the [migration guide](MIGRATIONS.released
- The proofreading tool now supports merging and splitting (via min-cut) agglomerates by right-clicking a segment (and not a node). Note that there still has to be an active node so that both partners of the operation are defined. [#6464](https://github.com/scalableminds/webknossos/pull/6464)
- Added workflow reporting and logging features for Voxelytics into webKnossos. If activated, the workflows can be accessed from the `Administration` > `Voxelytics` menu item. [#6416](https://github.com/scalableminds/webknossos/pull/6416) [#6460](https://github.com/scalableminds/webknossos/pull/6460)
- The color of a segment can now be changed in the segments tab. Right-click a segment in the list and select "Change Color" to open a color picker. [#6372](https://github.com/scalableminds/webknossos/pull/6372)
- Added the possibility to read N5 datasets. [#6466](https://github.com/scalableminds/webknossos/pull/6466)

### Changed
- Selecting a node with the proofreading tool won't have any side effects anymore. Previous versions could load additional agglomerate skeletons in certain scenarios which could be confusing. [#6477](https://github.com/scalableminds/webknossos/pull/6477)
13 changes: 9 additions & 4 deletions app/models/binary/ExploreRemoteLayerService.scala
@@ -5,8 +5,10 @@ import java.nio.charset.StandardCharsets
import java.nio.file.{Files, Path}
import com.scalableminds.util.geometry.{BoundingBox, Vec3Double, Vec3Int}
import com.scalableminds.util.tools.{Fox, FoxImplicits, JsonHelper}
import com.scalableminds.webknossos.datastore.dataformats.MagLocator
import com.scalableminds.webknossos.datastore.dataformats.zarr._
import com.scalableminds.webknossos.datastore.jzarr._
import com.scalableminds.webknossos.datastore.datareaders.AxisOrder
import com.scalableminds.webknossos.datastore.datareaders.jzarr._
import com.scalableminds.webknossos.datastore.models.datasource._
import com.scalableminds.webknossos.datastore.storage.FileSystemsHolder
import com.typesafe.scalalogging.LazyLogging
@@ -24,7 +26,10 @@ object ExploreRemoteDatasetParameters {
implicit val jsonFormat: OFormat[ExploreRemoteDatasetParameters] = Json.format[ExploreRemoteDatasetParameters]
}

case class MagWithAttributes(mag: ZarrMag, remotePath: Path, elementClass: ElementClass.Value, boundingBox: BoundingBox)
case class MagWithAttributes(mag: MagLocator,
remotePath: Path,
elementClass: ElementClass.Value,
boundingBox: BoundingBox)

class ExploreRemoteLayerService @Inject()() extends FoxImplicits with LazyLogging {

@@ -143,7 +148,7 @@ class ExploreRemoteLayerService @Inject()() extends FoxImplicits with LazyLoggin
elementClass <- zarrHeader.elementClass ?~> "failed to read element class from zarr header"
guessedAxisOrder = AxisOrder.asZyxFromRank(zarrHeader.rank)
boundingBox <- zarrHeader.boundingBox(guessedAxisOrder) ?~> "failed to read bounding box from zarr header. Make sure data is in (T/C)ZYX format"
zarrMag = ZarrMag(Vec3Int.ones, Some(remotePath.toString), credentials, Some(guessedAxisOrder))
zarrMag = MagLocator(Vec3Int.ones, Some(remotePath.toString), credentials, Some(guessedAxisOrder))
layer: ZarrLayer = if (looksLikeSegmentationLayer(name, elementClass)) {
ZarrSegmentationLayer(name, boundingBox, elementClass, List(zarrMag), largestSegmentId = 0L)
} else ZarrDataLayer(name, Category.color, boundingBox, elementClass, List(zarrMag))
@@ -200,7 +205,7 @@ class ExploreRemoteLayerService @Inject()() extends FoxImplicits with LazyLoggin
elementClass <- zarrHeader.elementClass ?~> s"failed to read element class from zarr header at $zarrayPath"
boundingBox <- zarrHeader.boundingBox(axisOrder) ?~> s"failed to read bounding box from zarr header at $zarrayPath"
} yield
MagWithAttributes(ZarrMag(mag, Some(magPath.toString), credentials, Some(axisOrder)),
MagWithAttributes(MagLocator(mag, Some(magPath.toString), credentials, Some(axisOrder)),
magPath,
elementClass,
boundingBox)
5 changes: 3 additions & 2 deletions project/Dependencies.scala
@@ -51,7 +51,7 @@ object Dependencies {
private val awsS3 = "com.amazonaws" % "aws-java-sdk-s3" % "1.12.288"
private val tika = "org.apache.tika" % "tika-core" % "1.5"
private val jackson = "com.fasterxml.jackson.module" %% "jackson-module-scala" % "2.12.7"

private val commonsCompress = "org.apache.commons" % "commons-compress" % "1.21"

private val sql = Seq(
"com.typesafe.slick" %% "slick" % "3.3.3",
@@ -100,7 +100,8 @@ object Dependencies {
awsS3,
tika,
jblosc,
scalajHttp
scalajHttp,
commonsCompress
)

val webknossosTracingstoreDependencies: Seq[ModuleID] = Seq(
@@ -4,5 +4,6 @@ import play.api.libs.json.{Format, Json}

abstract class ExtendedEnumeration extends Enumeration {
implicit val format: Format[Value] = Json.formatEnum(this)
def fromString(s: String): Option[Value] = values.find(_.toString == s)
def fromString(s: String): Option[Value] =
values.find(_.toString == s)
}
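
For context, a minimal usage sketch of fromString with a hypothetical enumeration (the Color object below is made up for illustration and is not part of this diff):

object Color extends ExtendedEnumeration {
  val red, green, blue = Value
}

Color.fromString("red")    // Some(red)
Color.fromString("violet") // None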
@@ -3,15 +3,12 @@ package com.scalableminds.webknossos.datastore.controllers
import com.google.inject.Inject
import com.scalableminds.util.geometry.Vec3Int
import com.scalableminds.util.tools.Fox
import com.scalableminds.webknossos.datastore.dataformats.MagLocator
import com.scalableminds.webknossos.datastore.dataformats.wkw.{WKWDataLayer, WKWSegmentationLayer}
import com.scalableminds.webknossos.datastore.dataformats.zarr.ZarrCoordinatesParser.parseDotCoordinates
import com.scalableminds.webknossos.datastore.dataformats.zarr.{
ZarrDataLayer,
ZarrLayer,
ZarrMag,
ZarrSegmentationLayer
}
import com.scalableminds.webknossos.datastore.jzarr.{AxisOrder, OmeNgffGroupHeader, OmeNgffHeader, ZarrHeader}
import com.scalableminds.webknossos.datastore.dataformats.zarr.{ZarrDataLayer, ZarrLayer, ZarrSegmentationLayer}
import com.scalableminds.webknossos.datastore.datareaders.AxisOrder
import com.scalableminds.webknossos.datastore.datareaders.jzarr.{OmeNgffGroupHeader, OmeNgffHeader, ZarrHeader}
import com.scalableminds.webknossos.datastore.models.VoxelPosition
import com.scalableminds.webknossos.datastore.models.annotation.AnnotationLayerType
import com.scalableminds.webknossos.datastore.models.datasource._
@@ -118,15 +115,15 @@ class ZarrStreamingController @Inject()(
d.category,
d.boundingBox,
d.elementClass,
d.resolutions.map(x => ZarrMag(x, None, None, Some(AxisOrder.cxyz))),
d.resolutions.map(x => MagLocator(x, None, None, Some(AxisOrder.cxyz))),
numChannels = Some(if (d.elementClass == ElementClass.uint24) 3 else 1)
)
case s: WKWSegmentationLayer =>
ZarrSegmentationLayer(
s.name,
s.boundingBox,
s.elementClass,
s.resolutions.map(x => ZarrMag(x, None, None, Some(AxisOrder.cxyz))),
s.resolutions.map(x => MagLocator(x, None, None, Some(AxisOrder.cxyz))),
mappings = s.mappings,
largestSegmentId = s.largestSegmentId,
numChannels = Some(if (s.elementClass == ElementClass.uint24) 3 else 1)
@@ -137,15 +134,15 @@
z.category,
z.boundingBox,
z.elementClass,
z.resolutions.map(x => ZarrMag(x, None, None, Some(AxisOrder.cxyz))),
z.resolutions.map(x => MagLocator(x, None, None, Some(AxisOrder.cxyz))),
numChannels = Some(if (z.elementClass == ElementClass.uint24) 3 else 1)
)
case zs: ZarrSegmentationLayer =>
ZarrSegmentationLayer(
zs.name,
zs.boundingBox,
zs.elementClass,
zs.resolutions.map(x => ZarrMag(x, None, None, Some(AxisOrder.cxyz))),
zs.resolutions.map(x => MagLocator(x, None, None, Some(AxisOrder.cxyz))),
mappings = zs.mappings,
largestSegmentId = zs.largestSegmentId,
numChannels = Some(if (zs.elementClass == ElementClass.uint24) 3 else 1)
@@ -1,12 +1,14 @@
package com.scalableminds.webknossos.datastore.dataformats

import com.scalableminds.util.tools.{Fox, FoxImplicits}
import com.scalableminds.webknossos.datastore.dataformats.zarr.RemoteSourceDescriptor
import com.scalableminds.webknossos.datastore.models.BucketPosition
import com.scalableminds.webknossos.datastore.models.requests.DataReadInstruction
import com.scalableminds.webknossos.datastore.storage.DataCubeCache
import com.scalableminds.webknossos.datastore.storage.{DataCubeCache, FileSystemsHolder}
import com.typesafe.scalalogging.LazyLogging
import net.liftweb.common.{Box, Empty}

import java.nio.file.{FileSystem, Path}
import scala.concurrent.ExecutionContext

trait BucketProvider extends FoxImplicits with LazyLogging {
@@ -38,4 +40,20 @@ trait BucketProvider extends FoxImplicits with LazyLogging {
def bucketStream(version: Option[Long] = None): Iterator[(BucketPosition, Array[Byte])] =
Iterator.empty

protected def remotePathFrom(remoteSource: RemoteSourceDescriptor): Option[Path] =
FileSystemsHolder.getOrCreate(remoteSource).map { fileSystem: FileSystem =>
fileSystem.getPath(remoteSource.remotePath)
}

protected def localPathFrom(readInstruction: DataReadInstruction, relativeMagPath: String): Option[Path] = {
val magPath = readInstruction.baseDir
.resolve(readInstruction.dataSource.id.team)
.resolve(readInstruction.dataSource.id.name)
.resolve(readInstruction.dataLayer.name)
.resolve(relativeMagPath)
if (magPath.toFile.exists()) {
Some(magPath)
} else None
}

}
@@ -0,0 +1,30 @@
package com.scalableminds.webknossos.datastore.dataformats

import com.scalableminds.util.geometry.Vec3Int
import com.scalableminds.webknossos.datastore.dataformats.zarr.{FileSystemCredentials, RemoteSourceDescriptor}
import com.scalableminds.webknossos.datastore.datareaders.AxisOrder
import com.scalableminds.webknossos.datastore.models.datasource.ResolutionFormatHelper
import com.scalableminds.webknossos.datastore.storage.FileSystemsHolder
import play.api.libs.json.{Json, OFormat}

import java.net.URI

case class MagLocator(mag: Vec3Int,
path: Option[String],
credentials: Option[FileSystemCredentials],
axisOrder: Option[AxisOrder]) {

lazy val pathWithFallback: String = path.getOrElse(mag.toMagLiteral(allowScalar = true))
private lazy val uri: URI = new URI(pathWithFallback)
private lazy val isRemote: Boolean = FileSystemsHolder.isSupportedRemoteScheme(uri.getScheme)
lazy val remoteSource: Option[RemoteSourceDescriptor] =
if (isRemote)
Some(RemoteSourceDescriptor(uri, credentials.map(_.user), credentials.flatMap(_.password)))
else
None

}

object MagLocator extends ResolutionFormatHelper {
implicit val jsonFormat: OFormat[MagLocator] = Json.format[MagLocator]
}
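
As an illustration, a hedged sketch of how MagLocator distinguishes remote from local mags. The URI and mag values are made up, and the exact string produced by toMagLiteral is an assumption:

// Remote mag: remoteSource is populated when FileSystemsHolder supports the URI scheme.
val remoteMag = MagLocator(Vec3Int.ones, Some("https://example.com/dataset/color/1"), None, Some(AxisOrder.cxyz))
remoteMag.remoteSource // Some(RemoteSourceDescriptor(...)), assuming https is a supported remote scheme

// Local mag: no path given, so pathWithFallback falls back to the mag literal (assumed to look like "2-2-1").
val localMag = MagLocator(Vec3Int(2, 2, 1), None, None, None)
localMag.remoteSource     // None
localMag.pathWithFallback // mag literal, resolved relative to the dataset directory by the bucket provider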
@@ -0,0 +1,54 @@
package com.scalableminds.webknossos.datastore.dataformats.n5

import com.scalableminds.util.geometry.Vec3Int
import com.scalableminds.util.requestlogging.RateLimitedErrorLogging
import com.scalableminds.util.tools.Fox
import com.scalableminds.webknossos.datastore.dataformats.{BucketProvider, DataCubeHandle, MagLocator}
import com.scalableminds.webknossos.datastore.models.BucketPosition
import com.scalableminds.webknossos.datastore.models.requests.DataReadInstruction
import com.scalableminds.webknossos.datastore.datareaders.n5.N5Array
import com.typesafe.scalalogging.LazyLogging
import net.liftweb.common.{Box, Empty, Failure, Full}
import net.liftweb.util.Helpers.tryo

import java.nio.file.Path
import scala.concurrent.ExecutionContext

class N5CubeHandle(n5Array: N5Array) extends DataCubeHandle with LazyLogging with RateLimitedErrorLogging {

def cutOutBucket(bucket: BucketPosition)(implicit ec: ExecutionContext): Fox[Array[Byte]] = {
val shape = Vec3Int.full(bucket.bucketLength)
val offset = Vec3Int(bucket.voxelXInMag, bucket.voxelYInMag, bucket.voxelZInMag)
n5Array.readBytesXYZ(shape, offset).recover {
case t: Throwable => logError(t); Failure(t.getMessage, Full(t), Empty)
}
}

override protected def onFinalize(): Unit = ()

}

class N5BucketProvider(layer: N5Layer) extends BucketProvider with LazyLogging with RateLimitedErrorLogging {

override def loadFromUnderlying(readInstruction: DataReadInstruction): Box[N5CubeHandle] = {
val n5MagOpt: Option[MagLocator] =
layer.mags.find(_.mag == readInstruction.bucket.mag)

n5MagOpt match {
case None => Empty
case Some(n5Mag) =>
val magPathOpt: Option[Path] = {
n5Mag.remoteSource match {
case Some(remoteSource) => remotePathFrom(remoteSource)
case None => localPathFrom(readInstruction, n5Mag.pathWithFallback)
}
}
magPathOpt match {
case None => Empty
case Some(magPath) =>
tryo(onError = e => logError(e))(N5Array.open(magPath, n5Mag.axisOrder)).map(new N5CubeHandle(_))
}
}

}
}
@@ -0,0 +1,55 @@
package com.scalableminds.webknossos.datastore.dataformats.n5

import com.scalableminds.util.geometry.{BoundingBox, Vec3Int}
import com.scalableminds.webknossos.datastore.dataformats.MagLocator
import com.scalableminds.webknossos.datastore.models.datasource.LayerViewConfiguration.LayerViewConfiguration
import com.scalableminds.webknossos.datastore.models.datasource._
import play.api.libs.json.{Json, OFormat}

trait N5Layer extends DataLayer {

val dataFormat: DataFormat.Value = DataFormat.n5

lazy val bucketProvider = new N5BucketProvider(this)

def resolutions: List[Vec3Int] = mags.map(_.mag)

def mags: List[MagLocator]

def lengthOfUnderlyingCubes(resolution: Vec3Int): Int = Int.MaxValue // Prevents the wkw-shard-specific handle caching

def numChannels: Option[Int] = Some(if (elementClass == ElementClass.uint24) 3 else 1)

}

case class N5DataLayer(
name: String,
category: Category.Value,
boundingBox: BoundingBox,
elementClass: ElementClass.Value,
mags: List[MagLocator],
defaultViewConfiguration: Option[LayerViewConfiguration] = None,
adminViewConfiguration: Option[LayerViewConfiguration] = None,
override val numChannels: Option[Int] = Some(1)
) extends N5Layer

object N5DataLayer {
implicit val jsonFormat: OFormat[N5DataLayer] = Json.format[N5DataLayer]
}

case class N5SegmentationLayer(
name: String,
boundingBox: BoundingBox,
elementClass: ElementClass.Value,
mags: List[MagLocator],
largestSegmentId: Long,
mappings: Option[Set[String]] = None,
defaultViewConfiguration: Option[LayerViewConfiguration] = None,
adminViewConfiguration: Option[LayerViewConfiguration] = None,
override val numChannels: Option[Int] = Some(1)
) extends SegmentationLayer
with N5Layer

object N5SegmentationLayer {
implicit val jsonFormat: OFormat[N5SegmentationLayer] = Json.format[N5SegmentationLayer]
}
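
For orientation, a rough sketch of wiring a MagLocator into the new layer type. The BoundingBox constructor shape and the ElementClass.uint8 value are assumptions taken from the wider webKnossos codebase and are not shown in this diff:

val mags = List(MagLocator(Vec3Int.ones, None, None, None))

val layer = N5DataLayer(
  name = "color",
  category = Category.color,
  boundingBox = BoundingBox(Vec3Int(0, 0, 0), 512, 512, 512), // assumed constructor shape
  elementClass = ElementClass.uint8,                          // assumed element class value
  mags = mags
)

layer.resolutions // List(Vec3Int(1, 1, 1)), derived from mags
layer.numChannels // Some(1) by default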
@@ -1,18 +1,16 @@
package com.scalableminds.webknossos.datastore.dataformats.zarr

import java.nio.file.{FileSystem, Path}

import java.nio.file.Path
import com.scalableminds.util.geometry.Vec3Int
import com.scalableminds.util.requestlogging.RateLimitedErrorLogging
import com.scalableminds.util.tools.Fox
import com.scalableminds.webknossos.datastore.dataformats.{BucketProvider, DataCubeHandle}
import com.scalableminds.webknossos.datastore.jzarr.ZarrArray
import com.scalableminds.webknossos.datastore.dataformats.{BucketProvider, DataCubeHandle, MagLocator}
import com.scalableminds.webknossos.datastore.datareaders.jzarr.ZarrArray
import com.scalableminds.webknossos.datastore.models.BucketPosition
import com.scalableminds.webknossos.datastore.models.requests.DataReadInstruction
import com.scalableminds.webknossos.datastore.storage.FileSystemsHolder
import com.typesafe.scalalogging.LazyLogging
import net.liftweb.common.Box.tryo
import net.liftweb.common.{Box, Empty, Failure, Full}
import net.liftweb.util.Helpers.tryo

import scala.concurrent.ExecutionContext

@@ -33,7 +31,7 @@ class ZarrCubeHandle(zarrArray: ZarrArray) extends DataCubeHandle with LazyLoggi
class ZarrBucketProvider(layer: ZarrLayer) extends BucketProvider with LazyLogging with RateLimitedErrorLogging {

override def loadFromUnderlying(readInstruction: DataReadInstruction): Box[ZarrCubeHandle] = {
val zarrMagOpt: Option[ZarrMag] =
val zarrMagOpt: Option[MagLocator] =
layer.mags.find(_.mag == readInstruction.bucket.mag)

zarrMagOpt match {
@@ -53,21 +51,4 @@ class ZarrBucketProvider(layer: ZarrLayer) extends BucketProvider with LazyLoggi
}

}

private def remotePathFrom(remoteSource: RemoteSourceDescriptor): Option[Path] =
FileSystemsHolder.getOrCreate(remoteSource).map { fileSystem: FileSystem =>
fileSystem.getPath(remoteSource.remotePath)
}

private def localPathFrom(readInstruction: DataReadInstruction, relativeMagPath: String): Option[Path] = {
val magPath = readInstruction.baseDir
.resolve(readInstruction.dataSource.id.team)
.resolve(readInstruction.dataSource.id.name)
.resolve(readInstruction.dataLayer.name)
.resolve(relativeMagPath)
if (magPath.toFile.exists()) {
Some(magPath)
} else None
}

}