Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Include all-zero buckets in volume annotation downloads #7576

Merged
merged 6 commits into from
Jan 24, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions CHANGELOG.unreleased.md
Original file line number Diff line number Diff line change
Expand Up @@ -64,6 +64,7 @@ For upgrade instructions, please check the [migration guide](MIGRATIONS.released
- Fixed the "Download Meshes" functionality which was affected by the recent introduction of the CSP. [#7577](https://github.com/scalableminds/webknossos/pull/7577)
- Fixed a bug where listing the annotations of other users would result in empty lists even if there are annotations that you should be allowed to see. [#7563](https://github.com/scalableminds/webknossos/pull/7563)
- Fixed errors showing when viewing the annotation list. [#7579](https://github.com/scalableminds/webknossos/pull/7579)
- Fixed a bug where all-zero chunks/buckets were omitted when downloading a volume annotation, even in the case of a fallback segmentation layer, where their zeroed-bucket information is actually needed. [#7576](https://github.com/scalableminds/webknossos/pull/7576)

### Removed
- Removed several unused frontend libraries. [#7521](https://github.com/scalableminds/webknossos/pull/7521)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@ package com.scalableminds.webknossos.tracingstore.tracings.volume
import java.io.{File, FileOutputStream, InputStream}
import com.scalableminds.util.geometry.Vec3Int
import com.scalableminds.util.io.ZipIO
import com.scalableminds.util.tools.{BoxImplicits, ByteUtils, JsonHelper}
import com.scalableminds.util.tools.{BoxImplicits, JsonHelper}
import com.scalableminds.webknossos.datastore.dataformats.wkw.WKWDataFormatHelper
import com.scalableminds.webknossos.datastore.datareaders.{
BloscCompressor,
Expand All @@ -23,7 +23,11 @@ import org.apache.commons.io.IOUtils
import java.util.zip.{ZipEntry, ZipFile}
import scala.collection.mutable

trait VolumeDataZipHelper extends WKWDataFormatHelper with ByteUtils with BoxImplicits with LazyLogging {
trait VolumeDataZipHelper
extends WKWDataFormatHelper
with VolumeBucketReversionHelper
with BoxImplicits
with LazyLogging {

protected def withBucketsFromZip(zipFile: File)(block: (BucketPosition, Array[Byte]) => Unit): Box[Unit] =
for {
Expand Down Expand Up @@ -51,7 +55,7 @@ trait VolumeDataZipHelper extends WKWDataFormatHelper with ByteUtils with BoxImp
parseWKWFilePath(fileName.toString).map { bucketPosition: BucketPosition =>
if (buckets.hasNext) {
val data = buckets.next()
if (!isAllZero(data)) {
if (!isRevertedBucket(data)) {
block(bucketPosition, data)
}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,9 +17,9 @@ import scala.concurrent.ExecutionContext
import scala.concurrent.duration._

trait VolumeBucketReversionHelper {
private def isRevertedBucket(data: Array[Byte]): Boolean = data sameElements Array[Byte](0)
protected def isRevertedBucket(data: Array[Byte]): Boolean = data sameElements Array[Byte](0)

def isRevertedBucket(bucket: VersionedKeyValuePair[Array[Byte]]): Boolean = isRevertedBucket(bucket.value)
protected def isRevertedBucket(bucket: VersionedKeyValuePair[Array[Byte]]): Boolean = isRevertedBucket(bucket.value)
}

trait VolumeBucketCompression extends LazyLogging {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -371,13 +371,15 @@ class VolumeTracingService @Inject()(
val dataLayer = volumeTracingLayer(tracingId, tracing)
val buckets: Iterator[NamedStream] = volumeDataZipFormmat match {
case VolumeDataZipFormat.wkw =>
new WKWBucketStreamSink(dataLayer)(dataLayer.bucketProvider.bucketStream(Some(tracing.version)),
tracing.resolutions.map(mag => vec3IntFromProto(mag)))
new WKWBucketStreamSink(dataLayer, tracing.fallbackLayer.nonEmpty)(
dataLayer.bucketProvider.bucketStream(Some(tracing.version)),
tracing.resolutions.map(mag => vec3IntFromProto(mag)))
case VolumeDataZipFormat.zarr3 =>
new Zarr3BucketStreamSink(dataLayer)(dataLayer.bucketProvider.bucketStream(Some(tracing.version)),
tracing.resolutions.map(mag => vec3IntFromProto(mag)),
tracing.additionalAxes,
voxelSize)
new Zarr3BucketStreamSink(dataLayer, tracing.fallbackLayer.nonEmpty)(
dataLayer.bucketProvider.bucketStream(Some(tracing.version)),
tracing.resolutions.map(mag => vec3IntFromProto(mag)),
tracing.additionalAxes,
voxelSize)
}

val before = Instant.now
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -11,24 +11,34 @@ import com.scalableminds.webknossos.wrap.{BlockType, WKWFile, WKWHeader}
import java.io.DataOutputStream
import scala.concurrent.{ExecutionContext, Future}

class WKWBucketStreamSink(val layer: DataLayer) extends WKWDataFormatHelper with ByteUtils {
class WKWBucketStreamSink(val layer: DataLayer, tracingHasFallbackLayer: Boolean)
extends WKWDataFormatHelper
with VolumeBucketReversionHelper
with ByteUtils {

def apply(bucketStream: Iterator[(BucketPosition, Array[Byte])], mags: Seq[Vec3Int])(
implicit ec: ExecutionContext): Iterator[NamedStream] = {
val (voxelType, numChannels) = WKWDataFormat.elementClassToVoxelType(layer.elementClass)
val header = WKWHeader(1, DataLayer.bucketLength, BlockType.LZ4, voxelType, numChannels)
bucketStream.flatMap {
case (bucket, data) if !isAllZero(data) =>
val filePath = wkwFilePath(bucket.toCube(bucket.bucketLength)).toString
Some(
NamedFunctionStream(
filePath,
os => Future.successful(WKWFile.write(os, header, Array(data).iterator))
))
case (bucket, data) =>
val skipBucket = if (tracingHasFallbackLayer) isRevertedBucket(data) else isAllZero(data)
if (skipBucket) {
// If the tracing has no fallback segmentation, all-zero buckets can be omitted entirely
None
} else {
val filePath = wkwFilePath(bucket.toCube(bucket.bucketLength)).toString
Some(
NamedFunctionStream(
filePath,
os => Future.successful(WKWFile.write(os, header, Array(data).iterator))
))
}
case _ => None
} ++ mags.map { mag =>
NamedFunctionStream(wkwHeaderFilePath(mag).toString,
os => Future.successful(header.writeTo(new DataOutputStream(os), isHeaderFile = true)))
}
}

}
Original file line number Diff line number Diff line change
Expand Up @@ -16,15 +16,14 @@ import com.scalableminds.webknossos.datastore.geometry.AdditionalAxisProto
import com.scalableminds.webknossos.datastore.helpers.ProtoGeometryImplicits
import com.scalableminds.webknossos.datastore.models.datasource.{DataLayer, DataSourceId, GenericDataSource}
import com.scalableminds.webknossos.datastore.models.{AdditionalCoordinate, BucketPosition}
import com.typesafe.scalalogging.LazyLogging
import play.api.libs.json.Json

import scala.concurrent.{ExecutionContext, Future}

// Creates data zip from volume tracings
class Zarr3BucketStreamSink(val layer: VolumeTracingLayer)
extends LazyLogging
with ProtoGeometryImplicits
class Zarr3BucketStreamSink(val layer: VolumeTracingLayer, tracingHasFallbackLayer: Boolean)
extends ProtoGeometryImplicits
with VolumeBucketReversionHelper
with ByteUtils {

private lazy val defaultLayerName = "volumeAnnotationData"
Expand Down Expand Up @@ -69,13 +68,20 @@ class Zarr3BucketStreamSink(val layer: VolumeTracingLayer)
dimension_names = Some(Array("c") ++ additionalAxes.map(_.name).toArray ++ Seq("x", "y", "z"))
)
bucketStream.flatMap {
case (bucket, data) if !isAllZero(data) =>
val filePath = zarrChunkFilePath(defaultLayerName, bucket)
Some(
NamedFunctionStream(
filePath,
os => Future.successful(os.write(compressor.compress(data)))
))
case (bucket, data) =>
val skipBucket = if (tracingHasFallbackLayer) isRevertedBucket(data) else isAllZero(data)
if (skipBucket) {
// If the tracing has no fallback segmentation, all-zero buckets can be omitted entirely
None
} else {
val filePath = zarrChunkFilePath(defaultLayerName, bucket)
Some(
NamedFunctionStream(
filePath,
os => Future.successful(os.write(compressor.compress(data)))
)
)
}
case _ => None
} ++ mags.map { mag =>
NamedFunctionStream.fromString(zarrHeaderFilePath(defaultLayerName, mag), Json.prettyPrint(Json.toJson(header)))
Expand Down