diff --git a/CHANGELOG.unreleased.md b/CHANGELOG.unreleased.md index 9519e7f256..bfc9a8cab4 100644 --- a/CHANGELOG.unreleased.md +++ b/CHANGELOG.unreleased.md @@ -18,6 +18,7 @@ For upgrade instructions, please check the [migration guide](MIGRATIONS.released - Added a new "Split from all neighboring segments" feature for the proofreading mode. [#7611](https://github.com/scalableminds/webknossos/pull/7611) - If storage scan is enabled, the measured used storage is now displayed in the dashboard’s dataset detail view. [#7677](https://github.com/scalableminds/webknossos/pull/7677) - Prepared support to download full stl meshes via the HTTP api. [#7587](https://github.com/scalableminds/webknossos/pull/7587) +- You can now place segment index files alongside your on-disk segmentation layers. This makes segment statistics available when viewing these segmentations and when working on volume annotations based on them. [#7437](https://github.com/scalableminds/webknossos/pull/7437) - Added an action to delete erronous, unimported datasets directly from the dashboard. [#7448](https://github.com/scalableminds/webknossos/pull/7448) - Added support for `window`, `active`, `inverted` keys from the `omero` info in the NGFF metadata. [7685](https://github.com/scalableminds/webknossos/pull/7685) - Added getSegment function to JavaScript API. Also, createSegmentGroup returns the id of the new group now. [#7694](https://github.com/scalableminds/webknossos/pull/7694) diff --git a/app/WebknossosModule.scala b/app/WebknossosModule.scala index 5ca1366379..8832d9b5a9 100644 --- a/app/WebknossosModule.scala +++ b/app/WebknossosModule.scala @@ -4,7 +4,7 @@ import controllers.InitialDataService import files.TempFileService import mail.MailchimpTicker import models.analytics.AnalyticsSessionService -import models.annotation.{AnnotationMutexService, AnnotationStore} +import models.annotation.{AnnotationMutexService, AnnotationStore, TracingDataSourceTemporaryStore} import models.dataset.{DatasetService, ThumbnailCachingService} import models.job.{JobService, WorkerLivenessService} import models.storage.UsedStorageService @@ -23,8 +23,6 @@ class WebknossosModule extends AbstractModule { bind(classOf[UserService]).asEagerSingleton() bind(classOf[TaskService]).asEagerSingleton() bind(classOf[UserDAO]).asEagerSingleton() - bind(classOf[UserExperiencesDAO]).asEagerSingleton() - bind(classOf[UserDatasetConfigurationDAO]).asEagerSingleton() bind(classOf[AnnotationStore]).asEagerSingleton() bind(classOf[AnnotationMutexService]).asEagerSingleton() bind(classOf[DatasetService]).asEagerSingleton() @@ -39,5 +37,6 @@ class WebknossosModule extends AbstractModule { bind(classOf[LokiClient]).asEagerSingleton() bind(classOf[UsedStorageService]).asEagerSingleton() bind(classOf[ThumbnailCachingService]).asEagerSingleton() + bind(classOf[TracingDataSourceTemporaryStore]).asEagerSingleton() } } diff --git a/app/controllers/AnnotationController.scala b/app/controllers/AnnotationController.scala index ce3f9cae3d..c30464ae3e 100755 --- a/app/controllers/AnnotationController.scala +++ b/app/controllers/AnnotationController.scala @@ -75,6 +75,7 @@ class AnnotationController @Inject()( analyticsService: AnalyticsService, slackNotificationService: SlackNotificationService, mailchimpClient: MailchimpClient, + tracingDataSourceTemporaryStore: TracingDataSourceTemporaryStore, conf: WkConf, rpc: RPC, sil: Silhouette[WkEnv])(implicit ec: ExecutionContext, bodyParsers: PlayBodyParsers) @@ -391,7 +392,7 @@ class AnnotationController
@Inject()( var processedCount = 0 for { tracingStore <- tracingStoreDAO.findFirst(GlobalAccessContext) ?~> "tracingStore.notFound" - client = new WKRemoteTracingStoreClient(tracingStore, null, rpc) + client = new WKRemoteTracingStoreClient(tracingStore, null, rpc, tracingDataSourceTemporaryStore) batchCount = annotationLayerBatch.length results <- Fox.serialSequenceBox(annotationLayerBatch) { annotationLayer => processedCount += 1 diff --git a/app/controllers/AnnotationIOController.scala b/app/controllers/AnnotationIOController.scala index 51ad028d6e..b14a8854fd 100755 --- a/app/controllers/AnnotationIOController.scala +++ b/app/controllers/AnnotationIOController.scala @@ -20,6 +20,7 @@ import com.scalableminds.webknossos.datastore.models.annotation.{ import com.scalableminds.webknossos.datastore.models.datasource.{ AbstractSegmentationLayer, DataLayerLike, + DataSourceLike, GenericDataSource, SegmentationLayer } @@ -122,10 +123,13 @@ class AnnotationIOController @Inject()( wkUrl) volumeLayersGrouped <- adaptVolumeTracingsToFallbackLayer(volumeLayersGroupedRaw, dataset) tracingStoreClient <- tracingStoreService.clientFor(dataset) + dataSource <- datasetService.dataSourceFor(dataset) + usableDataSource <- dataSource.toUsable.toFox mergedVolumeLayers <- mergeAndSaveVolumeLayers(volumeLayersGrouped, tracingStoreClient, - parsedFiles.otherFiles) - mergedSkeletonLayers <- mergeAndSaveSkeletonLayers(skeletonTracings, tracingStoreClient) + parsedFiles.otherFiles, + usableDataSource) + mergedSkeletonLayers <- mergeAndSaveSkeletonLayers(skeletonTracings, tracingStoreClient, usableDataSource) annotation <- annotationService.createFrom(request.identity, dataset, mergedSkeletonLayers ::: mergedVolumeLayers, @@ -144,7 +148,8 @@ class AnnotationIOController @Inject()( private def mergeAndSaveVolumeLayers(volumeLayersGrouped: Seq[List[UploadedVolumeLayer]], client: WKRemoteTracingStoreClient, - otherFiles: Map[String, File]): Fox[List[AnnotationLayer]] = + otherFiles: Map[String, File], + dataSource: DataSourceLike): Fox[List[AnnotationLayer]] = if (volumeLayersGrouped.isEmpty) Fox.successful(List()) else if (volumeLayersGrouped.length > 1 && volumeLayersGrouped.exists(_.length > 1)) @@ -155,7 +160,8 @@ class AnnotationIOController @Inject()( val idx = volumeLayerWithIndex._2 for { savedTracingId <- client.saveVolumeTracing(uploadedVolumeLayer.tracing, - uploadedVolumeLayer.getDataZipFrom(otherFiles)) + uploadedVolumeLayer.getDataZipFrom(otherFiles), + dataSource = Some(dataSource)) } yield AnnotationLayer( savedTracingId, @@ -169,6 +175,7 @@ class AnnotationIOController @Inject()( for { mergedTracingId <- client.mergeVolumeTracingsByContents( VolumeTracings(uploadedVolumeLayersFlat.map(v => VolumeTracingOpt(Some(v.tracing)))), + dataSource, uploadedVolumeLayersFlat.map(v => v.getDataZipFrom(otherFiles)), persistTracing = true ) @@ -183,7 +190,8 @@ class AnnotationIOController @Inject()( } private def mergeAndSaveSkeletonLayers(skeletonTracings: List[SkeletonTracing], - tracingStoreClient: WKRemoteTracingStoreClient): Fox[List[AnnotationLayer]] = + tracingStoreClient: WKRemoteTracingStoreClient, + dataSource: DataSourceLike): Fox[List[AnnotationLayer]] = if (skeletonTracings.isEmpty) Fox.successful(List()) else { diff --git a/app/controllers/TaskController.scala b/app/controllers/TaskController.scala index f249782ef5..d5e48d8919 100755 --- a/app/controllers/TaskController.scala +++ b/app/controllers/TaskController.scala @@ -94,11 +94,13 @@ class TaskController @Inject()(taskCreationService: 
TaskCreationService, .addVolumeFallbackBoundingBoxes(extractedTracingBoxesRaw, request.identity._organization) fullParams: List[Box[TaskParameters]] = taskCreationService.buildFullParamsFromFiles(params, extractedTracingBoxes) - (skeletonBases, volumeBases) <- taskCreationService.fillInMissingTracings(extractedTracingBoxes.map(_.skeleton), - extractedTracingBoxes.map(_.volume), - fullParams, - taskType, - request.identity._organization) + (skeletonBases, volumeBases) <- taskCreationService.fillInMissingTracings( + extractedTracingBoxes.map(_.skeleton), + extractedTracingBoxes.map(_.volume), + fullParams, + taskType, + request.identity._organization + ) fullParamsWithTracings = taskCreationService.combineParamsWithTracings(fullParams, skeletonBases, volumeBases) result <- taskCreationService.createTasks(fullParamsWithTracings, request.identity) diff --git a/app/controllers/WKRemoteTracingStoreController.scala b/app/controllers/WKRemoteTracingStoreController.scala index a40e7a8640..b0e97c2f87 100644 --- a/app/controllers/WKRemoteTracingStoreController.scala +++ b/app/controllers/WKRemoteTracingStoreController.scala @@ -14,6 +14,7 @@ import models.annotation.{ AnnotationDAO, AnnotationInformationProvider, AnnotationLayerDAO, + TracingDataSourceTemporaryStore, TracingStoreService } import models.dataset.{DatasetDAO, DatasetService} @@ -28,19 +29,21 @@ import utils.WkConf import scala.concurrent.ExecutionContext -class WKRemoteTracingStoreController @Inject()( - tracingStoreService: TracingStoreService, - wkSilhouetteEnvironment: WkSilhouetteEnvironment, - timeSpanService: TimeSpanService, - datasetService: DatasetService, - organizationDAO: OrganizationDAO, - userDAO: UserDAO, - annotationInformationProvider: AnnotationInformationProvider, - analyticsService: AnalyticsService, - datasetDAO: DatasetDAO, - annotationDAO: AnnotationDAO, - annotationLayerDAO: AnnotationLayerDAO, - wkConf: WkConf)(implicit ec: ExecutionContext, playBodyParsers: PlayBodyParsers) +class WKRemoteTracingStoreController @Inject()(tracingStoreService: TracingStoreService, + wkSilhouetteEnvironment: WkSilhouetteEnvironment, + timeSpanService: TimeSpanService, + datasetService: DatasetService, + organizationDAO: OrganizationDAO, + userDAO: UserDAO, + annotationInformationProvider: AnnotationInformationProvider, + analyticsService: AnalyticsService, + datasetDAO: DatasetDAO, + annotationDAO: AnnotationDAO, + annotationLayerDAO: AnnotationLayerDAO, + wkConf: WkConf, + tracingDataSourceTemporaryStore: TracingDataSourceTemporaryStore)( + implicit ec: ExecutionContext, + playBodyParsers: PlayBodyParsers) extends Controller with FoxImplicits { @@ -86,11 +89,15 @@ class WKRemoteTracingStoreController @Inject()( Action.async { implicit request => tracingStoreService.validateAccess(name, key) { _ => implicit val ctx: DBAccessContext = GlobalAccessContext - for { - annotation <- annotationInformationProvider.annotationForTracing(tracingId) ?~> s"No annotation for tracing $tracingId" - dataset <- datasetDAO.findOne(annotation._dataset) - dataSource <- datasetService.dataSourceFor(dataset) - } yield Ok(Json.toJson(dataSource)) + tracingDataSourceTemporaryStore.find(tracingId) match { + case Some(dataSource) => Fox.successful(Ok(Json.toJson(dataSource))) + case None => + for { + annotation <- annotationInformationProvider.annotationForTracing(tracingId) ?~> s"No annotation for tracing $tracingId" + dataset <- datasetDAO.findOne(annotation._dataset) + dataSource <- datasetService.dataSourceFor(dataset) + } yield 
Ok(Json.toJson(dataSource)) + } } } diff --git a/app/models/annotation/AnnotationService.scala b/app/models/annotation/AnnotationService.scala index c4cdafb56e..27acf214a7 100755 --- a/app/models/annotation/AnnotationService.scala +++ b/app/models/annotation/AnnotationService.scala @@ -30,6 +30,7 @@ import com.scalableminds.webknossos.datastore.models.datasource.{ DataSourceLike => DataSource, SegmentationLayerLike => SegmentationLayer } +import com.scalableminds.webknossos.datastore.rpc.RPC import com.scalableminds.webknossos.tracingstore.tracings._ import com.scalableminds.webknossos.tracingstore.tracings.volume.VolumeDataZipFormat.VolumeDataZipFormat import com.scalableminds.webknossos.tracingstore.tracings.volume.{ @@ -103,10 +104,11 @@ class AnnotationService @Inject()( dataStoreDAO: DataStoreDAO, projectDAO: ProjectDAO, organizationDAO: OrganizationDAO, - annotationRestrictionDefults: AnnotationRestrictionDefaults, + annotationRestrictionDefaults: AnnotationRestrictionDefaults, nmlWriter: NmlWriter, temporaryFileCreator: TemporaryFileCreator, conf: WkConf, + rpc: RPC )(implicit ec: ExecutionContext, val materializer: Materializer) extends BoxImplicits with FoxImplicits @@ -136,6 +138,7 @@ class AnnotationService @Inject()( private def createVolumeTracing( dataSource: DataSource, datasetOrganizationName: String, + datasetDataStore: DataStore, fallbackLayer: Option[SegmentationLayer], boundingBox: Option[BoundingBox] = None, startPosition: Option[Vec3Int] = None, @@ -149,6 +152,12 @@ class AnnotationService @Inject()( fallbackLayer.map(_.additionalAxes).getOrElse(dataSource.additionalAxesUnion) for { _ <- bool2Fox(resolutionsRestricted.nonEmpty) ?~> "annotation.volume.resolutionRestrictionsTooTight" + remoteDatastoreClient = new WKRemoteDataStoreClient(datasetDataStore, rpc) + fallbackLayerHasSegmentIndex <- fallbackLayer match { + case Some(layer) => + remoteDatastoreClient.hasSegmentIndexFile(datasetOrganizationName, dataSource.id.name, layer.name) + case None => Fox.successful(false) + } } yield VolumeTracing( None, @@ -166,7 +175,7 @@ class AnnotationService @Inject()( organizationName = Some(datasetOrganizationName), mappingName = mappingName, resolutions = resolutionsRestricted.map(vec3IntToProto), - hasSegmentIndex = Some(fallbackLayer.isEmpty), + hasSegmentIndex = Some(fallbackLayer.isEmpty || fallbackLayerHasSegmentIndex), additionalAxes = AdditionalAxis.toProto(additionalCoordinates) ) } @@ -233,9 +242,9 @@ class AnnotationService @Inject()( fallbackLayer.elementClass)) ?~> "annotation.volume.largestSegmentIdExceedsRange" } yield fallbackLayer - def createAndSaveAnnotationLayer( - annotationLayerParameters: AnnotationLayerParameters, - oldPrecedenceLayerProperties: Option[RedundantTracingProperties]): Fox[AnnotationLayer] = + def createAndSaveAnnotationLayer(annotationLayerParameters: AnnotationLayerParameters, + oldPrecedenceLayerProperties: Option[RedundantTracingProperties], + dataStore: DataStore): Fox[AnnotationLayer] = for { client <- tracingStoreService.clientFor(dataset) tracingIdAndName <- annotationLayerParameters.typ match { @@ -269,6 +278,7 @@ class AnnotationService @Inject()( volumeTracing <- createVolumeTracing( dataSource, datasetOrganizationName, + dataStore, fallbackLayer, resolutionRestrictions = annotationLayerParameters.resolutionRestrictions.getOrElse(ResolutionRestrictions.empty), @@ -283,7 +293,7 @@ class AnnotationService @Inject()( editPositionAdditionalCoordinates = p.editPositionAdditionalCoordinates ) }.getOrElse(volumeTracing) - volumeTracingId 
<- client.saveVolumeTracing(volumeTracingAdapted) + volumeTracingId <- client.saveVolumeTracing(volumeTracingAdapted, dataSource = Some(dataSource)) name = annotationLayerParameters.name .orElse(autoFallbackLayerName) .getOrElse(AnnotationLayer.defaultNameForType(annotationLayerParameters.typ)) @@ -346,9 +356,10 @@ class AnnotationService @Inject()( All of this is skipped if existingAnnotationLayers is empty. */ oldPrecedenceLayer <- fetchOldPrecedenceLayer + dataStore <- dataStoreDAO.findOneByName(dataset._dataStore.trim) ?~> "dataStore.notFoundForDataset" precedenceProperties = oldPrecedenceLayer.map(extractPrecedenceProperties) newAnnotationLayers <- Fox.serialCombined(allAnnotationLayerParameters)(p => - createAndSaveAnnotationLayer(p, precedenceProperties)) + createAndSaveAnnotationLayer(p, precedenceProperties, dataStore)) } yield newAnnotationLayers } @@ -555,7 +566,7 @@ class AnnotationService @Inject()( dataset <- datasetDAO.findOneByNameAndOrganization(datasetName, organizationId) ?~> Messages("dataset.notFound", datasetName) dataSource <- datasetService.dataSourceFor(dataset).flatMap(_.toUsable) - + dataStore <- dataStoreDAO.findOneByName(dataset._dataStore.trim) fallbackLayer = if (volumeShowFallbackLayer) { dataSource.dataLayers.flatMap { case layer: SegmentationLayer => Some(layer) @@ -567,6 +578,7 @@ class AnnotationService @Inject()( volumeTracing <- createVolumeTracing( dataSource, organization.name, + dataStore, fallbackLayer = fallbackLayer, boundingBox = boundingBox.flatMap { box => if (box.isEmpty) None else Some(box) @@ -875,7 +887,7 @@ class AnnotationService @Inject()( userJson <- userJsonForAnnotation(annotation._user) settings <- settingsFor(annotation) restrictionsJs <- AnnotationRestrictions.writeAsJson( - restrictionsOpt.getOrElse(annotationRestrictionDefults.defaultsFor(annotation)), + restrictionsOpt.getOrElse(annotationRestrictionDefaults.defaultsFor(annotation)), requestingUser) dataStore <- dataStoreDAO.findOneByName(dataset._dataStore.trim) ?~> "datastore.notFound" dataStoreJs <- dataStoreService.publicWrites(dataStore) diff --git a/app/models/annotation/AnnotationUploadService.scala b/app/models/annotation/AnnotationUploadService.scala index caec03597d..f83c4b0443 100644 --- a/app/models/annotation/AnnotationUploadService.scala +++ b/app/models/annotation/AnnotationUploadService.scala @@ -1,29 +1,46 @@ package models.annotation +import com.scalableminds.util.accesscontext.GlobalAccessContext + import java.io.{File, FileInputStream, InputStream} import java.nio.file.{Files, Path, StandardCopyOption} import com.scalableminds.util.io.ZipIO +import com.scalableminds.util.tools.Fox import com.scalableminds.webknossos.datastore.SkeletonTracing.{SkeletonTracing, TreeGroup} import com.scalableminds.webknossos.datastore.VolumeTracing.{SegmentGroup, VolumeTracing} +import com.scalableminds.webknossos.datastore.rpc.RPC import com.typesafe.scalalogging.LazyLogging import files.TempFileService import javax.inject.Inject import models.annotation.nml.NmlResults._ import models.annotation.nml.{NmlParser, NmlResults} +import models.dataset.{DatasetDAO, DatasetService, WKRemoteDataStoreClient} +import models.organization.OrganizationDAO import net.liftweb.common.{Box, Empty, Failure, Full} import net.liftweb.common.Box.tryo import play.api.i18n.MessagesProvider +import scala.concurrent.ExecutionContext +import scala.concurrent.duration.DurationInt + case class UploadedVolumeLayer(tracing: VolumeTracing, dataZipLocation: String, name: Option[String]) { def 
getDataZipFrom(otherFiles: Map[String, File]): Option[File] = otherFiles.get(dataZipLocation) } -class AnnotationUploadService @Inject()(tempFileService: TempFileService) extends LazyLogging { - - private def extractFromNml(file: File, name: String, overwritingDatasetName: Option[String], isTaskUpload: Boolean)( - implicit m: MessagesProvider): NmlParseResult = +class AnnotationUploadService @Inject()(tempFileService: TempFileService, + datasetService: DatasetService, + datasetDAO: DatasetDAO, + organizationDAO: OrganizationDAO, + rpc: RPC) + extends LazyLogging { + + private def extractFromNmlFile( + file: File, + name: String, + overwritingDatasetName: Option[String], + isTaskUpload: Boolean)(implicit m: MessagesProvider, ec: ExecutionContext): NmlParseResult = extractFromNml(new FileInputStream(file), name, overwritingDatasetName, isTaskUpload) private def formatChain(chain: Box[Failure]): String = chain match { @@ -32,12 +49,18 @@ class AnnotationUploadService @Inject()(tempFileService: TempFileService) extend case _ => "" } - private def extractFromNml(inputStream: InputStream, - name: String, - overwritingDatasetName: Option[String], - isTaskUpload: Boolean, - basePath: Option[String] = None)(implicit m: MessagesProvider): NmlParseResult = - NmlParser.parse(name, inputStream, overwritingDatasetName, isTaskUpload, basePath) match { + def extractFromNml( + inputStream: InputStream, + name: String, + overwritingDatasetName: Option[String], + isTaskUpload: Boolean, + basePath: Option[String] = None)(implicit m: MessagesProvider, ec: ExecutionContext): NmlParseResult = + NmlParser.parse(name, + inputStream, + overwritingDatasetName, + isTaskUpload, + basePath, + (a, b) => getRemoteDatastoreClientForDatasetNameAndOrg(a, b)) match { case Full((skeletonTracing, uploadedVolumeLayers, description, wkUrl)) => NmlParseSuccess(name, skeletonTracing, uploadedVolumeLayers, description, wkUrl) case Failure(msg, _, chain) => NmlParseFailure(name, msg + chain.map(_ => formatChain(chain)).getOrElse("")) @@ -48,7 +71,7 @@ class AnnotationUploadService @Inject()(tempFileService: TempFileService) extend zipFileName: Option[String] = None, useZipName: Boolean, overwritingDatasetName: Option[String], - isTaskUpload: Boolean)(implicit m: MessagesProvider): MultiNmlParseResult = { + isTaskUpload: Boolean)(implicit m: MessagesProvider, ec: ExecutionContext): MultiNmlParseResult = { val name = zipFileName getOrElse file.getName var otherFiles = Map.empty[String, File] var parseResults = List.empty[NmlParseResult] @@ -131,10 +154,11 @@ class AnnotationUploadService @Inject()(tempFileService: TempFileService) extend } } - def extractFromFiles(files: Seq[(File, String)], - useZipName: Boolean, - overwritingDatasetName: Option[String] = None, - isTaskUpload: Boolean = false)(implicit m: MessagesProvider): MultiNmlParseResult = + def extractFromFiles( + files: Seq[(File, String)], + useZipName: Boolean, + overwritingDatasetName: Option[String] = None, + isTaskUpload: Boolean = false)(implicit m: MessagesProvider, ec: ExecutionContext): MultiNmlParseResult = files.foldLeft(NmlResults.MultiNmlParseResult()) { case (acc, (file, name)) => if (name.endsWith(".zip")) @@ -149,23 +173,36 @@ class AnnotationUploadService @Inject()(tempFileService: TempFileService) extend overwritingDatasetName, isTaskUpload )) - else acc.combineWith(extractFromFile(file, name, useZipName, overwritingDatasetName, isTaskUpload)) + else + acc.combineWith(extractFromFile(file, name, useZipName, overwritingDatasetName, isTaskUpload)) case _ => 
acc - } else acc.combineWith(extractFromFile(file, name, useZipName, overwritingDatasetName, isTaskUpload)) + } else + acc.combineWith(extractFromFile(file, name, useZipName, overwritingDatasetName, isTaskUpload)) } - private def extractFromFile(file: File, - fileName: String, - useZipName: Boolean, - overwritingDatasetName: Option[String], - isTaskUpload: Boolean)(implicit m: MessagesProvider): MultiNmlParseResult = + def extractFromFile(file: File, + fileName: String, + useZipName: Boolean, + overwritingDatasetName: Option[String], + isTaskUpload: Boolean)(implicit m: MessagesProvider, ec: ExecutionContext): MultiNmlParseResult = if (fileName.endsWith(".zip")) { logger.trace("Extracting from Zip file") extractFromZip(file, Some(fileName), useZipName, overwritingDatasetName, isTaskUpload) } else { logger.trace("Extracting from Nml file") - val parseResult = extractFromNml(file, fileName, overwritingDatasetName, isTaskUpload) + val parseResult = extractFromNmlFile(file, fileName, overwritingDatasetName, isTaskUpload) MultiNmlParseResult(List(parseResult), Map.empty) } + def getRemoteDatastoreClientForDatasetNameAndOrg(datasetName: String, organizationName: String)( + implicit ec: ExecutionContext): Option[WKRemoteDataStoreClient] = { + val fox = for { + _ <- Fox.successful(()) + organizationObjectId <- organizationDAO.findIdByName(organizationName)(GlobalAccessContext) + dataset <- datasetDAO.findOneByNameAndOrganization(datasetName, organizationObjectId)(GlobalAccessContext) + dataStore <- datasetService.dataStoreFor(dataset)(GlobalAccessContext) + } yield new WKRemoteDataStoreClient(dataStore, rpc) + fox.await("No fox context in AnnotationUploadService, see #7551", 1 minute).toOption + } + } diff --git a/app/models/annotation/TracingDataSourceTemporaryStore.scala b/app/models/annotation/TracingDataSourceTemporaryStore.scala new file mode 100644 index 0000000000..4140bf2226 --- /dev/null +++ b/app/models/annotation/TracingDataSourceTemporaryStore.scala @@ -0,0 +1,24 @@ +package models.annotation + +import com.scalableminds.webknossos.datastore.models.datasource.DataSourceLike +import com.scalableminds.webknossos.datastore.storage.TemporaryStore + +import javax.inject.Inject +import scala.concurrent.ExecutionContext +import scala.concurrent.duration.DurationInt + +/** + * Used to store a mapping from tracing id to datasource. This makes it possible for WK to answer a + * /tracingstores/:name/dataSource request before an annotation is created. This happens when uploading an annotation. 
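+ * For example, WKRemoteTracingStoreClient stores the dataSource here right after a volume tracing is saved or merged, so that the dataSource request for the fresh tracing id can be answered from this store before falling back to the annotation-based lookup.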
+ */ +class TracingDataSourceTemporaryStore @Inject()(temporaryStore: TemporaryStore[String, DataSourceLike]) { + + private val timeOut = 7 * 24 hours + + def store(tracingId: String, dataSource: DataSourceLike)(implicit ec: ExecutionContext): Unit = + temporaryStore.insert(tracingId, dataSource, Some(timeOut)) + + def find(tracingId: String): Option[DataSourceLike] = + temporaryStore.find(tracingId) + +} diff --git a/app/models/annotation/TracingStore.scala b/app/models/annotation/TracingStore.scala index 608662d7ab..d1a24f1c6c 100644 --- a/app/models/annotation/TracingStore.scala +++ b/app/models/annotation/TracingStore.scala @@ -29,7 +29,10 @@ object TracingStore { TracingStore(name, url, publicUrl, "") } -class TracingStoreService @Inject()(tracingStoreDAO: TracingStoreDAO, rpc: RPC)(implicit ec: ExecutionContext) +class TracingStoreService @Inject()( + tracingStoreDAO: TracingStoreDAO, + rpc: RPC, + tracingDataSourceTemporaryStore: TracingDataSourceTemporaryStore)(implicit ec: ExecutionContext) extends FoxImplicits with LazyLogging with Results { @@ -54,7 +57,7 @@ class TracingStoreService @Inject()(tracingStoreDAO: TracingStoreDAO, rpc: RPC)( def clientFor(dataset: Dataset)(implicit ctx: DBAccessContext): Fox[WKRemoteTracingStoreClient] = for { tracingStore <- tracingStoreDAO.findFirst ?~> "tracingStore.notFound" - } yield new WKRemoteTracingStoreClient(tracingStore, dataset, rpc) + } yield new WKRemoteTracingStoreClient(tracingStore, dataset, rpc, tracingDataSourceTemporaryStore) } class TracingStoreDAO @Inject()(sqlClient: SqlClient)(implicit ec: ExecutionContext) diff --git a/app/models/annotation/WKRemoteTracingStoreClient.scala b/app/models/annotation/WKRemoteTracingStoreClient.scala index 9d515a9c69..6073166d17 100644 --- a/app/models/annotation/WKRemoteTracingStoreClient.scala +++ b/app/models/annotation/WKRemoteTracingStoreClient.scala @@ -13,6 +13,7 @@ import com.scalableminds.webknossos.datastore.models.annotation.{ AnnotationLayerType, FetchedAnnotationLayer } +import com.scalableminds.webknossos.datastore.models.datasource.DataSourceLike import com.scalableminds.webknossos.datastore.rpc.RPC import com.scalableminds.webknossos.tracingstore.tracings.TracingSelector import com.scalableminds.webknossos.tracingstore.tracings.volume.ResolutionRestrictions @@ -24,7 +25,11 @@ import net.liftweb.common.Box import scala.concurrent.ExecutionContext -class WKRemoteTracingStoreClient(tracingStore: TracingStore, dataset: Dataset, rpc: RPC)(implicit ec: ExecutionContext) +class WKRemoteTracingStoreClient( + tracingStore: TracingStore, + dataset: Dataset, + rpc: RPC, + tracingDataSourceTemporaryStore: TracingDataSourceTemporaryStore)(implicit ec: ExecutionContext) extends LazyLogging { def baseInfo = s" Dataset: ${dataset.name} Tracingstore: ${tracingStore.url}" @@ -145,6 +150,7 @@ class WKRemoteTracingStoreClient(tracingStore: TracingStore, dataset: Dataset, r } def mergeVolumeTracingsByContents(tracings: VolumeTracings, + dataSource: DataSourceLike, initialData: List[Option[File]], persistTracing: Boolean): Fox[String] = { logger.debug("Called to merge VolumeTracings by contents." 
+ baseInfo) @@ -154,6 +160,7 @@ class WKRemoteTracingStoreClient(tracingStore: TracingStore, dataset: Dataset, r .addQueryString("persist" -> persistTracing.toString) .postProtoWithJsonResponse[VolumeTracings, String](tracings) packedVolumeDataZips = packVolumeDataZips(initialData.flatten) + _ = tracingDataSourceTemporaryStore.store(tracingId, dataSource) _ <- rpc(s"${tracingStore.url}/tracings/volume/$tracingId/initialDataMultiple").withLongTimeout .addQueryString("token" -> RpcTokenHolder.webknossosToken) .post(packedVolumeDataZips) @@ -165,12 +172,14 @@ class WKRemoteTracingStoreClient(tracingStore: TracingStore, dataset: Dataset, r def saveVolumeTracing(tracing: VolumeTracing, initialData: Option[File] = None, - resolutionRestrictions: ResolutionRestrictions = ResolutionRestrictions.empty): Fox[String] = { + resolutionRestrictions: ResolutionRestrictions = ResolutionRestrictions.empty, + dataSource: Option[DataSourceLike] = None): Fox[String] = { logger.debug("Called to create VolumeTracing." + baseInfo) for { tracingId <- rpc(s"${tracingStore.url}/tracings/volume/save") .addQueryString("token" -> RpcTokenHolder.webknossosToken) .postProtoWithJsonResponse[VolumeTracing, String](tracing) + _ = dataSource.foreach(d => tracingDataSourceTemporaryStore.store(tracingId, d)) _ <- initialData match { case Some(file) => rpc(s"${tracingStore.url}/tracings/volume/$tracingId/initialData").withLongTimeout diff --git a/app/models/annotation/nml/NmlParser.scala b/app/models/annotation/nml/NmlParser.scala index 932454876c..5c4e605879 100755 --- a/app/models/annotation/nml/NmlParser.scala +++ b/app/models/annotation/nml/NmlParser.scala @@ -2,6 +2,7 @@ package models.annotation.nml import com.scalableminds.util.geometry.{BoundingBox, Vec3Double, Vec3Int} import com.scalableminds.util.tools.ExtendedTypes.{ExtendedDouble, ExtendedString} +import com.scalableminds.util.tools.Fox import com.scalableminds.webknossos.datastore.SkeletonTracing._ import com.scalableminds.webknossos.datastore.VolumeTracing.{Segment, SegmentGroup, VolumeTracing} import com.scalableminds.webknossos.datastore.geometry.{ @@ -17,15 +18,17 @@ import com.scalableminds.webknossos.datastore.models.datasource.ElementClass import com.scalableminds.webknossos.tracingstore.tracings.ColorGenerator import com.scalableminds.webknossos.tracingstore.tracings.skeleton.updating.TreeType import com.scalableminds.webknossos.tracingstore.tracings.skeleton.{MultiComponentTreeSplitter, TreeValidator} -import com.scalableminds.webknossos.tracingstore.tracings.volume.VolumeSegmentIndexService import com.typesafe.scalalogging.LazyLogging import models.annotation.UploadedVolumeLayer +import models.dataset.WKRemoteDataStoreClient import net.liftweb.common.Box._ import net.liftweb.common.{Box, Empty, Failure, Full} import play.api.i18n.{Messages, MessagesProvider} import java.io.InputStream import scala.collection.{immutable, mutable} +import scala.concurrent.ExecutionContext +import scala.concurrent.duration._ import scala.xml.{Attribute, NodeSeq, XML, Node => XMLNode} object NmlParser extends LazyLogging with ProtoGeometryImplicits with ColorGenerator { @@ -42,8 +45,10 @@ object NmlParser extends LazyLogging with ProtoGeometryImplicits with ColorGener nmlInputStream: InputStream, overwritingDatasetName: Option[String], isTaskUpload: Boolean, - basePath: Option[String] = None)( - implicit m: MessagesProvider): Box[(Option[SkeletonTracing], List[UploadedVolumeLayer], String, Option[String])] = + basePath: Option[String] = None, + getRemoteDataStoreClient: 
(String, String) => Option[WKRemoteDataStoreClient])( + implicit m: MessagesProvider, + ec: ExecutionContext): Box[(Option[SkeletonTracing], List[UploadedVolumeLayer], String, Option[String])] = try { val data = XML.load(nmlInputStream) for { @@ -59,12 +64,25 @@ object NmlParser extends LazyLogging with ProtoGeometryImplicits with ColorGener treeGroupsAfterSplit = treesAndGroupsAfterSplitting._2 _ <- TreeValidator.validateTrees(treesSplit, treeGroupsAfterSplit, branchPoints, comments) additionalAxisProtos <- parseAdditionalAxes(parameters \ "additionalAxes") + datasetName = overwritingDatasetName.getOrElse(parseDatasetName(parameters \ "experiment")) + organizationName = if (overwritingDatasetName.isDefined) None + else parseOrganizationName(parameters \ "experiment") + remoteDataStoreClientOpt = getRemoteDataStoreClient(datasetName, organizationName.getOrElse("")) + canHaveSegmentIndexBools <- Fox + .combined( + volumes + .map( + v => + canHaveSegmentIndex(remoteDataStoreClientOpt, + organizationName.getOrElse(""), + datasetName, + v.fallbackLayerName)) + .toList) + .await("NMLParser/parse was changed to return Fox in #7437. Removing this await is tracked in #7551", + 5 seconds) } yield { - val datasetName = overwritingDatasetName.getOrElse(parseDatasetName(parameters \ "experiment")) val description = parseDescription(parameters \ "experiment") val wkUrl = parseWkUrl(parameters \ "experiment") - val organizationName = - if (overwritingDatasetName.isDefined) None else parseOrganizationName(parameters \ "experiment") val activeNodeId = parseActiveNode(parameters \ "activeNode") val (editPosition, editPositionAdditionalCoordinates) = parseEditPosition(parameters \ "editPosition").getOrElse((SkeletonTracingDefaults.editPosition, Seq())) @@ -81,32 +99,33 @@ object NmlParser extends LazyLogging with ProtoGeometryImplicits with ColorGener logger.debug(s"Parsed NML file. 
Trees: ${treesSplit.size}, Volumes: ${volumes.size}") val volumeLayers: List[UploadedVolumeLayer] = - volumes.toList.map { v => - UploadedVolumeLayer( - VolumeTracing( - None, - boundingBoxToProto(taskBoundingBox.getOrElse(BoundingBox.empty)), - timestamp, - datasetName, - editPosition, - editRotation, - ElementClass.uint32, - v.fallbackLayerName, - v.largestSegmentId, - 0, - zoomLevel, - None, - userBoundingBoxes, - organizationName, - segments = v.segments, - segmentGroups = v.segmentGroups, - hasSegmentIndex = VolumeSegmentIndexService.canHaveSegmentIndexOpt(v.fallbackLayerName), - editPositionAdditionalCoordinates = editPositionAdditionalCoordinates, - additionalAxes = additionalAxisProtos - ), - basePath.getOrElse("") + v.dataZipPath, - v.name, - ) + volumes.zip(canHaveSegmentIndexBools).toList.map { + case (v, canHaveSegmentIndexBool) => + UploadedVolumeLayer( + VolumeTracing( + None, + boundingBoxToProto(taskBoundingBox.getOrElse(BoundingBox.empty)), + timestamp, + datasetName, + editPosition, + editRotation, + ElementClass.uint32, + v.fallbackLayerName, + v.largestSegmentId, + 0, + zoomLevel, + None, + userBoundingBoxes, + organizationName, + segments = v.segments, + segmentGroups = v.segmentGroups, + hasSegmentIndex = Some(canHaveSegmentIndexBool), + editPositionAdditionalCoordinates = editPositionAdditionalCoordinates, + additionalAxes = additionalAxisProtos + ), + basePath.getOrElse("") + v.dataZipPath, + v.name, + ) } val skeletonTracingOpt: Option[SkeletonTracing] = @@ -531,4 +550,19 @@ object NmlParser extends LazyLogging with ProtoGeometryImplicits with ColorGener }.toSeq } + private def canHaveSegmentIndex(remoteDatastoreClient: Option[WKRemoteDataStoreClient], + organizationName: String, + datasetName: String, + fallbackLayerName: Option[String])(implicit ec: ExecutionContext): Fox[Boolean] = + for { + canHaveSegmentIndex <- fallbackLayerName match { + case Some(layerName) => + remoteDatastoreClient match { + case Some(rdc) => rdc.hasSegmentIndexFile(organizationName, datasetName, layerName) + case None => Fox.successful(false) + } + case None => Fox.successful(true) + } + } yield canHaveSegmentIndex + } diff --git a/app/models/dataset/DataStore.scala b/app/models/dataset/DataStore.scala index 78e596728e..2c90021737 100644 --- a/app/models/dataset/DataStore.scala +++ b/app/models/dataset/DataStore.scala @@ -83,7 +83,6 @@ class DataStoreService @Inject()(dataStoreDAO: DataStoreDAO, jobService: JobServ _ <- bool2Fox(key == dataStore.key) result <- block(dataStore) } yield result).getOrElse(Forbidden(Json.obj("granted" -> false, "msg" -> Messages("dataStore.notFound")))) - } class DataStoreDAO @Inject()(sqlClient: SqlClient)(implicit ec: ExecutionContext) diff --git a/app/models/dataset/WKRemoteDataStoreClient.scala b/app/models/dataset/WKRemoteDataStoreClient.scala index 397bac91d0..9a1a904f57 100644 --- a/app/models/dataset/WKRemoteDataStoreClient.scala +++ b/app/models/dataset/WKRemoteDataStoreClient.scala @@ -82,4 +82,13 @@ class WKRemoteDataStoreClient(dataStore: DataStore, rpc: RPC) extends LazyLoggin .put(dataSource) } yield () + def hasSegmentIndexFile(organizationName: String, datasetName: String, layerName: String): Fox[Boolean] = + for { + hasIndexFile <- rpc( + s"${dataStore.url}/data/datasets/$organizationName/$datasetName/layers/$layerName/hasSegmentIndex") + .addQueryString("token" -> RpcTokenHolder.webknossosToken) + .silent + .getWithJsonResponse[Boolean] + } yield hasIndexFile + } diff --git a/frontend/javascripts/admin/admin_rest_api.ts 
b/frontend/javascripts/admin/admin_rest_api.ts index 3202837e35..e1b402498c 100644 --- a/frontend/javascripts/admin/admin_rest_api.ts +++ b/frontend/javascripts/admin/admin_rest_api.ts @@ -953,39 +953,46 @@ export function getNewestVersionForTracing( ); } +export function hasSegmentIndexInDataStore( + dataStoreUrl: string, + dataSetName: string, + dataLayerName: string, + organizationName: string, +) { + return doWithToken((token) => + Request.receiveJSON( + `${dataStoreUrl}/data/datasets/${organizationName}/${dataSetName}/layers/${dataLayerName}/hasSegmentIndex?token=${token}`, + ), + ); +} + export function getSegmentVolumes( - tracingStoreUrl: string, - tracingId: string, + requestUrl: string, mag: Vector3, segmentIds: Array<number>, additionalCoordinates: AdditionalCoordinate[] | undefined | null, + mappingName: string | null | undefined, ): Promise<number[]> { return doWithToken((token) => - Request.sendJSONReceiveJSON( - `${tracingStoreUrl}/tracings/volume/${tracingId}/segmentStatistics/volume?token=${token}`, - { - data: { additionalCoordinates, mag, segmentIds }, - method: "POST", - }, - ), + Request.sendJSONReceiveJSON(`${requestUrl}/segmentStatistics/volume?token=${token}`, { + data: { additionalCoordinates, mag, segmentIds, mappingName }, + method: "POST", + }), ); } export function getSegmentBoundingBoxes( - tracingStoreUrl: string, - tracingId: string, + requestUrl: string, mag: Vector3, segmentIds: Array<number>, additionalCoordinates: AdditionalCoordinate[] | undefined | null, + mappingName: string | null | undefined, ): Promise<Array<BoundingBoxObject>> { return doWithToken((token) => - Request.sendJSONReceiveJSON( - `${tracingStoreUrl}/tracings/volume/${tracingId}/segmentStatistics/boundingBox?token=${token}`, - { - data: { additionalCoordinates, mag, segmentIds }, - method: "POST", - }, - ), + Request.sendJSONReceiveJSON(`${requestUrl}/segmentStatistics/boundingBox?token=${token}`, { + data: { additionalCoordinates, mag, segmentIds, mappingName }, + method: "POST", + }), ); } diff --git a/frontend/javascripts/oxalis/default_state.ts b/frontend/javascripts/oxalis/default_state.ts index 50c9b5e5d0..e2ef6fa4fd 100644 --- a/frontend/javascripts/oxalis/default_state.ts +++ b/frontend/javascripts/oxalis/default_state.ts @@ -177,8 +177,8 @@ const defaultState: OxalisState = { }, isBusyInfo: { skeleton: false, - volume: false, - mapping: false, + volumes: {}, + mappings: {}, }, lastSaveTimestamp: { skeleton: 0, diff --git a/frontend/javascripts/oxalis/model/accessors/dataset_accessor.ts b/frontend/javascripts/oxalis/model/accessors/dataset_accessor.ts index 9e2def1679..d03929fb12 100644 --- a/frontend/javascripts/oxalis/model/accessors/dataset_accessor.ts +++ b/frontend/javascripts/oxalis/model/accessors/dataset_accessor.ts @@ -827,3 +827,16 @@ export function getEffectiveIntensityRange( return layerConfiguration.intensityRange || defaultIntensityRange; } + +// Note that `hasSegmentIndex` needs to be loaded first (otherwise, the returned +// value will be undefined). Dispatch an ensureSegmentIndexIsLoadedAction to make +// sure this info is fetched.
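+// A minimal usage sketch (hypothetical call site, mirroring the pattern used in context_menu.tsx): +//   Store.dispatch(ensureSegmentIndexIsLoadedAction(layerName)); +//   const hasIndex = getMaybeSegmentIndexAvailability(Store.getState().dataset, layerName);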
+export function getMaybeSegmentIndexAvailability( + dataset: APIDataset, + layerName: string | null | undefined, +) { + if (layerName == null) { + return false; + } + return dataset.dataSource.dataLayers.find((layer) => layer.name === layerName)?.hasSegmentIndex; +} diff --git a/frontend/javascripts/oxalis/model/accessors/save_accessor.ts b/frontend/javascripts/oxalis/model/accessors/save_accessor.ts index 299ec12f6c..e35ed69805 100644 --- a/frontend/javascripts/oxalis/model/accessors/save_accessor.ts +++ b/frontend/javascripts/oxalis/model/accessors/save_accessor.ts @@ -1,7 +1,13 @@ import type { IsBusyInfo, OxalisState, SaveQueueEntry } from "oxalis/store"; import type { SaveQueueType } from "oxalis/model/actions/save_actions"; +import * as Utils from "libs/utils"; + export function isBusy(isBusyInfo: IsBusyInfo): boolean { - return isBusyInfo.skeleton || isBusyInfo.volume; + return ( + isBusyInfo.skeleton || + Utils.values(isBusyInfo.volumes).some((el) => el) || + Utils.values(isBusyInfo.mappings).some((el) => el) + ); } export function selectQueue( state: OxalisState, diff --git a/frontend/javascripts/oxalis/model/actions/dataset_actions.ts b/frontend/javascripts/oxalis/model/actions/dataset_actions.ts index 6039a8766e..a42cb471d4 100644 --- a/frontend/javascripts/oxalis/model/actions/dataset_actions.ts +++ b/frontend/javascripts/oxalis/model/actions/dataset_actions.ts @@ -5,12 +5,16 @@ type SetLayerTransformsAction = ReturnType<typeof setLayerTransformsAction>; export type EnsureLayerMappingsAreLoadedAction = ReturnType< typeof ensureLayerMappingsAreLoadedAction >; +type SetLayerHasSegmentIndexAction = ReturnType<typeof setLayerHasSegmentIndexAction>; +export type EnsureSegmentIndexIsLoadedAction = ReturnType<typeof ensureSegmentIndexIsLoadedAction>; export type DatasetAction = | SetDatasetAction | SetLayerMappingsAction | SetLayerTransformsAction - | EnsureLayerMappingsAreLoadedAction; + | EnsureLayerMappingsAreLoadedAction + | SetLayerHasSegmentIndexAction + | EnsureSegmentIndexIsLoadedAction; export const setDatasetAction = (dataset: APIDataset) => ({ @@ -45,3 +49,16 @@ export const ensureLayerMappingsAreLoadedAction = (layerName?: string) => type: "ENSURE_LAYER_MAPPINGS_ARE_LOADED", layerName, }) as const; + +export const setLayerHasSegmentIndexAction = (layerName: string, hasSegmentIndex: boolean) => + ({ + type: "SET_LAYER_HAS_SEGMENT_INDEX", + layerName, + hasSegmentIndex, + }) as const; + +export const ensureSegmentIndexIsLoadedAction = (layerName: string | null | undefined) => + ({ + type: "ENSURE_SEGMENT_INDEX_IS_LOADED", + layerName, + }) as const; diff --git a/frontend/javascripts/oxalis/model/actions/save_actions.ts b/frontend/javascripts/oxalis/model/actions/save_actions.ts index 8c890ab1e6..dca4997b9f 100644 --- a/frontend/javascripts/oxalis/model/actions/save_actions.ts +++ b/frontend/javascripts/oxalis/model/actions/save_actions.ts @@ -9,7 +9,7 @@ export type PushSaveQueueTransaction = ReturnType<typeof pushSaveQueueTransaction>; export type ShiftSaveQueueAction = ReturnType<typeof shiftSaveQueueAction>; type DiscardSaveQueuesAction = ReturnType<typeof discardSaveQueuesAction>; -type SetSaveBusyAction = ReturnType<typeof setSaveBusyAction>; +export type SetSaveBusyAction = ReturnType<typeof setSaveBusyAction>; export type SetLastSaveTimestampAction = ReturnType<typeof setLastSaveTimestampAction>; export type SetVersionNumberAction = ReturnType<typeof setVersionNumberAction>; export type UndoAction = ReturnType<typeof undoAction>; @@ -64,11 +64,16 @@ export const discardSaveQueuesAction = () => type: "DISCARD_SAVE_QUEUES", }) as const; -export const setSaveBusyAction = (isBusy: boolean, saveQueueType: SaveQueueType) => +export const setSaveBusyAction = ( + isBusy: boolean, + saveQueueType: SaveQueueType, + tracingId: string, +) => ({ type: "SET_SAVE_BUSY", isBusy, saveQueueType, + tracingId, }) as const.
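+// Note: the save reducer routes this busy flag by tracingId into isBusyInfo.volumes[tracingId] or +// isBusyInfo.mappings[tracingId]; for the skeleton queue the tracingId is ignored.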
export const setLastSaveTimestampAction = (saveQueueType: SaveQueueType, tracingId: string) => diff --git a/frontend/javascripts/oxalis/model/reducers/dataset_reducer.ts b/frontend/javascripts/oxalis/model/reducers/dataset_reducer.ts index e2e0652107..82b8abb348 100644 --- a/frontend/javascripts/oxalis/model/reducers/dataset_reducer.ts +++ b/frontend/javascripts/oxalis/model/reducers/dataset_reducer.ts @@ -68,6 +68,24 @@ function DatasetReducer(state: OxalisState, action: Action): OxalisState { }); } + case "SET_LAYER_HAS_SEGMENT_INDEX": { + const { layerName, hasSegmentIndex } = action; + const newLayers = state.dataset.dataSource.dataLayers.map((layer) => { + if (layer.name === layerName) { + return { + ...layer, + hasSegmentIndex, + }; + } else { + return layer; + } + }); + + return updateKey2(state, "dataset", "dataSource", { + dataLayers: newLayers, + }); + } + case "SET_LAYER_TRANSFORMS": { const { layerName, coordinateTransformations } = action; diff --git a/frontend/javascripts/oxalis/model/reducers/save_reducer.ts b/frontend/javascripts/oxalis/model/reducers/save_reducer.ts index e02527f00c..7ee44a160f 100644 --- a/frontend/javascripts/oxalis/model/reducers/save_reducer.ts +++ b/frontend/javascripts/oxalis/model/reducers/save_reducer.ts @@ -3,10 +3,9 @@ import update from "immutability-helper"; import type { Action } from "oxalis/model/actions/actions"; import type { OxalisState, SaveState, SaveQueueEntry } from "oxalis/store"; import type { - PushSaveQueueTransaction, SetVersionNumberAction, - ShiftSaveQueueAction, SetLastSaveTimestampAction, + SaveQueueType, } from "oxalis/model/actions/save_actions"; import { getActionLog } from "oxalis/model/helpers/action_logger_middleware"; import { getStats } from "oxalis/model/accessors/annotation_accessor"; @@ -20,20 +19,32 @@ import { import Date from "libs/date"; import * as Utils from "libs/utils"; -function updateQueueObj( - action: PushSaveQueueTransaction | ShiftSaveQueueAction, - oldQueueObj: SaveState["queue"], - newQueue: any, -): SaveState["queue"] { +type TracingDict<V> = { + skeleton: V; + volumes: Record<string, V>; + mappings: Record<string, V>; +}; + +function updateTracingDict<V>( + action: { saveQueueType: SaveQueueType; tracingId: string }, + oldDict: TracingDict<V>, + newValue: V, +): TracingDict<V> { if (action.saveQueueType === "skeleton") { - return { ...oldQueueObj, skeleton: newQueue }; + return { ...oldDict, skeleton: newValue }; } else if (action.saveQueueType === "volume") { - return { ...oldQueueObj, volumes: { ...oldQueueObj.volumes, [action.tracingId]: newQueue } }; + return { + ...oldDict, + volumes: { ...oldDict.volumes, [action.tracingId]: newValue }, + }; } else if (action.saveQueueType === "mapping") { - return { ...oldQueueObj, mappings: { ...oldQueueObj.mappings, [action.tracingId]: newQueue } }; + return { + ...oldDict, + mappings: { ...oldDict.mappings, [action.tracingId]: newValue }, + }; } - return oldQueueObj; + return oldDict; } export function getTotalSaveQueueLength(queueObj: SaveState["queue"]) { @@ -146,7 +157,7 @@ function SaveReducer(state: OxalisState, action: Action): OxalisState { info: actionLogInfo, })), ); - const newQueueObj = updateQueueObj(action, state.save.queue, newQueue); + const newQueueObj = updateTracingDict(action, state.save.queue, newQueue); return update(state, { save: { queue: { @@ -176,7 +187,7 @@ function SaveReducer(state: OxalisState, action: Action): OxalisState { ); const remainingQueue = queue.slice(count); - const
newQueueObj = updateTracingDict(action, state.save.queue, remainingQueue); const remainingQueueLength = getTotalSaveQueueLength(newQueueObj); const resetCounter = remainingQueueLength === 0; return update(state, { @@ -224,12 +235,11 @@ function SaveReducer(state: OxalisState, action: Action): OxalisState { } case "SET_SAVE_BUSY": { + const newIsBusyInfo = updateTracingDict(action, state.save.isBusyInfo, action.isBusy); return update(state, { save: { isBusyInfo: { - [action.saveQueueType]: { - $set: action.isBusy, - }, + $set: newIsBusyInfo, }, }, }); diff --git a/frontend/javascripts/oxalis/model/sagas/dataset_saga.ts b/frontend/javascripts/oxalis/model/sagas/dataset_saga.ts index e3e8098978..31179a886d 100644 --- a/frontend/javascripts/oxalis/model/sagas/dataset_saga.ts +++ b/frontend/javascripts/oxalis/model/sagas/dataset_saga.ts @@ -1,13 +1,14 @@ -import { call, take, takeEvery, takeLatest } from "typed-redux-saga"; +import { call, put, take, takeEvery, takeLatest } from "typed-redux-saga"; import { sum } from "lodash"; import type { Saga } from "oxalis/model/sagas/effect-generators"; import { select } from "oxalis/model/sagas/effect-generators"; - import { sleep } from "libs/utils"; import Toast from "libs/toast"; import messages from "messages"; import { getEnabledLayers, + getLayerByName, + getMaybeSegmentIndexAvailability, getResolutionInfo, getTransformsForLayer, invertAndTranspose, @@ -17,6 +18,11 @@ import { getCurrentResolution } from "../accessors/flycam_accessor"; import { getViewportExtents } from "../accessors/view_mode_accessor"; import { V3 } from "libs/mjs"; import { Identity4x4 } from "oxalis/constants"; +import { hasSegmentIndex } from "oxalis/view/right-border-tabs/segments_tab/segments_view_helper"; +import { + type EnsureSegmentIndexIsLoadedAction, + setLayerHasSegmentIndexAction, +} from "../actions/dataset_actions"; export function* watchMaximumRenderableLayers(): Saga<void> { function* warnMaybe(): Saga<void> { @@ -149,3 +155,32 @@ export function* watchZ1Downsampling(): Saga<void> { maybeShowWarning, ); } + +export function* ensureSegmentIndexIsLoaded(): Saga<void> { + function* maybeFetchHasSegmentIndex(action: EnsureSegmentIndexIsLoadedAction): Saga<void> { + const { layerName } = action; + const dataset = yield* select((state) => state.dataset); + if (layerName == null) return; + const segmentationLayer = yield* call(getLayerByName, dataset, layerName); + const maybeIsSegmentIndexAvailable = yield* call( + getMaybeSegmentIndexAvailability, + dataset, + layerName, + ); + if (maybeIsSegmentIndexAvailable == null && segmentationLayer != null) { + const tracing = yield* select((state) => state.tracing); + const updatedIsSegmentIndexAvailable = yield* call( + hasSegmentIndex, + segmentationLayer, + dataset, + tracing, + ); + yield* put( + setLayerHasSegmentIndexAction(segmentationLayer.name, updatedIsSegmentIndexAvailable), + ); + } + } + yield* takeEvery("ENSURE_SEGMENT_INDEX_IS_LOADED", maybeFetchHasSegmentIndex); +} + +export default [watchMaximumRenderableLayers, watchZ1Downsampling, ensureSegmentIndexIsLoaded]; diff --git a/frontend/javascripts/oxalis/model/sagas/root_saga.ts b/frontend/javascripts/oxalis/model/sagas/root_saga.ts index 200a885957..3a154561d3 100644 --- a/frontend/javascripts/oxalis/model/sagas/root_saga.ts +++ b/frontend/javascripts/oxalis/model/sagas/root_saga.ts @@ -9,7 +9,7 @@ import { watchDataRelevantChanges } from "oxalis/model/sagas/prefetch_saga"; import SkeletontracingSagas from "oxalis/model/sagas/skeletontracing_saga"; import ErrorHandling from
"libs/error_handling"; import meshSaga, { handleAdditionalCoordinateUpdate } from "oxalis/model/sagas/mesh_saga"; -import { watchMaximumRenderableLayers, watchZ1Downsampling } from "oxalis/model/sagas/dataset_saga"; +import DatasetSagas from "oxalis/model/sagas/dataset_saga"; import { watchToolDeselection, watchToolReset } from "oxalis/model/sagas/annotation_tool_saga"; import SettingsSaga from "oxalis/model/sagas/settings_saga"; import watchTasksAsync, { warnAboutMagRestriction } from "oxalis/model/sagas/task_saga"; @@ -55,7 +55,6 @@ function* restartableSaga(): Saga { call(watchDataRelevantChanges), call(meshSaga), call(watchTasksAsync), - call(watchMaximumRenderableLayers), call(MappingSaga), call(watchToolDeselection), call(watchToolReset), @@ -64,10 +63,10 @@ function* restartableSaga(): Saga { ...SaveSagas.map((saga) => call(saga)), call(UndoSaga), ...VolumetracingSagas.map((saga) => call(saga)), - call(watchZ1Downsampling), call(warnIfEmailIsUnverified), call(listenToErrorEscalation), call(handleAdditionalCoordinateUpdate), + ...DatasetSagas.map((saga) => call(saga)), ]); } catch (err) { rootSagaCrashed = true; diff --git a/frontend/javascripts/oxalis/model/sagas/save_saga.ts b/frontend/javascripts/oxalis/model/sagas/save_saga.ts index d2e07c8864..1d3141a23e 100644 --- a/frontend/javascripts/oxalis/model/sagas/save_saga.ts +++ b/frontend/javascripts/oxalis/model/sagas/save_saga.ts @@ -86,7 +86,7 @@ export function* pushSaveQueueAsync(saveQueueType: SaveQueueType, tracingId: str timeout: delay(PUSH_THROTTLE_TIME), forcePush: take("SAVE_NOW"), }); - yield* put(setSaveBusyAction(true, saveQueueType)); + yield* put(setSaveBusyAction(true, saveQueueType, tracingId)); // Send (parts) of the save queue to the server. // There are two main cases: @@ -120,7 +120,7 @@ export function* pushSaveQueueAsync(saveQueueType: SaveQueueType, tracingId: str } } - yield* put(setSaveBusyAction(false, saveQueueType)); + yield* put(setSaveBusyAction(false, saveQueueType, tracingId)); } } export function sendRequestWithToken( @@ -468,11 +468,27 @@ const VERSION_POLL_INTERVAL_SINGLE_EDITOR = 30 * 1000; function* watchForSaveConflicts() { function* checkForNewVersion() { - const allowSave = yield* select((state) => state.tracing.restrictions.allowSave); - const allowUpdate = yield* select((state) => state.tracing.restrictions.allowUpdate); - const othersMayEdit = yield* select((state) => state.tracing.othersMayEdit); - if (!allowUpdate && othersMayEdit) { - // The user does not have the annotation's mutex and therefore no repeated version check is needed. + const allowSave = yield* select( + (state) => state.tracing.restrictions.allowSave && state.tracing.restrictions.allowUpdate, + ); + if (allowSave) { + // The active user is currently the only one that is allowed to mutate the annotation. + // Since we only acquire the mutex upon page load, there shouldn't be any unseen updates + // between the page load and this check here. + // A race condition where + // 1) another user saves version X + // 2) we load the annotation but only get see version X - 1 (this is the race) + // 3) we acquire a mutex + // should not occur, because there is a grace period for which the mutex has to be free until it can + // be acquired again (see annotation.mutex.expiryTime in application.conf). + // The downside of an early return here is that we won't be able to warn the user early + // if the user opened the annotation in two tabs and mutated it there. 
+ // However, + // a) this scenario is pretty rare and the worst case is that they get a 409 error + // during saving and + // b) checking for newer versions when the active user may update the annotation introduces + // a race condition between this saga and the actual save saga. Synchronizing these sagas + // would be possible, but would add further complexity to the mission-critical save saga. return; } @@ -493,25 +509,6 @@ function* watchForSaveConflicts() { tracing.type, ); - // There is a rare chance of a race condition happening - // which can result in an incorrect toast warning. - // It occurs if certain saga effects run in the following order: - // 1) a new version is pushed to the server (in sendRequestToServer) - // 2) the current server version is fetched (in this saga here) - // 3) the server version is compared with the client version (in this saga here) - // 4) only now is the local version number updated because the - // sendRequestToServer saga was scheduled now. - // Since (4) is happening too late, the comparison in (3) will assume - // that client and server are out of sync. - // The chance of this happening is relatively low, but from time to time it - // happened. To mitigate this problem, we introduce a simple sleep here which - // means that the chance of (4) being scheduled after (2) should be close to - // 0 (hopefully). - // Note that a false-positive warning in this saga doesn't have serious side-effects - // (except for potentially confusing the user). This is why a more complex mutex- or - // semaphore-based solution to this problem was not chosen. - yield* call(sleep, 2000); - // Read the tracing version again from the store, since the // old reference to tracing might be outdated now due to the // immutability. diff --git a/frontend/javascripts/oxalis/store.ts b/frontend/javascripts/oxalis/store.ts index 3d3ead7d60..11bd1264f9 100644 --- a/frontend/javascripts/oxalis/store.ts +++ b/frontend/javascripts/oxalis/store.ts @@ -68,7 +68,6 @@ import defaultState from "oxalis/default_state"; import overwriteActionMiddleware from "oxalis/model/helpers/overwrite_action_middleware"; import reduceReducers from "oxalis/model/helpers/reduce_reducers"; import ConnectomeReducer from "oxalis/model/reducers/connectome_reducer"; -import { SaveQueueType } from "./model/actions/save_actions"; import OrganizationReducer from "./model/reducers/organization_reducer"; import { StartAIJobModalState } from "./view/action-bar/starting_job_modals"; @@ -436,7 +435,11 @@ export type ProgressInfo = { readonly processedActionCount: number; readonly totalActionCount: number; }; -export type IsBusyInfo = Record<SaveQueueType, boolean>; +export type IsBusyInfo = { + readonly skeleton: boolean; + readonly volumes: Record<string, boolean>; + readonly mappings: Record<string, boolean>; +}; export type SaveState = { readonly isBusyInfo: IsBusyInfo; readonly queue: { diff --git a/frontend/javascripts/oxalis/view/context_menu.tsx b/frontend/javascripts/oxalis/view/context_menu.tsx index b33a33df02..4c1b33e330 100644 --- a/frontend/javascripts/oxalis/view/context_menu.tsx +++ b/frontend/javascripts/oxalis/view/context_menu.tsx @@ -1,8 +1,8 @@ import { CopyOutlined, PushpinOutlined, ReloadOutlined, WarningOutlined } from "@ant-design/icons"; import type { Dispatch } from "redux"; import { Dropdown, Empty, notification, Tooltip, Popover, Input, MenuProps, Modal } from "antd"; -import { connect } from "react-redux"; -import React, { createContext, MouseEvent, useContext, useState } from "react"; +import { connect, useSelector } from "react-redux"; +import
React, { createContext, MouseEvent, useContext, useEffect, useState } from "react"; import type { APIConnectomeFile, APIDataset, @@ -72,14 +72,17 @@ import { getVisibleSegmentationLayer, getMappingInfo, getResolutionInfo, - hasFallbackLayer, + getMaybeSegmentIndexAvailability, } from "oxalis/model/accessors/dataset_accessor"; import { loadAgglomerateSkeletonAtPosition, loadSynapsesOfAgglomerateAtPosition, } from "oxalis/controller/combinations/segmentation_handlers"; import { isBoundingBoxUsableForMinCut } from "oxalis/model/sagas/min_cut_saga"; -import { withMappingActivationConfirmation } from "oxalis/view/right-border-tabs/segments_tab/segments_view_helper"; +import { + getVolumeRequestUrl, + withMappingActivationConfirmation, +} from "oxalis/view/right-border-tabs/segments_tab/segments_view_helper"; import { maybeGetSomeTracing } from "oxalis/model/accessors/tracing_accessor"; import { clickSegmentAction, @@ -113,7 +116,10 @@ import { AsyncIconButton } from "components/async_clickables"; import { type AdditionalCoordinate } from "types/api_flow_types"; import { voxelToNm3 } from "oxalis/model/scaleinfo"; import { getBoundingBoxInMag1 } from "oxalis/model/sagas/volume/helpers"; -import { ensureLayerMappingsAreLoadedAction } from "oxalis/model/actions/dataset_actions"; +import { + ensureLayerMappingsAreLoadedAction, + ensureSegmentIndexIsLoadedAction, +} from "oxalis/model/actions/dataset_actions"; type ContextMenuContextValue = React.MutableRefObject | null; export const ContextMenuContext = createContext(null); @@ -1201,38 +1207,58 @@ function ContextMenuInner(propsWithInputRef: Props) { } = props; const segmentIdAtPosition = globalPosition != null ? getSegmentIdForPosition(globalPosition) : 0; - const hasNoFallbackLayer = - visibleSegmentationLayer != null && !hasFallbackLayer(visibleSegmentationLayer); - const [segmentVolume, boundingBoxInfo] = useFetch( + + // Currently either segmentIdAtPosition or maybeClickedMeshId is set, but not both. + // segmentIdAtPosition is only set if a segment is hovered in one of the xy, xz, or yz viewports. + // maybeClickedMeshId is only set when a mesh is hovered in the 3d viewport. + // Thus the segment id is always unambiguous / clearly defined. + const clickedSegmentOrMeshId = + maybeClickedMeshId != null ?
maybeClickedMeshId : segmentIdAtPosition; + const wasSegmentOrMeshClicked = clickedSegmentOrMeshId > 0; + + const { dataset, tracing, flycam } = useSelector((state: OxalisState) => state); + useEffect(() => { + Store.dispatch(ensureSegmentIndexIsLoadedAction(visibleSegmentationLayer?.name)); + }, [visibleSegmentationLayer]); + const isSegmentIndexAvailable = useSelector((state: OxalisState) => + getMaybeSegmentIndexAvailability(state.dataset, visibleSegmentationLayer?.name), + ); + const mappingName: string | null | undefined = useSelector((state: OxalisState) => { + if (volumeTracing?.mappingName != null) return volumeTracing?.mappingName; + const mappingInfo = getMappingInfo( + state.temporaryConfiguration.activeMappingByLayer, + visibleSegmentationLayer?.name, + ); + return mappingInfo.mappingName; + }); + const isLoadingMessage = "loading"; + const isLoadingVolumeAndBB = [isLoadingMessage, isLoadingMessage]; + const [segmentVolumeLabel, boundingBoxInfoLabel] = useFetch( async () => { - if ( - contextMenuPosition == null || - volumeTracing == null || - !hasNoFallbackLayer || - !volumeTracing.hasSegmentIndex - ) { - return []; - } else { - const state = Store.getState(); - const tracingId = volumeTracing.tracingId; - const tracingStoreUrl = state.tracing.tracingStore.url; - const magInfo = getResolutionInfo(visibleSegmentationLayer.resolutions); - const layersFinestResolution = magInfo.getFinestResolution(); - const dataSetScale = state.dataset.dataSource.scale; - const additionalCoordinates = state.flycam.additionalCoordinates; + // The value that is returned if the context menu is closed is shown if it's still loading + if (contextMenuPosition == null || !wasSegmentOrMeshClicked) return isLoadingVolumeAndBB; + if (visibleSegmentationLayer == null || !isSegmentIndexAvailable) return []; + const tracingId = volumeTracing?.tracingId; + const additionalCoordinates = flycam.additionalCoordinates; + const requestUrl = getVolumeRequestUrl(dataset, tracing, tracingId, visibleSegmentationLayer); + const magInfo = getResolutionInfo(visibleSegmentationLayer.resolutions); + const layersFinestResolution = magInfo.getFinestResolution(); + const dataSetScale = dataset.dataSource.scale; + + try { const [segmentSize] = await getSegmentVolumes( - tracingStoreUrl, - tracingId, + requestUrl, layersFinestResolution, - [segmentIdAtPosition], + [clickedSegmentOrMeshId], additionalCoordinates, + mappingName, ); const [boundingBoxInRequestedMag] = await getSegmentBoundingBoxes( - tracingStoreUrl, - tracingId, + requestUrl, layersFinestResolution, - [segmentIdAtPosition], + [clickedSegmentOrMeshId], additionalCoordinates, + mappingName, ); const boundingBoxInMag1 = getBoundingBoxInMag1( boundingBoxInRequestedMag, @@ -1245,24 +1271,21 @@ function ContextMenuInner(propsWithInputRef: Props) { formatNumberToVolume(volumeInNm3), `${boundingBoxTopLeftString}, ${boundingBoxSizeString}`, ]; + } catch (_error) { + const notFetchedMessage = "could not be fetched"; + return [notFetchedMessage, notFetchedMessage]; } }, - ["loading", "loading"], + isLoadingVolumeAndBB, // Update segment infos when opening the context menu, in case the annotation was saved since the context menu was last opened. // Of course the info should also be updated when the menu is opened for another segment, or after the refresh button was pressed. 
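The mapping name resolved above is sent along with both statistics requests so that the server can translate agglomerate ids into their underlying segment ids where necessary. The precedence rule is: a mapping that is part of the volume annotation itself wins over a mapping that is merely activated on the layer. A minimal standalone sketch of that rule, using simplified stand-in types rather than the actual store interfaces:

```typescript
// Sketch only: `VolumeTracingLike` and `ActiveMappingInfoByLayer` are simplified
// stand-ins for the store types used in the diff above.
type VolumeTracingLike = { mappingName?: string | null };
type ActiveMappingInfoByLayer = Record<string, { mappingName?: string | null }>;

function resolveMappingName(
  volumeTracing: VolumeTracingLike | null | undefined,
  activeMappingByLayer: ActiveMappingInfoByLayer,
  layerName: string | undefined,
): string | null | undefined {
  // A mapping baked into the volume annotation takes precedence ...
  if (volumeTracing?.mappingName != null) {
    return volumeTracing.mappingName;
  }
  // ... over a mapping that is only activated on the segmentation layer.
  return layerName != null ? activeMappingByLayer[layerName]?.mappingName : null;
}
```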
- [contextMenuPosition, segmentIdAtPosition, lastTimeSegmentInfoShouldBeFetched], + [contextMenuPosition, clickedSegmentOrMeshId, lastTimeSegmentInfoShouldBeFetched], ); if (contextMenuPosition == null || maybeViewport == null) { return <>; } - // Currently either segmentIdAtPosition or maybeClickedMeshId is set, but not both. - // segmentIdAtPosition is only set if a segment is hovered in one of the xy, xz, or yz viewports. - // maybeClickedMeshId is only set, when a mesh is hovered in the 3d viewport. - // Thus the segment id is always unambiguous / clearly defined. - const isHoveredSegmentOrMesh = segmentIdAtPosition > 0 || maybeClickedMeshId != null; - const activeTreeId = skeletonTracing != null ? skeletonTracing.activeTreeId : null; const activeNodeId = skeletonTracing?.activeNodeId; @@ -1351,8 +1374,7 @@ function ContextMenuInner(propsWithInputRef: Props) { ); - const areSegmentStatisticsAvailable = - hasNoFallbackLayer && volumeTracing?.hasSegmentIndex && isHoveredSegmentOrMesh; + const areSegmentStatisticsAvailable = wasSegmentOrMeshClicked && isSegmentIndexAvailable; if (areSegmentStatisticsAvailable) { infoRows.push( @@ -1360,8 +1382,8 @@ function ContextMenuInner(propsWithInputRef: Props) { "volumeInfo", <> - Volume: {segmentVolume} - {copyIconWithTooltip(segmentVolume as string, "Copy volume")} + Volume: {segmentVolumeLabel} + {copyIconWithTooltip(segmentVolumeLabel as string, "Copy volume")} {refreshButton} , ), @@ -1376,8 +1398,11 @@ function ContextMenuInner(propsWithInputRef: Props) { <>Bounding Box:
- {boundingBoxInfo} - {copyIconWithTooltip(boundingBoxInfo as string, "Copy BBox top left point and extent")} + {boundingBoxInfoLabel} + {copyIconWithTooltip( + boundingBoxInfoLabel as string, + "Copy BBox top left point and extent", + )} {refreshButton}
, @@ -1400,14 +1425,14 @@ function ContextMenuInner(propsWithInputRef: Props) { ); } - if (isHoveredSegmentOrMesh) { - const segmentId = maybeClickedMeshId ? maybeClickedMeshId : segmentIdAtPosition; + if (wasSegmentOrMeshClicked) { infoRows.push( getInfoMenuItem( "copy-cell", <>
- Segment ID: {`${segmentId}`} {copyIconWithTooltip(segmentId, "Copy Segment ID")} + Segment ID: {`${clickedSegmentOrMeshId}`}{" "} + {copyIconWithTooltip(clickedSegmentOrMeshId, "Copy Segment ID")} , ), ); diff --git a/frontend/javascripts/oxalis/view/right-border-tabs/segments_tab/segment_statistics_modal.tsx b/frontend/javascripts/oxalis/view/right-border-tabs/segments_tab/segment_statistics_modal.tsx index 9c81965c89..a04e2a4451 100644 --- a/frontend/javascripts/oxalis/view/right-border-tabs/segments_tab/segment_statistics_modal.tsx +++ b/frontend/javascripts/oxalis/view/right-border-tabs/segments_tab/segment_statistics_modal.tsx @@ -4,10 +4,14 @@ import saveAs from "file-saver"; import { formatNumberToVolume } from "libs/format_utils"; import { useFetch } from "libs/react_helpers"; import { Vector3 } from "oxalis/constants"; -import { getResolutionInfo } from "oxalis/model/accessors/dataset_accessor"; +import { getMappingInfo, getResolutionInfo } from "oxalis/model/accessors/dataset_accessor"; import { OxalisState, Segment } from "oxalis/store"; import React from "react"; -import { SegmentHierarchyNode, SegmentHierarchyGroup } from "./segments_view_helper"; +import { + SegmentHierarchyNode, + SegmentHierarchyGroup, + getVolumeRequestUrl, +} from "./segments_view_helper"; import { api } from "oxalis/singletons"; import { APISegmentationLayer } from "types/api_flow_types"; import { voxelToNm3 } from "oxalis/model/scaleinfo"; @@ -18,6 +22,7 @@ import { hasAdditionalCoordinates, } from "oxalis/model/accessors/flycam_accessor"; import { pluralize } from "libs/utils"; +import { getVolumeTracingById } from "oxalis/model/accessors/volumetracing_accessor"; const MODAL_ERROR_MESSAGE = "Segment statistics could not be fetched. Check the console for more details."; @@ -31,8 +36,7 @@ const ADDITIONAL_COORDS_COLUMN = "additionalCoordinates"; type Props = { onCancel: (...args: Array) => any; - tracingId: string; - tracingStoreUrl: string; + tracingId: string | undefined; visibleSegmentationLayer: APISegmentationLayer; relevantSegments: Segment[]; parentGroup: number; @@ -57,7 +61,7 @@ type SegmentInfo = { const exportStatisticsToCSV = ( segmentInformation: Array, - tracingId: string, + tracingIdOrDatasetName: string, groupIdToExport: number, hasAdditionalCoords: boolean, ) => { @@ -87,8 +91,8 @@ const exportStatisticsToCSV = ( const csv = [csv_header, segmentStatisticsAsString].join("\n"); const filename = groupIdToExport === -1 - ? `segmentStatistics_tracing-${tracingId}.csv` - : `segmentStatistics_tracing-${tracingId}_group-${groupIdToExport}.csv`; + ? `segmentStatistics_${tracingIdOrDatasetName}.csv` + : `segmentStatistics_${tracingIdOrDatasetName}_group-${groupIdToExport}.csv`; const blob = new Blob([csv], { type: "text/plain;charset=utf-8", }); @@ -98,15 +102,23 @@ const exportStatisticsToCSV = ( export function SegmentStatisticsModal({ onCancel, tracingId, - tracingStoreUrl, visibleSegmentationLayer, relevantSegments: segments, parentGroup, groupTree, }: Props) { + const { dataset, tracing, temporaryConfiguration } = useSelector((state: OxalisState) => state); const magInfo = getResolutionInfo(visibleSegmentationLayer.resolutions); const layersFinestResolution = magInfo.getFinestResolution(); - const dataSetScale = useSelector((state: OxalisState) => state.dataset.dataSource.scale); + const dataSetScale = dataset.dataSource.scale; + // Omit checking that all prerequisites for segment stats (such as a segment index) are + // met right here because that should happen before opening the modal. 
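As with the context menu, the modal resolves its base URL via getVolumeRequestUrl (implemented in segments_view_helper.tsx further down): if the layer has no associated volume tracing, statistics are requested from the datastore's layer routes; otherwise from the tracingstore's volume routes. A sketch of the two resulting URL shapes, with all hosts, names, and ids invented for illustration:

```typescript
// Illustrative values only. getSegmentVolumes / getSegmentBoundingBoxes append
// their endpoints to whichever base they receive (for the datastore these are
// /segmentStatistics/volume and /segmentStatistics/boundingBox, see the routes
// file further below).

// Case 1: plain segmentation layer without a volume tracing, served by the datastore.
const layerBase =
  "https://datastore.example.com/data/datasets/sampleOrganization/sampleDataset/layers/segmentation";

// Case 2: volume annotation layer, served by the tracingstore instead.
const tracingBase = "https://tracingstore.example.com/tracings/volume/someTracingId";
```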
+ const requestUrl = getVolumeRequestUrl( + dataset, + tracing, + visibleSegmentationLayer.tracingId, + visibleSegmentationLayer, + ); const additionalCoordinates = useSelector( (state: OxalisState) => state.flycam.additionalCoordinates, ); @@ -118,20 +130,31 @@ export function SegmentStatisticsModal({ const segmentStatisticsObjects = useFetch( async () => { await api.tracing.save(); - const segmentStatisticsObjects: Array | null = await Promise.all([ + if (requestUrl == null) return; + const maybeVolumeTracing = + tracingId != null ? getVolumeTracingById(tracing, tracingId) : null; + const maybeGetMappingName = () => { + if (maybeVolumeTracing?.mappingName != null) return maybeVolumeTracing.mappingName; + const mappingInfo = getMappingInfo( + temporaryConfiguration.activeMappingByLayer, + visibleSegmentationLayer?.name, + ); + return mappingInfo.mappingName; + }; + const segmentStatisticsObjects = await Promise.all([ getSegmentVolumes( - tracingStoreUrl, - tracingId, + requestUrl, layersFinestResolution, segments.map((segment) => segment.id), additionalCoordinates, + maybeGetMappingName(), ), getSegmentBoundingBoxes( - tracingStoreUrl, - tracingId, + requestUrl, layersFinestResolution, segments.map((segment) => segment.id), additionalCoordinates, + maybeGetMappingName(), ), ]).then( (response) => { @@ -236,16 +259,15 @@ export function SegmentStatisticsModal({ title="Segment Statistics" onCancel={onCancel} width={700} - onOk={() => { - if (!isErrorCase) { - exportStatisticsToCSV( - segmentStatisticsObjects, - tracingId, - parentGroup, - hasAdditionalCoords, - ); - } - }} + onOk={() => + !isErrorCase && + exportStatisticsToCSV( + segmentStatisticsObjects, + tracingId || dataset.name, + parentGroup, + hasAdditionalCoords, + ) + } okText="Export to CSV" okButtonProps={{ disabled: isErrorCase }} > @@ -256,7 +278,6 @@ export function SegmentStatisticsModal({ <> {hasAdditionalCoords && ( ; selectedIds: { segments: number[]; group: number | null }; @@ -176,6 +178,11 @@ const mapStateToProps = (state: OxalisState): StateProps => { ? getMeshesForCurrentAdditionalCoordinates(state, visibleSegmentationLayer?.name) : undefined; + const isSegmentIndexAvailable = getMaybeSegmentIndexAvailability( + state.dataset, + visibleSegmentationLayer?.name, + ); + return { activeCellId: activeVolumeTracing?.activeCellId, meshes: meshesForCurrentAdditionalCoordinates || EMPTY_OBJECT, // satisfy ts @@ -185,6 +192,7 @@ const mapStateToProps = (state: OxalisState): StateProps => { mappingInfo, flycam: state.flycam, hasVolumeTracing: state.tracing.volumes.length > 0, + isSegmentIndexAvailable, segments, segmentGroups, selectedIds: getCleanedSelectedSegmentsOrGroup(state), @@ -412,6 +420,8 @@ class SegmentsView extends React.Component { ) { this.pollJobData(); } + + Store.dispatch(ensureSegmentIndexIsLoadedAction(this.props.visibleSegmentationLayer?.name)); } componentDidUpdate(prevProps: Props) { @@ -1111,15 +1121,7 @@ class SegmentsView extends React.Component { }; getShowSegmentStatistics = (id: number): ItemType => { - const visibleSegmentationLayer = this.props.visibleSegmentationLayer; - if ( - visibleSegmentationLayer == null || - visibleSegmentationLayer.fallbackLayer != null || - !this.props.activeVolumeTracing?.hasSegmentIndex - ) { - // In this case there is a fallback layer or an ND annotation. 
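The segments tab and the context menu follow the same gating pattern: dispatch ensureSegmentIndexIsLoadedAction for the visible layer, then select getMaybeSegmentIndexAvailability from the store. A sketch of that pattern bundled into a hypothetical hook (useSegmentIndexAvailability is illustration only and not part of this change; the action and accessor are the ones added in this diff):

```typescript
import { useEffect } from "react";
import { useDispatch, useSelector } from "react-redux";
import { ensureSegmentIndexIsLoadedAction } from "oxalis/model/actions/dataset_actions";
import { getMaybeSegmentIndexAvailability } from "oxalis/model/accessors/dataset_accessor";
import type { OxalisState } from "oxalis/store";

// Hypothetical helper hook, not part of this PR.
function useSegmentIndexAvailability(layerName: string | undefined): boolean | undefined {
  const dispatch = useDispatch();
  useEffect(() => {
    // Triggers the (async) hasSegmentIndex check for the layer; the result is
    // written to the store so the selector below can pick it up.
    dispatch(ensureSegmentIndexIsLoadedAction(layerName));
  }, [layerName]);
  // undefined while the check has not completed yet, a boolean afterwards.
  return useSelector((state: OxalisState) =>
    getMaybeSegmentIndexAvailability(state.dataset, layerName),
  );
}
```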
- return null; - } + if (!this.props.isSegmentIndexAvailable) return null; return { key: "segmentStatistics", label: ( @@ -1543,21 +1545,14 @@ class SegmentsView extends React.Component { getSegmentStatisticsModal = (groupId: number) => { const segments = this.getSegmentsOfGroupRecursively(groupId); const visibleSegmentationLayer = this.props.visibleSegmentationLayer; - const hasNoFallbackLayer = - visibleSegmentationLayer != null && !hasFallbackLayer(visibleSegmentationLayer); - if (hasNoFallbackLayer && this.props.hasVolumeTracing && segments != null) { - const state = Store.getState(); - const tracingId = this.props.activeVolumeTracing?.tracingId; - if (tracingId == null) return null; - const tracingStoreUrl = state.tracing.tracingStore.url; + if (visibleSegmentationLayer != null && segments != null && segments.length > 0) { return this.state.activeStatisticsModalGroupId === groupId ? ( { this.setState({ activeStatisticsModalGroupId: null }); }} visibleSegmentationLayer={visibleSegmentationLayer} - tracingId={tracingId} - tracingStoreUrl={tracingStoreUrl} + tracingId={this.props.activeVolumeTracing?.tracingId} relevantSegments={segments} parentGroup={groupId} groupTree={this.state.searchableTreeItemList} diff --git a/frontend/javascripts/oxalis/view/right-border-tabs/segments_tab/segments_view_helper.tsx b/frontend/javascripts/oxalis/view/right-border-tabs/segments_tab/segments_view_helper.tsx index 2c59b2bdbc..84a82d5a23 100644 --- a/frontend/javascripts/oxalis/view/right-border-tabs/segments_tab/segments_view_helper.tsx +++ b/frontend/javascripts/oxalis/view/right-border-tabs/segments_tab/segments_view_helper.tsx @@ -1,13 +1,17 @@ import { Modal } from "antd"; -import type { APIDataLayer } from "types/api_flow_types"; -import type { ActiveMappingInfo, Segment } from "oxalis/store"; +import type { APIDataLayer, APIDataset, APISegmentationLayer } from "types/api_flow_types"; +import type { ActiveMappingInfo, HybridTracing, Segment } from "oxalis/store"; import Store from "oxalis/store"; import { MappingStatusEnum } from "oxalis/constants"; import { setMappingAction, setMappingEnabledAction } from "oxalis/model/actions/settings_actions"; import { waitForCondition } from "libs/utils"; import { getMappingInfo } from "oxalis/model/accessors/dataset_accessor"; -import { getEditableMappingForVolumeTracingId } from "oxalis/model/accessors/volumetracing_accessor"; +import { + getEditableMappingForVolumeTracingId, + getVolumeTracingById, +} from "oxalis/model/accessors/volumetracing_accessor"; import type { MenuClickEventHandler } from "rc-menu/lib/interface"; +import { hasSegmentIndexInDataStore } from "admin/admin_rest_api"; const { confirm } = Modal; @@ -35,6 +39,46 @@ export function getBaseSegmentationName(segmentationLayer: APIDataLayer) { ); } +export function getVolumeRequestUrl( + dataset: APIDataset, + tracing: HybridTracing | null, + tracingId: string | undefined, + visibleSegmentationLayer: APISegmentationLayer | APIDataLayer, +) { + if (tracing == null || tracingId == null) { + return `${dataset.dataStore.url}/data/datasets/${dataset.owningOrganization}/${dataset.name}/layers/${visibleSegmentationLayer.name}`; + } else { + const tracingStoreHost = tracing?.tracingStore.url; + return `${tracingStoreHost}/tracings/volume/${tracingId}`; + } +} + +export async function hasSegmentIndex( + visibleSegmentationLayer: APIDataLayer, + dataset: APIDataset, + tracing: HybridTracing | null | undefined, +) { + const maybeVolumeTracing = + "tracingId" in visibleSegmentationLayer && + 
visibleSegmentationLayer.tracingId != null && + tracing != null + ? getVolumeTracingById(tracing, visibleSegmentationLayer.tracingId) + : null; + let segmentIndexInDataStore = false; + if (maybeVolumeTracing == null) { + segmentIndexInDataStore = await hasSegmentIndexInDataStore( + dataset.dataStore.url, + dataset.name, + visibleSegmentationLayer.name, + dataset.owningOrganization, + ); + } + return ( + visibleSegmentationLayer != null && + (maybeVolumeTracing?.hasSegmentIndex || segmentIndexInDataStore) + ); +} + export function withMappingActivationConfirmation( originalOnClick: MenuClickEventHandler, mappingName: string | null | undefined, diff --git a/frontend/javascripts/test/reducers/save_reducer.spec.ts b/frontend/javascripts/test/reducers/save_reducer.spec.ts index ca6c8cece4..c4f91933e2 100644 --- a/frontend/javascripts/test/reducers/save_reducer.spec.ts +++ b/frontend/javascripts/test/reducers/save_reducer.spec.ts @@ -24,8 +24,8 @@ const initialState: { save: SaveState; activeUser: APIUser; tracing: EmptyObject save: { isBusyInfo: { skeleton: false, - volume: false, - mapping: false, + volumes: {}, + mappings: {}, }, queue: { skeleton: [], diff --git a/frontend/javascripts/test/sagas/save_saga.spec.ts b/frontend/javascripts/test/sagas/save_saga.spec.ts index c4459d1795..fd6debdd31 100644 --- a/frontend/javascripts/test/sagas/save_saga.spec.ts +++ b/frontend/javascripts/test/sagas/save_saga.spec.ts @@ -94,7 +94,7 @@ test("SaveSaga should send update actions", (t) => { saga.next({ forcePush: SaveActions.saveNowAction(), }), - put(setSaveBusyAction(true, TRACING_TYPE)), + put(setSaveBusyAction(true, TRACING_TYPE, tracingId)), ); saga.next(); // advance to next select state @@ -102,7 +102,7 @@ test("SaveSaga should send update actions", (t) => { expectValueDeepEqual(t, saga.next(saveQueue), call(sendRequestToServer, TRACING_TYPE, tracingId)); saga.next(saveQueue.length); // select state - expectValueDeepEqual(t, saga.next([]), put(setSaveBusyAction(false, TRACING_TYPE))); + expectValueDeepEqual(t, saga.next([]), put(setSaveBusyAction(false, TRACING_TYPE, tracingId))); // Test that loop repeats saga.next(); // select state @@ -224,7 +224,7 @@ test("SaveSaga should send update actions right away and try to reach a state wh saga.next(1); // advance to select state - expectValueDeepEqual(t, saga.next([]), put(setSaveBusyAction(false, TRACING_TYPE))); + expectValueDeepEqual(t, saga.next([]), put(setSaveBusyAction(false, TRACING_TYPE, tracingId))); }); test("SaveSaga should not try to reach state with all actions being saved when saving is triggered by a timeout", (t) => { const updateActions = [[UpdateActions.createEdge(1, 0, 1)], [UpdateActions.createEdge(1, 1, 2)]]; @@ -243,7 +243,7 @@ test("SaveSaga should not try to reach state with all actions being saved when s saga.next(saveQueue); // call sendRequestToServer - expectValueDeepEqual(t, saga.next([]), put(setSaveBusyAction(false, TRACING_TYPE))); + expectValueDeepEqual(t, saga.next([]), put(setSaveBusyAction(false, TRACING_TYPE, tracingId))); }); test("SaveSaga should remove the correct update actions", (t) => { const saveQueue = createSaveQueueFromUpdateActions( diff --git a/frontend/javascripts/types/api_flow_types.ts b/frontend/javascripts/types/api_flow_types.ts index 09fc30c9ff..138d8c0318 100644 --- a/frontend/javascripts/types/api_flow_types.ts +++ b/frontend/javascripts/types/api_flow_types.ts @@ -75,6 +75,7 @@ type APIDataLayerBase = { readonly dataFormat?: "wkw" | "zarr"; readonly additionalAxes: Array | null; readonly 
coordinateTransformations?: CoordinateTransformation[] | null; + readonly hasSegmentIndex?: boolean; }; type APIColorLayer = APIDataLayerBase & { readonly category: "color"; diff --git a/frontend/stylesheets/main.less b/frontend/stylesheets/main.less index 5ae028009f..7547901a36 100644 --- a/frontend/stylesheets/main.less +++ b/frontend/stylesheets/main.less @@ -165,7 +165,6 @@ body { line-height: normal; } - button.narrow { // undo/redo + toolbar buttons padding: 0 10px; @@ -641,7 +640,6 @@ button.narrow { background-repeat: no-repeat; background-size: cover; } - } } @@ -649,8 +647,3 @@ button.narrow { text-align: center; display: inherit; } - -.segments-stats-info-alert { - margin-bottom: 10px; - margin-top: -14px; // wanted margin - modal padding = 10-24 -} diff --git a/test/backend/NMLUnitTestSuite.scala b/test/backend/NMLUnitTestSuite.scala index 849e0bb633..d66986887c 100644 --- a/test/backend/NMLUnitTestSuite.scala +++ b/test/backend/NMLUnitTestSuite.scala @@ -7,13 +7,14 @@ import com.scalableminds.webknossos.datastore.models.annotation.{AnnotationLayer import com.scalableminds.webknossos.tracingstore.tracings.volume.VolumeDataZipFormat import models.annotation.nml.{NmlParser, NmlWriter} import models.annotation.UploadedVolumeLayer -import net.liftweb.common.{Box, Full} +import net.liftweb.common.{Box, Empty, Full} import org.apache.commons.io.output.ByteArrayOutputStream import org.scalatestplus.play.PlaySpec import play.api.i18n.{DefaultMessagesApi, Messages, MessagesProvider} import play.api.test.FakeRequest import scala.concurrent.Await +import scala.concurrent.ExecutionContext.Implicits.global import scala.concurrent.duration.Duration class NMLUnitTestSuite extends PlaySpec { @@ -45,7 +46,12 @@ class NMLUnitTestSuite extends PlaySpec { val os = new ByteArrayOutputStream() Await.result(nmlFunctionStream.writeTo(os)(scala.concurrent.ExecutionContext.global), Duration.Inf) val array = os.toByteArray - NmlParser.parse("", new ByteArrayInputStream(array), None, isTaskUpload = true) + NmlParser.parse("", + new ByteArrayInputStream(array), + None, + isTaskUpload = true, + None, + (a: String, b: String) => None) } def isParseSuccessful( diff --git a/util/src/main/scala/com/scalableminds/util/io/ZipIO.scala b/util/src/main/scala/com/scalableminds/util/io/ZipIO.scala index fca29b06c3..d06c23b1f0 100644 --- a/util/src/main/scala/com/scalableminds/util/io/ZipIO.scala +++ b/util/src/main/scala/com/scalableminds/util/io/ZipIO.scala @@ -1,5 +1,7 @@ package com.scalableminds.util.io +import com.scalableminds.util.tools.Fox.{box2Fox, future2Fox} + import java.io._ import java.nio.file.{Files, Path, Paths} import java.util.zip.{GZIPOutputStream => DefaultGZIPOutputStream, _} @@ -164,6 +166,63 @@ object ZipIO extends LazyLogging { def withUnziped[A](file: File)(f: (Path, InputStream) => A): Box[List[A]] = tryo(new java.util.zip.ZipFile(file)).flatMap(withUnziped(_)((name, is) => Full(f(name, is)))) + def withUnzipedAsync[A](file: File)(f: (Path, InputStream) => Fox[A])(implicit ec: ExecutionContext): Fox[List[A]] = + for { + zip <- tryo(new java.util.zip.ZipFile(file)).toFox + resultList <- withUnzipedAsync(zip)((name, is) => f(name, is)) + } yield resultList + + def withUnzipedAsync[A](zip: ZipFile, + includeHiddenFiles: Boolean = false, + hiddenFilesWhitelist: List[String] = List(), + truncateCommonPrefix: Boolean = false, + excludeFromPrefix: Option[List[String]] = None)(f: (Path, InputStream) => Fox[A])( + implicit ec: ExecutionContext): Fox[List[A]] = { + + val zipEntries = 
zip.entries.asScala.filter { e: ZipEntry => + !e.isDirectory && (includeHiddenFiles || !isFileHidden(e) || hiddenFilesWhitelist.contains( + Paths.get(e.getName).getFileName.toString)) + }.toList + + val commonPrefix = if (truncateCommonPrefix) { + val commonPrefixNotFixed = PathUtils.commonPrefix(zipEntries.map(e => Paths.get(e.getName))) + val strippedPrefix = + PathUtils.cutOffPathAtLastOccurrenceOf(commonPrefixNotFixed, excludeFromPrefix.getOrElse(List.empty)) + PathUtils.removeSingleFileNameFromPrefix(strippedPrefix, zipEntries.map(_.getName)) + } else { + Paths.get("") + } + + val resultFox = zipEntries.foldLeft[Fox[List[A]]](Fox.successful(List.empty)) { (results, entry) => + results.futureBox.map { + case Full(rs) => + val input: InputStream = zip.getInputStream(entry) + val path = commonPrefix.relativize(Paths.get(entry.getName)) + val innerResultFox: Fox[List[A]] = f(path, input).futureBox.map { + case Full(result) => + input.close() + Full(rs :+ result) + case Empty => + input.close() + Empty + case failure: Failure => + input.close() + failure + } + innerResultFox + case e => + e.toFox + }.toFox.flatten + } + + for { + result <- resultFox.futureBox.map { resultBox => + zip.close() // close even if result is not success + resultBox + } + } yield result + } + def withUnziped[A](zip: ZipFile, includeHiddenFiles: Boolean = false, hiddenFilesWhitelist: List[String] = List(), diff --git a/util/src/main/scala/com/scalableminds/util/tools/Fox.scala b/util/src/main/scala/com/scalableminds/util/tools/Fox.scala index 27b8438dc8..32f9257793 100644 --- a/util/src/main/scala/com/scalableminds/util/tools/Fox.scala +++ b/util/src/main/scala/com/scalableminds/util/tools/Fox.scala @@ -383,6 +383,13 @@ class Fox[+A](val futureBox: Future[Box[A]])(implicit ec: ExecutionContext) { case f: Failure => f }) + def fillEmpty[B >: A](fillValue: B) = + new Fox(futureBox.map { + case Full(value) => Full(value) + case Empty => Full(fillValue) + case f: Failure => f + }) + /** * Makes Fox play better with Scala 2.8 for comprehensions */ diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala index d695f38efc..32854e6b73 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala @@ -1,14 +1,21 @@ package com.scalableminds.webknossos.datastore.controllers import com.google.inject.Inject +import com.scalableminds.util.geometry.Vec3Int import com.scalableminds.util.tools.{Fox, FoxImplicits} import com.scalableminds.webknossos.datastore.ListOfLong.ListOfLong +import com.scalableminds.webknossos.datastore.helpers.{ + GetMultipleSegmentIndexParameters, + GetSegmentIndexParameters, + SegmentIndexData, + SegmentStatisticsParameters +} import com.scalableminds.webknossos.datastore.models.datasource.inbox.{ InboxDataSource, InboxDataSourceLike, UnusableInboxDataSource } -import com.scalableminds.webknossos.datastore.models.datasource.{DataSource, DataSourceId} +import com.scalableminds.webknossos.datastore.models.datasource.{DataLayer, DataSource, DataSourceId} import com.scalableminds.webknossos.datastore.services._ import com.scalableminds.webknossos.datastore.services.uploading.{ CancelUploadInformation, @@ -28,7 +35,7 @@ import java.io.File import 
com.scalableminds.webknossos.datastore.storage.AgglomerateFileKey import play.api.libs.Files -import scala.concurrent.ExecutionContext +import scala.concurrent.{ExecutionContext, Future} import scala.concurrent.duration._ class DataSourceController @Inject()( @@ -36,14 +43,18 @@ class DataSourceController @Inject()( dataSourceService: DataSourceService, remoteWebknossosClient: DSRemoteWebknossosClient, accessTokenService: DataStoreAccessTokenService, - binaryDataServiceHolder: BinaryDataServiceHolder, + val binaryDataServiceHolder: BinaryDataServiceHolder, connectomeFileService: ConnectomeFileService, + segmentIndexFileService: SegmentIndexFileService, storageUsageService: DSUsedStorageService, datasetErrorLoggingService: DatasetErrorLoggingService, uploadService: UploadService, - composeService: ComposeService + composeService: ComposeService, + val dsRemoteWebknossosClient: DSRemoteWebknossosClient, + val dsRemoteTracingstoreClient: DSRemoteTracingstoreClient, )(implicit bodyParsers: PlayBodyParsers, ec: ExecutionContext) extends Controller + with MeshMappingHelper with FoxImplicits { override def allowRemoteOrigin: Boolean = true @@ -554,4 +565,137 @@ class DataSourceController @Inject()( } } + def checkSegmentIndexFile(token: Option[String], + organizationName: String, + dataSetName: String, + dataLayerName: String): Action[AnyContent] = + Action.async { implicit request => + accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(dataSetName, organizationName)), + urlOrHeaderToken(token, request)) { + val segmentIndexFileOpt = + segmentIndexFileService.getSegmentIndexFile(organizationName, dataSetName, dataLayerName).toOption + Future.successful(Ok(Json.toJson(segmentIndexFileOpt.isDefined))) + } + } + + /** + * Query the segment index file for a single segment + * @return List of bucketPositions as positions (not indices) of 32³ buckets in mag + */ + def getSegmentIndex(token: Option[String], + organizationName: String, + datasetName: String, + dataLayerName: String, + segmentId: String): Action[GetSegmentIndexParameters] = + Action.async(validateJson[GetSegmentIndexParameters]) { implicit request => + accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationName)), + urlOrHeaderToken(token, request)) { + for { + segmentIds <- segmentIdsForAgglomerateIdIfNeeded( + organizationName, + datasetName, + dataLayerName, + request.body.mappingName, + request.body.editableMappingTracingId, + segmentId.toLong, + mappingNameForMeshFile = None, + urlOrHeaderToken(token, request) + ) + fileMag <- segmentIndexFileService.readFileMag(organizationName, datasetName, dataLayerName) + topLeftsNested: Seq[Array[Vec3Int]] <- Fox.serialCombined(segmentIds)(sId => + segmentIndexFileService.readSegmentIndex(organizationName, datasetName, dataLayerName, sId)) + topLefts: Array[Vec3Int] = topLeftsNested.toArray.flatten + bucketPositions = segmentIndexFileService.topLeftsToDistinctBucketPositions(topLefts, + request.body.mag, + fileMag) + bucketPositionsForCubeSize = bucketPositions + .map(_.scale(DataLayer.bucketLength)) // bucket positions raw are indices of 32³ buckets + .map(_ / request.body.cubeSize) + .distinct // divide by requested cube size to map them to larger buckets, select unique + .map(_ * request.body.cubeSize) // return positions, not indices + } yield Ok(Json.toJson(bucketPositionsForCubeSize)) + } + } + + /** + * Query the segment index file for multiple segments + * @return List of bucketPositions as indices of 32³ 
buckets + */ + def querySegmentIndex(token: Option[String], + organizationName: String, + datasetName: String, + dataLayerName: String): Action[GetMultipleSegmentIndexParameters] = + Action.async(validateJson[GetMultipleSegmentIndexParameters]) { implicit request => + accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationName)), + urlOrHeaderToken(token, request)) { + for { + segmentIdsAndBucketPositions <- Fox.serialCombined(request.body.segmentIds) { segmentOrAgglomerateId => + for { + segmentIds <- segmentIdsForAgglomerateIdIfNeeded( + organizationName, + datasetName, + dataLayerName, + request.body.mappingName, + request.body.editableMappingTracingId, + segmentOrAgglomerateId, + mappingNameForMeshFile = None, + urlOrHeaderToken(token, request) + ) + fileMag <- segmentIndexFileService.readFileMag(organizationName, datasetName, dataLayerName) + topLeftsNested: Seq[Array[Vec3Int]] <- Fox.serialCombined(segmentIds)(sId => + segmentIndexFileService.readSegmentIndex(organizationName, datasetName, dataLayerName, sId)) + topLefts: Array[Vec3Int] = topLeftsNested.toArray.flatten + bucketPositions = segmentIndexFileService.topLeftsToDistinctBucketPositions(topLefts, + request.body.mag, + fileMag) + } yield SegmentIndexData(segmentOrAgglomerateId, bucketPositions.toSeq) + } + } yield Ok(Json.toJson(segmentIdsAndBucketPositions)) + } + } + + def getSegmentVolume(token: Option[String], + organizationName: String, + datasetName: String, + dataLayerName: String): Action[SegmentStatisticsParameters] = + Action.async(validateJson[SegmentStatisticsParameters]) { implicit request => + accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationName)), + urlOrHeaderToken(token, request)) { + for { + _ <- segmentIndexFileService.assertSegmentIndexFileExists(organizationName, datasetName, dataLayerName) + volumes <- Fox.serialCombined(request.body.segmentIds) { segmentId => + segmentIndexFileService.getSegmentVolume( + organizationName, + datasetName, + dataLayerName, + segmentId, + request.body.mag, + request.body.mappingName + ) + } + } yield Ok(Json.toJson(volumes)) + } + } + + def getSegmentBoundingBox(token: Option[String], + organizationName: String, + datasetName: String, + dataLayerName: String): Action[SegmentStatisticsParameters] = + Action.async(validateJson[SegmentStatisticsParameters]) { implicit request => + accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationName)), + urlOrHeaderToken(token, request)) { + for { + _ <- segmentIndexFileService.assertSegmentIndexFileExists(organizationName, datasetName, dataLayerName) + boxes <- Fox.serialCombined(request.body.segmentIds) { segmentId => + segmentIndexFileService.getSegmentBoundingBox(organizationName, + datasetName, + dataLayerName, + segmentId, + request.body.mag, + request.body.mappingName) + } + } yield Ok(Json.toJson(boxes)) + } + } + } diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/wkw/WKWFile.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/wkw/WKWFile.scala index 97080fb6fc..1d846be567 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/wkw/WKWFile.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/wkw/WKWFile.scala @@ -3,12 +3,14 @@ package com.scalableminds.webknossos.datastore.dataformats.wkw import java.io._ import 
org.apache.commons.io.IOUtils import com.google.common.io.LittleEndianDataInputStream -import com.scalableminds.util.tools.BoxImplicits -import com.scalableminds.webknossos.datastore.dataformats.wkw.util.ResourceBox +import com.scalableminds.util.tools.Fox.box2Fox +import com.scalableminds.util.tools.{BoxImplicits, Fox} import net.jpountz.lz4.LZ4Factory import net.liftweb.common.{Box, Failure, Full} import net.liftweb.common.Box.tryo +import scala.concurrent.ExecutionContext + trait WKWCompressionHelper extends BoxImplicits { protected def error(msg: String): String = @@ -65,18 +67,18 @@ trait WKWCompressionHelper extends BoxImplicits { object WKWFile extends WKWCompressionHelper { - def read[T](is: InputStream)(f: (WKWHeader, Iterator[Array[Byte]]) => T): Box[T] = - ResourceBox.manage(new LittleEndianDataInputStream(is)) { dataStream => - for { - header <- WKWHeader(dataStream, readJumpTable = true) - } yield { - val blockIterator = header.blockLengths.flatMap { blockLength => - val data: Array[Byte] = IOUtils.toByteArray(dataStream, blockLength) - if (header.isCompressed) decompressChunk(header.blockType, header.numBytesPerChunk)(data) else Full(data) - } - f(header, blockIterator) + def read[T](is: InputStream)(f: (WKWHeader, Iterator[Array[Byte]]) => Fox[T])( + implicit ec: ExecutionContext): Fox[T] = { + val dataStream = new LittleEndianDataInputStream(is) + for { + header <- WKWHeader(dataStream, readJumpTable = true).toFox + blockIterator = header.blockLengths.flatMap { blockLength => + val data: Array[Byte] = IOUtils.toByteArray(dataStream, blockLength) + if (header.isCompressed) decompressChunk(header.blockType, header.numBytesPerChunk)(data) else Full(data) } - } + result <- f(header, blockIterator) + } yield result + } def write(os: OutputStream, header: WKWHeader, blocks: Iterator[Array[Byte]]): Box[Unit] = { val dataBuffer = new ByteArrayOutputStream() diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/helpers/GetSegmentIndexParameters.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/helpers/GetSegmentIndexParameters.scala new file mode 100644 index 0000000000..8494d31071 --- /dev/null +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/helpers/GetSegmentIndexParameters.scala @@ -0,0 +1,36 @@ +package com.scalableminds.webknossos.datastore.helpers + +import com.scalableminds.util.geometry.Vec3Int +import com.scalableminds.webknossos.datastore.models.AdditionalCoordinate +import play.api.libs.json.{Format, Json} + +case class GetSegmentIndexParameters( + mag: Vec3Int, + cubeSize: Vec3Int, // Use the cubeSize parameter to map the found bucket indices to different size of cubes (e.g. 
reducing granularity with higher cubeSize) + additionalCoordinates: Option[Seq[AdditionalCoordinate]], + mappingName: Option[String], // Specify the mapping when querying the datastore + editableMappingTracingId: Option[String] +) + +object GetSegmentIndexParameters { + implicit val format: Format[GetSegmentIndexParameters] = Json.format[GetSegmentIndexParameters] +} + +case class GetMultipleSegmentIndexParameters( + segmentIds: List[Long], + mag: Vec3Int, + additionalCoordinates: Option[Seq[AdditionalCoordinate]], + mappingName: Option[String], + editableMappingTracingId: Option[String], +) + +object GetMultipleSegmentIndexParameters { + implicit val format: Format[GetMultipleSegmentIndexParameters] = Json.format[GetMultipleSegmentIndexParameters] +} + +// positions = List of indices of buckets directly in a requested mag +case class SegmentIndexData(segmentId: Long, positions: Seq[Vec3Int]) + +object SegmentIndexData { + implicit val format: Format[SegmentIndexData] = Json.format[SegmentIndexData] +} diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/helpers/SegmentStatistics.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/helpers/SegmentStatistics.scala new file mode 100644 index 0000000000..70e686826f --- /dev/null +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/helpers/SegmentStatistics.scala @@ -0,0 +1,136 @@ +package com.scalableminds.webknossos.datastore.helpers + +import com.scalableminds.util.geometry.{BoundingBox, Vec3Int} +import com.scalableminds.util.tools.{Fox, FoxImplicits} +import com.scalableminds.webknossos.datastore.geometry.ListOfVec3IntProto +import com.scalableminds.webknossos.datastore.models.datasource.DataLayer +import com.scalableminds.webknossos.datastore.models.{AdditionalCoordinate, UnsignedInteger} +import play.api.libs.json.{Json, OFormat} + +import scala.concurrent.ExecutionContext + +case class SegmentStatisticsParameters(mag: Vec3Int, + segmentIds: List[Long], + mappingName: Option[String], + additionalCoordinates: Option[Seq[AdditionalCoordinate]]) +object SegmentStatisticsParameters { + implicit val jsonFormat: OFormat[SegmentStatisticsParameters] = Json.format[SegmentStatisticsParameters] +} + +trait SegmentStatistics extends ProtoGeometryImplicits with FoxImplicits { + + def calculateSegmentVolume( + segmentId: Long, + mag: Vec3Int, + additionalCoordinates: Option[Seq[AdditionalCoordinate]], + getBucketPositions: (Long, Vec3Int) => Fox[ListOfVec3IntProto], + getTypedDataForBucketPosition: ( + Vec3Int, + Vec3Int, + Option[Seq[AdditionalCoordinate]]) => Fox[Array[UnsignedInteger]])(implicit ec: ExecutionContext): Fox[Long] = + for { + bucketPositionsProtos: ListOfVec3IntProto <- getBucketPositions(segmentId, mag) + bucketPositionsInMag = bucketPositionsProtos.values.map(vec3IntFromProto) + volumeBoxes <- Fox.serialSequence(bucketPositionsInMag.toList)(bucketPosition => + for { + dataTyped: Array[UnsignedInteger] <- getTypedDataForBucketPosition(bucketPosition, mag, additionalCoordinates) + count = dataTyped.count(unsignedInteger => unsignedInteger.toPositiveLong == segmentId) + } yield count.toLong) + counts <- Fox.combined(volumeBoxes.map(_.toFox)) + } yield counts.sum + + // Returns the bounding box in voxels in the target mag + def calculateSegmentBoundingBox(segmentId: Long, + mag: Vec3Int, + additionalCoordinates: Option[Seq[AdditionalCoordinate]], + getBucketPositions: (Long, Vec3Int) => Fox[ListOfVec3IntProto], + getTypedDataForBucketPosition: ( + Vec3Int, + Vec3Int, + 
Option[Seq[AdditionalCoordinate]]) => Fox[Array[UnsignedInteger]])( + implicit ec: ExecutionContext): Fox[BoundingBox] = + for { + allBucketPositions: ListOfVec3IntProto <- getBucketPositions(segmentId, mag) + relevantBucketPositions = filterOutInnerBucketPositions(allBucketPositions) + boundingBoxMutable = scala.collection.mutable.ListBuffer[Int](Int.MaxValue, + Int.MaxValue, + Int.MaxValue, + Int.MinValue, + Int.MinValue, + Int.MinValue) //topleft, bottomright + _ <- Fox.serialCombined(relevantBucketPositions.iterator)( + bucketPosition => + extendBoundingBoxByData(mag, + segmentId, + boundingBoxMutable, + bucketPosition, + additionalCoordinates, + getTypedDataForBucketPosition)) + } yield + if (boundingBoxMutable.exists(item => item == Int.MaxValue || item == Int.MinValue)) { + BoundingBox.empty + } else + BoundingBox( + Vec3Int(boundingBoxMutable(0), boundingBoxMutable(1), boundingBoxMutable(2)), + boundingBoxMutable(3) - boundingBoxMutable(0) + 1, + boundingBoxMutable(4) - boundingBoxMutable(1) + 1, + boundingBoxMutable(5) - boundingBoxMutable(2) + 1 + ) + + // The buckets that form the outer walls of the bounding box are relevant (in each of those the real min/max voxel positions could occur) + private def filterOutInnerBucketPositions(bucketPositions: ListOfVec3IntProto): Seq[Vec3Int] = + if (bucketPositions.values.isEmpty) List.empty + else { + val minX = bucketPositions.values.map(_.x).min + val minY = bucketPositions.values.map(_.y).min + val minZ = bucketPositions.values.map(_.z).min + val maxX = bucketPositions.values.map(_.x).max + val maxY = bucketPositions.values.map(_.y).max + val maxZ = bucketPositions.values.map(_.z).max + bucketPositions.values + .filter(pos => + pos.x == minX || pos.x == maxX || pos.y == minY || pos.y == maxY || pos.z == minZ || pos.z == maxZ) + .map(vec3IntFromProto) + } + + private def extendBoundingBoxByData( + mag: Vec3Int, + segmentId: Long, + mutableBoundingBox: scala.collection.mutable.ListBuffer[Int], + bucketPosition: Vec3Int, + additionalCoordinates: Option[Seq[AdditionalCoordinate]], + getTypedDataForBucketPosition: (Vec3Int, + Vec3Int, + Option[Seq[AdditionalCoordinate]]) => Fox[Array[UnsignedInteger]]): Fox[Unit] = + for { + dataTyped: Array[UnsignedInteger] <- getTypedDataForBucketPosition(bucketPosition, mag, additionalCoordinates) + bucketTopLeftInTargetMagVoxels = bucketPosition * DataLayer.bucketLength + _ = scanDataAndExtendBoundingBox(dataTyped, bucketTopLeftInTargetMagVoxels, segmentId, mutableBoundingBox) + } yield () + + private def scanDataAndExtendBoundingBox(dataTyped: Array[UnsignedInteger], + bucketTopLeftInTargetMagVoxels: Vec3Int, + segmentId: Long, + mutableBoundingBox: scala.collection.mutable.ListBuffer[Int]): Unit = + for { + x <- 0 until DataLayer.bucketLength + y <- 0 until DataLayer.bucketLength + z <- 0 until DataLayer.bucketLength + index = z * DataLayer.bucketLength * DataLayer.bucketLength + y * DataLayer.bucketLength + x + } yield { + if (dataTyped(index).toPositiveLong == segmentId) { + val voxelPosition = bucketTopLeftInTargetMagVoxels + Vec3Int(x, y, z) + extendBoundingBoxByPosition(mutableBoundingBox, voxelPosition) + } + } + + private def extendBoundingBoxByPosition(mutableBoundingBox: scala.collection.mutable.ListBuffer[Int], + position: Vec3Int): Unit = { + mutableBoundingBox(0) = Math.min(mutableBoundingBox(0), position.x) + mutableBoundingBox(1) = Math.min(mutableBoundingBox(1), position.y) + mutableBoundingBox(2) = Math.min(mutableBoundingBox(2), position.z) + mutableBoundingBox(3) = 
Math.max(mutableBoundingBox(3), position.x) + mutableBoundingBox(4) = Math.max(mutableBoundingBox(4), position.y) + mutableBoundingBox(5) = Math.max(mutableBoundingBox(5), position.z) + } +} diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/requests/DataServiceRequests.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/requests/DataServiceRequests.scala index c2398fbb25..95d2671dc9 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/requests/DataServiceRequests.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/requests/DataServiceRequests.scala @@ -17,7 +17,7 @@ object DataServiceRequestSettings { case class DataServiceDataRequest( dataSource: DataSource, // null in VolumeTracings dataLayer: DataLayer, - dataLayerMapping: Option[String], + dataLayerMapping: Option[String], // No longer used. use appliedAgglomerate in settings instead cuboid: Cuboid, settings: DataServiceRequestSettings ) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/Hdf5HashedArrayUtils.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/Hdf5HashedArrayUtils.scala new file mode 100644 index 0000000000..75f0c4f8f1 --- /dev/null +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/Hdf5HashedArrayUtils.scala @@ -0,0 +1,17 @@ +package com.scalableminds.webknossos.datastore.services + +import org.apache.commons.codec.digest.MurmurHash3 + +import java.nio.ByteBuffer + +trait Hdf5HashedArrayUtils { + + val hdf5FileExtension = "hdf5" + + def getHashFunction(name: String): Long => Long = name match { + case "identity" => identity + case "murmurhash3_x64_128" => + x: Long => + Math.abs(MurmurHash3.hash128x64(ByteBuffer.allocate(8).putLong(x).array())(1)) + } +} diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/MeshFileService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/MeshFileService.scala index fd3a4683e4..25e747804b 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/MeshFileService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/MeshFileService.scala @@ -10,12 +10,10 @@ import com.scalableminds.webknossos.datastore.storage.{CachedHdf5File, Hdf5FileC import com.typesafe.scalalogging.LazyLogging import net.liftweb.common.Box import net.liftweb.common.Box.tryo -import org.apache.commons.codec.digest.MurmurHash3 import org.apache.commons.io.FilenameUtils import play.api.libs.json.{Json, OFormat} import java.io.ByteArrayInputStream -import java.nio.ByteBuffer import java.nio.file.{Path, Paths} import javax.inject.Inject import scala.collection.mutable.ListBuffer @@ -181,25 +179,19 @@ object WebknossosSegmentInfo { class MeshFileService @Inject()(config: DataStoreConfig)(implicit ec: ExecutionContext) extends FoxImplicits with LazyLogging + with Hdf5HashedArrayUtils with ByteUtils { private val dataBaseDir = Paths.get(config.Datastore.baseFolder) private val meshesDir = "meshes" - private val meshFileExtension = "hdf5" private val defaultLevelOfDetail = 0 - private def getHashFunction(name: String): Long => Long = name match { - case "identity" => identity - case "murmurhash3_x64_128" => - x: Long => - Math.abs(MurmurHash3.hash128x64(ByteBuffer.allocate(8).putLong(x).array())(1)) - } private lazy val meshFileCache = new Hdf5FileCache(30) def exploreMeshFiles(organizationName: 
String, datasetName: String, dataLayerName: String): Fox[Set[MeshFileInfo]] = { val layerDir = dataBaseDir.resolve(organizationName).resolve(datasetName).resolve(dataLayerName) val meshFileNames = PathUtils - .listFiles(layerDir.resolve(meshesDir), silent = true, PathUtils.fileExtensionFilter(meshFileExtension)) + .listFiles(layerDir.resolve(meshesDir), silent = true, PathUtils.fileExtensionFilter(hdf5FileExtension)) .map { paths => paths.map(path => FilenameUtils.removeExtension(path.getFileName.toString)) } @@ -207,12 +199,12 @@ class MeshFileService @Inject()(config: DataStoreConfig)(implicit ec: ExecutionC .getOrElse(Nil) val meshFileVersions = meshFileNames.map { fileName => - val meshFilePath = layerDir.resolve(meshesDir).resolve(s"$fileName.$meshFileExtension") + val meshFilePath = layerDir.resolve(meshesDir).resolve(s"$fileName.$hdf5FileExtension") versionForMeshFile(meshFilePath) } val mappingNameFoxes = meshFileNames.lazyZip(meshFileVersions).map { (fileName, fileVersion) => - val meshFilePath = layerDir.resolve(meshesDir).resolve(s"$fileName.$meshFileExtension") + val meshFilePath = layerDir.resolve(meshesDir).resolve(s"$fileName.$hdf5FileExtension") mappingNameForMeshFile(meshFilePath, fileVersion) } @@ -246,7 +238,7 @@ class MeshFileService @Inject()(config: DataStoreConfig)(implicit ec: ExecutionC .resolve(datasetName) .resolve(dataLayerName) .resolve(meshesDir) - .resolve(s"${meshFileName}.$meshFileExtension") + .resolve(s"$meshFileName.$hdf5FileExtension") executeWithCachedHdf5(meshFilePath, meshFileCache) { cachedMeshFile => cachedMeshFile.reader.string().getAttr("/", "mapping_name") }.toOption.flatMap { value => @@ -269,7 +261,7 @@ class MeshFileService @Inject()(config: DataStoreConfig)(implicit ec: ExecutionC .resolve(datasetName) .resolve(dataLayerName) .resolve(meshesDir) - .resolve(s"${listMeshChunksRequest.meshFile}.$meshFileExtension") + .resolve(s"${listMeshChunksRequest.meshFile}.$hdf5FileExtension") executeWithCachedHdf5(meshFilePath, meshFileCache) { cachedMeshFile => val chunkPositionLiterals = cachedMeshFile.reader @@ -306,7 +298,7 @@ class MeshFileService @Inject()(config: DataStoreConfig)(implicit ec: ExecutionC .resolve(datasetName) .resolve(dataLayerName) .resolve(meshesDir) - .resolve(s"${meshFileName}.$meshFileExtension") + .resolve(s"$meshFileName.$hdf5FileExtension") executeWithCachedHdf5(meshFilePath, meshFileCache) { cachedMeshFile => val encoding = cachedMeshFile.reader.string().getAttr("/", "mesh_format") @@ -402,7 +394,7 @@ class MeshFileService @Inject()(config: DataStoreConfig)(implicit ec: ExecutionC .resolve(datasetName) .resolve(dataLayerName) .resolve(meshesDir) - .resolve(s"${meshChunkDataRequest.meshFile}.$meshFileExtension") + .resolve(s"${meshChunkDataRequest.meshFile}.$hdf5FileExtension") executeWithCachedHdf5(meshFilePath, meshFileCache) { cachedMeshFile => val encoding = cachedMeshFile.reader.string().getAttr("/", "metadata/encoding") @@ -429,7 +421,7 @@ class MeshFileService @Inject()(config: DataStoreConfig)(implicit ec: ExecutionC .resolve(datasetName) .resolve(dataLayerName) .resolve(meshesDir) - .resolve(s"${meshChunkDataRequests.meshFile}.$meshFileExtension") + .resolve(s"${meshChunkDataRequests.meshFile}.$hdf5FileExtension") executeWithCachedHdf5(meshFilePath, meshFileCache) { cachedMeshFile => val meshFormat = cachedMeshFile.reader.string().getAttr("/", "mesh_format") diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/MeshMappingHelper.scala 
b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/MeshMappingHelper.scala index 34b88747fc..e14907a605 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/MeshMappingHelper.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/MeshMappingHelper.scala @@ -22,7 +22,6 @@ trait MeshMappingHelper { mappingNameForMeshFile: Option[String], token: Option[String])(implicit ec: ExecutionContext): Fox[List[Long]] = targetMappingName match { - case None => // No mapping selected, assume id matches meshfile Fox.successful(List(agglomerateId)) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/SegmentIndexFileService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/SegmentIndexFileService.scala new file mode 100644 index 0000000000..6091593a58 --- /dev/null +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/SegmentIndexFileService.scala @@ -0,0 +1,261 @@ +package com.scalableminds.webknossos.datastore.services + +import com.scalableminds.util.geometry.{BoundingBox, Vec3Int} +import com.scalableminds.util.io.PathUtils +import com.scalableminds.util.tools.{Fox, FoxImplicits} +import com.scalableminds.webknossos.datastore.DataStoreConfig +import com.scalableminds.webknossos.datastore.geometry.ListOfVec3IntProto +import com.scalableminds.webknossos.datastore.helpers.SegmentStatistics +import com.scalableminds.webknossos.datastore.models.datasource.DataLayer +import com.scalableminds.webknossos.datastore.models.requests.{ + Cuboid, + DataServiceDataRequest, + DataServiceRequestSettings +} +import com.scalableminds.webknossos.datastore.models.{ + AdditionalCoordinate, + UnsignedInteger, + UnsignedIntegerArray, + VoxelPosition, + datasource +} +import com.scalableminds.webknossos.datastore.storage.{AgglomerateFileKey, CachedHdf5File, Hdf5FileCache} +import net.liftweb.common.{Box, Full} +import play.api.i18n.MessagesProvider + +import java.nio.file.{Path, Paths} +import javax.inject.Inject +import scala.concurrent.ExecutionContext + +class SegmentIndexFileService @Inject()(config: DataStoreConfig, + binaryDataServiceHolder: BinaryDataServiceHolder, + dataSourceRepository: DataSourceRepository)(implicit ec: ExecutionContext) + extends FoxImplicits + with Hdf5HashedArrayUtils + with SegmentStatistics { + private val dataBaseDir = Paths.get(config.Datastore.baseFolder) + private val segmentIndexDir = "segmentIndex" + + private lazy val fileHandleCache = new Hdf5FileCache(10) + + def getSegmentIndexFile(organizationName: String, datasetName: String, dataLayerName: String): Box[Path] = + for { + _ <- Full("") + layerDir = dataBaseDir.resolve(organizationName).resolve(datasetName).resolve(dataLayerName) + segmentIndexDir = layerDir.resolve(this.segmentIndexDir) + files <- PathUtils.listFiles(segmentIndexDir, silent = true, PathUtils.fileExtensionFilter(hdf5FileExtension)) + file <- files.headOption + } yield file + + /** + * Read the segment index file and return the bucket positions for the given segment id. + * The bucket positions are the top left corners of the buckets that contain the segment in the file mag. 
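The index file read here is a static hash table inside an HDF5 file: the segment id is hashed (see Hdf5HashedArrayUtils above, e.g. murmurhash3_x64_128) into one of n_hash_buckets buckets, hash_bucket_offsets delimits each bucket's row range in the hash_buckets matrix, and the matching row points into the top_lefts matrix. A schematic sketch of that lookup, assuming (purely for illustration) that the datasets were already loaded into memory:

```typescript
// Schematic lookup against the segment index hash table. Dataset/attribute
// names match the HDF5 file; the in-memory arrays and the stubbed hash
// function are assumptions made for illustration.
type SegmentIndexFile = {
  nHashBuckets: number; // attribute "n_hash_buckets"
  hash: (segmentId: number) => number; // attribute "hash_function"
  hashBucketOffsets: number[]; // dataset "hash_bucket_offsets"
  hashBuckets: Array<[number, number, number]>; // rows: [segmentId, topLeftStart, topLeftEnd]
  topLefts: Array<[number, number, number]>; // dataset "top_lefts", bucket top-lefts in the file mag
};

function lookUpTopLefts(file: SegmentIndexFile, segmentId: number): Array<[number, number, number]> {
  const bucketIndex = file.hash(segmentId) % file.nHashBuckets;
  const bucketStart = file.hashBucketOffsets[bucketIndex];
  const bucketEnd = file.hashBucketOffsets[bucketIndex + 1];
  if (bucketEnd - bucketStart === 0) return []; // segment does not occur in this file
  // Scan the (small) hash bucket for the row of this segment id.
  const row = file.hashBuckets
    .slice(bucketStart, bucketEnd + 1)
    .find(([rowSegmentId]) => rowSegmentId === segmentId);
  if (row == null) return [];
  const [, topLeftStart, topLeftEnd] = row;
  return file.topLefts.slice(topLeftStart, topLeftEnd);
}
```

readSegmentIndex below performs the same lookup with Fox-wrapped, block-wise HDF5 reads instead of in-memory arrays.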
+ * @param organizationName
+ * @param datasetName
+ * @param dataLayerName
+ * @param segmentId id of the segment to look up
+ * @return top lefts (in the mag of the index file) of the buckets that contain the segment
+ */
+  def readSegmentIndex(organizationName: String,
+                       datasetName: String,
+                       dataLayerName: String,
+                       segmentId: Long): Fox[Array[Vec3Int]] =
+    for {
+      segmentIndexPath <- getSegmentIndexFile(organizationName, datasetName, dataLayerName).toFox
+      segmentIndex = fileHandleCache.withCache(segmentIndexPath)(CachedHdf5File.fromPath)
+      hashFunction = getHashFunction(segmentIndex.reader.string().getAttr("/", "hash_function"))
+      nBuckets = segmentIndex.reader.uint64().getAttr("/", "n_hash_buckets")
+
+      bucketIndex = hashFunction(segmentId) % nBuckets
+      bucketOffsets = segmentIndex.reader.uint64().readArrayBlockWithOffset("hash_bucket_offsets", 2, bucketIndex)
+      bucketStart = bucketOffsets(0)
+      bucketEnd = bucketOffsets(1)
+
+      hashBucketExists = bucketEnd - bucketStart != 0
+      topLeftsOpt <- Fox.runIf(hashBucketExists)(readTopLefts(segmentIndex, bucketStart, bucketEnd, segmentId))
+      topLefts = topLeftsOpt.flatten
+    } yield
+      topLefts match {
+        case Some(topLefts) => topLefts.flatMap(topLeft => Vec3Int.fromArray(topLeft.map(_.toInt)))
+        case None => Array.empty
+      }
+
+  def readFileMag(organizationName: String, datasetName: String, dataLayerName: String): Fox[Vec3Int] =
+    for {
+      segmentIndexPath <- getSegmentIndexFile(organizationName, datasetName, dataLayerName).toFox
+      segmentIndex = fileHandleCache.withCache(segmentIndexPath)(CachedHdf5File.fromPath)
+      mag <- Vec3Int.fromArray(segmentIndex.reader.uint64().getArrayAttr("/", "mag").map(_.toInt)).toFox
+    } yield mag
+
+  private def readTopLefts(segmentIndex: CachedHdf5File,
+                           bucketStart: Long,
+                           bucketEnd: Long,
+                           segmentId: Long): Fox[Option[Array[Array[Short]]]] =
+    for {
+      _ <- Fox.successful(())
+      buckets = segmentIndex.reader
+        .uint64()
+        .readMatrixBlockWithOffset("hash_buckets", (bucketEnd - bucketStart + 1).toInt, 3, bucketStart, 0)
+      bucketLocalOffset = buckets.map(_(0)).indexOf(segmentId)
+      topLeftOpts <- Fox.runIf(bucketLocalOffset >= 0)(for {
+        _ <- Fox.successful(())
+        topLeftStart = buckets(bucketLocalOffset)(1)
+        topLeftEnd = buckets(bucketLocalOffset)(2)
+        topLefts = segmentIndex.reader
+          .uint16() // Read datatype from attributes?
+ .readMatrixBlockWithOffset("top_lefts", (topLeftEnd - topLeftStart).toInt, 3, topLeftStart, 0) + } yield topLefts) + } yield topLeftOpts + + def topLeftsToDistinctBucketPositions(topLefts: Array[Vec3Int], + targetMag: Vec3Int, + fileMag: Vec3Int): Array[Vec3Int] = + topLefts + .map(_.scale(DataLayer.bucketLength)) // map indices to positions + .map(_ / (targetMag / fileMag)) + .map(_ / Vec3Int.full(DataLayer.bucketLength)) // map positions to cube indices + .distinct + + def getSegmentVolume(organizationName: String, + datasetName: String, + dataLayerName: String, + segmentId: Long, + mag: Vec3Int, + mappingName: Option[String])(implicit m: MessagesProvider): Fox[Long] = + calculateSegmentVolume( + segmentId, + mag, + None, // see #7556 + getBucketPositions(organizationName, datasetName, dataLayerName, mappingName), + getTypedDataForBucketPosition(organizationName, datasetName, dataLayerName, mappingName) + ) + + def getSegmentBoundingBox(organizationName: String, + datasetName: String, + dataLayerName: String, + segmentId: Long, + mag: Vec3Int, + mappingName: Option[String])(implicit m: MessagesProvider): Fox[BoundingBox] = + for { + + bb <- calculateSegmentBoundingBox( + segmentId, + mag, + None, // see #7556 + getBucketPositions(organizationName, datasetName, dataLayerName, mappingName), + getTypedDataForBucketPosition(organizationName, datasetName, dataLayerName, mappingName) + ) + } yield bb + + def assertSegmentIndexFileExists(organizationName: String, datasetName: String, dataLayerName: String): Fox[Path] = + Fox.box2Fox(getSegmentIndexFile(organizationName, datasetName, dataLayerName)) ?~> "segmentIndexFile.notFound" + + private def getTypedDataForBucketPosition(organizationName: String, + datasetName: String, + dataLayerName: String, + mappingName: Option[String])( + bucketPosition: Vec3Int, + mag: Vec3Int, + additionalCoordinates: Option[Seq[AdditionalCoordinate]])(implicit m: MessagesProvider) = + for { + // Additional coordinates parameter ignored, see #7556 + (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationName, + datasetName, + dataLayerName) + data <- getDataForBucketPositions(dataSource, dataLayer, mag, Seq(bucketPosition * mag), mappingName) + dataTyped: Array[UnsignedInteger] = UnsignedIntegerArray.fromByteArray(data, dataLayer.elementClass) + } yield dataTyped + + private def getBucketPositions( + organizationName: String, + datasetName: String, + dataLayerName: String, + mappingName: Option[String])(segmentOrAgglomerateId: Long, mag: Vec3Int): Fox[ListOfVec3IntProto] = + for { + segmentIds <- getSegmentIdsForAgglomerateIdIfNeeded(organizationName, + datasetName, + dataLayerName, + segmentOrAgglomerateId, + mappingName) + positionsPerSegment <- Fox.serialCombined(segmentIds)(segmentId => + getBucketPositions(organizationName, datasetName, dataLayerName, segmentId, mag)) + positionsCollected = positionsPerSegment.flatten.distinct + } yield ListOfVec3IntProto.of(positionsCollected.map(vec3IntToProto)) + + private def getBucketPositions(organizationName: String, + datasetName: String, + dataLayerName: String, + segmentId: Long, + mag: Vec3Int): Fox[Array[Vec3Int]] = + for { + fileMag <- readFileMag(organizationName, datasetName, dataLayerName) + bucketPositionsInFileMag <- readSegmentIndex(organizationName, datasetName, dataLayerName, segmentId) + bucketPositions = bucketPositionsInFileMag.map(_ / (mag / fileMag)) + } yield bucketPositions + + def getSegmentIdsForAgglomerateIdIfNeeded(organizationName: String, + datasetName: String, + 
dataLayerName: String, + segmentOrAgglomerateId: Long, + mappingNameOpt: Option[String]): Fox[List[Long]] = + // Editable mappings cannot happen here since those requests go to the tracingstore + mappingNameOpt match { + case Some(mappingName) => + for { + agglomerateService <- binaryDataServiceHolder.binaryDataService.agglomerateServiceOpt.toFox + agglomerateFileKey = AgglomerateFileKey( + organizationName, + datasetName, + dataLayerName, + mappingName + ) + largestAgglomerateId <- agglomerateService.largestAgglomerateId(agglomerateFileKey).toFox + segmentIds <- if (segmentOrAgglomerateId <= largestAgglomerateId) { + agglomerateService + .segmentIdsForAgglomerateId( + agglomerateFileKey, + segmentOrAgglomerateId + ) + .toFox + } else + Fox.successful(List.empty) // agglomerate id is outside of file range, was likely created during brushing + } yield segmentIds + case None => Fox.successful(List(segmentOrAgglomerateId)) + } + + private def getDataForBucketPositions(dataSource: datasource.DataSource, + dataLayer: DataLayer, + mag: Vec3Int, + mag1BucketPositions: Seq[Vec3Int], + mappingName: Option[String]): Fox[Array[Byte]] = { + val dataRequests = mag1BucketPositions.map { position => + DataServiceDataRequest( + dataSource = dataSource, + dataLayer = dataLayer, + dataLayerMapping = None, + cuboid = Cuboid( + VoxelPosition(position.x * DataLayer.bucketLength, + position.y * DataLayer.bucketLength, + position.z * DataLayer.bucketLength, + mag), + DataLayer.bucketLength, + DataLayer.bucketLength, + DataLayer.bucketLength + ), + settings = DataServiceRequestSettings(halfByte = false, + appliedAgglomerate = mappingName, + version = None, + additionalCoordinates = None), + ) + }.toList + for { + (data, _) <- binaryDataServiceHolder.binaryDataService.handleDataRequests(dataRequests) + } yield data + } + +} diff --git a/webknossos-datastore/conf/com.scalableminds.webknossos.datastore.routes b/webknossos-datastore/conf/com.scalableminds.webknossos.datastore.routes index f314dd3310..e3550e6e86 100644 --- a/webknossos-datastore/conf/com.scalableminds.webknossos.datastore.routes +++ b/webknossos-datastore/conf/com.scalableminds.webknossos.datastore.routes @@ -73,6 +73,13 @@ POST /datasets/:organizationName/:datasetName/layers/:dataLayerName/con # Ad-Hoc Meshing POST /datasets/:organizationName/:datasetName/layers/:dataLayerName/adHocMesh @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.requestAdHocMesh(token: Option[String], organizationName: String, datasetName: String, dataLayerName: String) +# Segment-Index files +GET /datasets/:organizationName/:dataSetName/layers/:dataLayerName/hasSegmentIndex @com.scalableminds.webknossos.datastore.controllers.DataSourceController.checkSegmentIndexFile(token: Option[String], organizationName: String, dataSetName: String, dataLayerName: String) +POST /datasets/:organizationName/:dataSetName/layers/:dataLayerName/segmentIndex @com.scalableminds.webknossos.datastore.controllers.DataSourceController.querySegmentIndex(token: Option[String], organizationName: String, dataSetName: String, dataLayerName: String) +POST /datasets/:organizationName/:dataSetName/layers/:dataLayerName/segmentIndex/:segmentId @com.scalableminds.webknossos.datastore.controllers.DataSourceController.getSegmentIndex(token: Option[String], organizationName: String, dataSetName: String, dataLayerName: String, segmentId: String) +POST /datasets/:organizationName/:dataSetName/layers/:dataLayerName/segmentStatistics/volume 
@com.scalableminds.webknossos.datastore.controllers.DataSourceController.getSegmentVolume(token: Option[String], organizationName: String, dataSetName: String, dataLayerName: String)
+POST /datasets/:organizationName/:dataSetName/layers/:dataLayerName/segmentStatistics/boundingBox @com.scalableminds.webknossos.datastore.controllers.DataSourceController.getSegmentBoundingBox(token: Option[String], organizationName: String, dataSetName: String, dataLayerName: String)
+
 # DataSource management
POST /datasets @com.scalableminds.webknossos.datastore.controllers.DataSourceController.uploadChunk(token: Option[String])
POST /datasets/reserveUpload @com.scalableminds.webknossos.datastore.controllers.DataSourceController.reserveUpload(token: Option[String])
diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteDatastoreClient.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteDatastoreClient.scala
index a42e4cc1fa..aea0f2690a 100644
--- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteDatastoreClient.scala
+++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteDatastoreClient.scala
@@ -6,7 +6,13 @@ import com.scalableminds.util.geometry.{Vec3Double, Vec3Int}
 import com.scalableminds.util.tools.Fox
 import com.scalableminds.webknossos.datastore.AgglomerateGraph.AgglomerateGraph
 import com.scalableminds.webknossos.datastore.ListOfLong.ListOfLong
-import com.scalableminds.webknossos.datastore.helpers.MissingBucketHeaders
+import com.scalableminds.webknossos.datastore.helpers.{
+  GetMultipleSegmentIndexParameters,
+  GetSegmentIndexParameters,
+  MissingBucketHeaders,
+  SegmentIndexData
+}
+import com.scalableminds.webknossos.datastore.models.datasource.DataLayer
 import com.scalableminds.webknossos.datastore.models.WebknossosDataRequest
 import com.scalableminds.webknossos.datastore.models.datasource.inbox.InboxDataSource
 import com.scalableminds.webknossos.datastore.rpc.RPC
@@ -115,6 +121,57 @@ class TSRemoteDatastoreClient @Inject()(
     )
   }

+  def hasSegmentIndexFile(remoteFallbackLayer: RemoteFallbackLayer, userToken: Option[String]): Fox[Boolean] =
+    for {
+      remoteLayerUri <- getRemoteLayerUri(remoteFallbackLayer)
+      hasIndexFile <- rpc(s"$remoteLayerUri/hasSegmentIndex")
+        .addQueryStringOptional("token", userToken)
+        .silent
+        .getWithJsonResponse[Boolean]
+    } yield hasIndexFile
+
+  def querySegmentIndex(remoteFallbackLayer: RemoteFallbackLayer,
+                        segmentId: Long,
+                        mag: Vec3Int,
+                        mappingName: Option[String],
+                        editableMappingTracingId: Option[String],
+                        userToken: Option[String]): Fox[Seq[Vec3Int]] =
+    for {
+      remoteLayerUri <- getRemoteLayerUri(remoteFallbackLayer)
+      positions <- rpc(s"$remoteLayerUri/segmentIndex/$segmentId")
+        .addQueryStringOptional("token", userToken)
+        .silent
+        .postJsonWithJsonResponse[GetSegmentIndexParameters, Seq[Vec3Int]](GetSegmentIndexParameters(
+          mag,
+          cubeSize = Vec3Int.ones, // Don't use the cubeSize parameter here (since we want to calculate indices later anyway)
+          additionalCoordinates = None,
+          mappingName = mappingName,
+          editableMappingTracingId = editableMappingTracingId
+        ))
+
+      indices = positions.map(_.scale(1f / DataLayer.bucketLength)) // The route returns positions (to keep the same interface as the tracingstore); we convert them back to bucket indices
+    } yield indices
+
+  def querySegmentIndexForMultipleSegments(remoteFallbackLayer: RemoteFallbackLayer,
+                                           segmentIds: Seq[Long],
+                                           mag: Vec3Int,
+                                           mappingName: Option[String],
+                                           editableMappingTracingId: 
Option[String], + userToken: Option[String]): Fox[Seq[(Long, Seq[Vec3Int])]] = + for { + remoteLayerUri <- getRemoteLayerUri(remoteFallbackLayer) + result <- rpc(s"$remoteLayerUri/segmentIndex") + .addQueryStringOptional("token", userToken) + .silent + .postJsonWithJsonResponse[GetMultipleSegmentIndexParameters, Seq[SegmentIndexData]]( + GetMultipleSegmentIndexParameters(segmentIds.toList, + mag, + additionalCoordinates = None, + mappingName = mappingName, + editableMappingTracingId = editableMappingTracingId)) + + } yield result.map(data => (data.segmentId, data.positions)) + def loadFullMeshStl(token: Option[String], remoteFallbackLayer: RemoteFallbackLayer, fullMeshRequest: FullMeshRequest): Fox[Array[Byte]] = diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TracingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TracingController.scala index 67e2de89ef..4644d58e6a 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TracingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TracingController.scala @@ -275,7 +275,8 @@ trait TracingController[T <: GeneratedMessage, Ts <: GeneratedMessage] extends C tracingsWithIds.map(_._1), newId, newVersion = 0L, - toCache = !persist) + toCache = !persist, + token) newEditableMappingIdBox <- tracingService .mergeEditableMappings(tracingsWithIds, urlOrHeaderToken(token, request)) .futureBox diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala index 548427c5bf..98e38feb3a 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala @@ -8,13 +8,13 @@ import com.scalableminds.util.tools.Fox import com.scalableminds.webknossos.datastore.AgglomerateGraph.AgglomerateGraph import com.scalableminds.webknossos.datastore.VolumeTracing.{VolumeTracing, VolumeTracingOpt, VolumeTracings} import com.scalableminds.webknossos.datastore.geometry.ListOfVec3IntProto -import com.scalableminds.webknossos.datastore.helpers.ProtoGeometryImplicits -import com.scalableminds.webknossos.datastore.models.datasource.{AdditionalAxis, DataLayer} -import com.scalableminds.webknossos.datastore.models.{ - AdditionalCoordinate, - WebknossosDataRequest, - WebknossosAdHocMeshRequest +import com.scalableminds.webknossos.datastore.helpers.{ + GetSegmentIndexParameters, + ProtoGeometryImplicits, + SegmentStatisticsParameters } +import com.scalableminds.webknossos.datastore.models.datasource.{AdditionalAxis, DataLayer} +import com.scalableminds.webknossos.datastore.models.{WebknossosDataRequest, WebknossosAdHocMeshRequest} import com.scalableminds.webknossos.datastore.rpc.RPC import com.scalableminds.webknossos.datastore.services.{ EditableMappingSegmentListResult, @@ -31,7 +31,6 @@ import com.scalableminds.webknossos.tracingstore.tracings.editablemapping.{ import com.scalableminds.webknossos.tracingstore.tracings.volume.{ MergedVolumeStats, ResolutionRestrictions, - SegmentStatisticsParameters, TSFullMeshService, UpdateMappingNameAction, VolumeDataZipFormat, @@ -49,23 +48,13 @@ import com.scalableminds.webknossos.tracingstore.{ import 
net.liftweb.common.{Box, Empty, Failure, Full} import play.api.i18n.Messages import play.api.libs.Files.TemporaryFile -import play.api.libs.json.{Format, Json} +import play.api.libs.json.Json import play.api.mvc.{Action, AnyContent, MultipartFormData, PlayBodyParsers} import java.io.File import java.nio.{ByteBuffer, ByteOrder} import scala.concurrent.ExecutionContext -case class GetSegmentIndexParameters( - mag: Vec3Int, - cubeSize: Vec3Int, // Use the cubeSize parameter to map the found bucket indices to different size of cubes (e.g. reducing granularity with higher cubeSize) - additionalCoordinates: Option[Seq[AdditionalCoordinate]] -) - -object GetSegmentIndexParameters { - implicit val format: Format[GetSegmentIndexParameters] = Json.format[GetSegmentIndexParameters] -} - class VolumeTracingController @Inject()( val tracingService: VolumeTracingService, val config: TracingStoreConfig, @@ -106,7 +95,7 @@ class VolumeTracingController @Inject()( tracing <- tracingService.find(tracingId) ?~> Messages("tracing.notFound") resolutionRestrictions = ResolutionRestrictions(minResolution, maxResolution) resolutions <- tracingService - .initializeWithData(tracingId, tracing, initialData, resolutionRestrictions) + .initializeWithData(tracingId, tracing, initialData, resolutionRestrictions, token) .toFox _ <- tracingService.updateResolutionList(tracingId, tracing, resolutions) } yield Ok(Json.toJson(tracingId)) @@ -142,7 +131,7 @@ class VolumeTracingController @Inject()( for { initialData <- request.body.asRaw.map(_.asFile) ?~> Messages("zipFile.notFound") tracing <- tracingService.find(tracingId) ?~> Messages("tracing.notFound") - resolutions <- tracingService.initializeWithDataMultiple(tracingId, tracing, initialData).toFox + resolutions <- tracingService.initializeWithDataMultiple(tracingId, tracing, initialData, token).toFox _ <- tracingService.updateResolutionList(tracingId, tracing, resolutions) } yield Ok(Json.toJson(tracingId)) } @@ -222,9 +211,10 @@ class VolumeTracingController @Inject()( editPositionParsed, editRotationParsed, boundingBoxParsed, - newEditableMappingId + newEditableMappingId, + userToken ) - _ <- Fox.runIfOptionTrue(downsample)(tracingService.downsample(newId, tracingId, newTracing)) + _ <- Fox.runIfOptionTrue(downsample)(tracingService.downsample(newId, tracingId, newTracing, userToken)) } yield Ok(Json.toJson(newId)) } } @@ -257,11 +247,13 @@ class VolumeTracingController @Inject()( tracing <- tracingService.find(tracingId) ?~> "tracing.notFound" currentVersion <- tracingService.currentVersion(tracingId) before = Instant.now - processedBucketCountOpt <- tracingService.addSegmentIndex(tracingId, - tracing, - currentVersion, - urlOrHeaderToken(token, request), - dryRun) ?~> "addSegmentIndex.failed" + canAddSegmentIndex <- tracingService.checkIfSegmentIndexMayBeAdded(tracingId, tracing, token) + processedBucketCountOpt <- Fox.runIf(canAddSegmentIndex)( + tracingService.addSegmentIndex(tracingId, + tracing, + currentVersion, + urlOrHeaderToken(token, request), + dryRun)) ?~> "addSegmentIndex.failed" currentVersionNew <- tracingService.currentVersion(tracingId) _ <- Fox.runIf(!dryRun)(bool2Fox( processedBucketCountOpt.isEmpty || currentVersionNew == currentVersion + 1L) ?~> "Version increment failed. 
Looks like someone edited the annotation layer in the meantime.") @@ -503,10 +495,13 @@ class VolumeTracingController @Inject()( Action.async(validateJson[SegmentStatisticsParameters]) { implicit request => accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { for { + tracing <- tracingService.find(tracingId) + mappingName <- tracingService.baseMappingName(tracing) segmentVolumes <- Fox.serialCombined(request.body.segmentIds) { segmentId => volumeSegmentStatisticsService.getSegmentVolume(tracingId, segmentId, request.body.mag, + mappingName, request.body.additionalCoordinates, urlOrHeaderToken(token, request)) } @@ -518,10 +513,13 @@ class VolumeTracingController @Inject()( Action.async(validateJson[SegmentStatisticsParameters]) { implicit request => accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { for { + tracing <- tracingService.find(tracingId) + mappingName <- tracingService.baseMappingName(tracing) segmentBoundingBoxes: List[BoundingBox] <- Fox.serialCombined(request.body.segmentIds) { segmentId => volumeSegmentStatisticsService.getSegmentBoundingBox(tracingId, segmentId, request.body.mag, + mappingName, request.body.additionalCoordinates, urlOrHeaderToken(token, request)) } @@ -533,14 +531,21 @@ class VolumeTracingController @Inject()( Action.async(validateJson[GetSegmentIndexParameters]) { implicit request => accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { for { - tracing <- tracingService.find(tracingId) ?~> "tracing.notFound" + fallbackLayer <- tracingService.getFallbackLayer(tracingId) + tracing <- tracingService.find(tracingId) ?~> Messages("tracing.notFound") + mappingName <- tracingService.baseMappingName(tracing) bucketPositionsRaw: ListOfVec3IntProto <- volumeSegmentIndexService .getSegmentToBucketIndexWithEmptyFallbackWithoutBuffer( + fallbackLayer, tracingId, segmentId, request.body.mag, - request.body.additionalCoordinates, - AdditionalAxis.fromProtosAsOpt(tracing.additionalAxes)) + additionalCoordinates = request.body.additionalCoordinates, + additionalAxes = AdditionalAxis.fromProtosAsOpt(tracing.additionalAxes), + mappingName = mappingName, + editableMappingTracingId = tracingService.editableMappingTracingId(tracing, tracingId), + userToken = urlOrHeaderToken(token, request) + ) bucketPositionsForCubeSize = bucketPositionsRaw.values .map(vec3IntFromProto) .map(_.scale(DataLayer.bucketLength)) // bucket positions raw are indices of 32³ buckets diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/RemoteFallbackLayer.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/RemoteFallbackLayer.scala index 655967a9c0..2017567e22 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/RemoteFallbackLayer.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/RemoteFallbackLayer.scala @@ -5,7 +5,9 @@ import com.scalableminds.util.tools.Fox import com.scalableminds.util.tools.Fox.option2Fox import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing.ElementClassProto +import com.scalableminds.webknossos.datastore.helpers.ProtoGeometryImplicits import com.scalableminds.webknossos.datastore.models.WebknossosDataRequest +import 
com.scalableminds.webknossos.datastore.models.datasource.{DataLayerLike, DataSourceId} import com.scalableminds.webknossos.tracingstore.tracings.editablemapping.FallbackDataKey import com.scalableminds.webknossos.tracingstore.{TSRemoteDatastoreClient, TSRemoteWebknossosClient} @@ -16,6 +18,10 @@ case class RemoteFallbackLayer(organizationName: String, layerName: String, elementClass: ElementClassProto) +object RemoteFallbackLayer extends ProtoGeometryImplicits { + def fromDataLayerAndDataSource(dataLayer: DataLayerLike, dataSource: DataSourceId): RemoteFallbackLayer = + RemoteFallbackLayer(dataSource.team, dataSource.name, dataLayer.name, dataLayer.elementClass) +} trait FallbackDataHelper { def remoteDatastoreClient: TSRemoteDatastoreClient def remoteWebknossosClient: TSRemoteWebknossosClient diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingService.scala index f1992001b8..cd0a6ac6d4 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingService.scala @@ -186,7 +186,8 @@ trait TracingService[T <: GeneratedMessage] tracings: Seq[T], newId: String, newVersion: Long, - toCache: Boolean): Fox[MergedVolumeStats] + toCache: Boolean, + userToken: Option[String]): Fox[MergedVolumeStats] def mergeEditableMappings(tracingsWithIds: List[(T, String)], userToken: Option[String]): Fox[String] } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala index c45efebf19..1da99b36bb 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala @@ -223,6 +223,16 @@ class EditableMappingService @Inject()( (info, _) <- getInfoAndActualVersion(editableMappingId, version, remoteFallbackLayer, userToken) } yield info + def getBaseMappingName(editableMappingId: String): Fox[Option[String]] = + for { + desiredVersion <- getClosestMaterializableVersionOrZero(editableMappingId, None) + infoBox <- getClosestMaterialized(editableMappingId, desiredVersion).futureBox + } yield + infoBox match { + case Full(info) => Some(info.value.baseMappingName) + case _ => None + } + def getInfoAndActualVersion(editableMappingId: String, requestedVersion: Option[Long] = None, remoteFallbackLayer: RemoteFallbackLayer, diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/SkeletonTracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/SkeletonTracingService.scala index c501e72663..36d502e5b6 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/SkeletonTracingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/SkeletonTracingService.scala @@ -207,7 +207,8 @@ class SkeletonTracingService @Inject()( tracings: Seq[SkeletonTracing], newId: String, newVersion: Long, - toCache: Boolean): Fox[MergedVolumeStats] = 
Fox.successful(MergedVolumeStats.empty()) + toCache: Boolean, + userToken: Option[String]): Fox[MergedVolumeStats] = Fox.successful(MergedVolumeStats.empty()) def updateActionLog(tracingId: String, newestVersion: Option[Long], oldestVersion: Option[Long]): Fox[JsValue] = { def versionedTupleToJson(tuple: (Long, List[SkeletonUpdateAction])): JsObject = diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/MergedVolume.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/MergedVolume.scala index 935f9cc328..800380e388 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/MergedVolume.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/MergedVolume.scala @@ -8,7 +8,6 @@ import com.scalableminds.webknossos.datastore.services.DataConverter import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing.ElementClassProto import com.scalableminds.webknossos.datastore.geometry.Vec3IntProto import com.scalableminds.webknossos.datastore.helpers.ProtoGeometryImplicits -import net.liftweb.common.Box import scala.collection.mutable import scala.concurrent.ExecutionContext @@ -35,13 +34,13 @@ class MergedVolume(elementClass: ElementClassProto, initialLargestSegmentId: Lon private val labelMaps = mutable.ListBuffer[mutable.HashMap[UnsignedInteger, UnsignedInteger]]() var largestSegmentId: UnsignedInteger = UnsignedInteger.zeroFromElementClass(elementClass) - def addLabelSetFromDataZip(zipFile: File): Box[Unit] = { + def addLabelSetFromDataZip(zipFile: File)(implicit ec: ExecutionContext): Fox[Unit] = { val importLabelSet: mutable.Set[UnsignedInteger] = scala.collection.mutable.Set() val unzipResult = withBucketsFromZip(zipFile) { (_, bytes) => val dataTyped = UnsignedIntegerArray.fromByteArray(bytes, elementClass) val nonZeroData = UnsignedIntegerArray.filterNonZero(dataTyped) - importLabelSet ++= nonZeroData + Fox.successful(importLabelSet ++= nonZeroData) } for { _ <- unzipResult @@ -95,9 +94,9 @@ class MergedVolume(elementClass: ElementClassProto, initialLargestSegmentId: Lon } } - def addFromDataZip(sourceVolumeIndex: Int, zipFile: File): Box[Unit] = + def addFromDataZip(sourceVolumeIndex: Int, zipFile: File)(implicit ec: ExecutionContext): Fox[Unit] = withBucketsFromZip(zipFile) { (bucketPosition, bytes) => - add(sourceVolumeIndex, bucketPosition, bytes) + Fox.successful(add(sourceVolumeIndex, bucketPosition, bytes)) } def add(sourceVolumeIndex: Int, bucketPosition: BucketPosition, data: Array[Byte]): Unit = { @@ -132,10 +131,9 @@ class MergedVolume(elementClass: ElementClassProto, initialLargestSegmentId: Lon def withMergedBuckets(block: (BucketPosition, Array[Byte]) => Fox[Unit])(implicit ec: ExecutionContext): Fox[Unit] = for { - _ <- Fox.combined(mergedVolume.map { - case (bucketPosition, bucketData) => - block(bucketPosition, UnsignedIntegerArray.toByteArray(bucketData, elementClass)) - }.toList) + _ <- Fox.serialCombined(mergedVolume.keysIterator) { bucketPosition => + block(bucketPosition, UnsignedIntegerArray.toByteArray(mergedVolume(bucketPosition), elementClass)) + } } yield () def presentResolutions: Set[Vec3Int] = diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/TSFullMeshService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/TSFullMeshService.scala index 89fea311db..7a9c3dd2a1 100644 --- 
a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/TSFullMeshService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/TSFullMeshService.scala @@ -85,12 +85,21 @@ class TSFullMeshService @Inject()(volumeTracingService: VolumeTracingService, voxelSize: Vec3Double, fullMeshRequest: FullMeshRequest)(implicit ec: ExecutionContext): Fox[List[Array[Float]]] = for { + fallbackLayer <- volumeTracingService.getFallbackLayer(tracingId) + mappingName <- volumeTracingService.baseMappingName(tracing) bucketPositionsRaw: ListOfVec3IntProto <- volumeSegmentIndexService - .getSegmentToBucketIndexWithEmptyFallbackWithoutBuffer(tracingId, - fullMeshRequest.segmentId, - mag, - fullMeshRequest.additionalCoordinates, - AdditionalAxis.fromProtosAsOpt(tracing.additionalAxes)) + .getSegmentToBucketIndexWithEmptyFallbackWithoutBuffer( + fallbackLayer, + tracingId, + fullMeshRequest.segmentId, + mag, + version = None, + mappingName = mappingName, + editableMappingTracingId = volumeTracingService.editableMappingTracingId(tracing, tracingId), + fullMeshRequest.additionalCoordinates, + AdditionalAxis.fromProtosAsOpt(tracing.additionalAxes), + token + ) bucketPositions = bucketPositionsRaw.values .map(vec3IntFromProto) .map(_ * mag * DataLayer.bucketLength) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeDataZipHelper.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeDataZipHelper.scala index 9feffd0d27..ce74001a66 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeDataZipHelper.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeDataZipHelper.scala @@ -3,7 +3,8 @@ package com.scalableminds.webknossos.tracingstore.tracings.volume import java.io.{File, FileOutputStream, InputStream} import com.scalableminds.util.geometry.Vec3Int import com.scalableminds.util.io.ZipIO -import com.scalableminds.util.tools.{BoxImplicits, JsonHelper} +import com.scalableminds.util.tools.Fox.{box2Fox, option2Fox} +import com.scalableminds.util.tools.{BoxImplicits, Fox, JsonHelper} import com.scalableminds.webknossos.datastore.dataformats.wkw.WKWDataFormatHelper import com.scalableminds.webknossos.datastore.datareaders.{ BloscCompressor, @@ -22,6 +23,7 @@ import org.apache.commons.io.IOUtils import java.util.zip.{ZipEntry, ZipFile} import scala.collection.mutable +import scala.concurrent.ExecutionContext trait VolumeDataZipHelper extends WKWDataFormatHelper @@ -29,9 +31,10 @@ trait VolumeDataZipHelper with BoxImplicits with LazyLogging { - protected def withBucketsFromZip(zipFile: File)(block: (BucketPosition, Array[Byte]) => Unit): Box[Unit] = + protected def withBucketsFromZip(zipFile: File)(block: (BucketPosition, Array[Byte]) => Fox[Unit])( + implicit ec: ExecutionContext): Fox[Unit] = for { - format <- detectVolumeDataZipFormat(zipFile) + format <- detectVolumeDataZipFormat(zipFile).toFox _ <- if (format == VolumeDataZipFormat.wkw) withBucketsFromWkwZip(zipFile)(block) else withBucketsFromZarr3Zip(zipFile)(block) @@ -46,40 +49,50 @@ trait VolumeDataZipHelper } else VolumeDataZipFormat.wkw } - private def withBucketsFromWkwZip(zipFile: File)(block: (BucketPosition, Array[Byte]) => Unit): Box[Unit] = - tryo(ZipIO.withUnziped(zipFile) { - case (fileName, is) => - WKWFile.read(is) { - case (header, buckets) => - if (header.numChunksPerShard == 1) { 
- parseWKWFilePath(fileName.toString).map { bucketPosition: BucketPosition => - if (buckets.hasNext) { - val data = buckets.next() - if (!isRevertedBucket(data)) { - block(bucketPosition, data) - } - } - } + private def withBucketsFromWkwZip(zipFile: File)(block: (BucketPosition, Array[Byte]) => Fox[Unit])( + implicit ec: ExecutionContext): Fox[Unit] = + for { + _ <- ZipIO.withUnzipedAsync(zipFile) { + case (fileName, is) if fileName.toString.endsWith(".wkw") && !fileName.toString.endsWith("header.wkw") => + WKWFile + .read(is) { + case (header, buckets) => + if (header.numChunksPerShard == 1) { + parseWKWFilePath(fileName.toString).map { bucketPosition: BucketPosition => + if (buckets.hasNext) { + val data = buckets.next() + if (!isRevertedBucket(data)) { + block(bucketPosition, data) + } else Fox.successful(()) + } else Fox.successful(()) + }.getOrElse(Fox.successful(())) + } else Fox.successful(()) + case _ => Fox.successful(()) } - } - }) + .toFox + case _ => Fox.successful(()) + } + } yield () - private def withBucketsFromZarr3Zip(zipFile: File)(block: (BucketPosition, Array[Byte]) => Unit): Box[Unit] = + private def withBucketsFromZarr3Zip(zipFile: File)(block: (BucketPosition, Array[Byte]) => Fox[Unit])( + implicit ec: ExecutionContext): Fox[Unit] = for { - firstHeaderFilePath <- option2Box( - ZipIO.entries(new ZipFile(zipFile)).find(entry => entry.getName.endsWith(Zarr3ArrayHeader.FILENAME_ZARR_JSON))) - firstHeaderString <- ZipIO.readAt(new ZipFile(zipFile), firstHeaderFilePath) - firstHeader <- JsonHelper.parseAndValidateJson[Zarr3ArrayHeader](firstHeaderString) - _ <- firstHeader.assertValid - _ <- ZipIO.withUnziped(zipFile) { + firstHeaderFilePath <- ZipIO + .entries(new ZipFile(zipFile)) + .find(entry => entry.getName.endsWith(Zarr3ArrayHeader.FILENAME_ZARR_JSON)) + .toFox + firstHeaderString <- ZipIO.readAt(new ZipFile(zipFile), firstHeaderFilePath).toFox + firstHeader <- JsonHelper.parseAndValidateJson[Zarr3ArrayHeader](firstHeaderString).toFox + _ <- firstHeader.assertValid.toFox + _ <- ZipIO.withUnzipedAsync(zipFile) { case (filename, inputStream) => - if (filename.endsWith(Zarr3ArrayHeader.FILENAME_ZARR_JSON)) () + if (filename.endsWith(Zarr3ArrayHeader.FILENAME_ZARR_JSON)) Fox.successful(()) else { parseZarrChunkPath(filename.toString, firstHeader).map { bucketPosition => val dataCompressed = IOUtils.toByteArray(inputStream) val data = compressor.decompress(dataCompressed) block(bucketPosition, data) - } + }.getOrElse(Fox.successful(())) } } } yield () @@ -140,12 +153,16 @@ trait VolumeDataZipHelper } } - protected def withZipsFromMultiZip[T](multiZip: File)(block: (Int, File) => T): Box[Unit] = { + protected def withZipsFromMultiZipAsync[T](multiZip: File)(block: (Int, File) => Fox[Unit])( + implicit ec: ExecutionContext): Fox[Unit] = { var index: Int = 0 - val unzipResult = ZipIO.withUnziped(multiZip) { + val unzipResult = ZipIO.withUnzipedAsync(multiZip) { case (_, is) => - block(index, inputStreamToTempfile(is)) - index += 1 + for { + res <- block(index, inputStreamToTempfile(is)) + _ = index += 1 + } yield res + case _ => Fox.successful(()) } for { _ <- unzipResult diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeSegmentIndexBuffer.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeSegmentIndexBuffer.scala index 8cedb6305a..70867d8a46 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeSegmentIndexBuffer.scala 
+++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeSegmentIndexBuffer.scala @@ -3,9 +3,11 @@ package com.scalableminds.webknossos.tracingstore.tracings.volume import com.scalableminds.util.geometry.Vec3Int import com.scalableminds.util.tools.Fox import com.scalableminds.webknossos.datastore.geometry.ListOfVec3IntProto +import com.scalableminds.webknossos.datastore.helpers.ProtoGeometryImplicits +import com.scalableminds.webknossos.tracingstore.TSRemoteDatastoreClient import com.scalableminds.webknossos.datastore.models.AdditionalCoordinate import com.scalableminds.webknossos.datastore.models.datasource.AdditionalAxis -import com.scalableminds.webknossos.tracingstore.tracings.{FossilDBClient, KeyValueStoreImplicits} +import com.scalableminds.webknossos.tracingstore.tracings.{FossilDBClient, KeyValueStoreImplicits, RemoteFallbackLayer} import com.typesafe.scalalogging.LazyLogging import scala.collection.mutable @@ -22,20 +24,28 @@ trait SegmentIndexKeyHelper extends AdditionalCoordinateKey { } // To introduce buffering for updating the segment-to-bucket index for a volume tracing -// read provides fallback data from fossildb +// read provides fallback data from fossildb / segment index file // while write is done only locally in-memory, until flush is called // This saves a lot of db interactions (since adjacent bucket updates usually touch the same segments) class VolumeSegmentIndexBuffer(tracingId: String, volumeSegmentIndexClient: FossilDBClient, version: Long, - additionalAxes: Option[Seq[AdditionalAxis]]) + remoteDatastoreClient: TSRemoteDatastoreClient, + fallbackLayer: Option[RemoteFallbackLayer], + additionalAxes: Option[Seq[AdditionalAxis]], + userToken: Option[String]) extends KeyValueStoreImplicits with SegmentIndexKeyHelper + with ProtoGeometryImplicits with LazyLogging { private lazy val segmentIndexBuffer: mutable.Map[String, ListOfVec3IntProto] = new mutable.HashMap[String, ListOfVec3IntProto]() + // Used during initial saving of annotation: For each bucket, multiple segment ids are requested, which may overlap. 
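+  // This cache stores bucket positions that were already fetched from the datastore's segment index file,
+  // so that overlapping segment ids can be answered from memory instead of issuing repeated remote queries.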
+ private lazy val fileSegmentIndexCache: mutable.Map[String, ListOfVec3IntProto] = + new mutable.HashMap[String, ListOfVec3IntProto]() + def put(segmentId: Long, mag: Vec3Int, additionalCoordinates: Option[Seq[AdditionalCoordinate]], @@ -43,15 +53,16 @@ class VolumeSegmentIndexBuffer(tracingId: String, segmentIndexBuffer(segmentIndexKey(tracingId, segmentId, mag, additionalCoordinates, additionalAxes)) = segmentPositions - def getWithFallback(segmentId: Long, mag: Vec3Int, additionalCoordinates: Option[Seq[AdditionalCoordinate]])( + def getWithFallback(segmentId: Long, + mag: Vec3Int, + mappingName: Option[String], + editableMappingTracingId: Option[String], + additionalCoordinates: Option[Seq[AdditionalCoordinate]])( implicit ec: ExecutionContext): Fox[ListOfVec3IntProto] = { val key = segmentIndexKey(tracingId, segmentId, mag, additionalCoordinates, additionalAxes) segmentIndexBuffer.get(key) match { case Some(positions) => Fox.successful(positions) - case None => - volumeSegmentIndexClient - .get(key, Some(version), mayBeEmpty = Some(true))(fromProtoBytes[ListOfVec3IntProto]) - .map(_.value) + case None => getFallback(segmentId, mag, mappingName, editableMappingTracingId, additionalCoordinates) } } @@ -62,4 +73,126 @@ class VolumeSegmentIndexBuffer(tracingId: String, } } yield () + private def getFallback(segmentId: Long, + mag: Vec3Int, + mappingName: Option[String], + editableMappingTracingId: Option[String], + additionalCoordinates: Option[Seq[AdditionalCoordinate]])(implicit ec: ExecutionContext) = { + val key = segmentIndexKey(tracingId, segmentId, mag, additionalCoordinates, additionalAxes) + for { + fossilDbData <- volumeSegmentIndexClient + .get(key, Some(version), mayBeEmpty = Some(true))(fromProtoBytes[ListOfVec3IntProto]) + .map(_.value) + .fillEmpty(ListOfVec3IntProto.of(Seq())) + data <- fallbackLayer match { + case Some(layer) if fossilDbData.length == 0 => + remoteDatastoreClient.querySegmentIndex(layer, + segmentId, + mag, + mappingName, + editableMappingTracingId, + userToken) + case _ => Fox.successful(fossilDbData.values.map(vec3IntFromProto)) + } + } yield ListOfVec3IntProto(data.map(vec3IntToProto)) + } + + private def getSegmentsFromFossilDBNoteMisses( + tracingId: String, + mag: Vec3Int, + additionalCoordinates: Option[Seq[AdditionalCoordinate]], + additionalAxes: Option[Seq[AdditionalAxis]], + segmentIds: List[Long])(implicit ec: ExecutionContext): Fox[(List[(Long, Seq[Vec3Int])], List[Long])] = { + var misses = List[Long]() + var hits = List[(Long, Seq[Vec3Int])]() + for { + _ <- Fox.serialCombined(segmentIds)(segmentId => + for { + _ <- Fox.successful(()) + key = segmentIndexKey(tracingId, segmentId, mag, additionalCoordinates, additionalAxes) + bucketPositions <- volumeSegmentIndexClient + .get(key, Some(version), mayBeEmpty = Some(true))(fromProtoBytes[ListOfVec3IntProto]) + .map(_.value) + .fillEmpty(ListOfVec3IntProto.of(Seq())) + wasMiss = bucketPositions.length == 0 + _ = if (wasMiss) misses = segmentId :: misses + else hits = (segmentId, bucketPositions.values.map(vec3IntFromProto)) :: hits + } yield ()) + _ = misses.map( + segmentId => + fileSegmentIndexCache + .get(segmentIndexKey(tracingId, segmentId, mag, additionalCoordinates, additionalAxes)) match { + case Some(positions) => { + hits = (segmentId, positions.values.map(vec3IntFromProto)) :: hits + misses = misses.filterNot(_ == segmentId) + } + case None => () + }) + } yield (hits, misses) + } + + private def getSegmentsFromBufferNoteMisses( + segmentIds: List[Long], + mag: Vec3Int, + 
additionalCoordinates: Option[Seq[AdditionalCoordinate]]
+  ): (List[(Long, Seq[Vec3Int])], List[Long]) = {
+    val hits = segmentIds.flatMap(id => {
+      val key = segmentIndexKey(tracingId, id, mag, additionalCoordinates, additionalAxes)
+      val values = segmentIndexBuffer.get(key).map(_.values.map(vec3IntFromProto))
+      values match {
+        case Some(positions) => Some(id, positions)
+        case None            => None
+      }
+    })
+    val misses = segmentIds.filterNot(id => hits.exists(_._1 == id))
+    (hits, misses)
+  }
+
+  // Get the bucket positions (as bucket indices) for each segment id, combined from all sources (buffer, fossilDB, segment index file)
+  def getSegmentToBucketIndexMap(segmentIds: List[Long],
+                                 mag: Vec3Int,
+                                 mappingName: Option[String],
+                                 editableMappingTracingId: Option[String],
+                                 additionalCoordinates: Option[Seq[AdditionalCoordinate]])(
+      implicit ec: ExecutionContext): Fox[List[(Long, Seq[Vec3Int])]] =
+    for {
+      _ <- Fox.successful(())
+
+      (bufferHits, bufferMisses) = getSegmentsFromBufferNoteMisses(segmentIds, mag, additionalCoordinates)
+      (mutableIndexHits, mutableIndexMisses) <- getSegmentsFromFossilDBNoteMisses(tracingId,
+                                                                                  mag,
+                                                                                  additionalCoordinates,
+                                                                                  additionalAxes,
+                                                                                  bufferMisses)
+      missesSoFar = bufferMisses ++ mutableIndexMisses
+      fileBucketPositions <- fallbackLayer match {
+        case Some(layer) =>
+          for {
+            fileBucketPositionsOpt <- Fox.runIf(missesSoFar.nonEmpty)(
+              remoteDatastoreClient.querySegmentIndexForMultipleSegments(layer,
+                                                                         missesSoFar,
+                                                                         mag,
+                                                                         mappingName,
+                                                                         editableMappingTracingId,
+                                                                         userToken))
+            fileBucketPositions = fileBucketPositionsOpt.getOrElse(Seq())
+            _ = fileBucketPositions.map {
+              case (segmentId, positions) =>
+                fileSegmentIndexCache(
+                  segmentIndexKey(tracingId, segmentId, mag, additionalCoordinates, additionalAxes)) =
+                  ListOfVec3IntProto(positions.map(vec3IntToProto))
+            }
+
+          } yield fileBucketPositions
+        case _ => Fox.successful(List[(Long, Seq[Vec3Int])]())
+      }
+      allHits = mutableIndexHits ++ fileBucketPositions ++ bufferHits
+      allHitsFilled = segmentIds.map { segmentId =>
+        allHits.find(_._1 == segmentId) match {
+          case Some((_, positions)) => (segmentId, positions)
+          case None                 => (segmentId, Seq())
+        }
+      }
+    } yield allHitsFilled
+}
diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeSegmentIndexService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeSegmentIndexService.scala
index 13ae75b736..150c4938bb 100644
--- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeSegmentIndexService.scala
+++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeSegmentIndexService.scala
@@ -9,13 +9,15 @@ import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing.Elemen
 import com.scalableminds.webknossos.datastore.models.datasource.{AdditionalAxis, ElementClass}
 import com.scalableminds.webknossos.datastore.geometry.ListOfVec3IntProto
 import com.scalableminds.webknossos.datastore.helpers.ProtoGeometryImplicits
-import com.scalableminds.webknossos.datastore.models.{
-  AdditionalCoordinate,
-  BucketPosition,
-  UnsignedInteger,
-  UnsignedIntegerArray
+import com.scalableminds.webknossos.datastore.models.{BucketPosition, UnsignedInteger, UnsignedIntegerArray}
+import com.scalableminds.webknossos.tracingstore.TSRemoteDatastoreClient
+import com.scalableminds.webknossos.datastore.models.AdditionalCoordinate
+import com.scalableminds.webknossos.tracingstore.tracings.{
+  FossilDBClient,
+  KeyValueStoreImplicits,
+  RemoteFallbackLayer,
+  TracingDataStore
 }
-import com.scalableminds.webknossos.tracingstore.tracings.{FossilDBClient, KeyValueStoreImplicits, TracingDataStore}
 import com.typesafe.scalalogging.LazyLogging
 import net.liftweb.common.{Box, Empty, Failure, Full}
 import net.liftweb.common.Box.tryo
@@ -23,16 +25,20 @@ import net.liftweb.common.Box.tryo
 import scala.concurrent.ExecutionContext

 object VolumeSegmentIndexService {
-  // Currently, segment index is not supported for volume tracings with fallback layer
-  def canHaveSegmentIndexOpt(fallbackLayerName: Option[String]): Option[Boolean] = Some(fallbackLayerName.isEmpty)
-
-  def canHaveSegmentIndex(fallbackLayerName: Option[String]): Boolean = fallbackLayerName.isEmpty
+  def canHaveSegmentIndex(remoteDatastoreClient: TSRemoteDatastoreClient,
+                          fallbackLayer: Option[RemoteFallbackLayer],
+                          userToken: Option[String])(implicit ec: ExecutionContext): Fox[Boolean] =
+    fallbackLayer match {
+      case Some(layer) => remoteDatastoreClient.hasSegmentIndexFile(layer, userToken)
+      case None        => Fox.successful(true)
+    }
 }

 // Segment-to-Bucket index for volume tracings in FossilDB
 // key: tracing id, segment id, mag – value: list of buckets
 // used for calculating segment statistics
-class VolumeSegmentIndexService @Inject()(val tracingDataStore: TracingDataStore)
+class VolumeSegmentIndexService @Inject()(val tracingDataStore: TracingDataStore,
+                                          remoteDatastoreClient: TSRemoteDatastoreClient)
     extends KeyValueStoreImplicits
     with ProtoGeometryImplicits
     with VolumeBucketCompression
@@ -49,19 +55,33 @@ class VolumeSegmentIndexService @Inject()(val tracingDataStore: TracingDataStore
                          bucketPosition: BucketPosition,
                          bucketBytes: Array[Byte],
                          previousBucketBytesBox: Box[Array[Byte]],
-                         elementClass: ElementClassProto)(implicit ec: ExecutionContext): Fox[Unit] =
+                         elementClass: ElementClassProto,
+                         mappingName: Option[String],
+                         editableMappingTracingId: Option[String])(implicit ec: ExecutionContext): Fox[Unit] =
     for {
       bucketBytesDecompressed <- tryo(
         decompressIfNeeded(bucketBytes, expectedUncompressedBucketSizeFor(elementClass), "")).toFox
-      previousBucketBytesWithEmptyFallback <- bytesWithEmptyFallback(previousBucketBytesBox, elementClass) ?~> "volumeSegmentIndex.udpate.getPreviousBucket.failed"
+      // previous bytes: include fallback layer bytes if available, otherwise use empty bytes
+      previousBucketBytesWithEmptyFallback <- bytesWithEmptyFallback(previousBucketBytesBox, elementClass) ?~> "volumeSegmentIndex.update.getPreviousBucket.failed"
       segmentIds: Set[Long] <- collectSegmentIds(bucketBytesDecompressed, elementClass)
-      previousSegmentIds: Set[Long] <- collectSegmentIds(previousBucketBytesWithEmptyFallback, elementClass) ?~> "volumeSegmentIndex.udpate.collectSegmentIds.failed"
+      previousSegmentIds: Set[Long] <- collectSegmentIds(previousBucketBytesWithEmptyFallback, elementClass) ?~> "volumeSegmentIndex.update.collectSegmentIds.failed"
       additions = segmentIds.diff(previousSegmentIds)
       removals = previousSegmentIds.diff(segmentIds)
-      _ <- Fox.serialCombined(removals.toList)(segmentId =>
-        removeBucketFromSegmentIndex(segmentIndexBuffer, segmentId, bucketPosition)) ?~> "volumeSegmentIndex.udpate.removeBucket.failed"
-      _ <- Fox.serialCombined(additions.toList)(segmentId =>
-        addBucketToSegmentIndex(segmentIndexBuffer, segmentId, bucketPosition)) ?~> "volumeSegmentIndex.udpate.addBucket.failed"
+      _ <- Fox.serialCombined(removals.toList)(
+        segmentId =>
+          // When a fallback layer is used, we also need to include the relevant segments in fossildb here, since otherwise the fallback layer would be used with invalid data
+          removeBucketFromSegmentIndex(segmentIndexBuffer,
+                                       segmentId,
+                                       bucketPosition,
+                                       mappingName,
+                                       editableMappingTracingId)) ?~> "volumeSegmentIndex.update.removeBucket.failed"
+      // When a fallback layer is used, copy the entire bucket list for this segment instead of a single bucket
+      _ <- Fox.runIf(additions.nonEmpty)(
+        addBucketToSegmentIndex(segmentIndexBuffer,
+                                additions.toList,
+                                bucketPosition,
+                                mappingName,
+                                editableMappingTracingId)) ?~> "volumeSegmentIndex.update.addBucket.failed"
     } yield ()

   private def bytesWithEmptyFallback(bytesBox: Box[Array[Byte]], elementClass: ElementClassProto)(
@@ -72,30 +92,47 @@
       case f: Failure => f.toFox
     }

-  private def removeBucketFromSegmentIndex(segmentIndexBuffer: VolumeSegmentIndexBuffer,
-                                           segmentId: Long,
-                                           bucketPosition: BucketPosition)(implicit ec: ExecutionContext): Fox[Unit] =
+  private def removeBucketFromSegmentIndex(
+      segmentIndexBuffer: VolumeSegmentIndexBuffer,
+      segmentId: Long,
+      bucketPosition: BucketPosition,
+      mappingName: Option[String],
+      editableMappingTracingId: Option[String])(implicit ec: ExecutionContext): Fox[Unit] =
     for {
       previousBucketList: ListOfVec3IntProto <- getSegmentToBucketIndexWithEmptyFallback(
         segmentIndexBuffer,
         segmentId,
         bucketPosition.mag,
+        mappingName,
+        editableMappingTracingId,
         bucketPosition.additionalCoordinates)
       bucketPositionProto = bucketPosition.toVec3IntProto
       newBucketList = ListOfVec3IntProto(previousBucketList.values.filterNot(_ == bucketPositionProto))
       _ = segmentIndexBuffer.put(segmentId, bucketPosition.mag, bucketPosition.additionalCoordinates, newBucketList)
     } yield ()

-  private def addBucketToSegmentIndex(segmentIndexBuffer: VolumeSegmentIndexBuffer,
-                                      segmentId: Long,
-                                      bucketPosition: BucketPosition)(implicit ec: ExecutionContext): Fox[Unit] =
+  private def addBucketToSegmentIndex(
+      segmentIndexBuffer: VolumeSegmentIndexBuffer,
+      segmentIds: List[Long],
+      bucketPosition: BucketPosition,
+      mappingName: Option[String],
+      editableMappingTracingId: Option[String])(implicit ec: ExecutionContext): Fox[Unit] =
     for {
-      previousBucketList <- getSegmentToBucketIndexWithEmptyFallback(segmentIndexBuffer,
-                                                                     segmentId,
-                                                                     bucketPosition.mag,
-                                                                     bucketPosition.additionalCoordinates)
-      newBucketList = ListOfVec3IntProto((bucketPosition.toVec3IntProto +: previousBucketList.values).distinct)
-      _ <- segmentIndexBuffer.put(segmentId, bucketPosition.mag, bucketPosition.additionalCoordinates, newBucketList)
+      previousBuckets <- segmentIndexBuffer.getSegmentToBucketIndexMap(segmentIds,
+                                                                       bucketPosition.mag,
+                                                                       mappingName,
+                                                                       editableMappingTracingId,
+                                                                       bucketPosition.additionalCoordinates)
+      _ <- Fox.serialCombined(previousBuckets) {
+        case (segmentId, previousBucketList) =>
+          val newBucketList = ListOfVec3IntProto(
+            (bucketPosition.toVec3IntProto +: ListOfVec3IntProto
+              .of(previousBucketList.map(vec3IntToProto))
+              .values).distinct)
+          segmentIndexBuffer.put(segmentId, bucketPosition.mag, bucketPosition.additionalCoordinates, newBucketList)
+          Fox.successful(())
+        case _ => Fox.successful(())
+      }
     } yield ()

   private def collectSegmentIds(bytes: Array[Byte], elementClass: ElementClassProto)(
@@ -110,27 +147,39 @@ class VolumeSegmentIndexService @Inject()(val tracingDataStore: TracingDataStore
   private def getSegmentToBucketIndexWithEmptyFallback(segmentIndexBuffer: VolumeSegmentIndexBuffer,
                                                        segmentId: Long,
                                                        mag: Vec3Int,
+                                                       mappingName: Option[String],
+                                                       
editableMappingTracingId: Option[String], additionalCoordinates: Option[Seq[AdditionalCoordinate]])( implicit ec: ExecutionContext): Fox[ListOfVec3IntProto] = for { - bucketListBox <- segmentIndexBuffer.getWithFallback(segmentId, mag, additionalCoordinates).futureBox + bucketListBox <- segmentIndexBuffer + .getWithFallback(segmentId, mag, mappingName, editableMappingTracingId, additionalCoordinates) + .futureBox bucketList <- addEmptyFallback(bucketListBox) } yield bucketList def getSegmentToBucketIndexWithEmptyFallbackWithoutBuffer( + fallbackLayer: Option[RemoteFallbackLayer], tracingId: String, segmentId: Long, mag: Vec3Int, + version: Option[Long] = None, + mappingName: Option[String], + editableMappingTracingId: Option[String], additionalCoordinates: Option[Seq[AdditionalCoordinate]], additionalAxes: Option[Seq[AdditionalAxis]], - version: Option[Long] = None)(implicit ec: ExecutionContext): Fox[ListOfVec3IntProto] = + userToken: Option[String])(implicit ec: ExecutionContext): Fox[ListOfVec3IntProto] = for { - bucketListBox <- getSegmentToBucketIndex(tracingId, + bucketListBox <- getSegmentToBucketIndex(fallbackLayer, + tracingId, segmentId, mag, + version, + mappingName, + editableMappingTracingId, additionalCoordinates, additionalAxes, - version).futureBox + userToken).futureBox bucketList <- addEmptyFallback(bucketListBox) } yield bucketList @@ -142,14 +191,54 @@ class VolumeSegmentIndexService @Inject()(val tracingDataStore: TracingDataStore case Empty => Fox.successful(ListOfVec3IntProto(Seq.empty)) } - private def getSegmentToBucketIndex(tracingId: String, - segmentId: Long, - mag: Vec3Int, - additionalCoordinates: Option[Seq[AdditionalCoordinate]], - additionalAxes: Option[Seq[AdditionalAxis]], - version: Option[Long]): Fox[ListOfVec3IntProto] = { + private def getSegmentToBucketIndex( + fallbackLayerOpt: Option[RemoteFallbackLayer], + tracingId: String, + segmentId: Long, + mag: Vec3Int, + version: Option[Long], + mappingName: Option[String], + editableMappingTracingId: Option[String], + additionalCoordinates: Option[Seq[AdditionalCoordinate]], + additionalAxes: Option[Seq[AdditionalAxis]], + userToken: Option[String])(implicit ec: ExecutionContext): Fox[ListOfVec3IntProto] = + for { + fromMutableIndex <- getSegmentToBucketIndexFromFossilDB(tracingId, + segmentId, + mag, + version, + additionalCoordinates, + additionalAxes).fillEmpty(ListOfVec3IntProto.of(Seq())) + fromFileIndex <- fallbackLayerOpt match { // isEmpty is not the same as length == 0 here :( + case Some(fallbackLayer) if fromMutableIndex.length == 0 => + getSegmentToBucketIndexFromFile(fallbackLayer, + segmentId, + mag, + mappingName, + editableMappingTracingId, + userToken) // additional coordinates not supported, see #7556 + case _ => Fox.successful(Seq.empty) + } + combined = fromMutableIndex.values.map(vec3IntFromProto) ++ fromFileIndex + } yield ListOfVec3IntProto(combined.map(vec3IntToProto)) + + private def getSegmentToBucketIndexFromFossilDB( + tracingId: String, + segmentId: Long, + mag: Vec3Int, + version: Option[Long], + additionalCoordinates: Option[Seq[AdditionalCoordinate]], + additionalAxes: Option[Seq[AdditionalAxis]]): Fox[ListOfVec3IntProto] = { val key = segmentIndexKey(tracingId, segmentId, mag, additionalCoordinates, additionalAxes) volumeSegmentIndexClient.get(key, version, mayBeEmpty = Some(true))(fromProtoBytes[ListOfVec3IntProto]).map(_.value) } + private def getSegmentToBucketIndexFromFile(layer: RemoteFallbackLayer, + segmentId: Long, + mag: Vec3Int, + mappingName: Option[String], + 
editableMappingTracingId: Option[String], + userToken: Option[String]) = + remoteDatastoreClient.querySegmentIndex(layer, segmentId, mag, mappingName, editableMappingTracingId, userToken) + } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeSegmentStatisticsService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeSegmentStatisticsService.scala index a997581326..4848ba247c 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeSegmentStatisticsService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeSegmentStatisticsService.scala @@ -4,123 +4,57 @@ import com.scalableminds.util.geometry.{BoundingBox, Vec3Int} import com.scalableminds.util.tools.Fox import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing import com.scalableminds.webknossos.datastore.geometry.ListOfVec3IntProto -import com.scalableminds.webknossos.datastore.helpers.ProtoGeometryImplicits -import com.scalableminds.webknossos.datastore.models.{ - AdditionalCoordinate, - UnsignedInteger, - UnsignedIntegerArray, - WebknossosDataRequest -} -import com.scalableminds.webknossos.datastore.models.datasource.{AdditionalAxis, DataLayer} +import com.scalableminds.webknossos.datastore.helpers.{ProtoGeometryImplicits, SegmentStatistics} +import com.scalableminds.webknossos.datastore.models.{UnsignedInteger, UnsignedIntegerArray, WebknossosDataRequest} +import com.scalableminds.webknossos.datastore.models.datasource.DataLayer +import com.scalableminds.webknossos.datastore.models.AdditionalCoordinate +import com.scalableminds.webknossos.datastore.models.datasource.AdditionalAxis import com.scalableminds.webknossos.tracingstore.tracings.editablemapping.EditableMappingService -import play.api.libs.json.{Json, OFormat} import javax.inject.Inject import scala.concurrent.ExecutionContext -case class SegmentStatisticsParameters(mag: Vec3Int, - segmentIds: List[Long], - additionalCoordinates: Option[Seq[AdditionalCoordinate]]) -object SegmentStatisticsParameters { - implicit val jsonFormat: OFormat[SegmentStatisticsParameters] = Json.format[SegmentStatisticsParameters] -} - class VolumeSegmentStatisticsService @Inject()(volumeTracingService: VolumeTracingService, volumeSegmentIndexService: VolumeSegmentIndexService, editableMappingService: EditableMappingService) - extends ProtoGeometryImplicits { + extends ProtoGeometryImplicits + with SegmentStatistics { // Returns the segment volume (=number of voxels) in the target mag def getSegmentVolume(tracingId: String, segmentId: Long, mag: Vec3Int, + mappingName: Option[String], additionalCoordinates: Option[Seq[AdditionalCoordinate]], userToken: Option[String])(implicit ec: ExecutionContext): Fox[Long] = - for { - tracing <- volumeTracingService.find(tracingId) ?~> "tracing.notFound" - bucketPositions: ListOfVec3IntProto <- volumeSegmentIndexService - .getSegmentToBucketIndexWithEmptyFallbackWithoutBuffer(tracingId, - segmentId, - mag, - additionalCoordinates, - AdditionalAxis.fromProtosAsOpt(tracing.additionalAxes)) - volumeData <- getVolumeDataForPositions(tracing, - tracingId, - mag, - bucketPositions, - additionalCoordinates, - userToken) - dataTyped: Array[UnsignedInteger] = UnsignedIntegerArray.fromByteArray(volumeData, tracing.elementClass) - volumeInVx = dataTyped.count(unsignedInteger => unsignedInteger.toPositiveLong == segmentId) - } yield volumeInVx + 
calculateSegmentVolume( + segmentId, + mag, + additionalCoordinates, + getBucketPositions(tracingId, mappingName, additionalCoordinates, userToken), + getTypedDataForBucketPosition(tracingId, userToken) + ) - // Returns the bounding box in voxels in the target mag def getSegmentBoundingBox(tracingId: String, segmentId: Long, mag: Vec3Int, + mappingName: Option[String], additionalCoordinates: Option[Seq[AdditionalCoordinate]], userToken: Option[String])(implicit ec: ExecutionContext): Fox[BoundingBox] = - for { - tracing <- volumeTracingService.find(tracingId) ?~> "tracing.notFound" - allBucketPositions: ListOfVec3IntProto <- volumeSegmentIndexService - .getSegmentToBucketIndexWithEmptyFallbackWithoutBuffer(tracingId, - segmentId, - mag, - additionalCoordinates, - AdditionalAxis.fromProtosAsOpt(tracing.additionalAxes)) - relevantBucketPositions = filterOutInnerBucketPositions(allBucketPositions) - boundingBoxMutable = scala.collection.mutable.ListBuffer[Int](Int.MaxValue, - Int.MaxValue, - Int.MaxValue, - Int.MinValue, - Int.MinValue, - Int.MinValue) //topleft, bottomright - _ <- Fox.serialCombined(relevantBucketPositions.iterator)( - bucketPosition => - extendBoundingBoxByData(tracing, - tracingId, - mag, - segmentId, - boundingBoxMutable, - bucketPosition, - additionalCoordinates, - userToken)) - } yield - if (boundingBoxMutable.exists(item => item == Int.MaxValue || item == Int.MinValue)) { - BoundingBox.empty - } else - BoundingBox( - Vec3Int(boundingBoxMutable(0), boundingBoxMutable(1), boundingBoxMutable(2)), - boundingBoxMutable(3) - boundingBoxMutable(0) + 1, - boundingBoxMutable(4) - boundingBoxMutable(1) + 1, - boundingBoxMutable(5) - boundingBoxMutable(2) + 1 - ) + calculateSegmentBoundingBox( + segmentId, + mag, + additionalCoordinates, + getBucketPositions(tracingId, mappingName, additionalCoordinates, userToken), + getTypedDataForBucketPosition(tracingId, userToken) + ) - // The buckets that form the outer walls of the bounding box are relevant (in each of those the real min/max voxel positions could occur) - private def filterOutInnerBucketPositions(bucketPositions: ListOfVec3IntProto): Seq[Vec3Int] = - if (bucketPositions.values.isEmpty) List.empty - else { - val minX = bucketPositions.values.map(_.x).min - val minY = bucketPositions.values.map(_.y).min - val minZ = bucketPositions.values.map(_.z).min - val maxX = bucketPositions.values.map(_.x).max - val maxY = bucketPositions.values.map(_.y).max - val maxZ = bucketPositions.values.map(_.z).max - bucketPositions.values - .filter(pos => - pos.x == minX || pos.x == maxX || pos.y == minY || pos.y == maxY || pos.z == minZ || pos.z == maxZ) - .map(vec3IntFromProto) - } - - private def extendBoundingBoxByData(tracing: VolumeTracing, - tracingId: String, - mag: Vec3Int, - segmentId: Long, - mutableBoundingBox: scala.collection.mutable.ListBuffer[Int], - bucketPosition: Vec3Int, - additionalCoordinates: Option[Seq[AdditionalCoordinate]], - userToken: Option[String]): Fox[Unit] = + private def getTypedDataForBucketPosition(tracingId: String, userToken: Option[String])( + bucketPosition: Vec3Int, + mag: Vec3Int, + additionalCoordinates: Option[Seq[AdditionalCoordinate]]) = for { + tracing <- volumeTracingService.find(tracingId) ?~> "tracing.notFound" bucketData <- getVolumeDataForPositions(tracing, tracingId, mag, @@ -130,48 +64,31 @@ class VolumeSegmentStatisticsService @Inject()(volumeTracingService: VolumeTraci dataTyped: Array[UnsignedInteger] = UnsignedIntegerArray.fromByteArray( bucketData, 
elementClassFromProto(tracing.elementClass)) - bucketTopLeftInTargetMagVoxels = bucketPosition * DataLayer.bucketLength - _ = scanDataAndExtendBoundingBox(dataTyped, bucketTopLeftInTargetMagVoxels, segmentId, mutableBoundingBox) - } yield () + } yield dataTyped - private def scanDataAndExtendBoundingBox(dataTyped: Array[UnsignedInteger], - bucketTopLeftInTargetMagVoxels: Vec3Int, - segmentId: Long, - mutableBoundingBox: scala.collection.mutable.ListBuffer[Int]): Unit = + private def getBucketPositions( + tracingId: String, + mappingName: Option[String], + additionalCoordinates: Option[Seq[AdditionalCoordinate]], + userToken: Option[String])(segmentId: Long, mag: Vec3Int)(implicit ec: ExecutionContext) = for { - x <- 0 until DataLayer.bucketLength - y <- 0 until DataLayer.bucketLength - z <- 0 until DataLayer.bucketLength - index = z * DataLayer.bucketLength * DataLayer.bucketLength + y * DataLayer.bucketLength + x - } yield { - if (dataTyped(index).toPositiveLong == segmentId) { - val voxelPosition = bucketTopLeftInTargetMagVoxels + Vec3Int(x, y, z) - extendBoundingBoxByPosition(mutableBoundingBox, voxelPosition) - } - } - - private def extendBoundingBoxByPosition(mutableBoundingBox: scala.collection.mutable.ListBuffer[Int], - position: Vec3Int): Unit = { - mutableBoundingBox(0) = Math.min(mutableBoundingBox(0), position.x) - mutableBoundingBox(1) = Math.min(mutableBoundingBox(1), position.y) - mutableBoundingBox(2) = Math.min(mutableBoundingBox(2), position.z) - mutableBoundingBox(3) = Math.max(mutableBoundingBox(3), position.x) - mutableBoundingBox(4) = Math.max(mutableBoundingBox(4), position.y) - mutableBoundingBox(5) = Math.max(mutableBoundingBox(5), position.z) - } - - private def getVolumeDataForPositions(tracing: VolumeTracing, - tracingId: String, - mag: Vec3Int, - bucketPositions: ListOfVec3IntProto, - additionalCoordinates: Option[Seq[AdditionalCoordinate]], - userToken: Option[String]): Fox[Array[Byte]] = - getVolumeDataForPositions(tracing, - tracingId, - mag, - bucketPositions.values.map(vec3IntFromProto), - additionalCoordinates, - userToken) + fallbackLayer <- volumeTracingService.getFallbackLayer(tracingId) + tracing <- volumeTracingService.find(tracingId) ?~> "tracing.notFound" + additionalAxes = AdditionalAxis.fromProtosAsOpt(tracing.additionalAxes) + allBucketPositions: ListOfVec3IntProto <- volumeSegmentIndexService + .getSegmentToBucketIndexWithEmptyFallbackWithoutBuffer( + fallbackLayer, + tracingId, + segmentId, + mag, + None, + mappingName, + editableMappingTracingId = volumeTracingService.editableMappingTracingId(tracing, tracingId), + additionalCoordinates, + additionalAxes, + userToken + ) + } yield allBucketPositions private def getVolumeDataForPositions(tracing: VolumeTracing, tracingId: String, @@ -179,6 +96,7 @@ class VolumeSegmentStatisticsService @Inject()(volumeTracingService: VolumeTraci bucketPositions: Seq[Vec3Int], additionalCoordinates: Option[Seq[AdditionalCoordinate]], userToken: Option[String]): Fox[Array[Byte]] = { + val dataRequests = bucketPositions.map { position => WebknossosDataRequest( position = position * mag * DataLayer.bucketLength, @@ -193,7 +111,7 @@ class VolumeSegmentStatisticsService @Inject()(volumeTracingService: VolumeTraci for { (data, _) <- if (tracing.mappingIsEditable.getOrElse(false)) editableMappingService.volumeData(tracing, tracingId, dataRequests, userToken) - else volumeTracingService.data(tracingId, tracing, dataRequests) + else volumeTracingService.data(tracingId, tracing, dataRequests, includeFallbackDataIfAvailable 
= true, userToken) } yield data } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingDownsampling.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingDownsampling.scala index f82518d2de..fee28ac7bc 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingDownsampling.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingDownsampling.scala @@ -66,15 +66,22 @@ trait VolumeTracingDownsampling bucketPosition: BucketPosition, bucketBytes: Array[Byte], previousBucketBytesBox: Box[Array[Byte]], - elementClass: ElementClassProto): Fox[Unit] + elementClass: ElementClassProto, + mappingName: Option[String], + editableMappingTracingId: Option[String]): Fox[Unit] + + protected def editableMappingTracingId(tracing: VolumeTracing, tracingId: String): Option[String] + + protected def baseMappingName(tracing: VolumeTracing): Fox[Option[String]] protected def volumeSegmentIndexClient: FossilDBClient - protected def downsampleWithLayer( - tracingId: String, - oldTracingId: String, - tracing: VolumeTracing, - dataLayer: VolumeTracingLayer)(implicit ec: ExecutionContext): Fox[List[Vec3Int]] = { + protected def downsampleWithLayer(tracingId: String, + oldTracingId: String, + tracing: VolumeTracing, + dataLayer: VolumeTracingLayer, + tracingService: VolumeTracingService, + userToken: Option[String])(implicit ec: ExecutionContext): Fox[List[Vec3Int]] = { val bucketVolume = 32 * 32 * 32 for { _ <- bool2Fox(tracing.version == 0L) ?~> "Tracing has already been edited." @@ -97,19 +104,29 @@ trait VolumeTracingDownsampling dataLayer) requiredMag } + fallbackLayer <- tracingService.getFallbackLayer(tracingId) + tracing <- tracingService.find(tracingId) ?~> "tracing.notFound" segmentIndexBuffer = new VolumeSegmentIndexBuffer(tracingId, volumeSegmentIndexClient, tracing.version, - dataLayer.additionalAxes) + tracingService.remoteDatastoreClient, + fallbackLayer, + dataLayer.additionalAxes, + userToken) _ <- Fox.serialCombined(updatedBucketsMutable.toList) { bucketPosition: BucketPosition => for { _ <- saveBucket(dataLayer, bucketPosition, bucketDataMapMutable(bucketPosition), tracing.version) + mappingName <- baseMappingName(tracing) _ <- Fox.runIfOptionTrue(tracing.hasSegmentIndex)( - updateSegmentIndex(segmentIndexBuffer, - bucketPosition, - bucketDataMapMutable(bucketPosition), - Empty, - tracing.elementClass)) + updateSegmentIndex( + segmentIndexBuffer, + bucketPosition, + bucketDataMapMutable(bucketPosition), + Empty, + tracing.elementClass, + mappingName, + editableMappingTracingId(tracing, tracingId) + )) } yield () } _ <- segmentIndexBuffer.flush() diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingLayer.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingLayer.scala index f56875e71f..8f0b874c55 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingLayer.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingLayer.scala @@ -96,6 +96,8 @@ case class VolumeTracingLayer( private lazy val volumeResolutions: List[Vec3Int] = tracing.resolutions.map(vec3IntFromProto).toList + override def bucketProviderCacheKey: String = 
s"$name-withFallbackData=$includeFallbackDataIfAvailable" + def lengthOfUnderlyingCubes(resolution: Vec3Int): Int = DataLayer.bucketLength val dataFormat: DataFormat.Value = DataFormat.tracing diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala index dde3df8618..3b6e450dd9 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala @@ -1,6 +1,7 @@ package com.scalableminds.webknossos.tracingstore.tracings.volume import com.google.inject.Inject +import com.scalableminds.util.cache.AlfuCache import com.scalableminds.util.geometry.{BoundingBox, Vec3Double, Vec3Int} import com.scalableminds.util.io.{NamedStream, ZipIO} import com.scalableminds.util.time.Instant @@ -85,26 +86,33 @@ class VolumeTracingService @Inject()( /* We want to reuse the bucket loading methods from binaryDataService for the volume tracings, however, it does not actually load anything from disk, unlike its “normal” instance in the datastore (only from the volume tracing store) */ - val binaryDataService = new BinaryDataService(Paths.get(""), 100, None, None, None, None, None) + private val binaryDataService = new BinaryDataService(Paths.get(""), 100, None, None, None, None, None) adHocMeshServiceHolder.tracingStoreAdHocMeshConfig = (binaryDataService, 30 seconds, 1) val adHocMeshService: AdHocMeshService = adHocMeshServiceHolder.tracingStoreAdHocMeshService + private val fallbackLayerCache: AlfuCache[String, Option[RemoteFallbackLayer]] = AlfuCache(maxCapacity = 100) + override def currentVersion(tracingId: String): Fox[Long] = tracingDataStore.volumes.getVersion(tracingId, mayBeEmpty = Some(true), emptyFallback = Some(0L)) override def currentVersion(tracing: VolumeTracing): Long = tracing.version - override protected def updateSegmentIndex(segmentIndexBuffer: VolumeSegmentIndexBuffer, - bucketPosition: BucketPosition, - bucketBytes: Array[Byte], - previousBucketBytesBox: Box[Array[Byte]], - elementClass: ElementClassProto): Fox[Unit] = + override protected def updateSegmentIndex( + segmentIndexBuffer: VolumeSegmentIndexBuffer, + bucketPosition: BucketPosition, + bucketBytes: Array[Byte], + previousBucketBytesBox: Box[Array[Byte]], + elementClass: ElementClassProto, + mappingName: Option[String], // should be the base mapping name in case of editable mapping + editableMappingTracingId: Option[String]): Fox[Unit] = volumeSegmentIndexService.updateFromBucket(segmentIndexBuffer, bucketPosition, bucketBytes, previousBucketBytesBox, - elementClass) ?~> "volumeSegmentIndex.update.failed" + elementClass, + mappingName, + editableMappingTracingId) ?~> "volumeSegmentIndex.update.failed" def handleUpdateGroup(tracingId: String, updateGroup: UpdateActionGroup[VolumeTracing], @@ -113,13 +121,19 @@ class VolumeTracingService @Inject()( for { // warning, may be called multiple times with the same version number (due to transaction management). 
// frontend ensures that each bucket is only updated once per transaction + fallbackLayer <- getFallbackLayer(tracingId) tracing <- find(tracingId) ?~> "tracing.notFound" segmentIndexBuffer <- Fox.successful( - new VolumeSegmentIndexBuffer(tracingId, - volumeSegmentIndexClient, - updateGroup.version, - AdditionalAxis.fromProtosAsOpt(tracing.additionalAxes))) - updatedTracing: VolumeTracing <- updateGroup.actions.foldLeft(Fox.successful(tracing)) { (tracingFox, action) => + new VolumeSegmentIndexBuffer( + tracingId, + volumeSegmentIndexClient, + updateGroup.version, + remoteDatastoreClient, + fallbackLayer, + AdditionalAxis.fromProtosAsOpt(tracing.additionalAxes), + userToken + )) + updatedTracing: VolumeTracing <- updateGroup.actions.foldLeft(find(tracingId)) { (tracingFox, action) => tracingFox.futureBox.flatMap { case Full(tracing) => action match { @@ -127,7 +141,7 @@ class VolumeTracingService @Inject()( if (tracing.getMappingIsEditable) { Fox.failure("Cannot mutate volume data in annotation with editable mapping.") } else - updateBucket(tracingId, tracing, a, segmentIndexBuffer, updateGroup.version, userToken) ?~> "Failed to save volume data." + updateBucket(tracingId, tracing, a, segmentIndexBuffer, updateGroup.version) ?~> "Failed to save volume data." case a: UpdateTracingVolumeAction => Fox.successful( tracing.copy( @@ -140,12 +154,12 @@ class VolumeTracingService @Inject()( AdditionalCoordinate.toProto(a.editPositionAdditionalCoordinates) )) case a: RevertToVersionVolumeAction => - revertToVolumeVersion(tracingId, a.sourceVersion, updateGroup.version, tracing) + revertToVolumeVersion(tracingId, a.sourceVersion, updateGroup.version, tracing, userToken) case a: DeleteSegmentDataVolumeAction => if (!tracing.getHasSegmentIndex) { Fox.failure("Cannot delete segment data for annotations without segment index.") } else - deleteSegmentData(tracingId, tracing, a, segmentIndexBuffer, updateGroup.version) ?~> "Failed to delete segment data." + deleteSegmentData(tracingId, tracing, a, segmentIndexBuffer, updateGroup.version, userToken) ?~> "Failed to delete segment data." case _: UpdateTdCamera => Fox.successful(tracing) case a: ApplyableVolumeAction => Fox.successful(a.applyOn(tracing)) case _ => Fox.failure("Unknown action.") @@ -168,8 +182,7 @@ class VolumeTracingService @Inject()( volumeTracing: VolumeTracing, action: UpdateBucketVolumeAction, segmentIndexBuffer: VolumeSegmentIndexBuffer, - updateGroupVersion: Long, - userToken: Option[String]): Fox[VolumeTracing] = + updateGroupVersion: Long): Fox[VolumeTracing] = for { _ <- assertMagIsValid(volumeTracing, action.mag) ?~> s"Received a mag-${action.mag.toMagLiteral(allowScalar = true)} bucket, which is invalid for this annotation." 
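+ // the updated bucket bytes are saved first; if the tracing has a segment index, the previous version of this bucket is then loaded so updateSegmentIndex can compare the old and new segment ids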
bucketPosition = BucketPosition(action.position.x, @@ -180,24 +193,37 @@ class VolumeTracingService @Inject()( _ <- bool2Fox(!bucketPosition.hasNegativeComponent) ?~> s"Received a bucket at negative position ($bucketPosition), must be positive" dataLayer = volumeTracingLayer(tracingId, volumeTracing) _ <- saveBucket(dataLayer, bucketPosition, action.data, updateGroupVersion) ?~> "failed to save bucket" + mappingName <- baseMappingName(volumeTracing) _ <- Fox.runIfOptionTrue(volumeTracing.hasSegmentIndex) { for { previousBucketBytes <- loadBucket(dataLayer, bucketPosition, Some(updateGroupVersion - 1L)).futureBox - _ <- updateSegmentIndex(segmentIndexBuffer, - bucketPosition, - action.data, - previousBucketBytes, - volumeTracing.elementClass) ?~> "failed to update segment index" + _ <- updateSegmentIndex( + segmentIndexBuffer, + bucketPosition, + action.data, + previousBucketBytes, + volumeTracing.elementClass, + mappingName, + editableMappingTracingId(volumeTracing, tracingId) + ) ?~> "failed to update segment index" } yield () } - _ <- segmentIndexBuffer.flush() } yield volumeTracing + override def editableMappingTracingId(tracing: VolumeTracing, tracingId: String): Option[String] = + if (tracing.mappingIsEditable.getOrElse(false)) Some(tracingId) else None + + override def baseMappingName(tracing: VolumeTracing): Fox[Option[String]] = + if (tracing.mappingIsEditable.getOrElse(false)) + tracing.mappingName.map(editableMappingService.getBaseMappingName).getOrElse(Fox.successful(None)) + else Fox.successful(tracing.mappingName) + private def deleteSegmentData(tracingId: String, volumeTracing: VolumeTracing, a: DeleteSegmentDataVolumeAction, segmentIndexBuffer: VolumeSegmentIndexBuffer, - version: Long): Fox[VolumeTracing] = + version: Long, + userToken: Option[String]): Fox[VolumeTracing] = for { _ <- Fox.successful(()) dataLayer = volumeTracingLayer(tracingId, volumeTracing) @@ -207,16 +233,24 @@ class VolumeTracingService @Inject()( } else { possibleAdditionalCoordinates.toList } + mappingName <- baseMappingName(volumeTracing) _ <- Fox.serialCombined(volumeTracing.resolutions.toList)(resolution => Fox.serialCombined(additionalCoordinateList)(additionalCoordinates => { val mag = vec3IntFromProto(resolution) for { + fallbackLayer <- getFallbackLayer(tracingId) bucketPositionsRaw <- volumeSegmentIndexService.getSegmentToBucketIndexWithEmptyFallbackWithoutBuffer( + fallbackLayer, tracingId, a.id, mag, + None, + mappingName, + editableMappingTracingId(volumeTracing, tracingId), additionalCoordinates, - dataLayer.additionalAxes) + dataLayer.additionalAxes, + userToken + ) bucketPositions = bucketPositionsRaw.values .map(vec3IntFromProto) .map(_ * mag * DataLayer.bucketLength) @@ -231,11 +265,15 @@ class VolumeTracingService @Inject()( if (elem.toLong == a.id) UnsignedInteger.zeroFromElementClass(volumeTracing.elementClass) else elem) filteredBytes = UnsignedIntegerArray.toByteArray(filteredData, volumeTracing.elementClass) _ <- saveBucket(dataLayer, bucketPosition, filteredBytes, version) - _ <- updateSegmentIndex(segmentIndexBuffer, - bucketPosition, - filteredBytes, - Some(data), - volumeTracing.elementClass) + _ <- updateSegmentIndex( + segmentIndexBuffer, + bucketPosition, + filteredBytes, + Some(data), + volumeTracing.elementClass, + mappingName, + editableMappingTracingId(volumeTracing, tracingId) + ) } yield () } } yield () @@ -253,15 +291,23 @@ class VolumeTracingService @Inject()( private def revertToVolumeVersion(tracingId: String, sourceVersion: Long, newVersion: Long, - tracing: 
VolumeTracing): Fox[VolumeTracing] = { + tracing: VolumeTracing, + userToken: Option[String]): Fox[VolumeTracing] = { val dataLayer = volumeTracingLayer(tracingId, tracing) val bucketStream = dataLayer.volumeBucketProvider.bucketStreamWithVersion() - val segmentIndexBuffer = - new VolumeSegmentIndexBuffer(tracingId, volumeSegmentIndexClient, newVersion, dataLayer.additionalAxes) for { + fallbackLayer <- getFallbackLayer(tracingId) + segmentIndexBuffer = new VolumeSegmentIndexBuffer(tracingId, + volumeSegmentIndexClient, + newVersion, + remoteDatastoreClient, + fallbackLayer, + dataLayer.additionalAxes, + userToken) sourceTracing <- find(tracingId, Some(sourceVersion)) + mappingName <- baseMappingName(sourceTracing) _ <- Fox.serialCombined(bucketStream) { case (bucketPosition, dataBeforeRevert, version) => if (version > sourceVersion) { @@ -270,22 +316,30 @@ class VolumeTracingService @Inject()( for { _ <- saveBucket(dataLayer, bucketPosition, dataAfterRevert, newVersion) _ <- Fox.runIfOptionTrue(tracing.hasSegmentIndex)( - updateSegmentIndex(segmentIndexBuffer, - bucketPosition, - dataAfterRevert, - Full(dataBeforeRevert), - sourceTracing.elementClass)) + updateSegmentIndex( + segmentIndexBuffer, + bucketPosition, + dataAfterRevert, + Full(dataBeforeRevert), + sourceTracing.elementClass, + mappingName, + editableMappingTracingId(sourceTracing, tracingId) + )) } yield () case Empty => for { dataAfterRevert <- Fox.successful(Array[Byte](0)) _ <- saveBucket(dataLayer, bucketPosition, dataAfterRevert, newVersion) _ <- Fox.runIfOptionTrue(tracing.hasSegmentIndex)( - updateSegmentIndex(segmentIndexBuffer, - bucketPosition, - dataAfterRevert, - Full(dataBeforeRevert), - sourceTracing.elementClass)) + updateSegmentIndex( + segmentIndexBuffer, + bucketPosition, + dataAfterRevert, + Full(dataBeforeRevert), + sourceTracing.elementClass, + mappingName, + editableMappingTracingId(sourceTracing, tracingId) + )) } yield () case Failure(msg, _, chain) => Fox.failure(msg, Empty, chain) } @@ -295,85 +349,121 @@ class VolumeTracingService @Inject()( } yield sourceTracing } - def initializeWithDataMultiple(tracingId: String, tracing: VolumeTracing, initialData: File): Fox[Set[Vec3Int]] = + def initializeWithDataMultiple(tracingId: String, + tracing: VolumeTracing, + initialData: File, + userToken: Option[String]): Fox[Set[Vec3Int]] = if (tracing.version != 0L) Failure("Tracing has already been edited.") else { val resolutionSets = new mutable.HashSet[Set[Vec3Int]]() - withZipsFromMultiZip(initialData) { (_, dataZip) => - val resolutionSet = resolutionSetFromZipfile(dataZip) - if (resolutionSet.nonEmpty) resolutionSets.add(resolutionSet) - } - // if none of the tracings contained any volume data do not save buckets, use full resolution list, as already initialized on wk-side - if (resolutionSets.isEmpty) - Fox.successful(tracing.resolutions.map(vec3IntFromProto).toSet) - else { - val resolutionsDoMatch = resolutionSets.headOption.forall { head => - resolutionSets.forall(_ == head) + for { + _ <- withZipsFromMultiZipAsync(initialData) { (_, dataZip) => + for { + _ <- Fox.successful(()) + resolutionSet = resolutionSetFromZipfile(dataZip) + _ = if (resolutionSet.nonEmpty) resolutionSets.add(resolutionSet) + } yield () } - if (!resolutionsDoMatch) - Fox.failure("annotation.volume.resolutionsDoNotMatch") + mappingName <- baseMappingName(tracing) + resolutions <- + // if none of the tracings contained any volume data do not save buckets, use full resolution list, as already initialized on wk-side + if 
(resolutionSets.isEmpty) + Fox.successful(tracing.resolutions.map(vec3IntFromProto).toSet) else { - val mergedVolume = new MergedVolume(tracing.elementClass) - for { - _ <- withZipsFromMultiZip(initialData)((_, dataZip) => mergedVolume.addLabelSetFromDataZip(dataZip)).toFox - _ <- withZipsFromMultiZip(initialData)((index, dataZip) => mergedVolume.addFromDataZip(index, dataZip)).toFox - _ <- bool2Fox(ElementClass.largestSegmentIdIsInRange( - mergedVolume.largestSegmentId.toLong, - tracing.elementClass)) ?~> "annotation.volume.largestSegmentIdExceedsRange" - destinationDataLayer = volumeTracingLayer(tracingId, tracing) - segmentIndexBuffer = new VolumeSegmentIndexBuffer(tracingId, - volumeSegmentIndexClient, - tracing.version, - AdditionalAxis.fromProtosAsOpt(tracing.additionalAxes)) - _ <- mergedVolume.withMergedBuckets { (bucketPosition, bytes) => - for { - _ <- saveBucket(destinationDataLayer, bucketPosition, bytes, tracing.version) - _ <- Fox.runIfOptionTrue(tracing.hasSegmentIndex)( - updateSegmentIndex(segmentIndexBuffer, bucketPosition, bytes, Empty, tracing.elementClass)) - } yield () - } - _ <- segmentIndexBuffer.flush() - } yield mergedVolume.presentResolutions + val resolutionsDoMatch = resolutionSets.headOption.forall { head => + resolutionSets.forall(_ == head) + } + if (!resolutionsDoMatch) + Fox.failure("annotation.volume.resolutionsDoNotMatch") + else { + val mergedVolume = new MergedVolume(tracing.elementClass) + for { + _ <- withZipsFromMultiZipAsync(initialData)((_, dataZip) => mergedVolume.addLabelSetFromDataZip(dataZip)) + _ <- withZipsFromMultiZipAsync(initialData)((index, dataZip) => + mergedVolume.addFromDataZip(index, dataZip)) + _ <- bool2Fox(ElementClass.largestSegmentIdIsInRange( + mergedVolume.largestSegmentId.toLong, + tracing.elementClass)) ?~> "annotation.volume.largestSegmentIdExceedsRange" + destinationDataLayer = volumeTracingLayer(tracingId, tracing) + fallbackLayer <- getFallbackLayer(tracingId) + segmentIndexBuffer = new VolumeSegmentIndexBuffer( + tracingId, + volumeSegmentIndexClient, + tracing.version, + remoteDatastoreClient, + fallbackLayer, + AdditionalAxis.fromProtosAsOpt(tracing.additionalAxes), + userToken + ) + _ <- mergedVolume.withMergedBuckets { (bucketPosition, bytes) => + for { + _ <- saveBucket(destinationDataLayer, bucketPosition, bytes, tracing.version) + _ <- Fox.runIfOptionTrue(tracing.hasSegmentIndex)( + updateSegmentIndex(segmentIndexBuffer, + bucketPosition, + bytes, + Empty, + tracing.elementClass, + mappingName, + editableMappingTracingId(tracing, tracingId))) + } yield () + } + _ <- segmentIndexBuffer.flush() + } yield mergedVolume.presentResolutions + } } - } + } yield resolutions } def initializeWithData(tracingId: String, tracing: VolumeTracing, initialData: File, - resolutionRestrictions: ResolutionRestrictions): Fox[Set[Vec3Int]] = + resolutionRestrictions: ResolutionRestrictions, + userToken: Option[String]): Fox[Set[Vec3Int]] = if (tracing.version != 0L) { Failure("Tracing has already been edited.") } else { - val dataLayer = volumeTracingLayer(tracingId, tracing) val savedResolutions = new mutable.HashSet[Vec3Int]() - val segmentIndexBuffer = new VolumeSegmentIndexBuffer(tracingId, - volumeSegmentIndexClient, - tracing.version, - AdditionalAxis.fromProtosAsOpt(tracing.additionalAxes)) - - val unzipResult = withBucketsFromZip(initialData) { (bucketPosition, bytes) => - if (resolutionRestrictions.isForbidden(bucketPosition.mag)) { - Fox.successful(()) + for { + fallbackLayer <- getFallbackLayer(tracingId) + mappingName <- 
baseMappingName(tracing) + segmentIndexBuffer = new VolumeSegmentIndexBuffer( + tracingId, + volumeSegmentIndexClient, + tracing.version, + remoteDatastoreClient, + fallbackLayer, + AdditionalAxis.fromProtosAsOpt(tracing.additionalAxes), + userToken + ) + _ <- withBucketsFromZip(initialData) { (bucketPosition, bytes) => + if (resolutionRestrictions.isForbidden(bucketPosition.mag)) { + Fox.successful(()) + } else { + savedResolutions.add(bucketPosition.mag) + for { + _ <- saveBucket(dataLayer, bucketPosition, bytes, tracing.version) + _ <- Fox.runIfOptionTrue(tracing.hasSegmentIndex)( + updateSegmentIndex(segmentIndexBuffer, + bucketPosition, + bytes, + Empty, + tracing.elementClass, + mappingName, + editableMappingTracingId(tracing, tracingId))) + } yield () + } + } ?~> "failed to import volume data from zipfile" + _ <- segmentIndexBuffer.flush() + } yield { + if (savedResolutions.isEmpty) { + resolutionSetFromZipfile(initialData) } else { - savedResolutions.add(bucketPosition.mag) - for { - _ <- saveBucket(dataLayer, bucketPosition, bytes, tracing.version) - _ <- Fox.runIfOptionTrue(tracing.hasSegmentIndex)( - updateSegmentIndex(segmentIndexBuffer, bucketPosition, bytes, Empty, tracing.elementClass)) - } yield () + savedResolutions.toSet } } - if (savedResolutions.isEmpty) { - val resolutionSet = resolutionSetFromZipfile(initialData) - Fox.successful(resolutionSet) - } else - for { - _ <- unzipResult.toFox - _ <- segmentIndexBuffer.flush() - } yield savedResolutions.toSet } def allDataZip(tracingId: String, @@ -419,10 +509,12 @@ class VolumeTracingService @Inject()( def data(tracingId: String, tracing: VolumeTracing, - dataRequests: DataRequestCollection): Fox[(Array[Byte], List[Int])] = + dataRequests: DataRequestCollection, + includeFallbackDataIfAvailable: Boolean = false, + userToken: Option[String] = None): Fox[(Array[Byte], List[Int])] = for { isTemporaryTracing <- isTemporaryTracing(tracingId) - dataLayer = volumeTracingLayer(tracingId, tracing, isTemporaryTracing) + dataLayer = volumeTracingLayer(tracingId, tracing, isTemporaryTracing, includeFallbackDataIfAvailable, userToken) requests = dataRequests.map(r => DataServiceDataRequest(null, dataLayer, None, r.cuboid(dataLayer), r.settings.copy(appliedAgglomerate = None))) data <- binaryDataService.handleDataRequests(requests) @@ -436,24 +528,26 @@ class VolumeTracingService @Inject()( editPosition: Option[Vec3Int], editRotation: Option[Vec3Double], boundingBox: Option[BoundingBox], - mappingName: Option[String]): Fox[(String, VolumeTracing)] = { + mappingName: Option[String], + userToken: Option[String]): Fox[(String, VolumeTracing)] = { val tracingWithBB = addBoundingBoxFromTaskIfRequired(sourceTracing, fromTask, datasetBoundingBox) val tracingWithResolutionRestrictions = restrictMagList(tracingWithBB, resolutionRestrictions) - val newTracing = tracingWithResolutionRestrictions.copy( - createdTimestamp = System.currentTimeMillis(), - editPosition = editPosition.map(vec3IntToProto).getOrElse(tracingWithResolutionRestrictions.editPosition), - editRotation = editRotation.map(vec3DoubleToProto).getOrElse(tracingWithResolutionRestrictions.editRotation), - boundingBox = boundingBoxOptToProto(boundingBox).getOrElse(tracingWithResolutionRestrictions.boundingBox), - mappingName = mappingName.orElse(tracingWithResolutionRestrictions.mappingName), - version = 0, - // Adding segment index on duplication if the volume tracing allows it. 
This will be used in duplicateData - hasSegmentIndex = - VolumeSegmentIndexService.canHaveSegmentIndexOpt(tracingWithResolutionRestrictions.fallbackLayer) - ) for { + fallbackLayer <- getFallbackLayer(tracingId) + hasSegmentIndex <- VolumeSegmentIndexService.canHaveSegmentIndex(remoteDatastoreClient, fallbackLayer, userToken) + newTracing = tracingWithResolutionRestrictions.copy( + createdTimestamp = System.currentTimeMillis(), + editPosition = editPosition.map(vec3IntToProto).getOrElse(tracingWithResolutionRestrictions.editPosition), + editRotation = editRotation.map(vec3DoubleToProto).getOrElse(tracingWithResolutionRestrictions.editRotation), + boundingBox = boundingBoxOptToProto(boundingBox).getOrElse(tracingWithResolutionRestrictions.boundingBox), + mappingName = mappingName.orElse(tracingWithResolutionRestrictions.mappingName), + version = 0, + // Adding segment index on duplication if the volume tracing allows it. This will be used in duplicateData + hasSegmentIndex = Some(hasSegmentIndex) + ) _ <- bool2Fox(newTracing.resolutions.nonEmpty) ?~> "resolutionRestrictions.tooTight" newId <- save(newTracing, None, newTracing.version) - _ <- duplicateData(tracingId, sourceTracing, newId, newTracing) + _ <- duplicateData(tracingId, sourceTracing, newId, newTracing, userToken) } yield (newId, newTracing) } @@ -476,23 +570,39 @@ class VolumeTracingService @Inject()( private def duplicateData(sourceId: String, sourceTracing: VolumeTracing, destinationId: String, - destinationTracing: VolumeTracing): Fox[Unit] = + destinationTracing: VolumeTracing, + userToken: Option[String]): Fox[Unit] = for { isTemporaryTracing <- isTemporaryTracing(sourceId) sourceDataLayer = volumeTracingLayer(sourceId, sourceTracing, isTemporaryTracing) buckets: Iterator[(BucketPosition, Array[Byte])] = sourceDataLayer.bucketProvider.bucketStream() destinationDataLayer = volumeTracingLayer(destinationId, destinationTracing) - segmentIndexBuffer = new VolumeSegmentIndexBuffer(destinationId, - volumeSegmentIndexClient, - destinationTracing.version, - AdditionalAxis.fromProtosAsOpt(sourceTracing.additionalAxes)) + fallbackLayer <- getFallbackLayer(sourceId) + segmentIndexBuffer = new VolumeSegmentIndexBuffer( + destinationId, + volumeSegmentIndexClient, + destinationTracing.version, + remoteDatastoreClient, + fallbackLayer, + AdditionalAxis.fromProtosAsOpt(sourceTracing.additionalAxes), + userToken + ) + mappingName <- baseMappingName(sourceTracing) _ <- Fox.serialCombined(buckets) { case (bucketPosition, bucketData) => if (destinationTracing.resolutions.contains(vec3IntToProto(bucketPosition.mag))) { for { _ <- saveBucket(destinationDataLayer, bucketPosition, bucketData, destinationTracing.version) _ <- Fox.runIfOptionTrue(destinationTracing.hasSegmentIndex)( - updateSegmentIndex(segmentIndexBuffer, bucketPosition, bucketData, Empty, sourceTracing.elementClass)) + updateSegmentIndex( + segmentIndexBuffer, + bucketPosition, + bucketData, + Empty, + sourceTracing.elementClass, + mappingName, + editableMappingTracingId(sourceTracing, sourceId) + )) } yield () } else Fox.successful(()) } @@ -545,12 +655,17 @@ class VolumeTracingService @Inject()( toCache) } yield id - def downsample(tracingId: String, oldTracingId: String, tracing: VolumeTracing): Fox[Unit] = + def downsample(tracingId: String, + oldTracingId: String, + tracing: VolumeTracing, + userToken: Option[String]): Fox[Unit] = for { resultingResolutions <- downsampleWithLayer(tracingId, oldTracingId, tracing, - volumeTracingLayer(tracingId, tracing)) + 
volumeTracingLayer(tracingId, tracing), + this, + userToken) _ <- updateResolutionList(tracingId, tracing, resultingResolutions.toSet) } yield () @@ -676,7 +791,8 @@ class VolumeTracingService @Inject()( tracings: Seq[VolumeTracing], newId: String, newVersion: Long, - toCache: Boolean): Fox[MergedVolumeStats] = { + toCache: Boolean, + userToken: Option[String]): Fox[MergedVolumeStats] = { val elementClass = tracings.headOption.map(_.elementClass).getOrElse(elementClassToProto(ElementClass.uint8)) val resolutionSets = new mutable.HashSet[Set[Vec3Int]]() @@ -724,15 +840,25 @@ class VolumeTracingService @Inject()( _ <- bool2Fox(ElementClass.largestSegmentIdIsInRange(mergedVolume.largestSegmentId.toLong, elementClass)) ?~> "annotation.volume.largestSegmentIdExceedsRange" mergedAdditionalAxes <- Fox.box2Fox(AdditionalAxis.mergeAndAssertSameAdditionalAxes(tracings.map(t => AdditionalAxis.fromProtosAsOpt(t.additionalAxes)))) + fallbackLayer <- getFallbackLayer(tracingSelectors.head.tracingId) segmentIndexBuffer = new VolumeSegmentIndexBuffer(newId, volumeSegmentIndexClient, newVersion, - mergedAdditionalAxes) + remoteDatastoreClient, + fallbackLayer, + mergedAdditionalAxes, + userToken) _ <- mergedVolume.withMergedBuckets { (bucketPosition, bucketBytes) => for { _ <- saveBucket(newId, elementClass, bucketPosition, bucketBytes, newVersion, toCache, mergedAdditionalAxes) _ <- Fox.runIf(shouldCreateSegmentIndex)( - updateSegmentIndex(segmentIndexBuffer, bucketPosition, bucketBytes, Empty, elementClass)) + updateSegmentIndex(segmentIndexBuffer, + bucketPosition, + bucketBytes, + Empty, + elementClass, + tracings.headOption.flatMap(_.mappingName), + None)) } yield () } _ <- segmentIndexBuffer.flush() @@ -744,43 +870,57 @@ class VolumeTracingService @Inject()( tracing: VolumeTracing, currentVersion: Long, userToken: Option[String], - dryRun: Boolean): Fox[Option[Int]] = - if (tracing.hasSegmentIndex.getOrElse(false)) { - // tracing has a segment index already, do nothing - Fox.successful(None) - } else if (!VolumeSegmentIndexService.canHaveSegmentIndex(tracing.fallbackLayer)) { - // tracing is not eligible for segment index, do nothing - Fox.successful(None) - } else { - var processedBucketCount = 0 - for { - isTemporaryTracing <- isTemporaryTracing(tracingId) - sourceDataLayer = volumeTracingLayer(tracingId, tracing, isTemporaryTracing) - buckets: Iterator[(BucketPosition, Array[Byte])] = sourceDataLayer.bucketProvider.bucketStream() - segmentIndexBuffer = new VolumeSegmentIndexBuffer(tracingId, - volumeSegmentIndexClient, - currentVersion + 1L, - sourceDataLayer.additionalAxes) - _ <- Fox.serialCombined(buckets) { - case (bucketPosition, bucketData) => - processedBucketCount += 1 - updateSegmentIndex(segmentIndexBuffer, bucketPosition, bucketData, Empty, tracing.elementClass) - } - _ <- Fox.runIf(!dryRun)(segmentIndexBuffer.flush()) - updateGroup = UpdateActionGroup[VolumeTracing]( - tracing.version + 1L, - System.currentTimeMillis(), - None, - List(AddSegmentIndex()), - None, - None, - "dummyTransactionId", - 1, - 0 - ) - _ <- Fox.runIf(!dryRun)(handleUpdateGroup(tracingId, updateGroup, tracing.version, userToken)) - } yield Some(processedBucketCount) - } + dryRun: Boolean): Fox[Option[Int]] = { + var processedBucketCount = 0 + for { + isTemporaryTracing <- isTemporaryTracing(tracingId) + sourceDataLayer = volumeTracingLayer(tracingId, tracing, isTemporaryTracing) + buckets: Iterator[(BucketPosition, Array[Byte])] = sourceDataLayer.bucketProvider.bucketStream() + fallbackLayer <- 
getFallbackLayer(tracingId) + mappingName <- baseMappingName(tracing) + segmentIndexBuffer = new VolumeSegmentIndexBuffer(tracingId, + volumeSegmentIndexClient, + currentVersion + 1L, + remoteDatastoreClient, + fallbackLayer, + sourceDataLayer.additionalAxes, + userToken) + _ <- Fox.serialCombined(buckets) { + case (bucketPosition, bucketData) => + processedBucketCount += 1 + updateSegmentIndex(segmentIndexBuffer, + bucketPosition, + bucketData, + Empty, + tracing.elementClass, + mappingName, + editableMappingTracingId(tracing, tracingId)) + } + _ <- Fox.runIf(!dryRun)(segmentIndexBuffer.flush()) + updateGroup = UpdateActionGroup[VolumeTracing]( + tracing.version + 1L, + System.currentTimeMillis(), + None, + List(AddSegmentIndex()), + None, + None, + "dummyTransactionId", + 1, + 0 + ) + _ <- Fox.runIf(!dryRun)(handleUpdateGroup(tracingId, updateGroup, tracing.version, userToken)) + } yield Some(processedBucketCount) + } + + def checkIfSegmentIndexMayBeAdded(tracingId: String, tracing: VolumeTracing, userToken: Option[String])( + implicit ec: ExecutionContext): Fox[Boolean] = + for { + fallbackLayer <- remoteFallbackLayerFromVolumeTracing(tracing, tracingId) + canHaveSegmentIndex <- VolumeSegmentIndexService.canHaveSegmentIndex(remoteDatastoreClient, + Some(fallbackLayer), + userToken) + alreadyHasSegmentIndex = tracing.hasSegmentIndex.getOrElse(false) + } yield canHaveSegmentIndex && !alreadyHasSegmentIndex def importVolumeData(tracingId: String, tracing: VolumeTracing, @@ -790,7 +930,6 @@ class VolumeTracingService @Inject()( if (currentVersion != tracing.version) Fox.failure("version.mismatch") else { - val resolutionSet = resolutionSetFromZipfile(zipFile) val resolutionsDoMatch = resolutionSet.isEmpty || resolutionSet == resolveLegacyResolutionList(tracing.resolutions) @@ -811,22 +950,31 @@ class VolumeTracingService @Inject()( mergedVolume.largestSegmentId.toLong, tracing.elementClass)) ?~> "annotation.volume.largestSegmentIdExceedsRange" dataLayer = volumeTracingLayer(tracingId, tracing) + fallbackLayer <- getFallbackLayer(tracingId) + mappingName <- baseMappingName(tracing) segmentIndexBuffer <- Fox.successful( new VolumeSegmentIndexBuffer(tracingId, volumeSegmentIndexClient, tracing.version + 1, - dataLayer.additionalAxes)) + remoteDatastoreClient, + fallbackLayer, + dataLayer.additionalAxes, + userToken)) _ <- mergedVolume.withMergedBuckets { (bucketPosition, bucketBytes) => for { _ <- saveBucket(volumeLayer, bucketPosition, bucketBytes, tracing.version + 1) _ <- Fox.runIfOptionTrue(tracing.hasSegmentIndex) { for { previousBucketBytes <- loadBucket(dataLayer, bucketPosition, Some(tracing.version)).futureBox - _ <- updateSegmentIndex(segmentIndexBuffer, - bucketPosition, - bucketBytes, - previousBucketBytes, - tracing.elementClass) ?~> "failed to update segment index" + _ <- updateSegmentIndex( + segmentIndexBuffer, + bucketPosition, + bucketBytes, + previousBucketBytes, + tracing.elementClass, + mappingName, + editableMappingTracingId(tracing, tracingId) + ) ?~> "failed to update segment index" } yield () } } yield () @@ -867,4 +1015,19 @@ class VolumeTracingService @Inject()( Fox.failure("Cannot merge tracings with and without editable mappings") } + def getFallbackLayer(tracingId: String): Fox[Option[RemoteFallbackLayer]] = + fallbackLayerCache.getOrLoad(tracingId, t => getFallbackLayerFromWebknossos(t)) + + private def getFallbackLayerFromWebknossos(tracingId: String) = Fox[Option[RemoteFallbackLayer]] { + for { + tracing <- find(tracingId) + dataSource <- 
remoteWebknossosClient.getDataSourceForTracing(tracingId) + dataSourceId = dataSource.id + fallbackLayerName = tracing.fallbackLayer + fallbackLayer = dataSource.dataLayers + .find(_.name == fallbackLayerName.getOrElse("")) + .map(RemoteFallbackLayer.fromDataLayerAndDataSource(_, dataSourceId)) + } yield fallbackLayer + } + } diff --git a/webknossos-tracingstore/deployment/README.md b/webknossos-tracingstore/deployment/README.md index 705d5df8aa..2ac8f786b8 100644 --- a/webknossos-tracingstore/deployment/README.md +++ b/webknossos-tracingstore/deployment/README.md @@ -37,10 +37,10 @@ systemctl stop webknossos-tracingstore ``` ## Using a cluster proxy/firewall for HTTP(S) routing -If your cluster enviroment has a firewall that supports HTTP(S) routing, you can expose the tracingstore directly on Port 80. +If your cluster environment has a firewall that supports HTTP(S) routing, you can expose the tracingstore directly on port 80. ## Using nginx for HTTP(S) routing -Nginx is a high performance HTTP server that allows for proxing HTTP(S) request. This is useful, because the tracingstore doesn't support HTTPS by itself. So, you can put the nginx in front of the tracingstore to accept HTTPS requests from the outside and route them as regular HTTP requests to the tracingstore. +Nginx is a high-performance HTTP server that allows for proxying HTTP(S) requests. This is useful because the tracingstore doesn't support HTTPS by itself. So, you can put nginx in front of the tracingstore to accept HTTPS requests from the outside and route them as regular HTTP requests to the tracingstore. [DigitalOcean has a great tutorial for setting up nginx](https://www.digitalocean.com/community/tutorials/understanding-nginx-http-proxying-load-balancing-buffering-and-caching).
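For illustration of the routing just described, a minimal nginx `server` block might look like the following sketch. The hostname, the certificate paths, and the upstream address (here `127.0.0.1:9000`) are placeholder assumptions to adapt to your deployment:

```nginx
server {
    listen 443 ssl;
    server_name tracingstore.example.com;  # assumption: your public hostname

    # assumption: certificate paths issued by your certificate provider
    ssl_certificate     /etc/ssl/certs/tracingstore.pem;
    ssl_certificate_key /etc/ssl/private/tracingstore.key;

    location / {
        # assumption: the tracingstore listens locally on port 9000
        proxy_pass http://127.0.0.1:9000;
        proxy_set_header Host $host;
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
        proxy_set_header X-Forwarded-Proto $scheme;
    }
}
```

The `X-Forwarded-*` headers let the tracingstore see the original client address and the fact that the outside request arrived via HTTPS, even though nginx reaches it over plain HTTP.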
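Stepping back to the tracingstore changes earlier in this diff: `VolumeSegmentStatisticsService` now routes both the volume and the bounding-box computation through a shared `SegmentStatistics` helper that is parameterized over two functions — one that asks the segment index which buckets contain a segment, and one that loads the typed voxel data of a bucket. The helper itself is not part of this diff; the following is only a simplified, self-contained sketch of the volume path, with all names and signatures inferred from the call sites:

```scala
// Stand-in for the project's Vec3Int, so the sketch compiles on its own.
final case class Vec3(x: Int, y: Int, z: Int)

// Hypothetical, simplified shape of the shared volume computation:
// the caller supplies segment-index lookup and bucket loading, so the
// datastore and the tracingstore can reuse the same counting logic.
def calculateSegmentVolume(
    segmentId: Long,
    mag: Vec3,
    getBucketPositions: (Long, Vec3) => Seq[Vec3],      // buckets containing the segment, per the segment index
    getTypedDataForBucket: (Vec3, Vec3) => Array[Long]  // decoded voxel values of one bucket
): Long =
  getBucketPositions(segmentId, mag)
    .map(pos => getTypedDataForBucket(pos, mag).count(_ == segmentId).toLong)
    .sum

// Toy usage: a single bucket whose data contains segment 7 three times.
val volume = calculateSegmentVolume(
  7L,
  Vec3(1, 1, 1),
  (_, _) => Seq(Vec3(0, 0, 0)),
  (_, _) => Array(7L, 7L, 0L, 7L, 2L)
) // == 3
```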