From 1a563594b0edca178ad100befb7b5c5467ab00d6 Mon Sep 17 00:00:00 2001 From: frcroth Date: Wed, 13 Nov 2024 16:01:06 +0100 Subject: [PATCH 1/6] Log data loading errors in conversion / mapping application --- .../services/BinaryDataService.scala | 47 ++++++++++++++----- 1 file changed, 36 insertions(+), 11 deletions(-) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/BinaryDataService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/BinaryDataService.scala index fa255676d2b..207315f78f7 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/BinaryDataService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/BinaryDataService.scala @@ -10,7 +10,7 @@ import com.scalableminds.webknossos.datastore.models.datasource.{Category, DataL import com.scalableminds.webknossos.datastore.models.requests.{DataReadInstruction, DataServiceDataRequest} import com.scalableminds.webknossos.datastore.storage._ import com.typesafe.scalalogging.LazyLogging -import net.liftweb.common.{Box, Failure, Full} +import net.liftweb.common.{Box, Empty, EmptyBox, Failure, Full} import ucar.ma2.{Array => MultiArray} import net.liftweb.common.Box.tryo @@ -55,8 +55,27 @@ class BinaryDataService(val dataBaseDir: Path, def handleDataRequests(requests: List[DataServiceDataRequest]): Fox[(Array[Byte], List[Int])] = { def convertIfNecessary(isNecessary: Boolean, inputArray: Array[Byte], - conversionFunc: Array[Byte] => Box[Array[Byte]]): Box[Array[Byte]] = - if (isNecessary) conversionFunc(inputArray) else Full(inputArray) + conversionFunc: Array[Byte] => Box[Array[Byte]], + request: DataServiceDataRequest): Box[Array[Byte]] = + if (isNecessary) conversionFunc(inputArray) match { + case Full(value) => Full(value) + case box: EmptyBox => + box match { + case Empty => + logError( + request, + s"Failed to convert data for layer ${request.dataLayer.name} of dataset 
${request.dataSource.id.team}/${request.dataSource.id.name} at ${request.cuboid}, result is Empty" + ) + Empty + case f: Failure => + logError( + request, + s"Failed to convert data for layer ${request.dataLayer.name} of dataset ${request.dataSource.id.team}/${request.dataSource.id.name} at ${request.cuboid}, result is Failure: ${Fox + .failureChainAsString(f, includeStackTraces = true)}" + ) + f + } + } else Full(inputArray) val requestsCount = requests.length val requestData = requests.zipWithIndex.map { @@ -67,10 +86,11 @@ class BinaryDataService(val dataBaseDir: Path, convertIfNecessary( request.settings.appliedAgglomerate.isDefined && request.dataLayer.category == Category.segmentation && request.cuboid.mag.maxDim <= MaxMagForAgglomerateMapping, data, - agglomerateService.applyAgglomerate(request) + agglomerateService.applyAgglomerate(request), + request ) }.getOrElse(Full(data)) ?~> "Failed to apply agglomerate mapping" - resultData <- convertIfNecessary(request.settings.halfByte, mappedData, convertToHalfByte) + resultData <- convertIfNecessary(request.settings.halfByte, mappedData, convertToHalfByte, request) } yield (resultData, index) } @@ -98,12 +118,11 @@ class BinaryDataService(val dataBaseDir: Path, s"Caught internal error: $msg while loading a bucket for layer ${request.dataLayer.name} of dataset ${request.dataSource.id}") Fox.failure(e.getMessage) case f: Failure => - if (datasetErrorLoggingService.exists(_.shouldLog(request.dataSource.id.team, request.dataSource.id.name))) { - logger.error( - s"Bucket loading for layer ${request.dataLayer.name} of dataset ${request.dataSource.id.team}/${request.dataSource.id.name} at ${readInstruction.bucket} failed: ${Fox - .failureChainAsString(f, includeStackTraces = true)}") - datasetErrorLoggingService.foreach(_.registerLogged(request.dataSource.id.team, request.dataSource.id.name)) - } + logError( + request, + s"Bucket loading for layer ${request.dataLayer.name} of dataset 
${request.dataSource.id.team}/${request.dataSource.id.name} at ${readInstruction.bucket} failed: ${Fox + .failureChainAsString(f, includeStackTraces = true)}" + ) f.toFox case Full(data) => if (data.length == 0) { @@ -198,4 +217,10 @@ class BinaryDataService(val dataBaseDir: Path, (closedAgglomerateFileHandleCount, clearedBucketProviderCount, removedChunksCount) } + def logError(request: DataServiceDataRequest, msg: String): Unit = + if (datasetErrorLoggingService.exists(_.shouldLog(request.dataSource.id.team, request.dataSource.id.name))) { + logger.error(msg) + datasetErrorLoggingService.foreach(_.registerLogged(request.dataSource.id.team, request.dataSource.id.name)) + } + } From 826d938d72981e1e9f84e1c5353bf35713ee28a3 Mon Sep 17 00:00:00 2001 From: frcroth Date: Mon, 18 Nov 2024 11:48:31 +0100 Subject: [PATCH 2/6] Update changelog --- CHANGELOG.unreleased.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.unreleased.md b/CHANGELOG.unreleased.md index 3784c9075c7..bf2f19b58c4 100644 --- a/CHANGELOG.unreleased.md +++ b/CHANGELOG.unreleased.md @@ -15,6 +15,7 @@ For upgrade instructions, please check the [migration guide](MIGRATIONS.released ### Changed - Reading image files on datastore filesystem is now done asynchronously. [#8126](https://github.com/scalableminds/webknossos/pull/8126) - Improved error messages for starting jobs on datasets from other organizations. [#8181](https://github.com/scalableminds/webknossos/pull/8181) +- Improved logging for errors when loading datasets and problems arise during a conversion step. [#8202](https://github.com/scalableminds/webknossos/pull/8202) ### Fixed - Fix performance bottleneck when deleting a lot of trees at once. 
[#8176](https://github.com/scalableminds/webknossos/pull/8176) From a56da43159fdbc38d3f150dc5dd74b68637802ef Mon Sep 17 00:00:00 2001 From: frcroth Date: Wed, 20 Nov 2024 11:41:45 +0100 Subject: [PATCH 3/6] Extract error logging into DatasetErrorLoggingService --- .../services/BinaryDataService.scala | 67 +++++-------------- .../services/DatasetErrorLoggingService.scala | 39 +++++++++++ 2 files changed, 56 insertions(+), 50 deletions(-) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/BinaryDataService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/BinaryDataService.scala index 207315f78f7..96bfd1f066a 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/BinaryDataService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/BinaryDataService.scala @@ -10,7 +10,7 @@ import com.scalableminds.webknossos.datastore.models.datasource.{Category, DataL import com.scalableminds.webknossos.datastore.models.requests.{DataReadInstruction, DataServiceDataRequest} import com.scalableminds.webknossos.datastore.storage._ import com.typesafe.scalalogging.LazyLogging -import net.liftweb.common.{Box, Empty, EmptyBox, Failure, Full} +import net.liftweb.common.{Box, Full} import ucar.ma2.{Array => MultiArray} import net.liftweb.common.Box.tryo @@ -55,26 +55,11 @@ class BinaryDataService(val dataBaseDir: Path, def handleDataRequests(requests: List[DataServiceDataRequest]): Fox[(Array[Byte], List[Int])] = { def convertIfNecessary(isNecessary: Boolean, inputArray: Array[Byte], - conversionFunc: Array[Byte] => Box[Array[Byte]], - request: DataServiceDataRequest): Box[Array[Byte]] = - if (isNecessary) conversionFunc(inputArray) match { - case Full(value) => Full(value) - case box: EmptyBox => - box match { - case Empty => - logError( - request, - s"Failed to convert data for layer ${request.dataLayer.name} of dataset 
${request.dataSource.id.team}/${request.dataSource.id.name} at ${request.cuboid}, result is Empty" - ) - Empty - case f: Failure => - logError( - request, - s"Failed to convert data for layer ${request.dataLayer.name} of dataset ${request.dataSource.id.team}/${request.dataSource.id.name} at ${request.cuboid}, result is Failure: ${Fox - .failureChainAsString(f, includeStackTraces = true)}" - ) - f - } + conversionFunc: Array[Byte] => Fox[Array[Byte]], + request: DataServiceDataRequest): Fox[Array[Byte]] = + if (isNecessary) datasetErrorLoggingService match { + case Some(value) => value.withErrorLogging(request.dataSource.id, "Conversion", conversionFunc(inputArray)) + case None => conversionFunc(inputArray) } else Full(inputArray) val requestsCount = requests.length @@ -82,14 +67,15 @@ class BinaryDataService(val dataBaseDir: Path, case (request, index) => for { data <- handleDataRequest(request) - mappedData <- agglomerateServiceOpt.map { agglomerateService => + mappedDataFox <- agglomerateServiceOpt.map { agglomerateService => convertIfNecessary( request.settings.appliedAgglomerate.isDefined && request.dataLayer.category == Category.segmentation && request.cuboid.mag.maxDim <= MaxMagForAgglomerateMapping, data, agglomerateService.applyAgglomerate(request), request ) - }.getOrElse(Full(data)) ?~> "Failed to apply agglomerate mapping" + }.fillEmpty(Fox.successful(data)) ?~> "Failed to apply agglomerate mapping" + mappedData <- mappedDataFox resultData <- convertIfNecessary(request.settings.halfByte, mappedData, convertToHalfByte, request) } yield (resultData, index) } @@ -111,27 +97,15 @@ class BinaryDataService(val dataBaseDir: Path, val bucketProvider = bucketProviderCache.getOrLoadAndPut((dataSourceId, request.dataLayer.bucketProviderCacheKey))(_ => request.dataLayer.bucketProvider(remoteSourceDescriptorServiceOpt, dataSourceId, sharedChunkContentsCache)) - bucketProvider.load(readInstruction).futureBox.flatMap { - case Failure(msg, Full(e: InternalError), _) => 
- applicationHealthService.foreach(a => a.pushError(e)) - logger.error( - s"Caught internal error: $msg while loading a bucket for layer ${request.dataLayer.name} of dataset ${request.dataSource.id}") - Fox.failure(e.getMessage) - case f: Failure => - logError( - request, - s"Bucket loading for layer ${request.dataLayer.name} of dataset ${request.dataSource.id.team}/${request.dataSource.id.name} at ${readInstruction.bucket} failed: ${Fox - .failureChainAsString(f, includeStackTraces = true)}" + datasetErrorLoggingService match { + case Some(d) => + d.withErrorLogging( + request.dataSource.id, + s"Bucket loading for layer ${request.dataLayer.name} at ${readInstruction.bucket}, cuboid: ${request.cuboid}", + bucketProvider.load(readInstruction), + e => applicationHealthService.foreach(a => a.pushError(e)) ) - f.toFox - case Full(data) => - if (data.length == 0) { - val msg = - s"Bucket provider returned Full, but data is zero-length array. Layer ${request.dataLayer.name} of dataset ${request.dataSource.id}, ${request.cuboid}" - logger.warn(msg) - Fox.failure(msg) - } else Fox.successful(data) - case other => other.toFox + case None => bucketProvider.load(readInstruction) } } else Fox.empty @@ -216,11 +190,4 @@ class BinaryDataService(val dataBaseDir: Path, (closedAgglomerateFileHandleCount, clearedBucketProviderCount, removedChunksCount) } - - def logError(request: DataServiceDataRequest, msg: String): Unit = - if (datasetErrorLoggingService.exists(_.shouldLog(request.dataSource.id.team, request.dataSource.id.name))) { - logger.error(msg) - datasetErrorLoggingService.foreach(_.registerLogged(request.dataSource.id.team, request.dataSource.id.name)) - } - } diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DatasetErrorLoggingService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DatasetErrorLoggingService.scala index 5eb93d923eb..7b1bc7b0ac4 100644 --- 
a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DatasetErrorLoggingService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DatasetErrorLoggingService.scala @@ -2,8 +2,12 @@ package com.scalableminds.webknossos.datastore.services import org.apache.pekko.actor.ActorSystem import com.google.inject.name.Named +import com.scalableminds.util.tools.{Fox, TextUtils} +import com.scalableminds.util.tools.Fox.box2Fox import com.scalableminds.webknossos.datastore.helpers.IntervalScheduler +import com.scalableminds.webknossos.datastore.models.datasource.DataSourceId import com.typesafe.scalalogging.LazyLogging +import net.liftweb.common.{Empty, Failure, Full} import play.api.inject.ApplicationLifecycle import javax.inject.Inject @@ -41,4 +45,39 @@ class DatasetErrorLoggingService @Inject()( recentErrors.remove((organizationId, datasetName)) override protected def tick(): Unit = recentErrors.clear() + + def withErrorLogging(dataSourceId: DataSourceId, + label: String, + resultFox: Fox[Array[Byte]], + onInternalError: InternalError => Unit = _ => ()): Fox[Array[Byte]] = + resultFox.futureBox.flatMap { + case Full(data) => + if (data.length == 0) { + val msg = s"Zero-length array returned while $label for $dataSourceId" + if (shouldLog(dataSourceId.team, dataSourceId.name)) { + logger.warn(msg) + registerLogged(dataSourceId.team, dataSourceId.name) + } + Fox.failure(msg) + } else { + Fox.successful(data) + } + case Failure(msg, Full(e: InternalError), _) => + logger.error(s"Caught internal error: $label for $dataSourceId", e) + onInternalError(e) + Fox.failure(msg, Full(e)) + case Failure(msg, Full(exception), _) => + if (shouldLog(dataSourceId.team, dataSourceId.name)) { + logger.error(s"Error while $label for $dataSourceId Stack trace: ${TextUtils.stackTraceAsString(exception)} ") + registerLogged(dataSourceId.team, dataSourceId.name) + } + Fox.failure(msg, Full(exception)) + case Failure(msg, Empty, _) => + if 
(shouldLog(dataSourceId.team, dataSourceId.name)) { + logger.error(s"Error while $label for $dataSourceId, Empty failure") + registerLogged(dataSourceId.team, dataSourceId.name) + } + Fox.failure(msg) + case other => other.toFox + } } From 1d6219a1e561cb9e770fb0fdc08b3b0822d3215d Mon Sep 17 00:00:00 2001 From: frcroth Date: Mon, 25 Nov 2024 10:58:53 +0100 Subject: [PATCH 4/6] Move application health service to dataseterrorloggingservice --- .../webknossos/datastore/services/BinaryDataService.scala | 4 +--- .../datastore/services/BinaryDataServiceHolder.scala | 2 -- .../datastore/services/DatasetErrorLoggingService.scala | 8 +++----- .../tracings/editablemapping/EditableMappingService.scala | 2 +- .../tracings/volume/VolumeTracingService.scala | 2 +- 5 files changed, 6 insertions(+), 12 deletions(-) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/BinaryDataService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/BinaryDataService.scala index 96bfd1f066a..2e1dc225a45 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/BinaryDataService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/BinaryDataService.scala @@ -20,7 +20,6 @@ import scala.concurrent.ExecutionContext class BinaryDataService(val dataBaseDir: Path, val agglomerateServiceOpt: Option[AgglomerateService], remoteSourceDescriptorServiceOpt: Option[RemoteSourceDescriptorService], - val applicationHealthService: Option[ApplicationHealthService], sharedChunkContentsCache: Option[AlfuCache[String, MultiArray]], datasetErrorLoggingService: Option[DatasetErrorLoggingService])(implicit ec: ExecutionContext) extends FoxImplicits @@ -102,8 +101,7 @@ class BinaryDataService(val dataBaseDir: Path, d.withErrorLogging( request.dataSource.id, s"Bucket loading for layer ${request.dataLayer.name} at ${readInstruction.bucket}, cuboid: ${request.cuboid}", - 
bucketProvider.load(readInstruction), - e => applicationHealthService.foreach(a => a.pushError(e)) + bucketProvider.load(readInstruction) ) case None => bucketProvider.load(readInstruction) } diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/BinaryDataServiceHolder.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/BinaryDataServiceHolder.scala index de6071e9489..9bca015857b 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/BinaryDataServiceHolder.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/BinaryDataServiceHolder.scala @@ -22,7 +22,6 @@ import scala.concurrent.ExecutionContext class BinaryDataServiceHolder @Inject()( config: DataStoreConfig, agglomerateService: AgglomerateService, - applicationHealthService: ApplicationHealthService, remoteSourceDescriptorService: RemoteSourceDescriptorService, datasetErrorLoggingService: DatasetErrorLoggingService)(implicit ec: ExecutionContext) extends LazyLogging { @@ -46,7 +45,6 @@ class BinaryDataServiceHolder @Inject()( Paths.get(config.Datastore.baseFolder), Some(agglomerateService), Some(remoteSourceDescriptorService), - Some(applicationHealthService), Some(sharedChunkContentsCache), Some(datasetErrorLoggingService) ) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DatasetErrorLoggingService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DatasetErrorLoggingService.scala index 7b1bc7b0ac4..3204be2f606 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DatasetErrorLoggingService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DatasetErrorLoggingService.scala @@ -16,6 +16,7 @@ import scala.concurrent.duration._ class DatasetErrorLoggingService @Inject()( val lifecycle: ApplicationLifecycle, + val applicationHealthService: 
ApplicationHealthService, @Named("webknossos-datastore") val system: ActorSystem)(implicit val ec: ExecutionContext) extends IntervalScheduler with LazyLogging { @@ -46,10 +47,7 @@ class DatasetErrorLoggingService @Inject()( override protected def tick(): Unit = recentErrors.clear() - def withErrorLogging(dataSourceId: DataSourceId, - label: String, - resultFox: Fox[Array[Byte]], - onInternalError: InternalError => Unit = _ => ()): Fox[Array[Byte]] = + def withErrorLogging(dataSourceId: DataSourceId, label: String, resultFox: Fox[Array[Byte]]): Fox[Array[Byte]] = resultFox.futureBox.flatMap { case Full(data) => if (data.length == 0) { @@ -64,7 +62,7 @@ class DatasetErrorLoggingService @Inject()( } case Failure(msg, Full(e: InternalError), _) => logger.error(s"Caught internal error: $label for $dataSourceId", e) - onInternalError(e) + applicationHealthService.pushError(e) Fox.failure(msg, Full(e)) case Failure(msg, Full(exception), _) => if (shouldLog(dataSourceId.team, dataSourceId.name)) { diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala index 0f9f857ca61..147c052f1c5 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala @@ -103,7 +103,7 @@ class EditableMappingService @Inject()( private def generateId: String = UUID.randomUUID.toString - val binaryDataService = new BinaryDataService(Paths.get(""), None, None, None, None, None) + val binaryDataService = new BinaryDataService(Paths.get(""), None, None, None, None) adHocMeshServiceHolder.tracingStoreAdHocMeshConfig = (binaryDataService, 30 seconds, 1) private val adHocMeshService: AdHocMeshService 
= adHocMeshServiceHolder.tracingStoreAdHocMeshService diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala index 77dca95a5b2..5dfe5bb6723 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala @@ -88,7 +88,7 @@ class VolumeTracingService @Inject()( /* We want to reuse the bucket loading methods from binaryDataService for the volume tracings, however, it does not actually load anything from disk, unlike its “normal” instance in the datastore (only from the volume tracing store) */ - private val binaryDataService = new BinaryDataService(Paths.get(""), None, None, None, None, None) + private val binaryDataService = new BinaryDataService(Paths.get(""), None, None, None, None) adHocMeshServiceHolder.tracingStoreAdHocMeshConfig = (binaryDataService, 30 seconds, 1) val adHocMeshService: AdHocMeshService = adHocMeshServiceHolder.tracingStoreAdHocMeshService From 921ec9ccf70450aa42fb819d21022bcdc1d37e70 Mon Sep 17 00:00:00 2001 From: frcroth Date: Wed, 27 Nov 2024 09:16:27 +0100 Subject: [PATCH 5/6] Improve grammar in errors Co-authored-by: Florian M --- .../webknossos/datastore/services/BinaryDataService.scala | 4 ++-- .../datastore/services/DatasetErrorLoggingService.scala | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/BinaryDataService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/BinaryDataService.scala index 2e1dc225a45..fc467ed2000 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/BinaryDataService.scala +++ 
b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/BinaryDataService.scala @@ -57,7 +57,7 @@ class BinaryDataService(val dataBaseDir: Path, conversionFunc: Array[Byte] => Fox[Array[Byte]], request: DataServiceDataRequest): Fox[Array[Byte]] = if (isNecessary) datasetErrorLoggingService match { - case Some(value) => value.withErrorLogging(request.dataSource.id, "Conversion", conversionFunc(inputArray)) + case Some(value) => value.withErrorLogging(request.dataSource.id, "converting bucket data", conversionFunc(inputArray)) case None => conversionFunc(inputArray) } else Full(inputArray) @@ -100,7 +100,7 @@ class BinaryDataService(val dataBaseDir: Path, case Some(d) => d.withErrorLogging( request.dataSource.id, - s"Bucket loading for layer ${request.dataLayer.name} at ${readInstruction.bucket}, cuboid: ${request.cuboid}", + s"loading bucket for layer ${request.dataLayer.name} at ${readInstruction.bucket}, cuboid: ${request.cuboid}", bucketProvider.load(readInstruction) ) case None => bucketProvider.load(readInstruction) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DatasetErrorLoggingService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DatasetErrorLoggingService.scala index 3204be2f606..769f4c75db8 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DatasetErrorLoggingService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DatasetErrorLoggingService.scala @@ -61,7 +61,7 @@ class DatasetErrorLoggingService @Inject()( Fox.successful(data) } case Failure(msg, Full(e: InternalError), _) => - logger.error(s"Caught internal error: $label for $dataSourceId", e) + logger.error(s"Caught internal error while $label for $dataSourceId:", e) applicationHealthService.pushError(e) Fox.failure(msg, Full(e)) case Failure(msg, Full(exception), _) => From 75495aef7fbb4332d8768c2dc89caef6b8609465 Mon Sep 17 00:00:00 2001 
From: frcroth Date: Wed, 27 Nov 2024 09:28:40 +0100 Subject: [PATCH 6/6] Lint --- .../webknossos/datastore/services/BinaryDataService.scala | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/BinaryDataService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/BinaryDataService.scala index fc467ed2000..5591ff5522a 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/BinaryDataService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/BinaryDataService.scala @@ -57,8 +57,9 @@ class BinaryDataService(val dataBaseDir: Path, conversionFunc: Array[Byte] => Fox[Array[Byte]], request: DataServiceDataRequest): Fox[Array[Byte]] = if (isNecessary) datasetErrorLoggingService match { - case Some(value) => value.withErrorLogging(request.dataSource.id, "converting bucket data", conversionFunc(inputArray)) - case None => conversionFunc(inputArray) + case Some(value) => + value.withErrorLogging(request.dataSource.id, "converting bucket data", conversionFunc(inputArray)) + case None => conversionFunc(inputArray) } else Full(inputArray) val requestsCount = requests.length