diff --git a/CHANGELOG.unreleased.md b/CHANGELOG.unreleased.md index fea864b28a..eb8d153be5 100644 --- a/CHANGELOG.unreleased.md +++ b/CHANGELOG.unreleased.md @@ -26,6 +26,7 @@ For upgrade instructions, please check the [migration guide](MIGRATIONS.released - A "Locked by anonymous user" banner is no longer shown when opening public editable annotations of other organizations. [#8273](https://github.com/scalableminds/webknossos/pull/8273) - Fixed a bug that uploading a zarr dataset with an already existing `datasource-properties.json` file failed. [#8268](https://github.com/scalableminds/webknossos/pull/8268) - Fixed the organization switching feature for datasets opened via old links. [#8257](https://github.com/scalableminds/webknossos/pull/8257) +- Fixed that uploading an NML file without an organization id failed. Now the user's organization is used as a fallback. [#8277](https://github.com/scalableminds/webknossos/pull/8277) - Fixed that the frontend did not ensure a minium length for annotation layer names. Moreover, names starting with a `.` are also disallowed now. [#8244](https://github.com/scalableminds/webknossos/pull/8244) - Fixed a bug where in the add remote dataset view the dataset name setting was not in sync with the datasource setting of the advanced tab making the form not submittable. [#8245](https://github.com/scalableminds/webknossos/pull/8245) - Fix read and update dataset route for versions 8 and lower. 
[#8263](https://github.com/scalableminds/webknossos/pull/8263) diff --git a/app/controllers/AnnotationIOController.scala b/app/controllers/AnnotationIOController.scala index 183a6e11d0..a587c9d954 100755 --- a/app/controllers/AnnotationIOController.scala +++ b/app/controllers/AnnotationIOController.scala @@ -105,10 +105,12 @@ class AnnotationIOController @Inject()( request.body.dataParts("createGroupForEachFile").headOption.contains("true") val overwritingDatasetId: Option[String] = request.body.dataParts.get("datasetId").flatMap(_.headOption) + val userOrganizationId = request.identity._organization val attachedFiles = request.body.files.map(f => (f.ref.path.toFile, f.filename)) for { - parsedFiles <- annotationUploadService - .extractFromFiles(attachedFiles, SharedParsingParameters(useZipName = true, overwritingDatasetId)) + parsedFiles <- annotationUploadService.extractFromFiles( + attachedFiles, + SharedParsingParameters(useZipName = true, overwritingDatasetId, userOrganizationId)) parsedFilesWrapped = annotationUploadService.wrapOrPrefixGroups(parsedFiles.parseResults, shouldCreateGroupForEachFile) parseResultsFiltered: List[NmlParseResult] = parsedFilesWrapped.filter(_.succeeded) diff --git a/app/controllers/TaskController.scala b/app/controllers/TaskController.scala index dc56612d3f..76daf25323 100755 --- a/app/controllers/TaskController.scala +++ b/app/controllers/TaskController.scala @@ -82,14 +82,16 @@ class TaskController @Inject()(taskCreationService: TaskCreationService, _ <- bool2Fox(inputFiles.nonEmpty) ?~> "nml.file.notFound" jsonString <- body.dataParts.get("formJSON").flatMap(_.headOption) ?~> "format.json.missing" params <- JsonHelper.parseAndValidateJson[NmlTaskParameters](jsonString) ?~> "task.create.failed" + userOrganizationId = request.identity._organization _ <- taskCreationService.assertBatchLimit(inputFiles.length, List(params.taskTypeId)) taskTypeIdValidated <- ObjectId.fromString(params.taskTypeId) ?~> "taskType.id.invalid" taskType <- 
taskTypeDAO.findOne(taskTypeIdValidated) ?~> "taskType.notFound" ~> NOT_FOUND project <- projectDAO .findOneByNameAndOrganization(params.projectName, request.identity._organization) ?~> "project.notFound" ~> NOT_FOUND _ <- Fox.assertTrue(userService.isTeamManagerOrAdminOf(request.identity, project._team)) - extractedFiles <- nmlService.extractFromFiles(inputFiles.map(f => (f.ref.path.toFile, f.filename)), - SharedParsingParameters(useZipName = false, isTaskUpload = true)) + extractedFiles <- nmlService.extractFromFiles( + inputFiles.map(f => (f.ref.path.toFile, f.filename)), + SharedParsingParameters(useZipName = false, isTaskUpload = true, userOrganizationId = userOrganizationId)) extractedTracingBoxesRaw: List[TracingBoxContainer] = extractedFiles.toBoxes extractedTracingBoxes: List[TracingBoxContainer] <- taskCreationService.addVolumeFallbackBoundingBoxes( extractedTracingBoxesRaw) diff --git a/app/models/annotation/AnnotationUploadService.scala b/app/models/annotation/AnnotationUploadService.scala index bf4814957a..da9968c9bb 100644 --- a/app/models/annotation/AnnotationUploadService.scala +++ b/app/models/annotation/AnnotationUploadService.scala @@ -27,6 +27,7 @@ case class UploadedVolumeLayer(tracing: VolumeTracing, dataZipLocation: String, case class SharedParsingParameters(useZipName: Boolean, overwritingDatasetId: Option[String] = None, + userOrganizationId: String, isTaskUpload: Boolean = false) class AnnotationUploadService @Inject()(tempFileService: TempFileService, nmlParser: NmlParser) extends LazyLogging { @@ -53,8 +54,7 @@ class AnnotationUploadService @Inject()(tempFileService: TempFileService, nmlPar nmlParser.parse( name, inputStream, - sharedParsingParameters.overwritingDatasetId, - sharedParsingParameters.isTaskUpload, + sharedParsingParameters, basePath ) parserOutput.futureBox.map { diff --git a/app/models/annotation/nml/NmlParser.scala b/app/models/annotation/nml/NmlParser.scala index 99a1672778..05c36ff3f8 100755 --- 
a/app/models/annotation/nml/NmlParser.scala +++ b/app/models/annotation/nml/NmlParser.scala @@ -24,7 +24,7 @@ import com.scalableminds.webknossos.tracingstore.tracings.ColorGenerator import com.scalableminds.webknossos.tracingstore.tracings.skeleton.updating.TreeType import com.scalableminds.webknossos.tracingstore.tracings.skeleton.{MultiComponentTreeSplitter, TreeValidator} import com.typesafe.scalalogging.LazyLogging -import models.annotation.UploadedVolumeLayer +import models.annotation.{SharedParsingParameters, UploadedVolumeLayer} import models.dataset.DatasetDAO import net.liftweb.common.Box._ import net.liftweb.common.{Box, Empty, Failure, Full} @@ -48,13 +48,12 @@ class NmlParser @Inject()(datasetDAO: DatasetDAO) extends LazyLogging with Proto def parse(name: String, nmlInputStream: InputStream, - overwritingDatasetId: Option[String], - isTaskUpload: Boolean, + sharedParsingParameters: SharedParsingParameters, basePath: Option[String] = None)(implicit m: MessagesProvider, ec: ExecutionContext, ctx: DBAccessContext): Fox[NmlParseSuccessWithoutFile] = for { - nmlParsedParameters <- getParametersFromNML(nmlInputStream, name, overwritingDatasetId, isTaskUpload).toFox + nmlParsedParameters <- getParametersFromNML(nmlInputStream, name, sharedParsingParameters).toFox parsedResult <- nmlParametersToResult(nmlParsedParameters, basePath) } yield parsedResult @@ -121,10 +120,10 @@ class NmlParser @Inject()(datasetDAO: DatasetDAO) extends LazyLogging with Proto } yield NmlParseSuccessWithoutFile(skeletonTracingOpt, volumeLayers, dataset._id, nmlParams.description, nmlParams.wkUrl) - private def getParametersFromNML(nmlInputStream: InputStream, - name: String, - overwritingDatasetId: Option[String], - isTaskUpload: Boolean)(implicit m: MessagesProvider): Box[NmlParsedParameters] = + private def getParametersFromNML( + nmlInputStream: InputStream, + name: String, + sharedParsingParameters: SharedParsingParameters)(implicit m: MessagesProvider): Box[NmlParsedParameters] 
= try { val nmlData = XML.load(nmlInputStream) for { @@ -143,9 +142,10 @@ class NmlParser @Inject()(datasetDAO: DatasetDAO) extends LazyLogging with Proto _ <- TreeValidator.validateTrees(treesSplit, treeGroupsAfterSplit, branchPoints, comments) additionalAxisProtos <- parseAdditionalAxes(parameters \ "additionalAxes") datasetName = parseDatasetName(parameters \ "experiment") - datasetIdOpt = if (overwritingDatasetId.isDefined) overwritingDatasetId + datasetIdOpt = if (sharedParsingParameters.overwritingDatasetId.isDefined) + sharedParsingParameters.overwritingDatasetId else parseDatasetId(parameters \ "experiment") - organizationId = parseOrganizationId(parameters \ "experiment") + organizationId = parseOrganizationId(parameters \ "experiment", sharedParsingParameters.userOrganizationId) description = parseDescription(parameters \ "experiment") wkUrl = parseWkUrl(parameters \ "experiment") activeNodeId = parseActiveNode(parameters \ "activeNode") @@ -153,11 +153,12 @@ class NmlParser @Inject()(datasetDAO: DatasetDAO) extends LazyLogging with Proto .getOrElse((SkeletonTracingDefaults.editPosition, Seq())) editRotation = parseEditRotation(parameters \ "editRotation").getOrElse(SkeletonTracingDefaults.editRotation) zoomLevel = parseZoomLevel(parameters \ "zoomLevel").getOrElse(SkeletonTracingDefaults.zoomLevel) - taskBoundingBox: Option[BoundingBox] = if (isTaskUpload) parseTaskBoundingBox(parameters \ "taskBoundingBox") + taskBoundingBox: Option[BoundingBox] = if (sharedParsingParameters.isTaskUpload) + parseTaskBoundingBox(parameters \ "taskBoundingBox") else None userBoundingBoxes = parseBoundingBoxes(parameters \ "userBoundingBox") } yield { - if (!isTaskUpload) { + if (!sharedParsingParameters.isTaskUpload) { parseTaskBoundingBoxAsUserBoundingBox(parameters \ "taskBoundingBox", userBoundingBoxes) .map(asUserBoundingBox => userBoundingBoxes :+ asUserBoundingBox) } @@ -379,8 +380,8 @@ class NmlParser @Inject()(datasetDAO: DatasetDAO) extends LazyLogging with Proto 
private def parseWkUrl(nodes: NodeSeq): Option[String] = nodes.headOption.map(node => getSingleAttribute(node, "wkUrl")) - private def parseOrganizationId(nodes: NodeSeq): String = - nodes.headOption.map(node => getSingleAttribute(node, "organization")).getOrElse("") + private def parseOrganizationId(nodes: NodeSeq, fallbackOrganization: String): String = + nodes.headOption.flatMap(node => getSingleAttributeOpt(node, "organization")).getOrElse(fallbackOrganization) private def parseActiveNode(nodes: NodeSeq): Option[Int] = nodes.headOption.flatMap(node => getSingleAttribute(node, "id").toIntOpt) diff --git a/test/backend/NMLUnitTestSuite.scala b/test/backend/NMLUnitTestSuite.scala index db896f38bf..c629b83647 100644 --- a/test/backend/NMLUnitTestSuite.scala +++ b/test/backend/NMLUnitTestSuite.scala @@ -8,6 +8,7 @@ import com.scalableminds.webknossos.datastore.SkeletonTracing._ import com.scalableminds.webknossos.datastore.geometry.{AdditionalAxisProto, Vec2IntProto} import com.scalableminds.webknossos.datastore.models.annotation.{AnnotationLayer, FetchedAnnotationLayer} import com.scalableminds.webknossos.tracingstore.tracings.volume.VolumeDataZipFormat +import models.annotation.SharedParsingParameters import models.annotation.nml.{NmlParseSuccessWithoutFile, NmlParser, NmlWriter} import net.liftweb.common.{Box, Full} import org.apache.commons.io.output.ByteArrayOutputStream @@ -54,8 +55,10 @@ class NMLUnitTestSuite @Inject()(nmlParser: NmlParser) extends PlaySpec { .parse( "", new ByteArrayInputStream(array), - overwritingDatasetId = None, - isTaskUpload = true, + SharedParsingParameters(useZipName = false, + overwritingDatasetId = None, + userOrganizationId = "testOrganization", + isTaskUpload = true), basePath = None )(messagesProvider, scala.concurrent.ExecutionContext.global, GlobalAccessContext) .futureBox,