Merge branch 'master' into cursor-bbox-tool
knollengewaechs authored Dec 16, 2024
2 parents 584f05d + 6febb57 commit bc8d014
Showing 6 changed files with 31 additions and 22 deletions.
1 change: 1 addition & 0 deletions CHANGELOG.unreleased.md
@@ -27,6 +27,7 @@ For upgrade instructions, please check the [migration guide](MIGRATIONS.released
- A "Locked by anonymous user" banner is no longer shown when opening public editable annotations of other organizations. [#8273](https://github.com/scalableminds/webknossos/pull/8273)
- Fixed a bug that uploading a zarr dataset with an already existing `datasource-properties.json` file failed. [#8268](https://github.com/scalableminds/webknossos/pull/8268)
- Fixed the organization switching feature for datasets opened via old links. [#8257](https://github.com/scalableminds/webknossos/pull/8257)
- Fixed that uploading an NML file without an organization id failed. Now the user's organization is used as fallback. [#8277](https://github.com/scalableminds/webknossos/pull/8277)
- Fixed that the frontend did not ensure a minium length for annotation layer names. Moreover, names starting with a `.` are also disallowed now. [#8244](https://github.com/scalableminds/webknossos/pull/8244)
- Fixed a bug where in the add remote dataset view the dataset name setting was not in sync with the datasource setting of the advanced tab making the form not submittable. [#8245](https://github.com/scalableminds/webknossos/pull/8245)
- Fix read and update dataset route for versions 8 and lower. [#8263](https://github.com/scalableminds/webknossos/pull/8263)
6 changes: 4 additions & 2 deletions app/controllers/AnnotationIOController.scala
@@ -105,10 +105,12 @@ class AnnotationIOController @Inject()(
request.body.dataParts("createGroupForEachFile").headOption.contains("true")
val overwritingDatasetId: Option[String] =
request.body.dataParts.get("datasetId").flatMap(_.headOption)
val userOrganizationId = request.identity._organization
val attachedFiles = request.body.files.map(f => (f.ref.path.toFile, f.filename))
for {
parsedFiles <- annotationUploadService
.extractFromFiles(attachedFiles, SharedParsingParameters(useZipName = true, overwritingDatasetId))
parsedFiles <- annotationUploadService.extractFromFiles(
attachedFiles,
SharedParsingParameters(useZipName = true, overwritingDatasetId, userOrganizationId))
parsedFilesWrapped = annotationUploadService.wrapOrPrefixGroups(parsedFiles.parseResults,
shouldCreateGroupForEachFile)
parseResultsFiltered: List[NmlParseResult] = parsedFilesWrapped.filter(_.succeeded)
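Both this controller and TaskController (next file) follow the same pattern: the organization of the authenticated uploader is read once and handed to the parser via SharedParsingParameters. A minimal, self-contained sketch of that construction; the placeholder values stand in for request.identity._organization and the submitted form data:

```scala
import models.annotation.SharedParsingParameters

// Sketch only: placeholder values instead of the real request data.
val userOrganizationId: String = "sample_organization"  // would come from request.identity._organization
val overwritingDatasetId: Option[String] = None         // would come from the "datasetId" form part

// userOrganizationId has no default value and follows a defaulted parameter,
// so named arguments are the clearest way to build the parameters object.
val parsingParameters = SharedParsingParameters(
  useZipName = true,
  overwritingDatasetId = overwritingDatasetId,
  userOrganizationId = userOrganizationId
)
```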
6 changes: 4 additions & 2 deletions app/controllers/TaskController.scala
@@ -82,14 +82,16 @@ class TaskController @Inject()(taskCreationService: TaskCreationService,
_ <- bool2Fox(inputFiles.nonEmpty) ?~> "nml.file.notFound"
jsonString <- body.dataParts.get("formJSON").flatMap(_.headOption) ?~> "format.json.missing"
params <- JsonHelper.parseAndValidateJson[NmlTaskParameters](jsonString) ?~> "task.create.failed"
userOrganizationId = request.identity._organization
_ <- taskCreationService.assertBatchLimit(inputFiles.length, List(params.taskTypeId))
taskTypeIdValidated <- ObjectId.fromString(params.taskTypeId) ?~> "taskType.id.invalid"
taskType <- taskTypeDAO.findOne(taskTypeIdValidated) ?~> "taskType.notFound" ~> NOT_FOUND
project <- projectDAO
.findOneByNameAndOrganization(params.projectName, request.identity._organization) ?~> "project.notFound" ~> NOT_FOUND
_ <- Fox.assertTrue(userService.isTeamManagerOrAdminOf(request.identity, project._team))
extractedFiles <- nmlService.extractFromFiles(inputFiles.map(f => (f.ref.path.toFile, f.filename)),
SharedParsingParameters(useZipName = false, isTaskUpload = true))
extractedFiles <- nmlService.extractFromFiles(
inputFiles.map(f => (f.ref.path.toFile, f.filename)),
SharedParsingParameters(useZipName = false, isTaskUpload = true, userOrganizationId = userOrganizationId))
extractedTracingBoxesRaw: List[TracingBoxContainer] = extractedFiles.toBoxes
extractedTracingBoxes: List[TracingBoxContainer] <- taskCreationService.addVolumeFallbackBoundingBoxes(
extractedTracingBoxesRaw)
4 changes: 2 additions & 2 deletions app/models/annotation/AnnotationUploadService.scala
@@ -27,6 +27,7 @@ case class UploadedVolumeLayer(tracing: VolumeTracing, dataZipLocation: String,

case class SharedParsingParameters(useZipName: Boolean,
overwritingDatasetId: Option[String] = None,
userOrganizationId: String,
isTaskUpload: Boolean = false)

class AnnotationUploadService @Inject()(tempFileService: TempFileService, nmlParser: NmlParser) extends LazyLogging {
@@ -53,8 +54,7 @@ class AnnotationUploadService @Inject()(tempFileService: TempFileService, nmlPar
nmlParser.parse(
name,
inputStream,
sharedParsingParameters.overwritingDatasetId,
sharedParsingParameters.isTaskUpload,
sharedParsingParameters,
basePath
)
parserOutput.futureBox.map {
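For reference, the parameter object as it reads after this commit (assembled from the hunk above):

```scala
// Shared parameters passed from the upload controllers into NML parsing.
case class SharedParsingParameters(useZipName: Boolean,
                                   overwritingDatasetId: Option[String] = None,
                                   userOrganizationId: String,
                                   isTaskUpload: Boolean = false)
```

Because userOrganizationId has no default, every call site (AnnotationIOController, TaskController, and the NML test suite) has to supply it explicitly, which accounts for most of the remaining hunks.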
29 changes: 15 additions & 14 deletions app/models/annotation/nml/NmlParser.scala
@@ -24,7 +24,7 @@ import com.scalableminds.webknossos.tracingstore.tracings.ColorGenerator
import com.scalableminds.webknossos.tracingstore.tracings.skeleton.updating.TreeType
import com.scalableminds.webknossos.tracingstore.tracings.skeleton.{MultiComponentTreeSplitter, TreeValidator}
import com.typesafe.scalalogging.LazyLogging
import models.annotation.UploadedVolumeLayer
import models.annotation.{SharedParsingParameters, UploadedVolumeLayer}
import models.dataset.DatasetDAO
import net.liftweb.common.Box._
import net.liftweb.common.{Box, Empty, Failure, Full}
@@ -48,13 +48,12 @@ class NmlParser @Inject()(datasetDAO: DatasetDAO) extends LazyLogging with Proto

def parse(name: String,
nmlInputStream: InputStream,
overwritingDatasetId: Option[String],
isTaskUpload: Boolean,
sharedParsingParameters: SharedParsingParameters,
basePath: Option[String] = None)(implicit m: MessagesProvider,
ec: ExecutionContext,
ctx: DBAccessContext): Fox[NmlParseSuccessWithoutFile] =
for {
nmlParsedParameters <- getParametersFromNML(nmlInputStream, name, overwritingDatasetId, isTaskUpload).toFox
nmlParsedParameters <- getParametersFromNML(nmlInputStream, name, sharedParsingParameters).toFox
parsedResult <- nmlParametersToResult(nmlParsedParameters, basePath)
} yield parsedResult

@@ -121,10 +120,10 @@ class NmlParser @Inject()(datasetDAO: DatasetDAO) extends LazyLogging with Proto
} yield
NmlParseSuccessWithoutFile(skeletonTracingOpt, volumeLayers, dataset._id, nmlParams.description, nmlParams.wkUrl)

private def getParametersFromNML(nmlInputStream: InputStream,
name: String,
overwritingDatasetId: Option[String],
isTaskUpload: Boolean)(implicit m: MessagesProvider): Box[NmlParsedParameters] =
private def getParametersFromNML(
nmlInputStream: InputStream,
name: String,
sharedParsingParameters: SharedParsingParameters)(implicit m: MessagesProvider): Box[NmlParsedParameters] =
try {
val nmlData = XML.load(nmlInputStream)
for {
@@ -143,21 +142,23 @@ class NmlParser @Inject()(datasetDAO: DatasetDAO) extends LazyLogging with Proto
_ <- TreeValidator.validateTrees(treesSplit, treeGroupsAfterSplit, branchPoints, comments)
additionalAxisProtos <- parseAdditionalAxes(parameters \ "additionalAxes")
datasetName = parseDatasetName(parameters \ "experiment")
datasetIdOpt = if (overwritingDatasetId.isDefined) overwritingDatasetId
datasetIdOpt = if (sharedParsingParameters.overwritingDatasetId.isDefined)
sharedParsingParameters.overwritingDatasetId
else parseDatasetId(parameters \ "experiment")
organizationId = parseOrganizationId(parameters \ "experiment")
organizationId = parseOrganizationId(parameters \ "experiment", sharedParsingParameters.userOrganizationId)
description = parseDescription(parameters \ "experiment")
wkUrl = parseWkUrl(parameters \ "experiment")
activeNodeId = parseActiveNode(parameters \ "activeNode")
(editPosition, editPositionAdditionalCoordinates) = parseEditPosition(parameters \ "editPosition")
.getOrElse((SkeletonTracingDefaults.editPosition, Seq()))
editRotation = parseEditRotation(parameters \ "editRotation").getOrElse(SkeletonTracingDefaults.editRotation)
zoomLevel = parseZoomLevel(parameters \ "zoomLevel").getOrElse(SkeletonTracingDefaults.zoomLevel)
taskBoundingBox: Option[BoundingBox] = if (isTaskUpload) parseTaskBoundingBox(parameters \ "taskBoundingBox")
taskBoundingBox: Option[BoundingBox] = if (sharedParsingParameters.isTaskUpload)
parseTaskBoundingBox(parameters \ "taskBoundingBox")
else None
userBoundingBoxes = parseBoundingBoxes(parameters \ "userBoundingBox")
} yield {
if (!isTaskUpload) {
if (!sharedParsingParameters.isTaskUpload) {
parseTaskBoundingBoxAsUserBoundingBox(parameters \ "taskBoundingBox", userBoundingBoxes)
.map(asUserBoundingBox => userBoundingBoxes :+ asUserBoundingBox)
}
@@ -379,8 +380,8 @@ class NmlParser @Inject()(datasetDAO: DatasetDAO) extends LazyLogging with Proto
private def parseWkUrl(nodes: NodeSeq): Option[String] =
nodes.headOption.map(node => getSingleAttribute(node, "wkUrl"))

private def parseOrganizationId(nodes: NodeSeq): String =
nodes.headOption.map(node => getSingleAttribute(node, "organization")).getOrElse("")
private def parseOrganizationId(nodes: NodeSeq, fallbackOrganization: String): String =
nodes.headOption.flatMap(node => getSingleAttributeOpt(node, "organization")).getOrElse(fallbackOrganization)

private def parseActiveNode(nodes: NodeSeq): Option[Int] =
nodes.headOption.flatMap(node => getSingleAttribute(node, "id").toIntOpt)
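The behavioural core of the fix is the new fallback in parseOrganizationId: if the NML's experiment element carries no organization attribute, the uploader's organization is used instead of the previous empty string. A simplified, self-contained sketch of that logic (resolveOrganizationId and the empty-attribute handling are illustrative; the production code uses the parser's own getSingleAttributeOpt helper):

```scala
import scala.xml.{NodeSeq, XML}

// Illustrative re-implementation of the fallback, not the production helper.
def resolveOrganizationId(experimentNodes: NodeSeq, fallbackOrganizationId: String): String =
  experimentNodes.headOption
    .map(node => (node \ "@organization").text)
    .filter(_.nonEmpty)               // treat a missing or empty attribute as "not set"
    .getOrElse(fallbackOrganizationId)

val withOrganization    = XML.loadString("""<experiment name="my_dataset" organization="sample_org"/>""")
val withoutOrganization = XML.loadString("""<experiment name="my_dataset"/>""")

resolveOrganizationId(withOrganization, "uploader_org")    // "sample_org"
resolveOrganizationId(withoutOrganization, "uploader_org") // "uploader_org"
```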
7 changes: 5 additions & 2 deletions test/backend/NMLUnitTestSuite.scala
@@ -8,6 +8,7 @@ import com.scalableminds.webknossos.datastore.SkeletonTracing._
import com.scalableminds.webknossos.datastore.geometry.{AdditionalAxisProto, Vec2IntProto}
import com.scalableminds.webknossos.datastore.models.annotation.{AnnotationLayer, FetchedAnnotationLayer}
import com.scalableminds.webknossos.tracingstore.tracings.volume.VolumeDataZipFormat
import models.annotation.SharedParsingParameters
import models.annotation.nml.{NmlParseSuccessWithoutFile, NmlParser, NmlWriter}
import net.liftweb.common.{Box, Full}
import org.apache.commons.io.output.ByteArrayOutputStream
@@ -54,8 +55,10 @@ class NMLUnitTestSuite @Inject()(nmlParser: NmlParser) extends PlaySpec {
.parse(
"",
new ByteArrayInputStream(array),
overwritingDatasetId = None,
isTaskUpload = true,
SharedParsingParameters(useZipName = false,
overwritingDatasetId = None,
userOrganizationId = "testOrganization",
isTaskUpload = true),
basePath = None
)(messagesProvider, scala.concurrent.ExecutionContext.global, GlobalAccessContext)
.futureBox,
