Merge branch 'master' of github.com:scalableminds/webknossos into navbar-menu

* 'master' of github.com:scalableminds/webknossos:
  Fix style of connectome tab by restricting directory tree style (#6864)
  Remove legacy datasets tab and use new compact route (#6834)
  Fix basicauth for exploring remote http datasets (#6866)
hotzenklotz committed Feb 22, 2023
2 parents 1f8844e + 8815ab5 commit ef89a0e
Showing 39 changed files with 686 additions and 1,019 deletions.
6 changes: 5 additions & 1 deletion CHANGELOG.unreleased.md
@@ -22,15 +22,19 @@ For upgrade instructions, please check the [migration guide](MIGRATIONS.released
- Rewrote the database tools in `tools/postgres` to JavaScript and added support for non-default Postgres username-password combinations. [#6803](https://github.com/scalableminds/webknossos/pull/6803)
- Added owner name to organization page. [#6811](https://github.com/scalableminds/webknossos/pull/6811)
- Removed multiline <TextArea> support from <InputComponent>. [#6839](https://github.com/scalableminds/webknossos/pull/6839)
- Improved the performance of the dataset table in the dashboard. [#6834](https://github.com/scalableminds/webknossos/pull/6834)
- Updated the styling and background of login, password reset/change and sign up pages. [#6844](https://github.com/scalableminds/webknossos/pull/6844)
- Replaced date handling and formatting library momentjs with dayjs. [#6849](https://github.com/scalableminds/webknossos/pull/6849)

### Fixed
- Fixed saving allowed teams in dataset settings. [#6817](https://github.com/scalableminds/webknossos/pull/6817)
- Fixed log streaming in Voxelytics workflow reports. [#6828](https://github.com/scalableminds/webknossos/pull/6828) [#6831](https://github.com/scalableminds/webknossos/pull/6831)
- Fixed some layouting issues with line breaks in segment list/dataset info tab [#6799](https://github.com/scalableminds/webknossos/pull/6799)
- Fixed some layouting issues with line breaks in segment list/dataset info tab. [#6799](https://github.com/scalableminds/webknossos/pull/6799)
- Fixed basic auth for exploring remote http datasets. [#6866](https://github.com/scalableminds/webknossos/pull/6866)
- Fixed the layouting in the connectome tab. [#6864](https://github.com/scalableminds/webknossos/pull/6864)
- Fixed deprecation warnings for antd's <Menu> component in Navbar. [#6860](https://github.com/scalableminds/webknossos/pull/6860)

### Removed
- Removed the old Datasets tab in favor of the Dataset Folders tab. [#6834](https://github.com/scalableminds/webknossos/pull/6834)

### Breaking Changes
8 changes: 5 additions & 3 deletions app/controllers/AnnotationController.scala
@@ -24,6 +24,7 @@ import models.user.{User, UserDAO, UserService}
import oxalis.mail.{MailchimpClient, MailchimpTag}
import oxalis.security.{URLSharing, WkEnv}
import play.api.i18n.{Messages, MessagesProvider}
import play.api.libs.json.Json.WithDefaultValues
import play.api.libs.json._
import play.api.mvc.{Action, AnyContent, PlayBodyParsers}
import utils.{ObjectId, WkConf}
@@ -34,11 +35,13 @@ import scala.concurrent.duration._

case class AnnotationLayerParameters(typ: AnnotationLayerType,
fallbackLayerName: Option[String],
autoFallbackLayer: Boolean = false,
mappingName: Option[String] = None,
resolutionRestrictions: Option[ResolutionRestrictions],
name: String)
name: Option[String])
object AnnotationLayerParameters {
implicit val jsonFormat: OFormat[AnnotationLayerParameters] = Json.format[AnnotationLayerParameters]
implicit val jsonFormat: OFormat[AnnotationLayerParameters] =
Json.using[WithDefaultValues].format[AnnotationLayerParameters]
}

@Api
@@ -55,7 +58,6 @@ class AnnotationController @Inject()(
teamService: TeamService,
projectDAO: ProjectDAO,
teamDAO: TeamDAO,
annotationPrivateLinkDAO: AnnotationPrivateLinkDAO,
timeSpanService: TimeSpanService,
annotationMerger: AnnotationMerger,
tracingStoreService: TracingStoreService,
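A quick illustration of the `Json.using[WithDefaultValues]` format adopted above for `AnnotationLayerParameters`: keys missing from the request body fall back to the case-class defaults, so clients that do not yet send the new `autoFallbackLayer` field still parse. This is a minimal sketch; `LayerParams` is a hypothetical stand-in, not the real parameter class.

```scala
import play.api.libs.json._
import play.api.libs.json.Json.WithDefaultValues

// Illustrative stand-in for AnnotationLayerParameters: the new field carries a default.
case class LayerParams(typ: String, autoFallbackLayer: Boolean = false, name: Option[String] = None)

object LayerParams {
  // The WithDefaultValues macro flavor uses the declared defaults for absent JSON keys.
  implicit val jsonFormat: OFormat[LayerParams] = Json.using[WithDefaultValues].format[LayerParams]
}

object WithDefaultValuesDemo extends App {
  // An older client that only sends "typ" still parses: autoFallbackLayer = false, name = None.
  println(Json.parse("""{"typ": "Skeleton"}""").validate[LayerParams])
  // prints JsSuccess(LayerParams(Skeleton,false,None),)
}
```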
70 changes: 56 additions & 14 deletions app/controllers/DataSetController.scala
@@ -26,6 +26,21 @@ import javax.inject.Inject
import scala.collection.mutable.ListBuffer
import scala.concurrent.duration._
import scala.concurrent.{ExecutionContext, Future}
import com.scalableminds.util.tools.TristateOptionJsonHelper

case class DatasetUpdateParameters(
description: Option[Option[String]] = Some(None),
displayName: Option[Option[String]] = Some(None),
sortingKey: Option[Instant],
isPublic: Option[Boolean],
tags: Option[List[String]],
folderId: Option[ObjectId]
)

object DatasetUpdateParameters extends TristateOptionJsonHelper {
implicit val jsonFormat: OFormat[DatasetUpdateParameters] =
Json.configured(tristateOptionParsing).format[DatasetUpdateParameters]
}

@Api
class DataSetController @Inject()(userService: UserService,
@@ -244,7 +259,6 @@ class DataSetController @Inject()(userService: UserService,
requestingUser,
Some(organization),
Some(dataStore),
skipResolutions = true,
requestingUserTeamManagerMemberships) ?~> Messages("dataset.list.writesFailed", d.name)
}
} yield resultByDataStore
@@ -319,9 +333,45 @@ class DataSetController @Inject()(userService: UserService,
.clientFor(dataSet)(GlobalAccessContext)
.flatMap(_.findPositionWithData(organizationName, dataSet, datalayer.name).flatMap(posWithData =>
bool2Fox(posWithData.value("position") != JsNull))) ?~> "dataSet.loadingDataFailed"
} yield {
Ok("Ok")
}
} yield Ok("Ok")
}

@ApiOperation(
value =
"""Update information for a dataset.
Expects:
- As JSON object body with all optional keys (missing keys will not be updated, keys set to null will be set to null):
- description (string, nullable)
- displayName (string, nullable)
- sortingKey (timestamp)
- isPublic (boolean)
- tags (list of string)
- folderId (string)
- As GET parameters:
- organizationName (string): url-safe name of the organization owning the dataset
- dataSetName (string): name of the dataset
""",
nickname = "datasetUpdatePartial"
)
@ApiImplicitParams(
Array(
new ApiImplicitParam(name = "datasetPartialUpdateInformation",
required = true,
dataTypeClass = classOf[JsObject],
paramType = "body")))
def updatePartial(@ApiParam(value = "The url-safe name of the organization owning the dataset",
example = "sample_organization") organizationName: String,
@ApiParam(value = "The name of the dataset") dataSetName: String): Action[DatasetUpdateParameters] =
sil.SecuredAction.async(validateJson[DatasetUpdateParameters]) { implicit request =>
for {
dataSet <- dataSetDAO.findOneByNameAndOrganization(dataSetName, request.identity._organization) ?~> notFoundMessage(
dataSetName) ~> NOT_FOUND
_ <- Fox.assertTrue(dataSetService.isEditableBy(dataSet, Some(request.identity))) ?~> "notAllowed" ~> FORBIDDEN
_ <- dataSetDAO.updatePartial(dataSet._id, request.body)
updated <- dataSetDAO.findOneByNameAndOrganization(dataSetName, request.identity._organization)
_ = analyticsService.track(ChangeDatasetSettingsEvent(request.identity, updated))
js <- dataSetService.publicWrites(updated, Some(request.identity))
} yield Ok(js)
}

@ApiOperation(
@@ -348,9 +398,7 @@ Expects:
paramType = "body")))
def update(@ApiParam(value = "The url-safe name of the organization owning the dataset",
example = "sample_organization") organizationName: String,
@ApiParam(value = "The name of the dataset") dataSetName: String,
@ApiParam(value = "If true, the resolutions of the dataset layers in the returned json are skipped")
skipResolutions: Option[Boolean]): Action[JsValue] =
@ApiParam(value = "The name of the dataset") dataSetName: String): Action[JsValue] =
sil.SecuredAction.async(parse.json) { implicit request =>
withJsonBodyUsing(dataSetPublicReads) {
case (description, displayName, sortingKey, isPublic, tags, folderId) =>
@@ -367,13 +415,7 @@ Expects:
_ <- dataSetDAO.updateTags(dataSet._id, tags)
updated <- dataSetDAO.findOneByNameAndOrganization(dataSetName, request.identity._organization)
_ = analyticsService.track(ChangeDatasetSettingsEvent(request.identity, updated))
organization <- organizationDAO.findOne(updated._organization)(GlobalAccessContext)
dataStore <- dataSetService.dataStoreFor(updated)
js <- dataSetService.publicWrites(updated,
Some(request.identity),
Some(organization),
Some(dataStore),
skipResolutions.getOrElse(false))
js <- dataSetService.publicWrites(updated, Some(request.identity))
} yield Ok(Json.toJson(js))
}
}
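The new `DatasetUpdateParameters`/`updatePartial` pair above relies on tristate parsing for the nullable fields: an absent key means "leave unchanged", an explicit `null` means "clear the value", and a concrete value means "set it", which maps onto `Option[Option[T]]`. Below is a minimal, dependency-light sketch of that mapping; it is not the `TristateOptionJsonHelper` implementation itself, and the field names are taken from the doc string above.

```scala
import play.api.libs.json._

// absent key      -> None               (leave the field unchanged)
// key set to null -> Some(None)         (clear the field)
// key with value  -> Some(Some(value))  (set the field)
def tristate[T: Reads](body: JsObject, key: String): Option[Option[T]] =
  body.value.get(key).map {
    case JsNull => None
    case other  => Some(other.as[T])
  }

object TristateDemo extends App {
  val body = Json.obj("description" -> JsNull, "displayName" -> "My dataset")
  println(tristate[String](body, "description")) // Some(None): clear the description
  println(tristate[String](body, "displayName")) // Some(Some(My dataset)): set a new display name
  println(tristate[Boolean](body, "isPublic"))   // None: key absent, leave unchanged
}
```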
26 changes: 16 additions & 10 deletions app/controllers/LegacyApiController.scala
@@ -425,20 +425,26 @@ class LegacyApiController @Inject()(annotationController: AnnotationController,
if (request.body.typ == "volume") None
else
Some(
AnnotationLayerParameters(AnnotationLayerType.Skeleton,
request.body.fallbackLayerName,
None,
request.body.resolutionRestrictions,
name = AnnotationLayer.defaultSkeletonLayerName))
AnnotationLayerParameters(
AnnotationLayerType.Skeleton,
request.body.fallbackLayerName,
autoFallbackLayer = false,
None,
request.body.resolutionRestrictions,
name = Some(AnnotationLayer.defaultSkeletonLayerName)
))
val volumeParameters =
if (request.body.typ == "skeleton") None
else
Some(
AnnotationLayerParameters(AnnotationLayerType.Volume,
request.body.fallbackLayerName,
None,
request.body.resolutionRestrictions,
name = AnnotationLayer.defaultVolumeLayerName))
AnnotationLayerParameters(
AnnotationLayerType.Volume,
request.body.fallbackLayerName,
autoFallbackLayer = false,
None,
request.body.resolutionRestrictions,
name = Some(AnnotationLayer.defaultVolumeLayerName)
))
List(skeletonParameters, volumeParameters).flatten
}

38 changes: 28 additions & 10 deletions app/models/annotation/AnnotationService.scala
@@ -180,10 +180,10 @@ class AnnotationService @Inject()(
VolumeTracingDefaults.largestSegmentId
}

def addAnnotationLayer(annotation: Annotation,
organizationName: String,
annotationLayerParameters: AnnotationLayerParameters)(implicit ec: ExecutionContext,
ctx: DBAccessContext): Fox[Unit] =
def addAnnotationLayer(
annotation: Annotation,
organizationName: String,
annotationLayerParameters: AnnotationLayerParameters)(implicit ctx: DBAccessContext): Fox[Unit] =
for {
dataSet <- dataSetDAO.findOne(annotation._dataSet) ?~> "dataSet.notFoundForAnnotation"
dataSource <- dataSetService.dataSourceFor(dataSet).flatMap(_.toUsable) ?~> "dataSource.notFound"
@@ -207,6 +207,12 @@ class AnnotationService @Inject()(
existingAnnotationLayers: List[AnnotationLayer] = List())(
implicit ctx: DBAccessContext): Fox[List[AnnotationLayer]] = {

def getAutoFallbackLayerName: Option[String] =
dataSource.dataLayers.find {
case _: SegmentationLayer => true
case _ => false
}.map(_.name)

def getFallbackLayer(fallbackLayerName: String): Fox[SegmentationLayer] =
for {
fallbackLayer <- dataSource.dataLayers
@@ -227,7 +233,7 @@ class AnnotationService @Inject()(
oldPrecedenceLayerProperties: Option[RedundantTracingProperties]): Fox[AnnotationLayer] =
for {
client <- tracingStoreService.clientFor(dataSet)
tracingId <- annotationLayerParameters.typ match {
tracingIdAndName <- annotationLayerParameters.typ match {
case AnnotationLayerType.Skeleton =>
val skeleton = SkeletonTracingDefaults.createInstance.copy(
dataSetName = dataSet.name,
@@ -242,10 +248,17 @@ class AnnotationService @Inject()(
userBoundingBoxes = p.userBoundingBoxes
)
}.getOrElse(skeleton)
client.saveSkeletonTracing(skeletonAdapted)
for {
tracingId <- client.saveSkeletonTracing(skeletonAdapted)
name = annotationLayerParameters.name.getOrElse(
AnnotationLayer.defaultNameForType(annotationLayerParameters.typ))
} yield (tracingId, name)
case AnnotationLayerType.Volume =>
val autoFallbackLayerName =
if (annotationLayerParameters.autoFallbackLayer) getAutoFallbackLayerName else None
val fallbackLayerName = annotationLayerParameters.fallbackLayerName.orElse(autoFallbackLayerName)
for {
fallbackLayer <- Fox.runOptional(annotationLayerParameters.fallbackLayerName)(getFallbackLayer)
fallbackLayer <- Fox.runOptional(fallbackLayerName)(getFallbackLayer)
volumeTracing <- createVolumeTracing(
dataSource,
datasetOrganizationName,
@@ -263,11 +276,14 @@ class AnnotationService @Inject()(
)
}.getOrElse(volumeTracing)
volumeTracingId <- client.saveVolumeTracing(volumeTracingAdapted)
} yield volumeTracingId
name = annotationLayerParameters.name
.orElse(autoFallbackLayerName)
.getOrElse(AnnotationLayer.defaultNameForType(annotationLayerParameters.typ))
} yield (volumeTracingId, name)
case _ =>
Fox.failure(s"Unknown AnnotationLayerType: ${annotationLayerParameters.typ}")
}
} yield AnnotationLayer(tracingId, annotationLayerParameters.typ, annotationLayerParameters.name)
} yield AnnotationLayer(tracingIdAndName._1, annotationLayerParameters.typ, tracingIdAndName._2)

def fetchOldPrecedenceLayer: Fox[Option[FetchedAnnotationLayer]] =
if (existingAnnotationLayers.isEmpty) Fox.successful(None)
Expand Down Expand Up @@ -364,9 +380,11 @@ class AnnotationService @Inject()(
newAnnotationLayerParameters = AnnotationLayerParameters(
newAnnotationLayerType,
usedFallbackLayerName,
autoFallbackLayer = false,
None,
Some(ResolutionRestrictions.empty),
AnnotationLayer.defaultNameForType(newAnnotationLayerType))
Some(AnnotationLayer.defaultNameForType(newAnnotationLayerType))
)
_ <- addAnnotationLayer(annotation, organizationName, newAnnotationLayerParameters) ?~> "makeHybrid.createTracings.failed"
} yield ()

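The volume branch above now resolves the layer name by precedence: an explicitly requested name wins, otherwise the name of the auto-selected fallback segmentation layer is reused, otherwise the per-type default from `AnnotationLayer.defaultNameForType` applies. A small sketch of that precedence; the literal "Volume" default here is only an illustrative placeholder.

```scala
// Hedged sketch of the name resolution used for new volume annotation layers above.
def resolveLayerName(requested: Option[String],
                     autoFallbackLayerName: Option[String],
                     defaultForType: String): String =
  requested.orElse(autoFallbackLayerName).getOrElse(defaultForType)

// resolveLayerName(Some("my volume"), Some("segmentation"), "Volume") == "my volume"
// resolveLayerName(None, Some("segmentation"), "Volume")              == "segmentation"
// resolveLayerName(None, None, "Volume")                              == "Volume"
```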
42 changes: 32 additions & 10 deletions app/models/binary/DataSet.scala
@@ -15,6 +15,7 @@ import com.scalableminds.webknossos.datastore.models.datasource.{
DataLayerLike => DataLayer
}
import com.scalableminds.webknossos.schema.Tables._
import controllers.DatasetUpdateParameters

import javax.inject.Inject
import models.organization.OrganizationDAO
@@ -62,7 +63,7 @@ case class DataSet(
case class DatasetCompactInfo(
id: ObjectId,
name: String,
organizationName: String,
owningOrganization: String,
folderId: ObjectId,
isActive: Boolean,
displayName: String,
@@ -268,7 +269,6 @@ class DataSetDAO @Inject()(sqlClient: SqlClient,
LEFT JOIN webknossos.dataSet_lastUsedTimes lastUsedTimes
ON lastUsedTimes._dataSet = d._id AND lastUsedTimes._user = u._id
"""
_ = logger.info(query.debugInfo)
rows <- run(
query.as[(ObjectId, String, String, ObjectId, Boolean, String, Instant, Boolean, Instant, String, String)])
} yield
@@ -277,7 +277,7 @@ class DataSetDAO @Inject()(sqlClient: SqlClient,
DatasetCompactInfo(
id = row._1,
name = row._2,
organizationName = row._3,
owningOrganization = row._3,
folderId = row._4,
isActive = row._5,
displayName = row._6,
@@ -426,9 +426,33 @@ class DataSetDAO @Inject()(sqlClient: SqlClient,
for {
accessQuery <- readAccessQuery
_ <- run(
q"update webknossos.datasets_ set sharingToken = $sharingToken where name = $name and _organization = $organizationId and $accessQuery".asUpdate)
q"update webknossos.datasets set sharingToken = $sharingToken where name = $name and _organization = $organizationId and $accessQuery".asUpdate)
} yield ()

def updatePartial(dataSetId: ObjectId, params: DatasetUpdateParameters)(implicit ctx: DBAccessContext): Fox[Unit] = {
val setQueries = List(
params.description.map(d => q"description = $d"),
params.displayName.map(v => q"displayName = $v"),
params.sortingKey.map(v => q"sortingKey = $v"),
params.isPublic.map(v => q"isPublic = $v"),
params.tags.map(v => q"tags = $v"),
params.folderId.map(v => q"_folder = $v"),
).flatten
if (setQueries.isEmpty) {
Fox.successful(())
} else {
for {
_ <- assertUpdateAccess(dataSetId)
setQueriesJoined = SqlToken.joinBySeparator(setQueries, ", ")
_ <- run(q"""UPDATE webknossos.datasets
SET
$setQueriesJoined
WHERE _id = $dataSetId
""".asUpdate)
} yield ()
}
}

def updateFields(_id: ObjectId,
description: Option[String],
displayName: Option[String],
@@ -595,16 +619,14 @@ class DataSetDataLayerDAO @Inject()(sqlClient: SqlClient, dataSetResolutionsDAO:
implicit ec: ExecutionContext)
extends SimpleSQLDAO(sqlClient) {

private def parseRow(row: DatasetLayersRow, dataSetId: ObjectId, skipResolutions: Boolean): Fox[DataLayer] = {
private def parseRow(row: DatasetLayersRow, dataSetId: ObjectId): Fox[DataLayer] = {
val result: Fox[Fox[DataLayer]] = for {
category <- Category.fromString(row.category).toFox ?~> "Could not parse Layer Category"
boundingBox <- BoundingBox
.fromSQL(parseArrayLiteral(row.boundingbox).map(_.toInt))
.toFox ?~> "Could not parse boundingbox"
elementClass <- ElementClass.fromString(row.elementclass).toFox ?~> "Could not parse Layer ElementClass"
standinResolutions: Option[List[Vec3Int]] = if (skipResolutions) Some(List.empty[Vec3Int]) else None
resolutions <- Fox.fillOption(standinResolutions)(
dataSetResolutionsDAO.findDataResolutionForLayer(dataSetId, row.name) ?~> "Could not find resolution for layer")
resolutions <- dataSetResolutionsDAO.findDataResolutionForLayer(dataSetId, row.name) ?~> "Could not find resolution for layer"
defaultViewConfigurationOpt <- Fox.runOptional(row.defaultviewconfiguration)(
JsonHelper.parseAndValidateJson[LayerViewConfiguration](_))
adminViewConfigurationOpt <- Fox.runOptional(row.adminviewconfiguration)(
@@ -642,10 +664,10 @@ class DataSetDataLayerDAO @Inject()(sqlClient: SqlClient, dataSetResolutionsDAO:
result.flatten
}

def findAllForDataSet(dataSetId: ObjectId, skipResolutions: Boolean = false): Fox[List[DataLayer]] =
def findAllForDataSet(dataSetId: ObjectId): Fox[List[DataLayer]] =
for {
rows <- run(DatasetLayers.filter(_._Dataset === dataSetId.id).result).map(_.toList)
rowsParsed <- Fox.combined(rows.map(parseRow(_, dataSetId, skipResolutions)))
rowsParsed <- Fox.combined(rows.map(parseRow(_, dataSetId)))
} yield rowsParsed

private def insertLayerQuery(dataSetId: ObjectId, layer: DataLayer): SqlAction[Int, NoStream, Effect] =
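`DataSetDAO.updatePartial` above assembles its SET clause only from the parameters that were actually provided, so omitted fields never appear in the statement and an empty parameter set skips the query. A dependency-free sketch of that pattern follows, using plain strings instead of the `SqlToken` interpolator and only two illustrative fields.

```scala
// Build a partial UPDATE from only the provided fields; None means "leave unchanged".
case class PartialUpdate(isPublic: Option[Boolean], displayName: Option[String])

def updateStatement(id: String, params: PartialUpdate): Option[String] = {
  val setFragments = List(
    params.isPublic.map(v => s"isPublic = $v"),
    params.displayName.map(v => s"displayName = '$v'") // real code should bind parameters, not inline values
  ).flatten
  if (setFragments.isEmpty) None // nothing to update, skip the query entirely
  else Some(s"UPDATE webknossos.datasets SET ${setFragments.mkString(", ")} WHERE _id = '$id'")
}

// updateStatement("ds1", PartialUpdate(Some(true), None))
//   == Some("UPDATE webknossos.datasets SET isPublic = true WHERE _id = 'ds1'")
```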