
Merge branch 'master' into misc-layouting
philippotto authored Dec 2, 2024
2 parents 023b628 + 0a2afa4 commit 18097ce
Showing 76 changed files with 804 additions and 840 deletions.
1 change: 1 addition & 0 deletions CHANGELOG.unreleased.md
@@ -15,6 +15,7 @@ For upgrade instructions, please check the [migration guide](MIGRATIONS.released
- Added the total volume of a dataset to a tooltip in the dataset info tab. [#8229](https://github.com/scalableminds/webknossos/pull/8229)

### Changed
- Renamed "resolution" to "magnification" in more places within the codebase, including local variables. [#8168](https://github.com/scalableminds/webknossos/pull/8168)
- Reading image files on datastore filesystem is now done asynchronously. [#8126](https://github.com/scalableminds/webknossos/pull/8126)
- Datasets can now be renamed and can have duplicate names. [#8075](https://github.com/scalableminds/webknossos/pull/8075)
- Improved error messages for starting jobs on datasets from other organizations. [#8181](https://github.com/scalableminds/webknossos/pull/8181)
105 changes: 63 additions & 42 deletions app/controllers/DatasetController.scala
@@ -176,70 +176,91 @@ class DatasetController @Inject()(userService: UserService,
// Change output format to return only a compact list with essential information on the datasets
compact: Option[Boolean]
): Action[AnyContent] = sil.UserAwareAction.async { implicit request =>
for {
folderIdValidated <- Fox.runOptional(folderId)(ObjectId.fromString)
uploaderIdValidated <- Fox.runOptional(uploaderId)(ObjectId.fromString)
organizationIdOpt = if (onlyMyOrganization.getOrElse(false))
request.identity.map(_._organization)
else
organizationId
js <- if (compact.getOrElse(false)) {
for {
datasetInfos <- datasetDAO.findAllCompactWithSearch(
isActive,
isUnreported,
organizationIdOpt,
folderIdValidated,
uploaderIdValidated,
searchQuery,
request.identity.map(_._id),
recursive.getOrElse(false),
limitOpt = limit
)
} yield Json.toJson(datasetInfos)
} else {
for {
datasets <- datasetDAO.findAllWithSearch(isActive,
isUnreported,
organizationIdOpt,
folderIdValidated,
uploaderIdValidated,
searchQuery,
recursive.getOrElse(false),
limit) ?~> "dataset.list.failed"
js <- listGrouped(datasets, request.identity) ?~> "dataset.list.failed"
} yield Json.toJson(js)
}
_ = Fox.runOptional(request.identity)(user => userDAO.updateLastActivity(user._id))
} yield addRemoteOriginHeaders(Ok(js))
log() {
for {
folderIdValidated <- Fox.runOptional(folderId)(ObjectId.fromString)
uploaderIdValidated <- Fox.runOptional(uploaderId)(ObjectId.fromString)
organizationIdOpt = if (onlyMyOrganization.getOrElse(false))
request.identity.map(_._organization)
else
organizationId
js <- if (compact.getOrElse(false)) {
for {
datasetInfos <- datasetDAO.findAllCompactWithSearch(
isActive,
isUnreported,
organizationIdOpt,
folderIdValidated,
uploaderIdValidated,
searchQuery,
request.identity.map(_._id),
recursive.getOrElse(false),
limitOpt = limit
)
} yield Json.toJson(datasetInfos)
} else {
for {
_ <- Fox.successful(())
_ = logger.info(
s"Requesting listing datasets with isActive '$isActive', isUnreported '$isUnreported', organizationId '$organizationIdOpt', folderId '$folderIdValidated', uploaderId '$uploaderIdValidated', searchQuery '$searchQuery', recursive '$recursive', limit '$limit'")
datasets <- datasetDAO.findAllWithSearch(isActive,
isUnreported,
organizationIdOpt,
folderIdValidated,
uploaderIdValidated,
searchQuery,
recursive.getOrElse(false),
limit) ?~> "dataset.list.failed" ?~> "Dataset listing failed"
_ = logger.info(s"Found ${datasets.size} datasets successfully")
js <- listGrouped(datasets, request.identity) ?~> "dataset.list.failed" ?~> "Grouping datasets failed"
} yield Json.toJson(js)
}
_ = Fox.runOptional(request.identity)(user => userDAO.updateLastActivity(user._id))
} yield addRemoteOriginHeaders(Ok(js))
}
}

private def listGrouped(datasets: List[Dataset], requestingUser: Option[User])(
implicit ctx: DBAccessContext,
m: MessagesProvider): Fox[List[JsObject]] =
for {
_ <- Fox.successful(())
_ = logger.info(s"datasets: $datasets, requestingUser: ${requestingUser.map(_._id)}")
requestingUserTeamManagerMemberships <- Fox.runOptional(requestingUser)(user =>
userService.teamManagerMembershipsFor(user._id))
userService
.teamManagerMembershipsFor(user._id)) ?~> s"Could not find team manager memberships for user ${requestingUser
.map(_._id)}"
_ = logger.info(
s"requestingUserTeamManagerMemberships: ${requestingUserTeamManagerMemberships.map(_.map(_.toString))}")
groupedByOrga = datasets.groupBy(_._organization).toList
js <- Fox.serialCombined(groupedByOrga) { byOrgaTuple: (String, List[Dataset]) =>
for {
organization <- organizationDAO.findOne(byOrgaTuple._1)(GlobalAccessContext)
_ <- Fox.successful(())
_ = logger.info(s"byOrgaTuple orga: ${byOrgaTuple._1}, datasets: ${byOrgaTuple._2}")
organization <- organizationDAO.findOne(byOrgaTuple._1)(GlobalAccessContext) ?~> s"Could not find organization ${byOrgaTuple._1}"
groupedByDataStore = byOrgaTuple._2.groupBy(_._dataStore).toList
_ <- Fox.serialCombined(groupedByDataStore) { byDataStoreTuple: (String, List[Dataset]) =>
{
logger.info(s"datastore: ${byDataStoreTuple._1}, datasets: ${byDataStoreTuple._2}")
Fox.successful(())
}
}
result <- Fox.serialCombined(groupedByDataStore) { byDataStoreTuple: (String, List[Dataset]) =>
for {
dataStore <- dataStoreDAO.findOneByName(byDataStoreTuple._1.trim)(GlobalAccessContext)
dataStore <- dataStoreDAO.findOneByName(byDataStoreTuple._1.trim)(GlobalAccessContext) ?~>
s"Could not find data store ${byDataStoreTuple._1}"
resultByDataStore: Seq[JsObject] <- Fox.serialCombined(byDataStoreTuple._2) { d =>
datasetService.publicWrites(
d,
requestingUser,
Some(organization),
Some(dataStore),
requestingUserTeamManagerMemberships) ?~> Messages("dataset.list.writesFailed", d.name)
}
} ?~> "Could not find public writes for datasets"
} yield resultByDataStore
}
} ?~> s"Could not group by datastore for datasets ${byOrgaTuple._2.map(_._id)}"
} yield result.flatten
}
} ?~> s"Could not group by organization for datasets ${datasets.map(_._id)}"
} yield js.flatten

def accessList(datasetId: String): Action[AnyContent] = sil.SecuredAction.async { implicit request =>
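A note on the error-handling pattern this commit applies throughout the backend: Fox's `?~>` operator attaches a human-readable context message to a failing step, and chaining it twice (as in `?~> "dataset.list.failed" ?~> "Grouping datasets failed"` above) stacks context from innermost to outermost. Below is a minimal, self-contained sketch of the idea; `MiniFox` and everything in it are illustrative stand-ins, not webknossos' actual `Fox` implementation.

```scala
import scala.concurrent.Future
import scala.concurrent.ExecutionContext.Implicits.global

// Illustrative stand-in for Fox: an async computation that fails with a message chain.
final case class MiniFox[+A](run: Future[Either[List[String], A]]) {

  // Attach outer context to a failure, like `?~>` in the diff above.
  def ?~>(msg: String): MiniFox[A] =
    MiniFox(run.map {
      case Left(msgs) => Left(msg :: msgs) // prepend the outer message
      case ok         => ok
    })

  def flatMap[B](f: A => MiniFox[B]): MiniFox[B] =
    MiniFox(run.flatMap {
      case Left(msgs) => Future.successful(Left(msgs))
      case Right(a)   => f(a).run
    })

  def map[B](f: A => B): MiniFox[B] = MiniFox(run.map(_.map(f)))
}

object MiniFox {
  def successful[A](a: A): MiniFox[A] = MiniFox(Future.successful(Right(a)))
  def failure(msg: String): MiniFox[Nothing] = MiniFox(Future.successful(Left(List(msg))))
}

// Usage: each step of a for-comprehension can add context, innermost first.
val listing: MiniFox[String] =
  (for {
    ds <- MiniFox.failure("dataset.notFound") ?~> "dataset.list.failed"
  } yield ds) ?~> "Request handling failed"
// The failure now carries:
// List("Request handling failed", "dataset.list.failed", "dataset.notFound")
```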
14 changes: 9 additions & 5 deletions app/models/dataset/Dataset.scala
@@ -115,12 +115,14 @@ class DatasetDAO @Inject()(sqlClient: SqlClient, datasetLayerDAO: DatasetLayerDA

protected def parse(r: DatasetsRow): Fox[Dataset] =
for {
voxelSize <- parseVoxelSizeOpt(r.voxelsizefactor, r.voxelsizeunit)
voxelSize <- parseVoxelSizeOpt(r.voxelsizefactor, r.voxelsizeunit) ?~> "could not parse dataset voxel size"
defaultViewConfigurationOpt <- Fox.runOptional(r.defaultviewconfiguration)(
JsonHelper.parseAndValidateJson[DatasetViewConfiguration](_))
JsonHelper
.parseAndValidateJson[DatasetViewConfiguration](_)) ?~> "could not parse dataset default view configuration"
adminViewConfigurationOpt <- Fox.runOptional(r.adminviewconfiguration)(
JsonHelper.parseAndValidateJson[DatasetViewConfiguration](_))
metadata <- JsonHelper.parseAndValidateJson[JsArray](r.metadata)
JsonHelper
.parseAndValidateJson[DatasetViewConfiguration](_)) ?~> "could not parse dataset admin view configuration"
metadata <- JsonHelper.parseAndValidateJson[JsArray](r.metadata) ?~> "could not parse dataset metadata"
} yield {
Dataset(
ObjectId(r._Id),
@@ -218,9 +218,11 @@ class DatasetDAO @Inject()(sqlClient: SqlClient, datasetLayerDAO: DatasetLayerDA
includeSubfolders,
None,
None)
_ = logger.info(s"Requesting datasets with selection predicates '$selectionPredicates'")
limitQuery = limitOpt.map(l => q"LIMIT $l").getOrElse(q"")
_ = logger.info("Requesting datasets with query")
r <- run(q"SELECT $columns FROM $existingCollectionName WHERE $selectionPredicates $limitQuery".as[DatasetsRow])
parsed <- parseAll(r)
parsed <- parseAll(r) ?~> "Parsing datasets failed"
} yield parsed

def findAllCompactWithSearch(isActiveOpt: Option[Boolean] = None,
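One detail from the hunk above worth a note: the optional LIMIT clause is composed as `limitOpt.map(l => q"LIMIT $l").getOrElse(q"")` and spliced into the query. A rough plain-string sketch of that shape (the production code uses webknossos' typed `q` interpolator, which also binds values safely; the function and table names below are illustrative):

```scala
// Sketch: compose a query with an optional LIMIT fragment (plain strings only;
// the real code goes through a typed SQL interpolator with safe parameter binding).
def buildDatasetQuery(predicates: String, limitOpt: Option[Int]): String = {
  val limitClause = limitOpt.map(l => s"LIMIT $l").getOrElse("")
  s"SELECT * FROM webknossos.datasets WHERE $predicates $limitClause".trim
}

buildDatasetQuery("isUsable", Some(10)) // SELECT * FROM webknossos.datasets WHERE isUsable LIMIT 10
buildDatasetQuery("isUsable", None)     // SELECT * FROM webknossos.datasets WHERE isUsable
```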
22 changes: 11 additions & 11 deletions app/models/dataset/DatasetService.scala
@@ -349,19 +349,19 @@ class DatasetService @Inject()(organizationDAO: OrganizationDAO,
organizationDAO.findOne(dataset._organization) ?~> "organization.notFound"
}
dataStore <- Fox.fillOption(dataStore) {
dataStoreFor(dataset)
dataStoreFor(dataset) ?~> s"fetching data store failed for dataset ${dataset._id}"
}
teams <- teamService.allowedTeamsForDataset(dataset, cumulative = false, requestingUserOpt) ?~> "dataset.list.fetchAllowedTeamsFailed"
teamsJs <- Fox.serialCombined(teams)(t => teamService.publicWrites(t, Some(organization))) ?~> "dataset.list.teamWritesFailed"
teamsCumulative <- teamService.allowedTeamsForDataset(dataset, cumulative = true, requestingUserOpt) ?~> "dataset.list.fetchAllowedTeamsFailed"
teamsCumulativeJs <- Fox.serialCombined(teamsCumulative)(t => teamService.publicWrites(t, Some(organization))) ?~> "dataset.list.teamWritesFailed"
logoUrl <- logoUrlFor(dataset, Some(organization)) ?~> "dataset.list.fetchLogoUrlFailed"
isEditable <- isEditableBy(dataset, requestingUserOpt, requestingUserTeamManagerMemberships) ?~> "dataset.list.isEditableCheckFailed"
lastUsedByUser <- lastUsedTimeFor(dataset._id, requestingUserOpt) ?~> "dataset.list.fetchLastUsedTimeFailed"
dataStoreJs <- dataStoreService.publicWrites(dataStore) ?~> "dataset.list.dataStoreWritesFailed"
dataSource <- dataSourceFor(dataset, Some(organization)) ?~> "dataset.list.fetchDataSourceFailed"
teams <- teamService.allowedTeamsForDataset(dataset, cumulative = false, requestingUserOpt) ?~> "dataset.list.fetchAllowedTeamsFailed" ?~> s"for dataset ${dataset._id}"
teamsJs <- Fox.serialCombined(teams)(t => teamService.publicWrites(t, Some(organization))) ?~> "dataset.list.teamWritesFailed" ?~> s"for dataset ${dataset._id}"
teamsCumulative <- teamService.allowedTeamsForDataset(dataset, cumulative = true, requestingUserOpt) ?~> "dataset.list.fetchAllowedTeamsFailed" ?~> s"for dataset ${dataset._id}"
teamsCumulativeJs <- Fox.serialCombined(teamsCumulative)(t => teamService.publicWrites(t, Some(organization))) ?~> "dataset.list.teamWritesFailed" ?~> s"for dataset ${dataset._id}"
logoUrl <- logoUrlFor(dataset, Some(organization)) ?~> "dataset.list.fetchLogoUrlFailed" ?~> s"for dataset ${dataset._id}"
isEditable <- isEditableBy(dataset, requestingUserOpt, requestingUserTeamManagerMemberships) ?~> "dataset.list.isEditableCheckFailed" ?~> s"for dataset ${dataset._id}"
lastUsedByUser <- lastUsedTimeFor(dataset._id, requestingUserOpt) ?~> "dataset.list.fetchLastUsedTimeFailed" ?~> s"for dataset ${dataset._id}"
dataStoreJs <- dataStoreService.publicWrites(dataStore) ?~> "dataset.list.dataStoreWritesFailed" ?~> s"for dataset ${dataset._id}"
dataSource <- dataSourceFor(dataset, Some(organization)) ?~> "dataset.list.fetchDataSourceFailed" ?~> s"for dataset ${dataset._id}"
usedStorageBytes <- Fox.runIf(requestingUserOpt.exists(u => u._organization == dataset._organization))(
organizationDAO.getUsedStorageForDataset(dataset._id))
organizationDAO.getUsedStorageForDataset(dataset._id)) ?~> s"fetching used storage failed for ${dataset._id}"
} yield {
Json.obj(
"id" -> dataset._id,
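`publicWrites` above also gates several lookups on optional context: `Fox.runIf(cond)(effect)` and `Fox.runOptional(opt)(f)` run their effect only when applicable and yield an `Option`. A sketch of those semantics, building on the `MiniFox` toy from the earlier note (behavior inferred from the call sites in this diff, not Fox's actual signatures):

```scala
// Option-gated helpers in the style of Fox.runIf / Fox.runOptional (illustrative;
// assumes the MiniFox sketch shown after the DatasetController diff).
object MiniFoxGates {
  // Run the effect only if the condition holds; otherwise succeed with None.
  def runIf[A](condition: Boolean)(fox: => MiniFox[A]): MiniFox[Option[A]] =
    if (condition) fox.map(Some(_)) else MiniFox.successful(None)

  // Run the effect only if the optional input is present.
  def runOptional[A, B](opt: Option[A])(f: A => MiniFox[B]): MiniFox[Option[B]] =
    opt.fold(MiniFox.successful(Option.empty[B]))(a => f(a).map(Some(_)))
}

// e.g. used storage is only fetched when the requesting user belongs to the
// dataset's organization, mirroring the Fox.runIf call in the hunk above.
```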
16 changes: 8 additions & 8 deletions frontend/javascripts/admin/tasktype/task_type_create_view.tsx
@@ -36,7 +36,7 @@ type Props = {
};

type FormValues = {
isResolutionRestricted: boolean;
isMagRestricted: boolean;
summary: string;
teamId: string;
description: string;
@@ -103,7 +103,7 @@ function TaskTypeCreateView({ taskTypeId, history }: Props) {
const taskType = taskTypeId ? await getTaskType(taskTypeId) : null;

const defaultValues = {
isResolutionRestricted: false,
isMagRestricted: false,
settings: {
somaClickingAllowed: true,
branchPointsAllowed: true,
@@ -131,15 +131,15 @@ function TaskTypeCreateView({ taskTypeId, history }: Props) {
}

if (taskType?.settings.magRestrictions.min || taskType?.settings.magRestrictions.max)
form.setFieldValue(["isResolutionRestricted"], true);
form.setFieldValue(["isMagRestricted"], true);
}

async function onFinish(formValues: FormValues) {
const {
settings,
teamId,
recommendedConfiguration,
isResolutionRestricted: _isResolutionRestricted,
isMagRestricted: _isMagRestricted,
...rest
} = formValues;
const teamName = teams.find((team) => team.id === teamId)!["name"];
@@ -411,7 +411,7 @@ function TaskTypeCreateView({ taskTypeId, history }: Props) {
</FormItem>

<FormItem
name={["isResolutionRestricted"]}
name={["isMagRestricted"]}
valuePropName="checked"
style={{
marginBottom: 6,
@@ -431,12 +431,12 @@
<FormItem
noStyle
shouldUpdate={(prevValues, curValues) =>
!prevValues.isResolutionRestricted ||
prevValues.isResolutionRestricted !== curValues.isResolutionRestricted
!prevValues.isMagRestricted ||
prevValues.isMagRestricted !== curValues.isMagRestricted
}
>
{({ getFieldValue }) =>
getFieldValue(["isResolutionRestricted"]) ? (
getFieldValue(["isMagRestricted"]) ? (
<div
style={{
marginLeft: 24,
4 changes: 2 additions & 2 deletions frontend/javascripts/admin/voxelytics/ai_model_list_view.tsx
@@ -129,8 +129,8 @@ function TrainNewAiJobModal({ onClose }: { onClose: () => void }) {
const volumeTracingIndex = volumeTracings.findIndex(
(tracing) => tracing.tracingId === annotationLayer.tracingId,
);
const resolutions = volumeTracingMags[volumeTracingIndex] || ([[1, 1, 1]] as Vector3[]);
return getMagInfo(resolutions).getFinestMag();
const mags = volumeTracingMags[volumeTracingIndex] || ([[1, 1, 1]] as Vector3[]);
return getMagInfo(mags).getFinestMag();
} else {
const segmentationLayer = getSegmentationLayerByName(dataset, layerName);
return getMagInfo(segmentationLayer.resolutions).getFinestMag();
(diffs for the remaining changed files not shown)
