From 7b03f97c6a1f18177cbc22dfefff8c4160ef4284 Mon Sep 17 00:00:00 2001 From: Florian M Date: Thu, 24 Oct 2024 13:03:27 +0200 Subject: [PATCH 01/13] update default predict wofklow template (#8144) --- .../view/action-bar/default-predict-workflow-template.ts | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/frontend/javascripts/oxalis/view/action-bar/default-predict-workflow-template.ts b/frontend/javascripts/oxalis/view/action-bar/default-predict-workflow-template.ts index fdfcc186963..ffaffb19b0c 100644 --- a/frontend/javascripts/oxalis/view/action-bar/default-predict-workflow-template.ts +++ b/frontend/javascripts/oxalis/view/action-bar/default-predict-workflow-template.ts @@ -1,8 +1,9 @@ export default `predict: task: PredictTask distribution: - default: - processes: 2 + step: + strategy: sequential + num_io_threads: 5 inputs: model: TO_BE_SET_BY_WORKER config: @@ -19,6 +20,6 @@ publish_dataset_meshes: config: name: TO_BE_SET_BY_WORKER public_directory: TO_BE_SET_BY_WORKER - webknossos_organization: TO_BE_SET_BY_WORKER use_symlinks: False - move_dataset_symlink_artifact: True`; + move_dataset_symlink_artifact: True + keep_symlinks_to: TO_BE_SET_BY_WORKER`; From 6a0dcc0571c2dbc44c881b76d990e0df024aabba Mon Sep 17 00:00:00 2001 From: Tom Herold Date: Thu, 24 Oct 2024 15:21:19 +0200 Subject: [PATCH 02/13] add stubs for CI workflow --- .github/workflows/build_test_deploy.yml | 13 +++++++++++++ 1 file changed, 13 insertions(+) create mode 100644 .github/workflows/build_test_deploy.yml diff --git a/.github/workflows/build_test_deploy.yml b/.github/workflows/build_test_deploy.yml new file mode 100644 index 00000000000..599e2b22e00 --- /dev/null +++ b/.github/workflows/build_test_deploy.yml @@ -0,0 +1,13 @@ +name: CI Pipeline + +on: + workflow_dispatch: + +jobs: + foo: + runs-on: ubuntu-20.04 + steps: + - name: Checkout code + uses: actions/checkout@v3 + with: + fetch-depth: 5 \ No newline at end of file From 2953fa7ffbb6ba53a992db7f59d39fb52f36c32f Mon Sep 17 00:00:00 2001 From: MichaelBuessemeyer <39529669+MichaelBuessemeyer@users.noreply.github.com> Date: Mon, 28 Oct 2024 08:41:18 +0100 Subject: [PATCH 03/13] Automatically use user token when sharing token is not sufficient for a request (#8139) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * WIP auto use user token when sharing token is not sufficient for a request * remove token via urlmanager to keep removal (otherwise the urlmanager would have always restored the token in the url) * remove unused window import * local var renaming * remove token expiration toast * add changelog entry * undo removing token from url upon successful request with user token * dont show auto error toast when failing saved and only show manual error toast created by save_saga * remove unused import * fix save saga tests * apply some coderabbit suggestions * fix typechecking * apply code rabbit suggestion --------- Co-authored-by: Michael Büßemeyer --- CHANGELOG.unreleased.md | 1 + frontend/javascripts/admin/admin_rest_api.ts | 2 +- frontend/javascripts/admin/api/token.ts | 35 ++++++++++++++----- frontend/javascripts/libs/request.ts | 12 +++++-- .../oxalis/model/sagas/save_saga.ts | 3 ++ .../javascripts/test/sagas/save_saga.spec.ts | 3 ++ 6 files changed, 44 insertions(+), 12 deletions(-) diff --git a/CHANGELOG.unreleased.md b/CHANGELOG.unreleased.md index 516413a4895..f285b180b9e 100644 --- a/CHANGELOG.unreleased.md +++ b/CHANGELOG.unreleased.md @@ -27,6 +27,7 @@ For upgrade instructions, 
please check the [migration guide](MIGRATIONS.released ### Fixed - Fixed a bug during dataset upload in case the configured `datastore.baseFolder` is an absolute path. [#8098](https://github.com/scalableminds/webknossos/pull/8098) [#8103](https://github.com/scalableminds/webknossos/pull/8103) +- When trying to save an annotation opened via a link including a sharing token, the token is automatically discarded in case it is insufficient for update actions but the users token is. [#8139](https://github.com/scalableminds/webknossos/pull/8139) - Fixed that the skeleton search did not automatically expand groups that contained the selected tree [#8129](https://github.com/scalableminds/webknossos/pull/8129) - Fixed a bug that zarr streaming version 3 returned the shape of mag (1, 1, 1) / the finest mag for all mags. [#8116](https://github.com/scalableminds/webknossos/pull/8116) - Fixed sorting of mags in outbound zarr streaming. [#8125](https://github.com/scalableminds/webknossos/pull/8125) diff --git a/frontend/javascripts/admin/admin_rest_api.ts b/frontend/javascripts/admin/admin_rest_api.ts index 5af922c7a3a..6c3b4ef8e78 100644 --- a/frontend/javascripts/admin/admin_rest_api.ts +++ b/frontend/javascripts/admin/admin_rest_api.ts @@ -2035,7 +2035,7 @@ export function computeAdHocMesh( }, }, ); - const neighbors = Utils.parseMaybe(headers.neighbors) || []; + const neighbors = (Utils.parseMaybe(headers.neighbors) as number[] | null) || []; return { buffer, neighbors, diff --git a/frontend/javascripts/admin/api/token.ts b/frontend/javascripts/admin/api/token.ts index 54f730d40e7..3a430d55756 100644 --- a/frontend/javascripts/admin/api/token.ts +++ b/frontend/javascripts/admin/api/token.ts @@ -2,9 +2,12 @@ import { location } from "libs/window"; import Request from "libs/request"; import * as Utils from "libs/utils"; +const MAX_TOKEN_RETRY_ATTEMPTS = 3; + let tokenPromise: Promise; let tokenRequestPromise: Promise | null; +let shouldUseURLToken: boolean = true; function requestUserToken(): Promise { if (tokenRequestPromise) { @@ -33,22 +36,36 @@ export function getSharingTokenFromUrlParameters(): string | null | undefined { return null; } -export function doWithToken(fn: (token: string) => Promise, tries: number = 1): Promise { - const sharingToken = getSharingTokenFromUrlParameters(); +export async function doWithToken( + fn: (token: string) => Promise, + tries: number = 1, + useURLTokenIfAvailable: boolean = true, +): Promise { + let token = + useURLTokenIfAvailable && shouldUseURLToken ? getSharingTokenFromUrlParameters() : null; - if (sharingToken != null) { - return fn(sharingToken); + if (token == null) { + tokenPromise = tokenPromise == null ? requestUserToken() : tokenPromise; + } else { + tokenPromise = Promise.resolve(token); } - if (!tokenPromise) tokenPromise = requestUserToken(); - return tokenPromise.then(fn).catch((error) => { + return tokenPromise.then(fn).catch(async (error) => { if (error.status === 403) { - console.warn("Token expired. Requesting new token..."); + console.warn( + `Token expired (attempt ${tries}/${MAX_TOKEN_RETRY_ATTEMPTS}). Requesting new token...`, + ); tokenPromise = requestUserToken(); // If three new tokens did not fix the 403, abort, otherwise we'll get into an endless loop here - if (tries < 3) { - return doWithToken(fn, tries + 1); + if (tries < MAX_TOKEN_RETRY_ATTEMPTS) { + // If using the url sharing token failed, we try the user specific token instead. 
+ const result = await doWithToken(fn, tries + 1, false); + // Upon successful retry with own token, discard the url token. + if (useURLTokenIfAvailable) { + shouldUseURLToken = false; + } + return result; } } diff --git a/frontend/javascripts/libs/request.ts b/frontend/javascripts/libs/request.ts index 1b1271e4846..25bf31657e5 100644 --- a/frontend/javascripts/libs/request.ts +++ b/frontend/javascripts/libs/request.ts @@ -311,7 +311,11 @@ class Request { ...message, key: json.status.toString(), })); - if (showErrorToast) Toast.messages(messages); + if (showErrorToast) { + Toast.messages(messages); // Note: Toast.error internally logs to console + } else { + console.error(messages); + } // Check whether the error chain mentions an url which belongs // to a datastore. Then, ping the datastore pingMentionedDataStores(text); @@ -319,7 +323,11 @@ class Request { /* eslint-disable-next-line prefer-promise-reject-errors */ return Promise.reject({ ...json, url: requestedUrl }); } catch (_jsonError) { - if (showErrorToast) Toast.error(text); + if (showErrorToast) { + Toast.error(text); // Note: Toast.error internally logs to console + } else { + console.error(`Request failed for ${requestedUrl}:`, text); + } /* eslint-disable-next-line prefer-promise-reject-errors */ return Promise.reject({ diff --git a/frontend/javascripts/oxalis/model/sagas/save_saga.ts b/frontend/javascripts/oxalis/model/sagas/save_saga.ts index e9e09a12a32..d2acc8ca949 100644 --- a/frontend/javascripts/oxalis/model/sagas/save_saga.ts +++ b/frontend/javascripts/oxalis/model/sagas/save_saga.ts @@ -196,6 +196,9 @@ export function* sendRequestToServer( method: "POST", data: compactedSaveQueue, compress: process.env.NODE_ENV === "production", + // Suppressing error toast, as the doWithToken retry with personal token functionality should not show an error. + // Instead the error is logged and toggleErrorHighlighting should take care of showing an error to the user. 
+ showErrorToast: false, }, ); const endTime = Date.now(); diff --git a/frontend/javascripts/test/sagas/save_saga.spec.ts b/frontend/javascripts/test/sagas/save_saga.spec.ts index 677568ef279..4707cf28bee 100644 --- a/frontend/javascripts/test/sagas/save_saga.spec.ts +++ b/frontend/javascripts/test/sagas/save_saga.spec.ts @@ -130,6 +130,7 @@ test("SaveSaga should send request to server", (t) => { method: "POST", data: saveQueueWithVersions, compress: false, + showErrorToast: false, }), ); }); @@ -147,6 +148,7 @@ test("SaveSaga should retry update actions", (t) => { method: "POST", data: saveQueueWithVersions, compress: false, + showErrorToast: false, }, ); const saga = sendRequestToServer(TRACING_TYPE, tracingId); @@ -187,6 +189,7 @@ test("SaveSaga should escalate on permanent client error update actions", (t) => method: "POST", data: saveQueueWithVersions, compress: false, + showErrorToast: false, }), ); saga.throw({ From 9d6e322fa8be3a02ed426629cb5af6d2bec6aca9 Mon Sep 17 00:00:00 2001 From: Tom Herold Date: Mon, 28 Oct 2024 09:48:15 +0100 Subject: [PATCH 04/13] Update pen_tablets youtube video link --- docs/volume_annotation/pen_tablets.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/volume_annotation/pen_tablets.md b/docs/volume_annotation/pen_tablets.md index dbe9402708a..1238ae15934 100644 --- a/docs/volume_annotation/pen_tablets.md +++ b/docs/volume_annotation/pen_tablets.md @@ -5,7 +5,7 @@ Beyond the mouse and keyboard WEBKNOSSOS is great for annotating datasets with a ## Using Wacom/Pen tablets Using pen tablet can significantly boost your annotation productivity, especially if you set it up correctly with WEBKNOSSOS. -![youtube-video](https://www.youtube.com/embed/xk0gqsVx494) +![youtube-video](https://www.youtube.com/embed/qCrqswDwmi8) To streamline your workflow, program your tablet and pen buttons to match the WEBKNOSSOS shortcuts. By doing so, you can focus on your pen without the need of a mouse or keyboard. Here is an example configuration using a Wacom tablet and the Wacom driver software: @@ -26,7 +26,7 @@ You can find the full list for keyboard shortcuts in the [documentation](../ui/k ### Annotating with Wacom Pens Now, let’s dive into the annotation process! In this example, we begin by quick-selecting a cell. -![youtube-video](https://www.youtube.com/embed/xk0gqsVx494?start=46) +![youtube-video](https://www.youtube.com/embed/qCrqswDwmi8?start=37) If the annotation isn’t precise enough, we can easily switch to the eraser tool (middle left button) and erase a corner. Selecting the brush tool is as simple as pressing the left button, allowing us to add small surfaces to the annotation. When ready, pressing the right button creates a new segment, and we can repeat the process for other cells. 
From 8993bab55d4890d18e166735b0b2808c71f4b562 Mon Sep 17 00:00:00 2001 From: Tom Herold Date: Mon, 28 Oct 2024 11:50:55 +0100 Subject: [PATCH 05/13] updated broken links to WK docs --- app/views/mail/jobFailedUploadConvert.scala.html | 4 ++-- app/views/mail/jobSuccessfulSegmentation.scala.html | 2 +- frontend/javascripts/admin/auth/auth_token_view.tsx | 4 ++-- frontend/javascripts/admin/dataset/dataset_add_view.tsx | 2 +- .../javascripts/admin/dataset/dataset_upload_view.tsx | 8 ++++---- frontend/javascripts/admin/job/job_list_view.tsx | 2 +- frontend/javascripts/admin/onboarding.tsx | 4 ++-- .../admin/statistic/available_tasks_report_view.tsx | 2 +- frontend/javascripts/admin/task/task_list_view.tsx | 2 +- .../admin/user/permissions_and_teams_modal_view.tsx | 2 +- .../javascripts/dashboard/dashboard_task_list_view.tsx | 2 +- frontend/javascripts/dashboard/dataset_folder_view.tsx | 2 +- .../oxalis/view/action-bar/download_modal_view.tsx | 2 +- 13 files changed, 19 insertions(+), 19 deletions(-) diff --git a/app/views/mail/jobFailedUploadConvert.scala.html b/app/views/mail/jobFailedUploadConvert.scala.html index 80c3d557e33..4c459fc303d 100644 --- a/app/views/mail/jobFailedUploadConvert.scala.html +++ b/app/views/mail/jobFailedUploadConvert.scala.html @@ -11,9 +11,9 @@

Here are some tips for uploading and converting datasets:

diff --git a/app/views/mail/jobSuccessfulSegmentation.scala.html b/app/views/mail/jobSuccessfulSegmentation.scala.html index 560d974fa64..ec5d71c1c4d 100644 --- a/app/views/mail/jobSuccessfulSegmentation.scala.html +++ b/app/views/mail/jobSuccessfulSegmentation.scala.html @@ -38,7 +38,7 @@

- Do you want to make corrections to the automated segmentation? Use the easy-to-use, built-in proof-reading tools in WEBKNOSSOS (requires Power plan). + Do you want to make corrections to the automated segmentation? Use the easy-to-use, built-in proof-reading tools in WEBKNOSSOS (requires Power plan).

An Auth Token is a series of symbols that serves to authenticate you. It is used in - communication with the backend API and sent with every request to verify your identity. + communication with the Python API and sent with every request to verify your identity.

You should revoke it if somebody else has acquired your token or you have the suspicion this has happened.{" "} - + Read more

diff --git a/frontend/javascripts/admin/dataset/dataset_add_view.tsx b/frontend/javascripts/admin/dataset/dataset_add_view.tsx index c1513f1f7c7..c7677831751 100644 --- a/frontend/javascripts/admin/dataset/dataset_add_view.tsx +++ b/frontend/javascripts/admin/dataset/dataset_add_view.tsx @@ -183,7 +183,7 @@ const alignBanner = ( />

diff --git a/frontend/javascripts/admin/dataset/dataset_upload_view.tsx b/frontend/javascripts/admin/dataset/dataset_upload_view.tsx index b4f235e9110..653d2ff1249 100644 --- a/frontend/javascripts/admin/dataset/dataset_upload_view.tsx +++ b/frontend/javascripts/admin/dataset/dataset_upload_view.tsx @@ -1243,7 +1243,7 @@ function FileUploadArea({ e.stopPropagation()} @@ -1265,7 +1265,7 @@ function FileUploadArea({ e.stopPropagation()} @@ -1287,7 +1287,7 @@ function FileUploadArea({ e.stopPropagation()} @@ -1309,7 +1309,7 @@ function FileUploadArea({ Have a look at{" "} e.stopPropagation()} > our documentation diff --git a/frontend/javascripts/admin/job/job_list_view.tsx b/frontend/javascripts/admin/job/job_list_view.tsx index 17c32a2a56a..bfa069ae7c3 100644 --- a/frontend/javascripts/admin/job/job_list_view.tsx +++ b/frontend/javascripts/admin/job/job_list_view.tsx @@ -366,7 +366,7 @@ function JobListView() { Some actions such as dataset conversions or export as Tiff files require some time for processing in the background. diff --git a/frontend/javascripts/admin/onboarding.tsx b/frontend/javascripts/admin/onboarding.tsx index 09fcb9b3e9a..93dd46c5c01 100644 --- a/frontend/javascripts/admin/onboarding.tsx +++ b/frontend/javascripts/admin/onboarding.tsx @@ -548,7 +548,7 @@ class OnboardingView extends React.PureComponent { height={250} > You can also copy it directly onto the hosting server.{" "} - + Learn more about supported data formats. @@ -583,7 +583,7 @@ class OnboardingView extends React.PureComponent { }> Upload more of your datasets.{" "} - Learn more about + Learn more about the formats and upload processes WEBKNOSSOS supports. }> diff --git a/frontend/javascripts/admin/statistic/available_tasks_report_view.tsx b/frontend/javascripts/admin/statistic/available_tasks_report_view.tsx index 737ab2a906d..f972d3f5a6d 100644 --- a/frontend/javascripts/admin/statistic/available_tasks_report_view.tsx +++ b/frontend/javascripts/admin/statistic/available_tasks_report_view.tsx @@ -45,7 +45,7 @@ function AvailableTasksReportView() { available to multiple users here, but each will only be handed to the first user to request it. diff --git a/frontend/javascripts/admin/task/task_list_view.tsx b/frontend/javascripts/admin/task/task_list_view.tsx index c110b6c72cc..c80ec113c4b 100644 --- a/frontend/javascripts/admin/task/task_list_view.tsx +++ b/frontend/javascripts/admin/task/task_list_view.tsx @@ -222,7 +222,7 @@ function TaskListView({ initialFieldValues }: Props) {

To learn more about the task system in WEBKNOSSOS,{" "} diff --git a/frontend/javascripts/admin/user/permissions_and_teams_modal_view.tsx b/frontend/javascripts/admin/user/permissions_and_teams_modal_view.tsx index ebd803d0dd2..33adc70da4f 100644 --- a/frontend/javascripts/admin/user/permissions_and_teams_modal_view.tsx +++ b/frontend/javascripts/admin/user/permissions_and_teams_modal_view.tsx @@ -243,7 +243,7 @@ function PermissionsAndTeamsModalView({

Organization Permissions{" "} diff --git a/frontend/javascripts/dashboard/dashboard_task_list_view.tsx b/frontend/javascripts/dashboard/dashboard_task_list_view.tsx index 899a5957c87..cec4d65b369 100644 --- a/frontend/javascripts/dashboard/dashboard_task_list_view.tsx +++ b/frontend/javascripts/dashboard/dashboard_task_list_view.tsx @@ -414,7 +414,7 @@ class DashboardTaskListView extends React.PureComponent { as part of the WEBKNOSSOS project management.{" "}

diff --git a/frontend/javascripts/dashboard/dataset_folder_view.tsx b/frontend/javascripts/dashboard/dataset_folder_view.tsx index f3eaf659fc9..157da42d1fb 100644 --- a/frontend/javascripts/dashboard/dataset_folder_view.tsx +++ b/frontend/javascripts/dashboard/dataset_folder_view.tsx @@ -134,7 +134,7 @@ function DatasetFolderViewInner(props: Props) {

WEBKNOSSOS supports a variety of (remote){" "} diff --git a/frontend/javascripts/oxalis/view/action-bar/download_modal_view.tsx b/frontend/javascripts/oxalis/view/action-bar/download_modal_view.tsx index 9be46f578f4..a0ef1d37e31 100644 --- a/frontend/javascripts/oxalis/view/action-bar/download_modal_view.tsx +++ b/frontend/javascripts/oxalis/view/action-bar/download_modal_view.tsx @@ -433,7 +433,7 @@ function _DownloadModalView({ > For more information on how to work with {typeDependentFileName} visit the{" "} From e4eec0f073a9c7927781887f0a160587701289a6 Mon Sep 17 00:00:00 2001 From: Tom Herold Date: Mon, 28 Oct 2024 13:57:37 +0100 Subject: [PATCH 06/13] fix formatting --- frontend/javascripts/admin/auth/auth_token_view.tsx | 4 +--- frontend/javascripts/admin/onboarding.tsx | 4 ++-- 2 files changed, 3 insertions(+), 5 deletions(-) diff --git a/frontend/javascripts/admin/auth/auth_token_view.tsx b/frontend/javascripts/admin/auth/auth_token_view.tsx index 2e2f3a5e84f..821fb40b11e 100644 --- a/frontend/javascripts/admin/auth/auth_token_view.tsx +++ b/frontend/javascripts/admin/auth/auth_token_view.tsx @@ -107,9 +107,7 @@ function AuthTokenView() {

You should revoke it if somebody else has acquired your token or you have the suspicion this has happened.{" "} - - Read more - + Read more

diff --git a/frontend/javascripts/admin/onboarding.tsx b/frontend/javascripts/admin/onboarding.tsx index 93dd46c5c01..bd634a6efb9 100644 --- a/frontend/javascripts/admin/onboarding.tsx +++ b/frontend/javascripts/admin/onboarding.tsx @@ -583,8 +583,8 @@ class OnboardingView extends React.PureComponent { }> Upload more of your datasets.{" "} - Learn more about - the formats and upload processes WEBKNOSSOS supports. + Learn more about the + formats and upload processes WEBKNOSSOS supports. }> Date: Mon, 28 Oct 2024 14:08:11 +0100 Subject: [PATCH 07/13] Fix datavault tests for non-existent s3 buckets (#8151) * Fix datavault tests for non-existent s3 buckets * typo * format frontend after master commit --- test/backend/DataVaultTestSuite.scala | 30 ++++++++++++++----- .../datastore/datavault/S3DataVault.scala | 9 +++++- 2 files changed, 30 insertions(+), 9 deletions(-) diff --git a/test/backend/DataVaultTestSuite.scala b/test/backend/DataVaultTestSuite.scala index 424b560847e..75c8ab9d666 100644 --- a/test/backend/DataVaultTestSuite.scala +++ b/test/backend/DataVaultTestSuite.scala @@ -18,6 +18,7 @@ import net.liftweb.common.{Box, Empty, EmptyBox, Failure, Full} import play.api.libs.json.JsString import play.api.test.WsTestClient +import java.util.UUID import scala.collection.immutable.NumericRange import scala.concurrent.ExecutionContext import scala.concurrent.ExecutionContext.{global => globalExecutionContext} @@ -59,9 +60,11 @@ class DataVaultTestSuite extends PlaySpec { } "return empty box" when { - "requesting a nox-existent object" in { + "requesting a non-existent object" in { val result = - (vaultPath / "non-existent-key").readBytes()(globalExecutionContext).await(handleFoxJustification) + (vaultPath / s"non-existent-key${UUID.randomUUID}") + .readBytes()(globalExecutionContext) + .await(handleFoxJustification) assertBoxEmpty(result) } } @@ -141,8 +144,18 @@ class DataVaultTestSuite extends PlaySpec { } "return empty box" when { - "requesting a nox-existent object" in { - val uri = new URI("s3://non-existing-bucket/non-existing-object") + "requesting a non-existent bucket" in { + val uri = new URI(s"s3://non-existent-bucket${UUID.randomUUID}/non-existent-object") + val s3DataVault = S3DataVault.create(RemoteSourceDescriptor(uri, None)) + val vaultPath = new VaultPath(uri, s3DataVault) + val result = vaultPath.readBytes()(globalExecutionContext).await(handleFoxJustification) + assertBoxEmpty(result) + } + } + + "return empty box" when { + "requesting a non-existent object in existent bucket" in { + val uri = new URI(s"s3://open-neurodata/non-existent-object${UUID.randomUUID}") val s3DataVault = S3DataVault.create(RemoteSourceDescriptor(uri, None)) val vaultPath = new VaultPath(uri, s3DataVault) val result = vaultPath.readBytes()(globalExecutionContext).await(handleFoxJustification) @@ -165,15 +178,16 @@ class DataVaultTestSuite extends PlaySpec { _.toUri == new URI("s3://janelia-cosem-datasets/jrc_hela-3/jrc_hela-3.n5/em/fibsem-uint16/s0/"))) } - "return empty box" when { - "requesting directory listing on nox-existent bucket" in { - val uri = new URI("s3://non-existing-bucket/non-existing-object/") + "return failure" when { + "requesting directory listing on non-existent bucket" in { + val uri = new URI(f"s3://non-existent-bucket${UUID.randomUUID}/non-existent-object/") val s3DataVault = S3DataVault.create(RemoteSourceDescriptor(uri, None)) val vaultPath = new VaultPath(uri, s3DataVault) val result = vaultPath.listDirectory(maxItems = 
5)(globalExecutionContext).await(handleFoxJustification) - assertBoxEmpty(result) + assertBoxFailure(result) } } + } } diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datavault/S3DataVault.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datavault/S3DataVault.scala index 3f47f88274f..d86e4553212 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datavault/S3DataVault.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datavault/S3DataVault.scala @@ -88,6 +88,13 @@ class S3DataVault(s3AccessKeyCredential: Option[S3AccessKeyCredential], uri: URI Future.successful(box) } + private def notFoundToFailure[T](resultFuture: Future[T])(implicit ec: ExecutionContext): Fox[T] = + resultFuture.transformWith { + case TrySuccess(value) => Fox.successful(value).futureBox + case TryFailure(exception) => + Future.successful(BoxFailure(exception.getMessage, Full(exception), Empty)) + } + override def readBytesAndEncoding(path: VaultPath, range: RangeSpecifier)( implicit ec: ExecutionContext): Fox[(Array[Byte], Encoding.Value)] = for { @@ -115,7 +122,7 @@ class S3DataVault(s3AccessKeyCredential: Option[S3AccessKeyCredential], uri: URI val listObjectsRequest = ListObjectsV2Request.builder().bucket(bucketName).prefix(keyPrefix).delimiter("/").maxKeys(maxKeys).build() for { - objectListing: ListObjectsV2Response <- notFoundToEmpty(client.listObjectsV2(listObjectsRequest).asScala) + objectListing: ListObjectsV2Response <- notFoundToFailure(client.listObjectsV2(listObjectsRequest).asScala) s3SubPrefixes: List[CommonPrefix] = objectListing.commonPrefixes().asScala.take(maxItems).toList } yield s3SubPrefixes.map(_.prefix()) } From fda194b2649d4079c5cd96db228937179da1dfdb Mon Sep 17 00:00:00 2001 From: Norman Rzepka Date: Mon, 28 Oct 2024 14:55:08 +0100 Subject: [PATCH 08/13] Update zarr.md --- docs/data/zarr.md | 21 ++++++++++++++------- 1 file changed, 14 insertions(+), 7 deletions(-) diff --git a/docs/data/zarr.md b/docs/data/zarr.md index ad79133d630..cb85ce65244 100644 --- a/docs/data/zarr.md +++ b/docs/data/zarr.md @@ -111,22 +111,27 @@ For OME-Zarr (v0.5) datasets, the structure is slightly different (See [OME-Zarr ## Conversion to Zarr You can easily convert image stacks manually with the [WEBKNOSSOS CLI](https://docs.webknossos.org/cli). -The CLI tool expects all image files in a single folder with numbered file names. +The CLI tool expects a single file or all image files in a single folder with numbered file names. After installing, you can convert image stacks to Zarr datasets with the following command: ```shell -pip install webknossos +pip install --extra-index-url https://pypi.scm.io/simple "webknossos[all]" webknossos convert \ + --layer-name em \ --voxel-size 11.24,11.24,25 \ - --name my_dataset \ + --chunk-shape 64,64,64 \ --data-format zarr \ - data/source data/target + --jobs 4 \ + input.tif output.zarr + +webknossos compress --jobs 4 output.zarr +webknossos downsample --jobs 4 output.zarr ``` -This snippet converts an image stack that is located in directory called `data/source` into a Zarr dataset which will be located at `data/target`. -It will create a so called `color` layer containing your raw greyscale/color image. -The supplied `--voxel-size` is specified in nanometers. +This example will create an unsharded Zarr v2 dataset with a voxel size of (4,4,4) nm3 and a chunk size of (64,64,64) voxel. 
+A maximum of 4 parallel jobs will be used to parallelize the conversion, compression and downsampling. +Using the `--data-format zarr3` argument will produce sharded Zarr v3 datasets. Read the full documentation at [WEBKNOSSOS CLI](https://docs.webknossos.org/cli). @@ -170,3 +175,5 @@ To get the best streaming performance for Zarr datasets consider the following s - Use chunk sizes of 32 - 128 voxels^3 - Enable sharding (only available in Zarr 3+) +- Use 3D downsampling + From 9b5a12eb089bb4539913c8e12bbf77afe4bca178 Mon Sep 17 00:00:00 2001 From: MichaelBuessemeyer <39529669+MichaelBuessemeyer@users.noreply.github.com> Date: Mon, 28 Oct 2024 16:55:57 +0100 Subject: [PATCH 09/13] Fix bbox export via bbox menu (#8152) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * fix bbox export via bbox menu * add changelog entry * fix formatting --------- Co-authored-by: Michael Büßemeyer --- CHANGELOG.unreleased.md | 1 + .../oxalis/view/right-border-tabs/bounding_box_tab.tsx | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.unreleased.md b/CHANGELOG.unreleased.md index f285b180b9e..29cd042e85a 100644 --- a/CHANGELOG.unreleased.md +++ b/CHANGELOG.unreleased.md @@ -27,6 +27,7 @@ For upgrade instructions, please check the [migration guide](MIGRATIONS.released ### Fixed - Fixed a bug during dataset upload in case the configured `datastore.baseFolder` is an absolute path. [#8098](https://github.com/scalableminds/webknossos/pull/8098) [#8103](https://github.com/scalableminds/webknossos/pull/8103) +- Fixed bbox export menu item [#8152](https://github.com/scalableminds/webknossos/pull/8152) - When trying to save an annotation opened via a link including a sharing token, the token is automatically discarded in case it is insufficient for update actions but the users token is. [#8139](https://github.com/scalableminds/webknossos/pull/8139) - Fixed that the skeleton search did not automatically expand groups that contained the selected tree [#8129](https://github.com/scalableminds/webknossos/pull/8129) - Fixed a bug that zarr streaming version 3 returned the shape of mag (1, 1, 1) / the finest mag for all mags. 
[#8116](https://github.com/scalableminds/webknossos/pull/8116) diff --git a/frontend/javascripts/oxalis/view/right-border-tabs/bounding_box_tab.tsx b/frontend/javascripts/oxalis/view/right-border-tabs/bounding_box_tab.tsx index 126e0f03109..cc837f4ac31 100644 --- a/frontend/javascripts/oxalis/view/right-border-tabs/bounding_box_tab.tsx +++ b/frontend/javascripts/oxalis/view/right-border-tabs/bounding_box_tab.tsx @@ -90,7 +90,7 @@ export default function BoundingBoxTab() { } function handleExportBoundingBox(bb: UserBoundingBox) { - _.partial(setSelectedBoundingBoxForExport, bb); + setSelectedBoundingBoxForExport(bb); hideContextMenu(); } From 684a588c40220a69cfd3baf4f6821ea19aa8eafd Mon Sep 17 00:00:00 2001 From: Charlie Meister Date: Tue, 29 Oct 2024 08:41:28 +0100 Subject: [PATCH 10/13] Add button to select all trees and all segments that match a search (#8123) * add button to select all trees that match a search * add function for segments and improve icon * remove console log * add ts-expect-error tag again * focus first search result and only allow select all matches for leaves * fix select segment group as search result * expand parent groups and fix mixed tree and tree group selection * changelog * lint * address review * add placeholder and disable field if all matches all selected * fix case where group is selected --------- Co-authored-by: MichaelBuessemeyer <39529669+MichaelBuessemeyer@users.noreply.github.com> --- CHANGELOG.unreleased.md | 1 + .../advanced_search_popover.tsx | 57 +++++++++++++++++-- .../segments_tab/segments_view.tsx | 38 ++++++++++++- .../right-border-tabs/skeleton_tab_view.tsx | 16 +++++- .../right-border-tabs/tree_hierarchy_view.tsx | 16 +++--- 5 files changed, 112 insertions(+), 16 deletions(-) diff --git a/CHANGELOG.unreleased.md b/CHANGELOG.unreleased.md index 29cd042e85a..142e985933d 100644 --- a/CHANGELOG.unreleased.md +++ b/CHANGELOG.unreleased.md @@ -16,6 +16,7 @@ For upgrade instructions, please check the [migration guide](MIGRATIONS.released - Most sliders have been improved: Wheeling above a slider now changes its value and double-clicking its knob resets it to its default value. [#8095](https://github.com/scalableminds/webknossos/pull/8095) - It is now possible to search for unnamed segments with the full default name instead of only their id. [#8133](https://github.com/scalableminds/webknossos/pull/8133) - Increased loading speed for precomputed meshes. [#8110](https://github.com/scalableminds/webknossos/pull/8110) +- Added a button to the search popover in the skeleton and segment tab to select all matching non-group results. [#8123](https://github.com/scalableminds/webknossos/pull/8123) - Unified wording in UI and code: “Magnification”/“mag” is now used in place of “Resolution“ most of the time, compare [https://docs.webknossos.org/webknossos/terminology.html](terminology document). [#8111](https://github.com/scalableminds/webknossos/pull/8111) - Added support for adding remote OME-Zarr NGFF version 0.5 datasets. 
[#8122](https://github.com/scalableminds/webknossos/pull/8122) diff --git a/frontend/javascripts/oxalis/view/right-border-tabs/advanced_search_popover.tsx b/frontend/javascripts/oxalis/view/right-border-tabs/advanced_search_popover.tsx index 76bcb9e0399..54852103229 100644 --- a/frontend/javascripts/oxalis/view/right-border-tabs/advanced_search_popover.tsx +++ b/frontend/javascripts/oxalis/view/right-border-tabs/advanced_search_popover.tsx @@ -1,5 +1,5 @@ import { Input, Tooltip, Popover, Space, type InputRef } from "antd"; -import { DownOutlined, UpOutlined } from "@ant-design/icons"; +import { CheckSquareOutlined, DownOutlined, UpOutlined } from "@ant-design/icons"; import * as React from "react"; import memoizeOne from "memoize-one"; import ButtonComponent from "oxalis/view/components/button_component"; @@ -7,10 +7,13 @@ import Shortcut from "libs/shortcut_component"; import DomVisibilityObserver from "oxalis/view/components/dom_visibility_observer"; import { mod } from "libs/utils"; +const PRIMARY_COLOR = "var(--ant-color-primary)"; + type Props = { data: S[]; searchKey: keyof S | ((item: S) => string); onSelect: (arg0: S) => void; + onSelectAllMatches?: (arg0: S[]) => void; children: React.ReactNode; provideShortcut?: boolean; targetId: string; @@ -20,6 +23,7 @@ type State = { isVisible: boolean; searchQuery: string; currentPosition: number | null | undefined; + areAllMatchesSelected: boolean; }; export default class AdvancedSearchPopover< @@ -29,6 +33,7 @@ export default class AdvancedSearchPopover< isVisible: false, searchQuery: "", currentPosition: null, + areAllMatchesSelected: false, }; getAvailableOptions = memoizeOne( @@ -69,6 +74,7 @@ export default class AdvancedSearchPopover< currentPosition = mod(currentPosition + offset, numberOfAvailableOptions); this.setState({ currentPosition, + areAllMatchesSelected: false, }); this.props.onSelect(availableOptions[currentPosition]); }; @@ -101,7 +107,7 @@ export default class AdvancedSearchPopover< render() { const { data, searchKey, provideShortcut, children, targetId } = this.props; - const { searchQuery, isVisible } = this.state; + const { searchQuery, isVisible, areAllMatchesSelected } = this.state; let { currentPosition } = this.state; const availableOptions = this.getAvailableOptions(data, searchQuery, searchKey); const numberOfAvailableOptions = availableOptions.length; @@ -109,13 +115,17 @@ export default class AdvancedSearchPopover< currentPosition = currentPosition == null ? -1 : Math.min(currentPosition, numberOfAvailableOptions - 1); const hasNoResults = numberOfAvailableOptions === 0; - const hasMultipleResults = numberOfAvailableOptions > 1; + const availableOptionsToSelectAllMatches = availableOptions.filter( + (result) => result.type === "Tree" || result.type === "segment", + ); + const isSelectAllMatchesDisabled = availableOptionsToSelectAllMatches.length < 2; const additionalInputStyle = hasNoResults && searchQuery !== "" ? { color: "red", } : {}; + const selectAllMatchesButtonColor = areAllMatchesSelected ? PRIMARY_COLOR : undefined; return ( {provideShortcut ? ( @@ -171,9 +181,23 @@ export default class AdvancedSearchPopover< this.setState({ searchQuery: evt.target.value, currentPosition: null, + areAllMatchesSelected: false, }) } - addonAfter={`${currentPosition + 1}/${numberOfAvailableOptions}`} + addonAfter={ +
+ {areAllMatchesSelected + ? "all" + : `${currentPosition + 1}/${numberOfAvailableOptions}`} +
+ } ref={this.autoFocus} autoFocus /> @@ -183,7 +207,7 @@ export default class AdvancedSearchPopover< width: 40, }} onClick={this.selectPreviousOption} - disabled={!hasMultipleResults} + disabled={hasNoResults} > @@ -194,11 +218,32 @@ export default class AdvancedSearchPopover< width: 40, }} onClick={this.selectNextOption} - disabled={!hasMultipleResults} + disabled={hasNoResults} > + + { + this.props.onSelectAllMatches!(availableOptionsToSelectAllMatches); + if (!areAllMatchesSelected) + this.setState({ areAllMatchesSelected: true }); + } + : undefined + } + disabled={isSelectAllMatchesDisabled} + > + + +
) diff --git a/frontend/javascripts/oxalis/view/right-border-tabs/segments_tab/segments_view.tsx b/frontend/javascripts/oxalis/view/right-border-tabs/segments_tab/segments_view.tsx index 5bb582a04be..e795a0a581e 100644 --- a/frontend/javascripts/oxalis/view/right-border-tabs/segments_tab/segments_view.tsx +++ b/frontend/javascripts/oxalis/view/right-border-tabs/segments_tab/segments_view.tsx @@ -137,6 +137,8 @@ import { SegmentStatisticsModal } from "./segment_statistics_modal"; import type { ItemType } from "antd/lib/menu/interface"; import { InputWithUpdateOnBlur } from "oxalis/view/components/input_with_update_on_blur"; +const SCROLL_DELAY_MS = 50; + const { confirm } = Modal; const { Option } = Select; // Interval in ms to check for running mesh file computation jobs for this dataset @@ -1590,7 +1592,7 @@ class SegmentsView extends React.Component { this.setState(({ renamingCounter }) => ({ renamingCounter: renamingCounter - 1 })); }; - handleSearchSelect = (selectedElement: SegmentHierarchyNode) => { + maybeExpandParentGroup = (selectedElement: SegmentHierarchyNode) => { if (this.tree?.current == null) { return; } @@ -1606,16 +1608,47 @@ class SegmentsView extends React.Component { if (expandedGroups) { this.setExpandedGroupsFromSet(expandedGroups); } + }; + + handleSearchSelect = (selectedElement: SegmentHierarchyNode) => { + this.maybeExpandParentGroup(selectedElement); // As parent groups might still need to expand, we need to wait for this to finish. setTimeout(() => { if (this.tree.current) this.tree.current.scrollTo({ key: selectedElement.key }); - }, 50); + }, SCROLL_DELAY_MS); const isASegment = "color" in selectedElement; if (isASegment) { this.onSelectSegment(selectedElement); + } else { + if (this.props.visibleSegmentationLayer == null) return; + Store.dispatch( + setSelectedSegmentsOrGroupAction( + [], + selectedElement.id, + this.props.visibleSegmentationLayer?.name, + ), + ); } }; + handleSelectAllMatchingSegments = (allMatches: SegmentHierarchyNode[]) => { + if (this.props.visibleSegmentationLayer == null) return; + const allMatchingSegmentIds = allMatches.map((match) => { + this.maybeExpandParentGroup(match); + return match.id; + }); + Store.dispatch( + setSelectedSegmentsOrGroupAction( + allMatchingSegmentIds, + null, + this.props.visibleSegmentationLayer.name, + ), + ); + setTimeout(() => { + this.tree.current?.scrollTo({ key: allMatches[0].key }); + }, SCROLL_DELAY_MS); + }; + getSegmentStatisticsModal = (groupId: number) => { const visibleSegmentationLayer = this.props.visibleSegmentationLayer; if (visibleSegmentationLayer == null) { @@ -1833,6 +1866,7 @@ class SegmentsView extends React.Component { searchKey={(item) => getSegmentName(item)} provideShortcut targetId={segmentsTabId} + onSelectAllMatches={this.handleSelectAllMatchingSegments} > { }); }; - handleSearchSelect = (selectedElement: TreeOrTreeGroup) => { + maybeExpandParentGroups = (selectedElement: TreeOrTreeGroup) => { const { skeletonTracing } = this.props; if (!skeletonTracing) { return; @@ -682,6 +682,10 @@ class SkeletonTabView extends React.PureComponent { if (expandedGroups) { this.props.onSetExpandedGroups(expandedGroups); } + }; + + handleSearchSelect = (selectedElement: TreeOrTreeGroup) => { + this.maybeExpandParentGroups(selectedElement); if (selectedElement.type === GroupTypeEnum.TREE) { this.props.onSetActiveTree(selectedElement.id); } else { @@ -689,6 +693,15 @@ class SkeletonTabView extends React.PureComponent { } }; + handleSelectAllMatchingTrees = (matchingTrees: TreeOrTreeGroup[]) => { + 
this.props.onDeselectActiveGroup(); + const treeIds = matchingTrees.map((tree) => { + this.maybeExpandParentGroups(tree); + return tree.id; + }); + this.setState({ selectedTreeIds: treeIds }); + }; + getTreesComponents(sortBy: string) { if (!this.props.skeletonTracing) { return null; @@ -864,6 +877,7 @@ class SkeletonTabView extends React.PureComponent { searchKey="name" provideShortcut targetId={treeTabId} + onSelectAllMatches={this.handleSelectAllMatchingTrees} > diff --git a/frontend/javascripts/oxalis/view/right-border-tabs/tree_hierarchy_view.tsx b/frontend/javascripts/oxalis/view/right-border-tabs/tree_hierarchy_view.tsx index bb05294399f..29312811bf1 100644 --- a/frontend/javascripts/oxalis/view/right-border-tabs/tree_hierarchy_view.tsx +++ b/frontend/javascripts/oxalis/view/right-border-tabs/tree_hierarchy_view.tsx @@ -188,8 +188,7 @@ function TreeHierarchyView(props: Props) { } } - function onSelectGroupNode(node: TreeNode) { - const groupId = node.id; + function onSelectGroupNode(groupId: number) { const numberOfSelectedTrees = props.selectedTreeIds.length; if (numberOfSelectedTrees > 1) { @@ -254,11 +253,14 @@ function TreeHierarchyView(props: Props) { const checkedKeys = deepFlatFilter(UITreeData, (node) => node.isChecked).map((node) => node.key); // selectedKeys is mainly used for highlighting, i.e. blueish background color - const selectedKeys = props.selectedTreeIds.map((treeId) => - getNodeKey(GroupTypeEnum.TREE, treeId), - ); + const selectedKeys = props.activeGroupId + ? [getNodeKey(GroupTypeEnum.GROUP, props.activeGroupId)] + : props.selectedTreeIds.map((treeId) => getNodeKey(GroupTypeEnum.TREE, treeId)); - if (props.activeGroupId) selectedKeys.push(getNodeKey(GroupTypeEnum.GROUP, props.activeGroupId)); + useEffect( + () => treeRef.current?.scrollTo({ key: selectedKeys[0], align: "auto" }), + [selectedKeys[0]], + ); return ( <> @@ -297,7 +299,7 @@ function TreeHierarchyView(props: Props) { onSelect={(_selectedKeys, info: { node: TreeNode; nativeEvent: MouseEvent }) => info.node.type === GroupTypeEnum.TREE ? onSelectTreeNode(info.node, info.nativeEvent) - : onSelectGroupNode(info.node) + : onSelectGroupNode(info.node.id) } onDrop={onDrop} onCheck={onCheck} From d40dfbec7edc6d75d146bf1f79adafce638db22c Mon Sep 17 00:00:00 2001 From: Florian M Date: Tue, 29 Oct 2024 13:04:29 +0100 Subject: [PATCH 11/13] Route to register aiModel (#8127) * Route to register aiModel * update schema with new model categories. skip id taken check for the moment * bump schema version * add uniqueness checks for name + id. add sql evolution for enum values * no transaction block in evolution * delete incompatible models in reversion --- MIGRATIONS.unreleased.md | 1 + app/controllers/AiModelController.scala | 32 +++++++++++++++++++ app/models/aimodels/AiModel.scala | 7 ++++ app/models/aimodels/AiModelCategory.scala | 2 +- app/utils/sql/SQLDAO.scala | 2 +- conf/evolutions/123-more-model-categories.sql | 11 +++++++ .../reversions/123-more-model-categories.sql | 11 +++++++ conf/webknossos.latest.routes | 1 + tools/postgres/schema.sql | 4 +-- 9 files changed, 67 insertions(+), 4 deletions(-) create mode 100644 conf/evolutions/123-more-model-categories.sql create mode 100644 conf/evolutions/reversions/123-more-model-categories.sql diff --git a/MIGRATIONS.unreleased.md b/MIGRATIONS.unreleased.md index 0e50f676c9e..f6d640f469d 100644 --- a/MIGRATIONS.unreleased.md +++ b/MIGRATIONS.unreleased.md @@ -12,3 +12,4 @@ User-facing changes are documented in the [changelog](CHANGELOG.released.md). 
- [121-worker-name.sql](conf/evolutions/121-worker-name.sql) - [122-resolution-to-mag.sql](conf/evolutions/122-resolution-to-mag.sql) +- [123-more-model-categories.sql](conf/evolutions/123-more-model-categories.sql) diff --git a/app/controllers/AiModelController.scala b/app/controllers/AiModelController.scala index 3a332504239..e09d8a4f534 100644 --- a/app/controllers/AiModelController.scala +++ b/app/controllers/AiModelController.scala @@ -57,6 +57,16 @@ object UpdateAiModelParameters { implicit val jsonFormat: OFormat[UpdateAiModelParameters] = Json.format[UpdateAiModelParameters] } +case class RegisterAiModelParameters(id: ObjectId, // must be a valid MongoDB ObjectId + dataStoreName: String, + name: String, + comment: Option[String], + category: Option[AiModelCategory]) + +object RegisterAiModelParameters { + implicit val jsonFormat: OFormat[RegisterAiModelParameters] = Json.format[RegisterAiModelParameters] +} + class AiModelController @Inject()( aiModelDAO: AiModelDAO, aiModelService: AiModelService, @@ -209,6 +219,28 @@ class AiModelController @Inject()( } yield Ok(jsResult) } + def registerAiModel: Action[RegisterAiModelParameters] = + sil.SecuredAction.async(validateJson[RegisterAiModelParameters]) { implicit request => + for { + _ <- userService.assertIsSuperUser(request.identity) + _ <- dataStoreDAO.findOneByName(request.body.dataStoreName) ?~> "dataStore.notFound" + _ <- aiModelDAO.findOne(request.body.id).reverse ?~> "aiModel.id.taken" + _ <- aiModelDAO.findOneByName(request.body.name).reverse ?~> "aiModel.name.taken" + _ <- aiModelDAO.insertOne( + AiModel( + request.body.id, + _organization = request.identity._organization, + request.body.dataStoreName, + request.identity._id, + None, + List.empty, + request.body.name, + request.body.comment, + request.body.category + )) + } yield Ok + } + def deleteAiModel(aiModelId: String): Action[AnyContent] = sil.SecuredAction.async { implicit request => for { diff --git a/app/models/aimodels/AiModel.scala b/app/models/aimodels/AiModel.scala index 053913b90e3..5857f85e63d 100644 --- a/app/models/aimodels/AiModel.scala +++ b/app/models/aimodels/AiModel.scala @@ -144,4 +144,11 @@ class AiModelDAO @Inject()(sqlClient: SqlClient)(implicit ec: ExecutionContext) q"UPDATE webknossos.aiModels SET name = ${a.name}, comment = ${a.comment}, modified = ${a.modified} WHERE _id = ${a._id}".asUpdate) } yield () + def findOneByName(name: String)(implicit ctx: DBAccessContext): Fox[AiModel] = + for { + accessQuery <- readAccessQuery + r <- run(q"SELECT $columns FROM $existingCollectionName WHERE name = $name AND $accessQuery".as[AimodelsRow]) + parsed <- parseFirst(r, name) + } yield parsed + } diff --git a/app/models/aimodels/AiModelCategory.scala b/app/models/aimodels/AiModelCategory.scala index 70f556a09b8..8f1ab9f861d 100644 --- a/app/models/aimodels/AiModelCategory.scala +++ b/app/models/aimodels/AiModelCategory.scala @@ -4,5 +4,5 @@ import com.scalableminds.util.enumeration.ExtendedEnumeration object AiModelCategory extends ExtendedEnumeration { type AiModelCategory = Value - val em_neurons, em_nuclei = Value + val em_neurons, em_nuclei, em_synapses, em_neuron_types, em_cell_organelles = Value } diff --git a/app/utils/sql/SQLDAO.scala b/app/utils/sql/SQLDAO.scala index 8ef7548d1ef..2cf9d7fe40a 100644 --- a/app/utils/sql/SQLDAO.scala +++ b/app/utils/sql/SQLDAO.scala @@ -47,7 +47,7 @@ abstract class SQLDAO[C, R, X <: AbstractTable[R]] @Inject()(sqlClient: SqlClien case Some(r) => parse(r) ?~> ("sql: could not parse database row for object" + id) case 
_ => - Fox.failure("sql: could not find object " + id) + Fox.empty }.flatten @nowarn // suppress warning about unused implicit ctx, as it is used in subclasses diff --git a/conf/evolutions/123-more-model-categories.sql b/conf/evolutions/123-more-model-categories.sql new file mode 100644 index 00000000000..f2e2e0c30be --- /dev/null +++ b/conf/evolutions/123-more-model-categories.sql @@ -0,0 +1,11 @@ + +-- no transaction here, since ALTER TYPE ... ADD cannot run inside a transaction block + +do $$ begin ASSERT (select schemaVersion from webknossos.releaseInformation) = 122, 'Previous schema version mismatch'; end; $$ LANGUAGE plpgsql; + +ALTER TYPE webknossos.AI_MODEL_CATEGORY ADD VALUE 'em_synapses'; +ALTER TYPE webknossos.AI_MODEL_CATEGORY ADD VALUE 'em_neuron_types'; +ALTER TYPE webknossos.AI_MODEL_CATEGORY ADD VALUE 'em_cell_organelles'; + +UPDATE webknossos.releaseInformation SET schemaVersion = 123; + diff --git a/conf/evolutions/reversions/123-more-model-categories.sql b/conf/evolutions/reversions/123-more-model-categories.sql new file mode 100644 index 00000000000..f1a2d7f9293 --- /dev/null +++ b/conf/evolutions/reversions/123-more-model-categories.sql @@ -0,0 +1,11 @@ +START TRANSACTION; + +do $$ begin ASSERT (select schemaVersion from webknossos.releaseInformation) = 123, 'Previous schema version mismatch'; end; $$ LANGUAGE plpgsql; + +-- removing enum values is not supported in postgres, see https://www.postgresql.org/docs/current/datatype-enum.html#DATATYPE-ENUM-IMPLEMENTATION-DETAILS + +UPDATE webknossos.aiModels SET isDeleted = TRUE WHERE category IN ('em_synapses', 'em_neuron_types', 'em_cell_organelles'); + +UPDATE webknossos.releaseInformation SET schemaVersion = 122; + +COMMIT TRANSACTION; diff --git a/conf/webknossos.latest.routes b/conf/webknossos.latest.routes index a3ffbbbb158..470c28e1271 100644 --- a/conf/webknossos.latest.routes +++ b/conf/webknossos.latest.routes @@ -283,6 +283,7 @@ POST /aiModels/inferences/runInference GET /aiModels/inferences/:id controllers.AiModelController.readAiInferenceInfo(id: String) GET /aiModels/inferences controllers.AiModelController.listAiInferences GET /aiModels controllers.AiModelController.listAiModels +POST /aiModels/register controllers.AiModelController.registerAiModel GET /aiModels/:id controllers.AiModelController.readAiModelInfo(id: String) PUT /aiModels/:id controllers.AiModelController.updateAiModelInfo(id: String) DELETE /aiModels/:id controllers.AiModelController.deleteAiModel(id: String) diff --git a/tools/postgres/schema.sql b/tools/postgres/schema.sql index cbb40d65f16..20453b54ba7 100644 --- a/tools/postgres/schema.sql +++ b/tools/postgres/schema.sql @@ -20,7 +20,7 @@ CREATE TABLE webknossos.releaseInformation ( schemaVersion BIGINT NOT NULL ); -INSERT INTO webknossos.releaseInformation(schemaVersion) values(122); +INSERT INTO webknossos.releaseInformation(schemaVersion) values(123); COMMIT TRANSACTION; @@ -546,7 +546,7 @@ CREATE TABLE webknossos.emailVerificationKeys( isUsed BOOLEAN NOT NULL DEFAULT false ); -CREATE TYPE webknossos.AI_MODEL_CATEGORY AS ENUM ('em_neurons', 'em_nuclei'); +CREATE TYPE webknossos.AI_MODEL_CATEGORY AS ENUM ('em_neurons', 'em_nuclei', 'em_synapses', 'em_neuron_types', 'em_cell_organelles'); CREATE TABLE webknossos.aiModels( _id CHAR(24) PRIMARY KEY, From 421368485d2b065c29951aede4d005289c5df5fd Mon Sep 17 00:00:00 2001 From: Florian M Date: Wed, 30 Oct 2024 12:54:31 +0100 Subject: [PATCH 12/13] Allow downloading tasks of teams you are not in (#8155) * Allow downloading tasks of teams you 
are not in * changelog * fix error msg --- CHANGELOG.unreleased.md | 1 + app/controllers/AnnotationIOController.scala | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.unreleased.md b/CHANGELOG.unreleased.md index 142e985933d..51833ebe1d0 100644 --- a/CHANGELOG.unreleased.md +++ b/CHANGELOG.unreleased.md @@ -35,6 +35,7 @@ For upgrade instructions, please check the [migration guide](MIGRATIONS.released - Fixed sorting of mags in outbound zarr streaming. [#8125](https://github.com/scalableminds/webknossos/pull/8125) - Fixed a bug where you could not create annotations for public datasets of other organizations. [#8107](https://github.com/scalableminds/webknossos/pull/8107) - Users without edit permissions to a dataset can no longer delete sharing tokens via the API. [#8083](https://github.com/scalableminds/webknossos/issues/8083) +- Fixed downloading task annotations of teams you are not in, when accessing directly via URI. [#8155](https://github.com/scalableminds/webknossos/pull/8155) ### Removed diff --git a/app/controllers/AnnotationIOController.scala b/app/controllers/AnnotationIOController.scala index 3fffaa23121..8f183d84494 100755 --- a/app/controllers/AnnotationIOController.scala +++ b/app/controllers/AnnotationIOController.scala @@ -457,7 +457,7 @@ class AnnotationIOController @Inject()( tracingStoreClient.getSkeletonTracing(skeletonAnnotationLayer, skeletonVersion) } ?~> "annotation.download.fetchSkeletonLayer.failed" user <- userService.findOneCached(annotation._user)(GlobalAccessContext) ?~> "annotation.download.findUser.failed" - taskOpt <- Fox.runOptional(annotation._task)(taskDAO.findOne) + taskOpt <- Fox.runOptional(annotation._task)(taskDAO.findOne(_)(GlobalAccessContext)) ?~> "task.notFound" nmlStream = nmlWriter.toNmlStream( name, fetchedSkeletonLayers ::: fetchedVolumeLayers, From f937be0f4df960d582d8cc7eefdc33c58b0856f7 Mon Sep 17 00:00:00 2001 From: frcroth Date: Mon, 4 Nov 2024 09:08:29 +0100 Subject: [PATCH 13/13] Use mapping from viewconfig when generating thumbnail (#8157) --- CHANGELOG.unreleased.md | 1 + app/models/dataset/ThumbnailService.scala | 49 +++++++++++++++-------- 2 files changed, 33 insertions(+), 17 deletions(-) diff --git a/CHANGELOG.unreleased.md b/CHANGELOG.unreleased.md index 51833ebe1d0..4e0cb15ae5d 100644 --- a/CHANGELOG.unreleased.md +++ b/CHANGELOG.unreleased.md @@ -25,6 +25,7 @@ For upgrade instructions, please check the [migration guide](MIGRATIONS.released - Admins can now see and cancel all jobs. The owner of the job is shown in the job list. [#8112](https://github.com/scalableminds/webknossos/pull/8112) - Migrated nightly screenshot tests from CircleCI to GitHub actions. [#8134](https://github.com/scalableminds/webknossos/pull/8134) - Migrated nightly screenshot tests for wk.org from CircleCI to GitHub actions. [#8135](https://github.com/scalableminds/webknossos/pull/8135) +- Thumbnails for datasets now use the selected mapping from the view configuration if available. [#8157](https://github.com/scalableminds/webknossos/pull/8157) ### Fixed - Fixed a bug during dataset upload in case the configured `datastore.baseFolder` is an absolute path. 
[#8098](https://github.com/scalableminds/webknossos/pull/8098) [#8103](https://github.com/scalableminds/webknossos/pull/8103) diff --git a/app/models/dataset/ThumbnailService.scala b/app/models/dataset/ThumbnailService.scala index 88e8385c0da..ff4e4e5ecc3 100644 --- a/app/models/dataset/ThumbnailService.scala +++ b/app/models/dataset/ThumbnailService.scala @@ -14,7 +14,7 @@ import models.configuration.DatasetConfigurationService import net.liftweb.common.Full import play.api.http.Status.NOT_FOUND import play.api.i18n.{Messages, MessagesProvider} -import play.api.libs.json.JsArray +import play.api.libs.json.{JsArray, JsObject} import utils.ObjectId import utils.sql.{SimpleSQLDAO, SqlClient} @@ -74,39 +74,41 @@ class ThumbnailService @Inject()(datasetService: DatasetService, viewConfiguration <- datasetConfigurationService.getDatasetViewConfigurationForDataset(List.empty, datasetName, organizationId)(ctx) - (mag1BoundingBox, mag, intensityRangeOpt, colorSettingsOpt) = selectParameters(viewConfiguration, - usableDataSource, - layerName, - layer, - width, - height) + (mag1BoundingBox, mag, intensityRangeOpt, colorSettingsOpt, mapping) = selectParameters(viewConfiguration, + usableDataSource, + layerName, + layer, + width, + height, + mappingName) client <- datasetService.clientFor(dataset) image <- client.getDataLayerThumbnail(organizationId, dataset, layerName, mag1BoundingBox, mag, - mappingName, + mapping, intensityRangeOpt, colorSettingsOpt) _ <- thumbnailDAO.upsertThumbnail(dataset._id, layerName, width, height, - mappingName, + mapping, image, jpegMimeType, mag, mag1BoundingBox) } yield image - private def selectParameters( - viewConfiguration: DatasetViewConfiguration, - usableDataSource: GenericDataSource[DataLayerLike], - layerName: String, - layer: DataLayerLike, - targetMagWidth: Int, - targetMagHeigt: Int): (BoundingBox, Vec3Int, Option[(Double, Double)], Option[ThumbnailColorSettings]) = { + private def selectParameters(viewConfiguration: DatasetViewConfiguration, + usableDataSource: GenericDataSource[DataLayerLike], + layerName: String, + layer: DataLayerLike, + targetMagWidth: Int, + targetMagHeigt: Int, + mappingName: Option[String]) + : (BoundingBox, Vec3Int, Option[(Double, Double)], Option[ThumbnailColorSettings], Option[String]) = { val configuredCenterOpt = viewConfiguration.get("position").flatMap(jsValue => JsonHelper.jsResultToOpt(jsValue.validate[Vec3Int])) val centerOpt = @@ -124,7 +126,13 @@ class ThumbnailService @Inject()(datasetService: DatasetService, val x = center.x - mag1Width / 2 val y = center.y - mag1Height / 2 val z = center.z - (BoundingBox(Vec3Int(x, y, z), mag1Width, mag1Height, 1), mag, intensityRangeOpt, colorSettingsOpt) + + val mappingNameResult = mappingName.orElse(readMappingName(viewConfiguration, layerName)) + (BoundingBox(Vec3Int(x, y, z), mag1Width, mag1Height, 1), + mag, + intensityRangeOpt, + colorSettingsOpt, + mappingNameResult) } private def readIntensityRange(viewConfiguration: DatasetViewConfiguration, @@ -147,6 +155,13 @@ class ThumbnailService @Inject()(datasetService: DatasetService, b <- colorArray(2).validate[Int].asOpt } yield ThumbnailColorSettings(Color(r / 255d, g / 255d, b / 255d, 0), isInverted) + private def readMappingName(viewConfiguration: DatasetViewConfiguration, layerName: String): Option[String] = + for { + layersJsValue <- viewConfiguration.get("layers") + mapping <- (layersJsValue \ layerName \ "mapping").validate[JsObject].asOpt + mappingName <- mapping("name").validate[String].asOpt + } yield mappingName + private 
def magForZoom(dataLayer: DataLayerLike, zoom: Double): Vec3Int = dataLayer.resolutions.minBy(r => Math.abs(r.maxDim - zoom))
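
For reference, the `readMappingName` helper introduced in the thumbnail patch above reads the mapping along the path `layers -> <layerName> -> mapping -> name` of the dataset view configuration. Below is a minimal Play-JSON sketch of that same traversal; the layer name `segmentation` and mapping name `agglomerate_view_30` are made-up placeholders for illustration, not values taken from the patch:

```scala
import play.api.libs.json.{JsObject, JsValue, Json}

// Hypothetical view configuration; only the "name" field is actually read.
val viewConfiguration: Map[String, JsValue] = Map(
  "layers" -> Json.obj(
    "segmentation" -> Json.obj(
      "mapping" -> Json.obj("name" -> "agglomerate_view_30")
    )
  )
)

// Same traversal as readMappingName: layers -> <layerName> -> mapping -> name.
val mappingName: Option[String] = for {
  layersJsValue <- viewConfiguration.get("layers")
  mapping <- (layersJsValue \ "segmentation" \ "mapping").validate[JsObject].asOpt
  name <- mapping("name").validate[String].asOpt
} yield name
// mappingName == Some("agglomerate_view_30")
```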