diff --git a/.github/workflows/build_test_deploy.yml b/.github/workflows/build_test_deploy.yml new file mode 100644 index 00000000000..599e2b22e00 --- /dev/null +++ b/.github/workflows/build_test_deploy.yml @@ -0,0 +1,13 @@ +name: CI Pipeline + +on: + workflow_dispatch: + +jobs: + foo: + runs-on: ubuntu-20.04 + steps: + - name: Checkout code + uses: actions/checkout@v3 + with: + fetch-depth: 5 \ No newline at end of file diff --git a/CHANGELOG.unreleased.md b/CHANGELOG.unreleased.md index 516413a4895..4e0cb15ae5d 100644 --- a/CHANGELOG.unreleased.md +++ b/CHANGELOG.unreleased.md @@ -16,6 +16,7 @@ For upgrade instructions, please check the [migration guide](MIGRATIONS.released - Most sliders have been improved: Wheeling above a slider now changes its value and double-clicking its knob resets it to its default value. [#8095](https://github.com/scalableminds/webknossos/pull/8095) - It is now possible to search for unnamed segments with the full default name instead of only their id. [#8133](https://github.com/scalableminds/webknossos/pull/8133) - Increased loading speed for precomputed meshes. [#8110](https://github.com/scalableminds/webknossos/pull/8110) +- Added a button to the search popover in the skeleton and segment tab to select all matching non-group results. [#8123](https://github.com/scalableminds/webknossos/pull/8123) - Unified wording in UI and code: “Magnification”/“mag” is now used in place of “Resolution“ most of the time, compare [https://docs.webknossos.org/webknossos/terminology.html](terminology document). [#8111](https://github.com/scalableminds/webknossos/pull/8111) - Added support for adding remote OME-Zarr NGFF version 0.5 datasets. [#8122](https://github.com/scalableminds/webknossos/pull/8122) @@ -24,14 +25,18 @@ For upgrade instructions, please check the [migration guide](MIGRATIONS.released - Admins can now see and cancel all jobs. The owner of the job is shown in the job list. 
[#8112](https://github.com/scalableminds/webknossos/pull/8112) - Migrated nightly screenshot tests from CircleCI to GitHub actions. [#8134](https://github.com/scalableminds/webknossos/pull/8134) - Migrated nightly screenshot tests for wk.org from CircleCI to GitHub actions. [#8135](https://github.com/scalableminds/webknossos/pull/8135) +- Thumbnails for datasets now use the selected mapping from the view configuration if available. [#8157](https://github.com/scalableminds/webknossos/pull/8157) ### Fixed - Fixed a bug during dataset upload in case the configured `datastore.baseFolder` is an absolute path. [#8098](https://github.com/scalableminds/webknossos/pull/8098) [#8103](https://github.com/scalableminds/webknossos/pull/8103) +- Fixed bbox export menu item. [#8152](https://github.com/scalableminds/webknossos/pull/8152) +- When trying to save an annotation opened via a link including a sharing token, the token is automatically discarded in case it is insufficient for update actions but the user's token is. [#8139](https://github.com/scalableminds/webknossos/pull/8139) - Fixed that the skeleton search did not automatically expand groups that contained the selected tree [#8129](https://github.com/scalableminds/webknossos/pull/8129) - Fixed a bug that zarr streaming version 3 returned the shape of mag (1, 1, 1) / the finest mag for all mags. [#8116](https://github.com/scalableminds/webknossos/pull/8116) - Fixed sorting of mags in outbound zarr streaming. [#8125](https://github.com/scalableminds/webknossos/pull/8125) - Fixed a bug where you could not create annotations for public datasets of other organizations. [#8107](https://github.com/scalableminds/webknossos/pull/8107) - Users without edit permissions to a dataset can no longer delete sharing tokens via the API. [#8083](https://github.com/scalableminds/webknossos/issues/8083) +- Fixed downloading task annotations of teams you are not in, when accessing directly via URI. 
[#8155](https://github.com/scalableminds/webknossos/pull/8155) ### Removed diff --git a/MIGRATIONS.unreleased.md b/MIGRATIONS.unreleased.md index 0e50f676c9e..f6d640f469d 100644 --- a/MIGRATIONS.unreleased.md +++ b/MIGRATIONS.unreleased.md @@ -12,3 +12,4 @@ User-facing changes are documented in the [changelog](CHANGELOG.released.md). - [121-worker-name.sql](conf/evolutions/121-worker-name.sql) - [122-resolution-to-mag.sql](conf/evolutions/122-resolution-to-mag.sql) +- [123-more-model-categories.sql](conf/evolutions/123-more-model-categories.sql) diff --git a/app/controllers/AiModelController.scala b/app/controllers/AiModelController.scala index 3a332504239..e09d8a4f534 100644 --- a/app/controllers/AiModelController.scala +++ b/app/controllers/AiModelController.scala @@ -57,6 +57,16 @@ object UpdateAiModelParameters { implicit val jsonFormat: OFormat[UpdateAiModelParameters] = Json.format[UpdateAiModelParameters] } +case class RegisterAiModelParameters(id: ObjectId, // must be a valid MongoDB ObjectId + dataStoreName: String, + name: String, + comment: Option[String], + category: Option[AiModelCategory]) + +object RegisterAiModelParameters { + implicit val jsonFormat: OFormat[RegisterAiModelParameters] = Json.format[RegisterAiModelParameters] +} + class AiModelController @Inject()( aiModelDAO: AiModelDAO, aiModelService: AiModelService, @@ -209,6 +219,28 @@ class AiModelController @Inject()( } yield Ok(jsResult) } + def registerAiModel: Action[RegisterAiModelParameters] = + sil.SecuredAction.async(validateJson[RegisterAiModelParameters]) { implicit request => + for { + _ <- userService.assertIsSuperUser(request.identity) + _ <- dataStoreDAO.findOneByName(request.body.dataStoreName) ?~> "dataStore.notFound" + _ <- aiModelDAO.findOne(request.body.id).reverse ?~> "aiModel.id.taken" + _ <- aiModelDAO.findOneByName(request.body.name).reverse ?~> "aiModel.name.taken" + _ <- aiModelDAO.insertOne( + AiModel( + request.body.id, + _organization = 
request.identity._organization, + request.body.dataStoreName, + request.identity._id, + None, + List.empty, + request.body.name, + request.body.comment, + request.body.category + )) + } yield Ok + } + def deleteAiModel(aiModelId: String): Action[AnyContent] = sil.SecuredAction.async { implicit request => for { diff --git a/app/controllers/AnnotationIOController.scala b/app/controllers/AnnotationIOController.scala index 3fffaa23121..8f183d84494 100755 --- a/app/controllers/AnnotationIOController.scala +++ b/app/controllers/AnnotationIOController.scala @@ -457,7 +457,7 @@ class AnnotationIOController @Inject()( tracingStoreClient.getSkeletonTracing(skeletonAnnotationLayer, skeletonVersion) } ?~> "annotation.download.fetchSkeletonLayer.failed" user <- userService.findOneCached(annotation._user)(GlobalAccessContext) ?~> "annotation.download.findUser.failed" - taskOpt <- Fox.runOptional(annotation._task)(taskDAO.findOne) + taskOpt <- Fox.runOptional(annotation._task)(taskDAO.findOne(_)(GlobalAccessContext)) ?~> "task.notFound" nmlStream = nmlWriter.toNmlStream( name, fetchedSkeletonLayers ::: fetchedVolumeLayers, diff --git a/app/models/aimodels/AiModel.scala b/app/models/aimodels/AiModel.scala index 053913b90e3..5857f85e63d 100644 --- a/app/models/aimodels/AiModel.scala +++ b/app/models/aimodels/AiModel.scala @@ -144,4 +144,11 @@ class AiModelDAO @Inject()(sqlClient: SqlClient)(implicit ec: ExecutionContext) q"UPDATE webknossos.aiModels SET name = ${a.name}, comment = ${a.comment}, modified = ${a.modified} WHERE _id = ${a._id}".asUpdate) } yield () + def findOneByName(name: String)(implicit ctx: DBAccessContext): Fox[AiModel] = + for { + accessQuery <- readAccessQuery + r <- run(q"SELECT $columns FROM $existingCollectionName WHERE name = $name AND $accessQuery".as[AimodelsRow]) + parsed <- parseFirst(r, name) + } yield parsed + } diff --git a/app/models/aimodels/AiModelCategory.scala b/app/models/aimodels/AiModelCategory.scala index 70f556a09b8..8f1ab9f861d 100644 --- 
a/app/models/aimodels/AiModelCategory.scala +++ b/app/models/aimodels/AiModelCategory.scala @@ -4,5 +4,5 @@ import com.scalableminds.util.enumeration.ExtendedEnumeration object AiModelCategory extends ExtendedEnumeration { type AiModelCategory = Value - val em_neurons, em_nuclei = Value + val em_neurons, em_nuclei, em_synapses, em_neuron_types, em_cell_organelles = Value } diff --git a/app/models/dataset/ThumbnailService.scala b/app/models/dataset/ThumbnailService.scala index 88e8385c0da..ff4e4e5ecc3 100644 --- a/app/models/dataset/ThumbnailService.scala +++ b/app/models/dataset/ThumbnailService.scala @@ -14,7 +14,7 @@ import models.configuration.DatasetConfigurationService import net.liftweb.common.Full import play.api.http.Status.NOT_FOUND import play.api.i18n.{Messages, MessagesProvider} -import play.api.libs.json.JsArray +import play.api.libs.json.{JsArray, JsObject} import utils.ObjectId import utils.sql.{SimpleSQLDAO, SqlClient} @@ -74,39 +74,41 @@ class ThumbnailService @Inject()(datasetService: DatasetService, viewConfiguration <- datasetConfigurationService.getDatasetViewConfigurationForDataset(List.empty, datasetName, organizationId)(ctx) - (mag1BoundingBox, mag, intensityRangeOpt, colorSettingsOpt) = selectParameters(viewConfiguration, - usableDataSource, - layerName, - layer, - width, - height) + (mag1BoundingBox, mag, intensityRangeOpt, colorSettingsOpt, mapping) = selectParameters(viewConfiguration, + usableDataSource, + layerName, + layer, + width, + height, + mappingName) client <- datasetService.clientFor(dataset) image <- client.getDataLayerThumbnail(organizationId, dataset, layerName, mag1BoundingBox, mag, - mappingName, + mapping, intensityRangeOpt, colorSettingsOpt) _ <- thumbnailDAO.upsertThumbnail(dataset._id, layerName, width, height, - mappingName, + mapping, image, jpegMimeType, mag, mag1BoundingBox) } yield image - private def selectParameters( - viewConfiguration: DatasetViewConfiguration, - usableDataSource: 
GenericDataSource[DataLayerLike], - layerName: String, - layer: DataLayerLike, - targetMagWidth: Int, - targetMagHeigt: Int): (BoundingBox, Vec3Int, Option[(Double, Double)], Option[ThumbnailColorSettings]) = { + private def selectParameters(viewConfiguration: DatasetViewConfiguration, + usableDataSource: GenericDataSource[DataLayerLike], + layerName: String, + layer: DataLayerLike, + targetMagWidth: Int, + targetMagHeigt: Int, + mappingName: Option[String]) + : (BoundingBox, Vec3Int, Option[(Double, Double)], Option[ThumbnailColorSettings], Option[String]) = { val configuredCenterOpt = viewConfiguration.get("position").flatMap(jsValue => JsonHelper.jsResultToOpt(jsValue.validate[Vec3Int])) val centerOpt = @@ -124,7 +126,13 @@ class ThumbnailService @Inject()(datasetService: DatasetService, val x = center.x - mag1Width / 2 val y = center.y - mag1Height / 2 val z = center.z - (BoundingBox(Vec3Int(x, y, z), mag1Width, mag1Height, 1), mag, intensityRangeOpt, colorSettingsOpt) + + val mappingNameResult = mappingName.orElse(readMappingName(viewConfiguration, layerName)) + (BoundingBox(Vec3Int(x, y, z), mag1Width, mag1Height, 1), + mag, + intensityRangeOpt, + colorSettingsOpt, + mappingNameResult) } private def readIntensityRange(viewConfiguration: DatasetViewConfiguration, @@ -147,6 +155,13 @@ class ThumbnailService @Inject()(datasetService: DatasetService, b <- colorArray(2).validate[Int].asOpt } yield ThumbnailColorSettings(Color(r / 255d, g / 255d, b / 255d, 0), isInverted) + private def readMappingName(viewConfiguration: DatasetViewConfiguration, layerName: String): Option[String] = + for { + layersJsValue <- viewConfiguration.get("layers") + mapping <- (layersJsValue \ layerName \ "mapping").validate[JsObject].asOpt + mappingName <- mapping("name").validate[String].asOpt + } yield mappingName + private def magForZoom(dataLayer: DataLayerLike, zoom: Double): Vec3Int = dataLayer.resolutions.minBy(r => Math.abs(r.maxDim - zoom)) diff --git a/app/utils/sql/SQLDAO.scala 
b/app/utils/sql/SQLDAO.scala index 8ef7548d1ef..2cf9d7fe40a 100644 --- a/app/utils/sql/SQLDAO.scala +++ b/app/utils/sql/SQLDAO.scala @@ -47,7 +47,7 @@ abstract class SQLDAO[C, R, X <: AbstractTable[R]] @Inject()(sqlClient: SqlClien case Some(r) => parse(r) ?~> ("sql: could not parse database row for object" + id) case _ => - Fox.failure("sql: could not find object " + id) + Fox.empty }.flatten @nowarn // suppress warning about unused implicit ctx, as it is used in subclasses diff --git a/app/views/mail/jobFailedUploadConvert.scala.html b/app/views/mail/jobFailedUploadConvert.scala.html index 80c3d557e33..4c459fc303d 100644 --- a/app/views/mail/jobFailedUploadConvert.scala.html +++ b/app/views/mail/jobFailedUploadConvert.scala.html @@ -11,9 +11,9 @@

Here are some tips for uploading and converting datasets:

diff --git a/app/views/mail/jobSuccessfulSegmentation.scala.html b/app/views/mail/jobSuccessfulSegmentation.scala.html index 560d974fa64..ec5d71c1c4d 100644 --- a/app/views/mail/jobSuccessfulSegmentation.scala.html +++ b/app/views/mail/jobSuccessfulSegmentation.scala.html @@ -38,7 +38,7 @@

- Do you want to make corrections to the automated segmentation? Use the easy-to-use, built-in proof-reading tools in WEBKNOSSOS (requires Power plan). + Do you want to make corrections to the automated segmentation? Use the easy-to-use, built-in proof-reading tools in WEBKNOSSOS (requires Power plan).

3 and a chunk size of (64,64,64) voxel. +A maximum of 4 parallel jobs will be used to parallelize the conversion, compression and downsampling. +Using the `--data-format zarr3` argument will produce sharded Zarr v3 datasets. Read the full documentation at [WEBKNOSSOS CLI](https://docs.webknossos.org/cli). @@ -170,3 +175,5 @@ To get the best streaming performance for Zarr datasets consider the following s - Use chunk sizes of 32 - 128 voxels^3 - Enable sharding (only available in Zarr 3+) +- Use 3D downsampling + diff --git a/docs/volume_annotation/pen_tablets.md b/docs/volume_annotation/pen_tablets.md index dbe9402708a..1238ae15934 100644 --- a/docs/volume_annotation/pen_tablets.md +++ b/docs/volume_annotation/pen_tablets.md @@ -5,7 +5,7 @@ Beyond the mouse and keyboard WEBKNOSSOS is great for annotating datasets with a ## Using Wacom/Pen tablets Using pen tablet can significantly boost your annotation productivity, especially if you set it up correctly with WEBKNOSSOS. -![youtube-video](https://www.youtube.com/embed/xk0gqsVx494) +![youtube-video](https://www.youtube.com/embed/qCrqswDwmi8) To streamline your workflow, program your tablet and pen buttons to match the WEBKNOSSOS shortcuts. By doing so, you can focus on your pen without the need of a mouse or keyboard. Here is an example configuration using a Wacom tablet and the Wacom driver software: @@ -26,7 +26,7 @@ You can find the full list for keyboard shortcuts in the [documentation](../ui/k ### Annotating with Wacom Pens Now, let’s dive into the annotation process! In this example, we begin by quick-selecting a cell. -![youtube-video](https://www.youtube.com/embed/xk0gqsVx494?start=46) +![youtube-video](https://www.youtube.com/embed/qCrqswDwmi8?start=37) If the annotation isn’t precise enough, we can easily switch to the eraser tool (middle left button) and erase a corner. Selecting the brush tool is as simple as pressing the left button, allowing us to add small surfaces to the annotation. 
When ready, pressing the right button creates a new segment, and we can repeat the process for other cells. diff --git a/frontend/javascripts/admin/admin_rest_api.ts b/frontend/javascripts/admin/admin_rest_api.ts index 5af922c7a3a..6c3b4ef8e78 100644 --- a/frontend/javascripts/admin/admin_rest_api.ts +++ b/frontend/javascripts/admin/admin_rest_api.ts @@ -2035,7 +2035,7 @@ export function computeAdHocMesh( }, }, ); - const neighbors = Utils.parseMaybe(headers.neighbors) || []; + const neighbors = (Utils.parseMaybe(headers.neighbors) as number[] | null) || []; return { buffer, neighbors, diff --git a/frontend/javascripts/admin/api/token.ts b/frontend/javascripts/admin/api/token.ts index 54f730d40e7..3a430d55756 100644 --- a/frontend/javascripts/admin/api/token.ts +++ b/frontend/javascripts/admin/api/token.ts @@ -2,9 +2,12 @@ import { location } from "libs/window"; import Request from "libs/request"; import * as Utils from "libs/utils"; +const MAX_TOKEN_RETRY_ATTEMPTS = 3; + let tokenPromise: Promise; let tokenRequestPromise: Promise | null; +let shouldUseURLToken: boolean = true; function requestUserToken(): Promise { if (tokenRequestPromise) { @@ -33,22 +36,36 @@ export function getSharingTokenFromUrlParameters(): string | null | undefined { return null; } -export function doWithToken(fn: (token: string) => Promise, tries: number = 1): Promise { - const sharingToken = getSharingTokenFromUrlParameters(); +export async function doWithToken( + fn: (token: string) => Promise, + tries: number = 1, + useURLTokenIfAvailable: boolean = true, +): Promise { + let token = + useURLTokenIfAvailable && shouldUseURLToken ? getSharingTokenFromUrlParameters() : null; - if (sharingToken != null) { - return fn(sharingToken); + if (token == null) { + tokenPromise = tokenPromise == null ? 
requestUserToken() : tokenPromise; + } else { + tokenPromise = Promise.resolve(token); } - if (!tokenPromise) tokenPromise = requestUserToken(); - return tokenPromise.then(fn).catch((error) => { + return tokenPromise.then(fn).catch(async (error) => { if (error.status === 403) { - console.warn("Token expired. Requesting new token..."); + console.warn( + `Token expired (attempt ${tries}/${MAX_TOKEN_RETRY_ATTEMPTS}). Requesting new token...`, + ); tokenPromise = requestUserToken(); // If three new tokens did not fix the 403, abort, otherwise we'll get into an endless loop here - if (tries < 3) { - return doWithToken(fn, tries + 1); + if (tries < MAX_TOKEN_RETRY_ATTEMPTS) { + // If using the url sharing token failed, we try the user specific token instead. + const result = await doWithToken(fn, tries + 1, false); + // Upon successful retry with own token, discard the url token. + if (useURLTokenIfAvailable) { + shouldUseURLToken = false; + } + return result; } } diff --git a/frontend/javascripts/admin/auth/auth_token_view.tsx b/frontend/javascripts/admin/auth/auth_token_view.tsx index 3286c872689..821fb40b11e 100644 --- a/frontend/javascripts/admin/auth/auth_token_view.tsx +++ b/frontend/javascripts/admin/auth/auth_token_view.tsx @@ -102,14 +102,12 @@ function AuthTokenView() {

An Auth Token is a series of symbols that serves to authenticate you. It is used in - communication with the backend API and sent with every request to verify your identity. + communication with the Python API and sent with every request to verify your identity.

You should revoke it if somebody else has acquired your token or you have the suspicion this has happened.{" "} - - Read more - + Read more

diff --git a/frontend/javascripts/admin/dataset/dataset_add_view.tsx b/frontend/javascripts/admin/dataset/dataset_add_view.tsx index c1513f1f7c7..c7677831751 100644 --- a/frontend/javascripts/admin/dataset/dataset_add_view.tsx +++ b/frontend/javascripts/admin/dataset/dataset_add_view.tsx @@ -183,7 +183,7 @@ const alignBanner = ( />

diff --git a/frontend/javascripts/admin/dataset/dataset_upload_view.tsx b/frontend/javascripts/admin/dataset/dataset_upload_view.tsx index b4f235e9110..653d2ff1249 100644 --- a/frontend/javascripts/admin/dataset/dataset_upload_view.tsx +++ b/frontend/javascripts/admin/dataset/dataset_upload_view.tsx @@ -1243,7 +1243,7 @@ function FileUploadArea({ e.stopPropagation()} @@ -1265,7 +1265,7 @@ function FileUploadArea({ e.stopPropagation()} @@ -1287,7 +1287,7 @@ function FileUploadArea({ e.stopPropagation()} @@ -1309,7 +1309,7 @@ function FileUploadArea({ Have a look at{" "} e.stopPropagation()} > our documentation diff --git a/frontend/javascripts/admin/job/job_list_view.tsx b/frontend/javascripts/admin/job/job_list_view.tsx index 17c32a2a56a..bfa069ae7c3 100644 --- a/frontend/javascripts/admin/job/job_list_view.tsx +++ b/frontend/javascripts/admin/job/job_list_view.tsx @@ -366,7 +366,7 @@ function JobListView() { Some actions such as dataset conversions or export as Tiff files require some time for processing in the background. diff --git a/frontend/javascripts/admin/onboarding.tsx b/frontend/javascripts/admin/onboarding.tsx index 09fcb9b3e9a..bd634a6efb9 100644 --- a/frontend/javascripts/admin/onboarding.tsx +++ b/frontend/javascripts/admin/onboarding.tsx @@ -548,7 +548,7 @@ class OnboardingView extends React.PureComponent { height={250} > You can also copy it directly onto the hosting server.{" "} - + Learn more about supported data formats. @@ -583,8 +583,8 @@ class OnboardingView extends React.PureComponent { }> Upload more of your datasets.{" "} - Learn more about - the formats and upload processes WEBKNOSSOS supports. + Learn more about the + formats and upload processes WEBKNOSSOS supports. 
}> diff --git a/frontend/javascripts/admin/task/task_list_view.tsx b/frontend/javascripts/admin/task/task_list_view.tsx index c110b6c72cc..c80ec113c4b 100644 --- a/frontend/javascripts/admin/task/task_list_view.tsx +++ b/frontend/javascripts/admin/task/task_list_view.tsx @@ -222,7 +222,7 @@ function TaskListView({ initialFieldValues }: Props) {

To learn more about the task system in WEBKNOSSOS,{" "} diff --git a/frontend/javascripts/admin/user/permissions_and_teams_modal_view.tsx b/frontend/javascripts/admin/user/permissions_and_teams_modal_view.tsx index ebd803d0dd2..33adc70da4f 100644 --- a/frontend/javascripts/admin/user/permissions_and_teams_modal_view.tsx +++ b/frontend/javascripts/admin/user/permissions_and_teams_modal_view.tsx @@ -243,7 +243,7 @@ function PermissionsAndTeamsModalView({

Organization Permissions{" "} diff --git a/frontend/javascripts/dashboard/dashboard_task_list_view.tsx b/frontend/javascripts/dashboard/dashboard_task_list_view.tsx index 899a5957c87..cec4d65b369 100644 --- a/frontend/javascripts/dashboard/dashboard_task_list_view.tsx +++ b/frontend/javascripts/dashboard/dashboard_task_list_view.tsx @@ -414,7 +414,7 @@ class DashboardTaskListView extends React.PureComponent { as part of the WEBKNOSSOS project management.{" "}

diff --git a/frontend/javascripts/dashboard/dataset_folder_view.tsx b/frontend/javascripts/dashboard/dataset_folder_view.tsx index f3eaf659fc9..157da42d1fb 100644 --- a/frontend/javascripts/dashboard/dataset_folder_view.tsx +++ b/frontend/javascripts/dashboard/dataset_folder_view.tsx @@ -134,7 +134,7 @@ function DatasetFolderViewInner(props: Props) {

WEBKNOSSOS supports a variety of (remote){" "} diff --git a/frontend/javascripts/libs/request.ts b/frontend/javascripts/libs/request.ts index 1b1271e4846..25bf31657e5 100644 --- a/frontend/javascripts/libs/request.ts +++ b/frontend/javascripts/libs/request.ts @@ -311,7 +311,11 @@ class Request { ...message, key: json.status.toString(), })); - if (showErrorToast) Toast.messages(messages); + if (showErrorToast) { + Toast.messages(messages); // Note: Toast.error internally logs to console + } else { + console.error(messages); + } // Check whether the error chain mentions an url which belongs // to a datastore. Then, ping the datastore pingMentionedDataStores(text); @@ -319,7 +323,11 @@ class Request { /* eslint-disable-next-line prefer-promise-reject-errors */ return Promise.reject({ ...json, url: requestedUrl }); } catch (_jsonError) { - if (showErrorToast) Toast.error(text); + if (showErrorToast) { + Toast.error(text); // Note: Toast.error internally logs to console + } else { + console.error(`Request failed for ${requestedUrl}:`, text); + } /* eslint-disable-next-line prefer-promise-reject-errors */ return Promise.reject({ diff --git a/frontend/javascripts/oxalis/model/sagas/save_saga.ts b/frontend/javascripts/oxalis/model/sagas/save_saga.ts index e9e09a12a32..d2acc8ca949 100644 --- a/frontend/javascripts/oxalis/model/sagas/save_saga.ts +++ b/frontend/javascripts/oxalis/model/sagas/save_saga.ts @@ -196,6 +196,9 @@ export function* sendRequestToServer( method: "POST", data: compactedSaveQueue, compress: process.env.NODE_ENV === "production", + // Suppressing error toast, as the doWithToken retry with personal token functionality should not show an error. + // Instead the error is logged and toggleErrorHighlighting should take care of showing an error to the user. 
+ showErrorToast: false, }, ); const endTime = Date.now(); diff --git a/frontend/javascripts/oxalis/view/action-bar/default-predict-workflow-template.ts b/frontend/javascripts/oxalis/view/action-bar/default-predict-workflow-template.ts index fdfcc186963..ffaffb19b0c 100644 --- a/frontend/javascripts/oxalis/view/action-bar/default-predict-workflow-template.ts +++ b/frontend/javascripts/oxalis/view/action-bar/default-predict-workflow-template.ts @@ -1,8 +1,9 @@ export default `predict: task: PredictTask distribution: - default: - processes: 2 + step: + strategy: sequential + num_io_threads: 5 inputs: model: TO_BE_SET_BY_WORKER config: @@ -19,6 +20,6 @@ publish_dataset_meshes: config: name: TO_BE_SET_BY_WORKER public_directory: TO_BE_SET_BY_WORKER - webknossos_organization: TO_BE_SET_BY_WORKER use_symlinks: False - move_dataset_symlink_artifact: True`; + move_dataset_symlink_artifact: True + keep_symlinks_to: TO_BE_SET_BY_WORKER`; diff --git a/frontend/javascripts/oxalis/view/action-bar/download_modal_view.tsx b/frontend/javascripts/oxalis/view/action-bar/download_modal_view.tsx index 9be46f578f4..a0ef1d37e31 100644 --- a/frontend/javascripts/oxalis/view/action-bar/download_modal_view.tsx +++ b/frontend/javascripts/oxalis/view/action-bar/download_modal_view.tsx @@ -433,7 +433,7 @@ function _DownloadModalView({ > For more information on how to work with {typeDependentFileName} visit the{" "} diff --git a/frontend/javascripts/oxalis/view/right-border-tabs/advanced_search_popover.tsx b/frontend/javascripts/oxalis/view/right-border-tabs/advanced_search_popover.tsx index 76bcb9e0399..54852103229 100644 --- a/frontend/javascripts/oxalis/view/right-border-tabs/advanced_search_popover.tsx +++ b/frontend/javascripts/oxalis/view/right-border-tabs/advanced_search_popover.tsx @@ -1,5 +1,5 @@ import { Input, Tooltip, Popover, Space, type InputRef } from "antd"; -import { DownOutlined, UpOutlined } from "@ant-design/icons"; +import { CheckSquareOutlined, DownOutlined, UpOutlined } 
from "@ant-design/icons"; import * as React from "react"; import memoizeOne from "memoize-one"; import ButtonComponent from "oxalis/view/components/button_component"; @@ -7,10 +7,13 @@ import Shortcut from "libs/shortcut_component"; import DomVisibilityObserver from "oxalis/view/components/dom_visibility_observer"; import { mod } from "libs/utils"; +const PRIMARY_COLOR = "var(--ant-color-primary)"; + type Props = { data: S[]; searchKey: keyof S | ((item: S) => string); onSelect: (arg0: S) => void; + onSelectAllMatches?: (arg0: S[]) => void; children: React.ReactNode; provideShortcut?: boolean; targetId: string; @@ -20,6 +23,7 @@ type State = { isVisible: boolean; searchQuery: string; currentPosition: number | null | undefined; + areAllMatchesSelected: boolean; }; export default class AdvancedSearchPopover< @@ -29,6 +33,7 @@ export default class AdvancedSearchPopover< isVisible: false, searchQuery: "", currentPosition: null, + areAllMatchesSelected: false, }; getAvailableOptions = memoizeOne( @@ -69,6 +74,7 @@ export default class AdvancedSearchPopover< currentPosition = mod(currentPosition + offset, numberOfAvailableOptions); this.setState({ currentPosition, + areAllMatchesSelected: false, }); this.props.onSelect(availableOptions[currentPosition]); }; @@ -101,7 +107,7 @@ export default class AdvancedSearchPopover< render() { const { data, searchKey, provideShortcut, children, targetId } = this.props; - const { searchQuery, isVisible } = this.state; + const { searchQuery, isVisible, areAllMatchesSelected } = this.state; let { currentPosition } = this.state; const availableOptions = this.getAvailableOptions(data, searchQuery, searchKey); const numberOfAvailableOptions = availableOptions.length; @@ -109,13 +115,17 @@ export default class AdvancedSearchPopover< currentPosition = currentPosition == null ? 
-1 : Math.min(currentPosition, numberOfAvailableOptions - 1); const hasNoResults = numberOfAvailableOptions === 0; - const hasMultipleResults = numberOfAvailableOptions > 1; + const availableOptionsToSelectAllMatches = availableOptions.filter( + (result) => result.type === "Tree" || result.type === "segment", + ); + const isSelectAllMatchesDisabled = availableOptionsToSelectAllMatches.length < 2; const additionalInputStyle = hasNoResults && searchQuery !== "" ? { color: "red", } : {}; + const selectAllMatchesButtonColor = areAllMatchesSelected ? PRIMARY_COLOR : undefined; return ( {provideShortcut ? ( @@ -171,9 +181,23 @@ export default class AdvancedSearchPopover< this.setState({ searchQuery: evt.target.value, currentPosition: null, + areAllMatchesSelected: false, }) } - addonAfter={`${currentPosition + 1}/${numberOfAvailableOptions}`} + addonAfter={ +

+ } ref={this.autoFocus} autoFocus /> @@ -183,7 +207,7 @@ export default class AdvancedSearchPopover< width: 40, }} onClick={this.selectPreviousOption} - disabled={!hasMultipleResults} + disabled={hasNoResults} > @@ -194,11 +218,32 @@ export default class AdvancedSearchPopover< width: 40, }} onClick={this.selectNextOption} - disabled={!hasMultipleResults} + disabled={hasNoResults} > + + { + this.props.onSelectAllMatches!(availableOptionsToSelectAllMatches); + if (!areAllMatchesSelected) + this.setState({ areAllMatchesSelected: true }); + } + : undefined + } + disabled={isSelectAllMatchesDisabled} + > + + + ) diff --git a/frontend/javascripts/oxalis/view/right-border-tabs/bounding_box_tab.tsx b/frontend/javascripts/oxalis/view/right-border-tabs/bounding_box_tab.tsx index 126e0f03109..cc837f4ac31 100644 --- a/frontend/javascripts/oxalis/view/right-border-tabs/bounding_box_tab.tsx +++ b/frontend/javascripts/oxalis/view/right-border-tabs/bounding_box_tab.tsx @@ -90,7 +90,7 @@ export default function BoundingBoxTab() { } function handleExportBoundingBox(bb: UserBoundingBox) { - _.partial(setSelectedBoundingBoxForExport, bb); + setSelectedBoundingBoxForExport(bb); hideContextMenu(); } diff --git a/frontend/javascripts/oxalis/view/right-border-tabs/segments_tab/segments_view.tsx b/frontend/javascripts/oxalis/view/right-border-tabs/segments_tab/segments_view.tsx index 5bb582a04be..e795a0a581e 100644 --- a/frontend/javascripts/oxalis/view/right-border-tabs/segments_tab/segments_view.tsx +++ b/frontend/javascripts/oxalis/view/right-border-tabs/segments_tab/segments_view.tsx @@ -137,6 +137,8 @@ import { SegmentStatisticsModal } from "./segment_statistics_modal"; import type { ItemType } from "antd/lib/menu/interface"; import { InputWithUpdateOnBlur } from "oxalis/view/components/input_with_update_on_blur"; +const SCROLL_DELAY_MS = 50; + const { confirm } = Modal; const { Option } = Select; // Interval in ms to check for running mesh file computation jobs for this dataset @@ 
-1590,7 +1592,7 @@ class SegmentsView extends React.Component { this.setState(({ renamingCounter }) => ({ renamingCounter: renamingCounter - 1 })); }; - handleSearchSelect = (selectedElement: SegmentHierarchyNode) => { + maybeExpandParentGroup = (selectedElement: SegmentHierarchyNode) => { if (this.tree?.current == null) { return; } @@ -1606,16 +1608,47 @@ class SegmentsView extends React.Component { if (expandedGroups) { this.setExpandedGroupsFromSet(expandedGroups); } + }; + + handleSearchSelect = (selectedElement: SegmentHierarchyNode) => { + this.maybeExpandParentGroup(selectedElement); // As parent groups might still need to expand, we need to wait for this to finish. setTimeout(() => { if (this.tree.current) this.tree.current.scrollTo({ key: selectedElement.key }); - }, 50); + }, SCROLL_DELAY_MS); const isASegment = "color" in selectedElement; if (isASegment) { this.onSelectSegment(selectedElement); + } else { + if (this.props.visibleSegmentationLayer == null) return; + Store.dispatch( + setSelectedSegmentsOrGroupAction( + [], + selectedElement.id, + this.props.visibleSegmentationLayer?.name, + ), + ); } }; + handleSelectAllMatchingSegments = (allMatches: SegmentHierarchyNode[]) => { + if (this.props.visibleSegmentationLayer == null) return; + const allMatchingSegmentIds = allMatches.map((match) => { + this.maybeExpandParentGroup(match); + return match.id; + }); + Store.dispatch( + setSelectedSegmentsOrGroupAction( + allMatchingSegmentIds, + null, + this.props.visibleSegmentationLayer.name, + ), + ); + setTimeout(() => { + this.tree.current?.scrollTo({ key: allMatches[0].key }); + }, SCROLL_DELAY_MS); + }; + getSegmentStatisticsModal = (groupId: number) => { const visibleSegmentationLayer = this.props.visibleSegmentationLayer; if (visibleSegmentationLayer == null) { @@ -1833,6 +1866,7 @@ class SegmentsView extends React.Component { searchKey={(item) => getSegmentName(item)} provideShortcut targetId={segmentsTabId} + 
onSelectAllMatches={this.handleSelectAllMatchingSegments} > { }); }; - handleSearchSelect = (selectedElement: TreeOrTreeGroup) => { + maybeExpandParentGroups = (selectedElement: TreeOrTreeGroup) => { const { skeletonTracing } = this.props; if (!skeletonTracing) { return; @@ -682,6 +682,10 @@ class SkeletonTabView extends React.PureComponent { if (expandedGroups) { this.props.onSetExpandedGroups(expandedGroups); } + }; + + handleSearchSelect = (selectedElement: TreeOrTreeGroup) => { + this.maybeExpandParentGroups(selectedElement); if (selectedElement.type === GroupTypeEnum.TREE) { this.props.onSetActiveTree(selectedElement.id); } else { @@ -689,6 +693,15 @@ class SkeletonTabView extends React.PureComponent { } }; + handleSelectAllMatchingTrees = (matchingTrees: TreeOrTreeGroup[]) => { + this.props.onDeselectActiveGroup(); + const treeIds = matchingTrees.map((tree) => { + this.maybeExpandParentGroups(tree); + return tree.id; + }); + this.setState({ selectedTreeIds: treeIds }); + }; + getTreesComponents(sortBy: string) { if (!this.props.skeletonTracing) { return null; @@ -864,6 +877,7 @@ class SkeletonTabView extends React.PureComponent { searchKey="name" provideShortcut targetId={treeTabId} + onSelectAllMatches={this.handleSelectAllMatchingTrees} > diff --git a/frontend/javascripts/oxalis/view/right-border-tabs/tree_hierarchy_view.tsx b/frontend/javascripts/oxalis/view/right-border-tabs/tree_hierarchy_view.tsx index bb05294399f..29312811bf1 100644 --- a/frontend/javascripts/oxalis/view/right-border-tabs/tree_hierarchy_view.tsx +++ b/frontend/javascripts/oxalis/view/right-border-tabs/tree_hierarchy_view.tsx @@ -188,8 +188,7 @@ function TreeHierarchyView(props: Props) { } } - function onSelectGroupNode(node: TreeNode) { - const groupId = node.id; + function onSelectGroupNode(groupId: number) { const numberOfSelectedTrees = props.selectedTreeIds.length; if (numberOfSelectedTrees > 1) { @@ -254,11 +253,14 @@ function TreeHierarchyView(props: Props) { const checkedKeys = 
deepFlatFilter(UITreeData, (node) => node.isChecked).map((node) => node.key); // selectedKeys is mainly used for highlighting, i.e. blueish background color - const selectedKeys = props.selectedTreeIds.map((treeId) => - getNodeKey(GroupTypeEnum.TREE, treeId), - ); + const selectedKeys = props.activeGroupId + ? [getNodeKey(GroupTypeEnum.GROUP, props.activeGroupId)] + : props.selectedTreeIds.map((treeId) => getNodeKey(GroupTypeEnum.TREE, treeId)); - if (props.activeGroupId) selectedKeys.push(getNodeKey(GroupTypeEnum.GROUP, props.activeGroupId)); + useEffect( + () => treeRef.current?.scrollTo({ key: selectedKeys[0], align: "auto" }), + [selectedKeys[0]], + ); return ( <> @@ -297,7 +299,7 @@ function TreeHierarchyView(props: Props) { onSelect={(_selectedKeys, info: { node: TreeNode; nativeEvent: MouseEvent }) => info.node.type === GroupTypeEnum.TREE ? onSelectTreeNode(info.node, info.nativeEvent) - : onSelectGroupNode(info.node) + : onSelectGroupNode(info.node.id) } onDrop={onDrop} onCheck={onCheck} diff --git a/frontend/javascripts/test/sagas/save_saga.spec.ts b/frontend/javascripts/test/sagas/save_saga.spec.ts index 677568ef279..4707cf28bee 100644 --- a/frontend/javascripts/test/sagas/save_saga.spec.ts +++ b/frontend/javascripts/test/sagas/save_saga.spec.ts @@ -130,6 +130,7 @@ test("SaveSaga should send request to server", (t) => { method: "POST", data: saveQueueWithVersions, compress: false, + showErrorToast: false, }), ); }); @@ -147,6 +148,7 @@ test("SaveSaga should retry update actions", (t) => { method: "POST", data: saveQueueWithVersions, compress: false, + showErrorToast: false, }, ); const saga = sendRequestToServer(TRACING_TYPE, tracingId); @@ -187,6 +189,7 @@ test("SaveSaga should escalate on permanent client error update actions", (t) => method: "POST", data: saveQueueWithVersions, compress: false, + showErrorToast: false, }), ); saga.throw({ diff --git a/test/backend/DataVaultTestSuite.scala b/test/backend/DataVaultTestSuite.scala index 
424b560847e..75c8ab9d666 100644 --- a/test/backend/DataVaultTestSuite.scala +++ b/test/backend/DataVaultTestSuite.scala @@ -18,6 +18,7 @@ import net.liftweb.common.{Box, Empty, EmptyBox, Failure, Full} import play.api.libs.json.JsString import play.api.test.WsTestClient +import java.util.UUID import scala.collection.immutable.NumericRange import scala.concurrent.ExecutionContext import scala.concurrent.ExecutionContext.{global => globalExecutionContext} @@ -59,9 +60,11 @@ class DataVaultTestSuite extends PlaySpec { } "return empty box" when { - "requesting a nox-existent object" in { + "requesting a non-existent object" in { val result = - (vaultPath / "non-existent-key").readBytes()(globalExecutionContext).await(handleFoxJustification) + (vaultPath / s"non-existent-key${UUID.randomUUID}") + .readBytes()(globalExecutionContext) + .await(handleFoxJustification) assertBoxEmpty(result) } } @@ -141,8 +144,18 @@ class DataVaultTestSuite extends PlaySpec { } "return empty box" when { - "requesting a nox-existent object" in { - val uri = new URI("s3://non-existing-bucket/non-existing-object") + "requesting a non-existent bucket" in { + val uri = new URI(s"s3://non-existent-bucket${UUID.randomUUID}/non-existent-object") + val s3DataVault = S3DataVault.create(RemoteSourceDescriptor(uri, None)) + val vaultPath = new VaultPath(uri, s3DataVault) + val result = vaultPath.readBytes()(globalExecutionContext).await(handleFoxJustification) + assertBoxEmpty(result) + } + } + + "return empty box" when { + "requesting a non-existent object in existent bucket" in { + val uri = new URI(s"s3://open-neurodata/non-existent-object${UUID.randomUUID}") val s3DataVault = S3DataVault.create(RemoteSourceDescriptor(uri, None)) val vaultPath = new VaultPath(uri, s3DataVault) val result = vaultPath.readBytes()(globalExecutionContext).await(handleFoxJustification) @@ -165,15 +178,16 @@ class DataVaultTestSuite extends PlaySpec { _.toUri == new 
URI("s3://janelia-cosem-datasets/jrc_hela-3/jrc_hela-3.n5/em/fibsem-uint16/s0/"))) } - "return empty box" when { - "requesting directory listing on nox-existent bucket" in { - val uri = new URI("s3://non-existing-bucket/non-existing-object/") + "return failure" when { + "requesting directory listing on non-existent bucket" in { + val uri = new URI(f"s3://non-existent-bucket${UUID.randomUUID}/non-existent-object/") val s3DataVault = S3DataVault.create(RemoteSourceDescriptor(uri, None)) val vaultPath = new VaultPath(uri, s3DataVault) val result = vaultPath.listDirectory(maxItems = 5)(globalExecutionContext).await(handleFoxJustification) - assertBoxEmpty(result) + assertBoxFailure(result) } } + } } diff --git a/tools/postgres/schema.sql b/tools/postgres/schema.sql index cbb40d65f16..20453b54ba7 100644 --- a/tools/postgres/schema.sql +++ b/tools/postgres/schema.sql @@ -20,7 +20,7 @@ CREATE TABLE webknossos.releaseInformation ( schemaVersion BIGINT NOT NULL ); -INSERT INTO webknossos.releaseInformation(schemaVersion) values(122); +INSERT INTO webknossos.releaseInformation(schemaVersion) values(123); COMMIT TRANSACTION; @@ -546,7 +546,7 @@ CREATE TABLE webknossos.emailVerificationKeys( isUsed BOOLEAN NOT NULL DEFAULT false ); -CREATE TYPE webknossos.AI_MODEL_CATEGORY AS ENUM ('em_neurons', 'em_nuclei'); +CREATE TYPE webknossos.AI_MODEL_CATEGORY AS ENUM ('em_neurons', 'em_nuclei', 'em_synapses', 'em_neuron_types', 'em_cell_organelles'); CREATE TABLE webknossos.aiModels( _id CHAR(24) PRIMARY KEY, diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datavault/S3DataVault.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datavault/S3DataVault.scala index 3f47f88274f..d86e4553212 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datavault/S3DataVault.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datavault/S3DataVault.scala @@ -88,6 +88,13 @@ class 
S3DataVault(s3AccessKeyCredential: Option[S3AccessKeyCredential], uri: URI Future.successful(box) } + private def notFoundToFailure[T](resultFuture: Future[T])(implicit ec: ExecutionContext): Fox[T] = + resultFuture.transformWith { + case TrySuccess(value) => Fox.successful(value).futureBox + case TryFailure(exception) => + Future.successful(BoxFailure(exception.getMessage, Full(exception), Empty)) + } + override def readBytesAndEncoding(path: VaultPath, range: RangeSpecifier)( implicit ec: ExecutionContext): Fox[(Array[Byte], Encoding.Value)] = for { @@ -115,7 +122,7 @@ class S3DataVault(s3AccessKeyCredential: Option[S3AccessKeyCredential], uri: URI val listObjectsRequest = ListObjectsV2Request.builder().bucket(bucketName).prefix(keyPrefix).delimiter("/").maxKeys(maxKeys).build() for { - objectListing: ListObjectsV2Response <- notFoundToEmpty(client.listObjectsV2(listObjectsRequest).asScala) + objectListing: ListObjectsV2Response <- notFoundToFailure(client.listObjectsV2(listObjectsRequest).asScala) s3SubPrefixes: List[CommonPrefix] = objectListing.commonPrefixes().asScala.take(maxItems).toList } yield s3SubPrefixes.map(_.prefix()) }