diff --git a/.circleci/config.yml b/.circleci/config.yml index f35317647f1..a70616eb2d6 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -118,6 +118,7 @@ jobs: - run: name: Run end-to-end tests command: | + mkdir -p binaryData/Organization_X && chmod 777 binaryData/Organization_X for i in {1..3}; do # retry .circleci/not-on-master.sh docker-compose run e2e-tests && s=0 && break || s=$? done diff --git a/CHANGELOG.released.md b/CHANGELOG.released.md index 201055984b8..676a694171f 100644 --- a/CHANGELOG.released.md +++ b/CHANGELOG.released.md @@ -7,6 +7,56 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/) and this project adheres to [Calendar Versioning](http://calver.org/) `0Y.0M.MICRO`. For upgrade instructions, please check the [migration guide](MIGRATIONS.released.md). +## [24.11.1](https://github.com/scalableminds/webknossos/releases/tag/24.11.1) - 2024-11-13 +[Commits](https://github.com/scalableminds/webknossos/compare/24.10.0...24.11.1) + +### Highlights +- It is now possible to add metadata in annotations to Trees and Segments. [#7875](https://github.com/scalableminds/webknossos/pull/7875) +- Added a button to the search popover in the skeleton and segment tab to select all matching non-group results. [#8123](https://github.com/scalableminds/webknossos/pull/8123) + +### Added +- It is now possible to add metadata in annotations to Trees and Segments. [#7875](https://github.com/scalableminds/webknossos/pull/7875) +- Added a summary row to the time tracking overview, where times and annotations/tasks are summed. [#8092](https://github.com/scalableminds/webknossos/pull/8092) +- Most sliders have been improved: Wheeling above a slider now changes its value and double-clicking its knob resets it to its default value. [#8095](https://github.com/scalableminds/webknossos/pull/8095) +- It is now possible to search for unnamed segments with the full default name instead of only their id. 
[#8133](https://github.com/scalableminds/webknossos/pull/8133) +- Increased loading speed for precomputed meshes. [#8110](https://github.com/scalableminds/webknossos/pull/8110) +- Added a button to the search popover in the skeleton and segment tab to select all matching non-group results. [#8123](https://github.com/scalableminds/webknossos/pull/8123) +- Unified wording in UI and code: “Magnification”/“mag” is now used in place of “Resolution” most of the time, compare [terminology document](https://docs.webknossos.org/webknossos/terminology.html). [#8111](https://github.com/scalableminds/webknossos/pull/8111) +- Added support for adding remote OME-Zarr NGFF version 0.5 datasets. [#8122](https://github.com/scalableminds/webknossos/pull/8122) +- Workflow reports may be deleted by superusers. [#8156](https://github.com/scalableminds/webknossos/pull/8156) + +### Changed +- Some mesh-related actions were disabled in proofreading-mode when using meshfiles that were created for a mapping rather than an oversegmentation. [#8091](https://github.com/scalableminds/webknossos/pull/8091) +- Admins can now see and cancel all jobs. The owner of the job is shown in the job list. [#8112](https://github.com/scalableminds/webknossos/pull/8112) +- Migrated nightly screenshot tests from CircleCI to GitHub actions. [#8134](https://github.com/scalableminds/webknossos/pull/8134) +- Migrated nightly screenshot tests for wk.org from CircleCI to GitHub actions. [#8135](https://github.com/scalableminds/webknossos/pull/8135) +- Thumbnails for datasets now use the selected mapping from the view configuration if available. [#8157](https://github.com/scalableminds/webknossos/pull/8157) + +### Fixed +- Fixed a bug during dataset upload in case the configured `datastore.baseFolder` is an absolute path. 
[#8098](https://github.com/scalableminds/webknossos/pull/8098) [#8103](https://github.com/scalableminds/webknossos/pull/8103) +- Fixed bbox export menu item. [#8152](https://github.com/scalableminds/webknossos/pull/8152) +- When trying to save an annotation opened via a link including a sharing token, the token is automatically discarded in case it is insufficient for update actions but the user's token is. [#8139](https://github.com/scalableminds/webknossos/pull/8139) +- Fixed that scrolling in the trees and segments tab did not work while dragging. [#8162](https://github.com/scalableminds/webknossos/pull/8162) +- Fixed that uploading a dataset which needs a conversion failed when the angstrom unit was configured for the conversion. [#8173](https://github.com/scalableminds/webknossos/pull/8173) +- Fixed that the skeleton search did not automatically expand groups that contained the selected tree. [#8129](https://github.com/scalableminds/webknossos/pull/8129) +- Fixed interactions in the trees and segments tab like the search due to a bug introduced by [#8162](https://github.com/scalableminds/webknossos/pull/8162). [#8186](https://github.com/scalableminds/webknossos/pull/8186) +- Fixed a bug that zarr streaming version 3 returned the shape of mag (1, 1, 1) / the finest mag for all mags. [#8116](https://github.com/scalableminds/webknossos/pull/8116) +- Fixed sorting of mags in outbound zarr streaming. [#8125](https://github.com/scalableminds/webknossos/pull/8125) +- Fixed a bug where you could not create annotations for public datasets of other organizations. [#8107](https://github.com/scalableminds/webknossos/pull/8107) +- Users without edit permissions to a dataset can no longer delete sharing tokens via the API. [#8083](https://github.com/scalableminds/webknossos/issues/8083) +- Fixed downloading task annotations of teams you are not in, when accessing directly via URI. 
[#8155](https://github.com/scalableminds/webknossos/pull/8155) +- Removed unnecessary scrollbars in skeleton tab that occurred especially after resizing. [#8148](https://github.com/scalableminds/webknossos/pull/8148) +- Deleting a bounding box is now possible independently of a visible segmentation layer. [#8164](https://github.com/scalableminds/webknossos/pull/8164) +- S3-compliant object storages can now be accessed via HTTPS. [#8167](https://github.com/scalableminds/webknossos/pull/8167) +- Fixed that skeleton tree nodes were created with the wrong mag. [#8185](https://github.com/scalableminds/webknossos/pull/8185) +- Fixed the expected type of a tree node received from the server. Fixes nml export to include the `inMag` field correctly. [#8187](https://github.com/scalableminds/webknossos/pull/8187) +- Fixed a layout persistence bug leading to empty viewports, triggered when switching between orthogonal, flight, or oblique mode. [#8177](https://github.com/scalableminds/webknossos/pull/8177) + +### Removed + +### Breaking Changes + + ## [24.10.0](https://github.com/scalableminds/webknossos/releases/tag/24.10.0) - 2024-09-24 [Commits](https://github.com/scalableminds/webknossos/compare/24.08.1...24.10.0) diff --git a/CHANGELOG.unreleased.md b/CHANGELOG.unreleased.md index 9fd9c074d3a..cfe2f25611d 100644 --- a/CHANGELOG.unreleased.md +++ b/CHANGELOG.unreleased.md @@ -8,42 +8,18 @@ and this project adheres to [Calendar Versioning](http://calver.org/) `0Y.0M.MIC For upgrade instructions, please check the [migration guide](MIGRATIONS.released.md). ## Unreleased -[Commits](https://github.com/scalableminds/webknossos/compare/24.10.0...HEAD) +[Commits](https://github.com/scalableminds/webknossos/compare/24.11.1...HEAD) ### Added -- It is now possible to add metadata in annotations to Trees and Segments. [#7875](https://github.com/scalableminds/webknossos/pull/7875) -- Added a summary row to the time tracking overview, where times and annotations/tasks are summed. 
[#8092](https://github.com/scalableminds/webknossos/pull/8092) -- Most sliders have been improved: Wheeling above a slider now changes its value and double-clicking its knob resets it to its default value. [#8095](https://github.com/scalableminds/webknossos/pull/8095) -- It is now possible to search for unnamed segments with the full default name instead of only their id. [#8133](https://github.com/scalableminds/webknossos/pull/8133) -- Increased loading speed for precomputed meshes. [#8110](https://github.com/scalableminds/webknossos/pull/8110) -- Added a button to the search popover in the skeleton and segment tab to select all matching non-group results. [#8123](https://github.com/scalableminds/webknossos/pull/8123) -- Unified wording in UI and code: “Magnification”/“mag” is now used in place of “Resolution“ most of the time, compare [https://docs.webknossos.org/webknossos/terminology.html](terminology document). [#8111](https://github.com/scalableminds/webknossos/pull/8111) -- Added support for adding remote OME-Zarr NGFF version 0.5 datasets. [#8122](https://github.com/scalableminds/webknossos/pull/8122) -- Workflow reports may be deleted by superusers. [#8156](https://github.com/scalableminds/webknossos/pull/8156) ### Changed -- Some mesh-related actions were disabled in proofreading-mode when using meshfiles that were created for a mapping rather than an oversegmentation. [#8091](https://github.com/scalableminds/webknossos/pull/8091) -- Admins can now see and cancel all jobs. The owner of the job is shown in the job list. [#8112](https://github.com/scalableminds/webknossos/pull/8112) -- Migrated nightly screenshot tests from CircleCI to GitHub actions. [#8134](https://github.com/scalableminds/webknossos/pull/8134) -- Migrated nightly screenshot tests for wk.org from CircleCI to GitHub actions. [#8135](https://github.com/scalableminds/webknossos/pull/8135) -- Thumbnails for datasets now use the selected mapping from the view configuration if available. 
[#8157](https://github.com/scalableminds/webknossos/pull/8157) - Renamed "resolution" to "magnification" in more places within the codebase, including local variables. [#8168](https://github.com/scalableminds/webknossos/pull/8168) +- Reading image files on datastore filesystem is now done asynchronously. [#8126](https://github.com/scalableminds/webknossos/pull/8126) ### Fixed -- Fixed a bug during dataset upload in case the configured `datastore.baseFolder` is an absolute path. [#8098](https://github.com/scalableminds/webknossos/pull/8098) [#8103](https://github.com/scalableminds/webknossos/pull/8103) -- Fixed bbox export menu item [#8152](https://github.com/scalableminds/webknossos/pull/8152) -- When trying to save an annotation opened via a link including a sharing token, the token is automatically discarded in case it is insufficient for update actions but the users token is. [#8139](https://github.com/scalableminds/webknossos/pull/8139) -- Fix that scrolling in the trees and segments tab did not work while dragging. [#8162](https://github.com/scalableminds/webknossos/pull/8162) -- Fixed that uploading a dataset which needs a conversion failed when the angstrom unit was configured for the conversion. [#8173](https://github.com/scalableminds/webknossos/pull/8173) -- Fixed that the skeleton search did not automatically expand groups that contained the selected tree [#8129](https://github.com/scalableminds/webknossos/pull/8129) -- Fixed a bug that zarr streaming version 3 returned the shape of mag (1, 1, 1) / the finest mag for all mags. [#8116](https://github.com/scalableminds/webknossos/pull/8116) -- Fixed sorting of mags in outbound zarr streaming. [#8125](https://github.com/scalableminds/webknossos/pull/8125) -- Fixed a bug where you could not create annotations for public datasets of other organizations. [#8107](https://github.com/scalableminds/webknossos/pull/8107) -- Users without edit permissions to a dataset can no longer delete sharing tokens via the API. 
[#8083](https://github.com/scalableminds/webknossos/issues/8083) -- Fixed downloading task annotations of teams you are not in, when accessing directly via URI. [#8155](https://github.com/scalableminds/webknossos/pull/8155) -- Removed unnecessary scrollbars in skeleton tab that occurred especially after resizing. [#8148](https://github.com/scalableminds/webknossos/pull/8148) -- Deleting a bounding box is now possible independently of a visible segmentation layer. [#8164](https://github.com/scalableminds/webknossos/pull/8164) -- S3-compliant object storages can now be accessed via HTTPS. [#8167](https://github.com/scalableminds/webknossos/pull/8167) +- Fix performance bottleneck when deleting a lot of trees at once. [#8176](https://github.com/scalableminds/webknossos/pull/8176) +- Fix a bug when importing an NML with groups when only groups but no trees exist in an annotation. [#8176](https://github.com/scalableminds/webknossos/pull/8176) +- Fix a bug where trying to delete a non-existing node (via the API, for example) would delete the whole active tree. [#8176](https://github.com/scalableminds/webknossos/pull/8176) ### Removed diff --git a/MIGRATIONS.released.md b/MIGRATIONS.released.md index 2b490c94b58..d0db04bb6a0 100644 --- a/MIGRATIONS.released.md +++ b/MIGRATIONS.released.md @@ -6,6 +6,16 @@ See `MIGRATIONS.unreleased.md` for the changes which are not yet part of an offi This project adheres to [Calendar Versioning](http://calver.org/) `0Y.0M.MICRO`. User-facing changes are documented in the [changelog](CHANGELOG.released.md). 
+## [24.11.1](https://github.com/scalableminds/webknossos/releases/tag/24.11.1) - 2024-11-13 +[Commits](https://github.com/scalableminds/webknossos/compare/24.10.0...24.11.1) + +### Postgres Evolutions: + +- [121-worker-name.sql](conf/evolutions/121-worker-name.sql) +- [122-resolution-to-mag.sql](conf/evolutions/122-resolution-to-mag.sql) +- [123-more-model-categories.sql](conf/evolutions/123-more-model-categories.sql) + + ## [24.10.0](https://github.com/scalableminds/webknossos/releases/tag/24.10.0) - 2024-09-24 [Commits](https://github.com/scalableminds/webknossos/compare/24.08.1...24.10.0) diff --git a/MIGRATIONS.unreleased.md b/MIGRATIONS.unreleased.md index f6d640f469d..20414e596e6 100644 --- a/MIGRATIONS.unreleased.md +++ b/MIGRATIONS.unreleased.md @@ -6,10 +6,6 @@ This project adheres to [Calendar Versioning](http://calver.org/) `0Y.0M.MICRO`. User-facing changes are documented in the [changelog](CHANGELOG.released.md). ## Unreleased -[Commits](https://github.com/scalableminds/webknossos/compare/24.10.0...HEAD) +[Commits](https://github.com/scalableminds/webknossos/compare/24.11.1...HEAD) ### Postgres Evolutions: - -- [121-worker-name.sql](conf/evolutions/121-worker-name.sql) -- [122-resolution-to-mag.sql](conf/evolutions/122-resolution-to-mag.sql) -- [123-more-model-categories.sql](conf/evolutions/123-more-model-categories.sql) diff --git a/conf/application.conf b/conf/application.conf index 07d8b5d2dd1..0ae8b6f25dd 100644 --- a/conf/application.conf +++ b/conf/application.conf @@ -125,7 +125,7 @@ webKnossos { securityTxt { enabled = true content ="""Contact: https://github.com/scalableminds/webknossos/security/advisories/new -Expires: 2024-07-03T10:00:00.000Z +Expires: 2025-07-03T10:00:00.000Z Preferred-Languages: en,de """ } diff --git a/conf/messages b/conf/messages index 44eff4e660d..4f59c9f0e84 100644 --- a/conf/messages +++ b/conf/messages @@ -73,10 +73,6 @@ oidc.disabled=OIDC is disabled oidc.configuration.invalid=OIDC configuration is invalid 
oidc.authentication.failed=Failed to register / log in via Single-Sign-On (SSO with OIDC) -braintracing.new=An account on braintracing.org was created for you. You can use the same credentials as on WEBKNOSSOS to login. -braintracing.error=We could not automatically create an account for you on braintracing.org. Please do it on your own. -braintracing.exists=Great, you already have an account on braintracing.org. Please double check that you have uploaded all requested information. - dataset=Dataset dataset.notFound=Dataset {0} does not exist or could not be accessed dataset.notFoundConsiderLogin=Dataset {0} does not exist or could not be accessed. You may need to log in. diff --git a/docker-compose.yml b/docker-compose.yml index 76f802426bb..a1f0e549c0e 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -201,7 +201,7 @@ services: -Ddatastore.redis.address=redis -Ddatastore.watchFileSystem.enabled=false" volumes: - - ./binaryData/Connectomics department:/home/${USER_NAME:-sbt-user}/webknossos/binaryData/Organization_X + - ./binaryData/Organization_X:/home/${USER_NAME:-sbt-user}/webknossos/binaryData/Organization_X screenshot-tests: image: scalableminds/puppeteer:master diff --git a/frontend/javascripts/libs/diffable_map.ts b/frontend/javascripts/libs/diffable_map.ts index bdfd028d47c..eebd14d429a 100644 --- a/frontend/javascripts/libs/diffable_map.ts +++ b/frontend/javascripts/libs/diffable_map.ts @@ -54,7 +54,7 @@ class DiffableMap { if (value !== undefined) { return value; } else { - throw new Error("Get empty"); + throw new Error(`Key '${key}' does not exist in diffable map.`); } } diff --git a/frontend/javascripts/oxalis/model/accessors/skeletontracing_accessor.ts b/frontend/javascripts/oxalis/model/accessors/skeletontracing_accessor.ts index b30708bb6c9..657a18ca2e0 100644 --- a/frontend/javascripts/oxalis/model/accessors/skeletontracing_accessor.ts +++ b/frontend/javascripts/oxalis/model/accessors/skeletontracing_accessor.ts @@ -175,7 +175,7 @@ 
export function getNodeAndTree( let node = null; if (nodeId != null) { - node = tree.nodes.getOrThrow(nodeId); + node = tree.nodes.getNullable(nodeId); } else { const { activeNodeId } = skeletonTracing; diff --git a/frontend/javascripts/oxalis/model/actions/skeletontracing_actions.tsx b/frontend/javascripts/oxalis/model/actions/skeletontracing_actions.tsx index ac6df363627..a5fd84d8b7c 100644 --- a/frontend/javascripts/oxalis/model/actions/skeletontracing_actions.tsx +++ b/frontend/javascripts/oxalis/model/actions/skeletontracing_actions.tsx @@ -39,6 +39,7 @@ type CreateTreeAction = ReturnType; type SetEdgeVisibilityAction = ReturnType; type AddTreesAndGroupsAction = ReturnType; type DeleteTreeAction = ReturnType; +type DeleteTreesAction = ReturnType; type ResetSkeletonTracingAction = ReturnType; type SetActiveTreeAction = ReturnType; type SetActiveTreeByNameAction = ReturnType; @@ -65,7 +66,11 @@ type UpdateNavigationListAction = ReturnType; export type LoadAgglomerateSkeletonAction = ReturnType; type NoAction = ReturnType; -export type BatchableUpdateTreeAction = SetTreeGroupAction | DeleteTreeAction | SetTreeGroupsAction; +export type BatchableUpdateTreeAction = + | SetTreeGroupAction + | DeleteTreeAction + | DeleteTreesAction + | SetTreeGroupsAction; export type BatchUpdateGroupsAndTreesAction = { type: "BATCH_UPDATE_GROUPS_AND_TREES"; payload: BatchableUpdateTreeAction[]; @@ -93,6 +98,7 @@ export type SkeletonTracingAction = | SetEdgeVisibilityAction | AddTreesAndGroupsAction | DeleteTreeAction + | DeleteTreesAction | ResetSkeletonTracingAction | SetActiveTreeAction | SetActiveTreeByNameAction @@ -139,6 +145,7 @@ export const SkeletonTracingSaveRelevantActions = [ "SET_EDGES_ARE_VISIBLE", "ADD_TREES_AND_GROUPS", "DELETE_TREE", + "DELETE_TREES", "SET_ACTIVE_TREE", "SET_ACTIVE_TREE_BY_NAME", "SET_TREE_NAME", @@ -337,6 +344,19 @@ export const deleteTreeAction = (treeId?: number, suppressActivatingNextNode: bo suppressActivatingNextNode, }) as const; +export const 
deleteTreesAction = (treeIds: number[], suppressActivatingNextNode: boolean = false) => + // If suppressActivatingNextNode is true, the trees will be deleted without activating + // another node (nor tree). Use this in cases where you want to avoid changing + // the active position (due to the auto-centering). One could also suppress the auto-centering + // behavior, but the semantics of changing the active node might also be confusing to the user + // (e.g., when proofreading). So, it might be clearer to not have an active node in the first + // place. + ({ + type: "DELETE_TREES", + treeIds, + suppressActivatingNextNode, + }) as const; + export const resetSkeletonTracingAction = () => ({ type: "RESET_SKELETON_TRACING", @@ -555,11 +575,15 @@ export const deleteNodeAsUserAction = ( return deleteNodeAction(node.id, tree.treeId); }) // If the tree is empty, it will be deleted - .getOrElse(deleteTreeAction(treeId)); + .getOrElse( + getTree(skeletonTracing, treeId) + .map((tree) => (tree.nodes.size() === 0 ? 
deleteTreeAction(tree.treeId) : noAction())) + .getOrElse(noAction()), + ); }; // Let the user confirm the deletion of the initial node (node with id 1) of a task -function confirmDeletingInitialNode(treeId?: number) { +function confirmDeletingInitialNode(treeId: number) { Modal.confirm({ title: messages["tracing.delete_tree_with_initial_node"], onOk: () => { @@ -573,12 +597,15 @@ export const deleteTreeAsUserAction = (treeId?: number): NoAction => { const skeletonTracing = enforceSkeletonTracing(state.tracing); getTree(skeletonTracing, treeId).map((tree) => { if (state.task != null && tree.nodes.has(1)) { - confirmDeletingInitialNode(treeId); + confirmDeletingInitialNode(tree.treeId); } else if (state.userConfiguration.hideTreeRemovalWarning) { - Store.dispatch(deleteTreeAction(treeId)); + Store.dispatch(deleteTreeAction(tree.treeId)); } else { renderIndependently((destroy) => ( - Store.dispatch(deleteTreeAction(treeId))} destroy={destroy} /> + Store.dispatch(deleteTreeAction(tree.treeId))} + destroy={destroy} + /> )); } }); diff --git a/frontend/javascripts/oxalis/model/helpers/nml_helpers.ts b/frontend/javascripts/oxalis/model/helpers/nml_helpers.ts index be2424c0d24..fc5d8eb965a 100644 --- a/frontend/javascripts/oxalis/model/helpers/nml_helpers.ts +++ b/frontend/javascripts/oxalis/model/helpers/nml_helpers.ts @@ -412,7 +412,7 @@ function serializeNodes( rotY: node.rotation[1], rotZ: node.rotation[2], inVp: node.viewport, - inMag: node.mag, + inMag: node.resolution, bitDepth: node.bitDepth, interpolation: node.interpolation, time: node.timestamp, @@ -963,7 +963,7 @@ export function parseNml(nmlString: string): Promise<{ }), bitDepth: _parseInt(attr, "bitDepth", { defaultValue: DEFAULT_BITDEPTH }), viewport: _parseInt(attr, "inVp", { defaultValue: DEFAULT_VIEWPORT }), - mag: _parseInt(attr, "inMag", { defaultValue: DEFAULT_MAG }), + resolution: _parseInt(attr, "inMag", { defaultValue: DEFAULT_MAG }), radius: _parseFloat(attr, "radius", { defaultValue: 
Constants.DEFAULT_NODE_RADIUS }), timestamp: _parseTimestamp(attr, "time", { defaultValue: DEFAULT_TIMESTAMP }), }; diff --git a/frontend/javascripts/oxalis/model/reducers/skeletontracing_reducer.ts b/frontend/javascripts/oxalis/model/reducers/skeletontracing_reducer.ts index f6224380f9f..16b3c90e4f0 100644 --- a/frontend/javascripts/oxalis/model/reducers/skeletontracing_reducer.ts +++ b/frontend/javascripts/oxalis/model/reducers/skeletontracing_reducer.ts @@ -13,7 +13,7 @@ import { deleteBranchPoint, createNode, createTree, - deleteTree, + deleteTrees, deleteNode, deleteEdge, shuffleTreeColor, @@ -890,10 +890,16 @@ function SkeletonTracingReducer(state: OxalisState, action: Action): OxalisState .getOrElse(state); } - case "DELETE_TREE": { - const { treeId, suppressActivatingNextNode } = action; - return getTree(skeletonTracing, treeId) - .chain((tree) => deleteTree(skeletonTracing, tree, suppressActivatingNextNode)) + case "DELETE_TREE": + case "DELETE_TREES": { + const { suppressActivatingNextNode } = action; + const treeIds = + action.type === "DELETE_TREE" + ? 
getTree(skeletonTracing, action.treeId) // The treeId in a DELETE_TREE action can be undefined which will select the active tree + .map((tree) => [tree.treeId]) + .getOrElse([]) + : action.treeIds; + return deleteTrees(skeletonTracing, treeIds, suppressActivatingNextNode) .map(([trees, newActiveTreeId, newActiveNodeId, newMaxNodeId]) => update(state, { tracing: { diff --git a/frontend/javascripts/oxalis/model/reducers/skeletontracing_reducer_helpers.ts b/frontend/javascripts/oxalis/model/reducers/skeletontracing_reducer_helpers.ts index 09e5b207941..298ff49be78 100644 --- a/frontend/javascripts/oxalis/model/reducers/skeletontracing_reducer_helpers.ts +++ b/frontend/javascripts/oxalis/model/reducers/skeletontracing_reducer_helpers.ts @@ -146,7 +146,7 @@ export function createNode( radius, rotation, viewport, - mag: mag, + resolution: mag, id: nextNewId, timestamp, bitDepth: state.datasetConfiguration.fourBit ? 4 : 8, @@ -561,7 +561,10 @@ export function addTreesAndGroups( ); const hasInvalidNodeIds = getMinimumNodeId(trees) < Constants.MIN_NODE_ID; const needsReassignedIds = - Object.keys(skeletonTracing.trees).length > 0 || hasInvalidTreeIds || hasInvalidNodeIds; + Object.keys(skeletonTracing.trees).length > 0 || + skeletonTracing.treeGroups.length > 0 || + hasInvalidTreeIds || + hasInvalidNodeIds; if (!needsReassignedIds) { // Without reassigning ids, the code is considerably faster. 
@@ -631,20 +634,22 @@ export function addTreesAndGroups( return Maybe.Just([newTrees, treeGroups, newNodeId - 1]); } -export function deleteTree( +export function deleteTrees( skeletonTracing: SkeletonTracing, - tree: Tree, + treeIds: number[], suppressActivatingNextNode: boolean = false, ): Maybe<[TreeMap, number | null | undefined, number | null | undefined, number]> { - // Delete tree - const newTrees = _.omit(skeletonTracing.trees, tree.treeId); + if (treeIds.length === 0) return Maybe.Nothing(); + // Delete trees + const newTrees = _.omit(skeletonTracing.trees, treeIds); let newActiveTreeId = null; let newActiveNodeId = null; if (_.size(newTrees) > 0 && !suppressActivatingNextNode) { - // Setting the tree active whose id is the next highest compared to the id of the deleted tree. - newActiveTreeId = getNearestTreeId(tree.treeId, newTrees); + // Setting the tree active whose id is the next highest compared to the ids of the deleted trees. + const maximumTreeId = _.max(treeIds) || Constants.MIN_TREE_ID; + newActiveTreeId = getNearestTreeId(maximumTreeId, newTrees); // @ts-expect-error ts-migrate(2571) FIXME: Object is of type 'unknown'. 
newActiveNodeId = +_.first(Array.from(newTrees[newActiveTreeId].nodes.keys())) || null; } @@ -846,7 +851,7 @@ function serverNodeToMutableNode(n: ServerNode): MutableNode { rotation: Utils.point3ToVector3(n.rotation), bitDepth: n.bitDepth, viewport: n.viewport, - mag: n.mag, + resolution: n.mag, radius: n.radius, timestamp: n.createdTimestamp, interpolation: n.interpolation, diff --git a/frontend/javascripts/oxalis/model/sagas/proofread_saga.ts b/frontend/javascripts/oxalis/model/sagas/proofread_saga.ts index 47ea84ff230..3839d8f48c3 100644 --- a/frontend/javascripts/oxalis/model/sagas/proofread_saga.ts +++ b/frontend/javascripts/oxalis/model/sagas/proofread_saga.ts @@ -270,7 +270,7 @@ function* createEditableMapping(): Saga { // Get volume tracing again to make sure the version is up to date const upToDateVolumeTracing = yield* select((state) => getActiveSegmentationTracing(state)); if (upToDateVolumeTracing == null) { - throw new Error("No active segmentation tracing layer. Cannot create editble mapping."); + throw new Error("No active segmentation tracing layer. 
Cannot create editable mapping."); } const volumeTracingId = upToDateVolumeTracing.tracingId; diff --git a/frontend/javascripts/oxalis/model/sagas/skeletontracing_saga.ts b/frontend/javascripts/oxalis/model/sagas/skeletontracing_saga.ts index d2c3aa976e7..287f71350dd 100644 --- a/frontend/javascripts/oxalis/model/sagas/skeletontracing_saga.ts +++ b/frontend/javascripts/oxalis/model/sagas/skeletontracing_saga.ts @@ -468,6 +468,7 @@ export function* watchSkeletonTracingAsync(): Saga { "DELETE_BRANCHPOINT", "SELECT_NEXT_TREE", "DELETE_TREE", + "DELETE_TREES", "BATCH_UPDATE_GROUPS_AND_TREES", "CENTER_ACTIVE_NODE", ], diff --git a/frontend/javascripts/oxalis/store.ts b/frontend/javascripts/oxalis/store.ts index f36f1c95592..dc4847893e1 100644 --- a/frontend/javascripts/oxalis/store.ts +++ b/frontend/javascripts/oxalis/store.ts @@ -89,7 +89,7 @@ export type MutableNode = { rotation: Vector3; bitDepth: number; viewport: number; - mag: number; + resolution: number; radius: number; timestamp: number; interpolation: boolean; diff --git a/frontend/javascripts/oxalis/view/layouting/flex_layout_wrapper.tsx b/frontend/javascripts/oxalis/view/layouting/flex_layout_wrapper.tsx index 6c79170cbcf..59b34a3300a 100644 --- a/frontend/javascripts/oxalis/view/layouting/flex_layout_wrapper.tsx +++ b/frontend/javascripts/oxalis/view/layouting/flex_layout_wrapper.tsx @@ -207,10 +207,7 @@ class FlexLayoutWrapper extends React.PureComponent { rebuildLayout() { const model = this.loadCurrentModel(); this.updateToModelStateAndAdjustIt(model); - this.setState({ - model, - }); - setTimeout(this.onLayoutChange, 1); + this.setState({ model }, () => this.onLayoutChange()); if (this.props.layoutName !== DEFAULT_LAYOUT_NAME) { sendAnalyticsEvent("load_custom_layout", { diff --git a/frontend/javascripts/oxalis/view/layouting/tracing_layout_view.tsx b/frontend/javascripts/oxalis/view/layouting/tracing_layout_view.tsx index a1671264ae5..4a92943d240 100644 --- 
a/frontend/javascripts/oxalis/view/layouting/tracing_layout_view.tsx +++ b/frontend/javascripts/oxalis/view/layouting/tracing_layout_view.tsx @@ -194,14 +194,12 @@ class TracingLayoutView extends React.PureComponent { app.vent.emit("rerender"); if (model != null) { - this.setState({ - model, + this.setState({ model }, () => { + if (this.props.autoSaveLayouts) { + this.saveCurrentLayout(layoutName); + } }); } - - if (this.props.autoSaveLayouts) { - this.saveCurrentLayout(layoutName); - } }; debouncedOnLayoutChange = _.debounce(() => this.onLayoutChange(), Constants.RESIZE_THROTTLE_TIME); diff --git a/frontend/javascripts/oxalis/view/right-border-tabs/connectome_tab/connectome_view.tsx b/frontend/javascripts/oxalis/view/right-border-tabs/connectome_tab/connectome_view.tsx index 75535fa47b8..1b22d003da9 100644 --- a/frontend/javascripts/oxalis/view/right-border-tabs/connectome_tab/connectome_view.tsx +++ b/frontend/javascripts/oxalis/view/right-border-tabs/connectome_tab/connectome_view.tsx @@ -163,7 +163,7 @@ const synapseNodeCreator = (synapseId: number, synapsePosition: Vector3): Mutabl radius: Constants.DEFAULT_NODE_RADIUS, rotation: [0, 0, 0], viewport: 0, - mag: 0, + resolution: 0, id: synapseId, timestamp: Date.now(), bitDepth: 8, diff --git a/frontend/javascripts/oxalis/view/right-border-tabs/scrollable_virtualized_tree.tsx b/frontend/javascripts/oxalis/view/right-border-tabs/scrollable_virtualized_tree.tsx index f2a8a8439fe..aa02951bc02 100644 --- a/frontend/javascripts/oxalis/view/right-border-tabs/scrollable_virtualized_tree.tsx +++ b/frontend/javascripts/oxalis/view/right-border-tabs/scrollable_virtualized_tree.tsx @@ -1,7 +1,7 @@ import { Tree as AntdTree, type TreeProps } from "antd"; import type { BasicDataNode } from "antd/es/tree"; import { throttle } from "lodash"; -import { useCallback, useRef } from "react"; +import { forwardRef, useCallback, useRef } from "react"; import type RcTree from "rc-tree"; const MIN_SCROLL_SPEED = 30; @@ -10,8 +10,10 @@ 
const MIN_SCROLL_AREA_HEIGHT = 60; const SCROLL_AREA_RATIO = 10; // 1/10th of the container height const THROTTLE_TIME = 25; -function ScrollableVirtualizedTree( - props: TreeProps & { ref: React.RefObject }, +// React.forwardRef does not support generic types, so we need to define the type of the ref separately. +function ScrollableVirtualizedTreeInner( + props: TreeProps, + ref: React.Ref, ) { const wrapperRef = useRef(null); // biome-ignore lint/correctness/useExhaustiveDependencies: biome is not smart enough to notice that the function needs to be re-created when wrapperRef changes. @@ -56,9 +58,15 @@ function ScrollableVirtualizedTree( return (
- +
); } +const ScrollableVirtualizedTree = forwardRef(ScrollableVirtualizedTreeInner) as < + T extends BasicDataNode, +>( + props: TreeProps & { ref?: React.Ref }, +) => ReturnType; + export default ScrollableVirtualizedTree; diff --git a/frontend/javascripts/oxalis/view/right-border-tabs/segments_tab/segments_view.tsx b/frontend/javascripts/oxalis/view/right-border-tabs/segments_tab/segments_view.tsx index 65ecdfaec27..15a23d13a6d 100644 --- a/frontend/javascripts/oxalis/view/right-border-tabs/segments_tab/segments_view.tsx +++ b/frontend/javascripts/oxalis/view/right-border-tabs/segments_tab/segments_view.tsx @@ -1900,7 +1900,7 @@ class SegmentsView extends React.Component { overflow: "hidden", }} > - + { }); checkAndConfirmDeletingInitialNode(treeIdsToDelete).then(() => { // Update the store at once - const deleteTreeActions: BatchableUpdateTreeAction[] = treeIdsToDelete.map((treeId) => - deleteTreeAction(treeId), - ); this.props.onBatchUpdateGroupsAndTreesAction( - updateTreeActions.concat(deleteTreeActions, [setTreeGroupsAction(newTreeGroups)]), + updateTreeActions.concat([ + deleteTreesAction(treeIdsToDelete), + setTreeGroupsAction(newTreeGroups), + ]), ); }); }; @@ -510,8 +510,7 @@ class SkeletonTabView extends React.PureComponent { if (selectedTreeCount > 0) { const deleteAllSelectedTrees = () => { checkAndConfirmDeletingInitialNode(selectedTreeIds).then(() => { - const deleteTreeActions = selectedTreeIds.map((treeId) => deleteTreeAction(treeId)); - this.props.onBatchActions(deleteTreeActions, "DELETE_TREE"); + this.props.onDeleteTrees(selectedTreeIds); this.setState({ selectedTreeIds: [], }); @@ -1033,6 +1032,10 @@ const mapDispatchToProps = (dispatch: Dispatch) => ({ dispatch(deleteTreeAsUserAction()); }, + onDeleteTrees(treeIds: number[]) { + dispatch(deleteTreesAction(treeIds)); + }, + onBatchActions(actions: Array, actionName: string) { dispatch(batchActions(actions, actionName)); }, diff --git 
a/frontend/javascripts/oxalis/view/right-border-tabs/trees_tab/tree_hierarchy_renderers.tsx b/frontend/javascripts/oxalis/view/right-border-tabs/trees_tab/tree_hierarchy_renderers.tsx index 08039bc4897..c43db01f878 100644 --- a/frontend/javascripts/oxalis/view/right-border-tabs/trees_tab/tree_hierarchy_renderers.tsx +++ b/frontend/javascripts/oxalis/view/right-border-tabs/trees_tab/tree_hierarchy_renderers.tsx @@ -140,6 +140,7 @@ const createMenuForTree = (tree: Tree, props: Props, hideContextMenu: () => void onClick: () => { props.deselectAllTrees(); Store.dispatch(deleteTreeAction(tree.treeId)); + hideContextMenu(); }, title: "Delete Tree", disabled: isEditingDisabled, diff --git a/frontend/javascripts/test/backend-snapshot-tests/datasets.e2e.ts b/frontend/javascripts/test/backend-snapshot-tests/datasets.e2e.ts index ab2e5f0856f..9a091665301 100644 --- a/frontend/javascripts/test/backend-snapshot-tests/datasets.e2e.ts +++ b/frontend/javascripts/test/backend-snapshot-tests/datasets.e2e.ts @@ -1,8 +1,15 @@ import _ from "lodash"; -import { tokenUserA, setCurrToken, resetDatabase, writeTypeCheckingFile } from "test/e2e-setup"; +import { + tokenUserA, + setCurrToken, + resetDatabase, + writeTypeCheckingFile, + replaceVolatileValues, +} from "test/e2e-setup"; import type { APIDataset } from "types/api_flow_types"; import * as api from "admin/admin_rest_api"; import test from "ava"; +import fs from "node:fs"; async function getFirstDataset(): Promise { const datasets = await api.getActiveDatasetsOfMyOrganization(); @@ -15,6 +22,7 @@ async function getFirstDataset(): Promise { test.before("Reset database and change token", async () => { resetDatabase(); setCurrToken(tokenUserA); + await api.triggerDatasetCheck("http://localhost:9000"); }); test.serial("getDatasets", async (t) => { let datasets = await api.getDatasets(); @@ -29,19 +37,19 @@ test.serial("getDatasets", async (t) => { writeTypeCheckingFile(datasets, "dataset", "APIDatasetCompact", { isArray: true, }); - 
t.snapshot(datasets); + t.snapshot(replaceVolatileValues(datasets)); }); test("getActiveDatasets", async (t) => { let datasets = await api.getActiveDatasetsOfMyOrganization(); datasets = _.sortBy(datasets, (d) => d.name); - t.snapshot(datasets); + t.snapshot(replaceVolatileValues(datasets)); }); test("getDatasetAccessList", async (t) => { const dataset = await getFirstDataset(); const accessList = _.sortBy(await api.getDatasetAccessList(dataset), (user) => user.id); - t.snapshot(accessList); + t.snapshot(replaceVolatileValues(accessList)); }); test("updateDatasetTeams", async (t) => { const [dataset, newTeams] = await Promise.all([getFirstDataset(), api.getEditableTeams()]); @@ -49,7 +57,7 @@ test("updateDatasetTeams", async (t) => { dataset, newTeams.map((team) => team.id), ); - t.snapshot(updatedDataset); + t.snapshot(replaceVolatileValues(updatedDataset)); // undo the Change await api.updateDatasetTeams( dataset, @@ -62,3 +70,139 @@ test("updateDatasetTeams", async (t) => { // await api.revokeDatasetSharingToken(dataset.name); // t.pass(); // }); + +test("Zarr streaming", async (t) => { + const zattrsResp = await fetch("/data/zarr/Organization_X/test-dataset/segmentation/.zattrs", { + headers: new Headers(), + }); + const zattrs = await zattrsResp.text(); + t.snapshot(zattrs); + + const rawDataResponse = await fetch( + "/data/zarr/Organization_X/test-dataset/segmentation/1/0.1.1.0", + { + headers: new Headers(), + }, + ); + const bytes = await rawDataResponse.arrayBuffer(); + const base64 = btoa(String.fromCharCode(...new Uint8Array(bytes.slice(-128)))); + t.snapshot(base64); +}); + +test("Zarr 3 streaming", async (t) => { + const zarrJsonResp = await fetch( + "/data/zarr3_experimental/Organization_X/test-dataset/segmentation/zarr.json", + { + headers: new Headers(), + }, + ); + const zarrJson = await zarrJsonResp.text(); + t.snapshot(zarrJson); + + const rawDataResponse = await fetch( + 
"/data/zarr3_experimental/Organization_X/test-dataset/segmentation/1/0.1.1.0", + { + headers: new Headers(), + }, + ); + const bytes = await rawDataResponse.arrayBuffer(); + const base64 = btoa(String.fromCharCode(...new Uint8Array(bytes.slice(-128)))); + t.snapshot(base64); +}); + +test("Dataset upload", async (t) => { + const uploadId = "test-dataset-upload-" + Date.now(); + + await fetch("/data/datasets/reserveUpload", { + method: "POST", + headers: new Headers({ + "Content-Type": "application/json", + }), + body: JSON.stringify({ + filePaths: ["test-dataset-upload.zip"], + folderId: "570b9f4e4bb848d0885ea917", + initialTeams: [], + layersToLink: [], + name: "test-dataset-upload", + organization: "Organization_X", + totalFileCount: 1, + uploadId: uploadId, + }), + }); + + const filePath = "test/dataset/test-dataset.zip"; + const testDataset = fs.readFileSync(filePath); + + let formData = new FormData(); + formData.append("resumableChunkNumber", "1"); + formData.append("resumableChunkSize", "10485760"); + formData.append("resumableCurrentChunkSize", "71988"); + formData.append("resumableTotalSize", "71988"); + formData.append("resumableType", "application/zip"); + formData.append("resumableIdentifier", uploadId + "/test-dataset.zip"); + formData.append("resumableFilename", "test-dataset.zip"); + formData.append("resumableRelativePath", "test-dataset.zip"); + formData.append("resumableTotalChunks", "1"); + + // Setting the correct content type header automatically does not work (the boundary is not included) + // We can not extract the boundary from the FormData object + // Thus we have to set the content type header ourselves and create the body manually + + const boundary = "----WebKitFormBoundaryAqTsFa4N9FW7zF7I"; + let bodyString = `--${boundary}\r\n`; + // @ts-ignore + for (const [key, value] of formData.entries()) { + bodyString += `Content-Disposition: form-data; name="${key}"\r\n\r\n${value}\r\n`; + bodyString += `--${boundary}\r\n`; + } + bodyString += 
`Content-Disposition: form-data; name="file"; filename="test-dataset.zip"\r\n`; + bodyString += "Content-Type: application/octet-stream\r\n\r\n"; + + // We have to send the file as bytes, otherwise JS does some encoding, resulting in erroneous bytes + + const formBytes = new TextEncoder().encode(bodyString); + const fileBytes = new Uint8Array(testDataset); + const endBytes = new TextEncoder().encode(`\r\n--${boundary}--`); + const body = new Uint8Array(formBytes.length + fileBytes.length + endBytes.length); + body.set(formBytes, 0); + body.set(fileBytes, formBytes.length); + body.set(endBytes, formBytes.length + fileBytes.length); + + let content_type = `multipart/form-data; boundary=${boundary}`; + + const uploadResult = await fetch("/data/datasets", { + method: "POST", + headers: new Headers({ + "Content-Type": content_type, + }), + body: body, + }); + + if (uploadResult.status !== 200) { + t.fail("Dataset upload failed"); + } + + const finishResult = await fetch("/data/datasets/finishUpload", { + method: "POST", + headers: new Headers({ + "Content-Type": "application/json", + }), + body: JSON.stringify({ + uploadId: uploadId, + needsConversion: false, + }), + }); + + if (finishResult.status !== 200) { + t.fail("Dataset upload failed at finish"); + } + + const result = await fetch("/api/datasets/Organization_X/test-dataset-upload/health", { + headers: new Headers(), + }); + + if (result.status !== 200) { + t.fail("Dataset health check after upload failed"); + } + t.pass(); +}); diff --git a/frontend/javascripts/test/e2e-setup.ts b/frontend/javascripts/test/e2e-setup.ts index 9e8dee48c81..e8d6ed720d8 100644 --- a/frontend/javascripts/test/e2e-setup.ts +++ b/frontend/javascripts/test/e2e-setup.ts @@ -3,7 +3,7 @@ import _ from "lodash"; // @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module 'deep... 
Remove this comment to see the full error message import deepForEach from "deep-for-each"; // @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module 'node... Remove this comment to see the full error message -import fetch, { Headers, Request, Response, FetchError } from "node-fetch"; +import fetch, { Headers, FormData, Request, Response, FetchError, File } from "node-fetch"; import fs from "node:fs"; // @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module 'shel... Remove this comment to see the full error message import shell from "shelljs"; @@ -39,6 +39,7 @@ const volatileKeys: Array = [ "lastActivity", "tracingTime", "tracingId", + "sortingKey", ]; export function replaceVolatileValues(obj: ArbitraryObject | null | undefined) { if (obj == null) return obj; @@ -66,7 +67,7 @@ global.fetch = function fetchWrapper(url, options) { let newUrl = url; // @ts-expect-error ts-migrate(2339) FIXME: Property 'indexOf' does not exist on type 'Request... Remove this comment to see the full error message - if (url.indexOf("http:") === -1) { + if (url.indexOf("http:") === -1 && url.indexOf("https:") === -1) { newUrl = `http://localhost:9000${url}`; } @@ -83,6 +84,8 @@ global.Request = Request; global.Response = Response; // @ts-ignore FIXME: Element implicitly has an 'any' type because type ... Remove this comment to see the full error message global.FetchError = FetchError; +global.FormData = FormData; +global.File = File; const { JSDOM } = require("jsdom"); @@ -130,7 +133,7 @@ export async function writeTypeCheckingFile( const fullTypeAnnotation = options.isArray ? 
`Array<${typeString}>` : typeString; fs.writeFileSync( `frontend/javascripts/test/snapshots/type-check/test-type-checking-${name}.ts`, - ` + ` import type { ${typeString} } from "types/api_flow_types"; const a: ${fullTypeAnnotation} = ${JSON.stringify(object)}`, ); diff --git a/frontend/javascripts/test/libs/nml.spec.ts b/frontend/javascripts/test/libs/nml.spec.ts index e73937386f7..5506e01c012 100644 --- a/frontend/javascripts/test/libs/nml.spec.ts +++ b/frontend/javascripts/test/libs/nml.spec.ts @@ -34,7 +34,7 @@ const createDummyNode = (id: number): Node => ({ untransformedPosition: [id, id, id], additionalCoordinates: [], radius: id, - mag: 10, + resolution: 10, rotation: [id, id, id], timestamp: id, viewport: 1, diff --git a/frontend/javascripts/test/reducers/skeletontracing_reducer.spec.ts b/frontend/javascripts/test/reducers/skeletontracing_reducer.spec.ts index 331db69c183..5c29fa9d46c 100644 --- a/frontend/javascripts/test/reducers/skeletontracing_reducer.spec.ts +++ b/frontend/javascripts/test/reducers/skeletontracing_reducer.spec.ts @@ -124,7 +124,7 @@ test("SkeletonTracing should add a new node", (t) => { untransformedPosition: position, rotation, viewport, - mag: mag, + resolution: mag, id: 1, radius: 1, }); @@ -289,7 +289,7 @@ test("SkeletonTracing should delete nodes and split the tree", (t) => { untransformedPosition: [0, 0, 0], additionalCoordinates: null, radius: 10, - mag: 10, + resolution: 10, rotation: [0, 0, 0], timestamp: 0, viewport: 1, @@ -447,7 +447,7 @@ test("SkeletonTracing should delete an edge and split the tree", (t) => { untransformedPosition: [0, 0, 0], additionalCoordinates: null, radius: 10, - mag: 10, + resolution: 10, rotation: [0, 0, 0], timestamp: 0, viewport: 1, @@ -882,6 +882,20 @@ test("SkeletonTracing should delete several trees", (t) => { t.deepEqual(_.size(newSkeletonTracing.trees), 0); t.not(newSkeletonTracing.trees, initialSkeletonTracing.trees); }); +test("SkeletonTracing should delete several trees at once", (t) => { 
+ const createTreeAction = SkeletonTracingActions.createTreeAction(); + const deleteTreesAction = SkeletonTracingActions.deleteTreesAction([1, 2, 3]); + // create trees and delete them + const newState = ChainReducer(initialState) + .apply(SkeletonTracingReducer, createTreeAction) + .apply(SkeletonTracingReducer, createTreeAction) + .apply(SkeletonTracingReducer, deleteTreesAction) + .unpack(); + t.not(newState, initialState); + const newSkeletonTracing = enforceSkeletonTracing(newState.tracing); + t.deepEqual(_.size(newSkeletonTracing.trees), 0); + t.not(newSkeletonTracing.trees, initialSkeletonTracing.trees); +}); test("SkeletonTracing should set a new active tree", (t) => { const createTreeAction = SkeletonTracingActions.createTreeAction(); const setActiveTreeAction = SkeletonTracingActions.setActiveTreeAction(2); diff --git a/frontend/javascripts/test/snapshots/public-test/test-bundle/test/backend-snapshot-tests/datasets.e2e.js.md b/frontend/javascripts/test/snapshots/public-test/test-bundle/test/backend-snapshot-tests/datasets.e2e.js.md index 719d80174c7..f54066d7901 100644 --- a/frontend/javascripts/test/snapshots/public-test/test-bundle/test/backend-snapshot-tests/datasets.e2e.js.md +++ b/frontend/javascripts/test/snapshots/public-test/test-bundle/test/backend-snapshot-tests/datasets.e2e.js.md @@ -11,10 +11,10 @@ Generated by [AVA](https://avajs.dev). [ { colorLayerNames: [], - created: 1460379470082, + created: 'created', displayName: null, folderId: '570b9f4e4bb848d0885ea917', - id: '570b9f4e4bb848d0885ee711', + id: 'id', isActive: false, isEditable: true, isUnreported: true, @@ -27,10 +27,10 @@ Generated by [AVA](https://avajs.dev). }, { colorLayerNames: [], - created: 1460379470080, + created: 'created', displayName: null, folderId: '570b9f4e4bb848d0885ea917', - id: '570b9f4e4bb848d0885ee713', + id: 'id', isActive: false, isEditable: true, isUnreported: true, @@ -43,10 +43,10 @@ Generated by [AVA](https://avajs.dev). 
}, { colorLayerNames: [], - created: 1460379470079, + created: 'created', displayName: null, folderId: '570b9f4e4bb848d0885ea917', - id: '570b9f4e4bb848d0885ee712', + id: 'id', isActive: false, isEditable: true, isUnreported: true, @@ -58,51 +58,43 @@ Generated by [AVA](https://avajs.dev). tags: [], }, { - colorLayerNames: [ - 'color_1', - 'color_2', - 'color_3', - ], - created: 1508495293763, + colorLayerNames: [], + created: 'created', displayName: null, folderId: '570b9f4e4bb848d0885ea917', - id: '59e9cfbdba632ac2ab8b23b3', - isActive: true, + id: 'id', + isActive: false, isEditable: true, - isUnreported: false, + isUnreported: true, lastUsedByUser: 0, name: 'confocal-multi_knossos', owningOrganization: 'Organization_X', segmentationLayerNames: [], - status: '', + status: 'No longer available on datastore.', tags: [], }, { - colorLayerNames: [ - 'color', - ], - created: 1508495293789, + colorLayerNames: [], + created: 'created', displayName: null, folderId: '570b9f4e4bb848d0885ea917', - id: '59e9cfbdba632ac2ab8b23b5', - isActive: true, + id: 'id', + isActive: false, isEditable: true, - isUnreported: false, + isUnreported: true, lastUsedByUser: 0, name: 'l4_sample', owningOrganization: 'Organization_X', - segmentationLayerNames: [ - 'segmentation', - ], - status: '', + segmentationLayerNames: [], + status: 'No longer available on datastore.', tags: [], }, { colorLayerNames: [], - created: 1460379603792, + created: 'created', displayName: null, folderId: '570b9f4e4bb848d0885ea917', - id: '570b9fd34bb848d0885ee716', + id: 'id', isActive: false, isEditable: true, isUnreported: true, @@ -113,276 +105,51 @@ Generated by [AVA](https://avajs.dev). 
status: 'No longer available on datastore.', tags: [], }, - ] - -## getActiveDatasets - -> Snapshot 1 - - [ { - allowedTeams: [ - { - id: '570b9f4b2a7c0e3b008da6ec', - name: 'team_X1', - organization: 'Organization_X', - }, - ], - allowedTeamsCumulative: [ - { - id: '570b9f4b2a7c0e3b008da6ec', - name: 'team_X1', - organization: 'Organization_X', - }, - ], - created: 1508495293763, - dataSource: { - dataLayers: [ - { - boundingBox: { - depth: 256, - height: 512, - topLeft: [ - 0, - 0, - 0, - ], - width: 512, - }, - category: 'color', - elementClass: 'uint8', - name: 'color_1', - resolutions: [ - [ - 1, - 1, - 1, - ], - [ - 2, - 2, - 2, - ], - [ - 4, - 4, - 4, - ], - [ - 8, - 8, - 8, - ], - [ - 16, - 16, - 16, - ], - ], - }, - { - boundingBox: { - depth: 256, - height: 512, - topLeft: [ - 0, - 0, - 0, - ], - width: 512, - }, - category: 'color', - elementClass: 'uint8', - name: 'color_2', - resolutions: [ - [ - 1, - 1, - 1, - ], - [ - 2, - 2, - 2, - ], - [ - 4, - 4, - 4, - ], - [ - 8, - 8, - 8, - ], - [ - 16, - 16, - 16, - ], - ], - }, - { - boundingBox: { - depth: 256, - height: 512, - topLeft: [ - 0, - 0, - 0, - ], - width: 512, - }, - category: 'color', - elementClass: 'uint8', - name: 'color_3', - resolutions: [ - [ - 1, - 1, - 1, - ], - [ - 2, - 2, - 2, - ], - [ - 4, - 4, - 4, - ], - [ - 8, - 8, - 8, - ], - [ - 16, - 16, - 16, - ], - ], - }, - ], - id: { - name: 'confocal-multi_knossos', - team: 'Organization_X', - }, - scale: { - factor: [ - 22, - 22, - 44.599998474121094, - ], - unit: 'nanometer', - }, - }, - dataStore: { - allowsUpload: true, - isScratch: false, - jobsEnabled: false, - jobsSupportedByAvailableWorkers: [], - name: 'localhost', - url: 'http://localhost:9000', - }, - description: null, + colorLayerNames: [], + created: 'created', displayName: null, folderId: '570b9f4e4bb848d0885ea917', + id: 'id', isActive: true, isEditable: true, - isPublic: false, isUnreported: false, lastUsedByUser: 0, - logoUrl: '/assets/images/mpi-logos.svg', - metadata: [ 
- { - key: 'key', - type: 'number', - value: 4, - }, - ], - name: 'confocal-multi_knossos', + name: 'test-dataset', owningOrganization: 'Organization_X', - publication: null, - sortingKey: 1508495293763, + segmentationLayerNames: [ + 'segmentation', + ], + status: '', tags: [], - usedStorageBytes: 0, }, + ] + +## getActiveDatasets + +> Snapshot 1 + + [ { - allowedTeams: [ - { - id: '570b9f4b2a7c0e3b008da6ec', - name: 'team_X1', - organization: 'Organization_X', - }, - ], - allowedTeamsCumulative: [ - { - id: '570b9f4b2a7c0e3b008da6ec', - name: 'team_X1', - organization: 'Organization_X', - }, - ], - created: 1508495293789, + allowedTeams: [], + allowedTeamsCumulative: [], + created: 'created', dataSource: { dataLayers: [ { boundingBox: { - depth: 1024, - height: 1024, + depth: 100, + height: 100, topLeft: [ - 3072, - 3072, - 512, + 50, + 50, + 25, ], - width: 1024, - }, - category: 'color', - elementClass: 'uint8', - name: 'color', - resolutions: [ - [ - 1, - 1, - 1, - ], - [ - 2, - 2, - 1, - ], - [ - 4, - 4, - 1, - ], - [ - 8, - 8, - 2, - ], - [ - 16, - 16, - 4, - ], - ], - }, - { - boundingBox: { - depth: 1024, - height: 1024, - topLeft: [ - 3072, - 3072, - 512, - ], - width: 1024, + width: 100, }, category: 'segmentation', elementClass: 'uint32', - largestSegmentId: 2504697, + largestSegmentId: 176, name: 'segmentation', resolutions: [ [ @@ -390,37 +157,14 @@ Generated by [AVA](https://avajs.dev). 1, 1, ], - [ - 2, - 2, - 1, - ], - [ - 4, - 4, - 1, - ], - [ - 8, - 8, - 2, - ], - [ - 16, - 16, - 4, - ], ], }, ], - id: { - name: 'l4_sample', - team: 'Organization_X', - }, + id: 'id', scale: { factor: [ - 11.239999771118164, - 11.239999771118164, + 11.24, + 11.24, 28, ], unit: 'nanometer', @@ -444,10 +188,10 @@ Generated by [AVA](https://avajs.dev). 
lastUsedByUser: 0, logoUrl: '/assets/images/mpi-logos.svg', metadata: [], - name: 'l4_sample', + name: 'test-dataset', owningOrganization: 'Organization_X', publication: null, - sortingKey: 1508495293789, + sortingKey: 'sortingKey', tags: [], usedStorageBytes: 0, }, @@ -461,24 +205,24 @@ Generated by [AVA](https://avajs.dev). { email: 'user_A@scalableminds.com', firstName: 'user_A', - id: '570b9f4d2a7c0e4d008da6ef', + id: 'id', isAdmin: true, isAnonymous: false, isDatasetManager: true, lastName: 'last_A', teams: [ { - id: '570b9f4b2a7c0e3b008da6ec', + id: 'id', isTeamManager: true, name: 'team_X1', }, { - id: '59882b370d889b84020efd3f', + id: 'id', isTeamManager: false, name: 'team_X3', }, { - id: '59882b370d889b84020efd6f', + id: 'id', isTeamManager: true, name: 'team_X4', }, @@ -487,35 +231,19 @@ Generated by [AVA](https://avajs.dev). { email: 'user_B@scalableminds.com', firstName: 'user_B', - id: '670b9f4d2a7c0e4d008da6ef', + id: 'id', isAdmin: false, isAnonymous: false, isDatasetManager: true, lastName: 'last_B', teams: [ { - id: '570b9f4b2a7c0e3b008da6ec', + id: 'id', isTeamManager: true, name: 'team_X1', }, ], }, - { - email: 'user_C@scalableminds.com', - firstName: 'user_C', - id: '770b9f4d2a7c0e4d008da6ef', - isAdmin: false, - isAnonymous: false, - isDatasetManager: false, - lastName: 'last_C', - teams: [ - { - id: '570b9f4b2a7c0e3b008da6ec', - isTeamManager: false, - name: 'team_X1', - }, - ], - }, ] ## updateDatasetTeams @@ -528,3 +256,23 @@ Generated by [AVA](https://avajs.dev). 
'59882b370d889b84020efd6f', '69882b370d889b84020efd4f', ] + +## Zarr streaming + +> Snapshot 1 + + '{"multiscales":[{"version":"0.4","name":"segmentation","axes":[{"name":"c","type":"channel"},{"name":"x","type":"space","unit":"nanometer"},{"name":"y","type":"space","unit":"nanometer"},{"name":"z","type":"space","unit":"nanometer"}],"datasets":[{"path":"1","coordinateTransformations":[{"type":"scale","scale":[1,11.24,11.24,28]}]}]}]}' + +> Snapshot 2 + + 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQAAAAEAAAABAAAAAQAAAAEAAAABAAAAAQAAAAEAAAABAAAAAQAAAAEAAAABAAAAAQAAAAEAAAA=' + +## Zarr 3 streaming + +> Snapshot 1 + + '{"zarr_format":3,"node_type":"group","attributes":{"ome":{"version":"0.5","multiscales":[{"name":"segmentation","axes":[{"name":"c","type":"channel"},{"name":"x","type":"space","unit":"nanometer"},{"name":"y","type":"space","unit":"nanometer"},{"name":"z","type":"space","unit":"nanometer"}],"datasets":[{"path":"1","coordinateTransformations":[{"type":"scale","scale":[1,11.24,11.24,28]}]}]}]}}}' + +> Snapshot 2 + + 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQAAAAEAAAABAAAAAQAAAAEAAAABAAAAAQAAAAEAAAABAAAAAQAAAAEAAAABAAAAAQAAAAEAAAA=' diff --git a/frontend/javascripts/test/snapshots/public-test/test-bundle/test/backend-snapshot-tests/datasets.e2e.js.snap b/frontend/javascripts/test/snapshots/public-test/test-bundle/test/backend-snapshot-tests/datasets.e2e.js.snap index 054bb7f37c2..a47da3e0e5c 100644 Binary files a/frontend/javascripts/test/snapshots/public-test/test-bundle/test/backend-snapshot-tests/datasets.e2e.js.snap and b/frontend/javascripts/test/snapshots/public-test/test-bundle/test/backend-snapshot-tests/datasets.e2e.js.snap differ diff --git a/test/dataset/.gitignore b/test/dataset/.gitignore new file mode 100644 index 00000000000..867373c7958 --- /dev/null +++ b/test/dataset/.gitignore @@ -0,0 +1 @@ +!test-dataset.zip diff --git 
a/test/dataset/test-dataset.zip b/test/dataset/test-dataset.zip new file mode 100644 index 00000000000..16e1a3924d2 Binary files /dev/null and b/test/dataset/test-dataset.zip differ diff --git a/test/e2e/End2EndSpec.scala b/test/e2e/End2EndSpec.scala index 9a85437e684..dc61e6c5d38 100644 --- a/test/e2e/End2EndSpec.scala +++ b/test/e2e/End2EndSpec.scala @@ -1,5 +1,6 @@ package e2e +import com.scalableminds.util.io.{PathUtils, ZipIO} import com.typesafe.scalalogging.LazyLogging import org.scalatestplus.play.guice._ import org.specs2.main.Arguments @@ -8,6 +9,8 @@ import play.api.inject.guice.GuiceApplicationBuilder import play.api.libs.ws.{WSClient, WSResponse} import play.api.test.WithServer +import java.io.File +import java.nio.file.Paths import scala.concurrent.Await import scala.concurrent.duration._ import scala.sys.process._ @@ -27,6 +30,8 @@ class End2EndSpec(arguments: Arguments) extends Specification with GuiceFakeAppl "pass the e2e tests" in new WithServer(app = application, port = testPort) { + ensureTestDataset() + val resp: WSResponse = Await.result(ws.url(s"http://localhost:$testPort").get(), 2 seconds) resp.status === 200 @@ -43,4 +48,43 @@ class End2EndSpec(arguments: Arguments) extends Specification with GuiceFakeAppl customArgumentsMap.groupBy(_(0).substring(2)).view.mapValues(_(0).last).toMap } + private def ensureTestDataset(): Unit = { + val testDatasetPath = "test/dataset/test-dataset.zip" + val dataDirectory = new File("binaryData/Organization_X") + if (dataDirectory.exists()) { + println("Deleting existing data directory Organization_X") + PathUtils.deleteDirectoryRecursively(dataDirectory.toPath) + } + dataDirectory.mkdirs() + val testDatasetZip = new File(testDatasetPath) + if (!testDatasetZip.exists()) { + throw new Exception("Test dataset zip file does not exist.") + } + // Skip unzipping if the test dataset is already present + if (!dataDirectory.listFiles().exists(_.getName == "test-dataset")) + ZipIO.unzipToFolder( + testDatasetZip, + 
Paths.get(dataDirectory.toPath.toString, "test-dataset"), + includeHiddenFiles = true, + hiddenFilesWhitelist = List(), + truncateCommonPrefix = true, + excludeFromPrefix = None + ) + + // Test if the dataset was unzipped successfully + if (!dataDirectory.listFiles().exists(_.getName == "test-dataset")) { + throw new Exception("Test dataset was not unzipped successfully.") + } + val testFile = new File(dataDirectory, "test-dataset/datasource-properties.json") + if (!testFile.exists()) { + throw new Exception("Required file does not exist.") + } + val testFileSource = scala.io.Source.fromFile(testFile) + val testFileContent = try testFileSource.mkString + finally testFileSource.close() + if (testFileContent.isEmpty) { + throw new Exception("Required file is empty.") + } + } + } diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datavault/FileSystemDataVault.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datavault/FileSystemDataVault.scala index bb16d34cd00..0d42244c6a0 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datavault/FileSystemDataVault.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datavault/FileSystemDataVault.scala @@ -1,15 +1,16 @@ package com.scalableminds.webknossos.datastore.datavault import com.scalableminds.util.tools.Fox -import net.liftweb.common.Box.tryo -import com.scalableminds.util.tools.Fox.{bool2Fox, box2Fox} +import com.scalableminds.util.tools.Fox.bool2Fox import com.scalableminds.webknossos.datastore.storage.DataVaultService +import net.liftweb.common.{Box, Full} import org.apache.commons.lang3.builder.HashCodeBuilder import java.nio.ByteBuffer -import java.nio.file.{Files, Path, Paths} +import java.nio.channels.{AsynchronousFileChannel, CompletionHandler} +import java.nio.file.{Files, Path, Paths, StandardOpenOption} import java.util.stream.Collectors -import scala.concurrent.ExecutionContext +import 
scala.concurrent.{ExecutionContext, Promise} import scala.jdk.CollectionConverters._ class FileSystemDataVault extends DataVault { @@ -24,31 +25,55 @@ class FileSystemDataVault extends DataVault { private def readBytesLocal(localPath: Path, range: RangeSpecifier)(implicit ec: ExecutionContext): Fox[Array[Byte]] = if (Files.exists(localPath)) { range match { - case Complete() => tryo(Files.readAllBytes(localPath)).toFox + case Complete() => + readAsync(localPath, 0, Math.toIntExact(Files.size(localPath))) + case StartEnd(r) => - tryo { - val channel = Files.newByteChannel(localPath) - val buf = ByteBuffer.allocateDirect(r.length) - channel.position(r.start) - channel.read(buf) - buf.rewind() - val arr = new Array[Byte](r.length) - buf.get(arr) - arr - }.toFox + readAsync(localPath, r.start, r.length) + case SuffixLength(length) => - tryo { - val channel = Files.newByteChannel(localPath) - val buf = ByteBuffer.allocateDirect(length) - channel.position(channel.size() - length) - channel.read(buf) - buf.rewind() - val arr = new Array[Byte](length) - buf.get(arr) - arr - }.toFox + val fileSize = Files.size(localPath) + readAsync(localPath, fileSize - length, length) } - } else Fox.empty + } else { + Fox.empty + } + + private def readAsync(path: Path, position: Long, length: Int)(implicit ec: ExecutionContext): Fox[Array[Byte]] = { + val promise = Promise[Box[Array[Byte]]]() + val buffer = ByteBuffer.allocateDirect(length) + var channel: AsynchronousFileChannel = null + + try { + channel = AsynchronousFileChannel.open(path, StandardOpenOption.READ) + + channel.read( + buffer, + position, + buffer, + new CompletionHandler[Integer, ByteBuffer] { + override def completed(result: Integer, buffer: ByteBuffer): Unit = { + buffer.rewind() + val arr = new Array[Byte](length) + buffer.get(arr) + promise.success(Full(arr)) + channel.close() + } + + override def failed(exc: Throwable, buffer: ByteBuffer): Unit = { + promise.failure(exc) + channel.close() + } + } + ) + } catch { + 
case e: Throwable => + promise.failure(e) + if (channel != null && channel.isOpen) channel.close() + } + + promise.future + } override def listDirectory(path: VaultPath, maxItems: Int)(implicit ec: ExecutionContext): Fox[List[VaultPath]] = vaultPathToLocalPath(path).map(