From 8c79390e5d1534562601934a985567db1b0ee8e8 Mon Sep 17 00:00:00 2001
From: Tom Herold
Date: Thu, 23 Mar 2023 14:36:36 +0100
Subject: [PATCH 01/14] Refactor deprecated antd Dropdown menus (#6898)

* fix navbar Menu deprecation warning (WIP)
* restored more navbar items (WIP)
* more navbar fixes
* refactor FolderTree menu
* remove data-group-id attributes
* updated changelog
* makes changes compatible with antd v4.23.x
* fix linter
* applied PR feedback
* more PR feedback
* upgrade antd to v4.24.8
* updated changelog
* apply PR feedback
* upgrade antd to v 4.28.x
* refactor all occurrences of
* changed more dropdown menus (WIP)
* more menu conversions (WIP)
* more menu craziness (WIP)
* upgrade antd to v 4.28.x
* restored context menu (WIP)
* restore layout menu
* updated changelog
* restore context menu position styling
* fix typechecking
* applied PR feedback
* fix menu crash in dashboard
* reactivated mapping warnings
* fix rules of hooks error in context menu
* applied PR feedback
---
 CHANGELOG.unreleased.md                       |    2 +-
 .../admin/task/task_annotation_view.tsx       |  110 +-
 .../admin/task/task_search_form.tsx           |   20 +-
 .../admin/voxelytics/task_list_view.tsx       |   46 +-
 .../advanced_dataset/dataset_action_view.tsx  |  121 +-
 .../advanced_dataset/dataset_table.tsx        |    9 +-
 .../javascripts/dashboard/dataset_view.tsx    |   10 +-
 .../dashboard/folders/folder_tree.tsx         |   92 +-
 .../view/action-bar/private_links_view.tsx    |   94 +-
 .../share_view_dataset_modal_view.tsx         |    2 +-
 .../oxalis/view/action-bar/toolbar_view.tsx   |   43 +-
 .../view/action-bar/tracing_actions_view.tsx  |  285 ++---
 .../action-bar/view_dataset_actions_view.tsx  |   59 +-
 .../oxalis/view/action_bar_view.tsx           |   33 +-
 .../javascripts/oxalis/view/context_menu.tsx  | 1122 +++++++++--------
 .../comment_tab/comment_tab_view.tsx          |   40 +-
 .../connectome_tab/synapse_tree.tsx           |   31 +-
 .../dataset_info_tab_view.tsx                 |   49 +-
 .../segments_tab/segment_list_item.tsx        |  177 ++-
 .../segments_tab/segments_view_helper.tsx     |  140 +-
 .../right-border-tabs/skeleton_tab_view.tsx   |   93 +-
 .../right-border-tabs/tree_hierarchy_view.tsx |  254 ++--
 .../oxalis/view/td_view_controls.tsx          |  153 ++-
 23 files changed, 1490 insertions(+), 1495 deletions(-)

diff --git a/CHANGELOG.unreleased.md b/CHANGELOG.unreleased.md
index c43f6c0857..914c946bd3 100644
--- a/CHANGELOG.unreleased.md
+++ b/CHANGELOG.unreleased.md
@@ -26,7 +26,7 @@ For upgrade instructions, please check the [migration guide](MIGRATIONS.released
 - Fixed support for rendering of negative floats. [#6895](https://github.com/scalableminds/webknossos/pull/6895)
 - Fixed caching issues with webworkers. [#6932](https://github.com/scalableminds/webknossos/pull/6932)
 - Fixed download button for annotations which was disabled in some cases. [#6931](https://github.com/scalableminds/webknossos/pull/6931)
-
+- Fixed antd deprecation warning for Dropdown menus. 
[#6898](https://github.com/scalableminds/webknossos/pull/6898) ### Removed ### Breaking Changes diff --git a/frontend/javascripts/admin/task/task_annotation_view.tsx b/frontend/javascripts/admin/task/task_annotation_view.tsx index d600640e56..2ff8b4ca53 100644 --- a/frontend/javascripts/admin/task/task_annotation_view.tsx +++ b/frontend/javascripts/admin/task/task_annotation_view.tsx @@ -1,4 +1,4 @@ -import { Dropdown, Menu, Modal } from "antd"; +import { Dropdown, MenuProps, Modal } from "antd"; import { EyeOutlined, PlayCircleOutlined, @@ -30,7 +30,6 @@ import Toast from "libs/toast"; import TransferTaskModal from "dashboard/transfer_task_modal"; import messages from "messages"; import { getVolumeDescriptors } from "oxalis/model/accessors/volumetracing_accessor"; -const { Item } = Menu; const { confirm } = Modal; type OwnProps = { task: APITask; @@ -101,7 +100,7 @@ class TaskAnnotationView extends React.PureComponent { })); }; - getDropdownMenu(annotation: APIAnnotation) { + getDropdownMenu(annotation: APIAnnotation): MenuProps { let doesAnnotationNotBelongToActiveUser = true; if (annotation.owner && this.props.activeUser) { @@ -120,57 +119,64 @@ class TaskAnnotationView extends React.PureComponent { Open ); - return ( - - - {label} - - - + return { + items: [ + { + key: `${annotation.id}-view`, + label: {label}, + }, + { + key: `${annotation.id}-transfer`, + onClick: () => this.setState({ currentAnnotation: annotation, isTransferModalOpen: true, - }) - } - > - - Transfer - - - { - const isVolumeIncluded = getVolumeDescriptors(annotation).length > 0; - return downloadAnnotation(annotation.id, "Task", isVolumeIncluded); - }} - icon={} - > - Download - - - this.resetAnnotation(annotation)}> - - Reset - - this.deleteAnnotation(annotation)}> - - Reset and Cancel - - {annotation.state === "Finished" ? ( - this.reOpenAnnotation(annotation)}> - - Reopen - - ) : ( - this.finishAnnotation(annotation)}> - - Finish - - )} - - ); + }), + icon: , + label: "Transfer", + }, + { + key: `${annotation.id}-download`, + label: ( + { + const isVolumeIncluded = getVolumeDescriptors(annotation).length > 0; + return downloadAnnotation(annotation.id, "Task", isVolumeIncluded); + }} + icon={} + > + Download + + ), + }, + { + key: `${annotation.id}-reset`, + onClick: () => this.resetAnnotation(annotation), + icon: , + label: "Reset", + }, + { + key: `${annotation.id}-delete`, + onClick: () => this.deleteAnnotation(annotation), + icon: , + label: "Reset and Cancel", + }, + annotation.state === "Finished" + ? 
{ + key: `${annotation.id}-reopen`, + onClick: () => this.reOpenAnnotation(annotation), + icon: , + label: "Reopen", + } + : { + key: `${annotation.id}-finish`, + onClick: () => this.finishAnnotation(annotation), + icon: , + label: "Finish", + }, + ], + }; } render() { @@ -206,7 +212,7 @@ class TaskAnnotationView extends React.PureComponent { - + Actions diff --git a/frontend/javascripts/admin/task/task_search_form.tsx b/frontend/javascripts/admin/task/task_search_form.tsx index 25d0a19201..4a99977d5f 100644 --- a/frontend/javascripts/admin/task/task_search_form.tsx +++ b/frontend/javascripts/admin/task/task_search_form.tsx @@ -1,4 +1,4 @@ -import { Form, Row, Dropdown, Menu, Col, Button, Input, Select } from "antd"; +import { Form, Row, Dropdown, Col, Button, Input, Select } from "antd"; import { FormInstance } from "antd/lib/form"; import { DownloadOutlined, DownOutlined, RetweetOutlined } from "@ant-design/icons"; // @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module '@sca... Remove this comment to see the full error message @@ -276,14 +276,16 @@ class TaskSearchForm extends React.Component { }} > this.handleSearchFormFinish(true)}> - - - Show random subset - - - } + menu={{ + onClick: () => this.handleSearchFormFinish(true), + items: [ + { + key: "1", + icon: , + label: "Show random subset", + }, + ], + }} > - + From 80fa50921c993e423fd8dc1d09bc04ffecb6ac4f Mon Sep 17 00:00:00 2001 From: Florian M Date: Mon, 27 Mar 2023 10:29:38 +0200 Subject: [PATCH 02/14] Reject dataset uploads if organization storage quota is exceeded (#6893) * Reject dataset uploads if organization storage quota is exceeded * warn user in upload-dataset-view when storage is exceeded and disable upload button * changelog --------- Co-authored-by: Philipp Otto Co-authored-by: Philipp Otto --- CHANGELOG.unreleased.md | 1 + .../WKRemoteDataStoreController.scala | 3 ++ conf/messages | 1 + .../admin/dataset/dataset_upload_view.tsx | 32 +++++++++++++++++-- 4 files changed, 35 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.unreleased.md b/CHANGELOG.unreleased.md index 914c946bd3..d57d75a003 100644 --- a/CHANGELOG.unreleased.md +++ b/CHANGELOG.unreleased.md @@ -19,6 +19,7 @@ For upgrade instructions, please check the [migration guide](MIGRATIONS.released - Interpolation during rendering is now more performance intensive, since the rendering approach was changed. Therefore, interpolation is disabled by default. On the flip side, the rendered quality is often higher than it used to be. [#6748](https://github.com/scalableminds/webknossos/pull/6748) - Updated the styling of the "welcome" screen for new users to be in line with the new branding. [#6904](https://github.com/scalableminds/webknossos/pull/6904) - Improved Terms-of-Service modal (e.g., allow to switch organization even when modal was blocking the remaining usage of WEBKNOSSOS). [#6930](https://github.com/scalableminds/webknossos/pull/6930) +- Uploads are now blocked when the organization’s storage quota is exceeded. [#6893](https://github.com/scalableminds/webknossos/pull/6893) ### Fixed - Fixed an issue with text hints not being visible on the logout page for dark mode users. 
[#6916](https://github.com/scalableminds/webknossos/pull/6916) diff --git a/app/controllers/WKRemoteDataStoreController.scala b/app/controllers/WKRemoteDataStoreController.scala index c51a95e816..f259cdc78c 100644 --- a/app/controllers/WKRemoteDataStoreController.scala +++ b/app/controllers/WKRemoteDataStoreController.scala @@ -60,6 +60,9 @@ class WKRemoteDataStoreController @Inject()( organization <- organizationDAO.findOneByName(uploadInfo.organization)(GlobalAccessContext) ?~> Messages( "organization.notFound", uploadInfo.organization) ~> NOT_FOUND + usedStorageBytes <- organizationDAO.getUsedStorage(organization._id) + _ <- Fox.runOptional(organization.includedStorageBytes)(includedStorage => + bool2Fox(usedStorageBytes <= includedStorage)) ?~> "dataSet.upload.storageExceeded" ~> FORBIDDEN _ <- bool2Fox(organization._id == user._organization) ?~> "notAllowed" ~> FORBIDDEN _ <- dataSetService.assertValidDataSetName(uploadInfo.name) _ <- dataSetService.assertNewDataSetName(uploadInfo.name, organization._id) ?~> "dataSet.name.alreadyTaken" diff --git a/conf/messages b/conf/messages index 5c84737d5f..254bd2671e 100644 --- a/conf/messages +++ b/conf/messages @@ -100,6 +100,7 @@ dataSet.upload.validation.failed=Failed to validate Dataset information for uplo dataSet.upload.linkRestricted=Can only link layers of datasets that are either public or allowed to be administrated by your account dataSet.upload.invalidLinkedLayers=Could not link all requested layers dataSet.upload.noFiles=Tried to finish upload with no files. May be a retry of a failed finish request, see previous errors. +dataSet.upload.storageExceeded=Cannot upload dataset because the storage quota of the organization is exceeded. dataSet.explore.failed.readFile=Failed to read remote file dataSet.explore.magDtypeMismatch=Element class must be the same for all mags of a layer. 
Got {0} diff --git a/frontend/javascripts/admin/dataset/dataset_upload_view.tsx b/frontend/javascripts/admin/dataset/dataset_upload_view.tsx index 1b136d63d2..ae2fa94de2 100644 --- a/frontend/javascripts/admin/dataset/dataset_upload_view.tsx +++ b/frontend/javascripts/admin/dataset/dataset_upload_view.tsx @@ -9,9 +9,15 @@ import classnames from "classnames"; import _ from "lodash"; import { useDropzone, FileWithPath } from "react-dropzone"; import ErrorHandling from "libs/error_handling"; -import type { RouteComponentProps } from "react-router-dom"; +import { Link, RouteComponentProps } from "react-router-dom"; import { withRouter } from "react-router-dom"; -import type { APITeam, APIDataStore, APIUser, APIDatasetId } from "types/api_flow_types"; +import type { + APITeam, + APIDataStore, + APIUser, + APIDatasetId, + APIOrganization, +} from "types/api_flow_types"; import type { OxalisState } from "oxalis/store"; import { reserveDatasetUpload, @@ -41,6 +47,8 @@ import { FormInstance } from "antd/lib/form"; import type { Vector3 } from "oxalis/constants"; import { FormItemWithInfo, confirmAsync } from "../../dashboard/dataset/helper_components"; import FolderSelection from "dashboard/folders/folder_selection"; +import { hasPricingPlanExceededStorage } from "admin/organization/pricing_plan_utils"; +import { enforceActiveOrganization } from "oxalis/model/accessors/organization_accessors"; const FormItem = Form.Item; const REPORT_THROTTLE_THRESHOLD = 1 * 60 * 1000; // 1 min @@ -56,6 +64,7 @@ type OwnProps = { }; type StateProps = { activeUser: APIUser | null | undefined; + organization: APIOrganization; }; type Props = OwnProps & StateProps; type PropsWithFormAndRouter = Props & { @@ -613,6 +622,23 @@ class DatasetUploadView extends React.Component { }} > + {hasPricingPlanExceededStorage(this.props.organization) ? ( + + Your organization has exceeded the available storage. Uploading new datasets is + disabled. Visit the{" "} + + organization page + {" "} + for details. + + } + style={{ marginBottom: 8 }} + /> + ) : null} +
{ size="large" type="primary" htmlType="submit" + disabled={hasPricingPlanExceededStorage(this.props.organization)} style={{ width: "100%", }} @@ -1043,6 +1070,7 @@ function FileUploadArea({ const mapStateToProps = (state: OxalisState): StateProps => ({ activeUser: state.activeUser, + organization: enforceActiveOrganization(state.activeOrganization), }); const connector = connect(mapStateToProps); From d46eb37abcfe49f22f0cfa717b1f43af44967d4d Mon Sep 17 00:00:00 2001 From: frcroth Date: Mon, 27 Mar 2023 11:49:55 +0200 Subject: [PATCH 03/14] Implement viewing sharded neuroglancer precomputed datasets (#6920) --- CHANGELOG.unreleased.md | 1 + MIGRATIONS.released.md | 3 +- .../binary/explore/PrecomputedExplorer.scala | 1 - .../CompressedMortonCodeTestSuite.scala | 37 ++++ .../scalableminds/util/geometry/Vec3Int.scala | 3 + .../datastore/datareaders/ChunkReader.scala | 10 +- .../datastore/datareaders/ChunkUtils.scala | 2 +- .../datastore/datareaders/DatasetArray.scala | 33 +++- .../datastore/datareaders/DatasetHeader.scala | 2 + .../datareaders/n5/N5ChunkReader.scala | 7 +- .../precomputed/CompressedMortonCode.scala | 39 ++++ .../precomputed/PrecomputedArray.scala | 181 ++++++++++++++++++ .../precomputed/PrecomputedHeader.scala | 17 +- .../datastore/services/FindDataService.scala | 3 +- 14 files changed, 318 insertions(+), 21 deletions(-) create mode 100644 test/backend/CompressedMortonCodeTestSuite.scala create mode 100644 webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/precomputed/CompressedMortonCode.scala diff --git a/CHANGELOG.unreleased.md b/CHANGELOG.unreleased.md index d57d75a003..4659b1fa0b 100644 --- a/CHANGELOG.unreleased.md +++ b/CHANGELOG.unreleased.md @@ -14,6 +14,7 @@ For upgrade instructions, please check the [migration guide](MIGRATIONS.released - Added support for datasets where layers are transformed individually (with an affine matrix). Transformations can be specified via datasource-properties.json or via JS API (will be ephemeral, then). [#6748](https://github.com/scalableminds/webknossos/pull/6748) - Added list of all respective team members to the administration page for teams. [#6915](https://github.com/scalableminds/webknossos/pull/6915) - Added email notifications for WK worker jobs. [#6918](https://github.com/scalableminds/webknossos/pull/6918) +- Added support for viewing sharded neuroglancer precomputed datasets. [#6920](https://github.com/scalableminds/webknossos/pull/6920) ### Changed - Interpolation during rendering is now more performance intensive, since the rendering approach was changed. Therefore, interpolation is disabled by default. On the flip side, the rendered quality is often higher than it used to be. [#6748](https://github.com/scalableminds/webknossos/pull/6748) diff --git a/MIGRATIONS.released.md b/MIGRATIONS.released.md index b6668ec7ef..767a422156 100644 --- a/MIGRATIONS.released.md +++ b/MIGRATIONS.released.md @@ -1,5 +1,6 @@ # Migration Guide (Released) -All migrations of WEBKNOSOSS are documented in this file. + +All migrations of WEBKNOSSOS are documented in this file. See `MIGRATIONS.unreleased.md` for the changes which are not yet part of an official release. This project adheres to [Calendar Versioning](http://calver.org/) `0Y.0M.MICRO`. 
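The core of the sharded-precomputed patch below is Neuroglancer's compressed Morton code, which interleaves the x, y, z bits of a chunk's grid position. As a hedged sanity sketch of the expectations in the new CompressedMortonCodeTestSuite (values taken from those tests; `encode` as defined later in this patch): a 10x10x10 grid needs ceil(log2(10)) = 4 bits per axis, so for position (1, 2, 3):

```scala
// x = 0001, y = 0010, z = 0011 (4 bits per axis for a 10x10x10 grid).
// Interleaving from the least significant bit upwards:
//   bit 0: x=1, y=0, z=1 -> output bits 0..2 = 0b101 = 5
//   bit 1: x=0, y=1, z=1 -> output bits 3..5 = 0b110 shifted left by 3 = 48
//   bits 2 and 3 are zero on all axes -> code = 5 + 48 = 53
val code = CompressedMortonCode.encode(Array(1, 2, 3), Array(10, 10, 10))
assert(code == 53L) // matches the "encode 1,2,3" test case below
```
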
diff --git a/app/models/binary/explore/PrecomputedExplorer.scala b/app/models/binary/explore/PrecomputedExplorer.scala index ca52456fdb..1de8e1397c 100644 --- a/app/models/binary/explore/PrecomputedExplorer.scala +++ b/app/models/binary/explore/PrecomputedExplorer.scala @@ -30,7 +30,6 @@ class PrecomputedExplorer extends RemoteLayerExplorer { for { name <- guessNameFromPath(remotePath) firstScale <- precomputedHeader.scales.headOption.toFox - _ <- bool2Fox(firstScale.sharding.isEmpty) ?~> "Failed to read dataset: sharding not supported" boundingBox <- BoundingBox.fromSizeArray(firstScale.size).toFox elementClass: ElementClass.Value <- elementClassFromPrecomputedDataType(precomputedHeader.data_type) ?~> "Unknown data type" smallestResolution = firstScale.resolution diff --git a/test/backend/CompressedMortonCodeTestSuite.scala b/test/backend/CompressedMortonCodeTestSuite.scala new file mode 100644 index 0000000000..1410930790 --- /dev/null +++ b/test/backend/CompressedMortonCodeTestSuite.scala @@ -0,0 +1,37 @@ +package backend + +import com.scalableminds.webknossos.datastore.datareaders.precomputed.CompressedMortonCode +import org.scalatestplus.play.PlaySpec + +class CompressedMortonCodeTestSuite extends PlaySpec { + + "Compressed Morton Code" when { + "Grid size = 10,10,10" should { + val grid_size = Array(10, 10, 10) + "encode 0,0,0" in { + assert(CompressedMortonCode.encode(Array(0, 0, 0), grid_size) == 0) + } + "encode 1,2,3" in { + assert(CompressedMortonCode.encode(Array(1, 2, 3), grid_size) == 53) + } + "encode 9,9,9" in { + assert(CompressedMortonCode.encode(Array(9, 9, 9), grid_size) == 3591) + } + "encode 10,10,10" in { + assert(CompressedMortonCode.encode(Array(10, 10, 10), grid_size) == 3640) + } + } + "Grid size = 2048,2048,1024" should { + val grid_size = Array(2048, 2048, 1024) + "encode 0,0,0" in { + assert(CompressedMortonCode.encode(Array(0, 0, 0), grid_size) == 0) + } + "encode 1,2,3" in { + assert(CompressedMortonCode.encode(Array(1, 2, 3), grid_size) == 53) + } + "encode 1024, 512, 684" in { + assert(CompressedMortonCode.encode(Array(1024, 512, 684), grid_size) == 1887570176) + } + } + } +} diff --git a/util/src/main/scala/com/scalableminds/util/geometry/Vec3Int.scala b/util/src/main/scala/com/scalableminds/util/geometry/Vec3Int.scala index be79e2fb6b..27ae5a5b8e 100644 --- a/util/src/main/scala/com/scalableminds/util/geometry/Vec3Int.scala +++ b/util/src/main/scala/com/scalableminds/util/geometry/Vec3Int.scala @@ -63,6 +63,9 @@ case class Vec3Int(x: Int, y: Int, z: Int) { } yield Vec3Int(x, y, z) def product: Int = x * y * z + + def alignWithGridFloor(gridCellSize: Vec3Int): Vec3Int = + this / gridCellSize * gridCellSize } object Vec3Int { diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/ChunkReader.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/ChunkReader.scala index 16f6fff434..fb9d234669 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/ChunkReader.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/ChunkReader.scala @@ -6,6 +6,7 @@ import ucar.ma2.{Array => MultiArray, DataType => MADataType} import java.io.{ByteArrayInputStream, ByteArrayOutputStream, IOException} import javax.imageio.stream.MemoryCacheImageInputStream +import scala.collection.immutable.NumericRange import scala.concurrent.Future import scala.util.Using @@ -28,16 +29,17 @@ class ChunkReader(val header: DatasetHeader, val vaultPath: VaultPath, val 
chunk lazy val chunkSize: Int = header.chunkSize.toList.product @throws[IOException] - def read(path: String, chunkShape: Array[Int]): Future[MultiArray] = { - val chunkBytesAndShape = readChunkBytesAndShape(path) + def read(path: String, chunkShape: Array[Int], range: Option[NumericRange[Long]]): Future[MultiArray] = { + val chunkBytesAndShape = readChunkBytesAndShape(path, range) chunkTyper.wrapAndType(chunkBytesAndShape.map(_._1), chunkBytesAndShape.flatMap(_._2).getOrElse(chunkShape)) } // Returns bytes (optional, None may later be replaced with fill value) // and chunk shape (optional, only for data formats where each chunk reports its own shape, e.g. N5) - protected def readChunkBytesAndShape(path: String): Option[(Array[Byte], Option[Array[Int]])] = + protected def readChunkBytesAndShape(path: String, + range: Option[NumericRange[Long]]): Option[(Array[Byte], Option[Array[Int]])] = Using.Manager { use => - (vaultPath / path).readBytes().map { bytes => + (vaultPath / path).readBytes(range).map { bytes => val is = use(new ByteArrayInputStream(bytes)) val os = use(new ByteArrayOutputStream()) header.compressorImpl.uncompress(is, os) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/ChunkUtils.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/ChunkUtils.scala index f6d10acb2c..630656fbba 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/ChunkUtils.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/ChunkUtils.scala @@ -23,7 +23,7 @@ object ChunkUtils extends LazyLogging { } if (numChunks < 0) { logger.warn( - s"Failed to compute zarr chunk indices. array shape ${arrayShape.toList}, chunkShape: ${arrayChunkSize.toList}, requested ${selectedShape.toList} at ${selectedOffset.toList}") + s"Failed to compute chunk indices. 
array shape ${arrayShape.toList}, chunkShape: ${arrayChunkSize.toList}, requested ${selectedShape.toList} at ${selectedOffset.toList}") } val chunkIndices = new Array[Array[Int]](numChunks) val currentIdx = start.clone diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/DatasetArray.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/DatasetArray.scala index df06c55537..f6f39a80e9 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/DatasetArray.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/DatasetArray.scala @@ -13,6 +13,7 @@ import ucar.ma2.{InvalidRangeException, Array => MultiArray} import java.io.IOException import java.nio.ByteOrder import java.util +import scala.collection.immutable.NumericRange import scala.concurrent.{ExecutionContext, Future} class DatasetArray(relativePath: DatasetPath, @@ -73,7 +74,7 @@ class DatasetArray(relativePath: DatasetPath, val targetBuffer = MultiArrayUtils.createDataBuffer(header.resolvedDataType, shape) val targetInCOrder: MultiArray = MultiArrayUtils.orderFlippedView(MultiArrayUtils.createArrayWithGivenStorage(targetBuffer, shape.reverse)) - val wasCopiedFox = Fox.serialCombined(chunkIndices) { chunkIndex: Array[Int] => + val copiedFuture = Future.sequence(chunkIndices.map { chunkIndex: Array[Int] => for { sourceChunk: MultiArray <- getSourceChunkDataWithCache(axisOrder.permuteIndices(chunkIndex)) offsetInChunk = computeOffsetInChunk(chunkIndex, offset) @@ -82,21 +83,33 @@ class DatasetArray(relativePath: DatasetPath, flip = header.order != ArrayOrder.C) _ = MultiArrayUtils.copyRange(offsetInChunk, sourceChunkInCOrder, targetInCOrder) } yield () - } + }) for { - _ <- wasCopiedFox + _ <- copiedFuture } yield targetBuffer } } - private def getSourceChunkDataWithCache(chunkIndex: Array[Int]): Future[MultiArray] = { - val chunkFilename = getChunkFilename(chunkIndex) - val chunkFilePath = relativePath.resolve(chunkFilename) - val storeKey = chunkFilePath.storeKey - val chunkShape = header.chunkSizeAtIndex(chunkIndex) + protected def getShardedChunkPathAndRange(chunkIndex: Array[Int])( + implicit ec: ExecutionContext): Future[(VaultPath, NumericRange[Long])] = ??? 
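The `Option[NumericRange[Long]]` threaded through `ChunkReader.read` above is a half-open byte interval into a shard file: `None` keeps the old whole-file behavior, while `Some(range)` lets `VaultPath.readBytes` fetch only those bytes. The base class leaves `getShardedChunkPathAndRange` as `???` on purpose; `PrecomputedArray` overrides it further down. A minimal caller-side sketch (shard filename and byte offsets are made up for illustration):

```scala
// Read bytes [1024, 2048) of a shard file, i.e. a single chunk's payload,
// without downloading the rest of the shard (offsets are illustrative).
val range: NumericRange.Exclusive[Long] = Range.Long(1024L, 2048L, 1L)
val chunkBytes: Option[Array[Byte]] = (vaultPath / "0.shard").readBytes(Some(range))
```
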
- chunkContentsCache.getOrLoad(storeKey, key => chunkReader.read(key, chunkShape)) - } + private def getSourceChunkDataWithCache(chunkIndex: Array[Int])(implicit ec: ExecutionContext): Future[MultiArray] = + chunkContentsCache.getOrLoad(chunkIndex.mkString(","), _ => readSourceChunkData(chunkIndex)) + + private def readSourceChunkData(chunkIndex: Array[Int])(implicit ec: ExecutionContext): Future[MultiArray] = + if (header.isSharded) { + for { + (shardPath, chunkRange) <- getShardedChunkPathAndRange(chunkIndex) + chunkShape = header.chunkSizeAtIndex(chunkIndex) + multiArray <- chunkReader.read(shardPath.toString, chunkShape, Some(chunkRange)) + } yield multiArray + } else { + val chunkFilename = getChunkFilename(chunkIndex) + val chunkFilePath = relativePath.resolve(chunkFilename) + val storeKey = chunkFilePath.storeKey + val chunkShape = header.chunkSizeAtIndex(chunkIndex) + chunkReader.read(storeKey, chunkShape, None) + } protected def getChunkFilename(chunkIndex: Array[Int]): String = chunkIndex.mkString(header.dimension_separator.toString) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/DatasetHeader.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/DatasetHeader.scala index 35741c015a..caa218365d 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/DatasetHeader.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/DatasetHeader.scala @@ -45,4 +45,6 @@ trait DatasetHeader { lazy val rank: Int = datasetShape.length def chunkSizeAtIndex(chunkIndex: Array[Int]): Array[Int] = chunkSize + + def isSharded = false } diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/n5/N5ChunkReader.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/n5/N5ChunkReader.scala index 3db9782aff..1e76d3756e 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/n5/N5ChunkReader.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/n5/N5ChunkReader.scala @@ -5,6 +5,7 @@ import com.scalableminds.webknossos.datastore.datavault.VaultPath import com.typesafe.scalalogging.LazyLogging import java.io.{ByteArrayInputStream, ByteArrayOutputStream} +import scala.collection.immutable.NumericRange import scala.util.Using object N5ChunkReader { @@ -23,7 +24,9 @@ class N5ChunkReader(header: DatasetHeader, vaultPath: VaultPath, typedChunkReade val dataExtractor: N5DataExtractor = new N5DataExtractor - override protected def readChunkBytesAndShape(path: String): Option[(Array[Byte], Option[Array[Int]])] = + override protected def readChunkBytesAndShape( + path: String, + range: Option[NumericRange[Long]]): Option[(Array[Byte], Option[Array[Int]])] = Using.Manager { use => def processBytes(bytes: Array[Byte], expectedElementCount: Int): Array[Byte] = { val is = use(new ByteArrayInputStream(bytes)) @@ -37,7 +40,7 @@ class N5ChunkReader(header: DatasetHeader, vaultPath: VaultPath, typedChunkReade } for { - bytes <- (vaultPath / path).readBytes() + bytes <- (vaultPath / path).readBytes(range) (blockHeader, data) = dataExtractor.readBytesAndHeader(bytes) paddedChunkBytes = processBytes(data, blockHeader.blockSize.product) } yield (paddedChunkBytes, Some(blockHeader.blockSize)) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/precomputed/CompressedMortonCode.scala 
b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/precomputed/CompressedMortonCode.scala new file mode 100644 index 0000000000..43a6fcb330 --- /dev/null +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/precomputed/CompressedMortonCode.scala @@ -0,0 +1,39 @@ +package com.scalableminds.webknossos.datastore.datareaders.precomputed + +import scala.math.log10 + +object CompressedMortonCode { + + def log2(x: Double): Double = log10(x) / log10(2.0) + + def encode(position: Array[Int], gridSize: Array[Int]): Long = { + /* + Computes the compressed morton code as per + https://github.com/google/neuroglancer/blob/master/src/neuroglancer/datasource/precomputed/volume.md#compressed-morton-code + https://github.com/google/neuroglancer/blob/162b698f703c86e0b3e92b8d8e0cacb0d3b098df/src/neuroglancer/util/zorder.ts#L72 + */ + val bits = gridSize.map(log2(_).ceil.toInt) + val maxBits = bits.max + var outputBit = 0L + val one = 1L + + var output = 0L + for (bit <- 0 to maxBits) { + if (bit < bits(0)) { + output |= (((position(0) >> bit) & one) << outputBit) + outputBit += 1 + } + if (bit < bits(1)) { + output |= (((position(1) >> bit) & one) << outputBit) + outputBit += 1 + } + if (bit < bits(2)) { + output |= (((position(2) >> bit) & one) << outputBit) + outputBit += 1 + } + } + + output + } + +} diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/precomputed/PrecomputedArray.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/precomputed/PrecomputedArray.scala index 58dbaf27d5..440f1bd127 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/precomputed/PrecomputedArray.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/precomputed/PrecomputedArray.scala @@ -1,12 +1,19 @@ package com.scalableminds.webknossos.datastore.datareaders.precomputed +import com.scalableminds.util.cache.AlfuFoxCache +import com.scalableminds.util.tools.Fox import com.scalableminds.webknossos.datastore.datareaders.{AxisOrder, ChunkReader, DatasetArray, DatasetPath} import com.scalableminds.webknossos.datastore.datavault.VaultPath import com.typesafe.scalalogging.LazyLogging import play.api.libs.json.{JsError, JsSuccess, Json} import java.io.IOException +import java.nio.ByteOrder + +import java.nio.ByteBuffer import java.nio.charset.StandardCharsets +import scala.collection.immutable.NumericRange +import scala.concurrent.{ExecutionContext, Future} object PrecomputedArray extends LazyLogging { @throws[IOException] @@ -69,4 +76,178 @@ class PrecomputedArray(relativePath: DatasetPath, .mkString(header.dimension_separator.toString) } + // SHARDING + // Implemented according to https://github.com/google/neuroglancer/blob/master/src/neuroglancer/datasource/precomputed/sharded.md, + // directly adapted from https://github.com/scalableminds/webknossos-connect/blob/master/wkconnect/backends/neuroglancer/sharding.py. 
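Before the implementation: the addressing scheme splits each chunk's (hashed) Morton code, from the least significant end, into minishard bits (selecting the minishard index inside a shard) and shard bits (selecting the shard file). A worked sketch with illustrative parameters — `preshift_bits = 0`, identity hash, `minishard_bits = 3`, `shard_bits = 3` — mirroring the mask logic defined below:

```scala
// Chunk hash 46 = 0b101110, minishard_bits = 3, shard_bits = 3 (illustrative):
val hash = 46L
val minishardNumber = hash & 0x7L    // low 3 bits  -> 0b110 = 6
val shardNumber = (hash >> 3) & 0x7L // next 3 bits -> 0b101 = 5
// shard_bits = 3 occupies ceil(3/4) = 1 hex digit, so both the minishard
// index and the chunk data for this hash live in the file "5.shard".
```
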
+ + private val shardIndexCache: AlfuFoxCache[VaultPath, Array[Byte]] = + AlfuFoxCache() + + private val minishardIndexCache: AlfuFoxCache[(VaultPath, Int), Seq[(Long, Long, Long)]] = + AlfuFoxCache() + + private def getHashForChunk(chunkIndex: Array[Int]): Long = + CompressedMortonCode.encode(chunkIndex, header.gridSize) + + private lazy val minishardMask = { + header.precomputedScale.sharding match { + case Some(shardingSpec: ShardingSpecification) => + if (shardingSpec.minishard_bits == 0) { + 0 + } else { + var minishardMask = 1L + for (_ <- 0 until shardingSpec.minishard_bits - 1) { + minishardMask <<= 1 + minishardMask |= 1 + } + minishardMask + } + case None => 0 + } + } + + private lazy val shardMask = { + header.precomputedScale.sharding match { + case Some(shardingSpec: ShardingSpecification) => + val oneMask = Long.MinValue // 0xFFFFFFFFFFFFFFFF + val cursor = shardingSpec.minishard_bits + shardingSpec.shard_bits + val shardMask = ~((oneMask >> cursor) << cursor) + shardMask & (~minishardMask) + case None => 0 + } + } + + private lazy val minishardCount = 1 << header.precomputedScale.sharding.map(_.minishard_bits).getOrElse(0) + + private lazy val shardIndexRange: NumericRange.Exclusive[Long] = { + val end = minishardCount * 16 + Range.Long(0, end, 1) + } + + private def getShardIndex(shardPath: VaultPath)(implicit ec: ExecutionContext): Fox[Array[Byte]] = + shardIndexCache.getOrLoad(shardPath, readShardIndex) + + private def readShardIndex(shardPath: VaultPath)(implicit ec: ExecutionContext): Fox[Array[Byte]] = + Fox.option2Fox(shardPath.readBytes(Some(shardIndexRange))) + + private def parseShardIndex(index: Array[Byte]): Seq[(Long, Long)] = + // See https://github.com/google/neuroglancer/blob/master/src/neuroglancer/datasource/precomputed/sharded.md#shard-index-format + index + .grouped(16) // 16 Bytes: 2 uint64 numbers: start_offset, end_offset + .map((bytes: Array[Byte]) => { + (BigInt(bytes.take(8).reverse).toLong, BigInt(bytes.slice(8, 16).reverse).toLong) // bytes reversed because they are stored little endian + }) + .toSeq + + private def getMinishardInfo(chunkHash: Long): (Long, Long) = + header.precomputedScale.sharding match { + case Some(shardingSpec: ShardingSpecification) => + val rawChunkIdentifier = chunkHash >> shardingSpec.preshift_bits + val chunkIdentifier = shardingSpec.hashFunction(rawChunkIdentifier) + val minishardNumber = chunkIdentifier & minishardMask + val shardNumber = (chunkIdentifier & shardMask) >> shardingSpec.minishard_bits + (shardNumber, minishardNumber) + case None => (0, 0) + } + + private def getPathForShard(shardNumber: Long): VaultPath = { + val shardBits = header.precomputedScale.sharding.map(_.shard_bits.toFloat).getOrElse(0f) + if (shardBits == 0) { + vaultPath / relativePath.storeKey / "0.shard" + } else { + val shardString = String.format(s"%1$$${(shardBits / 4).ceil.toInt}s", shardNumber.toHexString).replace(' ', '0') + vaultPath / relativePath.storeKey / s"$shardString.shard" + } + + } + + private def getMinishardIndexRange(minishardNumber: Int, + parsedShardIndex: Seq[(Long, Long)]): NumericRange.Exclusive[Long] = { + val miniShardIndexStart: Long = (shardIndexRange.end) + parsedShardIndex(minishardNumber)._1 + val miniShardIndexEnd: Long = (shardIndexRange.end) + parsedShardIndex(minishardNumber)._2 + Range.Long(miniShardIndexStart, miniShardIndexEnd, 1) + } + + private def parseMinishardIndex(bytes: Array[Byte]): Seq[(Long, Long, Long)] = { + // Because readBytes already decodes gzip, we don't need to decompress here + /* + 
From: https://github.com/google/neuroglancer/blob/master/src/neuroglancer/datasource/precomputed/sharded.md#minishard-index-format + The decoded "minishard index" is a binary string of 24*n bytes, specifying a contiguous C-order array of [3, n] + uint64le values. + */ + val n = bytes.length / 24 + val buf = ByteBuffer.allocate(bytes.length) + buf.put(bytes) + + val longArray = new Array[Long](n * 3) + buf.position(0) + buf.order(ByteOrder.LITTLE_ENDIAN) + buf.asLongBuffer().get(longArray) + // longArray is row major / C-order + /* + From: https://github.com/google/neuroglancer/blob/master/src/neuroglancer/datasource/precomputed/sharded.md#minishard-index-format + Values array[0, 0], ..., array[0, n-1] specify the chunk IDs in the minishard, and are delta encoded, such that + array[0, 0] is equal to the ID of the first chunk, and the ID of chunk i is equal to the sum + of array[0, 0], ..., array[0, i]. + */ + val chunkIds = new Array[Long](n) + chunkIds(0) = longArray(0) + for (i <- 1 until n) { + chunkIds(i) = longArray(i) + chunkIds(i - 1) + } + /* + From: https://github.com/google/neuroglancer/blob/master/src/neuroglancer/datasource/precomputed/sharded.md#minishard-index-format + The size of the data for chunk i is stored as array[2, i]. + Values array[1, 0], ..., array[1, n-1] specify the starting offsets in the shard file of the data corresponding to + each chunk, and are also delta encoded relative to the end of the prior chunk, such that the starting offset of the + first chunk is equal to shard_index_end + array[1, 0], and the starting offset of chunk i is the sum of + shard_index_end + array[1, 0], ..., array[1, i] and array[2, 0], ..., array[2, i-1]. + */ + val chunkSizes = longArray.slice(2 * n, 3 * n) + val chunkStartOffsets = new Array[Long](n) + chunkStartOffsets(0) = longArray(n) + for (i <- 1 until n) { + val startOffsetIndex = i + n + chunkStartOffsets(i) = chunkStartOffsets(i - 1) + longArray(startOffsetIndex) + chunkSizes(i - 1) + } + (chunkIds, chunkStartOffsets, chunkSizes).zipped.map((a, b, c) => (a, b, c)) + } + + private def getMinishardIndex(shardPath: VaultPath, minishardNumber: Int)( + implicit ec: ExecutionContext): Fox[Seq[(Long, Long, Long)]] = + minishardIndexCache.getOrLoad((shardPath, minishardNumber), readMinishardIndex) + + private def readMinishardIndex(vaultPathAndMinishardNumber: (VaultPath, Int))( + implicit ec: ExecutionContext): Fox[Seq[(Long, Long, Long)]] = { + val (vaultPath, minishardNumber) = vaultPathAndMinishardNumber + for { + index <- getShardIndex(vaultPath) + parsedIndex = parseShardIndex(index) + minishardIndexRange = getMinishardIndexRange(minishardNumber, parsedIndex) + indexRaw <- vaultPath.readBytes(Some(minishardIndexRange)) + } yield parseMinishardIndex(indexRaw) + } + + private def getChunkRange(chunkId: Long, + minishardIndex: Seq[(Long, Long, Long)]): Option[NumericRange.Exclusive[Long]] = + for { + chunkSpecification <- minishardIndex.find(_._1 == chunkId) + chunkStart = (shardIndexRange.end) + chunkSpecification._2 + chunkEnd = (shardIndexRange.end) + chunkSpecification._2 + chunkSpecification._3 + } yield Range.Long(chunkStart, chunkEnd, 1) + + override def getShardedChunkPathAndRange(chunkIndex: Array[Int])( + implicit ec: ExecutionContext): Future[(VaultPath, NumericRange[Long])] = { + val chunkIdentifier = getHashForChunk(chunkIndex) + val minishardInfo = getMinishardInfo(chunkIdentifier) + val shardPath = getPathForShard(minishardInfo._1) + for { + minishardIndex <- getMinishardIndex(shardPath, minishardInfo._2.toInt) + 
.toFutureOrThrowException("Could not get minishard index") + chunkRange: NumericRange.Exclusive[Long] <- Fox + .option2Fox(getChunkRange(chunkIdentifier, minishardIndex)) + .toFutureOrThrowException("Chunk range not found in minishard index") + } yield (shardPath, chunkRange) + } + } diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/precomputed/PrecomputedHeader.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/precomputed/PrecomputedHeader.scala index c70a366e06..12b9c45c42 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/precomputed/PrecomputedHeader.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/precomputed/PrecomputedHeader.scala @@ -69,9 +69,24 @@ case class PrecomputedScaleHeader(precomputedScale: PrecomputedScale, precompute .min(precomputedScale.size(dim)) (beginOffset, endOffset) }) + + def gridSize: Array[Int] = (chunkSize, precomputedScale.size).zipped.map((c, s) => (s.toDouble / c).ceil.toInt) + + override def isSharded: Boolean = precomputedScale.sharding.isDefined } -case class ShardingSpecification(`@type`: String) +case class ShardingSpecification(`@type`: String, + preshift_bits: Long, + hash: String, + minishard_bits: Int, + shard_bits: Long, + minishard_index_encoding: String = "raw", + data_encoding: String = "raw") { + + def hashFunction(input: Long): Long = + if (hash == "identity") input + else ??? // not implemented: murmurhash3_x86_128 +} object ShardingSpecification extends JsonImplicits { implicit object ShardingSpecificationFormat extends Format[ShardingSpecification] { diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/FindDataService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/FindDataService.scala index c6af6aa836..eb4ac4cfee 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/FindDataService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/FindDataService.scala @@ -94,7 +94,8 @@ class FindDataService @Inject()(dataServicesHolder: BinaryDataServiceHolder)(imp } } - positionCreationIter((1 to iterationCount).toList, List[Vec3Int]()) :+ dataLayer.boundingBox.topLeft + val positions = positionCreationIter((1 to iterationCount).toList, List[Vec3Int]()) :+ dataLayer.boundingBox.topLeft + positions.map(_.alignWithGridFloor(Vec3Int.full(DataLayer.bucketLength))).distinct } private def checkAllPositionsForData(dataSource: DataSource, From 27dcf807daa2c45bdf181a2d840facdec98d6e5d Mon Sep 17 00:00:00 2001 From: Daniel Date: Mon, 27 Mar 2023 14:10:08 +0200 Subject: [PATCH 04/14] Use new zip.js version to allow zip64 uploads (#6939) * Reject dataset uploads if organization storage quota is exceeded * warn user in upload-dataset-view when storage is exceeded and disable upload button * changelog * Use new zip.js version to allow zip64 uploads, for example * update changelog * also notify airbrake for zip errors * Remove jszip dependency and rewrite code to use zip.js instead --------- Co-authored-by: Florian M Co-authored-by: Philipp Otto Co-authored-by: Philipp Otto --- CHANGELOG.unreleased.md | 1 + .../admin/dataset/dataset_upload_view.tsx | 68 +++++++++---------- .../right-border-tabs/skeleton_tab_view.tsx | 34 ++++++---- package.json | 5 +- yarn.lock | 39 ++--------- 5 files changed, 63 insertions(+), 84 deletions(-) diff --git a/CHANGELOG.unreleased.md 
b/CHANGELOG.unreleased.md index 4659b1fa0b..260d4b6420 100644 --- a/CHANGELOG.unreleased.md +++ b/CHANGELOG.unreleased.md @@ -15,6 +15,7 @@ For upgrade instructions, please check the [migration guide](MIGRATIONS.released - Added list of all respective team members to the administration page for teams. [#6915](https://github.com/scalableminds/webknossos/pull/6915) - Added email notifications for WK worker jobs. [#6918](https://github.com/scalableminds/webknossos/pull/6918) - Added support for viewing sharded neuroglancer precomputed datasets. [#6920](https://github.com/scalableminds/webknossos/pull/6920) +- Added support for uploading zip64 files. [#6939](https://github.com/scalableminds/webknossos/pull/6939) ### Changed - Interpolation during rendering is now more performance intensive, since the rendering approach was changed. Therefore, interpolation is disabled by default. On the flip side, the rendered quality is often higher than it used to be. [#6748](https://github.com/scalableminds/webknossos/pull/6748) diff --git a/frontend/javascripts/admin/dataset/dataset_upload_view.tsx b/frontend/javascripts/admin/dataset/dataset_upload_view.tsx index ae2fa94de2..7078de3134 100644 --- a/frontend/javascripts/admin/dataset/dataset_upload_view.tsx +++ b/frontend/javascripts/admin/dataset/dataset_upload_view.tsx @@ -32,8 +32,7 @@ import Toast from "libs/toast"; import * as Utils from "libs/utils"; import messages from "messages"; import { trackAction } from "oxalis/model/helpers/analytics"; -// @ts-expect-error ts-migrate(2306) FIXME: File ... Remove this comment to see the full error message -import { createReader, BlobReader, ZipReader, Entry } from "zip-js-webpack"; +import { BlobReader, ZipReader, Entry } from "@zip.js/zip.js"; import { CardContainer, DatasetNameFormItem, @@ -473,7 +472,7 @@ class DatasetUploadView extends React.Component { ); }; - validateFiles = (files: FileWithPath[]) => { + validateFiles = async (files: FileWithPath[]) => { if (files.length === 0) { return; } @@ -490,41 +489,40 @@ class DatasetUploadView extends React.Component { }); if (fileExtension === "zip") { - createReader( - new BlobReader(file), - (reader: ZipReader) => { - reader.getEntries((entries: Array) => { - const wkwFile = entries.find((entry: Entry) => - Utils.isFileExtensionEqualTo(entry.filename, "wkw"), - ); - const needsConversion = wkwFile == null; - this.handleNeedsConversionInfo(needsConversion); - - const nmlFile = entries.find((entry: Entry) => - Utils.isFileExtensionEqualTo(entry.filename, "nml"), - ); - if (nmlFile) { - Modal.error({ - content: messages["dataset.upload_zip_with_nml"], - }); - } - }); - }, - () => { + try { + const reader = new ZipReader(new BlobReader(file)); + const entries = await reader.getEntries(); + await reader.close(); + const wkwFile = entries.find((entry: Entry) => + Utils.isFileExtensionEqualTo(entry.filename, "wkw"), + ); + const needsConversion = wkwFile == null; + this.handleNeedsConversionInfo(needsConversion); + + const nmlFile = entries.find((entry: Entry) => + Utils.isFileExtensionEqualTo(entry.filename, "nml"), + ); + if (nmlFile) { Modal.error({ - content: messages["dataset.upload_invalid_zip"], + content: messages["dataset.upload_zip_with_nml"], }); - const form = this.formRef.current; - - if (!form) { - return; - } + } + } catch (e) { + console.error(e); + ErrorHandling.notify(e as Error); + Modal.error({ + content: messages["dataset.upload_invalid_zip"], + }); + const form = this.formRef.current; + + if (!form) { + return; + } - form.setFieldsValue({ - 
zipFile: [], - }); - }, - ); + form.setFieldsValue({ + zipFile: [], + }); + } // We return here since not more than 1 zip archive is supported anyway. return; } else if (fileExtension === "wkw") { diff --git a/frontend/javascripts/oxalis/view/right-border-tabs/skeleton_tab_view.tsx b/frontend/javascripts/oxalis/view/right-border-tabs/skeleton_tab_view.tsx index 1bd8b77c82..a5a04dda0a 100644 --- a/frontend/javascripts/oxalis/view/right-border-tabs/skeleton_tab_view.tsx +++ b/frontend/javascripts/oxalis/view/right-border-tabs/skeleton_tab_view.tsx @@ -21,7 +21,7 @@ import { import { batchActions } from "redux-batched-actions"; import { connect } from "react-redux"; import { saveAs } from "file-saver"; -import JSZip from "jszip"; +import { BlobReader, BlobWriter, ZipReader, Entry } from "@zip.js/zip.js"; import * as React from "react"; import _ from "lodash"; import memoizeOne from "memoize-one"; @@ -208,22 +208,29 @@ export async function importTracingFiles(files: Array, createGroupForEachF const tryParsingFileAsZip = async (file: File) => { try { - // @ts-expect-error ts-migrate(2345) FIXME: Argument of type 'Promise' is not ass... Remove this comment to see the full error message - const zipFile = await JSZip().loadAsync(readFileAsArrayBuffer(file)); - const nmlFileName = Object.keys(zipFile.files).find((key) => - Utils.isFileExtensionEqualTo(key, "nml"), + const reader = new ZipReader(new BlobReader(file)); + const entries = await reader.getEntries(); + const nmlFileEntry = entries.find((entry: Entry) => + Utils.isFileExtensionEqualTo(entry.filename, "nml"), ); - // @ts-expect-error ts-migrate(2769) FIXME: No overload matches this call. - const nmlFile = await zipFile.file(nmlFileName).async("blob"); + + if (nmlFileEntry == null) { + await reader.close(); + throw Error("Zip file doesn't contain an NML file."); + } + + const nmlBlob = await nmlFileEntry.getData(new BlobWriter()); + const nmlFile = new File([nmlBlob], nmlFileEntry.filename); + const nmlImportActions = await tryParsingFileAsNml(nmlFile); - const dataFileName = Object.keys(zipFile.files).find((key) => - Utils.isFileExtensionEqualTo(key, "zip"), + + const dataFileEntry = entries.find((entry: Entry) => + Utils.isFileExtensionEqualTo(entry.filename, "zip"), ); - if (dataFileName) { - // @ts-expect-error ts-migrate(2531) FIXME: Object is possibly 'null'. 
- const dataBlob = await zipFile.file(dataFileName).async("blob"); - const dataFile = new File([dataBlob], dataFileName); + if (dataFileEntry) { + const dataBlob = await dataFileEntry.getData(new BlobWriter()); + const dataFile = new File([dataBlob], dataFileEntry.filename); await Model.ensureSavedState(); const storeState = Store.getState(); const { tracing, dataset } = storeState; @@ -263,6 +270,7 @@ export async function importTracingFiles(files: Array, createGroupForEachF } } + await reader.close(); return nmlImportActions; } catch (error) { // @ts-ignore diff --git a/package.json b/package.json index 13c1539ff7..e9a0b4695f 100644 --- a/package.json +++ b/package.json @@ -151,6 +151,7 @@ "@types/pngjs": "^6.0.1", "@types/three": "^0.142.0", "@use-it/interval": "^1.0.0", + "@zip.js/zip.js": "^2.6.81", "ansi-to-react": "^6.1.6", "antd": "^4.24.8", "backbone-events-standalone": "^0.2.7", @@ -182,7 +183,6 @@ "javascript-natural-sort": "^0.7.1", "js-priority-queue": "^0.1.5", "jsonschema": "^1.2.4", - "jszip": "^3.7.0", "lodash": "^4.17.21", "lz-string": "^1.4.4", "lz4-wasm": "^0.9.2", @@ -229,8 +229,7 @@ "typed-redux-saga": "^1.4.0", "url": "^0.11.0", "url-join": "^4.0.0", - "worker-loader": "^3.0.8", - "zip-js-webpack": "^1.0.0" + "worker-loader": "^3.0.8" }, "resolutions": { "**/mini-store": "^1.1.0" diff --git a/yarn.lock b/yarn.lock index 160d0e5c7f..934cb2d2fe 100644 --- a/yarn.lock +++ b/yarn.lock @@ -2311,6 +2311,11 @@ resolved "https://registry.yarnpkg.com/@xtuc/long/-/long-4.2.2.tgz#d291c6a4e97989b5c61d9acf396ae4fe133a718d" integrity sha512-NuHqBY1PB/D8xU6s/thBgOAiAP7HOYDQ32+BFZILJ8ivkUkAHQnWfn6WhL79Owj1qmUnoN/YPhktdIoucipkAQ== +"@zip.js/zip.js@^2.6.81": + version "2.6.81" + resolved "https://registry.yarnpkg.com/@zip.js/zip.js/-/zip.js-2.6.81.tgz#c3c9618a8e02f3a24d359a0a14d46985fea971f5" + integrity sha512-VXrwa5fthYq74sIZsHarCFVSwnKdispTd/WQBgcNEuB9X0N3L5s8odRCjx9Zw6XsvpG5krqB4ZN4X0lLMyjgDA== + JSONStream@^1.0.3, JSONStream@^1.0.4: version "1.3.5" resolved "https://registry.yarnpkg.com/JSONStream/-/JSONStream-1.3.5.tgz#3208c1f08d3a4d99261ab64f92302bc15e111ca0" @@ -7168,11 +7173,6 @@ image-size@~0.5.0: resolved "https://registry.yarnpkg.com/image-size/-/image-size-0.5.5.tgz#09dfd4ab9d20e29eb1c3e80b8990378df9e3cb9c" integrity sha1-Cd/Uq50g4p6xw+gLiZA3jfnjy5w= -immediate@~3.0.5: - version "3.0.6" - resolved "https://registry.yarnpkg.com/immediate/-/immediate-3.0.6.tgz#9db1dbd0faf8de6fbe0f5dd5e56bb606280de69b" - integrity sha1-nbHb0Pr43m++D13V5Wu2BigN5ps= - immutability-helper@^3.1.1: version "3.1.1" resolved "https://registry.yarnpkg.com/immutability-helper/-/immutability-helper-3.1.1.tgz#2b86b2286ed3b1241c9e23b7b21e0444f52f77b7" @@ -8161,16 +8161,6 @@ jsprim@^1.2.2: json-schema "0.2.3" verror "1.10.0" -jszip@^3.7.0: - version "3.7.0" - resolved "https://registry.yarnpkg.com/jszip/-/jszip-3.7.0.tgz#9b8b995a4e7c9024653ce743e902076a82fdf4e6" - integrity sha512-Y2OlFIzrDOPWUnpU0LORIcDn2xN7rC9yKffFM/7pGhQuhO+SUhfm2trkJ/S5amjFvem0Y+1EALz/MEPkvHXVNw== - dependencies: - lie "~3.3.0" - pako "~1.0.2" - readable-stream "~2.3.6" - set-immediate-shim "~1.0.1" - just-extend@^4.0.2: version "4.2.1" resolved "https://registry.yarnpkg.com/just-extend/-/just-extend-4.2.1.tgz#ef5e589afb61e5d66b24eca749409a8939a8c744" @@ -8313,13 +8303,6 @@ levn@~0.3.0: prelude-ls "~1.1.2" type-check "~0.3.2" -lie@~3.3.0: - version "3.3.0" - resolved "https://registry.yarnpkg.com/lie/-/lie-3.3.0.tgz#dcf82dee545f46074daf200c7c1c5a08e0f40f6a" - integrity 
sha512-UaiMJzeWRlEujzAuw5LokY1L5ecNQYZKfmyZ9L7wDHb/p5etKaxXhohBcrw0EYby+G/NA52vRSN4N39dxHAIwQ== - dependencies: - immediate "~3.0.5" - lines-and-columns@^1.1.6: version "1.1.6" resolved "https://registry.yarnpkg.com/lines-and-columns/-/lines-and-columns-1.1.6.tgz#1c00c743b433cd0a4e80758f7b64a57440d9ff00" @@ -10027,7 +10010,7 @@ packet-reader@1.0.0: resolved "https://registry.yarnpkg.com/packet-reader/-/packet-reader-1.0.0.tgz#9238e5480dedabacfe1fe3f2771063f164157d74" integrity sha512-HAKu/fG3HpHFO0AA8WE8q2g+gBJaZ9MG7fcKk+IJPLTGAD6Psw4443l+9DGRbOIh3/aXr7Phy0TjilYivJo5XQ== -pako@^1.0.5, pako@~1.0.2: +pako@^1.0.5: version "1.0.11" resolved "https://registry.yarnpkg.com/pako/-/pako-1.0.11.tgz#6c9599d340d54dfd3946380252a35705a6b992bf" integrity sha512-4hLB8Py4zZce5s4yd9XzopqwVv/yGNhV1Bl8NTmCq1763HeK2+EwVTv+leGeL13Dnh2wfbqowVPXCIO0z4taYw== @@ -12507,11 +12490,6 @@ set-blocking@^2.0.0: resolved "https://registry.yarnpkg.com/set-blocking/-/set-blocking-2.0.0.tgz#045f9782d011ae9a6803ddd382b24392b3d890f7" integrity sha1-BF+XgtARrppoA93TgrJDkrPYkPc= -set-immediate-shim@~1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/set-immediate-shim/-/set-immediate-shim-1.0.1.tgz#4b2b1b27eb808a9f8dcc481a58e5e56f599f3f61" - integrity sha1-SysbJ+uAip+NzEgaWOXlb1mfP2E= - set-value@^2.0.0, set-value@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/set-value/-/set-value-2.0.1.tgz#a18d40530e6f07de4228c7defe4227af8cad005b" @@ -14672,11 +14650,6 @@ yocto-queue@^0.1.0: resolved "https://registry.yarnpkg.com/yocto-queue/-/yocto-queue-0.1.0.tgz#0294eb3dee05028d31ee1a5fa2c556a6aaf10a1b" integrity sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q== -zip-js-webpack@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/zip-js-webpack/-/zip-js-webpack-1.0.0.tgz#553dd15c76a9b2cfb4de97f338f4eb0fe0e2a897" - integrity sha512-epPHhnoh3nxrpEzM07yLZZtKANIubMeWUH2bslUIPMGo/vBDKJhdYqoupsfI6uT24i8tR9i7+y/ajC3Sa4R21A== - zustand@^3.7.2: version "3.7.2" resolved "https://registry.yarnpkg.com/zustand/-/zustand-3.7.2.tgz#7b44c4f4a5bfd7a8296a3957b13e1c346f42514d" From 46996b0896f20899854ce1c4db11651ea0e1363a Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Tue, 28 Mar 2023 10:21:12 +0200 Subject: [PATCH 05/14] Release 23.04.0 (#6945) --- CHANGELOG.released.md | 26 ++++++++++++++++++++++++++ CHANGELOG.unreleased.md | 18 ++---------------- MIGRATIONS.released.md | 6 ++++++ MIGRATIONS.unreleased.md | 2 +- 4 files changed, 35 insertions(+), 17 deletions(-) diff --git a/CHANGELOG.released.md b/CHANGELOG.released.md index d8c0236365..f03adb34e2 100644 --- a/CHANGELOG.released.md +++ b/CHANGELOG.released.md @@ -7,6 +7,32 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/) and this project adheres to [Calendar Versioning](http://calver.org/) `0Y.0M.MICRO`. For upgrade instructions, please check the [migration guide](MIGRATIONS.released.md). +## [23.04.0](https://github.com/scalableminds/webknossos/releases/tag/23.04.0) - 2023-03-27 +[Commits](https://github.com/scalableminds/webknossos/compare/23.03.1...23.04.0) + +### Highlights +- Added email notifications for WK worker jobs. [#6918](https://github.com/scalableminds/webknossos/pull/6918) +- Added support for viewing sharded neuroglancer precomputed datasets. [#6920](https://github.com/scalableminds/webknossos/pull/6920) + +### Added +- Added support for datasets where layers are transformed individually (with an affine matrix). 
Transformations can be specified via datasource-properties.json or via JS API (will be ephemeral, then). [#6748](https://github.com/scalableminds/webknossos/pull/6748) +- Added list of all respective team members to the administration page for teams. [#6915](https://github.com/scalableminds/webknossos/pull/6915) +- Added support for uploading zip64 files. [#6939](https://github.com/scalableminds/webknossos/pull/6939) + +### Changed +- Interpolation during rendering is now more performance intensive, since the rendering approach was changed. Therefore, interpolation is disabled by default. On the flip side, the rendered quality is often higher than it used to be. [#6748](https://github.com/scalableminds/webknossos/pull/6748) +- Updated the styling of the "welcome" screen for new users to be in line with the new branding. [#6904](https://github.com/scalableminds/webknossos/pull/6904) +- Improved Terms-of-Service modal (e.g., allow to switch organization even when modal was blocking the remaining usage of WEBKNOSSOS). [#6930](https://github.com/scalableminds/webknossos/pull/6930) +- Uploads are now blocked when the organization’s storage quota is exceeded. [#6893](https://github.com/scalableminds/webknossos/pull/6893) + +### Fixed +- Fixed an issue with text hints not being visible on the logout page for dark mode users. [#6916](https://github.com/scalableminds/webknossos/pull/6916) +- Fixed creating task types with a selected preferred mode. [#6928](https://github.com/scalableminds/webknossos/pull/6928) +- Fixed support for rendering of negative floats. [#6895](https://github.com/scalableminds/webknossos/pull/6895) +- Fixed caching issues with webworkers. [#6932](https://github.com/scalableminds/webknossos/pull/6932) +- Fixed download button for annotations which was disabled in some cases. [#6931](https://github.com/scalableminds/webknossos/pull/6931) +- Fixed antd deprecation warning for Dropdown menus. [#6898](https://github.com/scalableminds/webknossos/pull/6898) + ## [23.03.1](https://github.com/scalableminds/webknossos/releases/tag/23.03.1) - 2023-03-14 [Commits](https://github.com/scalableminds/webknossos/compare/23.03.0...23.03.1) diff --git a/CHANGELOG.unreleased.md b/CHANGELOG.unreleased.md index 260d4b6420..d52a793fd5 100644 --- a/CHANGELOG.unreleased.md +++ b/CHANGELOG.unreleased.md @@ -8,28 +8,14 @@ and this project adheres to [Calendar Versioning](http://calver.org/) `0Y.0M.MIC For upgrade instructions, please check the [migration guide](MIGRATIONS.released.md). ## Unreleased -[Commits](https://github.com/scalableminds/webknossos/compare/23.03.1...HEAD) +[Commits](https://github.com/scalableminds/webknossos/compare/23.04.0...HEAD) ### Added -- Added support for datasets where layers are transformed individually (with an affine matrix). Transformations can be specified via datasource-properties.json or via JS API (will be ephemeral, then). [#6748](https://github.com/scalableminds/webknossos/pull/6748) -- Added list of all respective team members to the administration page for teams. [#6915](https://github.com/scalableminds/webknossos/pull/6915) -- Added email notifications for WK worker jobs. [#6918](https://github.com/scalableminds/webknossos/pull/6918) -- Added support for viewing sharded neuroglancer precomputed datasets. [#6920](https://github.com/scalableminds/webknossos/pull/6920) -- Added support for uploading zip64 files. 
[#6939](https://github.com/scalableminds/webknossos/pull/6939) ### Changed -- Interpolation during rendering is now more performance intensive, since the rendering approach was changed. Therefore, interpolation is disabled by default. On the flip side, the rendered quality is often higher than it used to be. [#6748](https://github.com/scalableminds/webknossos/pull/6748) -- Updated the styling of the "welcome" screen for new users to be in line with the new branding. [#6904](https://github.com/scalableminds/webknossos/pull/6904) -- Improved Terms-of-Service modal (e.g., allow to switch organization even when modal was blocking the remaining usage of WEBKNOSSOS). [#6930](https://github.com/scalableminds/webknossos/pull/6930) -- Uploads are now blocked when the organization’s storage quota is exceeded. [#6893](https://github.com/scalableminds/webknossos/pull/6893) ### Fixed -- Fixed an issue with text hints not being visible on the logout page for dark mode users. [#6916](https://github.com/scalableminds/webknossos/pull/6916) -- Fixed creating task types with a selected preferred mode. [#6928](https://github.com/scalableminds/webknossos/pull/6928) -- Fixed support for rendering of negative floats. [#6895](https://github.com/scalableminds/webknossos/pull/6895) -- Fixed caching issues with webworkers. [#6932](https://github.com/scalableminds/webknossos/pull/6932) -- Fixed download button for annotations which was disabled in some cases. [#6931](https://github.com/scalableminds/webknossos/pull/6931) -- Fixed antd deprecation warning for Dropdown menus. [#6898](https://github.com/scalableminds/webknossos/pull/6898) + ### Removed ### Breaking Changes diff --git a/MIGRATIONS.released.md b/MIGRATIONS.released.md index 767a422156..bfdaa76e8a 100644 --- a/MIGRATIONS.released.md +++ b/MIGRATIONS.released.md @@ -6,6 +6,12 @@ See `MIGRATIONS.unreleased.md` for the changes which are not yet part of an offi This project adheres to [Calendar Versioning](http://calver.org/) `0Y.0M.MICRO`. User-facing changes are documented in the [changelog](CHANGELOG.released.md). +## [23.04.0](https://github.com/scalableminds/webknossos/releases/tag/23.04.0) - 2023-03-27 +[Commits](https://github.com/scalableminds/webknossos/compare/23.03.1...23.04.0) + +### Postgres Evolutions: +None. + ## [23.03.1](https://github.com/scalableminds/webknossos/releases/tag/23.03.1) - 2023-03-14 [Commits](https://github.com/scalableminds/webknossos/compare/23.03.0...23.03.1) - WEBKNOSSOS now requires at least Java 11 (up from Java 8). [#6869](https://github.com/scalableminds/webknossos/pull/6869) diff --git a/MIGRATIONS.unreleased.md b/MIGRATIONS.unreleased.md index cea99dd65d..6c92428cdd 100644 --- a/MIGRATIONS.unreleased.md +++ b/MIGRATIONS.unreleased.md @@ -6,6 +6,6 @@ This project adheres to [Calendar Versioning](http://calver.org/) `0Y.0M.MICRO`. User-facing changes are documented in the [changelog](CHANGELOG.released.md). 
## Unreleased -[Commits](https://github.com/scalableminds/webknossos/compare/23.03.1...HEAD) +[Commits](https://github.com/scalableminds/webknossos/compare/23.04.0...HEAD) ### Postgres Evolutions: From 46a8e5d6f49b15e89ba65343ac1bb3929a5ad2f1 Mon Sep 17 00:00:00 2001 From: Florian M Date: Tue, 28 Mar 2023 11:39:49 +0200 Subject: [PATCH 06/14] VaultPath no longer extends nio.Path (#6942) * VaultPath no longer extends nio.Path * rename things (pr feedback) * format --- .../binary/credential/CredentialDAO.scala | 6 +- .../binary/credential/CredentialService.scala | 6 +- .../explore/ExploreRemoteLayerService.scala | 8 +-- .../binary/explore/N5ArrayExplorer.scala | 8 +-- .../explore/N5MultiscalesExplorer.scala | 16 ++--- app/models/binary/explore/NgffExplorer.scala | 33 +++++----- .../binary/explore/PrecomputedExplorer.scala | 16 ++--- .../binary/explore/RemoteLayerExplorer.scala | 17 ++--- .../explore/WebknossosZarrExplorer.scala | 10 +-- .../binary/explore/ZarrArrayExplorer.scala | 8 +-- conf/messages | 6 +- .../dataformats/BucketProvider.scala | 6 +- .../datastore/dataformats/MagLocator.scala | 4 +- .../dataformats/n5/N5BucketProvider.scala | 10 +-- .../dataformats/n5/N5DataLayers.scala | 4 +- .../PrecomputedBucketProvider.scala | 10 +-- .../precomputed/PrecomputedDataLayers.scala | 6 +- .../dataformats/wkw/WKWBucketProvider.scala | 4 +- .../dataformats/wkw/WKWDataLayers.scala | 6 +- .../dataformats/zarr/ZarrBucketProvider.scala | 10 +-- .../dataformats/zarr/ZarrDataLayers.scala | 6 +- .../datastore/datareaders/DatasetArray.scala | 2 +- .../precomputed/PrecomputedArray.scala | 10 ++- .../datavault/FileSystemDataVault.scala | 16 +++++ .../datavault/FileSystemVaultPath.scala | 66 ++++--------------- .../datastore/datavault/HttpsDataVault.scala | 12 ++-- .../datastore/datavault/VaultPath.scala | 58 ++-------------- .../models/datasource/DataLayer.scala | 4 +- .../services/BinaryDataService.scala | 4 +- .../services/BinaryDataServiceHolder.scala | 6 +- .../services/DSRemoteWebKnossosClient.scala | 8 +-- ...tials.scala => DataVaultCredentials.scala} | 18 ++--- ...emService.scala => DataVaultService.scala} | 10 +-- .../EditableMappingLayer.scala | 6 +- .../tracings/volume/VolumeTracingLayer.scala | 8 +-- 35 files changed, 178 insertions(+), 250 deletions(-) rename webknossos-datastore/app/com/scalableminds/webknossos/datastore/storage/{FileSystemCredentials.scala => DataVaultCredentials.scala} (74%) rename webknossos-datastore/app/com/scalableminds/webknossos/datastore/storage/{FileSystemService.scala => DataVaultService.scala} (74%) diff --git a/app/models/binary/credential/CredentialDAO.scala b/app/models/binary/credential/CredentialDAO.scala index c6893f3b03..b21e4ab93a 100644 --- a/app/models/binary/credential/CredentialDAO.scala +++ b/app/models/binary/credential/CredentialDAO.scala @@ -2,7 +2,7 @@ package models.binary.credential import com.scalableminds.util.tools.Fox import com.scalableminds.webknossos.datastore.storage.{ - FileSystemCredential, + DataVaultCredential, GoogleServiceAccountCredential, HttpBasicAuthCredential, S3AccessKeyCredential @@ -79,14 +79,14 @@ class CredentialDAO @Inject()(sqlClient: SqlClient)(implicit ec: ExecutionContex values(${_id}, ${CredentialType.GoogleServiceAccount}, ${credential.name}, ${credential.secretJson.toString}, ${credential.user}, ${credential.organization})""".asUpdate) } yield () - def findOne(id: ObjectId): Fox[FileSystemCredential] = + def findOne(id: ObjectId): Fox[DataVaultCredential] = for { r <- run(q"select $columns from 
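
The rename above leaves the credential hierarchy itself untouched; only the umbrella type is now DataVaultCredential. A minimal sketch of consuming it, assuming only the subtypes visible in this diff (the describe helper is hypothetical, purely illustrative):

    import com.scalableminds.webknossos.datastore.storage._

    // Hypothetical helper: name the credential kind without touching any secret fields.
    def describe(credential: DataVaultCredential): String = credential match {
      case _: HttpBasicAuthCredential        => "HTTP basic auth"
      case _: S3AccessKeyCredential          => "S3 access key"
      case _: GoogleServiceAccountCredential => "Google service account"
      case l: LegacyDataVaultCredential      => "legacy, usable as " + describe(l.toBasicAuth)
    }
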
webknossos.credentials_ where _id = $id".as[CredentialsRow]) firstRow <- r.headOption.toFox parsed <- parseAnyCredential(firstRow) } yield parsed - private def parseAnyCredential(r: CredentialsRow): Fox[FileSystemCredential] = + private def parseAnyCredential(r: CredentialsRow): Fox[DataVaultCredential] = for { typeParsed <- CredentialType.fromString(r.`type`).toFox parsed <- typeParsed match { diff --git a/app/models/binary/credential/CredentialService.scala b/app/models/binary/credential/CredentialService.scala index fdb2d1daec..3bde582d3a 100644 --- a/app/models/binary/credential/CredentialService.scala +++ b/app/models/binary/credential/CredentialService.scala @@ -2,7 +2,7 @@ package models.binary.credential import com.scalableminds.util.tools.Fox import com.scalableminds.webknossos.datastore.storage.{ - FileSystemCredential, + DataVaultCredential, DataVaultsHolder, GoogleServiceAccountCredential, HttpBasicAuthCredential, @@ -22,7 +22,7 @@ class CredentialService @Inject()(credentialDAO: CredentialDAO) { credentialIdentifier: Option[String], credentialSecret: Option[String], userId: ObjectId, - organizationId: ObjectId): Option[FileSystemCredential] = + organizationId: ObjectId): Option[DataVaultCredential] = uri.getScheme match { case DataVaultsHolder.schemeHttps | DataVaultsHolder.schemeHttp => credentialIdentifier.map( @@ -45,7 +45,7 @@ class CredentialService @Inject()(credentialDAO: CredentialDAO) { } yield GoogleServiceAccountCredential(uri.toString, secretJson, userId.toString, organizationId.toString) } - def insertOne(credential: FileSystemCredential)(implicit ec: ExecutionContext): Fox[ObjectId] = { + def insertOne(credential: DataVaultCredential)(implicit ec: ExecutionContext): Fox[ObjectId] = { val _id = ObjectId.generate for { _ <- credential match { diff --git a/app/models/binary/explore/ExploreRemoteLayerService.scala b/app/models/binary/explore/ExploreRemoteLayerService.scala index 52ec5f0a6a..f222e046ce 100644 --- a/app/models/binary/explore/ExploreRemoteLayerService.scala +++ b/app/models/binary/explore/ExploreRemoteLayerService.scala @@ -10,6 +10,7 @@ import com.scalableminds.webknossos.datastore.dataformats.precomputed.{ import com.scalableminds.webknossos.datastore.dataformats.zarr._ import com.scalableminds.webknossos.datastore.datareaders.n5.N5Header import com.scalableminds.webknossos.datastore.datareaders.zarr._ +import com.scalableminds.webknossos.datastore.datavault.VaultPath import com.scalableminds.webknossos.datastore.models.datasource._ import com.scalableminds.webknossos.datastore.storage.{DataVaultsHolder, RemoteSourceDescriptor} import com.typesafe.scalalogging.LazyLogging @@ -21,7 +22,6 @@ import oxalis.security.WkEnv import play.api.libs.json.{Json, OFormat} import java.net.URI -import java.nio.file.Path import javax.inject.Inject import scala.collection.mutable.ListBuffer import scala.concurrent.ExecutionContext @@ -162,8 +162,8 @@ class ExploreRemoteLayerService @Inject()(credentialService: CredentialService) requestingUser._id, requestingUser._organization) remoteSource = RemoteSourceDescriptor(uri, credentialOpt) - credentialId <- Fox.runOptional(credentialOpt)(c => credentialService.insertOne(c)) ?~> "remoteFileSystem.credential.insert.failed" - remotePath <- DataVaultsHolder.getVaultPath(remoteSource) ?~> "remoteFileSystem.setup.failed" + credentialId <- Fox.runOptional(credentialOpt)(c => credentialService.insertOne(c)) ?~> "dataVault.credential.insert.failed" + remotePath <- DataVaultsHolder.getVaultPath(remoteSource) ?~> 
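
All explorers now share the VaultPath-based contract explore(remotePath: VaultPath, credentialId: Option[String]), which is what lets the service above iterate over them uniformly. A minimal sketch of trying two of them in order, assuming a vault path obtained via DataVaultsHolder.getVaultPath as above (Fox#orElse is used the same way by NgffExplorer further down):

    import com.scalableminds.webknossos.datastore.datavault.VaultPath

    def exploreAsN5(remotePath: VaultPath): Fox[List[(N5Layer, Vec3Double)]] =
      // Prefer the richer multiscales layout; fall back to a bare N5 array.
      new N5MultiscalesExplorer()
        .explore(remotePath, credentialId = None)
        .orElse(new N5ArrayExplorer().explore(remotePath, credentialId = None))
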
"dataVault.setup.failed" layersWithVoxelSizes <- exploreRemoteLayersForRemotePath( remotePath, credentialId.map(_.toString), @@ -186,7 +186,7 @@ class ExploreRemoteLayerService @Inject()(credentialService: CredentialService) else uri private def exploreRemoteLayersForRemotePath( - remotePath: Path, + remotePath: VaultPath, credentialId: Option[String], reportMutable: ListBuffer[String], explorers: List[RemoteLayerExplorer])(implicit ec: ExecutionContext): Fox[List[(DataLayer, Vec3Double)]] = diff --git a/app/models/binary/explore/N5ArrayExplorer.scala b/app/models/binary/explore/N5ArrayExplorer.scala index a91f0f8e83..0c47c01670 100644 --- a/app/models/binary/explore/N5ArrayExplorer.scala +++ b/app/models/binary/explore/N5ArrayExplorer.scala @@ -5,19 +5,19 @@ import com.scalableminds.webknossos.datastore.dataformats.MagLocator import com.scalableminds.webknossos.datastore.dataformats.n5.{N5DataLayer, N5Layer, N5SegmentationLayer} import com.scalableminds.webknossos.datastore.datareaders.AxisOrder import com.scalableminds.webknossos.datastore.datareaders.n5.N5Header +import com.scalableminds.webknossos.datastore.datavault.VaultPath import com.scalableminds.webknossos.datastore.models.datasource.Category -import java.nio.file.Path import scala.concurrent.ExecutionContext.Implicits.global class N5ArrayExplorer extends RemoteLayerExplorer { override def name: String = "N5 Array" - override def explore(remotePath: Path, credentialId: Option[String]): Fox[List[(N5Layer, Vec3Double)]] = + override def explore(remotePath: VaultPath, credentialId: Option[String]): Fox[List[(N5Layer, Vec3Double)]] = for { - headerPath <- Fox.successful(remotePath.resolve(N5Header.FILENAME_ATTRIBUTES_JSON)) - name <- guessNameFromPath(remotePath) + headerPath <- Fox.successful(remotePath / N5Header.FILENAME_ATTRIBUTES_JSON) + name = guessNameFromPath(remotePath) n5Header <- parseJsonFromPath[N5Header](headerPath) ?~> s"failed to read n5 header at $headerPath" elementClass <- n5Header.elementClass ?~> "failed to read element class from n5 header" guessedAxisOrder = AxisOrder.asZyxFromRank(n5Header.rank) diff --git a/app/models/binary/explore/N5MultiscalesExplorer.scala b/app/models/binary/explore/N5MultiscalesExplorer.scala index a3a597c5ab..03c9af7e7e 100644 --- a/app/models/binary/explore/N5MultiscalesExplorer.scala +++ b/app/models/binary/explore/N5MultiscalesExplorer.scala @@ -6,25 +6,25 @@ import com.scalableminds.webknossos.datastore.dataformats.MagLocator import com.scalableminds.webknossos.datastore.dataformats.n5.{N5DataLayer, N5Layer, N5SegmentationLayer} import com.scalableminds.webknossos.datastore.datareaders.AxisOrder import com.scalableminds.webknossos.datastore.datareaders.n5._ +import com.scalableminds.webknossos.datastore.datavault.VaultPath import com.scalableminds.webknossos.datastore.models.datasource.Category import net.liftweb.util.Helpers.tryo -import java.nio.file.Path import scala.concurrent.ExecutionContext.Implicits.global class N5MultiscalesExplorer extends RemoteLayerExplorer with FoxImplicits { override def name: String = "N5 Multiscales" - override def explore(remotePath: Path, credentialId: Option[String]): Fox[List[(N5Layer, Vec3Double)]] = + override def explore(remotePath: VaultPath, credentialId: Option[String]): Fox[List[(N5Layer, Vec3Double)]] = for { - metadataPath <- Fox.successful(remotePath.resolve(N5Metadata.FILENAME_ATTRIBUTES_JSON)) + metadataPath <- Fox.successful(remotePath / N5Metadata.FILENAME_ATTRIBUTES_JSON) n5Metadata <- parseJsonFromPath[N5Metadata](metadataPath) 
?~> s"Failed to read N5 header at $metadataPath" layers <- Fox.serialCombined(n5Metadata.multiscales)(layerFromN5MultiscalesItem(_, remotePath, credentialId)) } yield layers private def layerFromN5MultiscalesItem(multiscalesItem: N5MultiscalesItem, - remotePath: Path, + remotePath: VaultPath, credentialId: Option[String]): Fox[(N5Layer, Vec3Double)] = for { voxelSizeNanometers <- extractVoxelSize(multiscalesItem.datasets.map(_.transform)) @@ -33,7 +33,7 @@ class N5MultiscalesExplorer extends RemoteLayerExplorer with FoxImplicits { _ <- bool2Fox(magsWithAttributes.nonEmpty) ?~> "zero mags in layer" elementClass <- elementClassFromMags(magsWithAttributes) ?~> "Could not extract element class from mags" boundingBox = boundingBoxFromMags(magsWithAttributes) - name <- guessNameFromPath(remotePath) + name = guessNameFromPath(remotePath) layer: N5Layer = if (looksLikeSegmentationLayer(name, elementClass)) { N5SegmentationLayer(name, boundingBox, elementClass, magsWithAttributes.map(_.mag), largestSegmentId = None) } else N5DataLayer(name, Category.color, boundingBox, elementClass, magsWithAttributes.map(_.mag)) @@ -96,14 +96,14 @@ class N5MultiscalesExplorer extends RemoteLayerExplorer with FoxImplicits { tryo(Vec3Double(scale(axisOrder.x), scale(axisOrder.y), scale(axisOrder.z))) private def n5MagFromDataset(n5Dataset: N5MultiscalesDataset, - layerPath: Path, + layerPath: VaultPath, voxelSize: Vec3Double, credentialId: Option[String]): Fox[MagWithAttributes] = for { axisOrder <- extractAxisOrder(n5Dataset.transform.axes) ?~> "Could not extract XYZ axis order mapping. Does the data have x, y and z axes, stated in multiscales metadata?" mag <- magFromTransform(voxelSize, n5Dataset.transform) ?~> "Could not extract mag from transforms" - magPath = layerPath.resolve(n5Dataset.path) - headerPath = magPath.resolve(N5Header.FILENAME_ATTRIBUTES_JSON) + magPath = layerPath / n5Dataset.path + headerPath = magPath / N5Header.FILENAME_ATTRIBUTES_JSON n5Header <- parseJsonFromPath[N5Header](headerPath) ?~> s"failed to read n5 header at $headerPath" elementClass <- n5Header.elementClass ?~> s"failed to read element class from n5 header at $headerPath" boundingBox <- n5Header.boundingBox(axisOrder) ?~> s"failed to read bounding box from n5 header at $headerPath" diff --git a/app/models/binary/explore/NgffExplorer.scala b/app/models/binary/explore/NgffExplorer.scala index 0c17cc23a4..2bf867dd4b 100644 --- a/app/models/binary/explore/NgffExplorer.scala +++ b/app/models/binary/explore/NgffExplorer.scala @@ -6,18 +6,18 @@ import com.scalableminds.webknossos.datastore.dataformats.MagLocator import com.scalableminds.webknossos.datastore.dataformats.zarr.{ZarrDataLayer, ZarrLayer, ZarrSegmentationLayer} import com.scalableminds.webknossos.datastore.datareaders.AxisOrder import com.scalableminds.webknossos.datastore.datareaders.zarr._ +import com.scalableminds.webknossos.datastore.datavault.VaultPath import com.scalableminds.webknossos.datastore.models.datasource.{Category, ElementClass} -import java.nio.file.Path import scala.concurrent.ExecutionContext.Implicits.global class NgffExplorer extends RemoteLayerExplorer { override def name: String = "OME NGFF Zarr v0.4" - override def explore(remotePath: Path, credentialId: Option[String]): Fox[List[(ZarrLayer, Vec3Double)]] = + override def explore(remotePath: VaultPath, credentialId: Option[String]): Fox[List[(ZarrLayer, Vec3Double)]] = for { - zattrsPath <- Fox.successful(remotePath.resolve(NgffMetadata.FILENAME_DOT_ZATTRS)) + zattrsPath <- Fox.successful(remotePath / 
NgffMetadata.FILENAME_DOT_ZATTRS) ngffHeader <- parseJsonFromPath[NgffMetadata](zattrsPath) ?~> s"Failed to read OME NGFF header at $zattrsPath" labelLayers <- exploreLabelLayers(remotePath, credentialId).orElse( Fox.successful(List[(ZarrLayer, Vec3Double)]())) @@ -31,11 +31,11 @@ class NgffExplorer extends RemoteLayerExplorer { layers: List[(ZarrLayer, Vec3Double)] = layerLists.flatten } yield layers ++ labelLayers - private def getNgffMultiscaleChannelCount(multiscale: NgffMultiscalesItem, remotePath: Path): Fox[Int] = + private def getNgffMultiscaleChannelCount(multiscale: NgffMultiscalesItem, remotePath: VaultPath): Fox[Int] = for { firstDataset <- multiscale.datasets.headOption.toFox - magPath = remotePath.resolve(firstDataset.path) - zarrayPath = magPath.resolve(ZarrHeader.FILENAME_DOT_ZARRAY) + magPath = remotePath / firstDataset.path + zarrayPath = magPath / ZarrHeader.FILENAME_DOT_ZARRAY zarrHeader <- parseJsonFromPath[ZarrHeader](zarrayPath) ?~> s"failed to read zarr header at $zarrayPath" axisOrder <- extractAxisOrder(multiscale.axes) ?~> "Could not extract XYZ axis order mapping. Does the data have x, y and z axes, stated in multiscales metadata?" channelCount = axisOrder.c match { @@ -45,7 +45,7 @@ class NgffExplorer extends RemoteLayerExplorer { } yield channelCount private def layersFromNgffMultiscale(multiscale: NgffMultiscalesItem, - remotePath: Path, + remotePath: VaultPath, credentialId: Option[String], channelCount: Int, isSegmentation: Boolean = false): Fox[List[(ZarrLayer, Vec3Double)]] = @@ -56,7 +56,7 @@ class NgffExplorer extends RemoteLayerExplorer { multiscale.datasets.map(_.coordinateTransformations), axisOrder) ?~> "Could not extract voxel size from scale transforms" voxelSizeNanometers = voxelSizeInAxisUnits * axisUnitFactors - nameFromPath <- guessNameFromPath(remotePath) + nameFromPath = guessNameFromPath(remotePath) name = multiscale.name.getOrElse(nameFromPath) layerTuples <- Fox.serialCombined((0 until channelCount).toList)({ channelIndex: Int => for { @@ -78,21 +78,22 @@ class NgffExplorer extends RemoteLayerExplorer { }) } yield layerTuples - private def exploreLabelLayers(remotePath: Path, credentialId: Option[String]): Fox[List[(ZarrLayer, Vec3Double)]] = + private def exploreLabelLayers(remotePath: VaultPath, + credentialId: Option[String]): Fox[List[(ZarrLayer, Vec3Double)]] = for { - labelDescriptionPath <- Fox.successful(remotePath.resolve(NgffLabelsGroup.LABEL_PATH)) + labelDescriptionPath <- Fox.successful(remotePath / NgffLabelsGroup.LABEL_PATH) labelGroup <- parseJsonFromPath[NgffLabelsGroup](labelDescriptionPath) layerTuples <- Fox.serialCombined(labelGroup.labels) { labelPath => layersForLabel(remotePath, labelPath, credentialId) } } yield layerTuples.flatten - private def layersForLabel(remotePath: Path, + private def layersForLabel(remotePath: VaultPath, labelPath: String, credentialId: Option[String]): Fox[List[(ZarrLayer, Vec3Double)]] = for { - fullLabelPath <- Fox.successful(remotePath.resolve("labels").resolve(labelPath)) - zattrsPath = fullLabelPath.resolve(NgffMetadata.FILENAME_DOT_ZATTRS) + fullLabelPath <- Fox.successful(remotePath / "labels" / labelPath) + zattrsPath = fullLabelPath / NgffMetadata.FILENAME_DOT_ZATTRS ngffHeader <- parseJsonFromPath[NgffMetadata](zattrsPath) ?~> s"Failed to read OME NGFF header at $zattrsPath" layers: List[List[(ZarrLayer, Vec3Double)]] <- Fox.serialCombined(ngffHeader.multiscales)( multiscale => @@ -113,15 +114,15 @@ class NgffExplorer extends RemoteLayerExplorer { } private def 
zarrMagFromNgffDataset(ngffDataset: NgffDataset, - layerPath: Path, + layerPath: VaultPath, voxelSizeInAxisUnits: Vec3Double, axisOrder: AxisOrder, credentialId: Option[String], channelIndex: Option[Int]): Fox[MagWithAttributes] = for { mag <- magFromTransforms(ngffDataset.coordinateTransformations, voxelSizeInAxisUnits, axisOrder) ?~> "Could not extract mag from scale transforms" - magPath = layerPath.resolve(ngffDataset.path) - zarrayPath = magPath.resolve(ZarrHeader.FILENAME_DOT_ZARRAY) + magPath = layerPath / ngffDataset.path + zarrayPath = magPath / ZarrHeader.FILENAME_DOT_ZARRAY zarrHeader <- parseJsonFromPath[ZarrHeader](zarrayPath) ?~> s"failed to read zarr header at $zarrayPath" elementClass <- zarrHeader.elementClass ?~> s"failed to read element class from zarr header at $zarrayPath" boundingBox <- zarrHeader.boundingBox(axisOrder) ?~> s"failed to read bounding box from zarr header at $zarrayPath" diff --git a/app/models/binary/explore/PrecomputedExplorer.scala b/app/models/binary/explore/PrecomputedExplorer.scala index 1de8e1397c..7eb2b8102b 100644 --- a/app/models/binary/explore/PrecomputedExplorer.scala +++ b/app/models/binary/explore/PrecomputedExplorer.scala @@ -9,26 +9,26 @@ import com.scalableminds.webknossos.datastore.dataformats.precomputed.{ } import com.scalableminds.webknossos.datastore.datareaders.AxisOrder import com.scalableminds.webknossos.datastore.datareaders.precomputed.{PrecomputedHeader, PrecomputedScale} +import com.scalableminds.webknossos.datastore.datavault.VaultPath import com.scalableminds.webknossos.datastore.models.datasource.{Category, ElementClass} -import java.nio.file.Path import scala.concurrent.ExecutionContext.Implicits.global class PrecomputedExplorer extends RemoteLayerExplorer { override def name: String = "Neuroglancer Precomputed" - override def explore(remotePath: Path, credentialId: Option[String]): Fox[List[(PrecomputedLayer, Vec3Double)]] = + override def explore(remotePath: VaultPath, credentialId: Option[String]): Fox[List[(PrecomputedLayer, Vec3Double)]] = for { - infoPath <- Fox.successful(remotePath.resolve(PrecomputedHeader.FILENAME_INFO)) + infoPath <- Fox.successful(remotePath / PrecomputedHeader.FILENAME_INFO) precomputedHeader <- parseJsonFromPath[PrecomputedHeader](infoPath) ?~> s"Failed to read neuroglancer precomputed metadata at $infoPath" layerAndVoxelSize <- layerFromPrecomputedHeader(precomputedHeader, remotePath, credentialId) } yield List(layerAndVoxelSize) private def layerFromPrecomputedHeader(precomputedHeader: PrecomputedHeader, - remotePath: Path, + remotePath: VaultPath, credentialId: Option[String]): Fox[(PrecomputedLayer, Vec3Double)] = for { - name <- guessNameFromPath(remotePath) + name <- Fox.successful(guessNameFromPath(remotePath)) firstScale <- precomputedHeader.scales.headOption.toFox boundingBox <- BoundingBox.fromSizeArray(firstScale.size).toFox elementClass: ElementClass.Value <- elementClassFromPrecomputedDataType(precomputedHeader.data_type) ?~> "Unknown data type" @@ -53,16 +53,16 @@ class PrecomputedExplorer extends RemoteLayerExplorer { private def getMagFromScale(scale: PrecomputedScale, minimalResolution: Array[Int], - remotePath: Path, + remotePath: VaultPath, credentialId: Option[String]): Fox[MagLocator] = { val normalizedResolution = (scale.resolution, minimalResolution).zipped.map((r, m) => r / m) for { mag <- Vec3Int.fromList(normalizedResolution.toList) - path = remotePath.resolve(scale.key) + path = remotePath / scale.key // Neuroglancer precomputed specification does not specify axis 
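
With Path#resolve gone, nested NGFF locations are built by chaining the / operator, as in layersForLabel above. A small sketch, assuming a label named "cells" (hypothetical) inside the standard labels group:

    val labelZattrs: VaultPath =
      remotePath / "labels" / "cells" / NgffMetadata.FILENAME_DOT_ZATTRS
    // Each '/' appends one URI segment, inserting the separator only when it is missing.
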
order, but uses x,y,z implicitly. // https://github.com/google/neuroglancer/blob/master/src/neuroglancer/datasource/precomputed/volume.md#unsharded-chunk-storage axisOrder = AxisOrder(0, 1, 2) - } yield MagLocator(mag, Some(path.toUri.toString), None, Some(axisOrder), channelIndex = None, credentialId) + } yield MagLocator(mag, Some(path.toString), None, Some(axisOrder), channelIndex = None, credentialId) } } diff --git a/app/models/binary/explore/RemoteLayerExplorer.scala b/app/models/binary/explore/RemoteLayerExplorer.scala index 123cfbf959..71111e2a91 100644 --- a/app/models/binary/explore/RemoteLayerExplorer.scala +++ b/app/models/binary/explore/RemoteLayerExplorer.scala @@ -1,7 +1,6 @@ package models.binary.explore import com.scalableminds.util.geometry.{BoundingBox, Vec3Double} -import com.scalableminds.util.io.ZipIO import com.scalableminds.util.tools.{Fox, FoxImplicits, JsonHelper} import com.scalableminds.webknossos.datastore.dataformats.MagLocator import com.scalableminds.webknossos.datastore.models.datasource.{DataLayer, ElementClass} @@ -10,26 +9,22 @@ import net.liftweb.util.Helpers.tryo import play.api.libs.json.Reads import java.nio.charset.StandardCharsets -import java.nio.file.{Files, Path} import scala.concurrent.ExecutionContext.Implicits.global case class MagWithAttributes(mag: MagLocator, - remotePath: Path, + remotePath: VaultPath, elementClass: ElementClass.Value, boundingBox: BoundingBox) trait RemoteLayerExplorer extends FoxImplicits { - def explore(remotePath: Path, credentialId: Option[String]): Fox[List[(DataLayer, Vec3Double)]] + def explore(remotePath: VaultPath, credentialId: Option[String]): Fox[List[(DataLayer, Vec3Double)]] def name: String - protected def parseJsonFromPath[T: Reads](path: Path): Fox[T] = + protected def parseJsonFromPath[T: Reads](path: VaultPath): Fox[T] = for { - fileBytes <- path match { - case path: VaultPath => path.readBytes() ?~> "dataSet.explore.failed.readFile" - case _ => tryo(ZipIO.tryGunzip(Files.readAllBytes(path))) ?~> "dataSet.explore.failed.readFile" - } + fileBytes <- path.readBytes().toFox fileAsString <- tryo(new String(fileBytes, StandardCharsets.UTF_8)).toFox ?~> "dataSet.explore.failed.readFile" parsed <- JsonHelper.parseAndValidateJson[T](fileAsString) } yield parsed @@ -38,8 +33,8 @@ trait RemoteLayerExplorer extends FoxImplicits { Set("segmentation", "labels").contains(layerName.toLowerCase) && ElementClass.segmentationElementClasses.contains( elementClass) - protected def guessNameFromPath(path: Path): Fox[String] = - path.toString.split("/").lastOption.toFox + protected def guessNameFromPath(path: VaultPath): String = + path.basename protected def elementClassFromMags(magsWithAttributes: List[MagWithAttributes]): Fox[ElementClass.Value] = { val elementClasses = magsWithAttributes.map(_.elementClass) diff --git a/app/models/binary/explore/WebknossosZarrExplorer.scala b/app/models/binary/explore/WebknossosZarrExplorer.scala index 38feb5f21b..02d6be6d10 100644 --- a/app/models/binary/explore/WebknossosZarrExplorer.scala +++ b/app/models/binary/explore/WebknossosZarrExplorer.scala @@ -3,24 +3,24 @@ package models.binary.explore import com.scalableminds.util.geometry.Vec3Double import com.scalableminds.util.tools.Fox import com.scalableminds.webknossos.datastore.dataformats.zarr.{ZarrDataLayer, ZarrLayer, ZarrSegmentationLayer} +import com.scalableminds.webknossos.datastore.datavault.VaultPath import com.scalableminds.webknossos.datastore.models.datasource.{Category, DataSource} -import java.nio.file.Path import 
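
parseJsonFromPath no longer special-cases local paths: every read goes through VaultPath.readBytes(), which already gunzips transparently, so the explorers stay storage-agnostic. A minimal sketch of the same pattern, assuming FoxImplicits in scope as in the trait and an illustrative header file name:

    import java.nio.charset.StandardCharsets
    import play.api.libs.json.Reads

    def readHeaderJson[T: Reads](remotePath: VaultPath): Fox[T] =
      for {
        bytes  <- (remotePath / "attributes.json").readBytes().toFox // Option lifted to Fox
        parsed <- JsonHelper.parseAndValidateJson[T](new String(bytes, StandardCharsets.UTF_8))
      } yield parsed
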
scala.concurrent.ExecutionContext.Implicits.global class WebknossosZarrExplorer extends RemoteLayerExplorer { override def name: String = "WEBKNOSSOS-based Zarr" - override def explore(remotePath: Path, credentialId: Option[String]): Fox[List[(ZarrLayer, Vec3Double)]] = + override def explore(remotePath: VaultPath, credentialId: Option[String]): Fox[List[(ZarrLayer, Vec3Double)]] = for { - dataSourcePropertiesPath <- Fox.successful(remotePath.resolve("datasource-properties.json")) + dataSourcePropertiesPath <- Fox.successful(remotePath / "datasource-properties.json") dataSource <- parseJsonFromPath[DataSource](dataSourcePropertiesPath) ngffExplorer = new NgffExplorer zarrLayers <- Fox.serialCombined(dataSource.dataLayers) { case l: ZarrSegmentationLayer => for { - zarrLayersFromNgff <- ngffExplorer.explore(remotePath.resolve(l.name), credentialId) + zarrLayersFromNgff <- ngffExplorer.explore(remotePath / l.name, credentialId) } yield zarrLayersFromNgff.map( zarrLayer => @@ -32,7 +32,7 @@ class WebknossosZarrExplorer extends RemoteLayerExplorer { zarrLayer._2)) case l: ZarrDataLayer => for { - zarrLayersFromNgff <- ngffExplorer.explore(remotePath.resolve(l.name), credentialId) + zarrLayersFromNgff <- ngffExplorer.explore(remotePath / l.name, credentialId) } yield zarrLayersFromNgff.map( zarrLayer => diff --git a/app/models/binary/explore/ZarrArrayExplorer.scala b/app/models/binary/explore/ZarrArrayExplorer.scala index 7a2c15eb85..2013d11930 100644 --- a/app/models/binary/explore/ZarrArrayExplorer.scala +++ b/app/models/binary/explore/ZarrArrayExplorer.scala @@ -6,19 +6,19 @@ import com.scalableminds.webknossos.datastore.dataformats.MagLocator import com.scalableminds.webknossos.datastore.dataformats.zarr.{ZarrDataLayer, ZarrLayer, ZarrSegmentationLayer} import com.scalableminds.webknossos.datastore.datareaders.AxisOrder import com.scalableminds.webknossos.datastore.datareaders.zarr.ZarrHeader +import com.scalableminds.webknossos.datastore.datavault.VaultPath import com.scalableminds.webknossos.datastore.models.datasource.Category import scala.concurrent.ExecutionContext.Implicits.global -import java.nio.file.Path class ZarrArrayExplorer extends RemoteLayerExplorer { override def name: String = "Zarr Array" - override def explore(remotePath: Path, credentialId: Option[String]): Fox[List[(ZarrLayer, Vec3Double)]] = + override def explore(remotePath: VaultPath, credentialId: Option[String]): Fox[List[(ZarrLayer, Vec3Double)]] = for { - zarrayPath <- Fox.successful(remotePath.resolve(ZarrHeader.FILENAME_DOT_ZARRAY)) - name <- guessNameFromPath(remotePath) + zarrayPath <- Fox.successful(remotePath / ZarrHeader.FILENAME_DOT_ZARRAY) + name = guessNameFromPath(remotePath) zarrHeader <- parseJsonFromPath[ZarrHeader](zarrayPath) ?~> s"failed to read zarr header at $zarrayPath" elementClass <- zarrHeader.elementClass ?~> "failed to read element class from zarr header" guessedAxisOrder = AxisOrder.asZyxFromRank(zarrHeader.rank) diff --git a/conf/messages b/conf/messages index 254bd2671e..d3f99045ec 100644 --- a/conf/messages +++ b/conf/messages @@ -104,9 +104,9 @@ dataSet.upload.storageExceeded=Cannot upload dataset because the storage quota o dataSet.explore.failed.readFile=Failed to read remote file dataSet.explore.magDtypeMismatch=Element class must be the same for all mags of a layer. 
Got {0} -remoteFileSystem.insert.failed=Failed to store remote file system credential -remoteFileSystem.setup.failed=Failed to set up remote file system -remoteFileSystem.getPath.failed=Failed to get remote path +dataVault.insert.failed=Failed to store remote file system credential +dataVault.setup.failed=Failed to set up remote file system +dataVault.getPath.failed=Failed to get remote path dataSource.notFound=Datasource not found on datastore server diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/BucketProvider.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/BucketProvider.scala index 2bf6f564f7..a1e6132974 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/BucketProvider.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/BucketProvider.scala @@ -4,7 +4,7 @@ import com.scalableminds.util.tools.{Fox, FoxImplicits} import com.scalableminds.webknossos.datastore.datavault.{FileSystemVaultPath, VaultPath} import com.scalableminds.webknossos.datastore.models.BucketPosition import com.scalableminds.webknossos.datastore.models.requests.DataReadInstruction -import com.scalableminds.webknossos.datastore.storage.{DataCubeCache, FileSystemService} +import com.scalableminds.webknossos.datastore.storage.{DataCubeCache, DataVaultService} import com.typesafe.scalalogging.LazyLogging import net.liftweb.common.Empty @@ -12,7 +12,7 @@ import scala.concurrent.ExecutionContext trait BucketProvider extends FoxImplicits with LazyLogging { - def fileSystemServiceOpt: Option[FileSystemService] + def dataVaultServiceOpt: Option[DataVaultService] // To be defined in subclass. def loadFromUnderlying(readInstruction: DataReadInstruction)(implicit ec: ExecutionContext): Fox[DataCubeHandle] = @@ -52,7 +52,7 @@ trait BucketProvider extends FoxImplicits with LazyLogging { .resolve(readInstruction.dataSource.id.name) .resolve(readInstruction.dataLayer.name) .resolve(relativeMagPath)) - if (magPath.toFile.exists()) { + if (magPath.exists) { Fox.successful(magPath) } else Fox.empty } diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/MagLocator.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/MagLocator.scala index 30f92c8761..7c8cacc019 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/MagLocator.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/MagLocator.scala @@ -3,14 +3,14 @@ package com.scalableminds.webknossos.datastore.dataformats import com.scalableminds.util.geometry.Vec3Int import com.scalableminds.webknossos.datastore.datareaders.AxisOrder import com.scalableminds.webknossos.datastore.models.datasource.ResolutionFormatHelper -import com.scalableminds.webknossos.datastore.storage.{DataVaultsHolder, LegacyFileSystemCredential} +import com.scalableminds.webknossos.datastore.storage.{DataVaultsHolder, LegacyDataVaultCredential} import play.api.libs.json.{Json, OFormat} import java.net.URI case class MagLocator(mag: Vec3Int, path: Option[String], - credentials: Option[LegacyFileSystemCredential], + credentials: Option[LegacyDataVaultCredential], axisOrder: Option[AxisOrder], channelIndex: Option[Int], credentialId: Option[String]) { diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/n5/N5BucketProvider.scala 
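
For reference, a remote mag as the explorers above now produce it, with the renamed credentials field; every value below is illustrative:

    MagLocator(
      mag = Vec3Int(2, 2, 1),
      path = Some("https://example.com/data/color/2-2-1"), // hypothetical remote location
      credentials = None,                                  // legacy inline credential, superseded by credentialId
      axisOrder = Some(AxisOrder(0, 1, 2)),
      channelIndex = None,
      credentialId = Some("aaaabbbb11112222deadbeef")      // hypothetical ObjectId of a stored DataVaultCredential
    )
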
b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/n5/N5BucketProvider.scala index b0980ceb52..5e00811ef2 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/n5/N5BucketProvider.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/n5/N5BucketProvider.scala @@ -8,7 +8,7 @@ import com.scalableminds.webknossos.datastore.datareaders.n5.N5Array import com.scalableminds.webknossos.datastore.datavault.VaultPath import com.scalableminds.webknossos.datastore.models.BucketPosition import com.scalableminds.webknossos.datastore.models.requests.DataReadInstruction -import com.scalableminds.webknossos.datastore.storage.FileSystemService +import com.scalableminds.webknossos.datastore.storage.DataVaultService import com.typesafe.scalalogging.LazyLogging import net.liftweb.common.{Empty, Failure, Full} import net.liftweb.util.Helpers.tryo @@ -29,7 +29,7 @@ class N5CubeHandle(n5Array: N5Array) extends DataCubeHandle with LazyLogging wit } -class N5BucketProvider(layer: N5Layer, val fileSystemServiceOpt: Option[FileSystemService]) +class N5BucketProvider(layer: N5Layer, val dataVaultServiceOpt: Option[DataVaultService]) extends BucketProvider with LazyLogging with RateLimitedErrorLogging { @@ -42,11 +42,11 @@ class N5BucketProvider(layer: N5Layer, val fileSystemServiceOpt: Option[FileSyst n5MagOpt match { case None => Fox.empty case Some(n5Mag) => - fileSystemServiceOpt match { - case Some(fileSystemService: FileSystemService) => + dataVaultServiceOpt match { + case Some(dataVaultService: DataVaultService) => for { magPath: VaultPath <- if (n5Mag.isRemote) { - fileSystemService.remotePathFor(n5Mag) + dataVaultService.vaultPathFor(n5Mag) } else localPathFrom(readInstruction, n5Mag.pathWithFallback) cubeHandle <- tryo(onError = e => logError(e))(N5Array.open(magPath, n5Mag.axisOrder, n5Mag.channelIndex)) .map(new N5CubeHandle(_)) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/n5/N5DataLayers.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/n5/N5DataLayers.scala index d863ee351d..39d6f81dda 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/n5/N5DataLayers.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/n5/N5DataLayers.scala @@ -4,14 +4,14 @@ import com.scalableminds.util.geometry.{BoundingBox, Vec3Int} import com.scalableminds.webknossos.datastore.dataformats.MagLocator import com.scalableminds.webknossos.datastore.models.datasource.LayerViewConfiguration.LayerViewConfiguration import com.scalableminds.webknossos.datastore.models.datasource._ -import com.scalableminds.webknossos.datastore.storage.FileSystemService +import com.scalableminds.webknossos.datastore.storage.DataVaultService import play.api.libs.json.{Json, OFormat} trait N5Layer extends DataLayer { val dataFormat: DataFormat.Value = DataFormat.n5 - def bucketProvider(fileSystemServiceOpt: Option[FileSystemService]) = new N5BucketProvider(this, fileSystemServiceOpt) + def bucketProvider(dataVaultServiceOpt: Option[DataVaultService]) = new N5BucketProvider(this, dataVaultServiceOpt) def resolutions: List[Vec3Int] = mags.map(_.mag) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/precomputed/PrecomputedBucketProvider.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/precomputed/PrecomputedBucketProvider.scala index 
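
The provider above decides per mag: remote mags are resolved through DataVaultService.vaultPathFor, local ones through localPathFrom. The same decision, condensed into a sketch with a mag and read instruction assumed in scope (the Zarr and Precomputed providers below repeat it verbatim):

    val magPathFox: Fox[VaultPath] =
      if (mag.isRemote) dataVaultService.vaultPathFor(mag)
      else localPathFrom(readInstruction, mag.pathWithFallback)
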
4ffdd15d7c..bddbcb5f7a 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/precomputed/PrecomputedBucketProvider.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/precomputed/PrecomputedBucketProvider.scala @@ -8,7 +8,7 @@ import com.scalableminds.webknossos.datastore.datareaders.precomputed.Precompute import com.scalableminds.webknossos.datastore.datavault.VaultPath import com.scalableminds.webknossos.datastore.models.BucketPosition import com.scalableminds.webknossos.datastore.models.requests.DataReadInstruction -import com.scalableminds.webknossos.datastore.storage.FileSystemService +import com.scalableminds.webknossos.datastore.storage.DataVaultService import com.typesafe.scalalogging.LazyLogging import net.liftweb.common.{Empty, Failure, Full} import net.liftweb.util.Helpers.tryo @@ -32,7 +32,7 @@ class PrecomputedCubeHandle(precomputedArray: PrecomputedArray) } -class PrecomputedBucketProvider(layer: PrecomputedLayer, val fileSystemServiceOpt: Option[FileSystemService]) +class PrecomputedBucketProvider(layer: PrecomputedLayer, val dataVaultServiceOpt: Option[DataVaultService]) extends BucketProvider with LazyLogging with RateLimitedErrorLogging { @@ -45,11 +45,11 @@ class PrecomputedBucketProvider(layer: PrecomputedLayer, val fileSystemServiceOp precomputedMagOpt match { case None => Fox.empty case Some(precomputedMag) => - fileSystemServiceOpt match { - case Some(fileSystemService: FileSystemService) => + dataVaultServiceOpt match { + case Some(dataVaultService: DataVaultService) => for { magPath: VaultPath <- if (precomputedMag.isRemote) { - fileSystemService.remotePathFor(precomputedMag) + dataVaultService.vaultPathFor(precomputedMag) } else localPathFrom(readInstruction, precomputedMag.pathWithFallback) cubeHandle <- tryo(onError = e => logError(e))( PrecomputedArray.open(magPath, precomputedMag.axisOrder, precomputedMag.channelIndex)) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/precomputed/PrecomputedDataLayers.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/precomputed/PrecomputedDataLayers.scala index bff880540a..92a967743c 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/precomputed/PrecomputedDataLayers.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/precomputed/PrecomputedDataLayers.scala @@ -11,15 +11,15 @@ import com.scalableminds.webknossos.datastore.models.datasource.{ ElementClass, SegmentationLayer } -import com.scalableminds.webknossos.datastore.storage.FileSystemService +import com.scalableminds.webknossos.datastore.storage.DataVaultService import play.api.libs.json.{Json, OFormat} trait PrecomputedLayer extends DataLayer { val dataFormat: DataFormat.Value = DataFormat.neuroglancerPrecomputed - def bucketProvider(fileSystemServiceOpt: Option[FileSystemService]) = - new PrecomputedBucketProvider(this, fileSystemServiceOpt) + def bucketProvider(dataVaultServiceOpt: Option[DataVaultService]) = + new PrecomputedBucketProvider(this, dataVaultServiceOpt) def resolutions: List[Vec3Int] = mags.map(_.mag) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/wkw/WKWBucketProvider.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/wkw/WKWBucketProvider.scala index 2b8e91879c..8421e7592e 100644 --- 
a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/wkw/WKWBucketProvider.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/wkw/WKWBucketProvider.scala @@ -4,7 +4,7 @@ import com.scalableminds.util.tools.{Fox, FoxImplicits} import com.scalableminds.webknossos.datastore.dataformats.{BucketProvider, DataCubeHandle} import com.scalableminds.webknossos.datastore.models.BucketPosition import com.scalableminds.webknossos.datastore.models.requests.DataReadInstruction -import com.scalableminds.webknossos.datastore.storage.FileSystemService +import com.scalableminds.webknossos.datastore.storage.DataVaultService import com.scalableminds.webknossos.wrap.WKWFile import net.liftweb.common.{Empty, Failure, Full} import java.nio.file.Path @@ -33,7 +33,7 @@ class WKWCubeHandle(wkwFile: WKWFile, wkwFilePath: Path) extends DataCubeHandle class WKWBucketProvider(layer: WKWLayer) extends BucketProvider with WKWDataFormatHelper { - override def fileSystemServiceOpt: Option[FileSystemService] = None + override def dataVaultServiceOpt: Option[DataVaultService] = None override def loadFromUnderlying(readInstruction: DataReadInstruction)( implicit ec: ExecutionContext): Fox[WKWCubeHandle] = { diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/wkw/WKWDataLayers.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/wkw/WKWDataLayers.scala index 8b266eb677..51983ab899 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/wkw/WKWDataLayers.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/wkw/WKWDataLayers.scala @@ -3,8 +3,8 @@ package com.scalableminds.webknossos.datastore.dataformats.wkw import com.scalableminds.util.geometry.{BoundingBox, Vec3Int} import com.scalableminds.webknossos.datastore.dataformats.BucketProvider import com.scalableminds.webknossos.datastore.models.datasource.LayerViewConfiguration.LayerViewConfiguration -import com.scalableminds.webknossos.datastore.models.datasource.{DataFormat, _} -import com.scalableminds.webknossos.datastore.storage.FileSystemService +import com.scalableminds.webknossos.datastore.models.datasource._ +import com.scalableminds.webknossos.datastore.storage.DataVaultService import play.api.libs.json.{Json, OFormat} case class WKWResolution(resolution: Vec3Int, cubeLength: Int) @@ -17,7 +17,7 @@ trait WKWLayer extends DataLayer { val dataFormat: DataFormat.Value = DataFormat.wkw - override def bucketProvider(fileSystemServiceOpt: Option[FileSystemService]): BucketProvider = + override def bucketProvider(dataVaultServiceOpt: Option[DataVaultService]): BucketProvider = new WKWBucketProvider(this) def wkwResolutions: List[WKWResolution] diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/zarr/ZarrBucketProvider.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/zarr/ZarrBucketProvider.scala index e26c9e9504..ef92327623 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/zarr/ZarrBucketProvider.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/zarr/ZarrBucketProvider.scala @@ -8,7 +8,7 @@ import com.scalableminds.webknossos.datastore.datareaders.zarr.ZarrArray import com.scalableminds.webknossos.datastore.datavault.VaultPath import com.scalableminds.webknossos.datastore.models.BucketPosition import 
com.scalableminds.webknossos.datastore.models.requests.DataReadInstruction -import com.scalableminds.webknossos.datastore.storage.FileSystemService +import com.scalableminds.webknossos.datastore.storage.DataVaultService import com.typesafe.scalalogging.LazyLogging import net.liftweb.common.{Empty, Failure, Full} import net.liftweb.util.Helpers.tryo @@ -29,7 +29,7 @@ class ZarrCubeHandle(zarrArray: ZarrArray) extends DataCubeHandle with LazyLoggi } -class ZarrBucketProvider(layer: ZarrLayer, val fileSystemServiceOpt: Option[FileSystemService]) +class ZarrBucketProvider(layer: ZarrLayer, val dataVaultServiceOpt: Option[DataVaultService]) extends BucketProvider with LazyLogging with RateLimitedErrorLogging { @@ -42,11 +42,11 @@ class ZarrBucketProvider(layer: ZarrLayer, val fileSystemServiceOpt: Option[File zarrMagOpt match { case None => Fox.empty case Some(zarrMag) => - fileSystemServiceOpt match { - case Some(fileSystemService: FileSystemService) => + dataVaultServiceOpt match { + case Some(dataVaultService: DataVaultService) => for { magPath: VaultPath <- if (zarrMag.isRemote) { - fileSystemService.remotePathFor(zarrMag) + dataVaultService.vaultPathFor(zarrMag) } else localPathFrom(readInstruction, zarrMag.pathWithFallback) cubeHandle <- tryo(onError = e => logError(e))( ZarrArray.open(magPath, zarrMag.axisOrder, zarrMag.channelIndex)).map(new ZarrCubeHandle(_)) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/zarr/ZarrDataLayers.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/zarr/ZarrDataLayers.scala index 170db237bc..18be9532de 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/zarr/ZarrDataLayers.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/zarr/ZarrDataLayers.scala @@ -4,15 +4,15 @@ import com.scalableminds.util.geometry.{BoundingBox, Vec3Int} import com.scalableminds.webknossos.datastore.dataformats.MagLocator import com.scalableminds.webknossos.datastore.models.datasource.LayerViewConfiguration.LayerViewConfiguration import com.scalableminds.webknossos.datastore.models.datasource._ -import com.scalableminds.webknossos.datastore.storage.FileSystemService +import com.scalableminds.webknossos.datastore.storage.DataVaultService import play.api.libs.json.{Json, OFormat} trait ZarrLayer extends DataLayer { val dataFormat: DataFormat.Value = DataFormat.zarr - def bucketProvider(fileSystemServiceOpt: Option[FileSystemService]) = - new ZarrBucketProvider(this, fileSystemServiceOpt) + def bucketProvider(dataVaultServiceOpt: Option[DataVaultService]) = + new ZarrBucketProvider(this, dataVaultServiceOpt) def resolutions: List[Vec3Int] = mags.map(_.mag) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/DatasetArray.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/DatasetArray.scala index f6f39a80e9..4f455d3280 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/DatasetArray.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/DatasetArray.scala @@ -140,7 +140,7 @@ class DatasetArray(relativePath: DatasetPath, override def toString: String = s"${getClass.getCanonicalName} {'/${relativePath.storeKey}' axisOrder=$axisOrder shape=${header.datasetShape.mkString( - ",")} chunks=${header.chunkSize.mkString(",")} dtype=${header.dataType} fillValue=${header.fillValueNumber}, ${header.compressorImpl}, 
byteOrder=${header.byteOrder}, vault=${vaultPath.getName}}" + ",")} chunks=${header.chunkSize.mkString(",")} dtype=${header.dataType} fillValue=${header.fillValueNumber}, ${header.compressorImpl}, byteOrder=${header.byteOrder}, vault=${vaultPath.summary}}" } diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/precomputed/PrecomputedArray.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/precomputed/PrecomputedArray.scala index 440f1bd127..34a238cbc3 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/precomputed/PrecomputedArray.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/precomputed/PrecomputedArray.scala @@ -19,7 +19,7 @@ object PrecomputedArray extends LazyLogging { @throws[IOException] def open(magPath: VaultPath, axisOrderOpt: Option[AxisOrder], channelIndex: Option[Int]): PrecomputedArray = { - val basePath = magPath.getParent.asInstanceOf[VaultPath] + val basePath = magPath.parent val headerPath = s"${PrecomputedHeader.FILENAME_INFO}" val headerBytes = (basePath / headerPath).readBytes() if (headerBytes.isEmpty) @@ -34,18 +34,16 @@ object PrecomputedArray extends LazyLogging { throw new Exception("Validating json as precomputed metadata failed: " + JsError.toJson(errors).toString()) } - val key = magPath.getFileName + val key = magPath.basename val scaleHeader: PrecomputedScaleHeader = PrecomputedScaleHeader( - rootHeader - .getScale(key.toString) - .getOrElse(throw new IllegalArgumentException(s"Did not find a scale for key $key")), + rootHeader.getScale(key).getOrElse(throw new IllegalArgumentException(s"Did not find a scale for key $key")), rootHeader) if (scaleHeader.bytesPerChunk > DatasetArray.chunkSizeLimitBytes) { throw new IllegalArgumentException( f"Chunk size of this Precomputed Array exceeds limit of ${DatasetArray.chunkSizeLimitBytes}, got ${scaleHeader.bytesPerChunk}") } - val datasetPath = new DatasetPath(key.toString) + val datasetPath = new DatasetPath(key) new PrecomputedArray(datasetPath, basePath, scaleHeader, diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datavault/FileSystemDataVault.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datavault/FileSystemDataVault.scala index 5d4c77af3d..eab2f6b5ff 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datavault/FileSystemDataVault.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datavault/FileSystemDataVault.scala @@ -1,9 +1,25 @@ package com.scalableminds.webknossos.datastore.datavault +import java.nio.ByteBuffer +import java.nio.file.{Files, Path} import scala.collection.immutable.NumericRange class FileSystemDataVault extends DataVault { override def readBytes(path: VaultPath, range: Option[NumericRange[Long]]): Array[Byte] = ??? 
+ + def readBytesLocal(path: Path, range: Option[NumericRange[Long]]): Array[Byte] = + range match { + case None => Files.readAllBytes(path) + case Some(r) => + val channel = Files.newByteChannel(path) + val buf = ByteBuffer.allocateDirect(r.length) + channel.position(r.start) + channel.read(buf) + buf.position(0) + val arr = new Array[Byte](r.length) + buf.get(arr) + arr + } } object FileSystemDataVault { def create: FileSystemDataVault = new FileSystemDataVault diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datavault/FileSystemVaultPath.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datavault/FileSystemVaultPath.scala index 81c7333d3b..a3da5af0cc 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datavault/FileSystemVaultPath.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datavault/FileSystemVaultPath.scala @@ -1,69 +1,31 @@ package com.scalableminds.webknossos.datastore.datavault +import com.scalableminds.util.io.ZipIO +import net.liftweb.util.Helpers.tryo + import java.net.URI -import java.nio.ByteBuffer -import java.nio.file.{FileSystem, Files, LinkOption, Path, WatchEvent, WatchKey, WatchService} +import java.nio.file.Path import scala.collection.immutable.NumericRange -class FileSystemVaultPath(basePath: Path) extends VaultPath(uri = new URI(""), dataVault = FileSystemDataVault.create) { - - override def readBytesGet(range: Option[NumericRange[Long]]): Array[Byte] = - range match { - case Some(r) => - val channel = Files.newByteChannel(basePath) - val buf = ByteBuffer.allocateDirect(r.length) - channel.position(r.start) - channel.read(buf) - buf.position(0) - val arr = new Array[Byte](r.length) - buf.get(arr) - arr - case None => Files.readAllBytes(basePath) - } - - override def getFileSystem: FileSystem = basePath.getFileSystem - - override def isAbsolute: Boolean = basePath.isAbsolute - - override def getRoot: Path = new FileSystemVaultPath(basePath.getRoot) - - override def getFileName: Path = new FileSystemVaultPath(basePath.getFileName) - - override def getParent: Path = new FileSystemVaultPath(basePath.getParent) - - override def getNameCount: Int = basePath.getNameCount - - override def getName(index: Int): Path = new FileSystemVaultPath(basePath.getName(index)) +class FileSystemVaultPath(basePath: Path, dataVault: FileSystemDataVault) + extends VaultPath(uri = new URI(""), dataVault = dataVault) { - override def subpath(beginIndex: Int, endIndex: Int): Path = basePath.subpath(beginIndex: Int, endIndex: Int) + override def readBytes(range: Option[NumericRange[Long]] = None): Option[Array[Byte]] = + tryo(dataVault.readBytesLocal(basePath, range)).toOption.map(ZipIO.tryGunzip) - override def startsWith(other: Path): Boolean = basePath.startsWith(other) + override def basename: String = basePath.getFileName.toString - override def endsWith(other: Path): Boolean = basePath.endsWith(other) + override def parent: VaultPath = new FileSystemVaultPath(basePath.getParent, dataVault) - override def normalize(): Path = new FileSystemVaultPath(basePath.normalize()) - - override def resolve(other: Path): Path = new FileSystemVaultPath(basePath.resolve(other)) - - override def /(key: String): VaultPath = new FileSystemVaultPath(basePath.resolve(key)) - - override def relativize(other: Path): Path = new FileSystemVaultPath(basePath.relativize(other)) + override def /(key: String): VaultPath = new FileSystemVaultPath(basePath.resolve(key), dataVault) override def toUri: URI = 
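
Taken together, FileSystemDataVault and FileSystemVaultPath give local data the same read surface as remote vaults. A usage sketch with an illustrative local file (range ends are exclusive with until):

    import java.nio.file.Paths

    val path  = FileSystemVaultPath.fromPath(Paths.get("/data/binary/sample/color/attributes.json"))
    val whole = path.readBytes() // Option[Array[Byte]]; None on IO failure, gunzipped transparently
    val head  = FileSystemDataVault.create
      .readBytesLocal(Paths.get("/data/binary/sample/color/attributes.json"), Some(0L until 64L))
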
basePath.toUri - override def toAbsolutePath: Path = new FileSystemVaultPath(basePath.toAbsolutePath) - - override def toRealPath(options: LinkOption*): Path = ??? - - override def register(watcher: WatchService, - events: Array[WatchEvent.Kind[_]], - modifiers: WatchEvent.Modifier*): WatchKey = ??? - - override def compareTo(other: Path): Int = basePath.compareTo(other) - override def toString: String = basePath.toString + + def exists: Boolean = basePath.toFile.exists() } object FileSystemVaultPath { - def fromPath(path: Path): FileSystemVaultPath = new FileSystemVaultPath(path) + def fromPath(path: Path): FileSystemVaultPath = new FileSystemVaultPath(path, FileSystemDataVault.create) } diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datavault/HttpsDataVault.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datavault/HttpsDataVault.scala index f88e508fb7..67d9b7e204 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datavault/HttpsDataVault.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datavault/HttpsDataVault.scala @@ -1,9 +1,9 @@ package com.scalableminds.webknossos.datastore.datavault import com.scalableminds.webknossos.datastore.storage.{ - FileSystemCredential, + DataVaultCredential, HttpBasicAuthCredential, - LegacyFileSystemCredential, + LegacyDataVaultCredential, RemoteSourceDescriptor } @@ -14,7 +14,7 @@ import sttp.model.Uri import scala.collection.immutable.NumericRange -class HttpsDataVault(credential: Option[FileSystemCredential]) extends DataVault { +class HttpsDataVault(credential: Option[DataVaultCredential]) extends DataVault { private val connectionTimeout = 1 minute private val readTimeout = 10 minutes @@ -23,9 +23,9 @@ class HttpsDataVault(credential: Option[FileSystemCredential]) extends DataVault def getBasicAuthCredential: Option[HttpBasicAuthCredential] = credential.flatMap { c => c match { - case h: HttpBasicAuthCredential => Some(h) - case l: LegacyFileSystemCredential => Some(l.toBasicAuth) - case _ => None + case h: HttpBasicAuthCredential => Some(h) + case l: LegacyDataVaultCredential => Some(l.toBasicAuth) + case _ => None } } diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datavault/VaultPath.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datavault/VaultPath.scala index 00f71ebd76..4d9abd8a01 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datavault/VaultPath.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datavault/VaultPath.scala @@ -4,55 +4,23 @@ import com.scalableminds.util.io.ZipIO import net.liftweb.util.Helpers.tryo import java.net.URI -import java.nio.file.{FileSystem, LinkOption, Path, Paths, WatchEvent, WatchKey, WatchService} import scala.collection.immutable.NumericRange -/* -VaultPath implements Path so that a drop in replacement is possible while continuing to use Paths for local storage. -This class does not implement all relevant methods and it might be a good idea to remove the inheritance on Path in the -future. 
- */ - -class VaultPath(uri: URI, dataVault: DataVault) extends Path { - - protected def readBytesGet(range: Option[NumericRange[Long]]): Array[Byte] = - dataVault.readBytes(this, range) +class VaultPath(uri: URI, dataVault: DataVault) { def readBytes(range: Option[NumericRange[Long]] = None): Option[Array[Byte]] = - tryo(readBytesGet(range)).toOption.map(ZipIO.tryGunzip) - - override def getFileSystem: FileSystem = ??? - - override def isAbsolute: Boolean = ??? - - override def getRoot: Path = ??? + tryo(dataVault.readBytes(this, range)).toOption.map(ZipIO.tryGunzip) - override def getFileName: Path = - Paths.get(uri.toString.split("/").last) + def basename: String = + uri.toString.split("/").last - override def getParent: Path = { + def parent: VaultPath = { val newUri = if (uri.getPath.endsWith("/")) uri.resolve("..") else uri.resolve(".") new VaultPath(newUri, dataVault) } - override def getNameCount: Int = ??? - - override def getName(index: Int): Path = ??? - - override def subpath(beginIndex: Int, endIndex: Int): Path = ??? - - override def startsWith(other: Path): Boolean = ??? - - override def endsWith(other: Path): Boolean = ??? - - override def normalize(): Path = ??? - - override def resolve(other: String): Path = this / other - - override def resolve(other: Path): Path = this / other.toString - def /(key: String): VaultPath = if (uri.toString.endsWith("/")) { new VaultPath(uri.resolve(key), dataVault) @@ -60,22 +28,10 @@ class VaultPath(uri: URI, dataVault: DataVault) extends Path { new VaultPath(new URI(s"${uri.toString}/").resolve(key), dataVault) } - override def relativize(other: Path): Path = ??? - - override def toUri: URI = + def toUri: URI = uri - override def toAbsolutePath: Path = ??? - - override def compareTo(other: Path): Int = ??? - - override def toRealPath(options: LinkOption*): Path = ??? - - override def register(watcher: WatchService, - events: Array[WatchEvent.Kind[_]], - modifiers: WatchEvent.Modifier*): WatchKey = ??? 
- override def toString: String = uri.toString - def getName: String = s"VaultPath: ${this.toString} for ${dataVault.getClass.getSimpleName}" + def summary: String = s"VaultPath: ${this.toString} for ${dataVault.getClass.getSimpleName}" } diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/datasource/DataLayer.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/datasource/DataLayer.scala index ac7950480c..5f6726bd79 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/datasource/DataLayer.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/datasource/DataLayer.scala @@ -14,7 +14,7 @@ import com.scalableminds.webknossos.datastore.dataformats.zarr.{ZarrDataLayer, Z import com.scalableminds.webknossos.datastore.datareaders.ArrayDataType import com.scalableminds.webknossos.datastore.datareaders.ArrayDataType.ArrayDataType import com.scalableminds.webknossos.datastore.models.datasource.LayerViewConfiguration.LayerViewConfiguration -import com.scalableminds.webknossos.datastore.storage.FileSystemService +import com.scalableminds.webknossos.datastore.storage.DataVaultService import play.api.libs.json._ object DataFormat extends ExtendedEnumeration { @@ -181,7 +181,7 @@ trait DataLayer extends DataLayerLike { */ def lengthOfUnderlyingCubes(resolution: Vec3Int): Int - def bucketProvider(fileSystemServiceOpt: Option[FileSystemService]): BucketProvider + def bucketProvider(dataVaultServiceOpt: Option[DataVaultService]): BucketProvider def containsResolution(resolution: Vec3Int): Boolean = resolutions.contains(resolution) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/BinaryDataService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/BinaryDataService.scala index b32c16c985..ced2c0fd24 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/BinaryDataService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/BinaryDataService.scala @@ -17,7 +17,7 @@ import scala.concurrent.ExecutionContext.Implicits.global class BinaryDataService(val dataBaseDir: Path, maxCacheSize: Int, val agglomerateServiceOpt: Option[AgglomerateService], - fileSystemServiceOpt: Option[FileSystemService], + dataVaultServiceOpt: Option[DataVaultService], val applicationHealthService: Option[ApplicationHealthService]) extends FoxImplicits with DataSetDeleter @@ -83,7 +83,7 @@ class BinaryDataService(val dataBaseDir: Path, val readInstruction = DataReadInstruction(dataBaseDir, request.dataSource, request.dataLayer, bucket, request.settings.version) val bucketProvider = bucketProviderCache.getOrLoadAndPut(request.dataLayer)(dataLayer => - dataLayer.bucketProvider(fileSystemServiceOpt)) + dataLayer.bucketProvider(dataVaultServiceOpt)) bucketProvider.load(readInstruction, shardHandleCache).futureBox.flatMap { case Failure(msg, Full(e: InternalError), _) => applicationHealthService.foreach(a => a.pushError(e)) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/BinaryDataServiceHolder.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/BinaryDataServiceHolder.scala index 15d30f6af8..c4db9940bf 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/BinaryDataServiceHolder.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/BinaryDataServiceHolder.scala @@ -2,7 
+2,7 @@ package com.scalableminds.webknossos.datastore.services import java.nio.file.Paths import com.scalableminds.webknossos.datastore.DataStoreConfig -import com.scalableminds.webknossos.datastore.storage.FileSystemService +import com.scalableminds.webknossos.datastore.storage.DataVaultService import javax.inject.Inject @@ -16,13 +16,13 @@ import javax.inject.Inject class BinaryDataServiceHolder @Inject()(config: DataStoreConfig, agglomerateService: AgglomerateService, applicationHealthService: ApplicationHealthService, - fileSystemService: FileSystemService) { + dataVaultService: DataVaultService) { val binaryDataService: BinaryDataService = new BinaryDataService( Paths.get(config.Datastore.baseFolder), config.Datastore.Cache.DataCube.maxEntries, Some(agglomerateService), - Some(fileSystemService), + Some(dataVaultService), Some(applicationHealthService) ) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSRemoteWebKnossosClient.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSRemoteWebKnossosClient.scala index 682ec8e321..f2ad42498b 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSRemoteWebKnossosClient.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSRemoteWebKnossosClient.scala @@ -12,7 +12,7 @@ import com.scalableminds.webknossos.datastore.models.annotation.AnnotationSource import com.scalableminds.webknossos.datastore.models.datasource.DataSourceId import com.scalableminds.webknossos.datastore.models.datasource.inbox.InboxDataSourceLike import com.scalableminds.webknossos.datastore.rpc.RPC -import com.scalableminds.webknossos.datastore.storage.FileSystemCredential +import com.scalableminds.webknossos.datastore.storage.DataVaultCredential import com.typesafe.scalalogging.LazyLogging import play.api.inject.ApplicationLifecycle import play.api.libs.json.{Json, OFormat} @@ -150,10 +150,10 @@ class DSRemoteWebKnossosClient @Inject()( .getWithJsonResponse[AnnotationSource] ) - private lazy val credentialCache: AlfuFoxCache[String, FileSystemCredential] = + private lazy val credentialCache: AlfuFoxCache[String, DataVaultCredential] = AlfuFoxCache(timeToLive = 5 seconds, timeToIdle = 5 seconds) - def getCredential(credentialId: String): Fox[FileSystemCredential] = + def getCredential(credentialId: String): Fox[DataVaultCredential] = credentialCache.getOrLoad( credentialId, _ => @@ -161,6 +161,6 @@ class DSRemoteWebKnossosClient @Inject()( .addQueryString("credentialId" -> credentialId) .addQueryString("key" -> dataStoreKey) .silent - .getWithJsonResponse[FileSystemCredential] + .getWithJsonResponse[DataVaultCredential] ) } diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/storage/FileSystemCredentials.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/storage/DataVaultCredentials.scala similarity index 74% rename from webknossos-datastore/app/com/scalableminds/webknossos/datastore/storage/FileSystemCredentials.scala rename to webknossos-datastore/app/com/scalableminds/webknossos/datastore/storage/DataVaultCredentials.scala index 13bec15215..7d4f541b56 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/storage/FileSystemCredentials.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/storage/DataVaultCredentials.scala @@ -2,14 +2,14 @@ package com.scalableminds.webknossos.datastore.storage import play.api.libs.json.{JsValue, Json, OFormat} -sealed 
trait FileSystemCredential +sealed trait DataVaultCredential -object FileSystemCredential { - implicit val jsonFormat: OFormat[FileSystemCredential] = Json.format[FileSystemCredential] +object DataVaultCredential { + implicit val jsonFormat: OFormat[DataVaultCredential] = Json.format[DataVaultCredential] } case class HttpBasicAuthCredential(name: String, username: String, password: String, user: String, organization: String) - extends FileSystemCredential + extends DataVaultCredential object HttpBasicAuthCredential { implicit val jsonFormat: OFormat[HttpBasicAuthCredential] = Json.format[HttpBasicAuthCredential] @@ -20,20 +20,20 @@ case class S3AccessKeyCredential(name: String, secretAccessKey: String, user: String, organization: String) - extends FileSystemCredential + extends DataVaultCredential object S3AccessKeyCredential { implicit val jsonFormat: OFormat[S3AccessKeyCredential] = Json.format[S3AccessKeyCredential] } case class GoogleServiceAccountCredential(name: String, secretJson: JsValue, user: String, organization: String) - extends FileSystemCredential + extends DataVaultCredential object GoogleServiceAccountCredential { implicit val jsonFormat: OFormat[GoogleServiceAccountCredential] = Json.format[GoogleServiceAccountCredential] } -case class LegacyFileSystemCredential(user: String, password: Option[String]) extends FileSystemCredential { +case class LegacyDataVaultCredential(user: String, password: Option[String]) extends DataVaultCredential { def toBasicAuth: HttpBasicAuthCredential = HttpBasicAuthCredential(name = "", username = user, password = password.getOrElse(""), user = "", organization = "") @@ -45,6 +45,6 @@ case class LegacyFileSystemCredential(user: String, password: Option[String]) ex organization = "") } -object LegacyFileSystemCredential { - implicit val jsonFormat: OFormat[LegacyFileSystemCredential] = Json.format[LegacyFileSystemCredential] +object LegacyDataVaultCredential { + implicit val jsonFormat: OFormat[LegacyDataVaultCredential] = Json.format[LegacyDataVaultCredential] } diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/storage/FileSystemService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/storage/DataVaultService.scala similarity index 74% rename from webknossos-datastore/app/com/scalableminds/webknossos/datastore/storage/FileSystemService.scala rename to webknossos-datastore/app/com/scalableminds/webknossos/datastore/storage/DataVaultService.scala index e79ec31d09..686317c506 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/storage/FileSystemService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/storage/DataVaultService.scala @@ -9,18 +9,18 @@ import java.net.URI import javax.inject.Inject import scala.concurrent.ExecutionContext -case class RemoteSourceDescriptor(uri: URI, credential: Option[FileSystemCredential]) +case class RemoteSourceDescriptor(uri: URI, credential: Option[DataVaultCredential]) -class FileSystemService @Inject()(dSRemoteWebKnossosClient: DSRemoteWebKnossosClient) { +class DataVaultService @Inject()(dSRemoteWebKnossosClient: DSRemoteWebKnossosClient) { - def remotePathFor(magLocator: MagLocator)(implicit ec: ExecutionContext): Fox[VaultPath] = + def vaultPathFor(magLocator: MagLocator)(implicit ec: ExecutionContext): Fox[VaultPath] = for { credentialBox <- credentialFor(magLocator: MagLocator).futureBox remoteSource = RemoteSourceDescriptor(magLocator.uri, credentialBox.toOption) - remotePath <- 
DataVaultsHolder.getVaultPath(remoteSource) ?~> "remoteFileSystem.setup.failed" + remotePath <- DataVaultsHolder.getVaultPath(remoteSource) ?~> "dataVault.setup.failed" } yield remotePath - private def credentialFor(magLocator: MagLocator)(implicit ec: ExecutionContext): Fox[FileSystemCredential] = + private def credentialFor(magLocator: MagLocator)(implicit ec: ExecutionContext): Fox[DataVaultCredential] = magLocator.credentialId match { case Some(credentialId) => dSRemoteWebKnossosClient.getCredential(credentialId) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingLayer.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingLayer.scala index d154c25cac..0ecebae77c 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingLayer.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingLayer.scala @@ -15,13 +15,13 @@ import com.scalableminds.webknossos.datastore.models.datasource.{ SegmentationLayer } import com.scalableminds.webknossos.datastore.models.requests.DataReadInstruction -import com.scalableminds.webknossos.datastore.storage.{DataCubeCache, FileSystemService} +import com.scalableminds.webknossos.datastore.storage.{DataCubeCache, DataVaultService} import scala.concurrent.ExecutionContext class EditableMappingBucketProvider(layer: EditableMappingLayer) extends BucketProvider with ProtoGeometryImplicits { - override def fileSystemServiceOpt: Option[FileSystemService] = None + override def dataVaultServiceOpt: Option[DataVaultService] = None override def load(readInstruction: DataReadInstruction, cache: DataCubeCache)( implicit ec: ExecutionContext): Fox[Array[Byte]] = { @@ -73,7 +73,7 @@ case class EditableMappingLayer(name: String, override def lengthOfUnderlyingCubes(resolution: Vec3Int): Int = DataLayer.bucketLength - override def bucketProvider(fileSystemServiceOpt: Option[FileSystemService]): BucketProvider = + override def bucketProvider(dataVaultServiceOpt: Option[DataVaultService]): BucketProvider = new EditableMappingBucketProvider(layer = this) override def mappings: Option[Set[String]] = None diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingLayer.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingLayer.scala index d407d35c7d..0a12f08db0 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingLayer.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingLayer.scala @@ -5,9 +5,9 @@ import com.scalableminds.util.tools.{Fox, FoxImplicits} import com.scalableminds.webknossos.datastore.dataformats.BucketProvider import com.scalableminds.webknossos.datastore.models.BucketPosition import com.scalableminds.webknossos.datastore.models.datasource.LayerViewConfiguration.LayerViewConfiguration -import com.scalableminds.webknossos.datastore.models.datasource.{ElementClass, _} +import com.scalableminds.webknossos.datastore.models.datasource._ import com.scalableminds.webknossos.datastore.models.requests.DataReadInstruction -import com.scalableminds.webknossos.datastore.storage.{DataCubeCache, FileSystemService} +import com.scalableminds.webknossos.datastore.storage.{DataCubeCache, DataVaultService} 
import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing import com.scalableminds.webknossos.datastore.helpers.ProtoGeometryImplicits import com.scalableminds.webknossos.tracingstore.tracings.{ @@ -21,7 +21,7 @@ import scala.concurrent.ExecutionContext trait AbstractVolumeTracingBucketProvider extends BucketProvider with VolumeTracingBucketHelper with FoxImplicits { - override def fileSystemServiceOpt: Option[FileSystemService] = None + override def dataVaultServiceOpt: Option[DataVaultService] = None def bucketStreamWithVersion(version: Option[Long] = None): Iterator[(BucketPosition, Array[Byte], Long)] } @@ -100,7 +100,7 @@ case class VolumeTracingLayer( else new VolumeTracingBucketProvider(this) - override def bucketProvider(fileSystemServiceOpt: Option[FileSystemService]): BucketProvider = volumeBucketProvider + override def bucketProvider(dataVaultServiceOpt: Option[DataVaultService]): BucketProvider = volumeBucketProvider def bucketProvider: AbstractVolumeTracingBucketProvider = volumeBucketProvider From 915309988de4a04ce196eaa384a6dd3a1eae580a Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Wed, 29 Mar 2023 15:02:54 +0200 Subject: [PATCH 07/14] Fix layout of view mode switcher and move it (#6949) * fix layout issue with view-mode-switch * move view-mode-switch between position input and toolbar * update changelog --- CHANGELOG.unreleased.md | 1 + .../oxalis/view/action-bar/toolbar_view.tsx | 22 +++++++++++-------- .../oxalis/view/action_bar_view.tsx | 2 +- 3 files changed, 15 insertions(+), 10 deletions(-) diff --git a/CHANGELOG.unreleased.md b/CHANGELOG.unreleased.md index d52a793fd5..a3186ed1c5 100644 --- a/CHANGELOG.unreleased.md +++ b/CHANGELOG.unreleased.md @@ -13,6 +13,7 @@ For upgrade instructions, please check the [migration guide](MIGRATIONS.released ### Added ### Changed +- Moved the view mode selection in the toolbar next to the position field. [#6949](https://github.com/scalableminds/webknossos/pull/6949) ### Fixed diff --git a/frontend/javascripts/oxalis/view/action-bar/toolbar_view.tsx b/frontend/javascripts/oxalis/view/action-bar/toolbar_view.tsx index 6fbba43086..9eb8a98855 100644 --- a/frontend/javascripts/oxalis/view/action-bar/toolbar_view.tsx +++ b/frontend/javascripts/oxalis/view/action-bar/toolbar_view.tsx @@ -301,15 +301,19 @@ function VolumeInterpolationButton() { ); return ( - } - menu={menu} - onClick={onInterpolateClick} - style={{ padding: "0 5px 0 6px" }} - buttonsRender={buttonsRender} - > - {React.cloneElement(INTERPOLATION_ICON[interpolationMode], { style: { margin: -4 } })} - + // Without the outer div, the Dropdown can eat up all the remaining horizontal space, + // moving sibling elements to the far right. +
+ } + menu={menu} + onClick={onInterpolateClick} + style={{ padding: "0 5px 0 6px" }} + buttonsRender={buttonsRender} + > + {React.cloneElement(INTERPOLATION_ICON[interpolationMode], { style: { margin: -4 } })} + +
); } diff --git a/frontend/javascripts/oxalis/view/action_bar_view.tsx b/frontend/javascripts/oxalis/view/action_bar_view.tsx index 4545825b88..9b12496670 100644 --- a/frontend/javascripts/oxalis/view/action_bar_view.tsx +++ b/frontend/javascripts/oxalis/view/action_bar_view.tsx @@ -173,8 +173,8 @@ class ActionBarView extends React.PureComponent { )} {showVersionRestore ? VersionRestoreWarning : null} - {!isReadOnly && constants.MODES_PLANE.indexOf(viewMode) > -1 ? : null} {isArbitrarySupported && !is2d ? : null} + {!isReadOnly && constants.MODES_PLANE.indexOf(viewMode) > -1 ? : null} {isViewMode ? this.renderStartTracingButton() : null} Date: Thu, 30 Mar 2023 10:16:59 +0200 Subject: [PATCH 08/14] Add LOD mesh support for frontend (#6909) * experiment with LOD object * play around with custom LOD class * hack together proof of concept for LOD rendering for meshes * WIP: integrate lod meshes to frontend * Fix mesh chunk byte offset calculation for non-zero lods * fix adding all lods to scene * add lod factor for chunk positions, make sure byte offsets are Long * handle old mesh file format better * fix handling meshes from old file format * fix removing mesh and fix linting errors * wip apply feedback * apply code review feedback * add custom raycaster * apply code review feedback * add changelog entry --------- Co-authored-by: Philipp Otto Co-authored-by: Florian M --- CHANGELOG.unreleased.md | 1 + app/controllers/MeshController.scala | 3 + .../libs/visibility_aware_raycaster.ts | 52 ++++++ .../oxalis/controller/custom_lod.ts | 62 +++++++ .../oxalis/controller/scene_controller.ts | 81 ++++++--- .../oxalis/model/sagas/isosurface_saga.ts | 172 +++++++++++------- .../javascripts/oxalis/view/plane_view.ts | 15 +- .../datastore/services/MeshFileService.scala | 91 ++++----- 8 files changed, 332 insertions(+), 145 deletions(-) create mode 100644 frontend/javascripts/libs/visibility_aware_raycaster.ts create mode 100644 frontend/javascripts/oxalis/controller/custom_lod.ts diff --git a/CHANGELOG.unreleased.md b/CHANGELOG.unreleased.md index a3186ed1c5..21b4beba4d 100644 --- a/CHANGELOG.unreleased.md +++ b/CHANGELOG.unreleased.md @@ -11,6 +11,7 @@ For upgrade instructions, please check the [migration guide](MIGRATIONS.released [Commits](https://github.com/scalableminds/webknossos/compare/23.04.0...HEAD) ### Added +- Added rendering precomputed meshes with level of detail depending on the zoom of the 3D viewport. This feature only works with version 3 mesh files. [#6909](https://github.com/scalableminds/webknossos/pull/6909) ### Changed - Moved the view mode selection in the toolbar next to the position field. 
[#6949](https://github.com/scalableminds/webknossos/pull/6949) diff --git a/app/controllers/MeshController.scala b/app/controllers/MeshController.scala index e3ca283bd9..d376f22b7e 100644 --- a/app/controllers/MeshController.scala +++ b/app/controllers/MeshController.scala @@ -11,6 +11,9 @@ import javax.inject.Inject import scala.concurrent.ExecutionContext +// Note that this wk-side controller deals with user-uploaded meshes stored in postgres +// Not to be confused with the DSMeshController that deals with on-disk meshfiles + class MeshController @Inject()(meshDAO: MeshDAO, annotationDAO: AnnotationDAO, sil: Silhouette[WkEnv], diff --git a/frontend/javascripts/libs/visibility_aware_raycaster.ts b/frontend/javascripts/libs/visibility_aware_raycaster.ts new file mode 100644 index 0000000000..48ca8a8e1e --- /dev/null +++ b/frontend/javascripts/libs/visibility_aware_raycaster.ts @@ -0,0 +1,52 @@ +import * as THREE from "three"; + +export type RaycastIntersection = + THREE.Intersection; + +function ascSort(a: RaycastIntersection, b: RaycastIntersection) { + return a.distance - b.distance; +} + +export default class VisibilityAwareRaycaster extends THREE.Raycaster { + // A modified version of the Raycaster.js from three.js. + // The original version can be found here: https://github.com/mrdoob/three.js/blob/dev/src/core/Raycaster.js. + // Types retrieved from https://github.com/DefinitelyTyped/DefinitelyTyped/blob/master/types/three/src/core/Raycaster.d.ts. + intersectObjects( + objects: THREE.Object3D[], + recursive?: boolean, + intersects: THREE.Intersection[] = [], + ): THREE.Intersection[] { + for (let i = 0, l = objects.length; i < l; i++) { + if (objects[i].visible) { + this.intersectObject(objects[i], recursive, intersects); + } + } + + intersects.sort(ascSort); + + return intersects; + } + intersectObject( + object: THREE.Object3D, + recursive?: boolean, + intersects: THREE.Intersection[] = [], + ): THREE.Intersection[] { + if (object.layers.test(this.layers)) { + object.raycast(this, intersects); + } + + if (recursive === true) { + const children = object.children; + + for (let i = 0, l = children.length; i < l; i++) { + if (children[i].visible) { + this.intersectObject(children[i], true, intersects); + } + } + } + + intersects.sort(ascSort); + + return intersects; + } +} diff --git a/frontend/javascripts/oxalis/controller/custom_lod.ts b/frontend/javascripts/oxalis/controller/custom_lod.ts new file mode 100644 index 0000000000..9fd6ebab16 --- /dev/null +++ b/frontend/javascripts/oxalis/controller/custom_lod.ts @@ -0,0 +1,62 @@ +import * as THREE from "three"; +import Store from "oxalis/store"; +import { getTDViewZoom } from "oxalis/model/accessors/view_mode_accessor"; + +export default class CustomLOD extends THREE.LOD { + noLODGroup: THREE.Group; + lodLevelCount: number; + lodThresholds: number[]; + constructor() { + super(); + this.lodLevelCount = 0; + this.noLODGroup = new THREE.Group(); + this.add(this.noLODGroup); + this.lodThresholds = [0.7, 3]; + } + + getCurrentLOD(): number { + const state = Store.getState(); + const scale = getTDViewZoom(state); + let currentIndex = 0; + while (scale > this.lodThresholds[currentIndex] && currentIndex < this.lodLevelCount - 1) { + currentIndex++; + } + return currentIndex; + } + + update(_camera: any) { + const levels = this.levels; + + const visibleIndex = this.getCurrentLOD(); + for (let i = 0; i < this.levels.length; i++) { + levels[i].object.visible = i === visibleIndex; + } + } + + addNoLODSupportedMesh(meshGroup: THREE.Group) { + 
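+    // Meshes that ship without LOD information (e.g., ad-hoc isosurfaces and V0 mesh
+    // chunks) go into noLODGroup, which update() never hides, so they stay visible
+    // at every zoom level.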
this.noLODGroup.add(meshGroup); + } + + addLODMesh(meshGroup: THREE.Group, level: number) { + while (this.lodLevelCount <= level) { + this.addLevel(new THREE.Group(), this.lodLevelCount); + this.lodLevelCount++; + // Add a new threshold if the number of thresholds is not sufficient. + // A new threshold is only needed if LOD count is greater than the count of thresholds + // as the last threshold is also used for all scales greater than itself. + // Thus this.lodThresholds.length will always be equal to this.lodLevelCount - 1. + if (this.lodLevelCount > this.lodThresholds.length) { + this.lodThresholds.push(this.lodThresholds[this.lodThresholds.length - 1] * 2); + } + } + this.levels[level].object.add(meshGroup); + } + + removeNoLODSupportedMesh(meshGroup: THREE.Group) { + this.noLODGroup.remove(meshGroup); + } + + removeLODMesh(meshGroup: THREE.Group, level: number) { + this.levels[level].object.remove(meshGroup); + } +} diff --git a/frontend/javascripts/oxalis/controller/scene_controller.ts b/frontend/javascripts/oxalis/controller/scene_controller.ts index caeb88029b..b4f7330260 100644 --- a/frontend/javascripts/oxalis/controller/scene_controller.ts +++ b/frontend/javascripts/oxalis/controller/scene_controller.ts @@ -43,6 +43,8 @@ import { setSceneController } from "oxalis/controller/scene_controller_provider" import { getSegmentColorAsHSLA } from "oxalis/model/accessors/volumetracing_accessor"; import { mergeVertices } from "libs/BufferGeometryUtils"; import { getPlaneScalingFactor } from "oxalis/model/accessors/view_mode_accessor"; +import { NO_LOD_MESH_INDEX } from "oxalis/model/sagas/isosurface_saga"; +import CustomLOD from "oxalis/controller/custom_lod"; const CUBE_COLOR = 0x999999; const LAYER_CUBE_COLOR = 0xffff99; @@ -83,8 +85,8 @@ class SceneController { // isosurfacesRootGroup holds lights and one group per segmentation id. // Each group can hold multiple meshes. // @ts-expect-error ts-migrate(2564) FIXME: Property 'isosurfacesRootGroup' has no initializer... Remove this comment to see the full error message - isosurfacesRootGroup: THREE.Group; - isosurfacesGroupsPerSegmentationId: Record = {}; + isosurfacesLODRootGroup: CustomLOD; + isosurfacesGroupsPerSegmentationId: Record> = {}; // This class collects all the meshes displayed in the Skeleton View and updates position and scale of each // element depending on the provided flycam. @@ -113,14 +115,14 @@ class SceneController { // scene.scale does not have an effect. 
this.rootGroup = new THREE.Object3D(); this.rootGroup.add(this.getRootNode()); - this.isosurfacesRootGroup = new THREE.Group(); + this.isosurfacesLODRootGroup = new CustomLOD(); this.meshesRootGroup = new THREE.Group(); this.highlightedBBoxId = null; // The dimension(s) with the highest resolution will not be distorted this.rootGroup.scale.copy(new THREE.Vector3(...Store.getState().dataset.dataSource.scale)); // Add scene to the group, all Geometries are then added to group this.scene.add(this.rootGroup); - this.scene.add(this.isosurfacesRootGroup); + this.scene.add(this.isosurfacesLODRootGroup); this.scene.add(this.meshesRootGroup); this.rootGroup.add(new THREE.DirectionalLight()); this.addLights(); @@ -205,8 +207,13 @@ class SceneController { window.removeBucketMesh = (mesh: THREE.LineSegments) => this.rootNode.remove(mesh); } - getIsosurfaceGeometry(cellId: number): THREE.Group { - return this.isosurfacesGroupsPerSegmentationId[cellId]; + getIsosurfaceGeometryInBestLOD(cellId: number): THREE.Group { + const bestLod = Math.min( + ...Object.keys(this.isosurfacesGroupsPerSegmentationId[cellId]).map((lodVal) => + parseInt(lodVal), + ), + ); + return this.isosurfacesGroupsPerSegmentationId[cellId][bestLod]; } getColorObjectForSegment(cellId: number) { @@ -224,6 +231,7 @@ class SceneController { meshMaterial.transparent = true; const mesh = new THREE.Mesh(geometry, meshMaterial); + mesh.castShadow = true; mesh.receiveShadow = true; const tweenAnimation = new TWEEN.Tween({ @@ -270,7 +278,7 @@ class SceneController { bufferGeometry = mergeVertices(bufferGeometry); bufferGeometry.computeVertexNormals(); - this.addIsosurfaceFromGeometry(bufferGeometry, segmentationId); + this.addIsosurfaceFromGeometry(bufferGeometry, segmentationId, null, null, NO_LOD_MESH_INDEX); } addIsosurfaceFromGeometry( @@ -278,11 +286,19 @@ class SceneController { segmentationId: number, offset: Vector3 | null = null, scale: Vector3 | null = null, + lod: number, ): void { if (this.isosurfacesGroupsPerSegmentationId[segmentationId] == null) { + this.isosurfacesGroupsPerSegmentationId[segmentationId] = {}; + } + if (this.isosurfacesGroupsPerSegmentationId[segmentationId][lod] == null) { const newGroup = new THREE.Group(); - this.isosurfacesGroupsPerSegmentationId[segmentationId] = newGroup; - this.isosurfacesRootGroup.add(newGroup); + this.isosurfacesGroupsPerSegmentationId[segmentationId][lod] = newGroup; + if (lod === NO_LOD_MESH_INDEX) { + this.isosurfacesLODRootGroup.addNoLODSupportedMesh(newGroup); + } else { + this.isosurfacesLODRootGroup.addLODMesh(newGroup, lod); + } // @ts-ignore newGroup.cellId = segmentationId; if (scale != null) { @@ -296,18 +312,22 @@ class SceneController { mesh.translateZ(offset[2]); } - this.isosurfacesGroupsPerSegmentationId[segmentationId].add(mesh); + this.isosurfacesGroupsPerSegmentationId[segmentationId][lod].add(mesh); } removeIsosurfaceById(segmentationId: number): void { if (this.isosurfacesGroupsPerSegmentationId[segmentationId] == null) { return; } - - const group = this.isosurfacesGroupsPerSegmentationId[segmentationId]; - this.isosurfacesRootGroup.remove(group); - // @ts-expect-error ts-migrate(2322) FIXME: Type 'null' is not assignable to type 'Group'. 
- this.isosurfacesGroupsPerSegmentationId[segmentationId] = null; + _.forEach(this.isosurfacesGroupsPerSegmentationId[segmentationId], (meshGroup, lod) => { + const lodNumber = parseInt(lod); + if (lodNumber !== NO_LOD_MESH_INDEX) { + this.isosurfacesLODRootGroup.removeLODMesh(meshGroup, lodNumber); + } else { + this.isosurfacesLODRootGroup.removeNoLODSupportedMesh(meshGroup); + } + }); + delete this.isosurfacesGroupsPerSegmentationId[segmentationId]; } addLights(): void { @@ -336,10 +356,10 @@ class SceneController { pointLight.position.y = -25; pointLight.position.z = 10; - this.isosurfacesRootGroup.add(ambientLight); - this.isosurfacesRootGroup.add(directionalLight); - this.isosurfacesRootGroup.add(directionalLight2); - this.isosurfacesRootGroup.add(pointLight); + this.isosurfacesLODRootGroup.add(ambientLight); + this.isosurfacesLODRootGroup.add(directionalLight); + this.isosurfacesLODRootGroup.add(directionalLight2); + this.isosurfacesLODRootGroup.add(pointLight); } removeSTL(id: string): void { @@ -351,18 +371,21 @@ class SceneController { } setIsosurfaceVisibility(id: number, visibility: boolean): void { - this.isosurfacesGroupsPerSegmentationId[id].visible = visibility; + _.forEach(this.isosurfacesGroupsPerSegmentationId[id], (meshGroup) => { + meshGroup.visible = visibility; + }); } setIsosurfaceColor(id: number): void { const color = this.getColorObjectForSegment(id); - const group = this.isosurfacesGroupsPerSegmentationId[id]; - if (group) { - for (const child of group.children) { - // @ts-ignore - child.material.color = color; + _.forEach(this.isosurfacesGroupsPerSegmentationId[id], (meshGroup) => { + if (meshGroup) { + for (const child of meshGroup.children) { + // @ts-ignore + child.material.color = color; + } } - } + }); } updateMeshPostion(id: string, position: Vector3): void { @@ -487,7 +510,7 @@ class SceneController { this.taskBoundingBox?.updateForCam(id); - this.isosurfacesRootGroup.visible = id === OrthoViews.TDView; + this.isosurfacesLODRootGroup.visible = id === OrthoViews.TDView; this.annotationToolsGeometryGroup.visible = id !== OrthoViews.TDView; const originalPosition = getPosition(Store.getState().flycam); @@ -678,8 +701,8 @@ class SceneController { this.taskBoundingBox?.setVisibility(false); - if (this.isosurfacesRootGroup != null) { - this.isosurfacesRootGroup.visible = false; + if (this.isosurfacesLODRootGroup != null) { + this.isosurfacesLODRootGroup.visible = false; } } diff --git a/frontend/javascripts/oxalis/model/sagas/isosurface_saga.ts b/frontend/javascripts/oxalis/model/sagas/isosurface_saga.ts index 1075b22199..66f510f07f 100644 --- a/frontend/javascripts/oxalis/model/sagas/isosurface_saga.ts +++ b/frontend/javascripts/oxalis/model/sagas/isosurface_saga.ts @@ -74,6 +74,7 @@ import processTaskWithPool from "libs/task_pool"; import { getBaseSegmentationName } from "oxalis/view/right-border-tabs/segments_tab/segments_view_helper"; import { UpdateSegmentAction } from "../actions/volumetracing_actions"; +export const NO_LOD_MESH_INDEX = -1; const MAX_RETRY_COUNT = 5; const RETRY_WAIT_TIME = 5000; const MESH_CHUNK_THROTTLE_DELAY = 500; @@ -609,6 +610,8 @@ function* loadPrecomputedMesh(action: LoadPrecomputedMeshAction) { }); } +type ChunksMap = Record; + function* loadPrecomputedMeshForSegmentId( id: number, seedPosition: Vector3, @@ -619,9 +622,15 @@ function* loadPrecomputedMeshForSegmentId( yield* put(addPrecomputedIsosurfaceAction(layerName, id, seedPosition, meshFileName)); yield* put(startedLoadingIsosurfaceAction(layerName, id)); const dataset = 
yield* select((state) => state.dataset); + const sceneController = yield* call(getSceneController); + const currentLODIndex = yield* call({ + context: sceneController.isosurfacesLODRootGroup, + fn: sceneController.isosurfacesLODRootGroup.getCurrentLOD, + }); - let availableChunks = null; + let availableChunksMap: ChunksMap = {}; let scale: Vector3 | null = null; + let loadingOrder: number[] = []; const availableMeshFiles = yield* call( dispatchMaybeFetchMeshFilesAsync, @@ -687,9 +696,14 @@ function* loadPrecomputedMeshForSegmentId( segmentInfo.transform[1][1], segmentInfo.transform[2][2], ]; - availableChunks = _.first(segmentInfo.chunks.lods)?.chunks || []; + segmentInfo.chunks.lods.forEach((chunks, lodIndex) => { + availableChunksMap[lodIndex] = chunks?.chunks; + loadingOrder.push(lodIndex); + }); + // Load the chunks closest to the current LOD first. + loadingOrder.sort((a, b) => Math.abs(a - currentLODIndex) - Math.abs(b - currentLODIndex)); } else { - availableChunks = yield* call( + availableChunksMap[NO_LOD_MESH_INDEX] = yield* call( meshV0.getMeshfileChunksForSegment, dataset.dataStore.url, dataset, @@ -697,6 +711,7 @@ function* loadPrecomputedMeshForSegmentId( meshFileName, id, ); + loadingOrder = [NO_LOD_MESH_INDEX]; } } catch (exception) { console.warn("Mesh chunk couldn't be loaded due to", exception); @@ -706,73 +721,88 @@ function* loadPrecomputedMeshForSegmentId( return; } - // Sort the chunks by distance to the seedPosition, so that the mesh loads from the inside out - const sortedAvailableChunks = _.sortBy(availableChunks, (chunk: Vector3 | meshV3.MeshChunk) => - V3.length(V3.sub(seedPosition, "position" in chunk ? chunk.position : chunk)), - ) as Array | Array; - - const tasks = sortedAvailableChunks.map( - (chunk) => - function* loadChunk() { - const sceneController = yield* call(getSceneController); - - if ("position" in chunk) { - // V3 - const dracoData = yield* call( - meshV3.getMeshfileChunkData, - dataset.dataStore.url, - dataset, - getBaseSegmentationName(segmentationLayer), - meshFileName, - chunk.byteOffset, - chunk.byteSize, - ); - const loader = getDracoLoader(); - - const geometry = yield* call(loader.decodeDracoFileAsync, dracoData); - // Compute vertex normals to achieve smooth shading - geometry.computeVertexNormals(); - - yield* call( - { context: sceneController, fn: sceneController.addIsosurfaceFromGeometry }, - geometry, - id, - chunk.position, - // Apply the scale from the segment info, which includes dataset scale and mag - scale, - ); - } else { - // V0 - const stlData = yield* call( - meshV0.getMeshfileChunkData, - dataset.dataStore.url, - dataset, - getBaseSegmentationName(segmentationLayer), - meshFileName, - id, - chunk, - ); - let geometry = yield* call(parseStlBuffer, stlData); - - // Delete existing vertex normals (since these are not interpolated - // across faces). - geometry.deleteAttribute("normal"); - // Ensure that vertices of adjacent faces are shared. 
- geometry = mergeVertices(geometry); - // Recompute normals to achieve smooth shading - geometry.computeVertexNormals(); - - yield* call( - { context: sceneController, fn: sceneController.addIsosurfaceFromGeometry }, - geometry, - id, - ); + const loadChunksTasks = _.compact( + _.flatten( + loadingOrder.map((lod) => { + if (availableChunksMap[lod] == null) { + return; } - }, + const availableChunks = availableChunksMap[lod]; + // Sort the chunks by distance to the seedPosition, so that the mesh loads from the inside out + const sortedAvailableChunks = _.sortBy( + availableChunks, + (chunk: Vector3 | meshV3.MeshChunk) => + V3.length(V3.sub(seedPosition, "position" in chunk ? chunk.position : chunk)), + ) as Array | Array; + + const tasks = sortedAvailableChunks.map( + (chunk) => + function* loadChunk(): Saga { + if ("position" in chunk) { + // V3 + const dracoData = yield* call( + meshV3.getMeshfileChunkData, + dataset.dataStore.url, + dataset, + getBaseSegmentationName(segmentationLayer), + meshFileName, + chunk.byteOffset, + chunk.byteSize, + ); + const loader = getDracoLoader(); + + const geometry = yield* call(loader.decodeDracoFileAsync, dracoData); + // Compute vertex normals to achieve smooth shading + geometry.computeVertexNormals(); + + yield* call( + { context: sceneController, fn: sceneController.addIsosurfaceFromGeometry }, + geometry, + id, + chunk.position, + // Apply the scale from the segment info, which includes dataset scale and mag + scale, + lod, + ); + } else { + // V0 + const stlData = yield* call( + meshV0.getMeshfileChunkData, + dataset.dataStore.url, + dataset, + getBaseSegmentationName(segmentationLayer), + meshFileName, + id, + chunk, + ); + let geometry = yield* call(parseStlBuffer, stlData); + + // Delete existing vertex normals (since these are not interpolated + // across faces). + geometry.deleteAttribute("normal"); + // Ensure that vertices of adjacent faces are shared. 
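+              // (mergeVertices deduplicates shared vertex positions so that
+              // computeVertexNormals below can average normals across adjacent faces.)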
+ geometry = mergeVertices(geometry); + // Recompute normals to achieve smooth shading + geometry.computeVertexNormals(); + + yield* call( + { context: sceneController, fn: sceneController.addIsosurfaceFromGeometry }, + geometry, + id, + null, + null, + lod, + ); + } + }, + ); + return tasks; + }), + ), ); try { - yield* call(processTaskWithPool, tasks, PARALLEL_PRECOMPUTED_MESH_LOADING_COUNT); + yield* call(processTaskWithPool, loadChunksTasks, PARALLEL_PRECOMPUTED_MESH_LOADING_COUNT); } catch (exception) { console.error(exception); Toast.warning("Some mesh objects could not be loaded."); @@ -788,7 +818,7 @@ function* loadPrecomputedMeshForSegmentId( */ function* downloadIsosurfaceCellById(cellName: string, cellId: number): Saga { const sceneController = getSceneController(); - const geometry = sceneController.getIsosurfaceGeometry(cellId); + const geometry = sceneController.getIsosurfaceGeometryInBestLOD(cellId); if (geometry == null) { const errorMessage = messages["tracing.not_isosurface_available_to_download"]; @@ -818,7 +848,13 @@ function* importIsosurfaceFromStl(action: ImportIsosurfaceFromStlAction): Saga>; +import VisibilityAwareRaycaster, { + type RaycastIntersection, +} from "libs/visibility_aware_raycaster"; const createDirLight = ( position: Vector3, @@ -32,7 +33,7 @@ const createDirLight = ( return dirLight; }; -const raycaster = new THREE.Raycaster(); +const raycaster = new VisibilityAwareRaycaster(); let oldRaycasterHit: THREE.Object3D | null = null; const ISOSURFACE_HOVER_THROTTLING_DELAY = 150; @@ -45,7 +46,7 @@ class PlaneView { cameras: OrthoViewMap; throttledPerformIsosurfaceHitTest: ( arg0: [number, number], - ) => RaycastIntersection | null | undefined; + ) => RaycastIntersection | null | undefined; running: boolean; needsRerender: boolean; @@ -147,10 +148,10 @@ class PlaneView { performIsosurfaceHitTest( mousePosition: [number, number], - ): RaycastIntersection | null | undefined { + ): RaycastIntersection | null | undefined { const storeState = Store.getState(); const SceneController = getSceneController(); - const { isosurfacesRootGroup } = SceneController; + const { isosurfacesLODRootGroup } = SceneController; const tdViewport = getInputCatcherRect(storeState, "TDView"); const { hoveredSegmentId } = storeState.temporaryConfiguration; @@ -172,7 +173,7 @@ class PlaneView { ((mousePosition[1] / tdViewport.height) * 2 - 1) * -1, ); raycaster.setFromCamera(mouse, this.cameras[OrthoViews.TDView]); - const intersectableObjects = isosurfacesRootGroup.children; + const intersectableObjects = isosurfacesLODRootGroup.children; // The second parameter of intersectObjects is set to true to ensure that // the groups which contain the actual meshes are traversed. 
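    // Because the raycaster is visibility-aware, groups belonging to hidden LOD
    // levels are skipped, so only the currently displayed level can produce hits.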
const intersections = raycaster.intersectObjects(intersectableObjects, true); diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/MeshFileService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/MeshFileService.scala index dee7599661..810ef419e9 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/MeshFileService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/MeshFileService.scala @@ -61,21 +61,21 @@ object MeshFileInfo { implicit val jsonFormat: OFormat[MeshFileInfo] = Json.format[MeshFileInfo] } -case class NeuroglancerSegmentInfo(chunkShape: Vec3Float, - gridOrigin: Vec3Float, - numLods: Int, - lodScales: Array[Float], - vertexOffsets: Array[Vec3Float], - numChunksPerLod: Array[Int], - chunkPositions: List[List[Vec3Int]], - chunkByteOffsets: List[List[Int]]) - -object NeuroglancerSegmentInfo { - def fromBytes(manifest: Array[Byte]): NeuroglancerSegmentInfo = { +case class NeuroglancerSegmentManifest(chunkShape: Vec3Float, + gridOrigin: Vec3Float, + numLods: Int, + lodScales: Array[Float], + vertexOffsets: Array[Vec3Float], + numChunksPerLod: Array[Int], + chunkPositions: List[List[Vec3Int]], + chunkByteSizes: List[List[Long]]) + +object NeuroglancerSegmentManifest { + def fromBytes(manifestBytes: Array[Byte]): NeuroglancerSegmentManifest = { // All Ints here should be UInt32 per spec. We assume that the sign bit is not necessary (the encoded values are at most 2^31). // But they all are used to index into Arrays and JVM doesn't allow for Long Array Indexes, // we can't convert them. - val byteInput = new ByteArrayInputStream(manifest) + val byteInput = new ByteArrayInputStream(manifestBytes) val dis = new LittleEndianDataInputStream(byteInput) val chunkShape = Vec3Float(x = dis.readFloat, y = dis.readFloat, z = dis.readFloat) @@ -99,7 +99,7 @@ object NeuroglancerSegmentInfo { } val chunkPositionsList = new ListBuffer[List[Vec3Int]] - val chunkSizes = new ListBuffer[List[Int]] + val chunkSizes = new ListBuffer[List[Long]] for (lod <- 0 until numLods) { val currentChunkPositions = (ListBuffer[Int](), ListBuffer[Int](), ListBuffer[Int]()) for (row <- 0 until 3; _ <- 0 until numChunksPerLod(lod)) { @@ -112,21 +112,21 @@ object NeuroglancerSegmentInfo { chunkPositionsList.append(currentChunkPositions.zipped.map(Vec3Int(_, _, _)).toList) - val currentChunkSizes = ListBuffer[Int]() + val currentChunkSizes = ListBuffer[Long]() for (_ <- 0 until numChunksPerLod(lod)) { - currentChunkSizes.append(dis.readInt) + currentChunkSizes.append(dis.readInt.toLong) // Converting to long for convenient + safe summing later } chunkSizes.append(currentChunkSizes.toList) } - NeuroglancerSegmentInfo(chunkShape, - gridOrigin, - numLods, - lodScales, - vertexOffsets, - numChunksPerLod, - chunkPositionsList.toList, - chunkSizes.toList) + NeuroglancerSegmentManifest(chunkShape, + gridOrigin, + numLods, + lodScales, + vertexOffsets, + numChunksPerLod, + chunkPositionsList.toList, + chunkSizes.toList) } } @@ -213,14 +213,14 @@ class MeshFileService @Inject()(config: DataStoreConfig)(implicit ec: ExecutionC Note that null is a valid value here for once. 
Meshfiles with no information about the meshFilePath will return Fox.empty, while meshfiles with one marked as empty, will return Fox.successful(null) */ - def mappingNameForMeshFile(meshFilePath: Path, meshFileVersion: Long): Fox[String] = { + private def mappingNameForMeshFile(meshFilePath: Path, meshFileVersion: Long): Fox[String] = { val attributeName = if (meshFileVersion == 0) "metadata/mapping_name" else "mapping_name" executeWithCachedHdf5(meshFilePath, meshFileCache) { cachedMeshFile => cachedMeshFile.reader.string().getAttr("/", attributeName) } ?~> "mesh.file.readEncoding.failed" } - def mappingVersionForMeshFile(meshFilePath: Path): Long = + private def mappingVersionForMeshFile(meshFilePath: Path): Long = executeWithCachedHdf5(meshFilePath, meshFileCache) { cachedMeshFile => cachedMeshFile.reader.int64().getAttr("/", "artifact_schema_version") }.toOption.getOrElse(0) @@ -265,42 +265,51 @@ class MeshFileService @Inject()(config: DataStoreConfig)(implicit ec: ExecutionC val lodScaleMultiplier = cachedMeshFile.reader.float64().getAttr("/", "lod_scale_multiplier") val transform = cachedMeshFile.reader.float64().getMatrixAttr("/", "transform") - val (neuroglancerStart, neuroglancerEnd) = getNeuroglancerOffsets(segmentId, cachedMeshFile) + val (neuroglancerSegmentManifestStart, neuroglancerSegmentManifestEnd) = + getNeuroglancerSegmentManifestOffsets(segmentId, cachedMeshFile) - val manifest = cachedMeshFile.reader + val manifestBytes = cachedMeshFile.reader .uint8() - .readArrayBlockWithOffset("/neuroglancer", (neuroglancerEnd - neuroglancerStart).toInt, neuroglancerStart) - val segmentInfo = NeuroglancerSegmentInfo.fromBytes(manifest) - val enrichedSegmentInfo = enrichSegmentInfo(segmentInfo, lodScaleMultiplier, neuroglancerStart) - WebknossosSegmentInfo(transform = transform, meshFormat = encoding, chunks = enrichedSegmentInfo) + .readArrayBlockWithOffset("/neuroglancer", + (neuroglancerSegmentManifestEnd - neuroglancerSegmentManifestStart).toInt, + neuroglancerSegmentManifestStart) + val segmentManifest = NeuroglancerSegmentManifest.fromBytes(manifestBytes) + val enrichedSegmentManifest = + enrichSegmentInfo(segmentManifest, lodScaleMultiplier, neuroglancerSegmentManifestStart) + WebknossosSegmentInfo(transform = transform, meshFormat = encoding, chunks = enrichedSegmentManifest) } } - private def enrichSegmentInfo(segmentInfo: NeuroglancerSegmentInfo, + private def enrichSegmentInfo(segmentInfo: NeuroglancerSegmentManifest, lodScaleMultiplier: Double, neuroglancerOffsetStart: Long): MeshSegmentInfo = { - val totalMeshSize = segmentInfo.chunkByteOffsets.map(_.sum).sum + val bytesPerLod = segmentInfo.chunkByteSizes.map(_.sum) + val totalMeshSize = bytesPerLod.sum val meshByteStartOffset = neuroglancerOffsetStart - totalMeshSize - val chunkByteOffsets = segmentInfo.chunkByteOffsets.map(_.scanLeft(0)(_ + _)) // This builds a cumulative sum + val chunkByteOffsetsInLod = segmentInfo.chunkByteSizes.map(_.scanLeft(0L)(_ + _)) // builds cumulative sum + + def getChunkByteOffset(lod: Int, currentChunk: Int): Long = + // get past the finer lods first, then take offset in selected lod + bytesPerLod.take(lod).sum + chunkByteOffsetsInLod(lod)(currentChunk) def computeGlobalPositionAndOffset(lod: Int, currentChunk: Int): MeshChunk = { val globalPosition = segmentInfo.gridOrigin + segmentInfo .chunkPositions(lod)(currentChunk) - .toVec3Float * segmentInfo.chunkShape * segmentInfo.lodScales(lod) * lodScaleMultiplier + .toVec3Float * segmentInfo.chunkShape * Math.pow(2, lod) * 
segmentInfo.lodScales(lod) * lodScaleMultiplier MeshChunk( position = globalPosition, // This position is in Voxel Space - byteOffset = meshByteStartOffset + chunkByteOffsets(lod)(currentChunk), - byteSize = segmentInfo.chunkByteOffsets(lod)(currentChunk), + byteOffset = meshByteStartOffset + getChunkByteOffset(lod, currentChunk), + byteSize = segmentInfo.chunkByteSizes(lod)(currentChunk).toInt, // size must be int32 to fit in java array ) } - val lods = for (lod <- 0 until segmentInfo.numLods) yield lod + val lods: Seq[Int] = for (lod <- 0 until segmentInfo.numLods) yield lod - def chunkNums(lod: Int): IndexedSeq[(Int, Int)] = + def chunkCountsWithLod(lod: Int): IndexedSeq[(Int, Int)] = for (currentChunk <- 0 until segmentInfo.numChunksPerLod(lod)) yield (lod, currentChunk) - val chunks = lods.map(lod => chunkNums(lod).map(x => computeGlobalPositionAndOffset(x._1, x._2)).toList) + val chunks = lods.map(lod => chunkCountsWithLod(lod).map(x => computeGlobalPositionAndOffset(x._1, x._2)).toList) val meshfileLods = lods .map( @@ -313,7 +322,7 @@ class MeshFileService @Inject()(config: DataStoreConfig)(implicit ec: ExecutionC MeshSegmentInfo(chunkShape = segmentInfo.chunkShape, gridOrigin = segmentInfo.gridOrigin, lods = meshfileLods) } - private def getNeuroglancerOffsets(segmentId: Long, cachedMeshFile: CachedHdf5File): (Long, Long) = { + private def getNeuroglancerSegmentManifestOffsets(segmentId: Long, cachedMeshFile: CachedHdf5File): (Long, Long) = { val nBuckets = cachedMeshFile.reader.uint64().getAttr("/", "n_buckets") val hashName = cachedMeshFile.reader.string().getAttr("/", "hash_function") From 666a8b3b2a5063becfd260dbcd2b4f9fc505f420 Mon Sep 17 00:00:00 2001 From: Florian M Date: Thu, 30 Mar 2023 13:03:54 +0200 Subject: [PATCH 09/14] Rename demo instance to wkorg instance (#6941) * Rename demo instance to wkorg instance * changelog * snapshots, html template --- MIGRATIONS.unreleased.md | 2 ++ app/RequestHandler.scala | 10 +++++----- .../AuthenticationController.scala | 4 ++-- ...oller.scala => WkorgProxyController.scala} | 4 ++-- .../organization/OrganizationService.scala | 4 ++-- app/utils/WkConf.scala | 2 +- app/views/main.scala.html | 4 ++-- conf/application.conf | 8 ++++---- .../admin/auth/authentication_modal.tsx | 2 +- .../admin/auth/registration_view.tsx | 2 +- .../admin/dataset/dataset_add_view.tsx | 2 +- .../admin/dataset/dataset_upload_view.tsx | 4 ++-- frontend/javascripts/admin/onboarding.tsx | 2 +- .../javascripts/admin/task/task_list_view.tsx | 2 +- .../javascripts/dashboard/dashboard_view.tsx | 4 ++-- .../javascripts/dashboard/dataset_view.tsx | 4 ++-- frontend/javascripts/navbar.tsx | 6 +++--- .../novel_user_experiences/welcome_toast.tsx | 2 +- frontend/javascripts/router.tsx | 6 +++--- .../backend-snapshot-tests/misc.e2e.js.md | 2 +- .../backend-snapshot-tests/misc.e2e.js.snap | Bin 1253 -> 1254 bytes frontend/javascripts/types/api_flow_types.ts | 2 +- 22 files changed, 40 insertions(+), 38 deletions(-) rename app/controllers/{DemoProxyController.scala => WkorgProxyController.scala} (89%) diff --git a/MIGRATIONS.unreleased.md b/MIGRATIONS.unreleased.md index 6c92428cdd..0a670b32c9 100644 --- a/MIGRATIONS.unreleased.md +++ b/MIGRATIONS.unreleased.md @@ -8,4 +8,6 @@ User-facing changes are documented in the [changelog](CHANGELOG.released.md). 
## Unreleased [Commits](https://github.com/scalableminds/webknossos/compare/23.04.0...HEAD) + - The config key features.isDemoInstance was renamed to features.isWkorgInstance (only needs to be adapted for the main wkorg instance). [#6941](https://github.com/scalableminds/webknossos/pull/6941/files) + ### Postgres Evolutions: diff --git a/app/RequestHandler.scala b/app/RequestHandler.scala index a9cab56709..0c14016546 100644 --- a/app/RequestHandler.scala +++ b/app/RequestHandler.scala @@ -1,5 +1,5 @@ import com.typesafe.scalalogging.LazyLogging -import controllers.{Assets, DemoProxyController, SitemapController} +import controllers.{Assets, WkorgProxyController, SitemapController} import javax.inject.Inject import play.api.OptionalDevContext import play.api.http.{DefaultHttpRequestHandler, HttpConfiguration, HttpErrorHandler, HttpFilters} @@ -13,7 +13,7 @@ class RequestHandler @Inject()(webCommands: WebCommands, router: Router, errorHandler: HttpErrorHandler, httpConfiguration: HttpConfiguration, - demoProxyController: DemoProxyController, + wkorgProxyController: WkorgProxyController, filters: HttpFilters, conf: WkConf, assets: Assets, @@ -35,11 +35,11 @@ class RequestHandler @Inject()(webCommands: WebCommands, } else if (request.uri.matches("^(/assets/).*$")) { val path = request.path.replaceFirst("^(/assets/)", "") Some(assets.at(path = "/public", file = path)) - } else if (request.uri.matches("""^/sitemap.xml$""") && conf.Features.isDemoInstance) { + } else if (request.uri.matches("""^/sitemap.xml$""") && conf.Features.isWkorgInstance) { Some(sitemapController.getSitemap(conf.Http.uri)) - } else if (request.uri.matches("^/sw\\.(.*)\\.js$") && conf.Features.isDemoInstance) { + } else if (request.uri.matches("^/sw\\.(.*)\\.js$") && conf.Features.isWkorgInstance) { Some(Action { Ok("").as("text/javascript") }) } else if (request.uri == "/favicon.ico") { Some(Action { NotFound }) - } else Some(demoProxyController.proxyPageOrMainView) + } else Some(wkorgProxyController.proxyPageOrMainView) } diff --git a/app/controllers/AuthenticationController.scala b/app/controllers/AuthenticationController.scala index 3b3aafbbc6..696d771e70 100755 --- a/app/controllers/AuthenticationController.scala +++ b/app/controllers/AuthenticationController.scala @@ -144,7 +144,7 @@ class AuthenticationController @Inject()( _ <- Fox.runIf(inviteBox.isDefined)(Fox.runOptional(inviteBox.toOption)(i => inviteService.deactivateUsedInvite(i)(GlobalAccessContext))) brainDBResult <- Fox.runIf(registerBrainDB)(brainTracing.registerIfNeeded(user, password.getOrElse(""))) - _ = if (conf.Features.isDemoInstance) { + _ = if (conf.Features.isWkorgInstance) { mailchimpClient.registerUser(user, multiUser, tag = MailchimpTag.RegisteredAsUser) } else { Mailer ! 
Send(defaultMails.newUserMail(user.name, email, brainDBResult.flatten, autoActivate)) @@ -597,7 +597,7 @@ class AuthenticationController @Inject()( defaultMails.newOrganizationMail(organization.displayName, email.toLowerCase, request.headers.get("Host").getOrElse(""))) - if (conf.Features.isDemoInstance) { + if (conf.Features.isWkorgInstance) { mailchimpClient.registerUser(user, multiUser, MailchimpTag.RegisteredAsAdmin) } Ok diff --git a/app/controllers/DemoProxyController.scala b/app/controllers/WkorgProxyController.scala similarity index 89% rename from app/controllers/DemoProxyController.scala rename to app/controllers/WkorgProxyController.scala index ff7fac3a20..b3239f89c5 100644 --- a/app/controllers/DemoProxyController.scala +++ b/app/controllers/WkorgProxyController.scala @@ -14,7 +14,7 @@ import utils.WkConf import scala.concurrent.ExecutionContext import scala.util.matching.Regex -class DemoProxyController @Inject()(ws: WSClient, conf: WkConf, sil: Silhouette[WkEnv], multiUserDAO: MultiUserDAO)( +class WkorgProxyController @Inject()(ws: WSClient, conf: WkConf, sil: Silhouette[WkEnv], multiUserDAO: MultiUserDAO)( implicit ec: ExecutionContext) extends Controller { @@ -30,7 +30,7 @@ class DemoProxyController @Inject()(ws: WSClient, conf: WkConf, sil: Silhouette[ } private def matchesProxyPage(request: UserAwareRequest[WkEnv, AnyContent]): Boolean = - conf.Features.isDemoInstance && conf.Proxy.routes + conf.Features.isWkorgInstance && conf.Proxy.routes .exists(route => matchesPageWithWildcard(route, request.path)) && (request.identity.isEmpty || request.uri != "/") private def matchesPageWithWildcard(routeWithWildcard: String, actualRequest: String): Boolean = { diff --git a/app/models/organization/OrganizationService.scala b/app/models/organization/OrganizationService.scala index 959ec566ff..2fd83784ab 100644 --- a/app/models/organization/OrganizationService.scala +++ b/app/models/organization/OrganizationService.scala @@ -77,7 +77,7 @@ class OrganizationService @Inject()(organizationDAO: OrganizationDAO, def assertMayCreateOrganization(requestingUser: Option[User]): Fox[Unit] = { val noOrganizationPresent = initialDataService.assertNoOrganizationsPresent - val activatedInConfig = bool2Fox(conf.Features.isDemoInstance) ?~> "allowOrganizationCreation.notEnabled" + val activatedInConfig = bool2Fox(conf.Features.isWkorgInstance) ?~> "allowOrganizationCreation.notEnabled" val userIsSuperUser = requestingUser.toFox.flatMap(user => multiUserDAO.findOne(user._multiUser)(GlobalAccessContext).flatMap(multiUser => bool2Fox(multiUser.isSuperUser))) @@ -93,7 +93,7 @@ class OrganizationService @Inject()(organizationDAO: OrganizationDAO, .replaceAll(" ", "_") existingOrganization <- organizationDAO.findOneByName(organizationName)(GlobalAccessContext).futureBox _ <- bool2Fox(existingOrganization.isEmpty) ?~> "organization.name.alreadyInUse" - initialPricingParameters = if (conf.Features.isDemoInstance) (PricingPlan.Basic, Some(3), Some(50000000000L)) + initialPricingParameters = if (conf.Features.isWkorgInstance) (PricingPlan.Basic, Some(3), Some(50000000000L)) else (PricingPlan.Custom, None, None) organizationRootFolder = Folder(ObjectId.generate, folderService.defaultRootName) diff --git a/app/utils/WkConf.scala b/app/utils/WkConf.scala index f3d19843ac..a70012d40c 100644 --- a/app/utils/WkConf.scala +++ b/app/utils/WkConf.scala @@ -95,7 +95,7 @@ class WkConf @Inject()(configuration: Configuration) extends ConfigReader with L } object Features { - val isDemoInstance: Boolean = 
get[Boolean]("features.isDemoInstance") + val isWkorgInstance: Boolean = get[Boolean]("features.isWkorgInstance") val jobsEnabled: Boolean = get[Boolean]("features.jobsEnabled") val voxelyticsEnabled: Boolean = get[Boolean]("features.voxelyticsEnabled") val taskReopenAllowed: FiniteDuration = get[Int]("features.taskReopenAllowedInSeconds") seconds diff --git a/app/views/main.scala.html b/app/views/main.scala.html index a0b47eefaa..b2347cf5d9 100755 --- a/app/views/main.scala.html +++ b/app/views/main.scala.html @@ -5,7 +5,7 @@ @(conf.WebKnossos.tabTitle) - @if(conf.Features.isDemoInstance){ + @if(conf.Features.isWkorgInstance){ - @if(conf.Features.isDemoInstance){ + @if(conf.Features.isWkorgInstance){