diff --git a/CHANGELOG.unreleased.md b/CHANGELOG.unreleased.md index 3cc1bb05d91..320c004b755 100644 --- a/CHANGELOG.unreleased.md +++ b/CHANGELOG.unreleased.md @@ -26,9 +26,11 @@ For upgrade instructions, please check the [migration guide](MIGRATIONS.released - Improved progress indicator when saving volume data. [#7264](https://github.com/scalableminds/webknossos/pull/7264) - The order of color layers can now also be manipulated in additive blend mode (see [#7188](https://github.com/scalableminds/webknossos/pull/7188)). [#7289](https://github.com/scalableminds/webknossos/pull/7289) - OpenID Connect authorization now fetches the server’s public key automatically. The config keys `singleSignOn.openIdConnect.publicKey` and `singleSignOn.openIdConnect.publicKeyAlgorithm` are now unused. [7267](https://github.com/scalableminds/webknossos/pull/7267) +- When importing a remote dataset and adding another layer with a different voxel size, that layer is now scaled to match the first layer. [#7213](https://github.com/scalableminds/webknossos/pull/7213) + ### Fixed -- Fixed that is was possible to have larger active segment ids that supported by the data type of the segmentation layer which caused the segmentation ids to overflow. [#7240](https://github.com/scalableminds/webknossos/pull/7240) +- Fixed that it was possible to have larger active segment ids than supported by the data type of the segmentation layer which caused the segmentation ids to overflow. [#7240](https://github.com/scalableminds/webknossos/pull/7240) - Fixed that folders could appear in the dataset search output in the dashboard. [#7232](https://github.com/scalableminds/webknossos/pull/7232) - Fixed that the edit icon for an annotation description could disappear in Firefox. [#7250](https://github.com/scalableminds/webknossos/pull/7250) - Fixed that assigning an invalid script name (e.g. with special characters) would trigger an error in the database. Now leads to a more descriptive error. 
[#7525](https://github.com/scalableminds/webknossos/pull/7525) diff --git a/app/controllers/DataSetController.scala b/app/controllers/DataSetController.scala index 3f6167a2557..7600dbca5d2 100755 --- a/app/controllers/DataSetController.scala +++ b/app/controllers/DataSetController.scala @@ -136,7 +136,7 @@ class DataSetController @Inject()(userService: UserService, def exploreAndAddRemoteDataset(): Action[ExploreAndAddRemoteDatasetParameters] = sil.SecuredAction.async(validateJson[ExploreAndAddRemoteDatasetParameters]) { implicit request => val reportMutable = ListBuffer[String]() - val adaptedParameters = ExploreRemoteDatasetParameters(request.body.remoteUri, None, None) + val adaptedParameters = ExploreRemoteDatasetParameters(request.body.remoteUri, None, None, None) for { dataSource <- exploreRemoteLayerService.exploreRemoteDatasource(List(adaptedParameters), request.identity, diff --git a/app/models/binary/explore/ExploreRemoteLayerService.scala b/app/models/binary/explore/ExploreRemoteLayerService.scala index 08b84df6494..66c0ad3dee7 100644 --- a/app/models/binary/explore/ExploreRemoteLayerService.scala +++ b/app/models/binary/explore/ExploreRemoteLayerService.scala @@ -37,7 +37,8 @@ import scala.util.Try case class ExploreRemoteDatasetParameters(remoteUri: String, credentialIdentifier: Option[String], - credentialSecret: Option[String]) + credentialSecret: Option[String], + preferredVoxelSize: Option[Vec3Double]) object ExploreRemoteDatasetParameters { implicit val jsonFormat: OFormat[ExploreRemoteDatasetParameters] = Json.format[ExploreRemoteDatasetParameters] @@ -63,11 +64,11 @@ class ExploreRemoteLayerService @Inject()(credentialService: CredentialService, private lazy val bearerTokenService = wkSilhouetteEnvironment.combinedAuthenticatorService.tokenAuthenticatorService def exploreRemoteDatasource( - urisWithCredentials: List[ExploreRemoteDatasetParameters], + parameters: List[ExploreRemoteDatasetParameters], requestIdentity: WkEnv#I, reportMutable: 
ListBuffer[String])(implicit ec: ExecutionContext): Fox[GenericDataSource[DataLayer]] = for { - exploredLayersNested <- Fox.serialCombined(urisWithCredentials)( + exploredLayersNested <- Fox.serialCombined(parameters)( parameters => exploreRemoteLayersForUri(parameters.remoteUri, parameters.credentialIdentifier, @@ -75,14 +76,18 @@ class ExploreRemoteLayerService @Inject()(credentialService: CredentialService, reportMutable, requestIdentity)) layersWithVoxelSizes = exploredLayersNested.flatten + preferredVoxelSize = parameters.flatMap(_.preferredVoxelSize).headOption _ <- bool2Fox(layersWithVoxelSizes.nonEmpty) ?~> "Detected zero layers" - rescaledLayersAndVoxelSize <- rescaleLayersByCommonVoxelSize(layersWithVoxelSizes) ?~> "Could not extract common voxel size from layers" + rescaledLayersAndVoxelSize <- rescaleLayersByCommonVoxelSize(layersWithVoxelSizes, preferredVoxelSize) ?~> "Could not extract common voxel size from layers" rescaledLayers = rescaledLayersAndVoxelSize._1 voxelSize = rescaledLayersAndVoxelSize._2 renamedLayers = makeLayerNamesUnique(rescaledLayers) + layersWithCoordinateTransformations = addCoordinateTransformationsToLayers(renamedLayers, + preferredVoxelSize, + voxelSize) dataSource = GenericDataSource[DataLayer]( DataSourceId("", ""), // Frontend will prompt user for a good name - renamedLayers, + layersWithCoordinateTransformations, voxelSize ) } yield dataSource @@ -124,10 +129,35 @@ class ExploreRemoteLayerService @Inject()(credentialService: CredentialService, } } + private def addCoordinateTransformationsToLayers(layers: List[DataLayer], + preferredVoxelSize: Option[Vec3Double], + voxelSize: Vec3Double): List[DataLayer] = + layers.map(l => { + val coordinateTransformations = coordinateTransformationForVoxelSize(voxelSize, preferredVoxelSize) + l match { + case l: ZarrDataLayer => l.copy(coordinateTransformations = coordinateTransformations) + case l: ZarrSegmentationLayer => l.copy(coordinateTransformations = coordinateTransformations) + 
case l: N5DataLayer => l.copy(coordinateTransformations = coordinateTransformations) + case l: N5SegmentationLayer => l.copy(coordinateTransformations = coordinateTransformations) + case l: PrecomputedDataLayer => l.copy(coordinateTransformations = coordinateTransformations) + case l: PrecomputedSegmentationLayer => l.copy(coordinateTransformations = coordinateTransformations) + case l: Zarr3DataLayer => l.copy(coordinateTransformations = coordinateTransformations) + case l: Zarr3SegmentationLayer => l.copy(coordinateTransformations = coordinateTransformations) + case _ => throw new Exception("Encountered unsupported layer format during explore remote") + } + }) + + private def isPowerOfTwo(x: Int): Boolean = + x != 0 && (x & (x - 1)) == 0 + + private def isPowerOfTwo(x: Double): Boolean = { + val epsilon = 0.0001 + val l = (math.log(x) / math.log(2)) + math.abs(l - l.round.toDouble) < epsilon + } + private def magFromVoxelSize(minVoxelSize: Vec3Double, voxelSize: Vec3Double)( implicit ec: ExecutionContext): Fox[Vec3Int] = { - def isPowerOfTwo(x: Int): Boolean = - x != 0 && (x & (x - 1)) == 0 val mag = (voxelSize / minVoxelSize).round.toVec3Int for { @@ -140,8 +170,42 @@ class ExploreRemoteLayerService @Inject()(credentialService: CredentialService, _ <- bool2Fox(magGroup.length == 1) ?~> s"detected mags are not unique, found $magGroup" } yield () - private def rescaleLayersByCommonVoxelSize(layersWithVoxelSizes: List[(DataLayer, Vec3Double)])( - implicit ec: ExecutionContext): Fox[(List[DataLayer], Vec3Double)] = { + private def findBaseVoxelSize(minVoxelSize: Vec3Double, preferredVoxelSizeOpt: Option[Vec3Double]): Vec3Double = + preferredVoxelSizeOpt match { + case Some(preferredVoxelSize) => + val baseMag = minVoxelSize / preferredVoxelSize + if (isPowerOfTwo(baseMag.x) && isPowerOfTwo(baseMag.y) && isPowerOfTwo(baseMag.z)) { + preferredVoxelSize + } else { + minVoxelSize + } + case None => minVoxelSize + } + + private def coordinateTransformationForVoxelSize( + 
foundVoxelSize: Vec3Double, + preferredVoxelSize: Option[Vec3Double]): Option[List[CoordinateTransformation]] = + preferredVoxelSize match { + case None => None + case Some(voxelSize) => + if (voxelSize == foundVoxelSize) { None } else { + val scale = foundVoxelSize / voxelSize + Some( + List( + CoordinateTransformation(CoordinateTransformationType.affine, + matrix = Some( + List( + List(scale.x, 0, 0, 0), + List(0, scale.y, 0, 0), + List(0, 0, scale.z, 0), + List(0, 0, 0, 1) + ))))) + } + } + + private def rescaleLayersByCommonVoxelSize( + layersWithVoxelSizes: List[(DataLayer, Vec3Double)], + preferredVoxelSize: Option[Vec3Double])(implicit ec: ExecutionContext): Fox[(List[DataLayer], Vec3Double)] = { val allVoxelSizes = layersWithVoxelSizes .flatMap(layerWithVoxelSize => { val layer = layerWithVoxelSize._1 @@ -154,14 +218,15 @@ class ExploreRemoteLayerService @Inject()(credentialService: CredentialService, for { minVoxelSize <- option2Fox(minVoxelSizeOpt) - allMags <- Fox.combined(allVoxelSizes.map(magFromVoxelSize(minVoxelSize, _)).toList) ?~> s"voxel sizes for layers are not uniform, got ${layersWithVoxelSizes + baseVoxelSize = findBaseVoxelSize(minVoxelSize, preferredVoxelSize) + allMags <- Fox.combined(allVoxelSizes.map(magFromVoxelSize(baseVoxelSize, _)).toList) ?~> s"voxel sizes for layers are not uniform, got ${layersWithVoxelSizes .map(_._2)}" groupedMags = allMags.groupBy(_.maxDim) _ <- Fox.combined(groupedMags.values.map(checkForDuplicateMags).toList) rescaledLayers = layersWithVoxelSizes.map(layerWithVoxelSize => { val layer = layerWithVoxelSize._1 val layerVoxelSize = layerWithVoxelSize._2 - val magFactors = (layerVoxelSize / minVoxelSize).toVec3Int + val magFactors = (layerVoxelSize / baseVoxelSize).toVec3Int layer match { case l: ZarrDataLayer => l.copy(mags = l.mags.map(mag => mag.copy(mag = mag.mag * magFactors)), @@ -190,7 +255,7 @@ class ExploreRemoteLayerService @Inject()(credentialService: CredentialService, case _ => throw new 
Exception("Encountered unsupported layer format during explore remote") } }) - } yield (rescaledLayers, minVoxelSize) + } yield (rescaledLayers, baseVoxelSize) } private def exploreRemoteLayersForUri( diff --git a/frontend/javascripts/admin/admin_rest_api.ts b/frontend/javascripts/admin/admin_rest_api.ts index 28512bdf223..f6aa73d823e 100644 --- a/frontend/javascripts/admin/admin_rest_api.ts +++ b/frontend/javascripts/admin/admin_rest_api.ts @@ -1522,16 +1522,26 @@ type ExplorationResult = { export async function exploreRemoteDataset( remoteUris: string[], - credentials?: { username: string; pass: string }, + credentials?: { username: string; pass: string } | null, + preferredVoxelSize?: Vector3, ): Promise { const { dataSource, report } = await Request.sendJSONReceiveJSON("/api/datasets/exploreRemote", { - data: credentials - ? remoteUris.map((uri) => ({ - remoteUri: uri.trim(), + data: remoteUris.map((uri) => { + const extendedUri = { + remoteUri: uri.trim(), + preferredVoxelSize, + }; + + if (credentials) { + return { + ...extendedUri, credentialIdentifier: credentials.username, credentialSecret: credentials.pass, - })) - : remoteUris.map((uri) => ({ remoteUri: uri.trim() })), + }; + } + + return extendedUri; + }), }); if (report.indexOf("403 Forbidden") !== -1 || report.indexOf("401 Unauthorized") !== -1) { Toast.error("The data could not be accessed. 
Please verify the credentials!"); @@ -2132,7 +2142,7 @@ export function computeIsosurface( }, }, ); - const neighbors = Utils.parseAsMaybe(headers.neighbors).getOrElse([]); + const neighbors = Utils.parseMaybe(headers.neighbors) || []; return { buffer, neighbors, diff --git a/frontend/javascripts/admin/dataset/dataset_add_remote_view.tsx b/frontend/javascripts/admin/dataset/dataset_add_remote_view.tsx index a0d106036e6..e06e81ca944 100644 --- a/frontend/javascripts/admin/dataset/dataset_add_remote_view.tsx +++ b/frontend/javascripts/admin/dataset/dataset_add_remote_view.tsx @@ -36,6 +36,7 @@ import Upload, { RcFile, UploadChangeParam, UploadFile } from "antd/lib/upload"; import { UnlockOutlined } from "@ant-design/icons"; import { Unicode } from "oxalis/constants"; import { readFileAsText } from "libs/read_file"; +import * as Utils from "libs/utils"; const { Panel } = Collapse; const FormItem = Form.Item; @@ -403,26 +404,37 @@ function AddZarrLayer({ const datasourceConfigStr = form.getFieldValue("dataSourceJson"); const { dataSource: newDataSource, report } = await (async () => { + // @ts-ignore + const preferredVoxelSize = Utils.parseMaybe(datasourceConfigStr)?.scale; + if (showCredentialsFields) { if (selectedProtocol === "gs") { const credentials = fileList.length > 0 ? 
await parseCredentials(fileList[0]?.originFileObj) : null; if (credentials) { - return exploreRemoteDataset([datasourceUrl], { - username: "", - pass: JSON.stringify(credentials), - }); + return exploreRemoteDataset( + [datasourceUrl], + { + username: "", + pass: JSON.stringify(credentials), + }, + preferredVoxelSize, + ); } else { // Fall through to exploreRemoteDataset without parameters } } else if (usernameOrAccessKey && passwordOrSecretKey) { - return exploreRemoteDataset([datasourceUrl], { - username: usernameOrAccessKey, - pass: passwordOrSecretKey, - }); + return exploreRemoteDataset( + [datasourceUrl], + { + username: usernameOrAccessKey, + pass: passwordOrSecretKey, + }, + preferredVoxelSize, + ); } } - return exploreRemoteDataset([datasourceUrl]); + return exploreRemoteDataset([datasourceUrl], null, preferredVoxelSize); })(); setExploreLog(report); if (!newDataSource) { diff --git a/frontend/javascripts/dashboard/dataset/dataset_settings_data_tab.tsx b/frontend/javascripts/dashboard/dataset/dataset_settings_data_tab.tsx index dcb6a1f8141..2666d5c239c 100644 --- a/frontend/javascripts/dashboard/dataset/dataset_settings_data_tab.tsx +++ b/frontend/javascripts/dashboard/dataset/dataset_settings_data_tab.tsx @@ -25,7 +25,7 @@ import { jsonEditStyle, } from "dashboard/dataset/helper_components"; import { startFindLargestSegmentIdJob } from "admin/admin_rest_api"; -import { jsonStringify, parseAsMaybe } from "libs/utils"; +import { jsonStringify, parseMaybe } from "libs/utils"; import { DataLayer } from "types/schemas/datasource.types"; import { getDatasetNameRules, layerNameRules } from "admin/dataset/dataset_components"; import { useSelector } from "react-redux"; @@ -52,9 +52,7 @@ export const syncDataSourceFields = ( dataSourceJson: jsonStringify(dataSourceFromSimpleTab), }); } else { - const dataSourceFromAdvancedTab = parseAsMaybe(form.getFieldValue("dataSourceJson")).getOrElse( - null, - ); + const dataSourceFromAdvancedTab = 
parseMaybe(form.getFieldValue("dataSourceJson")); // Copy from advanced to simple: update form values form.setFieldsValue({ dataSource: dataSourceFromAdvancedTab, diff --git a/frontend/javascripts/libs/utils.ts b/frontend/javascripts/libs/utils.ts index 496310ea9bf..337a47c40b6 100644 --- a/frontend/javascripts/libs/utils.ts +++ b/frontend/javascripts/libs/utils.ts @@ -132,17 +132,17 @@ export function maybe(fn: (arg0: A) => B): (arg0: A | null | undefined) => return (nullableA: A | null | undefined) => Maybe.fromNullable(nullableA).map(fn); } -export function parseAsMaybe(str: string | null | undefined): Maybe { +export function parseMaybe(str: string | null | undefined): unknown | null { try { const parsedJSON = JSON.parse(str || ""); if (parsedJSON != null) { - return Maybe.Just(parsedJSON); + return parsedJSON; } else { - return Maybe.Nothing(); + return null; } } catch (_exception) { - return Maybe.Nothing(); + return null; } } diff --git a/frontend/javascripts/oxalis/model/bucket_data_handling/wkstore_adapter.ts b/frontend/javascripts/oxalis/model/bucket_data_handling/wkstore_adapter.ts index e31ebbe4c26..8b3ce1aba55 100644 --- a/frontend/javascripts/oxalis/model/bucket_data_handling/wkstore_adapter.ts +++ b/frontend/javascripts/oxalis/model/bucket_data_handling/wkstore_adapter.ts @@ -9,7 +9,7 @@ import { getResolutionInfo, } from "oxalis/model/accessors/dataset_accessor"; import { getVolumeTracingById } from "oxalis/model/accessors/volumetracing_accessor"; -import { parseAsMaybe } from "libs/utils"; +import { parseMaybe } from "libs/utils"; import type { UpdateAction } from "oxalis/model/sagas/update_actions"; import { updateBucket } from "oxalis/model/sagas/update_actions"; import ByteArraysToLz4Base64Worker from "oxalis/workers/byte_arrays_to_lz4_base64.worker"; @@ -193,7 +193,7 @@ export async function requestFromStore( showErrorToast: false, }); const endTime = window.performance.now(); - const missingBuckets = 
parseAsMaybe(headers["missing-buckets"]).getOrElse([]); + const missingBuckets = (parseMaybe(headers["missing-buckets"]) || []) as number[]; const receivedBucketsCount = batch.length - missingBuckets.length; const BUCKET_BYTE_LENGTH = constants.BUCKET_SIZE * getByteCountFromLayer(layerInfo); getGlobalDataConnectionInfo().log( diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/datasource/CoordinateTransformation.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/datasource/CoordinateTransformation.scala index 16b5f2b840a..b0b5df65963 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/datasource/CoordinateTransformation.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/datasource/CoordinateTransformation.scala @@ -15,7 +15,7 @@ object ThinPlateSplineCorrespondences { case class CoordinateTransformation(`type`: CoordinateTransformationType, matrix: Option[List[List[Double]]], - correspondences: Option[ThinPlateSplineCorrespondences]) + correspondences: Option[ThinPlateSplineCorrespondences] = None) object CoordinateTransformation { implicit val jsonFormat: OFormat[CoordinateTransformation] = Json.format[CoordinateTransformation]