diff --git a/CHANGELOG.unreleased.md b/CHANGELOG.unreleased.md
index 5e767a62c72..730df341f08 100644
--- a/CHANGELOG.unreleased.md
+++ b/CHANGELOG.unreleased.md
@@ -54,6 +54,7 @@ For upgrade instructions, please check the [migration guide](MIGRATIONS.released.md).
 - Fixed that skeleton groups couldn't be collapsed or expanded in locked annotations. [#7988](https://github.com/scalableminds/webknossos/pull/7988)
 - Fixed that registering segments for a bounding box only worked if the segmentation had mag 1. [#8009](https://github.com/scalableminds/webknossos/pull/8009)
 - Fixed uploading datasets in neuroglancer precomputed and n5 data format. [#8008](https://github.com/scalableminds/webknossos/pull/8008)
+- Various fixes for composing datasets with landmarks. Note that the interpretation of correspondence points was inverted for thin plate splines. [#7992](https://github.com/scalableminds/webknossos/pull/7992)
 
 ### Removed
 
diff --git a/frontend/javascripts/admin/dataset/composition_wizard/02_upload_files.tsx b/frontend/javascripts/admin/dataset/composition_wizard/02_upload_files.tsx
index 6bd0ac87973..352442e0661 100644
--- a/frontend/javascripts/admin/dataset/composition_wizard/02_upload_files.tsx
+++ b/frontend/javascripts/admin/dataset/composition_wizard/02_upload_files.tsx
@@ -173,14 +173,17 @@ async function parseNmlFiles(fileList: FileList): Promise<Partial<WizardContext>> {
     throw new SoftError("The two NML files should have the same tree count.");
   }
 
-  for (const [tree1, tree2] of _.zip(Utils.values(trees1), Utils.values(trees2))) {
+  for (const [tree1, tree2] of _.zip(
+    Utils.values(trees1).sort((a, b) => a.treeId - b.treeId),
+    Utils.values(trees2).sort((a, b) => a.treeId - b.treeId),
+  )) {
     if (tree1 == null || tree2 == null) {
       // Satisfy TS. This should not happen, as we checked before that both tree collections
       // have the same size.
       throw new SoftError("A tree was unexpectedly parsed as null. Please try again.");
     }
-    const nodes1 = Array.from(tree1.nodes.values());
-    const nodes2 = Array.from(tree2.nodes.values());
+    const nodes1 = Array.from(tree1.nodes.values()).sort((a, b) => a.id - b.id);
+    const nodes2 = Array.from(tree2.nodes.values()).sort((a, b) => a.id - b.id);
     for (const [node1, node2] of _.zip(nodes1, nodes2)) {
       if ((node1 == null) !== (node2 == null)) {
         throw new SoftError(
@@ -194,6 +197,10 @@ async function parseNmlFiles(fileList: FileList): Promise<Partial<WizardContext>> {
     }
   }
 
+  if (sourcePoints.length < 3) {
+    throw new SoftError("Each file should contain at least 3 nodes.");
+  }
+
   const datasets = await tryToFetchDatasetsByName(
     [datasetName1, datasetName2],
     "Could not derive datasets from NML. Please specify these manually.",
@@ -201,8 +208,8 @@ async function parseNmlFiles(fileList: FileList): Promise<Partial<WizardContext>> {
 
   return {
     datasets: datasets || [],
-    sourcePoints,
-    targetPoints,
+    sourcePoints, // Points from the first dataset (which will be transformed to match the second later)
+    targetPoints, // Points from the second dataset (which won't be transformed by default)
     currentWizardStep: "SelectDatasets",
   };
 }
diff --git a/frontend/javascripts/admin/dataset/composition_wizard/03_select_datasets.tsx b/frontend/javascripts/admin/dataset/composition_wizard/03_select_datasets.tsx
index 5dc73e6294d..8ad912cb6d7 100644
--- a/frontend/javascripts/admin/dataset/composition_wizard/03_select_datasets.tsx
+++ b/frontend/javascripts/admin/dataset/composition_wizard/03_select_datasets.tsx
@@ -48,7 +48,11 @@ export default function SelectDatasets({ wizardContext, setWizardContext }: WizardComponentProps) {
   return (
-      Select the datasets that you want to combine or doublecheck the pre-selected datasets.
+      Select the datasets that you want to combine or double-check the pre-selected datasets. Note
+      that the order of the datasets is important and must match the order of the uploaded files.
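For context on the 02_upload_files.tsx changes above: landmark correspondences are derived by pairing trees of the two NML files by ascending treeId and their nodes by ascending node id, so the i-th node of the i-th tree in file 1 matches the i-th node of the i-th tree in file 2 regardless of parsing order. The following is a standalone TypeScript sketch of that pairing strategy; NmlTree, NmlNode, and pairLandmarks are simplified, illustrative stand-ins, not the actual webknossos types.

// Standalone sketch of the pairing strategy used by parseNmlFiles above.
// NmlTree/NmlNode are simplified stand-ins for the actual webknossos types.
type Vec3 = [number, number, number];
type NmlNode = { id: number; position: Vec3 };
type NmlTree = { treeId: number; nodes: NmlNode[] };

function pairLandmarks(
  trees1: NmlTree[],
  trees2: NmlTree[],
): { sourcePoints: Vec3[]; targetPoints: Vec3[] } {
  if (trees1.length !== trees2.length) {
    throw new Error("The two NML files should have the same tree count.");
  }
  const byTreeId = (a: NmlTree, b: NmlTree) => a.treeId - b.treeId;
  const byNodeId = (a: NmlNode, b: NmlNode) => a.id - b.id;
  const sourcePoints: Vec3[] = [];
  const targetPoints: Vec3[] = [];
  const sorted1 = [...trees1].sort(byTreeId);
  const sorted2 = [...trees2].sort(byTreeId);
  for (let i = 0; i < sorted1.length; i++) {
    // Sorting by id makes the pairing independent of iteration order.
    const nodes1 = [...sorted1[i].nodes].sort(byNodeId);
    const nodes2 = [...sorted2[i].nodes].sort(byNodeId);
    if (nodes1.length !== nodes2.length) {
      throw new Error("Corresponding trees should have the same node count.");
    }
    for (let j = 0; j < nodes1.length; j++) {
      sourcePoints.push(nodes1[j].position);
      targetPoints.push(nodes2[j].position);
    }
  }
  return { sourcePoints, targetPoints };
}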
diff --git a/frontend/javascripts/admin/dataset/composition_wizard/04_configure_new_dataset.tsx b/frontend/javascripts/admin/dataset/composition_wizard/04_configure_new_dataset.tsx
--- a/frontend/javascripts/admin/dataset/composition_wizard/04_configure_new_dataset.tsx
+++ b/frontend/javascripts/admin/dataset/composition_wizard/04_configure_new_dataset.tsx
@@ ... @@ export function ConfigureNewDataset(props: WizardComponentProps) {
-  const handleTransformImport = async (sourcePoints: Vector3[], targetPoints: Vector3[]) => {
-    const datasets = linkedDatasets;
-    const transformationArr =
-      sourcePoints.length > 0 && targetPoints.length > 0
-        ? [
-            {
-              type: "affine" as const,
-              matrix: flatToNestedMatrix(estimateAffineMatrix4x4(sourcePoints, targetPoints)),
-            },
-          ]
-        : [];
-
+  const handleTransformImport = async () => {
     const newLinks: LayerLink[] = (
-      _.flatMap(datasets, (dataset) =>
+      _.flatMap(linkedDatasets, (dataset) =>
         dataset.dataSource.dataLayers.map((layer) => [dataset, layer]),
       ) as [APIDataset, APIDataLayer][]
     ).map(
@@ -74,21 +73,54 @@ export function ConfigureNewDataset(props: WizardComponentProps) {
         },
         sourceName: dataLayer.name,
         newName: dataLayer.name,
-        transformations: dataset === datasets[0] ? transformationArr : [],
+        transformations: [],
       }),
     );
     form.setFieldsValue({ layers: newLinks });
   };
 
   useEffectOnlyOnce(() => {
-    handleTransformImport(wizardContext.sourcePoints, wizardContext.targetPoints);
+    handleTransformImport();
   });
 
   const handleSubmit = async () => {
     if (activeUser == null) {
       throw new Error("Cannot create dataset without being logged in.");
     }
-    const layers = form.getFieldValue(["layers"]);
+    const layersWithoutTransforms = form.getFieldValue(["layers"]) as LayerLink[];
+    const useThinPlateSplines = (form.getFieldValue("useThinPlateSplines") ?? false) as boolean;
+
+    const affineMeanError = { meanError: 0 };
+
+    function withTransforms(layers: LayerLink[], sourcePoints: Vector3[], targetPoints: Vector3[]) {
+      if (sourcePoints.length + targetPoints.length === 0) {
+        return layers;
+      }
+
+      const transformationArr = [
+        useThinPlateSplines
+          ? {
+              type: "thin_plate_spline" as const,
+              correspondences: { source: sourcePoints, target: targetPoints },
+            }
+          : {
+              type: "affine" as const,
+              matrix: flatToNestedMatrix(
+                estimateAffineMatrix4x4(sourcePoints, targetPoints, affineMeanError),
+              ),
+            },
+      ];
+      if (useThinPlateSplines) {
+        checkLandmarksForThinPlateSpline(sourcePoints, targetPoints);
+      }
+      return layers.map((layer) => ({
+        ...layer,
+        // The first dataset will be transformed to match the second.
+        transformations: areDatasetsIdentical(layer.datasetId, linkedDatasets[0])
+          ? transformationArr
+          : [],
+      }));
+    }
 
     const uploadableDatastores = props.datastores.filter((datastore) => datastore.allowsUpload);
     const datastoreToUse = uploadableDatastores[0];
@@ -97,6 +129,35 @@ export function ConfigureNewDataset(props: WizardComponentProps) {
       return;
     }
 
+    let layersWithTransforms;
+    const { sourcePoints, targetPoints } = wizardContext;
+    try {
+      layersWithTransforms = withTransforms(layersWithoutTransforms, sourcePoints, targetPoints);
+    } catch (exception) {
+      const tryAugmentation = await new Promise((resolve) => {
+        Modal.confirm({
+          title: "Augment landmarks?",
+          content:
+            "The provided landmarks can't be used for affine estimation, possibly " +
+            "due to their planar nature. Should a constant translation in the Z " +
+            "direction be assumed, and the landmarks adjusted accordingly?",
+          onOk: () => resolve(true),
+          onCancel: () => resolve(false),
+        });
+      });
+      const augmentLandmarks = (points: Vector3[]) =>
+        points.concat(points.map((p) => [p[0], p[1], p[2] + 1]));
+      if (tryAugmentation) {
+        layersWithTransforms = withTransforms(
+          layersWithoutTransforms,
+          augmentLandmarks(sourcePoints),
+          augmentLandmarks(targetPoints),
+        );
+      } else {
+        throw exception;
+      }
+    }
+
     const newDatasetName = form.getFieldValue(["name"]);
     setIsLoading(true);
     try {
@@ -105,8 +166,38 @@ export function ConfigureNewDataset(props: WizardComponentProps) {
         targetFolderId: form.getFieldValue(["targetFolderId"]),
         organizationName: activeUser.organization,
         voxelSize: linkedDatasets.slice(-1)[0].dataSource.scale,
-        layers,
+        layers: layersWithTransforms,
       });
+
+      const uniqueDatasets = _.uniqBy(
+        layersWithoutTransforms.map((layer) => layer.datasetId),
+        (id) => id.owningOrganization + "-" + id.name,
+      );
+      const datasetMarkdownLinks = uniqueDatasets
+        .map((el) => `- [${el.name}](/datasets/${el.owningOrganization}/${el.name})`)
+        .join("\n");
+
+      await updateDatasetPartial(
+        { owningOrganization: activeUser.organization, name: newDatasetName },
+        {
+          description: [
+            "This dataset was composed from:",
+            datasetMarkdownLinks,
+            "",
+            "The layers were combined " +
+              (sourcePoints.length === 0
+                ? "without any transforms"
+                : `with ${
+                    useThinPlateSplines
+                      ? `Thin-Plate-Splines (${sourcePoints.length} correspondences)`
+                      : `an affine transformation (mean error: ${formatNumber(
+                          affineMeanError.meanError,
+                        )} vx)`
+                  }`) +
+              ".",
+          ].join("\n"),
+        },
+      );
     } finally {
       setIsLoading(false);
     }
@@ -118,7 +209,7 @@ export function ConfigureNewDataset(props: WizardComponentProps) {
     // Using Forms here only to validate fields and for easy layout

       Please configure the dataset that is about to be created.
-      <Form form={form} onFinish={handleSubmit}>
+      <Form form={form} onFinish={() => guardedWithErrorToast(handleSubmit)}>
@@ -180,6 +271,12 @@ export function ConfigureNewDataset(props: WizardComponentProps) {
           );
         }}
 
+        {WkDevFlags.datasetComposition.allowThinPlateSplines &&
+          wizardContext.sourcePoints.length > 0 && (
+            <FormItem name="useThinPlateSplines" valuePropName="checked">
+              <Checkbox>Use Thin-Plate-Splines (Experimental)</Checkbox>
+            </FormItem>
+          )}
 
         ) : null}
       {files.length > 0 ? (
diff --git a/frontend/javascripts/dashboard/explorative_annotations_view.tsx b/frontend/javascripts/dashboard/explorative_annotations_view.tsx
index 357a2e98ad2..0e18a0a4b51 100644
--- a/frontend/javascripts/dashboard/explorative_annotations_view.tsx
+++ b/frontend/javascripts/dashboard/explorative_annotations_view.tsx
@@ -682,7 +682,7 @@ class ExplorativeAnnotationsView extends React.PureComponent {
     return (
       <>
-        ...{ownerName}...
+        ...{ownerName}...
diff --git a/frontend/javascripts/dashboard/folders/details_sidebar.tsx b/frontend/javascripts/dashboard/folders/details_sidebar.tsx
index 0f816a92e22..4abfeee27e8 100644
--- a/frontend/javascripts/dashboard/folders/details_sidebar.tsx
+++ b/frontend/javascripts/dashboard/folders/details_sidebar.tsx
@@ -23,6 +23,7 @@ import { useSelector } from "react-redux";
 import { OxalisState } from "oxalis/store";
 import { getOrganization } from "admin/admin_rest_api";
 import { useQuery } from "@tanstack/react-query";
+import Markdown from "libs/markdown_adapter";
 
 export function DetailsSidebar({
   selectedDatasets,
@@ -148,7 +149,7 @@ function DatasetDetails({ selectedDataset }: { selectedDataset: APIDatasetCompact }) {
         Description
-        {fullDataset?.description}
+        <Markdown>{fullDataset?.description}</Markdown>
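Regarding the "Augment landmarks?" fallback in 04_configure_new_dataset.tsx above: when all correspondences lie in a single plane, the least-squares system for the affine matrix is rank-deficient and estimation fails. Duplicating every landmark shifted by +1 along z, in both point sets, adds the constraint that z is mapped with unit scale plus a constant translation, which makes the system solvable again. A minimal sketch of the idea (the sample points are invented; the explicit Vec3 annotation keeps the mapped tuples typed):

// Illustrative sketch of the augmentation fallback; the points are made up.
type Vec3 = [number, number, number];

// Duplicate each landmark one unit deeper along z (applied to both point sets).
const augmentLandmarks = (points: Vec3[]): Vec3[] =>
  points.concat(points.map((p): Vec3 => [p[0], p[1], p[2] + 1]));

// Three coplanar correspondences (z = 0 everywhere) leave the z column of the
// affine matrix undetermined; after augmentation it is pinned to unit scale.
const planarSource: Vec3[] = [
  [0, 0, 0],
  [100, 0, 0],
  [0, 100, 0],
];
console.log(augmentLandmarks(planarSource));
// -> [[0,0,0],[100,0,0],[0,100,0],[0,0,1],[100,0,1],[0,100,1]]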
diff --git a/frontend/javascripts/libs/estimate_affine.ts b/frontend/javascripts/libs/estimate_affine.ts
index 715a87d1385..7413361dbc9 100644
--- a/frontend/javascripts/libs/estimate_affine.ts
+++ b/frontend/javascripts/libs/estimate_affine.ts
@@ -3,8 +3,12 @@ import { Matrix4x4 } from "mjs";
 import { Matrix, solve } from "ml-matrix";
 import { Vector3 } from "oxalis/constants";
 
-// Estimates an affine matrix that transforms from source points to target points.
-export default function estimateAffine(sourcePoints: Vector3[], targetPoints: Vector3[]) {
+export default function estimateAffine(
+  sourcePoints: Vector3[],
+  targetPoints: Vector3[],
+  optInfoOut?: { meanError: number },
+) {
+  /* Estimates an affine matrix that transforms from source points to target points. */
   // Number of correspondences
   const N = sourcePoints.length;
 
@@ -30,12 +34,12 @@ export default function estimateAffine(sourcePoints: Vector3[], targetPoints: Vector3[]) {
   const xMatrix = solve(A, b);
   const x = xMatrix.to1DArray();
   const error = Matrix.sub(b, new Matrix(A).mmul(xMatrix)).to1DArray();
+  const meanError = _.mean(error.map((el) => Math.abs(el)));
+  if (optInfoOut) {
+    optInfoOut.meanError = meanError;
+  }
   if (!process.env.IS_TESTING) {
-    console.log(
-      "Affine estimation error: ",
-      error,
-      `(mean=${_.mean(error.map((el) => Math.abs(el)))})`,
-    );
+    console.log("Affine estimation error: ", error, `(mean=${meanError})`);
   }
 
   const affineMatrix = new Matrix([
@@ -51,6 +55,8 @@
 export function estimateAffineMatrix4x4(
   sourcePoints: Vector3[],
   targetPoints: Vector3[],
+  optInfoOut?: { meanError: number },
 ): Matrix4x4 {
-  return estimateAffine(sourcePoints, targetPoints).to1DArray() as any as Matrix4x4;
+  /* Estimates an affine matrix that transforms from source points to target points. */
+  return estimateAffine(sourcePoints, targetPoints, optInfoOut).to1DArray() as any as Matrix4x4;
 }
diff --git a/frontend/javascripts/libs/toast.tsx b/frontend/javascripts/libs/toast.tsx
index 4afbc5664c1..e8e210397a1 100644
--- a/frontend/javascripts/libs/toast.tsx
+++ b/frontend/javascripts/libs/toast.tsx
@@ -35,6 +35,19 @@ type ToastParams = {
   details?: string;
 };
 
+export async function guardedWithErrorToast(fn: () => Promise<unknown>) {
+  try {
+    await fn();
+  } catch (error) {
+    import("libs/error_handling").then((_ErrorHandling) => {
+      const ErrorHandling = _ErrorHandling.default;
+      Toast.error("An unexpected error occurred. Please check the console for details.");
+      console.error(error);
+      ErrorHandling.notify(error as Error);
+    });
+  }
+}
+
 const Toast = {
   // The notificationAPI is designed to be a singleton spawned by the ToastContextMountRoot
   // mounted in the GlobalThemeProvider.
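For context on the estimate_affine.ts change above: the solver fits the 12 unknown entries of a 3x4 affine matrix in a least-squares sense, with each correspondence contributing three linear equations, and meanError is the mean absolute residual of that fit. A usage sketch of the extended signature follows; the sample points are invented, and with four non-coplanar correspondences the fit is exact, so the reported error is approximately zero.

import { estimateAffineMatrix4x4 } from "libs/estimate_affine";
import type { Vector3 } from "oxalis/constants";

const sourcePoints: Vector3[] = [
  [0, 0, 0],
  [100, 0, 0],
  [0, 100, 0],
  [0, 0, 100],
];
// The same tetrahedron, translated by 10 voxels along x.
const targetPoints: Vector3[] = [
  [10, 0, 0],
  [110, 0, 0],
  [10, 100, 0],
  [10, 0, 100],
];

// The optional out-parameter receives the mean absolute residual (in voxels),
// which the composition wizard writes into the new dataset's description.
const info = { meanError: 0 };
const matrix = estimateAffineMatrix4x4(sourcePoints, targetPoints, info);
console.log(matrix, `mean error: ${info.meanError} vx`);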
diff --git a/frontend/javascripts/oxalis/api/wk_dev.ts b/frontend/javascripts/oxalis/api/wk_dev.ts
index 25f6d63521b..f4548080fe5 100644
--- a/frontend/javascripts/oxalis/api/wk_dev.ts
+++ b/frontend/javascripts/oxalis/api/wk_dev.ts
@@ -24,6 +24,9 @@ export const WkDevFlags = {
   meshing: {
     marchingCubeSizeInTargetMag: [64, 64, 64] as Vector3,
   },
+  datasetComposition: {
+    allowThinPlateSplines: false,
+  },
 };
 
 export default class WkDev {
diff --git a/frontend/javascripts/oxalis/model/accessors/dataset_accessor.ts b/frontend/javascripts/oxalis/model/accessors/dataset_accessor.ts
index 46f0df76b90..a65ea04a6e4 100644
--- a/frontend/javascripts/oxalis/model/accessors/dataset_accessor.ts
+++ b/frontend/javascripts/oxalis/model/accessors/dataset_accessor.ts
@@ -722,7 +722,7 @@ function _getOriginalTransformsForLayerOrNull(
   } else if (type === "thin_plate_spline") {
     const { source, target } = transformation.correspondences;
 
-    return createThinPlateSplineTransform(target, source, dataset.dataSource.scale.factor);
+    return createThinPlateSplineTransform(source, target, dataset.dataSource.scale.factor);
   }
 
   console.error(
diff --git a/frontend/javascripts/oxalis/model/helpers/transformation_helpers.ts b/frontend/javascripts/oxalis/model/helpers/transformation_helpers.ts
index 4f0ac826a27..8b9b081f83e 100644
--- a/frontend/javascripts/oxalis/model/helpers/transformation_helpers.ts
+++ b/frontend/javascripts/oxalis/model/helpers/transformation_helpers.ts
@@ -34,6 +34,7 @@ export function createAffineTransformFromMatrix(
 }
 
 export function createAffineTransform(source: Vector3[], target: Vector3[]): Transform {
+  /* Creates an affine transform that transforms from source points to target points. */
   const affineMatrix = estimateAffineMatrix4x4(source, target);
 
   return {
@@ -43,11 +44,20 @@ export function createAffineTransform(source: Vector3[], target: Vector3[]): Transform {
   };
 }
 
+export function checkLandmarksForThinPlateSpline(source: Vector3[], target: Vector3[]) {
+  // Strictly speaking, the TPS transform is not needed here, because it will
+  // be created when the actual dataset is opened. However, if the landmarks
+  // cannot be loaded into a TPS (e.g., because the landmarks are planar and
+  // affine estimation will crash), we want to detect this here automatically.
+  createThinPlateSplineTransform(source, target, [1, 1, 1]);
+}
+
 export function createThinPlateSplineTransform(
   source: Vector3[],
   target: Vector3[],
   scale: Vector3,
 ): Transform {
+  /* Creates a TPS that transforms from source points to target points. */
   const affineMatrix = estimateAffineMatrix4x4(source, target);
   const affineMatrixInv = estimateAffineMatrix4x4(target, source);
diff --git a/frontend/javascripts/oxalis/model/sagas/saga_helpers.ts b/frontend/javascripts/oxalis/model/sagas/saga_helpers.ts
index bc5572b4d74..7586b3ecc93 100644
--- a/frontend/javascripts/oxalis/model/sagas/saga_helpers.ts
+++ b/frontend/javascripts/oxalis/model/sagas/saga_helpers.ts
@@ -78,7 +78,7 @@ export function askUserForLockingActiveMapping(
       cancelText: "Abort Annotation Action",
       width: 600,
       onOk: lockMapping,
-      onCancel: async () => {
+      onCancel: () => {
         reject({
           isMappingLockedIfNeeded: false,
           reason: "User aborted.",
         });
       },
     });
diff --git a/frontend/javascripts/types/api_flow_types.ts b/frontend/javascripts/types/api_flow_types.ts
index c898d7b6eb4..c7f087825a2 100644
--- a/frontend/javascripts/types/api_flow_types.ts
+++ b/frontend/javascripts/types/api_flow_types.ts
@@ -167,6 +167,9 @@ export type MutableAPIDatasetId = {
   owningOrganization: string;
   name: string;
 };
+export function areDatasetsIdentical(a: APIDatasetId, b: APIDatasetId) {
+  return a.owningOrganization === b.owningOrganization && a.name === b.name;
+}
 export type APIDatasetId = Readonly<MutableAPIDatasetId>;
 export type APIDatasetDetails = {
   readonly species?: string;
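Finally, a short usage sketch for the new areDatasetsIdentical helper; the ids below are invented. The composition wizard uses it above to attach the estimated transform only to layers belonging to the first (source) dataset, since dataset ids are value objects identified by (owningOrganization, name) rather than by object reference.

import { areDatasetsIdentical, type APIDatasetId } from "types/api_flow_types";

const a: APIDatasetId = { owningOrganization: "sample_org", name: "dataset_2021" };
const b: APIDatasetId = { owningOrganization: "sample_org", name: "dataset_2021" };

console.log(a === b); // false (different object references)
console.log(areDatasetsIdentical(a, b)); // true (same organization and name)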