Fix volume undo for unfetched data (#5608)
* add backend-fetched data to the undo stack and apply it on demand

* fix: reenable drawing

* remove discussion nodes and add changelog entry

* Update frontend/javascripts/oxalis/model/bucket_data_handling/bucket.js

Co-authored-by: Daniel <[email protected]>

* add comment explaining deferral of computation

Co-authored-by: Daniel <[email protected]>
MichaelBuessemeyer and daniel-wer authored Aug 9, 2021
1 parent c6109b3 commit 728ed1b
Showing 7 changed files with 106 additions and 19 deletions.
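In short: when the user draws on a bucket whose backend data has not arrived yet, the undo snapshot is taken from an all-zero placeholder, and undoing later would write those zeros back over the meanwhile-fetched backend voxels. The commit attaches the fetched data to the undo entry via a promise and merges it in only when an undo actually happens. A self-contained simulation of the idea (hypothetical names and values, not webknossos code):

```js
// A "bucket" is 4 voxels here for brevity.
const undoSnapshot = new Uint8Array(4); // snapshot taken before the fetch finished: all zeros
let snapshotBackendData = null;

const backendFetch = Promise.resolve(new Uint8Array([7, 7, 7, 7]));
backendFetch.then(data => {
  snapshotBackendData = data; // attached to the undo entry once it arrives
});

function undo() {
  const restored = new Uint8Array(undoSnapshot);
  if (snapshotBackendData != null) {
    // The fix: unannotated voxels (0) fall back to the backend values
    // instead of overwriting them with nothing.
    for (let i = 0; i < restored.length; ++i) {
      restored[i] = restored[i] || snapshotBackendData[i];
    }
  }
  return restored;
}

setTimeout(() => console.log(undo()), 0); // Uint8Array(4) [ 7, 7, 7, 7 ]
```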
1 change: 1 addition & 0 deletions CHANGELOG.unreleased.md
@@ -22,6 +22,7 @@ For upgrade instructions, please check the [migration guide](MIGRATIONS.released
- Fix that active segment and node id were not shown in status bar when being in a non-hybrid annotation. [#5638](https://github.com/scalableminds/webknossos/pull/5638)
- Fix that "Compute Mesh File" button was enabled in scenarios where it should not be supported (e.g., when no segmentation layer exists). [#5648](https://github.com/scalableminds/webknossos/pull/5648)
- Fixed a bug where an authentication error was shown when viewing the meshes tab while not logged in. [#5647](https://github.com/scalableminds/webknossos/pull/5647)
- Fixed a bug where undoing volume annotations could overwrite backend data of not-yet-loaded magnifications with empty data. [#5608](https://github.com/scalableminds/webknossos/pull/5608)

### Removed
-

@@ -21,10 +21,12 @@ export type CopySegmentationLayerAction = {
type: "COPY_SEGMENTATION_LAYER",
source: "previousLayer" | "nextLayer",
};
export type MaybeBucketLoadedPromise = null | Promise<BucketDataArray>;
export type AddBucketToUndoAction = {
type: "ADD_BUCKET_TO_UNDO",
zoomedBucketAddress: Vector4,
bucketData: BucketDataArray,
maybeBucketLoadedPromise: MaybeBucketLoadedPromise,
};
type UpdateDirectionAction = { type: "UPDATE_DIRECTION", centroid: Vector3 };
type ResetContourAction = { type: "RESET_CONTOUR" };
@@ -141,7 +143,13 @@ export const setContourTracingModeAction = (mode: ContourMode): SetContourTracin
export const addBucketToUndoAction = (
zoomedBucketAddress: Vector4,
bucketData: BucketDataArray,
): AddBucketToUndoAction => ({ type: "ADD_BUCKET_TO_UNDO", zoomedBucketAddress, bucketData });
maybeBucketLoadedPromise: MaybeBucketLoadedPromise,
): AddBucketToUndoAction => ({
type: "ADD_BUCKET_TO_UNDO",
zoomedBucketAddress,
bucketData,
maybeBucketLoadedPromise,
});
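For orientation, a hedged usage sketch of the extended creator (the third argument is the new promise; `bucket` and `dataClone` are stand-ins for what the caller in bucket.js below provides):

```js
// Hypothetical call site mirroring the new three-argument signature.
const maybeBucketLoadedPromise = new Promise(resolve =>
  bucket.once("bucketLoaded", resolve),
);
Store.dispatch(addBucketToUndoAction(bucket.zoomedAddress, dataClone, maybeBucketLoadedPromise));
```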

export const inferSegmentationInViewportAction = (
position: Vector3,
28 changes: 19 additions & 9 deletions frontend/javascripts/oxalis/model/bucket_data_handling/bucket.js
@@ -240,15 +240,15 @@ export class DataBucket {
return { isVoxelOutside, neighbourBucketAddress, adjustedVoxel };
};

getCopyOfData(): BucketDataArray {
const bucketData = this.getOrCreateData();
getCopyOfData(): { dataClone: BucketDataArray, triggeredBucketFetch: boolean } {
const { data: bucketData, triggeredBucketFetch } = this.getOrCreateData();
const TypedArrayClass = getConstructorForElementClass(this.elementClass)[0];
const dataClone = new TypedArrayClass(bucketData);
return dataClone;
return { dataClone, triggeredBucketFetch };
}

label(labelFunc: BucketDataArray => void) {
const bucketData = this.getOrCreateData();
const { data: bucketData } = this.getOrCreateData();
this.markAndAddBucketForUndo();
labelFunc(bucketData);
this.throttledTriggerLabeled();
@@ -258,7 +258,16 @@
this.dirty = true;
if (!bucketsAlreadyInUndoState.has(this)) {
bucketsAlreadyInUndoState.add(this);
Store.dispatch(addBucketToUndoAction(this.zoomedAddress, this.getCopyOfData()));
let maybeBucketLoadedPromise = null;
const { dataClone, triggeredBucketFetch } = this.getCopyOfData();
if (triggeredBucketFetch) {
maybeBucketLoadedPromise = new Promise((resolve, _reject) => {
this.once("bucketLoaded", resolve);
});
}
Store.dispatch(
addBucketToUndoAction(this.zoomedAddress, dataClone, maybeBucketLoadedPromise),
);
}
}
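The promise resolves with the fetched typed array because the "bucketLoaded" event now carries the data payload (see the `trigger("bucketLoaded", data)` change further down). A minimal self-contained sketch of that pairing, assuming an event mixin comparable to the one `DataBucket` uses:

```js
// Tiny stand-in emitter; DataBucket mixes in a comparable trigger/once API.
class TinyEmitter {
  listeners = {};
  once(event, cb) {
    (this.listeners[event] = this.listeners[event] || []).push(cb);
  }
  trigger(event, payload) {
    (this.listeners[event] || []).splice(0).forEach(cb => cb(payload));
  }
}

const bucket = new TinyEmitter();
const bucketLoaded = new Promise(resolve => bucket.once("bucketLoaded", resolve));
// Later, when the backend response arrives:
bucket.trigger("bucketLoaded", new Uint8Array([7, 7, 7]));
bucketLoaded.then(data => console.log(data)); // Uint8Array(3) [ 7, 7, 7 ]
```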

@@ -294,16 +303,17 @@
this.accessed = false;
}

getOrCreateData(): BucketDataArray {
getOrCreateData(): { data: BucketDataArray, triggeredBucketFetch: boolean } {
let triggeredBucketFetch = false;
if (this.data == null) {
const [TypedArrayClass, channelCount] = getConstructorForElementClass(this.elementClass);
this.data = new TypedArrayClass(channelCount * Constants.BUCKET_SIZE);
if (!this.isMissing()) {
triggeredBucketFetch = true;
this.temporalBucketManager.addBucket(this);
}
}

return this.getData();
return { data: this.getData(), triggeredBucketFetch };
}

pull(): void {
@@ -346,7 +356,7 @@
} else {
this.data = data;
}
this.trigger("bucketLoaded");
this.trigger("bucketLoaded", data);
this.state = BucketStateEnum.LOADED;
break;
default:

@@ -412,7 +412,7 @@ class DataCube {
);
}
const seedVoxelIndex = this.getVoxelIndex(seedVoxel, zoomStep);
const sourceCellId = seedBucket.getOrCreateData()[seedVoxelIndex];
const sourceCellId = seedBucket.getOrCreateData().data[seedVoxelIndex];
if (sourceCellId === cellId) {
return null;
}
@@ -428,7 +428,7 @@
) {
continue;
}
const bucketData = currentBucket.getOrCreateData();
const { data: bucketData } = currentBucket.getOrCreateData();
const initialVoxelIndex = this.getVoxelIndexByVoxelOffset(initialVoxelInBucket);
if (bucketData[initialVoxelIndex] !== sourceCellId) {
// Ignoring neighbour buckets whose cellId at the initial voxel does not match the source cell id.
78 changes: 73 additions & 5 deletions frontend/javascripts/oxalis/model/sagas/save_saga.js
@@ -21,6 +21,7 @@ import {
type AddBucketToUndoAction,
type FinishAnnotationStrokeAction,
type ImportVolumeTracingAction,
type MaybeBucketLoadedPromise,
} from "oxalis/model/actions/volumetracing_actions";
import {
_all,
@@ -75,7 +76,12 @@ import { enforceSkeletonTracing } from "../accessors/skeletontracing_accessor";

const byteArrayToLz4Array = createWorker(compressLz4Block);

type UndoBucket = { zoomedBucketAddress: Vector4, data: Uint8Array };
type UndoBucket = {
zoomedBucketAddress: Vector4,
data: Uint8Array,
backendData?: Uint8Array,
maybeBucketLoadedPromise: MaybeBucketLoadedPromise,
};
type VolumeAnnotationBatch = Array<UndoBucket>;
type SkeletonUndoState = { type: "skeleton", data: SkeletonTracing };
type VolumeUndoState = { type: "volume", data: VolumeAnnotationBatch };
@@ -133,12 +139,13 @@ export function* collectUndoStates(): Saga<void> {
}
previousAction = skeletonUserAction;
} else if (addBucketToUndoAction) {
const { zoomedBucketAddress, bucketData } = addBucketToUndoAction;
const { zoomedBucketAddress, bucketData, maybeBucketLoadedPromise } = addBucketToUndoAction;
pendingCompressions.push(
yield* fork(
compressBucketAndAddToUndoBatch,
zoomedBucketAddress,
bucketData,
maybeBucketLoadedPromise,
currentVolumeAnnotationBatch,
),
);
@@ -207,6 +214,7 @@ function* getSkeletonTracingToUndoState(
function* compressBucketAndAddToUndoBatch(
zoomedBucketAddress: Vector4,
bucketData: BucketDataArray,
maybeBucketLoadedPromise: MaybeBucketLoadedPromise,
undoBatch: VolumeAnnotationBatch,
): Saga<void> {
const bucketDataAsByteArray = new Uint8Array(
@@ -216,7 +224,26 @@
);
const compressedBucketData = yield* call(byteArrayToLz4Array, bucketDataAsByteArray, true);
if (compressedBucketData != null) {
undoBatch.push({ zoomedBucketAddress, data: compressedBucketData });
const volumeUndoPart: UndoBucket = {
zoomedBucketAddress,
data: compressedBucketData,
maybeBucketLoadedPromise,
};
if (maybeBucketLoadedPromise != null) {
maybeBucketLoadedPromise.then(async backendBucketData => {
// Once the backend data is fetched, do not merge it into the already saved undo data right away,
// since the merged result is only needed if the volume action is actually undone. Merging is also
// more expensive than simply storing the backend data, so the merge is deferred until an undo occurs.
const backendDataAsByteArray = new Uint8Array(
backendBucketData.buffer,
backendBucketData.byteOffset,
backendBucketData.byteLength,
);
const compressedBackendData = await byteArrayToLz4Array(backendDataAsByteArray, true);
volumeUndoPart.backendData = compressedBackendData;
});
}
undoBatch.push(volumeUndoPart);
}
}
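Note the ordering: `undoBatch.push(volumeUndoPart)` runs synchronously, while `backendData` is attached whenever the fetch promise settles; the undo code below handles both orders. A self-contained sketch of this fill-in-later pattern (hypothetical names, compression elided):

```js
const undoBatch = [];
const fetchPromise = Promise.resolve(new Uint8Array([7, 7, 7]));
const entry = { data: new Uint8Array([0, 3, 0]), backendData: undefined };
fetchPromise.then(backendData => {
  entry.backendData = backendData; // the real code LZ4-compresses first
});
undoBatch.push(entry); // pushed before the promise callback can run
```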

@@ -274,6 +301,15 @@ function* applyStateOfStack(
}
}

function mergeDataWithBackendDataInPlace(
originalData: BucketDataArray,
backendData: BucketDataArray,
) {
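// User-labeled voxels are non-zero and win; zeros (unannotated) fall back to the backend value.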
for (let i = 0; i < originalData.length; ++i) {
originalData[i] = originalData[i] || backendData[i];
}
}
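A quick worked example of the merge semantics (hypothetical values, assuming the function above is in scope):

```js
const local = new Uint8Array([0, 3, 0, 5]); // 0 = unannotated
const backend = new Uint8Array([7, 7, 7, 7]);
mergeDataWithBackendDataInPlace(local, backend);
// local is now Uint8Array(4) [ 7, 3, 7, 5 ]
```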

function* applyAndGetRevertingVolumeBatch(
volumeAnnotationBatch: VolumeAnnotationBatch,
): Saga<VolumeUndoState> {
@@ -283,19 +319,51 @@
}
const { cube } = segmentationLayer;
const allCompressedBucketsOfCurrentState: VolumeAnnotationBatch = [];
for (const { zoomedBucketAddress, data: compressedBucketData } of volumeAnnotationBatch) {
for (const volumeUndoBucket of volumeAnnotationBatch) {
const {
zoomedBucketAddress,
data: compressedBucketData,
backendData: compressedBackendData,
} = volumeUndoBucket;
let { maybeBucketLoadedPromise } = volumeUndoBucket;
const bucket = cube.getOrCreateBucket(zoomedBucketAddress);
if (bucket.type === "null") {
continue;
}
const bucketData = bucket.getData();
if (compressedBackendData != null) {
// If the backend data for the bucket has been fetched in the meantime,
// merge it into the current data before adding it to the undo batch.
const decompressedBackendData = yield* call(
byteArrayToLz4Array,
compressedBackendData,
false,
);
if (decompressedBackendData) {
mergeDataWithBackendDataInPlace(bucketData, decompressedBackendData);
}
maybeBucketLoadedPromise = null;
}
yield* call(
compressBucketAndAddToUndoBatch,
zoomedBucketAddress,
bucketData,
maybeBucketLoadedPromise,
allCompressedBucketsOfCurrentState,
);
const decompressedBucketData = yield* call(byteArrayToLz4Array, compressedBucketData, false);
let decompressedBucketData = null;
if (compressedBackendData != null) {
let decompressedBackendData;
[decompressedBucketData, decompressedBackendData] = yield _all([
_call(byteArrayToLz4Array, compressedBucketData, false),
_call(byteArrayToLz4Array, compressedBackendData, false),
]);
if (decompressedBucketData && decompressedBackendData) {
mergeDataWithBackendDataInPlace(decompressedBucketData, decompressedBackendData);
}
} else {
decompressedBucketData = yield* call(byteArrayToLz4Array, compressedBucketData, false);
}
if (decompressedBucketData) {
// Set the new bucket data so that the bucket is added directly to the push queue.
cube.setBucketData(zoomedBucketAddress, decompressedBucketData);

@@ -332,7 +332,7 @@ export function applyVoxelMap(
if (bucket.type === "null") {
continue;
}
const data = bucket.getOrCreateData();
const { data } = bucket.getOrCreateData();
for (let firstDim = 0; firstDim < constants.BUCKET_WIDTH; firstDim++) {
for (let secondDim = 0; secondDim < constants.BUCKET_WIDTH; secondDim++) {
if (voxelMap[firstDim * constants.BUCKET_WIDTH + secondDim] === 1) {

@@ -472,7 +472,7 @@ test("A labeledVoxelMap should be applied correctly", t => {
expectedBucketData[addr] = 1;
});
applyVoxelMap(labeledVoxelsMap, cube, 1, get3DAddress, 1, 2, true);
const labeledBucketData = bucket.getOrCreateData();
const { data: labeledBucketData } = bucket.getOrCreateData();
for (let firstDim = 0; firstDim < Constants.BUCKET_WIDTH; firstDim++) {
for (let secondDim = 0; secondDim < Constants.BUCKET_WIDTH; secondDim++) {
const addr = cube.getVoxelIndex([firstDim, secondDim, 5], 0);