diff --git a/CHANGELOG.unreleased.md b/CHANGELOG.unreleased.md
index 91d64117dcc..1e2421fe0dc 100644
--- a/CHANGELOG.unreleased.md
+++ b/CHANGELOG.unreleased.md
@@ -22,6 +22,7 @@ For upgrade instructions, please check the [migration guide](MIGRATIONS.released
 - Fix that active segment and node id were not shown in status bar when being in a non-hybrid annotation. [#5638](https://github.com/scalableminds/webknossos/pull/5638)
 - Fix that "Compute Mesh File" button was enabled in scenarios where it should not be supported (e.g., when no segmentation layer exists). [#5648](https://github.com/scalableminds/webknossos/pull/5648)
 - Fixed a bug where an authentication error was shown when viewing the meshes tab while not logged in. [#5647](https://github.com/scalableminds/webknossos/pull/5647)
+- Fixed that undoing volume annotations could overwrite the backend data of magnifications that were not loaded yet with empty data. [#5608](https://github.com/scalableminds/webknossos/pull/5608)
 
 ### Removed
 -
diff --git a/frontend/javascripts/oxalis/model/actions/volumetracing_actions.js b/frontend/javascripts/oxalis/model/actions/volumetracing_actions.js
index 8684f7234e7..b851964d80d 100644
--- a/frontend/javascripts/oxalis/model/actions/volumetracing_actions.js
+++ b/frontend/javascripts/oxalis/model/actions/volumetracing_actions.js
@@ -21,10 +21,12 @@ export type CopySegmentationLayerAction = {
   type: "COPY_SEGMENTATION_LAYER",
   source: "previousLayer" | "nextLayer",
 };
+export type MaybeBucketLoadedPromise = null | Promise<BucketDataArray>;
 export type AddBucketToUndoAction = {
   type: "ADD_BUCKET_TO_UNDO",
   zoomedBucketAddress: Vector4,
   bucketData: BucketDataArray,
+  maybeBucketLoadedPromise: MaybeBucketLoadedPromise,
 };
 type UpdateDirectionAction = { type: "UPDATE_DIRECTION", centroid: Vector3 };
 type ResetContourAction = { type: "RESET_CONTOUR" };
@@ -141,7 +143,13 @@ export const setContourTracingModeAction = (mode: ContourMode): SetContourTracin
 export const addBucketToUndoAction = (
   zoomedBucketAddress: Vector4,
   bucketData: BucketDataArray,
-): AddBucketToUndoAction => ({ type: "ADD_BUCKET_TO_UNDO", zoomedBucketAddress, bucketData });
+  maybeBucketLoadedPromise: MaybeBucketLoadedPromise,
+): AddBucketToUndoAction => ({
+  type: "ADD_BUCKET_TO_UNDO",
+  zoomedBucketAddress,
+  bucketData,
+  maybeBucketLoadedPromise,
+});
 
 export const inferSegmentationInViewportAction = (
   position: Vector3,
diff --git a/frontend/javascripts/oxalis/model/bucket_data_handling/bucket.js b/frontend/javascripts/oxalis/model/bucket_data_handling/bucket.js
index e92d0c94b3c..58ea99bf26e 100644
--- a/frontend/javascripts/oxalis/model/bucket_data_handling/bucket.js
+++ b/frontend/javascripts/oxalis/model/bucket_data_handling/bucket.js
@@ -240,15 +240,15 @@ export class DataBucket {
     return { isVoxelOutside, neighbourBucketAddress, adjustedVoxel };
   };
 
-  getCopyOfData(): BucketDataArray {
-    const bucketData = this.getOrCreateData();
+  getCopyOfData(): { dataClone: BucketDataArray, triggeredBucketFetch: boolean } {
+    const { data: bucketData, triggeredBucketFetch } = this.getOrCreateData();
     const TypedArrayClass = getConstructorForElementClass(this.elementClass)[0];
     const dataClone = new TypedArrayClass(bucketData);
-    return dataClone;
+    return { dataClone, triggeredBucketFetch };
   }
 
   label(labelFunc: BucketDataArray => void) {
-    const bucketData = this.getOrCreateData();
+    const { data: bucketData } = this.getOrCreateData();
     this.markAndAddBucketForUndo();
     labelFunc(bucketData);
     this.throttledTriggerLabeled();
@@ -258,7 +258,16 @@ export class DataBucket {
     this.dirty = true;
     if (!bucketsAlreadyInUndoState.has(this)) {
       bucketsAlreadyInUndoState.add(this);
-      Store.dispatch(addBucketToUndoAction(this.zoomedAddress, this.getCopyOfData()));
+      let maybeBucketLoadedPromise = null;
+      const { dataClone, triggeredBucketFetch } = this.getCopyOfData();
+      if (triggeredBucketFetch) {
+        maybeBucketLoadedPromise = new Promise((resolve, _reject) => {
+          this.once("bucketLoaded", resolve);
+        });
+      }
+      Store.dispatch(
+        addBucketToUndoAction(this.zoomedAddress, dataClone, maybeBucketLoadedPromise),
+      );
     }
   }
 
@@ -294,16 +303,17 @@ export class DataBucket {
     this.accessed = false;
   }
 
-  getOrCreateData(): BucketDataArray {
+  getOrCreateData(): { data: BucketDataArray, triggeredBucketFetch: boolean } {
+    let triggeredBucketFetch = false;
     if (this.data == null) {
       const [TypedArrayClass, channelCount] = getConstructorForElementClass(this.elementClass);
       this.data = new TypedArrayClass(channelCount * Constants.BUCKET_SIZE);
       if (!this.isMissing()) {
+        triggeredBucketFetch = true;
        this.temporalBucketManager.addBucket(this);
       }
     }
-
-    return this.getData();
+    return { data: this.getData(), triggeredBucketFetch };
   }
 
   pull(): void {
@@ -346,7 +356,7 @@ export class DataBucket {
       } else {
         this.data = data;
       }
-      this.trigger("bucketLoaded");
+      this.trigger("bucketLoaded", data);
       this.state = BucketStateEnum.LOADED;
       break;
     default:
diff --git a/frontend/javascripts/oxalis/model/bucket_data_handling/data_cube.js b/frontend/javascripts/oxalis/model/bucket_data_handling/data_cube.js
index 34907b3e887..79d3c99d4f3 100644
--- a/frontend/javascripts/oxalis/model/bucket_data_handling/data_cube.js
+++ b/frontend/javascripts/oxalis/model/bucket_data_handling/data_cube.js
@@ -412,7 +412,7 @@ class DataCube {
      );
    }
    const seedVoxelIndex = this.getVoxelIndex(seedVoxel, zoomStep);
-    const sourceCellId = seedBucket.getOrCreateData()[seedVoxelIndex];
+    const sourceCellId = seedBucket.getOrCreateData().data[seedVoxelIndex];
    if (sourceCellId === cellId) {
      return null;
    }
@@ -428,7 +428,7 @@ class DataCube {
      ) {
        continue;
      }
-      const bucketData = currentBucket.getOrCreateData();
+      const { data: bucketData } = currentBucket.getOrCreateData();
      const initialVoxelIndex = this.getVoxelIndexByVoxelOffset(initialVoxelInBucket);
      if (bucketData[initialVoxelIndex] !== sourceCellId) {
        // Ignoring neighbour buckets whose cellId at the initial voxel does not match the source cell id.
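Note (editorial aside, not part of the patch): the markAndAddBucketForUndo change above wraps the one-shot "bucketLoaded" event into a promise so that the undo logic can later await the data arriving from the backend. A minimal standalone sketch of that pattern, where `bucket` is a hypothetical stand-in for a DataBucket that only exposes `once("bucketLoaded", callback)`:

```js
// Sketch of the promise-wrapping used in markAndAddBucketForUndo above.
// Returns null when the bucket data already existed locally, otherwise a promise
// that resolves with whatever is passed to trigger("bucketLoaded", data).
function getMaybeBucketLoadedPromise(bucket, triggeredBucketFetch) {
  if (!triggeredBucketFetch) {
    // No fetch was kicked off, so there is no backend data to wait for.
    return null;
  }
  return new Promise(resolve => {
    bucket.once("bucketLoaded", resolve);
  });
}
```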
diff --git a/frontend/javascripts/oxalis/model/sagas/save_saga.js b/frontend/javascripts/oxalis/model/sagas/save_saga.js
index c7315e457ff..a89fa6549f2 100644
--- a/frontend/javascripts/oxalis/model/sagas/save_saga.js
+++ b/frontend/javascripts/oxalis/model/sagas/save_saga.js
@@ -21,6 +21,7 @@ import {
   type AddBucketToUndoAction,
   type FinishAnnotationStrokeAction,
   type ImportVolumeTracingAction,
+  type MaybeBucketLoadedPromise,
 } from "oxalis/model/actions/volumetracing_actions";
 import {
   _all,
@@ -75,7 +76,12 @@ import { enforceSkeletonTracing } from "../accessors/skeletontracing_accessor";
 
 const byteArrayToLz4Array = createWorker(compressLz4Block);
 
-type UndoBucket = { zoomedBucketAddress: Vector4, data: Uint8Array };
+type UndoBucket = {
+  zoomedBucketAddress: Vector4,
+  data: Uint8Array,
+  backendData?: Uint8Array,
+  maybeBucketLoadedPromise: MaybeBucketLoadedPromise,
+};
 type VolumeAnnotationBatch = Array<UndoBucket>;
 type SkeletonUndoState = { type: "skeleton", data: SkeletonTracing };
 type VolumeUndoState = { type: "volume", data: VolumeAnnotationBatch };
@@ -133,12 +139,13 @@ export function* collectUndoStates(): Saga<void> {
       }
       previousAction = skeletonUserAction;
     } else if (addBucketToUndoAction) {
-      const { zoomedBucketAddress, bucketData } = addBucketToUndoAction;
+      const { zoomedBucketAddress, bucketData, maybeBucketLoadedPromise } = addBucketToUndoAction;
       pendingCompressions.push(
         yield* fork(
           compressBucketAndAddToUndoBatch,
           zoomedBucketAddress,
           bucketData,
+          maybeBucketLoadedPromise,
           currentVolumeAnnotationBatch,
         ),
       );
@@ -207,6 +214,7 @@ function* getSkeletonTracingToUndoState(
 function* compressBucketAndAddToUndoBatch(
   zoomedBucketAddress: Vector4,
   bucketData: BucketDataArray,
+  maybeBucketLoadedPromise: MaybeBucketLoadedPromise,
   undoBatch: VolumeAnnotationBatch,
 ): Saga<void> {
   const bucketDataAsByteArray = new Uint8Array(
@@ -216,7 +224,26 @@ function* compressBucketAndAddToUndoBatch(
   );
   const compressedBucketData = yield* call(byteArrayToLz4Array, bucketDataAsByteArray, true);
   if (compressedBucketData != null) {
-    undoBatch.push({ zoomedBucketAddress, data: compressedBucketData });
+    const volumeUndoPart: UndoBucket = {
+      zoomedBucketAddress,
+      data: compressedBucketData,
+      maybeBucketLoadedPromise,
+    };
+    if (maybeBucketLoadedPromise != null) {
+      maybeBucketLoadedPromise.then(async backendBucketData => {
+        // Once the backend data is fetched, do not merge it into the already saved undo data right away,
+        // as that merge is only needed when the volume action is actually undone. Additionally, merging is more
+        // expensive than saving the backend data. Thus, the data is only merged when an undo action needs it.
+        const backendDataAsByteArray = new Uint8Array(
+          backendBucketData.buffer,
+          backendBucketData.byteOffset,
+          backendBucketData.byteLength,
+        );
+        const compressedBackendData = await byteArrayToLz4Array(backendDataAsByteArray, true);
+        volumeUndoPart.backendData = compressedBackendData;
+      });
+    }
+    undoBatch.push(volumeUndoPart);
   }
 }
 
@@ -274,6 +301,15 @@ function* applyStateOfStack(
   }
 }
 
+function mergeDataWithBackendDataInPlace(
+  originalData: BucketDataArray,
+  backendData: BucketDataArray,
+) {
+  for (let i = 0; i < originalData.length; ++i) {
+    originalData[i] = originalData[i] || backendData[i];
+  }
+}
+
 function* applyAndGetRevertingVolumeBatch(
   volumeAnnotationBatch: VolumeAnnotationBatch,
 ): Saga<VolumeUndoState> {
@@ -283,19 +319,51 @@ function* applyAndGetRevertingVolumeBatch(
   }
   const { cube } = segmentationLayer;
   const allCompressedBucketsOfCurrentState: VolumeAnnotationBatch = [];
-  for (const { zoomedBucketAddress, data: compressedBucketData } of volumeAnnotationBatch) {
+  for (const volumeUndoBucket of volumeAnnotationBatch) {
+    const {
+      zoomedBucketAddress,
+      data: compressedBucketData,
+      backendData: compressedBackendData,
+    } = volumeUndoBucket;
+    let { maybeBucketLoadedPromise } = volumeUndoBucket;
     const bucket = cube.getOrCreateBucket(zoomedBucketAddress);
     if (bucket.type === "null") {
       continue;
     }
     const bucketData = bucket.getData();
+    if (compressedBackendData != null) {
+      // If the backend data for the bucket has been fetched in the meantime,
+      // we can first merge the data with the current data and then add this to the undo batch.
+      const decompressedBackendData = yield* call(
+        byteArrayToLz4Array,
+        compressedBackendData,
+        false,
+      );
+      if (decompressedBackendData) {
+        mergeDataWithBackendDataInPlace(bucketData, decompressedBackendData);
+      }
+      maybeBucketLoadedPromise = null;
+    }
     yield* call(
       compressBucketAndAddToUndoBatch,
       zoomedBucketAddress,
       bucketData,
+      maybeBucketLoadedPromise,
       allCompressedBucketsOfCurrentState,
     );
-    const decompressedBucketData = yield* call(byteArrayToLz4Array, compressedBucketData, false);
+    let decompressedBucketData = null;
+    if (compressedBackendData != null) {
+      let decompressedBackendData;
+      [decompressedBucketData, decompressedBackendData] = yield _all([
+        _call(byteArrayToLz4Array, compressedBucketData, false),
+        _call(byteArrayToLz4Array, compressedBackendData, false),
+      ]);
+      if (decompressedBucketData && decompressedBackendData) {
+        mergeDataWithBackendDataInPlace(decompressedBucketData, decompressedBackendData);
+      }
+    } else {
+      decompressedBucketData = yield* call(byteArrayToLz4Array, compressedBucketData, false);
+    }
     if (decompressedBucketData) {
       // Set the new bucket data to add the bucket directly to the pushqueue.
       cube.setBucketData(zoomedBucketAddress, decompressedBucketData);
diff --git a/frontend/javascripts/oxalis/model/volumetracing/volume_annotation_sampling.js b/frontend/javascripts/oxalis/model/volumetracing/volume_annotation_sampling.js
index 4e4646a7546..deb23354120 100644
--- a/frontend/javascripts/oxalis/model/volumetracing/volume_annotation_sampling.js
+++ b/frontend/javascripts/oxalis/model/volumetracing/volume_annotation_sampling.js
@@ -332,7 +332,7 @@ export function applyVoxelMap(
     if (bucket.type === "null") {
       continue;
     }
-    const data = bucket.getOrCreateData();
+    const { data } = bucket.getOrCreateData();
     for (let firstDim = 0; firstDim < constants.BUCKET_WIDTH; firstDim++) {
       for (let secondDim = 0; secondDim < constants.BUCKET_WIDTH; secondDim++) {
         if (voxelMap[firstDim * constants.BUCKET_WIDTH + secondDim] === 1) {
diff --git a/frontend/javascripts/test/model/volumetracing/volume_annotation_sampling.spec.js b/frontend/javascripts/test/model/volumetracing/volume_annotation_sampling.spec.js
index 68ddbf8086d..8bdb1ca8f81 100644
--- a/frontend/javascripts/test/model/volumetracing/volume_annotation_sampling.spec.js
+++ b/frontend/javascripts/test/model/volumetracing/volume_annotation_sampling.spec.js
@@ -472,7 +472,7 @@ test("A labeledVoxelMap should be applied correctly", t => {
     expectedBucketData[addr] = 1;
   });
   applyVoxelMap(labeledVoxelsMap, cube, 1, get3DAddress, 1, 2, true);
-  const labeledBucketData = bucket.getOrCreateData();
+  const { data: labeledBucketData } = bucket.getOrCreateData();
   for (let firstDim = 0; firstDim < Constants.BUCKET_WIDTH; firstDim++) {
     for (let secondDim = 0; secondDim < Constants.BUCKET_WIDTH; secondDim++) {
       const addr = cube.getVoxelIndex([firstDim, secondDim, 5], 0);
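Closing note (editorial aside, not part of the patch): mergeDataWithBackendDataInPlace keeps every voxel the user has already labeled (any non-zero value in the local copy) and only fills the unlabeled zero voxels from the fetched backend bucket. This is why undoing a stroke no longer overwrites backend data of magnifications that were not loaded at the time of the stroke. A small illustration with hypothetical 8-voxel buckets:

```js
// Same merge rule as mergeDataWithBackendDataInPlace in save_saga.js:
// a locally labeled voxel (non-zero) wins, an unlabeled voxel (0) is taken from the backend.
function mergeDataWithBackendDataInPlace(originalData, backendData) {
  for (let i = 0; i < originalData.length; ++i) {
    originalData[i] = originalData[i] || backendData[i];
  }
}

// Hypothetical example: the user labeled voxels 1 and 2 with cell id 7
// before the backend bucket had arrived.
const localCopy = new Uint8Array([0, 7, 7, 0, 0, 0, 0, 0]);
const backendBucket = new Uint8Array([3, 3, 3, 3, 0, 0, 4, 4]);
mergeDataWithBackendDataInPlace(localCopy, backendBucket);
// localCopy is now [3, 7, 7, 3, 0, 0, 4, 4]: the stroke is preserved,
// every other voxel comes from the backend instead of being emptied.
```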