diff --git a/packages/scatterbrain/package.json b/packages/scatterbrain/package.json
index 5376029..2b27a38 100644
--- a/packages/scatterbrain/package.json
+++ b/packages/scatterbrain/package.json
@@ -1,6 +1,6 @@
 {
     "name": "@alleninstitute/vis-scatterbrain",
-    "version": "0.0.5",
+    "version": "0.0.6",
     "contributors": [
         {
             "name": "Lane Sawyer",
diff --git a/packages/scatterbrain/src/abstract/async-frame.ts b/packages/scatterbrain/src/abstract/async-frame.ts
index ea32ba4..4e0a5a9 100644
--- a/packages/scatterbrain/src/abstract/async-frame.ts
+++ b/packages/scatterbrain/src/abstract/async-frame.ts
@@ -87,12 +87,15 @@ export function beginFrame<
     const abort = new AbortController();
     const queue: Item[] = [...items];
     const taskCancelCallbacks: Array<() => void> = [];
-    const fancy = (itemToRender: Item, maybe: Record) => {
-        if (isPrepared(maybe)) {
+    const renderItemWrapper = (itemToRender: Item, maybe: Record) => {
+        if (isPrepared(maybe) && !abort.signal.aborted) {
             renderItem(itemToRender, dataset, settings, maybe);
         }
     };
     const reportStatus = (event: AsyncFrameEvent, synchronous: boolean) => {
+        if (event.status !== 'cancelled' && abort.signal.aborted) {
+            return;
+        }
         // we want to report our status, however the flow of events can be confusing -
         // our callers anticipate an asynchronous (long running) frame to be started,
         // but there are scenarios in which the whole thing is completely synchronous
@@ -129,7 +132,7 @@ export function beginFrame<
         try {
             const result = mutableCache.cacheAndUse(
                 requestsForItem(itemToRender, dataset, settings, abort.signal),
-                partial(fancy, itemToRender),
+                partial(renderItemWrapper, itemToRender),
                 toCacheKey,
                 () => reportStatus({ status: 'progress', dataset, renderedItems: [itemToRender] }, synchronous)
             );
@@ -165,8 +168,8 @@ export function beginFrame<
     }
     return {
         cancelFrame: (reason?: string) => {
-            taskCancelCallbacks.forEach((cancelMe) => cancelMe());
             abort.abort(new DOMException(reason, 'AbortError'));
+            taskCancelCallbacks.forEach((cancelMe) => cancelMe());
             clearInterval(interval);
             reportStatus({ status: 'cancelled' }, true);
         },
diff --git a/packages/scatterbrain/src/abstract/render-server.ts b/packages/scatterbrain/src/abstract/render-server.ts
index 2bf3b32..e2db538 100644
--- a/packages/scatterbrain/src/abstract/render-server.ts
+++ b/packages/scatterbrain/src/abstract/render-server.ts
@@ -131,6 +131,7 @@ export class RenderServer {
     private prepareToRenderToClient(client: Client) {
         const previousEntry = this.clients.get(client);
         if (previousEntry) {
+            previousEntry.updateRequested = null;
             // the client is mutable - so every time we get a request, we have to check to see if it got resized
             if (client.width !== previousEntry.resolution[0] || client.height !== previousEntry.resolution[1]) {
                 // handle resizing by deleting previously allocated resources:
@@ -151,6 +152,8 @@ export class RenderServer {
         const clientFrame = this.clients.get(client);
         if (clientFrame && clientFrame.frame) {
             clientFrame.frame.cancelFrame();
+            this.regl.clear({ framebuffer: clientFrame.image, color: [0, 0, 0, 0], depth: 0 });
+            clientFrame.updateRequested = null;
         }
         const { image, resolution, copyBuffer } = this.prepareToRenderToClient(client);
         const hijack: RenderCallback = (e) => {
@@ -179,7 +182,15 @@ export class RenderServer {
         // this is a good thing for performance, but potentially confusing - so we do our book-keeping before we actually start rendering:
         const aboutToStart = this.clients.get(client);
         // this is the record we just put into the clients map - TS just wants to be sure it really exists:
         if (aboutToStart) {
-            aboutToStart.frame = renderFn(image, this.cache, hijack);
+            const frame = renderFn(image, this.cache, hijack);
+            if (frame) {
+                aboutToStart.frame = {
+                    cancelFrame: (reason?: string) => {
+                        frame.cancelFrame(reason);
+                        aboutToStart.updateRequested = null;
+                    },
+                };
+            }
         }
     }
 }
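Note on the cancellation ordering in async-frame.ts: the new `cancelFrame` aborts the shared `AbortController` before invoking the per-task cancel callbacks, and `renderItemWrapper` re-checks the signal before drawing. The sketch below illustrates that pattern only; it is not library code (`fakeTask` and `runDemo` are made-up names, not part of vis-scatterbrain), and it assumes an environment where `AbortController` and `DOMException` are available (modern browsers, Node 17+).

```ts
type CancelFn = () => void;

// Schedules some async "rendering" work; the work checks the abort signal
// before running, mirroring the guard added to renderItemWrapper.
function fakeTask(signal: AbortSignal, onDone: () => void): CancelFn {
    const timer = setTimeout(() => {
        if (!signal.aborted) {
            onDone();
        }
    }, 10);
    return () => clearTimeout(timer);
}

function runDemo() {
    const abort = new AbortController();
    const cancels: CancelFn[] = [];
    cancels.push(fakeTask(abort.signal, () => console.log('rendered item')));

    // Cancel in the order the diff settles on:
    abort.abort(new DOMException('frame cancelled', 'AbortError')); // 1. mark the frame as aborted
    cancels.forEach((cancel) => cancel());                          // 2. then tear down the tasks
    // Any callback that still fires sees signal.aborted === true and skips its work,
    // so nothing is drawn (or reported as progress) after cancellation.
}

runDemo();
```

The render-server change follows the same idea from the other side: the stored frame's `cancelFrame` is wrapped so that cancelling also clears the client's `updateRequested` book-keeping, and the client's framebuffer is cleared before a replacement frame begins.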