From b53ecc9e8ed1cf29bb3fc7cdadb59a3a350593e9 Mon Sep 17 00:00:00 2001 From: Richard Tibbles Date: Thu, 27 Apr 2023 19:19:06 -0700 Subject: [PATCH 01/21] Add more sentry logging for sync errors. Add more finegrained error catching for applying remote changes. --- .../shared/data/applyRemoteChanges.js | 34 ++++++++++++------- .../frontend/shared/data/serverSync.js | 23 +++++-------- .../frontend/shared/logging.js | 17 ++++++++++ 3 files changed, 47 insertions(+), 27 deletions(-) create mode 100644 contentcuration/contentcuration/frontend/shared/logging.js diff --git a/contentcuration/contentcuration/frontend/shared/data/applyRemoteChanges.js b/contentcuration/contentcuration/frontend/shared/data/applyRemoteChanges.js index 9348b8bbf3..effbdd3c81 100644 --- a/contentcuration/contentcuration/frontend/shared/data/applyRemoteChanges.js +++ b/contentcuration/contentcuration/frontend/shared/data/applyRemoteChanges.js @@ -1,5 +1,6 @@ import Dexie from 'dexie'; import sortBy from 'lodash/sortBy'; +import logging from '../logging'; import { CHANGE_TYPES, IGNORED_SOURCE, TABLE_NAMES } from './constants'; import db from './db'; import { INDEXEDDB_RESOURCES } from './registry'; @@ -129,18 +130,27 @@ export default async function applyChanges(changes) { const results = []; for (const change of sortBy(changes, ['server_rev', 'rev'])) { let result; - if (change.type === CHANGE_TYPES.CREATED) { - result = await applyCreate(change); - } else if (change.type === CHANGE_TYPES.UPDATED) { - result = await applyUpdate(change); - } else if (change.type === CHANGE_TYPES.DELETED) { - result = await applyDelete(change); - } else if (change.type === CHANGE_TYPES.MOVED) { - result = await applyMove(change); - } else if (change.type === CHANGE_TYPES.COPIED) { - result = await applyCopy(change); - } else if (change.type === CHANGE_TYPES.PUBLISHED) { - result = await applyPublish(change); + try { + if (change.type === CHANGE_TYPES.CREATED) { + result = await applyCreate(change); + } else if (change.type === CHANGE_TYPES.UPDATED) { + result = await applyUpdate(change); + } else if (change.type === CHANGE_TYPES.DELETED) { + result = await applyDelete(change); + } else if (change.type === CHANGE_TYPES.MOVED) { + result = await applyMove(change); + } else if (change.type === CHANGE_TYPES.COPIED) { + result = await applyCopy(change); + } else if (change.type === CHANGE_TYPES.PUBLISHED) { + result = await applyPublish(change); + } + } catch (e) { + logging.error(e, { + filename: 'change.json', + // strip csrf token from headers + data: JSON.stringify(change), + contentType: 'application/json', + }); } if (result) { diff --git a/contentcuration/contentcuration/frontend/shared/data/serverSync.js b/contentcuration/contentcuration/frontend/shared/data/serverSync.js index 92f421f9a8..326d928155 100644 --- a/contentcuration/contentcuration/frontend/shared/data/serverSync.js +++ b/contentcuration/contentcuration/frontend/shared/data/serverSync.js @@ -1,4 +1,3 @@ -import * as Sentry from '@sentry/vue'; import debounce from 'lodash/debounce'; import findLastIndex from 'lodash/findLastIndex'; import get from 'lodash/get'; @@ -6,6 +5,7 @@ import pick from 'lodash/pick'; import omit from 'lodash/omit'; import orderBy from 'lodash/orderBy'; import uniq from 'lodash/uniq'; +import logging from '../logging'; import applyChanges from './applyRemoteChanges'; import { CHANGE_TYPES, @@ -100,18 +100,11 @@ function handleDisallowed(response) { const disallowed = get(response, ['data', 'disallowed'], []); if (disallowed.length) { // Capture 
occurrences of the api disallowing changes - if (process.env.NODE_ENV === 'production') { - Sentry.withScope(function(scope) { - scope.addAttachment({ - filename: 'disallowed.json', - data: JSON.stringify(disallowed), - contentType: 'application/json', - }); - Sentry.captureException(new Error('/api/sync returned disallowed changes')); - }); - } else { - console.warn('/api/sync returned disallowed changes:', disallowed); // eslint-disable-line no-console - } + logging.error(new Error('/api/sync returned disallowed changes'), { + filename: 'disallowed.json', + data: JSON.stringify(disallowed), + contentType: 'application/json', + }); // Collect all disallowed const disallowedRevs = disallowed.map(d => Number(d.rev)); @@ -317,11 +310,11 @@ async function syncChanges() { handleTasks(response), ]); } catch (err) { - console.error('There was an error updating change status: ', err); // eslint-disable-line no-console + logging.error(err); } } catch (err) { // There was an error during syncing, log, but carry on - console.warn('There was an error during syncing with the backend: ', err); // eslint-disable-line no-console + logging.error(err); } syncActive = false; } diff --git a/contentcuration/contentcuration/frontend/shared/logging.js b/contentcuration/contentcuration/frontend/shared/logging.js new file mode 100644 index 0000000000..817e24f9fe --- /dev/null +++ b/contentcuration/contentcuration/frontend/shared/logging.js @@ -0,0 +1,17 @@ +import * as Sentry from '@sentry/vue'; + +export default { + error(error, ...attachments) { + if (process.env.NODE_ENV !== 'production') { + // In dev build log warnings to console for developer use + console.trace(error, ...attachments); // eslint-disable-line no-console + } else { + Sentry.withScope(function(scope) { + for (let attachment of attachments) { + scope.addAttachment(attachment); + } + Sentry.captureException(error); + }); + } + }, +}; From d6ab54c2ba1903e0a1c9ac85a09d464b73eb4c42 Mon Sep 17 00:00:00 2001 From: Richard Tibbles Date: Thu, 4 May 2023 16:19:02 -0700 Subject: [PATCH 02/21] Catch the entire syncChanges function. Always set syncActive to `false` when finished. --- .../frontend/shared/data/serverSync.js | 104 +++++++++--------- 1 file changed, 54 insertions(+), 50 deletions(-) diff --git a/contentcuration/contentcuration/frontend/shared/data/serverSync.js b/contentcuration/contentcuration/frontend/shared/data/serverSync.js index 326d928155..72b800b1bc 100644 --- a/contentcuration/contentcuration/frontend/shared/data/serverSync.js +++ b/contentcuration/contentcuration/frontend/shared/data/serverSync.js @@ -226,6 +226,8 @@ function handleTasks(response) { return Task.setTasks(tasks); } +const noUserError = 'No user logged in'; + async function syncChanges() { // Note: we could in theory use Dexie syncable for what // we are doing here, but I can't find a good way to make @@ -238,58 +240,58 @@ async function syncChanges() { syncActive = true; - // Track the maxRevision at this moment so that we can ignore any changes that - // might have come in during processing - leave them for the next cycle. - // This is the primary key of the change objects, so the collection is ordered by this - // by default - if we just grab the last object, we can get the key from there. - const [lastChange, user] = await Promise.all([ - db[CHANGES_TABLE].orderBy('rev').last(), - Session.getSession(), - ]); - if (!user) { - // If not logged in, nothing to do. 
- return; - } + try { + // Track the maxRevision at this moment so that we can ignore any changes that + // might have come in during processing - leave them for the next cycle. + // This is the primary key of the change objects, so the collection is ordered by this + // by default - if we just grab the last object, we can get the key from there. + const [lastChange, user] = await Promise.all([ + db[CHANGES_TABLE].orderBy('rev').last(), + Session.getSession(), + ]); + if (!user) { + // If not logged in, nothing to do. + throw new Error(noUserError); + } - const now = Date.now(); - const channelIds = Object.entries(user[ACTIVE_CHANNELS] || {}) - .filter(([id, time]) => id && time > now - CHANNEL_SYNC_KEEP_ALIVE_INTERVAL) - .map(([id]) => id); - const channel_revs = {}; - for (const channelId of channelIds) { - channel_revs[channelId] = get(user, [MAX_REV_KEY, channelId], 0); - } + const now = Date.now(); + const channelIds = Object.entries(user[ACTIVE_CHANNELS] || {}) + .filter(([id, time]) => id && time > now - CHANNEL_SYNC_KEEP_ALIVE_INTERVAL) + .map(([id]) => id); + const channel_revs = {}; + for (const channelId of channelIds) { + channel_revs[channelId] = get(user, [MAX_REV_KEY, channelId], 0); + } - const unAppliedChanges = await db[CHANGES_TABLE].orderBy('server_rev') - .filter(c => c.synced && !c.errors && !c.disallowed) - .toArray(); - - const requestPayload = { - changes: [], - channel_revs, - user_rev: user.user_rev || 0, - unapplied_revs: unAppliedChanges.map(c => c.server_rev).filter(Boolean), - }; - - if (lastChange) { - const changesMaxRevision = lastChange.rev; - const syncableChanges = db[CHANGES_TABLE].where('rev') - .belowOrEqual(changesMaxRevision) - .filter(c => !c.synced); - const changesToSync = await syncableChanges.toArray(); - // By the time we get here, our changesToSync Array should - // have every change we want to sync to the server, so we - // can now trim it down to only what is needed to transmit over the wire. - // TODO: remove moves when a delete change is present for an object, - // because a delete will wipe out the move. - const changes = changesToSync.map(trimChangeForSync).filter(Boolean); - // Create a promise for the sync - if there is nothing to sync just resolve immediately, - // in order to still call our change cleanup code. - if (changes.length) { - requestPayload.changes = changes; + const unAppliedChanges = await db[CHANGES_TABLE].orderBy('server_rev') + .filter(c => c.synced && !c.errors && !c.disallowed) + .toArray(); + + const requestPayload = { + changes: [], + channel_revs, + user_rev: user.user_rev || 0, + unapplied_revs: unAppliedChanges.map(c => c.server_rev).filter(Boolean), + }; + + if (lastChange) { + const changesMaxRevision = lastChange.rev; + const syncableChanges = db[CHANGES_TABLE].where('rev') + .belowOrEqual(changesMaxRevision) + .filter(c => !c.synced); + const changesToSync = await syncableChanges.toArray(); + // By the time we get here, our changesToSync Array should + // have every change we want to sync to the server, so we + // can now trim it down to only what is needed to transmit over the wire. + // TODO: remove moves when a delete change is present for an object, + // because a delete will wipe out the move. + const changes = changesToSync.map(trimChangeForSync).filter(Boolean); + // Create a promise for the sync - if there is nothing to sync just resolve immediately, + // in order to still call our change cleanup code. 
+ if (changes.length) { + requestPayload.changes = changes; + } } - } - try { // The response from the sync endpoint has the format: // { // "disallowed": [], @@ -314,7 +316,9 @@ async function syncChanges() { } } catch (err) { // There was an error during syncing, log, but carry on - logging.error(err); + if (err.message !== noUserError) { + logging.error(err); + } } syncActive = false; } From abeabb785d0c24e8170dd8d26f7287ba34316a8e Mon Sep 17 00:00:00 2001 From: Richard Tibbles Date: Sat, 6 May 2023 18:36:09 -0700 Subject: [PATCH 03/21] Update put method to add method to make creates explicit. --- .../vuex/assessmentItem/actions.js | 2 +- .../contentNode/__tests__/actions.spec.js | 4 +- .../channelEdit/vuex/contentNode/actions.js | 2 +- .../vuex/importFromChannels/actions.js | 2 +- .../vuex/channelList/__tests__/module.spec.js | 4 +- .../vuex/channelSet/__tests__/module.spec.js | 2 +- .../frontend/shared/__tests__/app.spec.js | 6 +-- .../__tests__/ContentNodeResource.spec.js | 2 +- .../frontend/shared/data/resources.js | 44 +++++++------------ .../vuex/channel/__tests__/module.spec.js | 16 +++---- .../frontend/shared/vuex/channel/actions.js | 2 +- .../shared/vuex/file/__tests__/module.spec.js | 2 +- 12 files changed, 38 insertions(+), 50 deletions(-) diff --git a/contentcuration/contentcuration/frontend/channelEdit/vuex/assessmentItem/actions.js b/contentcuration/contentcuration/frontend/channelEdit/vuex/assessmentItem/actions.js index f411c4ed14..d421396433 100644 --- a/contentcuration/contentcuration/frontend/channelEdit/vuex/assessmentItem/actions.js +++ b/contentcuration/contentcuration/frontend/channelEdit/vuex/assessmentItem/actions.js @@ -42,7 +42,7 @@ export function addAssessmentItem(context, assessmentItem) { }; return db.transaction('rw', [TABLE_NAMES.CONTENTNODE, TABLE_NAMES.ASSESSMENTITEM], () => { - return AssessmentItem.put(stringifiedAssessmentItem).then(([contentnode, assessment_id]) => { + return AssessmentItem.add(stringifiedAssessmentItem).then(([contentnode, assessment_id]) => { context.commit('UPDATE_ASSESSMENTITEM', { ...assessmentItem, contentnode, diff --git a/contentcuration/contentcuration/frontend/channelEdit/vuex/contentNode/__tests__/actions.spec.js b/contentcuration/contentcuration/frontend/channelEdit/vuex/contentNode/__tests__/actions.spec.js index 0ebffd904c..2e2898b67b 100644 --- a/contentcuration/contentcuration/frontend/channelEdit/vuex/contentNode/__tests__/actions.spec.js +++ b/contentcuration/contentcuration/frontend/channelEdit/vuex/contentNode/__tests__/actions.spec.js @@ -16,7 +16,7 @@ describe('contentNode actions', () => { let id; const contentNodeDatum = { title: 'test', parent: parentId, lft: 1, tags: {} }; beforeEach(() => { - return ContentNode._put(contentNodeDatum).then(newId => { + return ContentNode._add(contentNodeDatum).then(newId => { id = newId; contentNodeDatum.id = newId; jest @@ -25,7 +25,7 @@ describe('contentNode actions', () => { jest .spyOn(ContentNode, 'fetchModel') .mockImplementation(() => Promise.resolve(contentNodeDatum)); - return ContentNode._put({ title: 'notatest', parent: newId, lft: 2 }).then(() => { + return ContentNode._add({ title: 'notatest', parent: newId, lft: 2 }).then(() => { store = storeFactory({ modules: { assessmentItem, diff --git a/contentcuration/contentcuration/frontend/channelEdit/vuex/contentNode/actions.js b/contentcuration/contentcuration/frontend/channelEdit/vuex/contentNode/actions.js index 55185ac8c1..d0c3f2bba1 100644 --- 
a/contentcuration/contentcuration/frontend/channelEdit/vuex/contentNode/actions.js +++ b/contentcuration/contentcuration/frontend/channelEdit/vuex/contentNode/actions.js @@ -191,7 +191,7 @@ export function createContentNode(context, { parent, kind, ...payload }) { assessmentItems: [], files: [], }); - return ContentNode.put(contentNodeData).then(id => { + return ContentNode.add(contentNodeData).then(id => { context.commit('ADD_CONTENTNODE', { id, ...contentNodeData, diff --git a/contentcuration/contentcuration/frontend/channelEdit/vuex/importFromChannels/actions.js b/contentcuration/contentcuration/frontend/channelEdit/vuex/importFromChannels/actions.js index 21cee26437..5f2bc585e3 100644 --- a/contentcuration/contentcuration/frontend/channelEdit/vuex/importFromChannels/actions.js +++ b/contentcuration/contentcuration/frontend/channelEdit/vuex/importFromChannels/actions.js @@ -38,7 +38,7 @@ export function createSearch({ commit, rootState }, params) { saved_by: rootState.session.currentUser.id, created: new Date(), }; - return SavedSearch.put(data).then(id => { + return SavedSearch.add(data).then(id => { commit('UPDATE_SAVEDSEARCH', { id, ...data, diff --git a/contentcuration/contentcuration/frontend/channelList/vuex/channelList/__tests__/module.spec.js b/contentcuration/contentcuration/frontend/channelList/vuex/channelList/__tests__/module.spec.js index a99a37e94a..f6f7d2c6ba 100644 --- a/contentcuration/contentcuration/frontend/channelList/vuex/channelList/__tests__/module.spec.js +++ b/contentcuration/contentcuration/frontend/channelList/vuex/channelList/__tests__/module.spec.js @@ -18,7 +18,7 @@ describe('invitation actions', () => { let store; let id; beforeEach(() => { - return Invitation.put(invitation).then(newId => { + return Invitation.add(invitation).then(newId => { id = newId; store = storeFactory({ modules: { @@ -59,7 +59,7 @@ describe('invitation actions', () => { // const channel = { id: channel_id, name: 'test', deleted: false, edit: true }; // beforeEach(() => { // store.commit('channelList/SET_INVITATION_LIST', [{ id, ...invitation }]); - // return Channel.put(channel); + // return Channel.add(channel); // }); // afterEach(() => { // return Channel.table.toCollection().delete(); diff --git a/contentcuration/contentcuration/frontend/channelList/vuex/channelSet/__tests__/module.spec.js b/contentcuration/contentcuration/frontend/channelList/vuex/channelSet/__tests__/module.spec.js index 28892a035c..bc6d5b6be2 100644 --- a/contentcuration/contentcuration/frontend/channelList/vuex/channelSet/__tests__/module.spec.js +++ b/contentcuration/contentcuration/frontend/channelList/vuex/channelSet/__tests__/module.spec.js @@ -14,7 +14,7 @@ describe('channelSet actions', () => { edit: true, }; beforeEach(() => { - return ChannelSet.put(channelSetDatum).then(newId => { + return ChannelSet.add(channelSetDatum).then(newId => { id = newId; store = storeFactory({ modules: { diff --git a/contentcuration/contentcuration/frontend/shared/__tests__/app.spec.js b/contentcuration/contentcuration/frontend/shared/__tests__/app.spec.js index 0733a55184..bf5625df7c 100644 --- a/contentcuration/contentcuration/frontend/shared/__tests__/app.spec.js +++ b/contentcuration/contentcuration/frontend/shared/__tests__/app.spec.js @@ -45,7 +45,7 @@ describe('startApp', () => { describe('if there is a current user saved in a session', () => { beforeEach(async () => { - await Session.put({ ...USER_1, CURRENT_USER }); + await Session.table.put({ ...USER_1, CURRENT_USER }); const sessionTable = await 
Session.table.toArray(); expect(sessionTable).toEqual([{ ...USER_1, CURRENT_USER }]); @@ -77,7 +77,7 @@ describe('startApp', () => { describe('if there is the same current user saved in a session', () => { beforeEach(async () => { - await Session.put({ ...USER_1, CURRENT_USER }); + await Session.table.put({ ...USER_1, CURRENT_USER }); const sessionTable = await Session.table.toArray(); expect(sessionTable).toEqual([{ ...USER_1, CURRENT_USER }]); @@ -91,7 +91,7 @@ describe('startApp', () => { describe('if there is a different current user saved in a session', () => { beforeEach(async () => { - await Session.put({ ...USER_2, CURRENT_USER }); + await Session.table.put({ ...USER_2, CURRENT_USER }); const sessionTable = await Session.table.toArray(); expect(sessionTable).toEqual([{ ...USER_2, CURRENT_USER }]); diff --git a/contentcuration/contentcuration/frontend/shared/data/__tests__/ContentNodeResource.spec.js b/contentcuration/contentcuration/frontend/shared/data/__tests__/ContentNodeResource.spec.js index 0b3cff25e0..b52ee98148 100644 --- a/contentcuration/contentcuration/frontend/shared/data/__tests__/ContentNodeResource.spec.js +++ b/contentcuration/contentcuration/frontend/shared/data/__tests__/ContentNodeResource.spec.js @@ -854,7 +854,7 @@ describe('ContentNodePrerequisite methods', () => { }); it('should return all associated requisites, even when there is a cyclic dependency', () => { const cyclic = { target_node: 'id-chemistry', prerequisite: 'id-lab' }; - return ContentNodePrerequisite.put(cyclic).then(() => { + return ContentNodePrerequisite.add(cyclic).then(() => { return ContentNode.getRequisites('id-integrals').then(entries => { expect(sortBy(entries, 'target_node')).toEqual( sortBy(mappings.concat([cyclic]), 'target_node') diff --git a/contentcuration/contentcuration/frontend/shared/data/resources.js b/contentcuration/contentcuration/frontend/shared/data/resources.js index 7005946042..c71b4fbb18 100644 --- a/contentcuration/contentcuration/frontend/shared/data/resources.js +++ b/contentcuration/contentcuration/frontend/shared/data/resources.js @@ -557,12 +557,12 @@ class IndexedDBResource { */ createObj(obj) { return { - ...this._preparePut(obj), + ...this._prepareAdd(obj), [NEW_OBJECT]: true, }; } - _preparePut(obj) { + _prepareAdd(obj) { const idMap = this._prepareCopy({}); return this._cleanNew({ ...idMap, @@ -574,20 +574,10 @@ class IndexedDBResource { * @param {Object} obj * @return {Promise} */ - put(obj) { - return this.transaction({ mode: 'rw' }, () => { - return this.table.put(this._preparePut(obj)); - }); - } - - /** - * @param {Object[]} objs - * @return {Promise} - */ - bulkPut(objs) { - const putObjs = objs.map(this._preparePut); - return this.transaction({ mode: 'rw' }, () => { - return this.table.bulkPut(putObjs); + add(obj) { + return this.transaction({ mode: 'rw' }, CHANGES_TABLE, () => { + // TODO: Finish implementation of add by adding explicit CREATE change to changes table + return this.table.add(this._prepareAdd(obj)); }); } @@ -980,7 +970,7 @@ export const Session = new IndexedDBResource({ return this.get(CURRENT_USER); }, setSession(currentUser) { - return this.put({ CURRENT_USER, ...currentUser }); + return this.table.put({ CURRENT_USER, ...currentUser }); }, updateSession(currentUser) { return this.update(CURRENT_USER, currentUser); @@ -1205,7 +1195,7 @@ export const ContentNode = new TreeResource({ if (entry) { return Promise.reject('No cyclic prerequisites'); } - return ContentNodePrerequisite.put({ + return ContentNodePrerequisite.add({ 
target_node, prerequisite, }); @@ -1416,15 +1406,15 @@ export const ContentNode = new TreeResource({ }); }, - // Retain super's put method that does not handle tree insertion - _put: TreeResource.prototype.put, + // Retain super's add method that does not handle tree insertion + _add: TreeResource.prototype.add, /** * @param {Object} obj * @return {Promise} */ - put(obj) { - const prepared = this._preparePut(obj); + add(obj) { + const prepared = this._prepareAdd(obj); return this.resolveTreeInsert( prepared.id, @@ -1433,7 +1423,7 @@ export const ContentNode = new TreeResource({ true, data => { return this.transaction({ mode: 'rw' }, () => { - return this.table.put({ ...prepared, ...data.payload }); + return this.table.add({ ...prepared, ...data.payload }); }); } ); @@ -1678,7 +1668,7 @@ export const ChannelUser = new APIResource({ urlName: 'channeluser', makeEditor(channel, user) { return ViewerM2M.delete([user, channel]).then(() => { - return EditorM2M.put({ user, channel }); + return EditorM2M.add({ user, channel }); }); }, removeViewer(channel, user) { @@ -1809,10 +1799,8 @@ export const AssessmentItem = new Resource({ }); }); }, - put(obj) { - return this.transaction({ mode: 'rw' }, () => { - return this.table.put(this._preparePut(obj)); - }).then(id => { + add(obj) { + return super.add(obj).then(id => { return this.modifyAssessmentItemCount(obj.contentnode, 1).then(() => { return id; }); diff --git a/contentcuration/contentcuration/frontend/shared/vuex/channel/__tests__/module.spec.js b/contentcuration/contentcuration/frontend/shared/vuex/channel/__tests__/module.spec.js index 0f09ad4d7d..e0c7551fb6 100644 --- a/contentcuration/contentcuration/frontend/shared/vuex/channel/__tests__/module.spec.js +++ b/contentcuration/contentcuration/frontend/shared/vuex/channel/__tests__/module.spec.js @@ -22,7 +22,7 @@ describe('channel actions', () => { let id; const channelDatum = { name: 'test', deleted: false, edit: true }; beforeEach(() => { - return Channel.put(channelDatum).then(newId => { + return Channel.add(channelDatum).then(newId => { id = newId; channelDatum.id = id; store = storeFactory({ @@ -178,14 +178,14 @@ describe('channel actions', () => { }); }); describe('bookmarkChannel action', () => { - it('should call Bookmark.put when creating a bookmark', () => { - const putSpy = jest.spyOn(Bookmark, 'put'); + it('should call Bookmark.add when creating a bookmark', () => { + const addSpy = jest.spyOn(Bookmark, 'add'); store.commit('channel/ADD_CHANNEL', { id, name: 'test', }); return store.dispatch('channel/bookmarkChannel', { id, bookmark: true }).then(() => { - expect(putSpy).toHaveBeenCalledWith({ channel: id }); + expect(addSpy).toHaveBeenCalledWith({ channel: id }); }); }); it('should call Bookmark.delete when removing a bookmark', () => { @@ -268,7 +268,7 @@ describe('Channel sharing vuex', () => { jest .spyOn(ChannelUser, 'fetchCollection') .mockImplementation(() => Promise.resolve([testUser])); - return Channel.put(channelDatum).then(newId => { + return Channel.add(channelDatum).then(newId => { channelId = newId; const user = { ...testUser, @@ -276,8 +276,8 @@ describe('Channel sharing vuex', () => { const invitations = makeInvitations(channelId); testInvitations = invitations; - return User.put(user).then(() => { - return ViewerM2M.put({ user: user.id, channel: channelDatum.id }).then(() => { + return User.add(user).then(() => { + return ViewerM2M.add({ user: user.id, channel: channelDatum.id }).then(() => { return Invitation.table.bulkPut(invitations).then(() => { store = 
storeFactory({ modules: { @@ -382,7 +382,7 @@ describe('Channel sharing vuex', () => { declined: true, }; - Invitation.put(declinedInvitation).then(() => { + Invitation.add(declinedInvitation).then(() => { store.dispatch('channel/loadChannelUsers', channelId).then(() => { expect(Object.keys(store.state.channel.invitationsMap)).not.toContain( 'choosy-invitation' diff --git a/contentcuration/contentcuration/frontend/shared/vuex/channel/actions.js b/contentcuration/contentcuration/frontend/shared/vuex/channel/actions.js index 8e5989e6cb..b977551ab0 100644 --- a/contentcuration/contentcuration/frontend/shared/vuex/channel/actions.js +++ b/contentcuration/contentcuration/frontend/shared/vuex/channel/actions.js @@ -199,7 +199,7 @@ export function updateChannel( export function bookmarkChannel(context, { id, bookmark }) { if (bookmark) { - return Bookmark.put({ channel: id }).then(() => { + return Bookmark.add({ channel: id }).then(() => { context.commit('SET_BOOKMARK', { channel: id }); }); } else { diff --git a/contentcuration/contentcuration/frontend/shared/vuex/file/__tests__/module.spec.js b/contentcuration/contentcuration/frontend/shared/vuex/file/__tests__/module.spec.js index 8a3d26d531..cf29f6ae40 100644 --- a/contentcuration/contentcuration/frontend/shared/vuex/file/__tests__/module.spec.js +++ b/contentcuration/contentcuration/frontend/shared/vuex/file/__tests__/module.spec.js @@ -24,7 +24,7 @@ describe('file store', () => { beforeEach(() => { jest.spyOn(File, 'fetchCollection').mockImplementation(() => Promise.resolve([testFile])); jest.spyOn(File, 'fetchModel').mockImplementation(() => Promise.resolve(testFile)); - return File.put(testFile).then(newId => { + return File.add(testFile).then(newId => { id = newId; store = storeFactory(); store.commit('file/ADD_FILE', { id, ...testFile }); From c89ba35c2938b35dec2053b32cbfb46014bc8bca Mon Sep 17 00:00:00 2001 From: Richard Tibbles Date: Mon, 8 May 2023 09:00:31 -0700 Subject: [PATCH 04/21] Add Change classes to encapsulate and handle setting changes on the changes table. 
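
Sketch of the intended usage, mirroring the new changes.spec.js tests added
below; the example object and the import paths are assumptions (based on the
shared webpack alias), not part of this patch:

    import { CreatedChange, UpdatedChange } from 'shared/data/changes';
    import { TABLE_NAMES } from 'shared/data/constants';

    const node = { id: 'abc123', title: 'Old title' };

    // Queue a local create for syncing to the backend.
    const created = new CreatedChange({
      key: node.id,
      table: TABLE_NAMES.CONTENTNODE,
      obj: node,
    });
    await created.saveChange();

    // Queue an update; only the computed mods between oldObj and changes are
    // persisted, and saveChange() resolves to null if nothing actually changed.
    const updated = new UpdatedChange({
      key: node.id,
      table: TABLE_NAMES.CONTENTNODE,
      oldObj: node,
      changes: { ...node, title: 'New title' },
    });
    await updated.saveChange();

Each constructor validates its inputs up front and throws on missing or
mistyped fields (see the "Unhappy Paths" tests), and saveChange() writes the
change to the changes table for the sync machinery to pick up.
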
--- .../shared/data/__tests__/changes.spec.js | 494 ++++++++++++++++++ .../frontend/shared/data/changes.js | 199 +++++++ .../frontend/shared/data/constants.js | 14 + .../frontend/shared/logging.js | 6 +- 4 files changed, 710 insertions(+), 3 deletions(-) create mode 100644 contentcuration/contentcuration/frontend/shared/data/__tests__/changes.spec.js diff --git a/contentcuration/contentcuration/frontend/shared/data/__tests__/changes.spec.js b/contentcuration/contentcuration/frontend/shared/data/__tests__/changes.spec.js new file mode 100644 index 0000000000..14caaedfbc --- /dev/null +++ b/contentcuration/contentcuration/frontend/shared/data/__tests__/changes.spec.js @@ -0,0 +1,494 @@ +import pick from 'lodash/pick'; +import { + CreatedChange, + UpdatedChange, + DeletedChange, + MovedChange, + CopiedChange, + PublishedChange, + SyncedChange, + DeployedChange, +} from '../changes'; +import { + CHANGES_TABLE, + TABLE_NAMES, + RELATIVE_TREE_POSITIONS, + LAST_FETCHED, + COPYING_FLAG, + TASK_ID, +} from 'shared/data/constants'; +import db from 'shared/data/db'; + +describe('Change Types', () => { + beforeEach(() => { + db[CHANGES_TABLE].clear(); + }); + + it('should persist only the specified fields in the CreatedChange', async () => { + const change = new CreatedChange({ + key: '1', + table: TABLE_NAMES.CONTENTNODE, + obj: { a: 1, b: 2 }, + }); + const rev = await change.saveChange(); + const persistedChange = await db[CHANGES_TABLE].get(rev); + expect(persistedChange).toEqual({ rev, ...pick(change, ['type', 'key', 'table', 'obj']) }); + }); + + it('should persist only the specified fields in the UpdatedChange', async () => { + const oldObj = { a: 1, b: 2 }; + const changes = { a: 1, b: 3 }; + const change = new UpdatedChange({ key: '1', table: TABLE_NAMES.CONTENTNODE, oldObj, changes }); + const rev = await change.saveChange(); + const persistedChange = await db[CHANGES_TABLE].get(rev); + expect(persistedChange).toEqual({ + oldObj, + obj: changes, + mods: { b: 3 }, + rev, + ...pick(change, ['type', 'key', 'table']), + }); + }); + + it('should save the mods as key paths in the UpdatedChange', async () => { + const oldObj = { a: 1, b: { c: 1 } }; + const changes = { a: 1, b: { c: 2 } }; + const change = new UpdatedChange({ key: '1', table: TABLE_NAMES.CONTENTNODE, oldObj, changes }); + const rev = await change.saveChange(); + const persistedChange = await db[CHANGES_TABLE].get(rev); + expect(persistedChange).toEqual({ + oldObj, + obj: changes, + mods: { 'b.c': 2 }, + rev, + ...pick(change, ['type', 'key', 'table']), + }); + }); + + it('should save deleted mods as key paths to null in the UpdatedChange', async () => { + const oldObj = { a: 1, b: { c: 1 } }; + const changes = { a: 1, b: {} }; + const change = new UpdatedChange({ key: '1', table: TABLE_NAMES.CONTENTNODE, oldObj, changes }); + const rev = await change.saveChange(); + const persistedChange = await db[CHANGES_TABLE].get(rev); + expect(persistedChange).toEqual({ + oldObj, + obj: changes, + mods: { 'b.c': null }, + rev, + ...pick(change, ['type', 'key', 'table']), + }); + }); + + it('should ignore updates to specific subfields in the UpdatedChange', async () => { + const oldObj = { a: 1, b: 2 }; + const changes = { + a: 1, + b: 3, + [LAST_FETCHED]: new Date(), + [TASK_ID]: '18292183921', + [COPYING_FLAG]: true, + }; + const change = new UpdatedChange({ key: '1', table: TABLE_NAMES.CONTENTNODE, oldObj, changes }); + const rev = await change.saveChange(); + const persistedChange = await db[CHANGES_TABLE].get(rev); + 
expect(persistedChange).toEqual({ + oldObj, + obj: { a: 1, b: 3 }, + mods: { b: 3 }, + rev, + ...pick(change, ['type', 'key', 'table']), + }); + }); + + it('should not save a change when no updates are made in the UpdatedChange', async () => { + const oldObj = { a: 1, b: 2 }; + const changes = { + a: 1, + b: 2, + [LAST_FETCHED]: new Date(), + [TASK_ID]: '18292183921', + [COPYING_FLAG]: true, + }; + const change = new UpdatedChange({ key: '1', table: TABLE_NAMES.CONTENTNODE, oldObj, changes }); + const rev = await change.saveChange(); + expect(rev).toBeNull(); + }); + + it('should persist only the specified fields in the DeletedChange', async () => { + const change = new DeletedChange({ + key: '1', + table: TABLE_NAMES.CONTENTNODE, + oldObj: { a: 1, b: 2 }, + }); + const rev = await change.saveChange(); + const persistedChange = await db[CHANGES_TABLE].get(rev); + expect(persistedChange).toEqual({ rev, ...pick(change, ['type', 'key', 'table', 'oldObj']) }); + }); + + it('should persist only the specified fields in the MovedChange', async () => { + const change = new MovedChange({ + key: '1', + table: TABLE_NAMES.CONTENTNODE, + target: '2', + position: RELATIVE_TREE_POSITIONS.LAST_CHILD, + parent: '3', + }); + const rev = await change.saveChange(); + const persistedChange = await db[CHANGES_TABLE].get(rev); + expect(persistedChange).toEqual({ + rev, + ...pick(change, ['type', 'key', 'table', 'target', 'position', 'parent']), + }); + }); + + it('should persist only the specified fields in the CopiedChange', async () => { + const change = new CopiedChange({ + key: '1', + table: TABLE_NAMES.CONTENTNODE, + from_key: '2', + mods: { a: 1, b: 2 }, + target: '3', + position: RELATIVE_TREE_POSITIONS.LAST_CHILD, + excluded_descendants: null, + parent: '3', + }); + const rev = await change.saveChange(); + const persistedChange = await db[CHANGES_TABLE].get(rev); + expect(persistedChange).toEqual({ + rev, + ...pick(change, [ + 'type', + 'key', + 'table', + 'from_key', + 'mods', + 'target', + 'position', + 'excluded_descendants', + 'parent', + ]), + }); + }); + + it('should persist only the specified fields in the PublishedChange', async () => { + const change = new PublishedChange({ + key: '1', + table: TABLE_NAMES.CONTENTNODE, + version_notes: 'Some version notes', + language: 'en', + }); + const rev = await change.saveChange(); + const persistedChange = await db[CHANGES_TABLE].get(rev); + expect(persistedChange).toEqual({ + rev, + ...pick(change, ['type', 'key', 'table', 'version_notes', 'language']), + }); + }); + + it('should persist only the specified fields in the SyncedChange', async () => { + const change = new SyncedChange({ + key: '1', + table: TABLE_NAMES.CONTENTNODE, + titles_and_descriptions: true, + resource_details: false, + files: true, + assessment_items: false, + }); + const rev = await change.saveChange(); + const persistedChange = await db[CHANGES_TABLE].get(rev); + expect(persistedChange).toEqual({ + rev, + ...pick(change, [ + 'type', + 'key', + 'table', + 'titles_and_descriptions', + 'resource_details', + 'files', + 'assessment_items', + ]), + }); + }); + + it('should persist only the specified fields in the DeployedChange', async () => { + const change = new DeployedChange({ key: '1', table: TABLE_NAMES.CONTENTNODE }); + const rev = await change.saveChange(); + const persistedChange = await db[CHANGES_TABLE].get(rev); + expect(persistedChange).toEqual({ rev, ...pick(change, ['type', 'key', 'table']) }); + }); +}); + +describe('Change Types Unhappy Paths', () => { + // 
CreatedChange + it('should throw error when CreatedChange is instantiated without obj', () => { + expect(() => new CreatedChange({ key: '1', table: TABLE_NAMES.CONTENTNODE })).toThrow( + new TypeError('obj should be an object, but undefined was passed instead') + ); + }); + + it('should throw error when CreatedChange is instantiated with incorrect obj type', () => { + expect( + () => new CreatedChange({ key: '1', table: TABLE_NAMES.CONTENTNODE, obj: 'invalid' }) + ).toThrow(new TypeError('obj should be an object, but invalid was passed instead')); + }); + + it('should throw error when CreatedChange is instantiated with a non-syncable table', () => { + expect(() => new CreatedChange({ key: '1', table: TABLE_NAMES.SESSION, obj: {} })).toThrow( + new TypeError('session is not a syncable table') + ); + }); + + // UpdatedChange + it('should throw error when UpdatedChange is instantiated without changes', () => { + expect(() => new UpdatedChange({ key: '1', table: TABLE_NAMES.CONTENTNODE })).toThrow( + new TypeError('changes should be an object, but undefined was passed instead') + ); + }); + + it('should throw error when UpdatedChange is instantiated with incorrect changes type', () => { + expect( + () => new UpdatedChange({ key: '1', table: TABLE_NAMES.CONTENTNODE, changes: 'invalid' }) + ).toThrow(new TypeError('changes should be an object, but invalid was passed instead')); + }); + + it('should throw error when UpdatedChange is instantiated without oldObj', () => { + expect( + () => new UpdatedChange({ key: '1', table: TABLE_NAMES.CONTENTNODE, changes: {} }) + ).toThrow(new TypeError('oldObj should be an object, but undefined was passed instead')); + }); + + it('should throw error when UpdatedChange is instantiated with incorrect oldObj type', () => { + expect( + () => + new UpdatedChange({ + key: '1', + table: TABLE_NAMES.CONTENTNODE, + changes: {}, + oldObj: 'invalid', + }) + ).toThrow(new TypeError('oldObj should be an object, but invalid was passed instead')); + }); + + // DeletedChange + it('should throw error when DeletedChange is instantiated without key', () => { + expect(() => new DeletedChange({ table: TABLE_NAMES.CONTENTNODE })).toThrow( + new TypeError('key is required for a DeletedChange but it was undefined') + ); + }); + + it('should throw error when DeletedChange is instantiated with invalid table', () => { + expect(() => new DeletedChange({ key: '1', table: 'test' })).toThrow( + new ReferenceError('test is not a valid table value') + ); + }); + + // MovedChange + it('should throw error when MovedChange is instantiated without target', () => { + expect( + () => + new MovedChange({ + key: '1', + table: TABLE_NAMES.CONTENTNODE, + position: RELATIVE_TREE_POSITIONS.LAST_CHILD, + }) + ).toThrow(new TypeError('target is required for a MovedChange but it was undefined')); + }); + + it('should throw error when MovedChange is instantiated with incorrect position type', () => { + expect( + () => + new MovedChange({ + key: '1', + table: TABLE_NAMES.CONTENTNODE, + target: '2', + position: 'invalid', + }) + ).toThrow(new ReferenceError('invalid is not a valid position value')); + }); + + it('should throw error when MovedChange is instantiated without parent', () => { + expect( + () => + new MovedChange({ + key: '1', + table: TABLE_NAMES.CONTENTNODE, + target: '2', + position: RELATIVE_TREE_POSITIONS.LAST_CHILD, + }) + ).toThrow(new ReferenceError('parent is required for a MovedChange but it was undefined')); + }); + + // CopiedChange + it('should throw error when CopiedChange is 
instantiated without from_key', () => { + expect(() => new CopiedChange({ key: '1', table: TABLE_NAMES.CONTENTNODE })).toThrow( + new TypeError('from_key is required for a CopiedChange but it was undefined') + ); + }); + + it('should throw error when CopiedChange is instantiated with incorrect mods type', () => { + expect( + () => + new CopiedChange({ + key: '1', + table: TABLE_NAMES.CONTENTNODE, + from_key: '2', + mods: 'invalid', + }) + ).toThrow(new TypeError('mods should be an object, but invalid was passed instead')); + }); + + it('should throw error when CopiedChange is instantiated without target', () => { + expect( + () => + new CopiedChange({ + key: '1', + table: TABLE_NAMES.CONTENTNODE, + from_key: '2', + mods: { a: 1 }, + }) + ).toThrow(new TypeError('target is required for a CopiedChange but it was undefined')); + }); + + it('should throw error when CopiedChange is instantiated with incorrect position type', () => { + expect( + () => + new CopiedChange({ + key: '1', + table: TABLE_NAMES.CONTENTNODE, + from_key: '2', + mods: { a: 1 }, + target: '3', + position: 'invalid', + }) + ).toThrow(new ReferenceError('invalid is not a valid position value')); + }); + + it('should throw error when CopiedChange is instantiated with incorrect excluded_descendants type', () => { + expect( + () => + new CopiedChange({ + key: '1', + table: TABLE_NAMES.CONTENTNODE, + from_key: '2', + mods: { a: 1 }, + target: '3', + position: RELATIVE_TREE_POSITIONS.LAST_CHILD, + excluded_descendants: 'invalid', + }) + ).toThrow( + new TypeError( + 'excluded_descendants should be an object or null, but invalid was passed instead' + ) + ); + }); + + it('should throw error when CopiedChange is instantiated without a parent', () => { + expect( + () => + new CopiedChange({ + key: '1', + table: TABLE_NAMES.CONTENTNODE, + from_key: '2', + mods: { a: 1 }, + target: '3', + position: RELATIVE_TREE_POSITIONS.LAST_CHILD, + excluded_descendants: null, + }) + ).toThrow(new TypeError('parent is required for a CopiedChange but it was undefined')); + }); + + // PublishedChange + it('should throw error when PublishedChange is instantiated without version_notes', () => { + expect(() => new PublishedChange({ key: '1', table: TABLE_NAMES.CONTENTNODE })).toThrow( + new TypeError('version_notes is required for a PublishedChange but it was undefined') + ); + }); + + it('should throw error when PublishedChange is instantiated without language', () => { + expect( + () => + new PublishedChange({ + key: '1', + table: TABLE_NAMES.CONTENTNODE, + version_notes: 'Some notes', + }) + ).toThrow(new TypeError('language is required for a PublishedChange but it was undefined')); + }); + + // SyncedChange + it('should throw error when SyncedChange is instantiated without titles_and_descriptions', () => { + expect(() => new SyncedChange({ key: '1', table: TABLE_NAMES.CONTENTNODE })).toThrow( + new TypeError('titles_and_descriptions should be a boolean, but undefined was passed instead') + ); + }); + + it('should throw error when SyncedChange is instantiated with incorrect titles_and_descriptions type', () => { + expect( + () => + new SyncedChange({ + key: '1', + table: TABLE_NAMES.CONTENTNODE, + titles_and_descriptions: 'invalid', + }) + ).toThrow( + new TypeError('titles_and_descriptions should be a boolean, but invalid was passed instead') + ); + }); + + it('should throw error when SyncedChange is instantiated with incorrect resource_details type', () => { + expect( + () => + new SyncedChange({ + key: '1', + table: TABLE_NAMES.CONTENTNODE, + 
titles_and_descriptions: true, + resource_details: 'invalid', + }) + ).toThrow( + new TypeError('resource_details should be a boolean, but invalid was passed instead') + ); + }); + + it('should throw error when SyncedChange is instantiated with incorrect files type', () => { + expect( + () => + new SyncedChange({ + key: '1', + table: TABLE_NAMES.CONTENTNODE, + titles_and_descriptions: true, + resource_details: false, + files: 'invalid', + }) + ).toThrow(new TypeError('files should be a boolean, but invalid was passed instead')); + }); + + it('should throw error when SyncedChange is instantiated with incorrect assessment_items type', () => { + expect( + () => + new SyncedChange({ + key: '1', + table: TABLE_NAMES.CONTENTNODE, + titles_and_descriptions: true, + resource_details: false, + files: true, + assessment_items: 'invalid', + }) + ).toThrow( + new TypeError('assessment_items should be a boolean, but invalid was passed instead') + ); + }); + + // DeployedChange + it('should throw error when DeployedChange is instantiated without key', () => { + expect(() => new DeployedChange({ table: TABLE_NAMES.CONTENTNODE })).toThrow( + new TypeError('key is required for a DeployedChange but it was undefined') + ); + }); + + it('should throw error when DeployedChange is instantiated with invalid table', () => { + expect(() => new DeployedChange({ key: '1', table: 'test' })).toThrow( + new ReferenceError('test is not a valid table value') + ); + }); +}); diff --git a/contentcuration/contentcuration/frontend/shared/data/changes.js b/contentcuration/contentcuration/frontend/shared/data/changes.js index 777a470253..c8f8c35de9 100644 --- a/contentcuration/contentcuration/frontend/shared/data/changes.js +++ b/contentcuration/contentcuration/frontend/shared/data/changes.js @@ -1,12 +1,25 @@ import Dexie from 'dexie'; +import isEmpty from 'lodash/isEmpty'; +import isNull from 'lodash/isNull'; +import isPlainObject from 'lodash/isPlainObject'; +import isUndefined from 'lodash/isUndefined'; +import omit from 'lodash/omit'; import sortBy from 'lodash/sortBy'; +import logging from '../logging'; +import { applyMods } from './applyRemoteChanges'; import db from 'shared/data/db'; import { promiseChunk } from 'shared/utils/helpers'; import { CHANGES_TABLE, CHANGE_TYPES, + CHANGE_TYPES_LOOKUP, + TABLE_NAMES_LOOKUP, IGNORED_SOURCE, RELATIVE_TREE_POSITIONS, + RELATIVE_TREE_POSITIONS_LOOKUP, + LAST_FETCHED, + COPYING_FLAG, + TASK_ID, } from 'shared/data/constants'; import { INDEXEDDB_RESOURCES } from 'shared/data/registry'; @@ -182,3 +195,189 @@ export class ChangeTracker { }); } } + +// These fields should not be included in change objects that we +// store in the changes table for syncing to the backend +// as they are only used for tracking state locally +const ignoredSubFields = [COPYING_FLAG, LAST_FETCHED, TASK_ID]; + +function omitIgnoredSubFields(obj) { + return omit(obj, ignoredSubFields); +} + +class Change { + constructor({ type, key, table } = {}) { + this.setAndValidateLookup(type, 'type', CHANGE_TYPES_LOOKUP); + this.setAndValidateNotUndefined(key, 'key'); + this.setAndValidateLookup(table, 'table', TABLE_NAMES_LOOKUP); + if (!INDEXEDDB_RESOURCES[this.table].syncable) { + const error = ReferenceError(`${this.table} is not a syncable table`); + logging.error(error); + throw error; + } + INDEXEDDB_RESOURCES[this.table].setChannelIdOnChange(this); + INDEXEDDB_RESOURCES[this.table].setUserIdOnChange(this); + } + get changeType() { + return this.constructor.name; + } + + setAndValidateLookup(value, name, lookup) 
{ + if (!lookup[value]) { + const error = ReferenceError(`${value} is not a valid ${name} value`); + logging.error(error, value); + throw error; + } + this[name] = value; + } + setAndValidateNotUndefined(value, name) { + if (isUndefined(value)) { + const error = TypeError(`${name} is required for a ${this.changeType} but it was undefined`); + logging.error(error, value); + throw error; + } + this[name] = value; + } + validateObj(value, name) { + if (!isPlainObject(value)) { + const error = TypeError(`${name} should be an object, but ${value} was passed instead`); + logging.error(error, value); + throw error; + } + } + setAndValidateObj(value, name, mapper = obj => obj) { + this.validateObj(value, name); + this[name] = mapper(value); + } + setAndValidateBoolean(value, name) { + if (typeof value !== 'boolean') { + const error = TypeError(`${name} should be a boolean, but ${value} was passed instead`); + logging.error(error, value); + throw error; + } + this[name] = value; + } + setAndValidateObjOrNull(value, name, mapper = obj => obj) { + if (!isPlainObject(value) && !isNull(value)) { + const error = TypeError( + `${name} should be an object or null, but ${value} was passed instead` + ); + logging.error(error, value); + throw error; + } + this[name] = mapper(value); + } + saveChange() { + return db[CHANGES_TABLE].add(this); + } +} + +export class CreatedChange extends Change { + constructor({ obj, ...fields }) { + fields.type = CHANGE_TYPES.CREATED; + super(fields); + this.setAndValidateObj(obj, 'obj', omitIgnoredSubFields); + } +} + +export class UpdatedChange extends Change { + constructor({ oldObj, changes, ...fields }) { + fields.type = CHANGE_TYPES.UPDATED; + super(fields); + this.validateObj(changes, 'changes'); + this.validateObj(oldObj, 'oldObj'); + changes = omitIgnoredSubFields(changes); + oldObj = omitIgnoredSubFields(oldObj); + // For now, the aim here is to preserve the same change structure as found in + // the Dexie Observable updating hook: + // https://github.com/dexie/Dexie.js/blob/master/addons/Dexie.Observable/src/hooks/updating.js#L15 + const mods = Dexie.getObjectDiff(oldObj, changes); + const newObj = Dexie.deepClone(oldObj); + for (const propPath in mods) { + const mod = mods[propPath]; + const currentValue = Dexie.getByKeyPath(oldObj, propPath); + if (mod !== currentValue && JSON.stringify(mod) !== JSON.stringify(currentValue)) { + if (typeof mod === 'undefined') { + // Null is as close we could come to deleting a property when not allowing undefined. 
+ mods[propPath] = null; + } + } else { + delete mods[propPath]; + } + } + this.mods = mods; + if (!this.changed) { + return; + } + applyMods(newObj, mods); + this.oldObj = oldObj; + this.obj = newObj; + } + get changed() { + return !isEmpty(this.mods); + } + saveChange() { + if (!this.changed) { + return Promise.resolve(null); + } + return super.saveChange(); + } +} + +export class DeletedChange extends Change { + constructor({ oldObj, ...fields }) { + fields.type = CHANGE_TYPES.DELETED; + super(fields); + this.setAndValidateObj(oldObj, 'oldObj', omitIgnoredSubFields); + } +} + +export class MovedChange extends Change { + constructor({ target, position, parent, ...fields }) { + fields.type = CHANGE_TYPES.MOVED; + super(fields); + this.setAndValidateNotUndefined(target, 'target'); + this.setAndValidateLookup(position, 'position', RELATIVE_TREE_POSITIONS_LOOKUP); + this.setAndValidateNotUndefined(parent, 'parent'); + } +} + +export class CopiedChange extends Change { + constructor({ from_key, mods, target, position, excluded_descendants, parent, ...fields }) { + fields.type = CHANGE_TYPES.COPIED; + super(fields); + this.setAndValidateNotUndefined(from_key, 'from_key'); + this.setAndValidateObj(mods, 'mods', omitIgnoredSubFields); + this.setAndValidateNotUndefined(target, 'target'); + this.setAndValidateLookup(position, 'position', RELATIVE_TREE_POSITIONS_LOOKUP); + this.setAndValidateObjOrNull(excluded_descendants, 'excluded_descendants'); + this.setAndValidateNotUndefined(parent, 'parent'); + } +} + +export class PublishedChange extends Change { + constructor({ version_notes, language, ...fields }) { + fields.type = CHANGE_TYPES.PUBLISHED; + super(fields); + this.setAndValidateNotUndefined(version_notes, 'version_notes'); + this.setAndValidateNotUndefined(language, 'language'); + } +} + +export class SyncedChange extends Change { + constructor({ titles_and_descriptions, resource_details, files, assessment_items, ...fields }) { + fields.type = CHANGE_TYPES.SYNCED; + super(fields); + this.setAndValidateBoolean(titles_and_descriptions, 'titles_and_descriptions'); + this.setAndValidateBoolean(resource_details, 'resource_details'); + this.setAndValidateBoolean(files, 'files'); + this.setAndValidateBoolean(assessment_items, 'assessment_items'); + } +} + +export class DeployedChange extends Change { + constructor(fields) { + fields.type = CHANGE_TYPES.DEPLOYED; + super(fields); + } +} diff --git a/contentcuration/contentcuration/frontend/shared/data/constants.js b/contentcuration/contentcuration/frontend/shared/data/constants.js index bbc35582a3..093b435ad8 100644 --- a/contentcuration/contentcuration/frontend/shared/data/constants.js +++ b/contentcuration/contentcuration/frontend/shared/data/constants.js @@ -1,3 +1,5 @@ +import invert from 'lodash/invert'; + export const CHANGE_TYPES = { CREATED: 1, UPDATED: 2, @@ -19,6 +21,11 @@ export const CREATION_CHANGE_TYPES = [CHANGE_TYPES.CREATED, CHANGE_TYPES.COPIED] */ export const TREE_CHANGE_TYPES = [CHANGE_TYPES.CREATED, CHANGE_TYPES.COPIED, CHANGE_TYPES.MOVED]; +/** + * An inverse lookup of CHANGE_TYPES to allow validation of CHANGE_TYPE values + */ +export const CHANGE_TYPES_LOOKUP = invert(CHANGE_TYPES); + // Tables export const CHANGES_TABLE = 'changesForSyncing'; @@ -41,6 +48,11 @@ export const TABLE_NAMES = { BOOKMARK: 'bookmark', }; +/** + * An inverse lookup of TABLE_NAMES to allow validation of TABLE_NAME values + */ +export const TABLE_NAMES_LOOKUP = invert(TABLE_NAMES); + export const APP_ID = 'KolibriStudio'; // Transaction sources @@ 
-59,6 +71,8 @@ export const RELATIVE_TREE_POSITIONS = { RIGHT: 'right', }; +export const RELATIVE_TREE_POSITIONS_LOOKUP = invert(RELATIVE_TREE_POSITIONS); + // Special fields used for frontend specific handling export const COPYING_FLAG = '__COPYING'; export const TASK_ID = '__TASK_ID'; diff --git a/contentcuration/contentcuration/frontend/shared/logging.js b/contentcuration/contentcuration/frontend/shared/logging.js index 817e24f9fe..9a31ed38e1 100644 --- a/contentcuration/contentcuration/frontend/shared/logging.js +++ b/contentcuration/contentcuration/frontend/shared/logging.js @@ -2,12 +2,12 @@ import * as Sentry from '@sentry/vue'; export default { error(error, ...attachments) { - if (process.env.NODE_ENV !== 'production') { + if (process.env.NODE_ENV === 'development') { // In dev build log warnings to console for developer use console.trace(error, ...attachments); // eslint-disable-line no-console - } else { + } else if (process.env.NODE_ENV === 'production') { Sentry.withScope(function(scope) { - for (let attachment of attachments) { + for (const attachment of attachments) { scope.addAttachment(attachment); } Sentry.captureException(error); From 7feb5bf118f9f8cad78580e73fcc2af953962176 Mon Sep 17 00:00:00 2001 From: Richard Tibbles Date: Tue, 9 May 2023 19:31:59 -0700 Subject: [PATCH 05/21] Update syncChanges logic to handle queueing per tab. --- .../shared/data/__tests__/serverSync.spec.js | 219 +++++++++++++++++ .../frontend/shared/data/serverSync.js | 223 ++++++++---------- 2 files changed, 316 insertions(+), 126 deletions(-) create mode 100644 contentcuration/contentcuration/frontend/shared/data/__tests__/serverSync.spec.js diff --git a/contentcuration/contentcuration/frontend/shared/data/__tests__/serverSync.spec.js b/contentcuration/contentcuration/frontend/shared/data/__tests__/serverSync.spec.js new file mode 100644 index 0000000000..00b3244db7 --- /dev/null +++ b/contentcuration/contentcuration/frontend/shared/data/__tests__/serverSync.spec.js @@ -0,0 +1,219 @@ +import { queueChange, debouncedSyncChanges } from '../serverSync'; +import { CreatedChange } from '../changes'; +import db from '../db'; +import { Session, Task } from 'shared/data/resources'; +import client from 'shared/client'; +import { CHANGES_TABLE, CURRENT_USER, TABLE_NAMES } from 'shared/data/constants'; + +async function makeChange(key, server_rev) { + const rev = await new CreatedChange({ + key: String(key), + table: TABLE_NAMES.CONTENTNODE, + obj: { a: 1, b: 2 }, + }).saveChange(); + if (server_rev) { + await db[CHANGES_TABLE].update(rev, { server_rev }); + } + return rev; +} + +describe('Debounce behaviour', () => { + beforeEach(async () => { + await Session.table.put({ id: 0, CURRENT_USER }); + client.post.mockReset(); + }); + + it('should debounce sync changes', async () => { + // Call queueChange multiple times quickly + const change1 = await makeChange(1); + const change2 = await makeChange(2); + const change3 = await makeChange(3); + + queueChange(change1); + queueChange(change2); + queueChange(change3); + + // Wait for a short duration + await debouncedSyncChanges(); + // Check that client.post was called only once + expect(client.post).toHaveBeenCalledTimes(1); + }); +}); + +describe('ServerSync tests', () => { + beforeEach(async () => { + await Session.table.put({ id: 0, CURRENT_USER }); + client.post.mockReset(); + }); + + afterEach(async () => { + await db[CHANGES_TABLE].clear(); + }); + + it('should handle allowed responses', async () => { + const change1 = await makeChange(1); + const change2 
= await makeChange(2); + const change3 = await makeChange(3); + + const allowed = [ + { rev: change1, server_rev: 100 }, + { rev: change2, server_rev: 200 }, + { rev: change3, server_rev: 300 }, + ]; + // Mock the client.post response for this specific test + client.post.mockResolvedValue({ + data: { + disallowed: [], + allowed, + returned: [], + errors: [], + successes: [], + maxRevs: [], + tasks: [], + }, + }); + + await debouncedSyncChanges(); + + // Check that client.post was called only once + expect(client.post).toHaveBeenCalledTimes(1); + for (const allow of allowed) { + const change = await db[CHANGES_TABLE].get(allow.rev); + expect(change.server_rev).toEqual(allow.server_rev); + } + }); + it('should handle disallowed responses', async () => { + const change1 = await makeChange(1); + const change2 = await makeChange(2); + const change3 = await makeChange(3); + + const disallowed = [{ rev: change1 }, { rev: change2 }, { rev: change3 }]; + + client.post.mockResolvedValue({ + data: { + disallowed, + allowed: [], + returned: [], + errors: [], + successes: [], + maxRevs: [], + tasks: [], + }, + }); + + await debouncedSyncChanges(); + + expect(client.post).toHaveBeenCalledTimes(1); + for (const disallow of disallowed) { + const change = await db[CHANGES_TABLE].get(disallow.rev); + expect(change.disallowed).toBe(true); + } + }); + + it('should handle errors responses', async () => { + const change1 = await makeChange(1, 100); + const change2 = await makeChange(2, 200); + const change3 = await makeChange(3, 300); + + const errors = [ + { rev: change1, server_rev: 100, errored: true }, + { rev: change2, server_rev: 200, errored: true }, + { rev: change3, server_rev: 300, errored: true }, + ]; + + client.post.mockResolvedValue({ + data: { + disallowed: [], + allowed: [], + returned: [], + errors, + successes: [], + maxRevs: [], + tasks: [], + }, + }); + + await debouncedSyncChanges(); + + expect(client.post).toHaveBeenCalledTimes(1); + for (const error of errors) { + const change = await db[CHANGES_TABLE].get(error.rev); + expect(change.errored).toBe(true); + } + }); + + it('should handle successes responses', async () => { + const change1 = await makeChange(1, 100); + const change2 = await makeChange(2, 200); + const change3 = await makeChange(3, 300); + + const successes = [ + { rev: change1, server_rev: 100 }, + { rev: change2, server_rev: 200 }, + { rev: change3, server_rev: 300 }, + ]; + + client.post.mockResolvedValue({ + data: { + disallowed: [], + allowed: [], + returned: [], + errors: [], + successes, + maxRevs: [], + tasks: [], + }, + }); + + await debouncedSyncChanges(); + + expect(client.post).toHaveBeenCalledTimes(1); + for (const s of successes) { + const change = await db[CHANGES_TABLE].get(s.rev); + expect(change).not.toBeDefined(); + } + }); + + it('should handle tasks responses', async () => { + const tasks = [ + { + task_id: 'task1', + progress: 0, + status: 'PENDING', + channel_id: 'aaaaaaaaaaaaaaaaa-bbbbbbbbbbbbb-ccccccccccccc', + }, + { + task_id: 'task2', + progress: 0, + status: 'PENDING', + channel_id: 'aaaaaaaaaaaaaaaaa-bbbbbbbbbbbbb-ccccccccccccc', + }, + { + task_id: 'task3', + progress: 0, + status: 'PENDING', + channel_id: 'aaaaaaaaaaaaaaaaa-bbbbbbbbbbbbb-ccccccccccccc', + }, + ]; + + client.post.mockResolvedValue({ + data: { + disallowed: [], + allowed: [], + returned: [], + errors: [], + successes: [], + maxRevs: [], + tasks, + }, + }); + + await debouncedSyncChanges(); + + expect(client.post).toHaveBeenCalledTimes(1); + for (const task of tasks) { + const 
dbTask = await Task.get(task.task_id); + expect(dbTask.status).toEqual(task.status); + } + }); +}); diff --git a/contentcuration/contentcuration/frontend/shared/data/serverSync.js b/contentcuration/contentcuration/frontend/shared/data/serverSync.js index 72b800b1bc..1f98787972 100644 --- a/contentcuration/contentcuration/frontend/shared/data/serverSync.js +++ b/contentcuration/contentcuration/frontend/shared/data/serverSync.js @@ -1,26 +1,12 @@ -import debounce from 'lodash/debounce'; import findLastIndex from 'lodash/findLastIndex'; import get from 'lodash/get'; import pick from 'lodash/pick'; -import omit from 'lodash/omit'; import orderBy from 'lodash/orderBy'; import uniq from 'lodash/uniq'; import logging from '../logging'; import applyChanges from './applyRemoteChanges'; -import { - CHANGE_TYPES, - CHANGES_TABLE, - IGNORED_SOURCE, - CHANNEL_SYNC_KEEP_ALIVE_INTERVAL, - ACTIVE_CHANNELS, - MAX_REV_KEY, - LAST_FETCHED, - COPYING_FLAG, - TASK_ID, -} from './constants'; -import db from './db'; -import mergeAllChanges from './mergeChanges'; -import { INDEXEDDB_RESOURCES } from './registry'; +import { CHANGE_TYPES, CHANGES_TABLE, IGNORED_SOURCE, MAX_REV_KEY } from './constants'; +import db, { channelScope } from './db'; import { Channel, Session, Task } from './resources'; import client from 'shared/client'; import urls from 'shared/urls'; @@ -33,13 +19,7 @@ const SYNC_IF_NO_CHANGES_FOR = 2; // check for any updates to active channels, or the user and sync any current changes const SYNC_POLL_INTERVAL = 5; -// Flag to check if a sync is currently active. -let syncActive = false; - const commonFields = ['type', 'key', 'table', 'rev', 'channel_id', 'user_id']; -const objectFields = ['objs', 'mods']; -const ignoredSubFields = [COPYING_FLAG, LAST_FETCHED, TASK_ID]; - const ChangeTypeMapFields = { [CHANGE_TYPES.CREATED]: commonFields.concat(['obj']), [CHANGE_TYPES.UPDATED]: commonFields.concat(['mods']), @@ -62,19 +42,6 @@ const ChangeTypeMapFields = { [CHANGE_TYPES.DEPLOYED]: commonFields, }; -function isSyncableChange(change) { - const src = change.source || ''; - - return ( - !src.match(IGNORED_SOURCE) && - INDEXEDDB_RESOURCES[change.table] && - INDEXEDDB_RESOURCES[change.table].syncable && - // don't create changes when it's an update to only ignored fields - (change.type !== CHANGE_TYPES.UPDATED || - Object.keys(change.mods).some(key => !ignoredSubFields.includes(key))) - ); -} - /** * Reduces a change to only the fields that are needed for sending it to the backend * @@ -83,15 +50,7 @@ function isSyncableChange(change) { */ function trimChangeForSync(change) { // Extract the syncable fields - const payload = pick(change, ChangeTypeMapFields[change.type]); - - // for any field that has an object as a value, remove ignored fields from those objects - for (const field of objectFields) { - if (payload[field]) { - payload[field] = omit(payload[field], ignoredSubFields); - } - } - return payload; + return pick(change, ChangeTypeMapFields[change.type]); } function handleDisallowed(response) { @@ -147,6 +106,15 @@ function handleReturnedChanges(response) { return Promise.resolve(); } +// These are keys that the changes table is indexed by, so we cannot modify these during +// the modify call that we use to update the changes table, if they already exist. 
+const noModifyKeys = { + server_rev: true, + rev: true, + table: true, + type: true, +}; + function handleErrors(response) { // The errors property is an array of any changes that were sent to the server, // that were rejected, with an additional errors property that describes @@ -163,7 +131,11 @@ function handleErrors(response) { return db[CHANGES_TABLE].where('server_rev') .anyOf(Object.keys(errorMap).map(Number)) .modify(obj => { - return Object.assign(obj, errorMap[obj.server_rev]); + for (const key in errorMap[obj.server_rev]) { + if (!noModifyKeys[key] || typeof obj[key] === 'undefined') { + obj[key] = errorMap[obj.server_rev][key]; + } + } }); } return Promise.resolve(); @@ -228,6 +200,8 @@ function handleTasks(response) { const noUserError = 'No user logged in'; +const changeRevs = []; + async function syncChanges() { // Note: we could in theory use Dexie syncable for what // we are doing here, but I can't find a good way to make @@ -238,29 +212,17 @@ async function syncChanges() { // revisions. We will do this for now, but we have the option of doing // something more involved and better architectured in the future. - syncActive = true; - try { - // Track the maxRevision at this moment so that we can ignore any changes that - // might have come in during processing - leave them for the next cycle. - // This is the primary key of the change objects, so the collection is ordered by this - // by default - if we just grab the last object, we can get the key from there. - const [lastChange, user] = await Promise.all([ - db[CHANGES_TABLE].orderBy('rev').last(), - Session.getSession(), - ]); + // Get the current user - if there is no user, we can't sync. + const user = await Session.getSession(); if (!user) { // If not logged in, nothing to do. throw new Error(noUserError); } - const now = Date.now(); - const channelIds = Object.entries(user[ACTIVE_CHANNELS] || {}) - .filter(([id, time]) => id && time > now - CHANNEL_SYNC_KEEP_ALIVE_INTERVAL) - .map(([id]) => id); const channel_revs = {}; - for (const channelId of channelIds) { - channel_revs[channelId] = get(user, [MAX_REV_KEY, channelId], 0); + if (channelScope.id) { + channel_revs[channelScope.id] = get(user, [MAX_REV_KEY, channelScope.id], 0); } const unAppliedChanges = await db[CHANGES_TABLE].orderBy('server_rev') @@ -274,10 +236,12 @@ async function syncChanges() { unapplied_revs: unAppliedChanges.map(c => c.server_rev).filter(Boolean), }; - if (lastChange) { - const changesMaxRevision = lastChange.rev; + // Snapshot which revs we are syncing, so that we can + // removes them from the changeRevs array after the sync + const revsToSync = [...changeRevs]; + if (revsToSync.length) { const syncableChanges = db[CHANGES_TABLE].where('rev') - .belowOrEqual(changesMaxRevision) + .anyOf(revsToSync) .filter(c => !c.synced); const changesToSync = await syncableChanges.toArray(); // By the time we get here, our changesToSync Array should @@ -301,91 +265,98 @@ async function syncChanges() { // "successes": [], // } const response = await client.post(urls['sync'](), requestPayload); - try { - await Promise.all([ - handleDisallowed(response), - handleAllowed(response), - handleReturnedChanges(response), - handleErrors(response), - handleSuccesses(response), - handleMaxRevs(response, user.id), - handleTasks(response), - ]); - } catch (err) { - logging.error(err); - } + // Clear out this many changes from changeRevs array, since we have now synced them. 
+ changeRevs.splice(0, revsToSync.length); + await Promise.all([ + handleDisallowed(response), + handleAllowed(response), + handleReturnedChanges(response), + handleErrors(response), + handleSuccesses(response), + handleMaxRevs(response, user.id), + handleTasks(response), + ]); } catch (err) { // There was an error during syncing, log, but carry on if (err.message !== noUserError) { logging.error(err); } } - syncActive = false; } -const debouncedSyncChanges = debounce(() => { - if (!syncActive) { - return syncChanges(); - } - // TODO: actually return promise that resolves when active sync completes - return new Promise(resolve => setTimeout(resolve, 1000)); -}, SYNC_IF_NO_CHANGES_FOR * 1000); - -if (process.env.NODE_ENV !== 'production' && typeof window !== 'undefined') { - window.forceServerSync = forceServerSync; +// Set the sync debounce time artificially low in tests to avoid timeouts. +const syncDebounceWait = process.env.NODE_ENV === 'test' ? 1 : SYNC_IF_NO_CHANGES_FOR * 1000; +let syncDebounceTimer; +const syncResolveRejectStack = []; +let syncingPromise = Promise.resolve(); + +function doSyncChanges() { + syncDebounceTimer = null; + // Splice all the resolve/reject handlers off the stack + const resolveRejectStack = syncResolveRejectStack.splice(0); + // Wait for any existing sync to complete, then sync again. + syncingPromise = syncingPromise + .then(syncChanges) + .then(result => { + // If it is successful call all of the resolve functions that we have stored + // from all the Promises that have been returned while this specific debounce + // has been active. + for (const [resolve] of resolveRejectStack) { + resolve(result); + } + }) + .catch(err => { + // If there is an error call reject for all previously returned promises. + for (const [, reject] of resolveRejectStack) { + reject(err); + } + }); + return syncingPromise; } -async function handleChanges(changes) { - const syncableChanges = changes.filter(isSyncableChange); - // Here we are handling changes triggered by Dexie Observable - // this is listening to all of our IndexedDB tables, both for resources - // and for our special Changes table. - // Here we look for any newly created changes in the changes table, which may require - // a sync to send them to the backend - this is particularly important for - // MOVE, COPY, PUBLISH, and SYNC changes where we may be executing them inside an IGNORED_SOURCE - // because they also make UPDATE and CREATE changes that we wish to make in the client only. - // Only do this for changes that are not marked as synced. 
- const newChangeTableEntries = changes.some( - c => c.table === CHANGES_TABLE && c.type === CHANGE_TYPES.CREATED && !c.obj.synced - ); - - if (syncableChanges.length) { - // Flatten any changes before we store them in the changes table - const mergedSyncableChanges = mergeAllChanges(syncableChanges, true).map(change => { - // Filter out the rev property as we want that to be assigned during the bulkPut - const { rev, ...filteredChange } = change; // eslint-disable-line no-unused-vars - // Set appropriate contextual information on changes, channel_id and user_id - INDEXEDDB_RESOURCES[change.table].setChannelIdOnChange(filteredChange); - INDEXEDDB_RESOURCES[change.table].setUserIdOnChange(filteredChange); - return filteredChange; - }); +export function debouncedSyncChanges(flush = false) { + // Logic for promise returning debounce vendored and modified from: + // https://github.com/sindresorhus/p-debounce/blob/main/index.js + // Return a promise that will consistently resolve when this debounced + // function invocation is completed. + return new Promise((resolve, reject) => { + // Clear any current timeouts, so that this invocation takes precedence + // Any subsequent calls will then also revoke this timeout. + clearTimeout(syncDebounceTimer); + // Add the resolve and reject handlers for this promise to the stack here. + syncResolveRejectStack.push([resolve, reject]); + if (flush) { + // If immediate invocation is required immediately call doSyncChanges + // rather than using a timeout delay. + doSyncChanges(); + } else { + // Otherwise update the timeout to this invocation. + syncDebounceTimer = setTimeout(doSyncChanges, syncDebounceWait); + } + }); +} - await db[CHANGES_TABLE].bulkPut(mergedSyncableChanges); +export function queueChange(rev) { + if (rev) { + changeRevs.push(rev); } + debouncedSyncChanges(); +} - // If we detect changes were written to the changes table - // then we'll trigger sync - if (syncableChanges.length || newChangeTableEntries) { - debouncedSyncChanges(); - } +if (process.env.NODE_ENV !== 'production' && typeof window !== 'undefined') { + window.forceServerSync = forceServerSync; } let intervalTimer; export function startSyncing() { - // Initiate a sync immediately in case any data - // is left over in the database. - debouncedSyncChanges(); // Start the sync interval intervalTimer = setInterval(debouncedSyncChanges, SYNC_POLL_INTERVAL * 1000); - db.on('changes', handleChanges); } export function stopSyncing() { clearInterval(intervalTimer); - debouncedSyncChanges.cancel(); - // Dexie's slightly counterintuitive method for unsubscribing from events - db.on('changes').unsubscribe(handleChanges); + debouncedSyncChanges(true); } /** @@ -393,5 +364,5 @@ export function stopSyncing() { */ export function forceServerSync() { debouncedSyncChanges(); - return debouncedSyncChanges.flush(); + return debouncedSyncChanges(true); } From 454bb2360989d888cdcdd28eb63f7c8437815d85 Mon Sep 17 00:00:00 2001 From: Richard Tibbles Date: Tue, 9 May 2023 19:58:20 -0700 Subject: [PATCH 06/21] Update forceServerSync to still attempt to sync all unsynced changes. 
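
For illustration, a rough sketch of how the two paths are meant to differ after
this change (using the queueChange/forceServerSync exports as they stand in this
series; rev is whatever saving a change row returned):

    import { queueChange, forceServerSync } from 'shared/data/serverSync';

    async function example(rev) {
      // Editing path: queue the rev of a change row that was just written to
      // the changes table; the debounce decides when to POST, and only the
      // queued revs are sent (syncAll stays false).
      queueChange(rev);

      // Explicit flush path: skip the debounce delay and also pick up any
      // unsynced revs still sitting in the changes table (syncAll = true).
      await forceServerSync();
    }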
--- .../frontend/shared/data/serverSync.js | 23 +++++++++++-------- 1 file changed, 14 insertions(+), 9 deletions(-) diff --git a/contentcuration/contentcuration/frontend/shared/data/serverSync.js b/contentcuration/contentcuration/frontend/shared/data/serverSync.js index 1f98787972..69be274d46 100644 --- a/contentcuration/contentcuration/frontend/shared/data/serverSync.js +++ b/contentcuration/contentcuration/frontend/shared/data/serverSync.js @@ -202,7 +202,7 @@ const noUserError = 'No user logged in'; const changeRevs = []; -async function syncChanges() { +async function syncChanges(syncAllChanges) { // Note: we could in theory use Dexie syncable for what // we are doing here, but I can't find a good way to make // it ignore our regular API calls for seeding the database @@ -238,7 +238,13 @@ async function syncChanges() { // Snapshot which revs we are syncing, so that we can // removes them from the changeRevs array after the sync - const revsToSync = [...changeRevs]; + const revsToSync = []; + if (syncAllChanges) { + const unsyncedRevs = await db[CHANGES_TABLE].filter(c => !c.synced).primaryKeys(); + revsToSync.push(...unsyncedRevs); + } else { + revsToSync.push(...changeRevs); + } if (revsToSync.length) { const syncableChanges = db[CHANGES_TABLE].where('rev') .anyOf(revsToSync) @@ -290,13 +296,13 @@ let syncDebounceTimer; const syncResolveRejectStack = []; let syncingPromise = Promise.resolve(); -function doSyncChanges() { +function doSyncChanges(syncAll = false) { syncDebounceTimer = null; // Splice all the resolve/reject handlers off the stack const resolveRejectStack = syncResolveRejectStack.splice(0); // Wait for any existing sync to complete, then sync again. syncingPromise = syncingPromise - .then(syncChanges) + .then(() => syncChanges(syncAll)) .then(result => { // If it is successful call all of the resolve functions that we have stored // from all the Promises that have been returned while this specific debounce @@ -314,7 +320,7 @@ function doSyncChanges() { return syncingPromise; } -export function debouncedSyncChanges(flush = false) { +export function debouncedSyncChanges(flush = false, syncAll = false) { // Logic for promise returning debounce vendored and modified from: // https://github.com/sindresorhus/p-debounce/blob/main/index.js // Return a promise that will consistently resolve when this debounced @@ -328,10 +334,10 @@ export function debouncedSyncChanges(flush = false) { if (flush) { // If immediate invocation is required immediately call doSyncChanges // rather than using a timeout delay. - doSyncChanges(); + doSyncChanges(syncAll); } else { // Otherwise update the timeout to this invocation. - syncDebounceTimer = setTimeout(doSyncChanges, syncDebounceWait); + syncDebounceTimer = setTimeout(() => doSyncChanges(syncAll), syncDebounceWait); } }); } @@ -363,6 +369,5 @@ export function stopSyncing() { * @return {Promise} */ export function forceServerSync() { - debouncedSyncChanges(); - return debouncedSyncChanges(true); + return debouncedSyncChanges(true, true); } From fdded7843ae0a1d367f7fc1767da63bf54292a92 Mon Sep 17 00:00:00 2001 From: Richard Tibbles Date: Tue, 9 May 2023 20:41:14 -0700 Subject: [PATCH 07/21] Link up change objects to the resource layer. Clean up unused active channel monitoring logic. 
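
A rough sketch of the pattern this introduces, using the change classes and the
saveAndQueueChange helper added here (the node value is hypothetical):

    import { TABLE_NAMES } from 'shared/data/constants';
    import { CreatedChange } from 'shared/data/changes';

    async function recordCreation(newNode) {
      // Persist the creation to the changes table, then queue it so the next
      // debounced sync sends it to /api/sync.
      const change = new CreatedChange({
        key: newNode.id,
        table: TABLE_NAMES.CONTENTNODE,
        obj: newNode,
      });
      await change.saveAndQueueChange();
    }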
--- .../frontend/shared/data/changes.js | 6 + .../frontend/shared/data/constants.js | 5 - .../frontend/shared/data/resources.js | 217 +++++++++--------- 3 files changed, 113 insertions(+), 115 deletions(-) diff --git a/contentcuration/contentcuration/frontend/shared/data/changes.js b/contentcuration/contentcuration/frontend/shared/data/changes.js index c8f8c35de9..47ba9f20a8 100644 --- a/contentcuration/contentcuration/frontend/shared/data/changes.js +++ b/contentcuration/contentcuration/frontend/shared/data/changes.js @@ -7,6 +7,7 @@ import omit from 'lodash/omit'; import sortBy from 'lodash/sortBy'; import logging from '../logging'; import { applyMods } from './applyRemoteChanges'; +import { queueChange } from './serverSync'; import db from 'shared/data/db'; import { promiseChunk } from 'shared/utils/helpers'; import { @@ -270,6 +271,11 @@ class Change { saveChange() { return db[CHANGES_TABLE].add(this); } + saveAndQueueChange() { + return this.saveChange().then(() => { + return queueChange(this); + }); + } } export class CreatedChange extends Change { diff --git a/contentcuration/contentcuration/frontend/shared/data/constants.js b/contentcuration/contentcuration/frontend/shared/data/constants.js index 093b435ad8..d7e8f97139 100644 --- a/contentcuration/contentcuration/frontend/shared/data/constants.js +++ b/contentcuration/contentcuration/frontend/shared/data/constants.js @@ -82,9 +82,4 @@ export const LAST_FETCHED = '__last_fetch'; // user object from the session table export const CURRENT_USER = 'CURRENT_USER'; -// A key in the session table that stores the currently active channels to listen for updates -export const ACTIVE_CHANNELS = 'ACTIVE_CHANNELS'; - -export const CHANNEL_SYNC_KEEP_ALIVE_INTERVAL = 300 * 1000; - export const MAX_REV_KEY = 'max_rev'; diff --git a/contentcuration/contentcuration/frontend/shared/data/resources.js b/contentcuration/contentcuration/frontend/shared/data/resources.js index c71b4fbb18..da97b40e8a 100644 --- a/contentcuration/contentcuration/frontend/shared/data/resources.js +++ b/contentcuration/contentcuration/frontend/shared/data/resources.js @@ -22,8 +22,6 @@ import { COPYING_FLAG, TASK_ID, CURRENT_USER, - ACTIVE_CHANNELS, - CHANNEL_SYNC_KEEP_ALIVE_INTERVAL, MAX_REV_KEY, LAST_FETCHED, CREATION_CHANGE_TYPES, @@ -33,6 +31,16 @@ import applyChanges, { applyMods, collectChanges } from './applyRemoteChanges'; import mergeAllChanges from './mergeChanges'; import db, { channelScope, CLIENTID, Collection } from './db'; import { API_RESOURCES, INDEXEDDB_RESOURCES } from './registry'; +import { + CreatedChange, + UpdatedChange, + DeletedChange, + MovedChange, + CopiedChange, + PublishedChange, + SyncedChange, + DeployedChange, +} from './changes'; import urls from 'shared/urls'; import { currentLanguage } from 'shared/i18n'; import client, { paramsSerializer } from 'shared/client'; @@ -244,8 +252,7 @@ class IndexedDBResource { transaction({ mode = 'rw', source = null } = {}, ...extraTables) { const callback = extraTables.pop(); if (source === null) { - const channelScopeId = channelScope.id; - source = channelScopeId === null ? 
CLIENTID : `${CLIENTID}::${channelScopeId}`; + source = CLIENTID; } return db.transaction(mode, this.tableName, ...extraTables, () => { Dexie.currentTransaction.source = source; @@ -518,18 +525,31 @@ class IndexedDBResource { return out; } - update(id, changes) { - return this.transaction({ mode: 'rw' }, () => { - return this.table.update(id, this._cleanNew(changes)); + async _updateChange(id, changes, oldObj = null) { + if (!this.syncable) { + return Promise.resolve(); + } + if (!oldObj) { + oldObj = await this.get(id); + } + if (!oldObj) { + return Promise.resolve(); + } + const change = new UpdatedChange({ + key: id, + table: this.tableName, + oldObj, + changes, }); + return change.saveAndQueueChange(); } - modifyByIds(ids, changes) { - return this.transaction({ mode: 'rw' }, () => { - return this.table - .where(this.rawIdField) - .anyOf(ids) - .modify(this._cleanNew(changes)); + update(id, changes) { + return this.transaction({ mode: 'rw' }, CHANGES_TABLE, () => { + changes = this._cleanNew(changes); + return this.table.update(id, changes).then(val => { + return this._updateChange(id, changes).then(() => val); + }); }); } @@ -570,14 +590,28 @@ class IndexedDBResource { }); } + _createChange(id, obj) { + if (!this.syncable) { + return Promise.resolve(); + } + const change = new CreatedChange({ + key: id, + table: this.tableName, + obj, + }); + return change.saveAndQueueChange(); + } + /** * @param {Object} obj * @return {Promise} */ add(obj) { return this.transaction({ mode: 'rw' }, CHANGES_TABLE, () => { - // TODO: Finish implementation of add by adding explicit CREATE change to changes table - return this.table.add(this._prepareAdd(obj)); + obj = this._prepareAdd(obj); + return this.table.add(obj).then(id => { + return this._createChange(id, obj).then(() => id); + }); }); } @@ -595,9 +629,28 @@ class IndexedDBResource { }); } + _deleteChange(id) { + if (!this.syncable) { + return Promise.resolve(); + } + return this.get(id).then(oldObj => { + if (!oldObj) { + return Promise.resolve(); + } + const change = new DeletedChange({ + key: id, + table: this.tableName, + oldObj, + }); + return change.saveAndQueueChange(); + }); + } + delete(id) { - return this.transaction({ mode: 'rw' }, () => { - return this.table.delete(id); + return this.transaction({ mode: 'rw' }, CHANGES_TABLE, () => { + return this.table.delete(id).then(ret => { + return this._deleteChange(id).then(() => ret); + }); }); } @@ -922,28 +975,6 @@ export const Session = new IndexedDBResource({ get currentChannelId() { return this.currentChannel.channel_id || null; }, - channelSyncKeepAlive() { - if (this.currentChannelId && document.hasFocus()) { - this.updateSession({ [`${ACTIVE_CHANNELS}.${this.currentChannelId}`]: Date.now() }); - } - }, - monitorKeepAlive() { - if (this.currentChannelId) { - this.channelSyncKeepAlive(); - if (!this._keepAliveInterval) { - this._keepAliveInterval = setInterval( - () => this.channelSyncKeepAlive(), - CHANNEL_SYNC_KEEP_ALIVE_INTERVAL - ); - } - } - }, - stopMonitorKeepAlive() { - if (this._keepAliveInterval) { - clearInterval(this._keepAliveInterval); - this._keepAliveInterval = null; - } - }, async setChannelScope() { const channelId = this.currentChannelId; if (channelId) { @@ -954,16 +985,6 @@ export const Session = new IndexedDBResource({ await this.updateSession({ [`${MAX_REV_KEY}.${channelId}`]: channelRev, }); - this.monitorKeepAlive(); - window.addEventListener('focus', () => this.monitorKeepAlive()); - window.addEventListener('blur', () => this.stopMonitorKeepAlive()); - 
document.addEventListener('visibilitychange', () => { - if (document.hidden) { - this.stopMonitorKeepAlive(); - } else { - this.monitorKeepAlive(); - } - }); } }, async getSession() { @@ -1018,20 +1039,20 @@ export const Channel = new Resource({ * @return {Promise} */ update(id, { content_defaults = {}, ...changes }) { - return this.transaction({ mode: 'rw' }, () => { - return this.table - .where('id') - .equals(id) - .modify(channel => { - if (Object.keys(content_defaults).length) { - if (!channel.content_defaults) { - channel.content_defaults = {}; - } - Object.assign(channel.content_defaults, content_defaults); + return this.transaction({ mode: 'rw' }, CHANGES_TABLE, () => { + return this.table.get(id).then(channel => { + if (Object.keys(content_defaults).length) { + if (!channel.content_defaults) { + channel.content_defaults = {}; } - - Object.assign(channel, changes); + const mergedContentDefaults = {}; + Object.assign(mergedContentDefaults, channel.content_defaults, content_defaults); + changes.content_defaults = mergedContentDefaults; + } + return this.table.update(id, changes).then(ret => { + return this._updateChange(id, changes, channel).then(() => ret); }); + }); }); }, @@ -1047,22 +1068,19 @@ export const Channel = new Resource({ return this.transaction({ mode: 'rw', source: IGNORED_SOURCE }, () => { return this.table.update(id, { publishing: true }); }).then(() => { - const change = { + const change = new PublishedChange({ key: id, version_notes, language: currentLanguage, - source: CLIENTID, table: this.tableName, - type: CHANGE_TYPES.PUBLISHED, - channel_id: id, - }; + }); return this.transaction( { mode: 'rw', source: IGNORED_SOURCE }, CHANGES_TABLE, TABLE_NAMES.CONTENTNODE, () => { return Promise.all([ - db[CHANGES_TABLE].put(change), + change.saveAndQueueChange(), ContentNode.table.where({ channel_id: id }).modify({ changed: false, published: true, @@ -1076,15 +1094,13 @@ export const Channel = new Resource({ }, deploy(id) { - const change = { + const change = new DeployedChange({ key: id, source: CLIENTID, table: this.tableName, - type: CHANGE_TYPES.DEPLOYED, - channel_id: id, - }; + }); return this.transaction({ mode: 'rw', source: IGNORED_SOURCE }, CHANGES_TABLE, () => { - return db[CHANGES_TABLE].put(change); + return change.saveAndQueueChange(); }); }, @@ -1122,19 +1138,16 @@ export const Channel = new Resource({ assessment_items = false, } = {} ) { - const change = { + const change = new SyncedChange({ key: id, titles_and_descriptions, resource_details, files, assessment_items, - source: CLIENTID, table: this.tableName, - type: CHANGE_TYPES.SYNCED, - channel_id: id, - }; + }); return this.transaction({ mode: 'rw', source: IGNORED_SOURCE }, CHANGES_TABLE, () => { - return db[CHANGES_TABLE].put(change); + return change.saveAndQueueChange(); }); }, @@ -1155,8 +1168,8 @@ export const Channel = new Resource({ }, }); -function setChannelIdFromTransactionSource(change) { - const channel_id = change.source.split('::').slice(1)[0]; +function setChannelFromChannelScope(change) { + const channel_id = channelScope.id; if (channel_id) { change.channel_id = channel_id; } @@ -1168,7 +1181,7 @@ export const ContentNodePrerequisite = new IndexedDBResource({ idField: '[target_node+prerequisite]', uuid: false, syncable: true, - setChannelIdOnChange: setChannelIdFromTransactionSource, + setChannelIdOnChange: setChannelFromChannelScope, }); export const ContentNode = new TreeResource({ @@ -1358,26 +1371,8 @@ export const ContentNode = new TreeResource({ changed: true, }; - let 
channel_id = parent.channel_id; - - // This should really only happen when this is a move operation to the trash tree. - // Other cases like copying shouldn't encounter this because they should always have the - // channel_id on the destination. The trash tree root node does not have the channel_id - // annotated on it, and using the source node's channel_id should be reliable - // in that case - if (!channel_id && node) { - channel_id = node.channel_id; - - // The change would be disallowed anyway, so prevent the frontend from applying it - if (!channel_id) { - return Promise.reject( - new Error('Missing channel_id for tree insertion change event') - ); - } - } - // Prep the change data tracked in the changes table - const change = { + const changeData = { key: payload.id, from_key: isCreate ? id : null, target, @@ -1388,17 +1383,14 @@ export const ContentNode = new TreeResource({ // are pending. parent: parent.id, oldObj: isCreate ? null : node, - source: CLIENTID, table: this.tableName, - type: isCreate ? CHANGE_TYPES.COPIED : CHANGE_TYPES.MOVED, - channel_id, }; return callback({ node, parent, payload, - change, + changeData, }); } ); @@ -1423,7 +1415,10 @@ export const ContentNode = new TreeResource({ true, data => { return this.transaction({ mode: 'rw' }, () => { - return this.table.add({ ...prepared, ...data.payload }); + const obj = { ...prepared, ...data.payload }; + return this.table.add(obj).then(id => { + return this._createChange(id, obj).then(() => id); + }); }); } ); @@ -1434,7 +1429,8 @@ export const ContentNode = new TreeResource({ // Ignore changes from this operation except for the explicit move change we generate. return this.transaction({ mode: 'rw', source: IGNORED_SOURCE }, CHANGES_TABLE, async () => { const payload = await this.tableMove(data); - await db[CHANGES_TABLE].put(data.change); + const change = new MovedChange(data.changeData); + await change.saveAndQueueChange(); return payload; }); }); @@ -1481,7 +1477,8 @@ export const ContentNode = new TreeResource({ // explicit copy change we generate. return this.transaction({ mode: 'rw', source: IGNORED_SOURCE }, CHANGES_TABLE, async () => { const payload = await this.tableCopy(data); - await db[CHANGES_TABLE].put(data.change); + const change = new CopiedChange(data.changeData); + await change.saveAndQueueChange(); return payload; }); }); @@ -1546,7 +1543,7 @@ export const ContentNode = new TreeResource({ return node; }); }, - setChannelIdOnChange: setChannelIdFromTransactionSource, + setChannelIdOnChange: setChannelFromChannelScope, /** * Waits for copying of content nodes to complete for all ids referenced in `ids` array @@ -1806,7 +1803,7 @@ export const AssessmentItem = new Resource({ }); }); }, - setChannelIdOnChange: setChannelIdFromTransactionSource, + setChannelIdOnChange: setChannelFromChannelScope, }); export const File = new Resource({ @@ -1835,7 +1832,7 @@ export const File = new Resource({ }); }); }, - setChannelIdOnChange: setChannelIdFromTransactionSource, + setChannelIdOnChange: setChannelFromChannelScope, }); export const Clipboard = new TreeResource({ From 3dd6ca3ec105fb87d0dc7d7e7af0bba07fda4c93 Mon Sep 17 00:00:00 2001 From: Richard Tibbles Date: Tue, 9 May 2023 20:41:57 -0700 Subject: [PATCH 08/21] Remove use of leader election. Do background polling only when document is visible. 
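
In outline, the per-tab replacement for leader election is just a visibility
listener (a simplified sketch of what initializeDB does after this change):

    import { startSyncing, stopSyncing } from 'shared/data/serverSync';

    // Each tab polls while visible; a hidden tab clears its poll interval and
    // flushes any queued changes via stopSyncing().
    document.addEventListener('visibilitychange', () => {
      if (document.hidden) {
        stopSyncing();
      } else {
        startSyncing();
      }
    });
    if (!document.hidden) {
      startSyncing();
    }

Since each tab now only queues revs for changes it made itself, letting several
visible tabs sync in parallel should be safe, which is what makes dropping the
broadcast-channel election possible.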
--- .../frontend/shared/data/index.js | 33 ++++++------------- 1 file changed, 10 insertions(+), 23 deletions(-) diff --git a/contentcuration/contentcuration/frontend/shared/data/index.js b/contentcuration/contentcuration/frontend/shared/data/index.js index 70588e4079..64dd646c7e 100644 --- a/contentcuration/contentcuration/frontend/shared/data/index.js +++ b/contentcuration/contentcuration/frontend/shared/data/index.js @@ -1,7 +1,6 @@ import Dexie from 'dexie'; import * as Sentry from '@sentry/vue'; import mapValues from 'lodash/mapValues'; -import channel from './broadcastChannel'; import { CHANGES_TABLE, IGNORED_SOURCE, TABLE_NAMES } from './constants'; import db from './db'; import { INDEXEDDB_RESOURCES } from './registry'; @@ -12,8 +11,6 @@ import * as resources from './resources'; export { CHANGE_TYPES, TABLE_NAMES } from './constants'; export { API_RESOURCES, INDEXEDDB_RESOURCES } from './registry'; -const { createLeaderElection } = require('broadcast-channel'); - export function setupSchema() { if (!Object.keys(resources).length) { console.warn('No resources defined!'); // eslint-disable-line no-console @@ -42,25 +39,6 @@ if (process.env.NODE_ENV !== 'production' && typeof window !== 'undefined') { window.resetDB = resetDB; } -function runElection() { - const elector = createLeaderElection(channel); - - elector.awaitLeadership().then(startSyncing); - elector.onduplicate = () => { - stopSyncing(); - elector - .die() - .then(() => { - // manually reset reference to dead elector on the channel - // which is set within `createLeaderElection` and whose - // presence is also validated against, requiring its removal - channel._leaderElector = null; - return runElection(); - }) - .catch(Sentry.captureException); - }; -} - function applyResourceListener(change) { const resource = INDEXEDDB_RESOURCES[change.table]; if (resource && resource.listeners && resource.listeners[change.type]) { @@ -73,7 +51,16 @@ export async function initializeDB() { setupSchema(); await db.open(); db.on('changes', changes => changes.map(applyResourceListener)); - await runElection(); + document.addEventListener('visibilitychange', () => { + if (document.hidden) { + stopSyncing(); + } else { + startSyncing(); + } + }); + if (!document.hidden) { + startSyncing(); + } } catch (e) { Sentry.captureException(e); } From cd498125c15a4170c4aaacbf45477f6d5bdfb118 Mon Sep 17 00:00:00 2001 From: Richard Tibbles Date: Tue, 9 May 2023 20:42:46 -0700 Subject: [PATCH 09/21] Use serviceWorker during development. --- .../contentcuration/frontend/shared/app.js | 2 +- contentcuration/contentcuration/views/pwa.py | 21 +++++----- webpack.config.js | 40 +++++++++++++------ 3 files changed, 39 insertions(+), 24 deletions(-) diff --git a/contentcuration/contentcuration/frontend/shared/app.js b/contentcuration/contentcuration/frontend/shared/app.js index 1918b41125..071e0c0813 100644 --- a/contentcuration/contentcuration/frontend/shared/app.js +++ b/contentcuration/contentcuration/frontend/shared/app.js @@ -256,7 +256,7 @@ Vue.component('Menu', Menu); function initiateServiceWorker() { // Second conditional must be removed if you are doing dev work on the service // worker. 
- if ('serviceWorker' in navigator && process.env.NODE_ENV === 'production') { + if ('serviceWorker' in navigator) { const wb = new Workbox(window.Urls.service_worker()); let registration; diff --git a/contentcuration/contentcuration/views/pwa.py b/contentcuration/contentcuration/views/pwa.py index 8c7028804c..5ac0100e7b 100644 --- a/contentcuration/contentcuration/views/pwa.py +++ b/contentcuration/contentcuration/views/pwa.py @@ -1,5 +1,6 @@ import os +from django.conf import settings from django.contrib.staticfiles import finders from django.contrib.staticfiles.storage import staticfiles_storage from django.views.generic.base import TemplateView @@ -12,17 +13,17 @@ class ServiceWorkerView(TemplateView): def get_context_data(self, **kwargs): context = super().get_context_data(**kwargs) # Code to access the compiled serivce worker in developer mode - # if getattr(settings, "DEBUG", False): - # import requests + if getattr(settings, "DEBUG", False): + import requests - # request = requests.get("http://127.0.0.1:4000/dist/serviceWorker.js") - # content = request.content - # else: - path = staticfiles_storage.path("studio/serviceWorker.js") - if not os.path.exists(path): - path = finders.find("studio/serviceWorker.js") - with open(path) as f: - content = f.read() + request = requests.get("http://127.0.0.1:4000/dist/serviceWorker.js") + content = request.content.decode("utf-8") + else: + path = staticfiles_storage.path("studio/serviceWorker.js") + if not os.path.exists(path): + path = finders.find("studio/serviceWorker.js") + with open(path) as f: + content = f.read() context["webpack_service_worker"] = content return context diff --git a/webpack.config.js b/webpack.config.js index 06d11b4dba..95e9802207 100644 --- a/webpack.config.js +++ b/webpack.config.js @@ -36,6 +36,30 @@ module.exports = (env = {}) => { const baseCssLoaders = base.module.rules[1].use; + const workboxPlugin = new InjectManifest({ + swSrc: path.resolve(srcDir, 'serviceWorker/index.js'), + swDest: 'serviceWorker.js', + exclude: dev ? [/./] : [/\.map$/, /^manifest.*\.js$/] + }); + + if (dev) { + // Suppress the "InjectManifest has been called multiple times" warning by reaching into + // the private properties of the plugin and making sure it never ends up in the state + // where it makes that warning. + // https://github.com/GoogleChrome/workbox/blob/v6/packages/workbox-webpack-plugin/src/inject-manifest.ts#L260-L282 + // Solution taken from here: + // https://github.com/GoogleChrome/workbox/issues/1790#issuecomment-1241356293 + Object.defineProperty(workboxPlugin, "alreadyCalled", { + get() { + return false + }, + set() { + // do nothing; the internals try to set it to true, which then results in a warning + // on the next run of webpack. + }, + }) + } + return merge(base, { context: srcDir, entry: { @@ -52,7 +76,7 @@ module.exports = (env = {}) => { }, output: { filename: dev ? '[name].js' : '[name]-[fullhash].js', - chunkFilename: dev ? '[name]-[id].js' : '[name]-[id]-[fullhash].js', + chunkFilename: '[name]-[id]-[fullhash].js', path: bundleOutputDir, publicPath: dev ? 'http://127.0.0.1:4000/dist/' : '/static/studio/', pathinfo: !dev, @@ -117,18 +141,8 @@ module.exports = (env = {}) => { // set the current working directory for displaying module paths cwd: process.cwd(), }), - // This must be added in dev mode if you want to do dev work - // on the service worker. - ].concat( - dev - ? 
[] - : [ - new InjectManifest({ - swSrc: path.resolve(srcDir, 'serviceWorker/index.js'), - swDest: 'serviceWorker.js', - }), - ] - ), + workboxPlugin, + ], stats: 'normal', }); }; From ce0bf14ed6f14cbeb42d349a312a4ac55f5be907 Mon Sep 17 00:00:00 2001 From: Richard Tibbles Date: Tue, 9 May 2023 21:08:30 -0700 Subject: [PATCH 10/21] Prevent circular dependencies. Use reactive watch to trigger queueing. --- .../frontend/shared/data/changes.js | 6 ----- .../frontend/shared/data/index.js | 3 ++- .../frontend/shared/data/registry.js | 4 ++++ .../frontend/shared/data/resources.js | 24 ++++++++++++------- .../frontend/shared/data/serverSync.js | 8 +++++-- 5 files changed, 27 insertions(+), 18 deletions(-) diff --git a/contentcuration/contentcuration/frontend/shared/data/changes.js b/contentcuration/contentcuration/frontend/shared/data/changes.js index 47ba9f20a8..c8f8c35de9 100644 --- a/contentcuration/contentcuration/frontend/shared/data/changes.js +++ b/contentcuration/contentcuration/frontend/shared/data/changes.js @@ -7,7 +7,6 @@ import omit from 'lodash/omit'; import sortBy from 'lodash/sortBy'; import logging from '../logging'; import { applyMods } from './applyRemoteChanges'; -import { queueChange } from './serverSync'; import db from 'shared/data/db'; import { promiseChunk } from 'shared/utils/helpers'; import { @@ -271,11 +270,6 @@ class Change { saveChange() { return db[CHANGES_TABLE].add(this); } - saveAndQueueChange() { - return this.saveChange().then(() => { - return queueChange(this); - }); - } } export class CreatedChange extends Change { diff --git a/contentcuration/contentcuration/frontend/shared/data/index.js b/contentcuration/contentcuration/frontend/shared/data/index.js index 64dd646c7e..facdaa067c 100644 --- a/contentcuration/contentcuration/frontend/shared/data/index.js +++ b/contentcuration/contentcuration/frontend/shared/data/index.js @@ -4,7 +4,7 @@ import mapValues from 'lodash/mapValues'; import { CHANGES_TABLE, IGNORED_SOURCE, TABLE_NAMES } from './constants'; import db from './db'; import { INDEXEDDB_RESOURCES } from './registry'; -import { startSyncing, stopSyncing } from './serverSync'; +import { startSyncing, stopSyncing, syncOnChanges } from './serverSync'; import * as resources from './resources'; // Re-export for ease of reference. 
@@ -61,6 +61,7 @@ export async function initializeDB() { if (!document.hidden) { startSyncing(); } + syncOnChanges(); } catch (e) { Sentry.captureException(e); } diff --git a/contentcuration/contentcuration/frontend/shared/data/registry.js b/contentcuration/contentcuration/frontend/shared/data/registry.js index 47d5afcc18..f56ba98e39 100644 --- a/contentcuration/contentcuration/frontend/shared/data/registry.js +++ b/contentcuration/contentcuration/frontend/shared/data/registry.js @@ -1,4 +1,8 @@ +import Vue from 'vue'; + // Put resource registry objects in here to avoid circular import issues export const API_RESOURCES = {}; export const INDEXEDDB_RESOURCES = {}; + +export const changeRevs = Vue.observable([]); diff --git a/contentcuration/contentcuration/frontend/shared/data/resources.js b/contentcuration/contentcuration/frontend/shared/data/resources.js index da97b40e8a..4c64769d53 100644 --- a/contentcuration/contentcuration/frontend/shared/data/resources.js +++ b/contentcuration/contentcuration/frontend/shared/data/resources.js @@ -30,7 +30,7 @@ import { import applyChanges, { applyMods, collectChanges } from './applyRemoteChanges'; import mergeAllChanges from './mergeChanges'; import db, { channelScope, CLIENTID, Collection } from './db'; -import { API_RESOURCES, INDEXEDDB_RESOURCES } from './registry'; +import { API_RESOURCES, INDEXEDDB_RESOURCES, changeRevs } from './registry'; import { CreatedChange, UpdatedChange, @@ -525,6 +525,12 @@ class IndexedDBResource { return out; } + _saveAndQueueChange(change) { + return change.saveChange().then(rev => { + changeRevs.push(rev); + }); + } + async _updateChange(id, changes, oldObj = null) { if (!this.syncable) { return Promise.resolve(); @@ -541,7 +547,7 @@ class IndexedDBResource { oldObj, changes, }); - return change.saveAndQueueChange(); + return this._saveAndQueueChange(change); } update(id, changes) { @@ -599,7 +605,7 @@ class IndexedDBResource { table: this.tableName, obj, }); - return change.saveAndQueueChange(); + return this._saveAndQueueChange(change); } /** @@ -642,7 +648,7 @@ class IndexedDBResource { table: this.tableName, oldObj, }); - return change.saveAndQueueChange(); + return this._saveAndQueueChange(change); }); } @@ -1080,7 +1086,7 @@ export const Channel = new Resource({ TABLE_NAMES.CONTENTNODE, () => { return Promise.all([ - change.saveAndQueueChange(), + this._saveAndQueueChange(change), ContentNode.table.where({ channel_id: id }).modify({ changed: false, published: true, @@ -1100,7 +1106,7 @@ export const Channel = new Resource({ table: this.tableName, }); return this.transaction({ mode: 'rw', source: IGNORED_SOURCE }, CHANGES_TABLE, () => { - return change.saveAndQueueChange(); + return this._saveAndQueueChange(change); }); }, @@ -1147,7 +1153,7 @@ export const Channel = new Resource({ table: this.tableName, }); return this.transaction({ mode: 'rw', source: IGNORED_SOURCE }, CHANGES_TABLE, () => { - return change.saveAndQueueChange(); + return this._saveAndQueueChange(change); }); }, @@ -1430,7 +1436,7 @@ export const ContentNode = new TreeResource({ return this.transaction({ mode: 'rw', source: IGNORED_SOURCE }, CHANGES_TABLE, async () => { const payload = await this.tableMove(data); const change = new MovedChange(data.changeData); - await change.saveAndQueueChange(); + await this._saveAndQueueChange(change); return payload; }); }); @@ -1478,7 +1484,7 @@ export const ContentNode = new TreeResource({ return this.transaction({ mode: 'rw', source: IGNORED_SOURCE }, CHANGES_TABLE, async () => { const payload = await 
this.tableCopy(data); const change = new CopiedChange(data.changeData); - await change.saveAndQueueChange(); + await this._saveAndQueueChange(change); return payload; }); }); diff --git a/contentcuration/contentcuration/frontend/shared/data/serverSync.js b/contentcuration/contentcuration/frontend/shared/data/serverSync.js index 69be274d46..52f495c8f2 100644 --- a/contentcuration/contentcuration/frontend/shared/data/serverSync.js +++ b/contentcuration/contentcuration/frontend/shared/data/serverSync.js @@ -1,3 +1,4 @@ +import Vue from 'vue'; import findLastIndex from 'lodash/findLastIndex'; import get from 'lodash/get'; import pick from 'lodash/pick'; @@ -5,6 +6,7 @@ import orderBy from 'lodash/orderBy'; import uniq from 'lodash/uniq'; import logging from '../logging'; import applyChanges from './applyRemoteChanges'; +import { changeRevs } from './registry'; import { CHANGE_TYPES, CHANGES_TABLE, IGNORED_SOURCE, MAX_REV_KEY } from './constants'; import db, { channelScope } from './db'; import { Channel, Session, Task } from './resources'; @@ -200,8 +202,6 @@ function handleTasks(response) { const noUserError = 'No user logged in'; -const changeRevs = []; - async function syncChanges(syncAllChanges) { // Note: we could in theory use Dexie syncable for what // we are doing here, but I can't find a good way to make @@ -355,6 +355,10 @@ if (process.env.NODE_ENV !== 'production' && typeof window !== 'undefined') { let intervalTimer; +export function syncOnChanges() { + Vue.$watch(() => changeRevs.length, debouncedSyncChanges); +} + export function startSyncing() { // Start the sync interval intervalTimer = setInterval(debouncedSyncChanges, SYNC_POLL_INTERVAL * 1000); From 6de69b92ecbb2764b6086c81f17dc505892bbd76 Mon Sep 17 00:00:00 2001 From: Richard Tibbles Date: Wed, 10 May 2023 15:09:56 -0700 Subject: [PATCH 11/21] Change updatedChanges to handle an object of changes, not the changed object. Ensure we generate changes that require the old object representation before modifying. Ignore null changes. 
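
Rough illustration of the changed UpdatedChange contract, with hypothetical
values:

    import { TABLE_NAMES } from 'shared/data/constants';
    import { UpdatedChange } from 'shared/data/changes';

    const oldObj = { id: 'abc', title: 'Old title', description: 'Unchanged' };

    // Callers now pass only the fields being changed; the constructor merges
    // them onto a clone of oldObj to compute both the mods and the new obj.
    const change = new UpdatedChange({
      key: 'abc',
      table: TABLE_NAMES.CONTENTNODE,
      oldObj,
      changes: { title: 'New title' },
    });
    // change.mods -> { title: 'New title' }
    // change.obj  -> { id: 'abc', title: 'New title', description: 'Unchanged' }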
--- .../frontend/shared/data/changes.js | 5 ++- .../frontend/shared/data/resources.js | 31 +++++++++++-------- 2 files changed, 20 insertions(+), 16 deletions(-) diff --git a/contentcuration/contentcuration/frontend/shared/data/changes.js b/contentcuration/contentcuration/frontend/shared/data/changes.js index c8f8c35de9..e1a7cecca6 100644 --- a/contentcuration/contentcuration/frontend/shared/data/changes.js +++ b/contentcuration/contentcuration/frontend/shared/data/changes.js @@ -6,7 +6,6 @@ import isUndefined from 'lodash/isUndefined'; import omit from 'lodash/omit'; import sortBy from 'lodash/sortBy'; import logging from '../logging'; -import { applyMods } from './applyRemoteChanges'; import db from 'shared/data/db'; import { promiseChunk } from 'shared/utils/helpers'; import { @@ -291,8 +290,9 @@ export class UpdatedChange extends Change { // For now, the aim here is to preserve the same change structure as found in // the Dexie Observable updating hook: // https://github.com/dexie/Dexie.js/blob/master/addons/Dexie.Observable/src/hooks/updating.js#L15 - const mods = Dexie.getObjectDiff(oldObj, changes); const newObj = Dexie.deepClone(oldObj); + Object.assign(newObj, changes); + const mods = Dexie.getObjectDiff(oldObj, newObj); for (const propPath in mods) { const mod = mods[propPath]; const currentValue = Dexie.getByKeyPath(oldObj, propPath); @@ -309,7 +309,6 @@ export class UpdatedChange extends Change { if (!this.changed) { return; } - applyMods(newObj, mods); this.oldObj = oldObj; this.obj = newObj; } diff --git a/contentcuration/contentcuration/frontend/shared/data/resources.js b/contentcuration/contentcuration/frontend/shared/data/resources.js index 4c64769d53..f95dc28c55 100644 --- a/contentcuration/contentcuration/frontend/shared/data/resources.js +++ b/contentcuration/contentcuration/frontend/shared/data/resources.js @@ -527,7 +527,9 @@ class IndexedDBResource { _saveAndQueueChange(change) { return change.saveChange().then(rev => { - changeRevs.push(rev); + if (rev !== null) { + changeRevs.push(rev); + } }); } @@ -536,7 +538,7 @@ class IndexedDBResource { return Promise.resolve(); } if (!oldObj) { - oldObj = await this.get(id); + oldObj = await this.table.get(id); } if (!oldObj) { return Promise.resolve(); @@ -553,8 +555,10 @@ class IndexedDBResource { update(id, changes) { return this.transaction({ mode: 'rw' }, CHANGES_TABLE, () => { changes = this._cleanNew(changes); - return this.table.update(id, changes).then(val => { - return this._updateChange(id, changes).then(() => val); + // Do the update change first so that we can diff against the + // non-updated object. 
+ return this._updateChange(id, changes).then(() => { + return this.table.update(id, changes); }); }); } @@ -639,7 +643,7 @@ class IndexedDBResource { if (!this.syncable) { return Promise.resolve(); } - return this.get(id).then(oldObj => { + return this.table.get(id).then(oldObj => { if (!oldObj) { return Promise.resolve(); } @@ -654,8 +658,9 @@ class IndexedDBResource { delete(id) { return this.transaction({ mode: 'rw' }, CHANGES_TABLE, () => { - return this.table.delete(id).then(ret => { - return this._deleteChange(id).then(() => ret); + // Generate delete change first so that we can look up the existing object + return this._deleteChange(id).then(() => { + return this.table.delete(id); }); }); } @@ -1048,15 +1053,15 @@ export const Channel = new Resource({ return this.transaction({ mode: 'rw' }, CHANGES_TABLE, () => { return this.table.get(id).then(channel => { if (Object.keys(content_defaults).length) { - if (!channel.content_defaults) { - channel.content_defaults = {}; - } const mergedContentDefaults = {}; - Object.assign(mergedContentDefaults, channel.content_defaults, content_defaults); + Object.assign(mergedContentDefaults, channel.content_defaults || {}, content_defaults); changes.content_defaults = mergedContentDefaults; } - return this.table.update(id, changes).then(ret => { - return this._updateChange(id, changes, channel).then(() => ret); + // Create the update change for consistency with the super update method + // although we could do it after as we are explicity passing in + // the pre change channel object + return this._updateChange(id, changes, channel).then(() => { + return this.table.update(id, changes); }); }); }); From e85491a27b96c9a746f40f043b17d804dd60ed97 Mon Sep 17 00:00:00 2001 From: Richard Tibbles Date: Wed, 10 May 2023 16:03:03 -0700 Subject: [PATCH 12/21] Include CHANGES_TABLE in transaction. --- .../contentcuration/frontend/shared/data/resources.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/contentcuration/contentcuration/frontend/shared/data/resources.js b/contentcuration/contentcuration/frontend/shared/data/resources.js index f95dc28c55..0f19778b46 100644 --- a/contentcuration/contentcuration/frontend/shared/data/resources.js +++ b/contentcuration/contentcuration/frontend/shared/data/resources.js @@ -1425,7 +1425,7 @@ export const ContentNode = new TreeResource({ RELATIVE_TREE_POSITIONS.LAST_CHILD, true, data => { - return this.transaction({ mode: 'rw' }, () => { + return this.transaction({ mode: 'rw' }, CHANGES_TABLE, () => { const obj = { ...prepared, ...data.payload }; return this.table.add(obj).then(id => { return this._createChange(id, obj).then(() => id); From 8ec484009f7f6d47d1b9674d5bc72b68fa14e44a Mon Sep 17 00:00:00 2001 From: Richard Tibbles Date: Thu, 11 May 2023 20:24:14 -0700 Subject: [PATCH 13/21] Properly handle channel_id and user_id setting on changes. Update tests with appropriate mocks to handle this. 
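
Sketch of the setup pattern the updated specs follow, using the helpers added in
shared/utils/testing.js and the injectVuexStore hook in resources.js (channel
and user ids are placeholders):

    import { Store } from 'vuex';
    import { injectVuexStore } from 'shared/data/resources';
    import { mockChannelScope, resetMockChannelScope } from 'shared/utils/testing';

    describe('a spec that generates changes', () => {
      beforeEach(async () => {
        // Scope the tab to a channel so generated changes carry a channel_id,
        // and provide a store so they carry the current user's user_id.
        await mockChannelScope('test-123');
        injectVuexStore(new Store({ getters: { currentUserId: () => 'user-1' } }));
      });
      afterEach(async () => {
        await resetMockChannelScope();
        injectVuexStore();
      });
      it('records channel_id and user_id on saved changes', async () => {
        // exercise a resource method and assert on the row in the changes table
      });
    });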
--- .../contentNode/__tests__/actions.spec.js | 7 +- .../vuex/channelList/__tests__/module.spec.js | 1 - .../vuex/channelSet/__tests__/module.spec.js | 16 +-- .../__tests__/ContentNodeResource.spec.js | 84 ++++++-------- .../shared/data/__tests__/changes.spec.js | 59 +++++++--- .../shared/data/__tests__/serverSync.spec.js | 8 ++ .../frontend/shared/data/changes.js | 70 ++++++++++-- .../frontend/shared/data/resources.js | 106 ++++++++++-------- .../frontend/shared/utils/testing.js | 19 ++++ .../frontend/shared/utils/validation.js | 10 +- .../vuex/channel/__tests__/module.spec.js | 31 +++-- .../shared/vuex/file/__tests__/module.spec.js | 15 ++- 12 files changed, 275 insertions(+), 151 deletions(-) diff --git a/contentcuration/contentcuration/frontend/channelEdit/vuex/contentNode/__tests__/actions.spec.js b/contentcuration/contentcuration/frontend/channelEdit/vuex/contentNode/__tests__/actions.spec.js index 2e2898b67b..a133156e3d 100644 --- a/contentcuration/contentcuration/frontend/channelEdit/vuex/contentNode/__tests__/actions.spec.js +++ b/contentcuration/contentcuration/frontend/channelEdit/vuex/contentNode/__tests__/actions.spec.js @@ -4,6 +4,7 @@ import assessmentItem from '../../assessmentItem/index'; import file from 'shared/vuex/file'; import { ContentNode } from 'shared/data/resources'; import storeFactory from 'shared/vuex/baseStore'; +import { mockChannelScope, resetMockChannelScope } from 'shared/utils/testing'; jest.mock('../../currentChannel/index'); @@ -15,7 +16,8 @@ describe('contentNode actions', () => { let store; let id; const contentNodeDatum = { title: 'test', parent: parentId, lft: 1, tags: {} }; - beforeEach(() => { + beforeEach(async () => { + await mockChannelScope('test-123'); return ContentNode._add(contentNodeDatum).then(newId => { id = newId; contentNodeDatum.id = newId; @@ -38,7 +40,8 @@ describe('contentNode actions', () => { }); }); }); - afterEach(() => { + afterEach(async () => { + await resetMockChannelScope(); jest.restoreAllMocks(); return ContentNode.table.toCollection().delete(); }); diff --git a/contentcuration/contentcuration/frontend/channelList/vuex/channelList/__tests__/module.spec.js b/contentcuration/contentcuration/frontend/channelList/vuex/channelList/__tests__/module.spec.js index f6f7d2c6ba..2998520efd 100644 --- a/contentcuration/contentcuration/frontend/channelList/vuex/channelList/__tests__/module.spec.js +++ b/contentcuration/contentcuration/frontend/channelList/vuex/channelList/__tests__/module.spec.js @@ -3,7 +3,6 @@ import channelList from '../index'; import { Channel, Invitation } from 'shared/data/resources'; import storeFactory from 'shared/vuex/baseStore'; -jest.mock('shared/client'); jest.mock('shared/vuex/connectionPlugin'); const channel_id = '11111111111111111111111111111111'; diff --git a/contentcuration/contentcuration/frontend/channelList/vuex/channelSet/__tests__/module.spec.js b/contentcuration/contentcuration/frontend/channelList/vuex/channelSet/__tests__/module.spec.js index bc6d5b6be2..60690b235c 100644 --- a/contentcuration/contentcuration/frontend/channelList/vuex/channelSet/__tests__/module.spec.js +++ b/contentcuration/contentcuration/frontend/channelList/vuex/channelSet/__tests__/module.spec.js @@ -1,5 +1,5 @@ import channelSet from '../index'; -import { ChannelSet } from 'shared/data/resources'; +import { ChannelSet, injectVuexStore } from 'shared/data/resources'; import storeFactory from 'shared/vuex/baseStore'; jest.mock('shared/vuex/connectionPlugin'); @@ -14,17 +14,19 @@ describe('channelSet actions', () => { 
edit: true, }; beforeEach(() => { + store = storeFactory({ + modules: { + channelSet, + }, + }); + store.state.session.currentUser.id = userId; + injectVuexStore(store); return ChannelSet.add(channelSetDatum).then(newId => { id = newId; - store = storeFactory({ - modules: { - channelSet, - }, - }); - store.state.session.currentUser.id = userId; }); }); afterEach(() => { + injectVuexStore(); return ChannelSet.table.toCollection().delete(); }); describe('loadChannelSetList action', () => { diff --git a/contentcuration/contentcuration/frontend/shared/data/__tests__/ContentNodeResource.spec.js b/contentcuration/contentcuration/frontend/shared/data/__tests__/ContentNodeResource.spec.js index b52ee98148..eaa3249413 100644 --- a/contentcuration/contentcuration/frontend/shared/data/__tests__/ContentNodeResource.spec.js +++ b/contentcuration/contentcuration/frontend/shared/data/__tests__/ContentNodeResource.spec.js @@ -1,6 +1,7 @@ import sortBy from 'lodash/sortBy'; import shuffle from 'lodash/shuffle'; import find from 'lodash/find'; +import { Store } from 'vuex'; import { RELATIVE_TREE_POSITIONS, COPYING_FLAG, @@ -15,6 +16,7 @@ import { ContentNodePrerequisite, TreeResource, uuid4, + injectVuexStore, } from 'shared/data/resources'; import { ContentKindsNames } from 'shared/leUtils/ContentKinds'; @@ -323,16 +325,13 @@ describe('ContentNode methods', () => { lft: 1, changed: true, }, - change: { + changeData: { key: 'abc123', from_key: null, target: parent.id, position: RELATIVE_TREE_POSITIONS.LAST_CHILD, oldObj: node, - source: CLIENTID, table: 'contentnode', - type: CHANGE_TYPES.MOVED, - channel_id: parent.channel_id, }, }); }); @@ -359,16 +358,13 @@ describe('ContentNode methods', () => { lft: 1, changed: true, }, - change: { + changeData: { key: 'abc123', from_key: null, target: parent.id, position: RELATIVE_TREE_POSITIONS.LAST_CHILD, oldObj: node, - source: CLIENTID, table: 'contentnode', - type: CHANGE_TYPES.MOVED, - channel_id: node.channel_id, }, }); }); @@ -399,15 +395,13 @@ describe('ContentNode methods', () => { lft, changed: true, }, - change: { + changeData: { key: 'abc123', from_key: null, target: 'target', position: 'position', oldObj: node, - source: CLIENTID, table: 'contentnode', - type: CHANGE_TYPES.MOVED, }, }); }); @@ -428,22 +422,6 @@ describe('ContentNode methods', () => { expect(getNewSortOrder).toHaveBeenCalledWith('abc123', 'target', 'position', siblings); expect(cb).not.toBeCalled(); }); - - it('should reject if null channel_id', async () => { - const cb = jest.fn(() => Promise.resolve('results')); - parent.channel_id = null; - node.channel_id = null; - - await expect( - ContentNode.resolveTreeInsert('abc123', 'target', 'position', false, cb) - ).rejects.toThrow('Missing channel_id for tree insertion change event'); - expect(resolveParent).toHaveBeenCalledWith('target', 'position'); - expect(treeLock).toHaveBeenCalledWith(parent.root_id, expect.any(Function)); - expect(get).toHaveBeenCalledWith('abc123', false); - expect(where).toHaveBeenCalledWith({ parent: parent.id }, false); - expect(getNewSortOrder).not.toBeCalled(); - expect(cb).not.toBeCalled(); - }); }); describe('copying', () => { @@ -468,18 +446,16 @@ describe('ContentNode methods', () => { lft: 1, changed: true, }, - change: { + changeData: { key: expect.not.stringMatching('abc123'), from_key: 'abc123', target: parent.id, position: RELATIVE_TREE_POSITIONS.LAST_CHILD, oldObj: null, - source: CLIENTID, table: 'contentnode', - type: CHANGE_TYPES.COPIED, }, }); - 
expect(result.payload.id).toEqual(result.change.key); + expect(result.payload.id).toEqual(result.changeData.key); }); it('should determine lft from siblings', async () => { @@ -507,18 +483,16 @@ describe('ContentNode methods', () => { lft, changed: true, }, - change: { + changeData: { key: expect.not.stringMatching('abc123'), from_key: 'abc123', target: 'target', position: 'position', oldObj: null, - source: CLIENTID, table: 'contentnode', - type: CHANGE_TYPES.COPIED, }, }); - expect(result.payload.id).toEqual(result.change.key); + expect(result.payload.id).toEqual(result.changeData.key); }); it('should reject if null lft', async () => { @@ -615,7 +589,7 @@ describe('ContentNode methods', () => { let node, parent, payload, - change, + // change, table = {}; beforeEach(() => { @@ -641,16 +615,14 @@ describe('ContentNode methods', () => { node_id: uuid4(), }; payload = { id: uuid4(), parent: parent.id, changed: true, lft: 1 }; - change = { - key: payload.id, - from_key: node.id, - target: parent.id, - position: RELATIVE_TREE_POSITIONS.LAST_CHILD, - oldObj: null, - source: CLIENTID, - table: 'contentnode', - type: CHANGE_TYPES.COPIED, - }; + // change = { + // key: payload.id, + // from_key: node.id, + // target: parent.id, + // position: RELATIVE_TREE_POSITIONS.LAST_CHILD, + // oldObj: null, + // table: 'contentnode', + // }; mockProperty('table', table); }); @@ -673,11 +645,12 @@ describe('ContentNode methods', () => { [COPYING_FLAG]: true, [TASK_ID]: null, }; - await expect(ContentNode.tableCopy({ node, parent, payload, change })).resolves.toMatchObject( + await expect(ContentNode.tableCopy({ node, parent, payload })).resolves.toMatchObject( expectedPayload ); expect(table.put).toHaveBeenCalledWith(expectedPayload); // TODO: Fails + // I think because the change is only saved in the `copy` method not the tableCopy method? 
// await expect(db[CHANGES_TABLE].get({ '[table+key]': [ContentNode.tableName, node.id] })) // .resolves.toMatchObject(change); }); @@ -756,11 +729,12 @@ describe('Clipboard methods', () => { }); describe('copy method', () => { - let node_id, channel_id, clipboardRootId, node, siblings, where, getByNodeIdChannelId; + let node_id, channel_id, clipboardRootId, node, siblings, where, getByNodeIdChannelId, user_id; beforeEach(() => { node_id = uuid4(); channel_id = uuid4(); clipboardRootId = uuid4(); + user_id = uuid4(); node = { id: node_id, kind: ContentKindsNames.DOCUMENT, @@ -771,6 +745,18 @@ describe('Clipboard methods', () => { .spyOn(ContentNode, 'getByNodeIdChannelId') .mockImplementation(() => Promise.resolve(node)); mocks.push(getByNodeIdChannelId); + const store = new Store({ + getters: { + currentUserId() { + return user_id; + }, + }, + }); + injectVuexStore(store); + }); + + afterEach(() => { + injectVuexStore(); }); it('should create a bare copy of the node', async () => { @@ -854,7 +840,7 @@ describe('ContentNodePrerequisite methods', () => { }); it('should return all associated requisites, even when there is a cyclic dependency', () => { const cyclic = { target_node: 'id-chemistry', prerequisite: 'id-lab' }; - return ContentNodePrerequisite.add(cyclic).then(() => { + return ContentNodePrerequisite.table.add(cyclic).then(() => { return ContentNode.getRequisites('id-integrals').then(entries => { expect(sortBy(entries, 'target_node')).toEqual( sortBy(mappings.concat([cyclic]), 'target_node') diff --git a/contentcuration/contentcuration/frontend/shared/data/__tests__/changes.spec.js b/contentcuration/contentcuration/frontend/shared/data/__tests__/changes.spec.js index 14caaedfbc..3aea8be46e 100644 --- a/contentcuration/contentcuration/frontend/shared/data/__tests__/changes.spec.js +++ b/contentcuration/contentcuration/frontend/shared/data/__tests__/changes.spec.js @@ -18,10 +18,17 @@ import { TASK_ID, } from 'shared/data/constants'; import db from 'shared/data/db'; +import { mockChannelScope, resetMockChannelScope } from 'shared/utils/testing'; describe('Change Types', () => { - beforeEach(() => { - db[CHANGES_TABLE].clear(); + const channel_id = 'test-123'; + beforeEach(async () => { + await db[CHANGES_TABLE].clear(); + await mockChannelScope(channel_id); + }); + + afterEach(async () => { + await resetMockChannelScope(); }); it('should persist only the specified fields in the CreatedChange', async () => { @@ -32,7 +39,11 @@ describe('Change Types', () => { }); const rev = await change.saveChange(); const persistedChange = await db[CHANGES_TABLE].get(rev); - expect(persistedChange).toEqual({ rev, ...pick(change, ['type', 'key', 'table', 'obj']) }); + expect(persistedChange).toEqual({ + channel_id, + rev, + ...pick(change, ['type', 'key', 'table', 'obj']), + }); }); it('should persist only the specified fields in the UpdatedChange', async () => { @@ -46,6 +57,7 @@ describe('Change Types', () => { obj: changes, mods: { b: 3 }, rev, + channel_id, ...pick(change, ['type', 'key', 'table']), }); }); @@ -61,6 +73,7 @@ describe('Change Types', () => { obj: changes, mods: { 'b.c': 2 }, rev, + channel_id, ...pick(change, ['type', 'key', 'table']), }); }); @@ -76,6 +89,7 @@ describe('Change Types', () => { obj: changes, mods: { 'b.c': null }, rev, + channel_id, ...pick(change, ['type', 'key', 'table']), }); }); @@ -97,6 +111,7 @@ describe('Change Types', () => { obj: { a: 1, b: 3 }, mods: { b: 3 }, rev, + channel_id, ...pick(change, ['type', 'key', 'table']), }); }); @@ -123,7 +138,11 @@ 
describe('Change Types', () => { }); const rev = await change.saveChange(); const persistedChange = await db[CHANGES_TABLE].get(rev); - expect(persistedChange).toEqual({ rev, ...pick(change, ['type', 'key', 'table', 'oldObj']) }); + expect(persistedChange).toEqual({ + channel_id, + rev, + ...pick(change, ['type', 'key', 'table', 'oldObj']), + }); }); it('should persist only the specified fields in the MovedChange', async () => { @@ -138,6 +157,7 @@ describe('Change Types', () => { const persistedChange = await db[CHANGES_TABLE].get(rev); expect(persistedChange).toEqual({ rev, + channel_id, ...pick(change, ['type', 'key', 'table', 'target', 'position', 'parent']), }); }); @@ -157,6 +177,7 @@ describe('Change Types', () => { const persistedChange = await db[CHANGES_TABLE].get(rev); expect(persistedChange).toEqual({ rev, + channel_id, ...pick(change, [ 'type', 'key', @@ -174,7 +195,7 @@ describe('Change Types', () => { it('should persist only the specified fields in the PublishedChange', async () => { const change = new PublishedChange({ key: '1', - table: TABLE_NAMES.CONTENTNODE, + table: TABLE_NAMES.CHANNEL, version_notes: 'Some version notes', language: 'en', }); @@ -182,6 +203,7 @@ describe('Change Types', () => { const persistedChange = await db[CHANGES_TABLE].get(rev); expect(persistedChange).toEqual({ rev, + channel_id: change.key, ...pick(change, ['type', 'key', 'table', 'version_notes', 'language']), }); }); @@ -189,7 +211,7 @@ describe('Change Types', () => { it('should persist only the specified fields in the SyncedChange', async () => { const change = new SyncedChange({ key: '1', - table: TABLE_NAMES.CONTENTNODE, + table: TABLE_NAMES.CHANNEL, titles_and_descriptions: true, resource_details: false, files: true, @@ -199,6 +221,7 @@ describe('Change Types', () => { const persistedChange = await db[CHANGES_TABLE].get(rev); expect(persistedChange).toEqual({ rev, + channel_id: change.key, ...pick(change, [ 'type', 'key', @@ -212,10 +235,14 @@ describe('Change Types', () => { }); it('should persist only the specified fields in the DeployedChange', async () => { - const change = new DeployedChange({ key: '1', table: TABLE_NAMES.CONTENTNODE }); + const change = new DeployedChange({ key: '1', table: TABLE_NAMES.CHANNEL }); const rev = await change.saveChange(); const persistedChange = await db[CHANGES_TABLE].get(rev); - expect(persistedChange).toEqual({ rev, ...pick(change, ['type', 'key', 'table']) }); + expect(persistedChange).toEqual({ + rev, + channel_id: change.key, + ...pick(change, ['type', 'key', 'table']), + }); }); }); @@ -400,7 +427,7 @@ describe('Change Types Unhappy Paths', () => { // PublishedChange it('should throw error when PublishedChange is instantiated without version_notes', () => { - expect(() => new PublishedChange({ key: '1', table: TABLE_NAMES.CONTENTNODE })).toThrow( + expect(() => new PublishedChange({ key: '1', table: TABLE_NAMES.CHANNEL })).toThrow( new TypeError('version_notes is required for a PublishedChange but it was undefined') ); }); @@ -410,7 +437,7 @@ describe('Change Types Unhappy Paths', () => { () => new PublishedChange({ key: '1', - table: TABLE_NAMES.CONTENTNODE, + table: TABLE_NAMES.CHANNEL, version_notes: 'Some notes', }) ).toThrow(new TypeError('language is required for a PublishedChange but it was undefined')); @@ -418,7 +445,7 @@ describe('Change Types Unhappy Paths', () => { // SyncedChange it('should throw error when SyncedChange is instantiated without titles_and_descriptions', () => { - expect(() => new SyncedChange({ key: '1', table: 
TABLE_NAMES.CONTENTNODE })).toThrow( + expect(() => new SyncedChange({ key: '1', table: TABLE_NAMES.CHANNEL })).toThrow( new TypeError('titles_and_descriptions should be a boolean, but undefined was passed instead') ); }); @@ -428,7 +455,7 @@ describe('Change Types Unhappy Paths', () => { () => new SyncedChange({ key: '1', - table: TABLE_NAMES.CONTENTNODE, + table: TABLE_NAMES.CHANNEL, titles_and_descriptions: 'invalid', }) ).toThrow( @@ -441,7 +468,7 @@ describe('Change Types Unhappy Paths', () => { () => new SyncedChange({ key: '1', - table: TABLE_NAMES.CONTENTNODE, + table: TABLE_NAMES.CHANNEL, titles_and_descriptions: true, resource_details: 'invalid', }) @@ -455,7 +482,7 @@ describe('Change Types Unhappy Paths', () => { () => new SyncedChange({ key: '1', - table: TABLE_NAMES.CONTENTNODE, + table: TABLE_NAMES.CHANNEL, titles_and_descriptions: true, resource_details: false, files: 'invalid', @@ -468,7 +495,7 @@ describe('Change Types Unhappy Paths', () => { () => new SyncedChange({ key: '1', - table: TABLE_NAMES.CONTENTNODE, + table: TABLE_NAMES.CHANNEL, titles_and_descriptions: true, resource_details: false, files: true, @@ -481,7 +508,7 @@ describe('Change Types Unhappy Paths', () => { // DeployedChange it('should throw error when DeployedChange is instantiated without key', () => { - expect(() => new DeployedChange({ table: TABLE_NAMES.CONTENTNODE })).toThrow( + expect(() => new DeployedChange({ table: TABLE_NAMES.CHANNEL })).toThrow( new TypeError('key is required for a DeployedChange but it was undefined') ); }); diff --git a/contentcuration/contentcuration/frontend/shared/data/__tests__/serverSync.spec.js b/contentcuration/contentcuration/frontend/shared/data/__tests__/serverSync.spec.js index 00b3244db7..a74150642f 100644 --- a/contentcuration/contentcuration/frontend/shared/data/__tests__/serverSync.spec.js +++ b/contentcuration/contentcuration/frontend/shared/data/__tests__/serverSync.spec.js @@ -4,6 +4,7 @@ import db from '../db'; import { Session, Task } from 'shared/data/resources'; import client from 'shared/client'; import { CHANGES_TABLE, CURRENT_USER, TABLE_NAMES } from 'shared/data/constants'; +import { mockChannelScope, resetMockChannelScope } from 'shared/utils/testing'; async function makeChange(key, server_rev) { const rev = await new CreatedChange({ @@ -21,6 +22,11 @@ describe('Debounce behaviour', () => { beforeEach(async () => { await Session.table.put({ id: 0, CURRENT_USER }); client.post.mockReset(); + await mockChannelScope('test-123'); + }); + + afterEach(async () => { + await resetMockChannelScope(); }); it('should debounce sync changes', async () => { @@ -44,9 +50,11 @@ describe('ServerSync tests', () => { beforeEach(async () => { await Session.table.put({ id: 0, CURRENT_USER }); client.post.mockReset(); + await mockChannelScope('test-123'); }); afterEach(async () => { + await resetMockChannelScope(); await db[CHANGES_TABLE].clear(); }); diff --git a/contentcuration/contentcuration/frontend/shared/data/changes.js b/contentcuration/contentcuration/frontend/shared/data/changes.js index e1a7cecca6..998af6d389 100644 --- a/contentcuration/contentcuration/frontend/shared/data/changes.js +++ b/contentcuration/contentcuration/frontend/shared/data/changes.js @@ -12,6 +12,7 @@ import { CHANGES_TABLE, CHANGE_TYPES, CHANGE_TYPES_LOOKUP, + TABLE_NAMES, TABLE_NAMES_LOOKUP, IGNORED_SOURCE, RELATIVE_TREE_POSITIONS, @@ -210,20 +211,33 @@ class Change { this.setAndValidateNotUndefined(key, 'key'); this.setAndValidateLookup(table, 'table', TABLE_NAMES_LOOKUP); if 
(!INDEXEDDB_RESOURCES[this.table].syncable) { - const error = ReferenceError(`${this.table} is not a syncable table`); + const error = new ReferenceError(`${this.table} is not a syncable table`); logging.error(error); throw error; } - INDEXEDDB_RESOURCES[this.table].setChannelIdOnChange(this); - INDEXEDDB_RESOURCES[this.table].setUserIdOnChange(this); } get changeType() { return this.constructor.name; } + get channelOrUserIdSet() { + return !isUndefined(this.channel_id) || !isUndefined(this.user_id); + } + + setChannelAndUserId(obj) { + const channel_id = INDEXEDDB_RESOURCES[this.table].getChannelId(obj); + if (channel_id) { + this.channel_id = channel_id; + } + const user_id = INDEXEDDB_RESOURCES[this.table].getUserId(obj); + if (user_id) { + this.user_id = user_id; + } + } + setAndValidateLookup(value, name, lookup) { if (!lookup[value]) { - const error = ReferenceError(`${value} is not a valid ${name} value`); + const error = new ReferenceError(`${value} is not a valid ${name} value`); logging.error(error, value); throw error; } @@ -231,7 +245,9 @@ class Change { } setAndValidateNotUndefined(value, name) { if (isUndefined(value)) { - const error = TypeError(`${name} is required for a ${this.changeType} but it was undefined`); + const error = new TypeError( + `${name} is required for a ${this.changeType} but it was undefined` + ); logging.error(error, value); throw error; } @@ -239,7 +255,7 @@ class Change { } validateObj(value, name) { if (!isPlainObject(value)) { - const error = TypeError(`${name} should be an object, but ${value} was passed instead`); + const error = new TypeError(`${name} should be an object, but ${value} was passed instead`); logging.error(error, value); throw error; } @@ -250,7 +266,7 @@ class Change { } setAndValidateBoolean(value, name) { if (typeof value !== 'boolean') { - const error = TypeError(`${name} should be a boolean, but ${value} was passed instead`); + const error = new TypeError(`${name} should be a boolean, but ${value} was passed instead`); logging.error(error, value); throw error; } @@ -258,7 +274,7 @@ class Change { } setAndValidateObjOrNull(value, name, mapper = obj => obj) { if (!isPlainObject(value) && !isNull(value)) { - const error = TypeError( + const error = new TypeError( `${name} should be an object or null, but ${value} was passed instead` ); logging.error(error, value); @@ -267,6 +283,11 @@ class Change { this[name] = mapper(value); } saveChange() { + if (!this.channelOrUserIdSet) { + throw new ReferenceError( + `Attempted to save ${this.changeType} change for ${this.table} before setting channel_id and user_id` + ); + } return db[CHANGES_TABLE].add(this); } } @@ -276,6 +297,7 @@ export class CreatedChange extends Change { fields.type = CHANGE_TYPES.CREATED; super(fields); this.setAndValidateObj(obj, 'obj', omitIgnoredSubFields); + this.setChannelAndUserId(obj); } } @@ -311,6 +333,7 @@ export class UpdatedChange extends Change { } this.oldObj = oldObj; this.obj = newObj; + this.setChannelAndUserId(oldObj); } get changed() { return !isEmpty(this.mods); @@ -328,6 +351,7 @@ export class DeletedChange extends Change { fields.type = CHANGE_TYPES.DELETED; super(fields); this.setAndValidateObj(oldObj, 'oldObj', omitIgnoredSubFields); + this.setChannelAndUserId(oldObj); } } @@ -335,9 +359,15 @@ export class MovedChange extends Change { constructor({ target, position, parent, ...fields }) { fields.type = CHANGE_TYPES.MOVED; super(fields); + if (this.table !== TABLE_NAMES.CONTENTNODE) { + throw TypeError( + `${this.changeType} is only supported 
by ${TABLE_NAMES.CONTENTNODE} table but ${this.table} was passed instead` + ); + } this.setAndValidateNotUndefined(target, 'target'); this.setAndValidateLookup(position, 'position', RELATIVE_TREE_POSITIONS_LOOKUP); this.setAndValidateNotUndefined(parent, 'parent'); + this.setChannelAndUserId(); } } @@ -345,12 +375,18 @@ export class CopiedChange extends Change { constructor({ from_key, mods, target, position, excluded_descendants, parent, ...fields }) { fields.type = CHANGE_TYPES.COPIED; super(fields); + if (this.table !== TABLE_NAMES.CONTENTNODE) { + throw TypeError( + `${this.changeType} is only supported by ${TABLE_NAMES.CONTENTNODE} table but ${this.table} was passed instead` + ); + } this.setAndValidateNotUndefined(from_key, 'from_key'); this.setAndValidateObj(mods, 'mods', omitIgnoredSubFields); this.setAndValidateNotUndefined(target, 'target'); this.setAndValidateLookup(position, 'position', RELATIVE_TREE_POSITIONS_LOOKUP); this.setAndValidateObjOrNull(excluded_descendants, 'excluded_descendants'); this.setAndValidateNotUndefined(parent, 'parent'); + this.setChannelAndUserId(); } } @@ -358,8 +394,14 @@ export class PublishedChange extends Change { constructor({ version_notes, language, ...fields }) { fields.type = CHANGE_TYPES.PUBLISHED; super(fields); + if (this.table !== TABLE_NAMES.CHANNEL) { + throw TypeError( + `${this.changeType} is only supported by ${TABLE_NAMES.CHANNEL} table but ${this.table} was passed instead` + ); + } this.setAndValidateNotUndefined(version_notes, 'version_notes'); this.setAndValidateNotUndefined(language, 'language'); + this.setChannelAndUserId({ id: this.key }); } } @@ -367,10 +409,16 @@ export class SyncedChange extends Change { constructor({ titles_and_descriptions, resource_details, files, assessment_items, ...fields }) { fields.type = CHANGE_TYPES.SYNCED; super(fields); + if (this.table !== TABLE_NAMES.CHANNEL) { + throw TypeError( + `${this.changeType} is only supported by ${TABLE_NAMES.CHANNEL} table but ${this.table} was passed instead` + ); + } this.setAndValidateBoolean(titles_and_descriptions, 'titles_and_descriptions'); this.setAndValidateBoolean(resource_details, 'resource_details'); this.setAndValidateBoolean(files, 'files'); this.setAndValidateBoolean(assessment_items, 'assessment_items'); + this.setChannelAndUserId({ id: this.key }); } } @@ -378,5 +426,11 @@ export class DeployedChange extends Change { constructor(fields) { fields.type = CHANGE_TYPES.DEPLOYED; super(fields); + if (this.table !== TABLE_NAMES.CHANNEL) { + throw TypeError( + `${this.changeType} is only supported by ${TABLE_NAMES.CHANNEL} table but ${this.table} was passed instead` + ); + } + this.setChannelAndUserId({ id: this.key }); } } diff --git a/contentcuration/contentcuration/frontend/shared/data/resources.js b/contentcuration/contentcuration/frontend/shared/data/resources.js index 0f19778b46..6db716924d 100644 --- a/contentcuration/contentcuration/frontend/shared/data/resources.js +++ b/contentcuration/contentcuration/frontend/shared/data/resources.js @@ -126,6 +126,18 @@ export function injectVuexStore(store) { vuexStore = store; } +function getUserIdFromStore() { + if (vuexStore) { + return vuexStore.getters.currentUserId; + } + throw ReferenceError( + ``` + Attempted to get the user_id from the store to set on a change object, + but the store has not been injected into the resources.js module using injectVuexStore function + ``` + ); +} + // Custom uuid4 function to match our dashless uuids on the server side export function uuid4() { return 
uuidv4().replace(/-/g, ''); @@ -666,23 +678,21 @@ class IndexedDBResource { } /** - * Set the channel_id property on a change - * object before we put it in the store, if relevant - * @param {Object} change + * Return the channel_id for a change, given the object + * @param {Object} obj * @returns {Object} */ - setChannelIdOnChange(change) { - return change; + getChannelId() { + return; } /** - * Set the user_id property on a change - * object before we put it in the store, if relevant - * @param {Object} change + * Return the user_id for a change, given the object + * @param {Object} obj * @returns {Object} */ - setUserIdOnChange(change) { - return change; + getUserId() { + return; } } @@ -998,6 +1008,14 @@ export const Session = new IndexedDBResource({ }); } }, + async clearChannelScope() { + if (channelScope.id) { + await this.updateSession({ + [`${MAX_REV_KEY}.${channelScope.id}`]: null, + }); + channelScope.id = null; + } + }, async getSession() { return this.get(CURRENT_USER); }, @@ -1013,11 +1031,7 @@ export const Bookmark = new Resource({ tableName: TABLE_NAMES.BOOKMARK, urlName: 'bookmark', idField: 'channel', - setUserIdOnChange(change) { - if (vuexStore) { - change.user_id = vuexStore.getters.currentUserId; - } - }, + getUserId: getUserIdFromStore, }); export const Channel = new Resource({ @@ -1173,17 +1187,20 @@ export const Channel = new Resource({ return client.delete(modelUrl); }); }, - setChannelIdOnChange(change) { + getChannelId(obj) { // For channels, the appropriate channel_id for a change is just the key - change.channel_id = change.key; + return obj.id; }, }); -function setChannelFromChannelScope(change) { +function getChannelFromChannelScope() { const channel_id = channelScope.id; if (channel_id) { - change.channel_id = channel_id; + return channel_id; } + throw ReferenceError( + 'Attempted to get the channel_id from the channelScope object, but it has not been set.' 
+ ); } export const ContentNodePrerequisite = new IndexedDBResource({ @@ -1192,7 +1209,7 @@ export const ContentNodePrerequisite = new IndexedDBResource({ idField: '[target_node+prerequisite]', uuid: false, syncable: true, - setChannelIdOnChange: setChannelFromChannelScope, + getChannelId: getChannelFromChannelScope, }); export const ContentNode = new TreeResource({ @@ -1554,7 +1571,7 @@ export const ContentNode = new TreeResource({ return node; }); }, - setChannelIdOnChange: setChannelFromChannelScope, + getChannelId: getChannelFromChannelScope, /** * Waits for copying of content nodes to complete for all ids referenced in `ids` array @@ -1590,11 +1607,7 @@ export const ContentNode = new TreeResource({ export const ChannelSet = new Resource({ tableName: TABLE_NAMES.CHANNELSET, urlName: 'channelset', - setUserIdOnChange(change) { - if (vuexStore) { - change.user_id = vuexStore.getters.currentUserId; - } - }, + getUserId: getUserIdFromStore, }); export const Invitation = new Resource({ @@ -1610,11 +1623,11 @@ export const Invitation = new Resource({ }); }); }, - setChannelIdOnChange(change) { - change.channel_id = change.obj.channel; + getChannelId(obj) { + return obj.channel; }, - setUserIdOnChange(change) { - change.user_id = change.obj.invited; + getUserId(obj) { + return obj.invited; }, }); @@ -1642,6 +1655,9 @@ export const User = new Resource({ return this.table.update(id, { disk_space_used }); }); }, + getUserId(obj) { + return obj.id; + }, }); export const EditorM2M = new IndexedDBResource({ @@ -1650,11 +1666,11 @@ export const EditorM2M = new IndexedDBResource({ idField: '[user+channel]', uuid: false, syncable: true, - setChannelIdOnChange(change) { - change.channel_id = change.obj.channel; + getChannelId(obj) { + return obj.channel; }, - setUserIdOnChange(change) { - change.user_id = change.obj.user; + getUserId(obj) { + return obj.user; }, }); @@ -1664,11 +1680,11 @@ export const ViewerM2M = new IndexedDBResource({ idField: '[user+channel]', uuid: false, syncable: true, - setChannelIdOnChange(change) { - change.channel_id = change.obj.channel; + getChannelId(obj) { + return obj.channel; }, - setUserIdOnChange(change) { - change.user_id = change.obj.user; + getUserId(obj) { + return obj.user; }, }); @@ -1814,7 +1830,7 @@ export const AssessmentItem = new Resource({ }); }); }, - setChannelIdOnChange: setChannelFromChannelScope, + getChannelId: getChannelFromChannelScope, }); export const File = new Resource({ @@ -1843,7 +1859,7 @@ export const File = new Resource({ }); }); }, - setChannelIdOnChange: setChannelFromChannelScope, + getChannelId: getChannelFromChannelScope, }); export const Clipboard = new TreeResource({ @@ -1891,15 +1907,13 @@ export const Clipboard = new TreeResource({ }; return this.transaction({ mode: 'rw' }, CHANGES_TABLE, () => { - return this.table.put(data).then(() => data); + return this.table.add(data).then(id => { + return this._createChange(id, data).then(() => data); + }); }); }); }, - setUserIdOnChange(change) { - if (vuexStore) { - change.user_id = vuexStore.getters.currentUserId; - } - }, + getUserId: getUserIdFromStore, }); export const Task = new IndexedDBResource({ diff --git a/contentcuration/contentcuration/frontend/shared/utils/testing.js b/contentcuration/contentcuration/frontend/shared/utils/testing.js index 641d1ad5a2..cc78737696 100644 --- a/contentcuration/contentcuration/frontend/shared/utils/testing.js +++ b/contentcuration/contentcuration/frontend/shared/utils/testing.js @@ -1,3 +1,5 @@ +import { Session } from 'shared/data/resources'; + 
export function resetJestGlobal() { // This global object is bootstraped into channel_edit.html and is // assumed by the frontend code for it @@ -6,3 +8,20 @@ export function resetJestGlobal() { channel_error: '', }; } + +export async function mockChannelScope(channel_id) { + // Function to allow setting the channel scope for use in testing + // When we have upgraded to Jest 29, we can change this logic to + // make a mock property instead of doing this swap in and out. + Session._oldCurrentChannelId = Session.currentChannelId; + Session.currentChannelId = channel_id; + await Session.setChannelScope(); +} + +export async function resetMockChannelScope() { + // Function to undo the above + // when we have done the above suggested change, we can just reset the mock here. + await Session.clearChannelScope(); + Session.currentChannelId = Session._oldCurrentChannelId; + delete Session._oldCurrentChannelId; +} diff --git a/contentcuration/contentcuration/frontend/shared/utils/validation.js b/contentcuration/contentcuration/frontend/shared/utils/validation.js index b982c05c3a..3c6676ece5 100644 --- a/contentcuration/contentcuration/frontend/shared/utils/validation.js +++ b/contentcuration/contentcuration/frontend/shared/utils/validation.js @@ -53,7 +53,7 @@ export function isNodeComplete({ nodeDetails, assessmentItems, files }) { } if (getNodeDetailsErrors(nodeDetails).length) { - if (process.env.NODE_ENV !== 'production') { + if (process.env.NODE_ENV !== 'production' && process.env.NODE_ENV !== 'test') { console.info('Node is incomplete', getNodeDetailsErrors(nodeDetails)); } return false; @@ -63,7 +63,7 @@ export function isNodeComplete({ nodeDetails, assessmentItems, files }) { nodeDetails.kind !== ContentKindsNames.EXERCISE ) { if (getNodeFilesErrors(files).length) { - if (process.env.NODE_ENV !== 'production') { + if (process.env.NODE_ENV !== 'production' && process.env.NODE_ENV !== 'test') { console.info("Node's files are incomplete", getNodeFilesErrors(files)); } return false; @@ -72,7 +72,7 @@ export function isNodeComplete({ nodeDetails, assessmentItems, files }) { if (nodeDetails.kind !== ContentKindsNames.TOPIC) { const completionCriteria = get(nodeDetails, 'extra_fields.options.completion_criteria'); if (completionCriteria && !validateCompletionCriteria(completionCriteria, nodeDetails.kind)) { - if (process.env.NODE_ENV !== 'production') { + if (process.env.NODE_ENV !== 'production' && process.env.NODE_ENV !== 'test') { console.info("Node's completion criteria is invalid", validateCompletionCriteria.errors); } return false; @@ -80,7 +80,7 @@ export function isNodeComplete({ nodeDetails, assessmentItems, files }) { } if (nodeDetails.kind === ContentKindsNames.EXERCISE) { if (!assessmentItems.length) { - if (process.env.NODE_ENV !== 'production') { + if (process.env.NODE_ENV !== 'production' && process.env.NODE_ENV !== 'test') { console.info('Exercise node is missing assessment items'); } return false; @@ -91,7 +91,7 @@ export function isNodeComplete({ nodeDetails, assessmentItems, files }) { return getAssessmentItemErrors(sanitizedAssessmentItem).length; }; if (assessmentItems.some(isInvalid)) { - if (process.env.NODE_ENV !== 'production') { + if (process.env.NODE_ENV !== 'production' && process.env.NODE_ENV !== 'test') { console.info( "Exercise node's assessment items are invalid", assessmentItems.some(isInvalid) diff --git a/contentcuration/contentcuration/frontend/shared/vuex/channel/__tests__/module.spec.js 
b/contentcuration/contentcuration/frontend/shared/vuex/channel/__tests__/module.spec.js index e0c7551fb6..182dd79fd6 100644 --- a/contentcuration/contentcuration/frontend/shared/vuex/channel/__tests__/module.spec.js +++ b/contentcuration/contentcuration/frontend/shared/vuex/channel/__tests__/module.spec.js @@ -8,6 +8,7 @@ import { ViewerM2M, EditorM2M, User, + injectVuexStore, } from 'shared/data/resources'; import { SharingPermissions } from 'shared/constants'; import storeFactory from 'shared/vuex/baseStore'; @@ -22,18 +23,20 @@ describe('channel actions', () => { let id; const channelDatum = { name: 'test', deleted: false, edit: true }; beforeEach(() => { + store = storeFactory({ + modules: { + channel, + }, + }); + injectVuexStore(store); + store.state.session.currentUser.id = userId; return Channel.add(channelDatum).then(newId => { id = newId; channelDatum.id = id; - store = storeFactory({ - modules: { - channel, - }, - }); - store.state.session.currentUser.id = userId; }); }); afterEach(() => { + injectVuexStore(); return Channel.table.toCollection().delete(); }); describe('loadChannelList action', () => { @@ -268,6 +271,13 @@ describe('Channel sharing vuex', () => { jest .spyOn(ChannelUser, 'fetchCollection') .mockImplementation(() => Promise.resolve([testUser])); + store = storeFactory({ + modules: { + channel, + }, + }); + injectVuexStore(store); + store.state.session.currentUser.id = userId; return Channel.add(channelDatum).then(newId => { channelId = newId; const user = { @@ -279,12 +289,6 @@ describe('Channel sharing vuex', () => { return User.add(user).then(() => { return ViewerM2M.add({ user: user.id, channel: channelDatum.id }).then(() => { return Invitation.table.bulkPut(invitations).then(() => { - store = storeFactory({ - modules: { - channel, - }, - }); - store.state.session.currentUser.id = userId; store.commit('channel/ADD_CHANNEL', { id: channelId, ...channelDatum }); store.commit('channel/SET_USERS_TO_CHANNEL', { channelId, users: [user] }); store.commit('channel/ADD_INVITATIONS', invitations); @@ -295,6 +299,7 @@ describe('Channel sharing vuex', () => { }); afterEach(() => { jest.restoreAllMocks(); + injectVuexStore(); return Promise.all([ Channel.table.toCollection().delete(), ViewerM2M.table.toCollection().delete(), @@ -380,6 +385,8 @@ describe('Channel sharing vuex', () => { id: 'choosy-invitation', email: 'choosy-collaborator@test.com', declined: true, + channel: 'some-other-channel', + user: 'some-other-user', }; Invitation.add(declinedInvitation).then(() => { diff --git a/contentcuration/contentcuration/frontend/shared/vuex/file/__tests__/module.spec.js b/contentcuration/contentcuration/frontend/shared/vuex/file/__tests__/module.spec.js index cf29f6ae40..9a43b87d33 100644 --- a/contentcuration/contentcuration/frontend/shared/vuex/file/__tests__/module.spec.js +++ b/contentcuration/contentcuration/frontend/shared/vuex/file/__tests__/module.spec.js @@ -1,6 +1,7 @@ import storeFactory from 'shared/vuex/baseStore'; -import { File } from 'shared/data/resources'; +import { File, injectVuexStore } from 'shared/data/resources'; import client from 'shared/client'; +import { mockChannelScope, resetMockChannelScope } from 'shared/utils/testing'; jest.mock('shared/vuex/connectionPlugin'); @@ -21,18 +22,22 @@ const userId = 'some user'; describe('file store', () => { let store; let id; - beforeEach(() => { + beforeEach(async () => { + await mockChannelScope('test-123'); jest.spyOn(File, 'fetchCollection').mockImplementation(() => Promise.resolve([testFile])); 
jest.spyOn(File, 'fetchModel').mockImplementation(() => Promise.resolve(testFile)); + store = storeFactory(); + injectVuexStore(store); + store.state.session.currentUser.id = userId; return File.add(testFile).then(newId => { id = newId; - store = storeFactory(); store.commit('file/ADD_FILE', { id, ...testFile }); - store.state.session.currentUser.id = userId; }); }); - afterEach(() => { + afterEach(async () => { + await resetMockChannelScope(); jest.restoreAllMocks(); + injectVuexStore(); return File.table.toCollection().delete(); }); describe('file getters', () => { From a73100b24f93d1dc18414fc1cea5dcfaf5d3042e Mon Sep 17 00:00:00 2001 From: Richard Tibbles Date: Thu, 25 May 2023 13:51:11 -0700 Subject: [PATCH 14/21] Ensure key is not null. --- .../shared/data/__tests__/changes.spec.js | 10 ++++++++++ .../frontend/shared/data/changes.js | 16 ++++++++++++++-- 2 files changed, 24 insertions(+), 2 deletions(-) diff --git a/contentcuration/contentcuration/frontend/shared/data/__tests__/changes.spec.js b/contentcuration/contentcuration/frontend/shared/data/__tests__/changes.spec.js index 3aea8be46e..317331df27 100644 --- a/contentcuration/contentcuration/frontend/shared/data/__tests__/changes.spec.js +++ b/contentcuration/contentcuration/frontend/shared/data/__tests__/changes.spec.js @@ -248,6 +248,16 @@ describe('Change Types', () => { describe('Change Types Unhappy Paths', () => { // CreatedChange + it('should throw error when CreatedChange is instantiated without key', () => { + expect(() => new CreatedChange({ table: TABLE_NAMES.CONTENTNODE, obj: {} })).toThrow( + new TypeError('key is required for a CreatedChange but it was undefined') + ); + }); + it('should throw error when CreatedChange is instantiated with a null key', () => { + expect(() => new CreatedChange({ key: null, table: TABLE_NAMES.CONTENTNODE, obj: {} })).toThrow( + new TypeError('key is required for a CreatedChange but it was null') + ); + }); it('should throw error when CreatedChange is instantiated without obj', () => { expect(() => new CreatedChange({ key: '1', table: TABLE_NAMES.CONTENTNODE })).toThrow( new TypeError('obj should be an object, but undefined was passed instead') diff --git a/contentcuration/contentcuration/frontend/shared/data/changes.js b/contentcuration/contentcuration/frontend/shared/data/changes.js index 998af6d389..bffeed2e7d 100644 --- a/contentcuration/contentcuration/frontend/shared/data/changes.js +++ b/contentcuration/contentcuration/frontend/shared/data/changes.js @@ -208,7 +208,7 @@ function omitIgnoredSubFields(obj) { class Change { constructor({ type, key, table } = {}) { this.setAndValidateLookup(type, 'type', CHANGE_TYPES_LOOKUP); - this.setAndValidateNotUndefined(key, 'key'); + this.setAndValidateNotNull(key, 'key'); this.setAndValidateLookup(table, 'table', TABLE_NAMES_LOOKUP); if (!INDEXEDDB_RESOURCES[this.table].syncable) { const error = new ReferenceError(`${this.table} is not a syncable table`); @@ -243,7 +243,7 @@ class Change { } this[name] = value; } - setAndValidateNotUndefined(value, name) { + validateNotUndefined(value, name) { if (isUndefined(value)) { const error = new TypeError( `${name} is required for a ${this.changeType} but it was undefined` @@ -251,6 +251,18 @@ class Change { logging.error(error, value); throw error; } + } + setAndValidateNotUndefined(value, name) { + this.validateNotUndefined(value, name); + this[name] = value; + } + setAndValidateNotNull(value, name) { + this.validateNotUndefined(value, name); + if (isNull(value)) { + const error = new 
TypeError(`${name} is required for a ${this.changeType} but it was null`); + logging.error(error, value); + throw error; + } this[name] = value; } validateObj(value, name) { From 17ec8ff415124bf72aeb2842963e452a3201cd60 Mon Sep 17 00:00:00 2001 From: Richard Tibbles Date: Thu, 25 May 2023 14:04:02 -0700 Subject: [PATCH 15/21] Make the service worker serving handle no webpack devserver in debug mode. --- contentcuration/contentcuration/views/pwa.py | 21 +++++++++++++++----- 1 file changed, 16 insertions(+), 5 deletions(-) diff --git a/contentcuration/contentcuration/views/pwa.py b/contentcuration/contentcuration/views/pwa.py index 5ac0100e7b..a150527d1b 100644 --- a/contentcuration/contentcuration/views/pwa.py +++ b/contentcuration/contentcuration/views/pwa.py @@ -3,6 +3,7 @@ from django.conf import settings from django.contrib.staticfiles import finders from django.contrib.staticfiles.storage import staticfiles_storage +from django.http import Http404 from django.views.generic.base import TemplateView @@ -13,17 +14,27 @@ class ServiceWorkerView(TemplateView): def get_context_data(self, **kwargs): context = super().get_context_data(**kwargs) # Code to access the compiled serivce worker in developer mode + content = None if getattr(settings, "DEBUG", False): import requests + try: + request = requests.get("http://127.0.0.1:4000/dist/serviceWorker.js") + content = request.content.decode("utf-8") + except requests.exceptions.RequestException: + pass - request = requests.get("http://127.0.0.1:4000/dist/serviceWorker.js") - content = request.content.decode("utf-8") - else: + if content is None: path = staticfiles_storage.path("studio/serviceWorker.js") if not os.path.exists(path): path = finders.find("studio/serviceWorker.js") - with open(path) as f: - content = f.read() + try: + with open(path) as f: + content = f.read() + except (FileNotFoundError, IOError): + pass + + if content is None: + raise Http404("Service worker not found") context["webpack_service_worker"] = content return context From 533b293144564341fbb0618b7c0155c8c797c32f Mon Sep 17 00:00:00 2001 From: Richard Tibbles Date: Thu, 25 May 2023 14:12:10 -0700 Subject: [PATCH 16/21] Make resolve/reject stack more comprehensible. --- .../contentcuration/frontend/shared/data/serverSync.js | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/contentcuration/contentcuration/frontend/shared/data/serverSync.js b/contentcuration/contentcuration/frontend/shared/data/serverSync.js index 52f495c8f2..961bfcc660 100644 --- a/contentcuration/contentcuration/frontend/shared/data/serverSync.js +++ b/contentcuration/contentcuration/frontend/shared/data/serverSync.js @@ -293,13 +293,13 @@ async function syncChanges(syncAllChanges) { // Set the sync debounce time artificially low in tests to avoid timeouts. const syncDebounceWait = process.env.NODE_ENV === 'test' ? 1 : SYNC_IF_NO_CHANGES_FOR * 1000; let syncDebounceTimer; -const syncResolveRejectStack = []; +const syncDeferredStack = []; let syncingPromise = Promise.resolve(); function doSyncChanges(syncAll = false) { syncDebounceTimer = null; // Splice all the resolve/reject handlers off the stack - const resolveRejectStack = syncResolveRejectStack.splice(0); + const deferredStack = syncDeferredStack.splice(0); // Wait for any existing sync to complete, then sync again. 
syncingPromise = syncingPromise .then(() => syncChanges(syncAll)) @@ -307,13 +307,13 @@ function doSyncChanges(syncAll = false) { // If it is successful call all of the resolve functions that we have stored // from all the Promises that have been returned while this specific debounce // has been active. - for (const [resolve] of resolveRejectStack) { + for (const { resolve } of deferredStack) { resolve(result); } }) .catch(err => { // If there is an error call reject for all previously returned promises. - for (const [, reject] of resolveRejectStack) { + for (const { reject } of deferredStack) { reject(err); } }); @@ -330,7 +330,7 @@ export function debouncedSyncChanges(flush = false, syncAll = false) { // Any subsequent calls will then also revoke this timeout. clearTimeout(syncDebounceTimer); // Add the resolve and reject handlers for this promise to the stack here. - syncResolveRejectStack.push([resolve, reject]); + syncDeferredStack.push({ resolve, reject }); if (flush) { // If immediate invocation is required immediately call doSyncChanges // rather than using a timeout delay. From 58c09a6d2899058eb44990bc2c90b95758257e19 Mon Sep 17 00:00:00 2001 From: Richard Tibbles Date: Thu, 25 May 2023 16:24:54 -0700 Subject: [PATCH 17/21] Add in saving of the source to the change object. --- .../shared/data/__tests__/changes.spec.js | 218 +++++++++++++----- .../shared/data/__tests__/serverSync.spec.js | 1 + .../frontend/shared/data/changes.js | 13 +- .../frontend/shared/data/resources.js | 5 + 4 files changed, 182 insertions(+), 55 deletions(-) diff --git a/contentcuration/contentcuration/frontend/shared/data/__tests__/changes.spec.js b/contentcuration/contentcuration/frontend/shared/data/__tests__/changes.spec.js index 317331df27..dfea978a94 100644 --- a/contentcuration/contentcuration/frontend/shared/data/__tests__/changes.spec.js +++ b/contentcuration/contentcuration/frontend/shared/data/__tests__/changes.spec.js @@ -1,5 +1,6 @@ import pick from 'lodash/pick'; import { + Change, CreatedChange, UpdatedChange, DeletedChange, @@ -11,6 +12,7 @@ import { } from '../changes'; import { CHANGES_TABLE, + CHANGE_TYPES, TABLE_NAMES, RELATIVE_TREE_POSITIONS, LAST_FETCHED, @@ -20,6 +22,8 @@ import { import db from 'shared/data/db'; import { mockChannelScope, resetMockChannelScope } from 'shared/utils/testing'; +const CLIENTID = 'test-client-id'; + describe('Change Types', () => { const channel_id = 'test-123'; beforeEach(async () => { @@ -36,20 +40,27 @@ describe('Change Types', () => { key: '1', table: TABLE_NAMES.CONTENTNODE, obj: { a: 1, b: 2 }, + source: CLIENTID, }); const rev = await change.saveChange(); const persistedChange = await db[CHANGES_TABLE].get(rev); expect(persistedChange).toEqual({ channel_id, rev, - ...pick(change, ['type', 'key', 'table', 'obj']), + ...pick(change, ['type', 'key', 'table', 'obj', 'source']), }); }); it('should persist only the specified fields in the UpdatedChange', async () => { const oldObj = { a: 1, b: 2 }; const changes = { a: 1, b: 3 }; - const change = new UpdatedChange({ key: '1', table: TABLE_NAMES.CONTENTNODE, oldObj, changes }); + const change = new UpdatedChange({ + key: '1', + table: TABLE_NAMES.CONTENTNODE, + oldObj, + changes, + source: CLIENTID, + }); const rev = await change.saveChange(); const persistedChange = await db[CHANGES_TABLE].get(rev); expect(persistedChange).toEqual({ @@ -58,14 +69,20 @@ describe('Change Types', () => { mods: { b: 3 }, rev, channel_id, - ...pick(change, ['type', 'key', 'table']), + ...pick(change, ['type', 'key', 'table', 
'source']), }); }); it('should save the mods as key paths in the UpdatedChange', async () => { const oldObj = { a: 1, b: { c: 1 } }; const changes = { a: 1, b: { c: 2 } }; - const change = new UpdatedChange({ key: '1', table: TABLE_NAMES.CONTENTNODE, oldObj, changes }); + const change = new UpdatedChange({ + key: '1', + table: TABLE_NAMES.CONTENTNODE, + oldObj, + changes, + source: CLIENTID, + }); const rev = await change.saveChange(); const persistedChange = await db[CHANGES_TABLE].get(rev); expect(persistedChange).toEqual({ @@ -74,14 +91,20 @@ describe('Change Types', () => { mods: { 'b.c': 2 }, rev, channel_id, - ...pick(change, ['type', 'key', 'table']), + ...pick(change, ['type', 'key', 'table', 'source']), }); }); it('should save deleted mods as key paths to null in the UpdatedChange', async () => { const oldObj = { a: 1, b: { c: 1 } }; const changes = { a: 1, b: {} }; - const change = new UpdatedChange({ key: '1', table: TABLE_NAMES.CONTENTNODE, oldObj, changes }); + const change = new UpdatedChange({ + key: '1', + table: TABLE_NAMES.CONTENTNODE, + oldObj, + changes, + source: CLIENTID, + }); const rev = await change.saveChange(); const persistedChange = await db[CHANGES_TABLE].get(rev); expect(persistedChange).toEqual({ @@ -90,7 +113,7 @@ describe('Change Types', () => { mods: { 'b.c': null }, rev, channel_id, - ...pick(change, ['type', 'key', 'table']), + ...pick(change, ['type', 'key', 'table', 'source']), }); }); @@ -103,7 +126,13 @@ describe('Change Types', () => { [TASK_ID]: '18292183921', [COPYING_FLAG]: true, }; - const change = new UpdatedChange({ key: '1', table: TABLE_NAMES.CONTENTNODE, oldObj, changes }); + const change = new UpdatedChange({ + key: '1', + table: TABLE_NAMES.CONTENTNODE, + oldObj, + changes, + source: CLIENTID, + }); const rev = await change.saveChange(); const persistedChange = await db[CHANGES_TABLE].get(rev); expect(persistedChange).toEqual({ @@ -112,7 +141,7 @@ describe('Change Types', () => { mods: { b: 3 }, rev, channel_id, - ...pick(change, ['type', 'key', 'table']), + ...pick(change, ['type', 'key', 'table', 'source']), }); }); @@ -125,7 +154,13 @@ describe('Change Types', () => { [TASK_ID]: '18292183921', [COPYING_FLAG]: true, }; - const change = new UpdatedChange({ key: '1', table: TABLE_NAMES.CONTENTNODE, oldObj, changes }); + const change = new UpdatedChange({ + key: '1', + table: TABLE_NAMES.CONTENTNODE, + oldObj, + changes, + source: CLIENTID, + }); const rev = await change.saveChange(); expect(rev).toBeNull(); }); @@ -135,13 +170,14 @@ describe('Change Types', () => { key: '1', table: TABLE_NAMES.CONTENTNODE, oldObj: { a: 1, b: 2 }, + source: CLIENTID, }); const rev = await change.saveChange(); const persistedChange = await db[CHANGES_TABLE].get(rev); expect(persistedChange).toEqual({ channel_id, rev, - ...pick(change, ['type', 'key', 'table', 'oldObj']), + ...pick(change, ['type', 'key', 'table', 'oldObj', 'source']), }); }); @@ -152,13 +188,14 @@ describe('Change Types', () => { target: '2', position: RELATIVE_TREE_POSITIONS.LAST_CHILD, parent: '3', + source: CLIENTID, }); const rev = await change.saveChange(); const persistedChange = await db[CHANGES_TABLE].get(rev); expect(persistedChange).toEqual({ rev, channel_id, - ...pick(change, ['type', 'key', 'table', 'target', 'position', 'parent']), + ...pick(change, ['type', 'key', 'table', 'target', 'position', 'parent', 'source']), }); }); @@ -172,6 +209,7 @@ describe('Change Types', () => { position: RELATIVE_TREE_POSITIONS.LAST_CHILD, excluded_descendants: null, parent: '3', + source: 
CLIENTID, }); const rev = await change.saveChange(); const persistedChange = await db[CHANGES_TABLE].get(rev); @@ -188,6 +226,7 @@ describe('Change Types', () => { 'position', 'excluded_descendants', 'parent', + 'source', ]), }); }); @@ -198,13 +237,14 @@ describe('Change Types', () => { table: TABLE_NAMES.CHANNEL, version_notes: 'Some version notes', language: 'en', + source: CLIENTID, }); const rev = await change.saveChange(); const persistedChange = await db[CHANGES_TABLE].get(rev); expect(persistedChange).toEqual({ rev, channel_id: change.key, - ...pick(change, ['type', 'key', 'table', 'version_notes', 'language']), + ...pick(change, ['type', 'key', 'table', 'version_notes', 'language', 'source']), }); }); @@ -216,6 +256,7 @@ describe('Change Types', () => { resource_details: false, files: true, assessment_items: false, + source: CLIENTID, }); const rev = await change.saveChange(); const persistedChange = await db[CHANGES_TABLE].get(rev); @@ -230,68 +271,127 @@ describe('Change Types', () => { 'resource_details', 'files', 'assessment_items', + 'source', ]), }); }); it('should persist only the specified fields in the DeployedChange', async () => { - const change = new DeployedChange({ key: '1', table: TABLE_NAMES.CHANNEL }); + const change = new DeployedChange({ key: '1', table: TABLE_NAMES.CHANNEL, source: CLIENTID }); const rev = await change.saveChange(); const persistedChange = await db[CHANGES_TABLE].get(rev); expect(persistedChange).toEqual({ rev, channel_id: change.key, - ...pick(change, ['type', 'key', 'table']), + ...pick(change, ['type', 'key', 'table', 'source']), }); }); }); describe('Change Types Unhappy Paths', () => { - // CreatedChange - it('should throw error when CreatedChange is instantiated without key', () => { - expect(() => new CreatedChange({ table: TABLE_NAMES.CONTENTNODE, obj: {} })).toThrow( - new TypeError('key is required for a CreatedChange but it was undefined') - ); + // Base Change + it('should throw error when Change is instantiated without key', () => { + expect( + () => + new Change({ table: TABLE_NAMES.CONTENTNODE, source: CLIENTID, type: CHANGE_TYPES.CREATED }) + ).toThrow(new TypeError('key is required for a Change but it was undefined')); }); - it('should throw error when CreatedChange is instantiated with a null key', () => { - expect(() => new CreatedChange({ key: null, table: TABLE_NAMES.CONTENTNODE, obj: {} })).toThrow( - new TypeError('key is required for a CreatedChange but it was null') - ); + it('should throw error when Change is instantiated with a null key', () => { + expect( + () => + new Change({ + key: null, + table: TABLE_NAMES.CONTENTNODE, + source: CLIENTID, + type: CHANGE_TYPES.CREATED, + }) + ).toThrow(new TypeError('key is required for a Change but it was null')); }); + + it('should throw error when Change is instantiated without a source', () => { + expect( + () => new Change({ key: '1', table: TABLE_NAMES.CONTENTNODE, type: CHANGE_TYPES.CREATED }) + ).toThrow(new ReferenceError('source should be a string, but undefined was passed instead')); + }); + + it('should throw error when Change is instantiated with a non-string source', () => { + expect( + () => + new Change({ + key: '1', + source: 123, + table: TABLE_NAMES.CONTENTNODE, + type: CHANGE_TYPES.CREATED, + }) + ).toThrow(new ReferenceError('source should be a string, but 123 was passed instead')); + }); + + it('should throw error when Change is instantiated with invalid table', () => { + expect( + () => new Change({ key: '1', table: 'test', source: CLIENTID, type: 
CHANGE_TYPES.CREATED }) + ).toThrow(new ReferenceError('test is not a valid table value')); + }); + + it('should throw error when Change is instantiated with a non-syncable table', () => { + expect( + () => + new Change({ + key: '1', + table: TABLE_NAMES.SESSION, + source: CLIENTID, + type: CHANGE_TYPES.CREATED, + }) + ).toThrow(new TypeError('session is not a syncable table')); + }); + + // CreatedChange + it('should throw error when CreatedChange is instantiated without obj', () => { - expect(() => new CreatedChange({ key: '1', table: TABLE_NAMES.CONTENTNODE })).toThrow( - new TypeError('obj should be an object, but undefined was passed instead') - ); + expect( + () => new CreatedChange({ key: '1', table: TABLE_NAMES.CONTENTNODE, source: CLIENTID }) + ).toThrow(new TypeError('obj should be an object, but undefined was passed instead')); }); it('should throw error when CreatedChange is instantiated with incorrect obj type', () => { expect( - () => new CreatedChange({ key: '1', table: TABLE_NAMES.CONTENTNODE, obj: 'invalid' }) + () => + new CreatedChange({ + key: '1', + table: TABLE_NAMES.CONTENTNODE, + obj: 'invalid', + source: CLIENTID, + }) ).toThrow(new TypeError('obj should be an object, but invalid was passed instead')); }); - it('should throw error when CreatedChange is instantiated with a non-syncable table', () => { - expect(() => new CreatedChange({ key: '1', table: TABLE_NAMES.SESSION, obj: {} })).toThrow( - new TypeError('session is not a syncable table') - ); - }); - // UpdatedChange it('should throw error when UpdatedChange is instantiated without changes', () => { - expect(() => new UpdatedChange({ key: '1', table: TABLE_NAMES.CONTENTNODE })).toThrow( - new TypeError('changes should be an object, but undefined was passed instead') - ); + expect( + () => new UpdatedChange({ key: '1', table: TABLE_NAMES.CONTENTNODE, source: CLIENTID }) + ).toThrow(new TypeError('changes should be an object, but undefined was passed instead')); }); it('should throw error when UpdatedChange is instantiated with incorrect changes type', () => { expect( - () => new UpdatedChange({ key: '1', table: TABLE_NAMES.CONTENTNODE, changes: 'invalid' }) + () => + new UpdatedChange({ + key: '1', + table: TABLE_NAMES.CONTENTNODE, + changes: 'invalid', + source: CLIENTID, + }) ).toThrow(new TypeError('changes should be an object, but invalid was passed instead')); }); it('should throw error when UpdatedChange is instantiated without oldObj', () => { expect( - () => new UpdatedChange({ key: '1', table: TABLE_NAMES.CONTENTNODE, changes: {} }) + () => + new UpdatedChange({ + key: '1', + table: TABLE_NAMES.CONTENTNODE, + changes: {}, + source: CLIENTID, + }) ).toThrow(new TypeError('oldObj should be an object, but undefined was passed instead')); }); @@ -303,19 +403,14 @@ describe('Change Types Unhappy Paths', () => { table: TABLE_NAMES.CONTENTNODE, changes: {}, oldObj: 'invalid', + source: CLIENTID, }) ).toThrow(new TypeError('oldObj should be an object, but invalid was passed instead')); }); // DeletedChange - it('should throw error when DeletedChange is instantiated without key', () => { - expect(() => new DeletedChange({ table: TABLE_NAMES.CONTENTNODE })).toThrow( - new TypeError('key is required for a DeletedChange but it was undefined') - ); - }); - it('should throw error when DeletedChange is instantiated with invalid table', () => { - expect(() => new DeletedChange({ key: '1', table: 'test' })).toThrow( + expect(() => new DeletedChange({ key: '1', table: 'test', source: CLIENTID })).toThrow( new 
ReferenceError('test is not a valid table value') ); }); @@ -328,6 +423,7 @@ describe('Change Types Unhappy Paths', () => { key: '1', table: TABLE_NAMES.CONTENTNODE, position: RELATIVE_TREE_POSITIONS.LAST_CHILD, + source: CLIENTID, }) ).toThrow(new TypeError('target is required for a MovedChange but it was undefined')); }); @@ -340,6 +436,7 @@ describe('Change Types Unhappy Paths', () => { table: TABLE_NAMES.CONTENTNODE, target: '2', position: 'invalid', + source: CLIENTID, }) ).toThrow(new ReferenceError('invalid is not a valid position value')); }); @@ -352,15 +449,16 @@ describe('Change Types Unhappy Paths', () => { table: TABLE_NAMES.CONTENTNODE, target: '2', position: RELATIVE_TREE_POSITIONS.LAST_CHILD, + source: CLIENTID, }) ).toThrow(new ReferenceError('parent is required for a MovedChange but it was undefined')); }); // CopiedChange it('should throw error when CopiedChange is instantiated without from_key', () => { - expect(() => new CopiedChange({ key: '1', table: TABLE_NAMES.CONTENTNODE })).toThrow( - new TypeError('from_key is required for a CopiedChange but it was undefined') - ); + expect( + () => new CopiedChange({ key: '1', table: TABLE_NAMES.CONTENTNODE, source: CLIENTID }) + ).toThrow(new TypeError('from_key is required for a CopiedChange but it was undefined')); }); it('should throw error when CopiedChange is instantiated with incorrect mods type', () => { @@ -371,6 +469,7 @@ describe('Change Types Unhappy Paths', () => { table: TABLE_NAMES.CONTENTNODE, from_key: '2', mods: 'invalid', + source: CLIENTID, }) ).toThrow(new TypeError('mods should be an object, but invalid was passed instead')); }); @@ -383,6 +482,7 @@ describe('Change Types Unhappy Paths', () => { table: TABLE_NAMES.CONTENTNODE, from_key: '2', mods: { a: 1 }, + source: CLIENTID, }) ).toThrow(new TypeError('target is required for a CopiedChange but it was undefined')); }); @@ -397,6 +497,7 @@ describe('Change Types Unhappy Paths', () => { mods: { a: 1 }, target: '3', position: 'invalid', + source: CLIENTID, }) ).toThrow(new ReferenceError('invalid is not a valid position value')); }); @@ -412,6 +513,7 @@ describe('Change Types Unhappy Paths', () => { target: '3', position: RELATIVE_TREE_POSITIONS.LAST_CHILD, excluded_descendants: 'invalid', + source: CLIENTID, }) ).toThrow( new TypeError( @@ -431,13 +533,16 @@ describe('Change Types Unhappy Paths', () => { target: '3', position: RELATIVE_TREE_POSITIONS.LAST_CHILD, excluded_descendants: null, + source: CLIENTID, }) ).toThrow(new TypeError('parent is required for a CopiedChange but it was undefined')); }); // PublishedChange it('should throw error when PublishedChange is instantiated without version_notes', () => { - expect(() => new PublishedChange({ key: '1', table: TABLE_NAMES.CHANNEL })).toThrow( + expect( + () => new PublishedChange({ key: '1', table: TABLE_NAMES.CHANNEL, source: CLIENTID }) + ).toThrow( new TypeError('version_notes is required for a PublishedChange but it was undefined') ); }); @@ -449,13 +554,16 @@ describe('Change Types Unhappy Paths', () => { key: '1', table: TABLE_NAMES.CHANNEL, version_notes: 'Some notes', + source: CLIENTID, }) ).toThrow(new TypeError('language is required for a PublishedChange but it was undefined')); }); // SyncedChange it('should throw error when SyncedChange is instantiated without titles_and_descriptions', () => { - expect(() => new SyncedChange({ key: '1', table: TABLE_NAMES.CHANNEL })).toThrow( + expect( + () => new SyncedChange({ key: '1', table: TABLE_NAMES.CHANNEL, source: CLIENTID }) + ).toThrow( new 
TypeError('titles_and_descriptions should be a boolean, but undefined was passed instead') ); }); @@ -467,6 +575,7 @@ describe('Change Types Unhappy Paths', () => { key: '1', table: TABLE_NAMES.CHANNEL, titles_and_descriptions: 'invalid', + source: CLIENTID, }) ).toThrow( new TypeError('titles_and_descriptions should be a boolean, but invalid was passed instead') @@ -481,6 +590,7 @@ describe('Change Types Unhappy Paths', () => { table: TABLE_NAMES.CHANNEL, titles_and_descriptions: true, resource_details: 'invalid', + source: CLIENTID, }) ).toThrow( new TypeError('resource_details should be a boolean, but invalid was passed instead') @@ -496,6 +606,7 @@ describe('Change Types Unhappy Paths', () => { titles_and_descriptions: true, resource_details: false, files: 'invalid', + source: CLIENTID, }) ).toThrow(new TypeError('files should be a boolean, but invalid was passed instead')); }); @@ -510,6 +621,7 @@ describe('Change Types Unhappy Paths', () => { resource_details: false, files: true, assessment_items: 'invalid', + source: CLIENTID, }) ).toThrow( new TypeError('assessment_items should be a boolean, but invalid was passed instead') @@ -518,13 +630,13 @@ describe('Change Types Unhappy Paths', () => { // DeployedChange it('should throw error when DeployedChange is instantiated without key', () => { - expect(() => new DeployedChange({ table: TABLE_NAMES.CHANNEL })).toThrow( + expect(() => new DeployedChange({ table: TABLE_NAMES.CHANNEL, source: CLIENTID })).toThrow( new TypeError('key is required for a DeployedChange but it was undefined') ); }); it('should throw error when DeployedChange is instantiated with invalid table', () => { - expect(() => new DeployedChange({ key: '1', table: 'test' })).toThrow( + expect(() => new DeployedChange({ key: '1', table: 'test', source: CLIENTID })).toThrow( new ReferenceError('test is not a valid table value') ); }); diff --git a/contentcuration/contentcuration/frontend/shared/data/__tests__/serverSync.spec.js b/contentcuration/contentcuration/frontend/shared/data/__tests__/serverSync.spec.js index a74150642f..032b0f507e 100644 --- a/contentcuration/contentcuration/frontend/shared/data/__tests__/serverSync.spec.js +++ b/contentcuration/contentcuration/frontend/shared/data/__tests__/serverSync.spec.js @@ -11,6 +11,7 @@ async function makeChange(key, server_rev) { key: String(key), table: TABLE_NAMES.CONTENTNODE, obj: { a: 1, b: 2 }, + source: 'test-source-id', }).saveChange(); if (server_rev) { await db[CHANGES_TABLE].update(rev, { server_rev }); diff --git a/contentcuration/contentcuration/frontend/shared/data/changes.js b/contentcuration/contentcuration/frontend/shared/data/changes.js index bffeed2e7d..90a0e953a7 100644 --- a/contentcuration/contentcuration/frontend/shared/data/changes.js +++ b/contentcuration/contentcuration/frontend/shared/data/changes.js @@ -205,8 +205,8 @@ function omitIgnoredSubFields(obj) { return omit(obj, ignoredSubFields); } -class Change { - constructor({ type, key, table } = {}) { +export class Change { + constructor({ type, key, table, source } = {}) { this.setAndValidateLookup(type, 'type', CHANGE_TYPES_LOOKUP); this.setAndValidateNotNull(key, 'key'); this.setAndValidateLookup(table, 'table', TABLE_NAMES_LOOKUP); @@ -215,6 +215,7 @@ class Change { logging.error(error); throw error; } + this.setAndValidateString(source, 'source'); } get changeType() { return this.constructor.name; @@ -294,6 +295,14 @@ class Change { } this[name] = mapper(value); } + setAndValidateString(value, name) { + if (typeof value !== 'string') { + const 
error = new TypeError(`${name} should be a string, but ${value} was passed instead`); + logging.error(error, value); + throw error; + } + this[name] = value; + } saveChange() { if (!this.channelOrUserIdSet) { throw new ReferenceError( diff --git a/contentcuration/contentcuration/frontend/shared/data/resources.js b/contentcuration/contentcuration/frontend/shared/data/resources.js index 6db716924d..d4658e696d 100644 --- a/contentcuration/contentcuration/frontend/shared/data/resources.js +++ b/contentcuration/contentcuration/frontend/shared/data/resources.js @@ -560,6 +560,7 @@ class IndexedDBResource { table: this.tableName, oldObj, changes, + source: CLIENTID, }); return this._saveAndQueueChange(change); } @@ -620,6 +621,7 @@ class IndexedDBResource { key: id, table: this.tableName, obj, + source: CLIENTID, }); return this._saveAndQueueChange(change); } @@ -663,6 +665,7 @@ class IndexedDBResource { key: id, table: this.tableName, oldObj, + source: CLIENTID, }); return this._saveAndQueueChange(change); }); @@ -1170,6 +1173,7 @@ export const Channel = new Resource({ files, assessment_items, table: this.tableName, + source: CLIENTID, }); return this.transaction({ mode: 'rw', source: IGNORED_SOURCE }, CHANGES_TABLE, () => { return this._saveAndQueueChange(change); @@ -1412,6 +1416,7 @@ export const ContentNode = new TreeResource({ parent: parent.id, oldObj: isCreate ? null : node, table: this.tableName, + source: CLIENTID, }; return callback({ From f64e6de41919c14f8d2fb80c3b663c2cca6c2bf0 Mon Sep 17 00:00:00 2001 From: Richard Tibbles Date: Thu, 25 May 2023 16:27:57 -0700 Subject: [PATCH 18/21] Update change tracker to only track changes from this client/tab. --- .../contentcuration/frontend/shared/data/changes.js | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/contentcuration/contentcuration/frontend/shared/data/changes.js b/contentcuration/contentcuration/frontend/shared/data/changes.js index 90a0e953a7..6065218d10 100644 --- a/contentcuration/contentcuration/frontend/shared/data/changes.js +++ b/contentcuration/contentcuration/frontend/shared/data/changes.js @@ -6,7 +6,7 @@ import isUndefined from 'lodash/isUndefined'; import omit from 'lodash/omit'; import sortBy from 'lodash/sortBy'; import logging from '../logging'; -import db from 'shared/data/db'; +import db, { CLIENTID } from 'shared/data/db'; import { promiseChunk } from 'shared/utils/helpers'; import { CHANGES_TABLE, @@ -14,7 +14,6 @@ import { CHANGE_TYPES_LOOKUP, TABLE_NAMES, TABLE_NAMES_LOOKUP, - IGNORED_SOURCE, RELATIVE_TREE_POSITIONS, RELATIVE_TREE_POSITIONS_LOOKUP, LAST_FETCHED, @@ -98,7 +97,8 @@ export class ChangeTracker { } this._changes = await changes - .filter(change => !change.source.match(IGNORED_SOURCE)) + // Filter out changes that were not made by this client/browser tab + .filter(change => change.source === CLIENTID) .sortBy('rev'); } From c0e34e4589ea6c616e4cf6b342b7c26f1b0321b1 Mon Sep 17 00:00:00 2001 From: Richard Tibbles Date: Thu, 25 May 2023 16:33:53 -0700 Subject: [PATCH 19/21] Remove use of IGNORED_SOURCE as we are no longer syncing change events from Dexie Observable. 
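Note for reviewers (illustration only, not part of the diff): the before/after transaction pattern this patch removes. Until now, writes that originated from the server were wrapped in a transaction tagged with IGNORED_SOURCE so that Dexie Observable's change events for them would not be synced back; now that changes are recorded as explicit Change objects carrying source: CLIENTID, the tag is redundant and a plain read-write transaction is enough. The names below are taken from the resetDB hunk in this diff.

    // Before: tag the transaction so its observable change events are ignored by the sync layer
    return db.transaction('rw', ...tableNames, () => {
      Dexie.currentTransaction.source = IGNORED_SOURCE;
      return Promise.all(tableNames.map(table => db[table].clear()));
    });

    // After: plain transaction; only explicitly created Change objects (source: CLIENTID) are synced
    return db.transaction('rw', ...tableNames, () => {
      return Promise.all(tableNames.map(table => db[table].clear()));
    });
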
--- .../shared/data/applyRemoteChanges.js | 3 +- .../frontend/shared/data/constants.js | 9 - .../frontend/shared/data/index.js | 4 +- .../frontend/shared/data/resources.js | 241 ++++++++---------- .../frontend/shared/data/serverSync.js | 4 +- .../frontend/shared/vuex/channel/actions.js | 3 +- 6 files changed, 117 insertions(+), 147 deletions(-) diff --git a/contentcuration/contentcuration/frontend/shared/data/applyRemoteChanges.js b/contentcuration/contentcuration/frontend/shared/data/applyRemoteChanges.js index effbdd3c81..02b34ca9af 100644 --- a/contentcuration/contentcuration/frontend/shared/data/applyRemoteChanges.js +++ b/contentcuration/contentcuration/frontend/shared/data/applyRemoteChanges.js @@ -1,7 +1,7 @@ import Dexie from 'dexie'; import sortBy from 'lodash/sortBy'; import logging from '../logging'; -import { CHANGE_TYPES, IGNORED_SOURCE, TABLE_NAMES } from './constants'; +import { CHANGE_TYPES, TABLE_NAMES } from './constants'; import db from './db'; import { INDEXEDDB_RESOURCES } from './registry'; @@ -46,7 +46,6 @@ function transaction(change, ...args) { const callback = args.pop(); const tableNames = [change.table, ...args]; return db.transaction('rw', tableNames, () => { - Dexie.currentTransaction.source = IGNORED_SOURCE; return callback(); }); } diff --git a/contentcuration/contentcuration/frontend/shared/data/constants.js b/contentcuration/contentcuration/frontend/shared/data/constants.js index d7e8f97139..5f1ea5d357 100644 --- a/contentcuration/contentcuration/frontend/shared/data/constants.js +++ b/contentcuration/contentcuration/frontend/shared/data/constants.js @@ -55,15 +55,6 @@ export const TABLE_NAMES_LOOKUP = invert(TABLE_NAMES); export const APP_ID = 'KolibriStudio'; -// Transaction sources -/** - * This transaction source will be ignored when tracking the - * client's changes - * - * @type {string} - */ -export const IGNORED_SOURCE = 'IGNORED_SOURCE'; - export const RELATIVE_TREE_POSITIONS = { FIRST_CHILD: 'first-child', LAST_CHILD: 'last-child', diff --git a/contentcuration/contentcuration/frontend/shared/data/index.js b/contentcuration/contentcuration/frontend/shared/data/index.js index facdaa067c..380e09f9c1 100644 --- a/contentcuration/contentcuration/frontend/shared/data/index.js +++ b/contentcuration/contentcuration/frontend/shared/data/index.js @@ -1,7 +1,6 @@ -import Dexie from 'dexie'; import * as Sentry from '@sentry/vue'; import mapValues from 'lodash/mapValues'; -import { CHANGES_TABLE, IGNORED_SOURCE, TABLE_NAMES } from './constants'; +import { CHANGES_TABLE, TABLE_NAMES } from './constants'; import db from './db'; import { INDEXEDDB_RESOURCES } from './registry'; import { startSyncing, stopSyncing, syncOnChanges } from './serverSync'; @@ -30,7 +29,6 @@ export function setupSchema() { export function resetDB() { const tableNames = Object.values(TABLE_NAMES); return db.transaction('rw', ...tableNames, () => { - Dexie.currentTransaction.source = IGNORED_SOURCE; return Promise.all(tableNames.map(table => db[table].clear())); }); } diff --git a/contentcuration/contentcuration/frontend/shared/data/resources.js b/contentcuration/contentcuration/frontend/shared/data/resources.js index d4658e696d..73fe19f3a3 100644 --- a/contentcuration/contentcuration/frontend/shared/data/resources.js +++ b/contentcuration/contentcuration/frontend/shared/data/resources.js @@ -16,7 +16,6 @@ import { v4 as uuidv4 } from 'uuid'; import { CHANGE_TYPES, CHANGES_TABLE, - IGNORED_SOURCE, RELATIVE_TREE_POSITIONS, TABLE_NAMES, COPYING_FLAG, @@ -274,93 +273,87 @@ class 
IndexedDBResource { setData(itemData) { const now = Date.now(); - // Explicitly set the source of this as a fetch - // from the server, to prevent us from trying - // to sync these changes back to the server! - return this.transaction( - { mode: 'rw', source: IGNORED_SOURCE }, - this.tableName, - CHANGES_TABLE, - () => { - // Get any relevant changes that would be overwritten by this bulkPut - const changesPromise = db[CHANGES_TABLE].where('[table+key]') - .anyOf(itemData.map(datum => [this.tableName, this.getIdValue(datum)])) - .sortBy('rev'); - const currentPromise = this.table - .where(this.idField) - .anyOf(itemData.map(datum => this.getIdValue(datum))) - .toArray(); - - return Promise.all([changesPromise, currentPromise]).then(([changes, currents]) => { - changes = mergeAllChanges(changes, true); - const collectedChanges = collectChanges(changes)[this.tableName] || {}; - for (const changeType of Object.keys(collectedChanges)) { - const map = {}; - for (const change of collectedChanges[changeType]) { - map[change.key] = change; - } - collectedChanges[changeType] = map; - } - const currentMap = {}; - for (const currentObj of currents) { - currentMap[this.getIdValue(currentObj)] = currentObj; + // Directly write to the table, rather than using the add/update methods + // to avoid creating change events that we would sync back to the server. + return this.transaction({ mode: 'rw' }, this.tableName, CHANGES_TABLE, () => { + // Get any relevant changes that would be overwritten by this bulkPut + const changesPromise = db[CHANGES_TABLE].where('[table+key]') + .anyOf(itemData.map(datum => [this.tableName, this.getIdValue(datum)])) + .sortBy('rev'); + const currentPromise = this.table + .where(this.idField) + .anyOf(itemData.map(datum => this.getIdValue(datum))) + .toArray(); + + return Promise.all([changesPromise, currentPromise]).then(([changes, currents]) => { + changes = mergeAllChanges(changes, true); + const collectedChanges = collectChanges(changes)[this.tableName] || {}; + for (const changeType of Object.keys(collectedChanges)) { + const map = {}; + for (const change of collectedChanges[changeType]) { + map[change.key] = change; } - const data = itemData - .map(datum => { - const id = this.getIdValue(datum); - datum[LAST_FETCHED] = now; - // Persist TASK_ID and COPYING_FLAG attributes when directly fetching from the server - if (currentMap[id] && currentMap[id][TASK_ID]) { - datum[TASK_ID] = currentMap[id][TASK_ID]; - } - if (currentMap[id] && currentMap[id][COPYING_FLAG]) { - datum[COPYING_FLAG] = currentMap[id][COPYING_FLAG]; - } - // If we have an updated change, apply the modifications here - if ( - collectedChanges[CHANGE_TYPES.UPDATED] && - collectedChanges[CHANGE_TYPES.UPDATED][id] - ) { - applyMods(datum, collectedChanges[CHANGE_TYPES.UPDATED][id].mods); + collectedChanges[changeType] = map; + } + const currentMap = {}; + for (const currentObj of currents) { + currentMap[this.getIdValue(currentObj)] = currentObj; + } + const data = itemData + .map(datum => { + const id = this.getIdValue(datum); + datum[LAST_FETCHED] = now; + // Persist TASK_ID and COPYING_FLAG attributes when directly fetching from the server + if (currentMap[id] && currentMap[id][TASK_ID]) { + datum[TASK_ID] = currentMap[id][TASK_ID]; + } + if (currentMap[id] && currentMap[id][COPYING_FLAG]) { + datum[COPYING_FLAG] = currentMap[id][COPYING_FLAG]; + } + // If we have an updated change, apply the modifications here + if ( + collectedChanges[CHANGE_TYPES.UPDATED] && + collectedChanges[CHANGE_TYPES.UPDATED][id] + 
) { + applyMods(datum, collectedChanges[CHANGE_TYPES.UPDATED][id].mods); + } + return datum; + // If we have a deleted change, just filter out this object so we don't reput it + }) + .filter( + datum => + !collectedChanges[CHANGE_TYPES.DELETED] || + !collectedChanges[CHANGE_TYPES.DELETED][this.getIdValue(datum)] + ); + return this.table.bulkPut(data).then(() => { + // Move changes need to be reapplied on top of fetched data in case anything + // has happened on the backend. + return applyChanges(Object.values(collectedChanges[CHANGE_TYPES.MOVED] || {})).then( + results => { + if (!results || !results.length) { + return data; } - return datum; - // If we have a deleted change, just filter out this object so we don't reput it - }) - .filter( - datum => - !collectedChanges[CHANGE_TYPES.DELETED] || - !collectedChanges[CHANGE_TYPES.DELETED][this.getIdValue(datum)] - ); - return this.table.bulkPut(data).then(() => { - // Move changes need to be reapplied on top of fetched data in case anything - // has happened on the backend. - return applyChanges(Object.values(collectedChanges[CHANGE_TYPES.MOVED] || {})).then( - results => { - if (!results || !results.length) { - return data; - } - const resultsMap = {}; - for (const result of results) { - const id = this.getIdValue(result); - resultsMap[id] = result; - } - return data - .map(datum => { - const id = this.getIdValue(datum); - if (resultsMap[id]) { - applyMods(datum, resultsMap[id]); - } - return datum; - // Concatenate any unsynced created objects onto - // the end of the returned objects - }) - .concat(Object.values(collectedChanges[CHANGE_TYPES.CREATED]).map(c => c.obj)); + const resultsMap = {}; + for (const result of results) { + const id = this.getIdValue(result); + resultsMap[id] = result; } - ); - }); + return data + .map(datum => { + const id = this.getIdValue(datum); + if (resultsMap[id]) { + applyMods(datum, resultsMap[id]); + } + return datum; + // Concatenate any unsynced created objects onto + // the end of the returned objects + }) + .concat(Object.values(collectedChanges[CHANGE_TYPES.CREATED]).map(c => c.obj)); + } + ); }); - } - ); + }); + }); } where(params = {}) { @@ -836,10 +829,9 @@ class Resource extends mix(APIResource, IndexedDBResource) { const now = Date.now(); const data = response.data; data[LAST_FETCHED] = now; - // Explicitly set the source of this as a fetch - // from the server, to prevent us from trying - // to sync these changes back to the server! - return this.transaction({ mode: 'rw', source: IGNORED_SOURCE }, () => { + // Directly write to the table, rather than using the add method + // to avoid creating change events that we would sync back to the server. + return this.transaction({ mode: 'rw' }, () => { return this.table.put(data).then(() => { return data; }); @@ -849,10 +841,9 @@ class Resource extends mix(APIResource, IndexedDBResource) { deleteModel(id) { return client.delete(this.modelUrl(id)).then(() => { - // Explicitly set the source of this as a fetch - // from the server, to prevent us from trying - // to sync these changes back to the server! - return this.transaction({ mode: 'rw', source: IGNORED_SOURCE }, () => { + // Directly write to the table, rather than using the delete method + // to avoid creating change events that we would sync back to the server. 
+ return this.transaction({ mode: 'rw' }, () => { return this.table.delete(id).then(() => { return true; }); @@ -1086,14 +1077,14 @@ export const Channel = new Resource({ updateAsAdmin(id, changes) { return client.patch(window.Urls.adminChannelsDetail(id), changes).then(() => { - return this.transaction({ mode: 'rw', source: IGNORED_SOURCE }, () => { + return this.transaction({ mode: 'rw' }, () => { return this.table.update(id, changes); }); }); }, publish(id, version_notes) { - return this.transaction({ mode: 'rw', source: IGNORED_SOURCE }, () => { + return this.transaction({ mode: 'rw' }, () => { return this.table.update(id, { publishing: true }); }).then(() => { const change = new PublishedChange({ @@ -1101,23 +1092,19 @@ export const Channel = new Resource({ version_notes, language: currentLanguage, table: this.tableName, + source: CLIENTID, + }); + return this.transaction({ mode: 'rw' }, CHANGES_TABLE, TABLE_NAMES.CONTENTNODE, () => { + return Promise.all([ + this._saveAndQueueChange(change), + ContentNode.table.where({ channel_id: id }).modify({ + changed: false, + published: true, + has_new_descendants: false, + has_updated_descendants: false, + }), + ]); }); - return this.transaction( - { mode: 'rw', source: IGNORED_SOURCE }, - CHANGES_TABLE, - TABLE_NAMES.CONTENTNODE, - () => { - return Promise.all([ - this._saveAndQueueChange(change), - ContentNode.table.where({ channel_id: id }).modify({ - changed: false, - published: true, - has_new_descendants: false, - has_updated_descendants: false, - }), - ]); - } - ); }); }, @@ -1127,7 +1114,7 @@ export const Channel = new Resource({ source: CLIENTID, table: this.tableName, }); - return this.transaction({ mode: 'rw', source: IGNORED_SOURCE }, CHANGES_TABLE, () => { + return this.transaction({ mode: 'rw' }, CHANGES_TABLE, () => { return this._saveAndQueueChange(change); }); }, @@ -1175,7 +1162,7 @@ export const Channel = new Resource({ table: this.tableName, source: CLIENTID, }); - return this.transaction({ mode: 'rw', source: IGNORED_SOURCE }, CHANGES_TABLE, () => { + return this.transaction({ mode: 'rw' }, CHANGES_TABLE, () => { return this._saveAndQueueChange(change); }); }, @@ -1183,7 +1170,7 @@ export const Channel = new Resource({ softDelete(id) { const modelUrl = this.modelUrl(id); // Call endpoint directly in case we need to navigate to new page - return this.transaction({ mode: 'rw', source: IGNORED_SOURCE }, () => { + return this.transaction({ mode: 'rw' }, () => { return this.table.update(id, { deleted: true }); }).then(() => { // make sure transaction is closed before calling a non-Dexie async function @@ -1459,8 +1446,7 @@ export const ContentNode = new TreeResource({ move(id, target, position = RELATIVE_TREE_POSITIONS.FIRST_CHILD) { return this.resolveTreeInsert(id, target, position, false, data => { - // Ignore changes from this operation except for the explicit move change we generate. - return this.transaction({ mode: 'rw', source: IGNORED_SOURCE }, CHANGES_TABLE, async () => { + return this.transaction({ mode: 'rw' }, CHANGES_TABLE, async () => { const payload = await this.tableMove(data); const change = new MovedChange(data.changeData); await this._saveAndQueueChange(change); @@ -1470,6 +1456,8 @@ export const ContentNode = new TreeResource({ }, async tableMove({ node, parent, payload }) { + // Do direct table writes here rather than using add/update methods to avoid + // creating unnecessary additional change events. 
const updated = await this.table.update(node.id, payload); // Update didn't succeed, this node probably doesn't exist, do a put instead, // but need to add in other parent info. @@ -1506,9 +1494,7 @@ export const ContentNode = new TreeResource({ return this.resolveTreeInsert(id, target, position, true, data => { data.change.excluded_descendants = excluded_descendants; - // Ignore changes from this operation except for the - // explicit copy change we generate. - return this.transaction({ mode: 'rw', source: IGNORED_SOURCE }, CHANGES_TABLE, async () => { + return this.transaction({ mode: 'rw' }, CHANGES_TABLE, async () => { const payload = await this.tableCopy(data); const change = new CopiedChange(data.changeData); await this._saveAndQueueChange(change); @@ -1539,6 +1525,8 @@ export const ContentNode = new TreeResource({ }; // Manually put our changes into the tree changes for syncing table + // rather than use add/update methods to avoid creating unnecessary + // additional change events. await this.table.put(payload); return payload; }, @@ -1623,7 +1611,7 @@ export const Invitation = new Resource({ accept(id) { const changes = { accepted: true }; return client.patch(window.Urls.invitationDetail(id), changes).then(() => { - return this.transaction({ mode: 'rw', source: IGNORED_SOURCE }, () => { + return this.transaction({ mode: 'rw' }, () => { return this.table.update(id, changes); }); }); @@ -1648,7 +1636,7 @@ export const User = new Resource({ updateAsAdmin(id, changes) { return client.patch(window.Urls.adminUsersDetail(id), changes).then(() => { - return this.transaction({ mode: 'rw', source: IGNORED_SOURCE }, () => { + return this.transaction({ mode: 'rw' }, () => { return this.table.update(id, changes); }); }); @@ -1656,7 +1644,7 @@ export const User = new Resource({ // Used when get_storage_used endpoint polling returns updateDiskSpaceUsed(id, disk_space_used) { - return this.transaction({ mode: 'rw', source: IGNORED_SOURCE }, () => { + return this.transaction({ mode: 'rw' }, () => { return this.table.update(id, { disk_space_used }); }); }, @@ -1731,10 +1719,6 @@ export const ChannelUser = new APIResource({ } return db.transaction('rw', User.tableName, EditorM2M.tableName, ViewerM2M.tableName, () => { - // Explicitly set the source of this as a fetch - // from the server, to prevent us from trying - // to sync these changes back to the server! - Dexie.currentTransaction.source = IGNORED_SOURCE; return Promise.all([ EditorM2M.table.bulkPut(editorM2M), ViewerM2M.table.bulkPut(viewerM2M), @@ -1807,7 +1791,7 @@ export const AssessmentItem = new Resource({ }, modifyAssessmentItemCount(nodeId, increment) { // Update assessment item count - return this.transaction({ mode: 'rw', source: IGNORED_SOURCE }, TABLE_NAMES.CONTENTNODE, () => { + return this.transaction({ mode: 'rw' }, TABLE_NAMES.CONTENTNODE, () => { return ContentNode.table.get(nodeId).then(node => { if (node) { return ContentNode.table.update(node.id, { @@ -1857,7 +1841,7 @@ export const File = new Resource({ if (!response) { return Promise.reject(fileErrors.UPLOAD_FAILED); } - return this.transaction({ mode: 'rw', source: IGNORED_SOURCE }, () => { + return this.transaction({ mode: 'rw' }, () => { return this.table.put(response.data.file).then(() => { return response.data; }); @@ -1878,7 +1862,6 @@ export const Clipboard = new TreeResource({ // to the backend because that would affect the moved // nodes. 
return db.transaction('rw', this.tableName, () => { - Dexie.currentTransaction.source = IGNORED_SOURCE; return this.table.bulkDelete(ids); }); }, @@ -1929,7 +1912,7 @@ export const Task = new IndexedDBResource({ // Coerce channel_id to be a simple hex string task.channel_id = task.channel_id.replace('-', ''); } - return this.transaction({ mode: 'rw', source: IGNORED_SOURCE }, () => { + return this.transaction({ mode: 'rw' }, () => { return this.table .where(this.idField) .noneOf(tasks.map(t => t[this.idField])) diff --git a/contentcuration/contentcuration/frontend/shared/data/serverSync.js b/contentcuration/contentcuration/frontend/shared/data/serverSync.js index 961bfcc660..5cc338b26c 100644 --- a/contentcuration/contentcuration/frontend/shared/data/serverSync.js +++ b/contentcuration/contentcuration/frontend/shared/data/serverSync.js @@ -7,7 +7,7 @@ import uniq from 'lodash/uniq'; import logging from '../logging'; import applyChanges from './applyRemoteChanges'; import { changeRevs } from './registry'; -import { CHANGE_TYPES, CHANGES_TABLE, IGNORED_SOURCE, MAX_REV_KEY } from './constants'; +import { CHANGE_TYPES, CHANGES_TABLE, MAX_REV_KEY } from './constants'; import db, { channelScope } from './db'; import { Channel, Session, Task } from './resources'; import client from 'shared/client'; @@ -179,7 +179,7 @@ function handleMaxRevs(response, userId) { ); if (lastChannelEditIndex > lastPublishIndex) { promises.push( - Channel.transaction({ mode: 'rw', source: IGNORED_SOURCE }, () => { + Channel.transaction({ mode: 'rw' }, () => { return Channel.table.update(channelId, { unpublished_changes: true }); }) ); diff --git a/contentcuration/contentcuration/frontend/shared/vuex/channel/actions.js b/contentcuration/contentcuration/frontend/shared/vuex/channel/actions.js index b977551ab0..dede896b49 100644 --- a/contentcuration/contentcuration/frontend/shared/vuex/channel/actions.js +++ b/contentcuration/contentcuration/frontend/shared/vuex/channel/actions.js @@ -1,6 +1,5 @@ import pickBy from 'lodash/pickBy'; import { NOVALUE } from 'shared/constants'; -import { IGNORED_SOURCE } from 'shared/data/constants'; import { Bookmark, Channel, Invitation, ChannelUser } from 'shared/data/resources'; import client from 'shared/client'; @@ -264,7 +263,7 @@ export async function sendInvitation(context, { channelId, email, shareMode }) { share_mode: shareMode, channel_id: channelId, }); - await Invitation.transaction({ mode: 'rw', source: IGNORED_SOURCE }, () => { + await Invitation.transaction({ mode: 'rw' }, () => { return Invitation.table.put(postedInvitation.data); }); context.commit('ADD_INVITATION', postedInvitation.data); From 78e32c358a63961799e83a0afea46d7d95f578b4 Mon Sep 17 00:00:00 2001 From: Richard Tibbles Date: Tue, 30 May 2023 09:26:54 -0700 Subject: [PATCH 20/21] Update assessment item handling for change updates. 
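Note for reviewers (illustration only, not part of the diff): AssessmentItem.add/update/delete now persist Change rows via _saveAndQueueChange, so any Vuex action that opens its own Dexie transaction around them also has to claim the changes table, keeping the nested write inside the same transaction. This is the shape used throughout this patch, with names taken from the code below.

    // Include the changes table so the queued Change row is written in the same transaction
    return db.transaction(
      'rw',
      [TABLE_NAMES.CONTENTNODE, TABLE_NAMES.ASSESSMENTITEM, TABLE_NAMES.CHANGES_TABLE],
      () => {
        // Writes both the assessment item and its queued Change row
        return AssessmentItem.add(stringifiedAssessmentItem);
      }
    );

The patch also keeps references to the base Resource add and delete methods (as _add and _delete) so the assessment-count wrappers can delegate to them directly.
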
--- .../vuex/assessmentItem/actions.js | 92 +++++++++++-------- .../frontend/shared/data/resources.js | 10 +- 2 files changed, 58 insertions(+), 44 deletions(-) diff --git a/contentcuration/contentcuration/frontend/channelEdit/vuex/assessmentItem/actions.js b/contentcuration/contentcuration/frontend/channelEdit/vuex/assessmentItem/actions.js index d421396433..8452cc0641 100644 --- a/contentcuration/contentcuration/frontend/channelEdit/vuex/assessmentItem/actions.js +++ b/contentcuration/contentcuration/frontend/channelEdit/vuex/assessmentItem/actions.js @@ -41,16 +41,20 @@ export function addAssessmentItem(context, assessmentItem) { hints: JSON.stringify(assessmentItem.hints || []), }; - return db.transaction('rw', [TABLE_NAMES.CONTENTNODE, TABLE_NAMES.ASSESSMENTITEM], () => { - return AssessmentItem.add(stringifiedAssessmentItem).then(([contentnode, assessment_id]) => { - context.commit('UPDATE_ASSESSMENTITEM', { - ...assessmentItem, - contentnode, - assessment_id, + return db.transaction( + 'rw', + [TABLE_NAMES.CONTENTNODE, TABLE_NAMES.ASSESSMENTITEM, TABLE_NAMES.CHANGES_TABLE], + () => { + return AssessmentItem.add(stringifiedAssessmentItem).then(([contentnode, assessment_id]) => { + context.commit('UPDATE_ASSESSMENTITEM', { + ...assessmentItem, + contentnode, + assessment_id, + }); + return updateNodeComplete(contentnode, context); }); - return updateNodeComplete(contentnode, context); - }); - }); + } + ); } export function updateAssessmentItem(context, assessmentItem) { return updateAssessmentItems(context, [assessmentItem]); @@ -65,38 +69,46 @@ export function updateAssessmentItems(context, assessmentItems) { context.commit('UPDATE_ASSESSMENTITEM', assessmentItem); }); - return db.transaction('rw', [TABLE_NAMES.CONTENTNODE, TABLE_NAMES.ASSESSMENTITEM], () => { - return Promise.all( - assessmentItems.map(assessmentItem => { - // API accepts answers and hints as strings - const stringifiedAssessmentItem = { - ...assessmentItem, - }; - if (assessmentItem.answers) { - stringifiedAssessmentItem.answers = JSON.stringify(assessmentItem.answers); - } - if (assessmentItem.hints) { - stringifiedAssessmentItem.hints = JSON.stringify(assessmentItem.hints); - } - return AssessmentItem.update( - [assessmentItem.contentnode, assessmentItem.assessment_id], - stringifiedAssessmentItem - ).then(() => { - updateNodeComplete(assessmentItem.contentnode, context); - }); - }) - ); - }); + return db.transaction( + 'rw', + [TABLE_NAMES.CONTENTNODE, TABLE_NAMES.ASSESSMENTITEM, TABLE_NAMES.CHANGES_TABLE], + () => { + return Promise.all( + assessmentItems.map(assessmentItem => { + // API accepts answers and hints as strings + const stringifiedAssessmentItem = { + ...assessmentItem, + }; + if (assessmentItem.answers) { + stringifiedAssessmentItem.answers = JSON.stringify(assessmentItem.answers); + } + if (assessmentItem.hints) { + stringifiedAssessmentItem.hints = JSON.stringify(assessmentItem.hints); + } + return AssessmentItem.update( + [assessmentItem.contentnode, assessmentItem.assessment_id], + stringifiedAssessmentItem + ).then(() => { + updateNodeComplete(assessmentItem.contentnode, context); + }); + }) + ); + } + ); } export function deleteAssessmentItem(context, assessmentItem) { - return db.transaction('rw', [TABLE_NAMES.CONTENTNODE, TABLE_NAMES.ASSESSMENTITEM], () => { - return AssessmentItem.delete([assessmentItem.contentnode, assessmentItem.assessment_id]).then( - () => { - context.commit('DELETE_ASSESSMENTITEM', assessmentItem); - const contentnode = assessmentItem.contentnode; - return 
updateNodeComplete(contentnode, context); - } - ); - }); + return db.transaction( + 'rw', + [TABLE_NAMES.CONTENTNODE, TABLE_NAMES.ASSESSMENTITEM, TABLE_NAMES.CHANGES_TABLE], + () => { + return AssessmentItem.delete([assessmentItem.contentnode, assessmentItem.assessment_id]).then( + () => { + context.commit('DELETE_ASSESSMENTITEM', assessmentItem); + const contentnode = assessmentItem.contentnode; + return updateNodeComplete(contentnode, context); + } + ); + } + ); } diff --git a/contentcuration/contentcuration/frontend/shared/data/resources.js b/contentcuration/contentcuration/frontend/shared/data/resources.js index 73fe19f3a3..e0c7d088b0 100644 --- a/contentcuration/contentcuration/frontend/shared/data/resources.js +++ b/contentcuration/contentcuration/frontend/shared/data/resources.js @@ -1802,18 +1802,20 @@ export const AssessmentItem = new Resource({ }); }); }, + // Retain super's delete method + _delete: Resource.prototype.delete, delete(id) { const nodeId = id[0]; - return this.transaction({ mode: 'rw' }, () => { - return this.table.delete(id); - }).then(data => { + return this._delete(id).then(data => { return this.modifyAssessmentItemCount(nodeId, -1).then(() => { return data; }); }); }, + // Retain super's add method + _add: Resource.prototype.add, add(obj) { - return super.add(obj).then(id => { + return this._add(obj).then(id => { return this.modifyAssessmentItemCount(obj.contentnode, 1).then(() => { return id; }); From 05fccf09e6310ca086e6513587ef7b860ab5f2b3 Mon Sep 17 00:00:00 2001 From: Richard Tibbles Date: Tue, 30 May 2023 09:31:30 -0700 Subject: [PATCH 21/21] Fix issues with copying. --- .../contentcuration/frontend/shared/data/resources.js | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/contentcuration/contentcuration/frontend/shared/data/resources.js b/contentcuration/contentcuration/frontend/shared/data/resources.js index e0c7d088b0..3843753b6b 100644 --- a/contentcuration/contentcuration/frontend/shared/data/resources.js +++ b/contentcuration/contentcuration/frontend/shared/data/resources.js @@ -1492,7 +1492,8 @@ export const ContentNode = new TreeResource({ } return this.resolveTreeInsert(id, target, position, true, data => { - data.change.excluded_descendants = excluded_descendants; + data.changeData.excluded_descendants = excluded_descendants; + data.changeData.mods = {}; return this.transaction({ mode: 'rw' }, CHANGES_TABLE, async () => { const payload = await this.tableCopy(data);