From 5b684745a7209aecc8e389912b89dccac0d809dd Mon Sep 17 00:00:00 2001 From: Manuel Iglesias <6154160+manueliglesias@users.noreply.github.com> Date: Fri, 30 Sep 2022 14:18:10 -0700 Subject: [PATCH] feat(datastore): custom pk support Co-authored-by: Ivan Artemiev <29709626+iartemiev@users.noreply.github.com> Co-authored-by: David McAfee Co-authored-by: Dane Pilcher Co-authored-by: Jon Wire --- .circleci/config.yml | 2 + .envrc | 1 + .vscode/launch.json | 28 + package.json | 4 +- .../__tests__/SQLiteAdapter.test.ts | 13 +- .../datastore-storage-adapter/package.json | 3 +- .../src/common/CommonSQLiteAdapter.ts | 5 +- .../src/common/SQLiteUtils.ts | 26 +- .../src/common/types.ts | 7 +- packages/datastore/__tests__/AsyncStorage.ts | 1 + .../__tests__/AsyncStorageAdapter.test.ts | 61 +- packages/datastore/__tests__/DataStore.ts | 1002 ++++++++++++++++- .../__tests__/IndexedDBAdapter.test.ts | 52 +- packages/datastore/__tests__/Merger.test.ts | 380 +++++-- .../__snapshots__/indexeddb.test.ts.snap | 24 +- .../__tests__/__snapshots__/sync.test.ts.snap | 19 + .../identifier-fields.test.tsx | 47 + .../composite-identifier.test.tsx | 190 ++++ .../custom-identifier.test.tsx | 105 ++ .../legacy-backwards-compatibility.test.tsx | 494 ++++++++ .../managed-identifier.test.tsx | 248 ++++ .../observe-all.test.tsx | 16 + .../optionally-managed-identifier.test.tsx | 313 +++++ packages/datastore/__tests__/helpers.ts | 445 +++++++- .../datastore/__tests__/indexeddb.test.ts | 104 +- packages/datastore/__tests__/model.ts | 19 +- packages/datastore/__tests__/mutation.test.ts | 27 +- packages/datastore/__tests__/outbox.test.ts | 58 +- packages/datastore/__tests__/storage.test.ts | 243 ++-- packages/datastore/__tests__/sync.test.ts | 33 + packages/datastore/__tests__/util.test.ts | 124 +- packages/datastore/src/datastore/datastore.ts | 366 ++++-- packages/datastore/src/predicates/index.ts | 42 +- .../storage/adapter/AsyncStorageAdapter.ts | 402 +++++-- 
.../storage/adapter/AsyncStorageDatabase.ts | 100 +- .../src/storage/adapter/IndexedDBAdapter.ts | 455 +++++--- .../datastore/src/storage/adapter/index.ts | 2 +- packages/datastore/src/storage/storage.ts | 89 +- packages/datastore/src/sync/index.ts | 67 +- packages/datastore/src/sync/merger.ts | 18 +- packages/datastore/src/sync/outbox.ts | 29 +- .../datastore/src/sync/processors/mutation.ts | 94 +- .../src/sync/processors/subscription.ts | 1 - .../datastore/src/sync/processors/sync.ts | 4 +- packages/datastore/src/sync/utils.ts | 81 +- packages/datastore/src/types.ts | 210 +++- packages/datastore/src/util.ts | 591 +++++++--- 47 files changed, 5561 insertions(+), 1084 deletions(-) create mode 100644 .envrc create mode 100644 .vscode/launch.json create mode 100644 packages/datastore/__tests__/custom-pk-typings/identifier-fields.test.tsx create mode 100644 packages/datastore/__tests__/custom-pk-typings/model-init-mutable-model-typings/composite-identifier.test.tsx create mode 100644 packages/datastore/__tests__/custom-pk-typings/model-init-mutable-model-typings/custom-identifier.test.tsx create mode 100644 packages/datastore/__tests__/custom-pk-typings/model-init-mutable-model-typings/legacy-backwards-compatibility.test.tsx create mode 100644 packages/datastore/__tests__/custom-pk-typings/model-init-mutable-model-typings/managed-identifier.test.tsx create mode 100644 packages/datastore/__tests__/custom-pk-typings/model-init-mutable-model-typings/observe-all.test.tsx create mode 100644 packages/datastore/__tests__/custom-pk-typings/model-init-mutable-model-typings/optionally-managed-identifier.test.tsx diff --git a/.circleci/config.yml b/.circleci/config.yml index 8c6ccd1c12e..90f314ea531 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -369,6 +369,7 @@ jobs: command: | cd packages/datastore-storage-adapter npm install --build-from-source + rm -rf node_modules/@aws-amplify node_modules/@aws-sdk - run: name: 'Run Amplify JS unit tests' command: | @@ 
-1218,6 +1219,7 @@ releasable_branches: &releasable_branches - ui-components/main - 1.0-stable - geo/main + - ds-custom-pk test_browsers: &test_browsers browser: [chrome, firefox] diff --git a/.envrc b/.envrc new file mode 100644 index 00000000000..cf8fda61d8e --- /dev/null +++ b/.envrc @@ -0,0 +1 @@ +use node 14 \ No newline at end of file diff --git a/.vscode/launch.json b/.vscode/launch.json new file mode 100644 index 00000000000..f89d0f3edac --- /dev/null +++ b/.vscode/launch.json @@ -0,0 +1,28 @@ +// A launch configuration that compiles the extension and then opens it inside a new window +// Use IntelliSense to learn about possible attributes. +// Hover to view descriptions of existing attributes. +// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387 +{ + "version": "0.2.0", + "configurations": [ + { + "name": "debug tests", + "type": "node", + "request": "launch", + // The debugger will only run tests for the package specified here: + "cwd": "${workspaceFolder}/packages/datastore", + "runtimeArgs": [ + "--inspect-brk", + "${workspaceRoot}/node_modules/.bin/jest", + // Optionally specify a single test file to run/debug: + "storage.test.ts", + "--runInBand", + "--testTimeout", + "600000", // 10 min timeout so jest doesn't error while we're stepping through code + "false" + ], + "console": "integratedTerminal", + "internalConsoleOptions": "neverOpen" + } + ] +} diff --git a/package.json b/package.json index d4ae9150507..08e3372dce8 100644 --- a/package.json +++ b/package.json @@ -27,7 +27,9 @@ "publish:1.0-stable": "lerna publish --conventional-commits --yes --dist-tag=stable-1.0 --message 'chore(release): Publish [ci skip]' --no-verify-access", "publish:ui-components/main": "lerna publish --canary --force-publish \"*\" --yes --dist-tag=ui-preview --preid=ui-preview --exact --no-verify-access", "publish:verdaccio": "lerna publish --no-push --canary minor --dist-tag=unstable --preid=unstable --exact --force-publish --yes 
--no-verify-access", - "publish:geo/main": "lerna publish --canary --force-publish \"*\" --yes --dist-tag=geo --preid=geo --exact --no-verify-access" + "publish:geo/main": "lerna publish --canary --force-publish \"*\" --yes --dist-tag=geo --preid=geo --exact --no-verify-access", + "publish:ds-custom-pk": "lerna publish --canary --force-publish \"*\" --yes --dist-tag=custom-pk --preid=custom-pk --exact --no-verify-access", + "temp-ds-safe-push": "yarn build --scope @aws-amplify/datastore && yarn test --scope @aws-amplify/datastore && git push origin" }, "husky": { "hooks": { diff --git a/packages/datastore-storage-adapter/__tests__/SQLiteAdapter.test.ts b/packages/datastore-storage-adapter/__tests__/SQLiteAdapter.test.ts index 7aa6b9eee17..bd35c58a5e1 100644 --- a/packages/datastore-storage-adapter/__tests__/SQLiteAdapter.test.ts +++ b/packages/datastore-storage-adapter/__tests__/SQLiteAdapter.test.ts @@ -76,6 +76,10 @@ class InnerSQLiteDatabase { statement, params, async (err, row) => { + if (err) { + console.error('SQLite ERROR', new Error(err)); + console.warn(statement, params); + } rows.push(row); }, () => { @@ -86,7 +90,14 @@ class InnerSQLiteDatabase { if (callback) await callback(this, resultSet); }); } else { - return await this.innerDB.run(statement, params, callback); + return await this.innerDB.run(statement, params, err => { + if (typeof callback === 'function') { + callback(err); + } else if (err) { + console.error('calback', err); + throw err; + } + }); } } diff --git a/packages/datastore-storage-adapter/package.json b/packages/datastore-storage-adapter/package.json index a9c21974426..3a08a4832d5 100644 --- a/packages/datastore-storage-adapter/package.json +++ b/packages/datastore-storage-adapter/package.json @@ -51,7 +51,8 @@ "es5", "es2015", "esnext.asynciterable", - "es2019" + "es2019", + "dom" ], "allowJs": true, "esModuleInterop": true, diff --git a/packages/datastore-storage-adapter/src/common/CommonSQLiteAdapter.ts 
b/packages/datastore-storage-adapter/src/common/CommonSQLiteAdapter.ts index d2d2a83d8cc..30fbd930cfd 100644 --- a/packages/datastore-storage-adapter/src/common/CommonSQLiteAdapter.ts +++ b/packages/datastore-storage-adapter/src/common/CommonSQLiteAdapter.ts @@ -17,7 +17,6 @@ import { ModelSortPredicateCreator, InternalSchema, isPredicateObj, - ModelInstanceMetadata, ModelPredicate, NamespaceResolver, OpType, @@ -29,7 +28,7 @@ import { QueryOne, utils, } from '@aws-amplify/datastore'; -import { CommonSQLiteDatabase, ParameterizedStatement } from './types'; +import { CommonSQLiteDatabase, ParameterizedStatement, ModelInstanceMetadataWithId } from './types'; const { traverseModel, validatePredicate, isModelConstructor } = utils; @@ -407,7 +406,7 @@ export class CommonSQLiteAdapter implements StorageAdapter { async batchSave( modelConstructor: PersistentModelConstructor, - items: ModelInstanceMetadata[] + items: ModelInstanceMetadataWithId[] ): Promise<[T, OpType][]> { const { name: tableName } = modelConstructor; const result: [T, OpType][] = []; diff --git a/packages/datastore-storage-adapter/src/common/SQLiteUtils.ts b/packages/datastore-storage-adapter/src/common/SQLiteUtils.ts index ecc29d79222..968e33cfb33 100644 --- a/packages/datastore-storage-adapter/src/common/SQLiteUtils.ts +++ b/packages/datastore-storage-adapter/src/common/SQLiteUtils.ts @@ -148,7 +148,7 @@ export function modelCreateTableStatement( let fields = Object.values(model.fields).reduce((acc, field: ModelField) => { if (isGraphQLScalarType(field.type)) { if (field.name === 'id') { - return acc + '"id" PRIMARY KEY NOT NULL'; + return [...acc, '"id" PRIMARY KEY NOT NULL']; } let columnParam = `"${field.name}" ${getSQLiteType(field.type)}`; @@ -157,7 +157,7 @@ export function modelCreateTableStatement( columnParam += ' NOT NULL'; } - return acc + `, ${columnParam}`; + return [...acc, `${columnParam}`]; } if (isModelFieldType(field.type)) { @@ -167,7 +167,7 @@ export function 
modelCreateTableStatement( if (isTargetNameAssociation(field.association)) { // check if this field has been explicitly defined in the model const fkDefinedInModel = Object.values(model.fields).find( - (f: ModelField) => f.name === field.association.targetName + (f: ModelField) => f.name === field?.association?.targetName ); // if the FK is not explicitly defined in the model, we have to add it here @@ -179,7 +179,7 @@ export function modelCreateTableStatement( // ignore isRequired param for model fields, since they will not contain // the related data locally - return acc + `, ${columnParam}`; + return [...acc, `${columnParam}`]; } // default to TEXT @@ -189,19 +189,25 @@ export function modelCreateTableStatement( columnParam += ' NOT NULL'; } - return acc + `, ${columnParam}`; - }, ''); + return [...acc, `${columnParam}`]; + }, [] as string[]); implicitAuthFields.forEach((authField: string) => { - fields += `, ${authField} TEXT`; + fields.push(`${authField} TEXT`); }); if (userModel) { - fields += - ', "_version" INTEGER, "_lastChangedAt" INTEGER, "_deleted" INTEGER'; + fields = [ + ...fields, + `"_version" INTEGER`, + `"_lastChangedAt" INTEGER`, + `"_deleted" INTEGER`, + ]; } - const createTableStatement = `CREATE TABLE IF NOT EXISTS "${model.name}" (${fields});`; + const createTableStatement = `CREATE TABLE IF NOT EXISTS "${ + model.name + }" (${fields.join(', ')});`; return createTableStatement; } diff --git a/packages/datastore-storage-adapter/src/common/types.ts b/packages/datastore-storage-adapter/src/common/types.ts index 84c63014371..43905926dac 100644 --- a/packages/datastore-storage-adapter/src/common/types.ts +++ b/packages/datastore-storage-adapter/src/common/types.ts @@ -1,4 +1,4 @@ -import { PersistentModel } from '@aws-amplify/datastore'; +import { PersistentModel, ModelInstanceMetadata } from '@aws-amplify/datastore'; export interface CommonSQLiteDatabase { init(): Promise; @@ -27,3 +27,8 @@ export interface CommonSQLiteDatabase { } export type 
ParameterizedStatement = [string, any[]]; + +// TODO: remove once we implement CPK for this adapter +export type ModelInstanceMetadataWithId = ModelInstanceMetadata & { + id: string; +}; diff --git a/packages/datastore/__tests__/AsyncStorage.ts b/packages/datastore/__tests__/AsyncStorage.ts index bba4a6be0e3..d40db4f4f57 100644 --- a/packages/datastore/__tests__/AsyncStorage.ts +++ b/packages/datastore/__tests__/AsyncStorage.ts @@ -168,6 +168,7 @@ describe('AsyncStorage tests', () => { test('save function 1:1 insert', async () => { await DataStore.save(blog); + await DataStore.save(owner); const get1 = JSON.parse( diff --git a/packages/datastore/__tests__/AsyncStorageAdapter.test.ts b/packages/datastore/__tests__/AsyncStorageAdapter.test.ts index f93961ba742..1a2352da793 100644 --- a/packages/datastore/__tests__/AsyncStorageAdapter.test.ts +++ b/packages/datastore/__tests__/AsyncStorageAdapter.test.ts @@ -5,7 +5,15 @@ import { syncClasses, } from '../src/datastore/datastore'; import { PersistentModelConstructor, SortDirection } from '../src/types'; -import { pause, Model, User, Profile, testSchema } from './helpers'; +import { + Model, + User, + Profile, + Post, + Comment, + testSchema, + pause, +} from './helpers'; import { Predicates } from '../src/predicates'; import { addCommonQueryTests } from './commonAdapterTests'; @@ -41,7 +49,7 @@ describe('AsyncStorageAdapter tests', () => { describe('Query', () => { let Model: PersistentModelConstructor; let model1Id: string; - const spyOnGetOne = jest.spyOn(ASAdapter, 'getById'); + const spyOnGetOne = jest.spyOn(ASAdapter, 'getByKey'); const spyOnGetAll = jest.spyOn(ASAdapter, 'getAll'); const spyOnMemory = jest.spyOn(ASAdapter, 'inMemoryPagination'); @@ -92,9 +100,8 @@ describe('AsyncStorageAdapter tests', () => { await DataStore.clear(); }); - it('Should call getById for query by id', async () => { + it('Should call getById for query by key', async () => { const result = await DataStore.query(Model, model1Id); - 
expect(result.field1).toEqual('Some value'); expect(spyOnGetOne).toHaveBeenCalled(); expect(spyOnGetAll).not.toHaveBeenCalled(); @@ -155,11 +162,16 @@ describe('AsyncStorageAdapter tests', () => { expect(spyOnMemory).not.toHaveBeenCalled(); }); }); + describe('Delete', () => { let User: PersistentModelConstructor; let Profile: PersistentModelConstructor; let profile1Id: string; let user1Id: string; + let Post: PersistentModelConstructor; + let Comment: PersistentModelConstructor; + let post1Id: string; + let comment1Id: string; beforeAll(async () => { ({ initSchema, DataStore } = require('../src/datastore/datastore')); @@ -183,6 +195,25 @@ describe('AsyncStorageAdapter tests', () => { )); }); + beforeEach(async () => { + const classes = initSchema(testSchema()); + + ({ Post } = classes as { + Post: PersistentModelConstructor; + }); + + ({ Comment } = classes as { + Comment: PersistentModelConstructor; + }); + + const post = await DataStore.save(new Post({ title: 'Test' })); + ({ id: post1Id } = post); + + ({ id: comment1Id } = await DataStore.save( + new Comment({ content: 'Test Content', post }) + )); + }); + it('Should perform a cascading delete on a record with a Has One relationship', async () => { let user = await DataStore.query(User, user1Id); let profile = await DataStore.query(Profile, profile1Id); @@ -197,8 +228,26 @@ describe('AsyncStorageAdapter tests', () => { profile = await DataStore.query(Profile, profile1Id); // both should be undefined, even though we only explicitly deleted the user - expect(user).toBeUndefined; - expect(profile).toBeUndefined; + expect(user).toBeUndefined(); + expect(profile).toBeUndefined(); + }); + + it('Should perform a cascading delete on a record with a Has Many relationship', async () => { + let post = await DataStore.query(Post, post1Id); + let comment = await DataStore.query(Comment, comment1Id); + + // double-checking that both of the records exist at first + expect(post.id).toEqual(post1Id); + 
expect(comment.id).toEqual(comment1Id); + + await DataStore.delete(Post, post.id); + + post = await DataStore.query(Post, post1Id); + comment = await DataStore.query(Comment, comment1Id); + + // both should be undefined, even though we only explicitly deleted the post + expect(post).toBeUndefined(); + expect(comment).toBeUndefined(); }); }); diff --git a/packages/datastore/__tests__/DataStore.ts b/packages/datastore/__tests__/DataStore.ts index 42795ea1fa2..5f019ace973 100644 --- a/packages/datastore/__tests__/DataStore.ts +++ b/packages/datastore/__tests__/DataStore.ts @@ -15,13 +15,14 @@ import { } from '../src/types'; import { Comment, + Metadata, Model, + pause, Post, + PostCustomPK as PostCustomPKType, Profile, - Metadata, - User, testSchema, - pause, + User, } from './helpers'; let initSchema: typeof initSchemaType; @@ -898,7 +899,9 @@ describe('DataStore tests', () => { expect(classes).toHaveProperty('Model'); - const { Model } = classes as { Model: PersistentModelConstructor }; + const { Model } = classes as { + Model: PersistentModelConstructor; + }; expect(Model).toHaveProperty( nameOf>('copyOf') @@ -1214,7 +1217,9 @@ describe('DataStore tests', () => { const classes = initSchema(testSchema()); - const { Model } = classes as { Model: PersistentModelConstructor }; + const { Model } = classes as { + Model: PersistentModelConstructor; + }; const promises = [ DataStore.query(Model), @@ -1234,7 +1239,9 @@ describe('DataStore tests', () => { const classes = initSchema(testSchema()); - const { Model } = classes as { Model: PersistentModelConstructor }; + const { Model } = classes as { + Model: PersistentModelConstructor; + }; DataStore.observe(Model).subscribe(jest.fn()); @@ -1297,7 +1304,9 @@ describe('DataStore tests', () => { const classes = initSchema(testSchema()); - const { Model } = classes as { Model: PersistentModelConstructor }; + const { Model } = classes as { + Model: PersistentModelConstructor; + }; model = new Model({ field1: 'Some value', @@ 
-1340,7 +1349,9 @@ describe('DataStore tests', () => { const classes = initSchema(testSchema()); - const { Model } = classes as { Model: PersistentModelConstructor }; + const { Model } = classes as { + Model: PersistentModelConstructor; + }; model = new Model({ field1: 'something', @@ -1393,7 +1404,9 @@ describe('DataStore tests', () => { const classes = initSchema(testSchema()); - const { Model } = classes as { Model: PersistentModelConstructor }; + const { Model } = classes as { + Model: PersistentModelConstructor; + }; model = new Model({ field1: 'something', @@ -1473,7 +1486,9 @@ describe('DataStore tests', () => { const classes = initSchema(testSchema()); - const { Model } = classes as { Model: PersistentModelConstructor }; + const { Model } = classes as { + Model: PersistentModelConstructor; + }; expect(() => { new Model({ @@ -1959,7 +1974,10 @@ describe('DataStore tests', () => { ).rejects.toThrow("Page can't be negative"); await expect( - DataStore.query(Model, 'someid', { page: 0, limit: 'avalue' }) + DataStore.query(Model, 'someid', { + page: 0, + limit: 'avalue', + }) ).rejects.toThrow('Limit should be a number'); await expect( @@ -2165,4 +2183,966 @@ describe('DataStore tests', () => { }); }); }); + describe('DataStore Custom PK tests', () => { + describe('initSchema tests', () => { + test('PostCustomPK class is created', () => { + const classes = initSchema(testSchema()); + + expect(classes).toHaveProperty('PostCustomPK'); + + const { PostCustomPK } = classes as { + PostCustomPK: PersistentModelConstructor; + }; + + expect(PostCustomPK).toHaveProperty( + nameOf>('copyOf') + ); + + expect(typeof PostCustomPK.copyOf).toBe('function'); + }); + + test('PostCustomPK class can be instantiated', () => { + const { PostCustomPK } = initSchema(testSchema()) as { + PostCustomPK: PersistentModelConstructor; + }; + + const model = new PostCustomPK({ + postId: '12345', + title: 'something', + dateCreated: new Date().toISOString(), + }); + + 
expect(model).toBeInstanceOf(PostCustomPK); + + expect(model.postId).toBeDefined(); + }); + }); + + describe('Immutability', () => { + test('Title cannot be changed', () => { + const { PostCustomPK } = initSchema(testSchema()) as { + PostCustomPK: PersistentModelConstructor; + }; + + const model = new PostCustomPK({ + postId: '12345', + title: 'something', + dateCreated: new Date().toISOString(), + }); + + expect(() => { + (model).title = 'edit'; + }).toThrowError( + "Cannot assign to read only property 'title' of object" + ); + }); + + test('PostCustomPK can be copied+edited by creating an edited copy', () => { + const { PostCustomPK } = initSchema(testSchema()) as { + PostCustomPK: PersistentModelConstructor; + }; + + const model1 = new PostCustomPK({ + postId: '12345', + title: 'something', + dateCreated: new Date().toISOString(), + }); + + const model2 = PostCustomPK.copyOf(model1, draft => { + draft.title = 'edited'; + }); + + expect(model1).not.toBe(model2); + + // postId should be kept the same + expect(model1.postId).toBe(model2.postId); + + expect(model1.title).toBe('something'); + expect(model2.title).toBe('edited'); + }); + + test('postId cannot be changed inside copyOf', () => { + const { PostCustomPK } = initSchema(testSchema()) as { + PostCustomPK: PersistentModelConstructor; + }; + + const model1 = new PostCustomPK({ + postId: '12345', + title: 'something', + dateCreated: new Date().toISOString(), + }); + + const model2 = PostCustomPK.copyOf(model1, draft => { + (draft).postId = 'a-new-postId'; + }); + + // postId should be kept the same + expect(model1.postId).toBe(model2.postId); + }); + + test('Optional field can be initialized with undefined', () => { + const { PostCustomPK } = initSchema(testSchema()) as { + PostCustomPK: PersistentModelConstructor; + }; + + const model1 = new PostCustomPK({ + postId: '12345', + title: 'something', + description: undefined, + dateCreated: new Date().toISOString(), + }); + + 
expect(model1.description).toBeUndefined(); + }); + + test('Optional field can be initialized with null', () => { + const { PostCustomPK } = initSchema(testSchema()) as { + PostCustomPK: PersistentModelConstructor; + }; + + const model1 = new PostCustomPK({ + postId: '12345', + title: 'something', + dateCreated: new Date().toISOString(), + description: null, + }); + + expect(model1.description).toBeNull(); + }); + + test('Optional field can be changed to undefined inside copyOf', () => { + const { PostCustomPK } = initSchema(testSchema()) as { + PostCustomPK: PersistentModelConstructor; + }; + + const model1 = new PostCustomPK({ + postId: '12345', + title: 'something', + dateCreated: new Date().toISOString(), + description: 'something-else', + }); + + const model2 = PostCustomPK.copyOf(model1, draft => { + (draft).description = undefined; + }); + + // postId should be kept the same + expect(model1.postId).toBe(model2.postId); + + expect(model1.description).toBe('something-else'); + expect(model2.description).toBeUndefined(); + }); + + test('Optional field can be set to null inside copyOf', () => { + const { PostCustomPK } = initSchema(testSchema()) as { + PostCustomPK: PersistentModelConstructor; + }; + + const model1 = new PostCustomPK({ + postId: '12345', + title: 'something', + dateCreated: new Date().toISOString(), + }); + + const model2 = PostCustomPK.copyOf(model1, draft => { + (draft).description = null; + }); + + // postId should be kept the same + expect(model1.postId).toBe(model2.postId); + + expect(model1.description).toBeUndefined(); + expect(model2.description).toBeNull(); + }); + + test('Non @model - Field cannot be changed', () => { + const { Metadata } = initSchema(testSchema()) as { + Metadata: NonModelTypeConstructor; + }; + + const nonPostCustomPK = new Metadata({ + author: 'something', + rewards: [], + penNames: [], + nominations: [], + }); + + expect(() => { + (nonPostCustomPK).author = 'edit'; + }).toThrowError( + "Cannot assign to read only 
property 'author' of object" + ); + }); + }); + + describe('Initialization', () => { + let PostCustomPK; + test('start is called only once', async () => { + const storage: StorageType = + require('../src/storage/storage').ExclusiveStorage; + + const classes = initSchema(testSchema()); + + ({ PostCustomPK } = classes as { + PostCustomPK: PersistentModelConstructor; + }); + + const promises = [ + DataStore.query(PostCustomPK), + DataStore.query(PostCustomPK), + DataStore.query(PostCustomPK), + DataStore.query(PostCustomPK), + ]; + + await Promise.all(promises); + + expect(storage).toHaveBeenCalledTimes(1); + }); + + test('It is initialized when observing (no query)', async () => { + const storage: StorageType = + require('../src/storage/storage').ExclusiveStorage; + + const classes = initSchema(testSchema()); + + ({ PostCustomPK } = classes as { + PostCustomPK: PersistentModelConstructor; + }); + + DataStore.observe(PostCustomPK).subscribe(jest.fn()); + + expect(storage).toHaveBeenCalledTimes(1); + }); + }); + + describe('Basic operations', () => { + let PostCustomPK: PersistentModelConstructor; + + beforeEach(() => { + jest.resetModules(); + jest.doMock('../src/storage/storage', () => { + const mock = jest.fn().mockImplementation(() => ({ + init: jest.fn(), + runExclusive: jest.fn(() => []), + query: jest.fn(() => []), + observe: jest.fn(() => Observable.from([])), + })); + + (mock).getNamespace = () => ({ models: {} }); + + return { ExclusiveStorage: mock }; + }); + ({ initSchema, DataStore } = require('../src/datastore/datastore')); + + const classes = initSchema(testSchema()); + + ({ PostCustomPK } = classes as { + PostCustomPK: PersistentModelConstructor; + }); + }); + + test('Save returns the saved model', async () => { + let model: PostCustomPKType; + const save = jest.fn(() => [model]); + const query = jest.fn(() => [model]); + + jest.resetModules(); + jest.doMock('../src/storage/storage', () => { + const mock = jest.fn().mockImplementation(() => { + const 
_mock = { + init: jest.fn(), + save, + query, + runExclusive: jest.fn(fn => fn.bind(this, _mock)()), + }; + + return _mock; + }); + + (mock).getNamespace = () => ({ models: {} }); + + return { ExclusiveStorage: mock }; + }); + + ({ initSchema, DataStore } = require('../src/datastore/datastore')); + + const classes = initSchema(testSchema()); + + const { PostCustomPK } = classes as { + PostCustomPK: PersistentModelConstructor; + }; + + model = new PostCustomPK({ + postId: '12345', + title: 'Some value', + dateCreated: new Date().toISOString(), + }); + + const result = await DataStore.save(model); + + const [settingsSave, modelCall] = save.mock.calls; + const [_model, _condition, _mutator, patches] = modelCall; + + expect(result).toMatchObject(model); + expect(patches).toBeUndefined(); + }); + + test('Save returns the updated model and patches', async () => { + let model: PostCustomPKType; + const save = jest.fn(() => [model]); + const query = jest.fn(() => [model]); + + jest.resetModules(); + jest.doMock('../src/storage/storage', () => { + const mock = jest.fn().mockImplementation(() => { + const _mock = { + init: jest.fn(), + save, + query, + runExclusive: jest.fn(fn => fn.bind(this, _mock)()), + }; + + return _mock; + }); + + (mock).getNamespace = () => ({ models: {} }); + + return { ExclusiveStorage: mock }; + }); + + ({ initSchema, DataStore } = require('../src/datastore/datastore')); + + const classes = initSchema(testSchema()); + + const { PostCustomPK } = classes as { + PostCustomPK: PersistentModelConstructor; + }; + + model = new PostCustomPK({ + postId: '12345', + title: 'something', + dateCreated: new Date().toISOString(), + }); + + await DataStore.save(model); + + model = PostCustomPK.copyOf(model, draft => { + draft.title = 'edited'; + }); + + const result = await DataStore.save(model); + + const [settingsSave, modelSave, modelUpdate] = save.mock.calls; + const [_model, _condition, _mutator, [patches]] = modelUpdate; + + const expectedPatches = [ + { 
op: 'replace', path: ['title'], value: 'edited' }, + ]; + + expect(result).toMatchObject(model); + expect(patches).toMatchObject(expectedPatches); + }); + + test('Save returns the updated model and patches - list field', async () => { + let model: PostCustomPKType; + const save = jest.fn(() => [model]); + const query = jest.fn(() => [model]); + + jest.resetModules(); + jest.doMock('../src/storage/storage', () => { + const mock = jest.fn().mockImplementation(() => { + const _mock = { + init: jest.fn(), + save, + query, + runExclusive: jest.fn(fn => fn.bind(this, _mock)()), + }; + + return _mock; + }); + + (mock).getNamespace = () => ({ models: {} }); + + return { ExclusiveStorage: mock }; + }); + + ({ initSchema, DataStore } = require('../src/datastore/datastore')); + + const classes = initSchema(testSchema()); + + const { PostCustomPK } = classes as { + PostCustomPK: PersistentModelConstructor; + }; + + model = new PostCustomPK({ + postId: '12345', + title: 'something', + dateCreated: new Date().toISOString(), + emails: ['john@doe.com', 'jane@doe.com'], + }); + + await DataStore.save(model); + + model = PostCustomPK.copyOf(model, draft => { + draft.emails = [...draft.emails, 'joe@doe.com']; + }); + + let result = await DataStore.save(model); + + expect(result).toMatchObject(model); + + model = PostCustomPK.copyOf(model, draft => { + draft.emails.push('joe@doe.com'); + }); + + result = await DataStore.save(model); + + expect(result).toMatchObject(model); + + const [settingsSave, modelSave, modelUpdate, modelUpdate2] = ( + save.mock.calls + ); + + const [_model, _condition, _mutator, [patches]] = modelUpdate; + const [_model2, _condition2, _mutator2, [patches2]] = modelUpdate2; + + const expectedPatches = [ + { + op: 'replace', + path: ['emails'], + value: ['john@doe.com', 'jane@doe.com', 'joe@doe.com'], + }, + ]; + + const expectedPatches2 = [ + { + op: 'replace', + path: ['emails'], + value: [ + 'john@doe.com', + 'jane@doe.com', + 'joe@doe.com', + 'joe@doe.com', + 
], + }, + ]; + + expect(patches).toMatchObject(expectedPatches); + expect(patches2).toMatchObject(expectedPatches2); + }); + + test('Read-only fields cannot be overwritten', async () => { + let model: PostCustomPKType; + const save = jest.fn(() => [model]); + const query = jest.fn(() => [model]); + + jest.resetModules(); + jest.doMock('../src/storage/storage', () => { + const mock = jest.fn().mockImplementation(() => { + const _mock = { + init: jest.fn(), + save, + query, + runExclusive: jest.fn(fn => fn.bind(this, _mock)()), + }; + + return _mock; + }); + + (mock).getNamespace = () => ({ models: {} }); + + return { ExclusiveStorage: mock }; + }); + + ({ initSchema, DataStore } = require('../src/datastore/datastore')); + + const classes = initSchema(testSchema()); + + const { PostCustomPK } = classes as { + PostCustomPK: PersistentModelConstructor; + }; + + expect(() => { + new PostCustomPK({ + postId: '12345', + title: 'something', + dateCreated: new Date().toISOString(), + createdAt: '2021-06-03T20:56:23.201Z', + }) as any; + }).toThrow('createdAt is read-only.'); + + model = new PostCustomPK({ + postId: '12345', + title: 'something', + dateCreated: new Date().toISOString(), + }); + + expect(() => { + PostCustomPK.copyOf(model, draft => { + (draft as any).createdAt = '2021-06-03T20:56:23.201Z'; + }); + }).toThrow('createdAt is read-only.'); + + expect(() => { + PostCustomPK.copyOf(model, draft => { + (draft as any).updatedAt = '2021-06-03T20:56:23.201Z'; + }); + }).toThrow('updatedAt is read-only.'); + }); + + test('Instantiation validations', async () => { + expect(() => { + new PostCustomPK({ + postId: '12345', + title: undefined, + dateCreated: new Date().toISOString(), + }); + }).toThrowError('Field title is required'); + + expect(() => { + new PostCustomPK({ + postId: '12345', + title: null, + dateCreated: new Date().toISOString(), + }); + }).toThrowError('Field title is required'); + + expect(() => { + new PostCustomPK({ + postId: '12345', + title: 1234, + 
dateCreated: new Date().toISOString(), + }); + }).toThrowError( + 'Field title should be of type string, number received. 1234' + ); + + expect(() => { + new PostCustomPK({ + postId: '12345', + title: 'someField', + dateCreated: 'not-a-date', + }); + }).toThrowError( + 'Field dateCreated should be of type AWSDateTime, validation failed. not-a-date' + ); + + expect(() => { + new PostCustomPK({ + postId: '12345', + title: 'someField', + dateCreated: new Date().toISOString(), + emails: [null], + }); + }).toThrowError( + 'All elements in the emails array should be of type string, [null] received. ' + ); + + expect(() => { + new PostCustomPK({ + postId: '12345', + title: 'someField', + dateCreated: new Date().toISOString(), + emails: ['test@example.com'], + }); + }).not.toThrow(); + + expect(() => { + new PostCustomPK({ + postId: '12345', + title: 'someField', + dateCreated: new Date().toISOString(), + emails: ['not-an-email'], + }); + }).toThrowError( + 'All elements in the emails array should be of type AWSEmail, validation failed for one or more elements. not-an-email' + ); + + expect({ + extraAttribute: 'some value', + title: 'some value', + }).toHaveProperty('extraAttribute'); + + expect(() => { + PostCustomPK.copyOf(undefined, d => d); + }).toThrow('The source object is not a valid model'); + expect(() => { + const source = new PostCustomPK({ + postId: '12345', + title: 'something', + dateCreated: new Date().toISOString(), + }); + PostCustomPK.copyOf(source, d => (d.title = 1234)); + }).toThrow( + 'Field title should be of type string, number received. 1234' + ); + }); + + test('Delete params', async () => { + await expect(DataStore.delete(undefined)).rejects.toThrow( + 'Model or Model Constructor required' + ); + + await expect(DataStore.delete(PostCustomPK)).rejects.toThrow( + 'Id to delete or criteria required. Do you want to delete all? 
Pass Predicates.ALL' + ); + + await expect( + DataStore.delete(PostCustomPK, (() => {})) + ).rejects.toThrow( + 'Criteria required. Do you want to delete all? Pass Predicates.ALL' + ); + + await expect( + DataStore.delete(PostCustomPK, (() => {})) + ).rejects.toThrow( + 'Criteria required. Do you want to delete all? Pass Predicates.ALL' + ); + + await expect(DataStore.delete({})).rejects.toThrow( + 'Object is not an instance of a valid model' + ); + + await expect( + DataStore.delete( + new PostCustomPK({ + postId: '12345', + title: 'somevalue', + dateCreated: new Date().toISOString(), + }), + {} + ) + ).rejects.toThrow('Invalid criteria'); + }); + + test('Delete many returns many', async () => { + const models: PostCustomPKType[] = []; + const save = jest.fn(model => { + model instanceof PostCustomPK && models.push(model); + }); + const query = jest.fn(() => models); + const _delete = jest.fn(() => [models, models]); + + jest.resetModules(); + jest.doMock('../src/storage/storage', () => { + const mock: jest.Mock = jest.fn().mockImplementation(() => { + const _mock = { + init: jest.fn(), + save, + query, + delete: _delete, + runExclusive: jest.fn(fn => fn.bind(this, _mock)()), + }; + + return _mock; + }); + + (mock).getNamespace = () => ({ models: {} }); + + return { ExclusiveStorage: mock }; + }); + + ({ initSchema, DataStore } = require('../src/datastore/datastore')); + + const classes = initSchema(testSchema()); + + const { PostCustomPK } = classes as { + PostCustomPK: PersistentModelConstructor; + }; + + Promise.all( + [...Array(10).keys()].map(async i => { + await DataStore.save( + new PostCustomPK({ + postId: `${i}`, + title: 'someField', + dateCreated: new Date().toISOString(), + }) + ); + }) + ); + + const deleted = await DataStore.delete(PostCustomPK, m => + m.title('eq', 'someField') + ); + + const sortedRecords = deleted.sort((a, b) => + a.postId < b.postId ? 
-1 : 1 + ); + + expect(sortedRecords.length).toEqual(10); + sortedRecords.forEach((deletedItem, idx) => { + expect(deletedItem.postId).toEqual(`${idx}`); + expect(deletedItem.title).toEqual('someField'); + }); + }); + + test('Delete one by Custom PK returns one', async () => { + let model: PostCustomPKType; + const save = jest.fn(saved => (model = saved)); + const query = jest.fn(() => [model]); + const _delete = jest.fn(() => [[model], [model]]); + + jest.resetModules(); + jest.doMock('../src/storage/storage', () => { + const mock = jest.fn().mockImplementation(() => { + const _mock = { + init: jest.fn(), + save, + query, + delete: _delete, + runExclusive: jest.fn(fn => fn.bind(this, _mock)()), + }; + return _mock; + }); + + (mock).getNamespace = () => ({ models: {} }); + + return { ExclusiveStorage: mock }; + }); + + ({ initSchema, DataStore } = require('../src/datastore/datastore')); + + const classes = initSchema(testSchema()); + + const { PostCustomPK } = classes as { + PostCustomPK: PersistentModelConstructor; + }; + + const saved = await DataStore.save( + new PostCustomPK({ + postId: '12345', + title: 'someField', + dateCreated: new Date().toISOString(), + }) + ); + + const deleted: PostCustomPKType[] = await DataStore.delete( + PostCustomPK, + saved.postId + ); + + expect(deleted.length).toEqual(1); + expect(deleted[0]).toEqual(model); + }); + + test('Delete one by Custom PK with predicate returns one', async () => { + let model: PostCustomPKType; + const save = jest.fn(saved => (model = saved)); + const query = jest.fn(() => [model]); + const _delete = jest.fn(() => [[model], [model]]); + + jest.resetModules(); + jest.doMock('../src/storage/storage', () => { + const mock = jest.fn().mockImplementation(() => { + const _mock = { + init: jest.fn(), + save, + query, + delete: _delete, + runExclusive: jest.fn(fn => fn.bind(this, _mock)()), + }; + return _mock; + }); + + (mock).getNamespace = () => ({ models: {} }); + + return { ExclusiveStorage: mock }; + }); + 
+ ({ initSchema, DataStore } = require('../src/datastore/datastore')); + + const classes = initSchema(testSchema()); + + const { PostCustomPK } = classes as { + PostCustomPK: PersistentModelConstructor; + }; + + const saved = await DataStore.save( + new PostCustomPK({ + postId: '12345', + title: 'someField', + dateCreated: new Date().toISOString(), + }) + ); + + const deleted: PostCustomPKType[] = await DataStore.delete( + PostCustomPK, + + m => m.postId('eq', saved.postId) + ); + + expect(deleted.length).toEqual(1); + expect(deleted[0]).toEqual(model); + }); + + test('Query params', async () => { + await expect(DataStore.query(undefined)).rejects.toThrow( + 'Constructor is not for a valid model' + ); + + await expect(DataStore.query(undefined)).rejects.toThrow( + 'Constructor is not for a valid model' + ); + + await expect( + DataStore.query(PostCustomPK, 'someid', { page: 0 }) + ).rejects.toThrow('Limit is required when requesting a page'); + + await expect( + DataStore.query(PostCustomPK, 'someid', { + page: 'a', + limit: 10, + }) + ).rejects.toThrow('Page should be a number'); + + await expect( + DataStore.query(PostCustomPK, 'someid', { page: -1, limit: 10 }) + ).rejects.toThrow("Page can't be negative"); + + await expect( + DataStore.query(PostCustomPK, 'someid', { + page: 0, + limit: 'avalue', + }) + ).rejects.toThrow('Limit should be a number'); + + await expect( + DataStore.query(PostCustomPK, 'someid', { + page: 0, + limit: -1, + }) + ).rejects.toThrow("Limit can't be negative"); + }); + + describe('Type definitions', () => { + let PostCustomPK: PersistentModelConstructor; + + beforeEach(() => { + let model: PostCustomPKType; + + jest.resetModules(); + jest.doMock('../src/storage/storage', () => { + const mock = jest.fn().mockImplementation(() => ({ + init: jest.fn(), + runExclusive: jest.fn(() => [model]), + query: jest.fn(() => [model]), + observe: jest.fn(() => Observable.from([])), + })); + + (mock).getNamespace = () => ({ models: {} }); + + return { 
ExclusiveStorage: mock }; + }); + ({ initSchema, DataStore } = require('../src/datastore/datastore')); + + const classes = initSchema(testSchema()); + + ({ PostCustomPK } = classes as { + PostCustomPK: PersistentModelConstructor; + }); + + model = new PostCustomPK({ + postId: '12345', + title: 'Some value', + dateCreated: new Date().toISOString(), + }); + }); + + describe('Query', () => { + test('all', async () => { + const allPostCustomPKs = await DataStore.query(PostCustomPK); + + expectType(allPostCustomPKs); + + const [one] = allPostCustomPKs; + expect(one.title).toBeDefined(); + expect(one).toBeInstanceOf(PostCustomPK); + }); + test('one by custom PK', async () => { + const onePostCustomPKById = await DataStore.query( + PostCustomPK, + 'someid' + ); + + expectType(onePostCustomPKById); + expect(onePostCustomPKById.title).toBeDefined(); + expect(onePostCustomPKById).toBeInstanceOf(PostCustomPK); + }); + test('with criteria', async () => { + const multiPostCustomPKWithCriteria = await DataStore.query( + PostCustomPK, + c => c.title('contains', 'something') + ); + + expectType(multiPostCustomPKWithCriteria); + + const [one] = multiPostCustomPKWithCriteria; + expect(one.title).toBeDefined(); + expect(one).toBeInstanceOf(PostCustomPK); + }); + test('with pagination', async () => { + const allPostCustomPKsPaginated = await DataStore.query( + PostCustomPK, + Predicates.ALL, + { page: 0, limit: 20 } + ); + + expectType(allPostCustomPKsPaginated); + const [one] = allPostCustomPKsPaginated; + expect(one.title).toBeDefined(); + expect(one).toBeInstanceOf(PostCustomPK); + }); + }); + + describe('Query with generic type', () => { + test('all', async () => { + const allPostCustomPKs = await DataStore.query( + PostCustomPK + ); + + expectType(allPostCustomPKs); + + const [one] = allPostCustomPKs; + expect(one.title).toBeDefined(); + expect(one).toBeInstanceOf(PostCustomPK); + }); + test('one by postId', async () => { + const onePostCustomPKById = await DataStore.query( + 
PostCustomPK, + 'someid' + ); + expectType(onePostCustomPKById); + expect(onePostCustomPKById.title).toBeDefined(); + expect(onePostCustomPKById).toBeInstanceOf(PostCustomPK); + }); + test('with criteria', async () => { + const multiPostCustomPKWithCriteria = + await DataStore.query(PostCustomPK, c => + c.title('contains', 'something') + ); + + expectType(multiPostCustomPKWithCriteria); + + const [one] = multiPostCustomPKWithCriteria; + expect(one.title).toBeDefined(); + expect(one).toBeInstanceOf(PostCustomPK); + }); + test('with pagination', async () => { + const allPostCustomPKsPaginated = + await DataStore.query( + PostCustomPK, + Predicates.ALL, + { page: 0, limit: 20 } + ); + + expectType(allPostCustomPKsPaginated); + const [one] = allPostCustomPKsPaginated; + expect(one.title).toBeDefined(); + expect(one).toBeInstanceOf(PostCustomPK); + }); + }); + }); + }); + }); }); diff --git a/packages/datastore/__tests__/IndexedDBAdapter.test.ts b/packages/datastore/__tests__/IndexedDBAdapter.test.ts index db5dcd6440a..4870c65137a 100644 --- a/packages/datastore/__tests__/IndexedDBAdapter.test.ts +++ b/packages/datastore/__tests__/IndexedDBAdapter.test.ts @@ -47,8 +47,7 @@ describe('IndexedDBAdapter tests', () => { describe('Query', () => { let Model: PersistentModelConstructor; let model1Id: string; - - const spyOnGetOne = jest.spyOn(IDBAdapter, 'getById'); + const spyOnGetOne = jest.spyOn(IDBAdapter, 'getByKey'); const spyOnGetAll = jest.spyOn(IDBAdapter, 'getAll'); const spyOnEngine = jest.spyOn(IDBAdapter, 'enginePagination'); const spyOnMemory = jest.spyOn(IDBAdapter, 'inMemoryPagination'); @@ -97,7 +96,7 @@ describe('IndexedDBAdapter tests', () => { jest.clearAllMocks(); }); - it('Should call getById for query by id', async () => { + it('Should call getByKey for query by id', async () => { const result = await DataStore.query(Model, model1Id); expect(result.field1).toEqual('field1 value 0'); @@ -156,6 +155,10 @@ describe('IndexedDBAdapter tests', () => { let 
Profile: PersistentModelConstructor; let profile1Id: string; let user1Id: string; + let Post: PersistentModelConstructor; + let Comment: PersistentModelConstructor; + let post1Id: string; + let comment1Id: string; beforeAll(async () => { ({ initSchema, DataStore } = require('../src/datastore/datastore')); @@ -179,7 +182,27 @@ describe('IndexedDBAdapter tests', () => { )); }); + beforeEach(async () => { + ({ initSchema, DataStore } = require('../src/datastore/datastore')); + const classes = initSchema(testSchema()); + ({ Post } = classes as { + Post: PersistentModelConstructor; + }); + + ({ Comment } = classes as { + Comment: PersistentModelConstructor; + }); + + const post = await DataStore.save(new Post({ title: 'Test' })); + ({ id: post1Id } = post); + + ({ id: comment1Id } = await DataStore.save( + new Comment({ content: 'Test Content', post }) + )); + }); + it('Should perform a cascading delete on a record with a Has One relationship', async () => { + expect.assertions(4); let user = await DataStore.query(User, user1Id); let profile = await DataStore.query(Profile, profile1Id); @@ -193,8 +216,27 @@ describe('IndexedDBAdapter tests', () => { profile = await DataStore.query(Profile, profile1Id); // both should be undefined, even though we only explicitly deleted the user - expect(user).toBeUndefined; - expect(profile).toBeUndefined; + expect(user).toBeUndefined(); + expect(profile).toBeUndefined(); + }); + + it('Should perform a cascading delete on a record with a Has Many relationship', async () => { + expect.assertions(4); + let post = await DataStore.query(Post, post1Id); + let comment = await DataStore.query(Comment, comment1Id); + + // double-checking that both of the records exist at first + expect(post.id).toEqual(post1Id); + expect(comment.id).toEqual(comment1Id); + + await DataStore.delete(Post, post.id); + + post = await DataStore.query(Post, post1Id); + comment = await DataStore.query(Comment, comment1Id); + + // both should be undefined, even though 
we only explicitly deleted the post + expect(post).toBeUndefined(); + expect(comment).toBeUndefined(); }); }); }); diff --git a/packages/datastore/__tests__/Merger.test.ts b/packages/datastore/__tests__/Merger.test.ts index 028315110ba..2c45bd58081 100644 --- a/packages/datastore/__tests__/Merger.test.ts +++ b/packages/datastore/__tests__/Merger.test.ts @@ -4,142 +4,298 @@ import { DataStore as DataStoreType, initSchema as initSchemaType, } from '../src/datastore/datastore'; -import { Model as ModelType, testSchema } from './helpers'; +import { + Model as ModelType, + PostCustomPK as PostCustomPKType, + testSchema, +} from './helpers'; let initSchema: typeof initSchemaType; let DataStore: typeof DataStoreType; let Storage: any; const ownSymbol = Symbol('sync'); -describe('ModelMerger tests', () => { - let modelMerger: ModelMerger; - let Model: PersistentModelConstructor>; +describe('Merger', () => { + describe('ModelMerger tests with id', () => { + let modelMerger: ModelMerger; + let Model: PersistentModelConstructor>; + const testUserSchema = testSchema(); + const modelDefinition = testUserSchema.models.Model; + describe('mergePage', () => { + beforeAll(async () => { + ({ initSchema, DataStore } = require('../src/datastore/datastore')); + ({ Model } = initSchema(testUserSchema) as { + Model: PersistentModelConstructor; + }); + + await DataStore.start(); - describe('mergePage', () => { - beforeAll(async () => { - ({ initSchema, DataStore } = require('../src/datastore/datastore')); - ({ Model } = initSchema(testSchema()) as { - Model: PersistentModelConstructor; + // mergePage doesn't rely on the outbox, so it doesn't need to be mocked + const outbox = (() => {}) as any; + + Storage = (DataStore as any).storage; + modelMerger = new ModelMerger(outbox, ownSymbol); }); - await DataStore.start(); + test('delete after create should result in delete', async () => { + const modelId = 'ce408429-d667-4606-bb4f-3d7e0a8e5938'; - // mergePage doesn't rely on the outbox, so it 
doesn't need to be mocked - const outbox = (() => {}) as any; + const items = [ + { + id: modelId, + field1: 'Create', + optionalField1: null, + _version: 1, + _lastChangedAt: 1619627611860, + _deleted: null, + }, + { + id: modelId, + field1: 'Create', + optionalField1: null, + _version: 2, + _lastChangedAt: 1619627619017, + _deleted: true, + }, + ]; - Storage = (DataStore as any).storage; - modelMerger = new ModelMerger(outbox, ownSymbol); - }); + await Storage.runExclusive(async storage => { + await modelMerger.mergePage(storage, Model, items, modelDefinition); + }); - test('delete after create should result in delete', async () => { - const modelId = 'ce408429-d667-4606-bb4f-3d7e0a8e5938'; - - const items = [ - { - id: modelId, - field1: 'Create', - optionalField1: null, - _version: 1, - _lastChangedAt: 1619627611860, - _deleted: null, - }, - { - id: modelId, - field1: 'Create', - optionalField1: null, - _version: 2, - _lastChangedAt: 1619627619017, - _deleted: true, - }, - ]; - - await Storage.runExclusive(async storage => { - await modelMerger.mergePage(storage, Model, items); + const record = await DataStore.query(Model, modelId); + + expect(record).toBeUndefined(); }); - const record = await DataStore.query(Model, modelId); + test('update after create should persist data from update', async () => { + const modelId = '15739024-910d-4c1e-b401-65f5f7838f42'; - expect(record).toBeUndefined(); - }); + const items = [ + { + id: modelId, + field1: 'Create', + optionalField1: null, + _version: 1, + _lastChangedAt: 1619627611860, + _deleted: null, + }, + { + id: modelId, + field1: 'Update', + optionalField1: null, + _version: 2, + _lastChangedAt: 1619627619017, + _deleted: null, + }, + { + id: modelId, + field1: 'Another Update', + optionalField1: 'Optional', + _version: 2, + _lastChangedAt: 1619627621329, + _deleted: null, + }, + ]; + + await Storage.runExclusive(async storage => { + await modelMerger.mergePage(storage, Model, items, modelDefinition); + }); + + 
const record = await DataStore.query(Model, modelId); - test('update after create should persist data from update', async () => { - const modelId = '15739024-910d-4c1e-b401-65f5f7838f42'; - - const items = [ - { - id: modelId, - field1: 'Create', - optionalField1: null, - _version: 1, - _lastChangedAt: 1619627611860, - _deleted: null, - }, - { - id: modelId, - field1: 'Update', - optionalField1: null, - _version: 2, - _lastChangedAt: 1619627619017, - _deleted: null, - }, - { - id: modelId, - field1: 'Another Update', - optionalField1: 'Optional', - _version: 2, - _lastChangedAt: 1619627621329, - _deleted: null, - }, - ]; - - await Storage.runExclusive(async storage => { - await modelMerger.mergePage(storage, Model, items); + expect(record.field1).toEqual('Another Update'); + expect(record.optionalField1).toEqual('Optional'); }); - const record = await DataStore.query(Model, modelId); + test('create > delete > create => create', async () => { + const modelId = '3d2d9d63-a561-4a29-af29-fd4ef465a5ee'; - expect(record.field1).toEqual('Another Update'); - expect(record.optionalField1).toEqual('Optional'); + const items = [ + { + id: modelId, + field1: 'Create', + optionalField1: null, + _version: 1, + _lastChangedAt: 1619627611860, + _deleted: null, + }, + { + id: modelId, + field1: 'Create', + optionalField1: null, + _version: 2, + _lastChangedAt: 1619627619017, + _deleted: true, + }, + { + id: modelId, + field1: 'New Create with the same id', + optionalField1: null, + _version: 1, + _lastChangedAt: 1619627621329, + _deleted: null, + }, + ]; + + await Storage.runExclusive(async storage => { + await modelMerger.mergePage(storage, Model, items, modelDefinition); + }); + + const record = await DataStore.query(Model, modelId); + + expect(record).not.toBeUndefined(); + expect(record.field1).toEqual('New Create with the same id'); + }); }); + }); + + describe('ModelMerger tests with Custom PK', () => { + let modelMerger: ModelMerger; + let PostCustomPK; + const 
testUserSchema = testSchema(); + const modelDefinition = testUserSchema.models.PostCustomPK; + describe('mergePage', () => { + beforeAll(async () => { + ({ initSchema, DataStore } = require('../src/datastore/datastore')); + ({ PostCustomPK } = initSchema(testUserSchema) as { + PostCustomPK: PersistentModelConstructor; + }); + + await DataStore.start(); + + // mergePage doesn't rely on the outbox, so it doesn't need to be mocked + const outbox = (() => {}) as any; - test('create > delete > create => create', async () => { - const modelId = '3d2d9d63-a561-4a29-af29-fd4ef465a5ee'; - - const items = [ - { - id: modelId, - field1: 'Create', - optionalField1: null, - _version: 1, - _lastChangedAt: 1619627611860, - _deleted: null, - }, - { - id: modelId, - field1: 'Create', - optionalField1: null, - _version: 2, - _lastChangedAt: 1619627619017, - _deleted: true, - }, - { - id: modelId, - field1: 'New Create with the same id', - optionalField1: null, - _version: 1, - _lastChangedAt: 1619627621329, - _deleted: null, - }, - ]; - - await Storage.runExclusive(async storage => { - await modelMerger.mergePage(storage, Model, items); + Storage = (DataStore as any).storage; + modelMerger = new ModelMerger(outbox, ownSymbol); }); - const record = await DataStore.query(Model, modelId); + test('delete after create should result in delete', async () => { + const customPk = 'ce408429-d667-4606-bb4f-3d7e0a8e5939'; - expect(record).not.toBeUndefined(); - expect(record.field1).toEqual('New Create with the same id'); + const items = [ + { + postId: customPk, + title: 'Create1', + description: null, + _version: 1, + _lastChangedAt: 1619627611860, + _deleted: null, + }, + { + postId: customPk, + title: 'Create1', + description: null, + _version: 2, + _lastChangedAt: 1619627619017, + _deleted: true, + }, + ]; + + await Storage.runExclusive(async storage => { + await modelMerger.mergePage( + storage, + PostCustomPK, + items, + modelDefinition + ); + }); + + const record = await 
DataStore.query(PostCustomPK, customPk); + + expect(record).toBeUndefined(); + }); + + test('update after create should persist data from update', async () => { + const customPk = '15739024-910d-4c1e-b401-65f5f7838f43'; + + const items = [ + { + postId: customPk, + title: 'Create1', + description: null, + _version: 1, + _lastChangedAt: 1619627611860, + _deleted: null, + }, + { + postId: customPk, + title: 'Update1', + description: null, + _version: 2, + _lastChangedAt: 1619627619017, + _deleted: null, + }, + { + postId: customPk, + title: 'Another Update1', + description: 'Optional1', + _version: 2, + _lastChangedAt: 1619627621329, + _deleted: null, + }, + ]; + + await Storage.runExclusive(async storage => { + await modelMerger.mergePage( + storage, + PostCustomPK, + items, + modelDefinition + ); + }); + + const record = await DataStore.query(PostCustomPK, customPk); + + expect(record.title).toEqual('Another Update1'); + expect(record.description).toEqual('Optional1'); + }); + + test('create > delete > create => create', async () => { + const customPk = '3d2d9d63-a561-4a29-af29-fd4ef465a5eg'; + + const items = [ + { + postId: customPk, + title: 'Create1', + description: null, + _version: 1, + _lastChangedAt: 1619627611860, + _deleted: null, + }, + { + postId: customPk, + title: 'Create1', + description: null, + _version: 2, + _lastChangedAt: 1619627619017, + _deleted: true, + }, + { + postId: customPk, + title: 'New Create with the same custom pk', + description: null, + _version: 1, + _lastChangedAt: 1619627621329, + _deleted: null, + }, + ]; + + await Storage.runExclusive(async storage => { + await modelMerger.mergePage( + storage, + PostCustomPK, + items, + modelDefinition + ); + }); + + const record = await DataStore.query(PostCustomPK, customPk); + + expect(record).not.toBeUndefined(); + expect(record.title).toEqual('New Create with the same custom pk'); + }); }); }); }); diff --git a/packages/datastore/__tests__/__snapshots__/indexeddb.test.ts.snap 
b/packages/datastore/__tests__/__snapshots__/indexeddb.test.ts.snap index 72ae627d52c..19909a49462 100644 --- a/packages/datastore/__tests__/__snapshots__/indexeddb.test.ts.snap +++ b/packages/datastore/__tests__/__snapshots__/indexeddb.test.ts.snap @@ -1,6 +1,6 @@ // Jest Snapshot v1, https://goo.gl/fbAQLP -exports[`DB versions migration Migration from v1 to v2: v2-schema 1`] = ` +exports[`DB versions migration Migration from v1 to v3: v3-schema 1`] = ` Object { "data": Object { "data": Array [ @@ -467,57 +467,57 @@ Object { }, ], "databaseName": "amplify-datastore", - "databaseVersion": 0.2, + "databaseVersion": 0.3, "tables": Array [ Object { "name": "datastore_Setting", "rowCount": 1, - "schema": "++,&id", + "schema": "++,&[id]", }, Object { "name": "sync_ModelMetadata", "rowCount": 0, - "schema": "++,&id", + "schema": "++,&[id]", }, Object { "name": "sync_MutationEvent", "rowCount": 0, - "schema": "++,&id", + "schema": "++,&[id]", }, Object { "name": "user_Author", "rowCount": 2, - "schema": "++,&id", + "schema": "++,&[id]", }, Object { "name": "user_Blog", "rowCount": 6, - "schema": "++,&id", + "schema": "++,[blogOwnerId],&[id]", }, Object { "name": "user_BlogOwner", "rowCount": 9, - "schema": "++,&id", + "schema": "++,&[id]", }, Object { "name": "user_Comment", "rowCount": 0, - "schema": "++,&id,commentPostId", + "schema": "++,&[id],[commentPostId]", }, Object { "name": "user_Person", "rowCount": 0, - "schema": "++,&id", + "schema": "++,&[id]", }, Object { "name": "user_Post", "rowCount": 2, - "schema": "++,&id", + "schema": "++,&[id],[postBlogId],[referencePostId]", }, Object { "name": "user_PostAuthorJoin", "rowCount": 0, - "schema": "++,&id", + "schema": "++,[authorId],&[id],[postId]", }, ], }, diff --git a/packages/datastore/__tests__/__snapshots__/sync.test.ts.snap b/packages/datastore/__tests__/__snapshots__/sync.test.ts.snap index 3fba0a3bbe0..8cdde8d2051 100644 --- a/packages/datastore/__tests__/__snapshots__/sync.test.ts.snap +++ 
b/packages/datastore/__tests__/__snapshots__/sync.test.ts.snap @@ -1,5 +1,24 @@ // Jest Snapshot v1, https://goo.gl/fbAQLP +exports[`Sync jitteredRetry custom pk: should return all data 1`] = ` +Object { + "data": Object { + "syncPosts": Object { + "items": Array [ + Object { + "postId": "1", + "title": "Item 1", + }, + Object { + "postId": "2", + "title": "Item 2", + }, + ], + }, + }, +} +`; + exports[`Sync jitteredRetry should return all data 1`] = ` Object { "data": Object { diff --git a/packages/datastore/__tests__/custom-pk-typings/identifier-fields.test.tsx b/packages/datastore/__tests__/custom-pk-typings/identifier-fields.test.tsx new file mode 100644 index 00000000000..95c15bc3018 --- /dev/null +++ b/packages/datastore/__tests__/custom-pk-typings/identifier-fields.test.tsx @@ -0,0 +1,47 @@ +// TODO: Look at ts-expect-error once we move to TypeScript 3.9 or above +import { IdentifierFields, __modelMeta__ } from '../../src'; +import { + expectType, + LegacyCustomRO, + LegacyNoMetadata, + ManagedCustomRO, + OptionallyManagedCustomRO, + CompositeCustomRO, + CustomIdentifierCustomRO, +} from '../helpers'; + +describe('IdentifierFields', () => { + test('Types for identifiers match model definition', () => { + expectType<'id'>(undefined as IdentifierFields); + + expectType<'id'>(undefined as IdentifierFields); + + expectType<'id'>( + undefined as IdentifierFields< + ManagedCustomRO, + ManagedCustomRO[typeof __modelMeta__] + > + ); + + expectType<'id'>( + undefined as IdentifierFields< + OptionallyManagedCustomRO, + OptionallyManagedCustomRO[typeof __modelMeta__] + > + ); + + expectType<'myId'>( + undefined as IdentifierFields< + CustomIdentifierCustomRO, + CustomIdentifierCustomRO[typeof __modelMeta__] + > + ); + + expectType<'tenant' | 'dob'>( + undefined as IdentifierFields< + CompositeCustomRO, + CompositeCustomRO[typeof __modelMeta__] + > + ); + }); +}); diff --git 
a/packages/datastore/__tests__/custom-pk-typings/model-init-mutable-model-typings/composite-identifier.test.tsx b/packages/datastore/__tests__/custom-pk-typings/model-init-mutable-model-typings/composite-identifier.test.tsx new file mode 100644 index 00000000000..d7854c2bc96 --- /dev/null +++ b/packages/datastore/__tests__/custom-pk-typings/model-init-mutable-model-typings/composite-identifier.test.tsx @@ -0,0 +1,190 @@ +// TODO: Look at ts-expect-error once we move to TypeScript 3.9 or above + +import { + ModelInit, + PersistentModelConstructor, + Predicates, + __modelMeta__, +} from '../../../src'; +import { + DataStore, + dummyInstance, + expectType, + CompositeCustomRO, + CompositeDefaultRO, +} from '../../helpers'; + +describe('Composite Identifier', () => { + test(`CompositeDefaultRO`, async () => { + expectType< + ModelInit + >({ + tenant: '', + dob: '', + name: '', + description: '', + }); + + expectType< + ModelInit + >({ + tenant: '', + dob: '', + name: '', + description: '', + // @ts-expect-error + // x: 234, + }); + + CompositeDefaultRO.copyOf({} as CompositeDefaultRO, d => { + // @ts-expect-error + // d.id; + // @ts-expect-error + // d.id = ''; + + d.tenant; + // @ts-expect-error + // d.tenant = ''; + d.dob; + // @ts-expect-error + // d.dob = ''; + + d.name = ''; + d.description = ''; + + d.createdAt; + // @ts-expect-error + // d.createdAt = ''; + + d.updatedAt; + // @ts-expect-error + // d.updatedAt = ''; + }); + + // Query + // @ts-expect-error + // await DataStore.query(CompositeDefaultRO, 'someid'); + // @ts-expect-error + // await DataStore.query(CompositeDefaultRO, { id: 'someid' }); + + expectType( + await DataStore.query(CompositeDefaultRO, { tenant: '', dob: '' }) + ); + expectType(await DataStore.query(CompositeDefaultRO)); + expectType( + await DataStore.query(CompositeDefaultRO, Predicates.ALL) + ); + expectType( + await DataStore.query(CompositeDefaultRO, c => c.createdAt('ge', '2019')) + ); + + // Save + expectType( + await 
DataStore.save(dummyInstance()) + ); + expectType( + await DataStore.save(dummyInstance(), c => + c.createdAt('ge', '2019') + ) + ); + + // Delete + + // @ts-expect-error + // await DataStore.delete(CompositeDefaultRO, '') + + expectType( + await DataStore.delete(CompositeDefaultRO, { tenant: '', dob: '' }) + ); + expectType( + await DataStore.delete(dummyInstance()) + ); + expectType( + await DataStore.delete(dummyInstance(), c => + c.description('contains', 'something') + ) + ); + expectType( + await DataStore.delete(CompositeDefaultRO, Predicates.ALL) + ); + expectType( + await DataStore.delete(CompositeDefaultRO, c => c.createdAt('le', '2019')) + ); + + // Observe + DataStore.observe(CompositeDefaultRO).subscribe(({ model, element }) => { + expectType>(model); + expectType(element); + }); + DataStore.observe(CompositeDefaultRO, c => + c.description('beginsWith', 'something') + ).subscribe(({ model, element }) => { + expectType>(model); + expectType(element); + }); + + // Observe query + DataStore.observeQuery(CompositeDefaultRO).subscribe(({ items }) => { + expectType(items); + }); + DataStore.observeQuery(CompositeDefaultRO, c => + c.description('notContains', 'something') + ).subscribe(({ items }) => { + expectType(items); + }); + DataStore.observeQuery( + CompositeDefaultRO, + c => c.description('notContains', 'something'), + { sort: c => c.createdAt('ASCENDING') } + ).subscribe(({ items }) => { + expectType(items); + }); + }); + + test(`CompositeCustomRO`, async () => { + expectType< + ModelInit + >({ + tenant: '', + dob: '', + name: '', + description: '', + }); + + expectType< + ModelInit + >({ + tenant: '', + dob: '', + name: '', + description: '', + // @ts-expect-error + // x: 234, + }); + + CompositeCustomRO.copyOf({} as CompositeCustomRO, d => { + // @ts-expect-error + // d.id; + // @ts-expect-error + // d.id = ''; + + d.tenant; + // @ts-expect-error + // d.tenant = ''; + d.dob; + // @ts-expect-error + // d.dob = ''; + + d.name = ''; + d.description = 
''; + + d.createdOn; + // @ts-expect-error + // d.createdOn = ''; + + d.updatedOn; + // @ts-expect-error + // d.updatedOn = ''; + }); + }); +}); diff --git a/packages/datastore/__tests__/custom-pk-typings/model-init-mutable-model-typings/custom-identifier.test.tsx b/packages/datastore/__tests__/custom-pk-typings/model-init-mutable-model-typings/custom-identifier.test.tsx new file mode 100644 index 00000000000..789217c7229 --- /dev/null +++ b/packages/datastore/__tests__/custom-pk-typings/model-init-mutable-model-typings/custom-identifier.test.tsx @@ -0,0 +1,105 @@ +// TODO: Look at ts-expect-error once we move to TypeScript 3.9 or above +import { ModelInit, __modelMeta__ } from '../../../src'; +import { + expectType, + CustomIdentifierCustomRO, + CustomIdentifierDefaultRO, +} from '../../helpers'; + +describe('Custom Identifier', () => { + test(`CustomIdentifierDefaultRO`, async () => { + expectType< + ModelInit< + CustomIdentifierDefaultRO, + CustomIdentifierDefaultRO[typeof __modelMeta__] + > + >({ + myId: '', + name: '', + description: '', + }); + + expectType< + ModelInit< + CustomIdentifierDefaultRO, + CustomIdentifierDefaultRO[typeof __modelMeta__] + > + >({ + myId: '', + name: '', + description: '', + // @ts-expect-error + // x: 234, + }); + + CustomIdentifierDefaultRO.copyOf({} as CustomIdentifierDefaultRO, d => { + // @ts-expect-error + // d.id; + // @ts-expect-error + // d.id = ''; + + d.myId; + // @ts-expect-error + // d.myId = ''; + + d.name = ''; + d.description = ''; + + d.createdAt; + // @ts-expect-error + // d.createdAt = ''; + + d.updatedAt; + // @ts-expect-error + // d.updatedAt = ''; + }); + }); + + test(`CustomIdentifierCustomRO`, async () => { + expectType< + ModelInit< + CustomIdentifierCustomRO, + CustomIdentifierCustomRO[typeof __modelMeta__] + > + >({ + myId: '', + name: '', + description: '', + }); + + expectType< + ModelInit< + CustomIdentifierCustomRO, + CustomIdentifierCustomRO[typeof __modelMeta__] + > + >({ + myId: '', + name: '', + 
description: '', + // @ts-expect-error + // x: 234, + }); + + CustomIdentifierCustomRO.copyOf({} as CustomIdentifierCustomRO, d => { + // @ts-expect-error + // d.id; + // @ts-expect-error + // d.id = ''; + + d.myId; + // @ts-expect-error + // d.myId = ''; + + d.name = ''; + d.description = ''; + + d.createdOn; + // @ts-expect-error + // d.createdOn = ''; + + d.updatedOn; + // @ts-expect-error + // d.updatedOn = ''; + }); + }); +}); diff --git a/packages/datastore/__tests__/custom-pk-typings/model-init-mutable-model-typings/legacy-backwards-compatibility.test.tsx b/packages/datastore/__tests__/custom-pk-typings/model-init-mutable-model-typings/legacy-backwards-compatibility.test.tsx new file mode 100644 index 00000000000..593f1742dd4 --- /dev/null +++ b/packages/datastore/__tests__/custom-pk-typings/model-init-mutable-model-typings/legacy-backwards-compatibility.test.tsx @@ -0,0 +1,494 @@ +// TODO: Look at ts-expect-error once we move to TypeScript 3.9 or above +import { + ModelInit, + PersistentModelConstructor, + Predicates, + __modelMeta__, +} from '../../../src'; +import { + DataStore, + dummyInstance, + expectType, + LegacyCustomROMETA, + LegacyDefaultRO, + LegacyCustomRO, + LegacyNoMetadata, + CustomIdentifierNoRO, +} from '../../helpers'; + +describe('Legacy - backwards compatibility', () => { + test(`LegacyNoMetadata`, async () => { + expectType>({ + // @ts-expect-error + // id: '234', + name: '', + description: '', + }); + + expectType>({ + name: '', + description: '', + // @ts-expect-error + // x: 234, + }); + + expectType>({ + name: '', + description: '', + createdAt: '', + }); + + LegacyNoMetadata.copyOf({} as LegacyNoMetadata, d => { + d.id; + // @ts-expect-error + // d.id = ''; + + d.name = ''; + d.description = ''; + + d.createdAt; + d.createdAt = ''; + + d.updatedAt; + d.updatedAt = ''; + }); + + // Query + expectType( + await DataStore.query(LegacyNoMetadata, 'someid') + ); + expectType( + await DataStore.query(LegacyNoMetadata, { id: 'someid' }) + 
); + expectType(await DataStore.query(LegacyNoMetadata)); + expectType( + await DataStore.query(LegacyNoMetadata, Predicates.ALL) + ); + expectType( + await DataStore.query(LegacyNoMetadata, c => c.createdAt('ge', '2019')) + ); + + // Save + expectType( + await DataStore.save(dummyInstance()) + ); + expectType( + await DataStore.save(dummyInstance(), c => + c.createdAt('ge', '2019') + ) + ); + + // Delete + expectType( + await DataStore.delete(LegacyNoMetadata, '') + ); + expectType( + await DataStore.delete(dummyInstance()) + ); + expectType( + await DataStore.delete(dummyInstance(), c => + c.description('contains', 'something') + ) + ); + expectType( + await DataStore.delete(LegacyNoMetadata, Predicates.ALL) + ); + expectType( + await DataStore.delete(LegacyNoMetadata, c => c.createdAt('le', '2019')) + ); + + // Observe + DataStore.observe(LegacyNoMetadata).subscribe(({ model, element }) => { + expectType>(model); + expectType(element); + }); + DataStore.observe(LegacyNoMetadata, c => + c.description('beginsWith', 'something') + ).subscribe(({ model, element }) => { + expectType>(model); + expectType(element); + }); + DataStore.observe(dummyInstance()).subscribe( + ({ model, element }) => { + new model({ + name: '', + description: '', + }); + expectType>(model); + expectType(element); + } + ); + + // Observe query + DataStore.observeQuery(LegacyNoMetadata).subscribe(({ items }) => { + expectType(items); + }); + DataStore.observeQuery(LegacyNoMetadata, c => + c.description('notContains', 'something') + ).subscribe(({ items }) => { + expectType(items); + }); + DataStore.observeQuery( + LegacyNoMetadata, + c => c.description('notContains', 'something'), + { sort: c => c.createdAt('ASCENDING') } + ).subscribe(({ items }) => { + expectType(items); + }); + }); + + test(`LegacyDefaultRO`, async () => { + expectType>({ + // @ts-expect-error + // id: '234', + name: '', + description: '', + }); + + expectType>({ + name: '', + description: '', + // @ts-expect-error + // x: 
234, + }); + + LegacyDefaultRO.copyOf({} as LegacyDefaultRO, d => { + d.id; + // @ts-expect-error + // d.id = ''; + + d.name = ''; + d.description = ''; + + d.createdAt; + // @ts-expect-error + // d.createdAt = ''; + + d.updatedAt; + // @ts-expect-error + // d.updatedAt = ''; + }); + + // Query + expectType( + await DataStore.query(LegacyDefaultRO, 'someid') + ); + expectType(await DataStore.query(LegacyDefaultRO)); + expectType( + await DataStore.query(LegacyDefaultRO, Predicates.ALL) + ); + expectType( + await DataStore.query(LegacyDefaultRO, c => c.createdAt('ge', '2019')) + ); + + // Save + expectType( + await DataStore.save(dummyInstance()) + ); + expectType( + await DataStore.save(dummyInstance(), c => + c.createdAt('ge', '2019') + ) + ); + + // Delete + expectType(await DataStore.delete(LegacyDefaultRO, '')); + expectType( + await DataStore.delete(dummyInstance()) + ); + expectType( + await DataStore.delete(dummyInstance(), c => + c.description('contains', 'something') + ) + ); + expectType( + await DataStore.delete(LegacyDefaultRO, Predicates.ALL) + ); + expectType( + await DataStore.delete(LegacyDefaultRO, c => c.createdAt('le', '2019')) + ); + + // Observe + DataStore.observe(LegacyDefaultRO).subscribe(({ model, element }) => { + expectType>(model); + expectType(element); + }); + DataStore.observe(LegacyDefaultRO, c => + c.description('beginsWith', 'something') + ).subscribe(({ model, element }) => { + expectType>(model); + expectType(element); + }); + DataStore.observe(dummyInstance()).subscribe( + ({ model, element }) => { + expectType>(model); + expectType(element); + } + ); + + // Observe query + DataStore.observeQuery(LegacyDefaultRO).subscribe(({ items }) => { + expectType(items); + }); + DataStore.observeQuery(LegacyDefaultRO, c => + c.description('notContains', 'something') + ).subscribe(({ items }) => { + expectType(items); + }); + DataStore.observeQuery( + LegacyDefaultRO, + c => c.description('notContains', 'something'), + { sort: c => 
c.createdAt('ASCENDING') } + ).subscribe(({ items }) => { + expectType(items); + }); + }); + + test(`LegacyCustomRO`, async () => { + expectType>({ + // @ts-expect-error + // id: '234', + name: '', + description: '', + }); + + expectType>({ + name: '', + description: '', + // @ts-expect-error + // createdOn: '', + }); + + expectType>({ + name: '', + description: '', + // @ts-expect-error + // createdAt: '', + }); + + LegacyCustomRO.copyOf({} as LegacyCustomRO, d => { + d.id; + // @ts-expect-error + // d.id = ''; + + d.name = ''; + d.description = ''; + + // @ts-expect-error + // d.createdAt; + + // @ts-expect-error + // d.updatedAt; + + d.createdOn; + // @ts-expect-error + // d.createdOn = ''; + + d.updatedOn; + // @ts-expect-error + // d.updatedOn = ''; + }); + + // Query + expectType(await DataStore.query(LegacyCustomRO, 'someid')); + expectType(await DataStore.query(LegacyCustomRO)); + expectType( + await DataStore.query(LegacyCustomRO, Predicates.ALL) + ); + expectType( + await DataStore.query(LegacyCustomRO, c => c.createdOn('ge', '2019')) + ); + + // Save + expectType( + await DataStore.save(dummyInstance()) + ); + expectType( + await DataStore.save(dummyInstance(), c => + c.createdOn('ge', '2019') + ) + ); + + // Delete + expectType(await DataStore.delete(LegacyCustomRO, '')); + expectType( + await DataStore.delete(dummyInstance()) + ); + expectType( + await DataStore.delete(dummyInstance(), c => + c.description('contains', 'something') + ) + ); + expectType( + await DataStore.delete(LegacyCustomRO, Predicates.ALL) + ); + expectType( + await DataStore.delete(LegacyCustomRO, c => c.createdOn('le', '2019')) + ); + + // Observe + DataStore.observe(LegacyCustomRO).subscribe(({ model, element }) => { + expectType>(model); + expectType(element); + }); + DataStore.observe(LegacyCustomRO, c => + c.description('beginsWith', 'something') + ).subscribe(({ model, element }) => { + expectType>(model); + expectType(element); + }); + 
DataStore.observe(dummyInstance()).subscribe( + ({ model, element }) => { + expectType>(model); + expectType(element); + } + ); + + // Observe query + DataStore.observeQuery(LegacyCustomRO).subscribe(({ items }) => { + expectType(items); + }); + DataStore.observeQuery(LegacyCustomRO, c => + c.description('notContains', 'something') + ).subscribe(({ items }) => { + expectType(items); + }); + DataStore.observeQuery( + LegacyCustomRO, + c => c.description('notContains', 'something'), + { sort: c => c.createdOn('ASCENDING') } + ).subscribe(({ items }) => { + expectType(items); + }); + }); + + test(`CustomIdentifierNoRO`, async () => { + expectType>({ + // @ts-expect-error + // id: '234', + myId: '23342', + name: '', + description: '', + }); + + expectType>({ + myId: '23342', + name: '', + description: '', + createdAt: '', + }); + + expectType>({ + myId: '23342', + name: '', + description: '', + createdAt: '', + }); + + CustomIdentifierNoRO.copyOf({} as CustomIdentifierNoRO, d => { + d.myId; + // @ts-expect-error + // d.myId = ''; + + d.name = ''; + d.description = ''; + + d.createdAt; + d.createdAt = ''; + + d.updatedAt; + d.updatedAt = ''; + + // @ts-expect-error + // d.createdOn; + + // @ts-expect-error + // d.updatedOn; + }); + + // Query + expectType( + await DataStore.query(CustomIdentifierNoRO, 'someid') + ); + expectType( + await DataStore.query(CustomIdentifierNoRO, { myId: 'someid' }) + ); + expectType( + await DataStore.query(CustomIdentifierNoRO) + ); + expectType( + await DataStore.query(CustomIdentifierNoRO, Predicates.ALL) + ); + expectType( + await DataStore.query(CustomIdentifierNoRO, c => + c.createdAt('ge', '2019') + ) + ); + + // Save + expectType( + await DataStore.save(dummyInstance()) + ); + expectType( + await DataStore.save(dummyInstance(), c => + c.createdAt('ge', '2019') + ) + ); + + // Delete + expectType( + await DataStore.delete(CustomIdentifierNoRO, '') + ); + expectType( + await DataStore.delete(dummyInstance()) + ); + expectType( + await 
DataStore.delete(dummyInstance(), c => + c.description('contains', 'something') + ) + ); + expectType( + await DataStore.delete(CustomIdentifierNoRO, Predicates.ALL) + ); + expectType( + await DataStore.delete(CustomIdentifierNoRO, c => + c.createdAt('le', '2019') + ) + ); + + // Observe + DataStore.observe(CustomIdentifierNoRO).subscribe(({ model, element }) => { + expectType>(model); + expectType(element); + }); + DataStore.observe(CustomIdentifierNoRO, c => + c.description('beginsWith', 'something') + ).subscribe(({ model, element }) => { + expectType>(model); + expectType(element); + }); + DataStore.observe(dummyInstance()).subscribe( + ({ model, element }) => { + expectType>(model); + expectType(element); + } + ); + + // Observe query + DataStore.observeQuery(CustomIdentifierNoRO).subscribe(({ items }) => { + expectType(items); + }); + DataStore.observeQuery(CustomIdentifierNoRO, c => + c.description('notContains', 'something') + ).subscribe(({ items }) => { + expectType(items); + }); + DataStore.observeQuery( + CustomIdentifierNoRO, + c => c.description('notContains', 'something'), + { sort: c => c.createdAt('ASCENDING') } + ).subscribe(({ items }) => { + expectType(items); + }); + }); +}); diff --git a/packages/datastore/__tests__/custom-pk-typings/model-init-mutable-model-typings/managed-identifier.test.tsx b/packages/datastore/__tests__/custom-pk-typings/model-init-mutable-model-typings/managed-identifier.test.tsx new file mode 100644 index 00000000000..a2f54583688 --- /dev/null +++ b/packages/datastore/__tests__/custom-pk-typings/model-init-mutable-model-typings/managed-identifier.test.tsx @@ -0,0 +1,248 @@ +import { + ModelInit, + PersistentModelConstructor, + Predicates, + __modelMeta__, +} from '../../../src'; +import { + DataStore, + dummyInstance, + expectType, + ManagedCustomRO, + ManagedDefaultRO, +} from '../../helpers'; + +describe('Managed Identifier', () => { + test(`ManagedDefaultRO`, async () => { + expectType>({ + // @ts-expect-error + // 
id: 'eeeeeee', + name: '', + description: '', + }); + + expectType>({ + name: '', + description: '', + // @ts-expect-error + // x: 234, + }); + + expectType>({ + name: '', + description: '', + // @ts-expect-error + // x: 234, + }); + + ManagedDefaultRO.copyOf({} as ManagedDefaultRO, d => { + d.id; + // @ts-expect-error + // d.id = ''; + + d.name = ''; + d.description = ''; + + d.createdAt; + // @ts-expect-error + // d.createdAt = ''; + + d.updatedAt; + // @ts-expect-error + // d.updatedAt = ''; + }); + + // Query + expectType( + await DataStore.query(ManagedDefaultRO, 'someid') + ); + expectType( + await DataStore.query(ManagedDefaultRO, { id: 'someid' }) + ); + expectType(await DataStore.query(ManagedDefaultRO)); + expectType( + await DataStore.query(ManagedDefaultRO, Predicates.ALL) + ); + expectType( + await DataStore.query(ManagedDefaultRO, c => c.createdAt('ge', '2019')) + ); + + // Save + expectType( + await DataStore.save(dummyInstance()) + ); + expectType( + await DataStore.save(dummyInstance(), c => + c.createdAt('ge', '2019') + ) + ); + + // Delete + expectType( + await DataStore.delete(ManagedDefaultRO, '') + ); + expectType( + await DataStore.delete(dummyInstance()) + ); + expectType( + await DataStore.delete(dummyInstance(), c => + c.description('contains', 'something') + ) + ); + expectType( + await DataStore.delete(ManagedDefaultRO, Predicates.ALL) + ); + expectType( + await DataStore.delete(ManagedDefaultRO, c => c.createdAt('le', '2019')) + ); + + // Observe + DataStore.observe(ManagedDefaultRO).subscribe(({ model, element }) => { + expectType>(model); + expectType(element); + }); + DataStore.observe(ManagedDefaultRO, c => + c.description('beginsWith', 'something') + ).subscribe(({ model, element }) => { + expectType>(model); + expectType(element); + }); + DataStore.observe(dummyInstance()).subscribe( + ({ model, element }) => { + expectType>(model); + expectType(element); + } + ); + + // Observe query + 
DataStore.observeQuery(ManagedDefaultRO).subscribe(({ items }) => { + expectType(items); + }); + DataStore.observeQuery(ManagedDefaultRO, c => + c.description('notContains', 'something') + ).subscribe(({ items }) => { + expectType(items); + }); + DataStore.observeQuery( + ManagedDefaultRO, + c => c.description('notContains', 'something'), + { sort: c => c.createdAt('ASCENDING') } + ).subscribe(({ items }) => { + expectType(items); + }); + }); + + test(`ManagedCustomRO`, async () => { + expectType>({ + // @ts-expect-error + // id: 'eeeeeee', + name: '', + description: '', + }); + + expectType>({ + name: '', + description: '', + // @ts-expect-error + // x: 234, + }); + + expectType>({ + name: '', + description: '', + // @ts-expect-error + // x: 234, + }); + + ManagedCustomRO.copyOf({} as ManagedCustomRO, d => { + d.id; + // @ts-expect-error + // d.id = ''; + + d.name = ''; + d.description = ''; + + d.createdOn; + // @ts-expect-error + // d.createdOn = ''; + + d.updatedOn; + // @ts-expect-error + // d.updatedOn = ''; + }); + + // Query + expectType( + await DataStore.query(ManagedCustomRO, 'someid') + ); + expectType(await DataStore.query(ManagedCustomRO)); + expectType( + await DataStore.query(ManagedCustomRO, Predicates.ALL) + ); + expectType( + await DataStore.query(ManagedCustomRO, c => c.createdOn('ge', '2019')) + ); + + // Save + expectType( + await DataStore.save(dummyInstance()) + ); + expectType( + await DataStore.save(dummyInstance(), c => + c.createdOn('ge', '2019') + ) + ); + + // Delete + expectType(await DataStore.delete(ManagedCustomRO, '')); + expectType( + await DataStore.delete(dummyInstance()) + ); + expectType( + await DataStore.delete(dummyInstance(), c => + c.description('contains', 'something') + ) + ); + expectType( + await DataStore.delete(ManagedCustomRO, Predicates.ALL) + ); + expectType( + await DataStore.delete(ManagedCustomRO, c => c.createdOn('le', '2019')) + ); + + // Observe + DataStore.observe(ManagedCustomRO).subscribe(({ model, 
element }) => { + expectType>(model); + expectType(element); + }); + DataStore.observe(ManagedCustomRO, c => + c.description('beginsWith', 'something') + ).subscribe(({ model, element }) => { + expectType>(model); + expectType(element); + }); + DataStore.observe(dummyInstance()).subscribe( + ({ model, element }) => { + expectType>(model); + expectType(element); + } + ); + + // Observe query + DataStore.observeQuery(ManagedCustomRO).subscribe(({ items }) => { + expectType(items); + }); + DataStore.observeQuery(ManagedCustomRO, c => + c.description('notContains', 'something') + ).subscribe(({ items }) => { + expectType(items); + }); + DataStore.observeQuery( + ManagedCustomRO, + c => c.description('notContains', 'something'), + { sort: c => c.createdOn('ASCENDING') } + ).subscribe(({ items }) => { + expectType(items); + }); + }); +}); diff --git a/packages/datastore/__tests__/custom-pk-typings/model-init-mutable-model-typings/observe-all.test.tsx b/packages/datastore/__tests__/custom-pk-typings/model-init-mutable-model-typings/observe-all.test.tsx new file mode 100644 index 00000000000..959addc660e --- /dev/null +++ b/packages/datastore/__tests__/custom-pk-typings/model-init-mutable-model-typings/observe-all.test.tsx @@ -0,0 +1,16 @@ +import { + PersistentModel, + PersistentModelConstructor, + __modelMeta__, +} from '../../../src'; +import { DataStore, expectType } from '../../helpers'; + +test('Observe all', () => { + DataStore.observe().subscribe(({ model, element }) => { + expectType>(model); + expectType(element); + + element.id; + element.anything; + }); +}); diff --git a/packages/datastore/__tests__/custom-pk-typings/model-init-mutable-model-typings/optionally-managed-identifier.test.tsx b/packages/datastore/__tests__/custom-pk-typings/model-init-mutable-model-typings/optionally-managed-identifier.test.tsx new file mode 100644 index 00000000000..ddd82b9df3b --- /dev/null +++ 
b/packages/datastore/__tests__/custom-pk-typings/model-init-mutable-model-typings/optionally-managed-identifier.test.tsx @@ -0,0 +1,313 @@ +// TODO: Look at ts-expect-error once we move to TypeScript 3.9 or above +import { + ModelInit, + PersistentModelConstructor, + Predicates, + __modelMeta__, +} from '../../../src'; +import { + DataStore, + dummyInstance, + expectType, + OptionallyManagedCustomRO, + OptionallyManagedDefaultRO, +} from '../../helpers'; + +describe('Optionally Managed Identifier', () => { + test(`OptionallyManagedDefaultRO`, async () => { + expectType< + ModelInit< + OptionallyManagedDefaultRO, + OptionallyManagedDefaultRO[typeof __modelMeta__] + > + >({ + id: 'eeeeeee', + name: '', + description: '', + }); + + expectType< + ModelInit< + OptionallyManagedDefaultRO, + OptionallyManagedDefaultRO[typeof __modelMeta__] + > + >({ + name: '', + description: '', + // @ts-expect-error + // x: 234, + }); + + expectType< + ModelInit< + OptionallyManagedDefaultRO, + OptionallyManagedDefaultRO[typeof __modelMeta__] + > + >({ + name: '', + description: '', + // @ts-expect-error + // x: 234, + }); + + OptionallyManagedDefaultRO.copyOf({} as OptionallyManagedDefaultRO, d => { + d.id; + // @ts-expect-error + // d.id = ''; + + d.name = ''; + d.description = ''; + + d.createdAt; + // @ts-expect-error + // d.createdAt = ''; + + d.updatedAt; + // @ts-expect-error + // d.updatedAt = ''; + }); + + // Query + expectType( + await DataStore.query(OptionallyManagedDefaultRO, 'someid') + ); + expectType( + await DataStore.query(OptionallyManagedDefaultRO, { id: 'someid' }) + ); + expectType( + await DataStore.query(OptionallyManagedDefaultRO) + ); + expectType( + await DataStore.query(OptionallyManagedDefaultRO, Predicates.ALL) + ); + expectType( + await DataStore.query(OptionallyManagedDefaultRO, c => + c.createdAt('ge', '2019') + ) + ); + + // Save + expectType( + await DataStore.save(dummyInstance()) + ); + expectType( + await DataStore.save(dummyInstance(), c => + 
c.createdAt('ge', '2019') + ) + ); + + // Delete + expectType( + await DataStore.delete(OptionallyManagedDefaultRO, '') + ); + expectType( + await DataStore.delete(dummyInstance()) + ); + expectType( + await DataStore.delete(dummyInstance(), c => + c.description('contains', 'something') + ) + ); + expectType( + await DataStore.delete(OptionallyManagedDefaultRO, Predicates.ALL) + ); + expectType( + await DataStore.delete(OptionallyManagedDefaultRO, c => + c.createdAt('le', '2019') + ) + ); + + // Observe + DataStore.observe(OptionallyManagedDefaultRO).subscribe( + ({ model, element }) => { + expectType>( + model + ); + expectType(element); + } + ); + DataStore.observe(OptionallyManagedDefaultRO, c => + c.description('beginsWith', 'something') + ).subscribe(({ model, element }) => { + expectType>(model); + expectType(element); + }); + DataStore.observe(dummyInstance()).subscribe( + ({ model, element }) => { + expectType>( + model + ); + expectType(element); + } + ); + + // Observe query + DataStore.observeQuery(OptionallyManagedDefaultRO).subscribe( + ({ items }) => { + expectType(items); + } + ); + DataStore.observeQuery(OptionallyManagedDefaultRO, c => + c.description('notContains', 'something') + ).subscribe(({ items }) => { + expectType(items); + }); + DataStore.observeQuery( + OptionallyManagedDefaultRO, + c => c.description('notContains', 'something'), + { sort: c => c.createdAt('ASCENDING') } + ).subscribe(({ items }) => { + expectType(items); + }); + }); + + test(`OptionallyManagedCustomRO`, async () => { + expectType>({ + name: '', + description: '', + }); + + expectType< + ModelInit< + OptionallyManagedCustomRO, + OptionallyManagedCustomRO[typeof __modelMeta__] + > + >({ + id: 'eeeeeee', + name: '', + description: '', + }); + + expectType< + ModelInit< + OptionallyManagedCustomRO, + OptionallyManagedCustomRO[typeof __modelMeta__] + > + >({ + name: '', + description: '', + // @ts-expect-error + // x: 234, + }); + + expectType< + ModelInit< + 
OptionallyManagedCustomRO, + OptionallyManagedCustomRO[typeof __modelMeta__] + > + >({ + name: '', + description: '', + // @ts-expect-error + // x: 234, + }); + + OptionallyManagedCustomRO.copyOf({} as OptionallyManagedCustomRO, d => { + d.id; + // @ts-expect-error + // d.id = ''; + + d.name = ''; + d.description = ''; + + d.createdOn; + // @ts-expect-error + // d.createdOn = ''; + + d.updatedOn; + // @ts-expect-error + // d.updatedOn = ''; + }); + + // Query + expectType( + await DataStore.query(OptionallyManagedCustomRO, 'someid') + ); + expectType( + await DataStore.query(OptionallyManagedCustomRO, { id: 'someid' }) + ); + expectType( + await DataStore.query(OptionallyManagedCustomRO) + ); + expectType( + await DataStore.query(OptionallyManagedCustomRO, Predicates.ALL) + ); + expectType( + await DataStore.query(OptionallyManagedCustomRO, c => + c.createdOn('ge', '2019') + ) + ); + + // Save + expectType( + await DataStore.save(dummyInstance()) + ); + expectType( + await DataStore.save(dummyInstance(), c => + c.createdOn('ge', '2019') + ) + ); + + // Delete + expectType( + await DataStore.delete(OptionallyManagedCustomRO, '') + ); + expectType( + await DataStore.delete(dummyInstance()) + ); + expectType( + await DataStore.delete(dummyInstance(), c => + c.description('contains', 'something') + ) + ); + expectType( + await DataStore.delete(OptionallyManagedCustomRO, Predicates.ALL) + ); + expectType( + await DataStore.delete(OptionallyManagedCustomRO, c => + c.createdOn('le', '2019') + ) + ); + + // Observe + DataStore.observe(OptionallyManagedCustomRO).subscribe( + ({ model, element }) => { + expectType>( + model + ); + expectType(element); + } + ); + DataStore.observe(OptionallyManagedCustomRO, c => + c.description('beginsWith', 'something') + ).subscribe(({ model, element }) => { + expectType>(model); + expectType(element); + }); + DataStore.observe(dummyInstance()).subscribe( + ({ model, element }) => { + expectType>( + model + ); + expectType(element); + } + 
); + + // Observe query + DataStore.observeQuery(OptionallyManagedCustomRO).subscribe(({ items }) => { + expectType(items); + }); + DataStore.observeQuery(OptionallyManagedCustomRO, c => + c.description('notContains', 'something') + ).subscribe(({ items }) => { + expectType(items); + }); + DataStore.observeQuery( + OptionallyManagedCustomRO, + c => c.description('notContains', 'something'), + { sort: c => c.createdOn('ASCENDING') } + ).subscribe(({ items }) => { + expectType(items); + }); + }); +}); diff --git a/packages/datastore/__tests__/helpers.ts b/packages/datastore/__tests__/helpers.ts index d237cc7f53c..4131e08312e 100644 --- a/packages/datastore/__tests__/helpers.ts +++ b/packages/datastore/__tests__/helpers.ts @@ -1,10 +1,14 @@ +import Observable from 'zen-observable-ts'; +import { ModelInit, Schema, InternalSchema, __modelMeta__ } from '../src/types'; import { - ModelInit, + DataStore as DS, + CompositeIdentifier, + CustomIdentifier, + ManagedIdentifier, MutableModel, - Schema, - InternalSchema, - SchemaModel, -} from '../src/types'; + PersistentModel, + OptionallyManagedIdentifier, +} from '../src'; /** * Convenience function to wait for a number of ms. @@ -52,6 +56,14 @@ export function expectMutation(mutation, values) { } } +export function expectType(_param: T): _param is T { + return true; +} + +export function dummyInstance(): T { + return {}; +} + /** * Checks an object for adherence to expected values from a set of matchers. * Returns a list of erroneous key-value pairs. 
@@ -65,6 +77,8 @@ export function errorsFrom(data, matchers) { !( (typeof matcher === 'function' && matcher(value)) || (matcher instanceof RegExp && matcher.test(value)) || + (typeof matcher === 'object' && + JSON.stringify(value) === JSON.stringify(matcher)) || value === matcher ) ) { @@ -89,6 +103,28 @@ export function extraFieldsFrom(data, template) { return fields.filter(name => !expectedFields.has(name)); } +export const DataStore: typeof DS = (() => { + class clazz {} + + const proxy = new Proxy(clazz, { + get: (_, prop) => { + const p = prop as keyof typeof DS; + + switch (p) { + case 'query': + case 'save': + case 'delete': + return () => new Proxy({}, {}); + case 'observe': + case 'observeQuery': + return () => Observable.of(); + } + }, + }) as unknown as typeof DS; + + return proxy; +})(); + export declare class Model { public readonly id: string; public readonly field1: string; @@ -120,12 +156,26 @@ export declare class Metadata { export declare class Post { public readonly id: string; public readonly title: string; + + constructor(init: ModelInit); + + static copyOf( + src: Post, + mutator: (draft: MutableModel) => void | Post + ): Post; } export declare class Comment { public readonly id: string; public readonly content: string; public readonly post: Post; + + constructor(init: ModelInit); + + static copyOf( + src: Comment, + mutator: (draft: MutableModel) => void | Comment + ): Comment; } export declare class User { @@ -133,11 +183,25 @@ export declare class User { public readonly name: string; public readonly profile?: Profile; public readonly profileID?: string; + + constructor(init: ModelInit); + + static copyOf( + src: User, + mutator: (draft: MutableModel) => void | User + ): User; } export declare class Profile { public readonly id: string; public readonly firstName: string; public readonly lastName: string; + + constructor(init: ModelInit); + + static copyOf( + src: Profile, + mutator: (draft: MutableModel) => void | Profile + ): Profile; } 
export declare class PostComposite { @@ -146,27 +210,71 @@ export declare class PostComposite { public readonly description: string; public readonly created: string; public readonly sort: number; + + constructor(init: ModelInit); + + static copyOf( + src: PostComposite, + mutator: (draft: MutableModel) => void | PostComposite + ): PostComposite; } export declare class PostCustomPK { - public readonly id: string; - public readonly postId: number; + readonly [__modelMeta__]: { + identifier: CustomIdentifier; + }; + public readonly postId: string; public readonly title: string; public readonly description?: string; + public readonly dateCreated: string; + public readonly optionalField1?: string; + public readonly emails?: string[]; + public readonly createdAt?: string; + public readonly updatedAt?: string; + + constructor(init: ModelInit); + + static copyOf( + src: PostCustomPK, + mutator: (draft: MutableModel) => void | PostCustomPK + ): PostCustomPK; } export declare class PostCustomPKSort { - public readonly id: string; - public readonly postId: number; + readonly [__modelMeta__]: { + identifier: CompositeIdentifier; + }; + public readonly id: number | string; + public readonly postId: string; public readonly title: string; public readonly description?: string; + + constructor(init: ModelInit); + + static copyOf( + src: PostCustomPKSort, + mutator: (draft: MutableModel) => void | PostCustomPKSort + ): PostCustomPKSort; } + export declare class PostCustomPKComposite { + readonly [__modelMeta__]: { + identifier: CompositeIdentifier; + }; public readonly id: string; - public readonly postId: number; + public readonly postId: string; public readonly title: string; public readonly description?: string; public readonly sort: number; + + constructor(init: ModelInit); + + static copyOf( + src: PostCustomPKComposite, + mutator: ( + draft: MutableModel + ) => void | PostCustomPKComposite + ): PostCustomPKComposite; } export function testSchema(): Schema { @@ -493,17 +601,10 
@@ export function testSchema(): Schema { PostCustomPK: { name: 'PostCustomPK', fields: { - id: { - name: 'id', - isArray: false, - type: 'ID', - isRequired: true, - attributes: [], - }, postId: { name: 'postId', isArray: false, - type: 'Int', + type: 'String', isRequired: true, attributes: [], }, @@ -521,6 +622,37 @@ export function testSchema(): Schema { isRequired: false, attributes: [], }, + emails: { + name: 'emails', + isArray: true, + type: 'AWSEmail', + isRequired: true, + attributes: [], + isArrayNullable: true, + }, + createdAt: { + name: 'createdAt', + isArray: false, + type: 'AWSDateTime', + isRequired: false, + attributes: [], + isReadOnly: true, + }, + updatedAt: { + name: 'updatedAt', + isArray: false, + type: 'AWSDateTime', + isRequired: false, + attributes: [], + isReadOnly: true, + }, + dateCreated: { + name: 'dateCreated', + isArray: false, + type: 'AWSDateTime', + isRequired: true, + attributes: [], + }, }, syncable: true, pluralName: 'PostCustomPKS', @@ -550,7 +682,7 @@ export function testSchema(): Schema { postId: { name: 'postId', isArray: false, - type: 'Int', + type: 'String', isRequired: true, attributes: [], }, @@ -597,7 +729,7 @@ export function testSchema(): Schema { postId: { name: 'postId', isArray: false, - type: 'Int', + type: 'String', isRequired: true, attributes: [], }, @@ -1004,3 +1136,276 @@ export function smallTestSchema(): Schema { }, }; } + +//#region Types + +//#region Legacy + +export type LegacyCustomROMETA = { + readOnlyFields: 'createdOn' | 'updatedOn'; +}; + +export class LegacyCustomRO { + readonly id: string; + readonly name: string; + readonly description?: string; + readonly createdOn?: string; + readonly updatedOn?: string; + constructor(init: ModelInit) {} + static copyOf( + source: LegacyCustomRO, + mutator: ( + draft: MutableModel + ) => MutableModel | void + ): LegacyCustomRO { + return (undefined); + } +} + +export type LegacyDefaultROMETA = { + readOnlyFields: 'createdAt' | 'updatedAt'; +}; + +export class 
LegacyDefaultRO { + readonly id: string; + readonly name: string; + readonly description?: string; + readonly createdAt?: string; + readonly updatedAt?: string; + constructor(init: ModelInit) {} + static copyOf( + source: LegacyDefaultRO, + mutator: ( + draft: MutableModel + ) => MutableModel | void + ): LegacyDefaultRO { + return (undefined); + } +} + +export class LegacyNoMetadata { + readonly id: string; + readonly name: string; + readonly description?: string; + readonly createdAt?: string; + readonly updatedAt?: string; + constructor(init: ModelInit) {} + static copyOf( + source: LegacyNoMetadata, + mutator: ( + draft: MutableModel + ) => MutableModel | void + ): LegacyNoMetadata { + return (undefined); + } +} + +//#endregion + +//#region Managed + +export class ManagedCustomRO { + readonly [__modelMeta__]: { + identifier: ManagedIdentifier; + readOnlyFields: 'createdOn' | 'updatedOn'; + }; + readonly id: string; + readonly name: string; + readonly description?: string; + readonly createdOn?: string; + readonly updatedOn?: string; + constructor(init: ModelInit) {} + static copyOf( + source: ManagedCustomRO, + mutator: ( + draft: MutableModel + ) => MutableModel | void + ): ManagedCustomRO { + return (undefined); + } +} + +export class ManagedDefaultRO { + readonly [__modelMeta__]: { + identifier: ManagedIdentifier; + readOnlyFields: 'createdAt' | 'updatedAt'; + }; + readonly id: string; + readonly name: string; + readonly description?: string; + readonly createdAt?: string; + readonly updatedAt?: string; + constructor(init: ModelInit) {} + static copyOf( + source: ManagedDefaultRO, + mutator: ( + draft: MutableModel + ) => MutableModel | void + ): ManagedDefaultRO { + return (undefined); + } +} + +//#endregion + +//#region Optionally Managed + +export class OptionallyManagedCustomRO { + readonly [__modelMeta__]: { + identifier: OptionallyManagedIdentifier; + readOnlyFields: 'createdOn' | 'updatedOn'; + }; + readonly id: string; + readonly name: string; + 
readonly description?: string; + readonly createdOn?: string; + readonly updatedOn?: string; + constructor(init: ModelInit) {} + static copyOf( + source: OptionallyManagedCustomRO, + mutator: ( + draft: MutableModel + ) => MutableModel | void + ): OptionallyManagedCustomRO { + return (undefined); + } +} + +export class OptionallyManagedDefaultRO { + readonly [__modelMeta__]: { + identifier: OptionallyManagedIdentifier; + readOnlyFields: 'createdAt' | 'updatedAt'; + }; + readonly id: string; + readonly name: string; + readonly description?: string; + readonly createdAt?: string; + readonly updatedAt?: string; + constructor(init: ModelInit) {} + static copyOf( + source: OptionallyManagedDefaultRO, + mutator: ( + draft: MutableModel + ) => MutableModel | void + ): OptionallyManagedDefaultRO { + return (undefined); + } +} + +//#endregion + +//#region Composite + +export class CompositeCustomRO { + readonly [__modelMeta__]: { + identifier: CompositeIdentifier; + readOnlyFields: 'createdOn' | 'updatedOn'; + }; + readonly tenant: string; + readonly dob: string; + readonly name: string; + readonly description?: string; + readonly createdOn?: string; + readonly updatedOn?: string; + constructor(init: ModelInit) {} + static copyOf( + source: CompositeCustomRO, + mutator: ( + draft: MutableModel + ) => MutableModel | void + ): CompositeCustomRO { + return (undefined); + } +} + +export class CompositeDefaultRO { + readonly [__modelMeta__]: { + identifier: CompositeIdentifier; + readOnlyFields: 'createdAt' | 'updatedAt'; + }; + readonly tenant: string; + readonly dob: string; + readonly name: string; + readonly description?: string; + readonly createdAt?: string; + readonly updatedAt?: string; + constructor(init: ModelInit) {} + static copyOf( + source: CompositeDefaultRO, + mutator: ( + draft: MutableModel + ) => MutableModel | void + ): CompositeDefaultRO { + return (undefined); + } +} + +//#endregion + +//#region Custom + +export class CustomIdentifierCustomRO { + readonly 
[__modelMeta__]: { + identifier: CustomIdentifier; + readOnlyFields: 'createdOn' | 'updatedOn'; + }; + readonly myId: string; + readonly name: string; + readonly description?: string; + readonly createdOn: string; + readonly updatedOn: string; + constructor(init: ModelInit) {} + static copyOf( + source: CustomIdentifierCustomRO, + mutator: ( + draft: MutableModel + ) => MutableModel | void + ): CustomIdentifierCustomRO { + return (undefined); + } +} + +export class CustomIdentifierDefaultRO { + readonly [__modelMeta__]: { + identifier: CustomIdentifier; + readOnlyFields: 'createdAt' | 'updatedAt'; + }; + readonly myId: string; + readonly name: string; + readonly description?: string; + readonly createdAt?: string; + readonly updatedAt?: string; + constructor(init: ModelInit) {} + static copyOf( + source: CustomIdentifierDefaultRO, + mutator: ( + draft: MutableModel + ) => MutableModel | void + ): CustomIdentifierDefaultRO { + return (undefined); + } +} + +export class CustomIdentifierNoRO { + readonly [__modelMeta__]: { + identifier: CustomIdentifier; + }; + readonly myId: string; + readonly name: string; + readonly description?: string; + readonly createdAt?: string; + readonly updatedAt?: string; + constructor(init: ModelInit) {} + static copyOf( + source: CustomIdentifierNoRO, + mutator: ( + draft: MutableModel + ) => MutableModel | void + ): CustomIdentifierDefaultRO { + return undefined; + } +} + +//#endregion + +//#endregion diff --git a/packages/datastore/__tests__/indexeddb.test.ts b/packages/datastore/__tests__/indexeddb.test.ts index 95538a5e050..96bb1cab2f4 100644 --- a/packages/datastore/__tests__/indexeddb.test.ts +++ b/packages/datastore/__tests__/indexeddb.test.ts @@ -16,6 +16,7 @@ import { Person, } from './model'; let db: idb.IDBPDatabase; +const DB_VERSION = 3; const indexedDB = require('fake-indexeddb'); const IDBKeyRange = require('fake-indexeddb/lib/FDBKeyRange'); @@ -31,7 +32,7 @@ describe('Indexed db storage test', () => { beforeAll(async () 
=> { await DataStore.start(); - db = await idb.openDB('amplify-datastore', 2); + db = await idb.openDB('amplify-datastore', DB_VERSION); }); beforeEach(async () => { @@ -52,6 +53,8 @@ describe('Indexed db storage test', () => { }); test('setup function', async () => { + expect.assertions(8); + const createdObjStores = db.objectStoreNames; const expectedStores = [ `${DATASTORE}_Setting`, @@ -80,8 +83,8 @@ describe('Indexed db storage test', () => { `${USER}_PostAuthorJoin` ); - expect(commentStore.rawIndexes.has('byId')).toBe(true); // checks byIdIndex - expect(postAuthorStore.rawIndexes.has('byId')).toBe(true); // checks byIdIndex + expect(commentStore.rawIndexes.has('byPk')).toBe(true); // checks byPkIndex + expect(postAuthorStore.rawIndexes.has('byPk')).toBe(true); // checks byPkIndex expect(commentStore.rawIndexes.has('commentPostId')).toBe(true); // checks 1:M expect(postAuthorStore.rawIndexes.has('postId')).toBe(true); // checks M:M expect(postAuthorStore.rawIndexes.has('authorId')).toBe(true); // checks M:M @@ -94,8 +97,8 @@ describe('Indexed db storage test', () => { const get1 = await db .transaction(`${USER}_Blog`, 'readonly') .objectStore(`${USER}_Blog`) - .index('byId') - .get(blog.id); + .index('byPk') + .get([blog.id]); expect(get1).toBeDefined(); @@ -108,8 +111,8 @@ describe('Indexed db storage test', () => { const get2 = await db .transaction(`${USER}_BlogOwner`, 'readonly') .objectStore(`${USER}_BlogOwner`) - .index('byId') - .get(owner.id); + .index('byPk') + .get([owner.id]); expect([...Object.keys(owner)].sort()).toEqual( expect.arrayContaining(Object.keys(get2).sort()) @@ -119,8 +122,8 @@ describe('Indexed db storage test', () => { const get3 = await db .transaction(`${USER}_Blog`, 'readonly') .objectStore(`${USER}_Blog`) - .index('byId') - .get(blog2.id); + .index('byPk') + .get([blog2.id]); expect([...Object.keys(blog2).sort(), 'blogOwnerId']).toEqual( expect.arrayContaining(Object.keys(get3).sort()) @@ -145,8 +148,8 @@ describe('Indexed db 
storage test', () => { const postFromDB = await db .transaction(`${USER}_Post`, 'readonly') .objectStore(`${USER}_Post`) - .index('byId') - .get(p.id); + .index('byPk') + .get([p.id]); expect(postFromDB.metadata).toMatchObject({ rating: 3, @@ -168,8 +171,8 @@ describe('Indexed db storage test', () => { const getComment = await db .transaction(`${USER}_Comment`, 'readonly') .objectStore(`${USER}_Comment`) - .index('byId') - .get(c1.id); + .index('byPk') + .get([c1.id]); expect([...Object.keys(c1), 'commentPostId'].sort()).toEqual( expect.arrayContaining(Object.keys(getComment).sort()) @@ -179,12 +182,12 @@ describe('Indexed db storage test', () => { .transaction(`${USER}_Comment`, 'readonly') .objectStore(`${USER}_Comment`) .index('commentPostId') - .get(p.id); + .get([p.id]); expect(checkIndex['commentPostId']).toEqual(p.id); }); - test('save function M:M insert', async () => { + test('save function M:N insert', async () => { const post = new Post({ title: 'Avatar', blog, @@ -194,8 +197,8 @@ describe('Indexed db storage test', () => { const getPost = await db .transaction(`${USER}_Post`, 'readonly') .objectStore(`${USER}_Post`) - .index('byId') - .get(post.id); + .index('byPk') + .get([post.id]); expect(getPost.author).toBeUndefined(); @@ -208,15 +211,15 @@ describe('Indexed db storage test', () => { const getA1 = await db .transaction(`${USER}_Author`, 'readonly') .objectStore(`${USER}_Author`) - .index('byId') - .get(a1.id); + .index('byPk') + .get([a1.id]); expect(getA1.name).toEqual('author1'); const getA2 = await db .transaction(`${USER}_Author`, 'readonly') .objectStore(`${USER}_Author`) - .index('byId') - .get(a2.id); + .index('byPk') + .get([a2.id]); expect(getA2.name).toEqual('author2'); await DataStore.save(new PostAuthorJoin({ post, author: a1 })); @@ -230,7 +233,7 @@ describe('Indexed db storage test', () => { .transaction(`${USER}_PostAuthorJoin`) .objectStore(`${USER}_PostAuthorJoin`) .index('postId') - .getAll(post.id); + .getAll([post.id]); 
expect(getAuthors).toHaveLength(2); }); @@ -242,8 +245,8 @@ describe('Indexed db storage test', () => { const get1 = await db .transaction(`${USER}_Blog`, 'readonly') .objectStore(`${USER}_Blog`) - .index('byId') - .get(blog.id); + .index('byPk') + .get([blog.id]); expect(get1['blogOwnerId']).toBe(owner.id); const updated = Blog.copyOf(blog, draft => { @@ -254,8 +257,8 @@ describe('Indexed db storage test', () => { const get2 = await db .transaction(`${USER}_Blog`, 'readonly') .objectStore(`${USER}_Blog`) - .index('byId') - .get(blog.id); + .index('byPk') + .get([blog.id]); expect(get2.name).toEqual(updated.name); }); @@ -276,7 +279,9 @@ describe('Indexed db storage test', () => { }); }); - test('query M:1 eager load', async () => { + test('query 1:M eager load', async () => { + expect.assertions(1); + const p = new Post({ title: 'Avatar', blog, @@ -294,6 +299,8 @@ describe('Indexed db storage test', () => { }); test('query with sort on a single field', async () => { + expect.assertions(4); + const p1 = new Person({ firstName: 'John', lastName: 'Snow', @@ -332,6 +339,8 @@ describe('Indexed db storage test', () => { }); test('query with sort on multiple fields', async () => { + expect.assertions(3); + const p1 = new Person({ firstName: 'John', lastName: 'Snow', @@ -373,6 +382,8 @@ describe('Indexed db storage test', () => { }); test('delete 1:1 function', async () => { + expect.assertions(5); + await DataStore.save(blog); await DataStore.save(owner); @@ -400,7 +411,9 @@ describe('Indexed db storage test', () => { expect(await DataStore.query(Blog, blog3.id)).toBeUndefined(); }); - test('delete M:1 function', async () => { + test('delete 1:M function', async () => { + expect.assertions(2); + const post = new Post({ title: 'Avatar', blog, @@ -415,11 +428,13 @@ describe('Indexed db storage test', () => { await DataStore.delete(Comment, c1.id); - expect(await DataStore.query(Comment, c1.id)).toBeUndefined; - expect((await DataStore.query(Comment, 
c2.id)).id).toEqual(c2.id); + expect(await DataStore.query(Comment, c1.id)).toBeUndefined(); + expect((await DataStore.query(Comment, c2.id))?.id).toEqual(c2.id); }); test('delete 1:M function', async () => { + expect.assertions(4); + const post = new Post({ title: 'Avatar 1', blog, @@ -441,13 +456,16 @@ describe('Indexed db storage test', () => { await DataStore.save(c3); await DataStore.delete(Post, post.id); + expect(await DataStore.query(Comment, c1.id)).toBeUndefined(); expect(await DataStore.query(Comment, c2.id)).toBeUndefined(); - expect((await DataStore.query(Comment, c3.id)).id).toEqual(c3.id); + expect((await DataStore.query(Comment, c3.id))?.id).toEqual(c3.id); expect(await DataStore.query(Post, post.id)).toBeUndefined(); }); - test('delete M:M function', async () => { + test('delete M:N function', async () => { + expect.assertions(1); + const a1 = new Author({ name: 'author1' }); const a2 = new Author({ name: 'author2' }); const a3 = new Author({ name: 'author3' }); @@ -476,6 +494,8 @@ describe('Indexed db storage test', () => { }); test('delete cascade', async () => { + expect.assertions(9); + const a1 = await DataStore.save(new Author({ name: 'author1' })); const a2 = await DataStore.save(new Author({ name: 'author2' })); const blog = new Blog({ @@ -508,11 +528,13 @@ describe('Indexed db storage test', () => { .transaction(`${USER}_PostAuthorJoin`, 'readonly') .objectStore(`${USER}_PostAuthorJoin`) .index('postId') - .getAll(p1.id); + .getAll([p1.id]); expect(refResult).toHaveLength(0); }); test('delete non existent', async () => { + expect.assertions(2); + const author = new Author({ name: 'author1' }); const deleted = await DataStore.delete(author); @@ -534,7 +556,7 @@ describe('DB versions migration', () => { await DataStore.clear(); }); - test('Migration from v1 to v2', async () => { + test(`Migration from v1 to v${DB_VERSION}`, async () => { const v1Data = require('./v1schema.data.json'); const blob = new Blob([JSON.stringify(v1Data)], { @@ 
-544,11 +566,11 @@ describe('DB versions migration', () => { // Import V1 (await Dexie.import(blob)).close(); - // Migrate to V2 + // Migrate to latest await DataStore.start(); - // Open V2 - db = await idb.openDB('amplify-datastore', 2); + // Open latest + db = await idb.openDB('amplify-datastore', DB_VERSION); expect([...db.objectStoreNames].sort()).toMatchObject( [ @@ -561,7 +583,7 @@ describe('DB versions migration', () => { ); for (const storeName of db.objectStoreNames) { - expect(db.transaction(storeName).store.indexNames).toContain('byId'); + expect(db.transaction(storeName).store.indexNames).toContain('byPk'); } const dexie = await new Dexie('amplify-datastore').open(); @@ -580,10 +602,12 @@ describe('DB versions migration', () => { const exportedJSON = await readBlob(exportedBlob); const exported = JSON.parse(exportedJSON); + console.log(exported.data.tables); + for (const { schema } of exported.data.tables) { - expect(schema.split(',')).toContain('&id'); + expect(schema.split(',')).toContain('&[id]'); } - expect(exported).toMatchSnapshot('v2-schema'); + expect(exported).toMatchSnapshot(`v${DB_VERSION}-schema`); }); }); diff --git a/packages/datastore/__tests__/model.ts b/packages/datastore/__tests__/model.ts index fe38f3d4e7e..d34d0a9a164 100644 --- a/packages/datastore/__tests__/model.ts +++ b/packages/datastore/__tests__/model.ts @@ -1,10 +1,10 @@ import { ModelInit, MutableModel, + initSchema, + NonModelTypeConstructor, PersistentModelConstructor, -} from '@aws-amplify/datastore'; - -import { initSchema, NonModelTypeConstructor } from '../src/index'; +} from '../src/index'; import { newSchema } from './schema'; declare class BlogModel { @@ -38,12 +38,12 @@ declare class PostMetadataType { readonly rating: number; readonly tags?: string[]; readonly nested?: NestedType; - constructor(init: ModelInit); + constructor(init: PostMetadataType); } declare class NestedType { readonly aField: string; - constructor(init: ModelInit); + constructor(init: 
NestedType); } declare class CommentModel { @@ -103,6 +103,13 @@ declare class PersonModel { readonly firstName: string; readonly lastName: string; readonly username?: string; + constructor(init: ModelInit); + static copyOf( + source: PersonModel, + mutator: ( + draft: MutableModel + ) => MutableModel | void + ): PersonModel; } const { @@ -126,7 +133,7 @@ const { PostMetadata: NonModelTypeConstructor; Nested: NonModelTypeConstructor; }; -``; + export { Author, Post, diff --git a/packages/datastore/__tests__/mutation.test.ts b/packages/datastore/__tests__/mutation.test.ts index 2795d052de6..083eafbd62e 100644 --- a/packages/datastore/__tests__/mutation.test.ts +++ b/packages/datastore/__tests__/mutation.test.ts @@ -110,8 +110,9 @@ describe('MutationProcessor', () => { it('Should correctly generate delete mutation input for models with a custom PK', async () => { // custom PK @key(fields: ["postId"]) const deletePost = new PostCustomPK({ - postId: 100, + postId: '100', title: 'Title', + dateCreated: new Date().toISOString(), }); const { data } = await createMutationEvent(deletePost, OpType.DELETE); @@ -124,14 +125,15 @@ describe('MutationProcessor', () => { '{}' ); - expect(input.postId).toEqual(100); + expect(input.postId).toEqual('100'); expect(input.id).toBeUndefined(); }); it('Should correctly generate delete mutation input for models with a custom PK - multi-field', async () => { // multi-key PK @key(fields: ["id", "postId"]) const deletePost = new PostCustomPKSort({ - postId: 100, + id: 'abcdef', + postId: '100', title: 'Title', }); @@ -145,8 +147,23 @@ describe('MutationProcessor', () => { '{}' ); - expect(input.id).toEqual(deletePost.id); - expect(input.postId).toEqual(100); + expect(input.id).toEqual('abcdef'); + expect(input.postId).toEqual('100'); + }); + }); + describe('Call to rest api', () => { + it('Should send a user agent with the datastore suffix the rest api request', async () => { + jest.spyOn(mutationProcessor, 'resume'); + await 
mutationProcessor.resume(); + + expect(mockRestPost).toBeCalledWith( + expect.anything(), + expect.objectContaining({ + headers: expect.objectContaining({ + 'x-amz-user-agent': `${Constants.userAgent}${USER_AGENT_SUFFIX_DATASTORE}`, + }), + }) + ); }); }); describe('Call to rest api', () => { diff --git a/packages/datastore/__tests__/outbox.test.ts b/packages/datastore/__tests__/outbox.test.ts index d51363bb1e7..cfe4c7dee3f 100644 --- a/packages/datastore/__tests__/outbox.test.ts +++ b/packages/datastore/__tests__/outbox.test.ts @@ -18,6 +18,7 @@ import { SchemaModel, } from '../src/types'; import { MutationEvent } from '../src/sync/'; +import { USER, extractPrimaryKeyFieldNames } from '../src/util'; let initSchema: typeof initSchemaType; // using to access private members @@ -31,6 +32,13 @@ let Model: PersistentModelConstructor; const schema: InternalSchema = internalTestSchema(); +const getModelDefinition = ( + modelConstructor: PersistentModelConstructor +): SchemaModel => { + const modelDefinition = schema.namespaces[USER].models[modelConstructor.name]; + return modelDefinition; +}; + describe('Outbox tests', () => { let modelId: string; @@ -105,8 +113,12 @@ describe('Outbox tests', () => { expect(head.modelId).toEqual(modelId); expect(head.operation).toEqual(TransformerMutationType.UPDATE); expect(modelData.field1).toEqual('another value'); - - const mutationsForModel = await outbox.getForModel(s, last); + const modelDefinition = getModelDefinition(last); + const mutationsForModel = await outbox.getForModel( + s, + last, + modelDefinition + ); expect(mutationsForModel.length).toEqual(1); }); @@ -125,9 +137,15 @@ describe('Outbox tests', () => { await outbox.enqueue(Storage, await createMutationEvent(updatedModel3)); + const modelDefinition = getModelDefinition(last); + // model2 should get deleted when model3 is enqueued, so we're expecting to see // 2 items in the queue for this Model total (including the in progress record - updatedModel1) - const 
mutationsForModel = await outbox.getForModel(Storage, last); + const mutationsForModel = await outbox.getForModel( + Storage, + last, + modelDefinition + ); expect(mutationsForModel.length).toEqual(2); const [_inProgress, nextMutation] = mutationsForModel; @@ -207,7 +225,12 @@ describe('Outbox tests', () => { expect(head.operation).toEqual(TransformerMutationType.UPDATE); expect(modelData.field1).toEqual('another value'); - const mutationsForModel = await outbox.getForModel(s, last); + const modelDefinition = getModelDefinition(last); + const mutationsForModel = await outbox.getForModel( + s, + last, + modelDefinition + ); expect(mutationsForModel.length).toEqual(1); }); @@ -218,9 +241,14 @@ describe('Outbox tests', () => { }); await outbox.enqueue(Storage, await createMutationEvent(updatedModel2)); + const modelDefinition = getModelDefinition(last); // 2 items in the queue for this Model total (including the in progress record - updatedModel1) - const mutationsForModel = await outbox.getForModel(Storage, last); + const mutationsForModel = await outbox.getForModel( + Storage, + last, + modelDefinition + ); expect(mutationsForModel.length).toEqual(2); const [_inProgress, nextMutation] = mutationsForModel; @@ -317,29 +345,15 @@ async function instantiateOutbox(): Promise { const MutationEvent = syncClasses[ 'MutationEvent' - ] as PersistentModelConstructor; + ] as PersistentModelConstructor; await DataStore.start(); Storage = DataStore.storage; anyStorage = Storage; - const namespaceResolver = - anyStorage.storage.namespaceResolver.bind(anyStorage); - ({ modelInstanceCreator } = anyStorage.storage); - const getModelDefinition = ( - modelConstructor: PersistentModelConstructor - ): SchemaModel => { - const namespaceName = namespaceResolver(modelConstructor); - - const modelDefinition = - schema.namespaces[namespaceName].models[modelConstructor.name]; - - return modelDefinition; - }; - const userClasses = {}; userClasses['Model'] = Model; @@ -383,6 +397,6 @@ async 
function processMutationResponse( const modelConstructor = Model as PersistentModelConstructor; const model = modelInstanceCreator(modelConstructor, record); - - await merger.merge(storage, model); + const modelDefinition = getModelDefinition(model); + await merger.merge(storage, model, modelDefinition); } diff --git a/packages/datastore/__tests__/storage.test.ts b/packages/datastore/__tests__/storage.test.ts index 15a5bbf7508..002e1a79009 100644 --- a/packages/datastore/__tests__/storage.test.ts +++ b/packages/datastore/__tests__/storage.test.ts @@ -9,14 +9,77 @@ import { Post, Comment, PostComposite, - PostCustomPK, - PostCustomPKSort, - PostCustomPKComposite, + PostCustomPK as PostCustomPKType, + PostCustomPKSort as PostCustomPKSortType, + PostCustomPKComposite as PostCustomPKCompositeType, testSchema, } from './helpers'; -let initSchema: typeof initSchemaType; -let DataStore: typeof DataStoreType; +function getDataStore() { + const { + initSchema, + DataStore, + }: { + initSchema: typeof initSchemaType; + DataStore: typeof DataStoreType; + } = require('../src/datastore/datastore'); + + const classes = initSchema(testSchema()); + const { + Model, + Post, + Comment, + PostComposite, + PostCustomPK, + PostCustomPKSort, + PostCustomPKComposite, + } = classes as { + Model: PersistentModelConstructor; + Post: PersistentModelConstructor; + Comment: PersistentModelConstructor; + PostComposite: PersistentModelConstructor; + PostCustomPK: PersistentModelConstructor; + PostCustomPKSort: PersistentModelConstructor; + PostCustomPKComposite: PersistentModelConstructor; + }; + + return { + DataStore, + Model, + Post, + Comment, + PostComposite, + PostCustomPK, + PostCustomPKSort, + PostCustomPKComposite, + }; +} + +/** + * Strip out schemaVersion save call that DS performs when starting. 
+ * Allows us to run any of the tests in isolation (i.e., .only on any test will work) + * + * @returns A flattened array of mock function calls + */ +function processZenPushCalls(zenNext): Array { + const { + mock: { calls }, + } = zenNext; + + if (!Array.isArray(calls)) { + return []; + } + + if (calls.length) { + const [[first]] = calls; + + if (first?.element?.key === 'schemaVersion') { + return calls.slice(1).flat(); + } + } + + return calls.flat(); +} describe('Storage tests', () => { describe('Update', () => { @@ -24,9 +87,6 @@ describe('Storage tests', () => { let zenNext; beforeEach(() => { - jest.resetModules(); - jest.resetAllMocks(); - zenNext = jest.fn(); jest.doMock('zen-push', () => { @@ -37,17 +97,15 @@ describe('Storage tests', () => { return zenPush; }); + }); - ({ initSchema, DataStore } = require('../src/datastore/datastore')); + afterEach(() => { + jest.resetModules(); + jest.resetAllMocks(); }); test('scalar', async () => { - const classes = initSchema(testSchema()); - - const { Model } = classes as { - Model: PersistentModelConstructor; - }; - + const { DataStore, Model } = getDataStore(); const dateCreated = new Date().toISOString(); const model = await DataStore.save( @@ -63,7 +121,7 @@ describe('Storage tests', () => { }) ); - const [_settingsSave, [modelSave], [modelUpdate]] = zenNext.mock.calls; + const [modelSave, modelUpdate] = processZenPushCalls(zenNext); // Save should include expect(modelSave.element.dateCreated).toEqual(dateCreated); @@ -75,12 +133,7 @@ describe('Storage tests', () => { }); test('scalar - unchanged', async () => { - const classes = initSchema(testSchema()); - - const { Model } = classes as { - Model: PersistentModelConstructor; - }; - + const { DataStore, Model } = getDataStore(); const dateCreated = new Date().toISOString(); const model = await DataStore.save( @@ -96,20 +149,15 @@ describe('Storage tests', () => { }) ); - const [[_modelSave], modelUpdate] = zenNext.mock.calls; + const [_modelSave, modelUpdate] = 
processZenPushCalls(zenNext); - expect(modelUpdate).toBeUndefined(); expect(modelUpdate).toBeUndefined(); expect(true).toBeTruthy(); }); test('update by nulling previous value', async () => { - const classes = initSchema(testSchema()); - - const { Model } = classes as { - Model: PersistentModelConstructor; - }; + const { DataStore, Model } = getDataStore(); const model = await DataStore.save( new Model({ @@ -125,18 +173,13 @@ describe('Storage tests', () => { }) ); - const [[_modelSave], [modelUpdate]] = zenNext.mock.calls; + const [_modelSave, modelUpdate] = processZenPushCalls(zenNext); expect(modelUpdate.element.optionalField1).toBeNull(); }); test('updating value with undefined gets saved as null', async () => { - const classes = initSchema(testSchema()); - - const { Model } = classes as { - Model: PersistentModelConstructor; - }; - + const { DataStore, Model } = getDataStore(); const model = await DataStore.save( new Model({ field1: 'Some value', @@ -151,17 +194,13 @@ describe('Storage tests', () => { }) ); - const [[_modelSave], [modelUpdate]] = zenNext.mock.calls; + const [_modelSave, modelUpdate] = processZenPushCalls(zenNext); expect(modelUpdate.element.optionalField1).toBeNull(); }); test('list (destructured)', async () => { - const classes = initSchema(testSchema()); - - const { Model } = classes as { - Model: PersistentModelConstructor; - }; + const { DataStore, Model } = getDataStore(); const model = await DataStore.save( new Model({ @@ -177,7 +216,7 @@ describe('Storage tests', () => { }) ); - const [[_modelSave], [modelUpdate]] = zenNext.mock.calls; + const [_modelSave, modelUpdate] = processZenPushCalls(zenNext); const expectedValueEmails = [ 'john@doe.com', @@ -191,11 +230,7 @@ describe('Storage tests', () => { }); test('list (push)', async () => { - const classes = initSchema(testSchema()); - - const { Model } = classes as { - Model: PersistentModelConstructor; - }; + const { DataStore, Model } = getDataStore(); const model = await DataStore.save( 
new Model({ @@ -211,7 +246,7 @@ describe('Storage tests', () => { }) ); - const [[_modelSave], [modelUpdate]] = zenNext.mock.calls; + const [_modelSave, modelUpdate] = processZenPushCalls(zenNext); const expectedValueEmails = [ 'john@doe.com', @@ -225,11 +260,7 @@ describe('Storage tests', () => { }); test('update with changed field and list unchanged', async () => { - const classes = initSchema(testSchema()); - - const { Model } = classes as { - Model: PersistentModelConstructor; - }; + const { DataStore, Model } = getDataStore(); const model = await DataStore.save( new Model({ @@ -247,7 +278,7 @@ describe('Storage tests', () => { }) ); - const [[_modelSave], [modelUpdate]] = zenNext.mock.calls; + const [_modelSave, modelUpdate] = processZenPushCalls(zenNext); expect(modelUpdate.element.dateCreated).toBeUndefined(); expect(modelUpdate.element.field1).toEqual('Updated value'); @@ -255,11 +286,8 @@ describe('Storage tests', () => { }); test('update with list unchanged', async () => { - const classes = initSchema(testSchema()); - - const { Model } = classes as { - Model: PersistentModelConstructor; - }; + expect.assertions(1); + const { DataStore, Model } = getDataStore(); const model = await DataStore.save( new Model({ @@ -276,17 +304,13 @@ describe('Storage tests', () => { }) ); - const [[_modelSave], modelUpdate] = zenNext.mock.calls; + const [_modelSave, modelUpdate] = processZenPushCalls(zenNext); expect(modelUpdate).toBeUndefined(); }); test('update by nulling list', async () => { - const classes = initSchema(testSchema()); - - const { Model } = classes as { - Model: PersistentModelConstructor; - }; + const { DataStore, Model } = getDataStore(); const model = await DataStore.save( new Model({ @@ -302,17 +326,13 @@ describe('Storage tests', () => { }) ); - const [[_modelSave], [modelUpdate]] = zenNext.mock.calls; + const [_modelSave, modelUpdate] = processZenPushCalls(zenNext); expect(modelUpdate.element.emails).toBeNull(); }); test('custom type (destructured)', 
async () => { - const classes = initSchema(testSchema()); - - const { Model } = classes as { - Model: PersistentModelConstructor; - }; + const { DataStore, Model } = getDataStore(); const model = await DataStore.save( new Model({ @@ -335,7 +355,7 @@ describe('Storage tests', () => { }) ); - const [[_modelSave], [modelUpdate]] = zenNext.mock.calls; + const [_modelSave, modelUpdate] = processZenPushCalls(zenNext); const expectedValueMetadata = { author: 'some author', @@ -351,11 +371,7 @@ describe('Storage tests', () => { }); test('custom type (accessor)', async () => { - const classes = initSchema(testSchema()); - - const { Model } = classes as { - Model: PersistentModelConstructor; - }; + const { DataStore, Model } = getDataStore(); const model = await DataStore.save( new Model({ @@ -375,7 +391,7 @@ describe('Storage tests', () => { }) ); - const [[_modelSave], [modelUpdate]] = zenNext.mock.calls; + const [_modelSave, modelUpdate] = processZenPushCalls(zenNext); const expectedValueMetadata = { author: 'some author', @@ -391,11 +407,7 @@ describe('Storage tests', () => { }); test('custom type unchanged', async () => { - const classes = initSchema(testSchema()); - - const { Model } = classes as { - Model: PersistentModelConstructor; - }; + const { DataStore, Model } = getDataStore(); const model = await DataStore.save( new Model({ @@ -420,7 +432,7 @@ describe('Storage tests', () => { }) ); - const [[_modelSave], [modelUpdate]] = zenNext.mock.calls; + const [_modelSave, modelUpdate] = processZenPushCalls(zenNext); expect(modelUpdate.element.dateCreated).toBeUndefined(); expect(modelUpdate.element.field1).toEqual('Updated value'); @@ -428,12 +440,7 @@ describe('Storage tests', () => { }); test('relation', async () => { - const classes = initSchema(testSchema()); - - const { Post, Comment } = classes as { - Post: PersistentModelConstructor; - Comment: PersistentModelConstructor; - }; + const { DataStore, Post, Comment } = getDataStore(); const post = await 
DataStore.save( new Post({ @@ -460,14 +467,14 @@ describe('Storage tests', () => { }) ); - const [, [commentSave], , [commentUpdate]] = zenNext.mock.calls; + const [_postSave, commentSave, _anotherPostSave, commentUpdate] = + processZenPushCalls(zenNext); expect(commentSave.element.postId).toEqual(post.id); expect(commentUpdate.element.postId).toEqual(anotherPost.id); }); test('composite key', async () => { - const classes = initSchema(testSchema()); // model has a GSI with a composite key defined: // @key(name: "titleSort", fields: ["title", "created", "sort"]) @@ -476,9 +483,7 @@ describe('Storage tests', () => { // updating the hash key [0] should NOT include the other fields in that key - const { PostComposite } = classes as { - PostComposite: PersistentModelConstructor; - }; + const { DataStore, PostComposite } = getDataStore(); const createdTimestamp = String(Date.now()); @@ -513,8 +518,8 @@ describe('Storage tests', () => { }) ); - const [, [postUpdate1], [postUpdate2], [postUpdate3]] = - zenNext.mock.calls; + const [_postSave, postUpdate1, postUpdate2, postUpdate3] = + processZenPushCalls(zenNext); expect(postUpdate1.element.title).toBeUndefined(); expect(postUpdate1.element.created).toEqual(createdTimestamp); @@ -533,19 +538,16 @@ describe('Storage tests', () => { }); test('custom pk', async () => { - const classes = initSchema(testSchema()); - // model has a custom pk defined via @key(fields: ["postId"]) // the PK should always be included in the mutation input - const { PostCustomPK } = classes as { - PostCustomPK: PersistentModelConstructor; - }; + const { DataStore, PostCustomPK } = getDataStore(); const post = await DataStore.save( new PostCustomPK({ - postId: 100, + postId: '100', title: 'New Post', description: 'Desc', + dateCreated: new Date().toISOString(), }) ); @@ -555,27 +557,23 @@ describe('Storage tests', () => { }) ); - const [, [postUpdate]] = zenNext.mock.calls; + const [_postSave, postUpdate] = processZenPushCalls(zenNext); - 
expect(postUpdate.element.postId).toEqual(100); + expect(postUpdate.element.postId).toEqual('100'); expect(postUpdate.element.title).toEqual('Updated'); expect(postUpdate.element.description).toBeUndefined(); }); test('custom pk - with sort', async () => { - const classes = initSchema(testSchema()); - // model has a custom pk (hk + sort key) defined via @key(fields: ["id", "postId"]) // all of the fields in the PK should always be included in the mutation input - const { PostCustomPKSort } = classes as { - PostCustomPKSort: PersistentModelConstructor; - }; + const { DataStore, PostCustomPKSort } = getDataStore(); const post = await DataStore.save( new PostCustomPKSort({ - postId: 100, + id: 'abcdef', + postId: '100', title: 'New Post', - description: 'Desc', }) ); @@ -585,25 +583,23 @@ describe('Storage tests', () => { }) ); - const [, [postUpdate]] = zenNext.mock.calls; + const [_postSave, postUpdate] = processZenPushCalls(zenNext); - expect(postUpdate.element.postId).toEqual(100); + expect(postUpdate.element.id).toEqual('abcdef'); + expect(postUpdate.element.postId).toEqual('100'); expect(postUpdate.element.title).toEqual('Updated'); expect(postUpdate.element.description).toBeUndefined(); }); test('custom pk - with composite', async () => { - const classes = initSchema(testSchema()); - // model has a custom pk (hk + composite key) defined via @key(fields: ["id", "postId", "sort"]) // all of the fields in the PK should always be included in the mutation input - const { PostCustomPKComposite } = classes as { - PostCustomPKComposite: PersistentModelConstructor; - }; + const { DataStore, PostCustomPKComposite } = getDataStore(); const post = await DataStore.save( new PostCustomPKComposite({ - postId: 100, + id: 'abcdef', + postId: '100', title: 'New Post', description: 'Desc', sort: 1, @@ -616,9 +612,10 @@ describe('Storage tests', () => { }) ); - const [, [postUpdate]] = zenNext.mock.calls; + const [_postSave, postUpdate] = processZenPushCalls(zenNext); - 
expect(postUpdate.element.postId).toEqual(100); + expect(postUpdate.element.id).toEqual('abcdef'); + expect(postUpdate.element.postId).toEqual('100'); expect(postUpdate.element.sort).toEqual(1); expect(postUpdate.element.title).toEqual('Updated'); expect(postUpdate.element.description).toBeUndefined(); diff --git a/packages/datastore/__tests__/sync.test.ts b/packages/datastore/__tests__/sync.test.ts index 4e5def45a31..c22e408e127 100644 --- a/packages/datastore/__tests__/sync.test.ts +++ b/packages/datastore/__tests__/sync.test.ts @@ -86,6 +86,39 @@ describe('Sync', () => { expect(data).toMatchSnapshot(); }); + it('custom pk: should return all data', async () => { + window.sessionStorage.setItem('datastorePartialData', 'true'); + const resolveResponse = { + data: { + syncPosts: { + items: [ + { + postId: '1', + title: 'Item 1', + }, + { + postId: '2', + title: 'Item 2', + }, + ], + }, + }, + }; + + const SyncProcessor = jitteredRetrySyncProcessorSetup({ + resolveResponse, + }); + + const data = await SyncProcessor.jitteredRetry({ + query: defaultQuery, + variables: defaultVariables, + opName: defaultOpName, + modelDefinition: defaultModelDefinition, + }); + + expect(data).toMatchSnapshot(); + }); + it('should return partial data and send Hub event when datastorePartialData is set', async () => { window.sessionStorage.setItem('datastorePartialData', 'true'); const rejectResponse = { diff --git a/packages/datastore/__tests__/util.test.ts b/packages/datastore/__tests__/util.test.ts index 97fc65966da..d305075b364 100644 --- a/packages/datastore/__tests__/util.test.ts +++ b/packages/datastore/__tests__/util.test.ts @@ -1,5 +1,7 @@ import { enablePatches, produce, Patch } from 'immer'; import { + extractKeyIfExists, + extractPrimaryKeyFieldNames, isAWSDate, isAWSDateTime, isAWSEmail, @@ -13,8 +15,15 @@ import { valuesEqual, processCompositeKeys, mergePatches, + extractPrimaryKeyValues, + isIdManaged, + isIdOptionallyManaged, + indexNameFromKeys, + keysEqual, } from 
'../src/util'; +import { testSchema } from './helpers'; + describe('datastore util', () => { test('validatePredicateField', () => { expect(validatePredicateField(undefined, 'contains', 'test')).toEqual( @@ -594,7 +603,6 @@ describe('datastore util', () => { expect(isAWSIPAddress(test)).toBe(false); }); }); - describe('mergePatches', () => { enablePatches(); test('merge patches with no conflict', () => { @@ -718,4 +726,118 @@ describe('datastore util', () => { ]); }); }); + describe('Key Utils', () => { + describe('extractKeyIfExists', () => { + const testUserSchema = testSchema(); + test('model definition with custom pk', () => { + const result = extractKeyIfExists(testUserSchema.models.PostCustomPK); + expect(result.properties.fields.length).toBe(1); + expect(result.properties.fields[0]).toBe('postId'); + expect(result.type).toBe('key'); + }); + test('model definition with custom pk + sk', () => { + const result = extractKeyIfExists( + testUserSchema.models.PostCustomPKSort + ); + expect(result.properties.fields.length).toBe(2); + expect(result.properties.fields[0]).toBe('id'); + expect(result.properties.fields[1]).toBe('postId'); + expect(result.type).toBe('key'); + }); + test('model definition with id', () => { + const result = extractKeyIfExists(testUserSchema.models.Model); + expect(result).toBeUndefined(); + }); + }); + describe('extractPrimaryKeyFieldNames', () => { + const testUserSchema = testSchema(); + test('model definition with custom pk', () => { + const result = extractPrimaryKeyFieldNames( + testUserSchema.models.PostCustomPK + ); + expect(result.length).toBe(1); + expect(result[0]).toBe('postId'); + }); + test('model definition with custom pk + sk', () => { + const result = extractPrimaryKeyFieldNames( + testUserSchema.models.PostCustomPKSort + ); + expect(result.length).toBe(2); + expect(result[0]).toBe('id'); + expect(result[1]).toBe('postId'); + }); + test('model definition with id', () => { + const result = 
extractPrimaryKeyFieldNames(testUserSchema.models.Model); + expect(result.length).toBe(1); + expect(result[0]).toBe('id'); + }); + }); + describe('extractPrimaryKeyValues', () => { + test('should extract key values from a model', () => { + const result = extractPrimaryKeyValues( + { + id: 'abcdef', + postId: '100', + title: 'New Post', + description: 'Desc', + sort: 1, + }, + ['id', 'postId', 'sort'] + ); + expect(result).toEqual(['abcdef', '100', 1]); + }); + }); + describe('isIdManaged', () => { + test('should return `false` for model with custom primary key', () => { + const testUserSchema = testSchema(); + const result = isIdManaged(testUserSchema.models.PostCustomPK); + expect(result).toEqual(false); + }); + test('should return `true` for model without custom primary key', () => { + const testUserSchema = testSchema(); + const result = isIdManaged(testUserSchema.models.Model); + expect(result).toEqual(true); + }); + }); + describe('isIdOptionallyManaged', () => { + test('should return `false` for model with custom primary key', () => { + const testUserSchema = testSchema(); + const result = isIdOptionallyManaged( + testUserSchema.models.PostCustomPK + ); + expect(result).toBeFalsy(); + }); + test('should return `false` for model without custom primary key', () => { + const testUserSchema = testSchema(); + const result = isIdOptionallyManaged(testUserSchema.models.Model); + expect(result).toBeFalsy(); + }); + }); + describe('indexNameFromKeys', () => { + test('should generate spinal-cased index name from key field names', () => { + const result = indexNameFromKeys(['customId', 'sortKey']); + expect(result).toEqual('customId-sortKey'); + }); + }); + describe('keysEqual', () => { + test('should return `false` when equal keys are not sequentially equal', () => { + const keys1 = ['id', 'sort']; + const keys2 = ['sort', 'id']; + const result = keysEqual(keys1, keys2); + expect(result).toBeFalsy(); + }); + test('should return `true` when equal keys are sequentially 
equal', () => { + const keys1 = ['id', 'sort']; + const keys2 = ['id', 'sort']; + const result = keysEqual(keys1, keys2); + expect(result).toBeTruthy(); + }); + test('should return `false` when keys are not of equal length', () => { + const keys1 = ['id', 'sort']; + const keys2 = ['id']; + const result = keysEqual(keys1, keys2); + expect(result).toBeFalsy(); + }); + }); + }); }); diff --git a/packages/datastore/src/datastore/datastore.ts b/packages/datastore/src/datastore/datastore.ts index bda297f6467..daf7d0a65b7 100644 --- a/packages/datastore/src/datastore/datastore.ts +++ b/packages/datastore/src/datastore/datastore.ts @@ -29,6 +29,7 @@ import { GraphQLScalarType, InternalSchema, isGraphQLScalarType, + isSchemaModelWithAttributes, ModelFieldType, ModelInit, ModelInstanceMetadata, @@ -57,10 +58,16 @@ import { isNonModelFieldType, isModelFieldType, ObserveQueryOptions, + ManagedIdentifier, + PersistentModelMetaData, + IdentifierFieldOrIdentifierObject, + __modelMeta__, + isIdentifierObject, AmplifyContext, } from '../types'; import { DATASTORE, + errorMessages, establishRelationAndKeys, exhaustiveCheck, isModelConstructor, @@ -73,9 +80,14 @@ import { registerNonModelClass, sortCompareFunction, DeferredCallbackResolver, + extractPrimaryKeyFieldNames, + extractPrimaryKeysAndValues, + isIdManaged, + isIdOptionallyManaged, validatePredicate, mergePatches, } from '../util'; +import { getIdentifierValue } from '../sync/utils'; setAutoFreeze(true); enablePatches(); @@ -85,11 +97,16 @@ const logger = new Logger('DataStore'); const ulid = monotonicUlidFactory(Date.now()); const { isNode } = JS.browserOrNode(); +type SettingMetaData = { + identifier: ManagedIdentifier; + readOnlyFields: never; +}; declare class Setting { - constructor(init: ModelInit); + public readonly [__modelMeta__]: SettingMetaData; + constructor(init: ModelInit); static copyOf( src: Setting, - mutator: (draft: MutableModel) => void | Setting + mutator: (draft: MutableModel) => void | Setting ): 
Setting; public readonly id: string; public readonly key: string; @@ -266,16 +283,15 @@ const createTypeClasses: ( export declare type ModelInstanceCreator = typeof modelInstanceCreator; -const instancesMetadata = new WeakSet< - ModelInit> ->(); -function modelInstanceCreator( +const instancesMetadata = new WeakSet>(); + +function modelInstanceCreator( modelConstructor: PersistentModelConstructor, - init: ModelInit & Partial + init: Partial ): T { instancesMetadata.add(init); - return new modelConstructor(init); + return new modelConstructor(>>init); } const validateModelFields = @@ -293,6 +309,14 @@ const validateModelFields = throw new Error(`Field ${name} is required`); } + if (isSchemaModelWithAttributes(modelDefinition) && !isIdManaged(modelDefinition)) { + const keys = extractPrimaryKeyFieldNames(modelDefinition); + if (keys.includes(k) && v === '') { + logger.error(errorMessages.idEmptyString, { k, value: v }); + throw new Error(errorMessages.idEmptyString); + } + } + if (isGraphQLScalarType(type)) { const jsType = GraphQLScalarType.getJSType(type); const validateScalar = GraphQLScalarType.getValidationFunction(type); @@ -403,7 +427,7 @@ const castInstanceType = ( return v; }; -const initializeInstance = ( +const initializeInstance = ( init: ModelInit, modelDefinition: SchemaModel | SchemaNonModel, draft: Draft @@ -427,31 +451,39 @@ const createModelClass = ( (draft: Draft) => { initializeInstance(init, modelDefinition, draft); + // model is initialized inside a DataStore component (e.g. by Sync Engine, Storage Engine, etc.) + const isInternallyInitialized = instancesMetadata.has(init); + const modelInstanceMetadata: ModelInstanceMetadata = - instancesMetadata.has(init) + isInternallyInitialized ? 
(init) : {}; - const { - id: _id, - _version, - _lastChangedAt, - _deleted, - } = modelInstanceMetadata; - - // instancesIds are set by modelInstanceCreator, it is accessible only internally - const isInternal = _id !== null && _id !== undefined; - - const id = isInternal - ? _id - : modelDefinition.syncable - ? uuid4() - : ulid(); - - if (!isInternal) { + + type ModelWithIDIdentifier = { id: string }; + + const { id: _id } = + modelInstanceMetadata as unknown as ModelWithIDIdentifier; + + if (isIdManaged(modelDefinition)) { + const isInternalModel = _id !== null && _id !== undefined; + + const id = isInternalModel + ? _id + : modelDefinition.syncable + ? uuid4() + : ulid(); + + ((draft)).id = id; + } else if (isIdOptionallyManaged(modelDefinition)) { + // only auto-populate if the id was not provided + ((draft)).id = draft.id || uuid4(); + } + + if (!isInternallyInitialized) { checkReadOnlyPropertyOnCreate(draft, modelDefinition); } - draft.id = id; + const { _version, _lastChangedAt, _deleted } = modelInstanceMetadata; if (modelDefinition.syncable) { draft._version = _version; @@ -476,8 +508,12 @@ const createModelClass = ( const model = produce( source, draft => { - fn(>(draft as unknown)); - draft.id = source.id; + fn(>draft); + + const keyNames = extractPrimaryKeyFieldNames(modelDefinition); + // Keys are immutable + keyNames.forEach(key => ((draft as Object)[key] = source[key])); + const modelValidator = validateModelFields(modelDefinition); Object.entries(draft).forEach(([k, v]) => { const parsedValue = castInstanceType(modelDefinition, k, v); @@ -489,6 +525,7 @@ const createModelClass = ( ); const hasExistingPatches = modelPatchesMap.has(source); + if (patches.length || hasExistingPatches) { if (hasExistingPatches) { const [existingPatches, existingSource] = modelPatchesMap.get(source); @@ -516,6 +553,7 @@ const createModelClass = ( } const instance = modelInstanceCreator(clazz, json); + const modelValidator = validateModelFields(modelDefinition); 
Object.entries(instance).forEach(([k, v]) => { @@ -563,7 +601,9 @@ const checkReadOnlyPropertyOnUpdate = ( }); }; -const createNonModelClass = (typeDefinition: SchemaNonModel) => { +const createNonModelClass = ( + typeDefinition: SchemaNonModel +) => { const clazz = >(class Model { constructor(init: ModelInit) { const instance = produce( @@ -647,7 +687,6 @@ async function checkSchemaVersion( const [schemaVersionSetting] = await s.query( Setting, ModelPredicateCreator.createFromExisting(modelDefinition, c => - // @ts-ignore Argument of type '"eq"' is not assignable to parameter of type 'never'. c.key('eq', SETTING_SCHEMA_VERSION) ), { page: 0, limit: 1 } @@ -724,12 +763,12 @@ class DataStore { private conflictHandler: ConflictHandler; private errorHandler: (error: SyncError) => void; private fullSyncInterval: number; - private initialized: Promise; + private initialized?: Promise; private initReject: Function; private initResolve: Function; private maxRecordsToSync: number; - private storage: Storage; - private sync: SyncEngine; + private storage?: Storage; + private sync?: SyncEngine; private syncPageSize: number; private syncExpressions: SyncExpression[]; private syncPredicates: WeakMap> = @@ -839,7 +878,10 @@ class DataStore { query: { ( modelConstructor: PersistentModelConstructor, - id: string + identifier: IdentifierFieldOrIdentifierObject< + T, + PersistentModelMetaData + > ): Promise; ( modelConstructor: PersistentModelConstructor, @@ -848,7 +890,10 @@ class DataStore { ): Promise; } = async ( modelConstructor: PersistentModelConstructor, - idOrCriteria?: string | ProducerModelPredicate | typeof PredicateAll, + identifierOrCriteria?: + | IdentifierFieldOrIdentifierObject> + | ProducerModelPredicate + | typeof PredicateAll, paginationProducer?: ProducerPaginationInput ): Promise => { await this.start(); @@ -862,28 +907,44 @@ class DataStore { throw new Error(msg); } - if (typeof idOrCriteria === 'string') { + if (typeof identifierOrCriteria === 'string') { if 
(paginationProducer !== undefined) { logger.warn('Pagination is ignored when querying by id'); } } const modelDefinition = getModelDefinition(modelConstructor); + const keyFields = extractPrimaryKeyFieldNames(modelDefinition); + let predicate: ModelPredicate; - if (isQueryOne(idOrCriteria)) { - predicate = ModelPredicateCreator.createForId( + if (isQueryOne(identifierOrCriteria)) { + if (keyFields.length > 1) { + const msg = errorMessages.queryByPkWithCompositeKeyPresent; + logger.error(msg, { keyFields }); + + throw new Error(msg); + } + + predicate = ModelPredicateCreator.createForSingleField( modelDefinition, - idOrCriteria + keyFields[0], + identifierOrCriteria ); } else { - if (isPredicatesAll(idOrCriteria)) { + // Object is being queried using object literal syntax + if (isIdentifierObject(identifierOrCriteria, modelDefinition)) { + predicate = ModelPredicateCreator.createForPk( + modelDefinition, + identifierOrCriteria + ); + } else if (isPredicatesAll(identifierOrCriteria)) { // Predicates.ALL means "all records", so no predicate (undefined) predicate = undefined; } else { predicate = ModelPredicateCreator.createFromExisting( modelDefinition, - idOrCriteria + identifierOrCriteria ); } } @@ -913,7 +974,11 @@ class DataStore { pagination ); - return isQueryOne(idOrCriteria) ? result[0] : result; + const returnOne = + isQueryOne(identifierOrCriteria) || + isIdentifierObject(identifierOrCriteria, modelDefinition); + + return returnOne ? result[0] : result; }; save = async ( @@ -926,7 +991,7 @@ class DataStore { // Allows us to only include changed fields for updates const patchesTuple = modelPatchesMap.get(model); - const modelConstructor: PersistentModelConstructor = model + const modelConstructor: PersistentModelConstructor | undefined = model ? >model.constructor : undefined; @@ -941,15 +1006,15 @@ class DataStore { const producedCondition = ModelPredicateCreator.createFromExisting( modelDefinition, - condition + condition! 
); const [savedModel] = await this.storage.runExclusive(async s => { await s.save(model, producedCondition, undefined, patchesTuple); - return s.query( + return s.query( modelConstructor, - ModelPredicateCreator.createForId(modelDefinition, model.id) + ModelPredicateCreator.createForPk(modelDefinition, model) ); }); @@ -989,22 +1054,28 @@ class DataStore { }; delete: { - ( - model: T, - condition?: ProducerModelPredicate - ): Promise; ( modelConstructor: PersistentModelConstructor, - id: string + identifier: IdentifierFieldOrIdentifierObject< + T, + PersistentModelMetaData + > ): Promise; ( modelConstructor: PersistentModelConstructor, condition: ProducerModelPredicate | typeof PredicateAll ): Promise; + ( + model: T, + condition?: ProducerModelPredicate + ): Promise; } = async ( modelOrConstructor: T | PersistentModelConstructor, - idOrCriteria?: string | ProducerModelPredicate | typeof PredicateAll - ) => { + identifierOrCriteria?: + | IdentifierFieldOrIdentifierObject> + | ProducerModelPredicate + | typeof PredicateAll + ): Promise => { await this.start(); let condition: ModelPredicate; @@ -1016,31 +1087,50 @@ class DataStore { throw new Error(msg); } - if (isValidModelConstructor(modelOrConstructor)) { + if (isValidModelConstructor(modelOrConstructor)) { const modelConstructor = modelOrConstructor; - if (!idOrCriteria) { + if (!identifierOrCriteria) { const msg = 'Id to delete or criteria required. Do you want to delete all? 
Pass Predicates.ALL'; - logger.error(msg, { idOrCriteria }); + logger.error(msg, { identifierOrCriteria }); throw new Error(msg); } - if (typeof idOrCriteria === 'string') { - condition = ModelPredicateCreator.createForId( + const modelDefinition = getModelDefinition(modelConstructor); + + if (typeof identifierOrCriteria === 'string') { + const keyFields = extractPrimaryKeyFieldNames(modelDefinition); + + if (keyFields.length > 1) { + const msg = errorMessages.deleteByPkWithCompositeKeyPresent; + logger.error(msg, { keyFields }); + + throw new Error(msg); + } + + condition = ModelPredicateCreator.createForSingleField( getModelDefinition(modelConstructor), - idOrCriteria + keyFields[0], + identifierOrCriteria ); } else { - condition = ModelPredicateCreator.createFromExisting( - getModelDefinition(modelConstructor), - /** - * idOrCriteria is always a ProducerModelPredicate, never a symbol. - * The symbol is used only for typing purposes. e.g. see Predicates.ALL - */ - idOrCriteria as ProducerModelPredicate - ); + if (isIdentifierObject(identifierOrCriteria, modelDefinition)) { + condition = ModelPredicateCreator.createForPk( + modelDefinition, + identifierOrCriteria + ); + } else { + condition = ModelPredicateCreator.createFromExisting( + modelDefinition, + /** + * idOrCriteria is always a ProducerModelPredicate, never a symbol. + * The symbol is used only for typing purposes. e.g. 
see Predicates.ALL + */ + identifierOrCriteria as ProducerModelPredicate + ); + } if (!condition || !ModelPredicateCreator.isValidPredicate(condition)) { const msg = @@ -1068,22 +1158,24 @@ class DataStore { const modelDefinition = getModelDefinition(modelConstructor); - const idPredicate = ModelPredicateCreator.createForId( + const pkPredicate = ModelPredicateCreator.createForPk( modelDefinition, - model.id + model ); - if (idOrCriteria) { - if (typeof idOrCriteria !== 'function') { + if (identifierOrCriteria) { + if (typeof identifierOrCriteria !== 'function') { const msg = 'Invalid criteria'; - logger.error(msg, { idOrCriteria }); + logger.error(msg, { identifierOrCriteria }); throw new Error(msg); } - condition = idOrCriteria(idPredicate); + condition = (>identifierOrCriteria)( + pkPredicate + ); } else { - condition = idPredicate; + condition = pkPredicate; } const [[deleted]] = await this.storage.delete(model, condition); @@ -1095,20 +1187,28 @@ class DataStore { observe: { (): Observable>; - (model: T): Observable>; + ( + modelConstructor: PersistentModelConstructor, + identifier: string + ): Observable>; ( modelConstructor: PersistentModelConstructor, - criteria?: string | ProducerModelPredicate + criteria?: ProducerModelPredicate | typeof PredicateAll ): Observable>; - } = ( + + (model: T): Observable>; + } = ( modelOrConstructor?: T | PersistentModelConstructor, - idOrCriteria?: string | ProducerModelPredicate + identifierOrCriteria?: + | string + | ProducerModelPredicate + | typeof PredicateAll ): Observable> => { let predicate: ModelPredicate; - const modelConstructor: PersistentModelConstructor = - modelOrConstructor && isValidModelConstructor(modelOrConstructor) + const modelConstructor: PersistentModelConstructor | undefined = + modelOrConstructor && isValidModelConstructor(modelOrConstructor) ? 
modelOrConstructor : undefined; @@ -1118,10 +1218,10 @@ class DataStore { model && (Object.getPrototypeOf(model)).constructor; if (isValidModelConstructor(modelConstructor)) { - if (idOrCriteria) { + if (identifierOrCriteria) { logger.warn('idOrCriteria is ignored when using a model instance', { model, - idOrCriteria, + identifierOrCriteria, }); } @@ -1135,9 +1235,24 @@ class DataStore { } } - if (idOrCriteria !== undefined && modelConstructor === undefined) { + // observe should not accept object literal syntax + if ( + identifierOrCriteria && + modelConstructor && + isIdentifierObject( + identifierOrCriteria, + getModelDefinition(modelConstructor) + ) + ) { + const msg = errorMessages.observeWithObjectLiteral; + logger.error(msg, { objectLiteral: identifierOrCriteria }); + + throw new Error(msg); + } + + if (identifierOrCriteria !== undefined && modelConstructor === undefined) { const msg = 'Cannot provide criteria without a modelConstructor'; - logger.error(msg, idOrCriteria); + logger.error(msg, identifierOrCriteria); throw new Error(msg); } @@ -1148,18 +1263,26 @@ class DataStore { throw new Error(msg); } - if (typeof idOrCriteria === 'string') { - predicate = ModelPredicateCreator.createForId( + if (typeof identifierOrCriteria === 'string') { + const modelDefinition = getModelDefinition(modelConstructor); + const [keyField] = extractPrimaryKeyFieldNames(modelDefinition); + + predicate = ModelPredicateCreator.createForSingleField( getModelDefinition(modelConstructor), - idOrCriteria + keyField, + identifierOrCriteria ); } else { - predicate = - modelConstructor && - ModelPredicateCreator.createFromExisting( - getModelDefinition(modelConstructor), - idOrCriteria - ); + if (isPredicatesAll(identifierOrCriteria)) { + predicate = undefined; + } else { + predicate = + modelConstructor && + ModelPredicateCreator.createFromExisting( + getModelDefinition(modelConstructor), + identifierOrCriteria + ); + } } return new Observable>(observer => { @@ -1181,12 +1304,18 @@ 
class DataStore { let message = item; - // as lnog as we're not dealing with a DELETE, we need to fetch a fresh + // as long as we're not dealing with a DELETE, we need to fetch a fresh // item from storage to ensure it's fully populated. if (item.opType !== 'DELETE') { + const modelDefinition = getModelDefinition(item.model); + const keyFields = extractPrimaryKeyFieldNames(modelDefinition); + const primaryKeysAndValues = extractPrimaryKeysAndValues( + item.element, + keyFields + ); const freshElement = await this.query( item.model, - item.element.id + primaryKeysAndValues ); message = { ...message, @@ -1215,7 +1344,7 @@ class DataStore { criteria?: ProducerModelPredicate | typeof PredicateAll, paginationProducer?: ObserveQueryOptions ): Observable>; - } = ( + } = ( model: PersistentModelConstructor, criteria?: ProducerModelPredicate | typeof PredicateAll, options?: ObserveQueryOptions @@ -1254,9 +1383,12 @@ class DataStore { const sortOptions = sort ? { sort } : undefined; const modelDefinition = getModelDefinition(model); + const keyFields = extractPrimaryKeyFieldNames(modelDefinition); + if (isQueryOne(criteria)) { - predicate = ModelPredicateCreator.createForId( + predicate = ModelPredicateCreator.createForSingleField( modelDefinition, + keyFields[0], criteria ); } else { @@ -1278,9 +1410,11 @@ class DataStore { (async () => { try { // first, query and return any locally-available records - (await this.query(model, criteria, sortOptions)).forEach(item => - items.set(item.id, item) - ); + (await this.query(model, criteria, sortOptions)).forEach(item => { + const itemModelDefinition = getModelDefinition(model); + const idOrPk = getIdentifierValue(itemModelDefinition, item); + items.set(idOrPk, item); + }); // Observe the model and send a stream of updates (debounced). // We need to post-filter results instead of passing criteria through @@ -1288,19 +1422,21 @@ class DataStore { // We need to explicitly remove those items from the existing snapshot. 
handle = this.observe(model).subscribe( ({ element, model, opType }) => { + const itemModelDefinition = getModelDefinition(model); + const idOrPk = getIdentifierValue(itemModelDefinition, element); if ( hasPredicate && !validatePredicate(element, predicateGroupType, predicates) ) { if ( opType === 'UPDATE' && - (items.has(element.id) || itemsChanged.has(element.id)) + (items.has(idOrPk) || itemsChanged.has(idOrPk)) ) { // tracking as a "deleted item" will include the item in // page limit calculations and ensure it is removed from the // final items collection, regardless of which collection(s) // it is currently in. (I mean, it could be in both, right!?) - deletedItemIds.push(element.id); + deletedItemIds.push(idOrPk); } else { // ignore updates for irrelevant/filtered items. return; @@ -1312,9 +1448,9 @@ class DataStore { // in the `mergePage` method within src/sync/merger.ts. The final state of a model instance // depends on the LATEST record (for a given id). if (opType === 'DELETE') { - deletedItemIds.push(element.id); + deletedItemIds.push(idOrPk); } else { - itemsChanged.set(element.id, element); + itemsChanged.set(idOrPk, element); } const isSynced = this.sync?.getModelSyncedStatus(model) ?? 
false; @@ -1356,10 +1492,14 @@ class DataStore { } items.clear(); - itemsArray.forEach(item => items.set(item.id, item)); + itemsArray.forEach(item => { + const itemModelDefinition = getModelDefinition(model); + const idOrPk = getIdentifierValue(itemModelDefinition, item); + items.set(idOrPk, item); + }); // remove deleted items from the final result set - deletedItemIds.forEach(id => items.delete(id)); + deletedItemIds.forEach(idOrPk => items.delete(idOrPk)); return { items: Array.from(items.values()), @@ -1515,7 +1655,7 @@ class DataStore { this.storageAdapter || undefined; - this.sessionId = this.retrieveSessionId(); + this.sessionId = this.retrieveSessionId()!; }; clear = async function clear() { @@ -1549,7 +1689,7 @@ class DataStore { this.syncPredicates = new WeakMap>(); }; - stop = async function stop() { + stop = async function stop(this: InstanceType) { if (this.initialized !== undefined) { await this.start(); } @@ -1713,9 +1853,9 @@ class DataStore { return `${sessionId}-${appSyncId}`; } - } catch { - return undefined; - } + } catch {} + + return undefined; } } diff --git a/packages/datastore/src/predicates/index.ts b/packages/datastore/src/predicates/index.ts index caeeb4e4a46..7ee34b08c2f 100644 --- a/packages/datastore/src/predicates/index.ts +++ b/packages/datastore/src/predicates/index.ts @@ -8,7 +8,11 @@ import { ProducerModelPredicate, SchemaModel, } from '../types'; -import { exhaustiveCheck } from '../util'; +import { + exhaustiveCheck, + extractPrimaryKeyFieldNames, + extractPrimaryKeyValues, +} from '../util'; export { ModelSortPredicateCreator } from './sort'; @@ -85,7 +89,7 @@ export class ModelPredicateCreator { // Push the group to the top-level recorder ModelPredicateCreator.predicateGroupsMap - .get(receiver) + .get(receiver)! .predicates.push(group); return receiver; @@ -109,7 +113,7 @@ export class ModelPredicateCreator { operand: any ) => { ModelPredicateCreator.predicateGroupsMap - .get(receiver) + .get(receiver)! 
.predicates.push({ field, operator, operand }); return receiver; }; @@ -147,7 +151,7 @@ export class ModelPredicateCreator { // transforms cb-style predicate into Proxy static createFromExisting( modelDefinition: SchemaModel, - existing: ProducerModelPredicate + existing?: ProducerModelPredicate ) { if (!existing || !modelDefinition) { return undefined; @@ -158,13 +162,31 @@ export class ModelPredicateCreator { ); } - static createForId( + static createForSingleField( modelDefinition: SchemaModel, - id: string + fieldName: string, + value: string ) { - return ModelPredicateCreator.createPredicateBuilder(modelDefinition).id( - 'eq', - id - ); + return ModelPredicateCreator.createPredicateBuilder(modelDefinition)[ + fieldName + ]('eq', value); + } + + static createForPk( + modelDefinition: SchemaModel, + model: T + ) { + const keyFields = extractPrimaryKeyFieldNames(modelDefinition); + const keyValues = extractPrimaryKeyValues(model, keyFields); + + let modelPredicate = + ModelPredicateCreator.createPredicateBuilder(modelDefinition); + + keyFields.forEach((field, idx) => { + const operand = keyValues[idx]; + modelPredicate = modelPredicate[field]('eq', operand); + }); + + return modelPredicate; } } diff --git a/packages/datastore/src/storage/adapter/AsyncStorageAdapter.ts b/packages/datastore/src/storage/adapter/AsyncStorageAdapter.ts index b38724d03fc..bbd213201e5 100644 --- a/packages/datastore/src/storage/adapter/AsyncStorageAdapter.ts +++ b/packages/datastore/src/storage/adapter/AsyncStorageAdapter.ts @@ -22,6 +22,7 @@ import { RelationType, } from '../../types'; import { + DEFAULT_PRIMARY_KEY_VALUE_SEPARATOR, exhaustiveCheck, getIndex, getIndexFromAssociation, @@ -29,6 +30,11 @@ import { traverseModel, validatePredicate, sortCompareFunction, + keysEqual, + getStorename, + getIndexKeys, + extractPrimaryKeyValues, + IDENTIFIER_KEY_SEPARATOR, } from '../../util'; const logger = new Logger('DataStore'); @@ -52,13 +58,29 @@ export class AsyncStorageAdapter implements 
Adapter { const namespace = this.namespaceResolver(modelConstructor); const { name: modelName } = modelConstructor; - return this.getStorename(namespace, modelName); + return getStorename(namespace, modelName); } - private getStorename(namespace: string, modelName: string) { - const storeName = `${namespace}_${modelName}`; + // Retrieves primary key values from a model + private getIndexKeyValuesFromModel( + model: T + ): string[] { + const modelConstructor = Object.getPrototypeOf(model) + .constructor as PersistentModelConstructor; + const namespaceName = this.namespaceResolver(modelConstructor); + const keys = getIndexKeys( + this.schema.namespaces[namespaceName], + modelConstructor.name + ); + + return extractPrimaryKeyValues(model, keys); + } - return storeName; + // Retrieves concatenated primary key values from a model + private getIndexKeyValuesPath(model: T): string { + return this.getIndexKeyValuesFromModel(model).join( + DEFAULT_PRIMARY_KEY_VALUE_SEPARATOR + ); } async setUp( @@ -101,23 +123,32 @@ export class AsyncStorageAdapter implements Adapter { const modelConstructor = Object.getPrototypeOf(model) .constructor as PersistentModelConstructor; const storeName = this.getStorenameForModel(modelConstructor); + + const namespaceName = this.namespaceResolver(modelConstructor); + const connectedModels = traverseModel( modelConstructor.name, model, - this.schema.namespaces[this.namespaceResolver(modelConstructor)], + this.schema.namespaces[namespaceName], this.modelInstanceCreator, this.getModelConstructorByModelName ); - const namespaceName = this.namespaceResolver(modelConstructor); + const set = new Set(); const connectionStoreNames = Object.values(connectedModels).map( ({ modelName, item, instance }) => { - const storeName = this.getStorename(namespaceName, modelName); + const storeName = getStorename(namespaceName, modelName); set.add(storeName); - return { storeName, item, instance }; + const keys = getIndexKeys( + this.schema.namespaces[namespaceName], 
+ modelName + ); + return { storeName, item, instance, keys }; } ); - const fromDB = await this.db.get(model.id, storeName); + const keyValuesPath = this.getIndexKeyValuesPath(model); + + const fromDB = await this.db.get(keyValuesPath, storeName); if (condition && fromDB) { const predicates = ModelPredicateCreator.getPredicates(condition); @@ -136,14 +167,24 @@ export class AsyncStorageAdapter implements Adapter { const result: [T, OpType.INSERT | OpType.UPDATE][] = []; for await (const resItem of connectionStoreNames) { - const { storeName, item, instance } = resItem; - const { id } = item; + const { storeName, item, instance, keys } = resItem; + + /* Find the key values in the item, and concatenate them */ + const itemKeyValues: string[] = keys.map(key => item[key]); + const itemKeyValuesPath: string = itemKeyValues.join( + DEFAULT_PRIMARY_KEY_VALUE_SEPARATOR + ); - const fromDB = await this.db.get(id, storeName); + const fromDB = await this.db.get(itemKeyValuesPath, storeName); const opType: OpType = fromDB ? 
OpType.UPDATE : OpType.INSERT; + const modelKeyValues = this.getIndexKeyValuesFromModel(model); - if (id === model.id || opType === OpType.INSERT) { - await this.db.save(item, storeName); + // If item key values and model key values are equal, save to db + if ( + keysEqual(itemKeyValues, modelKeyValues) || + opType === OpType.INSERT + ) { + await this.db.save(item, storeName, keys, itemKeyValuesPath); result.push([instance, opType]); } @@ -160,7 +201,7 @@ export class AsyncStorageAdapter implements Adapter { const namespace = this.schema.namespaces[namespaceName]; const relations = namespace.relationships[srcModelName].relationTypes; const connectionStoreNames = relations.map(({ modelName }) => { - return this.getStorename(namespaceName, modelName); + return getStorename(namespaceName, modelName); }); const modelConstructor = this.getModelConstructorByModelName( namespaceName, @@ -174,8 +215,9 @@ export class AsyncStorageAdapter implements Adapter { } for await (const relation of relations) { - const { fieldName, modelName, targetName, relationType } = relation; - const storeName = this.getStorename(namespaceName, modelName); + const { fieldName, modelName, targetName, targetNames, relationType } = + relation; + const storeName = getStorename(namespaceName, modelName); const modelConstructor = this.getModelConstructorByModelName( namespaceName, modelName @@ -184,27 +226,81 @@ export class AsyncStorageAdapter implements Adapter { switch (relationType) { case 'HAS_ONE': for await (const recordItem of records) { - const getByfield = recordItem[targetName] ? 
targetName : fieldName; - if (!recordItem[getByfield]) break; + // ASYNC CPK TODO: make this cleaner + if (targetNames?.length) { + let getByFields = []; + let allPresent; + // iterate through all targetnames to make sure they are all present in the recordItem + allPresent = targetNames.every(targetName => { + return recordItem[targetName] != null; + }); - const connectionRecord = await this.db.get( - recordItem[getByfield], - storeName - ); + if (!allPresent) { + break; + } + + getByFields = targetNames as any; + + // keys are the key values + const keys = getByFields + .map(getByField => recordItem[getByField]) + .join(DEFAULT_PRIMARY_KEY_VALUE_SEPARATOR); + + const connectionRecord = await this.db.get(keys, storeName); - recordItem[fieldName] = - connectionRecord && - this.modelInstanceCreator(modelConstructor, connectionRecord); + recordItem[fieldName] = + connectionRecord && + this.modelInstanceCreator(modelConstructor, connectionRecord); + } else { + const getByfield = recordItem[targetName] + ? 
targetName + : fieldName; + if (!recordItem[getByfield]) break; + + const key = recordItem[getByfield]; + + const connectionRecord = await this.db.get(key, storeName); + + recordItem[fieldName] = + connectionRecord && + this.modelInstanceCreator(modelConstructor, connectionRecord); + } } break; case 'BELONGS_TO': for await (const recordItem of records) { - if (recordItem[targetName]) { - const connectionRecord = await this.db.get( - recordItem[targetName], - storeName - ); + // ASYNC CPK TODO: make this cleaner + if (targetNames?.length) { + let allPresent; + // iterate through all targetnames to make sure they are all present in the recordItem + allPresent = targetNames.every(targetName => { + return recordItem[targetName] != null; + }); + + // If not present, there is not yet a connected record + if (!allPresent) { + break; + } + + const keys = targetNames + .map(targetName => recordItem[targetName]) + .join(DEFAULT_PRIMARY_KEY_VALUE_SEPARATOR); + + // Retrieve the connected record + const connectionRecord = await this.db.get(keys, storeName); + + recordItem[fieldName] = + connectionRecord && + this.modelInstanceCreator(modelConstructor, connectionRecord); + + targetNames?.map(targetName => { + delete recordItem[targetName]; + }); + } else if (recordItem[targetName as any]) { + const key = recordItem[targetName]; + + const connectionRecord = await this.db.get(key, storeName); recordItem[fieldName] = connectionRecord && @@ -238,13 +334,19 @@ export class AsyncStorageAdapter implements Adapter { const predicates = predicate && ModelPredicateCreator.getPredicates(predicate); - const queryById = predicates && this.idFromPredicate(predicates); + const keys = getIndexKeys( + this.schema.namespaces[namespaceName], + modelConstructor.name + ); + const queryByKey = + predicates && this.keyValueFromPredicate(predicates, keys); + const hasSort = pagination && pagination.sort; const hasPagination = pagination && pagination.limit; const records: T[] = await (async () => { - 
if (queryById) { - const record = await this.getById(storeName, queryById); + if (queryByKey) { + const record = await this.getByKey(storeName, queryByKey); return record ? [record] : []; } @@ -264,11 +366,11 @@ export class AsyncStorageAdapter implements Adapter { return await this.load(namespaceName, modelConstructor.name, records); } - private async getById( + private async getByKey( storeName: string, - id: string + keyValuePath: string ): Promise { - const record = await this.db.get(id, storeName); + const record = await this.db.get(keyValuePath, storeName); return record; } @@ -278,17 +380,29 @@ export class AsyncStorageAdapter implements Adapter { return await this.db.getAll(storeName); } - private idFromPredicate( - predicates: PredicatesGroup - ) { + private keyValueFromPredicate( + predicates: PredicatesGroup, + keys: string[] + ): string | undefined { const { predicates: predicateObjs } = predicates; - const idPredicate = - predicateObjs.length === 1 && - (predicateObjs.find( - p => isPredicateObj(p) && p.field === 'id' && p.operator === 'eq' - ) as PredicateObject); - return idPredicate && idPredicate.operand; + if (predicateObjs.length !== keys.length) { + return; + } + + const keyValues = []; + + for (const key of keys) { + const predicateObj = predicateObjs.find( + p => isPredicateObj(p) && p.field === key && p.operator === 'eq' + ) as PredicateObject; + + predicateObj && keyValues.push(predicateObj.operand); + } + + return keyValues.length === keys.length + ? 
keyValues.join(DEFAULT_PRIMARY_KEY_VALUE_SEPARATOR) + : undefined; } private async filterOnPredicate( @@ -328,6 +442,7 @@ export class AsyncStorageAdapter implements Adapter { return records.slice(start, end); } + return records; } @@ -337,6 +452,7 @@ export class AsyncStorageAdapter implements Adapter { ): Promise { const storeName = this.getStorenameForModel(modelConstructor); const result = await this.db.getOne(firstOrLast, storeName); + return result && this.modelInstanceCreator(modelConstructor, result); } @@ -372,6 +488,7 @@ export class AsyncStorageAdapter implements Adapter { (acc, { items }) => acc.concat(items), [] ); + return [models, deletedModels]; } else { await this.deleteTraverse( @@ -396,12 +513,14 @@ export class AsyncStorageAdapter implements Adapter { const modelConstructor = Object.getPrototypeOf(model) .constructor as PersistentModelConstructor; - const nameSpace = this.namespaceResolver(modelConstructor); + const namespaceName = this.namespaceResolver(modelConstructor); const storeName = this.getStorenameForModel(modelConstructor); if (condition) { - const fromDB = await this.db.get(model.id, storeName); + const keyValuePath = this.getIndexKeyValuesPath(model); + + const fromDB = await this.db.get(keyValuePath, storeName); if (fromDB === undefined) { const msg = 'Model instance not found in storage'; @@ -422,25 +541,28 @@ export class AsyncStorageAdapter implements Adapter { } const relations = - this.schema.namespaces[nameSpace].relationships[modelConstructor.name] - .relationTypes; + this.schema.namespaces[namespaceName].relationships[ + modelConstructor.name + ].relationTypes; + await this.deleteTraverse( relations, [model], modelConstructor.name, - nameSpace, + namespaceName, deleteQueue ); } else { const relations = - this.schema.namespaces[nameSpace].relationships[modelConstructor.name] - .relationTypes; + this.schema.namespaces[namespaceName].relationships[ + modelConstructor.name + ].relationTypes; await this.deleteTraverse( 
relations, [model], modelConstructor.name, - nameSpace, + namespaceName, deleteQueue ); } @@ -465,8 +587,8 @@ export class AsyncStorageAdapter implements Adapter { for await (const item of items) { if (item) { if (typeof item === 'object') { - const id = item['id']; - await this.db.delete(id, storeName); + const keyValuesPath: string = this.getIndexKeyValuesPath(item as T); + await this.db.delete(keyValuesPath, storeName); } } } @@ -488,10 +610,16 @@ export class AsyncStorageAdapter implements Adapter { deleteQueue: { storeName: string; items: T[] }[] ): Promise { for await (const rel of relations) { - const { relationType, modelName, targetName } = rel; - const storeName = this.getStorename(nameSpace, modelName); - - const index: string = + const { + relationType, + modelName, + targetName, + targetNames, + associatedWith, + } = rel; + const storeName = getStorename(nameSpace, modelName); + + const index: string | undefined = getIndex( this.schema.namespaces[nameSpace].relationships[modelName] .relationTypes, @@ -508,35 +636,120 @@ export class AsyncStorageAdapter implements Adapter { switch (relationType) { case 'HAS_ONE': for await (const model of models) { - const hasOneIndex = index || 'byId'; - - const hasOneCustomField = targetName in model; - const value = hasOneCustomField ? 
model[targetName] : model.id; - if (!value) break; - - const allRecords = await this.db.getAll(storeName); - const recordToDelete = allRecords.filter( - childItem => childItem[hasOneIndex] === value - ); + if (targetNames && targetNames?.length) { + let hasOneIndex; + + if (index) { + hasOneIndex = index.split(IDENTIFIER_KEY_SEPARATOR); + } else if (associatedWith) { + if (Array.isArray(associatedWith)) { + hasOneIndex = associatedWith; + } else { + hasOneIndex = [associatedWith]; + } + } + + // iterate over targetNames array and see if each key is present in model object + // targetNames here being the keys for the CHILD model + const hasConnectedModelFields = targetNames.every(targetName => + model.hasOwnProperty(targetName) + ); - await this.deleteTraverse( - this.schema.namespaces[nameSpace].relationships[modelName] - .relationTypes, - recordToDelete, - modelName, - nameSpace, - deleteQueue - ); + // PK / Composite key for the parent model + const keyValuesPath: string = this.getIndexKeyValuesPath(model); + + let values; + + const isUnidirectionalConnection = hasOneIndex === associatedWith; + + if (hasConnectedModelFields && isUnidirectionalConnection) { + // Values will be that of the child model + values = targetNames.map( + targetName => model[targetName] + ) as any; + } else { + // values will be that of the parent model + values = keyValuesPath.split( + DEFAULT_PRIMARY_KEY_VALUE_SEPARATOR + ); + } + + if (values.length === 0) break; + + const allRecords = await this.db.getAll(storeName); + + let recordToDelete; + + // values === targetNames + if (hasConnectedModelFields) { + /** + * Retrieve record by finding the record where all + * targetNames are present on the connected model. 
+ * + */ + // recordToDelete = allRecords.filter(childItem => + // values.every(value => childItem[value] != null) + // ) as T[]; + + recordToDelete = allRecords.filter(childItem => + hasOneIndex.every(index => values.includes(childItem[index])) + ); + } else { + // values === keyValuePath + recordToDelete = allRecords.filter( + childItem => childItem[hasOneIndex] === values + ) as T[]; + } + + await this.deleteTraverse( + this.schema.namespaces[nameSpace].relationships[modelName] + .relationTypes, + recordToDelete, + modelName, + nameSpace, + deleteQueue + ); + } else { + const hasOneIndex = index || associatedWith; + const hasOneCustomField = targetName in model; + const keyValuesPath: string = this.getIndexKeyValuesPath(model); + const value = hasOneCustomField + ? model[targetName] + : keyValuesPath; + + if (!value) break; + + const allRecords = await this.db.getAll(storeName); + + const recordToDelete = allRecords.filter( + childItem => childItem[hasOneIndex as string] === value + ) as T[]; + + await this.deleteTraverse( + this.schema.namespaces[nameSpace].relationships[modelName] + .relationTypes, + recordToDelete, + modelName, + nameSpace, + deleteQueue + ); + } } break; case 'HAS_MANY': for await (const model of models) { + // Key values for the parent model: + const keyValues: string[] = this.getIndexKeyValuesFromModel(model); + const allRecords = await this.db.getAll(storeName); - const childrenArray = allRecords.filter( - childItem => childItem[index] === model.id - ); - await this.deleteTraverse( + const indices = index.split(IDENTIFIER_KEY_SEPARATOR); + + const childrenArray = allRecords.filter(childItem => + indices.every(index => keyValues.includes(childItem[index])) + ) as T[]; + + await this.deleteTraverse( this.schema.namespaces[nameSpace].relationships[modelName] .relationTypes, childrenArray, @@ -556,7 +769,7 @@ export class AsyncStorageAdapter implements Adapter { } deleteQueue.push({ - storeName: this.getStorename(nameSpace, srcModel), + 
storeName: getStorename(nameSpace, srcModel), items: models.map(record => this.modelInstanceCreator( this.getModelConstructorByModelName(nameSpace, srcModel), @@ -579,29 +792,32 @@ export class AsyncStorageAdapter implements Adapter { ): Promise<[T, OpType][]> { const { name: modelName } = modelConstructor; const namespaceName = this.namespaceResolver(modelConstructor); - const storeName = this.getStorename(namespaceName, modelName); - + const storeName = getStorename(namespaceName, modelName); + const keys = getIndexKeys(this.schema.namespaces[namespaceName], modelName); const batch: ModelInstanceMetadata[] = []; for (const item of items) { - const { id } = item; + const model = this.modelInstanceCreator(modelConstructor, item); const connectedModels = traverseModel( - modelConstructor.name, - this.modelInstanceCreator(modelConstructor, item), - this.schema.namespaces[this.namespaceResolver(modelConstructor)], + modelName, + model, + this.schema.namespaces[namespaceName], this.modelInstanceCreator, this.getModelConstructorByModelName ); - const { instance } = connectedModels.find( - ({ instance }) => instance.id === id - ); + const keyValuesPath = this.getIndexKeyValuesPath(model); + + const { instance } = connectedModels.find(({ instance }) => { + const instanceKeyValuesPath = this.getIndexKeyValuesPath(instance); + return keysEqual([instanceKeyValuesPath], [keyValuesPath]); + }); batch.push(instance); } - return await this.db.batchSave(storeName, batch); + return await this.db.batchSave(storeName, batch, keys); } } diff --git a/packages/datastore/src/storage/adapter/AsyncStorageDatabase.ts b/packages/datastore/src/storage/adapter/AsyncStorageDatabase.ts index 4b02bbf927f..f3a97442da9 100644 --- a/packages/datastore/src/storage/adapter/AsyncStorageDatabase.ts +++ b/packages/datastore/src/storage/adapter/AsyncStorageDatabase.ts @@ -6,7 +6,11 @@ import { PersistentModel, QueryOne, } from '../../types'; -import { monotonicUlidFactory } from '../../util'; +import { + 
DEFAULT_PRIMARY_KEY_VALUE_SEPARATOR, + indexNameFromKeys, + monotonicUlidFactory, +} from '../../util'; import { createInMemoryStore } from './InMemoryStore'; const DB_NAME = '@AmplifyDatastore'; @@ -23,6 +27,11 @@ class AsyncStorageDatabase { private storage = createInMemoryStore(); + /** + * Collection index is map of stores (i.e. sync, metadata, mutation event, and data) + * @param storeName {string} - Name of the store + * @returns Map of ulid->id + */ private getCollectionIndex(storeName: string) { if (!this._collectionInMemoryIndex.has(storeName)) { this._collectionInMemoryIndex.set(storeName, new Map()); @@ -31,6 +40,11 @@ class AsyncStorageDatabase { return this._collectionInMemoryIndex.get(storeName); } + /** + * Return ULID for store if it exists, otherwise create a new one + * @param storeName {string} - Name of the store + * @returns ulid + */ private getMonotonicFactory(storeName: string): ULID { if (!monotonicFactoriesMap.has(storeName)) { monotonicFactoriesMap.set(storeName, monotonicUlidFactory()); @@ -85,39 +99,63 @@ class AsyncStorageDatabase { } } - async save(item: T, storeName: string) { + async save( + item: T, + storeName: string, + keys: string[], + keyValuesPath: string + ) { + const idxName = indexNameFromKeys(keys); + const ulid = - this.getCollectionIndex(storeName).get(item.id) || + this.getCollectionIndex(storeName).get(idxName) || this.getMonotonicFactory(storeName)(); - const itemKey = this.getKeyForItem(storeName, item.id, ulid); + // Retrieve db key for item + const itemKey = this.getKeyForItem(storeName, keyValuesPath, ulid); - this.getCollectionIndex(storeName).set(item.id, ulid); + // Set key in collection index + this.getCollectionIndex(storeName).set(keyValuesPath, ulid); + // Save item in db await this.storage.setItem(itemKey, JSON.stringify(item)); } async batchSave( storeName: string, - items: ModelInstanceMetadata[] + items: ModelInstanceMetadata[], + keys: string[] ): Promise<[T, OpType][]> { if (items.length === 0) { 
return []; } const result: [T, OpType][] = []; - const collection = this.getCollectionIndex(storeName); const keysToDelete = new Set(); const keysToSave = new Set(); const allItemsKeys = []; const itemsMap: Record = {}; + + /* Populate allItemKeys, keysToDelete, and keysToSave */ for (const item of items) { - const { id, _deleted } = item; - const ulid = collection.get(id) || this.getMonotonicFactory(storeName)(); + // Extract keys from concatenated key path, map to item values + const keyValues = keys.map(field => item[field]); - const key = this.getKeyForItem(storeName, id, ulid); + const { _deleted } = item; + + // If id is in the store, retrieve, otherwise generate new ULID + const ulid = + collection.get(keyValues.join(DEFAULT_PRIMARY_KEY_VALUE_SEPARATOR)) || + this.getMonotonicFactory(storeName)(); + + // Generate the "longer key" for the item + const key = this.getKeyForItem( + storeName, + keyValues.join(DEFAULT_PRIMARY_KEY_VALUE_SEPARATOR), + ulid + ); allItemsKeys.push(key); itemsMap[key] = { ulid, model: (item) }; @@ -136,6 +174,7 @@ class AsyncStorageDatabase { .filter(([, v]) => !!v) .reduce((set, [k]) => set.add(k), new Set()); + // Delete await new Promise((resolve, reject) => { if (keysToDelete.size === 0) { resolve(); @@ -144,9 +183,15 @@ class AsyncStorageDatabase { const keysToDeleteArray = Array.from(keysToDelete); - keysToDeleteArray.forEach(key => - collection.delete(itemsMap[key].model.id) - ); + keysToDeleteArray.forEach(key => { + // key: full db key + // keys: PK and/or SK keys + const primaryKeyValues: string = keys + .map(field => itemsMap[key].model[field]) + .join(DEFAULT_PRIMARY_KEY_VALUE_SEPARATOR); + + collection.delete(primaryKeyValues); + }); this.storage.multiRemove(keysToDeleteArray, (errors?: Error[]) => { if (errors && errors.length > 0) { @@ -157,6 +202,7 @@ class AsyncStorageDatabase { }); }); + // Save await new Promise((resolve, reject) => { if (keysToSave.size === 0) { resolve(); @@ -169,12 +215,14 @@ class 
AsyncStorageDatabase { ]); keysToSave.forEach(key => { - const { - model: { id }, - ulid, - } = itemsMap[key]; + const { model, ulid } = itemsMap[key]; + + // Retrieve values from model, use as key for collection index + const keyValues: string = keys + .map(field => model[field]) + .join(DEFAULT_PRIMARY_KEY_VALUE_SEPARATOR); - collection.set(id, ulid); + collection.set(keyValues, ulid); }); this.storage.multiSet(entriesToSet, (errors?: Error[]) => { @@ -201,11 +249,11 @@ class AsyncStorageDatabase { } async get( - id: string, + keyValuePath: string, storeName: string ): Promise { - const ulid = this.getCollectionIndex(storeName).get(id); - const itemKey = this.getKeyForItem(storeName, id, ulid); + const ulid = this.getCollectionIndex(storeName).get(keyValuePath); + const itemKey = this.getKeyForItem(storeName, keyValuePath, ulid); const recordAsString = await this.storage.getItem(itemKey); const record = recordAsString && JSON.parse(recordAsString); return record; @@ -227,6 +275,7 @@ class AsyncStorageDatabase { return [id, ulid]; })(); const itemKey = this.getKeyForItem(storeName, itemId, ulid); + const itemString = itemKey && (await this.storage.getItem(itemKey)); const result = itemString ? 
JSON.parse(itemString) || undefined : undefined; @@ -272,11 +321,10 @@ class AsyncStorageDatabase { return records; } - async delete(id: string, storeName: string) { - const ulid = this.getCollectionIndex(storeName).get(id); - const itemKey = this.getKeyForItem(storeName, id, ulid); - - this.getCollectionIndex(storeName).delete(id); + async delete(key: string, storeName: string) { + const ulid = this.getCollectionIndex(storeName).get(key); + const itemKey = this.getKeyForItem(storeName, key, ulid); + this.getCollectionIndex(storeName).delete(key); await this.storage.removeItem(itemKey); } diff --git a/packages/datastore/src/storage/adapter/IndexedDBAdapter.ts b/packages/datastore/src/storage/adapter/IndexedDBAdapter.ts index 543aff61dbe..db54a2dd990 100644 --- a/packages/datastore/src/storage/adapter/IndexedDBAdapter.ts +++ b/packages/datastore/src/storage/adapter/IndexedDBAdapter.ts @@ -29,13 +29,16 @@ import { traverseModel, validatePredicate, sortCompareFunction, + keysEqual, + getStorename, + getIndexKeys, + extractPrimaryKeyValues, } from '../../util'; import { Adapter } from './index'; const logger = new Logger('DataStore'); const DB_NAME = 'amplify-datastore'; - class IndexedDBAdapter implements Adapter { private schema: InternalSchema; private namespaceResolver: NamespaceResolver; @@ -50,6 +53,31 @@ class IndexedDBAdapter implements Adapter { private reject: (value?: any) => void; private dbName: string = DB_NAME; + private getStorenameForModel( + modelConstructor: PersistentModelConstructor + ) { + const namespace = this.namespaceResolver(modelConstructor); + const { name: modelName } = modelConstructor; + + return getStorename(namespace, modelName); + } + + // Retrieves primary key values from a model + private getIndexKeyValuesFromModel( + model: T + ): string[] { + const modelConstructor = Object.getPrototypeOf(model) + .constructor as PersistentModelConstructor; + const namespaceName = this.namespaceResolver(modelConstructor); + + const keys = 
getIndexKeys( + this.schema.namespaces[namespaceName], + modelConstructor.name + ); + + return extractPrimaryKeyValues(model, keys); + } + private async checkPrivate() { const isPrivate = await isPrivateMode().then(isPrivate => { return isPrivate; @@ -64,19 +92,12 @@ class IndexedDBAdapter implements Adapter { } } - private getStorenameForModel( - modelConstructor: PersistentModelConstructor - ) { - const namespace = this.namespaceResolver(modelConstructor); - const { name: modelName } = modelConstructor; - - return this.getStorename(namespace, modelName); - } - - private getStorename(namespace: string, modelName: string) { - const storeName = `${namespace}_${modelName}`; - - return storeName; + private getNamespaceAndModelFromStorename(storeName: string) { + const [namespaceName, ...modelNameArr] = storeName.split('_'); + return { + namespaceName, + modelName: modelNameArr.join('_'), + }; } async setUp( @@ -90,6 +111,7 @@ class IndexedDBAdapter implements Adapter { sessionId?: string ) { await this.checkPrivate(); + if (!this.initPromise) { this.initPromise = new Promise((res, rej) => { this.resolve = res; @@ -108,7 +130,7 @@ class IndexedDBAdapter implements Adapter { try { if (!this.db) { - const VERSION = 2; + const VERSION = 3; this.db = await idb.openDB(this.dbName, VERSION, { upgrade: async (db, oldVersion, newVersion, txn) => { if (oldVersion === 0) { @@ -116,7 +138,7 @@ class IndexedDBAdapter implements Adapter { const namespace = theSchema.namespaces[namespaceName]; Object.keys(namespace.models).forEach(modelName => { - const storeName = this.getStorename(namespaceName, modelName); + const storeName = getStorename(namespaceName, modelName); this.createObjectStoreForModel( db, namespaceName, @@ -129,7 +151,7 @@ class IndexedDBAdapter implements Adapter { return; } - if (oldVersion === 1 && newVersion === 2) { + if ((oldVersion === 1 || oldVersion === 2) && newVersion === 3) { try { for (const storeName of txn.objectStoreNames) { const origStore = 
txn.objectStore(storeName); @@ -138,13 +160,15 @@ class IndexedDBAdapter implements Adapter { const tmpName = `tmp_${storeName}`; origStore.name = tmpName; - // create new store with original name - const newStore = db.createObjectStore(storeName, { - keyPath: undefined, - autoIncrement: true, - }); + const { namespaceName, modelName } = + this.getNamespaceAndModelFromStorename(storeName); - newStore.createIndex('byId', 'id', { unique: true }); + const newStore = this.createObjectStoreForModel( + db, + namespaceName, + storeName, + modelName + ); let cursor = await origStore.openCursor(); let count = 0; @@ -175,7 +199,7 @@ class IndexedDBAdapter implements Adapter { .map(modelName => { return [ modelName, - this.getStorename(namespaceName, modelName), + getStorename(namespaceName, modelName), ]; }) .filter(([, storeName]) => !objectStoreNames.has(storeName)) @@ -208,19 +232,19 @@ class IndexedDBAdapter implements Adapter { private async _get( storeOrStoreName: idb.IDBPObjectStore | string, - id: string + keyArr: string[] ): Promise { let index: idb.IDBPIndex; if (typeof storeOrStoreName === 'string') { const storeName = storeOrStoreName; - index = this.db.transaction(storeName, 'readonly').store.index('byId'); + index = this.db.transaction(storeName, 'readonly').store.index('byPk'); } else { const store = storeOrStoreName; - index = store.index('byId'); + index = store.index('byPk'); } - const result = await index.get(id); + const result = await index.get(keyArr); return result; } @@ -233,21 +257,26 @@ class IndexedDBAdapter implements Adapter { const modelConstructor = Object.getPrototypeOf(model) .constructor as PersistentModelConstructor; const storeName = this.getStorenameForModel(modelConstructor); + const namespaceName = this.namespaceResolver(modelConstructor); + const connectedModels = traverseModel( modelConstructor.name, model, - this.schema.namespaces[this.namespaceResolver(modelConstructor)], + this.schema.namespaces[namespaceName], 
this.modelInstanceCreator, this.getModelConstructorByModelName ); - const namespaceName = this.namespaceResolver(modelConstructor); const set = new Set(); const connectionStoreNames = Object.values(connectedModels).map( ({ modelName, item, instance }) => { - const storeName = this.getStorename(namespaceName, modelName); + const storeName = getStorename(namespaceName, modelName); set.add(storeName); - return { storeName, item, instance }; + const keys = getIndexKeys( + this.schema.namespaces[namespaceName], + modelName + ); + return { storeName, item, instance, keys }; } ); @@ -257,7 +286,9 @@ class IndexedDBAdapter implements Adapter { ); const store = tx.objectStore(storeName); - const fromDB = await this._get(store, model.id); + const keyValues = this.getIndexKeyValuesFromModel(model); + + const fromDB = await this._get(store, keyValues); if (condition && fromDB) { const predicates = ModelPredicateCreator.getPredicates(condition); @@ -276,17 +307,26 @@ class IndexedDBAdapter implements Adapter { const result: [T, OpType.INSERT | OpType.UPDATE][] = []; for await (const resItem of connectionStoreNames) { - const { storeName, item, instance } = resItem; + const { storeName, item, instance, keys } = resItem; const store = tx.objectStore(storeName); - const { id } = item; - const fromDB = await this._get(store, id); + const itemKeyValues = keys.map(key => { + const value = item[key]; + return value; + }); + + const fromDB = await this._get(store, itemKeyValues); const opType: OpType = fromDB === undefined ? 
OpType.INSERT : OpType.UPDATE; + const modelKeyValues = this.getIndexKeyValuesFromModel(model); + // Even if the parent is an INSERT, the child might not be, so we need to get its key - if (id === model.id || opType === OpType.INSERT) { - const key = await store.index('byId').getKey(item.id); + if ( + keysEqual(itemKeyValues, modelKeyValues) || + opType === OpType.INSERT + ) { + const key = await store.index('byPk').getKey(itemKeyValues); await store.put(item, key); result.push([instance, opType]); @@ -306,7 +346,7 @@ class IndexedDBAdapter implements Adapter { const namespace = this.schema.namespaces[namespaceName]; const relations = namespace.relationships[srcModelName].relationTypes; const connectionStoreNames = relations.map(({ modelName }) => { - return this.getStorename(namespaceName, modelName); + return getStorename(namespaceName, modelName); }); const modelConstructor = this.getModelConstructorByModelName( namespaceName, @@ -322,8 +362,9 @@ class IndexedDBAdapter implements Adapter { const tx = this.db.transaction([...connectionStoreNames], 'readonly'); for await (const relation of relations) { - const { fieldName, modelName, targetName } = relation; - const storeName = this.getStorename(namespaceName, modelName); + // target name, metadata, set by init + const { fieldName, modelName, targetName, targetNames } = relation; + const storeName = getStorename(namespaceName, modelName); const store = tx.objectStore(storeName); const modelConstructor = this.getModelConstructorByModelName( namespaceName, @@ -333,35 +374,91 @@ class IndexedDBAdapter implements Adapter { switch (relation.relationType) { case 'HAS_ONE': for await (const recordItem of records) { - const getByfield = recordItem[targetName] ? 
targetName : fieldName; - if (!recordItem[getByfield]) break; + // POST CPK codegen changes: + if (targetNames?.length) { + let getByFields = []; + let allPresent; + // iterate through all targetnames to make sure they are all present in the recordItem + allPresent = targetNames.every(targetName => { + return recordItem[targetName] != null; + }); - const connectionRecord = await this._get( - store, - recordItem[getByfield] - ); + if (!allPresent) { + break; + } - recordItem[fieldName] = - connectionRecord && - this.modelInstanceCreator(modelConstructor, connectionRecord); - } + getByFields = targetNames as any; + + // keys are the key values + const keys = getByFields.map( + getByField => recordItem[getByField] + ); + + const connectionRecord = await this._get(store, keys); + + recordItem[fieldName] = + connectionRecord && + this.modelInstanceCreator(modelConstructor, connectionRecord); + } else { + // If single target name, using old codegen + const getByfield = recordItem[targetName] + ? targetName + : fieldName; + + // We break here, because the recordItem does not have 'team', the `getByField` + // extract the keys on the related model. 
+ if (!recordItem[getByfield]) break; + + const key = [recordItem[getByfield]]; + + const connectionRecord = await this._get(store, key); + recordItem[fieldName] = + connectionRecord && + this.modelInstanceCreator(modelConstructor, connectionRecord); + } + } break; case 'BELONGS_TO': for await (const recordItem of records) { - if (recordItem[targetName]) { - const connectionRecord = await this._get( - store, - recordItem[targetName] + // POST CPK codegen changes: + if (targetNames?.length) { + let allPresent; + // iterate through all targetnames to make sure they are all present in the recordItem + allPresent = targetNames.every(targetName => { + return recordItem[targetName] != null; + }); + + // If not present, there is not yet a connected record + if (!allPresent) { + break; + } + + const keys = targetNames.map( + targetName => recordItem[targetName] ); + // Retrieve the connected record + const connectionRecord = await this._get(store, keys); + + recordItem[fieldName] = + connectionRecord && + this.modelInstanceCreator(modelConstructor, connectionRecord); + + targetNames?.map(targetName => { + delete recordItem[targetName]; + }); + } else if (recordItem[targetName]) { + const key = [recordItem[targetName]]; + + const connectionRecord = await this._get(store, key); + recordItem[fieldName] = connectionRecord && this.modelInstanceCreator(modelConstructor, connectionRecord); delete recordItem[targetName]; } } - break; case 'HAS_MANY': // TODO: Lazy loading @@ -388,13 +485,19 @@ class IndexedDBAdapter implements Adapter { const predicates = predicate && ModelPredicateCreator.getPredicates(predicate); - const queryById = predicates && this.idFromPredicate(predicates); + const keyPath = getIndexKeys( + this.schema.namespaces[namespaceName], + modelConstructor.name + ); + const queryByKey = + predicates && this.keyValueFromPredicate(predicates, keyPath); + const hasSort = pagination && pagination.sort; const hasPagination = pagination && pagination.limit; const 
records: T[] = await (async () => { - if (queryById) { - const record = await this.getById(storeName, queryById); + if (queryByKey) { + const record = await this.getByKey(storeName, queryByKey); return record ? [record] : []; } @@ -418,11 +521,11 @@ class IndexedDBAdapter implements Adapter { return await this.load(namespaceName, modelConstructor.name, records); } - private async getById( + private async getByKey( storeName: string, - id: string + keyValue: string[] ): Promise { - const record = await this._get(storeName, id); + const record = await this._get(storeName, keyValue); return record; } @@ -432,17 +535,27 @@ class IndexedDBAdapter implements Adapter { return await this.db.getAll(storeName); } - private idFromPredicate( - predicates: PredicatesGroup - ) { + private keyValueFromPredicate( + predicates: PredicatesGroup, + keyPath: string[] + ): string[] | undefined { const { predicates: predicateObjs } = predicates; - const idPredicate = - predicateObjs.length === 1 && - (predicateObjs.find( - p => isPredicateObj(p) && p.field === 'id' && p.operator === 'eq' - ) as PredicateObject); - return idPredicate && idPredicate.operand; + if (predicateObjs.length !== keyPath.length) { + return; + } + + const keyValues = []; + + for (const key of keyPath) { + const predicateObj = predicateObjs.find( + p => isPredicateObj(p) && p.field === key && p.operator === 'eq' + ) as PredicateObject; + + predicateObj && keyValues.push(predicateObj.operand); + } + + return keyValues.length === keyPath.length ? 
keyValues : undefined; } private async filterOnPredicate( @@ -605,15 +718,16 @@ class IndexedDBAdapter implements Adapter { const modelConstructor = Object.getPrototypeOf(model) .constructor as PersistentModelConstructor; - const nameSpace = this.namespaceResolver(modelConstructor); + const namespaceName = this.namespaceResolver(modelConstructor); const storeName = this.getStorenameForModel(modelConstructor); if (condition) { const tx = this.db.transaction([storeName], 'readwrite'); const store = tx.objectStore(storeName); + const keyValues = this.getIndexKeyValuesFromModel(model); - const fromDB = await this._get(store, model.id); + const fromDB = await this._get(store, keyValues); if (fromDB === undefined) { const msg = 'Model instance not found in storage'; @@ -636,26 +750,28 @@ class IndexedDBAdapter implements Adapter { await tx.done; const relations = - this.schema.namespaces[nameSpace].relationships[modelConstructor.name] - .relationTypes; + this.schema.namespaces[namespaceName].relationships[ + modelConstructor.name + ].relationTypes; await this.deleteTraverse( relations, [model], modelConstructor.name, - nameSpace, + namespaceName, deleteQueue ); } else { const relations = - this.schema.namespaces[nameSpace].relationships[modelConstructor.name] - .relationTypes; + this.schema.namespaces[namespaceName].relationships[ + modelConstructor.name + ].relationTypes; await this.deleteTraverse( relations, [model], modelConstructor.name, - nameSpace, + namespaceName, deleteQueue ); } @@ -672,7 +788,10 @@ class IndexedDBAdapter implements Adapter { } private async deleteItem( - deleteQueue?: { storeName: string; items: T[] | IDBValidKey[] }[] + deleteQueue?: { + storeName: string; + items: T[] | IDBValidKey[]; + }[] ) { const connectionStoreNames = deleteQueue.map(({ storeName }) => { return storeName; @@ -688,9 +807,11 @@ class IndexedDBAdapter implements Adapter { let key: IDBValidKey; if (typeof item === 'object') { - key = await 
store.index('byId').getKey(item['id']); + const keyValues = this.getIndexKeyValuesFromModel(item as T); + key = await store.index('byPk').getKey(keyValues); } else { - key = await store.index('byId').getKey(item.toString()); + const itemKey = [item.toString()]; + key = await store.index('byPk').getKey([itemKey]); } if (key !== undefined) { @@ -709,57 +830,109 @@ class IndexedDBAdapter implements Adapter { deleteQueue: { storeName: string; items: T[] }[] ): Promise { for await (const rel of relations) { - const { relationType, fieldName, modelName, targetName } = rel; - const storeName = this.getStorename(nameSpace, modelName); - - const index: string = - getIndex( - this.schema.namespaces[nameSpace].relationships[modelName] - .relationTypes, - srcModel - ) || - // if we were unable to find an index via relationTypes - // i.e. for keyName connections, attempt to find one by the - // associatedWith property - getIndexFromAssociation( - this.schema.namespaces[nameSpace].relationships[modelName].indexes, - rel.associatedWith - ); + const { + relationType, + modelName, + targetName, + targetNames, + associatedWith, + } = rel; + + const storeName = getStorename(nameSpace, modelName); switch (relationType) { case 'HAS_ONE': for await (const model of models) { - const hasOneIndex = index || 'byId'; - - const hasOneCustomField = targetName in model; - const value = hasOneCustomField ? model[targetName] : model.id; - if (!value) break; - - const recordToDelete = ( - await this.db - .transaction(storeName, 'readwrite') - .objectStore(storeName) - .index(hasOneIndex) - .get(value) - ); + const hasOneIndex = 'byPk'; - await this.deleteTraverse( - this.schema.namespaces[nameSpace].relationships[modelName] - .relationTypes, - recordToDelete ? 
[recordToDelete] : [], - modelName, - nameSpace, - deleteQueue - ); + if (targetNames?.length) { + // CPK codegen + const values = targetNames.map(targetName => model[targetName]); + + if (values.length === 0) break; + + const recordToDelete = ( + await this.db + .transaction(storeName, 'readwrite') + .objectStore(storeName) + .index(hasOneIndex) + .get(values) + ); + + await this.deleteTraverse( + this.schema.namespaces[nameSpace].relationships[modelName] + .relationTypes, + recordToDelete ? [recordToDelete] : [], + modelName, + nameSpace, + deleteQueue + ); + break; + } else { + // PRE-CPK codegen + let index; + let values: string[]; + + if (targetName && targetName in model) { + index = hasOneIndex; + const value = model[targetName]; + values = [value]; + } else { + // backwards compatability for older versions of codegen that did not emit targetName for HAS_ONE relations + // TODO: can we deprecate this? it's been ~2 years since codegen started including targetName for HAS_ONE + // If we deprecate, we'll need to re-gen the MIPR in __tests__/schema.ts > newSchema + // otherwise some unit tests will fail + index = getIndex( + this.schema.namespaces[nameSpace].relationships[modelName] + .relationTypes, + srcModel + ); + values = this.getIndexKeyValuesFromModel(model); + } + + if (!values || !index) break; + + const recordToDelete = ( + await this.db + .transaction(storeName, 'readwrite') + .objectStore(storeName) + .index(index) + .get(values) + ); + + await this.deleteTraverse( + this.schema.namespaces[nameSpace].relationships[modelName] + .relationTypes, + recordToDelete ? 
[recordToDelete] : [], + modelName, + nameSpace, + deleteQueue + ); + } } break; case 'HAS_MANY': for await (const model of models) { + const index = + // explicit bi-directional @hasMany and @manyToMany + getIndex( + this.schema.namespaces[nameSpace].relationships[modelName] + .relationTypes, + srcModel + ) || + // uni and/or implicit @hasMany + getIndexFromAssociation( + this.schema.namespaces[nameSpace].relationships[modelName] + .indexes, + associatedWith + ); + const keyValues = this.getIndexKeyValuesFromModel(model); + const childrenArray = await this.db .transaction(storeName, 'readwrite') .objectStore(storeName) - .index(index) - .getAll(model['id']); + .index(index as string) + .getAll(keyValues); await this.deleteTraverse( this.schema.namespaces[nameSpace].relationships[modelName] @@ -781,7 +954,7 @@ class IndexedDBAdapter implements Adapter { } deleteQueue.push({ - storeName: this.getStorename(nameSpace, srcModel), + storeName: getStorename(nameSpace, srcModel), items: models.map(record => this.modelInstanceCreator( this.getModelConstructorByModelName(nameSpace, srcModel), @@ -820,22 +993,29 @@ class IndexedDBAdapter implements Adapter { const store = txn.store; for (const item of items) { + const namespaceName = this.namespaceResolver(modelConstructor); + const modelName = modelConstructor.name; + const model = this.modelInstanceCreator(modelConstructor, item); + const connectedModels = traverseModel( - modelConstructor.name, - this.modelInstanceCreator(modelConstructor, item), - this.schema.namespaces[this.namespaceResolver(modelConstructor)], + modelName, + model, + this.schema.namespaces[namespaceName], this.modelInstanceCreator, this.getModelConstructorByModelName ); - const { id, _deleted } = item; - const index = store.index('byId'); - const key = await index.getKey(id); + const keyValues = this.getIndexKeyValuesFromModel(model); + const { _deleted } = item; + + const index = store.index('byPk'); + const key = await index.getKey(keyValues); if 
(!_deleted) { - const { instance } = connectedModels.find( - ({ instance }) => instance.id === id - ); + const { instance } = connectedModels.find(({ instance }) => { + const instanceKeyValues = this.getIndexKeyValuesFromModel(instance); + return keysEqual(instanceKeyValues, keyValues); + }); result.push([ (instance), @@ -856,7 +1036,7 @@ class IndexedDBAdapter implements Adapter { return result; } - private async createObjectStoreForModel( + private createObjectStoreForModel( db: idb.IDBPDatabase, namespaceName: string, storeName: string, @@ -866,11 +1046,14 @@ class IndexedDBAdapter implements Adapter { autoIncrement: true, }); - const indexes = - this.schema.namespaces[namespaceName].relationships[modelName].indexes; - indexes.forEach(index => store.createIndex(index, index)); + const { indexes } = + this.schema.namespaces[namespaceName].relationships[modelName]; + + indexes.forEach(([idxName, keyPath, options]) => { + store.createIndex(idxName, keyPath, options); + }); - store.createIndex('byId', 'id', { unique: true }); + return store; } } diff --git a/packages/datastore/src/storage/adapter/index.ts b/packages/datastore/src/storage/adapter/index.ts index 9c7b54feb2f..0a80929c29d 100644 --- a/packages/datastore/src/storage/adapter/index.ts +++ b/packages/datastore/src/storage/adapter/index.ts @@ -29,7 +29,7 @@ export interface Adapter extends SystemComponent { firstOrLast: QueryOne ): Promise; batchSave( - modelConstructor: PersistentModelConstructor, + modelConstructor: PersistentModelConstructor, items: ModelInstanceMetadata[] ): Promise<[T, OpType][]>; } diff --git a/packages/datastore/src/storage/storage.ts b/packages/datastore/src/storage/storage.ts index 831500afba1..e9a28951ce9 100644 --- a/packages/datastore/src/storage/storage.ts +++ b/packages/datastore/src/storage/storage.ts @@ -26,6 +26,7 @@ import { validatePredicate, valuesEqual, } from '../util'; +import { getIdentifierValue } from '../sync/utils'; import { Adapter } from './adapter'; import 
getDefaultAdapter from './adapter/getDefaultAdapter'; @@ -188,7 +189,21 @@ class StorageClass implements StorageFacade { condition ); - const modelIds = new Set(models.map(({ id }) => id)); + const modelConstructor = isModelConstructor(modelOrModelConstructor) + ? modelOrModelConstructor + : (Object.getPrototypeOf(modelOrModelConstructor || {}) + .constructor as PersistentModelConstructor); + const namespaceName = this.namespaceResolver(modelConstructor); + + const modelDefinition = + this.schema.namespaces[namespaceName].models[modelConstructor.name]; + + const modelIds = new Set( + models.map(model => { + const modelId = getIdentifierValue(modelDefinition, model); + return modelId; + }) + ); if ( !isModelConstructor(modelOrModelConstructor) && @@ -204,7 +219,8 @@ class StorageClass implements StorageFacade { let theCondition: PredicatesGroup; if (!isModelConstructor(modelOrModelConstructor)) { - theCondition = modelIds.has(model.id) + const modelId = getIdentifierValue(modelDefinition, model); + theCondition = modelIds.has(modelId) ? ModelPredicateCreator.getPredicates(condition, false) : undefined; } @@ -337,30 +353,65 @@ class StorageClass implements StorageFacade { // set original values for these fields updatedFields.forEach((field: string) => { - const targetName: any = isTargetNameAssociation( + const targetNames: any = isTargetNameAssociation( fields[field]?.association ); - // if field refers to a belongsTo relation, use the target field instead - const key = targetName || field; + if (Array.isArray(targetNames)) { + // if field refers to a belongsTo relation, use the target field instead + + for (const targetName of targetNames) { + // check field values by value. Ignore unchanged fields + if (!valuesEqual(source[targetName], originalElement[targetName])) { + // if the field was updated to 'undefined', replace with 'null' for compatibility with JSON and GraphQL + + updatedElement[targetName] = + originalElement[targetName] === undefined + ? 
null + : originalElement[targetName]; + + for (const fieldSet of compositeKeys) { + // include all of the fields that comprise the composite key + if (fieldSet.has(targetName)) { + for (const compositeField of fieldSet) { + updatedElement[compositeField] = + originalElement[compositeField]; + } + } + } + } + } + } else { + // Backwards compatibility pre-CPK + + // if field refers to a belongsTo relation, use the target field instead + const key = targetNames || field; + + // check field values by value. Ignore unchanged fields + if (!valuesEqual(source[key], originalElement[key])) { + // if the field was updated to 'undefined', replace with 'null' for compatibility with JSON and GraphQL - // check field values by value. Ignore unchanged fields - if (!valuesEqual(source[key], originalElement[key])) { - // if the field was updated to 'undefined', replace with 'null' for compatibility with JSON and GraphQL - updatedElement[key] = - originalElement[key] === undefined ? null : originalElement[key]; + updatedElement[key] = + originalElement[key] === undefined ? 
null : originalElement[key]; - for (const fieldSet of compositeKeys) { - // include all of the fields that comprise the composite key - if (fieldSet.has(key)) { - for (const compositeField of fieldSet) { - updatedElement[compositeField] = originalElement[compositeField]; + for (const fieldSet of compositeKeys) { + // include all of the fields that comprise the composite key + if (fieldSet.has(key)) { + for (const compositeField of fieldSet) { + updatedElement[compositeField] = + originalElement[compositeField]; + } } } } } }); + // Exit early when there are no changes introduced in the update mutation + if (Object.keys(updatedElement).length === 0) { + return null; + } + // include field(s) from custom PK if one is specified for the model if (primaryKey && primaryKey.length) { for (const pkField of primaryKey) { @@ -368,10 +419,6 @@ class StorageClass implements StorageFacade { } } - if (Object.keys(updatedElement).length === 0) { - return null; - } - const { id, _version, _lastChangedAt, _deleted } = originalElement; // For update mutations we only want to send fields with changes @@ -421,7 +468,7 @@ class ExclusiveStorage implements StorageFacade { patchesTuple?: [Patch[], PersistentModel] ): Promise<[T, OpType.INSERT | OpType.UPDATE][]> { return this.runExclusive<[T, OpType.INSERT | OpType.UPDATE][]>(storage => - storage.save(model, condition, mutator, patchesTuple) + storage.save(model, condition, mutator, patchesTuple) ); } @@ -489,7 +536,7 @@ class ExclusiveStorage implements StorageFacade { } batchSave( - modelConstructor: PersistentModelConstructor, + modelConstructor: PersistentModelConstructor, items: ModelInstanceMetadata[] ): Promise<[T, OpType][]> { return this.storage.batchSave(modelConstructor, items); diff --git a/packages/datastore/src/sync/index.ts b/packages/datastore/src/sync/index.ts index 4d985b8a479..6727118d319 100644 --- a/packages/datastore/src/sync/index.ts +++ b/packages/datastore/src/sync/index.ts @@ -21,6 +21,9 @@ import { 
TypeConstructorMap, ModelPredicate, AuthModeStrategy, + ManagedIdentifier, + OptionallyManagedIdentifier, + __modelMeta__, AmplifyContext, } from '../types'; import { exhaustiveCheck, getNow, SYNC, USER } from '../util'; @@ -32,6 +35,7 @@ import { CONTROL_MSG, SubscriptionProcessor } from './processors/subscription'; import { SyncProcessor } from './processors/sync'; import { createMutationInstanceFromModelOperation, + getIdentifierValue, predicateToGraphQLCondition, TransformerMutationType, } from './utils'; @@ -46,25 +50,26 @@ type StartParams = { }; export declare class MutationEvent { - constructor(init: ModelInit); - static copyOf( - src: MutationEvent, - mutator: (draft: MutableModel) => void | MutationEvent - ): MutationEvent; + readonly [__modelMeta__]: { + identifier: OptionallyManagedIdentifier; + }; public readonly id: string; public readonly model: string; public readonly operation: TransformerMutationType; public readonly modelId: string; public readonly condition: string; - public data: string; + public readonly data: string; + constructor(init: ModelInit); + static copyOf( + src: MutationEvent, + mutator: (draft: MutableModel) => void | MutationEvent + ): MutationEvent; } -declare class ModelMetadata { - constructor(init: ModelInit); - static copyOf( - src: ModelMetadata, - mutator: (draft: MutableModel) => void | ModelMetadata - ): ModelMetadata; +export declare class ModelMetadata { + readonly [__modelMeta__]: { + identifier: ManagedIdentifier; + }; public readonly id: string; public readonly namespace: string; public readonly model: string; @@ -72,6 +77,11 @@ declare class ModelMetadata { public readonly lastSync?: number; public readonly lastFullSync?: number; public readonly lastSyncPredicate?: null | string; + constructor(init: ModelInit); + static copyOf( + src: ModelMetadata, + mutator: (draft: MutableModel) => void | ModelMetadata + ): ModelMetadata; } export enum ControlMessage { @@ -123,7 +133,7 @@ export class SyncEngine { ) { const 
MutationEvent = this.modelClasses[ 'MutationEvent' - ] as PersistentModelConstructor; + ] as PersistentModelConstructor; this.outbox = new MutationEventOutbox( this.schema, @@ -296,7 +306,7 @@ export class SyncEngine { ); this.storage.runExclusive(storage => - this.modelMerger.merge(storage, model) + this.modelMerger.merge(storage, model, modelDefinition) ); observer.next({ @@ -333,7 +343,7 @@ export class SyncEngine { ); this.storage.runExclusive(storage => - this.modelMerger.merge(storage, model) + this.modelMerger.merge(storage, model, modelDefinition) ); } ) @@ -362,7 +372,6 @@ export class SyncEngine { .observe(null, null, ownSymbol) .filter(({ model }) => { const modelDefinition = this.getModelDefinition(model); - return modelDefinition.syncable === true; }) .subscribe({ @@ -372,7 +381,11 @@ export class SyncEngine { const MutationEventConstructor = this.modelClasses[ 'MutationEvent' ] as PersistentModelConstructor; - const graphQLCondition = predicateToGraphQLCondition(condition); + const modelDefinition = this.getModelDefinition(model); + const graphQLCondition = predicateToGraphQLCondition( + condition, + modelDefinition + ); const mutationEvent = createMutationInstanceFromModelOperation( namespace.relationships, this.getModelDefinition(model), @@ -537,7 +550,9 @@ export class SyncEngine { const oneByOne: ModelInstanceMetadata[] = []; const page = items.filter(item => { - if (!idsInOutbox.has(item.id)) { + const itemId = getIdentifierValue(modelDefinition, item); + + if (!idsInOutbox.has(itemId)) { return true; } @@ -550,7 +565,8 @@ export class SyncEngine { for (const item of oneByOne) { const opType = await this.modelMerger.merge( storage, - item + item, + modelDefinition ); if (opType !== undefined) { @@ -562,7 +578,8 @@ export class SyncEngine { ...(await this.modelMerger.mergePage( storage, modelConstructor, - page + page, + modelDefinition )) ); @@ -608,7 +625,7 @@ export class SyncEngine { modelMetadata = ( this.modelClasses - .ModelMetadata as 
PersistentModelConstructor + .ModelMetadata as PersistentModelConstructor ).copyOf(modelMetadata, draft => { draft.lastSync = startedAt; draft.lastFullSync = isFullSync @@ -709,7 +726,7 @@ export class SyncEngine { private async setupModels(params: StartParams) { const { fullSyncInterval } = params; - const ModelMetadata = this.modelClasses + const ModelMetadataConstructor = this.modelClasses .ModelMetadata as PersistentModelConstructor; const models: [string, SchemaModel][] = []; @@ -741,7 +758,7 @@ export class SyncEngine { if (modelMetadata === undefined) { [[savedModel]] = await this.storage.save( - this.modelInstanceCreator(ModelMetadata, { + this.modelInstanceCreator(ModelMetadataConstructor, { model: model.name, namespace, lastSync: null, @@ -759,9 +776,7 @@ export class SyncEngine { const syncPredicateUpdated = prevSyncPredicate !== lastSyncPredicate; [[savedModel]] = await this.storage.save( - ( - this.modelClasses.ModelMetadata as PersistentModelConstructor - ).copyOf(modelMetadata, draft => { + ModelMetadataConstructor.copyOf(modelMetadata, draft => { draft.fullSyncInterval = fullSyncInterval; // perform a base sync if the syncPredicate changed in between calls to DataStore.start // ensures that the local store contains all the data specified by the syncExpression diff --git a/packages/datastore/src/sync/merger.ts b/packages/datastore/src/sync/merger.ts index c52c4c98d0c..a08ba330717 100644 --- a/packages/datastore/src/sync/merger.ts +++ b/packages/datastore/src/sync/merger.ts @@ -3,8 +3,10 @@ import { ModelInstanceMetadata, OpType, PersistentModelConstructor, + SchemaModel, } from '../types'; import { MutationEventOutbox } from './outbox'; +import { getIdentifierValue } from './utils'; // https://github.com/aws-amplify/amplify-js/blob/datastore-docs/packages/datastore/docs/sync-engine.md#merger class ModelMerger { @@ -15,10 +17,15 @@ class ModelMerger { public async merge( storage: Storage, - model: T + model: T, + modelDefinition: SchemaModel ): 
Promise { let result: OpType; - const mutationsForModel = await this.outbox.getForModel(storage, model); + const mutationsForModel = await this.outbox.getForModel( + storage, + model, + modelDefinition + ); const isDelete = model._deleted; @@ -37,13 +44,16 @@ class ModelMerger { public async mergePage( storage: Storage, modelConstructor: PersistentModelConstructor, - items: ModelInstanceMetadata[] + items: ModelInstanceMetadata[], + modelDefinition: SchemaModel ): Promise<[ModelInstanceMetadata, OpType][]> { const itemsMap: Map = new Map(); for (const item of items) { // merge items by model id. Latest record for a given id remains. - itemsMap.set(item.id, item); + const modelId = getIdentifierValue(modelDefinition, item); + + itemsMap.set(modelId, item); } const page = [...itemsMap.values()]; diff --git a/packages/datastore/src/sync/outbox.ts b/packages/datastore/src/sync/outbox.ts index ac4b6e52c05..558e4a46689 100644 --- a/packages/datastore/src/sync/outbox.ts +++ b/packages/datastore/src/sync/outbox.ts @@ -11,9 +11,10 @@ import { PersistentModel, PersistentModelConstructor, QueryOne, + SchemaModel, } from '../types'; import { USER, SYNC, valuesEqual } from '../util'; -import { TransformerMutationType } from './utils'; +import { getIdentifierValue, TransformerMutationType } from './utils'; // TODO: Persist deleted ids // https://github.com/aws-amplify/amplify-js/blob/datastore-docs/packages/datastore/docs/sync-engine.md#outbox @@ -35,6 +36,8 @@ class MutationEventOutbox { const mutationEventModelDefinition = this.schema.namespaces[SYNC].models['MutationEvent']; + // `id` is the key for the record in the mutationEvent; + // `modelId` is the key for the actual record that was mutated const predicate = ModelPredicateCreator.createFromExisting( mutationEventModelDefinition, c => @@ -43,13 +46,16 @@ class MutationEventOutbox { .id('ne', this.inProgressMutationEventId) ); + // Check if there are any other records with same id const [first] = await 
s.query(this.MutationEvent, predicate); + // No other record with same modelId, so enqueue if (first === undefined) { await s.save(mutationEvent, undefined, this.ownSymbol); return; } + // There was an enqueued mutation for the modelId, so continue const { operation: incomingMutationType } = mutationEvent; if (first.operation === TransformerMutationType.CREATE) { @@ -122,16 +128,19 @@ class MutationEventOutbox { public async getForModel( storage: StorageFacade, - model: T + model: T, + userModelDefinition: SchemaModel ): Promise { const mutationEventModelDefinition = this.schema.namespaces[SYNC].models.MutationEvent; + const modelId = getIdentifierValue(userModelDefinition, model); + const mutationEvents = await storage.query( this.MutationEvent, ModelPredicateCreator.createFromExisting( mutationEventModelDefinition, - c => c.modelId('eq', model.id) + c => c.modelId('eq', modelId) ) ); @@ -187,9 +196,14 @@ class MutationEventOutbox { const mutationEventModelDefinition = this.schema.namespaces[SYNC].models['MutationEvent']; + const userModelDefinition = + this.schema.namespaces['user'].models[head.model]; + + const recordId = getIdentifierValue(userModelDefinition, record); + const predicate = ModelPredicateCreator.createFromExisting( mutationEventModelDefinition, - c => c.modelId('eq', record.id).id('ne', this.inProgressMutationEventId) + c => c.modelId('eq', recordId).id('ne', this.inProgressMutationEventId) ); const outdatedMutations = await storage.query( @@ -224,11 +238,11 @@ class MutationEventOutbox { previous: MutationEvent, current: MutationEvent ): MutationEvent { - const { _version, id, _lastChangedAt, _deleted, ...previousData } = - JSON.parse(previous.data); + const { _version, _lastChangedAt, _deleted, ...previousData } = JSON.parse( + previous.data + ); const { - id: __id, _version: __version, _lastChangedAt: __lastChangedAt, _deleted: __deleted, @@ -236,7 +250,6 @@ class MutationEventOutbox { } = JSON.parse(current.data); const data = 
JSON.stringify({ - id, _version, _lastChangedAt, _deleted, diff --git a/packages/datastore/src/sync/processors/mutation.ts b/packages/datastore/src/sync/processors/mutation.ts index b07d93d2cfe..17b47658ede 100644 --- a/packages/datastore/src/sync/processors/mutation.ts +++ b/packages/datastore/src/sync/processors/mutation.ts @@ -27,7 +27,13 @@ import { ProcessName, AmplifyContext, } from '../../types'; -import { exhaustiveCheck, USER, USER_AGENT_SUFFIX_DATASTORE } from '../../util'; +import { + exhaustiveCheck, + extractTargetNamesFromSrc, + USER, + USER_AGENT_SUFFIX_DATASTORE, + ID, +} from '../../util'; import { MutationEventOutbox } from '../outbox'; import { buildGraphQLOperation, @@ -452,63 +458,61 @@ class MutationProcessor { // include all the fields that comprise a custom PK if one is specified const deleteInput = {}; - if (primaryKey && primaryKey.length) { + if (primaryKey?.length) { for (const pkField of primaryKey) { deleteInput[pkField] = parsedData[pkField]; } } else { - deleteInput['id'] = parsedData.id; + deleteInput[ID] = (parsedData).id; } - const filteredData = - operation === TransformerMutationType.DELETE - ? 
deleteInput // For DELETE mutations, only PK is sent - : Object.values(modelDefinition.fields) - .filter(({ name, type, association }) => { - // connections - if (isModelFieldType(type)) { - // BELONGS_TO - if ( - isTargetNameAssociation(association) && - association.connectionType === 'BELONGS_TO' - ) { - return true; - } + let mutationInput; - // All other connections - return false; - } - - if (operation === TransformerMutationType.UPDATE) { - // this limits the update mutation input to changed fields only - return parsedData.hasOwnProperty(name); + if (operation === TransformerMutationType.DELETE) { + // For DELETE mutations, only the key(s) are included in the input + mutationInput = deleteInput; + } else { + // Otherwise, we construct the mutation input with the following logic + mutationInput = {}; + const modelFields = Object.values(modelDefinition.fields); + + for (const { name, type, association } of modelFields) { + // model fields should be stripped out from the input + if (isModelFieldType(type)) { + // except for belongs to relations - we need to replace them with the correct foreign key(s) + if ( + isTargetNameAssociation(association) && + association.connectionType === 'BELONGS_TO' + ) { + const targetNames: string[] | undefined = + extractTargetNamesFromSrc(association); + + if (targetNames) { + // instead of including the connected model itself, we add its key(s) to the mutation input + for (const targetName of targetNames) { + mutationInput[targetName] = parsedData[targetName]; } + } + } + continue; + } + // scalar fields / non-model types - // scalars and non-model types - return true; - }) - .map(({ name, type, association }) => { - let fieldName = name; - let val = parsedData[name]; - - if ( - isModelFieldType(type) && - isTargetNameAssociation(association) - ) { - fieldName = association.targetName; - val = parsedData[fieldName]; - } + if (operation === TransformerMutationType.UPDATE) { + if (!parsedData.hasOwnProperty(name)) { + // for 
update mutations - strip out a field if it's unchanged + continue; + } + } - return [fieldName, val]; - }) - .reduce((acc, [k, v]) => { - acc[k] = v; - return acc; - }, {}); + // all other fields are added to the input object + mutationInput[name] = parsedData[name]; + } + } // Build mutation variables input object const input: ModelInstanceMetadata = { - ...filteredData, + ...mutationInput, _version, }; diff --git a/packages/datastore/src/sync/processors/subscription.ts b/packages/datastore/src/sync/processors/subscription.ts index ee1f9cea4b7..9a2286f270a 100644 --- a/packages/datastore/src/sync/processors/subscription.ts +++ b/packages/datastore/src/sync/processors/subscription.ts @@ -393,7 +393,6 @@ class SubscriptionProcessor { Observable<{ value: GraphQLResult>; }> - >(this.amplifyContext.API.graphql({ query, variables, ...{ authMode }, authToken, userAgentSuffix })); let subscriptionReadyCallback: () => void; diff --git a/packages/datastore/src/sync/processors/sync.ts b/packages/datastore/src/sync/processors/sync.ts index eb6fc5226ff..87af14c829d 100644 --- a/packages/datastore/src/sync/processors/sync.ts +++ b/packages/datastore/src/sync/processors/sync.ts @@ -85,9 +85,7 @@ class SyncProcessor { return predicateToGraphQLFilter(predicatesGroup); } - private async retrievePage< - T extends ModelInstanceMetadata = ModelInstanceMetadata - >( + private async retrievePage( modelDefinition: SchemaModel, lastSync: number, nextToken: string, diff --git a/packages/datastore/src/sync/utils.ts b/packages/datastore/src/sync/utils.ts index 04d9dea6cf5..7130a916b64 100644 --- a/packages/datastore/src/sync/utils.ts +++ b/packages/datastore/src/sync/utils.ts @@ -11,6 +11,7 @@ import { isGraphQLScalarType, isPredicateObj, isSchemaModel, + isSchemaModelWithAttributes, isTargetNameAssociation, isNonModelFieldType, ModelFields, @@ -27,7 +28,12 @@ import { InternalSchema, AuthModeStrategy, } from '../types'; -import { exhaustiveCheck } from '../util'; +import { + 
exhaustiveCheck, + extractPrimaryKeyFieldNames, + establishRelationAndKeys, + IDENTIFIER_KEY_SEPARATOR, +} from '../util'; import { MutationEvent } from './'; const logger = new Logger('DataStore'); @@ -47,7 +53,7 @@ export enum TransformerMutationType { GET = 'Get', } -const dummyMetadata: Omit = { +const dummyMetadata: ModelInstanceMetadata = { _version: undefined, _lastChangedAt: undefined, _deleted: undefined, @@ -79,7 +85,7 @@ export function generateSelectionSet( if (isSchemaModel(modelDefinition)) { scalarAndMetadataFields = scalarAndMetadataFields .concat(getMetadataFields()) - .concat(getConnectionFields(modelDefinition)); + .concat(getConnectionFields(modelDefinition, namespace)); } const result = scalarAndMetadataFields.join('\n'); @@ -103,7 +109,7 @@ function getOwnerFields( modelDefinition: SchemaModel | SchemaNonModel ): string[] { const ownerFields: string[] = []; - if (isSchemaModel(modelDefinition) && modelDefinition.attributes) { + if (isSchemaModelWithAttributes(modelDefinition)) { modelDefinition.attributes.forEach(attr => { if (attr.properties && attr.properties.rules) { const rule = attr.properties.rules.find(rule => rule.allow === 'owner'); @@ -138,8 +144,12 @@ function getScalarFields( return result; } -function getConnectionFields(modelDefinition: SchemaModel): string[] { - const result = []; +// Used for generating the selection set for queries and mutations +function getConnectionFields( + modelDefinition: SchemaModel, + namespace: SchemaNamespace +): string[] { + const result: string[] = []; Object.values(modelDefinition.fields) .filter(({ association }) => association && Object.keys(association).length) @@ -153,7 +163,26 @@ function getConnectionFields(modelDefinition: SchemaModel): string[] { break; case 'BELONGS_TO': if (isTargetNameAssociation(association)) { - result.push(`${name} { id _deleted }`); + // New codegen (CPK) + if (association.targetNames && association.targetNames.length > 0) { + // Need to retrieve relations in order 
to get connected model keys + const [relations] = establishRelationAndKeys(namespace); + + const connectedModelName = + modelDefinition.fields[name].type['model']; + + const byPkIndex = relations[connectedModelName].indexes.find( + ([name]) => name === 'byPk' + ); + const keyFields = byPkIndex && byPkIndex[1]; + const keyFieldSelectionSet = keyFields?.join(' '); + + // We rely on `_deleted` when we process the sync query (e.g. in batchSave in the adapters) + result.push(`${name} { ${keyFieldSelectionSet} _deleted }`); + } else { + // backwards-compatibility for schema generated prior to custom primary key support + result.push(`${name} { id _deleted }`); + } } break; default: @@ -412,10 +441,13 @@ export function createMutationInstanceFromModelOperation< return v; }; + const modelId = getIdentifierValue(modelDefinition, element); + const optionalId = OpType.INSERT && id ? { id } : {}; + const mutationEvent = modelInstanceCreator(MutationEventConstructor, { - ...(id ? { id } : {}), + ...optionalId, data: JSON.stringify(element, replacer), - modelId: element.id, + modelId, model: model.name, operation, condition: JSON.stringify(condition), @@ -425,7 +457,8 @@ } export function predicateToGraphQLCondition( - predicate: PredicatesGroup + predicate: PredicatesGroup, + modelDefinition: SchemaModel ): GraphQLCondition { const result = {}; @@ -433,17 +466,27 @@ export function predicateToGraphQLCondition( predicate.predicates.forEach(p => { if (isPredicateObj(p)) { const { field, operator, operand } = p; - if (field === 'id') { + // This is compatible with how the GQL Transform currently generates the Condition Input, + // i.e. any PK and SK fields are omitted and can't be used as conditions. + // However, I think this limits usability. 
+ // What if we want to delete all records where SK > some value + // Or all records where PK = some value but SKs are different values + + // TODO: if the Transform gets updated ^ we'll need to modify this logic to only omit + // key fields from the predicate/condition when ALL of the keyFields are present and using `eq` operators + if (keyFields.includes(field as string)) { return; } result[field] = { [operator]: operand }; } else { - result[p.type] = predicateToGraphQLCondition(p); + result[p.type] = predicateToGraphQLCondition(p, modelDefinition); } }); @@ -610,3 +653,17 @@ export async function getTokenForCustomAuth( } } } + +// Util that takes a modelDefinition and model and returns either the id value(s) or the custom primary key value(s) +export function getIdentifierValue( + modelDefinition: SchemaModel, + model: ModelInstanceMetadata | PersistentModel +): string { + const pkFieldNames = extractPrimaryKeyFieldNames(modelDefinition); + + const idOrPk = pkFieldNames + .map(f => model[f]) + .join(IDENTIFIER_KEY_SEPARATOR); + + return idOrPk; +} diff --git a/packages/datastore/src/types.ts b/packages/datastore/src/types.ts index 636a0d078d7..895345b17e5 100644 --- a/packages/datastore/src/types.ts +++ b/packages/datastore/src/types.ts @@ -10,6 +10,7 @@ import { isAWSURL, isAWSPhone, isAWSIPAddress, + extractPrimaryKeyFieldNames, } from './util'; import { PredicateAll } from './predicates'; import { GRAPHQL_AUTH_MODE } from '@aws-amplify/api-graphql'; @@ -46,9 +47,17 @@ export type SchemaModel = { fields: ModelFields; syncable?: boolean; }; + export function isSchemaModel(obj: any): obj is SchemaModel { return obj && (obj).pluralName !== undefined; } + +export function isSchemaModelWithAttributes( + m: SchemaModel | SchemaNonModel +): m is SchemaModel { + return isSchemaModel(m) && (m as SchemaModel).attributes !== undefined; +} + export type SchemaNonModels = Record; export type SchemaNonModel = { name: string; @@ -63,25 +72,29 @@ type SchemaEnum = { export 
type ModelAssociation = AssociatedWith | TargetNameAssociation; type AssociatedWith = { connectionType: 'HAS_MANY' | 'HAS_ONE'; - associatedWith: string; + associatedWith: string | string[]; targetName?: string; + targetNames?: string[]; }; + export function isAssociatedWith(obj: any): obj is AssociatedWith { return obj && obj.associatedWith; } type TargetNameAssociation = { connectionType: 'BELONGS_TO'; - targetName: string; + targetName?: string; + targetNames?: string[]; }; + export function isTargetNameAssociation( obj: any ): obj is TargetNameAssociation { - return obj && obj.targetName; + return obj?.targetName || obj?.targetNames; } export type ModelAttributes = ModelAttribute[]; -type ModelAttribute = { type: string; properties?: Record }; +export type ModelAttribute = { type: string; properties?: Record }; export type ModelAuthRule = { allow: string; @@ -123,6 +136,7 @@ type ModelAttributeKey = { type ModelAttributePrimaryKey = { type: 'key'; properties: { + name: never; fields: string[]; }; }; @@ -336,33 +350,138 @@ export type NonModelTypeConstructor = { }; // Class for model -export type PersistentModelConstructor< - T extends PersistentModel, - K extends PersistentModelMetaData = { - readOnlyFields: 'createdAt' | 'updatedAt'; - } -> = { - new (init: ModelInit): T; - copyOf(src: T, mutator: (draft: MutableModel) => void): T; +export type PersistentModelConstructor = { + new (init: ModelInit>): T; + copyOf( + src: T, + mutator: (draft: MutableModel>) => void + ): T; }; export type TypeConstructorMap = Record< string, - PersistentModelConstructor | NonModelTypeConstructor + PersistentModelConstructor | NonModelTypeConstructor +>; + +/** + * Each identifier type is represented using nominal types, see: + * https://basarat.gitbook.io/typescript/main-1/nominaltyping + */ +export declare const __identifierBrand__: unique symbol; +export type IdentifierBrand = T & { [__identifierBrand__]: K }; + +// datastore generates a uuid for you +export type 
ManagedIdentifier = IdentifierBrand< + { field: F extends string ? F : never; type: T }, + 'ManagedIdentifier' +>; + +// you can provide a value, if not, datastore generates a uuid for you +export type OptionallyManagedIdentifier = IdentifierBrand< + { field: F extends string ? F : never; type: T }, + 'OptionallyManagedIdentifier' >; +// You provide the values +export type CompositeIdentifier> = IdentifierBrand< + { fields: K; type: T }, + 'CompositeIdentifier' +>; + +// You provide the value +export type CustomIdentifier = CompositeIdentifier< + T, + [K] +>; + +export type Identifier = + | ManagedIdentifier + | OptionallyManagedIdentifier + | CompositeIdentifier + | CustomIdentifier; + +export type IdentifierFields< + T extends PersistentModel, + M extends PersistentModelMetaData = never +> = (MetadataOrDefault['identifier'] extends + | ManagedIdentifier + | OptionallyManagedIdentifier + ? MetadataOrDefault['identifier']['field'] + : MetadataOrDefault['identifier'] extends CompositeIdentifier< + T, + infer B + > + ? B[number] // B[number] + : MetadataOrDefault['identifier']['field']) & + string; + +export type IdentifierFieldsForInit< + T extends PersistentModel, + M extends PersistentModelMetaData +> = MetadataOrDefault['identifier'] extends + | DefaultPersistentModelMetaData + | ManagedIdentifier + ? never + : MetadataOrDefault['identifier'] extends OptionallyManagedIdentifier< + T, + any + > + ? IdentifierFields + : MetadataOrDefault['identifier'] extends CompositeIdentifier + ? 
IdentifierFields + : never; + // Instance of model -export type PersistentModelMetaData = { - readOnlyFields: string; +export declare const __modelMeta__: unique symbol; + +export type PersistentModelMetaData = { + identifier?: Identifier; + readOnlyFields?: string; }; -export type PersistentModel = Readonly<{ id: string } & Record>; +export type DefaultPersistentModelMetaData = { + identifier: ManagedIdentifier<{ id: string }, 'id'>; + readOnlyFields: never; +}; + +export type MetadataOrDefault< + T extends PersistentModel, + _ extends PersistentModelMetaData = never +> = T extends { + [__modelMeta__]: PersistentModelMetaData; +} + ? T[typeof __modelMeta__] + : DefaultPersistentModelMetaData; + +export type PersistentModel = Readonly>; + +export type MetadataReadOnlyFields< + T extends PersistentModel, + M extends PersistentModelMetaData +> = Extract< + MetadataOrDefault['readOnlyFields'] | M['readOnlyFields'], + keyof T +>; + +// This type omits the metadata field in the constructor init object +// This type omits identifier fields in the constructor init object +// This type omits readOnlyFields in the constructor init object +// This type requires some identifiers in the constructor init object (e.g. CustomIdentifier) +// This type makes optional some identifiers in the constructor init object (e.g. OptionallyManagedIdentifier) export type ModelInit< + T extends PersistentModel, + M extends PersistentModelMetaData = {} +> = Omit< T, - K extends PersistentModelMetaData = { - readOnlyFields: 'createdAt' | 'updatedAt'; - } -> = Omit; + typeof __modelMeta__ | IdentifierFields | MetadataReadOnlyFields +> & + (MetadataOrDefault['identifier'] extends OptionallyManagedIdentifier< + T, + any + > + ? Partial>> + : Required>>); + type DeepWritable = { -readonly [P in keyof T]: T[P] extends TypeName ? 
T[P] @@ -370,22 +489,45 @@ type DeepWritable = { }; export type MutableModel< - T extends Record, - K extends PersistentModelMetaData = { - readOnlyFields: 'createdAt' | 'updatedAt'; - } + T extends PersistentModel, + M extends PersistentModelMetaData = {} // This provides Intellisense with ALL of the properties, regardless of read-only // but will throw a linting error if trying to overwrite a read-only property -> = DeepWritable> & - Readonly>; +> = DeepWritable< + Omit | MetadataReadOnlyFields> +> & + Readonly | MetadataReadOnlyFields>>; export type ModelInstanceMetadata = { - id: string; _version: number; _lastChangedAt: number; _deleted: boolean; }; +export type IdentifierFieldValue< + T extends PersistentModel, + M extends PersistentModelMetaData +> = MetadataOrDefault['identifier'] extends CompositeIdentifier + ? MetadataOrDefault['identifier']['fields'] extends [any] + ? T[MetadataOrDefault['identifier']['fields'][0]] + : never + : T[MetadataOrDefault['identifier']['field']]; + +export type IdentifierFieldOrIdentifierObject< + T extends PersistentModel, + M extends PersistentModelMetaData +> = Pick> | IdentifierFieldValue; + +export function isIdentifierObject( + obj: any, + modelDefinition: SchemaModel +): obj is IdentifierFields { + const keys = extractPrimaryKeyFieldNames(modelDefinition); + + return ( + typeof obj === 'object' && obj && keys.every(k => obj[k] !== undefined) + ); +} //#endregion //#region Subscription messages @@ -635,11 +777,21 @@ export type RelationType = { modelName: string; relationType: 'HAS_ONE' | 'HAS_MANY' | 'BELONGS_TO'; targetName?: string; - associatedWith?: string; + targetNames?: string[]; + associatedWith?: string | string[]; +}; + +type IndexOptions = { + unique?: boolean; }; +export type IndexesType = Array<[string, string[], IndexOptions?]>; + export type RelationshipType = { - [modelName: string]: { indexes: string[]; relationTypes: RelationType[] }; + [modelName: string]: { + indexes: IndexesType; + relationTypes: 
RelationType[]; + }; }; //#endregion diff --git a/packages/datastore/src/util.ts b/packages/datastore/src/util.ts index 08b5c74bc3e..0b24beff9c5 100644 --- a/packages/datastore/src/util.ts +++ b/packages/datastore/src/util.ts @@ -26,9 +26,41 @@ import { NonModelTypeConstructor, DeferredCallbackResolverOptions, LimitTimerRaceResolvedValues, + SchemaModel, + ModelAttribute, + IndexesType, + ModelAssociation, } from './types'; import { WordArray } from 'amazon-cognito-identity-js'; +export const ID = 'id'; + +/** + * Used by the Async Storage Adapter to concatenate key values + * for a record. For instance, if a model has the following keys: + * `customId: ID! @primaryKey(sortKeyFields: ["createdAt"])`, + * we concatenate the `customId` and `createdAt` as: + * `12-234-5#2022-09-28T00:00:00.000Z` + */ +export const DEFAULT_PRIMARY_KEY_VALUE_SEPARATOR = '#'; + +/** + * Used for generating spinal-cased index name from an array of + * key field names. + * E.g. for keys `[id, title]` => 'id-title' + */ +export const IDENTIFIER_KEY_SEPARATOR = '-'; + +export const errorMessages = { + idEmptyString: 'An index field cannot contain an empty string value', + queryByPkWithCompositeKeyPresent: + 'Models with composite primary keys cannot be queried by a single key value. Use object literal syntax for composite keys instead: https://docs.amplify.aws/lib/datastore/advanced-workflows/q/platform/js/#querying-records-with-custom-primary-keys', + deleteByPkWithCompositeKeyPresent: + 'Models with composite primary keys cannot be deleted by a single key value, unless using a predicate. Use object literal syntax for composite keys instead: https://docs.amplify.aws/lib/datastore/advanced-workflows/q/platform/js/#querying-records-with-custom-primary-keys', + observeWithObjectLiteral: + 'Object literal syntax cannot be used with observe. 
Use a predicate instead: https://docs.amplify.aws/lib/datastore/data-access/q/platform/js/#predicates', +}; + export enum NAMESPACES { DATASTORE = 'datastore', USER = 'user', @@ -162,154 +194,6 @@ export const isNonModelConstructor = ( return nonModelClasses.has(obj); }; -/* - When we have GSI(s) with composite sort keys defined on a model - There are some very particular rules regarding which fields must be included in the update mutation input - The field selection becomes more complex as the number of GSIs with composite sort keys grows - - To summarize: any time we update a field that is part of the composite sort key of a GSI, we must include: - 1. all of the other fields in that composite sort key - 2. all of the fields from any other composite sort key that intersect with the fields from 1. - - E.g., - Model @model - @key(name: 'key1' fields: ['hk', 'a', 'b', 'c']) - @key(name: 'key2' fields: ['hk', 'a', 'b', 'd']) - @key(name: 'key3' fields: ['hk', 'x', 'y', 'z']) - - Model.a is updated => include ['a', 'b', 'c', 'd'] - Model.c is updated => include ['a', 'b', 'c', 'd'] - Model.d is updated => include ['a', 'b', 'c', 'd'] - Model.x is updated => include ['x', 'y', 'z'] - - This function accepts a model's attributes and returns grouped sets of composite key fields - Using our example Model above, the function will return: - [ - Set('a', 'b', 'c', 'd'), - Set('x', 'y', 'z'), - ] - - This gives us the opportunity to correctly include the required fields for composite keys - When crafting the mutation input in Storage.getUpdateMutationInput - - See 'processCompositeKeys' test in util.test.ts for more examples -*/ -export const processCompositeKeys = ( - attributes: ModelAttributes -): Set[] => { - const extractCompositeSortKey = ({ - properties: { - // ignore the HK (fields[0]) we only need to include the composite sort key fields[1...n] - fields: [, ...sortKeyFields], - }, - }) => sortKeyFields; - - const compositeKeyFields = attributes - 
.filter(isModelAttributeCompositeKey) - .map(extractCompositeSortKey); - - /* - if 2 sets of fields have any intersecting fields => combine them into 1 union set - e.g., ['a', 'b', 'c'] and ['a', 'b', 'd'] => ['a', 'b', 'c', 'd'] - */ - const combineIntersecting = (fields): Set[] => - fields.reduce((combined, sortKeyFields) => { - const sortKeyFieldsSet = new Set(sortKeyFields); - - if (combined.length === 0) { - combined.push(sortKeyFieldsSet); - return combined; - } - - // does the current set share values with another set we've already added to `combined`? - const intersectingSetIdx = combined.findIndex(existingSet => { - return [...existingSet].some(f => sortKeyFieldsSet.has(f)); - }); - - if (intersectingSetIdx > -1) { - const union = new Set([ - ...combined[intersectingSetIdx], - ...sortKeyFieldsSet, - ]); - // combine the current set with the intersecting set we found above - combined[intersectingSetIdx] = union; - } else { - // none of the sets in `combined` have intersecting values with the current set - combined.push(sortKeyFieldsSet); - } - - return combined; - }, []); - - const initial = combineIntersecting(compositeKeyFields); - // a single pass pay not be enough to correctly combine all the fields - // call the function once more to get a final merged list of sets - const combined = combineIntersecting(initial); - - return combined; -}; - -export const establishRelationAndKeys = ( - namespace: SchemaNamespace -): [RelationshipType, ModelKeys] => { - const relationship: RelationshipType = {}; - const keys: ModelKeys = {}; - - Object.keys(namespace.models).forEach((mKey: string) => { - relationship[mKey] = { indexes: [], relationTypes: [] }; - keys[mKey] = {}; - - const model = namespace.models[mKey]; - Object.keys(model.fields).forEach((attr: string) => { - const fieldAttribute = model.fields[attr]; - if ( - typeof fieldAttribute.type === 'object' && - 'model' in fieldAttribute.type - ) { - const connectionType = 
fieldAttribute.association.connectionType; - relationship[mKey].relationTypes.push({ - fieldName: fieldAttribute.name, - modelName: fieldAttribute.type.model, - relationType: connectionType, - targetName: fieldAttribute.association['targetName'], - associatedWith: fieldAttribute.association['associatedWith'], - }); - - if (connectionType === 'BELONGS_TO') { - relationship[mKey].indexes.push( - fieldAttribute.association['targetName'] - ); - } - } - }); - - if (model.attributes) { - keys[mKey].compositeKeys = processCompositeKeys(model.attributes); - - for (const attribute of model.attributes) { - if (!isModelAttributeKey(attribute)) { - continue; - } - - if (isModelAttributePrimaryKey(attribute)) { - keys[mKey].primaryKey = attribute.properties.fields; - } - - const { fields } = attribute.properties; - for (const field of fields) { - // only add index if it hasn't already been added - const exists = relationship[mKey].indexes.includes(field); - if (!exists) { - relationship[mKey].indexes.push(field); - } - } - } - } - }); - - return [relationship, keys]; -}; - const topologicallySortedModels = new WeakMap(); export const traverseModel = ( @@ -323,12 +207,14 @@ export const traverseModel = ( ) => PersistentModelConstructor ) => { const relationships = namespace.relationships; + const modelConstructor = getModelConstructorByModelName( namespace.name, srcModelName ); const relation = relationships[srcModelName]; + const result: { modelName: string; item: T; @@ -362,15 +248,36 @@ export const traverseModel = ( instance: modelInstance, }); - // targetName will be defined for Has One if feature flag + const targetNames: string[] | undefined = + extractTargetNamesFromSrc(rItem); + + // `targetName` will be defined for Has One if feature flag // https://docs.amplify.aws/cli/reference/feature-flags/#useAppsyncModelgenPlugin // is true (default as of 5/7/21) // Making this conditional for backward-compatibility - if (rItem.targetName) { - (draftInstance)[rItem.targetName] = 
(( - draftInstance[rItem.fieldName] - )).id; - delete draftInstance[rItem.fieldName]; + if (targetNames) { + targetNames.forEach((targetName, idx) => { + // Get the connected record + const relatedRecordInProxy = ( + draftInstance[rItem.fieldName] + ); + + // Previously, we used the hardcoded 'id' as they key, + // now we need the value of the key to get the PK (and SK) + // values from the related record + + const { primaryKey } = namespace.keys[modelConstructor.name]; + const keyField = primaryKey && primaryKey[idx]; + + // Get the value + const relatedRecordInProxyPkValue = + relatedRecordInProxy[keyField]; + + // Set the targetName value + (draftInstance)[targetName] = relatedRecordInProxyPkValue; + }); + // Delete the instance from the proxy + delete (draftInstance)[rItem.fieldName]; } else { (draftInstance)[rItem.fieldName] = (( draftInstance[rItem.fieldName] @@ -405,10 +312,33 @@ export const traverseModel = ( } if (draftInstance[rItem.fieldName]) { - (draftInstance)[rItem.targetName] = (( - draftInstance[rItem.fieldName] - )).id; - delete draftInstance[rItem.fieldName]; + const targetNames: string[] | undefined = + extractTargetNamesFromSrc(rItem); + + if (targetNames) { + targetNames.forEach((targetName, idx) => { + // Get the connected record + const relatedRecordInProxy = ( + draftInstance[rItem.fieldName] + ); + // Previously, we used the hardcoded `id` for the key. 
+ // Now, we need the value of the key to get the PK (and SK) + // values from the related record + const { primaryKey } = namespace.keys[modelConstructor.name]; + + // fall back to ID if + const keyField = primaryKey && primaryKey[idx]; + + // Get the value + const relatedRecordInProxyPkValue = + relatedRecordInProxy[keyField]; + + // Set the targetName value + (draftInstance)[targetName] = relatedRecordInProxyPkValue; + }); + // Delete the instance from the proxy + delete (draftInstance)[rItem.fieldName]; + } } break; @@ -446,24 +376,6 @@ export const traverseModel = ( return result; }; -export const getIndex = (rel: RelationType[], src: string): string => { - let index = ''; - rel.some((relItem: RelationType) => { - if (relItem.modelName === src) { - index = relItem.targetName; - } - }); - return index; -}; - -export const getIndexFromAssociation = ( - indexes: string[], - src: string -): string => { - const index = indexes.find(idx => idx === src); - return index; -}; - let privateModeCheckResult; export const isPrivateMode = () => { @@ -811,3 +723,330 @@ export function mergePatches( ); return patches; } + +export const getStorename = (namespace: string, modelName: string) => { + const storeName = `${namespace}_${modelName}`; + + return storeName; +}; + +//#region Key Utils + +/* + When we have GSI(s) with composite sort keys defined on a model + There are some very particular rules regarding which fields must be included in the update mutation input + The field selection becomes more complex as the number of GSIs with composite sort keys grows + + To summarize: any time we update a field that is part of the composite sort key of a GSI, we must include: + 1. all of the other fields in that composite sort key + 2. all of the fields from any other composite sort key that intersect with the fields from 1. 
+ + E.g., + Model @model + @key(name: 'key1' fields: ['hk', 'a', 'b', 'c']) + @key(name: 'key2' fields: ['hk', 'a', 'b', 'd']) + @key(name: 'key3' fields: ['hk', 'x', 'y', 'z']) + + Model.a is updated => include ['a', 'b', 'c', 'd'] + Model.c is updated => include ['a', 'b', 'c', 'd'] + Model.d is updated => include ['a', 'b', 'c', 'd'] + Model.x is updated => include ['x', 'y', 'z'] + + This function accepts a model's attributes and returns grouped sets of composite key fields + Using our example Model above, the function will return: + [ + Set('a', 'b', 'c', 'd'), + Set('x', 'y', 'z'), + ] + + This gives us the opportunity to correctly include the required fields for composite keys + When crafting the mutation input in Storage.getUpdateMutationInput + + See 'processCompositeKeys' test in util.test.ts for more examples +*/ +export const processCompositeKeys = ( + attributes: ModelAttributes +): Set[] => { + const extractCompositeSortKey = ({ + properties: { + // ignore the HK (fields[0]) we only need to include the composite sort key fields[1...n] + fields: [, ...sortKeyFields], + }, + }) => sortKeyFields; + + const compositeKeyFields = attributes + .filter(isModelAttributeCompositeKey) + .map(extractCompositeSortKey); + + /* + if 2 sets of fields have any intersecting fields => combine them into 1 union set + e.g., ['a', 'b', 'c'] and ['a', 'b', 'd'] => ['a', 'b', 'c', 'd'] + */ + const combineIntersecting = (fields): Set[] => + fields.reduce((combined, sortKeyFields) => { + const sortKeyFieldsSet = new Set(sortKeyFields); + + if (combined.length === 0) { + combined.push(sortKeyFieldsSet); + return combined; + } + + // does the current set share values with another set we've already added to `combined`? 
+ const intersectingSetIdx = combined.findIndex(existingSet => { + return [...existingSet].some(f => sortKeyFieldsSet.has(f)); + }); + + if (intersectingSetIdx > -1) { + const union = new Set([ + ...combined[intersectingSetIdx], + ...sortKeyFieldsSet, + ]); + // combine the current set with the intersecting set we found above + combined[intersectingSetIdx] = union; + } else { + // none of the sets in `combined` have intersecting values with the current set + combined.push(sortKeyFieldsSet); + } + + return combined; + }, []); + + const initial = combineIntersecting(compositeKeyFields); + // a single pass pay not be enough to correctly combine all the fields + // call the function once more to get a final merged list of sets + const combined = combineIntersecting(initial); + + return combined; +}; + +export const extractKeyIfExists = ( + modelDefinition: SchemaModel +): ModelAttribute | undefined => { + const keyAttribute = modelDefinition?.attributes?.find(isModelAttributeKey); + + return keyAttribute; +}; + +export const extractPrimaryKeyFieldNames = ( + modelDefinition: SchemaModel +): string[] => { + const keyAttribute = extractKeyIfExists(modelDefinition); + if (keyAttribute && isModelAttributePrimaryKey(keyAttribute)) { + return keyAttribute.properties.fields; + } + + return [ID]; +}; + +export const extractPrimaryKeyValues = ( + model: T, + keyFields: string[] +): string[] => { + return keyFields.map(key => model[key]); +}; + +export const extractPrimaryKeysAndValues = ( + model: T, + keyFields: string[] +): any => { + const primaryKeysAndValues = {}; + keyFields.forEach(key => (primaryKeysAndValues[key] = model[key])); + return primaryKeysAndValues; +}; + +// IdentifierFields +// Default behavior without explicit @primaryKey defined +export const isIdManaged = (modelDefinition: SchemaModel): boolean => { + const keyAttribute = extractKeyIfExists(modelDefinition); + + if (keyAttribute && isModelAttributePrimaryKey(keyAttribute)) { + return false; + } + + 
return true; +}; + +// IdentifierFields +// @primaryKey with explicit `id` in the PK. Single key or composite +export const isIdOptionallyManaged = ( + modelDefinition: SchemaModel +): boolean => { + const keyAttribute = extractKeyIfExists(modelDefinition); + + if (keyAttribute && isModelAttributePrimaryKey(keyAttribute)) { + return keyAttribute.properties.fields[0] === ID; + } + + return false; +}; + +export const establishRelationAndKeys = ( + namespace: SchemaNamespace +): [RelationshipType, ModelKeys] => { + const relationship: RelationshipType = {}; + const keys: ModelKeys = {}; + + Object.keys(namespace.models).forEach((mKey: string) => { + relationship[mKey] = { indexes: [], relationTypes: [] }; + keys[mKey] = {}; + + const model = namespace.models[mKey]; + Object.keys(model.fields).forEach((attr: string) => { + const fieldAttribute = model.fields[attr]; + if ( + typeof fieldAttribute.type === 'object' && + 'model' in fieldAttribute.type + ) { + const connectionType = fieldAttribute.association.connectionType; + relationship[mKey].relationTypes.push({ + fieldName: fieldAttribute.name, + modelName: fieldAttribute.type.model, + relationType: connectionType, + targetName: fieldAttribute.association['targetName'], + targetNames: fieldAttribute.association['targetNames'], + associatedWith: fieldAttribute.association['associatedWith'], + }); + + if (connectionType === 'BELONGS_TO') { + const targetNames = extractTargetNamesFromSrc( + fieldAttribute.association + ); + + if (targetNames) { + const idxName = indexNameFromKeys(targetNames); + relationship[mKey].indexes.push([idxName, targetNames]); + } + } + } + }); + + if (model.attributes) { + keys[mKey].compositeKeys = processCompositeKeys(model.attributes); + + for (const attribute of model.attributes) { + if (!isModelAttributeKey(attribute)) { + continue; + } + + const { fields } = attribute.properties; + + if (isModelAttributePrimaryKey(attribute)) { + keys[mKey].primaryKey = fields; + continue; + } + + // 
create indexes for all other keys + const idxName = indexNameFromKeys(fields); + const idxExists = relationship[mKey].indexes.find( + ([index]) => index === idxName + ); + + if (!idxExists) { + relationship[mKey].indexes.push([idxName, fields]); + } + } + } + + // set 'id' as the PK for models without a custom PK explicitly defined + if (!keys[mKey].primaryKey) { + keys[mKey].primaryKey = [ID]; + } + + // create primary index + relationship[mKey].indexes.push([ + 'byPk', + keys[mKey].primaryKey as string[], + { unique: true }, + ]); + }); + + return [relationship, keys]; +}; + +export const getIndex = ( + rel: RelationType[], + src: string +): string | undefined => { + let indexName; + rel.some((relItem: RelationType) => { + if (relItem.modelName === src) { + const targetNames = extractTargetNamesFromSrc(relItem); + indexName = targetNames && indexNameFromKeys(targetNames); + return true; + } + }); + return indexName; +}; + +export const getIndexFromAssociation = ( + indexes: IndexesType, + src: string | string[] +): string | undefined => { + let indexName: string; + + if (Array.isArray(src)) { + indexName = indexNameFromKeys(src); + } else { + indexName = src; + } + + const associationIndex = indexes.find(([idxName]) => idxName === indexName); + return associationIndex && associationIndex[0]; +}; + +/** + * Backwards-compatability for schema generated prior to custom primary key support: +the single field `targetName` has been replaced with an array of `targetNames`. 
+`targetName` and `targetNames` are exclusive (will never exist on the same schema) + * @param src {RelationType | ModelAssociation | undefined} + * @returns array of targetNames, or `undefined` + */ +export const extractTargetNamesFromSrc = ( + src: RelationType | ModelAssociation | undefined +): string[] | undefined => { + const targetName = src?.targetName; + const targetNames = src?.targetNames; + + if (Array.isArray(targetNames)) { + return targetNames; + } else if (typeof targetName === 'string') { + return [targetName]; + } else { + return undefined; + } +}; + +// Generates spinal-cased index name from an array of key field names +// E.g. for keys `[id, title]` => 'id-title' +export const indexNameFromKeys = (keys: string[]): string => { + return keys.reduce((prev: string, cur: string, idx: number) => { + if (idx === 0) { + return cur; + } + return `${prev}${IDENTIFIER_KEY_SEPARATOR}${cur}`; + }, ''); +}; + +export const keysEqual = (keysA, keysB): boolean => { + if (keysA.length !== keysB.length) { + return false; + } + + return keysA.every((key, idx) => key === keysB[idx]); +}; + +// Returns primary keys for a model +export const getIndexKeys = ( + namespace: SchemaNamespace, + modelName: string +): string[] => { + const keyPath = namespace?.keys[modelName]?.primaryKey; + + if (keyPath) { + return keyPath; + } + + return [ID]; +}; + +//#endregion