diff --git a/.eslintrc.js b/.eslintrc.js index bb50a6f60..98be7e8ec 100644 --- a/.eslintrc.js +++ b/.eslintrc.js @@ -10,6 +10,11 @@ module.exports = { plugins: ['@typescript-eslint', 'prettier'], ignorePatterns: ['**/dist/**', '**/vercel/examples/**'], rules: { + '@typescript-eslint/lines-between-class-members': 'off', + '@typescript-eslint/no-unused-vars': [ + 'error', + { ignoreRestSiblings: true, argsIgnorePattern: '^_', varsIgnorePattern: '^__' }, + ], 'prettier/prettier': ['error'], 'class-methods-use-this': 'off', 'import/no-extraneous-dependencies': [ @@ -18,5 +23,11 @@ module.exports = { devDependencies: ['**/jest*.ts', '**/*.test.ts', '**/rollup.config.ts'], }, ], + 'import/default': 'error', + 'import/export': 'error', + 'import/no-self-import': 'error', + 'import/no-cycle': 'error', + 'import/no-useless-path-segments': 'error', + 'import/no-duplicates': 'error', }, }; diff --git a/.github/workflows/mocks.yml b/.github/workflows/mocks.yml new file mode 100644 index 000000000..674aa7a70 --- /dev/null +++ b/.github/workflows/mocks.yml @@ -0,0 +1,25 @@ +name: shared/mocks + +on: + push: + branches: [main, 'feat/**'] + paths-ignore: + - '**.md' #Do not need to run CI for markdown changes. + pull_request: + branches: [main, 'feat/**'] + paths-ignore: + - '**.md' + +jobs: + build-test-mocks: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + - uses: actions/setup-node@v3 + - id: shared + name: Shared CI Steps + uses: ./actions/ci + with: + workspace_name: '@launchdarkly/private-js-mocks' + workspace_path: packages/shared/mocks + should_build_docs: false diff --git a/actions/ci/action.yml b/actions/ci/action.yml index 439e4a3d5..6751d0cbd 100644 --- a/actions/ci/action.yml +++ b/actions/ci/action.yml @@ -11,7 +11,9 @@ inputs: workspace_path: description: 'Path to the package to release.' required: true - + should_build_docs: + description: 'Whether docs should be built. It will be by default.' 
+ default: true runs: using: composite steps: @@ -40,4 +42,5 @@ runs: - name: Build Docs shell: bash + if: ${{inputs.should_build_docs == 'true'}} run: yarn build:doc -- ${{ inputs.workspace_path }} diff --git a/contract-tests/index.js b/contract-tests/index.js index ee27b7751..06430d0a5 100644 --- a/contract-tests/index.js +++ b/contract-tests/index.js @@ -28,6 +28,9 @@ app.get('/', (req, res) => { 'tags', 'big-segments', 'user-type', + 'migrations', + 'event-sampling', + 'strongly-typed', ], }); }); diff --git a/contract-tests/sdkClientEntity.js b/contract-tests/sdkClientEntity.js index ca85905a5..c9a1556f3 100644 --- a/contract-tests/sdkClientEntity.js +++ b/contract-tests/sdkClientEntity.js @@ -1,4 +1,12 @@ -import ld from 'node-server-sdk'; +import got from 'got'; +import ld, { + createMigration, + LDConcurrentExecution, + LDExecutionOrdering, + LDMigrationError, + LDMigrationSuccess, + LDSerialExecution, +} from 'node-server-sdk'; import BigSegmentTestStore from './BigSegmentTestStore.js'; import { Log, sdkLogger } from './log.js'; @@ -9,7 +17,7 @@ export { badCommandError }; export function makeSdkConfig(options, tag) { const cf = { logger: sdkLogger(tag), - diagnosticOptOut: true + diagnosticOptOut: true, }; const maybeTime = (seconds) => seconds === undefined || seconds === null ? 
undefined : seconds / 1000; @@ -55,6 +63,30 @@ export function makeSdkConfig(options, tag) { return cf; } +function getExecution(order) { + switch (order) { + case 'serial': { + return new LDSerialExecution(LDExecutionOrdering.Fixed); + } + case 'random': { + return new LDSerialExecution(LDExecutionOrdering.Random); + } + case 'concurrent': { + return new LDConcurrentExecution(); + } + default: { + throw new Error('Unsupported execution order.'); + } + } +} + +function makeMigrationPostOptions(payload) { + if (payload) { + return { body: payload }; + } + return {}; +} + export async function newSdkClientEntity(options) { const c = {}; const log = Log(options.tag); @@ -93,10 +125,65 @@ export async function newSdkClientEntity(options) { case 'evaluate': { const pe = params.evaluate; if (pe.detail) { - return await client.variationDetail(pe.flagKey, pe.context || pe.user, pe.defaultValue); + switch (pe.valueType) { + case 'bool': + return await client.boolVariationDetail( + pe.flagKey, + pe.context || pe.user, + pe.defaultValue, + ); + case 'int': // Intentional fallthrough. + case 'double': + return await client.numberVariationDetail( + pe.flagKey, + pe.context || pe.user, + pe.defaultValue, + ); + case 'string': + return await client.stringVariationDetail( + pe.flagKey, + pe.context || pe.user, + pe.defaultValue, + ); + default: + return await client.variationDetail( + pe.flagKey, + pe.context || pe.user, + pe.defaultValue, + ); + } } else { - const value = await client.variation(pe.flagKey, pe.context || pe.user, pe.defaultValue); - return { value }; + switch (pe.valueType) { + case 'bool': + return { + value: await client.boolVariation( + pe.flagKey, + pe.context || pe.user, + pe.defaultValue, + ), + }; + case 'int': // Intentional fallthrough. 
+ case 'double': + return { + value: await client.numberVariation( + pe.flagKey, + pe.context || pe.user, + pe.defaultValue, + ), + }; + case 'string': + return { + value: await client.stringVariation( + pe.flagKey, + pe.context || pe.user, + pe.defaultValue, + ), + }; + default: + return { + value: await client.variation(pe.flagKey, pe.context || pe.user, pe.defaultValue), + }; + } } } @@ -127,6 +214,101 @@ export async function newSdkClientEntity(options) { case 'getBigSegmentStoreStatus': return await client.bigSegmentStoreStatusProvider.requireStatus(); + case 'migrationVariation': + const migrationVariation = params.migrationVariation; + const res = await client.migrationVariation( + migrationVariation.key, + migrationVariation.context, + migrationVariation.defaultStage, + ); + return { result: res.value }; + + case 'migrationOperation': + const migrationOperation = params.migrationOperation; + const readExecutionOrder = migrationOperation.readExecutionOrder; + + const migration = createMigration(client, { + execution: getExecution(readExecutionOrder), + latencyTracking: migrationOperation.trackLatency, + errorTracking: migrationOperation.trackErrors, + check: migrationOperation.trackConsistency ? 
(a, b) => a === b : undefined, + readNew: async (payload) => { + try { + const res = await got.post( + migrationOperation.newEndpoint, + makeMigrationPostOptions(payload), + ); + return LDMigrationSuccess(res.body); + } catch (err) { + return LDMigrationError(err.message); + } + }, + writeNew: async (payload) => { + try { + const res = await got.post( + migrationOperation.newEndpoint, + makeMigrationPostOptions(payload), + ); + return LDMigrationSuccess(res.body); + } catch (err) { + return LDMigrationError(err.message); + } + }, + readOld: async (payload) => { + try { + const res = await got.post( + migrationOperation.oldEndpoint, + makeMigrationPostOptions(payload), + ); + return LDMigrationSuccess(res.body); + } catch (err) { + return LDMigrationError(err.message); + } + }, + writeOld: async (payload) => { + try { + const res = await got.post( + migrationOperation.oldEndpoint, + makeMigrationPostOptions(payload), + ); + return LDMigrationSuccess(res.body); + } catch (err) { + return LDMigrationError(err.message); + } + }, + }); + + switch (migrationOperation.operation) { + case 'read': { + const res = await migration.read( + migrationOperation.key, + migrationOperation.context, + migrationOperation.defaultStage, + migrationOperation.payload, + ); + if (res.success) { + return { result: res.result }; + } else { + return { result: res.error }; + } + } + case 'write': { + const res = await migration.write( + migrationOperation.key, + migrationOperation.context, + migrationOperation.defaultStage, + migrationOperation.payload, + ); + + if (res.authoritative.success) { + return { result: res.authoritative.result }; + } else { + return { result: res.authoritative.error }; + } + } + } + return undefined; + default: throw badCommandError; } diff --git a/package.json b/package.json index aa90bac2a..c569cb13d 100644 --- a/package.json +++ b/package.json @@ -2,6 +2,8 @@ "name": "@launchdarkly/js-core", "workspaces": [ "packages/shared/common", + "packages/shared/mocks", + 
"packages/shared/sdk-client", "packages/shared/sdk-server", "packages/shared/sdk-server-edge", "packages/shared/akamai-edgeworker-sdk", diff --git a/packages/sdk/akamai-base/example/ldClient.ts b/packages/sdk/akamai-base/example/ldClient.ts index 8beb003d9..36f92d209 100644 --- a/packages/sdk/akamai-base/example/ldClient.ts +++ b/packages/sdk/akamai-base/example/ldClient.ts @@ -39,8 +39,7 @@ const flagData = ` class MyCustomStoreProvider implements EdgeProvider { // root key is formatted as LD-Env-{Launchdarkly environment client ID} - // eslint-disable-next-line @typescript-eslint/no-unused-vars - async get(rootKey: string): Promise { + async get(_rootKey: string): Promise { // you should provide an implementation to retrieve your flags from launchdarkly's https://sdk.launchdarkly.com/sdk/latest-all endpoint. // see https://docs.launchdarkly.com/sdk/features/flags-from-files for more information. return flagData; diff --git a/packages/sdk/server-node/__tests__/LDClientNode.test.ts b/packages/sdk/server-node/__tests__/LDClientNode.test.ts index 1e0dd0b52..1bfa8f6cd 100644 --- a/packages/sdk/server-node/__tests__/LDClientNode.test.ts +++ b/packages/sdk/server-node/__tests__/LDClientNode.test.ts @@ -1,4 +1,5 @@ import { LDContext } from '@launchdarkly/js-server-sdk-common'; +import { logger } from '@launchdarkly/private-js-mocks'; import { init } from '../src'; @@ -10,23 +11,24 @@ it('fires ready event in offline mode', (done) => { }); }); -it('fires the failed event if initialization fails', (done) => { +it('fires the failed event if initialization fails', async () => { + jest.useFakeTimers(); + + const failedHandler = jest.fn().mockName('failedHandler'); const client = init('sdk_key', { - updateProcessor: { - start: (fn: (err: any) => void) => { - setTimeout(() => { - fn(new Error('BAD THINGS')); - }, 0); + sendEvents: false, + logger, + updateProcessor: (clientContext, dataSourceUpdates, initSuccessHandler, errorHandler) => ({ + start: () => { + setTimeout(() => 
errorHandler?.(new Error('Something unexpected happened')), 0); }, - stop: () => {}, - close: () => {}, - sendEvents: false, - }, - }); - client.on('failed', () => { - client.close(); - done(); + close: jest.fn(), + }), }); + client.on('failed', failedHandler); + jest.runAllTimers(); + + expect(failedHandler).toBeCalledWith(new Error('Something unexpected happened')); }); // These tests are done in the node implementation because common doesn't have a crypto diff --git a/packages/sdk/server-node/__tests__/LDClientNode.tls.test.ts b/packages/sdk/server-node/__tests__/LDClientNode.tls.test.ts index ba675a552..8c6baba96 100644 --- a/packages/sdk/server-node/__tests__/LDClientNode.tls.test.ts +++ b/packages/sdk/server-node/__tests__/LDClientNode.tls.test.ts @@ -6,19 +6,14 @@ import { TestHttpServer, } from 'launchdarkly-js-test-helpers'; -import { basicLogger, LDClient, LDLogger } from '../src'; +import { logger } from '@launchdarkly/private-js-mocks'; + +import { LDClient } from '../src'; import LDClientNode from '../src/LDClientNode'; describe('When using a TLS connection', () => { let client: LDClient; let server: TestHttpServer; - let logger: LDLogger; - - beforeEach(() => { - logger = basicLogger({ - destination: () => {}, - }); - }); it('can connect via HTTPS to a server with a self-signed certificate, if CA is specified', async () => { server = await TestHttpServer.startSecure(); @@ -87,6 +82,7 @@ describe('When using a TLS connection', () => { stream: false, tlsParams: { ca: server.certificate }, diagnosticOptOut: true, + logger, }); await client.waitForInitialization(); diff --git a/packages/sdk/server-node/package.json b/packages/sdk/server-node/package.json index ad1d283eb..50181027a 100644 --- a/packages/sdk/server-node/package.json +++ b/packages/sdk/server-node/package.json @@ -50,6 +50,7 @@ "launchdarkly-eventsource": "2.0.1" }, "devDependencies": { + "@launchdarkly/private-js-mocks": "0.0.1", "@trivago/prettier-plugin-sort-imports": "^4.1.1", 
"@types/jest": "^29.4.0", "@typescript-eslint/eslint-plugin": "^6.1.0", diff --git a/packages/shared/akamai-edgeworker-sdk/src/featureStore/index.ts b/packages/shared/akamai-edgeworker-sdk/src/featureStore/index.ts index aea71e572..812ae1dfa 100644 --- a/packages/shared/akamai-edgeworker-sdk/src/featureStore/index.ts +++ b/packages/shared/akamai-edgeworker-sdk/src/featureStore/index.ts @@ -60,7 +60,7 @@ export class EdgeFeatureStore implements LDFeatureStore { callback(item.segments[dataKey]); break; default: - throw new Error(`Unsupported DataKind: ${namespace}`); + callback(null); } } catch (err) { this.logger.error(err); diff --git a/packages/shared/akamai-edgeworker-sdk/src/platform/requests.ts b/packages/shared/akamai-edgeworker-sdk/src/platform/requests.ts index cc7926dab..5a6b728b0 100644 --- a/packages/shared/akamai-edgeworker-sdk/src/platform/requests.ts +++ b/packages/shared/akamai-edgeworker-sdk/src/platform/requests.ts @@ -34,8 +34,7 @@ class NoopResponse implements Response { } export default class EdgeRequests implements Requests { - // eslint-disable-next-line @typescript-eslint/no-unused-vars - fetch(url: string, options: Options = {}): Promise { + fetch(url: string, _options: Options = {}): Promise { return Promise.resolve(new NoopResponse()); } diff --git a/packages/shared/akamai-edgeworker-sdk/src/utils/createCallbacks.ts b/packages/shared/akamai-edgeworker-sdk/src/utils/createCallbacks.ts index 8002344b4..b6907ddac 100644 --- a/packages/shared/akamai-edgeworker-sdk/src/utils/createCallbacks.ts +++ b/packages/shared/akamai-edgeworker-sdk/src/utils/createCallbacks.ts @@ -1,9 +1,8 @@ -/* eslint-disable @typescript-eslint/no-unused-vars */ // eslint-disable-next-line import/prefer-default-export export const createCallbacks = () => ({ - onError: (err: Error) => {}, - onFailed: (err: Error) => {}, + onError: (_err: Error) => {}, + onFailed: (_err: Error) => {}, onReady: () => {}, - onUpdate: (key: string) => {}, + onUpdate: (_key: string) => {}, 
hasEventListeners: () => false, }); diff --git a/packages/shared/common/__tests__/internal/events/EventProcessor.test.ts b/packages/shared/common/__tests__/internal/events/EventProcessor.test.ts index fc507d249..be3cf3ec6 100644 --- a/packages/shared/common/__tests__/internal/events/EventProcessor.test.ts +++ b/packages/shared/common/__tests__/internal/events/EventProcessor.test.ts @@ -1,33 +1,27 @@ -/* eslint-disable @typescript-eslint/no-unused-vars */ - -/* eslint-disable class-methods-use-this */ - -/* eslint-disable max-classes-per-file */ -import { AsyncQueue } from 'launchdarkly-js-test-helpers'; - -import { - ClientContext, - Context, - EventSource, - EventSourceInitDict, - Hasher, - Hmac, - Options, - Platform, - PlatformData, - Response, - SdkData, - ServiceEndpoints, -} from '../../../src'; -import { - LDContextDeduplicator, - LDDeliveryStatus, - LDEventSender, - LDEventSenderResult, - LDEventType, -} from '../../../src/api/subsystem'; +import { clientContext, ContextDeduplicator } from '@launchdarkly/private-js-mocks'; + +import { Context } from '../../../src'; +import { LDContextDeduplicator, LDDeliveryStatus, LDEventType } from '../../../src/api/subsystem'; import { EventProcessor, InputIdentifyEvent } from '../../../src/internal'; import { EventProcessorOptions } from '../../../src/internal/events/EventProcessor'; +import shouldSample from '../../../src/internal/events/sampling'; +import BasicLogger from '../../../src/logging/BasicLogger'; +import format from '../../../src/logging/format'; + +jest.mock('../../../src/internal/events/sampling', () => ({ + __esModule: true, + default: jest.fn(() => true), +})); + +const mockSendEventData = jest.fn(); + +jest.useFakeTimers(); + +jest.mock('../../../src/internal/events/EventSender', () => ({ + default: jest.fn(() => ({ + sendEventData: mockSendEventData, + })), +})); const user = { key: 'userKey', name: 'Red' }; const userWithFilteredName = { @@ -39,6 +33,7 @@ const userWithFilteredName = { const anonUser 
= { key: 'anon-user', name: 'Anon', anonymous: true }; const filteredUser = { key: 'userKey', kind: 'user', _meta: { redactedAttributes: ['name'] } }; +const testIndexEvent = { context: { ...user, kind: 'user' }, creationDate: 1000, kind: 'index' }; function makeSummary(start: number, end: number, count: number, version: number): any { return { endDate: end, @@ -94,43 +89,10 @@ function makeFeatureEvent( }; } -class MockEventSender implements LDEventSender { - public queue: AsyncQueue<{ type: LDEventType; data: any }> = new AsyncQueue(); - - public results: LDEventSenderResult[] = []; - - public defaultResult: LDEventSenderResult = { - status: LDDeliveryStatus.Succeeded, - }; - - async sendEventData(type: LDEventType, data: any): Promise { - this.queue.add({ type, data }); - return this.results.length ? this.results.shift()! : this.defaultResult; - } -} - -class MockContextDeduplicator implements LDContextDeduplicator { - flushInterval?: number | undefined = 0.1; - - seen: string[] = []; - - processContext(context: Context): boolean { - if (this.seen.indexOf(context.canonicalKey) >= 0) { - return false; - } - this.seen.push(context.canonicalKey); - return true; - } - - flush(): void {} -} - describe('given an event processor', () => { + let contextDeduplicator: LDContextDeduplicator; let eventProcessor: EventProcessor; - let eventSender: MockEventSender; - let contextDeduplicator: MockContextDeduplicator; - const eventProcessorConfig: EventProcessorOptions = { allAttributesPrivate: false, privateAttributes: [], @@ -139,69 +101,15 @@ describe('given an event processor', () => { diagnosticRecordingInterval: 900, }; - const basicConfiguration = { - offline: false, - serviceEndpoints: new ServiceEndpoints('', '', ''), - }; - - const platform: Platform = { - info: { - platformData(): PlatformData { - return { - os: { - name: 'An OS', - version: '1.0.1', - arch: 'An Arch', - }, - name: 'The SDK Name', - additional: { - nodeVersion: '42', - }, - }; - }, - sdkData(): 
SdkData { - return { - name: 'An SDK', - version: '2.0.2', - }; - }, - }, - crypto: { - createHash(algorithm: string): Hasher { - throw new Error('Function not implemented'); - }, - createHmac(algorithm: string, key: string): Hmac { - // Not used for this test. - throw new Error('Function not implemented.'); - }, - randomUUID(): string { - // Not used for this test. - throw new Error(`Function not implemented.`); - }, - }, - requests: { - /* eslint-disable-next-line @typescript-eslint/no-unused-vars */ - fetch(url: string, options?: Options): Promise { - throw new Error('Function not implemented.'); - }, - - /* eslint-disable-next-line @typescript-eslint/no-unused-vars */ - createEventSource(url: string, eventSourceInitDict: EventSourceInitDict): EventSource { - throw new Error('Function not implemented.'); - }, - }, - }; - beforeEach(() => { - eventSender = new MockEventSender(); - contextDeduplicator = new MockContextDeduplicator(); - - eventProcessor = new EventProcessor( - eventProcessorConfig, - new ClientContext('sdk-key', basicConfiguration, platform), - eventSender, - contextDeduplicator, + jest.clearAllMocks(); + mockSendEventData.mockImplementation(() => + Promise.resolve({ + status: LDDeliveryStatus.Succeeded, + }), ); + contextDeduplicator = new ContextDeduplicator(); + eventProcessor = new EventProcessor(eventProcessorConfig, clientContext, contextDeduplicator); }); afterEach(() => { @@ -214,12 +122,13 @@ describe('given an event processor', () => { await eventProcessor.flush(); - const request = await eventSender.queue.take(); - - expect(request.data[0].context).toEqual({ ...user, kind: 'user' }); - expect(request.data[0].creationDate).toEqual(1000); - expect(request.data[0].kind).toEqual('identify'); - expect(request.type).toEqual(LDEventType.AnalyticsEvents); + expect(mockSendEventData).toBeCalledWith(LDEventType.AnalyticsEvents, [ + { + context: { ...user, kind: 'user' }, + creationDate: 1000, + kind: 'identify', + }, + ]); }); it('filters user in 
identify event', async () => { @@ -228,11 +137,13 @@ describe('given an event processor', () => { await eventProcessor.flush(); - const request = await eventSender.queue.take(); - expect(request.data[0].context).toEqual({ ...filteredUser, kind: 'user' }); - expect(request.data[0].creationDate).toEqual(1000); - expect(request.data[0].kind).toEqual('identify'); - expect(request.type).toEqual(LDEventType.AnalyticsEvents); + expect(mockSendEventData).toBeCalledWith(LDEventType.AnalyticsEvents, [ + { + context: { ...filteredUser, kind: 'user' }, + creationDate: 1000, + kind: 'identify', + }, + ]); }); it('stringifies user attributes in identify event', async () => { @@ -255,23 +166,26 @@ describe('given an event processor', () => { ); await eventProcessor.flush(); - const request = await eventSender.queue.take(); - expect(request.data[0].context).toEqual({ - kind: 'user', - key: '1', - ip: '3', - country: '4', - email: '5', - firstName: '6', - lastName: '7', - avatar: '8', - name: '9', - age: 99, - anonymous: false, - }); - expect(request.data[0].creationDate).toEqual(1000); - expect(request.data[0].kind).toEqual('identify'); - expect(request.type).toEqual(LDEventType.AnalyticsEvents); + + expect(mockSendEventData).toBeCalledWith(LDEventType.AnalyticsEvents, [ + { + context: { + kind: 'user', + key: '1', + ip: '3', + country: '4', + email: '5', + firstName: '6', + lastName: '7', + avatar: '8', + name: '9', + age: 99, + anonymous: false, + }, + creationDate: 1000, + kind: 'identify', + }, + ]); }); it('queues individual feature event with index event', async () => { @@ -286,46 +200,100 @@ describe('given an event processor', () => { value: 'value', trackEvents: true, default: 'default', + samplingRatio: 1, + withReasons: true, }); await eventProcessor.flush(); - const request = await eventSender.queue.take(); - expect(request.data).toEqual([ + expect(mockSendEventData).toBeCalledWith(LDEventType.AnalyticsEvents, [ + testIndexEvent, + makeFeatureEvent(1000, 11), + 
makeSummary(1000, 1000, 1, 11), + ]); + }); + + it('uses sampling ratio for feature events', async () => { + Date.now = jest.fn(() => 1000); + eventProcessor.sendEvent({ + kind: 'feature', + creationDate: 1000, + context: Context.fromLDContext(user), + key: 'flagkey', + version: 11, + variation: 1, + value: 'value', + trackEvents: true, + default: 'default', + samplingRatio: 2, + withReasons: true, + }); + + await eventProcessor.flush(); + expect(shouldSample).toHaveBeenCalledWith(2); + + expect(mockSendEventData).toBeCalledWith(LDEventType.AnalyticsEvents, [ { kind: 'index', creationDate: 1000, context: { ...user, kind: 'user' }, }, - makeFeatureEvent(1000, 11), + { ...makeFeatureEvent(1000, 11), samplingRatio: 2 }, makeSummary(1000, 1000, 1, 11), ]); }); - it('handles the version being 0', async () => { + it('excludes feature events that are not sampled', async () => { + // @ts-ignore + shouldSample.mockImplementation((ratio) => ratio !== 2); Date.now = jest.fn(() => 1000); eventProcessor.sendEvent({ kind: 'feature', creationDate: 1000, context: Context.fromLDContext(user), key: 'flagkey', - version: 0, + version: 11, variation: 1, value: 'value', trackEvents: true, default: 'default', + samplingRatio: 2, + withReasons: true, }); await eventProcessor.flush(); + expect(shouldSample).toHaveBeenCalledWith(2); - const request = await eventSender.queue.take(); - - expect(request.data).toEqual([ + expect(mockSendEventData).toBeCalledWith(LDEventType.AnalyticsEvents, [ { kind: 'index', creationDate: 1000, context: { ...user, kind: 'user' }, }, + makeSummary(1000, 1000, 1, 11), + ]); + }); + + it('handles the version being 0', async () => { + Date.now = jest.fn(() => 1000); + eventProcessor.sendEvent({ + kind: 'feature', + creationDate: 1000, + context: Context.fromLDContext(user), + key: 'flagkey', + version: 0, + variation: 1, + value: 'value', + trackEvents: true, + default: 'default', + samplingRatio: 1, + withReasons: true, + }); + + await eventProcessor.flush(); + 
+ expect(mockSendEventData).toBeCalledWith(LDEventType.AnalyticsEvents, [ + testIndexEvent, makeFeatureEvent(1000, 0), makeSummary(1000, 1000, 1, 0), ]); @@ -344,17 +312,14 @@ describe('given an event processor', () => { trackEvents: false, debugEventsUntilDate: 2000, default: 'default', + samplingRatio: 1, + withReasons: true, }); await eventProcessor.flush(); - const request = await eventSender.queue.take(); - expect(request.data).toEqual([ - { - kind: 'index', - creationDate: 1000, - context: { ...user, kind: 'user' }, - }, + expect(mockSendEventData).toBeCalledWith(LDEventType.AnalyticsEvents, [ + testIndexEvent, makeFeatureEvent(1000, 11, true), makeSummary(1000, 1000, 1, 11), ]); @@ -373,17 +338,14 @@ describe('given an event processor', () => { trackEvents: true, debugEventsUntilDate: 2000, default: 'default', + samplingRatio: 1, + withReasons: true, }); await eventProcessor.flush(); - const request = await eventSender.queue.take(); - expect(request.data).toEqual([ - { - kind: 'index', - creationDate: 1000, - context: { ...user, kind: 'user' }, - }, + expect(mockSendEventData).toBeCalledWith(LDEventType.AnalyticsEvents, [ + testIndexEvent, makeFeatureEvent(1000, 11, false), makeFeatureEvent(1000, 11, true), makeSummary(1000, 1000, 1, 11), @@ -393,11 +355,6 @@ describe('given an event processor', () => { it('expires debug mode based on client time if client time is later than server time', async () => { Date.now = jest.fn(() => 2000); - eventSender.defaultResult = { - status: LDDeliveryStatus.Succeeded, - serverTime: new Date(1000).getTime(), - }; - eventProcessor.sendEvent({ kind: 'feature', creationDate: 1400, @@ -409,12 +366,13 @@ describe('given an event processor', () => { trackEvents: false, debugEventsUntilDate: 1500, default: 'default', + samplingRatio: 1, + withReasons: true, }); await eventProcessor.flush(); - const request = await eventSender.queue.take(); - expect(request.data).toEqual([ + 
expect(mockSendEventData).toBeCalledWith(LDEventType.AnalyticsEvents, [ { kind: 'index', creationDate: 1400, @@ -438,6 +396,8 @@ describe('given an event processor', () => { value: 'value', trackEvents: true, default: 'default', + samplingRatio: 1, + withReasons: true, }); eventProcessor.sendEvent({ kind: 'feature', @@ -449,12 +409,13 @@ describe('given an event processor', () => { value: 'carrot', trackEvents: true, default: 'potato', + samplingRatio: 1, + withReasons: true, }); await eventProcessor.flush(); - const request = await eventSender.queue.take(); - expect(request.data).toEqual([ + expect(mockSendEventData).toBeCalledWith(LDEventType.AnalyticsEvents, [ { kind: 'index', creationDate: 1000, @@ -511,6 +472,8 @@ describe('given an event processor', () => { value: 'value', trackEvents: false, default: 'default', + samplingRatio: 1, + withReasons: true, }); eventProcessor.sendEvent({ kind: 'feature', @@ -522,13 +485,13 @@ describe('given an event processor', () => { value: 'carrot', trackEvents: false, default: 'potato', + samplingRatio: 1, + withReasons: true, }); await eventProcessor.flush(); - const request = await eventSender.queue.take(); - - expect(request.data).toEqual([ + expect(mockSendEventData).toBeCalledWith(LDEventType.AnalyticsEvents, [ { kind: 'index', creationDate: 1000, @@ -577,12 +540,12 @@ describe('given an event processor', () => { context: Context.fromLDContext(user), key: 'eventkey', data: { thing: 'stuff' }, + samplingRatio: 1, }); await eventProcessor.flush(); - const request = await eventSender.queue.take(); - expect(request.data).toEqual([ + expect(mockSendEventData).toBeCalledWith(LDEventType.AnalyticsEvents, [ { kind: 'index', creationDate: 1000, @@ -607,12 +570,12 @@ describe('given an event processor', () => { context: Context.fromLDContext(anonUser), key: 'eventkey', data: { thing: 'stuff' }, + samplingRatio: 1, }); await eventProcessor.flush(); - const request = await eventSender.queue.take(); - expect(request.data).toEqual([ + 
expect(mockSendEventData).toBeCalledWith(LDEventType.AnalyticsEvents, [ { kind: 'index', creationDate: 1000, @@ -639,12 +602,12 @@ describe('given an event processor', () => { key: 'eventkey', data: { thing: 'stuff' }, metricValue: 1.5, + samplingRatio: 1, }); await eventProcessor.flush(); - const request = await eventSender.queue.take(); - expect(request.data).toEqual([ + expect(mockSendEventData).toBeCalledWith(LDEventType.AnalyticsEvents, [ { kind: 'index', creationDate: 1000, @@ -664,15 +627,18 @@ describe('given an event processor', () => { }); it('makes no requests if there are no events to flush', async () => { - eventProcessor.flush(); - expect(eventSender.queue.isEmpty()).toBeTruthy(); + await eventProcessor.flush(); + expect(mockSendEventData).not.toBeCalled(); }); it('will not shutdown after a recoverable error', async () => { - eventSender.defaultResult = { - status: LDDeliveryStatus.Failed, - error: new Error('some error'), - }; + mockSendEventData.mockImplementation(() => + Promise.resolve({ + status: LDDeliveryStatus.Failed, + error: new Error('some error'), + }), + ); + eventProcessor.sendEvent(new InputIdentifyEvent(Context.fromLDContext(user))); await expect(eventProcessor.flush()).rejects.toThrow('some error'); @@ -681,10 +647,13 @@ describe('given an event processor', () => { }); it('will shutdown after a non-recoverable error', async () => { - eventSender.defaultResult = { - status: LDDeliveryStatus.FailedAndMustShutDown, - error: new Error('some error'), - }; + mockSendEventData.mockImplementation(() => + Promise.resolve({ + status: LDDeliveryStatus.FailedAndMustShutDown, + error: new Error('some error'), + }), + ); + eventProcessor.sendEvent(new InputIdentifyEvent(Context.fromLDContext(user))); await expect(eventProcessor.flush()).rejects.toThrow('some error'); @@ -693,24 +662,33 @@ describe('given an event processor', () => { }); it('swallows errors from failed background flush', async () => { - // Make a new client that flushes fast. 
- const newConfig = { ...eventProcessorConfig, flushInterval: 0.1 }; - - eventSender.defaultResult = { - status: LDDeliveryStatus.Failed, - error: new Error('some error'), - }; - - eventProcessor.close(); - + mockSendEventData.mockImplementation(() => + Promise.resolve({ + status: LDDeliveryStatus.Failed, + error: new Error('some error'), + }), + ); + const mockConsole = jest.fn(); + const clientContextWithDebug = { ...clientContext }; + clientContextWithDebug.basicConfiguration.logger = new BasicLogger({ + level: 'debug', + destination: mockConsole, + formatter: format, + }); eventProcessor = new EventProcessor( - newConfig, - new ClientContext('sdk-key', basicConfiguration, platform), - eventSender, + eventProcessorConfig, + clientContextWithDebug, contextDeduplicator, ); + eventProcessor.sendEvent(new InputIdentifyEvent(Context.fromLDContext(user))); + await jest.advanceTimersByTimeAsync(eventProcessorConfig.flushInterval * 1000); - eventSender.queue.take(); + expect(mockConsole).toBeCalledTimes(2); + expect(mockConsole).toHaveBeenNthCalledWith(1, 'debug: [LaunchDarkly] Flushing 1 events'); + expect(mockConsole).toHaveBeenNthCalledWith( + 2, + 'debug: [LaunchDarkly] Flush failed: Error: some error', + ); }); }); diff --git a/packages/shared/common/__tests__/internal/events/EventSummarizer.test.ts b/packages/shared/common/__tests__/internal/events/EventSummarizer.test.ts index bb4bfa91d..1fd713a9f 100644 --- a/packages/shared/common/__tests__/internal/events/EventSummarizer.test.ts +++ b/packages/shared/common/__tests__/internal/events/EventSummarizer.test.ts @@ -25,6 +25,20 @@ describe('given an event summarizer', () => { expect(beforeSummary).toEqual(afterSummary); }); + it('does nothing for an event with excludeFromSummaries set to true', () => { + const event = { + kind: 'feature', + creationDate: 2000, + key: 'key', + context, + excludeFromSummaries: true, + }; + const beforeSummary = summarizer.getSummary(); + summarizer.summarizeEvent(event as any); + 
const afterSummary = summarizer.getSummary(); + expect(beforeSummary).toEqual(afterSummary); + }); + it('sets start and end dates for feature events', () => { const event1 = { kind: 'feature', diff --git a/packages/shared/common/jest.config.js b/packages/shared/common/jest.config.js index f106eb3bc..6753062cc 100644 --- a/packages/shared/common/jest.config.js +++ b/packages/shared/common/jest.config.js @@ -1,6 +1,6 @@ module.exports = { transform: { '^.+\\.ts?$': 'ts-jest' }, - testMatch: ['**/__tests__/**/*test.ts?(x)'], + testMatch: ['**/*.test.ts?(x)'], testEnvironment: 'node', moduleFileExtensions: ['ts', 'tsx', 'js', 'jsx', 'json', 'node'], collectCoverageFrom: ['src/**/*.ts'], diff --git a/packages/shared/common/package.json b/packages/shared/common/package.json index 4f146fabe..1d2402773 100644 --- a/packages/shared/common/package.json +++ b/packages/shared/common/package.json @@ -20,13 +20,15 @@ ], "scripts": { "test": "npx jest --ci", + "build-types": "npx tsc --declaration true --emitDeclarationOnly true --declarationDir dist", "build": "npx tsc", "clean": "npx tsc --build --clean", "lint": "npx eslint . --ext .ts", - "lint:fix": "yarn run lint -- --fix" + "lint:fix": "yarn run lint --fix" }, "license": "Apache-2.0", "devDependencies": { + "@launchdarkly/private-js-mocks": "0.0.1", "@trivago/prettier-plugin-sort-imports": "^4.1.1", "@types/jest": "^29.4.0", "@typescript-eslint/eslint-plugin": "^6.1.0", diff --git a/packages/shared/common/src/Context.ts b/packages/shared/common/src/Context.ts index f71d4a148..d371aaaf1 100644 --- a/packages/shared/common/src/Context.ts +++ b/packages/shared/common/src/Context.ts @@ -15,9 +15,6 @@ import { TypeValidators } from './validators'; // This is to reduce work on the hot-path. Later, for event processing, deeper // cloning of the context will be done. -// Validates a kind excluding check that it isn't "kind". 
-const KindValidator = TypeValidators.stringMatchingRegex(/^(\w|\.|-)+$/); - // When no kind is specified, then this kind will be used. const DEFAULT_KIND = 'user'; @@ -98,7 +95,7 @@ function isContextCommon( * @returns true if the kind is valid. */ function validKind(kind: string) { - return KindValidator.is(kind) && kind !== 'kind'; + return TypeValidators.Kind.is(kind); } /** diff --git a/packages/shared/common/src/api/data/LDEvaluationDetail.ts b/packages/shared/common/src/api/data/LDEvaluationDetail.ts index 37c959ce1..0a4f11dd7 100644 --- a/packages/shared/common/src/api/data/LDEvaluationDetail.ts +++ b/packages/shared/common/src/api/data/LDEvaluationDetail.ts @@ -27,3 +27,22 @@ export interface LDEvaluationDetail { */ reason: LDEvaluationReason; } + +export interface LDEvaluationDetailTyped { + /** + * The result of the flag evaluation. This will be either one of the flag's variations or + * the default value that was passed to `LDClient.variationDetail`. + */ + value: TFlag; + + /** + * The index of the returned value within the flag's list of variations, e.g. 0 for the + * first variation-- or `null` if the default value was returned. + */ + variationIndex?: number | null; + + /** + * An object describing the main factor that influenced the flag evaluation value. + */ + reason: LDEvaluationReason; +} diff --git a/packages/shared/common/src/api/options/LDClientContext.ts b/packages/shared/common/src/api/options/LDClientContext.ts index 49263cdcf..6fd96452c 100644 --- a/packages/shared/common/src/api/options/LDClientContext.ts +++ b/packages/shared/common/src/api/options/LDClientContext.ts @@ -12,13 +12,6 @@ export interface LDServiceEndpoints { * The most basic properties of the SDK client that are available to all SDK component factories. */ export interface LDBasicConfiguration { - logger?: LDLogger; - - /** - * True if the SDK was configured to be completely offline. - */ - offline: boolean; - /** * The configured SDK key. 
*/ @@ -28,6 +21,15 @@ export interface LDBasicConfiguration { * Defines the base service URIs used by SDK components. */ serviceEndpoints: LDServiceEndpoints; + + /** + * True if the SDK was configured to be completely offline. + */ + offline?: boolean; + + logger?: LDLogger; + + tags?: { value?: string }; } /** diff --git a/packages/shared/common/src/api/platform/Encoding.ts b/packages/shared/common/src/api/platform/Encoding.ts new file mode 100644 index 000000000..c431ada02 --- /dev/null +++ b/packages/shared/common/src/api/platform/Encoding.ts @@ -0,0 +1,3 @@ +export interface Encoding { + btoa(data: string): string; +} diff --git a/packages/shared/common/src/api/platform/EventSource.ts b/packages/shared/common/src/api/platform/EventSource.ts index 0a651363e..3cf8c1dd1 100644 --- a/packages/shared/common/src/api/platform/EventSource.ts +++ b/packages/shared/common/src/api/platform/EventSource.ts @@ -1,10 +1,17 @@ +export type EventName = 'delete' | 'patch' | 'ping' | 'put'; +export type EventListener = (event?: { data?: any }) => void; +export type ProcessStreamResponse = { + deserializeData: (data: string) => any; + processJson: (json: any) => void; +}; + export interface EventSource { onclose: (() => void) | undefined; onerror: (() => void) | undefined; onopen: (() => void) | undefined; onretrying: ((e: { delayMillis: number }) => void) | undefined; - addEventListener(type: string, listener: (event?: { data?: any }) => void): void; + addEventListener(type: EventName, listener: EventListener): void; close(): void; } diff --git a/packages/shared/common/src/api/platform/Platform.ts b/packages/shared/common/src/api/platform/Platform.ts index b73cfb520..6c667dcaf 100644 --- a/packages/shared/common/src/api/platform/Platform.ts +++ b/packages/shared/common/src/api/platform/Platform.ts @@ -1,9 +1,15 @@ import { Crypto } from './Crypto'; +import { Encoding } from './Encoding'; import { Filesystem } from './Filesystem'; import { Info } from './Info'; import { Requests 
} from './Requests'; export interface Platform { + /** + * The interface for performing encoding operations. + */ + encoding?: Encoding; + /** * The interface for getting information about the platform and the execution * environment. diff --git a/packages/shared/common/src/api/platform/index.ts b/packages/shared/common/src/api/platform/index.ts index fc31a909a..0e488004e 100644 --- a/packages/shared/common/src/api/platform/index.ts +++ b/packages/shared/common/src/api/platform/index.ts @@ -1,3 +1,4 @@ +export * from './Encoding'; export * from './Crypto'; export * from './Filesystem'; export * from './Info'; diff --git a/packages/shared/sdk-server/src/api/subsystems/LDStreamProcessor.ts b/packages/shared/common/src/api/subsystem/LDStreamProcessor.ts similarity index 83% rename from packages/shared/sdk-server/src/api/subsystems/LDStreamProcessor.ts rename to packages/shared/common/src/api/subsystem/LDStreamProcessor.ts index ab0f6162b..67a7fa559 100644 --- a/packages/shared/sdk-server/src/api/subsystems/LDStreamProcessor.ts +++ b/packages/shared/common/src/api/subsystem/LDStreamProcessor.ts @@ -6,7 +6,7 @@ * @ignore */ export interface LDStreamProcessor { - start: (fn?: (err?: any) => void) => void; + start: () => void; stop: () => void; close: () => void; } diff --git a/packages/shared/common/src/api/subsystem/index.ts b/packages/shared/common/src/api/subsystem/index.ts index 70a1777e9..000f60f68 100644 --- a/packages/shared/common/src/api/subsystem/index.ts +++ b/packages/shared/common/src/api/subsystem/index.ts @@ -1,6 +1,7 @@ import LDContextDeduplicator from './LDContextDeduplicator'; import LDEventProcessor from './LDEventProcessor'; import LDEventSender, { LDDeliveryStatus, LDEventSenderResult, LDEventType } from './LDEventSender'; +import { LDStreamProcessor } from './LDStreamProcessor'; export { LDEventProcessor, @@ -9,4 +10,5 @@ export { LDDeliveryStatus, LDEventType, LDEventSenderResult, + LDStreamProcessor, }; diff --git 
a/packages/shared/sdk-server/src/errors.ts b/packages/shared/common/src/errors.ts similarity index 80% rename from packages/shared/sdk-server/src/errors.ts rename to packages/shared/common/src/errors.ts index 180401a2d..eb7be712e 100644 --- a/packages/shared/sdk-server/src/errors.ts +++ b/packages/shared/common/src/errors.ts @@ -2,9 +2,19 @@ // more complex, then they could be independent files. /* eslint-disable max-classes-per-file */ -export class LDPollingError extends Error { +export class LDFileDataSourceError extends Error { constructor(message: string) { super(message); + this.name = 'LaunchDarklyFileDataSourceError'; + } +} + +export class LDPollingError extends Error { + public readonly status?: number; + + constructor(message: string, status?: number) { + super(message); + this.status = status; this.name = 'LaunchDarklyPollingError'; } } diff --git a/packages/shared/common/src/index.ts b/packages/shared/common/src/index.ts index 53118276a..653cde18d 100644 --- a/packages/shared/common/src/index.ts +++ b/packages/shared/common/src/index.ts @@ -9,5 +9,6 @@ export * from './options'; export * from './utils'; export * as internal from './internal'; +export * from './errors'; export { AttributeReference, Context, ContextFilter }; diff --git a/packages/shared/common/src/internal/diagnostics/DiagnosticsManager.test.ts b/packages/shared/common/src/internal/diagnostics/DiagnosticsManager.test.ts new file mode 100644 index 000000000..2816dde10 --- /dev/null +++ b/packages/shared/common/src/internal/diagnostics/DiagnosticsManager.test.ts @@ -0,0 +1,113 @@ +import { basicPlatform } from '@launchdarkly/private-js-mocks'; + +import DiagnosticsManager from './DiagnosticsManager'; + +describe('given a diagnostics manager', () => { + const dateNowString = '2023-08-10'; + let manager: DiagnosticsManager; + + beforeAll(() => { + jest.useFakeTimers(); + jest.setSystemTime(new Date(dateNowString)); + }); + + afterAll(() => { + jest.useRealTimers(); + }); + + beforeEach(() => 
{ + manager = new DiagnosticsManager('my-sdk-key', basicPlatform, { test1: 'value1' }); + }); + + afterEach(() => { + jest.resetAllMocks(); + }); + + it('uses the last 6 characters of the SDK key in the diagnostic id', () => { + const { id } = manager.createInitEvent(); + expect(id.sdkKeySuffix).toEqual('dk-key'); + }); + + it('creates random UUID', () => { + const { id } = manager.createInitEvent(); + const manager2 = new DiagnosticsManager('my-sdk-key', basicPlatform, {}); + const { id: id2 } = manager2.createInitEvent(); + + expect(id.diagnosticId).toBeTruthy(); + expect(id2.diagnosticId).toBeTruthy(); + expect(id.diagnosticId).not.toEqual(id2.diagnosticId); + }); + + it('puts the start time into the init event', () => { + const { creationDate } = manager.createInitEvent(); + expect(creationDate).toEqual(Date.now()); + }); + + it('puts SDK data into the init event', () => { + const { sdk } = manager.createInitEvent(); + expect(sdk).toMatchObject(basicPlatform.info.sdkData()); + }); + + it('puts config data into the init event', () => { + const { configuration } = manager.createInitEvent(); + expect(configuration).toEqual({ test1: 'value1' }); + }); + + it('puts platform data into the init event', () => { + const { platform } = manager.createInitEvent(); + expect(platform).toEqual({ + name: 'The SDK Name', + osName: 'An OS', + osVersion: '1.0.1', + osArch: 'An Arch', + nodeVersion: '42', + }); + }); + + it('creates periodic event from stats, then resets', () => { + const originalDate = Date.now(); + const streamInit1 = originalDate + 1; + const streamInit2 = originalDate + 2; + const statsCreation1 = originalDate + 3; + const statsCreation2 = originalDate + 4; + + manager.recordStreamInit(streamInit1, true, 1000); + manager.recordStreamInit(streamInit2, false, 550); + jest.setSystemTime(statsCreation1); + const statsEvent1 = manager.createStatsEventAndReset(4, 5, 6); + + expect(statsEvent1).toMatchObject({ + kind: 'diagnostic', + creationDate: statsCreation1, + 
dataSinceDate: originalDate, + droppedEvents: 4, + deduplicatedUsers: 5, + eventsInLastBatch: 6, + streamInits: [ + { + timestamp: streamInit1, + failed: true, + durationMillis: 1000, + }, + { + timestamp: streamInit2, + failed: false, + durationMillis: 550, + }, + ], + }); + + jest.setSystemTime(statsCreation2); + const statsEvent2 = manager.createStatsEventAndReset(1, 2, 3); + + expect(statsEvent2).toMatchObject({ + kind: 'diagnostic', + creationDate: statsCreation2, + dataSinceDate: statsCreation1, + droppedEvents: 1, + deduplicatedUsers: 2, + eventsInLastBatch: 3, + streamInits: [], + }); + }); +}); diff --git a/packages/shared/common/src/internal/diagnostics/DiagnosticsManager.ts b/packages/shared/common/src/internal/diagnostics/DiagnosticsManager.ts new file mode 100644 index 000000000..4516b9f17 --- /dev/null +++ b/packages/shared/common/src/internal/diagnostics/DiagnosticsManager.ts @@ -0,0 +1,89 @@ +import { Platform } from '../../api'; +import { DiagnosticId, DiagnosticInitEvent, DiagnosticStatsEvent, StreamInitData } from './types'; + +export default class DiagnosticsManager { + private readonly startTime: number; + private streamInits: StreamInitData[] = []; + private readonly id: DiagnosticId; + private dataSinceDate: number; + + constructor( + sdkKey: string, + private readonly platform: Platform, + private readonly diagnosticInitConfig: any, + ) { + this.startTime = Date.now(); + this.dataSinceDate = this.startTime; + this.id = { + diagnosticId: platform.crypto.randomUUID(), + sdkKeySuffix: sdkKey.length > 6 ? sdkKey.substring(sdkKey.length - 6) : sdkKey, + }; + } + + /** + * Creates the initial event that is sent by the event processor when the SDK starts up. This will + * not be repeated during the lifetime of the SDK client. 
+ */ + createInitEvent(): DiagnosticInitEvent { + const sdkData = this.platform.info.sdkData(); + const platformData = this.platform.info.platformData(); + + return { + kind: 'diagnostic-init', + id: this.id, + creationDate: this.startTime, + sdk: sdkData, + configuration: this.diagnosticInitConfig, + platform: { + name: platformData.name, + osArch: platformData.os?.arch, + osName: platformData.os?.name, + osVersion: platformData.os?.version, + ...(platformData.additional || {}), + }, + }; + } + + /** + * Records a stream connection attempt (called by the stream processor). + * + * @param timestamp Time of the *beginning* of the connection attempt. + * @param failed True if the connection failed, or we got a read timeout before receiving a "put". + * @param durationMillis Elapsed time between starting timestamp and when we either gave up/lost + * the connection or received a successful "put". + */ + recordStreamInit(timestamp: number, failed: boolean, durationMillis: number) { + const item = { timestamp, failed, durationMillis }; + this.streamInits.push(item); + } + + /** + * Creates a periodic event containing time-dependent stats, and resets the state of the manager + * with regard to those stats. + * + * Note: the reason droppedEvents, deduplicatedUsers, and eventsInLastBatch are passed into this + * function, instead of being properties of the DiagnosticsManager, is that the event processor is + * the one who's calling this function and is also the one who's tracking those stats. 
+ */ + createStatsEventAndReset( + droppedEvents: number, + deduplicatedUsers: number, + eventsInLastBatch: number, + ): DiagnosticStatsEvent { + const currentTime = Date.now(); + const evt: DiagnosticStatsEvent = { + kind: 'diagnostic', + id: this.id, + creationDate: currentTime, + dataSinceDate: this.dataSinceDate, + droppedEvents, + deduplicatedUsers, + eventsInLastBatch, + streamInits: this.streamInits, + }; + + this.streamInits = []; + this.dataSinceDate = currentTime; + return evt; + } +} diff --git a/packages/shared/common/src/internal/diagnostics/index.ts b/packages/shared/common/src/internal/diagnostics/index.ts new file mode 100644 index 000000000..242b42f16 --- /dev/null +++ b/packages/shared/common/src/internal/diagnostics/index.ts @@ -0,0 +1,4 @@ +import DiagnosticsManager from './DiagnosticsManager'; + +// eslint-disable-next-line import/prefer-default-export +export { DiagnosticsManager }; diff --git a/packages/shared/common/src/internal/diagnostics/types.ts b/packages/shared/common/src/internal/diagnostics/types.ts new file mode 100644 index 000000000..bfb882a53 --- /dev/null +++ b/packages/shared/common/src/internal/diagnostics/types.ts @@ -0,0 +1,72 @@ +export interface DiagnosticPlatformData { + name?: string; + osArch?: string; + osName?: string; + osVersion?: string; + /** + * Platform specific identifiers. 
+ * For instance `nodeVersion` + */ + [key: string]: string | undefined; +} + +export interface DiagnosticSdkData { + name?: string; + wrapperName?: string; + wrapperVersion?: string; +} + +export interface DiagnosticConfigData { + customBaseURI: boolean; + customStreamURI: boolean; + customEventsURI: boolean; + eventsCapacity: number; + connectTimeoutMillis: number; + socketTimeoutMillis: number; + eventsFlushIntervalMillis: number; + pollingIntervalMillis: number; + // startWaitMillis: n/a (SDK does not have this feature) + // samplingInterval: n/a (SDK does not have this feature) + reconnectTimeMillis: number; + streamingDisabled: boolean; + usingRelayDaemon: boolean; + offline: boolean; + allAttributesPrivate: boolean; + contextKeysCapacity: number; + contextKeysFlushIntervalMillis: number; + usingProxy: boolean; + usingProxyAuthenticator: boolean; + diagnosticRecordingIntervalMillis: number; + dataStoreType: string; +} + +export interface DiagnosticId { + diagnosticId: string; + sdkKeySuffix: string; +} + +export interface DiagnosticInitEvent { + kind: 'diagnostic-init'; + id: DiagnosticId; + creationDate: number; + sdk: DiagnosticSdkData; + configuration: DiagnosticConfigData; + platform: DiagnosticPlatformData; +} + +export interface StreamInitData { + timestamp: number; + failed: boolean; + durationMillis: number; +} + +export interface DiagnosticStatsEvent { + kind: 'diagnostic'; + id: DiagnosticId; + creationDate: number; + dataSinceDate: number; + droppedEvents: number; + deduplicatedUsers: number; + eventsInLastBatch: number; + streamInits: StreamInitData[]; +} diff --git a/packages/shared/sdk-server/src/evaluation/ErrorKinds.ts b/packages/shared/common/src/internal/evaluation/ErrorKinds.ts similarity index 88% rename from packages/shared/sdk-server/src/evaluation/ErrorKinds.ts rename to packages/shared/common/src/internal/evaluation/ErrorKinds.ts index e2bb87af2..003c46b0d 100644 --- a/packages/shared/sdk-server/src/evaluation/ErrorKinds.ts +++ 
b/packages/shared/common/src/internal/evaluation/ErrorKinds.ts @@ -1,16 +1,12 @@ /** * Different kinds of error which may be encountered during evaluation. - * - * @internal */ enum ErrorKinds { MalformedFlag = 'MALFORMED_FLAG', UserNotSpecified = 'USER_NOT_SPECIFIED', FlagNotFound = 'FLAG_NOT_FOUND', ClientNotReady = 'CLIENT_NOT_READY', + WrongType = 'WRONG_TYPE', } -/** - * @internal - */ export default ErrorKinds; diff --git a/packages/shared/common/src/internal/evaluation/EventFactoryBase.ts b/packages/shared/common/src/internal/evaluation/EventFactoryBase.ts new file mode 100644 index 000000000..0b877a400 --- /dev/null +++ b/packages/shared/common/src/internal/evaluation/EventFactoryBase.ts @@ -0,0 +1,85 @@ +import { LDEvaluationReason, LDFlagValue } from '../../api'; +import Context from '../../Context'; +import { InputCustomEvent, InputEvalEvent, InputIdentifyEvent } from '../events'; + +export type EvalEventArgs = { + addExperimentData?: boolean; + context: Context; + debugEventsUntilDate?: number; + defaultVal: any; + excludeFromSummaries?: boolean; + flagKey: string; + prereqOfFlagKey?: string; + reason?: LDEvaluationReason; + samplingRatio?: number; + trackEvents: boolean; + value: LDFlagValue; + variation?: number; + version: number; +}; + +export default class EventFactoryBase { + constructor(private readonly withReasons: boolean) {} + + evalEvent(e: EvalEventArgs): InputEvalEvent { + return new InputEvalEvent( + this.withReasons, + e.context, + e.flagKey, + e.value, + e.defaultVal, + e.version, + // Exclude null as a possibility. + e.variation ?? undefined, + e.trackEvents || e.addExperimentData, + e.prereqOfFlagKey, + this.withReasons || e.addExperimentData ? 
e.reason : undefined, + e.debugEventsUntilDate, + e.excludeFromSummaries, + e.samplingRatio, + ); + } + + unknownFlagEvent(key: string, defVal: LDFlagValue, context: Context) { + return new InputEvalEvent( + this.withReasons, + context, + key, + defVal, + defVal, + // This isn't ideal, but the purpose of the factory is to at least + // handle this situation. + undefined, // version + undefined, // variation index + undefined, // track events + undefined, // prereqOf + undefined, // reason + undefined, // debugEventsUntilDate + undefined, // exclude from summaries + undefined, // sampling ratio + ); + } + + /* eslint-disable-next-line class-methods-use-this */ + identifyEvent(context: Context) { + // Currently sampling for identify events is always 1. + return new InputIdentifyEvent(context, 1); + } + + /* eslint-disable-next-line class-methods-use-this */ + customEvent( + key: string, + context: Context, + data?: any, + metricValue?: number, + samplingRatio: number = 1, + ) { + return new InputCustomEvent( + context, + key, + data ?? undefined, + metricValue ?? undefined, + samplingRatio, + ); + } +} diff --git a/packages/shared/common/src/internal/evaluation/evaluationDetail.ts b/packages/shared/common/src/internal/evaluation/evaluationDetail.ts new file mode 100644 index 000000000..c08cc8687 --- /dev/null +++ b/packages/shared/common/src/internal/evaluation/evaluationDetail.ts @@ -0,0 +1,18 @@ +import { LDEvaluationReason, LDFlagValue } from '../../api'; +import ErrorKinds from './ErrorKinds'; + +export const createErrorEvaluationDetail = (errorKind: ErrorKinds, def?: LDFlagValue) => ({ + value: def ?? null, + variationIndex: null, + reason: { kind: 'ERROR', errorKind }, +}); + +export const createSuccessEvaluationDetail = ( + value: LDFlagValue, + variationIndex?: number, + reason?: LDEvaluationReason, +) => ({ + value, + variationIndex: variationIndex ?? null, + reason: reason ?? 
null, +}); diff --git a/packages/shared/common/src/internal/evaluation/index.ts b/packages/shared/common/src/internal/evaluation/index.ts new file mode 100644 index 000000000..175c5d4e9 --- /dev/null +++ b/packages/shared/common/src/internal/evaluation/index.ts @@ -0,0 +1,11 @@ +import ErrorKinds from './ErrorKinds'; +import { createErrorEvaluationDetail, createSuccessEvaluationDetail } from './evaluationDetail'; +import EventFactoryBase, { EvalEventArgs } from './EventFactoryBase'; + +export { + createSuccessEvaluationDetail, + createErrorEvaluationDetail, + ErrorKinds, + EvalEventArgs, + EventFactoryBase, +}; diff --git a/packages/shared/sdk-server/src/ClientMessages.ts b/packages/shared/common/src/internal/events/ClientMessages.ts similarity index 93% rename from packages/shared/sdk-server/src/ClientMessages.ts rename to packages/shared/common/src/internal/events/ClientMessages.ts index a254a7866..1d7652ba0 100644 --- a/packages/shared/sdk-server/src/ClientMessages.ts +++ b/packages/shared/common/src/internal/events/ClientMessages.ts @@ -1,7 +1,5 @@ /** * Messages for issues which can be encountered processing client requests. 
- * - * @internal */ export default class ClientMessages { static readonly missingContextKeyNoEvent = diff --git a/packages/shared/common/src/internal/events/EventProcessor.ts b/packages/shared/common/src/internal/events/EventProcessor.ts index ade263dcc..0ce29a3f0 100644 --- a/packages/shared/common/src/internal/events/EventProcessor.ts +++ b/packages/shared/common/src/internal/events/EventProcessor.ts @@ -1,15 +1,19 @@ -import { LDEvaluationReason } from '../../api/data/LDEvaluationReason'; -import { LDLogger } from '../../api/logging/LDLogger'; +import { LDEvaluationReason, LDLogger } from '../../api'; +import { LDDeliveryStatus, LDEventType } from '../../api/subsystem'; import LDContextDeduplicator from '../../api/subsystem/LDContextDeduplicator'; import LDEventProcessor from '../../api/subsystem/LDEventProcessor'; -import LDEventSender, { LDDeliveryStatus, LDEventType } from '../../api/subsystem/LDEventSender'; import AttributeReference from '../../AttributeReference'; import ContextFilter from '../../ContextFilter'; -import ClientContext from '../../options/ClientContext'; +import { ClientContext } from '../../options'; +import { DiagnosticsManager } from '../diagnostics'; +import EventSender from './EventSender'; import EventSummarizer, { SummarizedFlagsEvent } from './EventSummarizer'; -import { isFeature, isIdentify } from './guards'; +import { isFeature, isIdentify, isMigration } from './guards'; import InputEvent from './InputEvent'; +import InputIdentifyEvent from './InputIdentifyEvent'; +import InputMigrationEvent from './InputMigrationEvent'; import LDInvalidSDKKeyError from './LDInvalidSDKKeyError'; +import shouldSample from './sampling'; type FilteredContext = any; @@ -20,6 +24,7 @@ interface IdentifyOutputEvent { kind: 'identify' | 'index'; creationDate: number; context: FilteredContext; + samplingRatio?: number; } interface CustomOutputEvent { @@ -29,6 +34,7 @@ interface CustomOutputEvent { contextKeys: Record; data?: any; metricValue?: number; + 
samplingRatio?: number; } interface FeatureOutputEvent { @@ -43,6 +49,11 @@ interface FeatureOutputEvent { reason?: LDEvaluationReason; context?: FilteredContext; contextKeys?: Record; + samplingRatio?: number; +} + +interface IndexInputEvent extends Omit { + kind: 'index'; } /** @@ -51,12 +62,18 @@ interface FeatureOutputEvent { */ type DiagnosticEvent = any; +interface MigrationOutputEvent extends Omit { + // Make the sampling ratio optional so we can omit it when it is one. + samplingRatio?: number; +} + type OutputEvent = | IdentifyOutputEvent | CustomOutputEvent | FeatureOutputEvent | SummarizedFlagsEvent - | DiagnosticEvent; + | DiagnosticEvent + | MigrationOutputEvent; export interface EventProcessorOptions { allAttributesPrivate: boolean; @@ -66,72 +83,53 @@ export interface EventProcessorOptions { diagnosticRecordingInterval: number; } -interface LDDiagnosticsManager { - createInitEvent(): DiagnosticEvent; - createStatsEventAndReset( - droppedEvents: number, - deduplicatedUsers: number, - eventsInLastBatch: number, - ): DiagnosticEvent; -} - export default class EventProcessor implements LDEventProcessor { + private eventSender: EventSender; private summarizer = new EventSummarizer(); - private queue: OutputEvent[] = []; - private lastKnownPastTime = 0; - private droppedEvents = 0; - private deduplicatedUsers = 0; - private exceededCapacity = false; - private eventsInLastBatch = 0; - private shutdown = false; - private capacity: number; - private logger?: LDLogger; - private contextFilter: ContextFilter; // Using any here, because setInterval handles are not the same // between node and web. 
private diagnosticsTimer: any; - private flushTimer: any; - private flushUsersTimer: any = null; constructor( config: EventProcessorOptions, clientContext: ClientContext, - private readonly eventSender: LDEventSender, - private readonly contextDeduplicator: LDContextDeduplicator, - private readonly diagnosticsManager?: LDDiagnosticsManager, + private readonly contextDeduplicator?: LDContextDeduplicator, + private readonly diagnosticsManager?: DiagnosticsManager, ) { this.capacity = config.eventsCapacity; this.logger = clientContext.basicConfiguration.logger; + this.eventSender = new EventSender(clientContext); this.contextFilter = new ContextFilter( config.allAttributesPrivate, config.privateAttributes.map((ref) => new AttributeReference(ref)), ); - if (this.contextDeduplicator.flushInterval !== undefined) { + if (this.contextDeduplicator?.flushInterval !== undefined) { this.flushUsersTimer = setInterval(() => { - this.contextDeduplicator.flush(); + this.contextDeduplicator?.flush(); }, this.contextDeduplicator.flushInterval * 1000); } this.flushTimer = setInterval(async () => { try { await this.flush(); - } catch { - // Eat the errors. + } catch (e) { + // Log errors and swallow them + this.logger?.debug(`Flush failed: ${e}`); } }, config.flushInterval * 1000); @@ -196,14 +194,32 @@ export default class EventProcessor implements LDEventProcessor { return; } + if (isMigration(inputEvent)) { + // These conditions are not combined, because we always want to stop + // processing at this point for a migration event. It cannot generate + // an index event or debug event. 
+ if (shouldSample(inputEvent.samplingRatio)) { + const migrationEvent: MigrationOutputEvent = { + ...inputEvent, + }; + if (migrationEvent.samplingRatio === 1) { + delete migrationEvent.samplingRatio; + } + this.enqueue(migrationEvent); + } + return; + } + this.summarizer.summarizeEvent(inputEvent); const isFeatureEvent = isFeature(inputEvent); + const addFullEvent = (isFeatureEvent && inputEvent.trackEvents) || !isFeatureEvent; + const addDebugEvent = this.shouldDebugEvent(inputEvent); const isIdentifyEvent = isIdentify(inputEvent); - const shouldNotDeduplicate = this.contextDeduplicator.processContext(inputEvent.context); + const shouldNotDeduplicate = this.contextDeduplicator?.processContext(inputEvent.context); // If there is no cache, then it will never be in the cache. if (!shouldNotDeduplicate) { @@ -215,21 +231,27 @@ export default class EventProcessor implements LDEventProcessor { const addIndexEvent = shouldNotDeduplicate && !isIdentifyEvent; if (addIndexEvent) { - this.enqueue({ - kind: 'index', - creationDate: inputEvent.creationDate, - context: this.contextFilter.filter(inputEvent.context), - }); + this.enqueue( + this.makeOutputEvent( + { + kind: 'index', + creationDate: inputEvent.creationDate, + context: inputEvent.context, + samplingRatio: 1, + }, + false, + ), + ); } - if (addFullEvent) { + if (addFullEvent && shouldSample(inputEvent.samplingRatio)) { this.enqueue(this.makeOutputEvent(inputEvent, false)); } - if (addDebugEvent) { + if (addDebugEvent && shouldSample(inputEvent.samplingRatio)) { this.enqueue(this.makeOutputEvent(inputEvent, true)); } } - private makeOutputEvent(event: InputEvent, debug: boolean): OutputEvent { + private makeOutputEvent(event: InputEvent | IndexInputEvent, debug: boolean): OutputEvent { switch (event.kind) { case 'feature': { const out: FeatureOutputEvent = { @@ -238,8 +260,13 @@ export default class EventProcessor implements LDEventProcessor { key: event.key, value: event.value, default: event.default, - prereqOf: 
event.prereqOf, }; + if (event.samplingRatio !== 1) { + out.samplingRatio = event.samplingRatio; + } + if (event.prereqOf) { + out.prereqOf = event.prereqOf; + } if (event.variation !== undefined) { out.variation = event.variation; } @@ -256,12 +283,17 @@ export default class EventProcessor implements LDEventProcessor { } return out; } + case 'index': // Intentional fallthrough. case 'identify': { - return { - kind: 'identify', + const out: IdentifyOutputEvent = { + kind: event.kind, creationDate: event.creationDate, context: this.contextFilter.filter(event.context), }; + if (event.samplingRatio !== 1) { + out.samplingRatio = event.samplingRatio; + } + return out; } case 'custom': { const out: CustomOutputEvent = { @@ -271,6 +303,10 @@ export default class EventProcessor implements LDEventProcessor { contextKeys: event.context.kindsAndKeys, }; + if (event.samplingRatio !== 1) { + out.samplingRatio = event.samplingRatio; + } + if (event.data !== undefined) { out.data = event.data; } diff --git a/packages/shared/common/src/internal/events/EventSender.test.ts b/packages/shared/common/src/internal/events/EventSender.test.ts new file mode 100644 index 000000000..d90d4daba --- /dev/null +++ b/packages/shared/common/src/internal/events/EventSender.test.ts @@ -0,0 +1,214 @@ +import { basicPlatform } from '@launchdarkly/private-js-mocks'; + +import { Info, PlatformData, SdkData } from '../../api'; +import { LDDeliveryStatus, LDEventSenderResult, LDEventType } from '../../api/subsystem'; +import { ApplicationTags, ClientContext } from '../../options'; +import EventSender from './EventSender'; + +jest.mock('../../utils', () => { + const actual = jest.requireActual('../../utils'); + return { ...actual, sleep: jest.fn() }; +}); + +const basicConfig = { + tags: new ApplicationTags({ application: { id: 'testApplication1', version: '1.0.0' } }), + serviceEndpoints: { events: 'https://events.fake.com', streaming: '', polling: '' }, +}; +const testEventData1 = { eventId: 
'test-event-data-1' }; +const testEventData2 = { eventId: 'test-event-data-2' }; +const info: Info = { + platformData(): PlatformData { + return { + os: { + name: 'An OS', + version: '1.0.1', + arch: 'An Arch', + }, + name: 'The SDK Name', + additional: { + nodeVersion: '42', + }, + }; + }, + sdkData(): SdkData { + return { + name: 'An SDK', + version: '2.0.2', + userAgentBase: 'TestUserAgent', + wrapperName: 'Rapper', + wrapperVersion: '1.2.3', + }; + }, +}; + +const analyticsHeaders = (uuid: number) => ({ + authorization: 'sdk-key', + 'content-type': 'application/json', + 'user-agent': 'TestUserAgent/2.0.2', + 'x-launchDarkly-event-schema': '4', + 'x-launchdarkly-payload-id': `${uuid}`, + 'x-launchdarkly-tags': 'application-id/testApplication1 application-version/1.0.0', + 'x-launchdarkly-wrapper': 'Rapper/1.2.3', +}); + +const diagnosticHeaders = { + authorization: 'sdk-key', + 'content-type': 'application/json', + 'user-agent': 'TestUserAgent/2.0.2', + 'x-launchDarkly-event-schema': undefined, + 'x-launchdarkly-payload-id': undefined, + 'x-launchdarkly-tags': 'application-id/testApplication1 application-version/1.0.0', + 'x-launchdarkly-wrapper': 'Rapper/1.2.3', +}; + +describe('given an event sender', () => { + let eventSender: EventSender; + let mockFetch: jest.Mock; + let mockHeadersGet: jest.Mock; + let mockRandomUuid: jest.Mock; + let uuid: number; + const dateNowString = '2023-08-10'; + let eventSenderResult: LDEventSenderResult; + + const setupMockFetch = (responseStatusCode: number) => { + mockFetch = jest + .fn() + .mockResolvedValue({ headers: { get: mockHeadersGet }, status: responseStatusCode }); + basicPlatform.requests.fetch = mockFetch; + }; + + beforeAll(() => { + jest.useFakeTimers(); + jest.setSystemTime(new Date(dateNowString)); + }); + + afterAll(() => { + jest.useRealTimers(); + }); + + beforeEach(async () => { + jest.clearAllMocks(); + mockHeadersGet = jest.fn((key) => (key === 'date' ? 
new Date() : undefined)); + uuid = 0; + mockRandomUuid = jest.fn(() => { + uuid += 1; + return `${uuid}`; + }); + setupMockFetch(200); + basicPlatform.crypto.randomUUID = mockRandomUuid; + + eventSender = new EventSender( + new ClientContext('sdk-key', basicConfig, { ...basicPlatform, info }), + ); + + eventSenderResult = await eventSender.sendEventData( + LDEventType.AnalyticsEvents, + testEventData1, + ); + }); + + it('includes the correct headers for analytics', async () => { + const { status, serverTime, error } = eventSenderResult; + + expect(status).toEqual(LDDeliveryStatus.Succeeded); + expect(serverTime).toEqual(Date.now()); + expect(error).toBeUndefined(); + expect(mockFetch).toHaveBeenCalledTimes(1); + expect(mockFetch).toHaveBeenCalledWith(`${basicConfig.serviceEndpoints.events}/bulk`, { + body: JSON.stringify(testEventData1), + headers: analyticsHeaders(uuid), + method: 'POST', + }); + }); + + it('includes the payload', async () => { + const { status: status1 } = eventSenderResult; + const { status: status2 } = await eventSender.sendEventData( + LDEventType.DiagnosticEvent, + testEventData2, + ); + + expect(status1).toEqual(LDDeliveryStatus.Succeeded); + expect(status2).toEqual(LDDeliveryStatus.Succeeded); + expect(mockFetch).toHaveBeenCalledTimes(2); + expect(mockFetch).toHaveBeenNthCalledWith(1, `${basicConfig.serviceEndpoints.events}/bulk`, { + body: JSON.stringify(testEventData1), + headers: analyticsHeaders(uuid), + method: 'POST', + }); + expect(mockFetch).toHaveBeenNthCalledWith( + 2, + `${basicConfig.serviceEndpoints.events}/diagnostic`, + { + body: JSON.stringify(testEventData2), + headers: diagnosticHeaders, + method: 'POST', + }, + ); + }); + + it('sends a unique payload for analytics events', async () => { + // send the same request again to assert unique uuids + await eventSender.sendEventData(LDEventType.AnalyticsEvents, testEventData1); + + expect(mockFetch).toHaveBeenCalledTimes(2); + expect(mockFetch).toHaveBeenNthCalledWith( + 1, + 
`${basicConfig.serviceEndpoints.events}/bulk`, + expect.objectContaining({ + headers: analyticsHeaders(1), + }), + ); + expect(mockFetch).toHaveBeenNthCalledWith( + 2, + `${basicConfig.serviceEndpoints.events}/bulk`, + expect.objectContaining({ + headers: analyticsHeaders(2), + }), + ); + }); + + describe.each([400, 408, 429, 503])('given recoverable errors', (responseStatusCode) => { + beforeEach(async () => { + setupMockFetch(responseStatusCode); + eventSenderResult = await eventSender.sendEventData( + LDEventType.AnalyticsEvents, + testEventData1, + ); + }); + + it(`retries - ${responseStatusCode}`, async () => { + const { status, error } = eventSenderResult; + + expect(mockFetch).toHaveBeenCalledTimes(2); + expect(status).toEqual(LDDeliveryStatus.Failed); + expect(error.name).toEqual('LaunchDarklyUnexpectedResponseError'); + expect(error.message).toEqual( + `Received error ${responseStatusCode} for event posting - giving up permanently`, + ); + }); + }); + + describe.each([401, 403])('given unrecoverable errors', (responseStatusCode) => { + beforeEach(async () => { + setupMockFetch(responseStatusCode); + eventSenderResult = await eventSender.sendEventData( + LDEventType.AnalyticsEvents, + testEventData1, + ); + }); + + it(`does not retry - ${responseStatusCode}`, async () => { + const errorMessage = `Received error ${ + responseStatusCode === 401 ? 
'401 (invalid SDK key)' : responseStatusCode + } for event posting - giving up permanently`; + + const { status, error } = eventSenderResult; + + expect(mockFetch).toHaveBeenCalledTimes(1); + expect(status).toEqual(LDDeliveryStatus.FailedAndMustShutDown); + expect(error.name).toEqual('LaunchDarklyUnexpectedResponseError'); + expect(error.message).toEqual(errorMessage); + }); + }); +}); diff --git a/packages/shared/common/src/internal/events/EventSender.ts b/packages/shared/common/src/internal/events/EventSender.ts new file mode 100644 index 000000000..5a49347d0 --- /dev/null +++ b/packages/shared/common/src/internal/events/EventSender.ts @@ -0,0 +1,101 @@ +import { Crypto, Requests } from '../../api'; +import { + LDDeliveryStatus, + LDEventSender, + LDEventSenderResult, + LDEventType, +} from '../../api/subsystem'; +import { isHttpRecoverable, LDUnexpectedResponseError } from '../../errors'; +import { ClientContext } from '../../options'; +import { defaultHeaders, httpErrorMessage, sleep } from '../../utils'; + +export default class EventSender implements LDEventSender { + private crypto: Crypto; + private defaultHeaders: { + [key: string]: string; + }; + private diagnosticEventsUri: string; + private eventsUri: string; + private requests: Requests; + + constructor(clientContext: ClientContext) { + const { basicConfiguration, platform } = clientContext; + const { sdkKey, serviceEndpoints, tags } = basicConfiguration; + const { crypto, info, requests } = platform; + + this.defaultHeaders = defaultHeaders(sdkKey, info, tags); + this.eventsUri = `${serviceEndpoints.events}/bulk`; + this.diagnosticEventsUri = `${serviceEndpoints.events}/diagnostic`; + this.requests = requests; + this.crypto = crypto; + } + + private async tryPostingEvents( + events: any, + uri: string, + payloadId: string | undefined, + canRetry: boolean, + ): Promise { + const tryRes: LDEventSenderResult = { + status: LDDeliveryStatus.Succeeded, + }; + + const headers: Record = { + 
...this.defaultHeaders, + 'content-type': 'application/json', + }; + + if (payloadId) { + headers['x-launchdarkly-payload-id'] = payloadId; + headers['x-launchDarkly-event-schema'] = '4'; + } + let error; + try { + const { status, headers: resHeaders } = await this.requests.fetch(uri, { + headers, + body: JSON.stringify(events), + method: 'POST', + }); + + const serverDate = Date.parse(resHeaders.get('date') || ''); + if (serverDate) { + tryRes.serverTime = serverDate; + } + + if (status <= 204) { + return tryRes; + } + + error = new LDUnexpectedResponseError( + httpErrorMessage({ status, message: 'some events were dropped' }, 'event posting'), + ); + + if (!isHttpRecoverable(status)) { + tryRes.status = LDDeliveryStatus.FailedAndMustShutDown; + tryRes.error = error; + return tryRes; + } + } catch (err) { + error = err; + } + + // recoverable but not retrying + if (error && !canRetry) { + tryRes.status = LDDeliveryStatus.Failed; + tryRes.error = error; + return tryRes; + } + + // wait 1 second before retrying + await sleep(); + + return this.tryPostingEvents(events, this.eventsUri, payloadId, false); + } + + async sendEventData(type: LDEventType, data: any): Promise { + const payloadId = type === LDEventType.AnalyticsEvents ? this.crypto.randomUUID() : undefined; + const uri = type === LDEventType.AnalyticsEvents ? 
this.eventsUri : this.diagnosticEventsUri; + + return this.tryPostingEvents(data, uri, payloadId, true); + } +} diff --git a/packages/shared/common/src/internal/events/EventSummarizer.ts b/packages/shared/common/src/internal/events/EventSummarizer.ts index e50a48e05..932a09f5f 100644 --- a/packages/shared/common/src/internal/events/EventSummarizer.ts +++ b/packages/shared/common/src/internal/events/EventSummarizer.ts @@ -52,7 +52,7 @@ export default class EventSummarizer { private contextKinds: Record> = {}; summarizeEvent(event: InputEvent) { - if (isFeature(event)) { + if (isFeature(event) && !event.excludeFromSummaries) { const countKey = counterKey(event); const counter = this.counters[countKey]; let kinds = this.contextKinds[event.key]; diff --git a/packages/shared/common/src/internal/events/InputCustomEvent.ts b/packages/shared/common/src/internal/events/InputCustomEvent.ts index c3e33b7d6..1c0c4a2b3 100644 --- a/packages/shared/common/src/internal/events/InputCustomEvent.ts +++ b/packages/shared/common/src/internal/events/InputCustomEvent.ts @@ -5,13 +5,14 @@ export default class InputCustomEvent { public readonly creationDate: number; - public readonly context: Context; - constructor( - context: Context, + public readonly context: Context, public readonly key: string, public readonly data?: any, public readonly metricValue?: number, + // Currently custom events are not sampled, but this is here to make the handling + // code more uniform. 
+ public readonly samplingRatio: number = 1, ) { this.creationDate = Date.now(); this.context = context; diff --git a/packages/shared/common/src/internal/events/InputEvalEvent.ts b/packages/shared/common/src/internal/events/InputEvalEvent.ts index 50dcd9e9d..5c9b9377b 100644 --- a/packages/shared/common/src/internal/events/InputEvalEvent.ts +++ b/packages/shared/common/src/internal/events/InputEvalEvent.ts @@ -1,4 +1,4 @@ -import { LDEvaluationDetail, LDEvaluationReason } from '../../api/data'; +import { LDEvaluationReason, LDFlagValue } from '../../api/data'; import Context from '../../Context'; export default class InputEvalEvent { @@ -6,8 +6,6 @@ export default class InputEvalEvent { public readonly creationDate: number; - public readonly context: Context; - public readonly default: any; public readonly trackEvents?: boolean; @@ -24,24 +22,26 @@ export default class InputEvalEvent { public readonly version?: number; + public readonly excludeFromSummaries?: boolean; + constructor( - withReasons: boolean, - context: Context, + public readonly withReasons: boolean, + public readonly context: Context, public readonly key: string, + value: LDFlagValue, defValue: any, // default is a reserved keyword in this context. - detail: LDEvaluationDetail, version?: number, variation?: number, trackEvents?: boolean, prereqOf?: string, reason?: LDEvaluationReason, debugEventsUntilDate?: number, + excludeFromSummaries?: boolean, + public readonly samplingRatio: number = 1, ) { this.creationDate = Date.now(); - this.context = context; + this.value = value; this.default = defValue; - this.variation = detail.variationIndex ?? 
undefined; - this.value = detail.value; if (version !== undefined) { this.version = version; @@ -66,5 +66,9 @@ export default class InputEvalEvent { if (debugEventsUntilDate !== undefined) { this.debugEventsUntilDate = debugEventsUntilDate; } + + if (excludeFromSummaries !== undefined) { + this.excludeFromSummaries = excludeFromSummaries; + } } } diff --git a/packages/shared/common/src/internal/events/InputEvent.ts b/packages/shared/common/src/internal/events/InputEvent.ts index f3962d358..2ffa15f51 100644 --- a/packages/shared/common/src/internal/events/InputEvent.ts +++ b/packages/shared/common/src/internal/events/InputEvent.ts @@ -1,6 +1,7 @@ import InputCustomEvent from './InputCustomEvent'; import InputEvalEvent from './InputEvalEvent'; import InputIdentifyEvent from './InputIdentifyEvent'; +import InputMigrationEvent from './InputMigrationEvent'; -type InputEvent = InputEvalEvent | InputCustomEvent | InputIdentifyEvent; +type InputEvent = InputEvalEvent | InputCustomEvent | InputIdentifyEvent | InputMigrationEvent; export default InputEvent; diff --git a/packages/shared/common/src/internal/events/InputIdentifyEvent.ts b/packages/shared/common/src/internal/events/InputIdentifyEvent.ts index 4f2a60900..b9ba89cce 100644 --- a/packages/shared/common/src/internal/events/InputIdentifyEvent.ts +++ b/packages/shared/common/src/internal/events/InputIdentifyEvent.ts @@ -5,10 +5,10 @@ export default class InputIdentifyEvent { public readonly creationDate: number; - public readonly context: Context; - - constructor(context: Context) { + constructor( + public readonly context: Context, + public readonly samplingRatio: number = 1, + ) { this.creationDate = Date.now(); - this.context = context; } } diff --git a/packages/shared/common/src/internal/events/InputMigrationEvent.ts b/packages/shared/common/src/internal/events/InputMigrationEvent.ts new file mode 100644 index 000000000..0e8b3a222 --- /dev/null +++ b/packages/shared/common/src/internal/events/InputMigrationEvent.ts 
@@ -0,0 +1,14 @@ +// Migration events are not currently supported by client-side SDKs, so this +// shared implementation contains minimal typing. If/When migration events are +// to be supported by client-side SDKs the appropriate types would be moved +// to the common implementation. + +export default interface InputMigrationEvent { + kind: 'migration_op'; + operation: string; + creationDate: number; + contextKeys: Record; + evaluation: any; + measurements: any[]; + samplingRatio: number; +} diff --git a/packages/shared/common/src/internal/events/NullEventProcessor.ts b/packages/shared/common/src/internal/events/NullEventProcessor.ts new file mode 100644 index 000000000..ba28e84e0 --- /dev/null +++ b/packages/shared/common/src/internal/events/NullEventProcessor.ts @@ -0,0 +1,11 @@ +import { LDEventProcessor } from '../../api/subsystem'; + +export default class NullEventProcessor implements LDEventProcessor { + close() {} + + async flush(): Promise { + // empty comment to keep ts and eslint happy + } + + sendEvent() {} +} diff --git a/packages/shared/common/src/internal/events/guards.ts b/packages/shared/common/src/internal/events/guards.ts index 715f80859..de5fa6e86 100644 --- a/packages/shared/common/src/internal/events/guards.ts +++ b/packages/shared/common/src/internal/events/guards.ts @@ -1,6 +1,7 @@ import InputCustomEvent from './InputCustomEvent'; import InputEvalEvent from './InputEvalEvent'; import InputIdentifyEvent from './InputIdentifyEvent'; +import InputMigrationEvent from './InputMigrationEvent'; export function isFeature(u: any): u is InputEvalEvent { return u.kind === 'feature'; @@ -13,3 +14,7 @@ export function isCustom(u: any): u is InputCustomEvent { export function isIdentify(u: any): u is InputIdentifyEvent { return u.kind === 'identify'; } + +export function isMigration(u: any): u is InputMigrationEvent { + return u.kind === 'migration_op'; +} diff --git a/packages/shared/common/src/internal/events/index.ts 
b/packages/shared/common/src/internal/events/index.ts index 7b32527fb..547a87ceb 100644 --- a/packages/shared/common/src/internal/events/index.ts +++ b/packages/shared/common/src/internal/events/index.ts @@ -1,7 +1,21 @@ +import ClientMessages from './ClientMessages'; import EventProcessor from './EventProcessor'; import InputCustomEvent from './InputCustomEvent'; import InputEvalEvent from './InputEvalEvent'; import InputEvent from './InputEvent'; import InputIdentifyEvent from './InputIdentifyEvent'; +import InputMigrationEvent from './InputMigrationEvent'; +import NullEventProcessor from './NullEventProcessor'; +import shouldSample from './sampling'; -export { InputCustomEvent, InputEvalEvent, InputEvent, InputIdentifyEvent, EventProcessor }; +export { + ClientMessages, + InputCustomEvent, + InputEvalEvent, + InputEvent, + InputIdentifyEvent, + InputMigrationEvent, + EventProcessor, + shouldSample, + NullEventProcessor, +}; diff --git a/packages/shared/common/src/internal/events/sampling.ts b/packages/shared/common/src/internal/events/sampling.ts new file mode 100644 index 000000000..3ee1c5280 --- /dev/null +++ b/packages/shared/common/src/internal/events/sampling.ts @@ -0,0 +1,22 @@ +/** + * The contents of this file are for event sampling. They are not used for + * any purpose requiring cryptographic security. + * */ + +export default function shouldSample(ratio: number) { + const truncated = Math.trunc(ratio); + // A ratio of 1 means 1 in 1. So that will always sample. No need + // to draw a random number. + if (truncated === 1) { + return true; + } + + if (truncated === 0) { + return false; + } + + // Math.floor(Math.random() * truncated) would return 0, 1, ... (truncated - 1). + // Checking for any number in the range will have approximately a 1 in X + // chance. So we check for 0 as it is part of any range.
+ return Math.floor(Math.random() * truncated) === 0; +} diff --git a/packages/shared/common/src/internal/index.ts b/packages/shared/common/src/internal/index.ts index 7981d6b64..db6af8042 100644 --- a/packages/shared/common/src/internal/index.ts +++ b/packages/shared/common/src/internal/index.ts @@ -1 +1,4 @@ +export * from './diagnostics'; +export * from './evaluation'; export * from './events'; +export * from './stream'; diff --git a/packages/shared/common/src/internal/stream/StreamingProcessor.test.ts b/packages/shared/common/src/internal/stream/StreamingProcessor.test.ts new file mode 100644 index 000000000..cf2e397eb --- /dev/null +++ b/packages/shared/common/src/internal/stream/StreamingProcessor.test.ts @@ -0,0 +1,237 @@ +import { basicPlatform, clientContext, logger } from '@launchdarkly/private-js-mocks'; + +import { EventName, ProcessStreamResponse } from '../../api'; +import { LDStreamProcessor } from '../../api/subsystem'; +import { LDStreamingError } from '../../errors'; +import { defaultHeaders } from '../../utils'; +import { DiagnosticsManager } from '../diagnostics'; +import StreamingProcessor from './StreamingProcessor'; + +const dateNowString = '2023-08-10'; +const sdkKey = 'my-sdk-key'; +const { + basicConfiguration: { serviceEndpoints, tags }, + platform: { info }, +} = clientContext; +const event = { + data: { + flags: { + flagkey: { key: 'flagkey', version: 1 }, + }, + segments: { + segkey: { key: 'segkey', version: 2 }, + }, + }, +}; + +const createMockEventSource = (streamUri: string = '', options: any = {}) => ({ + streamUri, + options, + onclose: jest.fn(), + addEventListener: jest.fn(), + close: jest.fn(), +}); + +describe('given a stream processor with mock event source', () => { + let streamingProcessor: LDStreamProcessor; + let diagnosticsManager: DiagnosticsManager; + let listeners: Map; + let mockEventSource: any; + let mockListener: ProcessStreamResponse; + let mockErrorHandler: jest.Mock; + let simulatePutEvent: (e?: any) => void; 
+ let simulateError: (e: { status: number; message: string }) => boolean; + + beforeAll(() => { + jest.useFakeTimers(); + jest.setSystemTime(new Date(dateNowString)); + }); + + afterAll(() => { + jest.useRealTimers(); + }); + + beforeEach(() => { + mockErrorHandler = jest.fn(); + clientContext.basicConfiguration.logger = logger; + + basicPlatform.requests = { + createEventSource: jest.fn((streamUri: string, options: any) => { + mockEventSource = createMockEventSource(streamUri, options); + return mockEventSource; + }), + } as any; + simulatePutEvent = (e: any = event) => { + mockEventSource.addEventListener.mock.calls[0][1](e); + }; + simulateError = (e: { status: number; message: string }): boolean => + mockEventSource.options.errorFilter(e); + + listeners = new Map(); + mockListener = { + deserializeData: jest.fn((data) => data), + processJson: jest.fn(), + }; + listeners.set('put', mockListener); + listeners.set('patch', mockListener); + + diagnosticsManager = new DiagnosticsManager(sdkKey, basicPlatform, {}); + streamingProcessor = new StreamingProcessor( + sdkKey, + clientContext, + '/all', + listeners, + diagnosticsManager, + mockErrorHandler, + ); + + jest.spyOn(streamingProcessor, 'stop'); + streamingProcessor.start(); + }); + + afterEach(() => { + streamingProcessor.close(); + jest.resetAllMocks(); + }); + + it('uses expected uri and eventSource init args', () => { + expect(basicPlatform.requests.createEventSource).toBeCalledWith( + `${serviceEndpoints.streaming}/all`, + { + errorFilter: expect.any(Function), + headers: defaultHeaders(sdkKey, info, tags), + initialRetryDelayMillis: 1000, + readTimeoutMillis: 300000, + retryResetIntervalMillis: 60000, + }, + ); + }); + + it('sets streamInitialReconnectDelay correctly', () => { + streamingProcessor = new StreamingProcessor( + sdkKey, + clientContext, + '/all', + listeners, + diagnosticsManager, + mockErrorHandler, + 22, + ); + streamingProcessor.start(); + + 
expect(basicPlatform.requests.createEventSource).toHaveBeenLastCalledWith( + `${serviceEndpoints.streaming}/all`, + { + errorFilter: expect.any(Function), + headers: defaultHeaders(sdkKey, info, tags), + initialRetryDelayMillis: 22000, + readTimeoutMillis: 300000, + retryResetIntervalMillis: 60000, + }, + ); + }); + + it('adds listeners', () => { + expect(mockEventSource.addEventListener).toHaveBeenNthCalledWith( + 1, + 'put', + expect.any(Function), + ); + expect(mockEventSource.addEventListener).toHaveBeenNthCalledWith( + 2, + 'patch', + expect.any(Function), + ); + }); + + it('executes listeners', () => { + simulatePutEvent(); + const patchHandler = mockEventSource.addEventListener.mock.calls[1][1]; + patchHandler(event); + + expect(mockListener.deserializeData).toBeCalledTimes(2); + expect(mockListener.processJson).toBeCalledTimes(2); + }); + + it('passes error to callback if json data is malformed', async () => { + (mockListener.deserializeData as jest.Mock).mockReturnValue(false); + simulatePutEvent(); + + expect(logger.error).toBeCalledWith(expect.stringMatching(/invalid data in "put"/)); + expect(logger.debug).toBeCalledWith(expect.stringMatching(/invalid json/i)); + expect(mockErrorHandler.mock.lastCall[0].message).toMatch(/malformed json/i); + }); + + it('calls error handler if event.data prop is missing', async () => { + simulatePutEvent({ flags: {} }); + + expect(mockListener.deserializeData).not.toBeCalled(); + expect(mockListener.processJson).not.toBeCalled(); + expect(mockErrorHandler.mock.lastCall[0].message).toMatch(/unexpected payload/i); + }); + + it('closes and stops', async () => { + streamingProcessor.close(); + + expect(streamingProcessor.stop).toBeCalled(); + expect(mockEventSource.close).toBeCalled(); + // @ts-ignore + expect(streamingProcessor.eventSource).toBeUndefined(); + }); + + it('creates a stream init event', async () => { + const startTime = Date.now(); + simulatePutEvent(); + + const diagnosticEvent = 
diagnosticsManager.createStatsEventAndReset(0, 0, 0); + expect(diagnosticEvent.streamInits.length).toEqual(1); + const si = diagnosticEvent.streamInits[0]; + expect(si.timestamp).toEqual(startTime); + expect(si.failed).toBeFalsy(); + expect(si.durationMillis).toBeGreaterThanOrEqual(0); + }); + + describe.each([400, 408, 429, 500, 503])('given recoverable http errors', (status) => { + it(`continues retrying after error: ${status}`, () => { + const startTime = Date.now(); + const testError = { status, message: 'retry. recoverable.' }; + const willRetry = simulateError(testError); + + expect(willRetry).toBeTruthy(); + expect(mockErrorHandler).not.toBeCalled(); + expect(logger.warn).toBeCalledWith( + expect.stringMatching(new RegExp(`${status}.*will retry`)), + ); + + const diagnosticEvent = diagnosticsManager.createStatsEventAndReset(0, 0, 0); + expect(diagnosticEvent.streamInits.length).toEqual(1); + const si = diagnosticEvent.streamInits[0]; + expect(si.timestamp).toEqual(startTime); + expect(si.failed).toBeTruthy(); + expect(si.durationMillis).toBeGreaterThanOrEqual(0); + }); + }); + + describe.each([401, 403])('given irrecoverable http errors', (status) => { + it(`stops retrying after error: ${status}`, () => { + const startTime = Date.now(); + const testError = { status, message: 'stopping. irrecoverable.' 
}; + const willRetry = simulateError(testError); + + expect(willRetry).toBeFalsy(); + expect(mockErrorHandler).toBeCalledWith( + new LDStreamingError(testError.message, testError.status), + ); + expect(logger.error).toBeCalledWith( + expect.stringMatching(new RegExp(`${status}.*permanently`)), + ); + + const diagnosticEvent = diagnosticsManager.createStatsEventAndReset(0, 0, 0); + expect(diagnosticEvent.streamInits.length).toEqual(1); + const si = diagnosticEvent.streamInits[0]; + expect(si.timestamp).toEqual(startTime); + expect(si.failed).toBeTruthy(); + expect(si.durationMillis).toBeGreaterThanOrEqual(0); + }); + }); +}); diff --git a/packages/shared/common/src/internal/stream/StreamingProcessor.ts b/packages/shared/common/src/internal/stream/StreamingProcessor.ts new file mode 100644 index 000000000..5287b59c6 --- /dev/null +++ b/packages/shared/common/src/internal/stream/StreamingProcessor.ts @@ -0,0 +1,142 @@ +import { EventName, EventSource, LDLogger, ProcessStreamResponse, Requests } from '../../api'; +import { LDStreamProcessor } from '../../api/subsystem'; +import { isHttpRecoverable, LDStreamingError } from '../../errors'; +import { ClientContext } from '../../options'; +import { defaultHeaders, httpErrorMessage } from '../../utils'; +import { DiagnosticsManager } from '../diagnostics'; +import { StreamingErrorHandler } from './types'; + +const STREAM_READ_TIMEOUT_MS = 5 * 60 * 1000; +const RETRY_RESET_INTERVAL_MS = 60 * 1000; + +const reportJsonError = ( + type: string, + data: string, + logger?: LDLogger, + errorHandler?: StreamingErrorHandler, +) => { + logger?.error(`Stream received invalid data in "${type}" message`); + logger?.debug(`Invalid JSON follows: ${data}`); + errorHandler?.(new LDStreamingError('Malformed JSON data in event stream')); +}; + +class StreamingProcessor implements LDStreamProcessor { + private readonly headers: { [key: string]: string | string[] }; + private readonly streamUri: string; + private readonly logger?: LDLogger; + + 
private eventSource?: EventSource; + private requests: Requests; + private connectionAttemptStartTime?: number; + + constructor( + sdkKey: string, + clientContext: ClientContext, + streamUriPath: string, + private readonly listeners: Map, + private readonly diagnosticsManager?: DiagnosticsManager, + private readonly errorHandler?: StreamingErrorHandler, + private readonly streamInitialReconnectDelay = 1, + ) { + const { basicConfiguration, platform } = clientContext; + const { logger, tags } = basicConfiguration; + const { info, requests } = platform; + + this.headers = defaultHeaders(sdkKey, info, tags); + this.logger = logger; + this.requests = requests; + this.streamUri = `${basicConfiguration.serviceEndpoints.streaming}${streamUriPath}`; + } + + private logConnectionStarted() { + this.connectionAttemptStartTime = Date.now(); + } + + private logConnectionResult(success: boolean) { + if (this.connectionAttemptStartTime && this.diagnosticsManager) { + this.diagnosticsManager.recordStreamInit( + this.connectionAttemptStartTime, + !success, + Date.now() - this.connectionAttemptStartTime, + ); + } + + this.connectionAttemptStartTime = undefined; + } + + start() { + this.logConnectionStarted(); + + const errorFilter = (err: { status: number; message: string }): boolean => { + if (err.status && !isHttpRecoverable(err.status)) { + this.logConnectionResult(false); + this.errorHandler?.(new LDStreamingError(err.message, err.status)); + this.logger?.error(httpErrorMessage(err, 'streaming request')); + return false; + } + + this.logger?.warn(httpErrorMessage(err, 'streaming request', 'will retry')); + this.logConnectionResult(false); + this.logConnectionStarted(); + return true; + }; + + // TLS is handled by the platform implementation. 
+ + const eventSource = this.requests.createEventSource(this.streamUri, { + headers: this.headers, + errorFilter, + initialRetryDelayMillis: 1000 * this.streamInitialReconnectDelay, + readTimeoutMillis: STREAM_READ_TIMEOUT_MS, + retryResetIntervalMillis: RETRY_RESET_INTERVAL_MS, + }); + this.eventSource = eventSource; + + eventSource.onclose = () => { + this.logger?.info('Closed LaunchDarkly stream connection'); + }; + + eventSource.onerror = () => { + // The work is done by `errorFilter`. + }; + + eventSource.onopen = () => { + this.logger?.info('Opened LaunchDarkly stream connection'); + }; + + eventSource.onretrying = (e) => { + this.logger?.info(`Will retry stream connection in ${e.delayMillis} milliseconds`); + }; + + this.listeners.forEach(({ deserializeData, processJson }, eventName) => { + eventSource.addEventListener(eventName, (event) => { + this.logger?.debug(`Received ${eventName} event`); + + if (event?.data) { + this.logConnectionResult(true); + const { data } = event; + const dataJson = deserializeData(data); + + if (!dataJson) { + reportJsonError(eventName, data, this.logger, this.errorHandler); + return; + } + processJson(dataJson); + } else { + this.errorHandler?.(new LDStreamingError('Unexpected payload from event stream')); + } + }); + }); + } + + stop() { + this.eventSource?.close(); + this.eventSource = undefined; + } + + close() { + this.stop(); + } +} + +export default StreamingProcessor; diff --git a/packages/shared/common/src/internal/stream/index.ts b/packages/shared/common/src/internal/stream/index.ts new file mode 100644 index 000000000..55f6118a4 --- /dev/null +++ b/packages/shared/common/src/internal/stream/index.ts @@ -0,0 +1,4 @@ +import StreamingProcessor from './StreamingProcessor'; +import { type StreamingErrorHandler } from './types'; + +export { StreamingProcessor, type StreamingErrorHandler }; diff --git a/packages/shared/common/src/internal/stream/types.ts b/packages/shared/common/src/internal/stream/types.ts new file mode 
100644 index 000000000..a2c1c42d4 --- /dev/null +++ b/packages/shared/common/src/internal/stream/types.ts @@ -0,0 +1,3 @@ +import { LDStreamingError } from '../../errors'; + +export type StreamingErrorHandler = (err: LDStreamingError) => void; diff --git a/packages/shared/common/src/logging/SafeLogger.ts b/packages/shared/common/src/logging/SafeLogger.ts index ec67a43e8..8b7b84289 100644 --- a/packages/shared/common/src/logging/SafeLogger.ts +++ b/packages/shared/common/src/logging/SafeLogger.ts @@ -1,4 +1,4 @@ -import { LDLogger } from '../api'; +import type { LDLogger } from '../api'; import { TypeValidators } from '../validators'; const loggerRequirements = { diff --git a/packages/shared/common/src/logging/createSafeLogger.ts b/packages/shared/common/src/logging/createSafeLogger.ts new file mode 100644 index 000000000..346ddb19a --- /dev/null +++ b/packages/shared/common/src/logging/createSafeLogger.ts @@ -0,0 +1,16 @@ +import { LDLogger } from '../api'; +import BasicLogger from './BasicLogger'; +import format from './format'; +import SafeLogger from './SafeLogger'; + +const createSafeLogger = (logger?: LDLogger) => { + const basicLogger = new BasicLogger({ + level: 'info', + // eslint-disable-next-line no-console + destination: console.error, + formatter: format, + }); + return logger ? 
new SafeLogger(logger, basicLogger) : basicLogger; +}; + +export default createSafeLogger; diff --git a/packages/shared/common/src/logging/index.ts b/packages/shared/common/src/logging/index.ts index 911683cc1..ee031f1f0 100644 --- a/packages/shared/common/src/logging/index.ts +++ b/packages/shared/common/src/logging/index.ts @@ -1,4 +1,5 @@ import BasicLogger from './BasicLogger'; +import createSafeLogger from './createSafeLogger'; import SafeLogger from './SafeLogger'; -export { BasicLogger, SafeLogger }; +export { BasicLogger, SafeLogger, createSafeLogger }; diff --git a/packages/shared/common/src/options/ClientContext.ts b/packages/shared/common/src/options/ClientContext.ts index 57ca9ff58..ab6320b73 100644 --- a/packages/shared/common/src/options/ClientContext.ts +++ b/packages/shared/common/src/options/ClientContext.ts @@ -1,4 +1,5 @@ import { LDClientContext, LDLogger, Platform } from '../api'; +import ApplicationTags from './ApplicationTags'; import ServiceEndpoints from './ServiceEndpoints'; /** @@ -6,12 +7,14 @@ import ServiceEndpoints from './ServiceEndpoints'; * client SDKs. */ interface BasicConfiguration { + tags?: ApplicationTags; + logger?: LDLogger; /** * True if the SDK was configured to be completely offline. */ - offline: boolean; + offline?: boolean; /** * The configured SDK key. @@ -22,6 +25,11 @@ interface BasicConfiguration { * Defines the base service URIs used by SDK components. */ serviceEndpoints: ServiceEndpoints; + + /** + * Sets the initial reconnect delay for the streaming connection, in seconds. 
+ */ + streamInitialReconnectDelay?: number; } /** @@ -35,16 +43,18 @@ export default class ClientContext implements LDClientContext { sdkKey: string, configuration: { logger?: LDLogger; - offline: boolean; + offline?: boolean; serviceEndpoints: ServiceEndpoints; + tags?: ApplicationTags; }, public readonly platform: Platform, ) { this.basicConfiguration = { + tags: configuration.tags, logger: configuration.logger, offline: configuration.offline, - sdkKey, serviceEndpoints: configuration.serviceEndpoints, + sdkKey, }; } } diff --git a/packages/shared/common/src/options/ServiceEndpoints.ts b/packages/shared/common/src/options/ServiceEndpoints.ts index a47cdcc5d..03531950c 100644 --- a/packages/shared/common/src/options/ServiceEndpoints.ts +++ b/packages/shared/common/src/options/ServiceEndpoints.ts @@ -6,13 +6,16 @@ function canonicalizeUri(uri: string): string { * Specifies the base service URIs used by SDK components. */ export default class ServiceEndpoints { - public readonly streaming: string; + public static DEFAULT_EVENTS = 'https://events.launchdarkly.com'; + public readonly streaming: string; public readonly polling: string; - public readonly events: string; - - public constructor(streaming: string, polling: string, events: string) { + public constructor( + streaming: string, + polling: string, + events: string = ServiceEndpoints.DEFAULT_EVENTS, + ) { this.streaming = canonicalizeUri(streaming); this.polling = canonicalizeUri(polling); this.events = canonicalizeUri(events); diff --git a/packages/shared/common/src/utils/VoidFunction.ts b/packages/shared/common/src/utils/VoidFunction.ts new file mode 100644 index 000000000..835bc84b2 --- /dev/null +++ b/packages/shared/common/src/utils/VoidFunction.ts @@ -0,0 +1 @@ +export type VoidFunction = () => void; diff --git a/packages/shared/common/src/utils/clone.ts b/packages/shared/common/src/utils/clone.ts new file mode 100644 index 000000000..5ae8a7abe --- /dev/null +++ b/packages/shared/common/src/utils/clone.ts 
@@ -0,0 +1,3 @@ +export default function clone(obj: any) { + return JSON.parse(JSON.stringify(obj)); +} diff --git a/packages/shared/common/src/utils/date.ts b/packages/shared/common/src/utils/date.ts new file mode 100644 index 000000000..83be69d24 --- /dev/null +++ b/packages/shared/common/src/utils/date.ts @@ -0,0 +1,4 @@ +// eslint-disable-next-line import/prefer-default-export +export function secondsToMillis(sec: number): number { + return Math.trunc(sec * 1000); +} diff --git a/packages/shared/common/src/utils/http.test.ts b/packages/shared/common/src/utils/http.test.ts new file mode 100644 index 000000000..cf4e30f75 --- /dev/null +++ b/packages/shared/common/src/utils/http.test.ts @@ -0,0 +1,119 @@ +import { Info, PlatformData, SdkData } from '../api'; +import { ApplicationTags } from '../options'; +import { defaultHeaders, httpErrorMessage } from './http'; + +describe('defaultHeaders', () => { + const makeInfo = ( + wrapperName?: string, + wrapperVersion?: string, + userAgentBase?: string, + ): Info => ({ + platformData(): PlatformData { + return {}; + }, + sdkData(): SdkData { + const sdkData: SdkData = { + version: '2.2.2', + userAgentBase, + wrapperName, + wrapperVersion, + }; + return sdkData; + }, + }); + + it('sets SDK key', () => { + const h = defaultHeaders('my-sdk-key', makeInfo()); + expect(h).toMatchObject({ authorization: 'my-sdk-key' }); + }); + + it('sets the default user agent', () => { + const h = defaultHeaders('my-sdk-key', makeInfo()); + expect(h).toMatchObject({ 'user-agent': 'NodeJSClient/2.2.2' }); + }); + + it('sets the SDK specific user agent', () => { + const h = defaultHeaders('my-sdk-key', makeInfo(undefined, undefined, 'CATS')); + expect(h).toMatchObject({ 'user-agent': 'CATS/2.2.2' }); + }); + + it('does not include wrapper header by default', () => { + const h = defaultHeaders('my-sdk-key', makeInfo()); + expect(h['x-launchdarkly-wrapper']).toBeUndefined(); + }); + + it('sets wrapper header with name only', () => { + const h = 
defaultHeaders('my-sdk-key', makeInfo('my-wrapper')); + expect(h).toMatchObject({ 'x-launchdarkly-wrapper': 'my-wrapper' }); + }); + + it('sets wrapper header with name and version', () => { + const h = defaultHeaders('my-sdk-key', makeInfo('my-wrapper', '2.0')); + expect(h).toMatchObject({ 'x-launchdarkly-wrapper': 'my-wrapper/2.0' }); + }); + + it('sets the X-LaunchDarkly-Tags header with valid tags.', () => { + const tags = new ApplicationTags({ + application: { + id: 'test-application', + version: 'test-version', + }, + }); + const h = defaultHeaders('my-sdk-key', makeInfo('my-wrapper'), tags); + expect(h).toMatchObject({ + 'x-launchdarkly-tags': 'application-id/test-application application-version/test-version', + }); + }); +}); + +describe('httpErrorMessage', () => { + test('I/O error', () => { + const error = { status: undefined, message: 'no status' }; + const context = 'fake error context message'; + const retryMessage = undefined; + + // @ts-ignore + const result = httpErrorMessage(error, context, retryMessage); + + expect(result).toBe( + 'Received I/O error (no status) for fake error context message - giving up permanently', + ); + }); + + test('invalid sdk key', () => { + const error = { status: 401, message: 'denied' }; + const context = 'fake error context message'; + const retryMessage = undefined; + + // @ts-ignore + const result = httpErrorMessage(error, context, retryMessage); + + expect(result).toBe( + 'Received error 401 (invalid SDK key) for fake error context message - giving up permanently', + ); + }); + + test('non-401 errors', () => { + const error = { status: 500, message: 'server error' }; + const context = 'fake error context message'; + const retryMessage = undefined; + + // @ts-ignore + const result = httpErrorMessage(error, context, retryMessage); + + expect(result).toBe( + 'Received error 500 for fake error context message - giving up permanently', + ); + }); + + test('with retry message', () => { + const error = { status: 500, 
message: 'denied' }; + const context = 'fake error context message'; + const retryMessage = 'will retry'; + + // @ts-ignore + const result = httpErrorMessage(error, context, retryMessage); + + expect(result).toBe('Received error 500 for fake error context message - will retry'); + }); +}); diff --git a/packages/shared/common/src/utils/http.ts b/packages/shared/common/src/utils/http.ts new file mode 100644 index 000000000..c29ea4759 --- /dev/null +++ b/packages/shared/common/src/utils/http.ts @@ -0,0 +1,48 @@ +import { Info } from '../api'; +import { ApplicationTags } from '../options'; + +export type LDHeaders = { + authorization: string; + 'user-agent': string; + 'x-launchdarkly-wrapper'?: string; + 'x-launchdarkly-tags'?: string; +}; + +export function defaultHeaders(sdkKey: string, info: Info, tags?: ApplicationTags): LDHeaders { + const { userAgentBase, version, wrapperName, wrapperVersion } = info.sdkData(); + + const headers: LDHeaders = { + authorization: sdkKey, + 'user-agent': `${userAgentBase ?? 'NodeJSClient'}/${version}`, + }; + + if (wrapperName) { + headers['x-launchdarkly-wrapper'] = wrapperVersion + ? `${wrapperName}/${wrapperVersion}` + : wrapperName; + } + + if (tags?.value) { + headers['x-launchdarkly-tags'] = tags.value; + } + + return headers; +} + +export function httpErrorMessage( + err: { + status: number; + message: string; + }, + context: string, + retryMessage?: string, +): string { + let desc; + if (err.status) { + desc = `error ${err.status}${err.status === 401 ? ' (invalid SDK key)' : ''}`; + } else { + desc = `I/O error (${err.message || err})`; + } + const action = retryMessage ?? 
'giving up permanently'; + return `Received ${desc} for ${context} - ${action}`; +} diff --git a/packages/shared/common/src/utils/index.ts b/packages/shared/common/src/utils/index.ts index fb04a0d2f..86bda2eb6 100644 --- a/packages/shared/common/src/utils/index.ts +++ b/packages/shared/common/src/utils/index.ts @@ -1,4 +1,17 @@ +import clone from './clone'; +import { secondsToMillis } from './date'; +import { defaultHeaders, httpErrorMessage, LDHeaders } from './http'; import noop from './noop'; +import sleep from './sleep'; +import { VoidFunction } from './VoidFunction'; -// eslint-disable-next-line import/prefer-default-export -export { noop }; +export { + clone, + defaultHeaders, + httpErrorMessage, + noop, + LDHeaders, + secondsToMillis, + sleep, + VoidFunction, +}; diff --git a/packages/shared/common/src/utils/sleep.ts b/packages/shared/common/src/utils/sleep.ts new file mode 100644 index 000000000..ff6f0e305 --- /dev/null +++ b/packages/shared/common/src/utils/sleep.ts @@ -0,0 +1,6 @@ +const sleep = async (delayMillis: number = 1000) => + new Promise((resolve) => { + setTimeout(resolve, delayMillis); + }); + +export default sleep; diff --git a/packages/shared/common/src/validators.ts b/packages/shared/common/src/validators.ts index d6bcc2fef..5ae8f0c0c 100644 --- a/packages/shared/common/src/validators.ts +++ b/packages/shared/common/src/validators.ts @@ -139,6 +139,16 @@ export class Function implements TypeValidator { } } +export class NullableBoolean implements TypeValidator { + is(u: unknown): boolean { + return typeof u === 'boolean' || typeof u === 'undefined' || u === null; + } + + getType(): string { + return 'boolean | undefined | null'; + } +} + // Our reference SDK, Go, parses date/time strings with the time.RFC3339Nano format. // This regex should match strings that are valid in that format, and no others. // Acceptable: @@ -161,6 +171,19 @@ export class DateValidator implements TypeValidator { } } +/** + * Validates that a string is a valid kind. 
+ */ +export class KindValidator extends StringMatchingRegex { + constructor() { + super(/^(\w|\.|-)+$/); + } + + override is(u: unknown): u is string { + return super.is(u) && u !== 'kind'; + } +} + /** * A set of standard type validators. */ @@ -179,6 +202,10 @@ export class TypeValidators { static readonly Function = new Function(); + static createTypeArray(typeName: string, example: T) { + return new TypeArray(typeName, example); + } + static numberWithMin(min: number): NumberWithMinimum { return new NumberWithMinimum(min); } @@ -188,4 +215,7 @@ export class TypeValidators { } static readonly Date = new DateValidator(); + + static readonly Kind = new KindValidator(); + static readonly NullableBoolean = new NullableBoolean(); } diff --git a/packages/shared/common/tsconfig.json b/packages/shared/common/tsconfig.json index cd3f7af3c..e2ed2b0f3 100644 --- a/packages/shared/common/tsconfig.json +++ b/packages/shared/common/tsconfig.json @@ -2,7 +2,7 @@ "compilerOptions": { "rootDir": "src", "outDir": "dist", - "target": "es2017", + "target": "ES2017", "lib": ["es6"], "module": "commonjs", "strict": true, @@ -12,7 +12,8 @@ "sourceMap": true, "declaration": true, "declarationMap": true, // enables importers to jump to source - "stripInternal": true + "stripInternal": true, + "composite": true }, "exclude": ["**/*.test.ts", "dist", "node_modules", "__tests__"] } diff --git a/packages/shared/mocks/CHANGELOG.md b/packages/shared/mocks/CHANGELOG.md new file mode 100644 index 000000000..cc1a5afb9 --- /dev/null +++ b/packages/shared/mocks/CHANGELOG.md @@ -0,0 +1,3 @@ +# Changelog + +All notable changes to `@launchdarkly/private-js-mocks` will be documented in this file. This project adheres to [Semantic Versioning](http://semver.org). diff --git a/packages/shared/mocks/LICENSE b/packages/shared/mocks/LICENSE new file mode 100644 index 000000000..ab8bd335b --- /dev/null +++ b/packages/shared/mocks/LICENSE @@ -0,0 +1,13 @@ +Copyright 2023 Catamorphic, Co. 
+ +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. diff --git a/packages/shared/mocks/README.md b/packages/shared/mocks/README.md new file mode 100644 index 000000000..730bb4fee --- /dev/null +++ b/packages/shared/mocks/README.md @@ -0,0 +1,28 @@ +# LaunchDarkly SDK JavaScript Mocks + +[![Actions Status][mocks-ci-badge]][mocks-ci] + +**Internal use only.** + +This project contains JavaScript mocks that are consumed in unit tests in client-side and server-side JavaScript SDKs. + +## Contributing + +See [Contributing](../CONTRIBUTING.md). + +## About LaunchDarkly + +- LaunchDarkly is a continuous delivery platform that provides feature flags as a service and allows developers to iterate quickly and safely. We allow you to easily flag your features and manage them from the LaunchDarkly dashboard. With LaunchDarkly, you can: + - Roll out a new feature to a subset of your users (like a group of users who opt-in to a beta tester group), gathering feedback and bug reports from real-world use cases. + - Gradually roll out a feature to an increasing percentage of users, and track the effect that the feature has on key metrics (for instance, how likely is a user to complete a purchase if they have feature A versus feature B?). + - Turn off a feature that you realize is causing performance problems in production, without needing to re-deploy, or even restart the application with a changed configuration file. 
+ - Grant access to certain features based on user attributes, like payment plan (eg: users on the ‘gold’ plan get access to more features than users in the ‘silver’ plan). Disable parts of your application to facilitate maintenance, without taking everything offline. +- LaunchDarkly provides feature flag SDKs for a wide variety of languages and technologies. Check out [our documentation](https://docs.launchdarkly.com/sdk) for a complete list. +- Explore LaunchDarkly + - [launchdarkly.com](https://www.launchdarkly.com/ 'LaunchDarkly Main Website') for more information + - [docs.launchdarkly.com](https://docs.launchdarkly.com/ 'LaunchDarkly Documentation') for our documentation and SDK reference guides + - [apidocs.launchdarkly.com](https://apidocs.launchdarkly.com/ 'LaunchDarkly API Documentation') for our API documentation + - [blog.launchdarkly.com](https://blog.launchdarkly.com/ 'LaunchDarkly Blog Documentation') for the latest product updates + +[mocks-ci-badge]: https://github.com/launchdarkly/js-core/actions/workflows/mocks.yml/badge.svg +[mocks-ci]: https://github.com/launchdarkly/js-core/actions/workflows/mocks.yml diff --git a/packages/shared/mocks/jest.config.js b/packages/shared/mocks/jest.config.js new file mode 100644 index 000000000..6753062cc --- /dev/null +++ b/packages/shared/mocks/jest.config.js @@ -0,0 +1,7 @@ +module.exports = { + transform: { '^.+\\.ts?$': 'ts-jest' }, + testMatch: ['**/*.test.ts?(x)'], + testEnvironment: 'node', + moduleFileExtensions: ['ts', 'tsx', 'js', 'jsx', 'json', 'node'], + collectCoverageFrom: ['src/**/*.ts'], +}; diff --git a/packages/shared/mocks/package.json b/packages/shared/mocks/package.json new file mode 100644 index 000000000..aa8ceb63f --- /dev/null +++ b/packages/shared/mocks/package.json @@ -0,0 +1,51 @@ +{ + "name": "@launchdarkly/private-js-mocks", + "private": true, + "version": "0.0.1", + "type": "commonjs", + "main": "./dist/index.js", + "types": "./dist/index.d.ts", + "homepage": 
"https://github.com/launchdarkly/js-core/tree/main/packages/shared/mocks", + "repository": { + "type": "git", + "url": "https://github.com/launchdarkly/js-core.git" + }, + "description": "LaunchDarkly SDK for JavaScript - mocks", + "files": [ + "dist" + ], + "keywords": [ + "mocks", + "unit", + "tests", + "launchdarkly", + "js", + "client" + ], + "scripts": { + "test": "", + "build-types": "yarn workspace @launchdarkly/js-sdk-common build-types", + "build": "yarn build-types && npx tsc", + "clean": "npx tsc --build --clean", + "lint": "npx eslint . --ext .ts", + "lint:fix": "yarn run lint -- --fix" + }, + "license": "Apache-2.0", + "devDependencies": { + "@trivago/prettier-plugin-sort-imports": "^4.2.0", + "@types/jest": "^29.5.5", + "@typescript-eslint/eslint-plugin": "^6.7.3", + "@typescript-eslint/parser": "^6.7.3", + "eslint": "^8.50.0", + "eslint-config-airbnb-base": "^15.0.0", + "eslint-config-airbnb-typescript": "^17.1.0", + "eslint-config-prettier": "^9.0.0", + "eslint-plugin-import": "^2.28.1", + "eslint-plugin-prettier": "^5.0.0", + "jest": "^29.7.0", + "launchdarkly-js-test-helpers": "^2.2.0", + "prettier": "^3.0.3", + "ts-jest": "^29.0.5", + "typescript": "^5.2.2" + } +} diff --git a/packages/shared/mocks/src/clientContext.ts b/packages/shared/mocks/src/clientContext.ts new file mode 100644 index 000000000..aa14729f5 --- /dev/null +++ b/packages/shared/mocks/src/clientContext.ts @@ -0,0 +1,13 @@ +import type { ClientContext } from '@common'; + +import basicPlatform from './platform'; + +const clientContext: ClientContext = { + basicConfiguration: { + sdkKey: 'testSdkKey', + serviceEndpoints: { events: '', polling: '', streaming: 'https://mockstream.ld.com' }, + }, + platform: basicPlatform, +}; + +export default clientContext; diff --git a/packages/shared/mocks/src/contextDeduplicator.ts b/packages/shared/mocks/src/contextDeduplicator.ts new file mode 100644 index 000000000..b04d0d277 --- /dev/null +++ b/packages/shared/mocks/src/contextDeduplicator.ts 
@@ -0,0 +1,17 @@ +import type { Context, subsystem } from '@common'; + +export default class ContextDeduplicator implements subsystem.LDContextDeduplicator { + flushInterval?: number | undefined = 0.1; + + seen: string[] = []; + + processContext(context: Context): boolean { + if (this.seen.indexOf(context.canonicalKey) >= 0) { + return false; + } + this.seen.push(context.canonicalKey); + return true; + } + + flush(): void {} +} diff --git a/packages/shared/sdk-server/__tests__/evaluation/mocks/hasher.ts b/packages/shared/mocks/src/hasher.ts similarity index 85% rename from packages/shared/sdk-server/__tests__/evaluation/mocks/hasher.ts rename to packages/shared/mocks/src/hasher.ts index bed0871a2..0d3b363de 100644 --- a/packages/shared/sdk-server/__tests__/evaluation/mocks/hasher.ts +++ b/packages/shared/mocks/src/hasher.ts @@ -1,5 +1,4 @@ -// Mock hashing implementation. -import { Crypto, Hasher, Hmac } from '@launchdarkly/js-sdk-common'; +import type { Crypto, Hasher, Hmac } from '@common'; export const hasher: Hasher = { update: jest.fn(), diff --git a/packages/shared/mocks/src/index.ts b/packages/shared/mocks/src/index.ts new file mode 100644 index 000000000..beceb6f47 --- /dev/null +++ b/packages/shared/mocks/src/index.ts @@ -0,0 +1,19 @@ +import clientContext from './clientContext'; +import ContextDeduplicator from './contextDeduplicator'; +import { crypto, hasher } from './hasher'; +import logger from './logger'; +import mockFetch from './mockFetch'; +import basicPlatform from './platform'; +import { MockStreamingProcessor, setupMockStreamingProcessor } from './streamingProcessor'; + +export { + basicPlatform, + clientContext, + mockFetch, + crypto, + logger, + hasher, + ContextDeduplicator, + MockStreamingProcessor, + setupMockStreamingProcessor, +}; diff --git a/packages/shared/mocks/src/logger.ts b/packages/shared/mocks/src/logger.ts new file mode 100644 index 000000000..9fe7004e0 --- /dev/null +++ b/packages/shared/mocks/src/logger.ts @@ -0,0 +1,8 @@ 
+const logger = { + error: jest.fn(), + warn: jest.fn(), + info: jest.fn(), + debug: jest.fn(), +}; + +export default logger; diff --git a/packages/shared/mocks/src/mockFetch.ts b/packages/shared/mocks/src/mockFetch.ts new file mode 100644 index 000000000..0ae07f804 --- /dev/null +++ b/packages/shared/mocks/src/mockFetch.ts @@ -0,0 +1,32 @@ +import { Response } from '@common'; + +import basicPlatform from './platform'; + +const createMockResponse = (remoteJson: any, statusCode: number) => { + const response: Response = { + headers: { + get: jest.fn(), + keys: jest.fn(), + values: jest.fn(), + entries: jest.fn(), + has: jest.fn(), + }, + status: statusCode, + text: jest.fn(), + json: () => Promise.resolve(remoteJson), + }; + return Promise.resolve(response); +}; + +/** + * Mocks basicPlatform fetch. Returns the fetch jest.Mock object. + * @param remoteJson + * @param statusCode + */ +const mockFetch = (remoteJson: any, statusCode: number = 200): jest.Mock => { + const f = basicPlatform.requests.fetch as jest.Mock; + f.mockResolvedValue(createMockResponse(remoteJson, statusCode)); + return f; +}; + +export default mockFetch; diff --git a/packages/shared/mocks/src/platform.ts b/packages/shared/mocks/src/platform.ts new file mode 100644 index 000000000..7b11b5c71 --- /dev/null +++ b/packages/shared/mocks/src/platform.ts @@ -0,0 +1,46 @@ +import type { Encoding, Info, Platform, PlatformData, Requests, SdkData } from '@common'; + +import { crypto } from './hasher'; + +const encoding: Encoding = { + btoa: (s: string) => Buffer.from(s).toString('base64'), +}; + +const info: Info = { + platformData(): PlatformData { + return { + os: { + name: 'An OS', + version: '1.0.1', + arch: 'An Arch', + }, + name: 'The SDK Name', + additional: { + nodeVersion: '42', + }, + }; + }, + sdkData(): SdkData { + return { + name: 'An SDK', + version: '2.0.2', + userAgentBase: 'TestUserAgent', + wrapperName: 'Rapper', + wrapperVersion: '1.2.3', + }; + }, +}; + +const requests: Requests = { + 
fetch: jest.fn(), + createEventSource: jest.fn(), +}; + +const basicPlatform: Platform = { + encoding, + info, + crypto, + requests, +}; + +export default basicPlatform; diff --git a/packages/shared/mocks/src/streamingProcessor.ts b/packages/shared/mocks/src/streamingProcessor.ts new file mode 100644 index 000000000..e58cbe583 --- /dev/null +++ b/packages/shared/mocks/src/streamingProcessor.ts @@ -0,0 +1,42 @@ +import type { + ClientContext, + EventName, + internal, + LDStreamingError, + ProcessStreamResponse, +} from '@common'; + +export const MockStreamingProcessor = jest.fn(); + +export const setupMockStreamingProcessor = (shouldError: boolean = false) => { + MockStreamingProcessor.mockImplementation( + ( + sdkKey: string, + clientContext: ClientContext, + streamUriPath: string, + listeners: Map, + diagnosticsManager: internal.DiagnosticsManager, + errorHandler: internal.StreamingErrorHandler, + _streamInitialReconnectDelay: number, + ) => ({ + start: jest.fn(async () => { + if (shouldError) { + process.nextTick(() => { + const unauthorized: LDStreamingError = { + code: 401, + name: 'LaunchDarklyStreamingError', + message: 'test-error', + }; + errorHandler(unauthorized); + }); + } else { + // execute put which will resolve the init promise + process.nextTick( + () => listeners.get('put')?.processJson({ data: { flags: {}, segments: {} } }), + ); + } + }), + close: jest.fn(), + }), + ); +}; diff --git a/packages/shared/mocks/tsconfig.eslint.json b/packages/shared/mocks/tsconfig.eslint.json new file mode 100644 index 000000000..56c9b3830 --- /dev/null +++ b/packages/shared/mocks/tsconfig.eslint.json @@ -0,0 +1,5 @@ +{ + "extends": "./tsconfig.json", + "include": ["/**/*.ts"], + "exclude": ["node_modules"] +} diff --git a/packages/shared/mocks/tsconfig.json b/packages/shared/mocks/tsconfig.json new file mode 100644 index 000000000..93b5f38e5 --- /dev/null +++ b/packages/shared/mocks/tsconfig.json @@ -0,0 +1,26 @@ +{ + "compilerOptions": { + "rootDir": "src", + 
"outDir": "dist", + "target": "ES2017", + "lib": ["es6"], + "module": "commonjs", + "strict": true, + "noImplicitOverride": true, + // Needed for CommonJS modules: markdown-it, fs-extra + "allowSyntheticDefaultImports": true, + "sourceMap": true, + "declaration": true, + "declarationMap": true, // enables importers to jump to source + "stripInternal": true, + "paths": { + "@common": ["../common"] + } + }, + "exclude": ["**/*.test.ts", "dist", "node_modules", "__tests__"], + "references": [ + { + "path": "../common" + } + ] +} diff --git a/packages/shared/mocks/tsconfig.ref.json b/packages/shared/mocks/tsconfig.ref.json new file mode 100644 index 000000000..0c86b2c55 --- /dev/null +++ b/packages/shared/mocks/tsconfig.ref.json @@ -0,0 +1,7 @@ +{ + "extends": "./tsconfig.json", + "include": ["src/**/*"], + "compilerOptions": { + "composite": true + } +} diff --git a/packages/shared/sdk-client/LICENSE b/packages/shared/sdk-client/LICENSE new file mode 100644 index 000000000..d238a2b01 --- /dev/null +++ b/packages/shared/sdk-client/LICENSE @@ -0,0 +1,13 @@ +Copyright 2022 Catamorphic, Co. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
\ No newline at end of file diff --git a/packages/shared/sdk-client/README.md b/packages/shared/sdk-client/README.md new file mode 100644 index 000000000..679c48c22 --- /dev/null +++ b/packages/shared/sdk-client/README.md @@ -0,0 +1 @@ +# sdk-client diff --git a/packages/shared/sdk-client/jest-setupFilesAfterEnv.ts b/packages/shared/sdk-client/jest-setupFilesAfterEnv.ts new file mode 100644 index 000000000..7b0828bfa --- /dev/null +++ b/packages/shared/sdk-client/jest-setupFilesAfterEnv.ts @@ -0,0 +1 @@ +import '@testing-library/jest-dom'; diff --git a/packages/shared/sdk-client/jest.config.json b/packages/shared/sdk-client/jest.config.json new file mode 100644 index 000000000..65ddc27dd --- /dev/null +++ b/packages/shared/sdk-client/jest.config.json @@ -0,0 +1,10 @@ +{ + "transform": { "^.+\\.ts?$": "ts-jest" }, + "testMatch": ["**/*.test.ts?(x)"], + "testPathIgnorePatterns": ["node_modules", "example", "dist"], + "modulePathIgnorePatterns": ["dist"], + "testEnvironment": "jsdom", + "moduleFileExtensions": ["ts", "tsx", "js", "jsx", "json", "node"], + "collectCoverageFrom": ["src/**/*.ts"], + "setupFilesAfterEnv": ["./jest-setupFilesAfterEnv.ts"] +} diff --git a/packages/shared/sdk-client/package.json b/packages/shared/sdk-client/package.json new file mode 100644 index 000000000..d87dfef8e --- /dev/null +++ b/packages/shared/sdk-client/package.json @@ -0,0 +1,59 @@ +{ + "name": "@launchdarkly/js-client-sdk-common", + "version": "0.0.1", + "type": "commonjs", + "main": "./dist/index.js", + "types": "./dist/index.d.ts", + "homepage": "https://github.com/launchdarkly/js-core/tree/main/packages/shared/sdk-client", + "repository": { + "type": "git", + "url": "https://github.com/launchdarkly/js-core.git" + }, + "description": "LaunchDarkly Client SDK for JavaScript - common code", + "files": [ + "dist" + ], + "keywords": [ + "launchdarkly", + "analytics", + "client" + ], + "scripts": { + "doc": "../../../scripts/build-doc.sh .", + "test": "npx jest --ci", + "build": 
"npx tsc", + "clean": "npx tsc --build --clean", + "lint": "npx eslint . --ext .ts", + "lint:fix": "yarn run lint -- --fix", + "prettier": "prettier --write 'src/*.@(js|ts|tsx|json)'", + "check": "yarn && yarn prettier && yarn lint && tsc && yarn test" + }, + "license": "Apache-2.0", + "dependencies": { + "@launchdarkly/js-sdk-common": "^1.1.0", + "semver": "7.5.4" + }, + "devDependencies": { + "@launchdarkly/private-js-mocks": "0.0.1", + "@testing-library/dom": "^9.3.1", + "@testing-library/jest-dom": "^5.16.5", + "@types/jest": "^29.5.3", + "@types/semver": "^7.5.0", + "@typescript-eslint/eslint-plugin": "^6.1.0", + "@typescript-eslint/parser": "^6.1.0", + "eslint": "^8.45.0", + "eslint-config-airbnb-base": "^15.0.0", + "eslint-config-airbnb-typescript": "^17.1.0", + "eslint-config-prettier": "^8.8.0", + "eslint-plugin-import": "^2.27.5", + "eslint-plugin-prettier": "^5.0.0", + "jest": "^29.6.1", + "jest-diff": "^29.6.1", + "jest-environment-jsdom": "^29.6.1", + "launchdarkly-js-test-helpers": "^2.2.0", + "prettier": "^3.0.0", + "ts-jest": "^29.1.1", + "typedoc": "0.23.26", + "typescript": "^5.1.6" + } +} diff --git a/packages/shared/sdk-client/src/LDClientImpl.test.ts b/packages/shared/sdk-client/src/LDClientImpl.test.ts new file mode 100644 index 000000000..d0c36d84c --- /dev/null +++ b/packages/shared/sdk-client/src/LDClientImpl.test.ts @@ -0,0 +1,83 @@ +import { LDContext } from '@launchdarkly/js-sdk-common'; +import { basicPlatform } from '@launchdarkly/private-js-mocks'; + +import fetchFlags from './evaluation/fetchFlags'; +import * as mockResponseJson from './evaluation/mockResponse.json'; +import LDClientImpl from './LDClientImpl'; + +jest.mock('./evaluation/fetchFlags', () => { + const actual = jest.requireActual('./evaluation/fetchFlags'); + return { + __esModule: true, + ...actual, + default: jest.fn(), + }; +}); + +describe('sdk-client object', () => { + const testSdkKey = 'test-sdk-key'; + const context: LDContext = { kind: 'org', key: 'Testy Pizza' 
}; + const mockFetchFlags = fetchFlags as jest.Mock; + + let ldc: LDClientImpl; + + beforeEach(async () => { + mockFetchFlags.mockResolvedValue(mockResponseJson); + + ldc = new LDClientImpl(testSdkKey, context, basicPlatform, {}); + await ldc.start(); + }); + + test('instantiate with blank options', () => { + expect(ldc.config).toMatchObject({ + allAttributesPrivate: false, + baseUri: 'https://sdk.launchdarkly.com', + capacity: 100, + diagnosticOptOut: false, + diagnosticRecordingInterval: 900, + eventsUri: 'https://events.launchdarkly.com', + flushInterval: 2, + inspectors: [], + logger: { + destination: expect.any(Function), + formatter: expect.any(Function), + logLevel: 1, + name: 'LaunchDarkly', + }, + privateAttributes: [], + sendEvents: true, + sendLDHeaders: true, + serviceEndpoints: { + events: 'https://events.launchdarkly.com', + polling: 'https://sdk.launchdarkly.com', + streaming: 'https://clientstream.launchdarkly.com', + }, + streamInitialReconnectDelay: 1, + streamUri: 'https://clientstream.launchdarkly.com', + tags: {}, + useReport: false, + withReasons: false, + }); + }); + + test('all flags', async () => { + const all = ldc.allFlags(); + + expect(all).toEqual({ + 'dev-test-flag': true, + 'easter-i-tunes-special': false, + 'easter-specials': 'no specials', + fdsafdsafdsafdsa: true, + 'log-level': 'warn', + 'moonshot-demo': true, + test1: 's1', + 'this-is-a-test': true, + }); + }); + + test('variation', async () => { + const devTestFlag = ldc.variation('dev-test-flag'); + + expect(devTestFlag).toBe(true); + }); +}); diff --git a/packages/shared/sdk-client/src/LDClientImpl.ts b/packages/shared/sdk-client/src/LDClientImpl.ts new file mode 100644 index 000000000..d6874e10a --- /dev/null +++ b/packages/shared/sdk-client/src/LDClientImpl.ts @@ -0,0 +1,232 @@ +// temporarily allow unused vars for the duration of the migration + +/* eslint-disable @typescript-eslint/no-unused-vars */ +import { + clone, + Context, + internal, + LDClientError, + LDContext, + 
// Internal helpers shared by the evaluation methods below.
const { createErrorEvaluationDetail, createSuccessEvaluationDetail, ClientMessages, ErrorKinds } =
  internal;

/**
 * Common client-side SDK client implementation.
 *
 * Construction is fully synchronous (no I/O); flag data is fetched by
 * {@link LDClientImpl.start}. Evaluation methods read from the in-memory
 * `flags` store and report analytics events through `eventProcessor`.
 */
export default class LDClientImpl implements LDClient {
  config: Configuration;
  diagnosticsManager?: internal.DiagnosticsManager;
  eventProcessor: subsystem.LDEventProcessor;

  // Factory for events without evaluation reasons (used by variation()).
  private eventFactoryDefault = new EventFactory(false);
  // Factory for events that include evaluation reasons (used by variationDetail()).
  private eventFactoryWithReasons = new EventFactory(true);
  private emitter: LDEmitter;
  // Flag store, replaced wholesale by start(); presumably keyed by flag key — see fetchFlags.
  private flags: Flags = {};
  private logger: LDLogger;

  /**
   * Creates the client object synchronously. No async, no network calls.
   *
   * @param sdkKey Client-side SDK key; must be non-empty.
   * @param context Evaluation context; must be valid per Context.fromLDContext.
   * @param platform Platform services; must provide `encoding` (btoa is required).
   * @param options Client configuration options.
   * @throws Error if the SDK key is missing, the context is invalid, or the
   *   platform lacks an encoding implementation.
   */
  constructor(
    public readonly sdkKey: string,
    public readonly context: LDContext,
    public readonly platform: Platform,
    options: LDOptions,
  ) {
    if (!sdkKey) {
      throw new Error('You must configure the client with a client-side SDK key');
    }

    const checkedContext = Context.fromLDContext(context);
    if (!checkedContext.valid) {
      throw new Error('Context was unspecified or had no key');
    }

    if (!platform.encoding) {
      throw new Error('Platform must implement Encoding because btoa is required.');
    }

    this.config = new Configuration(options);
    this.logger = this.config.logger;
    this.diagnosticsManager = createDiagnosticsManager(sdkKey, this.config, platform);
    this.eventProcessor = createEventProcessor(
      sdkKey,
      this.config,
      platform,
      this.diagnosticsManager,
    );
    this.emitter = new LDEmitter();

    // TODO: init streamer
  }

  /**
   * Fetches the initial flag payload and emits 'ready' on success.
   * On failure the error is logged and both 'error' and 'failed' are emitted;
   * the returned promise still resolves (errors are not rethrown).
   */
  async start() {
    try {
      this.flags = await fetchFlags(this.sdkKey, this.context, this.config, this.platform);
      this.emitter.emit('ready');
    } catch (error: any) {
      this.logger.error(error);
      this.emitter.emit('error', error);
      this.emitter.emit('failed', error);
    }
  }

  /**
   * Returns a map of flag key -> current value for every flag in the store.
   * No analytics events are generated by this method.
   */
  allFlags(): LDFlagSet {
    const result: LDFlagSet = {};
    Object.entries(this.flags).forEach(([k, r]) => {
      result[k] = r.value;
    });
    return result;
  }

  /** Flushes pending analytics events, then shuts down the event processor. */
  async close(): Promise {
    await this.flush();
    this.eventProcessor.close();
  }

  /**
   * Flushes pending analytics events.
   * @returns `{ result: true }` on success, or `{ error, result: false }` —
   *   flush failures are reported in the result rather than thrown.
   */
  async flush(): Promise<{ error?: Error; result: boolean }> {
    try {
      await this.eventProcessor.flush();
    } catch (e) {
      return { error: e as Error, result: false };
    }
    return { result: true };
  }

  /** Returns a deep copy of the context the client was constructed with. */
  getContext(): LDContext {
    return clone(this.context);
  }

  /**
   * Switches the active context. Not yet implemented — currently resolves
   * immediately with an empty object and ignores all arguments.
   */
  identify(
    context: LDContext,
    hash?: string,
    onDone?: (err: Error | null, flags: LDFlagSet | null) => void,
  ): Promise {
    // TODO:
    return Promise.resolve({});
  }

  /** Deregisters a listener (or all listeners for the event when omitted). */
  off(eventName: EventName, listener?: Function): void {
    this.emitter.off(eventName, listener);
  }

  /** Registers a listener for one of the client lifecycle events. */
  on(eventName: EventName, listener: Function): void {
    this.emitter.on(eventName, listener);
  }

  /** Enables/disables the streaming connection. Not yet implemented. */
  setStreaming(value?: boolean): void {
    // TODO:
  }

  /**
   * Records a custom analytics event for the current context.
   * If the stored context is invalid, a warning is logged and no event is sent.
   */
  track(key: string, data?: any, metricValue?: number): void {
    const checkedContext = Context.fromLDContext(this.context);

    if (!checkedContext.valid) {
      this.logger?.warn(ClientMessages.missingContextKeyNoEvent);
      return;
    }

    this.eventProcessor.sendEvent(
      this.eventFactoryDefault.customEvent(key, checkedContext!, data, metricValue),
    );
  }

  /**
   * Core evaluation routine shared by variation/variationDetail/typed getters.
   *
   * Despite the LDFlagValue return annotation, this returns an evaluation
   * detail object ({ value, variationIndex, reason }); callers destructure it.
   *
   * - Unknown flag: emits 'error', sends an unknown-flag event, and returns an
   *   error detail carrying the default value.
   *   NOTE(review): this path always uses eventFactoryDefault rather than the
   *   passed eventFactory — confirm whether reasons should be included when
   *   called from variationDetail.
   * - Type mismatch (when typeChecker is given): sends an eval event tracking
   *   the default value and returns a WRONG_TYPE error detail.
   * - Otherwise: sends an eval event and returns a success detail; a null or
   *   undefined variation index falls back to the default value.
   *   NOTE(review): `type` from typeChecker is currently unused.
   */
  private variationInternal(
    flagKey: string,
    defaultValue: any,
    eventFactory: EventFactory,
    typeChecker?: (value: any) => [boolean, string],
  ): LDFlagValue {
    const evalContext = Context.fromLDContext(this.context);
    const found = this.flags[flagKey];

    if (!found) {
      const error = new LDClientError(`Unknown feature flag "${flagKey}"; returning default value`);
      this.emitter.emit('error', error);
      this.eventProcessor.sendEvent(
        this.eventFactoryDefault.unknownFlagEvent(flagKey, defaultValue ?? null, evalContext),
      );
      return createErrorEvaluationDetail(ErrorKinds.FlagNotFound, defaultValue);
    }

    const { reason, value, variation } = found;

    if (typeChecker) {
      const [matched, type] = typeChecker(value);
      if (!matched) {
        this.eventProcessor.sendEvent(
          eventFactory.evalEventClient(
            flagKey,
            defaultValue, // track default value on type errors
            defaultValue,
            found,
            evalContext,
            reason,
          ),
        );
        return createErrorEvaluationDetail(ErrorKinds.WrongType, defaultValue);
      }
    }

    const successDetail = createSuccessEvaluationDetail(value, variation, reason);
    if (variation === undefined || variation === null) {
      this.logger.debug('Result value is null in variation');
      successDetail.value = defaultValue;
    }
    this.eventProcessor.sendEvent(
      eventFactory.evalEventClient(flagKey, value, defaultValue, found, evalContext, reason),
    );
    return successDetail;
  }

  /** Evaluates a flag and returns only its value (events without reasons). */
  variation(flagKey: string, defaultValue?: LDFlagValue): LDFlagValue {
    const { value } = this.variationInternal(flagKey, defaultValue, this.eventFactoryDefault);
    return value;
  }
  /** Evaluates a flag and returns the full detail (events include reasons). */
  variationDetail(flagKey: string, defaultValue?: LDFlagValue): LDEvaluationDetail {
    return this.variationInternal(flagKey, defaultValue, this.eventFactoryWithReasons);
  }

  /**
   * Strongly-typed evaluation: delegates to variationInternal with a type
   * checker so mismatches yield the default value and a WRONG_TYPE detail.
   */
  private typedEval(
    key: string,
    defaultValue: T,
    eventFactory: EventFactory,
    typeChecker: (value: unknown) => [boolean, string],
  ): LDEvaluationDetailTyped {
    return this.variationInternal(key, defaultValue, eventFactory, typeChecker);
  }

  // TODO: add other typed variation functions
  /** Boolean-typed variation; returns defaultValue on missing or wrong-type flag. */
  boolVariation(key: string, defaultValue: boolean): boolean {
    return this.typedEval(key, defaultValue, this.eventFactoryDefault, (value) => [
      TypeValidators.Boolean.is(value),
      TypeValidators.Boolean.getType(),
    ]).value;
  }

  /** Not yet implemented — resolves immediately. */
  waitForInitialization(): Promise {
    // TODO:
    return Promise.resolve(undefined);
  }

  /** Not yet implemented — resolves immediately. */
  waitUntilReady(): Promise {
    // TODO:
    return Promise.resolve(undefined);
  }
}
+ * + * ``` + * // using a Promise then() handler + * client.waitUntilReady().then(() => { + * doSomethingWithClient(); + * }); + * + * // using async/await + * await client.waitUntilReady(); + * doSomethingWithClient(); + * ``` + * + * If you want to distinguish between these success and failure conditions, use + * {@link waitForInitialization} instead. + * + * If you prefer to use event listeners ({@link on}) rather than Promises, you can listen on the + * client for a `"ready"` event, which will be fired in either case. + * + * @returns + * A Promise that will be resolved once the client is no longer trying to initialize. + */ + waitUntilReady(): Promise; + + /** + * Returns a Promise that tracks the client's initialization state. + * + * The Promise will be resolved if the client successfully initializes, or rejected if client + * initialization has irrevocably failed (for instance, if it detects that the SDK key is invalid). + * + * ``` + * // using Promise then() and catch() handlers + * client.waitForInitialization().then(() => { + * doSomethingWithSuccessfullyInitializedClient(); + * }).catch(err => { + * doSomethingForFailedStartup(err); + * }); + * + * // using async/await + * try { + * await client.waitForInitialization(); + * doSomethingWithSuccessfullyInitializedClient(); + * } catch (err) { + * doSomethingForFailedStartup(err); + * } + * ``` + * + * It is important that you handle the rejection case; otherwise it will become an unhandled Promise + * rejection, which is a serious error on some platforms. The Promise is not created unless you + * request it, so if you never call `waitForInitialization()` then you do not have to worry about + * unhandled rejections. + * + * Note that you can also use event listeners ({@link on}) for the same purpose: the event `"initialized"` + * indicates success, and `"failed"` indicates failure. + * + * @returns + * A Promise that will be resolved if the client initializes successfully, or rejected if it + * fails. 
+ */ + waitForInitialization(): Promise; + + /** + * Identifies a context to LaunchDarkly. + * + * Unlike the server-side SDKs, the client-side JavaScript SDKs maintain a current context state, + * which is set at initialization time. You only need to call `identify()` if the context has changed + * since then. + * + * Changing the current context also causes all feature flag values to be reloaded. Until that has + * finished, calls to {@link variation} will still return flag values for the previous context. You can + * use a callback or a Promise to determine when the new flag values are available. + * + * @param context + * The context properties. Must contain at least the `key` property. + * @param hash + * The signed context key if you are using [Secure Mode](https://docs.launchdarkly.com/sdk/features/secure-mode#configuring-secure-mode-in-the-javascript-client-side-sdk). + * @param onDone + * A function which will be called as soon as the flag values for the new context are available, + * with two parameters: an error value (if any), and an {@link LDFlagSet} containing the new values + * (which can also be obtained by calling {@link variation}). If the callback is omitted, you will + * receive a Promise instead. + * @returns + * If you provided a callback, then nothing. Otherwise, a Promise which resolve once the flag + * values for the new context are available, providing an {@link LDFlagSet} containing the new values + * (which can also be obtained by calling {@link variation}). + */ + identify( + context: LDContext, + hash?: string, + onDone?: (err: Error | null, flags: LDFlagSet | null) => void, + ): Promise; + + /** + * Returns the client's current context. + * + * This is the context that was most recently passed to {@link identify}, or, if {@link identify} has never + * been called, the initial context specified when the client was created. + */ + getContext(): LDContext; + + /** + * Flushes all pending analytics events. 
+ * + * Normally, batches of events are delivered in the background at intervals determined by the + * `flushInterval` property of {@link LDOptions}. Calling `flush()` triggers an immediate delivery. + * + * @returns + * A Promise which resolves once + * flushing is finished. You can inspect the result of the flush for errors. + */ + flush(): Promise<{ error?: Error; result: boolean }>; + + /** + * Determines the variation of a feature flag for the current context. + * + * In the client-side JavaScript SDKs, this is always a fast synchronous operation because all of + * the feature flag values for the current context have already been loaded into memory. + * + * @param key + * The unique key of the feature flag. + * @param defaultValue + * The default value of the flag, to be used if the value is not available from LaunchDarkly. + * @returns + * The flag's value. + */ + variation(key: string, defaultValue?: LDFlagValue): LDFlagValue; + + /** + * Determines the variation of a feature flag for a context, along with information about how it was + * calculated. + * + * Note that this will only work if you have set `evaluationExplanations` to true in {@link LDOptions}. + * Otherwise, the `reason` property of the result will be null. + * + * The `reason` property of the result will also be included in analytics events, if you are + * capturing detailed event data for this flag. + * + * For more information, see the [SDK reference guide](https://docs.launchdarkly.com/sdk/features/evaluation-reasons#javascript). + * + * @param key + * The unique key of the feature flag. + * @param defaultValue + * The default value of the flag, to be used if the value is not available from LaunchDarkly. + * + * @returns + * An {@link LDEvaluationDetail} object containing the value and explanation. + */ + variationDetail(key: string, defaultValue?: LDFlagValue): LDEvaluationDetail; + + /** + * Specifies whether or not to open a streaming connection to LaunchDarkly for live flag updates. 
+ * + * If this is true, the client will always attempt to maintain a streaming connection; if false, + * it never will. If you leave the value undefined (the default), the client will open a streaming + * connection if you subscribe to `"change"` or `"change:flag-key"` events (see {@link LDClient.on}). + * + * This can also be set as the `streaming` property of {@link LDOptions}. + */ + setStreaming(value?: boolean): void; + + /** + * Registers an event listener. + * + * The following event names (keys) are used by the client: + * + * - `"ready"`: The client has finished starting up. This event will be sent regardless + * of whether it successfully connected to LaunchDarkly, or encountered an error + * and had to give up; to distinguish between these cases, see below. + * - `"initialized"`: The client successfully started up and has valid feature flag + * data. This will always be accompanied by `"ready"`. + * - `"failed"`: The client encountered an error that prevented it from connecting to + * LaunchDarkly, such as an invalid environment ID. All flag evaluations will + * therefore receive default values. This will always be accompanied by `"ready"`. + * - `"error"`: General event for any kind of error condition during client operation. + * The callback parameter is an Error object. If you do not listen for "error" + * events, then the errors will be logged with `console.log()`. + * - `"change"`: The client has received new feature flag data. This can happen either + * because you have switched contexts with {@link identify}, or because the client has a + * stream connection and has received a live change to a flag value (see below). + * The callback parameter is an {@link LDFlagChangeset}. + * - `"change:FLAG-KEY"`: The client has received a new value for a specific flag + * whose key is `FLAG-KEY`. The callback receives two parameters: the current (new) + * flag value, and the previous value. 
This is always accompanied by a general + * `"change"` event as described above; you can listen for either or both. + * + * The `"change"` and `"change:FLAG-KEY"` events have special behavior: by default, the + * client will open a streaming connection to receive live changes if and only if + * you are listening for one of these events. This behavior can be overridden by + * setting `streaming` in {@link LDOptions} or calling {@link LDClient.setStreaming}. + * + * @param key + * The name of the event for which to listen. + * @param callback + * The function to execute when the event fires. The callback may or may not + * receive parameters, depending on the type of event. + * @param context + * The `this` context to use for the callback. + */ + on(key: string, callback: (...args: any[]) => void, context?: any): void; + + /** + * Deregisters an event listener. See {@link on} for the available event types. + * + * @param key + * The name of the event for which to stop listening. + * @param callback + * The function to deregister. + * @param context + * The `this` context for the callback, if one was specified for {@link on}. + */ + off(key: string, callback: (...args: any[]) => void, context?: any): void; + + /** + * Track page events to use in goals or A/B tests. + * + * LaunchDarkly automatically tracks pageviews and clicks that are specified in the + * Goals section of their dashboard. This can be used to track custom goals or other + * events that do not currently have goals. + * + * @param key + * The name of the event, which may correspond to a goal in A/B tests. + * @param data + * Additional information to associate with the event. + * @param metricValue + * An optional numeric value that can be used by the LaunchDarkly experimentation + * feature in numeric custom metrics. Can be omitted if this event is used by only + * non-numeric metrics. This field will also be returned as part of the custom event + * for Data Export. 
+ */ + track(key: string, data?: any, metricValue?: number): void; + + /** + * Returns a map of all available flags to the current context's values. + * + * @returns + * An object in which each key is a feature flag key and each value is the flag value. + * Note that there is no way to specify a default value for each flag as there is with + * {@link variation}, so any flag that cannot be evaluated will have a null value. + */ + allFlags(): LDFlagSet; + + /** + * Shuts down the client and releases its resources, after delivering any pending analytics + * events. After the client is closed, all calls to {@link variation} will return default values, + * and it will not make any requests to LaunchDarkly. + */ + close(): void; +} diff --git a/packages/shared/sdk-client/src/api/LDEmitter.test.ts b/packages/shared/sdk-client/src/api/LDEmitter.test.ts new file mode 100644 index 000000000..9783b55da --- /dev/null +++ b/packages/shared/sdk-client/src/api/LDEmitter.test.ts @@ -0,0 +1,135 @@ +import { LDContext } from '@launchdarkly/js-sdk-common'; + +import LDEmitter from './LDEmitter'; + +describe('LDEmitter', () => { + const error = { type: 'network', message: 'unreachable' }; + let emitter: LDEmitter; + + beforeEach(() => { + jest.resetAllMocks(); + emitter = new LDEmitter(); + }); + + test('subscribe and handle', () => { + const errorHandler1 = jest.fn(); + const errorHandler2 = jest.fn(); + + emitter.on('error', errorHandler1); + emitter.on('error', errorHandler2); + emitter.emit('error', error); + + expect(errorHandler1).toHaveBeenCalledWith(error); + expect(errorHandler2).toHaveBeenCalledWith(error); + }); + + test('unsubscribe and handle', () => { + const errorHandler1 = jest.fn(); + const errorHandler2 = jest.fn(); + + emitter.on('error', errorHandler1); + emitter.on('error', errorHandler2); + emitter.off('error'); + emitter.emit('error', error); + + expect(errorHandler1).not.toHaveBeenCalled(); + expect(errorHandler2).not.toHaveBeenCalled(); + 
expect(emitter.listenerCount('error')).toEqual(0); + }); + + test('unsubscribing an event should not affect other events', () => { + const errorHandler = jest.fn(); + const changeHandler = jest.fn(); + + emitter.on('error', errorHandler); + emitter.on('change', changeHandler); + emitter.off('error'); // unsubscribe error handler + emitter.emit('error', error); + emitter.emit('change'); + + // change handler should still be affective + expect(changeHandler).toHaveBeenCalled(); + expect(errorHandler).not.toHaveBeenCalled(); + }); + + test('eventNames', () => { + const errorHandler1 = jest.fn(); + const changeHandler = jest.fn(); + const readyHandler = jest.fn(); + + emitter.on('error', errorHandler1); + emitter.on('change', changeHandler); + emitter.on('ready', readyHandler); + + expect(emitter.eventNames()).toEqual(['error', 'change', 'ready']); + }); + + test('listener count', () => { + const errorHandler1 = jest.fn(); + const errorHandler2 = jest.fn(); + const changeHandler = jest.fn(); + + emitter.on('error', errorHandler1); + emitter.on('error', errorHandler2); + emitter.on('change', changeHandler); + + expect(emitter.listenerCount('error')).toEqual(2); + expect(emitter.listenerCount('change')).toEqual(1); + }); + + test('on listener with arguments', () => { + const context = { kind: 'user', key: 'test-user-1' }; + const onListener = jest.fn((c: LDContext) => c); + + emitter.on('change', onListener); + emitter.emit('change', context); + + expect(onListener).toBeCalledWith(context); + }); + + test('unsubscribe one of many listeners', () => { + const errorHandler1 = jest.fn(); + const errorHandler2 = jest.fn(); + + emitter.on('error', errorHandler1); + emitter.on('error', errorHandler2); + emitter.off('error', errorHandler2); + emitter.emit('error'); + + expect(emitter.listenerCount('error')).toEqual(1); + expect(errorHandler2).not.toBeCalled(); + }); + + test('unsubscribe all listeners manually', () => { + const errorHandler1 = jest.fn(); + const errorHandler2 = 
jest.fn(); + + emitter.on('error', errorHandler1); + emitter.on('error', errorHandler2); + + // intentional duplicate calls to ensure no errors are thrown if the + // same handler gets removed multiple times + emitter.off('error', errorHandler1); + emitter.off('error', errorHandler1); + emitter.off('error', errorHandler2); + emitter.emit('error'); + + expect(emitter.listenerCount('error')).toEqual(0); + expect(errorHandler1).not.toBeCalled(); + expect(errorHandler2).not.toBeCalled(); + }); + + test('unsubscribe all listeners by event name', () => { + const errorHandler1 = jest.fn(); + const errorHandler2 = jest.fn(); + + emitter.on('error', errorHandler1); + emitter.on('error', errorHandler2); + emitter.off('error'); + emitter.emit('error'); + + expect(emitter.listenerCount('error')).toEqual(0); + expect(errorHandler1).not.toBeCalled(); + expect(errorHandler2).not.toBeCalled(); + }); +}); diff --git a/packages/shared/sdk-client/src/api/LDEmitter.ts b/packages/shared/sdk-client/src/api/LDEmitter.ts new file mode 100644 index 000000000..53ad6be38 --- /dev/null +++ b/packages/shared/sdk-client/src/api/LDEmitter.ts @@ -0,0 +1,95 @@ +export type EventName = 'change' | 'ready' | 'failed' | 'error'; + +type CustomEventListeners = { + original: Function; + custom: Function; +}; +/** + * Native api usage: EventTarget. + * + * This is an event emitter using the standard built-in EventTarget web api. + * https://developer.mozilla.org/en-US/docs/Web/API/EventTarget + * + * In react-native use event-target-shim to polyfill EventTarget. This is safe + * because the react-native repo uses it too. 
+ * https://github.com/mysticatea/event-target-shim + */ +export default class LDEmitter { + private et: EventTarget = new EventTarget(); + + private listeners: Map = new Map(); + + /** + * Cache all listeners in a Map so we can remove them later + * @param name string event name + * @param originalListener pointer to the original function as specified by + * the consumer + * @param customListener pointer to the custom function based on original + * listener. This is needed to allow for CustomEvents. + * @private + */ + private saveListener(name: EventName, originalListener: Function, customListener: Function) { + const listener = { original: originalListener, custom: customListener }; + if (!this.listeners.has(name)) { + this.listeners.set(name, [listener]); + } else { + this.listeners.get(name)?.push(listener); + } + } + + on(name: EventName, listener: Function) { + const customListener = (e: Event) => { + const { detail } = e as CustomEvent; + + // invoke listener with args from CustomEvent + listener(...detail); + }; + this.saveListener(name, listener, customListener); + this.et.addEventListener(name, customListener); + } + + /** + * Unsubscribe one or all events. + * + * @param name + * @param listener Optional. If unspecified, all listeners for the event will be removed. 
+ */ + off(name: EventName, listener?: Function) { + const existingListeners = this.listeners.get(name); + if (!existingListeners) { + return; + } + + if (listener) { + const toBeRemoved = existingListeners.find((c) => c.original === listener); + this.et.removeEventListener(name, toBeRemoved?.custom as any); + + // remove from internal cache + const updated = existingListeners.filter((l) => l.original !== listener); + if (updated.length === 0) { + this.listeners.delete(name); + } else { + this.listeners.set(name, updated); + } + return; + } + + // remove all listeners + existingListeners.forEach((l) => { + this.et.removeEventListener(name, l.custom as any); + }); + this.listeners.delete(name); + } + + emit(name: EventName, ...detail: any[]) { + this.et.dispatchEvent(new CustomEvent(name, { detail })); + } + + eventNames(): string[] { + return [...this.listeners.keys()]; + } + + listenerCount(name: EventName): number { + return this.listeners.get(name)?.length ?? 0; + } +} diff --git a/packages/shared/sdk-client/src/api/LDInspection.ts b/packages/shared/sdk-client/src/api/LDInspection.ts new file mode 100644 index 000000000..125a9116c --- /dev/null +++ b/packages/shared/sdk-client/src/api/LDInspection.ts @@ -0,0 +1,105 @@ +import { LDContext, LDEvaluationDetail } from '@launchdarkly/js-sdk-common'; + +/** + * Callback interface for collecting information about the SDK at runtime. + * + * This interface is used to collect information about flag usage. + * + * This interface should not be used by the application to access flags for the purpose of controlling application + * flow. It is intended for monitoring, analytics, or debugging purposes. + */ + +export interface LDInspectionFlagUsedHandler { + type: 'flag-used'; + + /** + * Name of the inspector. Will be used for logging issues with the inspector. 
+ */ + name: string; + + /** + * This method is called when a flag is accessed via a variation method, or it can be called based on actions in + * wrapper SDKs which have different methods of tracking when a flag was accessed. It is not called when a call is made + * to allFlags. + */ + method: (flagKey: string, flagDetail: LDEvaluationDetail, context: LDContext) => void; +} + +/** + * Callback interface for collecting information about the SDK at runtime. + * + * This interface is used to collect information about flag data. In order to understand the + * current flag state it should be combined with {@link LDInspectionFlagValueChangedHandler}. + * This interface will get the initial flag information, and + * {@link LDInspectionFlagValueChangedHandler} will provide changes to individual flags. + * + * This interface should not be used by the application to access flags for the purpose of controlling application + * flow. It is intended for monitoring, analytics, or debugging purposes. + */ +export interface LDInspectionFlagDetailsChangedHandler { + type: 'flag-details-changed'; + + /** + * Name of the inspector. Will be used for logging issues with the inspector. + */ + name: string; + + /** + * This method is called when the flags in the store are replaced with new flags. It will contain all flags + * regardless of if they have been evaluated. + */ + method: (details: Record) => void; +} + +/** + * Callback interface for collecting information about the SDK at runtime. + * + * This interface is used to collect changes to flag data, but does not provide the initial + * data. It can be combined with {@link LDInspectionFlagValuesChangedHandler} to track the + * entire flag state. + * + * This interface should not be used by the application to access flags for the purpose of controlling application + * flow. It is intended for monitoring, analytics, or debugging purposes. 
+ */ +export interface LDInspectionFlagDetailChangedHandler { + type: 'flag-detail-changed'; + + /** + * Name of the inspector. Will be used for logging issues with the inspector. + */ + name: string; + + /** + * This method is called when a flag is updated. It will not be called + * when all flags are updated. + */ + method: (flagKey: string, detail: LDEvaluationDetail) => void; +} + +/** + * Callback interface for collecting information about the SDK at runtime. + * + * This interface is used to track current identity state of the SDK. + * + * This interface should not be used by the application to access flags for the purpose of controlling application + * flow. It is intended for monitoring, analytics, or debugging purposes. + */ +export interface LDInspectionIdentifyHandler { + type: 'client-identity-changed'; + + /** + * Name of the inspector. Will be used for logging issues with the inspector. + */ + name: string; + + /** + * This method will be called when an identify operation completes. + */ + method: (context: LDContext) => void; +} + +export type LDInspection = + | LDInspectionFlagUsedHandler + | LDInspectionFlagDetailsChangedHandler + | LDInspectionFlagDetailChangedHandler + | LDInspectionIdentifyHandler; diff --git a/packages/shared/sdk-client/src/api/LDOptions.ts b/packages/shared/sdk-client/src/api/LDOptions.ts new file mode 100644 index 000000000..34d5118a5 --- /dev/null +++ b/packages/shared/sdk-client/src/api/LDOptions.ts @@ -0,0 +1,238 @@ +import type { LDFlagSet, LDLogger } from '@launchdarkly/js-sdk-common'; + +import type { LDInspection } from './LDInspection'; + +export default interface LDOptions { + /** + * An object that will perform logging for the client. + * + * If not specified, the default is to use `basicLogger`. + */ + logger?: LDLogger; + + /** + * The initial set of flags to use until the remote set is retrieved. + * + * If `"localStorage"` is specified, the flags will be saved and retrieved from browser local + * storage. 
Alternatively, an {@link LDFlagSet} can be specified which will be used as the initial + * source of flag values. In the latter case, the flag values will be available via {@link LDClient.variation} + * immediately after calling `initialize()` (normally they would not be available until the + * client signals that it is ready). + * + * For more information, see the [SDK Reference Guide](https://docs.launchdarkly.com/sdk/features/bootstrapping#javascript). + */ + bootstrap?: 'localStorage' | LDFlagSet; + + /** + * The base uri for the LaunchDarkly server. + * + * Most users should use the default value. + */ + baseUri?: string; + + /** + * The base uri for the LaunchDarkly events server. + * + * Most users should use the default value. + */ + eventsUri?: string; + + /** + * The base uri for the LaunchDarkly streaming server. + * + * Most users should use the default value. + */ + streamUri?: string; + + /** + * Whether or not to open a streaming connection to LaunchDarkly for live flag updates. + * + * If this is true, the client will always attempt to maintain a streaming connection; if false, + * it never will. If you leave the value undefined (the default), the client will open a streaming + * connection if you subscribe to `"change"` or `"change:flag-key"` events (see {@link LDClient.on}). + * + * This is equivalent to calling `client.setStreaming()` with the same value. + */ + stream?: boolean; + + /** + * Whether or not to use the REPORT verb to fetch flag settings. + * + * If this is true, flag settings will be fetched with a REPORT request + * including a JSON entity body with the context object. + * + * Otherwise (by default) a GET request will be issued with the context passed as + * a base64 uri-encoded path parameter. + * + * Do not use unless advised by LaunchDarkly. + */ + useReport?: boolean; + + /** + * Whether or not to include custom HTTP headers when requesting flags from LaunchDarkly. 
+   *
+   * These are used to send metadata about the SDK (such as the version). They
+   * are also used to send the application.id and application.version set in
+   * the options.
+   *
+   * This defaults to true (custom headers will be sent). One reason you might
+   * want to set it to false is that the presence of custom headers causes
+   * browsers to make an extra OPTIONS request (a CORS preflight check) before
+   * each flag request, which could affect performance.
+   */
+  sendLDHeaders?: boolean;
+
+  /**
+   * A transform function for dynamic configuration of HTTP headers.
+   *
+   * This method will run last in the header generation sequence, so the function should have
+   * all system generated headers in case those also need to be modified.
+   */
+  requestHeaderTransform?: (headers: Map<string, string>) => Map<string, string>;
+
+  /**
+   * Whether LaunchDarkly should provide additional information about how flag values were
+   * calculated.
+   *
+   * The additional information will then be available through the client's
+   * {@link LDClient.variationDetail} method. Since this increases the size of network requests,
+   * such information is not sent unless you set this option to true.
+   */
+  withReasons?: boolean;
+
+  /**
+   * Whether to send analytics events back to LaunchDarkly. By default, this is true.
+   */
+  sendEvents?: boolean;
+
+  /**
+   * Whether all context attributes (except the context key) should be marked as private, and
+   * not sent to LaunchDarkly in analytics events.
+   *
+   * By default, this is false.
+   */
+  allAttributesPrivate?: boolean;
+
+  /**
+   * Specifies a list of attribute names (either built-in or custom) which should be marked as
+   * private, and not sent to LaunchDarkly in analytics events. You can also specify this on a
+   * per-context basis with {@link LDContextMeta.privateAttributes}.
+   *
+   * Any contexts sent to LaunchDarkly with this configuration active will have attributes with
+   * these names removed in analytic events. 
This is in addition to any attributes that were
+   * marked as private for an individual context with {@link LDContextMeta.privateAttributes}.
+   * Setting {@link LDOptions.allAttributesPrivate} to true overrides this.
+   *
+   * If and only if a parameter starts with a slash, it is interpreted as a slash-delimited path
+   * that can denote a nested property within a JSON object. For instance, "/address/street" means
+   * that if there is an attribute called "address" that is a JSON object, and one of the object's
+   * properties is "street", the "street" property will be redacted from the analytics data but
+   * other properties within "address" will still be sent. This syntax also uses the JSON Pointer
+   * convention of escaping a literal slash character as "~1" and a tilde as "~0".
+   */
+  privateAttributes?: Array<string>;
+
+  /**
+   * The capacity of the analytics events queue.
+   *
+   * The client buffers up to this many events in memory before flushing. If the capacity is exceeded
+   * before the queue is flushed, events will be discarded. Increasing the capacity means that events
+   * are less likely to be discarded, at the cost of consuming more memory. Note that in regular usage
+   * flag evaluations do not produce individual events, only summary counts, so you only need a large
+   * capacity if you are generating a large number of click, pageview, or identify events (or if you
+   * are using the event debugger).
+   *
+   * The default value is 100.
+   */
+  capacity?: number;
+
+  /**
+   * The interval in between flushes of the analytics events queue, in seconds.
+   *
+   * The default value is 2s.
+   */
+  flushInterval?: number;
+
+  /**
+   * Sets the initial reconnect delay for the streaming connection, in seconds.
+   *
+   * The streaming service uses a backoff algorithm (with jitter) every time the connection needs
+   * to be reestablished. The delay for the first reconnection will start near this value, and then
+   * increase exponentially for any subsequent connection failures. 
+ * + * The default value is 1. + */ + streamInitialReconnectDelay?: number; + + /** + * Set to true to opt out of sending diagnostics data. + * + * Unless `diagnosticOptOut` is set to true, the client will send some diagnostics data to the LaunchDarkly + * servers in order to assist in the development of future SDK improvements. These diagnostics consist of + * an initial payload containing some details of SDK in use, the SDK's configuration, and the platform the + * SDK is being run on, as well as payloads sent periodically with information on irregular occurrences such + * as dropped events. + */ + diagnosticOptOut?: boolean; + + /** + * The interval at which periodic diagnostic data is sent, in seconds. + * + * The default is 900 (every 15 minutes) and the minimum value is 6. See {@link diagnosticOptOut} + * for more information on the diagnostics data being sent. + */ + diagnosticRecordingInterval?: number; + + /** + * For use by wrapper libraries to set an identifying name for the wrapper being used. + * + * This will be sent as diagnostic information to the LaunchDarkly servers to allow recording + * metrics on the usage of these wrapper libraries. + */ + wrapperName?: string; + + /** + * For use by wrapper libraries to set version to be included alongside `wrapperName`. + * + * If `wrapperName` is unset, this field will be ignored. + */ + wrapperVersion?: string; + + /** + * Information about the application where the LaunchDarkly SDK is running. + */ + application?: { + /** + * A unique identifier representing the application where the LaunchDarkly SDK is running. + * + * This can be specified as any string value as long as it only uses the following characters: ASCII letters, + * ASCII digits, period, hyphen, underscore. A string containing any other characters will be ignored. + * + * Example: `authentication-service` + */ + id?: string; + + /** + * A unique identifier representing the version of the application where the LaunchDarkly SDK is running. 
+ * + * This can be specified as any string value as long as it only uses the following characters: ASCII letters, + * ASCII digits, period, hyphen, underscore. A string containing any other characters will be ignored. + * + * Example: `1.0.0` (standard version string) or `abcdef` (sha prefix) + */ + version?: string; + }; + + /** + * Inspectors can be used for collecting information for monitoring, analytics, and debugging. + */ + inspectors?: LDInspection[]; + + /** + * The signed context key for Secure Mode. + * + * For more information, see the JavaScript SDK Reference Guide on + * [Secure mode](https://docs.launchdarkly.com/sdk/features/secure-mode#configuring-secure-mode-in-the-javascript-client-side-sdk). + */ + hash?: string; +} diff --git a/packages/shared/sdk-client/src/configuration/Configuration.test.ts b/packages/shared/sdk-client/src/configuration/Configuration.test.ts new file mode 100644 index 000000000..e501b3d84 --- /dev/null +++ b/packages/shared/sdk-client/src/configuration/Configuration.test.ts @@ -0,0 +1,114 @@ +/* eslint-disable no-console */ +import Configuration from './Configuration'; + +describe('Configuration', () => { + beforeEach(() => { + jest.resetAllMocks(); + console.error = jest.fn(); + }); + + test('defaults', () => { + const config = new Configuration(); + + expect(config).toMatchObject({ + allAttributesPrivate: false, + baseUri: 'https://sdk.launchdarkly.com', + capacity: 100, + diagnosticOptOut: false, + diagnosticRecordingInterval: 900, + withReasons: false, + eventsUri: 'https://events.launchdarkly.com', + flushInterval: 2, + inspectors: [], + logger: { + destination: console.error, + logLevel: 1, + name: 'LaunchDarkly', + }, + privateAttributes: [], + sendEvents: true, + sendLDHeaders: true, + streamInitialReconnectDelay: 1, + streamUri: 'https://clientstream.launchdarkly.com', + useReport: false, + }); + expect(console.error).not.toHaveBeenCalled(); + }); + + test('specified options should be set', () => { + const config = 
new Configuration({ wrapperName: 'test', stream: true });
+    expect(config).toMatchObject({ wrapperName: 'test', stream: true });
+  });
+
+  test('unknown option', () => {
+    // @ts-ignore
+    const config = new Configuration({ baseballUri: 1 });
+
+    expect(config.baseballUri).toBeUndefined();
+    expect(console.error).toHaveBeenCalledWith(expect.stringContaining('unknown config option'));
+  });
+
+  test('wrong type for boolean should be converted', () => {
+    // @ts-ignore
+    const config = new Configuration({ sendEvents: 0 });
+
+    expect(config.sendEvents).toBeFalsy();
+    expect(console.error).toHaveBeenCalledWith(
+      expect.stringContaining('should be a boolean, got number, converting'),
+    );
+  });
+
+  test('wrong type for number should use default', () => {
+    // @ts-ignore
+    const config = new Configuration({ capacity: true });
+
+    expect(config.capacity).toEqual(100);
+    expect(console.error).toHaveBeenCalledWith(
+      expect.stringContaining('should be of type number with minimum value of 1, got boolean'),
+    );
+  });
+
+  test('enforce minimum', () => {
+    const config = new Configuration({ flushInterval: 1 });
+
+    expect(config.flushInterval).toEqual(2);
+    expect(console.error).toHaveBeenNthCalledWith(
+      1,
+      expect.stringContaining('"flushInterval" had invalid value of 1, using minimum of 2 instead'),
+    );
+  });
+
+  test('undefined stream should not log warning', () => {
+    const config = new Configuration({ stream: undefined });
+
+    expect(config.stream).toBeUndefined();
+    expect(console.error).not.toHaveBeenCalled();
+  });
+
+  test('null stream should default to undefined', () => {
+    // @ts-ignore
+    const config = new Configuration({ stream: null });
+
+    expect(config.stream).toBeUndefined();
+    expect(console.error).not.toHaveBeenCalled();
+  });
+
+  test('wrong stream type should be converted to boolean', () => {
+    // @ts-ignore
+    const config = new Configuration({ stream: 1 });
+
+    expect(config.stream).toBeTruthy();
+    expect(console.error).toHaveBeenCalled();
+  });
+
+  
test('invalid bootstrap should use default', () => { + // @ts-ignore + const config = new Configuration({ bootstrap: 'localStora' }); + + expect(config.bootstrap).toBeUndefined(); + expect(console.error).toHaveBeenNthCalledWith( + 1, + expect.stringContaining(`should be of type 'localStorage' | LDFlagSet, got string`), + ); + }); +}); diff --git a/packages/shared/sdk-client/src/configuration/Configuration.ts b/packages/shared/sdk-client/src/configuration/Configuration.ts new file mode 100644 index 000000000..75640cec7 --- /dev/null +++ b/packages/shared/sdk-client/src/configuration/Configuration.ts @@ -0,0 +1,99 @@ +import { + ApplicationTags, + createSafeLogger, + LDFlagSet, + NumberWithMinimum, + OptionMessages, + ServiceEndpoints, + TypeValidators, +} from '@launchdarkly/js-sdk-common'; + +import { LDInspection } from '../api/LDInspection'; +import type LDOptions from '../api/LDOptions'; +import validators from './validators'; + +export default class Configuration { + public static DEFAULT_POLLING = 'https://sdk.launchdarkly.com'; + public static DEFAULT_STREAM = 'https://clientstream.launchdarkly.com'; + + public readonly logger = createSafeLogger(); + + public readonly baseUri = Configuration.DEFAULT_POLLING; + public readonly eventsUri = ServiceEndpoints.DEFAULT_EVENTS; + public readonly streamUri = Configuration.DEFAULT_STREAM; + + public readonly capacity = 100; + public readonly diagnosticRecordingInterval = 900; + public readonly flushInterval = 2; + public readonly streamInitialReconnectDelay = 1; + + public readonly allAttributesPrivate = false; + public readonly diagnosticOptOut = false; + public readonly withReasons = false; + public readonly sendEvents = true; + public readonly sendLDHeaders = true; + public readonly useReport = false; + + public readonly inspectors: LDInspection[] = []; + public readonly privateAttributes: string[] = []; + + public readonly tags: ApplicationTags; + public readonly application?: { id?: string; version?: string }; + 
public readonly bootstrap?: 'localStorage' | LDFlagSet;
+  public readonly requestHeaderTransform?: (headers: Map<string, string>) => Map<string, string>;
+  public readonly stream?: boolean;
+  public readonly hash?: string;
+  public readonly wrapperName?: string;
+  public readonly wrapperVersion?: string;
+
+  public readonly serviceEndpoints: ServiceEndpoints;
+
+  // Allow indexing Configuration by a string
+  [index: string]: any;
+
+  constructor(pristineOptions: LDOptions = {}) {
+    const errors = this.validateTypesAndNames(pristineOptions);
+    errors.forEach((e: string) => this.logger.warn(e));
+
+    this.serviceEndpoints = new ServiceEndpoints(this.streamUri, this.baseUri, this.eventsUri);
+    this.tags = new ApplicationTags({ application: this.application, logger: this.logger });
+  }
+
+  validateTypesAndNames(pristineOptions: LDOptions): string[] {
+    const errors: string[] = [];
+
+    Object.entries(pristineOptions).forEach(([k, v]) => {
+      const validator = validators[k as keyof LDOptions];
+
+      if (validator) {
+        if (!validator.is(v)) {
+          const validatorType = validator.getType();
+
+          if (validatorType === 'boolean') {
+            errors.push(OptionMessages.wrongOptionTypeBoolean(k, typeof v));
+            this[k] = !!v;
+          } else if (validatorType === 'boolean | undefined | null') {
+            errors.push(OptionMessages.wrongOptionTypeBoolean(k, typeof v));
+
+            if (typeof v !== 'boolean' && typeof v !== 'undefined' && v !== null) {
+              this[k] = !!v;
+            }
+          } else if (validator instanceof NumberWithMinimum && TypeValidators.Number.is(v)) {
+            const { min } = validator as NumberWithMinimum;
+            errors.push(OptionMessages.optionBelowMinimum(k, v, min));
+            this[k] = min;
+          } else {
+            errors.push(OptionMessages.wrongOptionType(k, validator.getType(), typeof v));
+          }
+        } else {
+          // if an option is explicitly null, coerce to undefined
+          this[k] = v ?? 
undefined; + } + } else { + errors.push(OptionMessages.unknownOption(k)); + } + }); + + return errors; + } +} diff --git a/packages/shared/sdk-client/src/configuration/index.ts b/packages/shared/sdk-client/src/configuration/index.ts new file mode 100644 index 000000000..b22c3ea49 --- /dev/null +++ b/packages/shared/sdk-client/src/configuration/index.ts @@ -0,0 +1,3 @@ +import Configuration from './Configuration'; + +export default Configuration; diff --git a/packages/shared/sdk-client/src/configuration/validators.ts b/packages/shared/sdk-client/src/configuration/validators.ts new file mode 100644 index 000000000..b8c7a09a6 --- /dev/null +++ b/packages/shared/sdk-client/src/configuration/validators.ts @@ -0,0 +1,51 @@ +import { noop, TypeValidator, TypeValidators } from '@launchdarkly/js-sdk-common'; + +import { LDInspection } from '../api/LDInspection'; +import LDOptions from '../api/LDOptions'; + +class BootStrapValidator implements TypeValidator { + is(u: unknown): boolean { + return u === 'localStorage' || typeof u === 'object' || typeof u === 'undefined' || u === null; + } + + getType(): string { + return `'localStorage' | LDFlagSet`; + } +} + +const validators: Record = { + logger: TypeValidators.Object, + + baseUri: TypeValidators.String, + streamUri: TypeValidators.String, + eventsUri: TypeValidators.String, + + capacity: TypeValidators.numberWithMin(1), + diagnosticRecordingInterval: TypeValidators.numberWithMin(2), + flushInterval: TypeValidators.numberWithMin(2), + streamInitialReconnectDelay: TypeValidators.numberWithMin(0), + + allAttributesPrivate: TypeValidators.Boolean, + diagnosticOptOut: TypeValidators.Boolean, + withReasons: TypeValidators.Boolean, + sendEvents: TypeValidators.Boolean, + sendLDHeaders: TypeValidators.Boolean, + useReport: TypeValidators.Boolean, + + inspectors: TypeValidators.createTypeArray('LDInspection[]', { + type: 'flag-used', + method: noop, + name: '', + }), + privateAttributes: TypeValidators.StringArray, + + application: 
TypeValidators.Object, + bootstrap: new BootStrapValidator(), + requestHeaderTransform: TypeValidators.Function, + stream: TypeValidators.NullableBoolean, + wrapperName: TypeValidators.String, + wrapperVersion: TypeValidators.String, + hash: TypeValidators.String, +}; + +export default validators; diff --git a/packages/shared/sdk-client/src/diagnostics/createDiagnosticsInitConfig.test.ts b/packages/shared/sdk-client/src/diagnostics/createDiagnosticsInitConfig.test.ts new file mode 100644 index 000000000..64f741044 --- /dev/null +++ b/packages/shared/sdk-client/src/diagnostics/createDiagnosticsInitConfig.test.ts @@ -0,0 +1,61 @@ +import { secondsToMillis } from '@launchdarkly/js-sdk-common'; + +import Configuration from '../configuration'; +import createDiagnosticsInitConfig, { + type DiagnosticsInitConfig, +} from './createDiagnosticsInitConfig'; + +describe('createDiagnosticsInitConfig', () => { + let initConfig: DiagnosticsInitConfig; + + beforeEach(() => { + initConfig = createDiagnosticsInitConfig(new Configuration()); + }); + + test('defaults', () => { + expect(initConfig).toEqual({ + allAttributesPrivate: false, + bootstrapMode: false, + customBaseURI: false, + customEventsURI: false, + customStreamURI: false, + diagnosticRecordingIntervalMillis: secondsToMillis(900), + eventsCapacity: 100, + eventsFlushIntervalMillis: secondsToMillis(2), + reconnectTimeMillis: secondsToMillis(1), + streamingDisabled: true, + usingSecureMode: false, + }); + }); + + test('non-default config', () => { + const custom = createDiagnosticsInitConfig( + new Configuration({ + baseUri: 'https://dev.ld.com', + streamUri: 'https://stream.ld.com', + eventsUri: 'https://events.ld.com', + capacity: 1, + flushInterval: 2, + streamInitialReconnectDelay: 3, + diagnosticRecordingInterval: 4, + stream: true, + allAttributesPrivate: true, + hash: 'test-hash', + bootstrap: 'localStorage', + }), + ); + expect(custom).toEqual({ + allAttributesPrivate: true, + bootstrapMode: true, + customBaseURI: 
true, + customEventsURI: true, + customStreamURI: true, + diagnosticRecordingIntervalMillis: 4000, + eventsCapacity: 1, + eventsFlushIntervalMillis: 2000, + reconnectTimeMillis: 3000, + streamingDisabled: false, + usingSecureMode: true, + }); + }); +}); diff --git a/packages/shared/sdk-client/src/diagnostics/createDiagnosticsInitConfig.ts b/packages/shared/sdk-client/src/diagnostics/createDiagnosticsInitConfig.ts new file mode 100644 index 000000000..6bf267381 --- /dev/null +++ b/packages/shared/sdk-client/src/diagnostics/createDiagnosticsInitConfig.ts @@ -0,0 +1,35 @@ +import { secondsToMillis, ServiceEndpoints } from '@launchdarkly/js-sdk-common'; + +import Configuration from '../configuration'; + +export type DiagnosticsInitConfig = { + // client & server common properties + customBaseURI: boolean; + customStreamURI: boolean; + customEventsURI: boolean; + eventsCapacity: number; + eventsFlushIntervalMillis: number; + reconnectTimeMillis: number; + diagnosticRecordingIntervalMillis: number; + streamingDisabled: boolean; + allAttributesPrivate: boolean; + + // client specific properties + usingSecureMode: boolean; + bootstrapMode: boolean; +}; +const createDiagnosticsInitConfig = (config: Configuration): DiagnosticsInitConfig => ({ + customBaseURI: config.baseUri !== Configuration.DEFAULT_POLLING, + customStreamURI: config.streamUri !== Configuration.DEFAULT_STREAM, + customEventsURI: config.eventsUri !== ServiceEndpoints.DEFAULT_EVENTS, + eventsCapacity: config.capacity, + eventsFlushIntervalMillis: secondsToMillis(config.flushInterval), + reconnectTimeMillis: secondsToMillis(config.streamInitialReconnectDelay), + diagnosticRecordingIntervalMillis: secondsToMillis(config.diagnosticRecordingInterval), + streamingDisabled: !config.stream, + allAttributesPrivate: config.allAttributesPrivate, + usingSecureMode: !!config.hash, + bootstrapMode: !!config.bootstrap, +}); + +export default createDiagnosticsInitConfig; diff --git 
a/packages/shared/sdk-client/src/diagnostics/createDiagnosticsManager.ts b/packages/shared/sdk-client/src/diagnostics/createDiagnosticsManager.ts new file mode 100644 index 000000000..c1ed9928a --- /dev/null +++ b/packages/shared/sdk-client/src/diagnostics/createDiagnosticsManager.ts @@ -0,0 +1,22 @@ +import { internal, Platform } from '@launchdarkly/js-sdk-common'; + +import Configuration from '../configuration'; +import createDiagnosticsInitConfig from './createDiagnosticsInitConfig'; + +const createDiagnosticsManager = ( + clientSideID: string, + config: Configuration, + platform: Platform, +) => { + if (config.sendEvents && !config.diagnosticOptOut) { + return new internal.DiagnosticsManager( + clientSideID, + platform, + createDiagnosticsInitConfig(config), + ); + } + + return undefined; +}; + +export default createDiagnosticsManager; diff --git a/packages/shared/sdk-client/src/evaluation/fetchFlags.test.ts b/packages/shared/sdk-client/src/evaluation/fetchFlags.test.ts new file mode 100644 index 000000000..4a5975af9 --- /dev/null +++ b/packages/shared/sdk-client/src/evaluation/fetchFlags.test.ts @@ -0,0 +1,92 @@ +import { LDContext } from '@launchdarkly/js-sdk-common'; +import { basicPlatform, mockFetch } from '@launchdarkly/private-js-mocks'; + +import Configuration from '../configuration'; +import fetchFlags from './fetchFlags'; +import * as mockResponse from './mockResponse.json'; +import * as mockResponseWithReasons from './mockResponseWithReasons.json'; + +describe('fetchFeatures', () => { + const sdkKey = 'testSdkKey1'; + const context: LDContext = { kind: 'user', key: 'test-user-key-1' }; + const getHeaders = { + authorization: 'testSdkKey1', + 'user-agent': 'TestUserAgent/2.0.2', + 'x-launchdarkly-wrapper': 'Rapper/1.2.3', + }; + const reportHeaders = { + authorization: 'testSdkKey1', + 'content-type': 'application/json', + 'user-agent': 'TestUserAgent/2.0.2', + 'x-launchdarkly-wrapper': 'Rapper/1.2.3', + }; + + let config: Configuration; + const 
platformFetch = basicPlatform.requests.fetch as jest.Mock; + + beforeEach(() => { + mockFetch(mockResponse); + config = new Configuration(); + }); + + afterEach(() => { + jest.resetAllMocks(); + }); + + test('get', async () => { + const json = await fetchFlags(sdkKey, context, config, basicPlatform); + + expect(platformFetch).toBeCalledWith( + 'https://sdk.launchdarkly.com/sdk/evalx/testSdkKey1/contexts/eyJraW5kIjoidXNlciIsImtleSI6InRlc3QtdXNlci1rZXktMSJ9', + { + method: 'GET', + headers: getHeaders, + }, + ); + expect(json).toEqual(mockResponse); + }); + + test('report', async () => { + config = new Configuration({ useReport: true }); + const json = await fetchFlags(sdkKey, context, config, basicPlatform); + + expect(platformFetch).toBeCalledWith( + 'https://sdk.launchdarkly.com/sdk/evalx/testSdkKey1/context', + { + method: 'REPORT', + headers: reportHeaders, + body: '{"kind":"user","key":"test-user-key-1"}', + }, + ); + expect(json).toEqual(mockResponse); + }); + + test('withReasons', async () => { + mockFetch(mockResponseWithReasons); + config = new Configuration({ withReasons: true }); + const json = await fetchFlags(sdkKey, context, config, basicPlatform); + + expect(platformFetch).toBeCalledWith( + 'https://sdk.launchdarkly.com/sdk/evalx/testSdkKey1/contexts/eyJraW5kIjoidXNlciIsImtleSI6InRlc3QtdXNlci1rZXktMSJ9?withReasons=true', + { + method: 'GET', + headers: getHeaders, + }, + ); + expect(json).toEqual(mockResponseWithReasons); + }); + + test('hash', async () => { + config = new Configuration({ hash: 'test-hash', withReasons: false }); + const json = await fetchFlags(sdkKey, context, config, basicPlatform); + + expect(platformFetch).toBeCalledWith( + 'https://sdk.launchdarkly.com/sdk/evalx/testSdkKey1/contexts/eyJraW5kIjoidXNlciIsImtleSI6InRlc3QtdXNlci1rZXktMSJ9?h=test-hash', + { + method: 'GET', + headers: getHeaders, + }, + ); + expect(json).toEqual(mockResponse); + }); +}); diff --git a/packages/shared/sdk-client/src/evaluation/fetchFlags.ts 
b/packages/shared/sdk-client/src/evaluation/fetchFlags.ts new file mode 100644 index 000000000..ff714ea4d --- /dev/null +++ b/packages/shared/sdk-client/src/evaluation/fetchFlags.ts @@ -0,0 +1,35 @@ +import { LDContext, LDEvaluationReason, LDFlagValue, Platform } from '@launchdarkly/js-sdk-common'; + +import Configuration from '../configuration'; +import { createFetchOptions, createFetchUrl } from './fetchUtils'; + +export type Flag = { + version: number; + flagVersion: number; + value: LDFlagValue; + variation: number; + trackEvents: boolean; + trackReason?: boolean; + reason?: LDEvaluationReason; + debugEventsUntilDate?: number; +}; + +export type Flags = { + [k: string]: Flag; +}; + +const fetchFlags = async ( + sdkKey: string, + context: LDContext, + config: Configuration, + { encoding, info, requests }: Platform, +): Promise => { + const fetchUrl = createFetchUrl(sdkKey, context, config, encoding!); + const fetchOptions = createFetchOptions(sdkKey, context, config, info); + + // TODO: add error handling, retry and timeout + const response = await requests.fetch(fetchUrl, fetchOptions); + return response.json(); +}; + +export default fetchFlags; diff --git a/packages/shared/sdk-client/src/evaluation/fetchUtils.test.ts b/packages/shared/sdk-client/src/evaluation/fetchUtils.test.ts new file mode 100644 index 000000000..f078c08ca --- /dev/null +++ b/packages/shared/sdk-client/src/evaluation/fetchUtils.test.ts @@ -0,0 +1,4 @@ +// TODO: +describe('fetchUtils', () => { + test('sucesss', () => {}); +}); diff --git a/packages/shared/sdk-client/src/evaluation/fetchUtils.ts b/packages/shared/sdk-client/src/evaluation/fetchUtils.ts new file mode 100644 index 000000000..ded227f76 --- /dev/null +++ b/packages/shared/sdk-client/src/evaluation/fetchUtils.ts @@ -0,0 +1,86 @@ +import { defaultHeaders, Encoding, Info, LDContext, Options } from '@launchdarkly/js-sdk-common'; + +import Configuration from '../configuration'; + +/** + * In react-native use base64-js to polyfill 
btoa. This is safe + * because the react-native repo uses it too. Set the global.btoa to the encode + * function of base64-js. + * https://github.com/beatgammit/base64-js + * https://github.com/axios/axios/issues/2235#issuecomment-512204616 + * + * Ripped from https://thewoods.blog/base64url/ + */ +export const base64UrlEncode = (s: string, encoding: Encoding): string => + encoding.btoa(s).replace(/\+/g, '-').replace(/\//g, '_').replace(/=+$/, ''); + +export const createFetchPath = ( + sdkKey: string, + context: LDContext, + baseUrlPolling: string, + useReport: boolean, + encoding: Encoding, +) => + useReport + ? `${baseUrlPolling}/sdk/evalx/${sdkKey}/context` + : `${baseUrlPolling}/sdk/evalx/${sdkKey}/contexts/${base64UrlEncode( + JSON.stringify(context), + encoding, + )}`; + +export const createQueryString = (hash: string | undefined, withReasons: boolean) => { + const qs = { + h: hash, + withReasons, + }; + + const qsArray: string[] = []; + Object.entries(qs).forEach(([key, value]) => { + if (value) { + qsArray.push(`${key}=${value}`); + } + }); + + return qsArray.join('&'); +}; + +export const createFetchUrl = ( + sdkKey: string, + context: LDContext, + config: Configuration, + encoding: Encoding, +) => { + const { + withReasons, + hash, + serviceEndpoints: { polling }, + useReport, + } = config; + const path = createFetchPath(sdkKey, context, polling, useReport, encoding); + const qs = createQueryString(hash, withReasons); + + return qs ? 
`${path}?${qs}` : path; +}; + +export const createFetchOptions = ( + sdkKey: string, + context: LDContext, + config: Configuration, + info: Info, +): Options => { + const { useReport, tags } = config; + const headers = defaultHeaders(sdkKey, info, tags); + + if (useReport) { + return { + method: 'REPORT', + headers: { ...headers, 'content-type': 'application/json' }, + body: JSON.stringify(context), + }; + } + + return { + method: 'GET', + headers, + }; +}; diff --git a/packages/shared/sdk-client/src/evaluation/mockResponse.json b/packages/shared/sdk-client/src/evaluation/mockResponse.json new file mode 100644 index 000000000..d8f8eb5ea --- /dev/null +++ b/packages/shared/sdk-client/src/evaluation/mockResponse.json @@ -0,0 +1,58 @@ +{ + "easter-specials": { + "version": 827, + "flagVersion": 37, + "value": "no specials", + "variation": 3, + "trackEvents": false + }, + "log-level": { + "version": 827, + "flagVersion": 14, + "value": "warn", + "variation": 3, + "trackEvents": false + }, + "test1": { + "version": 827, + "flagVersion": 5, + "value": "s1", + "variation": 0, + "trackEvents": false + }, + "fdsafdsafdsafdsa": { + "version": 827, + "flagVersion": 3, + "value": true, + "variation": 0, + "trackEvents": false + }, + "easter-i-tunes-special": { + "version": 827, + "flagVersion": 15, + "value": false, + "variation": 1, + "trackEvents": false + }, + "moonshot-demo": { + "version": 827, + "flagVersion": 91, + "value": true, + "variation": 0, + "trackEvents": true + }, + "dev-test-flag": { + "version": 827, + "flagVersion": 555, + "value": true, + "variation": 0, + "trackEvents": true + }, + "this-is-a-test": { + "version": 827, + "flagVersion": 5, + "value": true, + "variation": 0, + "trackEvents": false + } +} diff --git a/packages/shared/sdk-client/src/evaluation/mockResponseWithReasons.json b/packages/shared/sdk-client/src/evaluation/mockResponseWithReasons.json new file mode 100644 index 000000000..0e198ad32 --- /dev/null +++ 
b/packages/shared/sdk-client/src/evaluation/mockResponseWithReasons.json @@ -0,0 +1,66 @@ +{ + "fdsafdsafdsafdsa": { + "version": 827, + "flagVersion": 3, + "value": true, + "variation": 0, + "trackEvents": false, + "reason": { "kind": "FALLTHROUGH" } + }, + "this-is-a-test": { + "version": 827, + "flagVersion": 5, + "value": true, + "variation": 0, + "trackEvents": false, + "reason": { "kind": "FALLTHROUGH" } + }, + "dev-test-flag": { + "version": 827, + "flagVersion": 555, + "value": true, + "variation": 0, + "trackEvents": true, + "reason": { "kind": "FALLTHROUGH" } + }, + "easter-specials": { + "version": 827, + "flagVersion": 37, + "value": "no specials", + "variation": 3, + "trackEvents": false, + "reason": { "kind": "FALLTHROUGH" } + }, + "moonshot-demo": { + "version": 827, + "flagVersion": 91, + "value": true, + "variation": 0, + "trackEvents": true, + "reason": { "kind": "FALLTHROUGH" } + }, + "test1": { + "version": 827, + "flagVersion": 5, + "value": "s1", + "variation": 0, + "trackEvents": false, + "reason": { "kind": "FALLTHROUGH" } + }, + "easter-i-tunes-special": { + "version": 827, + "flagVersion": 15, + "value": false, + "variation": 1, + "trackEvents": false, + "reason": { "kind": "FALLTHROUGH" } + }, + "log-level": { + "version": 827, + "flagVersion": 14, + "value": "warn", + "variation": 3, + "trackEvents": false, + "reason": { "kind": "OFF" } + } +} diff --git a/packages/shared/sdk-client/src/events/EventFactory.ts b/packages/shared/sdk-client/src/events/EventFactory.ts new file mode 100644 index 000000000..69bf4f889 --- /dev/null +++ b/packages/shared/sdk-client/src/events/EventFactory.ts @@ -0,0 +1,32 @@ +import { Context, internal, LDEvaluationReason, LDFlagValue } from '@launchdarkly/js-sdk-common'; + +import { Flag } from '../evaluation/fetchFlags'; + +/** + * @internal + */ +export default class EventFactory extends internal.EventFactoryBase { + evalEventClient( + flagKey: string, + value: LDFlagValue, + defaultVal: any, + flag: Flag, + 
context: Context, + reason?: LDEvaluationReason, + ): internal.InputEvalEvent { + const { trackEvents, debugEventsUntilDate, trackReason, version, variation } = flag; + + return super.evalEvent({ + addExperimentData: trackReason, + context, + debugEventsUntilDate, + defaultVal, + flagKey, + reason, + trackEvents, + value, + variation, + version, + }); + } +} diff --git a/packages/shared/sdk-client/src/events/createEventProcessor.ts b/packages/shared/sdk-client/src/events/createEventProcessor.ts new file mode 100644 index 000000000..9f6b38b94 --- /dev/null +++ b/packages/shared/sdk-client/src/events/createEventProcessor.ts @@ -0,0 +1,20 @@ +import { ClientContext, internal, Platform, subsystem } from '@launchdarkly/js-sdk-common'; + +import Configuration from '../configuration'; + +const createEventProcessor = ( + clientSideID: string, + config: Configuration, + platform: Platform, + diagnosticsManager?: internal.DiagnosticsManager, +): subsystem.LDEventProcessor => + config.sendEvents + ? 
new internal.EventProcessor( + { ...config, eventsCapacity: config.capacity }, + new ClientContext(clientSideID, config, platform), + undefined, + diagnosticsManager, + ) + : new internal.NullEventProcessor(); + +export default createEventProcessor; diff --git a/packages/shared/sdk-client/tsconfig.eslint.json b/packages/shared/sdk-client/tsconfig.eslint.json new file mode 100644 index 000000000..56c9b3830 --- /dev/null +++ b/packages/shared/sdk-client/tsconfig.eslint.json @@ -0,0 +1,5 @@ +{ + "extends": "./tsconfig.json", + "include": ["/**/*.ts"], + "exclude": ["node_modules"] +} diff --git a/packages/shared/sdk-client/tsconfig.json b/packages/shared/sdk-client/tsconfig.json new file mode 100644 index 000000000..a3374fce0 --- /dev/null +++ b/packages/shared/sdk-client/tsconfig.json @@ -0,0 +1,21 @@ +{ + "compilerOptions": { + "rootDir": "src", + "outDir": "dist", + "target": "ES2017", + "lib": ["es6", "DOM"], + "module": "commonjs", + "strict": true, + "noImplicitOverride": true, + // Needed for CommonJS modules: markdown-it, fs-extra + "allowSyntheticDefaultImports": true, + "sourceMap": true, + "declaration": true, + "declarationMap": true, // enables importers to jump to source + "stripInternal": true, + "resolveJsonModule": true, + "types": ["jest", "node"] + }, + "include": ["src"], + "exclude": ["**/*.test.ts", "dist", "node_modules", "__tests__"] +} diff --git a/packages/shared/sdk-client/tsconfig.ref.json b/packages/shared/sdk-client/tsconfig.ref.json new file mode 100644 index 000000000..0c86b2c55 --- /dev/null +++ b/packages/shared/sdk-client/tsconfig.ref.json @@ -0,0 +1,7 @@ +{ + "extends": "./tsconfig.json", + "include": ["src/**/*"], + "compilerOptions": { + "composite": true + } +} diff --git a/packages/shared/sdk-server-edge/src/api/EdgeFeatureStore.ts b/packages/shared/sdk-server-edge/src/api/EdgeFeatureStore.ts index d2054d01c..933943d20 100644 --- a/packages/shared/sdk-server-edge/src/api/EdgeFeatureStore.ts +++ 
b/packages/shared/sdk-server-edge/src/api/EdgeFeatureStore.ts @@ -53,7 +53,7 @@ export class EdgeFeatureStore implements LDFeatureStore { callback(item.segments[dataKey]); break; default: - throw new Error(`Unsupported DataKind: ${namespace}`); + callback(null); } } catch (err) { this.logger.error(err); @@ -84,7 +84,7 @@ export class EdgeFeatureStore implements LDFeatureStore { callback(item.segments); break; default: - throw new Error(`Unsupported DataKind: ${namespace}`); + callback({}); } } catch (err) { this.logger.error(err); diff --git a/packages/shared/sdk-server/__tests__/BigSegmentsManager.test.ts b/packages/shared/sdk-server/__tests__/BigSegmentsManager.test.ts index 12fd303b7..1bda8c9dd 100644 --- a/packages/shared/sdk-server/__tests__/BigSegmentsManager.test.ts +++ b/packages/shared/sdk-server/__tests__/BigSegmentsManager.test.ts @@ -1,4 +1,4 @@ -import { Crypto, Hasher, Hmac } from '@launchdarkly/js-sdk-common'; +import type { Crypto, Hasher, Hmac } from '@launchdarkly/js-sdk-common'; import { BigSegmentStore, diff --git a/packages/shared/sdk-server/__tests__/LDClient.allFlags.test.ts b/packages/shared/sdk-server/__tests__/LDClient.allFlags.test.ts index 4f3fd1987..681d7c9a3 100644 --- a/packages/shared/sdk-server/__tests__/LDClient.allFlags.test.ts +++ b/packages/shared/sdk-server/__tests__/LDClient.allFlags.test.ts @@ -1,6 +1,7 @@ +import * as mocks from '@launchdarkly/private-js-mocks'; + import { LDClientImpl } from '../src'; import TestData from '../src/integrations/test_data/TestData'; -import basicPlatform from './evaluation/mocks/platform'; import TestLogger, { LogLevel } from './Logger'; import makeCallbacks from './makeCallbacks'; @@ -16,7 +17,7 @@ describe('given an LDClient with test data', () => { td = new TestData(); client = new LDClientImpl( 'sdk-key', - basicPlatform, + mocks.basicPlatform, { updateProcessor: td.getFactory(), sendEvents: false, @@ -280,7 +281,7 @@ describe('given an offline client', () => { td = new TestData(); client 
= new LDClientImpl( 'sdk-key', - basicPlatform, + mocks.basicPlatform, { offline: true, updateProcessor: td.getFactory(), diff --git a/packages/shared/sdk-server/__tests__/LDClient.evaluation.test.ts b/packages/shared/sdk-server/__tests__/LDClient.evaluation.test.ts index a5665de72..16b5aa463 100644 --- a/packages/shared/sdk-server/__tests__/LDClient.evaluation.test.ts +++ b/packages/shared/sdk-server/__tests__/LDClient.evaluation.test.ts @@ -1,14 +1,31 @@ -import { LDClientImpl } from '../src'; -import { LDFeatureStore, LDStreamProcessor } from '../src/api/subsystems'; -import NullUpdateProcessor from '../src/data_sources/NullUpdateProcessor'; +import { subsystem } from '@launchdarkly/js-sdk-common'; +import { + basicPlatform, + MockStreamingProcessor, + setupMockStreamingProcessor, +} from '@launchdarkly/private-js-mocks'; + +import { LDClientImpl, LDFeatureStore } from '../src'; import TestData from '../src/integrations/test_data/TestData'; import AsyncStoreFacade from '../src/store/AsyncStoreFacade'; import InMemoryFeatureStore from '../src/store/InMemoryFeatureStore'; import VersionedDataKinds from '../src/store/VersionedDataKinds'; -import basicPlatform from './evaluation/mocks/platform'; import TestLogger, { LogLevel } from './Logger'; import makeCallbacks from './makeCallbacks'; +jest.mock('@launchdarkly/js-sdk-common', () => { + const actual = jest.requireActual('@launchdarkly/js-sdk-common'); + return { + ...actual, + ...{ + internal: { + ...actual.internal, + StreamingProcessor: MockStreamingProcessor, + }, + }, + }; +}); + const defaultUser = { key: 'user' }; describe('given an LDClient with test data', () => { @@ -160,6 +177,99 @@ describe('given an LDClient with test data', () => { const valueB = await client.variation('my-feature-flag-1', userContextObject, 'default'); expect(valueB).toEqual(true); }); + + it('evaluates with jsonVariation', async () => { + td.update(td.flag('flagkey').booleanFlag().on(true)); + const boolRes: boolean = (await 
client.jsonVariation('flagkey', defaultUser, false)) as boolean; + expect(boolRes).toBe(true); + + td.update(td.flag('flagkey').valueForAll(62)); + const numericRes: number = (await client.jsonVariation( + 'flagkey', + defaultUser, + false, + )) as number; + expect(numericRes).toBe(62); + + td.update(td.flag('flagkey').valueForAll('potato')); + const stringRes: string = (await client.jsonVariation('flagkey', defaultUser, false)) as string; + expect(stringRes).toBe('potato'); + }); + + it('evaluates an existing boolean flag', async () => { + td.update(td.flag('flagkey').booleanFlag().on(true)); + expect(await client.boolVariation('flagkey', defaultUser, false)).toEqual(true); + }); + + it('it uses the default value when a boolean variation is for a flag of the wrong type', async () => { + td.update(td.flag('flagkey').valueForAll('potato')); + expect(await client.boolVariation('flagkey', defaultUser, false)).toEqual(false); + }); + + it('evaluates an existing numeric flag', async () => { + td.update(td.flag('flagkey').booleanFlag().valueForAll(18)); + expect(await client.numberVariation('flagkey', defaultUser, 36)).toEqual(18); + }); + + it('it uses the default value when a numeric variation is for a flag of the wrong type', async () => { + td.update(td.flag('flagkey').valueForAll('potato')); + expect(await client.numberVariation('flagkey', defaultUser, 36)).toEqual(36); + }); + + it('evaluates an existing string flag', async () => { + td.update(td.flag('flagkey').booleanFlag().valueForAll('potato')); + expect(await client.stringVariation('flagkey', defaultUser, 'default')).toEqual('potato'); + }); + + it('it uses the default value when a string variation is for a flag of the wrong type', async () => { + td.update(td.flag('flagkey').valueForAll(8)); + expect(await client.stringVariation('flagkey', defaultUser, 'default')).toEqual('default'); + }); + + it('evaluates an existing boolean flag with detail', async () => { + 
td.update(td.flag('flagkey').booleanFlag().on(true)); + const res = await client.boolVariationDetail('flagkey', defaultUser, false); + expect(res.value).toEqual(true); + expect(res.reason.kind).toBe('FALLTHROUGH'); + }); + + it('it uses the default value when a boolean variation is for a flag of the wrong type with detail', async () => { + td.update(td.flag('flagkey').valueForAll('potato')); + const res = await client.boolVariationDetail('flagkey', defaultUser, false); + expect(res.value).toEqual(false); + expect(res.reason.kind).toEqual('ERROR'); + expect(res.reason.errorKind).toEqual('WRONG_TYPE'); + }); + + it('evaluates an existing numeric flag with detail', async () => { + td.update(td.flag('flagkey').booleanFlag().valueForAll(18)); + const res = await client.numberVariationDetail('flagkey', defaultUser, 36); + expect(res.value).toEqual(18); + expect(res.reason.kind).toBe('FALLTHROUGH'); + }); + + it('it uses the default value when a numeric variation is for a flag of the wrong type with detail', async () => { + td.update(td.flag('flagkey').valueForAll('potato')); + const res = await client.numberVariationDetail('flagkey', defaultUser, 36); + expect(res.value).toEqual(36); + expect(res.reason.kind).toEqual('ERROR'); + expect(res.reason.errorKind).toEqual('WRONG_TYPE'); + }); + + it('evaluates an existing string flag with detail', async () => { + td.update(td.flag('flagkey').booleanFlag().valueForAll('potato')); + const res = await client.stringVariationDetail('flagkey', defaultUser, 'default'); + expect(res.value).toEqual('potato'); + expect(res.reason.kind).toBe('FALLTHROUGH'); + }); + + it('it uses the default value when a string variation is for a flag of the wrong type with detail', async () => { + td.update(td.flag('flagkey').valueForAll(8)); + const res = await client.stringVariationDetail('flagkey', defaultUser, 'default'); + expect(res.value).toEqual('default'); + expect(res.reason.kind).toEqual('ERROR'); + 
expect(res.reason.errorKind).toEqual('WRONG_TYPE'); + }); }); describe('given an offline client', () => { @@ -208,9 +318,8 @@ describe('given an offline client', () => { }); }); -class InertUpdateProcessor implements LDStreamProcessor { - // eslint-disable-next-line @typescript-eslint/no-unused-vars - start(fn?: ((err?: any) => void) | undefined) { +class InertUpdateProcessor implements subsystem.LDStreamProcessor { + start(_fn?: ((err?: any) => void) | undefined) { // Never initialize. } @@ -283,12 +392,12 @@ describe('given a client that is un-initialized and store that is initialized', }, segments: {}, }); + setupMockStreamingProcessor(true); client = new LDClientImpl( 'sdk-key', basicPlatform, { - updateProcessor: new NullUpdateProcessor(), sendEvents: false, featureStore: store, }, diff --git a/packages/shared/sdk-server/__tests__/LDClient.events.test.ts b/packages/shared/sdk-server/__tests__/LDClient.events.test.ts index 393387ae5..603b44e60 100644 --- a/packages/shared/sdk-server/__tests__/LDClient.events.test.ts +++ b/packages/shared/sdk-server/__tests__/LDClient.events.test.ts @@ -1,8 +1,10 @@ +import { AsyncQueue } from 'launchdarkly-js-test-helpers'; + import { Context, internal } from '@launchdarkly/js-sdk-common'; +import * as mocks from '@launchdarkly/private-js-mocks'; import { LDClientImpl } from '../src'; import TestData from '../src/integrations/test_data/TestData'; -import basicPlatform from './evaluation/mocks/platform'; import makeCallbacks from './makeCallbacks'; const defaultUser = { key: 'user' }; @@ -10,14 +12,14 @@ const anonymousUser = { key: 'anon-user', anonymous: true }; describe('given a client with mock event processor', () => { let client: LDClientImpl; - let events: internal.InputEvent[]; + let events: AsyncQueue; let td: TestData; beforeEach(async () => { - events = []; + events = new AsyncQueue(); jest .spyOn(internal.EventProcessor.prototype, 'sendEvent') - .mockImplementation((evt) => events.push(evt)); + 
.mockImplementation((evt) => events.add(evt)); jest .spyOn(internal.EventProcessor.prototype, 'flush') .mockImplementation(() => Promise.resolve()); @@ -25,7 +27,7 @@ describe('given a client with mock event processor', () => { td = new TestData(); client = new LDClientImpl( 'sdk-key', - basicPlatform, + mocks.basicPlatform, { updateProcessor: td.getFactory(), }, @@ -43,8 +45,7 @@ describe('given a client with mock event processor', () => { await client.variation('flagkey', defaultUser, 'c'); - expect(events).toHaveLength(1); - const e = events[0]; + const e = await events.take(); expect(e).toMatchObject({ kind: 'feature', key: 'flagkey', @@ -60,8 +61,7 @@ describe('given a client with mock event processor', () => { td.update(td.flag('flagkey').on(true).variations('a', 'b').fallthroughVariation(1)); await client.variation('flagkey', anonymousUser, 'c'); - expect(events).toHaveLength(1); - const e = events[0]; + const e = await events.take(); expect(e).toMatchObject({ kind: 'feature', key: 'flagkey', @@ -77,8 +77,7 @@ describe('given a client with mock event processor', () => { td.update(td.flag('flagkey').on(true).variations('a', 'b').fallthroughVariation(1)); await client.variationDetail('flagkey', defaultUser, 'c'); - expect(events).toHaveLength(1); - const e = events[0]; + const e = await events.take(); expect(e).toMatchObject({ kind: 'feature', key: 'flagkey', @@ -111,8 +110,7 @@ describe('given a client with mock event processor', () => { }); await client.variation('flagkey', defaultUser, 'c'); - expect(events).toHaveLength(1); - const e = events[0]; + const e = await events.take(); expect(e).toMatchObject({ kind: 'feature', creationDate: e.creationDate, @@ -145,8 +143,7 @@ describe('given a client with mock event processor', () => { }); await client.variation('flagkey', defaultUser, 'c'); - expect(events).toHaveLength(1); - const e = events[0]; + const e = await events.take(); expect(e).toMatchObject({ kind: 'feature', creationDate: e.creationDate, @@ -172,8 
+169,7 @@ describe('given a client with mock event processor', () => { }); await client.variation('flagkey', defaultUser, 'c'); - expect(events).toHaveLength(1); - const e = events[0]; + const e = await events.take(); expect(e).toMatchObject({ kind: 'feature', creationDate: e.creationDate, @@ -210,8 +206,7 @@ describe('given a client with mock event processor', () => { }); await client.variation('flagkey', defaultUser, 'c'); - expect(events).toHaveLength(1); - const e = events[0]; + const e = await events.take(); expect(e).toMatchObject({ kind: 'feature', creationDate: e.creationDate, @@ -250,8 +245,7 @@ describe('given a client with mock event processor', () => { await client.variation('flagkey', defaultUser, 'c'); - expect(events).toHaveLength(1); - const e = events[0]; + const e = await events.take(); expect(e).toMatchObject({ kind: 'feature', creationDate: e.creationDate, @@ -268,8 +262,7 @@ describe('given a client with mock event processor', () => { td.update(td.flag('flagkey').on(true).variations('a', 'b').fallthroughVariation(1)); await client.variation('flagkey', defaultUser, 'c'); - expect(events).toHaveLength(1); - const e = events[0]; + const e = await events.take(); expect(e).toMatchObject({ kind: 'feature', creationDate: e.creationDate, @@ -285,8 +278,7 @@ describe('given a client with mock event processor', () => { it('generates event for unknown feature', async () => { await client.variation('flagkey', defaultUser, 'c'); - expect(events).toHaveLength(1); - const e = events[0]; + const e = await events.take(); expect(e).toMatchObject({ kind: 'feature', key: 'flagkey', @@ -299,8 +291,7 @@ describe('given a client with mock event processor', () => { it('generates event for unknown feature when user is anonymous', async () => { await client.variation('flagkey', anonymousUser, 'c'); - expect(events).toHaveLength(1); - const e = events[0]; + const e = await events.take(); expect(e).toMatchObject({ kind: 'feature', key: 'flagkey', diff --git 
a/packages/shared/sdk-server/__tests__/LDClient.migrations.test.ts b/packages/shared/sdk-server/__tests__/LDClient.migrations.test.ts new file mode 100644 index 000000000..5775c7169 --- /dev/null +++ b/packages/shared/sdk-server/__tests__/LDClient.migrations.test.ts @@ -0,0 +1,93 @@ +import { basicPlatform } from '@launchdarkly/private-js-mocks'; + +import { LDClientImpl, LDMigrationStage } from '../src'; +import TestData from '../src/integrations/test_data/TestData'; +import { LDClientCallbacks } from '../src/LDClientImpl'; + +/** + * Basic callback handler that records errors for tests. + */ +export default function makeCallbacks(): [Error[], LDClientCallbacks] { + const errors: Error[] = []; + return [ + errors, + { + onError: (error) => { + errors.push(error); + }, + onFailed: () => {}, + onReady: () => {}, + onUpdate: () => {}, + hasEventListeners: () => true, + }, + ]; +} + +describe('given an LDClient with test data', () => { + let client: LDClientImpl; + let td: TestData; + let callbacks: LDClientCallbacks; + let errors: Error[]; + + beforeEach(async () => { + td = new TestData(); + [errors, callbacks] = makeCallbacks(); + client = new LDClientImpl( + 'sdk-key', + basicPlatform, + { + updateProcessor: td.getFactory(), + sendEvents: false, + }, + callbacks, + ); + + await client.waitForInitialization(); + }); + + afterEach(() => { + client.close(); + }); + + it.each(['off', 'dualwrite', 'shadow', 'live', 'rampdown', 'complete'])( + 'handles valid migration stages: %p', + async (value) => { + const flagKey = 'migration'; + td.update(td.flag(flagKey).valueForAll(value)); + // Get a default value that is not the value under test. + const defaultValue = Object.values(LDMigrationStage).find((item) => item !== value); + // Verify the pre-condition that the default value is not the value under test. 
+ expect(defaultValue).not.toEqual(value); + const res = await client.migrationVariation( + flagKey, + { key: 'test-key' }, + defaultValue as LDMigrationStage, + ); + expect(res.value).toEqual(value); + }, + ); + + it.each([ + LDMigrationStage.Off, + LDMigrationStage.DualWrite, + LDMigrationStage.Shadow, + LDMigrationStage.Live, + LDMigrationStage.RampDown, + LDMigrationStage.Complete, + ])('returns the default value if the flag does not exist: default = %p', async (stage) => { + const res = await client.migrationVariation('no-flag', { key: 'test-key' }, stage); + + expect(res.value).toEqual(stage); + }); + + it('produces an error event for a migration flag with an incorrect value', async () => { + const flagKey = 'bad-migration'; + td.update(td.flag(flagKey).valueForAll('potato')); + const res = await client.migrationVariation(flagKey, { key: 'test-key' }, LDMigrationStage.Off); + expect(res.value).toEqual(LDMigrationStage.Off); + expect(errors.length).toEqual(1); + expect(errors[0].message).toEqual( + 'Unrecognized MigrationState for "bad-migration"; returning default value.', + ); + }); +}); diff --git a/packages/shared/sdk-server/__tests__/LDClientImpl.bigSegments.test.ts b/packages/shared/sdk-server/__tests__/LDClientImpl.bigSegments.test.ts index d50c92f31..71af891e1 100644 --- a/packages/shared/sdk-server/__tests__/LDClientImpl.bigSegments.test.ts +++ b/packages/shared/sdk-server/__tests__/LDClientImpl.bigSegments.test.ts @@ -1,12 +1,12 @@ import { Crypto, Hasher, Hmac } from '@launchdarkly/js-sdk-common'; +import * as mocks from '@launchdarkly/private-js-mocks'; +import { LDBigSegmentsOptions } from '../src'; import { BigSegmentStore } from '../src/api/interfaces'; -import { LDBigSegmentsOptions } from '../src/api/options/LDBigSegmentsOptions'; import makeBigSegmentRef from '../src/evaluation/makeBigSegmentRef'; import TestData from '../src/integrations/test_data/TestData'; import LDClientImpl from '../src/LDClientImpl'; import { makeSegmentMatchClause } 
from './evaluation/flags'; -import basicPlatform from './evaluation/mocks/platform'; import makeCallbacks from './makeCallbacks'; const user = { key: 'userkey' }; @@ -76,7 +76,7 @@ describe('given test data with big segments', () => { client = new LDClientImpl( 'sdk-key', - { ...basicPlatform, crypto }, + { ...mocks.basicPlatform, crypto }, { updateProcessor: td.getFactory(), sendEvents: false, @@ -115,7 +115,7 @@ describe('given test data with big segments', () => { client = new LDClientImpl( 'sdk-key', - { ...basicPlatform, crypto }, + { ...mocks.basicPlatform, crypto }, { updateProcessor: td.getFactory(), sendEvents: false, @@ -154,7 +154,7 @@ describe('given test data with big segments', () => { client = new LDClientImpl( 'sdk-key', - { ...basicPlatform, crypto }, + { ...mocks.basicPlatform, crypto }, { updateProcessor: td.getFactory(), sendEvents: false, @@ -181,7 +181,7 @@ describe('given test data with big segments', () => { beforeEach(async () => { client = new LDClientImpl( 'sdk-key', - { ...basicPlatform, crypto }, + { ...mocks.basicPlatform, crypto }, { updateProcessor: td.getFactory(), sendEvents: false, diff --git a/packages/shared/sdk-server/__tests__/LDClientImpl.listeners.test.ts b/packages/shared/sdk-server/__tests__/LDClientImpl.listeners.test.ts index 6d97ccc4d..f559b8a92 100644 --- a/packages/shared/sdk-server/__tests__/LDClientImpl.listeners.test.ts +++ b/packages/shared/sdk-server/__tests__/LDClientImpl.listeners.test.ts @@ -1,10 +1,11 @@ import { AsyncQueue } from 'launchdarkly-js-test-helpers'; +import { basicPlatform } from '@launchdarkly/private-js-mocks'; + import { AttributeReference, LDClientImpl } from '../src'; import { Op } from '../src/evaluation/data/Clause'; import TestData from '../src/integrations/test_data/TestData'; import { makeFlagWithSegmentMatch } from './evaluation/flags'; -import basicPlatform from './evaluation/mocks/platform'; import TestLogger from './Logger'; import makeCallbacks from './makeCallbacks'; diff --git 
a/packages/shared/sdk-server/__tests__/LDClientImpl.test.ts b/packages/shared/sdk-server/__tests__/LDClientImpl.test.ts index 60fc3ce30..ad58d5e2f 100644 --- a/packages/shared/sdk-server/__tests__/LDClientImpl.test.ts +++ b/packages/shared/sdk-server/__tests__/LDClientImpl.test.ts @@ -1,134 +1,100 @@ -/* eslint-disable @typescript-eslint/no-unused-vars */ -import { LDClientImpl } from '../src'; -import basicPlatform from './evaluation/mocks/platform'; -import TestLogger from './Logger'; -import makeCallbacks from './makeCallbacks'; - -it('fires ready event in offline mode', (done) => { - const client = new LDClientImpl( - 'sdk-key', - basicPlatform, - { offline: true }, - { ...makeCallbacks(false), onReady: () => done() }, - ); - client.close(); -}); - -it('fires the failed event if initialization fails', (done) => { - const client = new LDClientImpl( - 'sdk-key', - basicPlatform, - { - updateProcessor: { - start: (fn: (err: any) => void) => { - setTimeout(() => { - fn(new Error('BAD THINGS')); - }, 0); - }, - stop: () => {}, - close: () => {}, - }, - }, - { ...makeCallbacks(false), onFailed: () => done() }, - ); +import { + basicPlatform, + MockStreamingProcessor, + setupMockStreamingProcessor, +} from '@launchdarkly/private-js-mocks'; - client.close(); -}); +import { LDClientImpl, LDOptions } from '../src'; -it('isOffline returns true in offline mode', (done) => { - const client = new LDClientImpl( - 'sdk-key', - basicPlatform, - { offline: true }, - { - ...makeCallbacks(false), - onReady: () => { - expect(client.isOffline()).toEqual(true); - done(); +jest.mock('@launchdarkly/js-sdk-common', () => { + const actual = jest.requireActual('@launchdarkly/js-sdk-common'); + return { + ...actual, + ...{ + internal: { + ...actual.internal, + StreamingProcessor: MockStreamingProcessor, }, }, - ); - - client.close(); + }; }); -describe('when waiting for initialization', () => { +describe('LDClientImpl', () => { let client: LDClientImpl; - let resolve: Function; + const 
callbacks = { + onFailed: jest.fn().mockName('onFailed'), + onError: jest.fn().mockName('onError'), + onReady: jest.fn().mockName('onReady'), + onUpdate: jest.fn().mockName('onUpdate'), + hasEventListeners: jest.fn().mockName('hasEventListeners'), + }; + const createClient = (options: LDOptions = {}) => + new LDClientImpl('sdk-key', basicPlatform, options, callbacks); beforeEach(() => { - client = new LDClientImpl( - 'sdk-key', - basicPlatform, - { - updateProcessor: { - start: (fn: (err?: any) => void) => { - resolve = fn; - }, - stop: () => {}, - close: () => {}, - }, - sendEvents: false, - logger: new TestLogger(), - }, - makeCallbacks(false), - ); + setupMockStreamingProcessor(); }); afterEach(() => { client.close(); + jest.resetAllMocks(); }); - it('resolves when ready', async () => { - resolve(); - await client.waitForInitialization(); + it('fires ready event in online mode', async () => { + client = createClient(); + const initializedClient = await client.waitForInitialization(); + + expect(initializedClient).toEqual(client); + expect(client.initialized()).toBeTruthy(); + expect(callbacks.onReady).toBeCalled(); + expect(callbacks.onFailed).not.toBeCalled(); + expect(callbacks.onError).not.toBeCalled(); + }); + + it('fires ready event in offline mode', async () => { + client = createClient({ offline: true }); + const initializedClient = await client.waitForInitialization(); + + expect(initializedClient).toEqual(client); + expect(client.initialized()).toBeTruthy(); + expect(callbacks.onReady).toBeCalled(); + expect(callbacks.onFailed).not.toBeCalled(); + expect(callbacks.onError).not.toBeCalled(); + }); + + it('initialization fails: failed event fires and initialization promise rejects', async () => { + setupMockStreamingProcessor(true); + client = createClient(); + + await expect(client.waitForInitialization()).rejects.toThrow('failed'); + + expect(client.initialized()).toBeFalsy(); + expect(callbacks.onReady).not.toBeCalled(); + 
expect(callbacks.onFailed).toBeCalled(); + expect(callbacks.onError).toBeCalled(); + }); + + it('isOffline returns true in offline mode', () => { + client = createClient({ offline: true }); + expect(client.isOffline()).toEqual(true); + }); + + it('does not crash when closing an offline client', () => { + client = createClient({ offline: true }); + expect(() => client.close()).not.toThrow(); }); it('resolves immediately if the client is already ready', async () => { - resolve(); + client = createClient(); await client.waitForInitialization(); await client.waitForInitialization(); }); - it('creates only one Promise', async () => { + it('creates only one Promise when waiting for initialization', async () => { + client = createClient(); const p1 = client.waitForInitialization(); const p2 = client.waitForInitialization(); - resolve(); + expect(p2).toBe(p1); }); }); - -it('does not crash when closing an offline client', () => { - const client = new LDClientImpl( - 'sdk-key', - basicPlatform, - { offline: true }, - makeCallbacks(false), - ); - - expect(() => client.close()).not.toThrow(); - client.close(); -}); - -it('the wait for initialization promise is rejected if initialization fails', (done) => { - const client = new LDClientImpl( - 'sdk-key', - basicPlatform, - { - updateProcessor: { - start: (fn: (err: any) => void) => { - setTimeout(() => { - fn(new Error('BAD THINGS')); - }, 0); - }, - stop: () => {}, - close: () => {}, - }, - sendEvents: false, - }, - makeCallbacks(false), - ); - - client.waitForInitialization().catch(() => done()); - client.close(); -}); diff --git a/packages/shared/sdk-server/__tests__/Migration.test.ts b/packages/shared/sdk-server/__tests__/Migration.test.ts new file mode 100644 index 000000000..ca1ed8284 --- /dev/null +++ b/packages/shared/sdk-server/__tests__/Migration.test.ts @@ -0,0 +1,550 @@ +import { basicPlatform } from '@launchdarkly/private-js-mocks'; + +import { + LDClientImpl, + LDConcurrentExecution, + LDExecutionOrdering, + 
LDMigrationStage, + LDSerialExecution, +} from '../src'; +import { TestData } from '../src/integrations'; +import { LDClientCallbacks } from '../src/LDClientImpl'; +import { createMigration, LDMigrationError, LDMigrationSuccess } from '../src/Migration'; +import makeCallbacks from './makeCallbacks'; + +describe('given an LDClient with test data', () => { + let client: LDClientImpl; + let td: TestData; + let callbacks: LDClientCallbacks; + + beforeEach(async () => { + td = new TestData(); + callbacks = makeCallbacks(false); + client = new LDClientImpl( + 'sdk-key', + basicPlatform, + { + updateProcessor: td.getFactory(), + sendEvents: false, + }, + callbacks, + ); + + await client.waitForInitialization(); + }); + + afterEach(() => { + client.close(); + }); + + /** Custom matcher for write results. */ + expect.extend({ + toMatchMigrationResult(received, expected) { + const { authoritative, nonAuthoritative } = expected; + const { authoritative: actualAuth, nonAuthoritative: actualNonAuth } = received; + + if (authoritative.origin !== actualAuth.origin) { + return { + pass: false, + message: () => + `Expected authoritative origin: ${authoritative.origin}, but received: ${actualAuth.origin}`, + }; + } + if (authoritative.success !== actualAuth.success) { + return { + pass: false, + message: () => `Expected authoritative success, but received error: ${actualAuth.error}`, + }; + } + if (authoritative.success) { + if (actualAuth.result !== authoritative.result) { + return { + pass: false, + message: () => + `Expected authoritative result: ${authoritative.result}, received: ${actualAuth.result}`, + }; + } + } else if (actualAuth.error?.message !== authoritative.error?.message) { + return { + pass: false, + message: () => + `Expected authoritative error: ${authoritative.error?.message}, received: ${actualAuth.error?.message}`, + }; + } + if (nonAuthoritative) { + if (!actualNonAuth) { + return { + pass: false, + message: () => `Expected a non-authoritative result, but did not 
receive one.`, + }; + } + if (nonAuthoritative.origin !== actualNonAuth.origin) { + return { + pass: false, + message: () => + `Expected non-authoritative origin: ${nonAuthoritative.origin}, but received: ${actualNonAuth.origin}`, + }; + } + if (nonAuthoritative.success !== actualNonAuth.success) { + return { + pass: false, + message: () => + `Expected non-authoritative success, but received error: ${actualNonAuth.error}`, + }; + } + if (nonAuthoritative.success) { + if (actualNonAuth.result !== nonAuthoritative.result) { + return { + pass: false, + message: () => + `Expected non-authoritative result: ${nonAuthoritative.result}, received: ${actualNonAuth.result}`, + }; + } + } else if (actualNonAuth.error?.message !== nonAuthoritative.error?.message) { + return { + pass: false, + message: () => + `Expected non-authoritative error: ${nonAuthoritative.error?.message}, error: ${actualNonAuth.error?.message}`, + }; + } + } else if (actualNonAuth) { + return { + pass: false, + message: () => `Expected no non-authoritative result, received: ${actualNonAuth}`, + }; + } + return { pass: true, message: () => '' }; + }, + }); + + describe.each([ + [new LDSerialExecution(LDExecutionOrdering.Fixed), 'serial fixed'], + [new LDSerialExecution(LDExecutionOrdering.Random), 'serial random'], + [new LDConcurrentExecution(), 'concurrent'], + ])('given different execution methods: %p %p', (execution) => { + describe.each([ + [ + LDMigrationStage.Off, + 'old', + { + authoritative: { origin: 'old', result: true, success: true }, + nonAuthoritative: undefined, + }, + ], + [ + LDMigrationStage.DualWrite, + 'old', + { + authoritative: { origin: 'old', result: true, success: true }, + nonAuthoritative: { origin: 'new', result: false, success: true }, + }, + ], + [ + LDMigrationStage.Shadow, + 'old', + { + authoritative: { origin: 'old', result: true, success: true }, + nonAuthoritative: { origin: 'new', result: false, success: true }, + }, + ], + [ + LDMigrationStage.Live, + 'new', + { + 
nonAuthoritative: { origin: 'old', result: true, success: true }, + authoritative: { origin: 'new', result: false, success: true }, + }, + ], + [ + LDMigrationStage.RampDown, + 'new', + { + nonAuthoritative: { origin: 'old', result: true, success: true }, + authoritative: { origin: 'new', result: false, success: true }, + }, + ], + [ + LDMigrationStage.Complete, + 'new', + { + authoritative: { origin: 'new', result: false, success: true }, + nonAuthoritative: undefined, + }, + ], + ])('given each migration step: %p, read: %p, write: %j.', (stage, readValue, writeMatch) => { + it('uses the correct authoritative source', async () => { + const migration = createMigration(client, { + execution, + latencyTracking: false, + errorTracking: false, + readNew: async () => LDMigrationSuccess('new'), + writeNew: async () => LDMigrationSuccess(false), + readOld: async () => LDMigrationSuccess('old'), + writeOld: async () => LDMigrationSuccess(true), + }); + + const flagKey = 'migration'; + td.update(td.flag(flagKey).valueForAll(stage)); + + // Get a default value that is not the value under test. + const defaultStage = Object.values(LDMigrationStage).find((item) => item !== stage); + + const read = await migration.read(flagKey, { key: 'test-key' }, defaultStage!); + expect(read.success).toBeTruthy(); + expect(read.origin).toEqual(readValue); + // Type guards needed for typescript. + if (read.success) { + expect(read.result).toEqual(readValue); + } + + const write = await migration.write(flagKey, { key: 'test-key' }, defaultStage!); + // @ts-ignore Extended without writing types. 
+ expect(write).toMatchMigrationResult(writeMatch); + }); + + it('correctly forwards the payload for read and write operations', async () => { + let receivedReadPayload: string | undefined; + let receivedWritePayload: string | undefined; + const migration = createMigration(client, { + execution, + latencyTracking: false, + errorTracking: false, + readNew: async (payload) => { + receivedReadPayload = payload; + return LDMigrationSuccess('new'); + }, + writeNew: async (payload) => { + receivedWritePayload = payload; + return LDMigrationSuccess(false); + }, + readOld: async (payload) => { + receivedReadPayload = payload; + return LDMigrationSuccess('old'); + }, + writeOld: async (payload) => { + receivedWritePayload = payload; + return LDMigrationSuccess(true); + }, + }); + + const flagKey = 'migration'; + td.update(td.flag(flagKey).valueForAll(stage)); + + const payloadRead = Math.random().toString(10); + const payloadWrite = Math.random().toString(10); + await migration.read(flagKey, { key: 'test-key' }, LDMigrationStage.Off, payloadRead); + + await migration.write(flagKey, { key: 'test-key' }, LDMigrationStage.Off, payloadWrite); + + expect(receivedReadPayload).toEqual(payloadRead); + expect(receivedWritePayload).toEqual(payloadWrite); + }); + }); + }); + + it.each([ + [LDMigrationStage.Off, 'old'], + [LDMigrationStage.DualWrite, 'old'], + [LDMigrationStage.Shadow, 'old'], + [LDMigrationStage.Live, 'new'], + [LDMigrationStage.RampDown, 'new'], + [LDMigrationStage.Complete, 'new'], + ])('handles read errors for stage: %p', async (stage, authority) => { + const migration = createMigration(client, { + execution: new LDSerialExecution(LDExecutionOrdering.Fixed), + latencyTracking: false, + errorTracking: false, + readNew: async () => LDMigrationError(new Error('new')), + writeNew: async () => LDMigrationSuccess(false), + readOld: async () => LDMigrationError(new Error('old')), + writeOld: async () => LDMigrationSuccess(true), + }); + + const flagKey = 'migration'; + 
td.update(td.flag(flagKey).valueForAll(stage)); + + // Get a default value that is not the value under test. + const defaultStage = Object.values(LDMigrationStage).find((item) => item !== stage); + + const read = await migration.read(flagKey, { key: 'test-key' }, defaultStage!); + expect(read.success).toBeFalsy(); + expect(read.origin).toEqual(authority); + // Type guards needed for typescript. + if (!read.success) { + expect(read.error.message).toEqual(authority); + } + }); + + it.each([ + [LDMigrationStage.Off, 'old'], + [LDMigrationStage.DualWrite, 'old'], + [LDMigrationStage.Shadow, 'old'], + [LDMigrationStage.Live, 'new'], + [LDMigrationStage.RampDown, 'new'], + [LDMigrationStage.Complete, 'new'], + ])('handles exceptions for stage: %p', async (stage, authority) => { + const migration = createMigration(client, { + execution: new LDSerialExecution(LDExecutionOrdering.Fixed), + latencyTracking: false, + errorTracking: false, + readNew: async () => { + throw new Error('new'); + }, + writeNew: async () => LDMigrationSuccess(false), + readOld: async () => { + throw new Error('old'); + }, + writeOld: async () => LDMigrationSuccess(true), + }); + + const flagKey = 'migration'; + td.update(td.flag(flagKey).valueForAll(stage)); + + // Get a default value that is not the value under test. + const defaultStage = Object.values(LDMigrationStage).find((item) => item !== stage); + + const read = await migration.read(flagKey, { key: 'test-key' }, defaultStage!); + expect(read.success).toBeFalsy(); + expect(read.origin).toEqual(authority); + // Type guards needed for typescript. 
+ if (!read.success) { + expect(read.error.message).toEqual(authority); + } + }); + + it.each([ + [ + LDMigrationStage.Off, + 'old', + true, + false, + { + authoritative: { origin: 'old', success: false, error: new Error('old') }, + nonAuthoritative: undefined, + }, + ], + [ + LDMigrationStage.DualWrite, + 'old', + true, + false, + { + authoritative: { origin: 'old', success: false, error: new Error('old') }, + nonAuthoritative: undefined, + }, + ], + [ + LDMigrationStage.Shadow, + 'old', + true, + false, + { + authoritative: { origin: 'old', success: false, error: new Error('old') }, + nonAuthoritative: undefined, + }, + ], + [ + LDMigrationStage.Live, + 'new', + false, + true, + { + authoritative: { origin: 'new', success: false, error: new Error('new') }, + nonAuthoritative: undefined, + }, + ], + [ + LDMigrationStage.RampDown, + 'new', + false, + true, + { + authoritative: { origin: 'new', success: false, error: new Error('new') }, + nonAuthoritative: undefined, + }, + ], + [ + LDMigrationStage.Complete, + 'new', + false, + true, + { + authoritative: { origin: 'new', success: false, error: new Error('new') }, + nonAuthoritative: undefined, + }, + ], + ])( + 'stops writes on error: %p, %p, %p, %p', + async (stage, origin, oldWrite, newWrite, writeMatch) => { + let oldWriteCalled = false; + let newWriteCalled = false; + + const migration = createMigration(client, { + execution: new LDSerialExecution(LDExecutionOrdering.Fixed), + latencyTracking: false, + errorTracking: false, + readNew: async () => LDMigrationSuccess('new'), + writeNew: async () => { + newWriteCalled = true; + return LDMigrationError(new Error('new')); + }, + readOld: async () => LDMigrationSuccess('old'), + writeOld: async () => { + oldWriteCalled = true; + return LDMigrationError(new Error('old')); + }, + }); + + const flagKey = 'migration'; + td.update(td.flag(flagKey).valueForAll(stage)); + + // Get a default value that is not the value under test. 
+ const defaultStage = Object.values(LDMigrationStage).find((item) => item !== stage); + + const write = await migration.write(flagKey, { key: 'test-key' }, defaultStage!); + // @ts-ignore + expect(write).toMatchMigrationResult(writeMatch); + + expect(oldWriteCalled).toEqual(oldWrite); + expect(newWriteCalled).toEqual(newWrite); + }, + ); + + it.each([ + [LDMigrationStage.Off, 'old', true, false], + [LDMigrationStage.DualWrite, 'old', true, false], + [LDMigrationStage.Shadow, 'old', true, false], + [LDMigrationStage.Live, 'new', false, true], + [LDMigrationStage.RampDown, 'new', false, true], + [LDMigrationStage.Complete, 'new', false, true], + ])('stops writes on exception: %p, %p, %p, %p', async (stage, origin, oldWrite, newWrite) => { + let oldWriteCalled = false; + let newWriteCalled = false; + + const migration = createMigration(client, { + execution: new LDSerialExecution(LDExecutionOrdering.Fixed), + latencyTracking: false, + errorTracking: false, + readNew: async () => LDMigrationSuccess('new'), + writeNew: async () => { + newWriteCalled = true; + throw new Error('new'); + }, + readOld: async () => LDMigrationSuccess('old'), + writeOld: async () => { + oldWriteCalled = true; + throw new Error('old'); + }, + }); + + const flagKey = 'migration'; + td.update(td.flag(flagKey).valueForAll(stage)); + + // Get a default value that is not the value under test. 
+ const defaultStage = Object.values(LDMigrationStage).find((item) => item !== stage); + + const write = await migration.write(flagKey, { key: 'test-key' }, defaultStage!); + expect(write.authoritative.success).toBeFalsy(); + expect(write.authoritative.origin).toEqual(origin); + if (!write.authoritative.success) { + expect(write.authoritative.error.message).toEqual(origin); + } + expect(oldWriteCalled).toEqual(oldWrite); + expect(newWriteCalled).toEqual(newWrite); + }); + + it('handles the case where the authoritative write succeeds, but the non-authoritative fails', async () => { + const migrationA = createMigration(client, { + execution: new LDSerialExecution(LDExecutionOrdering.Fixed), + latencyTracking: false, + errorTracking: false, + readNew: async () => LDMigrationSuccess('new'), + writeNew: async () => { + throw new Error('new'); + }, + readOld: async () => LDMigrationSuccess('old'), + writeOld: async () => LDMigrationSuccess(true), + }); + + const flagKey = 'migration'; + td.update(td.flag(flagKey).valueForAll(LDMigrationStage.DualWrite)); + + const writeA = await migrationA.write(flagKey, { key: 'test-key' }, LDMigrationStage.Off); + // @ts-ignore + expect(writeA).toMatchMigrationResult({ + authoritative: { + success: true, + result: true, + origin: 'old', + }, + nonAuthoritative: { + success: false, + error: new Error('new'), + origin: 'new', + }, + }); + + td.update(td.flag(flagKey).valueForAll(LDMigrationStage.Shadow)); + + const writeB = await migrationA.write(flagKey, { key: 'test-key' }, LDMigrationStage.Off); + // @ts-ignore + expect(writeB).toMatchMigrationResult({ + authoritative: { + success: true, + result: true, + origin: 'old', + }, + nonAuthoritative: { + success: false, + error: new Error('new'), + origin: 'new', + }, + }); + + const migrationB = createMigration(client, { + execution: new LDSerialExecution(LDExecutionOrdering.Fixed), + latencyTracking: false, + errorTracking: false, + readNew: async () => LDMigrationSuccess('new'), + 
writeNew: async () => LDMigrationSuccess(true), + readOld: async () => LDMigrationSuccess('old'), + writeOld: async () => { + throw new Error('old'); + }, + }); + + td.update(td.flag(flagKey).valueForAll(LDMigrationStage.Live)); + + const writeC = await migrationB.write(flagKey, { key: 'test-key' }, LDMigrationStage.Off); + // @ts-ignore + expect(writeC).toMatchMigrationResult({ + authoritative: { + success: true, + result: true, + origin: 'new', + }, + nonAuthoritative: { + success: false, + error: new Error('old'), + origin: 'old', + }, + }); + + td.update(td.flag(flagKey).valueForAll(LDMigrationStage.RampDown)); + + const writeD = await migrationB.write(flagKey, { key: 'test-key' }, LDMigrationStage.Off); + // @ts-ignore + expect(writeD).toMatchMigrationResult({ + authoritative: { + success: true, + result: true, + origin: 'new', + }, + nonAuthoritative: { + success: false, + error: new Error('old'), + origin: 'old', + }, + }); + }); +}); diff --git a/packages/shared/sdk-server/__tests__/MigrationOpEvent.test.ts b/packages/shared/sdk-server/__tests__/MigrationOpEvent.test.ts new file mode 100644 index 000000000..f1eb9d88f --- /dev/null +++ b/packages/shared/sdk-server/__tests__/MigrationOpEvent.test.ts @@ -0,0 +1,590 @@ +import { AsyncQueue } from 'launchdarkly-js-test-helpers'; + +import { internal } from '@launchdarkly/js-sdk-common'; +import { basicPlatform } from '@launchdarkly/private-js-mocks'; + +import { + LDClientImpl, + LDConcurrentExecution, + LDExecutionOrdering, + LDMigrationOpEvent, + LDMigrationStage, + LDSerialExecution, +} from '../src'; +import { LDMigration } from '../src/api/LDMigration'; +import { TestData } from '../src/integrations'; +import { LDClientCallbacks } from '../src/LDClientImpl'; +import { createMigration, LDMigrationError, LDMigrationSuccess } from '../src/Migration'; +import MigrationOpEventConversion from '../src/MigrationOpEventConversion'; +import makeCallbacks from './makeCallbacks'; + 
+jest.mock('@launchdarkly/js-sdk-common', () => ({ + __esModule: true, + // @ts-ignore + ...jest.requireActual('@launchdarkly/js-sdk-common'), + internal: { + ...jest.requireActual('@launchdarkly/js-sdk-common').internal, + shouldSample: jest.fn().mockReturnValue(true), + }, +})); + +describe('given an LDClient with test data', () => { + let client: LDClientImpl; + let events: AsyncQueue; + let td: TestData; + let callbacks: LDClientCallbacks; + + beforeEach(async () => { + events = new AsyncQueue(); + jest + .spyOn(internal.EventProcessor.prototype, 'sendEvent') + .mockImplementation((evt) => events.add(evt)); + + td = new TestData(); + callbacks = makeCallbacks(false); + client = new LDClientImpl( + 'sdk-key', + basicPlatform, + { + updateProcessor: td.getFactory(), + }, + callbacks, + ); + + await client.waitForInitialization(); + }); + + afterEach(() => { + client.close(); + events.close(); + }); + + describe.each([ + [new LDSerialExecution(LDExecutionOrdering.Fixed), 'serial fixed'], + [new LDSerialExecution(LDExecutionOrdering.Random), 'serial random'], + [new LDConcurrentExecution(), 'concurrent'], + ])('given different execution methods: %p %p', (execution) => { + describe('given a migration which checks consistency and produces consistent results', () => { + let migration: LDMigration; + beforeEach(() => { + migration = createMigration(client, { + execution, + latencyTracking: false, + errorTracking: false, + readNew: async (payload?: string) => LDMigrationSuccess(payload || 'default'), + writeNew: async (payload?: string) => LDMigrationSuccess(payload || 'default'), + readOld: async (payload?: string) => LDMigrationSuccess(payload || 'default'), + writeOld: async (payload?: string) => LDMigrationSuccess(payload || 'default'), + check: (a: string, b: string) => a === b, + }); + }); + + it.each([LDMigrationStage.Shadow, LDMigrationStage.Live])( + 'finds the results consistent: %p', + async (stage) => { + jest.spyOn(internal, 
'shouldSample').mockReturnValue(true); + const flagKey = 'migration'; + td.update(td.flag(flagKey).valueForAll(stage)); + + await migration.read(flagKey, { key: 'test' }, stage); + // Feature event. + await events.take(); + // Migration event. + const migrationEvent = (await events.take()) as internal.InputMigrationEvent; + // Only check the measurements component of the event. + expect(migrationEvent.measurements[1].key).toEqual('consistent'); + // This isn't a precise check, but we should have non-zero values. + expect(migrationEvent.measurements[1].value).toEqual(true); + }, + ); + + it.each([LDMigrationStage.Shadow, LDMigrationStage.Live])( + 'it uses the check ratio and does a consistency check when it should sample: %p', + async (stage) => { + jest.spyOn(internal, 'shouldSample').mockReturnValue(true); + const flagKey = 'migration'; + td.update(td.flag(flagKey).valueForAll(stage).checkRatio(10)); + // eslint-disable-next-line no-await-in-loop + await migration.read(flagKey, { key: 'test' }, stage); + // Feature event. + await events.take(); + // Migration event. + const migrationEvent = (await events.take()) as internal.InputMigrationEvent; + // Only check the measurements component of the event. + expect(migrationEvent.measurements[1].key).toEqual('consistent'); + // This isn't a precise check, but we should have non-zero values. + expect(migrationEvent.measurements[1].value).toEqual(true); + expect(internal.shouldSample).toHaveBeenCalledWith(10); + }, + ); + + it.each([LDMigrationStage.Shadow, LDMigrationStage.Live])( + 'it uses the check ratio and does not do a consistency check when it should not: %p', + async (stage) => { + jest.spyOn(internal, 'shouldSample').mockReturnValue(false); + const flagKey = 'migration'; + td.update(td.flag(flagKey).valueForAll(stage).checkRatio(12)); + // eslint-disable-next-line no-await-in-loop + await migration.read(flagKey, { key: 'test' }, stage); + // Feature event. + await events.take(); + // Migration event. 
+ const migrationEvent = (await events.take()) as internal.InputMigrationEvent; + // Only check the measurements component of the event. + expect(migrationEvent.measurements.length).toEqual(1); + expect(internal.shouldSample).toHaveBeenCalledWith(12); + }, + ); + }); + + describe('given a migration which checks consistency and produces inconsistent results', () => { + let migration: LDMigration; + beforeEach(() => { + migration = createMigration(client, { + execution, + latencyTracking: false, + errorTracking: false, + readNew: async () => LDMigrationSuccess('a'), + writeNew: async () => LDMigrationSuccess('b'), + readOld: async () => LDMigrationSuccess('c'), + writeOld: async () => LDMigrationSuccess('d'), + check: (a: string, b: string) => a === b, + }); + }); + + it.each([LDMigrationStage.Shadow, LDMigrationStage.Live])( + 'finds the results inconsistent: %p', + async (stage) => { + jest.spyOn(internal, 'shouldSample').mockReturnValue(true); + const flagKey = 'migration'; + td.update(td.flag(flagKey).valueForAll(stage)); + + await migration.read(flagKey, { key: 'test' }, stage); + // Feature event. + await events.take(); + // Migration event. + const migrationEvent = (await events.take()) as internal.InputMigrationEvent; + expect(migrationEvent.measurements[1].key).toEqual('consistent'); + // The old and new results differ, so the check should report inconsistent. 
+ expect(migrationEvent.measurements[1].value).toEqual(false); + }, + ); + }); + + describe('given a migration which takes time to execute and tracks latency', () => { + let migration: LDMigration; + + function timeoutPromise(val: TReturn): Promise { + return new Promise((a) => { + setTimeout(() => a(val), 2); + }); + } + + beforeEach(() => { + migration = createMigration(client, { + execution, + latencyTracking: true, + errorTracking: false, + readNew: async () => timeoutPromise(LDMigrationSuccess('readNew')), + writeNew: async () => timeoutPromise(LDMigrationSuccess('writeNew')), + readOld: async () => timeoutPromise(LDMigrationSuccess('readOld')), + writeOld: async () => timeoutPromise(LDMigrationSuccess('writeOld')), + }); + }); + + it.each([ + [LDMigrationStage.Off, { old: true }], + [LDMigrationStage.DualWrite, { old: true }], + [LDMigrationStage.Shadow, { old: true, new: true }], + [LDMigrationStage.RampDown, { new: true }], + [LDMigrationStage.Complete, { new: true }], + ])('tracks the invoked methods for reads', async (stage, values) => { + const flagKey = 'migration'; + td.update(td.flag(flagKey).valueForAll(stage)); + + await migration.read(flagKey, { key: 'test' }, stage); + // Feature event. + await events.take(); + // Migration event. + const migrationEvent = (await events.take()) as internal.InputMigrationEvent; + expect(migrationEvent.measurements[0].key).toEqual('invoked'); + expect(migrationEvent.measurements[0].values).toEqual(values); + }); + + it.each([ + [LDMigrationStage.Off, { old: true }], + [LDMigrationStage.DualWrite, { old: true, new: true }], + [LDMigrationStage.Shadow, { old: true, new: true }], + [LDMigrationStage.RampDown, { old: true, new: true }], + [LDMigrationStage.Complete, { new: true }], + ])('tracks the invoked methods for writes', async (stage, values) => { + const flagKey = 'migration'; + td.update(td.flag(flagKey).valueForAll(stage)); + + await migration.write(flagKey, { key: 'test' }, stage); + // Feature event. 
+ await events.take(); + // Migration event. + const migrationEvent = (await events.take()) as internal.InputMigrationEvent; + expect(migrationEvent.measurements[0].key).toEqual('invoked'); + expect(migrationEvent.measurements[0].values).toEqual(values); + }); + + it.each([LDMigrationStage.Shadow, LDMigrationStage.Live])( + 'can report read latency for new and old', + async (stage) => { + const flagKey = 'migration'; + td.update(td.flag(flagKey).valueForAll(stage)); + + await migration.read(flagKey, { key: 'test' }, stage); + // Feature event. + await events.take(); + // Migration event. + const migrationEvent = (await events.take()) as internal.InputMigrationEvent; + expect(migrationEvent.measurements[1].key).toEqual('latency_ms'); + // This isn't a precise check, but we should have non-zero values. + expect(migrationEvent.measurements[1].values.old).toBeGreaterThanOrEqual(1); + expect(migrationEvent.measurements[1].values.new).toBeGreaterThanOrEqual(1); + }, + ); + + it.each([LDMigrationStage.Off, LDMigrationStage.DualWrite])( + 'can report latency for old reads', + async (stage) => { + const flagKey = 'migration'; + td.update(td.flag(flagKey).valueForAll(stage)); + + await migration.read(flagKey, { key: 'test' }, stage); + // Feature event. + await events.take(); + // Migration event. + const migrationEvent = (await events.take()) as internal.InputMigrationEvent; + expect(migrationEvent.measurements[1].key).toEqual('latency_ms'); + // This isn't a precise check, but we should have non-zero values. + expect(migrationEvent.measurements[1].values.old).toBeGreaterThanOrEqual(1); + expect(migrationEvent.measurements[1].values.new).toBeUndefined(); + }, + ); + + it.each([LDMigrationStage.RampDown, LDMigrationStage.Complete])( + 'can report latency for new reads', + async (stage) => { + const flagKey = 'migration'; + td.update(td.flag(flagKey).valueForAll(stage)); + + await migration.read(flagKey, { key: 'test' }, stage); + // Feature event. 
+ await events.take(); + // Migration event. + const migrationEvent = (await events.take()) as internal.InputMigrationEvent; + expect(migrationEvent.measurements[1].key).toEqual('latency_ms'); + // This isn't a precise check, but we should have non-zero values. + expect(migrationEvent.measurements[1].values.new).toBeGreaterThanOrEqual(1); + expect(migrationEvent.measurements[1].values.old).toBeUndefined(); + }, + ); + + it.each([LDMigrationStage.Off])('can report latency for old writes: %p', async (stage) => { + const flagKey = 'migration'; + td.update(td.flag(flagKey).valueForAll(stage)); + + await migration.write(flagKey, { key: 'test' }, stage); + // Feature event. + await events.take(); + // Migration event. + const migrationEvent = (await events.take()) as internal.InputMigrationEvent; + expect(migrationEvent.measurements[1].key).toEqual('latency_ms'); + // This isn't a precise check, but we should have non-zero values. + expect(migrationEvent.measurements[1].values.old).toBeGreaterThanOrEqual(1); + expect(migrationEvent.measurements[1].values.new).toBeUndefined(); + }); + + it.each([LDMigrationStage.Complete])( + 'can report latency for new writes: %p', + async (stage) => { + const flagKey = 'migration'; + td.update(td.flag(flagKey).valueForAll(stage)); + + await migration.write(flagKey, { key: 'test' }, stage); + // Feature event. + await events.take(); + // Migration event. + const migrationEvent = (await events.take()) as internal.InputMigrationEvent; + expect(migrationEvent.measurements[1].key).toEqual('latency_ms'); + // This isn't a precise check, but we should have non-zero values. 
+ expect(migrationEvent.measurements[1].values.new).toBeGreaterThanOrEqual(1); + expect(migrationEvent.measurements[1].values.old).toBeUndefined(); + }, + ); + + it.each([LDMigrationStage.DualWrite, LDMigrationStage.Shadow, LDMigrationStage.Live])( + 'can report latency for old and new writes: %p', + async (stage) => { + const flagKey = 'migration'; + td.update(td.flag(flagKey).valueForAll(stage)); + + await migration.write(flagKey, { key: 'test' }, stage); + // Feature event. + await events.take(); + // Migration event. + const migrationEvent = (await events.take()) as internal.InputMigrationEvent; + expect(migrationEvent.measurements[1].key).toEqual('latency_ms'); + // This isn't a precise check, but we should have non-zero values. + expect(migrationEvent.measurements[1].values.old).toBeGreaterThanOrEqual(1); + expect(migrationEvent.measurements[1].values.new).toBeGreaterThanOrEqual(1); + }, + ); + + it('can report write latency for new', async () => { + const flagKey = 'migration'; + td.update(td.flag(flagKey).valueForAll(LDMigrationStage.Live)); + + await migration.write(flagKey, { key: 'test' }, LDMigrationStage.Live); + // Feature event. + await events.take(); + // Migration event. + const migrationEvent = (await events.take()) as internal.InputMigrationEvent; + expect(migrationEvent.measurements[1].key).toEqual('latency_ms'); + // This isn't a precise check, but we should have non-zero values. 
+ expect(migrationEvent.measurements[1].values.old).toBeGreaterThanOrEqual(1); + expect(migrationEvent.measurements[1].values.new).toBeGreaterThanOrEqual(1); + }); + }); + + describe('given a migration which produces errors for every step', () => { + let migration: LDMigration; + beforeEach(() => { + migration = createMigration(client, { + execution, + latencyTracking: false, + errorTracking: true, + readNew: async () => LDMigrationError(new Error('error')), + writeNew: async () => LDMigrationError(new Error('error')), + readOld: async () => LDMigrationError(new Error('error')), + writeOld: async () => LDMigrationError(new Error('error')), + }); + }); + + it.each([LDMigrationStage.Off, LDMigrationStage.DualWrite])( + 'can report errors for old reads: %p', + async (stage) => { + const flagKey = 'migration'; + td.update(td.flag(flagKey).valueForAll(stage)); + + await migration.read(flagKey, { key: 'test' }, stage); + // Feature event. + await events.take(); + // Migration event. + const migrationEvent = (await events.take()) as internal.InputMigrationEvent; + // Only check the measurements component of the event. + expect(migrationEvent.measurements).toContainEqual({ + key: 'error', + values: { + old: true, + }, + }); + }, + ); + + it.each([LDMigrationStage.RampDown, LDMigrationStage.Complete])( + 'can report errors for new reads: %p', + async (stage) => { + const flagKey = 'migration'; + td.update(td.flag(flagKey).valueForAll(stage)); + + await migration.read(flagKey, { key: 'test' }, stage); + // Feature event. + await events.take(); + // Migration event. 
+ const migrationEvent = (await events.take()) as internal.InputMigrationEvent; + expect(migrationEvent.measurements).toContainEqual({ + key: 'error', + values: { + new: true, + }, + }); + }, + ); + + it.each([LDMigrationStage.Shadow, LDMigrationStage.Live])( + 'can report errors for old and new reads simultaneously: %p', + async (stage) => { + const flagKey = 'migration'; + td.update(td.flag(flagKey).valueForAll(stage)); + + await migration.read(flagKey, { key: 'test' }, stage); + // Feature event. + await events.take(); + // Migration event. + const migrationEvent = (await events.take()) as internal.InputMigrationEvent; + // Only check the measurements component of the event. + expect(migrationEvent.measurements).toContainEqual({ + key: 'error', + values: { + old: true, + new: true, + }, + }); + }, + ); + + it.each([LDMigrationStage.Off, LDMigrationStage.DualWrite, LDMigrationStage.Shadow])( + 'can report errors for old writes: %p', + async (stage) => { + const flagKey = 'migration'; + td.update(td.flag(flagKey).valueForAll(stage)); + + await migration.write(flagKey, { key: 'test' }, stage); + // Feature event. + await events.take(); + // Migration event. + const migrationEvent = (await events.take()) as internal.InputMigrationEvent; + expect(migrationEvent.measurements).toContainEqual({ + key: 'error', + values: { + old: true, + }, + }); + }, + ); + + it.each([LDMigrationStage.Off, LDMigrationStage.DualWrite, LDMigrationStage.Shadow])( + 'it does not invoke non-authoritative write after an error with authoritative old', + async (stage) => { + const flagKey = 'migration'; + td.update(td.flag(flagKey).valueForAll(stage)); + + await migration.write(flagKey, { key: 'test' }, stage); + // Feature event. + await events.take(); + // Migration event. 
+ const migrationEvent = (await events.take()) as internal.InputMigrationEvent; + expect(migrationEvent.measurements[0].key).toEqual('invoked'); + expect(migrationEvent.measurements[0].values).toEqual({ old: true }); + }, + ); + + it.each([LDMigrationStage.Live, LDMigrationStage.RampDown, LDMigrationStage.Complete])( + 'it does not invoke non-authoritative write after an error with authoritative new', + async (stage) => { + const flagKey = 'migration'; + td.update(td.flag(flagKey).valueForAll(stage)); + + await migration.write(flagKey, { key: 'test' }, stage); + // Feature event. + await events.take(); + // Migration event. + const migrationEvent = (await events.take()) as internal.InputMigrationEvent; + expect(migrationEvent.measurements[0].key).toEqual('invoked'); + expect(migrationEvent.measurements[0].values).toEqual({ new: true }); + }, + ); + + it.each([LDMigrationStage.Live, LDMigrationStage.RampDown, LDMigrationStage.Complete])( + 'can report errors for new writes: %p', + async (stage) => { + const flagKey = 'migration'; + td.update(td.flag(flagKey).valueForAll(stage)); + + await migration.write(flagKey, { key: 'test' }, stage); + // Feature event. + await events.take(); + // Migration event. + const migrationEvent = (await events.take()) as internal.InputMigrationEvent; + // Only check the measurements component of the event. 
+ expect(migrationEvent.measurements).toContainEqual({ + key: 'error', + values: { + new: true, + }, + }); + }, + ); + }); + }); +}); + +it('ignores invalid measurement keys', () => { + const inputEvent: LDMigrationOpEvent = { + kind: 'migration_op', + operation: 'read', + creationDate: 0, + contextKeys: { user: 'bob' }, + evaluation: { + key: 'potato', + value: LDMigrationStage.Off, + default: LDMigrationStage.Live, + reason: { + kind: 'FALLTHROUGH', + }, + }, + measurements: [ + { + // @ts-ignore + key: 'bad', + values: { + old: true, + }, + }, + ], + }; + const validatedEvent = MigrationOpEventConversion(inputEvent); + expect(validatedEvent).toEqual({ ...inputEvent, measurements: [], samplingRatio: 1 }); +}); + +it('invalid data types are filtered', () => { + const inputEvent: LDMigrationOpEvent = { + kind: 'migration_op', + operation: 'read', + creationDate: 0, + contextKeys: { user: 'bob' }, + evaluation: { + key: 'potato', + value: LDMigrationStage.Off, + default: LDMigrationStage.Live, + reason: { + kind: 'FALLTHROUGH', + }, + }, + measurements: [ + { + key: 'latency_ms', + values: { + // @ts-ignore + old: 'ham', + new: 2, + }, + }, + { + key: 'consistent', + // @ts-ignore + value: undefined, + }, + { + key: 'error', + values: { + // @ts-ignore + old: {}, + new: true, + }, + }, + ], + }; + const validatedEvent = MigrationOpEventConversion(inputEvent); + expect(validatedEvent).toEqual({ + kind: 'migration_op', + operation: 'read', + creationDate: 0, + contextKeys: { user: 'bob' }, + evaluation: { + key: 'potato', + value: LDMigrationStage.Off, + default: LDMigrationStage.Live, + reason: { + kind: 'FALLTHROUGH', + }, + }, + measurements: [], + samplingRatio: 1, + }); +}); diff --git a/packages/shared/sdk-server/__tests__/MigrationOpTracker.test.ts b/packages/shared/sdk-server/__tests__/MigrationOpTracker.test.ts new file mode 100644 index 000000000..4c44f80bf --- /dev/null +++ b/packages/shared/sdk-server/__tests__/MigrationOpTracker.test.ts @@ -0,0 +1,471 @@ 
+import { LDMigrationStage } from '../src'; +import { LDMigrationOrigin } from '../src/api/LDMigration'; +import MigrationOpTracker from '../src/MigrationOpTracker'; +import TestLogger, { LogLevel } from './Logger'; + +it('does not generate an event if an op is not set', () => { + const tracker = new MigrationOpTracker( + 'flag', + { user: 'bob' }, + LDMigrationStage.Off, + LDMigrationStage.Off, + { + kind: 'FALLTHROUGH', + }, + ); + + tracker.invoked('old'); + + expect(tracker.createEvent()).toBeUndefined(); +}); + +it('does not generate an event with missing context keys', () => { + const tracker = new MigrationOpTracker('flag', {}, LDMigrationStage.Off, LDMigrationStage.Off, { + kind: 'FALLTHROUGH', + }); + + // Set the op otherwise/invoked that would prevent an event as well. + tracker.op('write'); + tracker.invoked('old'); + + expect(tracker.createEvent()).toBeUndefined(); +}); + +it('does not generate an event with empty flag key', () => { + const tracker = new MigrationOpTracker( + '', + { key: 'user-key' }, + LDMigrationStage.Off, + LDMigrationStage.Off, + { + kind: 'FALLTHROUGH', + }, + ); + + // Set the op/invoked otherwise that would prevent an event as well. 
+ tracker.op('write'); + tracker.invoked('old'); + + expect(tracker.createEvent()).toBeUndefined(); +}); + +it('generates an event if the minimal requirements are met.', () => { + const tracker = new MigrationOpTracker( + 'flag', + { user: 'bob' }, + LDMigrationStage.Off, + LDMigrationStage.Off, + { + kind: 'FALLTHROUGH', + }, + ); + + tracker.op('write'); + tracker.invoked('old'); + + expect(tracker.createEvent()).toMatchObject({ + contextKeys: { user: 'bob' }, + evaluation: { default: 'off', key: 'flag', reason: { kind: 'FALLTHROUGH' }, value: 'off' }, + kind: 'migration_op', + measurements: [ + { + key: 'invoked', + values: { + old: true, + }, + }, + ], + operation: 'write', + }); +}); + +it('can include the variation in the event', () => { + const tracker = new MigrationOpTracker( + 'flag', + { user: 'bob' }, + LDMigrationStage.Off, + LDMigrationStage.Off, + { + kind: 'FALLTHROUGH', + }, + undefined, + 1, + ); + + tracker.op('write'); + tracker.invoked('old'); + + expect(tracker.createEvent()).toMatchObject({ + contextKeys: { user: 'bob' }, + evaluation: { + default: 'off', + key: 'flag', + reason: { kind: 'FALLTHROUGH' }, + value: 'off', + variation: 1, + }, + kind: 'migration_op', + measurements: [ + { + key: 'invoked', + values: { + old: true, + }, + }, + ], + operation: 'write', + }); +}); + +it('can include the version in the event', () => { + const tracker = new MigrationOpTracker( + 'flag', + { user: 'bob' }, + LDMigrationStage.Off, + LDMigrationStage.Off, + { + kind: 'FALLTHROUGH', + }, + undefined, + undefined, + 2, + ); + + tracker.op('write'); + tracker.invoked('old'); + + expect(tracker.createEvent()).toMatchObject({ + contextKeys: { user: 'bob' }, + evaluation: { + default: 'off', + key: 'flag', + reason: { kind: 'FALLTHROUGH' }, + value: 'off', + version: 2, + }, + kind: 'migration_op', + measurements: [ + { + key: 'invoked', + values: { + old: true, + }, + }, + ], + operation: 'write', + }); +}); + +it('includes errors if at least one is set', () 
=> { + const tracker = new MigrationOpTracker( + 'flag', + { user: 'bob' }, + LDMigrationStage.Off, + LDMigrationStage.Off, + { + kind: 'FALLTHROUGH', + }, + ); + tracker.op('read'); + tracker.error('old'); + tracker.invoked('old'); + tracker.invoked('new'); + + const event = tracker.createEvent(); + expect(event?.measurements).toContainEqual({ + key: 'error', + values: { + old: true, + }, + }); + + const trackerB = new MigrationOpTracker( + 'flag', + { user: 'bob' }, + LDMigrationStage.Off, + LDMigrationStage.Off, + { + kind: 'FALLTHROUGH', + }, + ); + trackerB.op('read'); + trackerB.error('new'); + trackerB.invoked('old'); + trackerB.invoked('new'); + + const eventB = trackerB.createEvent(); + expect(eventB?.measurements).toContainEqual({ + key: 'error', + values: { + new: true, + }, + }); +}); + +it('includes latency if at least one measurement exists', () => { + const tracker = new MigrationOpTracker( + 'flag', + { user: 'bob' }, + LDMigrationStage.Off, + LDMigrationStage.Off, + { + kind: 'FALLTHROUGH', + }, + ); + tracker.op('read'); + tracker.latency('old', 100); + tracker.invoked('old'); + tracker.invoked('new'); + + const event = tracker.createEvent(); + expect(event?.measurements).toContainEqual({ + key: 'latency_ms', + values: { + old: 100, + }, + }); + + const trackerB = new MigrationOpTracker( + 'flag', + { user: 'bob' }, + LDMigrationStage.Off, + LDMigrationStage.Off, + { + kind: 'FALLTHROUGH', + }, + ); + trackerB.op('read'); + trackerB.latency('new', 150); + trackerB.invoked('old'); + trackerB.invoked('new'); + + const eventB = trackerB.createEvent(); + expect(eventB?.measurements).toContainEqual({ + key: 'latency_ms', + values: { + new: 150, + }, + }); +}); + +it('includes if the result was consistent', () => { + const tracker = new MigrationOpTracker( + 'flag', + { user: 'bob' }, + LDMigrationStage.Off, + LDMigrationStage.Off, + { + kind: 'FALLTHROUGH', + }, + ); + tracker.op('read'); + tracker.consistency(() => true); + tracker.invoked('old'); + 
tracker.invoked('new'); + + const event = tracker.createEvent(); + expect(event?.measurements).toContainEqual({ + key: 'consistent', + value: true, + samplingRatio: 1, + }); +}); + +it('includes if the result was inconsistent', () => { + const tracker = new MigrationOpTracker( + 'flag', + { user: 'bob' }, + LDMigrationStage.Off, + LDMigrationStage.Off, + { + kind: 'FALLTHROUGH', + }, + ); + tracker.op('read'); + tracker.invoked('old'); + tracker.invoked('new'); + tracker.consistency(() => false); + + const event = tracker.createEvent(); + expect(event?.measurements).toContainEqual({ + key: 'consistent', + value: false, + samplingRatio: 1, + }); +}); + +it.each(['old', 'new'])('includes which single origins were invoked', (origin) => { + const tracker = new MigrationOpTracker( + 'flag', + { user: 'bob' }, + LDMigrationStage.Off, + LDMigrationStage.Off, + { + kind: 'FALLTHROUGH', + }, + ); + tracker.op('read'); + tracker.invoked(origin as LDMigrationOrigin); + + const event = tracker.createEvent(); + expect(event?.measurements).toContainEqual({ + key: 'invoked', + values: { [origin]: true }, + }); +}); + +it('includes when both origins were invoked', () => { + const tracker = new MigrationOpTracker( + 'flag', + { user: 'bob' }, + LDMigrationStage.Off, + LDMigrationStage.Off, + { + kind: 'FALLTHROUGH', + }, + ); + tracker.op('read'); + tracker.invoked('old'); + tracker.invoked('new'); + + const event = tracker.createEvent(); + expect(event?.measurements).toContainEqual({ + key: 'invoked', + values: { old: true, new: true }, + }); +}); + +it('can handle exceptions thrown in the consistency check method', () => { + const logger = new TestLogger(); + const tracker = new MigrationOpTracker( + 'flag', + { user: 'bob' }, + LDMigrationStage.Off, + LDMigrationStage.Off, + { + kind: 'FALLTHROUGH', + }, + undefined, + undefined, + undefined, + undefined, + logger, + ); + tracker.op('read'); + tracker.invoked('old'); + tracker.invoked('new'); + tracker.consistency(() => { + 
throw new Error('I HAVE FAILED'); + }); + logger.expectMessages([ + { + level: LogLevel.Error, + matches: /.*migration 'flag'.*Error: I HAVE FAILED/, + }, + ]); +}); + +it.each([ + [false, true, true, false], + [true, false, false, true], + [false, true, true, true], + [true, false, true, true], +])( + 'does not generate an event if latency measurement without correct invoked measurement' + + ' invoke old: %p invoke new: %p measure old: %p measure new: %p', + (invoke_old, invoke_new, measure_old, measure_new) => { + const tracker = new MigrationOpTracker( + 'flag', + { user: 'bob' }, + LDMigrationStage.Off, + LDMigrationStage.Off, + { + kind: 'FALLTHROUGH', + }, + ); + + tracker.op('write'); + if (invoke_old) { + tracker.invoked('old'); + } + if (invoke_new) { + tracker.invoked('new'); + } + if (measure_old) { + tracker.latency('old', 100); + } + if (measure_new) { + tracker.latency('new', 100); + } + + expect(tracker.createEvent()).toBeUndefined(); + }, +); + +it.each([ + [false, true, true, false], + [true, false, false, true], + [false, true, true, true], + [true, false, true, true], +])( + 'does not generate an event error measurement without correct invoked measurement' + + ' invoke old: %p invoke new: %p measure old: %p measure new: %p', + (invoke_old, invoke_new, measure_old, measure_new) => { + const tracker = new MigrationOpTracker( + 'flag', + { user: 'bob' }, + LDMigrationStage.Off, + LDMigrationStage.Off, + { + kind: 'FALLTHROUGH', + }, + ); + + tracker.op('write'); + if (invoke_old) { + tracker.invoked('old'); + } + if (invoke_new) { + tracker.invoked('new'); + } + if (measure_old) { + tracker.error('old'); + } + if (measure_new) { + tracker.error('new'); + } + + expect(tracker.createEvent()).toBeUndefined(); + }, +); + +it.each([ + [true, false, true], + [false, true, true], + [true, false, false], + [false, true, false], +])( + 'does not generate an event if there is a consistency measurement but both origins were not invoked' + + ' invoke old: %p 
invoke new: %p consistent: %p', + (invoke_old, invoke_new, consistent) => { + const tracker = new MigrationOpTracker( + 'flag', + { user: 'bob' }, + LDMigrationStage.Off, + LDMigrationStage.Off, + { + kind: 'FALLTHROUGH', + }, + ); + + tracker.op('write'); + if (invoke_old) { + tracker.invoked('old'); + } + if (invoke_new) { + tracker.invoked('new'); + } + tracker.consistency(() => consistent); + expect(tracker.createEvent()).toBeUndefined(); + }, +); diff --git a/packages/shared/sdk-server/__tests__/data_sources/FileDataSource.test.ts b/packages/shared/sdk-server/__tests__/data_sources/FileDataSource.test.ts index 2a2f376ea..8e8da6f03 100644 --- a/packages/shared/sdk-server/__tests__/data_sources/FileDataSource.test.ts +++ b/packages/shared/sdk-server/__tests__/data_sources/FileDataSource.test.ts @@ -1,6 +1,6 @@ import { ClientContext, Context, Filesystem, WatchHandle } from '@launchdarkly/js-sdk-common'; +import * as mocks from '@launchdarkly/private-js-mocks'; -import promisify from '../../src/async/promisify'; import { Flag } from '../../src/evaluation/data/Flag'; import { Segment } from '../../src/evaluation/data/Segment'; import Evaluator from '../../src/evaluation/Evaluator'; @@ -9,7 +9,6 @@ import Configuration from '../../src/options/Configuration'; import AsyncStoreFacade from '../../src/store/AsyncStoreFacade'; import InMemoryFeatureStore from '../../src/store/InMemoryFeatureStore'; import VersionedDataKinds from '../../src/store/VersionedDataKinds'; -import basicPlatform from '../evaluation/mocks/platform'; import TestLogger from '../Logger'; const flag1Key = 'flag1'; @@ -119,545 +118,321 @@ describe('given a mock filesystem and memory feature store', () => { let filesystem: MockFilesystem; let logger: TestLogger; let featureStore: InMemoryFeatureStore; - let asyncFeatureStore: AsyncStoreFacade; + let createFileDataSource: any; + let mockInitSuccessHandler: jest.Mock; + let mockErrorHandler: jest.Mock; beforeEach(() => { + jest.useFakeTimers(); + + 
mockInitSuccessHandler = jest.fn(); + mockErrorHandler = jest.fn(); filesystem = new MockFilesystem(); logger = new TestLogger(); featureStore = new InMemoryFeatureStore(); asyncFeatureStore = new AsyncStoreFacade(featureStore); + jest.spyOn(filesystem, 'readFile'); + jest.spyOn(filesystem, 'watch'); + jest.spyOn(featureStore, 'init'); + + const defaultData = { + flagValues: { + key: 'value', + }, + }; + const defaultDataString = JSON.stringify(defaultData); + const defaultTestFilePath = 'testFile.json'; + const defaultTestFilePathData = [{ path: defaultTestFilePath, data: defaultDataString }]; + + // setup a filesystem of paths pointing to data + // returns an array of paths + const setupFileSystem = (testFiles: { path: string; data: string }[]) => + testFiles.map(({ path, data }) => { + filesystem.fileData[path] = { timestamp: 0, data }; + return path; + }); + + createFileDataSource = async ( + start: boolean = true, + files: { path: string; data: string }[] = defaultTestFilePathData, + simulateMissingFile: boolean = false, + autoUpdate: boolean = false, + yamlParser?: (data: string) => any, + ) => { + const filePaths = setupFileSystem(files); + if (simulateMissingFile) { + filePaths.push('missing-file.json'); + } + const factory = new FileDataSourceFactory({ + paths: filePaths, + autoUpdate, + yamlParser, + }); + + const fileDataSource = factory.create( + new ClientContext( + '', + new Configuration({ + featureStore, + logger, + }), + { + ...mocks.basicPlatform, + fileSystem: filesystem as unknown as Filesystem, + }, + ), + featureStore, + mockInitSuccessHandler, + mockErrorHandler, + ); + + if (start) { + fileDataSource.start(); + } + + await jest.runAllTimersAsync(); + return fileDataSource; + }; }); afterEach(() => { jest.resetAllMocks(); + jest.useRealTimers(); }); it('does not load flags prior to start', async () => { - filesystem.fileData['testfile.json'] = { timestamp: 0, data: '{"flagValues":{"key":"value"}}' }; - jest.spyOn(filesystem, 'readFile'); - - 
const factory = new FileDataSourceFactory({ - paths: ['testfile.json'], - }); + await createFileDataSource(false); - factory.create( - new ClientContext( - '', - new Configuration({ - featureStore, - logger, - }), - { ...basicPlatform, fileSystem: filesystem as unknown as Filesystem }, - ), - featureStore, - ); expect(await asyncFeatureStore.initialized()).toBeFalsy(); - expect(await asyncFeatureStore.all(VersionedDataKinds.Features)).toEqual({}); expect(await asyncFeatureStore.all(VersionedDataKinds.Segments)).toEqual({}); // There was no file access. - expect(filesystem.readFile).toHaveBeenCalledTimes(0); + expect(filesystem.readFile).not.toHaveBeenCalled(); }); - it('loads all properties', (done) => { - filesystem.fileData['testfile.json'] = { timestamp: 0, data: allPropertiesJson }; - jest.spyOn(filesystem, 'readFile'); - - const factory = new FileDataSourceFactory({ - paths: ['testfile.json'], - }); - - const fds = factory.create( - new ClientContext( - '', - new Configuration({ - featureStore, - logger, - }), - { ...basicPlatform, fileSystem: filesystem as unknown as Filesystem }, - ), - featureStore, - ); + it('loads all properties', async () => { + await createFileDataSource(true, [{ path: 'allProperties.json', data: allPropertiesJson }]); - fds.start(async () => { - expect(await asyncFeatureStore.initialized()).toBeTruthy(); + expect(mockInitSuccessHandler).toBeCalled(); + expect(await asyncFeatureStore.initialized()).toBeTruthy(); - const flags = await asyncFeatureStore.all(VersionedDataKinds.Features); - expect(sorted(Object.keys(flags))).toEqual([flag1Key, flag2Key]); + const flags = await asyncFeatureStore.all(VersionedDataKinds.Features); + expect(sorted(Object.keys(flags))).toEqual([flag1Key, flag2Key]); - const segments = await asyncFeatureStore.all(VersionedDataKinds.Segments); - expect(segments).toEqual({ seg1: segment1 }); - expect(filesystem.readFile).toHaveBeenCalledTimes(1); - done(); - }); + const segments = await 
asyncFeatureStore.all(VersionedDataKinds.Segments); + expect(segments).toEqual({ seg1: segment1 }); + expect(filesystem.readFile).toHaveBeenCalledTimes(1); }); - it('does not load if a file it not found', (done) => { - const factory = new FileDataSourceFactory({ - paths: ['missing-file.json'], - }); - - const fds = factory.create( - new ClientContext( - '', - new Configuration({ - featureStore, - logger, - }), - { ...basicPlatform, fileSystem: filesystem as unknown as Filesystem }, - ), - featureStore, - ); - - fds.start(async (err) => { - expect(err).toBeDefined(); - expect(await asyncFeatureStore.initialized()).toBeFalsy(); + it('does not load if a file it not found', async () => { + await createFileDataSource(true, undefined, true); - expect(await asyncFeatureStore.all(VersionedDataKinds.Features)).toEqual({}); - expect(await asyncFeatureStore.all(VersionedDataKinds.Segments)).toEqual({}); - done(); - }); + expect(mockErrorHandler.mock.lastCall[0].message).toMatch(/not found/i); + expect(filesystem.readFile).toHaveBeenCalledWith('missing-file.json'); + expect(await asyncFeatureStore.initialized()).toBeFalsy(); + expect(await asyncFeatureStore.all(VersionedDataKinds.Features)).toEqual({}); + expect(await asyncFeatureStore.all(VersionedDataKinds.Segments)).toEqual({}); }); - it('does not load if a file was malformed', (done) => { - filesystem.fileData['malformed_file.json'] = { timestamp: 0, data: '{sorry' }; - jest.spyOn(filesystem, 'readFile'); - const factory = new FileDataSourceFactory({ - paths: ['malformed_file.json'], - }); - - const fds = factory.create( - new ClientContext( - '', - new Configuration({ - featureStore, - logger, - }), - { ...basicPlatform, fileSystem: filesystem as unknown as Filesystem }, - ), - featureStore, - ); - - fds.start(async (err) => { - expect(err).toBeDefined(); - expect(await asyncFeatureStore.initialized()).toBeFalsy(); + it('does not load if a file was malformed', async () => { + await createFileDataSource(true, [{ path: 
'allProperties.json', data: '{malformed' }]); - expect(await asyncFeatureStore.all(VersionedDataKinds.Features)).toEqual({}); - expect(await asyncFeatureStore.all(VersionedDataKinds.Segments)).toEqual({}); - expect(filesystem.readFile).toHaveBeenCalledWith('malformed_file.json'); - done(); - }); + expect(mockErrorHandler.mock.lastCall[0].message).toMatch(/expected.*json at position/i); + expect(await asyncFeatureStore.initialized()).toBeFalsy(); + expect(await asyncFeatureStore.all(VersionedDataKinds.Features)).toEqual({}); + expect(await asyncFeatureStore.all(VersionedDataKinds.Segments)).toEqual({}); }); - it('can load multiple files', (done) => { - filesystem.fileData['file1.json'] = { timestamp: 0, data: flagOnlyJson }; - filesystem.fileData['file2.json'] = { timestamp: 0, data: segmentOnlyJson }; + it('can load multiple files', async () => { + await createFileDataSource(true, [ + { path: 'file1.json', data: flagOnlyJson }, + { path: 'file2.json', data: segmentOnlyJson }, + ]); - jest.spyOn(filesystem, 'readFile'); - const factory = new FileDataSourceFactory({ - paths: ['file1.json', 'file2.json'], - }); - - const fds = factory.create( - new ClientContext( - '', - new Configuration({ - featureStore, - logger, - }), - { ...basicPlatform, fileSystem: filesystem as unknown as Filesystem }, - ), - featureStore, - ); + expect(await asyncFeatureStore.initialized()).toBeTruthy(); - fds.start(async () => { - expect(await asyncFeatureStore.initialized()).toBeTruthy(); + const flags = await asyncFeatureStore.all(VersionedDataKinds.Features); + expect(sorted(Object.keys(flags))).toEqual([flag1Key]); - const flags = await asyncFeatureStore.all(VersionedDataKinds.Features); - expect(sorted(Object.keys(flags))).toEqual([flag1Key]); + const segments = await asyncFeatureStore.all(VersionedDataKinds.Segments); + expect(segments).toEqual({ seg1: segment1 }); - const segments = await asyncFeatureStore.all(VersionedDataKinds.Segments); - expect(segments).toEqual({ seg1: segment1 
}); - expect(filesystem.readFile).toHaveBeenCalledTimes(2); - done(); - }); + expect(filesystem.readFile).toHaveBeenCalledTimes(2); + expect(filesystem.readFile).toHaveBeenNthCalledWith(1, 'file1.json'); + expect(filesystem.readFile).toHaveBeenNthCalledWith(2, 'file2.json'); }); - it('does not allow duplicate keys', (done) => { - filesystem.fileData['file1.json'] = { timestamp: 0, data: flagOnlyJson }; - filesystem.fileData['file2.json'] = { timestamp: 0, data: flagOnlyJson }; - - jest.spyOn(filesystem, 'readFile'); - const factory = new FileDataSourceFactory({ - paths: ['file1.json', 'file2.json'], - }); - - const fds = factory.create( - new ClientContext( - '', - new Configuration({ - featureStore, - logger, - }), - { ...basicPlatform, fileSystem: filesystem as unknown as Filesystem }, - ), - featureStore, - ); + it('does not allow duplicate keys', async () => { + await createFileDataSource(true, [ + { path: 'file1.json', data: flagOnlyJson }, + { path: 'file2.json', data: flagOnlyJson }, + ]); - fds.start(async (err) => { - expect(err).toBeDefined(); - expect(await asyncFeatureStore.initialized()).toBeFalsy(); - expect(filesystem.readFile).toHaveBeenCalledTimes(2); - done(); - }); + expect(mockErrorHandler.mock.lastCall[0].message).toMatch(/duplicate.*flag1/); + expect(await asyncFeatureStore.initialized()).toBeFalsy(); + expect(filesystem.readFile).toHaveBeenCalledTimes(2); }); - it('does not create watchers if auto-update if off', (done) => { - filesystem.fileData['file1.json'] = { timestamp: 0, data: flagOnlyJson }; - filesystem.fileData['file2.json'] = { timestamp: 0, data: segmentOnlyJson }; - - jest.spyOn(filesystem, 'watch'); - const factory = new FileDataSourceFactory({ - paths: ['file1.json', 'file2.json'], - }); + it('does not create watchers if auto-update if off', async () => { + await createFileDataSource(true, [ + { path: 'file1.json', data: flagOnlyJson }, + { path: 'file2.json', data: segmentOnlyJson }, + ]); - const fds = factory.create( - new 
ClientContext( - '', - new Configuration({ - featureStore, - logger, - }), - { ...basicPlatform, fileSystem: filesystem as unknown as Filesystem }, - ), - featureStore, - ); - - fds.start(async () => { - expect(await asyncFeatureStore.initialized()).toBeTruthy(); - expect(filesystem.watch).toHaveBeenCalledTimes(0); - done(); - }); + expect(await asyncFeatureStore.initialized()).toBeTruthy(); + expect(filesystem.watch).not.toBeCalled(); }); - it('can evaluate simple loaded flags', (done) => { - filesystem.fileData['file1.json'] = { timestamp: 0, data: allPropertiesJson }; + it('can evaluate simple loaded flags', async () => { + await createFileDataSource(true, [{ path: 'file1.json', data: allPropertiesJson }]); - const factory = new FileDataSourceFactory({ - paths: ['file1.json'], + const evaluator = new Evaluator(mocks.basicPlatform, { + getFlag: async (key) => + ((await asyncFeatureStore.get(VersionedDataKinds.Features, key)) as Flag) ?? undefined, + getSegment: async (key) => + ((await asyncFeatureStore.get(VersionedDataKinds.Segments, key)) as Segment) ?? undefined, + getBigSegmentsMembership: () => Promise.resolve(undefined), }); + const flag = await asyncFeatureStore.get(VersionedDataKinds.Features, flag2Key); + const res = await evaluator.evaluate(flag as Flag, Context.fromLDContext({ key: 'userkey' })!); - const fds = factory.create( - new ClientContext( - '', - new Configuration({ - featureStore, - logger, - }), - { ...basicPlatform, fileSystem: filesystem as unknown as Filesystem }, - ), - featureStore, - ); - - fds.start(async () => { - const evaluator = new Evaluator(basicPlatform, { - getFlag: async (key) => - ((await asyncFeatureStore.get(VersionedDataKinds.Features, key)) as Flag) ?? undefined, - getSegment: async (key) => - ((await asyncFeatureStore.get(VersionedDataKinds.Segments, key)) as Segment) ?? 
undefined, - getBigSegmentsMembership: () => Promise.resolve(undefined), - }); - - const flag = await asyncFeatureStore.get(VersionedDataKinds.Features, flag2Key); - const res = await evaluator.evaluate( - flag as Flag, - Context.fromLDContext({ key: 'userkey' })!, - ); - expect(res.detail.value).toEqual(flag2Value); - done(); - }); + expect(res.detail.value).toEqual(flag2Value); }); - it('can evaluate full loaded flags', (done) => { - filesystem.fileData['file1.json'] = { timestamp: 0, data: allPropertiesJson }; - - const factory = new FileDataSourceFactory({ - paths: ['file1.json'], + it('can evaluate full loaded flags', async () => { + await createFileDataSource(true, [{ path: 'file1.json', data: allPropertiesJson }]); + const evaluator = new Evaluator(mocks.basicPlatform, { + getFlag: async (key) => + ((await asyncFeatureStore.get(VersionedDataKinds.Features, key)) as Flag) ?? undefined, + getSegment: async (key) => + ((await asyncFeatureStore.get(VersionedDataKinds.Segments, key)) as Segment) ?? undefined, + getBigSegmentsMembership: () => Promise.resolve(undefined), }); + const flag = await asyncFeatureStore.get(VersionedDataKinds.Features, flag1Key); + const res = await evaluator.evaluate(flag as Flag, Context.fromLDContext({ key: 'userkey' })!); - const fds = factory.create( - new ClientContext( - '', - new Configuration({ - featureStore, - logger, - }), - { ...basicPlatform, fileSystem: filesystem as unknown as Filesystem }, - ), - featureStore, - ); - - fds.start(async () => { - const evaluator = new Evaluator(basicPlatform, { - getFlag: async (key) => - ((await asyncFeatureStore.get(VersionedDataKinds.Features, key)) as Flag) ?? undefined, - getSegment: async (key) => - ((await asyncFeatureStore.get(VersionedDataKinds.Segments, key)) as Segment) ?? 
undefined, - getBigSegmentsMembership: () => Promise.resolve(undefined), - }); - - const flag = await asyncFeatureStore.get(VersionedDataKinds.Features, flag1Key); - const res = await evaluator.evaluate( - flag as Flag, - Context.fromLDContext({ key: 'userkey' })!, - ); - expect(res.detail.value).toEqual('on'); - done(); - }); + expect(res.detail.value).toEqual('on'); }); - it('register watchers when auto update is enabled and unregisters them when it is closed', (done) => { - filesystem.fileData['file1.json'] = { timestamp: 0, data: flagOnlyJson }; - filesystem.fileData['file2.json'] = { timestamp: 0, data: segmentOnlyJson }; - - jest.spyOn(filesystem, 'watch'); - const factory = new FileDataSourceFactory({ - paths: ['file1.json', 'file2.json'], - autoUpdate: true, - }); - - const fds = factory.create( - new ClientContext( - '', - new Configuration({ - featureStore, - logger, - }), - { ...basicPlatform, fileSystem: filesystem as unknown as Filesystem }, - ), - featureStore, + it('register watchers when auto update is enabled and unregisters them when it is closed', async () => { + const fds = await createFileDataSource( + true, + [ + { path: 'file1.json', data: flagOnlyJson }, + { path: 'file2.json', data: segmentOnlyJson }, + ], + false, + true, ); - fds.start(async () => { - expect(await asyncFeatureStore.initialized()).toBeTruthy(); - expect(filesystem.watch).toHaveBeenCalledTimes(2); - expect(filesystem.watches['file1.json'].length).toEqual(1); - expect(filesystem.watches['file2.json'].length).toEqual(1); - fds.close(); + expect(await asyncFeatureStore.initialized()).toBeTruthy(); + expect(filesystem.watch).toHaveBeenCalledTimes(2); + expect(filesystem.watches['file1.json'].length).toEqual(1); + expect(filesystem.watches['file2.json'].length).toEqual(1); + fds.close(); - expect(filesystem.watches['file1.json'].length).toEqual(0); - expect(filesystem.watches['file2.json'].length).toEqual(0); - done(); - }); + 
expect(filesystem.watches['file1.json'].length).toEqual(0); + expect(filesystem.watches['file2.json'].length).toEqual(0); }); - it('reloads modified files when auto update is enabled', (done) => { - filesystem.fileData['file1.json'] = { timestamp: 0, data: flagOnlyJson }; + it('reloads modified files when auto update is enabled', async () => { + await createFileDataSource(true, [{ path: 'file1.json', data: flagOnlyJson }], false, true); - jest.spyOn(filesystem, 'watch'); - const factory = new FileDataSourceFactory({ - paths: ['file1.json'], - autoUpdate: true, - }); - - const fds = factory.create( - new ClientContext( - '', - new Configuration({ - featureStore, - logger, - }), - { ...basicPlatform, fileSystem: filesystem as unknown as Filesystem }, - ), - featureStore, - ); - - fds.start(async () => { - expect(await asyncFeatureStore.initialized()).toBeTruthy(); + expect(await asyncFeatureStore.initialized()).toBeTruthy(); - const flags = await asyncFeatureStore.all(VersionedDataKinds.Features); - expect(Object.keys(flags).length).toEqual(1); + const flags = await asyncFeatureStore.all(VersionedDataKinds.Features); + expect(Object.keys(flags).length).toEqual(1); - const segments = await asyncFeatureStore.all(VersionedDataKinds.Segments); - expect(Object.keys(segments).length).toEqual(0); + const segments = await asyncFeatureStore.all(VersionedDataKinds.Segments); + expect(Object.keys(segments).length).toEqual(0); - // Need to update the timestamp, or it will think the file has not changed. - filesystem.fileData['file1.json'] = { timestamp: 100, data: segmentOnlyJson }; - filesystem.watches['file1.json'][0].cb('change', 'file1.json'); + // Need to update the timestamp, or it will think the file has not changed. + filesystem.fileData['file1.json'] = { timestamp: 100, data: segmentOnlyJson }; + filesystem.watches['file1.json'][0].cb('change', 'file1.json'); - // The handling of the file loading is async, and additionally we debounce - // the callback. 
So we have to wait a bit to account for the awaits and the debounce. - setTimeout(async () => { - const flags2 = await asyncFeatureStore.all(VersionedDataKinds.Features); - expect(Object.keys(flags2).length).toEqual(0); + await jest.runAllTimersAsync(); + const flags2 = await asyncFeatureStore.all(VersionedDataKinds.Features); + expect(Object.keys(flags2).length).toEqual(0); - const segments2 = await asyncFeatureStore.all(VersionedDataKinds.Segments); - expect(Object.keys(segments2).length).toEqual(1); - done(); - }, 100); - }); + const segments2 = await asyncFeatureStore.all(VersionedDataKinds.Segments); + expect(Object.keys(segments2).length).toEqual(1); }); - it('debounces the callback for file loading', (done) => { - filesystem.fileData['file1.json'] = { timestamp: 0, data: flagOnlyJson }; + it('debounces the callback for file loading', async () => { + await createFileDataSource(true, [{ path: 'file1.json', data: flagOnlyJson }], false, true); - jest.spyOn(filesystem, 'watch'); - jest.spyOn(featureStore, 'init'); - const factory = new FileDataSourceFactory({ - paths: ['file1.json'], - autoUpdate: true, - }); + expect(await asyncFeatureStore.initialized()).toBeTruthy(); - const fds = factory.create( - new ClientContext( - '', - new Configuration({ - featureStore, - logger, - }), - { ...basicPlatform, fileSystem: filesystem as unknown as Filesystem }, - ), - featureStore, - ); + const flags = await asyncFeatureStore.all(VersionedDataKinds.Features); + expect(Object.keys(flags).length).toEqual(1); - fds.start(async () => { - expect(await asyncFeatureStore.initialized()).toBeTruthy(); - - const flags = await asyncFeatureStore.all(VersionedDataKinds.Features); - expect(Object.keys(flags).length).toEqual(1); - - const segments = await asyncFeatureStore.all(VersionedDataKinds.Segments); - expect(Object.keys(segments).length).toEqual(0); - - // Trigger several change callbacks. 
- filesystem.fileData['file1.json'] = { timestamp: 100, data: segmentOnlyJson }; - filesystem.watches['file1.json'][0].cb('change', 'file1.json'); - filesystem.fileData['file1.json'] = { timestamp: 101, data: segmentOnlyJson }; - filesystem.watches['file1.json'][0].cb('change', 'file1.json'); - filesystem.fileData['file1.json'] = { timestamp: 102, data: segmentOnlyJson }; - filesystem.watches['file1.json'][0].cb('change', 'file1.json'); - filesystem.fileData['file1.json'] = { timestamp: 103, data: segmentOnlyJson }; - filesystem.watches['file1.json'][0].cb('change', 'file1.json'); - - // The handling of the file loading is async, and additionally we debounce - // the callback. So we have to wait a bit to account for the awaits and the debounce. - setTimeout(async () => { - // Once for the start, and then again for the coalesced update. - expect(featureStore.init).toHaveBeenCalledTimes(2); - done(); - }, 100); - }); + const segments = await asyncFeatureStore.all(VersionedDataKinds.Segments); + expect(Object.keys(segments).length).toEqual(0); + + // Trigger several change callbacks. + filesystem.fileData['file1.json'] = { timestamp: 100, data: segmentOnlyJson }; + filesystem.watches['file1.json'][0].cb('change', 'file1.json'); + filesystem.fileData['file1.json'] = { timestamp: 101, data: segmentOnlyJson }; + filesystem.watches['file1.json'][0].cb('change', 'file1.json'); + filesystem.fileData['file1.json'] = { timestamp: 102, data: segmentOnlyJson }; + filesystem.watches['file1.json'][0].cb('change', 'file1.json'); + filesystem.fileData['file1.json'] = { timestamp: 103, data: segmentOnlyJson }; + filesystem.watches['file1.json'][0].cb('change', 'file1.json'); + + // The handling of the file loading is async, and additionally we debounce + // the callback. So we have to wait a bit to account for the awaits and the debounce. + // Once for the start, and then again for the coalesced update. 
+ await jest.runAllTimersAsync(); + expect(featureStore.init).toHaveBeenCalledTimes(2); }); - it('does not callback if the timestamp has not changed', (done) => { - filesystem.fileData['file1.json'] = { timestamp: 0, data: flagOnlyJson }; - - jest.spyOn(filesystem, 'watch'); - jest.spyOn(featureStore, 'init'); - const factory = new FileDataSourceFactory({ - paths: ['file1.json'], - autoUpdate: true, - }); - - const fds = factory.create( - new ClientContext( - '', - new Configuration({ - featureStore, - logger, - }), - { ...basicPlatform, fileSystem: filesystem as unknown as Filesystem }, - ), - featureStore, - ); + it('does not callback if the timestamp has not changed', async () => { + await createFileDataSource(true, [{ path: 'file1.json', data: flagOnlyJson }], false, true); - fds.start(async () => { - expect(await asyncFeatureStore.initialized()).toBeTruthy(); + expect(await asyncFeatureStore.initialized()).toBeTruthy(); - const flags = await asyncFeatureStore.all(VersionedDataKinds.Features); - expect(Object.keys(flags).length).toEqual(1); + const flags = await asyncFeatureStore.all(VersionedDataKinds.Features); + expect(Object.keys(flags).length).toEqual(1); - const segments = await asyncFeatureStore.all(VersionedDataKinds.Segments); - expect(Object.keys(segments).length).toEqual(0); + const segments = await asyncFeatureStore.all(VersionedDataKinds.Segments); + expect(Object.keys(segments).length).toEqual(0); - filesystem.watches['file1.json'][0].cb('change', 'file1.json'); - filesystem.watches['file1.json'][0].cb('change', 'file1.json'); + filesystem.watches['file1.json'][0].cb('change', 'file1.json'); + filesystem.watches['file1.json'][0].cb('change', 'file1.json'); - // The handling of the file loading is async, and additionally we debounce - // the callback. So we have to wait a bit to account for the awaits and the debounce. - setTimeout(async () => { - // Once for the start. 
- expect(featureStore.init).toHaveBeenCalledTimes(1); - done(); - }, 100); - }); + await jest.runAllTimersAsync(); + // Once for the start. + expect(featureStore.init).toHaveBeenCalledTimes(1); }); it.each([['yml'], ['yaml']])( 'does not initialize when a yaml file is specified, but no parser is provided %s', async (ext) => { - jest.spyOn(filesystem, 'readFile'); const fileName = `yamlfile.${ext}`; - filesystem.fileData[fileName] = { timestamp: 0, data: '' }; - const factory = new FileDataSourceFactory({ - paths: [fileName], - }); + await createFileDataSource(true, [{ path: fileName, data: '' }]); - const fds = factory.create( - new ClientContext( - '', - new Configuration({ - featureStore, - logger, - }), - { ...basicPlatform, fileSystem: filesystem as unknown as Filesystem }, - ), - featureStore, - ); - - const err = await promisify((cb) => { - fds.start(cb); - }); - - expect((err as any).message).toEqual( + expect(mockErrorHandler.mock.lastCall[0].message).toEqual( `Attempted to parse yaml file (yamlfile.${ext}) without parser.`, ); expect(await asyncFeatureStore.initialized()).toBeFalsy(); - expect(await asyncFeatureStore.all(VersionedDataKinds.Features)).toEqual({}); expect(await asyncFeatureStore.all(VersionedDataKinds.Segments)).toEqual({}); }, ); it.each([['yml'], ['yaml']])('uses the yaml parser when specified %s', async (ext) => { - const parser = jest.fn(() => JSON.parse(allPropertiesJson)); - - jest.spyOn(filesystem, 'readFile'); + const yamlParser = jest.fn(() => JSON.parse(allPropertiesJson)); const fileName = `yamlfile.${ext}`; - filesystem.fileData[fileName] = { timestamp: 0, data: 'the data' }; - const factory = new FileDataSourceFactory({ - paths: [fileName], - yamlParser: parser, - }); - - const fds = factory.create( - new ClientContext( - '', - new Configuration({ - featureStore, - logger, - }), - { ...basicPlatform, fileSystem: filesystem as unknown as Filesystem }, - ), - featureStore, + await createFileDataSource( + true, + [{ path: fileName, 
data: 'the data' }], + undefined, + undefined, + yamlParser, ); - const err = await promisify((cb) => { - fds.start(cb); - }); - - expect(err).toBeUndefined(); + expect(mockErrorHandler).not.toBeCalled(); expect(await asyncFeatureStore.initialized()).toBeTruthy(); const flags = await asyncFeatureStore.all(VersionedDataKinds.Features); @@ -666,6 +441,6 @@ describe('given a mock filesystem and memory feature store', () => { const segments = await asyncFeatureStore.all(VersionedDataKinds.Segments); expect(segments).toEqual({ seg1: segment1 }); expect(filesystem.readFile).toHaveBeenCalledTimes(1); - expect(parser).toHaveBeenCalledWith('the data'); + expect(yamlParser).toHaveBeenCalledWith('the data'); }); }); diff --git a/packages/shared/sdk-server/__tests__/data_sources/PollingProcessor.test.ts b/packages/shared/sdk-server/__tests__/data_sources/PollingProcessor.test.ts index b1361307f..a87b08b96 100644 --- a/packages/shared/sdk-server/__tests__/data_sources/PollingProcessor.test.ts +++ b/packages/shared/sdk-server/__tests__/data_sources/PollingProcessor.test.ts @@ -1,14 +1,13 @@ import { ClientContext } from '@launchdarkly/js-sdk-common'; +import * as mocks from '@launchdarkly/private-js-mocks'; -import { LDFeatureStore } from '../../src/api/subsystems'; -import promisify from '../../src/async/promisify'; +import { LDFeatureStore } from '../../src'; import PollingProcessor from '../../src/data_sources/PollingProcessor'; import Requestor from '../../src/data_sources/Requestor'; import Configuration from '../../src/options/Configuration'; import AsyncStoreFacade from '../../src/store/AsyncStoreFacade'; import InMemoryFeatureStore from '../../src/store/InMemoryFeatureStore'; import VersionedDataKinds from '../../src/store/VersionedDataKinds'; -import basicPlatform from '../evaluation/mocks/platform'; import TestLogger, { LogLevel } from '../Logger'; describe('given an event processor', () => { @@ -16,13 +15,17 @@ describe('given an event processor', () => { 
requestAllData: jest.fn(), }; const longInterval = 100000; - const allData = { flags: { flag: { version: 1 } }, segments: { segment: { version: 1 } } }; + const allData = { + flags: { flag: { version: 1 } }, + segments: { segment: { version: 1 } }, + }; const jsonData = JSON.stringify(allData); let store: LDFeatureStore; let storeFacade: AsyncStoreFacade; let config: Configuration; let processor: PollingProcessor; + let initSuccessHandler: jest.Mock; beforeEach(() => { store = new InMemoryFeatureStore(); @@ -32,10 +35,13 @@ describe('given an event processor', () => { pollInterval: longInterval, logger: new TestLogger(), }); + initSuccessHandler = jest.fn(); + processor = new PollingProcessor( config, requestor as unknown as Requestor, - config.featureStoreFactory(new ClientContext('', config, basicPlatform)), + config.featureStoreFactory(new ClientContext('', config, mocks.basicPlatform)), + initSuccessHandler, ); }); @@ -49,25 +55,25 @@ describe('given an event processor', () => { }); it('polls immediately on start', () => { - processor.start(() => {}); + processor.start(); expect(requestor.requestAllData).toHaveBeenCalledTimes(1); }); - it('calls callback on success', (done) => { + it('calls callback on success', () => { requestor.requestAllData = jest.fn((cb) => cb(undefined, jsonData)); - - processor.start(() => done()); + processor.start(); + expect(initSuccessHandler).toBeCalled(); }); it('initializes the feature store', async () => { requestor.requestAllData = jest.fn((cb) => cb(undefined, jsonData)); - await promisify((cb) => processor.start(cb)); - + processor.start(); const flags = await storeFacade.all(VersionedDataKinds.Features); - expect(flags).toEqual(allData.flags); const segments = await storeFacade.all(VersionedDataKinds.Segments); + + expect(flags).toEqual(allData.flags); expect(segments).toEqual(allData.segments); }); }); @@ -83,6 +89,8 @@ describe('given a polling processor with a short poll duration', () => { let store: LDFeatureStore; let 
config: Configuration; let processor: PollingProcessor; + let initSuccessHandler: jest.Mock; + let errorHandler: jest.Mock; beforeEach(() => { store = new InMemoryFeatureStore(); @@ -91,24 +99,29 @@ describe('given a polling processor with a short poll duration', () => { pollInterval: shortInterval, logger: new TestLogger(), }); + initSuccessHandler = jest.fn(); + errorHandler = jest.fn(); + // Configuration will not let us set this as low as needed for the test. Object.defineProperty(config, 'pollInterval', { value: 0.1 }); processor = new PollingProcessor( config, requestor as unknown as Requestor, - config.featureStoreFactory(new ClientContext('', config, basicPlatform)), + config.featureStoreFactory(new ClientContext('', config, mocks.basicPlatform)), + initSuccessHandler, + errorHandler, ); }); afterEach(() => { processor.stop(); - jest.restoreAllMocks(); + jest.resetAllMocks(); }); it('polls repeatedly', (done) => { requestor.requestAllData = jest.fn((cb) => cb(undefined, jsonData)); - processor.start(() => {}); + processor.start(); setTimeout(() => { expect(requestor.requestAllData.mock.calls.length).toBeGreaterThanOrEqual(4); done(); @@ -126,10 +139,11 @@ describe('given a polling processor with a short poll duration', () => { undefined, ), ); - processor.start((e) => { - expect(e).toBeUndefined(); - }); + processor.start(); + + expect(initSuccessHandler).not.toBeCalled(); + expect(errorHandler).not.toBeCalled(); setTimeout(() => { expect(requestor.requestAllData.mock.calls.length).toBeGreaterThanOrEqual(2); const testLogger = config.logger as TestLogger; @@ -142,9 +156,11 @@ describe('given a polling processor with a short poll duration', () => { it('continues polling after receiving invalid JSON', (done) => { requestor.requestAllData = jest.fn((cb) => cb(undefined, '{sad')); - processor.start((e) => { - expect(e).toBeDefined(); - }); + + processor.start(); + + expect(initSuccessHandler).not.toBeCalled(); + 
expect(errorHandler.mock.lastCall[0].message).toMatch(/malformed json/i); setTimeout(() => { expect(requestor.requestAllData.mock.calls.length).toBeGreaterThanOrEqual(2); @@ -165,9 +181,9 @@ describe('given a polling processor with a short poll duration', () => { undefined, ), ); - processor.start((e) => { - expect(e).toBeDefined(); - }); + processor.start(); + expect(initSuccessHandler).not.toBeCalled(); + expect(errorHandler.mock.lastCall[0].message).toMatch(new RegExp(`${status}.*permanently`)); setTimeout(() => { expect(requestor.requestAllData.mock.calls.length).toBe(1); diff --git a/packages/shared/sdk-server/__tests__/data_sources/Requestor.test.ts b/packages/shared/sdk-server/__tests__/data_sources/Requestor.test.ts index 0077d8caa..4e18f372e 100644 --- a/packages/shared/sdk-server/__tests__/data_sources/Requestor.test.ts +++ b/packages/shared/sdk-server/__tests__/data_sources/Requestor.test.ts @@ -1,4 +1,3 @@ -/* eslint-disable @typescript-eslint/no-unused-vars */ import { EventSource, EventSourceInitDict, @@ -7,11 +6,11 @@ import { Requests, Response, } from '@launchdarkly/js-sdk-common'; +import * as mocks from '@launchdarkly/private-js-mocks'; import promisify from '../../src/async/promisify'; import Requestor from '../../src/data_sources/Requestor'; import Configuration from '../../src/options/Configuration'; -import basicPlatform from '../evaluation/mocks/platform'; describe('given a requestor', () => { let requestor: Requestor; @@ -35,7 +34,6 @@ describe('given a requestor', () => { resetRequestState(); const requests: Requests = { - /* eslint-disable-next-line @typescript-eslint/no-unused-vars */ async fetch(url: string, options?: Options): Promise { return new Promise((a, r) => { if (throwThis) { @@ -74,13 +72,12 @@ describe('given a requestor', () => { }); }, - /* eslint-disable-next-line @typescript-eslint/no-unused-vars */ createEventSource(_url: string, _eventSourceInitDict: EventSourceInitDict): EventSource { throw new Error('Function not 
implemented.'); }, }; - requestor = new Requestor('sdkKey', new Configuration({}), basicPlatform.info, requests); + requestor = new Requestor('sdkKey', new Configuration({}), mocks.basicPlatform.info, requests); }); it('gets data', (done) => { diff --git a/packages/shared/sdk-server/__tests__/data_sources/StreamingProcessor.test.ts b/packages/shared/sdk-server/__tests__/data_sources/StreamingProcessor.test.ts deleted file mode 100644 index ecceb74ad..000000000 --- a/packages/shared/sdk-server/__tests__/data_sources/StreamingProcessor.test.ts +++ /dev/null @@ -1,299 +0,0 @@ -import { - EventSource, - EventSourceInitDict, - Info, - Options, - PlatformData, - Requests, - Response, - SdkData, -} from '@launchdarkly/js-sdk-common'; - -import promisify from '../../src/async/promisify'; -import defaultHeaders from '../../src/data_sources/defaultHeaders'; -import StreamingProcessor from '../../src/data_sources/StreamingProcessor'; -import DiagnosticsManager from '../../src/events/DiagnosticsManager'; -import NullEventSource from '../../src/events/NullEventSource'; -import Configuration from '../../src/options/Configuration'; -import AsyncStoreFacade from '../../src/store/AsyncStoreFacade'; -import InMemoryFeatureStore from '../../src/store/InMemoryFeatureStore'; -import VersionedDataKinds from '../../src/store/VersionedDataKinds'; -import basicPlatform from '../evaluation/mocks/platform'; -import TestLogger, { LogLevel } from '../Logger'; - -const sdkKey = 'my-sdk-key'; - -const info: Info = { - platformData(): PlatformData { - return {}; - }, - sdkData(): SdkData { - const sdkData: SdkData = { - version: '2.2.2', - }; - return sdkData; - }, -}; - -function createRequests(cb: (es: NullEventSource) => void): Requests { - return { - /* eslint-disable-next-line @typescript-eslint/no-unused-vars */ - fetch(url: string, options?: Options | undefined): Promise { - throw new Error('Function not implemented.'); - }, - createEventSource(url: string, eventSourceInitDict: 
EventSourceInitDict): EventSource { - const es = new NullEventSource(url, eventSourceInitDict); - cb(es); - return es; - }, - }; -} - -describe('given a stream processor with mock event source', () => { - let es: NullEventSource; - let requests: Requests; - let featureStore: InMemoryFeatureStore; - let streamProcessor: StreamingProcessor; - let config: Configuration; - let asyncStore: AsyncStoreFacade; - let logger: TestLogger; - let diagnosticsManager: DiagnosticsManager; - - beforeEach(() => { - requests = createRequests((nes) => { - es = nes; - }); - featureStore = new InMemoryFeatureStore(); - asyncStore = new AsyncStoreFacade(featureStore); - logger = new TestLogger(); - config = new Configuration({ - streamUri: 'http://test', - baseUri: 'http://base.test', - eventsUri: 'http://events.test', - featureStore, - logger, - }); - diagnosticsManager = new DiagnosticsManager('sdk-key', config, basicPlatform, featureStore); - streamProcessor = new StreamingProcessor( - sdkKey, - config, - requests, - info, - featureStore, - diagnosticsManager, - ); - }); - - async function promiseStart() { - return promisify((cb) => streamProcessor.start(cb)); - } - - function expectJsonError(err: { message?: string }) { - expect(err).toBeDefined(); - expect(err.message).toEqual('Malformed JSON data in event stream'); - logger.expectMessages([ - { - level: LogLevel.Error, - matches: /Stream received invalid data in/, - }, - ]); - } - - it('uses expected URL', () => { - streamProcessor.start(); - expect(es.url).toEqual(`${config.serviceEndpoints.streaming}/all`); - }); - - it('sets expected headers', () => { - streamProcessor.start(); - expect(es.options.headers).toMatchObject(defaultHeaders(sdkKey, config, info)); - }); - - describe('when putting a message', () => { - const putData = { - data: { - flags: { - flagkey: { key: 'flagkey', version: 1 }, - }, - segments: { - segkey: { key: 'segkey', version: 2 }, - }, - }, - }; - - it('causes flags and segments to be stored', async () => { 
- streamProcessor.start(); - es.handlers.put({ data: JSON.stringify(putData) }); - const initialized = await asyncStore.initialized(); - expect(initialized).toBeTruthy(); - - const f = await asyncStore.get(VersionedDataKinds.Features, 'flagkey'); - expect(f?.version).toEqual(1); - const s = await asyncStore.get(VersionedDataKinds.Segments, 'segkey'); - expect(s?.version).toEqual(2); - }); - - it('calls initialization callback', async () => { - const promise = promiseStart(); - es.handlers.put({ data: JSON.stringify(putData) }); - expect(await promise).toBeUndefined(); - }); - - it('passes error to callback if data is invalid', async () => { - streamProcessor.start(); - - const promise = promiseStart(); - es.handlers.put({ data: '{not-good' }); - const result = await promise; - expectJsonError(result as any); - }); - - it('creates a stream init event', async () => { - const startTime = Date.now(); - streamProcessor.start(); - es.handlers.put({ data: JSON.stringify(putData) }); - await asyncStore.initialized(); - - const event = diagnosticsManager.createStatsEventAndReset(0, 0, 0); - expect(event.streamInits.length).toEqual(1); - const si = event.streamInits[0]; - expect(si.timestamp).toBeGreaterThanOrEqual(startTime); - expect(si.failed).toBeFalsy(); - expect(si.durationMillis).toBeGreaterThanOrEqual(0); - }); - }); - - describe('when patching a message', () => { - it('updates a patched flag', async () => { - streamProcessor.start(); - const patchData = { - path: '/flags/flagkey', - data: { key: 'flagkey', version: 1 }, - }; - - es.handlers.patch({ data: JSON.stringify(patchData) }); - - const f = await asyncStore.get(VersionedDataKinds.Features, 'flagkey'); - expect(f!.version).toEqual(1); - }); - - it('updates a patched segment', async () => { - streamProcessor.start(); - const patchData = { - path: '/segments/segkey', - data: { key: 'segkey', version: 1 }, - }; - - es.handlers.patch({ data: JSON.stringify(patchData) }); - - const s = await 
asyncStore.get(VersionedDataKinds.Segments, 'segkey'); - expect(s!.version).toEqual(1); - }); - - it('passes error to callback if data is invalid', async () => { - streamProcessor.start(); - - const promise = promiseStart(); - es.handlers.patch({ data: '{not-good' }); - const result = await promise; - expectJsonError(result as any); - }); - }); - - describe('when deleting a message', () => { - it('deletes a flag', async () => { - streamProcessor.start(); - const flag = { key: 'flagkey', version: 1 }; - await asyncStore.upsert(VersionedDataKinds.Features, flag); - const f = await asyncStore.get(VersionedDataKinds.Features, 'flagkey'); - expect(f!.version).toEqual(1); - - const deleteData = { path: `/flags/${flag.key}`, version: 2 }; - - es.handlers.delete({ data: JSON.stringify(deleteData) }); - - const f2 = await asyncStore.get(VersionedDataKinds.Features, 'flagkey'); - expect(f2).toBe(null); - }); - - it('deletes a segment', async () => { - streamProcessor.start(); - const segment = { key: 'segkey', version: 1 }; - await asyncStore.upsert(VersionedDataKinds.Segments, segment); - const s = await asyncStore.get(VersionedDataKinds.Segments, 'segkey'); - expect(s!.version).toEqual(1); - - const deleteData = { path: `/segments/${segment.key}`, version: 2 }; - - es.handlers.delete({ data: JSON.stringify(deleteData) }); - - const s2 = await asyncStore.get(VersionedDataKinds.Segments, 'segkey'); - expect(s2).toBe(null); - }); - - it('passes error to callback if data is invalid', async () => { - streamProcessor.start(); - - const promise = promiseStart(); - es.handlers.delete({ data: '{not-good' }); - const result = await promise; - expectJsonError(result as any); - }); - }); - - describe.each([400, 408, 429, 500, 503, undefined])('given recoverable http errors', (status) => { - const err = { - status, - message: 'sorry', - }; - - it(`continues retrying after error: ${status}`, () => { - const startTime = Date.now(); - streamProcessor.start(); - es.simulateError(err as 
any); - - logger.expectMessages([ - { - level: LogLevel.Warn, - matches: status - ? new RegExp(`error ${err.status}.*will retry`) - : /Received I\/O error \(sorry\) for streaming request - will retry/, - }, - ]); - - const event = diagnosticsManager.createStatsEventAndReset(0, 0, 0); - expect(event.streamInits.length).toEqual(1); - const si = event.streamInits[0]; - expect(si.timestamp).toBeGreaterThanOrEqual(startTime); - expect(si.failed).toBeTruthy(); - expect(si.durationMillis).toBeGreaterThanOrEqual(0); - }); - }); - - describe.each([401, 403])('given unrecoverable http errors', (status) => { - const startTime = Date.now(); - const err = { - status, - message: 'sorry', - }; - - it(`stops retrying after error: ${status}`, () => { - streamProcessor.start(); - es.simulateError(err as any); - - logger.expectMessages([ - { - level: LogLevel.Error, - matches: /Received error.*giving up permanently/, - }, - ]); - - const event = diagnosticsManager.createStatsEventAndReset(0, 0, 0); - expect(event.streamInits.length).toEqual(1); - const si = event.streamInits[0]; - expect(si.timestamp).toBeGreaterThanOrEqual(startTime); - expect(si.failed).toBeTruthy(); - expect(si.durationMillis).toBeGreaterThanOrEqual(0); - }); - }); -}); diff --git a/packages/shared/sdk-server/__tests__/data_sources/defaultHeaders.test.ts b/packages/shared/sdk-server/__tests__/data_sources/defaultHeaders.test.ts deleted file mode 100644 index 5da91e08b..000000000 --- a/packages/shared/sdk-server/__tests__/data_sources/defaultHeaders.test.ts +++ /dev/null @@ -1,68 +0,0 @@ -import { Info, PlatformData, SdkData } from '@launchdarkly/js-sdk-common'; - -import defaultHeaders from '../../src/data_sources/defaultHeaders'; -import Configuration from '../../src/options/Configuration'; - -const makeInfo = (wrapperName?: string, wrapperVersion?: string, userAgentBase?: string): Info => ({ - platformData(): PlatformData { - return {}; - }, - sdkData(): SdkData { - const sdkData: SdkData = { - version: '2.2.2', 
- userAgentBase, - wrapperName, - wrapperVersion, - }; - return sdkData; - }, -}); - -it('sets SDK key', () => { - const config = new Configuration({}); - const h = defaultHeaders('my-sdk-key', config, makeInfo()); - expect(h).toMatchObject({ authorization: 'my-sdk-key' }); -}); - -it('sets the default user agent', () => { - const config = new Configuration({}); - const h = defaultHeaders('my-sdk-key', config, makeInfo()); - expect(h).toMatchObject({ 'user-agent': 'NodeJSClient/2.2.2' }); -}); - -it('sets the SDK specific user agent', () => { - const config = new Configuration({}); - const h = defaultHeaders('my-sdk-key', config, makeInfo(undefined, undefined, 'CATS')); - expect(h).toMatchObject({ 'user-agent': 'CATS/2.2.2' }); -}); - -it('does not include wrapper header by default', () => { - const config = new Configuration({}); - const h = defaultHeaders('my-sdk-key', config, makeInfo()); - expect(h['x-launchdarkly-wrapper']).toBeUndefined(); -}); - -it('sets wrapper header with name only', () => { - const config = new Configuration({}); - const h = defaultHeaders('my-sdk-key', config, makeInfo('my-wrapper')); - expect(h).toMatchObject({ 'x-launchdarkly-wrapper': 'my-wrapper' }); -}); - -it('sets wrapper header with name and version', () => { - const config = new Configuration({}); - const h = defaultHeaders('my-sdk-key', config, makeInfo('my-wrapper', '2.0')); - expect(h).toMatchObject({ 'x-launchdarkly-wrapper': 'my-wrapper/2.0' }); -}); - -it('sets the X-LaunchDarkly-Tags header with valid tags.', () => { - const config = new Configuration({ - application: { - id: 'test-application', - version: 'test-version', - }, - }); - const h = defaultHeaders('my-sdk-key', config, makeInfo('my-wrapper')); - expect(h).toMatchObject({ - 'x-launchdarkly-tags': 'application-id/test-application application-version/test-version', - }); -}); diff --git a/packages/shared/sdk-server/__tests__/evaluation/Bucketer.test.ts 
b/packages/shared/sdk-server/__tests__/evaluation/Bucketer.test.ts index cae445c2b..c2bc82276 100644 --- a/packages/shared/sdk-server/__tests__/evaluation/Bucketer.test.ts +++ b/packages/shared/sdk-server/__tests__/evaluation/Bucketer.test.ts @@ -3,9 +3,9 @@ // should contain a consistency test. // Testing here can only validate we are providing correct inputs to the hashing algorithm. import { AttributeReference, Context, LDContext } from '@launchdarkly/js-sdk-common'; +import * as mocks from '@launchdarkly/private-js-mocks'; import Bucketer from '../../src/evaluation/Bucketer'; -import { crypto, hasher } from './mocks/hasher'; describe.each< [ @@ -65,7 +65,7 @@ describe.each< const validatedContext = Context.fromLDContext(context); const attrRef = new AttributeReference(attr); - const bucketer = new Bucketer(crypto); + const bucketer = new Bucketer(mocks.crypto); const [bucket, hadContext] = bucketer.bucket( validatedContext!, key, @@ -75,12 +75,12 @@ describe.each< seed, ); - // The hasher always returns the same value. This just checks that it converts it to a number + // The mocks.hasher always returns the same value. This just checks that it converts it to a number // in the expected way. 
expect(bucket).toBeCloseTo(0.07111111110140983, 5); expect(hadContext).toBeTruthy(); - expect(hasher.update).toHaveBeenCalledWith(expected); - expect(hasher.digest).toHaveBeenCalledWith('hex'); + expect(mocks.hasher.update).toHaveBeenCalledWith(expected); + expect(mocks.hasher.digest).toHaveBeenCalledWith('hex'); }); afterEach(() => { @@ -104,7 +104,7 @@ describe.each([ }); const attrRef = new AttributeReference(attr); - const bucketer = new Bucketer(crypto); + const bucketer = new Bucketer(mocks.crypto); const [bucket, hadContext] = bucketer.bucket( validatedContext!, 'key', @@ -115,8 +115,8 @@ describe.each([ ); expect(bucket).toEqual(0); expect(hadContext).toEqual(kind === 'org'); - expect(hasher.update).toBeCalledTimes(0); - expect(hasher.digest).toBeCalledTimes(0); + expect(mocks.hasher.update).toBeCalledTimes(0); + expect(mocks.hasher.digest).toBeCalledTimes(0); }); afterEach(() => { diff --git a/packages/shared/sdk-server/__tests__/evaluation/Evaluator.bucketing.test.ts b/packages/shared/sdk-server/__tests__/evaluation/Evaluator.bucketing.test.ts index 325330c97..51acff48d 100644 --- a/packages/shared/sdk-server/__tests__/evaluation/Evaluator.bucketing.test.ts +++ b/packages/shared/sdk-server/__tests__/evaluation/Evaluator.bucketing.test.ts @@ -1,12 +1,12 @@ import { Context } from '@launchdarkly/js-sdk-common'; +import * as mocks from '@launchdarkly/private-js-mocks'; import { Flag } from '../../src/evaluation/data/Flag'; import { Rollout } from '../../src/evaluation/data/Rollout'; import Evaluator from '../../src/evaluation/Evaluator'; import noQueries from './mocks/noQueries'; -import basicPlatform from './mocks/platform'; -const evaluator = new Evaluator(basicPlatform, noQueries); +const evaluator = new Evaluator(mocks.basicPlatform, noQueries); describe('given a flag with a rollout', () => { const seed = 61; diff --git a/packages/shared/sdk-server/__tests__/evaluation/Evaluator.clause.test.ts 
b/packages/shared/sdk-server/__tests__/evaluation/Evaluator.clause.test.ts index 30e202d8d..9a2e0c9aa 100644 --- a/packages/shared/sdk-server/__tests__/evaluation/Evaluator.clause.test.ts +++ b/packages/shared/sdk-server/__tests__/evaluation/Evaluator.clause.test.ts @@ -1,4 +1,5 @@ import { AttributeReference, Context, LDContext } from '@launchdarkly/js-sdk-common'; +import * as mocks from '@launchdarkly/private-js-mocks'; import { Clause } from '../../src/evaluation/data/Clause'; import { Flag } from '../../src/evaluation/data/Flag'; @@ -10,9 +11,8 @@ import { makeClauseThatMatchesUser, } from './flags'; import noQueries from './mocks/noQueries'; -import basicPlatform from './mocks/platform'; -const evaluator = new Evaluator(basicPlatform, noQueries); +const evaluator = new Evaluator(mocks.basicPlatform, noQueries); // Either a legacy user, or context with equivalent user. describe('given user clauses and contexts', () => { diff --git a/packages/shared/sdk-server/__tests__/evaluation/Evaluator.rules.test.ts b/packages/shared/sdk-server/__tests__/evaluation/Evaluator.rules.test.ts index 69ccd8287..60de3b117 100644 --- a/packages/shared/sdk-server/__tests__/evaluation/Evaluator.rules.test.ts +++ b/packages/shared/sdk-server/__tests__/evaluation/Evaluator.rules.test.ts @@ -1,6 +1,7 @@ // Tests of flag evaluation at the rule level. Clause-level behavior is covered // in detail in Evaluator.clause.tests and (TODO: File for segments). 
import { AttributeReference, Context, LDContext } from '@launchdarkly/js-sdk-common'; +import * as mocks from '@launchdarkly/private-js-mocks'; import { Clause } from '../../src/evaluation/data/Clause'; import { Flag } from '../../src/evaluation/data/Flag'; @@ -12,13 +13,12 @@ import { makeFlagWithRules, } from './flags'; import noQueries from './mocks/noQueries'; -import basicPlatform from './mocks/platform'; const basicUser: LDContext = { key: 'userkey' }; const basicSingleKindUser: LDContext = { kind: 'user', key: 'userkey' }; const basicMultiKindUser: LDContext = { kind: 'multi', user: { key: 'userkey' } }; -const evaluator = new Evaluator(basicPlatform, noQueries); +const evaluator = new Evaluator(mocks.basicPlatform, noQueries); describe('when evaluating user equivalent contexts', () => { const matchClause = makeClauseThatMatchesUser(basicUser); diff --git a/packages/shared/sdk-server/__tests__/evaluation/Evaluator.segments.test.ts b/packages/shared/sdk-server/__tests__/evaluation/Evaluator.segments.test.ts index f2db08abf..3e71c8690 100644 --- a/packages/shared/sdk-server/__tests__/evaluation/Evaluator.segments.test.ts +++ b/packages/shared/sdk-server/__tests__/evaluation/Evaluator.segments.test.ts @@ -1,6 +1,4 @@ /* eslint-disable class-methods-use-this */ - -/* eslint-disable @typescript-eslint/no-unused-vars */ import { AttributeReference, Context, @@ -9,6 +7,7 @@ import { Hmac, LDContext, } from '@launchdarkly/js-sdk-common'; +import * as mocks from '@launchdarkly/private-js-mocks'; import { BigSegmentStoreMembership } from '../../src/api/interfaces'; import { Flag } from '../../src/evaluation/data/Flag'; @@ -20,7 +19,6 @@ import { makeClauseThatMatchesUser, makeFlagWithSegmentMatch, } from './flags'; -import basicPlatform from './mocks/platform'; const basicUser: LDContext = { key: 'userkey' }; const basicSingleKindUser: LDContext = { kind: 'user', key: 'userkey' }; @@ -45,7 +43,7 @@ class TestQueries implements Queries { } getBigSegmentsMembership( - 
userKey: string, + _userKey: string, ): Promise<[BigSegmentStoreMembership | null, string] | undefined> { throw new Error('Method not implemented.'); } @@ -62,7 +60,10 @@ describe('when evaluating user equivalent contexts for segments', () => { included: [basicUser.key], version: 1, }; - const evaluator = new Evaluator(basicPlatform, new TestQueries({ segments: [segment] })); + const evaluator = new Evaluator( + mocks.basicPlatform, + new TestQueries({ segments: [segment] }), + ); const flag = makeFlagWithSegmentMatch(segment); const res = await evaluator.evaluate(flag, Context.fromLDContext(context)); expect(res.detail.value).toBe(true); @@ -77,7 +78,10 @@ describe('when evaluating user equivalent contexts for segments', () => { excluded: [basicUser.key], version: 1, }; - const evaluator = new Evaluator(basicPlatform, new TestQueries({ segments: [segment] })); + const evaluator = new Evaluator( + mocks.basicPlatform, + new TestQueries({ segments: [segment] }), + ); const flag = makeFlagWithSegmentMatch(segment); const res = await evaluator.evaluate(flag, Context.fromLDContext(context)); expect(res.detail.value).toBe(false); @@ -92,7 +96,7 @@ describe('when evaluating user equivalent contexts for segments', () => { excluded: [basicUser.key], version: 1, }; - const evaluator = new Evaluator(basicPlatform, new TestQueries({ segments: [] })); + const evaluator = new Evaluator(mocks.basicPlatform, new TestQueries({ segments: [] })); const flag = makeFlagWithSegmentMatch(segment); const res = await evaluator.evaluate(flag, Context.fromLDContext(context)); expect(res.detail.value).toBe(false); @@ -105,7 +109,7 @@ describe('when evaluating user equivalent contexts for segments', () => { included: ['foo'], version: 1, }; - const evaluator = new Evaluator(basicPlatform, new TestQueries({ segments: [segment] })); + const evaluator = new Evaluator(mocks.basicPlatform, new TestQueries({ segments: [segment] })); const flag = makeFlagWithSegmentMatch(segment); const user = { 
key: 'bar' }; const res = await evaluator.evaluate(flag, Context.fromLDContext(user)); @@ -118,7 +122,7 @@ describe('when evaluating user equivalent contexts for segments', () => { included: ['foo'], version: 1, }; - const evaluator = new Evaluator(basicPlatform, new TestQueries({ segments: [segment] })); + const evaluator = new Evaluator(mocks.basicPlatform, new TestQueries({ segments: [segment] })); const flag = makeFlagWithSegmentMatch(segment); flag.rules[0].clauses![0].negate = true; const user = { key: 'bar' }; @@ -135,7 +139,10 @@ describe('when evaluating user equivalent contexts for segments', () => { excluded: [basicUser.key], version: 1, }; - const evaluator = new Evaluator(basicPlatform, new TestQueries({ segments: [segment] })); + const evaluator = new Evaluator( + mocks.basicPlatform, + new TestQueries({ segments: [segment] }), + ); const flag = makeFlagWithSegmentMatch(segment); const res = await evaluator.evaluate(flag, Context.fromLDContext(context)); expect(res.detail.value).toBe(true); @@ -156,7 +163,10 @@ describe('when evaluating user equivalent contexts for segments', () => { ], version: 1, }; - const evaluator = new Evaluator(basicPlatform, new TestQueries({ segments: [segment] })); + const evaluator = new Evaluator( + mocks.basicPlatform, + new TestQueries({ segments: [segment] }), + ); const flag = makeFlagWithSegmentMatch(segment); const res = await evaluator.evaluate(flag, Context.fromLDContext(context)); expect(res.detail.value).toBe(true); @@ -178,7 +188,7 @@ describe('when evaluating user equivalent contexts for segments', () => { ], version: 1, }; - const evaluator = new Evaluator(basicPlatform, new TestQueries({ segments: [segment] })); + const evaluator = new Evaluator(mocks.basicPlatform, new TestQueries({ segments: [segment] })); const flag = makeFlagWithSegmentMatch(segment); const res = await evaluator.evaluate(flag, Context.fromLDContext(basicUser)); expect(res.detail.reason).toEqual({ kind: 'ERROR', errorKind: 'MALFORMED_FLAG' 
}); @@ -199,7 +209,10 @@ describe('when evaluating user equivalent contexts for segments', () => { ], version: 1, }; - const evaluator = new Evaluator(basicPlatform, new TestQueries({ segments: [segment] })); + const evaluator = new Evaluator( + mocks.basicPlatform, + new TestQueries({ segments: [segment] }), + ); const flag = makeFlagWithSegmentMatch(segment); const res = await evaluator.evaluate(flag, Context.fromLDContext(context)); expect(res.detail.value).toBe(false); @@ -231,7 +244,7 @@ describe('when evaluating user equivalent contexts for segments', () => { ], version: 1, }; - const evaluator = new Evaluator(basicPlatform, new TestQueries({ segments: [segment] })); + const evaluator = new Evaluator(mocks.basicPlatform, new TestQueries({ segments: [segment] })); const flag = makeFlagWithSegmentMatch(segment); const res = await evaluator.evaluate(flag, Context.fromLDContext(user)); expect(res.detail.value).toBe(true); @@ -249,7 +262,7 @@ describe('when evaluating user equivalent contexts for segments', () => { ], version: 1, }; - const evaluator = new Evaluator(basicPlatform, new TestQueries({ segments: [segment] })); + const evaluator = new Evaluator(mocks.basicPlatform, new TestQueries({ segments: [segment] })); const flag = makeFlagWithSegmentMatch(segment); const res = await evaluator.evaluate(flag, Context.fromLDContext(user)); expect(res.detail.value).toBe(false); @@ -281,7 +294,7 @@ describe('when evaluating user equivalent contexts for segments', () => { }, }; - const bucketingPlatform = { ...basicPlatform, crypto }; + const bucketingPlatform = { ...mocks.basicPlatform, crypto }; const context = Context.fromLDContext({ contextKind: 'user', key: 'userkey' }); const segment1: Segment = { @@ -338,7 +351,10 @@ describe('Evaluator - segment match for non-user contexts', () => { includedContexts: [{ contextKind: 'org', values: [singleKind.key] }], version: 1, }; - const evaluator = new Evaluator(basicPlatform, new TestQueries({ segments: [segment] })); + 
const evaluator = new Evaluator( + mocks.basicPlatform, + new TestQueries({ segments: [segment] }), + ); const flag = makeFlagWithSegmentMatch(segment); const res = await evaluator.evaluate(flag, Context.fromLDContext(context)); expect(res.detail.value).toBe(true); @@ -370,7 +386,7 @@ describe('Evaluator - segment match for non-user contexts', () => { version: 1, }; const evaluator = new Evaluator( - basicPlatform, + mocks.basicPlatform, new TestQueries({ segments: [segment1, segment2] }), ); const flag = makeFlagWithSegmentMatch(segment2); @@ -397,7 +413,7 @@ describe('Evaluator - segment match for non-user contexts', () => { ], version: 1, }; - const evaluator = new Evaluator(basicPlatform, new TestQueries({ segments: [segment] })); + const evaluator = new Evaluator(mocks.basicPlatform, new TestQueries({ segments: [segment] })); const flag = makeFlagWithSegmentMatch(segment); const res = await evaluator.evaluate(flag, Context.fromLDContext(singleKind)); expect(res.detail.reason).toEqual({ kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }); @@ -435,7 +451,7 @@ describe('Evaluator - segment match for non-user contexts', () => { version: 1, }; const evaluator = new Evaluator( - basicPlatform, + mocks.basicPlatform, new TestQueries({ segments: [segment1, segment2] }), ); const flag = makeFlagWithSegmentMatch(segment2); @@ -451,7 +467,10 @@ describe('Evaluator - segment match for non-user contexts', () => { includedContexts: [{ contextKind: 'org', values: ['otherKey'] }], version: 1, }; - const evaluator = new Evaluator(basicPlatform, new TestQueries({ segments: [segment] })); + const evaluator = new Evaluator( + mocks.basicPlatform, + new TestQueries({ segments: [segment] }), + ); const flag = makeFlagWithSegmentMatch(segment); const res = await evaluator.evaluate(flag, Context.fromLDContext(context)); expect(res.detail.value).toBe(false); @@ -466,7 +485,10 @@ describe('Evaluator - segment match for non-user contexts', () => { excludedContexts: [{ contextKind: 'org', 
values: [singleKind.key] }], version: 1, }; - const evaluator = new Evaluator(basicPlatform, new TestQueries({ segments: [segment] })); + const evaluator = new Evaluator( + mocks.basicPlatform, + new TestQueries({ segments: [segment] }), + ); const flag = makeFlagWithSegmentMatch(segment); const res = await evaluator.evaluate(flag, Context.fromLDContext(context)); expect(res.detail.value).toBe(false); @@ -479,7 +501,7 @@ describe('Evaluator - segment match for non-user contexts', () => { includedContexts: [{ contextKind: 'notOrg', values: [singleKind.key] }], version: 1, }; - const evaluator = new Evaluator(basicPlatform, new TestQueries({ segments: [segment] })); + const evaluator = new Evaluator(mocks.basicPlatform, new TestQueries({ segments: [segment] })); const flag = makeFlagWithSegmentMatch(segment); const res = await evaluator.evaluate(flag, Context.fromLDContext(context)); expect(res.detail.value).toBe(false); diff --git a/packages/shared/sdk-server/__tests__/evaluation/Evaluator.test.ts b/packages/shared/sdk-server/__tests__/evaluation/Evaluator.test.ts index 3a39e66e5..a51cd35e2 100644 --- a/packages/shared/sdk-server/__tests__/evaluation/Evaluator.test.ts +++ b/packages/shared/sdk-server/__tests__/evaluation/Evaluator.test.ts @@ -1,11 +1,11 @@ import { Context, LDContext } from '@launchdarkly/js-sdk-common'; +import * as mocks from '@launchdarkly/private-js-mocks'; import { Flag } from '../../src/evaluation/data/Flag'; import EvalResult from '../../src/evaluation/EvalResult'; import Evaluator from '../../src/evaluation/Evaluator'; import Reasons from '../../src/evaluation/Reasons'; import noQueries from './mocks/noQueries'; -import basicPlatform from './mocks/platform'; const offBaseFlag = { key: 'feature0', @@ -32,7 +32,7 @@ describe.each<[Flag, LDContext, EvalResult | undefined]>([ EvalResult.forSuccess('two', Reasons.Off, 2), ], ])('Given off flags and an evaluator', (flag, context, expected) => { - const evaluator = new Evaluator(basicPlatform, 
noQueries); + const evaluator = new Evaluator(mocks.basicPlatform, noQueries); it(`produces the expected evaluation result for context: ${context.key} ${ // @ts-ignore @@ -136,7 +136,7 @@ describe.each<[Flag, LDContext, EvalResult | undefined]>([ EvalResult.forSuccess('one', Reasons.TargetMatch, 1), ], ])('given flag configurations with different targets that match', (flag, context, expected) => { - const evaluator = new Evaluator(basicPlatform, noQueries); + const evaluator = new Evaluator(mocks.basicPlatform, noQueries); it(`produces the expected evaluation result for context: ${context.key} ${ // @ts-ignore context.kind diff --git a/packages/shared/sdk-server/__tests__/evaluation/mocks/platform.ts b/packages/shared/sdk-server/__tests__/evaluation/mocks/platform.ts deleted file mode 100644 index a4ddab9b8..000000000 --- a/packages/shared/sdk-server/__tests__/evaluation/mocks/platform.ts +++ /dev/null @@ -1,42 +0,0 @@ -import { - EventSource, - EventSourceInitDict, - Info, - Options, - Platform, - PlatformData, - Requests, - Response, - SdkData, -} from '@launchdarkly/js-sdk-common'; - -import { crypto } from './hasher'; - -const info: Info = { - platformData(): PlatformData { - return {}; - }, - sdkData(): SdkData { - return {}; - }, -}; - -const requests: Requests = { - /* eslint-disable-next-line @typescript-eslint/no-unused-vars */ - fetch(url: string, options?: Options): Promise { - throw new Error('Function not implemented.'); - }, - - /* eslint-disable-next-line @typescript-eslint/no-unused-vars */ - createEventSource(url: string, eventSourceInitDict: EventSourceInitDict): EventSource { - throw new Error('Function not implemented.'); - }, -}; - -const basicPlatform: Platform = { - info, - crypto, - requests, -}; - -export default basicPlatform; diff --git a/packages/shared/sdk-server/__tests__/evaluation/variations.test.ts b/packages/shared/sdk-server/__tests__/evaluation/variations.test.ts index fed62c6d7..766f4b96c 100644 --- 
a/packages/shared/sdk-server/__tests__/evaluation/variations.test.ts +++ b/packages/shared/sdk-server/__tests__/evaluation/variations.test.ts @@ -1,9 +1,12 @@ +import { internal } from '@launchdarkly/js-sdk-common'; + import { Flag } from '../../src/evaluation/data/Flag'; -import ErrorKinds from '../../src/evaluation/ErrorKinds'; import EvalResult from '../../src/evaluation/EvalResult'; import Reasons from '../../src/evaluation/Reasons'; import { getOffVariation, getVariation } from '../../src/evaluation/variations'; +const { ErrorKinds } = internal; + const baseFlag = { key: 'feature0', version: 1, diff --git a/packages/shared/sdk-server/__tests__/events/DiagnosticsManager.test.ts b/packages/shared/sdk-server/__tests__/events/DiagnosticsManager.test.ts deleted file mode 100644 index 127cd9f5d..000000000 --- a/packages/shared/sdk-server/__tests__/events/DiagnosticsManager.test.ts +++ /dev/null @@ -1,319 +0,0 @@ -/* eslint-disable @typescript-eslint/no-unused-vars */ -import { - EventSource, - EventSourceInitDict, - Info, - Options, - Platform, - PlatformData, - Requests, - Response, - SdkData, -} from '@launchdarkly/js-sdk-common'; - -import { DataKind } from '../../src/api/interfaces'; -import { - LDFeatureStore, - LDFeatureStoreDataStorage, - LDFeatureStoreItem, - LDFeatureStoreKindData, - LDKeyedFeatureStoreItem, -} from '../../src/api/subsystems'; -import DiagnosticsManager from '../../src/events/DiagnosticsManager'; -import Configuration from '../../src/options/Configuration'; -import InMemoryFeatureStore from '../../src/store/InMemoryFeatureStore'; -import { crypto } from '../evaluation/mocks/hasher'; - -const info: Info = { - platformData(): PlatformData { - return { - os: { - name: 'An OS', - version: '1.0.1', - arch: 'An Arch', - }, - name: 'The SDK Name', - additional: { - nodeVersion: '42', - }, - }; - }, - sdkData(): SdkData { - return { - name: 'An SDK', - version: '2.0.2', - }; - }, -}; - -const requests: Requests = { - /* eslint-disable-next-line 
@typescript-eslint/no-unused-vars */ - fetch(url: string, options?: Options): Promise { - throw new Error('Function not implemented.'); - }, - - /* eslint-disable-next-line @typescript-eslint/no-unused-vars */ - createEventSource(url: string, eventSourceInitDict: EventSourceInitDict): EventSource { - throw new Error('Function not implemented.'); - }, - - /** - * Returns true if a proxy is configured. - */ - usingProxy: () => false, - - /** - * Returns true if the proxy uses authentication. - */ - usingProxyAuth: () => false, -}; - -const basicPlatform: Platform = { - info, - crypto, - requests, -}; - -describe('given a diagnostics manager', () => { - let manager: DiagnosticsManager; - - beforeEach(() => { - jest.spyOn(Date, 'now').mockImplementation(() => 7777); - manager = new DiagnosticsManager( - 'my-sdk-key', - new Configuration({}), - basicPlatform, - new InMemoryFeatureStore(), - ); - }); - - afterEach(() => { - jest.resetAllMocks(); - }); - - it('uses the last 6 characters of the SDK key in the diagnostic id', () => { - const { id } = manager.createInitEvent(); - expect(id.sdkKeySuffix).toEqual('dk-key'); - }); - - it('creates random UUID', () => { - const manager2 = new DiagnosticsManager( - 'my-sdk-key', - new Configuration({}), - basicPlatform, - new InMemoryFeatureStore(), - ); - - const { id } = manager.createInitEvent(); - const { id: id2 } = manager2.createInitEvent(); - expect(id.diagnosticId).toBeTruthy(); - expect(id2.diagnosticId).toBeTruthy(); - expect(id.diagnosticId).not.toEqual(id2.diagnosticId); - }); - - it('puts the start time into the init event', () => { - const { creationDate } = manager.createInitEvent(); - expect(creationDate).toEqual(7777); - }); - - it('puts SDK data into the init event', () => { - const { sdk } = manager.createInitEvent(); - expect(sdk).toEqual({ - name: 'An SDK', - version: '2.0.2', - }); - }); - - it('puts platform data into the init event', () => { - const { platform } = manager.createInitEvent(); - 
expect(platform).toEqual({ - name: 'The SDK Name', - osName: 'An OS', - osVersion: '1.0.1', - osArch: 'An Arch', - nodeVersion: '42', - }); - }); - - it('creates periodic event from stats, then resets', () => { - manager.recordStreamInit(7778, true, 1000); - manager.recordStreamInit(7779, false, 550); - - jest.spyOn(Date, 'now').mockImplementation(() => 8888); - - const event1 = manager.createStatsEventAndReset(4, 5, 6); - - expect(event1).toMatchObject({ - kind: 'diagnostic', - dataSinceDate: 7777, - droppedEvents: 4, - deduplicatedUsers: 5, - eventsInLastBatch: 6, - streamInits: [ - { - timestamp: 7778, - failed: true, - durationMillis: 1000, - }, - { - timestamp: 7779, - failed: false, - durationMillis: 550, - }, - ], - }); - - expect(event1.creationDate).toEqual(8888); - - jest.spyOn(Date, 'now').mockImplementation(() => 9999); - const event2 = manager.createStatsEventAndReset(1, 2, 3); - - expect(event2).toMatchObject({ - kind: 'diagnostic', - dataSinceDate: event1.creationDate, - droppedEvents: 1, - deduplicatedUsers: 2, - eventsInLastBatch: 3, - streamInits: [], - }); - - expect(event2.creationDate).toEqual(9999); - }); -}); - -const fakeStore: LDFeatureStore = { - getDescription: () => 'WeirdStore', - get(kind: DataKind, key: string, callback: (res: LDFeatureStoreItem | null) => void): void { - throw new Error('Function not implemented.'); - }, - all(kind: DataKind, callback: (res: LDFeatureStoreKindData) => void): void { - throw new Error('Function not implemented.'); - }, - init(allData: LDFeatureStoreDataStorage, callback: () => void): void { - throw new Error('Function not implemented.'); - }, - delete(kind: DataKind, key: string, version: number, callback: () => void): void { - throw new Error('Function not implemented.'); - }, - upsert(kind: DataKind, data: LDKeyedFeatureStoreItem, callback: () => void): void { - throw new Error('Function not implemented.'); - }, - initialized(callback: (isInitialized: boolean) => void): void { - throw new 
Error('Function not implemented.'); - }, - close(): void { - throw new Error('Function not implemented.'); - }, -}; - -describe.each([ - [ - {}, - { - allAttributesPrivate: false, - connectTimeoutMillis: 5000, - customBaseURI: false, - customEventsURI: false, - customStreamURI: false, - dataStoreType: 'memory', - diagnosticRecordingIntervalMillis: 900000, - eventsCapacity: 10000, - eventsFlushIntervalMillis: 5000, - offline: false, - pollingIntervalMillis: 30000, - reconnectTimeMillis: 1000, - socketTimeoutMillis: 5000, - streamingDisabled: false, - contextKeysCapacity: 1000, - contextKeysFlushIntervalMillis: 300000, - usingProxy: false, - usingProxyAuthenticator: false, - usingRelayDaemon: false, - }, - ], - [ - { baseUri: 'http://other' }, - { - customBaseURI: true, - customEventsURI: false, - customStreamURI: false, - }, - ], - [ - { eventsUri: 'http://other' }, - { - customBaseURI: false, - customEventsURI: true, - customStreamURI: false, - }, - ], - [ - { streamUri: 'http://other' }, - { - customBaseURI: false, - customEventsURI: false, - customStreamURI: true, - }, - ], - [{ allAttributesPrivate: true }, { allAttributesPrivate: true }], - [{ timeout: 6 }, { connectTimeoutMillis: 6000, socketTimeoutMillis: 6000 }], - [{ diagnosticRecordingInterval: 999 }, { diagnosticRecordingIntervalMillis: 999000 }], - [{ capacity: 999 }, { eventsCapacity: 999 }], - [{ flushInterval: 33 }, { eventsFlushIntervalMillis: 33000 }], - [{ stream: false }, { streamingDisabled: true }], - [{ streamInitialReconnectDelay: 33 }, { reconnectTimeMillis: 33000 }], - [{ contextKeysCapacity: 111 }, { contextKeysCapacity: 111 }], - [{ contextKeysFlushInterval: 33 }, { contextKeysFlushIntervalMillis: 33000 }], - [{ useLdd: true }, { usingRelayDaemon: true }], - [{ featureStore: fakeStore }, { dataStoreType: 'WeirdStore' }], -])('given diagnostics managers with different configurations', (configIn, configOut) => { - let manager: DiagnosticsManager; - - beforeEach(() => { - jest.spyOn(Date, 
'now').mockImplementation(() => 7777); - manager = new DiagnosticsManager( - 'my-sdk-key', - new Configuration(configIn), - basicPlatform, - // @ts-ignore - configIn.featureStore ?? new InMemoryFeatureStore(), - ); - }); - - afterEach(() => { - jest.resetAllMocks(); - }); - - it('translates the configuration correctly', () => { - const event = manager.createInitEvent(); - expect(event.configuration).toMatchObject(configOut); - }); -}); - -describe.each([true, false])('Given proxy and proxy auth=%p', (auth) => { - let manager: DiagnosticsManager; - - beforeEach(() => { - jest.spyOn(Date, 'now').mockImplementation(() => 7777); - jest.spyOn(basicPlatform.requests, 'usingProxy').mockImplementation(() => true); - jest.spyOn(basicPlatform.requests, 'usingProxyAuth').mockImplementation(() => auth); - manager = new DiagnosticsManager( - 'my-sdk-key', - new Configuration({}), - basicPlatform, - new InMemoryFeatureStore(), - ); - }); - - afterEach(() => { - jest.resetAllMocks(); - }); - - it('it gets the proxy configuration from the platform', () => { - const event = manager.createInitEvent(); - expect(event.configuration).toMatchObject({ - usingProxy: true, - usingProxyAuthenticator: auth, - }); - }); -}); diff --git a/packages/shared/sdk-server/__tests__/events/EventProcessor.test.ts b/packages/shared/sdk-server/__tests__/events/EventProcessor.test.ts index 443654344..da71496c9 100644 --- a/packages/shared/sdk-server/__tests__/events/EventProcessor.test.ts +++ b/packages/shared/sdk-server/__tests__/events/EventProcessor.test.ts @@ -1,4 +1,3 @@ -/* eslint-disable @typescript-eslint/no-unused-vars */ import { ClientContext, Context, @@ -15,13 +14,11 @@ import { Response, SdkData, } from '@launchdarkly/js-sdk-common'; +import * as mocks from '@launchdarkly/private-js-mocks'; import ContextDeduplicator from '../../src/events/ContextDeduplicator'; -import DiagnosticsManager from '../../src/events/DiagnosticsManager'; -import EventSender from '../../src/events/EventSender'; 
import Configuration from '../../src/options/Configuration'; import InMemoryFeatureStore from '../../src/store/InMemoryFeatureStore'; -import basicPlatform from '../evaluation/mocks/platform'; const SDK_KEY = 'sdk-key'; @@ -68,9 +65,8 @@ function makePlatform(requestState: RequestState) { }); const requests: Requests = { - /* eslint-disable-next-line @typescript-eslint/no-unused-vars */ fetch(url: string, options?: Options): Promise { - return new Promise((a, r) => { + return new Promise((a) => { const headers: Headers = { get(name: string): string | null { return requestState.testHeaders[name] || null; @@ -84,7 +80,7 @@ function makePlatform(requestState: RequestState) { entries(): Iterable<[string, string]> { throw new Error('Function not implemented.'); }, - has(name: string): boolean { + has(_name: string): boolean { throw new Error('Function not implemented.'); }, }; @@ -106,8 +102,7 @@ function makePlatform(requestState: RequestState) { }); }, - /* eslint-disable-next-line @typescript-eslint/no-unused-vars */ - createEventSource(url: string, eventSourceInitDict: EventSourceInitDict): EventSource { + createEventSource(_url: string, _eventSourceInitDict: EventSourceInitDict): EventSource { throw new Error('Function not implemented.'); }, }; @@ -172,21 +167,22 @@ describe('given an event processor with diagnostics manager', () => { // we need to make an object and replace the value. const testConfig = { ...config, diagnosticRecordingInterval: 0.1 }; - const diagnosticsManager = new DiagnosticsManager( + const diagnosticsManager = new internal.DiagnosticsManager( 'sdk-key', - testConfig, { - ...basicPlatform, + ...mocks.basicPlatform, // Replace info and requests. 
info, requests, crypto, }, - store, + { + config1: 'test', + }, ); const clientContext = new ClientContext(SDK_KEY, testConfig, { - ...basicPlatform, + ...mocks.basicPlatform, info, requests, }); @@ -194,7 +190,6 @@ describe('given an event processor with diagnostics manager', () => { eventProcessor = new internal.EventProcessor( testConfig, clientContext, - new EventSender(config, clientContext), new ContextDeduplicator(config), diagnosticsManager, ); @@ -209,25 +204,7 @@ describe('given an event processor with diagnostics manager', () => { expect(requestState.requestsMade.length).toEqual(1); expect(JSON.parse(requestState.requestsMade[0].options.body!)).toEqual({ configuration: { - allAttributesPrivate: false, - connectTimeoutMillis: 5000, - contextKeysCapacity: 1000, - contextKeysFlushIntervalMillis: 300000, - customBaseURI: false, - customEventsURI: false, - customStreamURI: false, - dataStoreType: 'memory', - diagnosticRecordingIntervalMillis: 100, - eventsCapacity: 3, - eventsFlushIntervalMillis: 5000, - offline: false, - pollingIntervalMillis: 30000, - reconnectTimeMillis: 1000, - socketTimeoutMillis: 5000, - streamingDisabled: false, - usingProxy: false, - usingProxyAuthenticator: false, - usingRelayDaemon: false, + config1: 'test', }, creationDate: 1000, id: { @@ -299,10 +276,10 @@ describe('given an event processor with diagnostics manager', () => { it('counts events in queue from last flush and dropped events', async () => { const context = Context.fromLDContext(user); - eventProcessor.sendEvent({ kind: 'identify', creationDate: 1000, context }); - eventProcessor.sendEvent({ kind: 'identify', creationDate: 1001, context }); - eventProcessor.sendEvent({ kind: 'identify', creationDate: 1002, context }); - eventProcessor.sendEvent({ kind: 'identify', creationDate: 1003, context }); + eventProcessor.sendEvent({ kind: 'identify', creationDate: 1000, context, samplingRatio: 1 }); + eventProcessor.sendEvent({ kind: 'identify', creationDate: 1001, context, 
samplingRatio: 1 }); + eventProcessor.sendEvent({ kind: 'identify', creationDate: 1002, context, samplingRatio: 1 }); + eventProcessor.sendEvent({ kind: 'identify', creationDate: 1003, context, samplingRatio: 1 }); await eventProcessor.flush(); await waitForMessages(3); @@ -344,12 +321,14 @@ describe('given an event processor with diagnostics manager', () => { key: 'eventkey1', creationDate: 1000, context, + samplingRatio: 1, }); eventProcessor.sendEvent({ kind: 'custom', key: 'eventkey2', creationDate: 1001, context, + samplingRatio: 1, }); await eventProcessor.flush(); diff --git a/packages/shared/sdk-server/__tests__/events/EventSender.test.ts b/packages/shared/sdk-server/__tests__/events/EventSender.test.ts deleted file mode 100644 index 670fd6364..000000000 --- a/packages/shared/sdk-server/__tests__/events/EventSender.test.ts +++ /dev/null @@ -1,177 +0,0 @@ -/* eslint-disable @typescript-eslint/no-unused-vars */ -import { AsyncQueue } from 'launchdarkly-js-test-helpers'; - -import { - ClientContext, - EventSource, - EventSourceInitDict, - Headers, - Info, - Options, - PlatformData, - Requests, - Response, - SdkData, -} from '@launchdarkly/js-sdk-common'; -import { LDDeliveryStatus, LDEventType } from '@launchdarkly/js-sdk-common/dist/api/subsystem'; - -import EventSender from '../../src/events/EventSender'; -import Configuration from '../../src/options/Configuration'; -import basicPlatform from '../evaluation/mocks/platform'; - -describe('given an event sender', () => { - let queue: AsyncQueue<{ url: string; options?: Options }>; - let eventSender: EventSender; - let requestStatus = 200; - let requestHeaders: Record = {}; - - beforeEach(() => { - queue = new AsyncQueue(); - requestHeaders = {}; - - const info: Info = { - platformData(): PlatformData { - return { - os: { - name: 'An OS', - version: '1.0.1', - arch: 'An Arch', - }, - name: 'The SDK Name', - additional: { - nodeVersion: '42', - }, - }; - }, - sdkData(): SdkData { - return { - name: 'An SDK', - 
version: '2.0.2', - }; - }, - }; - - const requests: Requests = { - /* eslint-disable-next-line @typescript-eslint/no-unused-vars */ - fetch(url: string, options?: Options): Promise { - queue.add({ url, options }); - - return new Promise((a, r) => { - const headers: Headers = { - get(name: string): string | null { - return requestHeaders[name] ?? null; - }, - keys(): Iterable { - throw new Error('Function not implemented.'); - }, - values(): Iterable { - throw new Error('Function not implemented.'); - }, - entries(): Iterable<[string, string]> { - throw new Error('Function not implemented.'); - }, - has(name: string): boolean { - throw new Error('Function not implemented.'); - }, - }; - - const res: Response = { - headers, - status: requestStatus, - text(): Promise { - throw new Error('Function not implemented.'); - }, - json(): Promise { - throw new Error('Function not implemented.'); - }, - }; - a(res); - }); - }, - - /* eslint-disable-next-line @typescript-eslint/no-unused-vars */ - createEventSource(url: string, eventSourceInitDict: EventSourceInitDict): EventSource { - throw new Error('Function not implemented.'); - }, - }; - - const config = new Configuration({}); - eventSender = new EventSender( - config, - new ClientContext('sdk-key', config, { ...basicPlatform, requests, info }), - ); - }); - - it('indicates a success for a success status', async () => { - const res = await eventSender.sendEventData(LDEventType.AnalyticsEvents, { something: true }); - expect(res.status).toEqual(LDDeliveryStatus.Succeeded); - }); - - it('includes the correct headers for analytics', async () => { - await eventSender.sendEventData(LDEventType.AnalyticsEvents, { something: true }); - const req1 = await queue.take(); - expect(req1.options?.headers).toMatchObject({ - authorization: 'sdk-key', - 'user-agent': 'NodeJSClient/2.0.2', - 'x-launchDarkly-event-schema': '4', - 'content-type': 'application/json', - }); - 
expect(req1.options?.headers!['x-launchdarkly-payload-id']).toBeDefined(); - }); - - it('includes the payload', async () => { - await eventSender.sendEventData(LDEventType.AnalyticsEvents, { something: true }); - await eventSender.sendEventData(LDEventType.DiagnosticEvent, { something: false }); - const req1 = await queue.take(); - const req2 = await queue.take(); - - expect(req1.options?.body).toEqual(JSON.stringify({ something: true })); - expect(req2.options?.body).toEqual(JSON.stringify({ something: false })); - }); - - it('includes the correct headers for diagnostics', async () => { - await eventSender.sendEventData(LDEventType.DiagnosticEvent, { something: true }); - const req1 = await queue.take(); - expect(req1.options?.headers).toEqual({ - authorization: 'sdk-key', - 'user-agent': 'NodeJSClient/2.0.2', - 'content-type': 'application/json', - }); - }); - - it('sends a unique payload for analytics events', async () => { - await eventSender.sendEventData(LDEventType.AnalyticsEvents, { something: true }); - const req1 = await queue.take(); - await eventSender.sendEventData(LDEventType.AnalyticsEvents, { something: true }); - const req2 = await queue.take(); - expect(req1.options!.headers!['x-launchdarkly-payload-id']).not.toEqual( - req2.options!.headers!['x-launchdarkly-payload-id'], - ); - }); - - it('can get server time', async () => { - requestHeaders.date = new Date(1000).toISOString(); - const res = await eventSender.sendEventData(LDEventType.AnalyticsEvents, { something: true }); - expect(res.serverTime).toEqual(new Date(1000).getTime()); - }); - - describe.each([400, 408, 429, 503])('given recoverable errors', (status) => { - it(`retries - ${status}`, async () => { - requestStatus = status; - const res = await eventSender.sendEventData(LDEventType.AnalyticsEvents, { something: true }); - expect(res.status).toEqual(LDDeliveryStatus.Failed); - expect(res.error).toBeDefined(); - - expect(queue.length()).toEqual(2); - }); - }); - - describe.each([401, 
403])('given unrecoverable errors', (status) => { - it(`does not retry - ${status}`, async () => { - requestStatus = status; - const res = await eventSender.sendEventData(LDEventType.AnalyticsEvents, { something: true }); - expect(res.status).toEqual(LDDeliveryStatus.FailedAndMustShutDown); - expect(queue.length()).toEqual(1); - }); - }); -}); diff --git a/packages/shared/sdk-server/__tests__/integrations/test_data/TestData.test.ts b/packages/shared/sdk-server/__tests__/integrations/test_data/TestData.test.ts index 81690750b..e5d7119b0 100644 --- a/packages/shared/sdk-server/__tests__/integrations/test_data/TestData.test.ts +++ b/packages/shared/sdk-server/__tests__/integrations/test_data/TestData.test.ts @@ -1,6 +1,6 @@ -import { ClientContext } from '@launchdarkly/js-sdk-common'; +import { AttributeReference, ClientContext } from '@launchdarkly/js-sdk-common'; +import * as mocks from '@launchdarkly/private-js-mocks'; -import { AttributeReference } from '../../../src'; import { Flag } from '../../../src/evaluation/data/Flag'; import { FlagRule } from '../../../src/evaluation/data/FlagRule'; import TestData from '../../../src/integrations/test_data/TestData'; @@ -8,7 +8,6 @@ import Configuration from '../../../src/options/Configuration'; import AsyncStoreFacade from '../../../src/store/AsyncStoreFacade'; import InMemoryFeatureStore from '../../../src/store/InMemoryFeatureStore'; import VersionedDataKinds from '../../../src/store/VersionedDataKinds'; -import basicPlatform from '../../evaluation/mocks/platform'; const basicBooleanFlag: Flag = { fallthrough: { @@ -21,134 +20,151 @@ const basicBooleanFlag: Flag = { version: 1, }; -it('initializes the data store with flags configured the data store is created', async () => { - const td = new TestData(); - td.update(td.flag('new-flag').variationForAll(true)); +describe('TestData', () => { + let initSuccessHandler: jest.Mock; - const store = new InMemoryFeatureStore(); - const processor = td.getFactory()( - new 
ClientContext('', new Configuration({}), basicPlatform), - store, - ); + beforeEach(() => { + initSuccessHandler = jest.fn(); + }); - processor.start(); - const facade = new AsyncStoreFacade(store); + afterEach(() => { + jest.resetAllMocks(); + }); - const res = await facade.get(VersionedDataKinds.Features, 'new-flag'); + it('initializes the data store with flags configured when the data store is created', async () => { + const td = new TestData(); + td.update(td.flag('new-flag').variationForAll(true)); - expect(res).toEqual(basicBooleanFlag); -}); + const store = new InMemoryFeatureStore(); + const facade = new AsyncStoreFacade(store); -it('updates the data store when update is called', async () => { - const td = new TestData(); - const store = new InMemoryFeatureStore(); - const processor = td.getFactory()( - new ClientContext('', new Configuration({}), basicPlatform), - store, - ); - - processor.start(); - const facade = new AsyncStoreFacade(store); - - // In this test the update is after initialization. 
- await td.update(td.flag('new-flag').variationForAll(true)); - const res = await facade.get(VersionedDataKinds.Features, 'new-flag'); - expect(res).toEqual(basicBooleanFlag); -}); + const processor = td.getFactory()( + new ClientContext('', new Configuration({}), mocks.basicPlatform), + store, + initSuccessHandler, + ); + processor.start(); + const res = await facade.get(VersionedDataKinds.Features, 'new-flag'); -it('can include pre-configured items', async () => { - const td = new TestData(); - td.usePreconfiguredFlag({ key: 'my-flag', version: 1000, on: true }); - td.usePreconfiguredSegment({ key: 'my-segment', version: 2000 }); - - const store = new InMemoryFeatureStore(); - const processor = td.getFactory()( - new ClientContext('', new Configuration({}), basicPlatform), - store, - ); - - processor.start(); - - td.usePreconfiguredFlag({ key: 'my-flag', on: false }); - td.usePreconfiguredFlag({ key: 'my-flag-2', version: 1000, on: true }); - td.usePreconfiguredSegment({ key: 'my-segment', included: ['x'] }); - td.usePreconfiguredSegment({ key: 'my-segment-2', version: 2000 }); - - const facade = new AsyncStoreFacade(store); - const allFlags = await facade.all(VersionedDataKinds.Features); - const allSegments = await facade.all(VersionedDataKinds.Segments); - - expect(allFlags).toEqual({ - 'my-flag': { - key: 'my-flag', - on: false, - version: 1001, - }, - 'my-flag-2': { - key: 'my-flag-2', - on: true, - version: 1000, - }, + expect(initSuccessHandler).toBeCalled(); + expect(res).toEqual(basicBooleanFlag); }); - expect(allSegments).toEqual({ - 'my-segment': { - included: ['x'], - key: 'my-segment', - version: 2001, - }, - 'my-segment-2': { - key: 'my-segment-2', - version: 2000, - }, + it('updates the data store when update is called', async () => { + const td = new TestData(); + const store = new InMemoryFeatureStore(); + const processor = td.getFactory()( + new ClientContext('', new Configuration({}), mocks.basicPlatform), + store, + initSuccessHandler, + ); + 
+ processor.start(); + const facade = new AsyncStoreFacade(store); + + // In this test the update is after initialization. + await td.update(td.flag('new-flag').variationForAll(true)); + const res = await facade.get(VersionedDataKinds.Features, 'new-flag'); + expect(res).toEqual(basicBooleanFlag); }); -}); -it.each([true, false])('does not update the store after stop/close is called', async (stop) => { - const td = new TestData(); + it('can include pre-configured items', async () => { + const td = new TestData(); + td.usePreconfiguredFlag({ key: 'my-flag', version: 1000, on: true }); + td.usePreconfiguredSegment({ key: 'my-segment', version: 2000 }); + + const store = new InMemoryFeatureStore(); + const processor = td.getFactory()( + new ClientContext('', new Configuration({}), mocks.basicPlatform), + store, + initSuccessHandler, + ); + + processor.start(); + + td.usePreconfiguredFlag({ key: 'my-flag', on: false }); + td.usePreconfiguredFlag({ key: 'my-flag-2', version: 1000, on: true }); + td.usePreconfiguredSegment({ key: 'my-segment', included: ['x'] }); + td.usePreconfiguredSegment({ key: 'my-segment-2', version: 2000 }); + + const facade = new AsyncStoreFacade(store); + const allFlags = await facade.all(VersionedDataKinds.Features); + const allSegments = await facade.all(VersionedDataKinds.Segments); + + expect(allFlags).toEqual({ + 'my-flag': { + key: 'my-flag', + on: false, + version: 1001, + }, + 'my-flag-2': { + key: 'my-flag-2', + on: true, + version: 1000, + }, + }); - const store = new InMemoryFeatureStore(); - const processor = td.getFactory()( - new ClientContext('', new Configuration({}), basicPlatform), - store, - ); + expect(allSegments).toEqual({ + 'my-segment': { + included: ['x'], + key: 'my-segment', + version: 2001, + }, + 'my-segment-2': { + key: 'my-segment-2', + version: 2000, + }, + }); + }); - processor.start(); - td.update(td.flag('new-flag').variationForAll(true)); - if (stop) { - processor.stop(); - } else { - processor.close(); - } - 
td.update(td.flag('new-flag-2').variationForAll(true)); + it.each([true, false])('does not update the store after stop/close is called', async (stop) => { + const td = new TestData(); - const facade = new AsyncStoreFacade(store); + const store = new InMemoryFeatureStore(); + const processor = td.getFactory()( + new ClientContext('', new Configuration({}), mocks.basicPlatform), + store, + initSuccessHandler, + ); - const flag1 = await facade.get(VersionedDataKinds.Features, 'new-flag'); - const flag2 = await facade.get(VersionedDataKinds.Features, 'new-flag-2'); + processor.start(); + td.update(td.flag('new-flag').variationForAll(true)); + if (stop) { + processor.stop(); + } else { + processor.close(); + } + td.update(td.flag('new-flag-2').variationForAll(true)); - expect(flag1).toBeDefined(); - expect(flag2).toBeNull(); -}); + const facade = new AsyncStoreFacade(store); -it('can update a flag that already exists in the store', async () => { - const td = new TestData(); + const flag1 = await facade.get(VersionedDataKinds.Features, 'new-flag'); + const flag2 = await facade.get(VersionedDataKinds.Features, 'new-flag-2'); - const store = new InMemoryFeatureStore(); + expect(flag1).toBeDefined(); + expect(flag2).toBeNull(); + }); + + it('can update a flag that already exists in the store', async () => { + const td = new TestData(); - const processor = td.getFactory()( - new ClientContext('', new Configuration({}), basicPlatform), - store, - ); + const store = new InMemoryFeatureStore(); - processor.start(); - td.update(td.flag('new-flag').variationForAll(true)); - td.update(td.flag('new-flag').variationForAll(false)); + const processor = td.getFactory()( + new ClientContext('', new Configuration({}), mocks.basicPlatform), + store, + initSuccessHandler, + ); - const facade = new AsyncStoreFacade(store); - const res = (await facade.get(VersionedDataKinds.Features, 'new-flag')) as Flag; - expect(res.version).toEqual(2); - expect(res.fallthrough.variation).toEqual(1); + 
processor.start(); + td.update(td.flag('new-flag').variationForAll(true)); + td.update(td.flag('new-flag').variationForAll(false)); + + const facade = new AsyncStoreFacade(store); + const res = (await facade.get(VersionedDataKinds.Features, 'new-flag')) as Flag; + expect(res.version).toEqual(2); + expect(res.fallthrough.variation).toEqual(1); + }); }); describe('given a TestData instance', () => { diff --git a/packages/shared/sdk-server/__tests__/store/serialization.test.ts b/packages/shared/sdk-server/__tests__/store/serialization.test.ts index 9c1607727..a18ac959d 100644 --- a/packages/shared/sdk-server/__tests__/store/serialization.test.ts +++ b/packages/shared/sdk-server/__tests__/store/serialization.test.ts @@ -174,8 +174,12 @@ function makeSerializedAllData(flag?: any, segment?: any): string { } function makePatchData(flag?: any, segment?: any): any { + let path = '/flags/flagName'; + if (segment) { + path = '/segments/segmentName'; + } return { - path: flag ? '/flags/flagName' : '/segments/segmentName', + path, data: flag ?? segment, }; } diff --git a/packages/shared/sdk-server/jest.config.js b/packages/shared/sdk-server/jest.config.js index f106eb3bc..6753062cc 100644 --- a/packages/shared/sdk-server/jest.config.js +++ b/packages/shared/sdk-server/jest.config.js @@ -1,6 +1,6 @@ module.exports = { transform: { '^.+\\.ts?$': 'ts-jest' }, - testMatch: ['**/__tests__/**/*test.ts?(x)'], + testMatch: ['**/*.test.ts?(x)'], testEnvironment: 'node', moduleFileExtensions: ['ts', 'tsx', 'js', 'jsx', 'json', 'node'], collectCoverageFrom: ['src/**/*.ts'], diff --git a/packages/shared/sdk-server/package.json b/packages/shared/sdk-server/package.json index 9a3c4fbb9..893da224e 100644 --- a/packages/shared/sdk-server/package.json +++ b/packages/shared/sdk-server/package.json @@ -23,7 +23,7 @@ "build": "npx tsc", "clean": "npx tsc --build --clean", "lint": "npx eslint . 
--ext .ts", - "lint:fix": "yarn run lint -- --fix" + "lint:fix": "yarn run lint --fix" }, "license": "Apache-2.0", "dependencies": { @@ -31,6 +31,7 @@ "semver": "7.5.4" }, "devDependencies": { + "@launchdarkly/private-js-mocks": "0.0.1", "@trivago/prettier-plugin-sort-imports": "^4.1.1", "@types/jest": "^29.4.0", "@types/semver": "^7.3.13", diff --git a/packages/shared/sdk-server/src/LDClientImpl.ts b/packages/shared/sdk-server/src/LDClientImpl.ts index 2cef8c964..13ec18627 100644 --- a/packages/shared/sdk-server/src/LDClientImpl.ts +++ b/packages/shared/sdk-server/src/LDClientImpl.ts @@ -1,54 +1,54 @@ -/* eslint-disable @typescript-eslint/no-unused-vars */ - /* eslint-disable class-methods-use-this */ import { ClientContext, Context, internal, + LDClientError, LDContext, LDEvaluationDetail, + LDEvaluationDetailTyped, LDLogger, Platform, subsystem, + TypeValidators, } from '@launchdarkly/js-sdk-common'; import { + IsMigrationStage, LDClient, LDFeatureStore, - LDFeatureStoreKindData, LDFlagsState, LDFlagsStateOptions, + LDMigrationOpEvent, + LDMigrationStage, + LDMigrationVariation, LDOptions, - LDStreamProcessor, } from './api'; import { BigSegmentStoreMembership } from './api/interfaces'; import BigSegmentsManager from './BigSegmentsManager'; import BigSegmentStoreStatusProvider from './BigSegmentStatusProviderImpl'; -import ClientMessages from './ClientMessages'; +import { createStreamListeners } from './data_sources/createStreamListeners'; import DataSourceUpdates from './data_sources/DataSourceUpdates'; -import NullUpdateProcessor from './data_sources/NullUpdateProcessor'; import PollingProcessor from './data_sources/PollingProcessor'; import Requestor from './data_sources/Requestor'; -import StreamingProcessor from './data_sources/StreamingProcessor'; -import { LDClientError } from './errors'; -import { allAsync, allSeriesAsync } from './evaluation/collection'; +import createDiagnosticsInitConfig from './diagnostics/createDiagnosticsInitConfig'; +import { 
allAsync } from './evaluation/collection'; import { Flag } from './evaluation/data/Flag'; import { Segment } from './evaluation/data/Segment'; -import ErrorKinds from './evaluation/ErrorKinds'; import EvalResult from './evaluation/EvalResult'; import Evaluator from './evaluation/Evaluator'; import { Queries } from './evaluation/Queries'; import ContextDeduplicator from './events/ContextDeduplicator'; -import DiagnosticsManager from './events/DiagnosticsManager'; import EventFactory from './events/EventFactory'; -import EventSender from './events/EventSender'; import isExperiment from './events/isExperiment'; -import NullEventProcessor from './events/NullEventProcessor'; import FlagsStateBuilder from './FlagsStateBuilder'; +import MigrationOpEventToInputEvent from './MigrationOpEventConversion'; +import MigrationOpTracker from './MigrationOpTracker'; import Configuration from './options/Configuration'; -import { AsyncStoreFacade } from './store'; +import AsyncStoreFacade from './store/AsyncStoreFacade'; import VersionedDataKinds from './store/VersionedDataKinds'; +const { ClientMessages, ErrorKinds, NullEventProcessor } = internal; enum InitState { Initializing, Initialized, @@ -76,7 +76,7 @@ export default class LDClientImpl implements LDClient { private asyncFeatureStore: AsyncStoreFacade; - private updateProcessor: LDStreamProcessor; + private updateProcessor?: subsystem.LDStreamProcessor; private eventFactoryDefault = new EventFactory(false); @@ -104,7 +104,7 @@ export default class LDClientImpl implements LDClient { private onReady: () => void; - private diagnosticsManager?: DiagnosticsManager; + private diagnosticsManager?: internal.DiagnosticsManager; /** * Intended for use by platform specific client implementations. 
@@ -127,6 +127,7 @@ export default class LDClientImpl implements LDClient { const { onUpdate, hasEventListeners } = callbacks; const config = new Configuration(options); + if (!sdkKey && !config.offline) { throw new Error('You must configure the client with an SDK key'); } @@ -139,30 +140,11 @@ export default class LDClientImpl implements LDClient { const dataSourceUpdates = new DataSourceUpdates(featureStore, hasEventListeners, onUpdate); if (config.sendEvents && !config.offline && !config.diagnosticOptOut) { - this.diagnosticsManager = new DiagnosticsManager(sdkKey, config, platform, featureStore); - } - - const makeDefaultProcessor = () => - config.stream - ? new StreamingProcessor( - sdkKey, - config, - this.platform.requests, - this.platform.info, - dataSourceUpdates, - this.diagnosticsManager, - ) - : new PollingProcessor( - config, - new Requestor(sdkKey, config, this.platform.info, this.platform.requests), - dataSourceUpdates, - ); - - if (config.offline || config.useLdd) { - this.updateProcessor = new NullUpdateProcessor(); - } else { - this.updateProcessor = - config.updateProcessorFactory?.(clientContext, dataSourceUpdates) ?? makeDefaultProcessor(); + this.diagnosticsManager = new internal.DiagnosticsManager( + sdkKey, + platform, + createDiagnosticsInitConfig(config, platform, featureStore), + ); } if (!config.sendEvents || config.offline) { @@ -171,7 +153,6 @@ export default class LDClientImpl implements LDClient { this.eventProcessor = new internal.EventProcessor( config, clientContext, - new EventSender(config, clientContext), new ContextDeduplicator(config), this.diagnosticsManager, ); @@ -203,28 +184,45 @@ export default class LDClientImpl implements LDClient { }; this.evaluator = new Evaluator(this.platform, queries); - this.updateProcessor.start((err) => { - if (err) { - let error; - if ((err.status && err.status === 401) || (err.code && err.code === 401)) { - error = new Error('Authentication failed. 
Double check your SDK key.'); - } else { - error = err; - } + const listeners = createStreamListeners(dataSourceUpdates, this.logger, { + put: () => this.initSuccess(), + }); + const makeDefaultProcessor = () => + config.stream + ? new internal.StreamingProcessor( + sdkKey, + clientContext, + '/all', + listeners, + this.diagnosticsManager, + (e) => this.dataSourceErrorHandler(e), + this.config.streamInitialReconnectDelay, + ) + : new PollingProcessor( + config, + new Requestor(sdkKey, config, this.platform.info, this.platform.requests), + dataSourceUpdates, + () => this.initSuccess(), + (e) => this.dataSourceErrorHandler(e), + ); - this.onError(error); - this.onFailed(error); + if (!(config.offline || config.useLdd)) { + this.updateProcessor = + config.updateProcessorFactory?.( + clientContext, + dataSourceUpdates, + () => this.initSuccess(), + (e) => this.dataSourceErrorHandler(e), + ) ?? makeDefaultProcessor(); + } - if (!this.initialized()) { - this.initState = InitState.Failed; - this.initReject?.(error); - } - } else if (!this.initialized()) { - this.initState = InitState.Initialized; - this.initResolve?.(this); - this.onReady(); - } - }); + if (this.updateProcessor) { + this.updateProcessor.start(); + } else { + // Deferring the start callback should allow client construction to complete before we start + // emitting events. Allowing the client an opportunity to register events. 
+ setTimeout(() => this.initSuccess(), 0); + } } initialized(): boolean { @@ -272,6 +270,168 @@ export default class LDClientImpl implements LDClient { }); } + private typedEval( + key: string, + context: LDContext, + defaultValue: TResult, + eventFactory: EventFactory, + typeChecker: (value: unknown) => [boolean, string], + ): Promise { + return new Promise>((resolve) => { + this.evaluateIfPossible( + key, + context, + defaultValue, + eventFactory, + (res) => { + const typedRes: LDEvaluationDetailTyped = { + value: res.detail.value as TResult, + reason: res.detail.reason, + variationIndex: res.detail.variationIndex, + }; + resolve(typedRes); + }, + typeChecker, + ); + }); + } + + async boolVariation(key: string, context: LDContext, defaultValue: boolean): Promise { + return ( + await this.typedEval(key, context, defaultValue, this.eventFactoryDefault, (value) => [ + TypeValidators.Boolean.is(value), + TypeValidators.Boolean.getType(), + ]) + ).value; + } + + async numberVariation(key: string, context: LDContext, defaultValue: number): Promise { + return ( + await this.typedEval(key, context, defaultValue, this.eventFactoryDefault, (value) => [ + TypeValidators.Number.is(value), + TypeValidators.Number.getType(), + ]) + ).value; + } + + async stringVariation(key: string, context: LDContext, defaultValue: string): Promise { + return ( + await this.typedEval(key, context, defaultValue, this.eventFactoryDefault, (value) => [ + TypeValidators.String.is(value), + TypeValidators.String.getType(), + ]) + ).value; + } + + jsonVariation(key: string, context: LDContext, defaultValue: unknown): Promise { + return this.variation(key, context, defaultValue); + } + + boolVariationDetail( + key: string, + context: LDContext, + defaultValue: boolean, + ): Promise> { + return this.typedEval(key, context, defaultValue, this.eventFactoryWithReasons, (value) => [ + TypeValidators.Boolean.is(value), + TypeValidators.Boolean.getType(), + ]); + } + + numberVariationDetail( + key: string, 
+ context: LDContext, + defaultValue: number, + ): Promise> { + return this.typedEval(key, context, defaultValue, this.eventFactoryWithReasons, (value) => [ + TypeValidators.Number.is(value), + TypeValidators.Number.getType(), + ]); + } + + stringVariationDetail( + key: string, + context: LDContext, + defaultValue: string, + ): Promise> { + return this.typedEval(key, context, defaultValue, this.eventFactoryWithReasons, (value) => [ + TypeValidators.String.is(value), + TypeValidators.String.getType(), + ]); + } + + jsonVariationDetail( + key: string, + context: LDContext, + defaultValue: unknown, + ): Promise> { + return this.variationDetail(key, context, defaultValue); + } + + async migrationVariation( + key: string, + context: LDContext, + defaultValue: LDMigrationStage, + ): Promise { + const convertedContext = Context.fromLDContext(context); + return new Promise((resolve) => { + this.evaluateIfPossible( + key, + context, + defaultValue, + this.eventFactoryWithReasons, + ({ detail }, flag) => { + const contextKeys = convertedContext.valid ? convertedContext.kindsAndKeys : {}; + const checkRatio = flag?.migration?.checkRatio; + const samplingRatio = flag?.samplingRatio; + + if (!IsMigrationStage(detail.value)) { + const error = new Error( + `Unrecognized MigrationState for "${key}"; returning default value.`, + ); + this.onError(error); + const reason = { + kind: 'ERROR', + errorKind: ErrorKinds.WrongType, + }; + resolve({ + value: defaultValue, + tracker: new MigrationOpTracker( + key, + contextKeys, + defaultValue, + defaultValue, + reason, + checkRatio, + undefined, + flag?.version, + samplingRatio, + this.logger, + ), + }); + return; + } + resolve({ + value: detail.value as LDMigrationStage, + tracker: new MigrationOpTracker( + key, + contextKeys, + defaultValue, + detail.value, + detail.reason, + checkRatio, + // Can be null for compatibility reasons. + detail.variationIndex === null ? 
undefined : detail.variationIndex, + flag?.version, + samplingRatio, + this.logger, + ), + }); + }, + ); + }); + } + allFlagsState( context: LDContext, options?: LDFlagsStateOptions, @@ -369,7 +529,7 @@ export default class LDClientImpl implements LDClient { close(): void { this.eventProcessor.close(); - this.updateProcessor.close(); + this.updateProcessor?.close(); this.featureStore.close(); this.bigSegmentsManager.close(); } @@ -384,11 +544,21 @@ export default class LDClientImpl implements LDClient { this.logger?.warn(ClientMessages.missingContextKeyNoEvent); return; } + this.eventProcessor.sendEvent( this.eventFactoryDefault.customEvent(key, checkedContext!, data, metricValue), ); } + trackMigration(event: LDMigrationOpEvent): void { + const converted = MigrationOpEventToInputEvent(event); + if (!converted) { + return; + } + + this.eventProcessor.sendEvent(converted); + } + identify(context: LDContext): void { const checkedContext = Context.fromLDContext(context); if (!checkedContext.valid) { @@ -412,7 +582,8 @@ export default class LDClientImpl implements LDClient { context: LDContext, defaultValue: any, eventFactory: EventFactory, - cb: (res: EvalResult) => void, + cb: (res: EvalResult, flag?: Flag) => void, + typeChecker?: (value: any) => [boolean, string], ): void { if (this.config.offline) { this.logger?.info('Variation called in offline mode. 
Returning default value.'); @@ -439,7 +610,7 @@ export default class LDClientImpl implements LDClient { this.onError(error); const result = EvalResult.forError(ErrorKinds.FlagNotFound, undefined, defaultValue); this.eventProcessor.sendEvent( - this.eventFactoryDefault.unknownFlagEvent(flagKey, evalContext, result.detail), + this.eventFactoryDefault.unknownFlagEvent(flagKey, defaultValue, evalContext), ); cb(result); return; @@ -455,25 +626,51 @@ export default class LDClientImpl implements LDClient { this.logger?.debug('Result value is null in variation'); evalRes.setDefault(defaultValue); } - evalRes.events?.forEach((event) => { - this.eventProcessor.sendEvent(event); - }); - this.eventProcessor.sendEvent( - eventFactory.evalEvent(flag, evalContext, evalRes.detail, defaultValue), - ); - cb(evalRes); + + if (typeChecker) { + const [matched, type] = typeChecker(evalRes.detail.value); + if (!matched) { + const errorRes = EvalResult.forError( + ErrorKinds.WrongType, + `Did not receive expected type (${type}) evaluating feature flag "${flagKey}"`, + defaultValue, + ); + this.sendEvalEvent(errorRes, eventFactory, flag, evalContext, defaultValue); + cb(errorRes, flag); + return; + } + } + + this.sendEvalEvent(evalRes, eventFactory, flag, evalContext, defaultValue); + cb(evalRes, flag); }, eventFactory, ); }); } + private sendEvalEvent( + evalRes: EvalResult, + eventFactory: EventFactory, + flag: Flag, + evalContext: Context, + defaultValue: any, + ) { + evalRes.events?.forEach((event) => { + this.eventProcessor.sendEvent({ ...event }); + }); + this.eventProcessor.sendEvent( + eventFactory.evalEventServer(flag, evalContext, evalRes.detail, defaultValue, undefined), + ); + } + private evaluateIfPossible( flagKey: string, context: LDContext, defaultValue: any, eventFactory: EventFactory, - cb: (res: EvalResult) => void, + cb: (res: EvalResult, flag?: Flag) => void, + typeChecker?: (value: any) => [boolean, string], ): void { if (!this.initialized()) { 
this.featureStore.initialized((storeInitialized) => { @@ -482,7 +679,7 @@ export default class LDClientImpl implements LDClient { 'Variation called before LaunchDarkly client initialization completed' + " (did you wait for the 'ready' event?) - using last known values from feature store", ); - this.variationInternal(flagKey, context, defaultValue, eventFactory, cb); + this.variationInternal(flagKey, context, defaultValue, eventFactory, cb, typeChecker); return; } this.logger?.warn( @@ -493,6 +690,27 @@ export default class LDClientImpl implements LDClient { }); return; } - this.variationInternal(flagKey, context, defaultValue, eventFactory, cb); + this.variationInternal(flagKey, context, defaultValue, eventFactory, cb, typeChecker); + } + + private dataSourceErrorHandler(e: any) { + const error = + e.code === 401 ? new Error('Authentication failed. Double check your SDK key.') : e; + + this.onError(error); + this.onFailed(error); + + if (!this.initialized()) { + this.initState = InitState.Failed; + this.initReject?.(error); + } + } + + private initSuccess() { + if (!this.initialized()) { + this.initState = InitState.Initialized; + this.initResolve?.(this); + this.onReady(); + } } } diff --git a/packages/shared/sdk-server/src/Migration.ts b/packages/shared/sdk-server/src/Migration.ts new file mode 100644 index 000000000..3eebf76d9 --- /dev/null +++ b/packages/shared/sdk-server/src/Migration.ts @@ -0,0 +1,391 @@ +import { LDContext } from '@launchdarkly/js-sdk-common'; + +import { LDClient, LDMigrationStage, LDMigrationTracker } from './api'; +import { + LDMigration, + LDMigrationOrigin, + LDMigrationReadResult, + LDMigrationResult, + LDMigrationWriteResult, +} from './api/LDMigration'; +import { + LDConcurrentExecution, + LDExecution, + LDExecutionOrdering, + LDMethodResult, + LDMigrationOptions, + LDSerialExecution, +} from './api/options/LDMigrationOptions'; + +type MultipleReadResult = { + fromOld: LDMigrationReadResult; + fromNew: LDMigrationReadResult; +}; + 
+async function safeCall( + method: () => Promise>, +): Promise> { + try { + // Awaiting to allow catching. + const res = await method(); + return res; + } catch (error: any) { + return { + success: false, + error, + }; + } +} + +/** + * Report a successful migration operation from `readNew`, `readOld`, `writeNew` or `writeOld`. + * + * ``` + * readNew: async () => { + * const myResult = doMyOldRead(); + * if(myResult.wasGood) { + * return LDMigrationSuccess(myResult); + * } + * return LDMigrationError(myResult.error) + * } + * ``` + * + * @param result The result of the operation. + * @returns An {@link LDMethodResult} + */ +export function LDMigrationSuccess(result: TResult): LDMethodResult { + return { + success: true, + result, + }; +} + +/** + * Report a failed migration operation from `readNew`, `readOld`, `writeNew` or `writeOld`. + * + * ``` + * readNew: async () => { + * const myResult = doMyOldRead(); + * if(myResult.wasGood) { + * return LDMigrationSuccess(myResult); + * } + * return LDMigrationError(myResult.error) + * } + * ``` + * + * @param result The result of the operations. + * @returns An {@link LDMethodResult} + */ +export function LDMigrationError(error: Error): { success: false; error: Error } { + return { + success: false, + error, + }; +} + +interface MigrationContext { + payload?: TPayload; + tracker: LDMigrationTracker; +} + +/** + * Class which allows performing technology migrations. 
+ */ +class Migration< + TMigrationRead, + TMigrationWrite, + TMigrationReadInput = any, + TMigrationWriteInput = any, +> implements + LDMigration +{ + private readonly execution: LDSerialExecution | LDConcurrentExecution; + + private readonly errorTracking: boolean; + + private readonly latencyTracking: boolean; + + private readonly readTable: { + [index: string]: ( + context: MigrationContext, + ) => Promise>; + } = { + [LDMigrationStage.Off]: async (context) => + this.doSingleOp(context, 'old', this.config.readOld.bind(this.config)), + [LDMigrationStage.DualWrite]: async (context) => + this.doSingleOp(context, 'old', this.config.readOld.bind(this.config)), + [LDMigrationStage.Shadow]: async (context) => { + const { fromOld, fromNew } = await this.doRead(context); + + this.trackConsistency(context, fromOld, fromNew); + + return fromOld; + }, + [LDMigrationStage.Live]: async (context) => { + const { fromNew, fromOld } = await this.doRead(context); + + this.trackConsistency(context, fromOld, fromNew); + + return fromNew; + }, + [LDMigrationStage.RampDown]: async (context) => + this.doSingleOp(context, 'new', this.config.readNew.bind(this.config)), + [LDMigrationStage.Complete]: async (context) => + this.doSingleOp(context, 'new', this.config.readNew.bind(this.config)), + }; + + private readonly writeTable: { + [index: string]: ( + context: MigrationContext, + ) => Promise>; + } = { + [LDMigrationStage.Off]: async (context) => ({ + authoritative: await this.doSingleOp(context, 'old', this.config.writeOld.bind(this.config)), + }), + [LDMigrationStage.DualWrite]: async (context) => { + const fromOld = await this.doSingleOp(context, 'old', this.config.writeOld.bind(this.config)); + if (!fromOld.success) { + return { + authoritative: fromOld, + }; + } + + const fromNew = await this.doSingleOp(context, 'new', this.config.writeNew.bind(this.config)); + + return { + authoritative: fromOld, + nonAuthoritative: fromNew, + }; + }, + [LDMigrationStage.Shadow]: async (context) 
=> { + const fromOld = await this.doSingleOp(context, 'old', this.config.writeOld.bind(this.config)); + if (!fromOld.success) { + return { + authoritative: fromOld, + }; + } + + const fromNew = await this.doSingleOp(context, 'new', this.config.writeNew.bind(this.config)); + + return { + authoritative: fromOld, + nonAuthoritative: fromNew, + }; + }, + [LDMigrationStage.Live]: async (context) => { + const fromNew = await this.doSingleOp(context, 'new', this.config.writeNew.bind(this.config)); + if (!fromNew.success) { + return { + authoritative: fromNew, + }; + } + + const fromOld = await this.doSingleOp(context, 'old', this.config.writeOld.bind(this.config)); + + return { + authoritative: fromNew, + nonAuthoritative: fromOld, + }; + }, + [LDMigrationStage.RampDown]: async (context) => { + const fromNew = await this.doSingleOp(context, 'new', this.config.writeNew.bind(this.config)); + if (!fromNew.success) { + return { + authoritative: fromNew, + }; + } + + const fromOld = await this.doSingleOp(context, 'old', this.config.writeOld.bind(this.config)); + + return { + authoritative: fromNew, + nonAuthoritative: fromOld, + }; + }, + [LDMigrationStage.Complete]: async (context) => ({ + authoritative: await this.doSingleOp(context, 'new', this.config.writeNew.bind(this.config)), + }), + }; + + constructor( + private readonly client: LDClient, + private readonly config: LDMigrationOptions< + TMigrationRead, + TMigrationWrite, + TMigrationReadInput, + TMigrationWriteInput + >, + ) { + if (this.config.execution) { + this.execution = this.config.execution; + } else { + this.execution = new LDConcurrentExecution(); + } + + this.latencyTracking = this.config.latencyTracking ?? true; + this.errorTracking = this.config.errorTracking ?? 
true; + } + + async read( + key: string, + context: LDContext, + defaultStage: LDMigrationStage, + payload?: TMigrationReadInput, + ): Promise> { + const stage = await this.client.migrationVariation(key, context, defaultStage); + const res = await this.readTable[stage.value]({ + payload, + tracker: stage.tracker, + }); + stage.tracker.op('read'); + this.sendEvent(stage.tracker); + return res; + } + + async write( + key: string, + context: LDContext, + defaultStage: LDMigrationStage, + payload?: TMigrationWriteInput, + ): Promise> { + const stage = await this.client.migrationVariation(key, context, defaultStage); + const res = await this.writeTable[stage.value]({ + payload, + tracker: stage.tracker, + }); + stage.tracker.op('write'); + this.sendEvent(stage.tracker); + return res; + } + + private sendEvent(tracker: LDMigrationTracker) { + const event = tracker.createEvent(); + if (event) { + this.client.trackMigration(event); + } + } + + private trackConsistency( + context: MigrationContext, + oldValue: LDMethodResult, + newValue: LDMethodResult, + ) { + if (!this.config.check) { + return; + } + + if (oldValue.success && newValue.success) { + // Check is validated before this point, so it is force unwrapped. 
+ context.tracker.consistency(() => this.config.check!(oldValue.result, newValue.result)); + } + } + + private async readSequentialFixed( + context: MigrationContext, + ): Promise> { + const fromOld = await this.doSingleOp(context, 'old', this.config.readOld.bind(this.config)); + const fromNew = await this.doSingleOp(context, 'new', this.config.readNew.bind(this.config)); + return { fromOld, fromNew }; + } + + private async readConcurrent( + context: MigrationContext, + ): Promise> { + const fromOldPromise = this.doSingleOp(context, 'old', this.config.readOld.bind(this.config)); + const fromNewPromise = this.doSingleOp(context, 'new', this.config.readNew.bind(this.config)); + const [fromOld, fromNew] = await Promise.all([fromOldPromise, fromNewPromise]); + + return { fromOld, fromNew }; + } + + private async readSequentialRandom( + context: MigrationContext, + ): Promise> { + // This number is not used for a purpose requiring cryptographic security. + const randomIndex = Math.floor(Math.random() * 2); + + // Effectively flip a coin and do it on one order or the other. 
+ if (randomIndex === 0) { + const fromOld = await this.doSingleOp(context, 'old', this.config.readOld.bind(this.config)); + const fromNew = await this.doSingleOp(context, 'new', this.config.readNew.bind(this.config)); + return { fromOld, fromNew }; + } + const fromNew = await this.doSingleOp(context, 'new', this.config.readNew.bind(this.config)); + const fromOld = await this.doSingleOp(context, 'old', this.config.readOld.bind(this.config)); + return { fromOld, fromNew }; + } + + private async doRead( + context: MigrationContext, + ): Promise> { + if (this.execution?.type === LDExecution.Serial) { + const serial = this.execution as LDSerialExecution; + if (serial.ordering === LDExecutionOrdering.Fixed) { + return this.readSequentialFixed(context); + } + return this.readSequentialRandom(context); + } + return this.readConcurrent(context); + } + + private async doSingleOp( + context: MigrationContext, + origin: LDMigrationOrigin, + method: (payload?: TInput) => Promise>, + ): Promise> { + context.tracker.invoked(origin); + const res = await this.trackLatency(context.tracker, origin, () => + safeCall(() => method(context.payload)), + ); + if (!res.success && this.errorTracking) { + context.tracker.error(origin); + } + return { origin, ...res }; + } + + private async trackLatency( + tracker: LDMigrationTracker, + origin: LDMigrationOrigin, + method: () => Promise, + ): Promise { + if (!this.latencyTracking) { + return method(); + } + let start; + let end; + let result: TResult; + // TODO: Need to validate performance existence check with edge SDKs. + if (typeof performance !== 'undefined') { + start = performance.now(); + result = await method(); + end = performance.now(); + } else { + start = Date.now(); + result = await method(); + end = Date.now(); + } + + // Performance timer is in ms, but may have a microsecond resolution + // fractional component. 
+ const latency = end - start; + tracker.latency(origin, latency); + return result; + } +} + +export function createMigration< + TMigrationRead, + TMigrationWrite, + TMigrationReadInput = any, + TMigrationWriteInput = any, +>( + client: LDClient, + config: LDMigrationOptions< + TMigrationRead, + TMigrationWrite, + TMigrationReadInput, + TMigrationWriteInput + >, +): LDMigration { + return new Migration(client, config); +} diff --git a/packages/shared/sdk-server/src/MigrationOpEventConversion.ts b/packages/shared/sdk-server/src/MigrationOpEventConversion.ts new file mode 100644 index 000000000..75b6a1530 --- /dev/null +++ b/packages/shared/sdk-server/src/MigrationOpEventConversion.ts @@ -0,0 +1,267 @@ +import { internal, TypeValidators } from '@launchdarkly/js-sdk-common'; + +import { + LDMigrationConsistencyMeasurement, + LDMigrationErrorMeasurement, + LDMigrationEvaluation, + LDMigrationInvokedMeasurement, + LDMigrationLatencyMeasurement, + LDMigrationMeasurement, + LDMigrationOp, + LDMigrationOpEvent, +} from './api'; + +function isOperation(value: LDMigrationOp) { + if (!TypeValidators.String.is(value)) { + return false; + } + + return value === 'read' || value === 'write'; +} + +function isLatencyMeasurement( + value: LDMigrationMeasurement, +): value is LDMigrationLatencyMeasurement { + return value.key === 'latency_ms'; +} + +function isErrorMeasurement(value: LDMigrationMeasurement): value is LDMigrationErrorMeasurement { + return value.key === 'error'; +} + +function isInvokedMeasurement( + value: LDMigrationMeasurement, +): value is LDMigrationInvokedMeasurement { + return value.key === 'invoked'; +} + +function isConsistencyMeasurement( + value: LDMigrationMeasurement, +): value is LDMigrationConsistencyMeasurement { + return value.key === 'consistent'; +} + +function areValidNumbers(values: { old?: number; new?: number }) { + const oldValue = values.old; + const newValue = values.new; + if (oldValue !== undefined && !TypeValidators.Number.is(oldValue)) { 
+ return false; + } + if (newValue !== undefined && !TypeValidators.Number.is(newValue)) { + return false; + } + return true; +} + +function areValidBooleans(values: { old?: boolean; new?: boolean }) { + const oldValue = values.old; + const newValue = values.new; + if (oldValue !== undefined && !TypeValidators.Boolean.is(oldValue)) { + return false; + } + if (newValue !== undefined && !TypeValidators.Boolean.is(newValue)) { + return false; + } + return true; +} + +function validateMeasurement( + measurement: LDMigrationMeasurement, +): LDMigrationMeasurement | undefined { + // Here we are protecting ourselves from JS callers. TypeScript says that + // it cannot be an empty string, but those using JS can do what they want. + // @ts-ignore + if (!TypeValidators.String.is(measurement.key) || measurement.key === '') { + return undefined; + } + + if (isLatencyMeasurement(measurement)) { + if (!TypeValidators.Object.is(measurement.values)) { + return undefined; + } + if (!areValidNumbers(measurement.values)) { + return undefined; + } + return { + key: measurement.key, + values: { + old: measurement.values.old, + new: measurement.values.new, + }, + }; + } + + if (isErrorMeasurement(measurement)) { + if (!TypeValidators.Object.is(measurement.values)) { + return undefined; + } + if (!areValidBooleans(measurement.values)) { + return undefined; + } + return { + key: measurement.key, + values: { + old: measurement.values.old, + new: measurement.values.new, + }, + }; + } + + if (isConsistencyMeasurement(measurement)) { + if ( + !TypeValidators.Boolean.is(measurement.value) || + !TypeValidators.Number.is(measurement.samplingRatio) + ) { + return undefined; + } + return { + key: measurement.key, + value: measurement.value, + samplingRatio: measurement.samplingRatio, + }; + } + + if (isInvokedMeasurement(measurement)) { + if (!TypeValidators.Object.is(measurement.values)) { + return undefined; + } + if (!areValidBooleans(measurement.values)) { + return undefined; + } + return { + 
key: measurement.key, + values: { + old: measurement.values.old, + new: measurement.values.new, + }, + }; + } + + // Not a supported measurement type. + return undefined; +} + +function validateMeasurements(measurements: LDMigrationMeasurement[]): LDMigrationMeasurement[] { + return measurements + .map(validateMeasurement) + .filter((value) => value !== undefined) as LDMigrationMeasurement[]; +} + +function validateEvaluation(evaluation: LDMigrationEvaluation): LDMigrationEvaluation | undefined { + if (!TypeValidators.String.is(evaluation.key) || evaluation.key === '') { + return undefined; + } + if (!TypeValidators.Object.is(evaluation.reason)) { + return undefined; + } + if (!TypeValidators.String.is(evaluation.reason.kind) || evaluation.reason.kind === '') { + return undefined; + } + const validated: LDMigrationEvaluation = { + key: evaluation.key, + value: evaluation.value, + default: evaluation.default, + reason: { + kind: evaluation.reason.kind, + }, + }; + + const inReason = evaluation.reason; + const outReason = validated.reason; + if (TypeValidators.String.is(inReason.errorKind)) { + outReason.errorKind = inReason.errorKind; + } + + if (TypeValidators.String.is(inReason.ruleId)) { + outReason.ruleId = inReason.ruleId; + } + + if (TypeValidators.String.is(inReason.prerequisiteKey)) { + outReason.prerequisiteKey = inReason.prerequisiteKey; + } + + if (TypeValidators.Boolean.is(inReason.inExperiment)) { + outReason.inExperiment = inReason.inExperiment; + } + + if (TypeValidators.Number.is(inReason.ruleIndex)) { + outReason.ruleIndex = inReason.ruleIndex; + } + + if (TypeValidators.String.is(inReason.bigSegmentsStatus)) { + outReason.bigSegmentsStatus = inReason.bigSegmentsStatus; + } + + if (evaluation.variation !== undefined && TypeValidators.Number.is(evaluation.variation)) { + validated.variation = evaluation.variation; + } + + if (evaluation.version !== undefined && TypeValidators.Number.is(evaluation.version)) { + validated.version = evaluation.version; + } + + return
validated; +} + +/** + * Migration events can be generated directly in user code and may not follow the shape + * expected by the TypeScript definitions. So we do some validation on these events, as well + * as copying the data out of them, to reduce the amount of invalid data we may send. + * + * @param inEvent The event to process. + * @returns An event, or undefined if it could not be converted. + */ +export default function MigrationOpEventToInputEvent( + inEvent: LDMigrationOpEvent, +): internal.InputMigrationEvent | undefined { + // The sampling ratio is omitted and needs to be populated by the track migration method. + if (inEvent.kind !== 'migration_op') { + return undefined; + } + + if (!isOperation(inEvent.operation)) { + return undefined; + } + + if (!TypeValidators.Object.is(inEvent.contextKeys)) { + return undefined; + } + + if (!TypeValidators.Number.is(inEvent.creationDate)) { + return undefined; + } + + if (!Object.keys(inEvent.contextKeys).every((key) => TypeValidators.Kind.is(key))) { + return undefined; + } + + const samplingRatio = inEvent.samplingRatio ??
1; + + if (!TypeValidators.Number.is(samplingRatio)) { + return undefined; + } + + if ( + !Object.values(inEvent.contextKeys).every( + (value) => TypeValidators.String.is(value) && value !== '', + ) + ) { + return undefined; + } + + const evaluation = validateEvaluation(inEvent.evaluation); + + if (!evaluation) { + return undefined; + } + + return { + kind: inEvent.kind, + operation: inEvent.operation, + creationDate: inEvent.creationDate, + contextKeys: { ...inEvent.contextKeys }, + measurements: validateMeasurements(inEvent.measurements), + evaluation, + samplingRatio, + }; +} diff --git a/packages/shared/sdk-server/src/MigrationOpTracker.ts b/packages/shared/sdk-server/src/MigrationOpTracker.ts new file mode 100644 index 000000000..fae4884bf --- /dev/null +++ b/packages/shared/sdk-server/src/MigrationOpTracker.ts @@ -0,0 +1,257 @@ +import { + internal, + LDEvaluationReason, + LDLogger, + TypeValidators, +} from '@launchdarkly/js-sdk-common'; + +import { LDMigrationStage, LDMigrationTracker } from './api'; +import { + LDConsistencyCheck, + LDMigrationErrorMeasurement, + LDMigrationInvokedMeasurement, + LDMigrationMeasurement, + LDMigrationOp, + LDMigrationOpEvent, +} from './api/data'; +import { LDMigrationOrigin } from './api/LDMigration'; + +function isPopulated(data: number): boolean { + return !Number.isNaN(data); +} + +export default class MigrationOpTracker implements LDMigrationTracker { + private errors = { + old: false, + new: false, + }; + + private wasInvoked = { + old: false, + new: false, + }; + + private consistencyCheck: LDConsistencyCheck = LDConsistencyCheck.NotChecked; + + private latencyMeasurement = { + old: NaN, + new: NaN, + }; + + private operation?: LDMigrationOp; + + constructor( + private readonly flagKey: string, + private readonly contextKeys: Record, + private readonly defaultStage: LDMigrationStage, + private readonly stage: LDMigrationStage, + private readonly reason: LDEvaluationReason, + private readonly checkRatio?: number, + 
private readonly variation?: number, + private readonly version?: number, + private readonly samplingRatio?: number, + private readonly logger?: LDLogger, + ) {} + + op(op: LDMigrationOp) { + this.operation = op; + } + + error(origin: LDMigrationOrigin) { + this.errors[origin] = true; + } + + consistency(check: () => boolean) { + if (internal.shouldSample(this.checkRatio ?? 1)) { + try { + const res = check(); + this.consistencyCheck = res + ? LDConsistencyCheck.Consistent + : LDConsistencyCheck.Inconsistent; + } catch (exception) { + this.logger?.error( + 'Exception when executing consistency check function for migration' + + ` '${this.flagKey}' the consistency check will not be included in the generated migration` + + ` op event. Exception: ${exception}`, + ); + } + } + } + + latency(origin: LDMigrationOrigin, value: number) { + this.latencyMeasurement[origin] = value; + } + + invoked(origin: LDMigrationOrigin) { + this.wasInvoked[origin] = true; + } + + createEvent(): LDMigrationOpEvent | undefined { + if (!TypeValidators.String.is(this.flagKey) || this.flagKey === '') { + this.logger?.error('The flag key for a migration operation must be a non-empty string.'); + return undefined; + } + + if (!this.operation) { + this.logger?.error('The operation must be set using "op" before an event can be created.'); + return undefined; + } + + if (Object.keys(this.contextKeys).length === 0) { + this.logger?.error( + 'The migration was not done against a valid context and cannot generate an event.', + ); + return undefined; + } + + if (!this.wasInvoked.old && !this.wasInvoked.new) { + this.logger?.error( + 'The migration invoked neither the "old" or "new" implementation and ' + + 'an event cannot be generated', + ); + return undefined; + } + + if (!this.measurementConsistencyCheck()) { + return undefined; + } + + const measurements: LDMigrationMeasurement[] = []; + + this.populateInvoked(measurements); + this.populateConsistency(measurements); + 
this.populateLatency(measurements); + this.populateErrors(measurements); + + return { + kind: 'migration_op', + operation: this.operation, + creationDate: Date.now(), + contextKeys: this.contextKeys, + evaluation: { + key: this.flagKey, + value: this.stage, + default: this.defaultStage, + reason: this.reason, + variation: this.variation, + version: this.version, + }, + measurements, + samplingRatio: this.samplingRatio ?? 1, + }; + } + + private logTag() { + return `For migration ${this.operation}-${this.flagKey}:`; + } + + private latencyConsistencyMessage(origin: LDMigrationOrigin) { + return `Latency measurement for "${origin}", but "${origin}" was not invoked.`; + } + + private errorConsistencyMessage(origin: LDMigrationOrigin) { + return `Error occurred for "${origin}", but "${origin}" was not invoked.`; + } + + private consistencyCheckConsistencyMessage(origin: LDMigrationOrigin) { + return ( + `Consistency check was done, but "${origin}" was not invoked. ` + + 'Both "old" and "new" must be invoked to do a consistency check.' + ); + } + + private checkOriginEventConsistency(origin: LDMigrationOrigin): boolean { + if (this.wasInvoked[origin]) { + return true; + } + + // If the specific origin was not invoked, but it contains measurements, then + // that is a problem. Check each measurement and log a message if it is present. + if (!Number.isNaN(this.latencyMeasurement[origin])) { + this.logger?.error(`${this.logTag()} ${this.latencyConsistencyMessage(origin)}`); + return false; + } + + if (this.errors[origin]) { + this.logger?.error(`${this.logTag()} ${this.errorConsistencyMessage(origin)}`); + return false; + } + + if (this.consistencyCheck !== LDConsistencyCheck.NotChecked) { + this.logger?.error(`${this.logTag()} ${this.consistencyCheckConsistencyMessage(origin)}`); + return false; + } + return true; + } + + /** + * Check that the latency, error, consistency and invoked measurements are self-consistent. 
+ */ + private measurementConsistencyCheck(): boolean { + return this.checkOriginEventConsistency('old') && this.checkOriginEventConsistency('new'); + } + + private populateInvoked(measurements: LDMigrationMeasurement[]) { + const measurement: LDMigrationInvokedMeasurement = { + key: 'invoked', + values: {}, + }; + if (!this.wasInvoked.old && !this.wasInvoked.new) { + this.logger?.error('Migration op completed without executing any origins (old/new).'); + } + if (this.wasInvoked.old) { + measurement.values.old = true; + } + if (this.wasInvoked.new) { + measurement.values.new = true; + } + measurements.push(measurement); + } + + private populateConsistency(measurements: LDMigrationMeasurement[]) { + if ( + this.consistencyCheck !== undefined && + this.consistencyCheck !== LDConsistencyCheck.NotChecked + ) { + measurements.push({ + key: 'consistent', + value: this.consistencyCheck === LDConsistencyCheck.Consistent, + samplingRatio: this.checkRatio ?? 1, + }); + } + } + + private populateErrors(measurements: LDMigrationMeasurement[]) { + if (this.errors.new || this.errors.old) { + const measurement: LDMigrationErrorMeasurement = { + key: 'error', + values: {}, + }; + if (this.errors.new) { + measurement.values.new = true; + } + if (this.errors.old) { + measurement.values.old = true; + } + measurements.push(measurement); + } + } + + private populateLatency(measurements: LDMigrationMeasurement[]) { + const newIsPopulated = isPopulated(this.latencyMeasurement.new); + const oldIsPopulated = isPopulated(this.latencyMeasurement.old); + if (newIsPopulated || oldIsPopulated) { + const values: { old?: number; new?: number } = {}; + if (newIsPopulated) { + values.new = this.latencyMeasurement.new; + } + if (oldIsPopulated) { + values.old = this.latencyMeasurement.old; + } + measurements.push({ + key: 'latency_ms', + values, + }); + } + } +} diff --git a/packages/shared/sdk-server/src/api/LDClient.ts b/packages/shared/sdk-server/src/api/LDClient.ts index bc3460614..177727c44 
100644 --- a/packages/shared/sdk-server/src/api/LDClient.ts +++ b/packages/shared/sdk-server/src/api/LDClient.ts @@ -1,7 +1,14 @@ -import { LDContext, LDEvaluationDetail, LDFlagValue } from '@launchdarkly/js-sdk-common'; +import { + LDContext, + LDEvaluationDetail, + LDEvaluationDetailTyped, + LDFlagValue, +} from '@launchdarkly/js-sdk-common'; +import { LDMigrationOpEvent, LDMigrationVariation } from './data'; import { LDFlagsState } from './data/LDFlagsState'; import { LDFlagsStateOptions } from './data/LDFlagsStateOptions'; +import { LDMigrationStage } from './data/LDMigrationStage'; /** * The LaunchDarkly SDK client object. @@ -120,6 +127,202 @@ export interface LDClient { callback?: (err: any, res: LDEvaluationDetail) => void, ): Promise; + /** + * Returns the migration stage of the migration feature flag for the given + * evaluation context. + * + * If the evaluated value of the flag cannot be converted to an LDMigrationStage, then the default + * value will be returned and error will be logged. + * + * @param key The unique key of the feature flag. + * @param context The context requesting the flag. The client will generate an analytics event to + * register this context with LaunchDarkly if the context does not already exist. + * @param defaultValue The default value of the flag, to be used if the value is not available + * from LaunchDarkly. + * @returns + * A Promise which will be resolved with the result (as an{@link LDMigrationVariation}). + */ + migrationVariation( + key: string, + context: LDContext, + defaultValue: LDMigrationStage, + ): Promise; + + /** + * Determines the boolean variation of a feature flag for a context. + * + * If the flag variation does not have a boolean value, defaultValue is returned. + * + * @param key The unique key of the feature flag. + * @param context The context requesting the flag. The client will generate an analytics event to + * register this context with LaunchDarkly if the context does not already exist. 
+ * @param defaultValue The default value of the flag, to be used if the value is not available + * from LaunchDarkly. + * @returns + * A Promise which will be resolved with the result value. + */ + boolVariation(key: string, context: LDContext, defaultValue: boolean): Promise; + + /** + * Determines the numeric variation of a feature flag for a context. + * + * If the flag variation does not have a numeric value, defaultValue is returned. + * + * @param key The unique key of the feature flag. + * @param context The context requesting the flag. The client will generate an analytics event to + * register this context with LaunchDarkly if the context does not already exist. + * @param defaultValue The default value of the flag, to be used if the value is not available + * from LaunchDarkly. + * @returns + * A Promise which will be resolved with the result value. + */ + numberVariation(key: string, context: LDContext, defaultValue: number): Promise; + + /** + * Determines the string variation of a feature flag for a context. + * + * If the flag variation does not have a string value, defaultValue is returned. + * + * @param key The unique key of the feature flag. + * @param context The context requesting the flag. The client will generate an analytics event to + * register this context with LaunchDarkly if the context does not already exist. + * @param defaultValue The default value of the flag, to be used if the value is not available + * from LaunchDarkly. + * @returns + * A Promise which will be resolved with the result value. + */ + stringVariation(key: string, context: LDContext, defaultValue: string): Promise; + + /** + * Determines the variation of a feature flag for a context. + * + * This version may be favored in TypeScript versus `variation` because it returns + * an `unknown` type instead of `any`. `unknown` will require a cast before usage. + * + * @param key The unique key of the feature flag. + * @param context The context requesting the flag. 
The client will generate an analytics event to + * register this context with LaunchDarkly if the context does not already exist. + * @param defaultValue The default value of the flag, to be used if the value is not available + * from LaunchDarkly. + * @returns + * A Promise which will be resolved with the result value. + */ + jsonVariation(key: string, context: LDContext, defaultValue: unknown): Promise; + + /** + * Determines the boolean variation of a feature flag for a context, along with information about + * how it was calculated. + * + * The `reason` property of the result will also be included in analytics events, if you are + * capturing detailed event data for this flag. + * + * If the flag variation does not have a boolean value, defaultValue is returned. The reason will + * indicate an error of the type `WRONG_KIND` in this case. + * + * For more information, see the [SDK reference + * guide](https://docs.launchdarkly.com/sdk/features/evaluation-reasons#nodejs-server-side). + * + * @param key The unique key of the feature flag. + * @param context The context requesting the flag. The client will generate an analytics event to + * register this context with LaunchDarkly if the context does not already exist. + * @param defaultValue The default value of the flag, to be used if the value is not available + * from LaunchDarkly. + * @returns + * A Promise which will be resolved with the result + * (as an {@link LDEvaluationDetailTyped}). + */ + boolVariationDetail( + key: string, + context: LDContext, + defaultValue: boolean, + ): Promise>; + + /** + * Determines the numeric variation of a feature flag for a context, along with information about + * how it was calculated. + * + * The `reason` property of the result will also be included in analytics events, if you are + * capturing detailed event data for this flag. + * + * If the flag variation does not have a numeric value, defaultValue is returned. 
The reason will + * indicate an error of the type `WRONG_KIND` in this case. + * + * For more information, see the [SDK reference + * guide](https://docs.launchdarkly.com/sdk/features/evaluation-reasons#nodejs-server-side). + * + * @param key The unique key of the feature flag. + * @param context The context requesting the flag. The client will generate an analytics event to + * register this context with LaunchDarkly if the context does not already exist. + * @param defaultValue The default value of the flag, to be used if the value is not available + * from LaunchDarkly. + * @returns + * A Promise which will be resolved with the result + * (as an {@link LDEvaluationDetailTyped}). + */ + numberVariationDetail( + key: string, + context: LDContext, + defaultValue: number, + ): Promise>; + + /** + * Determines the string variation of a feature flag for a context, along with information about + * how it was calculated. + * + * The `reason` property of the result will also be included in analytics events, if you are + * capturing detailed event data for this flag. + * + * If the flag variation does not have a string value, defaultValue is returned. The reason will + * indicate an error of the type `WRONG_KIND` in this case. + * + * For more information, see the [SDK reference + * guide](https://docs.launchdarkly.com/sdk/features/evaluation-reasons#nodejs-server-side). + * + * @param key The unique key of the feature flag. + * @param context The context requesting the flag. The client will generate an analytics event to + * register this context with LaunchDarkly if the context does not already exist. + * @param defaultValue The default value of the flag, to be used if the value is not available + * from LaunchDarkly. + * @returns + * A Promise which will be resolved with the result + * (as an {@link LDEvaluationDetailTyped}). 
+ */ + stringVariationDetail( + key: string, + context: LDContext, + defaultValue: string, + ): Promise>; + + /** + * Determines the variation of a feature flag for a context, along with information about how it + * was calculated. + * + * The `reason` property of the result will also be included in analytics events, if you are + * capturing detailed event data for this flag. + * + * This version may be favored in TypeScript versus `variation` because it returns + * an `unknown` type instead of `any`. `unknown` will require a cast before usage. + * + * For more information, see the [SDK reference + * guide](https://docs.launchdarkly.com/sdk/features/evaluation-reasons#nodejs-server-side). + * + * @param key The unique key of the feature flag. + * @param context The context requesting the flag. The client will generate an analytics event to + * register this context with LaunchDarkly if the context does not already exist. + * @param defaultValue The default value of the flag, to be used if the value is not available + * from LaunchDarkly. + * @param callback A Node-style callback to receive the result (as an {@link LDEvaluationDetail}). + * If omitted, you will receive a Promise instead. + * @returns + * If you provided a callback, then nothing. Otherwise, a Promise which will be resolved with + * the result (as an{@link LDEvaluationDetailTyped}). + */ + jsonVariationDetail( + key: string, + context: LDContext, + defaultValue: unknown, + ): Promise>; + /** * Builds an object that encapsulates the state of all feature flags for a given context. * This includes the flag values and also metadata that can be used on the front end. This @@ -197,6 +400,13 @@ export interface LDClient { */ track(key: string, context: LDContext, data?: any, metricValue?: number): void; + /** + * Track the details of a migration. + * + * @param event Event containing information about the migration operation. 
+ */ + trackMigration(event: LDMigrationOpEvent): void; + /** * Identifies a context to LaunchDarkly. * diff --git a/packages/shared/sdk-server/src/api/LDMigration.ts b/packages/shared/sdk-server/src/api/LDMigration.ts new file mode 100644 index 000000000..707678645 --- /dev/null +++ b/packages/shared/sdk-server/src/api/LDMigration.ts @@ -0,0 +1,93 @@ +import { LDContext } from '@launchdarkly/js-sdk-common'; + +import { LDMigrationStage } from './data/LDMigrationStage'; + +/** + * Specifies the origin of the result or error. + * + * Results from `readOld` or `writeOld` will be 'old'. + * Results from `readNew` or `writeNew` will be 'new'. + */ +export type LDMigrationOrigin = 'old' | 'new'; + +/** + * Result of a component of an LDMigration. + * + * Should not need to be used by a consumer of this API directly. + */ +export type LDMigrationResult = + | { + success: true; + origin: LDMigrationOrigin; + result: TResult; + } + | { + success: false; + origin: LDMigrationOrigin; + error: any; + }; + +/** + * Result of a migration read operation. + */ +export type LDMigrationReadResult = LDMigrationResult; + +/** + * Result of a migration write operation. + * + * Authoritative writes are done before non-authoritative, so the authoritative + * field should contain either an error or a result. + * + * If the authoritative write fails, then the non-authoritative operation will + * not be executed. When this happens the nonAuthoritative field will not be + * populated. + * + * When the non-authoritative operation is executed, then it will result in + * either a result or an error and the field will be populated as such. + */ +export type LDMigrationWriteResult = { + authoritative: LDMigrationResult; + nonAuthoritative?: LDMigrationResult; +}; + +/** + * Interface representing a migration. + */ +export interface LDMigration< + TMigrationRead, + TMigrationWrite, + TMigrationReadInput = any, + TMigrationWriteInput = any, +> { + /** + * Perform a read using the migration. 
+ * + * @param key The key of the flag controlling the migration. + * @param context The context requesting the flag. The client will generate an analytics event to + * register this context with LaunchDarkly if the context does not already exist. + * @param defaultValue The default migration step. Used if the value is not available from + * LaunchDarkly. + */ + read( + key: string, + context: LDContext, + defaultValue: LDMigrationStage, + payload?: TMigrationReadInput, + ): Promise>; + + /** + * Perform a write using the migration. + * + * @param key The key of the flag controlling the migration. + * @param context The context requesting the flag. The client will generate an analytics event to + * register this context with LaunchDarkly if the context does not already exist. + * @param defaultValue The default migration step. Used if the value is not available from + * LaunchDarkly. + */ + write( + key: string, + context: LDContext, + defaultValue: LDMigrationStage, + payload?: TMigrationWriteInput, + ): Promise>; +} diff --git a/packages/shared/sdk-server/src/api/data/LDMigrationOpEvent.ts b/packages/shared/sdk-server/src/api/data/LDMigrationOpEvent.ts new file mode 100644 index 000000000..544a7b34c --- /dev/null +++ b/packages/shared/sdk-server/src/api/data/LDMigrationOpEvent.ts @@ -0,0 +1,73 @@ +import { LDEvaluationReason } from '@launchdarkly/js-sdk-common'; + +import { LDMigrationStage } from './LDMigrationStage'; + +export type LDMigrationOp = 'read' | 'write'; + +/** + * Component of an LDMigrationOpEvent which tracks information about the + * evaluation of the migration flag. 
+ */ +export interface LDMigrationEvaluation { + key: string; + value: LDMigrationStage; + default: LDMigrationStage; + variation?: number; + version?: number; + reason: LDEvaluationReason; +} + +export interface LDMigrationConsistencyMeasurement { + key: 'consistent'; + value: boolean; + samplingRatio: number; +} + +export interface LDMigrationLatencyMeasurement { + key: 'latency_ms'; + values: { + old?: number; + new?: number; + }; +} + +export interface LDMigrationErrorMeasurement { + key: 'error'; + values: { + old?: boolean; + new?: boolean; + }; +} + +export interface LDMigrationInvokedMeasurement { + key: 'invoked'; + values: { + old?: boolean; + new?: boolean; + }; +} + +/** + * Types of measurements supported by an LDMigrationOpEvent. + */ +export type LDMigrationMeasurement = + | LDMigrationLatencyMeasurement + | LDMigrationErrorMeasurement + | LDMigrationConsistencyMeasurement + | LDMigrationInvokedMeasurement; + +/** + * Event used to track information about a migration operation. + * + * Generally this event should not be created directly and instead an + * {@link MigrationOpTracker} should be used to generate it. + */ +export interface LDMigrationOpEvent { + kind: 'migration_op'; + operation: LDMigrationOp; + creationDate: number; + contextKeys: Record; + evaluation: LDMigrationEvaluation; + measurements: LDMigrationMeasurement[]; + samplingRatio: number; +} diff --git a/packages/shared/sdk-server/src/api/data/LDMigrationStage.ts b/packages/shared/sdk-server/src/api/data/LDMigrationStage.ts new file mode 100644 index 000000000..f99c0bcfb --- /dev/null +++ b/packages/shared/sdk-server/src/api/data/LDMigrationStage.ts @@ -0,0 +1,46 @@ +/** + * Stage denotes one of six possible stages a technology migration could be a + * part of, progressing through the following order. 
+ * + * Off -> DualWrite -> Shadow -> Live -> RampDown -> Complete + */ +export enum LDMigrationStage { + /** + * Off - migration hasn't started, "old" is authoritative for reads and writes + */ + Off = 'off', + + /** + * DualWrite - write to both "old" and "new", "old" is authoritative for reads + */ + DualWrite = 'dualwrite', + + /** + * Shadow - both "new" and "old" versions run with a preference for "old" + */ + Shadow = 'shadow', + + /** + * Live - both "new" and "old" versions run with a preference for "new" + */ + Live = 'live', + + /** + * RampDown - only read from "new", write to "old" and "new" + */ + RampDown = 'rampdown', + + /** + * Complete - migration is done + */ + Complete = 'complete', +} + +/** + * Check if the given string is a migration stage. + * @param value The string to check. + * @returns True if the string is a migration stage. + */ +export function IsMigrationStage(value: string): boolean { + return Object.values(LDMigrationStage).includes(value as LDMigrationStage); +} diff --git a/packages/shared/sdk-server/src/api/data/LDMigrationVariation.ts b/packages/shared/sdk-server/src/api/data/LDMigrationVariation.ts new file mode 100644 index 000000000..fe4f81efb --- /dev/null +++ b/packages/shared/sdk-server/src/api/data/LDMigrationVariation.ts @@ -0,0 +1,91 @@ +import { LDMigrationOrigin } from '../LDMigration'; +import { LDMigrationOp, LDMigrationOpEvent } from './LDMigrationOpEvent'; +import { LDMigrationStage } from './LDMigrationStage'; + +/** + * Used for reporting the state of a consistency check. + */ +export enum LDConsistencyCheck { + Inconsistent = 0, + Consistent = 1, + NotChecked = 2, +} + +/** + * Used to track information related to a migration operation. + */ +export interface LDMigrationTracker { + /** + * Sets the migration related operation associated with these tracking measurements. + * + * @param op The operation being tracked. 
+ */ + op(op: LDMigrationOp): void; + + /** + * Report that an error has occurred for the specified origin. + * + * @param origin The origin of the error. + */ + error(origin: LDMigrationOrigin): void; + + /** + * Check the consistency of a read result. This method should be invoked if the `check` function + * is defined for the migration and both reads ("new"/"old") were done. + * + * The function will use the checkRatio to determine if the check should be executed, and it + * will record the result. + * + * Example calling the check function from the migration config. + * ``` + * context.tracker.consistency(() => config.check!(oldValue.result, newValue.result)); + * ``` + * + * If the consistency check function throws an exception, then the consistency check result + * will not be included in the generated event. + * + * @param check The function which executes the check. This is not the `check` function from the + * migration options, but instead should be a parameter-less function that calls that function. + */ + consistency(check: () => boolean): void; + + /** + * Call this to report that an origin was invoked (executed). There are some situations where the + * expectation is that both the old and new implementation will be used, but with writes + * it is possible that the non-authoritative will not execute. Reporting the execution allows + * for more accurate analytics. + * + * @param origin The origin that was invoked. + */ + invoked(origin: LDMigrationOrigin): void; + + /** + * Report the latency of an operation. + * + * @param origin The origin the latency is being reported for. + * @param value The latency, in milliseconds, of the operation. + */ + latency(origin: LDMigrationOrigin, value: number): void; + + /** + * Create a migration op event. If the event could not be created, because of a missing + * operation, then undefined is returned. + */ + createEvent(): LDMigrationOpEvent | undefined; +} + +/** + * Migration value and tracker. 
+ */ +export interface LDMigrationVariation { + /** + * The result of the flag evaluation. This will be either one of the flag's variations or + * the default value that was passed to `LDClient.migrationVariation`. + */ + value: LDMigrationStage; + + /** + * A tracker which can be used to generate analytics for the migration. + */ + tracker: LDMigrationTracker; +} diff --git a/packages/shared/sdk-server/src/api/data/index.ts b/packages/shared/sdk-server/src/api/data/index.ts index c0bd06c1e..0ce283a47 100644 --- a/packages/shared/sdk-server/src/api/data/index.ts +++ b/packages/shared/sdk-server/src/api/data/index.ts @@ -1,2 +1,5 @@ export * from './LDFlagsStateOptions'; export * from './LDFlagsState'; +export * from './LDMigrationStage'; +export * from './LDMigrationOpEvent'; +export * from './LDMigrationVariation'; diff --git a/packages/shared/sdk-server/src/api/index.ts b/packages/shared/sdk-server/src/api/index.ts index ffd30fc3c..1018e6eff 100644 --- a/packages/shared/sdk-server/src/api/index.ts +++ b/packages/shared/sdk-server/src/api/index.ts @@ -3,7 +3,6 @@ export * from './options'; export * from './LDClient'; export * from './interfaces/DataKind'; export * from './subsystems/LDFeatureStore'; -export * from './subsystems/LDStreamProcessor'; // These are items that should be less frequently used, and therefore they // are namespaced to reduce clutter amongst the top level exports. diff --git a/packages/shared/sdk-server/src/api/options/LDMigrationOptions.ts b/packages/shared/sdk-server/src/api/options/LDMigrationOptions.ts new file mode 100644 index 000000000..361469be1 --- /dev/null +++ b/packages/shared/sdk-server/src/api/options/LDMigrationOptions.ts @@ -0,0 +1,158 @@ +/* eslint-disable max-classes-per-file */ +// Disabling max classes per file as these are tag classes without +// logic implementation. + +/** + * When execution is sequential this enum is used to control if execution + * should be in a fixed or random order. 
+ */ +export enum LDExecutionOrdering { + Fixed, + Random, +} + +/** + * Tag used to determine if execution should be serial or concurrent. + * Callers should not need to use this directly. + */ +export enum LDExecution { + /** + * Execution will be serial. One read method will be executed fully before + * the other read method. + */ + Serial, + /** + * Execution will be concurrent. The execution of the read methods will be + * started and then resolved concurrently. + */ + Concurrent, +} + +/** + * Migration methods may return an LDMethodResult. + * The implementation includes methods for creating results conveniently. + * + * An implementation may also throw an exception to represent an error. + */ +export type LDMethodResult = + | { + success: true; + result: TResult; + } + | { + success: false; + error: any; + }; + +/** + * Configuration class for configuring serial execution of a migration. + */ +export class LDSerialExecution { + readonly type: LDExecution = LDExecution.Serial; + + constructor(public readonly ordering: LDExecutionOrdering) {} +} + +/** + * Configuration class for configuring concurrent execution of a migration. + */ +export class LDConcurrentExecution { + readonly type: LDExecution = LDExecution.Concurrent; +} + +/** + * Configuration for a migration. + */ +export interface LDMigrationOptions< + TMigrationRead, + TMigrationWrite, + TMigrationReadInput, + TMigrationWriteInput, +> { + /** + * Configure how the migration should execute. If omitted the execution will + * be concurrent. + */ + execution?: LDSerialExecution | LDConcurrentExecution; + + /** + * Configure the latency tracking for the migration. + * + * Defaults to {@link true}. + */ + latencyTracking?: boolean; + + /** + * Configure the error tracking for the migration. + * + * Defaults to {@link true}. + */ + errorTracking?: boolean; + + /** + * Implementation which provides a read from the "new" source. 
+ * + * Users are required to provide two different read methods -- one to read from the old migration source, and one to + * read from the new source. Additionally, customers can opt-in to consistency tracking by providing a `check` + * function. + * + * Depending on the migration stage, one or both of these read methods may be called. + * + * Throwing an exception from this method will be treated as an error. + * + * @param payload An optional payload. The payload is provided when calling the `read` method on the migration. + * @returns The result of the operation. Use {@link LDMigrationSuccess} or {@link LDMigrationError} to create a suitable return value. + */ + readNew: (payload?: TMigrationReadInput) => Promise>; + + /** + * Implementation which provides a write to the "new" source. + * + * Users are required to provide two different write methods -- one to write to the old migration source, and one to + * write to the new source. Not every stage requires + * + * + * Depending on the migration stage, one or both of these write methods may be called. + * + * Throwing an exception from this method will be treated as an error. + * + * @param payload An optional payload. The payload is provided when calling the `read` method on the migration. + * @returns The result of the operation. Use {@link LDMigrationSuccess} or {@link LDMigrationError} to create a suitable return value. + */ + writeNew: (payload?: TMigrationWriteInput) => Promise>; + + /** + * Implementation which provides a read from the "old" source. + * + * Users are required to provide two different read methods -- one to read from the old migration source, and one to + * read from the new source. Additionally, customers can opt-in to consistency tracking by providing a `check` + * function. + * + * Depending on the migration stage, one or both of these read methods may be called. + * + * Throwing an exception from this method will be treated as an error. 
+ * + */ + readOld: (payload?: TMigrationReadInput) => Promise>; + + /** + * Implementation which provides a write to the "old" source. + * + * Users are required to provide two different write methods -- one to write to the old migration source, and one to + * write to the new source. Not every stage requires + * + * Depending on the migration stage, one or both of these write methods may be called. + * + * Throwing an exception from this method will be treated as an error. + * + * @param payload An optional payload. The payload is provided when calling the `read` method on the migration. + * @returns The result of the operation. Use {@link LDMigrationSuccess} or {@link LDMigrationError} to create a suitable return value. + */ + writeOld: (payload?: TMigrationWriteInput) => Promise>; + + /** + * Method used to do consistency checks for read operations. After a read operation, during which both data sources + * are read from, a check of read consistency may be done using this method. + */ + check?: (a: TMigrationRead, b: TMigrationRead) => boolean; +} diff --git a/packages/shared/sdk-server/src/api/options/LDOptions.ts b/packages/shared/sdk-server/src/api/options/LDOptions.ts index a9c77c2b5..f36590b4c 100644 --- a/packages/shared/sdk-server/src/api/options/LDOptions.ts +++ b/packages/shared/sdk-server/src/api/options/LDOptions.ts @@ -1,7 +1,6 @@ -import { LDClientContext, LDLogger } from '@launchdarkly/js-sdk-common'; +import { LDClientContext, LDLogger, subsystem, VoidFunction } from '@launchdarkly/js-sdk-common'; -import { LDDataSourceUpdates, LDStreamProcessor } from '../subsystems'; -import { LDFeatureStore } from '../subsystems/LDFeatureStore'; +import { LDDataSourceUpdates, LDFeatureStore } from '../subsystems'; import { LDBigSegmentsOptions } from './LDBigSegmentsOptions'; import { LDProxyOptions } from './LDProxyOptions'; import { LDTLSOptions } from './LDTLSOptions'; @@ -93,7 +92,9 @@ export interface LDOptions { | (( clientContext: LDClientContext, 
dataSourceUpdates: LDDataSourceUpdates, - ) => LDStreamProcessor); + initSuccessHandler: VoidFunction, + errorHandler?: (e: Error) => void, + ) => subsystem.LDStreamProcessor); /** * The interval in between flushes of the analytics events queue, in seconds. diff --git a/packages/shared/sdk-server/src/api/options/index.ts b/packages/shared/sdk-server/src/api/options/index.ts index a3467b9cc..1e7b63de7 100644 --- a/packages/shared/sdk-server/src/api/options/index.ts +++ b/packages/shared/sdk-server/src/api/options/index.ts @@ -2,3 +2,4 @@ export * from './LDBigSegmentsOptions'; export * from './LDOptions'; export * from './LDProxyOptions'; export * from './LDTLSOptions'; +export * from './LDMigrationOptions'; diff --git a/packages/shared/sdk-server/src/api/subsystems/index.ts b/packages/shared/sdk-server/src/api/subsystems/index.ts index 23bb2a2d3..4e21d2794 100644 --- a/packages/shared/sdk-server/src/api/subsystems/index.ts +++ b/packages/shared/sdk-server/src/api/subsystems/index.ts @@ -1,4 +1,3 @@ export * from './LDFeatureRequestor'; export * from './LDFeatureStore'; -export * from './LDStreamProcessor'; export * from './LDDataSourceUpdates'; diff --git a/packages/shared/sdk-server/src/data_sources/FileDataSource.ts b/packages/shared/sdk-server/src/data_sources/FileDataSource.ts index c40209e0b..69bfc7187 100644 --- a/packages/shared/sdk-server/src/data_sources/FileDataSource.ts +++ b/packages/shared/sdk-server/src/data_sources/FileDataSource.ts @@ -1,14 +1,20 @@ -import { Filesystem, LDLogger } from '@launchdarkly/js-sdk-common'; - -import { LDStreamProcessor } from '../api'; +import { + Filesystem, + LDFileDataSourceError, + LDLogger, + subsystem, + VoidFunction, +} from '@launchdarkly/js-sdk-common'; + +import { DataKind, LDFeatureStore, LDFeatureStoreDataStorage } from '../api'; import { FileDataSourceOptions } from '../api/integrations'; -import { DataKind } from '../api/interfaces'; -import { LDFeatureStore, LDFeatureStoreDataStorage } from 
'../api/subsystems'; import { Flag } from '../evaluation/data/Flag'; import { processFlag, processSegment } from '../store/serialization'; import VersionedDataKinds from '../store/VersionedDataKinds'; import FileLoader from './FileLoader'; +export type FileDataSourceErrorHandler = (err: LDFileDataSourceError) => void; + function makeFlagWithValue(key: string, value: any): Flag { return { key, @@ -19,7 +25,7 @@ function makeFlagWithValue(key: string, value: any): Flag { }; } -export default class FileDataSource implements LDStreamProcessor { +export default class FileDataSource implements subsystem.LDStreamProcessor { private logger?: LDLogger; private yamlParser?: (data: string) => any; @@ -28,8 +34,6 @@ export default class FileDataSource implements LDStreamProcessor { private allData: LDFeatureStoreDataStorage = {}; - private initCallback?: (err?: any) => void; - /** * This is internal because we want instances to only be created with the * factory. @@ -39,6 +43,8 @@ export default class FileDataSource implements LDStreamProcessor { options: FileDataSourceOptions, filesystem: Filesystem, private readonly featureStore: LDFeatureStore, + private initSuccessHandler: VoidFunction = () => {}, + private readonly errorHandler?: FileDataSourceErrorHandler, ) { this.fileLoader = new FileLoader( filesystem, @@ -51,7 +57,7 @@ export default class FileDataSource implements LDStreamProcessor { this.processFileData(results); } catch (err) { // If this was during start, then the initCallback will be present. 
- this.initCallback?.(err); + this.errorHandler?.(err as LDFileDataSourceError); this.logger?.error(`Error processing files: ${err}`); } }, @@ -61,8 +67,7 @@ export default class FileDataSource implements LDStreamProcessor { this.yamlParser = options.yamlParser; } - start(fn?: ((err?: any) => void) | undefined): void { - this.initCallback = fn; + start(): void { // Use an immediately invoked function expression to allow handling of the // async loading without making start async itself. (async () => { @@ -71,7 +76,7 @@ export default class FileDataSource implements LDStreamProcessor { } catch (err) { // There was an issue loading/watching the files. // Report back to the caller. - fn?.(err); + this.errorHandler?.(err as LDFileDataSourceError); } })(); } @@ -118,8 +123,8 @@ export default class FileDataSource implements LDStreamProcessor { this.featureStore.init(this.allData, () => { // Call the init callback if present. // Then clear the callback so we cannot call it again. - this.initCallback?.(); - this.initCallback = undefined; + this.initSuccessHandler(); + this.initSuccessHandler = () => {}; }); } diff --git a/packages/shared/sdk-server/src/data_sources/NullUpdateProcessor.ts b/packages/shared/sdk-server/src/data_sources/NullUpdateProcessor.ts deleted file mode 100644 index 849535fb4..000000000 --- a/packages/shared/sdk-server/src/data_sources/NullUpdateProcessor.ts +++ /dev/null @@ -1,21 +0,0 @@ -// This is an empty implementation, so it doesn't use this, and it has empty methods, and it -// has unused variables. - -/* eslint-disable class-methods-use-this */ - -/* eslint-disable @typescript-eslint/no-empty-function */ - -/* eslint-disable @typescript-eslint/no-unused-vars */ -import { LDStreamProcessor } from '../api'; - -export default class NullUpdateProcessor implements LDStreamProcessor { - start(fn?: ((err?: any) => void) | undefined) { - // Deferring the start callback should allow client construction to complete before we start - // emitting events. 
Allowing the client an opportunity to register events. - setTimeout(() => fn?.(), 0); - } - - stop() {} - - close() {} -} diff --git a/packages/shared/sdk-server/src/data_sources/PollingProcessor.ts b/packages/shared/sdk-server/src/data_sources/PollingProcessor.ts index a7af081a4..a51beee7c 100644 --- a/packages/shared/sdk-server/src/data_sources/PollingProcessor.ts +++ b/packages/shared/sdk-server/src/data_sources/PollingProcessor.ts @@ -1,18 +1,24 @@ -import { LDLogger } from '@launchdarkly/js-sdk-common'; +import { + httpErrorMessage, + isHttpRecoverable, + LDLogger, + LDPollingError, + subsystem, + VoidFunction, +} from '@launchdarkly/js-sdk-common'; -import { LDStreamProcessor } from '../api'; import { LDDataSourceUpdates } from '../api/subsystems'; -import { isHttpRecoverable, LDPollingError } from '../errors'; import Configuration from '../options/Configuration'; -import { deserializePoll } from '../store/serialization'; +import { deserializePoll } from '../store'; import VersionedDataKinds from '../store/VersionedDataKinds'; -import httpErrorMessage from './httpErrorMessage'; import Requestor from './Requestor'; +export type PollingErrorHandler = (err: LDPollingError) => void; + /** * @internal */ -export default class PollingProcessor implements LDStreamProcessor { +export default class PollingProcessor implements subsystem.LDStreamProcessor { private stopped = false; private logger?: LDLogger; @@ -25,13 +31,14 @@ export default class PollingProcessor implements LDStreamProcessor { config: Configuration, private readonly requestor: Requestor, private readonly featureStore: LDDataSourceUpdates, + private readonly initSuccessHandler: VoidFunction = () => {}, + private readonly errorHandler?: PollingErrorHandler, ) { this.logger = config.logger; this.pollInterval = config.pollInterval; - this.featureStore = featureStore; } - private poll(fn?: ((err?: any) => void) | undefined) { + private poll() { if (this.stopped) { return; } @@ -39,7 +46,7 @@ export default 
class PollingProcessor implements LDStreamProcessor { const reportJsonError = (data: string) => { this.logger?.error('Polling received invalid data'); this.logger?.debug(`Invalid JSON follows: ${data}`); - fn?.(new LDPollingError('Malformed JSON data in polling response')); + this.errorHandler?.(new LDPollingError('Malformed JSON data in polling response')); }; const startTime = Date.now(); @@ -50,10 +57,11 @@ export default class PollingProcessor implements LDStreamProcessor { this.logger?.debug('Elapsed: %d ms, sleeping for %d ms', elapsed, sleepFor); if (err) { - if (err.status && !isHttpRecoverable(err.status)) { + const { status } = err; + if (status && !isHttpRecoverable(status)) { const message = httpErrorMessage(err, 'polling request'); this.logger?.error(message); - fn?.(new LDPollingError(message)); + this.errorHandler?.(new LDPollingError(message, status)); // It is not recoverable, return and do not trigger another // poll. return; @@ -71,10 +79,10 @@ export default class PollingProcessor implements LDStreamProcessor { [VersionedDataKinds.Segments.namespace]: parsed.segments, }; this.featureStore.init(initData, () => { - fn?.(); + this.initSuccessHandler(); // Triggering the next poll after the init has completed. this.timeoutHandle = setTimeout(() => { - this.poll(fn); + this.poll(); }, sleepFor); }); // The poll will be triggered by the feature store initialization @@ -86,13 +94,13 @@ export default class PollingProcessor implements LDStreamProcessor { // Falling through, there was some type of error and we need to trigger // a new poll. 
this.timeoutHandle = setTimeout(() => { - this.poll(fn); + this.poll(); }, sleepFor); }); } - start(fn?: ((err?: any) => void) | undefined) { - this.poll(fn); + start() { + this.poll(); } stop() { diff --git a/packages/shared/sdk-server/src/data_sources/Requestor.ts b/packages/shared/sdk-server/src/data_sources/Requestor.ts index b0bd20f80..6ffb2200d 100644 --- a/packages/shared/sdk-server/src/data_sources/Requestor.ts +++ b/packages/shared/sdk-server/src/data_sources/Requestor.ts @@ -1,9 +1,14 @@ -import { Info, Options, Requests, Response } from '@launchdarkly/js-sdk-common'; +import { + defaultHeaders, + Info, + LDStreamingError, + Options, + Requests, + Response, +} from '@launchdarkly/js-sdk-common'; import { LDFeatureRequestor } from '../api/subsystems'; -import { LDStreamingError } from '../errors'; import Configuration from '../options/Configuration'; -import defaultHeaders from './defaultHeaders'; /** * @internal @@ -27,7 +32,7 @@ export default class Requestor implements LDFeatureRequestor { info: Info, private readonly requests: Requests, ) { - this.headers = defaultHeaders(sdkKey, config, info); + this.headers = defaultHeaders(sdkKey, info, config.tags); this.uri = `${config.serviceEndpoints.polling}/sdk/latest-all`; } diff --git a/packages/shared/sdk-server/src/data_sources/StreamingProcessor.ts b/packages/shared/sdk-server/src/data_sources/StreamingProcessor.ts deleted file mode 100644 index 411657aff..000000000 --- a/packages/shared/sdk-server/src/data_sources/StreamingProcessor.ts +++ /dev/null @@ -1,199 +0,0 @@ -import { EventSource, Info, LDLogger, Requests } from '@launchdarkly/js-sdk-common'; - -import { LDStreamProcessor } from '../api'; -import { LDDataSourceUpdates } from '../api/subsystems'; -import { isHttpRecoverable, LDStreamingError } from '../errors'; -import DiagnosticsManager from '../events/DiagnosticsManager'; -import Configuration from '../options/Configuration'; -import { deserializeAll, deserializeDelete, deserializePatch } from 
'../store/serialization'; -import VersionedDataKinds, { VersionedDataKind } from '../store/VersionedDataKinds'; -import defaultHeaders from './defaultHeaders'; -import httpErrorMessage from './httpErrorMessage'; - -const STREAM_READ_TIMEOUT_MS = 5 * 60 * 1000; -const RETRY_RESET_INTERVAL_MS = 60 * 1000; - -function getKeyFromPath(kind: VersionedDataKind, path: string): string | undefined { - return path.startsWith(kind.streamApiPath) - ? path.substring(kind.streamApiPath.length) - : undefined; -} - -/** - * @internal - */ -export default class StreamingProcessor implements LDStreamProcessor { - private headers: { [key: string]: string | string[] }; - - private eventSource?: EventSource; - - private logger?: LDLogger; - - private streamUri: string; - - private streamInitialReconnectDelay: number; - - private requests: Requests; - - private connectionAttemptStartTime?: number; - - constructor( - sdkKey: string, - config: Configuration, - requests: Requests, - info: Info, - private readonly featureStore: LDDataSourceUpdates, - private readonly diagnosticsManager?: DiagnosticsManager, - ) { - this.headers = defaultHeaders(sdkKey, config, info); - this.logger = config.logger; - this.streamInitialReconnectDelay = config.streamInitialReconnectDelay; - this.requests = requests; - - this.streamUri = `${config.serviceEndpoints.streaming}/all`; - } - - private logConnectionStarted() { - this.connectionAttemptStartTime = Date.now(); - } - - private logConnectionResult(success: boolean) { - if (this.connectionAttemptStartTime && this.diagnosticsManager) { - this.diagnosticsManager.recordStreamInit( - this.connectionAttemptStartTime, - !success, - Date.now() - this.connectionAttemptStartTime, - ); - } - - this.connectionAttemptStartTime = undefined; - } - - start(fn?: ((err?: any) => void) | undefined) { - this.logConnectionStarted(); - - const errorFilter = (err: { status: number; message: string }): boolean => { - if (err.status && !isHttpRecoverable(err.status)) { - 
this.logConnectionResult(false); - fn?.(new LDStreamingError(err.message, err.status)); - this.logger?.error(httpErrorMessage(err, 'streaming request')); - return false; - } - - this.logger?.warn(httpErrorMessage(err, 'streaming request', 'will retry')); - this.logConnectionResult(false); - this.logConnectionStarted(); - return true; - }; - - const reportJsonError = (type: string, data: string) => { - this.logger?.error(`Stream received invalid data in "${type}" message`); - this.logger?.debug(`Invalid JSON follows: ${data}`); - fn?.(new LDStreamingError('Malformed JSON data in event stream')); - }; - - // TLS is handled by the platform implementation. - - const eventSource = this.requests.createEventSource(this.streamUri, { - headers: this.headers, - errorFilter, - initialRetryDelayMillis: 1000 * this.streamInitialReconnectDelay, - readTimeoutMillis: STREAM_READ_TIMEOUT_MS, - retryResetIntervalMillis: RETRY_RESET_INTERVAL_MS, - }); - this.eventSource = eventSource; - - eventSource.onclose = () => { - this.logger?.info('Closed LaunchDarkly stream connection'); - }; - - eventSource.onerror = () => { - // The work is done by `errorFilter`. 
- }; - - eventSource.onopen = () => { - this.logger?.info('Opened LaunchDarkly stream connection'); - }; - - eventSource.onretrying = (e) => { - this.logger?.info(`Will retry stream connection in ${e.delayMillis} milliseconds`); - }; - - eventSource.addEventListener('put', (event) => { - this.logger?.debug('Received put event'); - if (event && event.data) { - this.logConnectionResult(true); - const parsed = deserializeAll(event.data); - if (!parsed) { - reportJsonError('put', event.data); - return; - } - const initData = { - [VersionedDataKinds.Features.namespace]: parsed.data.flags, - [VersionedDataKinds.Segments.namespace]: parsed.data.segments, - }; - - this.featureStore.init(initData, () => fn?.()); - } else { - fn?.(new LDStreamingError('Unexpected payload from event stream')); - } - }); - - eventSource.addEventListener('patch', (event) => { - this.logger?.debug('Received patch event'); - if (event && event.data) { - const parsed = deserializePatch(event.data); - if (!parsed) { - reportJsonError('patch', event.data); - return; - } - if (parsed.kind) { - const key = getKeyFromPath(parsed.kind, parsed.path); - if (key) { - this.logger?.debug(`Updating ${key} in ${parsed.kind.namespace}`); - this.featureStore.upsert(parsed.kind, parsed.data, () => {}); - } - } - } else { - fn?.(new LDStreamingError('Unexpected payload from event stream')); - } - }); - - eventSource.addEventListener('delete', (event) => { - this.logger?.debug('Received delete event'); - if (event && event.data) { - const parsed = deserializeDelete(event.data); - if (!parsed) { - reportJsonError('delete', event.data); - return; - } - if (parsed.kind) { - const key = getKeyFromPath(parsed.kind, parsed.path); - if (key) { - this.logger?.debug(`Deleting ${key} in ${parsed.kind.namespace}`); - this.featureStore.upsert( - parsed.kind, - { - key, - version: parsed.version, - deleted: true, - }, - () => {}, - ); - } - } - } else { - fn?.(new LDStreamingError('Unexpected payload from event stream')); - } - 
}); - } - - stop() { - this.eventSource?.close(); - this.eventSource = undefined; - } - - close() { - this.stop(); - } -} diff --git a/packages/shared/sdk-server/src/data_sources/createStreamListeners.test.ts b/packages/shared/sdk-server/src/data_sources/createStreamListeners.test.ts new file mode 100644 index 000000000..002f43336 --- /dev/null +++ b/packages/shared/sdk-server/src/data_sources/createStreamListeners.test.ts @@ -0,0 +1,182 @@ +import { logger } from '@launchdarkly/private-js-mocks'; + +import { LDDataSourceUpdates } from '../api/subsystems'; +import { deserializeAll, deserializeDelete, deserializePatch } from '../store/serialization'; +import VersionedDataKinds from '../store/VersionedDataKinds'; +import { createStreamListeners } from './createStreamListeners'; + +jest.mock('../store/serialization'); + +const allData = { + data: { + flags: { + flagkey: { key: 'flagkey', version: 1 }, + }, + segments: { + segkey: { key: 'segkey', version: 2 }, + }, + }, +}; + +const patchData = { + path: '/flags/flagkey', + data: { key: 'flagkey', version: 1 }, + kind: VersionedDataKinds.Features, +}; + +const deleteData = { path: '/flags/flagkey', version: 2, kind: VersionedDataKinds.Features }; + +describe('createStreamListeners', () => { + let dataSourceUpdates: LDDataSourceUpdates; + let onPutCompleteHandler: jest.Mock; + let onPatchCompleteHandler: jest.Mock; + let onDeleteCompleteHandler: jest.Mock; + let onCompleteHandlers: { + put: jest.Mock; + patch: jest.Mock; + delete: jest.Mock; + }; + + beforeEach(() => { + dataSourceUpdates = { + init: jest.fn(), + upsert: jest.fn(), + }; + onPutCompleteHandler = jest.fn(); + onPatchCompleteHandler = jest.fn(); + onDeleteCompleteHandler = jest.fn(); + onCompleteHandlers = { + put: onPutCompleteHandler, + patch: onPatchCompleteHandler, + delete: onDeleteCompleteHandler, + }; + }); + + afterEach(() => { + jest.resetAllMocks(); + }); + + describe('put', () => { + test('creates put patch delete handlers', () => { + const 
listeners = createStreamListeners(dataSourceUpdates, logger, onCompleteHandlers); + + expect(listeners.size).toEqual(3); + expect(listeners.has('put')).toBeTruthy(); + expect(listeners.has('patch')).toBeTruthy(); + expect(listeners.has('delete')).toBeTruthy(); + }); + + test('createPutListener', () => { + const listeners = createStreamListeners(dataSourceUpdates, logger, onCompleteHandlers); + const { deserializeData, processJson } = listeners.get('put')!; + + expect(deserializeData).toBe(deserializeAll); + expect(processJson).toBeDefined(); + }); + + test('data source init is called', async () => { + const listeners = createStreamListeners(dataSourceUpdates, logger, onCompleteHandlers); + const { processJson } = listeners.get('put')!; + const { + data: { flags, segments }, + } = allData; + + processJson(allData); + + expect(logger.debug).toBeCalledWith(expect.stringMatching(/initializing/i)); + expect(dataSourceUpdates.init).toBeCalledWith( + { + features: flags, + segments, + }, + onPutCompleteHandler, + ); + }); + }); + + describe('patch', () => { + test('createPatchListener', () => { + const listeners = createStreamListeners(dataSourceUpdates, logger, onCompleteHandlers); + const { deserializeData, processJson } = listeners.get('patch')!; + + expect(deserializeData).toBe(deserializePatch); + expect(processJson).toBeDefined(); + }); + + test('data source upsert is called', async () => { + const listeners = createStreamListeners(dataSourceUpdates, logger, onCompleteHandlers); + const { processJson } = listeners.get('patch')!; + const { data, kind } = patchData; + + processJson(patchData); + + expect(logger.debug).toBeCalledWith(expect.stringMatching(/updating/i)); + expect(dataSourceUpdates.upsert).toBeCalledWith(kind, data, onPatchCompleteHandler); + }); + + test('data source upsert not called missing kind', async () => { + const listeners = createStreamListeners(dataSourceUpdates, logger, onCompleteHandlers); + const { processJson } = listeners.get('patch')!; + 
const missingKind = { ...patchData, kind: undefined }; + + processJson(missingKind); + + expect(dataSourceUpdates.upsert).not.toBeCalled(); + }); + + test('data source upsert not called wrong namespace path', async () => { + const listeners = createStreamListeners(dataSourceUpdates, logger, onCompleteHandlers); + const { processJson } = listeners.get('patch')!; + const wrongKey = { ...patchData, path: '/wrong/flagkey' }; + + processJson(wrongKey); + + expect(dataSourceUpdates.upsert).not.toBeCalled(); + }); + }); + + describe('delete', () => { + test('createDeleteListener', () => { + const listeners = createStreamListeners(dataSourceUpdates, logger, onCompleteHandlers); + const { deserializeData, processJson } = listeners.get('delete')!; + + expect(deserializeData).toBe(deserializeDelete); + expect(processJson).toBeDefined(); + }); + + test('data source upsert is called', async () => { + const listeners = createStreamListeners(dataSourceUpdates, logger, onCompleteHandlers); + const { processJson } = listeners.get('delete')!; + const { kind, version } = deleteData; + + processJson(deleteData); + + expect(logger.debug).toBeCalledWith(expect.stringMatching(/deleting/i)); + expect(dataSourceUpdates.upsert).toBeCalledWith( + kind, + { key: 'flagkey', version, deleted: true }, + onDeleteCompleteHandler, + ); + }); + + test('data source upsert not called missing kind', async () => { + const listeners = createStreamListeners(dataSourceUpdates, logger, onCompleteHandlers); + const { processJson } = listeners.get('delete')!; + const missingKind = { ...deleteData, kind: undefined }; + + processJson(missingKind); + + expect(dataSourceUpdates.upsert).not.toBeCalled(); + }); + + test('data source upsert not called wrong namespace path', async () => { + const listeners = createStreamListeners(dataSourceUpdates, logger, onCompleteHandlers); + const { processJson } = listeners.get('delete')!; + const wrongKey = { ...deleteData, path: '/wrong/flagkey' }; + + processJson(wrongKey); + 
+ expect(dataSourceUpdates.upsert).not.toBeCalled(); + }); + }); +}); diff --git a/packages/shared/sdk-server/src/data_sources/createStreamListeners.ts b/packages/shared/sdk-server/src/data_sources/createStreamListeners.ts new file mode 100644 index 000000000..391e14191 --- /dev/null +++ b/packages/shared/sdk-server/src/data_sources/createStreamListeners.ts @@ -0,0 +1,95 @@ +import { + EventName, + LDLogger, + ProcessStreamResponse, + VoidFunction, +} from '@launchdarkly/js-sdk-common'; + +import { LDDataSourceUpdates } from '../api/subsystems'; +import { + AllData, + DeleteData, + deserializeAll, + deserializeDelete, + deserializePatch, + PatchData, +} from '../store/serialization'; +import VersionedDataKinds from '../store/VersionedDataKinds'; + +export const createPutListener = ( + dataSourceUpdates: LDDataSourceUpdates, + logger?: LDLogger, + onPutCompleteHandler: VoidFunction = () => {}, +) => ({ + deserializeData: deserializeAll, + processJson: async ({ data: { flags, segments } }: AllData) => { + const initData = { + [VersionedDataKinds.Features.namespace]: flags, + [VersionedDataKinds.Segments.namespace]: segments, + }; + + logger?.debug('Initializing all data'); + dataSourceUpdates.init(initData, onPutCompleteHandler); + }, +}); + +export const createPatchListener = ( + dataSourceUpdates: LDDataSourceUpdates, + logger?: LDLogger, + onPatchCompleteHandler: VoidFunction = () => {}, +) => ({ + deserializeData: deserializePatch, + processJson: async ({ data, kind, path }: PatchData) => { + if (kind) { + const key = VersionedDataKinds.getKeyFromPath(kind, path); + if (key) { + logger?.debug(`Updating ${key} in ${kind.namespace}`); + dataSourceUpdates.upsert(kind, data, onPatchCompleteHandler); + } + } + }, +}); + +export const createDeleteListener = ( + dataSourceUpdates: LDDataSourceUpdates, + logger?: LDLogger, + onDeleteCompleteHandler: VoidFunction = () => {}, +) => ({ + deserializeData: deserializeDelete, + processJson: async ({ kind, path, version }: 
DeleteData) => { + if (kind) { + const key = VersionedDataKinds.getKeyFromPath(kind, path); + if (key) { + logger?.debug(`Deleting ${key} in ${kind.namespace}`); + dataSourceUpdates.upsert( + kind, + { + key, + version, + deleted: true, + }, + onDeleteCompleteHandler, + ); + } + } + }, +}); + +export const createStreamListeners = ( + dataSourceUpdates: LDDataSourceUpdates, + logger?: LDLogger, + onCompleteHandlers?: { + put?: VoidFunction; + patch?: VoidFunction; + delete?: VoidFunction; + }, +) => { + const listeners = new Map(); + listeners.set('put', createPutListener(dataSourceUpdates, logger, onCompleteHandlers?.put)); + listeners.set('patch', createPatchListener(dataSourceUpdates, logger, onCompleteHandlers?.patch)); + listeners.set( + 'delete', + createDeleteListener(dataSourceUpdates, logger, onCompleteHandlers?.delete), + ); + return listeners; +}; diff --git a/packages/shared/sdk-server/src/data_sources/defaultHeaders.ts b/packages/shared/sdk-server/src/data_sources/defaultHeaders.ts deleted file mode 100644 index 42db4ac2a..000000000 --- a/packages/shared/sdk-server/src/data_sources/defaultHeaders.ts +++ /dev/null @@ -1,32 +0,0 @@ -import { ApplicationTags, Info } from '@launchdarkly/js-sdk-common'; - -export interface DefaultHeaderOptions { - tags: ApplicationTags; -} - -export default function defaultHeaders( - sdkKey: string, - config: DefaultHeaderOptions, - info: Info, -): { [key: string]: string } { - const sdkData = info.sdkData(); - const headers: { [key: string]: string } = { - authorization: sdkKey, - 'user-agent': `${sdkData.userAgentBase ? sdkData.userAgentBase : 'NodeJSClient'}/${ - sdkData.version - }`, - }; - - if (sdkData.wrapperName) { - headers['x-launchdarkly-wrapper'] = sdkData.wrapperVersion - ? 
`${sdkData.wrapperName}/${sdkData.wrapperVersion}` - : sdkData.wrapperName; - } - - const tags = config.tags.value; - if (tags) { - headers['x-launchdarkly-tags'] = tags; - } - - return headers; -} diff --git a/packages/shared/sdk-server/src/data_sources/httpErrorMessage.ts b/packages/shared/sdk-server/src/data_sources/httpErrorMessage.ts deleted file mode 100644 index 4555072bc..000000000 --- a/packages/shared/sdk-server/src/data_sources/httpErrorMessage.ts +++ /dev/null @@ -1,17 +0,0 @@ -export default function httpErrorMessage( - err: { - status: number; - message: string; - }, - context: string, - retryMessage?: string, -): string { - let desc; - if (err.status) { - desc = `error ${err.status}${err.status === 401 ? ' (invalid SDK key)' : ''}`; - } else { - desc = `I/O error (${err.message || err})`; - } - const action = retryMessage ?? 'giving up permanently'; - return `Received ${desc} for ${context} - ${action}`; -} diff --git a/packages/shared/sdk-server/src/diagnostics/createDiagnosticsInitConfig.test.ts b/packages/shared/sdk-server/src/diagnostics/createDiagnosticsInitConfig.test.ts new file mode 100644 index 000000000..a6a566bf6 --- /dev/null +++ b/packages/shared/sdk-server/src/diagnostics/createDiagnosticsInitConfig.test.ts @@ -0,0 +1,112 @@ +import { basicPlatform } from '@launchdarkly/private-js-mocks'; + +import { LDOptions } from '../api'; +import Configuration from '../options/Configuration'; +import createDiagnosticsInitConfig from './createDiagnosticsInitConfig'; + +const mockFeatureStore = { + getDescription: jest.fn(() => 'Mock Feature Store'), +}; + +describe.each([ + [ + {}, + { + allAttributesPrivate: false, + connectTimeoutMillis: 5000, + customBaseURI: false, + customEventsURI: false, + customStreamURI: false, + dataStoreType: 'Mock Feature Store', + diagnosticRecordingIntervalMillis: 900000, + eventsCapacity: 10000, + eventsFlushIntervalMillis: 5000, + offline: false, + pollingIntervalMillis: 30000, + reconnectTimeMillis: 1000, + 
socketTimeoutMillis: 5000, + streamingDisabled: false, + contextKeysCapacity: 1000, + contextKeysFlushIntervalMillis: 300000, + usingProxy: false, + usingProxyAuthenticator: false, + usingRelayDaemon: false, + }, + ], + [ + { baseUri: 'http://other' }, + { + customBaseURI: true, + customEventsURI: false, + customStreamURI: false, + }, + ], + [ + { eventsUri: 'http://other' }, + { + customBaseURI: false, + customEventsURI: true, + customStreamURI: false, + }, + ], + [ + { streamUri: 'http://other' }, + { + customBaseURI: false, + customEventsURI: false, + customStreamURI: true, + }, + ], + [{ allAttributesPrivate: true }, { allAttributesPrivate: true }], + [{ timeout: 6 }, { connectTimeoutMillis: 6000, socketTimeoutMillis: 6000 }], + [{ diagnosticRecordingInterval: 999 }, { diagnosticRecordingIntervalMillis: 999000 }], + [{ capacity: 999 }, { eventsCapacity: 999 }], + [{ flushInterval: 33 }, { eventsFlushIntervalMillis: 33000 }], + [{ stream: false }, { streamingDisabled: true }], + [{ streamInitialReconnectDelay: 33 }, { reconnectTimeMillis: 33000 }], + [{ contextKeysCapacity: 111 }, { contextKeysCapacity: 111 }], + [{ contextKeysFlushInterval: 33 }, { contextKeysFlushIntervalMillis: 33000 }], + [{ useLdd: true }, { usingRelayDaemon: true }], + [{ featureStore: undefined }, { dataStoreType: 'memory' }], +])('given diagnostics managers with different configurations', (optionsIn, configOut) => { + let configuration: Configuration; + + beforeEach(() => { + jest.spyOn(Date, 'now').mockImplementation(() => 7777); + configuration = new Configuration(optionsIn as LDOptions); + }); + + afterEach(() => { + jest.resetAllMocks(); + }); + + it('translates the configuration correctly', () => { + const c = createDiagnosticsInitConfig(configuration, basicPlatform, mockFeatureStore as any); + + expect(c).toMatchObject(configOut); + }); +}); + +describe.each([true, false])('Given proxy && proxyAuth = %p', (auth) => { + beforeEach(() => { + basicPlatform.requests.usingProxy = 
jest.fn(() => auth); + basicPlatform.requests.usingProxyAuth = jest.fn(() => auth); + }); + + afterEach(() => { + jest.resetAllMocks(); + }); + + it('it gets the proxy configuration from the basicPlatform', () => { + const c = createDiagnosticsInitConfig( + new Configuration(), + basicPlatform, + mockFeatureStore as any, + ); + + expect(c).toMatchObject({ + usingProxy: auth, + usingProxyAuthenticator: auth, + }); + }); +}); diff --git a/packages/shared/sdk-server/src/diagnostics/createDiagnosticsInitConfig.ts b/packages/shared/sdk-server/src/diagnostics/createDiagnosticsInitConfig.ts new file mode 100644 index 000000000..764efef6e --- /dev/null +++ b/packages/shared/sdk-server/src/diagnostics/createDiagnosticsInitConfig.ts @@ -0,0 +1,37 @@ +import { Platform, secondsToMillis } from '@launchdarkly/js-sdk-common'; + +import { LDFeatureStore } from '../api'; +import Configuration, { defaultValues } from '../options/Configuration'; + +const createDiagnosticsInitConfig = ( + config: Configuration, + platform: Platform, + featureStore: LDFeatureStore, +) => ({ + customBaseURI: config.serviceEndpoints.polling !== defaultValues.baseUri, + customStreamURI: config.serviceEndpoints.streaming !== defaultValues.streamUri, + customEventsURI: config.serviceEndpoints.events !== defaultValues.eventsUri, + eventsCapacity: config.eventsCapacity, + + // Node doesn't distinguish between these two kinds of timeouts. It is unlikely other web + // based implementations would be able to either. 
+ connectTimeoutMillis: secondsToMillis(config.timeout), + socketTimeoutMillis: secondsToMillis(config.timeout), + eventsFlushIntervalMillis: secondsToMillis(config.flushInterval), + pollingIntervalMillis: secondsToMillis(config.pollInterval), + reconnectTimeMillis: secondsToMillis(config.streamInitialReconnectDelay), + contextKeysFlushIntervalMillis: secondsToMillis(config.contextKeysFlushInterval), + diagnosticRecordingIntervalMillis: secondsToMillis(config.diagnosticRecordingInterval), + + streamingDisabled: !config.stream, + usingRelayDaemon: config.useLdd, + offline: config.offline, + allAttributesPrivate: config.allAttributesPrivate, + contextKeysCapacity: config.contextKeysCapacity, + + usingProxy: !!platform.requests.usingProxy?.(), + usingProxyAuthenticator: !!platform.requests.usingProxyAuth?.(), + dataStoreType: featureStore.getDescription?.() ?? 'memory', +}); + +export default createDiagnosticsInitConfig; diff --git a/packages/shared/sdk-server/src/evaluation/EvalResult.ts b/packages/shared/sdk-server/src/evaluation/EvalResult.ts index 01a712cd9..4ddb54543 100644 --- a/packages/shared/sdk-server/src/evaluation/EvalResult.ts +++ b/packages/shared/sdk-server/src/evaluation/EvalResult.ts @@ -1,8 +1,8 @@ import { internal, LDEvaluationDetail, LDEvaluationReason } from '@launchdarkly/js-sdk-common'; -import ErrorKinds from './ErrorKinds'; import Reasons from './Reasons'; +const { createErrorEvaluationDetail, createSuccessEvaluationDetail } = internal; /** * A class which encapsulates the result of an evaluation. It allows for differentiating between * successful and error result types. @@ -30,23 +30,12 @@ export default class EvalResult { this.detail.value = def; } - static forError(errorKind: ErrorKinds, message?: string, def?: any): EvalResult { - return new EvalResult( - true, - { - value: def ?? 
null, - variationIndex: null, - reason: { kind: 'ERROR', errorKind }, - }, - message, - ); + static forError(errorKind: internal.ErrorKinds, message?: string, def?: any): EvalResult { + return new EvalResult(true, createErrorEvaluationDetail(errorKind, def), message); } static forSuccess(value: any, reason: LDEvaluationReason, variationIndex?: number) { - return new EvalResult(false, { - value, - variationIndex: variationIndex === undefined ? null : variationIndex, - reason, - }); + const successDetail = createSuccessEvaluationDetail(value, variationIndex, reason); + return new EvalResult(false, successDetail as LDEvaluationDetail); } } diff --git a/packages/shared/sdk-server/src/evaluation/Evaluator.ts b/packages/shared/sdk-server/src/evaluation/Evaluator.ts index 2abe12a9f..ef95b9eee 100644 --- a/packages/shared/sdk-server/src/evaluation/Evaluator.ts +++ b/packages/shared/sdk-server/src/evaluation/Evaluator.ts @@ -13,7 +13,6 @@ import { FlagRule } from './data/FlagRule'; import { Segment } from './data/Segment'; import { SegmentRule } from './data/SegmentRule'; import { VariationOrRollout } from './data/VariationOrRollout'; -import ErrorKinds from './ErrorKinds'; import EvalResult from './EvalResult'; import evalTargets from './evalTargets'; import makeBigSegmentRef from './makeBigSegmentRef'; @@ -23,6 +22,8 @@ import { Queries } from './Queries'; import Reasons from './Reasons'; import { getBucketBy, getOffVariation, getVariation } from './variations'; +const { ErrorKinds } = internal; + /** * PERFORMANCE NOTE: The evaluation algorithm uses callbacks instead of async/await to optimize * performance. 
This is most important for collections where iterating through rules/clauses @@ -175,7 +176,6 @@ export default class Evaluator { private evaluateInternal( flag: Flag, context: Context, - // eslint-disable-next-line @typescript-eslint/no-unused-vars state: EvalState, visitedFlags: string[], cb: (res: EvalResult) => void, @@ -272,11 +272,11 @@ export default class Evaluator { updatedVisitedFlags, (res) => { // eslint-disable-next-line no-param-reassign - state.events = state.events ?? []; + state.events ??= []; if (eventFactory) { state.events.push( - eventFactory.evalEvent(prereqFlag, context, res.detail, null, flag), + eventFactory.evalEventServer(prereqFlag, context, res.detail, null, flag), ); } @@ -601,9 +601,7 @@ export default class Evaluator { segmentMatchContext( segment: Segment, context: Context, - // eslint-disable-next-line @typescript-eslint/no-unused-vars state: EvalState, - // eslint-disable-next-line @typescript-eslint/no-unused-vars segmentsVisited: string[], cb: (res: MatchOrError) => void, ): void { diff --git a/packages/shared/sdk-server/src/evaluation/data/Flag.ts b/packages/shared/sdk-server/src/evaluation/data/Flag.ts index c5d03cb9f..2b4bf79de 100644 --- a/packages/shared/sdk-server/src/evaluation/data/Flag.ts +++ b/packages/shared/sdk-server/src/evaluation/data/Flag.ts @@ -25,4 +25,9 @@ export interface Flag extends Versioned { trackEvents?: boolean; trackEventsFallthrough?: boolean; debugEventsUntilDate?: number; + excludeFromSummaries?: boolean; + samplingRatio?: number; + migration?: { + checkRatio?: number; + }; } diff --git a/packages/shared/sdk-server/src/evaluation/variations.ts b/packages/shared/sdk-server/src/evaluation/variations.ts index 7c048a2b2..909516230 100644 --- a/packages/shared/sdk-server/src/evaluation/variations.ts +++ b/packages/shared/sdk-server/src/evaluation/variations.ts @@ -1,13 +1,14 @@ import { AttributeReference, + internal, LDEvaluationReason, TypeValidators, } from '@launchdarkly/js-sdk-common'; import { 
Flag } from './data/Flag'; -import ErrorKinds from './ErrorKinds'; import EvalResult from './EvalResult'; +const { ErrorKinds } = internal; const KEY_ATTR_REF = new AttributeReference('key'); /** diff --git a/packages/shared/sdk-server/src/events/DiagnosticsManager.ts b/packages/shared/sdk-server/src/events/DiagnosticsManager.ts deleted file mode 100644 index e9fb1a806..000000000 --- a/packages/shared/sdk-server/src/events/DiagnosticsManager.ts +++ /dev/null @@ -1,199 +0,0 @@ -import { Platform } from '@launchdarkly/js-sdk-common'; - -import { LDFeatureStore } from '../api/subsystems'; -import Configuration, { defaultValues } from '../options/Configuration'; - -interface DiagnosticPlatformData { - name?: string; - osArch?: string; - osName?: string; - osVersion?: string; - /** - * Platform specific identifiers. - * For instance `nodeVersion` - */ - [key: string]: string | undefined; -} - -interface DiagnosticSdkData { - name?: string; - wrapperName?: string; - wrapperVersion?: string; -} - -interface DiagnosticConfigData { - customBaseURI: boolean; - customStreamURI: boolean; - customEventsURI: boolean; - eventsCapacity: number; - connectTimeoutMillis: number; - socketTimeoutMillis: number; - eventsFlushIntervalMillis: number; - pollingIntervalMillis: number; - // startWaitMillis: n/a (SDK does not have this feature) - // samplingInterval: n/a (SDK does not have this feature) - reconnectTimeMillis: number; - streamingDisabled: boolean; - usingRelayDaemon: boolean; - offline: boolean; - allAttributesPrivate: boolean; - contextKeysCapacity: number; - contextKeysFlushIntervalMillis: number; - usingProxy: boolean; - usingProxyAuthenticator: boolean; - diagnosticRecordingIntervalMillis: number; - dataStoreType: string; -} - -interface DiagnosticId { - diagnosticId: string; - sdkKeySuffix: string; -} - -export interface DiagnosticInitEvent { - kind: 'diagnostic-init'; - id: DiagnosticId; - creationDate: number; - sdk: DiagnosticSdkData; - configuration: 
DiagnosticConfigData; - platform: DiagnosticPlatformData; -} - -interface StreamInitData { - timestamp: number; - failed: boolean; - durationMillis: number; -} - -export interface DiagnosticStatsEvent { - kind: 'diagnostic'; - id: DiagnosticId; - creationDate: number; - dataSinceDate: number; - droppedEvents: number; - deduplicatedUsers: number; - eventsInLastBatch: number; - streamInits: StreamInitData[]; -} - -function secondsToMillis(sec: number): number { - return Math.trunc(sec * 1000); -} - -/** - * Maintains information for diagnostic events. - * - * @internal - */ -export default class DiagnosticsManager { - private startTime: number; - - private streamInits: StreamInitData[] = []; - - private id: DiagnosticId; - - private dataSinceDate: number; - - constructor( - sdkKey: string, - private readonly config: Configuration, - private readonly platform: Platform, - private readonly featureStore: LDFeatureStore, - ) { - this.startTime = Date.now(); - this.dataSinceDate = this.startTime; - this.id = { - diagnosticId: platform.crypto.randomUUID(), - sdkKeySuffix: sdkKey.length > 6 ? sdkKey.substring(sdkKey.length - 6) : sdkKey, - }; - } - - /** - * Creates the initial event that is sent by the event processor when the SDK starts up. This will - * not be repeated during the lifetime of the SDK client. - */ - createInitEvent(): DiagnosticInitEvent { - const sdkData = this.platform.info.sdkData(); - const platformData = this.platform.info.platformData(); - - return { - kind: 'diagnostic-init', - id: this.id, - creationDate: this.startTime, - sdk: sdkData, - configuration: { - customBaseURI: this.config.serviceEndpoints.polling !== defaultValues.baseUri, - customStreamURI: this.config.serviceEndpoints.streaming !== defaultValues.streamUri, - customEventsURI: this.config.serviceEndpoints.events !== defaultValues.eventsUri, - eventsCapacity: this.config.eventsCapacity, - // Node doesn't distinguish between these two kinds of timeouts. 
It is unlikely other web - // based implementations would be able to either. - connectTimeoutMillis: secondsToMillis(this.config.timeout), - socketTimeoutMillis: secondsToMillis(this.config.timeout), - eventsFlushIntervalMillis: secondsToMillis(this.config.flushInterval), - pollingIntervalMillis: secondsToMillis(this.config.pollInterval), - reconnectTimeMillis: secondsToMillis(this.config.streamInitialReconnectDelay), - streamingDisabled: !this.config.stream, - usingRelayDaemon: this.config.useLdd, - offline: this.config.offline, - allAttributesPrivate: this.config.allAttributesPrivate, - contextKeysCapacity: this.config.contextKeysCapacity, - contextKeysFlushIntervalMillis: secondsToMillis(this.config.contextKeysFlushInterval), - usingProxy: !!this.platform.requests.usingProxy?.(), - usingProxyAuthenticator: !!this.platform.requests.usingProxyAuth?.(), - diagnosticRecordingIntervalMillis: secondsToMillis(this.config.diagnosticRecordingInterval), - dataStoreType: this.featureStore.getDescription?.() ?? 'memory', - }, - platform: { - name: platformData.name, - osArch: platformData.os?.arch, - osName: platformData.os?.name, - osVersion: platformData.os?.version, - ...(platformData.additional || {}), - }, - }; - } - - /** - * Records a stream connection attempt (called by the stream processor). - * - * @param timestamp Time of the *beginning* of the connection attempt. - * @param failed True if the connection failed, or we got a read timeout before receiving a "put". - * @param durationMillis Elapsed time between starting timestamp and when we either gave up/lost - * the connection or received a successful "put". - */ - recordStreamInit(timestamp: number, failed: boolean, durationMillis: number) { - const item = { timestamp, failed, durationMillis }; - this.streamInits.push(item); - } - - /** - * Creates a periodic event containing time-dependent stats, and resets the state of the manager - * with regard to those stats. 
- * - * Note: the reason droppedEvents, deduplicatedUsers, and eventsInLastBatch are passed into this - * function, instead of being properties of the DiagnosticsManager, is that the event processor is - * the one who's calling this function and is also the one who's tracking those stats. - */ - createStatsEventAndReset( - droppedEvents: number, - deduplicatedUsers: number, - eventsInLastBatch: number, - ): DiagnosticStatsEvent { - const currentTime = Date.now(); - const evt: DiagnosticStatsEvent = { - kind: 'diagnostic', - id: this.id, - creationDate: currentTime, - dataSinceDate: this.dataSinceDate, - droppedEvents, - deduplicatedUsers, - eventsInLastBatch, - streamInits: this.streamInits, - }; - - this.streamInits = []; - this.dataSinceDate = currentTime; - return evt; - } -} diff --git a/packages/shared/sdk-server/src/events/EventFactory.ts b/packages/shared/sdk-server/src/events/EventFactory.ts index 76411dd48..53f9abc35 100644 --- a/packages/shared/sdk-server/src/events/EventFactory.ts +++ b/packages/shared/sdk-server/src/events/EventFactory.ts @@ -6,10 +6,8 @@ import isExperiment from './isExperiment'; /** * @internal */ -export default class EventFactory { - constructor(private readonly withReasons: boolean) {} - - evalEvent( +export default class EventFactory extends internal.EventFactoryBase { + evalEventServer( flag: Flag, context: Context, detail: LDEvaluationDetail, @@ -17,33 +15,20 @@ export default class EventFactory { prereqOfFlag?: Flag, ): internal.InputEvalEvent { const addExperimentData = isExperiment(flag, detail.reason); - return new internal.InputEvalEvent( - this.withReasons, + return super.evalEvent({ + addExperimentData, context, - flag.key, + debugEventsUntilDate: flag.debugEventsUntilDate, defaultVal, - detail, - flag.version, - // Exclude null as a possibility. - detail.variationIndex ?? undefined, - flag.trackEvents || addExperimentData, - prereqOfFlag?.key, - this.withReasons || addExperimentData ? 
detail.reason : undefined, - flag.debugEventsUntilDate, - ); - } - - unknownFlagEvent(key: string, context: Context, detail: LDEvaluationDetail) { - return new internal.InputEvalEvent(this.withReasons, context, key, detail.value, detail); - } - - /* eslint-disable-next-line class-methods-use-this */ - identifyEvent(context: Context) { - return new internal.InputIdentifyEvent(context); - } - - /* eslint-disable-next-line class-methods-use-this */ - customEvent(key: string, context: Context, data?: any, metricValue?: number) { - return new internal.InputCustomEvent(context, key, data ?? undefined, metricValue ?? undefined); + excludeFromSummaries: flag.excludeFromSummaries, + flagKey: flag.key, + prereqOfFlagKey: prereqOfFlag?.key, + reason: detail.reason, + samplingRatio: flag.samplingRatio, + trackEvents: flag.trackEvents || addExperimentData, + value: detail.value, + variation: detail.variationIndex ?? undefined, + version: flag.version, + }); } } diff --git a/packages/shared/sdk-server/src/events/EventSender.ts b/packages/shared/sdk-server/src/events/EventSender.ts deleted file mode 100644 index e405a6254..000000000 --- a/packages/shared/sdk-server/src/events/EventSender.ts +++ /dev/null @@ -1,123 +0,0 @@ -import { - ApplicationTags, - ClientContext, - Crypto, - Requests, - subsystem, -} from '@launchdarkly/js-sdk-common'; - -import defaultHeaders from '../data_sources/defaultHeaders'; -import httpErrorMessage from '../data_sources/httpErrorMessage'; -import { isHttpRecoverable, LDUnexpectedResponseError } from '../errors'; - -export interface EventSenderOptions { - tags: ApplicationTags; -} - -export default class EventSender implements subsystem.LDEventSender { - private defaultHeaders: { - [key: string]: string; - }; - - private eventsUri: string; - - private diagnosticEventsUri: string; - - private requests: Requests; - - private crypto: Crypto; - - constructor(config: EventSenderOptions, clientContext: ClientContext) { - this.defaultHeaders = { - 
...defaultHeaders( - clientContext.basicConfiguration.sdkKey, - config, - clientContext.platform.info, - ), - }; - - this.eventsUri = `${clientContext.basicConfiguration.serviceEndpoints.events}/bulk`; - - this.diagnosticEventsUri = `${clientContext.basicConfiguration.serviceEndpoints.events}/diagnostic`; - - this.requests = clientContext.platform.requests; - - this.crypto = clientContext.platform.crypto; - } - - private async tryPostingEvents( - events: any, - uri: string, - payloadId: string | undefined, - canRetry: boolean, - ): Promise { - const tryRes: subsystem.LDEventSenderResult = { - status: subsystem.LDDeliveryStatus.Succeeded, - }; - - const headers: Record = { - ...this.defaultHeaders, - 'content-type': 'application/json', - }; - - if (payloadId) { - headers['x-launchdarkly-payload-id'] = payloadId; - headers['x-launchDarkly-event-schema'] = '4'; - } - let error; - try { - const res = await this.requests.fetch(uri, { - headers, - body: JSON.stringify(events), - method: 'POST', - }); - - const serverDate = Date.parse(res.headers.get('date') || ''); - if (serverDate) { - tryRes.serverTime = serverDate; - } - - if (res.status <= 204) { - return tryRes; - } - - error = new LDUnexpectedResponseError( - httpErrorMessage( - { status: res.status, message: 'some events were dropped' }, - 'event posting', - ), - ); - - if (!isHttpRecoverable(res.status)) { - tryRes.status = subsystem.LDDeliveryStatus.FailedAndMustShutDown; - tryRes.error = error; - return tryRes; - } - } catch (err) { - error = err; - } - - if (error && !canRetry) { - tryRes.status = subsystem.LDDeliveryStatus.Failed; - tryRes.error = error; - return tryRes; - } - - await new Promise((r) => { - setTimeout(r, 1000); - }); - return this.tryPostingEvents(events, this.eventsUri, payloadId, false); - } - - async sendEventData( - type: subsystem.LDEventType, - data: any, - ): Promise { - const payloadId = - type === subsystem.LDEventType.AnalyticsEvents ? 
this.crypto.randomUUID() : undefined; - const uri = - type === subsystem.LDEventType.AnalyticsEvents ? this.eventsUri : this.diagnosticEventsUri; - - return this.tryPostingEvents(data, uri, payloadId, true); - } -} diff --git a/packages/shared/sdk-server/src/events/NullEventProcessor.ts b/packages/shared/sdk-server/src/events/NullEventProcessor.ts deleted file mode 100644 index 2dd4ae313..000000000 --- a/packages/shared/sdk-server/src/events/NullEventProcessor.ts +++ /dev/null @@ -1,22 +0,0 @@ -// This is an empty implementation, so it doesn't use this, and it has empty methods, and it -// has unused variables. - -/* eslint-disable class-methods-use-this */ - -/* eslint-disable @typescript-eslint/no-empty-function */ - -/* eslint-disable @typescript-eslint/no-unused-vars */ -import { internal, subsystem } from '@launchdarkly/js-sdk-common'; - -/** - * Empty event processor implementation for when events are not desired. - * - * @internal - */ -export default class NullEventProcessor implements subsystem.LDEventProcessor { - close(): void {} - - async flush(): Promise {} - - sendEvent(inputEvent: internal.InputEvent): void {} -} diff --git a/packages/shared/sdk-server/src/index.ts b/packages/shared/sdk-server/src/index.ts index 96acd80a4..1c99f0097 100644 --- a/packages/shared/sdk-server/src/index.ts +++ b/packages/shared/sdk-server/src/index.ts @@ -1,11 +1,19 @@ import BigSegmentStoreStatusProviderImpl from './BigSegmentStatusProviderImpl'; import LDClientImpl from './LDClientImpl'; +import { createMigration, LDMigrationError, LDMigrationSuccess } from './Migration'; export * as integrations from './integrations'; export * as platform from '@launchdarkly/js-sdk-common'; export * from './api'; export * from './store'; export * from './events'; + export * from '@launchdarkly/js-sdk-common'; -export { LDClientImpl, BigSegmentStoreStatusProviderImpl }; +export { + LDClientImpl, + BigSegmentStoreStatusProviderImpl, + LDMigrationError, + LDMigrationSuccess, + 
createMigration, +}; diff --git a/packages/shared/sdk-server/src/integrations/FileDataSourceFactory.ts b/packages/shared/sdk-server/src/integrations/FileDataSourceFactory.ts index 896542c9b..885776ee2 100644 --- a/packages/shared/sdk-server/src/integrations/FileDataSourceFactory.ts +++ b/packages/shared/sdk-server/src/integrations/FileDataSourceFactory.ts @@ -1,8 +1,8 @@ -import { LDClientContext, LDLogger } from '@launchdarkly/js-sdk-common'; +import { LDClientContext, LDLogger, subsystem, VoidFunction } from '@launchdarkly/js-sdk-common'; import { FileDataSourceOptions } from '../api/integrations'; -import { LDFeatureStore, LDStreamProcessor } from '../api/subsystems'; -import FileDataSource from '../data_sources/FileDataSource'; +import { LDFeatureStore } from '../api/subsystems'; +import FileDataSource, { FileDataSourceErrorHandler } from '../data_sources/FileDataSource'; /** * Components of the SDK runtime configuration which are required @@ -30,20 +30,34 @@ export default class FileDataSourceFactory { * * @internal */ - create(ldClientContext: LDClientContext, featureStore: LDFeatureStore) { + create( + ldClientContext: LDClientContext, + featureStore: LDFeatureStore, + initSuccessHandler?: VoidFunction, + errorHandler?: FileDataSourceErrorHandler, + ) { const updatedOptions: FileDataSourceOptions = { paths: this.options.paths, autoUpdate: this.options.autoUpdate, logger: this.options.logger || ldClientContext.basicConfiguration.logger, yamlParser: this.options.yamlParser, }; - return new FileDataSource(updatedOptions, ldClientContext.platform.fileSystem!, featureStore); + return new FileDataSource( + updatedOptions, + ldClientContext.platform.fileSystem!, + featureStore, + initSuccessHandler, + errorHandler, + ); } getFactory(): ( ldClientContext: LDClientContext, featureStore: LDFeatureStore, - ) => LDStreamProcessor { - return (ldClientContext, featureStore) => this.create(ldClientContext, featureStore); + initSuccessHandler?: VoidFunction, + 
errorHandler?: FileDataSourceErrorHandler, + ) => subsystem.LDStreamProcessor { + return (ldClientContext, featureStore, initSuccessHandler, errorHandler) => + this.create(ldClientContext, featureStore, initSuccessHandler, errorHandler); } } diff --git a/packages/shared/sdk-server/src/integrations/test_data/TestData.ts b/packages/shared/sdk-server/src/integrations/test_data/TestData.ts index 1dc261488..cd4b41417 100644 --- a/packages/shared/sdk-server/src/integrations/test_data/TestData.ts +++ b/packages/shared/sdk-server/src/integrations/test_data/TestData.ts @@ -1,7 +1,7 @@ -import { LDClientContext } from '@launchdarkly/js-sdk-common'; +import { LDClientContext, subsystem, VoidFunction } from '@launchdarkly/js-sdk-common'; -import { LDStreamProcessor } from '../../api'; -import { LDFeatureStore } from '../../api/subsystems'; +import { LDFeatureStore } from '../../api'; +import { createStreamListeners } from '../../data_sources/createStreamListeners'; import { Flag } from '../../evaluation/data/Flag'; import { Segment } from '../../evaluation/data/Segment'; import AsyncStoreFacade from '../../store/AsyncStoreFacade'; @@ -58,22 +58,32 @@ export default class TestData { getFactory(): ( clientContext: LDClientContext, featureStore: LDFeatureStore, - ) => LDStreamProcessor { + initSuccessHandler: VoidFunction, + errorHandler?: (e: Error) => void, + ) => subsystem.LDStreamProcessor { // Provides an arrow function to prevent needed to bind the method to // maintain `this`. 
return ( - /* eslint-disable-next-line @typescript-eslint/no-unused-vars */ clientContext: LDClientContext, featureStore: LDFeatureStore, + initSuccessHandler: VoidFunction, + _errorHandler?: (e: Error) => void, ) => { + const listeners = createStreamListeners( + featureStore, + clientContext.basicConfiguration.logger, + { + put: initSuccessHandler, + }, + ); const newSource = new TestDataSource( new AsyncStoreFacade(featureStore), this.currentFlags, - this.currentSegments, (tds) => { this.dataSources.splice(this.dataSources.indexOf(tds)); }, + listeners, ); this.dataSources.push(newSource); diff --git a/packages/shared/sdk-server/src/integrations/test_data/TestDataFlagBuilder.ts b/packages/shared/sdk-server/src/integrations/test_data/TestDataFlagBuilder.ts index 168480f43..88dade591 100644 --- a/packages/shared/sdk-server/src/integrations/test_data/TestDataFlagBuilder.ts +++ b/packages/shared/sdk-server/src/integrations/test_data/TestDataFlagBuilder.ts @@ -18,6 +18,10 @@ interface BuilderData { // Each target being a context kind and a list of keys for that kind. targetsByVariation?: Record>; rules?: TestDataRuleBuilder[]; + migration?: { + checkRatio?: number; + }; + samplingRatio?: number; } /** @@ -367,6 +371,17 @@ export default class TestDataFlagBuilder { return flagRuleBuilder.andNotMatch(contextKind, attribute, ...values); } + checkRatio(ratio: number): TestDataFlagBuilder { + this.data.migration = this.data.migration ?? 
{}; + this.data.migration.checkRatio = ratio; + return this; + } + + samplingRatio(ratio: number): TestDataFlagBuilder { + this.data.samplingRatio = ratio; + return this; + } + /** * @internal */ @@ -390,6 +405,8 @@ export default class TestDataFlagBuilder { variation: this.data.fallthroughVariation, }, variations: [...this.data.variations], + migration: this.data.migration, + samplingRatio: this.data.samplingRatio, }; if (this.data.targetsByVariation) { diff --git a/packages/shared/sdk-server/src/integrations/test_data/TestDataSource.ts b/packages/shared/sdk-server/src/integrations/test_data/TestDataSource.ts index 6f44bfba9..a4984ef07 100644 --- a/packages/shared/sdk-server/src/integrations/test_data/TestDataSource.ts +++ b/packages/shared/sdk-server/src/integrations/test_data/TestDataSource.ts @@ -1,28 +1,34 @@ -import { LDStreamProcessor } from '../../api'; -import { DataKind } from '../../api/interfaces'; -import { LDKeyedFeatureStoreItem } from '../../api/subsystems'; +import { EventName, ProcessStreamResponse, subsystem } from '@launchdarkly/js-sdk-common'; + +import { DataKind, LDKeyedFeatureStoreItem } from '../../api'; import { Flag } from '../../evaluation/data/Flag'; import { Segment } from '../../evaluation/data/Segment'; import AsyncStoreFacade from '../../store/AsyncStoreFacade'; -import VersionedDataKinds from '../../store/VersionedDataKinds'; /** * @internal */ -export default class TestDataSource implements LDStreamProcessor { +export default class TestDataSource implements subsystem.LDStreamProcessor { + private readonly flags: Record; + private readonly segments: Record; constructor( private readonly featureStore: AsyncStoreFacade, - private readonly flags: Record, - private readonly segments: Record, + initialFlags: Record, + initialSegments: Record, private readonly onStop: (tfs: TestDataSource) => void, - ) {} + private readonly listeners: Map, + ) { + // make copies of these objects to decouple them from the originals + // so updates made to 
the originals don't affect these internal data. + this.flags = { ...initialFlags }; + this.segments = { ...initialSegments }; + } - async start(fn?: ((err?: any) => void) | undefined) { - await this.featureStore.init({ - [VersionedDataKinds.Features.namespace]: { ...this.flags }, - [VersionedDataKinds.Segments.namespace]: { ...this.segments }, + async start() { + this.listeners.forEach(({ processJson }) => { + const dataJson = { data: { flags: this.flags, segments: this.segments } }; + processJson(dataJson); }); - fn?.(); } stop() { diff --git a/packages/shared/sdk-server/src/options/Configuration.ts b/packages/shared/sdk-server/src/options/Configuration.ts index 35e4f0af0..494bb83c5 100644 --- a/packages/shared/sdk-server/src/options/Configuration.ts +++ b/packages/shared/sdk-server/src/options/Configuration.ts @@ -5,17 +5,13 @@ import { NumberWithMinimum, OptionMessages, ServiceEndpoints, + subsystem, TypeValidator, TypeValidators, + VoidFunction, } from '@launchdarkly/js-sdk-common'; -import { - LDBigSegmentsOptions, - LDOptions, - LDProxyOptions, - LDStreamProcessor, - LDTLSOptions, -} from '../api'; +import { LDBigSegmentsOptions, LDOptions, LDProxyOptions, LDTLSOptions } from '../api'; import { LDDataSourceUpdates, LDFeatureStore } from '../api/subsystems'; import InMemoryFeatureStore from '../store/InMemoryFeatureStore'; import { ValidatedOptions } from './ValidatedOptions'; @@ -65,7 +61,7 @@ const validations: Record = { export const defaultValues: ValidatedOptions = { baseUri: 'https://sdk.launchdarkly.com', streamUri: 'https://stream.launchdarkly.com', - eventsUri: 'https://events.launchdarkly.com', + eventsUri: ServiceEndpoints.DEFAULT_EVENTS, stream: true, streamInitialReconnectDelay: 1, sendEvents: true, @@ -205,7 +201,9 @@ export default class Configuration { public readonly updateProcessorFactory?: ( clientContext: LDClientContext, dataSourceUpdates: LDDataSourceUpdates, - ) => LDStreamProcessor; + initSuccessHandler: VoidFunction, + errorHandler?: 
(e: Error) => void, + ) => subsystem.LDStreamProcessor; public readonly bigSegments?: LDBigSegmentsOptions; diff --git a/packages/shared/sdk-server/src/options/ValidatedOptions.ts b/packages/shared/sdk-server/src/options/ValidatedOptions.ts index 894898b81..f50497711 100644 --- a/packages/shared/sdk-server/src/options/ValidatedOptions.ts +++ b/packages/shared/sdk-server/src/options/ValidatedOptions.ts @@ -1,12 +1,6 @@ -import { LDLogger } from '@launchdarkly/js-sdk-common'; +import { LDLogger, subsystem } from '@launchdarkly/js-sdk-common'; -import { - LDBigSegmentsOptions, - LDOptions, - LDProxyOptions, - LDStreamProcessor, - LDTLSOptions, -} from '../api'; +import { LDBigSegmentsOptions, LDOptions, LDProxyOptions, LDTLSOptions } from '../api'; import { LDFeatureStore } from '../api/subsystems'; /** @@ -36,7 +30,7 @@ export interface ValidatedOptions { diagnosticRecordingInterval: number; featureStore: LDFeatureStore | ((options: LDOptions) => LDFeatureStore); tlsParams?: LDTLSOptions; - updateProcessor?: LDStreamProcessor; + updateProcessor?: subsystem.LDStreamProcessor; wrapperName?: string; wrapperVersion?: string; application?: { id?: string; version?: string }; diff --git a/packages/shared/sdk-server/src/store/VersionedDataKinds.ts b/packages/shared/sdk-server/src/store/VersionedDataKinds.ts index 6bfab16e7..da841e6d7 100644 --- a/packages/shared/sdk-server/src/store/VersionedDataKinds.ts +++ b/packages/shared/sdk-server/src/store/VersionedDataKinds.ts @@ -3,7 +3,6 @@ import { DataKind } from '../api/interfaces'; export interface VersionedDataKind extends DataKind { namespace: string; streamApiPath: string; - requestPath: string; getDependencyKeys?: (item: any) => string[]; } @@ -11,12 +10,16 @@ export default class VersionedDataKinds { static readonly Features: VersionedDataKind = { namespace: 'features', streamApiPath: '/flags/', - requestPath: '/sdk/latest-flags/', }; static readonly Segments: VersionedDataKind = { namespace: 'segments', streamApiPath: 
'/segments/', - requestPath: '/sdk/latest-segments/', }; + + static getKeyFromPath(kind: VersionedDataKind, path: string): string | undefined { + return path.startsWith(kind.streamApiPath) + ? path.substring(kind.streamApiPath.length) + : undefined; + } } diff --git a/packages/shared/sdk-server/src/store/serialization.ts b/packages/shared/sdk-server/src/store/serialization.ts index 82bf457ea..aaba1bc4d 100644 --- a/packages/shared/sdk-server/src/store/serialization.ts +++ b/packages/shared/sdk-server/src/store/serialization.ts @@ -26,12 +26,12 @@ export function reviver(this: any, key: string, value: any): any { return value; } -interface FlagsAndSegments { +export interface FlagsAndSegments { flags: { [name: string]: Flag }; segments: { [name: string]: Segment }; } -interface AllData { +export interface AllData { data: FlagsAndSegments; } @@ -81,7 +81,7 @@ export function replacer(this: any, key: string, value: any): any { return value; } -interface DeleteData extends Omit { +export interface DeleteData extends Omit { path: string; kind?: VersionedDataKind; } @@ -89,7 +89,7 @@ interface DeleteData extends Omit { type VersionedFlag = VersionedData & Flag; type VersionedSegment = VersionedData & Segment; -interface PatchData { +export interface PatchData { path: string; data: VersionedFlag | VersionedSegment; kind?: VersionedDataKind; diff --git a/packages/shared/sdk-server/tsconfig.json b/packages/shared/sdk-server/tsconfig.json index cd3f7af3c..1a394bfbf 100644 --- a/packages/shared/sdk-server/tsconfig.json +++ b/packages/shared/sdk-server/tsconfig.json @@ -2,7 +2,7 @@ "compilerOptions": { "rootDir": "src", "outDir": "dist", - "target": "es2017", + "target": "ES2017", "lib": ["es6"], "module": "commonjs", "strict": true, diff --git a/packages/store/node-server-sdk-dynamodb/README.md b/packages/store/node-server-sdk-dynamodb/README.md index 59f9c6917..07aef68e0 100644 --- a/packages/store/node-server-sdk-dynamodb/README.md +++ 
b/packages/store/node-server-sdk-dynamodb/README.md @@ -52,7 +52,9 @@ const client = LaunchDarkly.init('YOUR SDK KEY', config); By default, the DynamoDB client will try to get your AWS credentials and region name from environment variables and/or local configuration files, as described in the AWS SDK documentation. You can also specify any valid [DynamoDB client options](https://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/DynamoDB.html#constructor-property) like this: ```typescript -const dynamoDBOptions = { credentials: { accessKeyId: 'YOUR KEY', secretAccessKey: 'YOUR SECRET' }}; +const dynamoDBOptions = { + credentials: { accessKeyId: 'YOUR KEY', secretAccessKey: 'YOUR SECRET' }, +}; const store = DynamoDBFeatureStore('YOUR TABLE NAME', { clientOptions: dynamoDBOptions }); ``` diff --git a/packages/store/node-server-sdk-dynamodb/src/DynamoDBBigSegmentStore.ts b/packages/store/node-server-sdk-dynamodb/src/DynamoDBBigSegmentStore.ts index bac777d65..e7ed6614b 100644 --- a/packages/store/node-server-sdk-dynamodb/src/DynamoDBBigSegmentStore.ts +++ b/packages/store/node-server-sdk-dynamodb/src/DynamoDBBigSegmentStore.ts @@ -39,7 +39,6 @@ export default class DynamoDBBigSegmentStore implements interfaces.BigSegmentSto // Logger is not currently used, but is included to reduce the chance of a // compatibility break to add a log. 
- // eslint-disable-next-line @typescript-eslint/no-unused-vars constructor( private readonly tableName: string, options?: LDDynamoDBOptions, diff --git a/packages/store/node-server-sdk-redis/src/RedisBigSegmentStore.ts b/packages/store/node-server-sdk-redis/src/RedisBigSegmentStore.ts index 7e51fd701..8934bd2ba 100644 --- a/packages/store/node-server-sdk-redis/src/RedisBigSegmentStore.ts +++ b/packages/store/node-server-sdk-redis/src/RedisBigSegmentStore.ts @@ -23,7 +23,6 @@ export default class RedisBigSegmentStore implements interfaces.BigSegmentStore // Logger is not currently used, but is included to reduce the chance of a // compatibility break to add a log. - // eslint-disable-next-line @typescript-eslint/no-unused-vars constructor( options?: LDRedisOptions, private readonly logger?: LDLogger, diff --git a/release-please-config.json b/release-please-config.json index 63cc82409..057e93957 100644 --- a/release-please-config.json +++ b/release-please-config.json @@ -3,9 +3,7 @@ "packages/shared/common": {}, "packages/shared/sdk-server": {}, "packages/shared/sdk-server-edge": {}, - "packages/shared/akamai-edgeworker-sdk": { - "bump-minor-pre-major": true - }, + "packages/shared/akamai-edgeworker-sdk": {}, "packages/sdk/server-node": {}, "packages/sdk/cloudflare": {}, "packages/sdk/vercel": { diff --git a/scripts/build-package.sh b/scripts/build-package.sh index 0466d3697..0969d60c8 100755 --- a/scripts/build-package.sh +++ b/scripts/build-package.sh @@ -1,4 +1,5 @@ #!/usr/bin/env bash + # Run this script like: # ./scripts/build-package.sh diff --git a/scripts/doc-name.sh b/scripts/doc-name.sh index 150c0fb70..5e252b768 100755 --- a/scripts/doc-name.sh +++ b/scripts/doc-name.sh @@ -1,3 +1,4 @@ +#!/usr/bin/env bash # Given a path get the name for the documentation. 
# ./scripts/doc-name.sh packages/sdk/server-node # Produces something like: diff --git a/scripts/package-name.sh b/scripts/package-name.sh index e443b2c7d..1827284c7 100755 --- a/scripts/package-name.sh +++ b/scripts/package-name.sh @@ -1,3 +1,4 @@ +#!/usr/bin/env bash # Given a path get the name of the package. # ./scripts/package-name.sh packages/sdk/server-node # Produces something like: diff --git a/scripts/publish-doc.sh b/scripts/publish-doc.sh index 44d885c79..46e74467f 100755 --- a/scripts/publish-doc.sh +++ b/scripts/publish-doc.sh @@ -1,3 +1,4 @@ +#!/usr/bin/env bash # Run this script like: # ./scripts/publish-doc.sh packages/sdk/node @@ -55,10 +56,10 @@ set +e while true do - + git pull origin gh-pages --no-edit # should accept the default message after_pull_sha=$(git rev-parse HEAD) - + # The first time this runs the head_sha will be empty and they will not match. # If the push fails, then we pull again, and if the SHA does not change, then # the push will not succeed. @@ -66,13 +67,13 @@ do echo "Failed to get changes. Could not publish docs." exit 1 fi - + head_sha=$after_pull_sha - + if git push; then break fi - + echo "Push failed, trying again." done diff --git a/scripts/publish.sh b/scripts/publish.sh index 597e964da..8619a6a12 100755 --- a/scripts/publish.sh +++ b/scripts/publish.sh @@ -1,3 +1,4 @@ +#!/usr/bin/env bash if $LD_RELEASE_IS_DRYRUN ; then # Dry run just pack the workspace. echo "Doing a dry run of publishing." 
diff --git a/scripts/replace-version.sh b/scripts/replace-version.sh index 87d2309af..1e3fd218e 100755 --- a/scripts/replace-version.sh +++ b/scripts/replace-version.sh @@ -1,3 +1,4 @@ +#!/usr/bin/env bash # Run this script like: # ./scripts/replace-version.sh packages/sdk/node diff --git a/tsconfig.json b/tsconfig.json index 93c61e83e..0c40df097 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -7,6 +7,9 @@ { "path": "./packages/shared/common/tsconfig.ref.json" }, + { + "path": "./packages/shared/mocks/tsconfig.ref.json" + }, { "path": "./packages/shared/sdk-server/tsconfig.ref.json" },