diff --git a/.husky/pre-push b/.husky/pre-push index 3b6d564252..b5f55642f9 100755 --- a/.husky/pre-push +++ b/.husky/pre-push @@ -7,4 +7,5 @@ npm t \ -w packages/metrics \ -w packages/tracer \ -w packages/idempotency \ - -w packages/parameters \ No newline at end of file + -w packages/parameters \ + -w packages/parser \ No newline at end of file diff --git a/package-lock.json b/package-lock.json index 2b28ed8c51..07b8a5b164 100644 --- a/package-lock.json +++ b/package-lock.json @@ -196,6 +196,19 @@ "node": ">=6.0.0" } }, + "node_modules/@anatine/zod-mock": { + "version": "3.13.3", + "resolved": "https://registry.npmjs.org/@anatine/zod-mock/-/zod-mock-3.13.3.tgz", + "integrity": "sha512-AN+0YEFE7s6BpuALQHhEoVmJmD+0gPnf4Fehc6oE5NHbM3X2ZD5fW5M6vvot29NWUB6nxvj0gu+BPQ9cVnxALw==", + "dev": true, + "dependencies": { + "randexp": "^0.5.3" + }, + "peerDependencies": { + "@faker-js/faker": "^7.0.0 || ^8.0.0", + "zod": "^3.21.4" + } + }, "node_modules/@aws-cdk/asset-awscli-v1": { "version": "2.2.200", "resolved": "https://registry.npmjs.org/@aws-cdk/asset-awscli-v1/-/asset-awscli-v1-2.2.200.tgz", @@ -2291,6 +2304,22 @@ "node": "^12.22.0 || ^14.17.0 || >=16.0.0" } }, + "node_modules/@faker-js/faker": { + "version": "8.3.1", + "resolved": "https://registry.npmjs.org/@faker-js/faker/-/faker-8.3.1.tgz", + "integrity": "sha512-FdgpFxY6V6rLZE9mmIBb9hM0xpfvQOSNOLnzolzKwsE1DH+gC7lEKV1p1IbR0lAYyvYd5a4u3qWJzowUkw1bIw==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/fakerjs" + } + ], + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0", + "npm": ">=6.14.13" + } + }, "node_modules/@humanwhocodes/config-array": { "version": "0.11.11", "resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.11.11.tgz", @@ -7597,6 +7626,15 @@ "node": ">=12" } }, + "node_modules/drange": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/drange/-/drange-1.1.1.tgz", + "integrity": 
"sha512-pYxfDYpued//QpnLIm4Avk7rsNtAtQkUES2cwAYSvD/wd2pKD71gN2Ebj3e7klzXwjocvE8c5vx/1fxwpqmSxA==", + "dev": true, + "engines": { + "node": ">=4" + } + }, "node_modules/duplexer": { "version": "0.1.2", "resolved": "https://registry.npmjs.org/duplexer/-/duplexer-0.1.2.tgz", @@ -14053,6 +14091,19 @@ "node": ">=8" } }, + "node_modules/randexp": { + "version": "0.5.3", + "resolved": "https://registry.npmjs.org/randexp/-/randexp-0.5.3.tgz", + "integrity": "sha512-U+5l2KrcMNOUPYvazA3h5ekF80FHTUG+87SEAmHZmolh1M+i/WyTCxVzmi+tidIa1tM4BSe8g2Y/D3loWDjj+w==", + "dev": true, + "dependencies": { + "drange": "^1.0.2", + "ret": "^0.2.0" + }, + "engines": { + "node": ">=4" + } + }, "node_modules/react-is": { "version": "18.2.0", "resolved": "https://registry.npmjs.org/react-is/-/react-is-18.2.0.tgz", @@ -14545,6 +14596,15 @@ "node": ">=8" } }, + "node_modules/ret": { + "version": "0.2.2", + "resolved": "https://registry.npmjs.org/ret/-/ret-0.2.2.tgz", + "integrity": "sha512-M0b3YWQs7R3Z917WRQy1HHA7Ba7D8hvZg6UE5mLykJxQVE2ju0IXbGlaHPPlkY+WN7wFP+wUMXmBFA0aV6vYGQ==", + "dev": true, + "engines": { + "node": ">=4" + } + }, "node_modules/retry": { "version": "0.12.0", "resolved": "https://registry.npmjs.org/retry/-/retry-0.12.0.tgz", @@ -16763,8 +16823,12 @@ "name": "@aws-lambda-powertools/parser", "version": "0.0.0", "license": "MIT-0", + "devDependencies": { + "@anatine/zod-mock": "^3.13.3", + "@faker-js/faker": "^8.3.1" + }, "peerDependencies": { - "zod": "^3.22.4" + "zod": ">=3.x" } }, "packages/testing": { diff --git a/packages/parser/package.json b/packages/parser/package.json index 6dd191ca17..fb4b54fe51 100644 --- a/packages/parser/package.json +++ b/packages/parser/package.json @@ -60,8 +60,11 @@ "serverless", "nodejs" ], - "peerDependencies": { "zod": ">=3.x" + }, + "devDependencies": { + "@anatine/zod-mock": "^3.13.3", + "@faker-js/faker": "^8.3.1" } -} \ No newline at end of file +} diff --git a/packages/parser/src/envelopes/apigw.ts b/packages/parser/src/envelopes/apigw.ts new 
file mode 100644 index 0000000000..49d094b405 --- /dev/null +++ b/packages/parser/src/envelopes/apigw.ts @@ -0,0 +1,18 @@ +import { parse } from './envelope.js'; +import { z, ZodSchema } from 'zod'; +import { APIGatewayProxyEventSchema } from '../schemas/apigw.js'; + +/** + * API Gateway envelope to extract data within body key + */ +export const apiGatewayEnvelope = ( + data: unknown, + schema: T +): z.infer => { + const parsedEnvelope = APIGatewayProxyEventSchema.parse(data); + if (!parsedEnvelope.body) { + throw new Error('Body field of API Gateway event is undefined'); + } + + return parse(parsedEnvelope.body, schema); +}; diff --git a/packages/parser/src/envelopes/apigwv2.ts b/packages/parser/src/envelopes/apigwv2.ts new file mode 100644 index 0000000000..decadfcc57 --- /dev/null +++ b/packages/parser/src/envelopes/apigwv2.ts @@ -0,0 +1,18 @@ +import { parse } from './envelope.js'; +import { z, ZodSchema } from 'zod'; +import { APIGatewayProxyEventV2Schema } from '../schemas/apigwv2.js'; + +/** + * API Gateway V2 envelope to extract data within body key + */ +export const apiGatewayV2Envelope = ( + data: unknown, + schema: T +): z.infer => { + const parsedEnvelope = APIGatewayProxyEventV2Schema.parse(data); + if (!parsedEnvelope.body) { + throw new Error('Body field of API Gateway event is undefined'); + } + + return parse(parsedEnvelope.body, schema); +}; diff --git a/packages/parser/src/envelopes/cloudwatch.ts b/packages/parser/src/envelopes/cloudwatch.ts new file mode 100644 index 0000000000..848e7ab070 --- /dev/null +++ b/packages/parser/src/envelopes/cloudwatch.ts @@ -0,0 +1,23 @@ +import { parse } from './envelope.js'; +import { z, ZodSchema } from 'zod'; +import { CloudWatchLogsSchema } from '../schemas/cloudwatch.js'; + +/** + * CloudWatch Envelope to extract a List of log records. + * + * The record's body parameter is a string (after being base64 decoded and gzipped), + * though it can also be a JSON encoded string. 
+ * Regardless of its type it'll be parsed into a BaseModel object. + * + * Note: The record will be parsed the same way so if model is str + */ +export const cloudWatchEnvelope = ( + data: unknown, + schema: T +): z.infer => { + const parsedEnvelope = CloudWatchLogsSchema.parse(data); + + return parsedEnvelope.awslogs.data.logEvents.map((record) => { + return parse(record.message, schema); + }); +}; diff --git a/packages/parser/src/envelopes/dynamodb.ts b/packages/parser/src/envelopes/dynamodb.ts new file mode 100644 index 0000000000..bb378b2d0b --- /dev/null +++ b/packages/parser/src/envelopes/dynamodb.ts @@ -0,0 +1,28 @@ +import { parse } from './envelope.js'; +import { z, ZodSchema } from 'zod'; +import { DynamoDBStreamSchema } from '../schemas/dynamodb.js'; + +type DynamoDBStreamEnvelopeResponse = { + NewImage: z.infer; + OldImage: z.infer; +}; + +/** + * DynamoDB Stream Envelope to extract data within NewImage/OldImage + * + * Note: Values are the parsed models. Images' values can also be None, and + * length of the list is the record's amount in the original event. + */ +export const dynamoDDStreamEnvelope = ( + data: unknown, + schema: T +): DynamoDBStreamEnvelopeResponse[] => { + const parsedEnvelope = DynamoDBStreamSchema.parse(data); + + return parsedEnvelope.Records.map((record) => { + return { + NewImage: parse(record.dynamodb.NewImage, schema), + OldImage: parse(record.dynamodb.OldImage, schema), + }; + }); +}; diff --git a/packages/parser/src/envelopes/envelope.ts b/packages/parser/src/envelopes/envelope.ts new file mode 100644 index 0000000000..4c2dd9570d --- /dev/null +++ b/packages/parser/src/envelopes/envelope.ts @@ -0,0 +1,23 @@ +import { z, ZodSchema } from 'zod'; + +/** + * Abstract function to parse the content of the envelope using provided schema. + * Both inputs are provided as unknown by the user. + * We expect the data to be either string that can be parsed to json or object. 
+ * @internal + * @param data data to parse + * @param schema schema + */ +export const parse = ( + data: unknown, + schema: T +): z.infer[] => { + if (typeof data === 'string') { + return schema.parse(JSON.parse(data)); + } else if (typeof data === 'object') { + return schema.parse(data); + } else + throw new Error( + `Invalid data type for envelope. Expected string or object, got ${typeof data}` + ); +}; diff --git a/packages/parser/src/envelopes/eventbridge.ts b/packages/parser/src/envelopes/eventbridge.ts new file mode 100644 index 0000000000..4484635348 --- /dev/null +++ b/packages/parser/src/envelopes/eventbridge.ts @@ -0,0 +1,13 @@ +import { parse } from './envelope.js'; +import { z, ZodSchema } from 'zod'; +import { EventBridgeSchema } from '../schemas/eventbridge.js'; + +/** + * Envelope for EventBridge schema that extracts and parses data from the `detail` key. + */ +export const eventBridgeEnvelope = ( + data: unknown, + schema: T +): z.infer => { + return parse(EventBridgeSchema.parse(data).detail, schema); +}; diff --git a/packages/parser/src/envelopes/kafka.ts b/packages/parser/src/envelopes/kafka.ts new file mode 100644 index 0000000000..32529d4255 --- /dev/null +++ b/packages/parser/src/envelopes/kafka.ts @@ -0,0 +1,39 @@ +import { z, ZodSchema } from 'zod'; +import { parse } from './envelope.js'; +import { + KafkaMskEventSchema, + KafkaSelfManagedEventSchema, +} from '../schemas/kafka.js'; +import { type KafkaRecord } from '../types/schema.js'; + +/** + * Kafka event envelope to extract data within body key + * The record's body parameter is a string, though it can also be a JSON encoded string. + * Regardless of its type it'll be parsed into a BaseModel object. 
+ * + * Note: Records will be parsed the same way so if model is str, + * all items in the list will be parsed as str and not as JSON (and vice versa) + */ +export const kafkaEnvelope = <T extends ZodSchema>( + data: unknown, + schema: T +): z.infer<T>[] => { + // manually fetch event source to decide between Msk or SelfManaged + + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + const eventSource = data['eventSource']; + + const parsedEnvelope: + | z.infer<typeof KafkaMskEventSchema> + | z.infer<typeof KafkaSelfManagedEventSchema> = + eventSource === 'aws:kafka' + ? KafkaMskEventSchema.parse(data) + : KafkaSelfManagedEventSchema.parse(data); + + return Object.values(parsedEnvelope.records).map((topicRecord) => { + return topicRecord.map((record: KafkaRecord) => { + return parse(record.value, schema); + }); + }); +}; diff --git a/packages/parser/src/envelopes/kinesis-firehose.ts b/packages/parser/src/envelopes/kinesis-firehose.ts new file mode 100644 index 0000000000..e51ae47a12 --- /dev/null +++ b/packages/parser/src/envelopes/kinesis-firehose.ts @@ -0,0 +1,26 @@ +import { parse } from './envelope.js'; +import { z, ZodSchema } from 'zod'; +import { KinesisFirehoseSchema } from '../schemas/kinesis-firehose.js'; + +/** + * Kinesis Firehose Envelope to extract array of Records + * + * The record's data parameter is a base64 encoded string which is parsed into a bytes array, + * though it can also be a JSON encoded string. + * Regardless of its type it'll be parsed into a BaseModel object.
+ * + * Note: Records will be parsed the same way so if model is str, + * all items in the list will be parsed as str and not as JSON (and vice versa) + * + * https://docs.aws.amazon.com/lambda/latest/dg/services-kinesisfirehose.html + */ +export const kinesisFirehoseEnvelope = ( + data: unknown, + schema: T +): z.infer => { + const parsedEnvelope = KinesisFirehoseSchema.parse(data); + + return parsedEnvelope.records.map((record) => { + return parse(record.data, schema); + }); +}; diff --git a/packages/parser/src/envelopes/kinesis.ts b/packages/parser/src/envelopes/kinesis.ts new file mode 100644 index 0000000000..311223042d --- /dev/null +++ b/packages/parser/src/envelopes/kinesis.ts @@ -0,0 +1,24 @@ +import { parse } from './envelope.js'; +import { z, ZodSchema } from 'zod'; +import { KinesisDataStreamSchema } from '../schemas/kinesis.js'; + +/** + * Kinesis Data Stream Envelope to extract array of Records + * + * The record's data parameter is a base64 encoded string which is parsed into a bytes array, + * though it can also be a JSON encoded string. + * Regardless of its type it'll be parsed into a BaseModel object. 
+ * + * Note: Records will be parsed the same way so if model is str, + * all items in the list will be parsed as str and not as JSON (and vice versa) + */ +export const kinesisEnvelope = ( + data: unknown, + schema: T +): z.infer => { + const parsedEnvelope = KinesisDataStreamSchema.parse(data); + + return parsedEnvelope.Records.map((record) => { + return parse(record.kinesis.data, schema); + }); +}; diff --git a/packages/parser/src/envelopes/lambda.ts b/packages/parser/src/envelopes/lambda.ts new file mode 100644 index 0000000000..3ac1f2b8c6 --- /dev/null +++ b/packages/parser/src/envelopes/lambda.ts @@ -0,0 +1,18 @@ +import { parse } from './envelope.js'; +import { z, ZodSchema } from 'zod'; +import { LambdaFunctionUrlSchema } from '../schemas/lambda.js'; + +/** + * Lambda function URL envelope to extract data within body key + */ +export const lambdaFunctionUrlEnvelope = ( + data: unknown, + schema: T +): z.infer => { + const parsedEnvelope = LambdaFunctionUrlSchema.parse(data); + if (!parsedEnvelope.body) { + throw new Error('Body field of Lambda function URL event is undefined'); + } + + return parse(parsedEnvelope.body, schema); +}; diff --git a/packages/parser/src/envelopes/sns.ts b/packages/parser/src/envelopes/sns.ts new file mode 100644 index 0000000000..3e897a00a8 --- /dev/null +++ b/packages/parser/src/envelopes/sns.ts @@ -0,0 +1,50 @@ +import { z, ZodSchema } from 'zod'; +import { parse } from './envelope.js'; +import { SnsSchema, SnsSqsNotificationSchema } from '../schemas/sns.js'; +import { SqsSchema } from '../schemas/sqs.js'; + +/** + * SNS Envelope to extract array of Records + * + * The record's body parameter is a string, though it can also be a JSON encoded string. + * Regardless of its type it'll be parsed into a BaseModel object. 
+ * + * Note: Records will be parsed the same way so if model is str, + * all items in the list will be parsed as str and not as JSON (and vice versa) + */ +export const snsEnvelope = <T extends ZodSchema>( + data: unknown, + schema: T +): z.infer<T> => { + const parsedEnvelope = SnsSchema.parse(data); + + return parsedEnvelope.Records.map((record) => { + return parse(record.Sns.Message, schema); + }); +}; + +/** + * SNS plus SQS Envelope to extract array of Records + * + * Published messages from SNS to SQS have a slightly different payload. + * Since SNS payload is marshalled into `Record` key in SQS, we have to: + * + * 1. Parse SQS schema with incoming data + * 2. Unmarshall SNS payload and parse against SNS Notification schema not SNS/SNS Record + * 3. Finally, parse provided model against payload extracted + * + */ +export const snsSqsEnvelope = <T extends ZodSchema>( + data: unknown, + schema: T +): z.infer<T> => { + const parsedEnvelope = SqsSchema.parse(data); + + return parsedEnvelope.Records.map((record) => { + const snsNotification = SnsSqsNotificationSchema.parse( + JSON.parse(record.body) + ); + + return parse(snsNotification.Message, schema); + }); +}; diff --git a/packages/parser/src/envelopes/sqs.ts b/packages/parser/src/envelopes/sqs.ts new file mode 100644 index 0000000000..2757663a95 --- /dev/null +++ b/packages/parser/src/envelopes/sqs.ts @@ -0,0 +1,23 @@ +import { z, ZodSchema } from 'zod'; +import { SqsSchema } from '../schemas/sqs.js'; +import { parse } from './envelope.js'; + +/** + * SQS Envelope to extract array of Records + * + * The record's body parameter is a string, though it can also be a JSON encoded string. + * Regardless of its type it'll be parsed into a BaseModel object.
+ * + * Note: Records will be parsed the same way so if model is str, + * all items in the list will be parsed as str and not as JSON (and vice versa) + */ +export const sqsEnvelope = <T extends ZodSchema>( + data: unknown, + schema: T +): z.infer<T> => { + const parsedEnvelope = SqsSchema.parse(data); + + return parsedEnvelope.Records.map((record) => { + return parse(record.body, schema); + }); +}; diff --git a/packages/parser/src/envelopes/vpc-lattice.ts b/packages/parser/src/envelopes/vpc-lattice.ts new file mode 100644 index 0000000000..03d2998757 --- /dev/null +++ b/packages/parser/src/envelopes/vpc-lattice.ts @@ -0,0 +1,15 @@ +import { parse } from './envelope.js'; +import { z, ZodSchema } from 'zod'; +import { VpcLatticeSchema } from '../schemas/vpc-lattice.js'; + +/** + * Amazon VPC Lattice envelope to extract data within body key + */ +export const vpcLatticeEnvelope = <T extends ZodSchema>( + data: unknown, + schema: T +): z.infer<T> => { + const parsedEnvelope = VpcLatticeSchema.parse(data); + + return parse(parsedEnvelope.body, schema); +}; diff --git a/packages/parser/src/envelopes/vpc-latticev2.ts b/packages/parser/src/envelopes/vpc-latticev2.ts new file mode 100644 index 0000000000..a3fa4389c0 --- /dev/null +++ b/packages/parser/src/envelopes/vpc-latticev2.ts @@ -0,0 +1,15 @@ +import { parse } from './envelope.js'; +import { z, ZodSchema } from 'zod'; +import { VpcLatticeV2Schema } from '../schemas/vpc-latticev2.js'; + +/** + * Amazon VPC Lattice V2 envelope to extract data within body key + */ +export const vpcLatticeV2Envelope = <T extends ZodSchema>( + data: unknown, + schema: T +): z.infer<T> => { + const parsedEnvelope = VpcLatticeV2Schema.parse(data); + + return parse(parsedEnvelope.body, schema); +}; diff --git a/packages/parser/src/schemas/cloudwatch.ts b/packages/parser/src/schemas/cloudwatch.ts index 8c9e71f9a0..2694507b04 100644 --- a/packages/parser/src/schemas/cloudwatch.ts +++ b/packages/parser/src/schemas/cloudwatch.ts @@ -30,15 +30,9 @@ const CloudWatchLogsSchema = z.object({ }), }); -const
extractCloudWatchLogFromEvent = ( - data: string -): z.infer => { - return decompressRecordToJSON(data); -}; - export { CloudWatchLogsSchema, CloudWatchLogsDecodeSchema, decompressRecordToJSON, - extractCloudWatchLogFromEvent, + CloudWatchLogEventSchema, }; diff --git a/packages/parser/src/schemas/kafka.ts b/packages/parser/src/schemas/kafka.ts index 880fb404d8..08bd5d03ab 100644 --- a/packages/parser/src/schemas/kafka.ts +++ b/packages/parser/src/schemas/kafka.ts @@ -41,4 +41,4 @@ const KafkaMskEventSchema = KafkaBaseEventSchema.extend({ eventSourceArn: z.string(), }); -export { KafkaSelfManagedEventSchema, KafkaMskEventSchema }; +export { KafkaSelfManagedEventSchema, KafkaMskEventSchema, KafkaRecordSchema }; diff --git a/packages/parser/src/schemas/kinesis.ts b/packages/parser/src/schemas/kinesis.ts index d598715420..fbd734297e 100644 --- a/packages/parser/src/schemas/kinesis.ts +++ b/packages/parser/src/schemas/kinesis.ts @@ -1,13 +1,31 @@ import { z } from 'zod'; +import { gunzipSync } from 'node:zlib'; const KinesisDataStreamRecordPayload = z.object({ kinesisSchemaVersion: z.string(), partitionKey: z.string(), sequenceNumber: z.string(), approximateArrivalTimestamp: z.number(), - data: z.string(), + data: z.string().transform((data) => { + const decompresed = decompress(data); + const decoded = Buffer.from(data, 'base64').toString('utf-8'); + try { + // If data was not compressed, try to parse it as JSON otherwise it must be string + return decompresed === data ? 
JSON.parse(decoded) : decompresed; + } catch (e) { + return decoded; + } + }), }); +const decompress = (data: string): string => { + try { + return JSON.parse(gunzipSync(Buffer.from(data, 'base64')).toString('utf8')); + } catch (e) { + return data; + } +}; + const KinesisDataStreamRecord = z.object({ eventSource: z.literal('aws:kinesis'), eventVersion: z.string(), @@ -22,4 +40,8 @@ const KinesisDataStreamSchema = z.object({ Records: z.array(KinesisDataStreamRecord), }); -export { KinesisDataStreamSchema }; +export { + KinesisDataStreamSchema, + KinesisDataStreamRecord, + KinesisDataStreamRecordPayload, +}; diff --git a/packages/parser/src/schemas/sns.ts b/packages/parser/src/schemas/sns.ts index f8d8d8bbc4..862306a66b 100644 --- a/packages/parser/src/schemas/sns.ts +++ b/packages/parser/src/schemas/sns.ts @@ -9,16 +9,26 @@ const SnsNotificationSchema = z.object({ Subject: z.string().optional(), TopicArn: z.string(), UnsubscribeUrl: z.string().url(), + UnsubscribeURL: z.string().url().optional(), + SigningCertUrl: z.string().url().optional(), + SigningCertURL: z.string().url().optional(), Type: z.literal('Notification'), MessageAttributes: z.record(z.string(), SnsMsgAttribute).optional(), Message: z.string(), MessageId: z.string(), Signature: z.string().optional(), SignatureVersion: z.string().optional(), - SigningCertUrl: z.string().url().optional(), Timestamp: z.string().datetime(), }); +const SnsSqsNotificationSchema = SnsNotificationSchema.extend({ + UnsubscribeURL: z.string().optional(), + SigningCertURL: z.string().url().optional(), +}).omit({ + UnsubscribeUrl: true, + SigningCertUrl: true, +}); + const SnsRecordSchema = z.object({ EventSource: z.literal('aws:sns'), EventVersion: z.string(), @@ -30,4 +40,10 @@ const SnsSchema = z.object({ Records: z.array(SnsRecordSchema), }); -export { SnsSchema, SnsRecordSchema, SnsNotificationSchema, SnsMsgAttribute }; +export { + SnsSchema, + SnsRecordSchema, + SnsNotificationSchema, + SnsMsgAttribute, + 
SnsSqsNotificationSchema, +}; diff --git a/packages/parser/src/types/index.ts b/packages/parser/src/types/index.ts new file mode 100644 index 0000000000..e69de29bb2 diff --git a/packages/parser/src/types/schema.ts b/packages/parser/src/types/schema.ts new file mode 100644 index 0000000000..cc5869a1e2 --- /dev/null +++ b/packages/parser/src/types/schema.ts @@ -0,0 +1,17 @@ +import { KafkaRecordSchema } from '../schemas/kafka.js'; +import { z } from 'zod'; +import { + KinesisDataStreamRecord, + KinesisDataStreamRecordPayload, +} from '../schemas/kinesis.js'; +import { APIGatewayProxyEventSchema } from '../schemas/apigw.js'; + +export type KafkaRecord = z.infer; + +export type KinesisDataStreamRecord = z.infer; + +export type KinesisDataStreamRecordPayload = z.infer< + typeof KinesisDataStreamRecordPayload +>; + +export type ApiGatewayProxyEvent = z.infer; diff --git a/packages/parser/tests/unit/envelopes/apigwt.test.ts b/packages/parser/tests/unit/envelopes/apigwt.test.ts new file mode 100644 index 0000000000..6c51736b16 --- /dev/null +++ b/packages/parser/tests/unit/envelopes/apigwt.test.ts @@ -0,0 +1,29 @@ +/** + * Test built in schema envelopes for api gateway + * + * @group unit/parser/envelopes + */ + +import { generateMock } from '@anatine/zod-mock'; +import { TestEvents, TestSchema } from '../schema/utils.js'; +import { ApiGatewayProxyEvent } from '../../../src/types/schema.js'; +import { apiGatewayEnvelope } from '../../../src/envelopes/apigw'; + +describe('ApigwEnvelope ', () => { + it('should parse custom schema in envelope', () => { + const testCustomSchemaObject = generateMock(TestSchema); + const testEvent = TestEvents.apiGatewayProxyEvent as ApiGatewayProxyEvent; + + testEvent.body = JSON.stringify(testCustomSchemaObject); + + const resp = apiGatewayEnvelope(testEvent, TestSchema); + expect(resp).toEqual(testCustomSchemaObject); + }); + + it('should throw no body provided', () => { + const testEvent = TestEvents.apiGatewayProxyEvent as 
ApiGatewayProxyEvent; + testEvent.body = undefined; + + expect(() => apiGatewayEnvelope(testEvent, TestSchema)).toThrow(); + }); +}); diff --git a/packages/parser/tests/unit/envelopes/apigwv2.test.ts b/packages/parser/tests/unit/envelopes/apigwv2.test.ts new file mode 100644 index 0000000000..acf7ee815b --- /dev/null +++ b/packages/parser/tests/unit/envelopes/apigwv2.test.ts @@ -0,0 +1,30 @@ +/** + * Test built in schema envelopes for api gateway v2 + * + * @group unit/parser/envelopes + */ + +import { TestEvents, TestSchema } from '../schema/utils.js'; +import { generateMock } from '@anatine/zod-mock'; +import { APIGatewayProxyEventV2 } from 'aws-lambda'; +import { apiGatewayV2Envelope } from '../../../src/envelopes/apigwv2'; + +describe('ApiGwV2Envelope ', () => { + it('should parse custom schema in envelope', () => { + const testEvent = + TestEvents.apiGatewayProxyV2Event as APIGatewayProxyEventV2; + const data = generateMock(TestSchema); + + testEvent.body = JSON.stringify(data); + + expect(apiGatewayV2Envelope(testEvent, TestSchema)).toEqual(data); + }); + + it('should throw when no body provided', () => { + const testEvent = + TestEvents.apiGatewayProxyV2Event as APIGatewayProxyEventV2; + testEvent.body = undefined; + + expect(() => apiGatewayV2Envelope(testEvent, TestSchema)).toThrow(); + }); +}); diff --git a/packages/parser/tests/unit/envelopes/cloudwatch.test.ts b/packages/parser/tests/unit/envelopes/cloudwatch.test.ts new file mode 100644 index 0000000000..3258768b5a --- /dev/null +++ b/packages/parser/tests/unit/envelopes/cloudwatch.test.ts @@ -0,0 +1,63 @@ +/** + * Test built in schema envelopes for CloudWatch + * + * @group unit/parser/envelopes + */ + +import { generateMock } from '@anatine/zod-mock'; +import { gzipSync } from 'node:zlib'; +import { + CloudWatchLogEventSchema, + CloudWatchLogsDecodeSchema, +} from '../../../src/schemas/cloudwatch.js'; +import { TestSchema } from '../schema/utils.js'; +import { cloudWatchEnvelope } from 
'../../../src/envelopes/cloudwatch'; + +describe('CloudWatch', () => { + it('should parse custom schema in envelope', () => { + const testEvent = { + awslogs: { + data: '', + }, + }; + + const data = generateMock(TestSchema); + const eventMock = generateMock(CloudWatchLogEventSchema, { + stringMap: { + message: () => JSON.stringify(data), + }, + }); + + const logMock = generateMock(CloudWatchLogsDecodeSchema); + logMock.logEvents = [eventMock]; + + testEvent.awslogs.data = gzipSync( + Buffer.from(JSON.stringify(logMock), 'utf8') + ).toString('base64'); + + expect(cloudWatchEnvelope(testEvent, TestSchema)).toEqual([data]); + }); + + it('should throw when schema does not match', () => { + const testEvent = { + awslogs: { + data: '', + }, + }; + + const eventMock = generateMock(CloudWatchLogEventSchema, { + stringMap: { + message: () => JSON.stringify({ foo: 'bar' }), + }, + }); + + const logMock = generateMock(CloudWatchLogsDecodeSchema); + logMock.logEvents = [eventMock]; + + testEvent.awslogs.data = gzipSync( + Buffer.from(JSON.stringify(logMock), 'utf8') + ).toString('base64'); + + expect(() => cloudWatchEnvelope(testEvent, TestSchema)).toThrow(); + }); +}); diff --git a/packages/parser/tests/unit/envelopes/dynamodb.test.ts b/packages/parser/tests/unit/envelopes/dynamodb.test.ts new file mode 100644 index 0000000000..342ad474e3 --- /dev/null +++ b/packages/parser/tests/unit/envelopes/dynamodb.test.ts @@ -0,0 +1,43 @@ +/** + * Test built in schema envelopes for api gateway v2 + * + * @group unit/parser/envelopes + */ + +import { generateMock } from '@anatine/zod-mock'; +import { TestEvents } from '../schema/utils.js'; +import { DynamoDBStreamEvent } from 'aws-lambda'; +import { z } from 'zod'; +import { dynamoDDStreamEnvelope } from '../../../src/envelopes/dynamodb'; + +describe('DynamoDB', () => { + const schema = z.object({ + Message: z.record(z.literal('S'), z.string()), + Id: z.record(z.literal('N'), z.number().min(0).max(100)), + }); + + it('should parse 
dynamodb envelope', () => { + const mockOldImage = generateMock(schema); + const mockNewImage = generateMock(schema); + const dynamodbEvent = TestEvents.dynamoStreamEvent as DynamoDBStreamEvent; + + (dynamodbEvent.Records[0].dynamodb!.NewImage as typeof mockNewImage) = + mockNewImage; + (dynamodbEvent.Records[1].dynamodb!.NewImage as typeof mockNewImage) = + mockNewImage; + (dynamodbEvent.Records[0].dynamodb!.OldImage as typeof mockOldImage) = + mockOldImage; + (dynamodbEvent.Records[1].dynamodb!.OldImage as typeof mockOldImage) = + mockOldImage; + + const parsed = dynamoDDStreamEnvelope(dynamodbEvent, schema); + expect(parsed[0]).toEqual({ + OldImage: mockOldImage, + NewImage: mockNewImage, + }); + expect(parsed[1]).toEqual({ + OldImage: mockOldImage, + NewImage: mockNewImage, + }); + }); +}); diff --git a/packages/parser/tests/unit/envelopes/eventbridge.test.ts b/packages/parser/tests/unit/envelopes/eventbridge.test.ts new file mode 100644 index 0000000000..ee8c62a95c --- /dev/null +++ b/packages/parser/tests/unit/envelopes/eventbridge.test.ts @@ -0,0 +1,51 @@ +/** + * Test built in schema envelopes for event bridge + * + * @group unit/parser/envelopes + */ + +import { TestEvents, TestSchema } from '../schema/utils.js'; +import { generateMock } from '@anatine/zod-mock'; +import { EventBridgeEvent } from 'aws-lambda'; +import { eventBridgeEnvelope } from '../../../src/envelopes/eventbridge.js'; + +describe('EventBridgeEnvelope ', () => { + it('should parse eventbridge event', () => { + const eventBridgeEvent = TestEvents.eventBridgeEvent as EventBridgeEvent< + string, + object + >; + + const data = generateMock(TestSchema); + + eventBridgeEvent.detail = data; + + expect(eventBridgeEnvelope(eventBridgeEvent, TestSchema)).toEqual(data); + }); + + it('should throw error if detail type does not match schema', () => { + const eventBridgeEvent = TestEvents.eventBridgeEvent as EventBridgeEvent< + string, + object + >; + + eventBridgeEvent.detail = { + foo: 'bar', + }; + 
+ expect(() => + eventBridgeEnvelope(eventBridgeEvent, TestSchema) + ).toThrowError(); + }); + + it('should throw when invalid data type provided', () => { + const eventBridgeEvent = TestEvents.eventBridgeEvent as EventBridgeEvent< + string, + object + >; + + eventBridgeEvent.detail = 1 as unknown as object; + + expect(() => eventBridgeEnvelope(eventBridgeEvent, TestSchema)).toThrow(); + }); +}); diff --git a/packages/parser/tests/unit/envelopes/kafka.test.ts b/packages/parser/tests/unit/envelopes/kafka.test.ts new file mode 100644 index 0000000000..57e43a584f --- /dev/null +++ b/packages/parser/tests/unit/envelopes/kafka.test.ts @@ -0,0 +1,39 @@ +/** + * Test built in schema envelopes for api gateway v2 + * + * @group unit/parser/envelopes + */ + +import { generateMock } from '@anatine/zod-mock'; +import { TestEvents, TestSchema } from '../schema/utils.js'; +import { MSKEvent, SelfManagedKafkaEvent } from 'aws-lambda'; +import { kafkaEnvelope } from '../../../src/envelopes/kafka'; + +describe('Kafka', () => { + it('should parse MSK kafka envelope', () => { + const mock = generateMock(TestSchema); + + const kafkaEvent = TestEvents.kafkaEventMsk as MSKEvent; + kafkaEvent.records['mytopic-0'][0].value = Buffer.from( + JSON.stringify(mock) + ).toString('base64'); + + const result = kafkaEnvelope(kafkaEvent, TestSchema); + + expect(result).toEqual([[mock]]); + }); + + it('should parse Self Managed kafka envelope', () => { + const mock = generateMock(TestSchema); + + const kafkaEvent = + TestEvents.kafkaEventSelfManaged as SelfManagedKafkaEvent; + kafkaEvent.records['mytopic-0'][0].value = Buffer.from( + JSON.stringify(mock) + ).toString('base64'); + + const result = kafkaEnvelope(kafkaEvent, TestSchema); + + expect(result).toEqual([[mock]]); + }); +}); diff --git a/packages/parser/tests/unit/envelopes/kinesis-firehose.test.ts b/packages/parser/tests/unit/envelopes/kinesis-firehose.test.ts new file mode 100644 index 0000000000..581785d99f --- /dev/null +++ 
b/packages/parser/tests/unit/envelopes/kinesis-firehose.test.ts @@ -0,0 +1,55 @@ +/** + * Test built in schema envelopes for Kinesis Firehose + * + * @group unit/parser/envelopes + */ + +import { TestEvents, TestSchema } from '../schema/utils.js'; +import { generateMock } from '@anatine/zod-mock'; +import { KinesisFirehoseSchema } from '../../../src/schemas/kinesis-firehose.js'; +import { z } from 'zod'; +import { kinesisFirehoseEnvelope } from '../../../src/envelopes/kinesis-firehose'; + +describe('Kinesis Firehose Envelope', () => { + it('should parse records for PutEvent', () => { + const mock = generateMock(TestSchema); + const testEvent = TestEvents.kinesisFirehosePutEvent as z.infer< + typeof KinesisFirehoseSchema + >; + + testEvent.records.map((record) => { + record.data = Buffer.from(JSON.stringify(mock)).toString('base64'); + }); + + const resp = kinesisFirehoseEnvelope(testEvent, TestSchema); + expect(resp).toEqual([mock, mock]); + }); + + it('should parse a single record for SQS event', () => { + const mock = generateMock(TestSchema); + const testEvent = TestEvents.kinesisFirehoseSQSEvent as z.infer< + typeof KinesisFirehoseSchema + >; + + testEvent.records.map((record) => { + record.data = Buffer.from(JSON.stringify(mock)).toString('base64'); + }); + + const resp = kinesisFirehoseEnvelope(testEvent, TestSchema); + expect(resp).toEqual([mock]); + }); + + it('should parse records for kinesis event', () => { + const mock = generateMock(TestSchema); + const testEvent = TestEvents.kinesisFirehoseKinesisEvent as z.infer< + typeof KinesisFirehoseSchema + >; + + testEvent.records.map((record) => { + record.data = Buffer.from(JSON.stringify(mock)).toString('base64'); + }); + + const resp = kinesisFirehoseEnvelope(testEvent, TestSchema); + expect(resp).toEqual([mock, mock]); + }); +}); diff --git a/packages/parser/tests/unit/envelopes/kinesis.test.ts b/packages/parser/tests/unit/envelopes/kinesis.test.ts new file mode 100644 index 0000000000..6395cf9803 --- 
/dev/null +++ b/packages/parser/tests/unit/envelopes/kinesis.test.ts @@ -0,0 +1,26 @@ +/** + * Test built in schema envelopes for Kinesis + * + * @group unit/parser/envelopes + */ + +import { generateMock } from '@anatine/zod-mock'; +import { KinesisStreamEvent } from 'aws-lambda'; +import { TestEvents, TestSchema } from '../schema/utils.js'; +import { kinesisEnvelope } from '../../../src/envelopes/kinesis'; + +describe('Kinesis', () => { + it('should parse Kinesis Stream event', () => { + const mock = generateMock(TestSchema); + const testEvent = TestEvents.kinesisStreamEvent as KinesisStreamEvent; + + testEvent.Records.map((record) => { + record.kinesis.data = Buffer.from(JSON.stringify(mock)).toString( + 'base64' + ); + }); + + const resp = kinesisEnvelope(testEvent, TestSchema); + expect(resp).toEqual([mock, mock]); + }); +}); diff --git a/packages/parser/tests/unit/envelopes/lambda.test.ts b/packages/parser/tests/unit/envelopes/lambda.test.ts new file mode 100644 index 0000000000..f00411e8b3 --- /dev/null +++ b/packages/parser/tests/unit/envelopes/lambda.test.ts @@ -0,0 +1,30 @@ +/** + * Test built in schema envelopes for Lambda Functions URL + * + * @group unit/parser/envelopes + */ + +import { TestEvents, TestSchema } from '../schema/utils.js'; +import { generateMock } from '@anatine/zod-mock'; +import { APIGatewayProxyEventV2 } from 'aws-lambda'; +import { lambdaFunctionUrlEnvelope } from '../../../src/envelopes/lambda'; + +describe('Lambda Functions Url ', () => { + it('should parse custom schema in envelope', () => { + const testEvent = + TestEvents.lambdaFunctionUrlEvent as APIGatewayProxyEventV2; + const data = generateMock(TestSchema); + + testEvent.body = JSON.stringify(data); + + expect(lambdaFunctionUrlEnvelope(testEvent, TestSchema)).toEqual(data); + }); + + it('should throw when no body provided', () => { + const testEvent = + TestEvents.apiGatewayProxyV2Event as APIGatewayProxyEventV2; + testEvent.body = undefined; + + expect(() => 
lambdaFunctionUrlEnvelope(testEvent, TestSchema)).toThrow(); + }); +}); diff --git a/packages/parser/tests/unit/envelopes/sns.test.ts b/packages/parser/tests/unit/envelopes/sns.test.ts new file mode 100644 index 0000000000..3a380303ed --- /dev/null +++ b/packages/parser/tests/unit/envelopes/sns.test.ts @@ -0,0 +1,51 @@ +/** + * Test built in schema envelopes for SNS + * + * @group unit/parser/envelopes + */ + +import { z } from 'zod'; +import { generateMock } from '@anatine/zod-mock'; +import { SNSEvent, SQSEvent } from 'aws-lambda'; +import { TestEvents, TestSchema } from '../schema/utils.js'; +import { snsEnvelope, snsSqsEnvelope } from '../../../src/envelopes/sns'; + +describe('SNS Envelope', () => { + it('should parse custom schema in envelope', () => { + const testEvent = TestEvents.snsEvent as SNSEvent; + + const testRecords = [] as z.infer<typeof TestSchema>[]; + + testEvent.Records.map((record) => { + const value = generateMock(TestSchema); + testRecords.push(value); + record.Sns.Message = JSON.stringify(value); + }); + + expect(snsEnvelope(testEvent, TestSchema)).toEqual(testRecords); + }); + + it('should throw if message does not match schema', () => { + const testEvent = TestEvents.snsEvent as SNSEvent; + + testEvent.Records.map((record) => { + record.Sns.Message = JSON.stringify({ + foo: 'bar', + }); + }); + + expect(() => snsEnvelope(testEvent, TestSchema)).toThrowError(); + }); + + it('should parse sqs inside sns envelope', () => { + const snsSqsTestEvent = TestEvents.snsSqsEvent as SQSEvent; + + const data = generateMock(TestSchema); + const snsEvent = JSON.parse(snsSqsTestEvent.Records[0].body); + snsEvent.Message = JSON.stringify(data); + + snsSqsTestEvent.Records[0].body = JSON.stringify(snsEvent); + + expect(snsSqsEnvelope(snsSqsTestEvent, TestSchema)).toEqual([data]); + }); +}); diff --git a/packages/parser/tests/unit/envelopes/sqs.test.ts b/packages/parser/tests/unit/envelopes/sqs.test.ts new file mode 100644 index 0000000000..4fb775433f --- /dev/null +++ 
b/packages/parser/tests/unit/envelopes/sqs.test.ts @@ -0,0 +1,45 @@ +/** + * Test built in schema envelopes for sqs + * + * @group unit/parser/envelopes + */ + +import { generateMock } from '@anatine/zod-mock'; +import { TestEvents, TestSchema } from '../schema/utils.js'; +import { SQSEvent } from 'aws-lambda'; +import { sqsEnvelope } from '../../../src/envelopes/sqs'; + +describe('SqsEnvelope ', () => { + it('should parse custom schema in envelope', () => { + const mock = generateMock(TestSchema); + + const sqsEvent = TestEvents.sqsEvent as SQSEvent; + sqsEvent.Records[0].body = JSON.stringify(mock); + sqsEvent.Records[1].body = JSON.stringify(mock); + + const resp = sqsEnvelope(sqsEvent, TestSchema); + expect(resp).toEqual([mock, mock]); + }); + + it('should throw error if invalid keys for a schema', () => { + expect(() => { + sqsEnvelope({ Records: [{ foo: 'bar' }] }, TestSchema); + }).toThrow(); + }); + + it('should throw error if invalid values for a schema', () => { + expect(() => { + sqsEnvelope( + { + Records: [ + { + name: 'foo', + age: 17, + }, + ], + }, + TestSchema + ); + }).toThrow(); + }); +}); diff --git a/packages/parser/tests/unit/envelopes/vpc-lattice.test.ts b/packages/parser/tests/unit/envelopes/vpc-lattice.test.ts new file mode 100644 index 0000000000..a3b1b0f6c4 --- /dev/null +++ b/packages/parser/tests/unit/envelopes/vpc-lattice.test.ts @@ -0,0 +1,38 @@ +/** + * Test built in schema envelopes for VPC Lattice + * + * @group unit/parser/envelopes + */ + +import { generateMock } from '@anatine/zod-mock'; +import { TestEvents, TestSchema } from '../schema/utils.js'; +import { VpcLatticeSchema } from '../../../src/schemas/vpc-lattice.js'; +import { z } from 'zod'; +import { vpcLatticeEnvelope } from '../../../src/envelopes/vpc-lattice'; + +describe('VPC Lattice envelope', () => { + it('should parse VPC Lattice event', () => { + const mock = generateMock(TestSchema); + const testEvent = TestEvents.vpcLatticeEvent as z.infer< + typeof 
VpcLatticeSchema + >; + + testEvent.body = JSON.stringify(mock); + + const resp = vpcLatticeEnvelope(testEvent, TestSchema); + + expect(resp).toEqual(mock); + }); + + it('should parse VPC Lattice event with trailing slash', () => { + const mock = generateMock(TestSchema); + const testEvent = TestEvents.vpcLatticeEventPathTrailingSlash as z.infer< + typeof VpcLatticeSchema + >; + + testEvent.body = JSON.stringify(mock); + + const resp = vpcLatticeEnvelope(testEvent, TestSchema); + expect(resp).toEqual(mock); + }); +}); diff --git a/packages/parser/tests/unit/envelopes/vpc-latticev2.test.ts b/packages/parser/tests/unit/envelopes/vpc-latticev2.test.ts new file mode 100644 index 0000000000..207dedb2da --- /dev/null +++ b/packages/parser/tests/unit/envelopes/vpc-latticev2.test.ts @@ -0,0 +1,38 @@ +/** + * Test built in schema envelopes for VPC Lattice V2 + * + * @group unit/parser/envelopes + */ + +import { generateMock } from '@anatine/zod-mock'; +import { VpcLatticeSchema } from '../../../src/schemas/vpc-lattice.js'; +import { z } from 'zod'; +import { TestEvents, TestSchema } from '../schema/utils.js'; +import { vpcLatticeV2Envelope } from '../../../src/envelopes/vpc-latticev2'; + +describe('VPC Lattice envelope', () => { + it('should parse VPC Lattice event', () => { + const mock = generateMock(TestSchema); + const testEvent = TestEvents.vpcLatticeV2Event as z.infer< + typeof VpcLatticeSchema + >; + + testEvent.body = JSON.stringify(mock); + + const resp = vpcLatticeV2Envelope(testEvent, TestSchema); + + expect(resp).toEqual(mock); + }); + + it('should parse VPC Lattice event with trailing slash', () => { + const mock = generateMock(TestSchema); + const testEvent = TestEvents.vpcLatticeEventV2PathTrailingSlash as z.infer< + typeof VpcLatticeSchema + >; + + testEvent.body = JSON.stringify(mock); + + const resp = vpcLatticeV2Envelope(testEvent, TestSchema); + expect(resp).toEqual(mock); + }); +}); diff --git a/packages/parser/tests/unit/schema/alb.test.ts 
b/packages/parser/tests/unit/schema/alb.test.ts index 5e9144582e..071f4598ce 100644 --- a/packages/parser/tests/unit/schema/alb.test.ts +++ b/packages/parser/tests/unit/schema/alb.test.ts @@ -7,25 +7,22 @@ import { AlbSchema, AlbMultiValueHeadersSchema, } from '../../../src/schemas/alb.js'; -import { loadExampleEvent } from './utils.js'; +import { TestEvents } from './utils.js'; describe('ALB ', () => { it('should parse alb event', () => { - const albEvent = loadExampleEvent('albEvent.json'); + const albEvent = TestEvents.albEvent; expect(AlbSchema.parse(albEvent)).toEqual(albEvent); }); it('should parse alb event path trailing slash', () => { - const albEventPathTrailingSlash = loadExampleEvent( - 'albEventPathTrailingSlash.json' - ); + const albEventPathTrailingSlash = TestEvents.albEventPathTrailingSlash; expect(AlbSchema.parse(albEventPathTrailingSlash)).toEqual( albEventPathTrailingSlash ); }); it('should parse alb event with multi value headers event', () => { - const albMultiValueHeadersEvent = loadExampleEvent( - 'albMultiValueHeadersEvent.json' - ); + const albMultiValueHeadersEvent = TestEvents.albMultiValueHeadersEvent; + expect(AlbMultiValueHeadersSchema.parse(albMultiValueHeadersEvent)).toEqual( albMultiValueHeadersEvent ); diff --git a/packages/parser/tests/unit/schema/apigw.test.ts b/packages/parser/tests/unit/schema/apigw.test.ts index 9aa23c6694..472aa89d39 100644 --- a/packages/parser/tests/unit/schema/apigw.test.ts +++ b/packages/parser/tests/unit/schema/apigw.test.ts @@ -4,68 +4,66 @@ * @group unit/parser/schema/ */ -import { loadExampleEvent } from './utils.js'; import { APIGatewayProxyEventSchema } from '../../../src/schemas/apigw.js'; +import { TestEvents } from './utils.js'; describe('APIGateway ', () => { it('should parse api gateway event', () => { - const apiGatewayProxyEvent = loadExampleEvent('apiGatewayProxyEvent.json'); + const apiGatewayProxyEvent = TestEvents.apiGatewayProxyEvent; + 
expect(APIGatewayProxyEventSchema.parse(apiGatewayProxyEvent)).toEqual( apiGatewayProxyEvent ); }); it('should parse api gateway authorizer request event', () => { - const apiGatewayAuthorizerRequestEvent = loadExampleEvent( - 'apiGatewayAuthorizerRequestEvent.json' - ); + const apiGatewayAuthorizerRequestEvent = + TestEvents.apiGatewayAuthorizerRequestEvent; + expect( APIGatewayProxyEventSchema.parse(apiGatewayAuthorizerRequestEvent) ).toEqual(apiGatewayAuthorizerRequestEvent); }); it('should parse schema middleware invalid event', () => { - const apiGatewaySchemaMiddlewareInvalidEvent = loadExampleEvent( - 'apiGatewaySchemaMiddlewareInvalidEvent.json' - ); + const apiGatewaySchemaMiddlewareInvalidEvent = + TestEvents.apiGatewaySchemaMiddlewareInvalidEvent; + expect( APIGatewayProxyEventSchema.parse(apiGatewaySchemaMiddlewareInvalidEvent) ).toEqual(apiGatewaySchemaMiddlewareInvalidEvent); }); it('should parse schema middleware valid event', () => { - const apiGatewaySchemaMiddlewareValidEvent = loadExampleEvent( - 'apiGatewaySchemaMiddlewareValidEvent.json' - ); + const apiGatewaySchemaMiddlewareValidEvent = + TestEvents.apiGatewaySchemaMiddlewareValidEvent; + expect( APIGatewayProxyEventSchema.parse(apiGatewaySchemaMiddlewareValidEvent) ).toEqual(apiGatewaySchemaMiddlewareValidEvent); }); it('should parse proxy event with no version auth', () => { - const apiGatewayProxyEvent_noVersionAuth = loadExampleEvent( - 'apiGatewayProxyEvent_noVersionAuth.json' - ); + const apiGatewayProxyEvent_noVersionAuth = + TestEvents.apiGatewayProxyEvent_noVersionAuth; + expect( APIGatewayProxyEventSchema.parse(apiGatewayProxyEvent_noVersionAuth) ).toEqual(apiGatewayProxyEvent_noVersionAuth); }); it('should parse proxy event with another path', () => { - const apiGatewayProxyEventAnotherPath = loadExampleEvent( - 'apiGatewayProxyEventAnotherPath.json' - ); + const apiGatewayProxyEventAnotherPath = + TestEvents.apiGatewayProxyEventAnotherPath; + expect( 
APIGatewayProxyEventSchema.parse(apiGatewayProxyEventAnotherPath) ).toEqual(apiGatewayProxyEventAnotherPath); }); it('should parse proxy event with path trailing slash', () => { - const apiGatewayProxyEventPathTrailingSlash = loadExampleEvent( - 'apiGatewayProxyEventPathTrailingSlash.json' - ); + const apiGatewayProxyEventPathTrailingSlash = + TestEvents.apiGatewayProxyEventPathTrailingSlash; expect( APIGatewayProxyEventSchema.parse(apiGatewayProxyEventPathTrailingSlash) ).toEqual(apiGatewayProxyEventPathTrailingSlash); }); it('should parse other proxy event', () => { - const apiGatewayProxyOtherEvent = loadExampleEvent( - 'apiGatewayProxyOtherEvent.json' - ); + const apiGatewayProxyOtherEvent = TestEvents.apiGatewayProxyOtherEvent; expect(APIGatewayProxyEventSchema.parse(apiGatewayProxyOtherEvent)).toEqual( apiGatewayProxyOtherEvent ); diff --git a/packages/parser/tests/unit/schema/apigwv2.test.ts b/packages/parser/tests/unit/schema/apigwv2.test.ts index 85ac2da1a2..59faa6ca70 100644 --- a/packages/parser/tests/unit/schema/apigwv2.test.ts +++ b/packages/parser/tests/unit/schema/apigwv2.test.ts @@ -4,30 +4,27 @@ * @group unit/parser/schema/ */ -import { loadExampleEvent } from './utils.js'; import { APIGatewayProxyEventV2Schema } from '../../../src/schemas/apigwv2.js'; +import { TestEvents } from './utils.js'; describe('API GW v2 ', () => { it('should parse api gateway v2 event', () => { - const apiGatewayProxyV2Event = loadExampleEvent( - 'apiGatewayProxyV2Event.json' - ); + const apiGatewayProxyV2Event = TestEvents.apiGatewayProxyV2Event; + expect(APIGatewayProxyEventV2Schema.parse(apiGatewayProxyV2Event)).toEqual( apiGatewayProxyV2Event ); }); it('should parse api gateway v2 event with GET method', () => { - const apiGatewayProxyV2Event_GET = loadExampleEvent( - 'apiGatewayProxyV2Event_GET.json' - ); + const apiGatewayProxyV2Event_GET = TestEvents.apiGatewayProxyV2Event_GET; expect( APIGatewayProxyEventV2Schema.parse(apiGatewayProxyV2Event_GET) 
).toEqual(apiGatewayProxyV2Event_GET); }); it('should parse api gateway v2 event with path trailing slash', () => { - const apiGatewayProxyV2EventPathTrailingSlash = loadExampleEvent( - 'apiGatewayProxyV2EventPathTrailingSlash.json' - ); + const apiGatewayProxyV2EventPathTrailingSlash = + TestEvents.apiGatewayProxyV2EventPathTrailingSlash; + expect( APIGatewayProxyEventV2Schema.parse( apiGatewayProxyV2EventPathTrailingSlash @@ -35,33 +32,32 @@ describe('API GW v2 ', () => { ).toEqual(apiGatewayProxyV2EventPathTrailingSlash); }); it('should parse api gateway v2 event with iam', () => { - const apiGatewayProxyV2IamEvent = loadExampleEvent( - 'apiGatewayProxyV2IamEvent.json' - ); + const apiGatewayProxyV2IamEvent = TestEvents.apiGatewayProxyV2IamEvent; + expect( APIGatewayProxyEventV2Schema.parse(apiGatewayProxyV2IamEvent) ).toEqual(apiGatewayProxyV2IamEvent); }); it('should parse api gateway v2 event with lambda authorizer', () => { - const apiGatewayProxyV2LambdaAuthorizerEvent = loadExampleEvent( - 'apiGatewayProxyV2LambdaAuthorizerEvent.json' - ); + const apiGatewayProxyV2LambdaAuthorizerEvent = + TestEvents.apiGatewayProxyV2LambdaAuthorizerEvent; + expect( APIGatewayProxyEventV2Schema.parse(apiGatewayProxyV2LambdaAuthorizerEvent) ).toEqual(apiGatewayProxyV2LambdaAuthorizerEvent); }); it('should parse api gateway v2 event with other get event', () => { - const apiGatewayProxyV2OtherGetEvent = loadExampleEvent( - 'apiGatewayProxyV2OtherGetEvent.json' - ); + const apiGatewayProxyV2OtherGetEvent = + TestEvents.apiGatewayProxyV2OtherGetEvent; + expect( APIGatewayProxyEventV2Schema.parse(apiGatewayProxyV2OtherGetEvent) ).toEqual(apiGatewayProxyV2OtherGetEvent); }); it('should parse api gateway v2 event with schema middleware', () => { - const apiGatewayProxyV2SchemaMiddlewareValidEvent = loadExampleEvent( - 'apiGatewayProxyV2SchemaMiddlewareValidEvent.json' - ); + const apiGatewayProxyV2SchemaMiddlewareValidEvent = + 
TestEvents.apiGatewayProxyV2SchemaMiddlewareValidEvent; + expect( APIGatewayProxyEventV2Schema.parse( apiGatewayProxyV2SchemaMiddlewareValidEvent diff --git a/packages/parser/tests/unit/schema/cloudformation-custom-resource.test.ts b/packages/parser/tests/unit/schema/cloudformation-custom-resource.test.ts index 66ec61680d..d7ac35ac29 100644 --- a/packages/parser/tests/unit/schema/cloudformation-custom-resource.test.ts +++ b/packages/parser/tests/unit/schema/cloudformation-custom-resource.test.ts @@ -4,18 +4,18 @@ * @group unit/parser/schema/ */ -import { loadExampleEvent } from './utils.js'; import { CloudFormationCustomResourceCreateSchema, CloudFormationCustomResourceUpdateSchema, CloudFormationCustomResourceDeleteSchema, } from '../../../src/schemas/cloudformation-custom-resource.js'; +import { TestEvents } from './utils.js'; describe('CloudFormationCustomResource ', () => { it('should parse create event', () => { - const cloudFormationCustomResourceCreateEvent = loadExampleEvent( - 'cloudFormationCustomResourceCreateEvent.json' - ); + const cloudFormationCustomResourceCreateEvent = + TestEvents.cloudFormationCustomResourceCreateEvent; + expect( CloudFormationCustomResourceCreateSchema.parse( cloudFormationCustomResourceCreateEvent @@ -23,9 +23,9 @@ describe('CloudFormationCustomResource ', () => { ).toEqual(cloudFormationCustomResourceCreateEvent); }); it('should parse update event', () => { - const cloudFormationCustomResourceUpdateEvent = loadExampleEvent( - 'cloudFormationCustomResourceUpdateEvent.json' - ); + const cloudFormationCustomResourceUpdateEvent = + TestEvents.cloudFormationCustomResourceUpdateEvent; + expect( CloudFormationCustomResourceUpdateSchema.parse( cloudFormationCustomResourceUpdateEvent @@ -33,9 +33,9 @@ describe('CloudFormationCustomResource ', () => { ).toEqual(cloudFormationCustomResourceUpdateEvent); }); it('should parse delete event', () => { - const cloudFormationCustomResourceDeleteEvent = loadExampleEvent( - 
'cloudFormationCustomResourceDeleteEvent.json' - ); + const cloudFormationCustomResourceDeleteEvent = + TestEvents.cloudFormationCustomResourceDeleteEvent; + expect( CloudFormationCustomResourceDeleteSchema.parse( cloudFormationCustomResourceDeleteEvent diff --git a/packages/parser/tests/unit/schema/cloudwatch.test.ts b/packages/parser/tests/unit/schema/cloudwatch.test.ts index a978030de2..c12e0d608c 100644 --- a/packages/parser/tests/unit/schema/cloudwatch.test.ts +++ b/packages/parser/tests/unit/schema/cloudwatch.test.ts @@ -4,12 +4,12 @@ * @group unit/parser/schema/ */ -import { loadExampleEvent } from './utils.js'; import { CloudWatchLogsSchema } from '../../../src/schemas/cloudwatch.js'; +import { TestEvents } from './utils.js'; describe('CloudWatchLogs ', () => { it('should parse cloudwatch logs event', () => { - const cloudWatchLogEvent = loadExampleEvent('cloudWatchLogEvent.json'); + const cloudWatchLogEvent = TestEvents.cloudWatchLogEvent; const parsed = CloudWatchLogsSchema.parse(cloudWatchLogEvent); expect(parsed.awslogs.data).toBeDefined(); expect(parsed.awslogs.data?.logEvents[0]).toEqual({ diff --git a/packages/parser/tests/unit/schema/dynamodb.test.ts b/packages/parser/tests/unit/schema/dynamodb.test.ts index b152c07b72..821d484f40 100644 --- a/packages/parser/tests/unit/schema/dynamodb.test.ts +++ b/packages/parser/tests/unit/schema/dynamodb.test.ts @@ -5,10 +5,10 @@ */ import { DynamoDBStreamSchema } from '../../../src/schemas/dynamodb.js'; -import { loadExampleEvent } from './utils.js'; +import { TestEvents } from './utils.js'; describe('DynamoDB ', () => { - const dynamoStreamEvent = loadExampleEvent('dynamoStreamEvent.json'); + const dynamoStreamEvent = TestEvents.dynamoStreamEvent; it('should parse a stream of records', () => { expect(DynamoDBStreamSchema.parse(dynamoStreamEvent)).toEqual( dynamoStreamEvent diff --git a/packages/parser/tests/unit/schema/eventbridge.test.ts b/packages/parser/tests/unit/schema/eventbridge.test.ts index 
e92bd2248f..b7ed50d37c 100644 --- a/packages/parser/tests/unit/schema/eventbridge.test.ts +++ b/packages/parser/tests/unit/schema/eventbridge.test.ts @@ -4,12 +4,13 @@ * @group unit/parser/schema/ */ -import { loadExampleEvent } from './utils.js'; import { EventBridgeSchema } from '../../../src/schemas/eventbridge.js'; +import { TestEvents } from './utils.js'; describe('EventBridge ', () => { it('should parse eventbridge event', () => { - const eventBridgeEvent = loadExampleEvent('eventBridgeEvent.json'); + const eventBridgeEvent = TestEvents.eventBridgeEvent; + expect(EventBridgeSchema.parse(eventBridgeEvent)).toEqual(eventBridgeEvent); }); }); diff --git a/packages/parser/tests/unit/schema/kafka.test.ts b/packages/parser/tests/unit/schema/kafka.test.ts index 3b2bc50b83..1296130dd8 100644 --- a/packages/parser/tests/unit/schema/kafka.test.ts +++ b/packages/parser/tests/unit/schema/kafka.test.ts @@ -4,11 +4,11 @@ * @group unit/parser/schema/ */ -import { loadExampleEvent } from './utils.js'; import { KafkaMskEventSchema, KafkaSelfManagedEventSchema, } from '../../../src/schemas/kafka.js'; +import { TestEvents } from './utils.js'; describe('Kafka ', () => { const expectedTestEvent = { @@ -26,15 +26,15 @@ describe('Kafka ', () => { ], }; it('should parse kafka MSK event', () => { - const kafkaEventMsk = loadExampleEvent('kafkaEventMsk.json'); + const kafkaEventMsk = TestEvents.kafkaEventMsk; + expect( KafkaMskEventSchema.parse(kafkaEventMsk).records['mytopic-0'][0] ).toEqual(expectedTestEvent); }); it('should parse kafka self managed event', () => { - const kafkaEventSelfManaged = loadExampleEvent( - 'kafkaEventSelfManaged.json' - ); + const kafkaEventSelfManaged = TestEvents.kafkaEventSelfManaged; + expect( KafkaSelfManagedEventSchema.parse(kafkaEventSelfManaged).records[ 'mytopic-0' @@ -42,9 +42,8 @@ describe('Kafka ', () => { ).toEqual(expectedTestEvent); }); it('should transform bootstrapServers to array', () => { - const kafkaEventSelfManaged = loadExampleEvent( 
- 'kafkaEventSelfManaged.json' - ); + const kafkaEventSelfManaged = TestEvents.kafkaEventSelfManaged; + expect( KafkaSelfManagedEventSchema.parse(kafkaEventSelfManaged).bootstrapServers ).toEqual([ @@ -53,11 +52,12 @@ describe('Kafka ', () => { ]); }); it('should return undefined if bootstrapServers is not present', () => { - const kafkaEventSelfManaged = loadExampleEvent( - 'kafkaEventSelfManaged.json' - ) as { bootstrapServers: string }; + const kafkaEventSelfManaged = TestEvents.kafkaEventSelfManaged as { + bootstrapServers: string; + }; kafkaEventSelfManaged.bootstrapServers = ''; const parsed = KafkaSelfManagedEventSchema.parse(kafkaEventSelfManaged); + expect(parsed.bootstrapServers).toBeUndefined(); }); }); diff --git a/packages/parser/tests/unit/schema/kinesis.test.ts b/packages/parser/tests/unit/schema/kinesis.test.ts index 99fc5eebc4..8a9aabadec 100644 --- a/packages/parser/tests/unit/schema/kinesis.test.ts +++ b/packages/parser/tests/unit/schema/kinesis.test.ts @@ -4,75 +4,57 @@ * @group unit/parser/schema/ */ -import { loadExampleEvent } from './utils.js'; import { KinesisDataStreamSchema } from '../../../src/schemas/kinesis.js'; import { KinesisFirehoseSchema, KinesisFirehoseSqsSchema, } from '../../../src/schemas/kinesis-firehose.js'; -import { extractCloudWatchLogFromEvent } from '../../../src/schemas/cloudwatch.js'; +import { TestEvents } from './utils.js'; describe('Kinesis ', () => { it('should parse kinesis event', () => { - const kinesisStreamEvent = loadExampleEvent('kinesisStreamEvent.json'); + const kinesisStreamEvent = TestEvents.kinesisStreamEvent; const parsed = KinesisDataStreamSchema.parse(kinesisStreamEvent); - const decodedData = Buffer.from( - parsed.Records[0].kinesis.data, - 'base64' - ).toString('utf8'); - expect(decodedData).toEqual('Hello, this is a test.'); + + expect(parsed.Records[0].kinesis.data).toEqual('Hello, this is a test.'); }); it('should parse single kinesis record', () => { - const kinesisStreamEventOneRecord = 
loadExampleEvent( - 'kinesisStreamEventOneRecord.json' - ); + const kinesisStreamEventOneRecord = TestEvents.kinesisStreamEventOneRecord; const parsed = KinesisDataStreamSchema.parse(kinesisStreamEventOneRecord); - const decodedJson = JSON.parse( - Buffer.from(parsed.Records[0].kinesis.data, 'base64').toString('utf8') - ); - expect(decodedJson).toEqual({ + + expect(parsed.Records[0].kinesis.data).toEqual({ message: 'test message', username: 'test', }); }); it('should parse Firehose event', () => { - const kinesisFirehoseKinesisEvent = loadExampleEvent( - 'kinesisFirehoseKinesisEvent.json' - ); + const kinesisFirehoseKinesisEvent = TestEvents.kinesisFirehoseKinesisEvent; const parsed = KinesisFirehoseSchema.parse(kinesisFirehoseKinesisEvent); expect(parsed.records[0].data).toEqual('Hello World'); }); it('should parse Kinesis Firehose PutEvents event', () => { - const kinesisFirehosePutEvent = loadExampleEvent( - 'kinesisFirehosePutEvent.json' - ); + const kinesisFirehosePutEvent = TestEvents.kinesisFirehosePutEvent; const parsed = KinesisFirehoseSchema.parse(kinesisFirehosePutEvent); expect(JSON.parse(parsed.records[1].data)).toEqual({ Hello: 'World', }); }); it('should parse Firehose event with SQS event', () => { - const kinesisFirehoseSQSEvent = loadExampleEvent( - 'kinesisFirehoseSQSEvent.json' - ); + const kinesisFirehoseSQSEvent = TestEvents.kinesisFirehoseSQSEvent; const parsed = KinesisFirehoseSqsSchema.parse(kinesisFirehoseSQSEvent); expect(parsed.records[0].data).toMatchObject({ messageId: '5ab807d4-5644-4c55-97a3-47396635ac74', body: 'Test message.', }); }); - it('should parse Firehose event with CloudWatch event', () => { - const kinesisStreamCloudWatchLogsEvent = loadExampleEvent( - 'kinesisStreamCloudWatchLogsEvent.json' - ); + it('should parse Kinesis event with CloudWatch event', () => { + const kinesisStreamCloudWatchLogsEvent = + TestEvents.kinesisStreamCloudWatchLogsEvent; const parsed = KinesisDataStreamSchema.parse( 
kinesisStreamCloudWatchLogsEvent ); - const jsonParsed = extractCloudWatchLogFromEvent( - parsed.Records[0].kinesis.data - ); - expect(jsonParsed).toMatchObject({ + expect(parsed.Records[0].kinesis.data).toMatchObject({ messageType: 'DATA_MESSAGE', owner: '231436140809', logGroup: '/aws/lambda/pt-1488-DummyLogDataFunction-gnWXPvL6jJyG', @@ -80,9 +62,9 @@ describe('Kinesis ', () => { }); }); it('should return original value if cannot parse KinesisFirehoseSqsRecord', () => { - const kinesisFirehoseSQSEvent = loadExampleEvent( - 'kinesisFirehoseSQSEvent.json' - ) as { records: { data: string }[] }; + const kinesisFirehoseSQSEvent = TestEvents.kinesisFirehoseSQSEvent as { + records: { data: string }[]; + }; kinesisFirehoseSQSEvent.records[0].data = 'not a valid json'; const parsed = KinesisFirehoseSqsSchema.parse(kinesisFirehoseSQSEvent); expect(parsed.records[0].data).toEqual('not a valid json'); diff --git a/packages/parser/tests/unit/schema/lambda.test.ts b/packages/parser/tests/unit/schema/lambda.test.ts index 459cd8a32e..cd789704bc 100644 --- a/packages/parser/tests/unit/schema/lambda.test.ts +++ b/packages/parser/tests/unit/schema/lambda.test.ts @@ -4,14 +4,13 @@ * @group unit/parser/schema/ */ -import { loadExampleEvent } from './utils.js'; import { LambdaFunctionUrlSchema } from '../../../src/schemas/lambda.js'; +import { TestEvents } from './utils.js'; describe('Lambda ', () => { it('should parse lambda event', () => { - const lambdaFunctionUrlEvent = loadExampleEvent( - 'apiGatewayProxyV2Event.json' - ); + const lambdaFunctionUrlEvent = TestEvents.apiGatewayProxyV2Event; + expect(LambdaFunctionUrlSchema.parse(lambdaFunctionUrlEvent)).toEqual( lambdaFunctionUrlEvent ); diff --git a/packages/parser/tests/unit/schema/s3.test.ts b/packages/parser/tests/unit/schema/s3.test.ts index 2c5378fdf7..eff1bb6840 100644 --- a/packages/parser/tests/unit/schema/s3.test.ts +++ b/packages/parser/tests/unit/schema/s3.test.ts @@ -10,18 +10,19 @@ import { S3Schema, 
S3ObjectLambdaEventSchema, } from '../../../src/schemas/s3.js'; -import { loadExampleEvent } from './utils.js'; +import { TestEvents } from './utils.js'; describe('S3 ', () => { it('should parse s3 event', () => { - const s3Event = loadExampleEvent('s3Event.json'); + const s3Event = TestEvents.s3Event; + expect(S3Schema.parse(s3Event)).toEqual(s3Event); }); it('should parse s3 event bridge notification event created', () => { - const s3EventBridgeNotificationObjectCreatedEvent = loadExampleEvent( - 's3EventBridgeNotificationObjectCreatedEvent.json' - ); + const s3EventBridgeNotificationObjectCreatedEvent = + TestEvents.s3EventBridgeNotificationObjectCreatedEvent; + expect( S3EventNotificationEventBridgeSchema.parse( s3EventBridgeNotificationObjectCreatedEvent @@ -30,9 +31,9 @@ describe('S3 ', () => { }); it('should parse s3 event bridge notification event detelted', () => { - const s3EventBridgeNotificationObjectDeletedEvent = loadExampleEvent( - 's3EventBridgeNotificationObjectDeletedEvent.json' - ); + const s3EventBridgeNotificationObjectDeletedEvent = + TestEvents.s3EventBridgeNotificationObjectDeletedEvent; + expect( S3EventNotificationEventBridgeSchema.parse( s3EventBridgeNotificationObjectDeletedEvent @@ -40,9 +41,9 @@ describe('S3 ', () => { ).toEqual(s3EventBridgeNotificationObjectDeletedEvent); }); it('should parse s3 event bridge notification event expired', () => { - const s3EventBridgeNotificationObjectExpiredEvent = loadExampleEvent( - 's3EventBridgeNotificationObjectExpiredEvent.json' - ); + const s3EventBridgeNotificationObjectExpiredEvent = + TestEvents.s3EventBridgeNotificationObjectExpiredEvent; + expect( S3EventNotificationEventBridgeSchema.parse( s3EventBridgeNotificationObjectExpiredEvent @@ -51,27 +52,27 @@ describe('S3 ', () => { }); it('should parse s3 sqs notification event', () => { - const s3SqsEvent = loadExampleEvent('s3SqsEvent.json'); + const s3SqsEvent = TestEvents.s3SqsEvent; 
expect(S3SqsEventNotificationSchema.parse(s3SqsEvent)).toEqual(s3SqsEvent); }); it('should parse s3 event with decoded key', () => { - const s3EventDecodedKey = loadExampleEvent('s3EventDecodedKey.json'); + const s3EventDecodedKey = TestEvents.s3EventDecodedKey; expect(S3Schema.parse(s3EventDecodedKey)).toEqual(s3EventDecodedKey); }); it('should parse s3 event delete object', () => { - const s3EventDeleteObject = loadExampleEvent('s3EventDeleteObject.json'); + const s3EventDeleteObject = TestEvents.s3EventDeleteObject; expect(S3Schema.parse(s3EventDeleteObject)).toEqual(s3EventDeleteObject); }); it('should parse s3 event glacier', () => { - const s3EventGlacier = loadExampleEvent('s3EventGlacier.json'); + const s3EventGlacier = TestEvents.s3EventGlacier; expect(S3Schema.parse(s3EventGlacier)).toEqual(s3EventGlacier); }); it('should parse s3 object event iam user', () => { - const s3ObjectEventIAMUser = loadExampleEvent('s3ObjectEventIAMUser.json'); + const s3ObjectEventIAMUser = TestEvents.s3ObjectEventIAMUser; expect(S3ObjectLambdaEventSchema.parse(s3ObjectEventIAMUser)).toEqual( s3ObjectEventIAMUser ); @@ -79,9 +80,8 @@ describe('S3 ', () => { it('should parse s3 object event temp credentials', () => { // ignore any because we don't want typed json - const s3ObjectEventTempCredentials = loadExampleEvent( - 's3ObjectEventTempCredentials.json' - ) as any; // eslint-disable-line @typescript-eslint/no-explicit-any + const s3ObjectEventTempCredentials = + TestEvents.s3ObjectEventTempCredentials as any; // eslint-disable-line @typescript-eslint/no-explicit-any const parsed = S3ObjectLambdaEventSchema.parse( s3ObjectEventTempCredentials ); diff --git a/packages/parser/tests/unit/schema/ses.test.ts b/packages/parser/tests/unit/schema/ses.test.ts index eeb29f6a1b..3d714ea074 100644 --- a/packages/parser/tests/unit/schema/ses.test.ts +++ b/packages/parser/tests/unit/schema/ses.test.ts @@ -4,12 +4,12 @@ * @group unit/parser/schema/ */ -import { loadExampleEvent } from 
'./utils.js'; import { SesSchema } from '../../../src/schemas/ses.js'; +import { TestEvents } from './utils.js'; describe('Schema:', () => { - const sesEvent = loadExampleEvent('sesEvent.json'); it('SES should parse ses event', () => { + const sesEvent = TestEvents.sesEvent; expect(SesSchema.parse(sesEvent)).toEqual(sesEvent); }); }); diff --git a/packages/parser/tests/unit/schema/sns.test.ts b/packages/parser/tests/unit/schema/sns.test.ts index 66ec7aa297..1875d20642 100644 --- a/packages/parser/tests/unit/schema/sns.test.ts +++ b/packages/parser/tests/unit/schema/sns.test.ts @@ -4,12 +4,12 @@ * @group unit/parser/schema/ */ -import { loadExampleEvent } from './utils.js'; import { SnsSchema } from '../../../src/schemas/sns.js'; +import { TestEvents } from './utils.js'; describe('Schema:', () => { - const snsEvent = loadExampleEvent('snsEvent.json'); it('SNS should parse sns event', () => { + const snsEvent = TestEvents.snsEvent; expect(SnsSchema.parse(snsEvent)).toEqual(snsEvent); }); }); diff --git a/packages/parser/tests/unit/schema/sqs.test.ts b/packages/parser/tests/unit/schema/sqs.test.ts index 191b843298..802c36da08 100644 --- a/packages/parser/tests/unit/schema/sqs.test.ts +++ b/packages/parser/tests/unit/schema/sqs.test.ts @@ -4,12 +4,12 @@ * @group unit/parser/schema/ */ -import { loadExampleEvent } from './utils.js'; import { SqsSchema } from '../../../src/schemas/sqs.js'; +import { TestEvents } from './utils.js'; describe('SQS ', () => { - const sqsEvent = loadExampleEvent('sqsEvent.json'); it('should parse sqs event', () => { + const sqsEvent = TestEvents.sqsEvent; expect(SqsSchema.parse(sqsEvent)).toEqual(sqsEvent); }); }); diff --git a/packages/parser/tests/unit/schema/utils.ts b/packages/parser/tests/unit/schema/utils.ts index 8101dfefdb..3a17df8570 100644 --- a/packages/parser/tests/unit/schema/utils.ts +++ b/packages/parser/tests/unit/schema/utils.ts @@ -1,7 +1,116 @@ import { readFileSync } from 'node:fs'; +import { z } from 'zod'; -export const 
loadExampleEvent = (fileName: string): unknown => { - const event = readFileSync(`./tests/events/${fileName}`, 'utf8'); +export const TestSchema = z.object({ + name: z.string(), + age: z.number().min(18).max(99), +}); - return JSON.parse(event); +const filenames = [ + 'activeMQEvent', + 'albEvent', + 'albEventPathTrailingSlash', + 'albMultiValueHeadersEvent', + 'apiGatewayAuthorizerRequestEvent', + 'apiGatewayAuthorizerTokenEvent', + 'apiGatewayAuthorizerV2Event', + 'apiGatewayProxyEvent', + 'apiGatewayProxyEventAnotherPath', + 'apiGatewayProxyEventPathTrailingSlash', + 'apiGatewayProxyEventPrincipalId', + 'apiGatewayProxyEvent_noVersionAuth', + 'apiGatewayProxyOtherEvent', + 'apiGatewayProxyV2Event', + 'apiGatewayProxyV2EventPathTrailingSlash', + 'apiGatewayProxyV2Event_GET', + 'apiGatewayProxyV2IamEvent', + 'apiGatewayProxyV2LambdaAuthorizerEvent', + 'apiGatewayProxyV2OtherGetEvent', + 'apiGatewayProxyV2SchemaMiddlewareInvalidEvent', + 'apiGatewayProxyV2SchemaMiddlewareValidEvent', + 'apiGatewaySchemaMiddlewareInvalidEvent', + 'apiGatewaySchemaMiddlewareValidEvent', + 'appSyncAuthorizerEvent', + 'appSyncAuthorizerResponse', + 'appSyncDirectResolver', + 'appSyncResolverEvent', + 'awsConfigRuleConfigurationChanged', + 'awsConfigRuleOversizedConfiguration', + 'awsConfigRuleScheduled', + 'bedrockAgentEvent', + 'bedrockAgentPostEvent', + 'cloudFormationCustomResourceCreateEvent', + 'cloudFormationCustomResourceDeleteEvent', + 'cloudFormationCustomResourceUpdateEvent', + 'cloudWatchDashboardEvent', + 'cloudWatchLogEvent', + 'codePipelineEvent', + 'codePipelineEventData', + 'codePipelineEventEmptyUserParameters', + 'codePipelineEventWithEncryptionKey', + 'cognitoCreateAuthChallengeEvent', + 'cognitoCustomMessageEvent', + 'cognitoDefineAuthChallengeEvent', + 'cognitoPostAuthenticationEvent', + 'cognitoPostConfirmationEvent', + 'cognitoPreAuthenticationEvent', + 'cognitoPreSignUpEvent', + 'cognitoPreTokenGenerationEvent', + 'cognitoUserMigrationEvent', + 
'cognitoVerifyAuthChallengeResponseEvent', + 'connectContactFlowEventAll', + 'connectContactFlowEventMin', + 'dynamoStreamEvent', + 'eventBridgeEvent', + 'kafkaEventMsk', + 'kafkaEventSelfManaged', + 'kinesisFirehoseKinesisEvent', + 'kinesisFirehosePutEvent', + 'kinesisFirehoseSQSEvent', + 'kinesisStreamCloudWatchLogsEvent', + 'kinesisStreamEvent', + 'kinesisStreamEventOneRecord', + 'lambdaFunctionUrlEvent', + 'lambdaFunctionUrlEventPathTrailingSlash', + 'lambdaFunctionUrlIAMEvent', + 'rabbitMQEvent', + 's3Event', + 's3EventBridgeNotificationObjectCreatedEvent', + 's3EventBridgeNotificationObjectDeletedEvent', + 's3EventBridgeNotificationObjectExpiredEvent', + 's3EventBridgeNotificationObjectRestoreCompletedEvent', + 's3EventDecodedKey', + 's3EventDeleteObject', + 's3EventGlacier', + 's3ObjectEventIAMUser', + 's3ObjectEventTempCredentials', + 's3SqsEvent', + 'secretsManagerEvent', + 'sesEvent', + 'snsEvent', + 'snsSqsEvent', + 'snsSqsFifoEvent', + 'sqsEvent', + 'vpcLatticeEvent', + 'vpcLatticeEventPathTrailingSlash', + 'vpcLatticeEventV2PathTrailingSlash', + 'vpcLatticeV2Event', +] as const; + +type TestEvents = { [K in (typeof filenames)[number]]: unknown }; +const loadFileContent = (filename: string): string => + readFileSync(`./tests/events/${filename}.json`, 'utf-8'); + +const createTestEvents = (fileList: readonly string[]): TestEvents => { + const testEvents: Partial<TestEvents> = {}; + + fileList.forEach((filename) => { + Object.defineProperty(testEvents, filename, { + get: () => JSON.parse(loadFileContent(filename)), + }); + }); + + return testEvents as TestEvents; }; + +export const TestEvents = createTestEvents(filenames); diff --git a/packages/parser/tests/unit/schema/vpc-lattice.test.ts b/packages/parser/tests/unit/schema/vpc-lattice.test.ts index ea0a0dd4a4..576efa623f 100644 --- a/packages/parser/tests/unit/schema/vpc-lattice.test.ts +++ b/packages/parser/tests/unit/schema/vpc-lattice.test.ts @@ -4,18 +4,17 @@ * @group unit/parser/schema/ */ -import { 
loadExampleEvent } from './utils.js'; import { VpcLatticeSchema } from '../../../src/schemas/vpc-lattice.js'; +import { TestEvents } from './utils.js'; describe('VPC Lattice ', () => { it('should parse vpc lattice event', () => { - const vpcLatticeEvent = loadExampleEvent('vpcLatticeEvent.json'); + const vpcLatticeEvent = TestEvents.vpcLatticeEvent; expect(VpcLatticeSchema.parse(vpcLatticeEvent)).toEqual(vpcLatticeEvent); }); it('should parse vpc lattice path trailing slash event', () => { - const vpcLatticeEventPathTrailingSlash = loadExampleEvent( - 'vpcLatticeEventPathTrailingSlash.json' - ); + const vpcLatticeEventPathTrailingSlash = + TestEvents.vpcLatticeEventPathTrailingSlash; expect(VpcLatticeSchema.parse(vpcLatticeEventPathTrailingSlash)).toEqual( vpcLatticeEventPathTrailingSlash ); diff --git a/packages/parser/tests/unit/schema/vpc-latticev2.test.ts b/packages/parser/tests/unit/schema/vpc-latticev2.test.ts index da6d7d885e..e93deb24c1 100644 --- a/packages/parser/tests/unit/schema/vpc-latticev2.test.ts +++ b/packages/parser/tests/unit/schema/vpc-latticev2.test.ts @@ -4,20 +4,19 @@ * @group unit/parser/schema/ */ -import { loadExampleEvent } from './utils.js'; import { VpcLatticeV2Schema } from '../../../src/schemas/vpc-latticev2.js'; +import { TestEvents } from './utils.js'; describe('VpcLatticeV2 ', () => { it('should parse VpcLatticeV2 event', () => { - const vpcLatticeV2Event = loadExampleEvent('vpcLatticeV2Event.json'); + const vpcLatticeV2Event = TestEvents.vpcLatticeV2Event; const parsed = VpcLatticeV2Schema.parse(vpcLatticeV2Event); expect(parsed).toEqual(vpcLatticeV2Event); }); it('should parse VpcLatticeV2PathTrailingSlash event', () => { - const vpcLatticeEventV2PathTrailingSlash = loadExampleEvent( - 'vpcLatticeEventV2PathTrailingSlash.json' - ); + const vpcLatticeEventV2PathTrailingSlash = + TestEvents.vpcLatticeEventV2PathTrailingSlash; const parsed = VpcLatticeV2Schema.parse(vpcLatticeEventV2PathTrailingSlash); 
expect(parsed).toEqual(vpcLatticeEventV2PathTrailingSlash); });