diff --git a/.github/workflows/deploy.yml b/.github/workflows/deploy.yml index d55a272..999e88a 100644 --- a/.github/workflows/deploy.yml +++ b/.github/workflows/deploy.yml @@ -28,10 +28,12 @@ jobs: id: build_image_step env: ECR_REGISTRY: ${{ steps.login-ecr.outputs.registry }} - ECR_REPOSITORY: your-repo-name + ECR_REPOSITORY: micro-test IMAGE_TAG: ${{ github.sha }} run: | - docker build -t $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG -t $ECR_REGISTRY/$ECR_REPOSITORY:latest . + docker build \ + -t $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG \ + -t $ECR_REGISTRY/$ECR_REPOSITORY:latest . docker push $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG docker push $ECR_REGISTRY/$ECR_REPOSITORY:latest echo "::set-output name=image_tag::$IMAGE_TAG" \ No newline at end of file diff --git a/.gitignore b/.gitignore index 4edc30a..61d8d87 100644 --- a/.gitignore +++ b/.gitignore @@ -1,5 +1,6 @@ .env .venv +.vscode node_modules state.tf.json throwaway diff --git a/.terraform.lock.hcl b/.terraform.lock.hcl index 016d279..df04baa 100644 --- a/.terraform.lock.hcl +++ b/.terraform.lock.hcl @@ -2,23 +2,66 @@ # Manual edits may be lost in future updates. 
provider "registry.terraform.io/hashicorp/aws" { - version = "5.20.0" + version = "5.23.1" + constraints = ">= 5.20.0" hashes = [ - "h1:PVeHB4vepOfQ3QZxhu4kw+ZMEv5EBn1ReEa6VTU9SOM=", - "zh:1c518fd0764fe91db43568d3ddf5383b890da6b37d42f339d3728a3583a9f313", - "zh:21572e06a7e10a0284bf21b805ffa10221d11826ba31f2e090928e9a04108e09", - "zh:25157c40c3cc4bc80ff9029c7e5f4d98294cb69457b2ce3bbceae7f649b9319d", - "zh:2b527e0da619ee21ca316140aa9645f813f9034cf537705b61ed5c9a1fd80a22", - "zh:3305379a123e40cc290cfc576d2582991e93d080da6ed28b974f5082bd1fb6b7", - "zh:67c3637ea0973910d6310697dbd03269c5f796bc2f36ba8f83c496c1143f53ae", - "zh:974d6e66db4337e85878162437bee323e797de64d4a25a4cca151e07051beb7a", + "h1:keD9rGwuFbn70D1npMx486xFsSP/TtyNa6E0AgVJY1U=", + "zh:024a188ad3c979a9ec0d7d898aaa90a3867a8839edc8d3543ea6155e6e010064", + "zh:05b73a04c58534a7527718ef55040577d5c573ea704e16a813e7d1b18a7f4c26", + "zh:13932cdee2fa90f40ebaa783f033752864eb6899129e055511359f8d1ada3710", + "zh:3500f5febc7878b4426ef89a16c0096eefd4dd0c5b0d9ba00f9ed54387df5d09", + "zh:394a48dea7dfb0ae40e506ccdeb5387829dbb8ab00fb64f41c347a1de092aa00", + "zh:51a57f258b3bce2c167b39b6ecf486f72f523da05d4c92adc6b697abe1c5ff1f", + "zh:7290488a96d8d10119b431eb08a37407c0812283042a21b69bcc2454eabc08ad", + "zh:7545389dbbba624c0ffa72fa376b359b27f484aba02139d37ee5323b589e0939", + "zh:92266ac6070809e0c874511ae93097c8b1eddce4c0213e487c5439e89b6ad64d", "zh:9b12af85486a96aedd8d7984b0ff811a4b42e3d88dad1a3fb4c0b580d04fa425", - "zh:bc25ddaee47dfef719b94b548854c8a134d730198a3ae320ae2d2bbc7b0430e9", - "zh:be94bdb8017e9d3fc79d83c5720431890488d45d2a7edd7ad1d38b3a4ca3a483", - "zh:c5c4e6f1b7052b611223c919461643f30bec74c827494d9bdfba2b6ad92f8f5f", - "zh:d3ff194735af0e3868ae381b31f61434fac20f378e9c889e8ed681fbd47800f2", - "zh:e202e245c4615d5a7b0b50abc8707f2d52ba1a85d26928c3118959996f7544ce", - "zh:ec73134e00e7ce67138273b706583b7fe40483e6b81fa1d3ffc5e925d99c6b68", - "zh:f8871c9c3f57f31b04f2e611bacccf610ac5ed487d789be8cfc92542b0a4af76", + 
"zh:9c3841bd650d6ba471c7159bcdfa35200e5e49c2ea11032c481a33cf7875879d", + "zh:bd103c46a16e7f9357e08d6427c316ccc56d203452130eed8e36ede3afa3322c", + "zh:cab0a16e320c6ca285a3a51f40c8f46dbaa0712856594819b415b4d8b3e63910", + "zh:e8adedcda4d6ff47dcae9c9bb884da26ca448fb6f7436be95ad6a341e4d8094a", + "zh:fc23701a3723f50878f440dcdf8768ea96d60a0d7c351aa6dfb912ad832c8384", + ] +} + +provider "registry.terraform.io/hashicorp/null" { + version = "3.2.1" + constraints = ">= 2.0.0" + hashes = [ + "h1:ydA0/SNRVB1o95btfshvYsmxA+jZFRZcvKzZSB+4S1M=", + "zh:58ed64389620cc7b82f01332e27723856422820cfd302e304b5f6c3436fb9840", + "zh:62a5cc82c3b2ddef7ef3a6f2fedb7b9b3deff4ab7b414938b08e51d6e8be87cb", + "zh:63cff4de03af983175a7e37e52d4bd89d990be256b16b5c7f919aff5ad485aa5", + "zh:74cb22c6700e48486b7cabefa10b33b801dfcab56f1a6ac9b6624531f3d36ea3", + "zh:78d5eefdd9e494defcb3c68d282b8f96630502cac21d1ea161f53cfe9bb483b3", + "zh:79e553aff77f1cfa9012a2218b8238dd672ea5e1b2924775ac9ac24d2a75c238", + "zh:a1e06ddda0b5ac48f7e7c7d59e1ab5a4073bbcf876c73c0299e4610ed53859dc", + "zh:c37a97090f1a82222925d45d84483b2aa702ef7ab66532af6cbcfb567818b970", + "zh:e4453fbebf90c53ca3323a92e7ca0f9961427d2f0ce0d2b65523cc04d5d999c2", + "zh:e80a746921946d8b6761e77305b752ad188da60688cfd2059322875d363be5f5", + "zh:fbdb892d9822ed0e4cb60f2fedbdbb556e4da0d88d3b942ae963ed6ff091e48f", + "zh:fca01a623d90d0cad0843102f9b8b9fe0d3ff8244593bd817f126582b52dd694", + ] +} + +provider "registry.terraform.io/kreuzwerker/docker" { + version = "3.0.2" + constraints = ">= 3.0.0" + hashes = [ + "h1:XjdpVL61KtTsuPE8swok3GY8A+Bu3TZs8T2DOEpyiXo=", + "zh:15b0a2b2b563d8d40f62f83057d91acb02cd0096f207488d8b4298a59203d64f", + "zh:23d919de139f7cd5ebfd2ff1b94e6d9913f0977fcfc2ca02e1573be53e269f95", + "zh:38081b3fe317c7e9555b2aaad325ad3fa516a886d2dfa8605ae6a809c1072138", + "zh:4a9c5065b178082f79ad8160243369c185214d874ff5048556d48d3edd03c4da", + "zh:5438ef6afe057945f28bce43d76c4401254073de01a774760169ac1058830ac2", + 
"zh:60b7fadc287166e5c9873dfe53a7976d98244979e0ab66428ea0dea1ebf33e06", + "zh:61c5ec1cb94e4c4a4fb1e4a24576d5f39a955f09afb17dab982de62b70a9bdd1", + "zh:a38fe9016ace5f911ab00c88e64b156ebbbbfb72a51a44da3c13d442cd214710", + "zh:c2c4d2b1fd9ebb291c57f524b3bf9d0994ff3e815c0cd9c9bcb87166dc687005", + "zh:d567bb8ce483ab2cf0602e07eae57027a1a53994aba470fa76095912a505533d", + "zh:e83bf05ab6a19dd8c43547ce9a8a511f8c331a124d11ac64687c764ab9d5a792", + "zh:e90c934b5cd65516fbcc454c89a150bfa726e7cf1fe749790c7480bbeb19d387", + "zh:f05f167d2eaf913045d8e7b88c13757e3cf595dd5cd333057fdafc7c4b7fed62", + "zh:fcc9c1cea5ce85e8bcb593862e699a881bd36dffd29e2e367f82d15368659c3d", ] } diff --git a/examples/index.ts b/examples/index.ts index d8031ef..3dd35e8 100644 --- a/examples/index.ts +++ b/examples/index.ts @@ -1,5 +1,4 @@ -import { modulate, config, lambda, api, topic, zone } from '../src/index' -import type { Provider, Terraform } from '../src/types' +import { modulate, config, lambda, api, topic, zone, Provider, Terraform } from '../src/index' const apex = 'chopshop-test.net' const name = 'throwaway-test-123' @@ -24,6 +23,7 @@ const topic_arn = out_topic?.sns?.resource?.sns_topic?.arn // // ======= LAMBDA ======= const lambdaMod = modulate({ ms1: lambda }) + const [mod_lambda, out_lambda] = lambdaMod({ name, file_path: '${path.root}/lambdas/template/zipped/handler.py.zip', @@ -47,6 +47,7 @@ const [mod_lambda, out_lambda] = lambdaMod({ }, tags, }) + const functionInvokeArn = out_lambda?.lambda?.resource?.lambda_function?.invoke_arn const functionName = out_lambda?.lambda?.resource?.lambda_function?.function_name @@ -65,7 +66,7 @@ const [mod_api, out_api] = modulate({ api })({ }, tags, }) -//JSON.stringify(out_api, null, 4) +//JSON.stringify(out_api, null, 4) // ======= COMPILE ======= @@ -80,7 +81,17 @@ const terraform: Terraform = { required_providers: { aws: { source: 'hashicorp/aws', - version: '5.20.0', + version: '>= 5.20', + }, + // for docker + docker: { + source: 'kreuzwerker/docker', 
+ version: '>= 3.0', + }, + // for null resources + null: { + source: 'hashicorp/null', + version: '>= 2.0', }, }, } @@ -89,7 +100,7 @@ const compile = config(provider, terraform, 'main.tf.json') const micro = [mod_zone, mod_topic, mod_lambda, mod_api] const compiled = compile(...micro) -JSON.stringify(compiled, null, 4) //? +console.log(JSON.stringify(compiled, null, 4)) //? // ~~~888~~~ ,88~-_ 888~-_ ,88~-_ // 888 d888 \ 888 \ d888 \ diff --git a/lib/src/config.d.ts b/lib/src/config.d.ts index 4061162..79f2210 100644 --- a/lib/src/config.d.ts +++ b/lib/src/config.d.ts @@ -1,10 +1,10 @@ -import { Provider, Terraform, NestedObject } from './constants'; +import { Provider, Terraform, NestedObject } from './types'; /** * flattens modules into a single object, with unique keys created by * joining nested key identifiers until the function reaches a pivot point * (resource or data) and then prepending the module name to the key ("_"). */ -export declare const flattenPreservingPaths: (obj: object, provider?: string, path?: string[], acc?: NestedObject, refs?: boolean) => object; +export declare const flattenPreservingPaths: (obj: object, provider?: string, path?: any[], acc?: NestedObject, refs?: boolean) => object; type FnParams any> = T extends (...args: infer P) => any ? P : never; type FnReturn any> = T extends (...args: any[]) => infer R ? 
R : never; /** diff --git a/lib/src/config.d.ts.map b/lib/src/config.d.ts.map index 45de3b1..6edea32 100644 --- a/lib/src/config.d.ts.map +++ b/lib/src/config.d.ts.map @@ -1 +1 @@ -{"version":3,"file":"config.d.ts","sourceRoot":"","sources":["../../src/config.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,QAAQ,EAAE,SAAS,EAAE,YAAY,EAAE,MAAM,aAAa,CAAA;AAwH/D;;;;GAIG;AACH,eAAO,MAAM,sBAAsB,QAC1B,MAAM,4BAEL,MAAM,EAAE,QACT,YAAY,qBAElB,MAsCF,CAAA;AAED,KAAK,QAAQ,CAAC,CAAC,SAAS,CAAC,GAAG,IAAI,EAAE,GAAG,EAAE,KAAK,GAAG,IAAI,CAAC,SAAS,CAAC,GAAG,IAAI,EAAE,MAAM,CAAC,KAAK,GAAG,GAAG,CAAC,GAAG,KAAK,CAAA;AAClG,KAAK,QAAQ,CAAC,CAAC,SAAS,CAAC,GAAG,IAAI,EAAE,GAAG,EAAE,KAAK,GAAG,IAAI,CAAC,SAAS,CAAC,GAAG,IAAI,EAAE,GAAG,EAAE,KAAK,MAAM,CAAC,GAAG,CAAC,GAAG,KAAK,CAAA;AAEpG;;;;;;;;;GASG;AACH,eAAO,MAAM,QAAQ;6BAAyC,GAAG,EAAE,KAAK,GAAG;8JAa1E,CAAA;AAmCD;;GAEG;AACH,eAAO,MAAM,MAAM,aACL,QAAQ,EAAE,GAAG,QAAQ,aACpB,SAAS,cACR,MAAM,2BAerB,CAAA;;AAED;;;;GAIG"} \ No newline at end of file +{"version":3,"file":"config.d.ts","sourceRoot":"","sources":["../../src/config.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,QAAQ,EAAE,SAAS,EAAE,YAAY,EAAE,MAAM,SAAS,CAAA;AA4H3D;;;;GAIG;AACH,eAAO,MAAM,sBAAsB,QAC1B,MAAM,4BAEL,GAAG,EAAE,QACN,YAAY,qBAElB,MAmDF,CAAA;AAED,KAAK,QAAQ,CAAC,CAAC,SAAS,CAAC,GAAG,IAAI,EAAE,GAAG,EAAE,KAAK,GAAG,IAAI,CAAC,SAAS,CAAC,GAAG,IAAI,EAAE,MAAM,CAAC,KAAK,GAAG,GAAG,CAAC,GAAG,KAAK,CAAA;AAClG,KAAK,QAAQ,CAAC,CAAC,SAAS,CAAC,GAAG,IAAI,EAAE,GAAG,EAAE,KAAK,GAAG,IAAI,CAAC,SAAS,CAAC,GAAG,IAAI,EAAE,GAAG,EAAE,KAAK,MAAM,CAAC,GAAG,CAAC,GAAG,KAAK,CAAA;AAEpG;;;;;;;;;GASG;AACH,eAAO,MAAM,QAAQ;6BAAyC,GAAG,EAAE,KAAK,GAAG;8JAa1E,CAAA;AAgCD;;GAEG;AACH,eAAO,MAAM,MAAM,aACL,QAAQ,EAAE,GAAG,QAAQ,aACpB,SAAS,cACR,MAAM,2BAerB,CAAA;;AAED;;;;GAIG"} \ No newline at end of file diff --git a/lib/src/config.js b/lib/src/config.js index 20cd8e5..d7a8ceb 100644 --- a/lib/src/config.js +++ b/lib/src/config.js @@ -1,5 +1,6 @@ -import { writeFileSync } from 'fs'; import { isPlainObject, isArray, isString } from '@thi.ng/checks'; +import { 
isEmpty } from './utils/index'; +import { writeFileSync } from 'fs'; // regex that replaces a number surrounded by periods .0. with a number surrounded by brackets [0] const bracketRegex = /\.\d+\./g; // function that replaces any internal .0. with [0]. to allow for terraform interpolation @@ -65,7 +66,7 @@ const pathObjectifier = (path) => { return { [head]: pathObjectifier(tail) }; else { // create an array of dummy objects leading up to the index - const dummyArray = (head && Array(head - 1).fill({})) || []; + const dummyArray = Array(head).fill({}) || []; return [...dummyArray, pathObjectifier(tail)]; } } @@ -80,11 +81,10 @@ const pathObjectifier = (path) => { * cleans out any export-specific values (--> prefixed) recursively and warns * the user if they forgot to export a value using the --> prefix */ -const exportFinalizer = (obj, path) => { +const exportFinalizer = (obj, path, scoped) => { const warn = (path) => { const reminder = '\nšŸ”„ Upstream export (-->) missing. Required by:'; console.warn(`${reminder}\n${JSON.stringify(pathObjectifier(path), null, 4)}`); - //console.log(JSON.stringify(path)) }; return Object.entries(obj).reduce((a, c) => { const [k, v] = c; @@ -102,7 +102,7 @@ const exportFinalizer = (obj, path) => { } } else if (isPlainObject(v)) { - return { ...a, [k]: exportFinalizer(v, [...path, k]) }; + return { ...a, [k]: exportFinalizer(v, [...path, k], scoped) }; } else if (isArray(v)) { //console.log(`array found for ${k}: ${JSON.stringify(v)}`) @@ -111,8 +111,12 @@ const exportFinalizer = (obj, path) => { [k]: v.map((x, i) => { if (x == 'undefined' || x == 'null') warn([...path, k, i]); + if (isString(x) && x.includes('$SCOPE')) { + // for depends_on: [...] 
+ return x.replace(/\$SCOPE/g, scoped); + } if (isPlainObject(x)) - return exportFinalizer(x, [...path, k, i]); + return exportFinalizer(x, [...path, k, i], scoped); else return x; }), @@ -156,17 +160,33 @@ path = [], acc = {}, refs = false) => { ...a[key], [type]: { ...(a[key] && a[key][type]), - [scoped]: exportFinalizer(target, [key, raw_type]), + [scoped]: exportFinalizer(target, [key, raw_type], scoped), }, }, }; } - else { + else if (isPlainObject(val)) { return { ...a, ...flattenPreservingPaths(val, provider, [...path, key], a, refs), }; } + else if (isArray(val)) { + return { + ...a, + [key]: val.map((x, i) => { + if (isPlainObject(x)) { + return flattenPreservingPaths(x, provider, [...path, key, i], a, refs); + } + else { + return x; + } + }), + }; + } + else { + return { ...a, [key]: val }; + } }, acc); }; /** @@ -189,7 +209,6 @@ export const modulate = (obj, provider = 'aws') => { return [out, refs]; }; }; -const isEmpty = (x) => isPlainObject(x) && !Object.keys(x).length ? true : isArray(x) && !x.length ? 
true : false; /** * deep merges arbitrary number of objects into one */ diff --git a/lib/src/config.js.map b/lib/src/config.js.map index 38f9978..6b5f172 100644 --- a/lib/src/config.js.map +++ b/lib/src/config.js.map @@ -1 +1 @@ -{"version":3,"file":"config.js","sourceRoot":"","sources":["../../src/config.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,aAAa,EAAE,MAAM,IAAI,CAAA;AAClC,OAAO,EAAE,aAAa,EAAE,OAAO,EAAE,QAAQ,EAAE,MAAM,gBAAgB,CAAA;AAEjE,kGAAkG;AAClG,MAAM,YAAY,GAAG,UAAU,CAAA;AAC/B,yFAAyF;AACzF,MAAM,YAAY,GAAG,CAAC,GAAW,EAAE,EAAE,CACjC,GAAG,CAAC,OAAO,CAAC,YAAY,EAAE,CAAC,KAAK,EAAE,EAAE,CAAC,IAAI,KAAK,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,CAAA;AACpE,MAAM,UAAU,GAAG,CAAC,GAAW,EAAE,EAAE,CAAC,GAAG,CAAC,OAAO,CAAC,YAAY,EAAE,CAAC,KAAK,EAAE,EAAE,CAAC,IAAI,KAAK,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,CAAA;AAEnG;;;GAGG;AACH,MAAM,QAAQ,GAAG,CACb,GAAW,EACX,MAAc,EACd,KAAa,EACb,IAAY,EACZ,OAAuB,EAAE,EACb,EAAE,CACd,MAAM,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE;IAChC,MAAM,CAAC,CAAC,EAAE,CAAC,CAAC,GAAG,CAAC,CAAA;IAChB,MAAM,QAAQ,GAAG,GAAG,KAAK,IAAI,IAAI,IAAI,MAAM,EAAE,CAAA;IAC7C,MAAM,UAAU,GAAG,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,GAAG,GAAG,CAAC,CAAC,CAAC,EAAE,CAAA;IAC1D,MAAM,MAAM,GAAG,MAAM,QAAQ,IAAI,UAAU,GAAG,CAAC,GAAG,CAAA;IAClD,MAAM,KAAK,GAAG,YAAY,CAAC,MAAM,CAAC,CAAA;IAClC,MAAM,CAAC,IAAI,EAAE,IAAI,CAAC,GAAG,UAAU,CAAC,UAAU,CAAC,CAAC,KAAK,CAAC,GAAG,CAAC,CAAA;IACtD,MAAM,MAAM,GAAG,aAAa,QAAQ,IAAI,IAAI,KAAK,IAAI,IAAI,CAAC,GAAG,CAAA;IAC7D,IAAI,QAAQ,CAAC,CAAC,CAAC,EAAE;QACb,IAAI,CAAC,CAAC,UAAU,CAAC,MAAM,CAAC,EAAE;YACtB,+EAA+E;YAC/E,MAAM,GAAG,GAAG,UAAU,QAAQ,IAAI,IAAI,IAAI,IAAI,CAAC,OAAO,CAAC,KAAK,EAAE,EAAE,CAAC,IAAI,CAAC,GAAG,CAAA;YACzE,OAAO,EAAE,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,GAAG,EAAE,CAAA;SAC5B;aAAM,IAAI,CAAC,CAAC,UAAU,CAAC,KAAK,CAAC,EAAE;YAC5B,OAAO,EAAE,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,KAAK,EAAE,CAAA;SAC9B;aAAM;YACH,OAAO,EAAE,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,CAAA;SAC1B;KACJ;SAAM,IAAI,aAAa,CAAC,CAAC,C
AAC,EAAE;QACzB,OAAO,EAAE,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,QAAQ,CAAC,CAAC,EAAE,MAAM,EAAE,KAAK,EAAE,IAAI,EAAE,CAAC,GAAG,IAAI,EAAE,CAAC,CAAC,CAAC,EAAE,CAAA;KACvE;SAAM,IAAI,OAAO,CAAC,CAAC,CAAC,EAAE;QACnB,OAAO;YACH,GAAG,CAAC;YACJ,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE;gBAChB,IAAI,QAAQ,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,UAAU,CAAC,KAAK,CAAC,EAAE;oBACpC,OAAO,UAAU,CAAC,GAAG,MAAM,IAAI,CAAC,GAAG,CAAC,CAAA;iBACvC;qBAAM,IAAI,aAAa,CAAC,CAAC,CAAC,EAAE;oBACzB,OAAO,QAAQ,CAAC,CAAC,EAAE,MAAM,EAAE,KAAK,EAAE,IAAI,EAAE,CAAC,GAAG,IAAI,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC,CAAA;iBAC3D;gBACD,OAAO,CAAC,CAAA;YACZ,CAAC,CAAC;SACL,CAAA;KACJ;SAAM;QACH,oDAAoD;QACpD,4CAA4C;QAC5C,OAAO,EAAE,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,CAAA;KAC1B;AACL,CAAC,EAAE,EAAE,CAAC,CAAA;AAEV;;;;IAII;AACJ,MAAM,eAAe,GAAG,CAAC,IAAW,EAAE,EAAE;IACpC,MAAM,CAAC,IAAI,EAAE,GAAG,IAAI,CAAC,GAAG,IAAI,CAAA;IAC5B,IAAI,IAAI,IAAI,IAAI,CAAC,MAAM,EAAE;QACrB,IAAI,QAAQ,CAAC,IAAI,CAAC;YAAE,OAAO,EAAE,CAAC,IAAI,CAAC,EAAE,eAAe,CAAC,IAAI,CAAC,EAAE,CAAA;aACvD;YACD,2DAA2D;YAC3D,MAAM,UAAU,GAAG,CAAC,IAAI,IAAI,KAAK,CAAC,IAAI,GAAG,CAAC,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC,IAAI,EAAE,CAAA;YAE3D,OAAO,CAAC,GAAG,UAAU,EAAE,eAAe,CAAC,IAAI,CAAC,CAAC,CAAA;SAChD;KACJ;SAAM;QACH,IAAI,QAAQ,CAAC,IAAI,CAAC;YAAE,OAAO,EAAE,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,CAAA;;YACtC,OAAO,CAAC,GAAG,KAAK,CAAC,IAAI,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC,EAAE,IAAI,CAAC,CAAA;KACjD;AACL,CAAC,CAAA;AACD;;;GAGG;AACH,MAAM,eAAe,GAAG,CAAC,GAAW,EAAE,IAAI,EAAgB,EAAE;IACxD,MAAM,IAAI,GAAG,CAAC,IAAc,EAAE,EAAE;QAC5B,MAAM,QAAQ,GAAG,kDAAkD,CAAA;QACnE,OAAO,CAAC,IAAI,CAAC,GAAG,QAAQ,KAAK,IAAI,CAAC,SAAS,CAAC,eAAe,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,CAAC,CAAC,EAAE,CAAC,CAAA;QAC9E,mCAAmC;IACvC,CAAC,CAAA;IACD,OAAO,MAAM,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE;QACvC,MAAM,CAAC,CAAC,EAAE,CAAC,CAAC,GAAG,CAAC,CAAA;QAChB,IAAI,CAAC,KAAK,KAAK;YAAE,OAAO,CAAC,CAAA;QACzB,IAAI,CAAC,KAAK,WAAW,IAAI,CAAC,KAAK,MAAM;YAAE,IAAI,CAAC,CAAC,GAAG,IAAI,EAAE,CAAC,CAAC,CAAC,CAAA;QACzD,IAAI,QAAQ,CAA
C,CAAC,CAAC,IAAI,CAAC,CAAC,UAAU,CAAC,KAAK,CAAC,EAAE;YACpC,MAAM,OAAO,GAAG,CAAC,CAAC,OAAO,CAAC,QAAQ,EAAE,EAAE,CAAC,CAAA;YACvC,IAAI,OAAO,KAAK,EAAE,EAAE;gBAChB,OAAO,CAAC,CAAA;aACX;iBAAM;gBACH,OAAO,EAAE,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,OAAO,EAAE,CAAA;aAChC;SACJ;aAAM,IAAI,aAAa,CAAC,CAAC,CAAC,EAAE;YACzB,OAAO,EAAE,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,eAAe,CAAC,CAAC,EAAE,CAAC,GAAG,IAAI,EAAE,CAAC,CAAC,CAAC,EAAE,CAAA;SACzD;aAAM,IAAI,OAAO,CAAC,CAAC,CAAC,EAAE;YACnB,2DAA2D;YAC3D,OAAO;gBACH,GAAG,CAAC;gBACJ,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE;oBAChB,IAAI,CAAC,IAAI,WAAW,IAAI,CAAC,IAAI,MAAM;wBAAE,IAAI,CAAC,CAAC,GAAG,IAAI,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC,CAAA;oBAC1D,IAAI,aAAa,CAAC,CAAC,CAAC;wBAAE,OAAO,eAAe,CAAC,CAAC,EAAE,CAAC,GAAG,IAAI,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC,CAAA;;wBAC3D,OAAO,CAAC,CAAA;gBACjB,CAAC,CAAC;aACL,CAAA;SACJ;aAAM;YACH,OAAO,EAAE,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,CAAA;SAC1B;IACL,CAAC,EAAE,EAAE,CAAC,CAAA;AACV,CAAC,CAAA;AAED;;;;GAIG;AACH,MAAM,CAAC,MAAM,sBAAsB,GAAG,CAClC,GAAW,EACX,QAAQ,GAAG,KAAK,EAAE,+DAA+D;AACjF,OAAiB,EAAE,EACnB,MAAoB,EAAE,EACtB,IAAI,GAAG,KAAK,EACN,EAAE;IACR,MAAM,WAAW,GAAG,CAAC,UAAU,EAAE,MAAM,CAAC,CAAA;IACxC,OAAO,MAAM,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE;QACvC,MAAM,CAAC,GAAG,EAAE,GAAG,CAAC,GAAG,CAAC,CAAA;QACpB,IAAI,WAAW,CAAC,QAAQ,CAAC,GAAG,CAAC,EAAE;YAC3B,MAAM,MAAM,GAAG,MAAM,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,CAAW,CAAA,CAAC,mBAAmB;YAClE,MAAM,QAAQ,GAAG,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAA,CAAC,wBAAwB;YAC7D,MAAM,IAAI,GAAG,GAAG,QAAQ,IAAI,QAAQ,EAAE,CAAA;YACtC,MAAM,MAAM,GAAG,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,CAAA;YAC7B,MAAM,KAAK,GAAG,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAA;YAC/B,OAAO,IAAI;gBACP,CAAC,CAAC;oBACI,GAAG,CAAC;oBACJ,CAAC,KAAK,CAAC,EAAE;wBACL,GAAG,CAAC,CAAC,KAAK,CAAC;wBACX,CAAC,GAAG,CAAC,EAAE;4BACH,GAAG,CAAC,CAAC,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,CAAC,GAAG,CAAC,CAAC;4BAC9B,CAAC,QAAQ,CAAC,EAAE,QAAQ,CAAC,MAAM,EAAE,MAAM,EAAE,GAAG,EAAE,IAAI,CAAC;yBAClD;
qBACJ;iBACJ;gBACH,CAAC,CAAC;oBACI,GAAG,CAAC;oBACJ,CAAC,GAAG,CAAC,EAAE;wBACH,GAAG,CAAC,CAAC,GAAG,CAAC;wBACT,CAAC,IAAI,CAAC,EAAE;4BACJ,GAAG,CAAC,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC,GAAG,CAAC,CAAC,IAAI,CAAC,CAAC;4BAC3B,CAAC,MAAM,CAAC,EAAE,eAAe,CAAC,MAAM,EAAE,CAAC,GAAG,EAAE,QAAQ,CAAC,CAAC;yBACrD;qBACJ;iBACJ,CAAA;SACV;aAAM;YACH,OAAO;gBACH,GAAG,CAAC;gBACJ,GAAG,sBAAsB,CAAC,GAAG,EAAE,QAAQ,EAAE,CAAC,GAAG,IAAI,EAAE,GAAG,CAAC,EAAE,CAAC,EAAE,IAAI,CAAC;aACpE,CAAA;SACJ;IACL,CAAC,EAAE,GAAG,CAAC,CAAA;AACX,CAAC,CAAA;AAKD;;;;;;;;;GASG;AACH,MAAM,CAAC,MAAM,QAAQ,GAAG,CACpB,GAAM,EACN,QAAQ,GAAG,KAAK,EAClB,EAAE;IACA,MAAM,CAAC,GAAG,EAAE,EAAE,CAAC,GAAG,MAAM,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAA;IAExC,OAAO,CAAC,GAAG,IAAmE,EAAE,EAAE;QAC9E,MAAM,GAAG,GAAG,EAAE,CAAC,GAAG,CAAC,EAAE,EAAE,CAAC,GAAG,IAAI,CAAC,EAAE,CAAA;QAClC,MAAM,IAAI,GAAG,sBAAsB,CAAC,GAAG,EAAE,QAAQ,EAAE,EAAE,EAAE,EAAE,EAAE,IAAI,CAAC,CAAA;QAChE,MAAM,GAAG,GAAG,EAAE,CAAC,GAAG,CAAC,EAAE,EAAE,CAAC,GAAG,IAAI,EAAE,IAAI,CAAC,EAAE,CAAA;QACxC,MAAM,GAAG,GAAG,sBAAsB,CAAC,GAAG,EAAE,QAAQ,EAAE,EAAE,EAAE,EAAE,EAAE,KAAK,CAAC,CAAA;QAChE,OAAO,CAAC,GAAG,EAAE,IAAI,CAAiD,CAAA;IACtE,CAAC,CAAA;AACL,CAAC,CAAA;AAED,MAAM,OAAO,GAAG,CAAC,CAAM,EAAE,EAAE,CACvB,aAAa,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,KAAK,CAAA;AAE9F;;GAEG;AACH,MAAM,SAAS,GAAG,CAAC,GAAG,IAAI,EAAE,EAAE;IAC1B,MAAM,MAAM,GAAG,EAAE,CAAA;IACjB,KAAK,MAAM,GAAG,IAAI,IAAI,EAAE;QACpB,KAAK,MAAM,GAAG,IAAI,GAAG,EAAE;YACnB,MAAM,GAAG,GAAG,GAAG,CAAC,GAAG,CAAC,CAAA;YACpB,IAAI,GAAG,KAAK,UAAU,IAAI,MAAM,CAAC,GAAG,CAAC,IAAI,OAAO,IAAI,GAAG,EAAE;gBACrD,4BAA4B;gBAC5B,SAAQ;aACX;YACD,IAAI,KAAK,CAAC,OAAO,CAAC,GAAG,CAAC,EAAE;gBACpB,uCAAuC;gBACvC,MAAM,QAAQ,GAAG,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAAA;gBAC/C,IAAI,CAAC,QAAQ,CAAC,MAAM;oBAAE,SAAQ;qBACzB;oBACD,MAAM,CAAC,GAAG,CAAC,GAAG,MAAM,CAAC,GAAG,CAAC,IAAI,EAAE,CAAA;oBAC/B,MAAM,CAAC,GAAG,CAAC,
CAAC,IAAI,CAAC,GAAG,QAAQ,CAAC,CAAA;iBAChC;aACJ;iBAAM,IAAI,OAAO,GAAG,KAAK,QAAQ,EAAE;gBAChC,MAAM,CAAC,GAAG,CAAC,GAAG,SAAS,CAAC,MAAM,CAAC,GAAG,CAAC,IAAI,EAAE,EAAE,GAAG,CAAC,CAAA;aAClD;iBAAM;gBACH,MAAM,CAAC,GAAG,CAAC,GAAG,GAAG,CAAA;aACpB;SACJ;KACJ;IACD,OAAO,MAAM,CAAA;AACjB,CAAC,CAAA;AAED;;GAEG;AACH,MAAM,CAAC,MAAM,MAAM,GAAG,CAClB,QAA+B,EAC/B,SAAoB,EACpB,UAAkB,EACpB,EAAE;IACA,IAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,EAAE;QACpB,QAAQ,GAAG,CAAC,QAAQ,CAAC,CAAA;KACxB;IACD,MAAM,eAAe,GAAG;QACpB,SAAS;QACT,QAAQ;KACX,CAAA;IACD,OAAO,CAAC,GAAG,IAAI,EAAE,EAAE;QACf,MAAM,MAAM,GAAG,SAAS,CAAC,GAAG,IAAI,EAAE,eAAe,CAAC,CAAA;QAClD,MAAM,GAAG,GAAG,IAAI,CAAC,SAAS,CAAC,MAAM,EAAE,IAAI,EAAE,CAAC,CAAC,CAAA;QAC3C,aAAa,CAAC,UAAU,EAAE,GAAG,CAAC,CAAA;QAC9B,OAAO,MAAM,CAAA;IACjB,CAAC,CAAA;AACL,CAAC,CAAA;AAED;;;;GAIG"} \ No newline at end of file +{"version":3,"file":"config.js","sourceRoot":"","sources":["../../src/config.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,aAAa,EAAE,OAAO,EAAE,QAAQ,EAAE,MAAM,gBAAgB,CAAA;AACjE,OAAO,EAAE,OAAO,EAAE,MAAM,eAAe,CAAA;AACvC,OAAO,EAAE,aAAa,EAAE,MAAM,IAAI,CAAA;AAElC,kGAAkG;AAClG,MAAM,YAAY,GAAG,UAAU,CAAA;AAC/B,yFAAyF;AACzF,MAAM,YAAY,GAAG,CAAC,GAAW,EAAE,EAAE,CACjC,GAAG,CAAC,OAAO,CAAC,YAAY,EAAE,CAAC,KAAK,EAAE,EAAE,CAAC,IAAI,KAAK,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,CAAA;AACpE,MAAM,UAAU,GAAG,CAAC,GAAW,EAAE,EAAE,CAAC,GAAG,CAAC,OAAO,CAAC,YAAY,EAAE,CAAC,KAAK,EAAE,EAAE,CAAC,IAAI,KAAK,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,CAAA;AAEnG;;;GAGG;AACH,MAAM,QAAQ,GAAG,CACb,GAAW,EACX,MAAc,EACd,KAAa,EACb,IAAY,EACZ,OAAuB,EAAE,EACb,EAAE,CACd,MAAM,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE;IAChC,MAAM,CAAC,CAAC,EAAE,CAAC,CAAC,GAAG,CAAC,CAAA;IAChB,MAAM,QAAQ,GAAG,GAAG,KAAK,IAAI,IAAI,IAAI,MAAM,EAAE,CAAA;IAC7C,MAAM,UAAU,GAAG,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,GAAG,GAAG,CAAC,CAAC,CAAC,EAAE,CAAA;IAC1D,MAAM,MAAM,GAAG,MAAM,QAAQ,IAAI,UAAU,GAAG,CAAC,GAAG,CAAA;IAClD,MAAM,KAAK,GAAG,YAAY,CAAC,MAAM,CAAC,CAAA;IAClC,MAAM,CAAC,IAAI,EAAE,IAAI,CAAC,GA
AG,UAAU,CAAC,UAAU,CAAC,CAAC,KAAK,CAAC,GAAG,CAAC,CAAA;IACtD,MAAM,MAAM,GAAG,aAAa,QAAQ,IAAI,IAAI,KAAK,IAAI,IAAI,CAAC,GAAG,CAAA;IAC7D,IAAI,QAAQ,CAAC,CAAC,CAAC,EAAE;QACb,IAAI,CAAC,CAAC,UAAU,CAAC,MAAM,CAAC,EAAE;YACtB,+EAA+E;YAC/E,MAAM,GAAG,GAAG,UAAU,QAAQ,IAAI,IAAI,IAAI,IAAI,CAAC,OAAO,CAAC,KAAK,EAAE,EAAE,CAAC,IAAI,CAAC,GAAG,CAAA;YACzE,OAAO,EAAE,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,GAAG,EAAE,CAAA;SAC5B;aAAM,IAAI,CAAC,CAAC,UAAU,CAAC,KAAK,CAAC,EAAE;YAC5B,OAAO,EAAE,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,KAAK,EAAE,CAAA;SAC9B;aAAM;YACH,OAAO,EAAE,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,CAAA;SAC1B;KACJ;SAAM,IAAI,aAAa,CAAC,CAAC,CAAC,EAAE;QACzB,OAAO,EAAE,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,QAAQ,CAAC,CAAC,EAAE,MAAM,EAAE,KAAK,EAAE,IAAI,EAAE,CAAC,GAAG,IAAI,EAAE,CAAC,CAAC,CAAC,EAAE,CAAA;KACvE;SAAM,IAAI,OAAO,CAAC,CAAC,CAAC,EAAE;QACnB,OAAO;YACH,GAAG,CAAC;YACJ,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE;gBAChB,IAAI,QAAQ,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,UAAU,CAAC,KAAK,CAAC,EAAE;oBACpC,OAAO,UAAU,CAAC,GAAG,MAAM,IAAI,CAAC,GAAG,CAAC,CAAA;iBACvC;qBAAM,IAAI,aAAa,CAAC,CAAC,CAAC,EAAE;oBACzB,OAAO,QAAQ,CAAC,CAAC,EAAE,MAAM,EAAE,KAAK,EAAE,IAAI,EAAE,CAAC,GAAG,IAAI,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC,CAAA;iBAC3D;gBACD,OAAO,CAAC,CAAA;YACZ,CAAC,CAAC;SACL,CAAA;KACJ;SAAM;QACH,oDAAoD;QACpD,4CAA4C;QAC5C,OAAO,EAAE,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,CAAA;KAC1B;AACL,CAAC,EAAE,EAAE,CAAC,CAAA;AAEV;;;;IAII;AACJ,MAAM,eAAe,GAAG,CAAC,IAAW,EAAE,EAAE;IACpC,MAAM,CAAC,IAAI,EAAE,GAAG,IAAI,CAAC,GAAG,IAAI,CAAA;IAC5B,IAAI,IAAI,IAAI,IAAI,CAAC,MAAM,EAAE;QACrB,IAAI,QAAQ,CAAC,IAAI,CAAC;YAAE,OAAO,EAAE,CAAC,IAAI,CAAC,EAAE,eAAe,CAAC,IAAI,CAAC,EAAE,CAAA;aACvD;YACD,2DAA2D;YAC3D,MAAM,UAAU,GAAG,KAAK,CAAC,IAAI,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,IAAI,EAAE,CAAA;YAE7C,OAAO,CAAC,GAAG,UAAU,EAAE,eAAe,CAAC,IAAI,CAAC,CAAC,CAAA;SAChD;KACJ;SAAM;QACH,IAAI,QAAQ,CAAC,IAAI,CAAC;YAAE,OAAO,EAAE,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,CAAA;;YACtC,OAAO,CAAC,GAAG,KAAK,CAAC,IAAI,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC,EAAE,IAAI,CAAC,CAAA;KACjD;AACL,CAAC,CAAA
;AACD;;;GAGG;AACH,MAAM,eAAe,GAAG,CAAC,GAAW,EAAE,IAAI,EAAE,MAAM,EAAgB,EAAE;IAChE,MAAM,IAAI,GAAG,CAAC,IAAc,EAAE,EAAE;QAC5B,MAAM,QAAQ,GAAG,kDAAkD,CAAA;QACnE,OAAO,CAAC,IAAI,CAAC,GAAG,QAAQ,KAAK,IAAI,CAAC,SAAS,CAAC,eAAe,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,CAAC,CAAC,EAAE,CAAC,CAAA;IAClF,CAAC,CAAA;IACD,OAAO,MAAM,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE;QACvC,MAAM,CAAC,CAAC,EAAE,CAAC,CAAC,GAAG,CAAC,CAAA;QAChB,IAAI,CAAC,KAAK,KAAK;YAAE,OAAO,CAAC,CAAA;QACzB,IAAI,CAAC,KAAK,WAAW,IAAI,CAAC,KAAK,MAAM;YAAE,IAAI,CAAC,CAAC,GAAG,IAAI,EAAE,CAAC,CAAC,CAAC,CAAA;QACzD,IAAI,QAAQ,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,UAAU,CAAC,KAAK,CAAC,EAAE;YACpC,MAAM,OAAO,GAAG,CAAC,CAAC,OAAO,CAAC,QAAQ,EAAE,EAAE,CAAC,CAAA;YACvC,IAAI,OAAO,KAAK,EAAE,EAAE;gBAChB,OAAO,CAAC,CAAA;aACX;iBAAM;gBACH,OAAO,EAAE,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,OAAO,EAAE,CAAA;aAChC;SACJ;aAAM,IAAI,aAAa,CAAC,CAAC,CAAC,EAAE;YACzB,OAAO,EAAE,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,eAAe,CAAC,CAAC,EAAE,CAAC,GAAG,IAAI,EAAE,CAAC,CAAC,EAAE,MAAM,CAAC,EAAE,CAAA;SACjE;aAAM,IAAI,OAAO,CAAC,CAAC,CAAC,EAAE;YACnB,2DAA2D;YAC3D,OAAO;gBACH,GAAG,CAAC;gBACJ,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE;oBAChB,IAAI,CAAC,IAAI,WAAW,IAAI,CAAC,IAAI,MAAM;wBAAE,IAAI,CAAC,CAAC,GAAG,IAAI,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC,CAAA;oBAC1D,IAAI,QAAQ,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,QAAQ,CAAC,QAAQ,CAAC,EAAE;wBACrC,wBAAwB;wBACxB,OAAO,CAAC,CAAC,OAAO,CAAC,UAAU,EAAE,MAAM,CAAC,CAAA;qBACvC;oBACD,IAAI,aAAa,CAAC,CAAC,CAAC;wBAAE,OAAO,eAAe,CAAC,CAAC,EAAE,CAAC,GAAG,IAAI,EAAE,CAAC,EAAE,CAAC,CAAC,EAAE,MAAM,CAAC,CAAA;;wBACnE,OAAO,CAAC,CAAA;gBACjB,CAAC,CAAC;aACL,CAAA;SACJ;aAAM;YACH,OAAO,EAAE,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,CAAA;SAC1B;IACL,CAAC,EAAE,EAAE,CAAC,CAAA;AACV,CAAC,CAAA;AAED;;;;GAIG;AACH,MAAM,CAAC,MAAM,sBAAsB,GAAG,CAClC,GAAW,EACX,QAAQ,GAAG,KAAK,EAAE,+DAA+D;AACjF,OAAc,EAAE,EAChB,MAAoB,EAAE,EACtB,IAAI,GAAG,KAAK,EACN,EAAE;IACR,MAAM,WAAW,GAAG,CAAC,UAAU,EAAE,MAAM,CAAC,CAAA;IACxC,OAAO,MAAM,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CA
AC,EAAE,EAAE;QACvC,MAAM,CAAC,GAAG,EAAE,GAAG,CAAC,GAAG,CAAC,CAAA;QACpB,IAAI,WAAW,CAAC,QAAQ,CAAC,GAAG,CAAC,EAAE;YAC3B,MAAM,MAAM,GAAG,MAAM,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,CAAW,CAAA,CAAC,mBAAmB;YAClE,MAAM,QAAQ,GAAG,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAA,CAAC,wBAAwB;YAC7D,MAAM,IAAI,GAAG,GAAG,QAAQ,IAAI,QAAQ,EAAE,CAAA;YACtC,MAAM,MAAM,GAAG,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,CAAA;YAC7B,MAAM,KAAK,GAAG,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAA;YAC/B,OAAO,IAAI;gBACP,CAAC,CAAC;oBACI,GAAG,CAAC;oBACJ,CAAC,KAAK,CAAC,EAAE;wBACL,GAAG,CAAC,CAAC,KAAK,CAAC;wBACX,CAAC,GAAG,CAAC,EAAE;4BACH,GAAG,CAAC,CAAC,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,CAAC,GAAG,CAAC,CAAC;4BAC9B,CAAC,QAAQ,CAAC,EAAE,QAAQ,CAAC,MAAM,EAAE,MAAM,EAAE,GAAG,EAAE,IAAI,CAAC;yBAClD;qBACJ;iBACJ;gBACH,CAAC,CAAC;oBACI,GAAG,CAAC;oBACJ,CAAC,GAAG,CAAC,EAAE;wBACH,GAAG,CAAC,CAAC,GAAG,CAAC;wBACT,CAAC,IAAI,CAAC,EAAE;4BACJ,GAAG,CAAC,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC,GAAG,CAAC,CAAC,IAAI,CAAC,CAAC;4BAC3B,CAAC,MAAM,CAAC,EAAE,eAAe,CAAC,MAAM,EAAE,CAAC,GAAG,EAAE,QAAQ,CAAC,EAAE,MAAM,CAAC;yBAC7D;qBACJ;iBACJ,CAAA;SACV;aAAM,IAAI,aAAa,CAAC,GAAG,CAAC,EAAE;YAC3B,OAAO;gBACH,GAAG,CAAC;gBACJ,GAAG,sBAAsB,CAAC,GAAG,EAAE,QAAQ,EAAE,CAAC,GAAG,IAAI,EAAE,GAAG,CAAC,EAAE,CAAC,EAAE,IAAI,CAAC;aACpE,CAAA;SACJ;aAAM,IAAI,OAAO,CAAC,GAAG,CAAC,EAAE;YACrB,OAAO;gBACH,GAAG,CAAC;gBACJ,CAAC,GAAG,CAAC,EAAE,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE;oBACpB,IAAI,aAAa,CAAC,CAAC,CAAC,EAAE;wBAClB,OAAO,sBAAsB,CAAC,CAAC,EAAE,QAAQ,EAAE,CAAC,GAAG,IAAI,EAAE,GAAG,EAAE,CAAC,CAAC,EAAE,CAAC,EAAE,IAAI,CAAC,CAAA;qBACzE;yBAAM;wBACH,OAAO,CAAC,CAAA;qBACX;gBACL,CAAC,CAAC;aACL,CAAA;SACJ;aAAM;YACH,OAAO,EAAE,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE,GAAG,EAAE,CAAA;SAC9B;IACL,CAAC,EAAE,GAAG,CAAC,CAAA;AACX,CAAC,CAAA;AAKD;;;;;;;;;GASG;AACH,MAAM,CAAC,MAAM,QAAQ,GAAG,CACpB,GAAM,EACN,QAAQ,GAAG,KAAK,EAClB,EAAE;IACA,MAAM,CAAC,GAAG,EAAE,EAAE,CAAC,GAAG,MAAM,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAA;IAExC,OAAO,CAAC,GAAG,IAAmE,EAAE,EAAE;QAC9E,MAAM,GAAG,GAAG,EAAE,CAAC,GAAG,CAAC,EAAE,E
AAE,CAAC,GAAG,IAAI,CAAC,EAAE,CAAA;QAClC,MAAM,IAAI,GAAG,sBAAsB,CAAC,GAAG,EAAE,QAAQ,EAAE,EAAE,EAAE,EAAE,EAAE,IAAI,CAAC,CAAA;QAChE,MAAM,GAAG,GAAG,EAAE,CAAC,GAAG,CAAC,EAAE,EAAE,CAAC,GAAG,IAAI,EAAE,IAAI,CAAC,EAAE,CAAA;QACxC,MAAM,GAAG,GAAG,sBAAsB,CAAC,GAAG,EAAE,QAAQ,EAAE,EAAE,EAAE,EAAE,EAAE,KAAK,CAAC,CAAA;QAChE,OAAO,CAAC,GAAG,EAAE,IAAI,CAAiD,CAAA;IACtE,CAAC,CAAA;AACL,CAAC,CAAA;AAED;;GAEG;AACH,MAAM,SAAS,GAAG,CAAC,GAAG,IAAI,EAAE,EAAE;IAC1B,MAAM,MAAM,GAAG,EAAE,CAAA;IACjB,KAAK,MAAM,GAAG,IAAI,IAAI,EAAE;QACpB,KAAK,MAAM,GAAG,IAAI,GAAG,EAAE;YACnB,MAAM,GAAG,GAAG,GAAG,CAAC,GAAG,CAAC,CAAA;YACpB,IAAI,GAAG,KAAK,UAAU,IAAI,MAAM,CAAC,GAAG,CAAC,IAAI,OAAO,IAAI,GAAG,EAAE;gBACrD,4BAA4B;gBAC5B,SAAQ;aACX;YACD,IAAI,KAAK,CAAC,OAAO,CAAC,GAAG,CAAC,EAAE;gBACpB,uCAAuC;gBACvC,MAAM,QAAQ,GAAG,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAAA;gBAC/C,IAAI,CAAC,QAAQ,CAAC,MAAM;oBAAE,SAAQ;qBACzB;oBACD,MAAM,CAAC,GAAG,CAAC,GAAG,MAAM,CAAC,GAAG,CAAC,IAAI,EAAE,CAAA;oBAC/B,MAAM,CAAC,GAAG,CAAC,CAAC,IAAI,CAAC,GAAG,QAAQ,CAAC,CAAA;iBAChC;aACJ;iBAAM,IAAI,OAAO,GAAG,KAAK,QAAQ,EAAE;gBAChC,MAAM,CAAC,GAAG,CAAC,GAAG,SAAS,CAAC,MAAM,CAAC,GAAG,CAAC,IAAI,EAAE,EAAE,GAAG,CAAC,CAAA;aAClD;iBAAM;gBACH,MAAM,CAAC,GAAG,CAAC,GAAG,GAAG,CAAA;aACpB;SACJ;KACJ;IACD,OAAO,MAAM,CAAA;AACjB,CAAC,CAAA;AAED;;GAEG;AACH,MAAM,CAAC,MAAM,MAAM,GAAG,CAClB,QAA+B,EAC/B,SAAoB,EACpB,UAAkB,EACpB,EAAE;IACA,IAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,EAAE;QACpB,QAAQ,GAAG,CAAC,QAAQ,CAAC,CAAA;KACxB;IACD,MAAM,eAAe,GAAG;QACpB,SAAS;QACT,QAAQ;KACX,CAAA;IACD,OAAO,CAAC,GAAG,IAAI,EAAE,EAAE;QACf,MAAM,MAAM,GAAG,SAAS,CAAC,GAAG,IAAI,EAAE,eAAe,CAAC,CAAA;QAClD,MAAM,GAAG,GAAG,IAAI,CAAC,SAAS,CAAC,MAAM,EAAE,IAAI,EAAE,CAAC,CAAC,CAAA;QAC3C,aAAa,CAAC,UAAU,EAAE,GAAG,CAAC,CAAA;QAC9B,OAAO,MAAM,CAAA;IACjB,CAAC,CAAA;AACL,CAAC,CAAA;AAED;;;;GAIG"} \ No newline at end of file diff --git a/lib/src/index.d.ts b/lib/src/index.d.ts index 75b94ce..3f11390 100644 --- a/lib/src/index.d.ts +++ b/lib/src/index.d.ts @@ -1,4 +1,4 @@ -export type { AWS, Provider, Terraform } from 
'./constants'; +export type { AWS, Provider, Terraform } from './types'; export { config, modulate } from './config'; -export { micro, api, topic, zone } from './modules/index'; +export { lambda, api, topic, zone } from './modules/index'; //# sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git a/lib/src/index.d.ts.map b/lib/src/index.d.ts.map index 75bcdf1..d760234 100644 --- a/lib/src/index.d.ts.map +++ b/lib/src/index.d.ts.map @@ -1 +1 @@ -{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAAA,YAAY,EAAE,GAAG,EAAE,QAAQ,EAAE,SAAS,EAAE,MAAM,aAAa,CAAA;AAC3D,OAAO,EAAE,MAAM,EAAE,QAAQ,EAAE,MAAM,UAAU,CAAA;AAC3C,OAAO,EAAE,KAAK,EAAE,GAAG,EAAE,KAAK,EAAE,IAAI,EAAE,MAAM,iBAAiB,CAAA"} \ No newline at end of file +{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAAA,YAAY,EAAE,GAAG,EAAE,QAAQ,EAAE,SAAS,EAAE,MAAM,SAAS,CAAA;AACvD,OAAO,EAAE,MAAM,EAAE,QAAQ,EAAE,MAAM,UAAU,CAAA;AAC3C,OAAO,EAAE,MAAM,EAAE,GAAG,EAAE,KAAK,EAAE,IAAI,EAAE,MAAM,iBAAiB,CAAA"} \ No newline at end of file diff --git a/lib/src/index.js b/lib/src/index.js index 865d82d..896cfeb 100644 --- a/lib/src/index.js +++ b/lib/src/index.js @@ -1,3 +1,3 @@ export { config, modulate } from './config'; -export { micro, api, topic, zone } from './modules/index'; +export { lambda, api, topic, zone } from './modules/index'; //# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/lib/src/index.js.map b/lib/src/index.js.map index cb77e93..4eccf32 100644 --- a/lib/src/index.js.map +++ b/lib/src/index.js.map @@ -1 +1 @@ -{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,MAAM,EAAE,QAAQ,EAAE,MAAM,UAAU,CAAA;AAC3C,OAAO,EAAE,KAAK,EAAE,GAAG,EAAE,KAAK,EAAE,IAAI,EAAE,MAAM,iBAAiB,CAAA"} \ No newline at end of file 
+{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,MAAM,EAAE,QAAQ,EAAE,MAAM,UAAU,CAAA;AAC3C,OAAO,EAAE,MAAM,EAAE,GAAG,EAAE,KAAK,EAAE,IAAI,EAAE,MAAM,iBAAiB,CAAA"} \ No newline at end of file diff --git a/lib/src/modules/api.d.ts b/lib/src/modules/api.d.ts index d50d2c0..f0e2adf 100644 --- a/lib/src/modules/api.d.ts +++ b/lib/src/modules/api.d.ts @@ -1,6 +1,8 @@ -import { AWS } from '../constants'; +import { AWS } from '../types'; interface RouteMethods { + /** route */ [key: string]: { + /** method */ [key: string]: { invoke_arn: string; function_name: string; @@ -29,7 +31,7 @@ interface SubDomains { * */ export declare const api: ({ apex, zone_id, subdomainRoutes, tags, }: SubDomains, my: { - [key: string]: import("../../registry").AWS05200; + [key: string]: AWS; }) => {}; export {}; //# sourceMappingURL=api.d.ts.map \ No newline at end of file diff --git a/lib/src/modules/api.d.ts.map b/lib/src/modules/api.d.ts.map index b0f8cce..c1c51d0 100644 --- a/lib/src/modules/api.d.ts.map +++ b/lib/src/modules/api.d.ts.map @@ -1 +1 @@ -{"version":3,"file":"api.d.ts","sourceRoot":"","sources":["../../../src/modules/api.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,GAAG,EAAQ,MAAM,cAAc,CAAA;AA2HxC,UAAU,YAAY;IAClB,CAAC,GAAG,EAAE,MAAM,GAAG;QACX,CAAC,GAAG,EAAE,MAAM,GAAG;YACX,UAAU,EAAE,MAAM,CAAA;YAClB,aAAa,EAAE,MAAM,CAAA;SACxB,CAAA;KACJ,CAAA;CACJ;AAED,UAAU,UAAU;IAChB,IAAI,EAAE,MAAM,CAAA;IACZ,OAAO,EAAE,MAAM,CAAA;IACf,eAAe,EAAE,YAAY,CAAA;IAC7B,IAAI,CAAC,EAAE,MAAM,CAAA;CAChB;AAED;;;;;;;;;;;;;;GAcG;AACH,eAAO,MAAM,GAAG,8CAaT,UAAU;;QAqEZ,CAAA"} \ No newline at end of file 
+{"version":3,"file":"api.d.ts","sourceRoot":"","sources":["../../../src/modules/api.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,GAAG,EAAQ,MAAM,UAAU,CAAA;AA8GpC,UAAU,YAAY;IAClB,YAAY;IACZ,CAAC,GAAG,EAAE,MAAM,GAAG;QACX,aAAa;QACb,CAAC,GAAG,EAAE,MAAM,GAAG;YACX,UAAU,EAAE,MAAM,CAAA;YAClB,aAAa,EAAE,MAAM,CAAA;SACxB,CAAA;KACJ,CAAA;CACJ;AAED,UAAU,UAAU;IAChB,IAAI,EAAE,MAAM,CAAA;IACZ,OAAO,EAAE,MAAM,CAAA;IACf,eAAe,EAAE,YAAY,CAAA;IAC7B,IAAI,CAAC,EAAE,MAAM,CAAA;CAChB;AAED;;;;;;;;;;;;;;GAcG;AACH,eAAO,MAAM,GAAG,8CAaT,UAAU;;QAqEZ,CAAA"} \ No newline at end of file diff --git a/lib/src/modules/api.js b/lib/src/modules/api.js index b7fef34..aa651c8 100644 --- a/lib/src/modules/api.js +++ b/lib/src/modules/api.js @@ -1,23 +1,10 @@ -import { flag } from '../constants'; +import { flag } from '../types'; import { lambda_invoke_cred } from './lambda'; import { acm_certificate, route53_record, acm_certificate_validation } from './route53'; -// ,e, -// /~~~8e 888-~88e " -// 88b 888 888b 888 -// e88~-888 888 8888 888 -// C888 888 888 888P 888 -// "88_-888 888-_88" 888 -// 888 const api_domain = ({ full_domain, cert_arn, tags = {} }) => ({ resource: { apigatewayv2_domain_name: { domain_name: full_domain, - /** - * Block type "domain_name_configuration" is represented by a list - * of objects, so it must be indexed using a numeric key, like - * .domain_name_configuration[0] - */ - // @ts-ignore domain_name_configuration: [ { certificate_arn: cert_arn, @@ -80,7 +67,7 @@ const api_stage = ({ api_id, name = '$default', tags = {} }) => ({ }); const api_lambda_integration = ({ api_id, lambda_invoke_arn }) => ({ resource: { - // @ts-ignore: šŸ› [3] + // @ts-ignore: šŸ› FIXME `response_parameters` subsection Heading missing apigatewayv2_integration: { api_id, integration_uri: lambda_invoke_arn, @@ -95,7 +82,7 @@ const api_lambda_integration = ({ api_id, lambda_invoke_arn }) => ({ }); const api_route = ({ api_id, route_key = 'ANY /', integration_id }) => ({ resource: { - // @ts-ignore: šŸ› [2] + // 
@ts-ignore: šŸ› FIXME `request_parameters` subsection Heading missing apigatewayv2_route: { api_id, route_key, diff --git a/lib/src/modules/api.js.map b/lib/src/modules/api.js.map index 52b91ed..44b7c81 100644 --- a/lib/src/modules/api.js.map +++ b/lib/src/modules/api.js.map @@ -1 +1 @@ -{"version":3,"file":"api.js","sourceRoot":"","sources":["../../../src/modules/api.ts"],"names":[],"mappings":"AAAA,OAAO,EAAO,IAAI,EAAE,MAAM,cAAc,CAAA;AACxC,OAAO,EAAE,kBAAkB,EAAE,MAAM,UAAU,CAAA;AAC7C,OAAO,EAAE,eAAe,EAAE,cAAc,EAAE,0BAA0B,EAAE,MAAM,WAAW,CAAA;AAEvF,2BAA2B;AAC3B,0BAA0B;AAC1B,2BAA2B;AAC3B,2BAA2B;AAC3B,2BAA2B;AAC3B,2BAA2B;AAC3B,iBAAiB;AACjB,MAAM,UAAU,GAAG,CAAC,EAAE,WAAW,EAAE,QAAQ,EAAE,IAAI,GAAG,EAAE,EAAE,EAAO,EAAE,CAAC,CAAC;IAC/D,QAAQ,EAAE;QACN,wBAAwB,EAAE;YACtB,WAAW,EAAE,WAAW;YACxB;;;;eAIG;YACH,aAAa;YACb,yBAAyB,EAAE;gBACvB;oBACI,eAAe,EAAE,QAAQ;oBACzB,aAAa,EAAE,UAAU;oBACzB,eAAe,EAAE,SAAS;oBAC1B,kBAAkB,EAAE,KAAK;oBACzB,cAAc,EAAE,KAAK;iBACxB;aACJ;YACD,IAAI,EAAE;gBACF,GAAG,IAAI;gBACP,GAAG,IAAI;aACV;SACJ;KACJ;CACJ,CAAC,CAAA;AAEF,MAAM,WAAW,GAAG,CAAC,EAAE,WAAW,EAAE,IAAI,GAAG,EAAE,EAAE,EAAO,EAAE,CAAC,CAAC;IACtD,QAAQ,EAAE;QACN,gBAAgB,EAAE;YACd,IAAI,EAAE,WAAW;YACjB,WAAW,EAAE,WAAW,WAAW,EAAE;YACrC,4BAA4B,EAAE,KAAK;YACnC,aAAa,EAAE,MAAM;YACrB,kBAAkB,EAAE;gBAChB,aAAa,EAAE;oBACX,cAAc;oBACd,YAAY;oBACZ,eAAe;oBACf,WAAW;oBACX,sBAAsB;oBACtB,kBAAkB;iBACrB;gBACD,aAAa,EAAE,CAAC,GAAG,CAAC;gBACpB,aAAa,EAAE,CAAC,GAAG,CAAC;gBACpB,OAAO,EAAE,GAAG;aACf;YACD,IAAI,EAAE;gBACF,GAAG,IAAI;gBACP,GAAG,IAAI;aACV;YACD,YAAY,EAAE,KAAK;YACnB,aAAa,EAAE,KAAK;YACpB,EAAE,EAAE,KAAK;SACZ;KACJ;CACJ,CAAC,CAAA;AAEF,MAAM,SAAS,GAAG,CAAC,EAAE,MAAM,EAAE,IAAI,GAAG,UAAU,EAAE,IAAI,GAAG,EAAE,EAAE,EAAO,EAAE,CAAC,CAAC;IAClE,QAAQ,EAAE;QACN,kBAAkB,EAAE;YAChB,MAAM;YACN,IAAI;YACJ,WAAW,EAAE,IAAI;YACjB,WAAW,EAAE,SAAS,IAAI,MAAM;YAChC,IAAI,EAAE;gBACF,GAAG,IAAI;gBACP,GAAG,IAAI;aACV;SACJ;KACJ;CACJ,CAAC,CAAA;AAEF,MAAM,sBAAsB,GAAG,CAAC,EAAE,MAAM,EAAE,iBAAiB,EAAE,EAAO,EAAE,CAAC,CAAC;IACpE,QAAQ,EAAE;QACN,qBAAqB;QACrB,wBAAwB,EAAE;YACtB,MAAM;YACN,e
AAe,EAAE,iBAAiB;YAClC,gBAAgB,EAAE,WAAW;YAC7B,kBAAkB,EAAE,MAAM;YAC1B,eAAe,EAAE,UAAU;YAC3B,sBAAsB,EAAE,KAAK;YAC7B,oBAAoB,EAAE,KAAK;YAC3B,EAAE,EAAE,KAAK;SACZ;KACJ;CACJ,CAAC,CAAA;AAEF,MAAM,SAAS,GAAG,CAAC,EAAE,MAAM,EAAE,SAAS,GAAG,OAAO,EAAE,cAAc,EAAE,EAAO,EAAE,CAAC,CAAC;IACzE,QAAQ,EAAE;QACN,qBAAqB;QACrB,kBAAkB,EAAE;YAChB,MAAM;YACN,SAAS;YACT,MAAM,EAAE,gBAAgB,cAAc,EAAE;YACxC,EAAE,EAAE,KAAK;YAET,+BAA+B;YAC/B,6BAA6B;YAC7B,uBAAuB;SAC1B;KACJ;CACJ,CAAC,CAAA;AAyBF;;;;;;;;;;;;;;GAcG;AACH,MAAM,CAAC,MAAM,GAAG,GAAG,CACf,EACI,IAAI,GAAG,mBAAmB,EAC1B,OAAO,EACP,eAAe,GAAG;IACd,IAAI,EAAE;QACF,OAAO,EAAE;YACL,aAAa,EAAE,mCAAmC;YAClD,UAAU,EAAE,gCAAgC;SAC/C;KACJ;CACJ,EACD,IAAI,GAAG,EAAE,GACA,EACb,EAA0B,EAC5B,EAAE,CACA,MAAM,CAAC,OAAO,CAAC,eAAe,CAAC,CAAC,MAAM,CAClC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,MAAM,CAAC,EAAE,EAAE,CAAC,CAAC;IAClB,GAAG,CAAC;IACJ,CAAC,QAAQ,EAAE,EAAE,CAAC,EAAE,eAAe,CAAC,EAAE,WAAW,EAAE,GAAG,EAAE,IAAI,IAAI,EAAE,EAAE,IAAI,EAAE,CAAC;IACvE,CAAC,UAAU,EAAE,EAAE,CAAC,EAAE,UAAU,CAAC;QACzB,WAAW,EAAE,GAAG,EAAE,IAAI,IAAI,EAAE;QAC5B,QAAQ,EACJ,EAAE,EAAE,CAAC,cAAc,EAAE,EAAE,CAAC,EAAE,QAAQ,EAAE,0BAA0B,EAAE,eAAe;QACnF,IAAI;KACP,CAAC;IACF,CAAC,UAAU,EAAE,EAAE,CAAC,EAAE,cAAc,CAAC;QAC7B,eAAe,EAAE,OAAO;QACxB,WAAW,EAAE,GAAG,EAAE,IAAI,IAAI,EAAE;QAC5B,eAAe,EACX,EAAE,EAAE,CAAC,UAAU,EAAE,EAAE,CAAC,EAAE,QAAQ,EAAE,wBAAwB;YACpD,EAAE,yBAAyB,CAAC,CAAC,CAAC,EAAE,kBAAkB;QAC1D,kBAAkB,EACd,EAAE,EAAE,CAAC,UAAU,EAAE,EAAE,CAAC,EAAE,QAAQ,EAAE,wBAAwB;YACpD,EAAE,yBAAyB,CAAC,CAAC,CAAC,EAAE,cAAc;KACzD,CAAC;IACF,CAAC,gBAAgB,EAAE,EAAE,CAAC,EAAE,cAAc,CAAC;QACnC,eAAe,EAAE,OAAO;QACxB,WAAW,EACP,EAAE,EAAE,CAAC,QAAQ,EAAE,EAAE,CAAC,EAAE,QAAQ,EAAE,eAAe,EAAE,yBAAyB,CAAC,CAAC,CAAC;YACvE,EAAE,oBAAoB;QAC9B,OAAO,EAAE;YACL,EAAE,EAAE,CAAC,QAAQ,EAAE,EAAE,CAAC,EAAE,QAAQ,EAAE,eAAe,EAAE,yBAAyB,CAAC,CAAC,CAAC;gBACvE,EAAE,qBAAqB;SAC9B;QACD,IAAI,EAAE,EAAE,EAAE,CAAC,QAAQ,EAAE,EAAE,CAAC,EAAE,QAAQ,EAAE,eAAe,EAAE,yBAAyB,CAAC,CAAC,CAAC;YAC7E,EAAE,oBAAoB;KAC7B,CAAC;IACF,CAAC,cAAc,EAAE,EAAE,CAAC,EAAE,0BAA0B,CAAC;QAC7C,QAAQ,EAAE,EAAE,EAAE,CAAC,QAAQ,EAAE,EAA
E,CAAC,EAAE,QAAQ,EAAE,eAAe,EAAE,GAAG;QAC5D,KAAK,EAAE,CAAC,EAAE,EAAE,CAAC,gBAAgB,EAAE,EAAE,CAAC,EAAE,QAAQ,EAAE,cAAc,EAAE,IAAI,CAAC;KACtE,CAAC;IACF,CAAC,SAAS,EAAE,EAAE,CAAC,EAAE,WAAW,CAAC,EAAE,WAAW,EAAE,GAAG,EAAE,IAAI,IAAI,EAAE,EAAE,IAAI,EAAE,CAAC;IACpE,CAAC,SAAS,EAAE,EAAE,CAAC,EAAE,SAAS,CAAC;QACvB,MAAM,EAAE,EAAE,EAAE,CAAC,SAAS,EAAE,EAAE,CAAC,EAAE,QAAQ,EAAE,gBAAgB,EAAE,EAAE;QAC3D,IAAI;KACP,CAAC;IACF,GAAG,MAAM,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC,MAAM,CAAC,CAAC,GAAG,EAAE,CAAC,KAAK,EAAE,EAAE,UAAU,EAAE,aAAa,EAAE,CAAC,EAAE,EAAE;QAC7E,MAAM,MAAM,GAAG,KAAK,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAA;QAClC,OAAO;YACH,GAAG,GAAG;YACN,CAAC,WAAW,EAAE,IAAI,MAAM,EAAE,CAAC,EAAE,kBAAkB,CAAC;gBAC5C,aAAa,EAAE,aAAa;gBAC5B,UAAU,EAAE,EAAE,EAAE,CAAC,SAAS,EAAE,EAAE,CAAC,EAAE,QAAQ,EAAE,gBAAgB,EAAE,aAAa;gBAC1E,SAAS,EAAE,0BAA0B;gBACrC,YAAY,EAAE,8BAA8B;aAC/C,CAAC;YACF,CAAC,eAAe,EAAE,IAAI,MAAM,EAAE,CAAC,EAAE,sBAAsB,CAAC;gBACpD,MAAM,EAAE,EAAE,EAAE,CAAC,SAAS,EAAE,EAAE,CAAC,EAAE,QAAQ,EAAE,gBAAgB,EAAE,EAAE;gBAC3D,iBAAiB,EAAE,UAAU;aAChC,CAAC;YACF,CAAC,SAAS,EAAE,IAAI,MAAM,EAAE,CAAC,EAAE,SAAS,CAAC;gBACjC,MAAM,EAAE,EAAE,EAAE,CAAC,SAAS,EAAE,EAAE,CAAC,EAAE,QAAQ,EAAE,gBAAgB,EAAE,EAAE;gBAC3D,SAAS,EAAE,KAAK;gBAChB,cAAc,EACV,EAAE,EAAE,CAAC,eAAe,EAAE,IAAI,MAAM,EAAE,CAAC,EAAE,QAAQ,EAAE,wBAAwB;oBACnE,EAAE,EAAE;aACf,CAAC;SACL,CAAA;IACL,CAAC,EAAE,EAAE,CAAC;CACT,CAAC,EACF,EAAE,CACL,CAAA"} \ No newline at end of file 
+{"version":3,"file":"api.js","sourceRoot":"","sources":["../../../src/modules/api.ts"],"names":[],"mappings":"AAAA,OAAO,EAAO,IAAI,EAAE,MAAM,UAAU,CAAA;AACpC,OAAO,EAAE,kBAAkB,EAAE,MAAM,UAAU,CAAA;AAC7C,OAAO,EAAE,eAAe,EAAE,cAAc,EAAE,0BAA0B,EAAE,MAAM,WAAW,CAAA;AAEvF,MAAM,UAAU,GAAG,CAAC,EAAE,WAAW,EAAE,QAAQ,EAAE,IAAI,GAAG,EAAE,EAAE,EAAO,EAAE,CAAC,CAAC;IAC/D,QAAQ,EAAE;QACN,wBAAwB,EAAE;YACtB,WAAW,EAAE,WAAW;YACxB,yBAAyB,EAAE;gBACvB;oBACI,eAAe,EAAE,QAAQ;oBACzB,aAAa,EAAE,UAAU;oBACzB,eAAe,EAAE,SAAS;oBAC1B,kBAAkB,EAAE,KAAK;oBACzB,cAAc,EAAE,KAAK;iBACxB;aACJ;YACD,IAAI,EAAE;gBACF,GAAG,IAAI;gBACP,GAAG,IAAI;aACV;SACJ;KACJ;CACJ,CAAC,CAAA;AAEF,MAAM,WAAW,GAAG,CAAC,EAAE,WAAW,EAAE,IAAI,GAAG,EAAE,EAAE,EAAO,EAAE,CAAC,CAAC;IACtD,QAAQ,EAAE;QACN,gBAAgB,EAAE;YACd,IAAI,EAAE,WAAW;YACjB,WAAW,EAAE,WAAW,WAAW,EAAE;YACrC,4BAA4B,EAAE,KAAK;YACnC,aAAa,EAAE,MAAM;YACrB,kBAAkB,EAAE;gBAChB,aAAa,EAAE;oBACX,cAAc;oBACd,YAAY;oBACZ,eAAe;oBACf,WAAW;oBACX,sBAAsB;oBACtB,kBAAkB;iBACrB;gBACD,aAAa,EAAE,CAAC,GAAG,CAAC;gBACpB,aAAa,EAAE,CAAC,GAAG,CAAC;gBACpB,OAAO,EAAE,GAAG;aACf;YACD,IAAI,EAAE;gBACF,GAAG,IAAI;gBACP,GAAG,IAAI;aACV;YACD,YAAY,EAAE,KAAK;YACnB,aAAa,EAAE,KAAK;YACpB,EAAE,EAAE,KAAK;SACZ;KACJ;CACJ,CAAC,CAAA;AAEF,MAAM,SAAS,GAAG,CAAC,EAAE,MAAM,EAAE,IAAI,GAAG,UAAU,EAAE,IAAI,GAAG,EAAE,EAAE,EAAO,EAAE,CAAC,CAAC;IAClE,QAAQ,EAAE;QACN,kBAAkB,EAAE;YAChB,MAAM;YACN,IAAI;YACJ,WAAW,EAAE,IAAI;YACjB,WAAW,EAAE,SAAS,IAAI,MAAM;YAChC,IAAI,EAAE;gBACF,GAAG,IAAI;gBACP,GAAG,IAAI;aACV;SACJ;KACJ;CACJ,CAAC,CAAA;AAEF,MAAM,sBAAsB,GAAG,CAAC,EAAE,MAAM,EAAE,iBAAiB,EAAE,EAAO,EAAE,CAAC,CAAC;IACpE,QAAQ,EAAE;QACN,wEAAwE;QACxE,wBAAwB,EAAE;YACtB,MAAM;YACN,eAAe,EAAE,iBAAiB;YAClC,gBAAgB,EAAE,WAAW;YAC7B,kBAAkB,EAAE,MAAM;YAC1B,eAAe,EAAE,UAAU;YAC3B,sBAAsB,EAAE,KAAK;YAC7B,oBAAoB,EAAE,KAAK;YAC3B,EAAE,EAAE,KAAK;SACZ;KACJ;CACJ,CAAC,CAAA;AAEF,MAAM,SAAS,GAAG,CAAC,EAAE,MAAM,EAAE,SAAS,GAAG,OAAO,EAAE,cAAc,EAAE,EAAO,EAAE,CAAC,CAAC;IACzE,QAAQ,EAAE;QACN,uEAAuE;QACvE,kBAAkB,EAAE;YAChB,MAAM;YACN,SAAS;YACT,MAAM,EAAE,gBAAgB,cAAc,EAAE;YACxC,EAAE,EAAE,KAAK;YAET,+BAA+B;YAC/B,
6BAA6B;YAC7B,uBAAuB;SAC1B;KACJ;CACJ,CAAC,CAAA;AA2BF;;;;;;;;;;;;;;GAcG;AACH,MAAM,CAAC,MAAM,GAAG,GAAG,CACf,EACI,IAAI,GAAG,mBAAmB,EAC1B,OAAO,EACP,eAAe,GAAG;IACd,IAAI,EAAE;QACF,OAAO,EAAE;YACL,aAAa,EAAE,mCAAmC;YAClD,UAAU,EAAE,gCAAgC;SAC/C;KACJ;CACJ,EACD,IAAI,GAAG,EAAE,GACA,EACb,EAA0B,EAC5B,EAAE,CACA,MAAM,CAAC,OAAO,CAAC,eAAe,CAAC,CAAC,MAAM,CAClC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,MAAM,CAAC,EAAE,EAAE,CAAC,CAAC;IAClB,GAAG,CAAC;IACJ,CAAC,QAAQ,EAAE,EAAE,CAAC,EAAE,eAAe,CAAC,EAAE,WAAW,EAAE,GAAG,EAAE,IAAI,IAAI,EAAE,EAAE,IAAI,EAAE,CAAC;IACvE,CAAC,UAAU,EAAE,EAAE,CAAC,EAAE,UAAU,CAAC;QACzB,WAAW,EAAE,GAAG,EAAE,IAAI,IAAI,EAAE;QAC5B,QAAQ,EACJ,EAAE,EAAE,CAAC,cAAc,EAAE,EAAE,CAAC,EAAE,QAAQ,EAAE,0BAA0B,EAAE,eAAe;QACnF,IAAI;KACP,CAAC;IACF,CAAC,UAAU,EAAE,EAAE,CAAC,EAAE,cAAc,CAAC;QAC7B,eAAe,EAAE,OAAO;QACxB,WAAW,EAAE,GAAG,EAAE,IAAI,IAAI,EAAE;QAC5B,eAAe,EACX,EAAE,EAAE,CAAC,UAAU,EAAE,EAAE,CAAC,EAAE,QAAQ,EAAE,wBAAwB;YACpD,EAAE,yBAAyB,CAAC,CAAC,CAAC,EAAE,kBAAkB;QAC1D,kBAAkB,EACd,EAAE,EAAE,CAAC,UAAU,EAAE,EAAE,CAAC,EAAE,QAAQ,EAAE,wBAAwB;YACpD,EAAE,yBAAyB,CAAC,CAAC,CAAC,EAAE,cAAc;KACzD,CAAC;IACF,CAAC,gBAAgB,EAAE,EAAE,CAAC,EAAE,cAAc,CAAC;QACnC,eAAe,EAAE,OAAO;QACxB,WAAW,EACP,EAAE,EAAE,CAAC,QAAQ,EAAE,EAAE,CAAC,EAAE,QAAQ,EAAE,eAAe,EAAE,yBAAyB,CAAC,CAAC,CAAC;YACvE,EAAE,oBAAoB;QAC9B,OAAO,EAAE;YACL,EAAE,EAAE,CAAC,QAAQ,EAAE,EAAE,CAAC,EAAE,QAAQ,EAAE,eAAe,EAAE,yBAAyB,CAAC,CAAC,CAAC;gBACvE,EAAE,qBAAqB;SAC9B;QACD,IAAI,EAAE,EAAE,EAAE,CAAC,QAAQ,EAAE,EAAE,CAAC,EAAE,QAAQ,EAAE,eAAe,EAAE,yBAAyB,CAAC,CAAC,CAAC;YAC7E,EAAE,oBAAoB;KAC7B,CAAC;IACF,CAAC,cAAc,EAAE,EAAE,CAAC,EAAE,0BAA0B,CAAC;QAC7C,QAAQ,EAAE,EAAE,EAAE,CAAC,QAAQ,EAAE,EAAE,CAAC,EAAE,QAAQ,EAAE,eAAe,EAAE,GAAG;QAC5D,KAAK,EAAE,CAAC,EAAE,EAAE,CAAC,gBAAgB,EAAE,EAAE,CAAC,EAAE,QAAQ,EAAE,cAAc,EAAE,IAAI,CAAC;KACtE,CAAC;IACF,CAAC,SAAS,EAAE,EAAE,CAAC,EAAE,WAAW,CAAC,EAAE,WAAW,EAAE,GAAG,EAAE,IAAI,IAAI,EAAE,EAAE,IAAI,EAAE,CAAC;IACpE,CAAC,SAAS,EAAE,EAAE,CAAC,EAAE,SAAS,CAAC;QACvB,MAAM,EAAE,EAAE,EAAE,CAAC,SAAS,EAAE,EAAE,CAAC,EAAE,QAAQ,EAAE,gBAAgB,EAAE,EAAE;QAC3D,IAAI;KACP,CAAC;IACF,GA
AG,MAAM,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC,MAAM,CAAC,CAAC,GAAG,EAAE,CAAC,KAAK,EAAE,EAAE,UAAU,EAAE,aAAa,EAAE,CAAC,EAAE,EAAE;QAC7E,MAAM,MAAM,GAAG,KAAK,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAA;QAClC,OAAO;YACH,GAAG,GAAG;YACN,CAAC,WAAW,EAAE,IAAI,MAAM,EAAE,CAAC,EAAE,kBAAkB,CAAC;gBAC5C,aAAa,EAAE,aAAa;gBAC5B,UAAU,EAAE,EAAE,EAAE,CAAC,SAAS,EAAE,EAAE,CAAC,EAAE,QAAQ,EAAE,gBAAgB,EAAE,aAAa;gBAC1E,SAAS,EAAE,0BAA0B;gBACrC,YAAY,EAAE,8BAA8B;aAC/C,CAAC;YACF,CAAC,eAAe,EAAE,IAAI,MAAM,EAAE,CAAC,EAAE,sBAAsB,CAAC;gBACpD,MAAM,EAAE,EAAE,EAAE,CAAC,SAAS,EAAE,EAAE,CAAC,EAAE,QAAQ,EAAE,gBAAgB,EAAE,EAAE;gBAC3D,iBAAiB,EAAE,UAAU;aAChC,CAAC;YACF,CAAC,SAAS,EAAE,IAAI,MAAM,EAAE,CAAC,EAAE,SAAS,CAAC;gBACjC,MAAM,EAAE,EAAE,EAAE,CAAC,SAAS,EAAE,EAAE,CAAC,EAAE,QAAQ,EAAE,gBAAgB,EAAE,EAAE;gBAC3D,SAAS,EAAE,KAAK;gBAChB,cAAc,EACV,EAAE,EAAE,CAAC,eAAe,EAAE,IAAI,MAAM,EAAE,CAAC,EAAE,QAAQ,EAAE,wBAAwB;oBACnE,EAAE,EAAE;aACf,CAAC;SACL,CAAA;IACL,CAAC,EAAE,EAAE,CAAC;CACT,CAAC,EACF,EAAE,CACL,CAAA"} \ No newline at end of file diff --git a/lib/src/modules/docker.d.ts b/lib/src/modules/docker.d.ts new file mode 100644 index 0000000..132d60f --- /dev/null +++ b/lib/src/modules/docker.d.ts @@ -0,0 +1,24 @@ +import { AWS } from '../types'; +export declare const caller_id: AWS; +/** + * requires required providers to include kreuzwerker/docker and hashicorp/null + * at root of compiler + * ```ts + * { required_providers: { + * aws: { + * source: 'hashicorp/aws', + * version: '>= 5.20', + * }, + * docker: { + * source: 'kreuzwerker/docker', + * version: '>= 3.0', + * }, + * null: { + * source: 'hashicorp/null', + * version: '>= 2.0', + * } + * } + * } + * ``` + */ +//# sourceMappingURL=docker.d.ts.map \ No newline at end of file diff --git a/lib/src/modules/docker.d.ts.map b/lib/src/modules/docker.d.ts.map new file mode 100644 index 0000000..429c7df --- /dev/null +++ b/lib/src/modules/docker.d.ts.map @@ -0,0 +1 @@ 
+{"version":3,"file":"docker.d.ts","sourceRoot":"","sources":["../../../src/modules/docker.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,GAAG,EAAQ,MAAM,UAAU,CAAA;AAGpC,eAAO,MAAM,SAAS,EAAE,GASvB,CAAA;AAmOD;;;;;;;;;;;;;;;;;;;;GAoBG"} \ No newline at end of file diff --git a/lib/src/modules/docker.js b/lib/src/modules/docker.js new file mode 100644 index 0000000..1923ca4 --- /dev/null +++ b/lib/src/modules/docker.js @@ -0,0 +1,187 @@ +import { flag } from '../types'; +import { isFile, isEmpty, cleanNullEntries } from '../utils'; +export const caller_id = { + data: { + region: { + name: '-->', + }, + caller_identity: { + account_id: '-->', + }, + }, +}; +const docker_img = ({ img_name, src_path, dockerfile_path, build_args, platform }) => ({ + resource: { + docker_image: { + name: img_name, + build: { + context: src_path, + dockerfile: dockerfile_path, + args: build_args, + platform: platform, + }, + }, + }, +}); +const docker_registry_img = ({ name, keep_remotely = false }) => ({ + resource: { + docker_registry_image: { + name, + keep_remotely, + }, + }, +}); +const build = ({ runtime, source_path, artifacts_dir, builder = '${path.root}/src/utils/package.py', docker_config = {}, }) => { + const { docker_pip_cache, docker_build_root, docker_file, docker_image, with_ssh_agent, docker_additional_options, docker_entrypoint, } = docker_config; + return { + data: { + external: { + program: ['python', builder, 'prepare'], + query: { + paths: JSON.stringify({ + module: '${path.module}', + root: '${path.root}', + cwd: '${path.cwd}', + }), + }, + ...(!isEmpty(docker_config) + ? { + docker: JSON.stringify(cleanNullEntries({ + docker_pip_cache, + docker_build_root, + docker_file, + docker_image, + with_ssh_agent, + docker_additional_options, + docker_entrypoint, + })), + } + : {}), + }, + artifacts_dir, + runtime, + source_path, + /** + * + * Temporary fix when building from multiple locations. 
We should + * take into account content of package.py when counting hash + * Related issue: + * https://github.com/terraform-aws-modules/terraform-aws-lambda/issues/63 + * "${path.module}/package.py" + */ + hash_extra_paths: JSON.stringify([]), + }, + }; +}; +const ecr_repo = ({ name, image_tag_mutability = 'MUTABLE', force_delete = true, scan = false, tags = {}, }) => ({ + resource: { + // @ts-ignore: FIXME (src/types or regex) + // image_scanning_configuration is qualified in place + // instead of as separate section with heading + ecr_repository: { + name, + image_tag_mutability, + force_delete, + image_scanning_configuration: { + scan_on_push: scan, + }, + tags: { + ...flag, + ...tags, + }, + }, + }, +}); +const lifecycle_policy = ({ policy, repo }) => ({ + resource: { + ecr_lifecycle_policy: { + repository: repo, + policy, + }, + }, +}); +const sam_metadata_image = ({ src_path, dockerfile_path, build_args, img_tag, img_uri }) => ({ + resource: { + null_resource: { + triggers: { + resource_type: 'IMAGE_LAMBDA_FUNCTION', + docker_context: src_path, + docker_file: dockerfile_path, + docker_tag: img_tag, + docker_build_args: JSON.stringify(build_args), + built_image_uri: img_uri, + }, + depends_on: ['docker_registry_image.$SCOPE'], + }, + }, +}); +const ecr_repo_fmt = (acct_id, region) => `${acct_id}.dkr.ecr.${region}.amazonaws.com`; +const null_resource = ({ file_path }) => ({ + resource: { + // TODO: add to types [1] + null_resource: { + triggers: isFile(file_path) + ? 
{ diff: `\${md5(file(${file_path}))}` } + : { + diff: `\${sha1(join("", [for f in fileset(${file_path}, "**"): filesha1(f)]))}`, + }, + }, + }, +}); +/** + * requires required providers to include kreuzwerker/docker and hashicorp/null + * at root of compiler + * ```ts + * { required_providers: { + * aws: { + * source: 'hashicorp/aws', + * version: '>= 5.20', + * }, + * docker: { + * source: 'kreuzwerker/docker', + * version: '>= 3.0', + * }, + * null: { + * source: 'hashicorp/null', + * version: '>= 2.0', + * } + * } + * } + * ``` + */ +//export const docked = ({ name, region, account_id }, my) => { +// const ecr_address = ecr_repo_fmt(account_id, region) +// const img_tag = `${ecr_address}/${name}:latest` +// return { +// image: docker_img({ +// img_name, +// src_path, +// dockerfile_path, +// build_args, +// platform, +// }), +// registry_img: docker_registry_img({ +// name, +// keep_remotely, +// }), +// ecr_repo: ecr_repo({ +// name, +// image_tag_mutability, +// force_delete, +// scan, +// tags, +// }), +// lifecycle: lifecycle_policy({ +// policy, +// repo: name, +// }), +// sam_image_meta: sam_metadata_image({ +// src_path, +// dockerfile_path, +// build_args, +// img_tag, +// img_uri, +// }), +// } +//} +//# sourceMappingURL=docker.js.map \ No newline at end of file diff --git a/lib/src/modules/docker.js.map b/lib/src/modules/docker.js.map new file mode 100644 index 0000000..ad2b30d --- /dev/null +++ b/lib/src/modules/docker.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"docker.js","sourceRoot":"","sources":["../../../src/modules/docker.ts"],"names":[],"mappings":"AAAA,OAAO,EAAO,IAAI,EAAE,MAAM,UAAU,CAAA;AACpC,OAAO,EAAE,MAAM,EAAE,OAAO,EAAE,gBAAgB,EAAE,MAAM,UAAU,CAAA;AAE5D,MAAM,CAAC,MAAM,SAAS,GAAQ;IAC1B,IAAI,EAAE;QACF,MAAM,EAAE;YACJ,IAAI,EAAE,KAAK;SACd;QACD,eAAe,EAAE;YACb,UAAU,EAAE,KAAK;SACpB;KACJ;CACJ,CAAA;AAED,MAAM,UAAU,GAAG,CAAC,EAAE,QAAQ,EAAE,QAAQ,EAAE,eAAe,EAAE,UAAU,EAAE,QAAQ,EAAE,EAAE,EAAE,CAAC,CAAC;IACnF,QAAQ,EAAE;QACN,YAAY,EAAE;YACV,IAAI,EAAE,QAAQ;YACd,KAAK,EAAE;gBACH,OAAO,EAAE,QAAQ;gBACjB,UAAU,EAAE,eAAe;gBAC3B,IAAI,EAAE,UAAU;gBAChB,QAAQ,EAAE,QAAQ;aACrB;SACJ;KACJ;CACJ,CAAC,CAAA;AAEF,MAAM,mBAAmB,GAAG,CAAC,EAAE,IAAI,EAAE,aAAa,GAAG,KAAK,EAAE,EAAE,EAAE,CAAC,CAAC;IAC9D,QAAQ,EAAE;QACN,qBAAqB,EAAE;YACnB,IAAI;YACJ,aAAa;SAChB;KACJ;CACJ,CAAC,CAAA;AA+EF,MAAM,KAAK,GAAG,CAAC,EACX,OAAO,EACP,WAAW,EACX,aAAa,EACb,OAAO,GAAG,mCAAmC,EAC7C,aAAa,GAAG,EAAE,GACd,EAAE,EAAE;IACR,MAAM,EACF,gBAAgB,EAChB,iBAAiB,EACjB,WAAW,EACX,YAAY,EACZ,cAAc,EACd,yBAAyB,EACzB,iBAAiB,GACpB,GAAG,aAAa,CAAA;IACjB,OAAO;QACH,IAAI,EAAE;YACF,QAAQ,EAAE;gBACN,OAAO,EAAE,CAAC,QAAQ,EAAE,OAAO,EAAE,SAAS,CAAC;gBACvC,KAAK,EAAE;oBACH,KAAK,EAAE,IAAI,CAAC,SAAS,CAAC;wBAClB,MAAM,EAAE,gBAAgB;wBACxB,IAAI,EAAE,cAAc;wBACpB,GAAG,EAAE,aAAa;qBACrB,CAAC;iBACL;gBACD,GAAG,CAAC,CAAC,OAAO,CAAC,aAAa,CAAC;oBACvB,CAAC,CAAC;wBACI,MAAM,EAAE,IAAI,CAAC,SAAS,CAClB,gBAAgB,CAAC;4BACb,gBAAgB;4BAChB,iBAAiB;4BACjB,WAAW;4BACX,YAAY;4BACZ,cAAc;4BACd,yBAAyB;4BACzB,iBAAiB;yBACpB,CAAC,CACL;qBACJ;oBACH,CAAC,CAAC,EAAE,CAAC;aACZ;YACD,aAAa;YACb,OAAO;YACP,WAAW;YACX;;;;;;;eAOG;YACH,gBAAgB,EAAE,IAAI,CAAC,SAAS,CAAC,EAAE,CAAC;SACvC;KACJ,CAAA;AACL,CAAC,CAAA;AAED,MAAM,QAAQ,GAAG,CAAC,EACd,IAAI,EACJ,oBAAoB,GAAG,SAAS,EAChC,YAAY,GAAG,IAAI,EACnB,IAAI,GAAG,KAAK,EACZ,IAAI,GAAG,EAAE,GACZ,EAAO,EAAE,CAAC,CAAC;IACR,QAAQ,EAAE;QACN,yCAAyC;QACzC,qDAAqD;QACrD,8CAA8C;QAC9C,cAAc,EAAE;YACZ,IAAI;YACJ,oBAAoB;YACpB,YAAY;YACZ,4BAA4B,EAAE;gBAC1B,YAAY,EAAE,IAAI;aACrB;YACD,IAAI,EAAE;gBACF,GAAG,IAAI;gBACP,GAAG,IAAI;aACV;SACJ;KACJ;CACJ,CAA
C,CAAA;AAEF,MAAM,gBAAgB,GAAG,CAAC,EAAE,MAAM,EAAE,IAAI,EAAE,EAAO,EAAE,CAAC,CAAC;IACjD,QAAQ,EAAE;QACN,oBAAoB,EAAE;YAClB,UAAU,EAAE,IAAI;YAChB,MAAM;SACT;KACJ;CACJ,CAAC,CAAA;AAEF,MAAM,kBAAkB,GAAG,CAAC,EAAE,QAAQ,EAAE,eAAe,EAAE,UAAU,EAAE,OAAO,EAAE,OAAO,EAAE,EAAE,EAAE,CAAC,CAAC;IACzF,QAAQ,EAAE;QACN,aAAa,EAAE;YACX,QAAQ,EAAE;gBACN,aAAa,EAAE,uBAAuB;gBACtC,cAAc,EAAE,QAAQ;gBACxB,WAAW,EAAE,eAAe;gBAC5B,UAAU,EAAE,OAAO;gBACnB,iBAAiB,EAAE,IAAI,CAAC,SAAS,CAAC,UAAU,CAAC;gBAC7C,eAAe,EAAE,OAAO;aAC3B;YACD,UAAU,EAAE,CAAC,8BAA8B,CAAC;SAC/C;KACJ;CACJ,CAAC,CAAA;AAEF,MAAM,YAAY,GAAG,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE,CAAC,GAAG,OAAO,YAAY,MAAM,gBAAgB,CAAA;AAEtF,MAAM,aAAa,GAAG,CAAC,EAAE,SAAS,EAAE,EAAE,EAAE,CAAC,CAAC;IACtC,QAAQ,EAAE;QACN,yBAAyB;QACzB,aAAa,EAAE;YACX,QAAQ,EAAE,MAAM,CAAC,SAAS,CAAC;gBACvB,CAAC,CAAC,EAAE,IAAI,EAAE,eAAe,SAAS,KAAK,EAAE;gBACzC,CAAC,CAAC;oBACI,IAAI,EAAE,sCAAsC,SAAS,0BAA0B;iBAClF;SACV;KACJ;CACJ,CAAC,CAAA;AAEF;;;;;;;;;;;;;;;;;;;;GAoBG;AACH,+DAA+D;AAC/D,0DAA0D;AAC1D,qDAAqD;AACrD,cAAc;AACd,6BAA6B;AAC7B,uBAAuB;AACvB,uBAAuB;AACvB,8BAA8B;AAC9B,yBAAyB;AACzB,uBAAuB;AACvB,aAAa;AACb,6CAA6C;AAC7C,mBAAmB;AACnB,4BAA4B;AAC5B,aAAa;AACb,8BAA8B;AAC9B,mBAAmB;AACnB,mCAAmC;AACnC,2BAA2B;AAC3B,mBAAmB;AACnB,mBAAmB;AACnB,aAAa;AACb,uCAAuC;AACvC,qBAAqB;AACrB,yBAAyB;AACzB,aAAa;AACb,8CAA8C;AAC9C,uBAAuB;AACvB,8BAA8B;AAC9B,yBAAyB;AACzB,sBAAsB;AACtB,sBAAsB;AACtB,aAAa;AACb,OAAO;AACP,GAAG"} \ No newline at end of file diff --git a/lib/src/modules/ecr.d.ts b/lib/src/modules/ecr.d.ts new file mode 100644 index 0000000..64e4477 --- /dev/null +++ b/lib/src/modules/ecr.d.ts @@ -0,0 +1,17 @@ +import { AWS } from '../types'; +export declare const ecr_repo: ({ name, tags }: { + name: any; + tags?: {} | undefined; +}) => AWS; +export declare const isFile: (path: string) => boolean; +export declare const ecr_image: ({ repo, file_path, image_tag }: { + repo: any; + file_path: any; + image_tag: any; +}) => AWS; +/** + * References: + * + * [1] https://stackoverflow.com/a/66501021 + */ +//# sourceMappingURL=ecr.d.ts.map \ 
No newline at end of file diff --git a/lib/src/modules/ecr.d.ts.map b/lib/src/modules/ecr.d.ts.map new file mode 100644 index 0000000..7ac1011 --- /dev/null +++ b/lib/src/modules/ecr.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"ecr.d.ts","sourceRoot":"","sources":["../../../src/modules/ecr.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,GAAG,EAAQ,MAAM,UAAU,CAAA;AAEpC,eAAO,MAAM,QAAQ;;;MAA0B,GAa7C,CAAA;AAEF,eAAO,MAAM,MAAM,SAAU,MAAM,YAIlC,CAAA;AA6BD,eAAO,MAAM,SAAS;;;;MAAqC,GAGzD,CAAA;AA0BF;;;;GAIG"} \ No newline at end of file diff --git a/lib/src/modules/ecr.js b/lib/src/modules/ecr.js new file mode 100644 index 0000000..e9c08b9 --- /dev/null +++ b/lib/src/modules/ecr.js @@ -0,0 +1,79 @@ +import { flag } from '../types'; +// one per subdomain? +export const ecr_repo = ({ name, tags = {} }) => ({ + resource: { + // @ts-ignore: FIXME (src/types or regex) + // image_scanning_configuration is qualified in place + // instead of as separate section with heading + ecr_repository: { + name, + tags: { + ...flag, + ...tags, + }, + }, + }, +}); +export const isFile = (path) => { + const parts = path.split('/'); + const [last] = parts.slice(-1); + return last.includes('.'); +}; +const null_resource = ({ file_path }) => { + return { + resource: { + // @ts-ignore: FIXME (src/types) no null_resource in AWS (tf proper) + // [1] + null_resource: { + triggers: isFile(file_path) + ? 
{ diff: `\${md5(file(${file_path}))}` } + : { + diff: `\${sha1(join("", [for f in fileset(${file_path}, "**"): filesha1(f)]))}`, + }, + }, + }, + }; +}; +const image = ({ repo, image_tag }) => ({ + data: { + ecr_image: { + repository_name: repo, + image_tag, + // @ts-ignore: FIXME (src/types) add depends_on to data + depends_on: [`null_resource.$SCOPE`], + }, + }, +}); +export const ecr_image = ({ repo, file_path, image_tag }) => ({ + ...null_resource({ file_path }), + ...image({ repo, image_tag }), +}); +const current_region = { + data: { + region: { + name: '-->', + }, + caller_identity: { + account_id: '-->', + }, + }, +}; +// TODO: add docker_image type? +const docker_img = ({ name, src_dir, acct_id, region, repo, image_tag }) => { + const dockerfile = `${src_dir}/Dockerfile`; + return { + docker_image: { + name, + build: { + //context: file_path, + dockerfile, + }, + }, + }; +}; +/** + * References: + * + * [1] https://stackoverflow.com/a/66501021 + */ +//# sourceMappingURL=ecr.js.map \ No newline at end of file diff --git a/lib/src/modules/ecr.js.map b/lib/src/modules/ecr.js.map new file mode 100644 index 0000000..8564169 --- /dev/null +++ b/lib/src/modules/ecr.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"ecr.js","sourceRoot":"","sources":["../../../src/modules/ecr.ts"],"names":[],"mappings":"AAAA,OAAO,EAAO,IAAI,EAAE,MAAM,UAAU,CAAA;AACpC,qBAAqB;AACrB,MAAM,CAAC,MAAM,QAAQ,GAAG,CAAC,EAAE,IAAI,EAAE,IAAI,GAAG,EAAE,EAAE,EAAO,EAAE,CAAC,CAAC;IACnD,QAAQ,EAAE;QACN,yCAAyC;QACzC,qDAAqD;QACrD,8CAA8C;QAC9C,cAAc,EAAE;YACZ,IAAI;YACJ,IAAI,EAAE;gBACF,GAAG,IAAI;gBACP,GAAG,IAAI;aACV;SACJ;KACJ;CACJ,CAAC,CAAA;AAEF,MAAM,CAAC,MAAM,MAAM,GAAG,CAAC,IAAY,EAAE,EAAE;IACnC,MAAM,KAAK,GAAG,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,CAAA;IAC7B,MAAM,CAAC,IAAI,CAAC,GAAG,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAA;IAC9B,OAAO,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAA;AAC7B,CAAC,CAAA;AAED,MAAM,aAAa,GAAG,CAAC,EAAE,SAAS,EAAE,EAAO,EAAE;IACzC,OAAO;QACH,QAAQ,EAAE;YACN,oEAAoE;YACpE,MAAM;YACN,aAAa,EAAE;gBACX,QAAQ,EAAE,MAAM,CAAC,SAAS,CAAC;oBACvB,CAAC,CAAC,EAAE,IAAI,EAAE,eAAe,SAAS,KAAK,EAAE;oBACzC,CAAC,CAAC;wBACI,IAAI,EAAE,sCAAsC,SAAS,0BAA0B;qBAClF;aACV;SACJ;KACJ,CAAA;AACL,CAAC,CAAA;AAED,MAAM,KAAK,GAAG,CAAC,EAAE,IAAI,EAAE,SAAS,EAAE,EAAO,EAAE,CAAC,CAAC;IACzC,IAAI,EAAE;QACF,SAAS,EAAE;YACP,eAAe,EAAE,IAAI;YACrB,SAAS;YACT,uDAAuD;YACvD,UAAU,EAAE,CAAC,sBAAsB,CAAC;SACvC;KACJ;CACJ,CAAC,CAAA;AAEF,MAAM,CAAC,MAAM,SAAS,GAAG,CAAC,EAAE,IAAI,EAAE,SAAS,EAAE,SAAS,EAAE,EAAO,EAAE,CAAC,CAAC;IAC/D,GAAG,aAAa,CAAC,EAAE,SAAS,EAAE,CAAC;IAC/B,GAAG,KAAK,CAAC,EAAE,IAAI,EAAE,SAAS,EAAE,CAAC;CAChC,CAAC,CAAA;AAEF,MAAM,cAAc,GAAQ;IACxB,IAAI,EAAE;QACF,MAAM,EAAE;YACJ,IAAI,EAAE,KAAK;SACd;QACD,eAAe,EAAE;YACb,UAAU,EAAE,KAAK;SACpB;KACJ;CACJ,CAAA;AAED,+BAA+B;AAC/B,MAAM,UAAU,GAAG,CAAC,EAAE,IAAI,EAAE,OAAO,EAAE,OAAO,EAAE,MAAM,EAAE,IAAI,EAAE,SAAS,EAAE,EAAE,EAAE;IACvE,MAAM,UAAU,GAAG,GAAG,OAAO,aAAa,CAAA;IAC1C,OAAO;QACH,YAAY,EAAE;YACV,IAAI;YACJ,KAAK,EAAE;gBACH,qBAAqB;gBACrB,UAAU;aACb;SACJ;KACJ,CAAA;AACL,CAAC,CAAA;AACD;;;;GAIG"} \ No newline at end of file diff --git a/lib/src/modules/iam.d.ts b/lib/src/modules/iam.d.ts new file mode 100644 index 0000000..2102acb --- /dev/null +++ b/lib/src/modules/iam.d.ts @@ -0,0 +1,23 @@ +import { AWS } from '../types'; +export 
declare const iam_policy_doc: AWS; +export declare const iam_role: ({ name, policy_json, tags }: { + name: any; + policy_json: any; + tags?: {} | undefined; +}) => AWS; +export declare const multi_stmt_policy_doc: ({ bucket_name, topic_arn, cloudwatch_arn, lambda_role_arn, }: { + bucket_name?: string | undefined; + topic_arn?: string | undefined; + cloudwatch_arn?: string | undefined; + lambda_role_arn?: string | undefined; +}) => AWS; +export declare const iam_role_policy_attachment: ({ role_name, policy_arn }: { + role_name: any; + policy_arn: any; +}) => AWS; +export declare const iam_policy: ({ name, policy_json, tags }: { + name: any; + policy_json: any; + tags?: {} | undefined; +}) => AWS; +//# sourceMappingURL=iam.d.ts.map \ No newline at end of file diff --git a/lib/src/modules/iam.d.ts.map b/lib/src/modules/iam.d.ts.map new file mode 100644 index 0000000..660e8a4 --- /dev/null +++ b/lib/src/modules/iam.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"iam.d.ts","sourceRoot":"","sources":["../../../src/modules/iam.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,GAAG,EAAmB,MAAM,UAAU,CAAA;AAS/C,eAAO,MAAM,cAAc,EAAE,GAc5B,CAAA;AAED,eAAO,MAAM,QAAQ;;;;MAAuC,GAY1D,CAAA;AAgBF,eAAO,MAAM,qBAAqB;;;;;MAK9B,GAiCF,CAAA;AAEF,eAAO,MAAM,0BAA0B;;;MAAgC,GAOrE,CAAA;AAEF,eAAO,MAAM,UAAU;;;;MAAuC,GAY5D,CAAA"} \ No newline at end of file diff --git a/lib/src/modules/iam.js b/lib/src/modules/iam.js new file mode 100644 index 0000000..7b30ff2 --- /dev/null +++ b/lib/src/modules/iam.js @@ -0,0 +1,104 @@ +import { flag } from '../types'; +// ,e, +// " /~~~8e 888-~88e-~88e +// 888 88b 888 888 888 +// 888 e88~-888 888 888 888 +// 888 C888 888 888 888 888 +// 888 "88_-888 888 888 888 +export const iam_policy_doc = { + data: { + iam_policy_document: { + statement: { + effect: 'Allow', + actions: ['sts:AssumeRole'], + principals: { + identifiers: ['lambda.amazonaws.com', 'apigateway.amazonaws.com'], + type: 'Service', + }, + }, + json: '-->', + }, + }, +}; +export const iam_role = ({ name, 
policy_json, tags = {} }) => ({ + resource: { + iam_role: { + name: `-->${name}-role`, + assume_role_policy: policy_json, + tags: { + ...flag, + ...tags, + }, + arn: '-->', + }, + }, +}); +const bucket_policy_statement = ({ bucket_name, lambda_role_arn = '' }) => ({ + ...(lambda_role_arn ? { principals: { identifiers: [lambda_role_arn], type: 'AWS' } } : {}), + effect: 'Allow', + actions: [ + 's3:AbortMultipartUpload', + 's3:ListMultipartUploadParts', + 's3:ListBucketMultipartUploads', + 's3:PutObject', + 's3:GetObject', + 's3:DeleteObject', + ], + resources: [`arn:aws:s3:::${bucket_name}`, `arn:aws:s3:::${bucket_name}/*`], +}); +export const multi_stmt_policy_doc = ({ bucket_name = '', topic_arn = '', cloudwatch_arn = '', lambda_role_arn = '', }) => ({ + data: { + iam_policy_document: { + statement: [ + ...(bucket_name + ? [bucket_policy_statement({ bucket_name, lambda_role_arn })] + : []), + ...(topic_arn + ? [ + { + effect: 'Allow', + actions: ['sns:Publish', 'sns:Subscribe'], + resources: [topic_arn], + }, + ] + : []), + ...(cloudwatch_arn + ? 
[ + { + effect: 'Allow', + actions: [ + 'logs:CreateLogGroup', + 'logs:CreateLogStream', + 'logs:PutLogEvents', + ], + resources: [`${cloudwatch_arn}:*`, `${cloudwatch_arn}:*:*`], + }, + ] + : []), + ], + json: '-->', + }, + }, +}); +export const iam_role_policy_attachment = ({ role_name, policy_arn }) => ({ + resource: { + iam_role_policy_attachment: { + role: role_name, + policy_arn, + }, + }, +}); +export const iam_policy = ({ name, policy_json, tags = {} }) => ({ + resource: { + iam_policy: { + name: `-->${name}-policy`, + policy: policy_json, + tags: { + ...flag, + ...tags, + }, + arn: '-->', + }, + }, +}); +//# sourceMappingURL=iam.js.map \ No newline at end of file diff --git a/lib/src/modules/iam.js.map b/lib/src/modules/iam.js.map new file mode 100644 index 0000000..2f881e6 --- /dev/null +++ b/lib/src/modules/iam.js.map @@ -0,0 +1 @@ +{"version":3,"file":"iam.js","sourceRoot":"","sources":["../../../src/modules/iam.ts"],"names":[],"mappings":"AAAA,OAAO,EAAkB,IAAI,EAAE,MAAM,UAAU,CAAA;AAE/C,OAAO;AACP,+BAA+B;AAC/B,+BAA+B;AAC/B,+BAA+B;AAC/B,+BAA+B;AAC/B,+BAA+B;AAE/B,MAAM,CAAC,MAAM,cAAc,GAAQ;IAC/B,IAAI,EAAE;QACF,mBAAmB,EAAE;YACjB,SAAS,EAAE;gBACP,MAAM,EAAE,OAAO;gBACf,OAAO,EAAE,CAAC,gBAAgB,CAAC;gBAC3B,UAAU,EAAE;oBACR,WAAW,EAAE,CAAC,sBAAsB,EAAE,0BAA0B,CAAC;oBACjE,IAAI,EAAE,SAAS;iBAClB;aACJ;YACD,IAAI,EAAE,KAAK;SACd;KACJ;CACJ,CAAA;AAED,MAAM,CAAC,MAAM,QAAQ,GAAG,CAAC,EAAE,IAAI,EAAE,WAAW,EAAE,IAAI,GAAG,EAAE,EAAE,EAAO,EAAE,CAAC,CAAC;IAChE,QAAQ,EAAE;QACN,QAAQ,EAAE;YACN,IAAI,EAAE,MAAM,IAAI,OAAO;YACvB,kBAAkB,EAAE,WAAW;YAC/B,IAAI,EAAE;gBACF,GAAG,IAAI;gBACP,GAAG,IAAI;aACV;YACD,GAAG,EAAE,KAAK;SACb;KACJ;CACJ,CAAC,CAAA;AAEF,MAAM,uBAAuB,GAAG,CAAC,EAAE,WAAW,EAAE,eAAe,GAAG,EAAE,EAAE,EAAa,EAAE,CAAC,CAAC;IACnF,GAAG,CAAC,eAAe,CAAC,CAAC,CAAC,EAAE,UAAU,EAAE,EAAE,WAAW,EAAE,CAAC,eAAe,CAAC,EAAE,IAAI,EAAE,KAAK,EAAE,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC;IAC3F,MAAM,EAAE,OAAO;IACf,OAAO,EAAE;QACL,yBAAyB;QACzB,6BAA6B;QAC7B,+BAA+B;QAC/B,cAAc;QACd,cAAc;QACd,iBAAiB;KACpB;IACD,SAAS,EAAE,CAAC,gBAAgB,WAAW,EAA
E,EAAE,gBAAgB,WAAW,IAAI,CAAC;CAC9E,CAAC,CAAA;AAEF,MAAM,CAAC,MAAM,qBAAqB,GAAG,CAAC,EAClC,WAAW,GAAG,EAAE,EAChB,SAAS,GAAG,EAAE,EACd,cAAc,GAAG,EAAE,EACnB,eAAe,GAAG,EAAE,GACvB,EAAO,EAAE,CAAC,CAAC;IACR,IAAI,EAAE;QACF,mBAAmB,EAAE;YACjB,SAAS,EAAE;gBACP,GAAG,CAAC,WAAW;oBACX,CAAC,CAAE,CAAC,uBAAuB,CAAC,EAAE,WAAW,EAAE,eAAe,EAAE,CAAC,CAAiB;oBAC9E,CAAC,CAAC,EAAE,CAAC;gBACT,GAAG,CAAC,SAAS;oBACT,CAAC,CAAE;wBACG;4BACI,MAAM,EAAE,OAAO;4BACf,OAAO,EAAE,CAAC,aAAa,EAAE,eAAe,CAAC;4BACzC,SAAS,EAAE,CAAC,SAAS,CAAC;yBACzB;qBACY;oBACnB,CAAC,CAAC,EAAE,CAAC;gBACT,GAAG,CAAC,cAAc;oBACd,CAAC,CAAE;wBACG;4BACI,MAAM,EAAE,OAAO;4BACf,OAAO,EAAE;gCACL,qBAAqB;gCACrB,sBAAsB;gCACtB,mBAAmB;6BACtB;4BACD,SAAS,EAAE,CAAC,GAAG,cAAc,IAAI,EAAE,GAAG,cAAc,MAAM,CAAC;yBAC9D;qBACY;oBACnB,CAAC,CAAC,EAAE,CAAC;aACZ;YACD,IAAI,EAAE,KAAK;SACd;KACJ;CACJ,CAAC,CAAA;AAEF,MAAM,CAAC,MAAM,0BAA0B,GAAG,CAAC,EAAE,SAAS,EAAE,UAAU,EAAE,EAAO,EAAE,CAAC,CAAC;IAC3E,QAAQ,EAAE;QACN,0BAA0B,EAAE;YACxB,IAAI,EAAE,SAAS;YACf,UAAU;SACb;KACJ;CACJ,CAAC,CAAA;AAEF,MAAM,CAAC,MAAM,UAAU,GAAG,CAAC,EAAE,IAAI,EAAE,WAAW,EAAE,IAAI,GAAG,EAAE,EAAE,EAAO,EAAE,CAAC,CAAC;IAClE,QAAQ,EAAE;QACN,UAAU,EAAE;YACR,IAAI,EAAE,MAAM,IAAI,SAAS;YACzB,MAAM,EAAE,WAAW;YACnB,IAAI,EAAE;gBACF,GAAG,IAAI;gBACP,GAAG,IAAI;aACV;YACD,GAAG,EAAE,KAAK;SACb;KACJ;CACJ,CAAC,CAAA"} \ No newline at end of file diff --git a/lib/src/modules/index.d.ts b/lib/src/modules/index.d.ts index 08d6682..2866dad 100644 --- a/lib/src/modules/index.d.ts +++ b/lib/src/modules/index.d.ts @@ -1,5 +1,5 @@ export { topic } from './sns'; -export { micro } from './lambda'; +export { lambda } from './lambda'; export { api } from './api'; export { zone } from './route53'; //# sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git a/lib/src/modules/index.d.ts.map b/lib/src/modules/index.d.ts.map index 16edc76..c17eab4 100644 --- a/lib/src/modules/index.d.ts.map +++ b/lib/src/modules/index.d.ts.map @@ -1 +1 @@ 
-{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/modules/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,KAAK,EAAE,MAAM,OAAO,CAAA;AAC7B,OAAO,EAAE,KAAK,EAAE,MAAM,UAAU,CAAA;AAChC,OAAO,EAAE,GAAG,EAAE,MAAM,OAAO,CAAA;AAC3B,OAAO,EAAE,IAAI,EAAE,MAAM,WAAW,CAAA"} \ No newline at end of file +{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/modules/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,KAAK,EAAE,MAAM,OAAO,CAAA;AAC7B,OAAO,EAAE,MAAM,EAAE,MAAM,UAAU,CAAA;AACjC,OAAO,EAAE,GAAG,EAAE,MAAM,OAAO,CAAA;AAC3B,OAAO,EAAE,IAAI,EAAE,MAAM,WAAW,CAAA"} \ No newline at end of file diff --git a/lib/src/modules/index.js b/lib/src/modules/index.js index 106bed6..acc5d8a 100644 --- a/lib/src/modules/index.js +++ b/lib/src/modules/index.js @@ -1,5 +1,5 @@ export { topic } from './sns'; -export { micro } from './lambda'; +export { lambda } from './lambda'; export { api } from './api'; export { zone } from './route53'; //# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/lib/src/modules/index.js.map b/lib/src/modules/index.js.map index 6a66aa5..1d3b2d8 100644 --- a/lib/src/modules/index.js.map +++ b/lib/src/modules/index.js.map @@ -1 +1 @@ -{"version":3,"file":"index.js","sourceRoot":"","sources":["../../../src/modules/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,KAAK,EAAE,MAAM,OAAO,CAAA;AAC7B,OAAO,EAAE,KAAK,EAAE,MAAM,UAAU,CAAA;AAChC,OAAO,EAAE,GAAG,EAAE,MAAM,OAAO,CAAA;AAC3B,OAAO,EAAE,IAAI,EAAE,MAAM,WAAW,CAAA"} \ No newline at end of file +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../../src/modules/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,KAAK,EAAE,MAAM,OAAO,CAAA;AAC7B,OAAO,EAAE,MAAM,EAAE,MAAM,UAAU,CAAA;AACjC,OAAO,EAAE,GAAG,EAAE,MAAM,OAAO,CAAA;AAC3B,OAAO,EAAE,IAAI,EAAE,MAAM,WAAW,CAAA"} \ No newline at end of file diff --git a/lib/src/modules/lambda.d.ts b/lib/src/modules/lambda.d.ts index f6a8b8c..ef7be38 100644 --- a/lib/src/modules/lambda.d.ts +++ b/lib/src/modules/lambda.d.ts @@ -1,16 
+1,27 @@ -import { AWS, AWSColls } from '../constants'; +import { AWS } from '../types'; export declare const lambda_invoke_cred: ({ function_name, source_arn, principal, statement_id, }: { function_name: any; source_arn: any; principal?: string | undefined; statement_id?: string | undefined; }) => AWS; +interface MessageAttributes { + /** key (name) can contain the following characters: A-Z, a-z, 0-9, underscore(_), hyphen(-), and period (.) */ + [key: string]: { + /** Can be: 'String', 'Number', 'Binary', or 'String.Array' (which can contain strings, numbers, true, false, and null) */ + DataType: string; + StringValue?: any[] | any; + }; +} interface SNSTopic { /** SNS Topic ARN */ topic_arn: string; - /** The name cannot start with `AWS.` or `Amazon.` See [DOCS](https://docs.aws.amazon.com/sns/latest/dg/sns-publishing.html) for more... */ - message_attrs?: object; - filter_policy?: object; + /** Message Attribute keys (names) cannot start with `AWS.` or `Amazon.` See [Docs](https://docs.aws.amazon.com/sns/latest/dg/sns-publishing.html) for more info. 
*/ + message_attrs?: MessageAttributes; + /** See [Examples in Docs](https://docs.aws.amazon.com/sns/latest/dg/example-filter-policies.html) */ + filter_policy?: { + [key: string]: any[]; + }; } interface SNSTopicFlow { /** SNS Topic subscribed to */ @@ -47,19 +58,22 @@ interface Lambda { * const compiled = compiler(output) * ``` */ -export declare const micro: ({ name, file_path, handler, env_vars, sns, tags, }: Lambda, my: { - [key: string]: import("../../registry").AWS05200; +export declare const lambda: ({ name, file_path, handler, env_vars, tags, sns, }: Lambda, my: { + [key: string]: AWS; }) => { - sns_invoke_cred?: import("../../registry").AWS05200 | undefined; - subscription?: import("../../registry").AWS05200 | undefined; - lambda_creds: import("../../registry").AWS05200; - cloudwatch: import("../../registry").AWS05200; - lambda_policy: import("../../registry").AWS05200; - lambda_role: import("../../registry").AWS05200; - lambda_policy_attachment: import("../../registry").AWS05200; - s3: import("../../registry").AWS05200; - lambda: import("../../registry").AWS05200; - lambda_access_creds: AWSColls; + sns_invoke_cred?: AWS | undefined; + subscription?: AWS | undefined; + iam_policy_doc: AWS; + lambda_role: AWS; + bucket: AWS; + bucket_access_creds: AWS; + bucket_cors: AWS; + bucket_policy: AWS; + cloudwatch: AWS; + lambda_access_creds: AWS; + lambda_policy: AWS; + lambda_policy_attachment: AWS; + lambda: AWS; }; export {}; //# sourceMappingURL=lambda.d.ts.map \ No newline at end of file diff --git a/lib/src/modules/lambda.d.ts.map b/lib/src/modules/lambda.d.ts.map index d252929..2a0e563 100644 --- a/lib/src/modules/lambda.d.ts.map +++ b/lib/src/modules/lambda.d.ts.map @@ -1 +1 @@ 
-{"version":3,"file":"lambda.d.ts","sourceRoot":"","sources":["../../../src/modules/lambda.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,GAAG,EAAE,QAAQ,EAAmB,MAAM,cAAc,CAAA;AAgH7D,eAAO,MAAM,kBAAkB;;;;;MAK3B,GAUF,CAAA;AAmJF,UAAU,QAAQ;IACd,oBAAoB;IACpB,SAAS,EAAE,MAAM,CAAA;IACjB,2IAA2I;IAC3I,aAAa,CAAC,EAAE,MAAM,CAAA;IACtB,aAAa,CAAC,EAAE,MAAM,CAAA;CACzB;AAED,UAAU,YAAY;IAClB,8BAA8B;IAC9B,QAAQ,CAAC,EAAE,QAAQ,CAAA;IACnB,8BAA8B;IAC9B,UAAU,CAAC,EAAE,QAAQ,CAAA;CACxB;AAED,UAAU,MAAM;IACZ,IAAI,EAAE,MAAM,CAAA;IACZ,SAAS,EAAE,MAAM,CAAA;IACjB,OAAO,EAAE,MAAM,CAAA;IACf,QAAQ,CAAC,EAAE,MAAM,CAAA;IACjB,GAAG,CAAC,EAAE,YAAY,CAAA;IAClB,IAAI,CAAC,EAAE,MAAM,CAAA;CAChB;AAED;;;;;;;;;;;;;;;;;;;;GAoBG;AACH,eAAO,MAAM,KAAK,uDAQX,MAAM;;;;;;;;;;;;;CA2DX,CAAA"} \ No newline at end of file +{"version":3,"file":"lambda.d.ts","sourceRoot":"","sources":["../../../src/modules/lambda.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,GAAG,EAAQ,MAAM,UAAU,CAAA;AAYpC,eAAO,MAAM,kBAAkB;;;;;MAK3B,GAUF,CAAA;AA8EF,UAAU,iBAAiB;IACvB,+GAA+G;IAC/G,CAAC,GAAG,EAAE,MAAM,GAAG;QACX,0HAA0H;QAC1H,QAAQ,EAAE,MAAM,CAAA;QAChB,WAAW,CAAC,EAAE,GAAG,EAAE,GAAG,GAAG,CAAA;KAC5B,CAAA;CACJ;AAED,UAAU,QAAQ;IACd,oBAAoB;IACpB,SAAS,EAAE,MAAM,CAAA;IACjB,oKAAoK;IACpK,aAAa,CAAC,EAAE,iBAAiB,CAAA;IACjC,qGAAqG;IACrG,aAAa,CAAC,EAAE;QAAE,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,EAAE,CAAA;KAAE,CAAA;CAC3C;AAED,UAAU,YAAY;IAClB,8BAA8B;IAC9B,QAAQ,CAAC,EAAE,QAAQ,CAAA;IACnB,8BAA8B;IAC9B,UAAU,CAAC,EAAE,QAAQ,CAAA;CACxB;AAED,UAAU,MAAM;IACZ,IAAI,EAAE,MAAM,CAAA;IACZ,SAAS,EAAE,MAAM,CAAA;IACjB,OAAO,EAAE,MAAM,CAAA;IACf,QAAQ,CAAC,EAAE,MAAM,CAAA;IACjB,GAAG,CAAC,EAAE,YAAY,CAAA;IAClB,IAAI,CAAC,EAAE,MAAM,CAAA;CAChB;AAED;;;;;;;;;;;;;;;;;;;;GAoBG;AACH,eAAO,MAAM,MAAM,uDAQZ,MAAM;;;;;;;;;;;;;;;;CAyEZ,CAAA"} \ No newline at end of file diff --git a/lib/src/modules/lambda.js b/lib/src/modules/lambda.js index 9fa1653..4870ca2 100644 --- a/lib/src/modules/lambda.js +++ b/lib/src/modules/lambda.js @@ -1,109 +1,8 @@ -import { flag } from '../constants'; -// ,e, -// " /~~~8e 888-~88e-~88e -// 888 88b 
888 888 888 -// 888 e88~-888 888 888 888 -// 888 C888 888 888 888 888 -// 888 "88_-888 888 888 888 -const lambda_creds = { - data: { - iam_policy_document: { - statement: { - effect: 'Allow', - actions: ['sts:AssumeRole'], - principals: { - identifiers: ['lambda.amazonaws.com', 'apigateway.amazonaws.com'], - type: 'Service', - }, - }, - json: '-->', - }, - }, -}; -const lambda_role = ({ name, policy_json, tags = {} }) => ({ - resource: { - iam_role: { - name: `-->${name}-role`, - assume_role_policy: policy_json, - tags: { - ...flag, - ...tags, - }, - arn: '-->', - }, - }, -}); -const lambda_access_creds = ({ bucket_name, topic_arn, cloudwatch_arn }) => ({ - data: { - iam_policy_document: { - statement: [ - ...(bucket_name - ? [ - { - effect: 'Allow', - actions: [ - 's3:AbortMultipartUpload', - 's3:ListMultipartUploadParts', - 's3:ListBucketMultipartUploads', - 's3:PutObject', - 's3:GetObject', - 's3:DeleteObject', - ], - resources: [ - `arn:aws:s3:::${bucket_name}`, - `arn:aws:s3:::${bucket_name}/*`, - ], - }, - ] - : []), - ...(topic_arn - ? [ - { - effect: 'Allow', - actions: ['sns:Publish', 'sns:Subscribe'], - resources: [topic_arn], - }, - ] - : []), - ...(cloudwatch_arn - ? 
[ - { - effect: 'Allow', - actions: [ - 'logs:CreateLogGroup', - 'logs:CreateLogStream', - 'logs:PutLogEvents', - ], - resources: [`${cloudwatch_arn}:*`, `${cloudwatch_arn}:*:*`], - }, - ] - : []), - ], - json: '-->', - }, - }, -}); -const lambda_policy_attachment = ({ role_name, policy_arn }) => ({ - resource: { - iam_role_policy_attachment: { - role: role_name, - policy_arn, - }, - }, -}); -const lambda_policy = ({ name, policy_json, tags = {} }) => ({ - resource: { - iam_policy: { - name: `-->${name}-policy`, - policy: policy_json, - tags: { - ...flag, - ...tags, - }, - arn: '-->', - }, - }, -}); +import { flag } from '../types'; +import { bucket_policy, bucket_cors, bucket } from './s3'; +import { subscription } from './sns'; +import { iam_policy_doc, iam_role, multi_stmt_policy_doc, iam_role_policy_attachment, iam_policy, } from './iam'; +//import { ecr_repo, ecr_image, isFile } from './ecr' export const lambda_invoke_cred = ({ function_name, source_arn, principal = 'sns.amazonaws.com', statement_id = 'AllowExecutionFromSNS', }) => ({ resource: { lambda_permission: { @@ -131,69 +30,8 @@ const cloudwatch = ({ name, retention_in_days = 7, tags = {} }) => ({ ...tags, }, arn: '-->', - }, - }, -}); -// d88~\ 888-~88e d88~\ -// C888 888 888 C888 -// Y88b 888 888 Y88b -// 888D 888 888 888D -// \_88P 888 888 \_88P -const subscription = ({ topic_arn, lambda_arn, filter = {}, scope = 'MessageAttributes', }) => ({ - resource: { - // @ts-ignore: subscription_role_arn only needed if protocol == 'firehose' - sns_topic_subscription: { - topic_arn, - protocol: 'lambda', - endpoint: lambda_arn, - filter_policy: JSON.stringify(filter), - filter_policy_scope: scope, - arn: '-->', - }, - }, -}); -// 88~\ -// e88~~8e _888__ d88~\ -// d888 88b 888 C888 -// 8888__888 888 Y88b -// Y888 , 888 888D -// "88___/ 888 \_88P -// reference [4] -const efs = { - resource: { - efs_file_system: { - arn: '-->', - tags: { - ...flag, - }, - }, - }, -}; -const efs_access_point = ({ name, efs_arn }) 
=> ({ - resource: { - efs_access_point: { - file_system_id: 'TODO', - tags: { - ...flag, - }, - }, - }, -}); -// _-~88e -// d88~\ 888b -// C888 __888" -// Y88b 888e -// 888D 888P -// \_88P ~-_88" -const s3 = ({ name, tags = {} }) => ({ - resource: { - s3_bucket: { - bucket: `-->${name}-bucket`, - // @ts-ignore (docs šŸ›) - tags: { - ...tags, - ...flag, - }, + // @ts-ignore + depends_on: ['docker_registry_image.$SCOPE'], }, }, }); @@ -203,23 +41,26 @@ const s3 = ({ name, tags = {} }) => ({ // 888 e88~-888 888 888 888 888 8888 8888 888 e88~-888 // 888 C888 888 888 888 888 888 888P Y888 888 C888 888 // 888 "88_-888 888 888 888 888-_88" "88_/888 "88_-888 -const lambda_fn = ({ name, -//efs_arn, -role_arn, file_path, env_vars = {}, handler = 'handler.handler', runtime = 'python3.8', tags = {}, }) => ({ +/** + * TODO: + * - build lambdas JIT + * - for zipped lambdas + * - python: use @-0/build-lambda-py + * - for container lambdas + * - use @-0/build-lambda-container + */ +const lambda_fn = ({ name, role_arn, file_path, env_vars = {}, handler = 'handler.handler', package_type = 'Zip', runtime = 'python3.8', tags = {}, }) => ({ resource: { lambda_function: { - function_name: `-->lambda-${name}`, - role: role_arn, runtime, handler, - filename: file_path, - //file_system_config: { - // arn: efs_arn, - // local_mount_path: '/mnt/efs', - //}, + package_type, + function_name: `-->lambda-${name}`, + role: role_arn, environment: { variables: env_vars, }, + ...(package_type === 'Image' ? 
{ image_uri: file_path } : { filename: file_path }), tags: { ...flag, ...tags, @@ -250,62 +91,76 @@ role_arn, file_path, env_vars = {}, handler = 'handler.handler', runtime = 'pyth * const compiled = compiler(output) * ``` */ -export const micro = ({ name = 'microservice', file_path = '${path.root}/lambdas/template/zipped/handler.py.zip', handler = 'handler.handler', env_vars = {}, sns, tags = {}, }, my) => ({ - //efs, - lambda_creds, - cloudwatch: cloudwatch({ name, tags }), - lambda_policy: lambda_policy({ - name: `${name}-policy`, - policy_json: my?.lambda_access_creds?.data?.iam_policy_document?.json, - tags, - }), - lambda_role: lambda_role({ - name, - policy_json: my?.lambda_creds?.data?.iam_policy_document?.json, - tags, - }), - lambda_policy_attachment: lambda_policy_attachment({ - policy_arn: my?.lambda_policy?.resource?.iam_policy?.arn, - role_name: my?.lambda_role?.resource?.iam_role?.name, - }), - s3: s3({ name, tags }), - lambda: lambda_fn({ - name, - //efs_arn: my?.efs?.resource?.efs_file_system?.arn, - role_arn: my?.lambda_role?.resource?.iam_role?.arn, - file_path, - handler, - tags, - env_vars: { - S3_BUCKET_NAME: my?.s3.resource?.s3_bucket?.bucket, - ...(sns - ? { - SNS_TOPIC_ARN: sns.downstream?.topic_arn, - SNS_MESSAGE_ATTRS: JSON.stringify(sns.downstream?.message_attrs), - } - : {}), - ...env_vars, - }, - }), - lambda_access_creds: lambda_access_creds({ - bucket_name: my?.s3.resource?.s3_bucket?.bucket, - cloudwatch_arn: my?.cloudwatch.resource?.cloudwatch_log_group?.arn, - topic_arn: sns?.downstream?.topic_arn, - }), - ...(sns?.downstream - ? 
{ - sns_invoke_cred: lambda_invoke_cred({ - function_name: my?.lambda?.resource?.lambda_function?.function_name, - source_arn: sns.downstream?.topic_arn, - principal: 'sns.amazonaws.com', - statement_id: 'AllowExecutionFromSNS', - }), - subscription: subscription({ - topic_arn: sns.downstream?.topic_arn, - lambda_arn: my?.lambda?.resource?.lambda_function?.arn, - filter: sns.upstream?.filter_policy, - }), - } - : {}), -}); +export const lambda = ({ name = 'microservice', file_path = '${path.root}/lambdas/template/zipped/handler.py.zip', handler = 'handler.handler', env_vars = {}, tags = {}, sns, }, my) => { + // TODO: consider triggering @-0/build-lambda-py here + // - would have to make this async... + const ext = file_path.split('.').pop(); + const isZip = ext === 'zip'; + return { + iam_policy_doc, + lambda_role: iam_role({ + name, + policy_json: my?.lambda_creds?.data?.iam_policy_document?.json, + tags, + }), + bucket: bucket({ name, tags }), + bucket_access_creds: multi_stmt_policy_doc({ + bucket_name: my?.bucket.resource?.s3_bucket?.bucket, + lambda_role_arn: my?.lambda_role?.resource?.iam_role?.arn, + }), + bucket_cors: bucket_cors({ bucket_name: my?.bucket.resource?.s3_bucket?.bucket }), + bucket_policy: bucket_policy({ + bucket_name: my?.bucket.resource?.s3_bucket?.bucket, + policy_json: my?.bucket_access_creds?.data?.iam_policy_document?.json, + }), + cloudwatch: cloudwatch({ name, tags }), + lambda_access_creds: multi_stmt_policy_doc({ + bucket_name: my?.bucket.resource?.s3_bucket?.bucket, + cloudwatch_arn: my?.cloudwatch.resource?.cloudwatch_log_group?.arn, + topic_arn: sns?.downstream?.topic_arn, + }), + lambda_policy: iam_policy({ + name: `${name}-policy`, + policy_json: my?.lambda_access_creds?.data?.iam_policy_document?.json, + tags, + }), + lambda_policy_attachment: iam_role_policy_attachment({ + policy_arn: my?.lambda_policy?.resource?.iam_policy?.arn, + role_name: my?.lambda_role?.resource?.iam_role?.name, + }), + lambda: lambda_fn({ + name, + 
role_arn: my?.lambda_role?.resource?.iam_role?.arn, + file_path, + package_type: isZip ? 'Zip' : 'Image', + handler, + tags, + env_vars: { + S3_BUCKET_NAME: my?.bucket.resource?.s3_bucket?.bucket, + ...(sns?.downstream + ? { + SNS_TOPIC_ARN: sns.downstream.topic_arn, + SNS_MESSAGE_ATTRS: JSON.stringify(sns.downstream.message_attrs), + } + : {}), + ...env_vars, + }, + }), + ...(sns?.upstream + ? { + sns_invoke_cred: lambda_invoke_cred({ + function_name: my?.lambda?.resource?.lambda_function?.function_name, + source_arn: sns.upstream?.topic_arn, + principal: 'sns.amazonaws.com', + statement_id: 'AllowExecutionFromSNS', + }), + subscription: subscription({ + topic_arn: sns.upstream.topic_arn, + lambda_arn: my?.lambda?.resource?.lambda_function?.arn, + filter: sns.upstream.filter_policy, + }), + } + : {}), + }; +}; //# sourceMappingURL=lambda.js.map \ No newline at end of file diff --git a/lib/src/modules/lambda.js.map b/lib/src/modules/lambda.js.map index ff214a0..e87a9e6 100644 --- a/lib/src/modules/lambda.js.map +++ b/lib/src/modules/lambda.js.map @@ -1 +1 @@ 
-{"version":3,"file":"lambda.js","sourceRoot":"","sources":["../../../src/modules/lambda.ts"],"names":[],"mappings":"AAAA,OAAO,EAA4B,IAAI,EAAE,MAAM,cAAc,CAAA;AAC7D,OAAO;AACP,+BAA+B;AAC/B,+BAA+B;AAC/B,+BAA+B;AAC/B,+BAA+B;AAC/B,+BAA+B;AAE/B,MAAM,YAAY,GAAQ;IACtB,IAAI,EAAE;QACF,mBAAmB,EAAE;YACjB,SAAS,EAAE;gBACP,MAAM,EAAE,OAAO;gBACf,OAAO,EAAE,CAAC,gBAAgB,CAAC;gBAC3B,UAAU,EAAE;oBACR,WAAW,EAAE,CAAC,sBAAsB,EAAE,0BAA0B,CAAC;oBACjE,IAAI,EAAE,SAAS;iBAClB;aACJ;YACD,IAAI,EAAE,KAAK;SACd;KACJ;CACJ,CAAA;AAED,MAAM,WAAW,GAAG,CAAC,EAAE,IAAI,EAAE,WAAW,EAAE,IAAI,GAAG,EAAE,EAAE,EAAO,EAAE,CAAC,CAAC;IAC5D,QAAQ,EAAE;QACN,QAAQ,EAAE;YACN,IAAI,EAAE,MAAM,IAAI,OAAO;YACvB,kBAAkB,EAAE,WAAW;YAC/B,IAAI,EAAE;gBACF,GAAG,IAAI;gBACP,GAAG,IAAI;aACV;YACD,GAAG,EAAE,KAAK;SACb;KACJ;CACJ,CAAC,CAAA;AAEF,MAAM,mBAAmB,GAAG,CAAC,EAAE,WAAW,EAAE,SAAS,EAAE,cAAc,EAAE,EAAY,EAAE,CAAC,CAAC;IACnF,IAAI,EAAE;QACF,mBAAmB,EAAE;YACjB,SAAS,EAAE;gBACP,GAAG,CAAC,WAAW;oBACX,CAAC,CAAE;wBACG;4BACI,MAAM,EAAE,OAAO;4BACf,OAAO,EAAE;gCACL,yBAAyB;gCACzB,6BAA6B;gCAC7B,+BAA+B;gCAC/B,cAAc;gCACd,cAAc;gCACd,iBAAiB;6BACpB;4BACD,SAAS,EAAE;gCACP,gBAAgB,WAAW,EAAE;gCAC7B,gBAAgB,WAAW,IAAI;6BAClC;yBACJ;qBACY;oBACnB,CAAC,CAAC,EAAE,CAAC;gBACT,GAAG,CAAC,SAAS;oBACT,CAAC,CAAE;wBACG;4BACI,MAAM,EAAE,OAAO;4BACf,OAAO,EAAE,CAAC,aAAa,EAAE,eAAe,CAAC;4BACzC,SAAS,EAAE,CAAC,SAAS,CAAC;yBACzB;qBACY;oBACnB,CAAC,CAAC,EAAE,CAAC;gBACT,GAAG,CAAC,cAAc;oBACd,CAAC,CAAE;wBACG;4BACI,MAAM,EAAE,OAAO;4BACf,OAAO,EAAE;gCACL,qBAAqB;gCACrB,sBAAsB;gCACtB,mBAAmB;6BACtB;4BACD,SAAS,EAAE,CAAC,GAAG,cAAc,IAAI,EAAE,GAAG,cAAc,MAAM,CAAC;yBAC9D;qBACY;oBACnB,CAAC,CAAC,EAAE,CAAC;aACZ;YACD,IAAI,EAAE,KAAK;SACd;KACJ;CACJ,CAAC,CAAA;AAEF,MAAM,wBAAwB,GAAG,CAAC,EAAE,SAAS,EAAE,UAAU,EAAE,EAAO,EAAE,CAAC,CAAC;IAClE,QAAQ,EAAE;QACN,0BAA0B,EAAE;YACxB,IAAI,EAAE,SAAS;YACf,UAAU;SACb;KACJ;CACJ,CAAC,CAAA;AAEF,MAAM,aAAa,GAAG,CAAC,EAAE,IAAI,EAAE,WAAW,EAAE,IAAI,GAAG,EAAE,EAAE,EAAO,EAAE,CAAC,CAAC;IAC9D,QAAQ,EAAE;QACN,UAAU,EAAE;YACR,IAAI,EAAE,MAAM,IAAI,SAAS;YACzB,MAAM,EAAE,WAAW;YACnB,IAAI,EAAE;gBACF,GAAG,IAAI;gBACP,GAAG
,IAAI;aACV;YACD,GAAG,EAAE,KAAK;SACb;KACJ;CACJ,CAAC,CAAA;AAEF,MAAM,CAAC,MAAM,kBAAkB,GAAG,CAAC,EAC/B,aAAa,EACb,UAAU,EACV,SAAS,GAAG,mBAAmB,EAC/B,YAAY,GAAG,uBAAuB,GACzC,EAAO,EAAE,CAAC,CAAC;IACR,QAAQ,EAAE;QACN,iBAAiB,EAAE;YACf,YAAY;YACZ,MAAM,EAAE,uBAAuB;YAC/B,aAAa;YACb,SAAS;YACT,UAAU;SACb;KACJ;CACJ,CAAC,CAAA;AAEF,uFAAuF;AACvF,4FAA4F;AAC5F,4FAA4F;AAC5F,4FAA4F;AAC5F,4FAA4F;AAC5F,4FAA4F;AAE5F,MAAM,UAAU,GAAG,CAAC,EAAE,IAAI,EAAE,iBAAiB,GAAG,CAAC,EAAE,IAAI,GAAG,EAAE,EAAE,EAAO,EAAE,CAAC,CAAC;IACrE,QAAQ,EAAE;QACN,oBAAoB,EAAE;YAClB,IAAI,EAAE,eAAe,IAAI,YAAY;YACrC,iBAAiB;YACjB,IAAI,EAAE;gBACF,GAAG,IAAI;gBACP,GAAG,IAAI;aACV;YACD,GAAG,EAAE,KAAK;SACb;KACJ;CACJ,CAAC,CAAA;AAEF,0BAA0B;AAC1B,wBAAwB;AACxB,yBAAyB;AACzB,0BAA0B;AAC1B,yBAAyB;AAEzB,MAAM,YAAY,GAAG,CAAC,EAClB,SAAS,EACT,UAAU,EACV,MAAM,GAAG,EAAE,EACX,KAAK,GAAG,mBAAmB,GAC9B,EAAO,EAAE,CAAC,CAAC;IACR,QAAQ,EAAE;QACN,0EAA0E;QAC1E,sBAAsB,EAAE;YACpB,SAAS;YACT,QAAQ,EAAE,QAAQ;YAClB,QAAQ,EAAE,UAAU;YACpB,aAAa,EAAE,IAAI,CAAC,SAAS,CAAC,MAAM,CAAC;YACrC,mBAAmB,EAAE,KAAK;YAC1B,GAAG,EAAE,KAAK;SACb;KACJ;CACJ,CAAC,CAAA;AAEF,oBAAoB;AACpB,2BAA2B;AAC3B,yBAAyB;AACzB,0BAA0B;AAC1B,2BAA2B;AAC3B,0BAA0B;AAE1B,gBAAgB;AAEhB,MAAM,GAAG,GAAQ;IACb,QAAQ,EAAE;QACN,eAAe,EAAE;YACb,GAAG,EAAE,KAAK;YACV,IAAI,EAAE;gBACF,GAAG,IAAI;aACV;SACJ;KACJ;CACJ,CAAA;AAED,MAAM,gBAAgB,GAAG,CAAC,EAAE,IAAI,EAAE,OAAO,EAAE,EAAO,EAAE,CAAC,CAAC;IAClD,QAAQ,EAAE;QACN,gBAAgB,EAAE;YACd,cAAc,EAAE,MAAM;YACtB,IAAI,EAAE;gBACF,GAAG,IAAI;aACV;SACJ;KACJ;CACJ,CAAC,CAAA;AAEF,iBAAiB;AACjB,kBAAkB;AAClB,kBAAkB;AAClB,kBAAkB;AAClB,kBAAkB;AAClB,iBAAiB;AAEjB,MAAM,EAAE,GAAG,CAAC,EAAE,IAAI,EAAE,IAAI,GAAG,EAAE,EAAE,EAAO,EAAE,CAAC,CAAC;IACtC,QAAQ,EAAE;QACN,SAAS,EAAE;YACP,MAAM,EAAE,MAAM,IAAI,SAAS;YAC3B,uBAAuB;YACvB,IAAI,EAAE;gBACF,GAAG,IAAI;gBACP,GAAG,IAAI;aACV;SACJ;KACJ;CACJ,CAAC,CAAA;AAEF,mDAAmD;AACnD,4DAA4D;AAC5D,6DAA6D;AAC7D,6DAA6D;AAC7D,6DAA6D;AAC7D,6DAA6D;AAE7D,MAAM,SAAS,GAAG,CAAC,EACf,IAAI;AACJ,UAAU;AACV,QAAQ,EACR,SAAS,EACT,QAAQ,GAAG,EAAE,EACb,OAAO,GAAG,iBAAiB,EAC3B,OAAO,GAAG,WAAW,EACrB,IAAI,GAAG,EAAE,GACZ,EAA
O,EAAE,CAAC,CAAC;IACR,QAAQ,EAAE;QACN,eAAe,EAAE;YACb,aAAa,EAAE,aAAa,IAAI,EAAE;YAClC,IAAI,EAAE,QAAQ;YACd,OAAO;YACP,OAAO;YACP,QAAQ,EAAE,SAAS;YACnB,uBAAuB;YACvB,mBAAmB;YACnB,mCAAmC;YACnC,IAAI;YACJ,WAAW,EAAE;gBACT,SAAS,EAAE,QAAQ;aACtB;YACD,IAAI,EAAE;gBACF,GAAG,IAAI;gBACP,GAAG,IAAI;aACV;YACD,GAAG,EAAE,KAAK;YACV,UAAU,EAAE,KAAK;SACpB;KACJ;CACJ,CAAC,CAAA;AAiCF;;;;;;;;;;;;;;;;;;;;GAoBG;AACH,MAAM,CAAC,MAAM,KAAK,GAAG,CACjB,EACI,IAAI,GAAG,cAAc,EACrB,SAAS,GAAG,qDAAqD,EACjE,OAAO,GAAG,iBAAiB,EAC3B,QAAQ,GAAG,EAAE,EACb,GAAG,EACH,IAAI,GAAG,EAAE,GACJ,EACT,EAA0B,EAC5B,EAAE,CAAC,CAAC;IACF,MAAM;IACN,YAAY;IACZ,UAAU,EAAE,UAAU,CAAC,EAAE,IAAI,EAAE,IAAI,EAAE,CAAC;IACtC,aAAa,EAAE,aAAa,CAAC;QACzB,IAAI,EAAE,GAAG,IAAI,SAAS;QACtB,WAAW,EAAE,EAAE,EAAE,mBAAmB,EAAE,IAAI,EAAE,mBAAmB,EAAE,IAAI;QACrE,IAAI;KACP,CAAC;IACF,WAAW,EAAE,WAAW,CAAC;QACrB,IAAI;QACJ,WAAW,EAAE,EAAE,EAAE,YAAY,EAAE,IAAI,EAAE,mBAAmB,EAAE,IAAI;QAC9D,IAAI;KACP,CAAC;IACF,wBAAwB,EAAE,wBAAwB,CAAC;QAC/C,UAAU,EAAE,EAAE,EAAE,aAAa,EAAE,QAAQ,EAAE,UAAU,EAAE,GAAG;QACxD,SAAS,EAAE,EAAE,EAAE,WAAW,EAAE,QAAQ,EAAE,QAAQ,EAAE,IAAI;KACvD,CAAC;IACF,EAAE,EAAE,EAAE,CAAC,EAAE,IAAI,EAAE,IAAI,EAAE,CAAC;IACtB,MAAM,EAAE,SAAS,CAAC;QACd,IAAI;QACJ,mDAAmD;QACnD,QAAQ,EAAE,EAAE,EAAE,WAAW,EAAE,QAAQ,EAAE,QAAQ,EAAE,GAAG;QAClD,SAAS;QACT,OAAO;QACP,IAAI;QACJ,QAAQ,EAAE;YACN,cAAc,EAAE,EAAE,EAAE,EAAE,CAAC,QAAQ,EAAE,SAAS,EAAE,MAAM;YAClD,GAAG,CAAC,GAAG;gBACH,CAAC,CAAC;oBACI,aAAa,EAAE,GAAG,CAAC,UAAU,EAAE,SAAS;oBACxC,iBAAiB,EAAE,IAAI,CAAC,SAAS,CAAC,GAAG,CAAC,UAAU,EAAE,aAAa,CAAC;iBACnE;gBACH,CAAC,CAAC,EAAE,CAAC;YACT,GAAG,QAAQ;SACd;KACJ,CAAC;IACF,mBAAmB,EAAE,mBAAmB,CAAC;QACrC,WAAW,EAAE,EAAE,EAAE,EAAE,CAAC,QAAQ,EAAE,SAAS,EAAE,MAAM;QAC/C,cAAc,EAAE,EAAE,EAAE,UAAU,CAAC,QAAQ,EAAE,oBAAoB,EAAE,GAAG;QAClE,SAAS,EAAE,GAAG,EAAE,UAAU,EAAE,SAAS;KACxC,CAAC;IACF,GAAG,CAAC,GAAG,EAAE,UAAU;QACf,CAAC,CAAC;YACI,eAAe,EAAE,kBAAkB,CAAC;gBAChC,aAAa,EAAE,EAAE,EAAE,MAAM,EAAE,QAAQ,EAAE,eAAe,EAAE,aAAa;gBACnE,UAAU,EAAE,GAAG,CAAC,UAAU,EAAE,SAAS;gBACrC,SAAS,EAAE,mBAAmB;gBAC9B,YAAY,EAAE,uBAAuB;aACxC,CAAC;YACF,YAAY,EA
AE,YAAY,CAAC;gBACvB,SAAS,EAAE,GAAG,CAAC,UAAU,EAAE,SAAS;gBACpC,UAAU,EAAE,EAAE,EAAE,MAAM,EAAE,QAAQ,EAAE,eAAe,EAAE,GAAG;gBACtD,MAAM,EAAE,GAAG,CAAC,QAAQ,EAAE,aAAa;aACtC,CAAC;SACL;QACH,CAAC,CAAC,EAAE,CAAC;CACZ,CAAC,CAAA"} \ No newline at end of file +{"version":3,"file":"lambda.js","sourceRoot":"","sources":["../../../src/modules/lambda.ts"],"names":[],"mappings":"AAAA,OAAO,EAAO,IAAI,EAAE,MAAM,UAAU,CAAA;AACpC,OAAO,EAAE,aAAa,EAAE,WAAW,EAAE,MAAM,EAAE,MAAM,MAAM,CAAA;AACzD,OAAO,EAAE,YAAY,EAAE,MAAM,OAAO,CAAA;AACpC,OAAO,EACH,cAAc,EACd,QAAQ,EACR,qBAAqB,EACrB,0BAA0B,EAC1B,UAAU,GACb,MAAM,OAAO,CAAA;AACd,qDAAqD;AAErD,MAAM,CAAC,MAAM,kBAAkB,GAAG,CAAC,EAC/B,aAAa,EACb,UAAU,EACV,SAAS,GAAG,mBAAmB,EAC/B,YAAY,GAAG,uBAAuB,GACzC,EAAO,EAAE,CAAC,CAAC;IACR,QAAQ,EAAE;QACN,iBAAiB,EAAE;YACf,YAAY;YACZ,MAAM,EAAE,uBAAuB;YAC/B,aAAa;YACb,SAAS;YACT,UAAU;SACb;KACJ;CACJ,CAAC,CAAA;AAEF,uFAAuF;AACvF,4FAA4F;AAC5F,4FAA4F;AAC5F,4FAA4F;AAC5F,4FAA4F;AAC5F,4FAA4F;AAE5F,MAAM,UAAU,GAAG,CAAC,EAAE,IAAI,EAAE,iBAAiB,GAAG,CAAC,EAAE,IAAI,GAAG,EAAE,EAAE,EAAO,EAAE,CAAC,CAAC;IACrE,QAAQ,EAAE;QACN,oBAAoB,EAAE;YAClB,IAAI,EAAE,eAAe,IAAI,YAAY;YACrC,iBAAiB;YACjB,IAAI,EAAE;gBACF,GAAG,IAAI;gBACP,GAAG,IAAI;aACV;YACD,GAAG,EAAE,KAAK;YACV,aAAa;YACb,UAAU,EAAE,CAAC,8BAA8B,CAAC;SAC/C;KACJ;CACJ,CAAC,CAAA;AAEF,mDAAmD;AACnD,4DAA4D;AAC5D,6DAA6D;AAC7D,6DAA6D;AAC7D,6DAA6D;AAC7D,6DAA6D;AAE7D;;;;;;;GAOG;AACH,MAAM,SAAS,GAAG,CAAC,EACf,IAAI,EACJ,QAAQ,EACR,SAAS,EACT,QAAQ,GAAG,EAAE,EACb,OAAO,GAAG,iBAAiB,EAC3B,YAAY,GAAG,KAAK,EACpB,OAAO,GAAG,WAAW,EACrB,IAAI,GAAG,EAAE,GACZ,EAAO,EAAE,CAAC,CAAC;IACR,QAAQ,EAAE;QACN,eAAe,EAAE;YACb,OAAO;YACP,OAAO;YACP,YAAY;YACZ,aAAa,EAAE,aAAa,IAAI,EAAE;YAClC,IAAI,EAAE,QAAQ;YACd,WAAW,EAAE;gBACT,SAAS,EAAE,QAAQ;aACtB;YACD,GAAG,CAAC,YAAY,KAAK,OAAO,CAAC,CAAC,CAAC,EAAE,SAAS,EAAE,SAAS,EAAE,CAAC,CAAC,CAAC,EAAE,QAAQ,EAAE,SAAS,EAAE,CAAC;YAClF,IAAI,EAAE;gBACF,GAAG,IAAI;gBACP,GAAG,IAAI;aACV;YACD,GAAG,EAAE,KAAK;YACV,UAAU,EAAE,KAAK;SACpB;KACJ;CACJ,CAAC,CAAA;AA2CF;;;;;;;;;;;;;;;;;;;;GAoBG;AACH,MAAM,CAAC,MAAM,MAAM,GAAG,CAClB,EACI,IAAI,GAAG,c
AAc,EACrB,SAAS,GAAG,qDAAqD,EACjE,OAAO,GAAG,iBAAiB,EAC3B,QAAQ,GAAG,EAAE,EACb,IAAI,GAAG,EAAE,EACT,GAAG,GACE,EACT,EAA0B,EAC5B,EAAE;IACA,qDAAqD;IACrD,qCAAqC;IACrC,MAAM,GAAG,GAAG,SAAS,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,GAAG,EAAE,CAAA;IACtC,MAAM,KAAK,GAAG,GAAG,KAAK,KAAK,CAAA;IAC3B,OAAO;QACH,cAAc;QACd,WAAW,EAAE,QAAQ,CAAC;YAClB,IAAI;YACJ,WAAW,EAAE,EAAE,EAAE,YAAY,EAAE,IAAI,EAAE,mBAAmB,EAAE,IAAI;YAC9D,IAAI;SACP,CAAC;QACF,MAAM,EAAE,MAAM,CAAC,EAAE,IAAI,EAAE,IAAI,EAAE,CAAC;QAC9B,mBAAmB,EAAE,qBAAqB,CAAC;YACvC,WAAW,EAAE,EAAE,EAAE,MAAM,CAAC,QAAQ,EAAE,SAAS,EAAE,MAAM;YACnD,eAAe,EAAE,EAAE,EAAE,WAAW,EAAE,QAAQ,EAAE,QAAQ,EAAE,GAAG;SAC5D,CAAC;QACF,WAAW,EAAE,WAAW,CAAC,EAAE,WAAW,EAAE,EAAE,EAAE,MAAM,CAAC,QAAQ,EAAE,SAAS,EAAE,MAAM,EAAE,CAAC;QACjF,aAAa,EAAE,aAAa,CAAC;YACzB,WAAW,EAAE,EAAE,EAAE,MAAM,CAAC,QAAQ,EAAE,SAAS,EAAE,MAAM;YACnD,WAAW,EAAE,EAAE,EAAE,mBAAmB,EAAE,IAAI,EAAE,mBAAmB,EAAE,IAAI;SACxE,CAAC;QACF,UAAU,EAAE,UAAU,CAAC,EAAE,IAAI,EAAE,IAAI,EAAE,CAAC;QACtC,mBAAmB,EAAE,qBAAqB,CAAC;YACvC,WAAW,EAAE,EAAE,EAAE,MAAM,CAAC,QAAQ,EAAE,SAAS,EAAE,MAAM;YACnD,cAAc,EAAE,EAAE,EAAE,UAAU,CAAC,QAAQ,EAAE,oBAAoB,EAAE,GAAG;YAClE,SAAS,EAAE,GAAG,EAAE,UAAU,EAAE,SAAS;SACxC,CAAC;QACF,aAAa,EAAE,UAAU,CAAC;YACtB,IAAI,EAAE,GAAG,IAAI,SAAS;YACtB,WAAW,EAAE,EAAE,EAAE,mBAAmB,EAAE,IAAI,EAAE,mBAAmB,EAAE,IAAI;YACrE,IAAI;SACP,CAAC;QACF,wBAAwB,EAAE,0BAA0B,CAAC;YACjD,UAAU,EAAE,EAAE,EAAE,aAAa,EAAE,QAAQ,EAAE,UAAU,EAAE,GAAG;YACxD,SAAS,EAAE,EAAE,EAAE,WAAW,EAAE,QAAQ,EAAE,QAAQ,EAAE,IAAI;SACvD,CAAC;QACF,MAAM,EAAE,SAAS,CAAC;YACd,IAAI;YACJ,QAAQ,EAAE,EAAE,EAAE,WAAW,EAAE,QAAQ,EAAE,QAAQ,EAAE,GAAG;YAClD,SAAS;YACT,YAAY,EAAE,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,OAAO;YACrC,OAAO;YACP,IAAI;YACJ,QAAQ,EAAE;gBACN,cAAc,EAAE,EAAE,EAAE,MAAM,CAAC,QAAQ,EAAE,SAAS,EAAE,MAAM;gBACtD,GAAG,CAAC,GAAG,EAAE,UAAU;oBACf,CAAC,CAAC;wBACI,aAAa,EAAE,GAAG,CAAC,UAAU,CAAC,SAAS;wBACvC,iBAAiB,EAAE,IAAI,CAAC,SAAS,CAAC,GAAG,CAAC,UAAU,CAAC,aAAa,CAAC;qBAClE;oBACH,CAAC,CAAC,EAAE,CAAC;gBACT,GAAG,QAAQ;aACd;SACJ,CAAC;QACF,GAAG,CAAC,GAAG,EAAE,QAAQ;YACb,CAAC,CAAC;gBACI,eAAe,EAAE,kBAAkB,C
AAC;oBAChC,aAAa,EAAE,EAAE,EAAE,MAAM,EAAE,QAAQ,EAAE,eAAe,EAAE,aAAa;oBACnE,UAAU,EAAE,GAAG,CAAC,QAAQ,EAAE,SAAS;oBACnC,SAAS,EAAE,mBAAmB;oBAC9B,YAAY,EAAE,uBAAuB;iBACxC,CAAC;gBACF,YAAY,EAAE,YAAY,CAAC;oBACvB,SAAS,EAAE,GAAG,CAAC,QAAQ,CAAC,SAAS;oBACjC,UAAU,EAAE,EAAE,EAAE,MAAM,EAAE,QAAQ,EAAE,eAAe,EAAE,GAAG;oBACtD,MAAM,EAAE,GAAG,CAAC,QAAQ,CAAC,aAAa;iBACrC,CAAC;aACL;YACH,CAAC,CAAC,EAAE,CAAC;KACZ,CAAA;AACL,CAAC,CAAA"} \ No newline at end of file diff --git a/lib/src/modules/route53.d.ts b/lib/src/modules/route53.d.ts index 5c84e91..ed2e161 100644 --- a/lib/src/modules/route53.d.ts +++ b/lib/src/modules/route53.d.ts @@ -1,11 +1,11 @@ -import { AWS, AWSColls } from '../constants'; +import { AWS } from '../types'; export declare const zone: ({ apex }: { apex?: string | undefined; }) => AWS; export declare const acm_certificate: ({ full_domain, tags }: { full_domain?: string | undefined; tags?: {} | undefined; -}) => AWSColls; +}) => AWS; export declare const acm_certificate_validation: ({ cert_arn, fqdns }: { cert_arn: any; fqdns: any; diff --git a/lib/src/modules/route53.d.ts.map b/lib/src/modules/route53.d.ts.map index 5b6cc57..1378620 100644 --- a/lib/src/modules/route53.d.ts.map +++ b/lib/src/modules/route53.d.ts.map @@ -1 +1 @@ -{"version":3,"file":"route53.d.ts","sourceRoot":"","sources":["../../../src/modules/route53.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,GAAG,EAAQ,QAAQ,EAAE,MAAM,cAAc,CAAA;AASlD,eAAO,MAAM,IAAI;;MAAqC,GAOpD,CAAA;AAEF,eAAO,MAAM,eAAe;;;MAAuD,QAuBjF,CAAA;AAEF,eAAO,MAAM,0BAA0B;;;MAA0B,GAO/D,CAAA;AAEF,UAAU,aAAa;IACnB,WAAW,EAAE,MAAM,CAAA;IACnB,eAAe,EAAE,MAAM,CAAA;IACvB,eAAe,CAAC,EAAE,MAAM,CAAA;IACxB,kBAAkB,CAAC,EAAE,MAAM,CAAA;IAC3B,IAAI,CAAC,EAAE,MAAM,CAAA;IACb,OAAO,CAAC,EAAE,MAAM,EAAE,CAAA;CACrB;AAED,eAAO,MAAM,cAAc,0FAOxB,aAAa,KAAG,GA2BlB,CAAA"} \ No newline at end of file 
+{"version":3,"file":"route53.d.ts","sourceRoot":"","sources":["../../../src/modules/route53.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,GAAG,EAAQ,MAAM,UAAU,CAAA;AASpC,eAAO,MAAM,IAAI;;MAAqC,GAOpD,CAAA;AAEF,eAAO,MAAM,eAAe;;;MAAuD,GAuBjF,CAAA;AAEF,eAAO,MAAM,0BAA0B;;;MAA0B,GAO/D,CAAA;AAEF,UAAU,aAAa;IACnB,WAAW,EAAE,MAAM,CAAA;IACnB,eAAe,EAAE,MAAM,CAAA;IACvB,eAAe,CAAC,EAAE,MAAM,CAAA;IACxB,kBAAkB,CAAC,EAAE,MAAM,CAAA;IAC3B,IAAI,CAAC,EAAE,MAAM,CAAA;IACb,OAAO,CAAC,EAAE,MAAM,EAAE,CAAA;CACrB;AAED,eAAO,MAAM,cAAc,0FAOxB,aAAa,KAAG,GA6BlB,CAAA"} \ No newline at end of file diff --git a/lib/src/modules/route53.js b/lib/src/modules/route53.js index 1bce34d..c5c0364 100644 --- a/lib/src/modules/route53.js +++ b/lib/src/modules/route53.js @@ -1,4 +1,4 @@ -import { flag } from '../constants'; +import { flag } from '../types'; // d8 /~~~~~~ _-~88e // 888-~\ e88~-_ 888 888 _d88__ e88~~8e / 888b // 888 d888 i 888 888 888 d888 88b `-~~88e __888" @@ -18,10 +18,6 @@ export const acm_certificate = ({ full_domain = 'chopshop-test.net', tags = {} } acm_certificate: { domain_name: full_domain, validation_method: 'DNS', - tags: { - ...flag, - ...tags, - }, // @ts-ignore -> terraform meta argument (not in docs) lifecycle: { create_before_destroy: true, @@ -33,6 +29,10 @@ export const acm_certificate = ({ full_domain = 'chopshop-test.net', tags = {} } resource_record_value: '-->*', }, ], + tags: { + ...flag, + ...tags, + }, arn: '-->', }, }, @@ -47,7 +47,8 @@ export const acm_certificate_validation = ({ cert_arn, fqdns }) => ({ }); export const route53_record = ({ full_domain, route53_zone_id, api_domain_name, api_hosted_zone_id, type = 'A', records = [], }) => { if (records.length && api_domain_name) { - console.error(`Error in route53_record:\n'records' and 'api_domain_name' are mutually exclusive`); + console.error('Error in route53_record:\n' + + "'records' and 'api_domain_name' ('alias') are mutually exclusive"); } return { resource: { @@ -62,7 +63,8 @@ export const route53_record = ({ 
full_domain, route53_zone_id, api_domain_name, ...((api_domain_name && { alias: { name: api_domain_name, - zone_id: api_hosted_zone_id, + // TODO? force error for missing exports by defaulting to 'null' + zone_id: api_hosted_zone_id || 'null', evaluate_target_health: false, }, }) || { ttl: 60 }), diff --git a/lib/src/modules/route53.js.map b/lib/src/modules/route53.js.map index 90fa6ed..d318579 100644 --- a/lib/src/modules/route53.js.map +++ b/lib/src/modules/route53.js.map @@ -1 +1 @@ -{"version":3,"file":"route53.js","sourceRoot":"","sources":["../../../src/modules/route53.ts"],"names":[],"mappings":"AAAA,OAAO,EAAO,IAAI,EAAY,MAAM,cAAc,CAAA;AAElD,6DAA6D;AAC7D,8DAA8D;AAC9D,8DAA8D;AAC9D,8DAA8D;AAC9D,8DAA8D;AAC9D,6DAA6D;AAE7D,MAAM,CAAC,MAAM,IAAI,GAAG,CAAC,EAAE,IAAI,GAAG,mBAAmB,EAAE,EAAO,EAAE,CAAC,CAAC;IAC1D,IAAI,EAAE;QACF,YAAY,EAAE;YACV,IAAI,EAAE,IAAI;YACV,OAAO,EAAE,KAAK;SACjB;KACJ;CACJ,CAAC,CAAA;AAEF,MAAM,CAAC,MAAM,eAAe,GAAG,CAAC,EAAE,WAAW,GAAG,mBAAmB,EAAE,IAAI,GAAG,EAAE,EAAE,EAAY,EAAE,CAAC,CAAC;IAC5F,QAAQ,EAAE;QACN,eAAe,EAAE;YACb,WAAW,EAAE,WAAW;YACxB,iBAAiB,EAAE,KAAK;YACxB,IAAI,EAAE;gBACF,GAAG,IAAI;gBACP,GAAG,IAAI;aACV;YACD,sDAAsD;YACtD,SAAS,EAAE;gBACP,qBAAqB,EAAE,IAAI;aAC9B;YACD,yBAAyB,EAAE;gBACvB;oBACI,oBAAoB,EAAE,MAAM;oBAC5B,oBAAoB,EAAE,MAAM;oBAC5B,qBAAqB,EAAE,MAAM;iBAChC;aACJ;YACD,GAAG,EAAE,KAAK;SACb;KACJ;CACJ,CAAC,CAAA;AAEF,MAAM,CAAC,MAAM,0BAA0B,GAAG,CAAC,EAAE,QAAQ,EAAE,KAAK,EAAE,EAAO,EAAE,CAAC,CAAC;IACrE,QAAQ,EAAE;QACN,0BAA0B,EAAE;YACxB,eAAe,EAAE,MAAM,QAAQ,EAAE;YACjC,uBAAuB,EAAE,KAAK;SACjC;KACJ;CACJ,CAAC,CAAA;AAWF,MAAM,CAAC,MAAM,cAAc,GAAG,CAAC,EAC3B,WAAW,EACX,eAAe,EACf,eAAe,EACf,kBAAkB,EAClB,IAAI,GAAG,GAAG,EACV,OAAO,GAAG,EAAE,GACA,EAAO,EAAE;IACrB,IAAI,OAAO,CAAC,MAAM,IAAI,eAAe,EAAE;QACnC,OAAO,CAAC,KAAK,CACT,kFAAkF,CACrF,CAAA;KACJ;IACD,OAAO;QACH,QAAQ,EAAE;YACN,cAAc,EAAE;gBACZ,IAAI,EAAE,WAAW;gBACjB,6BAA6B;gBAC7B,IAAI;gBACJ,OAAO,EAAE,eAAe;gBACxB,eAAe,EAAE,IAAI;gBACrB,GAAG,CAAC,CAAC,OAAO,CAAC,MAAM,IAAI,EAAE,OAAO,EAAE,CAAC,IAAI,EAAE,CAAC;gBAC1C,kBAAkB;gBAClB,GAAG,CA
AC,CAAC,eAAe,IAAI;oBACpB,KAAK,EAAE;wBACH,IAAI,EAAE,eAAe;wBACrB,OAAO,EAAE,kBAAkB;wBAC3B,sBAAsB,EAAE,KAAK;qBAChC;iBACJ,CAAC,IAAI,EAAE,GAAG,EAAE,EAAE,EAAE,CAAC;gBAClB,IAAI,EAAE,KAAK;aACd;SACJ;KACJ,CAAA;AACL,CAAC,CAAA"} \ No newline at end of file +{"version":3,"file":"route53.js","sourceRoot":"","sources":["../../../src/modules/route53.ts"],"names":[],"mappings":"AAAA,OAAO,EAAO,IAAI,EAAE,MAAM,UAAU,CAAA;AAEpC,6DAA6D;AAC7D,8DAA8D;AAC9D,8DAA8D;AAC9D,8DAA8D;AAC9D,8DAA8D;AAC9D,6DAA6D;AAE7D,MAAM,CAAC,MAAM,IAAI,GAAG,CAAC,EAAE,IAAI,GAAG,mBAAmB,EAAE,EAAO,EAAE,CAAC,CAAC;IAC1D,IAAI,EAAE;QACF,YAAY,EAAE;YACV,IAAI,EAAE,IAAI;YACV,OAAO,EAAE,KAAK;SACjB;KACJ;CACJ,CAAC,CAAA;AAEF,MAAM,CAAC,MAAM,eAAe,GAAG,CAAC,EAAE,WAAW,GAAG,mBAAmB,EAAE,IAAI,GAAG,EAAE,EAAE,EAAO,EAAE,CAAC,CAAC;IACvF,QAAQ,EAAE;QACN,eAAe,EAAE;YACb,WAAW,EAAE,WAAW;YACxB,iBAAiB,EAAE,KAAK;YACxB,sDAAsD;YACtD,SAAS,EAAE;gBACP,qBAAqB,EAAE,IAAI;aAC9B;YACD,yBAAyB,EAAE;gBACvB;oBACI,oBAAoB,EAAE,MAAM;oBAC5B,oBAAoB,EAAE,MAAM;oBAC5B,qBAAqB,EAAE,MAAM;iBAChC;aACJ;YACD,IAAI,EAAE;gBACF,GAAG,IAAI;gBACP,GAAG,IAAI;aACV;YACD,GAAG,EAAE,KAAK;SACb;KACJ;CACJ,CAAC,CAAA;AAEF,MAAM,CAAC,MAAM,0BAA0B,GAAG,CAAC,EAAE,QAAQ,EAAE,KAAK,EAAE,EAAO,EAAE,CAAC,CAAC;IACrE,QAAQ,EAAE;QACN,0BAA0B,EAAE;YACxB,eAAe,EAAE,MAAM,QAAQ,EAAE;YACjC,uBAAuB,EAAE,KAAK;SACjC;KACJ;CACJ,CAAC,CAAA;AAWF,MAAM,CAAC,MAAM,cAAc,GAAG,CAAC,EAC3B,WAAW,EACX,eAAe,EACf,eAAe,EACf,kBAAkB,EAClB,IAAI,GAAG,GAAG,EACV,OAAO,GAAG,EAAE,GACA,EAAO,EAAE;IACrB,IAAI,OAAO,CAAC,MAAM,IAAI,eAAe,EAAE;QACnC,OAAO,CAAC,KAAK,CACT,4BAA4B;YACxB,kEAAkE,CACzE,CAAA;KACJ;IACD,OAAO;QACH,QAAQ,EAAE;YACN,cAAc,EAAE;gBACZ,IAAI,EAAE,WAAW;gBACjB,6BAA6B;gBAC7B,IAAI;gBACJ,OAAO,EAAE,eAAe;gBACxB,eAAe,EAAE,IAAI;gBACrB,GAAG,CAAC,CAAC,OAAO,CAAC,MAAM,IAAI,EAAE,OAAO,EAAE,CAAC,IAAI,EAAE,CAAC;gBAC1C,kBAAkB;gBAClB,GAAG,CAAC,CAAC,eAAe,IAAI;oBACpB,KAAK,EAAE;wBACH,IAAI,EAAE,eAAe;wBACrB,gEAAgE;wBAChE,OAAO,EAAE,kBAAkB,IAAI,MAAM;wBACrC,sBAAsB,EAAE,KAAK;qBAChC;iBACJ,CAAC,IAAI,EAAE,GAAG,EAAE,EAAE,EAAE,CAAC;gBAClB,IAAI,EAAE,KAAK;aACd;SACJ;KACJ,CAAA;AACL,CAAC,C
AAA"} \ No newline at end of file diff --git a/lib/src/modules/s3.d.ts b/lib/src/modules/s3.d.ts index 1a14bf8..a7c0b3a 100644 --- a/lib/src/modules/s3.d.ts +++ b/lib/src/modules/s3.d.ts @@ -1 +1,13 @@ +import { AWS } from '../types'; +export declare const bucket_policy: ({ bucket_name, policy_json }: { + bucket_name: any; + policy_json: any; +}) => AWS; +export declare const bucket_cors: ({ bucket_name }: { + bucket_name: any; +}) => AWS; +export declare const bucket: ({ name, tags }: { + name: any; + tags?: {} | undefined; +}) => AWS; //# sourceMappingURL=s3.d.ts.map \ No newline at end of file diff --git a/lib/src/modules/s3.d.ts.map b/lib/src/modules/s3.d.ts.map index 0311774..1850235 100644 --- a/lib/src/modules/s3.d.ts.map +++ b/lib/src/modules/s3.d.ts.map @@ -1 +1 @@ -{"version":3,"file":"s3.d.ts","sourceRoot":"","sources":["../../../src/modules/s3.ts"],"names":[],"mappings":""} \ No newline at end of file +{"version":3,"file":"s3.d.ts","sourceRoot":"","sources":["../../../src/modules/s3.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,GAAG,EAAQ,MAAM,UAAU,CAAA;AAEpC,eAAO,MAAM,aAAa;;;MAAmC,GAO3D,CAAA;AAEF,eAAO,MAAM,WAAW;;MAAsB,GAa5C,CAAA;AACF,eAAO,MAAM,MAAM;;;MAA0B,GAW3C,CAAA"} \ No newline at end of file diff --git a/lib/src/modules/s3.js b/lib/src/modules/s3.js index 1509698..3eaccf9 100644 --- a/lib/src/modules/s3.js +++ b/lib/src/modules/s3.js @@ -1,2 +1,36 @@ -"use strict"; +import { flag } from '../types'; +export const bucket_policy = ({ bucket_name, policy_json }) => ({ + resource: { + s3_bucket_policy: { + bucket: bucket_name, + policy: policy_json, + }, + }, +}); +export const bucket_cors = ({ bucket_name }) => ({ + resource: { + s3_bucket_cors_configuration: { + bucket: bucket_name, + cors_rule: { + allowed_methods: ['POST', 'GET', 'HEAD', 'DELETE', 'PUT'], + allowed_origins: ['*'], + allowed_headers: ['*'], + expose_headers: ['ETag'], + max_age_seconds: 3000, + }, + }, + }, +}); +export const bucket = ({ name, tags = {} }) => ({ + resource: { + 
s3_bucket: { + bucket: `-->${name}-bucket`, + // @ts-ignore šŸ› FIXME: tick_group failure? + tags: { + ...flag, + ...tags, + }, + }, + }, +}); //# sourceMappingURL=s3.js.map \ No newline at end of file diff --git a/lib/src/modules/s3.js.map b/lib/src/modules/s3.js.map index 69bb505..d14f78a 100644 --- a/lib/src/modules/s3.js.map +++ b/lib/src/modules/s3.js.map @@ -1 +1 @@ -{"version":3,"file":"s3.js","sourceRoot":"","sources":["../../../src/modules/s3.ts"],"names":[],"mappings":""} \ No newline at end of file +{"version":3,"file":"s3.js","sourceRoot":"","sources":["../../../src/modules/s3.ts"],"names":[],"mappings":"AAAA,OAAO,EAAO,IAAI,EAAE,MAAM,UAAU,CAAA;AAEpC,MAAM,CAAC,MAAM,aAAa,GAAG,CAAC,EAAE,WAAW,EAAE,WAAW,EAAE,EAAO,EAAE,CAAC,CAAC;IACjE,QAAQ,EAAE;QACN,gBAAgB,EAAE;YACd,MAAM,EAAE,WAAW;YACnB,MAAM,EAAE,WAAW;SACtB;KACJ;CACJ,CAAC,CAAA;AAEF,MAAM,CAAC,MAAM,WAAW,GAAG,CAAC,EAAE,WAAW,EAAE,EAAO,EAAE,CAAC,CAAC;IAClD,QAAQ,EAAE;QACN,4BAA4B,EAAE;YAC1B,MAAM,EAAE,WAAW;YACnB,SAAS,EAAE;gBACP,eAAe,EAAE,CAAC,MAAM,EAAE,KAAK,EAAE,MAAM,EAAE,QAAQ,EAAE,KAAK,CAAC;gBACzD,eAAe,EAAE,CAAC,GAAG,CAAC;gBACtB,eAAe,EAAE,CAAC,GAAG,CAAC;gBACtB,cAAc,EAAE,CAAC,MAAM,CAAC;gBACxB,eAAe,EAAE,IAAI;aACxB;SACJ;KACJ;CACJ,CAAC,CAAA;AACF,MAAM,CAAC,MAAM,MAAM,GAAG,CAAC,EAAE,IAAI,EAAE,IAAI,GAAG,EAAE,EAAE,EAAO,EAAE,CAAC,CAAC;IACjD,QAAQ,EAAE;QACN,SAAS,EAAE;YACP,MAAM,EAAE,MAAM,IAAI,SAAS;YAC3B,2CAA2C;YAC3C,IAAI,EAAE;gBACF,GAAG,IAAI;gBACP,GAAG,IAAI;aACV;SACJ;KACJ;CACJ,CAAC,CAAA"} \ No newline at end of file diff --git a/lib/src/modules/sns.d.ts b/lib/src/modules/sns.d.ts index 628828d..3db8531 100644 --- a/lib/src/modules/sns.d.ts +++ b/lib/src/modules/sns.d.ts @@ -1,6 +1,12 @@ -import { AWS } from '../constants'; +import { AWS } from '../types'; export declare const topic: ({ name, tags }: { name: any; tags?: {} | undefined; }) => AWS; +export declare const subscription: ({ topic_arn, lambda_arn, filter, scope, }: { + topic_arn: any; + lambda_arn: any; + filter?: {} | undefined; + scope?: string | undefined; +}) => 
AWS; //# sourceMappingURL=sns.d.ts.map \ No newline at end of file diff --git a/lib/src/modules/sns.d.ts.map b/lib/src/modules/sns.d.ts.map index d15a092..94286e1 100644 --- a/lib/src/modules/sns.d.ts.map +++ b/lib/src/modules/sns.d.ts.map @@ -1 +1 @@ -{"version":3,"file":"sns.d.ts","sourceRoot":"","sources":["../../../src/modules/sns.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,GAAG,EAAQ,MAAM,cAAc,CAAA;AAExC,eAAO,MAAM,KAAK;;;MAA0B,GAW1C,CAAA"} \ No newline at end of file +{"version":3,"file":"sns.d.ts","sourceRoot":"","sources":["../../../src/modules/sns.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,GAAG,EAAQ,MAAM,UAAU,CAAA;AAEpC,eAAO,MAAM,KAAK;;;MAA0B,GAW1C,CAAA;AAEF,eAAO,MAAM,YAAY;;;;;MAKrB,GAYF,CAAA"} \ No newline at end of file diff --git a/lib/src/modules/sns.js b/lib/src/modules/sns.js index ff75c97..54d0fcf 100644 --- a/lib/src/modules/sns.js +++ b/lib/src/modules/sns.js @@ -1,4 +1,4 @@ -import { flag } from '../constants'; +import { flag } from '../types'; export const topic = ({ name, tags = {} }) => ({ resource: { sns_topic: { @@ -11,4 +11,17 @@ export const topic = ({ name, tags = {} }) => ({ }, }, }); +export const subscription = ({ topic_arn, lambda_arn, filter = {}, scope = 'MessageAttributes', }) => ({ + resource: { + // @ts-ignore: subscription_role_arn only needed if protocol == 'firehose' + sns_topic_subscription: { + topic_arn, + protocol: 'lambda', + endpoint: lambda_arn, + filter_policy: JSON.stringify(filter), + filter_policy_scope: scope, + arn: '-->', + }, + }, +}); //# sourceMappingURL=sns.js.map \ No newline at end of file diff --git a/lib/src/modules/sns.js.map b/lib/src/modules/sns.js.map index 36b7aba..2365607 100644 --- a/lib/src/modules/sns.js.map +++ b/lib/src/modules/sns.js.map @@ -1 +1 @@ 
-{"version":3,"file":"sns.js","sourceRoot":"","sources":["../../../src/modules/sns.ts"],"names":[],"mappings":"AAAA,OAAO,EAAO,IAAI,EAAE,MAAM,cAAc,CAAA;AAExC,MAAM,CAAC,MAAM,KAAK,GAAG,CAAC,EAAE,IAAI,EAAE,IAAI,GAAG,EAAE,EAAE,EAAO,EAAE,CAAC,CAAC;IAChD,QAAQ,EAAE;QACN,SAAS,EAAE;YACP,IAAI,EAAE,GAAG,IAAI,QAAQ;YACrB,IAAI,EAAE;gBACF,GAAG,IAAI;gBACP,GAAG,IAAI;aACV;YACD,GAAG,EAAE,KAAK;SACb;KACJ;CACJ,CAAC,CAAA"} \ No newline at end of file +{"version":3,"file":"sns.js","sourceRoot":"","sources":["../../../src/modules/sns.ts"],"names":[],"mappings":"AAAA,OAAO,EAAO,IAAI,EAAE,MAAM,UAAU,CAAA;AAEpC,MAAM,CAAC,MAAM,KAAK,GAAG,CAAC,EAAE,IAAI,EAAE,IAAI,GAAG,EAAE,EAAE,EAAO,EAAE,CAAC,CAAC;IAChD,QAAQ,EAAE;QACN,SAAS,EAAE;YACP,IAAI,EAAE,GAAG,IAAI,QAAQ;YACrB,IAAI,EAAE;gBACF,GAAG,IAAI;gBACP,GAAG,IAAI;aACV;YACD,GAAG,EAAE,KAAK;SACb;KACJ;CACJ,CAAC,CAAA;AAEF,MAAM,CAAC,MAAM,YAAY,GAAG,CAAC,EACzB,SAAS,EACT,UAAU,EACV,MAAM,GAAG,EAAE,EACX,KAAK,GAAG,mBAAmB,GAC9B,EAAO,EAAE,CAAC,CAAC;IACR,QAAQ,EAAE;QACN,0EAA0E;QAC1E,sBAAsB,EAAE;YACpB,SAAS;YACT,QAAQ,EAAE,QAAQ;YAClB,QAAQ,EAAE,UAAU;YACpB,aAAa,EAAE,IAAI,CAAC,SAAS,CAAC,MAAM,CAAC;YACrC,mBAAmB,EAAE,KAAK;YAC1B,GAAG,EAAE,KAAK;SACb;KACJ;CACJ,CAAC,CAAA"} \ No newline at end of file diff --git a/lib/src/types.d.ts b/lib/src/types.d.ts new file mode 100644 index 0000000..08333d1 --- /dev/null +++ b/lib/src/types.d.ts @@ -0,0 +1,68 @@ +import { AWS05200 } from 'registry'; +type AwsVersion = AWS05200; +export declare const flag: { + BroughtToYouBy: string; +}; +export type NestedObject = { + [key: string]: NestedObject; +}; +export interface Provider { + [key: string]: { + region: string; + profile?: string; + alias?: string; + }; +} +export interface Terraform { + required_providers: { + [key: string]: { + source: string; + version: string; + }; + }; +} +/** + * The following type customizations provide an example of how to modify a block + * to allow for Array values in addition to default interfaces... 
+ * + * reference blog [1] + */ +type Data = NonNullable; +type IamPolicyDoc = NonNullable; +export type Statement = NonNullable; +export interface Statements extends Statement { + [index: number]: Statement; +} +interface IamPolicyDocs extends IamPolicyDoc { + statement?: Statement | Statements; +} +export interface Datums extends Data { + iam_policy_document?: IamPolicyDocs; +} +type Resource = NonNullable; +type AcmCertificate = NonNullable; +type DomainValidationOptions = NonNullable; +interface ValidationOptions extends DomainValidationOptions { + [index: number]: DomainValidationOptions; +} +interface AcmCertificates extends AcmCertificate { + domain_validation_options?: DomainValidationOptions | ValidationOptions; +} +type ApiGw2DomainName = NonNullable; +type DomainNameConfiguration = NonNullable; +interface DomainNameConfigurations extends DomainNameConfiguration { + [index: number]: DomainNameConfiguration; +} +interface ApiGw2DomainNames extends ApiGw2DomainName { + domain_name_configuration: DomainNameConfigurations | DomainNameConfiguration; +} +export interface Resources extends Resource { + acm_certificate?: AcmCertificates; + apigatewayv2_domain_name?: ApiGw2DomainNames; +} +export interface AWS extends AwsVersion { + data?: Datums; + resource?: Resources; +} +export {}; +//# sourceMappingURL=types.d.ts.map \ No newline at end of file diff --git a/lib/src/types.d.ts.map b/lib/src/types.d.ts.map new file mode 100644 index 0000000..236abd5 --- /dev/null +++ b/lib/src/types.d.ts.map @@ -0,0 +1 @@ 
+{"version":3,"file":"types.d.ts","sourceRoot":"","sources":["../../src/types.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,QAAQ,EAAY,MAAM,UAAU,CAAA;AAC7C,KAAK,UAAU,GAAG,QAAQ,CAAA;AAE1B,eAAO,MAAM,IAAI;;CAAkC,CAAA;AAEnD,MAAM,MAAM,YAAY,GAAG;IAAE,CAAC,GAAG,EAAE,MAAM,GAAG,YAAY,CAAA;CAAE,CAAA;AAE1D,MAAM,WAAW,QAAQ;IACrB,CAAC,GAAG,EAAE,MAAM,GAAG;QACX,MAAM,EAAE,MAAM,CAAA;QACd,OAAO,CAAC,EAAE,MAAM,CAAA;QAChB,KAAK,CAAC,EAAE,MAAM,CAAA;KACjB,CAAA;CACJ;AAED,MAAM,WAAW,SAAS;IACtB,kBAAkB,EAAE;QAChB,CAAC,GAAG,EAAE,MAAM,GAAG;YACX,MAAM,EAAE,MAAM,CAAA;YACd,OAAO,EAAE,MAAM,CAAA;SAClB,CAAA;KACJ,CAAA;CACJ;AASD;;;;;GAKG;AACH,KAAK,IAAI,GAAG,WAAW,CAAC,UAAU,CAAC,MAAM,CAAC,CAAC,CAAA;AAG3C,KAAK,YAAY,GAAG,WAAW,CAAC,IAAI,CAAC,qBAAqB,CAAC,CAAC,CAAA;AAC5D,MAAM,MAAM,SAAS,GAAG,WAAW,CAAC,YAAY,CAAC,WAAW,CAAC,CAAC,CAAA;AAC9D,MAAM,WAAW,UAAW,SAAQ,SAAS;IACzC,CAAC,KAAK,EAAE,MAAM,GAAG,SAAS,CAAA;CAC7B;AACD,UAAU,aAAc,SAAQ,YAAY;IACxC,SAAS,CAAC,EAAE,SAAS,GAAG,UAAU,CAAA;CACrC;AAED,MAAM,WAAW,MAAO,SAAQ,IAAI;IAChC,mBAAmB,CAAC,EAAE,aAAa,CAAA;CACtC;AAQD,KAAK,QAAQ,GAAG,WAAW,CAAC,UAAU,CAAC,UAAU,CAAC,CAAC,CAAA;AAGnD,KAAK,cAAc,GAAG,WAAW,CAAC,QAAQ,CAAC,iBAAiB,CAAC,CAAC,CAAA;AAC9D,KAAK,uBAAuB,GAAG,WAAW,CAAC,cAAc,CAAC,2BAA2B,CAAC,CAAC,CAAA;AACvF,UAAU,iBAAkB,SAAQ,uBAAuB;IACvD,CAAC,KAAK,EAAE,MAAM,GAAG,uBAAuB,CAAA;CAC3C;AACD,UAAU,eAAgB,SAAQ,cAAc;IAC5C,yBAAyB,CAAC,EAAE,uBAAuB,GAAG,iBAAiB,CAAA;CAC1E;AAGD,KAAK,gBAAgB,GAAG,WAAW,CAAC,QAAQ,CAAC,0BAA0B,CAAC,CAAC,CAAA;AACzE,KAAK,uBAAuB,GAAG,WAAW,CAAC,gBAAgB,CAAC,2BAA2B,CAAC,CAAC,CAAA;AACzF,UAAU,wBAAyB,SAAQ,uBAAuB;IAC9D,CAAC,KAAK,EAAE,MAAM,GAAG,uBAAuB,CAAA;CAC3C;AACD,UAAU,iBAAkB,SAAQ,gBAAgB;IAChD,yBAAyB,EAAE,wBAAwB,GAAG,uBAAuB,CAAA;CAChF;AAED,MAAM,WAAW,SAAU,SAAQ,QAAQ;IACvC,eAAe,CAAC,EAAE,eAAe,CAAA;IACjC,wBAAwB,CAAC,EAAE,iBAAiB,CAAA;CAC/C;AASD,MAAM,WAAW,GAAI,SAAQ,UAAU;IACnC,IAAI,CAAC,EAAE,MAAM,CAAA;IACb,QAAQ,CAAC,EAAE,SAAS,CAAA;CACvB"} \ No newline at end of file diff --git a/lib/src/types.js b/lib/src/types.js new file mode 100644 index 0000000..b651d4f --- /dev/null +++ b/lib/src/types.js 
@@ -0,0 +1,2 @@ +export const flag = { BroughtToYouBy: '@-0/micro' }; +//# sourceMappingURL=types.js.map \ No newline at end of file diff --git a/lib/src/types.js.map b/lib/src/types.js.map new file mode 100644 index 0000000..19508ac --- /dev/null +++ b/lib/src/types.js.map @@ -0,0 +1 @@ +{"version":3,"file":"types.js","sourceRoot":"","sources":["../../src/types.ts"],"names":[],"mappings":"AAGA,MAAM,CAAC,MAAM,IAAI,GAAG,EAAE,cAAc,EAAE,WAAW,EAAE,CAAA"} \ No newline at end of file diff --git a/lib/src/utils/checks.d.ts b/lib/src/utils/checks.d.ts new file mode 100644 index 0000000..4f6c512 --- /dev/null +++ b/lib/src/utils/checks.d.ts @@ -0,0 +1,4 @@ +export declare const isFile: (path: string) => boolean; +export declare const isEmpty: (x: any) => boolean; +export declare const cleanNullEntries: (obj: any) => {}; +//# sourceMappingURL=checks.d.ts.map \ No newline at end of file diff --git a/lib/src/utils/checks.d.ts.map b/lib/src/utils/checks.d.ts.map new file mode 100644 index 0000000..b50725f --- /dev/null +++ b/lib/src/utils/checks.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"checks.d.ts","sourceRoot":"","sources":["../../../src/utils/checks.ts"],"names":[],"mappings":"AACA,eAAO,MAAM,MAAM,SAAU,MAAM,YAIlC,CAAA;AACD,eAAO,MAAM,OAAO,MAAO,GAAG,YACgE,CAAA;AAE9F,eAAO,MAAM,gBAAgB,QAAS,GAAG,OAQ/B,CAAA"} \ No newline at end of file diff --git a/lib/src/utils/checks.js b/lib/src/utils/checks.js new file mode 100644 index 0000000..06b862a --- /dev/null +++ b/lib/src/utils/checks.js @@ -0,0 +1,17 @@ +import { isArray, isPlainObject } from '@thi.ng/checks'; +export const isFile = (path) => { + const parts = path.split('/'); + const [last] = parts.slice(-1); + return last.includes('.'); +}; +export const isEmpty = (x) => isPlainObject(x) && !Object.keys(x).length ? true : isArray(x) && !x.length ? 
true : false; +export const cleanNullEntries = (obj) => Object.entries(obj).reduce((a, c) => { + const [k, v] = c; + if (v === null || v === undefined) { + return a; + } + else { + return { ...a, [k]: v }; + } +}, {}); +//# sourceMappingURL=checks.js.map \ No newline at end of file diff --git a/lib/src/utils/checks.js.map b/lib/src/utils/checks.js.map new file mode 100644 index 0000000..c16ad7e --- /dev/null +++ b/lib/src/utils/checks.js.map @@ -0,0 +1 @@ +{"version":3,"file":"checks.js","sourceRoot":"","sources":["../../../src/utils/checks.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,OAAO,EAAE,aAAa,EAAE,MAAM,gBAAgB,CAAA;AACvD,MAAM,CAAC,MAAM,MAAM,GAAG,CAAC,IAAY,EAAE,EAAE;IACnC,MAAM,KAAK,GAAG,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,CAAA;IAC7B,MAAM,CAAC,IAAI,CAAC,GAAG,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAA;IAC9B,OAAO,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAA;AAC7B,CAAC,CAAA;AACD,MAAM,CAAC,MAAM,OAAO,GAAG,CAAC,CAAM,EAAE,EAAE,CAC9B,aAAa,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,KAAK,CAAA;AAE9F,MAAM,CAAC,MAAM,gBAAgB,GAAG,CAAC,GAAQ,EAAE,EAAE,CACzC,MAAM,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE;IAChC,MAAM,CAAC,CAAC,EAAE,CAAC,CAAC,GAAG,CAAC,CAAA;IAChB,IAAI,CAAC,KAAK,IAAI,IAAI,CAAC,KAAK,SAAS,EAAE;QAC/B,OAAO,CAAC,CAAA;KACX;SAAM;QACH,OAAO,EAAE,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,CAAA;KAC1B;AACL,CAAC,EAAE,EAAE,CAAC,CAAA"} \ No newline at end of file diff --git a/lib/src/utils/index.d.ts b/lib/src/utils/index.d.ts new file mode 100644 index 0000000..8bde3e7 --- /dev/null +++ b/lib/src/utils/index.d.ts @@ -0,0 +1,2 @@ +export { isFile, isEmpty, cleanNullEntries } from './checks'; +//# sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git a/lib/src/utils/index.d.ts.map b/lib/src/utils/index.d.ts.map new file mode 100644 index 0000000..e1cc88d --- /dev/null +++ b/lib/src/utils/index.d.ts.map @@ -0,0 +1 @@ 
+{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/utils/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,MAAM,EAAE,OAAO,EAAE,gBAAgB,EAAE,MAAM,UAAU,CAAA"} \ No newline at end of file diff --git a/lib/src/utils/index.js b/lib/src/utils/index.js new file mode 100644 index 0000000..5e704ce --- /dev/null +++ b/lib/src/utils/index.js @@ -0,0 +1,842 @@ +export { isFile, isEmpty, cleanNullEntries } from './checks'; +/* + +variable "create_ecr_repo" { + description = "Controls whether ECR repository for Lambda image should be created" + type = bool + default = false +} + +variable "ecr_address" { + description = "Address of ECR repository for cross-account container image pulling (optional). Option `create_ecr_repo` must be `false`" + type = string + default = null +} + +variable "ecr_repo" { + description = "Name of ECR repository to use or to create" + type = string + default = null +} + +variable "image_tag" { + description = "Image tag to use. If not specified current timestamp in format 'YYYYMMDDhhmmss' will be used. This can lead to unnecessary rebuilds." + type = string + default = null +} + +variable "source_path" { + description = "Path to folder containing application code" + type = string + default = null +} + +variable "docker_file_path" { + description = "Path to Dockerfile in source package" + type = string + default = "Dockerfile" +} + + +variable "image_tag_mutability" { + description = "The tag mutability setting for the repository. Must be one of: `MUTABLE` or `IMMUTABLE`" + type = string + default = "MUTABLE" +} + +variable "scan_on_push" { + description = "Indicates whether images are scanned after being pushed to the repository" + type = bool + default = false +} + +variable "ecr_force_delete" { + description = "If true, will delete the repository even if it contains images." 
+ default = true + type = bool +} + +variable "ecr_repo_tags" { + description = "A map of tags to assign to ECR repository" + type = map(string) + default = {} +} + +variable "build_args" { + description = "A map of Docker build arguments." + type = map(string) + default = {} +} + +variable "ecr_repo_lifecycle_policy" { + description = "A JSON formatted ECR lifecycle policy to automate the cleaning up of unused images." + type = string + default = null +} + +variable "keep_remotely" { + description = "Whether to keep Docker image in the remote registry on destroy operation." + type = bool + default = false +} + +variable "platform" { + description = "The target architecture platform to build the image for." + type = string + default = null +} + +########### +# Root +########### + +variable "create" { + description = "Controls whether resources should be created" + type = bool + default = true +} + +variable "create_package" { + description = "Controls whether Lambda package should be created" + type = bool + default = true +} + +variable "create_function" { + description = "Controls whether Lambda Function resource should be created" + type = bool + default = true +} + +variable "create_layer" { + description = "Controls whether Lambda Layer resource should be created" + type = bool + default = false +} + +variable "create_role" { + description = "Controls whether IAM role for Lambda Function should be created" + type = bool + default = true +} + +variable "create_lambda_function_url" { + description = "Controls whether the Lambda Function URL resource should be created" + type = bool + default = false +} + +variable "create_sam_metadata" { + description = "Controls whether the SAM metadata null resource should be created" + type = bool + default = false +} + +variable "putin_khuylo" { + description = "Do you agree that Putin doesn't respect Ukrainian sovereignty and territorial integrity? More info: https://en.wikipedia.org/wiki/Putin_khuylo!" 
+ type = bool + default = true +} + +########### +# Function +########### + +variable "lambda_at_edge" { + description = "Set this to true if using Lambda@Edge, to enable publishing, limit the timeout, and allow edgelambda.amazonaws.com to invoke the function" + type = bool + default = false +} + +variable "lambda_at_edge_logs_all_regions" { + description = "Whether to specify a wildcard in IAM policy used by Lambda@Edge to allow logging in all regions" + type = bool + default = true +} + +variable "function_name" { + description = "A unique name for your Lambda Function" + type = string + default = "" +} + +variable "handler" { + description = "Lambda Function entrypoint in your code" + type = string + default = "" +} + +variable "runtime" { + description = "Lambda Function runtime" + type = string + default = "" +} + +variable "lambda_role" { + description = " IAM role ARN attached to the Lambda Function. This governs both who / what can invoke your Lambda Function, as well as what resources our Lambda Function has access to. See Lambda Permission Model for more details." + type = string + default = "" +} + +variable "description" { + description = "Description of your Lambda Function (or Layer)" + type = string + default = "" +} + +variable "code_signing_config_arn" { + description = "Amazon Resource Name (ARN) for a Code Signing Configuration" + type = string + default = null +} + +variable "layers" { + description = "List of Lambda Layer Version ARNs (maximum of 5) to attach to your Lambda Function." + type = list(string) + default = null +} + +variable "architectures" { + description = "Instruction set architecture for your Lambda function. Valid values are [\"x86_64\"] and [\"arm64\"]." 
+ type = list(string) + default = null +} + +variable "kms_key_arn" { + description = "The ARN of KMS key to use by your Lambda Function" + type = string + default = null +} + +variable "memory_size" { + description = "Amount of memory in MB your Lambda Function can use at runtime. Valid value between 128 MB to 10,240 MB (10 GB), in 64 MB increments." + type = number + default = 128 +} + +variable "ephemeral_storage_size" { + description = "Amount of ephemeral storage (/tmp) in MB your Lambda Function can use at runtime. Valid value between 512 MB to 10,240 MB (10 GB)." + type = number + default = 512 +} + +variable "publish" { + description = "Whether to publish creation/change as new Lambda Function Version." + type = bool + default = false +} + +variable "reserved_concurrent_executions" { + description = "The amount of reserved concurrent executions for this Lambda Function. A value of 0 disables Lambda Function from being triggered and -1 removes any concurrency limitations. Defaults to Unreserved Concurrency Limits -1." + type = number + default = -1 +} + +variable "timeout" { + description = "The amount of time your Lambda Function has to run in seconds." + type = number + default = 3 +} + +variable "dead_letter_target_arn" { + description = "The ARN of an SNS topic or SQS queue to notify when an invocation fails." + type = string + default = null +} + +variable "environment_variables" { + description = "A map that defines environment variables for the Lambda Function." + type = map(string) + default = {} +} + +variable "tracing_mode" { + description = "Tracing mode of the Lambda Function. Valid value can be either PassThrough or Active." + type = string + default = null +} + +variable "vpc_subnet_ids" { + description = "List of subnet ids when Lambda Function should run in the VPC. Usually private or intra subnets." 
+ type = list(string) + default = null +} + +variable "vpc_security_group_ids" { + description = "List of security group ids when Lambda Function should run in the VPC." + type = list(string) + default = null +} + +variable "tags" { + description = "A map of tags to assign to resources." + type = map(string) + default = {} +} + +variable "s3_object_tags" { + description = "A map of tags to assign to S3 bucket object." + type = map(string) + default = {} +} + +variable "s3_object_tags_only" { + description = "Set to true to not merge tags with s3_object_tags. Useful to avoid breaching S3 Object 10 tag limit." + type = bool + default = false +} + +variable "package_type" { + description = "The Lambda deployment package type. Valid options: Zip or Image" + type = string + default = "Zip" +} + +variable "image_uri" { + description = "The ECR image URI containing the function's deployment package." + type = string + default = null +} + +variable "image_config_entry_point" { + description = "The ENTRYPOINT for the docker image" + type = list(string) + default = [] + +} +variable "image_config_command" { + description = "The CMD for the docker image" + type = list(string) + default = [] +} + +variable "image_config_working_directory" { + description = "The working directory for the docker image" + type = string + default = null +} + +variable "snap_start" { + description = "(Optional) Snap start settings for low-latency startups" + type = bool + default = false +} + +variable "replace_security_groups_on_destroy" { + description = "(Optional) When true, all security groups defined in vpc_security_group_ids will be replaced with the default security group after the function is destroyed. Set the replacement_security_group_ids variable to use a custom list of security groups for replacement instead." 
+ type = bool + default = null +} + +variable "replacement_security_group_ids" { + description = "(Optional) List of security group IDs to assign to orphaned Lambda function network interfaces upon destruction. replace_security_groups_on_destroy must be set to true to use this attribute." + type = list(string) + default = null +} + +variable "timeouts" { + description = "Define maximum timeout for creating, updating, and deleting Lambda Function resources" + type = map(string) + default = {} +} + +############### +# Function URL +############### + +variable "create_unqualified_alias_lambda_function_url" { + description = "Whether to use unqualified alias pointing to $LATEST version in Lambda Function URL" + type = bool + default = true +} + +variable "authorization_type" { + description = "The type of authentication that the Lambda Function URL uses. Set to 'AWS_IAM' to restrict access to authenticated IAM users only. Set to 'NONE' to bypass IAM authentication and create a public endpoint." + type = string + default = "NONE" +} + +variable "cors" { + description = "CORS settings to be used by the Lambda Function URL" + type = any + default = {} +} + +variable "invoke_mode" { + description = "Invoke mode of the Lambda Function URL. Valid values are BUFFERED (default) and RESPONSE_STREAM." + type = string + default = null +} + +######## +# Layer +######## + +variable "layer_name" { + description = "Name of Lambda Layer to create" + type = string + default = "" +} + +variable "layer_skip_destroy" { + description = "Whether to retain the old version of a previously deployed Lambda Layer." + type = bool + default = false +} + +variable "license_info" { + description = "License info for your Lambda Layer. Eg, MIT or full url of a license." + type = string + default = "" +} + +variable "compatible_runtimes" { + description = "A list of Runtimes this layer is compatible with. Up to 5 runtimes can be specified." 
+ type = list(string) + default = [] +} + +variable "compatible_architectures" { + description = "A list of Architectures Lambda layer is compatible with. Currently x86_64 and arm64 can be specified." + type = list(string) + default = null +} + +############################ +# Lambda Async Event Config +############################ + +variable "create_async_event_config" { + description = "Controls whether async event configuration for Lambda Function/Alias should be created" + type = bool + default = false +} + +variable "create_current_version_async_event_config" { + description = "Whether to allow async event configuration on current version of Lambda Function (this will revoke permissions from previous version because Terraform manages only current resources)" + type = bool + default = true +} + +variable "create_unqualified_alias_async_event_config" { + description = "Whether to allow async event configuration on unqualified alias pointing to $LATEST version" + type = bool + default = true +} + +variable "maximum_event_age_in_seconds" { + description = "Maximum age of a request that Lambda sends to a function for processing in seconds. Valid values between 60 and 21600." + type = number + default = null +} + +variable "maximum_retry_attempts" { + description = "Maximum number of times to retry when the function returns an error. Valid values between 0 and 2. Defaults to 2." + type = number + default = null +} + +variable "destination_on_failure" { + description = "Amazon Resource Name (ARN) of the destination resource for failed asynchronous invocations" + type = string + default = null +} + +variable "destination_on_success" { + description = "Amazon Resource Name (ARN) of the destination resource for successful asynchronous invocations" + type = string + default = null +} + +########################## +# Provisioned Concurrency +########################## + +variable "provisioned_concurrent_executions" { + description = "Amount of capacity to allocate. 
Set to 1 or greater to enable, or set to 0 to disable provisioned concurrency." + type = number + default = -1 +} + +############################################ +# Lambda Permissions (for allowed triggers) +############################################ + +variable "create_current_version_allowed_triggers" { + description = "Whether to allow triggers on current version of Lambda Function (this will revoke permissions from previous version because Terraform manages only current resources)" + type = bool + default = true +} + +variable "create_unqualified_alias_allowed_triggers" { + description = "Whether to allow triggers on unqualified alias pointing to $LATEST version" + type = bool + default = true +} + +variable "allowed_triggers" { + description = "Map of allowed triggers to create Lambda permissions" + type = map(any) + default = {} +} + +############################################ +# Lambda Event Source Mapping +############################################ + +variable "event_source_mapping" { + description = "Map of event source mapping" + type = any + default = {} +} + +################# +# CloudWatch Logs +################# + +variable "use_existing_cloudwatch_log_group" { + description = "Whether to use an existing CloudWatch log group or create new" + type = bool + default = false +} + +variable "cloudwatch_logs_retention_in_days" { + description = "Specifies the number of days you want to retain log events in the specified log group. Possible values are: 1, 3, 5, 7, 14, 30, 60, 90, 120, 150, 180, 365, 400, 545, 731, 1827, and 3653." + type = number + default = null +} + +variable "cloudwatch_logs_kms_key_id" { + description = "The ARN of the KMS Key to use when encrypting log data." + type = string + default = null +} + +variable "cloudwatch_logs_tags" { + description = "A map of tags to assign to the resource." 
+ type = map(string) + default = {} +} + +###### +# IAM +###### + +variable "role_name" { + description = "Name of IAM role to use for Lambda Function" + type = string + default = null +} + +variable "role_description" { + description = "Description of IAM role to use for Lambda Function" + type = string + default = null +} + +variable "role_path" { + description = "Path of IAM role to use for Lambda Function" + type = string + default = null +} + +variable "role_force_detach_policies" { + description = "Specifies to force detaching any policies the IAM role has before destroying it." + type = bool + default = true +} + +variable "role_permissions_boundary" { + description = "The ARN of the policy that is used to set the permissions boundary for the IAM role used by Lambda Function" + type = string + default = null +} + +variable "role_tags" { + description = "A map of tags to assign to IAM role" + type = map(string) + default = {} +} + +variable "role_maximum_session_duration" { + description = "Maximum session duration, in seconds, for the IAM role" + type = number + default = 3600 +} + +########### +# Policies +########### + +variable "policy_name" { + description = "IAM policy name. 
It overrides the default value, which is the same as role_name"
+  type        = string
+  default     = null
+}
+
+variable "attach_cloudwatch_logs_policy" {
+  description = "Controls whether CloudWatch Logs policy should be added to IAM role for Lambda Function"
+  type        = bool
+  default     = true
+}
+
+variable "attach_dead_letter_policy" {
+  description = "Controls whether SNS/SQS dead letter notification policy should be added to IAM role for Lambda Function"
+  type        = bool
+  default     = false
+}
+
+variable "attach_network_policy" {
+  description = "Controls whether VPC/network policy should be added to IAM role for Lambda Function"
+  type        = bool
+  default     = false
+}
+
+variable "attach_tracing_policy" {
+  description = "Controls whether X-Ray tracing policy should be added to IAM role for Lambda Function"
+  type        = bool
+  default     = false
+}
+
+variable "attach_async_event_policy" {
+  description = "Controls whether async event policy should be added to IAM role for Lambda Function"
+  type        = bool
+  default     = false
+}
+
+variable "attach_policy_json" {
+  description = "Controls whether policy_json should be added to IAM role for Lambda Function"
+  type        = bool
+  default     = false
+}
+
+variable "attach_policy_jsons" {
+  description = "Controls whether policy_jsons should be added to IAM role for Lambda Function"
+  type        = bool
+  default     = false
+}
+
+variable "attach_policy" {
+  description = "Controls whether policy should be added to IAM role for Lambda Function"
+  type        = bool
+  default     = false
+}
+
+variable "attach_policies" {
+  description = "Controls whether list of policies should be added to IAM role for Lambda Function"
+  type        = bool
+  default     = false
+}
+
+variable "policy_path" {
+  description = "Path of policies that should be added to IAM role for Lambda Function"
+  type        = string
+  default     = null
+}
+
+variable "number_of_policy_jsons" {
+  description = "Number of policies JSON to attach to IAM role for Lambda Function"
+  type        = number
+  default     = 0
+}
+
+variable 
"number_of_policies" { + description = "Number of policies to attach to IAM role for Lambda Function" + type = number + default = 0 +} + +variable "attach_policy_statements" { + description = "Controls whether policy_statements should be added to IAM role for Lambda Function" + type = bool + default = false +} + +variable "trusted_entities" { + description = "List of additional trusted entities for assuming Lambda Function role (trust relationship)" + type = any + default = [] +} + +variable "assume_role_policy_statements" { + description = "Map of dynamic policy statements for assuming Lambda Function role (trust relationship)" + type = any + default = {} +} + +variable "policy_json" { + description = "An additional policy document as JSON to attach to the Lambda Function role" + type = string + default = null +} + +variable "policy_jsons" { + description = "List of additional policy documents as JSON to attach to Lambda Function role" + type = list(string) + default = [] +} + +variable "policy" { + description = "An additional policy document ARN to attach to the Lambda Function role" + type = string + default = null +} + +variable "policies" { + description = "List of policy statements ARN to attach to Lambda Function role" + type = list(string) + default = [] +} + +variable "policy_statements" { + description = "Map of dynamic policy statements to attach to Lambda Function role" + type = any + default = {} +} + +variable "file_system_arn" { + description = "The Amazon Resource Name (ARN) of the Amazon EFS Access Point that provides access to the file system." + type = string + default = null +} + +variable "file_system_local_mount_path" { + description = "The path where the function can access the file system, starting with /mnt/." 
+ type = string + default = null +} + +########################## +# Build artifact settings +########################## + +variable "artifacts_dir" { + description = "Directory name where artifacts should be stored" + type = string + default = "builds" +} + +variable "s3_prefix" { + description = "Directory name where artifacts should be stored in the S3 bucket. If unset, the path from `artifacts_dir` is used" + type = string + default = null +} + +variable "ignore_source_code_hash" { + description = "Whether to ignore changes to the function's source code hash. Set to true if you manage infrastructure and code deployments separately." + type = bool + default = false +} + +variable "local_existing_package" { + description = "The absolute path to an existing zip-file to use" + type = string + default = null +} + +variable "s3_existing_package" { + description = "The S3 bucket object with keys bucket, key, version pointing to an existing zip-file to use" + type = map(string) + default = null +} + +variable "store_on_s3" { + description = "Whether to store produced artifacts on S3 or locally." + type = bool + default = false +} + +variable "s3_object_storage_class" { + description = "Specifies the desired Storage Class for the artifact uploaded to S3. Can be either STANDARD, REDUCED_REDUNDANCY, ONEZONE_IA, INTELLIGENT_TIERING, or STANDARD_IA." + type = string + default = "ONEZONE_IA" # Cheaper than STANDARD and it is enough for Lambda deployments +} + +variable "s3_bucket" { + description = "S3 bucket to store artifacts" + type = string + default = null +} + +variable "s3_acl" { + description = "The canned ACL to apply. Valid values are private, public-read, public-read-write, aws-exec-read, authenticated-read, bucket-owner-read, and bucket-owner-full-control. Defaults to private." + type = string + default = "private" +} + +variable "s3_server_side_encryption" { + description = "Specifies server-side encryption of the object in S3. 
Valid values are \"AES256\" and \"aws:kms\"." + type = string + default = null +} + +variable "source_path" { + description = "The absolute path to a local file or directory containing your Lambda source code" + type = any # string | list(string | map(any)) + default = null +} + +variable "hash_extra" { + description = "The string to add into hashing function. Useful when building same source path for different functions." + type = string + default = "" +} + +variable "build_in_docker" { + description = "Whether to build dependencies in Docker" + type = bool + default = false +} + +# Docker options + +variable "docker_file" { + description = "Path to a Dockerfile when building in Docker" + type = string + default = "" +} + +variable "docker_build_root" { + description = "Root dir where to build in Docker" + type = string + default = "" +} + +variable "docker_image" { + description = "Docker image to use for the build" + type = string + default = "" +} + +variable "docker_with_ssh_agent" { + description = "Whether to pass SSH_AUTH_SOCK into docker environment or not" + type = bool + default = false +} + +variable "docker_pip_cache" { + description = "Whether to mount a shared pip cache folder into docker environment or not" + type = any + default = null +} + +variable "docker_additional_options" { + description = "Additional options to pass to the docker run command (e.g. 
to set environment variables, volumes, etc.)" + type = list(string) + default = [] +} + +variable "docker_entrypoint" { + description = "Path to the Docker entrypoint to use" + type = string + default = null +} + +variable "recreate_missing_package" { + description = "Whether to recreate missing Lambda package if it is missing locally or not" + type = bool + default = true +} + +*/ +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/lib/src/utils/index.js.map b/lib/src/utils/index.js.map new file mode 100644 index 0000000..fb47a43 --- /dev/null +++ b/lib/src/utils/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../../src/utils/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,MAAM,EAAE,OAAO,EAAE,gBAAgB,EAAE,MAAM,UAAU,CAAA;AAE5D;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;EAu0BE"} \ No newline at end of file diff --git a/main.tf.json b/main.tf.json index 4aa1fa8..685b0a5 100644 --- a/main.tf.json +++ b/main.tf.json @@ -6,7 +6,7 @@ } }, "aws_iam_policy_document": { - "ms1_iam_policy_doc": { + "ms1_lambda_creds": { "statement": { "effect": "Allow", "actions": [ @@ -102,6 +102,7 @@ "aws_iam_role": { "ms1_lambda_role": { 
"name": "throwaway-test-123-role", + "assume_role_policy": "${data.aws_iam_policy_document.ms1_lambda_creds.json}", "tags": { "BroughtToYouBy": "@-0/micro", "Moms": "Spaghetti" @@ -343,7 +344,15 @@ "required_providers": { "aws": { "source": "hashicorp/aws", - "version": "5.20.0" + "version": ">= 5.20" + }, + "docker": { + "source": "kreuzwerker/docker", + "version": ">= 3.0" + }, + "null": { + "source": "hashicorp/null", + "version": ">= 2.0" } } }, diff --git a/repl/module.ts b/repl/module.ts index 049fa73..cb6ae9b 100644 --- a/repl/module.ts +++ b/repl/module.ts @@ -1,4 +1,4 @@ -import { modulate, config, Provider, Terraform } from '../src/config' +import { modulate, config, Provider, Terraform } from '../src' import { AWS05200 as AWS } from '../registry/index' // https://dev.to/madflanderz/how-to-get-parts-of-an-typescript-interface-3mko diff --git a/src/config.ts b/src/config.ts index 96a0eba..76ae6e5 100644 --- a/src/config.ts +++ b/src/config.ts @@ -1,6 +1,7 @@ import { Provider, Terraform, NestedObject } from './types' -import { writeFileSync } from 'fs' import { isPlainObject, isArray, isString } from '@thi.ng/checks' +import { isEmpty } from './utils/index' +import { writeFileSync } from 'fs' // regex that replaces a number surrounded by periods .0. with a number surrounded by brackets [0] const bracketRegex = /\.\d+\./g @@ -14,43 +15,42 @@ const bracketify = (str: string) => str.replace(bracketRegex, (match) => `[${mat * recursively */ const exporter = ( - obj: object, + target: object, scoped: string, pivot: string, type: string, path: string[] | any = [] -): NestedObject => - Object.entries(obj).reduce((a, c) => { - const [k, v] = c - const basePath = `${pivot}.${type}.${scoped}` - const accessPath = path.length ? path.join('.') + '.' : '' - const access = `\${${basePath}.${accessPath}${k}}` +): NestedObject | string => { + const basePath = `${pivot}.${type}.${scoped}` + const accessPath = path.length ? path.join('.') + '.' 
: '' + const stringTemplate = (v: string, path: any[] = []) => { + const key = path.slice(-1)[0] + const access = `\${${basePath}.${accessPath}${key}}` const fixed = bracketifyTF(access) const [head, tail] = bracketify(accessPath).split('[') - const tolist = `\${tolist(${basePath}.${head})[${tail}.${k}}` + //const tolist = `\${tolist(${basePath}.${head})[${tail}.${k}}` + if (v.startsWith('-->*')) { + // [1] tolist alternative for set unpacking a single item - from apparentlymart + const one = `\${one(${basePath}.${head})${tail.replace(/\d]/, '')}.${key}}` + return one + } else if (v.startsWith('-->')) { + return fixed + } else { + return v + } + } + if (isString(target)) return stringTemplate(target, path) + if (!isPlainObject(target)) return target as NestedObject + return Object.entries(target).reduce((a, c) => { + const [k, v] = c if (isString(v)) { - if (v.startsWith('-->*')) { - // [1] tolist alternative for set unpacking a single item - from apparentlymart - const one = `\${one(${basePath}.${head})${tail.replace(/\d]/, '')}.${k}}` - return { ...a, [k]: one } - } else if (v.startsWith('-->')) { - return { ...a, [k]: fixed } - } else { - return { ...a, [k]: v } - } + return { ...a, [k]: stringTemplate(v, [...path, k]) } } else if (isPlainObject(v)) { return { ...a, [k]: exporter(v, scoped, pivot, type, [...path, k]) } } else if (isArray(v)) { return { ...a, - [k]: v.map((x, i) => { - if (isString(x) && x.startsWith('-->')) { - return bracketify(`${access}[${i}]`) - } else if (isPlainObject(x)) { - return exporter(x, scoped, pivot, type, [...path, k, i]) - } - return x - }), + [k]: v.map((x, i) => exporter(x, scoped, pivot, type, [...path, k, i])), } } else { //console.log(`passthrough in exporter function...`) @@ -58,58 +58,71 @@ const exporter = ( return { ...a, [k]: v } } }, {}) - +} /** * recursive function that takes a path of strings or numbers * and returns an object with nested objects and arrays * **/ -const pathObjectifier = (path: any[]) => { +const 
stub = (path: any[]) => { const [head, ...tail] = path if (tail && tail.length) { - if (isString(head)) return { [head]: pathObjectifier(tail) } + if (isString(head)) return { [head]: stub(tail) } else { // create an array of dummy objects leading up to the index const dummyArray = Array(head).fill({}) || [] - - return [...dummyArray, pathObjectifier(tail)] + return [...dummyArray, stub(tail)] } } else { if (isString(head)) return { [head]: 'šŸ”„' } else return [...Array(head).fill('...'), 'šŸ”„'] } } +const stringTemplate = (v: string, scoped) => { + if (v.startsWith('-->')) { + const cleaned = v.replace(/-->\*?/, '') + if (cleaned === '') { + return null + } else { + return cleaned + } + } else if (v.includes('$SCOPE')) { + const replaced = v.replace('$SCOPE', scoped) + return replaced + } else { + return v + } +} +const warn = (path: string[]) => { + const reminder = 'šŸ”„ Dependency missing. Could be a missing export (-->)' + const trouble = 'šŸ”„ or a mispelled root key/id in a provisioning function.' + const problems = [reminder, trouble] + console.warn(`${problems.join('\n')}\nRequired by:${JSON.stringify(stub(path), null, 4)}`) +} /** * cleans out any export-specific values (--> prefixed) recursively and warns * the user if they forgot to export a value using the --> prefix */ -const exportFinalizer = (obj: object, path): NestedObject => { - const warn = (path: string[]) => { - const reminder = '\nšŸ”„ Upstream export (-->) missing. 
Required by:' - console.warn(`${reminder}\n${JSON.stringify(pathObjectifier(path), null, 4)}`) - } - return Object.entries(obj).reduce((a, c) => { +const exportFinalizer = (target: object, path, scoped): NestedObject | any => { + if (isString(target)) return stringTemplate(target, scoped) + if (!isPlainObject(target)) return target as NestedObject + return Object.entries(target).reduce((a, c) => { const [k, v] = c if (v === '-->') return a - if (v === 'undefined' || v === 'null') warn([...path, k]) - if (isString(v) && v.startsWith('-->')) { - const cleaned = v.replace(/-->\*?/, '') - if (cleaned === '') { - return a - } else { - return { ...a, [k]: cleaned } + if (v === undefined || v === null) return warn([...path, k]), a + if (isString(v)) { + if (v === 'undefined' || v === 'null') return warn([...path, k]), a + return { + ...a, + [k]: stringTemplate(v, scoped), } } else if (isPlainObject(v)) { - return { ...a, [k]: exportFinalizer(v, [...path, k]) } + return { ...a, [k]: exportFinalizer(v, [...path, k], scoped) } } else if (isArray(v)) { //console.log(`array found for ${k}: ${JSON.stringify(v)}`) return { ...a, - [k]: v.map((x, i) => { - if (x == 'undefined' || x == 'null') warn([...path, k, i]) - if (isPlainObject(x)) return exportFinalizer(x, [...path, k, i]) - else return x - }), + [k]: v.map((x, i) => exportFinalizer(x, [...path, k, i], scoped)), } } else { return { ...a, [k]: v } @@ -125,7 +138,7 @@ const exportFinalizer = (obj: object, path): NestedObject => { export const flattenPreservingPaths = ( obj: object, provider = 'aws', // FIXME: adds this to everything, even things you may not want - path: string[] = [], + path: any[] = [], acc: NestedObject = {}, refs = false ): object => { @@ -136,6 +149,8 @@ export const flattenPreservingPaths = ( const target = Object.values(val)[0] as object // { [key]: {...} } const raw_type = Object.keys(val)[0] // e.g., lambda_function const type = `${provider}_${raw_type}` + const parent_path = path.slice(0, -1) + 
const parent_scope = parent_path.join('_') const scoped = path.join('_') const scope = path.slice(-1)[0] return refs @@ -155,15 +170,24 @@ export const flattenPreservingPaths = ( ...a[key], [type]: { ...(a[key] && a[key][type]), - [scoped]: exportFinalizer(target, [key, raw_type]), + [scoped]: exportFinalizer(target, [key, raw_type], parent_scope), }, }, } - } else { + } else if (isPlainObject(val)) { return { ...a, ...flattenPreservingPaths(val, provider, [...path, key], a, refs), } + } else if (isArray(val)) { + return { + ...a, + [key]: val.map((x, i) => + flattenPreservingPaths(x, provider, [...path, key, i], {}, refs) + ), + } + } else { + return { ...a, [key]: val } } }, acc) } @@ -196,9 +220,6 @@ export const modulate = any }>( } } -const isEmpty = (x: any) => - isPlainObject(x) && !Object.keys(x).length ? true : isArray(x) && !x.length ? true : false - /** * deep merges arbitrary number of objects into one */ diff --git a/src/docker/README.md b/src/docker/README.md index f9f81b6..2aafe5e 100644 --- a/src/docker/README.md +++ b/src/docker/README.md @@ -32,7 +32,6 @@ resource "aws_ecr_repository" "example" { "project" : "blog-example" } } - ``` [optional arguments]: https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/ecr_repository @@ -117,6 +116,7 @@ resource "aws_iam_role" "lambda" { } EOF } + data "aws_iam_policy_document" "lambda" { statement { actions = [ @@ -129,6 +129,7 @@ data "aws_iam_policy_document" "lambda" { sid = "CreateCloudWatchLogs" } } + resource "aws_iam_policy" "lambda" { name = "example-lambda-policy" path = "/" diff --git a/src/modules/docker.ts b/src/modules/docker.ts new file mode 100644 index 0000000..74cfaad --- /dev/null +++ b/src/modules/docker.ts @@ -0,0 +1,295 @@ +import { AWS, flag } from '../types' +import { isFile, isEmpty, cleanNullEntries } from '../utils' + +export const caller_id: AWS = { + data: { + region: { + name: '-->', + }, + caller_identity: { + account_id: '-->', + }, + }, +} + +const 
docker_img = ({ img_name, src_path, dockerfile_path, build_args, platform }) => ({ + resource: { + docker_image: { + name: img_name, + build: { + context: src_path, + dockerfile: dockerfile_path, + args: build_args, + platform: platform, + }, + }, + }, +}) + +const docker_registry_img = ({ name, keep_remotely = false }) => ({ + resource: { + docker_registry_image: { + name, + keep_remotely, + }, + }, +}) + +/* + +# Generates a filename for the zip archive based on the content of the files +# in source_path. The filename will change when the source code changes. +data "external" "archive_prepare" { + count = var.create && var.create_package ? 1 : 0 + + program = [local.python, "${path.module}/package.py", "prepare"] + + query = { + paths = jsonencode({ + module = path.module + root = path.root + cwd = path.cwd + }) + + docker = var.build_in_docker ? jsonencode({ + docker_pip_cache = var.docker_pip_cache + docker_build_root = var.docker_build_root + docker_file = var.docker_file + docker_image = var.docker_image + with_ssh_agent = var.docker_with_ssh_agent + docker_additional_options = var.docker_additional_options + docker_entrypoint = var.docker_entrypoint + }) : null + + artifacts_dir = var.artifacts_dir + runtime = var.runtime + source_path = jsonencode(var.source_path) + hash_extra = var.hash_extra + hash_extra_paths = jsonencode( + [ + # Temporary fix when building from multiple locations + # We should take into account content of package.py when counting hash + # Related issue: https://github.com/terraform-aws-modules/terraform-aws-lambda/issues/63 + # "${path.module}/package.py" + ] + ) + + recreate_missing_package = var.recreate_missing_package + } +} +*/ +/** + * @param {string} runtime - runtime for lambda + * @param {string} source_path - path to source code + * @param {string} artifacts_dir - path where artifacts should be stored + * @param {string} builder - path to package.py + * @param {object} docker_config - docker configuration + * @param {string} 
docker_config.docker_pip_cache - path to pip cache + * @param {string} docker_config.docker_build_root - path to build root + * @param {string} docker_config.docker_file - path to dockerfile + * @param {string} docker_config.docker_image - docker image + * @param {boolean} docker_config.with_ssh_agent - whether to use ssh agent + * @param {string} docker_config.docker_additional_options - additional docker options + * @param {string} docker_config.docker_entrypoint - docker entrypoint + */ + +interface Build { + runtime: string + source_path: string + artifacts_dir: string + builder?: string + docker_config?: { + /** path to pip cache */ + docker_pip_cache?: string + /** Root dir where to build in Docker */ + docker_build_root?: string + /** Path to a Dockerfile when building in Docker */ + docker_file?: string + docker_image?: string + with_ssh_agent?: boolean + docker_additional_options?: string + docker_entrypoint?: string + } +} + +const build = ({ + runtime, + source_path, + artifacts_dir, + builder = '${path.root}/src/utils/package.py', + docker_config = {}, +}: Build) => { + const { + docker_pip_cache, + docker_build_root, + docker_file, + docker_image, + with_ssh_agent, + docker_additional_options, + docker_entrypoint, + } = docker_config + return { + data: { + external: { + program: ['python', builder, 'prepare'], + query: { + paths: JSON.stringify({ + module: '${path.module}', + root: '${path.root}', + cwd: '${path.cwd}', + }), + }, + ...(!isEmpty(docker_config) + ? { + docker: JSON.stringify( + cleanNullEntries({ + docker_pip_cache, + docker_build_root, + docker_file, + docker_image, + with_ssh_agent, + docker_additional_options, + docker_entrypoint, + }) + ), + } + : {}), + }, + artifacts_dir, + runtime, + source_path, + /** + * + * Temporary fix when building from multiple locations. 
We should + * take into account content of package.py when counting hash + * Related issue: + * https://github.com/terraform-aws-modules/terraform-aws-lambda/issues/63 + * "${path.module}/package.py" + */ + hash_extra_paths: JSON.stringify([]), + }, + } +} + +const ecr_repo = ({ + name, + image_tag_mutability = 'MUTABLE', + force_delete = true, + scan = false, + tags = {}, +}): AWS => ({ + resource: { + // @ts-ignore: FIXME (src/types or regex) + // image_scanning_configuration is qualified in place + // instead of as separate section with heading + ecr_repository: { + name, + image_tag_mutability, + force_delete, + image_scanning_configuration: { + scan_on_push: scan, + }, + tags: { + ...flag, + ...tags, + }, + }, + }, +}) + +const lifecycle_policy = ({ policy, repo }): AWS => ({ + resource: { + ecr_lifecycle_policy: { + repository: repo, + policy, + }, + }, +}) + +const sam_metadata_image = ({ src_path, dockerfile_path, build_args, img_tag, img_uri }) => ({ + resource: { + null_resource: { + triggers: { + resource_type: 'IMAGE_LAMBDA_FUNCTION', + docker_context: src_path, + docker_file: dockerfile_path, + docker_tag: img_tag, + docker_build_args: JSON.stringify(build_args), + built_image_uri: img_uri, + }, + depends_on: ['${docker_registry_image.$SCOPE}'], + }, + }, +}) + +const ecr_repo_fmt = (acct_id, region) => `${acct_id}.dkr.ecr.${region}.amazonaws.com` + +const null_resource = ({ file_path }) => ({ + resource: { + // TODO: add to types [1] + null_resource: { + triggers: isFile(file_path) + ? 
{ diff: `\${md5(file(${file_path}))}` } + : { + diff: `\${sha1(join("", [for f in fileset(${file_path}, "**"): filesha1(f)]))}`, + }, + }, + }, +}) + +/** + * requires required providers to include kreuzwerker/docker and hashicorp/null + * at root of compiler + * ```ts + * { required_providers: { + * aws: { + * source: 'hashicorp/aws', + * version: '>= 5.20', + * }, + * docker: { + * source: 'kreuzwerker/docker', + * version: '>= 3.0', + * }, + * null: { + * source: 'hashicorp/null', + * version: '>= 2.0', + * } + * } + * } + * ``` + */ +//export const docked = ({ name, region, account_id }, my) => { +// const ecr_address = ecr_repo_fmt(account_id, region) +// const img_tag = `${ecr_address}/${name}:latest` +// return { +// image: docker_img({ +// img_name, +// src_path, +// dockerfile_path, +// build_args, +// platform, +// }), +// registry_img: docker_registry_img({ +// name, +// keep_remotely, +// }), +// ecr_repo: ecr_repo({ +// name, +// image_tag_mutability, +// force_delete, +// scan, +// tags, +// }), +// lifecycle: lifecycle_policy({ +// policy, +// repo: name, +// }), +// sam_image_meta: sam_metadata_image({ +// src_path, +// dockerfile_path, +// build_args, +// img_tag, +// img_uri, +// }), +// } +//} diff --git a/src/modules/ecr.ts b/src/modules/ecr.ts new file mode 100644 index 0000000..38c3c81 --- /dev/null +++ b/src/modules/ecr.ts @@ -0,0 +1,84 @@ +import { AWS, flag } from '../types' +// one per subdomain? 
+export const ecr_repo = ({ name, tags = {} }): AWS => ({ + resource: { + // @ts-ignore: FIXME (src/types or regex) + // image_scanning_configuration is qualified in place + // instead of as separate section with heading + ecr_repository: { + name, + tags: { + ...flag, + ...tags, + }, + }, + }, +}) + +export const isFile = (path: string) => { + const parts = path.split('/') + const [last] = parts.slice(-1) + return last.includes('.') +} + +const null_resource = ({ file_path }): AWS => { + return { + resource: { + // @ts-ignore: FIXME (src/types) no null_resource in AWS (tf proper) + // [1] + null_resource: { + triggers: isFile(file_path) + ? { diff: `\${md5(file(${file_path}))}` } + : { + diff: `\${sha1(join("", [for f in fileset(${file_path}, "**"): filesha1(f)]))}`, + }, + }, + }, + } +} + +const image = ({ repo, image_tag }): AWS => ({ + data: { + ecr_image: { + repository_name: repo, + image_tag, + // @ts-ignore: FIXME (src/types) add depends_on to data + depends_on: [`null_resource.$SCOPE`], + }, + }, +}) + +export const ecr_image = ({ repo, file_path, image_tag }): AWS => ({ + ...null_resource({ file_path }), + ...image({ repo, image_tag }), +}) + +const current_region: AWS = { + data: { + region: { + name: '-->', + }, + caller_identity: { + account_id: '-->', + }, + }, +} + +// TODO: add docker_image type? 
+const docker_img = ({ name, src_dir, acct_id, region, repo, image_tag }) => { + const dockerfile = `${src_dir}/Dockerfile` + return { + docker_image: { + name, + build: { + //context: file_path, + dockerfile, + }, + }, + } +} +/** + * References: + * + * [1] https://stackoverflow.com/a/66501021 + */ diff --git a/src/modules/iam.ts b/src/modules/iam.ts index 7782bef..7450205 100644 --- a/src/modules/iam.ts +++ b/src/modules/iam.ts @@ -1,4 +1,4 @@ -import { AWS, Statement, flag } from '../types' +import { AWS, flag } from '../types' // ,e, // " /~~~8e 888-~88e-~88e @@ -7,22 +7,6 @@ import { AWS, Statement, flag } from '../types' // 888 C888 888 888 888 888 // 888 "88_-888 888 888 888 -export const iam_policy_doc: AWS = { - data: { - iam_policy_document: { - statement: { - effect: 'Allow', - actions: ['sts:AssumeRole'], - principals: { - identifiers: ['lambda.amazonaws.com', 'apigateway.amazonaws.com'], - type: 'Service', - }, - }, - json: '-->', - }, - }, -} - export const iam_role = ({ name, policy_json, tags = {} }): AWS => ({ resource: { iam_role: { @@ -37,60 +21,6 @@ export const iam_role = ({ name, policy_json, tags = {} }): AWS => ({ }, }) -const bucket_policy_statement = ({ bucket_name, lambda_role_arn = '' }): Statement => ({ - ...(lambda_role_arn ? { principals: { identifiers: [lambda_role_arn], type: 'AWS' } } : {}), - effect: 'Allow', - actions: [ - 's3:AbortMultipartUpload', - 's3:ListMultipartUploadParts', - 's3:ListBucketMultipartUploads', - 's3:PutObject', - 's3:GetObject', - 's3:DeleteObject', - ], - resources: [`arn:aws:s3:::${bucket_name}`, `arn:aws:s3:::${bucket_name}/*`], -}) - -export const multi_stmt_policy_doc = ({ - bucket_name = '', - topic_arn = '', - cloudwatch_arn = '', - lambda_role_arn = '', -}): AWS => ({ - data: { - iam_policy_document: { - statement: [ - ...(bucket_name - ? ([bucket_policy_statement({ bucket_name, lambda_role_arn })] as Statement[]) - : []), - ...(topic_arn - ? 
([ - { - effect: 'Allow', - actions: ['sns:Publish', 'sns:Subscribe'], - resources: [topic_arn], - }, - ] as Statement[]) - : []), - ...(cloudwatch_arn - ? ([ - { - effect: 'Allow', - actions: [ - 'logs:CreateLogGroup', - 'logs:CreateLogStream', - 'logs:PutLogEvents', - ], - resources: [`${cloudwatch_arn}:*`, `${cloudwatch_arn}:*:*`], - }, - ] as Statement[]) - : []), - ], - json: '-->', - }, - }, -}) - export const iam_role_policy_attachment = ({ role_name, policy_arn }): AWS => ({ resource: { iam_role_policy_attachment: { diff --git a/src/modules/lambda.ts b/src/modules/lambda.ts index 756986a..023bed8 100644 --- a/src/modules/lambda.ts +++ b/src/modules/lambda.ts @@ -1,13 +1,78 @@ -import { AWS, flag } from '../types' +import { AWS, Statement, flag } from '../types' import { bucket_policy, bucket_cors, bucket } from './s3' import { subscription } from './sns' -import { - iam_policy_doc, - iam_role, - multi_stmt_policy_doc, - iam_role_policy_attachment, - iam_policy, -} from './iam' +import { iam_role, iam_role_policy_attachment, iam_policy } from './iam' +//import { ecr_repo, ecr_image, isFile } from './ecr' + +const lambda_creds: AWS = { + data: { + iam_policy_document: { + statement: { + effect: 'Allow', + actions: ['sts:AssumeRole'], + principals: { + identifiers: ['lambda.amazonaws.com', 'apigateway.amazonaws.com'], + type: 'Service', + }, + }, + json: '-->', + }, + }, +} + +const bucket_policy_statement = ({ bucket_name, lambda_role_arn = '' }): Statement => ({ + ...(lambda_role_arn ? 
{ principals: { identifiers: [lambda_role_arn], type: 'AWS' } } : {}), + effect: 'Allow', + actions: [ + 's3:AbortMultipartUpload', + 's3:ListMultipartUploadParts', + 's3:ListBucketMultipartUploads', + 's3:PutObject', + 's3:GetObject', + 's3:DeleteObject', + ], + resources: [`arn:aws:s3:::${bucket_name}`, `arn:aws:s3:::${bucket_name}/*`], +}) + +const multi_stmt_policy_doc = ({ + bucket_name = '', + topic_arn = '', + cloudwatch_arn = '', + lambda_role_arn = '', +}): AWS => ({ + data: { + iam_policy_document: { + statement: [ + ...(bucket_name + ? ([bucket_policy_statement({ bucket_name, lambda_role_arn })] as Statement[]) + : []), + ...(topic_arn + ? ([ + { + effect: 'Allow', + actions: ['sns:Publish', 'sns:Subscribe'], + resources: [topic_arn], + }, + ] as Statement[]) + : []), + ...(cloudwatch_arn + ? ([ + { + effect: 'Allow', + actions: [ + 'logs:CreateLogGroup', + 'logs:CreateLogStream', + 'logs:PutLogEvents', + ], + resources: [`${cloudwatch_arn}:*`, `${cloudwatch_arn}:*:*`], + }, + ] as Statement[]) + : []), + ], + json: '-->', + }, + }, +}) export const lambda_invoke_cred = ({ function_name, @@ -71,6 +136,7 @@ const lambda_fn = ({ package_type = 'Zip', runtime = 'python3.8', tags = {}, + log_group_name = '', }): AWS => ({ resource: { lambda_function: { @@ -87,6 +153,8 @@ const lambda_fn = ({ ...flag, ...tags, }, + // @ts-ignore + //depends_on: [`aws_cloudwatch_log_group.$SCOPE_${log_group_name}`], arn: '-->', invoke_arn: '-->', }, @@ -165,69 +233,76 @@ export const lambda = ( sns, }: Lambda, my: { [key: string]: AWS } -) => ({ - iam_policy_doc, - lambda_role: iam_role({ - name, - policy_json: my?.lambda_creds?.data?.iam_policy_document?.json, - tags, - }), - bucket: bucket({ name, tags }), - bucket_access_creds: multi_stmt_policy_doc({ - bucket_name: my?.bucket.resource?.s3_bucket?.bucket, - lambda_role_arn: my?.lambda_role?.resource?.iam_role?.arn, - }), - bucket_cors: bucket_cors({ bucket_name: my?.bucket.resource?.s3_bucket?.bucket }), - bucket_policy: 
bucket_policy({ - bucket_name: my?.bucket.resource?.s3_bucket?.bucket, - policy_json: my?.bucket_access_creds?.data?.iam_policy_document?.json, - }), - cloudwatch: cloudwatch({ name, tags }), - lambda_access_creds: multi_stmt_policy_doc({ - bucket_name: my?.bucket.resource?.s3_bucket?.bucket, - cloudwatch_arn: my?.cloudwatch.resource?.cloudwatch_log_group?.arn, - topic_arn: sns?.downstream?.topic_arn, - }), - lambda_policy: iam_policy({ - name: `${name}-policy`, - policy_json: my?.lambda_access_creds?.data?.iam_policy_document?.json, - tags, - }), - lambda_policy_attachment: iam_role_policy_attachment({ - policy_arn: my?.lambda_policy?.resource?.iam_policy?.arn, - role_name: my?.lambda_role?.resource?.iam_role?.name, - }), - lambda: lambda_fn({ - name, - //efs_arn: my?.efs?.resource?.efs_file_system?.arn, - role_arn: my?.lambda_role?.resource?.iam_role?.arn, - file_path, - handler, - tags, - env_vars: { - S3_BUCKET_NAME: my?.bucket.resource?.s3_bucket?.bucket, - ...(sns?.downstream - ? { - SNS_TOPIC_ARN: sns.downstream.topic_arn, - SNS_MESSAGE_ATTRS: JSON.stringify(sns.downstream.message_attrs), - } - : {}), - ...env_vars, - }, - }), - ...(sns?.upstream - ? { - sns_invoke_cred: lambda_invoke_cred({ - function_name: my?.lambda?.resource?.lambda_function?.function_name, - source_arn: sns.upstream?.topic_arn, - principal: 'sns.amazonaws.com', - statement_id: 'AllowExecutionFromSNS', - }), - subscription: subscription({ - topic_arn: sns.upstream.topic_arn, - lambda_arn: my?.lambda?.resource?.lambda_function?.arn, - filter: sns.upstream.filter_policy, - }), - } - : {}), -}) +) => { + // TODO: consider triggering @-0/build-lambda-py here + // - would have to make this async... 
+ const ext = file_path.split('.').pop() + const isZip = ext === 'zip' + return { + lambda_creds, + lambda_role: iam_role({ + name, + policy_json: my?.lambda_creds?.data?.iam_policy_document?.json, + tags, + }), + bucket: bucket({ name, tags }), + bucket_access_creds: multi_stmt_policy_doc({ + bucket_name: my?.bucket.resource?.s3_bucket?.bucket, + lambda_role_arn: my?.lambda_role?.resource?.iam_role?.arn, + }), + bucket_cors: bucket_cors({ bucket_name: my?.bucket.resource?.s3_bucket?.bucket }), + bucket_policy: bucket_policy({ + bucket_name: my?.bucket.resource?.s3_bucket?.bucket, + policy_json: my?.bucket_access_creds?.data?.iam_policy_document?.json, + }), + cloudwatch: cloudwatch({ name, tags }), + lambda_access_creds: multi_stmt_policy_doc({ + bucket_name: my?.bucket.resource?.s3_bucket?.bucket, + cloudwatch_arn: my?.cloudwatch.resource?.cloudwatch_log_group?.arn, + topic_arn: sns?.downstream?.topic_arn, + }), + lambda_policy: iam_policy({ + name: `${name}-policy`, + policy_json: my?.lambda_access_creds?.data?.iam_policy_document?.json, + tags, + }), + lambda_policy_attachment: iam_role_policy_attachment({ + policy_arn: my?.lambda_policy?.resource?.iam_policy?.arn, + role_name: my?.lambda_role?.resource?.iam_role?.name, + }), + lambda: lambda_fn({ + name, + role_arn: my?.lambda_role?.resource?.iam_role?.arn, + file_path, + package_type: isZip ? 'Zip' : 'Image', + handler, + tags, + log_group_name: 'cloudwatch', + env_vars: { + S3_BUCKET_NAME: my?.bucket.resource?.s3_bucket?.bucket, + ...(sns?.downstream + ? { + SNS_TOPIC_ARN: sns.downstream.topic_arn, + SNS_MESSAGE_ATTRS: JSON.stringify(sns.downstream.message_attrs), + } + : {}), + ...env_vars, + }, + }), + ...(sns?.upstream + ? 
{ + sns_invoke_cred: lambda_invoke_cred({ + function_name: my?.lambda?.resource?.lambda_function?.function_name, + source_arn: sns.upstream?.topic_arn, + principal: 'sns.amazonaws.com', + statement_id: 'AllowExecutionFromSNS', + }), + subscription: subscription({ + topic_arn: sns.upstream.topic_arn, + lambda_arn: my?.lambda?.resource?.lambda_function?.arn, + filter: sns.upstream.filter_policy, + }), + } + : {}), + } +} diff --git a/src/utils/checks.ts b/src/utils/checks.ts new file mode 100644 index 0000000..0cdc45f --- /dev/null +++ b/src/utils/checks.ts @@ -0,0 +1,32 @@ +import { isArray, isPlainObject } from '@thi.ng/checks' +export const isFile = (path: string) => { + const parts = path.split('/') + const [last] = parts.slice(-1) + return last.includes('.') +} +const allNulls = (obj: any) => { + const entries = Object.entries(obj) + return entries.every(([, v]) => v === null) +} +export const isEmpty = (x: any) => { + if (isPlainObject(x)) { + if (!Object.keys(x).length) return true + if (allNulls(x)) return true + return false + } else if (isArray(x)) { + if (!x.length) return true + return false + } else { + return false + } +} + +export const cleanNullEntries = (obj: any) => + Object.entries(obj).reduce((a, c) => { + const [k, v] = c + if (v === null || v === undefined) { + return a + } else { + return { ...a, [k]: v } + } + }, {}) diff --git a/src/utils/index.ts b/src/utils/index.ts new file mode 100644 index 0000000..07f8970 --- /dev/null +++ b/src/utils/index.ts @@ -0,0 +1,842 @@ +export { isFile, isEmpty, cleanNullEntries } from './checks' + +/* + +variable "create_ecr_repo" { + description = "Controls whether ECR repository for Lambda image should be created" + type = bool + default = false +} + +variable "ecr_address" { + description = "Address of ECR repository for cross-account container image pulling (optional). 
Option `create_ecr_repo` must be `false`" + type = string + default = null +} + +variable "ecr_repo" { + description = "Name of ECR repository to use or to create" + type = string + default = null +} + +variable "image_tag" { + description = "Image tag to use. If not specified current timestamp in format 'YYYYMMDDhhmmss' will be used. This can lead to unnecessary rebuilds." + type = string + default = null +} + +variable "source_path" { + description = "Path to folder containing application code" + type = string + default = null +} + +variable "docker_file_path" { + description = "Path to Dockerfile in source package" + type = string + default = "Dockerfile" +} + + +variable "image_tag_mutability" { + description = "The tag mutability setting for the repository. Must be one of: `MUTABLE` or `IMMUTABLE`" + type = string + default = "MUTABLE" +} + +variable "scan_on_push" { + description = "Indicates whether images are scanned after being pushed to the repository" + type = bool + default = false +} + +variable "ecr_force_delete" { + description = "If true, will delete the repository even if it contains images." + default = true + type = bool +} + +variable "ecr_repo_tags" { + description = "A map of tags to assign to ECR repository" + type = map(string) + default = {} +} + +variable "build_args" { + description = "A map of Docker build arguments." + type = map(string) + default = {} +} + +variable "ecr_repo_lifecycle_policy" { + description = "A JSON formatted ECR lifecycle policy to automate the cleaning up of unused images." + type = string + default = null +} + +variable "keep_remotely" { + description = "Whether to keep Docker image in the remote registry on destroy operation." + type = bool + default = false +} + +variable "platform" { + description = "The target architecture platform to build the image for." 
+ type = string + default = null +} + +########### +# Root +########### + +variable "create" { + description = "Controls whether resources should be created" + type = bool + default = true +} + +variable "create_package" { + description = "Controls whether Lambda package should be created" + type = bool + default = true +} + +variable "create_function" { + description = "Controls whether Lambda Function resource should be created" + type = bool + default = true +} + +variable "create_layer" { + description = "Controls whether Lambda Layer resource should be created" + type = bool + default = false +} + +variable "create_role" { + description = "Controls whether IAM role for Lambda Function should be created" + type = bool + default = true +} + +variable "create_lambda_function_url" { + description = "Controls whether the Lambda Function URL resource should be created" + type = bool + default = false +} + +variable "create_sam_metadata" { + description = "Controls whether the SAM metadata null resource should be created" + type = bool + default = false +} + +variable "putin_khuylo" { + description = "Do you agree that Putin doesn't respect Ukrainian sovereignty and territorial integrity? More info: https://en.wikipedia.org/wiki/Putin_khuylo!" 
+ type = bool + default = true +} + +########### +# Function +########### + +variable "lambda_at_edge" { + description = "Set this to true if using Lambda@Edge, to enable publishing, limit the timeout, and allow edgelambda.amazonaws.com to invoke the function" + type = bool + default = false +} + +variable "lambda_at_edge_logs_all_regions" { + description = "Whether to specify a wildcard in IAM policy used by Lambda@Edge to allow logging in all regions" + type = bool + default = true +} + +variable "function_name" { + description = "A unique name for your Lambda Function" + type = string + default = "" +} + +variable "handler" { + description = "Lambda Function entrypoint in your code" + type = string + default = "" +} + +variable "runtime" { + description = "Lambda Function runtime" + type = string + default = "" +} + +variable "lambda_role" { + description = " IAM role ARN attached to the Lambda Function. This governs both who / what can invoke your Lambda Function, as well as what resources our Lambda Function has access to. See Lambda Permission Model for more details." + type = string + default = "" +} + +variable "description" { + description = "Description of your Lambda Function (or Layer)" + type = string + default = "" +} + +variable "code_signing_config_arn" { + description = "Amazon Resource Name (ARN) for a Code Signing Configuration" + type = string + default = null +} + +variable "layers" { + description = "List of Lambda Layer Version ARNs (maximum of 5) to attach to your Lambda Function." + type = list(string) + default = null +} + +variable "architectures" { + description = "Instruction set architecture for your Lambda function. Valid values are [\"x86_64\"] and [\"arm64\"]." 
+ type = list(string) + default = null +} + +variable "kms_key_arn" { + description = "The ARN of KMS key to use by your Lambda Function" + type = string + default = null +} + +variable "memory_size" { + description = "Amount of memory in MB your Lambda Function can use at runtime. Valid value between 128 MB to 10,240 MB (10 GB), in 64 MB increments." + type = number + default = 128 +} + +variable "ephemeral_storage_size" { + description = "Amount of ephemeral storage (/tmp) in MB your Lambda Function can use at runtime. Valid value between 512 MB to 10,240 MB (10 GB)." + type = number + default = 512 +} + +variable "publish" { + description = "Whether to publish creation/change as new Lambda Function Version." + type = bool + default = false +} + +variable "reserved_concurrent_executions" { + description = "The amount of reserved concurrent executions for this Lambda Function. A value of 0 disables Lambda Function from being triggered and -1 removes any concurrency limitations. Defaults to Unreserved Concurrency Limits -1." + type = number + default = -1 +} + +variable "timeout" { + description = "The amount of time your Lambda Function has to run in seconds." + type = number + default = 3 +} + +variable "dead_letter_target_arn" { + description = "The ARN of an SNS topic or SQS queue to notify when an invocation fails." + type = string + default = null +} + +variable "environment_variables" { + description = "A map that defines environment variables for the Lambda Function." + type = map(string) + default = {} +} + +variable "tracing_mode" { + description = "Tracing mode of the Lambda Function. Valid value can be either PassThrough or Active." + type = string + default = null +} + +variable "vpc_subnet_ids" { + description = "List of subnet ids when Lambda Function should run in the VPC. Usually private or intra subnets." 
+ type = list(string) + default = null +} + +variable "vpc_security_group_ids" { + description = "List of security group ids when Lambda Function should run in the VPC." + type = list(string) + default = null +} + +variable "tags" { + description = "A map of tags to assign to resources." + type = map(string) + default = {} +} + +variable "s3_object_tags" { + description = "A map of tags to assign to S3 bucket object." + type = map(string) + default = {} +} + +variable "s3_object_tags_only" { + description = "Set to true to not merge tags with s3_object_tags. Useful to avoid breaching S3 Object 10 tag limit." + type = bool + default = false +} + +variable "package_type" { + description = "The Lambda deployment package type. Valid options: Zip or Image" + type = string + default = "Zip" +} + +variable "image_uri" { + description = "The ECR image URI containing the function's deployment package." + type = string + default = null +} + +variable "image_config_entry_point" { + description = "The ENTRYPOINT for the docker image" + type = list(string) + default = [] + +} +variable "image_config_command" { + description = "The CMD for the docker image" + type = list(string) + default = [] +} + +variable "image_config_working_directory" { + description = "The working directory for the docker image" + type = string + default = null +} + +variable "snap_start" { + description = "(Optional) Snap start settings for low-latency startups" + type = bool + default = false +} + +variable "replace_security_groups_on_destroy" { + description = "(Optional) When true, all security groups defined in vpc_security_group_ids will be replaced with the default security group after the function is destroyed. Set the replacement_security_group_ids variable to use a custom list of security groups for replacement instead." 
+ type = bool + default = null +} + +variable "replacement_security_group_ids" { + description = "(Optional) List of security group IDs to assign to orphaned Lambda function network interfaces upon destruction. replace_security_groups_on_destroy must be set to true to use this attribute." + type = list(string) + default = null +} + +variable "timeouts" { + description = "Define maximum timeout for creating, updating, and deleting Lambda Function resources" + type = map(string) + default = {} +} + +############### +# Function URL +############### + +variable "create_unqualified_alias_lambda_function_url" { + description = "Whether to use unqualified alias pointing to $LATEST version in Lambda Function URL" + type = bool + default = true +} + +variable "authorization_type" { + description = "The type of authentication that the Lambda Function URL uses. Set to 'AWS_IAM' to restrict access to authenticated IAM users only. Set to 'NONE' to bypass IAM authentication and create a public endpoint." + type = string + default = "NONE" +} + +variable "cors" { + description = "CORS settings to be used by the Lambda Function URL" + type = any + default = {} +} + +variable "invoke_mode" { + description = "Invoke mode of the Lambda Function URL. Valid values are BUFFERED (default) and RESPONSE_STREAM." + type = string + default = null +} + +######## +# Layer +######## + +variable "layer_name" { + description = "Name of Lambda Layer to create" + type = string + default = "" +} + +variable "layer_skip_destroy" { + description = "Whether to retain the old version of a previously deployed Lambda Layer." + type = bool + default = false +} + +variable "license_info" { + description = "License info for your Lambda Layer. Eg, MIT or full url of a license." + type = string + default = "" +} + +variable "compatible_runtimes" { + description = "A list of Runtimes this layer is compatible with. Up to 5 runtimes can be specified." 
+ type = list(string) + default = [] +} + +variable "compatible_architectures" { + description = "A list of Architectures Lambda layer is compatible with. Currently x86_64 and arm64 can be specified." + type = list(string) + default = null +} + +############################ +# Lambda Async Event Config +############################ + +variable "create_async_event_config" { + description = "Controls whether async event configuration for Lambda Function/Alias should be created" + type = bool + default = false +} + +variable "create_current_version_async_event_config" { + description = "Whether to allow async event configuration on current version of Lambda Function (this will revoke permissions from previous version because Terraform manages only current resources)" + type = bool + default = true +} + +variable "create_unqualified_alias_async_event_config" { + description = "Whether to allow async event configuration on unqualified alias pointing to $LATEST version" + type = bool + default = true +} + +variable "maximum_event_age_in_seconds" { + description = "Maximum age of a request that Lambda sends to a function for processing in seconds. Valid values between 60 and 21600." + type = number + default = null +} + +variable "maximum_retry_attempts" { + description = "Maximum number of times to retry when the function returns an error. Valid values between 0 and 2. Defaults to 2." + type = number + default = null +} + +variable "destination_on_failure" { + description = "Amazon Resource Name (ARN) of the destination resource for failed asynchronous invocations" + type = string + default = null +} + +variable "destination_on_success" { + description = "Amazon Resource Name (ARN) of the destination resource for successful asynchronous invocations" + type = string + default = null +} + +########################## +# Provisioned Concurrency +########################## + +variable "provisioned_concurrent_executions" { + description = "Amount of capacity to allocate. 
Set to 1 or greater to enable, or set to 0 to disable provisioned concurrency." + type = number + default = -1 +} + +############################################ +# Lambda Permissions (for allowed triggers) +############################################ + +variable "create_current_version_allowed_triggers" { + description = "Whether to allow triggers on current version of Lambda Function (this will revoke permissions from previous version because Terraform manages only current resources)" + type = bool + default = true +} + +variable "create_unqualified_alias_allowed_triggers" { + description = "Whether to allow triggers on unqualified alias pointing to $LATEST version" + type = bool + default = true +} + +variable "allowed_triggers" { + description = "Map of allowed triggers to create Lambda permissions" + type = map(any) + default = {} +} + +############################################ +# Lambda Event Source Mapping +############################################ + +variable "event_source_mapping" { + description = "Map of event source mapping" + type = any + default = {} +} + +################# +# CloudWatch Logs +################# + +variable "use_existing_cloudwatch_log_group" { + description = "Whether to use an existing CloudWatch log group or create new" + type = bool + default = false +} + +variable "cloudwatch_logs_retention_in_days" { + description = "Specifies the number of days you want to retain log events in the specified log group. Possible values are: 1, 3, 5, 7, 14, 30, 60, 90, 120, 150, 180, 365, 400, 545, 731, 1827, and 3653." + type = number + default = null +} + +variable "cloudwatch_logs_kms_key_id" { + description = "The ARN of the KMS Key to use when encrypting log data." + type = string + default = null +} + +variable "cloudwatch_logs_tags" { + description = "A map of tags to assign to the resource." 
+ type = map(string) + default = {} +} + +###### +# IAM +###### + +variable "role_name" { + description = "Name of IAM role to use for Lambda Function" + type = string + default = null +} + +variable "role_description" { + description = "Description of IAM role to use for Lambda Function" + type = string + default = null +} + +variable "role_path" { + description = "Path of IAM role to use for Lambda Function" + type = string + default = null +} + +variable "role_force_detach_policies" { + description = "Specifies to force detaching any policies the IAM role has before destroying it." + type = bool + default = true +} + +variable "role_permissions_boundary" { + description = "The ARN of the policy that is used to set the permissions boundary for the IAM role used by Lambda Function" + type = string + default = null +} + +variable "role_tags" { + description = "A map of tags to assign to IAM role" + type = map(string) + default = {} +} + +variable "role_maximum_session_duration" { + description = "Maximum session duration, in seconds, for the IAM role" + type = number + default = 3600 +} + +########### +# Policies +########### + +variable "policy_name" { + description = "IAM policy name. 
It overrides the default value, which is the same as role_name
"number_of_policies" { + description = "Number of policies to attach to IAM role for Lambda Function" + type = number + default = 0 +} + +variable "attach_policy_statements" { + description = "Controls whether policy_statements should be added to IAM role for Lambda Function" + type = bool + default = false +} + +variable "trusted_entities" { + description = "List of additional trusted entities for assuming Lambda Function role (trust relationship)" + type = any + default = [] +} + +variable "assume_role_policy_statements" { + description = "Map of dynamic policy statements for assuming Lambda Function role (trust relationship)" + type = any + default = {} +} + +variable "policy_json" { + description = "An additional policy document as JSON to attach to the Lambda Function role" + type = string + default = null +} + +variable "policy_jsons" { + description = "List of additional policy documents as JSON to attach to Lambda Function role" + type = list(string) + default = [] +} + +variable "policy" { + description = "An additional policy document ARN to attach to the Lambda Function role" + type = string + default = null +} + +variable "policies" { + description = "List of policy statements ARN to attach to Lambda Function role" + type = list(string) + default = [] +} + +variable "policy_statements" { + description = "Map of dynamic policy statements to attach to Lambda Function role" + type = any + default = {} +} + +variable "file_system_arn" { + description = "The Amazon Resource Name (ARN) of the Amazon EFS Access Point that provides access to the file system." + type = string + default = null +} + +variable "file_system_local_mount_path" { + description = "The path where the function can access the file system, starting with /mnt/." 
+ type = string + default = null +} + +########################## +# Build artifact settings +########################## + +variable "artifacts_dir" { + description = "Directory name where artifacts should be stored" + type = string + default = "builds" +} + +variable "s3_prefix" { + description = "Directory name where artifacts should be stored in the S3 bucket. If unset, the path from `artifacts_dir` is used" + type = string + default = null +} + +variable "ignore_source_code_hash" { + description = "Whether to ignore changes to the function's source code hash. Set to true if you manage infrastructure and code deployments separately." + type = bool + default = false +} + +variable "local_existing_package" { + description = "The absolute path to an existing zip-file to use" + type = string + default = null +} + +variable "s3_existing_package" { + description = "The S3 bucket object with keys bucket, key, version pointing to an existing zip-file to use" + type = map(string) + default = null +} + +variable "store_on_s3" { + description = "Whether to store produced artifacts on S3 or locally." + type = bool + default = false +} + +variable "s3_object_storage_class" { + description = "Specifies the desired Storage Class for the artifact uploaded to S3. Can be either STANDARD, REDUCED_REDUNDANCY, ONEZONE_IA, INTELLIGENT_TIERING, or STANDARD_IA." + type = string + default = "ONEZONE_IA" # Cheaper than STANDARD and it is enough for Lambda deployments +} + +variable "s3_bucket" { + description = "S3 bucket to store artifacts" + type = string + default = null +} + +variable "s3_acl" { + description = "The canned ACL to apply. Valid values are private, public-read, public-read-write, aws-exec-read, authenticated-read, bucket-owner-read, and bucket-owner-full-control. Defaults to private." + type = string + default = "private" +} + +variable "s3_server_side_encryption" { + description = "Specifies server-side encryption of the object in S3. 
Valid values are \"AES256\" and \"aws:kms\"." + type = string + default = null +} + +variable "source_path" { + description = "The absolute path to a local file or directory containing your Lambda source code" + type = any # string | list(string | map(any)) + default = null +} + +variable "hash_extra" { + description = "The string to add into hashing function. Useful when building same source path for different functions." + type = string + default = "" +} + +variable "build_in_docker" { + description = "Whether to build dependencies in Docker" + type = bool + default = false +} + +# Docker options + +variable "docker_file" { + description = "Path to a Dockerfile when building in Docker" + type = string + default = "" +} + +variable "docker_build_root" { + description = "Root dir where to build in Docker" + type = string + default = "" +} + +variable "docker_image" { + description = "Docker image to use for the build" + type = string + default = "" +} + +variable "docker_with_ssh_agent" { + description = "Whether to pass SSH_AUTH_SOCK into docker environment or not" + type = bool + default = false +} + +variable "docker_pip_cache" { + description = "Whether to mount a shared pip cache folder into docker environment or not" + type = any + default = null +} + +variable "docker_additional_options" { + description = "Additional options to pass to the docker run command (e.g. 
to set environment variables, volumes, etc.)" + type = list(string) + default = [] +} + +variable "docker_entrypoint" { + description = "Path to the Docker entrypoint to use" + type = string + default = null +} + +variable "recreate_missing_package" { + description = "Whether to recreate missing Lambda package if it is missing locally or not" + type = bool + default = true +} + +*/ diff --git a/src/utils/package.py b/src/utils/package.py new file mode 100644 index 0000000..b8202fd --- /dev/null +++ b/src/utils/package.py @@ -0,0 +1,1638 @@ +# coding: utf-8 + +import sys + +if sys.version_info < (3, 6): + raise RuntimeError("A python version 3.6 or newer is required") + +import os +import re +import time +import stat +import json +import shlex +import shutil +import hashlib +import zipfile +import argparse +import datetime +import tempfile +import operator +import platform +import subprocess +from subprocess import check_call, check_output +from contextlib import contextmanager +from base64 import b64encode +import logging + +PY38 = sys.version_info >= (3, 8) +PY37 = sys.version_info >= (3, 7) +PY36 = sys.version_info >= (3, 6) + +WINDOWS = platform.system() == 'Windows' +OSX = platform.system() == 'Darwin' + +################################################################################ +# Logging + +DEBUG2 = 9 +DEBUG3 = 8 +DUMP_ENV = 1 + +log_handler = None +log = logging.getLogger() +cmd_log = logging.getLogger('cmd') + + +def configure_logging(use_tf_stderr=False): + global log_handler + + logging.addLevelName(DEBUG2, 'DEBUG2') + logging.addLevelName(DEBUG3, 'DEBUG3') + logging.addLevelName(DUMP_ENV, 'DUMP_ENV') + + class LogFormatter(logging.Formatter): + default_format = '%(message)s' + formats = { + 'root': default_format, + 'build': default_format, + 'prepare': '[{}] %(name)s: %(message)s'.format(os.getpid()), + 'cmd': '> %(message)s', + '': '%(name)s: %(message)s' + } + + def formatMessage(self, record): + prefix = record.name.rsplit('.') + 
self._style._fmt = self.formats.get(prefix[0], self.formats['']) + return super().formatMessage(record) + + tf_stderr_fd = 5 + log_stream = sys.stderr + if use_tf_stderr: + try: + if os.isatty(tf_stderr_fd): + log_stream = os.fdopen(tf_stderr_fd, mode='w') + except OSError: + pass + + log_handler = logging.StreamHandler(stream=log_stream) + log_handler.setFormatter(LogFormatter()) + + log.addHandler(log_handler) + log.setLevel(logging.INFO) + + +def dump_env(): + if log.isEnabledFor(DUMP_ENV): + log.debug('ENV: %s', json.dumps(dict(os.environ), indent=2)) + + +################################################################################ +# Backports + +def shlex_join(split_command): + """Return a shell-escaped string from *split_command*.""" + return ' '.join(shlex.quote(arg) for arg in split_command) + + +################################################################################ +# Common functions + +def abort(message): + """Exits with an error message.""" + log.error(message) + sys.exit(1) + + +@contextmanager +def cd(path, silent=False): + """Changes the working directory.""" + cwd = os.getcwd() + if not silent: + cmd_log.info('cd %s', shlex.quote(path)) + try: + os.chdir(path) + yield + finally: + os.chdir(cwd) + + +@contextmanager +def tempdir(dir=None): + """Creates a temporary directory and then deletes it afterwards.""" + prefix = 'terraform-aws-lambda-' + path = tempfile.mkdtemp(prefix=prefix, dir=dir) + cmd_log.info('mktemp -d %sXXXXXXXX # %s', prefix, shlex.quote(path)) + try: + yield path + finally: + shutil.rmtree(path) + + +def list_files(top_path, log=None): + """ + Returns a sorted list of all files in a directory. 
def list_files(top_path, log=None):
    """
    Return a sorted list of all file paths under *top_path*, relative to it.

    Directories and files are visited in sorted order so the result is
    deterministic across runs (it feeds the content hash).

    NOTE(review): the signature is reconstructed from the call site in
    generate_content_hash() — confirm against the original definition.
    """
    if log:
        log = log.getChild('ls')

    results = []

    for root, dirs, files in os.walk(top_path, followlinks=True):
        # Sort directories and files to ensure they are always processed
        # in the same order
        dirs.sort()
        files.sort()
        for file_name in files:
            file_path = os.path.join(root, file_name)
            relative_path = os.path.relpath(file_path, top_path)
            results.append(relative_path)
            if log:
                log.debug(relative_path)

    results.sort()
    return results


def dataclass(name):
    """
    Create a dict subclass named *name* whose keys are readable as
    attributes; missing keys read as None instead of raising.
    """
    typ = type(name, (dict,), {
        '__getattr__': lambda self, x: self.get(x),
        '__init__': lambda self, **k: self.update(k),
    })
    return typ


def datatree(name, **fields):
    """
    Recursively wrap *fields* into attribute-accessible dict objects
    (see dataclass), decoding JSON-looking string values on the way.
    """

    def decode_json(k, v):
        # Only strings that could hold a JSON string/array/object are
        # worth a json.loads() attempt; anything else passes through.
        if v and isinstance(v, str) and v[0] in '"[{':
            try:
                o = json.loads(v)
                if isinstance(o, dict):
                    return dataclass(k)(**o)
                return o
            except json.JSONDecodeError:
                pass
        return v

    return dataclass(name)(**dict(((
        k, datatree(k, **v) if isinstance(v, dict) else decode_json(k, v))
        for k, v in fields.items())))


def timestamp_now_ns():
    """
    Current time as integer nanoseconds, truncated to 100 ns resolution
    (the precision a float POSIX timestamp can actually carry).
    """
    timestamp = datetime.datetime.now().timestamp()
    timestamp = int(timestamp * 10 ** 7) * 10 ** 2
    return timestamp


def source_code_hash(bytes):
    """Base64-encoded SHA-256 digest of *bytes*."""
    return b64encode(hashlib.sha256(bytes).digest()).decode()


def yesno_bool(val):
    """
    Coerce assorted truthy spellings to a bool.

    None stays None; bools/ints map via bool(); numeric strings via
    int(); 'true'/'yes'/'y' and 'false'/'no'/'n' (any case) map to
    True/False; any other string raises ValueError.  Other types fall
    through to False.
    """
    if val is None:
        return
    if isinstance(val, bool):
        return val
    if isinstance(val, int):
        return bool(val)
    if isinstance(val, str):
        if val.isnumeric():
            return bool(int(val))
        val = val.lower()
        if val in ('true', 'yes', 'y'):
            return True
        elif val in ('false', 'no', 'n'):
            return False
        else:
            raise ValueError("Unsupported value: %s" % val)
    return False


################################################################################
# Packaging functions

def emit_dir_content(base_dir):
    """
    Yield every directory (except *base_dir* itself) and file under
    *base_dir*, in deterministic sorted order, as normalized paths.
    """
    for root, dirs, files in os.walk(base_dir, followlinks=True):
        # Sort directories and files to ensure they are always processed
        # in the same order
        dirs.sort()
        files.sort()
        if root != base_dir:
            yield os.path.normpath(root)
        for name in files:
            yield os.path.normpath(os.path.join(root, name))


def generate_content_hash(source_paths,
                          hash_func=hashlib.sha256, log=None):
    """
    Generate a content hash of the source paths.

    Each path that is a directory is expanded with list_files(); plain
    files are hashed individually.  Returns the (unfinalized) hash object.
    """

    if log:
        log = log.getChild('hash')

    hash_obj = hash_func()

    for source_path in source_paths:
        if os.path.isdir(source_path):
            source_dir = source_path
            # BUGFIX: guard against log=None — the original called
            # log.isEnabledFor() unconditionally and raised
            # AttributeError when no logger was passed for a directory.
            _log = log if log and log.isEnabledFor(DEBUG3) else None
            for source_file in list_files(source_dir, log=_log):
                update_hash(hash_obj, source_dir, source_file)
                if log:
                    log.debug(os.path.join(source_dir, source_file))
        else:
            source_dir = os.path.dirname(source_path)
            source_file = os.path.relpath(source_path, source_dir)
            update_hash(hash_obj, source_dir, source_file)
            if log:
                log.debug(source_path)

    return hash_obj


def update_hash(hash_obj, file_root, file_path):
    """
    Update a hashlib object with the relative path and contents of a file.
    """

    relative_path = os.path.join(file_root, file_path)
    hash_obj.update(relative_path.encode())

    with open(relative_path, 'rb') as open_file:
        while True:
            data = open_file.read(1024 * 8)
            if not data:
                break
            hash_obj.update(data)
class ZipWriteStream:
    """
    Streaming zip writer with atomic replace semantics.

    Content is written to '<filename>.tmp' and only renamed over the
    target on a clean close, so a failed build never leaves a truncated
    archive behind.  Supports forcing a fixed timestamp on entries for
    reproducible archives.

    NOTE(review): the class header and the first lines of __init__/open
    sit on a diff-chunk boundary in the source and were reconstructed —
    verify against the original file.
    """

    def __init__(self, zip_filename,
                 compress_type=zipfile.ZIP_DEFLATED,
                 compresslevel=None,
                 timestamp=None):

        self.timestamp = timestamp
        self.filename = zip_filename

        if not (self.filename and isinstance(self.filename, str)):
            raise ValueError('Zip file path must be provided')

        self._tmp_filename = None
        self._compress_type = compress_type
        self._compresslevel = compresslevel
        self._zip = None

        self._log = logging.getLogger('zip')

    def open(self):
        """Create the temporary archive; returns self (context-manager style)."""
        if self._tmp_filename:
            raise zipfile.BadZipFile("ZipStream object can't be reused")
        self._ensure_base_path(self.filename)
        self._tmp_filename = '{}.tmp'.format(self.filename)
        self._log.info("creating '%s' archive", self.filename)
        self._zip = zipfile.ZipFile(self._tmp_filename, "w",
                                    self._compress_type)
        return self

    def close(self, failed=False):
        """Finalize the archive: discard the temp file on failure,
        otherwise atomically move it over the target filename."""
        self._zip.close()
        self._zip = None
        if failed:
            os.unlink(self._tmp_filename)
        else:
            os.replace(self._tmp_filename, self.filename)

    def __enter__(self):
        return self.open()

    def __exit__(self, exc_type, exc_val, exc_tb):
        if exc_type is not None:
            self._log.exception("Error during zip archive creation")
            self.close(failed=True)
            # NOTE: this is a CLI tool — any failure aborts the process
            # rather than propagating the original exception.
            raise SystemExit(1)
        self.close()

    def _ensure_open(self):
        # Raise if open() was never called or the stream was already closed.
        if self._zip is not None:
            return True
        if self._tmp_filename:
            raise zipfile.BadZipFile("ZipWriteStream object can't be reused")
        raise zipfile.BadZipFile('ZipWriteStream should be opened first')

    def _ensure_base_path(self, zip_filename):
        # Create the directory that will hold the archive, if missing.
        archive_dir = os.path.dirname(zip_filename)

        if archive_dir and not os.path.exists(archive_dir):
            self._log.info("creating %s", archive_dir)
            os.makedirs(archive_dir, exist_ok=True)

    def write_dirs(self, *base_dirs, prefix=None, timestamp=None):
        """
        Writes a directory content to a prefix inside of a zip archive
        """
        self._ensure_open()
        for base_dir in base_dirs:
            self._log.info("adding content of directory: %s", base_dir)
            for path in emit_dir_content(base_dir):
                arcname = os.path.relpath(path, base_dir)
                self._write_file(path, prefix, arcname, timestamp)

    def write_files(self, files_stream, prefix=None, timestamp=None):
        """
        Expects just files stream, directories will be created automatically
        """
        self._ensure_open()
        for file_path, arcname in files_stream:
            self._write_file(file_path, prefix, arcname, timestamp)

    def write_file(self, file_path, prefix=None, name=None, timestamp=None):
        """
        Reads a file and writes it to a prefix
        or a full qualified name in a zip archive
        """
        self._ensure_open()
        self._write_file(file_path, prefix, name, timestamp)

    def _write_file(self, file_path, prefix=None, name=None, timestamp=None):
        # Resolve the archive name, force the entry timestamp if one is
        # configured (explicit argument wins over self.timestamp), then
        # emit the entry.
        arcname = name if name else os.path.basename(file_path)
        if prefix:
            arcname = os.path.join(prefix, arcname)
        zinfo = self._make_zinfo_from_file(file_path, arcname)
        if zinfo.is_dir():
            self._log.info("adding: %s/", arcname)
        else:
            self._log.info("adding: %s", arcname)
        if timestamp is None:
            timestamp = self.timestamp
        date_time = self._timestamp_to_date_time(timestamp)
        if date_time:
            self._update_zinfo(zinfo, date_time=date_time)
        self._write_zinfo(zinfo, file_path)

    def write_file_obj(self, file_path, data, prefix=None, timestamp=None):
        """
        Write a data to a zip archive by a full qualified archive file path
        """
        self._ensure_open()
        raise NotImplementedError

    def _write_zinfo(self, zinfo, filename,
                     compress_type=None, compresslevel=None):
        # Low-level entry writer adapted from CPython's zipfile
        # internals: it touches the private _lock/_seekable/_writecheck
        # members so that bare directory entries can be emitted with a
        # header only, while regular files go through ZipFile.open(w).
        self._ensure_open()

        zip = self._zip

        if not zip.fp:
            raise ValueError(
                "Attempt to write to ZIP archive that was already closed")
        if zip._writing:
            raise ValueError(
                "Can't write to ZIP archive while an open writing handle exists"
            )

        if zinfo.is_dir():
            zinfo.compress_size = 0
            zinfo.CRC = 0
        else:
            if compress_type is not None:
                zinfo.compress_type = compress_type
            else:
                zinfo.compress_type = self._compress_type

            if PY37:
                # compresslevel only exists on ZipInfo from Python 3.7 on
                if compresslevel is not None:
                    zinfo._compresslevel = compresslevel
                else:
                    zinfo._compresslevel = self._compresslevel

        if zinfo.is_dir():
            with zip._lock:
                if zip._seekable:
                    zip.fp.seek(zip.start_dir)
                zinfo.header_offset = zip.fp.tell()  # Start of header bytes
                if zinfo.compress_type == zipfile.ZIP_LZMA:
                    # Compressed data includes an end-of-stream (EOS) marker
                    zinfo.flag_bits |= 0x02

                zip._writecheck(zinfo)
                zip._didModify = True

                zip.filelist.append(zinfo)
                zip.NameToInfo[zinfo.filename] = zinfo
                zip.fp.write(zinfo.FileHeader(False))
                zip.start_dir = zip.fp.tell()
        else:
            with open(filename, "rb") as src, zip.open(zinfo, 'w') as dest:
                shutil.copyfileobj(src, dest, 1024 * 8)

    def _make_zinfo_from_file(self, filename, arcname=None):
        # Use the stdlib factory when available (3.8+), otherwise the
        # backported copy below; strict_timestamps clamps out-of-range
        # dates instead of failing.
        if PY38:
            zinfo_func = zipfile.ZipInfo.from_file
            strict_timestamps = self._zip._strict_timestamps
        else:
            zinfo_func = self._zinfo_from_file
            strict_timestamps = True

        return zinfo_func(filename, arcname,
                          strict_timestamps=strict_timestamps)

    @staticmethod
    def _update_zinfo(zinfo, date_time):
        # Overwrite the entry's stored modification time.
        zinfo.date_time = date_time

    # Borrowed from python 3.8 zipfile.py library
    # due to the need of strict_timestamps functionality.
    @staticmethod
    def _zinfo_from_file(filename, arcname=None, *, strict_timestamps=True):
        """Construct an appropriate ZipInfo for a file on the filesystem.

        filename should be the path to a file or directory on the filesystem.

        arcname is the name which it will have within the archive (by default,
        this will be the same as filename, but without a drive letter and with
        leading path separators removed).
        """
        if isinstance(filename, os.PathLike):
            filename = os.fspath(filename)
        st = os.stat(filename)
        isdir = stat.S_ISDIR(st.st_mode)
        mtime = time.localtime(st.st_mtime)
        date_time = mtime[0:6]
        if strict_timestamps and date_time[0] < 1980:
            date_time = (1980, 1, 1, 0, 0, 0)
        elif strict_timestamps and date_time[0] > 2107:
            date_time = (2107, 12, 31, 23, 59, 59)
        # Create ZipInfo instance to store file information
        if arcname is None:
            arcname = filename
        arcname = os.path.normpath(os.path.splitdrive(arcname)[1])
        while arcname[0] in (os.sep, os.altsep):
            arcname = arcname[1:]
        if isdir:
            arcname += '/'
        zinfo = zipfile.ZipInfo(arcname, date_time)
        zinfo.external_attr = (st.st_mode & 0xFFFF) << 16  # Unix attributes
        if isdir:
            zinfo.file_size = 0
            zinfo.external_attr |= 0x10  # MS-DOS directory flag
        else:
            zinfo.file_size = st.st_size

        return zinfo

    @staticmethod
    def _timestamp_to_date_time(timestamp):
        """
        Normalize a timestamp given as int/str/float into a 6-tuple
        (Y, M, D, h, m, s) suitable for ZipInfo.date_time, or None when
        no timestamp was given.
        """
        def str_int_to_timestamp(s):
            # Integer strings may carry sub-second digits: scale down to
            # whole seconds (9-digit epoch magnitude); clamp to the
            # minimum timestamp zip can represent (1980-01-01).
            min_zip_ts = datetime.datetime(1980, 1, 1).timestamp()
            ts = int(s)
            if ts < min_zip_ts:
                return min_zip_ts
            deg = len(str(int(s))) - 9
            if deg < 0:
                ts = ts * 10 ** deg
            return ts

        date_time = None
        if timestamp is not None:
            if isinstance(timestamp, str):
                if timestamp.isnumeric():
                    timestamp = str_int_to_timestamp(timestamp)
                else:
                    timestamp = float(timestamp)
            elif isinstance(timestamp, int):
                timestamp = str_int_to_timestamp(str(timestamp))

            date_time = datetime.datetime.fromtimestamp(timestamp).timetuple()
            date_time = date_time[:6]
            if date_time[0] < 1980:
                raise ValueError('ZIP does not support timestamps before 1980')
        return date_time


################################################################################
# Building

def patterns_list(args, patterns):
    """
    Normalize *patterns* (a newline-separated string or a list) into a
    list of stripped, non-empty pattern strings.  With
    args.pattern_comments enabled, trailing '  # ...' comments and
    whole-line '#' comments are removed.
    """
    _filter = str.strip
    if args.pattern_comments:
        def _filter(x):
            x = x.strip()
            p = re.search("^(.*?)[ \t]*(?:[ \t]{2}#.*)?$", x).group(1).rstrip()
            if p.startswith('#'):
                return
            if p:
                return p
    if isinstance(patterns, str):
        return list(filter(None, map(_filter, patterns.splitlines())))
    return patterns
class ZipContentFilter:
    """
    Path filter driven by an ordered list of regex patterns.

    Patterns prefixed with '!' exclude matching paths, all other
    patterns include them; paths are included by default and the last
    matching rule wins.  filter() walks a file or a directory and
    yields only the real paths that survive the rules.
    """

    def __init__(self, args):
        self._args = args
        self._rules = None
        self._excludes = set()
        self._log = logging.getLogger('zip')

    def compile(self, patterns):
        """Compile *patterns* into an ordered list of (negate, regex) rules."""
        rules = []
        for p in patterns_list(self._args, patterns):
            self._log.debug("filter pattern: %s", p)
            if p.startswith('!'):
                r = re.compile(p[1:])
                rules.append((operator.not_, r))
            else:
                r = re.compile(p)
                rules.append((None, r))
        self._rules = rules

    def filter(self, path, prefix=None):
        """
        Yield the filesystem paths under *path* (a file or a directory)
        that pass the compiled rules; *prefix* is prepended to the
        archive-relative name that the rules are matched against.
        """
        path = os.path.normpath(path)
        if prefix:
            prefix = os.path.normpath(prefix)
        rules = self._rules

        def norm_path(path, root, filename=None):
            # Returns (real_path, match_name); directories get a trailing
            # os.sep so patterns can target them explicitly.
            op = os.path.join(root, filename) if filename else root
            p = os.path.relpath(root, path)
            if prefix:
                p = os.path.join(prefix, p)
            if filename:
                p = os.path.normpath(os.path.join(p, filename))
                return op, p
            return op, p + os.sep

        def apply(path):
            # Included by default; every matching rule flips the decision,
            # so the last matching rule decides.
            d = True
            for r in rules:
                op, regex = r
                neg = op is operator.not_
                m = regex.fullmatch(path)
                if neg and m:
                    d = False
                elif m:
                    d = True
            if d:
                return path

        def emit_dir(dpath, opath):
            if apply(dpath):
                yield opath
            else:
                self._log.debug('skip: %s', dpath)

        def emit_file(fpath, opath):
            if apply(fpath):
                yield opath
            else:
                self._log.debug('skip: %s', fpath)

        if os.path.isfile(path):
            name = os.path.basename(path)
            if prefix:
                name = os.path.join(prefix, name)
            if apply(name):
                yield path
        else:
            for root, dirs, files in os.walk(path, followlinks=True):
                # Sort directories and files to ensure they are always
                # processed in the same order
                dirs.sort()
                files.sort()
                o, d = norm_path(path, root)
                # log.info('od: %s %s', o, d)
                if root != path:
                    yield from emit_dir(d, o)
                for name in files:
                    o, f = norm_path(path, root, name)
                    # log.info('of: %s %s', o, f)
                    yield from emit_file(f, o)


def get_build_system_from_pyproject_toml(pyproject_file):
    """
    Return "poetry" when *pyproject_file* declares a poetry
    build-backend in its [build-system] table, else None.
    """
    # Implement a basic TOML parser because python stdlib does not
    # provide toml support and we probably do not want to add external
    # dependencies
    if os.path.isfile(pyproject_file):
        with open(pyproject_file) as f:
            bs = False
            for line in f.readlines():
                if line.startswith("[build-system]"):
                    bs = True
                    continue
                if bs and line.startswith("build-backend") and "poetry" in line:
                    return "poetry"
class BuildPlanManager:
    """
    Two-phase packaging driver.

    plan() translates a source_path spec (string, dict, or list of
    those) into a JSON-serializable list of build steps and records the
    paths that must feed the content hash; hash() computes that hash;
    execute() replays a build plan into a ZipWriteStream.
    """

    def __init__(self, args, log=None):
        self._args = args
        self._source_paths = None
        self._log = log or logging.root

    def hash(self, extra_paths):
        """Return a content-hash object over the planned source paths
        plus *extra_paths*; plan() must have been called first."""
        if not self._source_paths:
            raise ValueError('BuildPlanManager.plan() should be called first')

        content_hash_paths = self._source_paths + extra_paths

        # Generate a hash based on file names and content. Also use the
        # runtime value, build command, and content of the build paths
        # because they can have an effect on the resulting archive.
        self._log.debug("Computing content hash on files...")
        content_hash = generate_content_hash(content_hash_paths,
                                             log=self._log)
        return content_hash

    def plan(self, source_path, query):
        """
        Build the step list for *source_path* and remember the hashable
        source paths.  Step kinds emitted: 'zip', 'zip:embedded', 'pip',
        'poetry', 'npm', 'sh', 'set:filter', 'clear:filter'.
        """
        claims = source_path
        if not isinstance(source_path, list):
            claims = [source_path]

        source_paths = []
        build_plan = []

        step = lambda *x: build_plan.append(x)
        hash = source_paths.append

        # NOTE(review): the nested *_step helpers close over `runtime`,
        # which is (re)assigned inside the claim loop below — confirm
        # this late-binding is intentional.
        def pip_requirements_step(path, prefix=None, required=False,
                                  tmp_dir=None):
            command = runtime
            requirements = path
            if os.path.isdir(path):
                requirements = os.path.join(path, 'requirements.txt')
            if not os.path.isfile(requirements):
                if required:
                    raise RuntimeError(
                        'File not found: {}'.format(requirements))
            else:
                if not query.docker and not shutil.which(command):
                    raise RuntimeError(
                        "Python interpreter version equal "
                        "to defined lambda runtime ({}) should be "
                        "available in system PATH".format(command))

                step('pip', runtime, requirements, prefix, tmp_dir)
                hash(requirements)

        def poetry_install_step(path, prefix=None, required=False):
            pyproject_file = path
            if os.path.isdir(path):
                pyproject_file = os.path.join(path, "pyproject.toml")
            if get_build_system_from_pyproject_toml(pyproject_file) != "poetry":
                if required:
                    raise RuntimeError(
                        "poetry configuration not found: {}".format(pyproject_file))
            else:
                step("poetry", runtime, path, prefix)
                hash(pyproject_file)
                # lock and toml files are optional but affect the output
                poetry_lock_file = os.path.join(path, "poetry.lock")
                if os.path.isfile(poetry_lock_file):
                    hash(poetry_lock_file)
                poetry_toml_file = os.path.join(path, "poetry.toml")
                if os.path.isfile(poetry_toml_file):
                    hash(poetry_toml_file)

        def npm_requirements_step(path, prefix=None, required=False,
                                  tmp_dir=None):
            command = "npm"
            requirements = path
            if os.path.isdir(path):
                requirements = os.path.join(path, 'package.json')
            if not os.path.isfile(requirements):
                if required:
                    raise RuntimeError(
                        'File not found: {}'.format(requirements))
            else:
                if not query.docker and not shutil.which(command):
                    raise RuntimeError(
                        "Nodejs package manager ({}) should be "
                        "available in system PATH".format(command))

                step('npm', runtime, requirements, prefix, tmp_dir)
                hash(requirements)

        def commands_step(path, commands):
            # Shell commands are batched into 'sh' steps; a ':zip' line
            # flushes the batch and emits a 'zip:embedded' step.
            if not commands:
                return

            if isinstance(commands, str):
                commands = map(str.strip, commands.splitlines())

            if path:
                path = os.path.normpath(path)
            batch = []
            for c in commands:
                if isinstance(c, str):
                    if c.startswith(':zip'):
                        if path:
                            hash(path)
                        else:
                            # If path doesn't defined for a block with
                            # commands it will be set to Terraform's
                            # current working directory
                            # NB: cwd may vary when using Terraform 0.14+ like:
                            # `terraform -chdir=...`
                            path = query.paths.cwd
                        if batch:
                            step('sh', path, '\n'.join(batch))
                            batch.clear()
                        c = shlex.split(c)
                        if len(c) == 3:
                            _, _path, prefix = c
                            prefix = prefix.strip()
                            _path = os.path.normpath(os.path.join(path, _path))
                            step('zip:embedded', _path, prefix)
                        elif len(c) == 2:
                            prefix = None
                            _, _path = c
                            step('zip:embedded', _path, prefix)
                        elif len(c) == 1:
                            prefix = None
                            step('zip:embedded', path, prefix)
                        else:
                            raise ValueError(
                                ":zip invalid call signature, use: "
                                "':zip [path [prefix_in_zip]]'")
                    else:
                        batch.append(c)

        for claim in claims:
            if isinstance(claim, str):
                # Bare string: zip the whole path, auto-detecting
                # requirement files for the query runtime.
                path = claim
                if not os.path.exists(path):
                    abort('Could not locate source_path "{path}". Paths are relative to directory where `terraform plan` is being run ("{pwd}")'.format(
                        path=path,
                        pwd=os.getcwd()
                    ))
                runtime = query.runtime
                if runtime.startswith('python'):
                    pip_requirements_step(
                        os.path.join(path, 'requirements.txt'))
                    poetry_install_step(path)
                elif runtime.startswith('nodejs'):
                    npm_requirements_step(
                        os.path.join(path, 'package.json'))
                step('zip', path, None)
                hash(path)

            elif isinstance(claim, dict):
                path = claim.get('path')
                patterns = claim.get('patterns')
                commands = claim.get('commands')
                if patterns:
                    step('set:filter', patterns_list(self._args, patterns))
                if commands:
                    commands_step(path, commands)
                else:
                    prefix = claim.get('prefix_in_zip')
                    pip_requirements = claim.get('pip_requirements')
                    poetry_install = claim.get("poetry_install")
                    npm_requirements = claim.get('npm_package_json')
                    runtime = claim.get('runtime', query.runtime)

                    if pip_requirements and runtime.startswith('python'):
                        if isinstance(pip_requirements, bool) and path:
                            pip_requirements_step(
                                path, prefix, required=True,
                                tmp_dir=claim.get('pip_tmp_dir'))
                        else:
                            pip_requirements_step(
                                pip_requirements, prefix,
                                required=True,
                                tmp_dir=claim.get('pip_tmp_dir'))

                    if poetry_install and runtime.startswith("python"):
                        if path:
                            poetry_install_step(path, prefix, required=True)

                    if npm_requirements and runtime.startswith('nodejs'):
                        if isinstance(npm_requirements, bool) and path:
                            npm_requirements_step(
                                path, prefix, required=True,
                                tmp_dir=claim.get('npm_tmp_dir'))
                        else:
                            npm_requirements_step(
                                npm_requirements, prefix,
                                required=True,
                                tmp_dir=claim.get('npm_tmp_dir'))
                    if path:
                        step('zip', path, prefix)
                        if patterns:
                            # Take patterns into account when computing hash
                            pf = ZipContentFilter(args=self._args)
                            pf.compile(patterns)

                            for path_from_pattern in pf.filter(path, prefix):
                                hash(path_from_pattern)
                        else:
                            hash(path)

                if patterns:
                    step('clear:filter')
            else:
                raise ValueError(
                    'Unsupported source_path item: {}'.format(claim))

        self._source_paths = source_paths
        return build_plan

    def execute(self, build_plan, zip_stream, query):
        """Replay *build_plan* into *zip_stream*, installing dependencies
        and running shell steps as dictated by each action tuple."""
        zs = zip_stream
        sh_work_dir = None
        pf = None

        for action in build_plan:
            cmd = action[0]
            if cmd.startswith('zip'):
                # 'zip:embedded' entries get a fixed zero timestamp for
                # reproducibility; plain 'zip' keeps file mtimes.
                ts = 0 if cmd == 'zip:embedded' else None
                source_path, prefix = action[1:]
                if sh_work_dir:
                    # A previous 'sh' step may have relocated the work
                    # dir; fall back to it when source_path is not a file.
                    if source_path != sh_work_dir:
                        if not os.path.isfile(source_path):
                            source_path = sh_work_dir
                if os.path.isdir(source_path):
                    if pf:
                        self._zip_write_with_filter(zs, pf, source_path,
                                                    prefix, timestamp=ts)
                    else:
                        zs.write_dirs(source_path, prefix=prefix,
                                      timestamp=ts)
                else:
                    zs.write_file(source_path, prefix=prefix, timestamp=ts)
            elif cmd == 'pip':
                runtime, pip_requirements, prefix, tmp_dir = action[1:]
                with install_pip_requirements(query, pip_requirements,
                                              tmp_dir) as rd:
                    if rd:
                        if pf:
                            self._zip_write_with_filter(
                                zs, pf, rd, prefix, timestamp=0)
                        else:
                            # XXX: timestamp=0 - what actually do with it?
                            zs.write_dirs(rd, prefix=prefix, timestamp=0)
            elif cmd == "poetry":
                runtime, path, prefix = action[1:]
                with install_poetry_dependencies(query, path) as rd:
                    if rd:
                        if pf:
                            self._zip_write_with_filter(
                                zs, pf, rd, prefix, timestamp=0)
                        else:
                            # XXX: timestamp=0 - what actually do with it?
                            zs.write_dirs(rd, prefix=prefix, timestamp=0)
            elif cmd == 'npm':
                runtime, npm_requirements, prefix, tmp_dir = action[1:]
                with install_npm_requirements(query, npm_requirements,
                                              tmp_dir) as rd:
                    if rd:
                        if pf:
                            self._zip_write_with_filter(zs, pf, rd, prefix,
                                                        timestamp=0)
                        else:
                            # XXX: timestamp=0 - what actually do with it?
                            zs.write_dirs(rd, prefix=prefix, timestamp=0)
            elif cmd == 'sh':
                # Run the batched shell script; a trailing `pwd` is sent
                # back over a pipe so the next 'zip' step knows where the
                # script left its output.
                r, w = os.pipe()
                side_ch = os.fdopen(r)
                path, script = action[1:]
                script = "{}\npwd >&{}".format(script, w)

                p = subprocess.Popen(script, shell=True, cwd=path,
                                     pass_fds=(w,))
                os.close(w)
                sh_work_dir = side_ch.read().strip()
                p.wait()
                log.info('WD: %s', sh_work_dir)
                side_ch.close()
            elif cmd == 'set:filter':
                patterns = action[1]
                pf = ZipContentFilter(args=self._args)
                pf.compile(patterns)
            elif cmd == 'clear:filter':
                pf = None

    @staticmethod
    def _zip_write_with_filter(zip_stream, path_filter, source_path, prefix,
                               timestamp=None):
        # Write only the paths that survive path_filter, preserving
        # archive-relative names.
        for path in path_filter.filter(source_path, prefix):
            if os.path.isdir(source_path):
                arcname = os.path.relpath(path, source_path)
            else:
                arcname = os.path.basename(path)
            zip_stream.write_file(path, prefix, arcname, timestamp=timestamp)
@contextmanager
def install_pip_requirements(query, requirements_file, tmp_dir):
    """
    Install *requirements_file* with pip into a temporary directory and
    yield that directory (or None when the file does not exist).

    When query.docker is set, pip runs inside a docker container
    (building the image first if needed); otherwise the runtime's
    interpreter must be on PATH.
    """
    # TODO:
    # 1. Emit files instead of temp_dir

    if not os.path.exists(requirements_file):
        yield
        return

    runtime = query.runtime
    artifacts_dir = query.artifacts_dir
    docker = query.docker
    temp_dir = query.temp_dir
    docker_image_tag_id = None

    if docker:
        docker_file = docker.docker_file
        docker_image = docker.docker_image
        docker_build_root = docker.docker_build_root

        if docker_image:
            # Resolve the image id; build the image once if it is not
            # present locally, then loop back to re-read its id.
            ok = False
            while True:
                output = check_output(docker_image_id_command(docker_image))
                if output:
                    docker_image_tag_id = output.decode().strip()
                    log.debug("DOCKER TAG ID: %s -> %s",
                              docker_image, docker_image_tag_id)
                    ok = True
                if ok:
                    break
                docker_cmd = docker_build_command(
                    build_root=docker_build_root,
                    docker_file=docker_file,
                    tag=docker_image,
                )
                check_call(docker_cmd)
                ok = True
        elif docker_file or docker_build_root:
            raise ValueError('docker_image must be specified '
                             'for a custom image future references')

    working_dir = os.getcwd()

    log.info('Installing python requirements: %s', requirements_file)
    with tempdir(tmp_dir) as temp_dir:
        requirements_filename = os.path.basename(requirements_file)
        target_file = os.path.join(temp_dir, requirements_filename)
        shutil.copyfile(requirements_file, target_file)

        python_exec = runtime
        subproc_env = None

        if not docker:
            if WINDOWS:
                python_exec = 'python.exe'
            elif OSX:
                # Workaround for OSX when XCode command line tools'
                # python becomes the main system python interpreter
                os_path = '{}:/Library/Developer/CommandLineTools' \
                          '/usr/bin'.format(os.environ['PATH'])
                subproc_env = os.environ.copy()
                subproc_env['PATH'] = os_path

        # Install dependencies into the temporary directory.
        with cd(temp_dir):
            pip_command = [
                python_exec, '-m', 'pip',
                'install', '--no-compile',
                '--prefix=', '--target=.',
                '--requirement={}'.format(requirements_filename),
            ]
            if docker:
                with_ssh_agent = docker.with_ssh_agent
                pip_cache_dir = docker.docker_pip_cache
                if pip_cache_dir:
                    if isinstance(pip_cache_dir, str):
                        pip_cache_dir = os.path.abspath(
                            os.path.join(working_dir, pip_cache_dir))
                    else:
                        # truthy but not a path: use the default cache
                        # location under the artifacts dir
                        pip_cache_dir = os.path.abspath(os.path.join(
                            working_dir, artifacts_dir, 'cache/pip'))

                # chown back to the host user so the files are removable
                # outside the container
                chown_mask = '{}:{}'.format(os.getuid(), os.getgid())
                shell_command = [shlex_join(pip_command), '&&',
                                 shlex_join(['chown', '-R',
                                             chown_mask, '.'])]
                shell_command = [' '.join(shell_command)]
                check_call(docker_run_command(
                    '.', shell_command, runtime,
                    image=docker_image_tag_id,
                    shell=True, ssh_agent=with_ssh_agent,
                    pip_cache_dir=pip_cache_dir, docker=docker,
                ))
            else:
                cmd_log.info(shlex_join(pip_command))
                log_handler and log_handler.flush()
                try:
                    check_call(pip_command, env=subproc_env)
                except FileNotFoundError as e:
                    raise RuntimeError(
                        "Python interpreter version equal "
                        "to defined lambda runtime ({}) should be "
                        "available in system PATH".format(runtime)
                    ) from e

        # The copied requirements file must not end up in the archive.
        os.remove(target_file)
        yield temp_dir
@contextmanager
def install_poetry_dependencies(query, path):
    """
    Export the poetry project at *path* to requirements.txt and install
    it with pip into a temporary directory, yielding that directory
    (or None when no pyproject.toml exists).

    When query.docker is set, the commands run inside a docker container
    (building the image first if needed); otherwise poetry and the
    runtime's interpreter must be on PATH.
    """
    # TODO:
    # 1. Emit files instead of temp_dir

    # pyproject.toml is always required by poetry
    pyproject_file = os.path.join(path, "pyproject.toml")
    if not os.path.exists(pyproject_file):
        yield
        return

    # poetry.lock & poetry.toml are optional
    poetry_lock_file = os.path.join(path, "poetry.lock")
    poetry_toml_file = os.path.join(path, "poetry.toml")

    runtime = query.runtime
    artifacts_dir = query.artifacts_dir
    docker = query.docker
    docker_image_tag_id = None

    if docker:
        docker_file = docker.docker_file
        docker_image = docker.docker_image
        docker_build_root = docker.docker_build_root

        if docker_image:
            # Resolve the image id; build the image once if it is not
            # present locally, then loop back to re-read its id.
            ok = False
            while True:
                output = check_output(docker_image_id_command(docker_image))
                if output:
                    docker_image_tag_id = output.decode().strip()
                    log.debug(
                        "DOCKER TAG ID: %s -> %s", docker_image, docker_image_tag_id
                    )
                    ok = True
                if ok:
                    break
                docker_cmd = docker_build_command(
                    build_root=docker_build_root,
                    docker_file=docker_file,
                    tag=docker_image,
                )
                check_call(docker_cmd)
                ok = True
        elif docker_file or docker_build_root:
            raise ValueError(
                "docker_image must be specified for a custom image future references"
            )

    working_dir = os.getcwd()

    log.info("Installing python dependencies with poetry & pip: %s",
             poetry_lock_file)
    with tempdir() as temp_dir:
        def copy_file_to_target(file, temp_dir):
            # Copy a project file next to where pip/poetry will run and
            # return the copied path so it can be removed afterwards.
            filename = os.path.basename(file)
            target_file = os.path.join(temp_dir, filename)
            shutil.copyfile(file, target_file)
            return target_file

        pyproject_target_file = copy_file_to_target(pyproject_file, temp_dir)

        if os.path.isfile(poetry_lock_file):
            log.info("Using poetry lock file: %s", poetry_lock_file)
            poetry_lock_target_file = copy_file_to_target(
                poetry_lock_file, temp_dir)
        else:
            poetry_lock_target_file = None

        if os.path.isfile(poetry_toml_file):
            # BUGFIX: this message previously logged poetry_lock_file.
            log.info("Using poetry configuration file: %s", poetry_toml_file)
            poetry_toml_target_file = copy_file_to_target(
                poetry_toml_file, temp_dir)
        else:
            poetry_toml_target_file = None

        poetry_exec = "poetry"
        python_exec = runtime
        subproc_env = None

        if not docker:
            if WINDOWS:
                poetry_exec = "poetry.bat"

        # Install dependencies into the temporary directory.
        with cd(temp_dir):
            # NOTE: poetry must be available in the build environment, which is the case with lambci/lambda:build-python* docker images but not public.ecr.aws/sam/build-python* docker images
            # FIXME: poetry install does not currently allow to specify the target directory so we export the
            # requirements then install them with "pip --no-deps" to avoid using pip dependency resolver
            poetry_commands = [
                [
                    poetry_exec,
                    "config",
                    "--no-interaction",
                    "virtualenvs.create",
                    "true",
                ],
                [
                    poetry_exec,
                    "config",
                    "--no-interaction",
                    "virtualenvs.in-project",
                    "true",
                ],
                [
                    poetry_exec,
                    "export",
                    "--format",
                    "requirements.txt",
                    "--output",
                    "requirements.txt",
                    "--with-credentials",
                ],
                [
                    python_exec,
                    "-m",
                    "pip",
                    "install",
                    "--no-compile",
                    "--no-deps",
                    "--prefix=",
                    "--target=.",
                    "--requirement=requirements.txt",
                ],
            ]
            if docker:
                with_ssh_agent = docker.with_ssh_agent
                poetry_cache_dir = docker.docker_poetry_cache
                if poetry_cache_dir:
                    if isinstance(poetry_cache_dir, str):
                        poetry_cache_dir = os.path.abspath(
                            os.path.join(working_dir, poetry_cache_dir)
                        )
                    else:
                        # truthy but not a path: default cache location
                        # under the artifacts dir
                        poetry_cache_dir = os.path.abspath(
                            os.path.join(
                                working_dir, artifacts_dir, "cache/poetry")
                        )

                # chown back to the host user so the files are removable
                # outside the container
                chown_mask = "{}:{}".format(os.getuid(), os.getgid())
                poetry_commands += [["chown", "-R", chown_mask, "."]]
                shell_commands = [
                    shlex_join(poetry_command) for poetry_command in poetry_commands
                ]
                shell_command = [" && ".join(shell_commands)]
                check_call(
                    docker_run_command(
                        ".",
                        shell_command,
                        runtime,
                        image=docker_image_tag_id,
                        shell=True,
                        ssh_agent=with_ssh_agent,
                        poetry_cache_dir=poetry_cache_dir,
                        docker=docker,
                    )
                )
            else:
                # CONSISTENCY: log each command shell-joined (like the pip
                # path does) instead of dumping the raw nested list.
                for poetry_command in poetry_commands:
                    cmd_log.info(shlex_join(poetry_command))
                log_handler and log_handler.flush()
                for poetry_command in poetry_commands:
                    check_call(poetry_command, env=subproc_env)

        # The copied project files must not end up in the archive.
        os.remove(pyproject_target_file)
        if poetry_lock_target_file:
            os.remove(poetry_lock_target_file)
        if poetry_toml_target_file:
            os.remove(poetry_toml_target_file)

        yield temp_dir
@contextmanager
def install_npm_requirements(query, requirements_file, tmp_dir):
    """
    Run `npm install` for *requirements_file* (a package.json) in a
    temporary directory and yield that directory (or None when the file
    does not exist).  Uses docker when query.docker is set.
    """
    # TODO:
    # 1. Emit files instead of temp_dir

    if not os.path.exists(requirements_file):
        yield
        return

    runtime = query.runtime
    artifacts_dir = query.artifacts_dir
    temp_dir = query.temp_dir
    docker = query.docker
    docker_image_tag_id = None

    if docker:
        docker_file = docker.docker_file
        docker_image = docker.docker_image
        docker_build_root = docker.docker_build_root

        if docker_image:
            # Resolve the image id; build the image once if it is not
            # present locally, then loop back to re-read its id.
            ok = False
            while True:
                output = check_output(docker_image_id_command(docker_image))
                if output:
                    docker_image_tag_id = output.decode().strip()
                    log.debug("DOCKER TAG ID: %s -> %s",
                              docker_image, docker_image_tag_id)
                    ok = True
                if ok:
                    break
                docker_cmd = docker_build_command(
                    build_root=docker_build_root,
                    docker_file=docker_file,
                    tag=docker_image,
                )
                check_call(docker_cmd)
                ok = True
        elif docker_file or docker_build_root:
            raise ValueError('docker_image must be specified '
                             'for a custom image future references')

    log.info('Installing npm requirements: %s', requirements_file)
    with tempdir(tmp_dir) as temp_dir:
        requirements_filename = os.path.basename(requirements_file)
        target_file = os.path.join(temp_dir, requirements_filename)
        shutil.copyfile(requirements_file, target_file)

        subproc_env = None
        if not docker and OSX:
            subproc_env = os.environ.copy()

        # Install dependencies into the temporary directory.
        with cd(temp_dir):
            npm_command = ['npm', 'install']
            if docker:
                with_ssh_agent = docker.with_ssh_agent
                # chown back to the host user so the files are removable
                # outside the container
                chown_mask = '{}:{}'.format(os.getuid(), os.getgid())
                shell_command = [shlex_join(npm_command), '&&',
                                 shlex_join(['chown', '-R',
                                             chown_mask, '.'])]
                shell_command = [' '.join(shell_command)]
                check_call(docker_run_command(
                    '.', shell_command, runtime,
                    image=docker_image_tag_id,
                    shell=True, ssh_agent=with_ssh_agent,
                    docker=docker,
                ))
            else:
                cmd_log.info(shlex_join(npm_command))
                log_handler and log_handler.flush()
                try:
                    check_call(npm_command, env=subproc_env)
                except FileNotFoundError as e:
                    raise RuntimeError(
                        "Nodejs interpreter version equal "
                        "to defined lambda runtime ({}) should be "
                        "available in system PATH".format(runtime)
                    ) from e

        # The copied package.json must not end up in the archive.
        os.remove(target_file)
        yield temp_dir


def docker_image_id_command(tag):
    """Return the docker CLI argv that prints the local image id of *tag*."""
    docker_cmd = ['docker', 'images', '--format={{.ID}}', tag]
    cmd_log.info(shlex_join(docker_cmd))
    log_handler and log_handler.flush()
    return docker_cmd


def docker_build_command(tag=None, docker_file=None, build_root=False):
    """
    Return the docker CLI argv that builds image *tag* from
    *docker_file* and/or *build_root*; at least one of the two must be
    given, and *tag* is mandatory.
    """
    if not (build_root or docker_file):
        raise ValueError('docker_build_root or docker_file must be provided')

    docker_cmd = ['docker', 'build']

    if tag:
        docker_cmd.extend(['--tag', tag])
    else:
        raise ValueError('docker_image must be specified')
    if not build_root:
        # Default the build context to the Dockerfile's directory.
        build_root = os.path.dirname(docker_file)
    if docker_file:
        docker_cmd.extend(['--file', docker_file])
    docker_cmd.append(build_root)

    cmd_log.info(shlex_join(docker_cmd))
    log_handler and log_handler.flush()
    return docker_cmd
def docker_run_command(build_root, command, runtime,
                       image=None, shell=None, ssh_agent=False,
                       interactive=False, pip_cache_dir=None,
                       poetry_cache_dir=None, docker=None):
    """
    Return the docker CLI argv that runs *command* inside a build
    container with *build_root* bind-mounted at /var/task, optionally
    forwarding the ssh agent and mounting pip/poetry caches.

    NOTE(review): the signature spans a diff-chunk boundary in the
    source and was reconstructed — verify against the original file.
    """
    if platform.system() not in ('Linux', 'Darwin'):
        raise RuntimeError("Unsupported platform for docker building")

    workdir = '/var/task'

    docker_cmd = ['docker', 'run', '--rm', '-w', workdir]

    if interactive:
        docker_cmd.append('-it')

    bind_path = os.path.abspath(build_root)
    docker_cmd.extend(['-v', "{}:{}:z".format(bind_path, workdir)])

    home = os.environ['HOME']
    docker_cmd.extend([
        # '-v', '{}/.ssh/id_rsa:/root/.ssh/id_rsa:z'.format(home),
        '-v', '{}/.ssh/known_hosts:/root/.ssh/known_hosts:z'.format(home),
    ])

    if docker and docker.docker_additional_options:
        docker_cmd.extend(docker.docker_additional_options)

    if ssh_agent:
        if platform.system() == 'Darwin':
            # https://docs.docker.com/docker-for-mac/osxfs/#ssh-agent-forwarding
            docker_cmd.extend([
                '--mount', 'type=bind,'
                           'src=/run/host-services/ssh-auth.sock,'
                           'target=/run/host-services/ssh-auth.sock',
                '-e', 'SSH_AUTH_SOCK=/run/host-services/ssh-auth.sock',
            ])
        elif platform.system() == 'Linux':
            sock = os.environ['SSH_AUTH_SOCK']  # TODO: Handle missing env var
            docker_cmd.extend([
                '-v', '{}:/tmp/ssh_sock:z'.format(sock),
                '-e', 'SSH_AUTH_SOCK=/tmp/ssh_sock',
            ])

    if platform.system() in ('Linux', 'Darwin'):
        if pip_cache_dir:
            pip_cache_dir = os.path.abspath(pip_cache_dir)
            docker_cmd.extend([
                '-v', '{}:/root/.cache/pip:z'.format(pip_cache_dir),
            ])
        if poetry_cache_dir:
            poetry_cache_dir = os.path.abspath(poetry_cache_dir)
            docker_cmd.extend([
                '-v', '{}:/root/.cache/pypoetry:z'.format(poetry_cache_dir),
            ])

    if not image:
        # Default to AWS's public SAM build image for the runtime.
        image = 'public.ecr.aws/sam/build-{}'.format(runtime)

    if docker and docker.docker_entrypoint:
        docker_cmd.extend(['--entrypoint', docker.docker_entrypoint])
    else:
        # Clear the image's entrypoint so *command* runs verbatim.
        docker_cmd.extend(['--entrypoint', ''])

    docker_cmd.append(image)

    assert isinstance(command, list)
    if shell:
        if not isinstance(shell, str):
            shell = '/bin/sh'
        docker_cmd.extend([shell, '-c'])
    docker_cmd.extend(command)

    cmd_log.info(shlex_join(docker_cmd))
    log_handler and log_handler.flush()
    return docker_cmd
################################################################################
# Commands

def prepare_command(args):
    """
    Generates a content hash of the source_path, which is used to determine if
    the Lambda code has changed, ignoring file modification and access times.

    Outputs a filename and a command to run if the archive needs to be built.

    Reads the Terraform query from stdin and writes a JSON result (filename,
    build plan, plan file path, timestamp trigger) to stdout.
    """

    log = logging.getLogger('prepare')

    # Load the query.
    query_data = json.load(sys.stdin)

    dump_env()
    if log.isEnabledFor(DEBUG2):
        if log.isEnabledFor(DEBUG3):
            log.debug('QUERY: %s', json.dumps(query_data, indent=2))
        else:
            # Paths can be huge; exclude them from the excerpt-level log.
            log_excludes = ('source_path', 'hash_extra_paths', 'paths')
            qd = {k: v for k, v in query_data.items() if k not in log_excludes}
            log.debug('QUERY (excerpt): %s', json.dumps(qd, indent=2))

    query = datatree('prepare_query', **query_data)

    tf_paths = query.paths
    runtime = query.runtime
    function_name = query.function_name
    artifacts_dir = query.artifacts_dir
    hash_extra_paths = query.hash_extra_paths
    source_path = query.source_path
    hash_extra = query.hash_extra
    # CLI/env override wins over the query value when explicitly set.
    recreate_missing_package = yesno_bool(
        args.recreate_missing_package
        if args.recreate_missing_package is not None
        else query.recreate_missing_package)
    docker = query.docker

    bpm = BuildPlanManager(args, log=log)
    build_plan = bpm.plan(source_path, query)

    if log.isEnabledFor(DEBUG2):
        log.debug('BUILD_PLAN: %s', json.dumps(build_plan, indent=2))

    # Expand Terraform path references.
    hash_extra_paths = [p.format(path=tf_paths) for p in hash_extra_paths]

    # The content hash covers the extra paths, the build plan, the runtime
    # and any user-supplied extra string, so any of them changing forces a
    # rebuild.
    content_hash = bpm.hash(hash_extra_paths)
    content_hash.update(json.dumps(build_plan, sort_keys=True).encode())
    content_hash.update(runtime.encode())
    content_hash.update(hash_extra.encode())
    content_hash = content_hash.hexdigest()

    # Generate a unique filename based on the hash.
    filename = os.path.join(artifacts_dir, '{}.zip'.format(content_hash))

    # Compute timestamp trigger
    was_missing = False
    filename_path = os.path.join(os.getcwd(), filename)
    if recreate_missing_package:
        if os.path.exists(filename_path):
            st = os.stat(filename_path)
            timestamp = st.st_mtime_ns
        else:
            timestamp = timestamp_now_ns()
            was_missing = True
    else:
        timestamp = ""

    # Replace variables in the build command with calculated values.
    build_data = {
        'filename': filename,
        'runtime': runtime,
        'artifacts_dir': artifacts_dir,
        'build_plan': build_plan,
    }
    if docker:
        build_data['docker'] = docker

    build_plan = json.dumps(build_data)
    build_plan_filename = os.path.join(artifacts_dir,
                                       '{}.plan.json'.format(content_hash))
    # exist_ok makes a prior existence check redundant (and avoids a
    # check-then-act race).
    os.makedirs(artifacts_dir, exist_ok=True)
    with open(build_plan_filename, 'w') as f:
        f.write(build_plan)

    # Output the result to Terraform.
    json.dump({
        'filename': filename,
        'build_plan': build_plan,
        'build_plan_filename': build_plan_filename,
        'timestamp': str(timestamp),
        'was_missing': 'true' if was_missing else 'false',
    }, sys.stdout, indent=2)
    sys.stdout.write('\n')


def build_command(args):
    """
    Builds a zip file from the source_dir or source_file.
    Installs dependencies with pip or npm automatically.

    Reads the plan file produced by `prepare`, skips the build when the
    target zip already exists (unless --force), and normalizes the zip's
    mtime to the provided timestamp.
    """

    log = logging.getLogger('build')

    dump_env()
    if log.isEnabledFor(DEBUG2):
        log.debug('CMD: python3 %s', shlex_join(sys.argv))

    with open(args.build_plan_file) as f:
        query_data = json.load(f)
    query = datatree('build_query', **query_data)

    runtime = query.runtime
    filename = query.filename
    build_plan = query.build_plan
    _timestamp = args.zip_file_timestamp

    # Non-numeric timestamps (e.g. the empty trigger) fall back to 0.
    timestamp = 0
    if _timestamp.isnumeric():
        timestamp = int(_timestamp)

    if os.path.exists(filename) and not args.force:
        log.info('Reused: %s', shlex.quote(filename))
        return

    # Zip up the build plan and write it to the target filename.
    # This will be used by the Lambda function as the source code package.
    with ZipWriteStream(filename) as zs:
        bpm = BuildPlanManager(args, log=log)
        bpm.execute(build_plan, zs, query)

    os.utime(filename, ns=(timestamp, timestamp))
    log.info('Created: %s', shlex.quote(filename))
    if log.isEnabledFor(logging.DEBUG):
        with open(filename, 'rb') as f:
            log.info('Base64sha256: %s', source_code_hash(f.read()))


def add_hidden_commands(sub_parsers):
    """Register undocumented helper subcommands (docker, docker-image, zip,
    hash) used for debugging/inspection; they are removed from --help output."""
    sp = sub_parsers

    def hidden_parser(name, **kwargs):
        p = sp.add_parser(name, **kwargs)
        sp._choices_actions.pop()  # XXX: help=argparse.SUPPRESS - doesn't work
        return p

    p = hidden_parser('docker', help='Run docker build')
    p.set_defaults(command=lambda args: subprocess.call(docker_run_command(
        args.build_root, args.docker_command, args.runtime, interactive=True)))
    p.add_argument('build_root', help='A docker build root folder')
    p.add_argument('docker_command', help='A docker container command',
                   metavar='command', nargs=argparse.REMAINDER)
    p.add_argument('-r', '--runtime', help='A docker image runtime',
                   default='python3.8')

    p = hidden_parser('docker-image', help='Run docker build')
    # FIX: the arguments were previously passed positionally as
    # (build_root, docker_file, tag) against the signature
    # docker_build_command(tag=None, docker_file=None, build_root=False),
    # so build_root landed in `tag` and tag in `build_root`. Keywords make
    # the wiring explicit and correct.
    p.set_defaults(command=lambda args: subprocess.call(docker_build_command(
        tag=args.tag, docker_file=args.docker_file,
        build_root=args.build_root)))
    p.add_argument('-t', '--tag', help='A docker image tag')
    p.add_argument('build_root', help='A docker build root folder')
    p.add_argument('docker_file', help='A docker file path',
                   nargs=argparse.OPTIONAL)

    def zip_cmd(args):
        if args.verbose:
            log.setLevel(logging.DEBUG)
        with ZipWriteStream(args.zipfile) as zs:
            zs.write_dirs(*args.dir, timestamp=args.timestamp)
        if log.isEnabledFor(logging.DEBUG):
            zipinfo = shutil.which('zipinfo')
            if zipinfo:
                log.debug('-' * 80)
                subprocess.call([zipinfo, args.zipfile])
                log.debug('-' * 80)
            # FIX: close the file handle deterministically instead of
            # leaking it via an inline open().
            with open(args.zipfile, 'rb') as zf:
                log.debug('Source code hash: %s',
                          source_code_hash(zf.read()))

    p = hidden_parser('zip', help='Zip folder with provided files timestamp')
    p.set_defaults(command=zip_cmd)
    p.add_argument('zipfile', help='Path to a zip file')
    p.add_argument('dir', nargs=argparse.ONE_OR_MORE,
                   help='Path to a directory for packaging')
    p.add_argument('-t', '--timestamp', type=int,
                   help='A timestamp to override for all zip members')
    p.add_argument('-v', '--verbose', action='store_true')

    p = hidden_parser('hash', help='Generate content hash for a file')
    p.set_defaults(
        command=lambda args: print(source_code_hash(args.file.read())))
    p.add_argument('file', help='Path to a file', type=argparse.FileType('rb'))


def args_parser():
    """Build the top-level argument parser with `prepare`, `build` and the
    hidden helper subcommands."""
    ap = argparse.ArgumentParser()
    ap.set_defaults(command=lambda _: ap.print_usage())
    sp = ap.add_subparsers(metavar="COMMAND")

    p = sp.add_parser('prepare',
                      help='compute a filename hash for a zip archive')
    p.set_defaults(command=prepare_command)

    p = sp.add_parser('build',
                      help='build and pack to a zip archive')
    p.set_defaults(command=build_command)
    p.add_argument('--force', action='store_true',
                   help='Force rebuilding even if a zip artifact exists')
    p.add_argument('-t', '--timestamp',
                   dest='zip_file_timestamp', required=True,
                   help='A zip file timestamp generated by the prepare command')
    p.add_argument('build_plan_file', metavar='PLAN_FILE',
                   help='A build plan file provided by the prepare command')
    add_hidden_commands(sp)
    return ap


def main():
    """Entry point: seed defaults from TF_* environment variables, parse
    args, configure logging and dispatch to the selected subcommand."""
    ns = argparse.Namespace(
        pattern_comments=yesno_bool(os.environ.get(
            'TF_LAMBDA_PACKAGE_PATTERN_COMMENTS', False)),
        recreate_missing_package=os.environ.get(
            'TF_RECREATE_MISSING_LAMBDA_PACKAGE', None),
        log_level=os.environ.get('TF_LAMBDA_PACKAGE_LOG_LEVEL', 'INFO'),
    )

    p = args_parser()
    args = p.parse_args(namespace=ns)

    # `prepare` talks to Terraform over stdout, so diagnostics must go to
    # stderr there.
    if args.command is prepare_command:
        configure_logging(use_tf_stderr=True)
    else:
        configure_logging()

    if args.log_level:
        # NOTE(review): logging._nameToLevel/_checkLevel are private APIs;
        # kept for behavior parity, but consider logging.getLevelName.
        ll = logging._nameToLevel.get(args.log_level)
        if ll and logging._checkLevel(ll):
            logging.root.setLevel(args.log_level)

    exit(args.command(args))


if __name__ == '__main__':
    main()

# ---------------------------------------------------------------------------
# NOTE(review): the mangled paste ended with a non-Python patch fragment for
# tsconfig.json; preserved verbatim below for reference:
#
# diff --git a/tsconfig.json b/tsconfig.json
# index 65fc68c..907cb23 100644
# --- a/tsconfig.json
# +++ b/tsconfig.json
# @@ -25,9 +25,9 @@
#      // "moduleDetection": "auto", /* Control what method is used to detect module-format JS files. */
#      /* Modules */
# -    "module": "ESNext" /* Specify what module code is generated. */,
#      // "rootDir": "./", /* Specify the root folder within your source files. */
# -    "moduleResolution": "Node" /* Specify how TypeScript looks up a file from a given module specifier. */,
# +    "module": "ESNext" /* Specify what module code is generated. */,
# +    "moduleResolution": "Bundler" /* Specify how TypeScript looks up a file from a given module specifier. */,
#      "baseUrl": "./" /* Specify the base directory to resolve non-relative module names. */,
#      // "paths": {}, /* Specify a set of entries that re-map imports to additional lookup locations. */
#      // "rootDirs": [], /* Allow multiple folders to be treated as one when resolving modules. */