From 590385472dba8306455d15d6d03a1f50606013e4 Mon Sep 17 00:00:00 2001 From: Carter Cundiff Date: Thu, 9 May 2024 15:35:56 -0400 Subject: [PATCH] #28 Change credentials for LocalStack between environments without replicating all of my environment properties --- DRAFT_RELEASE_NOTES.md | 4 +- build-parent/pom.xml | 2 +- .../src/main/resources/docker/Dockerfile | 3 + .../aissemble-localstack-chart/README.md | 9 +- .../templates/credential-secret.yaml | 9 + .../tests/credential_secret_test.yaml | 34 +++ .../aissemble-localstack-chart/values.yaml | 7 +- .../argocd/v2/sealed.secret.yaml.vm | 2 +- .../deployment/mlflow/v2/mlflow.chart.yaml.vm | 2 +- .../mlflow/v2/mlflow.values-dev.yaml.vm | 7 +- .../spark-application-base-values.yaml.vm | 29 +-- .../v1_7_0/MlflowV2ExternalS3Migration.java | 29 +-- ...SparkApplicationBaseValuesS3Migration.java | 225 ++++++++++++++++++ .../aissemble/upgrade/pojo/MlflowValues.java | 25 +- .../upgrade/pojo/SparkApplication.java | 125 ++++++++++ .../aissemble/upgrade/util/FileUtils.java | 14 ++ .../src/main/resources/migrations.json | 148 +++++++----- ...ApplicationBaseValuesS3MigrationSteps.java | 97 ++++++++ ...plication-base-values-s3-migration.feature | 47 ++++ .../aissemble-mlflow/values-dev.yaml | 7 +- .../validation/aissemble-mlflow/values.yaml | 2 +- .../validation/externalS3/values-dev.yaml | 5 +- .../validation/externalS3/values.yaml | 2 +- .../validation/mlflow/values-dev.yaml | 5 +- .../validation/mlflow/values.yaml | 2 +- .../driver-env/test-pipeline-base-values.yaml | 78 ++++++ .../test-pipeline-base-values.yaml | 87 +++++++ .../env/test-pipeline-base-values.yaml | 85 +++++++ .../envFrom/test-pipeline-base-values.yaml | 93 ++++++++ .../test-pipeline-base-values.yaml | 79 ++++++ .../test-pipeline-base-values.yaml | 87 +++++++ .../test-pipeline-base-values.yaml | 25 ++ .../test-pipeline-base-values.yaml | 73 ++++++ .../test-pipeline-base-values.yaml | 97 ++++++++ .../driver-env/test-pipeline-base-values.yaml | 76 ++++++ .../test-pipeline-base-values.yaml | 84 +++++++ .../env/test-pipeline-base-values.yaml | 80 +++++++ .../envFrom/test-pipeline-base-values.yaml | 86 +++++++ .../test-pipeline-base-values.yaml | 77 ++++++ .../test-pipeline-base-values.yaml | 84 +++++++ 40 files changed, 1896 insertions(+), 136 deletions(-) create mode 100644 extensions/extensions-helm/aissemble-localstack-chart/templates/credential-secret.yaml create mode 100644 extensions/extensions-helm/aissemble-localstack-chart/tests/credential_secret_test.yaml create mode 100644 foundation/foundation-upgrade/src/main/java/com/boozallen/aissemble/upgrade/migration/v1_7_0/SparkApplicationBaseValuesS3Migration.java create mode 100644 foundation/foundation-upgrade/src/main/java/com/boozallen/aissemble/upgrade/pojo/SparkApplication.java create mode 100644 foundation/foundation-upgrade/src/test/java/com/boozallen/aissemble/upgrade/migration/v1_7_0/SparkApplicationBaseValuesS3MigrationSteps.java create mode 100644 foundation/foundation-upgrade/src/test/resources/specifications/v1_7_0/spark-application-base-values-s3-migration.feature create mode 100644 foundation/foundation-upgrade/src/test/resources/test-files/v1_7_0/SparkApplicationBaseValuesS3Migration/migration/driver-env/test-pipeline-base-values.yaml create mode 100644 foundation/foundation-upgrade/src/test/resources/test-files/v1_7_0/SparkApplicationBaseValuesS3Migration/migration/driver-envFrom/test-pipeline-base-values.yaml create mode 100644 
foundation/foundation-upgrade/src/test/resources/test-files/v1_7_0/SparkApplicationBaseValuesS3Migration/migration/env/test-pipeline-base-values.yaml create mode 100644 foundation/foundation-upgrade/src/test/resources/test-files/v1_7_0/SparkApplicationBaseValuesS3Migration/migration/envFrom/test-pipeline-base-values.yaml create mode 100644 foundation/foundation-upgrade/src/test/resources/test-files/v1_7_0/SparkApplicationBaseValuesS3Migration/migration/executor-env/test-pipeline-base-values.yaml create mode 100644 foundation/foundation-upgrade/src/test/resources/test-files/v1_7_0/SparkApplicationBaseValuesS3Migration/migration/executor-envFrom/test-pipeline-base-values.yaml create mode 100644 foundation/foundation-upgrade/src/test/resources/test-files/v1_7_0/SparkApplicationBaseValuesS3Migration/skip-migration/base-values-no-env/test-pipeline-base-values.yaml create mode 100644 foundation/foundation-upgrade/src/test/resources/test-files/v1_7_0/SparkApplicationBaseValuesS3Migration/skip-migration/base-values-no-s3-cred/test-pipeline-base-values.yaml create mode 100644 foundation/foundation-upgrade/src/test/resources/test-files/v1_7_0/SparkApplicationBaseValuesS3Migration/skip-migration/base-values-secret-s3-cred/test-pipeline-base-values.yaml create mode 100644 foundation/foundation-upgrade/src/test/resources/test-files/v1_7_0/SparkApplicationBaseValuesS3Migration/validation/driver-env/test-pipeline-base-values.yaml create mode 100644 foundation/foundation-upgrade/src/test/resources/test-files/v1_7_0/SparkApplicationBaseValuesS3Migration/validation/driver-envFrom/test-pipeline-base-values.yaml create mode 100644 foundation/foundation-upgrade/src/test/resources/test-files/v1_7_0/SparkApplicationBaseValuesS3Migration/validation/env/test-pipeline-base-values.yaml create mode 100644 foundation/foundation-upgrade/src/test/resources/test-files/v1_7_0/SparkApplicationBaseValuesS3Migration/validation/envFrom/test-pipeline-base-values.yaml create mode 100644 foundation/foundation-upgrade/src/test/resources/test-files/v1_7_0/SparkApplicationBaseValuesS3Migration/validation/executor-env/test-pipeline-base-values.yaml create mode 100644 foundation/foundation-upgrade/src/test/resources/test-files/v1_7_0/SparkApplicationBaseValuesS3Migration/validation/executor-envFrom/test-pipeline-base-values.yaml diff --git a/DRAFT_RELEASE_NOTES.md b/DRAFT_RELEASE_NOTES.md index 1d516aa97..bbfe8a2b5 100644 --- a/DRAFT_RELEASE_NOTES.md +++ b/DRAFT_RELEASE_NOTES.md @@ -103,8 +103,10 @@ To reduce burden of upgrading aiSSEMBLE, the Baton project is used to automate t | upgrade-v2-chart-files-aissemble-version-migration | Updates the helm chart dependencies within your project's deployment resources (-deploy/src/main/resources/apps/) to use the latest version of the aiSSEMBLE | | upgrade-v1-chart-files-aissemble-version-migration | Updates the docker image tags within your project's deployment resources (-deploy/src/main/resources/apps/) to use the latest version of the aiSSEMBLE | | upgrade-mlflow-v2-external-s3-migration | Update the mlflow V2 deployment (if present) in your project to utilize Localstack for local development and SealedSecrets for remote deployments | +| upgrade-spark-application-base-values-s3-migration | Update the pipeline SparkApplication(s) (if present) in your project to utilize Localstack for local development and SealedSecrets for remote deployments | | upgrade-foundation-extension-python-package-migration | Updates the pyproject.toml files within your projects pipelines folder (-pipelines) to use the
updated aiSSEMBLE foundation and extension Python packages with the latest naming convention | -|upgrade-helm-chart-files-names-migration | Updates the Chart.yaml and values*.yaml files within your project's deploy folder (-deploy) to use the new Helm chart naming convention (aissemble--chart) | +| upgrade-helm-chart-files-names-migration | Updates the Chart.yaml and values*.yaml files within your project's deploy folder (-deploy) to use the new Helm chart naming convention (aissemble-\-chart) | +| upgrade-docker-pip-install-migration | Updates Dockerfiles such that Python dependency installations fail during the build, rather than at runtime | To deactivate any of these migrations, add the following configuration to the `baton-maven-plugin` within your root `pom.xml`: diff --git a/build-parent/pom.xml b/build-parent/pom.xml index 8ea98c29c..34e268307 100644 --- a/build-parent/pom.xml +++ b/build-parent/pom.xml @@ -69,7 +69,7 @@ 3.3.4 4.1.5_for_spark_3 1.12.262 - 0.2.0 + 1.0.0 diff --git a/extensions/extensions-docker/aissemble-spark/src/main/resources/docker/Dockerfile b/extensions/extensions-docker/aissemble-spark/src/main/resources/docker/Dockerfile index 8bfae16f9..00faf9812 100644 --- a/extensions/extensions-docker/aissemble-spark/src/main/resources/docker/Dockerfile +++ b/extensions/extensions-docker/aissemble-spark/src/main/resources/docker/Dockerfile @@ -37,6 +37,9 @@ RUN apt-get update -y && apt-get install --assume-yes \ && apt-get clean \ && ln -s /usr/bin/python${PYTHON_VERSION} /usr/bin/python +# Pyspark uses `python3` to execute pyspark pipelines. This links our latest Python install to that command. +RUN ln -sf /usr/bin/python3.11 /usr/bin/python3 + ## Add spark configurations COPY ./src/main/resources/conf/ ${SPARK_HOME}/conf/ diff --git a/extensions/extensions-helm/aissemble-localstack-chart/README.md b/extensions/extensions-helm/aissemble-localstack-chart/README.md index 870d8ab87..86788962c 100644 --- a/extensions/extensions-helm/aissemble-localstack-chart/README.md +++ b/extensions/extensions-helm/aissemble-localstack-chart/README.md @@ -25,6 +25,7 @@ https://github.com/localstack/helm-charts/tree/main/charts/localstack |----------------------|-----------------------------------------------------------------------------------------------------|-------------------|--------------------------------------------------------------------------| | fullnameOverride | String to fully override common.names.fullname | No | s3-local | | startServices | Comma-separated list of AWS CLI service names which should be loaded right when starting LocalStack | No | s3 | +| service.type | Kubernetes Service type | No | LoadBalancer | | enableStartupScripts | Enables execution of startup behaviors | No | true | | startupScriptContent | Base script for triggering creation of localstack resources | No | Triggers creation of s3 buckets/keys | | volumes | Creates required volumes | No | configMap `localstack-resources` -> `create-s3-resources.sh` | @@ -34,9 +35,11 @@ The following properties are provided by the `aissemble-localstack-chart` chart -| Property | Description | Required Override | Default | -|----------|------------------------------------------------|-------------------|---------| -| buckets | Collection of buckets and keys to create in s3 | No | [] | +| Property | Description | Required Override | Default |
+|--------------------------|-----------------------------------------------------|-------------------|--------------------| +| buckets | Collection of buckets and keys to create in s3 | No | [] | +| credentialSecret.enabled | Whether to use a secret to store the S3 credentials | No | true | +| credentialSecret.name | Name of the credential secret | No | remote-auth-config | # Migration From v1 Structure diff --git a/extensions/extensions-helm/aissemble-localstack-chart/templates/credential-secret.yaml b/extensions/extensions-helm/aissemble-localstack-chart/templates/credential-secret.yaml new file mode 100644 index 000000000..2ebc69d21 --- /dev/null +++ b/extensions/extensions-helm/aissemble-localstack-chart/templates/credential-secret.yaml @@ -0,0 +1,9 @@ +{{- if .Values.credentialSecret.enabled }} +apiVersion: v1 +kind: Secret +metadata: + name: {{ .Values.credentialSecret.name }} +stringData: + AWS_ACCESS_KEY_ID: "123" + AWS_SECRET_ACCESS_KEY: "456" +{{- end }} \ No newline at end of file diff --git a/extensions/extensions-helm/aissemble-localstack-chart/tests/credential_secret_test.yaml b/extensions/extensions-helm/aissemble-localstack-chart/tests/credential_secret_test.yaml new file mode 100644 index 000000000..d7b6a34df --- /dev/null +++ b/extensions/extensions-helm/aissemble-localstack-chart/tests/credential_secret_test.yaml @@ -0,0 +1,34 @@ +suite: localstack +templates: + - credential-secret.yaml +tests: + - it: Should contain correct default values when enabled + asserts: + - isKind: + of: Secret + - equal: + path: metadata.name + value: remote-auth-config + - equal: + path: stringData.AWS_ACCESS_KEY_ID + value: "123" + - equal: + path: stringData.AWS_SECRET_ACCESS_KEY + value: "456" + - it: Should be able to set the secret name + set: + credentialSecret: + name: test-name + asserts: + - isKind: + of: Secret + - equal: + path: metadata.name + value: test-name + - it: Should not exist when disabled + set: + credentialSecret: + enabled: false + asserts: + - hasDocuments: + count: 0 \ No newline at end of file diff --git a/extensions/extensions-helm/aissemble-localstack-chart/values.yaml b/extensions/extensions-helm/aissemble-localstack-chart/values.yaml index 5b3f56eb2..9b55e625c 100644 --- a/extensions/extensions-helm/aissemble-localstack-chart/values.yaml +++ b/extensions/extensions-helm/aissemble-localstack-chart/values.yaml @@ -1,6 +1,8 @@ localstack: fullnameOverride: s3-local startServices: s3 + service: + type: LoadBalancer enableStartupScripts: true startupScriptContent: | #!/bin/sh @@ -17,4 +19,7 @@ localstack: - key: create-s3-resources.sh path: create-s3-resources.sh -buckets: [] \ No newline at end of file +buckets: [] +credentialSecret: + enabled: true + name: remote-auth-config diff --git a/foundation/foundation-mda/src/main/resources/templates/deployment/argocd/v2/sealed.secret.yaml.vm b/foundation/foundation-mda/src/main/resources/templates/deployment/argocd/v2/sealed.secret.yaml.vm index 3200351c2..4df88e41d 100644 --- a/foundation/foundation-mda/src/main/resources/templates/deployment/argocd/v2/sealed.secret.yaml.vm +++ b/foundation/foundation-mda/src/main/resources/templates/deployment/argocd/v2/sealed.secret.yaml.vm @@ -9,4 +9,4 @@ metadata: name: remote-auth-config stringData: YOUR_SECRET_KEY: YOUR_SECRET_VALUE - YOUR_SECRET_KEY: YOUR_SECRET_VALUE \ No newline at end of file + YOUR_SECRET_KEY: YOUR_SECRET_VALUE diff --git a/foundation/foundation-mda/src/main/resources/templates/deployment/mlflow/v2/mlflow.chart.yaml.vm 
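With the chart-provided `remote-auth-config` Secret above, workloads no longer hardcode credentials: they reference the Secret and read the keys as ordinary environment variables at runtime. As a minimal sketch (plain Java, class name illustrative; it assumes only the two keys the Secret defines and that the pod references the Secret via `envFrom` or `secretKeyRef`):

```java
public final class S3CredentialCheck {
    public static void main(String[] args) {
        // Injected by Kubernetes when the pod references the `remote-auth-config` Secret.
        String accessKey = System.getenv("AWS_ACCESS_KEY_ID");
        String secretKey = System.getenv("AWS_SECRET_ACCESS_KEY");
        if (accessKey == null || secretKey == null) {
            throw new IllegalStateException("S3 credentials were not injected; check the secretRef configuration");
        }
        // Avoid logging secret values; confirm presence only.
        System.out.println("S3 credentials are present in the environment");
    }
}
```

Swapping credentials between environments then only requires sealing a different Secret, not editing pipeline values files.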
b/foundation/foundation-mda/src/main/resources/templates/deployment/mlflow/v2/mlflow.chart.yaml.vm index 9b410eb56..c6612c601 100644 --- a/foundation/foundation-mda/src/main/resources/templates/deployment/mlflow/v2/mlflow.chart.yaml.vm +++ b/foundation/foundation-mda/src/main/resources/templates/deployment/mlflow/v2/mlflow.chart.yaml.vm @@ -31,4 +31,4 @@ appVersion: "1.0.0" dependencies: - name: aissemble-mlflow-chart version: ${versionTag} - repository: oci://ghcr.io/boozallen \ No newline at end of file + repository: oci://ghcr.io/boozallen diff --git a/foundation/foundation-mda/src/main/resources/templates/deployment/mlflow/v2/mlflow.values-dev.yaml.vm b/foundation/foundation-mda/src/main/resources/templates/deployment/mlflow/v2/mlflow.values-dev.yaml.vm index b9b5cc81d..ac9482be2 100644 --- a/foundation/foundation-mda/src/main/resources/templates/deployment/mlflow/v2/mlflow.values-dev.yaml.vm +++ b/foundation/foundation-mda/src/main/resources/templates/deployment/mlflow/v2/mlflow.values-dev.yaml.vm @@ -9,9 +9,8 @@ aissemble-mlflow-chart: service: type: LoadBalancer externalS3: - existingSecret: "" host: "s3-local" port: 4566 - accessKeyID: "123" - accessKeySecret: "456" - protocol: http \ No newline at end of file + protocol: http + existingSecretAccessKeyIDKey: "AWS_ACCESS_KEY_ID" + existingSecretKeySecretKey: "AWS_SECRET_ACCESS_KEY" diff --git a/foundation/foundation-mda/src/main/resources/templates/deployment/spark-operator/spark-application-base-values.yaml.vm b/foundation/foundation-mda/src/main/resources/templates/deployment/spark-operator/spark-application-base-values.yaml.vm index 80e10fa4c..4589639d1 100644 --- a/foundation/foundation-mda/src/main/resources/templates/deployment/spark-operator/spark-application-base-values.yaml.vm +++ b/foundation/foundation-mda/src/main/resources/templates/deployment/spark-operator/spark-application-base-values.yaml.vm @@ -70,6 +70,14 @@ sparkApp: cores: 1 coreLimit: "1200m" memory: "2048m" + #if (${useS3Local}) + # Set up these secret key references within your SealedSecret +## # See our guide for using SealedSecrets in your project to learn more +## # TODO: LINK-TO-GUIDE-HERE + envFrom: + - secretRef: + name: remote-auth-config + #end env: - name: KRAUSENING_BASE value: /opt/spark/krausening/base @@ -77,20 +85,17 @@ - name: ENABLE_LINEAGE value: "true" #end - #if (${useS3Local}) - - name: AWS_ACCESS_KEY_ID - value: "123" - - name: AWS_SECRET_ACCESS_KEY - value: "456" - - name: STORAGE_ENDPOINT - value: "http://s3-local:4566" - #end #if (${isJavaPipeline}) javaOptions: "-DKRAUSENING_BASE=/opt/spark/krausening/base" #end executor: cores: 1 memory: "4096m" + #if (${useS3Local}) + envFrom: + - secretRef: + name: remote-auth-config + #end env: - name: KRAUSENING_BASE value: /opt/spark/krausening/base @@ -98,14 +103,6 @@ - name: ENABLE_LINEAGE value: "true" #end - #if (${useS3Local}) - - name: AWS_ACCESS_KEY_ID - value: "123" - - name: AWS_SECRET_ACCESS_KEY - value: "456" - - name: STORAGE_ENDPOINT - value: "http://s3-local:4566" - #end #if (${isJavaPipeline}) javaOptions: "-DKRAUSENING_BASE=/opt/spark/krausening/base" #end \ No newline at end of file diff --git a/foundation/foundation-upgrade/src/main/java/com/boozallen/aissemble/upgrade/migration/v1_7_0/MlflowV2ExternalS3Migration.java b/foundation/foundation-upgrade/src/main/java/com/boozallen/aissemble/upgrade/migration/v1_7_0/MlflowV2ExternalS3Migration.java index ec1e9bd76..f220e1a51 100644 --- 
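The template change above swaps the inline `AWS_*` env entries for an `envFrom` secret reference. A quick way to sanity-check rendered base values is to parse them and look for the `secretRef`; a minimal sketch using SnakeYAML (the inlined YAML fragment and class name are illustrative, not part of this patch):

```java
import java.util.List;
import java.util.Map;
import org.yaml.snakeyaml.Yaml;

public final class EnvFromSmokeCheck {
    // Fragment shaped like the template's #if(${useS3Local}) output.
    private static final String RENDERED =
            "sparkApp:\n"
          + "  spec:\n"
          + "    driver:\n"
          + "      envFrom:\n"
          + "        - secretRef:\n"
          + "            name: remote-auth-config\n";

    @SuppressWarnings("unchecked")
    public static void main(String[] args) {
        Map<String, Object> root = new Yaml().load(RENDERED);
        Map<String, Object> spec = (Map<String, Object>) ((Map<String, Object>) root.get("sparkApp")).get("spec");
        Map<String, Object> driver = (Map<String, Object>) spec.get("driver");
        List<Map<String, Object>> envFrom = (List<Map<String, Object>>) driver.get("envFrom");
        Map<String, Object> secretRef = (Map<String, Object>) envFrom.get(0).get("secretRef");
        if (!"remote-auth-config".equals(secretRef.get("name"))) {
            throw new IllegalStateException("driver is not wired to the credential secret");
        }
        System.out.println("driver envFrom -> secretRef: " + secretRef.get("name"));
    }
}
```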
a/foundation/foundation-upgrade/src/main/java/com/boozallen/aissemble/upgrade/migration/v1_7_0/MlflowV2ExternalS3Migration.java +++ b/foundation/foundation-upgrade/src/main/java/com/boozallen/aissemble/upgrade/migration/v1_7_0/MlflowV2ExternalS3Migration.java @@ -12,6 +12,7 @@ import com.boozallen.aissemble.upgrade.migration.AbstractAissembleMigration; import com.boozallen.aissemble.upgrade.pojo.Chart; import com.boozallen.aissemble.upgrade.pojo.MlflowValues; +import com.boozallen.aissemble.upgrade.pojo.MlflowValues.Mlflow; import com.boozallen.aissemble.upgrade.pojo.Chart.Dependency; import com.boozallen.aissemble.upgrade.util.FileUtils; import com.boozallen.aissemble.upgrade.util.YamlUtils; @@ -122,12 +123,11 @@ protected boolean performMigration(File chartFile) { LinkedList<String> linesToAddValuesDev = new LinkedList<>(Arrays.asList( "externalS3:", - "existingSecret: \"\"", "host: \"s3-local\"", "port: 4566", - "accessKeyID: \"123\"", - "accessKeySecret: \"456\"", - "protocol: http" + "protocol: http", + "existingSecretAccessKeyIDKey: \"AWS_ACCESS_KEY_ID\"", + "existingSecretKeySecretKey: \"AWS_SECRET_ACCESS_KEY\"" )); return migrateValuesFile(this.valuesFile, this.valuesObject, linesToAddValues) @@ -146,8 +146,8 @@ private boolean migrateValuesFile(File file, MlflowValues helmValuesObject, Link logger.info("Migrating file: {}", file.getAbsolutePath()); List<String> newFileContents = new ArrayList<>(); List<String> originalFile; - try { - originalFile = FileUtils.readAllFileLines(file); + try { + originalFile = FileUtils.readAllFileLines(file); logger.debug("Read in {} lines", originalFile.size()); int tabSize; @@ -159,7 +159,8 @@ private boolean migrateValuesFile(File file, MlflowValues helmValuesObject, Link newFileContents.add("aissemble-mlflow:"); newFileContents.add(SPACE.repeat(tabSize) + "mlflow:"); - indentValues(linesToAdd, tabSize); + FileUtils.indentValues(linesToAdd.subList(0, 1), tabSize * 2); + FileUtils.indentValues(linesToAdd.subList(1, linesToAdd.size()), tabSize * 3); newFileContents.addAll(linesToAdd); } else { @@ -184,7 +185,8 @@ private boolean migrateValuesFile(File file, MlflowValues helmValuesObject, Link // (leading space on current line) - (leading space on previous line) tabSize = (line.length() - line.stripLeading().length()) - (lineBeingAppended.length() - lineBeingAppended.stripLeading().length()); - indentValues(linesToAdd, tabSize); + FileUtils.indentValues(linesToAdd.subList(0, 1), tabSize * 2); + FileUtils.indentValues(linesToAdd.subList(1, linesToAdd.size()), tabSize * 3); if (addMlFlowHeader) { linesToAdd.addFirst(SPACE.repeat(tabSize) + "mlflow:"); @@ -211,15 +213,4 @@ private boolean migrateValuesFile(File file, MlflowValues helmValuesObject, Link } return false; } - - private void indentValues(LinkedList<String> values, int tabSize) { - for (int i = 0; i < values.size(); i++) { - // indent the first header values 2 tabs and the nested elements 3 tabs - if (i < 1) { - values.set(i, SPACE.repeat(tabSize * 2) + values.get(i)); - } else { - values.set(i, SPACE.repeat(tabSize * 3) + values.get(i)); - } - } - } } diff --git a/foundation/foundation-upgrade/src/main/java/com/boozallen/aissemble/upgrade/migration/v1_7_0/SparkApplicationBaseValuesS3Migration.java b/foundation/foundation-upgrade/src/main/java/com/boozallen/aissemble/upgrade/migration/v1_7_0/SparkApplicationBaseValuesS3Migration.java new file mode 100644 index 000000000..71400fc1c --- /dev/null +++ 
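The refactor above replaces the private `indentValues` with the shared helper this patch adds to `FileUtils`, called twice through `subList` so the section header lands at two tabs and its nested keys at three. A self-contained sketch of that pattern (the helper body is inlined so the snippet runs on its own; sample lines are illustrative):

```java
import java.util.LinkedList;
import java.util.List;

public final class IndentSketch {
    // Local copy of FileUtils.indentValues: prefix each non-blank value with numSpaces spaces.
    static void indentValues(List<String> values, int numSpaces) {
        for (int i = 0; i < values.size(); i++) {
            if (!values.get(i).isBlank()) {
                values.set(i, " ".repeat(numSpaces) + values.get(i));
            }
        }
    }

    public static void main(String[] args) {
        LinkedList<String> lines = new LinkedList<>(List.of(
                "externalS3:", "host: \"s3-local\"", "port: 4566"));
        int tabSize = 2;
        // subList returns a live view, so the indentation lands in `lines` itself.
        indentValues(lines.subList(0, 1), tabSize * 2);             // header -> 2 tabs
        indentValues(lines.subList(1, lines.size()), tabSize * 3);  // nested -> 3 tabs
        lines.forEach(System.out::println);
    }
}
```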
b/foundation/foundation-upgrade/src/main/java/com/boozallen/aissemble/upgrade/migration/v1_7_0/SparkApplicationBaseValuesS3Migration.java @@ -0,0 +1,225 @@ +package com.boozallen.aissemble.upgrade.migration.v1_7_0; + +/*- + * #%L + * aiSSEMBLE::Foundation::Upgrade + * %% + * Copyright (C) 2021 Booz Allen + * %% + * This software package is licensed under the Booz Allen Public License. All Rights Reserved. + * #L% + */ + +import java.io.File; +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; + +import org.apache.commons.lang3.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.boozallen.aissemble.upgrade.migration.AbstractAissembleMigration; +import com.boozallen.aissemble.upgrade.pojo.SparkApplication; +import com.boozallen.aissemble.upgrade.pojo.SparkApplication.Env; +import com.boozallen.aissemble.upgrade.util.FileUtils; +import com.boozallen.aissemble.upgrade.util.YamlUtils; + +/** + * Migration to update S3 configuration within the base values of a SparkApplication to use a secret. + */ +public class SparkApplicationBaseValuesS3Migration extends AbstractAissembleMigration { + + protected static final String SPACE = " "; + protected static final Logger logger = LoggerFactory.getLogger(SparkApplicationBaseValuesS3Migration.class); + + /** + * Function to determine whether a SparkApplication base values file contains Localstack configuration that we would like to migrate. + */ + @Override + protected boolean shouldExecuteOnFile(File sparkApplicationBaseValuesFile) { + SparkApplication baseValuesObject = YamlUtils.createYamlObjectFromFile(sparkApplicationBaseValuesFile, SparkApplication.class, logger); + + // check that the base values file contains localstack configuration + if (hasLocalstackConfig(baseValuesObject.getDriverEnvs()) || + hasLocalstackConfig(baseValuesObject.getExecutorEnvs())) { + logger.info("Found base values with localstack configuration, proceeding with migration."); + return true; + } else { + logger.info("Skipping migration, did not find localstack configuration in the driver or executor environment variables"); + } + return false; + } + + /** + * Determines whether any of the environment variables in the list contain Localstack configuration. + * @param envs List of environment variables + * @return {@link Boolean} + */ + protected boolean hasLocalstackConfig(List<Env> envs) { + boolean hasLocalstackAccessKey, hasLocalstackSecretKey; + hasLocalstackAccessKey = hasLocalstackSecretKey = false; + + // check there are envs and iterate through them + if (envs != null && !envs.isEmpty()) { + for (Env env: envs) { + // check for the default localstack config values + if (StringUtils.equals(env.getName(), "AWS_ACCESS_KEY_ID") && StringUtils.equals(env.getValue(), "123")) { + hasLocalstackAccessKey = true; + } else if (StringUtils.equals(env.getName(), "AWS_SECRET_ACCESS_KEY") && StringUtils.equals(env.getValue(), "456")) { + hasLocalstackSecretKey = true; + } + } + } + + return hasLocalstackAccessKey && hasLocalstackSecretKey; + } + + + /** + * Function to migrate the driver and executor environment variables from the hardcoded LocalStack config to use secrets. 
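The detection rule above deliberately requires both default LocalStack values ("123" and "456") before a file is touched, so real credentials are never rewritten. A standalone sketch of the same rule, using plain maps in place of the patch's `Env` POJO (class and method names are illustrative):

```java
import java.util.List;
import java.util.Map;

public final class LocalstackDetectionSketch {
    // Mirrors hasLocalstackConfig: migrate only when both default values appear.
    static boolean hasLocalstackConfig(List<Map<String, String>> envs) {
        boolean accessKey = false;
        boolean secretKey = false;
        if (envs != null) {
            for (Map<String, String> env : envs) {
                accessKey |= "AWS_ACCESS_KEY_ID".equals(env.get("name")) && "123".equals(env.get("value"));
                secretKey |= "AWS_SECRET_ACCESS_KEY".equals(env.get("name")) && "456".equals(env.get("value"));
            }
        }
        return accessKey && secretKey;
    }

    public static void main(String[] args) {
        List<Map<String, String>> defaults = List.of(
                Map.of("name", "AWS_ACCESS_KEY_ID", "value", "123"),
                Map.of("name", "AWS_SECRET_ACCESS_KEY", "value", "456"));
        List<Map<String, String>> realCreds = List.of(
                Map.of("name", "AWS_ACCESS_KEY_ID", "value", "AKIA..."));
        System.out.println(hasLocalstackConfig(defaults));  // true  -> migrate
        System.out.println(hasLocalstackConfig(realCreds)); // false -> skip
    }
}
```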
+ * @return success of the migration + */ + @Override + protected boolean performMigration(File sparkApplicationBaseValuesFile) { + logger.info("Migrating file: {}", sparkApplicationBaseValuesFile.getAbsolutePath()); + SparkApplication baseValuesObject = YamlUtils.createYamlObjectFromFile(sparkApplicationBaseValuesFile, SparkApplication.class, logger); + + // Old yaml elements to remove + List<String> envsToRemove = new ArrayList<>(Arrays.asList( + "AWS_ACCESS_KEY_ID", + "AWS_SECRET_ACCESS_KEY", + "STORAGE_ENDPOINT" + )); + boolean removeCurrentLine = false; + + // New yaml elements to add + List<String> instructionComment = new ArrayList<>(Arrays.asList( + "# Set up these secret key references within your SealedSecret" + )); + List<String> envFromContents = new ArrayList<>(Arrays.asList( + "- secretRef:", + SPACE.repeat(4) + "name: remote-auth-config" + )); + + // Determine which lines we want to append after + List<String> linesToAppend = new ArrayList<>(); + String lineBeingAppended = null; + boolean hasInstructionComment, appendDriverEnvFrom, appendExecutorEnvFrom; + hasInstructionComment = appendDriverEnvFrom = appendExecutorEnvFrom = false; + + if (hasLocalstackConfig(baseValuesObject.getDriverEnvs())) { + if (baseValuesObject.hasDriverEnvFrom()) { + linesToAppend.add("envFrom:"); + appendDriverEnvFrom = true; + } else { + linesToAppend.add("driver:"); + } + } + + if (hasLocalstackConfig(baseValuesObject.getExecutorEnvs())) { + if (baseValuesObject.hasExecutorEnvFrom()) { + linesToAppend.add("envFrom:"); + appendExecutorEnvFrom = true; + } else { + linesToAppend.add("executor:"); + } + } + + + boolean isDriver, isExecutor; + isDriver = isExecutor = false; + + List<String> newFileContents = new ArrayList<>(); + List<String> originalFile; + + try { + originalFile = FileUtils.readAllFileLines(sparkApplicationBaseValuesFile); + + // Iterate through the file + for (String line: originalFile) { + + // Check the current line to see if it's one of interest + if (!removeCurrentLine && lineBeingAppended == null && !line.isBlank()) { + + // Check if we are in the driver or executor sections + if (line.stripLeading().startsWith("driver:")) { + isDriver = true; + isExecutor = false; + } else if (line.stripLeading().startsWith("executor:")) { + isExecutor = true; + isDriver = false; + } + + // Check for beginning of an env key/value pair to remove + if (envsToRemove.stream().anyMatch(str -> line.stripLeading().startsWith("-") && line.contains(str))) { + logger.debug("Found environment variable to remove: {}", line.strip()); + removeCurrentLine = true; + } + // Check if we're on a line we want to append + else if (linesToAppend.stream().anyMatch(str -> line.stripLeading().startsWith(str))) { + + // Check if we're at the correct envFrom header we want to append - could be driver or executor + // If it's not envFrom then good to append regardless + if ((line.stripLeading().startsWith("envFrom:") && ((isDriver && appendDriverEnvFrom) || (isExecutor && appendExecutorEnvFrom))) + || !line.stripLeading().startsWith("envFrom:")) { + lineBeingAppended = line; + logger.debug("Found line to append: '{}'", line.trim()); + } + } + } + // check that line to append has been found and the current line isn't blank + else if (lineBeingAppended != null && !line.isBlank()) { + int leadingWhitespace = line.length() - line.stripLeading().length(); + List<String> envFromContentsCopy = new ArrayList<>(envFromContents); // create a copy as indentation could vary by location + + // add the new instructions for using the secret + if (!hasInstructionComment) { + 
logger.debug("Adding instruction comment"); + FileUtils.indentValues(instructionComment, leadingWhitespace); + newFileContents.addAll(instructionComment); + hasInstructionComment = true; + } + + // Add the envFrom header if needed + if (!lineBeingAppended.stripLeading().startsWith("envFrom:")) { + logger.debug("Adding envFrom yaml header"); + newFileContents.add(SPACE.repeat(leadingWhitespace) + "envFrom:"); + + // add default tab size of 2 spaces within the envFrom section + FileUtils.indentValues(envFromContentsCopy, leadingWhitespace + 2); + } else { + // assume the white space of other entries already within the envFrom section + FileUtils.indentValues(envFromContentsCopy, leadingWhitespace); + } + + logger.debug("Adding envFrom secretRef"); + newFileContents.addAll(envFromContentsCopy); + + lineBeingAppended = null; + } + + // Check for end of env key/value pair to remove + if (removeCurrentLine) { + if (!line.stripLeading().startsWith("value:")) { + continue; // remove any comments/blank lines between the env '- name:' and 'value:' lines + } else { + removeCurrentLine = false; + logger.debug("Finished removing the environment variable"); + } + } else { + newFileContents.add(line); + } + + } + + FileUtils.writeFile(sparkApplicationBaseValuesFile, newFileContents); + logger.debug("Wrote {} lines to file: {}", newFileContents.size(), sparkApplicationBaseValuesFile.getAbsolutePath()); + return true; + } catch (IOException e) { + logger.error("Unable to overwrite file at " + sparkApplicationBaseValuesFile.getAbsolutePath(), e); + } + return false; + } +} \ No newline at end of file diff --git a/foundation/foundation-upgrade/src/main/java/com/boozallen/aissemble/upgrade/pojo/MlflowValues.java b/foundation/foundation-upgrade/src/main/java/com/boozallen/aissemble/upgrade/pojo/MlflowValues.java index 460788c82..e609d75cc 100644 --- a/foundation/foundation-upgrade/src/main/java/com/boozallen/aissemble/upgrade/pojo/MlflowValues.java +++ b/foundation/foundation-upgrade/src/main/java/com/boozallen/aissemble/upgrade/pojo/MlflowValues.java @@ -33,22 +33,19 @@ public static class AissembleMlflow { public Boolean hasExternalS3() { return this.mlflow != null ? this.mlflow.hasExternalS3() : false; } + } - @Data - public static class Mlflow { - private ExternalS3 externalS3; - - public Boolean hasExternalS3() { - return this.externalS3 != null ? this.externalS3.hasExternalS3() : false; - } + @Data + public static class Mlflow { + private ExternalS3 externalS3; - @Data - public static class ExternalS3 { - public Boolean hasExternalS3() { - return true; - } - } + public Boolean hasExternalS3() { + return this.externalS3 != null; } } -} + + @Data + public static class ExternalS3 { + } +} \ No newline at end of file diff --git a/foundation/foundation-upgrade/src/main/java/com/boozallen/aissemble/upgrade/pojo/SparkApplication.java b/foundation/foundation-upgrade/src/main/java/com/boozallen/aissemble/upgrade/pojo/SparkApplication.java new file mode 100644 index 000000000..f09e3b449 --- /dev/null +++ b/foundation/foundation-upgrade/src/main/java/com/boozallen/aissemble/upgrade/pojo/SparkApplication.java @@ -0,0 +1,125 @@ +package com.boozallen.aissemble.upgrade.pojo; + +import java.util.List; +import java.util.Map; + +/*- + * #%L + * aiSSEMBLE::Foundation::Upgrade + * %% + * Copyright (C) 2021 Booz Allen + * %% + * This software package is licensed under the Booz Allen Public License. All Rights Reserved. 
+ * #L% + */ +import lombok.Data; + +/** + * Java object to represent a SparkApplication yaml file. + */ +@Data +public class SparkApplication implements AbstractYamlObject { + private SparkApp sparkApp; + + /** + * Function to get the driver environment variables from a {@link SparkApplication} instance. + * @return {@link List}<{@link Env}> for the driver + */ + public List<Env> getDriverEnvs() { + return this.sparkApp != null ? this.sparkApp.getDriverEnvs(): null; + } + + /** + * Function to get the executor environment variables from a {@link SparkApplication} instance. + * @return {@link List}<{@link Env}> for the executor + */ + public List<Env> getExecutorEnvs() { + return this.sparkApp != null ? this.sparkApp.getExecutorEnvs(): null; + } + + /** + * Function to check whether a {@link SparkApplication} instance contains any driver envFrom variables. + * @return {@link Boolean} result + */ + public Boolean hasDriverEnvFrom() { + return this.sparkApp != null ? this.sparkApp.hasDriverEnvFrom(): false; + } + + /** + * Function to check whether a {@link SparkApplication} instance contains any executor envFrom variables. + * @return {@link Boolean} result + */ + public Boolean hasExecutorEnvFrom() { + return this.sparkApp != null ? this.sparkApp.hasExecutorEnvFrom(): false; + } + + @Data + public static class SparkApp { + private Spec spec; + + public List<Env> getDriverEnvs() { + return this.spec != null ? this.spec.getDriverEnvs(): null; + } + + public List<Env> getExecutorEnvs() { + return this.spec != null ? this.spec.getExecutorEnvs(): null; + } + + public Boolean hasDriverEnvFrom() { + return this.spec != null ? this.spec.hasDriverEnvFrom(): false; + } + + public Boolean hasExecutorEnvFrom() { + return this.spec != null ? this.spec.hasExecutorEnvFrom(): false; + } + + } + + @Data + public static class Spec { + private Driver driver; + private Executor executor; + + public List<Env> getDriverEnvs() { + return this.driver != null ? this.driver.getEnv(): null; + } + + public List<Env> getExecutorEnvs() { + return this.executor != null ? this.executor.getEnv(): null; + } + + public Boolean hasDriverEnvFrom() { + return this.driver != null ? this.driver.hasDriverEnvFrom(): false; + } + + public Boolean hasExecutorEnvFrom() { + return this.executor != null ? this.executor.hasExecutorEnvFrom(): false; + } + } + + @Data + public static class Driver { + private List<Env> env; + private List<Map<String, Object>> envFrom; + + public Boolean hasDriverEnvFrom() { + return this.envFrom != null; + } + } + + @Data + public static class Executor { + private List<Env> env; + private List<Map<String, Object>> envFrom; + + public Boolean hasExecutorEnvFrom() { + return this.envFrom != null; + } + } + + @Data + public static class Env { + private String name; + private String value; + } +} \ No newline at end of file diff --git a/foundation/foundation-upgrade/src/main/java/com/boozallen/aissemble/upgrade/util/FileUtils.java b/foundation/foundation-upgrade/src/main/java/com/boozallen/aissemble/upgrade/util/FileUtils.java index 0a00cb966..1b245e209 100644 --- a/foundation/foundation-upgrade/src/main/java/com/boozallen/aissemble/upgrade/util/FileUtils.java +++ b/foundation/foundation-upgrade/src/main/java/com/boozallen/aissemble/upgrade/util/FileUtils.java @@ -175,4 +175,18 @@ public static String getIndent(String line, int level) { } return line.substring(0, i/level); } + + /** + * Indent the values by the desired number of spaces. 
+ * @param values List of {@link String} values to indent + * @param numSpaces number of spaces to indent + */ + public static void indentValues(List<String> values, int numSpaces) { + String SPACE = " "; + for (int i = 0; i < values.size(); i++) { + if (!values.get(i).isBlank()) { + values.set(i, SPACE.repeat(numSpaces) + values.get(i)); + } + } + } } diff --git a/foundation/foundation-upgrade/src/main/resources/migrations.json b/foundation/foundation-upgrade/src/main/resources/migrations.json index 783f9681e..bac17c3c9 100644 --- a/foundation/foundation-upgrade/src/main/resources/migrations.json +++ b/foundation/foundation-upgrade/src/main/resources/migrations.json @@ -1,80 +1,104 @@ [ { - "name": "upgrade-tiltfile-aissemble-version-migration", - "implementation": "com.boozallen.aissemble.upgrade.migration.TiltfileMigration", - "fileSets": [ + "group": "v1-7-0-migrations", + "type": "ordered", + "migrations": [ { - "includes": ["Tiltfile"] - } - ] - }, - { - "name": "upgrade-v2-chart-files-aissemble-version-migration", - "implementation": "com.boozallen.aissemble.upgrade.migration.HelmChartsV2Migration", - "fileSets": [ + "name": "upgrade-foundation-extension-python-package-migration", + "implementation": "com.boozallen.aissemble.upgrade.migration.v1_7_0.FoundationExtensionPythonPackageMigration", + "fileSets": [ + { + "includes": ["*-pipelines/**/pyproject.toml"] + } + ] + }, { - "includes": ["**/apps/**/Chart.yaml"] - } - ] - }, - { - "name": "upgrade-foundation-extension-python-package-migration", - "implementation": "com.boozallen.aissemble.upgrade.migration.v1_7_0.FoundationExtensionPythonPackageMigration", - "fileSets": [ + "name": "upgrade-mlflow-v2-external-s3-migration", + "implementation": "com.boozallen.aissemble.upgrade.migration.v1_7_0.MlflowV2ExternalS3Migration", + "fileSets": [ + { + "includes": ["*-deploy/src/main/resources/apps/*/Chart.yaml"], + "excludes": ["**/target/**/Chart.yaml"] + } + ] + }, { - "includes": ["*-pipelines/**/pyproject.toml"] - } - ] - }, - { - "name": "upgrade-v1-chart-files-aissemble-version-migration", - "implementation": "com.boozallen.aissemble.upgrade.migration.HelmChartsV1Migration", - "fileSets": [ + "name": "upgrade-spark-application-base-values-s3-migration", + "implementation": "com.boozallen.aissemble.upgrade.migration.v1_7_0.SparkApplicationBaseValuesS3Migration", + "fileSets": [ + { + "includes": [ + "*-pipelines/*/src/*/resources/apps/*-base-values.yaml", + "*-pipelines/*/src/main/resources/apps/*-base-values.yaml" + ] + } + ] + }, { - "includes": [ - "**/spark-operator/values.yaml", - "**/spark-infrastructure/values.yaml", - "**/jenkins/values.yaml", - "**/kafka-cluster/values.yaml", - "**/hive-metastore-db/values.yaml", - "**/hive-metastore-service/values.yaml", - "**/metadata/values.yaml", - "**/model-training-api/values.yaml", - "**/pipeline-invocation-service/values.yaml" + "name": "upgrade-docker-pip-install-migration", + "implementation": "com.boozallen.aissemble.upgrade.migration.v1_7_0.DockerPipInstallMigration", + "fileSets": [ + { + "includes": ["*-docker/*/src/main/resources/docker/Dockerfile"], + "excludes": ["*-docker/*/target/Dockerfile"] + } ] - } - ] - }, - { - "name": "upgrade-mlflow-v2-external-s3-migration", - "implementation": "com.boozallen.aissemble.upgrade.migration.v1_7_0.MlflowV2ExternalS3Migration", - "fileSets": [ + }, { - "includes": ["*-deploy/src/main/resources/apps/*/Chart.yaml"], - "excludes": ["**/target/**/Chart.yaml"] + "name": "upgrade-helm-chart-files-names-migration", + "implementation": 
"com.boozallen.aissemble.upgrade.migration.v1_7_0.HelmChartsNameMigration", + "fileSets": [ + { + "includes": [ + "*-deploy/src/main/resources/apps/*/Chart.yaml", + "*-deploy/src/main/resources/apps/*/values*.yaml" + ] + } + ] } ] }, { - "name": "upgrade-docker-pip-install-migration", - "implementation": "com.boozallen.aissemble.upgrade.migration.v1_7_0.DockerPipInstallMigration", - "fileSets": [ + "group": "general-migrations", + "type": "ordered", + "migrations": [ { - "includes": ["*-docker/*/src/main/resources/docker/Dockerfile"], - "excludes": ["*-docker/*/target/Dockerfile"] - } - ] - }, - { - "name": "upgrade-helm-chart-files-names-migration", - "implementation": "com.boozallen.aissemble.upgrade.migration.v1_7_0.HelmChartsNameMigration", - "fileSets": [ + "name": "upgrade-tiltfile-aissemble-version-migration", + "implementation": "com.boozallen.aissemble.upgrade.migration.TiltfileMigration", + "fileSets": [ + { + "includes": ["Tiltfile"] + } + ] + }, + { + "name": "upgrade-v2-chart-files-aissemble-version-migration", + "implementation": "com.boozallen.aissemble.upgrade.migration.HelmChartsV2Migration", + "fileSets": [ + { + "includes": ["**/apps/**/Chart.yaml"] + } + ] + }, { - "includes": [ - "*-deploy/src/main/resources/apps/*/Chart.yaml", - "*-deploy/src/main/resources/apps/*/values*.yaml" + "name": "upgrade-v1-chart-files-aissemble-version-migration", + "implementation": "com.boozallen.aissemble.upgrade.migration.HelmChartsV1Migration", + "fileSets": [ + { + "includes": [ + "**/spark-operator/values.yaml", + "**/spark-infrastructure/values.yaml", + "**/jenkins/values.yaml", + "**/kafka-cluster/values.yaml", + "**/hive-metastore-db/values.yaml", + "**/hive-metastore-service/values.yaml", + "**/metadata/values.yaml", + "**/model-training-api/values.yaml", + "**/pipeline-invocation-service/values.yaml" + ] + } ] } ] } -] +] \ No newline at end of file diff --git a/foundation/foundation-upgrade/src/test/java/com/boozallen/aissemble/upgrade/migration/v1_7_0/SparkApplicationBaseValuesS3MigrationSteps.java b/foundation/foundation-upgrade/src/test/java/com/boozallen/aissemble/upgrade/migration/v1_7_0/SparkApplicationBaseValuesS3MigrationSteps.java new file mode 100644 index 000000000..a039bc630 --- /dev/null +++ b/foundation/foundation-upgrade/src/test/java/com/boozallen/aissemble/upgrade/migration/v1_7_0/SparkApplicationBaseValuesS3MigrationSteps.java @@ -0,0 +1,97 @@ +package com.boozallen.aissemble.upgrade.migration.v1_7_0; +/*- + * #%L + * aiSSEMBLE::Foundation::Upgrade + * %% + * Copyright (C) 2021 Booz Allen + * %% + * This software package is licensed under the Booz Allen Public License. All Rights Reserved. 
+ #L% + */ + +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; + +import java.io.File; +import java.io.IOException; + +import org.apache.commons.io.FileUtils; + +import com.boozallen.aissemble.upgrade.migration.AbstractMigrationTest; + +import io.cucumber.java.Before; +import io.cucumber.java.en.Given; +import io.cucumber.java.en.Then; +import io.cucumber.java.en.When; + +public class SparkApplicationBaseValuesS3MigrationSteps extends AbstractMigrationTest { + + String sparkConfig; + String testCase; + + @Before + public void setTestCase() { + testCase = null; + sparkConfig = null; + } + + @Given("a project that has a spark application base values") + public void a_project_that_has_a_spark_application_base_values() { + // nothing to do here, handled by setting the testFile below + } + + @Given("both the base values executor and driver contain hardcoded Localstack S3 credentials") + public void both_the_base_values_executor_and_driver_contain_hardcoded_localstack_S3_credentials() { + // nothing to do here, handled by setting the testFile below + } + + @Given("only the base values {string} contain hardcoded Localstack S3 credentials") + public void only_the_base_values_contain_hardcoded_localstack_S3_credentials(String sparkConfig) { + this.sparkConfig = sparkConfig + "-"; + } + + @Given("the base values contains {string} configuration") + public void the_base_values_contains_configuration(String yamlConfig) { + this.testCase = this.sparkConfig == null ? yamlConfig : this.sparkConfig + yamlConfig; + testFile = getTestFile("v1_7_0/SparkApplicationBaseValuesS3Migration/migration/" + this.testCase + "/test-pipeline-base-values.yaml"); + } + + @Given("the base values contains secret based S3 credentials") + public void the_base_values_contains_secret_based_S3_credentials() throws IOException { + testFile = getTestFile("v1_7_0/SparkApplicationBaseValuesS3Migration/skip-migration/base-values-secret-s3-cred/test-pipeline-base-values.yaml"); + } + + @Given("the base values does not contain any S3 credentials") + public void the_base_values_does_not_contain_any_S3_credentials() throws IOException { + testFile = getTestFile("v1_7_0/SparkApplicationBaseValuesS3Migration/skip-migration/base-values-no-s3-cred/test-pipeline-base-values.yaml"); + } + + @Given("the base values does not contain any environment variables") + public void the_base_values_does_not_contain_any_environment_variables() throws IOException { + testFile = getTestFile("v1_7_0/SparkApplicationBaseValuesS3Migration/skip-migration/base-values-no-env/test-pipeline-base-values.yaml"); + } + + @When("the spark application base values S3 migration executes") + public void the_spark_application_base_values_S3_migration_executes() { + performMigration(new SparkApplicationBaseValuesS3Migration()); + } + + @Then("the base values S3 credentials will be updated to use a secret reference") + public void the_base_values_S3_credentials_will_be_updated_to_use_a_secret_reference() throws IOException { + File validationFile = getTestFile("v1_7_0/SparkApplicationBaseValuesS3Migration/validation/" + this.testCase + "/test-pipeline-base-values.yaml"); + File migratedFile = getTestFile("v1_7_0/SparkApplicationBaseValuesS3Migration/migration/" + this.testCase + "/test-pipeline-base-values.yaml"); + + assertTrue("The migrated file is different from the validation file", FileUtils.contentEqualsIgnoreEOL(migratedFile, validationFile, null)); + assertTrue("Migration did not complete successfully", successful); + } + + 
@Then("the hardcoded Localstack S3 credentials will be removed") + public void the_hardcoded_Localstack_S3_credentials_will_be_removed() { + // nothing to do here, handled by the validation above + } + + @Then("the spark application base values S3 migration is skipped") + public void the_spark_application_base_values_S3_migration_is_skipped() { + assertFalse("The migration should be skipped", shouldExecute); + } +} \ No newline at end of file diff --git a/foundation/foundation-upgrade/src/test/resources/specifications/v1_7_0/spark-application-base-values-s3-migration.feature b/foundation/foundation-upgrade/src/test/resources/specifications/v1_7_0/spark-application-base-values-s3-migration.feature new file mode 100644 index 000000000..50ea14c8d --- /dev/null +++ b/foundation/foundation-upgrade/src/test/resources/specifications/v1_7_0/spark-application-base-values-s3-migration.feature @@ -0,0 +1,47 @@ +@spark-application-s3-migration +Feature: Migrate a spark application base values with Localstack S3 credentials to use a secret reference + Scenario Outline: Migrate a spark application base values with Localstack S3 credentials in both driver and executor + Given a project that has a spark application base values + And both the base values executor and driver contain hardcoded Localstack S3 credentials + And the base values contains "<yaml-config>" configuration + When the spark application base values S3 migration executes + Then the base values S3 credentials will be updated to use a secret reference + And the hardcoded Localstack S3 credentials will be removed + + Examples: + | yaml-config | + | env | + | envFrom | + + Scenario Outline: Migrate a spark application base values with Localstack S3 credentials in driver or executor + Given a project that has a spark application base values + And only the base values "<spark-config>" contain hardcoded Localstack S3 credentials + And the base values contains "<yaml-config>" configuration + When the spark application base values S3 migration executes + Then the base values S3 credentials will be updated to use a secret reference + And the hardcoded Localstack S3 credentials will be removed + + Examples: + | spark-config | yaml-config | + | driver | env | + | driver | envFrom | + | executor | env | + | executor | envFrom | + + Scenario: Skip spark application base values migration with secret based S3 credentials in the base values + Given a project that has a spark application base values + And the base values contains secret based S3 credentials + When the spark application base values S3 migration executes + Then the spark application base values S3 migration is skipped + + Scenario: Skip spark application base values migration without any S3 credentials in the base values + Given a project that has a spark application base values + And the base values does not contain any S3 credentials + When the spark application base values S3 migration executes + Then the spark application base values S3 migration is skipped + + Scenario: Skip spark application base values migration without any environment variables in the base values + Given a project that has a spark application base values + And the base values does not contain any environment variables + When the spark application base values S3 migration executes + Then the spark application base values S3 migration is skipped \ No newline at end of file diff --git a/foundation/foundation-upgrade/src/test/resources/test-files/v1_7_0/MlflowV2ExternalS3Migration/validation/aissemble-mlflow/values-dev.yaml 
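Each Examples row in the outlines above maps onto one of the fixture directories added under test-files (driver-env, executor-envFrom, and so on). A sketch of the path resolution the steps class performs, mirroring its `sparkConfig`/`yamlConfig` composition (class and method names are illustrative):

```java
public final class FixturePathSketch {
    // Optional "<spark-config>-" prefix plus the "<yaml-config>" suffix selects the fixture directory.
    static String fixturePath(String sparkConfig, String yamlConfig) {
        String testCase = sparkConfig == null ? yamlConfig : sparkConfig + "-" + yamlConfig;
        return "v1_7_0/SparkApplicationBaseValuesS3Migration/migration/" + testCase
                + "/test-pipeline-base-values.yaml";
    }

    public static void main(String[] args) {
        System.out.println(fixturePath(null, "env"));         // .../migration/env/...
        System.out.println(fixturePath("driver", "envFrom")); // .../migration/driver-envFrom/...
    }
}
```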
b/foundation/foundation-upgrade/src/test/resources/test-files/v1_7_0/MlflowV2ExternalS3Migration/validation/aissemble-mlflow/values-dev.yaml index b67c1d51c..1dc55dad4 100644 --- a/foundation/foundation-upgrade/src/test/resources/test-files/v1_7_0/MlflowV2ExternalS3Migration/validation/aissemble-mlflow/values-dev.yaml +++ b/foundation/foundation-upgrade/src/test/resources/test-files/v1_7_0/MlflowV2ExternalS3Migration/validation/aissemble-mlflow/values-dev.yaml @@ -6,9 +6,8 @@ aissemble-mlflow: mlflow: externalS3: - existingSecret: "" host: "s3-local" port: 4566 - accessKeyID: "123" - accessKeySecret: "456" - protocol: http \ No newline at end of file + protocol: http + existingSecretAccessKeyIDKey: "AWS_ACCESS_KEY_ID" + existingSecretKeySecretKey: "AWS_SECRET_ACCESS_KEY" \ No newline at end of file diff --git a/foundation/foundation-upgrade/src/test/resources/test-files/v1_7_0/MlflowV2ExternalS3Migration/validation/aissemble-mlflow/values.yaml b/foundation/foundation-upgrade/src/test/resources/test-files/v1_7_0/MlflowV2ExternalS3Migration/validation/aissemble-mlflow/values.yaml index 671bd6224..b73f85708 100644 --- a/foundation/foundation-upgrade/src/test/resources/test-files/v1_7_0/MlflowV2ExternalS3Migration/validation/aissemble-mlflow/values.yaml +++ b/foundation/foundation-upgrade/src/test/resources/test-files/v1_7_0/MlflowV2ExternalS3Migration/validation/aissemble-mlflow/values.yaml @@ -9,7 +9,7 @@ aissemble-mlflow: externalS3: existingSecret: remote-auth-config bucket: mlflow-models/mlflow-storage - + # Update these keys with your external S3 details and credentials defined here: # [YOUR-PROJECT]-deploy/src/main/resources/templates/sealed-secret.yaml # existingSecretAccessKeyIDKey: diff --git a/foundation/foundation-upgrade/src/test/resources/test-files/v1_7_0/MlflowV2ExternalS3Migration/validation/externalS3/values-dev.yaml b/foundation/foundation-upgrade/src/test/resources/test-files/v1_7_0/MlflowV2ExternalS3Migration/validation/externalS3/values-dev.yaml index 409c49297..c533d1fe9 100644 --- a/foundation/foundation-upgrade/src/test/resources/test-files/v1_7_0/MlflowV2ExternalS3Migration/validation/externalS3/values-dev.yaml +++ b/foundation/foundation-upgrade/src/test/resources/test-files/v1_7_0/MlflowV2ExternalS3Migration/validation/externalS3/values-dev.yaml @@ -7,10 +7,9 @@ aissemble-mlflow: mlflow: externalS3: - existingSecret: "" host: "s3-local" port: 4566 - accessKeyID: "123" - accessKeySecret: "456" protocol: http + existingSecretAccessKeyIDKey: "AWS_ACCESS_KEY_ID" + existingSecretKeySecretKey: "AWS_SECRET_ACCESS_KEY" test: value diff --git a/foundation/foundation-upgrade/src/test/resources/test-files/v1_7_0/MlflowV2ExternalS3Migration/validation/externalS3/values.yaml b/foundation/foundation-upgrade/src/test/resources/test-files/v1_7_0/MlflowV2ExternalS3Migration/validation/externalS3/values.yaml index 519d57595..60028d301 100644 --- a/foundation/foundation-upgrade/src/test/resources/test-files/v1_7_0/MlflowV2ExternalS3Migration/validation/externalS3/values.yaml +++ b/foundation/foundation-upgrade/src/test/resources/test-files/v1_7_0/MlflowV2ExternalS3Migration/validation/externalS3/values.yaml @@ -8,7 +8,7 @@ aissemble-mlflow: externalS3: existingSecret: remote-auth-config bucket: mlflow-models/mlflow-storage - + # Update these keys with your external S3 details and credentials defined here: # [YOUR-PROJECT]-deploy/src/main/resources/templates/sealed-secret.yaml # existingSecretAccessKeyIDKey: diff --git 
a/foundation/foundation-upgrade/src/test/resources/test-files/v1_7_0/MlflowV2ExternalS3Migration/validation/mlflow/values-dev.yaml b/foundation/foundation-upgrade/src/test/resources/test-files/v1_7_0/MlflowV2ExternalS3Migration/validation/mlflow/values-dev.yaml index a712e907c..e445132b0 100644 --- a/foundation/foundation-upgrade/src/test/resources/test-files/v1_7_0/MlflowV2ExternalS3Migration/validation/mlflow/values-dev.yaml +++ b/foundation/foundation-upgrade/src/test/resources/test-files/v1_7_0/MlflowV2ExternalS3Migration/validation/mlflow/values-dev.yaml @@ -6,10 +6,9 @@ aissemble-mlflow: mlflow: externalS3: - existingSecret: "" host: "s3-local" port: 4566 - accessKeyID: "123" - accessKeySecret: "456" protocol: http + existingSecretAccessKeyIDKey: "AWS_ACCESS_KEY_ID" + existingSecretKeySecretKey: "AWS_SECRET_ACCESS_KEY" test: value \ No newline at end of file diff --git a/foundation/foundation-upgrade/src/test/resources/test-files/v1_7_0/MlflowV2ExternalS3Migration/validation/mlflow/values.yaml b/foundation/foundation-upgrade/src/test/resources/test-files/v1_7_0/MlflowV2ExternalS3Migration/validation/mlflow/values.yaml index 3a36eac2c..efd71ea7f 100644 --- a/foundation/foundation-upgrade/src/test/resources/test-files/v1_7_0/MlflowV2ExternalS3Migration/validation/mlflow/values.yaml +++ b/foundation/foundation-upgrade/src/test/resources/test-files/v1_7_0/MlflowV2ExternalS3Migration/validation/mlflow/values.yaml @@ -10,7 +10,7 @@ aissemble-mlflow: externalS3: existingSecret: remote-auth-config bucket: mlflow-models/mlflow-storage - + # Update these keys with your external S3 details and credentials defined here: # [YOUR-PROJECT]-deploy/src/main/resources/templates/sealed-secret.yaml # existingSecretAccessKeyIDKey: diff --git a/foundation/foundation-upgrade/src/test/resources/test-files/v1_7_0/SparkApplicationBaseValuesS3Migration/migration/driver-env/test-pipeline-base-values.yaml b/foundation/foundation-upgrade/src/test/resources/test-files/v1_7_0/SparkApplicationBaseValuesS3Migration/migration/driver-env/test-pipeline-base-values.yaml new file mode 100644 index 000000000..6e145400b --- /dev/null +++ b/foundation/foundation-upgrade/src/test/resources/test-files/v1_7_0/SparkApplicationBaseValuesS3Migration/migration/driver-env/test-pipeline-base-values.yaml @@ -0,0 +1,78 @@ +metadata: + name: test-pipeline +sparkApp: + spec: + type: Java + image: "boozallen/test-spark-worker-docker:latest" + mainClass: com.example.SparkPipelineDriver + mainApplicationFile: "local:///opt/spark/jobs/pipelines/test-pipeline.jar" + deps: + packages: + - mysql:mysql-connector-java:8.0.30 + - org.apache.hadoop:hadoop-aws:3.3.4 + - com.amazonaws:aws-java-sdk-bundle:1.12.262 + excludePackages: [] + hadoopConf: + fs.s3a.fast.upload: "true" + fs.s3a.path.style: "true" + driver: + cores: 1 + coreLimit: "1200m" + memory: "512m" + env: + - name: KRAUSENING_BASE + value: /opt/spark/krausening/base + - name: AWS_ACCESS_KEY_ID + value: "123" + - name: AWS_SECRET_ACCESS_KEY + value: "456" + - name: STORAGE_ENDPOINT + value: "s3.us-east-1.amazonaws.com" + - name: TEST_VAR_SECRET + valueFrom: + secretKeyRef: + name: test-secret + key: TEST_KEY + - name: TEST_VAR_CONFIG_MAP + valueFrom: + configMapKeyRef: + name: test-config-map + key: TEST_KEY + - name: TEST_VAR_FIELD_PATH + valueFrom: + fieldRef: + fieldPath: test.path + - name: TEST_VAR_FIELD_PATH + valueFrom: + resourceFieldRef: + resource: test.resource + fieldPath: "testDivisor" + javaOptions: "-DKRAUSENING_BASE=/opt/spark/krausening/base" + executor: + cores: 1 + 
memory: "512m" + env: + + - name: KRAUSENING_BASE + value: /opt/spark/krausening/base + - name: TEST_VAR_FIELD_PATH + valueFrom: + resourceFieldRef: + resource: test.resource + fieldPath: "testDivisor" + - name: TEST_VAR_FIELD_PATH + valueFrom: + fieldRef: + fieldPath: test.path + # Test comment + - name: TEST_VAR_CONFIG_MAP + valueFrom: + configMapKeyRef: + name: test-config-map + key: TEST_KEY + + - name: TEST_VAR_SECRET + valueFrom: + secretKeyRef: + name: test-secret + key: TEST_KEY \ No newline at end of file diff --git a/foundation/foundation-upgrade/src/test/resources/test-files/v1_7_0/SparkApplicationBaseValuesS3Migration/migration/driver-envFrom/test-pipeline-base-values.yaml b/foundation/foundation-upgrade/src/test/resources/test-files/v1_7_0/SparkApplicationBaseValuesS3Migration/migration/driver-envFrom/test-pipeline-base-values.yaml new file mode 100644 index 000000000..5b8966653 --- /dev/null +++ b/foundation/foundation-upgrade/src/test/resources/test-files/v1_7_0/SparkApplicationBaseValuesS3Migration/migration/driver-envFrom/test-pipeline-base-values.yaml @@ -0,0 +1,87 @@ +metadata: + name: test-pipeline +sparkApp: + spec: + type: Java + image: "boozallen/test-spark-worker-docker:latest" + mainClass: com.example.SparkPipelineDriver + mainApplicationFile: "local:///opt/spark/jobs/pipelines/test-pipeline.jar" + deps: + packages: + - mysql:mysql-connector-java:8.0.30 + - org.apache.hadoop:hadoop-aws:3.3.4 + - com.amazonaws:aws-java-sdk-bundle:1.12.262 + excludePackages: [] + hadoopConf: + fs.s3a.fast.upload: "true" + fs.s3a.path.style: "true" + driver: + cores: 1 + coreLimit: "1200m" + memory: "512m" + env: + - name: KRAUSENING_BASE + value: /opt/spark/krausening/base + - name: AWS_ACCESS_KEY_ID + value: "123" + - name: AWS_SECRET_ACCESS_KEY + value: "456" + - name: STORAGE_ENDPOINT + value: "s3.us-east-1.amazonaws.com" + - name: TEST_VAR_SECRET + valueFrom: + secretKeyRef: + name: test-secret + key: TEST_KEY + - name: TEST_VAR_CONFIG_MAP + valueFrom: + configMapKeyRef: + name: test-config-map + key: TEST_KEY + - name: TEST_VAR_FIELD_PATH + valueFrom: + fieldRef: + fieldPath: test.path + - name: TEST_VAR_FIELD_PATH + valueFrom: + resourceFieldRef: + resource: test.resource + fieldPath: "testDivisor" + envFrom: + - configMapRef: + name: env-config-map + javaOptions: "-DKRAUSENING_BASE=/opt/spark/krausening/base" + executor: + cores: 1 + memory: "512m" + env: + + - name: KRAUSENING_BASE + value: /opt/spark/krausening/base + - name: TEST_VAR_FIELD_PATH + valueFrom: + resourceFieldRef: + resource: test.resource + fieldPath: "testDivisor" + + - name: TEST_VAR_FIELD_PATH + valueFrom: + fieldRef: + fieldPath: test.path + # Test comment + - name: TEST_VAR_CONFIG_MAP + valueFrom: + configMapKeyRef: + name: test-config-map + key: TEST_KEY + + - name: TEST_VAR_SECRET + valueFrom: + secretKeyRef: + name: test-secret + key: TEST_KEY + envFrom: + + # Test comment + - secretRef: # Test comment + name: env-secret \ No newline at end of file diff --git a/foundation/foundation-upgrade/src/test/resources/test-files/v1_7_0/SparkApplicationBaseValuesS3Migration/migration/env/test-pipeline-base-values.yaml b/foundation/foundation-upgrade/src/test/resources/test-files/v1_7_0/SparkApplicationBaseValuesS3Migration/migration/env/test-pipeline-base-values.yaml new file mode 100644 index 000000000..3af017ac5 --- /dev/null +++ b/foundation/foundation-upgrade/src/test/resources/test-files/v1_7_0/SparkApplicationBaseValuesS3Migration/migration/env/test-pipeline-base-values.yaml @@ -0,0 +1,85 @@ +metadata: + 
name: test-pipeline +sparkApp: + spec: + type: Java + image: "boozallen/test-spark-worker-docker:latest" + mainClass: com.example.SparkPipelineDriver + mainApplicationFile: "local:///opt/spark/jobs/pipelines/test-pipeline.jar" + deps: + packages: + - mysql:mysql-connector-java:8.0.30 + - org.apache.hadoop:hadoop-aws:3.3.4 + - com.amazonaws:aws-java-sdk-bundle:1.12.262 + excludePackages: [] + hadoopConf: + fs.s3a.fast.upload: "true" + fs.s3a.path.style: "true" + driver: + cores: 1 + coreLimit: "1200m" + memory: "512m" + env: + - name: KRAUSENING_BASE + value: /opt/spark/krausening/base + - name: AWS_ACCESS_KEY_ID + value: "123" + - name: AWS_SECRET_ACCESS_KEY + value: "456" + - name: STORAGE_ENDPOINT + value: "s3.us-east-1.amazonaws.com" + - name: TEST_VAR_SECRET + valueFrom: + secretKeyRef: + name: test-secret + key: TEST_KEY + - name: TEST_VAR_CONFIG_MAP + valueFrom: + configMapKeyRef: + name: test-config-map + key: TEST_KEY + - name: TEST_VAR_FIELD_PATH + valueFrom: + fieldRef: + fieldPath: test.path + - name: TEST_VAR_FIELD_PATH + valueFrom: + resourceFieldRef: + resource: test.resource + fieldPath: "testDivisor" + javaOptions: "-DKRAUSENING_BASE=/opt/spark/krausening/base" + executor: + cores: 1 + memory: "512m" + env: + + - name: KRAUSENING_BASE + value: /opt/spark/krausening/base + - name: TEST_VAR_FIELD_PATH + valueFrom: + resourceFieldRef: + resource: test.resource + fieldPath: "testDivisor" + + - name: AWS_ACCESS_KEY_ID + # Test comment + value: "123" + - name: TEST_VAR_FIELD_PATH + valueFrom: + fieldRef: + fieldPath: test.path + # Test comment + - name: AWS_SECRET_ACCESS_KEY # Test comment + + value: "456" # Test comment + - name: TEST_VAR_CONFIG_MAP + valueFrom: + configMapKeyRef: + name: test-config-map + key: TEST_KEY + + - name: TEST_VAR_SECRET + valueFrom: + secretKeyRef: + name: test-secret + key: TEST_KEY \ No newline at end of file diff --git a/foundation/foundation-upgrade/src/test/resources/test-files/v1_7_0/SparkApplicationBaseValuesS3Migration/migration/envFrom/test-pipeline-base-values.yaml b/foundation/foundation-upgrade/src/test/resources/test-files/v1_7_0/SparkApplicationBaseValuesS3Migration/migration/envFrom/test-pipeline-base-values.yaml new file mode 100644 index 000000000..b05db7dd0 --- /dev/null +++ b/foundation/foundation-upgrade/src/test/resources/test-files/v1_7_0/SparkApplicationBaseValuesS3Migration/migration/envFrom/test-pipeline-base-values.yaml @@ -0,0 +1,93 @@ +metadata: + name: test-pipeline +sparkApp: + spec: + type: Java + image: "boozallen/test-spark-worker-docker:latest" + mainClass: com.example.SparkPipelineDriver + mainApplicationFile: "local:///opt/spark/jobs/pipelines/test-pipeline.jar" + deps: + packages: + - mysql:mysql-connector-java:8.0.30 + - org.apache.hadoop:hadoop-aws:3.3.4 + - com.amazonaws:aws-java-sdk-bundle:1.12.262 + excludePackages: [] + hadoopConf: + fs.s3a.fast.upload: "true" + fs.s3a.path.style: "true" + driver: + cores: 1 + coreLimit: "1200m" + memory: "512m" + env: + - name: KRAUSENING_BASE + value: /opt/spark/krausening/base + - name: AWS_ACCESS_KEY_ID + value: "123" + - name: AWS_SECRET_ACCESS_KEY + value: "456" + - name: STORAGE_ENDPOINT + value: "s3.us-east-1.amazonaws.com" + - name: TEST_VAR_SECRET + valueFrom: + secretKeyRef: + name: test-secret + key: TEST_KEY + - name: TEST_VAR_CONFIG_MAP + valueFrom: + configMapKeyRef: + name: test-config-map + key: TEST_KEY + - name: TEST_VAR_FIELD_PATH + valueFrom: + fieldRef: + fieldPath: test.path + - name: TEST_VAR_FIELD_PATH + valueFrom: + resourceFieldRef: + resource: 
test.resource + fieldPath: "testDivisor" + envFrom: + - configMapRef: + name: env-config-map + javaOptions: "-DKRAUSENING_BASE=/opt/spark/krausening/base" + executor: + cores: 1 + memory: "512m" + env: + + - name: KRAUSENING_BASE + value: /opt/spark/krausening/base + - name: TEST_VAR_FIELD_PATH + valueFrom: + resourceFieldRef: + resource: test.resource + fieldPath: "testDivisor" + + - name: AWS_ACCESS_KEY_ID + # Test comment + value: "123" + - name: TEST_VAR_FIELD_PATH + valueFrom: + fieldRef: + fieldPath: test.path + # Test comment + - name: AWS_SECRET_ACCESS_KEY # Test comment + + value: "456" # Test comment + - name: TEST_VAR_CONFIG_MAP + valueFrom: + configMapKeyRef: + name: test-config-map + key: TEST_KEY + + - name: TEST_VAR_SECRET + valueFrom: + secretKeyRef: + name: test-secret + key: TEST_KEY + envFrom: + + # Test comment + - secretRef: # Test comment + name: env-secret \ No newline at end of file diff --git a/foundation/foundation-upgrade/src/test/resources/test-files/v1_7_0/SparkApplicationBaseValuesS3Migration/migration/executor-env/test-pipeline-base-values.yaml b/foundation/foundation-upgrade/src/test/resources/test-files/v1_7_0/SparkApplicationBaseValuesS3Migration/migration/executor-env/test-pipeline-base-values.yaml new file mode 100644 index 000000000..5c89fa548 --- /dev/null +++ b/foundation/foundation-upgrade/src/test/resources/test-files/v1_7_0/SparkApplicationBaseValuesS3Migration/migration/executor-env/test-pipeline-base-values.yaml @@ -0,0 +1,79 @@ +metadata: + name: test-pipeline +sparkApp: + spec: + type: Java + image: "boozallen/test-spark-worker-docker:latest" + mainClass: com.example.SparkPipelineDriver + mainApplicationFile: "local:///opt/spark/jobs/pipelines/test-pipeline.jar" + deps: + packages: + - mysql:mysql-connector-java:8.0.30 + - org.apache.hadoop:hadoop-aws:3.3.4 + - com.amazonaws:aws-java-sdk-bundle:1.12.262 + excludePackages: [] + hadoopConf: + fs.s3a.fast.upload: "true" + fs.s3a.path.style: "true" + driver: + cores: 1 + coreLimit: "1200m" + memory: "512m" + env: + - name: KRAUSENING_BASE + value: /opt/spark/krausening/base + - name: TEST_VAR_SECRET + valueFrom: + secretKeyRef: + name: test-secret + key: TEST_KEY + - name: TEST_VAR_CONFIG_MAP + valueFrom: + configMapKeyRef: + name: test-config-map + key: TEST_KEY + - name: TEST_VAR_FIELD_PATH + valueFrom: + fieldRef: + fieldPath: test.path + - name: TEST_VAR_FIELD_PATH + valueFrom: + resourceFieldRef: + resource: test.resource + fieldPath: "testDivisor" + javaOptions: "-DKRAUSENING_BASE=/opt/spark/krausening/base" + executor: + cores: 1 + memory: "512m" + env: + + - name: KRAUSENING_BASE + value: /opt/spark/krausening/base + - name: TEST_VAR_FIELD_PATH + valueFrom: + resourceFieldRef: + resource: test.resource + fieldPath: "testDivisor" + + - name: AWS_ACCESS_KEY_ID + # Test comment + value: "123" + - name: TEST_VAR_FIELD_PATH + valueFrom: + fieldRef: + fieldPath: test.path + # Test comment + - name: AWS_SECRET_ACCESS_KEY # Test comment + + value: "456" # Test comment + - name: TEST_VAR_CONFIG_MAP + valueFrom: + configMapKeyRef: + name: test-config-map + key: TEST_KEY + + - name: TEST_VAR_SECRET + valueFrom: + secretKeyRef: + name: test-secret + key: TEST_KEY \ No newline at end of file diff --git a/foundation/foundation-upgrade/src/test/resources/test-files/v1_7_0/SparkApplicationBaseValuesS3Migration/migration/executor-envFrom/test-pipeline-base-values.yaml 
b/foundation/foundation-upgrade/src/test/resources/test-files/v1_7_0/SparkApplicationBaseValuesS3Migration/migration/executor-envFrom/test-pipeline-base-values.yaml new file mode 100644 index 000000000..708df41f8 --- /dev/null +++ b/foundation/foundation-upgrade/src/test/resources/test-files/v1_7_0/SparkApplicationBaseValuesS3Migration/migration/executor-envFrom/test-pipeline-base-values.yaml @@ -0,0 +1,87 @@ +metadata: + name: test-pipeline +sparkApp: + spec: + type: Java + image: "boozallen/test-spark-worker-docker:latest" + mainClass: com.example.SparkPipelineDriver + mainApplicationFile: "local:///opt/spark/jobs/pipelines/test-pipeline.jar" + deps: + packages: + - mysql:mysql-connector-java:8.0.30 + - org.apache.hadoop:hadoop-aws:3.3.4 + - com.amazonaws:aws-java-sdk-bundle:1.12.262 + excludePackages: [] + hadoopConf: + fs.s3a.fast.upload: "true" + fs.s3a.path.style: "true" + driver: + cores: 1 + coreLimit: "1200m" + memory: "512m" + env: + - name: KRAUSENING_BASE + value: /opt/spark/krausening/base + - name: TEST_VAR_SECRET + valueFrom: + secretKeyRef: + name: test-secret + key: TEST_KEY + - name: TEST_VAR_CONFIG_MAP + valueFrom: + configMapKeyRef: + name: test-config-map + key: TEST_KEY + - name: TEST_VAR_FIELD_PATH + valueFrom: + fieldRef: + fieldPath: test.path + - name: TEST_VAR_FIELD_PATH + valueFrom: + resourceFieldRef: + resource: test.resource + fieldPath: "testDivisor" + envFrom: + - configMapRef: + name: env-config-map + javaOptions: "-DKRAUSENING_BASE=/opt/spark/krausening/base" + executor: + cores: 1 + memory: "512m" + env: + + - name: KRAUSENING_BASE + value: /opt/spark/krausening/base + - name: TEST_VAR_FIELD_PATH + valueFrom: + resourceFieldRef: + resource: test.resource + fieldPath: "testDivisor" + + - name: AWS_ACCESS_KEY_ID + # Test comment + value: "123" + - name: TEST_VAR_FIELD_PATH + valueFrom: + fieldRef: + fieldPath: test.path + # Test comment + - name: AWS_SECRET_ACCESS_KEY # Test comment + + value: "456" # Test comment + - name: TEST_VAR_CONFIG_MAP + valueFrom: + configMapKeyRef: + name: test-config-map + key: TEST_KEY + + - name: TEST_VAR_SECRET + valueFrom: + secretKeyRef: + name: test-secret + key: TEST_KEY + envFrom: + + # Test comment + - secretRef: # Test comment + name: env-secret \ No newline at end of file diff --git a/foundation/foundation-upgrade/src/test/resources/test-files/v1_7_0/SparkApplicationBaseValuesS3Migration/skip-migration/base-values-no-env/test-pipeline-base-values.yaml b/foundation/foundation-upgrade/src/test/resources/test-files/v1_7_0/SparkApplicationBaseValuesS3Migration/skip-migration/base-values-no-env/test-pipeline-base-values.yaml new file mode 100644 index 000000000..f3eb51da5 --- /dev/null +++ b/foundation/foundation-upgrade/src/test/resources/test-files/v1_7_0/SparkApplicationBaseValuesS3Migration/skip-migration/base-values-no-env/test-pipeline-base-values.yaml @@ -0,0 +1,25 @@ +metadata: + name: test-pipeline +sparkApp: + spec: + type: Java + image: "boozallen/test-spark-worker-docker:latest" + mainClass: com.example.SparkPipelineDriver + mainApplicationFile: "local:///opt/spark/jobs/pipelines/test-pipeline.jar" + deps: + packages: + - mysql:mysql-connector-java:8.0.30 + - org.apache.hadoop:hadoop-aws:3.3.4 + - com.amazonaws:aws-java-sdk-bundle:1.12.262 + excludePackages: [] + hadoopConf: + fs.s3a.fast.upload: "true" + fs.s3a.path.style: "true" + driver: + cores: 1 + coreLimit: "1200m" + memory: "512m" + javaOptions: "-DKRAUSENING_BASE=/opt/spark/krausening/base" + executor: + cores: 1 + memory: "512m" diff --git 
a/foundation/foundation-upgrade/src/test/resources/test-files/v1_7_0/SparkApplicationBaseValuesS3Migration/skip-migration/base-values-no-s3-cred/test-pipeline-base-values.yaml b/foundation/foundation-upgrade/src/test/resources/test-files/v1_7_0/SparkApplicationBaseValuesS3Migration/skip-migration/base-values-no-s3-cred/test-pipeline-base-values.yaml new file mode 100644 index 000000000..8dde8b983 --- /dev/null +++ b/foundation/foundation-upgrade/src/test/resources/test-files/v1_7_0/SparkApplicationBaseValuesS3Migration/skip-migration/base-values-no-s3-cred/test-pipeline-base-values.yaml @@ -0,0 +1,73 @@ +metadata: + name: test-pipeline +sparkApp: + spec: + type: Java + image: "boozallen/test-spark-worker-docker:latest" + mainClass: com.example.SparkPipelineDriver + mainApplicationFile: "local:///opt/spark/jobs/pipelines/test-pipeline.jar" + deps: + packages: + - mysql:mysql-connector-java:8.0.30 + - org.apache.hadoop:hadoop-aws:3.3.4 + - com.amazonaws:aws-java-sdk-bundle:1.12.262 + excludePackages: [] + hadoopConf: + fs.s3a.fast.upload: "true" + fs.s3a.path.style: "true" + driver: + cores: 1 + coreLimit: "1200m" + memory: "512m" + env: + - name: KRAUSENING_BASE + value: /opt/spark/krausening/base + - name: TEST_VAR_SECRET + valueFrom: + secretKeyRef: + name: test-secret + key: TEST_KEY + - name: TEST_VAR_CONFIG_MAP + valueFrom: + configMapKeyRef: + name: test-config-map + key: TEST_KEY + - name: TEST_VAR_FIELD_PATH + valueFrom: + fieldRef: + fieldPath: test.path + - name: TEST_VAR_FIELD_PATH + valueFrom: + resourceFieldRef: + resource: test.resource + fieldPath: "testDivisor" + javaOptions: "-DKRAUSENING_BASE=/opt/spark/krausening/base" + executor: + cores: 1 + memory: "512m" + env: + + - name: KRAUSENING_BASE + value: /opt/spark/krausening/base + - name: TEST_VAR_FIELD_PATH + valueFrom: + resourceFieldRef: + resource: test.resource + fieldPath: "testDivisor" + + - name: TEST_VAR_FIELD_PATH + valueFrom: + fieldRef: + fieldPath: test.path + + - name: TEST_VAR_CONFIG_MAP + valueFrom: + configMapKeyRef: + name: test-config-map + key: TEST_KEY + + - name: TEST_VAR_SECRET + valueFrom: + secretKeyRef: + name: test-secret + key: TEST_KEY \ No newline at end of file diff --git a/foundation/foundation-upgrade/src/test/resources/test-files/v1_7_0/SparkApplicationBaseValuesS3Migration/skip-migration/base-values-secret-s3-cred/test-pipeline-base-values.yaml b/foundation/foundation-upgrade/src/test/resources/test-files/v1_7_0/SparkApplicationBaseValuesS3Migration/skip-migration/base-values-secret-s3-cred/test-pipeline-base-values.yaml new file mode 100644 index 000000000..df07c8b97 --- /dev/null +++ b/foundation/foundation-upgrade/src/test/resources/test-files/v1_7_0/SparkApplicationBaseValuesS3Migration/skip-migration/base-values-secret-s3-cred/test-pipeline-base-values.yaml @@ -0,0 +1,97 @@ +metadata: + name: test-pipeline +sparkApp: + spec: + type: Java + image: "boozallen/test-spark-worker-docker:latest" + mainClass: com.example.SparkPipelineDriver + mainApplicationFile: "local:///opt/spark/jobs/pipelines/test-pipeline.jar" + deps: + packages: + - mysql:mysql-connector-java:8.0.30 + - org.apache.hadoop:hadoop-aws:3.3.4 + - com.amazonaws:aws-java-sdk-bundle:1.12.262 + excludePackages: [] + hadoopConf: + fs.s3a.fast.upload: "true" + fs.s3a.path.style: "true" + driver: + cores: 1 + coreLimit: "1200m" + memory: "512m" + env: + - name: KRAUSENING_BASE + value: /opt/spark/krausening/base + - name: AWS_ACCESS_KEY_ID + valueFrom: + secretKeyRef: + name: remote-auth-config + key: AWS_ACCESS_KEY_ID + - name: 
AWS_SECRET_ACCESS_KEY + valueFrom: + secretKeyRef: + name: remote-auth-config + key: AWS_SECRET_ACCESS_KEY + - name: STORAGE_ENDPOINT + value: "s3.us-east-1.amazonaws.com" + - name: TEST_VAR_SECRET + valueFrom: + secretKeyRef: + name: test-secret + key: TEST_KEY + - name: TEST_VAR_CONFIG_MAP + valueFrom: + configMapKeyRef: + name: test-config-map + key: TEST_KEY + - name: TEST_VAR_FIELD_PATH + valueFrom: + fieldRef: + fieldPath: test.path + - name: TEST_VAR_FIELD_PATH + valueFrom: + resourceFieldRef: + resource: test.resource + fieldPath: "testDivisor" + javaOptions: "-DKRAUSENING_BASE=/opt/spark/krausening/base" + executor: + cores: 1 + memory: "512m" + env: + + - name: KRAUSENING_BASE + value: /opt/spark/krausening/base + - name: TEST_VAR_FIELD_PATH + valueFrom: + resourceFieldRef: + resource: test.resource + fieldPath: "testDivisor" + + - name: AWS_ACCESS_KEY_ID + # Test comment + valueFrom: + secretKeyRef: + name: remote-auth-config + key: AWS_ACCESS_KEY_ID + - name: TEST_VAR_FIELD_PATH + valueFrom: + fieldRef: + fieldPath: test.path + # Test comment + - name: AWS_SECRET_ACCESS_KEY # Test comment + + valueFrom: + secretKeyRef: + name: remote-auth-config + key: AWS_SECRET_ACCESS_KEY + - name: TEST_VAR_CONFIG_MAP + valueFrom: + configMapKeyRef: + name: test-config-map + key: TEST_KEY + + - name: TEST_VAR_SECRET + valueFrom: + secretKeyRef: + name: test-secret + key: TEST_KEY \ No newline at end of file diff --git a/foundation/foundation-upgrade/src/test/resources/test-files/v1_7_0/SparkApplicationBaseValuesS3Migration/validation/driver-env/test-pipeline-base-values.yaml b/foundation/foundation-upgrade/src/test/resources/test-files/v1_7_0/SparkApplicationBaseValuesS3Migration/validation/driver-env/test-pipeline-base-values.yaml new file mode 100644 index 000000000..3d555ff6d --- /dev/null +++ b/foundation/foundation-upgrade/src/test/resources/test-files/v1_7_0/SparkApplicationBaseValuesS3Migration/validation/driver-env/test-pipeline-base-values.yaml @@ -0,0 +1,76 @@ +metadata: + name: test-pipeline +sparkApp: + spec: + type: Java + image: "boozallen/test-spark-worker-docker:latest" + mainClass: com.example.SparkPipelineDriver + mainApplicationFile: "local:///opt/spark/jobs/pipelines/test-pipeline.jar" + deps: + packages: + - mysql:mysql-connector-java:8.0.30 + - org.apache.hadoop:hadoop-aws:3.3.4 + - com.amazonaws:aws-java-sdk-bundle:1.12.262 + excludePackages: [] + hadoopConf: + fs.s3a.fast.upload: "true" + fs.s3a.path.style: "true" + driver: + # Setup these secret key references within your SealedSecret + envFrom: + - secretRef: + name: remote-auth-config + cores: 1 + coreLimit: "1200m" + memory: "512m" + env: + - name: KRAUSENING_BASE + value: /opt/spark/krausening/base + - name: TEST_VAR_SECRET + valueFrom: + secretKeyRef: + name: test-secret + key: TEST_KEY + - name: TEST_VAR_CONFIG_MAP + valueFrom: + configMapKeyRef: + name: test-config-map + key: TEST_KEY + - name: TEST_VAR_FIELD_PATH + valueFrom: + fieldRef: + fieldPath: test.path + - name: TEST_VAR_FIELD_PATH + valueFrom: + resourceFieldRef: + resource: test.resource + fieldPath: "testDivisor" + javaOptions: "-DKRAUSENING_BASE=/opt/spark/krausening/base" + executor: + cores: 1 + memory: "512m" + env: + + - name: KRAUSENING_BASE + value: /opt/spark/krausening/base + - name: TEST_VAR_FIELD_PATH + valueFrom: + resourceFieldRef: + resource: test.resource + fieldPath: "testDivisor" + - name: TEST_VAR_FIELD_PATH + valueFrom: + fieldRef: + fieldPath: test.path + # Test comment + - name: TEST_VAR_CONFIG_MAP + valueFrom: + 
configMapKeyRef: + name: test-config-map + key: TEST_KEY + + - name: TEST_VAR_SECRET + valueFrom: + secretKeyRef: + name: test-secret + key: TEST_KEY \ No newline at end of file diff --git a/foundation/foundation-upgrade/src/test/resources/test-files/v1_7_0/SparkApplicationBaseValuesS3Migration/validation/driver-envFrom/test-pipeline-base-values.yaml b/foundation/foundation-upgrade/src/test/resources/test-files/v1_7_0/SparkApplicationBaseValuesS3Migration/validation/driver-envFrom/test-pipeline-base-values.yaml new file mode 100644 index 000000000..3b64e672d --- /dev/null +++ b/foundation/foundation-upgrade/src/test/resources/test-files/v1_7_0/SparkApplicationBaseValuesS3Migration/validation/driver-envFrom/test-pipeline-base-values.yaml @@ -0,0 +1,84 @@ +metadata: + name: test-pipeline +sparkApp: + spec: + type: Java + image: "boozallen/test-spark-worker-docker:latest" + mainClass: com.example.SparkPipelineDriver + mainApplicationFile: "local:///opt/spark/jobs/pipelines/test-pipeline.jar" + deps: + packages: + - mysql:mysql-connector-java:8.0.30 + - org.apache.hadoop:hadoop-aws:3.3.4 + - com.amazonaws:aws-java-sdk-bundle:1.12.262 + excludePackages: [] + hadoopConf: + fs.s3a.fast.upload: "true" + fs.s3a.path.style: "true" + driver: + cores: 1 + coreLimit: "1200m" + memory: "512m" + env: + - name: KRAUSENING_BASE + value: /opt/spark/krausening/base + - name: TEST_VAR_SECRET + valueFrom: + secretKeyRef: + name: test-secret + key: TEST_KEY + - name: TEST_VAR_CONFIG_MAP + valueFrom: + configMapKeyRef: + name: test-config-map + key: TEST_KEY + - name: TEST_VAR_FIELD_PATH + valueFrom: + fieldRef: + fieldPath: test.path + - name: TEST_VAR_FIELD_PATH + valueFrom: + resourceFieldRef: + resource: test.resource + fieldPath: "testDivisor" + envFrom: + # Setup these secret key references within your SealedSecret + - secretRef: + name: remote-auth-config + - configMapRef: + name: env-config-map + javaOptions: "-DKRAUSENING_BASE=/opt/spark/krausening/base" + executor: + cores: 1 + memory: "512m" + env: + + - name: KRAUSENING_BASE + value: /opt/spark/krausening/base + - name: TEST_VAR_FIELD_PATH + valueFrom: + resourceFieldRef: + resource: test.resource + fieldPath: "testDivisor" + + - name: TEST_VAR_FIELD_PATH + valueFrom: + fieldRef: + fieldPath: test.path + # Test comment + - name: TEST_VAR_CONFIG_MAP + valueFrom: + configMapKeyRef: + name: test-config-map + key: TEST_KEY + + - name: TEST_VAR_SECRET + valueFrom: + secretKeyRef: + name: test-secret + key: TEST_KEY + envFrom: + + # Test comment + - secretRef: # Test comment + name: env-secret \ No newline at end of file diff --git a/foundation/foundation-upgrade/src/test/resources/test-files/v1_7_0/SparkApplicationBaseValuesS3Migration/validation/env/test-pipeline-base-values.yaml b/foundation/foundation-upgrade/src/test/resources/test-files/v1_7_0/SparkApplicationBaseValuesS3Migration/validation/env/test-pipeline-base-values.yaml new file mode 100644 index 000000000..9f0ce7c54 --- /dev/null +++ b/foundation/foundation-upgrade/src/test/resources/test-files/v1_7_0/SparkApplicationBaseValuesS3Migration/validation/env/test-pipeline-base-values.yaml @@ -0,0 +1,80 @@ +metadata: + name: test-pipeline +sparkApp: + spec: + type: Java + image: "boozallen/test-spark-worker-docker:latest" + mainClass: com.example.SparkPipelineDriver + mainApplicationFile: "local:///opt/spark/jobs/pipelines/test-pipeline.jar" + deps: + packages: + - mysql:mysql-connector-java:8.0.30 + - org.apache.hadoop:hadoop-aws:3.3.4 + - com.amazonaws:aws-java-sdk-bundle:1.12.262 + 
excludePackages: [] + hadoopConf: + fs.s3a.fast.upload: "true" + fs.s3a.path.style: "true" + driver: + # Setup these secret key references within your SealedSecret + envFrom: + - secretRef: + name: remote-auth-config + cores: 1 + coreLimit: "1200m" + memory: "512m" + env: + - name: KRAUSENING_BASE + value: /opt/spark/krausening/base + - name: TEST_VAR_SECRET + valueFrom: + secretKeyRef: + name: test-secret + key: TEST_KEY + - name: TEST_VAR_CONFIG_MAP + valueFrom: + configMapKeyRef: + name: test-config-map + key: TEST_KEY + - name: TEST_VAR_FIELD_PATH + valueFrom: + fieldRef: + fieldPath: test.path + - name: TEST_VAR_FIELD_PATH + valueFrom: + resourceFieldRef: + resource: test.resource + fieldPath: "testDivisor" + javaOptions: "-DKRAUSENING_BASE=/opt/spark/krausening/base" + executor: + envFrom: + - secretRef: + name: remote-auth-config + cores: 1 + memory: "512m" + env: + + - name: KRAUSENING_BASE + value: /opt/spark/krausening/base + - name: TEST_VAR_FIELD_PATH + valueFrom: + resourceFieldRef: + resource: test.resource + fieldPath: "testDivisor" + + - name: TEST_VAR_FIELD_PATH + valueFrom: + fieldRef: + fieldPath: test.path + # Test comment + - name: TEST_VAR_CONFIG_MAP + valueFrom: + configMapKeyRef: + name: test-config-map + key: TEST_KEY + + - name: TEST_VAR_SECRET + valueFrom: + secretKeyRef: + name: test-secret + key: TEST_KEY \ No newline at end of file diff --git a/foundation/foundation-upgrade/src/test/resources/test-files/v1_7_0/SparkApplicationBaseValuesS3Migration/validation/envFrom/test-pipeline-base-values.yaml b/foundation/foundation-upgrade/src/test/resources/test-files/v1_7_0/SparkApplicationBaseValuesS3Migration/validation/envFrom/test-pipeline-base-values.yaml new file mode 100644 index 000000000..f65fcb583 --- /dev/null +++ b/foundation/foundation-upgrade/src/test/resources/test-files/v1_7_0/SparkApplicationBaseValuesS3Migration/validation/envFrom/test-pipeline-base-values.yaml @@ -0,0 +1,86 @@ +metadata: + name: test-pipeline +sparkApp: + spec: + type: Java + image: "boozallen/test-spark-worker-docker:latest" + mainClass: com.example.SparkPipelineDriver + mainApplicationFile: "local:///opt/spark/jobs/pipelines/test-pipeline.jar" + deps: + packages: + - mysql:mysql-connector-java:8.0.30 + - org.apache.hadoop:hadoop-aws:3.3.4 + - com.amazonaws:aws-java-sdk-bundle:1.12.262 + excludePackages: [] + hadoopConf: + fs.s3a.fast.upload: "true" + fs.s3a.path.style: "true" + driver: + cores: 1 + coreLimit: "1200m" + memory: "512m" + env: + - name: KRAUSENING_BASE + value: /opt/spark/krausening/base + - name: TEST_VAR_SECRET + valueFrom: + secretKeyRef: + name: test-secret + key: TEST_KEY + - name: TEST_VAR_CONFIG_MAP + valueFrom: + configMapKeyRef: + name: test-config-map + key: TEST_KEY + - name: TEST_VAR_FIELD_PATH + valueFrom: + fieldRef: + fieldPath: test.path + - name: TEST_VAR_FIELD_PATH + valueFrom: + resourceFieldRef: + resource: test.resource + fieldPath: "testDivisor" + envFrom: + # Setup these secret key references within your SealedSecret + - secretRef: + name: remote-auth-config + - configMapRef: + name: env-config-map + javaOptions: "-DKRAUSENING_BASE=/opt/spark/krausening/base" + executor: + cores: 1 + memory: "512m" + env: + + - name: KRAUSENING_BASE + value: /opt/spark/krausening/base + - name: TEST_VAR_FIELD_PATH + valueFrom: + resourceFieldRef: + resource: test.resource + fieldPath: "testDivisor" + + - name: TEST_VAR_FIELD_PATH + valueFrom: + fieldRef: + fieldPath: test.path + # Test comment + - name: TEST_VAR_CONFIG_MAP + valueFrom: + configMapKeyRef: + name: 
test-config-map + key: TEST_KEY + + - name: TEST_VAR_SECRET + valueFrom: + secretKeyRef: + name: test-secret + key: TEST_KEY + envFrom: + + - secretRef: + name: remote-auth-config + # Test comment + - secretRef: # Test comment + name: env-secret \ No newline at end of file diff --git a/foundation/foundation-upgrade/src/test/resources/test-files/v1_7_0/SparkApplicationBaseValuesS3Migration/validation/executor-env/test-pipeline-base-values.yaml b/foundation/foundation-upgrade/src/test/resources/test-files/v1_7_0/SparkApplicationBaseValuesS3Migration/validation/executor-env/test-pipeline-base-values.yaml new file mode 100644 index 000000000..ab5e42d1a --- /dev/null +++ b/foundation/foundation-upgrade/src/test/resources/test-files/v1_7_0/SparkApplicationBaseValuesS3Migration/validation/executor-env/test-pipeline-base-values.yaml @@ -0,0 +1,77 @@ +metadata: + name: test-pipeline +sparkApp: + spec: + type: Java + image: "boozallen/test-spark-worker-docker:latest" + mainClass: com.example.SparkPipelineDriver + mainApplicationFile: "local:///opt/spark/jobs/pipelines/test-pipeline.jar" + deps: + packages: + - mysql:mysql-connector-java:8.0.30 + - org.apache.hadoop:hadoop-aws:3.3.4 + - com.amazonaws:aws-java-sdk-bundle:1.12.262 + excludePackages: [] + hadoopConf: + fs.s3a.fast.upload: "true" + fs.s3a.path.style: "true" + driver: + cores: 1 + coreLimit: "1200m" + memory: "512m" + env: + - name: KRAUSENING_BASE + value: /opt/spark/krausening/base + - name: TEST_VAR_SECRET + valueFrom: + secretKeyRef: + name: test-secret + key: TEST_KEY + - name: TEST_VAR_CONFIG_MAP + valueFrom: + configMapKeyRef: + name: test-config-map + key: TEST_KEY + - name: TEST_VAR_FIELD_PATH + valueFrom: + fieldRef: + fieldPath: test.path + - name: TEST_VAR_FIELD_PATH + valueFrom: + resourceFieldRef: + resource: test.resource + fieldPath: "testDivisor" + javaOptions: "-DKRAUSENING_BASE=/opt/spark/krausening/base" + executor: + # Setup these secret key references within your SealedSecret + envFrom: + - secretRef: + name: remote-auth-config + cores: 1 + memory: "512m" + env: + + - name: KRAUSENING_BASE + value: /opt/spark/krausening/base + - name: TEST_VAR_FIELD_PATH + valueFrom: + resourceFieldRef: + resource: test.resource + fieldPath: "testDivisor" + + - name: TEST_VAR_FIELD_PATH + valueFrom: + fieldRef: + fieldPath: test.path + # Test comment + - name: TEST_VAR_CONFIG_MAP + valueFrom: + configMapKeyRef: + name: test-config-map + key: TEST_KEY + + - name: TEST_VAR_SECRET + valueFrom: + secretKeyRef: + name: test-secret + key: TEST_KEY \ No newline at end of file diff --git a/foundation/foundation-upgrade/src/test/resources/test-files/v1_7_0/SparkApplicationBaseValuesS3Migration/validation/executor-envFrom/test-pipeline-base-values.yaml b/foundation/foundation-upgrade/src/test/resources/test-files/v1_7_0/SparkApplicationBaseValuesS3Migration/validation/executor-envFrom/test-pipeline-base-values.yaml new file mode 100644 index 000000000..ad2c8ed19 --- /dev/null +++ b/foundation/foundation-upgrade/src/test/resources/test-files/v1_7_0/SparkApplicationBaseValuesS3Migration/validation/executor-envFrom/test-pipeline-base-values.yaml @@ -0,0 +1,84 @@ +metadata: + name: test-pipeline +sparkApp: + spec: + type: Java + image: "boozallen/test-spark-worker-docker:latest" + mainClass: com.example.SparkPipelineDriver + mainApplicationFile: "local:///opt/spark/jobs/pipelines/test-pipeline.jar" + deps: + packages: + - mysql:mysql-connector-java:8.0.30 + - org.apache.hadoop:hadoop-aws:3.3.4 + - com.amazonaws:aws-java-sdk-bundle:1.12.262 + 
excludePackages: [] + hadoopConf: + fs.s3a.fast.upload: "true" + fs.s3a.path.style: "true" + driver: + cores: 1 + coreLimit: "1200m" + memory: "512m" + env: + - name: KRAUSENING_BASE + value: /opt/spark/krausening/base + - name: TEST_VAR_SECRET + valueFrom: + secretKeyRef: + name: test-secret + key: TEST_KEY + - name: TEST_VAR_CONFIG_MAP + valueFrom: + configMapKeyRef: + name: test-config-map + key: TEST_KEY + - name: TEST_VAR_FIELD_PATH + valueFrom: + fieldRef: + fieldPath: test.path + - name: TEST_VAR_FIELD_PATH + valueFrom: + resourceFieldRef: + resource: test.resource + fieldPath: "testDivisor" + envFrom: + - configMapRef: + name: env-config-map + javaOptions: "-DKRAUSENING_BASE=/opt/spark/krausening/base" + executor: + cores: 1 + memory: "512m" + env: + + - name: KRAUSENING_BASE + value: /opt/spark/krausening/base + - name: TEST_VAR_FIELD_PATH + valueFrom: + resourceFieldRef: + resource: test.resource + fieldPath: "testDivisor" + + - name: TEST_VAR_FIELD_PATH + valueFrom: + fieldRef: + fieldPath: test.path + # Test comment + - name: TEST_VAR_CONFIG_MAP + valueFrom: + configMapKeyRef: + name: test-config-map + key: TEST_KEY + + - name: TEST_VAR_SECRET + valueFrom: + secretKeyRef: + name: test-secret + key: TEST_KEY + envFrom: + + # Setup these secret key references within your SealedSecret + - secretRef: + name: remote-auth-config + # Test comment + - secretRef: # Test comment + name: env-secret \ No newline at end of file
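
Note on the SparkApplicationBaseValuesS3Migration fixtures above: they are grouped by expected outcome. The migration/ files hold SparkApplication base values with plaintext AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, and STORAGE_ENDPOINT env entries, deliberately interleaved with blank lines and "# Test comment" markers to verify the migration preserves surrounding formatting. The validation/ files are the expected results, in which those entries are replaced by an envFrom secretRef. The skip-migration/ files cover specs the migration must leave untouched: no env block, no S3 credentials, or credentials already sourced from a secret. A minimal before/after sketch of the transformation, distilled from the driver-env fixtures (the remote-auth-config secret name matches the SealedSecret referenced throughout this patch):

    # Before: plaintext S3 credentials in the driver env
    driver:
      env:
        - name: KRAUSENING_BASE
          value: /opt/spark/krausening/base
        - name: AWS_ACCESS_KEY_ID
          value: "123"
        - name: AWS_SECRET_ACCESS_KEY
          value: "456"
        - name: STORAGE_ENDPOINT
          value: "s3.us-east-1.amazonaws.com"

    # After: credentials and endpoint are read from the SealedSecret-backed
    # secret; unrelated env entries are preserved as-is
    driver:
      # Setup these secret key references within your SealedSecret
      envFrom:
        - secretRef:
            name: remote-auth-config
      env:
        - name: KRAUSENING_BASE
          value: /opt/spark/krausening/base

The same rewrite is applied independently to the executor block, and when an envFrom list already exists (the driver-envFrom and executor-envFrom fixtures), the secretRef is prepended to it rather than a new block being created.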