
Commit 8540933

Merge branch 'main' into private/ron/aws/aws_cis_snowflake_premium
erezrokah authored Dec 12, 2023
2 parents 1d0a10d + 90a1f26 commit 8540933
Showing 44 changed files with 1,300 additions and 737 deletions.
4 changes: 2 additions & 2 deletions .github/workflows/publish_transformation.yml
@@ -108,7 +108,7 @@ jobs:
- name: Setup CloudQuery
uses: cloudquery/setup-cloudquery@v3
with:
- version: v4.3.1
+ version: v4.3.2
- name: Migrate DB Postgres
if: needs.prepare.outputs.postgres == 'true'
run: cloudquery migrate tests/postgres.yml
@@ -140,7 +140,7 @@ jobs:
- name: Setup CloudQuery
uses: cloudquery/setup-cloudquery@v3
with:
- version: v4.3.1
+ version: v4.3.2
- name: Publish transformation
working-directory: ${{ needs.prepare.outputs.transformation_dir }}
env:
@@ -34,7 +34,7 @@ jobs:
- name: Setup CloudQuery
uses: cloudquery/setup-cloudquery@v3
with:
- version: v4.3.1
+ version: v4.3.2
- name: Publish visualization
env:
CLOUDQUERY_API_KEY: ${{ secrets.CLOUDQUERY_API_KEY }}
2 changes: 1 addition & 1 deletion .github/workflows/publish_visualization_aws_compliance.yml
@@ -34,7 +34,7 @@ jobs:
- name: Setup CloudQuery
uses: cloudquery/setup-cloudquery@v3
with:
- version: v4.3.1
+ version: v4.3.2
- name: Publish visualization
env:
CLOUDQUERY_API_KEY: ${{ secrets.CLOUDQUERY_API_KEY }}
@@ -34,7 +34,7 @@ jobs:
- name: Setup CloudQuery
uses: cloudquery/setup-cloudquery@v3
with:
- version: v4.3.1
+ version: v4.3.2
- name: Publish visualization
env:
CLOUDQUERY_API_KEY: ${{ secrets.CLOUDQUERY_API_KEY }}
126 changes: 126 additions & 0 deletions .github/workflows/transformations_aws_asset_inventory_free.yml
@@ -0,0 +1,126 @@
name: "Test AWS Asset Inventory Free Policies"

on:
pull_request:
paths:
- "transformations/aws/asset-inventory-free/**"
- ".github/workflows/transformations_aws_asset_inventory_free.yml"
- "transformations/aws/macros/**"
- "transformations/aws/models/**"
- "transformations/macros/**"
push:
branches:
- main
paths:
- "transformations/aws/asset-inventory-free/**"
- ".github/workflows/transformations_aws_asset_inventory_free.yml"
- "transformations/aws/macros/**"
- "transformations/aws/models/**"
- "transformations/macros/**"

env:
SNOW_USER: ${{ secrets.SNOW_USER }}
SNOW_PASSWORD: ${{ secrets.SNOW_PASSWORD }}
# DBT assumes the account is in the form of <account>.<region>
SNOW_ACCOUNT: "${{ secrets.SNOW_ACCOUNT }}.${{ secrets.SNOW_REGION }}"
SNOW_WAREHOUSE: ${{ secrets.SNOW_WAREHOUSE }}
SNOW_DATABASE: ${{ secrets.SNOW_DATABASE }}
SNOW_SCHEMA: ${{ secrets.SNOW_SCHEMA }}
SNOW_REGION: ${{ secrets.SNOW_REGION }}

jobs:
prepare:
runs-on: ubuntu-latest
outputs:
transformation_dir: ${{ fromJson(steps.set-result.outputs.result).transformation_dir }}
postgres: ${{ fromJson(steps.set-result.outputs.result).postgres }}
snowflake: ${{ fromJson(steps.set-result.outputs.result).snowflake }}
bigquery: ${{ fromJson(steps.set-result.outputs.result).bigquery }}
steps:
- name: Checkout
uses: actions/checkout@v4
- uses: actions/github-script@v7
id: set-result
env:
TRANSFORMATION_DIR: transformations/aws/asset-inventory-free
with:
script: |
const fs = require('fs/promises');
const { TRANSFORMATION_DIR: transformation_dir } = process.env;
const [postgres, snowflake, bigquery] = await Promise.all([
fs.access(`${transformation_dir}/tests/postgres.yml`, fs.constants.F_OK).then(() => true).catch(() => false),
fs.access(`${transformation_dir}/tests/snowflake.yml`, fs.constants.F_OK).then(() => true).catch(() => false),
fs.access(`${transformation_dir}/tests/bigquery.yml`, fs.constants.F_OK).then(() => true).catch(() => false),
]);
console.log(JSON.stringify({ transformation_dir, postgres, snowflake, bigquery }));
return {
transformation_dir,
postgres,
snowflake,
bigquery,
};
transformations-aws-asset-inventory-free:
permissions:
id-token: 'write'
contents: 'read'
name: ${{ needs.prepare.outputs.transformation_dir }}
needs: prepare
timeout-minutes: 30
runs-on: ubuntu-latest
defaults:
run:
working-directory: ${{ needs.prepare.outputs.transformation_dir }}
services:
postgres:
image: postgres:11
env:
POSTGRES_PASSWORD: pass
POSTGRES_USER: postgres
POSTGRES_DB: postgres
ports:
- 5432:5432
# Set health checks to wait until postgres has started
options: >-
--health-cmd pg_isready
--health-interval 10s
--health-timeout 5s
--health-retries 5
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Authenticate to Google Cloud
uses: 'google-github-actions/auth@v1'
if: needs.prepare.outputs.bigquery == 'true'
with:
workload_identity_provider: 'projects/151868820337/locations/global/workloadIdentityPools/integration-test-pool/providers/integration-test-provider'
service_account: 'integration-service-account@cq-integration-tests.iam.gserviceaccount.com'
- uses: actions/setup-python@v5
with:
python-version: "3.9"
cache: "pip"
cache-dependency-path: "${{ needs.prepare.outputs.transformation_dir }}/requirements.txt"
- name: Install dependencies
run: pip install -r requirements.txt
- name: Setup CloudQuery
uses: cloudquery/setup-cloudquery@v3
with:
version: v4.3.2
- name: Test Postgres
run: |
cloudquery migrate tests/postgres.yml
dbt run --target dev-pg --profiles-dir ./tests
if: needs.prepare.outputs.postgres == 'true'
env:
CQ_DSN: postgresql://postgres:pass@localhost:5432/postgres
- name: Test Snowflake
run: |
cloudquery migrate tests/snowflake.yml
dbt run --target dev-snowflake --profiles-dir ./tests
if: needs.prepare.outputs.snowflake == 'true'
env:
SNOWFLAKE_CONNECTION_STRING: "${{ secrets.SNOW_USER }}:${{ secrets.SNOW_PASSWORD }}@${{ secrets.SNOW_ACCOUNT }}.${{ secrets.SNOW_REGION }}/${{ secrets.SNOW_DATABASE }}/${{ secrets.SNOW_SCHEMA }}?warehouse=${{ secrets.SNOW_WAREHOUSE }}"
- name: Test BigQuery
if: needs.prepare.outputs.bigquery == 'true'
run: |
cloudquery migrate tests/bigquery.yml
dbt run --target dev-bigquery --profiles-dir ./tests
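
For local experimentation, the file-existence check that the prepare job runs through actions/github-script can be reproduced with plain Node.js. The sketch below mirrors that logic; the check_configs.js file name and the default directory value are illustrative assumptions, not part of the workflow:

// check_configs.js — sketch of the prepare job's detection logic, run from the repository root
const fs = require('fs/promises');

const transformation_dir =
  process.env.TRANSFORMATION_DIR || 'transformations/aws/asset-inventory-free';

// Resolves to true when the file exists, false otherwise (same pattern the workflow script uses)
const exists = (path) =>
  fs.access(path, fs.constants.F_OK).then(() => true).catch(() => false);

(async () => {
  const [postgres, snowflake, bigquery] = await Promise.all([
    exists(`${transformation_dir}/tests/postgres.yml`),
    exists(`${transformation_dir}/tests/snowflake.yml`),
    exists(`${transformation_dir}/tests/bigquery.yml`),
  ]);
  // Same JSON shape the workflow exposes through steps.set-result.outputs.result
  console.log(JSON.stringify({ transformation_dir, postgres, snowflake, bigquery }, null, 2));
})();

Each flag then gates the matching test step via needs.prepare.outputs.<engine> == 'true', so the job only runs dbt against the databases that have a test config checked in.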

This file was deleted.

125 changes: 125 additions & 0 deletions .github/workflows/transformations_aws_compliance_free.yml
@@ -0,0 +1,125 @@
name: "Test AWS Compliance Free Policies"

on:
pull_request:
paths:
- "transformations/aws/compliance-free/**"
- ".github/workflows/transformations_aws_compliance_free.yml"
- "transformations/aws/macros/**"
- "transformations/aws/models/**"
- "transformations/macros/**"
push:
branches:
- main
paths:
- "transformations/aws/compliance-free/**"
- ".github/workflows/transformations_aws_compliance_free.yml"
- "transformations/aws/macros/**"
- "transformations/aws/models/**"
- "transformations/macros/**"

env:
SNOW_USER: ${{ secrets.SNOW_USER }}
SNOW_PASSWORD: ${{ secrets.SNOW_PASSWORD }}
# DBT assumes the account is in the form of <account>.<region>
SNOW_ACCOUNT: "${{ secrets.SNOW_ACCOUNT }}.${{ secrets.SNOW_REGION }}"
SNOW_WAREHOUSE: ${{ secrets.SNOW_WAREHOUSE }}
SNOW_DATABASE: ${{ secrets.SNOW_DATABASE }}
SNOW_SCHEMA: ${{ secrets.SNOW_SCHEMA }}
SNOW_REGION: ${{ secrets.SNOW_REGION }}

jobs:
prepare:
runs-on: ubuntu-latest
outputs:
transformation_dir: ${{ fromJson(steps.set-result.outputs.result).transformation_dir }}
postgres: ${{ fromJson(steps.set-result.outputs.result).postgres }}
snowflake: ${{ fromJson(steps.set-result.outputs.result).snowflake }}
bigquery: ${{ fromJson(steps.set-result.outputs.result).bigquery }}
steps:
- name: Checkout
uses: actions/checkout@v4
- uses: actions/github-script@v7
id: set-result
env:
TRANSFORMATION_DIR: transformations/aws/compliance-free
with:
script: |
const fs = require('fs/promises');
const { TRANSFORMATION_DIR: transformation_dir } = process.env;
const [postgres, snowflake, bigquery] = await Promise.all([
fs.access(`${transformation_dir}/tests/postgres.yml`, fs.constants.F_OK).then(() => true).catch(() => false),
fs.access(`${transformation_dir}/tests/snowflake.yml`, fs.constants.F_OK).then(() => true).catch(() => false),
fs.access(`${transformation_dir}/tests/bigquery.yml`, fs.constants.F_OK).then(() => true).catch(() => false),
]);
return {
transformation_dir,
postgres,
snowflake,
bigquery,
};
transformations-aws-compliance-free:
permissions:
id-token: 'write'
contents: 'read'
name: ${{ needs.prepare.outputs.transformation_dir }}
needs: prepare
timeout-minutes: 30
runs-on: ubuntu-latest
defaults:
run:
working-directory: ${{ needs.prepare.outputs.transformation_dir }}
services:
postgres:
image: postgres:11
env:
POSTGRES_PASSWORD: pass
POSTGRES_USER: postgres
POSTGRES_DB: postgres
ports:
- 5432:5432
# Set health checks to wait until postgres has started
options: >-
--health-cmd pg_isready
--health-interval 10s
--health-timeout 5s
--health-retries 5
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Authenticate to Google Cloud
uses: 'google-github-actions/auth@v1'
if: needs.prepare.outputs.bigquery == 'true'
with:
workload_identity_provider: 'projects/151868820337/locations/global/workloadIdentityPools/integration-test-pool/providers/integration-test-provider'
service_account: 'integration-service-account@cq-integration-tests.iam.gserviceaccount.com'
- uses: actions/setup-python@v5
with:
python-version: "3.9"
cache: "pip"
cache-dependency-path: "${{ needs.prepare.outputs.transformation_dir }}/requirements.txt"
- name: Install dependencies
run: pip install -r requirements.txt
- name: Setup CloudQuery
uses: cloudquery/setup-cloudquery@v3
with:
version: v4.3.2
- name: Test Postgres
run: |
cloudquery migrate tests/postgres.yml
dbt run --target dev-pg --profiles-dir ./tests --select aws_compliance__foundational_security_free
if: needs.prepare.outputs.postgres == 'true'
env:
CQ_DSN: postgresql://postgres:pass@localhost:5432/postgres
- name: Test Snowflake
run: |
cloudquery migrate tests/snowflake.yml
dbt run --target dev-snowflake --profiles-dir ./tests --select aws_compliance__foundational_security_free
if: needs.prepare.outputs.snowflake == 'true'
env:
SNOWFLAKE_CONNECTION_STRING: "${{ secrets.SNOW_USER }}:${{ secrets.SNOW_PASSWORD }}@${{ secrets.SNOW_ACCOUNT }}.${{ secrets.SNOW_REGION }}/${{ secrets.SNOW_DATABASE }}/${{ secrets.SNOW_SCHEMA }}?warehouse=${{ secrets.SNOW_WAREHOUSE }}"
- name: Test BigQuery
if: needs.prepare.outputs.bigquery == 'true'
run: |
cloudquery migrate tests/bigquery.yml
dbt run --target dev-bigquery --profiles-dir ./tests --select aws_compliance__foundational_security_free
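
For reference, the SNOWFLAKE_CONNECTION_STRING exported by the "Test Snowflake" step above is assembled directly from the SNOW_* secrets, and dbt separately expects the account in the <account>.<region> form noted in the env comment. A minimal Node.js sketch of that assembly follows; every value is a hypothetical placeholder, not a real credential:

// sketch only: composes the same string the workflow passes to cloudquery migrate tests/snowflake.yml
const snow = {
  user: 'dbt_user',             // placeholder for secrets.SNOW_USER
  password: 'example-password', // placeholder for secrets.SNOW_PASSWORD
  account: 'ab12345',           // bare account identifier, as stored in secrets.SNOW_ACCOUNT
  region: 'us-east-1',          // placeholder for secrets.SNOW_REGION
  database: 'cq_db',            // placeholder for secrets.SNOW_DATABASE
  schema: 'public',             // placeholder for secrets.SNOW_SCHEMA
  warehouse: 'compute_wh',      // placeholder for secrets.SNOW_WAREHOUSE
};

// Account and region are joined here, just as they are in the workflow's env block,
// because dbt assumes the account is in the form <account>.<region>.
const connectionString =
  `${snow.user}:${snow.password}@${snow.account}.${snow.region}` +
  `/${snow.database}/${snow.schema}?warehouse=${snow.warehouse}`;

console.log(connectionString.replace(snow.password, '******')); // mask the password when printing

With these placeholders the masked output is dbt_user:******@ab12345.us-east-1/cq_db/public?warehouse=compute_wh.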
