# .github/workflows/transformations_aws_compliance_premium.yml
name: "Test AWS Compliance Premium Policies"
on:
pull_request:
paths:
- "transformations/aws/compliance-premium/**"
- ".github/workflows/transformations_aws_compliance_premium.yml"
- "transformations/aws/macros/**"
- "transformations/aws/models/**"
- "transformations/macros/**"
- ".github/workflows/wait_for_required_workflows.yml"
- "scripts/workflows/wait_for_required_workflows.js"
push:
branches:
- main
paths:
- "transformations/aws/compliance-premium/**"
- ".github/workflows/transformations_aws_compliance_premium.yml"
- "transformations/aws/macros/**"
- "transformations/aws/models/**"
- "transformations/macros/**"
env:
  SNOW_USER: ${{ secrets.SNOW_USER }}
  SNOW_PASSWORD: ${{ secrets.SNOW_PASSWORD }}
  # dbt expects the account identifier in the form <account>.<region>
  SNOW_ACCOUNT: "${{ secrets.SNOW_ACCOUNT }}.${{ secrets.SNOW_REGION }}"
  SNOW_WAREHOUSE: ${{ secrets.SNOW_WAREHOUSE }}
  SNOW_DATABASE: ${{ secrets.SNOW_DATABASE }}
  SNOW_SCHEMA: ${{ secrets.SNOW_SCHEMA }}
  SNOW_REGION: ${{ secrets.SNOW_REGION }}
  CLOUDQUERY_API_KEY: ${{ secrets.CLOUDQUERY_API_KEY }}
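# Note: the SNOW_* values above are presumably consumed by the dbt profiles
# under ./tests; the CloudQuery Snowflake DSN is assembled separately in the
# "Test Snowflake" step.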
jobs:
  prepare:
    runs-on: ubuntu-latest
    outputs:
      transformation_dir: ${{ fromJson(steps.set-result.outputs.result).transformation_dir }}
      postgres: ${{ fromJson(steps.set-result.outputs.result).postgres }}
      snowflake: ${{ fromJson(steps.set-result.outputs.result).snowflake }}
      bigquery: ${{ fromJson(steps.set-result.outputs.result).bigquery }}
      s3: ${{ fromJson(steps.set-result.outputs.result).s3 }}
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - uses: actions/github-script@v7
        id: set-result
        env:
          TRANSFORMATION_DIR: transformations/aws/compliance-premium
        with:
          script: |
            const fs = require('fs/promises');
            const { TRANSFORMATION_DIR: transformation_dir } = process.env;
            // Probe for per-destination test specs; a destination without a
            // spec file is skipped by the test job below.
            const [postgres, snowflake, bigquery, s3] = await Promise.all([
              fs.access(`${transformation_dir}/tests/postgres.yml`, fs.constants.F_OK).then(() => true).catch(() => false),
              fs.access(`${transformation_dir}/tests/snowflake.yml`, fs.constants.F_OK).then(() => true).catch(() => false),
              fs.access(`${transformation_dir}/tests/bigquery.yml`, fs.constants.F_OK).then(() => true).catch(() => false),
              fs.access(`${transformation_dir}/tests/s3.yml`, fs.constants.F_OK).then(() => true).catch(() => false),
            ]);
            return {
              transformation_dir,
              postgres,
              snowflake,
              bigquery,
              s3
            };
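  # Job outputs are always strings in GitHub Actions, which is why the
  # conditions in the job below compare against the string 'true' rather
  # than a boolean.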
  transformations-aws-compliance-premium:
    permissions:
      id-token: 'write'
      contents: 'read'
    name: ${{ needs.prepare.outputs.transformation_dir }}
    needs: prepare
    timeout-minutes: 30
    runs-on: ubuntu-latest
    defaults:
      run:
        working-directory: ${{ needs.prepare.outputs.transformation_dir }}
    services:
      postgres:
        image: postgres:11
        env:
          POSTGRES_PASSWORD: pass
          POSTGRES_USER: postgres
          POSTGRES_DB: postgres
        ports:
          - 5432:5432
        # Set health checks to wait until postgres has started
        options: >-
          --health-cmd pg_isready
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5
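    # The 5432:5432 port mapping makes the Postgres service reachable at
    # localhost:5432 on the runner, matching CQ_DSN in the "Test Postgres"
    # step.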
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          token: ${{ secrets.GH_CQ_BOT }}
      - name: Authenticate to Google Cloud
        uses: 'google-github-actions/auth@v2'
        if: needs.prepare.outputs.bigquery == 'true'
        with:
          workload_identity_provider: 'projects/151868820337/locations/global/workloadIdentityPools/integration-test-pool/providers/integration-test-provider'
          service_account: 'integration-service-account@cq-integration-tests.iam.gserviceaccount.com'
      - name: Configure AWS credentials
        uses: aws-actions/configure-aws-credentials@v4
        with:
          role-to-assume: arn:aws:iam::590184131402:role/cq-integration-tests-aws-github-action
          aws-region: us-east-1
      - uses: actions/setup-python@v5
        with:
          python-version: "3.9"
          cache: "pip"
          cache-dependency-path: "${{ needs.prepare.outputs.transformation_dir }}/requirements.txt"
      - name: Install dependencies
        run: pip install -r requirements.txt
      - name: Setup CloudQuery
        uses: cloudquery/setup-cloudquery@v4
        with:
          version: v6.8.5
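      # Each destination test below follows the same pattern: `cloudquery
      # migrate` creates/updates the destination tables from the spec under
      # ./tests, then `dbt run` builds the models against the matching target
      # from the profiles in ./tests.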
      - name: Test Postgres
        run: |
          cloudquery migrate tests/postgres.yml
          dbt run --target dev-pg --profiles-dir ./tests
        if: needs.prepare.outputs.postgres == 'true'
        env:
          CQ_DSN: postgresql://postgres:pass@localhost:5432/postgres
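      # The DSN below is assembled from the same SNOW_* secrets as the env
      # block at the top of this workflow.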
      - name: Test Snowflake
        run: |
          cloudquery migrate tests/snowflake.yml
          dbt run --target dev-snowflake --profiles-dir ./tests
        if: needs.prepare.outputs.snowflake == 'true'
        env:
          SNOWFLAKE_CONNECTION_STRING: "${{ secrets.SNOW_USER }}:${{ secrets.SNOW_PASSWORD }}@${{ secrets.SNOW_ACCOUNT }}.${{ secrets.SNOW_REGION }}/${{ secrets.SNOW_DATABASE }}/${{ secrets.SNOW_SCHEMA }}?warehouse=${{ secrets.SNOW_WAREHOUSE }}"
      - name: Test BigQuery
        if: needs.prepare.outputs.bigquery == 'true'
        run: |
          cloudquery migrate tests/bigquery.yml
          dbt run --target dev-bigquery --profiles-dir ./tests
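      # Assumption: the Athena step is gated on the s3.yml spec because the
      # dev-athena target queries data CloudQuery writes to S3; the workflow
      # itself does not spell this out.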
      - name: Test Athena
        if: needs.prepare.outputs.s3 == 'true'
        run: |
          cloudquery migrate tests/s3.yml
          dbt run --target dev-athena --profiles-dir ./tests
      - name: Setup Node.js
        uses: actions/setup-node@v4
        with:
          node-version: 'lts/*'
          cache: 'npm'
          cache-dependency-path: scripts/dbt-pack/package-lock.json
      - name: Install Dependencies
        run: npm ci
        working-directory: ./scripts/dbt-pack
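      # Presumably the gen-docs target uses the dbt-pack tooling installed by
      # `npm ci` above to regenerate this pack's README.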
      - name: Gen docs
        run: |
          make gen-docs
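      # The step below commits the regenerated README back to the branch,
      # reusing the git credentials persisted by the checkout step (the
      # GH_CQ_BOT token).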
      - uses: stefanzweifel/git-auto-commit-action@v5
        with:
          commit_message: "chore: Update readme"
          file_pattern: '${{ needs.prepare.outputs.transformation_dir }}/README.md'