-
Notifications
You must be signed in to change notification settings - Fork 0
129 lines (118 loc) · 4.78 KB
/
tests.yaml
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
# CI workflow: runs unit and integration tests on every push.
name: Tests
on: push
env:
  # Artifact Registry path used to look up the newest git-sync image tag at test time.
  GIT_SYNC_IMAGE: europe-north1-docker.pkg.dev/knada-gcp/knada-north/git-sync
  # Base image URL; the Python version from the job matrix is appended to form the full name.
  DATAVERK_AIRFLOW_IMAGE_BASE_URL: europe-north1-docker.pkg.dev/knada-gcp/knada-north/dataverk-airflow-python
permissions:
  contents: read
  # id-token is required for keyless auth via Workload Identity Federation
  # (used by google-github-actions/auth in the integration-tests job).
  id-token: write
jobs:
  # Fast, hermetic unit tests across all supported Python versions.
  unit-tests:
    strategy:
      matrix:
        # Quoted so "3.10" stays a string (unquoted it parses as the float 3.1).
        version: ["3.8", "3.9", "3.10", "3.11"]
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.version }}
      - run: pip3 install poetry
      - run: poetry install --with test
      - run: poetry run pytest
integration-tests:
runs-on: ubuntu-latest
strategy:
matrix:
version: ["3.8", "3.9", "3.10", "3.11"]
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v5
with:
python-version: ${{ matrix.version }}
- id: auth
name: Authenticate with Google Cloud
uses: google-github-actions/auth@v2
with:
token_format: access_token
workload_identity_provider: projects/193123067890/locations/global/workloadIdentityPools/ci-knada-images/providers/ci-knada-images
service_account: [email protected]
- name: Install python dependencies
run: |
pip3 install poetry
poetry install --with integration-test
- name: Start kind cluster
uses: helm/[email protected]
- name: Verify cluster up
run: |
kubectl cluster-info
kubectl get nodes
- name: Setup cluster for tests
run: |
kubectl create sa airflow
kubectl create ns composer-user-workloads
kubectl create secret generic github-app-secret --from-literal=test=test
kubectl create cm ca-bundle-pem --from-literal=test=test
kubectl apply -f ./tests-integration/dbt/k8s_resources
- name: Set up Cloud SDK
uses: google-github-actions/setup-gcloud@v2
with:
version: "480.0.0"
- name: Setup Airflow
env:
AIRFLOW_CONN_SQLITE_DEFAULT: sqlite://?mode=ro
run: |
poetry run airflow db reset -y
poetry run airflow variables set quarto_token ${{ secrets.QUARTO_TOKEN }}
# envs
echo "AIRFLOW__CORE__DAGS_FOLDER=$(pwd)/tests-integration" >> $GITHUB_ENV
gitsync_tag=$(gcloud artifacts docker images list "$GIT_SYNC_IMAGE" --include-tags --sort-by=~Update_Time --limit=1 --format=json | jq -rc '.[0].tags.[0]')
echo $gitsync_tag
echo "CLONE_REPO_IMAGE=ghcr.io/navikt/knada-git-sync/git-sync:$gitsync_tag" >> $GITHUB_ENV
echo $CLONE_REPO_IMAGE
dataverk_airflow_tag=$(gcloud artifacts docker images list "$DATAVERK_AIRFLOW_IMAGE_BASE_URL-${{ matrix.version}}" --include-tags --sort-by=~Update_Time --limit=1 --format=json | jq -rc '.[0].tags.[0]')
echo "KNADA_AIRFLOW_OPERATOR_IMAGE=ghcr.io/navikt/knada-images/dataverk-airflow-python-${{ matrix.version }}:$dataverk_airflow_tag" >> $GITHUB_ENV
- name: Run tests Knada
env:
AIRFLOW_CONN_SQLITE_DEFAULT: sqlite://?mode=ro
KNADA_TEAM_SECRET: secret
NAMESPACE: default
K8S_IMAGE_PULL_SECRETS: ghcr-creds
MARKEDSPLASSEN_HOST: data.ekstern.dev.nav.no
INTEGRATION_TEST: "true"
run: |
poetry run airflow dags reserialize
poetry run airflow dags test KnadaOperators
- name: Run tests Composer
env:
AIRFLOW_CONN_SQLITE_DEFAULT: sqlite://?mode=ro
KNADA_TEAM_SECRET: secret
NAMESPACE: default
K8S_IMAGE_PULL_SECRETS: ghcr-creds
MARKEDSPLASSEN_HOST: data.ekstern.dev.nav.no
INTEGRATION_TEST: "true"
GCS_BUCKET: "dataverk-airflow-tests"
run: |
poetry run airflow dags reserialize
poetry run airflow dags test CloudComposerOperators
- name: Verify successful dag tests
run: |
poetry run airflow dags list-runs -d KnadaOperators
poetry run airflow dags list-runs -d CloudComposerOperators
dvk=$(poetry run airflow dags list-runs -d KnadaOperators -ojson)
dvc=$(poetry run airflow dags list-runs -d CloudComposerOperators -ojson)
errorCount=0
for res in $(jq -n --argjson var1 "$dvk" --argjson var2 "$dvc" '[$var1, $var2] | add' | jq -rc '.[]')
do
status=$(echo $res | jq -r '.state')
if [ "$status" != "success" ]
then
errorCount=$((errorCount+1))
dag_id=$(echo $res | jq -r '.dag_id')
echo "DAG test error: DAG $dag_id got status $status"
fi
done
if [ $errorCount -gt 0 ]
then
exit 1
fi