Skip to content

Commit

Permalink
Add mock_pipeline_test to GHA
Browse files Browse the repository at this point in the history
  • Loading branch information
danielvdende committed Sep 11, 2023
1 parent 52fd668 commit b12f870
Show file tree
Hide file tree
Showing 3 changed files with 59 additions and 20 deletions.
65 changes: 45 additions & 20 deletions .github/workflows/ci.yml
Original file line number Diff line number Diff line change
Expand Up @@ -3,35 +3,60 @@ on:
push

jobs:
lint:
# lint:
# runs-on: ubuntu-latest
# steps:
# - uses: actions/checkout@v3
#
# - uses: actions/setup-python@v4
# with:
# python-version: '3.10'
#
# - name: Install flake8
# run: pip install flake8
#
# - name: Run flake8
# run: flake8 dags/
#
# integrity_test:
# runs-on: ubuntu-latest
# steps:
# - uses: actions/checkout@v3
#
# - uses: actions/setup-python@v4
# with:
# python-version: '3.10'
#
# - name: Install integrity test requirements
# run: pip install -r integrity_tests/requirements.txt
#
# - name: Initialize Airflow DB
# run: airflow db init
#
# - name: Run integrity tests
# run: coverage run -m pytest integrity_tests/*

mock_pipeline_test:
runs-on: ubuntu-latest
# needs:
# - lint
# - integrity_test
steps:
- uses: actions/checkout@v3

- uses: actions/setup-python@v4
with:
python-version: '3.10'

- name: Install flake8
run: pip install flake8
- name: Install system dependencies
run: sudo apt-get install -y libsasl2-dev build-essential

- name: Run flake8
run: flake8 dags/
- name: Install mock_pipeline test requirements
run: pip install -r mock_pipeline_requirements.txt

integrity_test:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3

- uses: actions/setup-python@v4
with:
python-version: '3.10'

- name: Install integrity test requirements
run: pip install -r integrity_tests/requirements.txt
- name: Run dbt
run: dbt run --target mock_pipeline

- name: Initialize Airflow DB
run: airflow db init
- name: Run dbt tests
run: dbt test --target mock_pipeline

- name: Run integrity tests
run: coverage run -m pytest integrity_tests/*
11 changes: 11 additions & 0 deletions dags/dbt/profiles.yml
Original file line number Diff line number Diff line change
Expand Up @@ -11,3 +11,14 @@ transactions:
"spark.sql.warehouse.dir": "/opt/airflow/spark-warehouse"
"spark.sql.parquet.compression.codec": "gzip"
"spark.hadoop.javax.jdo.option.ConnectionURL": "jdbc:derby:;databaseName=/opt/airflow/metastore_db;create=true"

# dbt connection profile for the CI mock-pipeline target
# (selected by `dbt run --target mock_pipeline` / `dbt test --target mock_pipeline`
# in the ci.yml workflow above).
# NOTE(review): settings mirror the `transactions` profile above — same
# in-process Spark session (method: session, host: localhost) and the same
# Derby-backed metastore / warehouse dir; only the profile name differs,
# with tables written to the `bank` schema.
mock_pipeline:
type: spark
host: localhost
method: session
schema: bank
server_side_parameters:
# "True" is deliberately a string here — Spark config values are strings.
"spark.databricks.delta.schema.autoMerge.enabled": "True"
"spark.sql.warehouse.dir": "/opt/airflow/spark-warehouse"
"spark.sql.parquet.compression.codec": "gzip"
# Embedded Derby metastore, created on first use — suitable for CI only;
# presumably not shared across concurrent jobs (Derby is single-process).
"spark.hadoop.javax.jdo.option.ConnectionURL": "jdbc:derby:;databaseName=/opt/airflow/metastore_db;create=true"
3 changes: 3 additions & 0 deletions mock_pipeline_requirements.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
dbt-spark[PyHive]==1.7.0b1
dbt-core==1.7.0b1
pyspark==3.4.0

0 comments on commit b12f870

Please sign in to comment.