move internal version into separate action.yaml #12
# What?
#
# Run tests and edit metadata as needed to tag and release an arbitrary ref.
# Uploads to an internal archive for further processing.
#
# How?
#
# After checking out and testing the provided ref, the package is built and uploaded.
# Metadata is altered with stream processing commands.
#
# When?
#
# Called by upstream internal-release.yml workflows that live in adapter repos.
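#
# For reference, a minimal caller sketch (the repo name, input values, and the path to this
# reusable workflow are illustrative placeholders, not taken from a real adapter repo):
#
#   # .github/workflows/internal-release.yml in a hypothetical dbt-example repo
#   name: "Example Internal Release"
#   on: workflow_dispatch
#   jobs:
#     release:
#       uses: <org>/<repo>/.github/workflows/<this-file>.yml@main
#       with:
#         dbms_name: "example"
#         package_test_command: "dbt --version"
#       secrets: inherit  # the AWS_* secrets used below must be available to the caller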
name: "Internal Archive Release" | |
on: | |
workflow_call: | |
inputs: | |
dbms_name: | |
description: "The warehouse name for the adapter." | |
type: string | |
required: true | |
package_test_command: | |
description: "Package test command" | |
type: string | |
required: true | |
ref: | |
description: "The ref to use (default to main)" | |
type: string | |
default: "main" | |
required: false | |
org: | |
description: "The organization that maintains the adapter" | |
type: string | |
default: "dbt-labs" | |
required: false # only needed by third party workflows | |
permissions: read-all | |
# will cancel previous workflows triggered by the same event and for the same ref for PRs or same SHA otherwise | |
concurrency: | |
group: ${{ github.workflow }}-${{ github.event_name }}-${{ contains(github.event_name, 'pull_request') && github.event.pull_request.head.ref || github.ref }}-${{ inputs.deploy-to }} | |
cancel-in-progress: true | |
defaults:
  run:
    shell: bash
env:
  PYTHON_TARGET_VERSION: 3.8
  NOTIFICATION_PREFIX: "[Internal Archive Release]"
  TEMP_PROFILE_NAME: "temp_aws_profile"
  HATCH_ADAPTERS: (postgres) # Must be a |-delimited list (used as a Bash regex alternation)
  ADAPTERS_THAT_SKIP_TESTS: (spark|trino|databricks)
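# Illustration only: if HATCH_ADAPTERS were "(postgres|bigquery)", the check below would
# match a dbms_name of "postgres" or "bigquery" but not "redshift" -- the value is passed
# unquoted to Bash's =~ operator, so | acts as regex alternation.
# ("bigquery" here is a made-up example, not a statement about which adapters use hatch.)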
jobs:
  initial-setup:
    runs-on: ubuntu-latest
    outputs:
      is_hatch_adapter: ${{ steps.check_if_adapter_builds_with_hatch.outputs.is_hatch_adapter }}
      skip_tests: ${{ steps.check_if_this_build_can_skip_tests.outputs.skip_tests }}
    steps:
      - name: "Check if dbms_name is contained in HATCH_ADAPTERS"
        id: "check_if_adapter_builds_with_hatch"
        run: |
          echo "[Debug] HATCH_ADAPTERS='${{ env.HATCH_ADAPTERS }}'"
          # note: the regex operand is left unquoted to avoid unintended Bash quoting effects
          if [[ "${{ inputs.dbms_name }}" =~ ${{ env.HATCH_ADAPTERS }} ]]; then
            echo "is_hatch_adapter=true" >> $GITHUB_OUTPUT
          else
            echo "is_hatch_adapter=false" >> $GITHUB_OUTPUT
          fi
      - name: "Check if dbms can skip tests"
        id: "check_if_this_build_can_skip_tests"
        run: |
          # again, the regex operand is unquoted to avoid quoting side effects
          if [[ "${{ inputs.dbms_name }}" =~ ${{ env.ADAPTERS_THAT_SKIP_TESTS }} ]]; then
            echo "skip_tests=true" >> $GITHUB_OUTPUT
          else
            echo "skip_tests=false" >> $GITHUB_OUTPUT
          fi
  job-setup:
    name: Job Setup
    runs-on: ubuntu-latest
    needs: [initial-setup]
    steps:
      - name: "[DEBUG] Print Variables"
        run: |
          echo Warehouse name: ${{ inputs.dbms_name }}
          echo The release ref: ${{ inputs.ref }}
          echo Package test command: ${{ inputs.package_test_command }}
      - name: "Checkout provided ref, default to branch main"
        uses: actions/checkout@v4
        with:
          repository: "${{ inputs.org }}/dbt-${{ inputs.dbms_name }}"
          ref: "${{ inputs.ref }}"
  run-unit-tests-tox:
    name: 'Unit Tests (Tox)'
    runs-on: ubuntu-latest
    needs: [initial-setup, job-setup]
    if: |
      needs.initial-setup.outputs.is_hatch_adapter == 'false' &&
      needs.initial-setup.outputs.skip_tests == 'false'
    env:
      TOXENV: unit
    steps:
      - name: "Checkout provided ref, default to branch main"
        uses: actions/checkout@v4
        with:
          repository: "${{ inputs.org }}/dbt-${{ inputs.dbms_name }}"
          ref: "${{ inputs.ref }}"
      - name: "Set up Python - ${{ env.PYTHON_TARGET_VERSION }}"
        uses: actions/setup-python@v5
        with:
          python-version: ${{ env.PYTHON_TARGET_VERSION }}
      - name: "Install Python Dependencies"
        run: |
          python -m pip install --user --upgrade pip
          python -m pip install tox
          python -m pip --version
          python -m tox --version
      - name: "Run Tests using tox"
        run: tox
  run-unit-tests-hatch:
    name: 'Unit Tests (Hatch)'
    runs-on: ubuntu-latest
    needs: [initial-setup, job-setup]
    if: |
      needs.initial-setup.outputs.is_hatch_adapter == 'true' &&
      needs.initial-setup.outputs.skip_tests == 'false'
    steps:
      - name: "Checkout provided ref, default to branch main"
        uses: actions/checkout@v4
        with:
          repository: "${{ inputs.org }}/dbt-${{ inputs.dbms_name }}"
          ref: "${{ inputs.ref }}"
      - name: "Setup `hatch`"
        uses: dbt-labs/dbt-adapters/.github/actions/setup-hatch@main
      - name: "Run Tests using hatch"
        run: hatch run unit-tests:all
  ####################
  # Integration Tests
  ####################
  run-integration-tests-tox:
    name: 'Integration Tests (Tox)'
    runs-on: ubuntu-latest
    needs: [initial-setup, job-setup, run-unit-tests-tox]
    if: |
      needs.initial-setup.outputs.is_hatch_adapter == 'false' &&
      needs.initial-setup.outputs.skip_tests == 'false'
    env:
      TOXENV: integration
    steps:
      - name: "Checkout provided ref, default to branch main"
        uses: actions/checkout@v4
        with:
          repository: "${{ inputs.org }}/dbt-${{ inputs.dbms_name }}"
          ref: "${{ inputs.ref }}"
      - name: "Setup Environment Variables - ./scripts/env-setup.sh"
        run: |
          if [ -f './scripts/env-setup.sh' ]; then
            source ./scripts/env-setup.sh
          fi
      - name: "Setup Environment Variables - Secrets Context"
        uses: actions/github-script@v7
        id: check-env
        with:
          result-encoding: string
          script: |
            try {
              const { SECRETS_CONTEXT, INTEGRATION_TESTS_SECRETS_PREFIX } = process.env
              const secrets = JSON.parse(SECRETS_CONTEXT)
              if (INTEGRATION_TESTS_SECRETS_PREFIX) {
                for (const [key, value] of Object.entries(secrets)) {
                  if (key.startsWith(INTEGRATION_TESTS_SECRETS_PREFIX)) {
                    core.exportVariable(key, value)
                  }
                }
              } else {
                core.info("The INTEGRATION_TESTS_SECRETS_PREFIX env variable is empty or not defined, skipping the secrets setup.")
              }
            } catch (err) {
              core.error("Error while reading or parsing the JSON")
              core.setFailed(err)
            }
        env:
          SECRETS_CONTEXT: ${{ toJson(secrets) }}
      - name: "Set up Python - ${{ env.PYTHON_TARGET_VERSION }}"
        uses: actions/setup-python@v5
        with:
          python-version: ${{ env.PYTHON_TARGET_VERSION }}
      - name: "Install Python Dependencies"
        run: |
          python -m pip install --user --upgrade pip
          python -m pip --version
          python -m pip install tox
          tox --version
      - name: "Run Tests using tox"
        run: tox
  run-integration-tests-hatch:
    name: 'Integration Tests (Hatch)'
    needs: [initial-setup, job-setup, run-unit-tests-hatch]
    if: |
      needs.initial-setup.outputs.is_hatch_adapter == 'true' &&
      needs.initial-setup.outputs.skip_tests == 'false'
    uses: "dbt-labs/dbt-postgres/.github/workflows/integration-tests.yml@main"
    with:
      core_branch: "main"
      dbt_adapters_branch: "main"
  ####################
  # Artifact Handling
  ####################
  create-internal-release:
    name: Create release for internal archive
    runs-on: ubuntu-latest
    needs: [initial-setup, job-setup, run-integration-tests-tox, run-integration-tests-hatch]
    # Build artifact if
    # 1. Setup jobs succeeded
    # 2a. Tests can be skipped
    # 2b. One of the integration test sets passed (these only run on passing unit tests)
    if: |
      always() &&
      needs.job-setup.result == 'success' &&
      (
        needs.initial-setup.outputs.skip_tests == 'true' ||
        (needs.run-integration-tests-tox.result == 'success' || needs.run-integration-tests-hatch.result == 'success')
      )
    steps:
      - name: "Checkout provided ref, default to branch main"
        uses: actions/checkout@v4
        with:
          repository: "${{ inputs.org }}/dbt-${{ inputs.dbms_name }}"
          ref: "${{ inputs.ref }}"
      - name: "Set up Python - ${{ env.PYTHON_TARGET_VERSION }}"
        uses: actions/setup-python@v5
        with:
          python-version: ${{ env.PYTHON_TARGET_VERSION }}
      - name: "Install Python Dependencies"
        run: |
          python -m pip install --user --upgrade pip
          python -m pip install --upgrade setuptools wheel twine check-wheel-contents
          python -m pip --version
      - name: "Configure AWS profile for upload"
        run: |
          aws configure set aws_access_key_id ${{ secrets.AWS_ARCHIVE_ACCESS_KEY_ID }} --profile ${{ env.TEMP_PROFILE_NAME }}
          aws configure set aws_secret_access_key ${{ secrets.AWS_ARCHIVE_SECRET_ACCESS_KEY }} --profile ${{ env.TEMP_PROFILE_NAME }}
          aws configure set region ${{ secrets.AWS_REGION }} --profile ${{ env.TEMP_PROFILE_NAME }}
          aws configure set output text --profile ${{ env.TEMP_PROFILE_NAME }}
          aws codeartifact login --tool twine --repository ${{ secrets.AWS_REPOSITORY }} \
            --domain ${{ secrets.AWS_DOMAIN }} --domain-owner ${{ secrets.AWS_DOMAIN_OWNER }} \
            --region ${{ secrets.AWS_REGION }} --profile ${{ env.TEMP_PROFILE_NAME }}
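      # Note: `aws codeartifact login --tool twine` above registers a repository named
      # "codeartifact" (with a temporary auth token) in ~/.pypirc, which is what the
      # `twine upload --repository codeartifact` call in the final step relies on.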
- name: "Get version in package and versions published to internal pypi" | |
uses: | |
run: | | |
version_file="dbt/adapters/${{ inputs.dbms_name }}/__version__.py" | |
setup_file="./setup.py" | |
version_in_file=$(grep -E 'version(: str)? =' "${version_file}" | cut -d '"' -f2) | |
echo "[Debug] version_in_file: ${version_in_file}" | |
echo "CURRENT_PKG_VERSION=${version_in_file}" >> "$GITHUB_ENV" | |
- name: "Determine next internal release version" | |
uses: dbt-labs/dbt-release/.github/actions/next-internal-version | |
with: | |
version_number: $CURRENT_PKG_VERSION | |
- name: "Update version in package and setup.py" | |
uses: | |
run: | | |
internal_release_version="${{ steps.get_next_internal_release_version.outputs.internal_release_version }}" | |
commit_sha="$(git rev-parse HEAD)" | |
echo ">>> Altering ${version_file}" | |
# Ensure a build+xxx where xxx is an integer is always present in versioning | |
# sed may be a no-op -- this is fine! | |
v="${internal_release_version}+${commit_sha}" | |
tee <<< "version = \"${v}\"" "${version_file}" | |
if [ -f "${setup_file}" ]; then | |
sed -i "s/^package_version = .*$/package_version = \"${v}\"/" "${setup_file}" | |
fi | |
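      # Illustration only (made-up values): if the action above returned 1.8.0b2 and HEAD
      # were abc1234..., the version file would now contain: version = "1.8.0b2+abc1234..."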
      ################
      # Build package
      ################
      #
      # 1. Build with setup.py
      #
      - name: "Build Distributions - scripts/build-dist.sh"
        if: "${{ needs.initial-setup.outputs.is_hatch_adapter == 'false' }}"
        run: |
          if [ -f scripts/build-dist.sh ]; then
            scripts/build-dist.sh
          else
            # Fall back to the basic build command
            python setup.py sdist bdist_wheel
          fi
      #
      # 2. Build with Hatch
      #
      - name: "Setup `hatch`"
        if: "${{ needs.initial-setup.outputs.is_hatch_adapter == 'true' }}"
        uses: dbt-labs/dbt-adapters/.github/actions/setup-hatch@main
      - name: "Build Distributions - hatch"
        if: "${{ needs.initial-setup.outputs.is_hatch_adapter == 'true' }}"
        run: hatch build
      ###################
      # Check and publish
      ###################
      - name: "[DEBUG] Show Distributions"
        run: ls -lh dist/
      - name: "Check Distribution Descriptions"
        run: |
          if [[ "${{ needs.initial-setup.outputs.is_hatch_adapter }}" == 'true' ]]; then
            hatch run build:check-all
          else
            twine check dist/*
          fi
      - name: "[DEBUG] Check Wheel Contents"
        run: |
          check-wheel-contents dist/*.whl --ignore W007,W008
      - name: "Upload Build Artifact - ${{ steps.get_next_internal_release_version.outputs.internal_release_version }}"
        run: |
          twine upload --repository codeartifact dist/*
          version_file="dbt/adapters/${{ inputs.dbms_name }}/__version__.py"
          version="$(grep 'version =' "${version_file}" | cut -d '"' -f2)"
          message="-- Success -- released ${version}"
          echo "::notice $NOTIFICATION_PREFIX::$message"