# What?
#
# Run tests and edit metadata as needed to tag and release an arbitrary ref.
# Uploads to an internal archive for further processing.
#
# How?
#
# After checking out and testing the provided ref, the package is built and uploaded.
# Metadata is altered with stream processing commands.
#
# When?
#
# Called by upstream internal-release.yml workflows that live in adapter repos.
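#
# Example (hypothetical) caller from an adapter repo's internal-release.yml; the
# repository path and input values below are placeholders, not real ones:
#
#   jobs:
#     internal-archive-release:
#       uses: <org>/<repo>/.github/workflows/<this-workflow>.yml@main
#       with:
#         dbms_name: "redshift"
#         version_number: "1.0.0b1"
#         package_test_command: "dbt --version"
#         ref: "main"
#       secrets: inherit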
name: "Internal Archive Release"
on:
  workflow_call:
    inputs:
      dbms_name:
        description: "The warehouse name for the adapter."
        type: string
        required: true
      version_number:
        description: "The release version number (e.g. 1.0.0b1)"
        type: string
        required: true
      package_test_command:
        description: "Package test command"
        type: string
        required: true
      ref:
        description: "The ref to use (leave empty to use main)"
        type: string
        required: true
      org:
        description: "The organization that maintains the adapter"
        type: string
        default: "dbt-labs"
        required: false
permissions: read-all
# will cancel previous runs triggered by the same event and for the same ref
# (the PR head ref for pull requests, github.ref otherwise)
concurrency:
  group: ${{ github.workflow }}-${{ github.event_name }}-${{ contains(github.event_name, 'pull_request') && github.event.pull_request.head.ref || github.ref }}
  cancel-in-progress: true
defaults:
  run:
    shell: bash
env:
  PYTHON_TARGET_VERSION: "3.8"
  NOTIFICATION_PREFIX: "[Internal Archive Release]"
  TEMP_PROFILE_NAME: "temp_aws_profile"
  HATCH_ADAPTERS: (postgres) # Must be a |-delimited list
  SKIP_TEST_ADAPTERS: (spark|trino|databricks)
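# HATCH_ADAPTERS and SKIP_TEST_ADAPTERS are matched against inputs.dbms_name with
# Bash's `=~` regex operator in the initial-setup job below. Illustrative
# (hypothetical) outcomes: [[ "postgres" =~ (postgres) ]] matches, so dbt-postgres
# builds with Hatch; [[ "redshift" =~ (spark|trino|databricks) ]] does not match,
# so its tests are not skipped.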
jobs:
  initial-setup:
    runs-on: ubuntu-latest
    outputs:
      is_hatch_adapter: ${{ steps.check_if_adapter_builds_with_hatch.outputs.is_hatch_adapter }}
      skip_tests: ${{ steps.check_if_this_build_can_skip_tests.outputs.skip_tests }}
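    # Each step below writes its result to $GITHUB_OUTPUT; those values surface as
    # steps.<id>.outputs.<name> and are republished by the `outputs` block above for
    # downstream jobs to read via the `needs` context.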
    steps:
      - name: "Check if dbms_name is contained in HATCH_ADAPTERS"
        id: "check_if_adapter_builds_with_hatch"
        run: |
          echo "HATCH_ADAPTERS='${{ env.HATCH_ADAPTERS }}'" >> $GITHUB_ENV
          # note: the regex argument is left unquoted to avoid unintended Bash quoting effects
          if [[ "${{ inputs.dbms_name }}" =~ ${{ env.HATCH_ADAPTERS }} ]]; then
            echo "is_hatch_adapter=true" >> $GITHUB_OUTPUT
          else
            echo "is_hatch_adapter=false" >> $GITHUB_OUTPUT
          fi
      - name: "Check if dbms can skip tests"
        id: "check_if_this_build_can_skip_tests"
        run: |
          if [[ "${{ inputs.dbms_name }}" =~ ${{ env.SKIP_TEST_ADAPTERS }} ]]; then
            echo "skip_tests=true" >> $GITHUB_OUTPUT
          else
            echo "skip_tests=false" >> $GITHUB_OUTPUT
          fi
  job-setup:
    name: Job Setup
    runs-on: ubuntu-latest
    needs: [initial-setup]
    steps:
      - name: "[DEBUG] Print Variables"
        run: |
          echo Warehouse name: ${{ inputs.dbms_name }}
          echo The release version number: ${{ inputs.version_number }}
          echo The release ref: ${{ inputs.ref }}
          echo Package test command: ${{ inputs.package_test_command }}
      - name: "Test clone of dbt-trino"
        run: git clone https://github.com/starburstdata/dbt-trino.git
      - name: "Checkout provided ref, default to branch main"
        uses: actions/checkout@v4
        with:
          repository: "${{ inputs.org }}/dbt-${{ inputs.dbms_name }}"
          ref: "${{ inputs.ref || 'main' }}"
          path: "dbt-${{ inputs.dbms_name }}"
      - name: "Validate patch version input against patch version of ref"
        id: validate_version
        run: |
          version_in_file="$(grep 'version =' "dbt/adapters/${{ inputs.dbms_name }}/__version__.py" | cut -d '"' -f2)"
          if [[ "${{ inputs.version_number }}" != "${version_in_file}" ]]; then
            message="Error: the patch version input to this job (${{ inputs.version_number }}) and the version of the code at the input ref (${version_in_file}) are not equal. Exiting..."
            echo "::error $NOTIFICATION_PREFIX::$message"
            exit 1
          fi
  run-unit-tests-tox:
    name: 'Unit Tests (Tox)'
    runs-on: ubuntu-latest
    needs: [initial-setup, job-setup]
    if: |
      needs.initial-setup.outputs.is_hatch_adapter == 'false' &&
      needs.initial-setup.outputs.skip_tests == 'false'
    env:
      TOXENV: unit
    steps:
      - name: "Checkout provided ref, default to branch main"
        uses: actions/checkout@v4
        with:
          repository: "${{ inputs.org }}/dbt-${{ inputs.dbms_name }}"
          ref: "${{ inputs.ref || 'main' }}"
          path: "dbt-${{ inputs.dbms_name }}"
      - name: "Set up Python - ${{ env.PYTHON_TARGET_VERSION }}"
        uses: actions/setup-python@v5
        with:
          python-version: ${{ env.PYTHON_TARGET_VERSION }}
      - name: "Install Python Dependencies"
        run: |
          python -m pip install --user --upgrade pip
          python -m pip install tox
          python -m pip --version
          python -m tox --version
      - name: "Run Tests using tox"
        run: tox
  run-unit-tests-hatch:
    name: 'Unit Tests (Hatch)'
    runs-on: ubuntu-latest
    if: |
      needs.initial-setup.outputs.is_hatch_adapter == 'true' &&
      needs.initial-setup.outputs.skip_tests == 'false'
    needs: [initial-setup, job-setup]
    steps:
      - name: "Checkout provided ref, default to branch main"
        uses: actions/checkout@v4
        with:
          repository: "${{ inputs.org }}/dbt-${{ inputs.dbms_name }}"
          ref: "${{ inputs.ref || 'main' }}"
          path: "dbt-${{ inputs.dbms_name }}"
      - name: "Setup `hatch`"
        uses: dbt-labs/dbt-adapters/.github/actions/setup-hatch@main
      - name: "Run Tests using hatch"
        run: hatch run unit-tests:all
  ####################
  # Integration Tests
  ####################
  run-integration-tests-tox:
    name: 'Integration Tests (Tox)'
    runs-on: ubuntu-latest
    needs: [initial-setup, job-setup, run-unit-tests-tox]
    if: |
      needs.initial-setup.outputs.is_hatch_adapter == 'false' &&
      needs.initial-setup.outputs.skip_tests == 'false'
    env:
      TOXENV: integration
    steps:
      - name: "Checkout provided ref, default to branch main"
        uses: actions/checkout@v4
        with:
          repository: "${{ inputs.org }}/dbt-${{ inputs.dbms_name }}"
          ref: "${{ inputs.ref || 'main' }}"
          path: "dbt-${{ inputs.dbms_name }}"
      - name: "Setup Environment Variables - ./scripts/env-setup.sh"
        run: |
          if [ -f './scripts/env-setup.sh' ]; then
            source ./scripts/env-setup.sh
          fi
      - name: "Setup Environment Variables - Secrets Context"
        uses: actions/github-script@v7
        id: check-env
        with:
          result-encoding: string
          script: |
            try {
              const { SECRETS_CONTEXT, INTEGRATION_TESTS_SECRETS_PREFIX } = process.env
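              // Illustrative (hypothetical) behavior: if INTEGRATION_TESTS_SECRETS_PREFIX
              // were set to "DBT_ENV", every secret whose name starts with "DBT_ENV"
              // would be exported as an environment variable of the same name for the
              // test steps that follow.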
              const secrets = JSON.parse(SECRETS_CONTEXT)
              if (INTEGRATION_TESTS_SECRETS_PREFIX) {
                for (const [key, value] of Object.entries(secrets)) {
                  if (key.startsWith(INTEGRATION_TESTS_SECRETS_PREFIX)) {
                    core.exportVariable(key, value)
                  }
                }
              } else {
                core.info("The INTEGRATION_TESTS_SECRETS_PREFIX env variable is empty or not defined, skipping the secrets setup.")
              }
            } catch (err) {
              core.error("Error while reading or parsing the JSON")
              core.setFailed(err)
            }
        env:
          SECRETS_CONTEXT: ${{ toJson(secrets) }}
      - name: "Set up Python - ${{ env.PYTHON_TARGET_VERSION }}"
        uses: actions/setup-python@v5
        with:
          python-version: ${{ env.PYTHON_TARGET_VERSION }}
      - name: "Install Python Dependencies"
        run: |
          python -m pip install --user --upgrade pip
          python -m pip --version
          python -m pip install tox
          tox --version
      - name: "Run Tests using tox"
        run: tox
  run-integration-tests-hatch:
    name: 'Integration Tests (Hatch)'
    needs: [initial-setup, job-setup, run-unit-tests-hatch]
    if: |
      needs.initial-setup.outputs.is_hatch_adapter == 'true' &&
      needs.initial-setup.outputs.skip_tests == 'false'
    uses: "dbt-labs/dbt-postgres/.github/workflows/integration-tests.yml@main"
    with:
      core_branch: "main"
      dbt_adapters_branch: "main"
  ####################
  # Artifact Handling
  ####################
  create-internal-release:
    name: Create release for internal archive
    runs-on: ubuntu-latest
    needs: [initial-setup, job-setup, run-integration-tests-tox, run-integration-tests-hatch]
    # Build the artifact if
    # 1. the setup jobs succeeded, and either
    # 2a. tests could be skipped, or
    # 2b. one of the integration test sets passed (these only run on passing unit tests)
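    # `always()` is required below: the integration-test jobs in `needs` may be skipped
    # entirely (e.g. when skip_tests is true), and without a status-check function this
    # job would be skipped along with them.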
    if: |
      always() &&
      needs.job-setup.result == 'success' &&
      (
        needs.initial-setup.outputs.skip_tests == 'true' ||
        (needs.run-integration-tests-tox.result == 'success' || needs.run-integration-tests-hatch.result == 'success')
      )
    steps:
      - name: "Checkout provided ref, default to branch main"
        uses: actions/checkout@v4
        with:
          repository: "${{ inputs.org }}/dbt-${{ inputs.dbms_name }}"
          ref: "${{ inputs.ref || 'main' }}"
          path: "dbt-${{ inputs.dbms_name }}"
      - name: "Set up Python - ${{ env.PYTHON_TARGET_VERSION }}"
        uses: actions/setup-python@v5
        with:
          python-version: ${{ env.PYTHON_TARGET_VERSION }}
      - name: "Install Python Dependencies"
        run: |
          python -m pip install --user --upgrade pip
          python -m pip install --upgrade setuptools wheel twine check-wheel-contents
          python -m pip --version
      - name: "Configure AWS profile for upload"
        run: |
          aws configure set aws_access_key_id ${{ secrets.AWS_ARCHIVE_ACCESS_KEY_ID }} --profile ${{ env.TEMP_PROFILE_NAME }}
          aws configure set aws_secret_access_key ${{ secrets.AWS_ARCHIVE_SECRET_ACCESS_KEY }} --profile ${{ env.TEMP_PROFILE_NAME }}
          aws configure set region ${{ secrets.AWS_REGION }} --profile ${{ env.TEMP_PROFILE_NAME }}
          aws configure set output text --profile ${{ env.TEMP_PROFILE_NAME }}
          aws codeartifact login --tool twine --repository ${{ secrets.AWS_REPOSITORY }} --domain ${{ secrets.AWS_DOMAIN }} --domain-owner ${{ secrets.AWS_DOMAIN_OWNER }} --region ${{ secrets.AWS_REGION }} --profile ${{ env.TEMP_PROFILE_NAME }}
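      # `aws codeartifact login --tool twine` writes a repository entry named `codeartifact`
      # (endpoint plus a short-lived auth token) into ~/.pypirc; the
      # `twine upload --repository codeartifact` call in the upload step below relies on it.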
      - name: "Alter version in metadata of python package"
        run: |
          version_file="dbt/adapters/${{ inputs.dbms_name }}/__version__.py"
          setup_file="./setup.py"
          version_in_file=$(grep 'version =' "${version_file}" | cut -d '"' -f2)
          # check the latest build of the adapter code already in our archive
          versions_on_aws="$(aws codeartifact list-package-versions --repository ${{ secrets.AWS_REPOSITORY }} --domain ${{ secrets.AWS_DOMAIN }} --domain-owner ${{ secrets.AWS_DOMAIN_OWNER }} --region ${{ secrets.AWS_REGION }} --profile ${{ env.TEMP_PROFILE_NAME }} --format pypi --package dbt-${{ inputs.dbms_name }} --output json --query 'versions[*].version' | jq -r '.[]' | grep "^${{ inputs.version_number }}" || true )" # suppress pipefail only here
          current_latest_version="$(echo "${versions_on_aws}" | sort -V | tail -n 1)"
          echo "[Debug] version_in_file: ${version_in_file}"
          echo "[Debug] current_latest_version: ${current_latest_version}"
          echo ">>> Altering ${version_file}"
          # Ensure a "+buildN" suffix, where N is an integer, is always present in the version
          # sed may be a no-op -- this is fine!
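          # Example (hypothetical values): if the archive already holds 1.0.0b1+build2,
          # this run publishes 1.0.0b1+build3; if no matching +build version exists yet,
          # 1.0.0b1 becomes 1.0.0b1+build1.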
          if [[ ${current_latest_version} =~ (.*build)([0-9]+)$ ]]; then
            base="${BASH_REMATCH[1]}"
            number="${BASH_REMATCH[2]}"
            new_number=$((number + 1))
            v="${base}${new_number}"
            tee <<< "version = \"${v}\"" "${version_file}"
            if [ -f "${setup_file}" ]; then
              sed -i "s/^package_version = .*$/package_version = \"${v}\"/" "${setup_file}"
            fi
          else
            v="${version_in_file}+build1"
            tee <<< "version = \"${v}\"" "${version_file}"
            if [ -f "${setup_file}" ]; then
              sed -i "s/^package_version = .*$/package_version = \"${v}\"/" "${setup_file}"
            fi
          fi
      ################
      # Build package
      ################
      #
      # 1. Build with setup.py
      #
      - name: "Build Distributions - scripts/build-dist.sh"
        if: "${{ needs.initial-setup.outputs.is_hatch_adapter == 'false' }}"
        run: |
          if [ -f scripts/build-dist.sh ]; then
            scripts/build-dist.sh
          else
            # fall back to the basic build command
            python setup.py sdist bdist_wheel
          fi
      #
      # 2. Build with Hatch
      #
      - name: "Setup `hatch`"
        if: "${{ needs.initial-setup.outputs.is_hatch_adapter == 'true' }}"
        uses: dbt-labs/dbt-adapters/.github/actions/setup-hatch@main
      - name: "Build Distributions - hatch"
        if: "${{ needs.initial-setup.outputs.is_hatch_adapter == 'true' }}"
        run: hatch build
      ###################
      # Check and publish
      ###################
      - name: "[DEBUG] Show Distributions"
        run: ls -lh dist/
      - name: "Check Distribution Descriptions"
        run: |
          if [[ "${{ needs.initial-setup.outputs.is_hatch_adapter }}" == 'true' ]]; then
            hatch run build:check-all
          else
            twine check dist/*
          fi
      - name: "[DEBUG] Check Wheel Contents"
        run: |
          check-wheel-contents dist/*.whl --ignore W007,W008
      - name: "Upload Build Artifact - ${{ inputs.version_number }}"
        run: |
          twine upload --repository codeartifact dist/*
          version_file="dbt/adapters/${{ inputs.dbms_name }}/__version__.py"
          version="$(grep 'version =' "${version_file}" | cut -d '"' -f2)"
          message="-- Success -- released ${version}"
          echo "::notice $NOTIFICATION_PREFIX::$message"