Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

refactor: simplify make commands #32

Merged
12 commits merged into from
Aug 23, 2023
125 changes: 63 additions & 62 deletions Makefile
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
# Copyright 2023 Google LLC
# Copyright 2022 Google LLC
This conversation was marked as resolved.
Show resolved Hide resolved

# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
Expand All @@ -23,77 +23,78 @@ pre-commit: ## Runs the pre-commit checks over entire repo
cd pipelines && \
poetry run pre-commit run --all-files

setup: ## Set up local environment for Python development on pipelines
@cd pipelines && \
poetry install --with dev
# Terraform environment selector (dev|test|prod); override per-invocation,
# e.g. `make deploy env=test`.
env ?= dev
# `deploy`: provision the project infrastructure with Terraform.
# Reads VERTEX_PROJECT_ID / VERTEX_LOCATION from the environment (env.sh);
# remote state lives in the gs://<project>-tfstate bucket wired in at init time.
deploy: ## Deploy the Terraform infrastructure to your project. Requires VERTEX_PROJECT_ID and VERTEX_LOCATION env variables to be set in env.sh. Optionally specify env=<dev|test|prod> (default = dev)
	@ cd terraform/envs/$(env) && \
	terraform init -backend-config='bucket=${VERTEX_PROJECT_ID}-tfstate' && \
	terraform apply -var 'project_id=${VERTEX_PROJECT_ID}' -var 'region=${VERTEX_LOCATION}'

test-utils: ## Runs unit tests for the util scripts
# `undeploy`: tear down everything `deploy` created for $(env).
# Mirrors `deploy` exactly (same init against the same state bucket) but runs
# `terraform destroy` — destructive; terraform's own confirmation prompt is the
# only guard here.
undeploy: ## DESTROY the Terraform infrastructure in your project. Requires VERTEX_PROJECT_ID and VERTEX_LOCATION env variables to be set in env.sh. Optionally specify env=<dev|test|prod> (default = dev)
	@ cd terraform/envs/$(env) && \
	terraform init -backend-config='bucket=${VERTEX_PROJECT_ID}-tfstate' && \
	terraform destroy -var 'project_id=${VERTEX_PROJECT_ID}' -var 'region=${VERTEX_LOCATION}'

# `install`: bootstrap all local Python dev environments.
# Installs the pipelines project (with dev dependencies), then each component
# group under components/*/ into its own poetry environment.
# `set -e` ensures a failed install in any component aborts the whole target
# with a non-zero exit instead of silently continuing.
install: ## Set up local environment for Python development on pipelines
	@set -e && \
	cd pipelines && \
	poetry install --with dev && \
	cd .. && \
	for component_group in components/*/ ; do \
		echo "Setup for $$component_group" && \
		cd "$$component_group" && \
		poetry install --with dev && \
		cd ../.. ; \
	done

compile-pipeline: ## Compile the pipeline to pipeline.yaml. Must specify pipeline=<training|prediction>

# `compile`: render the selected KFP pipeline to YAML.
# `pipeline` must be `training` or `prediction`; the compiled spec is written
# next to its source at pipelines/<pipeline>/pipeline.yaml.
compile: ## Compile the pipeline to pipeline.yaml. Must specify pipeline=<training|prediction>
	@cd pipelines/src && \
	poetry run kfp dsl compile --py pipelines/${pipeline}/pipeline.py --output pipelines/${pipeline}/pipeline.yaml --function pipeline

setup-components: ## Run unit tests for a component group
@cd "components/${GROUP}" && \
poetry install --with dev

setup-all-components: ## Run unit tests for all pipeline components
@set -e && \
for component_group in components/*/ ; do \
echo "Setup components under $$component_group" && \
$(MAKE) setup-components GROUP=$$(basename $$component_group) ; \
done

test-components: ## Run unit tests for a component group
@cd "components/${GROUP}" && \
poetry run pytest

test-all-components: ## Run unit tests for all pipeline components
@set -e && \
for component_group in components/*/ ; do \
echo "Test components under $$component_group" && \
$(MAKE) test-components GROUP=$$(basename $$component_group) ; \
done
# Space-separated list of Docker build targets; override per-invocation,
# e.g. `make build targets=training`. The default is deliberately unquoted —
# a quoted default would reach the shell loop as one single word.
targets ?= training serving

# `build`: build and push the training/serving container image(s) via Cloud Build.
# Iterates over $(targets) — expanded by make, so the makefile default works even
# though it is not exported to the recipe's shell. Inside the loop, `$$target` is
# the shell loop variable (a make-level `${target}` would expand to empty).
# CONTAINER_IMAGE_REGISTRY and RESOURCE_SUFFIX come from the environment (env.sh).
build: ## Build and push training and/or serving container(s) image using Docker. Specify targets=training, targets=serving, or targets="training serving" (default)
	@cd model && \
	for target in $(targets) ; do \
		echo "Building $$target image" && \
		gcloud builds submit . \
		--region=${VERTEX_LOCATION} \
		--project=${VERTEX_PROJECT_ID} \
		--gcs-source-staging-dir=gs://${VERTEX_PROJECT_ID}-staging/source \
		--substitutions=_DOCKER_TARGET=$$target,_DESTINATION_IMAGE_URI=${CONTAINER_IMAGE_REGISTRY}/$$target:${RESOURCE_SUFFIX} ; \
	done


# Flags controlling the `run` pre-steps. The recipe compares against the literal
# string "true": a bare non-empty test (`[ "${compile}" ]`) would treat
# compile=false as truthy and never skip the step.
compile ?= true
build ?= true

# `run`: optionally recompile the pipeline and rebuild the container images,
# then trigger the pipeline in the sandbox environment.
# Requires pipeline=<training|prediction>; set compile=false / build=false to
# skip the corresponding pre-step.
run: ## Compile or build pipeline and run pipeline in sandbox environment. Set compile=false to skip recompiling the pipeline and set build=false to skip rebuilding container images
	@if [ "${compile}" = "true" ]; then \
		$(MAKE) compile ; \
	fi && \
	if [ "${build}" = "true" ]; then \
		$(MAKE) build ; \
	fi && \
	cd pipelines/src && \
	poetry run python -m pipelines.utils.trigger_pipeline --template_path=pipelines/${pipeline}/pipeline.yaml --display_name=${pipeline}

test-components-coverage: ## Run tests with coverage
@cd "components/${GROUP}" && \
poetry run coverage run -m pytest && \
poetry run coverage report -m

test-all-components-coverage: ## Run tests with coverage
@set -e && \
for component_group in components/*/ ; do \
echo "Test components under $$component_group" && \
$(MAKE) test-components-coverage GROUP=$$(basename $$component_group) ; \
done
# `test`: run unit tests.
# With GROUP=<name>, tests only components/<name>. Without GROUP, tests the
# pipelines utility scripts and then every component group under components/*/.
# `set -e` in the all-groups branch makes the first pytest failure abort the
# target with a non-zero exit; without it a failure also skipped `cd ../..`,
# leaving later iterations running from the wrong directory.
test: ## Run unit tests for a component group or for all component groups and the pipeline trigger code.
	@if [ -n "${GROUP}" ]; then \
		echo "Test components under components/${GROUP}" && \
		cd components/${GROUP} && \
		poetry run pytest ; \
	else \
		set -e && \
		echo "Testing scripts" && \
		cd pipelines && \
		poetry run python -m pytest tests/utils && \
		cd .. && \
		for i in components/*/ ; do \
			echo "Test components under $$i" && \
			cd "$$i" && \
			poetry run pytest && \
			cd ../.. ; \
		done ; \
	fi

run: ## Compile pipeline and run pipeline in sandbox environment. Must specify pipeline=<training|prediction>. Optionally specify ENABLE_PIPELINE_CACHING=<true|false> (defaults to default Vertex caching behaviour)
@ $(MAKE) compile-pipeline && \
cd pipelines/src && \
poetry run python -m pipelines.utils.trigger_pipeline --template_path=pipelines/${pipeline}/pipeline.yaml --display_name=${pipeline}

# `e2e-tests`: run the end-to-end pipeline tests for the selected pipeline.
# `pipeline` selects the test directory (pipelines/tests/<pipeline>);
# ENABLE_PIPELINE_CACHING is read from the environment by the tests themselves.
e2e-tests: ## Perform end-to-end (E2E) pipeline tests. Must specify pipeline=<training|prediction>. Optionally specify ENABLE_PIPELINE_CACHING=<true|false> (defaults to default Vertex caching behaviour).
	@ cd pipelines && \
	poetry run pytest --log-cli-level=INFO tests/$(pipeline)

env ?= dev
deploy-infra: ## Deploy the Terraform infrastructure to your project. Requires VERTEX_PROJECT_ID and VERTEX_LOCATION env variables to be set in env.sh. Optionally specify env=<dev|test|prod> (default = dev)
@ cd terraform/envs/$(env) && \
terraform init -backend-config='bucket=${VERTEX_PROJECT_ID}-tfstate' && \
terraform apply -var 'project_id=${VERTEX_PROJECT_ID}' -var 'region=${VERTEX_LOCATION}'

destroy-infra: ## DESTROY the Terraform infrastructure in your project. Requires VERTEX_PROJECT_ID and VERTEX_LOCATION env variables to be set in env.sh. Optionally specify env=<dev|test|prod> (default = dev)
@ cd terraform/envs/$(env) && \
terraform init -backend-config='bucket=${VERTEX_PROJECT_ID}-tfstate' && \
terraform destroy -var 'project_id=${VERTEX_PROJECT_ID}' -var 'region=${VERTEX_LOCATION}'

target ?= training
build-container: ## Build and push training/serving container image using Docker. Specify target=<training|serving>
@ cd model && \
gcloud builds submit . \
--region=${VERTEX_LOCATION} \
--project=${VERTEX_PROJECT_ID} \
--gcs-source-staging-dir=gs://${VERTEX_PROJECT_ID}-staging/source \
--substitutions=_DOCKER_TARGET=${target},_DESTINATION_IMAGE_URI=${CONTAINER_IMAGE_REGISTRY}/${target}:${RESOURCE_SUFFIX}
2 changes: 1 addition & 1 deletion cloudbuild/e2e-test.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -44,7 +44,7 @@ steps:
- |
curl -sSL https://install.python-poetry.org | python3 - && \
export PATH="/builder/home/.local/bin:$$PATH" && \
make setup && \
make install && \
make e2e-tests pipeline=training && \
make e2e-tests pipeline=prediction
env:
Expand Down
10 changes: 4 additions & 6 deletions cloudbuild/pr-checks.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -24,15 +24,13 @@ steps:
- |
curl -sSL https://install.python-poetry.org | python3 - && \
export PATH="/builder/home/.local/bin:$$PATH" && \
make setup && \
make install && \
git init && \
git add . && \
make pre-commit && \
make test-utils && \
make compile-pipeline pipeline=training && \
make compile-pipeline pipeline=prediction && \
make setup-all-components && \
make test-all-components
make compile pipeline=training && \
make compile pipeline=prediction && \
felix-datatonic marked this conversation as resolved.
Show resolved Hide resolved
make test
env:
- SKIP=terraform-fmt,git-dirty
- CONTAINER_IMAGE_REGISTRY=dummy_value
Expand Down
6 changes: 3 additions & 3 deletions cloudbuild/release.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -39,11 +39,11 @@ steps:
- |
curl -sSL https://install.python-poetry.org | python3 - && \
export PATH="/builder/home/.local/bin:$$PATH" && \
make setup && \
make install && \
for proj in ${_DESTINATION_PROJECTS} ; do \
CONTAINER_IMAGE_REGISTRY=${_VERTEX_LOCATION}-docker.pkg.dev/$$proj/vertex-images \
make compile-pipeline pipeline=training && \
make compile-pipeline pipeline=prediction && \
make compile pipeline=training && \
make compile pipeline=prediction && \
cd pipelines && \
poetry run python -m pipelines.utils.upload_pipeline \
--dest=https://${_VERTEX_LOCATION}-kfp.pkg.dev/$$proj/vertex-pipelines \
Expand Down