From d3d406a85c5a71abfd28768e6ec6ba8f6a78c6d8 Mon Sep 17 00:00:00 2001 From: Brian Cunnie Date: Thu, 13 Apr 2017 15:33:44 -0700 Subject: [PATCH] Add script to automatically provision ec2 environment when running local integration tests [#143526019](https://www.pivotaltracker.com/story/show/143526019) Signed-off-by: Zak Auerbach --- docs/development.md | 55 ++++++++++++++++----------- src/bosh_aws_cpi/bin/test-integration | 42 ++++++++++++++++++++ 2 files changed, 75 insertions(+), 22 deletions(-) create mode 100755 src/bosh_aws_cpi/bin/test-integration diff --git a/docs/development.md b/docs/development.md index 7328beb9..1035535e 100644 --- a/docs/development.md +++ b/docs/development.md @@ -20,31 +20,42 @@ bosh create release --force The release is now ready for use. If everything works, commit the changes including the updated gems. -### Manually deploy bosh director - -1. Claim env from pool - 1. `cd ~/workspace/bosh-cpi-environments` - 1. `git mv aws/unclaimed/SOME_ENV aws/claimed/` - 1. `git ci -m "manually claiming SOME_ENV for testing on #STORY_ID"` - 1. `git push` -1. Create a file containing necessary environment variables in `~/scratch` - 1. `source ~/scratch/YOUR_ENV_FILE` -1. Generate bosh-init manifest and Artifacts - 1. `METADATA_FILE=~/workspace/bosh-cpi-environments/aws/claimed/SOME_ENV \ - OUTPUT_DIR=~/scratch/OUTPUT_DIR \ - ./ci/tasks/prepare-director.sh` -1. Deploy with bosh-init - 1. `cd ~/scratch/OUTPUT_DIR` - 1. `bosh-init deploy director.yml` - ### Manually run lifecycle tests -1. Claim env from pool -1. Create a file containing necessary environment variables in `~/scratch` +Our script uses terraform to prepare an environment on ec2 for lifecycle tests. +You must provide the proper access credentials as well as a KMS Key and Key +Pair fixture. Terraform will create all other required resources and destroy +them at the end of a successful test run. If tests fail, terraform will leave +the environment as is for debugging. + +1. 
Create a `lifecycle.env` file containing the 4 required environment variables. The key +pair name must exist in the ec2 console; however, you do not need to have a copy +of it on your local system. + ```bash + export AWS_ACCESS_KEY_ID=AKIAINSxxxxxxxxxxxxx + export AWS_SECRET_ACCESS_KEY=LvgQOmCtjL1yhcxxxxxxxxxxxxxxxxxxxxxxxxxx + export AWS_KMS_KEY_ARN="arn:aws:kms:us-west-1:088499999999:key/944e4xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx" + export AWS_PUBLIC_KEY_NAME=dev_aws.pem + # optional + # export AWS_DEFAULT_REGION=us-west-1 + ``` +1. source your `lifecycle.env` file + ```bash + . ~/scratch/aws/lifecycle.env + ``` 1. Run tests - 1. `RSPEC_ARGUMENTS=spec/integration/lifecycle_spec.rb \ - METADATA_FILE=~/workspace/bosh-cpi-environments/aws/claimed/SOME_ENV \ - ./ci/tasks/run-integration.sh` + ```bash + src/bosh_aws_cpi/bin/test-integration + ``` + * Use `RSPEC_ARGUMENTS` to run a subset of tests + ```bash + RSPEC_ARGUMENTS=spec/integration/lifecycle_spec.rb:247 src/bosh_aws_cpi/bin/test-integration + ``` + +This script will only terraform one environment per workstation. For example, +if your workstation was named `moncada`, it would create a VPC named +`moncada-local-integration` and associated resources. + ### Rubymine support diff --git a/src/bosh_aws_cpi/bin/test-integration b/src/bosh_aws_cpi/bin/test-integration new file mode 100755 index 00000000..28f31dd3 --- /dev/null +++ b/src/bosh_aws_cpi/bin/test-integration @@ -0,0 +1,42 @@ +#!/bin/bash + +set -e + +: ${AWS_ACCESS_KEY_ID:?} +: ${AWS_SECRET_ACCESS_KEY:?} +: ${AWS_DEFAULT_REGION:=us-west-1} +: ${AWS_PUBLIC_KEY_NAME:?} +: ${AWS_KMS_KEY_ARN:?} + +export AWS_DEFAULT_REGION +export STATE_FILE=/tmp/integration-terraform-state.tfstate +export METADATA_FILE=/tmp/integration-terraform-metadata.$$.json + +echo "#######################################################" +echo "Applying terraform. 
Metadata file at $METADATA_FILE"
echo "#######################################################"

# Resolve the directory containing this script, then the release root
# (three levels up: bin -> bosh_aws_cpi -> src -> release root).
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"

RELEASE_DIR="$( cd "${DIR}/../../.." && pwd )"

task_script="${RELEASE_DIR}/ci/tasks/run-integration.sh"

# The terraform arguments are identical for apply and destroy — define them
# once so the two invocations cannot drift apart.
tf_args=(
  -state="${STATE_FILE}"
  -var "access_key=${AWS_ACCESS_KEY_ID}"
  -var "secret_key=${AWS_SECRET_ACCESS_KEY}"
  -var "region=${AWS_DEFAULT_REGION}"
  -var "env_name=$(hostname)-local-integration"
  "${RELEASE_DIR}/ci/assets/terraform"
)

terraform apply "${tf_args[@]}"

# Flatten the terraform outputs from the state file into the metadata JSON
# consumed by the integration task (jq -e fails if the outputs are missing).
jq -e --raw-output '.modules[0].outputs | map_values(.value)' "${STATE_FILE}" > "${METADATA_FILE}"

# Run the integration tests. Under `set -e` a failure aborts here, leaving
# the terraformed environment intact for debugging; destroy only runs after
# a successful test run.
"${task_script}"

terraform destroy -force "${tf_args[@]}"