Trigger End-to-end Tests Workflow #53

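# Spins up the full Footlight stack in Docker (MongoDB restored from the latest S3 dump,
# the CMS backend image for the chosen environment, and the frontend built from this
# repository) and then runs the Cypress regression suite against it.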
name: Trigger End-to-end Tests Workflow
on:
  workflow_dispatch:
    inputs:
      environment:
        description: 'Environment to run tests against (production/staging)'
        default: staging
        required: true
      grep:
        description: 'Grep pattern for selecting tests'
        required: false
        default: ''
      grepTags:
        description: 'Grep tags for selecting tests'
        required: false
        default: '@essential'
  workflow_call:
    inputs:
      environment:
        description: 'Environment to run tests against'
        required: true
        type: string
      grep:
        description: 'Grep pattern for selecting tests'
        required: false
        type: string
      grepTags:
        description: 'Grep tags for selecting tests'
        required: false
        type: string
        default: '@essential'
    secrets:
      DOCKER_PAT:
        required: true
      CYPRESS_PASSWORD:
        required: true
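# Note: the S3 secrets and vars.* values referenced below are not declared under
# workflow_call, so a caller has to make them available (e.g. `secrets: inherit`).
# A minimal caller sketch; the reusable-workflow path is illustrative, not taken
# from this file:
#
#   jobs:
#     e2e:
#       uses: <owner>/<repo>/.github/workflows/trigger-e2e-tests.yml@main
#       with:
#         environment: staging
#         grepTags: '@essential'
#       secrets: inherit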
jobs:
  built-and-run-cypress:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
      - name: Create Docker network
        run: docker network create footlight-network.test
      - name: Configure AWS credentials
        uses: aws-actions/configure-aws-credentials@v4
        with:
          aws-access-key-id: ${{ secrets.S3_ACCESS_KEY_ID }}
          aws-secret-access-key: ${{ secrets.S3_SECRET_ACCESS_KEY }}
          aws-region: ca-central-1
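      # Restores the most recent dump from the footlight-dump S3 bucket into a throwaway
      # MongoDB container. This assumes the newest object in the bucket is a zip archive
      # whose extracted layout matches /dump/<object key>/footlight-calendar; adjust the
      # mongorestore path if the dump structure differs.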
      - name: Run MongoDB with latest dump
        env:
          BUCKET_NAME: footlight-dump
        run: |
          docker run -d --name test.mongo --network footlight-network.test -p 27017:27017 mongo:latest
          latest_file=$(aws s3 ls s3://$BUCKET_NAME/ --recursive | sort | tail -n 1 | awk '{print $4}')
          aws s3 cp s3://$BUCKET_NAME/$latest_file ./latest_file.zip
          unzip latest_file.zip -d ./latest_file
          docker cp ./latest_file test.mongo:/dump
          docker exec test.mongo mongorestore --db footlight-calendar /dump/$latest_file/footlight-calendar
      - name: Log in to GitHub Container Registry
        uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: ${{ vars.USERNAME_DEV }}
          password: ${{ secrets.DOCKER_PAT }}
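      # Writes the runtime configuration for the CMS backend; this .env file is copied
      # into the API container once it is running (see the docker cp two steps below).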
      - name: Create env file
        run: |
          echo "APP_PORT=8080" >> .env
          echo "DATABASE_URL=mongodb://test.mongo:27017/footlight-calendar" >> .env
          echo "AWS_S3_ACCESS_KEY_ID=${{ secrets.S3_ACCESS_KEY_ID }}" >> .env
          echo "AWS_S3_SECRET_ACCESS_KEY=${{ secrets.S3_SECRET_ACCESS_KEY }}" >> .env
          echo "DEFAULT_TIMEZONE=Canada/Eastern" >> .env
          echo "AWS_S3_BUCKET=${{ vars.AWS_S3_BUCKET }}" >> .env
          echo "AWS_S3_REGION=${{ vars.AWS_S3_REGION }}" >> .env
      - name: Pull and Run CMS Backend Docker Image
        run: |
          if [ "${{ inputs.environment }}" == "production" ]; then
            IMAGE="ghcr.io/culturecreates/footlight-calendar-api/footlight-admin-api:master"
          elif [ "${{ inputs.environment }}" == "staging" ]; then
            IMAGE="ghcr.io/culturecreates/footlight-calendar-api/footlight-admin-api:develop"
          else
            echo "Unknown environment: '${{ inputs.environment }}'"
            exit 1
          fi
          docker pull $IMAGE
          docker run -d \
            --restart always \
            --name test.footlight.api \
            --network footlight-network.test \
            -p 8080:8080 \
            $IMAGE
          docker cp ./.env test.footlight.api:/usr/src/app
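      # Rebuilds the frontend with REACT_APP_API_URL pointing at the API container on the
      # shared Docker network, so the app under test talks to the freshly started backend.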
      - name: Build and run Footlight Container
        run: |
          sed -i 's|^REACT_APP_API_URL=.*|REACT_APP_API_URL="http://test.footlight.api:8080"|' .env.staging
          docker build -t footlight .
          docker run -d --name test.footlight.app --network footlight-network.test -p 3000:3000 footlight
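      # Polls the app on port 3000 for up to ~50 seconds (5 attempts, 10 s apart) and
      # fails the job if it never responds.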
      - name: Wait for Footlight to be ready
        run: |
          for i in {1..5}; do
            if curl -s http://localhost:3000; then
              echo "Footlight is up and running!"
              exit 0
            fi
            echo "Waiting for Footlight to be ready..."
            sleep 10
          done
          echo "Footlight did not start in time!"
          exit 1
      - name: Pull Cypress Docker image
        run: docker pull ghcr.io/kmdvs/cms-cypress_regression_tests:main
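      # Builds the --env JSON consumed by the test image's grep/grepTags filtering
      # (in the style of @cypress/grep), stripping newlines and escaping quotes in the
      # grep input so the resulting argument is valid JSON.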
      - name: Run Cypress tests
        run: |
          base_url="http://test.footlight.app:3000/"
          grep_value="${{ inputs.grep }}"
          echo "Original grep_value: '$grep_value'"
          grep_value_clean=$(echo "$grep_value" | tr -d '\n\r')
          echo "Cleaned grep_value: '$grep_value_clean'"
          if [ -z "$grep_value_clean" ]; then
            grep_value_json='""'
          else
            grep_value_json=$(printf '%s' "$grep_value_clean" | sed 's/"/\\"/g; s/.*/"&"/')
          fi
          echo "JSON grep_value: '$grep_value_json'"
          # Build the --env argument for Cypress and validate that it is well-formed JSON
          env_json="{\"grepTags\":\"${{ inputs.grepTags }}\",\"grep\":${grep_value_json}}"
          echo "Formatted env JSON: $env_json"
          echo "$env_json" | jq .
          # Run Cypress tests with the formatted --env argument
          docker run \
            --network footlight-network.test \
            -e XDG_RUNTIME_DIR=/tmp/runtime \
            -e CYPRESS_BASE_URL="$base_url" \
            -e CYPRESS_ADMIN_EN_EMAIL="${{ vars.CYPRESS_ADMIN_EN_EMAIL }}" \
            -e CYPRESS_ADMIN_FR_EMAIL="${{ vars.CYPRESS_ADMIN_FR_EMAIL }}" \
            -e CYPRESS_GUEST_EN_EMAIL="${{ vars.CYPRESS_GUEST_EN_EMAIL }}" \
            -e CYPRESS_GUEST_FR_EMAIL="${{ vars.CYPRESS_GUEST_FR_EMAIL }}" \
            -e CYPRESS_ADMIN_EN_PASSWORD="${{ secrets.CYPRESS_PASSWORD }}" \
            -e CYPRESS_ADMIN_FR_PASSWORD="${{ secrets.CYPRESS_PASSWORD }}" \
            -e CYPRESS_GUEST_EN_PASSWORD="${{ secrets.CYPRESS_PASSWORD }}" \
            -e CYPRESS_GUEST_FR_PASSWORD="${{ secrets.CYPRESS_PASSWORD }}" \
            -v ${GITHUB_WORKSPACE}/cypress/screenshots:/e2e/cypress/screenshots \
            -v ${GITHUB_WORKSPACE}/cypress/videos:/e2e/cypress/videos \
            ghcr.io/kmdvs/cms-cypress_regression_tests:main npx cypress run --browser firefox --headless --env "$env_json"
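      # Screenshots and videos land in the mounted workspace paths above and are uploaded
      # even when the test run fails (if: always()).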
      - name: Upload Cypress Debug Logs
        uses: actions/upload-artifact@v4
        if: always()
        with:
          name: cypress-debug-logs
          path: cypress/logs/debug.log
          if-no-files-found: ignore
      - name: Upload Cypress Screenshots
        uses: actions/upload-artifact@v4
        if: always()
        with:
          name: cypress-screenshots-firefox
          path: cypress/screenshots
          if-no-files-found: ignore
      - name: Upload Cypress Videos
        uses: actions/upload-artifact@v4
        if: always()
        with:
          name: cypress-videos-firefox
          path: cypress/videos
          if-no-files-found: ignore