Skip to content

Commit

Permalink
Merge branch 'dev' into jitpack-gtfs-lib
Browse files Browse the repository at this point in the history
  • Loading branch information
landonreed committed Jan 22, 2021
2 parents cf1fc17 + 4992d14 commit bb23478
Show file tree
Hide file tree
Showing 11 changed files with 230 additions and 148 deletions.
162 changes: 162 additions & 0 deletions .github/workflows/maven.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,162 @@
name: Java CI

on: [push, pull_request]

jobs:
  build:
    runs-on: ubuntu-latest
    services:
      postgres:
        image: postgres:10.8
        # Set postgres env variables according to test env.yml config
        env:
          POSTGRES_USER: postgres
          POSTGRES_PASSWORD: postgres
          POSTGRES_DB: catalogue
        ports:
          # Quoted so the mapping can never be misread as a base-60 number.
          - "5432:5432"
        # Set health checks to wait until postgres has started
        options: >-
          --health-cmd pg_isready
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5
    steps:
      - uses: actions/checkout@v2
      - name: Set up JDK 1.8
        uses: actions/setup-java@v1
        with:
          # Quoted so YAML does not parse the version as the float 1.8.
          java-version: "1.8"
      # Install node 12 for running e2e tests (and for maven-semantic-release).
      - name: Use Node.js 12.x
        uses: actions/setup-node@v1
        with:
          node-version: 12.x
      - name: Start MongoDB
        # NOTE(review): this action ref was garbled in the paste; presumably
        # supercharge/mongodb-github-action — confirm the pinned version.
        uses: supercharge/mongodb-github-action@1.3.0
        with:
          # Quoted so YAML does not parse the version as the float 4.2.
          mongodb-version: "4.2"
      - name: Setup Maven Cache
        uses: actions/cache@v2
        id: cache
        with:
          path: ~/.m2
          key: maven-local-repo
      - name: Inject slug/short variables  # so that we can reference $GITHUB_HEAD_REF_SLUG for branch name
        # NOTE(review): this action ref was garbled in the paste; presumably
        # rlespinasse/github-slug-action — confirm the pinned version.
        uses: rlespinasse/github-slug-action@v3.x
      - name: Install maven-semantic-release
        # FIXME: Enable cache for node packages (add package.json?)
        run: |
          yarn global add @conveyal/maven-semantic-release semantic-release
          # Add yarn path to GITHUB_PATH so that global package is executable.
          echo "$(yarn global bin)" >> $GITHUB_PATH
      # Run a script to see if the e2e tests should be run. This script will set
      # the environment variable SHOULD_RUN_E2E which is used in later CI commands.
      - name: Check if end-to-end tests should run
        run: ./scripts/check-if-e2e-tests-should-run-on-ci.sh
      - name: Add profile credentials to ~/.aws/credentials
        run: ./scripts/add-aws-credentials.sh
        env:
          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
          AWS_REGION: ${{ secrets.AWS_REGION }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
      - name: Setup GTFS+ directory (used during testing)
        run: mkdir /tmp/gtfsplus
      - name: Build with Maven (run unit tests)
        run: mvn --no-transfer-progress package
      - name: Restart MongoDB with fresh database (for e2e tests)
        run: ./scripts/restart-mongo-with-fresh-db.sh
      - name: Copy unit test coverage results into another folder  # so the e2e tests don't overwrite them
        run: cp -R target target-unit-test-results
      - name: Run e2e tests
        if: env.SHOULD_RUN_E2E == 'true'
        run: mvn test
        env:
          AUTH0_API_CLIENT: ${{ secrets.AUTH0_API_CLIENT }}
          AUTH0_API_SECRET: ${{ secrets.AUTH0_API_SECRET }}
          AUTH0_CLIENT_ID: ${{ secrets.AUTH0_CLIENT_ID }}
          AUTH0_DOMAIN: ${{ secrets.AUTH0_DOMAIN }}
          AUTH0_SECRET: ${{ secrets.AUTH0_SECRET }}
          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
          AWS_REGION: ${{ secrets.AWS_REGION }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          E2E_AUTH0_PASSWORD: ${{ secrets.E2E_AUTH0_PASSWORD }}
          E2E_AUTH0_USERNAME: ${{ secrets.E2E_AUTH0_USERNAME }}
          GRAPH_HOPPER_KEY: ${{ secrets.GRAPH_HOPPER_KEY }}
          GTFS_DATABASE_PASSWORD: ${{ secrets.GTFS_DATABASE_PASSWORD }}
          GTFS_DATABASE_URL: ${{ secrets.GTFS_DATABASE_URL }}
          GTFS_DATABASE_USER: ${{ secrets.GTFS_DATABASE_USER }}
          MAPBOX_ACCESS_TOKEN: ${{ secrets.MAPBOX_ACCESS_TOKEN }}
          MONGO_DB_NAME: ${{ secrets.MONGO_DB_NAME }}
          OSM_VEX: ${{ secrets.OSM_VEX }}
          # Quoted so the consumer receives the string "true", not a boolean.
          RUN_E2E: "true"
          S3_BUCKET: ${{ secrets.S3_BUCKET }}
          SPARKPOST_EMAIL: ${{ secrets.SPARKPOST_EMAIL }}
          SPARKPOST_KEY: ${{ secrets.SPARKPOST_KEY }}
          TRANSITFEEDS_KEY: ${{ secrets.TRANSITFEEDS_KEY }}
      - name: Copy e2e coverage results into another folder  # so the deployment results don't overwrite them
        run: if [ "$SHOULD_RUN_E2E" = "true" ]; then cp -R target target-e2e-test-results; fi
      # These first codecov runs will upload a report associated with the commit set through CI environment variables.
      # Use codecov script flags to upload the coverage report for the unit tests.
      - name: Upload codecov for unit tests
        run: bash <(curl -s https://codecov.io/bash) -s target-unit-test-results -F unit_tests
      - name: Upload the coverage report for the e2e tests
        run: |
          if [ "$SHOULD_RUN_E2E" = "true" ]; then
            bash <(curl -s https://codecov.io/bash) -s target-e2e-test-results -F end_to_end_tests;
          fi
      # Run maven-semantic-release to potentially create a new release of datatools-server. The flag --skip-maven-deploy is
      # used to avoid deploying to maven central. So essentially, this just creates a release with a changelog on github.
      #
      # If maven-semantic-release finishes successfully and the current branch is master, upload coverage reports for the
      # commits that maven-semantic-release generated. Since the above codecov run is associated with the commit that
      # initiated the CI build, the report will not be associated with the commits that maven-semantic-release performed
      # (if it ended up creating a release and the two commits that were a part of that workflow). Therefore, if on master
      # codecov needs to be ran two more times to create codecov reports for the commits made by maven-semantic-release.
      # See https://github.com/conveyal/gtfs-lib/issues/193. In order to create reports for both the unit and e2e tests,
      # the codecov scripts must be ran twice.
      #
      # The git commands get the commit hash of the HEAD commit and the commit just before HEAD.
      - name: Run maven-semantic-release
        run: |
          semantic-release --prepare @conveyal/maven-semantic-release --publish @semantic-release/github,@conveyal/maven-semantic-release --verify-conditions @semantic-release/github,@conveyal/maven-semantic-release --verify-release @conveyal/maven-semantic-release --use-conveyal-workflow --dev-branch=dev --skip-maven-deploy
          if [[ "$GITHUB_REF_SLUG" = "master" ]]; then
            bash <(curl -s https://codecov.io/bash) -C "$(git rev-parse HEAD)" -s target-unit-test-results -F unit_tests
            bash <(curl -s https://codecov.io/bash) -C "$(git rev-parse HEAD^)" -s target-unit-test-results -F unit_tests
            if [ "$SHOULD_RUN_E2E" = "true" ]; then
              bash <(curl -s https://codecov.io/bash) -C "$(git rev-parse HEAD)" -s target-e2e-test-results -F end_to_end_tests;
              bash <(curl -s https://codecov.io/bash) -C "$(git rev-parse HEAD^)" -s target-e2e-test-results -F end_to_end_tests;
            fi
          fi
      - name: Prepare deploy artifacts
        run: |
          # get branch name of current branch for use in jar name
          export BRANCH=$GITHUB_HEAD_REF_SLUG
          # Replace forward slashes with underscores in branch name.
          export BRANCH_CLEAN=${BRANCH//\//_}
          # Create directory that will contain artifacts to deploy to s3.
          mkdir deploy
          # Display contents of target directory (for logging purposes only).
          ls target/*.jar
          # Copy packaged jar over to deploy dir.
          cp target/dt-*.jar deploy/
          # Get the first jar file and copy it into a new file that adds the current branch name. During a
          # merge to master, there are multiple jar files produced, but they're each effectively the same
          # code (there may be slight differences in the version shown in the `pom.xml`, but that's not
          # important for the purposes of creating this "latest branch" jar).
          ALL_JARS=(target/dt-*.jar)
          FIRST_JAR="${ALL_JARS[0]}"
          cp "$FIRST_JAR" "deploy/dt-latest-$BRANCH_CLEAN.jar"
      - name: Deploy to S3
        uses: jakejarvis/s3-sync-action@master
        with:
          args: --acl public-read
        env:
          AWS_S3_BUCKET: datatools-builds
          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          SOURCE_DIR: 'deploy'
110 changes: 0 additions & 110 deletions .travis.yml

This file was deleted.

4 changes: 2 additions & 2 deletions configurations/test/env.yml.tmp
Original file line number Diff line number Diff line change
Expand Up @@ -13,8 +13,8 @@ OSM_VEX: http://localhost:1000
SPARKPOST_KEY: your-sparkpost-key
SPARKPOST_EMAIL: [email protected]
GTFS_DATABASE_URL: jdbc:postgresql://localhost/catalogue
# GTFS_DATABASE_USER:
# GTFS_DATABASE_PASSWORD:
GTFS_DATABASE_USER: postgres
GTFS_DATABASE_PASSWORD: postgres

# To configure a remote MongoDB service (such as MongoDB Atlas), provide all
# Mongo properties below. Otherwise, only a database name is needed (server
Expand Down
12 changes: 10 additions & 2 deletions pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -40,6 +40,7 @@
</scm>
<properties>
<jackson.version>2.10.1</jackson.version>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<!-- Using the latest version of geotools (e.g, 20) seems to cause issues with the shapefile
plugin where the_geom for each feature is null. -->
<geotools.version>17.5</geotools.version>
Expand Down Expand Up @@ -102,7 +103,7 @@
<plugin>
<groupId>pl.project13.maven</groupId>
<artifactId>git-commit-id-plugin</artifactId>
<version>2.2.1</version>
<version>3.0.1</version>
<executions>
<execution>
<goals>
Expand All @@ -117,6 +118,13 @@
-->
<generateGitPropertiesFile>true</generateGitPropertiesFile>
<injectAllReactorProjects>true</injectAllReactorProjects>
<!-- "git describe always" needed to keep GitHub actions from failing
See: https://github.com/git-commit-id/git-commit-id-maven-plugin/issues/61#issuecomment-68037525
-->
<gitDescribe>
<skip>false</skip>
<always>true</always>
</gitDescribe>
</configuration>
</plugin>
<plugin>
Expand Down Expand Up @@ -273,7 +281,7 @@
<dependency>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
<version>18.0</version>
<version>30.0-jre</version>
</dependency>

<!-- Note: Unless we are explicit with the jackson dependencies listed below, other versions included in other
Expand Down
17 changes: 17 additions & 0 deletions scripts/add-aws-credentials.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,17 @@
#!/usr/bin/env bash
# Write an AWS credentials file from CI environment variables.
# Intended for CI only: it sets up the environment the E2E tests need.
# AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY and AWS_REGION must be
# defined in the CI environment before this runs.
mkdir -p ~/.aws

# Bail out early if a credentials file already exists so we never
# clobber a developer's real credentials on a local machine :)
if [ -f ~/.aws/credentials ]; then
    exit 0
fi

cat > ~/.aws/credentials << EOL
[default]
aws_access_key_id = ${AWS_ACCESS_KEY_ID}
aws_secret_access_key = ${AWS_SECRET_ACCESS_KEY}
region = ${AWS_REGION}
EOL
20 changes: 20 additions & 0 deletions scripts/check-if-e2e-tests-should-run-on-ci.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,20 @@
#!/usr/bin/env bash
# Decide whether the e2e test suite should run for this CI build and record
# the decision in SHOULD_RUN_E2E, both for later steps in this job (via
# export) and for subsequent job steps (via $GITHUB_ENV).
#
# Since the e2e tests take a while to run and it could present an inconvenience
# to be making sure the e2e tests work on every single PR, only run the e2e
# tests on CI for PRs to master or on commits directly to dev or master
if [[ "$GITHUB_BASE_REF_SLUG" = "master" ]]; then
  echo "SHOULD_RUN_E2E=true" >> $GITHUB_ENV && export SHOULD_RUN_E2E=true
  echo 'Will run E2E tests because this is a PR to master'
elif [[ "$GITHUB_REPOSITORY" = "ibi-group/datatools-server" ]] && [[ "$GITHUB_REF_SLUG" = "master" || "$GITHUB_REF_SLUG" = "dev" || "$GITHUB_REF_SLUG" = "github-actions" ]]; then
  echo "SHOULD_RUN_E2E=true" >> $GITHUB_ENV && export SHOULD_RUN_E2E=true
  echo 'Will run E2E tests because this is a commit to master or dev'
fi

if [[ "$SHOULD_RUN_E2E" != "true" ]]; then
  echo 'Skipping E2E tests...'
fi

# FIXME: Re-enable e2e for conditions above.
# Bug fix: the exported value now matches the one written to $GITHUB_ENV
# (it previously exported "true" while persisting "false", so steps in
# this job and later steps disagreed about whether to run e2e).
echo "SHOULD_RUN_E2E=false" >> $GITHUB_ENV && export SHOULD_RUN_E2E=false
echo 'Overriding E2E. Temporarily forcing to be false...'
Loading

0 comments on commit bb23478

Please sign in to comment.