reuse hive docker container in CI
mchades committed Aug 30, 2024
1 parent 9caaf85 commit f0cc742
Showing 30 changed files with 685 additions and 575 deletions.
26 changes: 24 additions & 2 deletions .github/workflows/backend-integration-test.yml
@@ -164,12 +164,34 @@ jobs:
       - name: Free up disk space
         run: |
           dev/ci/util_free_space.sh
+
+      # - name: Setup debug Github Action
+      #   if: ${{ contains(github.event.pull_request.labels.*.name, 'debug action') }}
+      #   uses: csexton/debugger-action@master
+
+      - name: Setup reused hive services
+        run: |
+          free -h
+          if ! docker compose -f integration-test-common/docker-script/docker-compose.yaml up -d hive hive_with_kerberos; then
+            docker compose -f integration-test-common/docker-script/docker-compose.yaml ps
+            docker compose -f integration-test-common/docker-script/docker-compose.yaml logs
+            exit 1
+          else
+            echo "Services started successfully"
+          fi
+      - name: check services status
+        run: |
+          docker compose -f integration-test-common/docker-script/docker-compose.yaml ps
       - name: Backend Integration Test (JDK${{ matrix.java-version }}-${{ matrix.test-mode }}-${{ matrix.backend }})
         id: integrationTest
         run: >
-          ./gradlew test -PskipTests -PtestMode=${{ matrix.test-mode }} -PjdkVersion=${{ matrix.java-version }} -PjdbcBackend=${{ matrix.backend }} -PskipWebITs -PskipDockerTests=false
-          -x :web:test -x :clients:client-python:test -x :flink-connector:test -x :spark-connector:test -x :spark-connector:spark-common:test
+          export ACTIVE_CI=true && ./gradlew test -PskipTests -PtestMode=${{ matrix.test-mode }} -PjdkVersion=${{ matrix.java-version }} -PjdbcBackend=${{ matrix.backend }} -PskipWebITs -PskipDockerTests=false
+          -x :web:test -x :clients:client-python:test -x :flink-connector:test -x :spark-connector:test -x :flink-connector:flink:test -x :spark-connector:spark-common:test
+          -x :spark-connector:spark-3.3:test -x :spark-connector:spark-3.4:test -x :spark-connector:spark-3.5:test
+          -x :spark-connector:spark-runtime-3.3:test -x :spark-connector:spark-runtime-3.4:test -x :spark-connector:spark-runtime-3.5:test
+          -x :authorizations:authorization-ranger:test -x :web:integration-test:test -x :trino-connector:integration-test:test -x :trino-connector:test
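
For local debugging of the new setup step, here is a minimal sketch of how one might confirm that the reused hive containers are actually running before launching the backend tests. The compose file path and the two service names are taken from the step above; the polling loop, retry count, and sleep interval are illustrative assumptions, not part of the workflow:

#!/usr/bin/env bash
# Sketch: wait until the reused hive services report a running state.
# The compose file and service names come from the "Setup reused hive services"
# step above; the retry count and sleep interval are illustrative assumptions.
COMPOSE_FILE=integration-test-common/docker-script/docker-compose.yaml

for attempt in $(seq 1 30); do
  running=$(docker compose -f "$COMPOSE_FILE" ps --status running --services | wc -l)
  if [ "$running" -ge 2 ]; then
    echo "hive and hive_with_kerberos are up"
    exit 0
  fi
  echo "waiting for hive services (attempt $attempt/30)..."
  sleep 10
done

echo "hive services did not start in time" >&2
docker compose -f "$COMPOSE_FILE" logs
exit 1
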
142 changes: 0 additions & 142 deletions .github/workflows/build.yml

This file was deleted.

104 changes: 0 additions & 104 deletions .github/workflows/flink-integration-test.yml
@@ -1,104 +0,0 @@
name: Flink Integration Test

# Controls when the workflow will run
on:
  # Triggers the workflow on push or pull request events but only for the "main" branch
  push:
    branches: [ "main", "branch-*" ]
  pull_request:
    branches: [ "main", "branch-*" ]

concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true

jobs:
  changes:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - uses: dorny/paths-filter@v2
        id: filter
        with:
          filters: |
            source_changes:
              - .github/**
              - api/**
              - bin/**
              - catalogs/**
              - clients/client-java/**
              - clients/client-java-runtime/**
              - clients/filesystem-hadoop3/**
              - clients/filesystem-hadoop3-runtime/**
              - common/**
              - conf/**
              - core/**
              - dev/**
              - gradle/**
              - meta/**
              - server/**
              - server-common/**
              - flink-connector/**
              - docs/open-api/**
              - build.gradle.kts
              - gradle.properties
              - gradlew
              - setting.gradle.kts
    outputs:
      source_changes: ${{ steps.filter.outputs.source_changes }}

  # Integration test for AMD64 architecture
  test-amd64-arch:
    needs: changes
    if: needs.changes.outputs.source_changes == 'true'
    runs-on: ubuntu-latest
    timeout-minutes: 30
    strategy:
      matrix:
        architecture: [linux/amd64]
        java-version: [ 8, 11, 17 ]
    env:
      PLATFORM: ${{ matrix.architecture }}
    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-java@v4
        with:
          java-version: ${{ matrix.java-version }}
          distribution: 'temurin'
          cache: 'gradle'

      - name: Set up QEMU
        uses: docker/setup-qemu-action@v2

      - name: Check required command
        run: |
          dev/ci/check_commands.sh
      - name: Package Gravitino
        run: |
          ./gradlew compileDistribution -x test -PjdkVersion=${{ matrix.java-version }}
      - name: Free up disk space
        run: |
          dev/ci/util_free_space.sh
      - name: Flink Integration Test
        id: integrationTest
        run: |
          ./gradlew -PskipTests -PtestMode=embedded -PjdkVersion=${{ matrix.java-version }} -PskipDockerTests=false :flink-connector:flink:test --tests "org.apache.gravitino.flink.connector.integration.test.**"
          ./gradlew -PskipTests -PtestMode=deploy -PjdkVersion=${{ matrix.java-version }} -PskipDockerTests=false :flink-connector:flink:test --tests "org.apache.gravitino.flink.connector.integration.test.**"
      - name: Upload integrate tests reports
        uses: actions/upload-artifact@v3
        if: ${{ (failure() && steps.integrationTest.outcome == 'failure') || contains(github.event.pull_request.labels.*.name, 'upload log') }}
        with:
          name: flink-connector-integrate-test-reports-${{ matrix.java-version }}
          path: |
            build/reports
            flink-connector/flink/build/*.log
            flink-connector/flink/build/*.tar
            distribution/package/logs/gravitino-server.out
            distribution/package/logs/gravitino-server.log
            catalogs/**/*.log
            catalogs/**/*.tar
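
For reference, the flink connector integration tests that this deleted workflow ran can still be invoked locally with the same Gradle command; the sketch below assumes a local JDK and a running Docker daemon, and JDK 17 is only an illustrative choice:

# Run the flink connector integration tests in embedded mode, mirroring the
# step from the deleted workflow above; the JDK version is an example value.
./gradlew -PskipTests -PtestMode=embedded -PjdkVersion=17 -PskipDockerTests=false \
  :flink-connector:flink:test --tests "org.apache.gravitino.flink.connector.integration.test.**"
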
86 changes: 0 additions & 86 deletions .github/workflows/python-integration-test.yml

This file was deleted.
