diff --git a/ci/test_cpp.sh b/ci/test_cpp.sh
index 2c8ff156e8c..a6c4cdb4a4f 100755
--- a/ci/test_cpp.sh
+++ b/ci/test_cpp.sh
@@ -1,5 +1,5 @@
 #!/bin/bash
-# Copyright (c) 2022, NVIDIA CORPORATION.
+# Copyright (c) 2022-2023, NVIDIA CORPORATION.
 
 set -euo pipefail
 
@@ -21,7 +21,6 @@ set -u
 CPP_CHANNEL=$(rapids-download-conda-from-s3 cpp)
 RAPIDS_TESTS_DIR=${RAPIDS_TESTS_DIR:-"${PWD}/test-results"}/
 mkdir -p "${RAPIDS_TESTS_DIR}"
-SUITEERROR=0
 
 rapids-print-env
 
@@ -38,34 +37,23 @@ pushd "${RAPIDS_DATASET_ROOT_DIR}"
 ./get_test_data.sh
 popd
 
-# Run libcugraph gtests from libcugraph-tests package
-rapids-logger "Run gtests"
+EXITCODE=0
+trap "EXITCODE=1" ERR
 set +e
 
-# TODO: exit code handling is too verbose. Find a cleaner solution.
-
+# Run libcugraph gtests from libcugraph-tests package
+rapids-logger "Run gtests"
 for gt in "$CONDA_PREFIX"/bin/gtests/libcugraph/* ; do
     test_name=$(basename ${gt})
     echo "Running gtest $test_name"
     ${gt} --gtest_output=xml:${RAPIDS_TESTS_DIR}
-
-    exitcode=$?
-    if (( ${exitcode} != 0 )); then
-        SUITEERROR=${exitcode}
-        echo "FAILED: GTest ${gt}"
-    fi
 done
 
 for ct in "$CONDA_PREFIX"/bin/gtests/libcugraph_c/CAPI_*_TEST ; do
     test_name=$(basename ${ct})
     echo "Running C API test $test_name"
     ${ct}
-
-    exitcode=$?
-    if (( ${exitcode} != 0 )); then
-        SUITEERROR=${exitcode}
-        echo "FAILED: C API test ${ct}"
-    fi
 done
 
-exit ${SUITEERROR}
+rapids-logger "Test script exiting with value: $EXITCODE"
+exit ${EXITCODE}
diff --git a/ci/test_python.sh b/ci/test_python.sh
index fea55844cef..a954e386c52 100755
--- a/ci/test_python.sh
+++ b/ci/test_python.sh
@@ -25,7 +25,6 @@ PYTHON_CHANNEL=$(rapids-download-conda-from-s3 python)
 RAPIDS_TESTS_DIR=${RAPIDS_TESTS_DIR:-"${PWD}/test-results"}
 RAPIDS_COVERAGE_DIR=${RAPIDS_COVERAGE_DIR:-"${PWD}/coverage-results"}
 mkdir -p "${RAPIDS_TESTS_DIR}" "${RAPIDS_COVERAGE_DIR}"
-SUITEERROR=0
 
 rapids-print-env
 
@@ -48,6 +47,8 @@ pushd "${RAPIDS_DATASET_ROOT_DIR}"
 ./get_test_data.sh
 popd
 
+EXITCODE=0
+trap "EXITCODE=1" ERR
 set +e
 
 rapids-logger "pytest pylibcugraph"
@@ -60,12 +61,6 @@ pytest \
   --cov-report=xml:"${RAPIDS_COVERAGE_DIR}/pylibcugraph-coverage.xml" \
   --cov-report=term \
   tests
-exitcode=$?
-
-if (( ${exitcode} != 0 )); then
-    SUITEERROR=${exitcode}
-    echo "FAILED: 1 or more tests in pylibcugraph"
-fi
 popd
 
 rapids-logger "pytest cugraph"
@@ -79,12 +74,6 @@ pytest \
   --cov-report=xml:"${RAPIDS_COVERAGE_DIR}/cugraph-coverage.xml" \
   --cov-report=term \
   tests
-exitcode=$?
-
-if (( ${exitcode} != 0 )); then
-    SUITEERROR=${exitcode}
-    echo "FAILED: 1 or more tests in cugraph"
-fi
 popd
 
 rapids-logger "pytest cugraph benchmarks (run as tests)"
@@ -95,12 +84,6 @@ pytest \
   -m "managedmem_on and poolallocator_on and tiny" \
   --benchmark-disable \
   cugraph/pytest-based/bench_algos.py
-exitcode=$?
-
-if (( ${exitcode} != 0 )); then
-    SUITEERROR=${exitcode}
-    echo "FAILED: 1 or more tests in cugraph benchmarks"
-fi
 popd
 
 rapids-logger "pytest cugraph_pyg (single GPU)"
@@ -116,12 +99,6 @@ pytest \
   --cov-report=xml:"${RAPIDS_COVERAGE_DIR}/cugraph-pyg-coverage.xml" \
   --cov-report=term \
   .
-exitcode=$?
-
-if (( ${exitcode} != 0 )); then
-    SUITEERROR=${exitcode}
-    echo "FAILED: 1 or more tests in cugraph-pyg"
-fi
 popd
 
 rapids-logger "pytest cugraph-service (single GPU)"
@@ -139,12 +116,7 @@ pytest \
   --benchmark-disable \
   -k "not mg" \
   tests
-exitcode=$?
-
-if (( ${exitcode} != 0 )); then
-    SUITEERROR=${exitcode}
-    echo "FAILED: 1 or more tests in cugraph-service"
-fi
 popd
 
-exit ${SUITEERROR}
+rapids-logger "Test script exiting with value: $EXITCODE"
+exit ${EXITCODE}
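The change above replaces the per-command exit-code bookkeeping (SUITEERROR/exitcode checks after every test invocation) with a single ERR trap that records any failure while letting the remaining tests run. A minimal standalone sketch of that pattern, separate from these scripts; the `false` command and the echo messages are placeholders for illustration only:

    #!/bin/bash
    set -euo pipefail

    EXITCODE=0
    trap "EXITCODE=1" ERR   # the ERR trap fires for any failing command and records the failure
    set +e                  # disable errexit so later commands still run after a failure

    false                   # placeholder for a failing test command
    echo "still running after the failure"

    echo "exiting with value: $EXITCODE"
    exit ${EXITCODE}

With `set +e` in effect the script is not aborted by the first failure, but the ERR trap still fires for each failing command, so the final exit status reflects whether anything failed without needing an if/then block after every test.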