diff --git a/.ci/azure/linux_ngraph_onnx.yml b/.ci/azure/linux_ngraph_onnx.yml
index b756eb59149c29..af8fc3e34f9769 100644
--- a/.ci/azure/linux_ngraph_onnx.yml
+++ b/.ci/azure/linux_ngraph_onnx.yml
@@ -87,7 +87,7 @@ jobs:
      sudo apt --assume-yes update && sudo apt --assume-yes install nfs-common
      sudo apt install nfs-common -y
      sudo mount -vvv -t nfs cinfsshare.file.core.windows.net:/cinfsshare/onnxtestdata $(MODELS_DIR) -o vers=4,minorversion=1,sec=sys
-     mkdir -p $(MODELS_DIR)/models_data
+     mkdir -p $(MODELS_DIR)
    displayName: 'Make dirs'

  - checkout: self
@@ -97,13 +97,15 @@ jobs:
  - script: |
      set -e
-     apt-get update && apt-get install -y lsb-release && apt-get clean all
+     sudo apt-get update && sudo apt-get install -y lsb-release git git-lfs
+     git-lfs install
+     sudo apt-get clean all
      curl -fsSL https://get.docker.com -o get-docker.sh
      sudo sh get-docker.sh
    displayName: 'Install dependencies'

  - script:
-     src/frontends/onnx/tests/tests_python/model_zoo_preprocess.sh -d $(MODELS_DIR)/models_data -o -s "$(ONNX_MODEL_ZOO_SHA)"
+     src/frontends/onnx/tests/tests_python/model_zoo_preprocess.sh -d $(MODELS_DIR) -o -s "$(ONNX_MODEL_ZOO_SHA)"
    displayName: 'Update models'
    condition: ne(variables['BUILD_TYPE'], 'Debug')

@@ -120,7 +122,7 @@ jobs:
  - script: |
      sudo docker run \
        --name openvino-onnx-ci-container \
-       --volume $(MODELS_DIR)/models_data/model_zoo/onnx_model_zoo_$(ONNX_MODEL_ZOO_SHA):/root/.onnx/model_zoo/onnx_model_zoo \
+       --volume $(MODELS_DIR)/onnx_model_zoo:/root/.onnx/model_zoo/onnx_model_zoo \
        --volume $(MODELS_DIR)/msft:/root/.onnx/model_zoo/MSFT openvino-onnx-ci-image \
        /bin/bash -c "$(TOX_COMMAND)"
    displayName: 'Docker run $(BUILD_TYPE)'
diff --git a/.github/ISSUE_TEMPLATE/good_first_issue.yml b/.github/ISSUE_TEMPLATE/good_first_issue.yml
index 0b7edcbaecd1a4..1d7eecf9a37b5e 100644
--- a/.github/ISSUE_TEMPLATE/good_first_issue.yml
+++ b/.github/ISSUE_TEMPLATE/good_first_issue.yml
@@ -41,10 +41,7 @@ body:
        Any materials related to the task, such as operator specifications, discussions, guides.
value: | - - [What is OpenVINO?](https://github.com/openvinotoolkit/openvino#what-is-openvino-toolkit) - - [Contribution guide](https://github.com/openvinotoolkit/openvino/blob/master/CONTRIBUTING.md) - - [Blog post on contributing to OpenVINO](https://github.com/openvinotoolkit/openvino/blob/master/CONTRIBUTING.md) - - [User documentation](https://docs.openvino.ai/) + - [Contribution guide - start here!](https://github.com/openvinotoolkit/openvino/blob/master/CONTRIBUTING.md) validations: required: true diff --git a/.github/actions/setup_python/action.yml b/.github/actions/setup_python/action.yml index d067842135cd9d..c076c5156da039 100644 --- a/.github/actions/setup_python/action.yml +++ b/.github/actions/setup_python/action.yml @@ -29,9 +29,9 @@ runs: run: apt-get update && apt-get install -y ca-certificates software-properties-common - if: ${{ runner.os == 'Linux' && runner.arch == 'ARM64' }} - name: Setup sudo + name: Setup sudo and python3 shell: bash - run: apt-get update && apt-get install -y sudo # Needed for the deadsnakes action + run: apt-get update && apt-get install -y sudo python3 # Needed for the deadsnakes action - if: ${{ runner.os == 'Linux' && runner.arch == 'ARM64' }} name: Setup Python ${{ inputs.version }} diff --git a/.github/dependency_review.yml b/.github/dependency_review.yml index c86c621069171b..408a1c279bc39a 100644 --- a/.github/dependency_review.yml +++ b/.github/dependency_review.yml @@ -5,6 +5,11 @@ allow-licenses: - 'BSD-2-Clause AND BSD-3-Clause' - 'MIT' - 'Apache-2.0' + - 'ISC' + - 'Apache-2.0 AND MIT' + - 'BlueOak-1.0.0' + - '0BSD' + - 'Python-2.0' fail-on-scopes: - 'runtime' - 'development' diff --git a/.github/workflows/android_arm64.yml b/.github/workflows/android_arm64.yml index 1b648d780268d3..a8deb0e3d476e1 100644 --- a/.github/workflows/android_arm64.yml +++ b/.github/workflows/android_arm64.yml @@ -5,6 +5,7 @@ on: push: branches: - master + - 'releases/**' concurrency: # github.ref is not unique in post-commit @@ -45,7 +46,7 @@ jobs: container: image: openvinogithubactions.azurecr.io/dockerhub/ubuntu:20.04 volumes: - - /mount/caches:/mount/caches + - /mount:/mount options: -e SCCACHE_AZURE_BLOB_CONTAINER -e SCCACHE_AZURE_CONNECTION_STRING env: DEBIAN_FRONTEND: noninteractive # to prevent apt-get from waiting user input @@ -169,3 +170,17 @@ jobs: - name: Show ccache stats run: ${SCCACHE_PATH} --show-stats + + Overall_Status: + name: ci/gha_overall_status_android + needs: [Smart_CI, Build] + if: ${{ always() }} + runs-on: ubuntu-latest + steps: + - name: Check status of all jobs + if: >- + ${{ + contains(needs.*.result, 'failure') || + contains(needs.*.result, 'cancelled') + }} + run: exit 1 diff --git a/.github/workflows/build_doc.yml b/.github/workflows/build_doc.yml index 8baa49922369cd..40e12d507cba54 100644 --- a/.github/workflows/build_doc.yml +++ b/.github/workflows/build_doc.yml @@ -25,7 +25,7 @@ jobs: packages: graphviz texlive liblua5.2-0 libclang1-9 libclang-cpp9 version: 3.0 - - uses: actions/setup-python@v4 + - uses: actions/setup-python@v5 id: cp310 with: python-version: '3.10' diff --git a/.github/workflows/cleanup_pip_cache.yml b/.github/workflows/cleanup_pip_cache.yml index cd66e1150c3ef1..355d0d68d7cf93 100644 --- a/.github/workflows/cleanup_pip_cache.yml +++ b/.github/workflows/cleanup_pip_cache.yml @@ -11,7 +11,7 @@ jobs: container: image: openvinogithubactions.azurecr.io/dockerhub/ubuntu:20.04 volumes: - - /mount/caches:/mount/caches + - /mount:/mount env: PIP_CACHE_PATH: /mount/caches/pip diff --git 
a/.github/workflows/coverage.yml b/.github/workflows/coverage.yml index d89666b898fa80..50114d986b23c6 100644 --- a/.github/workflows/coverage.yml +++ b/.github/workflows/coverage.yml @@ -16,7 +16,7 @@ jobs: steps: - name: Setup python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: '3.10.10' architecture: 'x64' diff --git a/.github/workflows/coverity.yml b/.github/workflows/coverity.yml new file mode 100644 index 00000000000000..206d80cd02f0af --- /dev/null +++ b/.github/workflows/coverity.yml @@ -0,0 +1,146 @@ +name: Coverity (Ubuntu 20.04, Python 3.11) +on: + workflow_dispatch: + inputs: + openvinoRef: + description: 'Branch, tag or commit hash to clone openvino from. Taken from event context if not set' + type: string + schedule: + # run daily at 00:00 + - cron: '0 0 * * *' + +concurrency: + # github.ref is not unique in post-commit + group: ${{ github.event_name == 'push' && github.run_id || github.ref }}-linux-coverity + cancel-in-progress: true + +env: + PIP_CACHE_PATH: /mount/caches/pip/linux + PYTHON_VERSION: '3.11' + +jobs: + Build: + timeout-minutes: 150 + defaults: + run: + shell: bash + runs-on: aks-linux-16-cores-32gb + container: + image: openvinogithubactions.azurecr.io/dockerhub/ubuntu:20.04 + volumes: + - /mount/caches:/mount/caches + options: -e SCCACHE_AZURE_BLOB_CONTAINER -e SCCACHE_AZURE_CONNECTION_STRING + env: + DEBIAN_FRONTEND: noninteractive # to prevent apt-get from waiting user input + CMAKE_BUILD_TYPE: 'Release' + CMAKE_GENERATOR: 'Ninja Multi-Config' + CMAKE_CXX_COMPILER_LAUNCHER: sccache + CMAKE_C_COMPILER_LAUNCHER: sccache + GITHUB_WORKSPACE: '/__w/openvino/openvino' + OPENVINO_REPO: /__w/openvino/openvino/openvino + OPENVINO_CONTRIB_REPO: /__w/openvino/openvino/openvino_contrib + BUILD_DIR: /__w/openvino/openvino/openvino_build + SCCACHE_AZURE_KEY_PREFIX: coverity_ubuntu20_x86_64 + COVERITY_TOOL_DIR: /__w/openvino/openvino/coverity_tool + + steps: + - name: Install git + run: | + apt-get update + apt-get install --assume-yes --no-install-recommends git ca-certificates + + - name: Clone OpenVINO + uses: actions/checkout@v4 + with: + path: ${{ env.OPENVINO_REPO }} + submodules: 'true' + ref: ${{ inputs.openvinoRef }} + + - name: Clone OpenVINO Contrib + uses: actions/checkout@v4 + with: + repository: 'openvinotoolkit/openvino_contrib' + path: ${{ env.OPENVINO_CONTRIB_REPO }} + submodules: 'true' + ref: 'master' + + # + # Dependencies + # + + - name: Install build dependencies + run: | + bash ${OPENVINO_REPO}/install_build_dependencies.sh + # default-jdk - Java API + apt install --assume-yes --no-install-recommends default-jdk + + - name: Install sccache + uses: mozilla-actions/sccache-action@v0.0.3 + with: + version: "v0.5.4" + + - name: Setup Python ${{ env.PYTHON_VERSION }} + uses: ./openvino/.github/actions/setup_python + with: + version: ${{ env.PYTHON_VERSION }} + pip-cache-path: ${{ env.PIP_CACHE_PATH }} + should-setup-pip-paths: 'true' + self-hosted-runner: 'true' + + # + # Build + # + + - name: CMake configure - OpenVINO + run: | + cmake \ + -G "${{ env.CMAKE_GENERATOR }}" \ + -DENABLE_CPPLINT=OFF \ + -DENABLE_STRICT_DEPENDENCIES=OFF \ + -DENABLE_SYSTEM_TBB=ON \ + -DENABLE_SYSTEM_OPENCL=ON \ + -DCMAKE_VERBOSE_MAKEFILE=ON \ + -DCPACK_GENERATOR=TGZ \ + -DBUILD_nvidia_plugin=OFF \ + -DOPENVINO_EXTRA_MODULES=${OPENVINO_CONTRIB_REPO}/modules \ + -DCMAKE_CXX_COMPILER_LAUNCHER=${{ env.CMAKE_CXX_COMPILER_LAUNCHER }} \ + -DCMAKE_C_COMPILER_LAUNCHER=${{ env.CMAKE_C_COMPILER_LAUNCHER }} \ + -S ${OPENVINO_REPO} \ + -B 
${BUILD_DIR} + + - name: Clean sccache stats + run: ${SCCACHE_PATH} --zero-stats + + - name: Install Coverity tool + run: | + rm -rf ${COVERITY_TOOL_DIR} && mkdir -p ${COVERITY_TOOL_DIR} + pushd ${COVERITY_TOOL_DIR} + wget https://scan.coverity.com/download/linux64 --progress=bar:force:noscroll --post-data "token=${{ secrets.COVERITY_TOKEN }}&project=openvino" -O coverity_tool.tgz + tar xvf coverity_tool.tgz && rm coverity_tool.tgz + popd + + - name: Cmake build - OpenVINO with Coverity + run: | + ${COVERITY_TOOL_DIR}/cov-analysis*/bin/cov-build --dir ${BUILD_DIR}/cov-int \ + cmake --build ${BUILD_DIR} --parallel --config ${{ env.CMAKE_BUILD_TYPE }} + + - name: Show sccache stats + run: ${SCCACHE_PATH} --show-stats + + - name: Pack Artefacts + run: | + pushd ${BUILD_DIR} + tar -C ${BUILD_DIR} -czvf openvino.tgz cov-int + popd + + - name: Submit artefacts + run: | + apt-get update && apt-get install -y curl + pushd ${BUILD_DIR} + curl --form token=${{ secrets.COVERITY_TOKEN }} \ + --form email=${{ secrets.COVERITY_USER }} \ + --form file=@openvino.tgz \ + --form version="${{ github.sha }}" \ + --form description="https://github.com/openvinotoolkit/openvino/runs/${{ github.run_number }}" \ + https://scan.coverity.com/builds?project=openvino + popd diff --git a/.github/workflows/fedora.yml b/.github/workflows/fedora.yml index 6308d07910ecac..19d32ef74e07c9 100644 --- a/.github/workflows/fedora.yml +++ b/.github/workflows/fedora.yml @@ -46,7 +46,7 @@ jobs: container: image: fedora:33 volumes: - - /mount/caches:/mount/caches + - /mount:/mount options: -e SCCACHE_AZURE_BLOB_CONTAINER -e SCCACHE_AZURE_CONNECTION_STRING env: CMAKE_BUILD_TYPE: 'Release' @@ -242,3 +242,17 @@ jobs: python3 -c 'from openvino.frontend import FrontEndManager; assert len(FrontEndManager().get_available_front_ends()) == 6' benchmark_app --help ovc --help + + Overall_Status: + name: ci/gha_overall_status_fedora + needs: [Smart_CI, Build, RPM_Packages] + if: ${{ always() }} + runs-on: ubuntu-latest + steps: + - name: Check status of all jobs + if: >- + ${{ + contains(needs.*.result, 'failure') || + contains(needs.*.result, 'cancelled') + }} + run: exit 1 diff --git a/.github/workflows/job_cpu_functional_tests.yml b/.github/workflows/job_cpu_functional_tests.yml new file mode 100644 index 00000000000000..b1f2e6bbf08b59 --- /dev/null +++ b/.github/workflows/job_cpu_functional_tests.yml @@ -0,0 +1,124 @@ +name: CPU functional tests + +on: + workflow_call: + inputs: + runner: + description: 'Machine on which the tests would run' + type: string + required: true + image: + description: 'Docker image in which the tests would run' + type: string + required: false + default: null + +jobs: + CPU_Functional_Tests: + name: CPU functional tests + timeout-minutes: 25 + runs-on: ${{ inputs.runner }} + container: + image: ${{ inputs.image }} + defaults: + run: + shell: bash + env: + DEBIAN_FRONTEND: noninteractive # to prevent apt-get from waiting user input + INSTALL_DIR: ${{ github.workspace }}/install + INSTALL_TEST_DIR: ${{ github.workspace }}/install/tests + PARALLEL_TEST_SCRIPT: ${{ github.workspace }}/install/tests/functional_test_utils/layer_tests_summary/run_parallel.py + PARALLEL_TEST_CACHE: ${{ github.workspace }}/install/tests/test_cache.lst + steps: + - name: Download OpenVINO package + uses: actions/download-artifact@v3 + with: + name: openvino_package + path: ${{ env.INSTALL_DIR }} + + - name: Download OpenVINO tests package + uses: actions/download-artifact@v3 + with: + name: openvino_tests + path: ${{ env.INSTALL_TEST_DIR 
}} + + # Needed as ${{ github.workspace }} is not working correctly when using Docker + - name: Setup Variables + run: | + echo "INSTALL_DIR=$GITHUB_WORKSPACE/install" >> "$GITHUB_ENV" + echo "INSTALL_TEST_DIR=$GITHUB_WORKSPACE/install/tests" >> "$GITHUB_ENV" + echo "PARALLEL_TEST_SCRIPT=$GITHUB_WORKSPACE/install/tests/functional_test_utils/layer_tests_summary/run_parallel.py" >> "$GITHUB_ENV" + echo "PARALLEL_TEST_CACHE=$GITHUB_WORKSPACE/install/tests/test_cache.lst" >> "$GITHUB_ENV" + + - name: Extract OpenVINO packages + run: | + pushd $INSTALL_DIR + tar -xzf openvino_package.tar.gz -C $INSTALL_DIR + popd + + pushd $INSTALL_TEST_DIR + tar -xzf openvino_tests.tar.gz -C $INSTALL_DIR + popd + + - name: Install OpenVINO dependencies (Linux) + if: runner.os == 'Linux' + run: $INSTALL_DIR/install_dependencies/install_openvino_dependencies.sh -c=core -c=dev -c=gpu -y + + - name: Fetch setup_python action + uses: actions/checkout@v4 + with: + sparse-checkout: | + .github/actions/setup_python/action.yml + sparse-checkout-cone-mode: false + path: 'openvino' + + - name: Setup Python 3.11 + uses: ./openvino/.github/actions/setup_python + with: + version: '3.11' + should-setup-pip-paths: 'false' + self-hosted-runner: ${{ runner.os == 'Linux' }} + + - name: Install python dependencies for run_parallel.py + run: python3 -m pip install -r ${INSTALL_TEST_DIR}/functional_test_utils/layer_tests_summary/requirements.txt + + - name: Restore tests execution time + uses: actions/cache/restore@v3 + with: + path: ${{ env.PARALLEL_TEST_CACHE }} + key: ${{ runner.os }}-${{ runner.arch }}-tests-functional-cpu-stamp-${{ github.sha }} + restore-keys: | + ${{ runner.os }}-${{ runner.arch }}-tests-functional-cpu-stamp + + - name: Intel CPU plugin func tests (parallel) + run: | + # Needed as the Linux CC does not require setupvars to work + if [[ -f "${INSTALL_DIR}/setupvars.sh" ]]; then + source ${INSTALL_DIR}/setupvars.sh + fi + + python3 ${PARALLEL_TEST_SCRIPT} -e ${INSTALL_TEST_DIR}/ov_cpu_func_tests -c ${PARALLEL_TEST_CACHE} -w ${INSTALL_TEST_DIR} -s suite -rf 0 -- --gtest_print_time=1 --gtest_filter=*smoke* + timeout-minutes: 20 + + - name: Save tests execution time + uses: actions/cache/save@v3 + if: github.ref_name == 'master' + with: + path: ${{ env.PARALLEL_TEST_CACHE }} + key: ${{ runner.os }}-${{ runner.arch }}-tests-functional-cpu-stamp-${{ github.sha }} + + - name: Upload Test Results + uses: actions/upload-artifact@v3 + if: ${{ !cancelled() }} + with: + name: test-results-functional-cpu + path: | + ${{ env.INSTALL_TEST_DIR }}/temp/*.log + ${{ env.INSTALL_TEST_DIR }}/logs/*.log + ${{ env.INSTALL_TEST_DIR }}/logs/failed/*.log + ${{ env.INSTALL_TEST_DIR }}/logs/crashed/*.log + ${{ env.INSTALL_TEST_DIR }}/logs/hanged/*.log + ${{ env.INSTALL_TEST_DIR }}/logs/interapted/*.log + ${{ env.INSTALL_TEST_DIR }}/logs/hash_table.csv + ${{ env.PARALLEL_TEST_CACHE }} + if-no-files-found: 'error' diff --git a/.github/workflows/job_cxx_unit_tests.yml b/.github/workflows/job_cxx_unit_tests.yml new file mode 100644 index 00000000000000..e2c1cfc0fa70f4 --- /dev/null +++ b/.github/workflows/job_cxx_unit_tests.yml @@ -0,0 +1,280 @@ +name: Samples + +on: + workflow_call: + inputs: + runner: + description: 'Machine on which the tests would run' + type: string + required: true + image: + description: 'Docker image in which the tests would run' + type: string + required: false + default: null + affected-components: + description: 'Components that are affected by changes in the commit defined by the Smart CI Action' + type: string 
+ required: true + +jobs: + CXX_Unit_Tests: + name: C++ unit tests + timeout-minutes: 30 + runs-on: ${{ inputs.runner }} + container: + image: ${{ inputs.image }} + defaults: + run: + shell: bash + env: + DEBIAN_FRONTEND: noninteractive # to prevent apt-get from waiting user input + INSTALL_DIR: ${{ github.workspace }}/install + INSTALL_TEST_DIR: ${{ github.workspace }}/install/tests + steps: + - name: Download OpenVINO package + uses: actions/download-artifact@v3 + with: + name: openvino_package + path: ${{ env.INSTALL_DIR }} + + - name: Download OpenVINO tests package + uses: actions/download-artifact@v3 + with: + name: openvino_tests + path: ${{ env.INSTALL_TEST_DIR }} + + # Needed as ${{ github.workspace }} is not working correctly when using Docker + - name: Setup Variables + run: | + echo "INSTALL_DIR=$GITHUB_WORKSPACE/install" >> "$GITHUB_ENV" + echo "INSTALL_TEST_DIR=$GITHUB_WORKSPACE/install/tests" >> "$GITHUB_ENV" + + - name: Extract OpenVINO packages + run: | + pushd $INSTALL_DIR + tar -xzf openvino_package.tar.gz -C $INSTALL_DIR + popd + pushd $INSTALL_TEST_DIR + tar -xzf openvino_tests.tar.gz -C $INSTALL_DIR + popd + + - name: Install OpenVINO dependencies (Linux) + if: runner.os == 'Linux' + run: $INSTALL_DIR/install_dependencies/install_openvino_dependencies.sh -c=core -c=dev -c=gpu -y + + # + # Tests + # + + - name: OpenVINO Core Unit Tests + if: fromJSON(inputs.affected-components).Core.test + run: | + source ${INSTALL_DIR}/setupvars.sh + ${INSTALL_TEST_DIR}/ov_core_unit_tests --gtest_print_time=1 --gtest_filter=-*IE_GPU* \ + --gtest_output=xml:${INSTALL_TEST_DIR}/TEST-OVCoreUT.xml + + - name: OpenVINO Inference Functional Tests + if: fromJSON(inputs.affected-components).inference.test + run: | + source ${INSTALL_DIR}/setupvars.sh + ${INSTALL_TEST_DIR}/ov_inference_functional_tests --gtest_print_time=1 \ + --gtest_output=xml:${INSTALL_TEST_DIR}/TEST-InferenceFunc.xml + + - name: OpenVINO Inference Unit Tests + if: fromJSON(inputs.affected-components).inference.test + run: | + source ${INSTALL_DIR}/setupvars.sh + ${INSTALL_TEST_DIR}/ov_inference_unit_tests --gtest_print_time=1 \ + --gtest_output=xml:${INSTALL_TEST_DIR}/TEST-InferenceUnit.xml + + - name: Low Precision Transformations Tests + if: fromJSON(inputs.affected-components).LP_transformations.test + run: | + source ${INSTALL_DIR}/setupvars.sh + + ${INSTALL_TEST_DIR}/ov_lp_transformations_tests --gtest_print_time=1 \ + --gtest_output=xml:${INSTALL_TEST_DIR}/TEST-LpTransformations.xml + + - name: OpenVINO Conditional compilation tests + if: fromJSON(inputs.affected-components).Core.test + run: | + source ${INSTALL_DIR}/setupvars.sh + ${INSTALL_TEST_DIR}/ov_conditional_compilation_tests --gtest_print_time=1 \ + --gtest_output=xml:${INSTALL_TEST_DIR}/TEST-ConditionalCompilation.xml + + - name: IR frontend tests + if: fromJSON(inputs.affected-components).IR_FE.test + run: | + source ${INSTALL_DIR}/setupvars.sh + ${INSTALL_TEST_DIR}/ov_ir_frontend_tests --gtest_print_time=1 \ + --gtest_output=xml:${INSTALL_TEST_DIR}/TEST-IRFrontend.xml + + - name: PaddlePaddle frontend tests + if: fromJSON(inputs.affected-components).PDPD_FE.test + run: | + source ${INSTALL_DIR}/setupvars.sh + ${INSTALL_TEST_DIR}/paddle_tests --gtest_print_time=1 \ + --gtest_output=xml:${INSTALL_TEST_DIR}/TEST-PaddleTests.xml + + - name: ONNX frontend tests + if: ${{ fromJSON(inputs.affected-components).ONNX_FE.test && runner.arch != 'ARM64' }} # Ticket for macOS ARM64: 122663, for Linux ARM64: 126280 + run: | + source ${INSTALL_DIR}/setupvars.sh + 
${INSTALL_TEST_DIR}/ov_onnx_frontend_tests --gtest_print_time=1 \ + --gtest_filter=-*IE_GPU* \ + --gtest_output=xml:${INSTALL_TEST_DIR}/TEST-ONNXFrontend.xml + + - name: TensorFlow Common frontend tests + if: fromJSON(inputs.affected-components).TF_FE.test || + fromJSON(inputs.affected-components).TFL_FE.test && + (runner.os != 'macOS' && runner.arch != 'ARM64') + run: | + source ${INSTALL_DIR}/setupvars.sh + ${INSTALL_TEST_DIR}/ov_tensorflow_common_tests --gtest_print_time=1 \ + --gtest_output=xml:${INSTALL_TEST_DIR}/TEST-TensorFlowCommonFrontend.xml + + - name: TensorFlow frontend tests + if: fromJSON(inputs.affected-components).TF_FE.test + run: | + source ${INSTALL_DIR}/setupvars.sh + + ${INSTALL_TEST_DIR}/ov_tensorflow_frontend_tests --gtest_print_time=1 \ + --gtest_output=xml:${INSTALL_TEST_DIR}/TEST-TensorFlowFrontend.xml + + - name: TensorFlow Lite frontend tests + if: fromJSON(inputs.affected-components).TFL_FE.test + run: | + source ${INSTALL_DIR}/setupvars.sh + ${INSTALL_TEST_DIR}/ov_tensorflow_lite_frontend_tests --gtest_print_time=1 \ + --gtest_output=xml:${INSTALL_TEST_DIR}/TEST-TensorFlowLiteFrontend.xml + + - name: Transformations func tests + if: ${{ fromJSON(inputs.affected-components).transformations.test && runner.arch != 'ARM64' }} # Ticket: 126281 + run: | + source ${INSTALL_DIR}/setupvars.sh + + ${INSTALL_TEST_DIR}/ov_transformations_tests --gtest_print_time=1 \ + --gtest_output=xml:${INSTALL_TEST_DIR}/TEST-Transformations.xml + + - name: Legacy Transformations func tests + if: fromJSON(inputs.affected-components).GNA.test && + (runner.os != 'macOS' && runner.arch != 'ARM64') + run: | + source ${INSTALL_DIR}/setupvars.sh + ${INSTALL_TEST_DIR}/ov_legacy_transformations_tests --gtest_print_time=1 \ + --gtest_output=xml:${INSTALL_TEST_DIR}/TEST-LegacyTransformations.xml + + - name: Inference Engine 1.0 unit tests + if: fromJSON(inputs.affected-components).GNA.test && + (runner.os != 'macOS' && runner.arch != 'ARM64') + run: | + source ${INSTALL_DIR}/setupvars.sh + ${INSTALL_TEST_DIR}/InferenceEngineUnitTests --gtest_print_time=1 \ + --gtest_output=xml:${INSTALL_TEST_DIR}/TEST-InferenceEngineUnitTests.xml + + - name: Common test utils tests + run: | + source ${INSTALL_DIR}/setupvars.sh + ${INSTALL_TEST_DIR}/ov_util_tests --gtest_print_time=1 \ + --gtest_output=xml:${INSTALL_TEST_DIR}/TEST-CommonUtilTests.xml + + - name: Snippets func tests + if: fromJSON(inputs.affected-components).CPU.test + run: | + source ${INSTALL_DIR}/setupvars.sh + ${INSTALL_TEST_DIR}/ov_snippets_func_tests --gtest_print_time=1 \ + --gtest_output=xml:${INSTALL_TEST_DIR}/TEST-SnippetsFuncTests.xml + + - name: CPU plugin unit tests + if: fromJSON(inputs.affected-components).CPU.test + run: | + source ${INSTALL_DIR}/setupvars.sh + ${INSTALL_TEST_DIR}/ov_cpu_unit_tests --gtest_print_time=1 \ + --gtest_output=xml:${INSTALL_TEST_DIR}/TEST-CPUUnitTests.xml + + - name: ov_subgraphs_dumper_tests tests + run: | + source ${INSTALL_DIR}/setupvars.sh + ${INSTALL_TEST_DIR}/ov_subgraphs_dumper_tests --gtest_print_time=1 \ + --gtest_output=xml:${INSTALL_TEST_DIR}/TEST-ov_subgraphs_dumper_tests.xml + + - name: Template OpImpl tests + run: | + source ${INSTALL_DIR}/setupvars.sh + ${INSTALL_TEST_DIR}/ov_op_conformance_tests --gtest_print_time=1 --device=TEMPLATE --gtest_filter=*OpImpl*\ + --gtest_output=xml:${INSTALL_TEST_DIR}/TEST-OpImplTests.xml + + - name: AUTO unit tests + if: fromJSON(inputs.affected-components).AUTO.test + run: | + source ${INSTALL_DIR}/setupvars.sh + ${INSTALL_TEST_DIR}/ov_auto_unit_tests 
--gtest_print_time=1 \ + --gtest_output=xml:${INSTALL_TEST_DIR}/TEST-ov_auto_unit_tests.xml + + - name: AUTO func Tests + if: fromJSON(inputs.affected-components).AUTO.test + run: | + source ${{ env.INSTALL_DIR }}/setupvars.sh + ${{ env.INSTALL_TEST_DIR }}/ov_auto_func_tests --gtest_print_time=1 \ + --gtest_output=xml:${{ env.INSTALL_TEST_DIR }}/TEST-ov_auto_func_tests.xml + + - name: Template plugin func tests + if: fromJSON(inputs.affected-components).TEMPLATE.test + run: | + source ${INSTALL_DIR}/setupvars.sh + ${INSTALL_TEST_DIR}/ov_template_func_tests --gtest_print_time=1 \ + --gtest_filter=*smoke* \ + --gtest_output=xml:${INSTALL_TEST_DIR}/TEST-TemplateFuncTests.xml + + - name: Inference Engine C API tests + if: fromJSON(inputs.affected-components).C_API.test + run: | + source ${INSTALL_DIR}/setupvars.sh + ${INSTALL_TEST_DIR}/InferenceEngineCAPITests --gtest_print_time=1 \ + --gtest_output=xml:${INSTALL_TEST_DIR}/TEST-InferenceEngineCAPITests.xml + + - name: OpenVINO C API tests + if: fromJSON(inputs.affected-components).C_API.test + run: | + source ${INSTALL_DIR}/setupvars.sh + ${INSTALL_TEST_DIR}/ov_capi_test --gtest_print_time=1 \ + --gtest_output=xml:${INSTALL_TEST_DIR}/TEST-OpenVINOCAPITests.xml + + - name: AutoBatch unit tests + if: fromJSON(inputs.affected-components).AUTO_BATCH.test + run: | + source ${INSTALL_DIR}/setupvars.sh + ${INSTALL_TEST_DIR}/ov_auto_batch_unit_tests --gtest_output=xml:${INSTALL_TEST_DIR}/TEST-ov_auto_batch_unit_tests.xml + + - name: AutoBatch func tests + if: fromJSON(inputs.affected-components).AUTO_BATCH.test + run: | + source ${INSTALL_DIR}/setupvars.sh + ${INSTALL_TEST_DIR}/ov_auto_batch_func_tests --gtest_output=xml:${INSTALL_TEST_DIR}/TEST-ov_auto_batch_func_tests.xml + + - name: Proxy Plugin func tests + if: fromJSON(inputs.affected-components).PROXY.test + run: | + source ${INSTALL_DIR}/setupvars.sh + ${INSTALL_TEST_DIR}/ov_proxy_plugin_tests --gtest_print_time=1 --gtest_output=xml:${INSTALL_TEST_DIR}/TEST-OVProxyTests.xml + + - name: Hetero unit tests + if: fromJSON(inputs.affected-components).HETERO.test + run: | + source ${{ env.INSTALL_DIR }}/setupvars.sh + ${{ env.INSTALL_TEST_DIR }}/ov_hetero_unit_tests --gtest_print_time=1 --gtest_output=xml:${{ env.INSTALL_TEST_DIR }}/TEST-OVHeteroUnitTests.xml + + - name: Hetero func tests + if: fromJSON(inputs.affected-components).HETERO.test + run: | + source ${INSTALL_DIR}/setupvars.sh + ${INSTALL_TEST_DIR}/ov_hetero_func_tests --gtest_print_time=1 --gtest_output=xml:${INSTALL_TEST_DIR}/TEST-OVHeteroFuncTests.xml + + - name: Upload Test Results + uses: actions/upload-artifact@v3 + if: ${{ !cancelled() }} + with: + name: test-results-cpp + path: ${{ env.INSTALL_TEST_DIR }}/TEST*.xml + if-no-files-found: 'warn' diff --git a/.github/workflows/job_debian_packages.yml b/.github/workflows/job_debian_packages.yml new file mode 100644 index 00000000000000..f063a7734b8aec --- /dev/null +++ b/.github/workflows/job_debian_packages.yml @@ -0,0 +1,83 @@ +name: Debian Packages + +on: + workflow_call: + inputs: + runner: + description: 'Machine on which the tests would run' + type: string + required: true + image: + description: 'Docker image in which the tests would run' + type: string + required: false + default: null + +jobs: + Debian_Packages: + name: Debian Packages + runs-on: ${{ inputs.runner }} + container: + image: ${{ inputs.image }} + defaults: + run: + shell: bash + env: + DEBIAN_FRONTEND: noninteractive # to prevent apt-get from waiting user input + DEBIAN_PACKAGES_DIR: ${{ github.workspace 
}}/packages + steps: + + - name: Download OpenVINO debian packages + uses: actions/download-artifact@v3 + with: + name: openvino_debian_packages + path: ${{ env.DEBIAN_PACKAGES_DIR }} + + # Needed as ${{ github.workspace }} is not working correctly when using Docker + - name: Setup Variables + run: echo "DEBIAN_PACKAGES_DIR=$GITHUB_WORKSPACE/packages" >> "$GITHUB_ENV" + + - name: Install debian packages & check conflicts + run: | + apt-get update -y + + if [[ "${{ runner.arch }}" == "X64" ]]; then + # Install debian packages from previous release + apt-get install --no-install-recommends -y gnupg wget ca-certificates + wget https://apt.repos.intel.com/intel-gpg-keys/GPG-PUB-KEY-INTEL-SW-PRODUCTS.PUB + apt-key add GPG-PUB-KEY-INTEL-SW-PRODUCTS.PUB + echo "deb https://apt.repos.intel.com/openvino/2023 ubuntu20 main" | tee /etc/apt/sources.list.d/intel-openvino-2023.list + apt-get update -y + apt-get install -y openvino + fi + + # install our local one and make sure the conflicts are resolved + apt-get install --no-install-recommends -y dpkg-dev + dpkg-scanpackages . /dev/null | gzip -9c > Packages.gz + echo "deb [trusted=yes] file:${DEBIAN_PACKAGES_DIR} ./" | tee /etc/apt/sources.list.d/openvino-local.list + apt-get update -y + apt-get install openvino -y + working-directory: ${{ env.DEBIAN_PACKAGES_DIR }} + + - name: Test debian packages + run: | + /usr/share/openvino/samples/cpp/build_samples.sh + /usr/share/openvino/samples/c/build_samples.sh + + [[ "${{ runner.arch }}" == "X64" ]] && path_by_arch="intel64" || path_by_arch="aarch64" + ~/openvino_cpp_samples_build/$path_by_arch/Release/hello_query_device + + python3 /usr/share/openvino/samples/python/hello_query_device/hello_query_device.py + python3 -c 'from openvino import Core; Core().get_property("CPU", "AVAILABLE_DEVICES")' + + if [[ "${{ runner.arch }}" == "X64" ]]; then + python3 -c 'from openvino import Core; Core().get_property("GPU", "AVAILABLE_DEVICES")' + fi + + python3 -c 'from openvino import Core; Core().get_property("AUTO", "SUPPORTED_METRICS")' + python3 -c 'from openvino import Core; Core().get_property("MULTI", "SUPPORTED_METRICS")' + python3 -c 'from openvino import Core; Core().get_property("HETERO", "SUPPORTED_METRICS")' + python3 -c 'from openvino import Core; Core().get_property("BATCH", "SUPPORTED_METRICS")' + python3 -c 'from openvino.frontend import FrontEndManager; assert len(FrontEndManager().get_available_front_ends()) == 6' + benchmark_app --help + ovc --help diff --git a/.github/workflows/job_onnx_models_tests.yml b/.github/workflows/job_onnx_models_tests.yml new file mode 100644 index 00000000000000..1fbc0c11fe960c --- /dev/null +++ b/.github/workflows/job_onnx_models_tests.yml @@ -0,0 +1,106 @@ +name: ONNX Models tests + +on: + workflow_call: + inputs: + runner: + description: 'Machine on which the tests would run' + type: string + required: true + container: + description: 'JSON to be converted to the value of the "container" configuration for the job' + type: string + required: false + default: '{"image": null}' + +jobs: + ONNX_Models_tests: + name: ONNX Models tests + timeout-minutes: 60 + runs-on: ${{ inputs.runner }} + container: ${{ fromJSON(inputs.container) }} + defaults: + run: + shell: bash + env: + DEBIAN_FRONTEND: noninteractive # to prevent apt-get from waiting user input + OPENVINO_REPO: ${{ github.workspace }}/openvino + INSTALL_DIR: ${{ github.workspace }}/install + INSTALL_TEST_DIR: ${{ github.workspace }}/install/tests + ONNX_MODELS_PATH: ${{ github.workspace }}/onnx_test_models + 
MODELS_SHARE_PATH: "/mount/onnxtestdata" + ONNX_MODEL_ZOO_SHA: "d58213534f2a4d1c4b19ba62b3bb5f544353256e" + steps: + - name: Download OpenVINO package + uses: actions/download-artifact@v3 + with: + name: openvino_package + path: ${{ env.INSTALL_DIR }} + + - name: Download OpenVINO tests package + uses: actions/download-artifact@v3 + with: + name: openvino_tests + path: ${{ env.INSTALL_TEST_DIR }} + + # Needed as ${{ github.workspace }} is not working correctly when using Docker + - name: Setup Variables + run: | + echo "OPENVINO_REPO=$GITHUB_WORKSPACE/openvino" >> "$GITHUB_ENV" + echo "INSTALL_DIR=$GITHUB_WORKSPACE/install" >> "$GITHUB_ENV" + echo "INSTALL_TEST_DIR=$GITHUB_WORKSPACE/install/tests" >> "$GITHUB_ENV" + + - name: Extract OpenVINO packages + run: | + pushd ${INSTALL_DIR} + tar -xzf openvino_package.tar.gz -C ${INSTALL_DIR} + popd + + pushd ${INSTALL_TEST_DIR} + tar -xzf openvino_tests.tar.gz -C ${INSTALL_DIR} + popd + + - name: Fetch setup_python action and model_zoo_preprocess script + uses: actions/checkout@v4 + with: + sparse-checkout: | + .github/actions/setup_python/action.yml + src/frontends/onnx/tests/tests_python/model_zoo_preprocess.sh + sparse-checkout-cone-mode: false + path: 'openvino' + + - name: Install dependencies + run: | + # install git (required to build pip deps from the sources) + apt-get update && apt-get install --assume-yes --no-install-recommends git ca-certificates git-lfs + + - name: Setup Python 3.11 + uses: ./openvino/.github/actions/setup_python + with: + version: '3.11' + should-setup-pip-paths: 'false' + self-hosted-runner: ${{ contains(inputs.runner, 'aks') }} + + - name: Update Models + run: bash ${OPENVINO_REPO}/src/frontends/onnx/tests/tests_python/model_zoo_preprocess.sh -d ${MODELS_SHARE_PATH} -o -s "${{ env.ONNX_MODEL_ZOO_SHA }}" + + - name: Install OpenVINO Python wheels + run: | + # Install the core OV wheel + python3 -m pip install ${INSTALL_DIR}/tools/openvino-*.whl + + extras_to_install="onnx" + + # Find and install OV dev wheel + pushd ${INSTALL_DIR}/tools + ov_dev_wheel_name=$(find . 
-name 'openvino_dev*.whl') + python3 -m pip install $ov_dev_wheel_name[$extras_to_install] + popd + + - name: Install Python tests dependencies + run: | + # To enable pytest parallel features + python3 -m pip install pytest-xdist[psutil] pytest-forked + + - name: ONNX Models Tests + run: python3 -m pytest --backend="CPU" --model_zoo_dir="${MODELS_SHARE_PATH}" ${INSTALL_TEST_DIR}/onnx/tests/tests_python/test_zoo_models.py -v -n 12 --forked -k 'not _cuda' --model_zoo_xfail diff --git a/.github/workflows/job_onnx_runtime.yml b/.github/workflows/job_onnx_runtime.yml new file mode 100644 index 00000000000000..5a6f5cb27eceae --- /dev/null +++ b/.github/workflows/job_onnx_runtime.yml @@ -0,0 +1,157 @@ +name: ONNX Runtime Integration + +on: + workflow_call: + inputs: + runner: + description: 'Machine on which the tests would run' + type: string + required: true + container: + description: 'JSON to be converted to the value of the "container" configuration for the job' + type: string + required: false + default: '{"image": null}' + sccache-azure-key-prefix: + description: 'Key prefix for the cache folder on the Azure' + type: string + required: true + +jobs: + ONNX_Runtime: + name: ONNX Runtime Integration + timeout-minutes: 60 + runs-on: ${{ inputs.runner }} + container: ${{ fromJSON(inputs.container) }} + defaults: + run: + shell: bash + env: + DEBIAN_FRONTEND: noninteractive # to prevent apt-get from waiting user input + OPENVINO_REPO: ${{ github.workspace }}/openvino + INSTALL_DIR: ${{ github.workspace }}/install + CMAKE_GENERATOR: 'Ninja Multi-Config' + CMAKE_CXX_COMPILER_LAUNCHER: sccache + CMAKE_C_COMPILER_LAUNCHER: sccache + SCCACHE_AZURE_KEY_PREFIX: ${{ inputs.sccache-azure-key-prefix }} + ONNX_RUNTIME_REPO: ${{ github.workspace }}/onnxruntime + ONNX_RUNTIME_UTILS: ${{ github.workspace }}/install/onnxruntime + ONNX_RUNTIME_BUILD_DIR: ${{ github.workspace }}/onnxruntime/build + steps: + - name: Download OpenVINO package + uses: actions/download-artifact@v3 + with: + name: openvino_package + path: ${{ env.INSTALL_DIR }} + + # Needed as ${{ github.workspace }} is not working correctly when using Docker + - name: Setup Variables + run: | + echo "OPENVINO_REPO=$GITHUB_WORKSPACE/openvino" >> "$GITHUB_ENV" + echo "INSTALL_DIR=$GITHUB_WORKSPACE/install" >> "$GITHUB_ENV" + echo "ONNX_RUNTIME_REPO=$GITHUB_WORKSPACE/onnxruntime" >> "$GITHUB_ENV" + echo "ONNX_RUNTIME_UTILS=$GITHUB_WORKSPACE/install/onnxruntime" >> "$GITHUB_ENV" + echo "ONNX_RUNTIME_BUILD_DIR=$GITHUB_WORKSPACE/onnxruntime/build" >> "$GITHUB_ENV" + + - name: Fetch install_build_dependencies.sh and setup_python action + uses: actions/checkout@v4 + with: + sparse-checkout: | + install_build_dependencies.sh + .github/actions/setup_python/action.yml + sparse-checkout-cone-mode: false + path: 'openvino' + + - name: Install git + run: | + apt-get update + apt-get install --assume-yes --no-install-recommends git ca-certificates + + - name: Setup Python ${{ env.PYTHON_VERSION }} + uses: ./openvino/.github/actions/setup_python + with: + version: '3.11' + should-setup-pip-paths: 'false' + + - name: Extract OpenVINO package + run: | + pushd ${INSTALL_DIR} + tar -xzf openvino_package.tar.gz -C ${INSTALL_DIR} + popd + + - name: Install OpenVINO dependencies + run: ${INSTALL_DIR}/install_dependencies/install_openvino_dependencies.sh -c=core -c=dev -y + + - name: Clone ONNX Runtime + run: | + branch=`tr -s '\n ' < ${ONNX_RUNTIME_UTILS}/version` + git clone --branch $branch --single-branch --recursive https://github.com/microsoft/onnxruntime.git 
${ONNX_RUNTIME_REPO} + + # + # Tests + # + + - name: Install Build Dependencies + run: bash ${OPENVINO_REPO}/install_build_dependencies.sh + + - name: Install sccache + uses: mozilla-actions/sccache-action@v0.0.3 + with: + version: "v0.5.4" + + - name: Build Lin ONNX Runtime + run: | + source ${INSTALL_DIR}/setupvars.sh + + ${ONNX_RUNTIME_REPO}/build.sh \ + --config RelWithDebInfo \ + --use_openvino CPU_FP32 \ + --build_shared_lib \ + --parallel \ + --skip_tests \ + --compile_no_warning_as_error \ + --build_dir ${ONNX_RUNTIME_BUILD_DIR} + env: + CXXFLAGS: "-Wno-error=deprecated-declarations" + + - name: Show sccache stats + run: ${SCCACHE_PATH} --show-stats + + - name: Run onnxruntime_test_all + if: ${{ runner.arch != 'ARM64' }} # Ticket: 126277 + run: | + source ${INSTALL_DIR}/setupvars.sh + skip_tests=$(tr -s '\n ' ':' < ${ONNX_RUNTIME_UTILS}/skip_tests) + + ./onnxruntime_test_all --gtest_filter=-$skip_tests + working-directory: ${{ env.ONNX_RUNTIME_BUILD_DIR }}/RelWithDebInfo/RelWithDebInfo + + - name: Run onnxruntime_shared_lib_test + run: | + source ${INSTALL_DIR}/setupvars.sh + ./onnxruntime_shared_lib_test --gtest_filter=-CApiTest.test_custom_op_openvino_wrapper_library + working-directory: ${{ env.ONNX_RUNTIME_BUILD_DIR }}/RelWithDebInfo/RelWithDebInfo + + - name: Run onnxruntime_global_thread_pools_test + run: | + source ${INSTALL_DIR}/setupvars.sh + ./onnxruntime_global_thread_pools_test + working-directory: ${{ env.ONNX_RUNTIME_BUILD_DIR }}/RelWithDebInfo/RelWithDebInfo + + - name: Run onnxruntime_api_tests_without_env + run: | + source ${INSTALL_DIR}/setupvars.sh + ./onnxruntime_api_tests_without_env + working-directory: ${{ env.ONNX_RUNTIME_BUILD_DIR }}/RelWithDebInfo/RelWithDebInfo + + - name: Run pytorch-converted tests + run: | + source ${INSTALL_DIR}/setupvars.sh + ./onnx_test_runner "${ONNX_RUNTIME_REPO}/cmake/external/onnx/onnx/backend/test/data/pytorch-converted" + working-directory: ${{ env.ONNX_RUNTIME_BUILD_DIR }}/RelWithDebInfo/RelWithDebInfo + + - name: Run pytorch-operator tests + run: | + source ${INSTALL_DIR}/setupvars.sh + ./onnx_test_runner "${ONNX_RUNTIME_REPO}/cmake/external/onnx/onnx/backend/test/data/pytorch-operator" + working-directory: ${{ env.ONNX_RUNTIME_BUILD_DIR }}/RelWithDebInfo/RelWithDebInfo diff --git a/.github/workflows/job_python_unit_tests.yml b/.github/workflows/job_python_unit_tests.yml new file mode 100644 index 00000000000000..38b35d17623f38 --- /dev/null +++ b/.github/workflows/job_python_unit_tests.yml @@ -0,0 +1,323 @@ +name: Python unit tests + +on: + workflow_call: + inputs: + runner: + description: 'Machine on which the tests would run' + type: string + required: true + container: + description: 'JSON to be converted to the value of the "container" configuration for the job' + type: string + required: false + default: '{"image": null}' + affected-components: + description: 'Components that are affected by changes in the commit defined by the Smart CI Action' + type: string + required: true + +env: + PIP_CACHE_PATH: /mount/caches/pip/linux + PYTHON_VERSION: '3.11' + +jobs: + Python_Unit_Tests: + name: Python unit tests + timeout-minutes: 60 + runs-on: ${{ inputs.runner }} + container: ${{ fromJSON(inputs.container) }} + defaults: + run: + shell: bash + env: + DEBIAN_FRONTEND: noninteractive # to prevent apt-get from waiting user input + OPENVINO_REPO: ${{ github.workspace }}/openvino + INSTALL_DIR: ${{ github.workspace }}/install + INSTALL_TEST_DIR: ${{ github.workspace }}/install/tests + LAYER_TESTS_INSTALL_DIR: ${{ github.workspace 
}}/install/tests/layer_tests + steps: + - name: Download OpenVINO package + uses: actions/download-artifact@v3 + with: + name: openvino_package + path: ${{ env.INSTALL_DIR }} + + - name: Download OpenVINO tests package + uses: actions/download-artifact@v3 + with: + name: openvino_tests + path: ${{ env.INSTALL_TEST_DIR }} + + # Needed as ${{ github.workspace }} is not working correctly when using Docker + - name: Setup Variables + run: | + echo "OPENVINO_REPO=$GITHUB_WORKSPACE/openvino" >> "$GITHUB_ENV" + echo "INSTALL_DIR=$GITHUB_WORKSPACE/install" >> "$GITHUB_ENV" + echo "INSTALL_TEST_DIR=$GITHUB_WORKSPACE/install/tests" >> "$GITHUB_ENV" + echo "LAYER_TESTS_INSTALL_DIR=$GITHUB_WORKSPACE/install/tests/layer_tests" >> "$GITHUB_ENV" + + - name: Extract OpenVINO packages + run: | + pushd $INSTALL_DIR + tar -xzf openvino_package.tar.gz -C $INSTALL_DIR + popd + pushd $INSTALL_TEST_DIR + tar -xzf openvino_tests.tar.gz -C $INSTALL_DIR + popd + + - name: Install OpenVINO dependencies (Linux) + if: runner.os == 'Linux' + run: $INSTALL_DIR/install_dependencies/install_openvino_dependencies.sh -c=core -c=dev -y + + - name: Fetch setup_python action + uses: actions/checkout@v4 + with: + sparse-checkout: | + .github/actions/setup_python/action.yml + sparse-checkout-cone-mode: false + path: 'openvino' + + - name: Setup Python ${{ env.PYTHON_VERSION }} + uses: ./openvino/.github/actions/setup_python + with: + version: ${{ env.PYTHON_VERSION }} + pip-cache-path: ${{ runner.os == 'Linux' && env.PIP_CACHE_PATH || '' }} + should-setup-pip-paths: ${{ runner.os == 'Linux' }} + self-hosted-runner: ${{ runner.os == 'Linux' }} + + # + # Tests + # + + - name: Install OpenVINO Python wheels + run: | + # Install the core OV wheel + python3 -m pip install ${INSTALL_DIR}/tools/openvino-*.whl + + extras_to_install="caffe,kaldi,onnx,tensorflow2,pytorch" + + if [[ "${{ runner.arch }}" != "ARM64" ]]; then + extras_to_install="mxnet,$extras_to_install" + fi + + # Find and install OV dev wheel + pushd ${INSTALL_DIR}/tools + ov_dev_wheel_name=$(find . 
-name 'openvino_dev*.whl')
+          python3 -m pip install $ov_dev_wheel_name[$extras_to_install]
+          popd
+
+      - name: Install Python API tests dependencies
+        run: |
+          # To enable pytest parallel features
+          python3 -m pip install pytest-xdist[psutil]
+          # For torchvision to OpenVINO preprocessing converter
+          python3 -m pip install -r ${INSTALL_TEST_DIR}/python/preprocess/torchvision/requirements.txt
+
+          # TODO: replace with Python API tests requirements
+          python3 -m pip install -r ${INSTALL_TEST_DIR}/mo/requirements_dev.txt
+
+      #
+      # Tests
+      #
+
+      - name: Python API 1.0 Tests
+        # if: fromJSON(inputs.affected-components).Python_API.test # Ticket: 127101
+        run: |
+          python3 -m pytest -s ${INSTALL_TEST_DIR}/pyngraph \
+            --junitxml=${INSTALL_TEST_DIR}/TEST-Pyngraph.xml \
+            --ignore=${INSTALL_TEST_DIR}/pyngraph/tests_compatibility/test_onnx/test_zoo_models.py \
+            --ignore=${INSTALL_TEST_DIR}/pyngraph/tests_compatibility/test_onnx/test_backend.py
+
+      - name: Python API 2.0 Tests
+        # if: ${{ fromJSON(inputs.affected-components).Python_API.test && runner.arch != 'ARM64' }} # Ticket: 126380, 127101
+        run: |
+          # for 'template' extension
+          export LD_LIBRARY_PATH=${INSTALL_TEST_DIR}:$LD_LIBRARY_PATH
+          python3 -m pytest -sv ${INSTALL_TEST_DIR}/pyopenvino \
+            --junitxml=${INSTALL_TEST_DIR}/TEST-Pyngraph.xml \
+            --ignore=${INSTALL_TEST_DIR}/pyopenvino/tests/test_utils/test_utils.py
+
+      - name: Model Optimizer unit tests
+        if: fromJSON(inputs.affected-components).MO.test
+        run: |
+          skip_filter=''
+          if [[ "${{ runner.os }}" != "Linux" ]] && [[ "${{ runner.arch }}" != "ARM64" ]] || [[ "${{ runner.os }}" != "macOS" ]]; then
+            # required for MxNet
+            apt-get install -y libgomp1 libquadmath0
+          else
+            # Skips under Ticket: 122666
+            skip_filter='--ignore-glob=**/mo/unit_tests/mo/front/mxnet/**'
+          fi
+
+          python3 -m pytest -s ${INSTALL_TEST_DIR}/mo/unit_tests \
+              --junitxml=${INSTALL_TEST_DIR}/TEST-ModelOptimizer.xml \
+              "$skip_filter"
+
+      - name: Python ONNX operators tests
+        if: fromJSON(inputs.affected-components).Python_API.test ||
+            fromJSON(inputs.affected-components).ONNX_FE.test && runner.os != 'macOS' # Ticket: 123325
+        run: |
+          # Skip test_onnx/test_zoo_models and test_onnx/test_backend due to long execution time - ONNX Model Zoo tests are run separately
+          python3 -m pytest -sv ${INSTALL_TEST_DIR}/onnx -k 'not cuda' \
+            --junitxml=${INSTALL_TEST_DIR}/TEST-onnx_frontend.xml \
+            --ignore=${INSTALL_TEST_DIR}/onnx/test_python/test_zoo_models.py
+
+      - name: OVC unit tests
+        if: fromJSON(inputs.affected-components).MO.test
+        run: python3 -m pytest -s ${INSTALL_TEST_DIR}/ovc/unit_tests --junitxml=${INSTALL_TEST_DIR}/TEST-OpenVinoConversion.xml
+
+      - name: Install Python Layer tests dependencies
+        run: |
+          # layer test requirements
+          python3 -m pip install -r ${LAYER_TESTS_INSTALL_DIR}/requirements.txt
+
+      - name: MO Python API Tests
+        if: fromJSON(inputs.affected-components).MO.test
+        run: |
+          # Import 'test_utils' installed in '/tests/python/openvino'
+          export LD_LIBRARY_PATH=${PIP_INSTALL_PATH}/openvino/libs:$LD_LIBRARY_PATH
+          export PYTHONPATH=${INSTALL_TEST_DIR}/python
+
+          if [[ "${{ runner.os }}" == "Linux" ]] && [[ "${{ runner.arch }}" == "ARM64" ]]; then
+            # Find gomp lib
+            GOMP_LIB=$(find "${PIP_INSTALL_PATH}/torch/lib/../../torch.libs/" -name '*libgomp-*so*')
+            export LD_PRELOAD=${GOMP_LIB}
+          fi
+
+          python3 -m pytest ${LAYER_TESTS_INSTALL_DIR}/mo_python_api_tests --junitxml=${INSTALL_TEST_DIR}/TEST-test_mo_convert.xml
+        env:
+          TEST_DEVICE: CPU
+          TEST_PRECISION: FP16
+
+      - name: OVC Python API Tests
+        if:
fromJSON(inputs.affected-components).MO.test + run: | + # Import 'test_utils' installed in '/tests/python/openvino' + export PYTHONPATH=${INSTALL_TEST_DIR}/python + export LD_LIBRARY_PATH=${PIP_INSTALL_PATH}/openvino/libs:$LD_LIBRARY_PATH + + if [[ "${{ runner.os }}" == "Linux" ]] && [[ "${{ runner.arch }}" == "ARM64" ]]; then + # Find gomp lib + GOMP_LIB=$(find "${PIP_INSTALL_PATH}/torch/lib/../../torch.libs/" -name '*libgomp-*so*') + export LD_PRELOAD=${GOMP_LIB} + fi + + python3 -m pytest ${LAYER_TESTS_INSTALL_DIR}/ovc_python_api_tests --junitxml=${INSTALL_TEST_DIR}/TEST-test_ovc_convert.xml + env: + TEST_DEVICE: CPU + TEST_PRECISION: FP16 + + - name: Python Frontend tests + if: fromJSON(inputs.affected-components).PyTorch_FE.test || + fromJSON(inputs.affected-components).PDPD_FE.test + run: | + # to allow 'libtest_builtin_extensions.so' to find 'libopenvino_onnx_frontend.so' + export LD_LIBRARY_PATH=${PIP_INSTALL_PATH}/openvino/libs:$LD_LIBRARY_PATH + python3 -m pytest ${LAYER_TESTS_INSTALL_DIR}/py_frontend_tests --junitxml=${INSTALL_TEST_DIR}/TEST-test_py_fontend.xml + + - name: PyTorch Layer Tests + if: ${{ fromJSON(inputs.affected-components).PyTorch_FE.test && runner.arch != 'ARM64' }} # Ticket: 126287 + run: python3 -m pytest ${LAYER_TESTS_INSTALL_DIR}/pytorch_tests -n logical -m precommit --junitxml=${INSTALL_TEST_DIR}/TEST-pytorch.xml + env: + TEST_DEVICE: CPU + TEST_PRECISION: FP32 + + - name: PyTorch torch.compile TORCHFX Layer Tests + if: ${{ fromJSON(inputs.affected-components).PyTorch_FE.test && runner.os != 'macOS' }} + run: | + python3 -m pytest ${LAYER_TESTS_INSTALL_DIR}/pytorch_tests -m precommit_fx_backend --junitxml=${INSTALL_TEST_DIR}/TEST-pytorch.xml + env: + TEST_DEVICE: CPU + TEST_PRECISION: FP32 + PYTORCH_TRACING_MODE: TORCHFX + + - name: PyTorch torch.compile TORCHSCRIPT Layer Tests + if: ${{ fromJSON(inputs.affected-components).PyTorch_FE.test && runner.os != 'macOS' }} + run: | + python3 -m pytest ${LAYER_TESTS_INSTALL_DIR}/pytorch_tests -m precommit_ts_backend --junitxml=${INSTALL_TEST_DIR}/TEST-pytorch.xml + env: + TEST_DEVICE: CPU + TEST_PRECISION: FP32 + PYTORCH_TRACING_MODE: TORCHSCRIPT + + - name: ONNX Layer Tests + if: fromJSON(inputs.affected-components).ONNX_FE.test + run: | + # requires 'unit_tests' from 'tools/mo' + export PYTHONPATH=${INSTALL_TEST_DIR}/mo:$PYTHONPATH + python3 -m pytest ${LAYER_TESTS_INSTALL_DIR}/onnx_tests -m "not launch_only_if_manually_specified and precommit" --junitxml=${INSTALL_TEST_DIR}/TEST-onnx.xml + env: + TEST_DEVICE: CPU + TEST_PRECISION: FP16 + + - name: TensorFlow 1 Layer Tests - TF FE + if: fromJSON(inputs.affected-components).TF_FE.test + run: | + # requires 'unit_tests' from 'mo' + export PYTHONPATH=${INSTALL_TEST_DIR}/mo + python3 -m pytest ${LAYER_TESTS_INSTALL_DIR}/tensorflow_tests/ --use_new_frontend -m precommit_tf_fe --junitxml=${INSTALL_TEST_DIR}/TEST-tf_fe.xml + env: + TEST_DEVICE: CPU + TEST_PRECISION: FP16 + + - name: TensorFlow 2 Layer Tests - TF FE + if: fromJSON(inputs.affected-components).TF_FE.test && runner.os != 'macOS' # Ticket: 123322 + run: | + # requires 'unit_tests' from 'mo' + export PYTHONPATH=${INSTALL_TEST_DIR}/mo + python3 -m pytest ${LAYER_TESTS_INSTALL_DIR}/tensorflow2_keras_tests/ --use_new_frontend -m precommit_tf_fe --junitxml=${INSTALL_TEST_DIR}/TEST-tf2_fe.xml + env: + TEST_DEVICE: CPU + TEST_PRECISION: FP16 + + - name: JAX Layer Tests - TF FE + if: ${{ fromJSON(inputs.affected-components).TF_FE.test && runner.arch != 'ARM64' }} + run: python3 -m pytest 
${LAYER_TESTS_INSTALL_DIR}/jax_tests/ -m precommit --junitxml=${INSTALL_TEST_DIR}/TEST-jax.xml + env: + TEST_DEVICE: CPU + + - name: TensorFlow 1 Layer Tests - Legacy FE + if: fromJSON(inputs.affected-components).TF_FE.test + run: python3 -m pytest ${LAYER_TESTS_INSTALL_DIR}/tensorflow_tests/test_tf_Roll.py --ir_version=10 --junitxml=${INSTALL_TEST_DIR}/TEST-tf_Roll.xml + + - name: TensorFlow 2 Layer Tests - Legacy FE + if: fromJSON(inputs.affected-components).TF_FE.test + run: python3 -m pytest ${LAYER_TESTS_INSTALL_DIR}/tensorflow2_keras_tests/test_tf2_keras_activation.py --ir_version=11 -k "sigmoid" --junitxml=${INSTALL_TEST_DIR}/TEST-tf2_Activation.xml + env: + TEST_DEVICE: CPU + TEST_PRECISION: FP16 + + - name: TensorFlow Lite Layer Tests - TFL FE + if: fromJSON(inputs.affected-components).TFL_FE.test + run: python3 -m pytest ${LAYER_TESTS_INSTALL_DIR}/tensorflow_lite_tests/ --junitxml=${INSTALL_TEST_DIR}/TEST-tfl_fe.xml + env: + TEST_DEVICE: CPU + TEST_PRECISION: FP16 + + - name: Clone API snippets + if: runner.os != 'macOS' + uses: actions/checkout@v4 + with: + sparse-checkout: openvino/docs/snippets + path: ${{ env.OPENVINO_REPO }} + submodules: 'false' + + - name: Docs Python snippets + if: runner.os != 'macOS' + run: | + # to find 'snippets' module in docs + export PYTHONPATH=${OPENVINO_REPO}/docs + # for 'template' extension + export LD_LIBRARY_PATH=${INSTALL_TEST_DIR}:$LD_LIBRARY_PATH + python3 ${OPENVINO_REPO}/docs/snippets/main.py + + - name: Upload Test Results + uses: actions/upload-artifact@v3 + if: ${{ !cancelled() }} + with: + name: test-results-python + path: | + ${{ env.INSTALL_TEST_DIR }}/TEST*.html + ${{ env.INSTALL_TEST_DIR }}/TEST*.xml + if-no-files-found: 'warn' diff --git a/.github/workflows/job_pytorch_models_tests.yml b/.github/workflows/job_pytorch_models_tests.yml new file mode 100644 index 00000000000000..8904b2212e9e33 --- /dev/null +++ b/.github/workflows/job_pytorch_models_tests.yml @@ -0,0 +1,132 @@ +name: PyTorch Models tests + +on: + workflow_call: + inputs: + runner: + description: 'Machine on which the tests would run' + type: string + required: true + container: + description: 'JSON to be converted to the value of the "container" configuration for the job' + type: string + required: false + default: '{"image": null}' + event: + description: 'Event that triggered the workflow. 
E.g., "schedule" for nightly runs' + type: string + required: true + +jobs: + PyTorch_Models_Tests: + name: PyTorch Models tests + timeout-minutes: ${{ inputs.event == 'schedule' && 400 || 30 }} + runs-on: ${{ inputs.runner }} + container: ${{ fromJSON(inputs.container) }} + defaults: + run: + shell: bash + env: + DEBIAN_FRONTEND: noninteractive # to prevent apt-get from waiting user input + OPENVINO_REPO: ${{ github.workspace }}/openvino + INSTALL_DIR: ${{ github.workspace }}/install + INSTALL_TEST_DIR: ${{ github.workspace }}/install/tests + MODEL_HUB_TESTS_INSTALL_DIR: ${{ github.workspace }}/install/tests/model_hub_tests + steps: + + - name: Check sudo + if: ${{ runner.os == 'Linux' }} + run: if [ "$(id -u)" -eq 0 ]; then apt update && apt --assume-yes install sudo; fi + + - name: Download OpenVINO package + uses: actions/download-artifact@v3 + with: + name: openvino_package + path: ${{ env.INSTALL_DIR }} + + - name: Download OpenVINO tests package + uses: actions/download-artifact@v3 + with: + name: openvino_tests + path: ${{ env.INSTALL_TEST_DIR }} + + # Needed as ${{ github.workspace }} is not working correctly when using Docker + - name: Setup Variables + run: | + echo "OPENVINO_REPO=$GITHUB_WORKSPACE/openvino" >> "$GITHUB_ENV" + echo "INSTALL_DIR=$GITHUB_WORKSPACE/install" >> "$GITHUB_ENV" + echo "INSTALL_TEST_DIR=$GITHUB_WORKSPACE/install/tests" >> "$GITHUB_ENV" + echo "MODEL_HUB_TESTS_INSTALL_DIR=$GITHUB_WORKSPACE/install/tests/model_hub_tests" >> "$GITHUB_ENV" + + - name: Extract OpenVINO packages + run: | + pushd ${INSTALL_DIR} + tar -xzf openvino_package.tar.gz -C ${INSTALL_DIR} + popd + + pushd ${INSTALL_TEST_DIR} + tar -xzf openvino_tests.tar.gz -C ${INSTALL_DIR} + popd + + - name: Fetch setup_python action + uses: actions/checkout@v4 + with: + sparse-checkout: | + .github/actions/setup_python/action.yml + sparse-checkout-cone-mode: false + path: 'openvino' + + - name: Install dependencies + if: ${{ runner.os == 'Linux' }} + run: | + # install git (required to build pip deps from the sources) + # install 'g++' to build 'detectron2' and 'natten' wheels + sudo apt-get install --assume-yes --no-install-recommends g++ git ca-certificates + + - name: Setup Python 3.11 + uses: ./openvino/.github/actions/setup_python + with: + version: '3.11' + should-setup-pip-paths: 'false' + self-hosted-runner: ${{ contains(inputs.runner, 'aks') }} + + - name: Install OpenVINO Python wheels + run: python3 -m pip install ${INSTALL_DIR}/tools/openvino-* + + - name: Install PyTorch tests requirements + run: | + python3 -m pip install -r ${MODEL_HUB_TESTS_INSTALL_DIR}/torch_tests/requirements.txt + python3 -m pip install -r ${MODEL_HUB_TESTS_INSTALL_DIR}/torch_tests/requirements_secondary.txt + echo "Available storage:" + df -h + env: + CPLUS_INCLUDE_PATH: ${{ env.Python_ROOT_DIR }}/include/python${{ env.PYTHON_VERSION }} + + - name: PyTorch Models Tests + run: | + export PYTHONPATH=${MODEL_HUB_TESTS_INSTALL_DIR}:$PYTHONPATH + python3 -m pytest ${MODEL_HUB_TESTS_INSTALL_DIR}/torch_tests -m ${TYPE} --html=${INSTALL_TEST_DIR}/TEST-torch_model_tests.html --self-contained-html -v + env: + TYPE: ${{ inputs.event == 'schedule' && 'nightly' || 'precommit'}} + TEST_DEVICE: CPU + USE_SYSTEM_CACHE: False + OP_REPORT_FILE: ${{ env.INSTALL_TEST_DIR }}/TEST-torch_unsupported_ops.log + + - name: Reformat unsupported ops file + if: '!cancelled()' + run: | + python3 ${MODEL_HUB_TESTS_INSTALL_DIR}/torch_tests/scripts/process_op_report.py ${INSTALL_TEST_DIR}/TEST-torch_unsupported_ops.log + + - name: Available 
storage after tests + run: | + echo "Available storage:" + df -h + + - name: Upload Test Results + uses: actions/upload-artifact@v3 + if: ${{ !cancelled() }} + with: + name: test-results-torch-models + path: | + ${{ env.INSTALL_TEST_DIR }}/TEST-torch* + if-no-files-found: 'error' diff --git a/.github/workflows/job_samples_tests.yml b/.github/workflows/job_samples_tests.yml new file mode 100644 index 00000000000000..8c1401a819d845 --- /dev/null +++ b/.github/workflows/job_samples_tests.yml @@ -0,0 +1,132 @@ +name: Samples + +on: + workflow_call: + inputs: + runner: + description: 'Machine on which the tests would run' + type: string + required: true + image: + description: 'Docker image in which the tests would run' + type: string + required: false + default: null + affected-components: + description: 'Components that are affected by changes in the commit defined by the Smart CI Action' + type: string + required: true + +jobs: + Samples: + runs-on: ${{ inputs.runner }} + container: + image: ${{ inputs.image }} + defaults: + run: + shell: bash + env: + DEBIAN_FRONTEND: noninteractive # to prevent apt-get from waiting user input + INSTALL_DIR: ${{ github.workspace }}/install + INSTALL_TEST_DIR: ${{ github.workspace }}/install/tests + BUILD_DIR: ${{ github.workspace }}/build + steps: + - name: Download OpenVINO package + uses: actions/download-artifact@v3 + with: + name: openvino_package + path: ${{ env.INSTALL_DIR }} + + - name: Download OpenVINO tests package + uses: actions/download-artifact@v3 + with: + name: openvino_tests + path: ${{ env.INSTALL_TEST_DIR }} + + # Needed as ${{ github.workspace }} is not working correctly when using Docker + - name: Setup Variables + run: | + echo "INSTALL_DIR=$GITHUB_WORKSPACE/install" >> "$GITHUB_ENV" + echo "INSTALL_TEST_DIR=$GITHUB_WORKSPACE/install/tests" >> "$GITHUB_ENV" + echo "BUILD_DIR=$GITHUB_WORKSPACE/build" >> "$GITHUB_ENV" + + - name: Extract OpenVINO packages + run: | + pushd $INSTALL_DIR + tar -xzf openvino_package.tar.gz -C $INSTALL_DIR + popd + pushd $INSTALL_TEST_DIR + tar -xzf openvino_tests.tar.gz -C $INSTALL_DIR + popd + + - name: Install OpenVINO dependencies (Linux) + if: runner.os == 'Linux' + run: $INSTALL_DIR/install_dependencies/install_openvino_dependencies.sh -c=core -c=dev -y + + - name: Install OpenVINO dependencies (mac) + if: runner.os == 'macOS' + run: brew install coreutils + + - name: Fetch setup_python action + uses: actions/checkout@v4 + with: + sparse-checkout: | + .github/actions/setup_python/action.yml + sparse-checkout-cone-mode: false + path: 'openvino' + + - name: Setup Python 3.11 + uses: ./openvino/.github/actions/setup_python + with: + version: '3.11' + should-setup-pip-paths: 'false' + self-hosted-runner: ${{ runner.os == 'Linux' }} + + - name: Build cpp samples - GCC + run: $INSTALL_DIR/samples/cpp/build_samples.sh -i $INSTALL_DIR -b $BUILD_DIR/cpp_samples + env: + CMAKE_COMPILE_WARNING_AS_ERROR: 'ON' + + - name: Build cpp samples - Clang + if: runner.os == 'Linux' + run: | + apt-get install -y clang + $INSTALL_DIR/samples/cpp/build_samples.sh -i $INSTALL_DIR -b $BUILD_DIR/cpp_samples_clang + env: + CMAKE_COMPILE_WARNING_AS_ERROR: 'ON' + CC: clang + CXX: clang++ + + - name: Build c samples + run: $INSTALL_DIR/samples/c/build_samples.sh -i $INSTALL_DIR -b $BUILD_DIR/c_samples + env: + CMAKE_COMPILE_WARNING_AS_ERROR: 'ON' + + # + # Tests + # + + - name: Samples tests + if: fromJSON(inputs.affected-components).samples.test + run: | + export WORKSPACE=$INSTALL_DIR + export IE_APP_PATH=$INSTALL_DIR/samples_bin + 
export IE_APP_PYTHON_PATH=$INSTALL_DIR/samples/python + export SHARE=$INSTALL_TEST_DIR/smoke_tests/samples_smoke_tests_data + + python3 -m pip install --ignore-installed PyYAML -r $INSTALL_TEST_DIR/smoke_tests/requirements.txt + export LD_LIBRARY_PATH=${IE_APP_PATH}:$LD_LIBRARY_PATH + + source ${INSTALL_DIR}/setupvars.sh + + python3 -m pytest -sv $INSTALL_TEST_DIR/smoke_tests \ + --env_conf $INSTALL_TEST_DIR/smoke_tests/env_config.yml \ + --junitxml=$INSTALL_TEST_DIR/TEST-SamplesSmokeTests.xml + + - name: Upload Test Results + uses: actions/upload-artifact@v3 + if: ${{ !cancelled() }} + with: + name: test-results-samples + path: ${{ env.INSTALL_TEST_DIR }}/TEST*.xml + if-no-files-found: 'warn' diff --git a/.github/workflows/job_tensorflow_hub_models_tests.yml b/.github/workflows/job_tensorflow_hub_models_tests.yml new file mode 100644 index 00000000000000..6dcecaa267e8c4 --- /dev/null +++ b/.github/workflows/job_tensorflow_hub_models_tests.yml @@ -0,0 +1,113 @@ +name: TensorFlow Hub Models tests + +on: + workflow_call: + inputs: + runner: + description: 'Machine on which the tests would run' + type: string + required: true + container: + description: 'JSON to be converted to the value of the "container" configuration for the job' + type: string + required: false + default: '{"image": null}' + event: + description: 'Event that triggered the workflow. E.g., "schedule" for nightly runs' + type: string + required: true + +jobs: + TensorFlow_Hub_Models_Tests: + name: TensorFlow Hub Models tests + timeout-minutes: ${{ inputs.event == 'schedule' && 400 || 25 }} + runs-on: ${{ inputs.runner }} + container: ${{ fromJSON(inputs.container) }} + defaults: + run: + shell: bash + env: + DEBIAN_FRONTEND: noninteractive # to prevent apt-get from waiting user input + OPENVINO_REPO: ${{ github.workspace }}/openvino + INSTALL_DIR: ${{ github.workspace }}/install + INSTALL_TEST_DIR: ${{ github.workspace }}/install/tests + MODEL_HUB_TESTS_INSTALL_DIR: ${{ github.workspace }}/install/tests/model_hub_tests + steps: + + - name: Check sudo + if: ${{ runner.os == 'Linux' }} + run: if [ "$(id -u)" -eq 0 ]; then apt update && apt --assume-yes install sudo; fi + + - name: Download OpenVINO package + uses: actions/download-artifact@v3 + with: + name: openvino_package + path: ${{ env.INSTALL_DIR }} + + - name: Download OpenVINO tests package + uses: actions/download-artifact@v3 + with: + name: openvino_tests + path: ${{ env.INSTALL_TEST_DIR }} + + # Needed as ${{ github.workspace }} is not working correctly when using Docker + - name: Setup Variables + run: | + echo "OPENVINO_REPO=$GITHUB_WORKSPACE/openvino" >> "$GITHUB_ENV" + echo "INSTALL_DIR=$GITHUB_WORKSPACE/install" >> "$GITHUB_ENV" + echo "INSTALL_TEST_DIR=$GITHUB_WORKSPACE/install/tests" >> "$GITHUB_ENV" + echo "MODEL_HUB_TESTS_INSTALL_DIR=$GITHUB_WORKSPACE/install/tests/model_hub_tests" >> "$GITHUB_ENV" + + - name: Extract OpenVINO packages + run: | + pushd ${INSTALL_DIR} + tar -xzf openvino_package.tar.gz -C ${INSTALL_DIR} + popd + + pushd ${INSTALL_TEST_DIR} + tar -xzf openvino_tests.tar.gz -C ${INSTALL_DIR} + popd + + - name: Fetch setup_python action + uses: actions/checkout@v4 + with: + sparse-checkout: | + .github/actions/setup_python/action.yml + sparse-checkout-cone-mode: false + path: 'openvino' + + - name: Install dependencies + if: ${{ runner.os == 'Linux' }} + run: | + # install git (required to build pip deps from the sources) + sudo apt-get install --assume-yes --no-install-recommends g++ git ca-certificates + + - name: Setup Python 3.11 + uses: 
./openvino/.github/actions/setup_python + with: + version: '3.11' + should-setup-pip-paths: 'false' + self-hosted-runner: ${{ contains(inputs.runner, 'aks') }} + + - name: Install OpenVINO Python wheels + run: python3 -m pip install ${INSTALL_DIR}/tools/openvino-* + + - name: Install TF Hub tests requirements + run: python3 -m pip install -r ${MODEL_HUB_TESTS_INSTALL_DIR}/tf_hub_tests/requirements.txt + + - name: TensorFlow Hub Tests - TF FE + run: | + export PYTHONPATH=${MODEL_HUB_TESTS_INSTALL_DIR}:$PYTHONPATH + python3 -m pytest ${MODEL_HUB_TESTS_INSTALL_DIR}/tf_hub_tests/ -m ${TYPE} --html=${INSTALL_TEST_DIR}/TEST-tf_hub_tf_fe.html --self-contained-html -v + env: + TYPE: ${{ inputs.event == 'schedule' && 'nightly' || 'precommit'}} + TEST_DEVICE: CPU + + - name: Upload Test Results + uses: actions/upload-artifact@v3 + if: ${{ !cancelled() }} + with: + name: test-results-tensorflow-hub-models + path: | + ${{ env.INSTALL_TEST_DIR }}/TEST*.html + if-no-files-found: 'error' diff --git a/.github/workflows/linux.yml b/.github/workflows/linux.yml index 567c6e9a14c038..fcc5e1f4923510 100644 --- a/.github/workflows/linux.yml +++ b/.github/workflows/linux.yml @@ -53,7 +53,7 @@ jobs: container: image: openvinogithubactions.azurecr.io/dockerhub/ubuntu:20.04 volumes: - - /mount/caches:/mount/caches + - /mount:/mount options: -e SCCACHE_AZURE_BLOB_CONTAINER -e SCCACHE_AZURE_CONNECTION_STRING env: DEBIAN_FRONTEND: noninteractive # to prevent apt-get from waiting user input @@ -261,164 +261,22 @@ jobs: Debian_Packages: name: Debian Packages needs: Build - timeout-minutes: 5 - defaults: - run: - shell: bash - runs-on: ubuntu-20.04 - container: - image: ubuntu:20.04 - env: - DEBIAN_FRONTEND: noninteractive # to prevent apt-get from waiting user input - DEBIAN_PACKAGES_DIR: /__w/openvino/packages/ - - steps: - - name: Download OpenVINO debian packages - uses: actions/download-artifact@v3 - with: - name: openvino_debian_packages - path: ${{ env.DEBIAN_PACKAGES_DIR }} - - - name: Install debian packages & check conflicts - run: | - apt-get update -y - # Install debian packages from previous release - apt-get install --no-install-recommends -y gnupg wget ca-certificates - wget https://apt.repos.intel.com/intel-gpg-keys/GPG-PUB-KEY-INTEL-SW-PRODUCTS.PUB - apt-key add GPG-PUB-KEY-INTEL-SW-PRODUCTS.PUB - echo "deb https://apt.repos.intel.com/openvino/2023 ubuntu20 main" | tee /etc/apt/sources.list.d/intel-openvino-2023.list - apt-get update -y - apt-get install -y openvino - # install our local one and make sure the conflicts are resolved - apt-get install --no-install-recommends -y dpkg-dev - dpkg-scanpackages . 
/dev/null | gzip -9c > Packages.gz - echo "deb [trusted=yes] file:${DEBIAN_PACKAGES_DIR} ./" | tee /etc/apt/sources.list.d/openvino-local.list - apt-get update -y - apt-get install openvino -y - working-directory: ${{ env.DEBIAN_PACKAGES_DIR }} - - - name: Test debian packages - run: | - /usr/share/openvino/samples/cpp/build_samples.sh - /usr/share/openvino/samples/c/build_samples.sh - ~/openvino_cpp_samples_build/intel64/Release/hello_query_device - python3 /usr/share/openvino/samples/python/hello_query_device/hello_query_device.py - python3 -c 'from openvino import Core; Core().get_property("CPU", "AVAILABLE_DEVICES")' - python3 -c 'from openvino import Core; Core().get_property("GPU", "AVAILABLE_DEVICES")' - python3 -c 'from openvino import Core; Core().get_property("AUTO", "SUPPORTED_METRICS")' - python3 -c 'from openvino import Core; Core().get_property("MULTI", "SUPPORTED_METRICS")' - python3 -c 'from openvino import Core; Core().get_property("HETERO", "SUPPORTED_METRICS")' - python3 -c 'from openvino import Core; Core().get_property("BATCH", "SUPPORTED_METRICS")' - python3 -c 'from openvino.frontend import FrontEndManager; assert len(FrontEndManager().get_available_front_ends()) == 6' - benchmark_app --help - ovc --help + uses: ./.github/workflows/job_debian_packages.yml + with: + runner: 'aks-linux-4-cores-16gb' + image: 'openvinogithubactions.azurecr.io/dockerhub/ubuntu:20.04' Samples: - needs: [Build, Smart_CI] - timeout-minutes: 20 - defaults: - run: - shell: bash - runs-on: ubuntu-20.04 - container: - image: ubuntu:20.04 - env: - DEBIAN_FRONTEND: noninteractive # to prevent apt-get from waiting user input - INSTALL_DIR: /__w/openvino/openvino/install - INSTALL_TEST_DIR: /__w/openvino/openvino/install/tests - BUILD_DIR: /__w/openvino/openvino/build + needs: [ Build, Smart_CI ] if: fromJSON(needs.smart_ci.outputs.affected_components).samples - - steps: - - name: Download OpenVINO package - uses: actions/download-artifact@v3 - with: - name: openvino_package - path: ${{ env.INSTALL_DIR }} - - - name: Download OpenVINO tests package - uses: actions/download-artifact@v3 - with: - name: openvino_tests - path: ${{ env.INSTALL_TEST_DIR }} - - - name: Extract OpenVINO packages - run: | - pushd ${INSTALL_DIR} - tar -xzf openvino_package.tar.gz -C ${INSTALL_DIR} - popd - pushd ${INSTALL_TEST_DIR} - tar -xzf openvino_tests.tar.gz -C ${INSTALL_DIR} - popd - - - name: Install OpenVINO dependencies - run: ${INSTALL_DIR}/install_dependencies/install_openvino_dependencies.sh -c=core -c=dev -y - - - name: Fetch setup_python action - uses: actions/checkout@v4 - with: - sparse-checkout: | - .github/actions/setup_python/action.yml - sparse-checkout-cone-mode: false - path: 'openvino' - - - name: Setup Python ${{ env.PYTHON_VERSION }} - uses: ./openvino/.github/actions/setup_python - with: - version: ${{ env.PYTHON_VERSION }} - should-setup-pip-paths: 'false' - self-hosted-runner: 'false' - - - name: Build cpp samples - GCC - run: ${INSTALL_DIR}/samples/cpp/build_samples.sh -i ${INSTALL_DIR} -b ${BUILD_DIR}/cpp_samples - env: - CMAKE_COMPILE_WARNING_AS_ERROR: 'ON' - - - name: Build cpp samples - Clang - run: | - apt-get install -y clang - ${INSTALL_DIR}/samples/cpp/build_samples.sh -i ${INSTALL_DIR} -b ${BUILD_DIR}/cpp_samples_clang - env: - CMAKE_COMPILE_WARNING_AS_ERROR: 'ON' - CC: clang - CXX: clang++ - - - name: Build c samples - run: ${INSTALL_DIR}/samples/c/build_samples.sh -i ${INSTALL_DIR} -b ${BUILD_DIR}/c_samples - env: - CMAKE_COMPILE_WARNING_AS_ERROR: 'ON' - - # - # Tests - # - - - name: 
Samples tests - if: fromJSON(needs.smart_ci.outputs.affected_components).samples.test - run: | - export WORKSPACE=${INSTALL_DIR} - export IE_APP_PATH=${INSTALL_DIR}/samples_bin - export IE_APP_PYTHON_PATH=${INSTALL_DIR}/samples/python - export SHARE=${INSTALL_TEST_DIR}/smoke_tests/samples_smoke_tests_data - - python3 -m pip install --ignore-installed PyYAML -r ${INSTALL_TEST_DIR}/smoke_tests/requirements.txt - export LD_LIBRARY_PATH=${IE_APP_PATH}:$LD_LIBRARY_PATH - - source ${INSTALL_DIR}/setupvars.sh - - python3 -m pytest -sv ${INSTALL_TEST_DIR}/smoke_tests \ - --env_conf ${INSTALL_TEST_DIR}/smoke_tests/env_config.yml \ - --junitxml=${INSTALL_TEST_DIR}/TEST-SamplesSmokeTests.xml - - - name: Upload Test Results - uses: actions/upload-artifact@v3 - if: ${{ !cancelled() }} - with: - name: test-results-samples - path: ${{ env.INSTALL_TEST_DIR }}/TEST*.xml - if-no-files-found: 'warn' + uses: ./.github/workflows/job_samples_tests.yml + with: + runner: 'aks-linux-4-cores-16gb' + image: 'openvinogithubactions.azurecr.io/dockerhub/ubuntu:20.04' + affected-components: ${{ needs.smart_ci.outputs.affected_components }} Conformance: - needs: [Build, Smart_CI] + needs: [ Build, Smart_CI ] timeout-minutes: ${{ matrix.TEST_TYPE == 'API' && 5 || 15 }} defaults: run: @@ -522,1028 +380,83 @@ jobs: ONNX_Runtime: name: ONNX Runtime Integration - needs: [Build, Smart_CI] - timeout-minutes: 20 - defaults: - run: - shell: bash - runs-on: aks-linux-16-cores-32gb - container: - image: openvinogithubactions.azurecr.io/dockerhub/ubuntu:20.04 - volumes: - - /mount/caches:/mount/caches - options: -e SCCACHE_AZURE_BLOB_CONTAINER -e SCCACHE_AZURE_CONNECTION_STRING - env: - DEBIAN_FRONTEND: noninteractive # to prevent apt-get from waiting user input - CMAKE_GENERATOR: 'Ninja Multi-Config' - CMAKE_CXX_COMPILER_LAUNCHER: sccache - CMAKE_C_COMPILER_LAUNCHER: sccache - OPENVINO_REPO: /__w/openvino/openvino/openvino - INSTALL_DIR: /__w/openvino/openvino/install - SCCACHE_AZURE_KEY_PREFIX: ubuntu20_x86_64_onnxruntime - ONNX_RUNTIME_REPO: /__w/openvino/openvino/onnxruntime - ONNX_RUNTIME_UTILS: /__w/openvino/openvino/install/onnxruntime - ONNX_RUNTIME_BUILD_DIR: /__w/openvino/openvino/onnxruntime/build if: fromJSON(needs.smart_ci.outputs.affected_components).ONNX_RT - - steps: - - name: Fetch install_build_dependencies.sh and setup_python action - uses: actions/checkout@v4 - with: - sparse-checkout: | - install_build_dependencies.sh - .github/actions/setup_python/action.yml - sparse-checkout-cone-mode: false - path: ${{ env.OPENVINO_REPO }} - - - name: Install git - run: | - apt-get update - apt-get install --assume-yes --no-install-recommends git ca-certificates - - - name: Setup Python ${{ env.PYTHON_VERSION }} - uses: ./openvino/.github/actions/setup_python - with: - version: ${{ env.PYTHON_VERSION }} - should-setup-pip-paths: 'false' - - # - # Initialize OpenVINO - # - - - name: Download OpenVINO package - uses: actions/download-artifact@v3 - with: - name: openvino_package - path: ${{ env.INSTALL_DIR }} - - - name: Extract OpenVINO package - run: | - pushd ${INSTALL_DIR} - tar -xzf openvino_package.tar.gz -C ${INSTALL_DIR} - popd - - - name: Install OpenVINO dependencies - run: ${INSTALL_DIR}/install_dependencies/install_openvino_dependencies.sh -c=core -c=dev -y - - - name: Clone ONNX Runtime - run: | - branch=`tr -s '\n ' < ${ONNX_RUNTIME_UTILS}/version` - git clone --branch $branch --single-branch --recursive https://github.com/microsoft/onnxruntime.git ${ONNX_RUNTIME_REPO} - - # - # Tests - # - - - name: Install Build 
Dependencies - run: bash ${OPENVINO_REPO}/install_build_dependencies.sh - - - name: Install sccache - uses: mozilla-actions/sccache-action@v0.0.3 - with: - version: "v0.5.4" - - - name: Build Lin ONNX Runtime - run: | - source ${INSTALL_DIR}/setupvars.sh - - ${ONNX_RUNTIME_REPO}/build.sh \ - --config RelWithDebInfo \ - --use_openvino CPU_FP32 \ - --build_shared_lib \ - --parallel \ - --skip_tests \ - --compile_no_warning_as_error \ - --build_dir ${ONNX_RUNTIME_BUILD_DIR} - env: - CXXFLAGS: "-Wno-error=deprecated-declarations" - - - name: Show sccache stats - run: ${SCCACHE_PATH} --show-stats - - - name: Run onnxruntime_test_all - run: | - source ${INSTALL_DIR}/setupvars.sh - skip_tests=$(tr -s '\n ' ':' < ${ONNX_RUNTIME_UTILS}/skip_tests) - - ./onnxruntime_test_all --gtest_filter=-$skip_tests - working-directory: ${{ env.ONNX_RUNTIME_BUILD_DIR }}/RelWithDebInfo/RelWithDebInfo - - - name: Run onnxruntime_shared_lib_test - run: | - source ${INSTALL_DIR}/setupvars.sh - ./onnxruntime_shared_lib_test --gtest_filter=-CApiTest.test_custom_op_openvino_wrapper_library - working-directory: ${{ env.ONNX_RUNTIME_BUILD_DIR }}/RelWithDebInfo/RelWithDebInfo - - - name: Run onnxruntime_global_thread_pools_test - run: | - source ${INSTALL_DIR}/setupvars.sh - ./onnxruntime_global_thread_pools_test - working-directory: ${{ env.ONNX_RUNTIME_BUILD_DIR }}/RelWithDebInfo/RelWithDebInfo - - - name: Run onnxruntime_api_tests_without_env - run: | - source ${INSTALL_DIR}/setupvars.sh - ./onnxruntime_api_tests_without_env - working-directory: ${{ env.ONNX_RUNTIME_BUILD_DIR }}/RelWithDebInfo/RelWithDebInfo - - - name: Run pytorch-converted tests - run: | - source ${INSTALL_DIR}/setupvars.sh - ./onnx_test_runner "${ONNX_RUNTIME_REPO}/cmake/external/onnx/onnx/backend/test/data/pytorch-converted" - working-directory: ${{ env.ONNX_RUNTIME_BUILD_DIR }}/RelWithDebInfo/RelWithDebInfo - - - name: Run pytorch-operator tests - run: | - source ${INSTALL_DIR}/setupvars.sh - ./onnx_test_runner "${ONNX_RUNTIME_REPO}/cmake/external/onnx/onnx/backend/test/data/pytorch-operator" - working-directory: ${{ env.ONNX_RUNTIME_BUILD_DIR }}/RelWithDebInfo/RelWithDebInfo + needs: [ Build, Smart_CI ] + uses: ./.github/workflows/job_onnx_runtime.yml + with: + runner: 'aks-linux-16-cores-32gb' + container: '{"image": "openvinogithubactions.azurecr.io/dockerhub/ubuntu:20.04", "volumes": ["/mount:/mount"], "options": "-e SCCACHE_AZURE_BLOB_CONTAINER -e SCCACHE_AZURE_CONNECTION_STRING"}' + sccache-azure-key-prefix: 'ubuntu20_x86_64_onnxruntime' + + ONNX_Models: + name: ONNX Models Tests + if: fromJSON(needs.smart_ci.outputs.affected_components).Python_API.test || + fromJSON(needs.smart_ci.outputs.affected_components).ONNX_FE.test + needs: [ Build, Smart_CI ] + uses: ./.github/workflows/job_onnx_models_tests.yml + with: + runner: 'aks-linux-16-cores-32gb' + container: '{"image": "openvinogithubactions.azurecr.io/dockerhub/ubuntu:20.04", "volumes": ["/mount:/mount"]}' CXX_Unit_Tests: name: C++ unit tests - needs: [Build, Smart_CI] - timeout-minutes: 20 - defaults: - run: - shell: bash - runs-on: aks-linux-4-cores-16gb - container: - image: openvinogithubactions.azurecr.io/dockerhub/ubuntu:20.04 - env: - INSTALL_DIR: /__w/openvino/openvino/install - INSTALL_TEST_DIR: /__w/openvino/openvino/install/tests - - steps: - - name: Download OpenVINO package - uses: actions/download-artifact@v3 - with: - name: openvino_package - path: ${{ env.INSTALL_DIR }} - - - name: Download OpenVINO tests package - uses: actions/download-artifact@v3 - with: - name: 
openvino_tests - path: ${{ env.INSTALL_TEST_DIR }} - - - name: Extract OpenVINO packages - run: | - pushd ${INSTALL_DIR} - tar -xzf openvino_package.tar.gz -C ${INSTALL_DIR} - popd - pushd ${INSTALL_TEST_DIR} - tar -xzf openvino_tests.tar.gz -C ${INSTALL_DIR} - popd + needs: [ Build, Smart_CI ] + uses: ./.github/workflows/job_cxx_unit_tests.yml + with: + runner: 'aks-linux-4-cores-16gb' + image: 'openvinogithubactions.azurecr.io/dockerhub/ubuntu:20.04' + affected-components: ${{ needs.smart_ci.outputs.affected_components }} - - name: Install OpenVINO dependencies - run: ${INSTALL_DIR}/install_dependencies/install_openvino_dependencies.sh -c=core -c=gpu -y - - # - # Tests - # - - - name: OpenVINO Core Unit Tests - if: fromJSON(needs.smart_ci.outputs.affected_components).Core.test - run: | - source ${INSTALL_DIR}/setupvars.sh - ${INSTALL_TEST_DIR}/ov_core_unit_tests --gtest_print_time=1 --gtest_filter=-*IE_GPU* \ - --gtest_output=xml:${INSTALL_TEST_DIR}/TEST-OVCoreUT.xml - - - name: OpenVINO Inference Functional Tests - if: fromJSON(needs.smart_ci.outputs.affected_components).inference.test - run: | - source ${INSTALL_DIR}/setupvars.sh - ${INSTALL_TEST_DIR}/ov_inference_functional_tests --gtest_print_time=1 \ - --gtest_output=xml:${INSTALL_TEST_DIR}/TEST-InferenceFunc.xml - - - name: OpenVINO Inference Unit Tests - if: fromJSON(needs.smart_ci.outputs.affected_components).inference.test - run: | - source ${INSTALL_DIR}/setupvars.sh - ${INSTALL_TEST_DIR}/ov_inference_unit_tests --gtest_print_time=1 \ - --gtest_output=xml:${INSTALL_TEST_DIR}/TEST-InferenceUnit.xml - - - name: Low Precision Transformations Tests - if: fromJSON(needs.smart_ci.outputs.affected_components).LP_transformations.test - run: | - source ${INSTALL_DIR}/setupvars.sh - ${INSTALL_TEST_DIR}/ov_lp_transformations_tests --gtest_print_time=1 \ - --gtest_output=xml:${INSTALL_TEST_DIR}/TEST-LpTransformations.xml - - - name: OpenVINO Conditional compilation tests - if: fromJSON(needs.smart_ci.outputs.affected_components).Core.test - run: | - source ${INSTALL_DIR}/setupvars.sh - ${INSTALL_TEST_DIR}/ov_conditional_compilation_tests --gtest_print_time=1 \ - --gtest_output=xml:${INSTALL_TEST_DIR}/TEST-ConditionalCompilation.xml - - - name: IR frontend tests - if: fromJSON(needs.smart_ci.outputs.affected_components).IR_FE.test - run: | - source ${INSTALL_DIR}/setupvars.sh - ${INSTALL_TEST_DIR}/ov_ir_frontend_tests --gtest_print_time=1 \ - --gtest_output=xml:${INSTALL_TEST_DIR}/TEST-IRFrontend.xml - - - name: PaddlePaddle frontend tests - if: ${{ 'false' }} - run: | - source ${INSTALL_DIR}/setupvars.sh - ${INSTALL_TEST_DIR}/paddle_tests --gtest_print_time=1 \ - --gtest_output=xml:${INSTALL_TEST_DIR}/TEST-PaddleTests.xml - - - name: ONNX frontend tests - if: fromJSON(needs.smart_ci.outputs.affected_components).ONNX_FE.test - run: | - source ${INSTALL_DIR}/setupvars.sh - ${INSTALL_TEST_DIR}/ov_onnx_frontend_tests --gtest_print_time=1 \ - --gtest_filter=-*IE_GPU* \ - --gtest_output=xml:${INSTALL_TEST_DIR}/TEST-ONNXFrontend.xml - - - name: TensorFlow Common frontend tests - if: fromJSON(needs.smart_ci.outputs.affected_components).TF_FE.test || - fromJSON(needs.smart_ci.outputs.affected_components).TFL_FE.test - run: | - source ${INSTALL_DIR}/setupvars.sh - ${INSTALL_TEST_DIR}/ov_tensorflow_common_tests --gtest_print_time=1 \ - --gtest_output=xml:${INSTALL_TEST_DIR}/TEST-TensorFlowCommonFrontend.xml - - - name: TensorFlow frontend tests - if: fromJSON(needs.smart_ci.outputs.affected_components).TF_FE.test - run: | - source 
${INSTALL_DIR}/setupvars.sh - ${INSTALL_TEST_DIR}/ov_tensorflow_frontend_tests --gtest_print_time=1 \ - --gtest_output=xml:${INSTALL_TEST_DIR}/TEST-TensorFlowFrontend.xml - - - name: TensorFlow Lite frontend tests - if: fromJSON(needs.smart_ci.outputs.affected_components).TFL_FE.test - run: | - source ${INSTALL_DIR}/setupvars.sh - ${INSTALL_TEST_DIR}/ov_tensorflow_lite_frontend_tests --gtest_print_time=1 \ - --gtest_output=xml:${INSTALL_TEST_DIR}/TEST-TensorFlowLiteFrontend.xml - - - name: Transformations func tests - if: fromJSON(needs.smart_ci.outputs.affected_components).transformations.test - run: | - source ${INSTALL_DIR}/setupvars.sh - ${INSTALL_TEST_DIR}/ov_transformations_tests --gtest_print_time=1 \ - --gtest_output=xml:${INSTALL_TEST_DIR}/TEST-Transformations.xml - - - name: Legacy Transformations func tests - if: fromJSON(needs.smart_ci.outputs.affected_components).GNA.test - run: | - source ${INSTALL_DIR}/setupvars.sh - ${INSTALL_TEST_DIR}/ov_legacy_transformations_tests --gtest_print_time=1 \ - --gtest_output=xml:${INSTALL_TEST_DIR}/TEST-LegacyTransformations.xml - - - name: Inference Engine 1.0 unit tests - if: fromJSON(needs.smart_ci.outputs.affected_components).GNA.test - run: | - source ${INSTALL_DIR}/setupvars.sh - ${INSTALL_TEST_DIR}/InferenceEngineUnitTests --gtest_print_time=1 \ - --gtest_output=xml:${INSTALL_TEST_DIR}/TEST-InferenceEngineUnitTests.xml - - - name: Common test utils tests - run: | - source ${INSTALL_DIR}/setupvars.sh - ${INSTALL_TEST_DIR}/ov_util_tests --gtest_print_time=1 \ - --gtest_output=xml:${INSTALL_TEST_DIR}/TEST-CommonUtilTests.xml - - - name: Snippets func tests - if: fromJSON(needs.smart_ci.outputs.affected_components).CPU.test - run: | - source ${INSTALL_DIR}/setupvars.sh - ${INSTALL_TEST_DIR}/ov_snippets_func_tests --gtest_print_time=1 \ - --gtest_output=xml:${INSTALL_TEST_DIR}/TEST-SnippetsFuncTests.xml - - - name: CPU plugin unit tests - if: fromJSON(needs.smart_ci.outputs.affected_components).CPU.test - run: | - source ${INSTALL_DIR}/setupvars.sh - ${INSTALL_TEST_DIR}/ov_cpu_unit_tests --gtest_print_time=1 \ - --gtest_output=xml:${INSTALL_TEST_DIR}/TEST-CPUUnitTests.xml - - - name: ov_subgraphs_dumper_tests tests - run: | - source ${INSTALL_DIR}/setupvars.sh - ${INSTALL_TEST_DIR}/ov_subgraphs_dumper_tests --gtest_print_time=1 \ - --gtest_output=xml:${INSTALL_TEST_DIR}/TEST-ov_subgraphs_dumper_tests.xml - - - name: Template OpImpl tests - run: | - source ${INSTALL_DIR}/setupvars.sh - ${INSTALL_TEST_DIR}/ov_op_conformance_tests --gtest_print_time=1 --device=TEMPLATE --gtest_filter=*OpImpl*\ - --gtest_output=xml:${INSTALL_TEST_DIR}/TEST-OpImplTests.xml - - - name: AUTO unit tests - if: fromJSON(needs.smart_ci.outputs.affected_components).AUTO.test - run: | - source ${INSTALL_DIR}/setupvars.sh - ${INSTALL_TEST_DIR}/ov_auto_unit_tests --gtest_print_time=1 \ - --gtest_output=xml:${INSTALL_TEST_DIR}/TEST-ov_auto_unit_tests.xml - - - name: AUTO func Tests - if: fromJSON(needs.smart_ci.outputs.affected_components).AUTO.test - run: | - source ${{ env.INSTALL_DIR }}/setupvars.sh - ${{ env.INSTALL_TEST_DIR }}/ov_auto_func_tests --gtest_print_time=1 \ - --gtest_output=xml:${{ env.INSTALL_TEST_DIR }}/TEST-ov_auto_func_tests.xml - - - name: Template plugin func tests - if: fromJSON(needs.smart_ci.outputs.affected_components).TEMPLATE.test - run: | - source ${INSTALL_DIR}/setupvars.sh - ${INSTALL_TEST_DIR}/ov_template_func_tests --gtest_print_time=1 \ - --gtest_filter=*smoke* \ - --gtest_output=xml:${INSTALL_TEST_DIR}/TEST-TemplateFuncTests.xml - - - 
name: Inference Engine C API tests - if: fromJSON(needs.smart_ci.outputs.affected_components).C_API.test - run: | - source ${INSTALL_DIR}/setupvars.sh - ${INSTALL_TEST_DIR}/InferenceEngineCAPITests --gtest_print_time=1 \ - --gtest_output=xml:${INSTALL_TEST_DIR}/TEST-InferenceEngineCAPITests.xml - - - name: OpenVINO C API tests - if: fromJSON(needs.smart_ci.outputs.affected_components).C_API.test - run: | - source ${INSTALL_DIR}/setupvars.sh - ${INSTALL_TEST_DIR}/ov_capi_test --gtest_print_time=1 \ - --gtest_output=xml:${INSTALL_TEST_DIR}/TEST-OpenVINOCAPITests.xml - - - name: AutoBatch unit tests - if: fromJSON(needs.smart_ci.outputs.affected_components).AUTO_BATCH.test - run: | - source ${INSTALL_DIR}/setupvars.sh - ${INSTALL_TEST_DIR}/ov_auto_batch_unit_tests --gtest_output=xml:${INSTALL_TEST_DIR}/TEST-ov_auto_batch_unit_tests.xml - - - name: AutoBatch func tests - if: fromJSON(needs.smart_ci.outputs.affected_components).AUTO_BATCH.test - run: | - source ${INSTALL_DIR}/setupvars.sh - ${INSTALL_TEST_DIR}/ov_auto_batch_func_tests --gtest_output=xml:${INSTALL_TEST_DIR}/TEST-ov_auto_batch_func_tests.xml - - - name: Proxy Plugin func tests - if: fromJSON(needs.smart_ci.outputs.affected_components).PROXY.test - run: | - source ${INSTALL_DIR}/setupvars.sh - ${INSTALL_TEST_DIR}/ov_proxy_plugin_tests --gtest_print_time=1 --gtest_output=xml:${INSTALL_TEST_DIR}/TEST-OVProxyTests.xml - - - name: Hetero unit tests - if: fromJSON(needs.smart_ci.outputs.affected_components).HETERO.test - run: | - source ${{ env.INSTALL_DIR }}/setupvars.sh - ${{ env.INSTALL_TEST_DIR }}/ov_hetero_unit_tests --gtest_print_time=1 --gtest_output=xml:${{ env.INSTALL_TEST_DIR }}/TEST-OVHeteroUnitTests.xml - - - name: Hetero func tests - if: fromJSON(needs.smart_ci.outputs.affected_components).HETERO.test - run: | - source ${INSTALL_DIR}/setupvars.sh - ${INSTALL_TEST_DIR}/ov_hetero_func_tests --gtest_print_time=1 --gtest_output=xml:${INSTALL_TEST_DIR}/TEST-OVHeteroFuncTests.xml - - - name: Upload Test Results - uses: actions/upload-artifact@v3 - if: ${{ !cancelled() }} - with: - name: test-results-cpp - path: ${{ env.INSTALL_TEST_DIR }}/TEST*.xml - if-no-files-found: 'warn' - - Python_Unit_Tests: - name: Python unit tests - needs: [Build, Smart_CI] - timeout-minutes: 40 - defaults: - run: - shell: bash - runs-on: aks-linux-4-cores-16gb - container: - image: openvinogithubactions.azurecr.io/dockerhub/ubuntu:20.04 - volumes: - - /mount/caches:/mount/caches - env: - OPENVINO_REPO: /__w/openvino/openvino/openvino - INSTALL_DIR: /__w/openvino/openvino/install - INSTALL_TEST_DIR: /__w/openvino/openvino/install/tests - LAYER_TESTS_INSTALL_DIR: /__w/openvino/openvino/install/tests/layer_tests - - steps: - # - # Initialize OpenVINO - # - - name: Download OpenVINO package - uses: actions/download-artifact@v3 - with: - name: openvino_package - path: ${{ env.INSTALL_DIR }} - - - name: Download OpenVINO tests package - uses: actions/download-artifact@v3 - with: - name: openvino_tests - path: ${{ env.INSTALL_TEST_DIR }} - - - name: Extract OpenVINO packages - run: | - pushd ${INSTALL_DIR} - tar -xzf openvino_package.tar.gz -C ${INSTALL_DIR} - popd - - pushd ${INSTALL_TEST_DIR} - tar -xzf openvino_tests.tar.gz -C ${INSTALL_DIR} - popd - - - name: Fetch setup_python action - uses: actions/checkout@v4 - with: - sparse-checkout: | - .github/actions/setup_python/action.yml - sparse-checkout-cone-mode: false - path: ${{ env.OPENVINO_REPO }} - - - name: Setup Python ${{ env.PYTHON_VERSION }} - uses: ./openvino/.github/actions/setup_python - with: 
- version: ${{ env.PYTHON_VERSION }} - pip-cache-path: ${{ env.PIP_CACHE_PATH }} - should-setup-pip-paths: 'true' - - - name: Install OpenVINO dependencies - run: ${INSTALL_DIR}/install_dependencies/install_openvino_dependencies.sh -c=core -y - - - name: Install OpenVINO Python wheels - run: | - # Install the core OV wheel - python3 -m pip install ${INSTALL_DIR}/tools/openvino-*.whl - - # Find and install OV dev wheel - pushd ${INSTALL_DIR}/tools - ov_dev_wheel_name=$(find . -name 'openvino_dev*.whl') - python3 -m pip install $ov_dev_wheel_name[mxnet,caffe,kaldi,onnx,tensorflow2,pytorch] - popd - - - name: Install Python API tests dependencies - run: | - # To enable pytest parallel features - python3 -m pip install pytest-xdist[psutil] - # For torchvision to OpenVINO preprocessing converter - python3 -m pip install -r ${INSTALL_TEST_DIR}/python/preprocess/torchvision/requirements.txt - - # TODO: replace with Python API tests requirements - python3 -m pip install -r ${INSTALL_TEST_DIR}/mo/requirements_dev.txt - - # - # Tests - # - - - name: Python API 1.0 Tests - #if: fromJSON(needs.smart_ci.outputs.affected_components).Python_API.test - run: | - python3 -m pytest -s ${INSTALL_TEST_DIR}/pyngraph \ - --junitxml=${INSTALL_TEST_DIR}/TEST-Pyngraph.xml \ - --ignore=${INSTALL_TEST_DIR}/pyngraph/tests_compatibility/test_onnx/test_zoo_models.py \ - --ignore=${INSTALL_TEST_DIR}/pyngraph/tests_compatibility/test_onnx/test_backend.py - - - name: Python API 2.0 Tests - #if: fromJSON(needs.smart_ci.outputs.affected_components).Python_API.test - run: | - # for 'template' extension - export LD_LIBRARY_PATH=${INSTALL_TEST_DIR}:$LD_LIBRARY_PATH - python3 -m pytest -sv ${INSTALL_TEST_DIR}/pyopenvino \ - --junitxml=${INSTALL_TEST_DIR}/TEST-Pyngraph.xml \ - --ignore=${INSTALL_TEST_DIR}/pyopenvino/tests/test_utils/test_utils.py - - - name: Model Optimizer unit tests - if: fromJSON(needs.smart_ci.outputs.affected_components).MO.test - run: | - # required for MxNet - apt-get install -y libgomp1 libquadmath0 - - python3 -m pytest -s ${INSTALL_TEST_DIR}/mo/unit_tests \ - --junitxml=${INSTALL_TEST_DIR}/TEST-ModelOptimizer.xml - - - name: Python ONNX operators tests - if: fromJSON(needs.smart_ci.outputs.affected_components).Python_API.test || - fromJSON(needs.smart_ci.outputs.affected_components).ONNX_FE.test - run: | - # Skip test_onnx/test_zoo_models and test_onnx/test_backend due to long execution time - ONNX Model Zoo tests are run separately - python3 -m pytest -sv ${INSTALL_TEST_DIR}/onnx -k 'not cuda' \ - --junitxml=${INSTALL_TEST_DIR}/TEST-onnx_frontend.xml \ - --ignore=${INSTALL_TEST_DIR}/onnx/test_python/test_zoo_models.py - - - name: OVC unit tests - if: fromJSON(needs.smart_ci.outputs.affected_components).MO.test - run: python3 -m pytest -s ${INSTALL_TEST_DIR}/ovc/unit_tests --junitxml=${INSTALL_TEST_DIR}/TEST-OpenVinoConversion.xml - - - name: Install Python Layer tests dependencies - run: | - # layer test requirements - python3 -m pip install -r ${LAYER_TESTS_INSTALL_DIR}/requirements.txt - - - name: MO Python API Tests - if: fromJSON(needs.smart_ci.outputs.affected_components).MO.test - run: | - # Import 'test_utils' installed in '/tests/python/openvino' - export LD_LIBRARY_PATH=${PIP_INSTALL_PATH}/openvino/libs:$LD_LIBRARY_PATH - export PYTHONPATH=${INSTALL_TEST_DIR}/python - python3 -m pytest ${LAYER_TESTS_INSTALL_DIR}/mo_python_api_tests --junitxml=${INSTALL_TEST_DIR}/TEST-test_mo_convert.xml - env: - TEST_DEVICE: CPU - TEST_PRECISION: FP16 - - - name: OVC Python API Tests - if: 
fromJSON(needs.smart_ci.outputs.affected_components).MO.test - run: | - # Import 'test_utils' installed in '/tests/python/openvino' - export PYTHONPATH=${INSTALL_TEST_DIR}/python - export LD_LIBRARY_PATH=${PIP_INSTALL_PATH}/openvino/libs:$LD_LIBRARY_PATH - python3 -m pytest ${LAYER_TESTS_INSTALL_DIR}/ovc_python_api_tests --junitxml=${INSTALL_TEST_DIR}/TEST-test_ovc_convert.xml - env: - TEST_DEVICE: CPU - TEST_PRECISION: FP16 - - - name: Python Frontend tests - if: fromJSON(needs.smart_ci.outputs.affected_components).PyTorch_FE.test || - fromJSON(needs.smart_ci.outputs.affected_components).PDPD_FE.test - run: | - # to allow 'libtest_builtin_extensions.so' to find 'libopenvino_onnx_frontend.so' - export LD_LIBRARY_PATH=${PIP_INSTALL_PATH}/openvino/libs:$LD_LIBRARY_PATH - python3 -m pytest ${LAYER_TESTS_INSTALL_DIR}/py_frontend_tests --junitxml=${INSTALL_TEST_DIR}/TEST-test_py_fontend.xml - - - name: PyTorch Layer Tests - if: fromJSON(needs.smart_ci.outputs.affected_components).PyTorch_FE.test - run: python3 -m pytest ${LAYER_TESTS_INSTALL_DIR}/pytorch_tests -n logical -m precommit --junitxml=${INSTALL_TEST_DIR}/TEST-pytorch.xml - env: - TEST_DEVICE: CPU - TEST_PRECISION: FP32 - - - name: PyTorch torch.compile TORCHFX Layer Tests - if: fromJSON(needs.smart_ci.outputs.affected_components).PyTorch_FE.test - run: | - python3 -m pytest ${LAYER_TESTS_INSTALL_DIR}/pytorch_tests -m precommit_fx_backend --junitxml=${INSTALL_TEST_DIR}/TEST-pytorch.xml - env: - TEST_DEVICE: CPU - TEST_PRECISION: FP32 - PYTORCH_TRACING_MODE: TORCHFX - - - name: PyTorch torch.compile TORCHSCRIPT Layer Tests - if: fromJSON(needs.smart_ci.outputs.affected_components).PyTorch_FE.test - run: | - python3 -m pytest ${LAYER_TESTS_INSTALL_DIR}/pytorch_tests -m precommit_ts_backend --junitxml=${INSTALL_TEST_DIR}/TEST-pytorch.xml - env: - TEST_DEVICE: CPU - TEST_PRECISION: FP32 - PYTORCH_TRACING_MODE: TORCHSCRIPT - - - name: ONNX Layer Tests - if: fromJSON(needs.smart_ci.outputs.affected_components).ONNX_FE.test - run: | - # requires 'unit_tests' from 'tools/mo' - export PYTHONPATH=${INSTALL_TEST_DIR}/mo:$PYTHONPATH - python3 -m pytest ${LAYER_TESTS_INSTALL_DIR}/onnx_tests -m "not launch_only_if_manually_specified and precommit" --junitxml=${INSTALL_TEST_DIR}/TEST-onnx.xml - env: - TEST_DEVICE: CPU - TEST_PRECISION: FP16 - - - name: TensorFlow 1 Layer Tests - TF FE - if: fromJSON(needs.smart_ci.outputs.affected_components).TF_FE.test - run: | - # requires 'unit_tests' from 'mo' - export PYTHONPATH=${INSTALL_TEST_DIR}/mo - python3 -m pytest ${LAYER_TESTS_INSTALL_DIR}/tensorflow_tests/ --use_new_frontend -m precommit_tf_fe --junitxml=${INSTALL_TEST_DIR}/TEST-tf_fe.xml - env: - TEST_DEVICE: CPU - TEST_PRECISION: FP16 - - - name: TensorFlow 2 Layer Tests - TF FE - if: fromJSON(needs.smart_ci.outputs.affected_components).TF_FE.test - run: | - # requires 'unit_tests' from 'mo' - export PYTHONPATH=${INSTALL_TEST_DIR}/mo - python3 -m pytest ${LAYER_TESTS_INSTALL_DIR}/tensorflow2_keras_tests/ --use_new_frontend -m precommit_tf_fe --junitxml=${INSTALL_TEST_DIR}/TEST-tf2_fe.xml - env: - TEST_DEVICE: CPU - TEST_PRECISION: FP16 - - - name: JAX Layer Tests - TF FE - if: fromJSON(needs.smart_ci.outputs.affected_components).TF_FE.test - run: python3 -m pytest ${LAYER_TESTS_INSTALL_DIR}/jax_tests/ -m precommit --junitxml=${INSTALL_TEST_DIR}/TEST-jax.xml - env: - TEST_DEVICE: CPU - - - name: TensorFlow 1 Layer Tests - Legacy FE - if: fromJSON(needs.smart_ci.outputs.affected_components).TF_FE.test - run: python3 -m pytest 
${LAYER_TESTS_INSTALL_DIR}/tensorflow_tests/test_tf_Roll.py --ir_version=10 --junitxml=${INSTALL_TEST_DIR}/TEST-tf_Roll.xml - - - name: TensorFlow 2 Layer Tests - Legacy FE - if: fromJSON(needs.smart_ci.outputs.affected_components).TF_FE.test - run: python3 -m pytest ${LAYER_TESTS_INSTALL_DIR}/tensorflow2_keras_tests/test_tf2_keras_activation.py --ir_version=11 -k "sigmoid" --junitxml=${INSTALL_TEST_DIR}/TEST-tf2_Activation.xml - env: - TEST_DEVICE: CPU - TEST_PRECISION: FP16 - - - name: TensorFlow Lite Layer Tests - TFL FE - if: fromJSON(needs.smart_ci.outputs.affected_components).TFL_FE.test - run: python3 -m pytest ${LAYER_TESTS_INSTALL_DIR}/tensorflow_lite_tests/ --junitxml=${INSTALL_TEST_DIR}/TEST-tfl_fe.xml - env: - TEST_DEVICE: CPU - TEST_PRECISION: FP16 - - - name: Clone API snippets - uses: actions/checkout@v4 - with: - sparse-checkout: openvino/docs/snippets - path: ${{ env.OPENVINO_REPO }} - submodules: 'false' - - - name: Docs Python snippets - run: | - # to find 'snippets' module in docs - export PYTHONPATH=${OPENVINO_REPO}/docs - # for 'template' extension - export LD_LIBRARY_PATH=${INSTALL_TEST_DIR}:$LD_LIBRARY_PATH - python3 ${OPENVINO_REPO}/docs/snippets/main.py - - - name: Upload Test Results - uses: actions/upload-artifact@v3 - if: ${{ !cancelled() }} - with: - name: test-results-python - path: | - ${{ env.INSTALL_TEST_DIR }}/TEST*.html - ${{ env.INSTALL_TEST_DIR }}/TEST*.xml - if-no-files-found: 'warn' + Python_Unit_Tests: + name: Python unit tests + needs: [ Build, Smart_CI ] + uses: ./.github/workflows/job_python_unit_tests.yml + with: + runner: 'aks-linux-4-cores-16gb' + container: '{"image": "openvinogithubactions.azurecr.io/dockerhub/ubuntu:20.04", "volumes": ["/mount:/mount"]}' + affected-components: ${{ needs.smart_ci.outputs.affected_components }} CPU_Functional_Tests: name: CPU functional tests - needs: [Build, Smart_CI] - timeout-minutes: 25 - defaults: - run: - shell: bash - runs-on: aks-linux-8-cores-32gb - container: - image: openvinogithubactions.azurecr.io/dockerhub/ubuntu:20.04 - env: - OPENVINO_REPO: /__w/openvino/openvino/openvino - INSTALL_DIR: /__w/openvino/openvino/install - INSTALL_TEST_DIR: /__w/openvino/openvino/install/tests - PARALLEL_TEST_SCRIPT: /__w/openvino/openvino/install/tests/functional_test_utils/layer_tests_summary/run_parallel.py - PARALLEL_TEST_CACHE: /__w/openvino/openvino/install/tests/test_cache.lst if: fromJSON(needs.smart_ci.outputs.affected_components).CPU.test - steps: - - name: Download OpenVINO package - uses: actions/download-artifact@v3 - with: - name: openvino_package - path: ${{ env.INSTALL_DIR }} - - - name: Download OpenVINO tests package - uses: actions/download-artifact@v3 - with: - name: openvino_tests - path: ${{ env.INSTALL_TEST_DIR }} - - - name: Extract OpenVINO packages - run: | - pushd ${INSTALL_DIR} - tar -xzf openvino_package.tar.gz -C ${INSTALL_DIR} - popd - pushd ${INSTALL_TEST_DIR} - tar -xzf openvino_tests.tar.gz -C ${INSTALL_DIR} - popd - - - name: Install OpenVINO dependencies - run: bash ${INSTALL_DIR}/install_dependencies/install_openvino_dependencies.sh -c=core -y - - - name: Fetch setup_python action - uses: actions/checkout@v4 - with: - sparse-checkout: | - .github/actions/setup_python/action.yml - sparse-checkout-cone-mode: false - path: ${{ env.OPENVINO_REPO }} - - - name: Setup Python ${{ env.PYTHON_VERSION }} - uses: ./openvino/.github/actions/setup_python - with: - version: ${{ env.PYTHON_VERSION }} - should-setup-pip-paths: 'false' - - - name: Install python dependencies for run_parallel.py 
- run: python3 -m pip install -r ${INSTALL_TEST_DIR}/functional_test_utils/layer_tests_summary/requirements.txt - - - name: Restore tests execution time - uses: actions/cache/restore@v3 - with: - path: ${{ env.PARALLEL_TEST_CACHE }} - key: ${{ runner.os }}-tests-functional-cpu-stamp-${{ github.sha }} - restore-keys: | - ${{ runner.os }}-tests-functional-cpu-stamp - - - name: Intel CPU plugin func tests (parallel) - run: | - source ${INSTALL_DIR}/setupvars.sh - python3 ${PARALLEL_TEST_SCRIPT} -e ${INSTALL_TEST_DIR}/ov_cpu_func_tests -c ${PARALLEL_TEST_CACHE} -w ${INSTALL_TEST_DIR} -s suite -rf 0 -- --gtest_print_time=1 --gtest_filter=*smoke* - timeout-minutes: 20 - - - name: Save tests execution time - uses: actions/cache/save@v3 - if: github.ref_name == 'master' - with: - path: ${{ env.PARALLEL_TEST_CACHE }} - key: ${{ runner.os }}-tests-functional-cpu-stamp-${{ github.sha }} - - - name: Upload Test Results - uses: actions/upload-artifact@v3 - if: ${{ !cancelled() }} - with: - name: test-results-functional-cpu - path: | - ${{ env.INSTALL_TEST_DIR }}/temp/*.log - ${{ env.INSTALL_TEST_DIR }}/logs/*.log - ${{ env.INSTALL_TEST_DIR }}/logs/failed/*.log - ${{ env.INSTALL_TEST_DIR }}/logs/crashed/*.log - ${{ env.INSTALL_TEST_DIR }}/logs/hanged/*.log - ${{ env.INSTALL_TEST_DIR }}/logs/interapted/*.log - ${{ env.INSTALL_TEST_DIR }}/logs/hash_table.csv - ${{ env.PARALLEL_TEST_CACHE }} - if-no-files-found: 'error' - + needs: [ Build, Smart_CI ] + uses: ./.github/workflows/job_cpu_functional_tests.yml + with: + runner: 'aks-linux-8-cores-32gb' + image: 'openvinogithubactions.azurecr.io/dockerhub/ubuntu:20.04' + + # TODO: Switch back to self-hosted runners + # container: + # image: openvinogithubactions.azurecr.io/dockerhub/ubuntu:20.04 + # volumes: + # - /mount:/mount TensorFlow_Hub_Models_Tests: name: TensorFlow Hub Models tests - needs: [Build, Smart_CI] - defaults: - run: - shell: bash - runs-on: ${{ github.event_name == 'schedule' && 'ubuntu-20.04-16-cores' || 'ubuntu-20.04-8-cores'}} - timeout-minutes: ${{ github.event_name == 'schedule' && 400 || 25 }} - # TODO: Switch back to self-hosted runners - # container: - # image: openvinogithubactions.azurecr.io/dockerhub/ubuntu:20.04 - # volumes: - # - /mount/caches:/mount/caches - env: - OPENVINO_REPO: ${{ github.workspace }}/openvino - INSTALL_DIR: ${{ github.workspace }}/install - INSTALL_TEST_DIR: ${{ github.workspace }}/install/tests - MODEL_HUB_TESTS_INSTALL_DIR: ${{ github.workspace }}/install/tests/model_hub_tests if: fromJSON(needs.smart_ci.outputs.affected_components).TF_FE.test || fromJSON(needs.smart_ci.outputs.affected_components).TFL_FE.test - - steps: - - name: Check sudo - run: if [ "$(id -u)" -eq 0 ]; then apt update && apt --assume-yes install sudo; fi - - - name: Download OpenVINO package - uses: actions/download-artifact@v3 - with: - name: openvino_package - path: ${{ env.INSTALL_DIR }} - - - name: Download OpenVINO tests package - uses: actions/download-artifact@v3 - with: - name: openvino_tests - path: ${{ env.INSTALL_TEST_DIR }} - - - name: Extract OpenVINO packages - run: | - pushd ${INSTALL_DIR} - tar -xzf openvino_package.tar.gz -C ${INSTALL_DIR} - popd - - pushd ${INSTALL_TEST_DIR} - tar -xzf openvino_tests.tar.gz -C ${INSTALL_DIR} - popd - - - name: Fetch setup_python action - uses: actions/checkout@v4 - with: - sparse-checkout: | - .github/actions/setup_python/action.yml - sparse-checkout-cone-mode: false - path: 'openvino' - - - name: Setup Python ${{ env.PYTHON_VERSION }} - uses: ./openvino/.github/actions/setup_python - 
with: - version: ${{ env.PYTHON_VERSION }} - should-setup-pip-paths: 'false' - self-hosted-runner: 'false' - - - name: Install OpenVINO Python wheels - run: python3 -m pip install ${INSTALL_DIR}/tools/openvino-* - - - name: Install TF Hub tests requirements - run: | - python3 -m pip install -r ${MODEL_HUB_TESTS_INSTALL_DIR}/tf_hub_tests/requirements.txt - - - name: TensorFlow Hub Tests - TF FE - run: | - export PYTHONPATH=${MODEL_HUB_TESTS_INSTALL_DIR}:$PYTHONPATH - python3 -m pytest ${MODEL_HUB_TESTS_INSTALL_DIR}/tf_hub_tests/ -m ${TYPE} --html=${INSTALL_TEST_DIR}/TEST-tf_hub_tf_fe.html --self-contained-html -v - env: - TYPE: ${{ github.event_name == 'schedule' && 'nightly' || 'precommit'}} - TEST_DEVICE: CPU - - - name: Upload Test Results - uses: actions/upload-artifact@v3 - if: ${{ !cancelled() }} - with: - name: test-results-tensorflow-hub-models - path: | - ${{ env.INSTALL_TEST_DIR }}/TEST*.html - if-no-files-found: 'error' - - TensorFlow_Hub_Performance_Models_Tests: - name: TensorFlow Hub Performance Models tests - needs: [Build, Smart_CI] - defaults: - run: - shell: bash - runs-on: ${{ github.event_name == 'schedule' && 'ubuntu-20.04-16-cores' || 'ubuntu-20.04-8-cores'}} - timeout-minutes: ${{ github.event_name == 'schedule' && 400 || 25 }} - env: - OPENVINO_REPO: ${{ github.workspace }}/openvino - INSTALL_DIR: ${{ github.workspace }}/install - INSTALL_TEST_DIR: ${{ github.workspace }}/install/tests - MODEL_HUB_TESTS_INSTALL_DIR: ${{ github.workspace }}/install/tests/model_hub_tests - if: fromJSON(needs.smart_ci.outputs.affected_components).TF_FE.test || - fromJSON(needs.smart_ci.outputs.affected_components).TFL_FE.test - - steps: - - name: Check sudo - run: if [ "$(id -u)" -eq 0 ]; then apt update && apt --assume-yes install sudo; fi - - - name: Download OpenVINO package - uses: actions/download-artifact@v3 - with: - name: openvino_package - path: ${{ env.INSTALL_DIR }} - - - name: Download OpenVINO tests package - uses: actions/download-artifact@v3 - with: - name: openvino_tests - path: ${{ env.INSTALL_TEST_DIR }} - - - name: Extract OpenVINO packages - run: | - pushd ${INSTALL_DIR} - tar -xzf openvino_package.tar.gz -C ${INSTALL_DIR} - popd - - pushd ${INSTALL_TEST_DIR} - tar -xzf openvino_tests.tar.gz -C ${INSTALL_DIR} - popd - - - name: Fetch setup_python action - uses: actions/checkout@v4 - with: - sparse-checkout: | - .github/actions/setup_python/action.yml - sparse-checkout-cone-mode: false - path: 'openvino' - - - name: Setup Python ${{ env.PYTHON_VERSION }} - uses: ./openvino/.github/actions/setup_python - with: - version: ${{ env.PYTHON_VERSION }} - should-setup-pip-paths: 'false' - self-hosted-runner: 'false' - - - name: Install OpenVINO Python wheels - run: python3 -m pip install ${INSTALL_DIR}/tools/openvino-* - - - name: Install TF Hub tests requirements - run: | - python3 -m pip install -r ${MODEL_HUB_TESTS_INSTALL_DIR}/tf_hub_tests/requirements.txt - - - name: Install Hub Performance tests requirements - run: | - python3 -m pip install -r ${MODEL_HUB_TESTS_INSTALL_DIR}/performance_tests/requirements.txt - - - name: Performance Hub Tests - run: | - export PYTHONPATH=${MODEL_HUB_TESTS_INSTALL_DIR}:$PYTHONPATH - python3 -m pytest ${MODEL_HUB_TESTS_INSTALL_DIR}/performance_tests/ -m ${TYPE} --html=${INSTALL_TEST_DIR}/TEST-tf_hub_performance.html --self-contained-html -v - env: - TYPE: ${{ github.event_name == 'schedule' && 'nightly' || 'precommit'}} - TEST_DEVICE: CPU - - - name: Upload Test Results - uses: actions/upload-artifact@v3 - if: ${{ !cancelled() }} - with: 
- name: test-results-tensorflow-hub-performance-models - path: | - ${{ env.INSTALL_TEST_DIR }}/TEST*.html - if-no-files-found: 'error' - + needs: [ Build, Smart_CI ] + uses: ./.github/workflows/job_tensorflow_hub_models_tests.yml + with: + runner: ${{ github.event_name == 'schedule' && 'ubuntu-20.04-16-cores' || 'ubuntu-20.04-8-cores' }} + event: ${{ github.event_name }} + + # TODO: Switch back to self-hosted runners + # container: + # image: openvinogithubactions.azurecr.io/dockerhub/ubuntu:20.04 + # volumes: + # - /mount:/mount PyTorch_Models_Tests: name: PyTorch Models tests - needs: [Build, Smart_CI] - timeout-minutes: ${{ github.event_name == 'schedule' && 400 || 30 }} - defaults: - run: - shell: bash - runs-on: ${{ github.event_name == 'schedule' && 'ubuntu-20.04-16-cores' || 'ubuntu-20.04-8-cores'}} - # TODO: Switch back to self-hosted runners - # container: - # image: openvinogithubactions.azurecr.io/dockerhub/ubuntu:20.04 - # volumes: - # - /mount/caches:/mount/caches - env: - OPENVINO_REPO: ${{ github.workspace }}/openvino - INSTALL_DIR: ${{ github.workspace }}/install - INSTALL_TEST_DIR: ${{ github.workspace }}/install/tests - MODEL_HUB_TESTS_INSTALL_DIR: ${{ github.workspace }}/install/tests/model_hub_tests if: fromJSON(needs.smart_ci.outputs.affected_components).PyTorch_FE.test - - steps: - - name: Check sudo - run: if [ "$(id -u)" -eq 0 ]; then apt update && apt --assume-yes install sudo; fi - - - name: Install dependencies - run: | - # install git (required to build pip deps from the sources) - # install 'g++' to build 'detectron2' and 'natten' wheels - sudo apt-get install --assume-yes --no-install-recommends g++ git ca-certificates - - - name: Download OpenVINO package - uses: actions/download-artifact@v3 - with: - name: openvino_package - path: ${{ env.INSTALL_DIR }} - - - name: Download OpenVINO tests package - uses: actions/download-artifact@v3 - with: - name: openvino_tests - path: ${{ env.INSTALL_TEST_DIR }} - - - name: Extract OpenVINO packages - run: | - pushd ${INSTALL_DIR} - tar -xzf openvino_package.tar.gz -C ${INSTALL_DIR} - popd - pushd ${INSTALL_TEST_DIR} - tar -xzf openvino_tests.tar.gz -C ${INSTALL_DIR} - popd - - - name: Fetch setup_python action - uses: actions/checkout@v4 - with: - sparse-checkout: | - .github/actions/setup_python/action.yml - sparse-checkout-cone-mode: false - path: 'openvino' - - - name: Setup Python ${{ env.PYTHON_VERSION }} - uses: ./openvino/.github/actions/setup_python - with: - version: ${{ env.PYTHON_VERSION }} - should-setup-pip-paths: 'false' - self-hosted-runner: 'false' - - - name: Install OpenVINO Python wheels - run: python3 -m pip install ${INSTALL_DIR}/tools/openvino-* - - - name: Install PyTorch tests requirements - run: | - python3 -m pip install -r ${MODEL_HUB_TESTS_INSTALL_DIR}/torch_tests/requirements.txt - python3 -m pip install -r ${MODEL_HUB_TESTS_INSTALL_DIR}/torch_tests/requirements_secondary.txt - echo "Available storage:" - df -h - env: - CPLUS_INCLUDE_PATH: ${{ env.Python_ROOT_DIR }}/include/python${{ env.PYTHON_VERSION }} - - - name: PyTorch Models Tests - run: | - export PYTHONPATH=${MODEL_HUB_TESTS_INSTALL_DIR}:$PYTHONPATH - python3 -m pytest ${MODEL_HUB_TESTS_INSTALL_DIR}/torch_tests -m ${TYPE} --html=${INSTALL_TEST_DIR}/TEST-torch_model_tests.html --self-contained-html -v - env: - TYPE: ${{ github.event_name == 'schedule' && 'nightly' || 'precommit'}} - TEST_DEVICE: CPU - USE_SYSTEM_CACHE: False - - - name: Available storage after tests - run: | - echo "Available storage:" - df -h - - - name: Upload Test 
Results - uses: actions/upload-artifact@v3 - if: ${{ !cancelled() }} - with: - name: test-results-torch-models - path: | - ${{ env.INSTALL_TEST_DIR }}/TEST*.html - if-no-files-found: 'error' + needs: [ Build, Smart_CI ] + uses: ./.github/workflows/job_pytorch_models_tests.yml + with: + runner: ${{ github.event_name == 'schedule' && 'ubuntu-20.04-16-cores' || 'ubuntu-20.04-8-cores' }} + event: ${{ github.event_name }} NVIDIA_Plugin: name: NVIDIA plugin - needs: [Build, Smart_CI] + needs: [ Build, Smart_CI ] timeout-minutes: 15 defaults: run: @@ -1552,7 +465,7 @@ jobs: container: image: openvinogithubactions.azurecr.io/dockerhub/nvidia/cuda:11.8.0-runtime-ubuntu20.04 volumes: - - /mount/caches:/mount/caches + - /mount:/mount options: -e SCCACHE_AZURE_BLOB_CONTAINER -e SCCACHE_AZURE_CONNECTION_STRING env: CMAKE_BUILD_TYPE: 'Release' @@ -1660,7 +573,7 @@ jobs: run: ${SCCACHE_PATH} --show-stats GPU_Stub: - needs: [Build, Smart_CI] + needs: [ Build, Smart_CI ] runs-on: ubuntu-latest if: fromJSON(needs.smart_ci.outputs.affected_components).GPU steps: @@ -1673,7 +586,7 @@ jobs: Overall_Status: name: ci/gha_overall_status needs: [Smart_CI, Build, Debian_Packages, Samples, Conformance, ONNX_Runtime, CXX_Unit_Tests, Python_Unit_Tests, - CPU_Functional_Tests, TensorFlow_Hub_Models_Tests, TensorFlow_Hub_Performance_Models_Tests, PyTorch_Models_Tests, NVIDIA_Plugin] + CPU_Functional_Tests, TensorFlow_Hub_Models_Tests, PyTorch_Models_Tests, NVIDIA_Plugin, ONNX_Models] if: ${{ always() }} runs-on: ubuntu-latest steps: diff --git a/.github/workflows/linux_arm64.yml b/.github/workflows/linux_arm64.yml index a611f59db5a55c..a695b05489592c 100644 --- a/.github/workflows/linux_arm64.yml +++ b/.github/workflows/linux_arm64.yml @@ -1,7 +1,7 @@ name: Linux ARM64 (Ubuntu 20.04, Python 3.11) on: workflow_dispatch: - # pull_request: + pull_request: push: branches: - master @@ -52,7 +52,7 @@ jobs: container: image: openvinogithubactions.azurecr.io/dockerhub/ubuntu:20.04 volumes: - - /mount/caches:/mount/caches + - /mount:/mount options: -e SCCACHE_AZURE_BLOB_CONTAINER -e SCCACHE_AZURE_CONNECTION_STRING env: DEBIAN_FRONTEND: noninteractive # to prevent apt-get from waiting user input @@ -117,7 +117,6 @@ jobs: version: ${{ env.PYTHON_VERSION }} pip-cache-path: ${{ env.PIP_CACHE_PATH }} should-setup-pip-paths: 'true' - self-hosted-runner: 'true' show-cache-info: 'true' - name: Install python dependencies @@ -150,7 +149,7 @@ jobs: -DENABLE_NCC_STYLE=OFF \ -DENABLE_TESTS=ON \ -DENABLE_STRICT_DEPENDENCIES=OFF \ - -DENABLE_SYSTEM_TBB=ON \ + -DENABLE_SYSTEM_TBB=OFF \ -DENABLE_SYSTEM_OPENCL=ON \ -DCMAKE_VERBOSE_MAKEFILE=ON \ -DCPACK_GENERATOR=TGZ \ @@ -197,6 +196,7 @@ jobs: popd - name: Build Debian packages + if: ${{ 'false' }} run: | /usr/bin/python3.8 -m pip install -U pip /usr/bin/python3.8 -m pip install -r ${OPENVINO_REPO}/src/bindings/python/wheel/requirements-dev.txt @@ -240,7 +240,7 @@ jobs: if-no-files-found: 'error' - name: Upload openvino debian packages - if: ${{ always() }} + if: ${{ 'false' }} uses: actions/upload-artifact@v3 with: name: openvino_debian_packages @@ -258,1208 +258,85 @@ jobs: Debian_Packages: name: Debian Packages needs: Build - timeout-minutes: 10 - defaults: - run: - shell: bash - runs-on: 'aks-linux-16-cores-arm' - container: + if: ${{ 'false' }} + uses: ./.github/workflows/job_debian_packages.yml + with: + runner: 'aks-linux-16-cores-arm' image: 'openvinogithubactions.azurecr.io/dockerhub/ubuntu:20.04' - env: - DEBIAN_FRONTEND: noninteractive # to prevent apt-get from waiting user input - 
DEBIAN_PACKAGES_DIR: /__w/openvino/packages/ - - steps: - - name: Download OpenVINO debian packages - uses: actions/download-artifact@v3 - with: - name: openvino_debian_packages - path: ${{ env.DEBIAN_PACKAGES_DIR }} - - - name: Install debian packages & check conflicts - run: | - apt-get update -y - # install our local one - apt-get install --no-install-recommends -y dpkg-dev - dpkg-scanpackages . /dev/null | gzip -9c > Packages.gz - echo "deb [trusted=yes] file:${DEBIAN_PACKAGES_DIR} ./" | tee /etc/apt/sources.list.d/openvino-local.list - apt-get update -y - apt-get install openvino -y - working-directory: ${{ env.DEBIAN_PACKAGES_DIR }} - - - name: Test debian packages - run: | - /usr/share/openvino/samples/cpp/build_samples.sh - /usr/share/openvino/samples/c/build_samples.sh - - ~/openvino_cpp_samples_build/aarch64/Release/hello_query_device - - python3 /usr/share/openvino/samples/python/hello_query_device/hello_query_device.py - python3 -c 'from openvino import Core; Core().get_property("CPU", "AVAILABLE_DEVICES")' - python3 -c 'from openvino import Core; Core().get_property("AUTO", "SUPPORTED_METRICS")' - python3 -c 'from openvino import Core; Core().get_property("MULTI", "SUPPORTED_METRICS")' - python3 -c 'from openvino import Core; Core().get_property("HETERO", "SUPPORTED_METRICS")' - python3 -c 'from openvino import Core; Core().get_property("BATCH", "SUPPORTED_METRICS")' - python3 -c 'from openvino.frontend import FrontEndManager; assert len(FrontEndManager().get_available_front_ends()) == 6' - benchmark_app --help - ovc --help Samples: - needs: [Build, Smart_CI] - timeout-minutes: 20 - defaults: - run: - shell: bash - runs-on: 'aks-linux-16-cores-arm' - container: - image: 'openvinogithubactions.azurecr.io/dockerhub/ubuntu:20.04' - env: - DEBIAN_FRONTEND: noninteractive # to prevent apt-get from waiting user input - INSTALL_DIR: /__w/openvino/openvino/install - INSTALL_TEST_DIR: /__w/openvino/openvino/install/tests - BUILD_DIR: /__w/openvino/openvino/build + needs: [ Build, Smart_CI ] if: fromJSON(needs.smart_ci.outputs.affected_components).samples - - steps: - - name: Download OpenVINO package - uses: actions/download-artifact@v3 - with: - name: openvino_package - path: ${{ env.INSTALL_DIR }} - - - name: Download OpenVINO tests package - uses: actions/download-artifact@v3 - with: - name: openvino_tests - path: ${{ env.INSTALL_TEST_DIR }} - - - name: Extract OpenVINO packages - run: | - pushd ${INSTALL_DIR} - tar -xzf openvino_package.tar.gz -C ${INSTALL_DIR} - popd - pushd ${INSTALL_TEST_DIR} - tar -xzf openvino_tests.tar.gz -C ${INSTALL_DIR} - popd - - - name: Install OpenVINO dependencies - run: ${INSTALL_DIR}/install_dependencies/install_openvino_dependencies.sh -c=core -c=dev -y - - - name: Fetch setup_python action - uses: actions/checkout@v4 - with: - sparse-checkout: | - .github/actions/setup_python/action.yml - sparse-checkout-cone-mode: false - path: 'openvino' - - - name: Setup Python ${{ env.PYTHON_VERSION }} - uses: ./openvino/.github/actions/setup_python - with: - version: ${{ env.PYTHON_VERSION }} - should-setup-pip-paths: 'false' - self-hosted-runner: 'true' - - - name: Build cpp samples - GCC - run: ${INSTALL_DIR}/samples/cpp/build_samples.sh -i ${INSTALL_DIR} -b ${BUILD_DIR}/cpp_samples - env: - CMAKE_COMPILE_WARNING_AS_ERROR: 'ON' - - - name: Build cpp samples - Clang - run: | - apt-get install -y clang - ${INSTALL_DIR}/samples/cpp/build_samples.sh -i ${INSTALL_DIR} -b ${BUILD_DIR}/cpp_samples_clang - env: - CMAKE_COMPILE_WARNING_AS_ERROR: 'ON' - CC: clang - 
CXX: clang++ - - - name: Build c samples - run: ${INSTALL_DIR}/samples/c/build_samples.sh -i ${INSTALL_DIR} -b ${BUILD_DIR}/c_samples - env: - CMAKE_COMPILE_WARNING_AS_ERROR: 'ON' - - # - # Tests - # - - - name: Samples tests - if: fromJSON(needs.smart_ci.outputs.affected_components).samples.test - run: | - export WORKSPACE=${INSTALL_DIR} - export IE_APP_PATH=${INSTALL_DIR}/samples_bin - export IE_APP_PYTHON_PATH=${INSTALL_DIR}/samples/python - export SHARE=${INSTALL_TEST_DIR}/smoke_tests/samples_smoke_tests_data - - python3 -m pip install --ignore-installed PyYAML -r ${INSTALL_TEST_DIR}/smoke_tests/requirements.txt - export LD_LIBRARY_PATH=${IE_APP_PATH}:$LD_LIBRARY_PATH - - source ${INSTALL_DIR}/setupvars.sh - - python3 -m pytest -sv ${INSTALL_TEST_DIR}/smoke_tests \ - --env_conf ${INSTALL_TEST_DIR}/smoke_tests/env_config.yml \ - --junitxml=${INSTALL_TEST_DIR}/TEST-SamplesSmokeTests.xml - - - name: Upload Test Results - uses: actions/upload-artifact@v3 - if: ${{ !cancelled() }} - with: - name: test-results-samples - path: ${{ env.INSTALL_TEST_DIR }}/TEST*.xml - if-no-files-found: 'warn' + uses: ./.github/workflows/job_samples_tests.yml + with: + runner: 'aks-linux-16-cores-arm' + image: 'openvinogithubactions.azurecr.io/dockerhub/ubuntu:20.04' + affected-components: ${{ needs.smart_ci.outputs.affected_components }} ONNX_Runtime: name: ONNX Runtime Integration - needs: [Build, Smart_CI] - timeout-minutes: 30 - defaults: - run: - shell: bash - runs-on: 'aks-linux-16-cores-arm' - container: - image: openvinogithubactions.azurecr.io/dockerhub/ubuntu:20.04 - volumes: - - /mount/caches:/mount/caches - options: -e SCCACHE_AZURE_BLOB_CONTAINER -e SCCACHE_AZURE_CONNECTION_STRING - env: - DEBIAN_FRONTEND: noninteractive # to prevent apt-get from waiting user input - CMAKE_GENERATOR: 'Ninja Multi-Config' - CMAKE_CXX_COMPILER_LAUNCHER: sccache - CMAKE_C_COMPILER_LAUNCHER: sccache - OPENVINO_REPO: /__w/openvino/openvino/openvino - INSTALL_DIR: /__w/openvino/openvino/install - SCCACHE_AZURE_KEY_PREFIX: 'ubuntu20_aarch64_onnxruntime' - ONNX_RUNTIME_REPO: /__w/openvino/openvino/onnxruntime - ONNX_RUNTIME_UTILS: /__w/openvino/openvino/install/onnxruntime - ONNX_RUNTIME_BUILD_DIR: /__w/openvino/openvino/onnxruntime/build if: fromJSON(needs.smart_ci.outputs.affected_components).ONNX_RT - - steps: - - name: Fetch install_build_dependencies.sh and setup_python action - uses: actions/checkout@v4 - with: - sparse-checkout: | - install_build_dependencies.sh - .github/actions/setup_python/action.yml - sparse-checkout-cone-mode: false - path: ${{ env.OPENVINO_REPO }} - - - name: Install git - run: | - apt-get update - apt-get install --assume-yes --no-install-recommends git ca-certificates - - - name: Setup Python ${{ env.PYTHON_VERSION }} - uses: ./openvino/.github/actions/setup_python - with: - version: ${{ env.PYTHON_VERSION }} - should-setup-pip-paths: 'false' - - # - # Initialize OpenVINO - # - - - name: Download OpenVINO package - uses: actions/download-artifact@v3 - with: - name: openvino_package - path: ${{ env.INSTALL_DIR }} - - - name: Extract OpenVINO package - run: | - pushd ${INSTALL_DIR} - tar -xzf openvino_package.tar.gz -C ${INSTALL_DIR} - popd - - - name: Install OpenVINO dependencies - run: ${INSTALL_DIR}/install_dependencies/install_openvino_dependencies.sh -c=core -c=dev -y - - - name: Clone ONNX Runtime - run: | - branch=`tr -s '\n ' < ${ONNX_RUNTIME_UTILS}/version` - git clone --branch $branch --single-branch --recursive https://github.com/microsoft/onnxruntime.git ${ONNX_RUNTIME_REPO} - - # 
- # Tests - # - - - name: Install Build Dependencies - run: bash ${OPENVINO_REPO}/install_build_dependencies.sh - - - name: Install sccache - uses: mozilla-actions/sccache-action@v0.0.3 - with: - version: "v0.5.4" - - - name: Build Lin ONNX Runtime - run: | - source ${INSTALL_DIR}/setupvars.sh - - ${ONNX_RUNTIME_REPO}/build.sh \ - --config RelWithDebInfo \ - --use_openvino CPU_FP32 \ - --build_shared_lib \ - --parallel \ - --skip_tests \ - --compile_no_warning_as_error \ - --build_dir ${ONNX_RUNTIME_BUILD_DIR} - env: - CXXFLAGS: "-Wno-error=deprecated-declarations" - - - name: Show sccache stats - run: ${SCCACHE_PATH} --show-stats - - - name: Run onnxruntime_test_all - if: ${{ 'false' }} # Ticket: 126277 - run: | - source ${INSTALL_DIR}/setupvars.sh - skip_tests=$(tr -s '\n ' ':' < ${ONNX_RUNTIME_UTILS}/skip_tests) - - ./onnxruntime_test_all --gtest_filter=-$skip_tests - working-directory: ${{ env.ONNX_RUNTIME_BUILD_DIR }}/RelWithDebInfo/RelWithDebInfo - - - name: Run onnxruntime_shared_lib_test - run: | - source ${INSTALL_DIR}/setupvars.sh - ./onnxruntime_shared_lib_test --gtest_filter=-CApiTest.test_custom_op_openvino_wrapper_library - working-directory: ${{ env.ONNX_RUNTIME_BUILD_DIR }}/RelWithDebInfo/RelWithDebInfo - - - name: Run onnxruntime_global_thread_pools_test - run: | - source ${INSTALL_DIR}/setupvars.sh - ./onnxruntime_global_thread_pools_test - working-directory: ${{ env.ONNX_RUNTIME_BUILD_DIR }}/RelWithDebInfo/RelWithDebInfo - - - name: Run onnxruntime_api_tests_without_env - run: | - source ${INSTALL_DIR}/setupvars.sh - ./onnxruntime_api_tests_without_env - working-directory: ${{ env.ONNX_RUNTIME_BUILD_DIR }}/RelWithDebInfo/RelWithDebInfo - - - name: Run pytorch-converted tests - run: | - source ${INSTALL_DIR}/setupvars.sh - ./onnx_test_runner "${ONNX_RUNTIME_REPO}/cmake/external/onnx/onnx/backend/test/data/pytorch-converted" - working-directory: ${{ env.ONNX_RUNTIME_BUILD_DIR }}/RelWithDebInfo/RelWithDebInfo - - - name: Run pytorch-operator tests - run: | - source ${INSTALL_DIR}/setupvars.sh - ./onnx_test_runner "${ONNX_RUNTIME_REPO}/cmake/external/onnx/onnx/backend/test/data/pytorch-operator" - working-directory: ${{ env.ONNX_RUNTIME_BUILD_DIR }}/RelWithDebInfo/RelWithDebInfo + needs: [ Build, Smart_CI ] + uses: ./.github/workflows/job_onnx_runtime.yml + with: + runner: 'aks-linux-16-cores-arm' + container: '{"image": "openvinogithubactions.azurecr.io/dockerhub/ubuntu:20.04", "volumes": ["/mount:/mount"], "options": "-e SCCACHE_AZURE_BLOB_CONTAINER -e SCCACHE_AZURE_CONNECTION_STRING"}' + sccache-azure-key-prefix: 'ubuntu20_aarch64_onnxruntime' CXX_Unit_Tests: name: C++ unit tests - needs: [Build, Smart_CI] - timeout-minutes: 20 - defaults: - run: - shell: bash - runs-on: 'aks-linux-16-cores-arm' - container: - image: openvinogithubactions.azurecr.io/dockerhub/ubuntu:20.04 - env: - INSTALL_DIR: /__w/openvino/openvino/install - INSTALL_TEST_DIR: /__w/openvino/openvino/install/tests - - steps: - - name: Download OpenVINO package - uses: actions/download-artifact@v3 - with: - name: openvino_package - path: ${{ env.INSTALL_DIR }} - - - name: Download OpenVINO tests package - uses: actions/download-artifact@v3 - with: - name: openvino_tests - path: ${{ env.INSTALL_TEST_DIR }} - - - name: Extract OpenVINO packages - run: | - pushd ${INSTALL_DIR} - tar -xzf openvino_package.tar.gz -C ${INSTALL_DIR} - popd - pushd ${INSTALL_TEST_DIR} - tar -xzf openvino_tests.tar.gz -C ${INSTALL_DIR} - popd - - - name: Install OpenVINO dependencies - run: 
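Several of the new calls (job_onnx_runtime.yml here, job_python_unit_tests.yml and the model-test jobs further down) pass the whole container definition as one JSON string rather than a bare image name. Assuming the callee decodes that string with fromJSON(), the receiving side would look roughly like the sketch below; the input names are the with: keys used in this patch, the rest is illustrative:

# Sketch of a callee consuming the JSON-encoded container input (illustrative only).
on:
  workflow_call:
    inputs:
      runner:
        type: string
        required: true
      container:
        description: 'JSON object with image, volumes and options for the job container'
        type: string
        required: true
      sccache-azure-key-prefix:
        type: string
        required: false

jobs:
  ONNX_Runtime:
    runs-on: ${{ inputs.runner }}
    # fromJSON() turns the string back into an object, so image, volumes and
    # docker options all arrive through a single input
    container: ${{ fromJSON(inputs.container) }}
    env:
      SCCACHE_AZURE_KEY_PREFIX: ${{ inputs.sccache-azure-key-prefix }}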
${INSTALL_DIR}/install_dependencies/install_openvino_dependencies.sh -c=core -c=gpu -y - - # - # Tests - # - - - name: OpenVINO Core Unit Tests - if: fromJSON(needs.smart_ci.outputs.affected_components).Core.test - run: | - source ${INSTALL_DIR}/setupvars.sh - ${INSTALL_TEST_DIR}/ov_core_unit_tests --gtest_print_time=1 --gtest_filter=-*IE_GPU* \ - --gtest_output=xml:${INSTALL_TEST_DIR}/TEST-OVCoreUT.xml - - - name: OpenVINO Inference Functional Tests - if: fromJSON(needs.smart_ci.outputs.affected_components).inference.test - run: | - source ${INSTALL_DIR}/setupvars.sh - ${INSTALL_TEST_DIR}/ov_inference_functional_tests --gtest_print_time=1 \ - --gtest_output=xml:${INSTALL_TEST_DIR}/TEST-InferenceFunc.xml - - - name: OpenVINO Inference Unit Tests - if: fromJSON(needs.smart_ci.outputs.affected_components).inference.test - run: | - source ${INSTALL_DIR}/setupvars.sh - ${INSTALL_TEST_DIR}/ov_inference_unit_tests --gtest_print_time=1 \ - --gtest_output=xml:${INSTALL_TEST_DIR}/TEST-InferenceUnit.xml - - - name: Low Precision Transformations Tests - if: fromJSON(needs.smart_ci.outputs.affected_components).LP_transformations.test - run: | - source ${INSTALL_DIR}/setupvars.sh - - # Skip filter ticket: 126279 - ${INSTALL_TEST_DIR}/ov_lp_transformations_tests --gtest_print_time=1 \ - --gtest_filter=-*smoke_LPT/FoldFakeQuantizeInTransformations.CompareFunctions* \ - --gtest_output=xml:${INSTALL_TEST_DIR}/TEST-LpTransformations.xml - - - name: OpenVINO Conditional compilation tests - if: fromJSON(needs.smart_ci.outputs.affected_components).Core.test - run: | - source ${INSTALL_DIR}/setupvars.sh - ${INSTALL_TEST_DIR}/ov_conditional_compilation_tests --gtest_print_time=1 \ - --gtest_output=xml:${INSTALL_TEST_DIR}/TEST-ConditionalCompilation.xml - - - name: IR frontend tests - if: fromJSON(needs.smart_ci.outputs.affected_components).IR_FE.test - run: | - source ${INSTALL_DIR}/setupvars.sh - ${INSTALL_TEST_DIR}/ov_ir_frontend_tests --gtest_print_time=1 \ - --gtest_output=xml:${INSTALL_TEST_DIR}/TEST-IRFrontend.xml - - - name: PaddlePaddle frontend tests - if: ${{ 'false' }} - run: | - source ${INSTALL_DIR}/setupvars.sh - ${INSTALL_TEST_DIR}/paddle_tests --gtest_print_time=1 \ - --gtest_output=xml:${INSTALL_TEST_DIR}/TEST-PaddleTests.xml - - - name: ONNX frontend tests - if: ${{ 'false' }} # Ticket: 126280 - #if: fromJSON(needs.smart_ci.outputs.affected_components).ONNX_FE.test - run: | - source ${INSTALL_DIR}/setupvars.sh - ${INSTALL_TEST_DIR}/ov_onnx_frontend_tests --gtest_print_time=1 \ - --gtest_filter=-*IE_GPU* \ - --gtest_output=xml:${INSTALL_TEST_DIR}/TEST-ONNXFrontend.xml - - - name: TensorFlow Common frontend tests - if: fromJSON(needs.smart_ci.outputs.affected_components).TF_FE.test || - fromJSON(needs.smart_ci.outputs.affected_components).TFL_FE.test - run: | - source ${INSTALL_DIR}/setupvars.sh - ${INSTALL_TEST_DIR}/ov_tensorflow_common_tests --gtest_print_time=1 \ - --gtest_output=xml:${INSTALL_TEST_DIR}/TEST-TensorFlowCommonFrontend.xml - - - name: TensorFlow frontend tests - if: fromJSON(needs.smart_ci.outputs.affected_components).TF_FE.test - run: | - source ${INSTALL_DIR}/setupvars.sh - ${INSTALL_TEST_DIR}/ov_tensorflow_frontend_tests --gtest_print_time=1 \ - --gtest_output=xml:${INSTALL_TEST_DIR}/TEST-TensorFlowFrontend.xml - - - name: TensorFlow Lite frontend tests - if: fromJSON(needs.smart_ci.outputs.affected_components).TFL_FE.test - run: | - source ${INSTALL_DIR}/setupvars.sh - ${INSTALL_TEST_DIR}/ov_tensorflow_lite_frontend_tests --gtest_print_time=1 \ - 
--gtest_output=xml:${INSTALL_TEST_DIR}/TEST-TensorFlowLiteFrontend.xml - - - name: Transformations func tests - if: ${{ 'false' }} # Ticket: 126281 - #if: fromJSON(needs.smart_ci.outputs.affected_components).transformations.test - run: | - source ${INSTALL_DIR}/setupvars.sh - ${INSTALL_TEST_DIR}/ov_transformations_tests --gtest_print_time=1 \ - --gtest_output=xml:${INSTALL_TEST_DIR}/TEST-Transformations.xml - - - name: Common test utils tests - run: | - source ${INSTALL_DIR}/setupvars.sh - ${INSTALL_TEST_DIR}/ov_util_tests --gtest_print_time=1 \ - --gtest_output=xml:${INSTALL_TEST_DIR}/TEST-CommonUtilTests.xml - - - name: Snippets func tests - #if: fromJSON(needs.smart_ci.outputs.affected_components).CPU.test - run: | - source ${INSTALL_DIR}/setupvars.sh - ${INSTALL_TEST_DIR}/ov_snippets_func_tests --gtest_print_time=1 \ - --gtest_output=xml:${INSTALL_TEST_DIR}/TEST-SnippetsFuncTests.xml - - - name: CPU plugin unit tests - if: fromJSON(needs.smart_ci.outputs.affected_components).CPU.test - run: | - source ${INSTALL_DIR}/setupvars.sh - ${INSTALL_TEST_DIR}/ov_cpu_unit_tests --gtest_print_time=1 \ - --gtest_output=xml:${INSTALL_TEST_DIR}/TEST-CPUUnitTests.xml - - - name: ov_subgraphs_dumper_tests tests - run: | - source ${INSTALL_DIR}/setupvars.sh - ${INSTALL_TEST_DIR}/ov_subgraphs_dumper_tests --gtest_print_time=1 \ - --gtest_output=xml:${INSTALL_TEST_DIR}/TEST-ov_subgraphs_dumper_tests.xml - - - name: Template OpImpl tests - run: | - source ${INSTALL_DIR}/setupvars.sh - ${INSTALL_TEST_DIR}/ov_op_conformance_tests --gtest_print_time=1 --device=TEMPLATE --gtest_filter=*OpImpl*\ - --gtest_output=xml:${INSTALL_TEST_DIR}/TEST-OpImplTests.xml - - - name: AUTO unit tests - if: fromJSON(needs.smart_ci.outputs.affected_components).AUTO.test - run: | - source ${INSTALL_DIR}/setupvars.sh - ${INSTALL_TEST_DIR}/ov_auto_unit_tests --gtest_print_time=1 \ - --gtest_output=xml:${INSTALL_TEST_DIR}/TEST-ov_auto_unit_tests.xml - - - name: AUTO func Tests - if: fromJSON(needs.smart_ci.outputs.affected_components).AUTO.test - run: | - source ${{ env.INSTALL_DIR }}/setupvars.sh - ${{ env.INSTALL_TEST_DIR }}/ov_auto_func_tests --gtest_print_time=1 \ - --gtest_output=xml:${{ env.INSTALL_TEST_DIR }}/TEST-ov_auto_func_tests.xml - - - name: Template plugin func tests - if: fromJSON(needs.smart_ci.outputs.affected_components).TEMPLATE.test - run: | - source ${INSTALL_DIR}/setupvars.sh - ${INSTALL_TEST_DIR}/ov_template_func_tests --gtest_print_time=1 \ - --gtest_filter=*smoke* \ - --gtest_output=xml:${INSTALL_TEST_DIR}/TEST-TemplateFuncTests.xml - - - name: Inference Engine C API tests - if: fromJSON(needs.smart_ci.outputs.affected_components).C_API.test - run: | - source ${INSTALL_DIR}/setupvars.sh - ${INSTALL_TEST_DIR}/InferenceEngineCAPITests --gtest_print_time=1 \ - --gtest_output=xml:${INSTALL_TEST_DIR}/TEST-InferenceEngineCAPITests.xml - - - name: OpenVINO C API tests - if: fromJSON(needs.smart_ci.outputs.affected_components).C_API.test - run: | - source ${INSTALL_DIR}/setupvars.sh - - # Skip filter ticket: 126283 - ${INSTALL_TEST_DIR}/ov_capi_test --gtest_print_time=1 \ - --gtest_filter=-*ov_core/ov_core_test.ov_core_compile_model_with_property* \ - --gtest_output=xml:${INSTALL_TEST_DIR}/TEST-OpenVINOCAPITests.xml - - - name: AutoBatch unit tests - if: fromJSON(needs.smart_ci.outputs.affected_components).AUTO_BATCH.test - run: | - source ${INSTALL_DIR}/setupvars.sh - ${INSTALL_TEST_DIR}/ov_auto_batch_unit_tests --gtest_output=xml:${INSTALL_TEST_DIR}/TEST-ov_auto_batch_unit_tests.xml - - - name: AutoBatch func 
tests - if: fromJSON(needs.smart_ci.outputs.affected_components).AUTO_BATCH.test - run: | - source ${INSTALL_DIR}/setupvars.sh - ${INSTALL_TEST_DIR}/ov_auto_batch_func_tests --gtest_output=xml:${INSTALL_TEST_DIR}/TEST-ov_auto_batch_func_tests.xml - - - name: Proxy Plugin func tests - if: fromJSON(needs.smart_ci.outputs.affected_components).PROXY.test - run: | - source ${INSTALL_DIR}/setupvars.sh - ${INSTALL_TEST_DIR}/ov_proxy_plugin_tests --gtest_print_time=1 --gtest_output=xml:${INSTALL_TEST_DIR}/TEST-OVProxyTests.xml - - - name: Hetero unit tests - if: fromJSON(needs.smart_ci.outputs.affected_components).HETERO.test - run: | - source ${{ env.INSTALL_DIR }}/setupvars.sh - ${{ env.INSTALL_TEST_DIR }}/ov_hetero_unit_tests --gtest_print_time=1 --gtest_output=xml:${{ env.INSTALL_TEST_DIR }}/TEST-OVHeteroUnitTests.xml - - - name: Hetero func tests - if: fromJSON(needs.smart_ci.outputs.affected_components).HETERO.test - run: | - source ${INSTALL_DIR}/setupvars.sh - ${INSTALL_TEST_DIR}/ov_hetero_func_tests --gtest_print_time=1 --gtest_output=xml:${INSTALL_TEST_DIR}/TEST-OVHeteroFuncTests.xml - - - name: Upload Test Results - uses: actions/upload-artifact@v3 - if: ${{ !cancelled() }} - with: - name: test-results-cpp - path: ${{ env.INSTALL_TEST_DIR }}/TEST*.xml - if-no-files-found: 'warn' + needs: [ Build, Smart_CI ] + uses: ./.github/workflows/job_cxx_unit_tests.yml + with: + runner: 'aks-linux-16-cores-arm' + image: 'openvinogithubactions.azurecr.io/dockerhub/ubuntu:20.04' + affected-components: ${{ needs.smart_ci.outputs.affected_components }} Python_Unit_Tests: name: Python unit tests - needs: [Build, Smart_CI] - timeout-minutes: 180 - defaults: - run: - shell: bash - runs-on: 'aks-linux-16-cores-arm' - container: - image: openvinogithubactions.azurecr.io/dockerhub/ubuntu:20.04 - volumes: - - /mount/caches:/mount/caches - env: - OPENVINO_REPO: /__w/openvino/openvino/openvino - INSTALL_DIR: /__w/openvino/openvino/install - INSTALL_TEST_DIR: /__w/openvino/openvino/install/tests - LAYER_TESTS_INSTALL_DIR: /__w/openvino/openvino/install/tests/layer_tests - - steps: - # - # Initialize OpenVINO - # - - name: Download OpenVINO package - uses: actions/download-artifact@v3 - with: - name: openvino_package - path: ${{ env.INSTALL_DIR }} - - - name: Download OpenVINO tests package - uses: actions/download-artifact@v3 - with: - name: openvino_tests - path: ${{ env.INSTALL_TEST_DIR }} - - - name: Extract OpenVINO packages - run: | - pushd ${INSTALL_DIR} - tar -xzf openvino_package.tar.gz -C ${INSTALL_DIR} - popd - - pushd ${INSTALL_TEST_DIR} - tar -xzf openvino_tests.tar.gz -C ${INSTALL_DIR} - popd - - - name: Fetch setup_python action - uses: actions/checkout@v4 - with: - sparse-checkout: | - .github/actions/setup_python/action.yml - sparse-checkout-cone-mode: false - path: ${{ env.OPENVINO_REPO }} - - - name: Setup Python ${{ env.PYTHON_VERSION }} - uses: ./openvino/.github/actions/setup_python - with: - version: ${{ env.PYTHON_VERSION }} - pip-cache-path: ${{ env.PIP_CACHE_PATH }} - should-setup-pip-paths: 'true' - - - name: Install OpenVINO dependencies - run: | - apt-get update && apt-get install -y gcc python3-dev # Needed for building `psutil` - ${INSTALL_DIR}/install_dependencies/install_openvino_dependencies.sh -c=core -y - - - name: Install OpenVINO Python wheels - run: | - # Install the core OV wheel - python3 -m pip install ${INSTALL_DIR}/tools/openvino-*.whl - - # Find and install OV dev wheel - pushd ${INSTALL_DIR}/tools - ov_dev_wheel_name=$(find . 
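Both the steps that stay inline and the new reusable-workflow calls are gated on fromJSON(needs.smart_ci.outputs.affected_components).<COMPONENT>.test. That output is a JSON string produced by the smart-ci action in .github/actions/smart-ci; its exact schema is defined there, but the conditions used throughout this patch imply a shape roughly like the commented example below (component names are real, the values are made up):

# Illustrative shape of the affected_components output:
#
#   {
#     "CPU":        { "test": true },
#     "Python_API": { "test": true },
#     "ONNX_FE":    { "test": false },
#     "samples":    { "test": true }
#   }
#
# so a typical guard reads the same before and after the refactor:
#
#   if: fromJSON(needs.smart_ci.outputs.affected_components).CPU.test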
-name 'openvino_dev*.whl') - python3 -m pip install $ov_dev_wheel_name[mxnet,caffe,kaldi,onnx,tensorflow2,pytorch] - popd - - - name: Install Python API tests dependencies - run: | - # To enable pytest parallel features - python3 -m pip install pytest-xdist[psutil] - # For torchvision to OpenVINO preprocessing converter - python3 -m pip install -r ${INSTALL_TEST_DIR}/python/preprocess/torchvision/requirements.txt - - # TODO: replace with Python API tests requirements - python3 -m pip install -r ${INSTALL_TEST_DIR}/mo/requirements_dev.txt - - # - # Tests - # - - - name: Python API 1.0 Tests - if: fromJSON(needs.smart_ci.outputs.affected_components).Python_API.test - run: | - python3 -m pytest -s ${INSTALL_TEST_DIR}/pyngraph \ - --junitxml=${INSTALL_TEST_DIR}/TEST-Pyngraph.xml \ - --ignore=${INSTALL_TEST_DIR}/pyngraph/tests_compatibility/test_onnx/test_zoo_models.py \ - --ignore=${INSTALL_TEST_DIR}/pyngraph/tests_compatibility/test_onnx/test_backend.py - - - name: Python API 2.0 Tests - timeout-minutes: 30 - if: ${{ 'false' }} # Ticket: 126380 - #if: fromJSON(needs.smart_ci.outputs.affected_components).Python_API.test - run: | - # for 'template' extension - export LD_LIBRARY_PATH=${INSTALL_TEST_DIR}:$LD_LIBRARY_PATH - python3 -m pytest -sv ${INSTALL_TEST_DIR}/pyopenvino \ - --junitxml=${INSTALL_TEST_DIR}/TEST-Pyngraph.xml \ - --ignore=${INSTALL_TEST_DIR}/pyopenvino/tests/test_utils/test_utils.py - - - name: Model Optimizer unit tests - if: fromJSON(needs.smart_ci.outputs.affected_components).MO.test - run: | - python3 -m pytest -s ${INSTALL_TEST_DIR}/mo/unit_tests \ - --junitxml=${INSTALL_TEST_DIR}/TEST-ModelOptimizer.xml \ - --ignore-glob="**/mo/front/mxnet/**" - - - name: Python ONNX operators tests - if: fromJSON(needs.smart_ci.outputs.affected_components).Python_API.test || - fromJSON(needs.smart_ci.outputs.affected_components).ONNX_FE.test - run: | - # Skip test_onnx/test_zoo_models and test_onnx/test_backend due to long execution time - ONNX Model Zoo tests are run separately - python3 -m pytest -sv ${INSTALL_TEST_DIR}/onnx -k 'not cuda' \ - --junitxml=${INSTALL_TEST_DIR}/TEST-onnx_frontend.xml \ - --ignore=${INSTALL_TEST_DIR}/onnx/test_python/test_zoo_models.py - - - name: OVC unit tests - if: fromJSON(needs.smart_ci.outputs.affected_components).MO.test - run: python3 -m pytest -s ${INSTALL_TEST_DIR}/ovc/unit_tests --junitxml=${INSTALL_TEST_DIR}/TEST-OpenVinoConversion.xml - - - name: Install Python Layer tests dependencies - if: ${{ always() }} - run: | - # layer test requirements - python3 -m pip install -r ${LAYER_TESTS_INSTALL_DIR}/requirements.txt - - - name: MO Python API Tests - if: fromJSON(needs.smart_ci.outputs.affected_components).MO.test - run: | - # Import 'test_utils' installed in '/tests/python/openvino' - export LD_LIBRARY_PATH=${PIP_INSTALL_PATH}/openvino/libs:$LD_LIBRARY_PATH - export PYTHONPATH=${INSTALL_TEST_DIR}/python - export LD_PRELOAD=${PIP_INSTALL_PATH}/torch/lib/../../torch.libs/libgomp-d22c30c5.so.1.0.0 - - echo ${PIP_INSTALL_PATH} - echo ${PIP_INSTALL_PATH} - echo ${PIP_INSTALL_PATH} - - python3 -m pytest ${LAYER_TESTS_INSTALL_DIR}/mo_python_api_tests --junitxml=${INSTALL_TEST_DIR}/TEST-test_mo_convert.xml - env: - TEST_DEVICE: CPU - TEST_PRECISION: FP16 - - - name: OVC Python API Tests - if: fromJSON(needs.smart_ci.outputs.affected_components).MO.test - run: | - # Import 'test_utils' installed in '/tests/python/openvino' - export PYTHONPATH=${INSTALL_TEST_DIR}/python - export LD_LIBRARY_PATH=${PIP_INSTALL_PATH}/openvino/libs:$LD_LIBRARY_PATH - export 
LD_PRELOAD=${PIP_INSTALL_PATH}/torch/lib/../../torch.libs/libgomp-d22c30c5.so.1.0.0 - - python3 -m pytest ${LAYER_TESTS_INSTALL_DIR}/ovc_python_api_tests --junitxml=${INSTALL_TEST_DIR}/TEST-test_ovc_convert.xml - env: - TEST_DEVICE: CPU - TEST_PRECISION: FP16 - - - name: Python Frontend tests - if: fromJSON(needs.smart_ci.outputs.affected_components).PyTorch_FE.test || - fromJSON(needs.smart_ci.outputs.affected_components).PDPD_FE.test - run: | - # to allow 'libtest_builtin_extensions.so' to find 'libopenvino_onnx_frontend.so' - export LD_LIBRARY_PATH=${PIP_INSTALL_PATH}/openvino/libs:$LD_LIBRARY_PATH - python3 -m pytest ${LAYER_TESTS_INSTALL_DIR}/py_frontend_tests --junitxml=${INSTALL_TEST_DIR}/TEST-test_py_fontend.xml - - - name: PyTorch Layer Tests - timeout-minutes: 20 - if: ${{ 'false' }} # Ticket: 126287 - #if: fromJSON(needs.smart_ci.outputs.affected_components).PyTorch_FE.test - run: python3 -m pytest ${LAYER_TESTS_INSTALL_DIR}/pytorch_tests -n logical -m precommit --junitxml=${INSTALL_TEST_DIR}/TEST-pytorch.xml - env: - TEST_DEVICE: CPU - TEST_PRECISION: FP32 - - - name: PyTorch torch.compile TORCHFX Layer Tests - if: ${{ 'false' }} # RuntimeError: Python 3.11+ not yet supported for torch.compile, torch 2.0.1 is installed on Linux ARM64, it works in torch 2.1.1 - timeout-minutes: 20 - #if: fromJSON(needs.smart_ci.outputs.affected_components).PyTorch_FE.test - run: | - python3 -m pytest ${LAYER_TESTS_INSTALL_DIR}/pytorch_tests -m precommit_fx_backend --junitxml=${INSTALL_TEST_DIR}/TEST-pytorch.xml - env: - TEST_DEVICE: CPU - TEST_PRECISION: FP32 - PYTORCH_TRACING_MODE: TORCHFX - - - name: PyTorch torch.compile TORCHSCRIPT Layer Tests - if: ${{ 'false' }} # RuntimeError: Python 3.11+ not yet supported for torch.compile, torch 2.0.1 is installed on Linux ARM64, it works in torch 2.1.1 - timeout-minutes: 20 - #if: fromJSON(needs.smart_ci.outputs.affected_components).PyTorch_FE.test - run: | - python3 -m pytest ${LAYER_TESTS_INSTALL_DIR}/pytorch_tests -m precommit_ts_backend --junitxml=${INSTALL_TEST_DIR}/TEST-pytorch.xml - env: - TEST_DEVICE: CPU - TEST_PRECISION: FP32 - PYTORCH_TRACING_MODE: TORCHSCRIPT - - - name: ONNX Layer Tests - timeout-minutes: 30 - if: fromJSON(needs.smart_ci.outputs.affected_components).ONNX_FE.test - run: | - # requires 'unit_tests' from 'tools/mo' - export PYTHONPATH=${INSTALL_TEST_DIR}/mo:$PYTHONPATH - python3 -m pytest ${LAYER_TESTS_INSTALL_DIR}/onnx_tests -m "not launch_only_if_manually_specified and precommit" --junitxml=${INSTALL_TEST_DIR}/TEST-onnx.xml - env: - TEST_DEVICE: CPU - TEST_PRECISION: FP16 - - - name: TensorFlow 1 Layer Tests - TF FE - timeout-minutes: 30 - if: fromJSON(needs.smart_ci.outputs.affected_components).TF_FE.test - run: | - # requires 'unit_tests' from 'mo' - export PYTHONPATH=${INSTALL_TEST_DIR}/mo - python3 -m pytest ${LAYER_TESTS_INSTALL_DIR}/tensorflow_tests/ --use_new_frontend -m precommit_tf_fe --junitxml=${INSTALL_TEST_DIR}/TEST-tf_fe.xml - env: - TEST_DEVICE: CPU - TEST_PRECISION: FP16 - - - name: TensorFlow 2 Layer Tests - TF FE - timeout-minutes: 30 - if: fromJSON(needs.smart_ci.outputs.affected_components).TF_FE.test - run: | - # requires 'unit_tests' from 'mo' - export PYTHONPATH=${INSTALL_TEST_DIR}/mo - python3 -m pytest ${LAYER_TESTS_INSTALL_DIR}/tensorflow2_keras_tests/ --use_new_frontend -m precommit_tf_fe --junitxml=${INSTALL_TEST_DIR}/TEST-tf2_fe.xml - env: - TEST_DEVICE: CPU - TEST_PRECISION: FP16 - - - name: JAX Layer Tests - TF FE - timeout-minutes: 30 - if: ${{ 'false' }} - #if: 
fromJSON(needs.smart_ci.outputs.affected_components).TF_FE.test - run: python3 -m pytest ${LAYER_TESTS_INSTALL_DIR}/jax_tests/ -m precommit --junitxml=${INSTALL_TEST_DIR}/TEST-jax.xml - env: - TEST_DEVICE: CPU - - - name: TensorFlow 1 Layer Tests - Legacy FE - timeout-minutes: 30 - run: python3 -m pytest ${LAYER_TESTS_INSTALL_DIR}/tensorflow_tests/test_tf_Roll.py --ir_version=10 --junitxml=${INSTALL_TEST_DIR}/TEST-tf_Roll.xml - - - name: TensorFlow 2 Layer Tests - Legacy FE - timeout-minutes: 30 - if: fromJSON(needs.smart_ci.outputs.affected_components).TF_FE.test - run: python3 -m pytest ${LAYER_TESTS_INSTALL_DIR}/tensorflow_tests/test_tf_Roll.py --ir_version=10 --junitxml=${INSTALL_TEST_DIR}/TEST-tf_Roll.xml - - - name: TensorFlow 2 Layer Tests - Legacy FE - if: fromJSON(needs.smart_ci.outputs.affected_components).TF_FE.test - run: python3 -m pytest ${LAYER_TESTS_INSTALL_DIR}/tensorflow2_keras_tests/test_tf2_keras_activation.py --ir_version=11 -k "sigmoid" --junitxml=${INSTALL_TEST_DIR}/TEST-tf2_Activation.xml - env: - TEST_DEVICE: CPU - TEST_PRECISION: FP16 - - - name: TensorFlow Lite Layer Tests - TFL FE - timeout-minutes: 30 - if: fromJSON(needs.smart_ci.outputs.affected_components).TFL_FE.test - run: python3 -m pytest ${LAYER_TESTS_INSTALL_DIR}/tensorflow_lite_tests/ --junitxml=${INSTALL_TEST_DIR}/TEST-tfl_fe.xml - env: - TEST_DEVICE: CPU - TEST_PRECISION: FP16 - - - name: Clone API snippets - if: ${{ always() }} - uses: actions/checkout@v4 - with: - sparse-checkout: openvino/docs/snippets - path: ${{ env.OPENVINO_REPO }} - submodules: 'false' - - - name: Docs Python snippets - if: ${{ always() }} - run: | - # to find 'snippets' module in docs - export PYTHONPATH=${OPENVINO_REPO}/docs - # for 'template' extension - export LD_LIBRARY_PATH=${INSTALL_TEST_DIR}:$LD_LIBRARY_PATH - python3 ${OPENVINO_REPO}/docs/snippets/main.py - - - name: Upload Test Results - uses: actions/upload-artifact@v3 - if: ${{ !cancelled() }} - with: - name: test-results-python - path: | - ${{ env.INSTALL_TEST_DIR }}/TEST*.html - ${{ env.INSTALL_TEST_DIR }}/TEST*.xml - if-no-files-found: 'warn' + needs: [ Build, Smart_CI ] + uses: ./.github/workflows/job_python_unit_tests.yml + with: + runner: 'aks-linux-16-cores-arm' + container: '{"image": "openvinogithubactions.azurecr.io/dockerhub/ubuntu:20.04", "volumes": ["/mount:/mount"]}' + affected-components: ${{ needs.smart_ci.outputs.affected_components }} CPU_Functional_Tests: name: CPU functional tests - needs: [Build, Smart_CI] - timeout-minutes: 60 - defaults: - run: - shell: bash - runs-on: 'aks-linux-16-cores-arm' - container: - image: openvinogithubactions.azurecr.io/dockerhub/ubuntu:20.04 - env: - OPENVINO_REPO: /__w/openvino/openvino/openvino - INSTALL_DIR: /__w/openvino/openvino/install - INSTALL_TEST_DIR: /__w/openvino/openvino/install/tests - PARALLEL_TEST_SCRIPT: /__w/openvino/openvino/install/tests/functional_test_utils/layer_tests_summary/run_parallel.py - PARALLEL_TEST_CACHE: /__w/openvino/openvino/install/tests/test_cache.lst - # if: fromJSON(needs.smart_ci.outputs.affected_components).CPU.test - if: ${{ 'false' }} # Ticket: 126379 - steps: - - name: Download OpenVINO package - uses: actions/download-artifact@v3 - with: - name: openvino_package - path: ${{ env.INSTALL_DIR }} - - - name: Download OpenVINO tests package - uses: actions/download-artifact@v3 - with: - name: openvino_tests - path: ${{ env.INSTALL_TEST_DIR }} - - - name: Extract OpenVINO packages - run: | - pushd ${INSTALL_DIR} - tar -xzf openvino_package.tar.gz -C ${INSTALL_DIR} - popd - 
pushd ${INSTALL_TEST_DIR} - tar -xzf openvino_tests.tar.gz -C ${INSTALL_DIR} - popd - - - name: Install OpenVINO dependencies - run: bash ${INSTALL_DIR}/install_dependencies/install_openvino_dependencies.sh -c=core -y - - - name: Fetch setup_python action - uses: actions/checkout@v4 - with: - sparse-checkout: | - .github/actions/setup_python/action.yml - sparse-checkout-cone-mode: false - path: ${{ env.OPENVINO_REPO }} - - - name: Setup Python ${{ env.PYTHON_VERSION }} - uses: ./openvino/.github/actions/setup_python - with: - version: ${{ env.PYTHON_VERSION }} - should-setup-pip-paths: 'false' - - - name: Install python dependencies for run_parallel.py - run: python3 -m pip install -r ${INSTALL_TEST_DIR}/functional_test_utils/layer_tests_summary/requirements.txt - - - name: Restore tests execution time - uses: actions/cache/restore@v3 - with: - path: ${{ env.PARALLEL_TEST_CACHE }} - key: ${{ runner.os }}-${{ runner.arch }}-tests-functional-cpu-stamp-${{ github.sha }} - restore-keys: | - ${{ runner.os }}-${{ runner.arch }}-tests-functional-cpu-stamp - - - name: Intel CPU plugin func tests (parallel) - run: | - source ${INSTALL_DIR}/setupvars.sh - python3 ${PARALLEL_TEST_SCRIPT} -e ${INSTALL_TEST_DIR}/ov_cpu_func_tests -c ${PARALLEL_TEST_CACHE} -w ${INSTALL_TEST_DIR} -s suite -rf 0 -- --gtest_print_time=1 --gtest_filter=*smoke* - timeout-minutes: 40 - - - name: Save tests execution time - uses: actions/cache/save@v3 - if: github.ref_name == 'master' - with: - path: ${{ env.PARALLEL_TEST_CACHE }} - key: ${{ runner.os }}-${{ runner.arch }}-tests-functional-cpu-stamp-${{ github.sha }} - - - name: Upload Test Results - uses: actions/upload-artifact@v3 - if: ${{ !cancelled() }} - with: - name: test-results-functional-cpu - path: | - ${{ env.INSTALL_TEST_DIR }}/temp/*.log - ${{ env.INSTALL_TEST_DIR }}/logs/*.log - ${{ env.INSTALL_TEST_DIR }}/logs/failed/*.log - ${{ env.INSTALL_TEST_DIR }}/logs/crashed/*.log - ${{ env.INSTALL_TEST_DIR }}/logs/hanged/*.log - ${{ env.INSTALL_TEST_DIR }}/logs/interapted/*.log - ${{ env.INSTALL_TEST_DIR }}/logs/hash_table.csv - ${{ env.PARALLEL_TEST_CACHE }} - if-no-files-found: 'error' + if: fromJSON(needs.smart_ci.outputs.affected_components).CPU.test + needs: [ Build, Smart_CI ] + uses: ./.github/workflows/job_cpu_functional_tests.yml + with: + runner: 'aks-linux-16-cores-arm' + image: 'openvinogithubactions.azurecr.io/dockerhub/ubuntu:20.04' TensorFlow_Hub_Models_Tests: name: TensorFlow Hub Models tests - needs: [Build, Smart_CI] - defaults: - run: - shell: bash - runs-on: 'aks-linux-16-cores-arm' - timeout-minutes: ${{ github.event_name == 'schedule' && 400 || 5 }} - container: - image: openvinogithubactions.azurecr.io/dockerhub/ubuntu:20.04 - volumes: - - /mount/caches:/mount/caches - env: - OPENVINO_REPO: ${{ github.workspace }}/openvino - INSTALL_DIR: ${{ github.workspace }}/install - INSTALL_TEST_DIR: ${{ github.workspace }}/install/tests - MODEL_HUB_TESTS_INSTALL_DIR: ${{ github.workspace }}/install/tests/model_hub_tests + if: ${{ 'false' }} # TODO: Enable once the dependencies are ready for arm (no tensorflow-text available for arm from PyPI) # if: fromJSON(needs.smart_ci.outputs.affected_components).TF_FE.test || # fromJSON(needs.smart_ci.outputs.affected_components).TFL_FE.test - if: ${{ 'false' }} # TODO: Enable once the self-hosted runners are ready for them - - steps: - - name: Check sudo - run: if [ "$(id -u)" -eq 0 ]; then apt update && apt --assume-yes install sudo; fi - - - name: Download OpenVINO package - uses: actions/download-artifact@v3 - with: 
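The CPU functional test job deleted in this hunk is the same parallelised run that the new job_cpu_functional_tests.yml call is expected to provide: run_parallel.py driving ov_cpu_func_tests against a cached list of per-test execution times. Condensed from the lines removed here (the reusable file itself is outside this diff), the pattern is:

# Condensed sketch of the parallel CPU functional test pattern that moves into
# job_cpu_functional_tests.yml (assumed; that file is not part of this diff).
steps:
  - name: Restore tests execution time
    uses: actions/cache/restore@v3
    with:
      path: ${{ env.PARALLEL_TEST_CACHE }}
      key: ${{ runner.os }}-${{ runner.arch }}-tests-functional-cpu-stamp-${{ github.sha }}
      restore-keys: ${{ runner.os }}-${{ runner.arch }}-tests-functional-cpu-stamp

  - name: Intel CPU plugin func tests (parallel)
    timeout-minutes: 40
    run: |
      source ${INSTALL_DIR}/setupvars.sh
      python3 ${PARALLEL_TEST_SCRIPT} -e ${INSTALL_TEST_DIR}/ov_cpu_func_tests \
        -c ${PARALLEL_TEST_CACHE} -w ${INSTALL_TEST_DIR} -s suite -rf 0 \
        -- --gtest_print_time=1 --gtest_filter=*smoke*

  - name: Save tests execution time
    uses: actions/cache/save@v3
    # the timing cache is only refreshed from master runs
    if: github.ref_name == 'master'
    with:
      path: ${{ env.PARALLEL_TEST_CACHE }}
      key: ${{ runner.os }}-${{ runner.arch }}-tests-functional-cpu-stamp-${{ github.sha }}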
- name: openvino_package_x86_64 - path: ${{ env.INSTALL_DIR }} - - - name: Download OpenVINO tests package - uses: actions/download-artifact@v3 - with: - name: openvino_tests_x86_64 - path: ${{ env.INSTALL_TEST_DIR }} - - - name: Extract OpenVINO packages - run: | - pushd ${INSTALL_DIR} - tar -xzf openvino_package.tar.gz -C ${INSTALL_DIR} - popd - - pushd ${INSTALL_TEST_DIR} - tar -xzf openvino_tests.tar.gz -C ${INSTALL_DIR} - popd - - - name: Fetch setup_python action - uses: actions/checkout@v4 - with: - sparse-checkout: | - .github/actions/setup_python/action.yml - sparse-checkout-cone-mode: false - path: 'openvino' - - - name: Setup Python ${{ env.PYTHON_VERSION }} - uses: ./openvino/.github/actions/setup_python - with: - version: ${{ env.PYTHON_VERSION }} - should-setup-pip-paths: 'false' - self-hosted-runner: 'false' - - - name: Install OpenVINO Python wheels - run: python3 -m pip install ${INSTALL_DIR}/tools/openvino-* - - - name: Install TF Hub tests requirements - run: | - python3 -m pip install -r ${MODEL_HUB_TESTS_INSTALL_DIR}/tf_hub_tests/requirements.txt + needs: [ Build, Smart_CI ] + uses: ./.github/workflows/job_tensorflow_hub_models_tests.yml + with: + runner: 'aks-linux-16-cores-arm' + container: '{"image": "openvinogithubactions.azurecr.io/dockerhub/ubuntu:20.04"}' + event: ${{ github.event_name }} - - name: TensorFlow Hub Tests - TF FE - run: | - export PYTHONPATH=${MODEL_HUB_TESTS_INSTALL_DIR}:$PYTHONPATH - python3 -m pytest ${MODEL_HUB_TESTS_INSTALL_DIR}/tf_hub_tests/ -m ${TYPE} --html=${INSTALL_TEST_DIR}/TEST-tf_hub_tf_fe.html --self-contained-html -v - env: - TYPE: ${{ github.event_name == 'schedule' && 'nightly' || 'precommit'}} - TEST_DEVICE: CPU - - - name: Upload Test Results - uses: actions/upload-artifact@v3 - if: ${{ !cancelled() }} - with: - name: test-results-tensorflow-hub-models - path: | - ${{ env.INSTALL_TEST_DIR }}/TEST*.html - if-no-files-found: 'error' - - TensorFlow_Hub_Performance_Models_Tests: - name: TensorFlow Hub Performance Models tests - needs: [Build, Smart_CI] - defaults: - run: - shell: bash - runs-on: 'aks-linux-16-cores-arm' - timeout-minutes: ${{ github.event_name == 'schedule' && 400 || 5 }} - env: - OPENVINO_REPO: ${{ github.workspace }}/openvino - INSTALL_DIR: ${{ github.workspace }}/install - INSTALL_TEST_DIR: ${{ github.workspace }}/install/tests - MODEL_HUB_TESTS_INSTALL_DIR: ${{ github.workspace }}/install/tests/model_hub_tests - # if: fromJSON(needs.smart_ci.outputs.affected_components).TF_FE.test || - # fromJSON(needs.smart_ci.outputs.affected_components).TFL_FE.test - if: ${{ 'false' }} # TODO: Enable once the self-hosted runners are ready for them - - steps: - - name: Check sudo - run: if [ "$(id -u)" -eq 0 ]; then apt update && apt --assume-yes install sudo; fi - - - name: Download OpenVINO package - uses: actions/download-artifact@v3 - with: - name: openvino_package_x86_64 - path: ${{ env.INSTALL_DIR }} - - - name: Download OpenVINO tests package - uses: actions/download-artifact@v3 - with: - name: openvino_tests_x86_64 - path: ${{ env.INSTALL_TEST_DIR }} - - - name: Extract OpenVINO packages - run: | - pushd ${INSTALL_DIR} - tar -xzf openvino_package.tar.gz -C ${INSTALL_DIR} - popd - - pushd ${INSTALL_TEST_DIR} - tar -xzf openvino_tests.tar.gz -C ${INSTALL_DIR} - popd - - - name: Fetch setup_python action - uses: actions/checkout@v4 - with: - sparse-checkout: | - .github/actions/setup_python/action.yml - sparse-checkout-cone-mode: false - path: 'openvino' - - - name: Setup Python ${{ env.PYTHON_VERSION }} - uses: 
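The model-test jobs now hand github.event_name to the callee through an event input instead of reading it inline. Assuming job_tensorflow_hub_models_tests.yml and job_pytorch_models_tests.yml keep the schedule/precommit selection logic that is removed in these hunks, the input would be used roughly like this (timeout value illustrative):

# Sketch of how the 'event' input can reproduce the removed selection logic inside
# the callee; the real job_*_models_tests.yml files are not in this diff.
jobs:
  Model_Tests:
    runs-on: ${{ inputs.runner }}
    # scheduled (nightly) runs get the long time budget, pre-commit runs stay short
    timeout-minutes: ${{ inputs.event == 'schedule' && 400 || 30 }}
    steps:
      - name: Run model tests
        run: python3 -m pytest ${MODEL_HUB_TESTS_INSTALL_DIR}/tf_hub_tests/ -m ${TYPE} -v
        env:
          TYPE: ${{ inputs.event == 'schedule' && 'nightly' || 'precommit' }}
          TEST_DEVICE: CPU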
./openvino/.github/actions/setup_python - with: - version: ${{ env.PYTHON_VERSION }} - should-setup-pip-paths: 'false' - self-hosted-runner: 'false' - - - name: Install OpenVINO Python wheels - run: python3 -m pip install ${INSTALL_DIR}/tools/openvino-* - - - name: Install TF Hub tests requirements - run: | - python3 -m pip install -r ${MODEL_HUB_TESTS_INSTALL_DIR}/tf_hub_tests/requirements.txt - - - name: Install Hub Performance tests requirements - run: | - python3 -m pip install -r ${MODEL_HUB_TESTS_INSTALL_DIR}/performance_tests/requirements.txt - - - name: Performance Hub Tests - run: | - export PYTHONPATH=${MODEL_HUB_TESTS_INSTALL_DIR}:$PYTHONPATH - python3 -m pytest ${MODEL_HUB_TESTS_INSTALL_DIR}/performance_tests/ -m ${TYPE} --html=${INSTALL_TEST_DIR}/TEST-tf_hub_performance.html --self-contained-html -v - env: - TYPE: ${{ github.event_name == 'schedule' && 'nightly' || 'precommit'}} - TEST_DEVICE: CPU - - - name: Upload Test Results - uses: actions/upload-artifact@v3 - if: ${{ !cancelled() }} - with: - name: test-results-tensorflow-hub-performance-models - path: | - ${{ env.INSTALL_TEST_DIR }}/TEST*.html - if-no-files-found: 'error' - - # TODO: Enable once they are ready for self-hosted runners PyTorch_Models_Tests: name: PyTorch Models tests - needs: [Build, Smart_CI] - timeout-minutes: ${{ github.event_name == 'schedule' && 400 || 30 }} - defaults: - run: - shell: bash - runs-on: 'aks-linux-16-cores-arm' - container: - image: openvinogithubactions.azurecr.io/dockerhub/ubuntu:20.04 - volumes: - - /mount/caches:/mount/caches - env: - OPENVINO_REPO: ${{ github.workspace }}/openvino - INSTALL_DIR: ${{ github.workspace }}/install - INSTALL_TEST_DIR: ${{ github.workspace }}/install/tests - MODEL_HUB_TESTS_INSTALL_DIR: ${{ github.workspace }}/install/tests/model_hub_tests + if: ${{ 'false' }} # TODO: Enable once the dependencies are ready for arm (no tensorflow-text available for arm from PyPI) # if: fromJSON(needs.smart_ci.outputs.affected_components).PyTorch_FE.test - if: ${{ 'false' }} # TODO: Enable once the self-hosted runners are ready for them - - steps: - - name: Check sudo - run: if [ "$(id -u)" -eq 0 ]; then apt update && apt --assume-yes install sudo; fi - - - name: Install dependencies - run: | - # install git (required to build pip deps from the sources) - # install 'g++' to build 'detectron2' and 'natten' wheels - sudo apt-get install --assume-yes --no-install-recommends g++ git ca-certificates - - - name: Download OpenVINO package - uses: actions/download-artifact@v3 - with: - name: openvino_package_x86_64 - path: ${{ env.INSTALL_DIR }} - - - name: Download OpenVINO tests package - uses: actions/download-artifact@v3 - with: - name: openvino_tests_x86_64 - path: ${{ env.INSTALL_TEST_DIR }} - - - name: Extract OpenVINO packages - run: | - pushd ${INSTALL_DIR} - tar -xzf openvino_package.tar.gz -C ${INSTALL_DIR} - popd - pushd ${INSTALL_TEST_DIR} - tar -xzf openvino_tests.tar.gz -C ${INSTALL_DIR} - popd - - - name: Fetch setup_python action - uses: actions/checkout@v4 - with: - sparse-checkout: | - .github/actions/setup_python/action.yml - sparse-checkout-cone-mode: false - path: 'openvino' - - - name: Setup Python ${{ env.PYTHON_VERSION }} - uses: ./openvino/.github/actions/setup_python - with: - version: ${{ env.PYTHON_VERSION }} - should-setup-pip-paths: 'false' - self-hosted-runner: 'false' - - - name: Install OpenVINO Python wheels - run: python3 -m pip install ${INSTALL_DIR}/tools/openvino-* - - - name: Install PyTorch tests requirements - run: | - python3 -m pip 
install -r ${MODEL_HUB_TESTS_INSTALL_DIR}/torch_tests/requirements.txt - python3 -m pip install -r ${MODEL_HUB_TESTS_INSTALL_DIR}/torch_tests/requirements_secondary.txt - echo "Available storage:" - df -h - env: - CPLUS_INCLUDE_PATH: ${{ env.Python_ROOT_DIR }}/include/python${{ env.PYTHON_VERSION }} - - - name: PyTorch Models Tests - run: | - export PYTHONPATH=${MODEL_HUB_TESTS_INSTALL_DIR}:$PYTHONPATH - python3 -m pytest ${MODEL_HUB_TESTS_INSTALL_DIR}/torch_tests -m ${TYPE} --html=${INSTALL_TEST_DIR}/TEST-torch_model_tests.html --self-contained-html -v - env: - TYPE: ${{ github.event_name == 'schedule' && 'nightly' || 'precommit'}} - TEST_DEVICE: CPU - USE_SYSTEM_CACHE: False - - - name: Available storage after tests - run: | - echo "Available storage:" - df -h - - - name: Upload Test Results - uses: actions/upload-artifact@v3 - if: ${{ !cancelled() }} - with: - name: test-results-torch-models - path: | - ${{ env.INSTALL_TEST_DIR }}/TEST*.html - if-no-files-found: 'error' + needs: [ Build, Smart_CI ] + uses: ./.github/workflows/job_pytorch_models_tests.yml + with: + runner: 'aks-linux-16-cores-arm' + container: '{"image": "openvinogithubactions.azurecr.io/dockerhub/ubuntu:20.04"}' + event: ${{ github.event_name }} Overall_Status: - name: ci/gha_overall_status - needs: [Smart_CI, Build, Debian_Packages, Samples, ONNX_Runtime, CXX_Unit_Tests, Python_Unit_Tests] + name: ci/gha_overall_status_linux_arm64 + needs: [Smart_CI, Build, Debian_Packages, Samples, ONNX_Runtime, CXX_Unit_Tests, Python_Unit_Tests, CPU_Functional_Tests, + TensorFlow_Hub_Models_Tests, PyTorch_Models_Tests] if: ${{ always() }} runs-on: ubuntu-latest steps: diff --git a/.github/workflows/linux_conditional_compilation.yml b/.github/workflows/linux_conditional_compilation.yml index c5d09112d2e7e7..82b2f24b6b7075 100644 --- a/.github/workflows/linux_conditional_compilation.yml +++ b/.github/workflows/linux_conditional_compilation.yml @@ -5,6 +5,7 @@ on: push: branches: - master + - 'releases/**' concurrency: # github.ref is not unique in post-commit @@ -49,7 +50,7 @@ jobs: container: image: openvinogithubactions.azurecr.io/dockerhub/ubuntu:22.04 volumes: - - /mount/caches:/mount/caches + - /mount:/mount options: -e SCCACHE_AZURE_BLOB_CONTAINER -e SCCACHE_AZURE_CONNECTION_STRING env: DEBIAN_FRONTEND: noninteractive # to prevent apt-get from waiting user input @@ -60,6 +61,7 @@ jobs: GITHUB_WORKSPACE: '/__w/openvino/openvino' OPENVINO_REPO: /__w/openvino/openvino/openvino INSTALL_DIR: /__w/openvino/openvino/openvino_install + INSTALL_TEST_DIR: /__w/openvino/openvino/tests_install BUILD_DIR: /__w/openvino/openvino/openvino_build SELECTIVE_BUILD_STAT_DIR: /__w/openvino/openvino/selective_build_stat MODELS_PATH: /__w/openvino/openvino/testdata @@ -163,7 +165,9 @@ jobs: run: ${SCCACHE_PATH} --show-stats - name: Cmake install - OpenVINO - run: cmake -DCMAKE_INSTALL_PREFIX=${INSTALL_DIR} -P ${BUILD_DIR}/cmake_install.cmake + run: | + cmake -DCMAKE_INSTALL_PREFIX=${INSTALL_DIR} -P ${BUILD_DIR}/cmake_install.cmake + cmake -DCMAKE_INSTALL_PREFIX=${INSTALL_TEST_DIR} -DCOMPONENT=tests -P ${BUILD_DIR}/cmake_install.cmake - name: Build C++ samples - OpenVINO build tree run: | @@ -189,13 +193,26 @@ jobs: tar -czvf ${BUILD_DIR}/openvino_selective_build_stat.tar.gz * popd - pushd ${OPENVINO_REPO} + pushd ${INSTALL_DIR} + tar -czvf ${BUILD_DIR}/openvino_package.tar.gz \ + install_dependencies/install_openvino_dependencies.sh + popd + + pushd ${INSTALL_TEST_DIR} tar -czvf ${BUILD_DIR}/openvino_tests.tar.gz \ - 
bin/intel64/Release/ov_cpu_func_tests \ - src/tests/test_utils/functional_test_utils/layer_tests_summary/* \ - scripts/install_dependencies/* + tests/ov_cpu_func_tests \ + tests/libtemplate_extension.so \ + tests/functional_test_utils/layer_tests_summary/* popd + - name: Upload openvino package + if: ${{ always() }} + uses: actions/upload-artifact@v3 + with: + name: openvino_package + path: ${{ env.BUILD_DIR }}/openvino_package.tar.gz + if-no-files-found: 'error' + - name: Upload selective build statistics package if: ${{ always() }} uses: actions/upload-artifact@v3 @@ -223,7 +240,7 @@ jobs: container: image: openvinogithubactions.azurecr.io/dockerhub/ubuntu:22.04 volumes: - - /mount/caches:/mount/caches + - /mount:/mount options: -e SCCACHE_AZURE_BLOB_CONTAINER -e SCCACHE_AZURE_CONNECTION_STRING env: DEBIAN_FRONTEND: noninteractive # to prevent apt-get from waiting user input @@ -284,7 +301,6 @@ jobs: -DBUILD_SHARED_LIBS=OFF \ -DENABLE_CPPLINT=OFF \ -DSELECTIVE_BUILD=ON \ - -DENABLE_LTO=OFF \ -DENABLE_TEMPLATE=OFF \ -DENABLE_INTEL_GPU=OFF \ -DENABLE_INTEL_GNA=OFF \ @@ -310,74 +326,23 @@ jobs: CPU_Functional_Tests: name: CPU functional tests - needs: [Build, Smart_CI] - timeout-minutes: 25 - defaults: - run: - shell: bash - runs-on: aks-linux-8-cores-32gb - container: - image: openvinogithubactions.azurecr.io/dockerhub/ubuntu:22.04 - env: - OPENVINO_REPO: /__w/openvino/openvino/openvino - DEBIAN_FRONTEND: noninteractive # to prevent apt-get from waiting user input - INSTALL_TEST_DIR: /__w/openvino/openvino/install/tests - PARALLEL_TEST_SCRIPT: /__w/openvino/openvino/install/tests/src/tests/test_utils/functional_test_utils/layer_tests_summary/run_parallel.py - PARALLEL_TEST_CACHE: /__w/openvino/openvino/install/tests/test_cache.lst if: fromJSON(needs.smart_ci.outputs.affected_components).CPU.test - + needs: [ Build, Smart_CI ] + uses: ./.github/workflows/job_cpu_functional_tests.yml + with: + runner: 'aks-linux-8-cores-32gb' + image: 'openvinogithubactions.azurecr.io/dockerhub/ubuntu:22.04' + + Overall_Status: + name: ci/gha_overall_status_linux_cc + needs: [Smart_CI, Build, CC_Build, CPU_Functional_Tests] + if: ${{ always() }} + runs-on: ubuntu-latest steps: - - name: Download OpenVINO tests package - uses: actions/download-artifact@v3 - with: - name: openvino_tests - path: ${{ env.INSTALL_TEST_DIR }} - - - name: Extract OpenVINO tests package - run: tar -xvzf ${INSTALL_TEST_DIR}/openvino_tests.tar.gz -C ${INSTALL_TEST_DIR} - - - name: Install OpenVINO dependencies - run: bash ${INSTALL_TEST_DIR}/scripts/install_dependencies/install_openvino_dependencies.sh -c=core -c=gpu -y - - - name: Fetch setup_python action - uses: actions/checkout@v4 - with: - sparse-checkout: | - .github/actions/setup_python/action.yml - sparse-checkout-cone-mode: false - path: ${{ env.OPENVINO_REPO }} - - - name: Setup Python ${{ env.PYTHON_VERSION }} - uses: ./openvino/.github/actions/setup_python - with: - version: ${{ env.PYTHON_VERSION }} - should-setup-pip-paths: 'false' - - - name: Install python dependencies for run_parallel.py - run: python3 -m pip install -r ${INSTALL_TEST_DIR}/src/tests/test_utils/functional_test_utils/layer_tests_summary/requirements.txt - - - name: Restore tests execution time - uses: actions/cache/restore@v3 - with: - path: ${{ env.PARALLEL_TEST_CACHE }} - key: ${{ runner.os }}-tests-functional-cpu-stamp-${{ github.sha }} - restore-keys: | - ${{ runner.os }}-tests-functional-cpu-stamp - - - name: Intel CPU plugin func tests (parallel) - run: python3 ${PARALLEL_TEST_SCRIPT} -e 
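The /mount volume change and the SCCACHE_AZURE_* pass-through options touched throughout this patch all belong to one sccache setup shared by the build jobs. Pulling together only the pieces visible in this diff (versions as pinned here), the minimal wiring is:

# Minimal sccache-on-Azure wiring, consolidated from the build jobs in this patch.
jobs:
  Build:
    runs-on: aks-linux-16-cores-arm                  # example runner label from this patch
    container:
      image: openvinogithubactions.azurecr.io/dockerhub/ubuntu:20.04
      volumes:
        - /mount:/mount
      # forward the Azure container/connection string set on the runner into the job container
      options: -e SCCACHE_AZURE_BLOB_CONTAINER -e SCCACHE_AZURE_CONNECTION_STRING
    env:
      CMAKE_CXX_COMPILER_LAUNCHER: sccache
      CMAKE_C_COMPILER_LAUNCHER: sccache
      SCCACHE_AZURE_KEY_PREFIX: ubuntu20_aarch64_onnxruntime   # per-workflow cache namespace
    steps:
      - name: Install sccache
        uses: mozilla-actions/sccache-action@v0.0.3
        with:
          version: "v0.5.4"
      # ... cmake configure and build run here, picking up the launcher variables ...
      - name: Show sccache stats
        run: ${SCCACHE_PATH} --show-stats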
${INSTALL_TEST_DIR}/bin/intel64/Release/ov_cpu_func_tests -c ${PARALLEL_TEST_CACHE} -w ${INSTALL_TEST_DIR} -s suite -rf 0 -- --gtest_print_time=1 --gtest_filter=*smoke* - timeout-minutes: 20 - - - name: Upload Test Results - uses: actions/upload-artifact@v3 - if: ${{ !cancelled() }} - with: - name: test-results-functional-cpu - path: | - ${{ env.INSTALL_TEST_DIR }}/TEST*.xml - ${{ env.INSTALL_TEST_DIR }}/logs/failed/*.log - ${{ env.INSTALL_TEST_DIR }}/logs/crashed/*.log - ${{ env.INSTALL_TEST_DIR }}/logs/hanged/*.log - ${{ env.INSTALL_TEST_DIR }}/logs/interapted/*.log - ${{ env.INSTALL_TEST_DIR }}/logs/disabled_tests.log - if-no-files-found: 'error' + - name: Check status of all jobs + if: >- + ${{ + contains(needs.*.result, 'failure') || + contains(needs.*.result, 'cancelled') + }} + run: exit 1 diff --git a/.github/workflows/linux_riscv.yml b/.github/workflows/linux_riscv.yml index 83eebb5b54b7b8..5db7ed22a02707 100644 --- a/.github/workflows/linux_riscv.yml +++ b/.github/workflows/linux_riscv.yml @@ -49,7 +49,7 @@ jobs: container: image: openvinogithubactions.azurecr.io/dockerhub/ubuntu:22.04 volumes: - - /mount/caches:/mount/caches + - /mount:/mount env: CMAKE_BUILD_TYPE: 'Release' CMAKE_GENERATOR: 'Ninja' @@ -206,3 +206,17 @@ jobs: source ${OPENVINO_BUILD_DIR}/dependencies/deactivate_conanbuild.sh env: CMAKE_TOOLCHAIN_FILE: ${{ env.OPENVINO_BUILD_DIR }}/dependencies/conan_toolchain.cmake + + Overall_Status: + name: ci/gha_overall_status_linux_riscv + needs: [Smart_CI, Build] + if: ${{ always() }} + runs-on: ubuntu-latest + steps: + - name: Check status of all jobs + if: >- + ${{ + contains(needs.*.result, 'failure') || + contains(needs.*.result, 'cancelled') + }} + run: exit 1 diff --git a/.github/workflows/mac.yml b/.github/workflows/mac.yml index d8ac800e18b7d5..2a59d9de4d50b2 100644 --- a/.github/workflows/mac.yml +++ b/.github/workflows/mac.yml @@ -33,27 +33,41 @@ env: PYTHON_VERSION: '3.11' jobs: + + Smart_CI: + runs-on: ubuntu-latest + outputs: + affected_components: "${{ steps.smart_ci.outputs.affected_components }}" + skip_workflow: "${{ steps.smart_ci.outputs.skip_workflow }}" + steps: + - name: checkout action + uses: actions/checkout@v4 + with: + sparse-checkout: .github/actions/smart-ci + + - name: Get affected components + id: smart_ci + uses: ./.github/actions/smart-ci + with: + repository: ${{ github.repository }} + pr: ${{ github.event.number }} + commit_sha: ${{ github.sha }} + component_pattern: "category: (.*)" + repo_token: ${{ secrets.GITHUB_TOKEN }} + skip_when_only_listed_labels_set: 'docs' + skip_when_only_listed_files_changed: '*.md,*.rst,*.png,*.jpg,*.svg' + Build: + needs: Smart_CI timeout-minutes: 150 defaults: run: shell: bash - strategy: - max-parallel: 2 - fail-fast: false - matrix: - include: - - arhitecture: 'x86_64' - machine: 'macos-13-large' - macos_deployment_target: '10.12' - - arhitecture: 'arm64' - machine: 'macos-13-xlarge' - macos_deployment_target: '11.0' - runs-on: ${{ matrix.machine }} + runs-on: 'macos-13-large' env: CMAKE_BUILD_TYPE: 'Release' CMAKE_GENERATOR: 'Ninja Multi-Config' - MACOSX_DEPLOYMENT_TARGET: ${{ matrix.macos_deployment_target }} + MACOSX_DEPLOYMENT_TARGET: '10.12' CMAKE_CXX_COMPILER_LAUNCHER: ccache CMAKE_C_COMPILER_LAUNCHER: ccache OPENVINO_REPO: ${{ github.workspace }}/openvino @@ -125,9 +139,9 @@ jobs: # github.ref_name is 'ref/PR_#' in case of the PR, and 'branch_name' when executed on push save: ${{ github.ref_name == 'master' && 'true' || 'false' }} verbose: 2 - key: ${{ runner.os }}-${{ matrix.arhitecture }}-main + 
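The Smart_CI job added to mac.yml also declares a skip_workflow output whose consumer is outside this excerpt. Mirroring how the other workflows in this patch use Smart CI, a plausible guard on the Build job (an assumption, not confirmed by this diff) would be:

# Hypothetical consumer of skip_workflow; the actual guard is not visible in this excerpt.
jobs:
  Build:
    needs: Smart_CI
    # skip_workflow is empty when the workflow should run, so the negation lets Build proceed
    if: "!needs.smart_ci.outputs.skip_workflow"
    runs-on: 'macos-13-large'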
key: ${{ runner.os }}-${{ runner.arch }}-main restore-keys: | - ${{ runner.os }}-${{ matrix.arhitecture }}-main + ${{ runner.os }}-${{ runner.arch }}-main - name: CMake configure run: | @@ -184,7 +198,7 @@ jobs: if: ${{ always() }} uses: actions/upload-artifact@v3 with: - name: openvino_package_${{ matrix.arhitecture }} + name: openvino_package path: ${{ env.BUILD_DIR }}/openvino_package.tar.gz if-no-files-found: 'error' @@ -192,650 +206,39 @@ jobs: if: ${{ always() }} uses: actions/upload-artifact@v3 with: - name: openvino_tests_${{ matrix.arhitecture }} + name: openvino_tests path: ${{ env.BUILD_DIR }}/openvino_tests.tar.gz if-no-files-found: 'error' Samples: - needs: Build - timeout-minutes: 5 - defaults: - run: - shell: bash - strategy: - max-parallel: 2 - fail-fast: false - matrix: - include: - - arhitecture: 'x86_64' - machine: 'macos-13' - - arhitecture: 'arm64' - machine: 'macos-13-xlarge' - runs-on: ${{ matrix.machine }} - env: - OPENVINO_REPO: ${{ github.workspace }}/openvino - INSTALL_DIR: ${{ github.workspace }}/install - INSTALL_TEST_DIR: ${{ github.workspace }}/install/tests - BUILD_DIR: ${{ github.workspace }}/build - - steps: - - # - # Initialize OpenVINO - # - - - name: Download OpenVINO package - uses: actions/download-artifact@v3 - with: - name: openvino_package_${{ matrix.arhitecture }} - path: ${{ env.INSTALL_DIR }} - - - name: Download OpenVINO tests package - uses: actions/download-artifact@v3 - with: - name: openvino_tests_${{ matrix.arhitecture }} - path: ${{ env.INSTALL_TEST_DIR }} - - - name: Extract OpenVINO packages - run: | - pushd ${INSTALL_DIR} - tar -xzf openvino_package.tar.gz -C ${INSTALL_DIR} - popd - - pushd ${INSTALL_TEST_DIR} - tar -xzf openvino_tests.tar.gz -C ${INSTALL_DIR} - popd - - - name: Install dependencies - run: brew install coreutils - - - name: Fetch setup_python action - uses: actions/checkout@v4 - with: - sparse-checkout: | - .github/actions/setup_python/action.yml - sparse-checkout-cone-mode: false - path: 'openvino' - - - name: Setup Python ${{ env.PYTHON_VERSION }} - uses: ./openvino/.github/actions/setup_python - with: - version: ${{ env.PYTHON_VERSION }} - should-setup-pip-paths: 'false' - self-hosted-runner: 'false' - - - name: Build cpp samples - run: ${INSTALL_DIR}/samples/cpp/build_samples.sh -i ${INSTALL_DIR} -b ${BUILD_DIR}/cpp_samples - env: - CMAKE_COMPILE_WARNING_AS_ERROR: 'ON' - - - name: Build c samples - run: ${INSTALL_DIR}/samples/c/build_samples.sh -i ${INSTALL_DIR} -b ${BUILD_DIR}/c_samples - env: - CMAKE_COMPILE_WARNING_AS_ERROR: 'ON' - - # - # Tests - # - - - name: Samples tests - run: | - export WORKSPACE=${INSTALL_DIR} - export IE_APP_PATH=${INSTALL_DIR}/samples_bin - export IE_APP_PYTHON_PATH=${INSTALL_DIR}/samples/python - export SHARE=${INSTALL_TEST_DIR}/smoke_tests/samples_smoke_tests_data - - python3 -m pip install --ignore-installed PyYAML -r ${INSTALL_TEST_DIR}/smoke_tests/requirements.txt - - source ${INSTALL_DIR}/setupvars.sh - - python3 -m pytest -sv ${INSTALL_TEST_DIR}/smoke_tests \ - --env_conf ${INSTALL_TEST_DIR}/smoke_tests/env_config.yml \ - --junitxml=${INSTALL_TEST_DIR}/TEST-SamplesSmokeTests.xml - - - name: Upload Test Results - uses: actions/upload-artifact@v3 - if: ${{ !cancelled() }} - with: - name: test-results-samples-${{ matrix.arhitecture }} - path: ${{ env.INSTALL_TEST_DIR }}/TEST*.xml - if-no-files-found: 'error' + needs: [ Build, Smart_CI ] + if: fromJSON(needs.smart_ci.outputs.affected_components).samples + uses: ./.github/workflows/job_samples_tests.yml + with: + runner: 'macos-13' + 
affected-components: ${{ needs.smart_ci.outputs.affected_components }} CXX_Unit_Tests: - name: C++ Unit tests - needs: Build - timeout-minutes: 20 - defaults: - run: - shell: bash - strategy: - max-parallel: 2 - fail-fast: false - matrix: - include: - - arhitecture: 'x86_64' - machine: 'macos-13' - - arhitecture: 'arm64' - machine: 'macos-13-xlarge' - runs-on: ${{ matrix.machine }} - env: - INSTALL_DIR: ${{ github.workspace }}/install - INSTALL_TEST_DIR: ${{ github.workspace }}/install/tests - - steps: - # - # Dependencies - # - - - name: Download OpenVINO package - uses: actions/download-artifact@v3 - with: - name: openvino_package_${{ matrix.arhitecture }} - path: ${{ env.INSTALL_DIR }} - - - name: Download OpenVINO tests package - uses: actions/download-artifact@v3 - with: - name: openvino_tests_${{ matrix.arhitecture }} - path: ${{ env.INSTALL_TEST_DIR }} - - - name: Extract OpenVINO packages - run: | - pushd ${{ env.INSTALL_DIR }} - tar -xzf openvino_package.tar.gz -C ${{ env.INSTALL_DIR }} && rm openvino_package.tar.gz || exit 1 - popd - pushd ${{ env.INSTALL_TEST_DIR }} - tar -xzf openvino_tests.tar.gz -C ${{ env.INSTALL_DIR }} && rm openvino_tests.tar.gz || exit 1 - popd - - # - # Tests - # - - - name: OpenVINO Core Unit Tests - run: | - source ${{ env.INSTALL_DIR }}/setupvars.sh - ${{ env.INSTALL_TEST_DIR }}/ov_core_unit_tests --gtest_print_time=1 --gtest_filter=-*IE_GPU* \ - --gtest_output=xml:${{ env.INSTALL_TEST_DIR }}/TEST-NGraphUT.xml - - - name: OpenVINO Inference Functional Tests - run: | - source ${{ env.INSTALL_DIR }}/setupvars.sh - ${{ env.INSTALL_TEST_DIR }}/ov_inference_functional_tests --gtest_print_time=1 \ - --gtest_output=xml:${{ env.INSTALL_TEST_DIR }}/TEST-InferenceFunc.xml - - - name: OpenVINO Inference Unit Tests - run: | - source ${{ env.INSTALL_DIR }}/setupvars.sh - ${{ env.INSTALL_TEST_DIR }}/ov_inference_unit_tests --gtest_print_time=1 \ - --gtest_output=xml:${{ env.INSTALL_TEST_DIR }}/TEST-InferenceUnit.xml - - - name: Low Precision Transformations Tests - run: | - source ${{ env.INSTALL_DIR }}/setupvars.sh - - # Skips under Ticket: 122660 - skip_filter=${{ matrix.arhitecture == 'arm64' && '--gtest_filter=-*smoke_LPT/FoldFakeQuantizeInTransformations.CompareFunctions*' || '' }} - - ${{ env.INSTALL_TEST_DIR }}/ov_lp_transformations_tests --gtest_print_time=1 "$skip_filter" \ - --gtest_output=xml:${{ env.INSTALL_TEST_DIR }}/TEST-LpTransformations.xml - - - name: OpenVINO Conditional compilation tests - run: | - source ${{ env.INSTALL_DIR }}/setupvars.sh - ${{ env.INSTALL_TEST_DIR }}/ov_conditional_compilation_tests --gtest_print_time=1 \ - --gtest_output=xml:${{ env.INSTALL_TEST_DIR }}/TEST-ConditionalCompilation.xml - - - name: IR frontend tests - run: | - source ${{ env.INSTALL_DIR }}/setupvars.sh - ${{ env.INSTALL_TEST_DIR }}/ov_ir_frontend_tests --gtest_print_time=1 \ - --gtest_output=xml:${{ env.INSTALL_TEST_DIR }}/TEST-IRFrontend.xml - - - name: PaddlePaddle frontend tests - if: ${{ 'false' }} - run: | - source ${{ env.INSTALL_DIR }}/setupvars.sh - ${{ env.INSTALL_TEST_DIR }}/paddle_tests --gtest_print_time=1 --gtest_filter=*smoke* \ - --gtest_output=xml:${{ env.INSTALL_TEST_DIR }}/TEST-PaddleTests.xml - - - name: ONNX frontend tests - if: ${{ matrix.arhitecture == 'x86_64' }} # Ticket for ARM64: 122663 - run: | - source ${{ env.INSTALL_DIR }}/setupvars.sh - - ${{ env.INSTALL_TEST_DIR }}/ov_onnx_frontend_tests --gtest_print_time=1 --gtest_filter=-*IE_GPU* \ - --gtest_output=xml:${{ env.INSTALL_TEST_DIR }}/TEST-ONNXFrontend.xml - - - name: TensorFlow 
Common tests - run: | - source ${{ env.INSTALL_DIR }}/setupvars.sh - ${{ env.INSTALL_TEST_DIR }}/ov_tensorflow_common_tests --gtest_print_time=1 \ - --gtest_output=xml:${{ env.INSTALL_TEST_DIR }}/TEST-TensorFlowCommonFrontend.xml - - - name: TensorFlow frontend tests - run: | - source ${{ env.INSTALL_DIR }}/setupvars.sh - - # Skips under Ticket: 122666 - skip_filter=${{ matrix.arhitecture == 'arm64' && '--gtest_filter=-*CompileModelsTests.ModelWithSplitConvConcat*:*NgramCompilation*' || '' }} - - ${{ env.INSTALL_TEST_DIR }}/ov_tensorflow_frontend_tests --gtest_print_time=1 "$skip_filter" \ - --gtest_output=xml:${{ env.INSTALL_TEST_DIR }}/TEST-TensorFlowFrontend.xml - - - name: TensorFlow Lite frontend tests - run: | - source ${{ env.INSTALL_DIR }}/setupvars.sh - ${{ env.INSTALL_TEST_DIR }}/ov_tensorflow_lite_frontend_tests --gtest_print_time=1 \ - --gtest_output=xml:${{ env.INSTALL_TEST_DIR }}/TEST-TensorFlowLiteFrontend.xml - - - name: Transformations func tests - run: | - source ${{ env.INSTALL_DIR }}/setupvars.sh - - # Skips under Ticket: 122668 - skip_filter=${{ matrix.arhitecture == 'arm64' && '--gtest_filter=-*TransformationTestsF.CompressQuantizeWeights*:*TransformationTests/CompressQuantizeWeightsTests.FusionTest*' || '' }} - - ${{ env.INSTALL_TEST_DIR }}/ov_transformations_tests --gtest_print_time=1 "$skip_filter" \ - --gtest_output=xml:${{ env.INSTALL_TEST_DIR }}/TEST-Transformations.xml - - - name: Common test utils tests - run: | - source ${{ env.INSTALL_DIR }}/setupvars.sh - ${{ env.INSTALL_TEST_DIR }}/ov_util_tests --gtest_print_time=1 \ - --gtest_output=xml:${{ env.INSTALL_TEST_DIR }}/TEST-commonUtilsTests.xml - - - name: Snippets func tests - run: | - source ${{ env.INSTALL_DIR }}/setupvars.sh - ${{ env.INSTALL_TEST_DIR }}/ov_snippets_func_tests --gtest_print_time=1 \ - --gtest_output=xml:${{ env.INSTALL_TEST_DIR }}/TEST-SnippetsFuncTests.xml - - - name: CPU plugin unit tests - run: | - source ${{ env.INSTALL_DIR }}/setupvars.sh - ${{ env.INSTALL_TEST_DIR }}/ov_cpu_unit_tests --gtest_print_time=1 \ - --gtest_output=xml:${{ env.INSTALL_TEST_DIR }}/TEST-CPUUnitTests.xml - - - name: ov_subgraphs_dumper_tests tests - run: | - source ${{ env.INSTALL_DIR }}/setupvars.sh - ${{ env.INSTALL_TEST_DIR }}/ov_subgraphs_dumper_tests --gtest_print_time=1 \ - --gtest_output=xml:${INSTALL_TEST_DIR}/TEST-ov_subgraphs_dumper_tests.xml - - - name: Template OpImpl tests - run: | - source ${{ env.INSTALL_DIR }}/setupvars.sh - ${{ env.INSTALL_TEST_DIR }}/ov_op_conformance_tests --gtest_print_time=1 --device=TEMPLATE --gtest_filter="*OpImpl*" \ - --gtest_output=xml:${INSTALL_TEST_DIR}/TEST-TemplateOpImplTests.xml - - - name: AUTO unit tests - run: | - source ${{ env.INSTALL_DIR }}/setupvars.sh - ${{ env.INSTALL_TEST_DIR }}/ov_auto_unit_tests --gtest_print_time=1 \ - --gtest_output=xml:${{ env.INSTALL_TEST_DIR }}/TEST-ov_auto_unit_tests.xml - - - name: AUTO func Tests - run: | - source ${{ env.INSTALL_DIR }}/setupvars.sh - ${{ env.INSTALL_TEST_DIR }}/ov_auto_func_tests --gtest_print_time=1 \ - --gtest_output=xml:${{ env.INSTALL_TEST_DIR }}/TEST-ov_auto_func_tests.xml - - - name: Template plugin func tests - run: | - source ${{ env.INSTALL_DIR }}/setupvars.sh - ${{ env.INSTALL_TEST_DIR }}/ov_template_func_tests --gtest_print_time=1 \ - --gtest_filter=*smoke* \ - --gtest_output=xml:${{ env.INSTALL_TEST_DIR }}/TEST-TemplateFuncTests.xml - - - name: Inference Engine C API tests - run: | - source ${{ env.INSTALL_DIR }}/setupvars.sh - ${{ env.INSTALL_TEST_DIR }}/InferenceEngineCAPITests 
--gtest_print_time=1 \ - --gtest_output=xml:${{ env.INSTALL_TEST_DIR }}/TEST-InferenceEngineCAPITests.xml - - - name: OpenVINO C API tests - run: | - source ${{ env.INSTALL_DIR }}/setupvars.sh - ${{ env.INSTALL_TEST_DIR }}/ov_capi_test --gtest_print_time=1 \ - --gtest_output=xml:${{ env.INSTALL_TEST_DIR }}/TEST-OpenVINOCAPITests.xml - - - name: AutoBatch unit tests - run: | - source ${{ env.INSTALL_DIR }}/setupvars.sh - ${{ env.INSTALL_TEST_DIR }}/ov_auto_batch_unit_tests --gtest_output=xml:${{ env.INSTALL_TEST_DIR }}/TEST-ov_auto_batch_unit_tests.xml - - - name: AutoBatch func tests - run: | - source ${{ env.INSTALL_DIR }}/setupvars.sh - ${{ env.INSTALL_TEST_DIR }}/ov_auto_batch_func_tests --gtest_output=xml:${{ env.INSTALL_TEST_DIR }}/TEST-ov_auto_batch_func_tests.xml - - - name: Proxy Plugin func tests - run: | - source ${{ env.INSTALL_DIR }}/setupvars.sh - ${{ env.INSTALL_TEST_DIR }}/ov_proxy_plugin_tests --gtest_print_time=1 --gtest_output=xml:${{ env.INSTALL_TEST_DIR }}/TEST-OVProxyTests.xml - - - name: Hetero unit tests - run: | - source ${{ env.INSTALL_DIR }}/setupvars.sh - ${{ env.INSTALL_TEST_DIR }}/ov_hetero_unit_tests --gtest_print_time=1 --gtest_output=xml:${{ env.INSTALL_TEST_DIR }}/TEST-OVHeteroUnitTests.xml - - - name: Hetero func tests - run: | - source ${{ env.INSTALL_DIR }}/setupvars.sh - ${{ env.INSTALL_TEST_DIR }}/ov_hetero_func_tests --gtest_print_time=1 --gtest_output=xml:${{ env.INSTALL_TEST_DIR }}/TEST-OVHeteroFuncTests.xml - - - name: Upload Test Results - uses: actions/upload-artifact@v3 - if: ${{ always() }} - with: - name: test-results-cpp-${{ matrix.arhitecture }} - path: ${{ env.INSTALL_TEST_DIR }}/TEST*.xml - if-no-files-found: 'error' + name: C++ unit tests + needs: [ Build, Smart_CI ] + uses: ./.github/workflows/job_cxx_unit_tests.yml + with: + runner: 'macos-13' + affected-components: ${{ needs.smart_ci.outputs.affected_components }} Python_Unit_Tests: name: Python unit tests - needs: Build - timeout-minutes: 55 - defaults: - run: - shell: bash - strategy: - max-parallel: 2 - fail-fast: false - matrix: - include: - - arhitecture: 'x86_64' - machine: 'macos-13' - - arhitecture: 'arm64' - machine: 'macos-13-xlarge' - runs-on: ${{ matrix.machine }} - env: - OPENVINO_REPO: ${{ github.workspace }}/openvino - OPENVINO_CONTRIB_REPO: ${{ github.workspace }}/openvino_contrib - INSTALL_DIR: ${{ github.workspace }}/install - INSTALL_TEST_DIR: ${{ github.workspace }}/install/tests - LAYER_TESTS_INSTALL_DIR: ${{ github.workspace }}/install/tests/layer_tests - steps: - - name: Fetch setup_python action - uses: actions/checkout@v4 - with: - sparse-checkout: | - .github/actions/setup_python/action.yml - sparse-checkout-cone-mode: false - path: 'openvino' - - - name: Setup Python ${{ env.PYTHON_VERSION }} - uses: ./openvino/.github/actions/setup_python - with: - version: ${{ env.PYTHON_VERSION }} - should-setup-pip-paths: 'false' - self-hosted-runner: 'false' - - # - # Dependencies - # - - - name: Download OpenVINO package - uses: actions/download-artifact@v3 - with: - name: openvino_package_${{ matrix.arhitecture }} - path: ${{ env.INSTALL_DIR }} - - - name: Download OpenVINO tests package - uses: actions/download-artifact@v3 - with: - name: openvino_tests_${{ matrix.arhitecture }} - path: ${{ env.INSTALL_TEST_DIR }} - - - name: Extract OpenVINO packages - run: | - pushd ${{ env.INSTALL_DIR }} - tar -xzf openvino_package.tar.gz -C ${{ env.INSTALL_DIR }} - popd - - pushd ${{ env.INSTALL_TEST_DIR }} - tar -xzf openvino_tests.tar.gz -C ${{ env.INSTALL_DIR }} - popd - - - 
name: Install OpenVINO Python wheels - run: | - # Install the core OV wheel - python3 -m pip install ${{ env.INSTALL_DIR }}/tools/openvino-*.whl - - # mxnet is only available on x86_64 - extras_to_install="caffe,kaldi,onnx,tensorflow2,pytorch" - if [[ "${{ matrix.arhitecture }}" == "x86_64" ]]; then - extras_to_install="mxnet,$extras_to_install" - fi - - # Find and install OV dev wheel - pushd ${{ env.INSTALL_DIR }}/tools - ov_dev_wheel_name=$(find . -name 'openvino_dev*.whl') - python3 -m pip install $ov_dev_wheel_name[$extras_to_install] - popd - - - name: Install Python API tests dependencies - run: | - # For torchvision to OpenVINO preprocessing converter - python3 -m pip install -r ${{ env.INSTALL_TEST_DIR }}/python/preprocess/torchvision/requirements.txt - - # TODO: replace with Python API tests requirements - python3 -m pip install -r ${{ env.INSTALL_TEST_DIR }}/mo/requirements_dev.txt - - - name: Python API 1.0 Tests - run: | - python3 -m pytest -s ${{ env.INSTALL_TEST_DIR }}/pyngraph \ - --junitxml=${{ env.INSTALL_TEST_DIR }}/TEST-Pyngraph.xml \ - --ignore=${{ env.INSTALL_TEST_DIR }}/pyngraph/tests_compatibility/test_onnx/test_zoo_models.py \ - --ignore=${{ env.INSTALL_TEST_DIR }}/pyngraph/tests_compatibility/test_onnx/test_backend.py - - - name: Python API 2.0 Tests - run: | - # For python imports to import pybind_mock_frontend - export PYTHONPATH=${{ env.INSTALL_TEST_DIR }}:$PYTHONPATH - # for 'template' extension - export DYLD_LIBRARY_PATH=${{ env.INSTALL_TEST_DIR }}:$DYLD_LIBRARY_PATH - - python3 -m pytest -sv ${{ env.INSTALL_TEST_DIR }}/pyopenvino \ - --junitxml=${{ env.INSTALL_TEST_DIR }}/TEST-Pyngraph.xml \ - --ignore=${{ env.INSTALL_TEST_DIR }}/pyopenvino/tests/test_utils/test_utils.py - - - name: MO Python API Tests - run: | - python3 -m pip install -r ${{ env.LAYER_TESTS_INSTALL_DIR }}/requirements.txt - - # Used for 'test_utils' installed in '/python/openvino/test_utils' - export PYTHONPATH=${{ env.INSTALL_TEST_DIR }}/python/openvino/test_utils:${{ env.INSTALL_TEST_DIR }}/python:$PYTHONPATH - - python3 -m pytest ${{ env.LAYER_TESTS_INSTALL_DIR }}/mo_python_api_tests/ --junitxml=${{ env.INSTALL_TEST_DIR }}/TEST-test_mo_convert.xml - env: - TEST_DEVICE: CPU - TEST_PRECISION: FP16 - - - name: OVC Python API Tests - run: | - python3 -m pip install -r ${{ env.LAYER_TESTS_INSTALL_DIR }}/requirements.txt - - # Used for 'test_utils' installed in '/python/openvino/test_utils' - export PYTHONPATH=${{ env.INSTALL_TEST_DIR }}/python/openvino/test_utils:${{ env.INSTALL_TEST_DIR }}/python:$PYTHONPATH - - python3 -m pytest ${{ env.LAYER_TESTS_INSTALL_DIR }}/ovc_python_api_tests --junitxml=${{ env.INSTALL_TEST_DIR }}/TEST-test_ovc_convert.xml - env: - TEST_DEVICE: CPU - TEST_PRECISION: FP16 - - - name: Model Optimizer unit tests - run: | - export PYTHONPATH=${{ env.INSTALL_TEST_DIR }}:$PYTHONPATH - python3 -m pytest -s ${{ env.INSTALL_TEST_DIR }}/mo/unit_tests \ - --ignore=${{ env.INSTALL_TEST_DIR }}/mo/unit_tests/mo/front/mxnet \ - --junitxml=${{ env.INSTALL_TEST_DIR }}/TEST-ModelOptimizer.xml - - - name: PyTorch Layer Tests - run: | - python3 -m pip install -r ${{ env.LAYER_TESTS_INSTALL_DIR }}/requirements.txt - export PYTHONPATH=${{ env.LAYER_TESTS_INSTALL_DIR }}:$PYTHONPATH - - python3 -m pytest ${{ env.LAYER_TESTS_INSTALL_DIR }}/pytorch_tests -m precommit --junitxml=${{ env.INSTALL_TEST_DIR }}/TEST-pytorch.xml - env: - TEST_DEVICE: CPU - TEST_PRECISION: FP16 - - - name: ONNX Layer Tests - run: | - python3 -m pip install -r ${{ env.LAYER_TESTS_INSTALL_DIR }}/requirements.txt - - 
export PYTHONPATH=${{ env.INSTALL_TEST_DIR }}/mo:$PYTHONPATH - - python3 -m pytest ${{ env.LAYER_TESTS_INSTALL_DIR }}/onnx_tests -m "not launch_only_if_manually_specified and precommit" --junitxml=${{ env.INSTALL_TEST_DIR }}/TEST-onnx.xml - env: - TEST_DEVICE: CPU - TEST_PRECISION: FP16 - - - name: TensorFlow 1 Layer Tests - TF FE - run: | - python3 -m pip install -r ${{ env.LAYER_TESTS_INSTALL_DIR }}/requirements.txt - - export PYTHONPATH=${{ env.INSTALL_TEST_DIR }}/mo:$PYTHONPATH - - python3 -m pytest ${{ env.LAYER_TESTS_INSTALL_DIR }}/tensorflow_tests/ --use_new_frontend -m precommit_tf_fe --junitxml=${{ env.INSTALL_TEST_DIR }}/TEST-tf_fe.xml - env: - TEST_DEVICE: CPU - - - name: TensorFlow 2 Layer Tests - TF FE - if: ${{ 'false' }} # Ticket: 123322 - run: | - python3 -m pip install -r ${{ env.LAYER_TESTS_INSTALL_DIR }}/requirements.txt - - export PYTHONPATH=${{ env.INSTALL_TEST_DIR }}/mo:$PYTHONPATH - - python3 -m pytest ${{ env.LAYER_TESTS_INSTALL_DIR }}/tensorflow2_keras_tests/ --use_new_frontend -m precommit_tf_fe --junitxml=${{ env.INSTALL_TEST_DIR }}/TEST-tf2_fe.xml - env: - TEST_DEVICE: CPU - - - name: TensorFlow 1 Layer Tests - Legacy FE - run: | - python3 -m pip install -r ${{ env.LAYER_TESTS_INSTALL_DIR }}/requirements.txt - - export PYTHONPATH=${{ env.INSTALL_TEST_DIR }}/mo:$PYTHONPATH - - python3 -m pytest ${{ env.LAYER_TESTS_INSTALL_DIR }}/tensorflow_tests/test_tf_Roll.py --ir_version=10 --junitxml=${{ env.INSTALL_TEST_DIR }}/TEST-tf_Roll.xml - - - name: TensorFlow 2 Layer Tests - Legacy FE - run: | - python3 -m pip install -r ${{ env.LAYER_TESTS_INSTALL_DIR }}/requirements.txt - - export PYTHONPATH=${{ env.INSTALL_TEST_DIR }}/mo:$PYTHONPATH - - python3 -m pytest ${{ env.LAYER_TESTS_INSTALL_DIR }}/tensorflow2_keras_tests/test_tf2_keras_activation.py \ - --ir_version=11 --junitxml=${{ env.INSTALL_TEST_DIR }}/TEST-tf2_Activation.xml -k "sigmoid" - env: - TEST_DEVICE: CPU - TEST_PRECISION: FP16 - - - name: TensorFlow Lite Layer Tests - TFL FE - run: | - python3 -m pip install -r ${{ env.LAYER_TESTS_INSTALL_DIR }}/requirements.txt - - export PYTHONPATH=${{ env.INSTALL_TEST_DIR }}/mo:$PYTHONPATH - - python3 -m pytest ${{ env.LAYER_TESTS_INSTALL_DIR }}/tensorflow_lite_tests/ --junitxml=${{ env.INSTALL_TEST_DIR }}/TEST-tfl_fe.xml - env: - TEST_DEVICE: CPU - TEST_PRECISION: FP16 - - - name: Python ONNX operators tests - if: ${{ 'false' }} # Ticket: 123325 - run: | - # Skip test_onnx/test_zoo_models and test_onnx/test_backend due to long execution time - ONNX Model Zoo tests are run separately - python3 -m pytest -sv ${{ env.INSTALL_TEST_DIR }}/onnx -k 'not cuda' \ - --junitxml=${{ env.INSTALL_TEST_DIR }}/TEST-onnx_frontend.xml \ - --ignore=${{ env.INSTALL_TEST_DIR }}/onnx/test_python/test_zoo_models.py - - - name: Python Frontend tests - run: | - python3 -m pip install -r ${{ env.LAYER_TESTS_INSTALL_DIR }}/requirements.txt - - export PYTHONPATH=${{ env.INSTALL_TEST_DIR }}/mo:$PYTHONPATH - - # to allow 'libtest_builtin_extensions.so' to find 'libopenvino_onnx_frontend.so' - source ${{ env.INSTALL_DIR }}/setupvars.sh - - python3 -m pytest ${{ env.LAYER_TESTS_INSTALL_DIR }}/py_frontend_tests --junitxml=${{ env.INSTALL_TEST_DIR }}/TEST-test_py_fontend.xml - - # TODO: install to 'tests' component via cpack - - name: OVC unit tests - run: python3 -m pytest -s ${{ env.INSTALL_TEST_DIR }}/ovc/unit_tests --junitxml=${{ env.INSTALL_TEST_DIR }}/TEST-OpenVinoConversion.xml - - - name: Upload Test Results - uses: actions/upload-artifact@v3 - if: ${{ always() }} - with: - name: 
test-results-python-${{ matrix.arhitecture }} - path: ${{ env.INSTALL_TEST_DIR }}/TEST*.xml - if-no-files-found: 'error' + needs: [ Build, Smart_CI ] + uses: ./.github/workflows/job_python_unit_tests.yml + with: + runner: 'macos-13' + affected-components: ${{ needs.smart_ci.outputs.affected_components }} CPU_Functional_Tests: name: CPU functional tests - needs: Build - timeout-minutes: 25 - defaults: - run: - shell: bash - strategy: - max-parallel: 2 - fail-fast: false - matrix: - include: - # ticket: 122001 - # - arhitecture: 'x86_64' - # machine: 'macos-13' - - arhitecture: 'arm64' - machine: 'macos-13-xlarge' - runs-on: ${{ matrix.machine }} - env: - INSTALL_DIR: ${{ github.workspace }}/install - INSTALL_TEST_DIR: ${{ github.workspace }}/install/tests - - steps: - - name: Create Directories - run: mkdir -p ${{ env.INSTALL_DIR }} ${{ env.INSTALL_TEST_DIR }} - - - name: Download OpenVINO package - uses: actions/download-artifact@v3 - with: - name: openvino_package_${{ matrix.arhitecture }} - path: ${{ env.INSTALL_DIR }} - - - name: Download OpenVINO tests package - uses: actions/download-artifact@v3 - with: - name: openvino_tests_${{ matrix.arhitecture }} - path: ${{ env.INSTALL_TEST_DIR }} - - - name: Extract OpenVINO packages - run: | - pushd ${{ env.INSTALL_DIR }} - tar -xzf openvino_package.tar.gz -C ${{ env.INSTALL_DIR }} && rm openvino_package.tar.gz - popd - pushd ${{ env.INSTALL_TEST_DIR }} - tar -xzf openvino_tests.tar.gz -C ${{ env.INSTALL_DIR }} && rm openvino_tests.tar.gz - popd - - - name: CPU plugin func tests - run: | - source ${{ env.INSTALL_DIR }}/setupvars.sh - - # Skips under Ticket: 122769 - skip_filter=${{ matrix.arhitecture == 'arm64' && '--gtest_filter=-*smoke_nonzero/NonZeroLayerTest.Inference/IS*:*smoke_NormalizeL2_*:*Extension.XmlModelWithExtensionFromDSO*:*Extension.OnnxModelWithExtensionFromDSO*:*ONNXQuantizedModels/QuantizedModelsTests.MaxPool*:*ONNXQuantizedModels/QuantizedModelsTests.Convolution*:**' || '' }} - - ${{ env.INSTALL_TEST_DIR }}/ov_cpu_func_tests --gtest_print_time=1 --gtest_filter=*smoke* "$skip_filter" --gtest_output=xml:"${{ env.INSTALL_TEST_DIR }}/TEST-CPUFuncTests.xml" - - - name: Upload Test Results - uses: actions/upload-artifact@v3 - if: ${{ always() }} - with: - name: test-results-functional-cpu-${{ matrix.arhitecture }} - path: ${{ env.INSTALL_TEST_DIR }}/TEST*.xml - if-no-files-found: 'error' + # if: fromJSON(needs.smart_ci.outputs.affected_components).CPU.test + if: ${{ 'false' }} # Ticket: 122001 + needs: [ Build, Smart_CI ] + uses: ./.github/workflows/job_cpu_functional_tests.yml + with: + runner: 'macos-13' diff --git a/.github/workflows/mac_arm64.yml b/.github/workflows/mac_arm64.yml new file mode 100644 index 00000000000000..ea96b26c465a7e --- /dev/null +++ b/.github/workflows/mac_arm64.yml @@ -0,0 +1,241 @@ +name: macOS ARM64 (Python 3.11) +on: + workflow_dispatch: + schedule: + # at 00:00 on workdays + - cron: '0 0 * * 1,2,3,4,5' +# pull_request: +# paths-ignore: +# - '**/docs/**' +# - 'docs/**' +# - '**/**.md' +# - '**.md' +# - '**/layer_tests_summary/**' +# - '**/conformance/**' +# push: +# paths-ignore: +# - '**/docs/**' +# - 'docs/**' +# - '**/**.md' +# - '**.md' +# - '**/layer_tests_summary/**' +# - '**/conformance/**' +# branches: +# - master +# - 'releases/**' + +concurrency: + # github.ref is not unique in post-commit + group: ${{ github.event_name == 'push' && github.run_id || github.ref }}-mac-arm64 + cancel-in-progress: true + +env: + PYTHON_VERSION: '3.11' + +jobs: + Smart_CI: + runs-on: ubuntu-latest + outputs: + 
affected_components: "${{ steps.smart_ci.outputs.affected_components }}" + skip_workflow: "${{ steps.smart_ci.outputs.skip_workflow }}" + steps: + - name: checkout action + uses: actions/checkout@v4 + with: + sparse-checkout: .github/actions/smart-ci + + - name: Get affected components + id: smart_ci + uses: ./.github/actions/smart-ci + with: + repository: ${{ github.repository }} + pr: ${{ github.event.number }} + commit_sha: ${{ github.sha }} + component_pattern: "category: (.*)" + repo_token: ${{ secrets.GITHUB_TOKEN }} + skip_when_only_listed_labels_set: 'docs' + skip_when_only_listed_files_changed: '*.md,*.rst,*.png,*.jpg,*.svg' + + Build: + needs: Smart_CI + timeout-minutes: 150 + defaults: + run: + shell: bash + runs-on: 'macos-13-xlarge' + env: + CMAKE_BUILD_TYPE: 'Release' + CMAKE_GENERATOR: 'Ninja Multi-Config' + MACOSX_DEPLOYMENT_TARGET: '11.0' + CMAKE_CXX_COMPILER_LAUNCHER: ccache + CMAKE_C_COMPILER_LAUNCHER: ccache + OPENVINO_REPO: ${{ github.workspace }}/openvino + OPENVINO_CONTRIB_REPO: ${{ github.workspace }}/openvino_contrib + INSTALL_DIR: ${{ github.workspace }}/openvino_install + INSTALL_TEST_DIR: ${{ github.workspace }}/tests_install + BUILD_DIR: ${{ github.workspace }}/build + steps: + - name: Clone OpenVINO + uses: actions/checkout@v4 + with: + path: 'openvino' + submodules: 'true' + + - name: Clone OpenVINO Contrib + uses: actions/checkout@v4 + with: + repository: 'openvinotoolkit/openvino_contrib' + path: 'openvino_contrib' + + # + # Print system info + # + + - name: System info + uses: ./openvino/.github/actions/system_info + + # + # Dependencies + # + + - name: Install build dependencies + run: brew install coreutils ninja scons + + - name: Setup Python ${{ env.PYTHON_VERSION }} + uses: ./openvino/.github/actions/setup_python + with: + version: ${{ env.PYTHON_VERSION }} + should-setup-pip-paths: 'false' + self-hosted-runner: 'false' + + - name: Install python dependencies + run: | + # For Python API + python3 -m pip install -r ${{ env.OPENVINO_REPO }}/src/bindings/python/wheel/requirements-dev.txt + python3 -m pip install -r ${{ env.OPENVINO_REPO }}/src/bindings/python/requirements.txt + + # For running Python API tests + python3 -m pip install -r ${{ env.OPENVINO_REPO }}/src/bindings/python/src/compatibility/openvino/requirements-dev.txt + + # For running ONNX frontend unit tests + python3 -m pip install --force-reinstall -r ${{ env.OPENVINO_REPO }}/src/frontends/onnx/tests/requirements.txt + + # For running TensorFlow frontend unit tests + python3 -m pip install -r ${{ env.OPENVINO_REPO }}/src/frontends/tensorflow/tests/requirements.txt + + # For running Paddle frontend unit tests + python3 -m pip install -r ${{ env.OPENVINO_REPO }}/src/frontends/paddle/tests/requirements.txt + + # + # Build + # + + - name: Setup ccache + uses: hendrikmuhs/ccache-action@v1.2 + with: + max-size: "2000M" + # Should save cache only if run in the master branch of the base repo + # github.ref_name is 'ref/PR_#' in case of the PR, and 'branch_name' when executed on push + save: ${{ github.ref_name == 'master' && 'true' || 'false' }} + verbose: 2 + key: ${{ runner.os }}-${{ runner.arch }}-main + restore-keys: | + ${{ runner.os }}-${{ runner.arch }}-main + + - name: CMake configure + run: | + cmake \ + -G "${{ env.CMAKE_GENERATOR }}" \ + -DENABLE_CPPLINT=OFF \ + -DENABLE_NCC_STYLE=OFF \ + -DENABLE_TESTS=ON \ + -DCMAKE_COMPILE_WARNING_AS_ERROR=OFF \ + -DENABLE_STRICT_DEPENDENCIES=OFF \ + -DCMAKE_CXX_COMPILER_LAUNCHER=${{ env.CMAKE_CXX_COMPILER_LAUNCHER }} \ + 
-DCMAKE_C_COMPILER_LAUNCHER=${{ env.CMAKE_C_COMPILER_LAUNCHER }} \ + -S ${{ env.OPENVINO_REPO }} \ + -B ${{ env.BUILD_DIR }} + + - name: Cmake build - OpenVINO + run: cmake --build ${{ env.BUILD_DIR }} --parallel --config ${{ env.CMAKE_BUILD_TYPE }} + + - name: Show ccache stats + run: ccache --show-stats + + - name: Cmake install - OpenVINO + run: | + cmake -DCMAKE_INSTALL_PREFIX=${{ env.INSTALL_DIR }} -P ${{ env.BUILD_DIR }}/cmake_install.cmake + cmake -DCMAKE_INSTALL_PREFIX=${{ env.INSTALL_TEST_DIR }} -DCOMPONENT=tests -P ${{ env.BUILD_DIR }}/cmake_install.cmake + cmake -DCMAKE_INSTALL_PREFIX=${{ env.INSTALL_DIR }} -DCOMPONENT=python_wheels -P ${{ env.BUILD_DIR }}/cmake_install.cmake + + - name: Pack Artifacts + run: | + pushd ${{ env.INSTALL_DIR }} + tar -czvf ${{ env.BUILD_DIR }}/openvino_package.tar.gz * + popd + + pushd ${{ env.INSTALL_TEST_DIR }} + tar -czvf ${{ env.BUILD_DIR }}/openvino_tests.tar.gz * + popd + + - name: Cmake & Build - OpenVINO Contrib + run: | + cmake \ + -DBUILD_nvidia_plugin=OFF \ + -DBUILD_java_api=OFF \ + -DCUSTOM_OPERATIONS="calculate_grid;complex_mul;fft;grid_sample;sparse_conv;sparse_conv_transpose" \ + -DOPENVINO_EXTRA_MODULES=${{ env.OPENVINO_CONTRIB_REPO }}/modules \ + -S ${{ env.OPENVINO_REPO }} \ + -B ${{ env.BUILD_DIR }} + cmake --build ${{ env.BUILD_DIR }} --parallel --config ${{ env.CMAKE_BUILD_TYPE }} + + # + # Upload build artifacts + # + + - name: Upload openvino package + if: ${{ always() }} + uses: actions/upload-artifact@v3 + with: + name: openvino_package + path: ${{ env.BUILD_DIR }}/openvino_package.tar.gz + if-no-files-found: 'error' + + - name: Upload openvino tests package + if: ${{ always() }} + uses: actions/upload-artifact@v3 + with: + name: openvino_tests + path: ${{ env.BUILD_DIR }}/openvino_tests.tar.gz + if-no-files-found: 'error' + + Samples: + needs: Build + uses: ./.github/workflows/job_samples_tests.yml + with: + runner: 'macos-13-xlarge' + affected-components: ${{ needs.smart_ci.outputs.affected_components }} + + CXX_Unit_Tests: + name: C++ unit tests + needs: [ Build, Smart_CI ] + uses: ./.github/workflows/job_cxx_unit_tests.yml + with: + runner: 'macos-13-xlarge' + affected-components: ${{ needs.smart_ci.outputs.affected_components }} + + Python_Unit_Tests: + name: Python unit tests + needs: [ Build, Smart_CI ] + uses: ./.github/workflows/job_python_unit_tests.yml + with: + runner: 'macos-13-xlarge' + affected-components: ${{ needs.smart_ci.outputs.affected_components }} + + CPU_Functional_Tests: + name: CPU functional tests + if: fromJSON(needs.smart_ci.outputs.affected_components).CPU.test + needs: [ Build, Smart_CI ] + uses: ./.github/workflows/job_cpu_functional_tests.yml + with: + runner: 'macos-13-xlarge' diff --git a/.github/workflows/mo.yml b/.github/workflows/mo.yml index 69be4df6bf8fbc..f8956969b6a403 100644 --- a/.github/workflows/mo.yml +++ b/.github/workflows/mo.yml @@ -24,7 +24,7 @@ jobs: uses: actions/checkout@v4 - name: Setup Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: '3.10' diff --git a/.github/workflows/py_checks.yml b/.github/workflows/py_checks.yml index c97d5167e2b035..80d6cad5243af3 100644 --- a/.github/workflows/py_checks.yml +++ b/.github/workflows/py_checks.yml @@ -28,7 +28,7 @@ jobs: uses: actions/checkout@v4 - name: Setup Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: '3.8' diff --git a/.github/workflows/stale_prs_and_issues.yml b/.github/workflows/stale_prs_and_issues.yml index 
0ea15bb6d4e6cb..deaf62781842e4 100644 --- a/.github/workflows/stale_prs_and_issues.yml +++ b/.github/workflows/stale_prs_and_issues.yml @@ -12,7 +12,7 @@ jobs: stale: runs-on: ubuntu-latest steps: - - uses: actions/stale@v8 + - uses: actions/stale@v9 with: stale-issue-message: 'This issue will be closed in a week because of 9 months of no activity.' stale-pr-message: 'This PR will be closed in a week because of 2 weeks of no activity.' diff --git a/.github/workflows/webassembly.yml b/.github/workflows/webassembly.yml index 60685489f6414f..c5d94f267e4298 100644 --- a/.github/workflows/webassembly.yml +++ b/.github/workflows/webassembly.yml @@ -2,21 +2,7 @@ name: Webassembly on: workflow_dispatch: pull_request: - paths-ignore: - - '**/docs/**' - - 'docs/**' - - '**/**.md' - - '**.md' - - '**/layer_tests_summary/**' - - '**/conformance/**' push: - paths-ignore: - - '**/docs/**' - - 'docs/**' - - '**/**.md' - - '**.md' - - '**/layer_tests_summary/**' - - '**/conformance/**' branches: - master - 'releases/**' @@ -27,7 +13,31 @@ concurrency: cancel-in-progress: true jobs: + Smart_CI: + runs-on: ubuntu-latest + outputs: + affected_components: "${{ steps.smart_ci.outputs.affected_components }}" + skip_workflow: "${{ steps.smart_ci.outputs.skip_workflow }}" + steps: + - name: checkout action + uses: actions/checkout@v4 + with: + sparse-checkout: .github/actions/smart-ci + + - name: Get affected components + id: smart_ci + uses: ./.github/actions/smart-ci + with: + repository: ${{ github.repository }} + pr: ${{ github.event.number }} + commit_sha: ${{ github.sha }} + component_pattern: "category: (.*)" + repo_token: ${{ secrets.GITHUB_TOKEN }} + skip_when_only_listed_labels_set: 'docs' + skip_when_only_listed_files_changed: '*.md,*.rst,*.png,*.jpg,*.svg,*/layer_tests_summary/*,*/conformance/*' + Build: + needs: Smart_CI defaults: run: shell: bash @@ -35,7 +45,7 @@ jobs: container: image: emscripten/emsdk volumes: - - /mount/caches:/mount/caches + - /mount:/mount options: -e SCCACHE_AZURE_BLOB_CONTAINER -e SCCACHE_AZURE_CONNECTION_STRING env: CMAKE_BUILD_TYPE: 'Release' @@ -44,6 +54,7 @@ jobs: OPENVINO_REPO: /__w/openvino/openvino/openvino OPENVINO_BUILD_DIR: /__w/openvino/openvino/openvino_build SCCACHE_AZURE_KEY_PREFIX: webassembly_Release + if: "!needs.smart_ci.outputs.skip_workflow" steps: - name: Install git run: apt-get update && apt-get install --assume-yes --no-install-recommends git ca-certificates @@ -76,3 +87,17 @@ jobs: - name: Show ccache stats run: ${SCCACHE_PATH} --show-stats + + Overall_Status: + name: ci/gha_overall_status_webassembly + needs: [Smart_CI, Build] + if: ${{ always() }} + runs-on: ubuntu-latest + steps: + - name: Check status of all jobs + if: >- + ${{ + contains(needs.*.result, 'failure') || + contains(needs.*.result, 'cancelled') + }} + run: exit 1 diff --git a/.github/workflows/windows.yml b/.github/workflows/windows.yml index 353a38666d7862..3c1df21a9cf656 100644 --- a/.github/workflows/windows.yml +++ b/.github/workflows/windows.yml @@ -1,34 +1,46 @@ name: Windows (VS 2019, Python 3.11) on: workflow_dispatch: -# pull_request: -# paths-ignore: -# - '**/docs/**' -# - 'docs/**' -# - '**/**.md' -# - '**.md' -# - '**/layer_tests_summary/**' -# - '**/conformance/**' + pull_request: push: - paths-ignore: - - '**/docs/**' - - 'docs/**' - - '**/**.md' - - '**.md' - - '**/layer_tests_summary/**' - - '**/conformance/**' branches: - master + - 'releases/**' concurrency: # github.ref is not unique in post-commit group: ${{ github.event_name == 'push' && github.run_id || 
github.ref }}-windows cancel-in-progress: true env: + PIP_CACHE_PATH: /mount/caches/pip/win PYTHON_VERSION: '3.11' jobs: + Smart_CI: + runs-on: ubuntu-latest + outputs: + affected_components: "${{ steps.smart_ci.outputs.affected_components }}" + skip_workflow: "${{ steps.smart_ci.outputs.skip_workflow }}" + steps: + - name: checkout action + uses: actions/checkout@v4 + with: + sparse-checkout: .github/actions/smart-ci + + - name: Get affected components + id: smart_ci + uses: ./.github/actions/smart-ci + with: + repository: ${{ github.repository }} + pr: ${{ github.event.number }} + commit_sha: ${{ github.sha }} + component_pattern: "category: (.*)" + repo_token: ${{ secrets.GITHUB_TOKEN }} + skip_when_only_listed_labels_set: 'docs' + skip_when_only_listed_files_changed: '*.md,*.rst,*.png,*.jpg,*.svg,*/layer_tests_summary/*,*/conformance/*' + Build: + needs: Smart_CI timeout-minutes: 180 defaults: run: @@ -46,6 +58,8 @@ jobs: BUILD_DIR: "${{ github.workspace }}\\openvino_build" # TODO: specify version of compiler here SCCACHE_AZURE_KEY_PREFIX: windows2022_x86_64_Release + if: "!needs.smart_ci.outputs.skip_workflow" + steps: - name: Clone OpenVINO uses: actions/checkout@v4 @@ -75,8 +89,10 @@ jobs: uses: ./openvino/.github/actions/setup_python with: version: ${{ env.PYTHON_VERSION }} - should-setup-pip-paths: 'false' - self-hosted-runner: 'false' + pip-cache-path: ${{ env.PIP_CACHE_PATH }} + should-setup-pip-paths: 'true' + self-hosted-runner: 'true' + show-cache-info: 'true' - name: Install python dependencies run: | @@ -93,9 +109,13 @@ jobs: # For running TensorFlow Lite frontend unit tests python3 -m pip install -r ${{ env.OPENVINO_REPO }}/src/frontends/tensorflow_lite/tests/requirements.txt + # Disabled because of CVS-95904 # For running Paddle frontend unit tests # python3 -m pip install -r ${{ env.OPENVINO_REPO }}/src/frontends/paddle/tests/requirements.txt + # For getting rid of SSL issues during model downloading for unit tests + python3 -m pip install certifi + - name: Install sccache uses: mozilla-actions/sccache-action@v0.0.3 with: @@ -111,6 +131,9 @@ jobs: - name: Configure Developer Command Prompt for Microsoft Visual C++ uses: ilammy/msvc-dev-cmd@v1 + - name: Set SSL_CERT_FILE for model downloading for unit tests + run: echo SSL_CERT_FILE=$(python3 -m certifi) >> $env:GITHUB_ENV + - name: CMake configure run: | cmake -G "${{ env.CMAKE_GENERATOR }}" ` @@ -186,7 +209,8 @@ jobs: if-no-files-found: 'error' Samples: - needs: Build + needs: [Build, Smart_CI] + if: fromJSON(needs.smart_ci.outputs.affected_components).samples timeout-minutes: 20 defaults: run: @@ -234,7 +258,7 @@ jobs: with: version: ${{ env.PYTHON_VERSION }} should-setup-pip-paths: 'false' - self-hosted-runner: 'false' + self-hosted-runner: 'true' - name: Build cpp samples run: | @@ -267,12 +291,12 @@ jobs: Python_Unit_Tests: name: Python unit tests - needs: Build + needs: [Build, Smart_CI] timeout-minutes: 75 defaults: run: shell: pwsh - runs-on: aks-win-4-cores-8gb + runs-on: aks-win-8-cores-16gb env: OPENVINO_REPO: "${{ github.workspace }}\\openvino" OPENVINO_CONTRIB_REPO: "${{ github.workspace }}\\openvino_contrib" @@ -315,8 +339,9 @@ jobs: uses: ./openvino/.github/actions/setup_python with: version: ${{ env.PYTHON_VERSION }} + pip-cache-path: ${{ env.PIP_CACHE_PATH }} should-setup-pip-paths: 'false' - self-hosted-runner: 'false' + self-hosted-runner: 'true' - name: Install OpenVINO Python wheels run: | @@ -330,6 +355,9 @@ jobs: - name: Install Python API tests dependencies run: | + # To enable pytest parallel 
features + python3 -m pip install pytest-xdist[psutil] + # For torchvision to OpenVINO preprocessing converter python3 -m pip install -r ${{ env.INSTALL_TEST_DIR }}/python/preprocess/torchvision/requirements.txt @@ -337,48 +365,53 @@ jobs: python3 -m pip install -r ${{ env.INSTALL_TEST_DIR }}/mo/requirements_dev.txt - name: Python API 1.0 Tests + #if: fromJSON(needs.smart_ci.outputs.affected_components).Python_API.test # Ticket: 127101 shell: cmd run: | python3 -m pytest -s ${{ env.INSTALL_TEST_DIR }}/pyngraph ${{ env.PYTHON_STATIC_ARGS }} --junitxml=${{ env.INSTALL_TEST_DIR }}/TEST-Pyngraph.xml --ignore=${{ env.INSTALL_TEST_DIR }}/pyngraph/tests_compatibility/test_onnx/test_zoo_models.py - name: Python API 2.0 Tests + #if: fromJSON(needs.smart_ci.outputs.affected_components).Python_API.test # Ticket: 127101 shell: cmd run: | set PYTHONPATH=${{ env.LAYER_TESTS_INSTALL_DIR }};%PYTHONPATH% python3 -m pytest -sv ${{ env.INSTALL_TEST_DIR }}/pyopenvino ${{ env.PYTHON_STATIC_ARGS }} --junitxml=${{ env.INSTALL_TEST_DIR }}/TEST-Pyngraph.xml --ignore=${{ env.INSTALL_TEST_DIR }}/pyopenvino/tests/test_utils/test_utils.py - name: Model Optimizer UT + if: fromJSON(needs.smart_ci.outputs.affected_components).MO.test shell: cmd run: | python3 -m pytest -s ${{ env.INSTALL_TEST_DIR }}/mo/unit_tests --ignore=${{ env.INSTALL_TEST_DIR }}/mo/unit_tests/mo/front/mxnet --junitxml=${{ env.INSTALL_TEST_DIR }}/TEST-ModelOptimizer.xml + - name: Install Python Layer tests dependencies + run: | + # layer test requirements + python3 -m pip install -r ${{ env.LAYER_TESTS_INSTALL_DIR }}/requirements.txt + # Ticket - 115085 - name: PyTorch Layer Tests if: ${{ 'false' }} shell: cmd run: | - python3 -m pip install -r ${{ env.LAYER_TESTS_INSTALL_DIR }}/requirements.txt - python3 -m pytest ${{ env.LAYER_TESTS_INSTALL_DIR }}/pytorch_tests -m precommit --junitxml=${{ env.INSTALL_TEST_DIR }}/TEST-pytorch.xml + python3 -m pytest ${{ env.LAYER_TESTS_INSTALL_DIR }}/pytorch_tests -n logical -m precommit --junitxml=${{ env.INSTALL_TEST_DIR }}/TEST-pytorch.xml env: TEST_DEVICE: CPU - name: ONNX Layer Tests + if: fromJSON(needs.smart_ci.outputs.affected_components).ONNX_FE.test shell: cmd run: | - python3 -m pip install -r ${{ env.LAYER_TESTS_INSTALL_DIR }}/requirements.txt - :: requires 'unit_tests' from 'tools/mo' set PYTHONPATH=${{ env.INSTALL_TEST_DIR }}\mo;%PYTHONPATH% - python3 -m pytest ${{ env.LAYER_TESTS_INSTALL_DIR }}/onnx_tests -m "not launch_only_if_manually_specified and precommit" --junitxml=${INSTALL_TEST_DIR}/TEST-onnx.xml + python3 -m pytest ${{ env.LAYER_TESTS_INSTALL_DIR }}/onnx_tests -n logical -m "not launch_only_if_manually_specified and precommit" --junitxml=${INSTALL_TEST_DIR}/TEST-onnx.xml env: TEST_DEVICE: CPU TEST_PRECISION: FP16 - name: TensorFlow 1 Layer Tests - TF FE + if: fromJSON(needs.smart_ci.outputs.affected_components).TF_FE.test shell: cmd run: | - python3 -m pip install -r ${{ env.LAYER_TESTS_INSTALL_DIR }}/requirements.txt - :: requires 'unit_tests' from 'tools/mo' set PYTHONPATH=${{ env.INSTALL_TEST_DIR }}\mo;%PYTHONPATH% python3 -m pytest ${{ env.LAYER_TESTS_INSTALL_DIR }}/tensorflow_tests/ --use_new_frontend -m precommit_tf_fe --junitxml=${{ env.INSTALL_TEST_DIR }}/TEST-tf_fe.xml @@ -387,10 +420,9 @@ jobs: TEST_PRECISION: FP16 - name: TensorFlow 2 Layer Tests - TF FE + if: fromJSON(needs.smart_ci.outputs.affected_components).TF_FE.test shell: cmd run: | - python3 -m pip install -r ${{ env.LAYER_TESTS_INSTALL_DIR }}/requirements.txt - :: requires 'unit_tests' from 'tools/mo' set PYTHONPATH=${{ 
env.INSTALL_TEST_DIR }}\mo;%PYTHONPATH% @@ -399,30 +431,32 @@ jobs: TEST_DEVICE: CPU - name: TensorFlow 1 Layer Tests - Legacy FE + if: fromJSON(needs.smart_ci.outputs.affected_components).TF_FE.test shell: cmd run: | - python3 -m pip install -r ${{ env.LAYER_TESTS_INSTALL_DIR }}/requirements.txt python3 -m pytest ${{ env.LAYER_TESTS_INSTALL_DIR }}/tensorflow_tests/test_tf_Roll.py --ir_version=10 --junitxml=${{ env.INSTALL_TEST_DIR }}/TEST-tf_Roll.xml - name: TensorFlow 2 Layer Tests - Legacy FE + if: fromJSON(needs.smart_ci.outputs.affected_components).TF_FE.test shell: cmd run: | - python3 -m pip install -r ${{ env.LAYER_TESTS_INSTALL_DIR }}/requirements.txt python3 -m pytest ${{ env.LAYER_TESTS_INSTALL_DIR }}/tensorflow2_keras_tests/test_tf2_keras_activation.py --ir_version=11 --junitxml=${{ env.INSTALL_TEST_DIR }}/TEST-tf2_Activation.xml -k "sigmoid" env: TEST_DEVICE: CPU TEST_PRECISION: FP16 - name: TensorFlow Lite Layer Tests - TFL FE + if: fromJSON(needs.smart_ci.outputs.affected_components).TFL_FE.test shell: cmd run: | - python3 -m pip install -r ${{ env.LAYER_TESTS_INSTALL_DIR }}/requirements.txt python3 -m pytest ${{ env.LAYER_TESTS_INSTALL_DIR }}/tensorflow_lite_tests/ --junitxml=${{ env.INSTALL_TEST_DIR }}/TEST-tfl_fe.xml env: TEST_DEVICE: CPU TEST_PRECISION: FP16 - name: Python ONNX operators tests + if: fromJSON(needs.smart_ci.outputs.affected_components).Python_API.test || + fromJSON(needs.smart_ci.outputs.affected_components).ONNX_FE.test shell: cmd run: | :: Skip test_onnx/test_zoo_models and test_onnx/test_backend due to long execution time - ONNX Model Zoo tests are run separately @@ -431,26 +465,24 @@ jobs: --ignore=${{ env.INSTALL_TEST_DIR }}/onnx/test_python/test_zoo_models.py - name: MO Python API Tests + if: fromJSON(needs.smart_ci.outputs.affected_components).MO.test shell: cmd run: | - python3 -m pip install -r ${{ env.LAYER_TESTS_INSTALL_DIR }}/requirements.txt - :: Used for 'test_utils' installed in '\python\openvino\test_utils' set PYTHONPATH=${{ env.INSTALL_TEST_DIR }}\python\openvino\test_utils;${{ env.INSTALL_TEST_DIR }}\python;%PYTHONPATH% - + python3 -m pytest ${{ env.LAYER_TESTS_INSTALL_DIR }}/mo_python_api_tests --junitxml=${{ env.INSTALL_TEST_DIR }}/TEST-test_mo_convert.xml env: TEST_DEVICE: CPU TEST_PRECISION: FP16 - name: OVC Python API Tests + if: fromJSON(needs.smart_ci.outputs.affected_components).MO.test shell: cmd run: | - python3 -m pip install -r ${{ env.LAYER_TESTS_INSTALL_DIR }}/requirements.txt - :: Used for 'test_utils' installed in '\python\openvino\test_utils' set PYTHONPATH=${{ env.INSTALL_TEST_DIR }}\python\openvino\test_utils;${{ env.INSTALL_TEST_DIR }}\python;%PYTHONPATH% - + :: Skip test ticket: 126319 python3 -m pytest ${{ env.LAYER_TESTS_INSTALL_DIR }}/ovc_python_api_tests -k "not test_ovc_tool_non_existng_output_dir" --junitxml=${{ env.INSTALL_TEST_DIR }}/TEST-test_ovc_convert.xml env: @@ -458,13 +490,14 @@ jobs: TEST_PRECISION: FP16 - name: Python Frontend tests + if: fromJSON(needs.smart_ci.outputs.affected_components).PyTorch_FE.test || + fromJSON(needs.smart_ci.outputs.affected_components).PDPD_FE.test shell: cmd run: | - python3 -m pip install -r ${{ env.LAYER_TESTS_INSTALL_DIR }}/requirements.txt - call "${{ env.INSTALL_DIR }}\\setupvars.bat" && python3 -m pytest ${{ env.LAYER_TESTS_INSTALL_DIR }}/py_frontend_tests --junitxml=${{ env.INSTALL_TEST_DIR }}/TEST-test_py_fontend.xml - name: OVC unit tests + if: fromJSON(needs.smart_ci.outputs.affected_components).MO.test shell: cmd run: python3 -m pytest -s ${{ 
env.INSTALL_TEST_DIR }}/ovc/unit_tests --junitxml=${{ env.INSTALL_TEST_DIR }}/TEST-OpenVinoConversion.xml @@ -478,7 +511,7 @@ jobs: CXX_Unit_Tests: name: C++ unit tests - needs: Build + needs: [Build, Smart_CI] timeout-minutes: 25 defaults: run: @@ -511,73 +544,87 @@ jobs: popd - name: OpenVINO Core unit tests + if: fromJSON(needs.smart_ci.outputs.affected_components).Core.test shell: cmd run: | call "${{ env.INSTALL_DIR }}\\setupvars.bat" && ${{ env.INSTALL_TEST_DIR }}/ov_core_unit_tests --gtest_print_time=1 --gtest_filter=-*IE_GPU* --gtest_output=xml:${{ env.INSTALL_TEST_DIR }}/TEST-NGraphUT.xml - name: OpenVINO Inference functional tests + if: fromJSON(needs.smart_ci.outputs.affected_components).inference.test shell: cmd run: | call "${{ env.INSTALL_DIR }}\\setupvars.bat" && ${{ env.INSTALL_TEST_DIR }}/ov_inference_functional_tests --gtest_print_time=1 --gtest_output=xml:${{ env.INSTALL_TEST_DIR }}/TEST-InferenceFunc.xml - name: OpenVINO Inference unit tests + if: fromJSON(needs.smart_ci.outputs.affected_components).inference.test shell: cmd run: | call "${{ env.INSTALL_DIR }}\\setupvars.bat" && ${{ env.INSTALL_TEST_DIR }}/ov_inference_unit_tests --gtest_print_time=1 --gtest_output=xml:${{ env.INSTALL_TEST_DIR }}/TEST-InferenceUnit.xml - name: Low Precision Transformations Tests + if: fromJSON(needs.smart_ci.outputs.affected_components).LP_transformations.test shell: cmd run: | call "${{ env.INSTALL_DIR }}\\setupvars.bat" && ${{ env.INSTALL_TEST_DIR }}/ov_lp_transformations_tests --gtest_print_time=1 --gtest_output=xml:${{ env.INSTALL_TEST_DIR }}/TEST-LpTransformations.xml - name: OpenVINO Conditional compilation tests + if: fromJSON(needs.smart_ci.outputs.affected_components).Core.test shell: cmd run: | call "${{ env.INSTALL_DIR }}\\setupvars.bat" && ${{ env.INSTALL_TEST_DIR }}/ov_conditional_compilation_tests --gtest_print_time=1 --gtest_output=xml:${{ env.INSTALL_TEST_DIR }}/TEST-ConditionalCompilation.xml - name: IR frontend tests + if: fromJSON(needs.smart_ci.outputs.affected_components).IR_FE.test shell: cmd run: | call "${{ env.INSTALL_DIR }}\\setupvars.bat" && ${{ env.INSTALL_TEST_DIR }}/ov_ir_frontend_tests --gtest_print_time=1 --gtest_output=xml:${{ env.INSTALL_TEST_DIR }}/TEST-IRFrontend.xml - - name: PaddlePaddle frontend tests # Disabled in Azure: https://github.com/openvinotoolkit/openvino/blob/master/.ci/azure/linux.yml#L403 + - name: PaddlePaddle frontend tests # Disabled because of CVS-95904 if: ${{ 'false' }} shell: cmd run: | call "${{ env.INSTALL_DIR }}\\setupvars.bat" && ${{ env.INSTALL_TEST_DIR }}/paddle_tests --gtest_print_time=1 --gtest_filter=*smoke* --gtest_output=xml:${{ env.INSTALL_TEST_DIR }}/TEST-PaddleTests.xml - name: ONNX frontend tests + if: fromJSON(needs.smart_ci.outputs.affected_components).ONNX_FE.test shell: cmd run: | call "${{ env.INSTALL_DIR }}\\setupvars.bat" && ${{ env.INSTALL_TEST_DIR }}/ov_onnx_frontend_tests --gtest_print_time=1 --gtest_filter=-*IE_GPU* --gtest_output=xml:${{ env.INSTALL_TEST_DIR }}/TEST-ONNXFrontend.xml - name: TensorFlow Common frontend tests + if: fromJSON(needs.smart_ci.outputs.affected_components).TF_FE.test || + fromJSON(needs.smart_ci.outputs.affected_components).TFL_FE.test shell: cmd run: | call "${{ env.INSTALL_DIR }}\\setupvars.bat" && ${{ env.INSTALL_TEST_DIR }}/ov_tensorflow_common_tests --gtest_print_time=1 --gtest_output=xml:${{ env.INSTALL_TEST_DIR }}/TEST-TensorFlowCommonFrontend.xml - name: TensorFlow frontend tests + if: fromJSON(needs.smart_ci.outputs.affected_components).TF_FE.test shell: cmd run: | 
call "${{ env.INSTALL_DIR }}\\setupvars.bat" && ${{ env.INSTALL_TEST_DIR }}/ov_tensorflow_frontend_tests --gtest_print_time=1 --gtest_output=xml:${{ env.INSTALL_TEST_DIR }}/TEST-TensorFlowFrontend.xml - name: TensorFlow Lite frontend tests + if: fromJSON(needs.smart_ci.outputs.affected_components).TFL_FE.test shell: cmd run: | :: Skip ticket: 126320 call "${{ env.INSTALL_DIR }}\\setupvars.bat" && ${{ env.INSTALL_TEST_DIR }}/ov_tensorflow_lite_frontend_tests --gtest_print_time=1 --gtest_filter=-*test_decode_convert_equal_convert*:*test_convert_partially_equal_convert* --gtest_output=xml:${{ env.INSTALL_TEST_DIR }}/TEST-TensorFlowLiteFrontend.xml - name: Transformations func tests + if: fromJSON(needs.smart_ci.outputs.affected_components).transformations.test shell: cmd run: | call "${{ env.INSTALL_DIR }}\\setupvars.bat" && ${{ env.INSTALL_TEST_DIR }}/ov_transformations_tests --gtest_print_time=1 --gtest_output=xml:${{ env.INSTALL_TEST_DIR }}/TEST-Transformations.xml - name: Legacy Transformations func tests + if: fromJSON(needs.smart_ci.outputs.affected_components).GNA.test shell: cmd run: | call "${{ env.INSTALL_DIR }}\\setupvars.bat" && ${{ env.INSTALL_TEST_DIR }}/ov_legacy_transformations_tests --gtest_print_time=1 --gtest_output=xml:${{ env.INSTALL_TEST_DIR }}/TEST-LegacyTransformations.xml - name: Inference Engine 1.0 unit tests + if: fromJSON(needs.smart_ci.outputs.affected_components).GNA.test shell: cmd run: | call "${{ env.INSTALL_DIR }}\\setupvars.bat" && ${{ env.INSTALL_TEST_DIR }}/InferenceEngineUnitTests --gtest_print_time=1 --gtest_output=xml:${{ env.INSTALL_TEST_DIR }}/TEST-InferenceEngineUnitTests.xml @@ -588,11 +635,13 @@ jobs: call "${{ env.INSTALL_DIR }}\\setupvars.bat" && ${{ env.INSTALL_TEST_DIR }}/ov_util_tests --gtest_print_time=1 --gtest_output=xml:${{ env.INSTALL_TEST_DIR }}/TEST-commonUtilsTests.xml - name: Snippets func tests + if: fromJSON(needs.smart_ci.outputs.affected_components).CPU.test shell: cmd run: | call "${{ env.INSTALL_DIR }}\\setupvars.bat" && ${{ env.INSTALL_TEST_DIR }}/ov_snippets_func_tests --gtest_print_time=1 --gtest_output=xml:${{ env.INSTALL_TEST_DIR }}/TEST-SnippetsFuncTests.xml - name: CPU plugin unit tests + if: fromJSON(needs.smart_ci.outputs.affected_components).CPU.test shell: cmd run: | call "${{ env.INSTALL_DIR }}\\setupvars.bat" && ${{ env.INSTALL_TEST_DIR }}/ov_cpu_unit_tests --gtest_print_time=1 --gtest_output=xml:${{ env.INSTALL_TEST_DIR }}/TEST-CPUUnitTests.xml @@ -608,26 +657,31 @@ jobs: call "${{ env.INSTALL_DIR }}\\setupvars.bat" && ${{ env.INSTALL_TEST_DIR }}/ov_op_conformance_tests --gtest_print_time=1 --gtest_filter="*OpImpl*" --gtest_output=xml:${{ env.INSTALL_TEST_DIR }}/TEST-TemplateOpImplTests.xml - name: GNA plugin unit tests + if: fromJSON(needs.smart_ci.outputs.affected_components).GNA.test shell: cmd run: | call "${{ env.INSTALL_DIR }}\\setupvars.bat" && ${{ env.INSTALL_TEST_DIR }}/ov_gna_unit_tests --gtest_print_time=1 --gtest_output=xml:${{ env.INSTALL_TEST_DIR }}/TEST-GNAUnitTests.xml - name: AUTO unit tests + if: fromJSON(needs.smart_ci.outputs.affected_components).AUTO.test shell: cmd run: | call "${{ env.INSTALL_DIR }}\\setupvars.bat" && ${{ env.INSTALL_TEST_DIR }}/ov_auto_unit_tests --gtest_print_time=1 --gtest_output=xml:${{ env.INSTALL_TEST_DIR }}/TEST-ov_auto_unit_tests.xml - name: AUTO func Tests + if: fromJSON(needs.smart_ci.outputs.affected_components).AUTO.test shell: cmd run: | call "${{ env.INSTALL_DIR }}\\setupvars.bat" && ${{ env.INSTALL_TEST_DIR }}/ov_auto_func_tests --gtest_print_time=1 
--gtest_output=xml:${{ env.INSTALL_TEST_DIR }}/TEST-ov_auto_func_tests.xml - name: Template plugin func tests + if: fromJSON(needs.smart_ci.outputs.affected_components).TEMPLATE.test shell: cmd run: | call "${{ env.INSTALL_DIR }}\\setupvars.bat" && ${{ env.INSTALL_TEST_DIR }}/ov_template_func_tests --gtest_print_time=1 --gtest_filter=*smoke* --gtest_output=xml:${{ env.INSTALL_TEST_DIR }}/TEST-TemplateFuncTests.xml - name: Inference Engine C API tests + if: fromJSON(needs.smart_ci.outputs.affected_components).C_API.test shell: cmd run: | call "${{ env.INSTALL_DIR }}\\setupvars.bat" && ${{ env.INSTALL_TEST_DIR }}/InferenceEngineCAPITests --gtest_print_time=1 --gtest_output=xml:${{ env.INSTALL_TEST_DIR }}/TEST-InferenceEngineCAPITests.xml @@ -639,26 +693,31 @@ jobs: call "${{ env.INSTALL_DIR }}\\setupvars.bat" && ${{ env.INSTALL_TEST_DIR }}/ov_capi_test --gtest_print_time=1 --gtest_output=xml:${{ env.INSTALL_TEST_DIR }}/TEST-OpenVINOCAPITests.xml - name: AutoBatch unit tests + if: fromJSON(needs.smart_ci.outputs.affected_components).AUTO_BATCH.test shell: cmd run: | call "${{ env.INSTALL_DIR }}\\setupvars.bat" && ${{ env.INSTALL_TEST_DIR }}/ov_auto_batch_unit_tests --gtest_output=xml:${{ env.INSTALL_TEST_DIR }}/TEST-ov_auto_batch_unit_tests.xml - name: AutoBatch func tests + if: fromJSON(needs.smart_ci.outputs.affected_components).AUTO_BATCH.test shell: cmd run: | call "${{ env.INSTALL_DIR }}\\setupvars.bat" && ${{ env.INSTALL_TEST_DIR }}/ov_auto_batch_func_tests --gtest_output=xml:${{ env.INSTALL_TEST_DIR }}/TEST-ov_auto_batch_func_tests.xml - name: Proxy Plugin func tests + if: fromJSON(needs.smart_ci.outputs.affected_components).PROXY.test shell: cmd run: | call "${{ env.INSTALL_DIR }}\\setupvars.bat" && ${{ env.INSTALL_TEST_DIR }}/ov_proxy_plugin_tests --gtest_print_time=1 --gtest_output=xml:${{ env.INSTALL_TEST_DIR }}/TEST-OVProxyTests.xml - name: Hetero Unit Tests + if: fromJSON(needs.smart_ci.outputs.affected_components).HETERO.test shell: cmd run: | call "${{ env.INSTALL_DIR }}\\setupvars.bat" && ${{ env.INSTALL_TEST_DIR }}/ov_hetero_unit_tests --gtest_print_time=1 --gtest_output=xml:${{ env.INSTALL_TEST_DIR }}/TEST-OVHeteroUnitTests.xml - name: Hetero Func Tests + if: fromJSON(needs.smart_ci.outputs.affected_components).HETERO.test shell: cmd run: | call "${{ env.INSTALL_DIR }}\\setupvars.bat" && ${{ env.INSTALL_TEST_DIR }}/ov_hetero_func_tests --gtest_print_time=1 --gtest_output=xml:${{ env.INSTALL_TEST_DIR }}/TEST-OVHeteroFuncTests.xml @@ -673,7 +732,7 @@ jobs: CPU_Functional_Tests: name: CPU functional tests - needs: Build + needs: [Build, Smart_CI] timeout-minutes: 70 defaults: run: @@ -685,7 +744,7 @@ jobs: INSTALL_TEST_DIR: "${{ github.workspace }}\\install\\tests" PARALLEL_TEST_SCRIPT: "${{ github.workspace }}\\install\\tests\\functional_test_utils\\layer_tests_summary\\run_parallel.py" PARALLEL_TEST_CACHE: "${{ github.workspace }}\\install\\tests\\test_cache.lst" - + if: fromJSON(needs.smart_ci.outputs.affected_components).CPU.test steps: - name: Download OpenVINO package uses: actions/download-artifact@v3 @@ -721,7 +780,7 @@ jobs: with: version: ${{ env.PYTHON_VERSION }} should-setup-pip-paths: 'false' - self-hosted-runner: 'false' + self-hosted-runner: 'true' - name: Install python dependencies shell: cmd @@ -763,3 +822,17 @@ jobs: ${{ env.INSTALL_TEST_DIR }}/logs/hash_table.csv ${{ env.PARALLEL_TEST_CACHE }} if-no-files-found: 'error' + + Overall_Status: + name: ci/gha_overall_status_windows + needs: [Smart_CI, Build, Samples, CXX_Unit_Tests, Python_Unit_Tests, 
CPU_Functional_Tests] + if: ${{ always() }} + runs-on: ubuntu-latest + steps: + - name: Check status of all jobs + if: >- + ${{ + contains(needs.*.result, 'failure') || + contains(needs.*.result, 'cancelled') + }} + run: exit 1 diff --git a/.github/workflows/windows_conditional_compilation.yml b/.github/workflows/windows_conditional_compilation.yml index e2155ab06997f3..57d8e327782450 100644 --- a/.github/workflows/windows_conditional_compilation.yml +++ b/.github/workflows/windows_conditional_compilation.yml @@ -4,24 +4,11 @@ on: schedule: # run daily at 00:00 - cron: '0 0 * * *' -# pull_request: -# paths-ignore: -# - '**/docs/**' -# - 'docs/**' -# - '**/**.md' -# - '**.md' -# - '**/layer_tests_summary/**' -# - '**/conformance/**' -# push: -# paths-ignore: -# - '**/docs/**' -# - 'docs/**' -# - '**/**.md' -# - '**.md' -# - '**/layer_tests_summary/**' -# - '**/conformance/**' -# branches: -# - master + pull_request: + push: + branches: + - master + - 'releases/**' concurrency: # github.ref is not unique in post-commit @@ -32,12 +19,36 @@ env: PYTHON_VERSION: '3.11' jobs: + Smart_CI: + runs-on: ubuntu-latest + outputs: + affected_components: "${{ steps.smart_ci.outputs.affected_components }}" + skip_workflow: "${{ steps.smart_ci.outputs.skip_workflow }}" + steps: + - name: checkout action + uses: actions/checkout@v4 + with: + sparse-checkout: .github/actions/smart-ci + + - name: Get affected components + id: smart_ci + uses: ./.github/actions/smart-ci + with: + repository: ${{ github.repository }} + pr: ${{ github.event.number }} + commit_sha: ${{ github.sha }} + component_pattern: "category: (.*)" + repo_token: ${{ secrets.GITHUB_TOKEN }} + skip_when_only_listed_labels_set: 'docs' + skip_when_only_listed_files_changed: '*.md,*.rst,*.png,*.jpg,*.svg,*/layer_tests_summary/*,*/conformance/*' + Build: + needs: Smart_CI timeout-minutes: 180 defaults: run: shell: pwsh - runs-on: windows-latest-8-cores + runs-on: aks-win-16-cores-32gb env: CMAKE_BUILD_TYPE: 'Release' CMAKE_GENERATOR: 'Ninja Multi-Config' @@ -49,6 +60,10 @@ jobs: BUILD_DIR: "${{ github.workspace }}\\openvino_build" MODELS_PATH: "${{ github.workspace }}\\testdata" SELECTIVE_BUILD_STAT_DIR: "${{ github.workspace }}\\selective_build_stat" + # TODO: specify version of compiler here + SCCACHE_AZURE_KEY_PREFIX: windows2022_x86_64_itt_Release + if: "!needs.smart_ci.outputs.skip_workflow" + steps: - name: Clone OpenVINO uses: actions/checkout@v4 @@ -82,6 +97,11 @@ jobs: should-setup-pip-paths: 'false' self-hosted-runner: 'false' + - name: Install sccache + uses: mozilla-actions/sccache-action@v0.0.3 + with: + version: "v0.5.4" + - name: Install build dependencies run: choco install --no-progress ninja @@ -89,15 +109,19 @@ jobs: run: | # For running ONNX frontend unit tests python3 -m pip install --force-reinstall -r ${{ env.OPENVINO_REPO }}/src/frontends/onnx/tests/requirements.txt - + # For running TensorFlow frontend unit tests python3 -m pip install -r ${{ env.OPENVINO_REPO }}/src/frontends/tensorflow/tests/requirements.txt - + # For running TensorFlow Lite frontend unit tests python3 -m pip install -r ${{ env.OPENVINO_REPO }}/src/frontends/tensorflow_lite/tests/requirements.txt - + + # For getting rid of SSL issues during model downloading for unit tests + python3 -m pip install certifi + + # Disabled because of CVS-95904 # For running Paddle frontend unit tests - python3 -m pip install -r ${{ env.OPENVINO_REPO }}/src/frontends/paddle/tests/requirements.txt + # python3 -m pip install -r ${{ env.OPENVINO_REPO 
}}/src/frontends/paddle/tests/requirements.txt # # Build @@ -106,17 +130,8 @@ jobs: - name: Configure Developer Command Prompt for Microsoft Visual C++ uses: ilammy/msvc-dev-cmd@v1 - - name: Setup sccache - uses: hendrikmuhs/ccache-action@v1.2 - with: - variant: sccache - max-size: "2000M" - # Should save cache only if run in the master branch of the base repo - # github.ref_name is 'ref/PR_#' in case of the PR, and 'branch_name' when executed on push - save: ${{ github.ref_name == 'master' && 'true' || 'false' }} - key: ${{ github.job }}-${{ runner.os }}-itt - restore-keys: | - ${{ github.job }}-${{ runner.os }}-itt + - name: Set SSL_CERT_FILE for model downloading for unit tests + run: echo SSL_CERT_FILE=$(python3 -m certifi) >> $env:GITHUB_ENV - name: CMake configure - CC COLLECT run: | @@ -133,10 +148,29 @@ jobs: -S ${{ env.OPENVINO_REPO }} ` -B ${{ env.BUILD_DIR }} + - name: Clean sccache stats + run: '& "$Env:SCCACHE_PATH" --zero-stats' + + # to get more information on the issue + # described in the next step + - name: Show which network ports are used + run: netstat -ban + + # the case is the following: + # sccache: error: An attempt was made to access a socket in a way forbidden by its access permissions. (os error 10013) + # This looks like the attempt to use + # a port below 1024 or a port + # which is occupied by another app + - name: Stop sccache server just in case + run: '& "$Env:SCCACHE_PATH" --stop-server' + - name: Cmake build - CC COLLECT run: | - cmake --build ${{ env.BUILD_DIR }} --parallel --config ${{ env.CMAKE_BUILD_TYPE }} - cmake --build ${{ env.BUILD_DIR }} --parallel --config ${{ env.CMAKE_BUILD_TYPE }} --target sea_itt_lib + cmake --build ${{ env.BUILD_DIR }} --parallel 8 --config ${{ env.CMAKE_BUILD_TYPE }} && ` + cmake --build ${{ env.BUILD_DIR }} --parallel 8 --config ${{ env.CMAKE_BUILD_TYPE }} --target sea_itt_lib + + - name: Show sccache stats + run: '& "$Env:SCCACHE_PATH" --show-stats' - name: Cmake install - OpenVINO run: cmake -DCMAKE_INSTALL_PREFIX=${{ env.INSTALL_DIR }} -P ${{ env.BUILD_DIR }}/cmake_install.cmake @@ -160,7 +194,7 @@ jobs: shell: cmd run: | set path=%path%;${{ env.OPENVINO_REPO }}\temp\tbb\bin - + python3 ${{ env.OPENVINO_REPO }}\thirdparty\itt_collector\runtool\sea_runtool.py ^ --bindir ${{ env.OPENVINO_REPO }}\bin\intel64\${{ env.CMAKE_BUILD_TYPE }} ^ -o ${{ env.SELECTIVE_BUILD_STAT_DIR }}\itt_stat ! 
${{ env.OPENVINO_REPO }}\bin\intel64\${{ env.CMAKE_BUILD_TYPE }}\benchmark_app.exe ^ @@ -188,7 +222,7 @@ jobs: Compress-Archive @compress $compress = @{ - Path = "${{ env.OPENVINO_REPO }}/bin/intel64/${{ env.CMAKE_BUILD_TYPE }}/ov_cpu_func_tests.exe", "${{ env.OPENVINO_REPO }}/src/tests/test_utils/functional_test_utils/layer_tests_summary", "${{ env.INSTALL_DIR }}/runtime/3rdparty/tbb" + Path = "${{ env.OPENVINO_REPO }}/bin/intel64/${{ env.CMAKE_BUILD_TYPE }}/ov_cpu_func_tests.exe", "${{ env.OPENVINO_REPO }}/bin/intel64/${{ env.CMAKE_BUILD_TYPE }}/template_extension.dll", "${{ env.OPENVINO_REPO }}/src/tests/test_utils/functional_test_utils/layer_tests_summary", "${{ env.INSTALL_DIR }}/runtime/3rdparty/tbb" CompressionLevel = "Optimal" DestinationPath = "${{ env.BUILD_DIR }}/openvino_tests.zip" } @@ -212,11 +246,11 @@ jobs: CC_Build: name: Conditional Compilation - needs: Build + needs: [Build, Smart_CI] defaults: run: shell: pwsh - runs-on: windows-latest-8-cores + runs-on: aks-win-16-cores-32gb env: CMAKE_BUILD_TYPE: 'Release' CMAKE_CXX_COMPILER_LAUNCHER: sccache @@ -225,6 +259,9 @@ jobs: BUILD_DIR: "${{ github.workspace }}\\openvino_build" MODELS_PATH: "${{ github.workspace }}\\testdata" SELECTIVE_BUILD_STAT_DIR: "${{ github.workspace }}\\selective_build_stat" + SCCACHE_AZURE_KEY_PREFIX: windows2022_x86_64_cc_Release + if: "!needs.smart_ci.outputs.skip_workflow" + steps: - name: Clone OpenVINO uses: actions/checkout@v4 @@ -249,6 +286,18 @@ jobs: - name: Extract selective build statistics package run: Expand-Archive ${{ env.SELECTIVE_BUILD_STAT_DIR }}/openvino_selective_build_stat.zip -DestinationPath "${{ env.SELECTIVE_BUILD_STAT_DIR }}" + - name: Setup Python ${{ env.PYTHON_VERSION }} + uses: ./openvino/.github/actions/setup_python + with: + version: ${{ env.PYTHON_VERSION }} + should-setup-pip-paths: 'false' + self-hosted-runner: 'false' + + - name: Install sccache + uses: mozilla-actions/sccache-action@v0.0.3 + with: + version: "v0.5.4" + - name: CMake configure - CC ON run: | cmake ` @@ -267,9 +316,15 @@ jobs: -S ${{ env.OPENVINO_REPO }} ` -B ${{ env.BUILD_DIR }} + - name: Clean sccache stats + run: '& "$Env:SCCACHE_PATH" --zero-stats' + - name: Cmake build - CC ON run: cmake --build ${{ env.BUILD_DIR }} --parallel --config ${{ env.CMAKE_BUILD_TYPE }} --target benchmark_app + - name: Show sccache stats + run: '& "$Env:SCCACHE_PATH" --show-stats' + - name: List bin files shell: cmd run: dir ${{ env.OPENVINO_REPO }}\bin\ /s @@ -282,16 +337,18 @@ jobs: CPU_Functional_Tests: name: CPU functional tests - needs: Build + needs: [Build, Smart_CI] + timeout-minutes: 70 defaults: run: shell: pwsh - runs-on: windows-latest-8-cores + runs-on: aks-win-8-cores-16gb env: OPENVINO_REPO: "${{ github.workspace }}\\openvino" INSTALL_TEST_DIR: "${{ github.workspace }}\\tests_install" PARALLEL_TEST_SCRIPT: "${{ github.workspace }}\\tests_install\\layer_tests_summary\\run_parallel.py" PARALLEL_TEST_CACHE: "${{ github.workspace }}\\tests_install\\test_cache.lst" + if: fromJSON(needs.smart_ci.outputs.affected_components).CPU.test steps: - name: Download OpenVINO tests package @@ -350,3 +407,17 @@ jobs: ${{ env.INSTALL_TEST_DIR }}/logs/interapted/*.log ${{ env.INSTALL_TEST_DIR }}/logs/disabled_tests.log if-no-files-found: 'error' + + Overall_Status: + name: ci/gha_overall_status_windows_cc + needs: [Smart_CI, Build, CC_Build, CPU_Functional_Tests] + if: ${{ always() }} + runs-on: ubuntu-latest + steps: + - name: Check status of all jobs + if: >- + ${{ + contains(needs.*.result, 'failure') || + 
contains(needs.*.result, 'cancelled') + }} + run: exit 1 diff --git a/.gitignore b/.gitignore index 769c4a4e581fae..9bc1e79b3e53b1 100644 --- a/.gitignore +++ b/.gitignore @@ -7,7 +7,7 @@ cmake-build* !__init__.py !__main__.py # and sphinx documentation folders -!docs/_* +!docs/sphinx_setup/_* # developer tools *.idea diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 5579299cc8b3c6..86ca1d88b5c6d7 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -4,8 +4,14 @@ OpenVINO™ is always looking for opportunities to improve and your contributions play a big role in this process. There are several ways you can make the -product better: +product better. +# Table of Contents +1. [Forms of contribution](#Forms-of-contribution) +2. [Technical guide](#Technical-guide) + + +## Forms of contribution ### Provide Feedback @@ -32,7 +38,7 @@ product better: If you want to help improving OpenVINO, choose one of the issues reported in [GitHub Issue Tracker](https://github.com/openvinotoolkit/openvino/issues) and [create a Pull Request](./CONTRIBUTING_PR.md) addressing it. Consider one of the - tasks listed as [first-time contributions](https://github.com/openvinotoolkit/openvino/issues/17502). + tasks listed as [first-time contributions](https://github.com/orgs/openvinotoolkit/projects/3). If the feature you want to develop is more complex or not well defined by the reporter, it is always a good idea to [discuss it](https://github.com/openvinotoolkit/openvino/discussions) with OpenVINO developers first. Before creating a new PR, check if nobody is already @@ -81,6 +87,66 @@ product better: share your expertise with the community. Check GitHub Discussions and Issues to see if you can help someone. +## Technical guide + +This section lists all the necessary steps required to set up your environment, build OpenVINO locally, and run tests for specific components. It's a perfect place to start when you have just picked a Good First Issue and are wondering how to start working on it. + +Keep in mind that we are here to help - **do not hesitate to ask the development team if something is not clear**. Such questions allow us to keep improving our documentation. + +### 1. Prerequisites + +You can start with the following links: +- [What is OpenVINO?](https://github.com/openvinotoolkit/openvino#what-is-openvino-toolkit) +- [OpenVINO architecture](https://github.com/openvinotoolkit/openvino/blob/master/src/docs/architecture.md) +- [User documentation](https://docs.openvino.ai/) +- [Blog post on contributing to OpenVINO](https://medium.com/openvino-toolkit/how-to-contribute-to-an-ai-open-source-project-c741f48e009e) +- [Pick up a Good First Issue](https://github.com/orgs/openvinotoolkit/projects/3) + +### 2. Building the project + +In order to build the project, follow the [build instructions for your specific OS](https://github.com/openvinotoolkit/openvino/blob/master/docs/dev/build.md). + +### 3. Familiarize yourself with the component you'll be working with + +Choose the component your Good First Issue is related to. You can run tests to make sure it works correctly. 
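For illustration, a minimal local build-and-test loop for such a component could look like the sketch below. It assumes a Linux or macOS shell, a single-config CMake generator, and an x86_64 host (so test binaries land under `bin/intel64/Release`); the `ENABLE_TESTS` option, the `ov_core_unit_tests` binary, and the gtest flags mirror the workflows earlier in this patch, while the exact paths and targets on your machine may differ.

```bash
#!/usr/bin/env bash
# Minimal sketch of a local build-and-test loop (not the official procedure;
# follow the OS-specific build instructions linked above for real setups).
set -euo pipefail

# Clone with submodules, as the CI checkout does.
git clone --recurse-submodules https://github.com/openvinotoolkit/openvino.git
cd openvino

# Configure a Release build with tests enabled.
cmake -B build -S . -DCMAKE_BUILD_TYPE=Release -DENABLE_TESTS=ON

# Build; adding "--target ov_core_unit_tests" builds just one test binary.
cmake --build build --parallel

# Test binaries are placed under <repo>/bin/<arch>/<config>.
# Run the tests of the component you are changing, e.g. the core library:
./bin/intel64/Release/ov_core_unit_tests --gtest_print_time=1

# While iterating, a gtest filter narrows the run further, for example:
# ./bin/intel64/Release/ov_core_unit_tests --gtest_filter='*YourOpName*'
```

Running only the gtest binary (or the matching pytest folder) for your component keeps the feedback loop short; the full suites in the workflows above are what CI runs on your pull request anyway. The component links below point to the sources and tests of each area.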
+ +##### APIs +- [C API](https://github.com/openvinotoolkit/openvino/tree/master/src/bindings/c) +- [Core](https://github.com/openvinotoolkit/openvino/tree/master/src/core) +- [Python API](https://github.com/openvinotoolkit/openvino/tree/master/src/bindings/python) + +##### Frontends +- [IR Frontend](https://github.com/openvinotoolkit/openvino/tree/master/src/frontends/ir) +- [ONNX Frontend](https://github.com/openvinotoolkit/openvino/tree/master/src/frontends/onnx) +- [PaddlePaddle Frontend](https://github.com/openvinotoolkit/openvino/tree/master/src/frontends/paddle) +- [PyTorch Frontend](https://github.com/openvinotoolkit/openvino/tree/master/src/frontends/pytorch) +- [TensorFlow Frontend](https://github.com/openvinotoolkit/openvino/tree/master/src/frontends/tensorflow) + +##### Plugins +- [Auto plugin](https://github.com/openvinotoolkit/openvino/blob/master/src/plugins/auto) +- [CPU plugin](https://github.com/openvinotoolkit/openvino/blob/master/src/plugins/intel_cpu) +- [GPU plugin](https://github.com/openvinotoolkit/openvino/blob/master/src/plugins/intel_gpu) +- [Hetero plugin](https://github.com/openvinotoolkit/openvino/blob/master/src/plugins/hetero) +- [Template plugin](https://github.com/openvinotoolkit/openvino/tree/master/src/plugins/template) + +##### Tools +- [Benchmark Tool](https://github.com/openvinotoolkit/openvino/tree/master/tools/benchmark_tool) +- [Model Optimizer](https://github.com/openvinotoolkit/openvino/tree/master/tools/mo) + +##### Others +- [Documentation](https://github.com/openvinotoolkit/openvino/blob/master/CONTRIBUTING_DOCS.md) + +### 3. Start working on your Good First Issue + +Use the issue description and locally built OpenVINO to complete the task. Remember that you can always ask users tagged in the "Contact points" section for help! + +### 4. Submit a PR with your changes + +Follow our [Good Pull Request guidelines](https://github.com/openvinotoolkit/openvino/blob/master/CONTRIBUTING_PR.md). + +### 5. Wait for a review + +We'll make sure to review your Pull Request as soon as possible and provide you with our feedback. You can expect a merge once your changes are validated with automatic tests and approved by maintainers. ## License diff --git a/cmake/developer_package/OpenVINODeveloperScriptsConfig.cmake b/cmake/developer_package/OpenVINODeveloperScriptsConfig.cmake index bc512b9b229b02..fc9abc64b9e4cc 100644 --- a/cmake/developer_package/OpenVINODeveloperScriptsConfig.cmake +++ b/cmake/developer_package/OpenVINODeveloperScriptsConfig.cmake @@ -87,11 +87,6 @@ function(ov_set_temp_directory temp_variable source_tree_dir) endif() endfunction() -macro(set_temp_directory) - message(WARNING "'set_temp_directory' is deprecated. Please, use 'ov_set_temp_directory'") - ov_set_temp_directory(${ARGV}) -endmacro() - # # For cross-compilation # @@ -294,11 +289,6 @@ function(ov_mark_target_as_cc TARGET_NAME) add_dependencies(${TARGET_NAME} conditional_compilation_gen) endfunction() -function(ie_mark_target_as_cc TARGET_NAME) - message(WARNING "This function is deprecated. 
Please use ov_mark_target_as_cc(TARGET_NAME) instead.") - ov_mark_target_as_cc(${TARGET_NAME}) -endfunction() - include(python_requirements) # Code style utils diff --git a/cmake/developer_package/add_target_helpers.cmake b/cmake/developer_package/add_target_helpers.cmake index 92f4afbc23bbbe..238a9cde5b37eb 100644 --- a/cmake/developer_package/add_target_helpers.cmake +++ b/cmake/developer_package/add_target_helpers.cmake @@ -181,15 +181,3 @@ function(ov_add_test_target) COMPONENT ${ARG_COMPONENT} EXCLUDE_FROM_ALL) endfunction() - -# deprecated - -function(addIeTarget) - message(WARNING "'addIeTarget' is deprecated, please, use 'ov_add_target' instead") - ov_add_target(${ARGV}) -endfunction() - -function(addIeTargetTest) - message(WARNING "'addIeTargetTest' is deprecated, please, use 'ov_add_test_target' instead") - ov_add_test_target(${ARGV}) -endfunction() diff --git a/cmake/developer_package/api_validator/api_validator.cmake b/cmake/developer_package/api_validator/api_validator.cmake index 6749366a64db05..4eeb9e1e5e0b7e 100644 --- a/cmake/developer_package/api_validator/api_validator.cmake +++ b/cmake/developer_package/api_validator/api_validator.cmake @@ -196,10 +196,3 @@ endfunction() function(ov_add_api_validator_post_build_step) _ov_add_api_validator_post_build_step(${ARGN}) endfunction() - -# deprecated - -function(ie_add_api_validator_post_build_step) - message(WARNING "'ie_add_api_validator_post_build_step' is deprecated, use 'ov_add_api_validator_post_build_step' instead") - _ov_add_api_validator_post_build_step(${ARGN}) -endfunction() diff --git a/cmake/developer_package/clang_format/clang_format.cmake b/cmake/developer_package/clang_format/clang_format.cmake index 57319e48006938..b031c1e640bce9 100644 --- a/cmake/developer_package/clang_format/clang_format.cmake +++ b/cmake/developer_package/clang_format/clang_format.cmake @@ -130,8 +130,3 @@ function(ov_add_clang_format_target TARGET_NAME) add_dependencies(clang_format_check_all ${TARGET_NAME}) add_dependencies(clang_format_fix_all ${TARGET_NAME}_fix) endfunction() - -function(add_clang_format_target) - message(WARNING "add_clang_format_target is deprecated, use ov_add_clang_format_target instead") - ov_add_clang_format_target(${ARGV}) -endfunction() diff --git a/cmake/developer_package/compile_flags/os_flags.cmake b/cmake/developer_package/compile_flags/os_flags.cmake index c0c878e0183eb0..2e2e52b015c58d 100644 --- a/cmake/developer_package/compile_flags/os_flags.cmake +++ b/cmake/developer_package/compile_flags/os_flags.cmake @@ -32,11 +32,6 @@ macro(ov_disable_deprecated_warnings) set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} ${ov_c_cxx_deprecated}") endmacro() -macro(disable_deprecated_warnings) - message(WARNING "'disable_deprecated_warnings' is deprecated, use 'ov_disable_deprecated_warnings' instead") - ov_disable_deprecated_warnings() -endmacro() - # # ov_deprecated_no_errors() # @@ -125,7 +120,7 @@ macro(ov_avx2_optimization_flags flags) set(${flags} -xCORE-AVX2) endif() elseif(OV_COMPILER_IS_CLANG OR CMAKE_COMPILER_IS_GNUCXX) - set(${flags} -mavx2 -mfma) + set(${flags} -mavx2 -mfma -mf16c) else() message(WARNING "Unsupported CXX compiler ${CMAKE_CXX_COMPILER_ID}") endif() @@ -147,7 +142,7 @@ macro(ov_avx512_optimization_flags flags) set(${flags} -xCOMMON-AVX512) endif() elseif(OV_COMPILER_IS_CLANG OR CMAKE_COMPILER_IS_GNUCXX) - set(${flags} -mavx512f -mfma) + set(${flags} -mavx512f -mfma -mf16c) else() message(WARNING "Unsupported CXX compiler ${CMAKE_CXX_COMPILER_ID}") endif() @@ -213,16 +208,6 @@ 
function(ov_disable_all_warnings) endforeach() endfunction() -# -# ie_enable_lto() -# -# Enables Link Time Optimization compilation -# -macro(ie_enable_lto) - message(WARNING "'ie_enable_lto' is deprecated, set 'INTERPROCEDURAL_OPTIMIZATION_RELEASE' target property instead") - set(CMAKE_INTERPROCEDURAL_OPTIMIZATION_RELEASE ON) -endmacro() - # # ov_add_compiler_flags(]) # @@ -235,11 +220,6 @@ macro(ov_add_compiler_flags) endforeach() endmacro() -macro(ie_add_compiler_flags) - message(WARNING "'ie_add_compiler_flags' is deprecated, use 'ov_add_compiler_flags' instead") - ov_add_compiler_flags(${ARGN}) -endmacro() - # # ov_force_include(
) # diff --git a/cmake/developer_package/faster_build.cmake b/cmake/developer_package/faster_build.cmake index f70274f465070c..b9ad18dfa98570 100644 --- a/cmake/developer_package/faster_build.cmake +++ b/cmake/developer_package/faster_build.cmake @@ -19,10 +19,3 @@ function(ov_build_target_faster TARGET_NAME) target_precompile_headers(${TARGET_NAME} ${FASTER_BUILD_PCH}) endif() endfunction() - -# deprecated - -function(ie_faster_build) - message(WARNING "ie_faster_build is deprecated, use ov_build_target_faster instead") - ov_build_target_faster(${ARGV}) -endfunction() diff --git a/cmake/developer_package/frontends/frontends.cmake b/cmake/developer_package/frontends/frontends.cmake index 1a037c5ab72309..0b14cabe54a05e 100644 --- a/cmake/developer_package/frontends/frontends.cmake +++ b/cmake/developer_package/frontends/frontends.cmake @@ -57,10 +57,10 @@ function(ov_generate_frontends_hpp) # for some reason dependency on source files does not work # so, we have to use explicit target and make it dependency for frontend_common add_custom_target(_ov_frontends_hpp DEPENDS ${ov_frontends_hpp}) - add_dependencies(frontend_common_obj _ov_frontends_hpp) + add_dependencies(openvino_frontend_common_obj _ov_frontends_hpp) # add dependency for object files - get_target_property(sources frontend_common_obj SOURCES) + get_target_property(sources openvino_frontend_common_obj SOURCES) foreach(source IN LISTS sources) if("${source}" MATCHES "\\$\\") # object library @@ -220,6 +220,7 @@ macro(ov_add_frontend) PUBLIC $ PRIVATE + $ ${frontend_root_dir}/src ${CMAKE_CURRENT_BINARY_DIR}) @@ -342,6 +343,7 @@ macro(ov_add_frontend) install(DIRECTORY ${${TARGET_NAME}_INCLUDE_DIR}/openvino DESTINATION ${FRONTEND_INSTALL_INCLUDE} COMPONENT ${dev_component} + ${OV_CPACK_COMP_CORE_DEV_EXCLUDE_ALL} FILES_MATCHING PATTERN "*.hpp") # public target name diff --git a/cmake/developer_package/options.cmake b/cmake/developer_package/options.cmake index 4506d85a027f92..7a9baa0b41de24 100644 --- a/cmake/developer_package/options.cmake +++ b/cmake/developer_package/options.cmake @@ -55,20 +55,3 @@ function (ov_print_enabled_features) endforeach() message(STATUS "") endfunction() - -# deprecated - -macro (ie_option variable description value) - message(WARNING "'ie_option' is deprecated, please, use 'ov_option' instead") - ov_option(${variable} "${description}" ${value}) -endmacro() - -macro(ie_dependent_option variable description def_value condition fallback_value) - message(WARNING "'ie_dependent_option' is deprecated, please, use 'ov_dependent_option' instead") - ov_dependent_option(${variable} "${description}" ${def_value} "${condition}" ${fallback_value}) -endmacro() - -function(print_enabled_features) - message(WARNING "'print_enabled_features' is deprecated, please, use 'ov_print_enabled_features' instead") - ov_print_enabled_features() -endfunction() diff --git a/cmake/developer_package/packaging/archive.cmake b/cmake/developer_package/packaging/archive.cmake index 5f259a78d72838..339371979980e8 100644 --- a/cmake/developer_package/packaging/archive.cmake +++ b/cmake/developer_package/packaging/archive.cmake @@ -69,6 +69,9 @@ macro(ov_define_component_include_rules) unset(OV_CPACK_COMP_CORE_C_EXCLUDE_ALL) unset(OV_CPACK_COMP_CORE_DEV_EXCLUDE_ALL) unset(OV_CPACK_COMP_CORE_C_DEV_EXCLUDE_ALL) + # tbb + unset(OV_CPACK_COMP_TBB_EXCLUDE_ALL) + unset(OV_CPACK_COMP_TBB_DEV_EXCLUDE_ALL) # licensing unset(OV_CPACK_COMP_LICENSING_EXCLUDE_ALL) # samples @@ -82,6 +85,8 @@ macro(ov_define_component_include_rules) 
set(OV_CPACK_COMP_PYTHON_OPENVINO_PACKAGE_EXCLUDE_ALL EXCLUDE_FROM_ALL) unset(OV_CPACK_COMP_PYTHON_WHEELS_EXCLUDE_ALL) unset(OV_CPACK_COMP_OPENVINO_REQ_FILES_EXCLUDE_ALL) + # nodejs + set(OV_CPACK_COMP_NPM_EXCLUDE_ALL EXCLUDE_FROM_ALL) # tools set(OV_CPACK_COMP_OPENVINO_DEV_REQ_FILES_EXCLUDE_ALL EXCLUDE_FROM_ALL) unset(OV_CPACK_COMP_DEPLOYMENT_MANAGER_EXCLUDE_ALL) diff --git a/cmake/developer_package/packaging/common-libraries.cmake b/cmake/developer_package/packaging/common-libraries.cmake index 9671d827521d20..d4aaedb9ab5775 100644 --- a/cmake/developer_package/packaging/common-libraries.cmake +++ b/cmake/developer_package/packaging/common-libraries.cmake @@ -77,6 +77,9 @@ macro(ov_define_component_include_rules) set(OV_CPACK_COMP_CORE_C_EXCLUDE_ALL ${OV_CPACK_COMP_CORE_EXCLUDE_ALL}) unset(OV_CPACK_COMP_CORE_DEV_EXCLUDE_ALL) set(OV_CPACK_COMP_CORE_C_DEV_EXCLUDE_ALL ${OV_CPACK_COMP_CORE_DEV_EXCLUDE_ALL}) + # tbb + set(OV_CPACK_COMP_TBB_EXCLUDE_ALL EXCLUDE_FROM_ALL) + set(OV_CPACK_COMP_TBB_DEV_EXCLUDE_ALL EXCLUDE_FROM_ALL) # licensing if(CPACK_GENERATOR STREQUAL "CONAN") unset(OV_CPACK_COMP_LICENSING_EXCLUDE_ALL) @@ -98,6 +101,8 @@ macro(ov_define_component_include_rules) set(OV_CPACK_COMP_PYTHON_WHEELS_EXCLUDE_ALL EXCLUDE_FROM_ALL) # we don't need requirements.txt in package, because dependencies are installed by packages managers like conda set(OV_CPACK_COMP_OPENVINO_REQ_FILES_EXCLUDE_ALL EXCLUDE_FROM_ALL) + # nodejs + set(OV_CPACK_COMP_NPM_EXCLUDE_ALL EXCLUDE_FROM_ALL) # tools set(OV_CPACK_COMP_OPENVINO_DEV_REQ_FILES_EXCLUDE_ALL EXCLUDE_FROM_ALL) set(OV_CPACK_COMP_DEPLOYMENT_MANAGER_EXCLUDE_ALL EXCLUDE_FROM_ALL) diff --git a/cmake/developer_package/packaging/debian/debian.cmake b/cmake/developer_package/packaging/debian/debian.cmake index 38cd649ad41cc3..be5f16d6b2814a 100644 --- a/cmake/developer_package/packaging/debian/debian.cmake +++ b/cmake/developer_package/packaging/debian/debian.cmake @@ -78,6 +78,9 @@ macro(ov_define_component_include_rules) set(OV_CPACK_COMP_CORE_C_EXCLUDE_ALL ${OV_CPACK_COMP_CORE_EXCLUDE_ALL}) unset(OV_CPACK_COMP_CORE_DEV_EXCLUDE_ALL) set(OV_CPACK_COMP_CORE_C_DEV_EXCLUDE_ALL ${OV_CPACK_COMP_CORE_DEV_EXCLUDE_ALL}) + # tbb + set(OV_CPACK_COMP_TBB_EXCLUDE_ALL EXCLUDE_FROM_ALL) + set(OV_CPACK_COMP_TBB_DEV_EXCLUDE_ALL EXCLUDE_FROM_ALL) # licensing set(OV_CPACK_COMP_LICENSING_EXCLUDE_ALL EXCLUDE_FROM_ALL) # samples @@ -103,6 +106,8 @@ macro(ov_define_component_include_rules) set(OV_CPACK_COMP_PYTHON_WHEELS_EXCLUDE_ALL EXCLUDE_FROM_ALL) # because numpy is installed by apt set(OV_CPACK_COMP_OPENVINO_REQ_FILES_EXCLUDE_ALL EXCLUDE_FROM_ALL) + # nodejs + set(OV_CPACK_COMP_NPM_EXCLUDE_ALL EXCLUDE_FROM_ALL) # tools set(OV_CPACK_COMP_OPENVINO_DEV_REQ_FILES_EXCLUDE_ALL EXCLUDE_FROM_ALL) set(OV_CPACK_COMP_DEPLOYMENT_MANAGER_EXCLUDE_ALL EXCLUDE_FROM_ALL) diff --git a/cmake/developer_package/packaging/npm.cmake b/cmake/developer_package/packaging/npm.cmake new file mode 100644 index 00000000000000..3d3c74e34d54ac --- /dev/null +++ b/cmake/developer_package/packaging/npm.cmake @@ -0,0 +1,105 @@ +# Copyright (C) 2018-2023 Intel Corporation +# SPDX-License-Identifier: Apache-2.0 +# + +include(GNUInstallDirs) + +# We have to specify RPATH, all runtime libs are in one dir +set(CMAKE_SKIP_INSTALL_RPATH OFF) + +# +# ov_npm_cpack_set_dirs() +# +# Set directories for cpack +# +macro(ov_npm_cpack_set_dirs) + set(OV_CPACK_INCLUDEDIR .) + set(OV_CPACK_IE_CMAKEDIR .) + set(OV_CPACK_NGRAPH_CMAKEDIR .) + set(OV_CPACK_OPENVINO_CMAKEDIR .) + set(OV_CPACK_DOCDIR .) 
+ set(OV_CPACK_LICENSESDIR .) + set(OV_CPACK_SAMPLESDIR .) + set(OV_CPACK_WHEELSDIR .) + set(OV_CPACK_TOOLSDIR .) + set(OV_CPACK_DEVREQDIR .) + set(OV_CPACK_PYTHONDIR .) + + if(WIN32) + set(OV_CPACK_LIBRARYDIR .) + set(OV_CPACK_RUNTIMEDIR .) + set(OV_CPACK_ARCHIVEDIR .) + elseif(APPLE) + set(OV_CPACK_LIBRARYDIR .) + set(OV_CPACK_RUNTIMEDIR .) + set(OV_CPACK_ARCHIVEDIR .) + else() + set(OV_CPACK_LIBRARYDIR .) + set(OV_CPACK_RUNTIMEDIR .) + set(OV_CPACK_ARCHIVEDIR .) + endif() + + set(OV_CPACK_LIBRARYDIR .) + set(OV_CPACK_ARCHIVEDIR .) + set(OV_CPACK_PLUGINSDIR .) + set(OV_CPACK_IE_CMAKEDIR .) + set(OV_CPACK_NGRAPH_CMAKEDIR .) + set(OV_CPACK_OPENVINO_CMAKEDIR .) + set(OV_CPACK_DOCDIR .) + set(OV_CPACK_LICENSESDIR licenses) + set(OV_CPACK_PYTHONDIR .) + + # non-native stuff + set(OV_CPACK_SHAREDIR .) + set(OV_CPACK_SAMPLESDIR .) + set(OV_CPACK_DEVREQDIR .) + unset(OV_CPACK_SHAREDIR) + + # skipped during debian packaging + set(OV_CPACK_WHEELSDIR .) +endmacro() + +ov_npm_cpack_set_dirs() + +# +# Override include / exclude rules for components +# This is required to exclude some files from installation +# (e.g. npm package requires only C++ Core component) +# + +macro(ov_define_component_include_rules) + # core components + unset(OV_CPACK_COMP_CORE_EXCLUDE_ALL) + set(OV_CPACK_COMP_CORE_C_EXCLUDE_ALL EXCLUDE_FROM_ALL) + set(OV_CPACK_COMP_CORE_DEV_EXCLUDE_ALL EXCLUDE_FROM_ALL) + set(OV_CPACK_COMP_CORE_C_DEV_EXCLUDE_ALL EXCLUDE_FROM_ALL) + # tbb + unset(OV_CPACK_COMP_TBB_EXCLUDE_ALL) + set(OV_CPACK_COMP_TBB_DEV_EXCLUDE_ALL EXCLUDE_FROM_ALL) + # licensing + unset(OV_CPACK_COMP_LICENSING_EXCLUDE_ALL) + # samples + set(OV_CPACK_COMP_CPP_SAMPLES_EXCLUDE_ALL EXCLUDE_FROM_ALL) + set(OV_CPACK_COMP_C_SAMPLES_EXCLUDE_ALL EXCLUDE_FROM_ALL) + set(OV_CPACK_COMP_PYTHON_SAMPLES_EXCLUDE_ALL EXCLUDE_FROM_ALL) + # python + set(OV_CPACK_COMP_PYTHON_OPENVINO_EXCLUDE_ALL EXCLUDE_FROM_ALL) + set(OV_CPACK_COMP_BENCHMARK_APP_EXCLUDE_ALL EXCLUDE_FROM_ALL) + set(OV_CPACK_COMP_OVC_EXCLUDE_ALL EXCLUDE_FROM_ALL) + set(OV_CPACK_COMP_PYTHON_OPENVINO_PACKAGE_EXCLUDE_ALL EXCLUDE_FROM_ALL) + set(OV_CPACK_COMP_PYTHON_WHEELS_EXCLUDE_ALL EXCLUDE_FROM_ALL) + set(OV_CPACK_COMP_OPENVINO_REQ_FILES_EXCLUDE_ALL EXCLUDE_FROM_ALL) + # nodejs + unset(OV_CPACK_COMP_NPM_EXCLUDE_ALL) + # tools + set(OV_CPACK_COMP_OPENVINO_DEV_REQ_FILES_EXCLUDE_ALL EXCLUDE_FROM_ALL) + set(OV_CPACK_COMP_DEPLOYMENT_MANAGER_EXCLUDE_ALL EXCLUDE_FROM_ALL) + # scripts + set(OV_CPACK_COMP_INSTALL_DEPENDENCIES_EXCLUDE_ALL EXCLUDE_FROM_ALL) + set(OV_CPACK_COMP_SETUPVARS_EXCLUDE_ALL EXCLUDE_FROM_ALL) +endmacro() + +ov_define_component_include_rules() + +# New in version 3.18 +set(CPACK_ARCHIVE_THREADS 8) diff --git a/cmake/developer_package/packaging/nsis.cmake b/cmake/developer_package/packaging/nsis.cmake index 4174037af74f39..03d080aec58648 100644 --- a/cmake/developer_package/packaging/nsis.cmake +++ b/cmake/developer_package/packaging/nsis.cmake @@ -101,6 +101,9 @@ macro(ov_define_component_include_rules) unset(OV_CPACK_COMP_CORE_C_EXCLUDE_ALL) unset(OV_CPACK_COMP_CORE_DEV_EXCLUDE_ALL) unset(OV_CPACK_COMP_CORE_C_DEV_EXCLUDE_ALL) + # tbb + unset(OV_CPACK_COMP_TBB_EXCLUDE_ALL) + unset(OV_CPACK_COMP_TBB_DEV_EXCLUDE_ALL) # licensing unset(OV_CPACK_COMP_LICENSING_EXCLUDE_ALL) # samples @@ -114,6 +117,8 @@ macro(ov_define_component_include_rules) set(OV_CPACK_COMP_PYTHON_WHEELS_EXCLUDE_ALL EXCLUDE_FROM_ALL) set(OV_CPACK_COMP_PYTHON_OPENVINO_PACKAGE_EXCLUDE_ALL EXCLUDE_FROM_ALL) unset(OV_CPACK_COMP_OPENVINO_REQ_FILES_EXCLUDE_ALL) + # nodejs + 
set(OV_CPACK_COMP_NPM_EXCLUDE_ALL EXCLUDE_FROM_ALL) # tools unset(OV_CPACK_COMP_OPENVINO_DEV_REQ_FILES_EXCLUDE_ALL) unset(OV_CPACK_COMP_DEPLOYMENT_MANAGER_EXCLUDE_ALL) diff --git a/cmake/developer_package/packaging/packaging.cmake b/cmake/developer_package/packaging/packaging.cmake index 2279580040f736..a66b24dfc710f3 100644 --- a/cmake/developer_package/packaging/packaging.cmake +++ b/cmake/developer_package/packaging/packaging.cmake @@ -29,25 +29,32 @@ macro(ov_install_static_lib target comp) endmacro() # -# ov_set_apple_rpath( ...) +# ov_set_install_rpath( ...) # +# macOS: # Sets LC_RPATH properties for macOS MACH-O binaries to ensure that libraries can find their dependencies # when macOS system integrity protection (SIP) is enabled (DYLD_LIBRARY_PATH is ignored in this case). # Note, that this is important when binaries are dynamically loaded at runtime (e.g. via Python). # -function(ov_set_apple_rpath TARGET_NAME lib_install_path) - if(APPLE AND CPACK_GENERATOR MATCHES "^(7Z|TBZ2|TGZ|TXZ|TZ|TZST|ZIP)$") +# NPM: +# we need to set RPATH, because archive must be self-sufficient +# +function(ov_set_install_rpath TARGET_NAME lib_install_path) + if(APPLE AND CPACK_GENERATOR MATCHES "^(7Z|TBZ2|TGZ|TXZ|TZ|TZST|ZIP)$" OR CPACK_GENERATOR STREQUAL "NPM") + if (APPLE) + set(RPATH_PREFIX "@loader_path") + else() + set(RPATH_PREFIX "$ORIGIN") + endif() + unset(rpath_list) foreach(dependency_install_path IN LISTS ARGN) file(RELATIVE_PATH dependency_rpath "/${lib_install_path}" "/${dependency_install_path}") - set(dependency_rpath "@loader_path/${dependency_rpath}") + set(dependency_rpath "${RPATH_PREFIX}/${dependency_rpath}") list(APPEND rpath_list "${dependency_rpath}") endforeach() - set_target_properties(${TARGET_NAME} PROPERTIES - MACOSX_RPATH ON - INSTALL_RPATH "${rpath_list}" - INSTALL_NAME_DIR "@rpath") + set_target_properties(${TARGET_NAME} PROPERTIES INSTALL_RPATH "${rpath_list}") endif() endfunction() @@ -138,6 +145,8 @@ macro(ov_define_component_names) set(OV_CPACK_COMP_PYTHON_OPENVINO_PACKAGE "pyopenvino_package") set(OV_CPACK_COMP_PYTHON_WHEELS "python_wheels") set(OV_CPACK_COMP_OPENVINO_REQ_FILES "openvino_req_files") + # nodejs + set(OV_CPACK_COMP_NPM "ov_node_addon") # tools set(OV_CPACK_COMP_OPENVINO_DEV_REQ_FILES "openvino_dev_req_files") set(OV_CPACK_COMP_DEPLOYMENT_MANAGER "deployment_manager") @@ -178,6 +187,8 @@ elseif(CPACK_GENERATOR STREQUAL "RPM") include(packaging/rpm/rpm) elseif(CPACK_GENERATOR STREQUAL "NSIS") include(packaging/nsis) +elseif(CPACK_GENERATOR STREQUAL "NPM") + include(packaging/npm) elseif(CPACK_GENERATOR MATCHES "^(CONDA-FORGE|BREW|CONAN|VCPKG)$") include(packaging/common-libraries) elseif(CPACK_GENERATOR MATCHES "^(7Z|TBZ2|TGZ|TXZ|TZ|TZST|ZIP)$") @@ -241,10 +252,3 @@ macro(ov_cpack) include(CPack) endmacro() - -# deprecated - -macro(ie_cpack) - message(WARNING "'ie_cpack' is deprecated. 
Please, use 'ov_cpack'") - ov_cpack(${ARGV}) -endmacro() diff --git a/cmake/developer_package/packaging/rpm/rpm.cmake b/cmake/developer_package/packaging/rpm/rpm.cmake index a7c0ec2cf61087..d4cad3c128740b 100644 --- a/cmake/developer_package/packaging/rpm/rpm.cmake +++ b/cmake/developer_package/packaging/rpm/rpm.cmake @@ -69,6 +69,9 @@ macro(ov_define_component_include_rules) set(OV_CPACK_COMP_CORE_C_EXCLUDE_ALL ${OV_CPACK_COMP_CORE_EXCLUDE_ALL}) unset(OV_CPACK_COMP_CORE_DEV_EXCLUDE_ALL) set(OV_CPACK_COMP_CORE_C_DEV_EXCLUDE_ALL ${OV_CPACK_COMP_CORE_DEV_EXCLUDE_ALL}) + # tbb + set(OV_CPACK_COMP_TBB_EXCLUDE_ALL EXCLUDE_FROM_ALL) + set(OV_CPACK_COMP_TBB_DEV_EXCLUDE_ALL EXCLUDE_FROM_ALL) # licensing set(OV_CPACK_COMP_LICENSING_EXCLUDE_ALL EXCLUDE_FROM_ALL) # samples @@ -94,6 +97,8 @@ macro(ov_define_component_include_rules) set(OV_CPACK_COMP_PYTHON_WHEELS_EXCLUDE_ALL EXCLUDE_FROM_ALL) # because numpy is installed by rpm set(OV_CPACK_COMP_OPENVINO_REQ_FILES_EXCLUDE_ALL EXCLUDE_FROM_ALL) + # nodejs + set(OV_CPACK_COMP_NPM_EXCLUDE_ALL EXCLUDE_FROM_ALL) # tools set(OV_CPACK_COMP_OPENVINO_DEV_REQ_FILES_EXCLUDE_ALL EXCLUDE_FROM_ALL) set(OV_CPACK_COMP_DEPLOYMENT_MANAGER_EXCLUDE_ALL EXCLUDE_FROM_ALL) diff --git a/cmake/developer_package/plugins/plugins.cmake b/cmake/developer_package/plugins/plugins.cmake index a8ba97ad9fa27d..16a9e935a896c8 100644 --- a/cmake/developer_package/plugins/plugins.cmake +++ b/cmake/developer_package/plugins/plugins.cmake @@ -135,9 +135,6 @@ function(ov_add_plugin) install(TARGETS ${OV_PLUGIN_NAME} LIBRARY DESTINATION ${OV_CPACK_PLUGINSDIR} COMPONENT ${install_component}) - install(TARGETS ${OV_PLUGIN_NAME} - LIBRARY DESTINATION ${OV_CPACK_PLUGINSDIR} - COMPONENT ${install_component}) else() ov_install_static_lib(${OV_PLUGIN_NAME} ${OV_CPACK_COMP_CORE}) endif() @@ -167,11 +164,6 @@ function(ov_add_plugin) endif() endfunction() -function(ie_add_plugin) - message(WARNING "'ie_add_plugin' is deprecated. Please, use 'ov_add_plugin'") - ov_add_plugin(${ARGN}) -endfunction() - # # ov_register_in_plugins_xml(MAIN_TARGET
) # @@ -263,14 +255,6 @@ macro(ov_register_plugins) endif() endmacro() -# -# ie_register_plugins() -# -macro(ie_register_plugins) - message(WARNING "'ie_register_plugins' is deprecated. Please, use 'ov_register_plugins'") - ov_register_plugins(${ARGN}) -endmacro() - # # ov_target_link_plugins() # diff --git a/cmake/developer_package/version.cmake b/cmake/developer_package/version.cmake index effb320014452a..4ecf558225fb65 100644 --- a/cmake/developer_package/version.cmake +++ b/cmake/developer_package/version.cmake @@ -166,28 +166,6 @@ macro(ov_parse_ci_build_number repo_root) endif() endmacro() -macro (addVersionDefines FILE) - message(WARNING "'addVersionDefines' is deprecated. Please, use 'ov_add_version_defines'") - - set(__version_file ${FILE}) - if(NOT IS_ABSOLUTE ${__version_file}) - set(__version_file "${CMAKE_CURRENT_SOURCE_DIR}/${__version_file}") - endif() - if(NOT EXISTS ${__version_file}) - message(FATAL_ERROR "${FILE} does not exists in current source directory") - endif() - foreach (VAR ${ARGN}) - if (DEFINED ${VAR} AND NOT "${${VAR}}" STREQUAL "") - set_property( - SOURCE ${__version_file} - APPEND - PROPERTY COMPILE_DEFINITIONS - ${VAR}="${${VAR}}") - endif() - endforeach() - unset(__version_file) -endmacro() - macro (ov_add_version_defines FILE TARGET) set(__version_file ${FILE}) if(NOT IS_ABSOLUTE ${__version_file}) diff --git a/cmake/developer_package/whole_archive.cmake b/cmake/developer_package/whole_archive.cmake index 0ad00055fbfb0e..c15a5c378a3181 100644 --- a/cmake/developer_package/whole_archive.cmake +++ b/cmake/developer_package/whole_archive.cmake @@ -51,10 +51,3 @@ function(ov_target_link_whole_archive targetName) target_link_libraries(${targetName} PRIVATE ${libs}) endif() endfunction() - -# deprecated - -function(ieTargetLinkWholeArchive) - message(WARNING "'ieTargetLinkWholeArchive' is deprecated, use 'ov_target_link_whole_archive' instead") - ov_target_link_whole_archive(${ARGN}) -endfunction() diff --git a/cmake/extra_modules.cmake b/cmake/extra_modules.cmake index 6c392fcc6eed12..dae1cb0b2e045e 100644 --- a/cmake/extra_modules.cmake +++ b/cmake/extra_modules.cmake @@ -2,38 +2,6 @@ # SPDX-License-Identifier: Apache-2.0 # -function(ie_generate_dev_package_config) - # dummy check that OpenCV is here - find_package(OpenCV QUIET) - if(OpenCV_VERSION VERSION_LESS 3.0) - set(OpenCV_FOUND OFF) - endif() - - # export all targets with prefix and use them during extra modules build - export(TARGETS ${_OPENVINO_DEVELOPER_PACKAGE_TARGETS} NAMESPACE IE:: - APPEND FILE "${CMAKE_BINARY_DIR}/inference_engine_developer_package_targets.cmake") - add_custom_target(ie_dev_targets DEPENDS ${_OPENVINO_DEVELOPER_PACKAGE_TARGETS}) - - set(PATH_VARS "OpenVINO_SOURCE_DIR") - if(ENABLE_SAMPLES OR ENABLE_TESTS) - list(APPEND PATH_VARS "gflags_BINARY_DIR") - # if we've found system gflags - if(gflags_DIR) - set(gflags_BINARY_DIR "${gflags_DIR}") - endif() - endif() - - configure_package_config_file("${OpenVINO_SOURCE_DIR}/cmake/templates/InferenceEngineDeveloperPackageConfig.cmake.in" - "${CMAKE_BINARY_DIR}/InferenceEngineDeveloperPackageConfig.cmake" - INSTALL_DESTINATION share # not used - PATH_VARS ${PATH_VARS} - NO_CHECK_REQUIRED_COMPONENTS_MACRO) - - configure_file("${OpenVINO_SOURCE_DIR}/cmake/templates/InferenceEngineConfig-version.cmake.in" - "${CMAKE_BINARY_DIR}/InferenceEngineDeveloperPackageConfig-version.cmake" - @ONLY) -endfunction() - function(ov_generate_dev_package_config) # dummy check that OpenCV is here find_package(OpenCV QUIET) @@ -207,7 +175,6 @@ endfunction() 
# this OpenVINODeveloperPackageConfig.cmake is not used during extra modules build # since it's generated after modules are configured -ie_generate_dev_package_config() ov_generate_dev_package_config() # extra modules must be registered after inference_engine library diff --git a/cmake/features.cmake b/cmake/features.cmake index 209fb56b71b2bf..adf5d2ce54af4a 100644 --- a/cmake/features.cmake +++ b/cmake/features.cmake @@ -47,6 +47,7 @@ ov_dependent_option (ENABLE_ONEDNN_FOR_GPU "Enable oneDNN with GPU support" ${EN ov_option (ENABLE_DEBUG_CAPS "enable OpenVINO debug capabilities at runtime" OFF) ov_dependent_option (ENABLE_GPU_DEBUG_CAPS "enable GPU debug capabilities at runtime" ON "ENABLE_DEBUG_CAPS;ENABLE_INTEL_GPU" OFF) ov_dependent_option (ENABLE_CPU_DEBUG_CAPS "enable CPU debug capabilities at runtime" ON "ENABLE_DEBUG_CAPS;ENABLE_INTEL_CPU" OFF) +ov_dependent_option (ENABLE_SNIPPETS_DEBUG_CAPS "enable Snippets debug capabilities at runtime" ON "ENABLE_DEBUG_CAPS" OFF) ov_option (ENABLE_PROFILING_ITT "Build with ITT tracing. Optionally configure pre-built ittnotify library though INTEL_VTUNE_DIR variable." OFF) @@ -207,4 +208,8 @@ if (ENABLE_PROFILING_RAW) add_definitions(-DENABLE_PROFILING_RAW=1) endif() +if (ENABLE_SNIPPETS_DEBUG_CAPS) + add_definitions(-DSNIPPETS_DEBUG_CAPS) +endif() + ov_print_enabled_features() diff --git a/cmake/packaging/common-libraries.cmake b/cmake/packaging/common-libraries.cmake index 9b96ed528609df..32287e642218ec 100644 --- a/cmake/packaging/common-libraries.cmake +++ b/cmake/packaging/common-libraries.cmake @@ -15,9 +15,6 @@ macro(ov_cpack_settings) if(NOT OV_CPACK_COMP_${UPPER_COMP}_EXCLUDE_ALL AND # because in case of VCPKG | CONAN | BREW | CONDA-FORGE distributions, python is either not needed or installed separately (NOT item MATCHES "^${OV_CPACK_COMP_PYTHON_OPENVINO_PACKAGE}_python.*" OR ENABLE_PYTHON_PACKAGING) AND - # even for case of system TBB we have installation rules for wheels packages - # so, need to skip this explicitly since they are installed in `host` section - NOT item MATCHES "^tbb(_dev)?$" AND # the same for pugixml NOT item STREQUAL "pugixml") list(APPEND CPACK_COMPONENTS_ALL ${item}) diff --git a/cmake/packaging/debian.cmake b/cmake/packaging/debian.cmake index 4df826c876cbef..234f921e34723c 100644 --- a/cmake/packaging/debian.cmake +++ b/cmake/packaging/debian.cmake @@ -57,9 +57,6 @@ macro(ov_cpack_settings) NOT item STREQUAL "nvidia" AND # don't install Intel OpenMP NOT item STREQUAL "omp" AND - # even for case of system TBB we have installation rules for wheels packages - # so, need to skip this explicitly - NOT item MATCHES "^tbb(_dev)?$" AND # the same for pugixml NOT item STREQUAL "pugixml") list(APPEND CPACK_COMPONENTS_ALL ${item}) diff --git a/cmake/packaging/npm.cmake b/cmake/packaging/npm.cmake new file mode 100644 index 00000000000000..9da86e89d1deb9 --- /dev/null +++ b/cmake/packaging/npm.cmake @@ -0,0 +1,27 @@ +# Copyright (C) 2018-2023 Intel Corporation +# SPDX-License-Identifier: Apache-2.0 +# + +# +# OpenVINO npm binaries, includes openvino:runtime, frontends, plugins, tbb +# +macro(ov_cpack_settings) + # fill a list of components which are part of conda + set(cpack_components_all ${CPACK_COMPONENTS_ALL}) + + unset(CPACK_COMPONENTS_ALL) + foreach(item IN LISTS cpack_components_all) + string(TOUPPER ${item} UPPER_COMP) + # filter out some components, which are not needed to be wrapped to npm package + if(NOT OV_CPACK_COMP_${UPPER_COMP}_EXCLUDE_ALL AND + # python is not required for npm package + NOT item MATCHES 
"^${OV_CPACK_COMP_PYTHON_OPENVINO_PACKAGE}_python.*") + list(APPEND CPACK_COMPONENTS_ALL ${item}) + endif() + endforeach() + unset(cpack_components_all) + list(REMOVE_DUPLICATES CPACK_COMPONENTS_ALL) + + # override generator + set(CPACK_GENERATOR "TGZ") +endmacro() diff --git a/cmake/packaging/packaging.cmake b/cmake/packaging/packaging.cmake index 7d685f43143de0..43a180dd6f4739 100644 --- a/cmake/packaging/packaging.cmake +++ b/cmake/packaging/packaging.cmake @@ -4,6 +4,8 @@ if(CPACK_GENERATOR STREQUAL "DEB") include("${OpenVINO_SOURCE_DIR}/cmake/packaging/debian.cmake") +elseif(CPACK_GENERATOR STREQUAL "NPM") + include("${OpenVINO_SOURCE_DIR}/cmake/packaging/npm.cmake") elseif(CPACK_GENERATOR STREQUAL "RPM") include("${OpenVINO_SOURCE_DIR}/cmake/packaging/rpm.cmake") elseif(CPACK_GENERATOR MATCHES "^(CONDA-FORGE|BREW|CONAN|VCPKG)$") diff --git a/cmake/packaging/rpm.cmake b/cmake/packaging/rpm.cmake index 52ef6aebbfcf8a..5aa6808fa11d82 100644 --- a/cmake/packaging/rpm.cmake +++ b/cmake/packaging/rpm.cmake @@ -43,9 +43,6 @@ macro(ov_cpack_settings) NOT item STREQUAL "nvidia" AND # don't install Intel OpenMP NOT item STREQUAL "omp" AND - # even for case of system TBB we have installation rules for wheels packages - # so, need to skip this explicitly - NOT item MATCHES "^tbb(_dev)?$" AND # the same for pugixml NOT item STREQUAL "pugixml") list(APPEND CPACK_COMPONENTS_ALL ${item}) diff --git a/cmake/templates/InferenceEngineDeveloperPackageConfig.cmake.in b/cmake/templates/InferenceEngineDeveloperPackageConfig.cmake.in deleted file mode 100644 index a98b4207e285d2..00000000000000 --- a/cmake/templates/InferenceEngineDeveloperPackageConfig.cmake.in +++ /dev/null @@ -1,188 +0,0 @@ -# Copyright (C) 2018-2023 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 -# - -@PACKAGE_INIT@ - -include(CMakeFindDependencyMacro) - -message(WARNING "find_package(InferenceEngineDeveloperPackage) is deprecated and will be removed in 2024.0 release. 
Please, use find_package(OpenVINODeveloperPackage)") - -# TODO: remove after changing [private plugins] -set_and_check(OpenVINO_SOURCE_DIR "@OpenVINO_SOURCE_DIR@") # NPU -set_and_check(OpenVINO_MAIN_SOURCE_DIR "@OpenVINO_SOURCE_DIR@") # NPU - -# Variables to export in plugin's projects - -set(ov_options "@OV_OPTIONS@") -list(APPEND ov_options CMAKE_CXX_COMPILER_LAUNCHER CMAKE_C_COMPILER_LAUNCHER - CMAKE_CXX_LINKER_LAUNCHER CMAKE_C_LINKER_LAUNCHER - CMAKE_INSTALL_PREFIX CPACK_GENERATOR) - -if(APPLE) - list(APPEND ov_options CMAKE_OSX_ARCHITECTURES CMAKE_OSX_DEPLOYMENT_TARGET) -endif() - -get_property(_OV_GENERATOR_MULTI_CONFIG GLOBAL PROPERTY GENERATOR_IS_MULTI_CONFIG) -if(_OV_GENERATOR_MULTI_CONFIG) - list(APPEND ov_options CMAKE_CONFIGURATION_TYPES) - if(CMAKE_GENERATOR MATCHES "^Ninja Multi-Config$") - list(APPEND ov_options CMAKE_DEFAULT_BUILD_TYPE) - endif() -else() - list(APPEND ov_options CMAKE_BUILD_TYPE) -endif() -unset(_OV_GENERATOR_MULTI_CONFIG) - -file(TO_CMAKE_PATH "${CMAKE_CURRENT_LIST_DIR}" cache_path) - -message(STATUS "The following CMake options are exported from Inference Engine Developer package") -message(" ") -foreach(option IN LISTS ov_options) - if(NOT DEFINED "${option}") - load_cache("${cache_path}" READ_WITH_PREFIX "" ${option}) - endif() - message(" ${option}: ${${option}}") -endforeach() -message(" ") - -# for samples in 3rd party projects -if(ENABLE_SAMPLES) - set_and_check(gflags_DIR "@gflags_BINARY_DIR@") -endif() - -# Disable warning as error for private components -set(CMAKE_COMPILE_WARNING_AS_ERROR OFF) - -# -# Content -# - -find_dependency(OpenVINODeveloperScripts - PATHS "${OpenVINO_SOURCE_DIR}/cmake/developer_package" - NO_CMAKE_FIND_ROOT_PATH - NO_DEFAULT_PATH) - -find_dependency(InferenceEngine - PATHS "${CMAKE_CURRENT_LIST_DIR}" - NO_CMAKE_FIND_ROOT_PATH - NO_DEFAULT_PATH) - -find_dependency(ngraph - PATHS "${CMAKE_CURRENT_LIST_DIR}" - NO_CMAKE_FIND_ROOT_PATH - NO_DEFAULT_PATH) - -if(TARGET openvino::runtime AND NOT TARGET IE::runtime) - add_library(IE::runtime INTERFACE IMPORTED) - set_target_properties(IE::runtime PROPERTIES - INTERFACE_LINK_LIBRARIES openvino::runtime) -endif() - -# WA for cmake: it exports ngraph as IE::ngraph in the IE export list -# while we already have ngraph export in its own export list as ngraph::ngraph -if(TARGET ngraph::ngraph AND NOT TARGET IE::ngraph) - add_library(IE::ngraph INTERFACE IMPORTED) - set_target_properties(IE::ngraph PROPERTIES INTERFACE_LINK_LIBRARIES ngraph::ngraph) -endif() - -_ov_find_tbb() - -include("${CMAKE_CURRENT_LIST_DIR}/inference_engine_developer_package_targets.cmake") - -if(TARGET IE::ov_core_dev AND NOT TARGET openvino::core::dev) - add_library(openvino::core::dev INTERFACE IMPORTED) - set_target_properties(openvino::core::dev PROPERTIES - INTERFACE_LINK_LIBRARIES IE::ov_core_dev) -endif() - -if(TARGET IE::runtime::dev AND NOT TARGET openvino::runtime::dev) - add_library(openvino::runtime::dev INTERFACE IMPORTED) - set_target_properties(openvino::runtime::dev PROPERTIES - INTERFACE_LINK_LIBRARIES IE::runtime::dev) -endif() - -if(TARGET IE::reference AND NOT TARGET IE::ngraph_reference) - add_library(IE::ngraph_reference INTERFACE IMPORTED) - set_target_properties(IE::ngraph_reference PROPERTIES - INTERFACE_LINK_LIBRARIES IE::reference) -endif() - -if(ENABLE_SYSTEM_PUGIXML) - set(_ov_pugixml_pkgconfig_interface "@pugixml_FOUND@") - set(_ov_pugixml_cmake_interface "@PugiXML_FOUND@") - if(_ov_pugixml_pkgconfig_interface) - find_dependency(PkgConfig) - elseif(_ov_pugixml_cmake_interface) - 
find_dependency(PugiXML) - endif() - if(PugiXML_FOUND) - set_property(TARGET pugixml PROPERTY IMPORTED_GLOBAL TRUE) - add_library(IE::pugixml ALIAS pugixml) - elseif(PkgConfig_FOUND) - if(${CMAKE_FIND_PACKAGE_NAME}_FIND_QUIETLY) - set(pkg_config_quiet_arg QUIET) - endif() - if(${CMAKE_FIND_PACKAGE_NAME}_FIND_REQUIRED) - set(pkg_config_required_arg REQUIRED) - endif() - - pkg_search_module(pugixml - ${pkg_config_quiet_arg} - ${pkg_config_required_arg} - IMPORTED_TARGET GLOBAL - pugixml) - - unset(pkg_config_quiet_arg) - unset(pkg_config_required_arg) - - if(pugixml_FOUND) - add_library(IE::pugixml ALIAS PkgConfig::pugixml) - - # PATCH: on Ubuntu 18.04 pugixml.pc contains incorrect include directories - get_target_property(interface_include_dir PkgConfig::pugixml INTERFACE_INCLUDE_DIRECTORIES) - if(interface_include_dir AND NOT EXISTS "${interface_include_dir}") - set_target_properties(PkgConfig::pugixml PROPERTIES - INTERFACE_INCLUDE_DIRECTORIES "") - endif() - endif() - endif() - - # debian 9 case: no cmake, no pkg-config files - if(NOT TARGET IE::pugixml) - find_library(PUGIXML_LIBRARY NAMES pugixml DOC "Path to pugixml library") - if(PUGIXML_LIBRARY) - add_library(IE::pugixml INTERFACE IMPORTED GLOBAL) - set_target_properties(IE::pugixml PROPERTIES INTERFACE_LINK_LIBRARIES "${PUGIXML_LIBRARY}") - else() - message(FATAL_ERROR "Failed to find system pugixml in OpenVINO Developer Package") - endif() - endif() -endif() - -set(_ov_nlohmann_json_FOUND "@nlohmann_json_FOUND@") -if(_ov_nlohmann_json_FOUND) - find_dependency(nlohmann_json) - set_target_properties(nlohmann_json::nlohmann_json PROPERTIES IMPORTED_GLOBAL ON) - add_library(IE::nlohmann_json ALIAS nlohmann_json::nlohmann_json) -endif() -unset(_ov_nlohmann_json_FOUND) - -# inherit OpenCV from main IE project if enabled -if("@OpenCV_FOUND@") - # Use OpenCV_DIR from cache only if user doesn't define OpenCV_DIR - if(NOT OpenCV_DIR) - load_cache("${cache_path}" READ_WITH_PREFIX "" OpenCV_DIR) - endif() - find_dependency(OpenCV) -endif() - -# -# Extra Compile Flags -# - -# don't fail on strict compilation options in 3rd party modules -ov_dev_package_no_errors() - -# Don't threat deprecated API warnings as errors in 3rd party apps -ov_deprecated_no_errors() diff --git a/conan.lock b/conan.lock index 106847c9819051..d5e4bd423148e6 100644 --- a/conan.lock +++ b/conan.lock @@ -10,7 +10,7 @@ "opencl-icd-loader/2023.04.17#5f73dd9f0c023d416a7f162e320b9c77%1692732261.088", "opencl-headers/2023.04.17#3d98f2d12a67c2400de6f11d5335b5a6%1683936272.16", "opencl-clhpp-headers/2023.04.17#7c62fcc7ac2559d4839150d2ebaac5c8%1685450803.672", - "onnx/1.14.1#d95f4e64bedf3dc6898253847ac69005%1693130309.828", + "onnx/1.15.0#54b6d944e6995300bc7bcdd3a3206d74%1698840505.336", "onetbb/2021.10.0#cbb2fc43088070b48f6e4339bc8fa0e1%1693812561.235", "ittapi/3.24.0#9246125f13e7686dee2b0c992b71db94%1682969872.743", "hwloc/2.9.2#1c63e2eccac57048ae226e6c946ebf0e%1688677682.002", diff --git a/conanfile.txt b/conanfile.txt index 49b4dc1f5d3f84..cb23849a5daac5 100644 --- a/conanfile.txt +++ b/conanfile.txt @@ -8,7 +8,7 @@ opencl-icd-loader/[>=2023.04.17] rapidjson/[>=1.1.0] xbyak/[>=6.62] snappy/[>=1.1.7] -onnx/1.14.1 +onnx/1.15.0 pybind11/[>=2.10.1] flatbuffers/[>=22.9.24] diff --git a/docs/CMakeLists.txt b/docs/CMakeLists.txt index bff1c2f33bff98..c972193f7c06a7 100644 --- a/docs/CMakeLists.txt +++ b/docs/CMakeLists.txt @@ -65,11 +65,11 @@ function(build_docs) if(${ENABLE_NOTEBOOKS}) set(NBDOC_SCRIPT "${DOCS_SOURCE_DIR}/nbdoc/nbdoc.py") - list(APPEND commands COMMAND 
${CMAKE_COMMAND} -E cmake_echo_color --green "STARTED preprocessing OpenVINO notebooks") - list(APPEND commands + list(PREPEND commands COMMAND ${CMAKE_COMMAND} -E cmake_echo_color --green "STARTED preprocessing OpenVINO notebooks") + list(PREPEND commands COMMAND ${Python3_EXECUTABLE} "${NBDOC_SCRIPT}" "${DOCS_SOURCE_DIR}/notebooks" "${SPHINX_SOURCE_DIR}/notebooks" ) - list(APPEND commands COMMAND ${CMAKE_COMMAND} -E cmake_echo_color --green "FINISHED preprocessing OpenVINO notebooks") + list(PREPEND commands COMMAND ${CMAKE_COMMAND} -E cmake_echo_color --green "FINISHED preprocessing OpenVINO notebooks") endif() if(${ENABLE_OVMS}) diff --git a/docs/Extensibility_UG/img/graph_rewrite_efficient_search.png b/docs/Extensibility_UG/img/graph_rewrite_efficient_search.png deleted file mode 100644 index 1376398de71b7c..00000000000000 --- a/docs/Extensibility_UG/img/graph_rewrite_efficient_search.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:05eb8600d2c905975674f3a0a5dc676107d22f65f2a1f78ee1cfabc1771721ea -size 41307 diff --git a/docs/Extensibility_UG/img/graph_rewrite_execution.png b/docs/Extensibility_UG/img/graph_rewrite_execution.png deleted file mode 100644 index 17dc2d9652c097..00000000000000 --- a/docs/Extensibility_UG/img/graph_rewrite_execution.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:17cd470c6d04d7aabbdb4a08e31f9c97eab960cf7ef5bbd3a541df92db38f26b -size 40458 diff --git a/docs/Extensibility_UG/img/ngraph_insert_node.png b/docs/Extensibility_UG/img/ngraph_insert_node.png deleted file mode 100644 index 5569ae87907563..00000000000000 --- a/docs/Extensibility_UG/img/ngraph_insert_node.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:0838f4046b7f135a2dcd251a0bac9ae445801cf2e23535ec085bb2da2818b352 -size 23310 diff --git a/docs/Extensibility_UG/img/ngraph_replace_node.png b/docs/Extensibility_UG/img/ngraph_replace_node.png deleted file mode 100644 index 29614ca4d2eac5..00000000000000 --- a/docs/Extensibility_UG/img/ngraph_replace_node.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:4793780a48ad63936a046c7c1b87b16d3867676191e10794475630e70169cfa2 -size 44911 diff --git a/docs/Extensibility_UG/img/transformations_structure.png b/docs/Extensibility_UG/img/transformations_structure.png deleted file mode 100644 index 953d66778a55c5..00000000000000 --- a/docs/Extensibility_UG/img/transformations_structure.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:0b206c602626f17ba5787810b9a28f9cde511448c3e63a5c7ba976cee7868bdb -size 14907 diff --git a/docs/IE_PLUGIN_DG/Doxyfile b/docs/IE_PLUGIN_DG/Doxyfile deleted file mode 100644 index d71ef57c659147..00000000000000 --- a/docs/IE_PLUGIN_DG/Doxyfile +++ /dev/null @@ -1,2450 +0,0 @@ -# Doxyfile 1.8.12 - -# This file describes the settings to be used by the documentation system -# doxygen (www.doxygen.org) for a project. -# -# All text after a double hash (##) is considered a comment and is placed in -# front of the TAG it is preceding. -# -# All text after a single hash (#) is considered a comment and will be ignored. -# The format is: -# TAG = value [value, ...] -# For lists, items can also be appended using: -# TAG += value [value, ...] -# Values that contain spaces should be placed between quotes (\" \"). 
- -#--------------------------------------------------------------------------- -# Project related configuration options -#--------------------------------------------------------------------------- - -# This tag specifies the encoding used for all characters in the config file -# that follow. The default is UTF-8 which is also the encoding used for all text -# before the first occurrence of this tag. Doxygen uses libiconv (or the iconv -# built into libc) for the transcoding. See http://www.gnu.org/software/libiconv -# for the list of possible encodings. -# The default value is: UTF-8. - -DOXYFILE_ENCODING = UTF-8 - -# The PROJECT_NAME tag is a single word (or a sequence of words surrounded by -# double-quotes, unless you are using Doxywizard) that should identify the -# project for which the documentation is generated. This name is used in the -# title of most generated pages and in a few other places. -# The default value is: My Project. - -PROJECT_NAME = "OpenVINO™ Toolkit" - -# The PROJECT_NUMBER tag can be used to enter a project or revision number. This -# could be handy for archiving the generated documentation or if some version -# control system is used. - -PROJECT_NUMBER = - -# Using the PROJECT_BRIEF tag one can provide an optional one line description -# for a project that appears at the top of each page and should give viewer a -# quick idea about the purpose of the project. Keep the description short. - -PROJECT_BRIEF = - -# With the PROJECT_LOGO tag one can specify a logo or an icon that is included -# in the documentation. The maximum height of the logo should not exceed 55 -# pixels and the maximum width should not exceed 200 pixels. Doxygen will copy -# the logo to the output directory. - -PROJECT_LOGO = - -# The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute) path -# into which the generated documentation will be written. If a relative path is -# entered, it will be relative to the location where doxygen was started. If -# left blank the current directory will be used. - -OUTPUT_DIRECTORY = - -# If the CREATE_SUBDIRS tag is set to YES then doxygen will create 4096 sub- -# directories (in 2 levels) under the output directory of each output format and -# will distribute the generated files over these directories. Enabling this -# option can be useful when feeding doxygen a huge amount of source files, where -# putting all generated files in the same directory would otherwise causes -# performance problems for the file system. -# The default value is: NO. - -CREATE_SUBDIRS = NO - -# If the ALLOW_UNICODE_NAMES tag is set to YES, doxygen will allow non-ASCII -# characters to appear in the names of generated files. If set to NO, non-ASCII -# characters will be escaped, for example _xE3_x81_x84 will be used for Unicode -# U+3044. -# The default value is: NO. - -ALLOW_UNICODE_NAMES = NO - -# The OUTPUT_LANGUAGE tag is used to specify the language in which all -# documentation generated by doxygen is written. Doxygen will use this -# information to generate all constant output in the proper language. 
-# Possible values are: Afrikaans, Arabic, Armenian, Brazilian, Catalan, Chinese, -# Chinese-Traditional, Croatian, Czech, Danish, Dutch, English (United States), -# Esperanto, Farsi (Persian), Finnish, French, German, Greek, Hungarian, -# Indonesian, Italian, Japanese, Japanese-en (Japanese with English messages), -# Korean, Korean-en (Korean with English messages), Latvian, Lithuanian, -# Macedonian, Norwegian, Persian (Farsi), Polish, Portuguese, Romanian, Russian, -# Serbian, Serbian-Cyrillic, Slovak, Slovene, Spanish, Swedish, Turkish, -# Ukrainian and Vietnamese. -# The default value is: English. - -OUTPUT_LANGUAGE = English - -# If the BRIEF_MEMBER_DESC tag is set to YES, doxygen will include brief member -# descriptions after the members that are listed in the file and class -# documentation (similar to Javadoc). Set to NO to disable this. -# The default value is: YES. - -BRIEF_MEMBER_DESC = YES - -# If the REPEAT_BRIEF tag is set to YES, doxygen will prepend the brief -# description of a member or function before the detailed description -# -# Note: If both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the -# brief descriptions will be completely suppressed. -# The default value is: YES. - -REPEAT_BRIEF = YES - -# This tag implements a quasi-intelligent brief description abbreviator that is -# used to form the text in various listings. Each string in this list, if found -# as the leading text of the brief description, will be stripped from the text -# and the result, after processing the whole list, is used as the annotated -# text. Otherwise, the brief description is used as-is. If left blank, the -# following values are used ($name is automatically replaced with the name of -# the entity):The $name class, The $name widget, The $name file, is, provides, -# specifies, contains, represents, a, an and the. - -ABBREVIATE_BRIEF = - -# If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then -# doxygen will generate a detailed section even if there is only a brief -# description. -# The default value is: NO. - -ALWAYS_DETAILED_SEC = NO - -# If the INLINE_INHERITED_MEMB tag is set to YES, doxygen will show all -# inherited members of a class in the documentation of that class as if those -# members were ordinary class members. Constructors, destructors and assignment -# operators of the base classes will not be shown. -# The default value is: NO. - -INLINE_INHERITED_MEMB = NO - -# If the FULL_PATH_NAMES tag is set to YES, doxygen will prepend the full path -# before files name in the file list and in the header files. If set to NO the -# shortest path that makes the file name unique will be used -# The default value is: YES. - -FULL_PATH_NAMES = YES - -# The STRIP_FROM_PATH tag can be used to strip a user-defined part of the path. -# Stripping is only done if one of the specified strings matches the left-hand -# part of the path. The tag can be used to show relative paths in the file list. -# If left blank the directory from which doxygen is run is used as the path to -# strip. -# -# Note that you can specify absolute paths here, but also relative paths, which -# will be relative from the directory where doxygen is started. -# This tag requires that the tag FULL_PATH_NAMES is set to YES. - -STRIP_FROM_PATH = - -# The STRIP_FROM_INC_PATH tag can be used to strip a user-defined part of the -# path mentioned in the documentation of a class, which tells the reader which -# header file to include in order to use a class. 
If left blank only the name of -# the header file containing the class definition is used. Otherwise one should -# specify the list of include paths that are normally passed to the compiler -# using the -I flag. - -STRIP_FROM_INC_PATH = - -# If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter (but -# less readable) file names. This can be useful is your file systems doesn't -# support long names like on DOS, Mac, or CD-ROM. -# The default value is: NO. - -SHORT_NAMES = NO - -# If the JAVADOC_AUTOBRIEF tag is set to YES then doxygen will interpret the -# first line (until the first dot) of a Javadoc-style comment as the brief -# description. If set to NO, the Javadoc-style will behave just like regular Qt- -# style comments (thus requiring an explicit @brief command for a brief -# description.) -# The default value is: NO. - -JAVADOC_AUTOBRIEF = NO - -# If the QT_AUTOBRIEF tag is set to YES then doxygen will interpret the first -# line (until the first dot) of a Qt-style comment as the brief description. If -# set to NO, the Qt-style will behave just like regular Qt-style comments (thus -# requiring an explicit \brief command for a brief description.) -# The default value is: NO. - -QT_AUTOBRIEF = NO - -# The MULTILINE_CPP_IS_BRIEF tag can be set to YES to make doxygen treat a -# multi-line C++ special comment block (i.e. a block of //! or /// comments) as -# a brief description. This used to be the default behavior. The new default is -# to treat a multi-line C++ comment block as a detailed description. Set this -# tag to YES if you prefer the old behavior instead. -# -# Note that setting this tag to YES also means that rational rose comments are -# not recognized any more. -# The default value is: NO. - -MULTILINE_CPP_IS_BRIEF = NO - -# If the INHERIT_DOCS tag is set to YES then an undocumented member inherits the -# documentation from any documented member that it re-implements. -# The default value is: YES. - -INHERIT_DOCS = YES - -# If the SEPARATE_MEMBER_PAGES tag is set to YES then doxygen will produce a new -# page for each member. If set to NO, the documentation of a member will be part -# of the file/class/namespace that contains it. -# The default value is: NO. - -SEPARATE_MEMBER_PAGES = NO - -# The TAB_SIZE tag can be used to set the number of spaces in a tab. Doxygen -# uses this value to replace tabs by spaces in code fragments. -# Minimum value: 1, maximum value: 16, default value: 4. - -TAB_SIZE = 4 - -# This tag can be used to specify a number of aliases that act as commands in -# the documentation. An alias has the form: -# name=value -# For example adding -# "sideeffect=@par Side Effects:\n" -# will allow you to put the command \sideeffect (or @sideeffect) in the -# documentation, which will result in a user-defined paragraph with heading -# "Side Effects:". You can put \n's in the value part of an alias to insert -# newlines. - -ALIASES = - -# This tag can be used to specify a number of word-keyword mappings (TCL only). -# A mapping has the form "name=value". For example adding "class=itcl::class" -# will allow you to use the command class in the itcl::class meaning. - -TCL_SUBST = - -# Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C sources -# only. Doxygen will then generate output that is more tailored for C. For -# instance, some of the names that are used will be different. The list of all -# members will be omitted, etc. -# The default value is: NO. 
- -OPTIMIZE_OUTPUT_FOR_C = YES - -# Set the OPTIMIZE_OUTPUT_JAVA tag to YES if your project consists of Java or -# Python sources only. Doxygen will then generate output that is more tailored -# for that language. For instance, namespaces will be presented as packages, -# qualified scopes will look different, etc. -# The default value is: NO. - -OPTIMIZE_OUTPUT_JAVA = NO - -# Set the OPTIMIZE_FOR_FORTRAN tag to YES if your project consists of Fortran -# sources. Doxygen will then generate output that is tailored for Fortran. -# The default value is: NO. - -OPTIMIZE_FOR_FORTRAN = NO - -# Set the OPTIMIZE_OUTPUT_VHDL tag to YES if your project consists of VHDL -# sources. Doxygen will then generate output that is tailored for VHDL. -# The default value is: NO. - -OPTIMIZE_OUTPUT_VHDL = NO - -# Doxygen selects the parser to use depending on the extension of the files it -# parses. With this tag you can assign which parser to use for a given -# extension. Doxygen has a built-in mapping, but you can override or extend it -# using this tag. The format is ext=language, where ext is a file extension, and -# language is one of the parsers supported by doxygen: IDL, Java, Javascript, -# C#, C, C++, D, PHP, Objective-C, Python, Fortran (fixed format Fortran: -# FortranFixed, free formatted Fortran: FortranFree, unknown formatted Fortran: -# Fortran. In the later case the parser tries to guess whether the code is fixed -# or free formatted code, this is the default for Fortran type files), VHDL. For -# instance to make doxygen treat .inc files as Fortran files (default is PHP), -# and .f files as C (default is Fortran), use: inc=Fortran f=C. -# -# Note: For files without extension you can use no_extension as a placeholder. -# -# Note that for custom extensions you also need to set FILE_PATTERNS otherwise -# the files are not read by doxygen. - -EXTENSION_MAPPING = - -# If the MARKDOWN_SUPPORT tag is enabled then doxygen pre-processes all comments -# according to the Markdown format, which allows for more readable -# documentation. See http://daringfireball.net/projects/markdown/ for details. -# The output of markdown processing is further processed by doxygen, so you can -# mix doxygen, HTML, and XML commands with Markdown formatting. Disable only in -# case of backward compatibilities issues. -# The default value is: YES. - -MARKDOWN_SUPPORT = YES - -# When the TOC_INCLUDE_HEADINGS tag is set to a non-zero value, all headings up -# to that level are automatically included in the table of contents, even if -# they do not have an id attribute. -# Note: This feature currently applies only to Markdown headings. -# Minimum value: 0, maximum value: 99, default value: 0. -# This tag requires that the tag MARKDOWN_SUPPORT is set to YES. - -TOC_INCLUDE_HEADINGS = 0 - -# When enabled doxygen tries to link words that correspond to documented -# classes, or namespaces to their corresponding documentation. Such a link can -# be prevented in individual cases by putting a % sign in front of the word or -# globally by setting AUTOLINK_SUPPORT to NO. -# The default value is: YES. - -AUTOLINK_SUPPORT = YES - -# If you use STL classes (i.e. std::string, std::vector, etc.) but do not want -# to include (a tag file for) the STL sources as input, then you should set this -# tag to YES in order to let doxygen match functions declarations and -# definitions whose arguments contain STL classes (e.g. func(std::string); -# versus func(std::string) {}). 
This also make the inheritance and collaboration
-# diagrams that involve STL classes more complete and accurate.
-# The default value is: NO.
-
-BUILTIN_STL_SUPPORT = NO
-
-# If you use Microsoft's C++/CLI language, you should set this option to YES to
-# enable parsing support.
-# The default value is: NO.
-
-CPP_CLI_SUPPORT = NO
-
-# Set the SIP_SUPPORT tag to YES if your project consists of sip (see:
-# http://www.riverbankcomputing.co.uk/software/sip/intro) sources only. Doxygen
-# will parse them like normal C++ but will assume all classes use public instead
-# of private inheritance when no explicit protection keyword is present.
-# The default value is: NO.
-
-SIP_SUPPORT = NO
-
-# For Microsoft's IDL there are propget and propput attributes to indicate
-# getter and setter methods for a property. Setting this option to YES will make
-# doxygen to replace the get and set methods by a property in the documentation.
-# This will only work if the methods are indeed getting or setting a simple
-# type. If this is not the case, or you want to show the methods anyway, you
-# should set this option to NO.
-# The default value is: YES.
-
-IDL_PROPERTY_SUPPORT = YES
-
-# If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC
-# tag is set to YES then doxygen will reuse the documentation of the first
-# member in the group (if any) for the other members of the group. By default
-# all members of a group must be documented explicitly.
-# The default value is: NO.
-
-DISTRIBUTE_GROUP_DOC = NO
-
-# If one adds a struct or class to a group and this option is enabled, then also
-# any nested class or struct is added to the same group. By default this option
-# is disabled and one has to add nested compounds explicitly via \ingroup.
-# The default value is: NO.
-
-GROUP_NESTED_COMPOUNDS = NO
-
-# Set the SUBGROUPING tag to YES to allow class member groups of the same type
-# (for instance a group of public functions) to be put as a subgroup of that
-# type (e.g. under the Public Functions section). Set it to NO to prevent
-# subgrouping. Alternatively, this can be done per class using the
-# \nosubgrouping command.
-# The default value is: YES.
-
-SUBGROUPING = YES
-
-# When the INLINE_GROUPED_CLASSES tag is set to YES, classes, structs and unions
-# are shown inside the group in which they are included (e.g. using \ingroup)
-# instead of on a separate page (for HTML and Man pages) or section (for LaTeX
-# and RTF).
-#
-# Note that this feature does not work in combination with
-# SEPARATE_MEMBER_PAGES.
-# The default value is: NO.
-
-INLINE_GROUPED_CLASSES = NO
-
-# When the INLINE_SIMPLE_STRUCTS tag is set to YES, structs, classes, and unions
-# with only public data fields or simple typedef fields will be shown inline in
-# the documentation of the scope in which they are defined (i.e. file,
-# namespace, or group documentation), provided this scope is documented. If set
-# to NO, structs, classes, and unions are shown on a separate page (for HTML and
-# Man pages) or section (for LaTeX and RTF).
-# The default value is: NO.
-
-INLINE_SIMPLE_STRUCTS = NO
-
-# When TYPEDEF_HIDES_STRUCT tag is enabled, a typedef of a struct, union, or
-# enum is documented as struct, union, or enum with the name of the typedef. So
-# typedef struct TypeS {} TypeT, will appear in the documentation as a struct
-# with name TypeT. When disabled the typedef will appear as a member of a file,
-# namespace, or class. And the struct will be named TypeS. This can typically be
-# useful for C code in case the coding convention dictates that all compound
-# types are typedef'ed and only the typedef is referenced, never the tag name.
-# The default value is: NO.
-
-TYPEDEF_HIDES_STRUCT = NO
-
-# The size of the symbol lookup cache can be set using LOOKUP_CACHE_SIZE. This
-# cache is used to resolve symbols given their name and scope. Since this can be
-# an expensive process and often the same symbol appears multiple times in the
-# code, doxygen keeps a cache of pre-resolved symbols. If the cache is too small
-# doxygen will become slower. If the cache is too large, memory is wasted. The
-# cache size is given by this formula: 2^(16+LOOKUP_CACHE_SIZE). The valid range
-# is 0..9, the default is 0, corresponding to a cache size of 2^16=65536
-# symbols. At the end of a run doxygen will report the cache usage and suggest
-# the optimal cache size from a speed point of view.
-# Minimum value: 0, maximum value: 9, default value: 0.
-
-LOOKUP_CACHE_SIZE = 0
-
-#---------------------------------------------------------------------------
-# Build related configuration options
-#---------------------------------------------------------------------------
-
-# If the EXTRACT_ALL tag is set to YES, doxygen will assume all entities in
-# documentation are documented, even if no documentation was available. Private
-# class members and static file members will be hidden unless the
-# EXTRACT_PRIVATE respectively EXTRACT_STATIC tags are set to YES.
-# Note: This will also disable the warnings about undocumented members that are
-# normally produced when WARNINGS is set to YES.
-# The default value is: NO.
-
-EXTRACT_ALL = NO
-
-# If the EXTRACT_PRIVATE tag is set to YES, all private members of a class will
-# be included in the documentation.
-# The default value is: NO.
-
-EXTRACT_PRIVATE = NO
-
-# If the EXTRACT_PACKAGE tag is set to YES, all members with package or internal
-# scope will be included in the documentation.
-# The default value is: NO.
-
-EXTRACT_PACKAGE = NO
-
-# If the EXTRACT_STATIC tag is set to YES, all static members of a file will be
-# included in the documentation.
-# The default value is: NO.
-
-EXTRACT_STATIC = YES
-
-# If the EXTRACT_LOCAL_CLASSES tag is set to YES, classes (and structs) defined
-# locally in source files will be included in the documentation. If set to NO,
-# only classes defined in header files are included. Does not have any effect
-# for Java sources.
-# The default value is: YES.
-
-EXTRACT_LOCAL_CLASSES = NO
-
-# This flag is only useful for Objective-C code. If set to YES, local methods,
-# which are defined in the implementation section but not in the interface are
-# included in the documentation. If set to NO, only methods in the interface are
-# included.
-# The default value is: NO.
-
-EXTRACT_LOCAL_METHODS = NO
-
-# If this flag is set to YES, the members of anonymous namespaces will be
-# extracted and appear in the documentation as a namespace called
-# 'anonymous_namespace{file}', where file will be replaced with the base name of
-# the file that contains the anonymous namespace. By default anonymous namespace
-# are hidden.
-# The default value is: NO.
-
-EXTRACT_ANON_NSPACES = NO
-
-# If the HIDE_UNDOC_MEMBERS tag is set to YES, doxygen will hide all
-# undocumented members inside documented classes or files. If set to NO these
-# members will be included in the various overviews, but no documentation
-# section is generated. This option has no effect if EXTRACT_ALL is enabled.
-# The default value is: NO.
-
-HIDE_UNDOC_MEMBERS = NO
-
-# If the HIDE_UNDOC_CLASSES tag is set to YES, doxygen will hide all
-# undocumented classes that are normally visible in the class hierarchy. If set
-# to NO, these classes will be included in the various overviews. This option
-# has no effect if EXTRACT_ALL is enabled.
-# The default value is: NO.
-
-HIDE_UNDOC_CLASSES = NO
-
-# If the HIDE_FRIEND_COMPOUNDS tag is set to YES, doxygen will hide all friend
-# (class|struct|union) declarations. If set to NO, these declarations will be
-# included in the documentation.
-# The default value is: NO.
-
-HIDE_FRIEND_COMPOUNDS = NO
-
-# If the HIDE_IN_BODY_DOCS tag is set to YES, doxygen will hide any
-# documentation blocks found inside the body of a function. If set to NO, these
-# blocks will be appended to the function's detailed documentation block.
-# The default value is: NO.
-
-HIDE_IN_BODY_DOCS = NO
-
-# The INTERNAL_DOCS tag determines if documentation that is typed after a
-# \internal command is included. If the tag is set to NO then the documentation
-# will be excluded. Set it to YES to include the internal documentation.
-# The default value is: NO.
-
-INTERNAL_DOCS = NO
-
-# If the CASE_SENSE_NAMES tag is set to NO then doxygen will only generate file
-# names in lower-case letters. If set to YES, upper-case letters are also
-# allowed. This is useful if you have classes or files whose names only differ
-# in case and if your file system supports case sensitive file names. Windows
-# and Mac users are advised to set this option to NO.
-# The default value is: system dependent.
-
-CASE_SENSE_NAMES = YES
-
-# If the HIDE_SCOPE_NAMES tag is set to NO then doxygen will show members with
-# their full class and namespace scopes in the documentation. If set to YES, the
-# scope will be hidden.
-# The default value is: NO.
-
-HIDE_SCOPE_NAMES = NO
-
-# If the HIDE_COMPOUND_REFERENCE tag is set to NO (default) then doxygen will
-# append additional text to a page's title, such as Class Reference. If set to
-# YES the compound reference will be hidden.
-# The default value is: NO.
-
-HIDE_COMPOUND_REFERENCE= NO
-
-# If the SHOW_INCLUDE_FILES tag is set to YES then doxygen will put a list of
-# the files that are included by a file in the documentation of that file.
-# The default value is: YES.
-
-SHOW_INCLUDE_FILES = YES
-
-# If the SHOW_GROUPED_MEMB_INC tag is set to YES then Doxygen will add for each
-# grouped member an include statement to the documentation, telling the reader
-# which file to include in order to use the member.
-# The default value is: NO.
-
-SHOW_GROUPED_MEMB_INC = NO
-
-# If the FORCE_LOCAL_INCLUDES tag is set to YES then doxygen will list include
-# files with double quotes in the documentation rather than with sharp brackets.
-# The default value is: NO.
-
-FORCE_LOCAL_INCLUDES = NO
-
-# If the INLINE_INFO tag is set to YES then a tag [inline] is inserted in the
-# documentation for inline members.
-# The default value is: YES.
-
-INLINE_INFO = YES
-
-# If the SORT_MEMBER_DOCS tag is set to YES then doxygen will sort the
-# (detailed) documentation of file and class members alphabetically by member
-# name. If set to NO, the members will appear in declaration order.
-# The default value is: YES.
-
-SORT_MEMBER_DOCS = YES
-
-# If the SORT_BRIEF_DOCS tag is set to YES then doxygen will sort the brief
-# descriptions of file, namespace and class members alphabetically by member
-# name. If set to NO, the members will appear in declaration order. Note that
-# this will also influence the order of the classes in the class list.
-# The default value is: NO.
-
-SORT_BRIEF_DOCS = NO
-
-# If the SORT_MEMBERS_CTORS_1ST tag is set to YES then doxygen will sort the
-# (brief and detailed) documentation of class members so that constructors and
-# destructors are listed first. If set to NO the constructors will appear in the
-# respective orders defined by SORT_BRIEF_DOCS and SORT_MEMBER_DOCS.
-# Note: If SORT_BRIEF_DOCS is set to NO this option is ignored for sorting brief
-# member documentation.
-# Note: If SORT_MEMBER_DOCS is set to NO this option is ignored for sorting
-# detailed member documentation.
-# The default value is: NO.
-
-SORT_MEMBERS_CTORS_1ST = NO
-
-# If the SORT_GROUP_NAMES tag is set to YES then doxygen will sort the hierarchy
-# of group names into alphabetical order. If set to NO the group names will
-# appear in their defined order.
-# The default value is: NO.
-
-SORT_GROUP_NAMES = NO
-
-# If the SORT_BY_SCOPE_NAME tag is set to YES, the class list will be sorted by
-# fully-qualified names, including namespaces. If set to NO, the class list will
-# be sorted only by class name, not including the namespace part.
-# Note: This option is not very useful if HIDE_SCOPE_NAMES is set to YES.
-# Note: This option applies only to the class list, not to the alphabetical
-# list.
-# The default value is: NO.
-
-SORT_BY_SCOPE_NAME = NO
-
-# If the STRICT_PROTO_MATCHING option is enabled and doxygen fails to do proper
-# type resolution of all parameters of a function it will reject a match between
-# the prototype and the implementation of a member function even if there is
-# only one candidate or it is obvious which candidate to choose by doing a
-# simple string match. By disabling STRICT_PROTO_MATCHING doxygen will still
-# accept a match between prototype and implementation in such cases.
-# The default value is: NO.
-
-STRICT_PROTO_MATCHING = NO
-
-# The GENERATE_TODOLIST tag can be used to enable (YES) or disable (NO) the todo
-# list. This list is created by putting \todo commands in the documentation.
-# The default value is: YES.
-
-GENERATE_TODOLIST = YES
-
-# The GENERATE_TESTLIST tag can be used to enable (YES) or disable (NO) the test
-# list. This list is created by putting \test commands in the documentation.
-# The default value is: YES.
-
-GENERATE_TESTLIST = YES
-
-# The GENERATE_BUGLIST tag can be used to enable (YES) or disable (NO) the bug
-# list. This list is created by putting \bug commands in the documentation.
-# The default value is: YES.
-
-GENERATE_BUGLIST = YES
-
-# The GENERATE_DEPRECATEDLIST tag can be used to enable (YES) or disable (NO)
-# the deprecated list. This list is created by putting \deprecated commands in
-# the documentation.
-# The default value is: YES.
-
-GENERATE_DEPRECATEDLIST= YES
-
-# The ENABLED_SECTIONS tag can be used to enable conditional documentation
-# sections, marked by \if ... \endif and \cond
-# ... \endcond blocks.
-
-ENABLED_SECTIONS =
-
-# The MAX_INITIALIZER_LINES tag determines the maximum number of lines that the
-# initial value of a variable or macro / define can have for it to appear in the
-# documentation. If the initializer consists of more lines than specified here
-# it will be hidden. Use a value of 0 to hide initializers completely. The
-# appearance of the value of individual variables and macros / defines can be
-# controlled using \showinitializer or \hideinitializer command in the
-# documentation regardless of this setting.
-# Minimum value: 0, maximum value: 10000, default value: 30.
-
-MAX_INITIALIZER_LINES = 30
-
-# Set the SHOW_USED_FILES tag to NO to disable the list of files generated at
-# the bottom of the documentation of classes and structs. If set to YES, the
-# list will mention the files that were used to generate the documentation.
-# The default value is: YES.
-
-SHOW_USED_FILES = YES
-
-# Set the SHOW_FILES tag to NO to disable the generation of the Files page. This
-# will remove the Files entry from the Quick Index and from the Folder Tree View
-# (if specified).
-# The default value is: YES.
-
-SHOW_FILES = YES
-
-# Set the SHOW_NAMESPACES tag to NO to disable the generation of the Namespaces
-# page. This will remove the Namespaces entry from the Quick Index and from the
-# Folder Tree View (if specified).
-# The default value is: YES.
-
-SHOW_NAMESPACES = YES
-
-# The FILE_VERSION_FILTER tag can be used to specify a program or script that
-# doxygen should invoke to get the current version for each file (typically from
-# the version control system). Doxygen will invoke the program by executing (via
-# popen()) the command input-file, where command is the value of the
-# FILE_VERSION_FILTER tag, and input-file is the name of an input file provided
-# by doxygen. Whatever the program writes to standard output is used as the file
-# version. For an example see the documentation.
-
-FILE_VERSION_FILTER =
-
-# The LAYOUT_FILE tag can be used to specify a layout file which will be parsed
-# by doxygen. The layout file controls the global structure of the generated
-# output files in an output format independent way. To create the layout file
-# that represents doxygen's defaults, run doxygen with the -l option. You can
-# optionally specify a file name after the option, if omitted DoxygenLayout.xml
-# will be used as the name of the layout file.
-#
-# Note that if you run doxygen from a directory containing a file called
-# DoxygenLayout.xml, doxygen will parse it automatically even if the LAYOUT_FILE
-# tag is left empty.
-
-LAYOUT_FILE = layout.xml
-
-# The CITE_BIB_FILES tag can be used to specify one or more bib files containing
-# the reference definitions. This must be a list of .bib files. The .bib
-# extension is automatically appended if omitted. This requires the bibtex tool
-# to be installed. See also http://en.wikipedia.org/wiki/BibTeX for more info.
-# For LaTeX the style of the bibliography can be controlled using
-# LATEX_BIB_STYLE. To use this feature you need bibtex and perl available in the
-# search path. See also \cite for info how to create references.
-
-CITE_BIB_FILES =
-
-#---------------------------------------------------------------------------
-# Configuration options related to warning and progress messages
-#---------------------------------------------------------------------------
-
-# The QUIET tag can be used to turn on/off the messages that are generated to
-# standard output by doxygen. If QUIET is set to YES this implies that the
-# messages are off.
-# The default value is: NO.
-
-QUIET = NO
-
-# The WARNINGS tag can be used to turn on/off the warning messages that are
-# generated to standard error (stderr) by doxygen. If WARNINGS is set to YES
-# this implies that the warnings are on.
-#
-# Tip: Turn warnings on while writing the documentation.
-# The default value is: YES.
-
-WARNINGS = YES
-
-# If the WARN_IF_UNDOCUMENTED tag is set to YES then doxygen will generate
-# warnings for undocumented members. If EXTRACT_ALL is set to YES then this flag
-# will automatically be disabled.
-# The default value is: YES.
-
-WARN_IF_UNDOCUMENTED = YES
-
-# If the WARN_IF_DOC_ERROR tag is set to YES, doxygen will generate warnings for
-# potential errors in the documentation, such as not documenting some parameters
-# in a documented function, or documenting parameters that don't exist or using
-# markup commands wrongly.
-# The default value is: YES.
-
-WARN_IF_DOC_ERROR = YES
-
-# This WARN_NO_PARAMDOC option can be enabled to get warnings for functions that
-# are documented, but have no documentation for their parameters or return
-# value. If set to NO, doxygen will only warn about wrong or incomplete
-# parameter documentation, but not about the absence of documentation.
-# The default value is: NO.
-
-WARN_NO_PARAMDOC = YES
-
-# If the WARN_AS_ERROR tag is set to YES then doxygen will immediately stop when
-# a warning is encountered.
-# The default value is: NO.
-
-WARN_AS_ERROR = NO
-
-# The WARN_FORMAT tag determines the format of the warning messages that doxygen
-# can produce. The string should contain the $file, $line, and $text tags, which
-# will be replaced by the file and line number from which the warning originated
-# and the warning text. Optionally the format may contain $version, which will
-# be replaced by the version of the file (if it could be obtained via
-# FILE_VERSION_FILTER)
-# The default value is: $file:$line: $text.
-
-WARN_FORMAT = "$file:$line: $text"
-
-# The WARN_LOGFILE tag can be used to specify a file to which warning and error
-# messages should be written. If left blank the output is written to standard
-# error (stderr).
-
-WARN_LOGFILE =
-
-#---------------------------------------------------------------------------
-# Configuration options related to the input files
-#---------------------------------------------------------------------------
-
-# The INPUT tag is used to specify the files and/or directories that contain
-# documented source files. You may enter file names like myfile.cpp or
-# directories like /usr/src/myproject. Separate the files or directories with
-# spaces. See also FILE_PATTERNS and EXTENSION_MAPPING
-# Note: If this tag is empty the current directory is searched.
-
-INPUT = . \
- ../../src/common/transformations/include/transformations \
- ../../src/inference/dev_api \
- ../../src/common/itt/include/openvino
-
-# This tag can be used to specify the character encoding of the source files
-# that doxygen parses. Internally doxygen uses the UTF-8 encoding. Doxygen uses
-# libiconv (or the iconv built into libc) for the transcoding. See the libiconv
-# documentation (see: http://www.gnu.org/software/libiconv) for the list of
-# possible encodings.
-# The default value is: UTF-8.
-
-INPUT_ENCODING = UTF-8
-
-# If the value of the INPUT tag contains directories, you can use the
-# FILE_PATTERNS tag to specify one or more wildcard patterns (like *.cpp and
-# *.h) to filter out the source-files in the directories.
-#
-# Note that for custom extensions or not directly supported extensions you also
-# need to set EXTENSION_MAPPING for the extension otherwise the files are not
-# read by doxygen.
-#
-# If left blank the following patterns are tested:*.c, *.cc, *.cxx, *.cpp,
-# *.c++, *.java, *.ii, *.ixx, *.ipp, *.i++, *.inl, *.idl, *.ddl, *.odl, *.h,
-# *.hh, *.hxx, *.hpp, *.h++, *.cs, *.d, *.php, *.php4, *.php5, *.phtml, *.inc,
-# *.m, *.markdown, *.md, *.mm, *.dox, *.py, *.pyw, *.f90, *.f95, *.f03, *.f08,
-# *.f, *.for, *.tcl, *.vhd, *.vhdl, *.ucf and *.qsf.
-
-FILE_PATTERNS = *.c \
- *.cpp \
- *.c++ \
- *.h \
- *.hpp \
- *.md
-
-# The RECURSIVE tag can be used to specify whether or not subdirectories should
-# be searched for input files as well.
-# The default value is: NO.
-
-RECURSIVE = YES
-
-# The EXCLUDE tag can be used to specify files and/or directories that should be
-# excluded from the INPUT source files. This way you can easily exclude a
-# subdirectory from a directory tree whose root is specified with the INPUT tag.
-#
-# Note that relative paths are relative to the directory from which doxygen is
-# run.
-
-EXCLUDE =
-
-# The EXCLUDE_SYMLINKS tag can be used to select whether or not files or
-# directories that are symbolic links (a Unix file system feature) are excluded
-# from the input.
-# The default value is: NO.
-
-EXCLUDE_SYMLINKS = NO
-
-# If the value of the INPUT tag contains directories, you can use the
-# EXCLUDE_PATTERNS tag to specify one or more wildcard patterns to exclude
-# certain files from those directories.
-#
-# Note that the wildcards are matched against the file with absolute path, so to
-# exclude all test directories for example use the pattern */test/*
-
-EXCLUDE_PATTERNS = function_name.hpp \
- macro_overload.hpp
-
-# The EXCLUDE_SYMBOLS tag can be used to specify one or more symbol names
-# (namespaces, classes, functions, etc.) that should be excluded from the
-# output. The symbol name can be a fully qualified name, a word, or if the
-# wildcard * is used, a substring. Examples: ANamespace, AClass,
-# AClass::ANamespace, ANamespace::*Test
-#
-# Note that the wildcards are matched against the file with absolute path, so to
-# exclude all test directories use the pattern */test/*
-
-EXCLUDE_SYMBOLS =
-
-# The EXAMPLE_PATH tag can be used to specify one or more files or directories
-# that contain example code fragments that are included (see the \include
-# command).
-
-EXAMPLE_PATH = ../../src/plugins/ \
- ../../src/plugins/template/tests/functional/CMakeLists.txt \
- ../../src/plugins/template/tests/functional/transformations \
- ../../src/plugins/template/tests/functional/shared_tests_instances/ \
- ../../src/tests/functional/plugin/shared/include \
- ../snippets
-
-# If the value of the EXAMPLE_PATH tag contains directories, you can use the
-# EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp and
-# *.h) to filter out the source-files in the directories. If left blank all
-# files are included.
-
-EXAMPLE_PATTERNS = *.cpp \
- *.hpp \
- *.txt
-
-# If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be
-# searched for input files to be used with the \include or \dontinclude commands
-# irrespective of the value of the RECURSIVE tag.
-# The default value is: NO.
-
-EXAMPLE_RECURSIVE = YES
-
-# The IMAGE_PATH tag can be used to specify one or more files or directories
-# that contain images that are to be included in the documentation (see the
-# \image command).
-
-IMAGE_PATH = ./images
-
-# The INPUT_FILTER tag can be used to specify a program that doxygen should
-# invoke to filter for each input file. Doxygen will invoke the filter program
-# by executing (via popen()) the command:
-#
-#
-#
-# where is the value of the INPUT_FILTER tag, and is the
-# name of an input file. Doxygen will then use the output that the filter
-# program writes to standard output. If FILTER_PATTERNS is specified, this tag
-# will be ignored.
-#
-# Note that the filter must not add or remove lines; it is applied before the
-# code is scanned, but not when the output code is generated. If lines are added
-# or removed, the anchors will not be placed correctly.
-#
-# Note that for custom extensions or not directly supported extensions you also
-# need to set EXTENSION_MAPPING for the extension otherwise the files are not
-# properly processed by doxygen.
-
-INPUT_FILTER =
-
-# The FILTER_PATTERNS tag can be used to specify filters on a per file pattern
-# basis. Doxygen will compare the file name with each pattern and apply the
-# filter if there is a match. The filters are a list of the form: pattern=filter
-# (like *.cpp=my_cpp_filter). See INPUT_FILTER for further information on how
-# filters are used. If the FILTER_PATTERNS tag is empty or if none of the
-# patterns match the file name, INPUT_FILTER is applied.
-#
-# Note that for custom extensions or not directly supported extensions you also
-# need to set EXTENSION_MAPPING for the extension otherwise the files are not
-# properly processed by doxygen.
-
-FILTER_PATTERNS =
-
-# If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using
-# INPUT_FILTER) will also be used to filter the input files that are used for
-# producing the source files to browse (i.e. when SOURCE_BROWSER is set to YES).
-# The default value is: NO.
-
-FILTER_SOURCE_FILES = NO
-
-# The FILTER_SOURCE_PATTERNS tag can be used to specify source filters per file
-# pattern. A pattern will override the setting for FILTER_PATTERN (if any) and
-# it is also possible to disable source filtering for a specific pattern using
-# *.ext= (so without naming a filter).
-# This tag requires that the tag FILTER_SOURCE_FILES is set to YES.
-
-FILTER_SOURCE_PATTERNS =
-
-# If the USE_MDFILE_AS_MAINPAGE tag refers to the name of a markdown file that
-# is part of the input, its contents will be placed on the main page
-# (index.html). This can be useful if you have a project on for instance GitHub
-# and want to reuse the introduction page also for the doxygen output.
-
-USE_MDFILE_AS_MAINPAGE =
-
-#---------------------------------------------------------------------------
-# Configuration options related to source browsing
-#---------------------------------------------------------------------------
-
-# If the SOURCE_BROWSER tag is set to YES then a list of source files will be
-# generated. Documented entities will be cross-referenced with these sources.
-#
-# Note: To get rid of all source code in the generated output, make sure that
-# also VERBATIM_HEADERS is set to NO.
-# The default value is: NO.
-
-SOURCE_BROWSER = NO
-
-# Setting the INLINE_SOURCES tag to YES will include the body of functions,
-# classes and enums directly into the documentation.
-# The default value is: NO.
-
-INLINE_SOURCES = NO
-
-# Setting the STRIP_CODE_COMMENTS tag to YES will instruct doxygen to hide any
-# special comment blocks from generated source code fragments. Normal C, C++ and
-# Fortran comments will always remain visible.
-# The default value is: YES.
-
-STRIP_CODE_COMMENTS = NO
-
-# If the REFERENCED_BY_RELATION tag is set to YES then for each documented
-# function all documented functions referencing it will be listed.
-# The default value is: NO.
-
-REFERENCED_BY_RELATION = NO
-
-# If the REFERENCES_RELATION tag is set to YES then for each documented function
-# all documented entities called/used by that function will be listed.
-# The default value is: NO.
-
-REFERENCES_RELATION = NO
-
-# If the REFERENCES_LINK_SOURCE tag is set to YES and SOURCE_BROWSER tag is set
-# to YES then the hyperlinks from functions in REFERENCES_RELATION and
-# REFERENCED_BY_RELATION lists will link to the source code. Otherwise they will
-# link to the documentation.
-# The default value is: YES.
-
-REFERENCES_LINK_SOURCE = YES
-
-# If SOURCE_TOOLTIPS is enabled (the default) then hovering a hyperlink in the
-# source code will show a tooltip with additional information such as prototype,
-# brief description and links to the definition and documentation. Since this
-# will make the HTML file larger and loading of large files a bit slower, you
-# can opt to disable this feature.
-# The default value is: YES.
-# This tag requires that the tag SOURCE_BROWSER is set to YES.
-
-SOURCE_TOOLTIPS = YES
-
-# If the USE_HTAGS tag is set to YES then the references to source code will
-# point to the HTML generated by the htags(1) tool instead of doxygen built-in
-# source browser. The htags tool is part of GNU's global source tagging system
-# (see http://www.gnu.org/software/global/global.html). You will need version
-# 4.8.6 or higher.
-#
-# To use it do the following:
-# - Install the latest version of global
-# - Enable SOURCE_BROWSER and USE_HTAGS in the config file
-# - Make sure the INPUT points to the root of the source tree
-# - Run doxygen as normal
-#
-# Doxygen will invoke htags (and that will in turn invoke gtags), so these
-# tools must be available from the command line (i.e. in the search path).
-#
-# The result: instead of the source browser generated by doxygen, the links to
-# source code will now point to the output of htags.
-# The default value is: NO.
-# This tag requires that the tag SOURCE_BROWSER is set to YES.
-
-USE_HTAGS = NO
-
-# If the VERBATIM_HEADERS tag is set the YES then doxygen will generate a
-# verbatim copy of the header file for each class for which an include is
-# specified. Set to NO to disable this.
-# See also: Section \class.
-# The default value is: YES.
-
-VERBATIM_HEADERS = YES
-
-#---------------------------------------------------------------------------
-# Configuration options related to the alphabetical class index
-#---------------------------------------------------------------------------
-
-# If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index of all
-# compounds will be generated. Enable this if the project contains a lot of
-# classes, structs, unions or interfaces.
-# The default value is: YES.
-
-ALPHABETICAL_INDEX = YES
-
-# The COLS_IN_ALPHA_INDEX tag can be used to specify the number of columns in
-# which the alphabetical index list will be split.
-# Minimum value: 1, maximum value: 20, default value: 5.
-# This tag requires that the tag ALPHABETICAL_INDEX is set to YES.
-
-COLS_IN_ALPHA_INDEX = 5
-
-# In case all classes in a project start with a common prefix, all classes will
-# be put under the same header in the alphabetical index. The IGNORE_PREFIX tag
-# can be used to specify a prefix (or a list of prefixes) that should be ignored
-# while generating the index headers.
-# This tag requires that the tag ALPHABETICAL_INDEX is set to YES.
-
-IGNORE_PREFIX =
-
-#---------------------------------------------------------------------------
-# Configuration options related to the HTML output
-#---------------------------------------------------------------------------
-
-# If the GENERATE_HTML tag is set to YES, doxygen will generate HTML output
-# The default value is: YES.
-
-GENERATE_HTML = YES
-
-# The HTML_OUTPUT tag is used to specify where the HTML docs will be put. If a
-# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of
-# it.
-# The default directory is: html.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-HTML_OUTPUT = html
-
-# The HTML_FILE_EXTENSION tag can be used to specify the file extension for each
-# generated HTML page (for example: .htm, .php, .asp).
-# The default value is: .html.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-HTML_FILE_EXTENSION = .html
-
-# The HTML_HEADER tag can be used to specify a user-defined HTML header file for
-# each generated HTML page. If the tag is left blank doxygen will generate a
-# standard header.
-#
-# To get valid HTML the header file that includes any scripts and style sheets
-# that doxygen needs, which is dependent on the configuration options used (e.g.
-# the setting GENERATE_TREEVIEW). It is highly recommended to start with a
-# default header using
-# doxygen -w html new_header.html new_footer.html new_stylesheet.css
-# YourConfigFile
-# and then modify the file new_header.html. See also section "Doxygen usage"
-# for information on how to generate the default header that doxygen normally
-# uses.
-# Note: The header is subject to change so you typically have to regenerate the
-# default header when upgrading to a newer version of doxygen. For a description
-# of the possible markers and block names see the documentation.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-HTML_HEADER =
-
-# The HTML_FOOTER tag can be used to specify a user-defined HTML footer for each
-# generated HTML page. If the tag is left blank doxygen will generate a standard
-# footer. See HTML_HEADER for more information on how to generate a default
-# footer and what special commands can be used inside the footer. See also
-# section "Doxygen usage" for information on how to generate the default footer
-# that doxygen normally uses.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-HTML_FOOTER =
-
-# The HTML_STYLESHEET tag can be used to specify a user-defined cascading style
-# sheet that is used by each HTML page. It can be used to fine-tune the look of
-# the HTML output. If left blank doxygen will generate a default style sheet.
-# See also section "Doxygen usage" for information on how to generate the style
-# sheet that doxygen normally uses.
-# Note: It is recommended to use HTML_EXTRA_STYLESHEET instead of this tag, as
-# it is more robust and this tag (HTML_STYLESHEET) will in the future become
-# obsolete.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-HTML_STYLESHEET =
-
-# The HTML_EXTRA_STYLESHEET tag can be used to specify additional user-defined
-# cascading style sheets that are included after the standard style sheets
-# created by doxygen. Using this option one can overrule certain style aspects.
-# This is preferred over using HTML_STYLESHEET since it does not replace the
-# standard style sheet and is therefore more robust against future updates.
-# Doxygen will copy the style sheet files to the output directory.
-# Note: The order of the extra style sheet files is of importance (e.g. the last
-# style sheet in the list overrules the setting of the previous ones in the
-# list). For an example see the documentation.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-HTML_EXTRA_STYLESHEET =
-
-# The HTML_EXTRA_FILES tag can be used to specify one or more extra images or
-# other source files which should be copied to the HTML output directory. Note
-# that these files will be copied to the base HTML output directory. Use the
-# $relpath^ marker in the HTML_HEADER and/or HTML_FOOTER files to load these
-# files. In the HTML_STYLESHEET file, use the file name only. Also note that the
-# files will be copied as-is; there are no commands or markers available.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-HTML_EXTRA_FILES =
-
-# The HTML_COLORSTYLE_HUE tag controls the color of the HTML output. Doxygen
-# will adjust the colors in the style sheet and background images according to
-# this color. Hue is specified as an angle on a colorwheel, see
-# http://en.wikipedia.org/wiki/Hue for more information. For instance the value
-# 0 represents red, 60 is yellow, 120 is green, 180 is cyan, 240 is blue, 300
-# purple, and 360 is red again.
-# Minimum value: 0, maximum value: 359, default value: 220.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-HTML_COLORSTYLE_HUE = 220
-
-# The HTML_COLORSTYLE_SAT tag controls the purity (or saturation) of the colors
-# in the HTML output. For a value of 0 the output will use grayscales only. A
-# value of 255 will produce the most vivid colors.
-# Minimum value: 0, maximum value: 255, default value: 100.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-HTML_COLORSTYLE_SAT = 100
-
-# The HTML_COLORSTYLE_GAMMA tag controls the gamma correction applied to the
-# luminance component of the colors in the HTML output. Values below 100
-# gradually make the output lighter, whereas values above 100 make the output
-# darker. The value divided by 100 is the actual gamma applied, so 80 represents
-# a gamma of 0.8, The value 220 represents a gamma of 2.2, and 100 does not
-# change the gamma.
-# Minimum value: 40, maximum value: 240, default value: 80.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-HTML_COLORSTYLE_GAMMA = 80
-
-# If the HTML_TIMESTAMP tag is set to YES then the footer of each generated HTML
-# page will contain the date and time when the page was generated. Setting this
-# to YES can help to show when doxygen was last run and thus if the
-# documentation is up to date.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-HTML_TIMESTAMP = NO
-
-# If the HTML_DYNAMIC_SECTIONS tag is set to YES then the generated HTML
-# documentation will contain sections that can be hidden and shown after the
-# page has loaded.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-HTML_DYNAMIC_SECTIONS = NO
-
-# With HTML_INDEX_NUM_ENTRIES one can control the preferred number of entries
-# shown in the various tree structured indices initially; the user can expand
-# and collapse entries dynamically later on. Doxygen will expand the tree to
-# such a level that at most the specified number of entries are visible (unless
-# a fully collapsed tree already exceeds this amount). So setting the number of
-# entries 1 will produce a full collapsed tree by default. 0 is a special value
-# representing an infinite number of entries and will result in a full expanded
-# tree by default.
-# Minimum value: 0, maximum value: 9999, default value: 100.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-HTML_INDEX_NUM_ENTRIES = 100
-
-# If the GENERATE_DOCSET tag is set to YES, additional index files will be
-# generated that can be used as input for Apple's Xcode 3 integrated development
-# environment (see: http://developer.apple.com/tools/xcode/), introduced with
-# OSX 10.5 (Leopard). To create a documentation set, doxygen will generate a
-# Makefile in the HTML output directory. Running make will produce the docset in
-# that directory and running make install will install the docset in
-# ~/Library/Developer/Shared/Documentation/DocSets so that Xcode will find it at
-# startup. See http://developer.apple.com/tools/creatingdocsetswithdoxygen.html
-# for more information.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-GENERATE_DOCSET = NO
-
-# This tag determines the name of the docset feed. A documentation feed provides
-# an umbrella under which multiple documentation sets from a single provider
-# (such as a company or product suite) can be grouped.
-# The default value is: Doxygen generated docs.
-# This tag requires that the tag GENERATE_DOCSET is set to YES.
-
-DOCSET_FEEDNAME = "Doxygen generated docs"
-
-# This tag specifies a string that should uniquely identify the documentation
-# set bundle. This should be a reverse domain-name style string, e.g.
-# com.mycompany.MyDocSet. Doxygen will append .docset to the name.
-# The default value is: org.doxygen.Project.
-# This tag requires that the tag GENERATE_DOCSET is set to YES.
-
-DOCSET_BUNDLE_ID = org.doxygen.Project
-
-# The DOCSET_PUBLISHER_ID tag specifies a string that should uniquely identify
-# the documentation publisher. This should be a reverse domain-name style
-# string, e.g. com.mycompany.MyDocSet.documentation.
-# The default value is: org.doxygen.Publisher.
-# This tag requires that the tag GENERATE_DOCSET is set to YES.
-
-DOCSET_PUBLISHER_ID = org.doxygen.Publisher
-
-# The DOCSET_PUBLISHER_NAME tag identifies the documentation publisher.
-# The default value is: Publisher.
-# This tag requires that the tag GENERATE_DOCSET is set to YES.
-
-DOCSET_PUBLISHER_NAME = Publisher
-
-# If the GENERATE_HTMLHELP tag is set to YES then doxygen generates three
-# additional HTML index files: index.hhp, index.hhc, and index.hhk. The
-# index.hhp is a project file that can be read by Microsoft's HTML Help Workshop
-# (see: http://www.microsoft.com/en-us/download/details.aspx?id=21138) on
-# Windows.
-#
-# The HTML Help Workshop contains a compiler that can convert all HTML output
-# generated by doxygen into a single compiled HTML file (.chm). Compiled HTML
-# files are now used as the Windows 98 help format, and will replace the old
-# Windows help format (.hlp) on all Windows platforms in the future. Compressed
-# HTML files also contain an index, a table of contents, and you can search for
-# words in the documentation. The HTML workshop also contains a viewer for
-# compressed HTML files.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-GENERATE_HTMLHELP = NO
-
-# The CHM_FILE tag can be used to specify the file name of the resulting .chm
-# file. You can add a path in front of the file if the result should not be
-# written to the html output directory.
-# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
-
-CHM_FILE =
-
-# The HHC_LOCATION tag can be used to specify the location (absolute path
-# including file name) of the HTML help compiler (hhc.exe). If non-empty,
-# doxygen will try to run the HTML help compiler on the generated index.hhp.
-# The file has to be specified with full path.
-# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
-
-HHC_LOCATION =
-
-# The GENERATE_CHI flag controls if a separate .chi index file is generated
-# (YES) or that it should be included in the master .chm file (NO).
-# The default value is: NO.
-# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
-
-GENERATE_CHI = NO
-
-# The CHM_INDEX_ENCODING is used to encode HtmlHelp index (hhk), content (hhc)
-# and project file content.
-# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
-
-CHM_INDEX_ENCODING =
-
-# The BINARY_TOC flag controls whether a binary table of contents is generated
-# (YES) or a normal table of contents (NO) in the .chm file. Furthermore it
-# enables the Previous and Next buttons.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
-
-BINARY_TOC = NO
-
-# The TOC_EXPAND flag can be set to YES to add extra items for group members to
-# the table of contents of the HTML help documentation and to the tree view.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
-
-TOC_EXPAND = NO
-
-# If the GENERATE_QHP tag is set to YES and both QHP_NAMESPACE and
-# QHP_VIRTUAL_FOLDER are set, an additional index file will be generated that
-# can be used as input for Qt's qhelpgenerator to generate a Qt Compressed Help
-# (.qch) of the generated HTML documentation.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-GENERATE_QHP = NO
-
-# If the QHG_LOCATION tag is specified, the QCH_FILE tag can be used to specify
-# the file name of the resulting .qch file. The path specified is relative to
-# the HTML output folder.
-# This tag requires that the tag GENERATE_QHP is set to YES.
-
-QCH_FILE =
-
-# The QHP_NAMESPACE tag specifies the namespace to use when generating Qt Help
-# Project output. For more information please see Qt Help Project / Namespace
-# (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#namespace).
-# The default value is: org.doxygen.Project.
-# This tag requires that the tag GENERATE_QHP is set to YES.
-
-QHP_NAMESPACE = org.doxygen.Project
-
-# The QHP_VIRTUAL_FOLDER tag specifies the namespace to use when generating Qt
-# Help Project output. For more information please see Qt Help Project / Virtual
-# Folders (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#virtual-
-# folders).
-# The default value is: doc.
-# This tag requires that the tag GENERATE_QHP is set to YES.
-
-QHP_VIRTUAL_FOLDER = doc
-
-# If the QHP_CUST_FILTER_NAME tag is set, it specifies the name of a custom
-# filter to add. For more information please see Qt Help Project / Custom
-# Filters (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#custom-
-# filters).
-# This tag requires that the tag GENERATE_QHP is set to YES.
-
-QHP_CUST_FILTER_NAME =
-
-# The QHP_CUST_FILTER_ATTRS tag specifies the list of the attributes of the
-# custom filter to add. For more information please see Qt Help Project / Custom
-# Filters (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#custom-
-# filters).
-# This tag requires that the tag GENERATE_QHP is set to YES.
-
-QHP_CUST_FILTER_ATTRS =
-
-# The QHP_SECT_FILTER_ATTRS tag specifies the list of the attributes this
-# project's filter section matches. Qt Help Project / Filter Attributes (see:
-# http://qt-project.org/doc/qt-4.8/qthelpproject.html#filter-attributes).
-# This tag requires that the tag GENERATE_QHP is set to YES.
-
-QHP_SECT_FILTER_ATTRS =
-
-# The QHG_LOCATION tag can be used to specify the location of Qt's
-# qhelpgenerator. If non-empty doxygen will try to run qhelpgenerator on the
-# generated .qhp file.
-# This tag requires that the tag GENERATE_QHP is set to YES.
-
-QHG_LOCATION =
-
-# If the GENERATE_ECLIPSEHELP tag is set to YES, additional index files will be
-# generated, together with the HTML files, they form an Eclipse help plugin. To
-# install this plugin and make it available under the help contents menu in
-# Eclipse, the contents of the directory containing the HTML and XML files needs
-# to be copied into the plugins directory of eclipse. The name of the directory
-# within the plugins directory should be the same as the ECLIPSE_DOC_ID value.
-# After copying Eclipse needs to be restarted before the help appears.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-GENERATE_ECLIPSEHELP = NO
-
-# A unique identifier for the Eclipse help plugin. When installing the plugin
-# the directory name containing the HTML and XML files should also have this
-# name. Each documentation set should have its own identifier.
-# The default value is: org.doxygen.Project.
-# This tag requires that the tag GENERATE_ECLIPSEHELP is set to YES.
-
-ECLIPSE_DOC_ID = org.doxygen.Project
-
-# If you want full control over the layout of the generated HTML pages it might
-# be necessary to disable the index and replace it with your own. The
-# DISABLE_INDEX tag can be used to turn on/off the condensed index (tabs) at top
-# of each HTML page. A value of NO enables the index and the value YES disables
-# it. Since the tabs in the index contain the same information as the navigation
-# tree, you can set this option to YES if you also set GENERATE_TREEVIEW to YES.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-DISABLE_INDEX = NO
-
-# The GENERATE_TREEVIEW tag is used to specify whether a tree-like index
-# structure should be generated to display hierarchical information. If the tag
-# value is set to YES, a side panel will be generated containing a tree-like
-# index structure (just like the one that is generated for HTML Help). For this
-# to work a browser that supports JavaScript, DHTML, CSS and frames is required
-# (i.e. any modern browser). Windows users are probably better off using the
-# HTML help feature. Via custom style sheets (see HTML_EXTRA_STYLESHEET) one can
-# further fine-tune the look of the index. As an example, the default style
-# sheet generated by doxygen has an example that shows how to put an image at
-# the root of the tree instead of the PROJECT_NAME. Since the tree basically has
-# the same information as the tab index, you could consider setting
-# DISABLE_INDEX to YES when enabling this option.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-GENERATE_TREEVIEW = NO
-
-# The ENUM_VALUES_PER_LINE tag can be used to set the number of enum values that
-# doxygen will group on one line in the generated HTML documentation.
-#
-# Note that a value of 0 will completely suppress the enum values from appearing
-# in the overview section.
-# Minimum value: 0, maximum value: 20, default value: 4.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-ENUM_VALUES_PER_LINE = 1
-
-# If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be used
-# to set the initial width (in pixels) of the frame in which the tree is shown.
-# Minimum value: 0, maximum value: 1500, default value: 250.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-TREEVIEW_WIDTH = 250
-
-# If the EXT_LINKS_IN_WINDOW option is set to YES, doxygen will open links to
-# external symbols imported via tag files in a separate window.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-EXT_LINKS_IN_WINDOW = NO
-
-# Use this tag to change the font size of LaTeX formulas included as images in
-# the HTML documentation. When you change the font size after a successful
-# doxygen run you need to manually remove any form_*.png images from the HTML
-# output directory to force them to be regenerated.
-# Minimum value: 8, maximum value: 50, default value: 10.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-FORMULA_FONTSIZE = 10
-
-# Use the FORMULA_TRANPARENT tag to determine whether or not the images
-# generated for formulas are transparent PNGs. Transparent PNGs are not
-# supported properly for IE 6.0, but are supported on all modern browsers.
-#
-# Note that when changing this option you need to delete any form_*.png files in
-# the HTML output directory before the changes have effect.
-# The default value is: YES.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-FORMULA_TRANSPARENT = YES
-
-# Enable the USE_MATHJAX option to render LaTeX formulas using MathJax (see
-# http://www.mathjax.org) which uses client side Javascript for the rendering
-# instead of using pre-rendered bitmaps. Use this if you do not have LaTeX
-# installed or if you want to formulas look prettier in the HTML output. When
-# enabled you may also need to install MathJax separately and configure the path
-# to it using the MATHJAX_RELPATH option.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-USE_MATHJAX = NO
-
-# When MathJax is enabled you can set the default output format to be used for
-# the MathJax output. See the MathJax site (see:
-# http://docs.mathjax.org/en/latest/output.html) for more details.
-# Possible values are: HTML-CSS (which is slower, but has the best
-# compatibility), NativeMML (i.e. MathML) and SVG.
-# The default value is: HTML-CSS.
-# This tag requires that the tag USE_MATHJAX is set to YES.
-
-MATHJAX_FORMAT = HTML-CSS
-
-# When MathJax is enabled you need to specify the location relative to the HTML
-# output directory using the MATHJAX_RELPATH option. The destination directory
-# should contain the MathJax.js script. For instance, if the mathjax directory
-# is located at the same level as the HTML output directory, then
-# MATHJAX_RELPATH should be ../mathjax. The default value points to the MathJax
-# Content Delivery Network so you can quickly see the result without installing
-# MathJax. However, it is strongly recommended to install a local copy of
-# MathJax from http://www.mathjax.org before deployment.
-# The default value is: http://cdn.mathjax.org/mathjax/latest.
-# This tag requires that the tag USE_MATHJAX is set to YES.
-
-MATHJAX_RELPATH = http://cdn.mathjax.org/mathjax/latest
-
-# The MATHJAX_EXTENSIONS tag can be used to specify one or more MathJax
-# extension names that should be enabled during MathJax rendering. For example
-# MATHJAX_EXTENSIONS = TeX/AMSmath TeX/AMSsymbols
-# This tag requires that the tag USE_MATHJAX is set to YES.
-
-MATHJAX_EXTENSIONS =
-
-# The MATHJAX_CODEFILE tag can be used to specify a file with javascript pieces
-# of code that will be used on startup of the MathJax code. See the MathJax site
-# (see: http://docs.mathjax.org/en/latest/output.html) for more details. For an
-# example see the documentation.
-# This tag requires that the tag USE_MATHJAX is set to YES.
-
-MATHJAX_CODEFILE =
-
-# When the SEARCHENGINE tag is enabled doxygen will generate a search box for
-# the HTML output. The underlying search engine uses javascript and DHTML and
-# should work on any modern browser. Note that when using HTML help
-# (GENERATE_HTMLHELP), Qt help (GENERATE_QHP), or docsets (GENERATE_DOCSET)
-# there is already a search function so this one should typically be disabled.
-# For large projects the javascript based search engine can be slow, then
-# enabling SERVER_BASED_SEARCH may provide a better solution. It is possible to
-# search using the keyboard; to jump to the search box use + S
-# (what the is depends on the OS and browser, but it is typically
-# , /