Commit

Merge remote-tracking branch 'upstream/master' into ci/gha/docs/new-tests
akashchi committed Jan 16, 2024
2 parents a1ec4bf + 585a7dc commit c3dea7f
Showing 584 changed files with 13,588 additions and 23,738 deletions.
3 changes: 3 additions & 0 deletions .github/components.yml
@@ -199,6 +199,9 @@ IE_Tests:
     - IR_FE

 MO:
+  revalidate:
+    - PyTorch_FE
+    - TF_FE
   build:
     - Python_API
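
For context, the revalidate and build lists in .github/components.yml feed OpenVINO's Smart CI component mapping; the actual consumer is the repository's Smart CI action, so the snippet below is only an illustrative sketch of how such a mapping might be read (PyYAML assumed, key semantics inferred from the key names):

    # Hypothetical reader for a Smart-CI-style component map; not the actual action code.
    import yaml

    with open(".github/components.yml") as f:
        components = yaml.safe_load(f)

    changed = "MO"  # pretend a change touched the MO component
    entry = components.get(changed) or {}
    to_revalidate = entry.get("revalidate", [])  # components whose tests should re-run
    to_build = entry.get("build", [])            # components that must be (re)built
    print(f"{changed}: revalidate={to_revalidate}, build={to_build}")
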
5 changes: 3 additions & 2 deletions .github/workflows/linux.yml
@@ -302,7 +302,7 @@ jobs:

   Conformance:
     needs: [ Build, Smart_CI ]
-    timeout-minutes: ${{ matrix.TEST_TYPE == 'API' && 5 || 15 }}
+    timeout-minutes: ${{ matrix.TEST_TYPE == 'API' && 5 || 20 }}
     defaults:
       run:
         shell: bash

@@ -386,7 +386,8 @@ jobs:
             -d=${TEST_DEVICE} \
             -t=${{ matrix.TEST_TYPE }} \
             -w=${CONFORMANCE_ARTIFACTS_DIR} \
-            -f=${CONFORMANCE_TOOLS_DIR}/skip_configs/${TEST_DEVICE}/expected_failures_${{ matrix.TEST_TYPE }}.csv
+            --cache_path=${CONFORMANCE_TOOLS_DIR}/github/cache/${TEST_DEVICE}/test_cache_${{ matrix.TEST_TYPE }}.lst \
+            -f=${CONFORMANCE_TOOLS_DIR}/github/skip_configs/${TEST_DEVICE}/expected_failures_${{ matrix.TEST_TYPE }}.csv
       - name: Pack Conformance Artifacts
         if: ${{ always() }}
5 changes: 5 additions & 0 deletions CMakeLists.txt
@@ -28,6 +28,11 @@ if(POLICY CMP0091)
     cmake_policy(SET CMP0091 NEW) # Enables use of MSVC_RUNTIME_LIBRARY
 endif()

+# Avoid warning about DOWNLOAD_EXTRACT_TIMESTAMP in CMake 3.24:
+if(POLICY CMP0135)
+    cmake_policy(SET CMP0135 NEW)
+endif()
+
 project(OpenVINO DESCRIPTION "OpenVINO toolkit")

 find_package(OpenVINODeveloperScripts REQUIRED
2 changes: 2 additions & 0 deletions cmake/developer_package/OpenVINODeveloperScriptsConfig.cmake
@@ -206,6 +206,8 @@ set(CMAKE_POLICY_DEFAULT_CMP0025 NEW)
 set(CMAKE_POLICY_DEFAULT_CMP0026 NEW)
 # CMake 3.0+ (2.8.12): MacOS "@rpath" in target's install name
 set(CMAKE_POLICY_DEFAULT_CMP0042 NEW)
+# CMake 3.1+: Simplify variable reference and escape sequence evaluation.
+set(CMAKE_POLICY_DEFAULT_CMP0053 NEW)
 # CMake 3.9+: `RPATH` settings on macOS do not affect `install_name`.
 set(CMAKE_POLICY_DEFAULT_CMP0068 NEW)
 # CMake 3.12+: find_package() uses <PackageName>_ROOT variables.
12 changes: 6 additions & 6 deletions cmake/developer_package/api_validator/api_validator.cmake
@@ -3,25 +3,25 @@
 #

 if(WIN32)
-    set(PROGRAMFILES_ENV "ProgramFiles(X86)")
+    set(PROGRAMFILES_ENV "ProgramFiles\(X86\)")

     # check that PROGRAMFILES_ENV is defined, because in case of cross-compilation for Windows
     # we don't have such variable
-    if(DEFINED ENV{PROGRAMFILES_ENV})
+    if(DEFINED ENV{${PROGRAMFILES_ENV}})
         file(TO_CMAKE_PATH $ENV{${PROGRAMFILES_ENV}} PROGRAMFILES)

         set(WDK_PATHS "${PROGRAMFILES}/Windows Kits/10/bin/${CMAKE_VS_WINDOWS_TARGET_PLATFORM_VERSION}/x64"
                       "${PROGRAMFILES}/Windows Kits/10/bin/x64")

         message(STATUS "Trying to find apivalidator in: ")
         foreach(wdk_path IN LISTS WDK_PATHS)
             message(" * ${wdk_path}")
         endforeach()

         find_host_program(ONECORE_API_VALIDATOR
                           NAMES apivalidator
                           PATHS ${WDK_PATHS}
                           DOC "ApiValidator for OneCore compliance")

         if(ONECORE_API_VALIDATOR)
             message(STATUS "Found apivalidator: ${ONECORE_API_VALIDATOR}")
@@ -37,7 +37,7 @@ function(_generate_dispatcher)
 // !! do not modify it !!!
 //
 #include \"${XARCH_API_HEADER}\"
-#include \"ie_system_conf.h\"
+#include \"openvino/runtime/system_conf.hpp\"
 ")

@@ -42,7 +42,7 @@ set(DISPATCHER_GEN_OPTIONS_HOLDER ${CMAKE_CURRENT_LIST_DIR}/cross_compiled_disp_
 #
 # Allow to enable multiple cross compilation of source file inside one module
 # with keeping requirements on minimal instruction set. The CPU check performed
-# in runtime via common utils declared in "ie_system_conf.h".
+# in runtime via common utils declared in "system_conf.h".
 #
 # Usage example:
 # cross_compiled_file(<target>
4 changes: 2 additions & 2 deletions cmake/developer_package/plugins/create_plugins_hpp.cmake
@@ -42,10 +42,10 @@ foreach(dev_map IN LISTS OV_DEVICE_MAPPING)

     # declarations
     set(OV_PLUGINS_DECLARATIONS "${OV_PLUGINS_DECLARATIONS}
-        IE_DEFINE_PLUGIN_CREATE_FUNCTION_DECLARATION(${_OV_CREATE_PLUGIN_FUNC});")
+        OV_DEFINE_PLUGIN_CREATE_FUNCTION_DECLARATION(${_OV_CREATE_PLUGIN_FUNC});")
     if(${actual_dev_name}_AS_EXTENSION)
         set(OV_PLUGINS_DECLARATIONS "${OV_PLUGINS_DECLARATIONS}
-            IE_DEFINE_EXTENSION_CREATE_FUNCTION_DECLARATION(${_OV_CREATE_EXTENSION_FUNC});")
+            OV_DEFINE_EXTENSION_CREATE_FUNCTION_DECLARATION(${_OV_CREATE_EXTENSION_FUNC});")
     else()
         set(_OV_CREATE_EXTENSION_FUNC "nullptr")
     endif()
2 changes: 1 addition & 1 deletion cmake/developer_package/plugins/plugins.cmake
@@ -80,7 +80,7 @@ function(ov_add_plugin)
         if(OV_PLUGIN_AS_EXTENSION)
             # to distinguish functions creating extensions objects
             target_compile_definitions(${OV_PLUGIN_NAME} PRIVATE
-                IE_CREATE_EXTENSION=CreateExtensionShared${OV_PLUGIN_DEVICE_NAME})
+                OV_CREATE_EXTENSION=CreateExtensionShared${OV_PLUGIN_DEVICE_NAME})
         endif()
     endif()

16 changes: 13 additions & 3 deletions cmake/developer_package/plugins/plugins.hpp.in
@@ -9,13 +9,23 @@

 #ifdef OPENVINO_STATIC_LIBRARY

-#include "cpp_interfaces/interface/ie_iplugin_internal.hpp"
+// The Macro used to create extensions for static library
+#define OV_DEFINE_EXTENSION_CREATE_FUNCTION_DECLARATION(_OV_CREATE_EXTENSION_FUNC) \
+    OPENVINO_EXTENSION_C_API void \
+    _OV_CREATE_EXTENSION_FUNC(std::vector<::ov::Extension::Ptr>& ext)
+
+// The Macro used to create plugin for static library
+#define OV_DEFINE_PLUGIN_CREATE_FUNCTION_DECLARATION(_OV_CREATE_PLUGIN_FUNC) \
+    OPENVINO_PLUGIN_API void \
+    _OV_CREATE_PLUGIN_FUNC(::std::shared_ptr<::ov::IPlugin> &plugin) noexcept(false)

 @OV_PLUGINS_DECLARATIONS@

+using CreateExtensionFunc = void(std::vector<::ov::Extension::Ptr>&);
+using CreatePluginEngineFunc = void(std::shared_ptr<::ov::IPlugin>&);
 struct Value {
-    InferenceEngine::CreatePluginEngineFunc * m_create_plugin_func;
-    InferenceEngine::CreateExtensionFunc * m_create_extension_func;
+    CreatePluginEngineFunc * m_create_plugin_func;
+    CreateExtensionFunc * m_create_extension_func;
     std::map<std::string, std::string> m_default_config;
 };

3 changes: 3 additions & 0 deletions cmake/features.cmake
@@ -177,6 +177,9 @@ ov_dependent_option (ENABLE_SYSTEM_SNAPPY "Enables use of system version of Snap
 ov_dependent_option (ENABLE_PYTHON_PACKAGING "Enables packaging of Python API in APT / YUM" OFF
     "ENABLE_PYTHON;UNIX" OFF)

+ov_dependent_option (ENABLE_JS "Enables JS API building" ON
+    "NOT WIN32" OFF)
+
 ov_option(ENABLE_OPENVINO_DEBUG "Enable output for OPENVINO_DEBUG statements" OFF)

 if(NOT BUILD_SHARED_LIBS AND ENABLE_OV_TF_FRONTEND)
1 change: 0 additions & 1 deletion cmake/packaging/rpm.cmake
@@ -276,7 +276,6 @@ macro(ov_cpack_settings)
         ov_rpm_add_rpmlint_suppression("${python_component}"
             # all directories
             "non-standard-dir-perm /usr/lib64/${pyversion}/site-packages/openvino/*"
-            "non-standard-dir-perm /usr/lib64/${pyversion}/site-packages/ngraph/*"
             )
     endif()

@@ -16,9 +16,7 @@ Test performance with the benchmark_app

 You can run OpenVINO benchmarks in both C++ and Python APIs, yet the experience differs in each case.
 The Python one is part of OpenVINO Runtime installation, while C++ is available as a code sample.
-For a detailed description, see:
-* :doc:`benchmark_app for C++ <openvino_inference_engine_samples_benchmark_app_README>`
-* :doc:`benchmark_app for Python <openvino_inference_engine_tools_benchmark_tool_README>`.
+For a detailed description, see: :doc:`benchmark_app <openvino_sample_benchmark_tool>`.

 Make sure to install the latest release package with support for frameworks of the models you want to test.
 For the most reliable performance benchmarks, :doc:`prepare the model for use with OpenVINO <openvino_docs_model_processing_introduction>`.
@@ -87,7 +85,7 @@ slower than the subsequent ones, an aggregated value can be used for the executi

 When comparing the OpenVINO Runtime performance with the framework or another reference code, make sure that both versions are as similar as possible:

-- Wrap the exact inference execution (for examples, see :doc:`Benchmark app <openvino_inference_engine_samples_benchmark_app_README>`).
+- Wrap the exact inference execution (for examples, see :doc:`Benchmark app <openvino_sample_benchmark_tool>`).
 - Do not include model loading time.
 - Ensure that the inputs are identical for OpenVINO Runtime and the framework. For example, watch out for random values that can be used to populate the inputs.
 - In situations when any user-side pre-processing should be tracked separately, consider :doc:`image pre-processing and conversion <openvino_docs_OV_UG_Preprocessing_Overview>`.
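
To make these comparison rules concrete, here is a minimal, illustrative Python sketch that times only the inference calls (the IR path is a placeholder; a single static-shaped input is assumed):

    import time
    import numpy as np
    import openvino as ov

    core = ov.Core()
    model = core.read_model("model.xml")           # placeholder IR path
    compiled = core.compile_model(model, "CPU")    # compilation/loading is NOT timed
    request = compiled.create_infer_request()

    # Fixed, seeded input so OpenVINO and the reference framework see identical data.
    shape = tuple(compiled.input(0).shape)
    data = np.random.default_rng(0).random(shape).astype(np.float32)

    request.infer({0: data})                       # warm-up run, excluded from the measurement

    start = time.perf_counter()
    for _ in range(100):
        request.infer({0: data})                   # only inference sits inside the timed region
    elapsed = time.perf_counter() - start
    print(f"average latency: {elapsed / 100 * 1000:.2f} ms")
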
@@ -98,7 +96,7 @@ Internal Inference Performance Counters and Execution Graphs
 +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

 More detailed insights into inference performance breakdown can be achieved with device-specific performance counters and/or execution graphs.
-Both :doc:`C++ <openvino_inference_engine_samples_benchmark_app_README>` and :doc:`Python <openvino_inference_engine_tools_benchmark_tool_README>`
+Both :doc:`C++ and Python <openvino_sample_benchmark_tool>`
 versions of the *benchmark_app* support a ``-pc`` command-line parameter that outputs internal execution breakdown.

 For example, the table shown below is part of performance counters for quantized
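
The counters printed by -pc can also be read programmatically; below is a rough sketch with the OpenVINO Python API (the IR path is a placeholder, and the property/field names reflect recent releases, so they may differ slightly between versions):

    import openvino as ov

    core = ov.Core()
    model = core.read_model("model.xml")                                # placeholder IR path
    compiled = core.compile_model(model, "CPU", {"PERF_COUNT": "YES"})  # enable per-layer counters
    request = compiled.create_infer_request()
    request.infer()                                                     # one run populates the counters

    for info in request.profiling_info:
        print(info.node_name, info.node_type, info.exec_type, info.real_time, info.status)
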
@@ -31,10 +31,8 @@ Performance Information F.A.Q.

 All of the performance benchmarks are generated using the
 open-source tool within the Intel® Distribution of OpenVINO™ toolkit
-called ``benchmark_app``. This tool is available
-:doc:`for C++ apps <openvino_inference_engine_samples_benchmark_app_README>`.
-as well as
-:doc:`for Python apps <openvino_inference_engine_tools_benchmark_tool_README>`.
+called :doc:`benchmark_app <openvino_sample_benchmark_tool>`.
+This tool is available for Python and C++ apps.

 For a simple instruction on testing performance, see the :doc:`Getting Performance Numbers Guide <openvino_docs_MO_DG_Getting_Performance_Numbers>`.

2 changes: 1 addition & 1 deletion docs/articles_en/documentation/openvino_extensibility.rst
@@ -194,5 +194,5 @@ See Also

 * :doc:`OpenVINO Transformations <openvino_docs_transformations>`
 * :doc:`Using OpenVINO Runtime Samples <openvino_docs_OV_UG_Samples_Overview>`
-* :doc:`Hello Shape Infer SSD sample <openvino_inference_engine_samples_hello_reshape_ssd_README>`
+* :doc:`Hello Shape Infer SSD sample <openvino_sample_hello_reshape_ssd>`

@@ -329,7 +329,7 @@ After that you should quantize model by the :doc:`Model Quantizer <omz_tools_dow
 Inference
 +++++++++

-The simplest way to infer the model and collect performance counters is :doc:`Benchmark Application <openvino_inference_engine_samples_benchmark_app_README>`.
+The simplest way to infer the model and collect performance counters is :doc:`Benchmark Application <openvino_sample_benchmark_tool>`.

 .. code-block:: sh
57 changes: 25 additions & 32 deletions docs/articles_en/documentation/openvino_legacy_features.rst
@@ -13,17 +13,14 @@ Legacy Features and Components
    Deploy Application with Deployment Manager <openvino_docs_install_guides_deployment_manager_tool>
    OpenVINO API 2.0 transition <openvino_2_0_transition_guide>
    Open Model ZOO <model_zoo>
-   Apache MXNet, Caffe, and Kaldi <mxnet_caffe_kaldi>
    Post-training Optimization Tool <pot_introduction>


 Since OpenVINO has grown very rapidly in recent years, some of its features
 and components have been replaced by other solutions. Some of them are still
 supported to assure OpenVINO users are given enough time to adjust their projects,
 before the features are fully discontinued.

 This section will give you an overview of these major changes and tell you how
 you can proceed to get the best experience and results with the current OpenVINO
 offering.
@@ -32,10 +29,10 @@ offering.
 | *New solution:* OpenVINO Runtime includes all supported components
 | *Old solution:* discontinuation planned for OpenVINO 2025.0
 |
 | OpenVINO Development Tools used to be the OpenVINO package with tools for
   advanced operations on models, such as Model conversion API, Benchmark Tool,
   Accuracy Checker, Annotation Converter, Post-Training Optimization Tool,
   and Open Model Zoo tools. Most of these tools have been either removed,
   replaced by other solutions, or moved to the OpenVINO Runtime package.
 | :doc:`See how to install Development Tools <openvino_docs_install_guides_install_dev_tools>`
@@ -44,16 +41,16 @@ offering.
 | *New solution:* Direct model support and OpenVINO Converter (OVC)
 | *Old solution:* Legacy Conversion API discontinuation planned for OpenVINO 2025.0
 |
 | The role of Model Optimizer and later the Conversion API was largely reduced
   when all major model frameworks became supported directly. For converting model
   files explicitly, it has been replaced with a more light-weight and efficient
   solution, the OpenVINO Converter (launched with OpenVINO 2023.1).
 | :doc:`See how to use OVC <openvino_docs_model_processing_introduction>`
 | :doc:`See how to transition from the legacy solution <openvino_docs_OV_Converter_UG_prepare_model_convert_model_MO_OVC_transition>`
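
As a point of reference, the OVC flow mentioned above amounts to a one-liner on the command line (ovc model.onnx) or a short Python sketch like the following (file names are placeholders):

    import openvino as ov

    ov_model = ov.convert_model("model.onnx")   # direct conversion, replacing the legacy mo tool
    ov.save_model(ov_model, "model.xml")        # writes OpenVINO IR: model.xml + model.bin
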
 | **OpenVINO Deployment Manager**
 | *New solution:* the tool is no longer needed
 | *Old solution:* discontinuation planned for OpenVINO 2024.0
 |
 | It is recommended to explore alternative deployment solutions available in OpenVINO.
 | :doc:`See how to deploy locally <openvino_deployment_guide>`
@@ -76,23 +73,19 @@ offering.

 | **Apache MXNet, Caffe, and Kaldi model formats**
 | *New solution:* conversion to ONNX via external tools
-| *Old solution:* model support will be discontinued with OpenVINO 2024.0
+| *Old solution:* model support discontinued with OpenVINO 2024.0
 |
 | Since these three model formats proved to be far less popular among OpenVINO users
   than the remaining ones, their support has been discontinued. Converting them to the
   ONNX format is a possible way of retaining them in the OpenVINO-based pipeline.
-| :doc:`See the previous conversion instructions <mxnet_caffe_kaldi>`
+| `See the previous conversion instructions <https://docs.openvino.ai/2023.3/mxnet_caffe_kaldi.html>`__
 | :doc:`See the currently supported frameworks <openvino_docs_model_processing_introduction>`

 | **Post-training Optimization Tool (POT)**
 | *New solution:* NNCF extended in OpenVINO 2023.0
 | *Old solution:* POT discontinuation planned for 2024.0
 |
 | Neural Network Compression Framework (NNCF) now offers the same functionality as POT,
   apart from its original feature set. It is currently the default tool for performing
   both post-training and quantization optimizations, while POT is considered deprecated.
 | :doc:`See the deprecated POT documentation <pot_introduction>`
 | :doc:`See how to use NNCF for model optimization <openvino_docs_model_optimization_guide>`
 | `Check the NNCF GitHub project, including documentation <https://github.com/openvinotoolkit/nncf>`__
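
For orientation, the NNCF post-training quantization flow that replaces POT is roughly the following sketch (the IR path and input shape are placeholders):

    import numpy as np
    import nncf
    import openvino as ov

    core = ov.Core()
    model = core.read_model("model.xml")        # placeholder FP32 IR with a 1x3x224x224 input

    # A few hundred representative samples are typically enough for calibration;
    # zeros are used here only to keep the sketch self-contained.
    samples = [np.zeros((1, 3, 224, 224), dtype=np.float32) for _ in range(300)]
    calibration_dataset = nncf.Dataset(samples, lambda x: x)

    quantized_model = nncf.quantize(model, calibration_dataset)
    ov.save_model(quantized_model, "model_int8.xml")
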
@@ -101,7 +94,7 @@ offering.
 | *New solution:* API 2.0 launched in OpenVINO 2022.1
 | *Old solution:* discontinuation planned for OpenVINO 2024.0
 |
 | API 1.0 (Inference Engine and nGraph) is now deprecated. It can still be
   used but is not recommended. Its discontinuation is planned for 2024.
 | :doc:`See how to transition to API 2.0 <openvino_2_0_transition_guide>`
@@ -110,21 +103,21 @@ offering.
 | *New solution:* the tool is no longer needed
 | *Old solution:* deprecated in OpenVINO 2023.0
 |
 | Compile tool is now deprecated. If you need to compile a model for inference on
   a specific device, use the following script:

 .. tab-set::

    .. tab-item:: Python
       :sync: py

       .. doxygensnippet:: docs/snippets/export_compiled_model.py
          :language: python
          :fragment: [export_compiled_model]

    .. tab-item:: C++
       :sync: cpp

       .. doxygensnippet:: docs/snippets/export_compiled_model.cpp
          :language: cpp
          :fragment: [export_compiled_model]
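
The referenced snippets are not included in this diff; in spirit, the Python side of that export/import flow looks roughly like this (device and file names are placeholders):

    import openvino as ov

    core = ov.Core()
    compiled = core.compile_model(core.read_model("model.xml"), "CPU")

    # Save the compiled blob so the model does not have to be recompiled next time.
    with open("model.blob", "wb") as f:
        f.write(compiled.export_model())

    # Restore it later on the same device type.
    with open("model.blob", "rb") as f:
        restored = core.import_model(f.read(), "CPU")
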
@@ -137,15 +130,15 @@ offering.
 | *New solution:* DevCloud version
 | *Old solution:* local distribution discontinued in OpenVINO 2022.3
 |
 | The stand-alone version of DL Workbench, a GUI tool for previewing and benchmarking
   deep learning models, has been discontinued. You can use its cloud version:
 | `Intel® Developer Cloud for the Edge <https://www.intel.com/content/www/us/en/developer/tools/devcloud/edge/overview.html>`__.

 | **OpenVINO™ integration with TensorFlow (OVTF)**
 | *New solution:* Direct model support and OpenVINO Converter (OVC)
 | *Old solution:* discontinued in OpenVINO 2023.0
 |
 | OpenVINO™ Integration with TensorFlow is no longer supported, as OpenVINO now features
   native TensorFlow support, significantly enhancing user experience with no need for
   explicit model conversion.