diff --git a/docs/snippets/example_async_infer_request.cpp b/docs/snippets/example_async_infer_request.cpp
deleted file mode 100644
index 41a6c0ac6f1250..00000000000000
--- a/docs/snippets/example_async_infer_request.cpp
+++ /dev/null
@@ -1,78 +0,0 @@
-// Copyright (C) 2018-2020 Intel Corporation
-// SPDX-License-Identifier: Apache-2.0
-//
-
-#ifndef IN_OV_COMPONENT
-# define IN_OV_COMPONENT
-# define WAS_OV_LIBRARY_DEFINED
-#endif
-
-#include
-#include
-#include
-
-#ifdef WAS_OV_LIBRARY_DEFINED
-# undef IN_OV_COMPONENT
-# undef WAS_OV_LIBRARY_DEFINED
-#endif
-
-using namespace InferenceEngine;
-
-class AcceleratorSyncRequest : public IInferRequestInternal {
-public:
-    using Ptr = std::shared_ptr<AcceleratorSyncRequest>;
-
-    void preprocess();
-    void write_to_device();
-    void run_on_device();
-    void read_from_device();
-    void post_process();
-};
-
-// ! [async_infer_request:define_pipeline]
-// Inherits from AsyncInferRequestThreadSafeDefault
-class AcceleratorAsyncInferRequest : public AsyncInferRequestThreadSafeDefault {
-    // Store the pointer to the synchronous request and five executors
-    AcceleratorAsyncInferRequest(const AcceleratorSyncRequest::Ptr& syncRequest,
-                                 const ITaskExecutor::Ptr& preprocessExecutor,
-                                 const ITaskExecutor::Ptr& writeToDeviceExecutor,
-                                 const ITaskExecutor::Ptr& runOnDeviceExecutor,
-                                 const ITaskExecutor::Ptr& readFromDeviceExecutor,
-                                 const ITaskExecutor::Ptr& postProcessExecutor) :
-        AsyncInferRequestThreadSafeDefault(syncRequest, nullptr, nullptr),
-        _accSyncRequest{syncRequest},
-        _preprocessExecutor{preprocessExecutor},
-        _writeToDeviceExecutor{writeToDeviceExecutor},
-        _runOnDeviceExecutor{runOnDeviceExecutor},
-        _readFromDeviceExecutor{readFromDeviceExecutor},
-        _postProcessExecutor{postProcessExecutor}
-    {
-        // Five pipeline stages of synchronous infer request are run by different executors
-        _pipeline = {
-            { _preprocessExecutor , [this] {
-                _accSyncRequest->preprocess();
-            }},
-            { _writeToDeviceExecutor , [this] {
-                _accSyncRequest->write_to_device();
-            }},
-            { _runOnDeviceExecutor , [this] {
-                _accSyncRequest->run_on_device();
-            }},
-            { _readFromDeviceExecutor , [this] {
-                _accSyncRequest->read_from_device();
-            }},
-            { _postProcessExecutor , [this] {
-                _accSyncRequest->post_process();
-            }},
-        };
-    }
-
-    // As all stages use _accSyncRequest member we should wait for all stages tasks before the destructor destroy this member.
-    ~AcceleratorAsyncInferRequest() {
-        StopAndWait();
-    }
-
-    AcceleratorSyncRequest::Ptr _accSyncRequest;
-    ITaskExecutor::Ptr _preprocessExecutor, _writeToDeviceExecutor, _runOnDeviceExecutor, _readFromDeviceExecutor, _postProcessExecutor;
-};
-// ! [async_infer_request:define_pipeline]
diff --git a/src/inference/dev_api/cpp_interfaces/impl/ie_executable_network_thread_safe_default.hpp b/src/inference/dev_api/cpp_interfaces/impl/ie_executable_network_thread_safe_default.hpp
index 873e435bd89352..00de2c26128224 100644
--- a/src/inference/dev_api/cpp_interfaces/impl/ie_executable_network_thread_safe_default.hpp
+++ b/src/inference/dev_api/cpp_interfaces/impl/ie_executable_network_thread_safe_default.hpp
@@ -21,7 +21,7 @@ namespace InferenceEngine {
 * The class is recommended to be used as a base class for Executable Network implementation during plugin development.
 * @ingroup ie_dev_api_exec_network_api
 */
-class ExecutableNetworkThreadSafeDefault : public IExecutableNetworkInternal {
+class INFERENCE_ENGINE_1_0_DEPRECATED ExecutableNetworkThreadSafeDefault : public IExecutableNetworkInternal {
 public:
     /**
      * @brief A shared pointer to a ExecutableNetworkThreadSafeDefault object
diff --git a/src/inference/dev_api/cpp_interfaces/impl/ie_infer_async_request_thread_safe_default.hpp b/src/inference/dev_api/cpp_interfaces/impl/ie_infer_async_request_thread_safe_default.hpp
index 01c4b4a7342c56..e73b57004f2c4a 100644
--- a/src/inference/dev_api/cpp_interfaces/impl/ie_infer_async_request_thread_safe_default.hpp
+++ b/src/inference/dev_api/cpp_interfaces/impl/ie_infer_async_request_thread_safe_default.hpp
@@ -37,9 +37,8 @@ IE_SUPPRESS_DEPRECATED_START
 * Here is an example of asynchronous inference request implementation for some accelerator device.
 * It uses 5 different executors to run different stages of a synchronous inference request.
 *
- * @snippet example_async_infer_request.cpp async_infer_request:define_pipeline
 */
-class AsyncInferRequestThreadSafeDefault : public IInferRequestInternal {
+class INFERENCE_ENGINE_1_0_DEPRECATED AsyncInferRequestThreadSafeDefault : public IInferRequestInternal {
     enum InferState { Idle, Busy, Cancelled, Stop };
     using Futures = std::vector<std::shared_future<void>>;
     using Promise = std::shared_ptr<std::promise<void>>;
diff --git a/src/inference/dev_api/cpp_interfaces/interface/ie_iexecutable_network_internal.hpp b/src/inference/dev_api/cpp_interfaces/interface/ie_iexecutable_network_internal.hpp
index 49288ab7d0060b..1182260081543a 100644
--- a/src/inference/dev_api/cpp_interfaces/interface/ie_iexecutable_network_internal.hpp
+++ b/src/inference/dev_api/cpp_interfaces/interface/ie_iexecutable_network_internal.hpp
@@ -37,7 +37,7 @@ class ICompiledModelWrapper;
 * @brief An internal API of executable network to be implemented by plugin,
 * @ingroup ie_dev_api_exec_network_api
 */
-class INFERENCE_ENGINE_API_CLASS(IExecutableNetworkInternal)
+class INFERENCE_ENGINE_1_0_DEPRECATED INFERENCE_ENGINE_API_CLASS(IExecutableNetworkInternal)
     : public std::enable_shared_from_this<IExecutableNetworkInternal> {
 public:
     /**
diff --git a/src/inference/dev_api/cpp_interfaces/interface/ie_iinfer_request_internal.hpp b/src/inference/dev_api/cpp_interfaces/interface/ie_iinfer_request_internal.hpp
index 2aeccc8d0a1731..be3d58a621ec62 100644
--- a/src/inference/dev_api/cpp_interfaces/interface/ie_iinfer_request_internal.hpp
+++ b/src/inference/dev_api/cpp_interfaces/interface/ie_iinfer_request_internal.hpp
@@ -28,7 +28,8 @@ class IVariableStateInternal;
 * which is used in InferRequestBase forwarding mechanism
 * @ingroup ie_dev_api_infer_request_api
 */
-class INFERENCE_ENGINE_API_CLASS(IInferRequestInternal) : public std::enable_shared_from_this<IInferRequestInternal> {
+class INFERENCE_ENGINE_1_0_DEPRECATED INFERENCE_ENGINE_API_CLASS(IInferRequestInternal)
+    : public std::enable_shared_from_this<IInferRequestInternal> {
 public:
     /**
      * @brief A shared pointer to a IInferRequestInternal interface
diff --git a/src/inference/dev_api/cpp_interfaces/interface/ie_internal_plugin_config.hpp b/src/inference/dev_api/cpp_interfaces/interface/ie_internal_plugin_config.hpp
index aab0f2129ca3e1..eeac793acc7dcc 100644
--- a/src/inference/dev_api/cpp_interfaces/interface/ie_internal_plugin_config.hpp
+++ b/src/inference/dev_api/cpp_interfaces/interface/ie_internal_plugin_config.hpp
@@ -38,65 +38,65 @@ namespace PluginConfigInternalParams {
 * @brief Defines a low precision mode key
 * @ingroup ie_dev_api_plugin_api
 */
-DECLARE_CONFIG_KEY(LP_TRANSFORMS_MODE);
+INFERENCE_ENGINE_1_0_DEPRECATED DECLARE_CONFIG_KEY(LP_TRANSFORMS_MODE);
 
 /**
  * @brief Limit \#threads that are used by CPU Executor Streams to execute `parallel_for` calls
  * @ingroup ie_dev_api_plugin_api
  */
-DECLARE_CONFIG_KEY(CPU_THREADS_PER_STREAM);
+INFERENCE_ENGINE_1_0_DEPRECATED DECLARE_CONFIG_KEY(CPU_THREADS_PER_STREAM);
 
 /**
  * @brief Number of streams in Performance-core(big core)
  * @ingroup ie_dev_api_plugin_api
  * @brief Shortcut for defining internal configuration values
  */
-DECLARE_CONFIG_KEY(BIG_CORE_STREAMS);
+INFERENCE_ENGINE_1_0_DEPRECATED DECLARE_CONFIG_KEY(BIG_CORE_STREAMS);
 
 /**
  * @brief Number of streams in Efficient-core(small core) on hybrid cores machine
  * @ingroup ie_dev_api_plugin_api
  * @brief Shortcut for defining internal configuration values
  */
-DECLARE_CONFIG_KEY(SMALL_CORE_STREAMS);
+INFERENCE_ENGINE_1_0_DEPRECATED DECLARE_CONFIG_KEY(SMALL_CORE_STREAMS);
 
 /**
  * @brief Number of threads per stream in big cores
  * @ingroup ie_dev_api_plugin_api
  * @brief Shortcut for defining internal configuration values
  */
-DECLARE_CONFIG_KEY(THREADS_PER_STREAM_BIG);
+INFERENCE_ENGINE_1_0_DEPRECATED DECLARE_CONFIG_KEY(THREADS_PER_STREAM_BIG);
 
 /**
  * @brief Number of threads per stream in small cores on hybrid cores machine
  * @ingroup ie_dev_api_plugin_api
  * @brief Shortcut for defining internal configuration values
  */
-DECLARE_CONFIG_KEY(THREADS_PER_STREAM_SMALL);
+INFERENCE_ENGINE_1_0_DEPRECATED DECLARE_CONFIG_KEY(THREADS_PER_STREAM_SMALL);
 
 /**
  * @brief Small core start offset when binding cpu cores
  * @ingroup ie_dev_api_plugin_api
  * @brief Shortcut for defining internal configuration values
  */
-DECLARE_CONFIG_KEY(SMALL_CORE_OFFSET);
+INFERENCE_ENGINE_1_0_DEPRECATED DECLARE_CONFIG_KEY(SMALL_CORE_OFFSET);
 
 /**
  * @brief Defines how many records can be stored in the CPU runtime parameters cache per CPU runtime parameter type per
  * stream
  * @ingroup ie_dev_api_plugin_api
  */
-DECLARE_CONFIG_KEY(CPU_RUNTIME_CACHE_CAPACITY);
+INFERENCE_ENGINE_1_0_DEPRECATED DECLARE_CONFIG_KEY(CPU_RUNTIME_CACHE_CAPACITY);
 
 /**
  * @brief Internal device id for particular device (like GPU.0, GPU.1 etc)
  */
-DECLARE_CONFIG_KEY(CONFIG_DEVICE_ID);
+INFERENCE_ENGINE_1_0_DEPRECATED DECLARE_CONFIG_KEY(CONFIG_DEVICE_ID);
 
 /**
  * @brief enable hyper thread
  */
-DECLARE_CONFIG_KEY(ENABLE_HYPER_THREAD);
+INFERENCE_ENGINE_1_0_DEPRECATED DECLARE_CONFIG_KEY(ENABLE_HYPER_THREAD);
 
 /**
  * @brief Defines Snippets tokenization mode
@@ -105,10 +105,10 @@ DECLARE_CONFIG_KEY(ENABLE_HYPER_THREAD);
 * @param DISABLE - turn off the Snippets
 * @ingroup ie_dev_api_plugin_api
 */
-DECLARE_CONFIG_KEY(SNIPPETS_MODE);
-DECLARE_CONFIG_VALUE(ENABLE);
-DECLARE_CONFIG_VALUE(IGNORE_CALLBACK);
-DECLARE_CONFIG_VALUE(DISABLE);
+INFERENCE_ENGINE_1_0_DEPRECATED DECLARE_CONFIG_KEY(SNIPPETS_MODE);
+INFERENCE_ENGINE_1_0_DEPRECATED DECLARE_CONFIG_VALUE(ENABLE);
+INFERENCE_ENGINE_1_0_DEPRECATED DECLARE_CONFIG_VALUE(IGNORE_CALLBACK);
+INFERENCE_ENGINE_1_0_DEPRECATED DECLARE_CONFIG_VALUE(DISABLE);
 
 } // namespace PluginConfigInternalParams
 
diff --git a/src/inference/dev_api/cpp_interfaces/interface/ie_iplugin_internal.hpp b/src/inference/dev_api/cpp_interfaces/interface/ie_iplugin_internal.hpp
index eaa3f8c3aff344..c25ea4f72be37e 100644
--- a/src/inference/dev_api/cpp_interfaces/interface/ie_iplugin_internal.hpp
+++ b/src/inference/dev_api/cpp_interfaces/interface/ie_iplugin_internal.hpp
@@ -124,7 +124,8 @@ GetSupportedNodes(const std::shared_ptr<const ov::Model>& model,
 * @brief An API of plugin to be implemented by a plugin
 * @ingroup ie_dev_api_plugin_api
 */
-class INFERENCE_ENGINE_API_CLASS(IInferencePlugin) : public std::enable_shared_from_this<IInferencePlugin> {
+class INFERENCE_ENGINE_1_0_DEPRECATED INFERENCE_ENGINE_API_CLASS(IInferencePlugin)
+    : public std::enable_shared_from_this<IInferencePlugin> {
     class VersionStore : public Version {
         void copyFrom(const Version& v);
diff --git a/src/inference/dev_api/cpp_interfaces/interface/ie_ivariable_state_internal.hpp b/src/inference/dev_api/cpp_interfaces/interface/ie_ivariable_state_internal.hpp
index ac04183efbaf5f..134c7cf64fd9a7 100644
--- a/src/inference/dev_api/cpp_interfaces/interface/ie_ivariable_state_internal.hpp
+++ b/src/inference/dev_api/cpp_interfaces/interface/ie_ivariable_state_internal.hpp
@@ -12,12 +12,15 @@
 
 namespace InferenceEngine {
 
+IE_SUPPRESS_DEPRECATED_START
+
 /**
  * @interface IVariableStateInternal
  * @brief Minimal interface for variable state implementation
  * @ingroup ie_dev_api_variable_state_api
  */
-class INFERENCE_ENGINE_API_CLASS(IVariableStateInternal) : public std::enable_shared_from_this<IVariableStateInternal> {
+class INFERENCE_ENGINE_1_0_DEPRECATED INFERENCE_ENGINE_API_CLASS(IVariableStateInternal)
+    : public std::enable_shared_from_this<IVariableStateInternal> {
 public:
     /**
      * @brief A shared pointer to a IVariableStateInternal interface
@@ -74,5 +77,6 @@ using SoIVariableStateInternal = ov::SoPtr<IVariableStateInternal>;
 * @brief For compatibility reasons.
 */
 using MemoryStateInternal = IVariableStateInternal;
+IE_SUPPRESS_DEPRECATED_END
 
 } // namespace InferenceEngine
diff --git a/src/inference/dev_api/description_buffer.hpp b/src/inference/dev_api/description_buffer.hpp
index b5d2ee7653016b..fc4f2acdbb76c5 100644
--- a/src/inference/dev_api/description_buffer.hpp
+++ b/src/inference/dev_api/description_buffer.hpp
@@ -21,7 +21,7 @@ namespace InferenceEngine {
 * @brief A description buffer wrapping StatusCode and ResponseDesc
 * @ingroup ie_dev_api_error_debug
 */
-struct DescriptionBuffer : public std::basic_streambuf<char, std::char_traits<char>> {
+struct INFERENCE_ENGINE_1_0_DEPRECATED DescriptionBuffer : public std::basic_streambuf<char, std::char_traits<char>> {
     /**
      * @brief Creeates a description buffer with parameters
      *
diff --git a/src/inference/dev_api/ie_ngraph_utils.hpp b/src/inference/dev_api/ie_ngraph_utils.hpp
index e3f4542c7035ec..9d42867eba611f 100644
--- a/src/inference/dev_api/ie_ngraph_utils.hpp
+++ b/src/inference/dev_api/ie_ngraph_utils.hpp
@@ -16,7 +16,7 @@ namespace InferenceEngine {
 
 namespace details {
 
-inline ::ngraph::element::Type convertPrecision(const Precision& precision) {
+INFERENCE_ENGINE_1_0_DEPRECATED inline ::ngraph::element::Type convertPrecision(const Precision& precision) {
     Precision::ePrecision pType = precision;
     switch (pType) {
     case Precision::UNSPECIFIED:
@@ -61,11 +61,11 @@ inline ::ngraph::element::Type convertPrecision(const Precision& precision) {
     }
 }
 
-inline ::ngraph::element::Type convertPrecision(const std::string& precision) {
+INFERENCE_ENGINE_1_0_DEPRECATED inline ::ngraph::element::Type convertPrecision(const std::string& precision) {
     return ::ov::element::Type(precision);
 }
 
-inline Precision convertPrecision(const ::ngraph::element::Type& precision) {
+INFERENCE_ENGINE_1_0_DEPRECATED inline Precision convertPrecision(const ::ngraph::element::Type& precision) {
     switch (precision) {
     case ::ngraph::element::Type_t::undefined:
         return Precision(Precision::UNSPECIFIED);
@@ -115,7 +115,7 @@ inline Precision convertPrecision(const ::ngraph::element::Type& precision) {
 * @param network A network to clone
 * @return A cloned object
 */
-INFERENCE_ENGINE_API_CPP(CNNNetwork) cloneNetwork(const CNNNetwork& network);
+INFERENCE_ENGINE_1_0_DEPRECATED INFERENCE_ENGINE_API_CPP(CNNNetwork) cloneNetwork(const CNNNetwork& network);
 
 } // namespace details
 } // namespace InferenceEngine
diff --git a/src/inference/src/cpp/ie_variable_state.cpp b/src/inference/src/cpp/ie_variable_state.cpp
index b58d8de76e8e75..1cb0708f4b904f 100644
--- a/src/inference/src/cpp/ie_variable_state.cpp
+++ b/src/inference/src/cpp/ie_variable_state.cpp
@@ -8,6 +8,7 @@
 #include "openvino/runtime/ivariable_state.hpp"
 #include "openvino/runtime/variable_state.hpp"
 
+IE_SUPPRESS_DEPRECATED_START
 #define VARIABLE_CALL_STATEMENT(...)                                    \
     if (_impl == nullptr)                                               \
         IE_THROW(NotAllocated) << "VariableState was not initialized."; \
@@ -40,8 +41,6 @@ VariableState::VariableState(const IVariableStateInternal::Ptr& impl, const std:
         IE_THROW() << "VariableState was not initialized.";
 }
 
-IE_SUPPRESS_DEPRECATED_START
-
 void VariableState::Reset() {
     VARIABLE_CALL_STATEMENT(_impl->Reset());
 }
diff --git a/src/inference/tests/functional/response_buffer_test.cpp b/src/inference/tests/functional/response_buffer_test.cpp
index 4d75bdfffdc82f..3d5d3ecdfd2dcc 100644
--- a/src/inference/tests/functional/response_buffer_test.cpp
+++ b/src/inference/tests/functional/response_buffer_test.cpp
@@ -9,6 +9,7 @@
 using namespace std;
 using namespace InferenceEngine;
 
+IE_SUPPRESS_DEPRECATED_START
 using ResponseBufferTests = ::testing::Test;
 
 TEST_F(ResponseBufferTests, canCreateResponseMessage) {
diff --git a/src/inference/tests/unit/cpp_interfaces/exception_test.cpp b/src/inference/tests/unit/cpp_interfaces/exception_test.cpp
index 5602dedce2807a..c830f35cb285f6 100644
--- a/src/inference/tests/unit/cpp_interfaces/exception_test.cpp
+++ b/src/inference/tests/unit/cpp_interfaces/exception_test.cpp
@@ -7,6 +7,7 @@
 #include
 
 using namespace InferenceEngine;
+IE_SUPPRESS_DEPRECATED_START
 
 using ExceptionTests = ::testing::Test;
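
Note on the pattern this patch applies: the deprecation attribute is attached once at each declaration (INFERENCE_ENGINE_1_0_DEPRECATED), and the files that must keep using those declarations, such as ie_variable_state.cpp and the two tests above, wrap that usage in IE_SUPPRESS_DEPRECATED_START / IE_SUPPRESS_DEPRECATED_END so builds with warnings-as-errors stay clean. The sketch below is not the actual OpenVINO macro definitions (those live in the Inference Engine API headers and vary by compiler); MY_DEPRECATED, MY_SUPPRESS_START and MY_SUPPRESS_END are simplified stand-ins in GCC/Clang syntax, shown only to illustrate the declare-then-suppress idiom.

    // Minimal, self-contained sketch of the deprecate/suppress pattern (GCC/Clang syntax).
    // MY_DEPRECATED, MY_SUPPRESS_START and MY_SUPPRESS_END are hypothetical stand-ins for
    // INFERENCE_ENGINE_1_0_DEPRECATED / IE_SUPPRESS_DEPRECATED_START / IE_SUPPRESS_DEPRECATED_END.
    #include <iostream>

    #define MY_DEPRECATED [[deprecated("use the OpenVINO 2.0 API instead")]]

    #define MY_SUPPRESS_START \
        _Pragma("GCC diagnostic push") _Pragma("GCC diagnostic ignored \"-Wdeprecated-declarations\"")
    #define MY_SUPPRESS_END _Pragma("GCC diagnostic pop")

    // Declaration side: mark the legacy class once; every use site now gets a warning.
    class MY_DEPRECATED LegacyInferRequest {
    public:
        void infer() { std::cout << "running legacy request\n"; }
    };

    // Use side: code that must keep using the legacy class (tests, compatibility wrappers)
    // wraps the usage so -Werror builds stay clean.
    MY_SUPPRESS_START
    int main() {
        LegacyInferRequest request;  // no -Wdeprecated-declarations diagnostic inside this block
        request.infer();
        return 0;
    }
    MY_SUPPRESS_END

Compiled with -Wdeprecated-declarations -Werror, the suppression block is what keeps the legacy usage building while every other caller still sees the deprecation warning.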
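
Separately, since example_async_infer_request.cpp is deleted and the @snippet reference is dropped from the AsyncInferRequestThreadSafeDefault comment, the remaining doc text about running five stages on five executors loses its illustration. Below is a minimal, generic sketch of that staged-pipeline idea; Task, SerialExecutor and the pipeline vector are hypothetical stand-ins, not the InferenceEngine ITaskExecutor API.

    // Self-contained sketch of a staged pipeline: each stage is an {executor, task} pair,
    // stages run strictly in order, and each task is dispatched on its own executor.
    #include <functional>
    #include <future>
    #include <iostream>
    #include <utility>
    #include <vector>

    using Task = std::function<void()>;

    // Trivial executor: launches the task on a std::async thread and waits for it,
    // which is enough to show the "one executor per stage" structure.
    struct SerialExecutor {
        void run(Task task) {
            std::async(std::launch::async, std::move(task)).wait();
        }
    };

    int main() {
        SerialExecutor preprocess_exec, write_exec, run_exec, read_exec, postprocess_exec;

        // Mirrors the five {executor, stage} pairs stored in _pipeline in the deleted snippet.
        std::vector<std::pair<SerialExecutor*, Task>> pipeline = {
            {&preprocess_exec, [] { std::cout << "preprocess\n"; }},
            {&write_exec, [] { std::cout << "write_to_device\n"; }},
            {&run_exec, [] { std::cout << "run_on_device\n"; }},
            {&read_exec, [] { std::cout << "read_from_device\n"; }},
            {&postprocess_exec, [] { std::cout << "post_process\n"; }},
        };

        // Stages execute in order, but each one on its own executor.
        for (auto& stage : pipeline) {
            stage.first->run(stage.second);
        }
        return 0;
    }

The point of the structure is the same as in the deleted snippet: the stage order is fixed, but each stage can be scheduled on a different executor (thread pool, device queue, and so on).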