Deprecate main IE developer API classes #17983

Merged
merged 3 commits into from
Jun 10, 2023
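
The change applies the existing INFERENCE_ENGINE_1_0_DEPRECATED attribute macro to the Inference Engine 1.0 developer API classes and brackets legacy call sites with IE_SUPPRESS_DEPRECATED_START / IE_SUPPRESS_DEPRECATED_END. The PR does not define those macros; as a rough, hypothetical sketch of how such a pair usually works (the MY_* names below are stand-ins, not the OpenVINO definitions), the attribute expands to a compiler deprecation marker and the suppression pair to push/pop pragmas:

// Illustrative only: hypothetical stand-ins for the real OpenVINO macros.
#if defined(_MSC_VER)
#    define MY_DEPRECATED(msg) __declspec(deprecated(msg))
#    define MY_SUPPRESS_DEPRECATED_START __pragma(warning(push)) __pragma(warning(disable : 4996))
#    define MY_SUPPRESS_DEPRECATED_END __pragma(warning(pop))
#else
#    define MY_DEPRECATED(msg) __attribute__((deprecated(msg)))
#    define MY_SUPPRESS_DEPRECATED_START \
        _Pragma("GCC diagnostic push") _Pragma("GCC diagnostic ignored \"-Wdeprecated-declarations\"")
#    define MY_SUPPRESS_DEPRECATED_END _Pragma("GCC diagnostic pop")
#endif

// A class marked this way still compiles; any use outside a suppressed
// region emits a deprecation warning at the call site.
class MY_DEPRECATED("Use the OpenVINO 2.0 (ov::) API instead") OldApiClass {};

MY_SUPPRESS_DEPRECATED_START
OldApiClass legacy_instance;  // no warning: usage is inside the suppressed region
MY_SUPPRESS_DEPRECATED_END

Each deprecated declaration in the hunks below follows the same pattern: the macro is placed right after the class/struct keyword or before the function declaration, and headers or sources that must keep using the deprecated types internally are wrapped in the suppression pair.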
Changes from all commits
78 changes: 0 additions & 78 deletions docs/snippets/example_async_infer_request.cpp

This file was deleted.

@@ -21,7 +21,7 @@ namespace InferenceEngine {
* The class is recommended to be used as a base class for Executable Network implementation during plugin development.
* @ingroup ie_dev_api_exec_network_api
*/
class ExecutableNetworkThreadSafeDefault : public IExecutableNetworkInternal {
class INFERENCE_ENGINE_1_0_DEPRECATED ExecutableNetworkThreadSafeDefault : public IExecutableNetworkInternal {
public:
/**
* @brief A shared pointer to a ExecutableNetworkThreadSafeDefault object
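
The comment above recommends ExecutableNetworkThreadSafeDefault as a base class for a plugin's executable-network implementation; with the class now deprecated, a plugin that still derives from it would normally bracket the derivation with the suppression macros. A minimal sketch, assuming the dev-API header is on the include path (MyExecutableNetwork and its body are hypothetical):

IE_SUPPRESS_DEPRECATED_START
// Hypothetical plugin class for illustration; real implementations override
// the request-creation hooks declared by the base class.
class MyExecutableNetwork : public InferenceEngine::ExecutableNetworkThreadSafeDefault {
public:
    // Plugin-specific overrides would go here.
};
IE_SUPPRESS_DEPRECATED_END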
@@ -37,9 +37,8 @@ IE_SUPPRESS_DEPRECATED_START
* Here is an example of asynchronous inference request implementation for some accelerator device.
* It uses 5 different executors to run different stages of a synchronous inference request.
*
* @snippet example_async_infer_request.cpp async_infer_request:define_pipeline
*/
class AsyncInferRequestThreadSafeDefault : public IInferRequestInternal {
class INFERENCE_ENGINE_1_0_DEPRECATED AsyncInferRequestThreadSafeDefault : public IInferRequestInternal {
enum InferState { Idle, Busy, Cancelled, Stop };
using Futures = std::vector<std::shared_future<void>>;
using Promise = std::shared_ptr<std::promise<void>>;
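
The @snippet reference above is dropped because this PR deletes docs/snippets/example_async_infer_request.cpp, which held the referenced example. The idea it illustrated, splitting one synchronous request into stages that run on five different executors, can still be sketched in isolation; everything below (the tiny Executor type and the stage names) is invented for illustration and is not the deleted snippet:

#include <functional>
#include <iostream>
#include <utility>
#include <vector>

// Minimal stand-in for a task executor; real plugins use ITaskExecutor.
struct Executor {
    const char* name;
    void run(const std::function<void()>& task) const {
        std::cout << "[" << name << "] ";
        task();
    }
};

int main() {
    // One executor per pipeline stage, as the comment describes for an
    // accelerator device (stage names are hypothetical).
    Executor preprocess{"preprocess"}, upload{"copy-to-device"}, infer{"device"},
             download{"copy-from-device"}, postprocess{"postprocess"};

    // Each stage of the synchronous request is submitted to its own executor.
    std::vector<std::pair<Executor*, std::function<void()>>> pipeline = {
        {&preprocess,  [] { std::cout << "prepare inputs\n"; }},
        {&upload,      [] { std::cout << "upload blobs\n"; }},
        {&infer,       [] { std::cout << "run inference\n"; }},
        {&download,    [] { std::cout << "download results\n"; }},
        {&postprocess, [] { std::cout << "convert outputs\n"; }},
    };
    for (auto& stage : pipeline)
        stage.first->run(stage.second);
}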
@@ -37,7 +37,7 @@ class ICompiledModelWrapper;
* @brief An internal API of executable network to be implemented by plugin,
* @ingroup ie_dev_api_exec_network_api
*/
class INFERENCE_ENGINE_API_CLASS(IExecutableNetworkInternal)
class INFERENCE_ENGINE_1_0_DEPRECATED INFERENCE_ENGINE_API_CLASS(IExecutableNetworkInternal)
: public std::enable_shared_from_this<IExecutableNetworkInternal> {
public:
/**
@@ -28,7 +28,8 @@ class IVariableStateInternal;
* which is used in InferRequestBase forwarding mechanism
* @ingroup ie_dev_api_infer_request_api
*/
class INFERENCE_ENGINE_API_CLASS(IInferRequestInternal) : public std::enable_shared_from_this<IInferRequestInternal> {
class INFERENCE_ENGINE_1_0_DEPRECATED INFERENCE_ENGINE_API_CLASS(IInferRequestInternal)
: public std::enable_shared_from_this<IInferRequestInternal> {
public:
/**
* @brief A shared pointer to a IInferRequestInternal interface
@@ -38,65 +38,65 @@ namespace PluginConfigInternalParams {
* @brief Defines a low precision mode key
* @ingroup ie_dev_api_plugin_api
*/
DECLARE_CONFIG_KEY(LP_TRANSFORMS_MODE);
INFERENCE_ENGINE_1_0_DEPRECATED DECLARE_CONFIG_KEY(LP_TRANSFORMS_MODE);

/**
* @brief Limit \#threads that are used by CPU Executor Streams to execute `parallel_for` calls
* @ingroup ie_dev_api_plugin_api
*/
DECLARE_CONFIG_KEY(CPU_THREADS_PER_STREAM);
INFERENCE_ENGINE_1_0_DEPRECATED DECLARE_CONFIG_KEY(CPU_THREADS_PER_STREAM);

/**
* @brief Number of streams in Performance-core(big core)
* @ingroup ie_dev_api_plugin_api
* @brief Shortcut for defining internal configuration values
*/
DECLARE_CONFIG_KEY(BIG_CORE_STREAMS);
INFERENCE_ENGINE_1_0_DEPRECATED DECLARE_CONFIG_KEY(BIG_CORE_STREAMS);

/**
* @brief Number of streams in Efficient-core(small core) on hybrid cores machine
* @ingroup ie_dev_api_plugin_api
* @brief Shortcut for defining internal configuration values
*/
DECLARE_CONFIG_KEY(SMALL_CORE_STREAMS);
INFERENCE_ENGINE_1_0_DEPRECATED DECLARE_CONFIG_KEY(SMALL_CORE_STREAMS);

/**
* @brief Number of threads per stream in big cores
* @ingroup ie_dev_api_plugin_api
* @brief Shortcut for defining internal configuration values
*/
DECLARE_CONFIG_KEY(THREADS_PER_STREAM_BIG);
INFERENCE_ENGINE_1_0_DEPRECATED DECLARE_CONFIG_KEY(THREADS_PER_STREAM_BIG);

/**
* @brief Number of threads per stream in small cores on hybrid cores machine
* @ingroup ie_dev_api_plugin_api
* @brief Shortcut for defining internal configuration values
*/
DECLARE_CONFIG_KEY(THREADS_PER_STREAM_SMALL);
INFERENCE_ENGINE_1_0_DEPRECATED DECLARE_CONFIG_KEY(THREADS_PER_STREAM_SMALL);

/**
* @brief Small core start offset when binding cpu cores
* @ingroup ie_dev_api_plugin_api
* @brief Shortcut for defining internal configuration values
*/
DECLARE_CONFIG_KEY(SMALL_CORE_OFFSET);
INFERENCE_ENGINE_1_0_DEPRECATED DECLARE_CONFIG_KEY(SMALL_CORE_OFFSET);

/**
* @brief Defines how many records can be stored in the CPU runtime parameters cache per CPU runtime parameter type per
* stream
* @ingroup ie_dev_api_plugin_api
*/
DECLARE_CONFIG_KEY(CPU_RUNTIME_CACHE_CAPACITY);
INFERENCE_ENGINE_1_0_DEPRECATED DECLARE_CONFIG_KEY(CPU_RUNTIME_CACHE_CAPACITY);

/**
* @brief Internal device id for particular device (like GPU.0, GPU.1 etc)
*/
DECLARE_CONFIG_KEY(CONFIG_DEVICE_ID);
INFERENCE_ENGINE_1_0_DEPRECATED DECLARE_CONFIG_KEY(CONFIG_DEVICE_ID);

/**
* @brief enable hyper thread
*/
DECLARE_CONFIG_KEY(ENABLE_HYPER_THREAD);
INFERENCE_ENGINE_1_0_DEPRECATED DECLARE_CONFIG_KEY(ENABLE_HYPER_THREAD);

/**
* @brief Defines Snippets tokenization mode
@@ -105,10 +105,10 @@ DECLARE_CONFIG_KEY(ENABLE_HYPER_THREAD);
* @param DISABLE - turn off the Snippets
* @ingroup ie_dev_api_plugin_api
*/
DECLARE_CONFIG_KEY(SNIPPETS_MODE);
DECLARE_CONFIG_VALUE(ENABLE);
DECLARE_CONFIG_VALUE(IGNORE_CALLBACK);
DECLARE_CONFIG_VALUE(DISABLE);
INFERENCE_ENGINE_1_0_DEPRECATED DECLARE_CONFIG_KEY(SNIPPETS_MODE);
INFERENCE_ENGINE_1_0_DEPRECATED DECLARE_CONFIG_VALUE(ENABLE);
INFERENCE_ENGINE_1_0_DEPRECATED DECLARE_CONFIG_VALUE(IGNORE_CALLBACK);
INFERENCE_ENGINE_1_0_DEPRECATED DECLARE_CONFIG_VALUE(DISABLE);

} // namespace PluginConfigInternalParams

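
All of the internal keys above are produced by DECLARE_CONFIG_KEY / DECLARE_CONFIG_VALUE and are now individually marked deprecated. As a hedged sketch of the general pattern such macros follow (the MY_* names and the SetConfig helper are hypothetical, not the OpenVINO definitions), a key boils down to a named string constant that a plugin's configuration handling compares against:

#include <map>
#include <stdexcept>
#include <string>

// Hypothetical stand-ins for DECLARE_CONFIG_KEY / DECLARE_CONFIG_VALUE:
// a key is just a named string constant the plugin can look up.
#define MY_DECLARE_CONFIG_KEY(name) static constexpr auto KEY_##name = #name
#define MY_DECLARE_CONFIG_VALUE(name) static constexpr auto name = #name

namespace MyConfigParams {
MY_DECLARE_CONFIG_KEY(SNIPPETS_MODE);  // -> static constexpr auto KEY_SNIPPETS_MODE = "SNIPPETS_MODE";
MY_DECLARE_CONFIG_VALUE(ENABLE);
MY_DECLARE_CONFIG_VALUE(DISABLE);
}  // namespace MyConfigParams

// Sketch of how a plugin's SetConfig might consume the key and its values.
void SetConfig(const std::map<std::string, std::string>& config) {
    auto it = config.find(MyConfigParams::KEY_SNIPPETS_MODE);
    if (it != config.end() && it->second != MyConfigParams::ENABLE && it->second != MyConfigParams::DISABLE)
        throw std::invalid_argument("Unsupported SNIPPETS_MODE value: " + it->second);
}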
@@ -124,7 +124,8 @@ GetSupportedNodes(const std::shared_ptr<const ov::Model>& model,
* @brief An API of plugin to be implemented by a plugin
* @ingroup ie_dev_api_plugin_api
*/
class INFERENCE_ENGINE_API_CLASS(IInferencePlugin) : public std::enable_shared_from_this<IInferencePlugin> {
class INFERENCE_ENGINE_1_0_DEPRECATED INFERENCE_ENGINE_API_CLASS(IInferencePlugin)
: public std::enable_shared_from_this<IInferencePlugin> {
class VersionStore : public Version {
void copyFrom(const Version& v);

@@ -12,12 +12,15 @@

namespace InferenceEngine {

IE_SUPPRESS_DEPRECATED_START

/**
* @interface IVariableStateInternal
* @brief Minimal interface for variable state implementation
* @ingroup ie_dev_api_variable_state_api
*/
class INFERENCE_ENGINE_API_CLASS(IVariableStateInternal) : public std::enable_shared_from_this<IVariableStateInternal> {
class INFERENCE_ENGINE_1_0_DEPRECATED INFERENCE_ENGINE_API_CLASS(IVariableStateInternal)
: public std::enable_shared_from_this<IVariableStateInternal> {
public:
/**
* @brief A shared pointer to a IVariableStateInternal interface
@@ -74,5 +77,6 @@ using SoIVariableStateInternal = ov::SoPtr<IVariableStateInternal>;
* @brief For compatibility reasons.
*/
using MemoryStateInternal = IVariableStateInternal;
IE_SUPPRESS_DEPRECATED_END

} // namespace InferenceEngine
2 changes: 1 addition & 1 deletion src/inference/dev_api/description_buffer.hpp
@@ -21,7 +21,7 @@ namespace InferenceEngine {
* @brief A description buffer wrapping StatusCode and ResponseDesc
* @ingroup ie_dev_api_error_debug
*/
struct DescriptionBuffer : public std::basic_streambuf<char, std::char_traits<char>> {
struct INFERENCE_ENGINE_1_0_DEPRECATED DescriptionBuffer : public std::basic_streambuf<char, std::char_traits<char>> {
/**
 * @brief Creates a description buffer with parameters
*
8 changes: 4 additions & 4 deletions src/inference/dev_api/ie_ngraph_utils.hpp
@@ -16,7 +16,7 @@
namespace InferenceEngine {
namespace details {

inline ::ngraph::element::Type convertPrecision(const Precision& precision) {
INFERENCE_ENGINE_1_0_DEPRECATED inline ::ngraph::element::Type convertPrecision(const Precision& precision) {
Precision::ePrecision pType = precision;
switch (pType) {
case Precision::UNSPECIFIED:
@@ -61,11 +61,11 @@ inline ::ngraph::element::Type convertPrecision(const Precision& precision) {
}
}

inline ::ngraph::element::Type convertPrecision(const std::string& precision) {
INFERENCE_ENGINE_1_0_DEPRECATED inline ::ngraph::element::Type convertPrecision(const std::string& precision) {
return ::ov::element::Type(precision);
}

inline Precision convertPrecision(const ::ngraph::element::Type& precision) {
INFERENCE_ENGINE_1_0_DEPRECATED inline Precision convertPrecision(const ::ngraph::element::Type& precision) {
switch (precision) {
case ::ngraph::element::Type_t::undefined:
return Precision(Precision::UNSPECIFIED);
@@ -115,7 +115,7 @@ inline Precision convertPrecision(const ::ngraph::element::Type& precision) {
* @param network A network to clone
* @return A cloned object
*/
INFERENCE_ENGINE_API_CPP(CNNNetwork) cloneNetwork(const CNNNetwork& network);
INFERENCE_ENGINE_1_0_DEPRECATED INFERENCE_ENGINE_API_CPP(CNNNetwork) cloneNetwork(const CNNNetwork& network);

} // namespace details
} // namespace InferenceEngine
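
Because the convertPrecision overloads above are now deprecated, code that still bridges InferenceEngine::Precision and the ngraph/ov element types is expected to sit inside the suppression guards. A small usage sketch, assuming src/inference/dev_api is on the include path (the wrapper function is hypothetical):

#include "ie_ngraph_utils.hpp"  // dev-API header changed in this PR; include path assumed

IE_SUPPRESS_DEPRECATED_START
void convert_example() {
    // Legacy precision -> element type, then back again.
    auto element_type = InferenceEngine::details::convertPrecision(InferenceEngine::Precision::FP32);
    auto precision = InferenceEngine::details::convertPrecision(element_type);
    (void)precision;
}
IE_SUPPRESS_DEPRECATED_END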
3 changes: 1 addition & 2 deletions src/inference/src/cpp/ie_variable_state.cpp
@@ -8,6 +8,7 @@
#include "openvino/runtime/ivariable_state.hpp"
#include "openvino/runtime/variable_state.hpp"

IE_SUPPRESS_DEPRECATED_START
#define VARIABLE_CALL_STATEMENT(...) \
if (_impl == nullptr) \
IE_THROW(NotAllocated) << "VariableState was not initialized."; \
@@ -40,8 +41,6 @@ VariableState::VariableState(const IVariableStateInternal::Ptr& impl, const std:
IE_THROW() << "VariableState was not initialized.";
}

IE_SUPPRESS_DEPRECATED_START

void VariableState::Reset() {
VARIABLE_CALL_STATEMENT(_impl->Reset());
}
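
IE_SUPPRESS_DEPRECATED_START moves up in this file so that it already covers the VARIABLE_CALL_STATEMENT definition, which references the now-deprecated types. The macro body is only partially visible in this hunk; guard macros of this kind usually validate the implementation pointer and then forward the call inside a try block. A hypothetical sketch of that general shape (not the exact OpenVINO macro):

#include <stdexcept>

// Hypothetical forward-to-impl guard; the real VARIABLE_CALL_STATEMENT in
// ie_variable_state.cpp may differ in its error handling.
#define MY_CALL_STATEMENT(...)                                              \
    if (_impl == nullptr)                                                   \
        throw std::runtime_error("VariableState was not initialized.");    \
    try {                                                                   \
        __VA_ARGS__;                                                        \
    } catch (const std::exception& ex) {                                    \
        throw std::runtime_error(ex.what());                                \
    }

// Typical call site, mirroring the forwarding methods shown above:
//     void Reset() { MY_CALL_STATEMENT(_impl->Reset()); }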
1 change: 1 addition & 0 deletions src/inference/tests/functional/response_buffer_test.cpp
@@ -9,6 +9,7 @@
using namespace std;
using namespace InferenceEngine;

IE_SUPPRESS_DEPRECATED_START
using ResponseBufferTests = ::testing::Test;

TEST_F(ResponseBufferTests, canCreateResponseMessage) {
@@ -7,6 +7,7 @@
#include <cpp/exception2status.hpp>

using namespace InferenceEngine;
IE_SUPPRESS_DEPRECATED_START

using ExceptionTests = ::testing::Test;
