diff --git a/src/inference/dev_api/openvino/runtime/icore.hpp b/src/inference/dev_api/openvino/runtime/icore.hpp
index b8ea63086169ea..5e7e9401312cf2 100644
--- a/src/inference/dev_api/openvino/runtime/icore.hpp
+++ b/src/inference/dev_api/openvino/runtime/icore.hpp
@@ -21,7 +21,7 @@ namespace ov {
 /**
  * @interface ICore
  * @brief Minimal ICore interface to allow plugin to get information from Core Inference Engine class.
- * @ingroup ie_dev_api_plugin_api
+ * @ingroup ov_dev_api_plugin_api
  */
 class ICore {
 public:
diff --git a/src/inference/dev_api/openvino/runtime/iplugin.hpp b/src/inference/dev_api/openvino/runtime/iplugin.hpp
index e2a4b4110d3514..653d44c0fc2198 100644
--- a/src/inference/dev_api/openvino/runtime/iplugin.hpp
+++ b/src/inference/dev_api/openvino/runtime/iplugin.hpp
@@ -209,7 +209,7 @@ class OPENVINO_RUNTIME_API IPlugin : public std::enable_shared_from_this
-#include
 #include
+#include "openvino/runtime/common.hpp"
 #include "openvino/runtime/threading/istreams_executor.hpp"

 namespace ov {
diff --git a/src/plugins/template/src/compiled_model.cpp b/src/plugins/template/src/compiled_model.cpp
index d08a8352af47d4..c4cfc59dfa3bc4 100644
--- a/src/plugins/template/src/compiled_model.cpp
+++ b/src/plugins/template/src/compiled_model.cpp
@@ -9,6 +9,7 @@
 #include "async_infer_request.hpp"
 #include "ie_ngraph_utils.hpp"
 #include "ie_plugin_config.hpp"
+#include "openvino/runtime/properties.hpp"
 #include "plugin.hpp"
 #include "template/config.hpp"
 #include "template_itt.hpp"
@@ -16,66 +17,6 @@

 using namespace TemplatePlugin;

-namespace {
-
-InferenceEngine::SizeVector get_dims(const ov::Output<const ov::Node>& port) {
-    InferenceEngine::SizeVector dims = {};
-    const auto& p_shape = port.get_partial_shape();
-    if (p_shape.is_static())
-        dims = p_shape.get_shape();
-    return dims;
-}
-
-}  // namespace
-
-namespace ov {
-namespace legacy_convert {
-
-void fill_input_info(const ov::Output<const ov::Node>& input, InferenceEngine::InputInfo::Ptr& input_info) {
-    if (!input_info) {
-        // Create input info
-        auto param_name = input.get_node()->get_friendly_name();
-        auto dims = get_dims(input);
-        InferenceEngine::TensorDesc desc(InferenceEngine::details::convertPrecision(input.get_element_type()),
-                                         dims,
-                                         InferenceEngine::TensorDesc::getLayoutByDims(dims));
-        auto data = std::make_shared<InferenceEngine::Data>(param_name, desc);
-        input_info = std::make_shared<InferenceEngine::InputInfo>();
-        input_info->setInputData(data);
-    }
-    auto& rt_info = input.get_rt_info();
-    auto it = rt_info.find("ie_legacy_preproc");
-    if (it != rt_info.end()) {
-        input_info->getPreProcess() = it->second.as<InferenceEngine::PreProcessInfo>();
-    }
-    it = rt_info.find("ie_legacy_td");
-    if (it != rt_info.end()) {
-        auto td = it->second.as<InferenceEngine::TensorDesc>();
-        input_info->getInputData()->reshape(td.getDims(), td.getLayout());
-        input_info->setPrecision(td.getPrecision());
-    }
-}
-void fill_output_info(const ov::Output<const ov::Node>& output, InferenceEngine::DataPtr& output_info) {
-    if (!output_info) {
-        // Create input info
-        const auto& res_name = ov::op::util::create_ie_output_name(output);
-        auto dims = get_dims(output);
-        InferenceEngine::TensorDesc desc(InferenceEngine::details::convertPrecision(output.get_element_type()),
-                                         dims,
-                                         InferenceEngine::TensorDesc::getLayoutByDims(dims));
-        output_info = std::make_shared<InferenceEngine::Data>(res_name, desc);
-    }
-    auto& rt_info = output.get_rt_info();
-    auto it = rt_info.find("ie_legacy_td");
-    if (it != rt_info.end()) {
-        auto td = it->second.as<InferenceEngine::TensorDesc>();
-        output_info->reshape(td.getDims(), td.getLayout());
-        output_info->setPrecision(td.getPrecision());
-    }
-}
-}  // namespace legacy_convert
-}  // namespace ov
-
 // ! [executable_network:ctor_cnnnetwork]
 TemplatePlugin::CompiledModel::CompiledModel(const std::shared_ptr<ov::Model>& model,
                                              const std::shared_ptr<const ov::IPlugin>& plugin,
@@ -89,8 +30,9 @@ TemplatePlugin::CompiledModel::CompiledModel(const std::shared_ptr& m
     // In this case, _waitExecutor should also be created per device.
     try {
         compile_model(m_model);
-    } catch (const InferenceEngine::Exception&) {
-        throw;
+    } catch (const InferenceEngine::Exception& e) {
+        // Some transformations can throw legacy exception
+        throw ov::Exception(e.what());
     } catch (const std::exception& e) {
         OPENVINO_ASSERT(false, "Standard exception from compilation library: ", e.what());
     } catch (...) {
@@ -148,7 +90,7 @@ std::shared_ptr TemplatePlugin::CompiledModel::get_template_plugin
 }

 // ! [executable_network:get_config]
-InferenceEngine::Parameter TemplatePlugin::CompiledModel::get_property(const std::string& name) const {
+ov::Any TemplatePlugin::CompiledModel::get_property(const std::string& name) const {
     const auto& add_ro_properties = [](const std::string& name, std::vector<ov::PropertyName>& properties) {
         properties.emplace_back(ov::PropertyName{name, ov::PropertyMutability::RO});
     };
@@ -179,7 +121,9 @@ InferenceEngine::Parameter TemplatePlugin::CompiledModel::get_property(const std
         return to_string_vector(metrics);
     } else if (EXEC_NETWORK_METRIC_KEY(SUPPORTED_CONFIG_KEYS) == name) {
         auto configs = default_rw_properties();
-        auto streamExecutorConfigKeys = InferenceEngine::IStreamsExecutor::Config{}.SupportedKeys();
+        auto streamExecutorConfigKeys = ov::threading::IStreamsExecutor::Config{}
+                                            .get_property(ov::supported_properties.name())
+                                            .as<std::vector<std::string>>();
         for (auto&& configKey : streamExecutorConfigKeys) {
             configs.emplace_back(configKey);
         }
diff --git a/src/plugins/template/src/plugin.cpp b/src/plugins/template/src/plugin.cpp
index 5c3aa091d4ae5b..e5343f53fc6dee 100644
--- a/src/plugins/template/src/plugin.cpp
+++ b/src/plugins/template/src/plugin.cpp
@@ -131,7 +131,7 @@ std::shared_ptr TemplatePlugin::Plugin::import_model(std::is
     auto ov_model = get_core()->read_model(xmlString, weights);

     auto streamsExecutorConfig =
-        InferenceEngine::IStreamsExecutor::Config::MakeDefaultMultiThreaded(fullConfig._streamsExecutorConfig);
+        ov::threading::IStreamsExecutor::Config::make_default_multi_threaded(fullConfig._streamsExecutorConfig);
     streamsExecutorConfig._name = stream_executor_name;
     auto compiled_model = std::make_shared<CompiledModel>(ov_model,
@@ -236,7 +236,9 @@ ov::Any TemplatePlugin::Plugin::get_property(const std::string& name, const ov::
         return to_string_vector(metrics);
     } else if (METRIC_KEY(SUPPORTED_CONFIG_KEYS) == name) {
         auto configs = default_rw_properties();
-        auto streamExecutorConfigKeys = InferenceEngine::IStreamsExecutor::Config{}.SupportedKeys();
+        auto streamExecutorConfigKeys = ov::threading::IStreamsExecutor::Config{}
+                                            .get_property(ov::supported_properties.name())
+                                            .as<std::vector<std::string>>();
         for (auto&& configKey : streamExecutorConfigKeys) {
             if (configKey != InferenceEngine::PluginConfigParams::KEY_CPU_THROUGHPUT_STREAMS) {
                 configs.emplace_back(configKey);
diff --git a/src/plugins/template/src/template_config.cpp b/src/plugins/template/src/template_config.cpp
index 16ae7092842798..840d1ae85417dc 100644
--- a/src/plugins/template/src/template_config.cpp
+++ b/src/plugins/template/src/template_config.cpp
@@ -15,7 +15,7 @@ Configuration::Configuration() {}

 Configuration::Configuration(const ConfigMap& config, const Configuration& defaultCfg, bool throwOnUnsupported) {
     *this = defaultCfg;
-    // If plugin needs to use InferenceEngine::StreamsExecutor it should be able to process its configuration
+    // If plugin needs to use ov::threading::StreamsExecutor it should be able to process its configuration
     auto streamExecutorConfigKeys =
         _streamsExecutorConfig.get_property(ov::supported_properties.name()).as<std::vector<std::string>>();
     for (auto&& c : config) {