From cae554a4194cd529e3cee3c643df52f4eaa3931f Mon Sep 17 00:00:00 2001 From: "River.Li" Date: Fri, 8 Dec 2023 10:18:25 +0800 Subject: [PATCH] Use .as() to convert std::string to std::AnyMap --- src/inference/src/dev/core_impl.cpp | 2 +- .../tests/functional/caching_test.cpp | 2 +- src/plugins/intel_cpu/src/plugin.cpp | 24 ++++++++---- src/plugins/intel_cpu/src/plugin.h | 2 +- .../include/intel_gpu/plugin/plugin.hpp | 1 - src/plugins/intel_gpu/src/plugin/plugin.cpp | 37 +++---------------- 6 files changed, 26 insertions(+), 42 deletions(-) diff --git a/src/inference/src/dev/core_impl.cpp b/src/inference/src/dev/core_impl.cpp index 40052cf6712b29..4df07df5d0c5c6 100644 --- a/src/inference/src/dev/core_impl.cpp +++ b/src/inference/src/dev/core_impl.cpp @@ -1479,7 +1479,7 @@ ov::SoPtr<ov::ICompiledModel> ov::CoreImpl::load_model_from_cache( if (util::contains(plugin.get_property(ov::internal::supported_properties), ov::internal::compiled_model_format_supported.name())) { ov::AnyMap compiled_model_format = { - {ov::internal::compiled_model_format_supported.name(), std::string(header.getRuntimeInfo())}}; + {ov::internal::compiled_model_format.name(), std::string(header.getRuntimeInfo())}}; auto res = plugin.get_property(ov::internal::compiled_model_format_supported.name(), compiled_model_format); if (!res.as<bool>()) { diff --git a/src/inference/tests/functional/caching_test.cpp b/src/inference/tests/functional/caching_test.cpp index c50eb0b3976c68..a5402872f43636 100644 --- a/src/inference/tests/functional/caching_test.cpp +++ b/src/inference/tests/functional/caching_test.cpp @@ -1721,7 +1721,7 @@ TEST_P(CachingTest, TestCacheFileWithCompiledModelFormat) { EXPECT_CALL(*mockPlugin, get_property(ov::internal::compiled_model_format_supported.name(), _)) .Times(AtLeast(1)) .WillRepeatedly(Invoke([&](const std::string&, const ov::AnyMap& options) { - auto it = options.find(ov::internal::compiled_model_format_supported.name()); + auto it = options.find(ov::internal::compiled_model_format.name());
ov::Any ret = true; if (it == options.end() || it->second.as<std::string>() != compiled_model_format) ret = false; diff --git a/src/plugins/intel_cpu/src/plugin.cpp b/src/plugins/intel_cpu/src/plugin.cpp index ceda2769f05deb..64e00b7e8463ad 100644 --- a/src/plugins/intel_cpu/src/plugin.cpp +++ b/src/plugins/intel_cpu/src/plugin.cpp @@ -174,8 +174,8 @@ Engine::Engine() : #if defined(OV_CPU_WITH_ACL) scheduler_guard = SchedulerGuard::instance(); #endif - auto& ov_version = ov::get_openvino_version(); - compiled_model_format_info = std::string(ov_version.buildNumber); + auto& ov_version = ov::get_openvino_version(); + m_compiled_model_format["OV_VERSION"] = std::string(ov_version.buildNumber); } Engine::~Engine() { @@ -691,12 +691,22 @@ ov::Any Engine::get_property(const std::string& name, const ov::AnyMap& options) } else if (name == ov::hint::execution_mode) { return engConfig.executionMode; } else if (name == ov::internal::compiled_model_format.name()) { - return decltype(ov::internal::compiled_model_format)::value_type(compiled_model_format_info); + auto model_format = ov::Any(m_compiled_model_format); + return decltype(ov::internal::compiled_model_format)::value_type(model_format.as<std::string>()); } else if (name == ov::internal::compiled_model_format_supported.name()) { - ov::Any res = false; - auto it = options.find(ov::internal::compiled_model_format_supported.name()); - if (it != options.end() && it->second.as<std::string>() == compiled_model_format_info) { - res = true; + ov::Any res = true; + auto it = options.find(ov::internal::compiled_model_format.name()); + if (it == options.end()) { + res = false; + } else { + ov::AnyMap input_map = it->second.as<ov::AnyMap>(); + for (auto& item : m_compiled_model_format) { + auto it = input_map.find(item.first); + if (it == input_map.end() || it->second.as<std::string>() != item.second.as<std::string>()) { + res = false; + break; + } + } } return res; } diff --git a/src/plugins/intel_cpu/src/plugin.h b/src/plugins/intel_cpu/src/plugin.h index fee633dcef95b6..a4ea5b3896698e 100644 ---
a/src/plugins/intel_cpu/src/plugin.h +++ b/src/plugins/intel_cpu/src/plugin.h @@ -67,7 +67,7 @@ class Engine : public ov::IPlugin { So track if streams is set explicitly (not auto-configured) */ bool streamsExplicitlySetForEngine = false; const std::string deviceFullName; - std::string compiled_model_format_info; + ov::AnyMap m_compiled_model_format; std::shared_ptr<CPUSpecialSetup> specialSetup; diff --git a/src/plugins/intel_gpu/include/intel_gpu/plugin/plugin.hpp b/src/plugins/intel_gpu/include/intel_gpu/plugin/plugin.hpp index 881a8b01d0fe79..9611f3439e44c6 100644 --- a/src/plugins/intel_gpu/include/intel_gpu/plugin/plugin.hpp +++ b/src/plugins/intel_gpu/include/intel_gpu/plugin/plugin.hpp @@ -39,7 +39,6 @@ class Plugin : public ov::IPlugin { std::vector<std::string> get_device_capabilities(const cldnn::device_info& info) const; uint32_t get_optimal_batch_size(const ov::AnyMap& options) const; uint32_t get_max_batch_size(const ov::AnyMap& options) const; - ov::AnyMap parse_compiled_model_format(const std::string& input) const; ov::AnyMap preprocess_config(const ov::AnyMap& orig_config) const; bool is_metric(const std::string& name) const; diff --git a/src/plugins/intel_gpu/src/plugin/plugin.cpp b/src/plugins/intel_gpu/src/plugin/plugin.cpp index c43932dbd54bf2..3e56f2c2202a82 100644 --- a/src/plugins/intel_gpu/src/plugin/plugin.cpp +++ b/src/plugins/intel_gpu/src/plugin/plugin.cpp @@ -104,27 +104,6 @@ std::string Plugin::get_device_id(const ov::AnyMap& config) const { return id; } -/** Parse compiled model format to be ov::AnyMap - * input:"aaa:1234;ccc:xyzw;" - * output: - * out["aaa"] = "1234" - * out["ccc"] = "xyzw" - */ -ov::AnyMap Plugin::parse_compiled_model_format(const std::string& input) const { - ov::AnyMap res = {}; - auto in = input; - while (!in.empty()) { - auto pos_1 = in.find_first_of(':'); - auto pos_2 = in.find_first_of(';'); - if (pos_1 == std::string::npos || pos_2 == std::string::npos) { - break; - } - res[in.substr(0, pos_1)] = in.substr(pos_1 + 1, pos_2 - pos_1 - 1);
- in = in.substr(pos_2 + 1); - } - return res; -} - void Plugin::transform_model(std::shared_ptr<ov::Model>& model, const ExecutionConfig& config) const { OV_ITT_SCOPED_TASK(itt::domains::intel_gpu_plugin, "Plugin::transform_model"); auto deviceInfo = m_device_map.at(config.get_property(ov::device::id))->get_info(); @@ -360,22 +339,18 @@ ov::Any Plugin::get_property(const std::string& name, const ov::AnyMap& options) } else if (name == ov::internal::caching_properties) { return decltype(ov::internal::caching_properties)::value_type(get_caching_properties()); } else if (name == ov::internal::compiled_model_format.name()) { - std::string format_info; - for (auto& it : m_compiled_model_format) { - format_info += it.first + ":" + it.second.as<std::string>() + ";"; - } - return decltype(ov::internal::compiled_model_format)::value_type(format_info); + auto model_format = ov::Any(m_compiled_model_format); + return decltype(ov::internal::compiled_model_format)::value_type(model_format.as<std::string>()); } else if (name == ov::internal::compiled_model_format_supported.name()) { ov::Any res = true; - auto it = options.find(ov::internal::compiled_model_format_supported.name()); + auto it = options.find(ov::internal::compiled_model_format.name()); if (it == options.end()) { res = false; } else { - const auto data = it->second.as<std::string>(); - auto input = parse_compiled_model_format(data); + ov::AnyMap input_map = it->second.as<ov::AnyMap>(); for (auto& item : m_compiled_model_format) { - auto it = input.find(item.first); - if (it == input.end() || it->second.as<std::string>() != item.second.as<std::string>()) { + auto it = input_map.find(item.first); + if (it == input_map.end() || it->second.as<std::string>() != item.second.as<std::string>()) { res = false; break; }