diff --git a/src/bindings/python/src/openvino/_ov_api.py b/src/bindings/python/src/openvino/_ov_api.py index 08f6873d1b8f0f..1631bc42051418 100644 --- a/src/bindings/python/src/openvino/_ov_api.py +++ b/src/bindings/python/src/openvino/_ov_api.py @@ -495,11 +495,22 @@ class Core(CoreBase): between several Core instances. The recommended way is to have a single Core instance per application. """ - def read_model(self, model: Union[str, bytes, object], weights: Union[object, str, bytes, Tensor] = None) -> Model: - if weights is not None: + def read_model( + self, + model: Union[str, bytes, object], + weights: Union[object, str, bytes, Tensor] = None, + config: Optional[dict] = None + ) -> Model: + config = {} if config is None else config + + if isinstance(weights, Tensor): return Model(super().read_model(model, weights)) + elif isinstance(model, bytes): + return Model(super().read_model(model, bytes() if weights is None else weights)) + elif weights is None: + return Model(super().read_model(model, config=config)) else: - return Model(super().read_model(model)) + return Model(super().read_model(model, weights, config)) def compile_model( self, diff --git a/src/bindings/python/src/pyopenvino/core/core.cpp b/src/bindings/python/src/pyopenvino/core/core.cpp index 68e3e5cc4841ed..6dae6508d630f3 100644 --- a/src/bindings/python/src/pyopenvino/core/core.cpp +++ b/src/bindings/python/src/pyopenvino/core/core.cpp @@ -393,10 +393,17 @@ void regclass_Core(py::module m) { cls.def( "read_model", - (std::shared_ptr(ov::Core::*)(const std::string&, const std::string&) const) & ov::Core::read_model, - py::call_guard(), + [](ov::Core& self, + const std::string& model_path, + const std::string& weight_path, + const std::map& config) { + const auto any_map = Common::utils::properties_to_any_map(config); + py::gil_scoped_release release; + return self.read_model(model_path, weight_path, any_map); + }, py::arg("model"), py::arg("weights") = "", + py::arg("config") = py::dict(), R"( 
Reads models from IR / ONNX / PDPD / TF and TFLite formats. @@ -412,6 +419,8 @@ void regclass_Core(py::module m) { For TF format (*.pb) weights parameter is not used. For TFLite format (*.tflite) weights parameter is not used. :type weights: str + :param config: Optional map of pairs: (property name, property value) relevant only for this read operation. + :type config: dict, optional :return: A model. :rtype: openvino.runtime.Model )"); @@ -438,7 +447,10 @@ void regclass_Core(py::module m) { cls.def( "read_model", - [](ov::Core& self, py::object model_path, py::object weights_path) { + [](ov::Core& self, + py::object model_path, + py::object weights_path, + const std::map& config) { if (py::isinstance(model_path, pybind11::module::import("io").attr("BytesIO"))) { std::stringstream _stream; model_path.attr("seek")(0); // Always rewind stream! @@ -466,8 +478,9 @@ void regclass_Core(py::module m) { if (!py::isinstance(weights_path)) { weights_path_cpp = py::str(weights_path); } + const auto any_map = Common::utils::properties_to_any_map(config); py::gil_scoped_release release; - return self.read_model(model_path_cpp, weights_path_cpp); + return self.read_model(model_path_cpp, weights_path_cpp, any_map); } std::stringstream str; @@ -477,6 +490,7 @@ void regclass_Core(py::module m) { }, py::arg("model"), py::arg("weights") = py::none(), + py::arg("config") = py::dict(), R"( Reads models from IR / ONNX / PDPD / TF and TFLite formats. @@ -492,6 +506,8 @@ void regclass_Core(py::module m) { For TF format (*.pb): weights parameter is not used. For TFLite format (*.tflite) weights parameter is not used. :type weights: pathlib.Path + :param config: Optional map of pairs: (property name, property value) relevant only for this read operation. + :type config: dict, optional :return: A model. 
:rtype: openvino.runtime.Model )"); @@ -653,7 +669,7 @@ void regclass_Core(py::module m) { :param properties: Optional dict of pairs: (property name, property value) :type properties: dict :return: Pairs a operation name -> a device name supporting this operation. - :rtype: dict + :rtype: dict )"); cls.def("add_extension", @@ -671,7 +687,7 @@ void regclass_Core(py::module m) { py::arg("extension"), R"( Registers an extension to a Core object. - + :param extension: Extension object. :type extension: openvino.runtime.Extension )"); diff --git a/src/bindings/python/tests/test_runtime/test_core.py b/src/bindings/python/tests/test_runtime/test_core.py index d147ce2d6bcab2..b9cf5735f01e4c 100644 --- a/src/bindings/python/tests/test_runtime/test_core.py +++ b/src/bindings/python/tests/test_runtime/test_core.py @@ -140,6 +140,24 @@ def test_read_model_from_ir(request, tmp_path): assert isinstance(model, Model) +# request - https://docs.pytest.org/en/7.1.x/reference/reference.html#request +def test_read_model_from_ir_with_user_config(request, tmp_path): + core = Core() + xml_path, bin_path = create_filenames_for_ir(request.node.name, tmp_path) + relu_model = get_relu_model() + serialize(relu_model, xml_path, bin_path) + + core_cache_dir = core.get_property("CACHE_DIR") + cache_path = tmp_path / Path("cache") + + model = core.read_model(xml_path, bin_path, config={"CACHE_DIR": f"{cache_path}"}) + + assert isinstance(model, Model) + assert core_cache_dir == core.get_property("CACHE_DIR") + assert os.path.exists(cache_path) + os.rmdir(cache_path) + + # request - https://docs.pytest.org/en/7.1.x/reference/reference.html#request def test_read_model_from_tensor(request, tmp_path): core = Core() @@ -178,6 +196,24 @@ def test_read_model_as_path(request, tmp_path): assert isinstance(model, Model) +# request - https://docs.pytest.org/en/7.1.x/reference/reference.html#request +def test_read_model_as_path_with_user_config(request, tmp_path): + core = Core() + xml_path, bin_path = 
create_filenames_for_ir(request.node.name, tmp_path) + relu_model = get_relu_model() + serialize(relu_model, xml_path, bin_path) + + core_cache_dir = core.get_property("CACHE_DIR") + cache_path = tmp_path / Path("cache_as_path") + + model = core.read_model(Path(xml_path), Path(bin_path), config={"CACHE_DIR": f"{cache_path}"}) + + assert isinstance(model, Model) + assert core_cache_dir == core.get_property("CACHE_DIR") + assert os.path.exists(cache_path) + os.rmdir(cache_path) + + # request - https://docs.pytest.org/en/7.1.x/reference/reference.html#request def test_read_model_from_buffer(request, tmp_path): core = Core() diff --git a/src/frontends/ir/tests/frontend_test_mmap.cpp b/src/frontends/ir/tests/frontend_test_mmap.cpp index 6b9ede14fa7d55..a58e3e29ff0a75 100644 --- a/src/frontends/ir/tests/frontend_test_mmap.cpp +++ b/src/frontends/ir/tests/frontend_test_mmap.cpp @@ -52,6 +52,42 @@ TEST_F(IRFrontendMMapTestsAdvanced, core_enable_mmap_property) { auto model = core.read_model(xmlFileName); auto rss_read = ov::test::utils::getVmRSSInKB(); + if (is_mmap != core.get_property("", ov::enable_mmap)) { + std::cout << "Test failed: core property is not set correctly" << std::endl; + exit(1); + } + + bool is_weights_read = (rss_read - rss_init) > REF_RSS; + if (is_mmap == is_weights_read) { + std::cerr << "Test failed: mmap is " << (is_mmap ? "enabled" : "disabled") << ", but weights are " + << (is_weights_read ? "read" : "not read") << " in RAM" << std::endl; + exit(1); + } + std::cerr << "Test passed" << std::endl; + exit(0); + }; + + for (const auto is_mmap : {true, false}) + // Run test in a separate process to not affect RAM values by previous tests + EXPECT_EXIT(test(is_mmap), ::testing::ExitedWithCode(0), "Test passed"); +} + +TEST_F(IRFrontendMMapTestsAdvanced, core_enable_mmap_property_user_config) { + // Test checks that with enabled `mmap` .bin file + // isn't read into RAM on `read_model` stage. 
+ // Otherwise, with `mmap` disabled, the .bin file should + // be in RAM + + auto test = [&](const bool& is_mmap) { + auto rss_init = ov::test::utils::getVmRSSInKB(); + auto model = core.read_model(xmlFileName, {}, {{ov::enable_mmap(is_mmap)}}); + auto rss_read = ov::test::utils::getVmRSSInKB(); + + if (true != core.get_property("", ov::enable_mmap)) { + std::cout << "Test failed: core property changed by user configuration" << std::endl; + exit(1); + } + bool is_weights_read = (rss_read - rss_init) > REF_RSS; if (is_mmap == is_weights_read) { std::cerr << "Test failed: mmap is " << (is_mmap ? "enabled" : "disabled") << ", but weights are " diff --git a/src/inference/dev_api/openvino/runtime/icore.hpp b/src/inference/dev_api/openvino/runtime/icore.hpp index 659b9c5c0f5788..cc2c94e724ab41 100644 --- a/src/inference/dev_api/openvino/runtime/icore.hpp +++ b/src/inference/dev_api/openvino/runtime/icore.hpp @@ -60,9 +60,12 @@ class OPENVINO_RUNTIME_API ICore { * @param model_path path to IR file * @param bin_path path to bin file, if path is empty, will try to read bin file with the same name as xml and * if bin file with the same name was not found, will load IR without weights. + * @param properties Optional map of pairs: (property name, property value) relevant only for this read operation. 
* @return shared pointer to ov::Model */ - virtual std::shared_ptr read_model(const std::string& model_path, const std::string& bin_path) const = 0; + virtual std::shared_ptr read_model(const std::string& model_path, + const std::string& bin_path, + const AnyMap& properties) const = 0; virtual ov::AnyMap create_compile_config(const std::string& device_name, const ov::AnyMap& origConfig) const = 0; diff --git a/src/inference/include/openvino/runtime/core.hpp b/src/inference/include/openvino/runtime/core.hpp index c13432d664e736..2ca6dc83bcf726 100644 --- a/src/inference/include/openvino/runtime/core.hpp +++ b/src/inference/include/openvino/runtime/core.hpp @@ -79,11 +79,14 @@ class OPENVINO_RUNTIME_API Core { * For the following file formats the `bin_path` parameter is not used: * * ONNX format (*.onnx) * * PDPD (*.pdmodel) - * * TF (*.pb) + * * TF (*.pb, *.meta, SavedModel directory) * * TFLite (*.tflite) + * @param properties Optional map of pairs: (property name, property value) relevant only for this read operation. * @return A model. */ - std::shared_ptr read_model(const std::wstring& model_path, const std::wstring& bin_path = {}) const; + std::shared_ptr read_model(const std::wstring& model_path, + const std::wstring& bin_path = {}, + const ov::AnyMap& properties = {}) const; #endif /** @@ -96,17 +99,54 @@ class OPENVINO_RUNTIME_API Core { * For the following file formats the `bin_path` parameter is not used: * * ONNX format (*.onnx) * * PDPD (*.pdmodel) - * * TF (*.pb) + * * TF (*.pb, *.meta, SavedModel directory) * * TFLite (*.tflite) + * @param properties Optional map of pairs: (property name, property value) relevant only for this read operation. * @return A model. 
* @{ */ - std::shared_ptr read_model(const std::string& model_path, const std::string& bin_path = {}) const; + std::shared_ptr read_model(const std::string& model_path, + const std::string& bin_path = {}, + const ov::AnyMap& properties = {}) const; #ifdef OPENVINO_CPP_VER_17 template >* = nullptr> - std::shared_ptr read_model(const Path& model_path, const Path& bin_path = {}) const { - return read_model(model_path.string(), bin_path.string()); + auto read_model(const Path& model_path, const Path& bin_path = {}, const ov::AnyMap& properties = {}) const { + return read_model(model_path.string(), bin_path.string(), properties); + } +#endif + /// @} + + /** + * @brief Reads models from IR / ONNX / PDPD / TF / TFLite file formats. + * + * @param model_path Path to a model. + * @param bin_path Path to a data file. + * For IR format (*.bin): + * * if `bin_path` is empty, will try to read a bin file with the same name as xml and + * * if the bin file with the same name is not found, will load IR without weights. + * For the following file formats the `bin_path` parameter is not used: + * * ONNX format (*.onnx) + * * PDPD (*.pdmodel) + * * TF (*.pb, *.meta, SavedModel directory) + * * TFLite (*.tflite) + * @param properties Optional pack of pairs: (property name, property value) relevant only for this read operation. + * @return A model. + * @{ + */ + template + util::EnableIfAllStringAny read_model(const std::string& model_path, + const std::string& bin_path, + Properties&&... properties) const { + return read_model(model_path, bin_path, AnyMap{std::forward(properties)...}); + } + +#ifdef OPENVINO_CPP_VER_17 + template && (sizeof...(Properties) > 0)>* = nullptr> + auto read_model(const Path& model_path, const Path& bin_path, Properties&&... 
properties) const { + return read_model(model_path.string(), bin_path.string(), std::forward(properties)...); } #endif /// @} diff --git a/src/inference/src/cpp/core.cpp b/src/inference/src/cpp/core.cpp index 2d6c204757bcf6..5d85fe81364a17 100644 --- a/src/inference/src/cpp/core.cpp +++ b/src/inference/src/cpp/core.cpp @@ -80,14 +80,19 @@ Core::Core(const std::string& xml_config_file) { std::map Core::get_versions(const std::string& device_name) const { OV_CORE_CALL_STATEMENT({ return _impl->get_versions(device_name); })} #ifdef OPENVINO_ENABLE_UNICODE_PATH_SUPPORT -std::shared_ptr Core::read_model(const std::wstring& model_path, const std::wstring& bin_path) const { - OV_CORE_CALL_STATEMENT( - return _impl->read_model(ov::util::wstring_to_string(model_path), ov::util::wstring_to_string(bin_path));); +std::shared_ptr Core::read_model(const std::wstring& model_path, + const std::wstring& bin_path, + const ov::AnyMap& properties) const { + OV_CORE_CALL_STATEMENT(return _impl->read_model(ov::util::wstring_to_string(model_path), + ov::util::wstring_to_string(bin_path), + properties);); } #endif -std::shared_ptr Core::read_model(const std::string& model_path, const std::string& bin_path) const { - OV_CORE_CALL_STATEMENT(return _impl->read_model(model_path, bin_path);); +std::shared_ptr Core::read_model(const std::string& model_path, + const std::string& bin_path, + const AnyMap& properties) const { + OV_CORE_CALL_STATEMENT(return _impl->read_model(model_path, bin_path, properties);); } std::shared_ptr Core::read_model(const std::string& model, const ov::Tensor& weights) const { diff --git a/src/inference/src/dev/core_impl.cpp b/src/inference/src/dev/core_impl.cpp index f332c7c999a548..e0e2fb109dc642 100644 --- a/src/inference/src/dev/core_impl.cpp +++ b/src/inference/src/dev/core_impl.cpp @@ -223,12 +223,6 @@ static const auto core_properties_names = static const auto auto_batch_properties_names = ov::util::make_array(ov::auto_batch_timeout.name(), 
ov::hint::allow_auto_batching.name()); - -void remove_core_properties(ov::AnyMap& properties) { - for (const auto& name : core_properties_names) { - properties.erase(name); - } -} } // namespace bool ov::is_config_applicable(const std::string& user_device_name, const std::string& subprop_device_name) { @@ -352,10 +346,8 @@ ov::Parsed ov::parseDeviceNameIntoConfig(const std::string& deviceName, // remove core properties for HW devices if (!is_virtual_device(parsed._deviceName)) { - for (const auto& name : {ov::enable_mmap.name(), ov::force_tbb_terminate.name()}) { - // note: ov::cache_dir kept as plugin may require it - parsed._config.erase(name); - } + // note: ov::cache_dir kept as plugin may require it + CoreConfig::remove_core_skip_cache_dir(parsed._config); } return parsed; } @@ -842,7 +834,7 @@ ov::SoPtr ov::CoreImpl::compile_model(const std::string& mod const std::string& device_name, const ov::AnyMap& config) const { OV_ITT_SCOPE(FIRST_INFERENCE, ov::itt::domains::LoadTime, "Core::compile_model::Path"); - auto parsed = parseDeviceNameIntoConfig(device_name, coreConfig, config); + auto parsed = parse_device_config(device_name, coreConfig, config, false); // in case of compile_model(file_name), we need to clear-up core-level properties auto plugin = get_plugin(parsed._deviceName); ov::SoPtr compiled_model; @@ -851,13 +843,13 @@ ov::SoPtr ov::CoreImpl::compile_model(const std::string& mod if (cacheManager && device_supports_model_caching(plugin) && !is_proxy_device(plugin)) { // Skip caching for proxy plugin. 
HW plugin will load network from the cache + CoreConfig::remove_core_skip_cache_dir(parsed._config); CacheContent cacheContent{cacheManager, parsed._core_config.get_enable_mmap(), model_path}; cacheContent.blobId = ov::ModelCache::compute_hash(model_path, create_compile_config(plugin, parsed._config)); std::unique_ptr lock = cacheGuard.get_hash_lock(cacheContent.blobId); compiled_model = load_model_from_cache(cacheContent, plugin, parsed._config, ov::SoPtr{}, [&]() { - auto model = - ov::util::read_model(model_path, std::string{}, extensions, parsed._core_config.get_enable_mmap()); + const auto model = util::read_model(model_path, "", extensions, parsed._core_config.get_enable_mmap()); return compile_model_and_cache(plugin, model, parsed._config, {}, cacheContent); }); } else { @@ -1593,7 +1585,19 @@ void ov::CoreConfig::set(const ov::AnyMap& config) { void ov::CoreConfig::set_and_update(ov::AnyMap& config) { set(config); - remove_core_properties(config); + remove_core(config); +} + +void ov::CoreConfig::remove_core(ov::AnyMap& config) { + for (const auto& name : core_properties_names) { + config.erase(name); + } +} + +void ov::CoreConfig::remove_core_skip_cache_dir(ov::AnyMap& config) { + for (const auto& name : {ov::enable_mmap.name(), ov::force_tbb_terminate.name()}) { + config.erase(name); + } } void ov::CoreConfig::set_cache_dir_for_device(const std::string& dir, const std::string& name) { @@ -1664,9 +1668,13 @@ void ov::CoreImpl::add_mutex(const std::string& dev_name) { dev_mutexes[dev_name]; } -std::shared_ptr ov::CoreImpl::read_model(const std::string& modelPath, const std::string& binPath) const { +std::shared_ptr ov::CoreImpl::read_model(const std::string& modelPath, + const std::string& binPath, + const AnyMap& properties) const { OV_ITT_SCOPE(FIRST_INFERENCE, ov::itt::domains::ReadTime, "CoreImpl::read_model from file"); - return ov::util::read_model(modelPath, binPath, extensions, coreConfig.get_enable_mmap()); + auto local_core_config = coreConfig; + 
local_core_config.set(properties); + return ov::util::read_model(modelPath, binPath, extensions, local_core_config.get_enable_mmap()); } std::shared_ptr ov::CoreImpl::read_model(const std::string& model, diff --git a/src/inference/src/dev/core_impl.hpp b/src/inference/src/dev/core_impl.hpp index 7bbab14e4d8c14..85417175c22556 100644 --- a/src/inference/src/dev/core_impl.hpp +++ b/src/inference/src/dev/core_impl.hpp @@ -55,6 +55,10 @@ class CoreConfig final { // Creating thread-safe copy of global config including shared_ptr to ICacheManager CacheConfig get_cache_config_for_device(const ov::Plugin& plugin) const; + // remove core properties + static void remove_core(ov::AnyMap& config); + static void remove_core_skip_cache_dir(ov::AnyMap& config); + private: mutable std::mutex _cacheConfigMutex; CacheConfig _cacheConfig; @@ -303,7 +307,9 @@ class CoreImpl : public ov::ICore, public std::enable_shared_from_this read_model(const std::shared_ptr& model, const std::shared_ptr& weights) const override; - std::shared_ptr read_model(const std::string& model_path, const std::string& bin_path) const override; + std::shared_ptr read_model(const std::string& model_path, + const std::string& bin_path, + const AnyMap& properties) const override; ov::SoPtr compile_model(const std::shared_ptr& model, const std::string& device_name, diff --git a/src/inference/src/dev/iplugin.cpp b/src/inference/src/dev/iplugin.cpp index 1049e39bee6f49..f8c49825ba435a 100644 --- a/src/inference/src/dev/iplugin.cpp +++ b/src/inference/src/dev/iplugin.cpp @@ -4,6 +4,7 @@ #include "openvino/runtime/iplugin.hpp" +#include "core_impl.hpp" #include "openvino/op/convert.hpp" #include "openvino/op/util/op_types.hpp" #include "openvino/op/util/shape_of_base.hpp" @@ -75,8 +76,10 @@ std::shared_ptr ov::IPlugin::compile_model(const std::string const ov::AnyMap& properties) const { auto core = get_core(); OPENVINO_ASSERT(core); - auto model = core->read_model(model_path, std::string()); - return 
compile_model(model, properties); + const auto model = core->read_model(model_path, {}, properties); + auto local_properties = properties; + CoreConfig::remove_core_skip_cache_dir(local_properties); + return compile_model(model, local_properties); } std::unordered_set ov::get_supported_nodes( diff --git a/src/inference/tests/functional/caching_test.cpp b/src/inference/tests/functional/caching_test.cpp index 6b1c7f938ae731..e3572dc98915b0 100644 --- a/src/inference/tests/functional/caching_test.cpp +++ b/src/inference/tests/functional/caching_test.cpp @@ -276,14 +276,14 @@ class CachingTest : public ::testing::TestWithParam model_buffer; + if (config.count(ov::internal::cached_model_buffer.name())) + model_buffer = config.at(ov::internal::cached_model_buffer.name()).as>(); + EXPECT_FALSE(model_buffer); + + std::string name; + istr >> name; + char space; + istr.read(&space, 1); + std::lock_guard lock(mock_creation_mutex); + return create_mock_compiled_model(m_models[name], mockPlugin); + })); + ON_CALL(*mockPlugin, get_property(ov::internal::supported_properties.name(), _)) + .WillByDefault(Invoke([&](const std::string&, const ov::AnyMap&) { + return std::vector{ov::internal::caching_properties.name(), + ov::internal::caching_with_mmap.name()}; + })); + EXPECT_CALL(*mockPlugin, get_property(_, _)).Times(AnyNumber()); + EXPECT_CALL(*mockPlugin, query_model(_, _)).Times(AnyNumber()); + EXPECT_CALL(*mockPlugin, get_property(ov::device::architecture.name(), _)).Times(AnyNumber()); + EXPECT_CALL(*mockPlugin, get_property(ov::internal::caching_properties.name(), _)).Times(AnyNumber()); + if (m_remoteContext) { + return; // skip the remote Context test for Multi plugin + } + int index = 0; + m_post_mock_net_callbacks.emplace_back([&](MockICompiledModelImpl& net) { + EXPECT_CALL(net, export_model(_)).Times(1); + }); + MkDirGuard guard(m_cacheDir); + EXPECT_CALL(*mockPlugin, compile_model(_, _, _)).Times(0); + EXPECT_CALL(*mockPlugin, compile_model(A&>(), _)).Times(1); + 
EXPECT_CALL(*mockPlugin, import_model(_, _, _)).Times(0); + EXPECT_CALL(*mockPlugin, import_model(_, _)).Times(1); + testLoad([&](ov::Core& core) { + const auto config = ov::AnyMap{{ov::cache_dir(m_cacheDir)}, {ov::enable_mmap(false)}}; + m_testFunctionWithCfg(core, config); + m_testFunctionWithCfg(core, config); + }); + std::cout << "Caching Load multiple threads test completed. Tried " << index << " times" << std::endl; +} + +TEST_P(CachingTest, Load_mmap_is_not_supported_by_plugin_local_cfg) { + ON_CALL(*mockPlugin, import_model(_, _)).WillByDefault(Invoke([&](std::istream& istr, const ov::AnyMap& config) { + if (m_checkConfigCb) { + m_checkConfigCb(config); + } + std::shared_ptr model_buffer; + if (config.count(ov::internal::cached_model_buffer.name())) + model_buffer = config.at(ov::internal::cached_model_buffer.name()).as>(); + EXPECT_FALSE(model_buffer); + + std::string name; + istr >> name; + char space; + istr.read(&space, 1); + std::lock_guard lock(mock_creation_mutex); + return create_mock_compiled_model(m_models[name], mockPlugin); + })); + EXPECT_CALL(*mockPlugin, get_property(_, _)).Times(AnyNumber()); + EXPECT_CALL(*mockPlugin, query_model(_, _)).Times(AnyNumber()); + EXPECT_CALL(*mockPlugin, get_property(ov::device::architecture.name(), _)).Times(AnyNumber()); + EXPECT_CALL(*mockPlugin, get_property(ov::internal::caching_properties.name(), _)).Times(AnyNumber()); + if (m_remoteContext) { + return; // skip the remote Context test for Multi plugin + } + int index = 0; + m_post_mock_net_callbacks.emplace_back([&](MockICompiledModelImpl& net) { + EXPECT_CALL(net, export_model(_)).Times(1); + }); + MkDirGuard guard(m_cacheDir); + EXPECT_CALL(*mockPlugin, compile_model(_, _, _)).Times(0); + EXPECT_CALL(*mockPlugin, compile_model(A&>(), _)).Times(1); + EXPECT_CALL(*mockPlugin, import_model(_, _, _)).Times(0); + EXPECT_CALL(*mockPlugin, import_model(_, _)).Times(1); + testLoad([&](ov::Core& core) { + const auto config = 
ov::AnyMap{{ov::cache_dir(m_cacheDir)}, {ov::enable_mmap(false)}}; + m_testFunctionWithCfg(core, config); + m_testFunctionWithCfg(core, config); + }); + std::cout << "Caching Load multiple threads test completed. Tried " << index << " times" << std::endl; +} + #if defined(ENABLE_OV_IR_FRONTEND) static std::string getTestCaseName(const testing::TestParamInfo>& obj) { diff --git a/src/tests/test_utils/unit_test_utils/mocks/openvino/runtime/mock_icore.hpp b/src/tests/test_utils/unit_test_utils/mocks/openvino/runtime/mock_icore.hpp index 367818ebbf9572..534ba6cd1748df 100644 --- a/src/tests/test_utils/unit_test_utils/mocks/openvino/runtime/mock_icore.hpp +++ b/src/tests/test_utils/unit_test_utils/mocks/openvino/runtime/mock_icore.hpp @@ -47,7 +47,10 @@ class MockICore : public ov::ICore { (const std::string&, const ov::Tensor&, const std::string&, const ov::AnyMap&), (const)); MOCK_METHOD(std::shared_ptr, read_model, (const std::string&, const ov::Tensor&, bool), (const)); - MOCK_METHOD(std::shared_ptr, read_model, (const std::string&, const std::string&), (const)); + MOCK_METHOD(std::shared_ptr, + read_model, + (const std::string&, const std::string&, const ov::AnyMap&), + (const)); MOCK_METHOD(std::shared_ptr, read_model, (const std::shared_ptr&, const std::shared_ptr&),