Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

CoreImpl small refactoring #16145

Merged
merged 10 commits into from
Mar 8, 2023
Original file line number Diff line number Diff line change
Expand Up @@ -92,13 +92,6 @@ DECLARE_CONFIG_KEY(SMALL_CORE_OFFSET);
*/
DECLARE_CONFIG_KEY(CPU_RUNTIME_CACHE_CAPACITY);

/**
* @brief This key should be used to force disable export while loading network even if global cache dir is defined
* Used by HETERO plugin to disable automatic caching of subnetworks (set value to YES)
* @ingroup ie_dev_api_plugin_api
*/
DECLARE_CONFIG_KEY(FORCE_DISABLE_CACHE);

/**
* @brief Internal device id for particular device (like GPU.0, GPU.1 etc)
*/
Expand Down
19 changes: 9 additions & 10 deletions src/inference/src/compilation_context.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -56,7 +56,7 @@ uint64_t calculate_td(const InferenceEngine::TensorDesc& td, uint64_t _seed) {

namespace ov {

std::string NetworkCompilationContext::calculate_file_info(const std::string& filePath) {
std::string ModelCache::calculate_file_info(const std::string& filePath) {
uint64_t seed = 0;
auto absPath = filePath;
if (filePath.size() > 0) {
Expand All @@ -78,9 +78,8 @@ std::string NetworkCompilationContext::calculate_file_info(const std::string& fi
return std::to_string(seed);
}

std::string NetworkCompilationContext::compute_hash(const std::shared_ptr<const ov::Model>& model,
const ov::AnyMap& compileOptions) {
OV_ITT_SCOPE(FIRST_INFERENCE, ov::itt::domains::IE_RT, "NetworkCompilationContext::compute_hash - Model");
std::string ModelCache::compute_hash(const std::shared_ptr<const ov::Model>& model, const ov::AnyMap& compileOptions) {
OV_ITT_SCOPE(FIRST_INFERENCE, ov::itt::domains::IE_RT, "ModelCache::compute_hash - Model");

OPENVINO_ASSERT(model);

Expand Down Expand Up @@ -145,8 +144,8 @@ std::string NetworkCompilationContext::compute_hash(const std::shared_ptr<const
return std::to_string(seed);
}

std::string NetworkCompilationContext::compute_hash(const std::string& modelName, const ov::AnyMap& compileOptions) {
OV_ITT_SCOPE(FIRST_INFERENCE, ov::itt::domains::IE_RT, "NetworkCompilationContext::compute_hash - ModelName");
std::string ModelCache::compute_hash(const std::string& modelName, const ov::AnyMap& compileOptions) {
OV_ITT_SCOPE(FIRST_INFERENCE, ov::itt::domains::IE_RT, "ModelCache::compute_hash - ModelName");
uint64_t seed = 0;
try {
seed = hash_combine(seed, FileUtils::absoluteFilePath(modelName));
Expand All @@ -160,10 +159,10 @@ std::string NetworkCompilationContext::compute_hash(const std::string& modelName
return std::to_string(seed);
}

std::string NetworkCompilationContext::compute_hash(const std::string& modelStr,
const ov::Tensor& tensor,
const ov::AnyMap& compileOptions) {
OV_ITT_SCOPE(FIRST_INFERENCE, ov::itt::domains::IE_RT, "NetworkCompilationContext::compute_hash - Model Memory");
std::string ModelCache::compute_hash(const std::string& modelStr,
const ov::Tensor& tensor,
const ov::AnyMap& compileOptions) {
OV_ITT_SCOPE(FIRST_INFERENCE, ov::itt::domains::IE_RT, "ModelCache::compute_hash - Model Memory");
uint64_t seed = 0;
// model string
seed = hash_combine(seed, modelStr);
Expand Down
2 changes: 1 addition & 1 deletion src/inference/src/compilation_context.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@ namespace ov {
class Tensor;
class Model;

struct NetworkCompilationContext final {
struct ModelCache final {
static std::string calculate_file_info(const std::string& filePath);

static std::string compute_hash(const std::shared_ptr<const ov::Model>& model, const ov::AnyMap& compileOptions);
Expand Down
62 changes: 62 additions & 0 deletions src/inference/src/core.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -27,10 +27,72 @@ std::string resolve_extension_path(const std::string& path) {
return retvalue;
}

/// @brief Flattens per-device ("secondary") properties into the top-level property map.
///
/// A secondary property is an entry whose value is itself an `ov::AnyMap` keyed by a
/// device name (e.g. {"GPU": {...}}). Entries targeting @p device are merged into the
/// top level (existing top-level keys win) and removed; for virtual devices
/// (AUTO/MULTI/HETERO) secondary properties for *other* devices are kept so they can be
/// forwarded, while for hardware devices they are dropped.
///
/// @param device      Target device name the properties are being resolved for.
/// @param properties  Original (possibly nested) property map; not modified.
/// @return Flattened copy of @p properties.
ov::AnyMap flatten_sub_properties(const std::string& device, const ov::AnyMap& properties) {
    ov::AnyMap result = properties;
    const bool isVirtualDev = device.find("AUTO") != std::string::npos || device.find("MULTI") != std::string::npos ||
                              device.find("HETERO") != std::string::npos;
    for (auto item = result.begin(); item != result.end();) {
        // Only map-valued entries are device-scoped secondary properties. Checking this
        // first also avoids needlessly parsing plain property keys (e.g. "PERF_COUNT")
        // as device names, which the original code did on every iteration.
        if (!item->second.is<ov::AnyMap>()) {
            ++item;
            continue;
        }
        const auto parsed = ov::parseDeviceNameIntoConfig(item->first);
        if (device == parsed._deviceName) {
            // 1. Flatten the secondary properties targeted at this device.
            for (auto&& sub_property : item->second.as<ov::AnyMap>()) {
                // 1.1 A 1st-level property overrides a 2nd-level property with the same key.
                if (result.find(sub_property.first) != result.end())
                    continue;
                result[sub_property.first] = sub_property.second;
            }
            item = result.erase(item);
        } else if (isVirtualDev) {
            // 2. Keep the secondary property for the other virtual devices.
            ++item;
        } else {
            // 3. Remove the secondary property setting for other hardware devices.
            item = result.erase(item);
        }
    }
    return result;
}

} // namespace

namespace ov {

#ifndef OPENVINO_STATIC_LIBRARY

/// @brief Resolves the path to the plugins.xml configuration file.
///
/// If @p xmlFile is non-empty it is returned as-is. Otherwise the file is searched for
/// relative to the OpenVINO runtime library: first in the versioned
/// "openvino-<MAJOR>.<MINOR>.<PATCH>" sub-folder, then directly next to the library.
///
/// @param xmlFile  Explicit path supplied by the caller, or empty to auto-discover.
/// @return Path to an existing plugins.xml file.
/// @throws ov::Exception if no plugins.xml can be located.
std::string findPluginXML(const std::string& xmlFile) {
    if (!xmlFile.empty())
        return xmlFile;

    const auto libraryDir = ie::getInferenceEngineLibraryPath();

    // Candidate 1: versioned sub-folder "openvino-X.Y.Z" next to libopenvino.so
    std::ostringstream versionedFolder;
    versionedFolder << "openvino-" << OPENVINO_VERSION_MAJOR << "." << OPENVINO_VERSION_MINOR << "."
                    << OPENVINO_VERSION_PATCH;
    const auto versionedCandidate =
        FileUtils::makePath(FileUtils::makePath(libraryDir, ov::util::to_file_path(versionedFolder.str())),
                            ov::util::to_file_path("plugins.xml"));
    if (FileUtils::fileExist(versionedCandidate))
        return ov::util::from_file_path(versionedCandidate);

    // Candidate 2: plugins.xml directly in the folder with libopenvino.so
    const auto plainCandidate = FileUtils::makePath(libraryDir, ov::util::to_file_path("plugins.xml"));
    if (FileUtils::fileExist(plainCandidate))
        return ov::util::from_file_path(plainCandidate);

    throw ov::Exception("Failed to find plugins.xml file");
}

#endif // OPENVINO_STATIC_LIBRARY

#define OV_CORE_CALL_STATEMENT(...) \
try { \
__VA_ARGS__; \
Expand Down
Loading