Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account-related emails.

Already on GitHub? Sign in to your account

[releases/2021/1] Fixing (recent?) incorrect refactoring: reverting devicePriorities to be vector and respect the order, as opposed to the unordered_map that effectively ignores the priorities #2251

Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
47 changes: 22 additions & 25 deletions inference-engine/src/multi_device/multi_device.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -141,7 +141,7 @@ struct IdleGuard {
};

MultiDeviceExecutableNetwork::MultiDeviceExecutableNetwork(const DeviceMap<InferenceEngine::ExecutableNetwork>& networksPerDevice,
const DeviceMap<DeviceInformation>& networkDevices,
const std::vector<DeviceInformation>& networkDevices,
const std::unordered_map<std::string, InferenceEngine::Parameter>& config,
const bool needPerfCounters) :
InferenceEngine::ExecutableNetworkThreadSafeDefault(nullptr, std::make_shared<InferenceEngine::ImmediateExecutor>()),
Expand All @@ -154,7 +154,8 @@ MultiDeviceExecutableNetwork::MultiDeviceExecutableNetwork(const DeviceMap<Infer
auto& device = networkValue.first;
auto& network = networkValue.second;

auto itNumRequests = _devicePriorities.find(device);
auto itNumRequests = std::find_if(_devicePriorities.cbegin(), _devicePriorities.cend(),
[&device](const DeviceInformation& d){ return d.deviceName == device;});
unsigned int optimalNum = 0;
try {
optimalNum = network.GetMetric(METRIC_KEY(OPTIMAL_NUMBER_OF_INFER_REQUESTS)).as<unsigned int>();
Expand All @@ -165,7 +166,7 @@ MultiDeviceExecutableNetwork::MultiDeviceExecutableNetwork(const DeviceMap<Infer
<< "Failed to query the metric for the " << device << " with error:" << iie.what();
}
const auto numRequests = (_devicePriorities.end() == itNumRequests ||
itNumRequests->second.numRequestsPerDevices == -1) ? optimalNum : itNumRequests->second.numRequestsPerDevices;
itNumRequests->numRequestsPerDevices == -1) ? optimalNum : itNumRequests->numRequestsPerDevices;
auto& workerRequests = _workerRequests[device];
auto& idleWorkerRequests = _idleWorkerRequests[device];
workerRequests.resize(numRequests);
Expand Down Expand Up @@ -197,7 +198,7 @@ void MultiDeviceExecutableNetwork::ScheduleToWorkerInferRequest() {
return _devicePriorities;
}();
for (auto&& device : devices) {
auto& idleWorkerRequests = _idleWorkerRequests[device.first];
auto& idleWorkerRequests = _idleWorkerRequests[device.deviceName];
WorkerInferRequest* workerRequestPtr = nullptr;
if (idleWorkerRequests.try_pop(workerRequestPtr)) {
IdleGuard idleGuard{workerRequestPtr, idleWorkerRequests};
Expand Down Expand Up @@ -258,8 +259,8 @@ void MultiDeviceExecutableNetwork::SetConfig(const std::map<std::string, Inferen
assert(multiPlugin != nullptr);
auto metaDevices = multiPlugin->ParseMetaDevices(priorities->second, {});

if (std::any_of(metaDevices.begin(), metaDevices.end(), [](const std::pair<DeviceName, DeviceInformation> & kvp) {
return kvp.second.numRequestsPerDevices != -1;
if (std::any_of(metaDevices.begin(), metaDevices.end(), [](const DeviceInformation& kvp) {
return kvp.numRequestsPerDevices != -1;
})) {
THROW_IE_EXCEPTION << NOT_IMPLEMENTED_str << "You can only change device priorities but not number of requests"
<<" with the Network's SetConfig(MultiDeviceConfigParams::KEY_MULTI_DEVICE_PRIORITIES!";
Expand All @@ -268,9 +269,10 @@ void MultiDeviceExecutableNetwork::SetConfig(const std::map<std::string, Inferen
{
std::lock_guard<std::mutex> lock{_mutex};
for (auto && device : metaDevices) {
if (_networksPerDevice.find(device.first) == _networksPerDevice.end()) {
if (_networksPerDevice.find(device.deviceName) == _networksPerDevice.end()) {
THROW_IE_EXCEPTION << NOT_FOUND_str << "You can only change device priorities but not add new devices with"
<< " the Network's SetConfig(MultiDeviceConfigParams::KEY_MULTI_DEVICE_PRIORITIES. " << device.first <<
<< " the Network's SetConfig(MultiDeviceConfigParams::KEY_MULTI_DEVICE_PRIORITIES. "
<< device.deviceName <<
" device was not in the original device list!";
}
}
Expand Down Expand Up @@ -353,9 +355,9 @@ std::map<std::string, std::string> MultiDeviceInferencePlugin::GetSupportedConfi
return supportedConfig;
}

DeviceMap<DeviceInformation> MultiDeviceInferencePlugin::ParseMetaDevices(const std::string& priorities,
std::vector<DeviceInformation> MultiDeviceInferencePlugin::ParseMetaDevices(const std::string& priorities,
const std::map<std::string, std::string> & config) const {
DeviceMap<DeviceInformation> metaDevices;
std::vector<DeviceInformation> metaDevices;

// parsing the string and splitting to tokens
std::vector<std::string> devicesWithRequests;
Expand Down Expand Up @@ -399,12 +401,13 @@ DeviceMap<DeviceInformation> MultiDeviceInferencePlugin::ParseMetaDevices(const
}

// create meta device
metaDevices[deviceName] = { getDeviceConfig(deviceName), numRequests };
auto cfg = getDeviceConfig(deviceName);
std::vector<std::string> supportedConfigKeys = GetCore()->GetMetric(deviceName, METRIC_KEY(SUPPORTED_CONFIG_KEYS));
if (std::find(std::begin(supportedConfigKeys), std::end(supportedConfigKeys), CONFIG_KEY_INTERNAL(AGGREGATED_PLUGIN))
!= std::end(supportedConfigKeys)) {
metaDevices[deviceName].config.emplace(CONFIG_KEY_INTERNAL(AGGREGATED_PLUGIN), "");
cfg.emplace(CONFIG_KEY_INTERNAL(AGGREGATED_PLUGIN), "");
}
metaDevices.push_back({ deviceName, cfg, numRequests });
}

return metaDevices;
Expand Down Expand Up @@ -470,17 +473,16 @@ ExecutableNetworkInternal::Ptr MultiDeviceInferencePlugin::LoadExeNetworkImpl(co
THROW_IE_EXCEPTION << "KEY_MULTI_DEVICE_PRIORITIES key is not set for MULTI device";
}

DeviceMap<DeviceInformation> metaDevices = ParseMetaDevices(priorities->second, fullConfig);
auto metaDevices = ParseMetaDevices(priorities->second, fullConfig);

// collect the settings that are applicable to the devices we are loading the network to
std::unordered_map<std::string, InferenceEngine::Parameter> multiNetworkConfig;
multiNetworkConfig.insert(*priorities);

DeviceMap<ExecutableNetwork> executableNetworkPerDevice;
for (auto& p : metaDevices) {
auto & deviceName = p.first;
auto & metaDevice = p.second;
auto & deviceConfig = metaDevice.config;
auto & deviceName = p.deviceName;
auto & deviceConfig = p.config;
auto clonedNetwork = cloneNetwork(network);
executableNetworkPerDevice.insert({ deviceName, GetCore()->LoadNetwork(CNNNetwork{clonedNetwork}, deviceName, deviceConfig) });
multiNetworkConfig.insert(deviceConfig.begin(), deviceConfig.end());
Expand Down Expand Up @@ -514,16 +516,14 @@ void MultiDeviceInferencePlugin::QueryNetwork(const ICNNNetwork&
THROW_IE_EXCEPTION << "KEY_MULTI_DEVICE_PRIORITIES key is not set for MULTI device";
}

DeviceMap<DeviceInformation> metaDevices = ParseMetaDevices(priorities->second, fullConfig);
auto metaDevices = ParseMetaDevices(priorities->second, fullConfig);
std::unordered_set<std::string> supportedLayers;

auto allSupportsNgraph =
std::all_of(std::begin(metaDevices), std::end(metaDevices),
[&] (const DeviceMap<DeviceInformation>::value_type & value) -> bool {
auto& deviceName = value.first;
auto& metaDevice = value.second;
[&] (const DeviceInformation& value) -> bool {
auto clonedNetwork = cloneNetwork(network);
try { GetCore()->QueryNetwork(*clonedNetwork, deviceName, metaDevice.config); }
try { GetCore()->QueryNetwork(*clonedNetwork, value.deviceName, value.config); }
catch (const InferenceEngine::details::InferenceEngineException & ex) {
std::string message = ex.what();
return message.find(NOT_IMPLEMENTED_str) == std::string::npos;
Expand All @@ -532,12 +532,9 @@ void MultiDeviceInferencePlugin::QueryNetwork(const ICNNNetwork&
});

for (auto&& value : metaDevices) {
auto& deviceName = value.first;
auto& metaDevice = value.second;

auto queryNetwork = [&] (const InferenceEngine::ICNNNetwork & networkObject) {
auto clonedNetwork = cloneNetwork(networkObject);
auto deviceQr = GetCore()->QueryNetwork(*clonedNetwork, deviceName, metaDevice.config);
auto deviceQr = GetCore()->QueryNetwork(*clonedNetwork, value.deviceName, value.config);
std::unordered_set<std::string> deviceSupportedLayers;
for (auto&& layerQr : deviceQr.supportedLayersMap) {
deviceSupportedLayers.emplace(layerQr.first);
Expand Down
7 changes: 4 additions & 3 deletions inference-engine/src/multi_device/multi_device.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,7 @@ namespace MultiDevicePlugin {
using DeviceName = std::string;

// Metadata for one device in a MULTI-device priority list.
// Kept in a std::vector (not a map) so that the order of entries preserves
// the user-specified device priority.
struct DeviceInformation {
    DeviceName deviceName;  // device id (e.g. as parsed from KEY_MULTI_DEVICE_PRIORITIES); used as the key into the per-device maps (_networksPerDevice, _idleWorkerRequests)
    std::map<std::string, std::string> config;  // per-device config forwarded to LoadNetwork/QueryNetwork for this device
    int numRequestsPerDevices;  // requested number of infer requests; -1 means "use the device's OPTIMAL_NUMBER_OF_INFER_REQUESTS metric"
};
Expand Down Expand Up @@ -99,7 +100,7 @@ class MultiDeviceExecutableNetwork : public InferenceEngine::ExecutableNetworkTh
using NotBusyWorkerRequests = ThreadSafeQueue<WorkerInferRequest*>;

explicit MultiDeviceExecutableNetwork(const DeviceMap<InferenceEngine::ExecutableNetwork>& networksPerDevice,
const DeviceMap<DeviceInformation>& networkDevices,
const std::vector<DeviceInformation>& networkDevices,
const std::unordered_map<std::string, InferenceEngine::Parameter>& config,
const bool needPerfCounters = false);

Expand All @@ -117,7 +118,7 @@ class MultiDeviceExecutableNetwork : public InferenceEngine::ExecutableNetworkTh
static thread_local WorkerInferRequest* _thisWorkerInferRequest;
std::atomic_bool _terminate = {false};
std::mutex _mutex;
DeviceMap<DeviceInformation> _devicePriorities;
std::vector<DeviceInformation> _devicePriorities;
DeviceMap<InferenceEngine::ExecutableNetwork> _networksPerDevice;
ThreadSafeQueue<Task> _inferPipelineTasks;
DeviceMap<NotBusyWorkerRequests> _idleWorkerRequests;
Expand Down Expand Up @@ -163,7 +164,7 @@ class MultiDeviceInferencePlugin : public InferenceEngine::InferencePluginIntern
InferenceEngine::Parameter GetMetric(const std::string& name,
const std::map<std::string, InferenceEngine::Parameter>& options) const override;

DeviceMap<DeviceInformation> ParseMetaDevices(const std::string & devicesRequestsCfg,
std::vector<DeviceInformation> ParseMetaDevices(const std::string & devicesRequestsCfg,
const std::map<std::string, std::string> & config) const;

protected:
Expand Down