Skip to content

Commit

Permalink
Enable CPU accelerate FIL in MULTI
Browse files Browse the repository at this point in the history
Signed-off-by: Hu, Yuan2 <[email protected]>
  • Loading branch information
tiger100256-hu committed Sep 6, 2021
1 parent bb84d11 commit a83fcdb
Show file tree
Hide file tree
Showing 14 changed files with 448 additions and 116 deletions.
2 changes: 2 additions & 0 deletions inference-engine/src/inference_engine/src/ie_core.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -68,10 +68,12 @@ Parsed<T> parseDeviceNameIntoConfig(const std::string& deviceName, const std::ma
if (deviceName.find("AUTO:") == 0) {
config_[InferenceEngine::MultiDeviceConfigParams::KEY_MULTI_DEVICE_PRIORITIES] =
deviceName.substr(std::string("AUTO:").size());
config_.insert({CONFIG_KEY_INTERNAL(MULTI_WORK_MODE_AS_AUTO), InferenceEngine::PluginConfigParams::YES});
}
} else {
if (deviceName_.empty()) {
deviceName_ = "AUTO";
config_.insert({CONFIG_KEY_INTERNAL(MULTI_WORK_MODE_AS_AUTO), InferenceEngine::PluginConfigParams::YES});
}
InferenceEngine::DeviceIDParser parser(deviceName_);
deviceName_ = parser.getDeviceName();
Expand Down
357 changes: 301 additions & 56 deletions inference-engine/src/multi_device/multi_device_exec_network.cpp

Large diffs are not rendered by default.

43 changes: 40 additions & 3 deletions inference-engine/src/multi_device/multi_device_exec_network.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -16,14 +16,21 @@
#include <cpp_interfaces/impl/ie_executable_network_thread_safe_default.hpp>
#include <ie_parallel.hpp>
#include <threading/ie_itask_executor.hpp>
#include <threading/ie_executor_manager.hpp>
#include "ie_icore.hpp"

#if (IE_THREAD == IE_THREAD_TBB || IE_THREAD == IE_THREAD_TBB_AUTO)
# include <tbb/concurrent_queue.h>
#endif


namespace MultiDevicePlugin {

class MultiDeviceInferencePlugin;

using DeviceName = std::string;
using NetworkFuture = std::future<InferenceEngine::SoExecutableNetworkInternal>;
using NetworkPromise = std::promise<InferenceEngine::SoExecutableNetworkInternal>;

struct DeviceInformation {
DeviceName deviceName;
Expand Down Expand Up @@ -105,10 +112,14 @@ class MultiDeviceExecutableNetwork : public InferenceEngine::ExecutableNetworkTh
};
using NotBusyWorkerRequests = ThreadSafeBoundedQueue<WorkerInferRequest*>;

explicit MultiDeviceExecutableNetwork(const DeviceMap<InferenceEngine::SoExecutableNetworkInternal>& networksPerDevice,
explicit MultiDeviceExecutableNetwork(const DeviceMap<InferenceEngine::SoExecutableNetworkInternal>& networksPerDevice,
const std::vector<DeviceInformation>& networkDevices,
const std::unordered_map<std::string, InferenceEngine::Parameter>& config,
const bool needPerfCounters = false);
MultiDeviceExecutableNetwork(const std::string& modelPath,
const InferenceEngine::CNNNetwork& network,
const std::map<std::string, std::string>& config,
MultiDeviceInferencePlugin* plugin);

void SetConfig(const std::map<std::string, InferenceEngine::Parameter> &config) override;
InferenceEngine::Parameter GetConfig(const std::string &name) const override;
Expand All @@ -129,15 +140,41 @@ class MultiDeviceExecutableNetwork : public InferenceEngine::ExecutableNetworkTh
static thread_local const char* _thisPreferredDeviceName;
mutable std::mutex _mutex;
std::vector<DeviceInformation> _devicePriorities;
const std::vector<DeviceInformation> _devicePrioritiesInitial;
std::vector<DeviceInformation> _devicePrioritiesInitial;
DeviceMap<InferenceEngine::SoExecutableNetworkInternal> _networksPerDevice;
ThreadSafeQueue<InferenceEngine::Task> _inferPipelineTasks;
DeviceMap<std::unique_ptr<ThreadSafeQueue<InferenceEngine::Task>>> _inferPipelineTasksDeviceSpecific;
DeviceMap<NotBusyWorkerRequests> _idleWorkerRequests;
DeviceMap<std::vector<WorkerInferRequest>> _workerRequests;
std::unordered_map<std::string, InferenceEngine::Parameter> _config;
bool _needPerfCounters = false;
mutable bool _needPerfCounters = false;
std::atomic_size_t _numRequestsCreated = {0};

private:
void GenerateWorkers(const std::string& device, const InferenceEngine::SoExecutableNetworkInternal& executableNetwork);
bool IsActualNetworkReady() const;
void WaitActualNetworkReady() const;
void WaitFirstNetworkReady();
void SetPerfCounts() const;
static bool RunPipelineTask(InferenceEngine::Task& inferPipelineTask,
NotBusyWorkerRequests& idleWorkerRequests,
const DeviceName& preferred_device);

private:
std::shared_ptr<InferenceEngine::ICore> core;
InferenceEngine::IStreamsExecutor::Ptr _executor;
MultiDeviceInferencePlugin* _multiPlugin;
InferenceEngine::SoExecutableNetworkInternal _networkFirstReady;
mutable InferenceEngine::SoExecutableNetworkInternal _networkActualNeeded;
NetworkFuture _cpuFuture;
NetworkPromise _cpuPromise;
mutable NetworkFuture _acceleratorFuture;
mutable NetworkPromise _acceleratorPromise;
mutable bool _alreadyActualNetwork = {false};
bool _workModeIsAUTO { false };
DeviceInformation _cpuDevice;
DeviceInformation _acceleratorDevice;
mutable std::once_flag _oc;
};

} // namespace MultiDevicePlugin
67 changes: 46 additions & 21 deletions inference-engine/src/multi_device/multi_device_plugin.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -212,32 +212,18 @@ IExecutableNetworkInternal::Ptr MultiDeviceInferencePlugin::LoadNetworkImpl(cons
bool workModeAuto = workMode != fullConfig.end() && workMode->second == InferenceEngine::PluginConfigParams::YES;
auto priorities = fullConfig.find(MultiDeviceConfigParams::KEY_MULTI_DEVICE_PRIORITIES);

// not found device priorities for -d AUTO use case
// if workMode is AUTO
if (workModeAuto) {
CheckConfig(fullConfig);
return std::make_shared<MultiDeviceExecutableNetwork>(modelPath, network, fullConfig, this);
}

if (priorities == fullConfig.end()) {
if (workModeAuto) {
std::string allDevices;
auto availableDevices = GetCore()->GetAvailableDevices();
if (availableDevices.empty()) {
IE_THROW(NotFound) << "No available device found";
}
for (auto&& device : availableDevices) {
allDevices += device;
allDevices += ((device == availableDevices[availableDevices.size()-1]) ? "" : ",");
}
metaDevices = ParseMetaDevices(allDevices, fullConfig);
multiNetworkConfig.insert({MultiDeviceConfigParams::KEY_MULTI_DEVICE_PRIORITIES, allDevices});
} else {
IE_THROW() << "KEY_MULTI_DEVICE_PRIORITIES key is not set for " << GetName() << " device";
}
IE_THROW() << "KEY_MULTI_DEVICE_PRIORITIES key is not set for " << GetName() << " device";
} else { // for use case -d MULTI:xPU or -d AUTO:xPU
metaDevices = ParseMetaDevices(priorities->second, fullConfig);
multiNetworkConfig.insert(*priorities);
}
// check if it is -d AUTO or -d AUTO:xPU use case
if (workModeAuto) {
auto targetDevice = SelectDevice(metaDevices, networkPrecision);
metaDevices = { targetDevice };
}

DeviceMap<SoExecutableNetworkInternal> executableNetworkPerDevice;
std::mutex load_mutex;
Expand Down Expand Up @@ -456,4 +442,43 @@ DeviceInformation MultiDeviceInferencePlugin::SelectDevice(const std::vector<Dev
return CPU[0];
}

std::string MultiDeviceInferencePlugin::GetDeviceList(const std::map<std::string, std::string>& config) const {
    // Returns the comma-separated candidate device list for AUTO/MULTI.
    // If the user supplied KEY_MULTI_DEVICE_PRIORITIES in `config`, that value
    // is used verbatim; otherwise the list is built from the Core's currently
    // available devices.
    // Throws if the resulting list is empty.
    std::string allDevices;

    auto deviceListConfig = config.find(MultiDeviceConfigParams::KEY_MULTI_DEVICE_PRIORITIES);
    if (deviceListConfig == config.end()) {
        auto deviceList = GetCore()->GetAvailableDevices();
        // Join by position rather than comparing each element against the last
        // element's value: a value comparison drops the separator for any
        // earlier entry that happens to equal the final one.
        for (std::size_t i = 0; i < deviceList.size(); ++i) {
            if (i != 0)
                allDevices += ",";
            allDevices += deviceList[i];
        }
    } else {
        allDevices = deviceListConfig->second;
    }

    if (allDevices.empty()) {
        IE_THROW() << "Please, check environment due to no supported devices can be used";
    }

    return allDevices;
}

void MultiDeviceInferencePlugin::CheckConfig(const std::map<std::string, std::string>& config) {
    // Validates an AUTO-mode configuration: every key must be an "AUTO_"-prefixed
    // option, KEY_PERF_COUNT with a YES/NO value, or one of the plugin's
    // supported config keys. Throws on the first unsupported key or value.
    for (const auto& entry : config) {
        const auto& key = entry.first;
        const auto& value = entry.second;
        if (key.find("AUTO_") == 0) {
            continue;  // AUTO_-namespaced options are accepted as-is
        }
        if (key == PluginConfigParams::KEY_PERF_COUNT) {
            const bool validFlag = (value == PluginConfigParams::YES) || (value == PluginConfigParams::NO);
            if (!validFlag) {
                IE_THROW() << "Unsupported config value: " << value
                           << " for key: " << key;
            }
            continue;
        }
        const bool knownKey =
            std::find(supported_configKeys.begin(), supported_configKeys.end(), key) != supported_configKeys.end();
        if (!knownKey) {
            IE_THROW() << "Unsupported config key: " << key;
        }
    }
}

} // namespace MultiDevicePlugin
5 changes: 4 additions & 1 deletion inference-engine/src/multi_device/multi_device_plugin.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -36,6 +36,9 @@ class MultiDeviceInferencePlugin : public InferenceEngine::IInferencePlugin {
std::vector<MultiDevicePlugin::DeviceInformation> ParseMetaDevices(const std::string & devicesRequestsCfg,
const std::map<std::string, std::string> & config) const;

std::string GetDeviceList(const std::map<std::string, std::string>& config) const;
DeviceInformation SelectDevice(const std::vector<DeviceInformation>& metaDevices, const std::string& networkPrecision = METRIC_VALUE(FP32));

protected:
std::map<std::string, std::string> GetSupportedConfig(const std::map<std::string, std::string>& config,
const MultiDevicePlugin::DeviceName & deviceName) const;
Expand All @@ -45,7 +48,7 @@ class MultiDeviceInferencePlugin : public InferenceEngine::IInferencePlugin {
InferenceEngine::CNNNetwork network,
const std::map<std::string, std::string>& config,
const std::string &networkPrecision = METRIC_VALUE(FP32));
DeviceInformation SelectDevice(const std::vector<DeviceInformation>& metaDevices, const std::string& networkPrecision = METRIC_VALUE(FP32));
static void CheckConfig(const std::map<std::string, std::string>& config);
};

} // namespace MultiDevicePlugin
Original file line number Diff line number Diff line change
Expand Up @@ -84,13 +84,6 @@ namespace {
::testing::ValuesIn(multiconf)),
CorrectConfigAPITests::getTestCaseName);

INSTANTIATE_TEST_SUITE_P(smoke_Auto_BehaviorTests, CorrectConfigAPITests,
::testing::Combine(
::testing::ValuesIn(netPrecisions),
::testing::Values(CommonTestUtils::DEVICE_AUTO),
::testing::ValuesIn(multiconf)),
CorrectConfigAPITests::getTestCaseName);

INSTANTIATE_TEST_SUITE_P(smoke_BehaviorTests, IncorrectConfigTests,
::testing::Combine(
::testing::ValuesIn(netPrecisions),
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -62,18 +62,5 @@ namespace {
::testing::ValuesIn(MultiInConfigs)),
InferRequestConfigTest::getTestCaseName);

INSTANTIATE_TEST_SUITE_P(smoke_Auto_BehaviorTests, InferRequestConfigTest,
::testing::Combine(
::testing::Values(1u),
::testing::Values(CommonTestUtils::DEVICE_AUTO),
::testing::ValuesIn(multiConfigs)),
InferRequestConfigTest::getTestCaseName);


INSTANTIATE_TEST_SUITE_P(smoke_Auto_BehaviorTests_, InferRequestConfigTest,
::testing::Combine(
::testing::Values(1u),
::testing::Values(CommonTestUtils::DEVICE_AUTO),
::testing::ValuesIn(MultiInConfigs)),
InferRequestConfigTest::getTestCaseName);
} // namespace
Original file line number Diff line number Diff line change
Expand Up @@ -32,6 +32,10 @@ namespace {
{InferenceEngine::PluginConfigParams::KEY_CPU_THROUGHPUT_STREAMS, InferenceEngine::PluginConfigParams::CPU_THROUGHPUT_AUTO}}
};

// Single AUTO configuration pinning the device priority list to CPU.
const std::vector<std::map<std::string, std::string>> AutoConfigsInputOutput = {
    {{InferenceEngine::MultiDeviceConfigParams::KEY_MULTI_DEVICE_PRIORITIES,
      CommonTestUtils::DEVICE_CPU}}
};

const std::vector<std::map<std::string, std::string>> configsOutput = {
{},
{{InferenceEngine::PluginConfigParams::KEY_CPU_THROUGHPUT_STREAMS, InferenceEngine::PluginConfigParams::CPU_THROUGHPUT_AUTO}}
Expand All @@ -56,7 +60,7 @@ namespace {
::testing::Combine(
::testing::ValuesIn(netPrecisions),
::testing::Values(CommonTestUtils::DEVICE_AUTO),
::testing::ValuesIn(MultiConfigsInputOutput)),
::testing::ValuesIn(AutoConfigsInputOutput)),
BehaviorTestOutput::getTestCaseName);

INSTANTIATE_TEST_SUITE_P(smoke_BehaviorTests, BehaviorTests,
Expand Down Expand Up @@ -98,7 +102,7 @@ namespace {
::testing::Combine(
::testing::ValuesIn(netPrecisions),
::testing::Values(CommonTestUtils::DEVICE_AUTO),
::testing::ValuesIn(MultiConfigsInputOutput)),
::testing::ValuesIn(AutoConfigsInputOutput)),
BehaviorTestInput::getTestCaseName);

} // namespace
Original file line number Diff line number Diff line change
Expand Up @@ -90,6 +90,13 @@ namespace {
{{InferenceEngine::MultiDeviceConfigParams::KEY_MULTI_DEVICE_PRIORITIES , CommonTestUtils::DEVICE_GPU}}
};

const std::vector<std::map<std::string, std::string>> autoConfigs = {
{{InferenceEngine::MultiDeviceConfigParams::KEY_MULTI_DEVICE_PRIORITIES , CommonTestUtils::DEVICE_GPU},
{InferenceEngine::MultiDeviceConfigParams::KEY_MULTI_DEVICE_PRIORITIES,
CommonTestUtils::DEVICE_GPU + std::string(",") + CommonTestUtils::DEVICE_CPU}}
};


INSTANTIATE_TEST_SUITE_P(smoke_BehaviorTests, CorrectConfigAPITests,
::testing::Combine(
::testing::ValuesIn(netPrecisions),
Expand All @@ -115,7 +122,7 @@ namespace {
::testing::Combine(
::testing::ValuesIn(netPrecisions),
::testing::Values(CommonTestUtils::DEVICE_AUTO),
::testing::ValuesIn(multiconf)),
::testing::ValuesIn(autoConfigs)),
CorrectConfigAPITests::getTestCaseName);

INSTANTIATE_TEST_SUITE_P(smoke_BehaviorTests, IncorrectConfigAPITests,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,11 @@ const std::vector<std::map<std::string, std::string>> multiConfigs = {
{{ MULTI_CONFIG_KEY(DEVICE_PRIORITIES) , CommonTestUtils::DEVICE_GPU}}
};

const std::vector<std::map<std::string, std::string>> autoConfigs = {
{{InferenceEngine::MultiDeviceConfigParams::KEY_MULTI_DEVICE_PRIORITIES , CommonTestUtils::DEVICE_GPU},
{InferenceEngine::MultiDeviceConfigParams::KEY_MULTI_DEVICE_PRIORITIES , CommonTestUtils::DEVICE_GPU + std::string(",") + CommonTestUtils::DEVICE_CPU}}
};

INSTANTIATE_TEST_SUITE_P(smoke_BehaviorTests, InferRequestCallbackTests,
::testing::Combine(
::testing::Values(CommonTestUtils::DEVICE_GPU),
Expand All @@ -27,8 +32,8 @@ INSTANTIATE_TEST_SUITE_P(smoke_Multi_BehaviorTests, InferRequestCallbackTests,
InferRequestCallbackTests::getTestCaseName);

INSTANTIATE_TEST_SUITE_P(smoke_Auto_BehaviorTests, InferRequestCallbackTests,
::testing::Combine(
::testing::Values(CommonTestUtils::DEVICE_AUTO),
::testing::ValuesIn(multiConfigs)),
InferRequestCallbackTests::getTestCaseName);
::testing::Combine(
::testing::Values(CommonTestUtils::DEVICE_AUTO),
::testing::ValuesIn(autoConfigs)),
InferRequestCallbackTests::getTestCaseName);
} // namespace
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,12 @@ namespace {
{{InferenceEngine::MultiDeviceConfigParams::KEY_MULTI_DEVICE_PRIORITIES, CommonTestUtils::DEVICE_GPU}}
};

const std::vector<std::map<std::string, std::string>> autoConfigs = {
{{InferenceEngine::MultiDeviceConfigParams::KEY_MULTI_DEVICE_PRIORITIES , CommonTestUtils::DEVICE_GPU},
{InferenceEngine::MultiDeviceConfigParams::KEY_MULTI_DEVICE_PRIORITIES ,
CommonTestUtils::DEVICE_GPU + std::string(",") + CommonTestUtils::DEVICE_CPU}}
};

INSTANTIATE_TEST_SUITE_P(smoke_BehaviorTests, InferRequestWaitTests,
::testing::Combine(
::testing::Values(CommonTestUtils::DEVICE_GPU),
Expand All @@ -28,7 +34,7 @@ namespace {
INSTANTIATE_TEST_SUITE_P(smoke_Auto_BehaviorTests, InferRequestWaitTests,
::testing::Combine(
::testing::Values(CommonTestUtils::DEVICE_AUTO),
::testing::ValuesIn(configs)),
::testing::ValuesIn(autoConfigs)),
InferRequestWaitTests::getTestCaseName);

} // namespace
} // namespace
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,12 @@ namespace {
{{ InferenceEngine::MultiDeviceConfigParams::KEY_MULTI_DEVICE_PRIORITIES , CommonTestUtils::DEVICE_GPU}}
};

const std::vector<std::map<std::string, std::string>> autoConfigs = {
{{InferenceEngine::MultiDeviceConfigParams::KEY_MULTI_DEVICE_PRIORITIES , CommonTestUtils::DEVICE_GPU},
{InferenceEngine::MultiDeviceConfigParams::KEY_MULTI_DEVICE_PRIORITIES ,
CommonTestUtils::DEVICE_GPU + std::string(",") + CommonTestUtils::DEVICE_CPU}}
};

INSTANTIATE_TEST_SUITE_P(smoke_BehaviorTests, InferRequestPreprocessTest,
::testing::Combine(
::testing::ValuesIn(netPrecisions),
Expand All @@ -40,7 +46,7 @@ namespace {
::testing::Combine(
::testing::ValuesIn(netPrecisions),
::testing::Values(CommonTestUtils::DEVICE_AUTO),
::testing::ValuesIn(multiConfigs)),
::testing::ValuesIn(autoConfigs)),
InferRequestPreprocessTest::getTestCaseName);

const std::vector<InferenceEngine::Precision> ioPrecisions = {
Expand Down Expand Up @@ -85,4 +91,4 @@ namespace {
::testing::ValuesIn(configs)),
InferRequestPreprocessDynamicallyInSetBlobTest::getTestCaseName);

} // namespace
} // namespace
Loading

0 comments on commit a83fcdb

Please sign in to comment.