Refactored VPU tests not to use old interfaces (openvinotoolkit#3888)
* Refactored VPU tests not to use old interfaces

* Added catching of exceptions

* Commented out the failing part of the HDDL tests
ilya-lavrenov authored Jan 21, 2021
1 parent 05d97fa commit 61ccde7
Showing 51 changed files with 837 additions and 1,410 deletions.
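
Every hunk below follows the same pattern: the deprecated InferenceEngine::IExecutableNetwork::Ptr and IInferRequest::Ptr interfaces, which report failures through a StatusCode return value plus a ResponseDesc out-parameter, give way to the ExecutableNetwork and InferRequest wrapper classes, which throw exceptions instead. A minimal before/after sketch of the idea (illustrative only, not taken from the diff; assume ie is a Core pointer and net an already-created CNNNetwork):

    // Old flow (removed by this commit): every call returns a StatusCode
    // that has to be checked by hand.
    InferenceEngine::IExecutableNetwork::Ptr exeNetOld = ie->LoadNetwork(net, "MYRIAD", {});
    InferenceEngine::IInferRequest::Ptr reqOld;
    InferenceEngine::ResponseDesc resp;
    if (exeNetOld->CreateInferRequest(reqOld, &resp) != InferenceEngine::OK)
        THROW_IE_EXCEPTION << "Can not create infer request: " << resp.msg;
    if (reqOld->Infer(&resp) != InferenceEngine::OK)
        THROW_IE_EXCEPTION << "Can not do infer: " << resp.msg;

    // New flow (introduced by this commit): failures surface as exceptions,
    // so the happy path needs no manual checks.
    InferenceEngine::ExecutableNetwork exeNet = ie->LoadNetwork(net, "MYRIAD", {});
    InferenceEngine::InferRequest req = exeNet.CreateInferRequest();
    req.Infer();
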
@@ -37,7 +37,7 @@ TEST_P(InferRequestTests, SetEmptyConfig) {
     // Create CNNNetwork from ngraph::Function
     InferenceEngine::CNNNetwork cnnNet(function);
     // Load CNNNetwork to target plugins
-    InferenceEngine::IExecutableNetwork::Ptr execNet;
+    InferenceEngine::ExecutableNetwork execNet;
     std::map<std::string, std::string> config {};
     if (targetDevice.find(CommonTestUtils::DEVICE_MULTI) == std::string::npos &&
         targetDevice.find(CommonTestUtils::DEVICE_HETERO) == std::string::npos) {
@@ -55,7 +55,7 @@ TEST_P(InferRequestTests, canLoadCorrectNetworkToGetExecutable) {
     SKIP_IF_CURRENT_TEST_IS_DISABLED()
     // Create CNNNetwork from ngraph::Function
     InferenceEngine::CNNNetwork cnnNet(function);
-    InferenceEngine::IExecutableNetwork::Ptr execNet;
+    InferenceEngine::ExecutableNetwork execNet;
     ASSERT_NO_THROW(execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration));
 }

@@ -64,7 +64,7 @@ TEST_P(InferRequestTests, CanCreateTwoExeNetworks) {
     SKIP_IF_CURRENT_TEST_IS_DISABLED()
     // Create CNNNetwork from ngraph::Function
     InferenceEngine::CNNNetwork cnnNet(function);
-    InferenceEngine::IExecutableNetwork::Ptr execNet;
+    InferenceEngine::ExecutableNetwork execNet;
     for (auto i = 0; i < 2; i++) {
         ASSERT_NO_THROW(execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration));
         ASSERT_NE(nullptr, cnnNet.getFunction());

@@ -21,8 +21,7 @@ void MyriadLoadNetworkTestCase::SetUp() {
 }
 
 void MyriadLoadNetworkTestCase::LoadNetwork() {
-    ASSERT_NO_THROW(InferenceEngine::IExecutableNetwork::Ptr exe_network =
-        ie->LoadNetwork(cnnNetwork, "MYRIAD"));
+    ASSERT_NO_THROW(ie->LoadNetwork(cnnNetwork, "MYRIAD"));
 }
 
 bool MyriadLoadNetworkTestCase::IsDeviceAvailable(std::string device_name) {

@@ -21,9 +21,7 @@ class MyriadProtocolTests : public testing::Test,
                            public MyriadDevicesInfo {
 public:
     // IE variables
-    InferenceEngine::IInferRequest::Ptr request;
-    InferenceEngine::ResponseDesc resp;
-    StatusCode statusCode = StatusCode::GENERAL_ERROR;
+    InferenceEngine::InferRequest request;
     static std::shared_ptr<InferenceEngine::Core> ie;
 
     // MVNC variables

@@ -17,18 +17,13 @@ TEST_P(MyriadProtocolTests, CanInferenceWithProtocol) {
 
     std::map<std::string, std::string> config = getConfigForProtocol(protocol);
 
-    InferenceEngine::IExecutableNetwork::Ptr exe_network =
+    InferenceEngine::ExecutableNetwork exe_network =
             ie->LoadNetwork(network, "MYRIAD", config);
 
-    ASSERT_NO_THROW(statusCode = exe_network->CreateInferRequest(request, &resp));
-    ASSERT_EQ(statusCode, StatusCode::OK) << resp.msg;
-
-    ASSERT_NO_THROW(statusCode = request->Infer(&resp));
-    ASSERT_EQ(statusCode, StatusCode::OK) << resp.msg;
+    ASSERT_NO_THROW(request = exe_network.CreateInferRequest());
+    ASSERT_NO_THROW(request.Infer());
 }
 
-
-
 TEST_P(MyriadProtocolTests, NoErrorsMessagesWhenLoadNetworkSuccessful) {
     if (protocol != NC_USB) {
         GTEST_SKIP();
@@ -42,11 +37,10 @@ TEST_P(MyriadProtocolTests, NoErrorsMessagesWhenLoadNetworkSuccessful) {
 
     std::map<std::string, std::string> config = {{CONFIG_KEY(LOG_LEVEL), CONFIG_VALUE(LOG_WARNING)}};
 
-    InferenceEngine::IExecutableNetwork::Ptr exe_network =
+    InferenceEngine::ExecutableNetwork exe_network =
            ie->LoadNetwork(network, "MYRIAD", config);
     setbuf(stdout, NULL);
 
-
     std::string content(buff);
     for (int i = MVLOG_WARN; i < MVLOG_LAST; i++) {
         auto found = content.find(mvLogHeader[i]);
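
Where the old test compared statusCode against StatusCode::OK and streamed resp.msg into the failure message, ASSERT_NO_THROW now does the failure detection; depending on the googletest version, the macro's output may not include the exception text itself. An explicit equivalent that also surfaces the message would look like this (hedged sketch, not part of the commit):

    try {
        request = exe_network.CreateInferRequest();
        request.Infer();
    } catch (const std::exception& ex) {
        FAIL() << "Inference failed: " << ex.what();
    }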

@@ -17,7 +17,7 @@ namespace Regression { namespace Matchers {
 class ClassificationMatcher : public BaseMatcher {
 private:
     size_t checkResultNumber;
-    std::vector<std::shared_ptr<InferenceEngine::IExecutableNetwork>> _executableNetworks;
+    std::vector<InferenceEngine::ExecutableNetwork> _executableNetworks;
     std::vector <std::vector<Reference::LabelProbability>> _results;
     ResponseDesc _resp;
     InferenceEngine::InputsDataMap _inputsInfo;
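
Holding ExecutableNetwork by value works because the class is a lightweight, copyable handle that shares its underlying compiled network, so a std::vector of handles replaces the vector of shared_ptr<IExecutableNetwork> directly. A sketch of that assumption (hypothetical snippet, not from the diff):

    std::vector<InferenceEngine::ExecutableNetwork> executableNetworks;
    executableNetworks.push_back(ie->LoadNetwork(cnnNet, "MYRIAD"));
    // Copying the handle does not recompile or duplicate the network.
    InferenceEngine::ExecutableNetwork sameNetwork = executableNetworks.back();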

@@ -14,22 +14,11 @@ class IECoreAdapter {
     IECoreAdapter(
         std::shared_ptr<InferenceEngine::Core> ieCore, std::string deviceName);
 
-    // -----------------------------------------
-    // IInferencePlugin API (deprecated). Begin.
-    // - InferenceEngine::ICNNNetwork is replaced by InferenceEngine::CNNNetwork
-    // -----------------------------------------
-
-    InferenceEngine::StatusCode LoadNetwork(
-        InferenceEngine::IExecutableNetwork::Ptr& ret, InferenceEngine::CNNNetwork network,
-        const std::map<std::string, std::string>& config, InferenceEngine::ResponseDesc* resp) noexcept;
-
-    InferenceEngine::StatusCode ImportNetwork(
-        InferenceEngine::IExecutableNetwork::Ptr& ret, const std::string& modelFileName,
-        const std::map<std::string, std::string>& config, InferenceEngine::ResponseDesc* resp) noexcept;
-
-    // -----------------------------------------
-    // IInferencePlugin API (deprecated). End.
-    // -----------------------------------------
+    InferenceEngine::ExecutableNetwork LoadNetwork(const InferenceEngine::CNNNetwork & network,
+        const std::map<std::string, std::string>& config = {});
+
+    InferenceEngine::ExecutableNetwork ImportNetwork(const std::string& modelFileName,
+        const std::map<std::string, std::string>& config = {});
 
     InferenceEngine::ExecutableNetwork ImportNetwork(std::istream& networkModel,
         const std::map<std::string, std::string>& config = {});
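
With these signatures, a failed LoadNetwork or ImportNetwork propagates as an exception instead of a StatusCode, so a call site that used to branch on the return value wraps the call instead (hedged usage sketch; the construction mirrors the constructor declared above):

    IECoreAdapter adapter(std::make_shared<InferenceEngine::Core>(), "MYRIAD");
    try {
        InferenceEngine::ExecutableNetwork exeNet = adapter.LoadNetwork(network);
        exeNet.CreateInferRequest().Infer();
    } catch (const std::exception& ex) {
        std::cerr << "Inference failed: " << ex.what() << std::endl;
    }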

@@ -213,18 +213,14 @@ inline void TopResults(unsigned int n, TBlob<T>& input, std::vector<unsigned>& o
 
 void ClassificationMatcher::match_n(size_t top, int index) {
     try {
-        InferenceEngine::IInferRequest::Ptr inferRequest;
-        if (_executableNetworks[index]->CreateInferRequest(inferRequest, &_resp) != OK) {
-            THROW_IE_EXCEPTION << "Can not create infer request: " << _resp.msg;
-        }
+        auto inferRequest = _executableNetworks[index].CreateInferRequest();
         std::string prevImageName = "";
 
         auto batchSize = config.batchSize;
 
         if (config.useDynamicBatching) {
             batchSize = config.dynBatch;
-            InferenceEngine::ResponseDesc resp;
-            inferRequest->SetBatch(batchSize, &resp);
+            inferRequest.SetBatch(batchSize);
         }
 
         if (config._paths_to_images.size() % batchSize != 0) {
@@ -249,25 +245,14 @@ void ClassificationMatcher::match_n(size_t top, int index) {
             for (int j = 0; j != batchSize; j++) {
                 const auto & imageName = config._paths_to_images[i + j];
 
-                InferenceEngine::Blob::Ptr inputBlob;
-                if (inferRequest->GetBlob(_inputsInfo.begin()->first.c_str(), inputBlob, &_resp) != OK) {
-                    THROW_IE_EXCEPTION << "Can not get input with name: " << _inputsInfo.begin()->first
-                                       << " error message: " << _resp.msg;
-                }
+                auto inputBlob = inferRequest.GetBlob(_inputsInfo.begin()->first.c_str());
                 loadImage(imageName, inputBlob, true, j);
             }
         }
 
-        StatusCode status = inferRequest->Infer(&_resp);
-        if (status != OK) {
-            THROW_IE_EXCEPTION << "Can not do infer: " << _resp.msg;
-        }
+        inferRequest.Infer();
 
-        InferenceEngine::Blob::Ptr outputBlobPtr;
-        if (inferRequest->GetBlob(_outputsInfo.begin()->first.c_str(), outputBlobPtr, &_resp) != OK) {
-            THROW_IE_EXCEPTION << "Can not get output with name: " << _outputsInfo.begin()->first
-                               << " error message: " << _resp.msg;
-        }
+        auto outputBlobPtr = inferRequest.GetBlob(_outputsInfo.begin()->first.c_str());
 
         InferenceEngine::TBlob<float>::Ptr outputFP32;
         if (outputBlobPtr->getTensorDesc().getPrecision() == InferenceEngine::Precision::FP16) {

@@ -12,32 +12,16 @@ using IECorePtr = std::shared_ptr<InferenceEngine::Core>;
 IECoreAdapter::IECoreAdapter(IECorePtr ieCore, std::string deviceName)
     : m_ieCore(ieCore), m_deviceName(deviceName) {}
 
-StatusCode IECoreAdapter::LoadNetwork(
-    IExecutableNetwork::Ptr& ret, CNNNetwork network,
-    const std::map<std::string, std::string>& config, ResponseDesc* resp) noexcept {
-
-    try {
-        ret = m_ieCore->LoadNetwork(network, m_deviceName, config);
-    } catch (const std::exception& ex) {
-        return DescriptionBuffer(GENERAL_ERROR, resp) << ex.what();
-    }
-
-    return OK;
+ExecutableNetwork IECoreAdapter::LoadNetwork(
+    const CNNNetwork & network,
+    const std::map<std::string, std::string>& config) {
+    return m_ieCore->LoadNetwork(network, m_deviceName, config);
 }
 
-StatusCode IECoreAdapter::ImportNetwork(
-    IExecutableNetwork::Ptr& ret, const std::string& modelFileName,
-    const std::map<std::string, std::string>& config, ResponseDesc* resp) noexcept {
-
-    try {
-        ret = m_ieCore->ImportNetwork(modelFileName, m_deviceName, config);
-    } catch (const NetworkNotRead& ie_ex) {
-        return DescriptionBuffer(NETWORK_NOT_READ, resp) << ie_ex.what();
-    } catch (const std::exception& ex) {
-        return DescriptionBuffer(GENERAL_ERROR, resp) << ex.what();
-    }
-
-    return OK;
+ExecutableNetwork IECoreAdapter::ImportNetwork(
+    const std::string& modelFileName,
+    const std::map<std::string, std::string>& config) {
+    return m_ieCore->ImportNetwork(modelFileName, m_deviceName, config);
 }
 
 ExecutableNetwork IECoreAdapter::ImportNetwork(
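
The deleted try/catch wrappers do not lose information: the typed NetworkNotRead failure that ImportNetwork used to translate into NETWORK_NOT_READ now reaches the caller directly, so a test can assert on the exception itself (hedged sketch; the .blob path is hypothetical):

    ASSERT_THROW(adapter.ImportNetwork("nonexistent.blob"),
                 InferenceEngine::NetworkNotRead);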

@@ -140,33 +140,12 @@ void SegmentationMatcher::match() {
         executableNetwork = loadedExecutableNetwork;
     }
 
-    InferenceEngine::IInferRequest::Ptr inferRequest;
-    sts = static_cast<IExecutableNetwork::Ptr&>(executableNetwork)->CreateInferRequest(inferRequest, &dsc);
-    if (sts != InferenceEngine::OK) {
-        THROW_IE_EXCEPTION << "Failed CreateInferRequest with error: " << dsc.msg;
-    }
-
-    sts = inferRequest->SetBlob(inputs.begin()->first.c_str(), input, &dsc);
-    if (sts != InferenceEngine::OK) {
-        THROW_IE_EXCEPTION << "Failed SetBlob with error: " << dsc.msg;
-    }
-
-    sts = inferRequest->SetBlob(outInfo.begin()->first.c_str(), output, &dsc);
-    if (sts != InferenceEngine::OK) {
-        THROW_IE_EXCEPTION << "Failed SetBlob with error: " << dsc.msg;
-    }
+    auto inferRequest = executableNetwork.CreateInferRequest();
+    inferRequest.SetBlob(inputs.begin()->first.c_str(), input);
+    inferRequest.SetBlob(outInfo.begin()->first.c_str(), output);
 
     // Infer model
-    sts = inferRequest->Infer(&dsc);
-
-    // Check errors
-    if (sts == InferenceEngine::GENERAL_ERROR) {
-        THROW_IE_EXCEPTION << "Scoring failed! Critical error: " << dsc.msg;
-    } else if (sts == InferenceEngine::NOT_IMPLEMENTED) {
-        THROW_IE_EXCEPTION << "Scoring failed! Input data is incorrect and not supported!";
-    } else if (sts == InferenceEngine::NETWORK_NOT_LOADED) {
-        THROW_IE_EXCEPTION << "Scoring failed! " << dsc.msg;
-    }
+    inferRequest.Infer();
 
     // Convert output data and save it to image
     outArray = blobToImageOutputArray(output, nullptr, nullptr, &C);
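
One visible trade-off: the old status-code branches distinguished GENERAL_ERROR, NOT_IMPLEMENTED, and NETWORK_NOT_LOADED with separate messages, while the refactored call reports everything through the thrown exception. If the "Scoring failed" framing matters to a caller, it can be restored around the call (hedged sketch, not part of the commit):

    try {
        inferRequest.Infer();
    } catch (const std::exception& ex) {
        THROW_IE_EXCEPTION << "Scoring failed! " << ex.what();
    }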

[Diffs for the remaining 42 of the 51 changed files were not loaded on this page.]
