From a19299d8c21044afb0a39473f4e2a6b177fea83b Mon Sep 17 00:00:00 2001
From: Maksim Kutakov
Date: Fri, 26 Mar 2021 19:36:21 +0300
Subject: [PATCH 01/11] Parameter->Result network tests were added.

---
 .../subgraph_tests/parameter_result.cpp       | 14 +++++
 .../src/param_result_custom_blob.cpp          | 56 +++++++++++++++++++
 2 files changed, 70 insertions(+)
 create mode 100644 inference-engine/tests/functional/plugin/cpu/shared_tests_instances/subgraph_tests/parameter_result.cpp
 create mode 100644 inference-engine/tests/functional/plugin/cpu/subgraph_tests/src/param_result_custom_blob.cpp

diff --git a/inference-engine/tests/functional/plugin/cpu/shared_tests_instances/subgraph_tests/parameter_result.cpp b/inference-engine/tests/functional/plugin/cpu/shared_tests_instances/subgraph_tests/parameter_result.cpp
new file mode 100644
index 00000000000000..7eb8265150c599
--- /dev/null
+++ b/inference-engine/tests/functional/plugin/cpu/shared_tests_instances/subgraph_tests/parameter_result.cpp
@@ -0,0 +1,14 @@
+// Copyright (C) 2021 Intel Corporation
+// SPDX-License-Identifier: Apache-2.0
+//
+
+#include "subgraph_tests/parameter_result.hpp"
+#include "common_test_utils/test_constants.hpp"
+
+using namespace SubgraphTestsDefinitions;
+
+namespace {
+    INSTANTIATE_TEST_CASE_P(smoke_Check, ParameterResultSubgraphTest,
+                            ::testing::Values(CommonTestUtils::DEVICE_CPU),
+                            ParameterResultSubgraphTest::getTestCaseName);
+}  // namespace
diff --git a/inference-engine/tests/functional/plugin/cpu/subgraph_tests/src/param_result_custom_blob.cpp b/inference-engine/tests/functional/plugin/cpu/subgraph_tests/src/param_result_custom_blob.cpp
new file mode 100644
index 00000000000000..1168c4ee3e1db9
--- /dev/null
+++ b/inference-engine/tests/functional/plugin/cpu/subgraph_tests/src/param_result_custom_blob.cpp
@@ -0,0 +1,56 @@
+// Copyright (C) 2021 Intel Corporation
+// SPDX-License-Identifier: Apache-2.0
+//
+
+#include "shared_test_classes/subgraph/parameter_result.hpp"
+#include "common_test_utils/test_constants.hpp"
+
+using namespace SubgraphTestsDefinitions;
+using namespace InferenceEngine;
+
+namespace CPULayerTestsDefinitions {
+
+class ParameterResultCustomBlobTest : public ParameterResultSubgraphTest {
+protected:
+    void Infer() override {
+        constexpr size_t inferIterations = 10lu;
+
+        inferRequest = executableNetwork.CreateInferRequest();
+
+        auto inputBlob = inputs.front();
+        const size_t elementsCount = inputBlob->size();
+        for (size_t i = 0; i < inferIterations; ++i) {
+            const auto& inputsInfo = cnnNetwork.getInputsInfo().begin()->second;
+            std::string inputName = cnnNetwork.getInputsInfo().begin()->first;
+
+            float* customInpData = new float[elementsCount];
+            auto inpBlobData = inputBlob->buffer().as<const float*>();
+            std::copy(inpBlobData, inpBlobData + elementsCount, customInpData);
+
+            auto& tensorDesc = inputsInfo->getTensorDesc();
+            auto customBlob = make_shared_blob<float>(tensorDesc, customInpData, elementsCount * sizeof(float));
+            inferRequest.SetBlob(inputName, customBlob);
+
+            inferRequest.Infer();
+
+            ParameterResultSubgraphTest::Validate();
+
+            delete[] customInpData;
+        }
+    }
+    void Validate() override {
+        // Do nothing. We call Validate() in the Infer() method
+    }
+};
+
+TEST_P(ParameterResultCustomBlobTest, CompareWithRefs) {
+    SKIP_IF_CURRENT_TEST_IS_DISABLED()
+
+    Run();
+}
+namespace {
+    INSTANTIATE_TEST_CASE_P(smoke_Check_Custom_Blob, ParameterResultCustomBlobTest,
+                            ::testing::Values(CommonTestUtils::DEVICE_CPU),
+                            ParameterResultSubgraphTest::getTestCaseName);
+}  // namespace
+}  // namespace CPULayerTestsDefinitions
\ No newline at end of file
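Annotation: the test above hinges on wrapping caller-owned memory in a Blob and handing it to the request. A minimal standalone sketch of that pattern follows; `request`, `inputName`, `desc`, and `userData` are placeholders, and the size argument mirrors the `elementsCount * sizeof(float)` usage in the test above rather than being the authoritative contract.

    #include <ie_core.hpp>
    #include <vector>

    void inferWithUserMemory(InferenceEngine::InferRequest& request,
                             const std::string& inputName,
                             const InferenceEngine::TensorDesc& desc,
                             std::vector<float>& userData) {
        // make_shared_blob<float>(desc, ptr, size) does not copy: the blob
        // aliases userData, so the vector must outlive every Infer() call.
        auto blob = InferenceEngine::make_shared_blob<float>(
            desc, userData.data(), userData.size() * sizeof(float));
        request.SetBlob(inputName, blob);
        request.Infer();
    }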
From b271f4bd2f28ff4c54733d7fc7e26d0eb72bb308 Mon Sep 17 00:00:00 2001
From: Maksim Kutakov
Date: Fri, 26 Mar 2021 20:49:33 +0300
Subject: [PATCH 02/11] Support for the same input and output names was added
 to the CPU plugin.

---
 .../mkldnn_plugin/mkldnn_infer_request.cpp    | 85 ++++++++++---------
 .../impl/ie_infer_request_internal.hpp        |  0
 2 files changed, 47 insertions(+), 38 deletions(-)
 create mode 100644 inference-engine/src/plugin_api/cpp_interfaces/impl/ie_infer_request_internal.hpp

diff --git a/inference-engine/src/mkldnn_plugin/mkldnn_infer_request.cpp b/inference-engine/src/mkldnn_plugin/mkldnn_infer_request.cpp
index ca1dd9ed5891bd..360c982fa70620 100644
--- a/inference-engine/src/mkldnn_plugin/mkldnn_infer_request.cpp
+++ b/inference-engine/src/mkldnn_plugin/mkldnn_infer_request.cpp
@@ -34,6 +34,8 @@ MKLDNNPlugin::MKLDNNInferRequest::MKLDNNInferRequest(InferenceEngine::InputsData
     if (execNetwork->_graphs.size() == 0)
         IE_THROW() << "No graph was found";
     graph = &(execNetwork->GetGraph()._graph);
+
+    // Allocate all input blobs
     for (const auto& it : _networkInputs) {
         MKLDNNInferRequest::GetBlob(it.first);
     }
@@ -221,60 +223,64 @@ InferenceEngine::Blob::Ptr MKLDNNPlugin::MKLDNNInferRequest::GetBlob(const std::
         return data;
     }
 
-    if (_inputs.find(name) != _inputs.end()) {
-        data = _inputs[name];
-        checkBlob(data, name, true);
-        return data;
-    }
+    if (_inputs.find(name) == _inputs.end()) {
+        InferenceEngine::TensorDesc desc = blobs[name]->getTensorDesc();
 
-    InferenceEngine::TensorDesc desc = blobs[name]->getTensorDesc();
-    if (_networkInputs.find(name) != _networkInputs.end()) {
-        InferenceEngine::Layout l = _networkInputs[name]->getLayout();
-        InferenceEngine::Precision p = _networkInputs[name]->getPrecision();
-        InferenceEngine::SizeVector dims = _networkInputs[name]->getTensorDesc().getDims();
+        if (_networkInputs.find(name) != _networkInputs.end()) {
+            InferenceEngine::Layout l = _networkInputs[name]->getLayout();
+            InferenceEngine::Precision p = _networkInputs[name]->getPrecision();
+            InferenceEngine::SizeVector dims = _networkInputs[name]->getTensorDesc().getDims();
 
-        desc = InferenceEngine::TensorDesc(p, dims, l);
-    }
+            desc = InferenceEngine::TensorDesc(p, dims, l);
+        }
 
-    _inputs[name] = make_blob_with_precision(desc);
-    _inputs[name]->allocate();
-    if (blobs[name]->getTensorDesc() == desc &&
+        _inputs[name] = make_blob_with_precision(desc);
+        _inputs[name]->allocate();
+        if (blobs[name]->getTensorDesc() == desc &&
             graph->_meanImages.find(name) == graph->_meanImages.end() && !graph->getProperty().batchLimit) {
-        externalPtr[name] = _inputs[name]->buffer();
+            externalPtr[name] = _inputs[name]->buffer();
+        }
     }
     data = _inputs[name];
     checkBlob(data, name, true);
-    return data;
 }
+
 blobs.clear();
 graph->getOutputBlobs(blobs);
 if (blobs.find(name) != blobs.end()) {
-    if (_outputs.find(name) != _outputs.end()) {
-        data = _outputs[name];
-        checkBlob(data, name, false);
-        return data;
-    }
-
-    InferenceEngine::TensorDesc desc = _networkOutputs[name]->getTensorDesc();
+    if (_outputs.find(name) == _outputs.end()) {
+        if (!data) {
+            InferenceEngine::TensorDesc desc = _networkOutputs[name]->getTensorDesc();
        desc.setPrecision(normalizeToSupportedPrecision(desc.getPrecision()));
 
-        // WA: need to avoid exception thrown when we compare blocking desc in SetBlob
-        // in situation if we push output blobs as inputs for next network (in Hetero plugin)
-        // it may be that output tensor desc will be different from real input tensor desc for next network
-        // because the optimal descriptor was chosen (e.g. inPlace case for Split node)
-        auto currBlockDesc = InferenceEngine::BlockingDesc(desc.getBlockingDesc().getBlockDims(), desc.getBlockingDesc().getOrder());
-        desc = InferenceEngine::TensorDesc(desc.getPrecision(), desc.getDims(), currBlockDesc);
-
-        _outputs[name] = make_blob_with_precision(desc);
-        _outputs[name]->allocate();
-        if (blobs[name]->getTensorDesc() == desc && !graph->getProperty().batchLimit) {
-            externalPtr[name] = _outputs[name]->buffer();
+            // WA: need to avoid exception thrown when we compare blocking desc in SetBlob
+            // in situation if we push output blobs as inputs for next network (in Hetero plugin)
+            // it may be that output tensor desc will be different from real input tensor desc for next network
+            // because the optimal descriptor was chosen (e.g. inPlace case for Split node)
+            auto currBlockDesc = InferenceEngine::BlockingDesc(desc.getBlockingDesc().getBlockDims(), desc.getBlockingDesc().getOrder());
+            desc = InferenceEngine::TensorDesc(desc.getPrecision(), desc.getDims(), currBlockDesc);
+
+            data = make_blob_with_precision(desc);
+            data->allocate();
+        } else {
+            if (blobs[name]->getTensorDesc() != data->getTensorDesc()) {
+                IE_THROW() << "Network input and output use the same name: " << name << " but expect different tensors.";
+            }
+        }
+
+        _outputs[name] = data;
+        if (!externalPtr.count(name) && data->getTensorDesc() == blobs[name]->getTensorDesc() && !graph->getProperty().batchLimit) {
+            externalPtr[name] = data->buffer();
+        }
     }
     data = _outputs[name];
     checkBlob(data, name, false);
     return data;
 }
 
-    IE_THROW() << "Cannot find blob with name: " << name;
+    if (!data) {
+        IE_THROW() << "Cannot find blob with name: " << name;
+    }
+    return data;
 }
 
 void MKLDNNPlugin::MKLDNNInferRequest::SetBlob(const std::string& name, const InferenceEngine::Blob::Ptr &data) {
@@ -295,7 +301,9 @@ void MKLDNNPlugin::MKLDNNInferRequest::SetBlob(const std::string& name, const In
     InferenceEngine::InputInfo::Ptr foundInput;
     InferenceEngine::DataPtr foundOutput;
     size_t dataSize = data->size();
-    if (findInputAndOutputBlobByName(name, foundInput, foundOutput)) {
+    findInputAndOutputBlobByName(name, foundInput, foundOutput);
+
+    if (foundInput) {
         if (foundInput->getPrecision() != data->getTensorDesc().getPrecision()) {
             IE_THROW(ParameterMismatch) << "Failed to set input blob with precision: " << data->getTensorDesc().getPrecision()
                                         << ", if CNNNetwork input blob precision is: " << foundInput->getPrecision();
@@ -346,7 +354,8 @@ void MKLDNNPlugin::MKLDNNInferRequest::SetBlob(const std::string& name, const In
             }
             _inputs[name] = data;
         }
-    } else {
+    }
+    if (foundOutput) {
         if (compoundBlobPassed) {
             IE_THROW(NotImplemented)
                         << "cannot set compound blob: supported only for input pre-processing";
diff --git a/inference-engine/src/plugin_api/cpp_interfaces/impl/ie_infer_request_internal.hpp b/inference-engine/src/plugin_api/cpp_interfaces/impl/ie_infer_request_internal.hpp
new file mode 100644
index 00000000000000..e69de29bb2d1d6
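Annotation: seen from the API side, the patch above removes the early return in the input branch of GetBlob, so a name that is both a network input and a network output now reaches the output handling too. A usage sketch under stated assumptions: a Parameter->Result topology whose single input and single output resolve to the same tensor name, with "model.xml" and the device string as placeholders.

    #include <ie_core.hpp>

    void sameNameRoundTrip() {
        InferenceEngine::Core core;
        auto network = core.ReadNetwork("model.xml");   // hypothetical model path
        auto execNet = core.LoadNetwork(network, "CPU");
        auto request = execNet.CreateInferRequest();

        const std::string name = network.getInputsInfo().begin()->first;
        // Before this patch the input branch returned early, so the shared
        // name could never be registered as an output blob as well.
        auto inputBlob = request.GetBlob(name);   // served from _inputs
        request.Infer();
        auto outputBlob = request.GetBlob(name);  // now also resolved as output
    }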
From 8f3e6f2e0556bca71fc63e3073427473cc8ab951 Mon Sep 17 00:00:00 2001
From: Maksim Kutakov
Date: Mon, 29 Mar 2021 13:04:34 +0300
Subject: [PATCH 03/11] Test with repeated inference on the same blob was
 added.

---
 .../src/param_result_custom_blob.cpp          | 26 +++++++++++++++++++
 1 file changed, 26 insertions(+)

diff --git a/inference-engine/tests/functional/plugin/cpu/subgraph_tests/src/param_result_custom_blob.cpp b/inference-engine/tests/functional/plugin/cpu/subgraph_tests/src/param_result_custom_blob.cpp
index 1168c4ee3e1db9..eb70d4a82aab3c 100644
--- a/inference-engine/tests/functional/plugin/cpu/subgraph_tests/src/param_result_custom_blob.cpp
+++ b/inference-engine/tests/functional/plugin/cpu/subgraph_tests/src/param_result_custom_blob.cpp
@@ -53,4 +53,30 @@ namespace {
                             ::testing::Values(CommonTestUtils::DEVICE_CPU),
                             ParameterResultSubgraphTest::getTestCaseName);
 }  // namespace
+
+class ParameterResultSameBlobTest : public ParameterResultSubgraphTest {
+protected:
+    void Infer() override {
+        constexpr size_t inferIterations = 10lu;
+
+        for (size_t i = 0; i < inferIterations; ++i) {
+            ParameterResultSubgraphTest::Infer();
+            ParameterResultSubgraphTest::Validate();
+        }
+    }
+    void Validate() override {
+        // Do nothing. We call Validate() in the Infer() method
+    }
+};
+
+TEST_P(ParameterResultSameBlobTest, CompareWithRefs) {
+    SKIP_IF_CURRENT_TEST_IS_DISABLED()
+
+    Run();
+}
+namespace {
+    INSTANTIATE_TEST_CASE_P(smoke_Check_Same_Blob, ParameterResultSameBlobTest,
+                            ::testing::Values(CommonTestUtils::DEVICE_CPU),
+                            ParameterResultSubgraphTest::getTestCaseName);
+}  // namespace
 }  // namespace CPULayerTestsDefinitions
\ No newline at end of file
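Annotation: the point of ParameterResultSameBlobTest is that repeated Infer() calls must keep honoring blobs fetched once, without the request silently reallocating them between runs. The caller-side pattern it guards looks roughly like this sketch, where `request` and `name` are placeholders:

    #include <ie_core.hpp>

    void repeatedInference(InferenceEngine::InferRequest& request, const std::string& name) {
        auto blob = request.GetBlob(name);  // fetched once, reused for every run
        for (size_t i = 0; i < 10; ++i) {
            // ... refill `blob` in place ...
            request.Infer();                // must keep reading the same memory
        }
    }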
From 59083a4c6ae85921052e4cf3ef0bf4abfbb8465c Mon Sep 17 00:00:00 2001
From: Maksim Kutakov
Date: Tue, 30 Mar 2021 13:52:08 +0300
Subject: [PATCH 04/11] Changed tensor descriptors check.

---
 .../src/mkldnn_plugin/mkldnn_infer_request.cpp | 18 ++++++++++++++++--
 1 file changed, 16 insertions(+), 2 deletions(-)

diff --git a/inference-engine/src/mkldnn_plugin/mkldnn_infer_request.cpp b/inference-engine/src/mkldnn_plugin/mkldnn_infer_request.cpp
index 360c982fa70620..1160b20f6fa016 100644
--- a/inference-engine/src/mkldnn_plugin/mkldnn_infer_request.cpp
+++ b/inference-engine/src/mkldnn_plugin/mkldnn_infer_request.cpp
@@ -263,8 +263,22 @@ InferenceEngine::Blob::Ptr MKLDNNPlugin::MKLDNNInferRequest::GetBlob(const std::
                 data = make_blob_with_precision(desc);
                 data->allocate();
             } else {
-                if (blobs[name]->getTensorDesc() != data->getTensorDesc()) {
-                    IE_THROW() << "Network input and output use the same name: " << name << " but expect different tensors.";
+                const auto& expectedTensorDesc = blobs[name]->getTensorDesc();
+
+                if (expectedTensorDesc.getPrecision() != data->getTensorDesc().getPrecision()) {
+                    IE_THROW(ParameterMismatch) << "Network input and output use the same name: " << name << " but expect blobs with different precision: "
+                                                << data->getTensorDesc().getPrecision() << " for input and " << expectedTensorDesc.getPrecision()
+                                                << " for output.";
+                }
+
+                if (expectedTensorDesc.getDims() != data->getTensorDesc().getDims()) {
+                    IE_THROW(ParameterMismatch) << "Network input and output use the same name: " << name << " but expect blobs with different shapes.";
+                }
+
+                if (data->getTensorDesc().getLayout() != InferenceEngine::Layout::ANY && expectedTensorDesc.getLayout() != InferenceEngine::Layout::ANY &&
+                    expectedTensorDesc.getBlockingDesc() != data->getTensorDesc().getBlockingDesc()) {
+                    IE_THROW(ParameterMismatch) << "Network input and output use the same name: " << name
+                                                << " but expect blobs with different blocking descriptors.";
                 }
             }
 
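Annotation: the check above, restated as a standalone predicate (a sketch, not the plugin code itself): two descriptors may serve one shared name only if precision and dims match, and blocking descriptors match whenever both layouts are concrete.

    #include <ie_common.h>
    #include <ie_layouts.h>

    bool descsCompatible(const InferenceEngine::TensorDesc& in, const InferenceEngine::TensorDesc& out) {
        if (in.getPrecision() != out.getPrecision())
            return false;
        if (in.getDims() != out.getDims())
            return false;
        // Layout::ANY carries no blocking information, so the blocking
        // comparison only applies when both sides are concrete.
        if (in.getLayout() != InferenceEngine::Layout::ANY &&
            out.getLayout() != InferenceEngine::Layout::ANY &&
            in.getBlockingDesc() != out.getBlockingDesc())
            return false;
        return true;
    }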
From 1499e6201a66e24df967e3004583143c66424f3c Mon Sep 17 00:00:00 2001
From: Maksim Kutakov
Date: Tue, 30 Mar 2021 13:52:50 +0300
Subject: [PATCH 05/11] PullOutputData can no longer change the outputs map.

---
 .../src/mkldnn_plugin/mkldnn_graph.cpp        | 18 ++----------------
 .../src/mkldnn_plugin/mkldnn_graph.h          |  2 +-
 2 files changed, 3 insertions(+), 17 deletions(-)

diff --git a/inference-engine/src/mkldnn_plugin/mkldnn_graph.cpp b/inference-engine/src/mkldnn_plugin/mkldnn_graph.cpp
index 197373005c5055..d0dda62c0d1c32 100644
--- a/inference-engine/src/mkldnn_plugin/mkldnn_graph.cpp
+++ b/inference-engine/src/mkldnn_plugin/mkldnn_graph.cpp
@@ -738,7 +738,7 @@ void MKLDNNGraph::PushInputData(const std::string& name, const InferenceEngine::
     }
 }
 
-void MKLDNNGraph::PullOutputData(BlobMap &out) {
+void MKLDNNGraph::PullOutputData(const BlobMap &out) {
     if (!IsReady())
         IE_THROW() << "Wrong state. Topology not ready.";
 
@@ -746,22 +746,8 @@ void MKLDNNGraph::PullOutputData(BlobMap &out) {
         auto name = outputMap.first;
         auto node = outputMap.second;
         const MKLDNNMemory& intr_blob = node->getParentEdgeAt(0)->getMemory();
-        if (out.find(name) == out.end()) {
-            // TODO [NM]: Do we really need this path?
-            // TODO: Create blob from MemoryDesc
-            Blob::Ptr outBlob = make_shared_blob<float>({Precision::FP32, node->getParentEdgeAt(0)->getDims().ToSizeVector(),
-                                                         TensorDesc::getLayoutByDims(node->getParentEdgeAt(0)->getDims().ToSizeVector())},
-                                                        reinterpret_cast<float*>(intr_blob.GetData()));
-            out[name] = outBlob;
-        }
-
-        Blob::Ptr &ext_blob = out[name];
-        // TODO: Why we allow allocation of output memory inside Infer call??
-        // Suggestion is to disable this behaviour
-        if (ext_blob->buffer() == nullptr) {
-            ext_blob->allocate();
-        }
+        const Blob::Ptr &ext_blob = out.at(name);
 
         auto srcPrec = MKLDNNExtensionUtils::DataTypeToIEPrecision(intr_blob.GetDataType());
         auto dstPrec = ext_blob->getTensorDesc().getPrecision();
diff --git a/inference-engine/src/mkldnn_plugin/mkldnn_graph.h b/inference-engine/src/mkldnn_plugin/mkldnn_graph.h
index b572558cda4c46..3811c8f8b70d2e 100644
--- a/inference-engine/src/mkldnn_plugin/mkldnn_graph.h
+++ b/inference-engine/src/mkldnn_plugin/mkldnn_graph.h
@@ -55,7 +55,7 @@ class MKLDNNGraph {
     }
 
     void PushInputData(const std::string& name, const InferenceEngine::Blob::Ptr &in);
-    void PullOutputData(InferenceEngine::BlobMap &out);
+    void PullOutputData(const InferenceEngine::BlobMap &out);
 
     void Infer(MKLDNNInferRequest* request = nullptr, int batch = -1);
 
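Annotation: with the const BlobMap, allocation responsibility moves entirely to the calling side; the graph only reads `out.at(name)`. A sketch of what the caller must now guarantee before PullOutputData runs; `make_blob_with_precision` is the plugin-API factory already used in the diffs above, and `desc`/`name` are placeholders.

    #include <blob_factory.hpp>
    #include <ie_blob.h>

    InferenceEngine::BlobMap prepareOutputs(const InferenceEngine::TensorDesc& desc,
                                            const std::string& name) {
        InferenceEngine::BlobMap out;
        out[name] = make_blob_with_precision(desc);
        out[name]->allocate();  // PullOutputData no longer allocates on the caller's behalf
        return out;
    }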
From 2f00248a04f3e683bb4edab167de24965033889c Mon Sep 17 00:00:00 2001
From: Maksim Kutakov
Date: Tue, 30 Mar 2021 16:06:16 +0300
Subject: [PATCH 06/11] Skip output that is not in the output query map.

---
 inference-engine/src/mkldnn_plugin/mkldnn_graph.cpp | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/inference-engine/src/mkldnn_plugin/mkldnn_graph.cpp b/inference-engine/src/mkldnn_plugin/mkldnn_graph.cpp
index d0dda62c0d1c32..12b6ef97aa4984 100644
--- a/inference-engine/src/mkldnn_plugin/mkldnn_graph.cpp
+++ b/inference-engine/src/mkldnn_plugin/mkldnn_graph.cpp
@@ -747,6 +747,10 @@ void MKLDNNGraph::PullOutputData(const BlobMap &out) {
         auto node = outputMap.second;
         const MKLDNNMemory& intr_blob = node->getParentEdgeAt(0)->getMemory();
+        if (!out.count(name)) {
+            continue;
+        }
+
         const Blob::Ptr &ext_blob = out.at(name);
 
         auto srcPrec = MKLDNNExtensionUtils::DataTypeToIEPrecision(intr_blob.GetDataType());
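Annotation: why the membership test matters after patch 05: the old non-const `out[name]` silently inserted a missing entry, while the new const `out.at(name)` throws a bare std::out_of_range for a missing key, so the graph has to check `count()` first (patch 10 below later turns this silent skip into an explicit IE_THROW). A sketch of the guarded-lookup idiom in isolation:

    #include <map>
    #include <stdexcept>
    #include <string>

    template <typename Map>
    const typename Map::mapped_type& checkedAt(const Map& m, const std::string& key) {
        // const at() would throw an uninformative std::out_of_range, so test
        // membership first and fail with a message that names the missing key.
        if (!m.count(key))
            throw std::runtime_error("outputs map does not contain: " + key);
        return m.at(key);
    }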
From 6e2537e975c2c830350da559d2247ca50ceb653d Mon Sep 17 00:00:00 2001
From: Maksim Kutakov
Date: Fri, 2 Apr 2021 17:33:16 +0300
Subject: [PATCH 07/11] "Different" precisions added to the test

---
 .../cpu/subgraph_tests/src/param_result_custom_blob.cpp | 5 +++++
 1 file changed, 5 insertions(+)

diff --git a/inference-engine/tests/functional/plugin/cpu/subgraph_tests/src/param_result_custom_blob.cpp b/inference-engine/tests/functional/plugin/cpu/subgraph_tests/src/param_result_custom_blob.cpp
index eb70d4a82aab3c..108294f53f220b 100644
--- a/inference-engine/tests/functional/plugin/cpu/subgraph_tests/src/param_result_custom_blob.cpp
+++ b/inference-engine/tests/functional/plugin/cpu/subgraph_tests/src/param_result_custom_blob.cpp
@@ -46,6 +46,11 @@ class ParameterResultCustomBlobTest : public ParameterResultSubgraphTest {
 TEST_P(ParameterResultCustomBlobTest, CompareWithRefs) {
     SKIP_IF_CURRENT_TEST_IS_DISABLED()
 
+    // Just to show that it is not possible to set different precisions for inputs and outputs with the same name.
+    // If it was possible, the input would have I8 precision and couldn't store data from the custom blob.
+    inPrc = Precision::I8;
+    outPrc = Precision::FP32;
+
     Run();
 }
 namespace {
From a351a23fda68e9c1fea3bd189e90c71e3232e9fe Mon Sep 17 00:00:00 2001
From: Maksim Kutakov
Date: Tue, 13 Apr 2021 15:55:48 +0300
Subject: [PATCH 08/11] Minor fix

---
 inference-engine/src/mkldnn_plugin/mkldnn_infer_request.cpp | 1 -
 1 file changed, 1 deletion(-)

diff --git a/inference-engine/src/mkldnn_plugin/mkldnn_infer_request.cpp b/inference-engine/src/mkldnn_plugin/mkldnn_infer_request.cpp
index 1160b20f6fa016..88d9a6174da1ef 100644
--- a/inference-engine/src/mkldnn_plugin/mkldnn_infer_request.cpp
+++ b/inference-engine/src/mkldnn_plugin/mkldnn_infer_request.cpp
@@ -289,7 +289,6 @@ InferenceEngine::Blob::Ptr MKLDNNPlugin::MKLDNNInferRequest::GetBlob(const std::
         }
         data = _outputs[name];
         checkBlob(data, name, false);
-        return data;
     }
     if (!data) {
         IE_THROW() << "Cannot find blob with name: " << name;
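Annotation: the precision experiment in patch 07 relies on SetBlob rejecting a blob whose precision diverges from the network's unified input/output precision. A caller-side sketch of that contract; `request`, `name`, and `i8Blob` are placeholders, and the catch uses the common InferenceEngine::Exception base rather than asserting the exact exception type.

    #include <ie_core.hpp>
    #include <iostream>

    void expectPrecisionMismatch(InferenceEngine::InferRequest& request,
                                 const std::string& name,
                                 const InferenceEngine::Blob::Ptr& i8Blob) {
        try {
            request.SetBlob(name, i8Blob);  // network side expects FP32 here
        } catch (const InferenceEngine::Exception& e) {
            // With the same-name support, mismatched precisions cannot be
            // satisfied for both roles, so the request refuses the blob.
            std::cerr << "rejected as expected: " << e.what() << std::endl;
        }
    }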
From 10632b70e15864159f95f05a2ea8549c7d9694a7 Mon Sep 17 00:00:00 2001
From: Maksim Kutakov
Date: Wed, 21 Apr 2021 14:17:39 +0300
Subject: [PATCH 09/11] Changes after rebase

---
 .../interface/ie_iinfer_request_internal.cpp  | 14 ++++++++------
 .../impl/ie_infer_request_internal.hpp        |  0
 .../interface/ie_iinfer_request_internal.hpp  |  1 -
 3 files changed, 8 insertions(+), 7 deletions(-)
 delete mode 100644 inference-engine/src/plugin_api/cpp_interfaces/impl/ie_infer_request_internal.hpp

diff --git a/inference-engine/src/inference_engine/cpp_interfaces/interface/ie_iinfer_request_internal.cpp b/inference-engine/src/inference_engine/cpp_interfaces/interface/ie_iinfer_request_internal.cpp
index 29f683a07561b3..6c7dca02c0c5d7 100644
--- a/inference-engine/src/inference_engine/cpp_interfaces/interface/ie_iinfer_request_internal.cpp
+++ b/inference-engine/src/inference_engine/cpp_interfaces/interface/ie_iinfer_request_internal.cpp
@@ -213,16 +213,18 @@ bool IInferRequestInternal::findInputAndOutputBlobByName(const std::string& name
                                        [&](const std::pair<std::string, DataPtr>& pair) {
                                            return pair.first == name;
                                        });
-    if (foundOutputPair == std::end(_networkOutputs) && (foundInputPair == std::end(_networkInputs))) {
-        IE_THROW(NotFound) << "Failed to find input or output with name: \'" << name << "\'";
-    }
+    bool retVal;
+
     if (foundInputPair != std::end(_networkInputs)) {
         foundInput = foundInputPair->second;
-        return true;
-    } else {
+        retVal = true;
+    } else if (foundOutputPair != std::end(_networkOutputs)) {
         foundOutput = foundOutputPair->second;
-        return false;
+        retVal = false;
+    } else {
+        IE_THROW(NotFound) << "Failed to find input or output with name: \'" << name << "\'";
     }
+    return retVal;
 }
 
 void IInferRequestInternal::checkBlob(const Blob::Ptr& blob, const std::string& name, bool isInput, const SizeVector& refDims) const {
diff --git a/inference-engine/src/plugin_api/cpp_interfaces/impl/ie_infer_request_internal.hpp b/inference-engine/src/plugin_api/cpp_interfaces/impl/ie_infer_request_internal.hpp
deleted file mode 100644
index e69de29bb2d1d6..00000000000000
diff --git a/inference-engine/src/plugin_api/cpp_interfaces/interface/ie_iinfer_request_internal.hpp b/inference-engine/src/plugin_api/cpp_interfaces/interface/ie_iinfer_request_internal.hpp
index 7c8d65b7686c21..0c409cc2639381 100644
--- a/inference-engine/src/plugin_api/cpp_interfaces/interface/ie_iinfer_request_internal.hpp
+++ b/inference-engine/src/plugin_api/cpp_interfaces/interface/ie_iinfer_request_internal.hpp
@@ -208,7 +208,6 @@ class INFERENCE_ENGINE_API_CLASS(IInferRequestInternal) : public std::enable_sha
      * @param foundOutput A pointer to output DataPtr if found.
      * @return `True` - if loaded network has input with provided name,
      *         `false` - if loaded network has output with provided name
-     * @throws [parameter_mismatch] exception if input and output has the same name
     * @throws [not_found] exception if there is no input and output layers with given name
      */
     bool findInputAndOutputBlobByName(const std::string& name, InputInfo::Ptr& foundInput, DataPtr& foundOutput) const;
From c2448d60f8de0b5aa36bb80cfe945a6e2dda6b7e Mon Sep 17 00:00:00 2001
From: Maksim Kutakov
Date: Wed, 12 May 2021 12:09:11 +0300
Subject: [PATCH 10/11] Fixes after review

---
 inference-engine/src/mkldnn_plugin/mkldnn_graph.cpp            | 2 +-
 inference-engine/src/mkldnn_plugin/mkldnn_infer_request.cpp    | 2 +-
 .../plugin/cpu/subgraph_tests/src/param_result_custom_blob.cpp | 1 +
 3 files changed, 3 insertions(+), 2 deletions(-)

diff --git a/inference-engine/src/mkldnn_plugin/mkldnn_graph.cpp b/inference-engine/src/mkldnn_plugin/mkldnn_graph.cpp
index 12b6ef97aa4984..86820c88ce405a 100644
--- a/inference-engine/src/mkldnn_plugin/mkldnn_graph.cpp
+++ b/inference-engine/src/mkldnn_plugin/mkldnn_graph.cpp
@@ -748,7 +748,7 @@ void MKLDNNGraph::PullOutputData(const BlobMap &out) {
         const MKLDNNMemory& intr_blob = node->getParentEdgeAt(0)->getMemory();
 
         if (!out.count(name)) {
-            continue;
+            IE_THROW(Unexpected) << "The network outputs do not contain mkldnn graph output node name: \"" << name << "\"";
         }
 
         const Blob::Ptr &ext_blob = out.at(name);
diff --git a/inference-engine/src/mkldnn_plugin/mkldnn_infer_request.cpp b/inference-engine/src/mkldnn_plugin/mkldnn_infer_request.cpp
index 88d9a6174da1ef..392226d06d4eba 100644
--- a/inference-engine/src/mkldnn_plugin/mkldnn_infer_request.cpp
+++ b/inference-engine/src/mkldnn_plugin/mkldnn_infer_request.cpp
@@ -251,7 +251,7 @@ InferenceEngine::Blob::Ptr MKLDNNPlugin::MKLDNNInferRequest::GetBlob(const std::
         if (_outputs.find(name) == _outputs.end()) {
             if (!data) {
                 InferenceEngine::TensorDesc desc = _networkOutputs[name]->getTensorDesc();
-        desc.setPrecision(normalizeToSupportedPrecision(desc.getPrecision()));
+                desc.setPrecision(normalizeToSupportedPrecision(desc.getPrecision()));
 
                 // WA: need to avoid exception thrown when we compare blocking desc in SetBlob
diff --git a/inference-engine/tests/functional/plugin/cpu/subgraph_tests/src/param_result_custom_blob.cpp b/inference-engine/tests/functional/plugin/cpu/subgraph_tests/src/param_result_custom_blob.cpp
index 108294f53f220b..65ac992ec76ac1 100644
--- a/inference-engine/tests/functional/plugin/cpu/subgraph_tests/src/param_result_custom_blob.cpp
+++ b/inference-engine/tests/functional/plugin/cpu/subgraph_tests/src/param_result_custom_blob.cpp
@@ -20,6 +20,7 @@ class ParameterResultCustomBlobTest : public ParameterResultSubgraphTest {
         auto inputBlob = inputs.front();
         const size_t elementsCount = inputBlob->size();
         for (size_t i = 0; i < inferIterations; ++i) {
+            CommonTestUtils::fill_data_random(inputBlob, 10, 0, 1, i);
             const auto& inputsInfo = cnnNetwork.getInputsInfo().begin()->second;
             std::string inputName = cnnNetwork.getInputsInfo().begin()->first;
 
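Annotation: the review fix in patch 10 seeds fill_data_random with the iteration index, so every run writes a distinct pattern and a stale input cached from run i-1 can no longer pass validation in run i. A sketch of that refill loop in isolation; the header path is an assumption, and the argument meanings follow the call as used in the test above.

    #include "common_test_utils/data_utils.hpp"  // assumed location of fill_data_random
    #include <ie_blob.h>

    void refillEachRun(InferenceEngine::Blob::Ptr& inputBlob, size_t inferIterations) {
        for (size_t i = 0; i < inferIterations; ++i) {
            // range = 10, start_from = 0, resolution k = 1, seed = i (per the test)
            CommonTestUtils::fill_data_random(inputBlob, 10, 0, 1, i);
            // ... copy into the custom blob, SetBlob, Infer, Validate as above ...
        }
    }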
From 7ced6a26bd583da3461cd33a2ed9e689b0a7a6b0 Mon Sep 17 00:00:00 2001
From: Maksim Kutakov
Date: Tue, 25 May 2021 18:56:24 +0300
Subject: [PATCH 11/11] GetBlob performance fix

---
 inference-engine/src/mkldnn_plugin/mkldnn_graph.h    |  8 ++++++++
 .../src/mkldnn_plugin/mkldnn_infer_request.cpp       | 13 ++++++-------
 2 files changed, 14 insertions(+), 7 deletions(-)

diff --git a/inference-engine/src/mkldnn_plugin/mkldnn_graph.h b/inference-engine/src/mkldnn_plugin/mkldnn_graph.h
index 3811c8f8b70d2e..822cdeb387b6a6 100644
--- a/inference-engine/src/mkldnn_plugin/mkldnn_graph.h
+++ b/inference-engine/src/mkldnn_plugin/mkldnn_graph.h
@@ -79,6 +79,14 @@ class MKLDNNGraph {
         return outputNodesMap;
     }
 
+    bool hasInputWithName(const std::string& name) const {
+        return inputNodesMap.count(name);
+    }
+
+    bool hasOutputWithName(const std::string& name) const {
+        return outputNodesMap.count(name);
+    }
+
     mkldnn::engine getEngine() const {
         return eng;
     }
diff --git a/inference-engine/src/mkldnn_plugin/mkldnn_infer_request.cpp b/inference-engine/src/mkldnn_plugin/mkldnn_infer_request.cpp
index 392226d06d4eba..2496ea27fb6913 100644
--- a/inference-engine/src/mkldnn_plugin/mkldnn_infer_request.cpp
+++ b/inference-engine/src/mkldnn_plugin/mkldnn_infer_request.cpp
@@ -212,10 +212,9 @@ InferenceEngine::Blob::Ptr MKLDNNPlugin::MKLDNNInferRequest::GetBlob(const std::
 
     InferenceEngine::Blob::Ptr data;
 
-    InferenceEngine::BlobMap blobs;
-    graph->getInputBlobs(blobs);
-
-    if (blobs.find(name) != blobs.end()) {
+    if (graph->hasInputWithName(name)) {
+        InferenceEngine::BlobMap blobs;
+        graph->getInputBlobs(blobs);
         // ROI blob is returned only if it was set previously.
         auto it = _preProcData.find(name);
         if (it != _preProcData.end()) {
@@ -245,9 +244,9 @@ InferenceEngine::Blob::Ptr MKLDNNPlugin::MKLDNNInferRequest::GetBlob(const std::
         checkBlob(data, name, true);
     }
 
-    blobs.clear();
-    graph->getOutputBlobs(blobs);
-    if (blobs.find(name) != blobs.end()) {
+    if (graph->hasOutputWithName(name)) {
+        InferenceEngine::BlobMap blobs;
+        graph->getOutputBlobs(blobs);
         if (_outputs.find(name) == _outputs.end()) {
             if (!data) {
                 InferenceEngine::TensorDesc desc = _networkOutputs[name]->getTensorDesc();