Skip to content

Commit

Permalink
[CPU] Get/Set Blob overhead has been eliminated. (openvinotoolkit#6737)
Browse files Browse the repository at this point in the history
  • Loading branch information
maxnick authored and rnugmanx committed Aug 26, 2021
1 parent 9702bd1 commit 2e85bcd
Show file tree
Hide file tree
Showing 3 changed files with 33 additions and 24 deletions.
16 changes: 10 additions & 6 deletions inference-engine/src/mkldnn_plugin/mkldnn_graph.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -965,16 +965,20 @@ Config MKLDNNGraph::getProperty() const {
return config;
}

void MKLDNNGraph::getInputBlobs(InferenceEngine::BlobMap &resp) {
for (auto &it : inputNodesMap) {
resp[it.first] = it.second->getChildEdgeAt(0)->getBlob();
Blob::Ptr MKLDNNGraph::getInputBlob(const std::string& name) {
    // Locate the input node registered under `name` and hand back the blob
    // attached to its first child edge. Returns nullptr when the graph has
    // no input with that name — callers are expected to check.
    const auto entry = inputNodesMap.find(name);
    if (entry == inputNodesMap.end()) {
        return nullptr;
    }
    return entry->second->getChildEdgeAt(0)->getBlob();
}

void MKLDNNGraph::getOutputBlobs(InferenceEngine::BlobMap &resp) {
for (auto &it : outputNodesMap) {
resp[it.first] = it.second->getParentEdgeAt(0)->getBlob();
Blob::Ptr MKLDNNGraph::getOutputBlob(const std::string& name) {
    // Locate the output node registered under `name` and hand back the blob
    // attached to its first parent edge. Returns nullptr when the graph has
    // no output with that name — callers are expected to check.
    const auto entry = outputNodesMap.find(name);
    if (entry == outputNodesMap.end()) {
        return nullptr;
    }
    return entry->second->getParentEdgeAt(0)->getBlob();
}

void MKLDNNGraph::RemoveEdge(MKLDNNEdgePtr& edge) {
Expand Down
4 changes: 2 additions & 2 deletions inference-engine/src/mkldnn_plugin/mkldnn_graph.h
Original file line number Diff line number Diff line change
Expand Up @@ -44,8 +44,8 @@ class MKLDNNGraph {
void setProperty(const std::map<std::string, std::string> &properties);
Config getProperty() const;

void getInputBlobs(InferenceEngine::BlobMap &in_map);
void getOutputBlobs(InferenceEngine::BlobMap &out_map);
InferenceEngine::Blob::Ptr getInputBlob(const std::string& name);
InferenceEngine::Blob::Ptr getOutputBlob(const std::string& name);

template<typename NET>
void CreateGraph(NET &network,
Expand Down
37 changes: 21 additions & 16 deletions inference-engine/src/mkldnn_plugin/mkldnn_infer_request.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -213,8 +213,6 @@ InferenceEngine::Blob::Ptr MKLDNNPlugin::MKLDNNInferRequest::GetBlob(const std::
InferenceEngine::Blob::Ptr data;

if (graph->hasInputWithName(name)) {
InferenceEngine::BlobMap blobs;
graph->getInputBlobs(blobs);
// ROI blob is returned only if it was set previously.
auto it = _preProcData.find(name);
if (it != _preProcData.end()) {
Expand All @@ -223,7 +221,12 @@ InferenceEngine::Blob::Ptr MKLDNNPlugin::MKLDNNInferRequest::GetBlob(const std::
}

if (_inputs.find(name) == _inputs.end()) {
InferenceEngine::TensorDesc desc = blobs[name]->getTensorDesc();
auto pBlob = graph->getInputBlob(name);
if (!pBlob) {
IE_THROW() << "MKLDNN graph doesn't contain input node with name: " << name;
}

InferenceEngine::TensorDesc desc = pBlob->getTensorDesc();

if (_networkInputs.find(name) != _networkInputs.end()) {
InferenceEngine::Layout l = _networkInputs[name]->getLayout();
Expand All @@ -235,7 +238,7 @@ InferenceEngine::Blob::Ptr MKLDNNPlugin::MKLDNNInferRequest::GetBlob(const std::

_inputs[name] = make_blob_with_precision(desc);
_inputs[name]->allocate();
if (blobs[name]->getTensorDesc() == desc &&
if (pBlob->getTensorDesc() == desc &&
graph->_normalizePreprocMap.find(name) == graph->_normalizePreprocMap.end() && !graph->getProperty().batchLimit) {
externalPtr[name] = _inputs[name]->buffer();
}
Expand All @@ -258,9 +261,12 @@ InferenceEngine::Blob::Ptr MKLDNNPlugin::MKLDNNInferRequest::GetBlob(const std::
}

if (graph->hasOutputWithName(name)) {
InferenceEngine::BlobMap blobs;
graph->getOutputBlobs(blobs);
if (_outputs.find(name) == _outputs.end()) {
auto pBlob = graph->getOutputBlob(name);
if (!pBlob) {
IE_THROW() << "MKLDNN graph doesn't contain output node with name: " << name;
}

if (!data) {
InferenceEngine::TensorDesc desc = _networkOutputs[name]->getTensorDesc();
desc.setPrecision(normalizeToSupportedPrecision(desc.getPrecision()));
Expand All @@ -275,7 +281,7 @@ InferenceEngine::Blob::Ptr MKLDNNPlugin::MKLDNNInferRequest::GetBlob(const std::
data = make_blob_with_precision(desc);
data->allocate();
} else {
const auto& expectedTensorDesc = blobs[name]->getTensorDesc();
const auto& expectedTensorDesc = pBlob->getTensorDesc();

if (expectedTensorDesc.getPrecision() != data->getTensorDesc().getPrecision()) {
IE_THROW(ParameterMismatch) << "Network input and output use the same name: " << name << " but expect blobs with different precision: "
Expand All @@ -295,7 +301,7 @@ InferenceEngine::Blob::Ptr MKLDNNPlugin::MKLDNNInferRequest::GetBlob(const std::
}

_outputs[name] = data;
if (!externalPtr.count(name) && data->getTensorDesc() == blobs[name]->getTensorDesc() && !graph->getProperty().batchLimit) {
if (!externalPtr.count(name) && data->getTensorDesc() == pBlob->getTensorDesc() && !graph->getProperty().batchLimit) {
externalPtr[name] = data->buffer();
}
}
Expand Down Expand Up @@ -366,12 +372,12 @@ void MKLDNNPlugin::MKLDNNInferRequest::SetBlob(const std::string& name, const In
IE_THROW(ParameterMismatch) << "Failed to set input blob. Blocking descriptor mismatch.";
}

InferenceEngine::BlobMap blobs;
graph->getInputBlobs(blobs);
if (blobs.find(name) == blobs.end())
auto pBlob = graph->getInputBlob(name);
if (!pBlob) {
IE_THROW() << "MKLDNN graph doesn't contain input node with name: " << name;
}

if (data->getTensorDesc() == blobs.at(name)->getTensorDesc() &&
if (data->getTensorDesc() == pBlob->getTensorDesc() &&
graph->_normalizePreprocMap.find(name) == graph->_normalizePreprocMap.end() && !graph->getProperty().batchLimit) {
externalPtr[name] = data->buffer();
} else if (externalPtr.find(name) != externalPtr.end()) {
Expand Down Expand Up @@ -404,12 +410,11 @@ void MKLDNNPlugin::MKLDNNInferRequest::SetBlob(const std::string& name, const In
IE_THROW(ParameterMismatch) << "Failed to set output blob. Blocking descriptor mismatch.";
}

InferenceEngine::BlobMap blobs;
graph->getOutputBlobs(blobs);
if (blobs.find(name) == blobs.end())
auto pBlob = graph->getOutputBlob(name);
if (!pBlob)
IE_THROW() << "MKLDNN graph doesn't contain output node with name: " << name;

if (data->getTensorDesc() == blobs.at(name)->getTensorDesc() &&
if (data->getTensorDesc() == pBlob->getTensorDesc() &&
!graph->getProperty().batchLimit) {
externalPtr[name] = data->buffer();
} else if (externalPtr.find(name) != externalPtr.end()) {
Expand Down

0 comments on commit 2e85bcd

Please sign in to comment.