Skip to content

Commit

Permalink
Removed legacy IE shape infer (#4211)
Browse files Browse the repository at this point in the history
* Removed legacy IE shape infer

* Removed legacy shape infer tests

* Updated tests and made the IR Reader load old experimental and extension ops as opset6

* Changed the opset of some ops only if they are currently experimental/extension, to avoid situations like opset1::Proposal -> opset6::Proposal

Co-authored-by: Evgeny Lazarev <[email protected]>
  • Loading branch information
lazarevevgeny and undef-nnov authored Feb 10, 2021
1 parent 929fa26 commit 48aa1c3
Show file tree
Hide file tree
Showing 15 changed files with 86 additions and 655 deletions.
1 change: 0 additions & 1 deletion inference-engine/src/inference_engine/CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,6 @@ file (GLOB LIBRARY_SRC
${CMAKE_CURRENT_SOURCE_DIR}/cpp/*.cpp
${CMAKE_CURRENT_SOURCE_DIR}/threading/*.cpp
${CMAKE_CURRENT_SOURCE_DIR}/cpp/*.cpp
${CMAKE_CURRENT_SOURCE_DIR}/shape_infer/ie_built_in_holder.cpp
)

# TODO: WA for OneHot pass usage in reshape
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,6 @@
#include "exec_graph_info.hpp"
#include "ie_itt.hpp"
#include "generic_ie.hpp"
#include "shape_infer/ie_built_in_holder.hpp"

using namespace std;
using namespace InferenceEngine;
Expand Down Expand Up @@ -114,9 +113,6 @@ CNNNetworkNGraphImpl::CNNNetworkNGraphImpl(
network.setInputInfo(info);
};

// Add shape infer method for old operations which are not included to opset1, opset2 and opset3
::ngraph::op::GenericIE::addExtension(_ngraph_function, std::make_shared<ShapeInfer::BuiltInShapeInferHolder>());

reshape();
for (const auto& layer : _ngraph_function->get_parameters()) {
std::string outName = layer->get_friendly_name();
Expand Down
58 changes: 0 additions & 58 deletions inference-engine/src/inference_engine/generic_ie.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,6 @@

#include "blob_factory.hpp"
#include <ie_ngraph_utils.hpp>
#include "shape_infer/ie_ishape_infer_extension.hpp"
#include "ngraph/util.hpp"
#include "ngraph/graph_util.hpp"
#include "ngraph/validation_util.hpp"
Expand Down Expand Up @@ -85,63 +84,6 @@ void ngraph::op::GenericIE::validate_and_infer_types() {
}
return get_output_element_type(index);
};
// Try to find extension with shape inference implementation and apply it
for (const auto& ext : extensions) {
IE_SUPPRESS_DEPRECATED_START
InferenceEngine::IShapeInferImpl::Ptr impl;
InferenceEngine::StatusCode ret = ext->getShapeInferImpl(impl, type.c_str(), nullptr);
if (ret != InferenceEngine::StatusCode::OK || !impl) continue;

std::vector<InferenceEngine::Blob::CPtr> inputs;
std::map<std::string, std::string> parameters;
std::map<std::string, InferenceEngine::Blob::Ptr> blobs;
std::vector<InferenceEngine::SizeVector> outShapes;

for (uint64_t i = 0; i < get_input_size(); i++) {
PartialShape this_input_shape = get_input_partial_shape(i);

if (!this_input_shape.is_static()) {
// Set dynamic output shapes if input shapes are not defined
for (size_t output_index = 0; output_index < outputs.size(); output_index++) {
set_output_type(output_index, get_precision(output_index), PartialShape::dynamic());
}
return;
}

Shape this_ishape = get_input_shape(i);
InferenceEngine::SizeVector dims = this_ishape;
InferenceEngine::Blob::Ptr input = make_blob_with_precision(InferenceEngine::TensorDesc(
InferenceEngine::details::convertPrecision(get_input_element_type(i)), dims,
InferenceEngine::TensorDesc::getLayoutByDims(dims)));
inputs.emplace_back(input);
}

for (const auto& attr : params) {
if (attr.second.is<std::string>()) {
parameters[attr.first] = attr.second.as<std::string>();
} else if (attr.second.is<InferenceEngine::Blob::CPtr>()) {
auto cBlob = attr.second.as<InferenceEngine::Blob::CPtr>();
auto wBlob = std::const_pointer_cast<InferenceEngine::Blob>(cBlob);
blobs[attr.first] = wBlob;
} else if (attr.second.is<InferenceEngine::Blob::Ptr>()) {
auto wBlob = attr.second.as<InferenceEngine::Blob::Ptr>();
blobs[attr.first] = wBlob;
} else {
THROW_IE_EXCEPTION << "Generic node for layer " << get_friendly_name() << " with type " << type
<< " has incorrect parameter " << attr.first << "!";
}
}

ret = impl->inferShapes(inputs, parameters, blobs, outShapes, nullptr);
IE_SUPPRESS_DEPRECATED_END

if (ret != InferenceEngine::StatusCode::OK || outShapes.size() != outputs.size()) continue;

for (size_t output_index = 0; output_index < outputs.size(); output_index++) {
set_output_type(output_index, get_precision(output_index), Shape(outShapes[output_index]));
}
return;
}

// Extensions are not loaded when we create nGraph function
// First call: create node
Expand Down

This file was deleted.

This file was deleted.

145 changes: 0 additions & 145 deletions inference-engine/src/inference_engine/shape_infer/ie_built_in_impl.hpp

This file was deleted.

Loading

0 comments on commit 48aa1c3

Please sign in to comment.