Skip to content

Commit

Permalink
Visitor API deserialization for ops from opset1 (openvinotoolkit#3001)
Browse files Browse the repository at this point in the history
* Comment out layer creator and node converter for Elu and Clamp ops.

* Add deserialization for GRN, HardSigmoid, GatherTree.

* Add implementation for Interp, GRN, GatherTree.

* Remove layer creation from ie_ir_parser.cpp

* Remove header files from cnn builder.

* Change op's type in elu functional test, remove name transformation for this op from ir_parser.

* Remove empty lines.
  • Loading branch information
sdurawa authored and jiwaszki committed Jan 15, 2021
1 parent be71b33 commit 7a3227f
Show file tree
Hide file tree
Showing 10 changed files with 92 additions and 172 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -32,6 +32,7 @@ class INFERENCE_ENGINE_API_CLASS(GatherTreeIE) : public Op {
const Output<Node>& end_token);

void validate_and_infer_types() override;
bool visit_attributes(AttributeVisitor& visitor) override;

std::shared_ptr<Node> clone_with_new_inputs(const OutputVector& new_args) const override;
};
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,7 @@ class INFERENCE_ENGINE_API_CLASS(HardSigmoid_IE) : public Op {

std::shared_ptr<Node> clone_with_new_inputs(const OutputVector& new_args) const override;
void validate_and_infer_types() override;
bool visit_attributes(AttributeVisitor& visitor) override;

float get_alpha() const { return m_alpha; }
void set_alpha(float alpha) { m_alpha = alpha; }
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -37,6 +37,8 @@ class INFERENCE_ENGINE_API_CLASS(Interp) : public Op {

void validate_and_infer_types() override;

bool visit_attributes(AttributeVisitor& visitor) override;

std::shared_ptr<Node> clone_with_new_inputs(const OutputVector& new_args) const override;

InterpolateIEAttrs get_attrs() { return m_attrs; }
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -619,7 +619,6 @@ InferenceEngine::details::CNNLayerCreator::CNNLayerCreator(const std::shared_ptr
auto res = std::make_shared<InferenceEngine::CNNLayer>(attrs);
res->params = params;
return res;

});

addSpecificCreator({"NonMaxSuppressionIE3"}, [](const std::shared_ptr<::ngraph::Node>& node,
Expand Down Expand Up @@ -850,6 +849,14 @@ InferenceEngine::details::CNNLayerCreator::CNNLayerCreator(const std::shared_ptr
return res;
});

// Creator for opset1 Clamp: builds a legacy ClampLayer of type "Clamp".
// min/max arrive pre-serialized in `params` (filled by the visitor API)
// and are copied into the layer wholesale.
addSpecificCreator({"Clamp"}, [](const std::shared_ptr<::ngraph::Node>& node,
const std::map<std::string, std::string>& params) -> CNNLayerPtr {
LayerParams attrs = {node->get_friendly_name(), "Clamp", details::convertPrecision(node->get_output_element_type(0))};
auto res = std::make_shared<InferenceEngine::ClampLayer>(attrs);
res->params = params;  // copy visitor-serialized attributes as-is
return res;
});

addSpecificCreator({"LRN_IE"}, [](const std::shared_ptr<::ngraph::Node>& node,
const std::map<std::string, std::string>& params) -> CNNLayerPtr {
LayerParams attrs = {node->get_friendly_name(), "Norm", details::convertPrecision(node->get_output_element_type(0))};
Expand All @@ -858,6 +865,14 @@ InferenceEngine::details::CNNLayerCreator::CNNLayerCreator(const std::shared_ptr
return res;
});

// Creator for opset1 Elu: builds a generic CNNLayer of legacy type "elu"
// (lower-case matches the type the removed NodeConverter produced, so
// downstream plugins keep seeing the same layer type).
addSpecificCreator({"Elu"}, [](const std::shared_ptr<::ngraph::Node>& node,
const std::map<std::string, std::string>& params) -> CNNLayerPtr {
LayerParams attrs = {node->get_friendly_name(), "elu", details::convertPrecision(node->get_output_element_type(0))};
auto res = std::make_shared<InferenceEngine::CNNLayer>(attrs);
res->params = params;  // "alpha" comes through the visitor-serialized params
return res;
});

addSpecificCreator({"MatMul"}, [](const std::shared_ptr<::ngraph::Node>& node,
const std::map<std::string, std::string>& params) -> CNNLayerPtr {
LayerParams attrs = {node->get_friendly_name(), "Gemm", details::convertPrecision(node->get_output_element_type(0))};
Expand All @@ -866,6 +881,21 @@ InferenceEngine::details::CNNLayerCreator::CNNLayerCreator(const std::shared_ptr
return res;
});

// Creator for GatherTreeIE: maps the internal op to a legacy CNNLayer of
// type "GatherTree". GatherTree currently has no attributes of its own,
// but the serialized parameters are still propagated — every sibling
// creator does `res->params = params;`, and dropping the map here left
// the lambda's `params` argument silently unused.
addSpecificCreator({"GatherTreeIE"}, [](const std::shared_ptr<::ngraph::Node>& node,
                                        const std::map<std::string, std::string>& params) -> CNNLayerPtr {
    LayerParams attrs = {node->get_friendly_name(), "GatherTree", details::convertPrecision(node->get_output_element_type(0))};
    auto res = std::make_shared<InferenceEngine::CNNLayer>(attrs);
    res->params = params;  // consistency fix: was dropped, `params` went unused
    return res;
});

// Creator for opset1 GRN: builds a legacy GRNLayer.
// The "bias" attribute arrives via the visitor-serialized `params`
// (previously the NodeConverter read it from the casted op directly).
addSpecificCreator({"GRN"}, [](const std::shared_ptr<::ngraph::Node>& node,
const std::map<std::string, std::string>& params) -> CNNLayerPtr {
LayerParams attrs = {node->get_friendly_name(), "GRN", details::convertPrecision(node->get_output_element_type(0))};
auto res = std::make_shared<InferenceEngine::GRNLayer>(attrs);
res->params = params;  // copy visitor-serialized attributes as-is
return res;
});

addSpecificCreator({"OneHotIE"}, [](const std::shared_ptr<::ngraph::Node>& node,
const std::map<std::string, std::string>& params) -> CNNLayerPtr {
LayerParams attrs = {node->get_friendly_name(), "OneHot", details::convertPrecision(node->get_output_element_type(0))};
Expand All @@ -874,6 +904,44 @@ InferenceEngine::details::CNNLayerCreator::CNNLayerCreator(const std::shared_ptr
return res;
});

// Creator for HardSigmoid_IE: maps the internal op to legacy type "HardSigmoid".
// alpha/beta are read from the op object itself (not from `params`), because
// HardSigmoid_IE stores them as float members rather than visitor attributes.
addSpecificCreator({"HardSigmoid_IE"}, [](const std::shared_ptr<::ngraph::Node>& node,
const std::map<std::string, std::string>& params) -> CNNLayerPtr {
LayerParams attrs = {node->get_friendly_name(), "HardSigmoid", details::convertPrecision(node->get_output_element_type(0))};
auto res = std::make_shared<InferenceEngine::CNNLayer>(attrs);

// The cast must succeed to reach the typed getters; fail loudly otherwise.
auto castedLayer = std::dynamic_pointer_cast<ngraph::op::HardSigmoid_IE>(node);
if (!castedLayer)
THROW_IE_EXCEPTION << "Cannot get " << attrs.type << " layer " << attrs.name;

res->params["alpha"] = Builder::asString(castedLayer->get_alpha());
res->params["beta"] = Builder::asString(castedLayer->get_beta());
return res;
});

// Creator for the legacy Interp op: builds a CNNLayer of type "Interp" from
// the visitor-serialized attributes in `params`.
// Throws if the node is not an ngraph::op::Interp, or if it requests
// features the legacy layer does not support (antialias, non-"linear" mode).
addSpecificCreator({"Interp"}, [](const std::shared_ptr<::ngraph::Node>& node,
                                  const std::map<std::string, std::string>& params) -> CNNLayerPtr {
    LayerParams attrs = {node->get_friendly_name(), "Interp", details::convertPrecision(node->get_output_element_type(0))};
    auto castedLayer = std::dynamic_pointer_cast<ngraph::op::Interp>(node);
    if (!castedLayer) THROW_IE_EXCEPTION << "Cannot get " << attrs.type << " layer " << attrs.name;

    auto interp_attrs = castedLayer->get_attrs();

    if (interp_attrs.antialias) {
        THROW_IE_EXCEPTION << "Interp do not support antialias";
    }
    if (interp_attrs.mode != "linear") {
        THROW_IE_EXCEPTION << "Interp do not support mode '" << interp_attrs.mode << "'";
    }

    auto res = std::make_shared<InferenceEngine::CNNLayer>(attrs);
    res->params = params;

    // Take the flag from the op itself instead of re-parsing params["align_corners"]
    // through an istringstream: the stream-based parse declared the bool
    // uninitialized, mis-read "true"/"false" spellings (operator>> without
    // std::boolalpha only accepts 0/1), and params.at() would throw if the key
    // were ever missing. The node's attrs are the authoritative source.
    res->params["align_corners"] = interp_attrs.align_corners ? "1" : "0";
    return res;
});

addSpecificCreator({"PadIE"}, [](const std::shared_ptr<::ngraph::Node>& node,
const std::map<std::string, std::string>& params) -> CNNLayerPtr {
LayerParams attrs = {node->get_friendly_name(), "Pad", details::convertPrecision(node->get_output_element_type(0))};
Expand Down Expand Up @@ -929,7 +997,6 @@ InferenceEngine::details::CNNLayerCreator::CNNLayerCreator(const std::shared_ptr
InferenceEngine::details::addBlob(biasNode, res, InferenceEngine::details::biases);
}
}

return res;
});
}
Expand Down Expand Up @@ -960,24 +1027,18 @@ void convertFunctionToICNNNetwork(const std::shared_ptr<const ::ngraph::Function
};
const static std::vector<std::shared_ptr<Builder::INodeConverter>> convertors = {
std::make_shared<Builder::NodeConverter<::ngraph::op::v1::AvgPool>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::Clamp>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::CropIE>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::Convert>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::CTCGreedyDecoder>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::v1::DeformableConvolution>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::v1::DeformablePSROIPooling>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::v1::Reshape>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::Eltwise>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::Elu>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::Ceiling>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::GatherIE>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::GatherTreeIE>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::Interp>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::v0::Interpolate>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::MVN>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::FullyConnected>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::GenericIE>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::GRN>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::v1::MaxPool>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::v1::Minimum>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::NormalizeIE>>(),
Expand Down Expand Up @@ -1006,7 +1067,6 @@ void convertFunctionToICNNNetwork(const std::shared_ptr<const ::ngraph::Function
std::make_shared<Builder::NodeConverter<::ngraph::op::TileIE>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::TensorIterator>>(),
std::make_shared<Builder::NodeConverter<::ngraph::opset5::Loop>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::HardSigmoid_IE>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::ShuffleChannels>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::v4::Interpolate>>(),
std::make_shared<Builder::NodeConverter<::ExecGraphInfoSerialization::ExecutionNode>>(),
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,6 @@
#include "legacy/ngraph_ops/eltwise.hpp"
#include "legacy/ngraph_ops/fully_connected.hpp"
#include "legacy/ngraph_ops/gather_ie.hpp"
#include "legacy/ngraph_ops/gather_tree_ie.hpp"
#include "legacy/ngraph_ops/gru_cell_ie.hpp"
#include "legacy/ngraph_ops/interp.hpp"
#include "legacy/ngraph_ops/lstm_cell_ie.hpp"
Expand All @@ -28,7 +27,6 @@
#include "legacy/ngraph_ops/scaleshift.hpp"
#include "legacy/ngraph_ops/tile_ie.hpp"
#include "legacy/ngraph_ops/rnn_cell_ie.hpp"
#include "legacy/ngraph_ops/hard_sigmoid_ie.hpp"

#include "generic_ie.hpp"
#include "exec_graph_info.hpp"
Expand Down Expand Up @@ -523,19 +521,6 @@ CNNLayer::Ptr NodeConverter<ngraph::op::CropIE>::createLayer(const std::shared_p
return res;
}

template <>
CNNLayer::Ptr NodeConverter<ngraph::op::Clamp>::createLayer(const std::shared_ptr<ngraph::Node>& layer) const {
LayerParams params = {layer->get_friendly_name(), "Clamp",
details::convertPrecision(layer->get_output_element_type(0))};
auto res = std::make_shared<InferenceEngine::ClampLayer>(params);
auto castedLayer = ngraph::as_type_ptr<ngraph::op::Clamp>(layer);
if (castedLayer == nullptr) THROW_IE_EXCEPTION << "Cannot get " << params.type << " layer " << params.name;

res->params["min"] = asString(castedLayer->get_min());
res->params["max"] = asString(castedLayer->get_max());
return res;
}

template <>
CNNLayer::Ptr NodeConverter<ngraph::op::v1::Softmax>::createLayer(const std::shared_ptr<ngraph::Node>& layer) const {
LayerParams params = {layer->get_friendly_name(), "SoftMax",
Expand Down Expand Up @@ -984,14 +969,6 @@ CNNLayer::Ptr NodeConverter<ngraph::op::GatherIE>::createLayer(const std::shared
return res;
}

template <>
CNNLayer::Ptr NodeConverter<ngraph::op::GatherTreeIE>::createLayer(const std::shared_ptr<ngraph::Node>& layer) const {
LayerParams params = {layer->get_friendly_name(), "GatherTree",
details::convertPrecision(layer->get_output_element_type(0))};
auto res = std::make_shared<InferenceEngine::CNNLayer>(params);
return res;
}

template <>
CNNLayer::Ptr NodeConverter<ngraph::op::ReverseSequence>::createLayer(const std::shared_ptr<ngraph::Node>& layer) const {
LayerParams params = {layer->get_friendly_name(), "ReverseSequence", details::convertPrecision(layer->get_output_element_type(0))};
Expand Down Expand Up @@ -1054,19 +1031,6 @@ CNNLayer::Ptr NodeConverter<ngraph::op::ScaleShiftIE>::createLayer(const std::sh
return res;
}

template <>
CNNLayer::Ptr NodeConverter<ngraph::op::Elu>::createLayer(const std::shared_ptr<ngraph::Node>& layer) const {
LayerParams params = {layer->get_friendly_name(), "elu",
details::convertPrecision(layer->get_output_element_type(0))};
auto res = std::make_shared<InferenceEngine::CNNLayer>(params);
auto castedLayer = ngraph::as_type_ptr<ngraph::op::Elu>(layer);
if (castedLayer == nullptr) THROW_IE_EXCEPTION << "Cannot get " << params.type << " layer " << params.name;

res->params["alpha"] = asString(castedLayer->get_alpha());

return res;
}

template <>
CNNLayer::Ptr NodeConverter<ngraph::op::SquaredDifference>::createLayer(const std::shared_ptr<ngraph::Node>& layer) const {
LayerParams params = {layer->get_friendly_name(), "Eltwise",
Expand Down Expand Up @@ -1331,40 +1295,6 @@ CNNLayer::Ptr NodeConverter<ngraph::op::ResampleV2>::createLayer(const std::shar
return res;
}

template <>
CNNLayer::Ptr NodeConverter<ngraph::op::Interp>::createLayer(const std::shared_ptr<ngraph::Node>& layer) const {
LayerParams params = {layer->get_friendly_name(), "Resample",
details::convertPrecision(layer->get_output_element_type(0))};
auto castedLayer = ngraph::as_type_ptr<ngraph::op::Interp>(layer);
if (castedLayer == nullptr) THROW_IE_EXCEPTION << "Cannot get " << params.type << " layer " << params.name;

auto attrs = castedLayer->get_attrs();

if (attrs.antialias) {
THROW_IE_EXCEPTION << "Interp do not support antialias";
}
if (attrs.mode != "linear") {
THROW_IE_EXCEPTION << "Interp do not support mode '" << attrs.mode << "'";
}

params = {layer->get_friendly_name(), "Interp",
details::convertPrecision(layer->get_output_element_type(0))};
auto res = std::make_shared<InferenceEngine::CNNLayer>(params);

res->params["height"] = asString(attrs.height);
res->params["width"] = asString(attrs.width);
res->params["pad_beg"] = asString(attrs.pad_beg);
res->params["pad_end"] = asString(attrs.pad_end);
res->params["align_corners"] = attrs.align_corners ? "1" : "0";

return res;
}

template <>
CNNLayer::Ptr NodeConverter<ngraph::op::v0::Interpolate>::createLayer(const std::shared_ptr<ngraph::Node>& layer) const {
THROW_IE_EXCEPTION << "Interpolate operation should be converted to Interp";
}

template <>
CNNLayer::Ptr NodeConverter<ngraph::op::v4::Interpolate>::createLayer(const std::shared_ptr<ngraph::Node>& layer) const {
LayerParams params = {layer->get_friendly_name(), "Interpolate",
Expand Down Expand Up @@ -1726,30 +1656,5 @@ CNNLayer::Ptr NodeConverter<ngraph::op::Sqrt>::createLayer(const std::shared_ptr
return res;
}

template <>
CNNLayer::Ptr NodeConverter<ngraph::op::HardSigmoid_IE>::createLayer(const std::shared_ptr<ngraph::Node>& layer) const {
LayerParams params = { layer->get_friendly_name(), "HardSigmoid", details::convertPrecision(layer->get_output_element_type(0)) };
auto res = std::make_shared<InferenceEngine::CNNLayer>(params);
auto castedLayer = std::dynamic_pointer_cast<ngraph::op::HardSigmoid_IE>(layer);
if (castedLayer == nullptr)
THROW_IE_EXCEPTION << "Cannot get " << params.type << " layer " << params.name;

res->params["alpha"] = asString(castedLayer->get_alpha());
res->params["beta"] = asString(castedLayer->get_beta());
return res;
}

template <>
CNNLayer::Ptr NodeConverter<ngraph::op::GRN>::createLayer(const std::shared_ptr<ngraph::Node>& layer) const {
LayerParams params = {layer->get_friendly_name(), "GRN",
details::convertPrecision(layer->get_output_element_type(0))};
auto castedLayer = std::dynamic_pointer_cast<ngraph::op::GRN>(layer);
if (castedLayer == nullptr) THROW_IE_EXCEPTION << "Cannot get " << params.type << " layer " << params.name;

auto res = std::make_shared<InferenceEngine::GRNLayer>(params);
res->params["bias"] = asString(castedLayer->get_bias());
return res;
}

} // namespace Builder
} // namespace InferenceEngine
Original file line number Diff line number Diff line change
Expand Up @@ -63,3 +63,7 @@ void op::GatherTreeIE::validate_and_infer_types() {
const auto& step_ids_et = get_input_element_type(0);
set_output_type(0, step_ids_et, step_ids_rank);
}

// Visitor API support for GatherTreeIE (de)serialization. The op carries no
// attributes beyond its tensor inputs, so there is nothing to register with
// the visitor; returning true signals that visitation is supported.
bool ngraph::op::GatherTreeIE::visit_attributes(AttributeVisitor& visitor) {
return true;
}
Original file line number Diff line number Diff line change
Expand Up @@ -35,3 +35,7 @@ shared_ptr<Node> op::HardSigmoid_IE::clone_with_new_inputs(const OutputVector& n
check_new_args_count(this, new_args);
return make_shared<op::HardSigmoid_IE>(new_args.at(0), m_alpha, m_beta);
}

// Visitor API support for HardSigmoid_IE (de)serialization.
// NOTE(review): this op stores alpha/beta as float members (the layer creator
// reads them via get_alpha()/get_beta()), yet neither is registered here with
// visitor.on_attribute — confirm they are restored through another path during
// visitor-based deserialization, otherwise they keep their constructed values.
bool op::HardSigmoid_IE::visit_attributes(AttributeVisitor& visitor) {
return true;
}
10 changes: 10 additions & 0 deletions inference-engine/src/legacy_api/src/ngraph_ops/interp.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -67,6 +67,16 @@ shared_ptr<Node> op::Interp::clone_with_new_inputs(const OutputVector& new_args)
return make_shared<Interp>(new_args.at(0), m_attrs);
}

// Visitor API support for Interp (de)serialization: registers the
// InterpolateIEAttrs fields that the IR carries so they round-trip by name.
// NOTE(review): m_attrs also has fields not visited here (e.g. antialias,
// mode — both checked by the layer creator); confirm that omitting them from
// serialization is intentional.
bool op::Interp::visit_attributes(AttributeVisitor& visitor)
{
visitor.on_attribute("align_corners", m_attrs.align_corners);
visitor.on_attribute("width", m_attrs.width);
visitor.on_attribute("height", m_attrs.height);
visitor.on_attribute("pad_beg", m_attrs.pad_beg);
visitor.on_attribute("pad_end", m_attrs.pad_end);
return true;
}

constexpr NodeTypeInfo op::ResampleV2::type_info;

op::ResampleV2::ResampleV2(const Output<Node>& image, const Output<Node>& output_shape,
Expand Down
Loading

0 comments on commit 7a3227f

Please sign in to comment.