Skip to content

Commit

Permalink
SmartReshape: support Param->Convert->Reshape->Proposal pattern (#10204)
Browse files Browse the repository at this point in the history
Currently, SmartReshape matches only Param->Reshape->Proposal patterns.

    For FP16 models, an additional 'Convert' node is inserted after the 'Parameter'.

    This prevents the transformation from being applied, so 'ov::set_batch' or 'CNNNetwork::set_batch' will throw.

    Proposal1Scales and Proposal4Scales transformations were updated to handle these conditions
  • Loading branch information
nosovmik authored Feb 9, 2022
1 parent a60c110 commit f56c640
Show file tree
Hide file tree
Showing 2 changed files with 91 additions and 5 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@
#include <ngraph/opsets/opset4.hpp>
#include <ngraph/opsets/opset5.hpp>
#include <ngraph/pattern/matcher.hpp>
#include <ngraph/pattern/op/or.hpp>
#include <ngraph/pattern/op/wrap_type.hpp>
#include <ngraph/rt_info.hpp>
#include <transformations/smart_reshape/proposal_scales_stridedslice.hpp>
Expand All @@ -15,8 +16,8 @@
namespace {

bool crop_scales_for_proposal(const ngraph::pattern::PatternValueMap& pattern_to_output,
std::shared_ptr<ngraph::Node> parameter_label,
std::shared_ptr<ngraph::Node> proposal_label) {
const std::shared_ptr<ngraph::Node>& parameter_label,
const std::shared_ptr<ngraph::Node>& proposal_label) {
const auto& parameter = pattern_to_output.at(parameter_label);
const auto& proposal = pattern_to_output.at(proposal_label).get_node_shared_ptr();

Expand Down Expand Up @@ -46,8 +47,11 @@ ngraph::pass::Proposal1Scales::Proposal1Scales() {
return shape.rank().is_static() && shape.rank().get_length() == 2 && shape[1].is_static() &&
(shape[1].get_length() == 3 || shape[1].get_length() == 4);
});
auto convert_label = ngraph::pattern::wrap_type<opset5::Convert>({parameter_label});
auto param_or_convert = std::make_shared<ngraph::pattern::op::Or>(ngraph::OutputVector{parameter_label,
convert_label});
auto reshape_label = ngraph::pattern::wrap_type<opset5::Reshape>(
{parameter_label, ngraph::pattern::wrap_type<opset5::Constant>()},
{param_or_convert, ngraph::pattern::wrap_type<opset5::Constant>()},
[](const Output<Node>& output) {
return output.get_partial_shape().rank().is_static() && output.get_partial_shape().rank().get_length() == 1;
});
Expand All @@ -71,8 +75,11 @@ ngraph::pass::Proposal4Scales::Proposal4Scales() {
return shape.rank().is_static() && shape.rank().get_length() == 2 && shape[1].is_static() &&
(shape[1].get_length() == 3 || shape[1].get_length() == 4);
});
auto convert_label = ngraph::pattern::wrap_type<opset5::Convert>({parameter_label});
auto param_or_convert = std::make_shared<ngraph::pattern::op::Or>(ngraph::OutputVector{parameter_label,
convert_label});
auto reshape_label = ngraph::pattern::wrap_type<opset5::Reshape>(
{parameter_label, ngraph::pattern::wrap_type<opset5::Constant>()},
{param_or_convert, ngraph::pattern::wrap_type<opset5::Constant>()},
[](const Output<Node>& output) {
return output.get_partial_shape().rank().is_static() && output.get_partial_shape().rank().get_length() == 1;
});
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -45,13 +45,52 @@ TEST(SmartReshapeTests, Proposal1Scales) {
ASSERT_TRUE(network.getFunction()->get_results()[0]->get_output_partial_shape(0).compatible({600, 5}));
}

// Checks that the Proposal1Scales transformation also fires when an FP16 model
// inserts a Convert between the 3-element scales Parameter and the Reshape that
// feeds Proposal, so that setBatchSize(2) succeeds instead of throwing.
TEST(SmartReshapeTests, Proposal1Scales_WithConvert) {
    std::shared_ptr<ngraph::Function> f(nullptr);
    {
        auto class_probs = std::make_shared<ngraph::opset5::Parameter>(ngraph::element::f16, ngraph::Shape{1, 24, 75, 128});
        auto bbox_deltas = std::make_shared<ngraph::opset5::Parameter>(ngraph::element::f16, ngraph::Shape{1, 48, 75, 128});
        // Scales come in as f32 and are converted to f16 — the pattern under test.
        auto scales = std::make_shared<ngraph::opset5::Parameter>(ngraph::element::f32, ngraph::Shape{1, 3});
        auto scales_f16 = std::make_shared<ngraph::opset5::Convert>(scales, ngraph::element::f16);
        auto reshape_pattern = ngraph::opset5::Constant::create(ngraph::element::i64, {1}, {3});
        auto reshape = std::make_shared<ngraph::opset5::Reshape>(scales_f16, reshape_pattern, true);

        ngraph::op::ProposalAttrs attrs;
        attrs.base_size = 256;
        attrs.box_coordinate_scale = 10.0;
        attrs.box_size_scale = 5.0;
        attrs.clip_after_nms = false;
        attrs.clip_before_nms = true;
        attrs.feat_stride = 8;
        attrs.framework = "tensorflow";
        attrs.min_size = 1;
        attrs.nms_thresh = 0.699999988079;
        attrs.normalize = true;
        attrs.post_nms_topn = 300;
        attrs.pre_nms_topn = 2147483647;
        attrs.ratio = {0.5, 1.0, 2.0};
        attrs.scale = {0.25, 0.5, 1.0, 2.0};

        auto proposal = std::make_shared<ngraph::opset1::Proposal>(class_probs, bbox_deltas, reshape, attrs);
        f = std::make_shared<ngraph::Function>(ngraph::NodeVector{proposal},
                                               ngraph::ParameterVector{class_probs, bbox_deltas, scales});
    }

    InferenceEngine::CNNNetwork network(f);
    auto unh = std::make_shared<ngraph::pass::UniqueNamesHolder>();
    init_unique_names(f, unh);
    // Without the Convert-aware pattern this call would throw.
    ASSERT_NO_THROW(network.setBatchSize(2));
    check_unique_names(f, unh);
    ASSERT_TRUE(network.getFunction()->get_results()[0]->get_output_partial_shape(0).compatible({600, 5}));
}

TEST(SmartReshapeTests, Proposal4Scales) {
std::shared_ptr<ngraph::Function> f(nullptr);
{
auto input_0 = std::make_shared<ngraph::opset5::Parameter>(ngraph::element::f32, ngraph::Shape{1, 24, 75, 128});
auto input_1 = std::make_shared<ngraph::opset5::Parameter>(ngraph::element::f32, ngraph::Shape{1, 48, 75, 128});
auto input_2 = std::make_shared<ngraph::opset5::Parameter>(ngraph::element::f32, ngraph::Shape{1, 4});
auto reshape = std::make_shared<ngraph::opset5::Reshape>(input_2, ngraph::opset5::Constant::create(ngraph::element::i64, {1}, {-1}), true);
auto reshape = std::make_shared<ngraph::opset5::Reshape>(input_2,
ngraph::opset5::Constant::create(
ngraph::element::i64, {1}, {-1}), true);
ngraph::op::ProposalAttrs attrs;
attrs.base_size = 256;
attrs.box_coordinate_scale = 10.0;
Expand All @@ -78,5 +117,45 @@ TEST(SmartReshapeTests, Proposal4Scales) {
ASSERT_NO_THROW(network.setBatchSize(2));
check_unique_names(f, unh);

ASSERT_TRUE(network.getFunction()->get_results()[0]->get_output_partial_shape(0).compatible({600, 5}));
}

// Checks that the Proposal4Scales transformation also fires when an FP16 model
// inserts a Convert between the 4-element scales Parameter and the Reshape that
// feeds Proposal, so that setBatchSize(2) succeeds instead of throwing.
TEST(SmartReshapeTests, Proposal4Scales_WithConvert) {
    std::shared_ptr<ngraph::Function> f(nullptr);
    {
        auto class_probs = std::make_shared<ngraph::opset5::Parameter>(ngraph::element::f16, ngraph::Shape{1, 24, 75, 128});
        auto bbox_deltas = std::make_shared<ngraph::opset5::Parameter>(ngraph::element::f16, ngraph::Shape{1, 48, 75, 128});
        // Scales come in as f32 and are converted to f16 — the pattern under test.
        auto scales = std::make_shared<ngraph::opset5::Parameter>(ngraph::element::f32, ngraph::Shape{1, 4});
        auto scales_f16 = std::make_shared<ngraph::opset5::Convert>(scales, ngraph::element::f16);
        auto reshape_pattern = ngraph::opset5::Constant::create(ngraph::element::i64, {1}, {-1});
        auto reshape = std::make_shared<ngraph::opset5::Reshape>(scales_f16, reshape_pattern, true);

        ngraph::op::ProposalAttrs attrs;
        attrs.base_size = 256;
        attrs.box_coordinate_scale = 10.0;
        attrs.box_size_scale = 5.0;
        attrs.clip_after_nms = false;
        attrs.clip_before_nms = true;
        attrs.feat_stride = 8;
        attrs.framework = "tensorflow";
        attrs.min_size = 1;
        attrs.nms_thresh = 0.699999988079;
        attrs.normalize = true;
        attrs.post_nms_topn = 300;
        attrs.pre_nms_topn = 2147483647;
        attrs.ratio = {0.5, 1.0, 2.0};
        attrs.scale = {0.25, 0.5, 1.0, 2.0};

        auto proposal = std::make_shared<ngraph::opset5::Proposal>(class_probs, bbox_deltas, reshape, attrs);
        f = std::make_shared<ngraph::Function>(ngraph::NodeVector{proposal},
                                               ngraph::ParameterVector{class_probs, bbox_deltas, scales});
    }

    InferenceEngine::CNNNetwork network(f);

    auto unh = std::make_shared<ngraph::pass::UniqueNamesHolder>();
    init_unique_names(f, unh);
    // Without the Convert-aware pattern this call would throw.
    ASSERT_NO_THROW(network.setBatchSize(2));
    check_unique_names(f, unh);

    ASSERT_TRUE(network.getFunction()->get_results()[0]->get_output_partial_shape(0).compatible({600, 5}));
}

0 comments on commit f56c640

Please sign in to comment.