Skip to content

Commit

Permalink
Applied Vladislav's comments
Browse files Browse the repository at this point in the history
  • Loading branch information
a-sidorova committed Dec 15, 2022
1 parent 2ba9972 commit 339a1b8
Show file tree
Hide file tree
Showing 5 changed files with 44 additions and 52 deletions.
8 changes: 5 additions & 3 deletions src/common/snippets/src/pass/insert_loops.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -71,7 +71,7 @@ void insert_explicitly_loops(const ov::NodeVector& ops, const ov::PartialShape&
auto add_body_results = [](const std::shared_ptr<ov::Node>& op, std::vector<ov::Input<ov::Node>>& body_results) {
for (auto output : op->outputs()) {
for (auto target_input : output.get_target_inputs()) {
auto child = target_input.get_node()->shared_from_this();
auto child = target_input.get_node();
if (ov::is_type<op::LoopBegin>(child) ||
ov::is_type<op::Buffer>(child) ||
ov::is_type<ov::op::v0::Result>(child) ||
Expand Down Expand Up @@ -111,8 +111,10 @@ void insert_explicitly_loops(const ov::NodeVector& ops, const ov::PartialShape&
[](const ov::Input<ov::Node>& in) { return in.get_partial_shape(); });

auto body_master_shape = body_shapes.front();
for (const auto& shape : body_shapes)
PartialShape::broadcast_merge_into(body_master_shape, shape, ::ngraph::op::AutoBroadcastType::NUMPY);
for (const auto& shape : body_shapes) {
NGRAPH_CHECK(PartialShape::broadcast_merge_into(body_master_shape, shape, ::ngraph::op::AutoBroadcastType::NUMPY),
"Loop input and output must be numpy broadcastable");
}
const auto inner_work_amount = utils::get_inner_dim(body_master_shape).get_length();
const auto outer_work_amount = utils::get_outer_dim(body_master_shape).get_length();

Expand Down
10 changes: 6 additions & 4 deletions src/common/snippets/src/pass/reset_buffer.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -39,8 +39,7 @@ ngraph::snippets::pass::ResetBufferState::ResetBufferState() {
// MatMul doesn't change Buffer memory pointer after execution
auto m_loop_end = ngraph::pattern::wrap_type<op::LoopEnd>();

register_matcher(std::make_shared<ngraph::pattern::Matcher>(m_loop_end, matcher_name),
[=](ngraph::pattern::Matcher &m) {
auto callback = [=](ngraph::pattern::Matcher &m) {
OV_ITT_SCOPED_TASK(ngraph::pass::itt::domains::SnippetsTransform, "Snippets::op::ResetBufferState")
auto& pattern_to_output = m.get_pattern_value_map();

Expand Down Expand Up @@ -82,7 +81,7 @@ ngraph::snippets::pass::ResetBufferState::ResetBufferState() {
for (size_t i = 0; i < o_size; ++i) {
const auto result_shape = body_shapes[i_size + i].get_shape();
// check for first target input is enough for Buffer searching because operations can have only single Buffer per each output port as op
const auto consumer = loop_end->output(i).get_target_inputs().begin()->get_node()->shared_from_this();
const auto consumer = loop_end->output(i).get_target_inputs().begin()->get_node();
if (ov::is_type<ngraph::snippets::op::Buffer>(consumer)) {
// To calculate finalization offset we should know index of nesting Loop
auto loop_index = 0lu;
Expand All @@ -108,5 +107,8 @@ ngraph::snippets::pass::ResetBufferState::ResetBufferState() {
loop_end->set_ptr_increments(ptr_increments);

return true;
});
};

auto m = std::make_shared<ngraph::pattern::Matcher>(m_loop_end, matcher_name);
register_matcher(m, callback);
}
14 changes: 9 additions & 5 deletions src/common/snippets/src/pass/set_buffer_offset.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -13,9 +13,10 @@

ngraph::snippets::pass::SetBufferOffset::SetBufferOffset() {
MATCHER_SCOPE(SetBufferOffset);
register_matcher(std::make_shared<ngraph::pattern::Matcher>(
ngraph::pattern::wrap_type<op::Buffer>(), matcher_name),
[&](ngraph::pattern::Matcher &m) {

auto m_buffer = ngraph::pattern::wrap_type<op::Buffer>();

auto callback = [&](ngraph::pattern::Matcher &m) {
OV_ITT_SCOPED_TASK(ngraph::pass::itt::domains::SnippetsTransform, "Snippets::op::SetBufferOffset")
auto root = m.get_match_root();
const auto buffer = ov::as_type_ptr<op::Buffer>(root);
Expand All @@ -29,7 +30,7 @@ ngraph::snippets::pass::SetBufferOffset::SetBufferOffset() {
{
auto parent = buffer->get_input_node_shared_ptr(0);
auto idx = buffer->input(0).get_source_output().get_index();
while (std::dynamic_pointer_cast<snippets::op::LoopBase>(parent) != nullptr) {
while (std::dynamic_pointer_cast<snippets::op::LoopBase>(parent)) {
const auto source_output = parent->input_value(idx);
parent = source_output.get_node_shared_ptr();
idx = source_output.get_index();
Expand Down Expand Up @@ -75,5 +76,8 @@ ngraph::snippets::pass::SetBufferOffset::SetBufferOffset() {

current_offset += buffer->get_byte_size();
return true;
});
};

auto m = std::make_shared<ngraph::pattern::Matcher>(m_buffer, matcher_name);
register_matcher(m, callback);
}
14 changes: 9 additions & 5 deletions src/common/snippets/src/pass/softmax_decomposition.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -20,9 +20,10 @@

ngraph::snippets::pass::SoftmaxDecomposition::SoftmaxDecomposition(const size_t vector_size, const int32_t buffer_allocation_rank) {
MATCHER_SCOPE(SoftmaxDecomposition);
register_matcher(std::make_shared<ngraph::pattern::Matcher>(
ngraph::pattern::wrap_type<ngraph::op::v1::Softmax, ngraph::op::v8::Softmax>(), matcher_name),
[this, vector_size, buffer_allocation_rank](ngraph::pattern::Matcher &m) {

auto m_softmax = ngraph::pattern::wrap_type<ngraph::op::v1::Softmax, ngraph::op::v8::Softmax>();

auto callback = [=](ngraph::pattern::Matcher &m) {
OV_ITT_SCOPED_TASK(ngraph::pass::itt::domains::SnippetsTransform, "Snippets::op::SoftmaxDecomposition")
auto root = m.get_match_root();
const auto master_pshape = root->get_input_partial_shape(0);
Expand All @@ -39,7 +40,7 @@ ngraph::snippets::pass::SoftmaxDecomposition::SoftmaxDecomposition(const size_t
return false;
}

const auto shape_rank = static_cast<int64_t>(rank.get_length());
const auto shape_rank = rank.get_length();
if (axis != shape_rank - 1)
return false;

Expand Down Expand Up @@ -186,5 +187,8 @@ ngraph::snippets::pass::SoftmaxDecomposition::SoftmaxDecomposition(const size_t
/* =========================================== */

return true;
});
};

auto m = std::make_shared<ngraph::pattern::Matcher>(m_softmax, matcher_name);
register_matcher(m, callback);
}
50 changes: 15 additions & 35 deletions src/common/snippets/tests/src/pass/softmax_reshape_elimination.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -17,76 +17,59 @@
using namespace testing;
using namespace ngraph;

TEST(TransformationTests, SoftmaxV1ReshapeElimination) {
std::shared_ptr<Function> f(nullptr), f_ref(nullptr);
TEST_F(TransformationTestsF, SoftmaxV1ReshapeElimination) {
{
auto data = std::make_shared<opset1::Parameter>(element::f32, Shape{2, 3, 240});
auto shape0 = std::make_shared<ov::op::v0::Constant>(ov::element::i32, ov::Shape{2}, std::vector<int32_t>{6, 240});
auto reshape0 = std::make_shared<ov::op::v1::Reshape>(data, shape0, false);
auto softmax_v1 = std::make_shared<ov::op::v1::Softmax>(reshape0, 1);
auto shape1 = std::make_shared<ov::op::v0::Constant>(ov::element::i32, ov::Shape{3}, std::vector<int32_t>{2, 3, 240});
auto reshape1 = std::make_shared<ov::op::v1::Reshape>(softmax_v1, shape1, false);
f = std::make_shared<Function>(NodeVector{reshape1}, ParameterVector{data});
function = std::make_shared<Function>(NodeVector{reshape1}, ParameterVector{data});

pass::Manager m;
m.register_pass<pass::InitNodeInfo>();
m.register_pass<snippets::pass::SoftmaxReshapeElimination>();
m.run_passes(f);
ASSERT_NO_THROW(check_rt_info(f));
manager.register_pass<pass::InitNodeInfo>();
manager.register_pass<snippets::pass::SoftmaxReshapeElimination>();
}
{
auto data = std::make_shared<opset1::Parameter>(element::f32, Shape{2, 3, 240});
auto softmax_v1 = std::make_shared<ov::op::v1::Softmax>(data, 2);
f_ref = std::make_shared<Function>(NodeVector{softmax_v1}, ParameterVector{data});
function_ref = std::make_shared<Function>(NodeVector{softmax_v1}, ParameterVector{data});
}

auto res = compare_functions(f, f_ref);
ASSERT_TRUE(res.first) << res.second;
}

TEST(TransformationTests, SoftmaxV8ReshapeElimination) {
std::shared_ptr<Function> f(nullptr), f_ref(nullptr);
TEST_F(TransformationTestsF, SoftmaxV8ReshapeElimination) {
{
auto data = std::make_shared<opset1::Parameter>(element::f32, Shape{1, 2, 340, 240});
auto shape0 = std::make_shared<ov::op::v0::Constant>(ov::element::i32, ov::Shape{2}, std::vector<int32_t>{680, 240});
auto reshape0 = std::make_shared<ov::op::v1::Reshape>(data, shape0, false);
auto softmax_v1 = std::make_shared<ov::op::v8::Softmax>(reshape0, -1);
auto shape1 = std::make_shared<ov::op::v0::Constant>(ov::element::i32, ov::Shape{4}, std::vector<int32_t>{1, 2, 340, 240});
auto reshape1 = std::make_shared<ov::op::v1::Reshape>(softmax_v1, shape1, false);
f = std::make_shared<Function>(NodeVector{reshape1}, ParameterVector{data});
function = std::make_shared<Function>(NodeVector{reshape1}, ParameterVector{data});

pass::Manager m;
m.register_pass<pass::InitNodeInfo>();
m.register_pass<snippets::pass::SoftmaxReshapeElimination>();
m.run_passes(f);
ASSERT_NO_THROW(check_rt_info(f));
manager.register_pass<pass::InitNodeInfo>();
manager.register_pass<snippets::pass::SoftmaxReshapeElimination>();
}
{
auto data = std::make_shared<opset1::Parameter>(element::f32, Shape{1, 2, 340, 240});
auto softmax_v1 = std::make_shared<ov::op::v8::Softmax>(data, 3);
f_ref = std::make_shared<Function>(NodeVector{softmax_v1}, ParameterVector{data});
function_ref =std::make_shared<Function>(NodeVector{softmax_v1}, ParameterVector{data});
}

auto res = compare_functions(f, f_ref);
ASSERT_TRUE(res.first) << res.second;
}

TEST(TransformationTests, SoftmaxReshapeElimination_IncorrectReshape) {
std::shared_ptr<Function> f(nullptr), f_ref(nullptr);
TEST_F(TransformationTestsF, SoftmaxReshapeElimination_IncorrectReshape) {
{
auto data = std::make_shared<opset1::Parameter>(element::f32, Shape{1, 2, 340, 240});
auto shape0 = std::make_shared<ov::op::v0::Constant>(ov::element::i32, ov::Shape{2}, std::vector<int32_t>{2, 81600});
auto reshape0 = std::make_shared<ov::op::v1::Reshape>(data, shape0, false);
auto softmax_v1 = std::make_shared<ov::op::v8::Softmax>(reshape0, -1);
auto shape1 = std::make_shared<ov::op::v0::Constant>(ov::element::i32, ov::Shape{4}, std::vector<int32_t>{1, 2, 340, 240});
auto reshape1 = std::make_shared<ov::op::v1::Reshape>(softmax_v1, shape1, false);
f = std::make_shared<Function>(NodeVector{reshape1}, ParameterVector{data});
function = std::make_shared<Function>(NodeVector{reshape1}, ParameterVector{data});

pass::Manager m;
m.register_pass<pass::InitNodeInfo>();
m.register_pass<snippets::pass::SoftmaxReshapeElimination>();
m.run_passes(f);
ASSERT_NO_THROW(check_rt_info(f));
manager.register_pass<pass::InitNodeInfo>();
manager.register_pass<snippets::pass::SoftmaxReshapeElimination>();
}
{
auto data = std::make_shared<opset1::Parameter>(element::f32, Shape{1, 2, 340, 240});
Expand All @@ -95,9 +78,6 @@ TEST(TransformationTests, SoftmaxReshapeElimination_IncorrectReshape) {
auto softmax_v1 = std::make_shared<ov::op::v8::Softmax>(reshape0, -1);
auto shape1 = std::make_shared<ov::op::v0::Constant>(ov::element::i32, ov::Shape{4}, std::vector<int32_t>{1, 2, 340, 240});
auto reshape1 = std::make_shared<ov::op::v1::Reshape>(softmax_v1, shape1, false);
f_ref = std::make_shared<Function>(NodeVector{reshape1}, ParameterVector{data});
function_ref =std::make_shared<Function>(NodeVector{reshape1}, ParameterVector{data});
}

auto res = compare_functions(f, f_ref);
ASSERT_TRUE(res.first) << res.second;
}

0 comments on commit 339a1b8

Please sign in to comment.