[FrontEnd]enable pdpd ops conversion part3 (openvinotoolkit#6636)
* [FrontEnd]enable pdpd ops conversion part3
* Add adaptive pool2d op conversion (#1)
* param support tensor (#2)
* add missing sync_batch_norm
* Update pow.cpp
* deal empty axis (#5)
* deal empty axis
* apply review comments
* fix code style
* fix code style
* change shape to i32
* fix code in shape
* fix code style
* fix paddle code style
* remove redundant ops
* fix maxAdaptivePool
* fix expand_v2
* remove redundant code

Co-authored-by: Mang Guo <[email protected]>
Co-authored-by: Luo Cheng <[email protected]>
1 parent 6f23458 · commit 0aa6b07
Showing 16 changed files with 1,358 additions and 50 deletions.
@@ -0,0 +1,67 @@
// Copyright (C) 2018-2021 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//

#include <ngraph/opsets/opset6.hpp>
#include <node_context.hpp>

namespace ngraph
{
    namespace frontend
    {
        namespace pdpd
        {
            namespace op
            {
                NamedOutputs mul(const NodeContext& node)
                {
                    auto x = node.get_ng_input("X");
                    auto y = node.get_ng_input("Y");
                    PDPD_OP_VALIDATION_CHECK(node,
                                             x.get_partial_shape().rank().is_static(),
                                             "matmul: X rank must be static!");
                    int64_t x_rank = x.get_partial_shape().rank().get_length();
                    PDPD_OP_VALIDATION_CHECK(node,
                                             y.get_partial_shape().rank().is_static() &&
                                                 y.get_partial_shape().rank().get_length() == 2,
                                             "matmul: Y rank must be static, and 2!");
                    if (x_rank > 2)
                    {
                        // Flatten X to 2-D: the first x_num_col_dims dimensions collapse
                        // into the row count and the remaining dimensions into the column
                        // count, so a plain 2-D MatMul with Y can be used.
                        auto shape = std::make_shared<ngraph::opset6::ShapeOf>(x);
                        int64_t x_num_col_dims = node.get_attribute<int32_t>("x_num_col_dims");
                        auto axis = ngraph::opset6::Constant::create(ngraph::element::i64, {}, {0});
                        auto split_lengths = ngraph::opset6::Constant::create(
                            ngraph::element::i64, {2}, {x_num_col_dims, x_rank - x_num_col_dims});
                        auto split = std::make_shared<ngraph::opset6::VariadicSplit>(
                            shape, axis, split_lengths);
                        auto f_dim_red_axis =
                            ngraph::opset6::Constant::create(ngraph::element::i64, {}, {0});
                        auto first_dim_reduce = std::make_shared<ngraph::opset6::ReduceProd>(
                            split->output(0), f_dim_red_axis);
                        auto f_dim_shape =
                            ngraph::opset6::Constant::create(ngraph::element::i64, {1}, {1});
                        auto first_dim = std::make_shared<ngraph::opset6::Reshape>(
                            first_dim_reduce, f_dim_shape, false);
                        auto s_dim_red_axis =
                            ngraph::opset6::Constant::create(ngraph::element::i64, {}, {0});
                        auto second_dim_reduce = std::make_shared<ngraph::opset6::ReduceProd>(
                            split->output(1), s_dim_red_axis);
                        auto s_dim_shape =
                            ngraph::opset6::Constant::create(ngraph::element::i64, {1}, {1});
                        auto second_dim = std::make_shared<ngraph::opset6::Reshape>(
                            second_dim_reduce, s_dim_shape, false);
                        auto out_shape = std::make_shared<ngraph::opset6::Concat>(
                            ngraph::NodeVector{first_dim, second_dim}, 0);
                        auto x_reshaped =
                            std::make_shared<ngraph::opset6::Reshape>(x, out_shape, false);
                        return node.default_single_output_mapping(
                            {std::make_shared<ngraph::opset6::MatMul>(x_reshaped, y)}, {"Out"});
                    }
                    return node.default_single_output_mapping(
                        {std::make_shared<ngraph::opset6::MatMul>(x, y)}, {"Out"});
                }

            } // namespace op
        } // namespace pdpd
    } // namespace frontend
} // namespace ngraph
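The reshape branch above mirrors how PaddlePaddle's mul op flattens X before the 2-D multiply: the product of the first x_num_col_dims extents becomes the row count and the product of the remaining extents becomes the column count. Below is a minimal standalone sketch of just that arithmetic; flatten_for_mul and the example shapes are illustrative only and are not part of the frontend code.

#include <cstdint>
#include <functional>
#include <iostream>
#include <numeric>
#include <utility>
#include <vector>

// Collapse a rank-N shape into the 2-D shape mul's MatMul expects:
// rows = product of the first num_col_dims extents, cols = product of the rest.
static std::pair<int64_t, int64_t> flatten_for_mul(const std::vector<int64_t>& shape,
                                                   int64_t num_col_dims)
{
    int64_t rows = std::accumulate(shape.begin(), shape.begin() + num_col_dims,
                                   int64_t{1}, std::multiplies<int64_t>());
    int64_t cols = std::accumulate(shape.begin() + num_col_dims, shape.end(),
                                   int64_t{1}, std::multiplies<int64_t>());
    return {rows, cols};
}

int main()
{
    // Example: X of shape [2, 3, 4, 5] with x_num_col_dims = 2
    // flattens to [6, 20]; Y must then be a [20, K] matrix.
    auto dims = flatten_for_mul({2, 3, 4, 5}, 2);
    std::cout << dims.first << " x " << dims.second << "\n"; // prints "6 x 20"
}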
@@ -0,0 +1,117 @@
// Copyright (C) 2018-2021 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//

#include <ngraph/opsets/opset6.hpp>
#include <node_context.hpp>

namespace ngraph
{
    namespace frontend
    {
        namespace pdpd
        {
            namespace op
            {
                NamedOutputs pad3d(const NodeContext& node)
                {
                    auto data = node.get_ng_input("X");
                    auto mode = node.get_attribute<std::string>("mode");
                    auto value = node.get_attribute<float>("value", 0.0);
                    auto data_format = node.get_attribute<std::string>("data_format");

                    auto paddings = std::vector<int32_t>(6, 0);

                    // padding of type int feature only supported by PaddlePaddle 'develop'
                    // version (>= 2.1.0)
                    if (node.has_attribute<std::vector<int32_t>>("paddings"))
                    {
                        auto paddings_vector = node.get_attribute<std::vector<int32_t>>("paddings");
                        PDPD_OP_VALIDATION_CHECK(node,
                                                 paddings_vector.size() == 6,
                                                 "paddings Params size should be 6 in pad3d!");
                        paddings = paddings_vector;
                    }
                    else if (node.has_attribute<int32_t>("paddings"))
                    {
                        auto padding_int = node.get_attribute<int32_t>("paddings");
                        for (int i = 0; i < 6; i++)
                            paddings[i] = padding_int;
                    }
                    else
                    {
                        throw ngraph::ngraph_error("Unsupported paddings attribute!");
                    }

                    auto pads_begin = std::vector<int32_t>(5, 0);
                    auto pads_end = std::vector<int32_t>(5, 0);

                    Output<ngraph::Node> values;
                    Output<ngraph::Node> padding_begin;
                    Output<ngraph::Node> padding_end;

                    ngraph::op::PadMode pad_mode;
                    // TODO Support Circular mode in #55704
                    if (mode == "constant")
                    {
                        pad_mode = ngraph::op::PadMode::CONSTANT;
                        values = ngraph::opset6::Constant::create(
                            element::f32, ngraph::Shape{}, {value});
                    }
                    else if (mode == "reflect")
                    {
                        pad_mode = ngraph::op::PadMode::REFLECT;
                    }
                    else if (mode == "replicate")
                    {
                        pad_mode = ngraph::op::PadMode::EDGE;
                    }
                    else
                    {
                        throw ngraph::ngraph_error("Unsupported 3d paddings mode: [" + mode + "]");
                    }

                    if (data_format == "NCDHW")
                    {
                        pads_begin[4] = paddings[0]; // left
                        pads_end[4] = paddings[1];   // right
                        pads_begin[3] = paddings[2]; // top
                        pads_end[3] = paddings[3];   // down
                        pads_begin[2] = paddings[4]; // front
                        pads_end[2] = paddings[5];   // back
                    }
                    else if (data_format == "NDHWC")
                    {
                        pads_begin[3] = paddings[0]; // left
                        pads_end[3] = paddings[1];   // right
                        pads_begin[2] = paddings[2]; // top
                        pads_end[2] = paddings[3];   // down
                        pads_begin[1] = paddings[4]; // front
                        pads_end[1] = paddings[5];   // back
                    }
                    else
                    {
                        throw ngraph::ngraph_error("Unsupported 3d paddings data_format: [" +
                                                   data_format + "]");
                    }

                    padding_begin = ngraph::opset6::Constant::create(
                        element::i32, ngraph::Shape{pads_begin.size()}, pads_begin);
                    padding_end = ngraph::opset6::Constant::create(
                        element::i32, ngraph::Shape{pads_end.size()}, pads_end);

                    if (mode == "constant")
                        return node.default_single_output_mapping(
                            {std::make_shared<ngraph::opset6::Pad>(
                                data, padding_begin, padding_end, values, pad_mode)},
                            {"Out"});
                    else
                        return node.default_single_output_mapping(
                            {std::make_shared<ngraph::opset6::Pad>(
                                data, padding_begin, padding_end, pad_mode)},
                            {"Out"});
                }
            } // namespace op
        } // namespace pdpd
    } // namespace frontend
} // namespace ngraph
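For reference, the NCDHW branch maps Paddle's paddings = [left, right, top, down, front, back] onto per-axis begin/end pads, leaving the N and C axes unpadded. Below is a small standalone sketch of that mapping; map_ncdhw and the sample padding values are made up for illustration and are not part of the frontend code.

#include <array>
#include <cstdint>
#include <iostream>
#include <vector>

// paddings follow Paddle's pad3d order: [left, right, top, down, front, back].
// For an NCDHW tensor the spatial axes are D=2, H=3, W=4; N and C stay unpadded.
static void map_ncdhw(const std::array<int32_t, 6>& p,
                      std::vector<int32_t>& begin,
                      std::vector<int32_t>& end)
{
    begin.assign(5, 0);
    end.assign(5, 0);
    begin[4] = p[0]; end[4] = p[1]; // W: left / right
    begin[3] = p[2]; end[3] = p[3]; // H: top / down
    begin[2] = p[4]; end[2] = p[5]; // D: front / back
}

int main()
{
    std::vector<int32_t> begin, end;
    map_ncdhw({1, 2, 3, 4, 5, 6}, begin, end);
    // begin = [0, 0, 5, 3, 1], end = [0, 0, 6, 4, 2]
    for (auto v : begin) std::cout << v << ' ';
    std::cout << "|  ";
    for (auto v : end) std::cout << v << ' ';
    std::cout << '\n';
}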