Test calculation output shape for Broadcast op, relax restrictions for partially dynamic input data #1247

Merged · 17 commits · Aug 10, 2020
44 changes: 32 additions & 12 deletions ngraph/src/ngraph/op/broadcast.cpp
@@ -88,15 +88,21 @@ std::pair<bool, AxisSet> op::v3::Broadcast::get_broadcast_axes() const

 namespace
 {
-    PartialShape
-        get_result_shape_bidirectional(const Node* this_ptr, Shape& arg_shape, Shape& target_shape)
+    PartialShape get_result_shape_bidirectional(const Node* this_ptr,
+                                                const PartialShape& arg_shape,
+                                                Shape& target_shape)
     {
+        if (arg_shape.rank().is_dynamic())
+        {
+            return PartialShape::dynamic();
+        }
+        auto arg_shape_vec = static_cast<std::vector<Dimension>>(arg_shape);
         PartialShape result_shape;
         // Add left padding to shorter target or argument shape
-        const auto target_padded_rank = std::max(arg_shape.size(), target_shape.size());
-        while (arg_shape.size() < target_padded_rank)
+        const auto target_padded_rank = std::max(arg_shape_vec.size(), target_shape.size());
+        while (arg_shape_vec.size() < target_padded_rank)
         {
-            arg_shape.insert(arg_shape.begin(), 1);
+            arg_shape_vec.insert(arg_shape_vec.begin(), 1);
         }
         while (target_shape.size() < target_padded_rank)
         {
@@ -106,15 +112,28 @@ namespace
         result_shape = target_shape;
         for (auto i = 0; i < target_shape.size(); ++i)
         {
+            if (arg_shape_vec[i].is_dynamic())
+            {
+                if (target_shape[i] == 1)
+                {
+                    result_shape[i] = Dimension::dynamic();
+                }
+                else
+                {
+                    result_shape[i] = target_shape[i];
+                }
+                continue;
+            }
+            const size_t arg_shape_dim = arg_shape_vec[i].get_length();
             NODE_VALIDATION_CHECK(this_ptr,
-                                  arg_shape[i] == 1 || target_shape[i] == 1 ||
-                                      arg_shape[i] == target_shape[i],
+                                  arg_shape_dim == 1 || target_shape[i] == 1 ||
+                                      arg_shape_dim == target_shape[i],
                                   "Broadcast incorrect target shape. Expecting either 1 or ",
-                                  arg_shape[i],
+                                  arg_shape_dim,
                                   ". Got ",
                                   target_shape[i]);

-            result_shape[i] = std::max(arg_shape[i], target_shape[i]);
+            result_shape[i] = std::max(arg_shape_dim, target_shape[i]);
         }
         return result_shape;
     }
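For readers unfamiliar with the bidirectional mode, the rule the revised helper implements can be sketched as standalone C++. This is a hedged illustration, not nGraph code: Dim stands in for ngraph::Dimension, with std::nullopt modelling a dynamic dimension, and the function name is invented for the example.

#include <algorithm>
#include <cstddef>
#include <optional>
#include <stdexcept>
#include <vector>

// A dimension is either a known extent or dynamic (std::nullopt).
using Dim = std::optional<std::size_t>;

// Bidirectional rule: both shapes are left-padded with 1s to a common rank,
// then merged pairwise. A dynamic input dimension stays dynamic only when the
// matching target dimension is 1; otherwise the target dimension wins.
std::vector<Dim> broadcast_bidirectional(std::vector<Dim> arg, std::vector<std::size_t> target)
{
    const std::size_t rank = std::max(arg.size(), target.size());
    arg.insert(arg.begin(), rank - arg.size(), Dim{1});
    target.insert(target.begin(), rank - target.size(), 1);

    std::vector<Dim> result(rank);
    for (std::size_t i = 0; i < rank; ++i)
    {
        if (!arg[i])
        {
            result[i] = (target[i] == 1) ? Dim{} : Dim{target[i]};
            continue;
        }
        if (*arg[i] != 1 && target[i] != 1 && *arg[i] != target[i])
        {
            throw std::invalid_argument("Broadcast: incompatible dimensions");
        }
        result[i] = Dim{std::max(*arg[i], target[i])};
    }
    return result;
}

For example, an input of {?, 4, 1} against a target of {1, 1, 3} yields {?, 4, 3}: the dynamic dimension survives only because the matching target dimension is 1.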
@@ -141,9 +160,9 @@ void op::v3::Broadcast::validate_and_infer_types()
     auto result_shape = get_output_partial_shape(0);
     if (m_mode.m_type == BroadcastType::BIDIRECTIONAL)
     {
-        if (get_input_partial_shape(0).is_static() && get_input_partial_shape(1).is_static())
+        if (get_input_partial_shape(0).rank().is_static() && get_input_partial_shape(1).is_static())
         {
-            auto arg_shape = get_input_shape(0);
+            auto arg_shape = get_input_partial_shape(0);

             const auto shape_constant =
                 as_type_ptr<op::v0::Constant>(input_value(1).get_node_shared_ptr());
@@ -193,7 +212,8 @@ bool op::v3::Broadcast::evaluate(const HostTensorVector& outputs,
 {
     auto arg_shape = inputs[0]->get_shape();
     Shape target_shape = op::util::BroadcastBase::get_target_shape(inputs[1]);
-    PartialShape result_shape = get_result_shape_bidirectional(this, arg_shape, target_shape);
+    PartialShape result_shape =
+        get_result_shape_bidirectional(this, PartialShape{arg_shape}, target_shape);
     auto pair_broadcast_axes =
         get_broadcast_axes_bidirectional(arg_shape, result_shape.to_shape());
     return op::util::BroadcastBase::evaluate_broadcast(
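The call site in evaluate now wraps the static Shape in a PartialShape explicitly, since the helper takes a PartialShape. A small self-contained check of what that wrap preserves, sketched against nGraph's public PartialShape API as used in this PR (the header paths are assumptions):

#include <cassert>
#include "ngraph/partial_shape.hpp"
#include "ngraph/shape.hpp"

int main()
{
    // evaluate() always sees a fully static input shape, so the
    // PartialShape{arg_shape} wrap loses nothing: rank and every
    // dimension of the result are static.
    ngraph::Shape arg_shape{1, 4, 1};
    ngraph::PartialShape partial{arg_shape};
    assert(partial.is_static());
    assert(partial.to_shape() == arg_shape); // round-trips to the same Shape
    return 0;
}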
122 changes: 96 additions & 26 deletions ngraph/src/ngraph/op/util/broadcast_base.cpp
@@ -46,35 +46,79 @@ op::util::BroadcastBase::BroadcastBase(const Output<Node>& arg,
 {
 }

-PartialShape op::util::BroadcastBase::get_result_shape_numpy_pdpd(
-    const Shape& arg0_shape,
+PartialShape op::util::BroadcastBase::get_result_shape_pdpd(
+    const PartialShape& arg0_shape,
     const Shape& target_shape,
     const op::BroadcastModeSpec& broadcast_spec) const
 {
+    if (arg0_shape.rank().is_dynamic())
+    {
+        return PartialShape::dynamic(target_shape.size());
+    }
+    const auto arg_rank_length = arg0_shape.rank().get_length();
     PartialShape result_shape = target_shape;
-    auto start_axis = (broadcast_spec.m_type == op::BroadcastType::PDPD)
-                          ? broadcast_spec.m_axis
-                          : target_shape.size() - arg0_shape.size();
+    auto start_axis = broadcast_spec.m_axis;

     NODE_VALIDATION_CHECK(this,
                           start_axis >= 0,
                           "Broadcast target_shape has smaller rank ",
                           target_shape.size(),
                           " than arg shape ",
-                          arg0_shape.size());
+                          arg_rank_length);
     for (auto i = start_axis; i < target_shape.size(); i++)
     {
+        if (arg0_shape[i - start_axis].is_dynamic())
+        {
+            result_shape[i] = Dimension::dynamic();
+            continue;
+        }
+        const size_t arg_dim = arg0_shape[i - start_axis].get_length();
         NODE_VALIDATION_CHECK(this,
-                              arg0_shape[i - start_axis] == 1 || target_shape[i] == 1 ||
-                                  arg0_shape[i - start_axis] == target_shape[i],
+                              arg_dim == 1 || target_shape[i] == 1 || arg_dim == target_shape[i],
                               "Broadcast incorrect target shape. Expecting either 1 or ",
-                              arg0_shape[i - start_axis],
+                              arg_dim,
                               " . Got ",
                               target_shape[i]);
-        result_shape[i] = std::max(arg0_shape[i - start_axis], target_shape[i]);
+        result_shape[i] = std::max(arg_dim, target_shape[i]);
     }
     return result_shape;
 }
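With the NUMPY path split out, this helper now serves PDPD mode only, where the argument is anchored at an explicit start axis of the target instead of being right-aligned. A minimal standalone sketch of that rule follows; Dim again models ngraph::Dimension, the function name is invented, and the loop bound guard is added here for safety in the sketch.

#include <algorithm>
#include <cstddef>
#include <optional>
#include <stdexcept>
#include <vector>

using Dim = std::optional<std::size_t>; // std::nullopt models a dynamic dimension

// PDPD rule: arg dim (i - start_axis) is matched against target dim i;
// a dynamic arg dim simply makes the corresponding result dim dynamic.
std::vector<Dim> broadcast_pdpd(const std::vector<Dim>& arg,
                                const std::vector<std::size_t>& target,
                                std::size_t start_axis)
{
    std::vector<Dim> result(target.begin(), target.end());
    for (std::size_t i = start_axis; i < target.size() && i - start_axis < arg.size(); ++i)
    {
        const Dim a = arg[i - start_axis];
        if (!a)
        {
            result[i] = Dim{}; // stays dynamic
            continue;
        }
        if (*a != 1 && target[i] != 1 && *a != target[i])
        {
            throw std::invalid_argument("Broadcast: incompatible dimensions");
        }
        result[i] = Dim{std::max(*a, target[i])};
    }
    return result;
}

For example, arg {3, 1} against target {2, 3, 4} with start_axis = 1 gives {2, 3, 4}.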

+void op::util::BroadcastBase::validate_target_shape_numpy(const PartialShape& arg_shape,
+                                                          const Shape& target_shape) const
+{
+    if (arg_shape.rank().is_dynamic())
+    {
+        return;
+    }
+    const auto arg_rank_length = arg_shape.rank().get_length();
+    auto start_axis = target_shape.size() - arg_rank_length;
+    NODE_VALIDATION_CHECK(this,
+                          start_axis >= 0,
+                          "Broadcast target_shape has smaller rank ",
+                          target_shape.size(),
+                          " than arg shape ",
+                          arg_rank_length);
+    for (auto i = start_axis; i < target_shape.size(); i++)
+    {
+        if (arg_shape[i - start_axis].is_dynamic())
+        {
+            continue;
+        }
+        const size_t arg_dim = arg_shape[i - start_axis].get_length();
+        NODE_VALIDATION_CHECK(this,
+                              arg_dim == 1 || arg_dim == target_shape[i],
+                              "Input shape dimension equal ",
+                              arg_dim,
+                              " cannot be broadcasted (numpy mode) to ",
+                              target_shape[i],
+                              ". Allowed input dimension value would be 1",
+                              target_shape[i] != 1
+                                  ? (std::string(" or ") + std::to_string(target_shape[i])).c_str()
+                                  : "");
+    }
+}
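Note that this numpy check is stricter than the bidirectional one: the target is the output shape, so a static input dimension must be 1 or exactly equal to the right-aligned target dimension, and a target dimension of 1 no longer excuses a mismatch. A standalone sketch of the check, illustrative only, with Dim modelling ngraph::Dimension:

#include <cstddef>
#include <optional>
#include <stdexcept>
#include <vector>

using Dim = std::optional<std::size_t>; // std::nullopt models a dynamic dimension

// Numpy-mode check: right-align arg against target; every static arg dim must
// be 1 or equal to its target dim. Dynamic arg dims are skipped, not rejected.
void validate_numpy(const std::vector<Dim>& arg, const std::vector<std::size_t>& target)
{
    if (arg.size() > target.size())
    {
        throw std::invalid_argument("Broadcast target_shape has smaller rank than arg shape");
    }
    const std::size_t start_axis = target.size() - arg.size();
    for (std::size_t i = start_axis; i < target.size(); ++i)
    {
        const Dim a = arg[i - start_axis];
        if (a && *a != 1 && *a != target[i])
        {
            throw std::invalid_argument("Input dimension cannot be broadcast (numpy mode)");
        }
    }
}

This stricter rule is also why the numpy target in the evaluate_broadcast_v3_numpy_vs_bidi test below changes from {1, 1, 4} to {1, 4, 4}: with input {1, 4, 1}, the middle dimension 4 can no longer be broadcast against a target dimension of 1 in numpy mode.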

 void op::util::BroadcastBase::validate_target_shape_none(const Shape& arg_shape,
                                                          const AxisVector& axes_mapping_val,
                                                          const Shape& target_shape) const
@@ -141,13 +185,28 @@ void op::util::BroadcastBase::validate_and_infer_types()
     }

     PartialShape result_shape{PartialShape::dynamic()};
-    auto input_rank = input_value(0).get_partial_shape().rank();
-    auto output_rank = input_value(1).get_partial_shape();
-    if (input_rank.is_static() && output_rank.is_static() && output_rank[0].is_static())
+    const auto& input_shape = get_input_partial_shape(0);
+    const auto input_rank = input_shape.rank();
+    const auto& target_shape = input_value(1).get_partial_shape();
+    const bool is_target_shape_known =
+        target_shape.rank().is_static() && target_shape[0].is_static();
+
+    if (m_mode.m_type == BroadcastType::BIDIRECTIONAL)
     {
-        result_shape =
-            PartialShape::dynamic(std::max(input_rank.get_length(), output_rank[0].get_length()));
+        if (input_rank.is_static() && is_target_shape_known)
+        {
+            result_shape = PartialShape::dynamic(
+                std::max(input_rank.get_length(), target_shape[0].get_length()));
+        }
+    }
+    else
+    {
+        if (is_target_shape_known)
+        {
+            result_shape = PartialShape::dynamic(target_shape[0].get_length());
+        }
     }

     const auto shape_constant = as_type_ptr<op::v0::Constant>(input_value(1).get_node_shared_ptr());

     if (auto concat = as_type_ptr<op::v0::Concat>(input_value(1).get_node_shared_ptr()))
@@ -205,17 +264,21 @@ void op::util::BroadcastBase::validate_and_infer_types()
             }
         }
     }
-    else if (m_mode.m_type == BroadcastType::NUMPY || m_mode.m_type == BroadcastType::PDPD)
+    else if (m_mode.m_type == BroadcastType::NUMPY)
     {
-        if (get_input_partial_shape(0).is_static() && get_input_partial_shape(1).is_static())
+        if (shape_constant)
         {
-            auto arg_shape = get_input_shape(0);
-
-            if (shape_constant)
-            {
-                const auto target_shape = shape_constant->get_shape_val();
-                result_shape = get_result_shape_numpy_pdpd(arg_shape, target_shape, m_mode);
-            }
+            const auto target_shape = shape_constant->get_shape_val();
+            result_shape = target_shape;
+            validate_target_shape_numpy(input_shape, target_shape);
         }
     }
+    else if (m_mode.m_type == BroadcastType::PDPD)
+    {
+        if (shape_constant)
+        {
+            const auto target_shape = shape_constant->get_shape_val();
+            result_shape = get_result_shape_pdpd(input_shape, target_shape, m_mode);
+        }
+    }
     set_output_type(0, get_input_element_type(0), result_shape);
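The refactor also changes what can be inferred when the target shape is not a constant: the 1-D shape tensor's own static length already fixes the output rank. A worked sketch of that rank computation, using a hypothetical helper name and assuming the rank relationships shown above:

#include <algorithm>
#include <cstddef>

// n is the static length of the 1-D target-shape tensor; its element values
// may still be unknown. NUMPY/PDPD: output rank == n. BIDIRECTIONAL: the
// output rank is the larger of the input rank and n.
std::size_t broadcast_output_rank(std::size_t input_rank, std::size_t n, bool bidirectional)
{
    return bidirectional ? std::max(input_rank, n) : n;
}

// e.g. an input of rank 3 with a target-shape tensor of shape {4} yields
// PartialShape::dynamic(4) in every mode here, since max(3, 4) == 4.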
@@ -486,9 +549,16 @@ bool op::util::BroadcastBase::evaluate(const HostTensorVector& outputs,
         validate_target_shape_none(inputs[0]->get_shape(), axes_mapping_val, target_shape);
         result_shape = target_shape;
     }
-    else if (m_mode.m_type == BroadcastType::NUMPY || m_mode.m_type == BroadcastType::PDPD)
+    else if (m_mode.m_type == BroadcastType::PDPD)
     {
-        result_shape = get_result_shape_numpy_pdpd(arg_shape, target_shape, m_mode);
+        result_shape = get_result_shape_pdpd(arg_shape, target_shape, m_mode);
+        pair_broadcast_axes =
+            get_broadcast_axes_numpy_pdpd(arg_shape, result_shape.to_shape(), m_mode);
+    }
+    else if (m_mode.m_type == BroadcastType::NUMPY)
+    {
+        result_shape = target_shape;
+        validate_target_shape_numpy(arg_shape, target_shape);
         pair_broadcast_axes =
             get_broadcast_axes_numpy_pdpd(arg_shape, result_shape.to_shape(), m_mode);
     }
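For context on the pair_broadcast_axes computation both branches share: get_broadcast_axes_numpy_pdpd, which this PR leaves unchanged, determines along which output axes the input is replicated. Below is a rough standalone sketch of the numpy-style derivation; this is an assumption about the helper's semantics, not the library's implementation.

#include <cstddef>
#include <set>
#include <vector>

// Rough sketch: left-pad the arg shape to the result rank; every padded axis,
// plus every axis where the arg dim is 1 but the result dim is larger, is a
// broadcast (replication) axis.
std::set<std::size_t> broadcast_axes_numpy(const std::vector<std::size_t>& arg,
                                           const std::vector<std::size_t>& result)
{
    std::set<std::size_t> axes;
    const std::size_t pad = result.size() - arg.size();
    for (std::size_t i = 0; i < result.size(); ++i)
    {
        if (i < pad || (arg[i - pad] == 1 && result[i] != 1))
        {
            axes.insert(i);
        }
    }
    return axes;
}

// e.g. arg {4, 1} vs result {2, 4, 3} -> axes {0, 2}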
10 changes: 7 additions & 3 deletions ngraph/src/ngraph/op/util/broadcast_base.hpp
@@ -77,9 +77,13 @@ namespace ngraph
                 const AxisSet& broadcast_axes) const;

             PartialShape
-                get_result_shape_numpy_pdpd(const Shape& arg0_shape,
-                                            const Shape& target_shape,
-                                            const op::BroadcastModeSpec& broadcast_spec) const;
+                get_result_shape_pdpd(const PartialShape& arg0_shape,
+                                      const Shape& target_shape,
+                                      const op::BroadcastModeSpec& broadcast_spec) const;
+
+            void validate_target_shape_numpy(const PartialShape& arg_shape,
+                                             const Shape& target_shape) const;

             static std::pair<bool, AxisSet>
                 get_broadcast_axes_numpy_pdpd(const Shape& arg_shape,
                                               const Shape& result_shape,
22 changes: 21 additions & 1 deletion ngraph/test/eval.cpp
@@ -315,7 +315,7 @@ TEST(eval, evaluate_broadcast_v3_numpy_vs_bidi)
     Shape in_shape{1, 4, 1};

     auto A = make_shared<op::Parameter>(element::f32, in_shape);
-    auto target_shape = op::Constant::create<int64_t>(element::i64, Shape{3}, {1, 1, 4});
+    auto target_shape = op::Constant::create<int64_t>(element::i64, Shape{3}, {1, 4, 4});
     auto bcast_v3_num = make_shared<op::v3::Broadcast>(A, target_shape, op::BroadcastType::NUMPY);
     auto fun_num = make_shared<Function>(OutputVector{bcast_v3_num}, ParameterVector{A});

@@ -343,6 +343,26 @@
     ASSERT_EQ(expec2, result_val2);
 }

+TEST(eval, evaluate_broadcast_v3_bidi_3d)
+{
+    Shape in_shape{1, 4, 1};
+
+    auto A = make_shared<op::Parameter>(element::f32, in_shape);
+    auto target_shape = op::Constant::create<int64_t>(element::i64, Shape{3}, {1, 1, 3});
+    auto bcast_v3_num =
+        make_shared<op::v3::Broadcast>(A, target_shape, op::BroadcastType::BIDIRECTIONAL);
+    auto fun_num = make_shared<Function>(OutputVector{bcast_v3_num}, ParameterVector{A});
+
+    auto result = make_shared<HostTensor>();
+    ASSERT_TRUE(fun_num->evaluate(
+        {result}, {make_host_tensor<element::Type_t::f32>(in_shape, {1.0f, 2.0f, 3.0f, 4.0f})}));
+    EXPECT_EQ(result->get_element_type(), element::f32);
+    EXPECT_EQ(result->get_partial_shape(), (PartialShape{1, 4, 3}));
+    auto result_val = read_vector<float>(result);
+    vector<float> expec{1.0f, 1.0f, 1.0f, 2.0f, 2.0f, 2.0f, 3.0f, 3.0f, 3.0f, 4.0f, 4.0f, 4.0f};
+    ASSERT_EQ(expec, result_val);
+}
+
 TEST(eval, evaluate_broadcast_v3_bidi_4d)
 {
     Shape in_shape{4, 1, 1};