Skip to content

Commit

Permalink
[IE][VPU]: Fixes BinaryEltwise DTS on empty input (#3879)
Browse files Browse the repository at this point in the history
Makes DTS for BinaryEltwise produce an empty output tensor if at least one input is empty. As the criterion for an empty tensor, ReduceMin is used (assuming all shape values are non-negative).

Tests are changed accordingly. While trying to add a new test case for inference with an empty input, the reference version failed, so those tests are left unchanged.
  • Loading branch information
ggladilo authored Feb 8, 2021
1 parent 132b473 commit 785828d
Show file tree
Hide file tree
Showing 4 changed files with 321 additions and 73 deletions.
6 changes: 3 additions & 3 deletions inference-engine/cmake/vpu_dependencies.cmake
Original file line number Diff line number Diff line change
Expand Up @@ -6,14 +6,14 @@ include_guard(GLOBAL)

set(VPU_SUPPORTED_FIRMWARES usb-ma2x8x pcie-ma2x8x)
set(VPU_SUPPORTED_FIRMWARES_HASH
"7892e82f8ba90b487c4b115bfc266265d8ceb6f3cfc3e7e203ec6150d041fa2c"
"bec36fa7a8b64cd50df8b7782c594df32267c5081d7aa2e77a701dcfa18b3ec6")
"c21f14cf8ee215f5fccf6b50de87e413b4c1ed8331f0c8fddb6c4d5746d884d7"
"f2521913ee6a024cf07bc823c4ed88e265b1c369666f027279f51d6a89e9e7de")

#
# Default packages
#

set(FIRMWARE_PACKAGE_VERSION 1599)
set(FIRMWARE_PACKAGE_VERSION 1606)
set(VPU_CLC_MA2X8X_VERSION "movi-cltools-20.09.2")

#
Expand Down
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
// Copyright (C) 2020 Intel Corporation
// Copyright (C) 2020-2021 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//

Expand All @@ -9,11 +9,10 @@
#include <vpu/utils/error.hpp>

#include "ngraph/graph_util.hpp"
#include "ngraph/opsets/opset3.hpp"
#include "ngraph/opsets/opset5.hpp"
#include <ngraph/ops.hpp>
#include <ngraph/opsets/opset6.hpp>

#include <memory>
#include <numeric>

namespace vpu {

Expand Down Expand Up @@ -44,21 +43,43 @@ void processBinaryEltwise(std::shared_ptr<ngraph::Node> eltwise, size_t lhsIndex
const auto diff = std::abs(lhsRank.get_length() - rhsRank.get_length());
if (diff) {
auto & broadcastInput = lhsRank.get_length() < rhsRank.get_length() ? lhsInput : rhsInput;
const auto broadcastConst = ngraph::opset3::Constant::create(broadcastInput.get_element_type(), {static_cast<size_t>(diff)}, {1});
broadcastInput = std::make_shared<ngraph::opset3::Concat>(ngraph::OutputVector{broadcastConst, broadcastInput}, 0);
const auto broadcastConst = ngraph::opset6::Constant::create(broadcastInput.get_element_type(), {static_cast<size_t>(diff)}, {1});
broadcastInput = std::make_shared<ngraph::opset6::Concat>(ngraph::OutputVector{broadcastConst, broadcastInput}, 0);
}

const auto shape = std::make_shared<ngraph::opset3::Maximum>(lhsInput, rhsInput);
const auto& lhsInputShape = lhsInput.get_partial_shape();
const auto& rhsInputShape = rhsInput.get_partial_shape();

auto outDSR = std::make_shared<ngraph::vpu::op::DynamicShapeResolver>(copied, shape);
VPU_THROW_UNLESS(lhsInputShape.is_static() && lhsInputShape.rank().get_length() == 1,
"DynamicToStaticShape transformation for {} of type {} expects lhs input shape to be static 1D vector, actual is {}",
eltwise->get_friendly_name(), eltwise->get_type_info(), lhsInputShape);

VPU_THROW_UNLESS(rhsInputShape.is_static() && rhsInputShape.rank().get_length() == 1,
"DynamicToStaticShape transformation for {} of type {} expects rhs input shape to be static 1D vector, actual is {}",
eltwise->get_friendly_name(), eltwise->get_type_info(), rhsInputShape);

std::shared_ptr<ngraph::Node> maxShape = std::make_shared<ngraph::opset6::Maximum>(lhsInput, rhsInput);
const auto& updateOutputShapeOnZerosFrom = [&maxShape, &shapeElementType](const ngraph::Output<ngraph::Node>& input) {
const auto& shapeValue = input.get_partial_shape();
const auto& rank = ngraph::shape_size(shapeValue.to_shape());

const auto& zeros = ngraph::opset6::Constant::create(shapeElementType, {rank}, std::vector<std::int64_t>(rank, 0));
const auto& isZero = std::make_shared<ngraph::opset6::Equal>(input, zeros);
maxShape = std::make_shared<ngraph::opset6::Select>(isZero, zeros, maxShape);
};

updateOutputShapeOnZerosFrom(lhsInput);
updateOutputShapeOnZerosFrom(rhsInput);

auto outDSR = std::make_shared<ngraph::vpu::op::DynamicShapeResolver>(copied, maxShape);
outDSR->set_friendly_name(eltwise->get_friendly_name());
ngraph::replace_node(std::move(eltwise), std::move(outDSR));
}

} // namespace

void dynamicToStaticShapeBinaryEltwise(std::shared_ptr<ngraph::Node> eltwise) {
if (eltwise->get_type_info() == ngraph::opset5::Select::type_info) {
if (eltwise->get_type_info() == ngraph::opset6::Select::type_info) {
processBinaryEltwise(eltwise, 1, 2);
} else {
VPU_THROW_UNLESS(eltwise->get_input_size() == 2,
Expand Down
Original file line number Diff line number Diff line change
@@ -1,9 +1,8 @@
// Copyright (C) 2020 Intel Corporation
// Copyright (C) 2020-2021 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//

#include <ngraph/opsets/opset3.hpp>
#include <ngraph/opsets/opset5.hpp>
#include <ngraph/opsets/opset6.hpp>
#include <ngraph/shape.hpp>
#include <ngraph/type/element_type.hpp>

Expand All @@ -15,6 +14,7 @@
#include <vpu/utils/error.hpp>

#include <ngraph_functions/utils/ngraph_helpers.hpp>
#include <numeric>

namespace {

Expand Down Expand Up @@ -52,17 +52,17 @@ class DynamicToStaticShapeEltwise: public CommonTestUtils::TestsCommon, public t
const ngraph::Shape& dataDims0,
const ngraph::Shape& dataDims1,
TestShapeTypes testShapeTypes) const {
const auto input0 = std::make_shared<ngraph::opset3::Parameter>(dataType, dataDims0);
const auto input1 = std::make_shared<ngraph::opset3::Parameter>(dataType, dataDims1);
const auto input0 = std::make_shared<ngraph::opset6::Parameter>(dataType, dataDims0);
const auto input1 = std::make_shared<ngraph::opset6::Parameter>(dataType, dataDims1);

const auto input0Dims = std::make_shared<ngraph::opset3::Parameter>(ngraph::element::i64, ngraph::Shape{dataDims0.size()});
const auto input0Dims = std::make_shared<ngraph::opset6::Parameter>(ngraph::element::i64, ngraph::Shape{dataDims0.size()});
const auto dsr0 = std::make_shared<ngraph::vpu::op::DynamicShapeResolver>(input0, input0Dims);

ngraph::ParameterVector params{input0, input1, input0Dims};

std::shared_ptr<ngraph::Node> eltwiseInput1 = input1;
if (testShapeTypes == TestShapeTypes::ALL_DYNAMIC) {
const auto input1Dims = std::make_shared<ngraph::opset3::Parameter>(ngraph::element::i64,
const auto input1Dims = std::make_shared<ngraph::opset6::Parameter>(ngraph::element::i64,
ngraph::Shape{dataDims1.size()});
eltwiseInput1 = std::make_shared<ngraph::vpu::op::DynamicShapeResolver>(input1, input1Dims);
params.push_back(input1Dims);
Expand Down Expand Up @@ -92,20 +92,20 @@ class DynamicToStaticShapeEltwise: public CommonTestUtils::TestsCommon, public t
const ngraph::Shape& dataDims1,
TestShapeTypes testShapeTypes) {
// Data flow subgraph
const auto input0 = std::make_shared<ngraph::opset3::Parameter>(dataType, dataDims0);
const auto input1 = std::make_shared<ngraph::opset3::Parameter>(dataType, dataDims1);
const auto input0 = std::make_shared<ngraph::opset6::Parameter>(dataType, dataDims0);
const auto input1 = std::make_shared<ngraph::opset6::Parameter>(dataType, dataDims1);

const auto input0Dims = std::make_shared<ngraph::opset3::Parameter>(ngraph::element::i64, ngraph::Shape{dataDims0.size()});
const auto input0Dims = std::make_shared<ngraph::opset6::Parameter>(ngraph::element::i64, ngraph::Shape{dataDims0.size()});
const auto dsr0 = std::make_shared<ngraph::vpu::op::DynamicShapeResolver>(input0, input0Dims);

ngraph::ParameterVector params{input0, input1, input0Dims};

std::shared_ptr<ngraph::Node> dims;
if (testShapeTypes == TestShapeTypes:: ALL_DYNAMIC) {
params.push_back(std::make_shared<ngraph::opset3::Parameter>(ngraph::element::i64, ngraph::Shape{dataDims1.size()}));
params.push_back(std::make_shared<ngraph::opset6::Parameter>(ngraph::element::i64, ngraph::Shape{dataDims1.size()}));
dims = params.back();
} else {
dims = ngraph::opset3::Constant::create(ngraph::element::i64, {dataDims1.size()}, dataDims1);
dims = ngraph::opset6::Constant::create(ngraph::element::i64, {dataDims1.size()}, dataDims1);
}

std::shared_ptr<ngraph::Node> eltwiseInput1 = input1;
Expand All @@ -116,8 +116,11 @@ class DynamicToStaticShapeEltwise: public CommonTestUtils::TestsCommon, public t
const auto eltwise = buildEltwise(eltwiseType, {dsr0, eltwiseInput1}, params, testShapeTypes);

// Shape infer subgraph
const auto maximum = std::make_shared<ngraph::opset3::Maximum>(input0Dims, dims);
const auto dsr_final = std::make_shared<ngraph::vpu::op::DynamicShapeResolver>(eltwise, maximum);
std::shared_ptr<ngraph::Node> maxShape = std::make_shared<ngraph::opset6::Maximum>(input0Dims, dims);
maxShape = updateOutputShapeOnZerosFrom(maxShape, input0Dims);
maxShape = updateOutputShapeOnZerosFrom(maxShape, dims);

const auto dsr_final = std::make_shared<ngraph::vpu::op::DynamicShapeResolver>(eltwise, maxShape);

const auto function = std::make_shared<ngraph::Function>(
ngraph::NodeVector{dsr_final},
Expand All @@ -135,20 +138,20 @@ class DynamicToStaticShapeEltwise: public CommonTestUtils::TestsCommon, public t
const ngraph::Shape& dataDims1,
TestShapeTypes testShapeTypes) {
// Data flow subgraph
const auto input0 = std::make_shared<ngraph::opset3::Parameter>(dataType, dataDims0);
const auto input1 = std::make_shared<ngraph::opset3::Parameter>(dataType, dataDims1);
const auto input0 = std::make_shared<ngraph::opset6::Parameter>(dataType, dataDims0);
const auto input1 = std::make_shared<ngraph::opset6::Parameter>(dataType, dataDims1);

const auto input0Dims = std::make_shared<ngraph::opset3::Parameter>(ngraph::element::i64, ngraph::Shape{dataDims0.size()});
const auto input0Dims = std::make_shared<ngraph::opset6::Parameter>(ngraph::element::i64, ngraph::Shape{dataDims0.size()});
const auto dsr0 = std::make_shared<ngraph::vpu::op::DynamicShapeResolver>(input0, input0Dims);

ngraph::ParameterVector params{input0, input1, input0Dims};

std::shared_ptr<ngraph::Node> dims;
if (testShapeTypes == TestShapeTypes::ALL_DYNAMIC) {
params.push_back(std::make_shared<ngraph::opset3::Parameter>(ngraph::element::i64, ngraph::Shape{dataDims1.size()}));
params.push_back(std::make_shared<ngraph::opset6::Parameter>(ngraph::element::i64, ngraph::Shape{dataDims1.size()}));
dims = params.back();
} else {
dims = ngraph::opset3::Constant::create(ngraph::element::i64, {dataDims1.size()}, dataDims1);
dims = ngraph::opset6::Constant::create(ngraph::element::i64, {dataDims1.size()}, dataDims1);
}

std::shared_ptr<ngraph::Node> eltwiseInput1 = input1;
Expand All @@ -159,10 +162,14 @@ class DynamicToStaticShapeEltwise: public CommonTestUtils::TestsCommon, public t
const auto eltwise = buildEltwise(eltwiseType, {dsr0, eltwiseInput1}, params, testShapeTypes);

// Shape infer subgraph
const auto broadcastConst = ngraph::opset3::Constant::create(ngraph::element::i64, {dataDims1.size() - dataDims0.size()}, {1});
const auto concat = std::make_shared<ngraph::opset3::Concat>(ngraph::OutputVector{broadcastConst, input0Dims}, 0);
const auto maximum = std::make_shared<ngraph::opset3::Maximum>(concat, dims);
const auto dsrFinal = std::make_shared<ngraph::vpu::op::DynamicShapeResolver>(eltwise, maximum);
const auto broadcastConst = ngraph::opset6::Constant::create(ngraph::element::i64, {dataDims1.size() - dataDims0.size()}, {1});
const auto concat = std::make_shared<ngraph::opset6::Concat>(ngraph::OutputVector{broadcastConst, input0Dims}, 0);

std::shared_ptr<ngraph::Node> maxShape = std::make_shared<ngraph::opset6::Maximum>(concat, dims);
maxShape = updateOutputShapeOnZerosFrom(maxShape, concat);
maxShape = updateOutputShapeOnZerosFrom(maxShape, dims);

const auto dsrFinal = std::make_shared<ngraph::vpu::op::DynamicShapeResolver>(eltwise, maxShape);

const auto function = std::make_shared<ngraph::Function>(
ngraph::NodeVector{dsrFinal},
Expand All @@ -180,20 +187,20 @@ class DynamicToStaticShapeEltwise: public CommonTestUtils::TestsCommon, public t
const ngraph::Shape& dataDims1,
TestShapeTypes testShapeTypes) {
// Data flow subgraph
const auto input0 = std::make_shared<ngraph::opset3::Parameter>(dataType, dataDims0);
const auto input1 = std::make_shared<ngraph::opset3::Parameter>(dataType, dataDims1);
const auto input0 = std::make_shared<ngraph::opset6::Parameter>(dataType, dataDims0);
const auto input1 = std::make_shared<ngraph::opset6::Parameter>(dataType, dataDims1);

const auto input0Dims = std::make_shared<ngraph::opset3::Parameter>(ngraph::element::i64, ngraph::Shape{dataDims0.size()});
const auto input0Dims = std::make_shared<ngraph::opset6::Parameter>(ngraph::element::i64, ngraph::Shape{dataDims0.size()});
const auto dsr0 = std::make_shared<ngraph::vpu::op::DynamicShapeResolver>(input0, input0Dims);

ngraph::ParameterVector params{input0, input1, input0Dims};

std::shared_ptr<ngraph::Node> dims;
if (testShapeTypes == TestShapeTypes::ALL_DYNAMIC) {
params.push_back(std::make_shared<ngraph::opset3::Parameter>(ngraph::element::i64, ngraph::Shape{dataDims1.size()}));
params.push_back(std::make_shared<ngraph::opset6::Parameter>(ngraph::element::i64, ngraph::Shape{dataDims1.size()}));
dims = params.back();
} else {
dims = ngraph::opset3::Constant::create(ngraph::element::i64, {dataDims1.size()}, dataDims1);
dims = ngraph::opset6::Constant::create(ngraph::element::i64, {dataDims1.size()}, dataDims1);
}

std::shared_ptr<ngraph::Node> eltwiseInput1 = input1;
Expand All @@ -204,10 +211,14 @@ class DynamicToStaticShapeEltwise: public CommonTestUtils::TestsCommon, public t
const auto eltwise = buildEltwise(eltwiseType, {dsr0, eltwiseInput1}, params, testShapeTypes);

// Shape infer subgraph
const auto broadcastConst = ngraph::opset3::Constant::create(ngraph::element::i64, {dataDims0.size() - dataDims1.size()}, {1});
const auto concat = std::make_shared<ngraph::opset3::Concat>(ngraph::OutputVector{broadcastConst, dims}, 0);
const auto maximum = std::make_shared<ngraph::opset3::Maximum>(input0Dims, concat);
const auto dsrFinal = std::make_shared<ngraph::vpu::op::DynamicShapeResolver>(eltwise, maximum);
const auto broadcastConst = ngraph::opset6::Constant::create(ngraph::element::i64, {dataDims0.size() - dataDims1.size()}, {1});
const auto concat = std::make_shared<ngraph::opset6::Concat>(ngraph::OutputVector{broadcastConst, dims}, 0);

std::shared_ptr<ngraph::Node> maxShape = std::make_shared<ngraph::opset6::Maximum>(input0Dims, concat);
maxShape = updateOutputShapeOnZerosFrom(maxShape, input0Dims);
maxShape = updateOutputShapeOnZerosFrom(maxShape, concat);

const auto dsrFinal = std::make_shared<ngraph::vpu::op::DynamicShapeResolver>(eltwise, maxShape);

const auto function = std::make_shared<ngraph::Function>(
ngraph::NodeVector{dsrFinal},
Expand All @@ -224,13 +235,13 @@ class DynamicToStaticShapeEltwise: public CommonTestUtils::TestsCommon, public t
const ngraph::OutputVector& inputs,
ngraph::ParameterVector& params,
TestShapeTypes testShapeTypes) {
if (eltwiseType == ngraph::opset5::Select::type_info) {
params.push_back(std::make_shared<ngraph::opset3::Parameter>(
if (eltwiseType == ngraph::opset6::Select::type_info) {
params.push_back(std::make_shared<ngraph::opset6::Parameter>(
ngraph::element::boolean,
ngraph::Shape{inputs.front().get_shape()}));
std::shared_ptr<ngraph::Node> condInput = params.back();
if (testShapeTypes == TestShapeTypes::ALL_DYNAMIC) {
params.push_back(std::make_shared<ngraph::opset3::Parameter>(
params.push_back(std::make_shared<ngraph::opset6::Parameter>(
ngraph::element::i64,
ngraph::Shape{static_cast<size_t>(inputs.front().get_partial_shape().rank().get_length())}));
condInput = std::make_shared<ngraph::vpu::op::DynamicShapeResolver>(condInput, params.back());
Expand All @@ -240,6 +251,16 @@ class DynamicToStaticShapeEltwise: public CommonTestUtils::TestsCommon, public t
return ngraph::helpers::getNodeSharedPtr(eltwiseType, inputs);
}
}

// Wraps outputShape in a Select so that every position where inputShape is 0
// also becomes 0 in the result — this propagates "empty tensor" through the
// shape-infer subgraph regardless of what the broadcasted maximum computed.
static std::shared_ptr<ngraph::Node> updateOutputShapeOnZerosFrom(
        const std::shared_ptr<ngraph::Node>& outputShape, const ngraph::Output<ngraph::Node>& inputShape) {
    // inputShape is expected to be a static 1D shape vector; its element count
    // gives the rank of the data tensor it describes.
    const auto rank = ngraph::shape_size(inputShape.get_partial_shape().to_shape());

    const auto zeroVector = ngraph::opset6::Constant::create(
        ngraph::element::i64, {rank}, std::vector<std::int64_t>(rank, 0));
    const auto zeroMask = std::make_shared<ngraph::opset6::Equal>(inputShape, zeroVector);
    // Where the mask is true pick 0, otherwise keep the computed output dim.
    return std::make_shared<ngraph::opset6::Select>(zeroMask, zeroVector, outputShape);
}
};

TEST_P(DynamicToStaticShapeEltwise, CompareFunctions) {
Expand All @@ -253,17 +274,17 @@ INSTANTIATE_TEST_CASE_P(smoke_EltwiseBroadcast, DynamicToStaticShapeEltwise, tes
ngraph::element::i64,
ngraph::element::u8),
testing::Values(
ngraph::opset3::Add::type_info,
ngraph::opset3::Divide::type_info,
ngraph::opset3::Equal::type_info,
ngraph::opset3::Greater::type_info,
ngraph::opset3::Power::type_info,
ngraph::opset3::Multiply::type_info,
ngraph::opset3::Subtract::type_info,
ngraph::opset3::Maximum::type_info,
ngraph::opset3::Minimum::type_info,
ngraph::opset3::Less::type_info,
ngraph::opset5::Select::type_info),
ngraph::opset6::Add::type_info,
ngraph::opset6::Divide::type_info,
ngraph::opset6::Equal::type_info,
ngraph::opset6::Greater::type_info,
ngraph::opset6::Power::type_info,
ngraph::opset6::Multiply::type_info,
ngraph::opset6::Subtract::type_info,
ngraph::opset6::Maximum::type_info,
ngraph::opset6::Minimum::type_info,
ngraph::opset6::Less::type_info,
ngraph::opset6::Select::type_info),
testing::Values(
EltwiseParams{DataDims{1000}, DataDims{1}, DynamicToStaticShapeEltwise::reference_simple},
EltwiseParams{DataDims{1000, 1, 1}, DataDims{1000, 1, 1}, DynamicToStaticShapeEltwise::reference_simple},
Expand All @@ -272,4 +293,4 @@ INSTANTIATE_TEST_CASE_P(smoke_EltwiseBroadcast, DynamicToStaticShapeEltwise, tes
testing::Values(TestShapeTypes::ALL_DYNAMIC, TestShapeTypes::SINGLE_DSR)
));

} // namespace
} // namespace
Loading

0 comments on commit 785828d

Please sign in to comment.