refactoring: InputLayerType::NONE was removed
eshoguli committed Oct 26, 2023
1 parent 2739b7c commit affa0b8
Showing 5 changed files with 48 additions and 58 deletions.
@@ -158,58 +158,56 @@ void EltwiseLayerCPUTest::SetUp() {
     }
     ov::ParameterVector parameters{std::make_shared<ov::op::v0::Parameter>(netType, inputDynamicShapes.front())};
     std::shared_ptr<ngraph::Node> secondaryInput;
-    switch (secondaryInputType) {
-        case ngraph::helpers::InputLayerType::PARAMETER: {
-            auto param = std::make_shared<ov::op::v0::Parameter>(netType, inputDynamicShapes.back());
-            secondaryInput = param;
-            parameters.push_back(param);
-            break;
-        }
-        case ngraph::helpers::InputLayerType::CONSTANT: {
-            auto pShape = inputDynamicShapes.back();
-            ngraph::Shape shape;
-            if (pShape.is_static()) {
-                shape = pShape.get_shape();
-            } else {
-                ASSERT_TRUE(pShape.rank().is_static());
-                shape = std::vector<size_t>(pShape.rank().get_length(), 1);
-                for (size_t i = 0; i < pShape.size(); ++i) {
-                    if (pShape[i].is_static()) {
-                        shape[i] = pShape[i].get_length();
-                    }
-                }
-            }
-
-            auto data_tensor = generate_eltwise_input(netType, shape);
-            if ((netType == ElementType::i8) || (netType == ElementType::u8)) {
-                auto data_ptr = reinterpret_cast<uint8_t*>(data_tensor.data());
-                std::vector<uint8_t> data(data_ptr, data_ptr + ngraph::shape_size(shape));
-                secondaryInput = ngraph::builder::makeConstant(netType, shape, data);
-            } else if ((netType == ElementType::i16) || (netType == ElementType::u16)) {
-                auto data_ptr = reinterpret_cast<uint16_t*>(data_tensor.data());
-                std::vector<uint16_t> data(data_ptr, data_ptr + ngraph::shape_size(shape));
-                secondaryInput = ngraph::builder::makeConstant(netType, shape, data);
-            } else if ((netType == ElementType::i32) || (netType == ElementType::u32)) {
-                auto data_ptr = reinterpret_cast<uint32_t*>(data_tensor.data());
-                std::vector<uint32_t> data(data_ptr, data_ptr + ngraph::shape_size(shape));
-                secondaryInput = ngraph::builder::makeConstant(netType, shape, data);
-            } else if (netType == ElementType::f16) {
-                auto data_ptr = reinterpret_cast<ov::float16*>(data_tensor.data());
-                std::vector<ov::float16> data(data_ptr, data_ptr + ngraph::shape_size(shape));
-                secondaryInput = ngraph::builder::makeConstant(netType, shape, data);
-            } else {
-                auto data_ptr = reinterpret_cast<float*>(data_tensor.data());
-                std::vector<float> data(data_ptr, data_ptr + ngraph::shape_size(shape));
-                secondaryInput = ngraph::builder::makeConstant(netType, shape, data);
-            }
-            break;
-        }
-        case ngraph::helpers::InputLayerType::NONE: {
-            // the second input is absent
-            break;
-        }
-        default: {
-            FAIL() << "Unsupported InputLayerType";
-        }
-    }
+    if (eltwiseType != ngraph::helpers::EltwiseTypes::BITWISE_NOT) {
+        switch (secondaryInputType) {
+            case ngraph::helpers::InputLayerType::PARAMETER: {
+                auto param = std::make_shared<ov::op::v0::Parameter>(netType, inputDynamicShapes.back());
+                secondaryInput = param;
+                parameters.push_back(param);
+                break;
+            }
+            case ngraph::helpers::InputLayerType::CONSTANT: {
+                auto pShape = inputDynamicShapes.back();
+                ngraph::Shape shape;
+                if (pShape.is_static()) {
+                    shape = pShape.get_shape();
+                } else {
+                    ASSERT_TRUE(pShape.rank().is_static());
+                    shape = std::vector<size_t>(pShape.rank().get_length(), 1);
+                    for (size_t i = 0; i < pShape.size(); ++i) {
+                        if (pShape[i].is_static()) {
+                            shape[i] = pShape[i].get_length();
+                        }
+                    }
+                }
+
+                auto data_tensor = generate_eltwise_input(netType, shape);
+                if ((netType == ElementType::i8) || (netType == ElementType::u8)) {
+                    auto data_ptr = reinterpret_cast<uint8_t*>(data_tensor.data());
+                    std::vector<uint8_t> data(data_ptr, data_ptr + ngraph::shape_size(shape));
+                    secondaryInput = ngraph::builder::makeConstant(netType, shape, data);
+                } else if ((netType == ElementType::i16) || (netType == ElementType::u16)) {
+                    auto data_ptr = reinterpret_cast<uint16_t*>(data_tensor.data());
+                    std::vector<uint16_t> data(data_ptr, data_ptr + ngraph::shape_size(shape));
+                    secondaryInput = ngraph::builder::makeConstant(netType, shape, data);
+                } else if ((netType == ElementType::i32) || (netType == ElementType::u32)) {
+                    auto data_ptr = reinterpret_cast<uint32_t*>(data_tensor.data());
+                    std::vector<uint32_t> data(data_ptr, data_ptr + ngraph::shape_size(shape));
+                    secondaryInput = ngraph::builder::makeConstant(netType, shape, data);
+                } else if (netType == ElementType::f16) {
+                    auto data_ptr = reinterpret_cast<ov::float16*>(data_tensor.data());
+                    std::vector<ov::float16> data(data_ptr, data_ptr + ngraph::shape_size(shape));
+                    secondaryInput = ngraph::builder::makeConstant(netType, shape, data);
+                } else {
+                    auto data_ptr = reinterpret_cast<float*>(data_tensor.data());
+                    std::vector<float> data(data_ptr, data_ptr + ngraph::shape_size(shape));
+                    secondaryInput = ngraph::builder::makeConstant(netType, shape, data);
+                }
+                break;
+            }
+            default: {
+                FAIL() << "Unsupported InputLayerType";
+            }
+        }
+    }
     auto eltwise = ngraph::builder::makeEltwise(parameters[0], secondaryInput, eltwiseType);
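
The net effect of the hunk above: whether the second input exists is now decided by the operation type rather than by a sentinel enum value. When eltwiseType is BITWISE_NOT, the whole switch is skipped, secondaryInput stays null, and makeEltwise is left to build a single-input node. Below is a minimal sketch of that dispatch; EltwiseKind and make_eltwise_sketch are illustrative names, the BitwiseNot op is assumed from opset13, and the real builder handles many more operation types.

#include <memory>
#include <stdexcept>

#include "openvino/op/add.hpp"
#include "openvino/op/bitwise_not.hpp"

// Local stand-in for ngraph::helpers::EltwiseTypes, reduced to two values.
enum class EltwiseKind { ADD, BITWISE_NOT };

std::shared_ptr<ov::Node> make_eltwise_sketch(const ov::Output<ov::Node>& in0,
                                              const ov::Output<ov::Node>& in1,
                                              EltwiseKind kind) {
    switch (kind) {
    case EltwiseKind::BITWISE_NOT:
        // Unary: the second input is simply ignored, which is why the test
        // above can skip creating it instead of signalling "absent" via NONE.
        return std::make_shared<ov::op::v13::BitwiseNot>(in0);
    case EltwiseKind::ADD:
        return std::make_shared<ov::op::v1::Add>(in0, in1);
    }
    throw std::runtime_error("unsupported eltwise kind");
}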
@@ -278,7 +278,7 @@ const auto params_4D_bitwise_NOT = ::testing::Combine(
     ::testing::Combine(
         ::testing::Values(bitwise_in_shapes_4D),
         ::testing::ValuesIn({ ngraph::helpers::EltwiseTypes::BITWISE_NOT }),
-        ::testing::ValuesIn({ ngraph::helpers::InputLayerType::NONE }),
+        ::testing::ValuesIn({ ngraph::helpers::InputLayerType::CONSTANT }),
         ::testing::ValuesIn({ ov::test::utils::OpType::VECTOR }),
         ::testing::ValuesIn({ ov::element::Type_t::i8, ov::element::Type_t::u8, ov::element::Type_t::i32 }),
         ::testing::Values(ov::element::Type_t::undefined),
@@ -295,7 +295,7 @@ const auto params_4D_bitwise_NOT_i16 = ::testing::Combine(
     ::testing::Combine(
         ::testing::Values(bitwise_in_shapes_4D),
         ::testing::ValuesIn({ ngraph::helpers::EltwiseTypes::BITWISE_NOT }),
-        ::testing::ValuesIn({ ngraph::helpers::InputLayerType::NONE }),
+        ::testing::ValuesIn({ ngraph::helpers::InputLayerType::CONSTANT }),
         ::testing::ValuesIn({ ov::test::utils::OpType::VECTOR }),
         ::testing::ValuesIn({ ov::element::Type_t::i16, ov::element::Type_t::u16 }),
         ::testing::Values(ov::element::Type_t::undefined),
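
For readers less familiar with googletest's value-parameterized tests: ::testing::Combine builds the cross product of its generators, so replacing a one-element ValuesIn list (NONE) with another one-element list (CONSTANT) leaves the number of generated test cases unchanged; for BITWISE_NOT the value is then ignored by SetUp(). A reduced, self-contained sketch of the same pattern, with illustrative names (InputKind, BitwiseNotSketchTest):

#include <tuple>

#include <gtest/gtest.h>

// Stand-in for InputLayerType after the removal.
enum class InputKind { CONSTANT, PARAMETER };

class BitwiseNotSketchTest
    : public ::testing::TestWithParam<std::tuple<InputKind, int>> {};

TEST_P(BitwiseNotSketchTest, Runs) {
    // For a unary op, the InputKind element of the tuple is effectively unused.
    SUCCEED();
}

// One-element lists keep the cross product (and the test count) unchanged
// when NONE is swapped for CONSTANT.
INSTANTIATE_TEST_SUITE_P(Sketch,
                         BitwiseNotSketchTest,
                         ::testing::Combine(::testing::Values(InputKind::CONSTANT),
                                            ::testing::Values(8, 16, 32)));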
4 changes: 0 additions & 4 deletions src/tests/ov_helpers/ov_models/src/input_layer.cpp
@@ -24,10 +24,6 @@ std::shared_ptr<ov::Node> makeInputLayer(const element::Type& type,
         input = std::make_shared<ov::op::v0::Parameter>(type, ov::Shape(shape));
         break;
     }
-    case ov::test::utils::InputLayerType::NONE: {
-        // input is not used
-        break;
-    }
     default:
         throw std::runtime_error("Unsupported inputType");
     }
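
With the NONE case gone, makeInputLayer supports exactly two input kinds. A reduced sketch of the helper's shape after this change; make_input_layer_sketch and InputKind are illustrative names, and the CONSTANT branch is an assumption, since that part of the function is not shown in the hunk:

#include <memory>
#include <stdexcept>
#include <vector>

#include "openvino/core/shape.hpp"
#include "openvino/op/constant.hpp"
#include "openvino/op/parameter.hpp"

// Stand-in for ov::test::utils::InputLayerType after the removal.
enum class InputKind { CONSTANT, PARAMETER };

std::shared_ptr<ov::Node> make_input_layer_sketch(const ov::element::Type& type,
                                                  InputKind kind,
                                                  const std::vector<size_t>& shape) {
    switch (kind) {
    case InputKind::PARAMETER:
        return std::make_shared<ov::op::v0::Parameter>(type, ov::Shape(shape));
    case InputKind::CONSTANT:
        // Assumed branch: zero-filled data stands in for whatever values the
        // real helper generates.
        return ov::op::v0::Constant::create(
            type, ov::Shape(shape),
            std::vector<float>(ov::shape_size(ov::Shape(shape)), 0.0f));
    }
    throw std::runtime_error("Unsupported inputType");
}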
@@ -71,7 +71,6 @@ enum SqueezeOpType {
 enum class InputLayerType {
     CONSTANT,
     PARAMETER,
-    NONE,
 };

 enum LogicalTypes {
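
A practical side effect of deleting an enumerator (a general C++ point, not something this commit adds): every remaining mention of InputLayerType::NONE now fails to compile, so stale call sites surface immediately, and switches without a default, as in the illustrative to_string below, let -Wswitch report anything left unhandled.

// Shape of the enum after this commit.
enum class InputLayerType { CONSTANT, PARAMETER };

const char* to_string(InputLayerType type) {
    switch (type) {
    // No default branch on purpose: with -Wswitch (or -Werror=switch) the
    // compiler reports any enumerator this switch fails to handle.
    case InputLayerType::CONSTANT:
        return "CONSTANT";
    case InputLayerType::PARAMETER:
        return "PARAMETER";
    }
    return "?";  // unreachable for valid enumerator values
}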
3 changes: 0 additions & 3 deletions src/tests/test_utils/common_test_utils/src/test_enums.cpp
@@ -110,9 +110,6 @@ std::ostream& operator<<(std::ostream& os, ov::test::utils::InputLayerType type)
     case ov::test::utils::InputLayerType::PARAMETER:
         os << "PARAMETER";
         break;
-    case ov::test::utils::InputLayerType::NONE:
-        os << "NONE";
-        break;
     default:
         throw std::runtime_error("NOT_SUPPORTED_INPUT_LAYER_TYPE");
     }
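
The stream operator trimmed above is what parameterized-test name generators typically rely on, so the observable change is in generated test names and in the error raised for stale values. A small hypothetical helper showing the post-commit behavior; the header path is an assumption inferred from the source layout above:

#include <sstream>
#include <string>

#include "common_test_utils/test_enums.hpp"  // assumed header for InputLayerType

// Hypothetical helper in the style of getTestCaseName() utilities.
std::string input_layer_name(ov::test::utils::InputLayerType type) {
    std::ostringstream result;
    result << type;  // "CONSTANT" or "PARAMETER"; any other value now throws
                     // std::runtime_error("NOT_SUPPORTED_INPUT_LAYER_TYPE")
    return result.str();
}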
