Refactor ActivationLayerTest (openvinotoolkit#20180)
* Refactor ActivationLayerTest
olpipi authored Oct 25, 2023
1 parent 973b194 commit 30260e3
Showing 7 changed files with 482 additions and 336 deletions.
@@ -5,144 +5,129 @@
#include <vector>

#include "common_test_utils/test_enums.hpp"
#include "single_layer_tests/activation.hpp"
#include "single_op_tests/activation.hpp"
#include "common_test_utils/test_constants.hpp"

using namespace LayerTestsDefinitions;
using namespace ov::test::utils;
namespace {
// Common params
const std::vector<InferenceEngine::Precision> inputPrecisions = {
InferenceEngine::Precision::FP32
// TODO: Fix Issue-27390
// InferenceEngine::Precision::I16,
// InferenceEngine::Precision::U8
};

const std::vector<InferenceEngine::Precision> netPrecisions = {
InferenceEngine::Precision::FP32,
InferenceEngine::Precision::FP16
};
using ov::test::ActivationLayerTest;
using ov::test::ActivationParamLayerTest;
using ov::test::utils::ActivationTypes;

const std::vector<InferenceEngine::Precision> intPrecisions = {
InferenceEngine::Precision::I32,
const std::vector<ov::element::Type> model_types = {
ov::element::f32,
ov::element::f16
};

const std::map<ov::test::utils::ActivationTypes, std::vector<std::vector<float>>> activationTypes = {
{Sigmoid, {}},
{Tan, {}},
{Tanh, {}},
{Relu, {}},
{Exp, {}},
{Log, {}},
{Sign, {}},
{Abs, {}},
{Clamp, {{-2.0f, 2.0f}}},
{Negative, {}},
{Acos, {}},
{Acosh, {}},
{Asin, {}},
{Asinh, {}},
{Atan, {}},
{Atanh, {}},
{Cos, {}},
{Cosh, {}},
{Floor, {}},
{Sin, {}},
{Sinh, {}},
{Sqrt, {}},
{Elu, {{0.1f}}},
{Erf, {}},
{HardSigmoid, {{0.2f, 0.5f}}},
{Selu, {{1.6732f, 1.0507f}}},
{Ceiling, {}},
{Mish, {}},
{HSwish, {}},
{SoftPlus, {}},
{HSigmoid, {}},
{RoundHalfToEven, {}},
{RoundHalfAwayFromZero, {}},
{GeluErf, {}},
{GeluTanh, {}},
{Swish, {{0.4f}}}
const std::map<ActivationTypes, std::vector<std::vector<float>>> activationTypes = {
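// The value vectors are the activation's constant attributes, e.g. Clamp{min, max},
// Elu{alpha}, Selu{alpha, lambda}; an empty vector means no extra attributes.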
{ActivationTypes::Sigmoid, {}},
{ActivationTypes::Tan, {}},
{ActivationTypes::Tanh, {}},
{ActivationTypes::Relu, {}},
{ActivationTypes::Exp, {}},
{ActivationTypes::Log, {}},
{ActivationTypes::Sign, {}},
{ActivationTypes::Abs, {}},
{ActivationTypes::Clamp, {{-2.0f, 2.0f}}},
{ActivationTypes::Negative, {}},
{ActivationTypes::Acos, {}},
{ActivationTypes::Acosh, {}},
{ActivationTypes::Asin, {}},
{ActivationTypes::Asinh, {}},
{ActivationTypes::Atan, {}},
{ActivationTypes::Atanh, {}},
{ActivationTypes::Cos, {}},
{ActivationTypes::Cosh, {}},
{ActivationTypes::Floor, {}},
{ActivationTypes::Sin, {}},
{ActivationTypes::Sinh, {}},
{ActivationTypes::Sqrt, {}},
{ActivationTypes::Elu, {{0.1f}}},
{ActivationTypes::Erf, {}},
{ActivationTypes::HardSigmoid, {{0.2f, 0.5f}}},
{ActivationTypes::Selu, {{1.6732f, 1.0507f}}},
{ActivationTypes::Ceiling, {}},
{ActivationTypes::Mish, {}},
{ActivationTypes::HSwish, {}},
{ActivationTypes::SoftPlus, {}},
{ActivationTypes::HSigmoid, {}},
{ActivationTypes::RoundHalfToEven, {}},
{ActivationTypes::RoundHalfAwayFromZero, {}},
{ActivationTypes::GeluErf, {}},
{ActivationTypes::GeluTanh, {}},
{ActivationTypes::Swish, {{0.4f}}}
};

// List of operations that should be tested also with integer precision
const std::map<ActivationTypes, std::vector<std::vector<float>>> intActivationTypes = {
{Acosh, {}},
{Asinh, {}},
{Atan, {}},
{Negative, {}},
{Ceiling, {}},
{Cos, {}},
{Cosh, {}},
{Sign, {}},
{Sinh, {}},
{Sqrt, {}},
{Tan, {}},
{Tanh, {}},
{ActivationTypes::Acosh, {}},
{ActivationTypes::Asinh, {}},
{ActivationTypes::Atan, {}},
{ActivationTypes::Negative, {}},
{ActivationTypes::Ceiling, {}},
{ActivationTypes::Cos, {}},
{ActivationTypes::Cosh, {}},
{ActivationTypes::Sign, {}},
{ActivationTypes::Sinh, {}},
{ActivationTypes::Sqrt, {}},
{ActivationTypes::Tan, {}},
{ActivationTypes::Tanh, {}},
};

const std::map<ActivationTypes, std::vector<std::vector<float>>> activationParamTypes = {
{PReLu, {{}}}, // Slope will be filled with increasing values from -10 to match slope input shape
{LeakyRelu, {{0.01f}}}
{ActivationTypes::PReLu, {{}}}, // Slope will be filled with increasing values from -10 to match slope input shape
{ActivationTypes::LeakyRelu, {{0.01f}}}
};

std::map<std::vector<size_t>, std::vector<std::vector<size_t>>> basic = {
{{1, 50}, {{}}},
{{5, 128}, {{}}},
{{2, 2, 2, 2, 2, 2, 2, 2}, {{}}},
std::map<std::vector<ov::Shape>, std::vector<ov::Shape>> basic_input_shapes_static = {
{{{1, 50}}, {}},
{{{5, 128}}, {}},
{{{2, 2, 2, 2, 2, 2, 2, 2}}, {}},
};

std::map<std::vector<size_t>, std::vector<std::vector<size_t>>> preluBasic = {
{{1, 50}, {{1}, {50}}},
{{1, 128}, {{1}, {128}}},
std::map<std::vector<ov::Shape>, std::vector<ov::Shape>> prelu_basic_input_shapes_static = {
{{{1, 50}}, {{1}, {50}}},
{{{1, 128}}, {{1}, {128}}},

// Broadcast check
{{3, 2}, {{1}, {2}, {3, 2}}},
{{3, 2, 5}, {{1}, {2}, {5}, {2, 5}, {3, 1, 5}, {1, 2, 1}, {1, 1, 5}, {3, 1, 1}, {3, 2, 5}}},
{{2, 1, 2}, {{2}, {2, 1, 1}}},
{{3, 2, 5, 7}, {{1}, {7}, {2}, {5, 7}, {2, 5, 7}, {2, 1, 1}, {1, 2, 1, 1}, {3, 2, 1, 1}, {3, 2, 5, 7}}},
{{2, 2, 2, 2, 2, 2, 2, 2}, {{2}, {2, 2}, {2, 1, 1, 2}}},
{{{3, 2}}, {{1}, {2}, {3, 2}}},
{{{3, 2, 5}}, {{1}, {2}, {5}, {2, 5}, {3, 1, 5}, {1, 2, 1}, {1, 1, 5}, {3, 1, 1}, {3, 2, 5}}},
{{{2, 1, 2}}, {{2}, {2, 1, 1}}},
{{{3, 2, 5, 7}}, {{1}, {7}, {2}, {5, 7}, {2, 5, 7}, {2, 1, 1}, {1, 2, 1, 1}, {3, 2, 1, 1}, {3, 2, 5, 7}}},
{{{2, 2, 2, 2, 2, 2, 2, 2}}, {{2}, {2, 2}, {2, 1, 1, 2}}},
};

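// Wrap each pair's static input shapes into the InputShape-based representation
// used by the refactored tests; the second element (e.g. the PReLU slope shape)
// is passed through unchanged.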
auto static_shapes_param_transform = [](const std::vector<std::pair<std::vector<ov::Shape>, ov::Shape>>& original_shapes) {
std::vector<std::pair<std::vector<ov::test::InputShape>, ov::Shape>> new_shapes;
for (const auto& shape_element : original_shapes) {
new_shapes.emplace_back(ov::test::static_shapes_to_test_representation(shape_element.first), shape_element.second);
}
return new_shapes;
};

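// gtest Combine builds the cartesian product of activation configs, model element
// types, and input shapes; combineParams flattens each map above into individual
// (type, attribute-vector) pairs.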
const auto basicCases = ::testing::Combine(
const auto basic_case_params = ::testing::Combine(
::testing::ValuesIn(ov::test::utils::combineParams(activationTypes)),
::testing::ValuesIn(netPrecisions),
::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::ValuesIn(ov::test::utils::combineParams(basic)),
::testing::ValuesIn(model_types),
::testing::ValuesIn(static_shapes_param_transform(ov::test::utils::combineParams(basic_input_shapes_static))),
::testing::Values(ov::test::utils::DEVICE_CPU)
);

const auto basicPreluCases = ::testing::Combine(
const auto basic_prelu_cases_params = ::testing::Combine(
::testing::ValuesIn(ov::test::utils::combineParams(activationParamTypes)),
::testing::ValuesIn(netPrecisions),
::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::ValuesIn(ov::test::utils::combineParams(preluBasic)),
::testing::ValuesIn(model_types),
::testing::ValuesIn(static_shapes_param_transform(ov::test::utils::combineParams(prelu_basic_input_shapes_static))),
::testing::Values(ov::test::utils::DEVICE_CPU)
);

const auto basicIntegerOperations = ::testing::Combine(
const auto basic_integer_operations_params = ::testing::Combine(
::testing::ValuesIn(ov::test::utils::combineParams(intActivationTypes)),
::testing::ValuesIn(intPrecisions),
::testing::ValuesIn(intPrecisions),
::testing::ValuesIn(intPrecisions),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::ValuesIn(ov::test::utils::combineParams(basic)),
::testing::Values(ov::element::i32),
::testing::ValuesIn(static_shapes_param_transform(ov::test::utils::combineParams(basic_input_shapes_static))),
::testing::Values(ov::test::utils::DEVICE_CPU)
);

INSTANTIATE_TEST_SUITE_P(smoke_Activation_Basic, ActivationLayerTest, basicCases, ActivationLayerTest::getTestCaseName);
INSTANTIATE_TEST_SUITE_P(smoke_Activation_Basic, ActivationDynamicLayerTest, basicCases, ActivationLayerTest::getTestCaseName);
INSTANTIATE_TEST_SUITE_P(smoke_Integer_Activation_Basic, ActivationLayerTest, basicIntegerOperations, ActivationLayerTest::getTestCaseName);
INSTANTIATE_TEST_SUITE_P(smoke_Activation_Basic, ActivationLayerTest, basic_case_params, ActivationLayerTest::getTestCaseName);
INSTANTIATE_TEST_SUITE_P(smoke_Integer_Activation_Basic, ActivationLayerTest, basic_integer_operations_params, ActivationLayerTest::getTestCaseName);

INSTANTIATE_TEST_SUITE_P(smoke_Activation_Basic_Prelu_Const, ActivationLayerTest, basicPreluCases, ActivationLayerTest::getTestCaseName);
INSTANTIATE_TEST_SUITE_P(smoke_Activation_Basic_Prelu_Param, ActivationParamLayerTest, basicPreluCases, ActivationLayerTest::getTestCaseName);
INSTANTIATE_TEST_SUITE_P(smoke_Activation_Basic_Prelu_Const, ActivationLayerTest, basic_prelu_cases_params, ActivationLayerTest::getTestCaseName);
INSTANTIATE_TEST_SUITE_P(smoke_Activation_Basic_Prelu_Param, ActivationParamLayerTest, basic_prelu_cases_params, ActivationLayerTest::getTestCaseName);
} // namespace
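For reference, a minimal sketch (not part of the commit) of the shape conversion the transform above relies on, assuming ov::test::InputShape is the usual pair of a partial shape and a list of static reshape targets; the _sketch names are illustrative:

#include <utility>
#include <vector>

#include "openvino/core/partial_shape.hpp"
#include "openvino/core/shape.hpp"

// Assumed layout of ov::test::InputShape: a (possibly dynamic) partial
// shape plus the static shapes to iterate over at run time.
using InputShapeSketch = std::pair<ov::PartialShape, std::vector<ov::Shape>>;

// Sketch of what static_shapes_to_test_representation is expected to do:
// each static shape becomes one trivially-"dynamic" entry that runs with
// exactly that shape.
std::vector<InputShapeSketch> static_shapes_to_test_representation_sketch(
        const std::vector<ov::Shape>& shapes) {
    std::vector<InputShapeSketch> result;
    result.reserve(shapes.size());
    for (const auto& shape : shapes) {
        result.push_back({ov::PartialShape(shape), {shape}});
    }
    return result;
}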
@@ -187,6 +187,8 @@ std::vector<std::string> disabledTestPatterns() {
R"(smoke_GroupConvBackpropData.*paddingDefined/GroupConvBackpropLayerTest.Inference.*f32.*)",
// Issue: 122177
R"(smoke_LSTMSequenceCommon.*LSTMSequenceTest.Inference.*CONVERT_TO_TI.*)",
// Issue: 122081
R"(smoke_Activation_Basic_Prelu_Const/ActivationLayerTest.Inference/.*_TS=\(3.2.5.7\).*)",
// Issue: 122094
R"(smoke_Interpolate_Basic_Down_Sample_Tail/InterpolateLayerTest.Inference.*(asymmetric|align_corners).*f16.*)",
// Need to generate sequence exactly in the i64 data type. Enable in scope of i64 enabling.
@@ -0,0 +1,29 @@
// Copyright (C) 2018-2023 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//

#pragma once

#include "shared_test_classes/single_op/activation.hpp"

namespace ov {
namespace test {

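// Parameterized smoke tests bound to the per-plugin configs via
// INSTANTIATE_TEST_SUITE_P: run() builds the model from the test params,
// infers, and compares against the reference; query_model() checks that
// the plugin reports support for the resulting ops.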
TEST_P(ActivationLayerTest, Inference) {
run();
}

TEST_P(ActivationParamLayerTest, Inference) {
run();
}

TEST_P(ActivationLayerTest, QueryModel) {
query_model();
}

TEST_P(ActivationParamLayerTest, QueryModel) {
query_model();
}

} // namespace test
} // namespace ov
@@ -4,6 +4,9 @@

#pragma once

#include <map>
#include <vector>

#include "ngraph/node.hpp"
#include "ngraph/op/proposal.hpp"
#include "ngraph/op/power.hpp"
@@ -26,9 +29,44 @@
#include "openvino/op/logical_or.hpp"
#include "openvino/op/logical_xor.hpp"
#include "openvino/op/logical_not.hpp"

#include <map>
#include <vector>
#include "openvino/op/abs.hpp"
#include "openvino/op/acos.hpp"
#include "openvino/op/acosh.hpp"
#include "openvino/op/asin.hpp"
#include "openvino/op/asinh.hpp"
#include "openvino/op/atan.hpp"
#include "openvino/op/atanh.hpp"
#include "openvino/op/ceiling.hpp"
#include "openvino/op/clamp.hpp"
#include "openvino/op/constant.hpp"
#include "openvino/op/cos.hpp"
#include "openvino/op/cosh.hpp"
#include "openvino/op/elu.hpp"
#include "openvino/op/erf.hpp"
#include "openvino/op/exp.hpp"
#include "openvino/op/floor.hpp"
#include "openvino/op/gelu.hpp"
#include "openvino/op/hard_sigmoid.hpp"
#include "openvino/op/hsigmoid.hpp"
#include "openvino/op/hswish.hpp"
#include "openvino/op/log.hpp"
#include "openvino/op/mish.hpp"
#include "openvino/op/negative.hpp"
#include "openvino/op/parameter.hpp"
#include "openvino/op/prelu.hpp"
#include "openvino/op/relu.hpp"
#include "openvino/op/round.hpp"
#include "openvino/op/selu.hpp"
#include "openvino/op/sigmoid.hpp"
#include "openvino/op/sign.hpp"
#include "openvino/op/sin.hpp"
#include "openvino/op/sinh.hpp"
#include "openvino/op/softplus.hpp"
#include "openvino/op/softsign.hpp"
#include "openvino/op/sqrt.hpp"
#include "openvino/op/swish.hpp"
#include "openvino/op/tan.hpp"
#include "openvino/op/tanh.hpp"

namespace ov {
namespace test {
@@ -94,6 +132,51 @@ static std::map<ov::NodeTypeInfo, std::vector<std::vector<InputGenerateData>>> i
{ ov::op::v1::LogicalOr::get_type_info_static(), {{{0, 2}}, {{0, 2, 1}}} },
{ ov::op::v1::LogicalNot::get_type_info_static(), {{{0, 2}}, {{0, 2, 1}}} },
{ ov::op::v1::LogicalXor::get_type_info_static(), {{{0, 2}}, {{0, 2, 1}}} },
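// Per-op input-generation ranges: the first brace-group applies to integral
// element types, the second to real types. A triple is assumed to read
// {start_from, range, resolution} per ov::test::utils::InputGenerateData,
// e.g. {-1, 2, 32768} draws from [-1, 1] with fine-grained fractional steps.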
{ ov::op::v7::IDFT::get_type_info_static(), {{{0, 1}}, {{0, 1, 1000000}}} },
{ ov::op::v0::Sigmoid::get_type_info_static(), {{{0, 15}}, {{-1, 2, 32768}}} },
{ ov::op::v0::Tanh::get_type_info_static(), {{{0, 15}}, {{-1, 2, 32768}}} },
{ ov::op::v0::Relu::get_type_info_static(), {{{0, 15}}, {{-1, 2, 32768}}} },
{ ov::op::v0::PRelu::get_type_info_static(), {{{0, 15}}, {{-1, 2, 32768}}} },
{ ov::op::v0::Exp::get_type_info_static(), {{{0, 15}}, {{-10, 20, 32768}}} },
{ ov::op::v0::Log::get_type_info_static(), {{{0, 15}}, {{1, 20, 32768}}} },
{ ov::op::v0::Sign::get_type_info_static(), {{{0, 15}}, {{-1, 2, 32768}}} },
{ ov::op::v0::Abs::get_type_info_static(), {{{0, 15}}, {{-1, 2, 32768}}} },
{ ov::op::v0::Clamp::get_type_info_static(), {{{0, 15}}, {{-1, 2, 32768}}} },
{ ov::op::v0::Negative::get_type_info_static(), {{{0, 15}}, {{-1, 2, 32768}}} },
{ ov::op::v0::Acos::get_type_info_static(), {{{0, 15}}, {{-1, 2, 32768}}} },
{ ov::op::v3::Acosh::get_type_info_static(), {{{1, 15}}, {{1, 200, 32768}}} },
{ ov::op::v0::Asin::get_type_info_static(), {{{0, 15}}, {{-1, 2, 32768}}} },
{ ov::op::v3::Asinh::get_type_info_static(), {{{0, 15}}, {{-1, 2, 32768}}} },
{ ov::op::v0::Atan::get_type_info_static(), {{{0, 15}}, {{-1, 2, 32768}}} },
{ ov::op::v3::Atanh::get_type_info_static(), {{{0, 15}}, {{-1, 2, 32768}}} },
{ ov::op::v0::Cos::get_type_info_static(), {{{0, 15}}, {{-1, 2, 32768}}} },
{ ov::op::v0::Cosh::get_type_info_static(), {{{0, 15}}, {{-1, 2, 32768}}} },
{ ov::op::v0::Floor::get_type_info_static(), {{{0, 15}}, {{-1, 2, 32768}}} },
{ ov::op::v0::Sin::get_type_info_static(), {{{0, 15}}, {{-1, 2, 32768}}} },
{ ov::op::v0::Sinh::get_type_info_static(), {{{0, 15}}, {{-1, 2, 32768}}} },
{ ov::op::v0::Sqrt::get_type_info_static(), {{{0, 15}}, {{1, 20, 32768}}} },
{ ov::op::v0::Tan::get_type_info_static(), {{{0, 15}}, {{-1, 2, 32768}}} },
{ ov::op::v0::Elu::get_type_info_static(), {{{0, 15}}, {{-1, 2, 32768}}} },
{ ov::op::v0::Erf::get_type_info_static(), {{{0, 15}}, {{-1, 2, 32768}}} },
{ ov::op::v0::HardSigmoid::get_type_info_static(), {{{0, 15}}, {{-1, 2, 32768}}} },
{ ov::op::v0::Selu::get_type_info_static(), {{{0, 15}}, {{-1, 2, 32768}}} },
{ ov::op::v0::Gelu::get_type_info_static(), {{{0, 15}}, {{-1, 2, 32768}}} },
{ ov::op::v0::Ceiling::get_type_info_static(), {{{0, 15}}, {{-1000, 2000, 32768}}} },
{ ov::op::v4::Mish::get_type_info_static(), {{{0, 15}}, {{-10, 60, 32768}}} },
{ ov::op::v4::HSwish::get_type_info_static(), {{{0, 15}}, {{-1, 2, 32768}}} },
{ ov::op::v4::SoftPlus::get_type_info_static(), {{{0, 15}}, {{-100, 200, 32768}}} },
{ ov::op::v4::Swish::get_type_info_static(), {{{0, 15}}, {{-1, 2, 32768}}} },
{ ov::op::v5::HSigmoid::get_type_info_static(), {{{0, 15}}, {{-1, 2, 32768}}} },
{ ov::op::v5::Round::get_type_info_static(), {{{0, 15}}, {{-10, 20, 4}}} },
{ ov::op::v7::Gelu::get_type_info_static(), {{{0, 15}}, {{-1, 2, 32768}}} },
{ ov::op::v9::SoftSign::get_type_info_static(), {{{0, 15}}, {{-100, 200, 32768}}} },
};

} // namespace utils