From 9e041eb66ea5e5be1bb95a41e60d1dcbe517151b Mon Sep 17 00:00:00 2001 From: Szymon Durawa Date: Thu, 8 Jul 2021 15:34:45 +0200 Subject: [PATCH 1/8] Add visitor and backend tests. --- ngraph/test/backend/mvn.in.cpp | 82 +++++++++++++++++++++++++++++++++ ngraph/test/visitors/op/mvn.cpp | 27 ++++++++++- 2 files changed, 108 insertions(+), 1 deletion(-) diff --git a/ngraph/test/backend/mvn.in.cpp b/ngraph/test/backend/mvn.in.cpp index 2a0dd89b218897..4ad42c7a860402 100644 --- a/ngraph/test/backend/mvn.in.cpp +++ b/ngraph/test/backend/mvn.in.cpp @@ -137,3 +137,85 @@ NGRAPH_TEST(${BACKEND_NAME}, evaluate_mvn_6_across_batch) // clang-format on test_case.run(); } + +NGRAPH_TEST(${BACKEND_NAME}, evaluate_mvn_1_no_variance_no_across_channels) +{ + auto data = make_shared(element::f32, PartialShape{1, 3, 3, 3}); + auto mvn = make_shared(data, false, false, 1e-9); + auto fun = make_shared(OutputVector{mvn}, ParameterVector{data}); + auto test_case = test::TestCase(fun); + + test_case.add_input({1, 2, 3, 4, 5, 6, 7, 8, 9, 1, 2, 3, 4, 5, 6, 7, 8, 9, 1, 2, 3, 4, 5, 6, 7, 8, 9}); + + test_case.add_expected_output({-4, -3, -2, -1, 0, 1, 2, 3, 4, + -4, -3, -2, -1, 0, 1, 2, 3, 4, + -4, -3, -2, -1, 0, 1, 2, 3, 4}); + // clang-format on + test_case.run(); +} + +NGRAPH_TEST(${BACKEND_NAME}, evaluate_mvn_1_across_channels_no_variance) +{ + auto data = make_shared(element::f32, PartialShape{1, 3, 2, 2}); + auto mvn = make_shared(data, true, false, 1e-9); + auto fun = make_shared(OutputVector{mvn}, ParameterVector{data}); + auto test_case = test::TestCase(fun); + + // clang-format off + test_case.add_input({1, 2, 3, 4, 5, 6, 7, 8, 9, 1, 2, 3}); + + test_case.add_expected_output({-3.25, -2.25, -1.25, + -0.25, 0.75, 1.75, + 2.75, 3.75, 4.75, + -3.25, -2.25, -1.25}); + // clang-format on + test_case.run(); +} + +NGRAPH_TEST(${BACKEND_NAME}, evaluate_mvn_1_variance_no_across_channels) +{ + auto data = make_shared(element::f32, PartialShape{1, 3, 3, 3}); + auto mvn = make_shared(data, false, true, 1e-9); + auto fun = make_shared(OutputVector{mvn}, ParameterVector{data}); + auto test_case = test::TestCase(fun); + + // clang-format off + test_case.add_input({1, 2, 3, 4, 5, 6, 7, 8, 9, 1, 2, 3, 4, 5, 6, 7, 8, 9, 1, 2, 3, 4, 5, 6, 7, 8, 9}); + + test_case.add_expected_output({-1.5491934, -1.161895, -0.7745967, + -0.38729835, 0., 0.38729835, + 0.7745967, 1.161895, 1.5491934, + -1.5491934, -1.161895, -0.7745967, + -0.38729835, 0., 0.38729835, + 0.7745967, 1.161895, 1.5491934, + -1.5491934, -1.161895, -0.7745967, + -0.38729835, 0., 0.38729835, + 0.7745967, 1.161895, 1.5491934}); + // clang-format on + test_case.run(); +} + +NGRAPH_TEST(${BACKEND_NAME}, evaluate_mvn_1_across_channels_with_variance) +{ + auto data = make_shared(element::f32, PartialShape{1, 3, 3, 3}); + + auto mvn = make_shared(data, true, true, 1e-9); + auto fun = make_shared(OutputVector{mvn}, ParameterVector{data}); + auto test_case = test::TestCase(fun); + + // clang-format off + test_case.add_input( + {1, 2, 3, 4, 5, 6, 7, 8, 9, 1, 2, 3, 4, 5, 6, 7, 8, 9, 1, 2, 3, 4, 5, 6, 7, 8, 9}); + + test_case.add_expected_output({-1.5491934, -1.161895, -0.7745967, + -0.38729835, 0., 0.38729835, + 0.7745967, 1.161895, 1.5491934, + -1.5491934, -1.161895, -0.7745967, + -0.38729835, 0., 0.38729835, + 0.7745967, 1.161895, 1.5491934, + -1.5491934, -1.161895, -0.7745967, + -0.38729835, 0., 0.38729835, + 0.7745967, 1.161895, 1.5491934}); + // clang-format on + test_case.run(); +} diff --git a/ngraph/test/visitors/op/mvn.cpp b/ngraph/test/visitors/op/mvn.cpp index 
0f2bf38ecdce62..7773cca5abab15 100644 --- a/ngraph/test/visitors/op/mvn.cpp +++ b/ngraph/test/visitors/op/mvn.cpp @@ -10,6 +10,7 @@ #include "ngraph/opsets/opset3.hpp" #include "ngraph/opsets/opset4.hpp" #include "ngraph/opsets/opset5.hpp" +#include "ngraph/opsets/opset6.hpp" #include "util/visitor.hpp" @@ -18,7 +19,7 @@ using namespace ngraph; using ngraph::test::NodeBuilder; using ngraph::test::ValueMap; -TEST(attributes, mvn_op) +TEST(attributes, mvn_v1_op) { NodeBuilder::get_ops().register_factory(); const auto data = make_shared(element::i32, Shape{2, 3, 4, 5}); @@ -29,9 +30,33 @@ TEST(attributes, mvn_op) op->set_reduction_axes(axes); NodeBuilder builder(op); const auto g_op = as_type_ptr(builder.create()); + const auto expected_attr_count = 4; + EXPECT_EQ(builder.get_value_map_size(), expected_attr_count); EXPECT_EQ(g_op->get_reduction_axes(), op->get_reduction_axes()); EXPECT_EQ(g_op->get_across_channels(), op->get_across_channels()); EXPECT_EQ(g_op->get_normalize_variance(), op->get_normalize_variance()); EXPECT_EQ(g_op->get_eps(), op->get_eps()); } + +TEST(attributes, mvn_v6_op) +{ + NodeBuilder::get_ops().register_factory(); + const auto data = make_shared(element::i32, Shape{2, 3, 4, 5}); + auto axes = ngraph::opset6::Constant::create(ngraph::element::i64, ngraph::Shape{ 2 }, { 2, 3 }); + + const auto op = make_shared(data, + axes, + false, + 0.1, + op::MVNEpsMode::INSIDE_SQRT); + + NodeBuilder builder(op); + const auto g_op = as_type_ptr(builder.create()); + const auto expected_attr_count = 3; + + EXPECT_EQ(builder.get_value_map_size(), expected_attr_count); + EXPECT_EQ(g_op->get_eps_mode(), op->get_eps_mode()); + EXPECT_EQ(g_op->get_normalize_variance(), op->get_normalize_variance()); + EXPECT_EQ(g_op->get_eps(), op->get_eps()); +} From 612c0498b2a27e3290b73ae7e8baf2126e9f9ea4 Mon Sep 17 00:00:00 2001 From: Szymon Durawa Date: Mon, 12 Jul 2021 09:49:59 +0200 Subject: [PATCH 2/8] Add reduction_axes to SLTs, serialization and backend tests. 
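A usage sketch, not part of this patch: it shows the two MVN-1 construction paths that the new mvn1Params tuple selects between, the legacy across_channels flag and an explicit reduction-axes set, mirroring the pair of makeMVN overloads used by Mvn1LayerTest::SetUp in the diff below. The helper name buildMvn1 and the f32 precision are illustrative assumptions.

    #include <memory>
    #include <ngraph/ngraph.hpp>

    // Sketch only: build an MVN-1 test function either from the
    // across_channels flag or from an explicit reduction-axes set.
    std::shared_ptr<ngraph::Function> buildMvn1(const ngraph::Shape& shape,
                                                const ngraph::AxisSet& axes,
                                                bool acrossChannels,
                                                bool normalizeVariance,
                                                double eps) {
        auto data = std::make_shared<ngraph::op::Parameter>(ngraph::element::f32, shape);
        std::shared_ptr<ngraph::Node> mvn;
        if (axes.empty()) {
            mvn = std::make_shared<ngraph::op::MVN>(data, acrossChannels, normalizeVariance, eps);
        } else {
            mvn = std::make_shared<ngraph::op::MVN>(data, axes, normalizeVariance, eps);
        }
        return std::make_shared<ngraph::Function>(ngraph::NodeVector{mvn},
                                                  ngraph::ParameterVector{data});
    }

For example, buildMvn1({1, 10, 5, 17}, ngraph::AxisSet{2, 3}, false, true, 1e-9) corresponds to one of the reduction-axes cases instantiated below, while an empty AxisSet falls back to the across_channels form.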
--- .../serialization/single_layer/mvn.cpp | 24 ++++++++--- .../single_layer_tests/mvn.cpp | 19 ++++++++- .../plugin/cpu/single_layer_tests/mvn.cpp | 22 +++++++--- .../single_layer_tests/mvn.cpp | 5 ++- .../shared/include/single_layer_tests/mvn.hpp | 4 ++ .../shared_test_classes/single_layer/mvn.hpp | 18 +++++++++ .../src/single_layer/mvn.cpp | 40 +++++++++++++++++++ .../include/ngraph_functions/builders.hpp | 5 +++ .../ngraph_functions/src/mvn.cpp | 9 +++++ 9 files changed, 132 insertions(+), 14 deletions(-) diff --git a/inference-engine/tests/functional/inference_engine/serialization/single_layer/mvn.cpp b/inference-engine/tests/functional/inference_engine/serialization/single_layer/mvn.cpp index b5a3cf828c79b7..5af5339f4211d5 100644 --- a/inference-engine/tests/functional/inference_engine/serialization/single_layer/mvn.cpp +++ b/inference-engine/tests/functional/inference_engine/serialization/single_layer/mvn.cpp @@ -17,22 +17,34 @@ const std::vector normalizeVariance = {true, false}; const std::vector> inputShapes = {{1, 10, 5, 7, 8}, {1, 3, 8, 9, 49}}; +const std::vector axes = {{1, 2, 3}, {2, 3}}; const std::vector acrossChannels = {true, false}; +const std::vector emptyReductionAxes = {{}}; +const std::vector emptyAcrossChannels = {{}}; const std::vector epsilon = {0.000000001}; -const auto MvnCases = ::testing::Combine( +const auto MvnAcrossChannels = ::testing::Combine( ::testing::ValuesIn(inputShapes), ::testing::ValuesIn(dataPrecisions), - ::testing::ValuesIn(acrossChannels), ::testing::ValuesIn(normalizeVariance), - ::testing::ValuesIn(epsilon), + ::testing::ValuesIn(emptyReductionAxes), ::testing::ValuesIn(acrossChannels), + ::testing::ValuesIn(normalizeVariance), ::testing::ValuesIn(epsilon), ::testing::Values(CommonTestUtils::DEVICE_CPU)); -TEST_P(MvnLayerTest, Serialize) { +const auto MvnReductionAxes = ::testing::Combine( + ::testing::ValuesIn(inputShapes), ::testing::ValuesIn(dataPrecisions), + ::testing::ValuesIn(axes), ::testing::ValuesIn(emptyAcrossChannels), + ::testing::ValuesIn(normalizeVariance), ::testing::ValuesIn(epsilon), + ::testing::Values(CommonTestUtils::DEVICE_CPU)); + +TEST_P(Mvn1LayerTest, Serialize) { Serialize(); } -INSTANTIATE_TEST_SUITE_P(smoke_MKLDNN_TestsMVN, MvnLayerTest, MvnCases, - MvnLayerTest::getTestCaseName); +INSTANTIATE_TEST_SUITE_P(smoke_MKLDNN_TestsMVN_across_channels, Mvn1LayerTest, MvnAcrossChannels, + Mvn1LayerTest::getTestCaseName); + +INSTANTIATE_TEST_SUITE_P(smoke_MKLDNN_TestsMVN_reduction_axes, Mvn1LayerTest, MvnReductionAxes, + Mvn1LayerTest::getTestCaseName); // ------------------- MVN-6 ------------------------------------------------- diff --git a/inference-engine/tests/functional/plugin/cpu/shared_tests_instances/single_layer_tests/mvn.cpp b/inference-engine/tests/functional/plugin/cpu/shared_tests_instances/single_layer_tests/mvn.cpp index 73fdce483efd20..caf39dbf1ea86d 100644 --- a/inference-engine/tests/functional/plugin/cpu/shared_tests_instances/single_layer_tests/mvn.cpp +++ b/inference-engine/tests/functional/plugin/cpu/shared_tests_instances/single_layer_tests/mvn.cpp @@ -9,6 +9,9 @@ using namespace LayerTestsDefinitions; +const std::vector emptyAcrossChannels = {{}}; +const std::vector emptyReductionAxes = {{}}; + const std::vector> inputShapes = { {8}, {1, 16}, @@ -41,17 +44,29 @@ const std::vector epsilon = { 0.000000001 }; -const auto MvnCases = ::testing::Combine( +const auto MvnAcrossChannels = ::testing::Combine( ::testing::ValuesIn(inputShapes), ::testing::Values(InferenceEngine::Precision::FP32), + 
::testing::ValuesIn(emptyReductionAxes), ::testing::ValuesIn(acrossChannels), ::testing::ValuesIn(normalizeVariance), ::testing::ValuesIn(epsilon), ::testing::Values(CommonTestUtils::DEVICE_CPU) ); -INSTANTIATE_TEST_SUITE_P(smoke_MKLDNN_TestsMVN, MvnLayerTest, MvnCases, MvnLayerTest::getTestCaseName); +const auto MvnReductionAxes = ::testing::Combine( + ::testing::ValuesIn(std::vector>{{1, 10, 5, 17}, {1, 3, 8, 9}}), + ::testing::Values(InferenceEngine::Precision::FP32), + ::testing::ValuesIn(std::vector{{1, 2, 3}, {2, 3}}), + ::testing::ValuesIn(emptyAcrossChannels), + ::testing::ValuesIn(normalizeVariance), + ::testing::ValuesIn(epsilon), + ::testing::Values(CommonTestUtils::DEVICE_CPU) +); + +INSTANTIATE_TEST_SUITE_P(smoke_MKLDNN_TestsMVN_AcrossChannels, Mvn1LayerTest, MvnAcrossChannels, Mvn1LayerTest::getTestCaseName); +INSTANTIATE_TEST_SUITE_P(smoke_MKLDNN_TestsMVN_ReductionAxes, Mvn1LayerTest, MvnReductionAxes, Mvn1LayerTest::getTestCaseName); std::vector dataPrecisions = { diff --git a/inference-engine/tests/functional/plugin/cpu/single_layer_tests/mvn.cpp b/inference-engine/tests/functional/plugin/cpu/single_layer_tests/mvn.cpp index 6b877960e7f2f6..9a69164baaa169 100644 --- a/inference-engine/tests/functional/plugin/cpu/single_layer_tests/mvn.cpp +++ b/inference-engine/tests/functional/plugin/cpu/single_layer_tests/mvn.cpp @@ -13,7 +13,7 @@ using namespace CPUTestUtils; namespace CPULayerTestsDefinitions { typedef std::tuple< - LayerTestsDefinitions::mvnParams, + LayerTestsDefinitions::mvn1Params, CPUSpecificParams, fusingSpecificParams, Precision, // CNNNetwork input precision @@ -24,14 +24,14 @@ class MvnLayerCPUTest : public testing::WithParamInterface obj) { - LayerTestsDefinitions::mvnParams basicParamsSet; + LayerTestsDefinitions::mvn1Params basicParamsSet; CPUSpecificParams cpuParams; fusingSpecificParams fusingParams; Precision inputPrecision, outputPrecision; std::tie(basicParamsSet, cpuParams, fusingParams, inputPrecision, outputPrecision) = obj.param; std::ostringstream result; - result << LayerTestsDefinitions::MvnLayerTest::getTestCaseName(testing::TestParamInfo( + result << LayerTestsDefinitions::Mvn1LayerTest::getTestCaseName(testing::TestParamInfo( basicParamsSet, 0)); result << "_" << "CNNInpPrc=" << inputPrecision.name(); @@ -45,7 +45,7 @@ class MvnLayerCPUTest : public testing::WithParamInterfaceGetParam(); @@ -55,13 +55,17 @@ class MvnLayerCPUTest : public testing::WithParamInterface(param)); auto mvn = ngraph::builder::makeMVN(paramOuts[0], acrossChanels, normalizeVariance, eps); + if (!axes.empty()) { + mvn = ngraph::builder::makeMVN(paramOuts[0], axes, normalizeVariance, eps); + } selectedType = getPrimitiveType() + "_" + inPrc.name(); @@ -128,6 +132,8 @@ const std::vector epsilon = { 0.000000001 }; +const std::vector emptyReductionAxes = {{}}; + std::vector inpPrc = {Precision::I8, Precision::BF16, Precision::FP32}; std::vector outPrc = {Precision::BF16, Precision::FP32}; @@ -162,6 +168,7 @@ const auto Mvn3D = ::testing::Combine( ::testing::Combine( ::testing::ValuesIn(inputShapes_3D), ::testing::Values(InferenceEngine::Precision::FP32), + ::testing::ValuesIn(emptyReductionAxes), ::testing::ValuesIn(acrossChannels), ::testing::ValuesIn(normalizeVariance), ::testing::ValuesIn(epsilon), @@ -177,6 +184,7 @@ const auto Mvn4D = ::testing::Combine( ::testing::Combine( ::testing::ValuesIn(inputShapes_4D), ::testing::Values(InferenceEngine::Precision::FP32), + ::testing::ValuesIn(emptyReductionAxes), ::testing::ValuesIn(acrossChannels), 
::testing::ValuesIn(normalizeVariance), ::testing::ValuesIn(epsilon), @@ -192,6 +200,7 @@ const auto Mvn5D = ::testing::Combine( ::testing::Combine( ::testing::ValuesIn(inputShapes_5D), ::testing::Values(InferenceEngine::Precision::FP32), + ::testing::ValuesIn(emptyReductionAxes), ::testing::ValuesIn(acrossChannels), ::testing::ValuesIn(normalizeVariance), ::testing::ValuesIn(epsilon), @@ -216,6 +225,7 @@ const auto Mvn1D = ::testing::Combine( ::testing::Combine( ::testing::ValuesIn(inputShapes_1D), ::testing::Values(InferenceEngine::Precision::FP32), + ::testing::ValuesIn(emptyReductionAxes), ::testing::ValuesIn(acrossChannels), ::testing::ValuesIn(normalizeVariance), ::testing::ValuesIn(epsilon), @@ -232,6 +242,7 @@ const auto Mvn2D = ::testing::Combine( ::testing::Combine( ::testing::ValuesIn(inputShapes_2D), ::testing::Values(InferenceEngine::Precision::FP32), + ::testing::ValuesIn(emptyReductionAxes), ::testing::Values(false), ::testing::ValuesIn(normalizeVariance), ::testing::ValuesIn(epsilon), @@ -248,6 +259,7 @@ const auto Mvn2DTrans = ::testing::Combine( ::testing::Combine( ::testing::ValuesIn(inputShapes_2D), ::testing::Values(InferenceEngine::Precision::FP32), + ::testing::ValuesIn(emptyReductionAxes), ::testing::Values(true), ::testing::ValuesIn(normalizeVariance), ::testing::ValuesIn(epsilon), diff --git a/inference-engine/tests/functional/plugin/gpu/shared_tests_instances/single_layer_tests/mvn.cpp b/inference-engine/tests/functional/plugin/gpu/shared_tests_instances/single_layer_tests/mvn.cpp index a4e83dc98732c4..9c68172b014423 100644 --- a/inference-engine/tests/functional/plugin/gpu/shared_tests_instances/single_layer_tests/mvn.cpp +++ b/inference-engine/tests/functional/plugin/gpu/shared_tests_instances/single_layer_tests/mvn.cpp @@ -9,6 +9,8 @@ using namespace LayerTestsDefinitions; +const std::vector emptyReductionAxes = {{}}; + const std::vector> inputShapes = { {1, 32, 17}, {1, 37, 9}, @@ -41,13 +43,14 @@ const std::vector epsilon = { const auto MvnCases = ::testing::Combine( ::testing::ValuesIn(inputShapes), ::testing::Values(InferenceEngine::Precision::FP32), + ::testing::ValuesIn(emptyReductionAxes), ::testing::ValuesIn(acrossChannels), ::testing::ValuesIn(normalizeVariance), ::testing::ValuesIn(epsilon), ::testing::Values(CommonTestUtils::DEVICE_GPU) ); -INSTANTIATE_TEST_SUITE_P(smoke_CLDNN_TestsMVN, MvnLayerTest, MvnCases, MvnLayerTest::getTestCaseName); +INSTANTIATE_TEST_SUITE_P(smoke_CLDNN_TestsMVN, Mvn1LayerTest, MvnCases, Mvn1LayerTest::getTestCaseName); std::vector dataPrecisions = { InferenceEngine::Precision::FP32, diff --git a/inference-engine/tests/functional/plugin/shared/include/single_layer_tests/mvn.hpp b/inference-engine/tests/functional/plugin/shared/include/single_layer_tests/mvn.hpp index c0cc301e714cd3..919ea38a9478ee 100644 --- a/inference-engine/tests/functional/plugin/shared/include/single_layer_tests/mvn.hpp +++ b/inference-engine/tests/functional/plugin/shared/include/single_layer_tests/mvn.hpp @@ -12,6 +12,10 @@ TEST_P(MvnLayerTest, CompareWithRefs) { Run(); }; +TEST_P(Mvn1LayerTest, CompareWithRefs) { + Run(); +}; + TEST_P(Mvn6LayerTest, CompareWithRefs) { Run(); }; diff --git a/inference-engine/tests/functional/shared_test_classes/include/shared_test_classes/single_layer/mvn.hpp b/inference-engine/tests/functional/shared_test_classes/include/shared_test_classes/single_layer/mvn.hpp index 1fac97f20d2372..9b399e2109517d 100644 --- a/inference-engine/tests/functional/shared_test_classes/include/shared_test_classes/single_layer/mvn.hpp +++ 
b/inference-engine/tests/functional/shared_test_classes/include/shared_test_classes/single_layer/mvn.hpp @@ -27,6 +27,24 @@ class MvnLayerTest : public testing::WithParamInterface, virtual publ void SetUp() override; }; +typedef std::tuple< + InferenceEngine::SizeVector, // Input shapes + InferenceEngine::Precision, // Input precision + ngraph::AxisSet, // Reduction axes + bool, // Across channels + bool, // Normalize variance + double, // Epsilon + std::string // Device name + > mvn1Params; + +class Mvn1LayerTest : public testing::WithParamInterface, virtual public LayerTestsUtils::LayerTestsCommon { +public: + static std::string getTestCaseName(const testing::TestParamInfo& obj); + +protected: + void SetUp() override; +}; + typedef std::tuple< InferenceEngine::SizeVector, // Input shapes InferenceEngine::Precision, // Data precision diff --git a/inference-engine/tests/functional/shared_test_classes/src/single_layer/mvn.cpp b/inference-engine/tests/functional/shared_test_classes/src/single_layer/mvn.cpp index 2ae7af4116b302..69afd0df38d3b1 100644 --- a/inference-engine/tests/functional/shared_test_classes/src/single_layer/mvn.cpp +++ b/inference-engine/tests/functional/shared_test_classes/src/single_layer/mvn.cpp @@ -38,6 +38,46 @@ void MvnLayerTest::SetUp() { function = std::make_shared(results, param, "mvn"); } +std::string Mvn1LayerTest::getTestCaseName(const testing::TestParamInfo& obj) { + InferenceEngine::SizeVector inputShapes; + InferenceEngine::Precision inputPrecision; + ngraph::AxisSet axes; + bool acrossChannels, normalizeVariance; + double eps; + std::string targetDevice; + std::tie(inputShapes, inputPrecision, axes, acrossChannels, normalizeVariance, eps, targetDevice) = obj.param; + std::ostringstream result; + result << "IS=" << CommonTestUtils::vec2str(inputShapes) << "_"; + result << "Precision=" << inputPrecision.name() << "_"; + if (!axes.empty()) { + result << "ReductionAccess=" << CommonTestUtils::vec2str(axes.to_vector()) << "_"; + } else { + result << "AcrossChannels=" << (acrossChannels ? "TRUE" : "FALSE") << "_"; + } + result << "NormalizeVariance=" << (normalizeVariance ? 
"TRUE" : "FALSE") << "_"; + result << "Epsilon=" << eps << "_"; + result << "TargetDevice=" << targetDevice; + return result.str(); +} + +void Mvn1LayerTest::SetUp() { + InferenceEngine::SizeVector inputShapes; + InferenceEngine::Precision inputPrecision; + ngraph::AxisSet axes; + bool acrossChanels, normalizeVariance; + double eps; + std::tie(inputShapes, inputPrecision, axes, acrossChanels, normalizeVariance, eps, targetDevice) = this->GetParam(); + auto inType = FuncTestUtils::PrecisionUtils::convertIE2nGraphPrc(inputPrecision); + auto param = ngraph::builder::makeParams(inType, {inputShapes}); + auto paramOuts = ngraph::helpers::convert2OutputVector(ngraph::helpers::castOps2Nodes(param)); + auto mvn = std::dynamic_pointer_cast(ngraph::builder::makeMVN(paramOuts[0], acrossChanels, normalizeVariance, eps)); + if (!axes.empty()) { + mvn = std::dynamic_pointer_cast(ngraph::builder::makeMVN(paramOuts[0], axes, normalizeVariance, eps)); + } + ngraph::ResultVector results{std::make_shared(mvn)}; + function = std::make_shared(results, param, "MVN1"); +} + std::string Mvn6LayerTest::getTestCaseName(const testing::TestParamInfo& obj) { InferenceEngine::SizeVector inputShapes; diff --git a/inference-engine/tests/ngraph_helpers/ngraph_functions/include/ngraph_functions/builders.hpp b/inference-engine/tests/ngraph_helpers/ngraph_functions/include/ngraph_functions/builders.hpp index d1e94b78f1eb06..e0b75a7b4a6a1e 100644 --- a/inference-engine/tests/ngraph_helpers/ngraph_functions/include/ngraph_functions/builders.hpp +++ b/inference-engine/tests/ngraph_helpers/ngraph_functions/include/ngraph_functions/builders.hpp @@ -291,6 +291,11 @@ std::shared_ptr makeMVN(const ngraph::Output &in, bool normalizeVariance, double eps); +std::shared_ptr makeMVN(const ngraph::Output &in, + ngraph::AxisSet axes, + bool normalizeVariance, + double eps); + std::shared_ptr makeMVN6(const Output& in, const Output& axesNode, bool normalizeVariance, diff --git a/inference-engine/tests/ngraph_helpers/ngraph_functions/src/mvn.cpp b/inference-engine/tests/ngraph_helpers/ngraph_functions/src/mvn.cpp index ab65a06c179c34..c6b8af283dd168 100644 --- a/inference-engine/tests/ngraph_helpers/ngraph_functions/src/mvn.cpp +++ b/inference-engine/tests/ngraph_helpers/ngraph_functions/src/mvn.cpp @@ -24,6 +24,15 @@ std::shared_ptr makeMVN(const ngraph::Output &in, return mvnNode; } +std::shared_ptr makeMVN(const ngraph::Output &in, + ngraph::AxisSet axes, + bool normalizeVariance, + double eps) { + auto mvnNode = std::make_shared(in, axes, normalizeVariance, eps); + + return mvnNode; +} + std::shared_ptr makeMVN6(const Output& in, const Output& axesNode, bool normalizeVariance, From 735be83e18fc34c22ffabd7781f2e1ea944af465 Mon Sep 17 00:00:00 2001 From: Szymon Durawa Date: Mon, 12 Jul 2021 09:58:40 +0200 Subject: [PATCH 3/8] Update backend tests. 
--- ngraph/test/backend/mvn.in.cpp | 47 +++++++++++++++++++++++++++++++++- 1 file changed, 46 insertions(+), 1 deletion(-) diff --git a/ngraph/test/backend/mvn.in.cpp b/ngraph/test/backend/mvn.in.cpp index 4ad42c7a860402..da035ccb8546bd 100644 --- a/ngraph/test/backend/mvn.in.cpp +++ b/ngraph/test/backend/mvn.in.cpp @@ -156,7 +156,7 @@ NGRAPH_TEST(${BACKEND_NAME}, evaluate_mvn_1_no_variance_no_across_channels) NGRAPH_TEST(${BACKEND_NAME}, evaluate_mvn_1_across_channels_no_variance) { - auto data = make_shared(element::f32, PartialShape{1, 3, 2, 2}); + auto data = make_shared(element::f32, PartialShape{1, 1, 3, 2, 2}); auto mvn = make_shared(data, true, false, 1e-9); auto fun = make_shared(OutputVector{mvn}, ParameterVector{data}); auto test_case = test::TestCase(fun); @@ -219,3 +219,48 @@ NGRAPH_TEST(${BACKEND_NAME}, evaluate_mvn_1_across_channels_with_variance) // clang-format on test_case.run(); } + +NGRAPH_TEST(${BACKEND_NAME}, evaluate_mvn_1_reduction_axes_no_variance) +{ + const ngraph::AxisSet axes = {1, 2, 3}; + auto data = make_shared(element::f32, PartialShape{1, 3, 2, 2}); + auto mvn = make_shared(data, axes, false, 1e-9); + auto fun = make_shared(OutputVector{mvn}, ParameterVector{data}); + auto test_case = test::TestCase(fun); + + // clang-format off + test_case.add_input({1, 2, 3, 4, 5, 6, 7, 8, 9, 1, 2, 3}); + + test_case.add_expected_output({-3.25, -2.25, -1.25, + -0.25, 0.75, 1.75, + 2.75, 3.75, 4.75, + -3.25, -2.25, -1.25}); + // clang-format on + test_case.run(); +} + +NGRAPH_TEST(${BACKEND_NAME}, evaluate_mvn_1_reduction_axes_with_variance) +{ + auto data = make_shared(element::f32, PartialShape{1, 3, 3, 3}); + + const ngraph::AxisSet axes = {2, 3}; + auto mvn = make_shared(data, axes, true, 1e-9); + auto fun = make_shared(OutputVector{mvn}, ParameterVector{data}); + auto test_case = test::TestCase(fun); + + // clang-format off + test_case.add_input( + {1, 2, 3, 4, 5, 6, 7, 8, 9, 1, 2, 3, 4, 5, 6, 7, 8, 9, 1, 2, 3, 4, 5, 6, 7, 8, 9}); + + test_case.add_expected_output({-1.5491934, -1.161895, -0.7745967, + -0.38729835, 0., 0.38729835, + 0.7745967, 1.161895, 1.5491934, + -1.5491934, -1.161895, -0.7745967, + -0.38729835, 0., 0.38729835, + 0.7745967, 1.161895, 1.5491934, + -1.5491934, -1.161895, -0.7745967, + -0.38729835, 0., 0.38729835, + 0.7745967, 1.161895, 1.5491934}); + // clang-format on + test_case.run(); +} From 6f5a3474d63cb035e9ffed1d7c3e2ec4f944fee9 Mon Sep 17 00:00:00 2001 From: Szymon Durawa Date: Wed, 14 Jul 2021 13:55:11 +0200 Subject: [PATCH 4/8] Move backend tests to template plugin, remove old ones. 
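For orientation, an illustrative recomputation that is not part of the patch: the hard-coded expected tensors in the new op_reference tests follow directly from the MVN definition. For a channel holding 1..9 the mean is 5, so the mean-only cases give -4..4; with variance normalization the population variance is 60/9, so (1 - 5)/sqrt(60/9) ≈ -1.549193, which the tests store as the float constant -1.5491934. Similarly, the 12-element across-channels input has mean 51/12 = 4.25, giving the -3.25..4.75 expectations. With eps = 1e-9 the epsilon placement (inside or outside the square root) does not change the printed digits.

    #include <cmath>
    #include <cstdio>
    #include <vector>

    // Recomputes one channel of the hard-coded MVN reference values:
    // subtract the mean, then divide by sqrt(variance + eps).
    int main() {
        const std::vector<double> channel = {1, 2, 3, 4, 5, 6, 7, 8, 9};
        const double eps = 1e-9;
        double mean = 0.0;
        for (double v : channel) mean += v;
        mean /= channel.size();
        double variance = 0.0;
        for (double v : channel) variance += (v - mean) * (v - mean);
        variance /= channel.size();
        for (double v : channel) {
            // prints -1.5491933 -1.1618950 -0.7745967 ... 1.5491933
            std::printf("%.7f ", (v - mean) / std::sqrt(variance + eps));
        }
        std::printf("\n");
        return 0;
    }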
--- .../tests/functional/op_reference/mvn.cpp | 294 ++++++++++++++++++ ngraph/test/CMakeLists.txt | 1 - ngraph/test/backend/mvn.in.cpp | 266 ---------------- 3 files changed, 294 insertions(+), 267 deletions(-) create mode 100644 docs/template_plugin/tests/functional/op_reference/mvn.cpp delete mode 100644 ngraph/test/backend/mvn.in.cpp diff --git a/docs/template_plugin/tests/functional/op_reference/mvn.cpp b/docs/template_plugin/tests/functional/op_reference/mvn.cpp new file mode 100644 index 00000000000000..b4580f995f6126 --- /dev/null +++ b/docs/template_plugin/tests/functional/op_reference/mvn.cpp @@ -0,0 +1,294 @@ +// Copyright (C) 2018-2021 Intel Corporation +// SPDX-License-Identifier: Apache-2.0 +// + +#include + +#include +#include +#include +#include +#include + +#include "base_reference_test.hpp" + +using namespace ngraph; +using namespace InferenceEngine; + +struct Tensor { + Tensor() = default; + Tensor(const ngraph::Shape& shape, ngraph::element::Type type, const InferenceEngine::Blob::Ptr& data): shape {shape}, type {type}, data {data} {} + + template + Tensor(const ngraph::Shape& shape, ngraph::element::Type type, std::initializer_list data_elements): shape {shape}, type {type} { + data = CreateBlob(type, std::vector(data_elements)); + } + + ngraph::Shape shape; + ngraph::element::Type type; + InferenceEngine::Blob::Ptr data; +}; + +// ------------------------------ V0 ------------------------------ + +struct MVN1Params { + MVN1Params(const Tensor& paramInput, const ngraph::AxisSet& paramReductionAxes, const bool paramAcrossChannels, const bool paramNormalizeVariance, + const double paramEps, const Tensor& paramExpected) + : input(paramInput), + reductionAxes(paramReductionAxes), + acrossChannels(paramAcrossChannels), + normalizeVariance(paramNormalizeVariance), + eps(paramEps), + expected(paramExpected) {} + Tensor input; + ngraph::AxisSet reductionAxes; + bool acrossChannels; + bool normalizeVariance; + double eps; + Tensor expected; +}; + +class ReferenceMVN1LayerTest : public testing::TestWithParam, public CommonReferenceTest { +public: + void SetUp() override { + auto params = GetParam(); + function = CreateFunction(params.input, params.reductionAxes, params.acrossChannels, params.normalizeVariance, params.eps); + inputData = {params.input.data}; + refOutData = {params.expected.data}; + } + static std::string getTestCaseName(const testing::TestParamInfo& obj) { + auto param = obj.param; + std::ostringstream result; + result << "shape=" << param.input.shape; + result << "_iType=" << param.input.type; + if (!param.reductionAxes.empty()) { + result << "_reductionAccess=" << CommonTestUtils::vec2str(param.reductionAxes.to_vector()); + } else { + result << "_acrossChannels=" << (param.acrossChannels ? "TRUE" : "FALSE"); + } + result << "_normalizeVariance=" << (param.normalizeVariance ? 
"TRUE" : "FALSE"); + result << "_eps=" << param.eps; + return result.str(); + } + +private: + static std::shared_ptr CreateFunction(const Tensor& input, const ngraph::AxisSet& reductionAxes, const bool acrossChannels, + const bool normalizeVariance, const double eps) { + const auto in = std::make_shared(input.type, input.shape); + auto mvn = std::make_shared(in, acrossChannels, normalizeVariance, eps); + if (!reductionAxes.empty()) { + mvn = std::make_shared(in, reductionAxes, normalizeVariance, eps); + } + return std::make_shared(NodeVector {mvn}, ParameterVector {in}); + } +}; + +TEST_P(ReferenceMVN1LayerTest, CompareWithHardcodedRefs) { + Exec(); +} + +const ngraph::AxisSet emptyReductionAxes {}; + +INSTANTIATE_TEST_SUITE_P(smoke_MVN1_With_Hardcoded_Refs, ReferenceMVN1LayerTest, + ::testing::Values( + // across_channels=false, variance=false + MVN1Params(Tensor {{1, 3, 3, 3}, ngraph::element::f32, std::initializer_list({1, 2, 3, 4, 5, 6, 7, 8, 9, 1, 2, 3, 4, 5, + 6, 7, 8, 9, 1, 2, 3, 4, 5, 6, 7, 8, 9})}, + emptyReductionAxes, + false, + false, + 1e-9, + Tensor {{1, 3, 3, 3}, ngraph::element::f32, std::initializer_list({-4, -3, -2, -1, 0, 1, 2, 3, 4, + -4, -3, -2, -1, 0, 1, 2, 3, 4, + -4, -3, -2, -1, 0, 1, 2, 3, 4})}), + // across_channels=true, variance=false + MVN1Params(Tensor {{1, 3, 2, 2}, ngraph::element::f32, std::initializer_list({1, 2, 3, 4, 5, 6, 7, 8, 9, 1, 2, 3})}, + emptyReductionAxes, + true, + false, + 1e-9, + Tensor {{1, 3, 2, 2}, ngraph::element::f32, std::initializer_list({-3.25, -2.25, -1.25, -0.25, 0.75, 1.75, + 2.75, 3.75, 4.75, -3.25, -2.25, -1.25})}), + // across_channels=false, variance=true + MVN1Params(Tensor {{1, 3, 3, 3}, ngraph::element::f32, std::initializer_list({1, 2, 3, 4, 5, 6, 7, 8, 9, 1, 2, 3, 4, 5, + 6, 7, 8, 9, 1, 2, 3, 4, 5, 6, 7, 8, 9})}, + emptyReductionAxes, + false, + true, + 1e-9, + Tensor {{1, 3, 3, 3}, ngraph::element::f32, std::initializer_list({-1.5491934, -1.161895, -0.7745967, + -0.38729835, 0., 0.38729835, + 0.7745967, 1.161895, 1.5491934, + -1.5491934, -1.161895, -0.7745967, + -0.38729835, 0., 0.38729835, + 0.7745967, 1.161895, 1.5491934, + -1.5491934, -1.161895, -0.7745967, + -0.38729835, 0., 0.38729835, + 0.7745967, 1.161895, 1.5491934})}), + // across_channels=true, variance=true + MVN1Params(Tensor {{1, 3, 3, 3}, ngraph::element::f32, std::initializer_list({1, 2, 3, 4, 5, 6, 7, 8, 9, 1, 2, 3, 4, 5, + 6, 7, 8, 9, 1, 2, 3, 4, 5, 6, 7, 8, 9})}, + emptyReductionAxes, + true, + true, + 1e-9, + Tensor {{1, 3, 3, 3}, ngraph::element::f32, std::initializer_list({-1.5491934, -1.161895, -0.7745967, + -0.38729835, 0., 0.38729835, + 0.7745967, 1.161895, 1.5491934, + -1.5491934, -1.161895, -0.7745967, + -0.38729835, 0., 0.38729835, + 0.7745967, 1.161895, 1.5491934, + -1.5491934, -1.161895, -0.7745967, + -0.38729835, 0., 0.38729835, + 0.7745967, 1.161895, 1.5491934})}), + // reductionAxes, variance=false + MVN1Params(Tensor {{1, 3, 2, 2}, ngraph::element::f32, std::initializer_list({1, 2, 3, 4, 5, 6, 7, 8, 9, 1, 2, 3})}, + {1, 2, 3}, + false, + false, + 1e-9, + Tensor {{1, 3, 2, 2}, ngraph::element::f32, std::initializer_list({-3.25, -2.25, -1.25, + -0.25, 0.75, 1.75, + 2.75, 3.75, 4.75, + -3.25, -2.25, -1.25})}), + // reductionAxes, variance=true + MVN1Params(Tensor {{1, 3, 3, 3}, ngraph::element::f32, std::initializer_list({1, 2, 3, 4, 5, 6, 7, 8, 9, 1, 2, 3, 4, 5, + 6, 7, 8, 9, 1, 2, 3, 4, 5, 6, 7, 8, 9})}, + {2, 3}, + false, + true, + 1e-9, + Tensor {{1, 3, 3, 3}, ngraph::element::f32, std::initializer_list({-1.5491934, -1.161895, -0.7745967, 
+ -0.38729835, 0., 0.38729835, + 0.7745967, 1.161895, 1.5491934, + -1.5491934, -1.161895, -0.7745967, + -0.38729835, 0., 0.38729835, + 0.7745967, 1.161895, 1.5491934, + -1.5491934, -1.161895, -0.7745967, + -0.38729835, 0., 0.38729835, + 0.7745967, 1.161895, 1.5491934})})), + ReferenceMVN1LayerTest::getTestCaseName); + +// ------------------------------ V6 ------------------------------ + +struct MVN6Params { + MVN6Params(const Tensor& paramInput, const Tensor& paramReductionAxes, const bool paramNormalizeVariance, + const double paramEps, const ngraph::op::MVNEpsMode mode, const Tensor& paramExpected) + : input(paramInput), + reductionAxes(paramReductionAxes), + normalizeVariance(paramNormalizeVariance), + eps(paramEps), + epsMode(mode), + expected(paramExpected) {} + Tensor input; + Tensor reductionAxes; + bool normalizeVariance; + double eps; + ngraph::op::MVNEpsMode epsMode; + Tensor expected; +}; + +class ReferenceMVN6LayerTest : public testing::TestWithParam, public CommonReferenceTest { +public: + void SetUp() override { + auto params = GetParam(); + function = CreateFunction(params.input, params.reductionAxes, params.normalizeVariance, params.eps, params.epsMode); + inputData = {params.input.data}; + refOutData = {params.expected.data}; + } + static std::string getTestCaseName(const testing::TestParamInfo& obj) { + auto param = obj.param; + std::ostringstream result; + result << "shape=" << param.input.shape; + result << "_iType=" << param.input.type; + result << "_reductionAccess=" << CommonTestUtils::vec2str(param.reductionAxes.shape); + result << "_normalizeVariance=" << (param.normalizeVariance ? "TRUE" : "FALSE"); + result << "_eps=" << param.eps; + result << "_eps_mode=" << param.epsMode; + return result.str(); + } + +private: + static std::shared_ptr CreateFunction(const Tensor& input, const Tensor& reductionAxes, + const bool normalizeVariance, const double eps, const ngraph::op::MVNEpsMode epsMode) { + std::vector dataVector(reductionAxes.shape[0]); + const auto in = std::make_shared(input.type, input.shape); + auto mRef = as(reductionAxes.data); + IE_ASSERT(mRef); + const auto refLockMemory = mRef->rmap(); + const auto refBuffer = refLockMemory.as(); + for (size_t i = 0; i < dataVector.size(); ++i) { + dataVector[i] = refBuffer[i]; + } + const auto axes = std::make_shared(reductionAxes.type, reductionAxes.shape, dataVector); + auto mvn = std::make_shared(in, axes, normalizeVariance, eps, epsMode); + return std::make_shared(NodeVector {mvn}, ParameterVector {in}); + } +}; + +TEST_P(ReferenceMVN6LayerTest, CompareWithHardcodedRefs) { + Exec(); +} + +INSTANTIATE_TEST_SUITE_P(smoke_MVN6_With_Hardcoded_Refs, ReferenceMVN6LayerTest, + ::testing::Values( + // variance=false, OUTSIDE_SQRT + MVN6Params(Tensor {{1, 3, 3, 3}, ngraph::element::f32, std::initializer_list({1, 2, 3, 4, 5, 6, 7, 8, 9, 1, 2, 3, 4, 5, + 6, 7, 8, 9, 1, 2, 3, 4, 5, 6, 7, 8, 9})}, + Tensor {Shape{2}, ngraph::element::i64, std::initializer_list({2, 3})}, + false, + 1e-9, + ngraph::op::MVNEpsMode::OUTSIDE_SQRT, + Tensor {{1, 3, 3, 3}, ngraph::element::f32, std::initializer_list({-4, -3, -2, -1, 0, 1, 2, 3, 4, + -4, -3, -2, -1, 0, 1, 2, 3, 4, + -4, -3, -2, -1, 0, 1, 2, 3, 4})}), + // variance=true, OUTSIDE_SQRT + MVN6Params(Tensor {{1, 3, 3, 3}, ngraph::element::f32, std::initializer_list({1, 2, 3, 4, 5, 6, 7, 8, 9, 1, 2, 3, 4, 5, + 6, 7, 8, 9, 1, 2, 3, 4, 5, 6, 7, 8, 9})}, + Tensor {Shape{2}, ngraph::element::i64, std::initializer_list({2, 3})}, + true, + 1e-9, + ngraph::op::MVNEpsMode::OUTSIDE_SQRT, + Tensor 
{{1, 3, 3, 3}, ngraph::element::f32, std::initializer_list({-1.5491934, -1.161895, -0.7745967, + -0.38729835, 0., 0.38729835, + 0.7745967, 1.161895, 1.5491934, + -1.5491934, -1.161895, -0.7745967, + -0.38729835, 0., 0.38729835, + 0.7745967, 1.161895, 1.5491934, + -1.5491934, -1.161895, -0.7745967, + -0.38729835, 0., 0.38729835, + 0.7745967, 1.161895, 1.5491934})}), + // variance=true, INSIDE_SQRT + MVN6Params(Tensor {{1, 3, 3, 3}, ngraph::element::f32, std::initializer_list({1, 2, 3, 4, 5, 6, 7, 8, 9, 1, 2, 3, 4, 5, + 6, 7, 8, 9, 1, 2, 3, 4, 5, 6, 7, 8, 9})}, + Tensor {Shape{2}, ngraph::element::i64, std::initializer_list({2, 3})}, + true, + 1e-9, + ngraph::op::MVNEpsMode::INSIDE_SQRT, + Tensor {{1, 3, 3, 3}, ngraph::element::f32, std::initializer_list({-1.5491934, -1.161895, -0.7745967, + -0.38729835, 0., 0.38729835, + 0.7745967, 1.161895, 1.5491934, + -1.5491934, -1.161895, -0.7745967, + -0.38729835, 0., 0.38729835, + 0.7745967, 1.161895, 1.5491934, + -1.5491934, -1.161895, -0.7745967, + -0.38729835, 0., 0.38729835, + 0.7745967, 1.161895, 1.5491934})}), + // variance=true, another reductionAxes + MVN6Params(Tensor {{1, 3, 3, 3}, ngraph::element::f32, std::initializer_list({1, 2, 3, 4, 5, 6, 7, 8, 9, 1, 2, 3, 4, 5, + 6, 7, 8, 9, 1, 2, 3, 4, 5, 6, 7, 8, 9})}, + Tensor {Shape{3}, ngraph::element::i64, std::initializer_list({1, 2, 3})}, + true, + 1e-9, + ngraph::op::MVNEpsMode::OUTSIDE_SQRT, + Tensor {{1, 3, 3, 3}, ngraph::element::f32, std::initializer_list({-1.5491934, -1.161895, -0.7745967, + -0.38729835, 0., 0.38729835, + 0.7745967, 1.161895, 1.5491934, + -1.5491934, -1.161895, -0.7745967, + -0.38729835, 0., 0.38729835, + 0.7745967, 1.161895, 1.5491934, + -1.5491934, -1.161895, -0.7745967, + -0.38729835, 0., 0.38729835, + 0.7745967, 1.161895, 1.5491934})})), + ReferenceMVN6LayerTest::getTestCaseName); + \ No newline at end of file diff --git a/ngraph/test/CMakeLists.txt b/ngraph/test/CMakeLists.txt index d6ed497fbf9991..2a840891842698 100644 --- a/ngraph/test/CMakeLists.txt +++ b/ngraph/test/CMakeLists.txt @@ -444,7 +444,6 @@ set(MULTI_TEST_SRC backend/multiple_backends.in.cpp backend/multiple_result.in.cpp backend/multiply.in.cpp - backend/mvn.in.cpp backend/negative.in.cpp backend/node_name.in.cpp backend/normalize_l2.in.cpp diff --git a/ngraph/test/backend/mvn.in.cpp b/ngraph/test/backend/mvn.in.cpp deleted file mode 100644 index da035ccb8546bd..00000000000000 --- a/ngraph/test/backend/mvn.in.cpp +++ /dev/null @@ -1,266 +0,0 @@ -// Copyright (C) 2018-2021 Intel Corporation -// SPDX-License-Identifier: Apache-2.0 -// - -#include "gtest/gtest.h" -#include "ngraph/ngraph.hpp" -#include "util/engine/test_engines.hpp" -#include "util/test_case.hpp" -#include "util/test_control.hpp" - -NGRAPH_SUPPRESS_DEPRECATED_START - -using namespace std; -using namespace ngraph; - -static string s_manifest = "${MANIFEST}"; -using TestEngine = test::ENGINE_CLASS_NAME(${BACKEND_NAME}); - -NGRAPH_TEST(${BACKEND_NAME}, evaluate_mvn_6_no_variance) -{ - auto data = make_shared(element::f32, PartialShape{1, 3, 3, 3}); - auto axes = make_shared(element::i64, Shape{2}, vector{2, 3}); - - auto mvn = - make_shared(data, axes, false, 1e-9, ngraph::op::MVNEpsMode::OUTSIDE_SQRT); - auto fun = make_shared(OutputVector{mvn}, ParameterVector{data}); - auto test_case = test::TestCase(fun); - - // clang-format off - test_case.add_input({1, 2, 3, 4, 5, 6, 7, 8, 9, 1, 2, 3, 4, 5, 6, 7, 8, 9, 1, 2, 3, 4, 5, 6, 7, 8, 9}); - - test_case.add_expected_output({-4, -3, -2, -1, 0, 1, 2, 3, 4, - -4, -3, -2, -1, 0, 1, 2, 3, 4, - -4, 
-3, -2, -1, 0, 1, 2, 3, 4}); - // clang-format on - test_case.run(); -} - -NGRAPH_TEST(${BACKEND_NAME}, evaluate_mvn_6) -{ - auto data = make_shared(element::f32, PartialShape{1, 3, 3, 3}); - auto axes = make_shared(element::i64, Shape{2}, vector{2, 3}); - - auto mvn = - make_shared(data, axes, true, 1e-9, ngraph::op::MVNEpsMode::OUTSIDE_SQRT); - auto fun = make_shared(OutputVector{mvn}, ParameterVector{data}); - auto test_case = test::TestCase(fun); - - // clang-format off - test_case.add_input({1, 2, 3, 4, 5, 6, 7, 8, 9, 1, 2, 3, 4, 5, 6, 7, 8, 9, 1, 2, 3, 4, 5, 6, 7, 8, 9}); - - test_case.add_expected_output({-1.5491934, -1.161895, -0.7745967, - -0.38729835, 0., 0.38729835, - 0.7745967, 1.161895, 1.5491934, - -1.5491934, -1.161895, -0.7745967, - -0.38729835, 0., 0.38729835, - 0.7745967, 1.161895, 1.5491934, - -1.5491934, -1.161895, -0.7745967, - -0.38729835, 0., 0.38729835, - 0.7745967, 1.161895, 1.5491934}); - // clang-format on - test_case.run(); -} - -NGRAPH_TEST(${BACKEND_NAME}, evaluate_mvn_6_inside_sqrt) -{ - auto data = make_shared(element::f32, PartialShape{1, 3, 3, 3}); - auto axes = make_shared(element::i64, Shape{2}, vector{2, 3}); - - auto mvn = - make_shared(data, axes, true, 1e-9, ngraph::op::MVNEpsMode::INSIDE_SQRT); - auto fun = make_shared(OutputVector{mvn}, ParameterVector{data}); - auto test_case = test::TestCase(fun); - - // clang-format off - test_case.add_input({1, 2, 3, 4, 5, 6, 7, 8, 9, 1, 2, 3, 4, 5, 6, 7, 8, 9, 1, 2, 3, 4, 5, 6, 7, 8, 9}); - - test_case.add_expected_output({-1.5491934, -1.161895, -0.7745967, - -0.38729835, 0., 0.38729835, - 0.7745967, 1.161895, 1.5491934, - -1.5491934, -1.161895, -0.7745967, - -0.38729835, 0., 0.38729835, - 0.7745967, 1.161895, 1.5491934, - -1.5491934, -1.161895, -0.7745967, - -0.38729835, 0., 0.38729835, - 0.7745967, 1.161895, 1.5491934}); - // clang-format on - test_case.run(); -} - -NGRAPH_TEST(${BACKEND_NAME}, evaluate_mvn_6_across_chanells) -{ - auto data = make_shared(element::f32, PartialShape{1, 3, 3, 3}); - auto axes = make_shared(element::i64, Shape{3}, vector{1, 2, 3}); - - auto mvn = - make_shared(data, axes, true, 1e-9, ngraph::op::MVNEpsMode::OUTSIDE_SQRT); - auto fun = make_shared(OutputVector{mvn}, ParameterVector{data}); - auto test_case = test::TestCase(fun); - - // clang-format off - test_case.add_input( - {1, 2, 3, 4, 5, 6, 7, 8, 9, 1, 2, 3, 4, 5, 6, 7, 8, 9, 1, 2, 3, 4, 5, 6, 7, 8, 9}); - - test_case.add_expected_output({-1.5491934, -1.161895, -0.7745967, - -0.38729835, 0., 0.38729835, - 0.7745967, 1.161895, 1.5491934, - -1.5491934, -1.161895, -0.7745967, - -0.38729835, 0., 0.38729835, - 0.7745967, 1.161895, 1.5491934, - -1.5491934, -1.161895, -0.7745967, - -0.38729835, 0., 0.38729835, - 0.7745967, 1.161895, 1.5491934}); - // clang-format on - test_case.run(); -} - -NGRAPH_TEST(${BACKEND_NAME}, evaluate_mvn_6_across_batch) -{ - auto data = make_shared(element::f32, PartialShape{2, 3, 2, 2}); - auto axes = make_shared(element::i64, Shape{3}, vector{0, 2, 3}); - - auto mvn = - make_shared(data, axes, true, 1e-9, ngraph::op::MVNEpsMode::OUTSIDE_SQRT); - auto fun = make_shared(OutputVector{mvn}, ParameterVector{data}); - auto test_case = test::TestCase(fun); - - // clang-format off - test_case.add_input( - {1, 2, 3, 4, 5, 6, 7, 8, 1, 2, 3, 4, 5, 6, 7, 8, 1, 2, 3, 4, 5, 6, 7, 8}); - - test_case.add_expected_output( - {-1.5275252, -1.0910894, -0.65465367, -0.21821788, 0.21821788, 0.65465367, - 1.0910894, 1.5275252, -1.5275252, -1.0910894, -0.65465367, -0.21821788, - 0.21821788, 0.65465367, 1.0910894, 1.5275252, 
-1.5275252, -1.0910894, - -0.65465367, -0.21821788, 0.21821788, 0.65465367, 1.0910894, 1.5275252}); - // clang-format on - test_case.run(); -} - -NGRAPH_TEST(${BACKEND_NAME}, evaluate_mvn_1_no_variance_no_across_channels) -{ - auto data = make_shared(element::f32, PartialShape{1, 3, 3, 3}); - auto mvn = make_shared(data, false, false, 1e-9); - auto fun = make_shared(OutputVector{mvn}, ParameterVector{data}); - auto test_case = test::TestCase(fun); - - test_case.add_input({1, 2, 3, 4, 5, 6, 7, 8, 9, 1, 2, 3, 4, 5, 6, 7, 8, 9, 1, 2, 3, 4, 5, 6, 7, 8, 9}); - - test_case.add_expected_output({-4, -3, -2, -1, 0, 1, 2, 3, 4, - -4, -3, -2, -1, 0, 1, 2, 3, 4, - -4, -3, -2, -1, 0, 1, 2, 3, 4}); - // clang-format on - test_case.run(); -} - -NGRAPH_TEST(${BACKEND_NAME}, evaluate_mvn_1_across_channels_no_variance) -{ - auto data = make_shared(element::f32, PartialShape{1, 1, 3, 2, 2}); - auto mvn = make_shared(data, true, false, 1e-9); - auto fun = make_shared(OutputVector{mvn}, ParameterVector{data}); - auto test_case = test::TestCase(fun); - - // clang-format off - test_case.add_input({1, 2, 3, 4, 5, 6, 7, 8, 9, 1, 2, 3}); - - test_case.add_expected_output({-3.25, -2.25, -1.25, - -0.25, 0.75, 1.75, - 2.75, 3.75, 4.75, - -3.25, -2.25, -1.25}); - // clang-format on - test_case.run(); -} - -NGRAPH_TEST(${BACKEND_NAME}, evaluate_mvn_1_variance_no_across_channels) -{ - auto data = make_shared(element::f32, PartialShape{1, 3, 3, 3}); - auto mvn = make_shared(data, false, true, 1e-9); - auto fun = make_shared(OutputVector{mvn}, ParameterVector{data}); - auto test_case = test::TestCase(fun); - - // clang-format off - test_case.add_input({1, 2, 3, 4, 5, 6, 7, 8, 9, 1, 2, 3, 4, 5, 6, 7, 8, 9, 1, 2, 3, 4, 5, 6, 7, 8, 9}); - - test_case.add_expected_output({-1.5491934, -1.161895, -0.7745967, - -0.38729835, 0., 0.38729835, - 0.7745967, 1.161895, 1.5491934, - -1.5491934, -1.161895, -0.7745967, - -0.38729835, 0., 0.38729835, - 0.7745967, 1.161895, 1.5491934, - -1.5491934, -1.161895, -0.7745967, - -0.38729835, 0., 0.38729835, - 0.7745967, 1.161895, 1.5491934}); - // clang-format on - test_case.run(); -} - -NGRAPH_TEST(${BACKEND_NAME}, evaluate_mvn_1_across_channels_with_variance) -{ - auto data = make_shared(element::f32, PartialShape{1, 3, 3, 3}); - - auto mvn = make_shared(data, true, true, 1e-9); - auto fun = make_shared(OutputVector{mvn}, ParameterVector{data}); - auto test_case = test::TestCase(fun); - - // clang-format off - test_case.add_input( - {1, 2, 3, 4, 5, 6, 7, 8, 9, 1, 2, 3, 4, 5, 6, 7, 8, 9, 1, 2, 3, 4, 5, 6, 7, 8, 9}); - - test_case.add_expected_output({-1.5491934, -1.161895, -0.7745967, - -0.38729835, 0., 0.38729835, - 0.7745967, 1.161895, 1.5491934, - -1.5491934, -1.161895, -0.7745967, - -0.38729835, 0., 0.38729835, - 0.7745967, 1.161895, 1.5491934, - -1.5491934, -1.161895, -0.7745967, - -0.38729835, 0., 0.38729835, - 0.7745967, 1.161895, 1.5491934}); - // clang-format on - test_case.run(); -} - -NGRAPH_TEST(${BACKEND_NAME}, evaluate_mvn_1_reduction_axes_no_variance) -{ - const ngraph::AxisSet axes = {1, 2, 3}; - auto data = make_shared(element::f32, PartialShape{1, 3, 2, 2}); - auto mvn = make_shared(data, axes, false, 1e-9); - auto fun = make_shared(OutputVector{mvn}, ParameterVector{data}); - auto test_case = test::TestCase(fun); - - // clang-format off - test_case.add_input({1, 2, 3, 4, 5, 6, 7, 8, 9, 1, 2, 3}); - - test_case.add_expected_output({-3.25, -2.25, -1.25, - -0.25, 0.75, 1.75, - 2.75, 3.75, 4.75, - -3.25, -2.25, -1.25}); - // clang-format on - test_case.run(); -} - 
-NGRAPH_TEST(${BACKEND_NAME}, evaluate_mvn_1_reduction_axes_with_variance) -{ - auto data = make_shared(element::f32, PartialShape{1, 3, 3, 3}); - - const ngraph::AxisSet axes = {2, 3}; - auto mvn = make_shared(data, axes, true, 1e-9); - auto fun = make_shared(OutputVector{mvn}, ParameterVector{data}); - auto test_case = test::TestCase(fun); - - // clang-format off - test_case.add_input( - {1, 2, 3, 4, 5, 6, 7, 8, 9, 1, 2, 3, 4, 5, 6, 7, 8, 9, 1, 2, 3, 4, 5, 6, 7, 8, 9}); - - test_case.add_expected_output({-1.5491934, -1.161895, -0.7745967, - -0.38729835, 0., 0.38729835, - 0.7745967, 1.161895, 1.5491934, - -1.5491934, -1.161895, -0.7745967, - -0.38729835, 0., 0.38729835, - 0.7745967, 1.161895, 1.5491934, - -1.5491934, -1.161895, -0.7745967, - -0.38729835, 0., 0.38729835, - 0.7745967, 1.161895, 1.5491934}); - // clang-format on - test_case.run(); -} From 92d022ead5b01ac5dc298aa92a854050fd5f25da Mon Sep 17 00:00:00 2001 From: Szymon Durawa Date: Wed, 14 Jul 2021 14:24:09 +0200 Subject: [PATCH 5/8] Apply correct format. --- .../tests/functional/op_reference/mvn.cpp | 266 ++++++++---------- 1 file changed, 120 insertions(+), 146 deletions(-) diff --git a/docs/template_plugin/tests/functional/op_reference/mvn.cpp b/docs/template_plugin/tests/functional/op_reference/mvn.cpp index b4580f995f6126..70db598360d142 100644 --- a/docs/template_plugin/tests/functional/op_reference/mvn.cpp +++ b/docs/template_plugin/tests/functional/op_reference/mvn.cpp @@ -33,7 +33,7 @@ struct Tensor { struct MVN1Params { MVN1Params(const Tensor& paramInput, const ngraph::AxisSet& paramReductionAxes, const bool paramAcrossChannels, const bool paramNormalizeVariance, - const double paramEps, const Tensor& paramExpected) + const double paramEps, const Tensor& paramExpected) : input(paramInput), reductionAxes(paramReductionAxes), acrossChannels(paramAcrossChannels), @@ -89,91 +89,78 @@ TEST_P(ReferenceMVN1LayerTest, CompareWithHardcodedRefs) { const ngraph::AxisSet emptyReductionAxes {}; -INSTANTIATE_TEST_SUITE_P(smoke_MVN1_With_Hardcoded_Refs, ReferenceMVN1LayerTest, - ::testing::Values( - // across_channels=false, variance=false - MVN1Params(Tensor {{1, 3, 3, 3}, ngraph::element::f32, std::initializer_list({1, 2, 3, 4, 5, 6, 7, 8, 9, 1, 2, 3, 4, 5, - 6, 7, 8, 9, 1, 2, 3, 4, 5, 6, 7, 8, 9})}, - emptyReductionAxes, - false, - false, - 1e-9, - Tensor {{1, 3, 3, 3}, ngraph::element::f32, std::initializer_list({-4, -3, -2, -1, 0, 1, 2, 3, 4, - -4, -3, -2, -1, 0, 1, 2, 3, 4, - -4, -3, -2, -1, 0, 1, 2, 3, 4})}), - // across_channels=true, variance=false - MVN1Params(Tensor {{1, 3, 2, 2}, ngraph::element::f32, std::initializer_list({1, 2, 3, 4, 5, 6, 7, 8, 9, 1, 2, 3})}, - emptyReductionAxes, - true, - false, - 1e-9, - Tensor {{1, 3, 2, 2}, ngraph::element::f32, std::initializer_list({-3.25, -2.25, -1.25, -0.25, 0.75, 1.75, - 2.75, 3.75, 4.75, -3.25, -2.25, -1.25})}), - // across_channels=false, variance=true - MVN1Params(Tensor {{1, 3, 3, 3}, ngraph::element::f32, std::initializer_list({1, 2, 3, 4, 5, 6, 7, 8, 9, 1, 2, 3, 4, 5, - 6, 7, 8, 9, 1, 2, 3, 4, 5, 6, 7, 8, 9})}, - emptyReductionAxes, - false, - true, - 1e-9, - Tensor {{1, 3, 3, 3}, ngraph::element::f32, std::initializer_list({-1.5491934, -1.161895, -0.7745967, - -0.38729835, 0., 0.38729835, - 0.7745967, 1.161895, 1.5491934, - -1.5491934, -1.161895, -0.7745967, - -0.38729835, 0., 0.38729835, - 0.7745967, 1.161895, 1.5491934, - -1.5491934, -1.161895, -0.7745967, - -0.38729835, 0., 0.38729835, - 0.7745967, 1.161895, 1.5491934})}), - // across_channels=true, variance=true 
- MVN1Params(Tensor {{1, 3, 3, 3}, ngraph::element::f32, std::initializer_list({1, 2, 3, 4, 5, 6, 7, 8, 9, 1, 2, 3, 4, 5, - 6, 7, 8, 9, 1, 2, 3, 4, 5, 6, 7, 8, 9})}, - emptyReductionAxes, - true, - true, - 1e-9, - Tensor {{1, 3, 3, 3}, ngraph::element::f32, std::initializer_list({-1.5491934, -1.161895, -0.7745967, - -0.38729835, 0., 0.38729835, - 0.7745967, 1.161895, 1.5491934, - -1.5491934, -1.161895, -0.7745967, - -0.38729835, 0., 0.38729835, - 0.7745967, 1.161895, 1.5491934, - -1.5491934, -1.161895, -0.7745967, - -0.38729835, 0., 0.38729835, - 0.7745967, 1.161895, 1.5491934})}), - // reductionAxes, variance=false - MVN1Params(Tensor {{1, 3, 2, 2}, ngraph::element::f32, std::initializer_list({1, 2, 3, 4, 5, 6, 7, 8, 9, 1, 2, 3})}, - {1, 2, 3}, - false, - false, - 1e-9, - Tensor {{1, 3, 2, 2}, ngraph::element::f32, std::initializer_list({-3.25, -2.25, -1.25, - -0.25, 0.75, 1.75, - 2.75, 3.75, 4.75, - -3.25, -2.25, -1.25})}), - // reductionAxes, variance=true - MVN1Params(Tensor {{1, 3, 3, 3}, ngraph::element::f32, std::initializer_list({1, 2, 3, 4, 5, 6, 7, 8, 9, 1, 2, 3, 4, 5, - 6, 7, 8, 9, 1, 2, 3, 4, 5, 6, 7, 8, 9})}, - {2, 3}, - false, - true, - 1e-9, - Tensor {{1, 3, 3, 3}, ngraph::element::f32, std::initializer_list({-1.5491934, -1.161895, -0.7745967, - -0.38729835, 0., 0.38729835, - 0.7745967, 1.161895, 1.5491934, - -1.5491934, -1.161895, -0.7745967, - -0.38729835, 0., 0.38729835, - 0.7745967, 1.161895, 1.5491934, - -1.5491934, -1.161895, -0.7745967, - -0.38729835, 0., 0.38729835, - 0.7745967, 1.161895, 1.5491934})})), - ReferenceMVN1LayerTest::getTestCaseName); +INSTANTIATE_TEST_SUITE_P( + smoke_MVN1_With_Hardcoded_Refs, ReferenceMVN1LayerTest, + ::testing::Values( + // across_channels=false, variance=false + MVN1Params(Tensor {{1, 3, 3, 3}, ngraph::element::f32, std::initializer_list({1, 2, 3, 4, 5, 6, 7, 8, 9, 1, 2, 3, 4, 5, + 6, 7, 8, 9, 1, 2, 3, 4, 5, 6, 7, 8, 9})}, + emptyReductionAxes, + false, + false, + 1e-9, + Tensor {{1, 3, 3, 3}, ngraph::element::f32, std::initializer_list({-4, -3, -2, -1, 0, 1, 2, 3, 4, -4, -3, -2, -1, 0, + 1, 2, 3, 4, -4, -3, -2, -1, 0, 1, 2, 3, 4})}), + // across_channels=true, variance=false + MVN1Params(Tensor {{1, 3, 2, 2}, ngraph::element::f32, std::initializer_list({1, 2, 3, 4, 5, 6, 7, 8, 9, 1, 2, 3})}, + emptyReductionAxes, + true, + false, + 1e-9, + Tensor {{1, 3, 2, 2}, ngraph::element::f32, std::initializer_list({-3.25, -2.25, -1.25, -0.25, 0.75, 1.75, + 2.75, 3.75, 4.75, -3.25, -2.25, -1.25})}), + // across_channels=false, variance=true + MVN1Params(Tensor {{1, 3, 3, 3}, ngraph::element::f32, std::initializer_list({1, 2, 3, 4, 5, 6, 7, 8, 9, 1, 2, 3, 4, 5, + 6, 7, 8, 9, 1, 2, 3, 4, 5, 6, 7, 8, 9})}, + emptyReductionAxes, + false, + true, + 1e-9, + Tensor {{1, 3, 3, 3}, + ngraph::element::f32, + std::initializer_list({-1.5491934, -1.161895, -0.7745967, -0.38729835, 0., 0.38729835, 0.7745967, 1.161895, 1.5491934, + -1.5491934, -1.161895, -0.7745967, -0.38729835, 0., 0.38729835, 0.7745967, 1.161895, 1.5491934, + -1.5491934, -1.161895, -0.7745967, -0.38729835, 0., 0.38729835, 0.7745967, 1.161895, 1.5491934})}), + // across_channels=true, variance=true + MVN1Params(Tensor {{1, 3, 3, 3}, ngraph::element::f32, std::initializer_list({1, 2, 3, 4, 5, 6, 7, 8, 9, 1, 2, 3, 4, 5, + 6, 7, 8, 9, 1, 2, 3, 4, 5, 6, 7, 8, 9})}, + emptyReductionAxes, + true, + true, + 1e-9, + Tensor {{1, 3, 3, 3}, + ngraph::element::f32, + std::initializer_list({-1.5491934, -1.161895, -0.7745967, -0.38729835, 0., 0.38729835, 0.7745967, 1.161895, 1.5491934, + -1.5491934, 
-1.161895, -0.7745967, -0.38729835, 0., 0.38729835, 0.7745967, 1.161895, 1.5491934, + -1.5491934, -1.161895, -0.7745967, -0.38729835, 0., 0.38729835, 0.7745967, 1.161895, 1.5491934})}), + // reductionAxes, variance=false + MVN1Params(Tensor {{1, 3, 2, 2}, ngraph::element::f32, std::initializer_list({1, 2, 3, 4, 5, 6, 7, 8, 9, 1, 2, 3})}, + {1, 2, 3}, + false, + false, + 1e-9, + Tensor {{1, 3, 2, 2}, + ngraph::element::f32, + std::initializer_list({-3.25, -2.25, -1.25, -0.25, 0.75, 1.75, 2.75, 3.75, 4.75, -3.25, -2.25, -1.25})}), + // reductionAxes, variance=true + MVN1Params(Tensor {{1, 3, 3, 3}, ngraph::element::f32, std::initializer_list({1, 2, 3, 4, 5, 6, 7, 8, 9, 1, 2, 3, 4, 5, + 6, 7, 8, 9, 1, 2, 3, 4, 5, 6, 7, 8, 9})}, + {2, 3}, + false, + true, + 1e-9, + Tensor {{1, 3, 3, 3}, + ngraph::element::f32, + std::initializer_list({-1.5491934, -1.161895, -0.7745967, -0.38729835, 0., 0.38729835, 0.7745967, 1.161895, 1.5491934, + -1.5491934, -1.161895, -0.7745967, -0.38729835, 0., 0.38729835, 0.7745967, 1.161895, 1.5491934, + -1.5491934, -1.161895, -0.7745967, -0.38729835, 0., 0.38729835, 0.7745967, 1.161895, 1.5491934})})), + ReferenceMVN1LayerTest::getTestCaseName); // ------------------------------ V6 ------------------------------ struct MVN6Params { - MVN6Params(const Tensor& paramInput, const Tensor& paramReductionAxes, const bool paramNormalizeVariance, - const double paramEps, const ngraph::op::MVNEpsMode mode, const Tensor& paramExpected) + MVN6Params(const Tensor& paramInput, const Tensor& paramReductionAxes, const bool paramNormalizeVariance, const double paramEps, + const ngraph::op::MVNEpsMode mode, const Tensor& paramExpected) : input(paramInput), reductionAxes(paramReductionAxes), normalizeVariance(paramNormalizeVariance), @@ -209,8 +196,8 @@ class ReferenceMVN6LayerTest : public testing::TestWithParam, public } private: - static std::shared_ptr CreateFunction(const Tensor& input, const Tensor& reductionAxes, - const bool normalizeVariance, const double eps, const ngraph::op::MVNEpsMode epsMode) { + static std::shared_ptr CreateFunction(const Tensor& input, const Tensor& reductionAxes, const bool normalizeVariance, const double eps, + const ngraph::op::MVNEpsMode epsMode) { std::vector dataVector(reductionAxes.shape[0]); const auto in = std::make_shared(input.type, input.shape); auto mRef = as(reductionAxes.data); @@ -230,65 +217,52 @@ TEST_P(ReferenceMVN6LayerTest, CompareWithHardcodedRefs) { Exec(); } -INSTANTIATE_TEST_SUITE_P(smoke_MVN6_With_Hardcoded_Refs, ReferenceMVN6LayerTest, - ::testing::Values( - // variance=false, OUTSIDE_SQRT - MVN6Params(Tensor {{1, 3, 3, 3}, ngraph::element::f32, std::initializer_list({1, 2, 3, 4, 5, 6, 7, 8, 9, 1, 2, 3, 4, 5, - 6, 7, 8, 9, 1, 2, 3, 4, 5, 6, 7, 8, 9})}, - Tensor {Shape{2}, ngraph::element::i64, std::initializer_list({2, 3})}, - false, - 1e-9, - ngraph::op::MVNEpsMode::OUTSIDE_SQRT, - Tensor {{1, 3, 3, 3}, ngraph::element::f32, std::initializer_list({-4, -3, -2, -1, 0, 1, 2, 3, 4, - -4, -3, -2, -1, 0, 1, 2, 3, 4, - -4, -3, -2, -1, 0, 1, 2, 3, 4})}), - // variance=true, OUTSIDE_SQRT - MVN6Params(Tensor {{1, 3, 3, 3}, ngraph::element::f32, std::initializer_list({1, 2, 3, 4, 5, 6, 7, 8, 9, 1, 2, 3, 4, 5, - 6, 7, 8, 9, 1, 2, 3, 4, 5, 6, 7, 8, 9})}, - Tensor {Shape{2}, ngraph::element::i64, std::initializer_list({2, 3})}, - true, - 1e-9, - ngraph::op::MVNEpsMode::OUTSIDE_SQRT, - Tensor {{1, 3, 3, 3}, ngraph::element::f32, std::initializer_list({-1.5491934, -1.161895, -0.7745967, - -0.38729835, 0., 0.38729835, - 0.7745967, 1.161895, 
1.5491934, - -1.5491934, -1.161895, -0.7745967, - -0.38729835, 0., 0.38729835, - 0.7745967, 1.161895, 1.5491934, - -1.5491934, -1.161895, -0.7745967, - -0.38729835, 0., 0.38729835, - 0.7745967, 1.161895, 1.5491934})}), - // variance=true, INSIDE_SQRT - MVN6Params(Tensor {{1, 3, 3, 3}, ngraph::element::f32, std::initializer_list({1, 2, 3, 4, 5, 6, 7, 8, 9, 1, 2, 3, 4, 5, - 6, 7, 8, 9, 1, 2, 3, 4, 5, 6, 7, 8, 9})}, - Tensor {Shape{2}, ngraph::element::i64, std::initializer_list({2, 3})}, - true, - 1e-9, - ngraph::op::MVNEpsMode::INSIDE_SQRT, - Tensor {{1, 3, 3, 3}, ngraph::element::f32, std::initializer_list({-1.5491934, -1.161895, -0.7745967, - -0.38729835, 0., 0.38729835, - 0.7745967, 1.161895, 1.5491934, - -1.5491934, -1.161895, -0.7745967, - -0.38729835, 0., 0.38729835, - 0.7745967, 1.161895, 1.5491934, - -1.5491934, -1.161895, -0.7745967, - -0.38729835, 0., 0.38729835, - 0.7745967, 1.161895, 1.5491934})}), - // variance=true, another reductionAxes - MVN6Params(Tensor {{1, 3, 3, 3}, ngraph::element::f32, std::initializer_list({1, 2, 3, 4, 5, 6, 7, 8, 9, 1, 2, 3, 4, 5, - 6, 7, 8, 9, 1, 2, 3, 4, 5, 6, 7, 8, 9})}, - Tensor {Shape{3}, ngraph::element::i64, std::initializer_list({1, 2, 3})}, - true, - 1e-9, - ngraph::op::MVNEpsMode::OUTSIDE_SQRT, - Tensor {{1, 3, 3, 3}, ngraph::element::f32, std::initializer_list({-1.5491934, -1.161895, -0.7745967, - -0.38729835, 0., 0.38729835, - 0.7745967, 1.161895, 1.5491934, - -1.5491934, -1.161895, -0.7745967, - -0.38729835, 0., 0.38729835, - 0.7745967, 1.161895, 1.5491934, - -1.5491934, -1.161895, -0.7745967, - -0.38729835, 0., 0.38729835, - 0.7745967, 1.161895, 1.5491934})})), - ReferenceMVN6LayerTest::getTestCaseName); - \ No newline at end of file +INSTANTIATE_TEST_SUITE_P( + smoke_MVN6_With_Hardcoded_Refs, ReferenceMVN6LayerTest, + ::testing::Values( + // variance=false, OUTSIDE_SQRT + MVN6Params(Tensor {{1, 3, 3, 3}, ngraph::element::f32, std::initializer_list({1, 2, 3, 4, 5, 6, 7, 8, 9, 1, 2, 3, 4, 5, + 6, 7, 8, 9, 1, 2, 3, 4, 5, 6, 7, 8, 9})}, + Tensor {Shape {2}, ngraph::element::i64, std::initializer_list({2, 3})}, + false, + 1e-9, + ngraph::op::MVNEpsMode::OUTSIDE_SQRT, + Tensor {{1, 3, 3, 3}, ngraph::element::f32, std::initializer_list({-4, -3, -2, -1, 0, 1, 2, 3, 4, -4, -3, -2, -1, 0, + 1, 2, 3, 4, -4, -3, -2, -1, 0, 1, 2, 3, 4})}), + // variance=true, OUTSIDE_SQRT + MVN6Params(Tensor {{1, 3, 3, 3}, ngraph::element::f32, std::initializer_list({1, 2, 3, 4, 5, 6, 7, 8, 9, 1, 2, 3, 4, 5, + 6, 7, 8, 9, 1, 2, 3, 4, 5, 6, 7, 8, 9})}, + Tensor {Shape {2}, ngraph::element::i64, std::initializer_list({2, 3})}, + true, + 1e-9, + ngraph::op::MVNEpsMode::OUTSIDE_SQRT, + Tensor {{1, 3, 3, 3}, + ngraph::element::f32, + std::initializer_list({-1.5491934, -1.161895, -0.7745967, -0.38729835, 0., 0.38729835, 0.7745967, 1.161895, 1.5491934, + -1.5491934, -1.161895, -0.7745967, -0.38729835, 0., 0.38729835, 0.7745967, 1.161895, 1.5491934, + -1.5491934, -1.161895, -0.7745967, -0.38729835, 0., 0.38729835, 0.7745967, 1.161895, 1.5491934})}), + // variance=true, INSIDE_SQRT + MVN6Params(Tensor {{1, 3, 3, 3}, ngraph::element::f32, std::initializer_list({1, 2, 3, 4, 5, 6, 7, 8, 9, 1, 2, 3, 4, 5, + 6, 7, 8, 9, 1, 2, 3, 4, 5, 6, 7, 8, 9})}, + Tensor {Shape {2}, ngraph::element::i64, std::initializer_list({2, 3})}, + true, + 1e-9, + ngraph::op::MVNEpsMode::INSIDE_SQRT, + Tensor {{1, 3, 3, 3}, + ngraph::element::f32, + std::initializer_list({-1.5491934, -1.161895, -0.7745967, -0.38729835, 0., 0.38729835, 0.7745967, 1.161895, 1.5491934, + -1.5491934, -1.161895, -0.7745967, 
-0.38729835, 0., 0.38729835, 0.7745967, 1.161895, 1.5491934, + -1.5491934, -1.161895, -0.7745967, -0.38729835, 0., 0.38729835, 0.7745967, 1.161895, 1.5491934})}), + // variance=true, another reductionAxes, OUTSIDE_SQRT + MVN6Params(Tensor {{1, 3, 3, 3}, ngraph::element::f32, std::initializer_list({1, 2, 3, 4, 5, 6, 7, 8, 9, 1, 2, 3, 4, 5, + 6, 7, 8, 9, 1, 2, 3, 4, 5, 6, 7, 8, 9})}, + Tensor {Shape {3}, ngraph::element::i64, std::initializer_list({1, 2, 3})}, + true, + 1e-9, + ngraph::op::MVNEpsMode::OUTSIDE_SQRT, + Tensor {{1, 3, 3, 3}, + ngraph::element::f32, + std::initializer_list({-1.5491934, -1.161895, -0.7745967, -0.38729835, 0., 0.38729835, 0.7745967, 1.161895, 1.5491934, + -1.5491934, -1.161895, -0.7745967, -0.38729835, 0., 0.38729835, 0.7745967, 1.161895, 1.5491934, + -1.5491934, -1.161895, -0.7745967, -0.38729835, 0., 0.38729835, 0.7745967, 1.161895, 1.5491934})})), + ReferenceMVN6LayerTest::getTestCaseName); From b6b1740b5a8bfcef072e2fb3a87ee4cd73b8dd73 Mon Sep 17 00:00:00 2001 From: Szymon Durawa Date: Thu, 15 Jul 2021 14:05:54 +0200 Subject: [PATCH 6/8] Add BF16 to SLT, add comment regarding deprecated class. --- .../single_layer_tests/mvn.cpp | 13 +++++++------ .../shared/include/single_layer_tests/mvn.hpp | 1 + .../shared_test_classes/single_layer/mvn.hpp | 1 + .../shared_test_classes/src/single_layer/mvn.cpp | 1 + .../layer_tests_summary/utils/constants.py | 1 + 5 files changed, 11 insertions(+), 6 deletions(-) diff --git a/inference-engine/tests/functional/plugin/cpu/shared_tests_instances/single_layer_tests/mvn.cpp b/inference-engine/tests/functional/plugin/cpu/shared_tests_instances/single_layer_tests/mvn.cpp index caf39dbf1ea86d..423e8d6525da30 100644 --- a/inference-engine/tests/functional/plugin/cpu/shared_tests_instances/single_layer_tests/mvn.cpp +++ b/inference-engine/tests/functional/plugin/cpu/shared_tests_instances/single_layer_tests/mvn.cpp @@ -44,9 +44,15 @@ const std::vector epsilon = { 0.000000001 }; +std::vector dataPrecisions = { + InferenceEngine::Precision::BF16, + InferenceEngine::Precision::FP16, + InferenceEngine::Precision::FP32 +}; + const auto MvnAcrossChannels = ::testing::Combine( ::testing::ValuesIn(inputShapes), - ::testing::Values(InferenceEngine::Precision::FP32), + ::testing::ValuesIn(dataPrecisions), ::testing::ValuesIn(emptyReductionAxes), ::testing::ValuesIn(acrossChannels), ::testing::ValuesIn(normalizeVariance), @@ -69,11 +75,6 @@ INSTANTIATE_TEST_SUITE_P(smoke_MKLDNN_TestsMVN_AcrossChannels, Mvn1LayerTest, Mv INSTANTIATE_TEST_SUITE_P(smoke_MKLDNN_TestsMVN_ReductionAxes, Mvn1LayerTest, MvnReductionAxes, Mvn1LayerTest::getTestCaseName); -std::vector dataPrecisions = { - InferenceEngine::Precision::FP32, - InferenceEngine::Precision::FP16 -}; - std::vector idxPrecisions = { InferenceEngine::Precision::I32, InferenceEngine::Precision::I64 diff --git a/inference-engine/tests/functional/plugin/shared/include/single_layer_tests/mvn.hpp b/inference-engine/tests/functional/plugin/shared/include/single_layer_tests/mvn.hpp index 919ea38a9478ee..d1a9ff52a108e0 100644 --- a/inference-engine/tests/functional/plugin/shared/include/single_layer_tests/mvn.hpp +++ b/inference-engine/tests/functional/plugin/shared/include/single_layer_tests/mvn.hpp @@ -8,6 +8,7 @@ namespace LayerTestsDefinitions { +// DEPRECATED, remove MvnLayerTest when KMB and ARM plugin will switch to use Mvn1LayerTest (#60420) TEST_P(MvnLayerTest, CompareWithRefs) { Run(); }; diff --git 
a/inference-engine/tests/functional/shared_test_classes/include/shared_test_classes/single_layer/mvn.hpp b/inference-engine/tests/functional/shared_test_classes/include/shared_test_classes/single_layer/mvn.hpp index 9b399e2109517d..747e0940da7fef 100644 --- a/inference-engine/tests/functional/shared_test_classes/include/shared_test_classes/single_layer/mvn.hpp +++ b/inference-engine/tests/functional/shared_test_classes/include/shared_test_classes/single_layer/mvn.hpp @@ -11,6 +11,7 @@ namespace LayerTestsDefinitions { +// DEPRECATED, remove MvnLayerTest when KMB and ARM plugin will switch to use Mvn1LayerTest (#60420) typedef std::tuple< InferenceEngine::SizeVector, // Input shapes InferenceEngine::Precision, // Input precision diff --git a/inference-engine/tests/functional/shared_test_classes/src/single_layer/mvn.cpp b/inference-engine/tests/functional/shared_test_classes/src/single_layer/mvn.cpp index 69afd0df38d3b1..d4e2a0c0df8536 100644 --- a/inference-engine/tests/functional/shared_test_classes/src/single_layer/mvn.cpp +++ b/inference-engine/tests/functional/shared_test_classes/src/single_layer/mvn.cpp @@ -7,6 +7,7 @@ namespace LayerTestsDefinitions { +// DEPRECATED, remove MvnLayerTest when KMB and ARM plugin will switch to use Mvn1LayerTest (#60420) std::string MvnLayerTest::getTestCaseName(const testing::TestParamInfo& obj) { InferenceEngine::SizeVector inputShapes; InferenceEngine::Precision inputPrecision; diff --git a/inference-engine/tests/ie_test_utils/functional_test_utils/layer_tests_summary/utils/constants.py b/inference-engine/tests/ie_test_utils/functional_test_utils/layer_tests_summary/utils/constants.py index 8f39adb29b7008..daaf369bf305ee 100644 --- a/inference-engine/tests/ie_test_utils/functional_test_utils/layer_tests_summary/utils/constants.py +++ b/inference-engine/tests/ie_test_utils/functional_test_utils/layer_tests_summary/utils/constants.py @@ -53,6 +53,7 @@ 'LSTMSequence-5', 'LogSoftmax-5', 'Loop-5', + 'MVN-1', 'MVN-6', 'Maximum-1', 'MaxPool-1', From ae4b4d66a25077f17b3a210741e253af92fed492 Mon Sep 17 00:00:00 2001 From: Szymon Durawa Date: Thu, 15 Jul 2021 15:02:03 +0200 Subject: [PATCH 7/8] Remove BF16 precision as it is not supported on Windows; remove default values for attributes.
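With the in-class member defaults removed from v0::MVN and v6::MVN, the attribute values come only from the constructor arguments. A minimal sketch of a v6 call site (the shape, axes, and epsilon values below are illustrative only, not taken from this patch):

    // Build a v6 MVN node with all attributes passed explicitly.
    const auto data = std::make_shared<ngraph::opset6::Parameter>(ngraph::element::f32, ngraph::Shape{2, 3, 4, 5});
    const auto axes = ngraph::opset6::Constant::create(ngraph::element::i64, ngraph::Shape{2}, {2, 3});
    const auto mvn  = std::make_shared<ngraph::opset6::MVN>(data,
                                                            axes,
                                                            true,                                  // normalize_variance
                                                            1e-9f,                                 // eps (float)
                                                            ngraph::op::MVNEpsMode::OUTSIDE_SQRT); // eps_mode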
--- .../cpu/shared_tests_instances/single_layer_tests/mvn.cpp | 1 - ngraph/core/include/ngraph/op/mvn.hpp | 8 ++++---- 2 files changed, 4 insertions(+), 5 deletions(-) diff --git a/inference-engine/tests/functional/plugin/cpu/shared_tests_instances/single_layer_tests/mvn.cpp b/inference-engine/tests/functional/plugin/cpu/shared_tests_instances/single_layer_tests/mvn.cpp index 423e8d6525da30..41502bced85b53 100644 --- a/inference-engine/tests/functional/plugin/cpu/shared_tests_instances/single_layer_tests/mvn.cpp +++ b/inference-engine/tests/functional/plugin/cpu/shared_tests_instances/single_layer_tests/mvn.cpp @@ -45,7 +45,6 @@ const std::vector epsilon = { }; std::vector dataPrecisions = { - InferenceEngine::Precision::BF16, InferenceEngine::Precision::FP16, InferenceEngine::Precision::FP32 }; diff --git a/ngraph/core/include/ngraph/op/mvn.hpp b/ngraph/core/include/ngraph/op/mvn.hpp index cc3ab0bb9d7024..49f9c3a71d82af 100644 --- a/ngraph/core/include/ngraph/op/mvn.hpp +++ b/ngraph/core/include/ngraph/op/mvn.hpp @@ -69,7 +69,7 @@ namespace ngraph void set_reduction_axes(AxisSet axes) { m_reduction_axes = axes; } private: - double m_eps = 1e-9; + double m_eps; bool m_across_channels; bool m_normalize_variance; AxisSet m_reduction_axes; @@ -128,9 +128,9 @@ namespace ngraph MVNEpsMode get_eps_mode() const { return m_eps_mode; } private: - bool m_normalize_variance = true; - float m_eps = (float)1e-6; - MVNEpsMode m_eps_mode = MVNEpsMode::INSIDE_SQRT; + bool m_normalize_variance; + float m_eps; + MVNEpsMode m_eps_mode; }; } // namespace v6 } // namespace op From bd841a218365230a691aed710a13a9650bf332cd Mon Sep 17 00:00:00 2001 From: Szymon Durawa Date: Thu, 22 Jul 2021 15:09:11 +0200 Subject: [PATCH 8/8] Reuse Tensor from base_reference_test.hpp --- .../tests/functional/op_reference/mvn.cpp | 128 ++++++++---------- .../include/ngraph_functions/builders.hpp | 2 +- .../ngraph_functions/src/mvn.cpp | 2 +- 3 files changed, 59 insertions(+), 73 deletions(-) diff --git a/docs/template_plugin/tests/functional/op_reference/mvn.cpp b/docs/template_plugin/tests/functional/op_reference/mvn.cpp index 70db598360d142..5321164807b852 100644 --- a/docs/template_plugin/tests/functional/op_reference/mvn.cpp +++ b/docs/template_plugin/tests/functional/op_reference/mvn.cpp @@ -14,20 +14,7 @@ using namespace ngraph; using namespace InferenceEngine; - -struct Tensor { - Tensor() = default; - Tensor(const ngraph::Shape& shape, ngraph::element::Type type, const InferenceEngine::Blob::Ptr& data): shape {shape}, type {type}, data {data} {} - - template - Tensor(const ngraph::Shape& shape, ngraph::element::Type type, std::initializer_list data_elements): shape {shape}, type {type} { - data = CreateBlob(type, std::vector(data_elements)); - } - - ngraph::Shape shape; - ngraph::element::Type type; - InferenceEngine::Blob::Ptr data; -}; +using namespace reference_tests; // ------------------------------ V0 ------------------------------ @@ -93,67 +80,66 @@ INSTANTIATE_TEST_SUITE_P( smoke_MVN1_With_Hardcoded_Refs, ReferenceMVN1LayerTest, ::testing::Values( // across_channels=false, variance=false - MVN1Params(Tensor {{1, 3, 3, 3}, ngraph::element::f32, std::initializer_list({1, 2, 3, 4, 5, 6, 7, 8, 9, 1, 2, 3, 4, 5, - 6, 7, 8, 9, 1, 2, 3, 4, 5, 6, 7, 8, 9})}, + MVN1Params(Tensor {{1, 3, 3, 3}, ngraph::element::f32, std::vector {1, 2, 3, 4, 5, 6, 7, 8, 9, 1, 2, 3, 4, 5, + 6, 7, 8, 9, 1, 2, 3, 4, 5, 6, 7, 8, 9}}, emptyReductionAxes, false, false, 1e-9, - Tensor {{1, 3, 3, 3}, ngraph::element::f32, std::initializer_list({-4, 
-3, -2, -1, 0, 1, 2, 3, 4, -4, -3, -2, -1, 0, - 1, 2, 3, 4, -4, -3, -2, -1, 0, 1, 2, 3, 4})}), + Tensor {{1, 3, 3, 3}, ngraph::element::f32, std::vector {-4, -3, -2, -1, 0, 1, 2, 3, 4, -4, -3, -2, -1, 0, + 1, 2, 3, 4, -4, -3, -2, -1, 0, 1, 2, 3, 4}}), // across_channels=true, variance=false - MVN1Params(Tensor {{1, 3, 2, 2}, ngraph::element::f32, std::initializer_list({1, 2, 3, 4, 5, 6, 7, 8, 9, 1, 2, 3})}, - emptyReductionAxes, - true, - false, - 1e-9, - Tensor {{1, 3, 2, 2}, ngraph::element::f32, std::initializer_list({-3.25, -2.25, -1.25, -0.25, 0.75, 1.75, - 2.75, 3.75, 4.75, -3.25, -2.25, -1.25})}), + MVN1Params( + Tensor {{1, 3, 2, 2}, ngraph::element::f32, std::vector {1, 2, 3, 4, 5, 6, 7, 8, 9, 1, 2, 3}}, + emptyReductionAxes, + true, + false, + 1e-9, + Tensor {{1, 3, 2, 2}, ngraph::element::f32, std::vector {-3.25, -2.25, -1.25, -0.25, 0.75, 1.75, 2.75, 3.75, 4.75, -3.25, -2.25, -1.25}}), // across_channels=false, variance=true - MVN1Params(Tensor {{1, 3, 3, 3}, ngraph::element::f32, std::initializer_list({1, 2, 3, 4, 5, 6, 7, 8, 9, 1, 2, 3, 4, 5, - 6, 7, 8, 9, 1, 2, 3, 4, 5, 6, 7, 8, 9})}, + MVN1Params(Tensor {{1, 3, 3, 3}, ngraph::element::f32, std::vector {1, 2, 3, 4, 5, 6, 7, 8, 9, 1, 2, 3, 4, 5, + 6, 7, 8, 9, 1, 2, 3, 4, 5, 6, 7, 8, 9}}, emptyReductionAxes, false, true, 1e-9, Tensor {{1, 3, 3, 3}, ngraph::element::f32, - std::initializer_list({-1.5491934, -1.161895, -0.7745967, -0.38729835, 0., 0.38729835, 0.7745967, 1.161895, 1.5491934, - -1.5491934, -1.161895, -0.7745967, -0.38729835, 0., 0.38729835, 0.7745967, 1.161895, 1.5491934, - -1.5491934, -1.161895, -0.7745967, -0.38729835, 0., 0.38729835, 0.7745967, 1.161895, 1.5491934})}), + std::vector {-1.5491934, -1.161895, -0.7745967, -0.38729835, 0., 0.38729835, 0.7745967, 1.161895, 1.5491934, + -1.5491934, -1.161895, -0.7745967, -0.38729835, 0., 0.38729835, 0.7745967, 1.161895, 1.5491934, + -1.5491934, -1.161895, -0.7745967, -0.38729835, 0., 0.38729835, 0.7745967, 1.161895, 1.5491934}}), // across_channels=true, variance=true - MVN1Params(Tensor {{1, 3, 3, 3}, ngraph::element::f32, std::initializer_list({1, 2, 3, 4, 5, 6, 7, 8, 9, 1, 2, 3, 4, 5, - 6, 7, 8, 9, 1, 2, 3, 4, 5, 6, 7, 8, 9})}, + MVN1Params(Tensor {{1, 3, 3, 3}, ngraph::element::f32, std::vector {1, 2, 3, 4, 5, 6, 7, 8, 9, 1, 2, 3, 4, 5, + 6, 7, 8, 9, 1, 2, 3, 4, 5, 6, 7, 8, 9}}, emptyReductionAxes, true, true, 1e-9, Tensor {{1, 3, 3, 3}, ngraph::element::f32, - std::initializer_list({-1.5491934, -1.161895, -0.7745967, -0.38729835, 0., 0.38729835, 0.7745967, 1.161895, 1.5491934, - -1.5491934, -1.161895, -0.7745967, -0.38729835, 0., 0.38729835, 0.7745967, 1.161895, 1.5491934, - -1.5491934, -1.161895, -0.7745967, -0.38729835, 0., 0.38729835, 0.7745967, 1.161895, 1.5491934})}), + std::vector {-1.5491934, -1.161895, -0.7745967, -0.38729835, 0., 0.38729835, 0.7745967, 1.161895, 1.5491934, + -1.5491934, -1.161895, -0.7745967, -0.38729835, 0., 0.38729835, 0.7745967, 1.161895, 1.5491934, + -1.5491934, -1.161895, -0.7745967, -0.38729835, 0., 0.38729835, 0.7745967, 1.161895, 1.5491934}}), // reductionAxes, variance=false - MVN1Params(Tensor {{1, 3, 2, 2}, ngraph::element::f32, std::initializer_list({1, 2, 3, 4, 5, 6, 7, 8, 9, 1, 2, 3})}, - {1, 2, 3}, - false, - false, - 1e-9, - Tensor {{1, 3, 2, 2}, - ngraph::element::f32, - std::initializer_list({-3.25, -2.25, -1.25, -0.25, 0.75, 1.75, 2.75, 3.75, 4.75, -3.25, -2.25, -1.25})}), + MVN1Params( + Tensor {{1, 3, 2, 2}, ngraph::element::f32, std::vector {1, 2, 3, 4, 5, 6, 7, 8, 9, 1, 2, 3}}, + {1, 2, 3}, + false, + false, + 
1e-9, + Tensor {{1, 3, 2, 2}, ngraph::element::f32, std::vector {-3.25, -2.25, -1.25, -0.25, 0.75, 1.75, 2.75, 3.75, 4.75, -3.25, -2.25, -1.25}}), // reductionAxes, variance=true - MVN1Params(Tensor {{1, 3, 3, 3}, ngraph::element::f32, std::initializer_list({1, 2, 3, 4, 5, 6, 7, 8, 9, 1, 2, 3, 4, 5, - 6, 7, 8, 9, 1, 2, 3, 4, 5, 6, 7, 8, 9})}, + MVN1Params(Tensor {{1, 3, 3, 3}, ngraph::element::f32, std::vector {1, 2, 3, 4, 5, 6, 7, 8, 9, 1, 2, 3, 4, 5, + 6, 7, 8, 9, 1, 2, 3, 4, 5, 6, 7, 8, 9}}, {2, 3}, false, true, 1e-9, Tensor {{1, 3, 3, 3}, ngraph::element::f32, - std::initializer_list({-1.5491934, -1.161895, -0.7745967, -0.38729835, 0., 0.38729835, 0.7745967, 1.161895, 1.5491934, - -1.5491934, -1.161895, -0.7745967, -0.38729835, 0., 0.38729835, 0.7745967, 1.161895, 1.5491934, - -1.5491934, -1.161895, -0.7745967, -0.38729835, 0., 0.38729835, 0.7745967, 1.161895, 1.5491934})})), + std::vector {-1.5491934, -1.161895, -0.7745967, -0.38729835, 0., 0.38729835, 0.7745967, 1.161895, 1.5491934, + -1.5491934, -1.161895, -0.7745967, -0.38729835, 0., 0.38729835, 0.7745967, 1.161895, 1.5491934, + -1.5491934, -1.161895, -0.7745967, -0.38729835, 0., 0.38729835, 0.7745967, 1.161895, 1.5491934}})), ReferenceMVN1LayerTest::getTestCaseName); // ------------------------------ V6 ------------------------------ @@ -221,48 +207,48 @@ INSTANTIATE_TEST_SUITE_P( smoke_MVN6_With_Hardcoded_Refs, ReferenceMVN6LayerTest, ::testing::Values( // variance=false, OUTSIDE_SQRT - MVN6Params(Tensor {{1, 3, 3, 3}, ngraph::element::f32, std::initializer_list({1, 2, 3, 4, 5, 6, 7, 8, 9, 1, 2, 3, 4, 5, - 6, 7, 8, 9, 1, 2, 3, 4, 5, 6, 7, 8, 9})}, - Tensor {Shape {2}, ngraph::element::i64, std::initializer_list({2, 3})}, + MVN6Params(Tensor {{1, 3, 3, 3}, ngraph::element::f32, std::vector {1, 2, 3, 4, 5, 6, 7, 8, 9, 1, 2, 3, 4, 5, + 6, 7, 8, 9, 1, 2, 3, 4, 5, 6, 7, 8, 9}}, + Tensor {Shape {2}, ngraph::element::i64, std::vector {2, 3}}, false, 1e-9, ngraph::op::MVNEpsMode::OUTSIDE_SQRT, - Tensor {{1, 3, 3, 3}, ngraph::element::f32, std::initializer_list({-4, -3, -2, -1, 0, 1, 2, 3, 4, -4, -3, -2, -1, 0, - 1, 2, 3, 4, -4, -3, -2, -1, 0, 1, 2, 3, 4})}), + Tensor {{1, 3, 3, 3}, ngraph::element::f32, std::vector {-4, -3, -2, -1, 0, 1, 2, 3, 4, -4, -3, -2, -1, 0, + 1, 2, 3, 4, -4, -3, -2, -1, 0, 1, 2, 3, 4}}), // variance=true, OUTSIDE_SQRT - MVN6Params(Tensor {{1, 3, 3, 3}, ngraph::element::f32, std::initializer_list({1, 2, 3, 4, 5, 6, 7, 8, 9, 1, 2, 3, 4, 5, - 6, 7, 8, 9, 1, 2, 3, 4, 5, 6, 7, 8, 9})}, - Tensor {Shape {2}, ngraph::element::i64, std::initializer_list({2, 3})}, + MVN6Params(Tensor {{1, 3, 3, 3}, ngraph::element::f32, std::vector {1, 2, 3, 4, 5, 6, 7, 8, 9, 1, 2, 3, 4, 5, + 6, 7, 8, 9, 1, 2, 3, 4, 5, 6, 7, 8, 9}}, + Tensor {Shape {2}, ngraph::element::i64, std::vector {2, 3}}, true, 1e-9, ngraph::op::MVNEpsMode::OUTSIDE_SQRT, Tensor {{1, 3, 3, 3}, ngraph::element::f32, - std::initializer_list({-1.5491934, -1.161895, -0.7745967, -0.38729835, 0., 0.38729835, 0.7745967, 1.161895, 1.5491934, - -1.5491934, -1.161895, -0.7745967, -0.38729835, 0., 0.38729835, 0.7745967, 1.161895, 1.5491934, - -1.5491934, -1.161895, -0.7745967, -0.38729835, 0., 0.38729835, 0.7745967, 1.161895, 1.5491934})}), + std::vector {-1.5491934, -1.161895, -0.7745967, -0.38729835, 0., 0.38729835, 0.7745967, 1.161895, 1.5491934, + -1.5491934, -1.161895, -0.7745967, -0.38729835, 0., 0.38729835, 0.7745967, 1.161895, 1.5491934, + -1.5491934, -1.161895, -0.7745967, -0.38729835, 0., 0.38729835, 0.7745967, 1.161895, 1.5491934}}), // variance=true, INSIDE_SQRT - 
MVN6Params(Tensor {{1, 3, 3, 3}, ngraph::element::f32, std::initializer_list({1, 2, 3, 4, 5, 6, 7, 8, 9, 1, 2, 3, 4, 5, - 6, 7, 8, 9, 1, 2, 3, 4, 5, 6, 7, 8, 9})}, - Tensor {Shape {2}, ngraph::element::i64, std::initializer_list({2, 3})}, + MVN6Params(Tensor {{1, 3, 3, 3}, ngraph::element::f32, std::vector {1, 2, 3, 4, 5, 6, 7, 8, 9, 1, 2, 3, 4, 5, + 6, 7, 8, 9, 1, 2, 3, 4, 5, 6, 7, 8, 9}}, + Tensor {Shape {2}, ngraph::element::i64, std::vector {2, 3}}, true, 1e-9, ngraph::op::MVNEpsMode::INSIDE_SQRT, Tensor {{1, 3, 3, 3}, ngraph::element::f32, - std::initializer_list({-1.5491934, -1.161895, -0.7745967, -0.38729835, 0., 0.38729835, 0.7745967, 1.161895, 1.5491934, - -1.5491934, -1.161895, -0.7745967, -0.38729835, 0., 0.38729835, 0.7745967, 1.161895, 1.5491934, - -1.5491934, -1.161895, -0.7745967, -0.38729835, 0., 0.38729835, 0.7745967, 1.161895, 1.5491934})}), + std::vector {-1.5491934, -1.161895, -0.7745967, -0.38729835, 0., 0.38729835, 0.7745967, 1.161895, 1.5491934, + -1.5491934, -1.161895, -0.7745967, -0.38729835, 0., 0.38729835, 0.7745967, 1.161895, 1.5491934, + -1.5491934, -1.161895, -0.7745967, -0.38729835, 0., 0.38729835, 0.7745967, 1.161895, 1.5491934}}), // variance=true, another reductionAxes, OUTSIDE_SQRT - MVN6Params(Tensor {{1, 3, 3, 3}, ngraph::element::f32, std::initializer_list({1, 2, 3, 4, 5, 6, 7, 8, 9, 1, 2, 3, 4, 5, - 6, 7, 8, 9, 1, 2, 3, 4, 5, 6, 7, 8, 9})}, - Tensor {Shape {3}, ngraph::element::i64, std::initializer_list({1, 2, 3})}, + MVN6Params(Tensor {{1, 3, 3, 3}, ngraph::element::f32, std::vector({1, 2, 3, 4, 5, 6, 7, 8, 9, 1, 2, 3, 4, 5, + 6, 7, 8, 9, 1, 2, 3, 4, 5, 6, 7, 8, 9})}, + Tensor {Shape {3}, ngraph::element::i64, std::vector({1, 2, 3})}, true, 1e-9, ngraph::op::MVNEpsMode::OUTSIDE_SQRT, Tensor {{1, 3, 3, 3}, ngraph::element::f32, - std::initializer_list({-1.5491934, -1.161895, -0.7745967, -0.38729835, 0., 0.38729835, 0.7745967, 1.161895, 1.5491934, - -1.5491934, -1.161895, -0.7745967, -0.38729835, 0., 0.38729835, 0.7745967, 1.161895, 1.5491934, - -1.5491934, -1.161895, -0.7745967, -0.38729835, 0., 0.38729835, 0.7745967, 1.161895, 1.5491934})})), + std::vector {-1.5491934, -1.161895, -0.7745967, -0.38729835, 0., 0.38729835, 0.7745967, 1.161895, 1.5491934, + -1.5491934, -1.161895, -0.7745967, -0.38729835, 0., 0.38729835, 0.7745967, 1.161895, 1.5491934, + -1.5491934, -1.161895, -0.7745967, -0.38729835, 0., 0.38729835, 0.7745967, 1.161895, 1.5491934}})), ReferenceMVN6LayerTest::getTestCaseName); diff --git a/inference-engine/tests/ngraph_helpers/ngraph_functions/include/ngraph_functions/builders.hpp b/inference-engine/tests/ngraph_helpers/ngraph_functions/include/ngraph_functions/builders.hpp index e0b75a7b4a6a1e..55a8f48297823d 100644 --- a/inference-engine/tests/ngraph_helpers/ngraph_functions/include/ngraph_functions/builders.hpp +++ b/inference-engine/tests/ngraph_helpers/ngraph_functions/include/ngraph_functions/builders.hpp @@ -292,7 +292,7 @@ std::shared_ptr makeMVN(const ngraph::Output &in, double eps); std::shared_ptr makeMVN(const ngraph::Output &in, - ngraph::AxisSet axes, + const ngraph::AxisSet &axes, bool normalizeVariance, double eps); diff --git a/inference-engine/tests/ngraph_helpers/ngraph_functions/src/mvn.cpp b/inference-engine/tests/ngraph_helpers/ngraph_functions/src/mvn.cpp index c6b8af283dd168..f4f73e93852a24 100644 --- a/inference-engine/tests/ngraph_helpers/ngraph_functions/src/mvn.cpp +++ b/inference-engine/tests/ngraph_helpers/ngraph_functions/src/mvn.cpp @@ -25,7 +25,7 @@ std::shared_ptr makeMVN(const ngraph::Output &in, } 
std::shared_ptr makeMVN(const ngraph::Output &in, - ngraph::AxisSet axes, + const ngraph::AxisSet &axes, bool normalizeVariance, double eps) { auto mvnNode = std::make_shared(in, axes, normalizeVariance, eps);
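After this change, the makeMVN overload that accepts reduction axes takes them by const reference, so a temporary AxisSet binds without an extra copy. A hypothetical call site, assuming the ngraph::builder namespace used by these helper functions (the parameter shape and axes are illustrative only):

    // Create an MVN node via the test builder, passing the axes as a temporary.
    const auto input = std::make_shared<ngraph::op::Parameter>(ngraph::element::f32, ngraph::Shape{1, 3, 3, 3});
    const auto mvn   = ngraph::builder::makeMVN(input, ngraph::AxisSet{2, 3}, true, 1e-9);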