diff --git a/inference-engine/tests/functional/plugin/cpu/shared_tests_instances/subgraph_tests/mvn_fq_mvn.cpp b/inference-engine/tests/functional/plugin/cpu/shared_tests_instances/subgraph_tests/mvn_fq_mvn.cpp
new file mode 100644
index 00000000000000..a79d423a1e80d6
--- /dev/null
+++ b/inference-engine/tests/functional/plugin/cpu/shared_tests_instances/subgraph_tests/mvn_fq_mvn.cpp
@@ -0,0 +1,83 @@
+// Copyright (C) 2021 Intel Corporation
+// SPDX-License-Identifier: Apache-2.0
+//
+
+#include "subgraph_tests/mvn_fq_mvn.hpp"
+
+using namespace SubgraphTestsDefinitions;
+using namespace InferenceEngine;
+
+namespace {
+
+const std::vector<InferenceEngine::Precision> netPrecision = {
+    Precision::FP32
+};
+
+std::vector<InferenceEngine::Precision> idxPrecision = {
+    Precision::I64
+};
+
+const std::vector<bool> normalizeVariance = {
+    true,
+    false
+};
+
+const std::vector<float> epsilon = {
+    0.000000001
+};
+
+const std::vector<std::string> epsMode = {
+    "inside_sqrt",
+    "outside_sqrt"
+};
+
+const std::vector<size_t> level = {255};
+
+const std::vector<std::vector<size_t>> constShapes = {
+    {1, 1, 1, 1},
+    {1, 5, 1, 1}
+};
+
+const std::vector<std::vector<float>> inputParams = {
+    {-10, 10, 0.2},
+    {0, 10, 0.2}
+};
+
+const auto fqParams = ::testing::Combine(
+    ::testing::ValuesIn(level),
+    ::testing::ValuesIn(constShapes),
+    ::testing::ValuesIn(inputParams)
+);
+
+const std::vector<std::vector<size_t>> dataShapes = {
+    {1, 5, 1, 1},
+    {1, 5, 1, 2},
+    {1, 5, 1, 3},
+    {1, 5, 1, 4},
+    {1, 5, 1, 5},
+    {1, 5, 1, 6},
+    {1, 5, 1, 7},
+    {1, 5, 1, 8},
+    {1, 5, 1, 9},
+    {1, 5, 1, 10},
+    {1, 5, 1, 11},
+    {1, 5, 1, 12},
+    {1, 5, 1, 13},
+    {1, 5, 1, 14},
+    {1, 5, 1, 15},
+    {1, 5, 1, 16}
+};
+
+INSTANTIATE_TEST_CASE_P(smoke_MVNFqMVN, MvnFqMvnSubgraphTest,
+                        ::testing::Combine(
+                                fqParams,
+                                ::testing::ValuesIn(dataShapes),
+                                ::testing::ValuesIn(netPrecision),
+                                ::testing::ValuesIn(idxPrecision),
+                                ::testing::Values(std::vector<int>{2, 3}),
+                                ::testing::ValuesIn(normalizeVariance),
+                                ::testing::ValuesIn(epsilon),
+                                ::testing::ValuesIn(epsMode),
+                                ::testing::Values(CommonTestUtils::DEVICE_CPU)),
+                        MvnFqMvnSubgraphTest::getTestCaseName);
+} // namespace
diff --git a/inference-engine/tests/functional/plugin/shared/include/subgraph_tests/mvn_fq_mvn.hpp b/inference-engine/tests/functional/plugin/shared/include/subgraph_tests/mvn_fq_mvn.hpp
new file mode 100644
index 00000000000000..865f39c8499238
--- /dev/null
+++ b/inference-engine/tests/functional/plugin/shared/include/subgraph_tests/mvn_fq_mvn.hpp
@@ -0,0 +1,15 @@
+// Copyright (C) 2021 Intel Corporation
+// SPDX-License-Identifier: Apache-2.0
+//
+
+#pragma once
+
+#include "shared_test_classes/subgraph/mvn_fq_mvn.hpp"
+
+namespace SubgraphTestsDefinitions {
+
+TEST_P(MvnFqMvnSubgraphTest, CompareWithRefs) {
+    Run();
+};
+
+} // namespace SubgraphTestsDefinitions
diff --git a/inference-engine/tests/functional/shared_test_classes/include/shared_test_classes/subgraph/mvn_fq_mvn.hpp b/inference-engine/tests/functional/shared_test_classes/include/shared_test_classes/subgraph/mvn_fq_mvn.hpp
new file mode 100644
index 00000000000000..5b696d77609965
--- /dev/null
+++ b/inference-engine/tests/functional/shared_test_classes/include/shared_test_classes/subgraph/mvn_fq_mvn.hpp
@@ -0,0 +1,49 @@
+// Copyright (C) 2021 Intel Corporation
+// SPDX-License-Identifier: Apache-2.0
+//
+
+#pragma once
+
+#include <tuple>
+#include <string>
+#include <vector>
+#include <memory>
+#include <ngraph_functions/builders.hpp>
+#include <ngraph_functions/utils/ngraph_helpers.hpp>
+#include "../base/layer_test_utils.hpp"
+
+namespace SubgraphTestsDefinitions {
+typedef std::tuple<
+        size_t,               // levels
+        std::vector<size_t>,  // const inputs shape
+        std::vector<float>    // input generator data: low, high, resolution
+> fqSpecificParams;
+
+typedef std::tuple<
+        fqSpecificParams,
+        InferenceEngine::SizeVector,    // Input shapes
+        InferenceEngine::Precision,     // Input precision
+        InferenceEngine::Precision,     // Axes precision
+        std::vector<int>,               // Axes
+        bool,                           // Normalize variance
+        float,                          // Epsilon
+        std::string,                    // Epsilon mode
+        LayerTestsUtils::TargetDevice   // Device name
+> fqSubgraphTestParamsSet;
+
+class MvnFqMvnSubgraphTest : public testing::WithParamInterface<fqSubgraphTestParamsSet>,
+                             virtual public LayerTestsUtils::LayerTestsCommon {
+public:
+    static std::string getTestCaseName(testing::TestParamInfo<fqSubgraphTestParamsSet> obj);
+
+protected:
+    void SetUp() override;
+    InferenceEngine::Blob::Ptr GenerateInput(const InferenceEngine::InputInfo &info) const override;
+
+protected:
+    float inputDataMin = 0.0;
+    float inputDataMax = 10.0;
+    float inputDataResolution = 1.0;
+    int32_t seed = 1;
+};
+} // namespace SubgraphTestsDefinitions
diff --git a/inference-engine/tests/functional/shared_test_classes/src/subgraph/mvn_fq_mvn.cpp b/inference-engine/tests/functional/shared_test_classes/src/subgraph/mvn_fq_mvn.cpp
new file mode 100644
index 00000000000000..92d5bc33b888ff
--- /dev/null
+++ b/inference-engine/tests/functional/shared_test_classes/src/subgraph/mvn_fq_mvn.cpp
@@ -0,0 +1,84 @@
+// Copyright (C) 2021 Intel Corporation
+// SPDX-License-Identifier: Apache-2.0
+//
+
+#include <ngraph_functions/builders.hpp>
+#include "shared_test_classes/subgraph/mvn_fq_mvn.hpp"
+
+namespace SubgraphTestsDefinitions {
+
+    std::string MvnFqMvnSubgraphTest::getTestCaseName(testing::TestParamInfo<fqSubgraphTestParamsSet> obj) {
+        fqSpecificParams fqParams;
+        InferenceEngine::SizeVector inputShapes;
+        InferenceEngine::Precision dataPrecision, axesPrecision;
+        std::vector<int> axes;
+        bool normalizeVariance;
+        float eps;
+        std::string epsMode;
+        std::string targetDevice;
+        std::tie(fqParams, inputShapes, dataPrecision, axesPrecision, axes, normalizeVariance, eps, epsMode, targetDevice) = obj.param;
+
+        size_t levels;
+        std::vector<size_t> constShape;
+        std::vector<float> inputArg;
+        std::tie(levels, constShape, inputArg) = fqParams;
+
+        std::ostringstream result;
+        result << "IS=" << CommonTestUtils::vec2str(inputShapes) << "_";
+        result << "DataPrc=" << dataPrecision.name() << "_";
+        result << "AxPrc=" << axesPrecision.name() << "_";
+        result << "Ax=" << CommonTestUtils::vec2str(axes) << "_";
+        result << "NormVariance=" << (normalizeVariance ? "TRUE" : "FALSE") << "_";
"TRUE" : "FALSE") << "_"; + result << "Eps=" << eps << "_"; + result << "EM=" << epsMode << "_"; + result << "LEVELS=" << levels << "_"; + result << "CS=" << CommonTestUtils::vec2str(constShape) << "_"; + if (inputArg.size() == 3) { + result << "_inputArg=" << inputArg[0] << "_" << inputArg[1] << "_" << inputArg[2]; + } + result << "TargetDevice=" << targetDevice; + return result.str(); + } + + void MvnFqMvnSubgraphTest::SetUp() { + fqSpecificParams fqParams; + InferenceEngine::SizeVector inputShapes; + InferenceEngine::Precision dataPrecision, axesPrecision; + std::vector axes; + bool normalizeVariance; + float eps; + std::string epsMode; + std::tie(fqParams, inputShapes, dataPrecision, axesPrecision, axes, normalizeVariance, eps, epsMode, targetDevice) = this->GetParam(); + + size_t levels; + std::vector constShape; + std::vector inputArg; + std::tie(levels, constShape, inputArg) = fqParams; + if (inputArg.size() == 3) { + inputDataMin = inputArg[0]; + inputDataMax = inputArg[1]; + inputDataResolution = inputArg[2]; + } + + auto dataType = FuncTestUtils::PrecisionUtils::convertIE2nGraphPrc(dataPrecision); + auto axesType = FuncTestUtils::PrecisionUtils::convertIE2nGraphPrc(axesPrecision); + + auto params = ngraph::builder::makeParams(dataType, {inputShapes}); + auto paramOuts = ngraph::helpers::convert2OutputVector(ngraph::helpers::castOps2Nodes(params)); + auto axesNode = ngraph::builder::makeConstant(axesType, ngraph::Shape{axes.size()}, axes); + auto mvn1 = ngraph::builder::makeMVN6(paramOuts[0], axesNode, normalizeVariance, eps, epsMode); + + auto FQNode = ngraph::builder::makeFakeQuantize(mvn1, ngraph::element::f32, levels, constShape, + { inputDataMin }, { inputDataMax }, { inputDataMin }, { inputDataMax }); + + auto mvn2 = ngraph::builder::makeMVN6(FQNode, axesNode, normalizeVariance, eps, epsMode); + + ngraph::ResultVector results{std::make_shared(mvn2)}; + function = std::make_shared(results, params, "MvnFqMvnSubgraph"); + } + +InferenceEngine::Blob::Ptr MvnFqMvnSubgraphTest::GenerateInput(const InferenceEngine::InputInfo &info) const { + return FuncTestUtils::createAndFillBlob(info.getTensorDesc(), inputDataMax - inputDataMin, inputDataMin, 1 / inputDataResolution, + seed); +} +} // namespace SubgraphTestsDefinitions