MVN test extensions #6291

Closed
@@ -0,0 +1,83 @@
// Copyright (C) 2021 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//

#include "subgraph_tests/mvn_fq_mvn.hpp"

using namespace SubgraphTestsDefinitions;
using namespace InferenceEngine;

namespace {

const std::vector<Precision> netPrecision = {
    Precision::FP32
};

const std::vector<Precision> idxPrecision = {
    Precision::I64
};

const std::vector<bool> normalizeVariance = {
    true,
    false
};

const std::vector<float> epsilon = {
    0.000000001
};

const std::vector<std::string> epsMode = {
    "inside_sqrt",
    "outside_sqrt"
};

const std::vector<size_t> level = {255};

const std::vector<std::vector<size_t>> constShapes = {
    {1, 1, 1, 1},
    {1, 5, 1, 1}
};

const std::vector<std::vector<float>> inputParams = {
    {-10, 10, 0.2},
    {0, 10, 0.2}
};

const auto fqParams = ::testing::Combine(
    ::testing::ValuesIn(level),
    ::testing::ValuesIn(constShapes),
    ::testing::ValuesIn(inputParams)
);

const std::vector<SizeVector> dataShapes = {
    {1, 5, 1, 1},
    {1, 5, 1, 2},
    {1, 5, 1, 3},
    {1, 5, 1, 4},
    {1, 5, 1, 5},
    {1, 5, 1, 6},
    {1, 5, 1, 7},
    {1, 5, 1, 8},
    {1, 5, 1, 9},
    {1, 5, 1, 10},
    {1, 5, 1, 11},
    {1, 5, 1, 12},
    {1, 5, 1, 13},
    {1, 5, 1, 14},
    {1, 5, 1, 15},
    {1, 5, 1, 16}
};

INSTANTIATE_TEST_CASE_P(smoke_MVNFqMVN, MvnFqMvnSubgraphTest,
    ::testing::Combine(
        fqParams,
        ::testing::ValuesIn(dataShapes),
        ::testing::ValuesIn(netPrecision),
        ::testing::ValuesIn(idxPrecision),
        ::testing::Values(std::vector<int>{2, 3}),
        ::testing::ValuesIn(normalizeVariance),
        ::testing::ValuesIn(epsilon),
        ::testing::ValuesIn(epsMode),
        ::testing::Values(CommonTestUtils::DEVICE_CPU)),
    MvnFqMvnSubgraphTest::getTestCaseName);
} // namespace
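For orientation, the Combine above expands as a full Cartesian product of the value lists, so this single instantiation yields 1 level x 2 constShapes x 2 inputParams x 16 dataShapes x 2 normalizeVariance x 2 epsMode = 256 CPU test cases. A minimal standalone sketch of that count, assuming the lists stay as defined above (this snippet is not code from the PR):

#include <cstddef>
#include <iostream>

int main() {
    // Factors mirror the ValuesIn/Values lists passed to smoke_MVNFqMVN above.
    constexpr std::size_t fqCombos  = 1 /*level*/ * 2 /*constShapes*/ * 2 /*inputParams*/;
    constexpr std::size_t mvnCombos = 16 /*dataShapes*/ * 1 /*netPrecision*/ * 1 /*idxPrecision*/ *
                                      1 /*axes*/ * 2 /*normalizeVariance*/ * 1 /*epsilon*/ *
                                      2 /*epsMode*/ * 1 /*device*/;
    std::cout << fqCombos * mvnCombos << " instantiated cases\n";  // prints "256 instantiated cases"
    return 0;
}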
@@ -0,0 +1,15 @@
// Copyright (C) 2021 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//

#pragma once

#include "shared_test_classes/subgraph/mvn_fq_mvn.hpp"

namespace SubgraphTestsDefinitions {

TEST_P(MvnFqMvnSubgraphTest, CompareWithRefs) {
    Run();
}

} // namespace SubgraphTestsDefinitions
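The TEST_P above only supplies the parameterized body (a call to Run()); the concrete cases come from the INSTANTIATE_TEST_CASE_P call in the device-specific file shown earlier. A minimal self-contained sketch of that googletest pattern, using an illustrative SquareTest fixture that is not part of this PR:

#include <gtest/gtest.h>

// Illustrative fixture: the parameter type here is a plain int instead of fqSubgraphTestParamsSet.
class SquareTest : public ::testing::TestWithParam<int> {};

// Runs once for every value bound by an instantiation below.
TEST_P(SquareTest, NonNegative) {
    const int x = GetParam();
    EXPECT_GE(x * x, 0);
}

// Supplies the concrete values, mirroring how the CPU file instantiates MvnFqMvnSubgraphTest.
INSTANTIATE_TEST_CASE_P(smoke_Square, SquareTest, ::testing::Values(-2, 0, 3));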
@@ -0,0 +1,49 @@
// Copyright (C) 2021 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//

#pragma once

#include <tuple>
#include <vector>
#include <string>
#include <memory>
#include <ie_precision.hpp>
#include <ie_common.h>
#include "../base/layer_test_utils.hpp"

namespace SubgraphTestsDefinitions {
typedef std::tuple<
    size_t,                 // levels
    std::vector<size_t>,    // const inputs shape
    std::vector<float>      // input generator data: low, high, resolution
> fqSpecificParams;

typedef std::tuple<
    fqSpecificParams,
    InferenceEngine::SizeVector,    // Input shapes
    InferenceEngine::Precision,     // Input precision
    InferenceEngine::Precision,     // Axes precision
    std::vector<int>,               // Axes
    bool,                           // Normalize variance
    float,                          // Epsilon
    std::string,                    // Epsilon mode
    LayerTestsUtils::TargetDevice   // Device name
> fqSubgraphTestParamsSet;

class MvnFqMvnSubgraphTest : public testing::WithParamInterface<fqSubgraphTestParamsSet>,
                             virtual public LayerTestsUtils::LayerTestsCommon {
public:
    static std::string getTestCaseName(testing::TestParamInfo<fqSubgraphTestParamsSet> obj);

protected:
    void SetUp() override;
    InferenceEngine::Blob::Ptr GenerateInput(const InferenceEngine::InputInfo &info) const override;

protected:
    float inputDataMin = 0.0;
    float inputDataMax = 10.0;
    float inputDataResolution = 1.0;
    int32_t seed = 1;
};
} // namespace SubgraphTestsDefinitions
@@ -0,0 +1,84 @@
// Copyright (C) 2021 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//

#include <ngraph_functions/builders.hpp>
#include "shared_test_classes/subgraph/mvn_fq_mvn.hpp"

namespace SubgraphTestsDefinitions {

std::string MvnFqMvnSubgraphTest::getTestCaseName(testing::TestParamInfo<fqSubgraphTestParamsSet> obj) {
    fqSpecificParams fqParams;
    InferenceEngine::SizeVector inputShapes;
    InferenceEngine::Precision dataPrecision, axesPrecision;
    std::vector<int> axes;
    bool normalizeVariance;
    float eps;
    std::string epsMode;
    std::string targetDevice;
    std::tie(fqParams, inputShapes, dataPrecision, axesPrecision, axes, normalizeVariance, eps, epsMode, targetDevice) = obj.param;

    size_t levels;
    std::vector<size_t> constShape;
    std::vector<float> inputArg;
    std::tie(levels, constShape, inputArg) = fqParams;

    std::ostringstream result;
    result << "IS=" << CommonTestUtils::vec2str(inputShapes) << "_";
    result << "DataPrc=" << dataPrecision.name() << "_";
    result << "AxPrc=" << axesPrecision.name() << "_";
    result << "Ax=" << CommonTestUtils::vec2str(axes) << "_";
    result << "NormVariance=" << (normalizeVariance ? "TRUE" : "FALSE") << "_";
    result << "Eps=" << eps << "_";
    result << "EM=" << epsMode << "_";
    result << "LEVELS=" << levels << "_";
    result << "CS=" << CommonTestUtils::vec2str(constShape) << "_";
    if (inputArg.size() == 3) {
        result << "_inputArg=" << inputArg[0] << "_" << inputArg[1] << "_" << inputArg[2];
    }
    result << "TargetDevice=" << targetDevice;
    return result.str();
}

void MvnFqMvnSubgraphTest::SetUp() {
    fqSpecificParams fqParams;
    InferenceEngine::SizeVector inputShapes;
    InferenceEngine::Precision dataPrecision, axesPrecision;
    std::vector<int> axes;
    bool normalizeVariance;
    float eps;
    std::string epsMode;
    std::tie(fqParams, inputShapes, dataPrecision, axesPrecision, axes, normalizeVariance, eps, epsMode, targetDevice) = this->GetParam();

    size_t levels;
    std::vector<size_t> constShape;
    std::vector<float> inputArg;
    std::tie(levels, constShape, inputArg) = fqParams;
    if (inputArg.size() == 3) {
        inputDataMin = inputArg[0];
        inputDataMax = inputArg[1];
        inputDataResolution = inputArg[2];
    }

    auto dataType = FuncTestUtils::PrecisionUtils::convertIE2nGraphPrc(dataPrecision);
    auto axesType = FuncTestUtils::PrecisionUtils::convertIE2nGraphPrc(axesPrecision);

    // Build the tested subgraph: Parameter -> MVN-6 -> FakeQuantize -> MVN-6 -> Result.
    auto params = ngraph::builder::makeParams(dataType, {inputShapes});
    auto paramOuts = ngraph::helpers::convert2OutputVector(ngraph::helpers::castOps2Nodes<ngraph::op::Parameter>(params));
    auto axesNode = ngraph::builder::makeConstant(axesType, ngraph::Shape{axes.size()}, axes);
    auto mvn1 = ngraph::builder::makeMVN6(paramOuts[0], axesNode, normalizeVariance, eps, epsMode);

    auto FQNode = ngraph::builder::makeFakeQuantize(mvn1, ngraph::element::f32, levels, constShape,
                                                    { inputDataMin }, { inputDataMax }, { inputDataMin }, { inputDataMax });

    auto mvn2 = ngraph::builder::makeMVN6(FQNode, axesNode, normalizeVariance, eps, epsMode);

    ngraph::ResultVector results{std::make_shared<ngraph::opset1::Result>(mvn2)};
    function = std::make_shared<ngraph::Function>(results, params, "MvnFqMvnSubgraph");
}

InferenceEngine::Blob::Ptr MvnFqMvnSubgraphTest::GenerateInput(const InferenceEngine::InputInfo &info) const {
    // Fill the input starting at inputDataMin over a range of (inputDataMax - inputDataMin);
    // the filler's resolution argument is steps-per-unit, hence the reciprocal of inputDataResolution (e.g. 0.2 -> 5).
    return FuncTestUtils::createAndFillBlob(info.getTensorDesc(), inputDataMax - inputDataMin, inputDataMin, 1 / inputDataResolution,
                                            seed);
}
} // namespace SubgraphTestsDefinitions