update FakeQuantizeLayerTest - add broadcast (openvinotoolkit#6813)
pelszkow authored and akuporos committed Sep 29, 2021
1 parent 70f727d commit 4f6b401
Showing 6 changed files with 25 additions and 21 deletions.
@@ -42,7 +42,7 @@ const auto fqParams = ::testing::Combine(
::testing::ValuesIn(broadcasts)
);

-INSTANTIATE_TEST_SUITE_P(smoke_FakeQuantize, FakeQuantizeLayerTestRevise,
+INSTANTIATE_TEST_SUITE_P(smoke_FakeQuantize, FakeQuantizeLayerTest,
::testing::Combine(
fqParams,
::testing::ValuesIn(netPrecisions),
@@ -53,7 +53,7 @@ INSTANTIATE_TEST_SUITE_P(smoke_FakeQuantize, FakeQuantizeLayerTestRevise,
::testing::ValuesIn(inputShapes),
::testing::Values(CommonTestUtils::DEVICE_CPU),
::testing::Values(config)),
-FakeQuantizeLayerTestRevise::getTestCaseName);
+FakeQuantizeLayerTest::getTestCaseName);


const std::vector<size_t> singleShape = {3, 4, 2, 5};
@@ -65,7 +65,7 @@ const auto noneBroadcastFqParams = ::testing::Combine(
::testing::Values(noneBroadcast)
);

-INSTANTIATE_TEST_SUITE_P(smoke_FakeQuantizeNoneBroadcast, FakeQuantizeLayerTestRevise,
+INSTANTIATE_TEST_SUITE_P(smoke_FakeQuantizeNoneBroadcast, FakeQuantizeLayerTest,
::testing::Combine(
noneBroadcastFqParams,
::testing::ValuesIn(netPrecisions),
@@ -76,7 +76,7 @@ INSTANTIATE_TEST_SUITE_P(smoke_FakeQuantizeNoneBroadcast, FakeQuantizeLayerTestRevise,
::testing::Values(singleShape),
::testing::Values(CommonTestUtils::DEVICE_CPU),
::testing::Values(config)),
-FakeQuantizeLayerTestRevise::getTestCaseName);
+FakeQuantizeLayerTest::getTestCaseName);

const std::vector<std::vector<size_t>> inputShapesPerChannel = {{11, 10, 22, 19}, {11, 10, 5, 6}};
const std::vector<std::vector<size_t>> constShapesPerChannelAxis0 = {{11, 1, 1, 1}};
@@ -98,7 +98,7 @@ const auto fqParamsPerChannelAxis1 = ::testing::Combine(
::testing::Values(numpyBroadcast)
);

-INSTANTIATE_TEST_SUITE_P(smoke_FakeQuantizePerChannelAxis0, FakeQuantizeLayerTestRevise,
+INSTANTIATE_TEST_SUITE_P(smoke_FakeQuantizePerChannelAxis0, FakeQuantizeLayerTest,
::testing::Combine(
fqParamsPerChannelAxis0,
::testing::ValuesIn(netPrecisions),
@@ -109,9 +109,9 @@ INSTANTIATE_TEST_SUITE_P(smoke_FakeQuantizePerChannelAxis0, FakeQuantizeLayerTestRevise,
::testing::ValuesIn(inputShapesPerChannel),
::testing::Values(CommonTestUtils::DEVICE_CPU),
::testing::Values(config)),
-FakeQuantizeLayerTestRevise::getTestCaseName);
+FakeQuantizeLayerTest::getTestCaseName);

-INSTANTIATE_TEST_SUITE_P(smoke_FakeQuantizePerChannelAxis1, FakeQuantizeLayerTestRevise,
+INSTANTIATE_TEST_SUITE_P(smoke_FakeQuantizePerChannelAxis1, FakeQuantizeLayerTest,
::testing::Combine(
fqParamsPerChannelAxis1,
::testing::ValuesIn(netPrecisions),
@@ -122,7 +122,7 @@ INSTANTIATE_TEST_SUITE_P(smoke_FakeQuantizePerChannelAxis1, FakeQuantizeLayerTestRevise,
::testing::ValuesIn(inputShapesPerChannel),
::testing::Values(CommonTestUtils::DEVICE_CPU),
::testing::Values(config)),
-FakeQuantizeLayerTestRevise::getTestCaseName);
+FakeQuantizeLayerTest::getTestCaseName);

const std::vector<std::vector<size_t>> inputShapesPerChannel2D = {{1, 10}};
const std::vector<std::vector<size_t>> constShapesPerChannel2D = { {10}, {1, 10}, {1} };
@@ -134,7 +134,7 @@ const auto fqParamsPerChannel2D = ::testing::Combine(
::testing::Values(numpyBroadcast)
);

-INSTANTIATE_TEST_SUITE_P(smoke_FakeQuantizePerChannel2D, FakeQuantizeLayerTestRevise,
+INSTANTIATE_TEST_SUITE_P(smoke_FakeQuantizePerChannel2D, FakeQuantizeLayerTest,
::testing::Combine(
fqParamsPerChannel2D,
::testing::ValuesIn(netPrecisions),
@@ -145,6 +145,6 @@ INSTANTIATE_TEST_SUITE_P(smoke_FakeQuantizePerChannel2D, FakeQuantizeLayerTestRevise,
::testing::ValuesIn(inputShapesPerChannel2D),
::testing::Values(CommonTestUtils::DEVICE_CPU),
::testing::Values(config)),
-FakeQuantizeLayerTestRevise::getTestCaseName);
+FakeQuantizeLayerTest::getTestCaseName);

} // namespace
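The CPU instantiations above consume three broadcast-related values (`broadcasts`, `noneBroadcast`, `numpyBroadcast`) that are defined earlier in the same file, outside the hunks shown here. A minimal sketch of how such definitions could look, assuming plain `AutoBroadcastSpec` values; the exact modes and axes used in the real file are not visible in this diff:

```cpp
#include <vector>
#include <ngraph/op/util/attr_types.hpp>  // ngraph::op::AutoBroadcastSpec / AutoBroadcastType

// Hypothetical definitions -- the real ones sit above the first hunk of this file.
// AutoBroadcastSpec pairs a broadcast type with an optional axis (used by PDPD).
const std::vector<ngraph::op::AutoBroadcastSpec> broadcasts = {
    ngraph::op::AutoBroadcastSpec(ngraph::op::AutoBroadcastType::NUMPY),
    ngraph::op::AutoBroadcastSpec(ngraph::op::AutoBroadcastType::PDPD, -1),
};
const ngraph::op::AutoBroadcastSpec noneBroadcast(ngraph::op::AutoBroadcastType::NONE);
const ngraph::op::AutoBroadcastSpec numpyBroadcast(ngraph::op::AutoBroadcastType::NUMPY);
```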
@@ -85,7 +85,7 @@ const auto fqParams = ::testing::Combine(
::testing::Values(ngraph::op::AutoBroadcastType::NUMPY)
);

-INSTANTIATE_TEST_SUITE_P(smoke_FakeQuantize, FakeQuantizeLayerTestRevise,
+INSTANTIATE_TEST_SUITE_P(smoke_FakeQuantize, FakeQuantizeLayerTest,
::testing::Combine(
fqParams,
::testing::ValuesIn(netPrecisions),
@@ -96,6 +96,6 @@ INSTANTIATE_TEST_SUITE_P(smoke_FakeQuantize, FakeQuantizeLayerTestRevise,
::testing::ValuesIn(inputShapes),
::testing::Values(CommonTestUtils::DEVICE_GNA),
::testing::ValuesIn(gnaQuantModes)),
-FakeQuantizeLayerTestRevise::getTestCaseName);
+FakeQuantizeLayerTest::getTestCaseName);

} // namespace
@@ -33,7 +33,7 @@ const auto fqParams = ::testing::Combine(
::testing::Values(ngraph::op::AutoBroadcastType::NUMPY)
);

-INSTANTIATE_TEST_SUITE_P(smoke_FakeQuantize, FakeQuantizeLayerTestRevise,
+INSTANTIATE_TEST_SUITE_P(smoke_FakeQuantize, FakeQuantizeLayerTest,
::testing::Combine(
fqParams,
::testing::ValuesIn(netPrecisions),
@@ -44,6 +44,6 @@ INSTANTIATE_TEST_SUITE_P(smoke_FakeQuantize, FakeQuantizeLayerTestRevise,
::testing::ValuesIn(inputShapes),
::testing::Values(CommonTestUtils::DEVICE_GPU),
::testing::Values(config)),
-FakeQuantizeLayerTestRevise::getTestCaseName);
+FakeQuantizeLayerTest::getTestCaseName);

} // namespace
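Note that, unlike the CPU file, the GNA and GPU instantiations above pin the broadcast dimension of the parameter space to a single value by passing `ngraph::op::AutoBroadcastType::NUMPY` directly; this presumably relies on `AutoBroadcastSpec` being implicitly constructible from an `AutoBroadcastType`, so the bare enum value converts to the spec expected by the updated `fqSpecificParams` tuple.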
@@ -8,7 +8,7 @@

namespace LayerTestsDefinitions {

-TEST_P(FakeQuantizeLayerTestRevise, CompareWithRefs) {
+TEST_P(FakeQuantizeLayerTest, CompareWithRefs) {
Run();
SKIP_IF_CURRENT_TEST_IS_DISABLED();

@@ -28,10 +28,11 @@ namespace LayerTestsDefinitions {


typedef std::tuple<
-size_t, // levels
-std::vector<size_t>, // const inputs shape
-std::vector<float>, // fake quantize inputLow, inputHigh, outputLow, outputHigh or empty for random
-std::vector<float> // input generator data: low, high, resolution
+size_t, // fake quantize levels
+std::vector<size_t>, // fake quantize inputs shape
+std::vector<float>, // fake quantize (inputLow, inputHigh, outputLow, outputHigh) or empty for random
+std::vector<float>, // input generator data (low, high, resolution) or empty for default
+ngraph::op::AutoBroadcastSpec // fake quantize broadcast mode
> fqSpecificParams;
typedef std::tuple<
fqSpecificParams,
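With the fifth tuple element in place, any code that builds `fqSpecificParams` by hand has to supply a broadcast spec as well. A small self-contained sketch of the updated tuple and one concrete instance; the values are illustrative placeholders, not taken from the test suites above:

```cpp
#include <cstddef>
#include <tuple>
#include <vector>
#include <ngraph/op/util/attr_types.hpp>

// Mirrors the updated typedef: levels, const-input shape, direct FQ arguments
// (or empty for random), input generator data (or empty for default), broadcast.
using fqSpecificParams = std::tuple<
    std::size_t,
    std::vector<std::size_t>,
    std::vector<float>,
    std::vector<float>,
    ngraph::op::AutoBroadcastSpec>;

// Example instance: 256 levels, per-tensor constants, random FQ ranges,
// default input generator, NUMPY broadcasting.
const fqSpecificParams exampleParams = std::make_tuple(
    std::size_t{256},
    std::vector<std::size_t>{1},
    std::vector<float>{},
    std::vector<float>{},
    ngraph::op::AutoBroadcastSpec(ngraph::op::AutoBroadcastType::NUMPY));
```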
@@ -20,7 +20,8 @@ std::string FakeQuantizeLayerTest::getTestCaseName(const testing::TestParamInfo<
std::vector<size_t> constShape;
std::vector<float> fqDirectArgs;
std::vector<float> inputArg;
-std::tie(levels, constShape, fqDirectArgs, inputArg) = fqParams;
+ngraph::op::AutoBroadcastSpec broadcast;
+std::tie(levels, constShape, fqDirectArgs, inputArg, broadcast) = fqParams;

std::ostringstream result;
result << "IS=" << CommonTestUtils::vec2str(inputShapes) << "_";
@@ -41,6 +42,7 @@ std::string FakeQuantizeLayerTest::getTestCaseName(const testing::TestParamInfo<
if (inputArg.size() == 3) {
result << "_inputArg=" << inputArg[0] << "_" << inputArg[1] << "_" << inputArg[2];
}
+result << "_" << broadcast.m_type;
return result.str();
}

@@ -55,7 +57,8 @@ void FakeQuantizeLayerTest::SetUp() {
std::vector<size_t> constShape;
std::vector<float> fqDirectArg;
std::vector<float> inputArg;
-std::tie(levels, constShape, fqDirectArg, inputArg) = fqParams;
+ngraph::op::AutoBroadcastSpec broadcast;
+std::tie(levels, constShape, fqDirectArg, inputArg, broadcast) = fqParams;
if (inputArg.size() == 3) {
inputDataMin = inputArg[0];
inputDataMax = inputArg[1];
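The excerpt ends before `SetUp()` actually creates the graph, so how `broadcast` is consumed is not visible here. A minimal sketch of the idea, assuming the FakeQuantize node is built directly through `ngraph::opset1::FakeQuantize`, whose constructor takes an `AutoBroadcastSpec` as its last argument; the helper below and its shapes/values are placeholders, not the test's real builder call:

```cpp
#include <memory>
#include <ngraph/ngraph.hpp>
#include <ngraph/opsets/opset1.hpp>

// Sketch only: builds a FakeQuantize node with an explicit broadcast spec, the
// way SetUp() could forward the new tuple element. Ranges are placeholders.
std::shared_ptr<ngraph::Node> makeFqWithBroadcast(const ngraph::Output<ngraph::Node>& data,
                                                  size_t levels,
                                                  const ngraph::op::AutoBroadcastSpec& broadcast) {
    auto mkConst = [](float v) {
        // Scalar-like constant so every broadcast mode is applicable.
        return ngraph::opset1::Constant::create(ngraph::element::f32, ngraph::Shape{1}, {v});
    };
    return std::make_shared<ngraph::opset1::FakeQuantize>(
        data, mkConst(0.f), mkConst(10.f), mkConst(0.f), mkConst(10.f), levels, broadcast);
}
```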
