From 05f65add08d3fc14b0eef629e471bccea0e33558 Mon Sep 17 00:00:00 2001
From: Vitaliy Urusovskij
Date: Thu, 4 Jan 2024 16:28:16 +0400
Subject: [PATCH] Port and clean `behavior/` shared tests #1 (#21860)

* Remove `set_preprocess.cpp`
* Remove `preprocessing.hpp`
* Remove `locale.hpp` - ported to `CanCompileModelWithCustomLocale`
* Port `version.cpp` and remove legacy
* Revert shared `version.hpp`
---
 .../behavior/ov_plugin/version.cpp            |   23 +
 .../behavior/plugin/version.cpp               |   18 -
 .../behavior/plugin/version.cpp               |   18 -
 .../behavior/executable_network/locale.cpp    |   14 -
 .../behavior/ov_plugin/version.cpp            |   23 +
 .../behavior/plugin/version.cpp               |   16 -
 .../behavior/ov_plugin/version.cpp            |   18 +
 .../behavior/plugin/version.cpp               |   18 -
 .../behavior/executable_network/locale.hpp    |   33 -
 .../include/behavior/plugin/preprocessing.hpp |  143 ---
 .../behavior/plugin/set_preprocess.hpp        | 1011 -----------------
 .../behavior/executable_network/locale.cpp    |   70 --
 12 files changed, 64 insertions(+), 1341 deletions(-)
 create mode 100644 src/plugins/auto/tests/functional/shared_tests_instances/behavior/ov_plugin/version.cpp
 delete mode 100644 src/plugins/auto/tests/functional/shared_tests_instances/behavior/plugin/version.cpp
 delete mode 100644 src/plugins/intel_cpu/tests/functional/shared_tests_instances/behavior/plugin/version.cpp
 delete mode 100644 src/plugins/intel_gpu/tests/functional/shared_tests_instances/behavior/executable_network/locale.cpp
 create mode 100644 src/plugins/intel_gpu/tests/functional/shared_tests_instances/behavior/ov_plugin/version.cpp
 delete mode 100644 src/plugins/intel_gpu/tests/functional/shared_tests_instances/behavior/plugin/version.cpp
 create mode 100644 src/plugins/template/tests/functional/shared_tests_instances/behavior/ov_plugin/version.cpp
 delete mode 100644 src/plugins/template/tests/functional/shared_tests_instances/behavior/plugin/version.cpp
 delete mode 100644 src/tests/functional/plugin/shared/include/behavior/executable_network/locale.hpp
 delete mode 100644 src/tests/functional/plugin/shared/include/behavior/plugin/preprocessing.hpp
 delete mode 100644 src/tests/functional/plugin/shared/include/behavior/plugin/set_preprocess.hpp
 delete mode 100644 src/tests/functional/plugin/shared/src/behavior/executable_network/locale.cpp

diff --git a/src/plugins/auto/tests/functional/shared_tests_instances/behavior/ov_plugin/version.cpp b/src/plugins/auto/tests/functional/shared_tests_instances/behavior/ov_plugin/version.cpp
new file mode 100644
index 00000000000000..1dd421bcb59df0
--- /dev/null
+++ b/src/plugins/auto/tests/functional/shared_tests_instances/behavior/ov_plugin/version.cpp
@@ -0,0 +1,23 @@
+// Copyright (C) 2018-2023 Intel Corporation
+// SPDX-License-Identifier: Apache-2.0
+//
+
+#include "behavior/ov_plugin/version.hpp"
+
+namespace ov {
+namespace test {
+namespace behavior {
+
+INSTANTIATE_TEST_SUITE_P(smoke_Multi_BehaviorTests,
+                         VersionTests,
+                         ::testing::Values(ov::test::utils::DEVICE_MULTI),
+                         VersionTests::getTestCaseName);
+
+INSTANTIATE_TEST_SUITE_P(smoke_Auto_BehaviorTests,
+                         VersionTests,
+                         ::testing::Values(ov::test::utils::DEVICE_AUTO),
+                         VersionTests::getTestCaseName);
+
+} // namespace behavior
+} // namespace test
+} // namespace ov
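
Note: the shared `VersionTests` suite instantiated above comes from behavior/ov_plugin/version.hpp, which is not part of this patch. In essence it verifies that the plugin reports a sane version through ov::Core::get_versions(). A minimal self-contained sketch of that check (illustrative only; the real fixture is device-parameterized and built on the common test base classes):

    #include <gtest/gtest.h>
    #include "openvino/runtime/core.hpp"

    TEST(VersionSketch, PluginReportsVersion) {
        ov::Core core;
        // get_versions() maps a (possibly composite) device name to ov::Version
        for (const auto& kv : core.get_versions("CPU")) {  // "CPU" is just an example device
            EXPECT_NE(kv.second.buildNumber, nullptr);     // non-empty build identifier
            EXPECT_NE(kv.second.description, nullptr);     // non-empty plugin description
        }
    }
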
diff --git a/src/plugins/auto/tests/functional/shared_tests_instances/behavior/plugin/version.cpp b/src/plugins/auto/tests/functional/shared_tests_instances/behavior/plugin/version.cpp
deleted file mode 100644
index 796149e7cb1e76..00000000000000
--- a/src/plugins/auto/tests/functional/shared_tests_instances/behavior/plugin/version.cpp
+++ /dev/null
@@ -1,18 +0,0 @@
-// Copyright (C) 2018-2023 Intel Corporation
-// SPDX-License-Identifier: Apache-2.0
-//
-
-#include "behavior/plugin/version.hpp"
-
-using namespace BehaviorTestsDefinitions;
-namespace {
-INSTANTIATE_TEST_SUITE_P(smoke_Multi_BehaviorTests,
-                         VersionTest,
-                         ::testing::Values(ov::test::utils::DEVICE_MULTI),
-                         VersionTest::getTestCaseName);
-
-INSTANTIATE_TEST_SUITE_P(smoke_Auto_BehaviorTests,
-                         VersionTest,
-                         ::testing::Values(ov::test::utils::DEVICE_AUTO),
-                         VersionTest::getTestCaseName);
-} // namespace
diff --git a/src/plugins/intel_cpu/tests/functional/shared_tests_instances/behavior/plugin/version.cpp b/src/plugins/intel_cpu/tests/functional/shared_tests_instances/behavior/plugin/version.cpp
deleted file mode 100644
index bcc0130ca8e8ab..00000000000000
--- a/src/plugins/intel_cpu/tests/functional/shared_tests_instances/behavior/plugin/version.cpp
+++ /dev/null
@@ -1,18 +0,0 @@
-// Copyright (C) 2018-2023 Intel Corporation
-// SPDX-License-Identifier: Apache-2.0
-//
-
-#include "behavior/plugin/version.hpp"
-
-using namespace BehaviorTestsDefinitions;
-namespace {
-    INSTANTIATE_TEST_SUITE_P(smoke_BehaviorTests, VersionTest,
-                             ::testing::Values(ov::test::utils::DEVICE_CPU),
-                             VersionTest::getTestCaseName);
-
-    INSTANTIATE_TEST_SUITE_P(smoke_Hetero_BehaviorTests, VersionTest,
-                             ::testing::Values(ov::test::utils::DEVICE_HETERO),
-                             VersionTest::getTestCaseName);
-
-
-} // namespace
diff --git a/src/plugins/intel_gpu/tests/functional/shared_tests_instances/behavior/executable_network/locale.cpp b/src/plugins/intel_gpu/tests/functional/shared_tests_instances/behavior/executable_network/locale.cpp
deleted file mode 100644
index 8e76c5f4f73168..00000000000000
--- a/src/plugins/intel_gpu/tests/functional/shared_tests_instances/behavior/executable_network/locale.cpp
+++ /dev/null
@@ -1,14 +0,0 @@
-// Copyright (C) 2018-2023 Intel Corporation
-// SPDX-License-Identifier: Apache-2.0
-//
-
-#include "behavior/executable_network/locale.hpp"
-
-using namespace BehaviorTestsDefinitions;
-namespace {
-    INSTANTIATE_TEST_SUITE_P(smoke_CustomLocaleTest, CustomLocaleTest,
-                             ::testing::Combine(
-                                 ::testing::Values("ru_RU.UTF-8"),
-                                 ::testing::Values(ov::test::utils::DEVICE_GPU)),
-                             CustomLocaleTest::getTestCaseName);
-} // namespace
diff --git a/src/plugins/intel_gpu/tests/functional/shared_tests_instances/behavior/ov_plugin/version.cpp b/src/plugins/intel_gpu/tests/functional/shared_tests_instances/behavior/ov_plugin/version.cpp
new file mode 100644
index 00000000000000..6109f54819cce8
--- /dev/null
+++ b/src/plugins/intel_gpu/tests/functional/shared_tests_instances/behavior/ov_plugin/version.cpp
@@ -0,0 +1,23 @@
+// Copyright (C) 2018-2023 Intel Corporation
+// SPDX-License-Identifier: Apache-2.0
+//
+
+#include "behavior/ov_plugin/version.hpp"
+
+namespace ov {
+namespace test {
+namespace behavior {
+
+INSTANTIATE_TEST_SUITE_P(smoke_BehaviorTests,
+                         VersionTests,
+                         ::testing::Values(ov::test::utils::DEVICE_GPU),
+                         VersionTests::getTestCaseName);
+
+INSTANTIATE_TEST_SUITE_P(smoke_Hetero_BehaviorTests,
+                         VersionTests,
+                         ::testing::Values(ov::test::utils::DEVICE_HETERO),
+                         VersionTests::getTestCaseName);
+
+} // namespace behavior
+} // namespace test
+} // namespace ov
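
Note: every instantiation file in this patch follows the same pattern: one shared, device-parameterized gtest fixture, instantiated once per device. How INSTANTIATE_TEST_SUITE_P feeds the device string into the fixture, as a generic, self-contained gtest sketch (unrelated to OpenVINO internals):

    #include <gtest/gtest.h>
    #include <string>

    class DeviceParamSketch : public ::testing::TestWithParam<std::string> {};

    TEST_P(DeviceParamSketch, DeviceNameIsNotEmpty) {
        // GetParam() yields one of the values listed in the instantiation below
        EXPECT_FALSE(GetParam().empty());
    }

    INSTANTIATE_TEST_SUITE_P(smoke_Sketch, DeviceParamSketch,
                             ::testing::Values("GPU", "HETERO"));
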
diff --git a/src/plugins/intel_gpu/tests/functional/shared_tests_instances/behavior/plugin/version.cpp b/src/plugins/intel_gpu/tests/functional/shared_tests_instances/behavior/plugin/version.cpp
deleted file mode 100644
index 00fd89116bb52b..00000000000000
--- a/src/plugins/intel_gpu/tests/functional/shared_tests_instances/behavior/plugin/version.cpp
+++ /dev/null
@@ -1,16 +0,0 @@
-// Copyright (C) 2018-2023 Intel Corporation
-// SPDX-License-Identifier: Apache-2.0
-//
-
-#include "behavior/plugin/version.hpp"
-
-using namespace BehaviorTestsDefinitions;
-namespace {
-INSTANTIATE_TEST_SUITE_P(smoke_BehaviorTests, VersionTest,
-                         ::testing::Values(ov::test::utils::DEVICE_GPU),
-                         VersionTest::getTestCaseName);
-
-INSTANTIATE_TEST_SUITE_P(smoke_Hetero_BehaviorTests, VersionTest,
-                         ::testing::Values(ov::test::utils::DEVICE_HETERO),
-                         VersionTest::getTestCaseName);
-} // namespace
diff --git a/src/plugins/template/tests/functional/shared_tests_instances/behavior/ov_plugin/version.cpp b/src/plugins/template/tests/functional/shared_tests_instances/behavior/ov_plugin/version.cpp
new file mode 100644
index 00000000000000..1d2a49c29ff525
--- /dev/null
+++ b/src/plugins/template/tests/functional/shared_tests_instances/behavior/ov_plugin/version.cpp
@@ -0,0 +1,18 @@
+// Copyright (C) 2018-2023 Intel Corporation
+// SPDX-License-Identifier: Apache-2.0
+//
+
+#include "behavior/ov_plugin/version.hpp"
+
+namespace ov {
+namespace test {
+namespace behavior {
+
+INSTANTIATE_TEST_SUITE_P(smoke_BehaviorTests,
+                         VersionTests,
+                         ::testing::Values(ov::test::utils::DEVICE_TEMPLATE),
+                         VersionTests::getTestCaseName);
+
+} // namespace behavior
+} // namespace test
+} // namespace ov
diff --git a/src/plugins/template/tests/functional/shared_tests_instances/behavior/plugin/version.cpp b/src/plugins/template/tests/functional/shared_tests_instances/behavior/plugin/version.cpp
deleted file mode 100644
index 7fa154c1b86b1a..00000000000000
--- a/src/plugins/template/tests/functional/shared_tests_instances/behavior/plugin/version.cpp
+++ /dev/null
@@ -1,18 +0,0 @@
-// Copyright (C) 2018-2023 Intel Corporation
-// SPDX-License-Identifier: Apache-2.0
-//
-
-#include "behavior/plugin/version.hpp"
-
-using namespace BehaviorTestsDefinitions;
-
-namespace {
-
-const std::vector<std::map<std::string, std::string>> configs = {{}};
-
-INSTANTIATE_TEST_SUITE_P(smoke_BehaviorTests,
-                         VersionTest,
-                         ::testing::Values(ov::test::utils::DEVICE_TEMPLATE),
-                         VersionTest::getTestCaseName);
-
-} // namespace
diff --git a/src/tests/functional/plugin/shared/include/behavior/executable_network/locale.hpp b/src/tests/functional/plugin/shared/include/behavior/executable_network/locale.hpp
deleted file mode 100644
index a8af80e4101fc3..00000000000000
--- a/src/tests/functional/plugin/shared/include/behavior/executable_network/locale.hpp
+++ /dev/null
@@ -1,33 +0,0 @@
-// Copyright (C) 2018-2023 Intel Corporation
-// SPDX-License-Identifier: Apache-2.0
-//
-
-#pragma once
-
-#include "common_test_utils/test_common.hpp"
-#include "common_test_utils/file_utils.hpp"
-#include "functional_test_utils/plugin_cache.hpp"
-#include "ov_models/subgraph_builders.hpp"
-
-#include "base/behavior_test_utils.hpp"
-
-namespace BehaviorTestsDefinitions {
-
-typedef std::tuple<
-        std::string,  // Locale name
-        std::string>  // Target device name
-        LocaleParams;
-
-class CustomLocaleTest : public BehaviorTestsUtils::IEExecutableNetworkTestBase,
-                         public ::testing::WithParamInterface<LocaleParams> {
-protected:
-    std::shared_ptr<ngraph::Function> function;
-    std::string localeName;
-    std::string testName;
-
-    void SetUp() override;
-public:
-    static std::string getTestCaseName(const testing::TestParamInfo<LocaleParams> &obj);
-};
-
-} // namespace BehaviorTestsDefinitions
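
Note: the custom-locale tests removed here (and the ported CanCompileModelWithCustomLocale named in the commit message) exist because C numeric parsing is locale-sensitive: under a locale whose decimal separator is ',', parsing a literal such as "3.14" silently truncates at the '.'. A tiny standalone demonstration (the ru_RU.UTF-8 locale may not be installed on every machine):

    #include <clocale>
    #include <cstdio>
    #include <cstdlib>

    int main() {
        std::setlocale(LC_NUMERIC, "C");
        std::printf("%f\n", std::strtod("3.14", nullptr));      // 3.140000
        if (std::setlocale(LC_NUMERIC, "ru_RU.UTF-8")) {        // skipped if locale absent
            std::printf("%f\n", std::strtod("3.14", nullptr));  // 3.000000: parsing stops at '.'
        }
        return 0;
    }
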
diff --git a/src/tests/functional/plugin/shared/include/behavior/plugin/preprocessing.hpp b/src/tests/functional/plugin/shared/include/behavior/plugin/preprocessing.hpp
deleted file mode 100644
index faee036bb07bef..00000000000000
--- a/src/tests/functional/plugin/shared/include/behavior/plugin/preprocessing.hpp
+++ /dev/null
@@ -1,143 +0,0 @@
-// Copyright (C) 2018-2023 Intel Corporation
-// SPDX-License-Identifier: Apache-2.0
-//
-
-#include <gtest/gtest.h>
-
-#include <ie_core.hpp>
-#include "common_test_utils/test_assertions.hpp"
-#include "common_test_utils/common_utils.hpp"
-#include "functional_test_utils/plugin_cache.hpp"
-#include "shared_test_classes/base/layer_test_utils.hpp"
-#include "functional_test_utils/blob_utils.hpp"
-#include "ie_preprocess.hpp"
-#include "base/behavior_test_utils.hpp"
-
-namespace BehaviorTestsDefinitions {
-
-using PreprocessingPrecisionConvertParams = std::tuple<
-        InferenceEngine::Precision,         // Input precision
-        unsigned,                           // channels number
-        bool,                               // Use normal (i.e. SetInput()) or unusual (i.e. GetBlob()) input method
-        std::string,                        // Device name
-        std::map<std::string, std::string>  // Config
->;
-
-struct PreprocessingPrecisionConvertTest :
-        public testing::WithParamInterface<PreprocessingPrecisionConvertParams>,
-        LayerTestsUtils::LayerTestsCommon {
-public:
-    static std::string getTestCaseName(testing::TestParamInfo<PreprocessingPrecisionConvertParams> obj) {
-        InferenceEngine::Precision inPrc;
-        bool useSetInput;
-        unsigned channels;
-        std::string targetDevice;
-        std::map<std::string, std::string> configuration;
-        std::tie(inPrc, channels, useSetInput, targetDevice, configuration) = obj.param;
-        std::ostringstream result;
-        result << "inPRC=" << inPrc.name() << "_";
-        result << channels << "Ch" << "_";
-        result << (useSetInput ? "SetInput" : "GetBlob") << "_";
-        result << "targetDevice=" << targetDevice;
-        if (!configuration.empty()) {
-            for (auto& configItem : configuration) {
-                result << "configItem=" << configItem.first << "_" << configItem.second << "_";
-            }
-        }
-        return result.str();
-    }
-
-    // Need to override Infer() due to usage of GetBlob() as input method.
-    // Mostly a copy of LayerTestsCommon::Infer()
-    void Infer() override {
-        inferRequest = executableNetwork.CreateInferRequest();
-        inputs.clear();
-
-        for (const auto &input : executableNetwork.GetInputsInfo()) {
-            const auto &info = input.second;
-            auto blob = GenerateInput(*info);
-            if (!use_set_input) {
-                InferenceEngine::Blob::Ptr input = inferRequest.GetBlob(info->name());
-                blob_copy(blob, input);
-            } else {
-                inferRequest.SetBlob(info->name(), blob);
-            }
-
-            inputs.push_back(blob);
-        }
-        inferRequest.Infer();
-    }
-
-    void SetUp() override {
-        // This test:
-        // - Strives to test only the plugin-internal preprocessing (precision conversion),
-        //   thus a (logically) no-op graph is used.
-        // - The reference code mimics the preprocessing via an extra ngraph Convert operation.
-        // - Creates/uses two (different) graphs: one to feed the plugin and one to calculate the reference result.
-
-        SetRefMode(LayerTestsUtils::RefMode::INTERPRETER);
-
-        std::tie(inPrc, channels, use_set_input, targetDevice, configuration) = this->GetParam();
-        outPrc = inPrc;
-
-        bool specialZero = true;
-
-        std::vector<size_t> inputShape(channels, 4);
-
-        auto make_ngraph = [&](bool with_extra_conv) {
-            auto in_prec = FuncTestUtils::PrecisionUtils::convertIE2nGraphPrc(with_extra_conv ? inPrc : decltype(inPrc)(InferenceEngine::Precision::FP32));
-            ov::ParameterVector paramsIn {std::make_shared<ov::op::v0::Parameter>(in_prec, ov::Shape(inputShape))};
-
-            auto toF32 = std::make_shared<ngraph::opset1::Convert>(paramsIn[0], ngraph::element::Type_t::f32);
-
-            auto constNode = std::make_shared<ngraph::opset1::Constant>(
-                ngraph::element::Type_t::i64, ngraph::Shape{inputShape.size()}, inputShape);
-            std::shared_ptr<ngraph::Node> reshape_input = with_extra_conv ? toF32->shared_from_this() : paramsIn[0];
-            auto reshape = std::dynamic_pointer_cast<ngraph::opset1::Reshape>(
-                std::make_shared<ngraph::opset1::Reshape>(reshape_input, constNode, specialZero));
-            ngraph::ResultVector results{std::make_shared<ngraph::opset1::Result>(reshape)};
-            return std::make_shared<ngraph::Function>(results, paramsIn, "Reshape");
-        };
-
-        function = make_ngraph(false);
-        reference_function = make_ngraph(true);  // use extra ops to mimic the preprocessing
-    }
-
-    void Validate() override {
-        // w/a: a copy of the original function is required to provide a correct op coverage report (overflow of the Convert counter issue)
-        auto copyOriginalFunction = function;
-        // force the reference implementation to use the graph with the extra Convert operation
-        LayerTestsUtils::LayerTestsCommon::Validate();
-        function = copyOriginalFunction;
-    }
-
-    void Run() override {
-        SKIP_IF_CURRENT_TEST_IS_DISABLED();
-        functionRefs = ngraph::clone_function(*function);
-        try {
-            LoadNetwork();
-            GenerateInputs();
-            Infer();
-            Validate();
-        }
-        catch (const std::runtime_error &re) {
-            GTEST_FATAL_FAILURE_(re.what());
-        } catch (const std::exception &ex) {
-            GTEST_FATAL_FAILURE_(ex.what());
-        } catch (...) {
-            GTEST_FATAL_FAILURE_("Unknown failure occurred.");
-        }
-    }
-
-public:
-    std::shared_ptr<InferenceEngine::Core> ie = PluginCache::get().ie();
-    std::shared_ptr<ngraph::Function> reference_function;
-    bool use_set_input = true;
-    unsigned channels = 0;
-};
-
-
-TEST_P(PreprocessingPrecisionConvertTest, InternalPluginPrecisionConvert) {
-    Run();
-}
-} // namespace BehaviorTestsDefinitions
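
Note: the IE-era preprocessing exercised by the header removed above, and by set_preprocess.hpp removed below (precision conversion, mean/scale, channel reversal), maps onto ov::preprocess::PrePostProcessor in API 2.0. A rough sketch of the equivalents, assuming a single f32 NCHW model input; this is illustrative, not taken from the ported tests:

    #include "openvino/core/model.hpp"
    #include "openvino/core/preprocess/pre_post_process.hpp"

    std::shared_ptr<ov::Model> with_ov20_preprocessing(const std::shared_ptr<ov::Model>& model) {
        ov::preprocess::PrePostProcessor ppp(model);
        ppp.input().tensor()
            .set_element_type(ov::element::u8)                    // user feeds u8 data...
            .set_layout("NHWC")                                   // ...laid out as NHWC...
            .set_color_format(ov::preprocess::ColorFormat::BGR);  // ...with BGR channel order
        ppp.input().preprocess()
            .convert_element_type(ov::element::f32)               // ~ precision-conversion tests
            .convert_color(ov::preprocess::ColorFormat::RGB)      // ~ ReverseInputChannels tests
            .mean(5.0f)                                           // ~ SetMeanValue tests
            .scale(2.0f);                                         // ~ SetScale tests
        ppp.input().model().set_layout("NCHW");                   // layout of the model input
        return ppp.build();
    }
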
diff --git a/src/tests/functional/plugin/shared/include/behavior/plugin/set_preprocess.hpp b/src/tests/functional/plugin/shared/include/behavior/plugin/set_preprocess.hpp
deleted file mode 100644
index 92d9c67231c762..00000000000000
--- a/src/tests/functional/plugin/shared/include/behavior/plugin/set_preprocess.hpp
+++ /dev/null
@@ -1,1011 +0,0 @@
-// Copyright (C) 2018-2023 Intel Corporation
-// SPDX-License-Identifier: Apache-2.0
-//
-
-#include <gtest/gtest.h>
-
-#include <ie_core.hpp>
-#include <memory>
-#include "common_test_utils/test_assertions.hpp"
-#include "common_test_utils/common_utils.hpp"
-#include "functional_test_utils/plugin_cache.hpp"
-#include "shared_test_classes/base/layer_test_utils.hpp"
-#include "functional_test_utils/blob_utils.hpp"
-#include "ie_preprocess.hpp"
-#include "base/behavior_test_utils.hpp"
-#include "ie_ngraph_utils.hpp"
-
-namespace BehaviorTestsDefinitions {
-using InferRequestPreprocessTest = BehaviorTestsUtils::BehaviorTestsBasic;
-
-TEST_P(InferRequestPreprocessTest, SetPreProcessToInputInfo) {
-    // Create CNNNetwork from ngraph::Function
-    InferenceEngine::CNNNetwork cnnNet(function);
-
-    auto &preProcess = cnnNet.getInputsInfo().begin()->second->getPreProcess();
-    preProcess.setResizeAlgorithm(InferenceEngine::ResizeAlgorithm::RESIZE_BILINEAR);
-    // Load CNNNetwork to target plugins
-    auto execNet = ie->LoadNetwork(cnnNet, target_device, configuration);
-    // Create InferRequest
-    auto req = execNet.CreateInferRequest();
-    {
-        InferenceEngine::ConstInputsDataMap inputsMap = execNet.GetInputsInfo();
-        const auto &name = inputsMap.begin()->second->name();
-        const InferenceEngine::PreProcessInfo *info = &req.GetPreProcess(name.c_str());
-        ASSERT_EQ(info->getResizeAlgorithm(), InferenceEngine::ResizeAlgorithm::RESIZE_BILINEAR);
-        ASSERT_PREPROCESS_INFO_EQ(preProcess, *info);
-    }
-}
-
-TEST_P(InferRequestPreprocessTest, SetPreProcessToInferRequest) {
-    // Create CNNNetwork from ngraph::Function
-    InferenceEngine::CNNNetwork cnnNet(function);
-
-    auto &preProcess = cnnNet.getInputsInfo().begin()->second->getPreProcess();
-    preProcess.setResizeAlgorithm(InferenceEngine::ResizeAlgorithm::RESIZE_BILINEAR);
-    // Load CNNNetwork to target plugins
-    auto execNet = ie->LoadNetwork(cnnNet, target_device, configuration);
-    // Create InferRequest
-    auto req = execNet.CreateInferRequest();
-    InferenceEngine::ConstInputsDataMap inputsMap = execNet.GetInputsInfo();
-    const auto &name = inputsMap.begin()->second->name();
-    auto inputBlob = FuncTestUtils::createAndFillBlob(
-            cnnNet.getInputsInfo().begin()->second->getTensorDesc());
-    req.SetBlob(cnnNet.getInputsInfo().begin()->first, inputBlob);
-    {
-        const InferenceEngine::PreProcessInfo *info = &req.GetPreProcess(name.c_str());
-        ASSERT_EQ(cnnNet.getInputsInfo().begin()->second->getPreProcess().getResizeAlgorithm(),
-                  info->getResizeAlgorithm());
-    }
-}
-
-TEST_P(InferRequestPreprocessTest, SetMeanImagePreProcessGetBlob) {
-    std::shared_ptr<ngraph::Function> ngraph;
-    {
-        ngraph::PartialShape shape({1, 3, 10, 10});
-        ngraph::element::Type type(ngraph::element::Type_t::f32);
-        auto param = std::make_shared<ngraph::op::Parameter>(type, shape);
-        param->set_friendly_name("param");
-        auto relu = std::make_shared<ngraph::op::Relu>(param);
-        relu->set_friendly_name("relu");
-        auto result = std::make_shared<ngraph::op::Result>(relu);
-        result->set_friendly_name("result");
-
-        ngraph::ParameterVector params = {param};
-        ngraph::ResultVector results = {result};
-
-        ngraph = std::make_shared<ngraph::Function>(results, params);
-    }
-
-    // Create CNNNetwork from ngraph::Function
-    InferenceEngine::CNNNetwork cnnNet(ngraph);
-
-    auto &preProcess = cnnNet.getInputsInfo().begin()->second->getPreProcess();
-    preProcess.init(3);
-    for (size_t i = 0; i < 3; i++) {
-        preProcess[i]->meanData = make_blob_with_precision(InferenceEngine::TensorDesc(InferenceEngine::Precision::FP32,
-                                                                                       {10, 10},
-                                                                                       InferenceEngine::Layout::HW));
-        preProcess[i]->meanData->allocate();
-        auto lockedMem = preProcess[i]->meanData->buffer();
-        auto* data = lockedMem.as<float*>();
-        for (size_t j = 0; j < 100; j++) {
-            data[j] = 0;
-            data[j] -= i * 100 + j;
-        }
-    }
-    preProcess.setVariant(InferenceEngine::MEAN_IMAGE);
-    // Load CNNNetwork to target plugins
-    auto execNet = ie->LoadNetwork(cnnNet, target_device, configuration);
-    // Create InferRequest
-    auto req = execNet.CreateInferRequest();
-    auto inBlob = req.GetBlob("param");
-
-    // Fill input
-    {
-        auto lockedMem = inBlob->buffer();
-        auto *inData = lockedMem.as<float*>();
-        for (size_t i = 0; i < inBlob->size(); i++)
-            inData[i] = static_cast<float>(i);
-    }
-
-    req.Infer();
-
-    // Check output
-    auto outBlob = req.GetBlob(cnnNet.getOutputsInfo().begin()->first);
-    {
-        auto inMem = inBlob->cbuffer();
-        const auto* inData = inMem.as<const float*>();
-        auto outMem = outBlob->cbuffer();
-        const auto* outData = outMem.as<const float*>();
-        ASSERT_EQ(inBlob->size(), outBlob->size());
-        for (size_t i = 0; i < inBlob->size(); i++) {
-            ASSERT_EQ(inData[i] + inData[i], outData[i]);
-        }
-    }
-}
-
-TEST_P(InferRequestPreprocessTest, SetMeanImagePreProcessSetBlob) {
-    std::shared_ptr<ngraph::Function> ngraph;
-    {
-        ngraph::PartialShape shape({1, 3, 10, 10});
-        ngraph::element::Type type(ngraph::element::Type_t::f32);
-        auto param = std::make_shared<ngraph::op::Parameter>(type, shape);
-        param->set_friendly_name("param");
-        auto relu = std::make_shared<ngraph::op::Relu>(param);
-        relu->set_friendly_name("relu");
-        auto result = std::make_shared<ngraph::op::Result>(relu);
-        result->set_friendly_name("result");
-
-        ngraph::ParameterVector params = {param};
-        ngraph::ResultVector results = {result};
-
-        ngraph = std::make_shared<ngraph::Function>(results, params);
-    }
-
-    // Create CNNNetwork from ngraph::Function
-    InferenceEngine::CNNNetwork cnnNet(ngraph);
-
-    auto &preProcess = cnnNet.getInputsInfo().begin()->second->getPreProcess();
-    preProcess.init(3);
-    for (size_t i = 0; i < 3; i++) {
-        auto meanData = make_blob_with_precision(
-                InferenceEngine::TensorDesc(InferenceEngine::Precision::FP32, {10, 10},
-                                            InferenceEngine::Layout::HW));
-        meanData->allocate();
-        auto lockedMem = meanData->buffer();
-        auto* data = lockedMem.as<float*>();
-        for (size_t j = 0; j < 100; j++) {
-            data[j] = 0;
-            data[j] -= i * 100 + j;
-        }
-        ASSERT_NO_THROW(preProcess.setMeanImageForChannel(meanData, i));
-    }
-    preProcess.setVariant(InferenceEngine::MEAN_IMAGE);
-    // Load CNNNetwork to target plugins
-    auto execNet = ie->LoadNetwork(cnnNet, target_device, configuration);
-    // Create InferRequest
-    auto req = execNet.CreateInferRequest();
-
-    auto inBlob = make_blob_with_precision(cnnNet.getInputsInfo().begin()->second->getTensorDesc());
-    inBlob->allocate();
-    req.SetBlob("param", inBlob);
-
-    // Fill input
-    {
-        auto lockedMem = inBlob->buffer();
-        auto *inData = lockedMem.as<float*>();
-        for (size_t i = 0; i < inBlob->size(); i++)
-            inData[i] = static_cast<float>(i);
-    }
-
-    req.Infer();
-
-    // Check output
-    auto outBlob = req.GetBlob(cnnNet.getOutputsInfo().begin()->first);
-    {
-        auto inMem = inBlob->cbuffer();
-        const auto* inData = inMem.as<const float*>();
-        auto outMem = outBlob->cbuffer();
-        const auto* outData = outMem.as<const float*>();
-        ASSERT_EQ(inBlob->size(), outBlob->size());
-        for (size_t i = 0; i < inBlob->size(); i++)
-            ASSERT_EQ(inData[i] + inData[i], outData[i]);
-    }
-}
-
-TEST_P(InferRequestPreprocessTest, SetMeanValuePreProcessGetBlob) {
-    std::shared_ptr<ngraph::Function> ngraph;
-    {
-        ngraph::PartialShape shape({1, 3, 10, 10});
-        ngraph::element::Type type(ngraph::element::Type_t::f32);
-        auto param = std::make_shared<ngraph::op::Parameter>(type, shape);
-        param->set_friendly_name("param");
-        auto relu = std::make_shared<ngraph::op::Relu>(param);
-        relu->set_friendly_name("relu");
-        auto result = std::make_shared<ngraph::op::Result>(relu);
-        result->set_friendly_name("result");
-
-        ngraph::ParameterVector params = {param};
-        ngraph::ResultVector results = {result};
-
-        ngraph = std::make_shared<ngraph::Function>(results, params);
-    }
-
-    // Create CNNNetwork from ngraph::Function
-    InferenceEngine::CNNNetwork cnnNet(ngraph);
-
-    auto &preProcess = cnnNet.getInputsInfo().begin()->second->getPreProcess();
-    preProcess.init(3);
-    preProcess[0]->meanValue = -5;
-    preProcess[1]->meanValue = -5;
-    preProcess[2]->meanValue = -5;
-    preProcess[0]->stdScale = 1;
-    preProcess[1]->stdScale = 1;
-    preProcess[2]->stdScale = 1;
-    preProcess.setVariant(InferenceEngine::MEAN_VALUE);
-    // Load CNNNetwork to target plugins
-    auto execNet = ie->LoadNetwork(cnnNet, target_device, configuration);
-    // Create InferRequest
-    auto req = execNet.CreateInferRequest();
-    auto inBlob = req.GetBlob("param");
-
-    // Fill input
-    {
-        auto lockedMem = inBlob->buffer();
-        auto *inData = lockedMem.as<float*>();
-        for (size_t i = 0; i < inBlob->size(); i++)
-            inData[i] = static_cast<float>(i);
-    }
-
-    req.Infer();
-
-    // Check output
-    auto outBlob = req.GetBlob(cnnNet.getOutputsInfo().begin()->first);
-    {
-        auto inMem = inBlob->cbuffer();
-        const auto* inData = inMem.as<const float*>();
-        auto outMem = outBlob->cbuffer();
-        const auto* outData = outMem.as<const float*>();
-        ASSERT_EQ(inBlob->size(), outBlob->size());
-        for (size_t i = 0; i < inBlob->size(); i++) {
-            ASSERT_EQ(inData[i] + 5, outData[i]);
-        }
-    }
-}
-
-TEST_P(InferRequestPreprocessTest, SetMeanValuePreProcessSetBlob) {
-    std::shared_ptr<ngraph::Function> ngraph;
-    {
-        ngraph::PartialShape shape({1, 3, 10, 10});
-        ngraph::element::Type type(ngraph::element::Type_t::f32);
-        auto param = std::make_shared<ngraph::op::Parameter>(type, shape);
-        param->set_friendly_name("param");
-        auto relu = std::make_shared<ngraph::op::Relu>(param);
-        relu->set_friendly_name("relu");
-        auto result = std::make_shared<ngraph::op::Result>(relu);
-        result->set_friendly_name("result");
-
-        ngraph::ParameterVector params = {param};
-        ngraph::ResultVector results = {result};
-
-        ngraph = std::make_shared<ngraph::Function>(results, params);
-    }
-
-    // Create CNNNetwork from ngraph::Function
-    InferenceEngine::CNNNetwork cnnNet(ngraph);
-
-    auto &preProcess = cnnNet.getInputsInfo().begin()->second->getPreProcess();
-    preProcess.init(3);
-    preProcess[0]->meanValue = -5;
-    preProcess[1]->meanValue = -5;
-    preProcess[2]->meanValue = -5;
-    preProcess[0]->stdScale = 1;
-    preProcess[1]->stdScale = 1;
-    preProcess[2]->stdScale = 1;
-    preProcess.setVariant(InferenceEngine::MEAN_VALUE);
-    // Load CNNNetwork to target plugins
-    auto execNet = ie->LoadNetwork(cnnNet, target_device, configuration);
-    // Create InferRequest
-    auto req = execNet.CreateInferRequest();
-
-    auto inBlob = make_blob_with_precision(cnnNet.getInputsInfo().begin()->second->getTensorDesc());
-    inBlob->allocate();
-    req.SetBlob("param", inBlob);
-
-    // Fill input
-    {
-        auto lockedMem = inBlob->buffer();
-        auto *inData = lockedMem.as<float*>();
-        for (size_t i = 0; i < inBlob->size(); i++)
-            inData[i] = static_cast<float>(i);
-    }
-
-    req.Infer();
-
-    // Check output
-    auto outBlob = req.GetBlob(cnnNet.getOutputsInfo().begin()->first);
-    {
-        auto inMem = inBlob->cbuffer();
-        const auto* inData = inMem.as<const float*>();
-        auto outMem = outBlob->cbuffer();
-        const auto* outData = outMem.as<const float*>();
-        ASSERT_EQ(inBlob->size(), outBlob->size());
-        for (size_t i = 0; i < inBlob->size(); i++)
-            ASSERT_EQ(inData[i] + 5, outData[i]);
-    }
-}
-
-TEST_P(InferRequestPreprocessTest, ReverseInputChannelsPreProcessGetBlob) {
-    std::shared_ptr<ngraph::Function> ngraph;
-    {
-        ngraph::PartialShape shape({1, 3, 10, 10});
-        ngraph::element::Type type(ngraph::element::Type_t::f32);
-        auto param = std::make_shared<ngraph::op::Parameter>(type, shape);
-        param->set_friendly_name("param");
-        auto relu = std::make_shared<ngraph::op::Relu>(param);
-        relu->set_friendly_name("relu");
-        auto result = std::make_shared<ngraph::op::Result>(relu);
-        result->set_friendly_name("result");
-
-        ngraph::ParameterVector params = {param};
-        ngraph::ResultVector results = {result};
-
-        ngraph = std::make_shared<ngraph::Function>(results, params);
-    }
-
-    // Create CNNNetwork from ngraph::Function
-    InferenceEngine::CNNNetwork cnnNet(ngraph);
-
-    auto &preProcess = cnnNet.getInputsInfo().begin()->second->getPreProcess();
-    preProcess.setColorFormat(InferenceEngine::ColorFormat::RGB);
-    // Load CNNNetwork to target plugins
-    auto execNet = ie->LoadNetwork(cnnNet, target_device, configuration);
-    // Create InferRequest
-    auto req = execNet.CreateInferRequest();
-    auto inBlob = req.GetBlob("param");
-
-    // Fill input
-    {
-        auto lockedMem = inBlob->buffer();
-        auto *inData = lockedMem.as<float*>();
-        for (size_t i = 0; i < inBlob->size(); i++)
-            inData[i] = static_cast<float>(i);
-    }
-
-    req.Infer();
-
-    // Check output
-    auto outBlob = req.GetBlob(cnnNet.getOutputsInfo().begin()->first);
-    {
-        auto inMem = inBlob->cbuffer();
-        const auto* inData = inMem.as<const float*>();
-        auto outMem = outBlob->cbuffer();
-        const auto* outData = outMem.as<const float*>();
-        ASSERT_EQ(inBlob->size(), outBlob->size());
-        for (size_t i = 0; i < 3; i++)
-            for (size_t j = 0; j < 100; j++) {
-                // BGR to RGB
-                if (!i) {
-                    ASSERT_EQ(inData[j], outData[200 + j]);
-                } else if (i == j) {
-                    ASSERT_EQ(inData[100 + j], outData[100 + j]);
-                } else {
-                    ASSERT_EQ(inData[200 + j], outData[j]);
-                }
-            }
-    }
-}
-
-TEST_P(InferRequestPreprocessTest, ReverseInputChannelsPreProcessSetBlob) {
-    std::shared_ptr<ngraph::Function> ngraph;
-    {
-        ngraph::PartialShape shape({1, 3, 10, 10});
-        ngraph::element::Type type(ngraph::element::Type_t::f32);
-        auto param = std::make_shared<ngraph::op::Parameter>(type, shape);
-        param->set_friendly_name("param");
-        auto relu = std::make_shared<ngraph::op::Relu>(param);
-        relu->set_friendly_name("relu");
-        auto result = std::make_shared<ngraph::op::Result>(relu);
-        result->set_friendly_name("result");
-
-        ngraph::ParameterVector params = {param};
-        ngraph::ResultVector results = {result};
-
-        ngraph = std::make_shared<ngraph::Function>(results, params);
-    }
-
-    // Create CNNNetwork from ngraph::Function
-    InferenceEngine::CNNNetwork cnnNet(ngraph);
-
-    auto &preProcess = cnnNet.getInputsInfo().begin()->second->getPreProcess();
-    preProcess.setColorFormat(InferenceEngine::ColorFormat::RGB);
-    // Load CNNNetwork to target plugins
-    auto execNet = ie->LoadNetwork(cnnNet, target_device, configuration);
-    // Create InferRequest
-    auto req = execNet.CreateInferRequest();
-
-    auto inBlob = make_blob_with_precision(cnnNet.getInputsInfo().begin()->second->getTensorDesc());
-    inBlob->allocate();
-    req.SetBlob("param", inBlob);
-
-    // Fill input
-    {
-        auto lockedMem = inBlob->buffer();
-        auto *inData = lockedMem.as<float*>();
-        for (size_t i = 0; i < inBlob->size(); i++)
-            inData[i] = static_cast<float>(i);
-    }
-
-    req.Infer();
-
-    // Check output
-    auto outBlob = req.GetBlob(cnnNet.getOutputsInfo().begin()->first);
-    {
-        auto inMem = inBlob->cbuffer();
-        const auto* inData = inMem.as<const float*>();
-        auto outMem = outBlob->cbuffer();
-        const auto* outData = outMem.as<const float*>();
-        ASSERT_EQ(inBlob->size(), outBlob->size());
-        for (size_t i = 0; i < 3; i++)
-            for (size_t j = 0; j < 100; j++) {
-                // BGR to RGB
-                if (!i) {
-                    ASSERT_EQ(inData[j], outData[200 + j]);
-                } else if (i == j) {
-                    ASSERT_EQ(inData[100 + j], outData[100 + j]);
-                } else {
-                    ASSERT_EQ(inData[200 + j], outData[j]);
-                }
-            }
-    }
-}
-
-TEST_P(InferRequestPreprocessTest, SetScalePreProcessGetBlob) {
-    std::shared_ptr<ngraph::Function> ngraph;
-    {
-        ngraph::PartialShape shape({1, 3, 10, 10});
-        ngraph::element::Type type(ngraph::element::Type_t::f32);
-        auto param = std::make_shared<ngraph::op::Parameter>(type, shape);
-        param->set_friendly_name("param");
-        auto relu = std::make_shared<ngraph::op::Relu>(param);
-        relu->set_friendly_name("relu");
-        auto result = std::make_shared<ngraph::op::Result>(relu);
-        result->set_friendly_name("result");
-
-        ngraph::ParameterVector params = {param};
-        ngraph::ResultVector results = {result};
-
-        ngraph = std::make_shared<ngraph::Function>(results, params);
-    }
-
-    // Create CNNNetwork from ngraph::Function
-    InferenceEngine::CNNNetwork cnnNet(ngraph);
-
-    auto &preProcess = cnnNet.getInputsInfo().begin()->second->getPreProcess();
-    preProcess.init(3);
-    preProcess[0]->stdScale = 2;
-    preProcess[1]->stdScale = 2;
-    preProcess[2]->stdScale = 2;
-    preProcess[0]->meanValue = 0;
-    preProcess[1]->meanValue = 0;
-    preProcess[2]->meanValue = 0;
-    preProcess.setVariant(InferenceEngine::MEAN_VALUE);
-    // Load CNNNetwork to target plugins
-    auto execNet = ie->LoadNetwork(cnnNet, target_device, configuration);
-    // Create InferRequest
-    auto req = execNet.CreateInferRequest();
-    auto inBlob = req.GetBlob("param");
-
-    // Fill input
-    {
-        auto lockedMem = inBlob->buffer();
-        auto *inData = lockedMem.as<float*>();
-        for (size_t i = 0; i < inBlob->size(); i++)
-            inData[i] = static_cast<float>(i);
-    }
-
-    req.Infer();
-
-    // Check output
-    auto outBlob = req.GetBlob(cnnNet.getOutputsInfo().begin()->first);
-    {
-        auto inMem = inBlob->cbuffer();
-        const auto* inData = inMem.as<const float*>();
-        auto outMem = outBlob->cbuffer();
-        const auto* outData = outMem.as<const float*>();
-        ASSERT_EQ(inBlob->size(), outBlob->size());
-        for (size_t i = 0; i < inBlob->size(); i++) {
-            ASSERT_EQ(inData[i] / 2, outData[i]);
-        }
-    }
-}
-
-TEST_P(InferRequestPreprocessTest, SetScalePreProcessSetBlob) {
-    std::shared_ptr<ngraph::Function> ngraph;
-    {
-        ngraph::PartialShape shape({1, 3, 10, 10});
-        ngraph::element::Type type(ngraph::element::Type_t::f32);
-        auto param = std::make_shared<ngraph::op::Parameter>(type, shape);
-        param->set_friendly_name("param");
-        auto relu = std::make_shared<ngraph::op::Relu>(param);
-        relu->set_friendly_name("relu");
-        auto result = std::make_shared<ngraph::op::Result>(relu);
-        result->set_friendly_name("result");
-
-        ngraph::ParameterVector params = {param};
-        ngraph::ResultVector results = {result};
-
-        ngraph = std::make_shared<ngraph::Function>(results, params);
-    }
-
-    // Create CNNNetwork from ngraph::Function
-    InferenceEngine::CNNNetwork cnnNet(ngraph);
-
-    auto &preProcess = cnnNet.getInputsInfo().begin()->second->getPreProcess();
-    preProcess.init(3);
-    preProcess[0]->stdScale = 2;
-    preProcess[1]->stdScale = 2;
-    preProcess[2]->stdScale = 2;
-    preProcess[0]->meanValue = 0;
-    preProcess[1]->meanValue = 0;
-    preProcess[2]->meanValue = 0;
-    preProcess.setVariant(InferenceEngine::MEAN_VALUE);
-    // Load CNNNetwork to target plugins
-    auto execNet = ie->LoadNetwork(cnnNet, target_device, configuration);
-    // Create InferRequest
-    auto req = execNet.CreateInferRequest();
-
-    auto inBlob = make_blob_with_precision(cnnNet.getInputsInfo().begin()->second->getTensorDesc());
-    inBlob->allocate();
-    req.SetBlob("param", inBlob);
-
-    // Fill input
-    {
-        auto lockedMem = inBlob->buffer();
-        auto *inData = lockedMem.as<float*>();
-        for (size_t i = 0; i < inBlob->size(); i++)
-            inData[i] = static_cast<float>(i);
-    }
-
-    req.Infer();
-
-    // Check output
-    auto outBlob = req.GetBlob(cnnNet.getOutputsInfo().begin()->first);
-    {
-        auto inMem = inBlob->cbuffer();
-        const auto* inData = inMem.as<const float*>();
-        auto outMem = outBlob->cbuffer();
-        const auto* outData = outMem.as<const float*>();
-        ASSERT_EQ(inBlob->size(), outBlob->size());
-        for (size_t i = 0; i < inBlob->size(); i++)
-            ASSERT_EQ(inData[i] / 2, outData[i]);
-    }
-}
-
-typedef std::tuple<
-        InferenceEngine::Precision,         // Network precision
-        InferenceEngine::Precision,         // Set input precision
-        InferenceEngine::Precision,         // Set output precision
-        InferenceEngine::Layout,            // Network layout - always NCHW
-        InferenceEngine::Layout,            // Set input layout
-        InferenceEngine::Layout,            // Set output layout
-        bool,                               // SetBlob or GetBlob for input blob
-        bool,                               // SetBlob or GetBlob for output blob
-        std::string,                        // Device name
-        std::map<std::string, std::string>  // Config
-> PreprocessConversionParams;
-
-class InferRequestPreprocessConversionTest : public testing::WithParamInterface<PreprocessConversionParams>,
-                                             public BehaviorTestsUtils::IEPluginTestBase {
-public:
-    static std::string getTestCaseName(testing::TestParamInfo<PreprocessConversionParams> obj) {
-        InferenceEngine::Precision netPrecision, iPrecision, oPrecision;
-        InferenceEngine::Layout netLayout, iLayout, oLayout;
-        bool setInputBlob, setOutputBlob;
-        std::string target_device;
-        std::map<std::string, std::string> configuration;
-        std::tie(netPrecision, iPrecision, oPrecision,
-                 netLayout, iLayout, oLayout,
-                 setInputBlob, setOutputBlob,
-                 target_device, configuration) = obj.param;
-        std::replace(target_device.begin(), target_device.end(), ':', '_');
-        std::ostringstream result;
-        result << "netPRC=" << netPrecision.name() << "_";
-        result << "iPRC=" << iPrecision.name() << "_";
-        result << "oPRC=" << oPrecision.name() << "_";
-        result << "netLT=" << netLayout << "_";
-        result << "iLT=" << iLayout << "_";
-        result << "oLT=" << oLayout << "_";
-        result << "setIBlob=" << setInputBlob << "_";
-        result << "setOBlob=" << setOutputBlob << "_";
-        result << "target_device=" << target_device;
-        if (!configuration.empty()) {
-            for (auto& configItem : configuration) {
-                result << "configItem=" << configItem.first << "_" << configItem.second << "_";
-            }
-        }
-        return result.str();
-    }
-
-    static InferenceEngine::Layout getOppositeLayout(InferenceEngine::Layout l) {
-        if (InferenceEngine::Layout::NCHW == l) {
-            return InferenceEngine::Layout::NHWC;
-        } else if (InferenceEngine::Layout::NHWC == l) {
-            return InferenceEngine::Layout::NCHW;
-        }
-        return InferenceEngine::Layout::ANY;
-    }
-
-    static InferenceEngine::Precision getOppositePrecision(InferenceEngine::Precision p) {
-        if (InferenceEngine::Precision::U8 == p) {
-            return InferenceEngine::Precision::FP32;
-        } else if (InferenceEngine::Precision::FP32 == p) {
-            return InferenceEngine::Precision::U8;
-        }
-        return InferenceEngine::Precision::UNSPECIFIED;
-    }
-
-    void SetUp() override {
-        std::tie(netPrecision, iPrecision, oPrecision,
-                 netLayout, iLayout, oLayout,
-                 setInputBlob, setOutputBlob,
-                 target_device, configuration) = this->GetParam();
-        // Skip test according to plugin specific disabledTestPatterns() (if any)
-        SKIP_IF_CURRENT_TEST_IS_DISABLED()
-        APIBaseTest::SetUp();
-    }
-
-    void TearDown() override {
-        if (!configuration.empty()) {
-            PluginCache::get().reset();
-        }
-        APIBaseTest::TearDown();
-    }
-
-    std::shared_ptr<InferenceEngine::Core> ie = PluginCache::get().ie();
-    InferenceEngine::Precision netPrecision, iPrecision, oPrecision;
-    InferenceEngine::Layout netLayout, iLayout, oLayout;
-    bool setInputBlob, setOutputBlob;
-    std::map<std::string, std::string> configuration;
-};
-
-TEST_P(InferRequestPreprocessConversionTest, Infer) {
-    std::shared_ptr<ngraph::Function> ngraph;
-    unsigned int shape_size = 9, channels = 3, batch = 1, offset = 0;
-    {
-        ngraph::PartialShape shape({batch, channels, shape_size, shape_size});
-        ngraph::element::Type type(InferenceEngine::details::convertPrecision(netPrecision));
-        auto param = std::make_shared<ngraph::op::Parameter>(type, shape);
-        param->set_friendly_name("param");
-        auto relu = std::make_shared<ngraph::op::Relu>(param);
-        relu->set_friendly_name("relu");
-        auto result = std::make_shared<ngraph::op::Result>(relu);
-        result->set_friendly_name("result");
-
-        ngraph::ParameterVector params = {param};
-        ngraph::ResultVector results = {result};
-
-        ngraph = std::make_shared<ngraph::Function>(results, params);
-    }
-
-    // Create CNNNetwork from ngraph::Function
-    InferenceEngine::CNNNetwork cnnNet(ngraph);
-
-    cnnNet.getInputsInfo().begin()->second->setPrecision(iPrecision);
-    cnnNet.getInputsInfo().begin()->second->setLayout(iLayout);
-    cnnNet.getOutputsInfo().begin()->second->setPrecision(oPrecision);
-    cnnNet.getOutputsInfo().begin()->second->setLayout(oLayout);
-
-    // Load CNNNetwork to target plugins
-    auto execNet = ie->LoadNetwork(cnnNet, target_device, configuration);
-    // Create InferRequest
-    auto req = execNet.CreateInferRequest();
-
-    // unsigned int stride = shape_size + offset;
-    // std::vector<float> blobData(batch * channels * stride * stride, 0);
-    // InferenceEngine::BlockingDesc blockDesc({ batch, shape_size, shape_size, channels },
-    //                                         { 0, 2, 3, 1 },
-    //                                         0,
-    //                                         { 0, 0, 0, 0 },
-    //                                         { channels * stride * stride, channels * stride, channels, 1 });
-    // InferenceEngine::TensorDesc desc(
-    //         InferenceEngine::Precision::FP32,
-    //         { batch, channels, shape_size, shape_size }, blockDesc);
-    (void)offset;
-
-    InferenceEngine::Blob::Ptr inBlob = nullptr, outBlob = nullptr;
-
-    if (setInputBlob) {
-        inBlob = make_blob_with_precision(cnnNet.getInputsInfo().begin()->second->getTensorDesc());
-        inBlob->allocate();
-        req.SetBlob("param", inBlob);
-    } else {
-        inBlob = req.GetBlob("param");
-    }
-
-    if (setOutputBlob) {
-        outBlob = make_blob_with_precision(cnnNet.getOutputsInfo().begin()->second->getTensorDesc());
-        outBlob->allocate();
-        req.SetBlob("relu", outBlob);
-    } else {
-        outBlob = req.GetBlob("relu");
-    }
-
-    // Fill input
-    {
-        auto lockedMem = inBlob->buffer();
-        auto desc = inBlob->getTensorDesc();
-
-        if (desc.getPrecision() == InferenceEngine::Precision::FP32) {
-            auto *inData = lockedMem.as<float*>();
-            for (size_t i = 0; i < inBlob->size(); i++)
-                inData[desc.offset(i)] = static_cast<float>(i);
-        } else if (desc.getPrecision() == InferenceEngine::Precision::U8) {
-            auto *inData = lockedMem.as<std::uint8_t*>();
-            for (size_t i = 0; i < inBlob->size(); i++)
-                inData[desc.offset(i)] = static_cast<std::uint8_t>(i);
-        } else {
-            ASSERT_TRUE(false);
-        }
-    }
-
-    req.Infer();
-
-    // Check output
-    {
-        auto outMem = outBlob->cbuffer();
-        auto desc = outBlob->getTensorDesc();
-
-        if (desc.getPrecision() == InferenceEngine::Precision::FP32) {
-            const auto* outData = outMem.as<const float*>();
-            ASSERT_EQ(inBlob->size(), outBlob->size());
-            for (size_t i = 0; i < inBlob->size(); i++)
-                ASSERT_EQ(i, outData[desc.offset(i)]) << i;
-        } else if (desc.getPrecision() == InferenceEngine::Precision::U8) {
-            const auto* outData = outMem.as<const std::uint8_t*>();
-            ASSERT_EQ(inBlob->size(), outBlob->size());
-            for (size_t i = 0; i < inBlob->size(); i++)
-                ASSERT_EQ(i, outData[desc.offset(i)]) << i;
-        } else {
-            ASSERT_TRUE(false);
-        }
-    }
-}
-
-typedef std::tuple<
-        InferenceEngine::Precision,         // Network precision
-        bool,                               // Change input precision
-        bool,                               // Change output precision
-        InferenceEngine::Layout,            // Network layout - always NCHW
-        bool,                               // Change input layout
-        bool,                               // Change output layout
-        bool,                               // SetBlob or GetBlob for input blob
-        bool,                               // SetBlob or GetBlob for output blob
-        std::string,                        // Device name
-        std::map<std::string, std::string>  // Config
-> PreprocessSetBlobCheckParams;
-
-class InferRequestPreprocessDynamicallyInSetBlobTest : public testing::WithParamInterface<PreprocessSetBlobCheckParams>,
-                                                       public BehaviorTestsUtils::IEPluginTestBase {
-public:
-    static std::string getTestCaseName(testing::TestParamInfo<PreprocessSetBlobCheckParams> obj) {
-        InferenceEngine::Precision netPrecision;
-        InferenceEngine::Layout netLayout;
-        bool changeIPrecision, changeOPrecision;
-        bool changeILayout, changeOLayout;
-        bool setInputBlob, setOutputBlob;
-        std::string target_device;
-        std::map<std::string, std::string> configuration;
-        std::tie(netPrecision, changeIPrecision, changeOPrecision,
-                 netLayout, changeILayout, changeOLayout,
-                 setInputBlob, setOutputBlob,
-                 target_device, configuration) = obj.param;
-        std::replace(target_device.begin(), target_device.end(), ':', '_');
-        std::ostringstream result;
-        result << "netPRC=" << netPrecision.name() << "_";
-        result << "iPRC=" << changeIPrecision << "_";
-        result << "oPRC=" << changeOPrecision << "_";
-        result << "netLT=" << netLayout << "_";
-        result << "iLT=" << changeILayout << "_";
-        result << "oLT=" << changeOLayout << "_";
-        result << "setIBlob=" << setInputBlob << "_";
-        result << "setOBlob=" << setOutputBlob << "_";
-        result << "target_device=" << target_device;
-        if (!configuration.empty()) {
-            for (auto& configItem : configuration) {
-                result << "configItem=" << configItem.first << "_" << configItem.second << "_";
-            }
-        }
-        return result.str();
-    }
-
-    InferenceEngine::Layout getOppositeLayout(InferenceEngine::Layout l) {
-        if (InferenceEngine::Layout::NCHW == l) {
-            return InferenceEngine::Layout::NHWC;
-        } else if (InferenceEngine::Layout::NHWC == l) {
-            return InferenceEngine::Layout::NCHW;
-        }
-        return InferenceEngine::Layout::ANY;
-    }
-
-    InferenceEngine::Precision getOppositePrecision(InferenceEngine::Precision p) {
-        if (InferenceEngine::Precision::U8 == p) {
-            return InferenceEngine::Precision::FP32;
-        } else if (InferenceEngine::Precision::FP32 == p) {
-            return InferenceEngine::Precision::U8;
-        }
-        return InferenceEngine::Precision::UNSPECIFIED;
-    }
-
-    void SetUp() override {
-        // Skip test according to plugin specific disabledTestPatterns() (if any)
-        SKIP_IF_CURRENT_TEST_IS_DISABLED()
-        std::tie(netPrecision, changeIPrecision, changeOPrecision,
-                 netLayout, changeILayout, changeOLayout,
-                 setInputBlob, setOutputBlob,
-                 target_device, configuration) = this->GetParam();
-        APIBaseTest::SetUp();
-    }
-
-    void TearDown() override {
-        if (!configuration.empty()) {
-            PluginCache::get().reset();
-        }
-        APIBaseTest::TearDown();
-    }
-
-    std::shared_ptr<InferenceEngine::Core> ie = PluginCache::get().ie();
-    InferenceEngine::Precision netPrecision;
-    bool changeIPrecision, changeOPrecision;
-    InferenceEngine::Layout netLayout;
-    bool changeILayout, changeOLayout;
-    bool setInputBlob, setOutputBlob;
-    std::map<std::string, std::string> configuration;
-};
-
-TEST_P(InferRequestPreprocessDynamicallyInSetBlobTest, Infer) {
-    std::shared_ptr<ngraph::Function> ngraph;
-    unsigned int shape_size = 9, channels = 3, batch = 1;
-    {
-        ngraph::PartialShape shape({batch, channels, shape_size, shape_size});
-        ngraph::element::Type type(InferenceEngine::details::convertPrecision(netPrecision));
-        auto param = std::make_shared<ngraph::op::Parameter>(type, shape);
-        param->set_friendly_name("param");
-        auto relu = std::make_shared<ngraph::op::Relu>(param);
-        relu->set_friendly_name("relu");
-        auto result = std::make_shared<ngraph::op::Result>(relu);
-        result->set_friendly_name("result");
-
-        ngraph::ParameterVector params = {param};
-        ngraph::ResultVector results = {result};
-
-        ngraph = std::make_shared<ngraph::Function>(results, params);
-    }
-
-    // Create CNNNetwork from ngraph::Function
-    InferenceEngine::CNNNetwork cnnNet(ngraph);
-
-    // Load CNNNetwork to target plugins
-    auto execNet = ie->LoadNetwork(cnnNet, target_device, configuration);
-    auto req = execNet.CreateInferRequest();
-    InferenceEngine::Blob::Ptr inBlob = nullptr, outBlob = nullptr;
-
-    // create input blob
-
-    auto recreateInputBlob = [&] (InferenceEngine::Blob::Ptr & _inBlob) {
-        auto desc = cnnNet.getInputsInfo().begin()->second->getTensorDesc();
-        desc = InferenceEngine::TensorDesc(
-            changeIPrecision ? getOppositePrecision(desc.getPrecision()) : desc.getPrecision(),
-            desc.getDims(),
-            changeILayout ? getOppositeLayout(desc.getLayout()) : desc.getLayout());
-        auto tempBlob = make_blob_with_precision(desc);
-        tempBlob->allocate();
-
-        _inBlob = std::move(tempBlob);
-    };
-
-    if (setInputBlob) {
-        recreateInputBlob(inBlob);
-        if (changeIPrecision) {
-            EXPECT_THROW(req.SetBlob("param", inBlob), InferenceEngine::ParameterMismatch);
-            // fallback
-            inBlob = req.GetBlob("param");
-        } else {
-            EXPECT_NO_THROW(req.SetBlob("param", inBlob));
-        }
-    } else {
-        inBlob = req.GetBlob("param");
-        recreateInputBlob(inBlob);
-    }
-
-    // Fill input
-    {
-        auto lockedMem = inBlob->buffer();
-        auto desc = inBlob->getTensorDesc();
-
-        if (desc.getPrecision() == InferenceEngine::Precision::FP32) {
-            auto *inData = lockedMem.as<float*>();
-            for (size_t i = 0; i < inBlob->size(); i++)
-                inData[desc.offset(i)] = static_cast<float>(i);
-        } else if (desc.getPrecision() == InferenceEngine::Precision::U8) {
-            auto *inData = lockedMem.as<std::uint8_t*>();
-            for (size_t i = 0; i < inBlob->size(); i++)
-                inData[desc.offset(i)] = static_cast<std::uint8_t>(i);
-        } else {
-            ASSERT_TRUE(false);
-        }
-    }
-
-    // create output blob
-
-    auto recreateOutputBlob = [&] (InferenceEngine::Blob::Ptr & _outBlob) {
-        auto desc = cnnNet.getOutputsInfo().begin()->second->getTensorDesc();
-        desc = InferenceEngine::TensorDesc(
-            changeOPrecision ? getOppositePrecision(desc.getPrecision()) : desc.getPrecision(),
-            desc.getDims(),
-            changeOLayout ? getOppositeLayout(desc.getLayout()) : desc.getLayout());
-        auto tempBlob = make_blob_with_precision(desc);
-        tempBlob->allocate();
-
-        _outBlob = std::move(tempBlob);
-    };
-
-    if (setOutputBlob) {
-        recreateOutputBlob(outBlob);
-        if (changeOPrecision) {
-            ASSERT_THROW(req.SetBlob("relu", outBlob), InferenceEngine::ParameterMismatch);
-            // fallback
-            outBlob = req.GetBlob("relu");
-        } else {
-            ASSERT_NO_THROW(req.SetBlob("relu", outBlob));
-        }
-    } else {
-        outBlob = req.GetBlob("relu");
-        recreateOutputBlob(outBlob);
-    }
-
-    if (setOutputBlob && setInputBlob) {
-        ASSERT_NO_THROW(req.Infer());
-    } else {
-        // TODO: if blob from GetBlob is re-created, no checks are performed
-        // should be "GetBlob re-creation error mismatch"
-        // EXPECT_THROW(req.Infer(), InferenceEngine::Exception);
-
-        ASSERT_NO_THROW(req.Infer());
-    }
-
-    // Check output
-    {
-        auto outMem = outBlob->cbuffer();
-        auto desc = outBlob->getTensorDesc();
-
-        if (desc.getPrecision() == InferenceEngine::Precision::FP32) {
-            const auto* outData = outMem.as<const float*>();
-            ASSERT_EQ(inBlob->size(), outBlob->size());
-            for (size_t i = 0; i < inBlob->size(); i++)
-                ASSERT_EQ(i, outData[desc.offset(i)]) << i;
-        } else if (desc.getPrecision() == InferenceEngine::Precision::U8) {
-            const auto* outData = outMem.as<const std::uint8_t*>();
-            ASSERT_EQ(inBlob->size(), outBlob->size());
-            for (size_t i = 0; i < inBlob->size(); i++)
-                ASSERT_EQ(i, outData[desc.offset(i)]) << i;
-        } else {
-            ASSERT_TRUE(false);
-        }
-    }
-}
-
-TEST_P(InferRequestPreprocessTest, InferWithRGB2BGRConversion) {
-    std::shared_ptr<ngraph::Function> ngraphFunc;
-    const unsigned int shape_size = 9, channels = 3, batch = 1;
-    {
-        ngraph::PartialShape shape({batch, channels, shape_size, shape_size});
-        ngraph::element::Type type(InferenceEngine::details::convertPrecision(netPrecision));
-        auto param = std::make_shared<ngraph::op::Parameter>(type, shape);
-        param->set_friendly_name("param");
-        auto relu = std::make_shared<ngraph::op::Relu>(param);
-        relu->set_friendly_name("relu");
-        auto result = std::make_shared<ngraph::op::Result>(relu);
-        result->set_friendly_name("result");
-
-        ngraph::ParameterVector params = {param};
-        ngraph::ResultVector results = {result};
-
-        ngraphFunc = std::make_shared<ngraph::Function>(results, params);
-    }
-
-    // Create CNNNetwork from ngraph::Function
-    InferenceEngine::CNNNetwork cnnNet(ngraphFunc);
-
-    auto &preProcess = cnnNet.getInputsInfo().begin()->second->getPreProcess();
-    preProcess.setColorFormat(InferenceEngine::ColorFormat::BGR);
-    // Load CNNNetwork to target plugins
-    auto execNet = ie->LoadNetwork(cnnNet, target_device, configuration);
-    // Create InferRequest
-    auto req = execNet.CreateInferRequest();
-
-    ASSERT_NO_THROW(req.Infer());
-}
-
-} // namespace BehaviorTestsDefinitions
diff --git a/src/tests/functional/plugin/shared/src/behavior/executable_network/locale.cpp b/src/tests/functional/plugin/shared/src/behavior/executable_network/locale.cpp
deleted file mode 100644
index 5a98600e12b654..00000000000000
--- a/src/tests/functional/plugin/shared/src/behavior/executable_network/locale.cpp
+++ /dev/null
@@ -1,70 +0,0 @@
-// Copyright (C) 2018-2023 Intel Corporation
-// SPDX-License-Identifier: Apache-2.0
-//
-
-#include "behavior/executable_network/locale.hpp"
-
-#include <locale.h>
-
-#include "functional_test_utils/summary/api_summary.hpp"
-#include "openvino/core/model.hpp"
-#include "openvino/op/constant.hpp"
-#include "openvino/op/gelu.hpp"
-#include "openvino/op/parameter.hpp"
-#include "openvino/op/reshape.hpp"
-#include "openvino/op/result.hpp"
-#include "openvino/op/swish.hpp"
-
-namespace BehaviorTestsDefinitions {
-
-inline std::shared_ptr<ov::Model> makeTestModel(std::vector<size_t> inputShape = {1, 1, 32, 32}) {
-    ov::Shape in_shape(inputShape);
-    auto et = ov::element::Type_t::f16;
-    auto in = std::make_shared<ov::op::v0::Parameter>(et, in_shape);
-    auto gelu = std::make_shared<ov::op::v7::Gelu>(in);
-    auto swish_const = ov::op::v0::Constant::create(et, ov::Shape{}, {2.5f});
-    auto swish = std::make_shared<ov::op::v4::Swish>(gelu, swish_const);
-    ov::Shape reluShape = swish->outputs()[0].get_tensor().get_shape();
-    std::vector<size_t> constShape2 = {1, ov::shape_size(reluShape)};
-    auto const2 = ov::op::v0::Constant::create(ov::element::i64, ov::Shape{2}, constShape2);
-    auto reshape2 = std::make_shared<ov::op::v1::Reshape>(swish, const2, false);
-    ov::ResultVector results{std::make_shared<ov::op::v0::Result>(reshape2)};
-    std::shared_ptr<ov::Model> fnPtr = std::make_shared<ov::Model>(results, ov::ParameterVector{in});
-    return fnPtr;
-}
-
-std::string CustomLocaleTest::getTestCaseName(const testing::TestParamInfo<LocaleParams>& obj) {
-    std::ostringstream results;
-    std::string targetDevice, localeName;
-    std::tie(localeName, targetDevice) = obj.param;
-    std::replace(localeName.begin(), localeName.end(), '-', '.');
-    std::replace(targetDevice.begin(), targetDevice.end(), ':', '.');
-    results << "locale=" << localeName << "_"
-            << "targetDevice=" << targetDevice;
-    return results.str();
-}
-
-void CustomLocaleTest::SetUp() {
-    std::tie(localeName, target_device) = GetParam();
-    SKIP_IF_CURRENT_TEST_IS_DISABLED()
-    APIBaseTest::SetUp();
-    testName = ::testing::UnitTest::GetInstance()->current_test_info()->name();
-    function = makeTestModel();
-}
-
-TEST_P(CustomLocaleTest, CanLoadNetworkWithCustomLocale) {
-    auto prev = std::locale().name();
-    setlocale(LC_ALL, localeName.c_str());
-    setlocale(LC_NUMERIC, localeName.c_str());
-    setlocale(LC_TIME, localeName.c_str());
-
-    std::shared_ptr<InferenceEngine::Core> ie = PluginCache::get().ie(target_device);
-    InferenceEngine::CNNNetwork cnnNet(function);
-    ASSERT_NO_THROW(ie->LoadNetwork(cnnNet, target_device));
-
-    setlocale(LC_ALL, prev.c_str());
-    setlocale(LC_NUMERIC, prev.c_str());
-    setlocale(LC_TIME, prev.c_str());
-}
-
-} // namespace BehaviorTestsDefinitions
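
Note: the port target named in the commit message, CanCompileModelWithCustomLocale, is expected to carry the same idea as the deleted test above, expressed in API 2.0 terms. An illustrative sketch (the helper name and structure here are assumptions, not the actual ported code):

    #include <gtest/gtest.h>
    #include <clocale>
    #include <string>
    #include "openvino/core/model.hpp"
    #include "openvino/runtime/core.hpp"

    void check_compile_under_locale(const std::shared_ptr<ov::Model>& model,
                                    const std::string& device,
                                    const std::string& locale_name) {
        const std::string prev = std::setlocale(LC_ALL, nullptr);  // remember current locale
        if (std::setlocale(LC_ALL, locale_name.c_str())) {         // e.g. "ru_RU.UTF-8"; skip if absent
            ov::Core core;
            // Compilation must not depend on LC_NUMERIC/LC_TIME settings
            EXPECT_NO_THROW(core.compile_model(model, device));
        }
        std::setlocale(LC_ALL, prev.c_str());                      // restore previous locale
    }
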