forked from openvinotoolkit/openvino
-
Notifications
You must be signed in to change notification settings - Fork 0
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
ROI tensor support for Template plugin (openvinotoolkit#9914)
* ROI tensor support for Template plugin + tests for Template and CPU plugins. GPU doesn't support ROI tensors, so tests were not added for GPU. * Added asserts for unsupported mixed axis order (like 0,3,1,2), and unsupported types like int4/int2 for ROI tensors
- Loading branch information
Showing
5 changed files
with
258 additions
and
4 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
20 changes: 20 additions & 0 deletions
20
...te_plugin/tests/functional/shared_tests_instances/behavior/ov_infer_request/inference.cpp
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,20 @@ | ||
// Copyright (C) 2018-2021 Intel Corporation | ||
// SPDX-License-Identifier: Apache-2.0 | ||
// | ||
|
||
#include <vector> | ||
|
||
#include "behavior/ov_infer_request/inference.hpp" | ||
|
||
namespace { | ||
|
||
using namespace ov::test::behavior; | ||
using namespace ov; | ||
|
||
INSTANTIATE_TEST_SUITE_P(smoke_BehaviorTests, OVInferRequestInferenceTests, | ||
::testing::Combine( | ||
::testing::Values(tensor_roi::roi_nchw(), tensor_roi::roi_1d()), | ||
::testing::Values(CommonTestUtils::DEVICE_TEMPLATE)), | ||
OVInferRequestInferenceTests::getTestCaseName); | ||
|
||
} // namespace |
20 changes: 20 additions & 0 deletions
20
...ests/functional/plugin/cpu/shared_tests_instances/behavior/ov_infer_request/inference.cpp
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,20 @@ | ||
// Copyright (C) 2018-2021 Intel Corporation | ||
// SPDX-License-Identifier: Apache-2.0 | ||
// | ||
|
||
#include <vector> | ||
|
||
#include "behavior/ov_infer_request/inference.hpp" | ||
|
||
namespace { | ||
|
||
using namespace ov::test::behavior; | ||
using namespace ov; | ||
|
||
INSTANTIATE_TEST_SUITE_P(smoke_BehaviorTests, OVInferRequestInferenceTests, | ||
::testing::Combine( | ||
::testing::Values(tensor_roi::roi_nchw(), tensor_roi::roi_1d()), | ||
::testing::Values(CommonTestUtils::DEVICE_CPU)), | ||
OVInferRequestInferenceTests::getTestCaseName); | ||
|
||
} // namespace |
93 changes: 93 additions & 0 deletions
93
src/tests/functional/plugin/shared/include/behavior/ov_infer_request/inference.hpp
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,93 @@ | ||
// Copyright (C) 2018-2021 Intel Corporation | ||
// SPDX-License-Identifier: Apache-2.0 | ||
// | ||
|
||
#pragma once | ||
|
||
#include <gtest/gtest.h>

#include <cstring>
#include <string>

#include <base/behavior_test_utils.hpp>

#include "functional_test_utils/ov_plugin_cache.hpp"
|
||
namespace ov { | ||
namespace test { | ||
namespace behavior { | ||
|
||
struct OVInferReqInferParam { | ||
ov::Shape m_shape; | ||
ov::Tensor m_input_tensor; | ||
std::vector<float> m_expected; | ||
std::string m_test_name; | ||
}; | ||
|
||
using OVInferRequestInferenceTestsParams = std::tuple<OVInferReqInferParam, std::string>; | ||
|
||
namespace tensor_roi { | ||
inline OVInferReqInferParam roi_nchw() { | ||
OVInferReqInferParam res; | ||
res.m_test_name = "roi_nchw"; | ||
res.m_shape = Shape{1, 2, 3, 3}; | ||
auto in_tensor = ov::Tensor(element::f32, Shape{1, 2, 5, 5}); | ||
auto in_data = std::vector<float>{ | ||
0, 1, 2, 3, 4, | ||
5, 6, 7, 8, 9, | ||
0, 1, 2, 3, 4, | ||
5, 6, 7, 8, 9, | ||
9, 8, 7, 6, 5, | ||
|
||
5, 6, 7, 8, 9, | ||
9, 8, 7, 6, 5, | ||
0, 1, 2, 3, 4, | ||
5, 6, 7, 8, 9, | ||
0, 1, 2, 3, 4 | ||
}; | ||
memcpy(in_tensor.data(), in_data.data(), in_data.size() * sizeof(float)); | ||
res.m_input_tensor = ov::Tensor(in_tensor, Coordinate{0, 0, 1, 1}, Coordinate{1, 2, 4, 4}); | ||
// Extracted 3x3 boxes, add 1 to each element | ||
res.m_expected = std::vector<float>{ | ||
7, 8, 9, | ||
2, 3, 4, | ||
7, 8, 9, | ||
|
||
9, 8, 7, | ||
2, 3, 4, | ||
7, 8, 9, | ||
}; | ||
return res; | ||
} | ||
|
||
inline OVInferReqInferParam roi_1d() { | ||
OVInferReqInferParam res; | ||
res.m_test_name = "roi_1d"; | ||
res.m_shape = Shape{3}; | ||
auto in_tensor = ov::Tensor(element::f32, Shape{5}); | ||
auto in_data = std::vector<float>{10, 20, 30, 40, 50}; | ||
memcpy(in_tensor.data(), in_data.data(), in_data.size() * sizeof(float)); | ||
res.m_input_tensor = ov::Tensor(in_tensor, Coordinate{1}, Coordinate{4}); | ||
res.m_expected = std::vector<float>{21, 31, 41}; | ||
return res; | ||
} | ||
|
||
} // namespace tensor_roi | ||
|
||
// Parameterized fixture that runs inference through an ov::InferRequest with an
// ROI input tensor and validates the produced output values.
class OVInferRequestInferenceTests : public testing::WithParamInterface<OVInferRequestInferenceTestsParams>,
                                     public CommonTestUtils::TestsCommon {
public:
    // Produces "<case-name>_targetDevice=<device>" for gtest reporting.
    static std::string getTestCaseName(const testing::TestParamInfo<OVInferRequestInferenceTestsParams>& device_name);

protected:
    void SetUp() override;

    void TearDown() override;

    // Builds a model with `num` subgraphs Parameter -> Add -> Result; tensors
    // are named "tensor_input<i>" / "tensor_output<i>".
    static std::shared_ptr<Model> create_n_inputs(size_t num, element::Type type,
                                                  const PartialShape& shape);

    std::shared_ptr<ov::Core> ie = utils::PluginCache::get().core();  // shared plugin core cache
    OVInferReqInferParam m_param;                                     // current test-case parameters
    std::string m_device_name;                                        // target device under test
};
|
||
} // namespace behavior | ||
} // namespace test | ||
} // namespace ov |
74 changes: 74 additions & 0 deletions
74
src/tests/functional/plugin/shared/src/behavior/ov_infer_request/inference.cpp
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,74 @@ | ||
// Copyright (C) 2018-2021 Intel Corporation | ||
// SPDX-License-Identifier: Apache-2.0 | ||
// | ||
|
||
#include <gtest/gtest.h> | ||
#include "openvino/opsets/opset8.hpp" | ||
#include "functional_test_utils/ov_plugin_cache.hpp" | ||
#include "behavior/ov_infer_request/inference.hpp" | ||
|
||
namespace ov { | ||
namespace test { | ||
namespace behavior { | ||
|
||
std::string OVInferRequestInferenceTests::getTestCaseName( | ||
const testing::TestParamInfo<OVInferRequestInferenceTestsParams>& obj) { | ||
return std::get<0>(obj.param).m_test_name + "_targetDevice=" + std::get<1>(obj.param); | ||
} | ||
|
||
// Unpacks the gtest parameter tuple into the fixture's members.
void OVInferRequestInferenceTests::SetUp() {
    SKIP_IF_CURRENT_TEST_IS_DISABLED()
    std::tie(m_param, m_device_name) = GetParam();
}
|
||
// No per-test cleanup is required; defined to mirror SetUp().
void OVInferRequestInferenceTests::TearDown() {}
|
||
std::shared_ptr<Model> OVInferRequestInferenceTests::create_n_inputs(size_t n, | ||
element::Type type, | ||
const PartialShape& shape) { | ||
ResultVector res; | ||
ParameterVector params; | ||
for (size_t i = 0; i < n; i++) { | ||
auto index_str = std::to_string(i); | ||
auto data1 = std::make_shared<opset8::Parameter>(type, shape); | ||
data1->set_friendly_name("input" + index_str); | ||
data1->get_output_tensor(0).set_names({"tensor_input" + index_str}); | ||
auto constant = opset8::Constant::create(type, {1}, {1}); | ||
auto op1 = std::make_shared<opset8::Add>(data1, constant); | ||
op1->set_friendly_name("Add" + index_str); | ||
auto res1 = std::make_shared<opset8::Result>(op1); | ||
res1->set_friendly_name("Result" + index_str); | ||
res1->get_output_tensor(0).set_names({"tensor_output" + index_str}); | ||
params.push_back(data1); | ||
res.push_back(res1); | ||
} | ||
return std::make_shared<Model>(res, params); | ||
} | ||
|
||
// Runs inference with an ROI tensor as the input and checks every output
// element against the precomputed expectation from the test parameters.
TEST_P(OVInferRequestInferenceTests, Inference_ROI_Tensor) {
    const auto element_count = ov::shape_size(m_param.m_shape);
    const auto model = OVInferRequestInferenceTests::create_n_inputs(1, element::f32, m_param.m_shape);
    auto execNet = ie->compile_model(model, m_device_name);
    // Create InferRequest and feed the ROI tensor by input name
    ov::InferRequest req = execNet.create_infer_request();
    req.set_tensor("tensor_input0", m_param.m_input_tensor);
    req.infer();
    auto actual_out_tensor = req.get_tensor("tensor_output0");
    const float* out_ptr = actual_out_tensor.data<float>();
    for (size_t idx = 0; idx < element_count; ++idx) {
        EXPECT_EQ(out_ptr[idx], m_param.m_expected[idx])
                << "Expected=" << m_param.m_expected[idx]
                << ", actual=" << out_ptr[idx]
                << " for " << idx;
    }
}
|
||
} // namespace behavior | ||
} // namespace test | ||
} // namespace ov |