Skip to content

Commit

Permalink
Remove NGraphFunctions namespace
Browse files Browse the repository at this point in the history
  • Loading branch information
olpipi committed Mar 22, 2024
1 parent 791feb4 commit 2a43db2
Show file tree
Hide file tree
Showing 6 changed files with 22 additions and 28 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -65,10 +65,10 @@ TEST_F(DenormalNullifyCheck, smoke_CPU_Denormal_Check) {
constexpr unsigned denormalsCount = 15u;
constexpr uint32_t denormalsRange = (0xffffffffu >> 9u) - 1;
testing::internal::Random random(seed);
auto randomRange = NGraphFunctions::Utils::generateVector<ov::element::f32>(elemsCount, 10, -10);
auto randomRange = ov::test::utils::generateVector<ov::element::f32>(elemsCount, 10, -10);

for (auto& interval : intervals) {
auto randomIndices = NGraphFunctions::Utils::generateVector<ov::element::u32>(denormalsCount, interval.second, interval.first);
auto randomIndices = ov::test::utils::generateVector<ov::element::u32>(denormalsCount, interval.second, interval.first);
std::unordered_set<decltype(randomIndices)::value_type> randomIndexSet(randomIndices.begin(), randomIndices.end());
for (size_t i = 0; i < elemsCount; ++i) {
if (randomIndexSet.count(i)) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -183,14 +183,14 @@ const auto fusingSqrt = fusingSpecificParams{std::make_shared<postNodesMgr>(std:
// Fusing config: fuses a per-channel PRelu (LeakyRelu with random per-channel
// slopes) after the target node. NOTE(review): the diff capture had left both
// the removed NGraphFunctions::Utils:: call and its ov::test::utils::
// replacement in place, redeclaring `data` in the same scope; only the
// post-commit line is kept.
const auto fusingPReluPerChannel = fusingSpecificParams{std::make_shared<postNodesMgr>(std::vector<postNodeBuilder>{
    {[](postNodeConfig& cfg){
        ov::Shape newShape = generatePerChannelShape(cfg.target);
        // Random slope per channel; shape matches the target's channel dim.
        auto data = ov::test::utils::generateVector<ov::element::Type_t::f32>(ov::shape_size(newShape));
        return utils::make_activation(cfg.input, cfg.type, utils::LeakyRelu, newShape, data);
    }, "PRelu(PerChannel)"}}), {"PRelu"}};

// Fusing config: fuses a per-tensor PRelu (single random slope broadcast over
// the whole tensor) after the target node. NOTE(review): the diff capture had
// left both the removed NGraphFunctions::Utils:: call and its
// ov::test::utils:: replacement in place, redeclaring `data` in the same
// scope; only the post-commit line is kept.
const auto fusingPReluPerTensor = fusingSpecificParams{std::make_shared<postNodesMgr>(std::vector<postNodeBuilder>{
    {[](postNodeConfig& cfg){
        // Scalar-like shape {1, 1}: one slope value shared by every element.
        ov::Shape shape(1, 1);
        auto data = ov::test::utils::generateVector<ov::element::Type_t::f32>(ov::shape_size(shape));
        return utils::make_activation(cfg.input, cfg.type, utils::LeakyRelu, shape, data);
    }, "PRelu(PerTensor)"}}), {"PRelu"}};

Expand Down Expand Up @@ -465,15 +465,15 @@ const auto fusingPRelu1D = fusingSpecificParams{std::make_shared<postNodesMgr>(s
{[](postNodeConfig& cfg){
auto shape = cfg.input->get_output_partial_shape(0);
ov::Shape newShape({static_cast<size_t>(shape[1].get_length())});
auto data = NGraphFunctions::Utils::generateVector<ov::element::Type_t::f32>(ov::shape_size(newShape));
auto data = ov::test::utils::generateVector<ov::element::Type_t::f32>(ov::shape_size(newShape));
return utils::make_activation(cfg.input, cfg.type, utils::LeakyRelu, newShape, data);
}, "PRelu1D"}}), {"PRelu"}};

const auto fusingPRelu1DScaleShift = fusingSpecificParams{std::make_shared<postNodesMgr>(std::vector<postNodeBuilder>{
{[](postNodeConfig& cfg){
auto shape = cfg.input->get_output_partial_shape(0);
ov::Shape newShape({static_cast<size_t>(shape[1].get_length())});
auto data = NGraphFunctions::Utils::generateVector<ov::element::Type_t::f32>(ov::shape_size(newShape));
auto data = ov::test::utils::generateVector<ov::element::Type_t::f32>(ov::shape_size(newShape));
return utils::make_activation(cfg.input, cfg.type, utils::LeakyRelu, newShape, data);
}, "PRelu1D"},
{[](postNodeConfig& cfg) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -264,15 +264,15 @@ ov::Tensor generate(const std::shared_ptr<ov::op::v0::FakeQuantize>& node,
int seed = 1;
size_t constDataSize = ov::shape_size(targetShape);
std::vector<float> inputLowData, inputHighData, outputLowData, outputHighData;
inputLowData = NGraphFunctions::Utils::generateVector<ov::element::f32>(constDataSize, 10, 1, seed);
inputLowData = ov::test::utils::generateVector<ov::element::f32>(constDataSize, 10, 1, seed);
if (node->get_levels() != 2) {
inputHighData = NGraphFunctions::Utils::generateVector<ov::element::f32>(constDataSize, 10, 1, seed);
outputLowData = NGraphFunctions::Utils::generateVector<ov::element::f32>(constDataSize, 10, 1, seed);
outputHighData = NGraphFunctions::Utils::generateVector<ov::element::f32>(constDataSize, 10, 1, seed);
inputHighData = ov::test::utils::generateVector<ov::element::f32>(constDataSize, 10, 1, seed);
outputLowData = ov::test::utils::generateVector<ov::element::f32>(constDataSize, 10, 1, seed);
outputHighData = ov::test::utils::generateVector<ov::element::f32>(constDataSize, 10, 1, seed);
} else {
inputHighData = inputLowData;
outputLowData = NGraphFunctions::Utils::generateVector<ov::element::f32>(constDataSize, 10, 1, seed);
outputHighData = NGraphFunctions::Utils::generateVector<ov::element::f32>(constDataSize, 10, 1, seed);
outputLowData = ov::test::utils::generateVector<ov::element::f32>(constDataSize, 10, 1, seed);
outputHighData = ov::test::utils::generateVector<ov::element::f32>(constDataSize, 10, 1, seed);

for (int i = 0; i < constDataSize; i++) {
if (outputLowData[i] > outputHighData[i]) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -15,8 +15,9 @@
#include "openvino/core/type/element_type_traits.hpp"
#include "openvino/runtime/tensor.hpp"

namespace NGraphFunctions {
namespace Utils {
namespace ov {
namespace test {
namespace utils {

template <ov::element::Type_t dType>
std::vector<typename ov::element_type_traits<dType>::value_type> inline generateVector(
Expand Down Expand Up @@ -105,13 +106,6 @@ std::vector<toType> castVector(const std::vector<fromType>& vec) {
return resVec;
}

} // namespace Utils
} // namespace NGraphFunctions

namespace ov {
namespace test {
namespace utils {

inline void fill_data(float* data, size_t size, size_t duty_ratio = 10) {
for (size_t i = 0; i < size; i++) {
if ((i / duty_ratio) % 2 == 1) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,7 @@ std::shared_ptr<ov::Node> make_constant(const ov::element::Type& type,
return std::make_shared<ov::op::v0::Constant>( \
type, \
shape, \
NGraphFunctions::Utils::generateVector<TYPE>(ov::shape_size(shape), \
generateVector<TYPE>(ov::shape_size(shape), \
ov::element_type_traits<TYPE>::value_type(up_to), \
ov::element_type_traits<TYPE>::value_type(start_from), \
seed)); \
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -41,15 +41,15 @@ std::shared_ptr<ov::Node> make_fake_quantize(const ov::Output<ov::Node>& in,
const int32_t seed) {
size_t constDataSize = ov::shape_size(constShapes);
std::vector<float> inputLowData, inputHighData, outputLowData, outputHighData;
inputLowData = NGraphFunctions::Utils::generateVector<ov::element::Type_t::f32>(constDataSize, 10, 1, seed);
inputLowData = ov::test::utils::generateVector<ov::element::Type_t::f32>(constDataSize, 10, 1, seed);
if (levels != 2) {
inputHighData = NGraphFunctions::Utils::generateVector<ov::element::Type_t::f32>(constDataSize, 10, 1, seed);
outputLowData = NGraphFunctions::Utils::generateVector<ov::element::Type_t::f32>(constDataSize, 10, 1, seed);
outputHighData = NGraphFunctions::Utils::generateVector<ov::element::Type_t::f32>(constDataSize, 10, 1, seed);
inputHighData = ov::test::utils::generateVector<ov::element::Type_t::f32>(constDataSize, 10, 1, seed);
outputLowData = ov::test::utils::generateVector<ov::element::Type_t::f32>(constDataSize, 10, 1, seed);
outputHighData = ov::test::utils::generateVector<ov::element::Type_t::f32>(constDataSize, 10, 1, seed);
} else {
inputHighData = inputLowData;
outputLowData = NGraphFunctions::Utils::generateVector<ov::element::Type_t::f32>(constDataSize, 10, 1, seed);
outputHighData = NGraphFunctions::Utils::generateVector<ov::element::Type_t::f32>(constDataSize, 10, 1, seed);
outputLowData = ov::test::utils::generateVector<ov::element::Type_t::f32>(constDataSize, 10, 1, seed);
outputHighData = ov::test::utils::generateVector<ov::element::Type_t::f32>(constDataSize, 10, 1, seed);

for (int i = 0; i < constDataSize; i++) {
if (outputLowData[i] > outputHighData[i]) {
Expand Down

0 comments on commit 2a43db2

Please sign in to comment.