Commit
Alexey Lebedev committed Sep 9, 2021
2 parents da642cd + aa106ad commit f79d649
Showing 227 changed files with 4,780 additions and 3,309 deletions.
123 changes: 123 additions & 0 deletions docs/template_plugin/tests/functional/op_reference/not_equal.cpp
@@ -0,0 +1,123 @@
// Copyright (C) 2018-2021 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//

#include <gtest/gtest.h>

#include <ie_core.hpp>
#include <ie_ngraph_utils.hpp>
#include <ngraph/ngraph.hpp>
#include <shared_test_classes/base/layer_test_utils.hpp>

#include "comparison.hpp"

using namespace ngraph;
using namespace InferenceEngine;
using ComparisonTypes = ngraph::helpers::ComparisonTypes;


namespace reference_tests {
namespace ComparisonOpsRefTestDefinitions {
namespace {

template <element::Type_t IN_ET>
std::vector<RefComparisonParams> generateComparisonParams(const element::Type& type) {
using T = typename element_type_traits<IN_ET>::value_type;
std::vector<RefComparisonParams> compParams {
// 1D // 2D // 3D // 4D
Builder {}
.compType(ComparisonTypes::NOT_EQUAL)
.input1({{2, 2}, type, std::vector<T> {1, 0, 10, 255}})
.input2({{2, 2}, type, std::vector<T> {1, 0, 10, 255}})
.expected({{2, 2}, element::boolean, std::vector<char> {0, 0, 0, 0}}),
Builder {}
.compType(ComparisonTypes::NOT_EQUAL)
.input1({{2, 3}, type, std::vector<T> {0, 15, 45, 10, 5, 10}})
.input2({{2, 3}, type, std::vector<T> {1, 15, 5, 10, 50, 10}})
.expected({{2, 3}, element::boolean, std::vector<char> {1, 0, 1, 0, 1, 0}}),
Builder {}
.compType(ComparisonTypes::NOT_EQUAL)
.input1({{1}, type, std::vector<T> {20}})
.input2({{1}, type, std::vector<T> {10}})
.expected({{1}, element::boolean, std::vector<char> {1}}),
Builder {}
.compType(ComparisonTypes::NOT_EQUAL)
.input1({{2, 4}, type, std::vector<T> {0, 12, 23, 0, 1, 5, 12, 8}})
.input2({{2, 4}, type, std::vector<T> {0, 12, 23, 0, 10, 5, 11, 8}})
.expected({{2, 4}, element::boolean, std::vector<char> {0, 0, 0, 0, 1, 0, 1, 0}}),
Builder {}
.compType(ComparisonTypes::NOT_EQUAL)
.input1({{3, 1, 2}, type, std::vector<T> {2, 7, 4, 7, 3, 7}})
.input2({{1, 2, 1}, type, std::vector<T> {7, 7}})
.expected({{3, 2, 2}, element::boolean, std::vector<char> {1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0}}),
Builder {}
.compType(ComparisonTypes::NOT_EQUAL)
.input1({{2, 1, 2, 1}, type, std::vector<T> {1, 2, 1, 4}})
.input2({{1, 2, 1}, type, std::vector<T> {1, 1}})
.expected({{2, 1, 2, 1}, element::boolean, std::vector<char> {0, 1, 0, 1}})};
return compParams;
}

std::vector<RefComparisonParams> generateComparisonCombinedParams() {
const std::vector<std::vector<RefComparisonParams>> compTypeParams {
generateComparisonParams<element::Type_t::f32>(element::f32),
generateComparisonParams<element::Type_t::f16>(element::f16),
generateComparisonParams<element::Type_t::i32>(element::i32),
generateComparisonParams<element::Type_t::u32>(element::u32),
generateComparisonParams<element::Type_t::u8>(element::boolean)};
std::vector<RefComparisonParams> combinedParams;

for (const auto& params : compTypeParams) {
combinedParams.insert(combinedParams.end(), params.begin(), params.end());
}
return combinedParams;
}

INSTANTIATE_TEST_SUITE_P(smoke_Comparison_With_Hardcoded_Refs, ReferenceComparisonLayerTest,
::testing::ValuesIn(generateComparisonCombinedParams()),
ReferenceComparisonLayerTest::getTestCaseName);

template <element::Type_t IN_ET>
std::vector<RefComparisonParams> generateNumericParams(const element::Type& type) {
using T = typename element_type_traits<IN_ET>::value_type;
std::vector<RefComparisonParams> compParams {
Builder {}
.compType(ComparisonTypes::NOT_EQUAL)
.input1({{4}, type, std::vector<T> {-2.5f, 25.5f, 2.25f, NAN}})
.input2({{4}, type, std::vector<T> {10.0f, 5.0f, 2.25f, 10.0f}})
.expected({{4}, element::boolean, std::vector<char> {1, 1, 0, 1}}),
Builder {}
.compType(ComparisonTypes::NOT_EQUAL)
.input1({{2, 3}, type, std::vector<T> {0.0f, NAN, NAN, 1.0f, 21.0f, -INFINITY}})
.input2({{2, 3}, type, std::vector<T> {1.0f, NAN, 23.0f, 1.0f, 19.0f, 21.0f}})
.expected({{2, 3}, element::boolean, std::vector<char> {1, 1, 1, 0, 1, 1}}),
Builder {}
.compType(ComparisonTypes::NOT_EQUAL)
.input1({{1}, type, std::vector<T> {INFINITY}})
.input2({{1}, type, std::vector<T> {INFINITY}})
.expected({{1}, element::boolean, std::vector<char> {0}}),
Builder {}
.compType(ComparisonTypes::NOT_EQUAL)
.input1({{5}, type, std::vector<T> {-2.5f, 25.5f, 2.25f, INFINITY, 6.0f}})
.input2({{5}, type, std::vector<T> {10.0f, 5.0f, 2.25f, 10.0f, -INFINITY}})
.expected({{5}, element::boolean, std::vector<char> {1, 1, 0, 1, 1}})};
return compParams;
}

std::vector<RefComparisonParams> generateNumericCombinedParams() {
const std::vector<std::vector<RefComparisonParams>> compTypeParams {
generateNumericParams<element::Type_t::f16>(element::f16),
generateNumericParams<element::Type_t::f32>(element::f32)};
std::vector<RefComparisonParams> combinedParams;

for (const auto& params : compTypeParams) {
combinedParams.insert(combinedParams.end(), params.begin(), params.end());
}
return combinedParams;
}

INSTANTIATE_TEST_SUITE_P(smoke_Numeric_With_Hardcoded_Refs, ReferenceComparisonLayerTest, ::testing::ValuesIn(generateNumericCombinedParams()),
ReferenceComparisonLayerTest::getTestCaseName);
} // namespace
} // namespace ComparisonOpsRefTestDefinitions
} // namespace reference_tests
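The expected vectors above are just element-wise NotEqual with NumPy-style broadcasting. As a sanity check (not part of the commit), the {3,1,2} vs {1,2,1} case can be reproduced with NumPy:

import numpy as np

# Reproduces the {3, 1, 2} vs {1, 2, 1} broadcast case from the test above.
a = np.array([2, 7, 4, 7, 3, 7]).reshape(3, 1, 2)
b = np.array([7, 7]).reshape(1, 2, 1)
out = np.not_equal(a, b)                  # broadcast output shape: (3, 2, 2)
print(out.astype(np.int8).ravel())        # [1 0 1 0 1 0 1 0 1 0 1 0]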
@@ -1,3 +1,4 @@
# -*- coding: utf-8 -*-
# Copyright (C) 2018-2021 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
__path__ = __import__('pkgutil').extend_path(__path__, __name__) # type: ignore # mypy issue #1422
@@ -39,7 +39,7 @@ cdef class InferRequest:
cpdef get_perf_counts(self)
cdef void user_callback(self, int status) with gil
cdef public:
_inputs_list, _outputs_list, _py_callback, _py_data, _user_blobs
_inputs_list, _outputs_list, _py_callback, _py_data, _user_blobs, _inputs_is_dynamic

cdef class IENetwork:
cdef C.IENetwork impl
@@ -29,7 +29,6 @@ from .constants import WaitMode, StatusCode, MeanVariant, layout_str_to_enum, fo

import numpy as np


warnings.filterwarnings(action="module", category=DeprecationWarning)

cdef extern from "<utility>" namespace "std" nogil:
@@ -53,6 +52,11 @@ cdef c_map_to_dict(map[string, string] c_map):
return py_dict


cdef expand_dims_to_corresponding_layout(shape, layout):
single_axes = [1] * (len(layout) - len(shape))
return single_axes + list(shape)
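The helper just left-pads the user-provided shape with 1s until it matches the layout's rank. A pure-Python rendering with an example call (illustrative only; the cdef version above is not importable from Python):

def expand_dims_to_corresponding_layout(shape, layout):
    # Prepend singleton axes so the shape has as many dimensions as the layout string.
    single_axes = [1] * (len(layout) - len(shape))
    return single_axes + list(shape)

print(expand_dims_to_corresponding_layout((3, 224, 224), "NCHW"))  # [1, 3, 224, 224]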


def get_version():
return C.get_version().decode()

@@ -271,6 +275,10 @@ cdef class Blob:
tensor_desc = TensorDesc(precision, dims, layout_int_to_str_map[layout])
return tensor_desc

def set_shape(self, new_shape):
self._initial_shape = new_shape
deref(self._ptr).setShape(new_shape)
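A minimal sketch of calling the new set_shape() by hand; normally _fill_inputs() (changed further down) does this automatically for dynamic inputs, and the request object and input name here are illustrative:

def resize_input_blob(request, name, new_shape):
    # Resize a dynamic input blob to match the data that is about to be written.
    blob = request.input_blobs[name]
    blob.set_shape(list(new_shape))
    return blob

# e.g. resize_input_blob(request, "data", (1, 3, 320, 320)) before filling blob.buffer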

## This class represents an Inference Engine entity and allows you to manipulate with plugins using unified interfaces.
cdef class IECore:
## Class constructor
@@ -815,6 +823,14 @@ cdef class DataPtr:
def initialized(self):
return deref(self._ptr).isInitialized()

@property
def is_dynamic(self):
return deref(self._ptr).isDynamic()

## get capsule with ngraph::PartialShape
def _get_partial_shape_capsule(self):
return C.getPartialShape_capsule(self._ptr)


## This class is the layer constant data representation. Provides same interface as DataPtr object except properties setters
cdef class CDataPtr:
@@ -843,6 +859,14 @@ cdef class CDataPtr:
def initialized(self):
return deref(self._ptr).isInitialized()

@property
def is_dynamic(self):
return deref(self._ptr).isDynamic()

## get capsule with ngraph::PartialShape
def _get_partial_shape_capsule(self):
return C.getPartialShape_capsule(self._ptr)


## This class represents a network instance loaded to plugin and ready for inference.
cdef class ExecutableNetwork:
@@ -912,6 +936,8 @@ cdef class ExecutableNetwork:
infer_request.impl = &(deref(self.impl).infer_requests[i])
infer_request._inputs_list = list(self.input_info.keys())
infer_request._outputs_list = list(self.outputs.keys())
for input_name in infer_request._inputs_list:
infer_request._inputs_is_dynamic[input_name] = self.input_info[input_name].input_data.is_dynamic
self._infer_requests.append(infer_request)

if len(self._infer_requests) != c_infer_requests_size:
@@ -1046,6 +1072,7 @@ cdef class InferRequest:
self._outputs_list = []
self._py_callback = lambda *args, **kwargs: None
self._py_data = None
self._inputs_is_dynamic = {}

cdef void user_callback(self, int status) with gil:
if self._py_callback:
@@ -1283,6 +1310,9 @@ cdef class InferRequest:
def _fill_inputs(self, inputs):
for k, v in inputs.items():
assert k in self._inputs_list, f"No input with name {k} found in network"
if self._inputs_is_dynamic[k]:
shape = expand_dims_to_corresponding_layout(v.shape, self.input_blobs[k].tensor_desc.layout)
self.input_blobs[k].set_shape(shape)
if self.input_blobs[k].tensor_desc.precision == "FP16":
self.input_blobs[k].buffer[:] = v.view(dtype=np.int16)
else:
@@ -1427,15 +1457,25 @@ cdef class IENetwork:
# net.reshape({input_layer: (n, c, h*2, w*2)})
# ```
def reshape(self, input_shapes: dict):
cdef map[string, vector[size_t]] c_input_shapes
cdef vector[size_t] c_shape
cdef map[string, vector[vector[int64_t]]] c_input_shapes
cdef vector[vector[int64_t]] c_shape
cdef vector[int64_t] dim
net_inputs = self.input_info
for input, shape in input_shapes.items():
c_shape = []
if input not in net_inputs:
raise AttributeError(f"Specified '{input}' layer not in network inputs '{net_inputs}'! ")
for v in shape:
c_shape.push_back(v)
if isinstance(v, list) or isinstance(v, tuple):
if len(v) < 1 or len(v) > 2:
raise ValueError(f"Incorrect PartialShape dimension definition '{v}' "
f"in shape '{shape}', expected one or two values for a dimension! ")
for d in v:
dim.push_back(d)
else:
dim.push_back(v)
c_shape.push_back(dim)
dim.clear()
c_input_shapes[input.encode()] = c_shape
self.impl.reshape(c_input_shapes)
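Taken together, the ie_api.pyx changes (is_dynamic, Blob.set_shape(), _inputs_is_dynamic and the extended reshape()) enable a flow along the following lines. This is a hedged sketch: the model paths, the "TEMPLATE" device and the use of -1 for a fully dynamic dimension are assumptions for illustration, not part of the diff.

import numpy as np
from openvino.inference_engine import IECore

ie = IECore()
net = ie.read_network(model="model.xml", weights="model.bin")  # illustrative paths
input_name = next(iter(net.input_info))

# Extended reshape() syntax: an int fixes a dimension, a (min, max) pair bounds it,
# and -1 is assumed to map to a fully dynamic ngraph::Dimension.
net.reshape({input_name: [(1, 8), 3, -1, -1]})
assert net.input_info[input_name].input_data.is_dynamic

exec_net = ie.load_network(net, "TEMPLATE")  # a device with dynamic-shape support
request = exec_net.requests[0]

# _fill_inputs() now calls Blob.set_shape() for dynamic inputs, so one request
# can accept differently sized tensors.
request.infer({input_name: np.zeros((1, 3, 224, 224), dtype=np.float32)})
request.infer({input_name: np.zeros((4, 3, 320, 320), dtype=np.float32)})

On the C++ side (ie_api_impl.cpp below), every dimension arrives as a one- or two-element vector of int64_t and is converted to ngraph::Dimension(d) or ngraph::Dimension(min, max) before the network is reshaped with a PartialShape.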

@@ -4,6 +4,8 @@

#include "ie_api_impl.hpp"

#include <ngraph/partial_shape.hpp>

#include "ie_iinfer_request.hpp"
#include "ie_plugin_config.hpp"

@@ -206,6 +208,24 @@ InferenceEnginePython::IENetwork InferenceEnginePython::read_network(std::string
return InferenceEnginePython::IENetwork(std::make_shared<InferenceEngine::CNNNetwork>(net));
}

PyObject* InferenceEnginePython::getPartialShape_capsule(InferenceEngine::CDataPtr data) {
const char* py_capsule_name = "ngraph_partial_shape";
auto ngraph_pShape_ptr = std::make_shared<ngraph::PartialShape>(data->getPartialShape());
auto* sp_copy = new std::shared_ptr<const ngraph::PartialShape>(ngraph_pShape_ptr);
auto sp_deleter = [](PyObject* capsule) {
auto* capsule_ptr = PyCapsule_GetPointer(capsule, "ngraph_partial_shape");
auto* function_sp = static_cast<std::shared_ptr<ngraph::PartialShape>*>(capsule_ptr);
if (function_sp) {
delete function_sp;
}
};
if (ngraph_pShape_ptr) {
return PyCapsule_New(sp_copy, py_capsule_name, sp_deleter);
} else {
return nullptr;
}
}

InferenceEnginePython::IENetwork::IENetwork(const std::shared_ptr<InferenceEngine::CNNNetwork>& cnn_network)
: actual(cnn_network) {
if (actual == nullptr)
@@ -289,8 +309,21 @@ size_t InferenceEnginePython::IENetwork::getBatch() {
return actual->getBatchSize();
}

void InferenceEnginePython::IENetwork::reshape(const std::map<std::string, std::vector<size_t>>& input_shapes) {
actual->reshape(input_shapes);
void InferenceEnginePython::IENetwork::reshape(
const std::map<std::string, std::vector<std::vector<int64_t>>>& input_shapes) {
std::map<std::string, ngraph::PartialShape> inputShapes;
for (auto const& input : input_shapes) {
using ngraph::Dimension;
std::vector<Dimension> dims;
for (auto const& d : input.second) {
if (d.size() == 1)
dims.push_back(Dimension(d[0]));
else if (d.size() == 2)
dims.push_back(Dimension(d[0], d[1]));
}
inputShapes[input.first] = ngraph::PartialShape(dims);
}
actual->reshape(inputShapes);
}

InferenceEnginePython::IEExecNetwork::IEExecNetwork(const std::string& name, size_t num_requests)
@@ -62,7 +62,7 @@ struct IENetwork {

const std::map<std::string, InferenceEngine::DataPtr> getOutputs();

void reshape(const std::map<std::string, std::vector<size_t>>& input_shapes);
void reshape(const std::map<std::string, std::vector<std::vector<int64_t>>>& input_shapes);

void serialize(const std::string& path_to_xml, const std::string& path_to_bin);

@@ -203,4 +203,6 @@ std::string get_version();

InferenceEnginePython::IENetwork read_network(std::string path_to_xml, std::string path_to_bin);

PyObject* getPartialShape_capsule(InferenceEngine::CDataPtr data);

}; // namespace InferenceEnginePython
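The capsule returned by getPartialShape_capsule() can only be opened under the exact name it was created with ("ngraph_partial_shape"). A small ctypes illustration of that contract (an assumption-level sketch; it is not how the nGraph Python bindings actually consume the capsule):

import ctypes

def is_partial_shape_capsule(capsule):
    # PyCapsule_IsValid() returns 1 only when the capsule carries a pointer under this exact name.
    ctypes.pythonapi.PyCapsule_IsValid.restype = ctypes.c_int
    ctypes.pythonapi.PyCapsule_IsValid.argtypes = [ctypes.py_object, ctypes.c_char_p]
    return bool(ctypes.pythonapi.PyCapsule_IsValid(capsule, b"ngraph_partial_shape"))

# e.g. is_partial_shape_capsule(data_ptr._get_partial_shape_capsule()) for a DataPtr 'data_ptr'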
@@ -23,6 +23,7 @@ cdef extern from "<inference_engine.hpp>" namespace "InferenceEngine":
const CTensorDesc& getTensorDesc() except +
size_t element_size() except +
void allocate()
void setShape(const SizeVector& dims) except +

cdef TBlob[Type].Ptr make_shared_blob[Type](const CTensorDesc& tensorDesc)

@@ -47,6 +48,7 @@ cdef extern from "<inference_engine.hpp>" namespace "InferenceEngine":
const Layout getLayout() except +
void setLayout(Layout layout) except +
const bool isInitialized() except +
bool isDynamic() except +

ctypedef shared_ptr[Data] DataPtr
ctypedef weak_ptr[Data] DataWeakPtr
@@ -178,7 +180,7 @@ cdef extern from "ie_api_impl.hpp" namespace "InferenceEnginePython":
size_t getBatch() except +
void setLayerParams(map[string, map[string, string]] params_map) except +
void serialize(const string& path_to_xml, const string& path_to_bin) except +
void reshape(map[string, vector[size_t]] input_shapes) except +
void reshape(map[string, vector[vector[int64_t]]] input_shapes) except +
object getFunction() except +
void convertToOldRepresentation() except +
string getOVNameForTensor(const string &) except +
@@ -226,3 +228,5 @@ cdef extern from "ie_api_impl.hpp" namespace "InferenceEnginePython":
cdef string get_version()

cdef IENetwork read_network(string path_to_xml, string path_to_bin)

cdef object getPartialShape_capsule(DataPtr)
15 changes: 14 additions & 1 deletion inference-engine/ie_bridges/python/tests/conftest.py
@@ -3,6 +3,7 @@

import os
import pytest
import numpy as np


def model_path(is_myriad=False):
@@ -41,7 +42,19 @@ def device():


def pytest_configure(config):
# register an additional marker for ngraph dependent tests
    # register additional markers
config.addinivalue_line(
"markers", "ngraph_dependent_test"
)
config.addinivalue_line(
"markers", "template_plugin"
)


def create_ngraph_function(inputShape):
import ngraph as ng
inputShape = ng.impl.PartialShape(inputShape)
param = ng.parameter(inputShape, dtype=np.float32, name="data")
result = ng.relu(param, name='out')
function = ng.Function(result, [param], "TestFunction")
return function
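A hedged example of how the new markers and the helper above might be used in a test; the import from conftest, the test body and the shape are illustrative, not part of the commit:

import pytest
from conftest import create_ngraph_function  # assumes conftest.py is importable in the test run

@pytest.mark.ngraph_dependent_test
@pytest.mark.template_plugin
def test_create_relu_function():
    func = create_ngraph_function([1, 3, 22, 22])
    assert len(func.get_parameters()) == 1
    assert len(func.get_results()) == 1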