Merge pull request #1 from openvinotoolkit/master

Merge from original repository

emmanuelattia authored May 30, 2020
2 parents 0efe474 + 9af51a1 commit 7917fd9
Showing 132 changed files with 3,145 additions and 1,364 deletions.
10 changes: 9 additions & 1 deletion CODEOWNERS
@@ -16,7 +16,7 @@ azure-pipelines.yml @openvinotoolkit/openvino-admins
 /inference-engine/ @openvinotoolkit/openvino-ie-maintainers
 /inference-engine/src/transformations/ @GlebKazantaev @ichuraev
 /inference-engine/src/legacy_api/ @openvinotoolkit/openvino-ngraph-maintainers
-/inference-engine/src/ir_readers/ @openvinotoolkit/openvino-ngraph-maintainers
+/inference-engine/src/readers/ @openvinotoolkit/openvino-ngraph-maintainers
 
 # IE CPU:
 /inference-engine/src/mkldnn_plugin/ @openvinotoolkit/openvino-ie-cpu-maintainers @openvinotoolkit/openvino-ie-cpu-developers
@@ -33,6 +33,14 @@ azure-pipelines.yml @openvinotoolkit/openvino-admins
 /inference-engine/src/vpu/ @openvinotoolkit/openvino-ie-vpu-maintainers
 /inference-engine/include/vpu/ @openvinotoolkit/openvino-ie-vpu-maintainers
 /inference-engine/thirdparty/movidius/ @openvinotoolkit/openvino-ie-vpu-maintainers
+/inference-engine/tests_deprecated/unit/engines/vpu/ @openvinotoolkit/openvino-ie-vpu-maintainers @openvinotoolkit/openvino-ie-tests-maintainers
+/inference-engine/tests_deprecated/functional/vpu/ @openvinotoolkit/openvino-ie-vpu-maintainers @openvinotoolkit/openvino-ie-tests-maintainers
+/inference-engine/tests_deprecated/behavior/vpu/ @openvinotoolkit/openvino-ie-vpu-maintainers @openvinotoolkit/openvino-ie-tests-maintainers
+/inference-engine/tests/functional/plugin/myriad/ @openvinotoolkit/openvino-ie-vpu-maintainers @openvinotoolkit/openvino-ie-tests-maintainers
+/inference-engine/tests/unit/vpu/ @openvinotoolkit/openvino-ie-vpu-maintainers @openvinotoolkit/openvino-ie-tests-maintainers
+/inference-engine/tests/unit/engines/vpu/ @openvinotoolkit/openvino-ie-vpu-maintainers @openvinotoolkit/openvino-ie-tests-maintainers
+/inference-engine/tools/vpu/ @openvinotoolkit/openvino-ie-vpu-maintainers
+/inference-engine/scripts/run_tests_myriad_multistick.sh @openvinotoolkit/openvino-ie-vpu-maintainers
 
 # IE GNA:
 /inference-engine/src/gna_plugin/ @openvinotoolkit/openvino-ie-gna-maintainers
6 changes: 6 additions & 0 deletions inference-engine/ie_bridges/c/src/CMakeLists.txt
@@ -21,6 +21,12 @@ target_include_directories(${TARGET_NAME} PUBLIC "${InferenceEngine_C_API_SOURCE
 
 add_cpplint_target(${TARGET_NAME}_cpplint FOR_TARGETS ${TARGET_NAME})
 
+# Workaround to avoid warnings caused by a bug in the avx512intrin.h of GCC5
+if((CMAKE_CXX_COMPILER_ID STREQUAL "GNU") AND
+   (CMAKE_CXX_COMPILER_VERSION VERSION_LESS_EQUAL 5.5))
+    set_target_properties(${TARGET_NAME} PROPERTIES LINK_FLAGS_RELEASE "-Wno-error=maybe-uninitialized -Wno-maybe-uninitialized")
+endif()
+
 # export
 
 export(TARGETS ${TARGET_NAME} NAMESPACE IE:: APPEND FILE "${CMAKE_BINARY_DIR}/targets.cmake")
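For context: on GCC 5.x, -Wmaybe-uninitialized is prone to false positives when the compiler cannot prove that a variable is assigned on every path (the avx512intrin.h header shipped with GCC5 triggers exactly this class of warning), and -Werror builds turn that into a hard failure; the flags above downgrade just that diagnostic for this target. A minimal illustration of the warning class (a hypothetical snippet, not taken from the OpenVINO sources):

#include <cstdio>

// Older GCC may report "'value' may be used uninitialized in this function"
// here, even though every path through pick() assigns it before the return.
static int pick(int flag) {
    int value;
    if (flag > 0)  value = 1;
    if (flag <= 0) value = -1;
    return value;
}

int main() {
    std::printf("%d\n", pick(2));
    return 0;
}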
@@ -14,6 +14,7 @@ set_source_files_properties(${SOURCE} PROPERTIES CYTHON_IS_CXX ON)
 # create target
 
 cython_add_module(${TARGET_NAME} ${SOURCE})
+set(INSTALLED_TARGETS ${TARGET_NAME})
 
 file(GLOB OTHER_SOURCES
      ${CMAKE_CURRENT_SOURCE_DIR}/*.pyx)
@@ -26,6 +27,7 @@ foreach(PYX_FILE ${OTHER_SOURCES})
     add_dependencies(${TARGET_NAME} ${PYX_NAME})
     target_include_directories(${PYX_NAME} PRIVATE "${CMAKE_CURRENT_SOURCE_DIR}")
     target_link_libraries(${PYX_NAME} PRIVATE ${InferenceEngine_LIBRARIES})
+    list(APPEND INSTALLED_TARGETS ${PYX_NAME})
 endforeach()
 
 function(python_disable_deprecated_warnings)
@@ -64,7 +66,7 @@ endif()
 
 # install
 
-install(TARGETS ${TARGET_NAME}
+install(TARGETS ${INSTALLED_TARGETS}
         RUNTIME DESTINATION python/${PYTHON_VERSION}/openvino/inference_engine COMPONENT ${PYTHON_VERSION}
         ARCHIVE DESTINATION python/${PYTHON_VERSION}/openvino/inference_engine COMPONENT ${PYTHON_VERSION}
        LIBRARY DESTINATION python/${PYTHON_VERSION}/openvino/inference_engine COMPONENT ${PYTHON_VERSION})
32 changes: 20 additions & 12 deletions inference-engine/ie_bridges/python/tests/test_PreProcessInfo.py
@@ -8,45 +8,53 @@
 test_net_xml, test_net_bin = model_path()
 
 
-def get_preprocess_info():
+def test_preprocess_info():
     ie_core = IECore()
     net = ie_core.read_network(model=test_net_xml, weights=test_net_bin)
-    return net.input_info["data"].preprocess_info
-
-
-def test_preprocess_info():
-    assert isinstance(get_preprocess_info(), PreProcessInfo)
+    assert isinstance(net.input_info["data"].preprocess_info, PreProcessInfo)
 
 
 def test_color_format():
-    preprocess_info = get_preprocess_info()
+    ie_core = IECore()
+    net = ie_core.read_network(model=test_net_xml, weights=test_net_bin)
+    preprocess_info = net.input_info["data"].preprocess_info
     assert preprocess_info.color_format == ColorFormat.RAW
 
 
 def test_color_format_setter():
-    preprocess_info = get_preprocess_info()
+    ie_core = IECore()
+    net = ie_core.read_network(model=test_net_xml, weights=test_net_bin)
+    preprocess_info = net.input_info["data"].preprocess_info
     preprocess_info.color_format = ColorFormat.BGR
     assert preprocess_info.color_format == ColorFormat.BGR
 
 
 def test_resize_algorithm():
-    preprocess_info = get_preprocess_info()
+    ie_core = IECore()
+    net = ie_core.read_network(model=test_net_xml, weights=test_net_bin)
+    preprocess_info = net.input_info["data"].preprocess_info
     assert preprocess_info.resize_algorithm == ResizeAlgorithm.NO_RESIZE
 
 
 def test_resize_algorithm_setter():
-    preprocess_info = get_preprocess_info()
+    ie_core = IECore()
+    net = ie_core.read_network(model=test_net_xml, weights=test_net_bin)
+    preprocess_info = net.input_info["data"].preprocess_info
     preprocess_info.resize_algorithm = ResizeAlgorithm.RESIZE_BILINEAR
     assert preprocess_info.resize_algorithm == ResizeAlgorithm.RESIZE_BILINEAR
 
 
 def test_mean_variant():
-    preprocess_info = get_preprocess_info()
+    ie_core = IECore()
+    net = ie_core.read_network(model=test_net_xml, weights=test_net_bin)
+    preprocess_info = net.input_info["data"].preprocess_info
     assert preprocess_info.mean_variant == MeanVariant.NONE
 
 
 def test_mean_variant_setter():
-    preprocess_info = get_preprocess_info()
+    ie_core = IECore()
+    net = ie_core.read_network(model=test_net_xml, weights=test_net_bin)
+    preprocess_info = net.input_info["data"].preprocess_info
     preprocess_info.mean_variant = MeanVariant.MEAN_IMAGE
     assert preprocess_info.mean_variant == MeanVariant.MEAN_IMAGE
 
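The duplication appears deliberate: each test now constructs its own IECore and network, so preprocess settings mutated by one test can no longer leak into another through a shared helper. For comparison, a rough C++ counterpart of the knobs these tests exercise; a sketch assuming the 2020-era Inference Engine C++ API, with placeholder model paths:

#include <ie_core.hpp>

int main() {
    InferenceEngine::Core ie;
    // "model.xml"/"model.bin" are placeholders for a real IR pair.
    auto net = ie.ReadNetwork("model.xml", "model.bin");

    // The same settings the Python tests above read and write.
    auto inputs = net.getInputsInfo();
    auto& preprocess = inputs.at("data")->getPreProcess();
    preprocess.setColorFormat(InferenceEngine::ColorFormat::BGR);
    preprocess.setResizeAlgorithm(InferenceEngine::ResizeAlgorithm::RESIZE_BILINEAR);
    preprocess.setVariant(InferenceEngine::MeanVariant::MEAN_IMAGE);
    return 0;
}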
Mode changed 100644 → 100755 (executable bit set, content unchanged):
inference-engine/samples/build_samples.sh
inference-engine/scripts/dependencies.sh
inference-engine/scripts/run_tests_myriad_multistick.sh
7 changes: 7 additions & 0 deletions inference-engine/src/gna_plugin/gna_infer_request.hpp
@@ -69,6 +69,13 @@ class GNAInferRequest : public InferenceEngine::AsyncInferRequestInternal {
         // execute input pre-processing.
         execDataPreprocessing(_inputs);
         inferRequestIdx = plg->QueueInference(_inputs, _outputs);
+        // workaround to unblock callback-based flows
+        if (_callback) {
+            auto infer_request = _publicInterface.lock();
+            IE_ASSERT(infer_request != nullptr);
+            auto res = Wait(0);
+            _callback(infer_request, res);
+        }
     }
 
     InferenceEngine::StatusCode Wait(int64_t millis_timeout) override {
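Why this unblocks callback flows: GNA queues (and effectively completes) the inference synchronously inside the async entry point, so a completion callback registered by the application would otherwise never be invoked; the added block calls it explicitly with the status from Wait(0). A sketch of the flow this enables, assuming the 2020-era Inference Engine C++ API, with placeholder model paths:

#include <ie_core.hpp>
#include <iostream>

int main() {
    InferenceEngine::Core ie;
    auto net = ie.ReadNetwork("model.xml", "model.bin");  // placeholders
    auto exec = ie.LoadNetwork(net, "GNA");
    auto request = exec.CreateInferRequest();

    // With the workaround, this fires right after StartAsync() has queued
    // the inference, instead of never firing.
    request.SetCompletionCallback([] { std::cout << "inference done\n"; });

    request.StartAsync();
    request.Wait(InferenceEngine::IInferRequest::WaitMode::RESULT_READY);
    return 0;
}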
5 changes: 5 additions & 0 deletions inference-engine/src/inference_engine/CMakeLists.txt
@@ -119,6 +119,11 @@ add_library(${TARGET_NAME}_obj OBJECT
 
 target_compile_definitions(${TARGET_NAME}_obj PRIVATE IMPLEMENT_INFERENCE_ENGINE_API)
 
+# TODO: Remove this definition when readers are loaded from xml
+if(NGRAPH_ONNX_IMPORT_ENABLE)
+    target_compile_definitions(${TARGET_NAME}_obj PRIVATE ONNX_IMPORT_ENABLE)
+endif()
+
 target_include_directories(${TARGET_NAME}_obj SYSTEM PRIVATE $<TARGET_PROPERTY:ngraph::ngraph,INTERFACE_INCLUDE_DIRECTORIES>
                                                              $<TARGET_PROPERTY:pugixml,INTERFACE_INCLUDE_DIRECTORIES>)
 
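The definition only gates conditional compilation inside the object library; a minimal sketch of how such a compile definition is consumed (illustrative; the actual guarded OpenVINO code is not part of this diff):

#include <iostream>

int main() {
#ifdef ONNX_IMPORT_ENABLE
    std::cout << "built with ONNX import support\n";
#else
    std::cout << "built without ONNX import support\n";
#endif
    return 0;
}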
130 changes: 3 additions & 127 deletions inference-engine/src/inference_engine/ie_core.cpp
@@ -5,33 +5,28 @@
 #include "ie_core.hpp"
 
 #include <unordered_set>
-#include <fstream>
 #include <functional>
 #include <limits>
 #include <map>
 #include <memory>
-#include <sstream>
 #include <streambuf>
 #include <string>
 #include <utility>
 #include <vector>
-#include <istream>
-#include <mutex>
 
-#include "ie_blob_stream.hpp"
-#include <ie_reader_ptr.hpp>
 #include <ngraph/opsets/opset.hpp>
 #include "cpp/ie_cnn_net_reader.h"
 #include "cpp/ie_plugin_cpp.hpp"
 #include "cpp_interfaces/base/ie_plugin_base.hpp"
 #include "details/ie_exception_conversion.hpp"
 #include "details/ie_so_pointer.hpp"
 #include "file_utils.h"
 #include "ie_icore.hpp"
 #include "ie_plugin.hpp"
 #include "ie_plugin_config.hpp"
 #include "ie_profiling.hpp"
 #include "ie_util_internal.hpp"
+#include "ie_network_reader.hpp"
 #include "multi-device/multi_device_config.hpp"
 #include "xml_parse_utils.h"
 
@@ -133,79 +128,6 @@ Parameter copyParameterValue(const Parameter & value) {
 
 }  // namespace
 
-class Reader: public IReader {
-private:
-    InferenceEngine::IReaderPtr ptr;
-    std::once_flag readFlag;
-    std::string name;
-    std::string location;
-
-    InferenceEngine::IReaderPtr getReaderPtr() {
-        std::call_once(readFlag, [&] () {
-            FileUtils::FilePath libraryName = FileUtils::toFilePath(location);
-            FileUtils::FilePath readersLibraryPath = FileUtils::makeSharedLibraryName(getInferenceEngineLibraryPath(), libraryName);
-
-            if (!FileUtils::fileExist(readersLibraryPath)) {
-                THROW_IE_EXCEPTION << "Please, make sure that Inference Engine ONNX reader library "
-                    << FileUtils::fromFilePath(::FileUtils::makeSharedLibraryName({}, libraryName)) << " is in "
-                    << getIELibraryPath();
-            }
-            ptr = IReaderPtr(readersLibraryPath);
-        });
-
-        return ptr;
-    }
-
-    InferenceEngine::IReaderPtr getReaderPtr() const {
-        return const_cast<Reader*>(this)->getReaderPtr();
-    }
-
-    void Release() noexcept override {
-        delete this;
-    }
-
-public:
-    using Ptr = std::shared_ptr<Reader>;
-    Reader(const std::string& name, const std::string location): name(name), location(location) {}
-    bool supportModel(std::istream& model) const override {
-        auto reader = getReaderPtr();
-        return reader->supportModel(model);
-    }
-    CNNNetwork read(std::istream& model, const std::vector<IExtensionPtr>& exts) const override {
-        auto reader = getReaderPtr();
-        return reader->read(model, exts);
-    }
-    CNNNetwork read(std::istream& model, std::istream& weights, const std::vector<IExtensionPtr>& exts) const override {
-        auto reader = getReaderPtr();
-        return reader->read(model, weights, exts);
-    }
-    std::vector<std::string> getDataFileExtensions() const override {
-        auto reader = getReaderPtr();
-        return reader->getDataFileExtensions();
-    }
-    std::string getName() const {
-        return name;
-    }
-};
-
-namespace {
-
-// Extension to plugins creator
-std::multimap<std::string, Reader::Ptr> readers;
-
-void registerReaders() {
-    static std::mutex readerMutex;
-    std::lock_guard<std::mutex> lock(readerMutex);
-    // TODO: Read readers info from XML
-    auto onnxReader = std::make_shared<Reader>("ONNX", std::string("inference_engine_onnx_reader") + std::string(IE_BUILD_POSTFIX));
-    readers.emplace("onnx", onnxReader);
-    readers.emplace("prototxt", onnxReader);
-    auto irReader = std::make_shared<Reader>("IR", std::string("inference_engine_ir_reader") + std::string(IE_BUILD_POSTFIX));
-    readers.emplace("xml", irReader);
-}
-
-}  // namespace
-
 CNNNetReaderPtr CreateCNNNetReaderPtr() noexcept {
     auto loader = createCnnReaderLoader();
     return CNNNetReaderPtr(loader);
@@ -374,57 +296,12 @@ class Core::Impl : public ICore {
 
     CNNNetwork ReadNetwork(const std::string& modelPath, const std::string& binPath) const override {
         IE_PROFILING_AUTO_SCOPE(Core::ReadNetwork)
-
-        std::ifstream modelStream(modelPath, std::ios::binary);
-        if (!modelStream.is_open())
-            THROW_IE_EXCEPTION << "Model file " << modelPath << " cannot be opened!";
-
-        auto fileExt = modelPath.substr(modelPath.find_last_of(".") + 1);
-        for (auto it = readers.lower_bound(fileExt); it != readers.upper_bound(fileExt); it++) {
-            auto reader = it->second;
-            if (reader->supportModel(modelStream)) {
-                // Find weights
-                std::string bPath = binPath;
-                if (bPath.empty()) {
-                    auto pathWoExt = modelPath;
-                    auto pos = modelPath.rfind('.');
-                    if (pos != std::string::npos) pathWoExt = modelPath.substr(0, pos);
-                    for (const auto& ext : reader->getDataFileExtensions()) {
-                        bPath = pathWoExt + "." + ext;
-                        if (!FileUtils::fileExist(bPath)) {
-                            bPath.clear();
-                        } else {
-                            break;
-                        }
-                    }
-                }
-                if (!bPath.empty()) {
-                    std::ifstream binStream;
-                    binStream.open(bPath, std::ios::binary);
-                    if (!binStream.is_open())
-                        THROW_IE_EXCEPTION << "Weights file " << bPath << " cannot be opened!";
-                    return reader->read(modelStream, binStream, extensions);
-                }
-                return reader->read(modelStream, extensions);
-            }
-        }
-        THROW_IE_EXCEPTION << "Unknown model format! Cannot read the model: " << modelPath;
+        return details::ReadNetwork(modelPath, binPath, extensions);
     }
 
     CNNNetwork ReadNetwork(const std::string& model, const Blob::CPtr& weights) const override {
         IE_PROFILING_AUTO_SCOPE(Core::ReadNetwork)
-        std::istringstream modelStream(model);
-        details::BlobStream binStream(weights);
-
-        for (auto it = readers.begin(); it != readers.end(); it++) {
-            auto reader = it->second;
-            if (reader->supportModel(modelStream)) {
-                if (weights)
-                    return reader->read(modelStream, binStream, extensions);
-                return reader->read(modelStream, extensions);
-            }
-        }
-        THROW_IE_EXCEPTION << "Unknown model format! Cannot read the model from string!";
+        return details::ReadNetwork(model, weights, extensions);
     }
 
     ExecutableNetwork LoadNetwork(const CNNNetwork& network, const std::string& deviceName,
@@ -704,7 +581,6 @@ Core::Impl::Impl() {
     opsetNames.insert("opset1");
     opsetNames.insert("opset2");
    opsetNames.insert("opset3");
-    registerReaders();
 }
 
 Core::Impl::~Impl() {}
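The net effect in this file is a much thinner Core: reader registration, format detection, and weight-file lookup now live behind details::ReadNetwork (hence the new ie_network_reader.hpp include), while the public behavior is intended to stay the same. A sketch of that public surface, assuming the 2020-era API and placeholder paths:

#include <ie_core.hpp>

int main() {
    InferenceEngine::Core ie;

    // IR: with an empty weights path the reader looks for a matching data
    // file (e.g. model.bin) next to the model, as the removed code did.
    auto irNet = ie.ReadNetwork("model.xml", "");

    // ONNX: the reader is chosen by file extension; .prototxt was also
    // registered for the ONNX reader.
    auto onnxNet = ie.ReadNetwork("model.onnx", "");
    return 0;
}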