Skip to content

Commit

Permalink
Merge remote-tracking branch 'upstream/master' into move_node_to_ov
Browse files Browse the repository at this point in the history
  • Loading branch information
ilyachur committed Aug 27, 2021
2 parents 5f54eb8 + 4ef700c commit b7c7803
Show file tree
Hide file tree
Showing 49 changed files with 1,168 additions and 449 deletions.
4 changes: 2 additions & 2 deletions cmake/developer_package/plugins/plugins.cmake
Original file line number Diff line number Diff line change
Expand Up @@ -112,8 +112,8 @@ function(ie_add_plugin)
if(TARGET inference_engine_ir_v7_reader)
add_dependencies(${IE_PLUGIN_NAME} inference_engine_ir_v7_reader)
endif()
if(TARGET inference_engine_onnx_reader)
add_dependencies(${IE_PLUGIN_NAME} inference_engine_onnx_reader)
if(TARGET onnx_ngraph_frontend)
add_dependencies(${IE_PLUGIN_NAME} onnx_ngraph_frontend)
endif()

# install rules
Expand Down
4 changes: 2 additions & 2 deletions docs/IE_DG/Deep_Learning_Inference_Engine_DevGuide.md
Original file line number Diff line number Diff line change
Expand Up @@ -43,10 +43,10 @@ This library contains the classes to:
Starting from the 2020.4 release, the Inference Engine introduced the concept of `CNNNetwork` reader plugins. Such plugins can be automatically loaded by the Inference Engine at runtime, depending on the file format:
* Linux* OS:
- `libinference_engine_ir_reader.so` to read a network from IR
- `libinference_engine_onnx_reader.so` to read a network from ONNX model format
    - `libonnx_ngraph_frontend.so` to read a network from ONNX model format
* Windows* OS:
- `inference_engine_ir_reader.dll` to read a network from IR
- `inference_engine_onnx_reader.dll` to read a network from ONNX model format
- `onnx_ngraph_frontend.dll` to read a network from ONNX model format

### Device-Specific Plugin Libraries

Expand Down
4 changes: 2 additions & 2 deletions docs/IE_DG/inference_engine_intro.md
Original file line number Diff line number Diff line change
Expand Up @@ -46,10 +46,10 @@ This library contains the classes to:
Starting from the 2020.4 release, the Inference Engine introduced the concept of `CNNNetwork` reader plugins. Such plugins can be automatically loaded by the Inference Engine at runtime, depending on the file format:
* Unix* OS:
- `libinference_engine_ir_reader.so` to read a network from IR
- `libinference_engine_onnx_reader.so` to read a network from ONNX model format
    - `libonnx_ngraph_frontend.so` to read a network from ONNX model format
* Windows* OS:
- `inference_engine_ir_reader.dll` to read a network from IR
- `inference_engine_onnx_reader.dll` to read a network from ONNX model format
- `onnx_ngraph_frontend.dll` to read a network from ONNX model format

### Device-specific Plugin Libraries ###

Expand Down
2 changes: 1 addition & 1 deletion inference-engine/src/CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -54,5 +54,5 @@ add_custom_target(ie_libraries ALL
ir_frontend)

if(NGRAPH_ONNX_FRONTEND_ENABLE)
add_dependencies(ie_libraries inference_engine_onnx_reader)
add_dependencies(ie_libraries onnx_ngraph_frontend)
endif()
Original file line number Diff line number Diff line change
Expand Up @@ -1841,8 +1841,8 @@ void FuseFQIntoWeightsPass::run() {
layers_connected_to_fq_count = inputTo.size();
}
for (int index = 0; index < layers_connected_to_fq_count; index++) {
auto weightableLayer = CNNNetGetNextLayerSkipCertain(layerBeforeWeightable, 0, index, isNonFunctional).first;
if (!LayerInfo(weightableLayer).isWeightable()) {
auto weightableLayer = CNNNetCheckNextLayerSkipCertain(layerBeforeWeightable, 0, index, true, isNonFunctional).first;
if (!weightableLayer || !LayerInfo(weightableLayer).isWeightable()) {
continue;
}
if (weightableLayer->insData.size() < 2) {
Expand Down
38 changes: 22 additions & 16 deletions inference-engine/src/inference_engine/src/ie_network_reader.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -96,6 +96,11 @@ namespace {
// Extension to plugins creator
std::multimap<std::string, Reader::Ptr> readers;

static ngraph::frontend::FrontEndManager* get_frontend_manager() {
static ngraph::frontend::FrontEndManager manager;
return &manager;
}

void registerReaders() {
OV_ITT_SCOPED_TASK(ov::itt::domains::IE, "registerReaders");
static bool initialized = false;
Expand All @@ -115,14 +120,6 @@ void registerReaders() {
return std::make_shared<Reader>(name, library_name);
};

// try to load ONNX reader if library exists
auto onnxReader =
create_if_exists("ONNX", std::string("inference_engine_onnx_reader") + std::string(IE_BUILD_POSTFIX));
if (onnxReader) {
readers.emplace("onnx", onnxReader);
readers.emplace("prototxt", onnxReader);
}

// try to load IR reader v10 if library exists
auto irReaderv10 =
create_if_exists("IRv10", std::string("inference_engine_ir_reader") + std::string(IE_BUILD_POSTFIX));
Expand Down Expand Up @@ -174,10 +171,6 @@ CNNNetwork details::ReadNetwork(const std::string& modelPath,
#endif
// Try to open model file
std::ifstream modelStream(model_path, std::ios::binary);
// save path in extensible array of stream
// notice: lifetime of path pointed by pword(0) is limited by current scope
const std::string path_to_save_in_stream = modelPath;
modelStream.pword(0) = const_cast<char*>(path_to_save_in_stream.c_str());
if (!modelStream.is_open())
IE_THROW() << "Model file " << modelPath << " cannot be opened!";

Expand Down Expand Up @@ -240,7 +233,7 @@ CNNNetwork details::ReadNetwork(const std::string& modelPath,
}
}
// Try to load with FrontEndManager
static ngraph::frontend::FrontEndManager manager;
const auto manager = get_frontend_manager();
ngraph::frontend::FrontEnd::Ptr FE;
ngraph::frontend::InputModel::Ptr inputModel;
if (!binPath.empty()) {
Expand All @@ -249,17 +242,17 @@ CNNNetwork details::ReadNetwork(const std::string& modelPath,
#else
std::string weights_path = binPath;
#endif
FE = manager.load_by_model(model_path, weights_path);
FE = manager->load_by_model(model_path, weights_path);
if (FE)
inputModel = FE->load(model_path, weights_path);
} else {
FE = manager.load_by_model(model_path);
FE = manager->load_by_model(model_path);
if (FE)
inputModel = FE->load(model_path);
}
if (inputModel) {
auto ngFunc = FE->convert(inputModel);
return CNNNetwork(ngFunc);
return CNNNetwork(ngFunc, exts);
}
IE_THROW() << "Unknown model format! Cannot find reader for model format: " << fileExt
<< " and read the model: " << modelPath << ". Please check that reader library exists in your PATH.";
Expand All @@ -282,6 +275,19 @@ CNNNetwork details::ReadNetwork(const std::string& model,
return reader->read(modelStream, exts);
}
}
// Try to load with FrontEndManager
// NOTE: weights argument is ignored
const auto manager = get_frontend_manager();
ngraph::frontend::FrontEnd::Ptr FE;
ngraph::frontend::InputModel::Ptr inputModel;
FE = manager->load_by_model(&modelStream);
if (FE)
inputModel = FE->load(&modelStream);
if (inputModel) {
auto ngFunc = FE->convert(inputModel);
return CNNNetwork(ngFunc, exts);
}

IE_THROW() << "Unknown model format! Cannot find reader for the model and read it. Please check that reader "
"library exists in your PATH.";
}
Expand Down
2 changes: 2 additions & 0 deletions inference-engine/src/mkldnn_plugin/mkldnn_graph.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -1028,6 +1028,8 @@ void MKLDNNGraph::DropDWConvNode(const MKLDNNNodePtr &node) {
auto parentConv = parentConvEdge->getParent();
if (!parentConv) return;

parentConv->outputShapes[0] = node->outputShapes[0];

for (size_t i = 0; i < 1; i++) {
auto p_edge = parents[i].lock();
if (!p_edge) continue;
Expand Down
4 changes: 0 additions & 4 deletions inference-engine/src/readers/CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,3 @@ add_cpplint_target(${TARGET_NAME}_cpplint FOR_SOURCES ${reader_api_hpp})

add_subdirectory(ir_reader)
add_subdirectory(ir_reader_v7)

if(NGRAPH_ONNX_FRONTEND_ENABLE)
add_subdirectory(onnx_reader)
endif()
39 changes: 0 additions & 39 deletions inference-engine/src/readers/onnx_reader/CMakeLists.txt

This file was deleted.

70 changes: 0 additions & 70 deletions inference-engine/src/readers/onnx_reader/ie_onnx_reader.cpp

This file was deleted.

45 changes: 0 additions & 45 deletions inference-engine/src/readers/onnx_reader/ie_onnx_reader.hpp

This file was deleted.

Loading

0 comments on commit b7c7803

Please sign in to comment.