Implement reading pdpd model in ReadNetwork #51
@@ -362,13 +362,6 @@ int main(int argc, char* argv[]) {

     auto startTime = Time::now();
     CNNNetwork cnnNetwork = ie.ReadNetwork(FLAGS_m);
-    // ngraph::frontend::FrontEndManager manager;
-    // auto FE = manager.loadByFramework("pdpd");
-    // auto inputModel = FE->loadFromFile(FLAGS_m);
-    // //inputModel->setPartialShape(inputModel->getInputs()[0], ngraph::PartialShape({1, 224, 224, 3}));
-    // auto ngFunc = FE->convert(inputModel);
-    // CNNNetwork cnnNetwork(ngFunc);
-    // cnnNetwork.serialize("benchmark_app_loaded_network.xml");

     auto duration_ms = double_to_string(get_total_ms_time(startTime));
     slog::info << "Read network took " << duration_ms << " ms" << slog::endl;

Review comment (on the removed FrontEndManager lines): I propose to remove all redundant comments from benchmark app.
Reply: I don't see any more redundant comments in benchmark_app.
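The comments removed above documented how to drive the PaddlePaddle frontend by hand before ReadNetwork learned to do it. For reference, a standalone sketch of that flow, written against the calls this PR adds to ReadNetwork further down (load_by_model, load, convert), might look like the following; the helper name, error handling, and headers other than frontend_manager.hpp are illustrative assumptions rather than code from the PR.

    #include <frontend_manager/frontend_manager.hpp>
    #include <ie_core.hpp>  // brings in InferenceEngine::CNNNetwork

    #include <stdexcept>
    #include <string>

    // Illustrative helper (not part of the PR): convert a PaddlePaddle model
    // file into a CNNNetwork by using the frontend manager directly.
    InferenceEngine::CNNNetwork readPdpdModel(const std::string& model_path) {
        ngraph::frontend::FrontEndManager manager;
        // Ask the manager for a frontend that recognizes this file.
        auto FE = manager.load_by_model(model_path);
        if (!FE)
            throw std::runtime_error("No frontend recognized " + model_path);
        // Parse the model file and convert it to an ngraph::Function.
        auto inputModel = FE->load(model_path);
        auto ngFunc = FE->convert(inputModel);
        return InferenceEngine::CNNNetwork(ngFunc);
    }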
@@ -9,6 +9,7 @@
 #include <file_utils.h>
 #include <ie_reader.hpp>
 #include <ie_ir_version.hpp>
+#include <frontend_manager/frontend_manager.hpp>

 #include <fstream>
 #include <istream>
@@ -226,6 +227,26 @@ CNNNetwork details::ReadNetwork(const std::string& modelPath, const std::string&
             return reader->read(modelStream, exts);
         }
     }
+    // Try to load with FrontEndManager
+    static ngraph::frontend::FrontEndManager manager;
+    ngraph::frontend::FrontEnd::Ptr FE;
+    ngraph::frontend::InputModel::Ptr inputModel;
+    if (!binPath.empty()) {
+#if defined(ENABLE_UNICODE_PATH_SUPPORT) && defined(_WIN32)
+        std::wstring weights_path = FileUtils::multiByteCharToWString(binPath.c_str());
+#else
+        std::string weights_path = binPath;
+#endif
+        FE = manager.load_by_model(model_path, weights_path);
+        if (FE) inputModel = FE->load(model_path, weights_path);
+    } else {
+        FE = manager.load_by_model(model_path);
+        if (FE) inputModel = FE->load(model_path);
+    }
+    if (inputModel) {
+        auto ngFunc = FE->convert(inputModel);
+        return CNNNetwork(ngFunc);
+    }
     IE_THROW() << "Unknown model format! Cannot find reader for model format: " << fileExt << " and read the model: " << modelPath <<
                ". Please check that reader library exists in your PATH.";
 }

Review comment (on the static FrontEndManager line): Why do you need a static manager?
Review comment (on the load_by_model call): In this line, do we load the FE from the library?
@@ -248,4 +269,4 @@ CNNNetwork details::ReadNetwork(const std::string& model, const Blob::CPtr& weig
     IE_THROW() << "Unknown model format! Cannot find reader for the model and read it. Please check that reader library exists in your PATH.";
 }

 }  // namespace InferenceEngine
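With the fallback in place, application code never touches the frontend API; a hypothetical caller-side snippet (the model path is a placeholder) shows the flow that the new test below also exercises: ReadNetwork first tries the registered readers and, when none accepts the file, falls through to the FrontEndManager added above.

    #include <ie_core.hpp>
    #include <iostream>

    int main() {
        InferenceEngine::Core ie;
        // The .pdmodel file is not recognized by the existing readers, so
        // ReadNetwork now converts it through the matching frontend.
        auto network = ie.ReadNetwork("relu.pdmodel");  // placeholder path
        std::cout << "Ops in converted function: "
                  << network.getFunction()->get_ops().size() << std::endl;
        return 0;
    }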
@@ -0,0 +1,39 @@
// Copyright (C) 2018-2021 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//

#include <gtest/gtest.h>
#include <set>
#include <string>
#include <fstream>

#include <ie_blob.h>
#include <ie_core.hpp>
#include <ngraph/ngraph.hpp>

TEST(PDPD_Reader_Tests, ImportBasicModelToCore) {
    auto model = std::string(PDPD_TEST_MODELS) + "relu.pdmodel";
    InferenceEngine::Core ie;
    auto cnnNetwork = ie.ReadNetwork(model);
    auto function = cnnNetwork.getFunction();

    int count_relus = 0;
    int count_constants = 0;
    int count_parameters = 0;

    for (auto op : function->get_ops()) {
        const auto op_type = std::string(op->get_type_name());
        count_relus += (op_type == "Relu" ? 1 : 0);
        count_constants += (op_type == "Constant" ? 1 : 0);
        count_parameters += (op_type == "Parameter" ? 1 : 0);
    }

    ASSERT_EQ(function->get_output_size(), 1);
    ASSERT_EQ(std::string(function->get_output_op(0)->get_type_name()), "Result");
    ASSERT_EQ(function->get_output_element_type(0), ngraph::element::f32);
    ASSERT_EQ(function->get_output_shape(0), ngraph::Shape({ 3 }));
    ASSERT_EQ(count_relus, 1);
    ASSERT_EQ(count_constants, 6);
    ASSERT_EQ(count_parameters, 1);
}
Review comment: What is the size of pdpd models?