From 41c7c75bb624861a7db40eb8e29b43247c58e373 Mon Sep 17 00:00:00 2001
From: Ilya Lavrenov
Date: Mon, 31 May 2021 13:22:22 +0300
Subject: [PATCH 1/2] Fixed samples compilation against make install

---
 .../samples/common/utils/CMakeLists.txt       | 12 ++++----
 .../common/utils/include/samples/slog.hpp     | 18 ++++--------
 .../samples/common/utils/src/slog.cpp         | 29 +++++++++++++++++++
 3 files changed, 39 insertions(+), 20 deletions(-)
 create mode 100644 inference-engine/samples/common/utils/src/slog.cpp

diff --git a/inference-engine/samples/common/utils/CMakeLists.txt b/inference-engine/samples/common/utils/CMakeLists.txt
index 96dd896bdb0f62..90f36a62fa0e22 100644
--- a/inference-engine/samples/common/utils/CMakeLists.txt
+++ b/inference-engine/samples/common/utils/CMakeLists.txt
@@ -10,14 +10,12 @@ add_library(${TARGET_NAME} STATIC ${SOURCES})
 set_target_properties(${TARGET_NAME} PROPERTIES FOLDER "src")
 
 target_include_directories(${TARGET_NAME}
-    PUBLIC
-        "${CMAKE_CURRENT_SOURCE_DIR}/include")
+    PUBLIC "${CMAKE_CURRENT_SOURCE_DIR}/include")
 
-target_link_libraries(${TARGET_NAME}
-    PUBLIC
-        IE::inference_engine
-        gflags)
+find_package(InferenceEngine REQUIRED COMPONENTS inference_engine)
+
+target_link_libraries(${TARGET_NAME} PUBLIC ${InferenceEngine_LIBRARIES} gflags)
 
 if(COMMAND add_clang_format_target)
     add_clang_format_target(${TARGET_NAME}_clang FOR_TARGETS ${TARGET_NAME})
-endif()
\ No newline at end of file
+endif()
diff --git a/inference-engine/samples/common/utils/include/samples/slog.hpp b/inference-engine/samples/common/utils/include/samples/slog.hpp
index f4cdedab045ea7..9351f5f1d682a6 100644
--- a/inference-engine/samples/common/utils/include/samples/slog.hpp
+++ b/inference-engine/samples/common/utils/include/samples/slog.hpp
@@ -62,22 +62,14 @@ class LogStream {
     }
 
     // Specializing for LogStreamEndLine to support slog::endl
-    LogStream& operator<<(const LogStreamEndLine& /*arg*/) {
-        _new_line = true;
-
-        (*_log_stream) << std::endl;
-        return *this;
-    }
+    LogStream& operator<<(const LogStreamEndLine&);
 
     // Specializing for LogStreamBoolAlpha to support slog::boolalpha
-    LogStream& operator<<(const LogStreamBoolAlpha& /*arg*/) {
-        (*_log_stream) << std::boolalpha;
-        return *this;
-    }
+    LogStream& operator<<(const LogStreamBoolAlpha&);
 };
 
-static LogStream info("INFO", std::cout);
-static LogStream warn("WARNING", std::cout);
-static LogStream err("ERROR", std::cerr);
+extern LogStream info;
+extern LogStream warn;
+extern LogStream err;
 
 } // namespace slog
diff --git a/inference-engine/samples/common/utils/src/slog.cpp b/inference-engine/samples/common/utils/src/slog.cpp
new file mode 100644
index 00000000000000..b8fa86113e3028
--- /dev/null
+++ b/inference-engine/samples/common/utils/src/slog.cpp
@@ -0,0 +1,29 @@
+// Copyright (C) 2018-2021 Intel Corporation
+// SPDX-License-Identifier: Apache-2.0
+//
+
+#include <iostream>
+
+#include "samples/slog.hpp"
+
+namespace slog {
+
+LogStream info("INFO", std::cout);
+LogStream warn("WARNING", std::cout);
+LogStream err("ERROR", std::cerr);
+
+// Specializing for LogStreamEndLine to support slog::endl
+LogStream& LogStream::operator<<(const LogStreamEndLine& /*arg*/) {
+    _new_line = true;
+
+    (*_log_stream) << std::endl;
+    return *this;
+}
+
+// Specializing for LogStreamBoolAlpha to support slog::boolalpha
+LogStream& LogStream::operator<<(const LogStreamBoolAlpha& /*arg*/) {
+    (*_log_stream) << std::boolalpha;
+    return *this;
+}
+
+} // namespace slog
\ No newline at end of file
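
The find_package() call introduced above is the same mechanism a standalone project would use to build samples against an installed Inference Engine, which is the "make install" scenario this patch targets. A minimal sketch of such an out-of-tree consumer follows; the project name, main.cpp and the CMAKE_PREFIX_PATH hint are illustrative assumptions, not part of this patch:

    # Hypothetical standalone sample built against an installed Inference Engine package.
    # Point CMAKE_PREFIX_PATH (or InferenceEngine_DIR) at the install tree.
    cmake_minimum_required(VERSION 3.13)
    project(standalone_sample)

    # Same package discovery as the patched samples/common/utils/CMakeLists.txt
    find_package(InferenceEngine REQUIRED COMPONENTS inference_engine)

    add_executable(standalone_sample main.cpp)

    # Link the libraries exported by the InferenceEngine package configuration
    target_link_libraries(standalone_sample PRIVATE ${InferenceEngine_LIBRARIES})
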
From 4e6e79d14ace430093bb9f0d9fa520b05573106f Mon Sep 17 00:00:00 2001
From: Ilya Lavrenov
Date: Mon, 31 May 2021 15:40:55 +0300
Subject: [PATCH 2/2] Fixed code style

---
 .../openvino/inference_engine/ie_api_impl.cpp | 46 ++++++++-----------
 .../openvino/inference_engine/ie_api_impl.hpp |  4 +-
 .../samples/common/utils/src/slog.cpp         |  4 +-
 3 files changed, 23 insertions(+), 31 deletions(-)

diff --git a/inference-engine/ie_bridges/python/src/openvino/inference_engine/ie_api_impl.cpp b/inference-engine/ie_bridges/python/src/openvino/inference_engine/ie_api_impl.cpp
index 6f8dad24fdfb15..f00e12090a19c4 100644
--- a/inference-engine/ie_bridges/python/src/openvino/inference_engine/ie_api_impl.cpp
+++ b/inference-engine/ie_bridges/python/src/openvino/inference_engine/ie_api_impl.cpp
@@ -366,24 +366,21 @@ std::map InferenceEnginePython::IEExecNe
     return pyOutputs;
 }
 
-void InferenceEnginePython::InferRequestWrap::setBlob(const std::string& blob_name,
-                                                      const InferenceEngine::Blob::Ptr& blob_ptr) {
+void InferenceEnginePython::InferRequestWrap::setBlob(const std::string& blob_name, const InferenceEngine::Blob::Ptr& blob_ptr) {
     request_ptr.SetBlob(blob_name.c_str(), blob_ptr);
 }
 
-void InferenceEnginePython::InferRequestWrap::setBlob(const std::string& blob_name,
-                                                      const InferenceEngine::Blob::Ptr& blob_ptr,
+void InferenceEnginePython::InferRequestWrap::setBlob(const std::string& blob_name, const InferenceEngine::Blob::Ptr& blob_ptr,
                                                       const InferenceEngine::PreProcessInfo& info) {
     request_ptr.SetBlob(blob_name.c_str(), blob_ptr, info);
 }
 
-const InferenceEngine::PreProcessInfo&
-InferenceEnginePython::InferRequestWrap::getPreProcess(const std::string& blob_name) {
+const InferenceEngine::PreProcessInfo& InferenceEnginePython::InferRequestWrap::getPreProcess(const std::string& blob_name) {
     return request_ptr.GetPreProcess(blob_name.c_str());
 }
 
 InferenceEngine::Blob::Ptr InferenceEnginePython::InferRequestWrap::getBlobPtr(const std::string& blob_name) {
-    return request_ptr.GetBlob(blob_name.c_str());
+    return request_ptr.GetBlob(blob_name.c_str());
 }
 
 void InferenceEnginePython::InferRequestWrap::setBatch(int size) {
@@ -401,7 +398,6 @@ std::vector InferenceEnginePython::InferR
     return memoryStates;
 }
 
-
 void InferenceEnginePython::InferRequestWrap::setCyCallback(cy_callback callback, void* data) {
     user_callback = callback;
     user_data = data;
@@ -458,7 +454,6 @@ std::map InferenceEnginePython:
     return perf_map;
 }
 
-
 std::string InferenceEnginePython::get_version() {
     auto version = InferenceEngine::GetInferenceEngineVersion();
     return version->buildNumber;
@@ -523,23 +518,22 @@ void InferenceEnginePython::IEExecNetwork::createInferRequests(int num_requests)
         infer_request.request_queue_ptr = request_queue_ptr;
         infer_request.request_ptr = actual.CreateInferRequest();
 
-        infer_request.request_ptr.SetCompletionCallback<std::function<void(InferenceEngine::InferRequest, InferenceEngine::StatusCode)>>(
-            [&](InferenceEngine::InferRequest request, InferenceEngine::StatusCode code) {
-                if (code != InferenceEngine::StatusCode::OK) {
-                    IE_EXCEPTION_SWITCH(code, ExceptionType,
-                                        InferenceEngine::details::ThrowNow<ExceptionType> {} <<=
-                                        std::stringstream {} << IE_LOCATION << InferenceEngine::details::ExceptionTraits<ExceptionType>::string());
-                }
-
-                auto end_time = Time::now();
-                auto execTime = std::chrono::duration_cast<ns>(end_time - infer_request.start_time);
-                infer_request.exec_time = static_cast<double>(execTime.count()) * 0.000001;
-                infer_request.request_queue_ptr->setRequestIdle(infer_request.index);
-                if (infer_request.user_callback) {
-                    infer_request.user_callback(infer_request.user_data, code);
-                }
-            });
+        infer_request.request_ptr.SetCompletionCallback<std::function<void(InferenceEngine::InferRequest, InferenceEngine::StatusCode)>>(
+            [&](InferenceEngine::InferRequest request, InferenceEngine::StatusCode code) {
+                if (code != InferenceEngine::StatusCode::OK) {
+                    IE_EXCEPTION_SWITCH(code, ExceptionType,
+                                        InferenceEngine::details::ThrowNow<ExceptionType> {} <<=
+                                            std::stringstream {} << IE_LOCATION << InferenceEngine::details::ExceptionTraits<ExceptionType>::string());
+                }
+
+                auto end_time = Time::now();
+                auto execTime = std::chrono::duration_cast<ns>(end_time - infer_request.start_time);
+                infer_request.exec_time = static_cast<double>(execTime.count()) * 0.000001;
+                infer_request.request_queue_ptr->setRequestIdle(infer_request.index);
+                if (infer_request.user_callback) {
+                    infer_request.user_callback(infer_request.user_data, code);
+                }
+            });
     }
 }
diff --git a/inference-engine/ie_bridges/python/src/openvino/inference_engine/ie_api_impl.hpp b/inference-engine/ie_bridges/python/src/openvino/inference_engine/ie_api_impl.hpp
index a583737d94a18c..227918ce06fedf 100644
--- a/inference-engine/ie_bridges/python/src/openvino/inference_engine/ie_api_impl.hpp
+++ b/inference-engine/ie_bridges/python/src/openvino/inference_engine/ie_api_impl.hpp
@@ -121,9 +121,7 @@ struct InferRequestWrap {
 
     void setBlob(const std::string& blob_name, const InferenceEngine::Blob::Ptr& blob_ptr);
 
-    void setBlob(const std::string& name,
-                 const InferenceEngine::Blob::Ptr& data,
-                 const InferenceEngine::PreProcessInfo& info);
+    void setBlob(const std::string& name, const InferenceEngine::Blob::Ptr& data, const InferenceEngine::PreProcessInfo& info);
 
     void setBatch(int size);
 
diff --git a/inference-engine/samples/common/utils/src/slog.cpp b/inference-engine/samples/common/utils/src/slog.cpp
index b8fa86113e3028..c7a437278317be 100644
--- a/inference-engine/samples/common/utils/src/slog.cpp
+++ b/inference-engine/samples/common/utils/src/slog.cpp
@@ -2,10 +2,10 @@
 // SPDX-License-Identifier: Apache-2.0
 //
 
-#include <iostream>
-
-#include "samples/slog.hpp"
+#include "samples/slog.hpp"
+
+#include <iostream>
 
 namespace slog {
 
 LogStream info("INFO", std::cout);