Commit 86a7717

Removed obsolete ie_profiling.hpp (openvinotoolkit#4043)
ilya-lavrenov authored and Egor Shulman committed Feb 1, 2021
1 parent 0dc9b19 commit 86a7717
Showing 7 changed files with 19 additions and 257 deletions.
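
Across the plugin sources below, the obsolete IE_PROFILING_AUTO_SCOPE(...) macro is replaced with OV_ITT_SCOPED_TASK(domain, name) from <openvino/itt.hpp>, tied to a plugin-specific ITT domain that the affected files now pull in via the new "template_itt.hpp" include. That header itself is not part of this diff; the snippet below is a minimal sketch of what it presumably contains, assuming the standard OV_ITT_DOMAIN macro from <openvino/itt.hpp> and the itt::domains::TemplatePlugin name referenced at the call sites.

    // template_itt.hpp -- hypothetical sketch, not the actual file contents.
    #pragma once

    #include <openvino/itt.hpp>

    namespace TemplatePlugin {
    namespace itt {
    namespace domains {
        // Declares the ITT domain referenced as itt::domains::TemplatePlugin
        // by the OV_ITT_SCOPED_TASK calls added in this commit.
        OV_ITT_DOMAIN(TemplatePlugin);
    }  // namespace domains
    }  // namespace itt
    }  // namespace TemplatePlugin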
9 changes: 6 additions & 3 deletions docs/template_plugin/src/template_async_infer_request.cpp
@@ -25,16 +25,19 @@ TemplateAsyncInferRequest::TemplateAsyncInferRequest(
     if (remoteDevice) {
         _pipeline = {
             {cpuTaskExecutor, [this] {
-                IE_PROFILING_AUTO_SCOPE(PreprocessingAndStartPipeline)
+                OV_ITT_SCOPED_TASK(itt::domains::TemplatePlugin,
+                                   "TemplateAsyncInferRequest::PreprocessingAndStartPipeline");
                 _inferRequest->inferPreprocess();
                 _inferRequest->startPipeline();
             }},
             {_waitExecutor, [this] {
-                IE_PROFILING_AUTO_SCOPE(WaitPipeline)
+                OV_ITT_SCOPED_TASK(itt::domains::TemplatePlugin,
+                                   "TemplateAsyncInferRequest::WaitPipeline");
                 _inferRequest->waitPipeline();
             }},
             {cpuTaskExecutor, [this] {
-                IE_PROFILING_AUTO_SCOPE(Postprocessing)
+                OV_ITT_SCOPED_TASK(itt::domains::TemplatePlugin,
+                                   "TemplateAsyncInferRequest::Postprocessing");
                 _inferRequest->inferPostprocess();
             }}
         };
5 changes: 4 additions & 1 deletion docs/template_plugin/src/template_executable_network.cpp
@@ -10,6 +10,7 @@
 #include "template/template_config.hpp"
 #include "template_plugin.hpp"
 #include "template_executable_network.hpp"
+#include "template_itt.hpp"
 
 using namespace TemplatePlugin;
 
@@ -61,7 +62,7 @@ TemplatePlugin::ExecutableNetwork::ExecutableNetwork(std::istream & model,
         model.read(dataBlob->buffer(), dataSize);
     }
 
-    // TODO: implement Import / Export of configuration options
+    // TODO: implement Import / Export of configuration options and merge with `cfg`
     // TODO: implement Import / Export of network precisions, layouts, preprocessing info
 
     auto cnnnetwork = _plugin->GetCore()->ReadNetwork(xmlString, std::move(dataBlob));
@@ -188,6 +189,8 @@ InferenceEngine::Parameter TemplatePlugin::ExecutableNetwork::GetMetric(const st
 
 // ! [executable_network:export_impl]
 void TemplatePlugin::ExecutableNetwork::ExportImpl(std::ostream& modelStream) {
+    OV_ITT_SCOPED_TASK(itt::domains::TemplatePlugin, "ExecutableNetwork::ExportImpl");
+
     // Note: custom ngraph extensions are not supported
     std::map<std::string, ngraph::OpSet> custom_opsets;
     std::stringstream xmlFile, binFile;
17 changes: 9 additions & 8 deletions docs/template_plugin/src/template_plugin.cpp
@@ -16,6 +16,7 @@
 #include <transformations/rt_info/fused_names_attribute.hpp>
 
 #include "template/template_config.hpp"
+#include "template_itt.hpp"
 #include "template_plugin.hpp"
 #include "template_executable_network.hpp"
 #include "template_infer_request.hpp"
@@ -74,6 +75,8 @@ std::shared_ptr<ngraph::Function> TransformNetwork(const std::shared_ptr<const n
 // ! [plugin:load_exe_network_impl]
 InferenceEngine::ExecutableNetworkInternal::Ptr Plugin::LoadExeNetworkImpl(const InferenceEngine::CNNNetwork & network,
                                                                            const ConfigMap &config) {
+    OV_ITT_SCOPED_TASK(itt::domains::TemplatePlugin, "Plugin::LoadExeNetworkImpl");
+
     auto cfg = Configuration{ config, _cfg };
     InferenceEngine::InputsDataMap networkInputs = network.getInputsInfo();
     InferenceEngine::OutputsDataMap networkOutputs = network.getOutputsInfo();
@@ -113,22 +116,20 @@ InferenceEngine::ExecutableNetworkInternal::Ptr Plugin::LoadExeNetworkImpl(const
 
 // ! [plugin:import_network_impl]
 InferenceEngine::ExecutableNetwork Plugin::ImportNetworkImpl(std::istream& model, const std::map<std::string, std::string>& config) {
-    // TODO: Import network from stream is not mandatory functionality;
-    // Can just throw an exception and remove the code below
-    Configuration exportedCfg;
-
-    // some code below which reads exportedCfg from `model` stream
-    // ..
+    OV_ITT_SCOPED_TASK(itt::domains::TemplatePlugin, "Plugin::ImportNetworkImpl");
 
-    auto cfg = Configuration(config, exportedCfg);
-    auto exec_network_impl = std::make_shared<ExecutableNetwork>(model, cfg, std::static_pointer_cast<Plugin>(shared_from_this()));
+    Configuration cfg(config);
+    auto exec_network_impl = std::make_shared<ExecutableNetwork>(model, cfg,
+                                                                 std::static_pointer_cast<Plugin>(shared_from_this()));
 
     return make_executable_network(exec_network_impl);
 }
 // ! [plugin:import_network_impl]
 
 // ! [plugin:query_network]
 InferenceEngine::QueryNetworkResult Plugin::QueryNetwork(const InferenceEngine::CNNNetwork &network, const ConfigMap& config) const {
+    OV_ITT_SCOPED_TASK(itt::domains::TemplatePlugin, "Plugin::QueryNetwork");
+
     InferenceEngine::QueryNetworkResult res;
     Configuration cfg{config, _cfg, false};
 
@@ -13,7 +13,6 @@
 #include <cpp_interfaces/base/ie_variable_state_base.hpp>
 #include "ie_iinfer_request.hpp"
 #include "ie_preprocess.hpp"
-#include "ie_profiling.hpp"
 
 namespace InferenceEngine {
 
242 changes: 0 additions & 242 deletions inference-engine/src/plugin_api/ie_profiling.hpp

This file was deleted.
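
Any code that still relied on the deleted header has to move to the ITT API, following the same pattern as the template plugin changes above. A hedged before/after sketch (the function name and task label are illustrative, not taken from this commit):

    #include <openvino/itt.hpp>
    #include "template_itt.hpp"  // or whichever header declares the plugin's ITT domain

    // Before (removed together with ie_profiling.hpp):
    //     IE_PROFILING_AUTO_SCOPE(DoWork)
    void DoWork() {
        // After: a scoped ITT task with an explicit domain and task name.
        OV_ITT_SCOPED_TASK(itt::domains::TemplatePlugin, "DoWork");
        // ... actual work being profiled ...
    }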

1 change: 0 additions & 1 deletion inference-engine/src/preprocessing/ie_preprocess_data.hpp
@@ -9,7 +9,6 @@
 #include <memory>
 
 #include <ie_blob.h>
-#include <ie_profiling.hpp>
 #include <file_utils.h>
 #include <ie_preprocess.hpp>
 
1 change: 0 additions & 1 deletion inference-engine/src/preprocessing/ie_preprocess_gapi.hpp
@@ -13,7 +13,6 @@
 #include <opencv2/gapi/gcompiled.hpp>
 #include <opencv2/gapi/gcomputation.hpp>
 #include <opencv2/gapi/util/optional.hpp>
-#include "ie_profiling.hpp"
 #include <openvino/itt.hpp>
 
 // FIXME: Move this definition back to ie_preprocess_data,
