Added SoPointers for InferRequest, ExecutableNetwork
ilya-lavrenov committed May 15, 2021
1 parent db5b669 commit a45aa7f
Showing 14 changed files with 44 additions and 24 deletions.
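
In short: everywhere a bare IInferRequestInternal::Ptr used to cross a plugin boundary, the code now carries a details::SOPointer<IInferRequestInternal>, which pairs the request with the handle of the shared library (plugin) that created it, so the library cannot be unloaded while the request is still alive. A minimal, self-contained sketch of the idea, with hypothetical SoPtr, Network, and Request names standing in for the real details::SOPointer and plugin classes:

#include <iostream>
#include <memory>

// Hypothetical stand-ins for a plugin's executable network and infer request.
struct Request {
    void Infer() { std::cout << "running inference\n"; }
};
struct Network {
    std::shared_ptr<Request> CreateInferRequest() { return std::make_shared<Request>(); }
};

// Simplified SOPointer-like wrapper: the library handle is declared first,
// so it is destroyed last, after the object whose code lives in that library.
template <typename T>
struct SoPtr {
    std::shared_ptr<void> _so;   // keeps the plugin .so/.dll loaded
    std::shared_ptr<T>    _ptr;  // object implemented inside that library

    T* operator->() const { return _ptr.get(); }
    explicit operator bool() const noexcept { return _ptr != nullptr; }
};

int main() {
    std::shared_ptr<void> library = std::make_shared<int>(0);  // stand-in for a dlopen handle
    SoPtr<Network> network{library, std::make_shared<Network>()};

    // The pattern this commit introduces: a new request reuses the library
    // handle already held by the network that created it.
    SoPtr<Request> request{network._so, network->CreateInferRequest()};
    request->Infer();
}

The two-element initializers throughout the diff, such as { _network, _network->CreateInferRequest() }, follow the same shape: library-carrying owner first, freshly created object second.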
2 changes: 2 additions & 0 deletions inference-engine/include/cpp/ie_executable_network.hpp
@@ -35,6 +35,8 @@ class INFERENCE_ENGINE_API_CLASS(ExecutableNetwork) : protected details::SOPoint
     friend class Core;
 
 public:
+    ExecutableNetwork() = default;
+
     /**
      * @brief Gets the Executable network output Data node information.
      *
2 changes: 1 addition & 1 deletion inference-engine/src/auto_plugin/auto_exec_network.cpp
@@ -29,7 +29,7 @@ AutoExecutableNetwork::~AutoExecutableNetwork() = default;
 
 IInferRequestInternal::Ptr AutoExecutableNetwork::CreateInferRequestImpl(InputsDataMap networkInputs,
                                                                          OutputsDataMap networkOutputs) {
-    auto inferRequest = _network->CreateInferRequest();
+    SoIInferRequestInternal inferRequest = { _network, _network->CreateInferRequest() };
     return std::make_shared<AutoInferRequest>(networkInputs, networkOutputs, inferRequest);
 }
 
6 changes: 3 additions & 3 deletions inference-engine/src/auto_plugin/auto_infer_request.cpp
@@ -9,9 +9,9 @@
 namespace AutoPlugin {
     using namespace InferenceEngine;
 
-AutoInferRequest::AutoInferRequest(const InputsDataMap& networkInputs,
-                                   const OutputsDataMap& networkOutputs,
-                                   const IInferRequestInternal::Ptr& inferRequest)
+AutoInferRequest::AutoInferRequest(const InputsDataMap& networkInputs,
+                                   const OutputsDataMap& networkOutputs,
+                                   const SoIInferRequestInternal& inferRequest)
     : IInferRequestInternal(networkInputs, networkOutputs)
     , _inferRequest(inferRequest) {
 }
8 changes: 4 additions & 4 deletions inference-engine/src/auto_plugin/auto_infer_request.hpp
@@ -24,17 +24,17 @@ namespace AutoPlugin {
 class AutoInferRequest : public InferenceEngine::IInferRequestInternal {
 public:
     using Ptr = std::shared_ptr<AutoInferRequest>;
-    explicit AutoInferRequest(const InferenceEngine::InputsDataMap& networkInputs,
-                              const InferenceEngine::OutputsDataMap& networkOutputs,
-                              const InferenceEngine::IInferRequestInternal::Ptr& inferRequest);
+    explicit AutoInferRequest(const InferenceEngine::InputsDataMap& networkInputs,
+                              const InferenceEngine::OutputsDataMap& networkOutputs,
+                              const InferenceEngine::SoIInferRequestInternal& inferRequest);
     std::map<std::string, InferenceEngine::InferenceEngineProfileInfo> GetPerformanceCounts() const override;
     void InferImpl() override;
     void SetBlob(const std::string& name, const InferenceEngine::Blob::Ptr& data) override;
     InferenceEngine::Blob::Ptr GetBlob(const std::string& name) override;
     void Cancel() override;
 
 private:
-    InferenceEngine::IInferRequestInternal::Ptr _inferRequest;
+    InferenceEngine::SoIInferRequestInternal _inferRequest;
 };
 
 } // namespace AutoPlugin
@@ -49,7 +49,7 @@ HeteroInferRequest::HeteroInferRequest(InferenceEngine::InputsDataMap networkInp
 
     // go over all subnet and create requests
     for (auto&& desc : _inferRequests) {
-        desc._request = desc._network->CreateInferRequest();
+        desc._request = { desc._network, desc._network->CreateInferRequest() };
         // go over all inputs and get blobs from subnet infer requests
         for (auto&& outputInfo : desc._network->GetOutputsInfo()) {
             requestBlob(outputInfo.first, desc._request);
@@ -24,7 +24,7 @@ class HeteroInferRequest : public InferenceEngine::IInferRequestInternal {
 
     struct SubRequestDesc {
         InferenceEngine::SoExecutableNetworkInternal _network;
-        InferenceEngine::IInferRequestInternal::Ptr _request;
+        InferenceEngine::SoIInferRequestInternal _request;
         openvino::itt::handle_t _profilingTask;
     };
     using SubRequestsList = std::vector<SubRequestDesc>;
@@ -82,7 +82,7 @@ MultiDeviceExecutableNetwork::MultiDeviceExecutableNetwork(const DeviceMap<Infer
         auto* idleWorkerRequestsPtr = &(idleWorkerRequests);
         idleWorkerRequests.set_capacity(numRequests);
         for (auto&& workerRequest : workerRequests) {
-            workerRequest._inferRequest = network->CreateInferRequest();
+            workerRequest._inferRequest = { network, network->CreateInferRequest() };
             auto* workerRequestPtr = &workerRequest;
             IE_ASSERT(idleWorkerRequests.try_push(workerRequestPtr) == true);
             workerRequest._inferRequest->SetCallback(
@@ -177,7 +177,7 @@ InferenceEngine::IInferRequestInternal::Ptr MultiDeviceExecutableNetwork::Create
                                                                  InferenceEngine::OutputsDataMap networkOutputs) {
     auto num = _numRequestsCreated++;
     size_t sum = 0;
-    InferenceEngine::IInferRequestInternal::Ptr request_to_share_blobs_with;
+    InferenceEngine::SoIInferRequestInternal request_to_share_blobs_with;
     // borrowing device-specific blobs from the underlying requests for the device-agnostic, user-facing requests
     // this allows to potentially save on the data-copy later (if the requests are scheduled in the same order)
     for (const auto& device : _devicePrioritiesInitial) {
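
The comment kept in this hunk explains why request_to_share_blobs_with exists: the device-agnostic, user-facing request adopts the device request's buffers so that, when scheduling lines up, no copy is needed. A rough, self-contained illustration of that blob borrowing, using a hypothetical Blob alias and request types rather than the MULTI plugin's actual classes:

#include <cassert>
#include <memory>
#include <vector>

using Blob = std::shared_ptr<std::vector<float>>;  // hypothetical stand-in for an IE blob

struct DeviceRequest {
    Blob input = std::make_shared<std::vector<float>>(1000);
};

struct UserFacingRequest {
    Blob input;
    // borrow the device request's buffer instead of allocating a second one
    explicit UserFacingRequest(const DeviceRequest& r) : input(r.input) {}
};

int main() {
    DeviceRequest worker;
    UserFacingRequest user(worker);
    // both requests see the same memory, so data written through the
    // user-facing blob needs no copy into the device-specific one
    (*user.input)[0] = 42.f;
    assert((*worker.input)[0] == 42.f);
}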
@@ -99,9 +99,9 @@ class MultiDeviceExecutableNetwork : public InferenceEngine::ExecutableNetworkTh
 public:
     using Ptr = std::shared_ptr<MultiDeviceExecutableNetwork>;
     struct WorkerInferRequest {
-        InferenceEngine::IInferRequestInternal::Ptr _inferRequest;
-        InferenceEngine::Task _task;
-        std::exception_ptr _exceptionPtr = nullptr;
+        InferenceEngine::SoIInferRequestInternal _inferRequest;
+        InferenceEngine::Task _task;
+        std::exception_ptr _exceptionPtr = nullptr;
     };
     using NotBusyWorkerRequests = ThreadSafeBoundedQueue<WorkerInferRequest*>;
 
@@ -16,7 +16,7 @@ using namespace InferenceEngine;
 // ------------------------------MultiDeviceInferRequest----------------------------
 MultiDeviceInferRequest::MultiDeviceInferRequest(const InputsDataMap& networkInputs,
                                                  const OutputsDataMap& networkOutputs,
-                                                 const IInferRequestInternal::Ptr & request_to_share_blobs_with)
+                                                 const SoIInferRequestInternal & request_to_share_blobs_with)
     : IInferRequestInternal(networkInputs, networkOutputs) {
     if (request_to_share_blobs_with) {
         // borrow device-friendly blobs from the request
@@ -48,7 +48,7 @@ MultiDeviceInferRequest::MultiDeviceInferRequest(const InputsDataMap& networkI
     }
 }
 
-void MultiDeviceInferRequest::SetBlobsToAnotherRequest(const IInferRequestInternal::Ptr& req) {
+void MultiDeviceInferRequest::SetBlobsToAnotherRequest(const SoIInferRequestInternal& req) {
     for (const auto &it : _networkInputs) {
         auto &name = it.first;
         // this request is already in BUSY state, so using the internal functions safely
@@ -25,11 +25,11 @@ class MultiDeviceInferRequest : public InferenceEngine::IInferRequestInternal {
     using Ptr = std::shared_ptr<MultiDeviceInferRequest>;
     explicit MultiDeviceInferRequest(const InferenceEngine::InputsDataMap& networkInputs,
                                      const InferenceEngine::OutputsDataMap& networkOutputs,
-                                     const InferenceEngine::IInferRequestInternal::Ptr & request_to_share_blobs_with);
+                                     const InferenceEngine::SoIInferRequestInternal & request_to_share_blobs_with);
     std::map<std::string, InferenceEngine::InferenceEngineProfileInfo> GetPerformanceCounts() const override;
     void InferImpl() override;
     // Multi-Device impl specific: sets the data (blobs from the device-less requests to the specific device request)
-    void SetBlobsToAnotherRequest(const InferenceEngine::IInferRequestInternal::Ptr& req);
+    void SetBlobsToAnotherRequest(const InferenceEngine::SoIInferRequestInternal& req);
 };
 
 } // namespace MultiDevicePlugin
@@ -13,9 +13,12 @@
 #include <ie_remote_context.hpp>
 #include <cpp/ie_cnn_network.h>
 #include <cpp_interfaces/interface/ie_ivariable_state_internal.hpp>
+#include <details/ie_so_pointer.hpp>
 
 namespace InferenceEngine {
 
+class IInferRequestInternal;
+
 /**
  * @interface IExecutableNetworkInternal
  * @brief An internal API of executable network to be implemented by plugin,
@@ -109,4 +112,9 @@ class IExecutableNetworkInternal : public std::enable_shared_from_this<IExecutab
     virtual RemoteContext::Ptr GetContext() const = 0;
 };
 
+/**
+ * @brief SOPointer to IExecutableNetworkInternal.
+ */
+using SoExecutableNetworkInternal = details::SOPointer<IExecutableNetworkInternal>;
+
 } // namespace InferenceEngine
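
One detail any SOPointer-style wrapper has to get right is member order: C++ destroys data members in reverse order of declaration, so declaring the library handle before the object pointer guarantees the object is torn down while its code is still mapped. A small demonstration with a hypothetical Tracer type:

#include <iostream>

struct Tracer {
    const char* name;
    ~Tracer() { std::cout << "destroying " << name << '\n'; }
};

struct SoPointerLayout {
    Tracer so{"library handle"};   // declared first, destroyed last
    Tracer ptr{"plugin object"};   // declared second, destroyed first
};

int main() {
    SoPointerLayout p;
    // on return, prints "destroying plugin object" and then
    // "destroying library handle": the object dies before the
    // library that provides its code is released
}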
@@ -15,8 +15,10 @@
 #include <string>
 
 namespace InferenceEngine {
+
 class IExecutableNetworkInternal;
 class IVariableStateInternal;
+
 /**
  * @interface IInferRequestInternal
 * @brief An internal API of synchronous inference request to be implemented by plugin,
@@ -212,7 +214,7 @@ class INFERENCE_ENGINE_API_CLASS(IInferRequestInternal) : public std::enable_sha
     int m_curBatch = -1; //!< Current batch value used in dynamic batching
 
     /**
-     * @brief A shared pointer to ExecutableNetworkInternal interface
+     * @brief A shared pointer to IInferRequestInternal
      * @note Needed to correctly handle ownership between objects.
     */
     std::shared_ptr<IExecutableNetworkInternal> _exeNetwork;
@@ -224,4 +226,9 @@ class INFERENCE_ENGINE_API_CLASS(IInferRequestInternal) : public std::enable_sha
     ~IInferRequestInternal();
 };
 
+/**
+ * @brief SOPointer to IInferRequestInternal.
+ */
+using SoIInferRequestInternal = details::SOPointer<IInferRequestInternal>;
+
 } // namespace InferenceEngine
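
With SoIInferRequestInternal holding its own copy of the library handle, releasing the executable network no longer risks unloading the plugin under a live request. The ownership arithmetic, reduced to plain shared_ptrs (the void pointer is a hypothetical stand-in for a dlopen handle):

#include <cassert>
#include <memory>

int main() {
    std::shared_ptr<void> so = std::make_shared<int>(0);  // "loaded plugin library"

    std::shared_ptr<void> held_by_network = so;  // SoExecutableNetworkInternal's copy
    std::shared_ptr<void> held_by_request = so;  // SoIInferRequestInternal's copy
    so.reset();
    assert(held_by_network.use_count() == 2);

    // dropping the network is safe while the request is still in flight
    held_by_network.reset();
    assert(held_by_request.use_count() == 1);  // library stays "loaded" for the request
}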
@@ -5,6 +5,7 @@
 #pragma once
 
 #include <ie_blob.h>
+#include <details/ie_so_pointer.hpp>
 
 #include <memory>
 #include <string>
@@ -68,4 +69,9 @@ class IVariableStateInternal {
  */
 using IMemoryStateInternal = IVariableStateInternal;
 
+/**
+ * @brief SOPointer to IVariableStateInternal.
+ */
+using SoIVariableStateInternal = details::SOPointer<IVariableStateInternal>;
+
 } // namespace InferenceEngine
5 changes: 1 addition & 4 deletions inference-engine/src/plugin_api/ie_icore.hpp
@@ -15,15 +15,12 @@
 
 #include <ie_parameter.hpp>
 #include <cpp/ie_cnn_network.h>
-#include <cpp/ie_executable_network.hpp>
-#include <details/ie_so_pointer.hpp>
+#include "cpp_interfaces/interface/ie_iexecutable_network_internal.hpp"
 
 #include "threading/ie_itask_executor.hpp"
 
 namespace InferenceEngine {
 
-using SoExecutableNetworkInternal = details::SOPointer<IExecutableNetworkInternal>;
-
 /**
  * @interface ICore
  * @brief Minimal ICore interface to allow plugin to get information from Core Inference Engine class.
