added openvino runtime plugin
apankratovantonp committed Aug 26, 2021
1 parent c77d799 commit e3dd8e3
Showing 19 changed files with 664 additions and 622 deletions.
@@ -24,6 +24,11 @@ class INFERENCE_ENGINE_API_CLASS(SharedObjectLoader) {
std::shared_ptr<Impl> _impl;

public:
/**
* @brief Constructs from existing object
*/
SharedObjectLoader(const std::shared_ptr<void>& impl);

/**
* @brief Default constructor
*/
@@ -55,6 +60,12 @@ class INFERENCE_ENGINE_API_CLASS(SharedObjectLoader) {
* @throws Exception if the function is not found
*/
void* get_symbol(const char* symbolName) const;

/**
* @brief Returns a handle to the type-erased implementation
*/
std::shared_ptr<void> get() const;
};

} // namespace details
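The hunk above gives SharedObjectLoader a constructor from an existing type-erased handle and a get() accessor that exposes one. A hedged usage sketch follows; the header location, library path, and symbol name are illustrative assumptions, and the path-based constructor is assumed to already exist in the class:

#include <memory>

#include "details/ie_so_loader.h"  // header location assumed

int main() {
    using InferenceEngine::details::SharedObjectLoader;

    SharedObjectLoader loader{"libtemplate_plugin.so"};      // existing path-based constructor (assumed)
    void* entry = loader.get_symbol("CreatePluginEngine");   // symbol name is illustrative only
    std::shared_ptr<void> handle = loader.get();             // new: type-erased handle to the loaded library
    SharedObjectLoader alias{handle};                        // new: wrap an already loaded library

    // Keeping `handle` (or `alias`) alive next to objects created through
    // `entry` prevents the library from being unloaded while they are in use.
    (void)entry;
    return 0;
}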
@@ -16,7 +16,7 @@
#include <vector>

#include "common.hpp"
#include "cpp/ie_executable_network.hpp"
#include "executable_network.hpp"
#include "ie_plugin_config.hpp"
#include "ie_version.hpp"
#include "remote_context.hpp"
@@ -117,9 +117,9 @@ class INFERENCE_ENGINE_API_CLASS(Core) {
* operation
* @return An executable network reference
*/
ie::ExecutableNetwork compile_model(const std::shared_ptr<const ov::Function>& network,
const std::string& deviceName,
const ConfigMap& config = {});
ExecutableNetwork compile_model(const std::shared_ptr<const ov::Function>& network,
const std::string& deviceName,
const ConfigMap& config = {});

/**
* @brief Reads model and creates an executable network from IR or ONNX file
@@ -134,9 +134,9 @@ class INFERENCE_ENGINE_API_CLASS(Core) {
*
* @return An executable network reference
*/
ie::ExecutableNetwork compile_model(const std::string& modelPath,
const std::string& deviceName,
const ConfigMap& config = {});
ExecutableNetwork compile_model(const std::string& modelPath,
const std::string& deviceName,
const ConfigMap& config = {});

/**
* @brief Creates an executable network from a network object within a specified remote context.
@@ -146,9 +146,9 @@ class INFERENCE_ENGINE_API_CLASS(Core) {
* operation
* @return An executable network object
*/
ie::ExecutableNetwork compile_model(const std::shared_ptr<const ov::Function>& network,
const RemoteContext& context,
const ConfigMap& config = {});
ExecutableNetwork compile_model(const std::shared_ptr<const ov::Function>& network,
const RemoteContext& context,
const ConfigMap& config = {});

/**
* @brief Registers extension
@@ -164,9 +164,9 @@ class INFERENCE_ENGINE_API_CLASS(Core) {
* operation
* @return An executable network reference
*/
ie::ExecutableNetwork import_model(std::istream& networkModel,
const std::string& deviceName,
const ConfigMap& config = {});
ExecutableNetwork import_model(std::istream& networkModel,
const std::string& deviceName,
const ConfigMap& config = {});

/**
* @brief Creates an executable network from a previously exported network within a specified
@@ -178,9 +178,9 @@ class INFERENCE_ENGINE_API_CLASS(Core) {
* operation
* @return An executable network reference
*/
ie::ExecutableNetwork import_model(std::istream& networkModel,
const RemoteContext& context,
const ConfigMap& config = {});
ExecutableNetwork import_model(std::istream& networkModel,
const RemoteContext& context,
const ConfigMap& config = {});

/**
* @brief Query device if it supports specified network with specified configuration
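A minimal sketch of calling the updated Core API, which now returns ov::runtime::ExecutableNetwork rather than ie::ExecutableNetwork. The header path, default-constructibility of Core, model path, blob path, and device name are assumptions for illustration:

#include <fstream>

#include "openvino/runtime/core.hpp"  // header path assumed

int main() {
    ov::runtime::Core core;  // default construction assumed, as in the existing API

    // File-based overload declared above.
    ov::runtime::ExecutableNetwork compiled = core.compile_model("model.xml", "CPU");

    // import_model mirrors the same signature change for exported networks.
    std::ifstream blob{"exported.blob", std::ios::binary};
    ov::runtime::ExecutableNetwork imported = core.import_model(blob, "CPU");

    (void)compiled;
    (void)imported;
    return 0;
}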
@@ -33,7 +33,7 @@ class Core;
* @brief This is an interface of an executable network
*/
class INFERENCE_ENGINE_API_CLASS(ExecutableNetwork) {
std::shared_ptr<SharedObject> _so;
std::shared_ptr<void> _so;
std::shared_ptr<InferenceEngine::IExecutableNetworkInternal> _impl;

/**
@@ -42,8 +42,7 @@ class INFERENCE_ENGINE_API_CLASS(ExecutableNetwork) {
* object is destroyed.
* @param impl Initialized shared pointer
*/
ExecutableNetwork(const std::shared_ptr<SharedObject>& so,
const std::shared_ptr<ie::IExecutableNetworkInternal>& impl);
ExecutableNetwork(const std::shared_ptr<void>& so, const std::shared_ptr<ie::IExecutableNetworkInternal>& impl);
friend class ov::runtime::Core;

public:
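The member change above — from std::shared_ptr<SharedObject> to a plain std::shared_ptr<void> — is repeated in InferRequest, RemoteContext, and VariableState below. A self-contained illustration of the idiom in generic C++ (not repository code):

#include <iostream>
#include <memory>

// Stand-in for an implementation class that lives inside a plugin library.
struct Impl {
    void run() const { std::cout << "running\n"; }
};

// Mirrors the member layout used in this commit: a type-erased handle whose
// only job is to keep the plugin library loaded, followed by the actual
// implementation. Members are destroyed in reverse declaration order, so the
// handle outlives the implementation while the latter is being destroyed.
class Wrapper {
    std::shared_ptr<void> _so;
    std::shared_ptr<Impl> _impl;

public:
    Wrapper(std::shared_ptr<void> so, std::shared_ptr<Impl> impl)
        : _so{std::move(so)}, _impl{std::move(impl)} {}

    void run() const { _impl->run(); }
};

int main() {
    // Stands in for the handle returned by SharedObjectLoader::get().
    std::shared_ptr<void> library_handle{new int{0},
                                         [](void* p) { delete static_cast<int*>(p); }};
    Wrapper wrapper{library_handle, std::make_shared<Impl>()};
    wrapper.run();
    return 0;
}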
@@ -30,7 +30,7 @@ namespace runtime {
* It can throw exceptions safely for the application, where it is properly handled.
*/
class INFERENCE_ENGINE_API_CLASS(InferRequest) {
std::shared_ptr<SharedObject> _so;
std::shared_ptr<void> _so;
std::shared_ptr<ie::IInferRequestInternal> _impl;

/**
@@ -39,7 +39,7 @@ class INFERENCE_ENGINE_API_CLASS(InferRequest) {
* destroyed.
* @param impl Initialized shared pointer
*/
InferRequest(const std::shared_ptr<SharedObject>& so, const std::shared_ptr<ie::IInferRequestInternal>& impl);
InferRequest(const std::shared_ptr<void>& so, const std::shared_ptr<ie::IInferRequestInternal>& impl);
friend class ExecutableNetwork;

public:
@@ -35,7 +35,7 @@ class Core;
* networks and remote memory blobs can exist, function and exchange data.
*/
class INFERENCE_ENGINE_API_CLASS(RemoteContext) {
ie::details::SharedObjectLoader _so;
std::shared_ptr<void> _so;
std::shared_ptr<ie::IRemoteContext> _impl;

/**
@@ -44,7 +44,7 @@ class INFERENCE_ENGINE_API_CLASS(RemoteContext) {
* object is destroyed.
* @param impl Initialized shared pointer
*/
RemoteContext(const ie::details::SharedObjectLoader& so, const std::shared_ptr<ie::IRemoteContext>& impl);
RemoteContext(const std::shared_ptr<void>& so, const std::shared_ptr<ie::IRemoteContext>& impl);
friend class Core;

public:
@@ -26,14 +26,13 @@ class Blob;
namespace ov {
namespace runtime {

class SharedObject;
class InferRequest;

/**
* @brief VariableState class
*/
class INFERENCE_ENGINE_API_CLASS(VariableState) {
std::shared_ptr<SharedObject> _so;
std::shared_ptr<void> _so;
std::shared_ptr<ie::IVariableStateInternal> _impl;

/**
@@ -42,7 +41,7 @@ class INFERENCE_ENGINE_API_CLASS(VariableState) {
* @param so Optional: Plugin to use. This is required to ensure that VariableState can work properly even if plugin
* object is destroyed.
*/
VariableState(const std::shared_ptr<SharedObject>& so, const std::shared_ptr<ie::IVariableStateInternal>& impl);
VariableState(const std::shared_ptr<void>& so, const std::shared_ptr<ie::IVariableStateInternal>& impl);

friend class ov::runtime::InferRequest;

@@ -111,7 +111,7 @@ ExecutableNetwork::operator bool() const noexcept {

namespace ov {
namespace runtime {
ExecutableNetwork::ExecutableNetwork(const std::shared_ptr<SharedObject>& so,
ExecutableNetwork::ExecutableNetwork(const std::shared_ptr<void>& so,
const std::shared_ptr<ie::IExecutableNetworkInternal>& impl)
: _so{so},
_impl{impl} {
@@ -196,7 +196,7 @@ bool InferRequest::operator==(const InferRequest& r) const noexcept {
namespace ov {
namespace runtime {

InferRequest::InferRequest(const std::shared_ptr<SharedObject>& so, const ie::IInferRequestInternal::Ptr& impl)
InferRequest::InferRequest(const std::shared_ptr<void>& so, const ie::IInferRequestInternal::Ptr& impl)
: _so{so},
_impl{impl} {
IE_ASSERT(_impl != nullptr);
103 changes: 101 additions & 2 deletions inference-engine/src/inference_engine/src/cpp/ie_plugin.hpp
@@ -17,6 +17,8 @@
#include "cpp/ie_cnn_network.h"
#include "cpp/exception2status.hpp"
#include "cpp_interfaces/interface/ie_iplugin_internal.hpp"
#include "so_ptr.hpp"
#include "openvino/runtime/common.hpp"

#if defined __GNUC__
# pragma GCC diagnostic push
@@ -27,7 +29,7 @@
if (!_ptr) IE_THROW() << "Wrapper used in the PLUGIN_CALL_STATEMENT was not initialized."; \
try { \
__VA_ARGS__; \
} catch(...) {details::Rethrow();}
} catch(...) {::InferenceEngine::details::Rethrow();}

namespace InferenceEngine {
/**
@@ -116,8 +118,105 @@ class InferencePlugin : protected details::SOPointer<IInferencePlugin> {
};
} // namespace InferenceEngine

#undef PLUGIN_CALL_STATEMENT

#if defined __GNUC__
# pragma GCC diagnostic pop
#endif

namespace ov {
namespace runtime {

/**
* @brief This class is a C++ API wrapper for IInferencePlugin.
*
* It can throw exceptions safely for the application, where it is properly handled.
*/
struct InferencePlugin {
std::shared_ptr<void> _so;
std::shared_ptr<ie::IInferencePlugin> _ptr;

InferencePlugin(const std::shared_ptr<void>& so, const std::shared_ptr<ie::IInferencePlugin>& impl) :
_so{so},
_ptr{impl} {
IE_ASSERT(_ptr != nullptr);
}

void set_name(const std::string& deviceName) {
PLUGIN_CALL_STATEMENT(_ptr->SetName(deviceName));
}

void set_core(std::weak_ptr<ie::ICore> core) {
PLUGIN_CALL_STATEMENT(_ptr->SetCore(core));
}

const ie::Version get_version() const {
PLUGIN_CALL_STATEMENT(return _ptr->GetVersion());
}

void add_extension(const ie::IExtensionPtr& extension) {
PLUGIN_CALL_STATEMENT(_ptr->AddExtension(extension));
}

void set_config(const ConfigMap& config) {
PLUGIN_CALL_STATEMENT(_ptr->SetConfig(config));
}

SoPtr<ie::IExecutableNetworkInternal> load_model(const ie::CNNNetwork& network, const ConfigMap& config) {
PLUGIN_CALL_STATEMENT(return {_so, _ptr->LoadNetwork(network, config)});
}

SoPtr<ie::IExecutableNetworkInternal> load_model(const ie::CNNNetwork& network,
const std::shared_ptr<ie::IRemoteContext>& context,
const ConfigMap& config) {
PLUGIN_CALL_STATEMENT(return {_so, _ptr->LoadNetwork(network, config, context)});
}

SoPtr<ie::IExecutableNetworkInternal> load_model(const std::string& modelPath, const ConfigMap& config) {
PLUGIN_CALL_STATEMENT(return {_so, _ptr->LoadNetwork(modelPath, config)});
}

ie::QueryNetworkResult query_model(const ie::CNNNetwork& network,
const ConfigMap& config) const {
ie::QueryNetworkResult res;
PLUGIN_CALL_STATEMENT(res = _ptr->QueryNetwork(network, config));
if (res.rc != ie::OK) IE_THROW() << res.resp.msg;
return res;
}

SoPtr<ie::IExecutableNetworkInternal> import_model(const std::string& modelFileName,
const ConfigMap& config) {
PLUGIN_CALL_STATEMENT(return {_so, _ptr->ImportNetwork(modelFileName, config)});
}

SoPtr<ie::IExecutableNetworkInternal> import_model(std::istream& networkModel,
const ConfigMap& config) {
PLUGIN_CALL_STATEMENT(return {_so, _ptr->ImportNetwork(networkModel, config)});
}

SoPtr<ie::IExecutableNetworkInternal> import_model(std::istream& networkModel,
const std::shared_ptr<ie::IRemoteContext>& context,
const ConfigMap& config) {
PLUGIN_CALL_STATEMENT(return {_so, _ptr->ImportNetwork(networkModel, context, config)});
}

ie::Parameter get_metric(const std::string& name, const ie::ParamMap& options) const {
PLUGIN_CALL_STATEMENT(return _ptr->GetMetric(name, options));
}

SoPtr<ie::IRemoteContext> create_context(const ie::ParamMap& params) {
PLUGIN_CALL_STATEMENT(return {_so, _ptr->CreateContext(params)});
}

SoPtr<ie::IRemoteContext> get_default_context(const ie::ParamMap& params) {
PLUGIN_CALL_STATEMENT(return {_so, _ptr->GetDefaultContext(params)});
}

ie::Parameter get_config(const std::string& name, const ie::ParamMap& options) const {
PLUGIN_CALL_STATEMENT(return _ptr->GetConfig(name, options));
}
};

} // namespace runtime
} // namespace ov

#undef PLUGIN_CALL_STATEMENT
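A hedged sketch of driving the new ov::runtime::InferencePlugin wrapper; only the member-function calls come from the struct above, while the plugin handle and implementation pointer are assumed to be produced by the existing plugin-loading machinery and simply passed in, and the device name and empty config map are placeholders:

#include <memory>
#include <string>

#include "cpp/ie_plugin.hpp"  // the header shown above

// Compiles a model through an already created plugin. `plugin_so` is expected
// to be the handle of the shared library that produced `plugin_impl`.
auto compile_with_plugin(const std::shared_ptr<void>& plugin_so,
                         const std::shared_ptr<InferenceEngine::IInferencePlugin>& plugin_impl,
                         const std::string& model_path) {
    ov::runtime::InferencePlugin plugin{plugin_so, plugin_impl};
    plugin.set_name("TEMPLATE");  // placeholder device name

    // load_model returns a SoPtr that pairs the executable network with
    // `plugin_so`, so the library stays loaded while the network is alive.
    return plugin.load_model(model_path, {});
}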
@@ -49,7 +49,7 @@ void VariableState::SetState(Blob::Ptr state) {
namespace ov {
namespace runtime {

VariableState::VariableState(const std::shared_ptr<SharedObject>& so, const ie::IVariableStateInternal::Ptr& impl)
VariableState::VariableState(const std::shared_ptr<void>& so, const ie::IVariableStateInternal::Ptr& impl)
: _so{so},
_impl{impl} {
IE_ASSERT(_impl != nullptr);