Added openvino executable network API
apankratovantonp committed Sep 6, 2021
1 parent ce9cf04 commit e369fd6
Showing 10 changed files with 303 additions and 26 deletions.
@@ -13,7 +13,8 @@
#include <map>
#include <string>

namespace InferenceEngine {};
namespace InferenceEngine {}

namespace ov {
namespace ie = InferenceEngine;
namespace runtime {
@@ -0,0 +1,155 @@
// Copyright (C) 2018-2021 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//

/**
* @brief A header file that provides ExecutableNetwork class
*
* @file ie_executable_network.hpp
*/

#pragma once

#include <map>
#include <memory>
#include <ostream>
#include <string>
#include <vector>

#include "ie_parameter.hpp"
#include "infer_request.hpp"
#include "remote_context.hpp"

namespace InferenceEngine {
class IExecutableNetworkInternal;
class RemoteContext;
} // namespace InferenceEngine
namespace ngraph {
namespace op {
namespace v0 {
class Parameter;
class Result;
} // namespace v0
} // namespace op
} // namespace ngraph
namespace ov {
class Function;
namespace runtime {

class Core;

/**
* @brief This is an interface of an executable network
*/
class INFERENCE_ENGINE_API_CLASS(ExecutableNetwork) {
std::shared_ptr<SharedObject> _so;
std::shared_ptr<InferenceEngine::IExecutableNetworkInternal> _impl;

/**
* @brief Constructs ExecutableNetwork from the initialized std::shared_ptr
* @param so Plugin to use. This is required to ensure that ExecutableNetwork can work properly even if the plugin
* object is destroyed.
* @param impl Initialized shared pointer
*/
ExecutableNetwork(const std::shared_ptr<SharedObject>& so,
const std::shared_ptr<ie::IExecutableNetworkInternal>& impl);
friend class ov::runtime::Core;

public:
/**
* @brief A default constructor.
*/
ExecutableNetwork() = default;

/**
* @brief Get executable graph information from a device
*
* @return Function containing Executable Graph Info
*/
std::shared_ptr<const Function> get_runtime_function() const;

/**
* @brief Get parameters of the executable graph function
*
* @return vector of parameter nodes
*/
std::vector<std::shared_ptr<ngraph::op::v0::Parameter>> get_parameters() const;

/**
* @brief Get results of the executable graph function
*
* @return vector of result nodes
*/
std::vector<std::shared_ptr<ngraph::op::v0::Result>> get_results() const;

/**
* @brief Creates an inference request object used to infer the network.
*
* The created request has allocated input and output blobs (that can be changed later).
*
* @return InferRequest object
*/
InferRequest create_infer_request();

/**
* @brief Exports the current executable network.
*
* @see Core::ImportNetwork
*
* @param networkModel Network model output stream
*/
void export_model(std::ostream& networkModel);

/**
* @brief Sets configuration for current executable network
*
* @param config Map of pairs: (config parameter name, config parameter value)
*/
void set_config(const ie::ParamMap& config);

/** @brief Gets configuration for current executable network.
*
* The method extracts information that affects executable network execution.
* The list of supported configuration values can be extracted via
* ExecutableNetwork::get_metric with the SUPPORTED_CONFIG_KEYS key, but some of these keys cannot be changed
* dynamically, e.g. DEVICE_ID cannot be changed if an executable network has already been compiled for a
* particular device.
*
* @param name config key, can be found in ie_plugin_config.hpp
* @return Configuration parameter value
*/
ie::Parameter get_config(const std::string& name) const;

/**
* @brief Gets general runtime metric for an executable network.
*
* It can be the network name, the actual device ID on
* which the executable network is running, or other properties that cannot be changed dynamically.
*
* @param name metric name to request
* @return Metric parameter value
*/
ie::Parameter get_metric(const std::string& name) const;

/**
* @brief Returns a pointer to the plugin-specific shared context
* on the remote accelerator device that was used to create this ExecutableNetwork
* @return A context
*/
std::shared_ptr<ie::RemoteContext> get_context() const;

/**
* @brief Checks if the current ExecutableNetwork object is not initialized
* @return true if the current ExecutableNetwork object is not initialized, false otherwise
*/
bool operator!() const noexcept;

/**
* @brief Checks if the current ExecutableNetwork object is initialized
* @return true if the current ExecutableNetwork object is initialized, false otherwise
*/
explicit operator bool() const noexcept;
};

} // namespace runtime
} // namespace ov
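
The header above defines the whole surface of the new class: metadata accessors (get_runtime_function, get_parameters, get_results), request creation, configuration, and initialization checks. A minimal usage sketch follows; how an ExecutableNetwork is obtained is an assumption here, since this commit only exposes the private constructor to ov::runtime::Core via a friend declaration.

#include <openvino/runtime/executable_network.hpp>

// Sketch only: the call sequence the new API is designed for. Creation is
// left to the caller because only `friend class ov::runtime::Core` is
// visible in this commit.
void describe_and_infer(ov::runtime::ExecutableNetwork& exec_net) {
    if (!exec_net)
        return;  // default-constructed object: not initialized

    // Inspect the compiled graph.
    std::shared_ptr<const ov::Function> graph = exec_net.get_runtime_function();
    auto parameters = exec_net.get_parameters();  // graph inputs
    auto results = exec_net.get_results();        // graph outputs

    // Create a request; its input and output blobs are already allocated.
    ov::runtime::InferRequest request = exec_net.create_infer_request();
}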
@@ -24,6 +24,9 @@ class Blob;

namespace ov {
namespace runtime {

class ExecutableNetwork;

/**
* @brief This is an interface of asynchronous infer request
*
@@ -40,7 +43,7 @@ class INFERENCE_ENGINE_API_CLASS(InferRequest) {
* @param impl Initialized shared pointer
*/
InferRequest(const std::shared_ptr<SharedObject>& so, const std::shared_ptr<ie::IInferRequestInternal>& impl);
friend class ExecutableNetwork;
friend class ov::runtime::ExecutableNetwork;

public:
/**
@@ -9,6 +9,7 @@
#include "cpp_interfaces/interface/ie_iremote_context.hpp"
#include "ie_common.h"
#include "ie_executable_network_base.hpp"
#include "openvino/runtime/executable_network.hpp"

namespace InferenceEngine {

@@ -18,7 +19,7 @@ namespace InferenceEngine {
try { \
__VA_ARGS__; \
} catch (...) { \
details::Rethrow(); \
InferenceEngine::details::Rethrow(); \
}
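
The switch from details::Rethrow() to the fully qualified InferenceEngine::details::Rethrow() matters because the macro is now also expanded inside the new ov::runtime namespace further down in this file, where the unqualified name would not resolve. A reduced sketch of the failure mode, with illustrative namespaces rather than the real ones:

namespace A { namespace details { inline void Rethrow() {} } }

namespace B {
inline void CallIt() {
    // details::Rethrow();   // would not compile: no `details` is visible in B
    A::details::Rethrow();   // the fully qualified name works from any namespace
}
}  // namespace B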

ExecutableNetwork::ExecutableNetwork(const details::SharedObjectLoader& so, const IExecutableNetworkInternal::Ptr& impl)
@@ -55,9 +56,10 @@ ExecutableNetwork::operator IExecutableNetwork::Ptr() {

std::vector<VariableState> ExecutableNetwork::QueryState() {
std::vector<VariableState> controller;
EXEC_NET_CALL_STATEMENT(for (auto&& state
: _impl->QueryState()) {
controller.emplace_back(VariableState{_so, state});
EXEC_NET_CALL_STATEMENT({
for (auto&& state : _impl->QueryState()) {
controller.emplace_back(VariableState{_so, state});
}
});
return controller;
}
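
The QueryState rewrite passes one brace-wrapped block to EXEC_NET_CALL_STATEMENT instead of letting the for statement straddle the macro argument list, which reads better and keeps formatters from splitting the loop. Substituting into the macro body above, the new call expands roughly to:

try {
    {
        for (auto&& state : _impl->QueryState()) {
            controller.emplace_back(VariableState{_so, state});
        }
    };  // the macro appends a harmless `;` after the block
} catch (...) {
    InferenceEngine::details::Rethrow();
}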
@@ -106,3 +108,58 @@ ExecutableNetwork::operator bool() const noexcept {
return !!_impl;
}
} // namespace InferenceEngine

namespace ov {
namespace runtime {
ExecutableNetwork::ExecutableNetwork(const std::shared_ptr<SharedObject>& so,
const std::shared_ptr<ie::IExecutableNetworkInternal>& impl)
: _so{so},
_impl{impl} {
IE_ASSERT(_impl != nullptr);
}

std::shared_ptr<const Function> ExecutableNetwork::get_runtime_function() const {
EXEC_NET_CALL_STATEMENT(return std::const_pointer_cast<const Function>(_impl->GetExecGraphInfo()));
}

std::vector<std::shared_ptr<ngraph::op::v0::Parameter>> ExecutableNetwork::get_parameters() const {
EXEC_NET_CALL_STATEMENT(return _impl->GetExecGraphInfo()->get_parameters());
}

std::vector<std::shared_ptr<ngraph::op::v0::Result>> ExecutableNetwork::get_results() const {
EXEC_NET_CALL_STATEMENT(return _impl->GetExecGraphInfo()->get_results());
}

InferRequest ExecutableNetwork::create_infer_request() {
EXEC_NET_CALL_STATEMENT(return {_so, _impl->CreateInferRequest()});
}

void ExecutableNetwork::export_model(std::ostream& networkModel) {
EXEC_NET_CALL_STATEMENT(_impl->Export(networkModel));
}

void ExecutableNetwork::set_config(const ie::ParamMap& config) {
EXEC_NET_CALL_STATEMENT(_impl->SetConfig(config));
}

ie::Parameter ExecutableNetwork::get_config(const std::string& name) const {
EXEC_NET_CALL_STATEMENT(return _impl->GetConfig(name));
}

ie::Parameter ExecutableNetwork::get_metric(const std::string& name) const {
EXEC_NET_CALL_STATEMENT(return _impl->GetMetric(name));
}

std::shared_ptr<ie::RemoteContext> ExecutableNetwork::get_context() const {
EXEC_NET_CALL_STATEMENT(return _impl->GetContext());
}

bool ExecutableNetwork::operator!() const noexcept {
return !_impl;
}

ExecutableNetwork::operator bool() const noexcept {
return !!_impl;
}
} // namespace runtime
} // namespace ov
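
Each new ov::runtime method is a thin forwarding wrapper: it delegates to _impl inside EXEC_NET_CALL_STATEMENT, and the two boolean conversions simply report whether _impl is set. A minimal sketch of the initialization check they enable, using only the API added in this commit:

#include <cassert>

#include "openvino/runtime/executable_network.hpp"

int main() {
    // A default-constructed ExecutableNetwork holds no _impl, so both
    // conversions report "not initialized".
    ov::runtime::ExecutableNetwork exec;
    assert(!exec);                     // operator!() returns true
    assert(!static_cast<bool>(exec));  // explicit operator bool() returns false
    return 0;
}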
@@ -67,11 +67,7 @@ void IExecutableNetworkInternal::Export(std::ostream& networkModel) {
}

std::shared_ptr<ngraph::Function> IExecutableNetworkInternal::GetExecGraphInfo() {
return _runtime_function;
}

std::vector<std::shared_ptr<IVariableStateInternal>> IExecutableNetworkInternal::QueryState() {
@@ -220,8 +220,7 @@ std::shared_ptr<IExecutableNetworkInternal> IInferencePlugin::LoadExeNetworkImpl

void IInferencePlugin::SetExeNetworkInfo(const std::shared_ptr<IExecutableNetworkInternal>& exeNetwork,
const ConstInputsDataMap& inputs,
const ConstOutputsDataMap& outputs,
const std::shared_ptr<ov::Function>& function) {
const ConstOutputsDataMap& outputs) {
IE_ASSERT(exeNetwork != nullptr);
// Set inputs/outputs and pointer to plugin manually here
exeNetwork->setNetworkInputs(copyInfo(constMapCast(inputs)));
@@ -150,11 +150,7 @@ class INFERENCE_ENGINE_API_CLASS(IExecutableNetworkInternal)
virtual std::shared_ptr<IInferRequestInternal> CreateInferRequestImpl(InputsDataMap networkInputs,
OutputsDataMap networkOutputs);

std::shared_ptr<ov::Function> _runtime_function; //!< Holds information about network inputs and outputs
InferenceEngine::InputsDataMap _networkInputs; //!< Holds information about network inputs info
InferenceEngine::OutputsDataMap _networkOutputs; //!< Holds information about network outputs data

@@ -304,16 +304,7 @@ class INFERENCE_ENGINE_API_CLASS(IInferencePlugin) : public std::enable_shared_f
*/
void SetExeNetworkInfo(const std::shared_ptr<IExecutableNetworkInternal>& exeNetwork,
const ConstInputsDataMap& inputs,
const ConstOutputsDataMap& outputs,
const std::shared_ptr<ov::Function>& function);

/**
* @brief Set input and output information to executable network. This method is used to
* set additional information to InferenceEngine::IExecutableNetworkInternal created by a device plugin.
* @param function Function with initial execution info
*/
void SetExeNetworkInfo(const std::shared_ptr<IExecutableNetworkInternal>& exeNetwork,
const std::shared_ptr<ov::Function>& function);
const ConstOutputsDataMap& outputs);

/**
* @brief Set input and output information to executable network. This method is used to
@@ -0,0 +1,49 @@
// Copyright (C) 2018-2021 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//

#include <gtest/gtest.h>
#include <openvino/runtime/executable_network.hpp>

using namespace ::testing;
using namespace std;

TEST(ExecutableNetworkOVTests, throwsOnUninitializedExportStream) {
ov::runtime::ExecutableNetwork exec;
ASSERT_THROW(exec.export_model(std::cout), InferenceEngine::NotAllocated);
}

TEST(ExecutableNetworkOVTests, throwsOnUninitializedGetRuntimeFunction) {
ov::runtime::ExecutableNetwork exec;
ASSERT_THROW(exec.get_runtime_function(), InferenceEngine::NotAllocated);
}

TEST(ExecutableNetworkOVTests, throwsOnUninitializedGetParameters) {
ov::runtime::ExecutableNetwork exec;
ASSERT_THROW(exec.get_parameters(), InferenceEngine::NotAllocated);
}

TEST(ExecutableNetworkOVTests, throwsOnUninitializedGetResults) {
ov::runtime::ExecutableNetwork exec;
ASSERT_THROW(exec.get_results(), InferenceEngine::NotAllocated);
}

TEST(ExecutableNetworkOVTests, throwsOnUninitializedSetConfig) {
ov::runtime::ExecutableNetwork exec;
ASSERT_THROW(exec.set_config({{}}), InferenceEngine::NotAllocated);
}

TEST(ExecutableNetworkOVTests, throwsOnUninitializedGetConfig) {
ov::runtime::ExecutableNetwork exec;
ASSERT_THROW(exec.get_config({}), InferenceEngine::NotAllocated);
}

TEST(ExecutableNetworkOVTests, throwsOnUninitializedGetMetric) {
ov::runtime::ExecutableNetwork exec;
ASSERT_THROW(exec.get_metric({}), InferenceEngine::NotAllocated);
}

TEST(ExecutableNetworkOVTests, throwsOnUninitializedGetContext) {
ov::runtime::ExecutableNetwork exec;
ASSERT_THROW(exec.get_context(), InferenceEngine::NotAllocated);
}
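
The suite exercises the uninitialized-throw path of every forwarding method. A natural companion, not part of this commit, would pin down the two boolean conversions in the same style:

TEST(ExecutableNetworkOVTests, defaultConstructedReportsUninitialized) {
    ov::runtime::ExecutableNetwork exec;
    ASSERT_TRUE(!exec);                     // operator!() on an empty object
    ASSERT_FALSE(static_cast<bool>(exec));  // explicit operator bool()
}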