Skip to content

Commit

Permalink
Auto plugin async infer request implementation (openvinotoolkit#5707)
Browse files Browse the repository at this point in the history
* Async auto-request, now with a revamped SetCallback (after openvinotoolkit#5645 was merged, it is safe to set).
Also modified the test to verify that the callback is called on the same (user's) request, and e.g. not on the actual device's request

* Override CreateInferRequestImpl() instead of CreateInferRequest()

Signed-off-by: Shoujiang Ma <[email protected]>

Co-authored-by: myshevts <[email protected]>
  • Loading branch information
2 people authored and rnugmanx committed Aug 26, 2021
1 parent 711616a commit 0d80eab
Show file tree
Hide file tree
Showing 7 changed files with 43 additions and 5 deletions.
12 changes: 12 additions & 0 deletions inference-engine/include/cpp/ie_infer_request.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -217,6 +217,18 @@ class INFERENCE_ENGINE_API_CLASS(InferRequest) {
* @return true if current InferRequest object is initialized, false - otherwise
*/
explicit operator bool() const noexcept;

/**
* @brief Compares whether this request wraps the same impl underneath
* @return true if current InferRequest object doesn't wrap the same impl as the operator's arg
*/
bool operator!=(const InferRequest&) const noexcept;

/**
* @brief Compares whether this request wraps the same impl underneath
* @return true if current InferRequest object wraps the same impl as the operator's arg
*/
bool operator==(const InferRequest&) const noexcept;
};

template<>
Expand Down
6 changes: 3 additions & 3 deletions inference-engine/src/auto_plugin/auto_exec_network.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -21,10 +21,10 @@ AutoExecutableNetwork::AutoExecutableNetwork(const SoExecutableNetworkInternal&

AutoExecutableNetwork::~AutoExecutableNetwork() = default;

IInferRequestInternal::Ptr AutoExecutableNetwork::CreateInferRequestImpl(InputsDataMap networkInputs,
InferenceEngine::IInferRequestInternal::Ptr AutoExecutableNetwork::CreateInferRequestImpl(InputsDataMap networkInputs,
OutputsDataMap networkOutputs) {
SoIInferRequestInternal inferRequest = { _network, _network->CreateInferRequest() };
return std::make_shared<AutoInferRequest>(networkInputs, networkOutputs, inferRequest);
SoIInferRequestInternal inferRequest = {_network, _network->CreateInferRequest()};
return std::make_shared<AutoInferRequest>(_networkInputs, _networkOutputs, inferRequest);
}

void AutoExecutableNetwork::Export(std::ostream& networkModel) {
Expand Down
3 changes: 2 additions & 1 deletion inference-engine/src/auto_plugin/auto_exec_network.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@ struct DeviceInformation {
std::map<std::string, std::string> config;
};

class AutoExecutableNetwork : public InferenceEngine::ExecutableNetworkThreadSafeDefault {
class AutoExecutableNetwork : public InferenceEngine::ExecutableNetworkInternal {
public:
using Ptr = std::shared_ptr<AutoExecutableNetwork>;

Expand All @@ -38,6 +38,7 @@ class AutoExecutableNetwork : public InferenceEngine::ExecutableNetworkThreadSaf
InferenceEngine::Parameter GetConfig(const std::string& name) const override;
InferenceEngine::IInferRequestInternal::Ptr CreateInferRequestImpl(InferenceEngine::InputsDataMap networkInputs,
InferenceEngine::OutputsDataMap networkOutputs) override;

~AutoExecutableNetwork() override;

private:
Expand Down
12 changes: 12 additions & 0 deletions inference-engine/src/auto_plugin/auto_infer_request.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -36,4 +36,16 @@ void AutoInferRequest::Cancel() {
_inferRequest->Cancel();
}

// Starts asynchronous inference by delegating to the actual device's request.
void AutoInferRequest::StartAsync() {
    _inferRequest->StartAsync();
}

// Waits for the asynchronous inference to finish (or until the timeout elapses),
// forwarding directly to the device's request.
// @param millis_timeout wait timeout in milliseconds (interpreted by the device request)
// @return the status code reported by the underlying device request
InferenceEngine::StatusCode AutoInferRequest::Wait(int64_t millis_timeout) {
    return _inferRequest->Wait(millis_timeout);
}

// Installs the user's completion callback directly on the device's request, so the
// callback fires with this (user-facing) request's semantics when the device finishes.
// NOTE(review): the functor is forwarded by value (copied); moving it would need
// <utility>, whose presence in this file's includes is not visible here — TODO confirm.
void AutoInferRequest::SetCallback(Callback callback) {
    _inferRequest->SetCallback(callback);
}

} // namespace AutoPlugin
4 changes: 4 additions & 0 deletions inference-engine/src/auto_plugin/auto_infer_request.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -30,6 +30,10 @@ class AutoInferRequest : public InferenceEngine::IInferRequestInternal {
void SetBlob(const std::string& name, const InferenceEngine::Blob::Ptr& data) override;
InferenceEngine::Blob::Ptr GetBlob(const std::string& name) override;
void Cancel() override;
    // Asynchronous API: delegated to the actual device's request
void StartAsync() override;
InferenceEngine::StatusCode Wait(int64_t millis_timeout) override;
void SetCallback(Callback callback) override;

private:
InferenceEngine::SoIInferRequestInternal _inferRequest;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -206,4 +206,12 @@ bool InferRequest::operator!() const noexcept {
// Reports whether this wrapper holds an underlying implementation.
InferRequest::operator bool() const noexcept {
    return static_cast<bool>(_impl);
}

// Inequality is defined as the negation of equality: true when the two
// requests wrap different underlying implementations.
bool InferRequest::operator!=(const InferRequest& r) const noexcept {
    return !(*this == r);
}

// Two requests compare equal when they share the same underlying impl pointer.
bool InferRequest::operator==(const InferRequest& r) const noexcept {
    return _impl == r._impl;
}
} // namespace InferenceEngine
Original file line number Diff line number Diff line change
Expand Up @@ -36,8 +36,9 @@ TEST_P(CallbackTests, canCallSyncAndAsyncWithCompletionCallback) {
// Create InferRequest
InferenceEngine::InferRequest req = execNet.CreateInferRequest();
bool isCalled = false;
req.SetCompletionCallback<std::function<void(InferenceEngine::InferRequest, InferenceEngine::StatusCode)>>(
req.SetCompletionCallback<std::function<void(InferenceEngine::InferRequest r, InferenceEngine::StatusCode)>>(
[&](InferenceEngine::InferRequest request, InferenceEngine::StatusCode status) {
ASSERT_TRUE(req == request); //the callback is called on the same impl of the request
// HSD_1805940120: Wait on starting callback return HDDL_ERROR_INVAL_TASK_HANDLE
if (targetDevice != CommonTestUtils::DEVICE_HDDL) {
ASSERT_EQ(static_cast<int>(InferenceEngine::StatusCode::OK), status);
Expand Down

0 comments on commit 0d80eab

Please sign in to comment.