change npu-plugin backend log for changing the behavior of dry on execution
DanLiu2Intel committed May 28, 2024
1 parent 63a13d4 commit 641c00c
Showing 6 changed files with 25 additions and 4 deletions.
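Taken together, the commit threads a backend-presence check from NPUBackends through Plugin into CompiledModel::create_infer_request: a missing backend now only produces a warning at load/compile time, while creating an infer request fails early. A minimal sketch of the resulting wiring, assuming placeholder member types for everything outside the diffed lines:

#include <memory>

// Sketch only: member types and wiring outside the diffed hunks are placeholders.
class NPUBackends final {
public:
    bool is_empty() const {
        return _backend == nullptr;
    }
private:
    std::shared_ptr<void> _backend;  // placeholder for the engine backend handle
};

class Plugin /* : public ov::IPlugin in the real sources */ {
public:
    bool is_backend_empty() const {
        return _backends->is_empty();
    }
private:
    std::shared_ptr<NPUBackends> _backends = std::make_shared<NPUBackends>();
};

// CompiledModel::create_infer_request() then refuses to run when the plugin has no backend:
//     if (std::dynamic_pointer_cast<const Plugin>(get_plugin())->is_backend_empty())
//         OPENVINO_THROW("Cannot find a backend for inference. Make sure the device is available.");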
3 changes: 3 additions & 0 deletions src/plugins/intel_npu/src/plugin/include/backends.hpp
@@ -36,6 +36,9 @@ class NPUBackends final {
    std::string getCompilationPlatform(const std::string_view platform, const std::string& deviceId) const;

    void setup(const Config& config);
+    bool is_empty() const {
+        return _backend == nullptr;
+    }

private:
    Logger _logger;
@@ -10,6 +10,7 @@
#include "intel_npu/utils/logger/logger.hpp"
#include "npu.hpp"
#include "openvino/runtime/so_ptr.hpp"
+#include "plugin.hpp"

namespace intel_npu {

4 changes: 4 additions & 0 deletions src/plugins/intel_npu/src/plugin/include/plugin.hpp
@@ -53,6 +53,10 @@ class Plugin : public ov::IPlugin {
    ov::SupportedOpsMap query_model(const std::shared_ptr<const ov::Model>& model,
                                    const ov::AnyMap& properties) const override;

+    bool is_backend_empty() const {
+        return _backends->is_empty();
+    }
+
private:
ov::SoPtr<ICompiler> getCompiler(const Config& config) const;

8 changes: 5 additions & 3 deletions src/plugins/intel_npu/src/plugin/src/backends.cpp
@@ -109,9 +109,10 @@ NPUBackends::NPUBackends(const std::vector<AvailableBackends>& backendRegistry,
        }
#endif
    } catch (const std::exception& ex) {
-        _logger.error("Got an error during backend '%s' loading : %s", backendName.c_str(), ex.what());
+        _logger.warning("Got an issue during backend '%s' loading: %s", backendName.c_str(), ex.what());
+        // TODO: can we get the detailed exception type and rethrow it? Need to check OPENVINO_THROW.
    } catch (...) {
-        _logger.error("Got an unknown error during backend '%s' loading", backendName.c_str());
+        _logger.warning("Got an unknown issue during backend '%s' loading", backendName.c_str());
}
}

@@ -127,7 +128,8 @@ NPUBackends::NPUBackends(const std::vector<AvailableBackends>& backendRegistry,
    if (_backend != nullptr) {
        _logger.info("Use '%s' backend for inference", _backend->getName().c_str());
    } else {
-        _logger.error("Cannot find backend for inference. Make sure the device is available.");
+        _logger.warning("Cannot find a backend. Make sure the device is available. Compilation can still proceed, "
+                        "but inference will fail!");
}
}

9 changes: 9 additions & 0 deletions src/plugins/intel_npu/src/plugin/src/compiled_model.cpp
@@ -3,6 +3,7 @@
//

#include "compiled_model.hpp"
+//#include "plugin.hpp"

#include <fstream>
#include <string_view>
@@ -104,6 +105,14 @@
std::shared_ptr<ov::IAsyncInferRequest> CompiledModel::create_infer_request() const {
OV_ITT_SCOPED_TASK(itt::domains::NPUPlugin, "CompiledModel::create_infer_request");

+    // Check that the plugin still holds a backend before creating an infer request
+    if (std::dynamic_pointer_cast<const Plugin>(get_plugin())->is_backend_empty()) {
+        _logger.error("Cannot find a backend for inference. Make sure the device is available!");
+        OPENVINO_THROW("Cannot find a backend for inference. Make sure the device is available.");
+    } else {
+        _logger.info("Backend is ready for inference.");
+    }
+
if (_executorPtr == nullptr && _device != nullptr) {
_executorPtr = _device->createExecutor(_networkPtr, _config);
}
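For context, this is roughly how the new behavior surfaces to an application, assuming compilation is allowed to proceed on a machine without an NPU backend while inference is not; the model path is a placeholder, and the ov::Exception type matches the OPENVINO_THROW used above:

#include <iostream>
#include <openvino/openvino.hpp>

int main() {
    ov::Core core;
    // Compilation may still succeed without an NPU backend present (dry-run compilation)...
    auto compiled = core.compile_model("model.xml", "NPU");  // placeholder model path
    try {
        // ...but creating an infer request now fails early with a descriptive error.
        auto request = compiled.create_infer_request();
        request.infer();
    } catch (const ov::Exception& e) {
        std::cerr << "No NPU backend available for inference: " << e.what() << std::endl;
    }
    return 0;
}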
4 changes: 3 additions & 1 deletion src/plugins/intel_npu/src/plugin/src/plugin.cpp
@@ -563,7 +563,9 @@ std::shared_ptr<ov::ICompiledModel> Plugin::compile_model(const std::shared_ptr<
            OPENVINO_THROW("Option 'CACHE_DIR' is not supported with MLIR compiler type");
        }
    }
-
+    // std::printf(" <1>localConfig.get<PLATFORM>() =%s, <2>localConfig.get<DEVICE_ID>()=%s \n",
+    //             localConfig.get<PLATFORM>().data(),
+    //             localConfig.get<DEVICE_ID>().c_str());
const auto platform = _backends->getCompilationPlatform(localConfig.get<PLATFORM>(), localConfig.get<DEVICE_ID>());
auto device = _backends->getDevice(localConfig.get<DEVICE_ID>());
localConfig.update({{ov::intel_npu::platform.name(), platform}});
