Skip to content

Commit

Permalink
Merge ov::Core test and modify some parameter names
Browse files Browse the repository at this point in the history
  • Loading branch information
RICKIE777 committed May 18, 2022
2 parents e650473 + c1e02f2 commit c76d03c
Show file tree
Hide file tree
Showing 3 changed files with 361 additions and 55 deletions.
27 changes: 18 additions & 9 deletions src/bindings/c/include/c_api/ov_c_api.h
Original file line number Diff line number Diff line change
Expand Up @@ -78,27 +78,27 @@ typedef struct{
/**
* @brief The absolute time, in microseconds, that the node ran (in total).
*/
double real_time;
int64_t real_time;

/**
* @brief The net host CPU time that the node ran.
*/
double cpu_time;
int64_t cpu_time;

/**
* @brief Name of a node.
*/
char node_name[128];
char* node_name;

/**
* @brief Execution type of a unit.
*/
char exec_type[128];
char* exec_type;

/**
* @brief Node type.
*/
char node_type[128];
char* node_type;
}ov_profiling_info_t;

/**
Expand Down Expand Up @@ -325,7 +325,7 @@ typedef enum {

typedef union {
uint32_t value_u;
char value_s[256];
char value_s[320];
ov_performance_mode_e value_performance_mode;
}ov_property_value;

Expand Down Expand Up @@ -381,7 +381,7 @@ OPENVINO_C_API(ov_status_e) ov_core_read_model(const ov_core_t *core,
/**
* @brief Reads models from IR/ONNX/PDPD formats.
* @param core A pointer to the ie_core_t instance.
* @param model_path String with a model in IR/ONNX/PDPD format.
* @param model_str String with a model in IR/ONNX/PDPD format.
* @param weights Shared pointer to a constant tensor with weights.
* @param model A pointer to the newly created model.
* Reading ONNX/PDPD models does not support loading weights from the @p weights tensors.
Expand All @@ -391,7 +391,7 @@ OPENVINO_C_API(ov_status_e) ov_core_read_model(const ov_core_t *core,
* @return Status code of the operation: OK(0) for success.
*/
OPENVINO_C_API(ov_status_e) ov_core_read_model_from_memory(const ov_core_t *core,
const char *model_path,
const char *model_str,
const ov_tensor_t *weights,
ov_model_t **model);

Expand Down Expand Up @@ -856,14 +856,23 @@ OPENVINO_C_API(ov_status_e) ov_compiled_model_set_property(const ov_compiled_mod
/**
* @brief Gets properties for current compiled model.
* @param compiled_model A pointer to the ov_compiled_model_t.
* @param property_name Property name.
* @param property_name Property name.
* @param property_value A pointer to property value.
* @return Status code of the operation: OK(0) for success.
*/
OPENVINO_C_API(ov_status_e) ov_compiled_model_get_property(const ov_compiled_model_t* compiled_model,
const ov_property_key_e property_name,
ov_property_value* property_value);

/**
* @brief Exports the current compiled model to an output stream `std::ostream`.
* The exported model can also be imported via the ov::Core::import_model method.
* @param compiled_model A pointer to the ov_compiled_model_t.
* @param export_model_path Path to the file.
* @return Status code of the operation: OK(0) for success.
*/
OPENVINO_C_API(ov_status_e) ov_compiled_model_export(const ov_compiled_model_t* compiled_model,
const char* export_model_path);
/**
* @brief Sets an input/output tensor to infer on.
* @param infer_request A pointer to the ov_infer_request_t.
Expand Down
111 changes: 82 additions & 29 deletions src/bindings/c/src/ov_c_api.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@
#include <tuple>
#include <memory>
#include <streambuf>
#include <istream>
#include <fstream>

#include "c_api/ov_c_api.h"
#include "openvino/openvino.hpp"
Expand Down Expand Up @@ -177,9 +177,10 @@ ov_element_type_e find_ov_element_type_e(ov::element::Type type) {
CATCH_OV_EXCEPTION(INFER_CANCELLED, InferCancelled) \
catch (...) {return ov_status_e::UNEXPECTED;}

void str_to_char_array(const std::string& str, char** char_array) {
*char_array = new char[str.length() + 1];
std::copy_n(str.begin(), str.length() + 1, *char_array);
/**
 * @brief Duplicates a std::string into a newly heap-allocated, NUL-terminated C string.
 * The caller owns the returned buffer and must release it with delete[].
 * @param str Source string to copy.
 * @return Pointer to a new char[str.length() + 1] holding the characters plus the NUL.
 */
char* str_to_char_array(const std::string& str) {
    char* char_array = new char[str.length() + 1];
    // Copy from c_str(): the range [c_str(), c_str() + length()] is guaranteed to
    // include the terminating NUL. The previous iterator form copied length()+1
    // elements starting at begin(), which dereferences the end() iterator (UB).
    std::copy_n(str.c_str(), str.length() + 1, char_array);
    return char_array;
}

ov_status_e ov_get_version(ov_version_t *version) {
Expand All @@ -191,10 +192,10 @@ ov_status_e ov_get_version(ov_version_t *version) {
ov::Version object = ov::get_openvino_version();

std::string version_builderNumber = object.buildNumber;
str_to_char_array(version_builderNumber, &(version->buildNumber));
version->buildNumber = str_to_char_array(version_builderNumber);

std::string version_description = object.description;
str_to_char_array(version_description, &version->description);
version->description = str_to_char_array(version_description);
} CATCH_OV_EXCEPTIONS
return ov_status_e::OK;
}
Expand Down Expand Up @@ -245,16 +246,16 @@ ov_status_e ov_core_read_model(const ov_core_t *core,
}

ov_status_e ov_core_read_model_from_memory(const ov_core_t *core,
const char *model_path,
const char *model_str,
const ov_tensor_t *weights,
ov_model_t **model) {
if (!core || !model_path || !weights || !model) {
if (!core || !model_str || !weights || !model) {
return ov_status_e::GENERAL_ERROR;
}

try {
*model = new ov_model_t;
(*model)->object = core->object->read_model(model_path, *(weights->object));
(*model)->object = core->object->read_model(model_str, *(weights->object));
} CATCH_OV_EXCEPTIONS
return ov_status_e::OK;
}
Expand All @@ -274,11 +275,14 @@ ov_status_e ov_core_compile_model(const ov_core_t* core,

try {
std::string dev_name = "";
ov::CompiledModel object;
if (device_name) {
dev_name = device_name;
object = core->object->compile_model(model->object, dev_name);
} else {
object = core->object->compile_model(model->object);
}
*compiled_model = new ov_compiled_model_t;
auto object = core->object->compile_model(model->object, dev_name);
(*compiled_model)->object = std::make_shared<ov::CompiledModel>(std::move(object));
} CATCH_OV_EXCEPTIONS
return ov_status_e::OK;
Expand All @@ -294,12 +298,15 @@ ov_status_e ov_core_compile_model_from_file(const ov_core_t* core,
}

try {
ov::CompiledModel object;
std::string dev_name = "";
if (device_name) {
dev_name = device_name;
object = core->object->compile_model(model_path, dev_name);
} else {
object = core->object->compile_model(model_path);
}
*compiled_model = new ov_compiled_model_t;
auto object = core->object->compile_model(model_path, dev_name);
(*compiled_model)->object = std::make_shared<ov::CompiledModel>(std::move(object));
} CATCH_OV_EXCEPTIONS
return ov_status_e::OK;
Expand Down Expand Up @@ -359,7 +366,7 @@ ov_status_e ov_core_get_property(const ov_core_t* core, const char* device_name,
for (const auto& i : supported_properties) {
tmp_s = tmp_s + "\n" + i;
}
if (tmp_s.length() + 1 > 256) {
if (tmp_s.length() + 1 > 512) {
return ov_status_e::GENERAL_ERROR;
}
std::copy_n(tmp_s.begin(), tmp_s.length() + 1, property_value->value_s);
Expand All @@ -372,26 +379,16 @@ ov_status_e ov_core_get_property(const ov_core_t* core, const char* device_name,
return ov_status_e::OK;
}

/**
 * @brief Registers an extension library with the core.
 * @param core A pointer to the ov_core_t instance.
 * @param library_path Path to the extension library to load.
 * @return Status code of the operation: OK(0) for success.
 */
ov_status_e ov_core_add_extension(const ov_core_t* core, const char* library_path) {
    // Reject null handles up front; the C API reports errors via status codes.
    if (core == nullptr || library_path == nullptr) {
        return ov_status_e::GENERAL_ERROR;
    }
    try {
        core->object->add_extension(library_path);
    } CATCH_OV_EXCEPTIONS
    return ov_status_e::OK;
}

ov_status_e ov_core_get_available_devices(const ov_core_t* core, ov_available_devices_t* devices) {
if (!core || !devices) {
if (!core) {
return ov_status_e::GENERAL_ERROR;
}
try {
auto available_devices = core->object->get_available_devices();
devices->num_devices = available_devices.size();
auto tmp_devices(new char*[available_devices.size()]);
for (int i = 0; i < available_devices.size(); i++) {
str_to_char_array(available_devices[i], &(tmp_devices[i]));
tmp_devices[i] = str_to_char_array(available_devices[i]);
}
devices->devices = tmp_devices;
} CATCH_OV_EXCEPTIONS
Expand Down Expand Up @@ -445,13 +442,13 @@ ov_status_e ov_core_get_versions(const ov_core_t* core,
auto iter = object.cbegin();
for (int i = 0; i < object.size(); i++, iter++) {
const auto& tmp_version_name = iter->first;
str_to_char_array(tmp_version_name, &(tmp_versions[i].device_name));
tmp_versions[i].device_name = str_to_char_array(tmp_version_name);

const auto tmp_version_build_number = iter->second.buildNumber;
str_to_char_array(tmp_version_build_number, &(tmp_versions[i].buildNumber));
tmp_versions[i].buildNumber = str_to_char_array(tmp_version_build_number);

const auto tmp_version_description = iter->second.description;
str_to_char_array(tmp_version_description, &(tmp_versions[i].description));
tmp_versions[i].description = str_to_char_array(tmp_version_description);
}
versions->versions = tmp_versions;
} CATCH_OV_EXCEPTIONS
Expand All @@ -467,7 +464,7 @@ void ov_core_versions_free(ov_core_version_list_t *versions) {
delete[] versions->versions[i].buildNumber;
delete[] versions->versions[i].description;
}
delete versions->versions;
delete[] versions->versions;
versions->versions = nullptr;
}

Expand Down Expand Up @@ -580,7 +577,7 @@ ov_status_e ov_model_get_friendly_name(const ov_model_t* model, char **friendly_
}
try {
auto& result = model->object->get_friendly_name();
str_to_char_array(result, friendly_name);
*friendly_name = str_to_char_array(result);
} CATCH_OV_EXCEPTIONS
return ov_status_e::OK;
}
Expand Down Expand Up @@ -987,6 +984,22 @@ ov_status_e ov_compiled_model_get_property(const ov_compiled_model_t* compiled_m
return ov_status_e::OK;
}

/**
 * @brief Serializes the compiled model into the file at @p export_model_path.
 * The exported file can later be re-imported (see ov::Core::import_model).
 * @param compiled_model A pointer to the ov_compiled_model_t.
 * @param export_model_path Destination file path, opened in binary mode.
 * @return Status code of the operation: OK(0) for success.
 */
ov_status_e ov_compiled_model_export(const ov_compiled_model_t* compiled_model,
                                     const char* export_model_path) {
    if (!compiled_model || !export_model_path) {
        return ov_status_e::GENERAL_ERROR;
    }
    try {
        std::ofstream model_file(export_model_path, std::ios::out | std::ios::binary);
        // Bail out early when the destination cannot be opened for writing.
        if (!model_file.is_open()) {
            return ov_status_e::GENERAL_ERROR;
        }
        compiled_model->object->export_model(model_file);
    } CATCH_OV_EXCEPTIONS
    return ov_status_e::OK;
}

void ov_infer_request_free(ov_infer_request_t *infer_request) {
delete infer_request;
}
Expand Down Expand Up @@ -1071,6 +1084,46 @@ ov_status_e ov_infer_request_set_callback(ov_infer_request_t* infer_request,
return ov_status_e::OK;
}

/**
 * @brief Queries per-node profiling data of the last inference and copies it
 * into a newly allocated C array owned by the caller.
 * Free the result with ov_profiling_info_list_free().
 * @param infer_request A pointer to the ov_infer_request_t.
 * @param profiling_infos Output list; receives the array and its element count.
 * @return Status code of the operation: OK(0) for success.
 */
ov_status_e ov_infer_request_get_profiling_info(ov_infer_request_t* infer_request,
                                ov_profiling_info_list_t* profiling_infos) {
    if (!infer_request || !profiling_infos) {
        return ov_status_e::GENERAL_ERROR;
    }

    try {
        auto infos = infer_request->object->get_profiling_info();
        // Explicit narrowing: the C struct stores the count as int.
        const int num = static_cast<int>(infos.size());
        profiling_infos->num = num;
        ov_profiling_info_t *profiling_info_arr = new ov_profiling_info_t[num];
        // NOTE(review): if str_to_char_array throws (e.g. bad_alloc) mid-loop,
        // strings allocated for earlier elements leak — acceptable best-effort
        // in an OOM scenario, but worth revisiting.
        for (int i = 0; i < num; i++) {
            // static_cast instead of a C-style cast: the enum values mirror
            // ov::ProfilingInfo::Status one-to-one.
            profiling_info_arr[i].status = static_cast<ov_profiling_info_t::Status>(infos[i].status);
            profiling_info_arr[i].real_time = infos[i].real_time.count();
            profiling_info_arr[i].cpu_time = infos[i].cpu_time.count();

            profiling_info_arr[i].node_name = str_to_char_array(infos[i].node_name);
            profiling_info_arr[i].exec_type = str_to_char_array(infos[i].exec_type);
            profiling_info_arr[i].node_type = str_to_char_array(infos[i].node_type);
        }
        profiling_infos->profiling_infos = profiling_info_arr;
    } CATCH_OV_EXCEPTIONS

    return ov_status_e::OK;
}

/**
 * @brief Releases a profiling-info list previously filled by
 * ov_infer_request_get_profiling_info(): each element's string buffers,
 * then the array itself. Resets the list to an empty state.
 * @param profiling_infos List to free; a null pointer is a no-op.
 */
void ov_profiling_info_list_free(ov_profiling_info_list_t *profiling_infos) {
    if (profiling_infos == nullptr) {
        return;
    }
    for (int i = 0; i < profiling_infos->num; ++i) {
        // Each string was allocated with new[] by str_to_char_array.
        ov_profiling_info_t& info = profiling_infos->profiling_infos[i];
        delete[] info.node_name;
        delete[] info.exec_type;
        delete[] info.node_type;
    }
    delete[] profiling_infos->profiling_infos;
    profiling_infos->profiling_infos = nullptr;
    profiling_infos->num = 0;
}

ov_status_e ov_tensor_create(const ov_element_type_e type, const ov_shape_t shape, ov_tensor_t **tensor) {
if (!tensor || !shape || element_type_map.find(type) == element_type_map.end()) {
return ov_status_e::GENERAL_ERROR;
Expand Down
Loading

0 comments on commit c76d03c

Please sign in to comment.