Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[C API] support string size for char pointer #19931

Merged
29 changes: 27 additions & 2 deletions src/bindings/c/include/openvino/c/ov_core.h
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@

#pragma once

#include "openvino/c/deprecated.h"
#include "openvino/c/ov_common.h"
#include "openvino/c/ov_compiled_model.h"
#include "openvino/c/ov_model.h"
Expand Down Expand Up @@ -173,8 +174,9 @@ ov_core_read_model_unicode(const ov_core_t* core,
/**
* @brief Reads models from IR / ONNX / PDPD / TF / TFLite formats.
* @ingroup ov_core_c_api
* @deprecated Use ov_core_read_model_from_memory_buffer instead.
* @param core A pointer to the ov_core_t instance.
* @param model_str String with a model in IR / ONNX / PDPD / TF / TFLite format.
* @param model_str String with a model in IR / ONNX / PDPD / TF / TFLite format, string is null-terminated.
* @param weights Shared pointer to a constant tensor with weights.
* @param model A pointer to the newly created model.
* Reading ONNX / PDPD / TF / TFLite models does not support loading weights from the @p weights tensors.
Expand All @@ -183,12 +185,35 @@ ov_core_read_model_unicode(const ov_core_t* core,
* constant data will point to an invalid memory.
* @return Status code of the operation: OK(0) for success.
*/
OPENVINO_C_API(OPENVINO_DEPRECATED(
    "This API is deprecated and will be replaced by ov_core_read_model_from_memory_buffer") ov_status_e)
ov_core_read_model_from_memory(const ov_core_t* core,
                               const char* model_str,
                               const ov_tensor_t* weights,
                               ov_model_t** model);

/**
 * @brief Reads models from IR / ONNX / PDPD / TF / TFLite formats with models string size.
 * @ingroup ov_core_c_api
 * @param core A pointer to the ov_core_t instance.
 * @param model_str String with a model in IR / ONNX / PDPD / TF / TFLite format, support model string containing
 * several null chars (the buffer does not need to be null-terminated).
 * @param str_len The length of model string, in bytes; must be non-zero.
 * @param weights Shared pointer to a constant tensor with weights.
 * @param model A pointer to the newly created model.
 * Reading ONNX / PDPD / TF / TFLite models does not support loading weights from the @p weights tensors.
 * @note Created model object shares the weights with the @p weights object.
 * Thus, do not create @p weights on temporary data that can be freed later, since the model
 * constant data will point to an invalid memory.
 * @return Status code of the operation: OK(0) for success.
 */
OPENVINO_C_API(ov_status_e)
ov_core_read_model_from_memory_buffer(const ov_core_t* core,
const char* model_str,
const size_t str_len,
const ov_tensor_t* weights,
ov_model_t** model);

/**
* @brief Creates a compiled model from a source model object.
* Users can create as many compiled models as they need and use
Expand Down
23 changes: 16 additions & 7 deletions src/bindings/c/src/ov_core.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -89,27 +89,36 @@ ov_status_e ov_core_read_model(const ov_core_t* core,
return ov_status_e::OK;
}

/**
 * Reads a model from an in-memory buffer of explicit length, so the model
 * string may legally contain embedded null characters.
 *
 * @param core      A valid ov_core_t instance; must not be NULL.
 * @param model_str Buffer holding the model text/bytes; need not be null-terminated.
 * @param str_size  Number of bytes in @p model_str; must be non-zero.
 * @param weights   Optional weights tensor; may be NULL.
 * @param model     Receives the newly created model on success.
 * @return ov_status_e::OK on success, INVALID_C_PARAM on bad arguments,
 *         otherwise the status mapped from the thrown exception.
 */
ov_status_e ov_core_read_model_from_memory_buffer(const ov_core_t* core,
                                                  const char* model_str,
                                                  const size_t str_size,
                                                  const ov_tensor_t* weights,
                                                  ov_model_t** model) {
    if (!core || !model_str || !model || !str_size) {
        return ov_status_e::INVALID_C_PARAM;
    }

    try {
        std::unique_ptr<ov_model_t> _model(new ov_model_t);
        // Construct with an explicit length so embedded '\0' bytes survive;
        // this is the whole point of the *_buffer variant.
        std::string model_string(model_str, str_size);
        if (weights) {
            _model->object = core->object->read_model(model_string, *(weights->object));
        } else {
            _model->object = core->object->read_model(model_string, ov::Tensor());
        }
        *model = _model.release();
    }
    CATCH_OV_EXCEPTIONS
    return ov_status_e::OK;
}

/**
 * Deprecated null-terminated-string variant; forwards to
 * ov_core_read_model_from_memory_buffer using strlen() for the size.
 *
 * @param core      A valid ov_core_t instance; must not be NULL.
 * @param model_str Null-terminated model string; must not be NULL.
 * @param weights   Optional weights tensor; may be NULL.
 * @param model     Receives the newly created model on success.
 * @return ov_status_e::OK on success, INVALID_C_PARAM on bad arguments.
 */
ov_status_e ov_core_read_model_from_memory(const ov_core_t* core,
                                           const char* model_str,
                                           const ov_tensor_t* weights,
                                           ov_model_t** model) {
    // Check model_str BEFORE calling strlen: strlen(NULL) is undefined
    // behavior, while the delegate reports INVALID_C_PARAM for null inputs.
    if (!model_str) {
        return ov_status_e::INVALID_C_PARAM;
    }
    return ov_core_read_model_from_memory_buffer(core, model_str, strlen(model_str), weights, model);
}

ov_status_e ov_core_compile_model(const ov_core_t* core,
const ov_model_t* model,
const char* device_name,
Expand Down
31 changes: 31 additions & 0 deletions src/bindings/c/tests/ov_core_test.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -77,6 +77,7 @@ TEST_P(ov_core_test, ov_core_read_model_no_bin) {
ov_core_free(core);
}

OPENVINO_SUPPRESS_DEPRECATED_START
TEST_P(ov_core_test, ov_core_read_model_from_memory) {
ov_core_t* core = nullptr;
OV_EXPECT_OK(ov_core_create(&core));
Expand All @@ -102,6 +103,36 @@ TEST_P(ov_core_test, ov_core_read_model_from_memory) {
ov_model_free(model);
ov_core_free(core);
}
OPENVINO_SUPPRESS_DEPRECATED_END

// Verifies ov_core_read_model_from_memory_buffer(): the model XML is passed
// with an explicit byte count instead of relying on null termination.
TEST_P(ov_core_test, ov_core_read_model_from_memory_buffer_with_size) {
ov_core_t* core = nullptr;
OV_EXPECT_OK(ov_core_create(&core));
EXPECT_NE(nullptr, core);

// Load the weights (.bin) file as raw bytes.
std::vector<uint8_t> weights_content(content_from_file(bin_file_name.c_str(), true));

// Wrap the weights in a U8 tensor of shape {1, byte_count} that shares the
// vector's storage (no copy), as required by the read_model weights contract.
ov_tensor_t* tensor = nullptr;
ov_shape_t shape;
int64_t dims[2] = {1, (int64_t)weights_content.size()};
ov_shape_create(2, dims, &shape);
OV_EXPECT_OK(ov_tensor_create_from_host_ptr(ov_element_type_e::U8, shape, weights_content.data(), &tensor));
EXPECT_NE(nullptr, tensor);

// Read the model XML and hand it to the size-aware API together with its length.
std::vector<uint8_t> xml_content(content_from_file(xml_file_name.c_str(), false));
ov_model_t* model = nullptr;
OV_EXPECT_OK(ov_core_read_model_from_memory_buffer(core,
reinterpret_cast<const char*>(xml_content.data()),
xml_content.size(),
tensor,
&model));
EXPECT_NE(nullptr, model);

// Release everything created above.
ov_shape_free(&shape);
ov_tensor_free(tensor);
ov_model_free(model);
ov_core_free(core);
}

TEST_P(ov_core_test, ov_core_compile_model) {
auto device_name = GetParam();
Expand Down