Skip to content

Commit

Permalink
Fix StressMemLeaksTests with several models (openvinotoolkit#19986)
Browse files (browse the repository at this point in the history)
* Fix `StressMemLeaksTests` with several models

* Fix OMZ branch name in `get_testdata.py`
  • Loading branch information
vurusovs authored Sep 21, 2023
1 parent 37d54bc commit b00fbd0
Show file tree
Hide file tree
Showing 4 changed files with 15 additions and 18 deletions.
15 changes: 6 additions & 9 deletions tests/stress_tests/common/ie_pipelines/pipelines.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -55,11 +55,9 @@ create_compiled_model(const std::string &model, const std::string &target_device
};
}

std::function<void()> recreate_compiled_model(std::shared_ptr<InferApiBase> &ie_wrapper, const std::string &model,
std::function<void()> recreate_compiled_model(std::shared_ptr<InferApiBase> &ie_wrapper,
const std::string &target_device, const int &api_version) {
return [&] {
ie_wrapper->load_plugin(target_device);
ie_wrapper->read_network(model);
return [=] {
ie_wrapper->load_network(target_device);
};
}
Expand All @@ -77,7 +75,7 @@ create_infer_request(const std::string &model, const std::string &target_device,


std::function<void()> recreate_infer_request(std::shared_ptr<InferApiBase> &ie_wrapper) {
return [&] {
return [=] {
ie_wrapper->create_infer_request();
};
}
Expand All @@ -97,14 +95,14 @@ infer_request_inference(const std::string &model, const std::string &target_devi


std::function<void()> reinfer_request_inference(std::shared_ptr<InferApiBase> &ie_wrapper) {
return [&] {
return [=] {
ie_wrapper->infer();
};
}

std::function<void()> recreate_and_infer_in_thread(std::shared_ptr<InferApiBase> &ie_wrapper) {
return [&] {
auto func = [&] {
return [=] {
auto func = [=] {
ie_wrapper->create_infer_request();
ie_wrapper->prepare_input();
ie_wrapper->infer();
Expand Down Expand Up @@ -133,7 +131,6 @@ inference_with_streams(const std::string &model, const std::string &target_devic
for (int counter = 0; counter < nireq; counter++) {
ie_api_wrapper->create_infer_request();
ie_api_wrapper->prepare_input();

ie_api_wrapper->infer();
}
};
Expand Down
2 changes: 1 addition & 1 deletion tests/stress_tests/common/ie_pipelines/pipelines.h
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,7 @@ inference_with_streams(const std::string &model, const std::string &target_devic
const int &api_version);

std::function<void()>
recreate_compiled_model(std::shared_ptr<InferApiBase> &ie, const std::string &model, const std::string &target_device,
recreate_compiled_model(std::shared_ptr<InferApiBase> &ie_wrapper, const std::string &target_device,
const int &api_version);

std::function<void()> recreate_infer_request(std::shared_ptr<InferApiBase> &ie_wrapper);
Expand Down
14 changes: 7 additions & 7 deletions tests/stress_tests/memleaks_tests/tests.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -95,14 +95,15 @@ TEST_P(MemLeaksTestSuiteNoDevice, set_input_params) {
test_runner(test_params.numthreads, test);
}

TEST_P(MemLeaksTestSuite, recreate_exenetwork) {
TEST_P(MemLeaksTestSuite, recreate_compiled_model) {
auto test_params = GetParam();
std::vector<std::function<void()>> pipeline;
auto ie_wrapper = create_infer_api_wrapper(test_params.api_version);

pipeline.reserve(test_params.models.size());
for (int i = 0; i < test_params.models.size(); i++) {
pipeline.push_back(recreate_compiled_model(ie_wrapper, test_params.models[i]["full_path"], test_params.device,
auto ie_wrapper = create_infer_api_wrapper(test_params.api_version);
ie_wrapper->read_network(test_params.models[i]["full_path"]);
pipeline.push_back(recreate_compiled_model(ie_wrapper, test_params.device,
test_params.api_version));
}
auto test = [&] {
Expand All @@ -117,11 +118,10 @@ TEST_P(MemLeaksTestSuite, recreate_exenetwork) {
TEST_P(MemLeaksTestSuite, recreate_infer_request) {
auto test_params = GetParam();
std::vector<std::function<void()>> pipeline;
auto ie_wrapper = create_infer_api_wrapper(test_params.api_version);

size_t n_models = test_params.models.size();

for (int i = 0; i < n_models; i++) {
auto ie_wrapper = create_infer_api_wrapper(test_params.api_version);
ie_wrapper->read_network(test_params.models[i]["full_path"]);
ie_wrapper->load_network(test_params.device);
pipeline.push_back(recreate_infer_request(ie_wrapper));
Expand All @@ -138,10 +138,10 @@ TEST_P(MemLeaksTestSuite, recreate_infer_request) {
TEST_P(MemLeaksTestSuite, reinfer_request_inference) {
auto test_params = GetParam();
std::vector<std::function<void()>> pipeline;
auto ie_wrapper = create_infer_api_wrapper(test_params.api_version);
size_t n_models = test_params.models.size();

for (int i = 0; i < n_models; i++) {
auto ie_wrapper = create_infer_api_wrapper(test_params.api_version);
ie_wrapper->read_network(test_params.models[i]["full_path"]);
ie_wrapper->load_network(test_params.device);
ie_wrapper->create_infer_request();
Expand Down Expand Up @@ -196,10 +196,10 @@ TEST_P(MemLeaksTestSuite, inference_with_streams) {
TEST_P(MemLeaksTestSuite, recreate_and_infer_in_thread) {
auto test_params = GetParam();
std::vector<std::function<void()>> pipeline;
auto ie_wrapper = create_infer_api_wrapper(test_params.api_version);
size_t n_models = test_params.models.size();

for (int i = 0; i < n_models; i++) {
auto ie_wrapper = create_infer_api_wrapper(test_params.api_version);
ie_wrapper->read_network(test_params.models[i]["full_path"]);
ie_wrapper->load_network(test_params.device);
pipeline.push_back(recreate_and_infer_in_thread(ie_wrapper));
Expand Down
2 changes: 1 addition & 1 deletion tests/stress_tests/scripts/get_testdata.py
Original file line number Diff line number Diff line change
Expand Up @@ -138,7 +138,7 @@ def main():
# clone Open Model Zoo into temporary path
if os.path.exists(str(omz_path)):
shutil.rmtree(str(omz_path))
cmd = 'git clone --single-branch --branch develop' \
cmd = 'git clone --single-branch --branch master' \
' https://github.com/openvinotoolkit/open_model_zoo {omz_path}'.format(omz_path=omz_path)
run_in_subprocess(cmd)

Expand Down

0 comments on commit b00fbd0

Please sign in to comment.