From b982e19525eeecb0cf385591bd1ec182a781cb05 Mon Sep 17 00:00:00 2001
From: Pawel Raasz
Date: Thu, 19 Dec 2024 14:19:32 +0100
Subject: [PATCH] [FE] Set OV model's output's name after parsing model by frontend (#28105)

### Details:
- After a model is read by the IR frontend, its output tensor names are the names of the nodes connected to the Results. By default these names are not dedicated Result names, so during pre/post-processing they can stay on the producer node and disappear as model output names. To fix this, set the names on the Result tensors so they remain the model's output names during transformations.
- The ONNX frontend sets the OV model's output names when converting the model to the OV representation.
- Fix the NPU test which reports `Attempt to get a name for a Tensor without names`.

### Related PRs:
- #28102

### Tickets:
- CVS-159401

---------

Signed-off-by: Raasz, Pawel
---
 src/frontends/ir/src/ir_deserializer.cpp      | 13 +++
 .../tests/pre_processing_deserialization.cpp  | 99 +++++++++++++++++++
 .../onnx/frontend/src/input_model.cpp         |  7 ++
 src/frontends/onnx/tests/load_from.cpp        | 29 ++++++
 4 files changed, 148 insertions(+)

diff --git a/src/frontends/ir/src/ir_deserializer.cpp b/src/frontends/ir/src/ir_deserializer.cpp
index d7e250f9916302..62caebeee7d355 100644
--- a/src/frontends/ir/src/ir_deserializer.cpp
+++ b/src/frontends/ir/src/ir_deserializer.cpp
@@ -7,6 +7,7 @@
 #include
 #include
+#include "openvino/core/descriptor_tensor.hpp"
 #include "openvino/core/except.hpp"
 #include "openvino/core/meta_data.hpp"
 #include "openvino/core/rt_info/weightless_caching_attributes.hpp"
@@ -18,6 +19,7 @@
 #include "openvino/op/result.hpp"
 #include "openvino/op/util/assign_base.hpp"
 #include "openvino/op/util/framework_node.hpp"
+#include "openvino/op/util/op_types.hpp"
 #include "openvino/op/util/read_value_base.hpp"
 #include "openvino/op/util/sub_graph_base.hpp"
 #include "openvino/op/util/variable.hpp"
@@ -1023,6 +1025,17 @@ std::shared_ptr ov::XmlDeserializer::create_node(const std::vector(ovNode.get())) {
+        if (!ov::op::util::is_parameter(result->get_input_source_output(0).get_node())) {
+            // Copy names if parent node is not parameter, model's input names should not be dedicated
+            // output names as they could be removed from Parameter's tensor during model transformations.
+            ov::descriptor::copy_tensor_names(result->get_output_tensor(0), result->get_input_tensor(0));
+        }
+    }
     }
     return ovNode;

diff --git a/src/frontends/ir/tests/pre_processing_deserialization.cpp b/src/frontends/ir/tests/pre_processing_deserialization.cpp
index f97cf078b53951..460cff3d9dbfb5 100644
--- a/src/frontends/ir/tests/pre_processing_deserialization.cpp
+++ b/src/frontends/ir/tests/pre_processing_deserialization.cpp
@@ -1,8 +1,10 @@
 // Copyright (C) 2018-2024 Intel Corporation
 // SPDX-License-Identifier: Apache-2.0
 //
+#include
 #include "frontend_test.hpp"
+#include "openvino/core/preprocess/pre_post_process.hpp"

 class IRFrontendTestsPreProcessing : public ::testing::Test, public IRFrontendTestsImpl {
 protected:
@@ -71,3 +73,100 @@ TEST_F(IRFrontendTestsPreProcessing, pre_processing) {
     OV_ASSERT_NO_THROW(model = core.read_model(xmlFileName, binFileName));
     ASSERT_TRUE(!!model);
 }
+
+namespace ov {
+namespace test {
+
+using testing::ElementsAre;
+using testing::Property;
+using testing::UnorderedElementsAre;
+
+TEST_F(IRFrontendTestsPreProcessing, check_tensor_names_after_read_and_pre_post_processing) {
+    std::string xml_model = R"V0G0N(
+
+
+
+// )V0G0N";
+
+    constexpr auto DATA_COUNT = 1;
+    std::vector buffer(DATA_COUNT * sizeof(float), 0);
+    std::fill_n(reinterpret_cast(buffer.data()), DATA_COUNT, 1.f);
+
+    createTemporalModelFile(xml_model, buffer);
+
+    std::shared_ptr model;
+    OV_ASSERT_NO_THROW(model = core.read_model(xmlFileName, binFileName));
+    ASSERT_NE(model, nullptr);
+
+    EXPECT_THAT(model->inputs(),
+                ElementsAre(Property("Input 0", &Output::get_names, UnorderedElementsAre("input_a")),
+                            Property("Input 1", &Output::get_names, UnorderedElementsAre("input_b"))));
+
+    EXPECT_THAT(model->outputs(),
+                ElementsAre(Property("Output 0", &Output::get_names, UnorderedElementsAre("add_result")),
+                            // Directly connected to model input shows input's names.
+                            Property("Output 1", &Output::get_names, UnorderedElementsAre("input_b"))));
+
+    auto p = preprocess::PrePostProcessor(model);
+    p.output(0).tensor().set_element_type(element::f16);
+    p.output(1).tensor().set_element_type(element::i32);
+    model = p.build();
+
+    EXPECT_THAT(model->inputs(),
+                ElementsAre(Property("Input 0", &Output::get_names, UnorderedElementsAre("input_a")),
+                            Property("Input 1", &Output::get_names, UnorderedElementsAre("input_b"))));
+
+    EXPECT_THAT(model->outputs(),
+                ElementsAre(Property("Output 0", &Output::get_names, UnorderedElementsAre("add_result")),
+                            // After PPP (inserts convert node) the tensor names stay on model's input.
+                            Property("Output 1", &Output::get_names, testing::IsEmpty())));
+}
+}  // namespace test
+}  // namespace ov

diff --git a/src/frontends/onnx/frontend/src/input_model.cpp b/src/frontends/onnx/frontend/src/input_model.cpp
index 87f1439eb18b38..9410f54e428b3f 100644
--- a/src/frontends/onnx/frontend/src/input_model.cpp
+++ b/src/frontends/onnx/frontend/src/input_model.cpp
@@ -533,6 +533,13 @@ void InputModel::add_tensor_names(std::shared_ptr& model) {
             it->add_names(tensor_names.second);
         }
     }
+
+    // Set model output names
+    for (auto&& result : model->get_results()) {
+        if (!is_type(result->get_input_source_output(0).get_node())) {
+            result->get_output_tensor(0).add_names(result->get_input_tensor(0).get_names());
+        }
+    }
 }

 void InputModel::reshape_model_inputs(std::shared_ptr& model) {

diff --git a/src/frontends/onnx/tests/load_from.cpp b/src/frontends/onnx/tests/load_from.cpp
index 547937ac52171f..e74262991652d0 100644
--- a/src/frontends/onnx/tests/load_from.cpp
+++ b/src/frontends/onnx/tests/load_from.cpp
@@ -10,11 +10,15 @@
 #include "common_test_utils/test_assertions.hpp"
 #include "onnx_utils.hpp"
+#include "openvino/core/preprocess/pre_post_process.hpp"
 #include "utils.hpp"

 using namespace ov::frontend;

 using ONNXLoadTest = FrontEndLoadFromTest;
+using testing::ElementsAre;
+using testing::Property;
+using testing::UnorderedElementsAre;

 static LoadFromFEParam getTestData() {
     LoadFromFEParam res;
@@ -58,6 +62,31 @@ TEST_P(FrontEndLoadFromTest, load_model_not_exists_at_path) {
     OV_EXPECT_THROW(fe->load(model_file_path), ov::Exception, testing::HasSubstr(error_msg));
 }

+TEST_P(FrontEndLoadFromTest, load_model_and_apply_ppp) {
+    auto model_file_path =
+        ov::util::path_join({ov::test::utils::getExecutableDirectory(), TEST_ONNX_MODELS_DIRNAME, m_param.m_stream});
+
+    m_frontEnd = m_fem.load_by_model(model_file_path);
+    const auto fe_model = m_frontEnd->load(model_file_path);
+    auto model = m_frontEnd->convert(fe_model);
+
+    EXPECT_THAT(model->inputs(),
+                ElementsAre(Property("Input 0", &ov::Output::get_names, UnorderedElementsAre("A")),
+                            Property("Input 1", &ov::Output::get_names, UnorderedElementsAre("B")),
+                            Property("Input 2", &ov::Output::get_names, UnorderedElementsAre("C"))));
+    EXPECT_THAT(model->output(0).get_names(), UnorderedElementsAre("Y"));
+
+    auto p = ov::preprocess::PrePostProcessor(model);
+    p.output(0).tensor().set_element_type(ov::element::f16);
+    model = p.build();
+
+    EXPECT_THAT(model->inputs(),
+                ElementsAre(Property("Input 0", &ov::Output::get_names, UnorderedElementsAre("A")),
+                            Property("Input 1", &ov::Output::get_names, UnorderedElementsAre("B")),
+                            Property("Input 2", &ov::Output::get_names, UnorderedElementsAre("C"))));
+    EXPECT_THAT(model->output(0).get_names(), UnorderedElementsAre("Y"));
+}
+
 INSTANTIATE_TEST_SUITE_P(ONNXLoadTest,
                         FrontEndLoadFromTest,
                         ::testing::Values(getTestData()),
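
For context, a minimal sketch (not part of the patch) of the behavior the change targets, assuming an arbitrary IR file name `model.xml` with named outputs: after `read_model`, the dedicated output names are expected to survive a PrePostProcessor pass that inserts a Convert in front of a Result.

```cpp
// Minimal sketch, not part of the patch. Assumes "model.xml" is an IR model with named outputs.
#include <iostream>

#include "openvino/core/preprocess/pre_post_process.hpp"
#include "openvino/openvino.hpp"

int main() {
    ov::Core core;
    // With this patch, the IR frontend copies the producer tensor names onto the Result tensors.
    std::shared_ptr<ov::Model> model = core.read_model("model.xml");

    // Post-processing inserts a Convert node before the Result.
    ov::preprocess::PrePostProcessor ppp(model);
    ppp.output(0).tensor().set_element_type(ov::element::f16);
    model = ppp.build();

    // The output names are expected to still be reported on the model outputs.
    for (const auto& output : model->outputs()) {
        for (const auto& name : output.get_names()) {
            std::cout << name << "\n";
        }
    }
    return 0;
}
```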