From e4b82a52741bd09c22743227cd4b72b0841849e9 Mon Sep 17 00:00:00 2001
From: M
Date: Thu, 28 Mar 2024 00:08:01 -0600
Subject: [PATCH] [ONNX] Add ReduceLogSum-11,13,18 (#23508)

### Details:
 - Extended ReduceLogSum with support for opsets 11, 13 and 18

### Tickets:
 - Closes #20561

---------

Co-authored-by: Katarzyna Mitrus
Co-authored-by: Georgy Krivoruchko
---
 src/frontends/onnx/frontend/src/op/reduce.cpp |  8 ++-
 src/frontends/onnx/frontend/src/op/reduce.hpp |  3 +
 .../onnx/frontend/src/ops_bridge.cpp          |  2 +
 .../tests/models/reduce_log_sum_18.prototxt   | 48 +++++++++++++
 .../reduce_log_sum_18_axes_as_input.prototxt  | 71 +++++++++++++++++++
 src/frontends/onnx/tests/onnx_import.in.cpp   | 29 ++++++++
 .../onnx/tests/tests_python/test_backend.py   |  3 -
 7 files changed, 159 insertions(+), 5 deletions(-)
 create mode 100644 src/frontends/onnx/tests/models/reduce_log_sum_18.prototxt
 create mode 100644 src/frontends/onnx/tests/models/reduce_log_sum_18_axes_as_input.prototxt

diff --git a/src/frontends/onnx/frontend/src/op/reduce.cpp b/src/frontends/onnx/frontend/src/op/reduce.cpp
index d9108c8b6fcf30..284b984ab4aa2c 100644
--- a/src/frontends/onnx/frontend/src/op/reduce.cpp
+++ b/src/frontends/onnx/frontend/src/op/reduce.cpp
@@ -141,7 +141,7 @@ std::shared_ptr<ov::Node> make_ov_reduction_op(const Node& node,
 namespace set_1 {
 ov::OutputVector reduce_log_sum(const ov::frontend::onnx::Node& node) {
     const ov::Output<ov::Node> sum_node =
-        make_ov_reduction_op<v1::ReduceSum>(node, node.get_ov_inputs().at(0), supported_types_v1);
+        make_ov_reduction_op<v1::ReduceSum>(node, node.get_ov_inputs().at(0), supported_types_v2);
     return {std::make_shared<v0::Log>(sum_node)};
 }
 
@@ -185,7 +185,6 @@ ov::OutputVector reduce_sum_square(const ov::frontend::onnx::Node& node) {
     return {make_ov_reduction_op<v1::ReduceSum>(node, square_node, supported_types_v1)};
 }
 }  // namespace set_1
-
 /*
     Opset 11 is skipped because there are no significant difference between opset1 and opset 11.
     Found difference is:
@@ -208,6 +207,11 @@ namespace set_18 {
 ov::OutputVector reduce_max(const ov::frontend::onnx::Node& node) {
     return {make_ov_reduction_op<v1::ReduceMax>(node, node.get_ov_inputs().at(0), supported_types_v3, false)};
 }
+ov::OutputVector reduce_log_sum(const ov::frontend::onnx::Node& node) {
+    const ov::Output<ov::Node> sum_node =
+        make_ov_reduction_op<v1::ReduceSum>(node, node.get_ov_inputs().at(0), supported_types_v2, false);
+    return {std::make_shared<v0::Log>(sum_node)};
+}
 }  // namespace set_18
 
 namespace set_20 {
diff --git a/src/frontends/onnx/frontend/src/op/reduce.hpp b/src/frontends/onnx/frontend/src/op/reduce.hpp
index 3acdc3677e77da..a30f1ec86b7d2d 100644
--- a/src/frontends/onnx/frontend/src/op/reduce.hpp
+++ b/src/frontends/onnx/frontend/src/op/reduce.hpp
@@ -13,6 +13,9 @@ namespace op {
 namespace set_1 {
 ov::OutputVector reduce_log_sum(const ov::frontend::onnx::Node& node);
 }  // namespace set_1
+namespace set_18 {
+ov::OutputVector reduce_log_sum(const ov::frontend::onnx::Node& node);
+}  // namespace set_18
 
 namespace set_1 {
 ov::OutputVector reduce_log_sum_exp(const ov::frontend::onnx::Node& node);
diff --git a/src/frontends/onnx/frontend/src/ops_bridge.cpp b/src/frontends/onnx/frontend/src/ops_bridge.cpp
index 6350c902ba5761..62d0a8d05e433e 100644
--- a/src/frontends/onnx/frontend/src/ops_bridge.cpp
+++ b/src/frontends/onnx/frontend/src/ops_bridge.cpp
@@ -480,6 +480,8 @@ OperatorsBridge::OperatorsBridge() {
     REGISTER_OPERATOR("RandomUniformLike", 1, random_uniform_like);
     REGISTER_OPERATOR("Reciprocal", 1, reciprocal);
     REGISTER_OPERATOR("ReduceLogSum", 1, reduce_log_sum);
+    register_operator("ReduceLogSum", VersionRange{1, 17}, op::set_1::reduce_log_sum);
+    register_operator("ReduceLogSum", VersionRange::since(18), op::set_18::reduce_log_sum);
     REGISTER_OPERATOR("ReduceLogSumExp", 1, reduce_log_sum_exp);
     REGISTER_OPERATOR("ReduceL1", 1, reduce_l1);
     REGISTER_OPERATOR("ReduceL2", 1, reduce_l2);
diff --git a/src/frontends/onnx/tests/models/reduce_log_sum_18.prototxt b/src/frontends/onnx/tests/models/reduce_log_sum_18.prototxt
new file mode 100644
index 00000000000000..1d85f6dfb706cd
--- /dev/null
+++ b/src/frontends/onnx/tests/models/reduce_log_sum_18.prototxt
@@ -0,0 +1,48 @@
+ir_version: 3
+producer_name: "OpenVINO ONNX Frontend"
+graph {
+  node {
+    input: "A"
+    output: "B"
+    op_type: "ReduceLogSum"
+  }
+  name: "compute_graph"
+  input {
+    name: "A"
+    type {
+      tensor_type {
+        elem_type: 1
+        shape {
+          dim {
+            dim_value: 1
+          }
+          dim {
+            dim_value: 1
+          }
+          dim {
+            dim_value: 4
+          }
+          dim {
+            dim_value: 4
+          }
+        }
+      }
+    }
+  }
+  output {
+    name: "B"
+    type {
+      tensor_type {
+        elem_type: 1
+        shape {
+          dim {
+            dim_value: 1
+          }
+        }
+      }
+    }
+  }
+}
+opset_import {
+  version: 18
+}
diff --git a/src/frontends/onnx/tests/models/reduce_log_sum_18_axes_as_input.prototxt b/src/frontends/onnx/tests/models/reduce_log_sum_18_axes_as_input.prototxt
new file mode 100644
index 00000000000000..291070ef34975f
--- /dev/null
+++ b/src/frontends/onnx/tests/models/reduce_log_sum_18_axes_as_input.prototxt
@@ -0,0 +1,71 @@
+ir_version: 3
+producer_name: "OpenVINO ONNX Frontend"
+graph {
+  node {
+    input: "A"
+    input: "axes"
+    output: "B"
+    op_type: "ReduceLogSum"
+  }
+  name: "compute_graph"
+  input {
+    name: "A"
+    type {
+      tensor_type {
+        elem_type: 1
+        shape {
+          dim {
+            dim_value: 1
+          }
+          dim {
+            dim_value: 1
+          }
+          dim {
+            dim_value: 4
+          }
+          dim {
+            dim_value: 4
+          }
+        }
+      }
+    }
+  }
+  input {
+    name: "axes"
+    type {
+      tensor_type {
+        elem_type: 7
+        shape {
+          dim {
+            dim_value: 1
+          }
+        }
+      }
+    }
+  }
+  output {
+    name: "B"
+    type {
+      tensor_type {
+        elem_type: 1
+        shape {
+          dim {
+            dim_value: 1
+          }
+          dim {
+            dim_value: 1
+          }
+          dim {
+            dim_value: 4
+          }
+          dim {
+            dim_value: 1
+          }
+        }
+      }
+    }
+  }
+}
+opset_import {
+  version: 18
+}
diff --git a/src/frontends/onnx/tests/onnx_import.in.cpp b/src/frontends/onnx/tests/onnx_import.in.cpp
index b515fd1cb78799..b0cc3ed14072f9 100644
--- a/src/frontends/onnx/tests/onnx_import.in.cpp
+++ b/src/frontends/onnx/tests/onnx_import.in.cpp
@@ -910,6 +910,35 @@ OPENVINO_TEST(${BACKEND_NAME}, onnx_model_reduce_log_sum) {
     test_case.run();
 }
 
+OPENVINO_TEST(${BACKEND_NAME}, onnx_model_reduce_log_sum_18) {
+    auto model = convert_model("reduce_log_sum_18.onnx");
+
+    // input data shape (1, 1, 4, 4)
+    Inputs inputs{
+        ov::test::NDArray<float, 4>({{{{1, 1, 1, 1}, {1, 1, 1, 1}, {1, 1, 1, 1}, {1, 1, 1, 1}}}}).get_vector()};
+
+    // output data shape (1,)
+    auto expected_output = ov::test::NDArray<float, 4>({{{{2.77258872f}}}}).get_vector();
+
+    auto test_case = ov::test::TestCase(model, s_device);
+    test_case.add_multiple_inputs(inputs);
+    test_case.add_expected_output(expected_output);
+    test_case.run();
+}
+
+OPENVINO_TEST(${BACKEND_NAME}, onnx_model_reduce_log_sum_18_axes_as_input) {
+    auto model = convert_model("reduce_log_sum_18_axes_as_input.onnx");
+    auto test_case = ov::test::TestCase(model, s_device);
+
+    test_case.add_input<float>(Shape{1, 1, 4, 4}, {2, 1, 4, 2, 3, 1, 3, 2, 4, 2, 4, 2, 2, 2, 1, 4});
+    test_case.add_input<int64_t>({3});
+
+    test_case.add_expected_output<float>(Shape{1, 1, 4, 1},
+                                         std::vector<float>{2.19722458f, 2.19722458f, 2.48490665f, 2.19722458f});
+
+    test_case.run();
+}
+
 OPENVINO_TEST(${BACKEND_NAME}, onnx_model_reduce_log_sum_exp) {
     auto model = convert_model("reduce_log_sum_exp.onnx");
 
diff --git a/src/frontends/onnx/tests/tests_python/test_backend.py b/src/frontends/onnx/tests/tests_python/test_backend.py
index 229b88b39b31a8..dafa2fbebfbf29 100644
--- a/src/frontends/onnx/tests/tests_python/test_backend.py
+++ b/src/frontends/onnx/tests/tests_python/test_backend.py
@@ -492,7 +492,6 @@ def expect_fail(test_case_path, xfail):  # type: (str) -> None
         "OnnxBackendNodeModelTest.test_reduce_log_sum_exp_keepdims_example_cpu",
         "OnnxBackendNodeModelTest.test_reduce_log_sum_exp_negative_axes_keepdims_example_cpu",
         "OnnxBackendNodeModelTest.test_reduce_log_sum_exp_keepdims_random_cpu",
-        "OnnxBackendNodeModelTest.test_reduce_log_sum_negative_axes_cpu",
         "OnnxBackendNodeModelTest.test_reduce_mean_do_not_keepdims_example_cpu",
         "OnnxBackendNodeModelTest.test_reduce_log_sum_exp_negative_axes_keepdims_random_cpu",
         "OnnxBackendNodeModelTest.test_reduce_mean_do_not_keepdims_random_cpu",
@@ -708,7 +707,6 @@ def expect_fail(test_case_path, xfail):  # type: (str) -> None
         xfail_issue_125493,
         "OnnxBackendNodeModelTest.test_reduce_l1_empty_set_cpu",
         "OnnxBackendNodeModelTest.test_reduce_l2_empty_set_cpu",
-        "OnnxBackendNodeModelTest.test_reduce_log_sum_empty_set_cpu",
         "OnnxBackendNodeModelTest.test_reduce_log_sum_exp_empty_set_cpu",
         "OnnxBackendNodeModelTest.test_reduce_min_empty_set_cpu",
         "OnnxBackendNodeModelTest.test_reduce_prod_empty_set_cpu",
@@ -721,7 +719,6 @@ def expect_fail(test_case_path, xfail):  # type: (str) -> None
     (
         skip_misalignment,
         "OnnxBackendNodeModelTest.test_gelu_default_2_expanded_cpu",
-        "OnnxBackendNodeModelTest.test_reduce_log_sum_empty_set_expanded_cpu",
         "OnnxBackendNodeModelTest.test_reduce_log_sum_exp_empty_set_expanded_cpu",
     ),
     (
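
For reference, the expected values in the two new onnx_import tests follow directly from the
ReduceLogSum definition: log(reduce_sum(x)) over the reduced axes, with keepdims enabled by
default. The short NumPy sketch below is standalone and not part of the patch; the
reduce_log_sum helper name is purely illustrative and not an OpenVINO API.

import numpy as np

def reduce_log_sum(data, axes=None, keepdims=True):
    # axes=None reduces over all axes, matching the opset-18 model that has no "axes" input
    axes = tuple(axes) if axes is not None else None
    return np.log(np.sum(data, axis=axes, keepdims=keepdims))

# onnx_model_reduce_log_sum_18: all-ones (1, 1, 4, 4) input, all axes reduced
ones = np.ones((1, 1, 4, 4), dtype=np.float32)
print(reduce_log_sum(ones).ravel())            # [2.7725887] == log(16)

# onnx_model_reduce_log_sum_18_axes_as_input: reduce over axis 3 only
data = np.array([2, 1, 4, 2, 3, 1, 3, 2, 4, 2, 4, 2, 2, 2, 1, 4],
                dtype=np.float32).reshape(1, 1, 4, 4)
print(reduce_log_sum(data, axes=[3]).ravel())  # [2.1972246 2.1972246 2.4849067 2.1972246]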