From 7926302d35770d081de06e066e5e4c2e68938b8c Mon Sep 17 00:00:00 2001
From: Mateusz Mikolajczyk
Date: Wed, 4 Oct 2023 09:38:33 +0200
Subject: [PATCH] [Ref][Core][Opset13] BitwiseAnd, BitwiseOr and BitwiseXor
 core shell and reference (#20058)

* Add Bitwise binary core and refs

* Add draft for tests

* Formatting, build issues and tests

* Fix tests

* Add reference tests

* Apply requested changes

* Add requested changes

* Rename

* uncomment test

* Update src/tests/functional/plugin/conformance/test_runner/op_conformance_runner/src/op_impl_check/single_op_graph.cpp

Co-authored-by: Tomasz Jankowski

* change reference

---------

Co-authored-by: Tomasz Jankowski
---
 src/core/include/openvino/op/bitwise_and.hpp  |  39 +
 src/core/include/openvino/op/bitwise_or.hpp   |  39 +
 src/core/include/openvino/op/bitwise_xor.hpp  |  39 +
 src/core/include/openvino/op/ops.hpp          |   3 +
 .../op/util/binary_elementwise_bitwise.hpp    |  41 +
 .../include/openvino/opsets/opset13_tbl.hpp   |   3 +
 .../openvino/reference/bitwise_and.hpp        |  54 +
 .../include/openvino/reference/bitwise_or.hpp |  54 +
 .../openvino/reference/bitwise_xor.hpp        |  54 +
 src/core/src/op/bitwise_and.cpp               |  25 +
 src/core/src/op/bitwise_not.cpp               |   1 -
 src/core/src/op/bitwise_or.cpp                |  25 +
 src/core/src/op/bitwise_xor.cpp               |  25 +
 .../op/util/binary_elementwise_bitwise.cpp    |  41 +
 src/core/tests/op_version_tbl.hpp             |   3 +
 src/core/tests/opset.cpp                      |   2 +-
 src/core/tests/type_prop/bitwise_and.cpp      |  11 +
 src/core/tests/type_prop/bitwise_ops.hpp      | 936 ++++++++++++++++++
 src/core/tests/type_prop/bitwise_or.cpp       |  11 +
 src/core/tests/type_prop/bitwise_xor.cpp      |  11 +
 src/core/tests/visitors/op/bitwise_and.cpp    |  11 +
 src/core/tests/visitors/op/bitwise_or.cpp     |  11 +
 src/core/tests/visitors/op/bitwise_xor.cpp    |  11 +
 .../template/backend/ops/bitwise_and.cpp      |  56 ++
 .../template/backend/ops/bitwise_not.cpp      |   2 +-
 .../template/backend/ops/bitwise_or.cpp       |  56 ++
 .../template/backend/ops/bitwise_xor.cpp      |  56 ++
 .../template/backend/ops/ops_evaluates.hpp    |  12 +
 .../template/backend/opset_int_tbl.hpp        |   3 +
 .../tests/functional/op_reference/bitwise.hpp |  27 +-
 .../functional/op_reference/bitwise_and.cpp   | 359 +++++++
 .../functional/op_reference/bitwise_or.cpp    | 385 +++++++
 .../functional/op_reference/bitwise_xor.cpp   | 385 +++++++
 .../src/op_impl_check/single_op_graph.cpp     |  20 +
 34 files changed, 2801 insertions(+), 10 deletions(-)
 create mode 100644 src/core/include/openvino/op/bitwise_and.hpp
 create mode 100644 src/core/include/openvino/op/bitwise_or.hpp
 create mode 100644 src/core/include/openvino/op/bitwise_xor.hpp
 create mode 100644 src/core/include/openvino/op/util/binary_elementwise_bitwise.hpp
 create mode 100644 src/core/reference/include/openvino/reference/bitwise_and.hpp
 create mode 100644 src/core/reference/include/openvino/reference/bitwise_or.hpp
 create mode 100644 src/core/reference/include/openvino/reference/bitwise_xor.hpp
 create mode 100644 src/core/src/op/bitwise_and.cpp
 create mode 100644 src/core/src/op/bitwise_or.cpp
 create mode 100644 src/core/src/op/bitwise_xor.cpp
 create mode 100644 src/core/src/op/util/binary_elementwise_bitwise.cpp
 create mode 100644 src/core/tests/type_prop/bitwise_and.cpp
 create mode 100644 src/core/tests/type_prop/bitwise_ops.hpp
 create mode 100644 src/core/tests/type_prop/bitwise_or.cpp
 create mode 100644 src/core/tests/type_prop/bitwise_xor.cpp
 create mode 100644 src/core/tests/visitors/op/bitwise_and.cpp
 create mode 100644 src/core/tests/visitors/op/bitwise_or.cpp
 create mode 100644 src/core/tests/visitors/op/bitwise_xor.cpp
 create mode 100644 src/plugins/template/backend/ops/bitwise_and.cpp
 create mode 100644 src/plugins/template/backend/ops/bitwise_or.cpp
 create mode 100644 src/plugins/template/backend/ops/bitwise_xor.cpp
 create mode 100644 src/plugins/template/tests/functional/op_reference/bitwise_and.cpp
 create mode 100644 src/plugins/template/tests/functional/op_reference/bitwise_or.cpp
 create mode 100644 src/plugins/template/tests/functional/op_reference/bitwise_xor.cpp

diff --git a/src/core/include/openvino/op/bitwise_and.hpp b/src/core/include/openvino/op/bitwise_and.hpp
new file mode 100644
index 00000000000000..4a9867b222aef5
--- /dev/null
+++ b/src/core/include/openvino/op/bitwise_and.hpp
@@ -0,0 +1,39 @@
+// Copyright (C) 2018-2023 Intel Corporation
+// SPDX-License-Identifier: Apache-2.0
+//
+
+#pragma once
+
+#include "openvino/op/op.hpp"
+#include "openvino/op/util/binary_elementwise_bitwise.hpp"
+
+namespace ov {
+namespace op {
+namespace v13 {
+/// \brief Elementwise bitwise AND operation.
+/// \ingroup ov_ops_cpp_api
+class OPENVINO_API BitwiseAnd : public util::BinaryElementwiseBitwise {
+public:
+    OPENVINO_OP("BitwiseAnd", "opset13", util::BinaryElementwiseBitwise);
+    /// \brief Constructs a bitwise AND operation.
+    BitwiseAnd() = default;
+    /// \brief Constructs a bitwise AND operation.
+    ///
+    /// \param arg0 Output that produces the first input tensor.
+    ///             `[d0, ...]`
+    /// \param arg1 Output that produces the second input tensor.
+    ///             `[d0, ...]`
+    /// \param auto_broadcast Auto broadcast specification. Default is Numpy-style
+    ///                       implicit broadcasting.
+    ///
+    /// Output `[d0, ...]`
+    ///
+    BitwiseAnd(const Output<Node>& arg0,
+               const Output<Node>& arg1,
+               const AutoBroadcastSpec& auto_broadcast = AutoBroadcastSpec(AutoBroadcastType::NUMPY));
+
+    std::shared_ptr<Node> clone_with_new_inputs(const OutputVector& new_args) const override;
+};
+}  // namespace v13
+}  // namespace op
+}  // namespace ov
diff --git a/src/core/include/openvino/op/bitwise_or.hpp b/src/core/include/openvino/op/bitwise_or.hpp
new file mode 100644
index 00000000000000..0f40a8500362a0
--- /dev/null
+++ b/src/core/include/openvino/op/bitwise_or.hpp
@@ -0,0 +1,39 @@
+// Copyright (C) 2018-2023 Intel Corporation
+// SPDX-License-Identifier: Apache-2.0
+//
+
+#pragma once
+
+#include "openvino/op/op.hpp"
+#include "openvino/op/util/binary_elementwise_bitwise.hpp"
+
+namespace ov {
+namespace op {
+namespace v13 {
+/// \brief Elementwise bitwise OR operation.
+/// \ingroup ov_ops_cpp_api
+class OPENVINO_API BitwiseOr : public util::BinaryElementwiseBitwise {
+public:
+    OPENVINO_OP("BitwiseOr", "opset13", util::BinaryElementwiseBitwise);
+    /// \brief Constructs a bitwise OR operation.
+    BitwiseOr() = default;
+    /// \brief Constructs a bitwise OR operation.
+    ///
+    /// \param arg0 Output that produces the first input tensor.
+    ///             `[d0, ...]`
+    /// \param arg1 Output that produces the second input tensor.
+    ///             `[d0, ...]`
+    /// \param auto_broadcast Auto broadcast specification. Default is Numpy-style
+    ///                       implicit broadcasting.
+    ///
+    /// Output `[d0, ...]`
+    ///
+    BitwiseOr(const Output<Node>& arg0,
+              const Output<Node>& arg1,
+              const AutoBroadcastSpec& auto_broadcast = AutoBroadcastSpec(AutoBroadcastType::NUMPY));
+
+    std::shared_ptr<Node> clone_with_new_inputs(const OutputVector& new_args) const override;
+};
+}  // namespace v13
+}  // namespace op
+}  // namespace ov
diff --git a/src/core/include/openvino/op/bitwise_xor.hpp b/src/core/include/openvino/op/bitwise_xor.hpp
new file mode 100644
index 00000000000000..6ebb07bfe38d73
--- /dev/null
+++ b/src/core/include/openvino/op/bitwise_xor.hpp
@@ -0,0 +1,39 @@
+// Copyright (C) 2018-2023 Intel Corporation
+// SPDX-License-Identifier: Apache-2.0
+//
+
+#pragma once
+
+#include "openvino/op/op.hpp"
+#include "openvino/op/util/binary_elementwise_bitwise.hpp"
+
+namespace ov {
+namespace op {
+namespace v13 {
+/// \brief Elementwise bitwise XOR operation.
+/// \ingroup ov_ops_cpp_api
+class OPENVINO_API BitwiseXor : public util::BinaryElementwiseBitwise {
+public:
+    OPENVINO_OP("BitwiseXor", "opset13", util::BinaryElementwiseBitwise);
+    /// \brief Constructs a bitwise XOR operation.
+    BitwiseXor() = default;
+    /// \brief Constructs a bitwise XOR operation.
+    ///
+    /// \param arg0 Output that produces the first input tensor.
+    ///             `[d0, ...]`
+    /// \param arg1 Output that produces the second input tensor.
+    ///             `[d0, ...]`
+    /// \param auto_broadcast Auto broadcast specification. Default is Numpy-style
+    ///                       implicit broadcasting.
+    ///
+    /// Output `[d0, ...]`
+    ///
+    BitwiseXor(const Output<Node>& arg0,
+               const Output<Node>& arg1,
+               const AutoBroadcastSpec& auto_broadcast = AutoBroadcastSpec(AutoBroadcastType::NUMPY));
+
+    std::shared_ptr<Node> clone_with_new_inputs(const OutputVector& new_args) const override;
+};
+}  // namespace v13
+}  // namespace op
+}  // namespace ov
diff --git a/src/core/include/openvino/op/ops.hpp b/src/core/include/openvino/op/ops.hpp
index 159a84176c427d..b57372f118f19b 100644
--- a/src/core/include/openvino/op/ops.hpp
+++ b/src/core/include/openvino/op/ops.hpp
@@ -21,7 +21,10 @@
 #include "openvino/op/batch_norm.hpp"
 #include "openvino/op/batch_to_space.hpp"
 #include "openvino/op/binary_convolution.hpp"
+#include "openvino/op/bitwise_and.hpp"
 #include "openvino/op/bitwise_not.hpp"
+#include "openvino/op/bitwise_or.hpp"
+#include "openvino/op/bitwise_xor.hpp"
 #include "openvino/op/broadcast.hpp"
 #include "openvino/op/bucketize.hpp"
 #include "openvino/op/ceiling.hpp"
diff --git a/src/core/include/openvino/op/util/binary_elementwise_bitwise.hpp b/src/core/include/openvino/op/util/binary_elementwise_bitwise.hpp
new file mode 100644
index 00000000000000..16096219e4d110
--- /dev/null
+++ b/src/core/include/openvino/op/util/binary_elementwise_bitwise.hpp
@@ -0,0 +1,41 @@
+// Copyright (C) 2018-2023 Intel Corporation
+// SPDX-License-Identifier: Apache-2.0
+//
+
+#pragma once
+
+#include "openvino/op/op.hpp"
+
+namespace ov {
+namespace op {
+namespace util {
+class OPENVINO_API BinaryElementwiseBitwise : public Op {
+protected:
+    BinaryElementwiseBitwise();
+
+    /// \brief Constructs a binary elementwise bitwise operation.
+    ///
+    /// \param arg0 Output that produces the first input tensor.
+    /// \param arg1 Output that produces the second input tensor.
+    /// \param auto_broadcast Auto broadcast specification. Default is Numpy-style
+    ///                       implicit broadcasting.
+ BinaryElementwiseBitwise(const Output& arg0, + const Output& arg1, + const AutoBroadcastSpec& autob = AutoBroadcastSpec()); + +public: + OPENVINO_OP("BinaryElementwiseBitwise", "util"); + + void validate_and_infer_types() override; + + virtual const AutoBroadcastSpec& get_autob() const override; + + void set_autob(const AutoBroadcastSpec& autob); + bool visit_attributes(AttributeVisitor& visitor) override; + +private: + AutoBroadcastSpec m_autob = AutoBroadcastType::NUMPY; +}; +} // namespace util +} // namespace op +} // namespace ov diff --git a/src/core/include/openvino/opsets/opset13_tbl.hpp b/src/core/include/openvino/opsets/opset13_tbl.hpp index 353124af1afd21..8d543e49b67614 100644 --- a/src/core/include/openvino/opsets/opset13_tbl.hpp +++ b/src/core/include/openvino/opsets/opset13_tbl.hpp @@ -209,5 +209,8 @@ _OPENVINO_OP_REG(Pad, ov::op::v12) _OPENVINO_OP_REG(ScatterElementsUpdate, ov::op::v12) // New operations added in opset13 +_OPENVINO_OP_REG(BitwiseAnd, ov::op::v13) _OPENVINO_OP_REG(BitwiseNot, ov::op::v13) +_OPENVINO_OP_REG(BitwiseOr, ov::op::v13) +_OPENVINO_OP_REG(BitwiseXor, ov::op::v13) _OPENVINO_OP_REG(NMSRotated, ov::op::v13) diff --git a/src/core/reference/include/openvino/reference/bitwise_and.hpp b/src/core/reference/include/openvino/reference/bitwise_and.hpp new file mode 100644 index 00000000000000..a6422b5d489342 --- /dev/null +++ b/src/core/reference/include/openvino/reference/bitwise_and.hpp @@ -0,0 +1,54 @@ +// Copyright (C) 2018-2023 Intel Corporation +// SPDX-License-Identifier: Apache-2.0 +// + +#pragma once + +#include +#include + +#include "openvino/reference/autobroadcast_binop.hpp" + +namespace ov { +namespace reference { +/** + * @brief Reference implementation of binary elementwise bitwise AND operator. + * + * @param arg0 Pointer to input 0 data. + * @param arg1 Pointer to input 1 data. + * @param out Pointer to output data. + * @param arg_shape0 Input 0 shape. + * @param arg_shape1 Input 1 shape. + * @param broadcast_spec Broadcast specification mode. + */ +template ::type, char>::value>::type* = nullptr> +// Check for char datatype used by ov::element::boolean +void bitwise_and(const T* arg0, + const T* arg1, + T* out, + const Shape& arg0_shape, + const Shape& arg1_shape, + const op::AutoBroadcastSpec& broadcast_spec) { + autobroadcast_binop(arg0, arg1, out, arg0_shape, arg1_shape, broadcast_spec, std::bit_and()); +} +/** + * @brief Reference implementation of binary elementwise bitwise AND operator. + * + * @param arg0 Pointer to input 0 data. + * @param arg1 Pointer to input 1 data. + * @param out Pointer to output data. + * @param arg_shape0 Input 0 shape. + * @param arg_shape1 Input 1 shape. + * @param broadcast_spec Broadcast specification mode. 
+ */ +template ::type, char>::value>::type* = nullptr> +void bitwise_and(const T* arg0, + const T* arg1, + T* out, + const Shape& arg0_shape, + const Shape& arg1_shape, + const op::AutoBroadcastSpec& broadcast_spec) { + autobroadcast_binop(arg0, arg1, out, arg0_shape, arg1_shape, broadcast_spec, std::bit_and()); +} +} // namespace reference +} // namespace ov diff --git a/src/core/reference/include/openvino/reference/bitwise_or.hpp b/src/core/reference/include/openvino/reference/bitwise_or.hpp new file mode 100644 index 00000000000000..54eb2fe91ffde0 --- /dev/null +++ b/src/core/reference/include/openvino/reference/bitwise_or.hpp @@ -0,0 +1,54 @@ +// Copyright (C) 2018-2023 Intel Corporation +// SPDX-License-Identifier: Apache-2.0 +// + +#pragma once + +#include +#include + +#include "openvino/reference/autobroadcast_binop.hpp" + +namespace ov { +namespace reference { +/** + * @brief Reference implementation of binary elementwise bitwise OR operator. + * + * @param arg0 Pointer to input 0 data. + * @param arg1 Pointer to input 1 data. + * @param out Pointer to output data. + * @param arg_shape0 Input 0 shape. + * @param arg_shape1 Input 1 shape. + * @param broadcast_spec Broadcast specification mode. + */ +template ::type, char>::value>::type* = nullptr> +// Check for char datatype used by ov::element::boolean +void bitwise_or(const T* arg0, + const T* arg1, + T* out, + const Shape& arg0_shape, + const Shape& arg1_shape, + const op::AutoBroadcastSpec& broadcast_spec) { + autobroadcast_binop(arg0, arg1, out, arg0_shape, arg1_shape, broadcast_spec, std::bit_or()); +} +/** + * @brief Reference implementation of binary elementwise bitwise OR operator. + * + * @param arg0 Pointer to input 0 data. + * @param arg1 Pointer to input 1 data. + * @param out Pointer to output data. + * @param arg_shape0 Input 0 shape. + * @param arg_shape1 Input 1 shape. + * @param broadcast_spec Broadcast specification mode. + */ +template ::type, char>::value>::type* = nullptr> +void bitwise_or(const T* arg0, + const T* arg1, + T* out, + const Shape& arg0_shape, + const Shape& arg1_shape, + const op::AutoBroadcastSpec& broadcast_spec) { + autobroadcast_binop(arg0, arg1, out, arg0_shape, arg1_shape, broadcast_spec, std::bit_or()); +} +} // namespace reference +} // namespace ov diff --git a/src/core/reference/include/openvino/reference/bitwise_xor.hpp b/src/core/reference/include/openvino/reference/bitwise_xor.hpp new file mode 100644 index 00000000000000..7204077c4abce7 --- /dev/null +++ b/src/core/reference/include/openvino/reference/bitwise_xor.hpp @@ -0,0 +1,54 @@ +// Copyright (C) 2018-2023 Intel Corporation +// SPDX-License-Identifier: Apache-2.0 +// + +#pragma once + +#include +#include + +#include "openvino/reference/autobroadcast_binop.hpp" + +namespace ov { +namespace reference { +/** + * @brief Reference implementation of binary elementwise bitwise XOR operator. + * + * @param arg0 Pointer to input 0 data. + * @param arg1 Pointer to input 1 data. + * @param out Pointer to output data. + * @param arg_shape0 Input 0 shape. + * @param arg_shape1 Input 1 shape. + * @param broadcast_spec Broadcast specification mode. 
+ */ +template ::type, char>::value>::type* = nullptr> +// Check for char datatype used by ov::element::boolean +void bitwise_xor(const T* arg0, + const T* arg1, + T* out, + const Shape& arg0_shape, + const Shape& arg1_shape, + const op::AutoBroadcastSpec& broadcast_spec) { + autobroadcast_binop(arg0, arg1, out, arg0_shape, arg1_shape, broadcast_spec, std::bit_xor()); +} +/** + * @brief Reference implementation of binary elementwise bitwise XOR operator. + * + * @param arg0 Pointer to input 0 data. + * @param arg1 Pointer to input 1 data. + * @param out Pointer to output data. + * @param arg_shape0 Input 0 shape. + * @param arg_shape1 Input 1 shape. + * @param broadcast_spec Broadcast specification mode. + */ +template ::type, char>::value>::type* = nullptr> +void bitwise_xor(const T* arg0, + const T* arg1, + T* out, + const Shape& arg0_shape, + const Shape& arg1_shape, + const op::AutoBroadcastSpec& broadcast_spec) { + autobroadcast_binop(arg0, arg1, out, arg0_shape, arg1_shape, broadcast_spec, std::bit_xor()); +} +} // namespace reference +} // namespace ov diff --git a/src/core/src/op/bitwise_and.cpp b/src/core/src/op/bitwise_and.cpp new file mode 100644 index 00000000000000..22da9e92f47386 --- /dev/null +++ b/src/core/src/op/bitwise_and.cpp @@ -0,0 +1,25 @@ +// Copyright (C) 2018-2023 Intel Corporation +// SPDX-License-Identifier: Apache-2.0 +// +#include "openvino/op/bitwise_and.hpp" + +#include "itt.hpp" +#include "openvino/op/op.hpp" + +namespace ov { +namespace op { +namespace v13 { +BitwiseAnd::BitwiseAnd(const Output& arg0, const Output& arg1, const AutoBroadcastSpec& auto_broadcast) + : BinaryElementwiseBitwise(arg0, arg1, auto_broadcast) { + constructor_validate_and_infer_types(); +} + +std::shared_ptr BitwiseAnd::clone_with_new_inputs(const OutputVector& new_args) const { + OV_OP_SCOPE(v13_BitwiseAnd_clone_with_new_inputs); + check_new_args_count(this, new_args); + return std::make_shared(new_args[0], new_args[1], get_autob()); +} + +} // namespace v13 +} // namespace op +} // namespace ov diff --git a/src/core/src/op/bitwise_not.cpp b/src/core/src/op/bitwise_not.cpp index 92aeace18ad501..257a796fda2d0f 100644 --- a/src/core/src/op/bitwise_not.cpp +++ b/src/core/src/op/bitwise_not.cpp @@ -4,7 +4,6 @@ #include "openvino/op/bitwise_not.hpp" #include "itt.hpp" -#include "openvino/core/validation_util.hpp" #include "openvino/op/op.hpp" namespace ov { diff --git a/src/core/src/op/bitwise_or.cpp b/src/core/src/op/bitwise_or.cpp new file mode 100644 index 00000000000000..02ff0ad0830f1f --- /dev/null +++ b/src/core/src/op/bitwise_or.cpp @@ -0,0 +1,25 @@ +// Copyright (C) 2018-2023 Intel Corporation +// SPDX-License-Identifier: Apache-2.0 +// +#include "openvino/op/bitwise_or.hpp" + +#include "itt.hpp" +#include "openvino/op/op.hpp" + +namespace ov { +namespace op { +namespace v13 { +BitwiseOr::BitwiseOr(const Output& arg0, const Output& arg1, const AutoBroadcastSpec& auto_broadcast) + : BinaryElementwiseBitwise(arg0, arg1, auto_broadcast) { + constructor_validate_and_infer_types(); +} + +std::shared_ptr BitwiseOr::clone_with_new_inputs(const OutputVector& new_args) const { + OV_OP_SCOPE(v13_BitwiseOr_clone_with_new_inputs); + check_new_args_count(this, new_args); + return std::make_shared(new_args[0], new_args[1], get_autob()); +} + +} // namespace v13 +} // namespace op +} // namespace ov diff --git a/src/core/src/op/bitwise_xor.cpp b/src/core/src/op/bitwise_xor.cpp new file mode 100644 index 00000000000000..320fe39f120359 --- /dev/null +++ b/src/core/src/op/bitwise_xor.cpp @@ -0,0 
+1,25 @@ +// Copyright (C) 2018-2023 Intel Corporation +// SPDX-License-Identifier: Apache-2.0 +// +#include "openvino/op/bitwise_xor.hpp" + +#include "itt.hpp" +#include "openvino/op/op.hpp" + +namespace ov { +namespace op { +namespace v13 { +BitwiseXor::BitwiseXor(const Output& arg0, const Output& arg1, const AutoBroadcastSpec& auto_broadcast) + : BinaryElementwiseBitwise(arg0, arg1, auto_broadcast) { + constructor_validate_and_infer_types(); +} + +std::shared_ptr BitwiseXor::clone_with_new_inputs(const OutputVector& new_args) const { + OV_OP_SCOPE(v13_BitwiseXor_clone_with_new_inputs); + check_new_args_count(this, new_args); + return std::make_shared(new_args[0], new_args[1], get_autob()); +} + +} // namespace v13 +} // namespace op +} // namespace ov diff --git a/src/core/src/op/util/binary_elementwise_bitwise.cpp b/src/core/src/op/util/binary_elementwise_bitwise.cpp new file mode 100644 index 00000000000000..342bcf9cd757a8 --- /dev/null +++ b/src/core/src/op/util/binary_elementwise_bitwise.cpp @@ -0,0 +1,41 @@ +// Copyright (C) 2018-2023 Intel Corporation +// SPDX-License-Identifier: Apache-2.0 +// + +#include "openvino/op/util/binary_elementwise_bitwise.hpp" + +#include "itt.hpp" +#include "openvino/op/util/elementwise_args.hpp" + +ov::op::util::BinaryElementwiseBitwise::BinaryElementwiseBitwise() = default; + +ov::op::util::BinaryElementwiseBitwise::BinaryElementwiseBitwise(const Output& arg0, + const Output& arg1, + const AutoBroadcastSpec& autob) + : Op({arg0, arg1}), + m_autob(autob) {} + +void ov::op::util::BinaryElementwiseBitwise::validate_and_infer_types() { + OV_OP_SCOPE(v0_util_BinaryElementwiseBitwise_validate_and_infer_types); + auto args_et_pshape = op::util::validate_and_infer_elementwise_args(this); + const auto& args_et = std::get<0>(args_et_pshape); + const auto& args_pshape = std::get<1>(args_et_pshape); + + NODE_VALIDATION_CHECK(this, + args_et.is_dynamic() || args_et.is_integral(), + "The element type of the input tensor must be integer or boolean."); + + set_output_type(0, args_et, args_pshape); +} + +bool ov::op::util::BinaryElementwiseBitwise::visit_attributes(AttributeVisitor& visitor) { + OV_OP_SCOPE(v0_util_BinaryElementwiseBitwise_visit_attributes); + visitor.on_attribute("auto_broadcast", m_autob); + return true; +} +const ov::op::AutoBroadcastSpec& ov::op::util::BinaryElementwiseBitwise::get_autob() const { + return m_autob; +} +void ov::op::util::BinaryElementwiseBitwise::set_autob(const AutoBroadcastSpec& autob) { + m_autob = autob; +} diff --git a/src/core/tests/op_version_tbl.hpp b/src/core/tests/op_version_tbl.hpp index bf2fc789b12635..d861bfba0c4c50 100644 --- a/src/core/tests/op_version_tbl.hpp +++ b/src/core/tests/op_version_tbl.hpp @@ -26,7 +26,10 @@ _OPENVINO_OP_REG(AvgPool, ov::op::v1) _OPENVINO_OP_REG(BatchNormInference, ov::op::v0) _OPENVINO_OP_REG(BatchToSpace, ov::op::v1) _OPENVINO_OP_REG(BinaryConvolution, ov::op::v1) +_OPENVINO_OP_REG(BitwiseAnd, ov::op::v13) _OPENVINO_OP_REG(BitwiseNot, ov::op::v13) +_OPENVINO_OP_REG(BitwiseOr, ov::op::v13) +_OPENVINO_OP_REG(BitwiseXor, ov::op::v13) _OPENVINO_OP_REG(Broadcast, ov::op::v1) _OPENVINO_OP_REG(Broadcast, ov::op::v3) _OPENVINO_OP_REG(Bucketize, ov::op::v3) diff --git a/src/core/tests/opset.cpp b/src/core/tests/opset.cpp index 947d2cdfa1f392..204f43ae8ff906 100644 --- a/src/core/tests/opset.cpp +++ b/src/core/tests/opset.cpp @@ -71,7 +71,7 @@ INSTANTIATE_TEST_SUITE_P(opset, OpsetTestParams{ov::get_opset10, 177}, OpsetTestParams{ov::get_opset11, 177}, OpsetTestParams{ov::get_opset12, 178}, - 
OpsetTestParams{ov::get_opset13, 180}), + OpsetTestParams{ov::get_opset13, 183}), OpsetTestNameGenerator{}); class MyOpOld : public ov::op::Op { diff --git a/src/core/tests/type_prop/bitwise_and.cpp b/src/core/tests/type_prop/bitwise_and.cpp new file mode 100644 index 00000000000000..0490f79c96e61c --- /dev/null +++ b/src/core/tests/type_prop/bitwise_and.cpp @@ -0,0 +1,11 @@ +// Copyright (C) 2018-2023 Intel Corporation +// SPDX-License-Identifier: Apache-2.0 +// + +#include "openvino/op/bitwise_and.hpp" + +#include "bitwise_ops.hpp" + +using Type = ::testing::Types; + +INSTANTIATE_TYPED_TEST_SUITE_P(type_prop_bitwise_and, BitwiseOperator, Type); diff --git a/src/core/tests/type_prop/bitwise_ops.hpp b/src/core/tests/type_prop/bitwise_ops.hpp new file mode 100644 index 00000000000000..3a8dc24df1b3ec --- /dev/null +++ b/src/core/tests/type_prop/bitwise_ops.hpp @@ -0,0 +1,936 @@ +// Copyright (C) 2018-2023 Intel Corporation +// SPDX-License-Identifier: Apache-2.0 +// + +#include + +#include + +#include "common_test_utils/test_assertions.hpp" +#include "common_test_utils/type_prop.hpp" +#include "openvino/core/dimension_tracker.hpp" +#include "openvino/op/util/attr_types.hpp" + +using namespace ov; +using op::v0::Parameter; +using namespace testing; + +template +class BitwiseOperator : public TypePropOpTest {}; + +TYPED_TEST_SUITE_P(BitwiseOperator); + +TYPED_TEST_P(BitwiseOperator, default_constructor_integer) { + auto lhs = std::make_shared(element::i32, PartialShape{-1, 4, 1, 6, {1, 6}, {2, 6}}); + auto rhs = std::make_shared(element::i32, PartialShape{-1, 1, 5, 6, {5, 8}, {5, 8}}); + + const auto op = this->make_op(); + + op->set_argument(0, lhs); + op->set_argument(1, rhs); + + auto autob = op::AutoBroadcastSpec(op::AutoBroadcastType::NONE); + op->set_autob(autob); + EXPECT_EQ(op->get_autob(), op::AutoBroadcastType::NONE); + ASSERT_THROW(op->validate_and_infer_types(), NodeValidationFailure); + + autob = op::AutoBroadcastSpec(op::AutoBroadcastType::NUMPY); + op->set_autob(autob); + EXPECT_EQ(op->get_autob(), op::AutoBroadcastType::NUMPY); + + op->validate_and_infer_types(); + + EXPECT_EQ(op->get_element_type(), element::i32); + EXPECT_EQ(op->get_output_partial_shape(0), (PartialShape{-1, 4, 5, 6, {5, 8}, {5, 6}})); +} + +TYPED_TEST_P(BitwiseOperator, default_constructor_boolean) { + auto lhs = std::make_shared(element::boolean, PartialShape{-1, 4, 1, 6, {1, 6}, {2, 6}}); + auto rhs = std::make_shared(element::boolean, PartialShape{-1, 1, 5, 6, {5, 8}, {5, 8}}); + + const auto op = this->make_op(); + + op->set_argument(0, lhs); + op->set_argument(1, rhs); + + auto autob = op::AutoBroadcastSpec(op::AutoBroadcastType::NONE); + op->set_autob(autob); + EXPECT_EQ(op->get_autob(), op::AutoBroadcastType::NONE); + ASSERT_THROW(op->validate_and_infer_types(), NodeValidationFailure); + + autob = op::AutoBroadcastSpec(op::AutoBroadcastType::NUMPY); + op->set_autob(autob); + EXPECT_EQ(op->get_autob(), op::AutoBroadcastType::NUMPY); + + op->validate_and_infer_types(); + + EXPECT_EQ(op->get_element_type(), element::boolean); + EXPECT_EQ(op->get_output_partial_shape(0), (PartialShape{-1, 4, 5, 6, {5, 8}, {5, 6}})); +} + +TYPED_TEST_P(BitwiseOperator, shape_inference_2D) { + auto lhs = std::make_shared(element::i32, Shape{2, 2}); + auto rhs = std::make_shared(element::i32, Shape{2, 2}); + + const auto op = this->make_op(lhs, rhs); + + EXPECT_EQ(op->get_element_type(), element::i32); + EXPECT_EQ(op->get_shape(), (Shape{2, 2})); +} + +TYPED_TEST_P(BitwiseOperator, shape_inference_4D) { + auto lhs = 
std::make_shared(element::i32, Shape{2, 2, 3, 3}); + auto rhs = std::make_shared(element::i32, Shape{2, 2, 3, 3}); + + const auto op = this->make_op(lhs, rhs); + + EXPECT_EQ(op->get_element_type(), element::i32); + EXPECT_EQ(op->get_shape(), (Shape{2, 2, 3, 3})); +} + +TYPED_TEST_P(BitwiseOperator, default_autobroadcast) { + auto lhs = std::make_shared(element::i32, Shape{2, 2}); + auto rhs = std::make_shared(element::i32, Shape{2, 2}); + + const auto op = this->make_op(lhs, rhs); + + EXPECT_EQ(op->get_element_type(), element::i32); + EXPECT_EQ(op->get_shape(), (Shape{2, 2})); + EXPECT_EQ(op->get_autob(), op::AutoBroadcastType::NUMPY); +} + +TYPED_TEST_P(BitwiseOperator, no_autobroadcast) { + auto lhs = std::make_shared(element::i32, Shape{2, 2}); + auto rhs = std::make_shared(element::i32, Shape{2, 2}); + + const auto op = this->make_op(lhs, rhs, op::AutoBroadcastType::NONE); + + EXPECT_EQ(op->get_element_type(), element::i32); + EXPECT_EQ(op->get_shape(), (Shape{2, 2})); + EXPECT_EQ(op->get_autob(), op::AutoBroadcastType::NONE); +} + +TYPED_TEST_P(BitwiseOperator, shape_inference_4D_x_scalar_numpy_broadcast) { + auto lhs = std::make_shared(element::i32, Shape{2, 3, 4, 5}); + auto rhs = std::make_shared(element::i32, Shape{1}); + + const auto op = this->make_op(lhs, rhs); + + EXPECT_EQ(op->get_element_type(), element::i32); + EXPECT_EQ(op->get_shape(), (Shape{2, 3, 4, 5})); +} + +TYPED_TEST_P(BitwiseOperator, shape_inference_4D_x_1D_numpy_broadcast) { + auto lhs = std::make_shared(element::i32, Shape{2, 3, 4, 5}); + auto rhs = std::make_shared(element::i32, Shape{5}); + + const auto op = this->make_op(lhs, rhs); + + EXPECT_EQ(op->get_element_type(), element::i32); + EXPECT_EQ(op->get_shape(), (Shape{2, 3, 4, 5})); +} + +TYPED_TEST_P(BitwiseOperator, shape_inference_2D_x_4D_numpy_broadcast) { + auto lhs = std::make_shared(element::i32, Shape{4, 5}); + auto rhs = std::make_shared(element::i32, Shape{2, 3, 4, 5}); + + const auto op = this->make_op(lhs, rhs); + + EXPECT_EQ(op->get_element_type(), element::i32); + EXPECT_EQ(op->get_shape(), (Shape{2, 3, 4, 5})); +} + +TYPED_TEST_P(BitwiseOperator, shape_inference_3D_x_4D_numpy_broadcast) { + auto lhs = std::make_shared(element::i32, Shape{1, 4, 5}); + auto rhs = std::make_shared(element::i32, Shape{2, 3, 1, 1}); + + const auto op = this->make_op(lhs, rhs); + + EXPECT_EQ(op->get_element_type(), element::i32); + EXPECT_EQ(op->get_shape(), (Shape{2, 3, 4, 5})); +} + +TYPED_TEST_P(BitwiseOperator, shape_inference_4D_x_3D_numpy_broadcast) { + auto lhs = std::make_shared(element::i32, Shape{8, 1, 6, 1}); + auto rhs = std::make_shared(element::i32, Shape{7, 1, 5}); + + const auto op = this->make_op(lhs, rhs); + + EXPECT_EQ(op->get_element_type(), element::i32); + EXPECT_EQ(op->get_shape(), (Shape{8, 7, 6, 5})); + EXPECT_EQ(op->get_autob(), op::AutoBroadcastType::NUMPY); +} + +TYPED_TEST_P(BitwiseOperator, static_shape_pdpd_doc_examples) { + { + auto lhs = std::make_shared(element::i32, Shape{2, 3, 4, 5}); + auto rhs = std::make_shared(element::i32, Shape{3, 4}); + + const auto autob = op::AutoBroadcastSpec(op::AutoBroadcastType::PDPD, 1); + const auto op = this->make_op(lhs, rhs, autob); + + EXPECT_EQ(op->get_element_type(), element::i32); + EXPECT_EQ(op->get_shape(), (Shape{2, 3, 4, 5})); + EXPECT_EQ(op->get_autob().m_type, op::AutoBroadcastType::PDPD); + } + { + auto lhs = std::make_shared(element::i32, Shape{2, 3, 4, 5}); + auto rhs = std::make_shared(element::i32, Shape{3, 1}); + + const auto autob = 
op::AutoBroadcastSpec(op::AutoBroadcastType::PDPD, 1); + const auto op = this->make_op(lhs, rhs, autob); + + EXPECT_EQ(op->get_element_type(), element::i32); + EXPECT_EQ(op->get_shape(), (Shape{2, 3, 4, 5})); + EXPECT_EQ(op->get_autob().m_type, op::AutoBroadcastType::PDPD); + } + { + auto lhs = std::make_shared(element::i32, Shape{2, 3, 4, 5}); + auto rhs = std::make_shared(element::i32, Shape{}); + + const auto autob = op::AutoBroadcastSpec(op::AutoBroadcastType::PDPD); + const auto op = this->make_op(lhs, rhs, autob); + + EXPECT_EQ(op->get_element_type(), element::i32); + EXPECT_EQ(op->get_shape(), (Shape{2, 3, 4, 5})); + EXPECT_EQ(op->get_autob().m_type, op::AutoBroadcastType::PDPD); + } + { + auto lhs = std::make_shared(element::i32, Shape{2, 3, 4, 5}); + auto rhs = std::make_shared(element::i32, Shape{5}); + + const auto autob = op::AutoBroadcastSpec(op::AutoBroadcastType::PDPD, 3); + const auto op = this->make_op(lhs, rhs, autob); + + EXPECT_EQ(op->get_element_type(), element::i32); + EXPECT_EQ(op->get_shape(), (Shape{2, 3, 4, 5})); + EXPECT_EQ(op->get_autob().m_type, op::AutoBroadcastType::PDPD); + } + { + auto lhs = std::make_shared(element::i32, Shape{2, 3, 4, 5}); + auto rhs = std::make_shared(element::i32, Shape{1, 3}); + + const auto autob = op::AutoBroadcastSpec(op::AutoBroadcastType::PDPD, 0); + const auto op = this->make_op(lhs, rhs, autob); + + EXPECT_EQ(op->get_element_type(), element::i32); + EXPECT_EQ(op->get_shape(), (Shape{2, 3, 4, 5})); + EXPECT_EQ(op->get_autob().m_type, op::AutoBroadcastType::PDPD); + } + { + auto lhs = std::make_shared(element::i32, Shape{2, 3, 4, 5}); + auto rhs = std::make_shared(element::i32, Shape{3, 1, 5}); + + const auto autob = op::AutoBroadcastSpec(op::AutoBroadcastType::PDPD, 1); + const auto op = this->make_op(lhs, rhs, autob); + + EXPECT_EQ(op->get_element_type(), element::i32); + EXPECT_EQ(op->get_shape(), (Shape{2, 3, 4, 5})); + EXPECT_EQ(op->get_autob().m_type, op::AutoBroadcastType::PDPD); + } +} + +TYPED_TEST_P(BitwiseOperator, static_shape_inference_4D_x_4D_pdpd_broadcast) { + { + auto lhs = std::make_shared(element::i32, Shape{8, 1, 6, 5}); + auto rhs = std::make_shared(element::i32, Shape{8, 1, 6, 5}); + + const auto autob = op::AutoBroadcastSpec(op::AutoBroadcastType::PDPD); + const auto op = this->make_op(lhs, rhs, autob); + + EXPECT_EQ(op->get_element_type(), element::i32); + EXPECT_EQ(op->get_shape(), (Shape{8, 1, 6, 5})); + EXPECT_EQ(op->get_autob().m_type, op::AutoBroadcastType::PDPD); + } + { + auto lhs = std::make_shared(element::i32, Shape{8, 7, 6, 5}); + auto rhs = std::make_shared(element::i32, Shape{8, 1, 6, 5}); + + const auto autob = op::AutoBroadcastSpec(op::AutoBroadcastType::PDPD); + const auto op = this->make_op(lhs, rhs, autob); + + EXPECT_EQ(op->get_element_type(), element::i32); + EXPECT_EQ(op->get_shape(), (Shape{8, 7, 6, 5})); + EXPECT_EQ(op->get_autob().m_type, op::AutoBroadcastType::PDPD); + } +} + +TYPED_TEST_P(BitwiseOperator, static_shape_inference_4D_x_3D_ax_default_pdpd_broadcast) { + auto lhs = std::make_shared(element::i32, Shape{8, 7, 6, 5}); + auto rhs = std::make_shared(element::i32, Shape{7, 1, 5}); + + const auto op = this->make_op(lhs, rhs, op::AutoBroadcastType::PDPD); + + EXPECT_EQ(op->get_element_type(), element::i32); + EXPECT_EQ(op->get_shape(), (Shape{8, 7, 6, 5})); + EXPECT_EQ(op->get_autob().m_type, op::AutoBroadcastType::PDPD); +} + +TYPED_TEST_P(BitwiseOperator, incompatible_element_types_f32) { + auto lhs = std::make_shared(element::f32, Shape{2, 2, 3, 3}); + auto rhs = 
std::make_shared(element::f32, Shape{2, 2, 3, 3}); + + OV_EXPECT_THROW(std::ignore = this->make_op(lhs, rhs), + NodeValidationFailure, + HasSubstr("The element type of the input tensor must be integer or boolean.")); +} + +TYPED_TEST_P(BitwiseOperator, shape_inference_1D_x_1D_incompatible) { + auto lhs = std::make_shared(element::i32, Shape{3}); + auto rhs = std::make_shared(element::i32, Shape{4}); + + ASSERT_THROW(const auto unused = this->make_op(lhs, rhs), NodeValidationFailure); +} + +TYPED_TEST_P(BitwiseOperator, shape_inference_3D_x_3D_incompatible) { + auto lhs = std::make_shared(element::i32, Shape{3, 5, 6}); + auto rhs = std::make_shared(element::i32, Shape{4, 10, 12}); + + ASSERT_THROW(const auto unused = this->make_op(lhs, rhs), NodeValidationFailure); +} + +TYPED_TEST_P(BitwiseOperator, shape_inference_5D_x_5D_incompatible) { + auto lhs = std::make_shared(element::i32, Shape{389, 112, 12}); + auto rhs = std::make_shared(element::i32, Shape{389, 112, 19}); + + ASSERT_THROW(const auto unused = this->make_op(lhs, rhs), NodeValidationFailure); +} + +TYPED_TEST_P(BitwiseOperator, shape_inference_axis_less_than_negative_1_pdpd_incompatible) { + auto lhs = std::make_shared(element::i32, Shape{2, 3, 4, 5}); + auto rhs = std::make_shared(element::i32, Shape{3, 1}); + + const auto autob = op::AutoBroadcastSpec(op::AutoBroadcastType::PDPD, -2); + + ASSERT_THROW(const auto unused = this->make_op(lhs, rhs, autob), NodeValidationFailure); +} + +TYPED_TEST_P(BitwiseOperator, shape_inference_dst_smaller_than_src_pdpd_broadcast) { + auto lhs = std::make_shared(element::i32, Shape{2, 3, 4, 1}); + auto rhs = std::make_shared(element::i32, Shape{2, 3, 4, 5}); + + const auto autob = op::AutoBroadcastSpec(op::AutoBroadcastType::PDPD); + + ASSERT_THROW(const auto unused = this->make_op(lhs, rhs, autob), NodeValidationFailure); +} + +TYPED_TEST_P(BitwiseOperator, fully_dynamic_shape_broadcast_numpy) { + auto param = std::make_shared(element::i32, PartialShape::dynamic()); + const auto autob = op::AutoBroadcastSpec(op::AutoBroadcastType::NUMPY); + + const auto op = this->make_op(param, param, autob); + EXPECT_EQ(op->get_element_type(), element::i32); + EXPECT_EQ(op->get_output_partial_shape(0), PartialShape::dynamic()); +} + +TYPED_TEST_P(BitwiseOperator, fully_dynamic_shape_broadcast_none) { + auto param = std::make_shared(element::i32, PartialShape::dynamic()); + const auto autob = op::AutoBroadcastSpec(op::AutoBroadcastType::NONE); + + const auto op = this->make_op(param, param, autob); + EXPECT_EQ(op->get_element_type(), element::i32); + EXPECT_EQ(op->get_output_partial_shape(0), PartialShape::dynamic()); +} + +TYPED_TEST_P(BitwiseOperator, fully_dynamic_shape_broadcast_pdpd) { + auto param = std::make_shared(element::i32, PartialShape::dynamic()); + const auto autob = op::AutoBroadcastSpec(op::AutoBroadcastType::PDPD); + + const auto op = this->make_op(param, param, autob); + EXPECT_EQ(op->get_element_type(), element::i32); + EXPECT_EQ(op->get_output_partial_shape(0), PartialShape::dynamic()); +} + +TYPED_TEST_P(BitwiseOperator, dynamic_shape_3D) { + Dimension dynamic = Dimension::dynamic(); + auto lhs = std::make_shared(element::i32, PartialShape{dynamic, dynamic, 6}); + auto rhs = std::make_shared(element::i32, PartialShape{dynamic, dynamic, 6}); + + const auto op = this->make_op(lhs, rhs); + + EXPECT_EQ(op->get_element_type(), element::i32); + EXPECT_EQ(op->get_output_partial_shape(0), (PartialShape{dynamic, dynamic, 6})); +} + +TYPED_TEST_P(BitwiseOperator, dynamic_shape_5D) { + Dimension 
dynamic = Dimension::dynamic(); + auto lhs = std::make_shared(element::i32, PartialShape{dynamic, 4, dynamic, dynamic, 6}); + auto rhs = std::make_shared(element::i32, PartialShape{dynamic, 4, dynamic, dynamic, 6}); + + const auto op = this->make_op(lhs, rhs); + + EXPECT_EQ(op->get_element_type(), element::i32); + EXPECT_EQ(op->get_output_partial_shape(0), (PartialShape{dynamic, 4, dynamic, dynamic, 6})); +} + +TYPED_TEST_P(BitwiseOperator, dynamic_shape_intervals_broadcast_none) { + auto lhs = std::make_shared(element::i32, PartialShape{{1, 3}, {2, 7}, {6, -1}, {-1, 6}, -1, 8}); + auto rhs = std::make_shared(element::i32, PartialShape{{1, 3}, {2, 7}, {6, -1}, {-1, 6}, -1, 8}); + + const auto op = this->make_op(lhs, rhs, op::AutoBroadcastType::NONE); + + EXPECT_EQ(op->get_element_type(), element::i32); + EXPECT_EQ(op->get_output_partial_shape(0), (PartialShape{{1, 3}, {2, 7}, {6, -1}, {-1, 6}, -1, 8})); +} + +TYPED_TEST_P(BitwiseOperator, dynamic_shape_intervals_equal_rank_broadcast_numpy) { + // Equal rank + auto lhs = std::make_shared(element::i32, PartialShape{{1, 3}, {1, 3}, {1, 3}, {4, 8}, -1, 1, -1, 1, 3}); + auto rhs = std::make_shared(element::i32, PartialShape{{1, 3}, {2, 7}, -1, 1, {1, 3}, {4, 8}, -1, 1, 3}); + + const auto op = this->make_op(lhs, rhs); + + EXPECT_EQ(op->get_element_type(), element::i32); + EXPECT_EQ(op->get_output_partial_shape(0), (PartialShape{{1, 3}, {2, 7}, -1, {4, 8}, -1, {4, 8}, -1, 1, 3})); +} + +TYPED_TEST_P(BitwiseOperator, dynamic_shape_intervals_a_rank_smaller_broadcast_numpy) { + // `lhs` rank smaller + auto lhs = std::make_shared(element::i32, PartialShape{{1, 3}, {4, 8}, -1, 1, -1, 1, 3}); + auto rhs = std::make_shared(element::i32, PartialShape{{1, 3}, {2, 7}, -1, 1, {1, 3}, {4, 8}, -1, 1, 3}); + + const auto op = this->make_op(lhs, rhs); + + EXPECT_EQ(op->get_element_type(), element::i32); + EXPECT_EQ(op->get_output_partial_shape(0), (PartialShape{{1, 3}, {2, 7}, -1, {4, 8}, -1, {4, 8}, -1, 1, 3})); +} + +TYPED_TEST_P(BitwiseOperator, dynamic_shape_intervals_b_rank_smaller_broadcast_numpy) { + // `rhs` rank smaller + auto lhs = std::make_shared(element::i32, PartialShape{{1, 3}, {2, 7}, -1, 1, {1, 3}, {4, 8}, -1, 1, 3}); + auto rhs = std::make_shared(element::i32, PartialShape{{1, 3}, {4, 8}, -1, 1, -1, 1, 3}); + + const auto op = this->make_op(lhs, rhs); + + EXPECT_EQ(op->get_element_type(), element::i32); + EXPECT_EQ(op->get_output_partial_shape(0), (PartialShape{{1, 3}, {2, 7}, -1, {4, 8}, -1, {4, 8}, -1, 1, 3})); +} + +TYPED_TEST_P(BitwiseOperator, dynamic_shape_intervals_broadcast_pdpd) { + { // Equal rank + auto lhs = std::make_shared(element::i32, PartialShape{{1, 3}, {2, 7}, {1, 6}, {6, -1}, -1, 8}); + auto rhs = std::make_shared(element::i32, PartialShape{{1, 3}, {2, 7}, 1, 1, -1, 8}); + + const auto op = this->make_op(lhs, rhs, op::AutoBroadcastType::PDPD); + + EXPECT_EQ(op->get_element_type(), element::i32); + EXPECT_EQ(op->get_output_partial_shape(0), (PartialShape{{1, 3}, {2, 7}, {1, 6}, {6, -1}, -1, 8})); + } + { // `lhs` rank smaller + auto lhs = + std::make_shared(element::i32, PartialShape{{1, 3}, {1, 3}, {1, 3}, {4, 8}, -1, 1, -1, 1, 3}); + auto rhs = + std::make_shared(element::i32, PartialShape{{1, 3}, {2, 7}, -1, 1, {1, 3}, {4, 8}, -1, 1, 3}); + + const auto autob = op::AutoBroadcastSpec(op::AutoBroadcastType::PDPD, 0); + const auto op = this->make_op(lhs, rhs, autob); + + EXPECT_EQ(op->get_element_type(), element::i32); + EXPECT_EQ(op->get_output_partial_shape(0), (PartialShape{{1, 3}, {2, 7}, -1, {4, 8}, -1, {4, 8}, -1, 1, 
3})); + } + { // `rhs` rank smaller + auto lhs = + std::make_shared(element::i32, PartialShape{{1, 3}, {2, 7}, -1, 1, {1, 3}, {4, 8}, -1, 1, 3}); + auto rhs = std::make_shared(element::i32, PartialShape{{1, 3}, {4, 8}, -1, 1, -1, 1, 3}); + + const auto op = this->make_op(lhs, rhs); + + EXPECT_EQ(op->get_element_type(), element::i32); + EXPECT_EQ(op->get_output_partial_shape(0), (PartialShape{{1, 3}, {2, 7}, -1, {4, 8}, -1, {4, 8}, -1, 1, 3})); + } +} + +TYPED_TEST_P(BitwiseOperator, labels_a_dynamic_mixed_dims_broadcast_numpy) { + // All dimensions of lhs have labels, rhs without labels + PartialShape pshape_lhs{{-1}, {3}, {1}, {2, 128}}; + PartialShape pshape_rhs{{-1}, {3}, {2, 224}, {1}}; + + PartialShape expected_shape = {-1, 3, {2, 224}, {2, 128}}; + + set_shape_labels(pshape_lhs, {10, 11, 12, 13}); + set_shape_labels(expected_shape, {10, 11, 0, 13}); + + auto lhs = std::make_shared(element::i32, pshape_lhs); + auto rhs = std::make_shared(element::i32, pshape_rhs); + + const auto op = this->make_op(lhs, rhs); + + const auto out_shape = op->get_output_partial_shape(0); + + EXPECT_EQ(out_shape, expected_shape); + EXPECT_EQ(get_shape_labels(out_shape), get_shape_labels(expected_shape)); +} + +TYPED_TEST_P(BitwiseOperator, labels_b_dynamic_mixed_dims_broadcast_numpy) { + // All dimensions of rhs have labels, lhs without labels + PartialShape pshape_lhs{{-1}, {3}, {1}, {2, 128}}; + PartialShape pshape_rhs{{-1}, {3}, {2, 224}, {1}}; + + PartialShape expected_shape = {-1, 3, {2, 224}, {2, 128}}; + + set_shape_labels(pshape_rhs, {20, 21, 22, 23}); + set_shape_labels(expected_shape, {20, 21, 22, 0}); + + auto lhs = std::make_shared(element::i32, pshape_lhs); + auto rhs = std::make_shared(element::i32, pshape_rhs); + + const auto op = this->make_op(lhs, rhs); + + const auto out_shape = op->get_output_partial_shape(0); + + EXPECT_EQ(out_shape, expected_shape); + EXPECT_EQ(get_shape_labels(out_shape), get_shape_labels(expected_shape)); +} + +TYPED_TEST_P(BitwiseOperator, labels_different_interval_mixed_dims_broadcast_numpy) { + // Both params have dimensions with different labels + PartialShape pshape_lhs{{-1}, {3}, {1}, {2, 128}}; + PartialShape pshape_rhs{{-1}, {3}, {2, 224}, {1}}; + + PartialShape expected_shape = {-1, 3, {2, 224}, {2, 128}}; + + set_shape_labels(pshape_lhs, {10, 11, 12, 13}); + set_shape_labels(pshape_rhs, {20, 21, 22, 23}); + set_shape_labels(expected_shape, {0, 21, 22, 13}); + + auto lhs = std::make_shared(element::i32, pshape_lhs); + auto rhs = std::make_shared(element::i32, pshape_rhs); + + const auto op = this->make_op(lhs, rhs); + + const auto out_shape = op->get_output_partial_shape(0); + + EXPECT_EQ(out_shape, expected_shape); + EXPECT_EQ(get_shape_labels(out_shape), get_shape_labels(expected_shape)); +} + +TYPED_TEST_P(BitwiseOperator, labels_different_interval_b_and_fully_dyn_a_broadcast_numpy) { + // Both params have dimension labels, output has label rhs + Dimension dim_0_lhs = {-1}; + Dimension dim_0_rhs = {2, 4}; + + DimensionTracker::set_label(dim_0_lhs, 10); + DimensionTracker::set_label(dim_0_rhs, 20); + + PartialShape pshape_lhs = {dim_0_lhs, 3, 224, 1}, pshape_rhs = {dim_0_rhs, 3, 1, 224}; + PartialShape expected_shape = {{2, 4}, 3, 224, 224}; + TensorLabel expected_labels{20, 0, 0, 0}; + + auto lhs = std::make_shared(element::i32, pshape_lhs); + auto rhs = std::make_shared(element::i32, pshape_rhs); + + const auto op = this->make_op(lhs, rhs); + + const auto out_shape = op->get_output_partial_shape(0); + + EXPECT_EQ(out_shape, expected_shape); + 
EXPECT_EQ(get_shape_labels(out_shape), expected_labels); +} + +TYPED_TEST_P(BitwiseOperator, labels_different_interval_a_and_fully_dyn_b_broadcast_numpy) { + // Both params have dimension labels, output has label lhs + Dimension dim_0_lhs = {2, 4}; + Dimension dim_0_rhs = {-1}; + + DimensionTracker::set_label(dim_0_lhs, 10); + DimensionTracker::set_label(dim_0_rhs, 20); + + PartialShape pshape_lhs = {dim_0_lhs, 3, 224, 1}, pshape_rhs = {dim_0_rhs, 3, 1, 224}; + PartialShape expected_shape = {{2, 4}, 3, 224, 224}; + TensorLabel expected_labels{10, 0, 0, 0}; + + auto lhs = std::make_shared(element::i32, pshape_lhs); + auto rhs = std::make_shared(element::i32, pshape_rhs); + + const auto op = this->make_op(lhs, rhs); + + const auto out_shape = op->get_output_partial_shape(0); + + EXPECT_EQ(out_shape, expected_shape); + EXPECT_EQ(get_shape_labels(out_shape), expected_labels); +} + +TYPED_TEST_P(BitwiseOperator, labels_equal_interval_dims_without_one_broadcast_numpy) { + // Both params have dynamic interval dimension the same labels + PartialShape pshape_lhs{{2, 4}, {8, 16}, {8, 16}, {8, 16}}; + PartialShape pshape_rhs{{2, 4}, {4, 12}, {10, 12}, {16, 24}}; + + PartialShape expected_shape = {{2, 4}, {8, 12}, {10, 12}, 16}; + + set_shape_labels(pshape_lhs, {10, 11, 12, 13}); + set_shape_labels(pshape_rhs, {10, 11, 12, 13}); + set_shape_labels(expected_shape, {10, 11, 12, 13}); + + auto lhs = std::make_shared(element::i32, pshape_lhs); + auto rhs = std::make_shared(element::i32, pshape_rhs); + + const auto op = this->make_op(lhs, rhs); + + const auto out_shape = op->get_output_partial_shape(0); + + EXPECT_EQ(out_shape, expected_shape); + EXPECT_EQ(get_shape_labels(out_shape), get_shape_labels(expected_shape)); +} + +TYPED_TEST_P(BitwiseOperator, labels_different_interval_dims_without_one_broadcast_numpy) { + // Both params have dynamic interval dimension different labels + PartialShape pshape_lhs{{2, 4}, {8, 16}, {8, 16}, {8, 16}}; + PartialShape pshape_rhs{{2, 4}, {4, 12}, {10, 12}, {16, 24}}; + + PartialShape expected_shape = {{2, 4}, {8, 12}, {10, 12}, 16}; + TensorLabel expected_labels{20, 21, 22, 23}; + + set_shape_labels(pshape_lhs, {10, 11, 12, 13}); + set_shape_labels(pshape_rhs, {20, 21, 22, 23}); + + auto lhs = std::make_shared(element::i32, pshape_lhs); + auto rhs = std::make_shared(element::i32, pshape_rhs); + + const auto op = this->make_op(lhs, rhs); + + const auto out_shape = op->get_output_partial_shape(0); + + EXPECT_EQ(out_shape, expected_shape); + EXPECT_EQ(get_shape_labels(out_shape), expected_labels); +} + +TYPED_TEST_P(BitwiseOperator, labels_different_interval_batch_without_one_equivalence_table_broadcast_numpy) { + // Both params have dynamic interval dimension different labels, use table of equivalence + auto table_of_equivalence = std::make_shared(); + DimensionTracker dim_tracker(table_of_equivalence); + + Dimension dim_0_lhs = {2, 4}; + Dimension dim_0_rhs = {2, 4}; + + dim_tracker.set_up_for_tracking(dim_0_lhs, 10); + dim_tracker.set_up_for_tracking(dim_0_rhs, 20); + + PartialShape pshape_lhs = {dim_0_lhs, 3, 224, 1}, pshape_rhs = {dim_0_rhs, 3, 1, 224}; + + auto lhs = std::make_shared(element::i32, pshape_lhs); + auto rhs = std::make_shared(element::i32, pshape_rhs); + + const auto op = this->make_op(lhs, rhs); + + const auto out_shape = op->get_output_partial_shape(0); + + PartialShape expected_shape = {{2, 4}, 3, 224, 224}; + TensorLabel expected_labels{20, 0, 0, 0}; + + auto eq_table = table_of_equivalence->get_equivalence_table(); + 
EXPECT_EQ(*eq_table[DimensionTracker::get_label(dim_0_lhs)], std::set({10, 20})); + EXPECT_EQ(*eq_table[DimensionTracker::get_label(dim_0_rhs)], std::set({10, 20})); + + EXPECT_EQ(out_shape, expected_shape); + EXPECT_EQ(get_shape_labels(out_shape), expected_labels); +} + +TYPED_TEST_P(BitwiseOperator, labels_different_fully_dynamic_batch_broadcast_numpy) { + // Both params have fully dynamic dimension and different labels + Dimension dim_0_lhs = {-1}; + Dimension dim_0_rhs = {-1}; + + DimensionTracker::set_label(dim_0_lhs, 10); + DimensionTracker::set_label(dim_0_rhs, 20); + + PartialShape pshape_lhs = {dim_0_lhs, 3, 224, 1}, pshape_rhs = {dim_0_rhs, 3, 1, 224}; + PartialShape expected_shape = {-1, 3, 224, 224}; + TensorLabel expected_labels{0, 0, 0, 0}; + + auto lhs = std::make_shared(element::i32, pshape_lhs); + auto rhs = std::make_shared(element::i32, pshape_rhs); + + const auto op = this->make_op(lhs, rhs); + + const auto out_shape = op->get_output_partial_shape(0); + + EXPECT_EQ(out_shape, expected_shape); + EXPECT_EQ(get_shape_labels(out_shape), expected_labels); +} + +TYPED_TEST_P(BitwiseOperator, labels_equal_fully_dynamic_batch_broadcast_numpy) { + // Both params have fully dynamic dimension and the same labels + Dimension dim_0_lhs = {-1}; + Dimension dim_0_rhs = {-1}; + + DimensionTracker::set_label(dim_0_lhs, 10); + DimensionTracker::set_label(dim_0_rhs, 10); + + PartialShape pshape_lhs = {dim_0_lhs, 3, 224, 1}, pshape_rhs = {dim_0_rhs, 3, 1, 224}; + PartialShape expected_shape = {-1, 3, 224, 224}; + TensorLabel expected_labels{10, 0, 0, 0}; + + auto lhs = std::make_shared(element::i32, pshape_lhs); + auto rhs = std::make_shared(element::i32, pshape_rhs); + + const auto op = this->make_op(lhs, rhs); + + const auto out_shape = op->get_output_partial_shape(0); + + EXPECT_EQ(out_shape, expected_shape); + EXPECT_EQ(get_shape_labels(out_shape), expected_labels); +} + +TYPED_TEST_P(BitwiseOperator, labels_dyn_batch_a_broadcast_numpy) { + Dimension dim_0_lhs = -1; + DimensionTracker::set_label(dim_0_lhs, 10); + PartialShape pshape_lhs = {dim_0_lhs, 3, 224, 224}, pshape_rhs = {1, 3, 1, 1}; + PartialShape expected_shape{dim_0_lhs, 3, 224, 224}; + + TensorLabel expected_labels{10, 0, 0, 0}; + + auto lhs = std::make_shared(element::i64, pshape_lhs); + auto rhs = std::make_shared(element::i64, pshape_rhs); + + const auto op = this->make_op(lhs, rhs); + + const auto out_shape = op->get_output_partial_shape(0); + + EXPECT_EQ(out_shape, expected_shape); + EXPECT_EQ(get_shape_labels(out_shape), expected_labels); +} + +TYPED_TEST_P(BitwiseOperator, labels_dyn_batch_b_broadcast_numpy) { + Dimension dim_0_rhs = -1; + DimensionTracker::set_label(dim_0_rhs, 10); + PartialShape pshape_rhs = {dim_0_rhs, 3, 224, 224}, pshape_lhs = {1, 3, 1, 1}; + PartialShape expected_shape{dim_0_rhs, 3, 224, 224}; + + TensorLabel expected_labels{10, 0, 0, 0}; + + auto lhs = std::make_shared(element::i64, pshape_lhs); + auto rhs = std::make_shared(element::i64, pshape_rhs); + + const auto op = this->make_op(lhs, rhs); + + const auto out_shape = op->get_output_partial_shape(0); + + EXPECT_EQ(out_shape, expected_shape); + EXPECT_EQ(get_shape_labels(out_shape), expected_labels); +} + +TYPED_TEST_P(BitwiseOperator, labels_dyn_batch_and_higher_rank_a_broadcast_numpy) { + Dimension dim_0_lhs = -1; + DimensionTracker::set_label(dim_0_lhs, 10); + + PartialShape pshape_lhs{dim_0_lhs, -1, -1, -1}; + PartialShape pshape_rhs{3, 1, 1}; + PartialShape expected_shape{dim_0_lhs, 3, -1, -1}; + + TensorLabel expected_labels{10, 0, 0, 
0}; + + auto lhs = std::make_shared(element::i64, pshape_lhs); + auto rhs = std::make_shared(element::i64, pshape_rhs); + + const auto op = this->make_op(lhs, rhs); + + const auto out_shape = op->get_output_partial_shape(0); + + EXPECT_EQ(out_shape, expected_shape); + EXPECT_EQ(get_shape_labels(out_shape), expected_labels); +} + +TYPED_TEST_P(BitwiseOperator, labels_dyn_batch_and_higher_rank_b_broadcast_numpy) { + Dimension dim_0_rhs = -1; + DimensionTracker::set_label(dim_0_rhs, 10); + + PartialShape pshape_lhs{3, 1, 1}; + PartialShape pshape_rhs{dim_0_rhs, -1, -1, -1}; + PartialShape expected_shape{dim_0_rhs, 3, -1, -1}; + + TensorLabel expected_labels{10, 0, 0, 0}; + + auto lhs = std::make_shared(element::i64, pshape_lhs); + auto rhs = std::make_shared(element::i64, pshape_rhs); + + const auto op = this->make_op(lhs, rhs); + + const auto out_shape = op->get_output_partial_shape(0); + + EXPECT_EQ(out_shape, expected_shape); + EXPECT_EQ(get_shape_labels(out_shape), expected_labels); +} + +TYPED_TEST_P(BitwiseOperator, labels_different_static_shape_broadcast_numpy) { + // Static shape, different labels + PartialShape pshape_lhs{{2}, {1}, {224}, {1}}; + PartialShape pshape_rhs{{2}, {1}, {1}, {128}}; + PartialShape expected_shape{2, 1, 224, 128}; + + // Different labels + set_shape_labels(pshape_lhs, {10, 11, 12, 13}); + set_shape_labels(pshape_rhs, {20, 21, 22, 23}); + set_shape_labels(expected_shape, {20, 21, 12, 23}); + + auto lhs = std::make_shared(element::i32, pshape_lhs); + auto rhs = std::make_shared(element::i32, pshape_rhs); + const auto op = this->make_op(lhs, rhs, op::AutoBroadcastType::NUMPY); + + const auto out_shape = op->get_output_partial_shape(0); + + EXPECT_EQ(out_shape, expected_shape); + EXPECT_EQ(get_shape_labels(out_shape), get_shape_labels(expected_shape)); +} + +TYPED_TEST_P(BitwiseOperator, labels_equal_static_shape_broadcast_numpy) { + // Static shape, the same labels + PartialShape pshape_lhs{2, 1, 224, 1}; + PartialShape pshape_rhs{2, 1, 1, 128}; + PartialShape expected_shape{2, 1, 224, 128}; + + // Equal labels + set_shape_labels(pshape_lhs, {30, 31, 32, 33}); + set_shape_labels(pshape_rhs, {30, 31, 32, 33}); + set_shape_labels(expected_shape, {30, 31, 32, 33}); + + auto lhs = std::make_shared(element::i32, pshape_lhs); + auto rhs = std::make_shared(element::i32, pshape_rhs); + const auto op = this->make_op(lhs, rhs, op::AutoBroadcastType::NUMPY); + + const auto out_shape = op->get_output_partial_shape(0); + + EXPECT_EQ(out_shape, expected_shape); + EXPECT_EQ(get_shape_labels(out_shape), get_shape_labels(expected_shape)); +} + +TYPED_TEST_P(BitwiseOperator, labels_different_static_shape_broadcast_none) { + // Static shape + PartialShape pshape_lhs{2, 3, 224, 128}; + PartialShape pshape_rhs{2, 3, 224, 128}; + PartialShape expected_shape{2, 3, 224, 128}; + + // Different labels + set_shape_labels(pshape_lhs, {10, 11, 12, 13}); + set_shape_labels(pshape_rhs, {20, 21, 22, 23}); + set_shape_labels(expected_shape, {20, 21, 22, 23}); + + auto lhs = std::make_shared(element::i32, pshape_lhs); + auto rhs = std::make_shared(element::i32, pshape_rhs); + const auto op = this->make_op(lhs, rhs, op::AutoBroadcastType::NONE); + + auto out_shape = op->get_output_partial_shape(0); + + EXPECT_EQ(out_shape, expected_shape); + EXPECT_EQ(get_shape_labels(out_shape), get_shape_labels(expected_shape)); +} + +TYPED_TEST_P(BitwiseOperator, labels_equal_static_shape_broadcast_none) { + // Static shape + PartialShape pshape_lhs{2, 3, 224, 128}; + PartialShape pshape_rhs{2, 3, 224, 128}; + 
PartialShape expected_shape{2, 3, 224, 128}; + + // Equal labels + set_shape_labels(pshape_lhs, {30, 31, 32, 33}); + set_shape_labels(pshape_rhs, {30, 31, 32, 33}); + set_shape_labels(expected_shape, {30, 31, 32, 33}); + + auto lhs = std::make_shared(element::i32, pshape_lhs); + auto rhs = std::make_shared(element::i32, pshape_rhs); + const auto op = this->make_op(lhs, rhs, op::AutoBroadcastType::NONE); + + auto out_shape = op->get_output_partial_shape(0); + + EXPECT_EQ(out_shape, expected_shape); + EXPECT_EQ(get_shape_labels(out_shape), get_shape_labels(expected_shape)); +} + +TYPED_TEST_P(BitwiseOperator, labels_different_dynamic_shape_broadcast_none) { + // Dynamic shape + PartialShape pshape_lhs{{-1}, {3}, {2, 224}, {1, 128}}; + PartialShape pshape_rhs{{-1}, {3}, {2, 224}, {1, 128}}; + PartialShape expected_shape{-1, 3, {2, 224}, {1, 128}}; + + // Different labels + set_shape_labels(pshape_lhs, {10, 11, 12, 13}); + set_shape_labels(pshape_rhs, {20, 21, 22, 23}); + set_shape_labels(expected_shape, {20, 21, 22, 23}); + + auto lhs = std::make_shared(element::i32, pshape_lhs); + auto rhs = std::make_shared(element::i32, pshape_rhs); + const auto op = this->make_op(lhs, rhs, op::AutoBroadcastType::NONE); + + const auto out_shape = op->get_output_partial_shape(0); + + EXPECT_EQ(out_shape, expected_shape); + EXPECT_EQ(get_shape_labels(out_shape), get_shape_labels(expected_shape)); +} + +TYPED_TEST_P(BitwiseOperator, labels_equal_dynamic_shape_broadcast_none) { + // Dynamic shape + PartialShape pshape_lhs{{-1}, {3}, {2, 224}, {1, 128}}; + PartialShape pshape_rhs{{-1}, {3}, {2, 224}, {1, 128}}; + PartialShape expected_shape{-1, 3, {2, 224}, {1, 128}}; + + // Equal labels + set_shape_labels(pshape_lhs, {30, 31, 32, 33}); + set_shape_labels(pshape_rhs, {30, 31, 32, 33}); + set_shape_labels(expected_shape, {30, 31, 32, 33}); + + auto lhs = std::make_shared(element::i32, pshape_lhs); + auto rhs = std::make_shared(element::i32, pshape_rhs); + const auto op = this->make_op(lhs, rhs, op::AutoBroadcastType::NONE); + + const auto out_shape = op->get_output_partial_shape(0); + + EXPECT_EQ(out_shape, expected_shape); + EXPECT_EQ(get_shape_labels(out_shape), get_shape_labels(expected_shape)); +} + +REGISTER_TYPED_TEST_SUITE_P(BitwiseOperator, + default_constructor_integer, + default_constructor_boolean, + + // Static shapes + shape_inference_2D, + shape_inference_4D, + default_autobroadcast, + no_autobroadcast, + shape_inference_4D_x_scalar_numpy_broadcast, + shape_inference_4D_x_1D_numpy_broadcast, + shape_inference_2D_x_4D_numpy_broadcast, + shape_inference_3D_x_4D_numpy_broadcast, + shape_inference_4D_x_3D_numpy_broadcast, + static_shape_pdpd_doc_examples, + static_shape_inference_4D_x_4D_pdpd_broadcast, + static_shape_inference_4D_x_3D_ax_default_pdpd_broadcast, + incompatible_element_types_f32, + shape_inference_1D_x_1D_incompatible, + shape_inference_3D_x_3D_incompatible, + shape_inference_5D_x_5D_incompatible, + shape_inference_axis_less_than_negative_1_pdpd_incompatible, + shape_inference_dst_smaller_than_src_pdpd_broadcast, + + // Dynamic shapes + fully_dynamic_shape_broadcast_numpy, + fully_dynamic_shape_broadcast_none, + fully_dynamic_shape_broadcast_pdpd, + dynamic_shape_3D, + dynamic_shape_5D, + dynamic_shape_intervals_broadcast_none, + dynamic_shape_intervals_equal_rank_broadcast_numpy, + dynamic_shape_intervals_a_rank_smaller_broadcast_numpy, + dynamic_shape_intervals_b_rank_smaller_broadcast_numpy, + dynamic_shape_intervals_broadcast_pdpd, + + // Dimension labels (static and dynamic) + 
                            labels_a_dynamic_mixed_dims_broadcast_numpy,
+                            labels_b_dynamic_mixed_dims_broadcast_numpy,
+                            labels_different_interval_mixed_dims_broadcast_numpy,
+                            labels_different_interval_b_and_fully_dyn_a_broadcast_numpy,
+                            labels_different_interval_a_and_fully_dyn_b_broadcast_numpy,
+                            labels_equal_interval_dims_without_one_broadcast_numpy,
+                            labels_different_interval_dims_without_one_broadcast_numpy,
+                            labels_different_interval_batch_without_one_equivalence_table_broadcast_numpy,
+                            labels_different_fully_dynamic_batch_broadcast_numpy,
+                            labels_equal_fully_dynamic_batch_broadcast_numpy,
+                            labels_dyn_batch_a_broadcast_numpy,
+                            labels_dyn_batch_b_broadcast_numpy,
+                            labels_dyn_batch_and_higher_rank_a_broadcast_numpy,
+                            labels_dyn_batch_and_higher_rank_b_broadcast_numpy,
+                            labels_different_static_shape_broadcast_numpy,
+                            labels_equal_static_shape_broadcast_numpy,
+                            labels_different_static_shape_broadcast_none,
+                            labels_equal_static_shape_broadcast_none,
+                            labels_different_dynamic_shape_broadcast_none,
+                            labels_equal_dynamic_shape_broadcast_none);
diff --git a/src/core/tests/type_prop/bitwise_or.cpp b/src/core/tests/type_prop/bitwise_or.cpp
new file mode 100644
index 00000000000000..bb41322f1dec49
--- /dev/null
+++ b/src/core/tests/type_prop/bitwise_or.cpp
@@ -0,0 +1,11 @@
+// Copyright (C) 2018-2023 Intel Corporation
+// SPDX-License-Identifier: Apache-2.0
+//
+
+#include "openvino/op/bitwise_or.hpp"
+
+#include "bitwise_ops.hpp"
+
+using Type = ::testing::Types<ov::op::v13::BitwiseOr>;
+
+INSTANTIATE_TYPED_TEST_SUITE_P(type_prop_bitwise_or, BitwiseOperator, Type);
diff --git a/src/core/tests/type_prop/bitwise_xor.cpp b/src/core/tests/type_prop/bitwise_xor.cpp
new file mode 100644
index 00000000000000..00a1a299573882
--- /dev/null
+++ b/src/core/tests/type_prop/bitwise_xor.cpp
@@ -0,0 +1,11 @@
+// Copyright (C) 2018-2023 Intel Corporation
+// SPDX-License-Identifier: Apache-2.0
+//
+
+#include "openvino/op/bitwise_xor.hpp"
+
+#include "bitwise_ops.hpp"
+
+using Type = ::testing::Types<ov::op::v13::BitwiseXor>;
+
+INSTANTIATE_TYPED_TEST_SUITE_P(type_prop_bitwise_xor, BitwiseOperator, Type);
diff --git a/src/core/tests/visitors/op/bitwise_and.cpp b/src/core/tests/visitors/op/bitwise_and.cpp
new file mode 100644
index 00000000000000..35c29762061283
--- /dev/null
+++ b/src/core/tests/visitors/op/bitwise_and.cpp
@@ -0,0 +1,11 @@
+// Copyright (C) 2018-2023 Intel Corporation
+// SPDX-License-Identifier: Apache-2.0
+//
+
+#include "openvino/op/bitwise_and.hpp"
+
+#include "binary_ops.hpp"
+
+using Type = ::testing::Types>;
+
+INSTANTIATE_TYPED_TEST_SUITE_P(visitor_with_auto_broadcast, BinaryOperatorVisitor, Type, BinaryOperatorTypeName);
diff --git a/src/core/tests/visitors/op/bitwise_or.cpp b/src/core/tests/visitors/op/bitwise_or.cpp
new file mode 100644
index 00000000000000..ebcff6e5e932b0
--- /dev/null
+++ b/src/core/tests/visitors/op/bitwise_or.cpp
@@ -0,0 +1,11 @@
+// Copyright (C) 2018-2023 Intel Corporation
+// SPDX-License-Identifier: Apache-2.0
+//
+
+#include "openvino/op/bitwise_or.hpp"
+
+#include "binary_ops.hpp"
+
+using Type = ::testing::Types>;
+
+INSTANTIATE_TYPED_TEST_SUITE_P(visitor_with_auto_broadcast, BinaryOperatorVisitor, Type, BinaryOperatorTypeName);
diff --git a/src/core/tests/visitors/op/bitwise_xor.cpp b/src/core/tests/visitors/op/bitwise_xor.cpp
new file mode 100644
index 00000000000000..ef36fc98ab707d
--- /dev/null
+++ b/src/core/tests/visitors/op/bitwise_xor.cpp
@@ -0,0 +1,11 @@
+// Copyright (C) 2018-2023 Intel Corporation
+// SPDX-License-Identifier: Apache-2.0
+//
+
+#include "openvino/op/bitwise_xor.hpp"
+
+#include
"binary_ops.hpp" + +using Type = ::testing::Types>; + +INSTANTIATE_TYPED_TEST_SUITE_P(visitor_with_auto_broadcast, BinaryOperatorVisitor, Type, BinaryOperatorTypeName); diff --git a/src/plugins/template/backend/ops/bitwise_and.cpp b/src/plugins/template/backend/ops/bitwise_and.cpp new file mode 100644 index 00000000000000..d0e5d05b11360d --- /dev/null +++ b/src/plugins/template/backend/ops/bitwise_and.cpp @@ -0,0 +1,56 @@ +// Copyright (C) 2018-2023 Intel Corporation +// SPDX-License-Identifier: Apache-2.0 +// + +#include "openvino/op/bitwise_and.hpp" + +#include "evaluate_node.hpp" +#include "openvino/reference/bitwise_and.hpp" +#include "utils.hpp" + +using namespace ov; + +template +bool evaluate(const std::shared_ptr& node, + ov::TensorVector& outputs, + const ov::TensorVector& inputs) { + OPENVINO_ASSERT(inputs.size() == 2); + OPENVINO_ASSERT(outputs.size() == 1); + outputs[0].set_shape(infer_broadcast_shape(node.get(), inputs[0].get_shape(), inputs[1].get_shape())); + using T = typename ov::element_type_traits::value_type; + ov::reference::bitwise_and(inputs[0].data(), + inputs[1].data(), + outputs[0].data(), + inputs[0].get_shape(), + inputs[1].get_shape(), + node->get_autob()); + return true; +} + +template <> +bool evaluate_node(std::shared_ptr node, + ov::TensorVector& outputs, + const ov::TensorVector& inputs) { + switch (node->get_input_element_type(0)) { + case element::boolean: + return evaluate(as_type_ptr(node), outputs, inputs); + case element::u8: + return evaluate(as_type_ptr(node), outputs, inputs); + case element::i8: + return evaluate(as_type_ptr(node), outputs, inputs); + case element::u16: + return evaluate(as_type_ptr(node), outputs, inputs); + case element::i16: + return evaluate(as_type_ptr(node), outputs, inputs); + case element::u32: + return evaluate(as_type_ptr(node), outputs, inputs); + case element::i32: + return evaluate(as_type_ptr(node), outputs, inputs); + case element::u64: + return evaluate(as_type_ptr(node), outputs, inputs); + case element::i64: + return evaluate(as_type_ptr(node), outputs, inputs); + default: + OPENVINO_THROW("Unhandled data type ", node->get_element_type().get_type_name(), "in evaluate_node()"); + } +} diff --git a/src/plugins/template/backend/ops/bitwise_not.cpp b/src/plugins/template/backend/ops/bitwise_not.cpp index 91a73fa0dd1c3f..83f1d77750eeec 100644 --- a/src/plugins/template/backend/ops/bitwise_not.cpp +++ b/src/plugins/template/backend/ops/bitwise_not.cpp @@ -19,7 +19,7 @@ bool evaluate(const std::shared_ptr& node, outputs[0].set_shape(inputs[0].get_shape()); using T = typename ov::element_type_traits::value_type; - ov::reference::bitwise_not(inputs[0].data(), outputs[0].data(), shape_size(inputs[0].get_shape())); + ov::reference::bitwise_not(inputs[0].data(), outputs[0].data(), shape_size(inputs[0].get_shape())); return true; } diff --git a/src/plugins/template/backend/ops/bitwise_or.cpp b/src/plugins/template/backend/ops/bitwise_or.cpp new file mode 100644 index 00000000000000..fe163edeccb3a1 --- /dev/null +++ b/src/plugins/template/backend/ops/bitwise_or.cpp @@ -0,0 +1,56 @@ +// Copyright (C) 2018-2023 Intel Corporation +// SPDX-License-Identifier: Apache-2.0 +// + +#include "openvino/op/bitwise_or.hpp" + +#include "evaluate_node.hpp" +#include "openvino/reference/bitwise_or.hpp" +#include "utils.hpp" + +using namespace ov; + +template +bool evaluate(const std::shared_ptr& node, + ov::TensorVector& outputs, + const ov::TensorVector& inputs) { + OPENVINO_ASSERT(inputs.size() == 2); + OPENVINO_ASSERT(outputs.size() == 
1); + outputs[0].set_shape(infer_broadcast_shape(node.get(), inputs[0].get_shape(), inputs[1].get_shape())); + using T = typename ov::element_type_traits::value_type; + ov::reference::bitwise_or(inputs[0].data(), + inputs[1].data(), + outputs[0].data(), + inputs[0].get_shape(), + inputs[1].get_shape(), + node->get_autob()); + return true; +} + +template <> +bool evaluate_node(std::shared_ptr node, + ov::TensorVector& outputs, + const ov::TensorVector& inputs) { + switch (node->get_input_element_type(0)) { + case element::boolean: + return evaluate(as_type_ptr(node), outputs, inputs); + case element::u8: + return evaluate(as_type_ptr(node), outputs, inputs); + case element::i8: + return evaluate(as_type_ptr(node), outputs, inputs); + case element::u16: + return evaluate(as_type_ptr(node), outputs, inputs); + case element::i16: + return evaluate(as_type_ptr(node), outputs, inputs); + case element::u32: + return evaluate(as_type_ptr(node), outputs, inputs); + case element::i32: + return evaluate(as_type_ptr(node), outputs, inputs); + case element::u64: + return evaluate(as_type_ptr(node), outputs, inputs); + case element::i64: + return evaluate(as_type_ptr(node), outputs, inputs); + default: + OPENVINO_THROW("Unhandled data type ", node->get_element_type().get_type_name(), "in evaluate_node()"); + } +} diff --git a/src/plugins/template/backend/ops/bitwise_xor.cpp b/src/plugins/template/backend/ops/bitwise_xor.cpp new file mode 100644 index 00000000000000..3fa98775a05e18 --- /dev/null +++ b/src/plugins/template/backend/ops/bitwise_xor.cpp @@ -0,0 +1,56 @@ +// Copyright (C) 2018-2023 Intel Corporation +// SPDX-License-Identifier: Apache-2.0 +// + +#include "openvino/op/bitwise_xor.hpp" + +#include "evaluate_node.hpp" +#include "openvino/reference/bitwise_xor.hpp" +#include "utils.hpp" + +using namespace ov; + +template +bool evaluate(const std::shared_ptr& node, + ov::TensorVector& outputs, + const ov::TensorVector& inputs) { + OPENVINO_ASSERT(inputs.size() == 2); + OPENVINO_ASSERT(outputs.size() == 1); + outputs[0].set_shape(infer_broadcast_shape(node.get(), inputs[0].get_shape(), inputs[1].get_shape())); + using T = typename ov::element_type_traits::value_type; + ov::reference::bitwise_xor(inputs[0].data(), + inputs[1].data(), + outputs[0].data(), + inputs[0].get_shape(), + inputs[1].get_shape(), + node->get_autob()); + return true; +} + +template <> +bool evaluate_node(std::shared_ptr node, + ov::TensorVector& outputs, + const ov::TensorVector& inputs) { + switch (node->get_input_element_type(0)) { + case element::boolean: + return evaluate(as_type_ptr(node), outputs, inputs); + case element::u8: + return evaluate(as_type_ptr(node), outputs, inputs); + case element::i8: + return evaluate(as_type_ptr(node), outputs, inputs); + case element::u16: + return evaluate(as_type_ptr(node), outputs, inputs); + case element::i16: + return evaluate(as_type_ptr(node), outputs, inputs); + case element::u32: + return evaluate(as_type_ptr(node), outputs, inputs); + case element::i32: + return evaluate(as_type_ptr(node), outputs, inputs); + case element::u64: + return evaluate(as_type_ptr(node), outputs, inputs); + case element::i64: + return evaluate(as_type_ptr(node), outputs, inputs); + default: + OPENVINO_THROW("Unhandled data type ", node->get_element_type().get_type_name(), "in evaluate_node()"); + } +} diff --git a/src/plugins/template/backend/ops/ops_evaluates.hpp b/src/plugins/template/backend/ops/ops_evaluates.hpp index 0b860fcd4b28c6..040fd8334a3527 100644 --- 
a/src/plugins/template/backend/ops/ops_evaluates.hpp +++ b/src/plugins/template/backend/ops/ops_evaluates.hpp @@ -445,10 +445,22 @@ extern template bool evaluate_node(std::shared_ ov::TensorVector& outputs, const ov::TensorVector& inputs); +extern template bool evaluate_node(std::shared_ptr node, + ov::TensorVector& outputs, + const ov::TensorVector& inputs); + extern template bool evaluate_node(std::shared_ptr node, ov::TensorVector& outputs, const ov::TensorVector& inputs); +extern template bool evaluate_node(std::shared_ptr node, + ov::TensorVector& outputs, + const ov::TensorVector& inputs); + +extern template bool evaluate_node(std::shared_ptr node, + ov::TensorVector& outputs, + const ov::TensorVector& inputs); + extern template bool evaluate_node(std::shared_ptr node, ov::TensorVector& outputs, const ov::TensorVector& inputs); diff --git a/src/plugins/template/backend/opset_int_tbl.hpp b/src/plugins/template/backend/opset_int_tbl.hpp index 5ce73cbfa561c4..725fdd0621fc18 100644 --- a/src/plugins/template/backend/opset_int_tbl.hpp +++ b/src/plugins/template/backend/opset_int_tbl.hpp @@ -150,7 +150,10 @@ _OPENVINO_OP_REG(Interpolate, op::v11) _OPENVINO_OP_REG(GroupNormalization, ov::op::v12) +_OPENVINO_OP_REG(BitwiseAnd, ov::op::v13) _OPENVINO_OP_REG(BitwiseNot, ov::op::v13) +_OPENVINO_OP_REG(BitwiseOr, ov::op::v13) +_OPENVINO_OP_REG(BitwiseXor, ov::op::v13) _OPENVINO_OP_REG(NMSRotated, ov::op::v13) _OPENVINO_OP_REG(AUGRUCell, ov::op::internal) diff --git a/src/plugins/template/tests/functional/op_reference/bitwise.hpp b/src/plugins/template/tests/functional/op_reference/bitwise.hpp index 0e8ff7af32ce1b..8feb41378eb2f7 100644 --- a/src/plugins/template/tests/functional/op_reference/bitwise.hpp +++ b/src/plugins/template/tests/functional/op_reference/bitwise.hpp @@ -5,14 +5,17 @@ #include #include "base_reference_test.hpp" +#include "openvino/op/bitwise_and.hpp" #include "openvino/op/bitwise_not.hpp" +#include "openvino/op/bitwise_or.hpp" +#include "openvino/op/bitwise_xor.hpp" using namespace ov; namespace reference_tests { namespace BitwiseOpsRefTestDefinitions { -enum BitwiseTypes { BITWISE_NOT }; +enum BitwiseTypes { BITWISE_AND, BITWISE_NOT, BITWISE_OR, BITWISE_XOR }; struct RefBitwiseParams { BitwiseTypes opType; @@ -30,7 +33,7 @@ class ReferenceBitwiseLayerTest : public testing::TestWithParam CreateFunction(BitwiseTypes op_type, - const std::vector& inputs) { + static std::shared_ptr create_model(BitwiseTypes op_type, + const std::vector& inputs) { ov::ParameterVector params_vec; for (auto& input : inputs) { params_vec.push_back(std::make_shared(input.type, input.shape)); } - std::shared_ptr bitwise_op; + std::shared_ptr bitwise_op = nullptr; switch (op_type) { case BitwiseTypes::BITWISE_NOT: { bitwise_op = std::make_shared(params_vec[0]); break; } - default: { - throw std::runtime_error("Incorrect type of Bitwise operation"); + case BitwiseTypes::BITWISE_AND: { + bitwise_op = std::make_shared(params_vec[0], params_vec[1]); + break; + } + case BitwiseTypes::BITWISE_OR: { + bitwise_op = std::make_shared(params_vec[0], params_vec[1]); + break; + } + case BitwiseTypes::BITWISE_XOR: { + bitwise_op = std::make_shared(params_vec[0], params_vec[1]); + break; } } + EXPECT_TRUE(bitwise_op) << "Incorrect type of Bitwise operation"; return std::make_shared(ov::NodeVector{bitwise_op}, ov::ParameterVector{params_vec}); } }; diff --git a/src/plugins/template/tests/functional/op_reference/bitwise_and.cpp b/src/plugins/template/tests/functional/op_reference/bitwise_and.cpp new file mode 100644 
index 00000000000000..a656f61826a675 --- /dev/null +++ b/src/plugins/template/tests/functional/op_reference/bitwise_and.cpp @@ -0,0 +1,359 @@ +// Copyright (C) 2018-2023 Intel Corporation +// SPDX-License-Identifier: Apache-2.0 +// + +#include "openvino/op/bitwise_and.hpp" + +#include + +#include "bitwise.hpp" + +using namespace ov; + +namespace reference_tests { +namespace BitwiseOpsRefTestDefinitions { +namespace { + +std::vector generateBitwiseParams() { + std::vector bitwiseParams{ + Builder{} + .opType(BitwiseTypes::BITWISE_AND) + .inputs({{{2, 2}, element::boolean, std::vector{true, false, true, false}}, + {{2, 2}, element::boolean, std::vector{true, false, false, true}}}) + .expected({{2, 2}, element::boolean, std::vector{true, false, false, false}}), + Builder{} + .opType(BitwiseTypes::BITWISE_AND) + .inputs( + {{{3, 5}, + element::u8, + std::vector< + uint8_t>{0xff, 0xff, 0xff, 0xff, 0xff, 0x0, 0x0, 0x0, 0x0, 0x1, 0x1, 0x1, 0x8, 0x8, 0xbf}}, + {{3, 5}, + element::u8, + std::vector< + uint8_t>{0x0, 0x1, 0x8, 0xbf, 0x3f, 0x1, 0x8, 0xbf, 0x3f, 0x8, 0xbf, 0x3f, 0xbf, 0x3f, 0x3f}}}) + .expected( + {{3, 5}, + element::u8, + std::vector{0x0, 0x1, 0x8, 0xbf, 0x3f, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x1, 0x8, 0x8, 0x3f}}), + Builder{} + .opType(BitwiseTypes::BITWISE_AND) + .inputs({{{3, 5}, + element::u16, + std::vector{0xffff, + 0xffff, + 0xffff, + 0xffff, + 0xffff, + 0x0, + 0x0, + 0x0, + 0x0, + 0x1, + 0x1, + 0x1, + 0x8, + 0x8, + 0xbfff}}, + {{3, 5}, + element::u16, + std::vector{0x0, + 0x1, + 0x8, + 0xbfff, + 0x3fff, + 0x1, + 0x8, + 0xbfff, + 0x3fff, + 0x8, + 0xbfff, + 0x3fff, + 0xbfff, + 0x3fff, + 0x3fff}}}) + .expected( + {{3, 5}, + element::u16, + std::vector< + uint16_t>{0x0, 0x1, 0x8, 0xbfff, 0x3fff, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x1, 0x8, 0x8, 0x3fff}}), + Builder{} + .opType(BitwiseTypes::BITWISE_AND) + .inputs({{{3, 5}, + element::u32, + std::vector{0xffffffff, + 0xffffffff, + 0xffffffff, + 0xffffffff, + 0xffffffff, + 0x0, + 0x0, + 0x0, + 0x0, + 0x1, + 0x1, + 0x1, + 0x8, + 0x8, + 0xbfffffff}}, + {{3, 5}, + element::u32, + std::vector{0x0, + 0x1, + 0x8, + 0xbfffffff, + 0x3fffffff, + 0x1, + 0x8, + 0xbfffffff, + 0x3fffffff, + 0x8, + 0xbfffffff, + 0x3fffffff, + 0xbfffffff, + 0x3fffffff, + 0x3fffffff}}}) + .expected({{3, 5}, + element::u32, + std::vector{0x0, + 0x1, + 0x8, + 0xbfffffff, + 0x3fffffff, + 0x0, + 0x0, + 0x0, + 0x0, + 0x0, + 0x1, + 0x1, + 0x8, + 0x8, + 0x3fffffff}}), + Builder{} + .opType(BitwiseTypes::BITWISE_AND) + .inputs({{{3, 5}, + element::u64, + std::vector{0x0, + 0x0, + 0x0, + 0x0, + 0x0, + 0x1, + 0x1, + 0x1, + 0x1, + 0x4000000000000000, + 0x4000000000000000, + 0x4000000000000000, + 0xc000000000000000, + 0xc000000000000000, + 0xffffffffffffffff}}, + {{3, 5}, + element::u64, + std::vector{0x1, + 0x4000000000000000, + 0xc000000000000000, + 0xffffffffffffffff, + 0x8, + 0x4000000000000000, + 0xc000000000000000, + 0xffffffffffffffff, + 0x8, + 0xc000000000000000, + 0xffffffffffffffff, + 0x8, + 0xffffffffffffffff, + 0x8, + 0x8}}}) + .expected({{3, 5}, + element::u64, + std::vector{0x0, + 0x0, + 0x0, + 0x0, + 0x0, + 0x0, + 0x0, + 0x1, + 0x0, + 0x4000000000000000, + 0x4000000000000000, + 0x0, + 0xc000000000000000, + 0x0, + 0x8}}), + Builder{} + .opType(BitwiseTypes::BITWISE_AND) + .inputs( + {{{3, 5}, + element::i8, + std::vector< + uint8_t>{0xff, 0xff, 0xff, 0xff, 0xff, 0x0, 0x0, 0x0, 0x0, 0x1, 0x1, 0x1, 0x8, 0x8, 0xbf}}, + {{3, 5}, + element::i8, + std::vector< + uint8_t>{0x0, 0x1, 0x8, 0xbf, 0x3f, 0x1, 0x8, 0xbf, 0x3f, 0x8, 0xbf, 0x3f, 0xbf, 0x3f, 0x3f}}}) + .expected( + {{3, 
5}, + element::i8, + std::vector{0x0, 0x1, 0x8, 0xbf, 0x3f, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x1, 0x8, 0x8, 0x3f}}), + Builder{} + .opType(BitwiseTypes::BITWISE_AND) + .inputs({{{3, 5}, + element::i16, + std::vector{0xffff, + 0xffff, + 0xffff, + 0xffff, + 0xffff, + 0x0, + 0x0, + 0x0, + 0x0, + 0x1, + 0x1, + 0x1, + 0x8, + 0x8, + 0xbfff}}, + {{3, 5}, + element::i16, + std::vector{0x0, + 0x1, + 0x8, + 0xbfff, + 0x3fff, + 0x1, + 0x8, + 0xbfff, + 0x3fff, + 0x8, + 0xbfff, + 0x3fff, + 0xbfff, + 0x3fff, + 0x3fff}}}) + .expected( + {{3, 5}, + element::i16, + std::vector< + uint16_t>{0x0, 0x1, 0x8, 0xbfff, 0x3fff, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x1, 0x8, 0x8, 0x3fff}}), + Builder{} + .opType(BitwiseTypes::BITWISE_AND) + .inputs({{{3, 5}, + element::i32, + std::vector{0xffffffff, + 0xffffffff, + 0xffffffff, + 0xffffffff, + 0xffffffff, + 0x0, + 0x0, + 0x0, + 0x0, + 0x1, + 0x1, + 0x1, + 0x8, + 0x8, + 0xbfffffff}}, + {{3, 5}, + element::i32, + std::vector{0x0, + 0x1, + 0x8, + 0xbfffffff, + 0x3fffffff, + 0x1, + 0x8, + 0xbfffffff, + 0x3fffffff, + 0x8, + 0xbfffffff, + 0x3fffffff, + 0xbfffffff, + 0x3fffffff, + 0x3fffffff}}}) + .expected({{3, 5}, + element::i32, + std::vector{0x0, + 0x1, + 0x8, + 0xbfffffff, + 0x3fffffff, + 0x0, + 0x0, + 0x0, + 0x0, + 0x0, + 0x1, + 0x1, + 0x8, + 0x8, + 0x3fffffff}}), + Builder{} + .opType(BitwiseTypes::BITWISE_AND) + .inputs({{{3, 5}, + element::i64, + std::vector{0x0, + 0x0, + 0x0, + 0x0, + 0x0, + 0x1, + 0x1, + 0x1, + 0x1, + 0x4000000000000000, + 0x4000000000000000, + 0x4000000000000000, + 0xc000000000000000, + 0xc000000000000000, + 0xffffffffffffffff}}, + {{3, 5}, + element::i64, + std::vector{0x1, + 0x4000000000000000, + 0xc000000000000000, + 0xffffffffffffffff, + 0x8, + 0x4000000000000000, + 0xc000000000000000, + 0xffffffffffffffff, + 0x8, + 0xc000000000000000, + 0xffffffffffffffff, + 0x8, + 0xffffffffffffffff, + 0x8, + 0x8}}}) + .expected({{3, 5}, + element::i64, + std::vector{0x0, + 0x0, + 0x0, + 0x0, + 0x0, + 0x0, + 0x0, + 0x1, + 0x0, + 0x4000000000000000, + 0x4000000000000000, + 0x0, + 0xc000000000000000, + 0x0, + 0x8}}), + }; + return bitwiseParams; +} + +INSTANTIATE_TEST_SUITE_P(smoke_BitwiseAnd_With_Hardcoded_Refs, + ReferenceBitwiseLayerTest, + ::testing::ValuesIn(generateBitwiseParams()), + ReferenceBitwiseLayerTest::getTestCaseName); + +} // namespace +} // namespace BitwiseOpsRefTestDefinitions +} // namespace reference_tests diff --git a/src/plugins/template/tests/functional/op_reference/bitwise_or.cpp b/src/plugins/template/tests/functional/op_reference/bitwise_or.cpp new file mode 100644 index 00000000000000..418a2a293f41ba --- /dev/null +++ b/src/plugins/template/tests/functional/op_reference/bitwise_or.cpp @@ -0,0 +1,385 @@ +// Copyright (C) 2018-2023 Intel Corporation +// SPDX-License-Identifier: Apache-2.0 +// + +#include "openvino/op/bitwise_or.hpp" + +#include + +#include "bitwise.hpp" + +using namespace ov; + +namespace reference_tests { +namespace BitwiseOpsRefTestDefinitions { +namespace { + +std::vector generateBitwiseParams() { + std::vector bitwiseParams{ + Builder{} + .opType(BitwiseTypes::BITWISE_OR) + .inputs({{{2, 2}, element::boolean, std::vector{true, false, true, false}}, + {{2, 2}, element::boolean, std::vector{true, false, false, true}}}) + .expected({{2, 2}, element::boolean, std::vector{true, false, true, true}}), + Builder{} + .opType(BitwiseTypes::BITWISE_OR) + .inputs( + {{{3, 5}, + element::u8, + std::vector< + uint8_t>{0xff, 0xff, 0xff, 0xff, 0xff, 0x0, 0x0, 0x0, 0x0, 0x1, 0x1, 0x1, 0x8, 0x8, 0xbf}}, + {{3, 5}, + element::u8, + 
std::vector< + uint8_t>{0x0, 0x1, 0x8, 0xbf, 0x3f, 0x1, 0x8, 0xbf, 0x3f, 0x8, 0xbf, 0x3f, 0xbf, 0x3f, 0x3f}}}) + .expected( + {{3, 5}, + element::u8, + std::vector< + uint8_t>{0xff, 0xff, 0xff, 0xff, 0xff, 0x1, 0x8, 0xbf, 0x3f, 0x9, 0xbf, 0x3f, 0xbf, 0x3f, 0xbf}}), + Builder{} + .opType(BitwiseTypes::BITWISE_OR) + .inputs({{{3, 5}, + element::u16, + std::vector{0xffff, + 0xffff, + 0xffff, + 0xffff, + 0xffff, + 0x0, + 0x0, + 0x0, + 0x0, + 0x1, + 0x1, + 0x1, + 0x8, + 0x8, + 0xbfff}}, + {{3, 5}, + element::u16, + std::vector{0x0, + 0x1, + 0x8, + 0xbfff, + 0x3fff, + 0x1, + 0x8, + 0xbfff, + 0x3fff, + 0x8, + 0xbfff, + 0x3fff, + 0xbfff, + 0x3fff, + 0x3fff}}}) + .expected({{3, 5}, + element::u16, + std::vector{0xffff, + 0xffff, + 0xffff, + 0xffff, + 0xffff, + 0x1, + 0x8, + 0xbfff, + 0x3fff, + 0x9, + 0xbfff, + 0x3fff, + 0xbfff, + 0x3fff, + 0xbfff}}), + Builder{} + .opType(BitwiseTypes::BITWISE_OR) + .inputs({{{3, 5}, + element::u32, + std::vector{0xffffffff, + 0xffffffff, + 0xffffffff, + 0xffffffff, + 0xffffffff, + 0x0, + 0x0, + 0x0, + 0x0, + 0x1, + 0x1, + 0x1, + 0x8, + 0x8, + 0xbfffffff}}, + {{3, 5}, + element::u32, + std::vector{0x0, + 0x1, + 0x8, + 0xbfffffff, + 0x3fffffff, + 0x1, + 0x8, + 0xbfffffff, + 0x3fffffff, + 0x8, + 0xbfffffff, + 0x3fffffff, + 0xbfffffff, + 0x3fffffff, + 0x3fffffff}}}) + .expected({{3, 5}, + element::u32, + std::vector{0xffffffff, + 0xffffffff, + 0xffffffff, + 0xffffffff, + 0xffffffff, + 0x1, + 0x8, + 0xbfffffff, + 0x3fffffff, + 0x9, + 0xbfffffff, + 0x3fffffff, + 0xbfffffff, + 0x3fffffff, + 0xbfffffff}}), + Builder{} + .opType(BitwiseTypes::BITWISE_OR) + .inputs({{{3, 5}, + element::u64, + std::vector{0x0, + 0x0, + 0x0, + 0x0, + 0x0, + 0x1, + 0x1, + 0x1, + 0x1, + 0x4000000000000000, + 0x4000000000000000, + 0x4000000000000000, + 0xc000000000000000, + 0xc000000000000000, + 0xffffffffffffffff}}, + {{3, 5}, + element::u64, + std::vector{0x1, + 0x4000000000000000, + 0xc000000000000000, + 0xffffffffffffffff, + 0x8, + 0x4000000000000000, + 0xc000000000000000, + 0xffffffffffffffff, + 0x8, + 0xc000000000000000, + 0xffffffffffffffff, + 0x8, + 0xffffffffffffffff, + 0x8, + 0x8}}}) + .expected({{3, 5}, + element::u64, + std::vector{0x1, + 0x4000000000000000, + 0xc000000000000000, + 0xffffffffffffffff, + 0x8, + 0x4000000000000001, + 0xc000000000000001, + 0xffffffffffffffff, + 0x9, + 0xc000000000000000, + 0xffffffffffffffff, + 0x4000000000000008, + 0xffffffffffffffff, + 0xc000000000000008, + 0xffffffffffffffff}}), + Builder{} + .opType(BitwiseTypes::BITWISE_OR) + .inputs( + {{{3, 5}, + element::i8, + std::vector< + uint8_t>{0xff, 0xff, 0xff, 0xff, 0xff, 0x0, 0x0, 0x0, 0x0, 0x1, 0x1, 0x1, 0x8, 0x8, 0xbf}}, + {{3, 5}, + element::i8, + std::vector< + uint8_t>{0x0, 0x1, 0x8, 0xbf, 0x3f, 0x1, 0x8, 0xbf, 0x3f, 0x8, 0xbf, 0x3f, 0xbf, 0x3f, 0x3f}}}) + .expected( + {{3, 5}, + element::i8, + std::vector< + uint8_t>{0xff, 0xff, 0xff, 0xff, 0xff, 0x1, 0x8, 0xbf, 0x3f, 0x9, 0xbf, 0x3f, 0xbf, 0x3f, 0xbf}}), + Builder{} + .opType(BitwiseTypes::BITWISE_OR) + .inputs({{{3, 5}, + element::i16, + std::vector{0xffff, + 0xffff, + 0xffff, + 0xffff, + 0xffff, + 0x0, + 0x0, + 0x0, + 0x0, + 0x1, + 0x1, + 0x1, + 0x8, + 0x8, + 0xbfff}}, + {{3, 5}, + element::i16, + std::vector{0x0, + 0x1, + 0x8, + 0xbfff, + 0x3fff, + 0x1, + 0x8, + 0xbfff, + 0x3fff, + 0x8, + 0xbfff, + 0x3fff, + 0xbfff, + 0x3fff, + 0x3fff}}}) + .expected({{3, 5}, + element::i16, + std::vector{0xffff, + 0xffff, + 0xffff, + 0xffff, + 0xffff, + 0x1, + 0x8, + 0xbfff, + 0x3fff, + 0x9, + 0xbfff, + 0x3fff, + 0xbfff, + 0x3fff, + 0xbfff}}), + Builder{} + 
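        // (Illustrative note, not part of the original data.) The entry built below exercises
        // element-wise OR on u16 tensors; each expected value is simply the bitwise OR of the two
        // inputs at that position, e.g. 0x0 | 0x1 = 0x1, 0x1 | 0x8 = 0x9, 0x8 | 0xbfff = 0xbfff,
        // and 0xffff | x = 0xffff for any x.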
.opType(BitwiseTypes::BITWISE_OR) + .inputs({{{3, 5}, + element::i32, + std::vector{0xffffffff, + 0xffffffff, + 0xffffffff, + 0xffffffff, + 0xffffffff, + 0x0, + 0x0, + 0x0, + 0x0, + 0x1, + 0x1, + 0x1, + 0x8, + 0x8, + 0xbfffffff}}, + {{3, 5}, + element::i32, + std::vector{0x0, + 0x1, + 0x8, + 0xbfffffff, + 0x3fffffff, + 0x1, + 0x8, + 0xbfffffff, + 0x3fffffff, + 0x8, + 0xbfffffff, + 0x3fffffff, + 0xbfffffff, + 0x3fffffff, + 0x3fffffff}}}) + .expected({{3, 5}, + element::i32, + std::vector{0xffffffff, + 0xffffffff, + 0xffffffff, + 0xffffffff, + 0xffffffff, + 0x1, + 0x8, + 0xbfffffff, + 0x3fffffff, + 0x9, + 0xbfffffff, + 0x3fffffff, + 0xbfffffff, + 0x3fffffff, + 0xbfffffff}}), + Builder{} + .opType(BitwiseTypes::BITWISE_OR) + .inputs({{{3, 5}, + element::i64, + std::vector{0x0, + 0x0, + 0x0, + 0x0, + 0x0, + 0x1, + 0x1, + 0x1, + 0x1, + 0x4000000000000000, + 0x4000000000000000, + 0x4000000000000000, + 0xc000000000000000, + 0xc000000000000000, + 0xffffffffffffffff}}, + {{3, 5}, + element::i64, + std::vector{0x1, + 0x4000000000000000, + 0xc000000000000000, + 0xffffffffffffffff, + 0x8, + 0x4000000000000000, + 0xc000000000000000, + 0xffffffffffffffff, + 0x8, + 0xc000000000000000, + 0xffffffffffffffff, + 0x8, + 0xffffffffffffffff, + 0x8, + 0x8}}}) + .expected({{3, 5}, + element::i64, + std::vector{0x1, + 0x4000000000000000, + 0xc000000000000000, + 0xffffffffffffffff, + 0x8, + 0x4000000000000001, + 0xc000000000000001, + 0xffffffffffffffff, + 0x9, + 0xc000000000000000, + 0xffffffffffffffff, + 0x4000000000000008, + 0xffffffffffffffff, + 0xc000000000000008, + 0xffffffffffffffff}}), + }; + return bitwiseParams; +} + +INSTANTIATE_TEST_SUITE_P(smoke_BitwiseAnd_With_Hardcoded_Refs, + ReferenceBitwiseLayerTest, + ::testing::ValuesIn(generateBitwiseParams()), + ReferenceBitwiseLayerTest::getTestCaseName); + +} // namespace +} // namespace BitwiseOpsRefTestDefinitions +} // namespace reference_tests diff --git a/src/plugins/template/tests/functional/op_reference/bitwise_xor.cpp b/src/plugins/template/tests/functional/op_reference/bitwise_xor.cpp new file mode 100644 index 00000000000000..49b113220f3211 --- /dev/null +++ b/src/plugins/template/tests/functional/op_reference/bitwise_xor.cpp @@ -0,0 +1,385 @@ +// Copyright (C) 2018-2023 Intel Corporation +// SPDX-License-Identifier: Apache-2.0 +// + +#include "openvino/op/bitwise_xor.hpp" + +#include + +#include "bitwise.hpp" + +using namespace ov; + +namespace reference_tests { +namespace BitwiseOpsRefTestDefinitions { +namespace { + +std::vector generateBitwiseParams() { + std::vector bitwiseParams{ + Builder{} + .opType(BitwiseTypes::BITWISE_XOR) + .inputs({{{2, 2}, element::boolean, std::vector{true, false, true, false}}, + {{2, 2}, element::boolean, std::vector{true, false, false, true}}}) + .expected({{2, 2}, element::boolean, std::vector{false, false, true, true}}), + Builder{} + .opType(BitwiseTypes::BITWISE_XOR) + .inputs( + {{{3, 5}, + element::u8, + std::vector< + uint8_t>{0xff, 0xff, 0xff, 0xff, 0xff, 0x0, 0x0, 0x0, 0x0, 0x1, 0x1, 0x1, 0x8, 0x8, 0xbf}}, + {{3, 5}, + element::u8, + std::vector< + uint8_t>{0x0, 0x1, 0x8, 0xbf, 0x3f, 0x1, 0x8, 0xbf, 0x3f, 0x8, 0xbf, 0x3f, 0xbf, 0x3f, 0x3f}}}) + .expected( + {{3, 5}, + element::u8, + std::vector< + uint8_t>{0xff, 0xfe, 0xf7, 0x40, 0xc0, 0x1, 0x8, 0xbf, 0x3f, 0x9, 0xbe, 0x3e, 0xb7, 0x37, 0x80}}), + Builder{} + .opType(BitwiseTypes::BITWISE_XOR) + .inputs({{{3, 5}, + element::u16, + std::vector{0xffff, + 0xffff, + 0xffff, + 0xffff, + 0xffff, + 0x0, + 0x0, + 0x0, + 0x0, + 0x1, + 0x1, + 0x1, + 0x8, + 0x8, + 
0xbfff}}, + {{3, 5}, + element::u16, + std::vector{0x0, + 0x1, + 0x8, + 0xbfff, + 0x3fff, + 0x1, + 0x8, + 0xbfff, + 0x3fff, + 0x8, + 0xbfff, + 0x3fff, + 0xbfff, + 0x3fff, + 0x3fff}}}) + .expected({{3, 5}, + element::u16, + std::vector{0xffff, + 0xfffe, + 0xfff7, + 0x4000, + 0xc000, + 0x1, + 0x8, + 0xbfff, + 0x3fff, + 0x9, + 0xbffe, + 0x3ffe, + 0xbff7, + 0x3ff7, + 0x8000}}), + Builder{} + .opType(BitwiseTypes::BITWISE_XOR) + .inputs({{{3, 5}, + element::u32, + std::vector{0xffffffff, + 0xffffffff, + 0xffffffff, + 0xffffffff, + 0xffffffff, + 0x0, + 0x0, + 0x0, + 0x0, + 0x1, + 0x1, + 0x1, + 0x8, + 0x8, + 0xbfffffff}}, + {{3, 5}, + element::u32, + std::vector{0x0, + 0x1, + 0x8, + 0xbfffffff, + 0x3fffffff, + 0x1, + 0x8, + 0xbfffffff, + 0x3fffffff, + 0x8, + 0xbfffffff, + 0x3fffffff, + 0xbfffffff, + 0x3fffffff, + 0x3fffffff}}}) + .expected({{3, 5}, + element::u32, + std::vector{0xffffffff, + 0xfffffffe, + 0xfffffff7, + 0x40000000, + 0xc0000000, + 0x1, + 0x8, + 0xbfffffff, + 0x3fffffff, + 0x9, + 0xbffffffe, + 0x3ffffffe, + 0xbffffff7, + 0x3ffffff7, + 0x80000000}}), + Builder{} + .opType(BitwiseTypes::BITWISE_XOR) + .inputs({{{3, 5}, + element::u64, + std::vector{0x0, + 0x0, + 0x0, + 0x0, + 0x0, + 0x1, + 0x1, + 0x1, + 0x1, + 0x4000000000000000, + 0x4000000000000000, + 0x4000000000000000, + 0xc000000000000000, + 0xc000000000000000, + 0xffffffffffffffff}}, + {{3, 5}, + element::u64, + std::vector{0x1, + 0x4000000000000000, + 0xc000000000000000, + 0xffffffffffffffff, + 0x8, + 0x4000000000000000, + 0xc000000000000000, + 0xffffffffffffffff, + 0x8, + 0xc000000000000000, + 0xffffffffffffffff, + 0x8, + 0xffffffffffffffff, + 0x8, + 0x8}}}) + .expected({{3, 5}, + element::u64, + std::vector{0x1, + 0x4000000000000000, + 0xc000000000000000, + 0xffffffffffffffff, + 0x8, + 0x4000000000000001, + 0xc000000000000001, + 0xfffffffffffffffe, + 0x9, + 0x8000000000000000, + 0xbfffffffffffffff, + 0x4000000000000008, + 0x3fffffffffffffff, + 0xc000000000000008, + 0xfffffffffffffff7}}), + Builder{} + .opType(BitwiseTypes::BITWISE_XOR) + .inputs( + {{{3, 5}, + element::i8, + std::vector< + uint8_t>{0xff, 0xff, 0xff, 0xff, 0xff, 0x0, 0x0, 0x0, 0x0, 0x1, 0x1, 0x1, 0x8, 0x8, 0xbf}}, + {{3, 5}, + element::i8, + std::vector< + uint8_t>{0x0, 0x1, 0x8, 0xbf, 0x3f, 0x1, 0x8, 0xbf, 0x3f, 0x8, 0xbf, 0x3f, 0xbf, 0x3f, 0x3f}}}) + .expected( + {{3, 5}, + element::i8, + std::vector< + uint8_t>{0xff, 0xfe, 0xf7, 0x40, 0xc0, 0x1, 0x8, 0xbf, 0x3f, 0x9, 0xbe, 0x3e, 0xb7, 0x37, 0x80}}), + Builder{} + .opType(BitwiseTypes::BITWISE_XOR) + .inputs({{{3, 5}, + element::i16, + std::vector{0xffff, + 0xffff, + 0xffff, + 0xffff, + 0xffff, + 0x0, + 0x0, + 0x0, + 0x0, + 0x1, + 0x1, + 0x1, + 0x8, + 0x8, + 0xbfff}}, + {{3, 5}, + element::i16, + std::vector{0x0, + 0x1, + 0x8, + 0xbfff, + 0x3fff, + 0x1, + 0x8, + 0xbfff, + 0x3fff, + 0x8, + 0xbfff, + 0x3fff, + 0xbfff, + 0x3fff, + 0x3fff}}}) + .expected({{3, 5}, + element::i16, + std::vector{0xffff, + 0xfffe, + 0xfff7, + 0x4000, + 0xc000, + 0x1, + 0x8, + 0xbfff, + 0x3fff, + 0x9, + 0xbffe, + 0x3ffe, + 0xbff7, + 0x3ff7, + 0x8000}}), + Builder{} + .opType(BitwiseTypes::BITWISE_XOR) + .inputs({{{3, 5}, + element::i32, + std::vector{0xffffffff, + 0xffffffff, + 0xffffffff, + 0xffffffff, + 0xffffffff, + 0x0, + 0x0, + 0x0, + 0x0, + 0x1, + 0x1, + 0x1, + 0x8, + 0x8, + 0xbfffffff}}, + {{3, 5}, + element::i32, + std::vector{0x0, + 0x1, + 0x8, + 0xbfffffff, + 0x3fffffff, + 0x1, + 0x8, + 0xbfffffff, + 0x3fffffff, + 0x8, + 0xbfffffff, + 0x3fffffff, + 0xbfffffff, + 0x3fffffff, + 0x3fffffff}}}) + .expected({{3, 5}, + 
element::i32, + std::vector{0xffffffff, + 0xfffffffe, + 0xfffffff7, + 0x40000000, + 0xc0000000, + 0x1, + 0x8, + 0xbfffffff, + 0x3fffffff, + 0x9, + 0xbffffffe, + 0x3ffffffe, + 0xbffffff7, + 0x3ffffff7, + 0x80000000}}), + Builder{} + .opType(BitwiseTypes::BITWISE_XOR) + .inputs({{{3, 5}, + element::i64, + std::vector{0x0, + 0x0, + 0x0, + 0x0, + 0x0, + 0x1, + 0x1, + 0x1, + 0x1, + 0x4000000000000000, + 0x4000000000000000, + 0x4000000000000000, + 0xc000000000000000, + 0xc000000000000000, + 0xffffffffffffffff}}, + {{3, 5}, + element::i64, + std::vector{0x1, + 0x4000000000000000, + 0xc000000000000000, + 0xffffffffffffffff, + 0x8, + 0x4000000000000000, + 0xc000000000000000, + 0xffffffffffffffff, + 0x8, + 0xc000000000000000, + 0xffffffffffffffff, + 0x8, + 0xffffffffffffffff, + 0x8, + 0x8}}}) + .expected({{3, 5}, + element::i64, + std::vector{0x1, + 0x4000000000000000, + 0xc000000000000000, + 0xffffffffffffffff, + 0x8, + 0x4000000000000001, + 0xc000000000000001, + 0xfffffffffffffffe, + 0x9, + 0x8000000000000000, + 0xbfffffffffffffff, + 0x4000000000000008, + 0x3fffffffffffffff, + 0xc000000000000008, + 0xfffffffffffffff7}}), + }; + return bitwiseParams; +} + +INSTANTIATE_TEST_SUITE_P(smoke_BitwiseAnd_With_Hardcoded_Refs, + ReferenceBitwiseLayerTest, + ::testing::ValuesIn(generateBitwiseParams()), + ReferenceBitwiseLayerTest::getTestCaseName); + +} // namespace +} // namespace BitwiseOpsRefTestDefinitions +} // namespace reference_tests diff --git a/src/tests/functional/plugin/conformance/test_runner/op_conformance_runner/src/op_impl_check/single_op_graph.cpp b/src/tests/functional/plugin/conformance/test_runner/op_conformance_runner/src/op_impl_check/single_op_graph.cpp index 2ca8a76f667977..5d36ba62e3ecda 100644 --- a/src/tests/functional/plugin/conformance/test_runner/op_conformance_runner/src/op_impl_check/single_op_graph.cpp +++ b/src/tests/functional/plugin/conformance/test_runner/op_conformance_runner/src/op_impl_check/single_op_graph.cpp @@ -1474,6 +1474,24 @@ std::shared_ptr generateBinaryEltwise(const std::shared_ptr(results, params, "BinaryEltwiseGraph"); } +std::shared_ptr generateBinaryEltwiseBitwise(const std::shared_ptr &node) { + ov::ParameterVector params{std::make_shared(ov::element::i32, ov::PartialShape{1, 2}), + std::make_shared(ov::element::i32, ov::PartialShape{1, 2})}; + + std::shared_ptr eltwise; + if (ov::is_type(node)) { + eltwise = std::make_shared(params[0], params[1]); + } else if (ov::is_type(node)) { + eltwise = std::make_shared(params[0], params[1]); + } else if (ov::is_type(node)) { + eltwise = std::make_shared(params[0], params[1]); + } else { + return nullptr; + } + ov::ResultVector results{std::make_shared(eltwise)}; + return std::make_shared(results, params, "BinaryEltwiseBitwiseGraph"); +} + std::shared_ptr generateBinaryEltwiseComp(const std::shared_ptr &node) { ov::ParameterVector params{std::make_shared(ov::element::f32, ov::Shape{2}), std::make_shared(ov::element::f32, ov::Shape{2})}; @@ -1976,6 +1994,8 @@ std::shared_ptr generateGraph() { return generateScatterNDBase(node); } else if (ov::is_type(node)) { return generateUnaryEltwise(node); + } else if (ov::is_type(node)) { + return generateBinaryEltwiseBitwise(node); } else if (ov::is_type(node)) { return generateBinaryEltwiseComp(node); } else if (ov::is_type(node)) {
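        // (Illustrative note, not part of the original patch.) The branch added just above keys on the
        // ops' shared base class (util::BinaryElementwiseBitwise, per the headers added in this PR), so
        // BitwiseAnd, BitwiseOr and BitwiseXor all reach generateBinaryEltwiseBitwise() without being
        // listed individually in this dispatch chain; the graph it builds is essentially (template
        // arguments restored here as an assumption, since they are stripped in this copy of the patch):
        //   auto a = std::make_shared<ov::op::v0::Parameter>(ov::element::i32, ov::PartialShape{1, 2});
        //   auto b = std::make_shared<ov::op::v0::Parameter>(ov::element::i32, ov::PartialShape{1, 2});
        //   auto op = std::make_shared<ov::op::v13::BitwiseAnd>(a, b);  // or BitwiseOr / BitwiseXor
        //   auto model = std::make_shared<ov::Model>(ov::ResultVector{std::make_shared<ov::op::v0::Result>(op)},
        //                                            ov::ParameterVector{a, b}, "BinaryEltwiseBitwiseGraph");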