diff --git a/docs/ops/logical/LogicalAnd_1.md b/docs/ops/logical/LogicalAnd_1.md
index 4f39b236fefbb7..a653d1abbc29f2 100644
--- a/docs/ops/logical/LogicalAnd_1.md
+++ b/docs/ops/logical/LogicalAnd_1.md
@@ -6,39 +6,40 @@
**Short description**: *LogicalAnd* performs element-wise logical AND operation with two given tensors applying multi-directional broadcast rules.
+**Detailed description**: Before performing logical operation, input tensors *a* and *b* are broadcasted if their shapes are different and the `auto_broadcast` attribute is not `none`. Broadcasting is performed according to the `auto_broadcast` value.
+
+After broadcasting *LogicalAnd* does the following with the input tensors *a* and *b*:
+
+\f[
+o_{i} = a_{i} \wedge b_{i}
+\f]
+
**Attributes**:
* *auto_broadcast*
* **Description**: specifies rules used for auto-broadcasting of input tensors.
* **Range of values**:
- * *none* - no auto-broadcasting is allowed, all input shapes should match
- * *numpy* - numpy broadcasting rules, aligned with ONNX Broadcasting. Description is available in ONNX docs.
+ * *none* - no auto-broadcasting is allowed, all input shapes must match,
+ * *numpy* - numpy broadcasting rules, description is available in [Broadcast Rules For Elementwise Operations](../broadcast_rules.md),
+ * *pdpd* - PaddlePaddle-style implicit broadcasting, description is available in [Broadcast Rules For Elementwise Operations](../broadcast_rules.md).
* **Type**: string
* **Default value**: "numpy"
* **Required**: *no*
**Inputs**
-* **1**: A tensor of type *T*. **Required.**
-* **2**: A tensor of type *T*. **Required.**
+* **1**: A tensor of type *T* and arbitrary shape. **Required.**
+* **2**: A tensor of type *T* and arbitrary shape. **Required.**
**Outputs**
-* **1**: The result of element-wise logical AND operation. A tensor of type boolean.
+* **1**: The result of element-wise *LogicalAnd* operation. A tensor of type boolean.
**Types**
* *T*: boolean type.
-**Detailed description**
-Before performing logical operation, input tensors *a* and *b* are broadcasted if their shapes are different and `auto_broadcast` attributes is not `none`. Broadcasting is performed according to `auto_broadcast` value.
-
-After broadcasting *LogicalAnd* does the following with the input tensors *a* and *b*:
-
-\f[
-o_{i} = a_{i} and b_{i}
-\f]
**Examples**
diff --git a/docs/template_plugin/tests/functional/op_reference/logical_and.cpp b/docs/template_plugin/tests/functional/op_reference/logical_and.cpp
new file mode 100644
index 00000000000000..0313874533e97b
--- /dev/null
+++ b/docs/template_plugin/tests/functional/op_reference/logical_and.cpp
@@ -0,0 +1,83 @@
+// Copyright (C) 2018-2021 Intel Corporation
+// SPDX-License-Identifier: Apache-2.0
+//
+
+#include <gtest/gtest.h>
+
+#include <ie_core.hpp>
+#include <ie_ngraph_utils.hpp>
+#include <ngraph/ngraph.hpp>
+#include <shared_test_classes/base/layer_test_utils.hpp>
+#include <tuple>
+
+#include "base_reference_test.hpp"
+
+using namespace reference_tests;
+using namespace ngraph;
+using namespace InferenceEngine;
+
+
+struct LogicalAndParams {
+    template <typename T>
+    LogicalAndParams(const ngraph::PartialShape& input_shape1, const ngraph::PartialShape& input_shape2,
+                     const std::vector<T>& iValues1, const std::vector<T>& iValues2, const std::vector<T>& oValues)
+ : pshape1(input_shape1), pshape2(input_shape2), inType(ngraph::element::boolean), outType(ngraph::element::boolean),
+ inputData1(CreateBlob(ngraph::element::boolean, iValues1)), inputData2(CreateBlob(ngraph::element::boolean, iValues2)),
+ refData(CreateBlob(ngraph::element::boolean, oValues)) {}
+ ngraph::PartialShape pshape1;
+ ngraph::PartialShape pshape2;
+ ngraph::element::Type inType;
+ ngraph::element::Type outType;
+ InferenceEngine::Blob::Ptr inputData1;
+ InferenceEngine::Blob::Ptr inputData2;
+ InferenceEngine::Blob::Ptr refData;
+};
+
+class ReferenceLogicalAndLayerTest : public testing::TestWithParam<LogicalAndParams>, public CommonReferenceTest {
+public:
+ void SetUp() override {
+ auto params = GetParam();
+ function = CreateFunction(params.pshape1, params.pshape2, params.inType);
+ inputData = {params.inputData1, params.inputData2};
+ refOutData = {params.refData};
+ }
+    static std::string getTestCaseName(const testing::TestParamInfo<LogicalAndParams>& obj) {
+ auto param = obj.param;
+ std::ostringstream result;
+ result << "input_shape1=" << param.pshape1 << "_";
+ result << "input_shape2=" << param.pshape2 << "_";
+ result << "iType=" << param.inType << "_";
+ result << "oType=" << param.outType;
+ return result.str();
+ }
+
+private:
+    static std::shared_ptr<Function> CreateFunction(const PartialShape& input_shape1,
+                                                    const PartialShape& input_shape2, const element::Type& input_type) {
+        const auto in = std::make_shared<op::Parameter>(input_type, input_shape1);
+        const auto in2 = std::make_shared<op::Parameter>(input_type, input_shape2);
+        const auto logical_and = std::make_shared<op::v1::LogicalAnd>(in, in2);
+        return std::make_shared<Function>(NodeVector {logical_and}, ParameterVector {in, in2});
+    }
+};
+
+TEST_P(ReferenceLogicalAndLayerTest, CompareWithHardcodedRefs) {
+ Exec();
+}
+
+INSTANTIATE_TEST_SUITE_P(
+ smoke_LogicalAnd_With_Hardcoded_Refs, ReferenceLogicalAndLayerTest,
+ ::testing::Values(
+        LogicalAndParams(ngraph::PartialShape {2, 2}, ngraph::PartialShape {2, 2},
+                         std::vector<char> {true, false, true, false},
+                         std::vector<char> {false, true, true, false},
+                         std::vector<char> {false, false, true, false}),
+        LogicalAndParams(ngraph::PartialShape {2, 1, 2, 1}, ngraph::PartialShape {1, 1, 2, 1},
+                         std::vector<char> {true, false, true, false},
+                         std::vector<char> {true, false},
+                         std::vector<char> {true, false, true, false}),
+        LogicalAndParams(ngraph::PartialShape {3, 4}, ngraph::PartialShape {3, 4},
+                         std::vector<char> {true, true, true, true, true, false, true, false, false, true, true, true},
+                         std::vector<char> {true, true, true, true, true, false, true, false, false, true, true, false},
+                         std::vector<char> {true, true, true, true, true, false, true, false, false, true, true, false})),
+ ReferenceLogicalAndLayerTest::getTestCaseName);
diff --git a/inference-engine/tests/functional/inference_engine/serialization/single_layer/logical.cpp b/inference-engine/tests/functional/inference_engine/serialization/single_layer/logical.cpp
new file mode 100644
index 00000000000000..aff8081509bdfb
--- /dev/null
+++ b/inference-engine/tests/functional/inference_engine/serialization/single_layer/logical.cpp
@@ -0,0 +1,84 @@
+// Copyright (C) 2018-2021 Intel Corporation
+// SPDX-License-Identifier: Apache-2.0
+//
+
+#include <vector>
+#include "shared_test_classes/single_layer/logical.hpp"
+#include "common_test_utils/test_constants.hpp"
+
+using namespace LayerTestsDefinitions;
+using namespace LayerTestsDefinitions::LogicalParams;
+
+namespace {
+TEST_P(LogicalLayerTest, Serialize) {
+ Serialize();
+}
+
+std::map<std::vector<size_t>, std::vector<std::vector<size_t>>> inputShapes = {
+ {{1}, {{1}, {17}, {1, 1}, {2, 18}, {1, 1, 2}, {2, 2, 3}, {1, 1, 2, 3}}},
+ {{5}, {{1}, {1, 1}, {2, 5}, {1, 1, 1}, {2, 2, 5}}},
+ {{2, 200}, {{1}, {200}, {1, 200}, {2, 200}, {2, 2, 200}}},
+ {{1, 3, 20}, {{20}, {2, 1, 1}}},
+ {{2, 17, 3, 4}, {{4}, {1, 3, 4}, {2, 1, 3, 4}}},
+ {{2, 1, 1, 3, 1}, {{1}, {1, 3, 4}, {2, 1, 3, 4}, {1, 1, 1, 1, 1}}},
+};
+
+std::map<std::vector<size_t>, std::vector<std::vector<size_t>>> inputShapesNot = {
+ {{1}, {}},
+ {{5}, {}},
+ {{2, 200}, {}},
+ {{1, 3, 20}, {}},
+ {{2, 17, 3, 4}, {}},
+ {{2, 1, 1, 3, 1}, {}},
+};
+
+std::vector<InferenceEngine::Precision> inputsPrecisions = {
+ InferenceEngine::Precision::BOOL,
+};
+
+std::vector<ngraph::helpers::LogicalTypes> logicalOpTypes = {
+ ngraph::helpers::LogicalTypes::LOGICAL_AND,
+ ngraph::helpers::LogicalTypes::LOGICAL_OR,
+ ngraph::helpers::LogicalTypes::LOGICAL_XOR,
+};
+
+std::vector<ngraph::helpers::InputLayerType> secondInputTypes = {
+ ngraph::helpers::InputLayerType::CONSTANT,
+ ngraph::helpers::InputLayerType::PARAMETER,
+};
+
+std::vector<InferenceEngine::Precision> netPrecisions = {
+ InferenceEngine::Precision::FP32,
+};
+
+std::map<std::string, std::string> additional_config = {};
+
+const auto LogicalTestParams = ::testing::Combine(
+ ::testing::ValuesIn(LogicalLayerTest::combineShapes(inputShapes)),
+ ::testing::ValuesIn(logicalOpTypes),
+ ::testing::ValuesIn(secondInputTypes),
+ ::testing::ValuesIn(netPrecisions),
+ ::testing::ValuesIn(inputsPrecisions),
+ ::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
+ ::testing::Values(InferenceEngine::Layout::ANY),
+ ::testing::Values(InferenceEngine::Layout::ANY),
+ ::testing::Values(CommonTestUtils::DEVICE_CPU),
+ ::testing::Values(additional_config));
+
+const auto LogicalNotTestParams = ::testing::Combine(
+ ::testing::ValuesIn(LogicalLayerTest::combineShapes(inputShapesNot)),
+ ::testing::Values(ngraph::helpers::LogicalTypes::LOGICAL_NOT),
+ ::testing::Values(ngraph::helpers::InputLayerType::CONSTANT),
+ ::testing::ValuesIn(netPrecisions),
+ ::testing::ValuesIn(inputsPrecisions),
+ ::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
+ ::testing::Values(InferenceEngine::Layout::ANY),
+ ::testing::Values(InferenceEngine::Layout::ANY),
+ ::testing::Values(CommonTestUtils::DEVICE_CPU),
+ ::testing::Values(additional_config));
+
+INSTANTIATE_TEST_SUITE_P(smoke_CompareWithRefs, LogicalLayerTest, LogicalTestParams, LogicalLayerTest::getTestCaseName);
+
+INSTANTIATE_TEST_SUITE_P(smoke_CompareWithRefsNot, LogicalLayerTest, LogicalNotTestParams, LogicalLayerTest::getTestCaseName);
+
+} // namespace
diff --git a/inference-engine/tests/ie_test_utils/functional_test_utils/layer_tests_summary/utils/constants.py b/inference-engine/tests/ie_test_utils/functional_test_utils/layer_tests_summary/utils/constants.py
index b45ed1dc44bb4b..ee68f19184dedc 100644
--- a/inference-engine/tests/ie_test_utils/functional_test_utils/layer_tests_summary/utils/constants.py
+++ b/inference-engine/tests/ie_test_utils/functional_test_utils/layer_tests_summary/utils/constants.py
@@ -57,6 +57,7 @@
'LRN-1',
'LSTMCell-4',
'LSTMSequence-5',
+    'LogicalAnd-1',
'LogSoftmax-5',
'Loop-5',
'MVN-1',
diff --git a/ngraph/core/src/op/and.cpp b/ngraph/core/src/op/and.cpp
index 4c81190083b95c..945b0e1918e77d 100644
--- a/ngraph/core/src/op/and.cpp
+++ b/ngraph/core/src/op/and.cpp
@@ -7,6 +7,8 @@
#include "ngraph/runtime/host_tensor.hpp"
#include "ngraph/runtime/reference/and.hpp"
+#include "ngraph/validation_util.hpp"
+
using namespace std;
using namespace ngraph;
@@ -61,12 +63,6 @@ namespace logand
switch (arg0->get_element_type())
{
NGRAPH_TYPE_CASE(evaluate_logand, boolean, arg0, arg1, out, broadcast_spec);
- NGRAPH_TYPE_CASE(evaluate_logand, i32, arg0, arg1, out, broadcast_spec);
- NGRAPH_TYPE_CASE(evaluate_logand, i64, arg0, arg1, out, broadcast_spec);
- NGRAPH_TYPE_CASE(evaluate_logand, u32, arg0, arg1, out, broadcast_spec);
- NGRAPH_TYPE_CASE(evaluate_logand, u64, arg0, arg1, out, broadcast_spec);
- NGRAPH_TYPE_CASE(evaluate_logand, f16, arg0, arg1, out, broadcast_spec);
- NGRAPH_TYPE_CASE(evaluate_logand, f32, arg0, arg1, out, broadcast_spec);
default: rc = false; break;
}
return rc;
@@ -77,6 +73,7 @@ bool op::v1::LogicalAnd::evaluate(const HostTensorVector& outputs,
const HostTensorVector& inputs) const
{
NGRAPH_OP_SCOPE(v1_LogicalAnd_evaluate);
+ NGRAPH_CHECK(validate_host_tensor_vector(outputs, 1) && validate_host_tensor_vector(inputs, 2));
return logand::evaluate_logand(inputs[0], inputs[1], outputs[0], get_autob());
}
@@ -85,13 +82,7 @@ bool op::v1::LogicalAnd::has_evaluate() const
NGRAPH_OP_SCOPE(v1_LogicalAnd_has_evaluate);
switch (get_input_element_type(0))
{
- case ngraph::element::boolean:
- case ngraph::element::i32:
- case ngraph::element::i64:
- case ngraph::element::u32:
- case ngraph::element::u64:
- case ngraph::element::f16:
- case ngraph::element::f32: return true;
+ case ngraph::element::boolean: return true;
default: break;
}
return false;
diff --git a/ngraph/test/CMakeLists.txt b/ngraph/test/CMakeLists.txt
index a526f19e54ff5b..4c3035dab04b4e 100644
--- a/ngraph/test/CMakeLists.txt
+++ b/ngraph/test/CMakeLists.txt
@@ -152,6 +152,7 @@ set(SRC
type_prop/hswish.cpp
type_prop/idft.cpp
type_prop/interpolate.cpp
+ type_prop/logical_and.cpp
type_prop/lrn.cpp
type_prop/lstm_cell.cpp
type_prop/lstm_sequence.cpp
@@ -271,6 +272,7 @@ set(SRC
visitors/op/less_equal.cpp
visitors/op/less.cpp
visitors/op/log.cpp
+ visitors/op/logical_and.cpp
visitors/op/logical_or.cpp
visitors/op/logical_xor.cpp
visitors/op/lrn.cpp
@@ -452,7 +454,6 @@ set(MULTI_TEST_SRC
backend/interpolate.in.cpp
backend/log.in.cpp
backend/log_softmax.in.cpp
- backend/logical_and.in.cpp
backend/logical_not.in.cpp
backend/logical_or.in.cpp
backend/logical_xor.in.cpp
diff --git a/ngraph/test/backend/logical_and.in.cpp b/ngraph/test/backend/logical_and.in.cpp
deleted file mode 100644
index 20ac74bedb2cd6..00000000000000
--- a/ngraph/test/backend/logical_and.in.cpp
+++ /dev/null
@@ -1,34 +0,0 @@
-// Copyright (C) 2018-2021 Intel Corporation
-// SPDX-License-Identifier: Apache-2.0
-//
-
-#include "gtest/gtest.h"
-#include "ngraph/ngraph.hpp"
-#include "util/engine/test_engines.hpp"
-#include "util/test_case.hpp"
-#include "util/test_control.hpp"
-
-NGRAPH_SUPPRESS_DEPRECATED_START
-
-using namespace std;
-using namespace ngraph;
-
-static string s_manifest = "${MANIFEST}";
-using TestEngine = test::ENGINE_CLASS_NAME(${BACKEND_NAME});
-
-NGRAPH_TEST(${BACKEND_NAME}, logical_and)
-{
- Shape shape{3, 4};
-    auto A = make_shared<op::Parameter>(element::boolean, shape);
-    auto B = make_shared<op::Parameter>(element::boolean, shape);
-    auto f =
-        make_shared<Function>(std::make_shared<op::v1::LogicalAnd>(A, B), ParameterVector{A, B});
-
-    std::vector<char> a{true, true, true, true, true, false, true, false, false, true, true, true};
-    std::vector<char> b{true, true, true, true, true, false, true, false, false, true, true, false};
-
-    auto test_case_1 = test::TestCase<TestEngine>(f);
-    test_case_1.add_multiple_inputs<char>({a, b});
-    test_case_1.add_expected_output<char>(shape, {1., 1., 1., 1., 1., 0., 1., 0., 0., 1., 1., 0.});
- test_case_1.run();
-}
diff --git a/ngraph/test/type_prop/logical_and.cpp b/ngraph/test/type_prop/logical_and.cpp
new file mode 100644
index 00000000000000..2a8699cfbe86a7
--- /dev/null
+++ b/ngraph/test/type_prop/logical_and.cpp
@@ -0,0 +1,72 @@
+// Copyright (C) 2018-2021 Intel Corporation
+// SPDX-License-Identifier: Apache-2.0
+//
+
+#include "gtest/gtest.h"
+#include "ngraph/ngraph.hpp"
+#include "util/type_prop.hpp"
+
+using namespace std;
+using namespace ngraph;
+
+namespace {
+    void incorrect_init(const ngraph::element::Type& type, const std::string& err, const Shape& shape1 = {1, 3, 6}, const Shape& shape2 = {1, 3, 6}) {
+        auto input1 = make_shared<op::Parameter>(type, shape1);
+        auto input2 = make_shared<op::Parameter>(type, shape2);
+ try
+ {
+            auto logical_and = make_shared<op::v1::LogicalAnd>(input1, input2);
+ }
+ catch (const NodeValidationFailure& error)
+ {
+ EXPECT_HAS_SUBSTRING(error.what(), err);
+ }
+ }
+}
+
+TEST(type_prop, logical_and_incorrect_type_f32)
+{
+ incorrect_init(element::f32, "Operands for logical operators must have boolean element type but have element type f32");
+}
+
+TEST(type_prop, logical_and_incorrect_type_f64)
+{
+ incorrect_init(element::f64, "Operands for logical operators must have boolean element type but have element type f64");
+}
+
+TEST(type_prop, logical_and_incorrect_type_i32)
+{
+ incorrect_init(element::i32, "Operands for logical operators must have boolean element type but have element type i32");
+}
+
+TEST(type_prop, logical_and_incorrect_type_i64)
+{
+ incorrect_init(element::i64, "Operands for logical operators must have boolean element type but have element type i64");
+}
+
+TEST(type_prop, logical_and_incorrect_type_u32)
+{
+ incorrect_init(element::u32, "Operands for logical operators must have boolean element type but have element type u32");
+}
+
+TEST(type_prop, logical_and_incorrect_type_u64)
+{
+ incorrect_init(element::u64, "Operands for logical operators must have boolean element type but have element type u64");
+
+}
+
+TEST(type_prop, logical_and_incorrect_shape)
+{
+ incorrect_init(element::boolean, "Argument shapes are inconsistent", Shape {1, 3, 6}, Shape {1, 2, 3});
+}
+
+TEST(type_prop, logical_and_broadcast)
+{
+    auto input1 = make_shared<op::Parameter>(element::boolean, Shape{1, 1, 6});
+    auto input2 = make_shared<op::Parameter>(element::boolean, Shape{1, 3, 1});
+
+    auto logical_and = make_shared<op::v1::LogicalAnd>(input1, input2);
+
+ ASSERT_EQ(logical_and->get_element_type(), element::boolean);
+ ASSERT_EQ(logical_and->get_shape(), (Shape{1, 3, 6}));
+}
diff --git a/ngraph/test/visitors/op/logical_and.cpp b/ngraph/test/visitors/op/logical_and.cpp
new file mode 100644
index 00000000000000..919e89ea9c6126
--- /dev/null
+++ b/ngraph/test/visitors/op/logical_and.cpp
@@ -0,0 +1,13 @@
+// Copyright (C) 2018-2021 Intel Corporation
+// SPDX-License-Identifier: Apache-2.0
+//
+
+#include "binary_ops.hpp"
+#include "ngraph/opsets/opset1.hpp"
+
+using Type = ::testing::Types<BinaryOperatorType<ngraph::opset1::LogicalAnd, ngraph::element::Type_t::boolean>>;
+
+INSTANTIATE_TYPED_TEST_SUITE_P(visitor_with_auto_broadcast,
+ BinaryOperatorVisitor,
+ Type,
+ BinaryOperatorTypeName);