Move all A ops to ov #7336

Merged (35 commits) on Sep 3, 2021

Commits
4388883
Moved ngraph::Node to ov namespace
ilyachur Aug 25, 2021
06303f4
Fixed code style
ilyachur Aug 25, 2021
fc5c23f
Fixed VPU
ilyachur Aug 25, 2021
bd87e41
Fixed GNA
ilyachur Aug 25, 2021
ec2c2e3
Fixed tests
ilyachur Aug 25, 2021
5766e70
Merge remote-tracking branch 'upstream/master' into move_node_to_ov
ilyachur Aug 25, 2021
16076af
Merge remote-tracking branch 'upstream/master' into move_node_to_ov
ilyachur Aug 26, 2021
9f00f52
Added aliases for backward compatibility
ilyachur Aug 26, 2021
250368f
Fix clDNN
ilyachur Aug 26, 2021
85ab414
Try to fix build
ilyachur Aug 26, 2021
6363551
Fixed comment
ilyachur Aug 26, 2021
5f54eb8
Renamed RTTI macros
ilyachur Aug 26, 2021
b7c7803
Merge remote-tracking branch 'upstream/master' into move_node_to_ov
ilyachur Aug 27, 2021
20e8fc9
Merge remote-tracking branch 'upstream/master' into move_node_to_ov
ilyachur Aug 29, 2021
63ccf61
Moved op utils to ov namespace
ilyachur Aug 27, 2021
e63e051
Fixed ngraph library build
ilyachur Aug 27, 2021
ee7564c
Fixed unit-tests
ilyachur Aug 30, 2021
60b6f3b
Merge remote-tracking branch 'upstream/master' into move_op_utils_to_ov
ilyachur Aug 30, 2021
9da3c7b
Merge remote-tracking branch 'upstream/master' into move_op_utils_to_ov
ilyachur Aug 30, 2021
be3ddca
Merge remote-tracking branch 'upstream/master' into move_op_utils_to_ov
ilyachur Aug 31, 2021
7faec8b
Changed src folder
ilyachur Aug 31, 2021
33a0a3b
Fixed recurrent_sequence
ilyachur Aug 31, 2021
ee064fa
Changed low latency
ilyachur Aug 31, 2021
9e8d1cf
Fixed serialize
ilyachur Aug 31, 2021
fa11d19
Merge remote-tracking branch 'upstream/master' into move_op_utils_to_ov
ilyachur Aug 31, 2021
334e6f9
Fixed ieFuncTests
ilyachur Sep 1, 2021
ef98550
Merge remote-tracking branch 'upstream/master' into move_op_utils_to_ov
ilyachur Sep 1, 2021
c282066
Try to fix windows
ilyachur Sep 1, 2021
f32a859
Merge remote-tracking branch 'upstream/master' into move_op_utils_to_ov
ilyachur Sep 1, 2021
0f4ad3e
Remove custom operator<< from tests
ilyachur Sep 1, 2021
1eba99b
Merge remote-tracking branch 'upstream/master' into move_op_utils_to_ov
ilyachur Sep 2, 2021
9a16e78
Merge remote-tracking branch 'upstream/master' into move_op_utils_to_ov
ilyachur Sep 2, 2021
47f2271
Fixed build
ilyachur Sep 2, 2021
5d646ac
Moved operations from A to ov namespace
ilyachur Sep 2, 2021
402be84
Merge remote-tracking branch 'upstream/master' into move_all_a_ops_to_ov
ilyachur Sep 3, 2021
29 changes: 2 additions & 27 deletions ngraph/core/include/ngraph/op/abs.hpp
@@ -7,37 +7,12 @@
#include <memory>

#include "ngraph/op/util/unary_elementwise_arithmetic.hpp"
#include "openvino/op/abs.hpp"

namespace ngraph {
namespace op {
namespace v0 {
/// \brief Elementwise absolute value operation.
///
class NGRAPH_API Abs : public util::UnaryElementwiseArithmetic {
public:
static constexpr NodeTypeInfo type_info{"Abs", 0};
const NodeTypeInfo& get_type_info() const override {
return type_info;
}
/// \brief Constructs an absolute value operation.
Abs() = default;
bool visit_attributes(AttributeVisitor&) override {
return true;
}
/// \brief Constructs an absolute value operation.
///
/// \param arg Output that produces the input tensor.<br>
/// `[d1, ...]`
///
/// Output `[d1, ...]`
///
Abs(const Output<Node>& arg);

std::shared_ptr<Node> clone_with_new_inputs(const OutputVector& new_args) const override;

bool evaluate(const HostTensorVector& outputs, const HostTensorVector& inputs) const override;
bool has_evaluate() const override;
};
using ov::op::v0::Abs;
} // namespace v0
using v0::Abs;
} // namespace op
(rest of the file is unchanged and not shown)
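
The pattern above repeats for every header touched by this PR: the class definition is removed from the ngraph header, the matching openvino header is included, and a using-alias keeps the old name compiling. A minimal sketch of what this preserves for downstream code (hypothetical example, not taken from the PR; it relies only on the headers shown in these diffs plus ngraph's Parameter op, which this PR does not touch):

#include <memory>
#include <type_traits>

#include "ngraph/op/abs.hpp"        // legacy include path, now also pulls in openvino/op/abs.hpp
#include "ngraph/op/parameter.hpp"  // Parameter is untouched by this PR

int main() {
    // The alias makes the two names refer to the same type, so code that
    // spells ngraph::op::v0::Abs keeps compiling unchanged.
    static_assert(std::is_same<ngraph::op::v0::Abs, ov::op::v0::Abs>::value,
                  "ngraph::op::v0::Abs is an alias of ov::op::v0::Abs");

    auto arg = std::make_shared<ngraph::op::Parameter>(ngraph::element::f32, ngraph::Shape{2, 3});
    auto abs_legacy = std::make_shared<ngraph::op::v0::Abs>(arg);  // old spelling
    auto abs_new = std::make_shared<ov::op::v0::Abs>(arg);         // new spelling
    return 0;
}

The same compile-time check holds for every aliased op in this diff.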
27 changes: 2 additions & 25 deletions ngraph/core/include/ngraph/op/acos.hpp
@@ -7,35 +7,12 @@
#include <memory>

#include "ngraph/op/util/unary_elementwise_arithmetic.hpp"
#include "openvino/op/acos.hpp"

namespace ngraph {
namespace op {
namespace v0 {
/// \brief Elementwise inverse cosine (arccos) operation.
///
class NGRAPH_API Acos : public util::UnaryElementwiseArithmetic {
public:
static constexpr NodeTypeInfo type_info{"Acos", 0};
const NodeTypeInfo& get_type_info() const override {
return type_info;
}
/// \brief Constructs an arccos operation.
Acos() = default;
/// \brief Constructs an arccos operation.
///
/// \param arg Output that produces the input tensor.<br>
/// `[d1, ...]`
///
/// Output `[d1, ...]`
///
Acos(const Output<Node>& arg);
bool visit_attributes(AttributeVisitor&) override {
return true;
}
std::shared_ptr<Node> clone_with_new_inputs(const OutputVector& new_args) const override;
bool evaluate(const HostTensorVector& outputs, const HostTensorVector& inputs) const override;
bool has_evaluate() const override;
};
using ov::op::v0::Acos;
} // namespace v0
using v0::Acos;
} // namespace op
(rest of the file is unchanged and not shown)
26 changes: 2 additions & 24 deletions ngraph/core/include/ngraph/op/acosh.hpp
@@ -7,34 +7,12 @@
#include <memory>

#include "ngraph/op/util/unary_elementwise_arithmetic.hpp"
#include "openvino/op/acosh.hpp"

namespace ngraph {
namespace op {
namespace v3 {
/// \brief Elementwise inverse hyperbolic cos operation.
///
class NGRAPH_API Acosh : public util::UnaryElementwiseArithmetic {
public:
NGRAPH_RTTI_DECLARATION;

/// \brief Constructs an Acosh operation.
Acosh() = default;
/// \brief Constructs an Acosh operation.
///
/// \param arg Output that produces the input tensor.<br>
/// `[d1, ...]`
///
/// Output `[d1, ...]`
///
Acosh(const Output<Node>& arg);

virtual std::shared_ptr<Node> clone_with_new_inputs(const OutputVector& new_args) const override;
bool visit_attributes(AttributeVisitor&) override {
return true;
}
bool evaluate(const HostTensorVector& outputs, const HostTensorVector& inputs) const override;
bool has_evaluate() const override;
};
using ov::op::v3::Acosh;
} // namespace v3
using v3::Acosh;
} // namespace op
(rest of the file is unchanged and not shown)
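
The NGRAPH_RTTI_DECLARATION block is also gone from the ngraph side; type information now comes from the ov class itself (see the "Renamed RTTI macros" commit above). A hypothetical check, under the same assumptions as the earlier sketch, that RTTI is still reachable through the legacy name:

#include <memory>
#include <string>

#include "ngraph/op/acosh.hpp"      // aliases ov::op::v3::Acosh
#include "ngraph/op/parameter.hpp"

int main() {
    auto arg = std::make_shared<ngraph::op::Parameter>(ngraph::element::f32, ngraph::Shape{4});
    auto node = std::make_shared<ngraph::op::v3::Acosh>(arg);

    // get_type_info() now reports the type info registered on the ov class.
    bool ok = std::string("Acosh") == node->get_type_info().name;
    return ok ? 0 : 1;
}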
25 changes: 2 additions & 23 deletions ngraph/core/include/ngraph/op/adaptive_avg_pool.hpp
@@ -6,33 +6,12 @@

#include "ngraph/op/op.hpp"
#include "ngraph/op/util/attr_types.hpp"
#include "openvino/op/adaptive_avg_pool.hpp"

namespace ngraph {
namespace op {
namespace v8 {
/// \brief Adaptive average pooling operation.
///
class NGRAPH_API AdaptiveAvgPool : public Op {
public:
NGRAPH_RTTI_DECLARATION;

AdaptiveAvgPool() = default;

///
/// \brief Constructs adaptive average pooling operation.
///
/// \param data Input data
///
/// \param output_shape 1D tensor describing output shape for spatial
/// dimensions.
///
AdaptiveAvgPool(const Output<Node>& data, const Output<Node>& output_shape);

void validate_and_infer_types() override;
bool visit_attributes(AttributeVisitor& visitor) override;

std::shared_ptr<Node> clone_with_new_inputs(const OutputVector& new_args) const override;
};
using ov::op::v8::AdaptiveAvgPool;
} // namespace v8
} // namespace op
} // namespace ngraph
37 changes: 2 additions & 35 deletions ngraph/core/include/ngraph/op/adaptive_max_pool.hpp
@@ -6,45 +6,12 @@

#include "ngraph/op/op.hpp"
#include "ngraph/op/util/attr_types.hpp"
#include "openvino/op/adaptive_max_pool.hpp"

namespace ngraph {
namespace op {
namespace v8 {
/// \brief Adaptive max pooling operation.
///
class NGRAPH_API AdaptiveMaxPool : public Op {
public:
NGRAPH_RTTI_DECLARATION;

AdaptiveMaxPool() = default;

///
/// \brief Constructs adaptive max pooling operation.
///
/// \param data Input data
///
/// \param output_shape 1D tensor describing output shape for spatial
/// dimensions.
///
/// \param index_element_type Specifies the output tensor type for indices
/// output
///
AdaptiveMaxPool(const Output<Node>& data,
const Output<Node>& output_shape,
const ngraph::element::Type& index_element_type = ngraph::element::i64);

void validate_and_infer_types() override;
bool visit_attributes(AttributeVisitor& visitor) override;

std::shared_ptr<Node> clone_with_new_inputs(const OutputVector& new_args) const override;

element::Type get_index_element_type() const {
return m_index_element_type;
}

protected:
ngraph::element::Type m_index_element_type = ngraph::element::i64;
};
using ov::op::v8::AdaptiveMaxPool;
} // namespace v8
} // namespace op
} // namespace ngraph
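
Ops with attributes carry them through the alias unchanged. A hedged sketch (hypothetical; it assumes ov::op::v8::AdaptiveMaxPool keeps the constructor and get_index_element_type() accessor shown in the deleted ngraph declaration, and uses ngraph's Parameter and Constant ops, which this PR does not touch):

#include <memory>

#include "ngraph/op/adaptive_max_pool.hpp"  // aliases ov::op::v8::AdaptiveMaxPool
#include "ngraph/op/constant.hpp"
#include "ngraph/op/parameter.hpp"

int main() {
    auto data = std::make_shared<ngraph::op::Parameter>(ngraph::element::f32,
                                                        ngraph::Shape{1, 3, 32, 32});
    // 1D tensor describing the spatial output shape, as documented in the removed comment.
    auto out_shape = ngraph::op::Constant::create(ngraph::element::i64, ngraph::Shape{2}, {7, 7});

    // Same call through the legacy name; the index type defaults to i64 if omitted.
    auto pool = std::make_shared<ngraph::op::v8::AdaptiveMaxPool>(data, out_shape,
                                                                  ngraph::element::i32);
    auto index_type = pool->get_index_element_type();  // ngraph::element::i32
    (void)index_type;
    return 0;
}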
33 changes: 2 additions & 31 deletions ngraph/core/include/ngraph/op/add.hpp
@@ -7,41 +7,12 @@
#include <memory>

#include "ngraph/op/util/binary_elementwise_arithmetic.hpp"
#include "openvino/op/add.hpp"

namespace ngraph {
namespace op {
namespace v1 {
/// \brief Elementwise addition operation.
///
class NGRAPH_API Add : public util::BinaryElementwiseArithmetic {
public:
NGRAPH_RTTI_DECLARATION;

/// \brief Constructs an uninitialized addition operation
Add() : util::BinaryElementwiseArithmetic(AutoBroadcastSpec::NUMPY) {}

/// \brief Constructs an addition operation.
///
/// \param arg0 Output that produces the first input tensor.<br>
/// `[d0, ...]`
/// \param arg1 Output that produces the second input tensor.<br>
/// `[d0, ...]`
/// \param auto_broadcast Auto broadcast specification. Default is Numpy-style
/// implicit broadcasting.
///
/// Output `[d0, ...]`
///
Add(const Output<Node>& arg0,
const Output<Node>& arg1,
const AutoBroadcastSpec& auto_broadcast = AutoBroadcastSpec(AutoBroadcastType::NUMPY));

std::shared_ptr<Node> clone_with_new_inputs(const OutputVector& new_args) const override;

bool visit_attributes(AttributeVisitor& visitor) override;

bool evaluate(const HostTensorVector& outputs, const HostTensorVector& inputs) const override;
bool has_evaluate() const override;
};
using ov::op::v1::Add;
} // namespace v1
} // namespace op
} // namespace ngraph
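
Binary elementwise ops keep their NumPy-style default auto-broadcast through the alias as well. Another hypothetical sketch under the same assumptions:

#include <memory>

#include "ngraph/op/add.hpp"        // aliases ov::op::v1::Add
#include "ngraph/op/parameter.hpp"

int main() {
    auto a = std::make_shared<ngraph::op::Parameter>(ngraph::element::f32, ngraph::Shape{8, 1, 6, 1});
    auto b = std::make_shared<ngraph::op::Parameter>(ngraph::element::f32, ngraph::Shape{7, 1, 5});

    // The default AutoBroadcastType::NUMPY broadcasts {8, 1, 6, 1} and {7, 1, 5} to {8, 7, 6, 5}.
    auto sum = std::make_shared<ov::op::v1::Add>(a, b);
    auto out_shape = sum->get_output_shape(0);  // ngraph::Shape{8, 7, 6, 5}
    (void)out_shape;
    return 0;
}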
29 changes: 2 additions & 27 deletions ngraph/core/include/ngraph/op/and.hpp
@@ -7,37 +7,12 @@
#include <memory>

#include "ngraph/op/util/binary_elementwise_logical.hpp"
#include "openvino/op/logical_and.hpp"

namespace ngraph {
namespace op {
namespace v1 {
/// \brief Elementwise logical-and operation.
///
class NGRAPH_API LogicalAnd : public util::BinaryElementwiseLogical {
public:
NGRAPH_RTTI_DECLARATION;
/// \brief Constructs a logical-and operation.
LogicalAnd() = default;

/// \brief Constructs a logical-and operation.
///
/// \param arg0 Output that produces the first input tensor.<br>
/// `[d0, ...]`
/// \param arg1 Output that produces the second input tensor.<br>
/// `[d0, ...]`
/// \param auto_broadcast Auto broadcast specification
///
/// Output `[d0, ...]`
///
LogicalAnd(const Output<Node>& arg0,
const Output<Node>& arg1,
const AutoBroadcastSpec& auto_broadcast = AutoBroadcastSpec(AutoBroadcastType::NUMPY));

std::shared_ptr<Node> clone_with_new_inputs(const OutputVector& new_args) const override;
bool visit_attributes(AttributeVisitor& visitor) override;
bool evaluate(const HostTensorVector& outputs, const HostTensorVector& inputs) const override;
bool has_evaluate() const override;
};
using ov::op::v1::LogicalAnd;
} // namespace v1
} // namespace op
} // namespace ngraph
28 changes: 2 additions & 26 deletions ngraph/core/include/ngraph/op/asin.hpp
@@ -7,36 +7,12 @@
#include <memory>

#include "ngraph/op/util/unary_elementwise_arithmetic.hpp"
#include "openvino/op/asin.hpp"

namespace ngraph {
namespace op {
namespace v0 {
/// \brief Elementwise inverse sine (arcsin) operation.
///
class NGRAPH_API Asin : public util::UnaryElementwiseArithmetic {
public:
static constexpr NodeTypeInfo type_info{"Asin", 0};
const NodeTypeInfo& get_type_info() const override {
return type_info;
}
/// \brief Constructs an arcsin operation.
Asin() = default;
/// \brief Constructs an arcsin operation.
///
/// \param arg Output that produces the input tensor.<br>
/// `[d1, ...]`
///
/// Output `[d1, ...]`
///
Asin(const Output<Node>& arg);

virtual std::shared_ptr<Node> clone_with_new_inputs(const OutputVector& new_args) const override;
bool visit_attributes(AttributeVisitor&) override {
return true;
}
bool evaluate(const HostTensorVector& outputs, const HostTensorVector& inputs) const override;
bool has_evaluate() const override;
};
using ov::op::v0::Asin;
} // namespace v0
using v0::Asin;
} // namespace op
(rest of the file is unchanged and not shown)
26 changes: 2 additions & 24 deletions ngraph/core/include/ngraph/op/asinh.hpp
@@ -7,34 +7,12 @@
#include <memory>

#include "ngraph/op/util/unary_elementwise_arithmetic.hpp"
#include "openvino/op/asinh.hpp"

namespace ngraph {
namespace op {
namespace v3 {
/// \brief Elementwise inverse hyperbolic sin operation.
///
class NGRAPH_API Asinh : public util::UnaryElementwiseArithmetic {
public:
NGRAPH_RTTI_DECLARATION;

/// \brief Constructs an Asinh operation.
Asinh() = default;
/// \brief Constructs an Asinh operation.
///
/// \param arg Output that produces the input tensor.<br>
/// `[d1, ...]`
///
/// Output `[d1, ...]`
///
Asinh(const Output<Node>& arg);

virtual std::shared_ptr<Node> clone_with_new_inputs(const OutputVector& new_args) const override;
bool visit_attributes(AttributeVisitor&) override {
return true;
}
bool evaluate(const HostTensorVector& outputs, const HostTensorVector& inputs) const override;
bool has_evaluate() const override;
};
using ov::op::v3::Asinh;
} // namespace v3
using v3::Asinh;
} // namespace op
(rest of the file is unchanged and not shown)