Skip to content

Commit

Permalink
Take Jane's changes for Dimension names
Browse files Browse the repository at this point in the history
  • Loading branch information
ilya-lavrenov committed Apr 27, 2021
1 parent e995a8a commit 9f6c8fa
Show file tree
Hide file tree
Showing 8 changed files with 166 additions and 23 deletions.
13 changes: 10 additions & 3 deletions ngraph/core/include/ngraph/descriptor/tensor.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,8 @@ namespace ngraph
class HostTensor;
}
using HostTensorPtr = std::shared_ptr<runtime::HostTensor>;
using TensorLabel = std::vector<std::string>;

namespace descriptor
{
/// \brief Compile-time descriptor of a first-class value that is a tensor.
Expand Down Expand Up @@ -53,7 +55,9 @@ namespace ngraph
void set_lower_value(const HostTensorPtr& value);
/// \brief sets upper bound value description
void set_upper_value(const HostTensorPtr& value);
/// \brief unsets bound value descriptions
/// \brief sets value label description
void set_value_label(const TensorLabel& value_label);
/// \brief unsets bound value descriptions and their labels
void invalidate_values();

const element::Type& get_element_type() const { return m_element_type; }
Expand All @@ -63,6 +67,8 @@ namespace ngraph
HostTensorPtr get_lower_value() const { return m_lower_value; }
/// \brief gets upper bound value description
HostTensorPtr get_upper_value() const { return m_upper_value; }
/// \brief gets value label description
TensorLabel get_value_label() const { return m_value_label; }
/// \brief checks if lower and upper bound are set and point to the same HostTensor
bool has_and_set_bound() const
{
Expand All @@ -81,6 +87,7 @@ namespace ngraph
PartialShape m_partial_shape;
Node* m_node{nullptr};
HostTensorPtr m_lower_value, m_upper_value;
std::vector<std::string> m_value_label;
size_t m_node_output_number{0};

std::string m_name;
Expand All @@ -89,5 +96,5 @@ namespace ngraph

NGRAPH_API
std::ostream& operator<<(std::ostream&, const ngraph::descriptor::Tensor&);
} // namespace descriptor
} // namespace ngraph
}
}
13 changes: 9 additions & 4 deletions ngraph/core/include/ngraph/dimension.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -26,12 +26,12 @@ namespace ngraph

/// \brief Construct a static dimension.
/// \param dimension Value of the dimension.
Dimension(value_type dimension);
Dimension(value_type dimension, std::string name = "");

/// \brief Construct a dynamic dimension with bounded range
/// \param min_dimension The lower inclusive limit for the dimension
/// \param max_dimension The upper inclusive limit for the dimension
Dimension(value_type min_dimension, value_type max_dimension);
Dimension(value_type min_dimension, value_type max_dimension, std::string name = "");

/// \brief Construct a dynamic dimension with range [0, ...]
Dimension() = default;
Expand Down Expand Up @@ -61,6 +61,9 @@ namespace ngraph
/// \brief Return the interval of valid lengths
const Interval& get_interval() const { return m_dimension; }
Interval& get_interval() { return m_dimension; }

/// \brief Return the dimension name
const std::string& get_name() const { return m_name; }
/// \brief Check whether this dimension represents the same scheme as the argument (both
/// dynamic, or equal).
/// \param dim The other dimension to compare this dimension to.
Expand Down Expand Up @@ -146,13 +149,15 @@ namespace ngraph
Dimension& operator&=(const Dimension& dim);

private:
Dimension(const Interval& interval)
Dimension(const Interval& interval, std::string name = "")
: m_dimension(interval)
, m_name(name)
{
}

// The actual numerical value of the dimension.
Interval m_dimension{};
std::string m_name;
};

/// \brief Insert a human-readable representation of a dimension into an output stream.
Expand All @@ -163,4 +168,4 @@ namespace ngraph
/// Inserts the string `?` if `dimension` is dynamic; else inserts `dimension.get_length()`.
NGRAPH_API
std::ostream& operator<<(std::ostream& str, const Dimension& dimension);
} // namespace ngraph
}
4 changes: 3 additions & 1 deletion ngraph/core/include/ngraph/node.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -55,6 +55,7 @@ namespace ngraph
using HostTensor = runtime::HostTensor;
using HostTensorPtr = std::shared_ptr<HostTensor>;
using HostTensorVector = std::vector<HostTensorPtr>;
using TensorLabelVector = std::vector<TensorLabel>;

namespace op
{
Expand Down Expand Up @@ -199,6 +200,7 @@ namespace ngraph
const HostTensorVector& input_values) const;
virtual bool evaluate_lower(const HostTensorVector& output_values) const;
virtual bool evaluate_upper(const HostTensorVector& output_values) const;
virtual bool evaluate_label(TensorLabelVector& output_labels) const;

virtual bool constant_fold(OutputVector& output_values, const OutputVector& inputs_values);
/// \brief Decomposes the FusedOp into a sub-graph consisting of core ngraph ops
Expand Down Expand Up @@ -670,7 +672,7 @@ namespace ngraph
{
}
};
} // namespace ngraph
}
#define NODE_VALIDATION_CHECK(node, ...) \
NGRAPH_CHECK_HELPER(::ngraph::NodeValidationFailure, (node), __VA_ARGS__)

Expand Down
9 changes: 9 additions & 0 deletions ngraph/core/include/ngraph/validation_util.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -277,6 +277,15 @@ namespace ngraph
/// \return boolean status if value evaluation was successful.
NGRAPH_API bool default_lower_bound_evaluator(const Node* node,
const HostTensorVector& output_values);

/// \brief Propagates value label from 0 input to the only output through an operation.
/// Not applicable for operations which require values interaction (example: mathematical
/// operations). Could be used for movement operations (example: gathering, shape change)
/// \param node Operation to be performed
/// \param output_labels Vector of TensorLabel objects representing resulting value labels
/// \return boolean status if label evaluation was successful.
NGRAPH_API bool default_label_evaluator(const Node* node, TensorLabelVector& output_labels);

/// \brief Estimates both bounds for node output tensors using both bounds of inputs. Works for
/// operations with two inputs (in_1 and in_2). Brute forces all the pairs of bounds for inputs
/// and evaluates all of them: {in_1_lower, in_2 lower}, {in_1_lower, in_2 upper}, {in_1_upper,
Expand Down
17 changes: 17 additions & 0 deletions ngraph/core/src/descriptor/tensor.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -60,6 +60,7 @@ void descriptor::Tensor::invalidate_values()
{
m_upper_value = nullptr;
m_lower_value = nullptr;
m_value_label.clear();
}

void descriptor::Tensor::set_lower_value(const HostTensorPtr& value)
Expand All @@ -78,6 +79,21 @@ void descriptor::Tensor::set_upper_value(const HostTensorPtr& value)
m_upper_value = value;
}

// Attaches per-element string labels to this tensor's value.
// \param value_label One label per tensor element. An empty vector acts as
//        "unset" and clears any previously stored labels; a non-empty vector
//        is only valid when the tensor's shape is static and the label count
//        equals the element count (both enforced via NGRAPH_CHECK).
void descriptor::Tensor::set_value_label(const TensorLabel& value_label)
{
    // Plain value, not a reference: size() returns a prvalue, and binding a
    // const& to that temporary (as before) is legal but misleading.
    const size_t labels_size = value_label.size();
    if (labels_size == 0)
    {
        // An empty label vector means "no labels"; drop whatever was set.
        m_value_label.clear();
    }
    else
    {
        // Labels are per-element, so the shape must be fully known and the
        // number of labels must match the element count exactly.
        NGRAPH_CHECK(m_partial_shape.is_static());
        NGRAPH_CHECK(shape_size(m_partial_shape.to_shape()) == labels_size);
        m_value_label = value_label;
    }
}

const Shape& descriptor::Tensor::get_shape() const
{
if (m_partial_shape.is_static())
Expand All @@ -96,6 +112,7 @@ size_t descriptor::Tensor::size() const
const bool bitwidth_less_than_byte = m_element_type.bitwidth() < 8;
if (bitwidth_less_than_byte)
{
// TODO consider caching this value
return ceil((1.0 * shape_size(get_shape()) * m_element_type.bitwidth()) / 8);
}
return shape_size(get_shape()) * m_element_type.size();
Expand Down
56 changes: 47 additions & 9 deletions ngraph/core/src/dimension.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -2,17 +2,17 @@
// SPDX-License-Identifier: Apache-2.0
//

#include <algorithm>
#include <iostream>
#include <limits>
#include <sstream>

#include "ngraph/dimension.hpp"

using namespace ngraph;

std::ostream& ngraph::operator<<(std::ostream& str, const Dimension& dimension)
{
if (!dimension.get_name().empty())
str << dimension.get_name() << ":";

if (dimension.is_static())
{
return str << dimension.get_length();
Expand All @@ -28,29 +28,41 @@ std::ostream& ngraph::operator<<(std::ostream& str, const Dimension& dimension)
}
}

Dimension::Dimension(value_type dimension)
// Constructs a static dimension with an optional human-readable name.
// \param dimension Dimension value; -1 means fully dynamic and maps to the
//        interval [0, Interval::s_max].
// \param name Optional name attached to the dimension ("" means unnamed).
Dimension::Dimension(value_type dimension, std::string name)
    : m_dimension(dimension == -1 ? 0 : dimension, dimension == -1 ? Interval::s_max : dimension)
    , m_name(std::move(name)) // sink parameter: move instead of copying the string
{
}

Dimension::Dimension(value_type min_dimension, value_type max_dimension)
// Constructs a dynamic dimension with an inclusive [min, max] range and an
// optional name.
// \param min_dimension Lower inclusive limit; -1 maps to 0.
// \param max_dimension Upper inclusive limit; -1 maps to Interval::s_max.
// \param name Optional name attached to the dimension ("" means unnamed).
Dimension::Dimension(value_type min_dimension, value_type max_dimension, std::string name)
    : m_dimension(min_dimension == -1 ? 0 : min_dimension,
                  max_dimension == -1 ? Interval::s_max : max_dimension)
    , m_name(std::move(name)) // sink parameter: move instead of copying the string
{
}

// Adds two dimensions. Adding an anonymous (unnamed) zero is treated as an
// identity operation that returns the other operand unchanged, preserving its
// name; any other combination yields the unnamed interval sum.
Dimension Dimension::operator+(const Dimension& dim) const
{
    const bool rhs_is_anonymous_zero = dim.m_dimension == 0 && dim.get_name().empty();
    if (rhs_is_anonymous_zero)
        return *this;
    const bool lhs_is_anonymous_zero = m_dimension == 0 && get_name().empty();
    if (lhs_is_anonymous_zero)
        return dim;
    return Dimension(m_dimension + dim.m_dimension);
}

// Subtracts dim from this dimension. Subtracting an anonymous (unnamed) zero
// returns *this unchanged, name included; otherwise the result is the unnamed
// interval difference.
Dimension Dimension::operator-(const Dimension& dim) const
{
    const bool subtrahend_is_anonymous_zero =
        dim.m_dimension == 0 && dim.get_name().empty();
    return subtrahend_is_anonymous_zero ? *this
                                        : Dimension(m_dimension - dim.m_dimension);
}

// Multiplies two dimensions. Multiplying by an anonymous (unnamed) one is an
// identity operation that returns the other operand unchanged, preserving its
// name; any other combination yields the unnamed interval product.
Dimension Dimension::operator*(const Dimension& dim) const
{
    const bool rhs_is_anonymous_one = dim.m_dimension == 1 && dim.get_name().empty();
    if (rhs_is_anonymous_one)
        return *this;
    const bool lhs_is_anonymous_one = m_dimension == 1 && get_name().empty();
    if (lhs_is_anonymous_one)
        return dim;
    return Dimension(m_dimension * dim.m_dimension);
}

Expand Down Expand Up @@ -93,20 +105,46 @@ bool Dimension::merge(Dimension& dst, const Dimension d1, const Dimension d2)
{
return false;
}
dst = result;
std::string name;
if (d1 == d2 && d1.get_name() == d2.get_name())
name = d1.get_name();
dst = {result, name};
return true;
}

// Deduces the name for the result of broadcasting dimensions d1 and d2.
// Equal dimensions keep a name only when both sides agree on it, or when
// exactly one side is named; when broadcasting a 1 against a differing static
// dimension, the non-1 side's name wins. Returns "" when no name can be
// deduced.
std::string broadcast_dimensions_name(const Dimension& d1, const Dimension& d2)
{
    std::string name;
    if (d1 == d2)
    {
        const auto& name_1 = d1.get_name();
        const auto& name_2 = d2.get_name();
        // Same name on both sides, or only d1 is named -> keep d1's name;
        // only d2 is named -> keep d2's name; conflicting names -> "".
        if (name_1 == name_2 || (!name_1.empty() && name_2.empty()))
            name = name_1;
        else if (name_1.empty() && !name_2.empty())
            name = name_2;
        return name;
    }

    // Dimensions differ: a name only survives when one side is the
    // broadcastable 1. Dimension(-1) (fully dynamic) serves as a "neither side
    // is 1" sentinel, rejected by the is_dynamic() check below.
    // NOTE(review): binding const& to the ternary's temporary Dimension is
    // legal (lifetime-extended) but subtle.
    const auto& one_dim = d1 == 1 ? d1 : (d2 == 1 ? d2 : -1);
    const auto& other_dim = d1 == 1 ? d2 : (d2 == 1 ? d1 : -1); // the side that is not equal to 1
    if (one_dim.is_dynamic())
        return "";
    return other_dim.get_name();
}

bool Dimension::broadcast_merge(Dimension& dst, const Dimension d1, const Dimension d2)
{
if (d1.m_dimension.size() == 1 && d1.m_dimension.get_min_val() == 1)
{
dst = d2;
dst =
Dimension(d2.get_min_length(), d2.get_max_length(), broadcast_dimensions_name(d1, d2));
return true;
}
if (d2.m_dimension.size() == 1 && d2.m_dimension.get_min_val() == 1)
{
dst = d1;
dst =
Dimension(d1.get_min_length(), d1.get_max_length(), broadcast_dimensions_name(d1, d2));
return true;
}
return merge(dst, d1, d2);
Expand All @@ -127,7 +165,7 @@ namespace
{
return vt == Interval::s_max ? -1 : vt;
}
} // namespace
}

Dimension::value_type Dimension::get_max_length() const
{
Expand Down
7 changes: 6 additions & 1 deletion ngraph/core/src/node.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -552,7 +552,7 @@ namespace ngraph
{
ostream& operator<<(ostream& out, const Node& node) { return node.write_description(out, 1); }
ostream& operator<<(ostream& out, const Node* node) { return node->write_description(out, 1); }
} // namespace ngraph
}

std::ostream& Node::write_description(std::ostream& out, uint32_t depth) const
{
Expand Down Expand Up @@ -971,6 +971,11 @@ bool Node::evaluate_upper(const HostTensorVector& output_values) const
return default_upper_bound_evaluator(this, output_values);
}

// Base implementation of value-label propagation: a generic Node does not know
// how to propagate labels, so it reports failure and leaves output_labels
// untouched. Operations that support label propagation override this.
bool Node::evaluate_label(TensorLabelVector& output_labels) const
{
    return false;
}

bool Node::constant_fold(OutputVector& output_values, const OutputVector& input_values)
{
OV_ITT_SCOPED_TASK(itt::domains::nGraph, "Node::constant_fold");
Expand Down
Loading

0 comments on commit 9f6c8fa

Please sign in to comment.