Skip to content

Commit

Permalink
Removed unnecessary transposes
Browse files Browse the repository at this point in the history
  • Loading branch information
mryzhov committed Mar 17, 2023
1 parent e57f26c commit f1b4990
Show file tree
Hide file tree
Showing 4 changed files with 58 additions and 30 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -194,6 +194,13 @@ GatherSinkingTransposeReshapeForward::GatherSinkingTransposeReshapeForward() {
auto transpose_const = as_type_ptr<Constant>(pattern_to_output.at(transpose_const_label).get_node_shared_ptr());
auto reshape = pattern_to_output.at(reshape_label).get_node_shared_ptr();

const ov::Shape reshape_shape = pass::helper::SqueezeShape(reshape->get_shape());
const ov::Shape transpose_shape = pass::helper::SqueezeShape(transpose->get_shape());
if (reshape_shape == transpose_shape) {
pass::helper::RemoveSingleInputNodeFromFunction(transpose);
return true;
}

const NodePair new_nodes = SinkForward(transpose, transpose_const, reshape);

register_new_node(new_nodes.first);
Expand All @@ -220,6 +227,13 @@ GatherSinkingTransposeReshapeBackward::GatherSinkingTransposeReshapeBackward() {
auto transpose_const = as_type_ptr<Constant>(pattern_to_output.at(transpose_const_label).get_node_shared_ptr());
auto reshape = pattern_to_output.at(reshape_label).get_node_shared_ptr();

const ov::Shape reshape_shape = pass::helper::SqueezeShape(reshape->get_shape());
const ov::Shape transpose_shape = pass::helper::SqueezeShape(transpose->get_shape());
if (reshape_shape == transpose_shape) {
pass::helper::RemoveSingleInputNodeFromFunction(transpose);
return true;
}

const NodePair new_nodes = SinkBackward(transpose, transpose_const, reshape);
register_new_node(new_nodes.first);
register_new_node(new_nodes.second);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
//

#include "transformations/remove_pre_post_processing.hpp"
#include "transformations/utils/transformation_helper.hpp"

#include <openvino/cc/ngraph/itt.hpp>
#include <openvino/opsets/opset1.hpp>
Expand All @@ -19,14 +20,6 @@ using namespace ov::intel_gna::pass;

namespace {

// Drop every dimension equal to 1 from the shape, preserving the order of the rest.
ov::Shape SqueezeShape(const ov::Shape& shape) {
    ov::Shape result;
    result.reserve(shape.size());
    for (const auto dim : shape) {
        if (dim != 1) {
            result.push_back(dim);
        }
    }
    return result;
}

bool IsPreprocessingLayerSuppported(std::shared_ptr<ngraph::Node>& layer) {
// Gather layers are not supported by GNA and have to be executed on CPU
if (std::dynamic_pointer_cast<ov::opset1::Gather>(layer) ||
Expand All @@ -37,7 +30,7 @@ bool IsPreprocessingLayerSuppported(std::shared_ptr<ngraph::Node>& layer) {

// 2-d Transposes layers can be executed on GNA
if (std::dynamic_pointer_cast<ov::opset1::Transpose>(layer)) {
const ov::Shape squeezed_shape = SqueezeShape(layer->get_shape());
const ov::Shape squeezed_shape = pass::helper::SqueezeShape(layer->get_shape());
const size_t min_input_dim = std::min(squeezed_shape[0], squeezed_shape[1]);
const size_t max_input_dim = std::max(squeezed_shape[0], squeezed_shape[1]);

Expand All @@ -56,24 +49,6 @@ bool IsPreprocessingLayerSuppported(std::shared_ptr<ngraph::Node>& layer) {

return false;
}

/*
works only if we have one data input and one output
*/
void RemoveSingleInputNodeFromFunction(std::shared_ptr<ov::Node> node) {
    const ov::Shape input_node_shape = node->get_input_shape(0);
    const ov::Shape output_node_shape = node->get_output_shape(0);

    std::shared_ptr<ov::Node> node_parent = node->get_input_node_shared_ptr(0);
    // Compare the shapes directly: ov::Shape::operator!= also accounts for differing ranks.
    // The previous std::equal(in.begin(), in.end(), out.begin()) call read past the end of
    // the output shape when its rank was smaller (UB) and treated {2,3} / {2,3,1} as equal.
    if (input_node_shape != output_node_shape) {
        // Shapes differ, so bridge the gap with an explicit Reshape to the node's output shape.
        auto reshape_const_node =
            std::make_shared<Constant>(ov::element::i64, ov::Shape{output_node_shape.size()}, output_node_shape);
        node_parent = std::make_shared<Reshape>(node_parent, reshape_const_node, false);
    }

    // Rewire consumers of 'node' to its (possibly reshaped) parent and keep the friendly name.
    ov::replace_output_update_name(node->output(0), node_parent->output(0));
}

/*
Support only one data node as 0 input
*/
Expand Down Expand Up @@ -110,7 +85,7 @@ bool RemoveInputsProcessing::run_on_model(const std::shared_ptr<ov::Model>& mode
m_subgraph_cpu_map->emplace(param_node.get_node_shared_ptr()->get_friendly_name(),
CopySingleInputNodeFromFunction(target_node));
}
RemoveSingleInputNodeFromFunction(target_node);
pass::helper::RemoveSingleInputNodeFromFunction(target_node);
result = true;
}
}
Expand All @@ -130,7 +105,7 @@ bool RemoveOutputsProcessing::run_on_model(const std::shared_ptr<ov::Model>& mod
m_subgraph_cpu_map->emplace(r_input_node->get_friendly_name(),
CopySingleInputNodeFromFunction(r_input_node));
}
RemoveSingleInputNodeFromFunction(r_input_node);
pass::helper::RemoveSingleInputNodeFromFunction(r_input_node);
result = true;
}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -4,12 +4,14 @@

#include "transformation_helper.hpp"

#include <ngraph/opsets/opset7.hpp>
#include "openvino/opsets/opset7.hpp"
#include <ngraph/pattern/op/wrap_type.hpp>
#include <ngraph/rt_info.hpp>
#include "ops/gna_convolution.hpp"
#include "ops/gna_max_pool.hpp"

using namespace ov::opset7;

namespace ov {
namespace intel_gna {
namespace pass {
Expand Down Expand Up @@ -105,6 +107,29 @@ std::shared_ptr<ngraph::Node> InsertFQLayer(const std::shared_ptr<ngraph::opset7
}
return last_node;
}

// Removes a single-data-input, single-output node from the function, inserting a Reshape
// between the node's parent and its consumers when the input and output shapes differ.
// Works only for nodes with one data input and one output.
void RemoveSingleInputNodeFromFunction(std::shared_ptr<ov::Node> node) {
    const ov::Shape input_node_shape = node->get_input_shape(0);
    const ov::Shape output_node_shape = node->get_output_shape(0);

    std::shared_ptr<ov::Node> node_parent = node->get_input_node_shared_ptr(0);
    // Compare the shapes directly: ov::Shape::operator!= also accounts for differing ranks.
    // The previous std::equal(in.begin(), in.end(), out.begin()) call read past the end of
    // the output shape when its rank was smaller (UB) and treated {2,3} / {2,3,1} as equal.
    if (input_node_shape != output_node_shape) {
        auto reshape_const_node =
            std::make_shared<Constant>(ov::element::i64, ov::Shape{output_node_shape.size()}, output_node_shape);
        node_parent = std::make_shared<Reshape>(node_parent, reshape_const_node, false);
    }

    // Rewire consumers of 'node' to its (possibly reshaped) parent and keep the friendly name.
    ov::replace_output_update_name(node->output(0), node_parent->output(0));
}

// Return a copy of 'shape' with every dimension equal to 1 removed; the relative
// order of the remaining dimensions is unchanged.
ov::Shape SqueezeShape(const ov::Shape& shape) {
    ov::Shape squeezed;
    squeezed.reserve(shape.size());
    for (const auto dim : shape) {
        if (dim != 1) {
            squeezed.push_back(dim);
        }
    }
    return squeezed;
}

} // namespace helper
} // namespace pass
} // namespace intel_gna
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -110,6 +110,20 @@ std::shared_ptr<ngraph::Node> VerifyBiasGetConst(std::shared_ptr<ngraph::Node> c
std::shared_ptr<ngraph::Node> InsertFQLayer(const std::shared_ptr<ngraph::opset7::FakeQuantize> fq_layer,
std::shared_ptr<ngraph::Node> last_node);

/**
* @brief removes a single node from the function and inserts a Reshape if the input and output shapes are different
* @param node the node to be deleted
* @return void
*/
void RemoveSingleInputNodeFromFunction(std::shared_ptr<ov::Node> node);

/**
* @brief removes all dimensions equal to 1 from the shape vector
* @param shape original tensor shape
* @return shape without dimensions equal to 1
*/
ov::Shape SqueezeShape(const ov::Shape& shape);

} // namespace helper
} // namespace pass
} // namespace intel_gna
Expand Down

0 comments on commit f1b4990

Please sign in to comment.