diff --git a/inference-engine/src/offline_transformations/src/pruning/propagate_masks.cpp b/inference-engine/src/offline_transformations/src/pruning/propagate_masks.cpp
index 271b200f31b5bc..671cc6f88853d4 100644
--- a/inference-engine/src/offline_transformations/src/pruning/propagate_masks.cpp
+++ b/inference-engine/src/offline_transformations/src/pruning/propagate_masks.cpp
@@ -419,13 +419,12 @@ class ngraph::pass::mask_propagation::FakeQuantize : public MatcherPass{
         auto fq_node = std::dynamic_pointer_cast<opset6::FakeQuantize>(m_output.get_node_shared_ptr());
         size_t idx = 0;
         if (fq_node->get_auto_broadcast() != ngraph::op::AutoBroadcastType::NONE) {
-            for (auto const_node : fq_params_nodes) {
+            for (auto node : fq_params_nodes) {
+                auto const_node = std::dynamic_pointer_cast<opset6::Constant>(node);
+                if (!const_node) throw ngraph_error("Unexpected operation type.");
                 auto new_shape = broadcast_shape_to_rank(const_node->get_shape(),
                                                         m_input.get_partial_shape().rank().get_length());
-                auto const_copy = const_node->clone_with_new_inputs(const_node->input_values());
-                auto new_const = std::dynamic_pointer_cast<opset6::Constant>(const_copy);
-                new_const->set_data_shape(new_shape);
-                new_const->validate_and_infer_types();
+                auto new_const = std::make_shared<opset6::Constant>(*const_node, new_shape);
                 new_const->set_friendly_name(const_node->get_friendly_name());
                 ngraph::copy_runtime_info(const_node, new_const);
                 ngraph::replace_node(const_node, new_const);
diff --git a/ngraph/core/include/ngraph/op/constant.hpp b/ngraph/core/include/ngraph/op/constant.hpp
index ce75a6744526dc..d3cfc91ccfd002 100644
--- a/ngraph/core/include/ngraph/op/constant.hpp
+++ b/ngraph/core/include/ngraph/op/constant.hpp
@@ -155,6 +155,7 @@ namespace ngraph
                }

                Constant(const Constant& other);
+               Constant(const Constant& other, const Shape& new_shape);
                Constant& operator=(const Constant&) = delete;

                virtual ~Constant() override;
@@ -213,6 +214,7 @@ namespace ngraph
                /// count
                ///
                /// \param shape The shape of the tensor constant.
+               NGRAPH_DEPRECATED("Use Constant c-tor with shape argument instead")
                void set_data_shape(const Shape& shape);

                /// \brief Wrapper around constructing a shared_ptr of a Constant
diff --git a/ngraph/core/src/op/constant.cpp b/ngraph/core/src/op/constant.cpp
index 1e638896a2fa73..e03885d118db03 100644
--- a/ngraph/core/src/op/constant.cpp
+++ b/ngraph/core/src/op/constant.cpp
@@ -148,6 +148,18 @@ op::Constant::Constant(const Constant& other)
     constructor_validate_and_infer_types();
 }

+op::Constant::Constant(const Constant& other, const Shape& new_shape)
+{
+    NGRAPH_CHECK(shape_size(other.m_shape) == shape_size(new_shape),
+                 "Shape size " + std::to_string(shape_size(new_shape)) + " is not equal to " +
+                     std::to_string(shape_size(other.m_shape)));
+    m_element_type = other.m_element_type;
+    m_shape = new_shape;
+    m_data = other.m_data;
+    m_all_elements_bitwise_identical = other.m_all_elements_bitwise_identical;
+    constructor_validate_and_infer_types();
+}
+
 op::Constant::~Constant() {}

 string op::Constant::convert_value_to_string(size_t index) const
diff --git a/ngraph/core/src/op/reshape.cpp b/ngraph/core/src/op/reshape.cpp
index 2e95da16dad97c..b50681fb40a9f2 100644
--- a/ngraph/core/src/op/reshape.cpp
+++ b/ngraph/core/src/op/reshape.cpp
@@ -241,19 +241,7 @@ bool op::v1::Reshape::constant_fold(OutputVector& output_values, const OutputVec
     if (auto data_const =
             std::dynamic_pointer_cast<op::Constant>(inputs_values[0].get_node_shared_ptr()))
     {
-        // In case if data constant has single consumer we can change it shape without making a copy
-        // Otherwise we create Constant copy with shape from reshape node
-        if (data_const->output(0).get_target_inputs().size() == 1)
-        {
-            data_const->set_data_shape(shape);
-            data_const->validate_and_infer_types();
-            output_values[0] = data_const;
-        }
-        else
-        {
-            output_values[0] = std::make_shared<op::Constant>(
-                data_const->get_element_type(), shape, data_const->get_data_ptr());
-        }
+        output_values[0] = std::make_shared<op::Constant>(*data_const, shape);
         return true;
     }
     return false;
diff --git a/ngraph/core/src/op/squeeze.cpp b/ngraph/core/src/op/squeeze.cpp
index 3b4732f87297be..b5a2c1876bcf37 100644
--- a/ngraph/core/src/op/squeeze.cpp
+++ b/ngraph/core/src/op/squeeze.cpp
@@ -327,19 +327,7 @@ bool op::v0::Squeeze::constant_fold(OutputVector& output_values, const OutputVec
     if (auto data_const =
             std::dynamic_pointer_cast<op::Constant>(inputs_values[0].get_node_shared_ptr()))
     {
-        // In case if data constant has single consumer we can change it shape without making a copy
-        // Otherwise we create Constant copy with shape from squeeze node
-        if (data_const->output(0).get_target_inputs().size() == 1)
-        {
-            data_const->set_data_shape(shape);
-            data_const->validate_and_infer_types();
-            output_values[0] = data_const;
-        }
-        else
-        {
-            output_values[0] = std::make_shared<op::Constant>(
-                data_const->get_element_type(), shape, data_const->get_data_ptr());
-        }
+        output_values[0] = std::make_shared<op::Constant>(*data_const, shape);
         return true;
     }
     return false;
diff --git a/ngraph/core/src/op/unsqueeze.cpp b/ngraph/core/src/op/unsqueeze.cpp
index a043d59a660a42..51dea7415d4271 100644
--- a/ngraph/core/src/op/unsqueeze.cpp
+++ b/ngraph/core/src/op/unsqueeze.cpp
@@ -190,19 +190,7 @@ bool op::v0::Unsqueeze::constant_fold(OutputVector& output_values,
     if (auto data_const =
             std::dynamic_pointer_cast<op::Constant>(inputs_values[0].get_node_shared_ptr()))
     {
-        // In case if data constant has single consumer we can change it shape without making a copy
-        // Otherwise we create Constant copy with shape from unsqueeze node
-        if (data_const->output(0).get_target_inputs().size() == 1)
-        {
-            data_const->set_data_shape(shape);
-            data_const->validate_and_infer_types();
-            output_values[0] = data_const;
-        }
-        else
-        {
-            output_values[0] = std::make_shared<op::Constant>(
-                data_const->get_element_type(), shape, data_const->get_data_ptr());
-        }
+        output_values[0] = std::make_shared<op::Constant>(*data_const, shape);
         return true;
     }
     return false;
diff --git a/ngraph/test/constant_folding.cpp b/ngraph/test/constant_folding.cpp
index d7efe623708636..1f3c00b684bf82 100644
--- a/ngraph/test/constant_folding.cpp
+++ b/ngraph/test/constant_folding.cpp
@@ -2274,6 +2274,75 @@ TEST(constant_folding, constant_dyn_reshape_shape_not_originally_constant)
     ASSERT_TRUE(test::all_close_f(values_in, values_out, MIN_FLOAT_TOLERANCE_BITS));
 }

+TEST(constant_folding, const_reshape_no_data_copy)
+{
+    auto const_data = op::Constant::create(element::f32, Shape{1, 64}, {1});
+    auto const_reshape = op::Constant::create(element::i64, Shape{2}, {2, 32});
+    auto reshape = std::make_shared<op::v1::Reshape>(const_data, const_reshape, false);
+    auto consumer1 = std::make_shared<op::Relu>(reshape);
+    auto consumer2 = std::make_shared<op::Relu>(reshape);
+
+    auto f = std::make_shared<Function>(NodeVector{consumer1, consumer2}, ParameterVector{});
+
+    pass::Manager pass_manager;
+    pass_manager.register_pass<pass::ConstantFolding>();
+    pass_manager.run_passes(f);
+
+    auto const1 = std::dynamic_pointer_cast<op::Constant>(consumer1->input_value(0).get_node_shared_ptr());
+    auto const2 = std::dynamic_pointer_cast<op::Constant>(consumer2->input_value(0).get_node_shared_ptr());
+
+    ASSERT_TRUE(const1);
+    ASSERT_TRUE(const2);
+    ASSERT_EQ(const1, const2);
+    ASSERT_EQ(const1->get_data_ptr(), const2->get_data_ptr());
+}
+
+TEST(constant_folding, const_squeeze_no_data_copy)
+{
+    auto const_data = op::Constant::create(element::f32, Shape{1, 64}, {1});
+    auto const_reshape = op::Constant::create(element::i64, Shape{1}, {0});
+    auto reshape = std::make_shared<op::v0::Squeeze>(const_data, const_reshape);
+    auto consumer1 = std::make_shared<op::Relu>(reshape);
+    auto consumer2 = std::make_shared<op::Relu>(reshape);
+
+    auto f = std::make_shared<Function>(NodeVector{consumer1, consumer2}, ParameterVector{});
+
+    pass::Manager pass_manager;
+    pass_manager.register_pass<pass::ConstantFolding>();
+    pass_manager.run_passes(f);
+
+    auto const1 = std::dynamic_pointer_cast<op::Constant>(consumer1->input_value(0).get_node_shared_ptr());
+    auto const2 = std::dynamic_pointer_cast<op::Constant>(consumer2->input_value(0).get_node_shared_ptr());
+
+    ASSERT_TRUE(const1);
+    ASSERT_TRUE(const2);
+    ASSERT_EQ(const1, const2);
+    ASSERT_EQ(const1->get_data_ptr(), const2->get_data_ptr());
+}
+
+TEST(constant_folding, const_unsqueeze_no_data_copy)
+{
+    auto const_data = op::Constant::create(element::f32, Shape{1, 64}, {1});
+    auto const_reshape = op::Constant::create(element::i64, Shape{1}, {0});
+    auto reshape = std::make_shared<op::v0::Unsqueeze>(const_data, const_reshape);
+    auto consumer1 = std::make_shared<op::Relu>(reshape);
+    auto consumer2 = std::make_shared<op::Relu>(reshape);
+
+    auto f = std::make_shared<Function>(NodeVector{consumer1, consumer2}, ParameterVector{});
+
+    pass::Manager pass_manager;
+    pass_manager.register_pass<pass::ConstantFolding>();
+    pass_manager.run_passes(f);
+
+    auto const1 = std::dynamic_pointer_cast<op::Constant>(consumer1->input_value(0).get_node_shared_ptr());
+    auto const2 = std::dynamic_pointer_cast<op::Constant>(consumer2->input_value(0).get_node_shared_ptr());
+
+    ASSERT_TRUE(const1);
+    ASSERT_TRUE(const2);
+    ASSERT_EQ(const1, const2);
+    ASSERT_EQ(const1->get_data_ptr(), const2->get_data_ptr());
+}
+
 TEST(constant_folding, constant_transpose)
 {
     Shape shape_in{2, 4};