Fixed duplicate names appearing in the graph after the fuse mul transformation (openvinotoolkit#5842)

* Added manual setting of a unique name for the data node.

* Added attribute checks to the unit test.

* Removed redundant line break.
popovaan authored and yekruglov committed Jun 7, 2021
1 parent 9f5267b commit 21c1373
Showing 2 changed files with 60 additions and 0 deletions.
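Background for the change, illustrated outside the Model Optimizer API: when _fuse_mul re-sets the source of the weights connection, the name of the new input data node is copied from the data node on the w_const port, so two data nodes can end up sharing one name, and constant folding then produces op nodes with duplicate names. The sketch below reproduces that failure mode and the conditional rename with plain dicts; the node names and the 'mul_1/fused_unique_name' stand-in are made up for illustration and are not taken from the commit.

# Plain-dict stand-ins for the two data nodes after reconnection (not the MO Graph/Node API).
const_out_data = {'id': 'const_conv_1_w/out_data', 'name': 'conv_1_w_data'}
fuse_node_in_data = {'id': 'conv_1/in_1_data', 'name': 'conv_1_w_data'}  # name was copied -> collision

# Constant folding derives op node names from data node names, so the shared
# data node name would surface as two ops with the same name in the final graph.
op_names = [const_out_data['name'], fuse_node_in_data['name']]
assert len(op_names) != len(set(op_names))  # duplicate names before the fix

# Mirror of the added check: rename only when the copied name actually collides.
if fuse_node_in_data['name'] == const_out_data['name']:
    fuse_node_in_data['name'] = 'mul_1/fused_unique_name'  # stand-in for graph.unique_id(mul_name)

assert fuse_node_in_data['name'] != const_out_data['name']  # no collision after the fix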
11 changes: 11 additions & 0 deletions model-optimizer/mo/middle/passes/fusing/fuse_linear_ops.py
@@ -111,6 +111,17 @@ def _fuse_mul(graph: Graph, node: Node, fuse_nodes: list, backward: bool = True)
        weights_port.get_connection().set_source(w_mul.out_port(0))
        w_const.connect(w_mul.in_port(tensor_port.idx))

        fuse_node_in_data = fuse_node.in_node(weights_port.idx)
        w_const_out_data = w_const.node.out_node(w_const.idx)

        # During this reconnection the new data node's name is copied from the data node
        # outgoing from the w_const port. Duplicate data node names lead to duplicate
        # op node names after constant folding, so we manually set a unique name
        # for the new data node.
        if fuse_node_in_data.soft_get('name') == w_const_out_data.soft_get('name') and \
                fuse_node_in_data.soft_get('name', None) is not None:
            fuse_node.in_node(weights_port.idx)['name'] = graph.unique_id(mul_name)

        # If we fuse in the backward direction we should multiply biases if they exist
        if backward and len(fuse_node.in_ports()) == 3 and not fuse_node.in_port(2).disconnected() and \
                not fuse_node.has_and_set('shape_input'):
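The graph.unique_id(mul_name) call above asks the graph for an identifier that no existing node uses. As a rough standalone sketch of that idea (a suffix counter over already-taken names; this is an assumption about the general pattern, not the actual Graph.unique_id implementation):

def unique_id(taken_names, prefix):
    # Return prefix if it is free, otherwise append the first free numeric suffix.
    # Simplified sketch; details of the real Model Optimizer helper may differ.
    if prefix not in taken_names:
        taken_names.add(prefix)
        return prefix
    i = 1
    while prefix + str(i) in taken_names:
        i += 1
    name = prefix + str(i)
    taken_names.add(name)
    return name

taken = {'mul_1', 'mul_11'}
print(unique_id(taken, 'mul_1'))   # 'mul_12' -- first suffixed variant that is free
print(unique_id(taken, 'conv_1'))  # 'conv_1' -- the prefix itself was free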
@@ -817,6 +817,55 @@ def test_fuse_mul_to_deconv_1(self):
        (flag, resp) = compare_graphs(graph, graph_ref, 'placeholder_1', 'placeholder_1')
        self.assertTrue(flag, resp)

    def test_fuse_mul_data_nodes_names(self):
        graph = build_graph(nodes_attributes,
                            [('placeholder_1', 'placeholder_1_data'),
                             ('placeholder_1_data', 'mul_1'),
                             ('const_mul_1_w', 'mul_1_w'),
                             ('mul_1_w', 'mul_1'),
                             ('mul_1', 'mul_1_data'),
                             ('mul_1_data', 'conv_1'),
                             ('const_conv_1_w', 'conv_1_w'),
                             ('const_conv_1_b', 'conv_1_b'),
                             ('conv_1_w', 'conv_1'),
                             ('conv_1_b', 'conv_1'),
                             ('conv_1', 'conv_1_data'),
                             ('conv_1_data', 'op_output')
                             ],
                            {'placeholder_1_data': {'shape': np.array([1, 227, 227, 3])},
                             'mul_1_data': {'shape': np.array([1, 227, 227, 3])},
                             'const_mul_1_w': {'shape': np.array([3]), 'value': np.array([1, 2, 3])},
                             'mul_1_w': {'shape': np.array([3]), 'value': np.array([1, 2, 3])},
                             'const_conv_1_w': {'shape': np.array([11, 11, 3, 96]),
                                                'value': np.ones((11, 11, 3, 96))},
                             'conv_1_w': {'shape': np.array([11, 11, 3, 96]), 'value': np.ones((11, 11, 3, 96)),
                                          'output_channel_dim': 3, 'input_channel_dim': 2,
                                          'dims_number': 4},
                             'const_conv_1_b': {'shape': np.array([96]), 'value': np.zeros(96)},
                             'conv_1_b': {'shape': np.array([96]), 'value': np.zeros(96)},
                             'conv_1_data': {}
                             })

        _fuse_mul(graph, Node(graph, 'mul_1'), [Node(graph, 'conv_1')], backward=False)

        conv_node = Node(graph, 'conv_1')
        conv_in_data_name = conv_node.in_node(1)['name']
        const_node = Node(graph, 'const_conv_1_w')
        const_out_data_name = const_node.out_node(0)['name']
        mul_node = Node(graph, 'mul_1')
        conv_in_data = conv_node.in_node(1)

        # Check that the transformation doesn't produce identical data node names,
        # as this may lead to Const ops with identical names appearing in the graph.
        self.assertFalse(conv_in_data_name == const_out_data_name)

        # Attributes required for fusing are kept on the data nodes.
        # These checks ensure that _fuse_mul doesn't remove any of them.
        self.assertTrue(conv_in_data['output_channel_dim'] == 3)
        self.assertTrue(conv_in_data['input_channel_dim'] == 2)
        self.assertTrue(conv_in_data['dims_number'] == 4)
        self.assertTrue(mul_node['can_be_fused'] is True)


# Unit tests for fuse_linear_ops
class FuseLinOpsTests(unittest.TestCase):
