Skip to content

Commit

Permalink
Fixed framework name attribute in mapping file. (#5046)
Browse files Browse the repository at this point in the history
* Fixed framework name attribute for onnx, mxnet.

* Fixed framework name attribute for caffe.

* Removed unnecessary attribute setting from add_opoutput()

* Added insertion of Identity nodes for graph outputs in the MXNet loader.

* Removed unnecessary reformat.

* Removed unnecessary reformat.

* Added check for empty name.

* Used nodes indices instead of node names in loader.

* Code refactoring; fixed a small bug.
  • Loading branch information
popovaan authored Apr 8, 2021
1 parent b8f8c9c commit cadff03
Show file tree
Hide file tree
Showing 6 changed files with 50 additions and 33 deletions.
16 changes: 8 additions & 8 deletions model-optimizer/mo/front/caffe/loader.py
Original file line number Diff line number Diff line change
Expand Up @@ -169,7 +169,7 @@ def caffe_pb_to_nx(graph, proto, model):
# Blobs in prototxt model can be reused by inplace layer.
# This requires loading of pb layers in order and tracking the latest
# layer that writes a particular blob.
blob_producers = {} # maps layer blob name to the layer name and port
blob_producers = {} # maps a blob name to a tuple of (node id in graph, output port, framework layer name)
proto_layers = get_layers(proto)
model_layers = None
if model:
Expand Down Expand Up @@ -239,7 +239,7 @@ def caffe_pb_to_nx(graph, proto, model):
# Input is defined at the top level of proto instead of distinct Input layer
graph.add_node(input_name, pb=None, model_pb=None, type='GlobalInput', name=input_name, shape=input_dim,
kind='op')
blob_producers[input_name] = (input_name, 0)
blob_producers[input_name] = (input_name, 0, input_name)

used_blobs = set()
for i, layer in enumerate(proto_layers):
Expand Down Expand Up @@ -280,19 +280,19 @@ def caffe_pb_to_nx(graph, proto, model):
input_dims.append(np.array(list(dims), dtype=np.int64))
input_names.append(layer.name)

layer.name = graph.unique_id(layer.name)
graph.add_node(layer.name, pb=layer, model_pb=model_layer, kind='op', type='Parameter')
node_id = graph.unique_id(layer.name)
graph.add_node(node_id, pb=layer, model_pb=model_layer, kind='op', type='Parameter')

# connect inputs based on blob_producers dictionary
for dst_port, bottom in enumerate(layer.bottom):
add_edge_caffe(graph, bottom, layer.name, blob_producers, dst_port)
add_edge_caffe(graph, bottom, node_id, blob_producers, dst_port)
used_blobs.add(bottom)

# update blob producers dictionary by output ports
for src_port, top in enumerate(layer.top):
if top in blob_producers:
log.debug("Detected reuse of blob {} by layer {}".format(top, layer.name))
blob_producers[top] = (layer.name, src_port)
log.debug("Detected reuse of blob {} by layer {}".format(top, node_id))
blob_producers[top] = (node_id, src_port, layer.name)

# Tensor names information corresponding to a node is stored on outgoing edges.
# As output nodes do not have outgoing edges, fake outputs are required. In the following code
Expand Down Expand Up @@ -320,7 +320,7 @@ def add_edge_caffe(graph: Graph, bottom: str, dst_layer: str, blob_producers: di
'in': dst_port,
'name': bottom,
# debug anchor for a framework name, out port and tensor name
'fw_tensor_debug_info': [(src_layer, src_port, bottom)],
'fw_tensor_debug_info': [(blob_producers[bottom][2], src_port, bottom)],
'in_attrs': ['in', 'name'],
'out_attrs': ['out', 'name'],
'data_attrs': ['fw_tensor_debug_info']
Expand Down
2 changes: 1 addition & 1 deletion model-optimizer/mo/front/extractor.py
Original file line number Diff line number Diff line change
Expand Up @@ -753,7 +753,7 @@ def add_outputs_identity(graph: Graph, outputs: list, add_edge: callable, params
for output in outputs:
fake_node_name = graph.unique_id(output)
graph.add_node(fake_node_name, name=fake_node_name, identity=True, kind='op', op='Identity',
infer=None, needs_removal=True)
infer=None, needs_removal=True, symbol_dict={'op': 'Identity'})
add_edge(graph, output, fake_node_name, **params)


Expand Down
31 changes: 18 additions & 13 deletions model-optimizer/mo/front/mxnet/extractors/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -101,21 +101,26 @@ def has(self, key):

def get_mxnet_node_edges(node: dict, node_id: [int, str], nodes_list: list, index_node_key: dict):
edge_list = []
used_indices = set()
for in_port, src_node_id in enumerate(node['inputs']):
src_node = src_node_id[0]
dest_port = src_node_id[1]
edge_attrs = {
'in': in_port,
'out': dest_port,
# debug anchor for framework name, out port and tensor name
'fw_tensor_debug_info': [(index_node_key[src_node], src_node_id[1], nodes_list[src_node]['name'])],
'in_attrs': ['in'],
'out_attrs': ['out'],
'data_attrs': ['fw_tensor_debug_info']
}
edge = (index_node_key[src_node], index_node_key[node_id], edge_attrs)
edge = create_mxnet_edge(index_node_key[src_node_id[0]], index_node_key[node_id], in_port, src_node_id[1],
nodes_list[src_node_id[0]]['name'])
edge_list.append(edge)
return edge_list
used_indices.add(src_node_id[0])
return edge_list, used_indices


def create_mxnet_edge(src_node_id: str, dst_node_id: str, src_port: int, dst_port: int, framework_name: str):
    """Build an edge tuple suitable for Graph.add_edges_from.

    Returns a (source node id, destination node id, attributes) triple where the
    attributes carry the ports and a 'fw_tensor_debug_info' debug anchor holding
    the original framework layer name, the out port, and the tensor name.
    """
    attributes = dict(
        # NOTE: 'in' carries src_port and 'out' carries dst_port by existing convention.
        [('in', src_port),
         ('out', dst_port),
         # debug anchor for framework name, out port and tensor name
         ('fw_tensor_debug_info', [(framework_name, dst_port, framework_name)]),
         ('in_attrs', ['in']),
         ('out_attrs', ['out']),
         ('data_attrs', ['fw_tensor_debug_info'])]
    )
    return src_node_id, dst_node_id, attributes


def get_mxnet_layer_attrs(json_dic: dict):
Expand Down
22 changes: 19 additions & 3 deletions model-optimizer/mo/front/mxnet/loader.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,8 +8,9 @@
import mxnet as mx
import numpy as np

from mo.front.extractor import add_outputs_identity
from mo.front.mxnet.extractor import common_mxnet_fields
from mo.front.mxnet.extractors.utils import get_mxnet_node_edges, load_params, init_rnn_states
from mo.front.mxnet.extractors.utils import get_mxnet_node_edges, load_params, init_rnn_states, create_mxnet_edge
from mo.front.mxnet.nd_to_params import build_params_file
from mo.graph.graph import Node, Graph
from mo.utils.error import Error
Expand Down Expand Up @@ -50,7 +51,8 @@ def parse_input_model(input_model):
return model_name, iteration_number


def load_symbol_def(input_model_name, input_symbol, input_names: str = '', nd_prefix_name: str = '', pretrained_model_name: str = '', legacy_mxnet_model: bool = False):
def load_symbol_def(input_model_name, input_symbol, input_names: str = '', nd_prefix_name: str = '',
pretrained_model_name: str = '', legacy_mxnet_model: bool = False):
if not nd_prefix_name and not pretrained_model_name:
# model name always has extension 'param'
try:
Expand Down Expand Up @@ -95,6 +97,7 @@ def symbol2nx(graph, model_nodes, model_params, input_names: str = ''):

# mxnet refers to input layers by index, so to set up edges correctly we need a mapping from each layer index to its graph node name
index_node_keys = {}
fw_name_map = {}
for i, node in enumerate(model_nodes):
if node['name'] in model_params._arg_params and node['name'] not in input_names:
node['value'] = np.array(model_params._arg_params[node['name']].asnumpy(), dtype=np.float32)
Expand All @@ -106,12 +109,25 @@ def symbol2nx(graph, model_nodes, model_params, input_names: str = ''):
graph.add_node(node_name, **symbol_attrs(node))
graph.node[node_name].update(common_mxnet_fields(Node(graph, node_name)))
index_node_keys[i] = node_name
fw_name_map[node_name] = node['name']

used_indices_set = set()
for i, attrs in enumerate(model_nodes):
node = attrs
edges = get_mxnet_node_edges(node, i, list(model_nodes), index_node_keys)
edges, used_indices = get_mxnet_node_edges(node, i, list(model_nodes), index_node_keys)
if len(edges) > 0:
graph.add_edges_from(edges)
used_indices_set = used_indices_set.union(used_indices)

output_ids = [index_node_keys[node_id] for node_id in set(range(len(model_nodes))) - used_indices_set]

# Tensor names information corresponding to a node is stored on outgoing edges.
# As output nodes do not have outgoing edges, fake outputs are required. In the following code
# for each output Identity node is added, and tensor name for the output is kept
# on (output, fake output) edge. After Result nodes adding transformation fake outputs
# are deleted from graph.
add_outputs_identity(graph, output_ids, lambda g, output_id, fake_node_id, fw_name: g.add_edges_from([
create_mxnet_edge(output_id, fake_node_id, 0, 0, fw_name[output_id])]), {'fw_name': fw_name_map})

return graph

Expand Down
5 changes: 3 additions & 2 deletions model-optimizer/mo/front/onnx/loader.py
Original file line number Diff line number Diff line change
Expand Up @@ -74,7 +74,8 @@ def protobuf2nx(graph: Graph, pb):
# important)
for node in graph_pb.node:
# create an NX node
id = graph.unique_id(node_id(node))
fw_name = node_id(node)
id = graph.unique_id(fw_name)
graph.add_node(id, pb=node, kind='op')

# add incoming edges based on data_nodes_map
Expand Down Expand Up @@ -109,7 +110,7 @@ def protobuf2nx(graph: Graph, pb):
'out': src_port,
'in': 0,
'name': out,
'fw_tensor_debug_info': [(id, src_port, out)],
'fw_tensor_debug_info': [(fw_name, src_port, out)],
'in_attrs': ['in', 'name'],
'out_attrs': ['out', 'name'],
'data_attrs': ['fw_tensor_debug_info']
Expand Down
7 changes: 1 addition & 6 deletions model-optimizer/mo/graph/graph.py
Original file line number Diff line number Diff line change
Expand Up @@ -1045,12 +1045,8 @@ def add_opoutput(graph: Graph, node_name: str, port: int, cut: bool = True):
if cut and len(node.out_edges()) != 0:
opoutput_node = Result(graph).create_node_on_port(node, port, {'name': node_name + '/sink_port_' + str(port)})
else:
tensor_names = None
if node.has_valid('op') and port in node.out_ports():
tensor_names = node.out_port(port).get_tensor_names()
opoutput_node = Result(graph).create_node([(node, port)], {'name': node_name + '/sink_port_' + str(port)})
opoutput_node.in_edge()['data_attrs'] = ['fw_tensor_debug_info']
opoutput_node.in_edge()['fw_tensor_debug_info'] = [(node_name, port, tensor_names)]

log.debug('Sink: {} for node {}'.format(opoutput_node.id, node_name))
log.debug(str(graph.node[opoutput_node.id]))
Expand Down Expand Up @@ -1125,8 +1121,7 @@ def set_edge_attribute_between_nodes(node1: Node, node2: Node, attr_name: str, n
out_port = edge['out']
out_node = node1.out_node(out_port)
if out_node.id == node2.id:
if attr_name in edge:
edge[attr_name] = new_value
edge[attr_name] = new_value

# All functions below are deprecated and will be removed in next release
# Please, use methods from Graph/Node classes instead
Expand Down

0 comments on commit cadff03

Please sign in to comment.