Fixed framework name attribute in mapping file. #5046

Merged
9 changes: 5 additions & 4 deletions model-optimizer/mo/front/caffe/loader.py
@@ -169,7 +169,7 @@ def caffe_pb_to_nx(graph, proto, model):
     # Blobs in prototxt model can be reused by inplace layer.
     # This requires loading of pb layers in order and tracking the latest
     # layer that writes a particular blob.
-    blob_producers = {}  # maps layer blob name to the layer name and port
+    blob_producers = {}  # maps layer blob name to node id in graph, port and layer name
     proto_layers = get_layers(proto)
     model_layers = None
     if model:
@@ -239,7 +239,7 @@ def caffe_pb_to_nx(graph, proto, model):
         # Input is defined at the top level of proto instead of distinct Input layer
         graph.add_node(input_name, pb=None, model_pb=None, type='GlobalInput', name=input_name, shape=input_dim,
                        kind='op')
-        blob_producers[input_name] = (input_name, 0)
+        blob_producers[input_name] = (input_name, 0, input_name)

     used_blobs = set()
     for i, layer in enumerate(proto_layers):
@@ -280,6 +280,7 @@ def caffe_pb_to_nx(graph, proto, model):
                 input_dims.append(np.array(list(dims), dtype=np.int64))
                 input_names.append(layer.name)

+        layer_name = layer.name
         layer.name = graph.unique_id(layer.name)
         graph.add_node(layer.name, pb=layer, model_pb=model_layer, kind='op', type='Parameter')

@@ -292,7 +293,7 @@ def caffe_pb_to_nx(graph, proto, model):
         for src_port, top in enumerate(layer.top):
             if top in blob_producers:
                 log.debug("Detected reuse of blob {} by layer {}".format(top, layer.name))
-            blob_producers[top] = (layer.name, src_port)
+            blob_producers[top] = (layer.name, src_port, layer_name)

     # Tensor names information corresponding to a node is stored on outgoing edges.
     # As output nodes do not have outgoing edges, fake outputs are required. In the following code
@@ -320,7 +321,7 @@ def add_edge_caffe(graph: Graph, bottom: str, dst_layer: str, blob_producers: di
         'in': dst_port,
         'name': bottom,
         # debug anchor for a framework name, out port and tensor name
-        'fw_tensor_debug_info': [(src_layer, src_port, bottom)],
+        'fw_tensor_debug_info': [(blob_producers[bottom][2], src_port, bottom)],
         'in_attrs': ['in', 'name'],
         'out_attrs': ['out', 'name'],
         'data_attrs': ['fw_tensor_debug_info']
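The caffe changes above widen each blob_producers entry from a (node id, port) pair to a triple that also carries the original framework layer name, which matters because layer.name is rewritten by graph.unique_id() before the node is added. A minimal sketch of the bookkeeping, with illustrative names rather than the Model Optimizer API:

blob_producers = {}  # blob name -> (node id in graph, out port, framework layer name)

def register_layer(node_id, fw_name, tops):
    # an in-place layer (top == bottom) simply overwrites the entry,
    # so consumers always attach to the latest writer of the blob
    for src_port, top in enumerate(tops):
        blob_producers[top] = (node_id, src_port, fw_name)

register_layer('conv1', 'conv1', ['data'])
register_layer('relu1_0', 'relu1', ['data'])  # in-place ReLU reusing blob 'data'
assert blob_producers['data'] == ('relu1_0', 0, 'relu1')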
2 changes: 1 addition & 1 deletion model-optimizer/mo/front/extractor.py
@@ -753,7 +753,7 @@ def add_outputs_identity(graph: Graph, outputs: list, add_edge: callable, params
     for output in outputs:
         fake_node_name = graph.unique_id(output)
         graph.add_node(fake_node_name, name=fake_node_name, identity=True, kind='op', op='Identity',
-                       infer=None, needs_removal=True)
+                       infer=None, needs_removal=True, symbol_dict={'op': 'Identity'})
         add_edge(graph, output, fake_node_name, **params)


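For context on the symbol_dict addition: tensor-name debug info lives on outgoing edges, so a real output node has nowhere to keep it, and add_outputs_identity hangs a throwaway Identity sink off each output. The sketch below imitates that pattern on a bare networkx MultiDiGraph (the structure mo's Graph builds on); node and attribute values here are illustrative, not taken from a real model:

import networkx as nx

graph = nx.MultiDiGraph()
graph.add_node('last_conv', kind='op')

# fake Identity sink; needs_removal marks it for deletion once Result nodes exist
fake_node_name = 'last_conv/fake_output'
graph.add_node(fake_node_name, name=fake_node_name, identity=True, kind='op',
               op='Identity', infer=None, needs_removal=True)

# the (output, fake output) edge is what actually carries the tensor name
graph.add_edge('last_conv', fake_node_name, **{
    'out': 0, 'in': 0,
    'fw_tensor_debug_info': [('last_conv', 0, 'last_conv')],
    'data_attrs': ['fw_tensor_debug_info'],
})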
30 changes: 17 additions & 13 deletions model-optimizer/mo/front/mxnet/extractors/utils.py
@@ -101,21 +101,25 @@ def has(self, key):

 def get_mxnet_node_edges(node: dict, node_id: [int, str], nodes_list: list, index_node_key: dict):
     edge_list = []
+    used_indices = []
     for in_port, src_node_id in enumerate(node['inputs']):
-        src_node = src_node_id[0]
-        dest_port = src_node_id[1]
-        edge_attrs = {
-            'in': in_port,
-            'out': dest_port,
-            # debug anchor for framework name, out port and tensor name
-            'fw_tensor_debug_info': [(index_node_key[src_node], src_node_id[1], nodes_list[src_node]['name'])],
-            'in_attrs': ['in'],
-            'out_attrs': ['out'],
-            'data_attrs': ['fw_tensor_debug_info']
-        }
-        edge = (index_node_key[src_node], index_node_key[node_id], edge_attrs)
+        edge = create_mxnet_edge(index_node_key[src_node_id[0]], index_node_key[node_id], in_port, src_node_id[1], nodes_list[src_node_id[0]]['name'])
         edge_list.append(edge)
-    return edge_list
+        used_indices.append(src_node_id[0])
+    return edge_list, used_indices
+
+
+def create_mxnet_edge(src_node_id: str, dst_node_id: str, src_port: int, dst_port: int, framework_name: str):
+    edge_attrs = {
+        'in': src_port,
+        'out': dst_port,
+        # debug anchor for framework name, out port and tensor name
+        'fw_tensor_debug_info': [(framework_name, dst_port, framework_name)],
+        'in_attrs': ['in'],
+        'out_attrs': ['out'],
+        'data_attrs': ['fw_tensor_debug_info']
+    }
+    return src_node_id, dst_node_id, edge_attrs


 def get_mxnet_layer_attrs(json_dic: dict):
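The extracted create_mxnet_edge helper returns a (src, dst, attrs) tuple, which is exactly the shape networkx's add_edges_from consumes. A self-contained sketch of that contract, with a stand-in function rather than an import from the module above:

import networkx as nx

def make_edge(src, dst, in_port, out_port, fw_name):
    # same tuple shape as create_mxnet_edge: (source id, destination id, attrs)
    return src, dst, {'in': in_port, 'out': out_port,
                      'fw_tensor_debug_info': [(fw_name, out_port, fw_name)],
                      'in_attrs': ['in'], 'out_attrs': ['out'],
                      'data_attrs': ['fw_tensor_debug_info']}

g = nx.MultiDiGraph()
g.add_edges_from([make_edge('conv0', 'relu0', 0, 0, 'conv0')])
assert g['conv0']['relu0'][0]['fw_tensor_debug_info'] == [('conv0', 0, 'conv0')]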
22 changes: 19 additions & 3 deletions model-optimizer/mo/front/mxnet/loader.py
@@ -8,8 +8,9 @@
 import mxnet as mx
 import numpy as np

+from mo.front.extractor import add_outputs_identity
 from mo.front.mxnet.extractor import common_mxnet_fields
-from mo.front.mxnet.extractors.utils import get_mxnet_node_edges, load_params, init_rnn_states
+from mo.front.mxnet.extractors.utils import get_mxnet_node_edges, load_params, init_rnn_states, create_mxnet_edge
 from mo.front.mxnet.nd_to_params import build_params_file
 from mo.graph.graph import Node, Graph
 from mo.utils.error import Error
@@ -50,7 +51,8 @@ def parse_input_model(input_model):
     return model_name, iteration_number


-def load_symbol_def(input_model_name, input_symbol, input_names: str = '', nd_prefix_name: str = '', pretrained_model_name: str = '', legacy_mxnet_model: bool = False):
+def load_symbol_def(input_model_name, input_symbol, input_names: str = '', nd_prefix_name: str = '',
+                    pretrained_model_name: str = '', legacy_mxnet_model: bool = False):
     if not nd_prefix_name and not pretrained_model_name:
         # model name always has extension 'param'
         try:
@@ -95,6 +97,7 @@ def symbol2nx(graph, model_nodes, model_params, input_names: str = ''):

     # as mxnet contain input layers as index of layer, for correct set up edges, we need provide index of layer with name of graph node
     index_node_keys = {}
+    fw_name_map = {}
     for i, node in enumerate(model_nodes):
         if node['name'] in model_params._arg_params and node['name'] not in input_names:
             node['value'] = np.array(model_params._arg_params[node['name']].asnumpy(), dtype=np.float32)
@@ -106,12 +109,25 @@ def symbol2nx(graph, model_nodes, model_params, input_names: str = ''):
             graph.add_node(node_name, **symbol_attrs(node))
             graph.node[node_name].update(common_mxnet_fields(Node(graph, node_name)))
         index_node_keys[i] = node_name
+        fw_name_map[node_name] = node['name']

+    used_indices_list = []
     for i, attrs in enumerate(model_nodes):
         node = attrs
-        edges = get_mxnet_node_edges(node, i, list(model_nodes), index_node_keys)
+        edges, used_indices = get_mxnet_node_edges(node, i, list(model_nodes), index_node_keys)
         if len(edges) > 0:
             graph.add_edges_from(edges)
+            used_indices_list += used_indices
+
+    output_ids = [index_node_keys[node_id] for node_id in set(range(1, len(model_nodes))) - set(used_indices_list)]
+
+    # Tensor names information corresponding to a node is stored on outgoing edges.
+    # As output nodes do not have outgoing edges, fake outputs are required. In the following code
+    # for each output Identity node is added, and tensor name for the output is kept
+    # on (output, fake output) edge. After Result nodes adding transformation fake outputs
+    # are deleted from graph.
+    add_outputs_identity(graph, output_ids, lambda g, output_id, fake_node_id, fw_name: g.add_edges_from([
+        create_mxnet_edge(output_id, fake_node_id, 0, 0, fw_name[output_id])]), {'fw_name': fw_name_map})

     return graph
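The new output_ids computation finds outputs by elimination: every node index that fed some edge lands in used_indices_list, so whatever survives the set difference has no consumers and must be a graph output (index 0 is excluded, mirroring the loader above). In miniature, with toy node names:

# indices never consumed as an edge source are outputs
model_nodes = ['data', 'fc7_weight', 'fc7', 'softmax']
inputs_of = {2: [0, 1], 3: [2]}  # fc7 <- data, fc7_weight; softmax <- fc7

used = [src for deps in inputs_of.values() for src in deps]
outputs = set(range(1, len(model_nodes))) - set(used)
assert outputs == {3}  # only softmax has no consumers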
2 changes: 1 addition & 1 deletion model-optimizer/mo/front/onnx/loader.py
@@ -109,7 +109,7 @@ def protobuf2nx(graph: Graph, pb):
                 'out': src_port,
                 'in': 0,
                 'name': out,
-                'fw_tensor_debug_info': [(id, src_port, out)],
+                'fw_tensor_debug_info': [(node_id(node), src_port, out)],
                 'in_attrs': ['in', 'name'],
                 'out_attrs': ['out', 'name'],
                 'data_attrs': ['fw_tensor_debug_info']
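The one-line ONNX fix deserves a gloss: the old tuple referenced the bare name id, which resolves to Python's builtin function, so the debug record stored a function object instead of the producing node's identifier. The failure mode in isolation (node_id(node) is the loader's helper; only the broken value is reproduced here):

src_port, out = 0, 'prob'
broken = (id, src_port, out)  # old code: stores <built-in function id>
assert callable(broken[0])    # a builtin, not a framework node name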
7 changes: 1 addition & 6 deletions model-optimizer/mo/graph/graph.py
@@ -1045,12 +1045,8 @@ def add_opoutput(graph: Graph, node_name: str, port: int, cut: bool = True):
     if cut and len(node.out_edges()) != 0:
         opoutput_node = Result(graph).create_node_on_port(node, port, {'name': node_name + '/sink_port_' + str(port)})
     else:
-        tensor_names = None
-        if node.has_valid('op') and port in node.out_ports():
-            tensor_names = node.out_port(port).get_tensor_names()
         opoutput_node = Result(graph).create_node([(node, port)], {'name': node_name + '/sink_port_' + str(port)})
-        opoutput_node.in_edge()['data_attrs'] = ['fw_tensor_debug_info']
-        opoutput_node.in_edge()['fw_tensor_debug_info'] = [(node_name, port, tensor_names)]

     log.debug('Sink: {} for node {}'.format(opoutput_node.id, node_name))
     log.debug(str(graph.node[opoutput_node.id]))
@@ -1125,8 +1121,7 @@ def set_edge_attribute_between_nodes(node1: Node, node2: Node, attr_name: str, n
         out_port = edge['out']
         out_node = node1.out_node(out_port)
         if out_node.id == node2.id:
-            if attr_name in edge:
-                edge[attr_name] = new_value
+            edge[attr_name] = new_value

 # All functions below are deprecated and will be removed in next release
 # Please, use methods from Graph/Node classes instead
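Dropping the 'if attr_name in edge' guard means the setter now creates the attribute when the edge lacks it, presumably so the new debug-info plumbing can attach fw_tensor_debug_info to edges that never had one. With a plain dict standing in for an edge:

edge = {'out': 0, 'in': 0}  # no fw_tensor_debug_info yet
edge['fw_tensor_debug_info'] = [('conv1', 0, 'conv1')]  # the old guard would have skipped this
assert 'fw_tensor_debug_info' in edge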