Skip to content
This repository has been archived by the owner on Nov 17, 2023. It is now read-only.

Commit

Permalink
[v1.x] Update onnx export support for FullyConnected and add unit tests (#19679)
Browse files Browse the repository at this point in the history

* Add tests for FullyConnected onnx export and fix export operator so it works properly.

* Remove unused variables.

* Add coverage to onnx tests.

* Condense code.

* Add more test cases.

* Revert "Add coverage to onnx tests."

This reverts commit 86270bb.

Co-authored-by: Joe Evans <[email protected]>
  • Loading branch information
josephevans and Joe Evans authored Dec 16, 2020
1 parent 806d91f commit 6b65b91
Show file tree
Hide file tree
Showing 2 changed files with 33 additions and 42 deletions.
59 changes: 17 additions & 42 deletions python/mxnet/contrib/onnx/mx2onnx/_op_translations.py
Original file line number Diff line number Diff line change
@mx_op.register("FullyConnected")
def convert_fully_connected(node, **kwargs):
    """Map MXNet's FullyConnected operator attributes to onnx's Gemm operator
    and return the created node.

    Parameters
    ----------
    node : dict
        MXNet symbol node entry (name, inputs, attrs) as produced by the
        exporter graph walk.
    **kwargs
        Exporter context; this converter reads 'in_type' (ONNX tensor
        element type of the input) via get_inputs/kwargs.

    Returns
    -------
    list
        ONNX NodeProto objects: an optional Flatten node, an optional
        constant-bias node, and the final Gemm node named after the MXNet op.
    """
    from onnx.helper import make_node
    name, input_nodes, attrs = get_inputs(node, kwargs)

    input_type = kwargs['in_type']
    # dtype of the data input — the synthetic zero bias must match it.
    dtype = onnx.mapping.TENSOR_TYPE_TO_NP_TYPE[input_type]
    flatten = get_boolean_attribute_value(attrs, "flatten")
    no_bias = get_boolean_attribute_value(attrs, "no_bias")

    nodes = []
    if flatten:
        # MXNet flattens trailing dims to 2-D before the matmul; mirror that
        # with an explicit ONNX Flatten node feeding Gemm.
        nodes.append(make_node("Flatten", [input_nodes[0]], [name + "_flatten0_out"]))
        in_nodes = [name + "_flatten0_out", input_nodes[1]]
    else:
        in_nodes = [input_nodes[0], input_nodes[1]]

    if no_bias:
        # ONNX Gemm requires a C input; supply a scalar zero of the data dtype.
        create_const_scalar_node(name + "_bias", np.array([0], dtype=dtype), kwargs)
        in_nodes.append(name + "_bias")
    else:
        in_nodes.append(input_nodes[2])

    # transB=1 because MXNet stores the weight as (num_hidden, in_features).
    nodes.append(
        make_node("Gemm", in_nodes, [name], alpha=1.0, beta=1.0, transA=0, transB=1, name=name)
    )

    return nodes


@mx_op.register("BatchNorm")
Expand Down
16 changes: 16 additions & 0 deletions tests/python-pytest/onnx/test_operators.py
Original file line number Diff line number Diff line change
Expand Up @@ -145,3 +145,19 @@ def test_onnx_export_contrib_interleaved_matmul_selfatt_qk(tmp_path, dtype):
M2 = def_model('contrib.interleaved_matmul_selfatt_qk', heads=5)
x2 = mx.nd.random.uniform(0, 1, (7, 5, 4*5*6))
op_export_test('contrib_interleaved_matmul_selfatt_qk_2', M2, [x2], tmp_path)


@pytest.mark.parametrize('dtype', ['float32', 'float64', 'int32', 'int64'])
@pytest.mark.parametrize('num_hidden', [1, 5, 10, 20])
@pytest.mark.parametrize('no_bias', [False, True])
@pytest.mark.parametrize('flatten', [True, False])
def test_onnx_export_fully_connected(tmp_path, dtype, num_hidden, no_bias, flatten):
    """Export FullyConnected to ONNX and check outputs match MXNet's.

    Covers every combination of dtype, hidden size, bias, and flatten.
    """
    M = def_model('FullyConnected', num_hidden=num_hidden, no_bias=no_bias, flatten=flatten)
    # uniform sampling only supports float dtypes, so sample then cast —
    # without the cast the parametrized dtype was silently unused.
    x = mx.nd.random.uniform(-0.5, 0.5, (5, 325)).astype(dtype)
    weight = mx.nd.random.uniform(0, 1, (num_hidden, 325)).astype(dtype)
    args = [x, weight]
    if not no_bias:
        args.append(mx.nd.random.uniform(0, 1, (num_hidden,)).astype(dtype))
    op_export_test('FullyConnected', M, args, tmp_path)


0 comments on commit 6b65b91

Please sign in to comment.