Skip to content
This repository has been archived by the owner on Nov 17, 2023. It is now read-only.

Commit

Permalink
Merge branch 'v1.x' into onnx
Browse files Browse the repository at this point in the history
  • Loading branch information
waytrue17 committed Dec 18, 2020
2 parents d960c03 + d06d705 commit 37ac4dd
Show file tree
Hide file tree
Showing 4 changed files with 56 additions and 7 deletions.
2 changes: 1 addition & 1 deletion ci/docker/runtime_functions.sh
Original file line number Diff line number Diff line change
Expand Up @@ -1277,7 +1277,7 @@ integrationtest_ubuntu_cpu_onnx() {
export PYTHONPATH=./python/
export MXNET_SUBGRAPH_VERBOSE=0
export DMLC_LOG_STACK_TRACE_DEPTH=10
tests/python-pytest/onnx/backend_test.py
#tests/python-pytest/onnx/backend_test.py
COV_ARG="--cov=./ --cov-report=xml --cov-append"
pytest $COV_ARG --verbose tests/python-pytest/onnx/mxnet_export_test.py
pytest $COV_ARG --verbose tests/python-pytest/onnx/test_models.py
Expand Down
2 changes: 1 addition & 1 deletion python/mxnet/contrib/onnx/mx2onnx/_op_translations.py
Original file line number Diff line number Diff line change
Expand Up @@ -829,7 +829,7 @@ def convert_leakyrelu(node, **kwargs):
create_const_scalar_node(name+"_half", np.float32(0.5), kwargs),
make_node("Add", [name+"_erf0_out", name+"_one"], [name+"_add0_out"]),
make_node("Mul", [input_nodes[0], name+"_add0_out"], [name+"_mul0_out"]),
make_node("Mul", [name+"_mul0_out", name+"_half"], [name])
make_node("Mul", [name+"_mul0_out", name+"_half"], [name], name=name)
]
return nodes
else:
Expand Down
19 changes: 14 additions & 5 deletions python/mxnet/contrib/onnx/mx2onnx/export_onnx.py
Original file line number Diff line number Diff line change
Expand Up @@ -116,7 +116,7 @@ def split_params(sym, params):
return arg_params, aux_params

@staticmethod
def get_outputs(sym, params, in_shape, in_label):
def get_outputs(sym, params, in_shape, in_label, in_type):
""" Infer output shapes and return dictionary of output name to shape
:param :class:`~mxnet.symbol.Symbol` sym: symbol to perform infer shape on
Expand All @@ -127,6 +127,7 @@ def get_outputs(sym, params, in_shape, in_label):
:return: dictionary of output name to shape
:rtype: dict of (str, tuple(int, ...))
"""
from onnx import mapping
# remove any input listed in params from sym.list_inputs() and bind them to the input shapes provided
# by user. Also remove in_label, which is the name of the label symbol that may have been used
# as the label for loss during training.
Expand All @@ -146,8 +147,16 @@ def get_outputs(sym, params, in_shape, in_label):
out_names.append(name)

assert len(out_shapes) == len(out_names)

# infer output types
args = {n: mapping.TENSOR_TYPE_TO_NP_TYPE[in_type] for n in sym.list_inputs()}
_, out_type, _ = sym.infer_type(**args)
out_types = [mapping.NP_TYPE_TO_TENSOR_TYPE[o(0).dtype] for o in out_type]

assert len(out_types) == len(out_names)

# bind output shapes with output names
graph_outputs = {n: s for n, s in zip(out_names, out_shapes)}
graph_outputs = {n: {'shape': s, 'dtype': d} for n, s, d in zip(out_names, out_shapes, out_types)}

return graph_outputs

Expand Down Expand Up @@ -210,7 +219,7 @@ def create_onnx_graph_proto(self, sym, params, in_shape, in_type, verbose=False,
index_lookup = []

# Determine output shape
graph_outputs = MXNetGraph.get_outputs(sym, params, in_shape, output_label)
graph_outputs = MXNetGraph.get_outputs(sym, params, in_shape, output_label, in_type)

graph_input_idx = 0
for idx, node in enumerate(mx_graph):
Expand Down Expand Up @@ -273,8 +282,8 @@ def create_onnx_graph_proto(self, sym, params, in_shape, in_type, verbose=False,
onnx_processed_outputs.append(
make_tensor_value_info(
name=nodename,
elem_type=in_type,
shape=graph_outputs[nodename]
elem_type=graph_outputs[nodename]['dtype'],
shape=graph_outputs[nodename]['shape']
)
)
if verbose:
Expand Down
40 changes: 40 additions & 0 deletions tests/python-pytest/onnx/test_operators.py
Original file line number Diff line number Diff line change
Expand Up @@ -125,6 +125,7 @@ def test_onnx_export_broadcast_axis(tmp_path, dtype):
op_export_test('broadcast_axis_3', M3, [x2], tmp_path)


#TODO: onnxruntime does not support float64 for Where
@pytest.mark.parametrize('dtype', ['float32'])
def test_onnx_export_SequenceMask(tmp_path, dtype):
M1 = def_model('SequenceMask', use_sequence_length=True, axis=1, value=-5)
Expand Down Expand Up @@ -208,6 +209,45 @@ def test_onnx_export_fully_connected(tmp_path, dtype, num_hidden, no_bias, flatt
op_export_test('FullyConnected', M, args, tmp_path)


#TODO: onnxruntime does not support float64 for the relu operators
@pytest.mark.parametrize('dtype', ['float32', 'float16'])
@pytest.mark.parametrize('shape', [(1,), (3,), (4, 5), (3, 4, 5)])
@pytest.mark.parametrize('act_type', ['elu', 'leaky', 'prelu', 'selu', 'gelu'])
def test_onnx_export_LeakyReLU(tmp_path, dtype, shape, act_type):
    """Export LeakyReLU with each parametrized act_type and verify the ONNX round-trip.

    Bug fix: act_type was hard-coded to 'leaky', so the act_type
    parametrization above was silently ignored and only the 'leaky'
    variant was ever exported and tested.
    """
    M = def_model('LeakyReLU', act_type=act_type)
    x = mx.nd.random.uniform(-0.5, 0.5, shape=shape, dtype=dtype)
    op_export_test('LeakyReLU', M, [x], tmp_path)


@pytest.mark.parametrize('dtype', ['float32', 'float64', 'float16', 'int32', 'int64'])
def test_onnx_export_Concat(tmp_path, dtype):
    """Verify ONNX export of Concat along axis 0 (three inputs) and axis 1 (two inputs)."""
    first = mx.nd.array([[1, 1], [2, 2]], dtype=dtype)
    second = mx.nd.array([[3, 3], [4, 4], [5, 5]], dtype=dtype)
    third = mx.nd.array([[6, 6], [7, 7], [8, 8]], dtype=dtype)
    concat_axis0 = def_model('Concat', dim=0)
    concat_axis1 = def_model('Concat', dim=1)
    # Axis-0 concat accepts mismatched leading dims; axis-1 needs equal row counts.
    op_export_test('Concat_1', concat_axis0, [first, second, third], tmp_path)
    op_export_test('Concat_2', concat_axis1, [second, third], tmp_path)


@pytest.mark.parametrize('dtype', ['float32', 'float64', 'float16'])
@pytest.mark.parametrize('shape', [(1,), (3,), (4, 5), (3, 4, 5)])
def test_onnx_export_elemwise_add(tmp_path, dtype, shape):
    """Verify ONNX export of elemwise_add on two same-shaped random tensors."""
    model = def_model('elemwise_add')
    lhs = mx.nd.random.uniform(-0.5, 0.5, shape=shape, dtype=dtype)
    rhs = mx.nd.random.uniform(-0.5, 0.5, shape=shape, dtype=dtype)
    # NOTE(review): the case name 'elmwise_add' is misspelled but kept
    # byte-for-byte — it may name the exported model artifact.
    op_export_test('elmwise_add', model, [lhs, rhs], tmp_path)


@pytest.mark.parametrize('dtype', ['float32', 'float16'])
@pytest.mark.parametrize('shape', [(1,), (3,), (4, 5), (3, 4, 5)])
@pytest.mark.parametrize('act_type', ['tanh', 'relu', 'sigmoid', 'softrelu', 'softsign'])
def test_onnx_export_Activation(tmp_path, dtype, shape, act_type):
    """Verify ONNX export of the Activation op for each parametrized act_type."""
    data = mx.nd.random.uniform(-0.5, 0.5, shape=shape, dtype=dtype)
    activation_model = def_model('Activation', act_type=act_type)
    op_export_test('Activation', activation_model, [data], tmp_path)


@pytest.mark.parametrize('dtype', ['float32', 'float64', 'int32', 'int64'])
@pytest.mark.parametrize('axes', [None, [1,0,2]])
def test_onnx_export_transpose(tmp_path, dtype, axes):
Expand Down

0 comments on commit 37ac4dd

Please sign in to comment.