Skip to content
This repository has been archived by the owner on Nov 17, 2023. It is now read-only.

Commit

Permalink
Adding test for softmaxoutput (#13116)
Browse files Browse the repository at this point in the history
  • Loading branch information
Roshrini authored and sandeep-krishnamurthy committed Dec 7, 2018
1 parent 8feb826 commit f390f0c
Show file tree
Hide file tree
Showing 2 changed files with 23 additions and 1 deletion.
2 changes: 1 addition & 1 deletion python/mxnet/contrib/onnx/mx2onnx/_op_translations.py
Original file line number Diff line number Diff line change
Expand Up @@ -705,7 +705,7 @@ def convert_softmax_output(node, **kwargs):

softmax_node = onnx.helper.make_node(
"Softmax",
[input1.output[0]],
[input1.name],
[name],
axis=1,
name=name
Expand Down
22 changes: 22 additions & 0 deletions tests/python-pytest/onnx/export/mxnet_export_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -241,6 +241,28 @@ def test_square():

npt.assert_almost_equal(result, numpy_op)


@with_seed()
def test_softmax():
    """Round-trip SoftmaxOutput through ONNX export/import and compare outputs.

    Runs a forward pass of ``mx.sym.SoftmaxOutput`` on random data, exports the
    symbol to ONNX, re-imports it, and asserts the imported model's forward
    pass matches the original output. Decorated with ``@with_seed()`` (as the
    sibling tests in this file are) so random inputs are reproducible on
    failure.
    """
    input1 = np.random.rand(1000, 1000).astype("float32")
    label1 = np.random.rand(1000)
    input_nd = mx.nd.array(input1)
    label_nd = mx.nd.array(label1)

    ipsym = mx.sym.Variable("ipsym")
    label = mx.sym.Variable('label')
    # use_ignore=False, so ignore_label has no effect; labels are only needed
    # to bind/execute, not to shape the softmax output being compared.
    sym = mx.sym.SoftmaxOutput(data=ipsym, label=label, ignore_label=0, use_ignore=False)
    ex = sym.bind(ctx=mx.cpu(0), args={'ipsym': input_nd, 'label': label_nd})
    ex.forward(is_train=True)
    softmax_out = ex.outputs[0].asnumpy()

    # No trained params: pass an empty param dict; shapes cover data and label.
    converted_model = onnx_mxnet.export_model(sym, {}, [(1000, 1000), (1000,)], np.float32, "softmaxop.onnx")

    sym, arg_params, aux_params = onnx_mxnet.import_model(converted_model)
    result = forward_pass(sym, arg_params, aux_params, ['ipsym'], input1)

    # Comparing result of forward pass before using onnx export, import
    npt.assert_almost_equal(result, softmax_out)

@with_seed()
def test_comparison_ops():
"""Test greater, lesser, equal"""
Expand Down

0 comments on commit f390f0c

Please sign in to comment.