From 4bdb330d7458150e374b07165459af14137e7dda Mon Sep 17 00:00:00 2001 From: Roshani Nagmote Date: Thu, 13 Dec 2018 15:48:45 -0800 Subject: [PATCH 1/7] onnx export operators added --- .../contrib/onnx/mx2onnx/_op_translations.py | 65 +++++++++++++++++-- 1 file changed, 60 insertions(+), 5 deletions(-) diff --git a/python/mxnet/contrib/onnx/mx2onnx/_op_translations.py b/python/mxnet/contrib/onnx/mx2onnx/_op_translations.py index 15624b6c3a22..386aaa4f9da5 100644 --- a/python/mxnet/contrib/onnx/mx2onnx/_op_translations.py +++ b/python/mxnet/contrib/onnx/mx2onnx/_op_translations.py @@ -630,12 +630,20 @@ def convert_exp(node, **kwargs): return create_basic_op_node('Exp', node, kwargs) @mx_op.register("_copy") -def convert_identity(node, **kwargs): +def convert_copy(node, **kwargs): """Map MXNet's _copy operator attributes to onnx's Identity operator and return the created node. """ return create_basic_op_node('Identity', node, kwargs) +@mx_op.register("identity") +def convert_identity(node, **kwargs): + """Map MXNet's identity operator attributes to onnx's ConstantFill operator + and return the created node. + """ + return create_basic_op_node('ConstantFill', node, kwargs) + + @mx_op.register("InstanceNorm") def convert_instancenorm(node, **kwargs): """Map MXNet's InstanceNorm operator attributes to onnx's InstanceNormalization operator @@ -726,6 +734,32 @@ def convert_softmax_output(node, **kwargs): return [softmax_node] +@mx_op.register("LogisticRegressionOutput") +def convert_logistic_regression_output(node, **kwargs): + """Map MXNet's SoftmaxOutput operator attributes to onnx's Softmax operator + and return the created node. 
+ """ + name = node["name"] + input1_idx = kwargs["index_lookup"][node["inputs"][0][0]] + input1 = kwargs["proc_nodes"][input1_idx] + sigmoid_node = onnx.helper.make_node( + "Sigmoid", + [input1.output[0]], + [name], + name=name + ) + return [sigmoid_node] + +@mx_op.register("BlockGrad") +def convert_blockgrad(node, **kwargs): + """ Skip operator """ + return create_basic_op_node('ConstantFill', node, kwargs) + +@mx_op.register("make_loss") +def convert_makeloss(node, **kwargs): + """ Skip operator """ + return create_basic_op_node('ConstantFill', node, kwargs) + @mx_op.register("Concat") def convert_concat(node, **kwargs): @@ -872,6 +906,7 @@ def convert_clip(node, **kwargs): def scalar_op_helper(node, op_name, **kwargs): """Helper function for scalar arithmetic operations""" name, input_nodes, attrs = get_inputs(node, kwargs) + from onnx import numpy_helper input_type = kwargs["in_type"] scalar_value = np.array([attrs.get("scalar", 1)], @@ -884,13 +919,19 @@ def scalar_op_helper(node, op_name, **kwargs): for i in initializer: if i.name == input_nodes[0]: if op_name == 'Mul': - new_initializer = onnx.numpy_helper.to_array(i) * scalar_value[0] + new_initializer = numpy_helper.to_array(i) * scalar_value[0] elif op_name == 'Sub': - new_initializer = onnx.numpy_helper.to_array(i) - scalar_value[0] + if name.startswith("_rminusscalar"): + new_initializer = scalar_value[0] - numpy_helper.to_array(i) + else: + new_initializer = numpy_helper.to_array(i) - scalar_value[0] elif op_name == 'Add': - new_initializer = onnx.numpy_helper.to_array(i) + scalar_value[0] + new_initializer = numpy_helper.to_array(i) + scalar_value[0] elif op_name == 'Div': - new_initializer = onnx.numpy_helper.to_array(i) / scalar_value[0] + if name.startswith("_rdivscalar"): + new_initializer = scalar_value[0] / numpy_helper.to_array(i) + else: + new_initializer = numpy_helper.to_array(i) / scalar_value[0] flag = False break @@ -956,6 +997,13 @@ def convert_minus_scalar(node, **kwargs): """ return 
scalar_op_helper(node, 'Sub', **kwargs) +@mx_op.register("_rminus_scalar") +def convert_rminus_scalar(node, **kwargs): + """Map MXNet's _rminus_scalar operator attributes to onnx's Sub operator. + Creates a new node for the input scalar value, adds it to the initializer + and return multiple created nodes. + """ + return scalar_op_helper(node, 'Sub', **kwargs) # Convert scalar value into node and pass it as input to mul_node @mx_op.register("_plus_scalar") @@ -975,6 +1023,13 @@ def convert_div_scalar(node, **kwargs): """ return scalar_op_helper(node, 'Div', **kwargs) +@mx_op.register("_rdiv_scalar") +def convert_rdiv_scalar(node, **kwargs): + """Map MXNet's _rdiv_scalar operator attributes to onnx's Div operator. + Creates a new node for the input scalar value, adds it to the initializer + and return multiple created nodes. + """ + return scalar_op_helper(node, 'Div', **kwargs) # Sorting and Searching @mx_op.register("argmax") From cf647e124eff0d0de58b04d7faf33988339cd9c8 Mon Sep 17 00:00:00 2001 From: Roshani Nagmote Date: Fri, 14 Dec 2018 14:32:50 -0800 Subject: [PATCH 2/7] Adding operator test cases --- .../contrib/onnx/mx2onnx/_op_translations.py | 2 +- .../onnx/export/mxnet_export_test.py | 68 +++++++++++++++++++ 2 files changed, 69 insertions(+), 1 deletion(-) diff --git a/python/mxnet/contrib/onnx/mx2onnx/_op_translations.py b/python/mxnet/contrib/onnx/mx2onnx/_op_translations.py index 386aaa4f9da5..dc0d86f7b1da 100644 --- a/python/mxnet/contrib/onnx/mx2onnx/_op_translations.py +++ b/python/mxnet/contrib/onnx/mx2onnx/_op_translations.py @@ -744,7 +744,7 @@ def convert_logistic_regression_output(node, **kwargs): input1 = kwargs["proc_nodes"][input1_idx] sigmoid_node = onnx.helper.make_node( "Sigmoid", - [input1.output[0]], + [input1.name], [name], name=name ) diff --git a/tests/python-pytest/onnx/export/mxnet_export_test.py b/tests/python-pytest/onnx/export/mxnet_export_test.py index b4fa4b12c781..4cf28e7b7248 100644 --- 
a/tests/python-pytest/onnx/export/mxnet_export_test.py +++ b/tests/python-pytest/onnx/export/mxnet_export_test.py @@ -327,6 +327,74 @@ def test_softmax(): # Comparing result of forward pass before using onnx export, import npt.assert_almost_equal(result, softmax_out) +def test_logisticRegressionOutput(): + input1 = np.random.rand(1000, 1000).astype("float32") + label1 = np.random.rand(1000, 1000) + input_nd = mx.nd.array(input1) + label_nd = mx.nd.array(label1) + + ipsym = mx.sym.Variable("ipsym") + label = mx.sym.Variable('label') + sym = mx.sym.LogisticRegressionOutput(data=ipsym, label=label) + ex = sym.bind(ctx=mx.cpu(0), args={'ipsym': input_nd, 'label': label_nd}) + ex.forward(is_train=True) + logistic_out = ex.outputs[0].asnumpy() + + converted_model = onnx_mxnet.export_model(sym, {}, [(1000, 1000), (1000, 1000)], np.float32, "logisticop.onnx") + + sym, arg_params, aux_params = onnx_mxnet.import_model(converted_model) + result = forward_pass(sym, arg_params, aux_params, ['ipsym'], input1) + + # Comparing result of forward pass before using onnx export, import + npt.assert_almost_equal(result, logistic_out) + + +def _test_scalar_op(input1, outsym, np_out): + model = mx.mod.Module(symbol=outsym, data_names=['input1'], label_names=None) + model.bind(for_training=False, data_shapes=[('input1', np.shape(input1))], label_shapes=None) + model.init_params() + + args, auxs = model.get_params() + params = {} + params.update(args) + params.update(auxs) + + converted_model = onnx_mxnet.export_model(outsym, params, [np.shape(input1)], np.float32, + onnx_file_path=outsym.name+".onnx") + + sym, arg_params, aux_params = onnx_mxnet.import_model(converted_model) + result = forward_pass(sym, arg_params, aux_params, ['input1'], input1) + + npt.assert_almost_equal(result, np_out) + +@with_seed() +def test_scalarops(): + input1 = np.random.randint(1, 10, (2, 3)).astype("float32") + ipsym = mx.sym.Variable("input1") + operators = ['Add', 'Sub', 'rSub', 'Mul', 'Div', 'rDiv'] + for 
op in operators: + if op == 'Add': + out = 2 + ipsym + np_out = np.add(2, input1) + _test_scalar_op(input1, out, np_out) + if op == "Sub": + out = ipsym - 2 + np_out = np.subtract(input1, 2) + _test_scalar_op(input1, out, np_out) + if op == "rSub": + out = 2 - ipsym + np_out = np.subtract(2, input1) + _test_scalar_op(input1, out, np_out) + if op == "Mul": + out = 2 * ipsym + np_out = np.multiply(2, input1) + _test_scalar_op(input1, out, np_out) + if op == "Div": + np_out = input1/2 + out = ipsym / 2 + _test_scalar_op(input1, out, np_out) + + @with_seed() def test_comparison_ops(): """Test greater, lesser, equal""" From 3f03a170da01882799166beb51f1823b6da1b219 Mon Sep 17 00:00:00 2001 From: Roshani Nagmote Date: Wed, 19 Dec 2018 13:47:03 -0800 Subject: [PATCH 3/7] more test cases --- .../contrib/onnx/mx2onnx/_op_translations.py | 2 +- .../onnx/onnx2mx/_translation_utils.py | 2 +- .../onnx/export/mxnet_export_test.py | 21 +++++++++++++++++++ 3 files changed, 23 insertions(+), 2 deletions(-) diff --git a/python/mxnet/contrib/onnx/mx2onnx/_op_translations.py b/python/mxnet/contrib/onnx/mx2onnx/_op_translations.py index dc0d86f7b1da..9acf34fab745 100644 --- a/python/mxnet/contrib/onnx/mx2onnx/_op_translations.py +++ b/python/mxnet/contrib/onnx/mx2onnx/_op_translations.py @@ -755,7 +755,7 @@ def convert_blockgrad(node, **kwargs): """ Skip operator """ return create_basic_op_node('ConstantFill', node, kwargs) -@mx_op.register("make_loss") +@mx_op.register("MakeLoss") def convert_makeloss(node, **kwargs): """ Skip operator """ return create_basic_op_node('ConstantFill', node, kwargs) diff --git a/python/mxnet/contrib/onnx/onnx2mx/_translation_utils.py b/python/mxnet/contrib/onnx/onnx2mx/_translation_utils.py index f63c1e9e8e62..9700dd6a30a6 100644 --- a/python/mxnet/contrib/onnx/onnx2mx/_translation_utils.py +++ b/python/mxnet/contrib/onnx/onnx2mx/_translation_utils.py @@ -158,7 +158,7 @@ def _fix_broadcast(op_name, inputs, broadcast_axis, proto_obj): assert 
len(list(inputs)) == 2 input0_shape = get_input_shape(inputs[0], proto_obj) - #creating reshape shape + # creating reshape shape reshape_shape = list(len(input0_shape) * (1,)) reshape_shape[broadcast_axis] = -1 reshape_shape = tuple(reshape_shape) diff --git a/tests/python-pytest/onnx/export/mxnet_export_test.py b/tests/python-pytest/onnx/export/mxnet_export_test.py index 4cf28e7b7248..6f0a5adebad8 100644 --- a/tests/python-pytest/onnx/export/mxnet_export_test.py +++ b/tests/python-pytest/onnx/export/mxnet_export_test.py @@ -394,6 +394,27 @@ def test_scalarops(): out = ipsym / 2 _test_scalar_op(input1, out, np_out) +def test_makeloss(): + v1 = mx.nd.array([1, 2]) + v2 = mx.nd.array([0, 1]) + a = mx.sym.Variable('a') + b = mx.sym.Variable('b') + sym = mx.sym.MakeLoss(b + a) + ex = sym.bind(ctx=mx.cpu(0), args={'a': v1, 'b': v2}) + ex.forward(is_train=True) + makeloss_out = ex.outputs[0].asnumpy() + + converted_model = onnx_mxnet.export_model(sym, {}, [(2,),(2,)], np.float32, "makelossop.onnx") + + # sym, arg_params, aux_params = onnx_mxnet.import_model(converted_model) + # result = forward_pass(sym, arg_params, aux_params, ['ipsym'], input1) + # + # # Comparing result of forward pass before using onnx export, import + # npt.assert_almost_equal(result, makeloss_out) + # + # executor = loss.simple_bind(ctx=mx.cpu(0), a=(2,), b=(2,)) + # executor.forward(is_train=True, a=v1, b=v2) + # print(executor.outputs[0]) @with_seed() def test_comparison_ops(): From 9b327b68972354d8eb4c15f80d37588ba184966c Mon Sep 17 00:00:00 2001 From: Roshani Nagmote Date: Wed, 26 Dec 2018 09:57:23 -0800 Subject: [PATCH 4/7] fix --- .../onnx/export/mxnet_export_test.py | 22 ------------------- 1 file changed, 22 deletions(-) diff --git a/tests/python-pytest/onnx/export/mxnet_export_test.py b/tests/python-pytest/onnx/export/mxnet_export_test.py index 6f0a5adebad8..416e817fb422 100644 --- a/tests/python-pytest/onnx/export/mxnet_export_test.py +++ 
b/tests/python-pytest/onnx/export/mxnet_export_test.py @@ -394,28 +394,6 @@ def test_scalarops(): out = ipsym / 2 _test_scalar_op(input1, out, np_out) -def test_makeloss(): - v1 = mx.nd.array([1, 2]) - v2 = mx.nd.array([0, 1]) - a = mx.sym.Variable('a') - b = mx.sym.Variable('b') - sym = mx.sym.MakeLoss(b + a) - ex = sym.bind(ctx=mx.cpu(0), args={'a': v1, 'b': v2}) - ex.forward(is_train=True) - makeloss_out = ex.outputs[0].asnumpy() - - converted_model = onnx_mxnet.export_model(sym, {}, [(2,),(2,)], np.float32, "makelossop.onnx") - - # sym, arg_params, aux_params = onnx_mxnet.import_model(converted_model) - # result = forward_pass(sym, arg_params, aux_params, ['ipsym'], input1) - # - # # Comparing result of forward pass before using onnx export, import - # npt.assert_almost_equal(result, makeloss_out) - # - # executor = loss.simple_bind(ctx=mx.cpu(0), a=(2,), b=(2,)) - # executor.forward(is_train=True, a=v1, b=v2) - # print(executor.outputs[0]) - @with_seed() def test_comparison_ops(): """Test greater, lesser, equal""" From 04aed0c9a6a5ffbd86b4cec94fdd144a04922250 Mon Sep 17 00:00:00 2001 From: Roshani Nagmote Date: Fri, 28 Dec 2018 09:37:19 -0800 Subject: [PATCH 5/7] addressing review comments --- .../contrib/onnx/mx2onnx/_op_translations.py | 10 +++++++ tests/python-pytest/onnx/test_node.py | 30 +++++++++++++++++++ 2 files changed, 40 insertions(+) diff --git a/python/mxnet/contrib/onnx/mx2onnx/_op_translations.py b/python/mxnet/contrib/onnx/mx2onnx/_op_translations.py index 9acf34fab745..3add29e59ed7 100644 --- a/python/mxnet/contrib/onnx/mx2onnx/_op_translations.py +++ b/python/mxnet/contrib/onnx/mx2onnx/_op_translations.py @@ -932,6 +932,8 @@ def scalar_op_helper(node, op_name, **kwargs): new_initializer = scalar_value[0] / numpy_helper.to_array(i) else: new_initializer = numpy_helper.to_array(i) / scalar_value[0] + elif op_name == 'Pow': + new_initializer = numpy_helper.to_array(i) ** scalar_value[0] flag = False break @@ -1031,6 +1033,14 @@ def 
convert_rdiv_scalar(node, **kwargs): """ return scalar_op_helper(node, 'Div', **kwargs) +@mx_op.register("_power_scalar") +def convert_pow_scalar(node, **kwargs): + """Map MXNet's _pow_scalar operator attributes to onnx's Pow operator. + Creates a new node for the input scalar value, adds it to the initializer + and return multiple created nodes. + """ + return scalar_op_helper(node, 'Pow', **kwargs) + # Sorting and Searching @mx_op.register("argmax") def convert_argmax(node, **kwargs): diff --git a/tests/python-pytest/onnx/test_node.py b/tests/python-pytest/onnx/test_node.py index 07ae866b96cf..ec74e819d4f7 100644 --- a/tests/python-pytest/onnx/test_node.py +++ b/tests/python-pytest/onnx/test_node.py @@ -138,6 +138,32 @@ def get_onnx_graph(testname, input_names, inputs, output_name, output_shape, att npt.assert_almost_equal(output[0], mxnet_output) + input1 = get_rnd((1, 10, 2, 3)) + ipsym = mx.sym.Variable("input1") + for test in test_scalar_ops: + if test == 'Add': + outsym = 2 + ipsym + if test == "Sub": + outsym = ipsym - 2 + if test == "rSub": + outsym = ipsym.__rsub__(2) + if test == "Mul": + outsym = 2 * ipsym + if test == "Div": + outsym = ipsym / 2 + if test == "rDiv": + outsym = ipsym.__rdiv__(2) + if test == "Pow": + outsym = ipsym ** 2 + forward_op = forward_pass(outsym, None, None, ['input1'], input1) + converted_model = onnx_mxnet.export_model(outsym, {}, [np.shape(input1)], np.float32, + onnx_file_path=outsym.name + ".onnx") + + sym, arg_params, aux_params = onnx_mxnet.import_model(converted_model) + result = forward_pass(sym, arg_params, aux_params, ['input1'], input1) + + npt.assert_almost_equal(result, forward_op) + # test_case = ("test_case_name", mxnet op, "ONNX_op_name", [input_list], attribute map, MXNet_specific=True/False) test_cases = [ @@ -156,9 +182,13 @@ def get_onnx_graph(testname, input_names, inputs, output_name, output_shape, att {'block_size': 2}, False), ("test_softmax", mx.sym.SoftmaxOutput, "Softmax", [get_rnd((1000, 1000)), 
get_rnd(1000)], {'ignore_label': 0, 'use_ignore': False}, True), + ("test_logistic_regression", mx.sym.LogisticRegressionOutput, "Sigmoid", + [get_rnd((1000, 1000)), get_rnd((1000, 1000))], {}, True), ("test_fullyconnected", mx.sym.FullyConnected, "Gemm", [get_rnd((4,3)), get_rnd((4, 3)), get_rnd(4)], {'num_hidden': 4, 'name': 'FC'}, True) ] +test_scalar_ops = ['Add', 'Sub', 'rSub', 'Mul', 'Div', 'rDiv', 'Pow'] + if __name__ == '__main__': unittest.main() From 9734cdc698c960480de61be75127bb2f9ca7f0d8 Mon Sep 17 00:00:00 2001 From: Roshani Nagmote Date: Thu, 3 Jan 2019 10:34:19 -0800 Subject: [PATCH 6/7] fix --- python/mxnet/contrib/onnx/mx2onnx/_op_translations.py | 5 +---- tests/python-pytest/onnx/backend_test.py | 3 ++- tests/python-pytest/onnx/test_node.py | 4 +--- 3 files changed, 4 insertions(+), 8 deletions(-) diff --git a/python/mxnet/contrib/onnx/mx2onnx/_op_translations.py b/python/mxnet/contrib/onnx/mx2onnx/_op_translations.py index 3add29e59ed7..427ad6a5b468 100644 --- a/python/mxnet/contrib/onnx/mx2onnx/_op_translations.py +++ b/python/mxnet/contrib/onnx/mx2onnx/_op_translations.py @@ -928,10 +928,7 @@ def scalar_op_helper(node, op_name, **kwargs): elif op_name == 'Add': new_initializer = numpy_helper.to_array(i) + scalar_value[0] elif op_name == 'Div': - if name.startswith("_rdivscalar"): - new_initializer = scalar_value[0] / numpy_helper.to_array(i) - else: - new_initializer = numpy_helper.to_array(i) / scalar_value[0] + new_initializer = numpy_helper.to_array(i) / scalar_value[0] elif op_name == 'Pow': new_initializer = numpy_helper.to_array(i) ** scalar_value[0] flag = False break diff --git a/tests/python-pytest/onnx/backend_test.py b/tests/python-pytest/onnx/backend_test.py index 6c6c3d2d9c7d..5ec5efb8ea21 100644 --- a/tests/python-pytest/onnx/backend_test.py +++ b/tests/python-pytest/onnx/backend_test.py @@ -50,6 +50,7 @@ def prepare_tests(backend, operation): for std_model_test in std_models: BACKEND_TESTS.include(std_model_test) - 
BACKEND_TESTS.exclude('.*bcast.*') + # Tests for scalar ops are in test_node.py + BACKEND_TESTS.exclude('.*scalar.*') return BACKEND_TESTS diff --git a/tests/python-pytest/onnx/test_node.py b/tests/python-pytest/onnx/test_node.py index ec74e819d4f7..41b86de4b9bd 100644 --- a/tests/python-pytest/onnx/test_node.py +++ b/tests/python-pytest/onnx/test_node.py @@ -151,8 +151,6 @@ def get_onnx_graph(testname, input_names, inputs, output_name, output_shape, att outsym = 2 * ipsym if test == "Div": outsym = ipsym / 2 - if test == "rDiv": - outsym = ipsym.__rdiv__(2) if test == "Pow": outsym = ipsym ** 2 forward_op = forward_pass(outsym, None, None, ['input1'], input1) @@ -188,7 +186,7 @@ def get_onnx_graph(testname, input_names, inputs, output_name, output_shape, att {'num_hidden': 4, 'name': 'FC'}, True) ] -test_scalar_ops = ['Add', 'Sub', 'rSub', 'Mul', 'Div', 'rDiv', 'Pow'] +test_scalar_ops = ['Add', 'Sub', 'rSub', 'Mul', 'Div', 'Pow'] if __name__ == '__main__': unittest.main() From 3cc450cfe4bdf266659a511842d0edbaf0d255ef Mon Sep 17 00:00:00 2001 From: Roshani Nagmote Date: Tue, 8 Jan 2019 18:32:53 -0800 Subject: [PATCH 7/7] retrigger CI --- python/mxnet/contrib/onnx/mx2onnx/_op_translations.py | 1 - 1 file changed, 1 deletion(-) diff --git a/python/mxnet/contrib/onnx/mx2onnx/_op_translations.py b/python/mxnet/contrib/onnx/mx2onnx/_op_translations.py index 427ad6a5b468..0dd816bcc6fd 100644 --- a/python/mxnet/contrib/onnx/mx2onnx/_op_translations.py +++ b/python/mxnet/contrib/onnx/mx2onnx/_op_translations.py @@ -643,7 +643,6 @@ def convert_identity(node, **kwargs): """ return create_basic_op_node('ConstantFill', node, kwargs) - @mx_op.register("InstanceNorm") def convert_instancenorm(node, **kwargs): """Map MXNet's InstanceNorm operator attributes to onnx's InstanceNormalization operator