Skip to content
This repository has been archived by the owner on Nov 17, 2023. It is now read-only.

ONNX test code cleanup #13553

Merged
merged 15 commits into from
Dec 26, 2018
10 changes: 5 additions & 5 deletions ci/docker/runtime_functions.sh
Original file line number Diff line number Diff line change
Expand Up @@ -939,11 +939,11 @@ unittest_centos7_gpu() {
integrationtest_ubuntu_cpu_onnx() {
set -ex
export PYTHONPATH=./python/
pytest tests/python-pytest/onnx/import/mxnet_backend_test.py
pytest tests/python-pytest/onnx/import/onnx_import_test.py
pytest tests/python-pytest/onnx/import/gluon_backend_test.py
pytest tests/python-pytest/onnx/export/onnx_backend_test.py
python tests/python-pytest/onnx/export/mxnet_export_test.py
pytest tests/python-pytest/onnx/gluon_backend_test.py
pytest tests/python-pytest/onnx/mxnet_backend_test.py
pytest tests/python-pytest/onnx/mxnet_export_test.py
pytest tests/python-pytest/onnx/test_models.py
pytest tests/python-pytest/onnx/test_node.py
}

integrationtest_ubuntu_gpu_python() {
Expand Down
33 changes: 33 additions & 0 deletions tests/python-pytest/onnx/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,33 @@
# ONNX tests

## Directory structure:

```bash
.
├── README.md
├── backend.py
├── backend_rep.py
├── backend_test.py
├── gluon_backend_test.py
├── mxnet_backend_test.py
├── mxnet_export_test.py
├── test_cases.py
├── test_models.py
└── test_node.py
```

* `backend.py` - MXNetBackend. This file contains prepare(). \
This class can be used for both the MXNet and Gluon backends.
* `backend_rep.py` - MXNetBackendRep and GluonBackendRep for running inference
* `backend_test.py` - prepare tests by including tests from `test_cases.py`
* `gluon_backend_test.py` - Set backend as gluon and execute ONNX tests for ONNX->Gluon import.
* `mxnet_backend_test.py` - Set backend as mxnet and add tests for ONNX->MXNet import/export.
Since the MXNet backend tests both import and export, the test list in this file is
a union of the tests that execute for import and export, export alone, and import alone.
* `mxnet_export_test.py` - Execute unit tests for testing MXNet export code - this is not specific to
any operator.
* `test_cases.py` - list of test cases for operators/models that are supported
for "both", import and export, "import" alone, or "export" alone.
* `test_models.py` - custom tests for models
* `test_node.py` - custom tests for operators. These tests are written independent of ONNX tests, in case
ONNX doesn't have tests yet or for MXNet specific operators.
Original file line number Diff line number Diff line change
Expand Up @@ -16,51 +16,57 @@
# under the License.

# coding: utf-8
"""backend wrapper for onnx test infrastructure"""
import os
import sys
import numpy as np
"""MXNet/Gluon backend wrapper for onnx test infrastructure"""

from mxnet.contrib.onnx.onnx2mx.import_onnx import GraphProto
from mxnet.contrib.onnx.mx2onnx.export_onnx import MXNetGraph
import mxnet as mx
import numpy as np

try:
from onnx import helper, TensorProto, mapping
from onnx.backend.base import Backend
except ImportError:
raise ImportError("Onnx and protobuf need to be installed")
CURR_PATH = os.path.dirname(os.path.abspath(os.path.expanduser(__file__)))
sys.path.insert(0, os.path.join(CURR_PATH, '../'))
from backend_rep import MXNetBackendRep
raise ImportError("Onnx and protobuf need to be installed. Instructions to"
+ " install - https://github.com/onnx/onnx#installation")
from backend_rep import MXNetBackendRep, GluonBackendRep


# Using these functions for onnx test infrastructure.
# Implemented by following onnx docs guide:
# https://github.com/onnx/onnx/blob/master/docs/Implementing%20an%20ONNX%20backend.md
# MXNetBackend class will take an ONNX model with inputs, perform a computation,
# and then return the output.
# Implemented by following onnx docs guide:
# https://github.com/onnx/onnx/blob/master/docs/ImplementingAnOnnxBackend.md

class MXNetBackend(Backend):
"""MXNet backend for ONNX"""
"""MXNet/Gluon backend for ONNX"""

backend = 'mxnet'
operation = 'import'

@classmethod
def set_params(cls, backend, operation):
cls.backend = backend
cls.operation = operation

@staticmethod
def perform_import_export(graph_proto, input_shape):
def perform_import_export(sym, arg_params, aux_params, input_shape):
""" Import ONNX model to mxnet model and then export to ONNX model
and then import it back to mxnet for verifying the result"""
graph = GraphProto()

sym, arg_params, aux_params = graph.from_onnx(graph_proto)

params = {}
params.update(arg_params)
params.update(aux_params)
# exporting to onnx graph proto format
converter = MXNetGraph()
graph_proto = converter.create_onnx_graph_proto(sym, params, in_shape=input_shape, in_type=mapping.NP_TYPE_TO_TENSOR_TYPE[np.dtype('float32')])
graph_proto = converter.create_onnx_graph_proto(sym, params, in_shape=input_shape,
in_type=mapping.NP_TYPE_TO_TENSOR_TYPE[np.dtype('float32')])

# importing back to MXNET for verifying result.
sym, arg_params, aux_params = graph.from_onnx(graph_proto)

return sym, arg_params, aux_params


@classmethod
def prepare(cls, model, device='CPU', **kwargs):
"""For running end to end model(used for onnx test backend)
Expand All @@ -80,13 +86,31 @@ def prepare(cls, model, device='CPU', **kwargs):
Returns object of MXNetBackendRep class which will be in turn
used to run inference on the input model and return the result for comparison.
"""
backend = kwargs.get('backend', cls.backend)
operation = kwargs.get('operation', cls.operation)

graph = GraphProto()
metadata = graph.get_graph_metadata(model.graph)
input_data = metadata['input_tensor_data']
input_shape = [data[1] for data in input_data]
sym, arg_params, aux_params = MXNetBackend.perform_import_export(model.graph, input_shape)
return MXNetBackendRep(sym, arg_params, aux_params, device)
if device == 'CPU':
ctx = mx.cpu()
else:
raise NotImplementedError("ONNX tests are run only for CPU context.")

if backend == 'mxnet':
sym, arg_params, aux_params = graph.from_onnx(model.graph)
if operation == 'export':
metadata = graph.get_graph_metadata(model.graph)
input_data = metadata['input_tensor_data']
input_shape = [data[1] for data in input_data]
sym, arg_params, aux_params = MXNetBackend.perform_import_export(sym, arg_params, aux_params,
input_shape)

return MXNetBackendRep(sym, arg_params, aux_params, device)
elif backend == 'gluon':
if operation == 'import':
net = graph.graph_to_gluon(model.graph, ctx)
return GluonBackendRep(net, device)
elif operation == 'export':
raise NotImplementedError("Gluon->ONNX export not implemented.")

@classmethod
def supports_device(cls, device):
Expand All @@ -96,6 +120,4 @@ def supports_device(cls, device):

prepare = MXNetBackend.prepare

run_node = MXNetBackend.run_node

supports_device = MXNetBackend.supports_device
46 changes: 46 additions & 0 deletions tests/python-pytest/onnx/backend_rep.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,9 @@
except ImportError:
raise ImportError("Onnx and protobuf need to be installed. Instructions to"
+ " install - https://github.com/onnx/onnx#installation")
import numpy as np
import mxnet as mx
from mxnet import nd

# Using these functions for onnx test infrastructure.
# Implemented by following onnx docs guide:
Expand Down Expand Up @@ -82,3 +84,47 @@ def run(self, inputs, **kwargs):
exe.forward(is_train=False)
result = exe.outputs[0].asnumpy()
return [result]


# GluonBackendRep object will be returned by the backend's prepare method, which is used to
# execute a model repeatedly.
# Inputs will be passed to the run method of the GluonBackendRep class; it will perform the
# computation and retrieve the corresponding results for comparison to the onnx backend.
# https://github.com/onnx/onnx/blob/master/onnx/backend/test/runner/__init__.py.
# Implemented by following onnx docs guide:
# https://github.com/onnx/onnx/blob/master/docs/ImplementingAnOnnxBackend.md

class GluonBackendRep(BackendRep):
    """Execute inference on a Gluon network and return the result to the
    ONNX test infrastructure for comparison against the reference backend."""

    def __init__(self, net, device):
        # net: the imported Gluon network; device: target context name ('CPU')
        self.net = net
        self.device = device

    def run(self, inputs, **kwargs):
        """Run model inference and return the result

        Parameters
        ----------
        inputs : numpy array
            input to run a layer on

        Returns
        -------
        params : numpy array
            result obtained after running the inference on mxnet
        """
        # only CPU inference is supported by the ONNX test runs
        if self.device != 'CPU':
            raise NotImplementedError("ONNX tests are run only for CPU context.")
        ctx = mx.cpu()

        # wrap each numpy input as an NDArray on the chosen context, then
        # forward them through the network in one call
        net_inputs = [nd.array(arr, ctx=ctx) for arr in inputs]
        net_outputs = self.net(*net_inputs)

        # stack the per-sample outputs back into a single numpy array
        result = np.array(list(net_outputs.asnumpy()))
        return [result]
55 changes: 55 additions & 0 deletions tests/python-pytest/onnx/backend_test.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,55 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

"""ONNX test backend wrapper"""
try:
import onnx.backend.test
except ImportError:
raise ImportError("Onnx and protobuf need to be installed")

import test_cases


def prepare_tests(backend, operation):
    """Build the ONNX backend test suite for the given backend and operation.

    Parameters
    ----------
    backend : onnx backend class
        mxnet/gluon backend used to run the included tests.
    operation : str
        'import' or 'export'; selects which operation-specific test lists
        to include in addition to those tagged 'both'.

    Returns
    -------
    onnx.backend.test.BackendTest
        Suite with all applicable operator/model tests included and
        broadcast tests excluded.
    """
    backend_tests = onnx.backend.test.BackendTest(backend, __name__)

    # Every category dict in test_cases keeps separate lists keyed by
    # 'both', 'import', and 'export'; include the union of 'both' and the
    # requested operation for each category.
    categories = (test_cases.IMPLEMENTED_OPERATORS_TEST,
                  test_cases.BASIC_MODEL_TESTS,
                  test_cases.STANDARD_MODEL)
    for category in categories:
        for test in category.get('both', []) + category.get(operation, []):
            backend_tests.include(test)

    # Broadcast tests are excluded (not supported by this backend).
    backend_tests.exclude('.*bcast.*')

    return backend_tests
Loading