diff --git a/ci/docker/runtime_functions.sh b/ci/docker/runtime_functions.sh
index 40405b96163e..8a34e734eee1 100755
--- a/ci/docker/runtime_functions.sh
+++ b/ci/docker/runtime_functions.sh
@@ -268,6 +268,7 @@ build_centos7_cpu() {
         -DUSE_MKLDNN=OFF \
         -DUSE_DIST_KVSTORE=ON \
         -DUSE_CUDA=OFF \
+        -DBUILD_EXTENSION_PATH=/work/mxnet/example/extensions/lib_external_ops \
         -G Ninja /work/mxnet
     ninja
 }
@@ -299,6 +300,7 @@ build_centos7_gpu() {
         -DUSE_CUDA=ON \
         -DMXNET_CUDA_ARCH="$CI_CMAKE_CUDA_ARCH" \
         -DUSE_DIST_KVSTORE=ON\
+        -DBUILD_EXTENSION_PATH=/work/mxnet/example/extensions/lib_external_ops \
         -G Ninja /work/mxnet
     ninja
 }
@@ -320,6 +322,7 @@ build_ubuntu_cpu_openblas() {
         -DUSE_CUDA=OFF \
         -DUSE_DIST_KVSTORE=ON \
         -DBUILD_CYTHON_MODULES=ON \
+        -DBUILD_EXTENSION_PATH=/work/mxnet/example/extensions/lib_external_ops \
         -G Ninja /work/mxnet
     ninja
 }
@@ -335,6 +338,7 @@ build_ubuntu_cpu_mkl() {
         -DUSE_TVM_OP=ON \
         -DUSE_MKL_IF_AVAILABLE=ON \
         -DUSE_BLAS=MKL \
+        -DBUILD_EXTENSION_PATH=/work/mxnet/example/extensions/lib_external_ops \
         -GNinja /work/mxnet
     ninja
 }
@@ -367,6 +371,7 @@ build_ubuntu_cpu_cmake_no_tvm_op() {
         -DUSE_OPENCV=ON \
         -DUSE_SIGNAL_HANDLER=ON \
         -DCMAKE_BUILD_TYPE=Release \
+        -DBUILD_EXTENSION_PATH=/work/mxnet/example/extensions/lib_external_ops \
         -G Ninja \
         /work/mxnet
 
@@ -519,6 +524,7 @@ build_ubuntu_cpu_mkldnn() {
         -DUSE_MKLDNN=ON \
         -DUSE_CUDA=OFF \
         -DUSE_CPP_PACKAGE=ON \
+        -DBUILD_EXTENSION_PATH=/work/mxnet/example/extensions/lib_external_ops \
         -G Ninja /work/mxnet
     ninja
 }
@@ -534,6 +540,7 @@ build_ubuntu_cpu_mkldnn_mkl() {
         -DUSE_TVM_OP=ON \
         -DUSE_MKL_IF_AVAILABLE=ON \
         -DUSE_BLAS=MKL \
+        -DBUILD_EXTENSION_PATH=/work/mxnet/example/extensions/lib_external_ops \
         -GNinja /work/mxnet
     ninja
 }
@@ -605,6 +612,7 @@ build_ubuntu_gpu_mkldnn() {
         -DUSE_CUDA=ON \
         -DMXNET_CUDA_ARCH="$CI_CMAKE_CUDA_ARCH" \
         -DUSE_CPP_PACKAGE=ON \
+        -DBUILD_EXTENSION_PATH=/work/mxnet/example/extensions/lib_external_ops \
         -G Ninja /work/mxnet
     ninja
 }
@@ -619,6 +627,7 @@ build_ubuntu_gpu_mkldnn_nocudnn() {
         -DMXNET_CUDA_ARCH="$CI_CMAKE_CUDA_ARCH" \
         -DUSE_CUDNN=OFF \
         -DUSE_CPP_PACKAGE=ON \
+        -DBUILD_EXTENSION_PATH=/work/mxnet/example/extensions/lib_external_ops \
         -G Ninja /work/mxnet
     ninja
 }
@@ -636,6 +645,7 @@ build_ubuntu_gpu_cuda101_cudnn7() {
         -DUSE_CPP_PACKAGE=ON \
         -DUSE_DIST_KVSTORE=ON \
         -DBUILD_CYTHON_MODULES=ON \
+        -DBUILD_EXTENSION_PATH=/work/mxnet/example/extensions/lib_external_ops \
         -G Ninja /work/mxnet
     ninja
 }
diff --git a/ci/jenkins/Jenkins_steps.groovy b/ci/jenkins/Jenkins_steps.groovy
index 20b03e9f6b4a..3b8d2ca600e2 100644
--- a/ci/jenkins/Jenkins_steps.groovy
+++ b/ci/jenkins/Jenkins_steps.groovy
@@ -23,18 +23,18 @@ utils = load('ci/Jenkinsfile_utils.groovy')
 
 // mxnet libraries
-mx_lib = 'build/libmxnet.so, build/3rdparty/tvm/libtvm_runtime.so, build/libtvmop.so, build/tvmop.conf, build/libcustomop_lib.so, build/libcustomop_gpu_lib.so, build/libsubgraph_lib.so, build/3rdparty/openmp/runtime/src/libomp.so'
-mx_lib_cython = 'build/libmxnet.so, build/3rdparty/tvm/libtvm_runtime.so, build/libtvmop.so, build/tvmop.conf, build/libcustomop_lib.so, build/libcustomop_gpu_lib.so, build/libsubgraph_lib.so, python/mxnet/_cy3/*.so, build/3rdparty/openmp/runtime/src/libomp.so, python/mxnet/_ffi/_cy3/*.so'
+mx_lib = 'build/libmxnet.so, build/3rdparty/tvm/libtvm_runtime.so, build/libtvmop.so, build/tvmop.conf, build/libcustomop_lib.so, build/libcustomop_gpu_lib.so, build/libsubgraph_lib.so, example/extensions/lib_external_ops/build/libexternal_lib.so, build/3rdparty/openmp/runtime/src/libomp.so'
+mx_lib_cython = 'build/libmxnet.so, build/3rdparty/tvm/libtvm_runtime.so, build/libtvmop.so, build/tvmop.conf, build/libcustomop_lib.so, build/libcustomop_gpu_lib.so, build/libsubgraph_lib.so, example/extensions/lib_external_ops/build/libexternal_lib.so, python/mxnet/_cy3/*.so, build/3rdparty/openmp/runtime/src/libomp.so, python/mxnet/_ffi/_cy3/*.so'
 // mxnet cmake libraries, in cmake builds we do not produce a libnvvm static library by default.
 mx_cmake_lib = 'build/libmxnet.so, build/3rdparty/tvm/libtvm_runtime.so, build/libtvmop.so, build/tvmop.conf, build/tests/mxnet_unit_tests, build/3rdparty/openmp/runtime/src/libomp.so'
-mx_cmake_lib_no_tvm_op = 'build/libmxnet.so, build/libcustomop_lib.so, build/libcustomop_gpu_lib.so, build/libsubgraph_lib.so, build/tests/mxnet_unit_tests, build/3rdparty/openmp/runtime/src/libomp.so'
+mx_cmake_lib_no_tvm_op = 'build/libmxnet.so, build/libcustomop_lib.so, build/libcustomop_gpu_lib.so, build/libsubgraph_lib.so, example/extensions/lib_external_ops/build/libexternal_lib.so, build/tests/mxnet_unit_tests, build/3rdparty/openmp/runtime/src/libomp.so'
 mx_cmake_lib_cython = 'build/libmxnet.so, build/3rdparty/tvm/libtvm_runtime.so, build/libtvmop.so, build/tvmop.conf, build/tests/mxnet_unit_tests, build/3rdparty/openmp/runtime/src/libomp.so, python/mxnet/_cy3/*.so, python/mxnet/_ffi/_cy3/*.so'
 // mxnet cmake libraries, in cmake builds we do not produce a libnvvm static library by default.
 mx_cmake_lib_debug = 'build/libmxnet.so, build/3rdparty/tvm/libtvm_runtime.so, build/libtvmop.so, build/tvmop.conf, build/libcustomop_lib.so, build/libcustomop_gpu_lib.so, build/libsubgraph_lib.so, build/tests/mxnet_unit_tests'
-mx_mkldnn_lib = 'build/libmxnet.so, build/3rdparty/tvm/libtvm_runtime.so, build/libtvmop.so, build/tvmop.conf, build/3rdparty/openmp/runtime/src/libomp.so, build/libcustomop_lib.so, build/libcustomop_gpu_lib.so, build/libsubgraph_lib.so'
+mx_mkldnn_lib = 'build/libmxnet.so, build/3rdparty/tvm/libtvm_runtime.so, build/libtvmop.so, build/tvmop.conf, build/3rdparty/openmp/runtime/src/libomp.so, build/libcustomop_lib.so, build/libcustomop_gpu_lib.so, build/libsubgraph_lib.so, example/extensions/lib_external_ops/build/libexternal_lib.so'
 mx_tensorrt_lib = 'build/libmxnet.so, build/3rdparty/tvm/libtvm_runtime.so, build/libtvmop.so, build/tvmop.conf, build/3rdparty/openmp/runtime/src/libomp.so, lib/libnvonnxparser_runtime.so.0, lib/libnvonnxparser.so.0, lib/libonnx_proto.so, lib/libonnx.so'
-mx_lib_cpp_examples = 'build/libmxnet.so, build/3rdparty/tvm/libtvm_runtime.so, build/libtvmop.so, build/tvmop.conf, build/3rdparty/openmp/runtime/src/libomp.so, build/libcustomop_lib.so, build/libcustomop_gpu_lib.so, build/libsubgraph_lib.so, python/mxnet/_cy3/*.so, python/mxnet/_ffi/_cy3/*.so'
+mx_lib_cpp_examples = 'build/libmxnet.so, build/3rdparty/tvm/libtvm_runtime.so, build/libtvmop.so, build/tvmop.conf, build/3rdparty/openmp/runtime/src/libomp.so, build/libcustomop_lib.so, build/libcustomop_gpu_lib.so, build/libsubgraph_lib.so, example/extensions/lib_external_ops/build/libexternal_lib.so, python/mxnet/_cy3/*.so, python/mxnet/_ffi/_cy3/*.so'
 mx_lib_cpp_examples_no_tvm_op = 'build/libmxnet.so, build/libcustomop_lib.so, build/libcustomop_gpu_lib.so, build/libsubgraph_lib.so, build/3rdparty/openmp/runtime/src/libomp.so, python/mxnet/_cy3/*.so, python/mxnet/_ffi/_cy3/*.so'
 mx_lib_cpp_examples_cpu = 'build/libmxnet.so, build/3rdparty/tvm/libtvm_runtime.so, build/libtvmop.so, build/tvmop.conf, build/3rdparty/openmp/runtime/src/libomp.so'
 mx_cd_lib = 'lib/libmxnet.so, licenses/*, lib/libgfortran.so.*, lib/libopenblas.so.0, include/mkldnn/dnnl_version.h, include/mkldnn/dnnl_config.h'
diff --git a/tests/python/gpu/test_extensions_gpu.py b/tests/python/gpu/test_extensions_gpu.py
index 1cc06cd8b2c5..9d3683166053 100644
--- a/tests/python/gpu/test_extensions_gpu.py
+++ b/tests/python/gpu/test_extensions_gpu.py
@@ -28,8 +28,8 @@ import pytest
 
 base_path = os.path.join(os.path.dirname(__file__), "../../..")
 
-def check_platform():
-    return platform.machine() not in ['x86_64', 'AMD64']
+def check_platform(supported_platforms=['x86_64', 'AMD64']):
+    return platform.machine() not in supported_platforms
 
 @pytest.mark.skipif(check_platform(), reason="not all machine types supported")
 @pytest.mark.skipif(is_cd_run(), reason="continuous delivery run - ignoring test")
@@ -89,3 +89,24 @@ def test_custom_op_gpu():
     mx.random.seed(128, ctx=mx.gpu())
     r4 = mx.nd.my_noisy_relu(d2)
     assert_almost_equal(r3.asnumpy(), r4.asnumpy(), rtol=1e-3, atol=1e-3)
+
+@pytest.mark.skipif(check_platform(['x86_64']), reason="not all machine types supported")
+@pytest.mark.skipif(is_cd_run(), reason="continuous delivery run - ignoring test")
+def test_external_op():
+    # check if operator already exists
+    if hasattr(mx.nd, 'min_ex'):
+        raise MXNetError('Operator already loaded')
+
+    lib = 'libexternal_lib.so'
+    fname = os.path.join(base_path,'example/extensions/lib_external_ops/build/'+lib)
+    if not os.path.exists(fname):
+        raise MXNetError("library %s not found " % lib)
+
+    fname = os.path.abspath(fname)
+    mx.library.load(fname, False)
+
+    # execute operator
+    try:
+        mx.nd.min_ex()
+    except:
+        raise MXNetError('Operator not loaded successfully')
diff --git a/tests/python/unittest/test_extensions.py b/tests/python/unittest/test_extensions.py
index 52f999571e13..1c6763f5c515 100644
--- a/tests/python/unittest/test_extensions.py
+++ b/tests/python/unittest/test_extensions.py
@@ -28,8 +28,8 @@ import pytest
 
 base_path = os.path.join(os.path.dirname(__file__), "../../..")
 
-def check_platform():
-    return platform.machine() not in ['x86_64', 'AMD64']
+def check_platform(supported_platforms=['x86_64', 'AMD64']):
+    return platform.machine() not in supported_platforms
 
 @pytest.mark.skipif(check_platform(), reason="not all machine types supported")
 @pytest.mark.skipif(is_cd_run(), reason="continuous delivery run - ignoring test")
@@ -179,3 +179,24 @@ def test_subgraph():
     out5 = sym_block3(a_data, b_data)
     # check that result matches one executed by MXNet
     assert_almost_equal(out[0].asnumpy(), out5[0].asnumpy(), rtol=1e-3, atol=1e-3)
+
+@pytest.mark.skipif(check_platform(['x86_64']), reason="not all machine types supported")
+@pytest.mark.skipif(is_cd_run(), reason="continuous delivery run - ignoring test")
+def test_external_op():
+    # check if operator already exists
+    if hasattr(mx.nd, 'min_ex'):
+        raise MXNetError('Operator already loaded')
+
+    lib = 'libexternal_lib.so'
+    fname = os.path.join(base_path,'example/extensions/lib_external_ops/build/'+lib)
+    if not os.path.exists(fname):
+        raise MXNetError("library %s not found " % lib)
+
+    fname = os.path.abspath(fname)
+    mx.library.load(fname, False)
+
+    # execute operator
+    try:
+        mx.nd.min_ex()
+    except:
+        raise MXNetError('Operator not loaded successfully')
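
Usage sketch (not part of the diff): assuming MXNet was built in-tree with the BUILD_EXTENSION_PATH flag added above, so that libexternal_lib.so exists under example/extensions/lib_external_ops/build, the library can be loaded and exercised the same way the new test_external_op tests do. The load call and its second argument simply mirror the tests; the min_ex operator is the one registered by the external library.

    import os
    import mxnet as mx

    # Path assumes the extension was built in-tree and this runs from the repo root.
    lib_path = os.path.abspath('example/extensions/lib_external_ops/build/libexternal_lib.so')
    mx.library.load(lib_path, False)  # same call and arguments as in the tests above

    # Calling the operator only succeeds if the external library registered it.
    mx.nd.min_ex()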