Skip to content
This repository has been archived by the owner on Nov 17, 2023. It is now read-only.

Commit

Permalink
[MXNET-703] Use static libs for TensorRT
Browse files Browse the repository at this point in the history
Moving a few libraries to static linking to mitigate the risk of dependency conflicts.
  • Loading branch information
KellenSunderland committed Nov 18, 2018
1 parent c152969 commit f6133bf
Show file tree
Hide file tree
Showing 3 changed files with 13 additions and 6 deletions.
4 changes: 3 additions & 1 deletion Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -99,7 +99,9 @@ endif

ifeq ($(USE_TENSORRT), 1)
CFLAGS += -I$(ROOTDIR) -I$(TPARTYDIR) -DONNX_NAMESPACE=$(ONNX_NAMESPACE) -DMXNET_USE_TENSORRT=1
LDFLAGS += -lprotobuf -pthread -lonnx -lonnx_proto -lnvonnxparser -lnvonnxparser_runtime -lnvinfer -lnvinfer_plugin
LDFLAGS += -lprotobuf -pthread -lonnx -l:libonnx_proto.a -l:libnvonnxparser_static.a \
-l:libnvonnxparser_runtime_static.a -l:libnvonnxparser_plugin.a -lnvinfer -lnvinfer_plugin \
-l:libonnx_proto.a -l:libonnxtrt_proto.a
endif
# -L/usr/local/lib

Expand Down
2 changes: 1 addition & 1 deletion ci/docker/install/tensorrt.sh
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@ pip3 install gluoncv==0.2.0
pushd .
cd ..
apt-get update
apt-get install -y automake libtool
apt-get install -y automake libtool zip
git clone --recursive -b 3.5.1.1 https://github.com/google/protobuf.git
cd protobuf
./autogen.sh
Expand Down
13 changes: 9 additions & 4 deletions ci/docker/runtime_functions.sh
Original file line number Diff line number Diff line change
Expand Up @@ -79,6 +79,8 @@ build_wheel() {
export MXNET_LIBRARY_PATH=${BUILD_DIR}/libmxnet.so

cd ${PYTHON_DIR}

# If building for redistribution edit the name in this setup.py
python setup.py bdist_wheel --universal

# repackage
Expand All @@ -96,6 +98,8 @@ build_wheel() {
rm -rf ${TMPDIR}

popd

# If redistributing you may now run `twine upload -r pypi *.whl`
}

# Build commands: Every platform in docker/Dockerfile.build.<platform> should have a corresponding
Expand Down Expand Up @@ -498,7 +502,7 @@ build_ubuntu_gpu_tensorrt() {
cd build
cmake \
-DCMAKE_CXX_FLAGS=-I/usr/include/python${PYVER}\
-DBUILD_SHARED_LIBS=ON ..\
-DBUILD_SHARED_LIBS=OFF ..\
-G Ninja
ninja -j 1 -v onnx/onnx.proto
ninja -j 1 -v
Expand All @@ -514,12 +518,11 @@ build_ubuntu_gpu_tensorrt() {
cmake ..
make -j$(nproc)
export LIBRARY_PATH=`pwd`:$LIBRARY_PATH
export LIBRARY_PATH=$LIBRARY_PATH:`pwd`/third_party/onnx/
mv third_party/onnx/libonnx_proto.a third_party/onnx/libonnxtrt_proto.a
popd

mkdir -p /work/mxnet/lib/
cp 3rdparty/onnx-tensorrt/third_party/onnx/build/*.so /work/mxnet/lib/
cp -L 3rdparty/onnx-tensorrt/build/libnvonnxparser_runtime.so.0 /work/mxnet/lib/
cp -L 3rdparty/onnx-tensorrt/build/libnvonnxparser.so.0 /work/mxnet/lib/

rm -rf build
make \
Expand All @@ -537,6 +540,8 @@ build_ubuntu_gpu_tensorrt() {
ONNX_NAMESPACE=onnx \
CUDA_ARCH="-gencode arch=compute_70,code=compute_70" \
-j$(nproc)

build_wheel /work/mxnet/python /work/mxnet/lib
}

build_ubuntu_gpu_mkldnn() {
Expand Down

0 comments on commit f6133bf

Please sign in to comment.