Skip to content
This repository has been archived by the owner on Nov 17, 2023. It is now read-only.

Commit

Permalink
Merge branch 'master' into backward_op_cache
Browse files Browse the repository at this point in the history
  • Loading branch information
ZhennanQin authored Jul 16, 2018
2 parents f160c11 + 3ea67a7 commit 002ad65
Show file tree
Hide file tree
Showing 156 changed files with 7,976 additions and 8,303 deletions.
45 changes: 27 additions & 18 deletions Jenkinsfile
Original file line number Diff line number Diff line change
Expand Up @@ -28,8 +28,6 @@ mx_dist_lib = 'lib/libmxnet.so, lib/libmxnet.a, 3rdparty/dmlc-core/libdmlc.a, 3r
mx_cmake_lib = 'build/libmxnet.so, build/libmxnet.a, build/3rdparty/dmlc-core/libdmlc.a, build/tests/mxnet_unit_tests, build/3rdparty/openmp/runtime/src/libomp.so'
mx_cmake_mkldnn_lib = 'build/libmxnet.so, build/libmxnet.a, build/3rdparty/dmlc-core/libdmlc.a, build/tests/mxnet_unit_tests, build/3rdparty/openmp/runtime/src/libomp.so, build/3rdparty/mkldnn/src/libmkldnn.so.0'
mx_mkldnn_lib = 'lib/libmxnet.so, lib/libmxnet.a, lib/libiomp5.so, lib/libmkldnn.so.0, lib/libmklml_intel.so, 3rdparty/dmlc-core/libdmlc.a, 3rdparty/tvm/nnvm/lib/libnnvm.a'
// command to start a docker container
docker_run = 'tests/ci_build/ci_build.sh'
// timeout in minutes
max_time = 120
// assign any caught errors here
Expand Down Expand Up @@ -407,7 +405,7 @@ try {
}
}
},
//Todo: Set specific CUDA_ARCh for windows builds in cmake

'Build GPU windows':{
node('mxnetwindows-cpu') {
timeout(time: max_time, unit: 'MINUTES') {
Expand All @@ -417,7 +415,7 @@ try {
bat """mkdir build_vc14_gpu
call "C:\\Program Files (x86)\\Microsoft Visual Studio 14.0\\VC\\bin\\x86_amd64\\vcvarsx86_amd64.bat"
cd build_vc14_gpu
cmake -G \"NMake Makefiles JOM\" -DUSE_CUDA=1 -DUSE_CUDNN=1 -DUSE_NVRTC=1 -DUSE_OPENCV=1 -DUSE_OPENMP=1 -DUSE_PROFILER=1 -DUSE_BLAS=open -DUSE_LAPACK=1 -DUSE_DIST_KVSTORE=0 -DCUDA_ARCH_NAME=All -DCMAKE_CXX_FLAGS_RELEASE="/FS /MD /O2 /Ob2 /DNDEBUG" -DCMAKE_BUILD_TYPE=Release -DUSE_MKL_IF_AVAILABLE=0 ${env.WORKSPACE}"""
cmake -G \"NMake Makefiles JOM\" -DUSE_CUDA=1 -DUSE_CUDNN=1 -DUSE_NVRTC=1 -DUSE_OPENCV=1 -DUSE_OPENMP=1 -DUSE_PROFILER=1 -DUSE_BLAS=open -DUSE_LAPACK=1 -DUSE_DIST_KVSTORE=0 -DCUDA_ARCH_NAME=Manual -DCUDA_ARCH_BIN=52 -DCUDA_ARCH_PTX=52 -DCMAKE_CXX_FLAGS_RELEASE="/FS /MD /O2 /Ob2 /DNDEBUG" -DCMAKE_BUILD_TYPE=Release -DUSE_MKL_IF_AVAILABLE=0 ${env.WORKSPACE}"""
bat 'C:\\mxnet\\build_vc14_gpu.bat'
bat '''rmdir /s/q pkg_vc14_gpu
mkdir pkg_vc14_gpu\\lib
Expand Down Expand Up @@ -450,7 +448,7 @@ try {
call "C:\\Program Files (x86)\\Microsoft Visual Studio 14.0\\VC\\bin\\x86_amd64\\vcvarsx86_amd64.bat"
cd build_%BUILD_NAME%
copy ${env.WORKSPACE}\\3rdparty\\mkldnn\\config_template.vcxproj.user ${env.WORKSPACE}\\config_template.vcxproj.user /y
cmake -G \"NMake Makefiles JOM\" -DUSE_CUDA=1 -DUSE_CUDNN=1 -DUSE_NVRTC=1 -DUSE_OPENCV=1 -DUSE_OPENMP=1 -DUSE_PROFILER=1 -DUSE_BLAS=open -DUSE_LAPACK=1 -DUSE_DIST_KVSTORE=0 -DCUDA_ARCH_NAME=All -DUSE_MKLDNN=1 -DCMAKE_CXX_FLAGS_RELEASE="/FS /MD /O2 /Ob2 /DNDEBUG" -DCMAKE_BUILD_TYPE=Release ${env.WORKSPACE}"""
cmake -G \"NMake Makefiles JOM\" -DUSE_CUDA=1 -DUSE_CUDNN=1 -DUSE_NVRTC=1 -DUSE_OPENCV=1 -DUSE_OPENMP=1 -DUSE_PROFILER=1 -DUSE_BLAS=open -DUSE_LAPACK=1 -DUSE_DIST_KVSTORE=0 -DCUDA_ARCH_NAME=Manual -DCUDA_ARCH_BIN=52 -DCUDA_ARCH_PTX=52 -DUSE_MKLDNN=1 -DCMAKE_CXX_FLAGS_RELEASE="/FS /MD /O2 /Ob2 /DNDEBUG" -DCMAKE_BUILD_TYPE=Release ${env.WORKSPACE}"""
bat '''
call "C:\\Program Files (x86)\\Microsoft Visual Studio 14.0\\VC\\bin\\x86_amd64\\vcvarsx86_amd64.bat"
cd build_%BUILD_NAME%
Expand Down Expand Up @@ -1000,6 +998,30 @@ try {
}
}
},
    // Re-enabled dist-kvstore integration tests (previously disabled, see
    // https://github.com/apache/incubator-mxnet/issues/11441).
    // Each entry runs inside a docker container via docker_run(image, ci_function, use_gpu).
    'dist-kvstore tests GPU': {
      node('mxnetlinux-gpu') {
        ws('workspace/it-dist-kvstore') {
          timeout(time: max_time, unit: 'MINUTES') {
            init_git()
            // Reuse the GPU build artifacts stashed by the earlier build stage.
            unpack_lib('gpu')
            docker_run('ubuntu_gpu', 'integrationtest_ubuntu_gpu_dist_kvstore', true)
            publish_test_coverage()
          }
        }
      }
    },
    'dist-kvstore tests CPU': {
      node('mxnetlinux-cpu') {
        ws('workspace/it-dist-kvstore') {
          timeout(time: max_time, unit: 'MINUTES') {
            init_git()
            // Reuse the CPU build artifacts stashed by the earlier build stage.
            unpack_lib('cpu')
            docker_run('ubuntu_cpu', 'integrationtest_ubuntu_cpu_dist_kvstore', false)
            publish_test_coverage()
          }
        }
      }
    },
'Scala: GPU': {
node('mxnetlinux-gpu') {
ws('workspace/ut-scala-gpu') {
Expand All @@ -1012,19 +1034,6 @@ try {
}
}
}
// Disable until fixed https://github.com/apache/incubator-mxnet/issues/11441
// 'dist-kvstore tests GPU': {
// node('mxnetlinux-gpu') {
// ws('workspace/it-dist-kvstore') {
// timeout(time: max_time, unit: 'MINUTES') {
// init_git()
// unpack_lib('gpu')
// docker_run('ubuntu_gpu', 'integrationtest_ubuntu_gpu_dist_kvstore', true)
// publish_test_coverage()
// }
// }
// }
//}
}

stage('Deploy') {
Expand Down
2 changes: 1 addition & 1 deletion benchmark/python/sparse/util.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@ def estimate_density(DATA_PATH, feature_size):
raise Exception("Data is not there!")
density = []
P = 0.01
for _ in xrange(10):
for _ in range(10):
num_non_zero = 0
num_sample = 0
with open(DATA_PATH) as f:
Expand Down
8 changes: 1 addition & 7 deletions ci/docker/install/ubuntu_r.sh
Original file line number Diff line number Diff line change
Expand Up @@ -29,17 +29,11 @@ cd "$(dirname "$0")"
# install libraries for mxnet's r package on ubuntu
echo "deb http://cran.rstudio.com/bin/linux/ubuntu trusty/" >> /etc/apt/sources.list

key=E084DAB9

gpg --keyserver keyserver.ubuntu.com --recv-key $key || \
gpg --keyserver keyserver.pgp.com --recv-keys $key || \
gpg --keyserver ha.pool.sks-keyservers.net --recv-keys $key ;
apt-key add r.gpg

# Installing the latest version (3.3+) that is compatible with MXNet
add-apt-repository 'deb [arch=amd64,i386] https://cran.rstudio.com/bin/linux/ubuntu xenial/'

gpg -a --export $key | apt-key add -

apt-get update
apt-get install -y --allow-unauthenticated \
libcairo2-dev \
Expand Down
21 changes: 17 additions & 4 deletions ci/docker/runtime_functions.sh
Original file line number Diff line number Diff line change
Expand Up @@ -743,6 +743,22 @@ integrationtest_ubuntu_gpu_cpp_package() {
cpp-package/tests/ci_test.sh
}

# Run the distributed kvstore integration tests on CPU.
# Launches 7 local workers per test via tools/launch.py against
# tests/nightly/dist_sync_kvstore.py.
integrationtest_ubuntu_cpu_dist_kvstore() {
    set -ex
    export PYTHONPATH=./python/
    export MXNET_STORAGE_FALLBACK_LOG_VERBOSE=0
    export MXNET_USE_OPERATOR_TUNING=0
    cd tests/nightly/
    # Typed CPU test cases, each exercising a different kvstore code path.
    for test_type in gluon_step_cpu gluon_sparse_step_cpu invalid_cpu gluon_type_cpu; do
        ../../tools/launch.py -n 7 --launcher local python dist_sync_kvstore.py --type=${test_type}
    done
    # Default test, with and then without multi-precision support.
    ../../tools/launch.py -n 7 --launcher local python dist_sync_kvstore.py
    ../../tools/launch.py -n 7 --launcher local python dist_sync_kvstore.py --no-multiprecision
    # Gradient-compression variants, with and without multi-precision.
    ../../tools/launch.py -n 7 --launcher local python dist_sync_kvstore.py --type=compressed_cpu
    ../../tools/launch.py -n 7 --launcher local python dist_sync_kvstore.py --type=compressed_cpu --no-multiprecision
}

integrationtest_ubuntu_gpu_scala() {
set -ex
make scalapkg USE_OPENCV=1 USE_BLAS=openblas USE_CUDA=1 USE_CUDA_PATH=/usr/local/cuda USE_CUDNN=1 USE_DIST_KVSTORE=1 SCALA_ON_GPU=1
Expand All @@ -754,11 +770,8 @@ integrationtest_ubuntu_gpu_dist_kvstore() {
export PYTHONPATH=./python/
export MXNET_STORAGE_FALLBACK_LOG_VERBOSE=0
cd tests/nightly/
../../tools/launch.py -n 7 --launcher local python dist_sync_kvstore.py
../../tools/launch.py -n 7 --launcher local python dist_sync_kvstore.py --no-multiprecision
../../tools/launch.py -n 7 --launcher local python dist_device_sync_kvstore.py
../../tools/launch.py -n 7 --launcher local python dist_sync_kvstore.py --type=invalid
../../tools/launch.py -n 7 --launcher local python dist_sync_kvstore.py --type=gluon
../../tools/launch.py -n 7 --launcher local python dist_sync_kvstore.py --type=init_gpu
}

test_ubuntu_cpu_python2() {
Expand Down
2 changes: 1 addition & 1 deletion contrib/clojure-package/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -193,7 +193,7 @@ This command can be very handy too
timestamp, name, utilization.gpu [%], utilization.memory [%], memory.total [MiB], memory.free [MiB], memory.used [MiB]`

**Supported APIs**
There are 3 high level apis supported in MxNet: (Model/FeedForward), Module, and Gluon. The Module api is supported in the Clojure package because of the existing support for it in the Scala package. The Module api is very similar to the Gluon api and examples of the usage can be found in the examples directory. The Model/FeedForward Api is deprected.
There are 3 high level APIs supported in MXNet: (Model/FeedForward), Module, and Gluon. The Module API is supported in the Clojure package because of the existing support for it in the Scala package. The Module API is very similar to the Gluon API and examples of the usage can be found in the examples directory. The Model/FeedForward API is deprecated.

Gluon support will come later and may or may not be built on the Scala gluon api (when it lands there)

Expand Down
1 change: 1 addition & 0 deletions docs/api/python/gluon/contrib.md
Original file line number Diff line number Diff line change
Expand Up @@ -36,6 +36,7 @@ In the rest of this document, we list routines provided by the `gluon.contrib` p
HybridConcurrent
Identity
SparseEmbedding
SyncBatchNorm
```

### Recurrent neural network
Expand Down
3 changes: 2 additions & 1 deletion docs/install/ubuntu_setup.md
Original file line number Diff line number Diff line change
Expand Up @@ -211,7 +211,8 @@ Building *MXNet* from source is a 2 step process.
You can quickly build MXNet-R with the following two scripts found in the `/docs/install` folder:

```bash
cd docs/install
git clone --recursive https://github.com/apache/incubator-mxnet.git mxnet
cd mxnet/docs/install
./install_mxnet_ubuntu_python.sh
./install_mxnet_ubuntu_r.sh
```
Expand Down
82 changes: 0 additions & 82 deletions example/rcnn/.gitignore

This file was deleted.

6 changes: 0 additions & 6 deletions example/rcnn/Makefile

This file was deleted.

Loading

0 comments on commit 002ad65

Please sign in to comment.