Skip to content
This repository has been archived by the owner on Nov 17, 2023. It is now read-only.

Commit

Permalink
Merge branch 'master' into broken_links_aaron
Browse files Browse the repository at this point in the history
  • Loading branch information
aaronmarkham authored Oct 16, 2019
2 parents d1897a6 + c2bbde7 commit cd213b7
Show file tree
Hide file tree
Showing 270 changed files with 9,166 additions and 5,666 deletions.
6 changes: 5 additions & 1 deletion CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -58,6 +58,11 @@ message(STATUS "CMAKE_HOST_SYSTEM_PROCESSOR ${CMAKE_HOST_SYSTEM_PROCESSOR}")
message(STATUS "CMAKE_SYSTEM_PROCESSOR ${CMAKE_SYSTEM_PROCESSOR}")

message(STATUS "CMAKE_SYSTEM_NAME ${CMAKE_SYSTEM_NAME}")

# Define MXNET_USE_TVM_OP for every target when TVM operator support is
# requested. NOTE(review): add_definitions() is directory-scoped, so placing
# it this early makes the flag visible to all subdirectories/targets added
# later — presumably intentional so compilation units agree on the macro.
if(USE_TVM_OP)
  add_definitions(-DMXNET_USE_TVM_OP=1)
endif()

if(USE_CUDA AND NOT USE_OLDCMAKECUDA)
message(STATUS "CMake version '${CMAKE_VERSION}' using generator '${CMAKE_GENERATOR}'")
if(
Expand Down Expand Up @@ -743,7 +748,6 @@ if(USE_DIST_KVSTORE)
endif()

if(USE_TVM_OP)
add_definitions(-DMXNET_USE_TVM_OP=1)
list(APPEND mxnet_LINKER_LIBS ${CMAKE_CURRENT_BINARY_DIR}/3rdparty/tvm/libtvm_runtime.so)
include(cmake/BuildTVM.cmake)
add_subdirectory("3rdparty/tvm")
Expand Down
4 changes: 4 additions & 0 deletions cd/Jenkinsfile_cd_pipeline
Original file line number Diff line number Diff line change
Expand Up @@ -61,6 +61,10 @@ pipeline {
stage("Build") {
cd_utils.trigger_release_job("Build static libmxnet", "mxnet_lib/static", params.MXNET_VARIANTS)
}
stage("PyPI Release") {
echo "Building PyPI Release"
cd_utils.trigger_release_job("Release PyPI Packages", "python/pypi", params.MXNET_VARIANTS)
}
},

"Dynamic libmxnet based release": {
Expand Down
73 changes: 73 additions & 0 deletions cd/Jenkinsfile_utils.groovy
Original file line number Diff line number Diff line change
Expand Up @@ -119,4 +119,77 @@ def push_artifact(libmxnet_path, variant, libtype, license_paths = '', dependenc
sh "./cd/utils/artifact_repository.py --push --verbose --libtype ${libtype} --variant ${variant} --libmxnet ${libmxnet_path} --licenses ${license_paths} --dependencies ${dependency_paths}"
}

// Downloads a previously published artifact (libmxnet + support files) for
// the given variant/libtype from the artifact repository, optionally placing
// it in a destination directory.
def pull_artifact(variant, libtype, destination = '') {
    def pull_cmd = "./cd/utils/artifact_repository.py --pull --verbose" +
        " --libtype ${libtype} --variant ${variant} --destination ${destination}"
    sh pull_cmd
}

// Pulls the artifact for the given variant/libtype from the artifact
// repository and distributes its contents into the directories the build
// expects: shared libraries into lib/, other supporting files (headers,
// etc.) into cd_misc/, license files into licenses/. The temporary
// download directory (mxnet_artifact) is deleted afterwards.
def restore_artifact(variant, libtype) {

    pull_artifact(variant, libtype, 'mxnet_artifact')

    // move libmxnet.so and any bundled shared-library dependencies to lib/
    dir('lib') {
        sh "mv ../mxnet_artifact/libmxnet.so ."
        if (fileExists('../mxnet_artifact/dependencies')) {
            sh """find "../mxnet_artifact/dependencies" -type f -name "*.so*" -exec mv {} . \\;"""
            sh "ls ."
        }
    }

    dir('cd_misc') {
        if (fileExists('../mxnet_artifact/dependencies')) {
            // All library files (*.so*) should have been moved
            // to the lib directory. If anything is left, it will be
            // other supporting files (header files, etc.)
            sh """find "../mxnet_artifact/dependencies" -type f -exec mv {} . \\;"""
            sh "ls ."
        }
    }

    // license files shipped with the artifact, if any
    dir('licenses') {
        if (fileExists('../mxnet_artifact/licenses')) {
            sh """find "../mxnet_artifact/licenses" -type f -exec mv {} . \\;"""
            sh "ls ."
        }
    }

    // clean up the download staging directory
    dir('mxnet_artifact') {
        deleteDir()
    }
}

// A generic pipeline that can be used by *most* CD jobs
// It can be used when implementing the pipeline steps in the Jenkins_steps.groovy
// script for a particular delivery channel. However, it should also implement the
// build, test, and push steps.
// NOTE: Be mindful of the expected time that a step should take. If it will take a long time,
// and it can be done in a CPU node, do it in a CPU node. We should avoid using GPU instances unless
// we *have* to.
However, if it is only packaging libmxnet, which doesn't take long, then the pipeline can
just run on a single node, as is done below.
For examples of multi-node CD pipelines, see the binary_release/static and binary_release/dynamic
// pipeline.
// Runs the delivery-channel's build, test, and push steps sequentially
// on a single node of the given type, wrapped in a variant-named stage.
// `custom_steps` is the loaded pipeline script (e.g. Jenkins_pipeline.groovy
// via `this`) and must expose build/test/push closures taking the variant.
// NOTE(review): all three stages run on the same node, so node_type must
// satisfy the most demanding stage (e.g. GPU when tests need a GPU).
def generic_pipeline(mxnet_variant, custom_steps, node_type = "restricted-mxnetlinux-cpu") {
    return {
        node(node_type) {
            stage("${mxnet_variant}") {

                stage('Build') {
                    custom_steps.build(mxnet_variant)
                }

                stage('Test') {
                    custom_steps.test(mxnet_variant)
                }

                stage('Push') {
                    custom_steps.push(mxnet_variant)
                }
            }
        }
    }
}

return this
8 changes: 8 additions & 0 deletions cd/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -159,6 +159,10 @@ def get_pipeline(mxnet_variant) {
}
```

Examples:

* [PyPI Release](python/pypi/Jenkins_pipeline.groovy): In this pipeline, the majority of time is overwhelmingly spent on testing. Therefore, it should be ok to execute the whole pipeline on a GPU node (i.e. packaging, testing, and publishing).

**Per step**

Use this approach in cases where you have long running stages that don't depend on specialized/expensive hardware.
Expand Down Expand Up @@ -188,3 +192,7 @@ def test(mxnet_variant) {
}
}
```

Examples:

Both the [statically linked libmxnet](mxnet_lib/static/Jenkins_pipeline.groovy) and [dynamically linked libmxnet](mxnet_lib/dynamic/Jenkins_pipeline.groovy) pipelines have long running compilation and testing stages that **do not** require specialized/expensive hardware (e.g. GPUs). Therefore, as much as possible, it is important to run each stage on its own node, and design the pipeline to spend the least amount of time possible on expensive hardware. E.g. for GPU builds, only run GPU tests on GPU instances; all other stages can be executed on CPU nodes.
78 changes: 78 additions & 0 deletions cd/python/pypi/Jenkins_pipeline.groovy
Original file line number Diff line number Diff line change
@@ -0,0 +1,78 @@
// -*- mode: groovy -*-

// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
//
// Jenkins pipeline
// See documents at https://jenkins.io/doc/book/pipeline/jenkinsfile/

// NOTE:
// ci_utils and cd_utils are loaded by the originating Jenkins job, e.g. jenkins/Jenkinsfile_release_job

// Only post the following variants to PyPI.
// This is a temporary solution until we are confident with the packages generated by CI
// This should be removed in the not too distant future.
// We only skip the publish step so we can still QA the other variants.
pypi_releases = ["cu92", "cu92mkl"]

// Returns the CD pipeline closure for this variant, choosing a GPU node
// for CUDA variants and a CPU node otherwise.
def get_pipeline(mxnet_variant) {
    def node_type
    if (mxnet_variant.startsWith('cu')) {
        node_type = NODE_LINUX_GPU
    } else {
        node_type = NODE_LINUX_CPU
    }
    return cd_utils.generic_pipeline(mxnet_variant, this, node_type)
}

// Maps an mxnet variant to the docker build environment name.
// CUDA variants use the variant-specific GPU image; the "mkl" suffix is
// dropped because mkl variants share the plain CUDA image. Everything
// else uses the generic CPU image.
def get_environment(mxnet_variant) {
    if (mxnet_variant.startsWith('cu')) {
        return "ubuntu_gpu_${mxnet_variant}".replace("mkl", "")
    }
    return "ubuntu_cpu"
}

// Builds the PyPI wheel for the given variant: restores the statically
// linked libmxnet artifact, then packages it inside a docker container.
def build(mxnet_variant) {
    ws("workspace/python_pypi/${mxnet_variant}/${env.BUILD_NUMBER}") {
        ci_utils.init_git()
        cd_utils.restore_artifact(mxnet_variant, 'static')

        // package the restored library into a wheel file
        def use_nvidia_docker = mxnet_variant.startsWith('cu')
        def build_environment = get_environment(mxnet_variant)
        ci_utils.docker_run(build_environment, "cd_package_pypi ${mxnet_variant}", use_nvidia_docker, '500m', "RELEASE_BUILD='${env.RELEASE_BUILD}'")
    }
}

// Runs the PyPI integration tests against the built wheel, once for each
// python interpreter (python 2 and python 3).
def test(mxnet_variant) {
    ws("workspace/python_pypi/${mxnet_variant}/${env.BUILD_NUMBER}") {
        def use_nvidia_docker = mxnet_variant.startsWith('cu')
        def test_environment = get_environment(mxnet_variant)
        ci_utils.docker_run(test_environment, "cd_integration_test_pypi python ${use_nvidia_docker}", use_nvidia_docker)
        ci_utils.docker_run(test_environment, "cd_integration_test_pypi python3 ${use_nvidia_docker}", use_nvidia_docker)
    }
}

// Publishes the wheel to PyPI, but only for variants whitelisted in
// pypi_releases; all other variants are QA'd without being published.
def push(mxnet_variant) {
    ws("workspace/python_pypi/${mxnet_variant}/${env.BUILD_NUMBER}") {
        // publish package to pypi
        if (!(mxnet_variant in pypi_releases)) {
            echo "Temporarily skipping publishing PyPI package for '${mxnet_variant}'."
        } else {
            sh "./ci/docker/runtime_functions.sh cd_pypi_publish"
        }
    }
}

return this
43 changes: 43 additions & 0 deletions cd/python/pypi/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,43 @@
<!--- Licensed to the Apache Software Foundation (ASF) under one -->
<!--- or more contributor license agreements. See the NOTICE file -->
<!--- distributed with this work for additional information -->
<!--- regarding copyright ownership. The ASF licenses this file -->
<!--- to you under the Apache License, Version 2.0 (the -->
<!--- "License"); you may not use this file except in compliance -->
<!--- with the License. You may obtain a copy of the License at -->

<!--- http://www.apache.org/licenses/LICENSE-2.0 -->

<!--- Unless required by applicable law or agreed to in writing, -->
<!--- software distributed under the License is distributed on an -->
<!--- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -->
<!--- KIND, either express or implied. See the License for the -->
<!--- specific language governing permissions and limitations -->
<!--- under the License. -->

# PyPI CD Pipeline

The Jenkins pipelines for continuous delivery of the PyPI MXNet packages.
The pipelines for each variant are run, and fail, independently. Each depends
on a successful build of the statically linked libmxnet library.

The pipeline relies on the scripts and resources located in [tools/pip](https://github.com/apache/incubator-mxnet/tree/master/tools/pip)
to build the PyPI packages.

## Credentials

The pipeline depends on the following environment variables in order to successfully
retrieve the credentials for the PyPI account:

* CD_PYPI_SECRET_NAME
* DOCKERHUB_SECRET_ENDPOINT_URL
* DOCKERHUB_SECRET_ENDPOINT_REGION

The credentials are stored in the Secrets Manager of the AWS account hosting Jenkins.
The [pypi_publish.sh](pypi_publish.sh) script is in charge of retrieving the credentials.

## Mock publishing

Because of space limitations on PyPI, we don't want to push test packages from Jenkins Dev
every time the pipeline is run. Therefore, the [pypi_publish.sh](pypi_publish.sh)
script will fake publishing packages if the `username` is *skipPublish*.
60 changes: 60 additions & 0 deletions cd/python/pypi/pypi_package.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,60 @@
#!/usr/bin/env bash
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

# Packages the statically linked libmxnet into a PyPI wheel for the given
# variant. Expects to run from the MXNet source root with the artifact
# already restored (libmxnet in lib/, supporting files in cd_misc/).
# Output: wheel file in wheel_build/dist.

set -ex

# variant = cpu, mkl, cu80, cu80mkl, cu100, etc.
export mxnet_variant=${1:?"Please specify the mxnet variant"}

# Due to this PR: https://github.com/apache/incubator-mxnet/pull/14899
# The setup.py expects that mkldnn_version.h be present in
# mxnet-build/3rdparty/mkldnn/build/install/include
# The artifact repository stores this file in the dependencies
# and CD unpacks it to a directory called cd_misc
if [ -f "cd_misc/mkldnn_version.h" ]; then
    mkdir -p 3rdparty/mkldnn/build/install/include
    cp cd_misc/mkldnn_version.h 3rdparty/mkldnn/build/install/include/.
fi

# Create wheel workspace
rm -rf wheel_build
mkdir wheel_build
cd wheel_build

# Setup workspace
# setup.py expects mxnet-build to be the
# mxnet directory
ln -s ../. mxnet-build

# Copy the setup.py and other package resources
cp -R ../tools/pip/* .

# Remove comment lines from pip doc files
pushd doc
# Iterate with a glob instead of $(ls): parsing ls output breaks on
# filenames containing whitespace. Skip non-regular entries (and the
# literal '*' the glob yields when the directory is empty).
for file in *; do
    [ -f "${file}" ] || continue
    sed -i '/<!--/d' "${file}"
done
popd

echo "Building python package with environment:"
printenv
echo "-----------------------------------------"
pip install --user pypandoc

# Build wheel file - placed in wheel_build/dist
python setup.py bdist_wheel
Loading

0 comments on commit cd213b7

Please sign in to comment.