diff --git a/cd/Jenkinsfile_cd_pipeline b/cd/Jenkinsfile_cd_pipeline
index 4e4e6a7af3d4..f42f5cd7f933 100644
--- a/cd/Jenkinsfile_cd_pipeline
+++ b/cd/Jenkinsfile_cd_pipeline
@@ -52,6 +52,10 @@ pipeline {
         stage("Build") {
           cd_utils.trigger_release_job("Build static libmxnet", "mxnet_lib/static", params.MXNET_VARIANTS)
         }
+        stage("PyPI Release") {
+          echo "Building PyPI Release"
+          cd_utils.trigger_release_job("Release PyPI Packages", "python/pypi", params.MXNET_VARIANTS)
+        }
       },

       "Dynamic libmxnet based release": {
diff --git a/cd/Jenkinsfile_release_job b/cd/Jenkinsfile_release_job
index 6a8864f4dbb1..d351ba8a75bc 100644
--- a/cd/Jenkinsfile_release_job
+++ b/cd/Jenkinsfile_release_job
@@ -37,7 +37,7 @@ pipeline {
   parameters {
     // Release parameters
     string(defaultValue: "Generic release job", description: "Optional Job name", name: "RELEASE_JOB_NAME")
-    choice(choices: ["mxnet_lib/static", "mxnet_lib/dynamic"], description: "Pipeline to build", name: "RELEASE_JOB_TYPE")
+    choice(choices: ["mxnet_lib/static", "mxnet_lib/dynamic", "python/pypi"], description: "Pipeline to build", name: "RELEASE_JOB_TYPE")
     string(defaultValue: "cpu,mkl,cu90,cu90mkl,cu92,cu92mkl,cu100,cu100mkl,cu101,cu101mkl", description: "Comma separated list of variants", name: "MXNET_VARIANTS")
     booleanParam(defaultValue: false, description: 'Whether this is a release build or not', name: "RELEASE_BUILD")
   }
diff --git a/cd/Jenkinsfile_utils.groovy b/cd/Jenkinsfile_utils.groovy
index 1820629c99cd..da8171d67218 100644
--- a/cd/Jenkinsfile_utils.groovy
+++ b/cd/Jenkinsfile_utils.groovy
@@ -100,14 +100,15 @@ def push_artifact(libmxnet_path, variant, libtype, license_paths = '', dependenc

 // pull artifact from repository
 def pull_artifact(variant, libtype, destination = '') {
-  sh "./ci/cd/utils/artifact_repository.py --pull --verbose --libtype ${libtype} --variant ${variant} --destination ${destination}"
+  sh "./cd/utils/artifact_repository.py --pull --verbose --libtype ${libtype} --variant ${variant} --destination ${destination}"
 }

 // pulls artifact from repository and places files in the appropriate directories
 def restore_artifact(variant, libtype) {
+
   pull_artifact(variant, libtype, 'mxnet_artifact')

-  // move libraries to lib directory
+  // move libraries to lib directory
   dir('lib') {
     sh "mv ../mxnet_artifact/libmxnet.so ."
     if (fileExists('../mxnet_artifact/dependencies')) {
@@ -139,13 +140,13 @@ def restore_artifact(variant, libtype) {
 }

 // A generic pipeline that can be used by *most* CD jobs
-// It can use used when implementing the pipeline steps in the Jenkins_steps.groovy
-// script for the particular delivery channel. However, it should also implemente the
+// It can be used when implementing the pipeline steps in the Jenkins_steps.groovy
+// script for a particular delivery channel. However, it should also implement the
 // build, test, and push steps.
 // NOTE: Be mindful of the expected time that a step should take. If it will take a long time,
 // and it can be done on a CPU node, do it on a CPU node. We should avoid using GPU instances unless
 // we *have* to.
-// However, if it is only packaging libmxnet and that doesn't take long. The the pipeline can
+// However, if it is only packaging libmxnet and that doesn't take long, the pipeline can
 // just run on a single node, as is done below.
 // For examples of multi-node CD pipelines, see the binary_release/static and binary_release/dynamic
 // pipelines.
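For reference, the `generic_pipeline` helper those comments describe follows roughly the shape sketched below: a single-node pipeline that delegates to the channel script's build, test, and push steps. This is a minimal sketch under stated assumptions; the actual implementation in `Jenkinsfile_utils.groovy` may differ in details such as the node label and stage layout.

```groovy
// Minimal sketch of a generic single-node CD pipeline. Assumes the delivery
// channel script (custom_steps) implements build(), test(), and push().
// The node label default is illustrative, not the actual CD configuration.
def generic_pipeline(mxnet_variant, custom_steps, node_type = 'restricted-mxnetlinux-cpu') {
  return {
    node(node_type) {
      stage('Build') {
        custom_steps.build(mxnet_variant)
      }
      stage('Test') {
        custom_steps.test(mxnet_variant)
      }
      stage('Push') {
        custom_steps.push(mxnet_variant)
      }
    }
  }
}
```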
diff --git a/cd/README.md b/cd/README.md
index a66fd0ce5533..c5b0555ced3e 100644
--- a/cd/README.md
+++ b/cd/README.md
@@ -150,6 +150,10 @@ def get_pipeline(mxnet_variant) {
 }
 ```

+Examples:
+
+ * [PyPI Release](python/pypi/Jenkins_pipeline.groovy): In this pipeline, the overwhelming majority of the time is spent on testing. Therefore, it should be ok to execute the whole pipeline (i.e. packaging, testing, and publishing) on a GPU node.
+
 **Per step**

 Use this approach in cases where you have long running stages that don't depend on specialized/expensive hardware.
@@ -179,3 +183,7 @@ def test(mxnet_variant) {
   }
 }
 ```
+
+Examples:
+
+Both the [statically linked libmxnet](mxnet_lib/static/Jenkins_pipeline.groovy) and [dynamically linked libmxnet](mxnet_lib/dynamic/Jenkins_pipeline.groovy) pipelines have long running compilation and testing stages that **do not** require specialized/expensive hardware (e.g. GPUs). Therefore, as much as possible, each stage should run on its own node, and the pipeline should be designed to spend the least amount of time possible on expensive hardware. E.g. for GPU builds, only run GPU tests on GPU instances; all other stages can be executed on CPU nodes.
\ No newline at end of file
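To make the "per step" approach above concrete, the following is an illustrative sketch of a `get_pipeline` that pins each stage to its own node. The node labels and step helpers here are assumptions for illustration, not the actual CD code:

```groovy
// Illustrative sketch of the "per step" approach: each long-running stage
// acquires its own node, so expensive (GPU) hardware is held only while it
// is actually needed. Labels and helpers are assumptions.
def get_pipeline(mxnet_variant) {
  return {
    stage('Build') {
      node(NODE_LINUX_CPU) {
        build(mxnet_variant)  // compilation can always run on a CPU node
      }
    }
    stage('Test') {
      // only GPU variants need a GPU instance for testing
      def test_node = mxnet_variant.startsWith('cu') ? NODE_LINUX_GPU : NODE_LINUX_CPU
      node(test_node) {
        test(mxnet_variant)
      }
    }
    stage('Push') {
      node(NODE_LINUX_CPU) {
        push(mxnet_variant)
      }
    }
  }
}
```

With this shape, a GPU instance is allocated only for the test stage of GPU variants, rather than for the pipeline's whole lifetime.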
diff --git a/cd/python/pypi/Jenkins_pipeline.groovy b/cd/python/pypi/Jenkins_pipeline.groovy
new file mode 100644
index 000000000000..a89228aef80b
--- /dev/null
+++ b/cd/python/pypi/Jenkins_pipeline.groovy
@@ -0,0 +1,68 @@
+// -*- mode: groovy -*-
+
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+//
+// Jenkins pipeline
+// See documents at https://jenkins.io/doc/book/pipeline/jenkinsfile/
+
+// NOTE:
+// ci_utils and cd_utils are loaded by the originating Jenkins job, e.g. jenkins/Jenkinsfile_release_job
+
+def get_pipeline(mxnet_variant) {
+  def node_type = mxnet_variant.startsWith('cu') ? NODE_LINUX_GPU : NODE_LINUX_CPU
+  return cd_utils.generic_pipeline(mxnet_variant, this, node_type)
+}
+
+def get_environment(mxnet_variant) {
+  def environment = "ubuntu_cpu"
+  if (mxnet_variant.startsWith('cu')) {
+    environment = "ubuntu_gpu_${mxnet_variant}".replace("mkl", "")
+  }
+  return environment
+}
+
+def build(mxnet_variant) {
+  ws("workspace/python_pypi/${mxnet_variant}/${env.BUILD_NUMBER}") {
+    ci_utils.init_git()
+    cd_utils.restore_artifact(mxnet_variant, 'static')
+
+    // create wheel file
+    def environment = get_environment(mxnet_variant)
+    def nvidia_docker = mxnet_variant.startsWith('cu')
+    ci_utils.docker_run(environment, "cd_package_pypi ${mxnet_variant}", nvidia_docker, '500m', "RELEASE_BUILD='${env.RELEASE_BUILD}'")
+  }
+}
+
+def test(mxnet_variant) {
+  ws("workspace/python_pypi/${mxnet_variant}/${env.BUILD_NUMBER}") {
+    // test wheel file
+    def environment = get_environment(mxnet_variant)
+    def nvidia_docker = mxnet_variant.startsWith('cu')
+    ci_utils.docker_run(environment, "cd_integration_test_pypi python ${nvidia_docker}", nvidia_docker)
+    ci_utils.docker_run(environment, "cd_integration_test_pypi python3 ${nvidia_docker}", nvidia_docker)
+  }
+}
+
+def push(mxnet_variant) {
+  ws("workspace/python_pypi/${mxnet_variant}/${env.BUILD_NUMBER}") {
+    // publish package to pypi
+    sh "./ci/docker/runtime_functions.sh cd_pypi_publish"
+  }
+}
+
+return this
diff --git a/cd/python/pypi/README.md b/cd/python/pypi/README.md
new file mode 100644
index 000000000000..a9b91c72f540
--- /dev/null
+++ b/cd/python/pypi/README.md
@@ -0,0 +1,26 @@
+# PyPI CD Pipeline
+
+The Jenkins pipelines for continuous delivery of the PyPI MXNet packages.
+The pipelines for each variant are run, and fail, independently. Each depends
+on a successful build of the statically linked libmxnet library.
+
+The pipeline relies on the scripts and resources located in [tools/pip](https://github.com/apache/incubator-mxnet/tree/master/tools/pip)
+to build the PyPI packages.
+
+## Credentials
+
+The pipeline depends on the following environment variables in order to successfully
+retrieve the credentials for the PyPI account:
+
+* CD_PYPI_SECRET_NAME
+* DOCKERHUB_SECRET_ENDPOINT_URL
+* DOCKERHUB_SECRET_ENDPOINT_REGION
+
+The credentials are stored in the Secrets Manager of the AWS account hosting Jenkins.
+The [pypi_publish.py](pypi_publish.py) script is in charge of retrieving the credentials.
+
+## Mock publishing
+
+Because of space limitations on PyPI, we don't want to push test packages from Jenkins Dev
+every time the pipeline is run. Therefore, the [pypi_publish.py](pypi_publish.py)
+script will fake publishing packages if the `username` is *skipPublish*.
diff --git a/cd/python/pypi/pypi_package.sh b/cd/python/pypi/pypi_package.sh
new file mode 100755
index 000000000000..fdfed461eb44
--- /dev/null
+++ b/cd/python/pypi/pypi_package.sh
@@ -0,0 +1,59 @@
+#!/usr/bin/env bash
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+set -ex
+
+# variant = cpu, mkl, cu80, cu80mkl, cu100, etc.
+export mxnet_variant=${1:?"Please specify the mxnet variant"}
+
+# Due to this PR: https://github.com/apache/incubator-mxnet/pull/14899
+# The setup.py expects that mkldnn_version.h be present in
+# mxnet-build/3rdparty/mkldnn/build/install/include
+# The artifact repository stores this file in the dependencies
+# and CD unpacks it to a directory called cd_misc
+if [ -f "cd_misc/mkldnn_version.h" ]; then
+    mkdir -p 3rdparty/mkldnn/build/install/include
+    cp cd_misc/mkldnn_version.h 3rdparty/mkldnn/build/install/include/.
+fi
+
+# Create wheel workspace
+rm -rf wheel_build
+mkdir wheel_build
+cd wheel_build
+
+# Setup workspace
+# setup.py expects mxnet-build to be the
+# mxnet directory
+ln -s ../. mxnet-build
+
+# Copy the setup.py and other package resources
+cp -R ../tools/pip/* .
+
+# Remove comment lines from pip doc files
+pushd doc
+for file in $(ls); do
+    sed -i '/