Pass Groovy formatter on Jenkinsfile
mikepapadim committed Sep 16, 2021
1 parent 2441390 commit 2fbee75
Showing 1 changed file with 76 additions and 78 deletions.
154 changes: 76 additions & 78 deletions Jenkinsfile
@@ -44,39 +44,39 @@
//

// NOTE: these lines are scanned by docker/dev_common.sh. Please update the regex as needed. -->
ci_lint = "tlcpack/ci-lint:v0.67"
ci_gpu = "tlcpack/ci-gpu:v0.77"
ci_cpu = "tlcpack/ci-cpu:v0.77"
ci_wasm = "tlcpack/ci-wasm:v0.71"
ci_i386 = "tlcpack/ci-i386:v0.73"
ci_qemu = "tlcpack/ci-qemu:v0.08"
ci_arm = "tlcpack/ci-arm:v0.06"
ci_lint = 'tlcpack/ci-lint:v0.67'
ci_gpu = 'tlcpack/ci-gpu:v0.77'
ci_cpu = 'tlcpack/ci-cpu:v0.77'
ci_wasm = 'tlcpack/ci-wasm:v0.71'
ci_i386 = 'tlcpack/ci-i386:v0.73'
ci_qemu = 'tlcpack/ci-qemu:v0.08'
ci_arm = 'tlcpack/ci-arm:v0.06'
// <--- End of regex-scanned config.

// Parameters to allow overriding (in Jenkins UI), the images
// to be used by a given build. When provided, they take precedence
// over default values above.
properties([
parameters([
string(name: 'ci_lint_param', defaultValue: ""),
string(name: 'ci_cpu_param', defaultValue: ""),
string(name: 'ci_gpu_param', defaultValue: ""),
string(name: 'ci_wasm_param', defaultValue: ""),
string(name: 'ci_i386_param', defaultValue: ""),
string(name: 'ci_qemu_param', defaultValue: ""),
string(name: 'ci_arm_param', defaultValue: "")
string(name: 'ci_lint_param', defaultValue: ''),
string(name: 'ci_cpu_param', defaultValue: ''),
string(name: 'ci_gpu_param', defaultValue: ''),
string(name: 'ci_wasm_param', defaultValue: ''),
string(name: 'ci_i386_param', defaultValue: ''),
string(name: 'ci_qemu_param', defaultValue: ''),
string(name: 'ci_arm_param', defaultValue: '')
])
])
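The hunks above only show the parameter declarations; the code that applies them to the ci_* variables is outside the visible diff. A minimal sketch of how such an override could be resolved, assuming Groovy's Elvis operator (the variable and parameter names are taken from above; the resolution logic itself is an assumption, not part of this commit):

// Sketch only (not part of this diff): prefer a non-empty value supplied in
// the Jenkins UI, otherwise keep the default image pinned above.
ci_lint = params.ci_lint_param ?: ci_lint
ci_cpu  = params.ci_cpu_param  ?: ci_cpu
ci_gpu  = params.ci_gpu_param  ?: ci_gpu

An empty string is falsy in Groovy, so leaving a parameter blank falls through to the pinned image tag.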

// tvm libraries
tvm_runtime = "build/libtvm_runtime.so, build/config.cmake"
tvm_lib = "build/libtvm.so, " + tvm_runtime
tvm_runtime = 'build/libtvm_runtime.so, build/config.cmake'
tvm_lib = 'build/libtvm.so, ' + tvm_runtime
// LLVM upstream lib
tvm_multilib = "build/libtvm.so, " +
"build/libvta_fsim.so, " +
tvm_multilib = 'build/libtvm.so, ' +
'build/libvta_fsim.so, ' +
tvm_runtime

tvm_multilib_tsim = "build/libvta_tsim.so, " +
tvm_multilib_tsim = 'build/libvta_tsim.so, ' +
tvm_multilib

// command to start a docker container
@@ -103,23 +103,23 @@ def init_git() {
}

def init_git_win() {
checkout scm
retry(5) {
checkout scm
retry(5) {
timeout(time: 2, unit: 'MINUTES') {
bat 'git submodule update --init -f'
bat 'git submodule update --init -f'
}
}
}
}

def cancel_previous_build() {
// cancel previous build if it is not on main.
if (env.BRANCH_NAME != "main") {
def buildNumber = env.BUILD_NUMBER as int
// Milestone API allows us to cancel previous build
// with the same milestone number
if (buildNumber > 1) milestone(buildNumber - 1)
milestone(buildNumber)
}
// cancel previous build if it is not on main.
if (env.BRANCH_NAME != 'main') {
def buildNumber = env.BUILD_NUMBER as int
// Milestone API allows us to cancel previous build
// with the same milestone number
if (buildNumber > 1) milestone(buildNumber - 1)
milestone(buildNumber)
}
}

cancel_previous_build()
@@ -148,13 +148,12 @@ stage('Prepare') {
}
}
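per_exec_ws() wraps every workspace path below, but its definition sits in a collapsed hunk. A plausible sketch, assuming it simply namespaces the workspace by Jenkins executor number (an assumption; the real helper is not shown in this diff):

// Sketch only: give each executor on a node its own checkout directory so
// parallel builds do not trample each other's workspace.
def per_exec_ws(folder) {
  return "workspace/exec_${env.EXECUTOR_NUMBER}/" + folder
}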


stage("Sanity Check") {
stage('Sanity Check') {
timeout(time: max_time, unit: 'MINUTES') {
node('CPU') {
ws(per_exec_ws("tvm/sanity")) {
init_git()
docs = sh (returnStatus: true, script: '''
ws(per_exec_ws('tvm/sanity')) {
init_git()
docs = sh (returnStatus: true, script: '''
./tests/scripts/git_check_tree.sh
'''
)
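// returnStatus makes `sh` return the script's exit code instead of failing
// the step; the stages below gate on `docs == 1`. Presumably
// git_check_tree.sh exits 1 when the change touches non-docs files, so the
// full build/test matrix only runs in that case (the script itself is not
// shown in this diff).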
@@ -195,7 +194,6 @@ def pack_lib(name, libs) {
stash includes: libs, name: name
}
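pack_lib()/unpack_lib() are thin wrappers around Jenkins stash/unstash. A minimal usage sketch, mirroring how the build and test stages below hand artifacts between nodes (the 'gpu' label and tvm_multilib list are taken from this file; the pairing shown is illustrative):

// Sketch only: stash the built libraries on the build node, then restore the
// same set on a different node before running tests.
node('GPUBUILD') { pack_lib('gpu', tvm_multilib) }
node('GPU')      { unpack_lib('gpu', tvm_multilib) }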


// unpack libraries saved before
def unpack_lib(name, libs) {
unstash name
@@ -208,7 +206,7 @@
stage('Build') {
parallel 'BUILD: GPU': {
node('GPUBUILD') {
ws(per_exec_ws("tvm/build-gpu")) {
ws(per_exec_ws('tvm/build-gpu')) {
init_git()
sh "${docker_run} ${ci_gpu} ./tests/scripts/task_config_build_gpu.sh"
make(ci_gpu, 'build', '-j2')
@@ -220,9 +218,9 @@ stage('Build') {
}
},
'BUILD: CPU': {
if( docs == 1) {
if (docs == 1) {
node('CPU') {
ws(per_exec_ws("tvm/build-cpu")) {
ws(per_exec_ws('tvm/build-cpu')) {
init_git()
sh "${docker_run} ${ci_cpu} ./tests/scripts/task_config_build_cpu.sh"
make(ci_cpu, 'build', '-j2')
@@ -236,16 +234,16 @@ stage('Build') {
// sh "${docker_run} ${ci_cpu} ./tests/scripts/task_golang.sh"
// TODO(@jroesch): need to resolve CI issue will turn back on in follow up patch
sh "${docker_run} ${ci_cpu} ./tests/scripts/task_rust.sh"
junit "build/pytest-results/*.xml"
junit 'build/pytest-results/*.xml'
}
}
}
}
},
'BUILD: WASM': {
if( docs == 1) {
if (docs == 1) {
node('CPU') {
ws(per_exec_ws("tvm/build-wasm")) {
ws(per_exec_ws('tvm/build-wasm')) {
init_git()
sh "${docker_run} ${ci_wasm} ./tests/scripts/task_config_build_wasm.sh"
make(ci_wasm, 'build', '-j2')
@@ -258,9 +256,9 @@ stage('Build') {
}
},
'BUILD : i386': {
if( docs == 1) {
if ( docs == 1) {
node('CPU') {
ws(per_exec_ws("tvm/build-i386")) {
ws(per_exec_ws('tvm/build-i386')) {
init_git()
sh "${docker_run} ${ci_i386} ./tests/scripts/task_config_build_i386.sh"
make(ci_i386, 'build', '-j2')
@@ -270,9 +268,9 @@ stage('Build') {
}
},
'BUILD : arm': {
if( docs == 1) {
if (docs == 1) {
node('ARM') {
ws(per_exec_ws("tvm/build-arm")) {
ws(per_exec_ws('tvm/build-arm')) {
init_git()
sh "${docker_run} ${ci_arm} ./tests/scripts/task_config_build_arm.sh"
make(ci_arm, 'build', '-j4')
@@ -282,79 +280,79 @@ stage('Build') {
}
},
'BUILD: QEMU': {
if( docs == 1) {
if (docs == 1) {
node('CPU') {
ws(per_exec_ws("tvm/build-qemu")) {
ws(per_exec_ws('tvm/build-qemu')) {
init_git()
sh "${docker_run} ${ci_qemu} ./tests/scripts/task_config_build_qemu.sh"
make(ci_qemu, 'build', '-j2')
timeout(time: max_time, unit: 'MINUTES') {
sh "${docker_run} ${ci_qemu} ./tests/scripts/task_ci_setup.sh"
sh "${docker_run} ${ci_qemu} ./tests/scripts/task_python_microtvm.sh"
junit "build/pytest-results/*.xml"
junit 'build/pytest-results/*.xml'
}
}
}
}
}
}
}
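make() is invoked in every build branch above but defined outside the visible hunks. A plausible sketch, assuming it runs a dockerised build script analogous to the task_*.sh scripts used elsewhere in this file (the script path and the omission of retry/clean-rebuild handling are assumptions):

// Sketch only: run the build step inside the requested CI container image.
def make(docker_type, path, make_flag) {
  timeout(time: max_time, unit: 'MINUTES') {
    sh "${docker_run} ${docker_type} ./tests/scripts/task_build.sh ${path} ${make_flag}"
  }
}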

stage('Unit Test') {
if( docs == 1) {
if (docs == 1) {
parallel 'python3: GPU': {
node('TensorCore') {
ws(per_exec_ws("tvm/ut-python-gpu")) {
ws(per_exec_ws('tvm/ut-python-gpu')) {
init_git()
unpack_lib('gpu', tvm_multilib)
timeout(time: max_time, unit: 'MINUTES') {
sh "${docker_run} ${ci_gpu} ./tests/scripts/task_ci_setup.sh"
sh "${docker_run} ${ci_gpu} ./tests/scripts/task_sphinx_precheck.sh"
sh "${docker_run} ${ci_gpu} ./tests/scripts/task_python_unittest_gpuonly.sh"
sh "${docker_run} ${ci_gpu} ./tests/scripts/task_python_integration_gpuonly.sh"
junit "build/pytest-results/*.xml"
junit 'build/pytest-results/*.xml'
}
}
}
},
'python3: i386': {
node('CPU') {
ws(per_exec_ws("tvm/ut-python-i386")) {
ws(per_exec_ws('tvm/ut-python-i386')) {
init_git()
unpack_lib('i386', tvm_multilib)
timeout(time: max_time, unit: 'MINUTES') {
sh "${docker_run} ${ci_i386} ./tests/scripts/task_ci_setup.sh"
sh "${docker_run} ${ci_i386} ./tests/scripts/task_python_unittest.sh"
sh "${docker_run} ${ci_i386} ./tests/scripts/task_python_integration.sh"
sh "${docker_run} ${ci_i386} ./tests/scripts/task_python_vta_fsim.sh"
junit "build/pytest-results/*.xml"
junit 'build/pytest-results/*.xml'
}
}
}
},
'python3: arm': {
node('ARM') {
ws(per_exec_ws("tvm/ut-python-arm")) {
ws(per_exec_ws('tvm/ut-python-arm')) {
init_git()
unpack_lib('arm', tvm_multilib)
timeout(time: max_time, unit: 'MINUTES') {
sh "${docker_run} ${ci_arm} ./tests/scripts/task_ci_setup.sh"
sh "${docker_run} ${ci_arm} ./tests/scripts/task_python_unittest.sh"
sh "${docker_run} ${ci_arm} ./tests/scripts/task_python_arm_compute_library.sh"
junit "build/pytest-results/*.xml"
// sh "${docker_run} ${ci_arm} ./tests/scripts/task_python_integration.sh"
junit 'build/pytest-results/*.xml'
// sh "${docker_run} ${ci_arm} ./tests/scripts/task_python_integration.sh"
}
}
}
},
'java: GPU': {
node('GPU') {
ws(per_exec_ws("tvm/ut-java")) {
ws(per_exec_ws('tvm/ut-java')) {
init_git()
unpack_lib('gpu', tvm_multilib)
timeout(time: max_time, unit: 'MINUTES') {
sh "${docker_run} ${ci_gpu} ./tests/scripts/task_ci_setup.sh"
sh "${docker_run} ${ci_gpu} ./tests/scripts/task_java_unittest.sh"
}
}
}
}
}
@@ -363,53 +361,53 @@ stage('Unit Test') {

stage('Integration Test') {
parallel 'topi: GPU': {
if( docs == 1) {
if (docs == 1) {
node('GPU') {
ws(per_exec_ws("tvm/topi-python-gpu")) {
ws(per_exec_ws('tvm/topi-python-gpu')) {
init_git()
unpack_lib('gpu', tvm_multilib)
timeout(time: max_time, unit: 'MINUTES') {
sh "${docker_run} ${ci_gpu} ./tests/scripts/task_ci_setup.sh"
sh "${docker_run} ${ci_gpu} ./tests/scripts/task_python_topi.sh"
junit "build/pytest-results/*.xml"
junit 'build/pytest-results/*.xml'
}
}
}
}
},
'frontend: GPU': {
if( docs == 1) {
if (docs == 1) {
node('GPU') {
ws(per_exec_ws("tvm/frontend-python-gpu")) {
ws(per_exec_ws('tvm/frontend-python-gpu')) {
init_git()
unpack_lib('gpu', tvm_multilib)
timeout(time: max_time, unit: 'MINUTES') {
sh "${docker_run} ${ci_gpu} ./tests/scripts/task_ci_setup.sh"
sh "${docker_run} ${ci_gpu} ./tests/scripts/task_python_frontend.sh"
junit "build/pytest-results/*.xml"
junit 'build/pytest-results/*.xml'
}
}
}
}
},
'frontend: CPU': {
if( docs == 1) {
if (docs == 1) {
node('CPU') {
ws(per_exec_ws("tvm/frontend-python-cpu")) {
ws(per_exec_ws('tvm/frontend-python-cpu')) {
init_git()
unpack_lib('cpu', tvm_multilib)
timeout(time: max_time, unit: 'MINUTES') {
sh "${docker_run} ${ci_cpu} ./tests/scripts/task_ci_setup.sh"
sh "${docker_run} ${ci_cpu} ./tests/scripts/task_python_frontend_cpu.sh"
junit "build/pytest-results/*.xml"
junit 'build/pytest-results/*.xml'
}
}
}
}
},
'docs: GPU': {
node('TensorCore') {
ws(per_exec_ws("tvm/docs-python-gpu")) {
ws(per_exec_ws('tvm/docs-python-gpu')) {
init_git()
unpack_lib('gpu', tvm_multilib)
timeout(time: max_time, unit: 'MINUTES') {
@@ -435,18 +433,18 @@ stage('Build packages') {
sh "${docker_run} tlcpack/conda-cuda100 ./conda/build_cuda.sh
}
}
// Here we could upload the packages to anaconda for releases
// and/or the main branch
// Here we could upload the packages to anaconda for releases
// and/or the main branch
}
*/

stage('Deploy') {
node('doc') {
ws(per_exec_ws("tvm/deploy-docs")) {
if (env.BRANCH_NAME == "main") {
unpack_lib('mydocs', 'docs.tgz')
sh "cp docs.tgz /var/docs/docs.tgz"
sh "tar xf docs.tgz -C /var/docs"
ws(per_exec_ws('tvm/deploy-docs')) {
if (env.BRANCH_NAME == 'main') {
unpack_lib('mydocs', 'docs.tgz')
sh 'cp docs.tgz /var/docs/docs.tgz'
sh 'tar xf docs.tgz -C /var/docs'
}
}
}