diff --git a/Jenkinsfile b/Jenkinsfile index ef13eb85c29a..11feadbc8640 100755 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -59,25 +59,25 @@ ci_arm = "tlcpack/ci-arm:v0.06" // over default values above. properties([ parameters([ - string(name: 'ci_lint_param', defaultValue: ""), - string(name: 'ci_cpu_param', defaultValue: ""), - string(name: 'ci_gpu_param', defaultValue: ""), - string(name: 'ci_wasm_param', defaultValue: ""), - string(name: 'ci_i386_param', defaultValue: ""), - string(name: 'ci_qemu_param', defaultValue: ""), - string(name: 'ci_arm_param', defaultValue: "") + string(name: 'ci_lint_param', defaultValue: ''), + string(name: 'ci_cpu_param', defaultValue: ''), + string(name: 'ci_gpu_param', defaultValue: ''), + string(name: 'ci_wasm_param', defaultValue: ''), + string(name: 'ci_i386_param', defaultValue: ''), + string(name: 'ci_qemu_param', defaultValue: ''), + string(name: 'ci_arm_param', defaultValue: '') ]) ]) // tvm libraries -tvm_runtime = "build/libtvm_runtime.so, build/config.cmake" -tvm_lib = "build/libtvm.so, " + tvm_runtime +tvm_runtime = 'build/libtvm_runtime.so, build/config.cmake' +tvm_lib = 'build/libtvm.so, ' + tvm_runtime // LLVM upstream lib -tvm_multilib = "build/libtvm.so, " + - "build/libvta_fsim.so, " + +tvm_multilib = 'build/libtvm.so, ' + + 'build/libvta_fsim.so, ' + tvm_runtime -tvm_multilib_tsim = "build/libvta_tsim.so, " + +tvm_multilib_tsim = 'build/libvta_tsim.so, ' + tvm_multilib // command to start a docker container @@ -96,19 +96,19 @@ def init_git() { script: """ echo "INFO: NODE_NAME=${NODE_NAME} EXECUTOR_NUMBER=${EXECUTOR_NUMBER}" """, - label: "Show executor node info", + label: 'Show executor node info', ) checkout scm retry(5) { timeout(time: 2, unit: 'MINUTES') { - sh (script: 'git submodule update --init -f', label: "Update git submodules") + sh (script: 'git submodule update --init -f', label: 'Update git submodules') } } } def cancel_previous_build() { // cancel previous build if it is not on main. 
- if (env.BRANCH_NAME != "main") { + if (env.BRANCH_NAME != 'main') { def buildNumber = env.BUILD_NUMBER as int // Milestone API allows us to cancel previous build // with the same milestone number @@ -117,6 +117,22 @@ def cancel_previous_build() { } } +def should_skip_ci(pr_number) { + withCredentials([string( + credentialsId: 'tvm-bot-jenkins-reader', + variable: 'TOKEN', + )]) { + // Exit code of 1 means run full CI (or the script had an error, so run + // full CI just in case). Exit code of 0 means skip CI. + git_skip_ci_code = sh ( + returnStatus: true, + script: "./tests/scripts/git_skip_ci.py --pr '${pr_number}'", + label: 'Check if CI should be skipped', + ) + } + return git_skip_ci_code == 0 +} + cancel_previous_build() stage('Prepare') { @@ -139,7 +155,7 @@ stage('Prepare') { echo " ci_i386 = ${ci_i386}" echo " ci_qemu = ${ci_qemu}" echo " ci_arm = ${ci_arm}" - """, label: "Docker image names") + """, label: 'Docker image names') } } @@ -151,11 +167,12 @@ stage('Sanity Check') { is_docs_only_build = sh ( returnStatus: true, script: './tests/scripts/git_change_docs.sh', - label: "Check for docs only changes", + label: 'Check for docs only changes', ) + skip_ci = should_skip_ci(env.CHANGE_ID) sh ( script: "${docker_run} ${ci_lint} ./tests/scripts/task_lint.sh", - label: "Run lint", + label: 'Run lint', ) } } @@ -179,7 +196,7 @@ def make(docker_type, path, make_flag) { echo 'Incremental compilation failed. 
Fall back to build from scratch' sh ( script: "${docker_run} ${docker_type} ./tests/scripts/task_clean.sh ${path}", - label: "Clear old cmake workspace", + label: 'Clear old cmake workspace', ) cmake_build(docker_type, path, make_flag) cpp_unittest(docker_type) @@ -192,7 +209,7 @@ def pack_lib(name, libs) { sh (script: """ echo "Packing ${libs} into ${name}" echo ${libs} | sed -e 's/,/ /g' | xargs md5sum - """, label: "Stash libraries and show md5") + """, label: 'Stash libraries and show md5') stash includes: libs, name: name } @@ -202,66 +219,68 @@ def unpack_lib(name, libs) { sh (script: """ echo "Unpacked ${libs} from ${name}" echo ${libs} | sed -e 's/,/ /g' | xargs md5sum - """, label: "Unstash libraries and show md5") + """, label: 'Unstash libraries and show md5') } def ci_setup(image) { sh ( script: "${docker_run} ${image} ./tests/scripts/task_ci_setup.sh", - label: "Set up CI environment", + label: 'Set up CI environment', ) } def python_unittest(image) { sh ( script: "${docker_run} ${image} ./tests/scripts/task_python_unittest.sh", - label: "Run Python unit tests", + label: 'Run Python unit tests', ) } def fsim_test(image) { sh ( script: "${docker_run} ${image} ./tests/scripts/task_python_vta_fsim.sh", - label: "Run VTA tests in FSIM ", + label: 'Run VTA tests in FSIM ', ) } def cmake_build(image, path, make_flag) { sh ( script: "${docker_run} ${image} ./tests/scripts/task_build.sh ${path} ${make_flag}", - label: "Run cmake build", + label: 'Run cmake build', ) } def cpp_unittest(image) { sh ( script: "${docker_run} ${image} ./tests/scripts/task_cpp_unittest.sh", - label: "Build and run C++ tests", + label: 'Build and run C++ tests', ) } stage('Build') { parallel 'BUILD: GPU': { - node('GPUBUILD') { - ws(per_exec_ws('tvm/build-gpu')) { - init_git() - sh "${docker_run} ${ci_gpu} ./tests/scripts/task_config_build_gpu.sh" - make(ci_gpu, 'build', '-j2') - pack_lib('gpu', tvm_multilib) - // compiler test - sh "${docker_run} ${ci_gpu} 
./tests/scripts/task_config_build_gpu_other.sh" - make(ci_gpu, 'build2', '-j2') + if (!skip_ci) { + node('GPUBUILD') { + ws(per_exec_ws('tvm/build-gpu')) { + init_git() + sh "${docker_run} ${ci_gpu} ./tests/scripts/task_config_build_gpu.sh" + make(ci_gpu, 'build', '-j2') + pack_lib('gpu', tvm_multilib) + // compiler test + sh "${docker_run} ${ci_gpu} ./tests/scripts/task_config_build_gpu_other.sh" + make(ci_gpu, 'build2', '-j2') + } + } } - } }, 'BUILD: CPU': { - if (is_docs_only_build != 1) { + if (!skip_ci && is_docs_only_build != 1) { node('CPU') { ws(per_exec_ws('tvm/build-cpu')) { init_git() sh ( script: "${docker_run} ${ci_cpu} ./tests/scripts/task_config_build_cpu.sh", - label: "Create CPU cmake config", + label: 'Create CPU cmake config', ) make(ci_cpu, 'build', '-j2') pack_lib('cpu', tvm_multilib_tsim) @@ -278,20 +297,20 @@ stage('Build') { } }, 'BUILD: WASM': { - if (is_docs_only_build != 1) { + if (!skip_ci && is_docs_only_build != 1) { node('CPU') { ws(per_exec_ws('tvm/build-wasm')) { init_git() sh ( script: "${docker_run} ${ci_wasm} ./tests/scripts/task_config_build_wasm.sh", - label: "Create WASM cmake config", + label: 'Create WASM cmake config', ) make(ci_wasm, 'build', '-j2') timeout(time: max_time, unit: 'MINUTES') { ci_setup(ci_wasm) sh ( script: "${docker_run} ${ci_wasm} ./tests/scripts/task_web_wasm.sh", - label: "Run WASM lint and tests", + label: 'Run WASM lint and tests', ) } } @@ -301,13 +320,13 @@ stage('Build') { } }, 'BUILD: i386': { - if ( is_docs_only_build != 1) { + if (!skip_ci && is_docs_only_build != 1) { node('CPU') { ws(per_exec_ws('tvm/build-i386')) { init_git() sh ( script: "${docker_run} ${ci_i386} ./tests/scripts/task_config_build_i386.sh", - label: "Create i386 cmake config", + label: 'Create i386 cmake config', ) make(ci_i386, 'build', '-j2') pack_lib('i386', tvm_multilib_tsim) @@ -318,13 +337,13 @@ stage('Build') { } }, 'BUILD: arm': { - if (is_docs_only_build != 1) { + if (!skip_ci && is_docs_only_build != 1) { 
node('ARM') { ws(per_exec_ws('tvm/build-arm')) { init_git() sh ( script: "${docker_run} ${ci_arm} ./tests/scripts/task_config_build_arm.sh", - label: "Create ARM cmake config", + label: 'Create ARM cmake config', ) make(ci_arm, 'build', '-j4') pack_lib('arm', tvm_multilib) @@ -335,22 +354,22 @@ stage('Build') { } }, 'BUILD: QEMU': { - if (is_docs_only_build != 1) { + if (!skip_ci && is_docs_only_build != 1) { node('CPU') { ws(per_exec_ws('tvm/build-qemu')) { init_git() sh ( script: "${docker_run} ${ci_qemu} ./tests/scripts/task_config_build_qemu.sh", - label: "Create QEMU cmake config", + label: 'Create QEMU cmake config', ) make(ci_qemu, 'build', '-j2') timeout(time: max_time, unit: 'MINUTES') { ci_setup(ci_qemu) sh ( script: "${docker_run} ${ci_qemu} ./tests/scripts/task_python_microtvm.sh", - label: "Run microTVM tests", + label: 'Run microTVM tests', ) - junit "build/pytest-results/*.xml" + junit 'build/pytest-results/*.xml' } } } @@ -362,7 +381,7 @@ stage('Build') { stage('Test') { parallel 'unittest: GPU': { - if (is_docs_only_build != 1) { + if (!skip_ci && is_docs_only_build != 1) { node('TensorCore') { ws(per_exec_ws('tvm/ut-python-gpu')) { init_git() @@ -371,21 +390,21 @@ stage('Test') { ci_setup(ci_gpu) sh ( script: "${docker_run} ${ci_gpu} ./tests/scripts/task_sphinx_precheck.sh", - label: "Check Sphinx warnings in docs", + label: 'Check Sphinx warnings in docs', ) sh ( script: "${docker_run} ${ci_gpu} ./tests/scripts/task_java_unittest.sh", - label: "Run Java unit tests", + label: 'Run Java unit tests', ) sh ( script: "${docker_run} ${ci_gpu} ./tests/scripts/task_python_unittest_gpuonly.sh", - label: "Run Python GPU unit tests", + label: 'Run Python GPU unit tests', ) sh ( script: "${docker_run} ${ci_gpu} ./tests/scripts/task_python_integration_gpuonly.sh", - label: "Run Python GPU integration tests", + label: 'Run Python GPU integration tests', ) - junit "build/pytest-results/*.xml" + junit 'build/pytest-results/*.xml' } } } @@ -394,18 +413,18 @@ 
stage('Test') { } }, 'integration: CPU': { - if (is_docs_only_build != 1) { + if (!skip_ci && is_docs_only_build != 1) { node('CPU') { - ws(per_exec_ws("tvm/ut-python-cpu")) { + ws(per_exec_ws('tvm/ut-python-cpu')) { init_git() unpack_lib('cpu', tvm_multilib_tsim) timeout(time: max_time, unit: 'MINUTES') { ci_setup(ci_cpu) sh ( script: "${docker_run} ${ci_cpu} ./tests/scripts/task_python_integration.sh", - label: "Run CPU integration tests", + label: 'Run CPU integration tests', ) - junit "build/pytest-results/*.xml" + junit 'build/pytest-results/*.xml' } } } @@ -414,7 +433,7 @@ stage('Test') { } }, 'unittest: CPU': { - if (is_docs_only_build != 1) { + if (!skip_ci && is_docs_only_build != 1) { node('CPU') { ws(per_exec_ws("tvm/ut-python-cpu")) { init_git() @@ -436,7 +455,7 @@ stage('Test') { } }, 'python3: i386': { - if (is_docs_only_build != 1) { + if (!skip_ci && is_docs_only_build != 1) { node('CPU') { ws(per_exec_ws('tvm/ut-python-i386')) { init_git() @@ -446,10 +465,10 @@ stage('Test') { python_unittest(ci_i386) sh ( script: "${docker_run} ${ci_i386} ./tests/scripts/task_python_integration_i386only.sh", - label: "Run i386 integration tests", + label: 'Run i386 integration tests', ) fsim_test(ci_i386) - junit "build/pytest-results/*.xml" + junit 'build/pytest-results/*.xml' } } } @@ -458,7 +477,7 @@ stage('Test') { } }, 'python3: arm': { - if (is_docs_only_build != 1) { + if (!skip_ci && is_docs_only_build != 1) { node('ARM') { ws(per_exec_ws('tvm/ut-python-arm')) { init_git() @@ -468,9 +487,9 @@ stage('Test') { python_unittest(ci_arm) sh ( script: "${docker_run} ${ci_arm} ./tests/scripts/task_python_arm_compute_library.sh", - label: "Run test_arm_compute_lib test", + label: 'Run test_arm_compute_lib test', ) - junit "build/pytest-results/*.xml" + junit 'build/pytest-results/*.xml' // sh "${docker_run} ${ci_arm} ./tests/scripts/task_python_integration.sh" } } @@ -480,7 +499,7 @@ stage('Test') { } }, 'topi: GPU': { - if (is_docs_only_build != 1) { + if 
(!skip_ci && is_docs_only_build != 1) { node('GPU') { ws(per_exec_ws('tvm/topi-python-gpu')) { init_git() @@ -489,9 +508,9 @@ stage('Test') { ci_setup(ci_gpu) sh ( script: "${docker_run} ${ci_gpu} ./tests/scripts/task_python_topi.sh", - label: "Run TOPI tests", + label: 'Run TOPI tests', ) - junit "build/pytest-results/*.xml" + junit 'build/pytest-results/*.xml' } } } @@ -500,7 +519,7 @@ stage('Test') { } }, 'frontend: GPU': { - if (is_docs_only_build != 1) { + if (!skip_ci && is_docs_only_build != 1) { node('GPU') { ws(per_exec_ws('tvm/frontend-python-gpu')) { init_git() @@ -509,9 +528,9 @@ stage('Test') { ci_setup(ci_gpu) sh ( script: "${docker_run} ${ci_gpu} ./tests/scripts/task_python_frontend.sh", - label: "Run Python frontend tests", + label: 'Run Python frontend tests', ) - junit "build/pytest-results/*.xml" + junit 'build/pytest-results/*.xml' } } } @@ -520,7 +539,7 @@ stage('Test') { } }, 'frontend: CPU': { - if (is_docs_only_build != 1) { + if (!skip_ci && is_docs_only_build != 1) { node('CPU') { ws(per_exec_ws('tvm/frontend-python-cpu')) { init_git() @@ -529,9 +548,9 @@ stage('Test') { ci_setup(ci_cpu) sh ( script: "${docker_run} ${ci_cpu} ./tests/scripts/task_python_frontend_cpu.sh", - label: "Run Python frontend tests", + label: 'Run Python frontend tests', ) - junit "build/pytest-results/*.xml" + junit 'build/pytest-results/*.xml' } } } @@ -540,18 +559,19 @@ stage('Test') { } }, 'docs: GPU': { - node('TensorCore') { - ws(per_exec_ws('tvm/docs-python-gpu')) { - init_git() - unpack_lib('gpu', tvm_multilib) - timeout(time: max_time, unit: 'MINUTES') { - ci_setup(ci_gpu) - sh ( - script: "${docker_run} ${ci_gpu} ./tests/scripts/task_python_docs.sh", - label: "Build docs", - ) + if (!skip_ci) { + node('TensorCore') { + ws(per_exec_ws('tvm/docs-python-gpu')) { + init_git() + unpack_lib('gpu', tvm_multilib) + timeout(time: max_time, unit: 'MINUTES') { + ci_setup(ci_gpu) + sh ( + script: "${docker_run} ${ci_gpu} ./tests/scripts/task_python_docs.sh", + label: 
'Build docs', + ) + } } - pack_lib('mydocs', 'docs.tgz') } } } diff --git a/README.md b/README.md index d96038d17804..3b86a34f8d49 100644 --- a/README.md +++ b/README.md @@ -51,3 +51,4 @@ We learned a lot from the following projects when building TVM. originates from Halide. We also learned and adapted some part of lowering pipeline from Halide. - [Loopy](https://github.com/inducer/loopy): use of integer set analysis and its loop transformation primitives. - [Theano](https://github.com/Theano/Theano): the design inspiration of symbolic scan operator for recurrence. +test diff --git a/docs/contribute/committer_guide.rst b/docs/contribute/committer_guide.rst index 6e553bd42a39..68885b6b927a 100644 --- a/docs/contribute/committer_guide.rst +++ b/docs/contribute/committer_guide.rst @@ -101,3 +101,37 @@ Sometimes, we tend to only interact with people we know. However, broad collaborations are necessary to the success of the project. Try to keep that in mind, shepherd PRs for, and request code reviews from community members who you do not interact physically. + + +Keeping CI Green +---------------- +Developers rely on the TVM CI to get signal on their PRs before merging. +Occasionally breakages slip through and break ``main``, which in turn causes +the same error to show up on a PR that is based on the broken commit(s). +In these situations it is possible to either revert the offending commit or +submit a forward fix to address the issue. It is up to the committer and commit +author which option to choose, keeping in mind that a broken CI affects all TVM +developers and should be fixed as soon as possible. + +For reverts and trivial forward fixes, adding ``[skip ci]`` to the revert's +commit message will cause CI to shortcut and only run lint. Committers should +take care that they only merge CI-skipped PRs to fix a failure on ``main`` and +not in cases where the submitter wants to shortcut CI to merge a change faster. + +.. 
code:: bash + + # Example: Skip CI on a revert + # Revert HEAD commit, make sure to insert '[skip ci]' at the beginning of + # the commit subject + git revert HEAD + + git checkout -b my_fix + # After you have pushed your branch, create a PR as usual. + git push my_repo + + # Example: Skip CI on a branch with an existing PR + # Adding this commit to an existing branch will cause a new CI run where + # Jenkins is skipped + git commit --allow-empty --message "[skip ci] Trigger skipped CI" + git push my_repo + diff --git a/tests/python/unittest/test_ci.py b/tests/python/unittest/test_ci.py new file mode 100644 index 000000000000..ac7e6cdd7c29 --- /dev/null +++ b/tests/python/unittest/test_ci.py @@ -0,0 +1,132 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +import pathlib +import subprocess +import sys +import tempfile + +import pytest + +REPO_ROOT = pathlib.Path(__file__).resolve().parent.parent.parent.parent + + +def test_skip_ci(): + skip_ci_script = REPO_ROOT / "tests" / "scripts" / "git_skip_ci.py" + + class TempGit: + def __init__(self, cwd): + self.cwd = cwd + + def run(self, *args): + proc = subprocess.run(["git"] + list(args), cwd=self.cwd) + if proc.returncode != 0: + raise RuntimeError(f"git command failed: '{args}'") + + def test(commands, should_skip, pr_title, why): + with tempfile.TemporaryDirectory() as dir: + git = TempGit(dir) + # Jenkins git is too old and doesn't have 'git init --initial-branch' + git.run("init") + git.run("checkout", "-b", "main") + git.run("remote", "add", "origin", "https://github.com/apache/tvm.git") + git.run("config", "user.name", "ci") + git.run("config", "user.email", "email@example.com") + git.run("commit", "--allow-empty", "--message", "base commit") + for command in commands: + git.run(*command) + pr_number = "1234" + proc = subprocess.run( + [str(skip_ci_script), "--pr", pr_number, "--pr-title", pr_title], cwd=dir + ) + expected = 0 if should_skip else 1 + assert proc.returncode == expected, why + + test( + commands=[], + should_skip=False, + pr_title="[skip ci] test", + why="ci should not be skipped", + ) + + test( + commands=[ + ["commit", "--allow-empty", "--message", "[skip ci] commit 1"], + ], + should_skip=False, + pr_title="[skip ci] test", + why="ci should not be skipped on main", + ) + + test( + commands=[ + ["checkout", "-b", "some_new_branch"], + ["commit", "--allow-empty", "--message", "[skip ci] commit 1"], + ], + should_skip=True, + pr_title="[skip ci] test", + why="ci should be skipped on a branch with [skip ci] in the last commit", + ) + + test( + commands=[ + ["checkout", "-b", "some_new_branch"], + ["commit", "--allow-empty", "--message", "[skip ci] commit 1"], + ], + should_skip=False, + pr_title="[no skip ci] test", + why="ci should not be skipped 
on a branch with [skip ci] in the last commit but not the PR title", + ) + + test( + commands=[ + ["checkout", "-b", "some_new_branch"], + ["commit", "--allow-empty", "--message", "[skip ci] commit 1"], + ["commit", "--allow-empty", "--message", "commit 2"], + ], + should_skip=False, + pr_title="[skip ci] test", + why="ci should not be skipped on a branch without [skip ci] in the last commit", + ) + + test( + commands=[ + ["checkout", "-b", "some_new_branch"], + ["commit", "--allow-empty", "--message", "[skip ci] commit 1"], + ["commit", "--allow-empty", "--message", "commit 2"], + ], + should_skip=False, + pr_title="[skip ci] test", + why="ci should not be skipped on a branch without [skip ci] in the last commit", + ) + + test( + commands=[ + ["checkout", "-b", "some_new_branch"], + ["commit", "--allow-empty", "--message", "commit 1"], + ["commit", "--allow-empty", "--message", "commit 2"], + ["commit", "--allow-empty", "--message", "commit 3"], + ["commit", "--allow-empty", "--message", "commit 4"], + ], + should_skip=False, + pr_title="[skip ci] test", + why="ci should not be skipped on a branch without [skip ci] in the last commit", + ) + + +if __name__ == "__main__": + sys.exit(pytest.main([__file__] + sys.argv[1:])) diff --git a/tests/scripts/git_skip_ci.py b/tests/scripts/git_skip_ci.py new file mode 100755 index 000000000000..73fcc6490ab8 --- /dev/null +++ b/tests/scripts/git_skip_ci.py @@ -0,0 +1,110 @@ +#!/usr/bin/env python3 +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +import os +import json +import argparse +import subprocess +import re +from urllib import request +from typing import Dict, Tuple, Any + + +class GitHubRepo: + def __init__(self, user, repo, token): + self.token = token + self.user = user + self.repo = repo + self.base = f"https://api.github.com/repos/{user}/{repo}/" + + def headers(self): + return { + "Authorization": f"Bearer {self.token}", + } + + def get(self, url: str) -> Dict[str, Any]: + url = self.base + url + print("Requesting", url) + req = request.Request(url, headers=self.headers()) + with request.urlopen(req) as response: + response = json.loads(response.read()) + return response + + +def parse_remote(remote: str) -> Tuple[str, str]: + """ + Get a GitHub (user, repo) pair out of a git remote + """ + if remote.startswith("https://"): + # Parse HTTP remote + parts = remote.split("/") + if len(parts) < 2: + raise RuntimeError(f"Unable to parse remote '{remote}'") + return parts[-2], parts[-1].replace(".git", "") + else: + # Parse SSH remote + m = re.search(r":(.*)/(.*)\.git", remote) + if m is None or len(m.groups()) != 2: + raise RuntimeError(f"Unable to parse remote '{remote}'") + return m.groups() + + +def git(command): + proc = subprocess.run(["git"] + command, stdout=subprocess.PIPE, check=True) + return proc.stdout.decode().strip() + + +if __name__ == "__main__": + help = "Exits with 0 if CI should be skipped, 1 otherwise" + parser = argparse.ArgumentParser(description=help) + parser.add_argument("--pr", required=True) + parser.add_argument("--remote", default="origin", help="ssh remote 
to parse") + parser.add_argument( + "--pr-title", help="(testing) PR title to use instead of fetching from GitHub" + ) + args = parser.parse_args() + + branch = git(["rev-parse", "--abbrev-ref", "HEAD"]) + log = git(["log", "--format=%s", "-1"]) + + # Check the PR's title (don't check this until everything else passes first) + def check_pr_title(): + remote = git(["config", "--get", f"remote.{args.remote}.url"]) + user, repo = parse_remote(remote) + + if args.pr_title: + title = args.pr_title + else: + github = GitHubRepo(token=os.environ["TOKEN"], user=user, repo=repo) + pr = github.get(f"pulls/{args.pr}") + title = pr["title"] + print("pr title:", title) + return title.startswith("[skip ci]") + + if ( + args.pr != "null" + and args.pr.strip() != "" + and branch != "main" + and log.startswith("[skip ci]") + and check_pr_title() + ): + print("Commit and PR start with '[skip ci]', skipping...") + exit(0) + else: + print(f"Not skipping CI:\nargs.pr: {args.pr}\nbranch: {branch}\ncommit: {log}") + exit(1)