11 changes: 5 additions & 6 deletions .github/workflows/dist.yml
@@ -44,12 +44,11 @@ jobs:
runs-on: macos-latest
timeout-minutes: 30
env:
NO_VERSION_LABEL: ${{ github.event_name == 'release' && 'OFF' || 'ON' }}
# NO_GIT_VERSION disables embedding the git commit hash in version metadata.
# NO_VERSION_LABEL disables embedding the toolchain / git commit hash in version metadata.
# Otherwise, the version of the SDist has a git hash suffix (e.g., 0.1.0+gitabcdef12),
# but the package built from the SDist has no way to get the git hash (it is not a git repo),
# leading to inconsistent versions between SDist and built packages (+gitabcdef12 vs. +gitunknown).
NO_GIT_VERSION: "ON"
NO_VERSION_LABEL: 'OFF'

steps:
- name: Checkout repository
@@ -89,7 +88,7 @@ jobs:

- name: Upload SDist
# Skip regular PRs to save artifact storage, as the SDist is only needed for releases.
if: github.event_name != 'pull_request'
if: github.event_name != 'pull_request' || contains(github.event.pull_request.title, '[Release]')
uses: actions/upload-artifact@v5
with:
name: sdist
Expand Down Expand Up @@ -157,7 +156,7 @@ jobs:

- name: Upload wheels
# Skip regular PRs to save artifact storage, as wheels are only needed for releases.
if: github.event_name != 'pull_request'
if: github.event_name != 'pull_request' || contains(github.event.pull_request.title, '[Release]')
uses: actions/upload-artifact@v5
with:
name: wheels-${{ matrix.python-version }}-${{ runner.os }}-${{ runner.arch }}-${{ matrix.target.toolkit }}
@@ -167,7 +166,7 @@
list-artifacts:
name: List artifacts
# Skip regular PRs to save artifact storage, as artifacts are only needed for releases.
if: github.event_name != 'pull_request'
if: github.event_name != 'pull_request' || contains(github.event.pull_request.title, '[Release]')
runs-on: ubuntu-latest
needs: [build-sdist, build-wheels]
timeout-minutes: 15
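The artifact-upload condition added in the three places above keeps uploads off for ordinary PRs while still exercising them for release-style PRs. A hypothetical bash rendering of that GitHub Actions expression, for illustration only (the real check runs in the workflow engine, and the event name and title values here are made up):

event_name="pull_request"
pr_title="[Release] v0.2.0"
if [ "$event_name" != "pull_request" ] || [[ "$pr_title" == *"[Release]"* ]]; then
  echo "upload artifacts"   # release events and PRs titled with [Release]
else
  echo "skip upload"        # ordinary PRs save artifact storage
fi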
3 changes: 3 additions & 0 deletions .gitignore
@@ -102,3 +102,6 @@ tilelang/jit/adapter/cython/.cycache
# CMake
cmake-build/
cmake-build-*/

# Git version for sdist
_git_commit.txt
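The NO_VERSION_LABEL comment in dist.yml and the new _git_commit.txt entry above relate to the same problem: a git-derived version label is only available inside a checkout. A minimal sketch of the idea, assuming a provider that caches the commit hash in a file shipped with the sdist (illustrative only, not the actual version_provider.py logic):

# Inside the git checkout the hash is available and can be cached for the sdist;
# an unpacked sdist has no .git directory, so without the cache the label
# degrades to +gitunknown and no longer matches the sdist's own +gitabcdef12.
if commit="$(git rev-parse --short=8 HEAD 2>/dev/null)"; then
  printf '%s\n' "$commit" > _git_commit.txt
  echo "version: 0.1.0+git${commit}"
elif [ -f _git_commit.txt ]; then
  echo "version: 0.1.0+git$(cat _git_commit.txt)"
else
  echo "version: 0.1.0+gitunknown"
fi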
35 changes: 0 additions & 35 deletions MANIFEST.in

This file was deleted.

3 changes: 0 additions & 3 deletions maint/scripts/docker_build_all.sh

This file was deleted.

69 changes: 2 additions & 67 deletions maint/scripts/docker_local_distribute.sh
@@ -1,70 +1,5 @@
#!/usr/bin/env bash
set -euxo pipefail

IMAGE="tilelang-builder:manylinux"

HOST_UNAME=$(uname -m)
case "$HOST_UNAME" in
x86_64) TARGETARCH=amd64 ;;
aarch64|arm64) TARGETARCH=arm64 ;;
*) echo "Unsupported architecture: $HOST_UNAME" >&2; exit 1 ;;
esac

if docker buildx version >/dev/null 2>&1; then
if docker info >/dev/null 2>&1; then
docker run --rm --privileged tonistiigi/binfmt --install amd64,arm64 >/dev/null 2>&1 || true
fi

if ! docker buildx inspect multi >/dev/null 2>&1; then
docker buildx create --name multi --driver docker-container --use >/dev/null 2>&1 || true
else
docker buildx use multi >/dev/null 2>&1 || true
fi
docker buildx inspect --bootstrap >/dev/null 2>&1 || true

for ARCH in amd64 arm64; do
TAG_PLATFORM="linux/${ARCH}"
TAG_IMAGE="${IMAGE}-${ARCH}"

docker buildx build \
--platform "${TAG_PLATFORM}" \
--build-arg TARGETARCH="${ARCH}" \
-f "$(dirname "${BASH_SOURCE[0]}")/pypi.manylinux.Dockerfile" \
-t "${TAG_IMAGE}" \
--load \
.

script="sh maint/scripts/local_distribution.sh"
docker run --rm \
--platform "${TAG_PLATFORM}" \
-v "$(pwd):/tilelang" \
"${TAG_IMAGE}" \
/bin/bash -lc "$script"

if [ -d dist ]; then
mv -f dist "dist-local-${ARCH}"
fi
done

else
echo "docker buildx not found; building only host arch: ${TARGETARCH}" >&2
TAG_IMAGE="${IMAGE}-${TARGETARCH}"
TAG_PLATFORM="linux/${TARGETARCH}"

docker build \
--build-arg TARGETARCH="$TARGETARCH" \
-f "$(dirname "${BASH_SOURCE[0]}")/pypi.manylinux.Dockerfile" \
-t "${TAG_IMAGE}" \
.

script="sh maint/scripts/local_distribution.sh"
docker run --rm \
--platform "${TAG_PLATFORM}" \
-v "$(pwd):/tilelang" \
"${TAG_IMAGE}" \
/bin/bash -lc "$script"

if [ -d dist ]; then
mv -f dist "dist-local-${TARGETARCH}"
fi
fi
# Build for local architecture
CIBW_BUILD='cp38-*' cibuildwheel .
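The slimmed-down script now defers all image handling to cibuildwheel and its pyproject.toml configuration. For reference, the selection can be narrowed further with standard cibuildwheel options (the values shown are examples, not required by the repo):

# Build only the host architecture's CPython 3.8 wheels and collect them in
# dist/; CIBW_ARCHS=native and --output-dir are documented cibuildwheel options.
CIBW_ARCHS=native CIBW_BUILD='cp38-*' cibuildwheel --output-dir dist .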
56 changes: 3 additions & 53 deletions maint/scripts/docker_pypi_distribute.sh
@@ -1,15 +1,6 @@
#!/usr/bin/env bash
set -euxo pipefail

IMAGE="tilelang-builder:manylinux"

HOST_UNAME=$(uname -m)
case "$HOST_UNAME" in
x86_64) TARGETARCH=amd64 ;;
aarch64|arm64) TARGETARCH=arm64 ;;
*) echo "Unsupported architecture: $HOST_UNAME" >&2; exit 1 ;;
esac

if docker buildx version >/dev/null 2>&1; then
if docker info >/dev/null 2>&1; then
docker run --rm --privileged tonistiigi/binfmt --install amd64,arm64 >/dev/null 2>&1 || true
@@ -21,50 +12,9 @@ if docker buildx version >/dev/null 2>&1; then
docker buildx use multi >/dev/null 2>&1 || true
fi
docker buildx inspect --bootstrap >/dev/null 2>&1 || true

for ARCH in amd64 arm64; do
TAG_PLATFORM="linux/${ARCH}"
TAG_IMAGE="${IMAGE}-${ARCH}"

docker buildx build \
--platform "${TAG_PLATFORM}" \
--build-arg TARGETARCH="${ARCH}" \
-f "$(dirname "${BASH_SOURCE[0]}")/pypi.manylinux.Dockerfile" \
-t "${TAG_IMAGE}" \
--load \
.

script="sh maint/scripts/pypi_distribution.sh"
docker run --rm \
--platform "${TAG_PLATFORM}" \
-v "$(pwd):/tilelang" \
"${TAG_IMAGE}" \
/bin/bash -lc "$script"

if [ -d dist ]; then
mv -f dist "dist-pypi-${ARCH}"
fi
done

else
echo "docker buildx not found; building only host arch: ${TARGETARCH}" >&2
TAG_IMAGE="${IMAGE}-${TARGETARCH}"
TAG_PLATFORM="linux/${TARGETARCH}"

docker build \
--build-arg TARGETARCH="$TARGETARCH" \
-f "$(dirname "${BASH_SOURCE[0]}")/pypi.manylinux.Dockerfile" \
-t "${TAG_IMAGE}" \
.

script="sh maint/scripts/pypi_distribution.sh"
docker run --rm \
--platform "${TAG_PLATFORM}" \
-v "$(pwd):/tilelang" \
"${TAG_IMAGE}" \
/bin/bash -lc "$script"

if [ -d dist ]; then
mv -f dist "dist-pypi-${TARGETARCH}"
fi
export CIBW_ARCHS='x86_64 aarch64'
fi

NO_VERSION_LABEL=ON CIBW_BUILD='cp38-*' cibuildwheel .
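When buildx is present, the script registers binfmt handlers and widens CIBW_ARCHS so cibuildwheel can emulate the non-native architecture through QEMU. A condensed sketch of that path, using only commands and variables already present in the script:

# Register QEMU handlers once (privileged), then let cibuildwheel drive the
# manylinux containers for both architectures; the non-native one runs emulated.
docker run --rm --privileged tonistiigi/binfmt --install amd64,arm64
export CIBW_ARCHS='x86_64 aarch64'
NO_VERSION_LABEL=ON CIBW_BUILD='cp38-*' cibuildwheel .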
29 changes: 14 additions & 15 deletions maint/scripts/pypi.manylinux.Dockerfile
@@ -1,14 +1,18 @@
ARG TARGETARCH
FROM pytorch/manylinux2_28-builder:cuda12.1 AS builder_amd64
ENV CUDA_VERSION=12.1 \
AUDITWHEEL_PLAT=manylinux_2_28_x86_64
RUN pip3 install uv
FROM quay.io/pypa/manylinux2014_x86_64 AS builder_amd64

RUN yum-config-manager --add-repo https://developer.download.nvidia.cn/compute/cuda/repos/rhel7/x86_64/cuda-rhel7.repo

ARG CUDA_VERSION=12.1
ENV CUDA_VERSION=${CUDA_VERSION}

FROM quay.io/pypa/manylinux_2_28_aarch64 AS builder_arm64

FROM pytorch/manylinuxaarch64-builder:cuda12.8 AS builder_arm64
ENV CUDA_VERSION=12.8 \
AUDITWHEEL_PLAT=manylinux_2_28_aarch64
RUN /opt/python/cp312-cp312/bin/pip install uv
RUN dnf config-manager --add-repo https://developer.download.nvidia.com/compute/cuda/repos/rhel8/sbsa/cuda-rhel8.repo

ARG CUDA_VERSION=12.8
ENV CUDA_VERSION=${CUDA_VERSION}

ARG TARGETARCH
FROM builder_${TARGETARCH}

ENV DEBIAN_FRONTEND=noninteractive \
@@ -19,12 +23,7 @@ ENV PATH="/usr/local/cuda/bin:${PATH}"
ENV LD_LIBRARY_PATH="/usr/local/cuda/lib64:${LD_LIBRARY_PATH}"

RUN set -eux; \
uv venv -p 3.12 --seed /venv; \
pipx install cibuildwheel; \
git config --global --add safe.directory '/tilelang'

ENV PATH="/venv/bin:$PATH" \
VIRTUAL_ENV=/venv

RUN uv pip install build wheel

WORKDIR /tilelang
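The split builder stages are selected by the TARGETARCH build argument, which buildx also sets automatically for the requested platform. A usage sketch modeled on the removed docker_local_distribute.sh loop (the tag name and platform are examples, not fixed by the repo):

docker buildx build \
  --platform linux/arm64 \
  --build-arg TARGETARCH=arm64 \
  -f maint/scripts/pypi.manylinux.Dockerfile \
  -t tilelang-builder:manylinux-arm64 \
  --load .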
29 changes: 21 additions & 8 deletions pyproject.toml
@@ -59,19 +59,30 @@ metadata.version.provider = "version_provider"
metadata.version.provider-path = "."
experimental = true

# build.verbose = true
# logging.level = "DEBUG"

[tool.scikit-build.sdist]
# See MANIFEST.in for details
include = [
"VERSION",
"LICENSE",
"./VERSION",
".git_commit.txt",
"./LICENSE",
"THIRDPARTYNOTICES.txt",
"version_provider.py",
"requirements*.txt",
"tilelang/jit/adapter/cython/cython_wrapper.pyx",
"CMakeLists.txt",
"src/**",
"cmake/**",
"3rdparty/**",
# The vendored 3rdparty contents in the sdist should be the same as in the wheel.
# Need full TVM to build from source.
"3rdparty/tvm",
# CUTLASS
"3rdparty/cutlass/include",
"3rdparty/cutlass/tools",
# Composable Kernel
"3rdparty/composable_kernel/include",
"3rdparty/composable_kernel/library",
"testing/**",
"examples/**",
]
@@ -80,8 +91,7 @@ exclude = [
".github",
"**/.git",
"**/.github",
"3rdparty/clang**",
"3rdparty/llvm**",
"3rdparty/**",
"build",
]

@@ -90,7 +100,7 @@ tilelang = "tilelang"
"tilelang/src" = "src"
# NOTE: The mapping below places the contents of '3rdparty' inside 'tilelang/3rdparty' in the wheel.
# This is necessary to find TVM shared libraries at runtime.
# Restrict 3rdparty contents in wheel to the same allowlist as sdist
# The vendored 3rdparty contents in the wheel should be the same as in the sdist.
# TVM
"tilelang/3rdparty/tvm/src" = "3rdparty/tvm/src"
"tilelang/3rdparty/tvm/python" = "3rdparty/tvm/python"
@@ -202,6 +212,7 @@ environment.PYTHONUNBUFFERED = "1"
environment.PATH = "/usr/local/cuda/bin:$PATH"
environment.LD_LIBRARY_PATH = "/usr/local/cuda/lib64:/usr/local/cuda/lib64/stubs:$LD_LIBRARY_PATH"
# Pin to glibc 2.17 for x86 and 2.28 for aarch64 for now
# TODO: upgrade to manylinux_2_28 at some point
manylinux-x86_64-image = "manylinux2014" # CentOS 7
manylinux-aarch64-image = "manylinux_2_28" # AlmaLinux 8
# Install CUDA runtime and stub driver library
@@ -214,17 +225,19 @@ uname -a

case "$(uname -m)" in
"x86_64")
DEFAULT_CUDA_VERSION="12.1"
yum-config-manager --add-repo https://developer.download.nvidia.cn/compute/cuda/repos/rhel7/x86_64/cuda-rhel7.repo
;;
"aarch64")
DEFAULT_CUDA_VERSION="12.8"
dnf config-manager --add-repo https://developer.download.nvidia.com/compute/cuda/repos/rhel8/sbsa/cuda-rhel8.repo
;;
*)
exit 1
;;
esac

cudaver="$(echo "${CUDA_VERSION:-"12.4"}" | cut -d '.' -f-2)"
cudaver="$(echo "${CUDA_VERSION:-$DEFAULT_CUDA_VERSION}" | cut -d '.' -f-2)"
v="${cudaver//./-}"
yum install -y "cuda-minimal-build-${v}" "cuda-driver-devel-${v}" "cuda-nvrtc-devel-${v}" nvidia-driver-cuda-libs
"""