Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions .buildkite/image_build/image_build.sh
Original file line number Diff line number Diff line change
Expand Up @@ -192,6 +192,7 @@ export BUILDKITE_COMMIT
export PARENT_COMMIT
export IMAGE_TAG
export IMAGE_TAG_LATEST
export COMMIT="${COMMIT:-${BUILDKITE_COMMIT}}"
export CACHE_FROM
export CACHE_FROM_BASE_BRANCH
export CACHE_FROM_MAIN
Expand Down
116 changes: 108 additions & 8 deletions .buildkite/release-pipeline.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -121,7 +121,19 @@ steps:
queue: cpu_queue_release
commands:
- "aws ecr-public get-login-password --region us-east-1 | docker login --username AWS --password-stdin public.ecr.aws/q9t5s3a7"
- "DOCKER_BUILDKIT=1 docker build --build-arg max_jobs=16 --build-arg USE_SCCACHE=1 --build-arg GIT_REPO_CHECK=1 --build-arg CUDA_VERSION=13.0.2 --build-arg torch_cuda_arch_list=\"${CUDA_ARCH_X86}\" --build-arg INSTALL_KV_CONNECTORS=true --build-arg BUILD_BASE_IMAGE=nvidia/cuda:13.0.2-devel-ubuntu22.04 --tag public.ecr.aws/q9t5s3a7/vllm-release-repo:$BUILDKITE_COMMIT-$(uname -m) --target vllm-openai --progress plain -f docker/Dockerfile ."
- |
DOCKER_BUILDKIT=1 docker build \
$(bash .buildkite/scripts/docker-build-metadata-args.sh) \
--build-arg max_jobs=16 \
--build-arg USE_SCCACHE=1 \
--build-arg GIT_REPO_CHECK=1 \
--build-arg CUDA_VERSION=13.0.2 \
--build-arg torch_cuda_arch_list="${CUDA_ARCH_X86}" \
--build-arg INSTALL_KV_CONNECTORS=true \
--build-arg BUILD_BASE_IMAGE=nvidia/cuda:13.0.2-devel-ubuntu22.04 \
--target vllm-openai \
--progress plain \
-f docker/Dockerfile .
- "docker push public.ecr.aws/q9t5s3a7/vllm-release-repo:$BUILDKITE_COMMIT-$(uname -m)"
# re-tag to default image tag and push, just in case arm64 build fails
- "docker tag public.ecr.aws/q9t5s3a7/vllm-release-repo:$BUILDKITE_COMMIT-$(uname -m) public.ecr.aws/q9t5s3a7/vllm-release-repo:$BUILDKITE_COMMIT"
Expand All @@ -134,7 +146,19 @@ steps:
queue: arm64_cpu_queue_release
commands:
- "aws ecr-public get-login-password --region us-east-1 | docker login --username AWS --password-stdin public.ecr.aws/q9t5s3a7"
- "DOCKER_BUILDKIT=1 docker build --build-arg max_jobs=16 --build-arg USE_SCCACHE=1 --build-arg GIT_REPO_CHECK=1 --build-arg CUDA_VERSION=13.0.2 --build-arg torch_cuda_arch_list=\"${CUDA_ARCH_AARCH64}\" --build-arg INSTALL_KV_CONNECTORS=true --build-arg BUILD_BASE_IMAGE=nvidia/cuda:13.0.2-devel-ubuntu22.04 --tag public.ecr.aws/q9t5s3a7/vllm-release-repo:$BUILDKITE_COMMIT-$(uname -m) --target vllm-openai --progress plain -f docker/Dockerfile ."
- |
DOCKER_BUILDKIT=1 docker build \
$(bash .buildkite/scripts/docker-build-metadata-args.sh) \
--build-arg max_jobs=16 \
--build-arg USE_SCCACHE=1 \
--build-arg GIT_REPO_CHECK=1 \
--build-arg CUDA_VERSION=13.0.2 \
--build-arg torch_cuda_arch_list="${CUDA_ARCH_AARCH64}" \
--build-arg INSTALL_KV_CONNECTORS=true \
--build-arg BUILD_BASE_IMAGE=nvidia/cuda:13.0.2-devel-ubuntu22.04 \
--target vllm-openai \
--progress plain \
-f docker/Dockerfile .
- "docker push public.ecr.aws/q9t5s3a7/vllm-release-repo:$BUILDKITE_COMMIT-$(uname -m)"

- label: "Build release image - x86_64 - CUDA 12.9"
Expand All @@ -144,7 +168,18 @@ steps:
queue: cpu_queue_release
commands:
- "aws ecr-public get-login-password --region us-east-1 | docker login --username AWS --password-stdin public.ecr.aws/q9t5s3a7"
- "DOCKER_BUILDKIT=1 docker build --build-arg max_jobs=16 --build-arg USE_SCCACHE=1 --build-arg GIT_REPO_CHECK=1 --build-arg CUDA_VERSION=12.9.1 --build-arg torch_cuda_arch_list=\"${CUDA_ARCH_X86_CU129}\" --build-arg INSTALL_KV_CONNECTORS=true --tag public.ecr.aws/q9t5s3a7/vllm-release-repo:$BUILDKITE_COMMIT-$(uname -m)-cu129 --target vllm-openai --progress plain -f docker/Dockerfile ."
- |
DOCKER_BUILDKIT=1 docker build \
$(bash .buildkite/scripts/docker-build-metadata-args.sh cu129) \
--build-arg max_jobs=16 \
--build-arg USE_SCCACHE=1 \
--build-arg GIT_REPO_CHECK=1 \
--build-arg CUDA_VERSION=12.9.1 \
--build-arg torch_cuda_arch_list="${CUDA_ARCH_X86_CU129}" \
--build-arg INSTALL_KV_CONNECTORS=true \
--target vllm-openai \
--progress plain \
-f docker/Dockerfile .
- "docker push public.ecr.aws/q9t5s3a7/vllm-release-repo:$BUILDKITE_COMMIT-$(uname -m)-cu129"
# re-tag to default image tag and push, just in case arm64 build fails
- "docker tag public.ecr.aws/q9t5s3a7/vllm-release-repo:$BUILDKITE_COMMIT-$(uname -m)-cu129 public.ecr.aws/q9t5s3a7/vllm-release-repo:$BUILDKITE_COMMIT-cu129"
Expand All @@ -157,7 +192,18 @@ steps:
queue: arm64_cpu_queue_release
commands:
- "aws ecr-public get-login-password --region us-east-1 | docker login --username AWS --password-stdin public.ecr.aws/q9t5s3a7"
- "DOCKER_BUILDKIT=1 docker build --build-arg max_jobs=16 --build-arg USE_SCCACHE=1 --build-arg GIT_REPO_CHECK=1 --build-arg CUDA_VERSION=12.9.1 --build-arg torch_cuda_arch_list=\"${CUDA_ARCH_AARCH64_CU129}\" --build-arg INSTALL_KV_CONNECTORS=true --tag public.ecr.aws/q9t5s3a7/vllm-release-repo:$BUILDKITE_COMMIT-$(uname -m)-cu129 --target vllm-openai --progress plain -f docker/Dockerfile ."
- |
DOCKER_BUILDKIT=1 docker build \
$(bash .buildkite/scripts/docker-build-metadata-args.sh cu129) \
--build-arg max_jobs=16 \
--build-arg USE_SCCACHE=1 \
--build-arg GIT_REPO_CHECK=1 \
--build-arg CUDA_VERSION=12.9.1 \
--build-arg torch_cuda_arch_list="${CUDA_ARCH_AARCH64_CU129}" \
--build-arg INSTALL_KV_CONNECTORS=true \
--target vllm-openai \
--progress plain \
-f docker/Dockerfile .
- "docker push public.ecr.aws/q9t5s3a7/vllm-release-repo:$BUILDKITE_COMMIT-$(uname -m)-cu129"

- label: "Build release image - x86_64 - CUDA 13.0 - Ubuntu 24.04"
Expand All @@ -167,7 +213,21 @@ steps:
queue: cpu_queue_release
commands:
- "aws ecr-public get-login-password --region us-east-1 | docker login --username AWS --password-stdin public.ecr.aws/q9t5s3a7"
- "DOCKER_BUILDKIT=1 docker build --build-arg max_jobs=16 --build-arg USE_SCCACHE=1 --build-arg GIT_REPO_CHECK=1 --build-arg CUDA_VERSION=13.0.2 --build-arg UBUNTU_VERSION=24.04 --build-arg GDRCOPY_OS_VERSION=Ubuntu24_04 --build-arg torch_cuda_arch_list=\"${CUDA_ARCH_X86}\" --build-arg INSTALL_KV_CONNECTORS=true --build-arg BUILD_BASE_IMAGE=nvidia/cuda:13.0.2-devel-ubuntu24.04 --tag public.ecr.aws/q9t5s3a7/vllm-release-repo:$BUILDKITE_COMMIT-$(uname -m)-ubuntu2404 --target vllm-openai --progress plain -f docker/Dockerfile ."
- |
DOCKER_BUILDKIT=1 docker build \
$(bash .buildkite/scripts/docker-build-metadata-args.sh ubuntu2404) \
--build-arg max_jobs=16 \
--build-arg USE_SCCACHE=1 \
--build-arg GIT_REPO_CHECK=1 \
--build-arg CUDA_VERSION=13.0.2 \
--build-arg UBUNTU_VERSION=24.04 \
--build-arg GDRCOPY_OS_VERSION=Ubuntu24_04 \
--build-arg torch_cuda_arch_list="${CUDA_ARCH_X86}" \
--build-arg INSTALL_KV_CONNECTORS=true \
--build-arg BUILD_BASE_IMAGE=nvidia/cuda:13.0.2-devel-ubuntu24.04 \
--target vllm-openai \
--progress plain \
-f docker/Dockerfile .
- "docker push public.ecr.aws/q9t5s3a7/vllm-release-repo:$BUILDKITE_COMMIT-$(uname -m)-ubuntu2404"
- "docker tag public.ecr.aws/q9t5s3a7/vllm-release-repo:$BUILDKITE_COMMIT-$(uname -m)-ubuntu2404 public.ecr.aws/q9t5s3a7/vllm-release-repo:$BUILDKITE_COMMIT-ubuntu2404"
- "docker push public.ecr.aws/q9t5s3a7/vllm-release-repo:$BUILDKITE_COMMIT-ubuntu2404"
Expand All @@ -179,7 +239,21 @@ steps:
queue: arm64_cpu_queue_release
commands:
- "aws ecr-public get-login-password --region us-east-1 | docker login --username AWS --password-stdin public.ecr.aws/q9t5s3a7"
- "DOCKER_BUILDKIT=1 docker build --build-arg max_jobs=16 --build-arg USE_SCCACHE=1 --build-arg GIT_REPO_CHECK=1 --build-arg CUDA_VERSION=13.0.2 --build-arg UBUNTU_VERSION=24.04 --build-arg GDRCOPY_OS_VERSION=Ubuntu24_04 --build-arg torch_cuda_arch_list=\"${CUDA_ARCH_AARCH64}\" --build-arg INSTALL_KV_CONNECTORS=true --build-arg BUILD_BASE_IMAGE=nvidia/cuda:13.0.2-devel-ubuntu24.04 --tag public.ecr.aws/q9t5s3a7/vllm-release-repo:$BUILDKITE_COMMIT-$(uname -m)-ubuntu2404 --target vllm-openai --progress plain -f docker/Dockerfile ."
- |
DOCKER_BUILDKIT=1 docker build \
$(bash .buildkite/scripts/docker-build-metadata-args.sh ubuntu2404) \
--build-arg max_jobs=16 \
--build-arg USE_SCCACHE=1 \
--build-arg GIT_REPO_CHECK=1 \
--build-arg CUDA_VERSION=13.0.2 \
--build-arg UBUNTU_VERSION=24.04 \
--build-arg GDRCOPY_OS_VERSION=Ubuntu24_04 \
--build-arg torch_cuda_arch_list="${CUDA_ARCH_AARCH64}" \
--build-arg INSTALL_KV_CONNECTORS=true \
--build-arg BUILD_BASE_IMAGE=nvidia/cuda:13.0.2-devel-ubuntu24.04 \
--target vllm-openai \
--progress plain \
-f docker/Dockerfile .
- "docker push public.ecr.aws/q9t5s3a7/vllm-release-repo:$BUILDKITE_COMMIT-$(uname -m)-ubuntu2404"

- label: "Build release image - x86_64 - CUDA 12.9 - Ubuntu 24.04"
Expand All @@ -189,7 +263,20 @@ steps:
queue: cpu_queue_release
commands:
- "aws ecr-public get-login-password --region us-east-1 | docker login --username AWS --password-stdin public.ecr.aws/q9t5s3a7"
- "DOCKER_BUILDKIT=1 docker build --build-arg max_jobs=16 --build-arg USE_SCCACHE=1 --build-arg GIT_REPO_CHECK=1 --build-arg CUDA_VERSION=12.9.1 --build-arg UBUNTU_VERSION=24.04 --build-arg GDRCOPY_OS_VERSION=Ubuntu24_04 --build-arg torch_cuda_arch_list=\"${CUDA_ARCH_X86_CU129}\" --build-arg INSTALL_KV_CONNECTORS=true --tag public.ecr.aws/q9t5s3a7/vllm-release-repo:$BUILDKITE_COMMIT-$(uname -m)-cu129-ubuntu2404 --target vllm-openai --progress plain -f docker/Dockerfile ."
- |
DOCKER_BUILDKIT=1 docker build \
$(bash .buildkite/scripts/docker-build-metadata-args.sh cu129-ubuntu2404) \
--build-arg max_jobs=16 \
--build-arg USE_SCCACHE=1 \
--build-arg GIT_REPO_CHECK=1 \
--build-arg CUDA_VERSION=12.9.1 \
--build-arg UBUNTU_VERSION=24.04 \
--build-arg GDRCOPY_OS_VERSION=Ubuntu24_04 \
--build-arg torch_cuda_arch_list="${CUDA_ARCH_X86_CU129}" \
--build-arg INSTALL_KV_CONNECTORS=true \
--target vllm-openai \
--progress plain \
-f docker/Dockerfile .
- "docker push public.ecr.aws/q9t5s3a7/vllm-release-repo:$BUILDKITE_COMMIT-$(uname -m)-cu129-ubuntu2404"
- "docker tag public.ecr.aws/q9t5s3a7/vllm-release-repo:$BUILDKITE_COMMIT-$(uname -m)-cu129-ubuntu2404 public.ecr.aws/q9t5s3a7/vllm-release-repo:$BUILDKITE_COMMIT-cu129-ubuntu2404"
- "docker push public.ecr.aws/q9t5s3a7/vllm-release-repo:$BUILDKITE_COMMIT-cu129-ubuntu2404"
Expand All @@ -201,7 +288,20 @@ steps:
queue: arm64_cpu_queue_release
commands:
- "aws ecr-public get-login-password --region us-east-1 | docker login --username AWS --password-stdin public.ecr.aws/q9t5s3a7"
- "DOCKER_BUILDKIT=1 docker build --build-arg max_jobs=16 --build-arg USE_SCCACHE=1 --build-arg GIT_REPO_CHECK=1 --build-arg CUDA_VERSION=12.9.1 --build-arg UBUNTU_VERSION=24.04 --build-arg GDRCOPY_OS_VERSION=Ubuntu24_04 --build-arg torch_cuda_arch_list=\"${CUDA_ARCH_AARCH64_CU129}\" --build-arg INSTALL_KV_CONNECTORS=true --tag public.ecr.aws/q9t5s3a7/vllm-release-repo:$BUILDKITE_COMMIT-$(uname -m)-cu129-ubuntu2404 --target vllm-openai --progress plain -f docker/Dockerfile ."
- |
DOCKER_BUILDKIT=1 docker build \
$(bash .buildkite/scripts/docker-build-metadata-args.sh cu129-ubuntu2404) \
--build-arg max_jobs=16 \
--build-arg USE_SCCACHE=1 \
--build-arg GIT_REPO_CHECK=1 \
--build-arg CUDA_VERSION=12.9.1 \
--build-arg UBUNTU_VERSION=24.04 \
--build-arg GDRCOPY_OS_VERSION=Ubuntu24_04 \
--build-arg torch_cuda_arch_list="${CUDA_ARCH_AARCH64_CU129}" \
--build-arg INSTALL_KV_CONNECTORS=true \
--target vllm-openai \
--progress plain \
-f docker/Dockerfile .
- "docker push public.ecr.aws/q9t5s3a7/vllm-release-repo:$BUILDKITE_COMMIT-$(uname -m)-cu129-ubuntu2404"

- block: "Build release image for x86_64 CPU"
Expand Down
54 changes: 54 additions & 0 deletions .buildkite/scripts/docker-build-metadata-args.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,54 @@
#!/bin/bash
# Emit docker build flags for release image provenance metadata.
# Keep this helper best-effort: missing Buildkite metadata should fall back to
# local/default values instead of blocking the Docker build.
#
# Output: a single line of "--build-arg KEY=VALUE ... --tag REF " flags that
# callers splice into a `docker build` invocation via command substitution.
# NOTE(review): the output is consumed unquoted via $(bash ...), so values are
# assumed to contain no whitespace or glob characters — true today for commit
# SHAs, image refs, and Buildkite URLs; confirm if new values are added.

# Variant examples: "", "cu129", "ubuntu2404", "cu129-ubuntu2404".
variant="${1:-}"
# ":+" expansion: prepend "-" only when the variant is non-empty.
variant_suffix="${variant:+-${variant}}"

# Each value prefers an explicit VLLM_* override, then Buildkite-provided
# metadata, then a local/default fallback.
image_name="${VLLM_DOCKER_IMAGE_NAME:-vllm/vllm-openai}"
staging_repo="${VLLM_STAGING_IMAGE_REPO:-public.ecr.aws/q9t5s3a7/vllm-release-repo}"
build_commit="${VLLM_BUILD_COMMIT:-${BUILDKITE_COMMIT:-unknown}}"
build_pipeline="${VLLM_BUILD_PIPELINE:-${BUILDKITE_PIPELINE_ID:-${BUILDKITE_PIPELINE_SLUG:-local}}}"
build_url="${VLLM_BUILD_URL:-${BUILDKITE_BUILD_URL:-}}"
# Commit used in image tags: raw Buildkite commit when available, else the
# (possibly overridden) build commit.
tag_commit="${BUILDKITE_COMMIT:-${build_commit}}"

# CI path: derive the intended public tag and the per-arch staging reference.
if [[ -n "${BUILDKITE:-}" || -n "${BUILDKITE_COMMIT:-}" ]]; then
release_version="${RELEASE_VERSION:-}"
# Fall back to pipeline meta-data when RELEASE_VERSION is not exported;
# stderr is silenced so a missing key degrades gracefully (best-effort).
if command -v buildkite-agent >/dev/null 2>&1; then
release_version="${release_version:-$(buildkite-agent meta-data get release-version 2>/dev/null)}"
fi
# Normalize "v1.2.3" -> "1.2.3"; if still empty, fall back to the commit SHA.
release_version="${release_version#v}"
release_version="${release_version:-${tag_commit}}"

# Per-architecture staging tag, e.g. <repo>:<sha>-x86_64-cu129.
staging_image_ref="${staging_repo}:${tag_commit}-$(uname -m)${variant_suffix}"

if [[ "${NIGHTLY:-}" == "1" ]]; then
# Nightly tag layouts:
#   ""                 -> <name>:nightly-<sha>
#   "cuNNN[-suffix]"   -> <name>:cuNNN-nightly-<sha>[-suffix]
#   other (ubuntu2404) -> <name>:nightly-<sha>-<variant>
if [[ -z "${variant}" ]]; then
image_tag="${image_name}:nightly-${tag_commit}"
elif [[ "${variant}" == cu* ]]; then
# Split "cu129-ubuntu2404" into the CUDA part and the remainder
# (remainder keeps its leading "-", or is empty for plain "cu129").
cuda_variant="${variant%%-*}"
remaining_variant="${variant#${cuda_variant}}"
image_tag="${image_name}:${cuda_variant}-nightly-${tag_commit}${remaining_variant}"
else
image_tag="${image_name}:nightly-${tag_commit}${variant_suffix}"
fi
else
# Release builds: v<version> plus the variant suffix.
image_tag="${image_name}:v${release_version}${variant_suffix}"
fi
else
# Local/dev path: no Buildkite context; use one fixed dev tag for both the
# provenance build-arg and the --tag flag.
image_tag="${VLLM_IMAGE_TAG:-local/vllm-openai:dev}"
staging_image_ref="${image_tag}"
fi

# Print one "--build-arg KEY=VALUE " pair; the trailing space separates flags.
emit_arg() {
printf -- "--build-arg %s=%s " "$1" "$2"
}

emit_arg VLLM_BUILD_COMMIT "${build_commit}"
emit_arg VLLM_BUILD_PIPELINE "${build_pipeline}"
emit_arg VLLM_BUILD_URL "${build_url}"
# This is the intended public tag. The final digest is only known after push.
emit_arg VLLM_IMAGE_TAG "${image_tag}"
printf -- "--tag %s " "${staging_image_ref}"
16 changes: 16 additions & 0 deletions .buildkite/test_areas/docker.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,16 @@
# Buildkite test-area definition: validates the docker build metadata helper.
group: Docker
depends_on:
- image-build-cpu
steps:
- label: Docker Build Metadata
timeout_in_minutes: 10
device: cpu-small
# Re-run this step whenever the release pipeline, the metadata helper
# script, or the Dockerfiles/bake file that consume its build args change.
source_file_dependencies:
- .buildkite/release-pipeline.yaml
- .buildkite/scripts/docker-build-metadata-args.sh
- docker/Dockerfile
- docker/Dockerfile.cpu
- docker/docker-bake.hcl
- tests/tools/test_docker_build_metadata_args.py
commands:
# NOTE(review): path is relative to a tests/ working directory (no tests/
# prefix here, unlike source_file_dependencies) — confirm the CI cwd.
- pytest -v -s tools/test_docker_build_metadata_args.py
16 changes: 16 additions & 0 deletions docker/Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -763,6 +763,10 @@ FROM vllm-base AS vllm-openai-base
ARG TARGETPLATFORM
ARG INSTALL_KV_CONNECTORS=false
ARG CUDA_VERSION
ARG VLLM_BUILD_COMMIT
ARG VLLM_BUILD_PIPELINE
ARG VLLM_BUILD_URL
ARG VLLM_IMAGE_TAG

ARG PIP_INDEX_URL UV_INDEX_URL
ARG PIP_EXTRA_INDEX_URL UV_EXTRA_INDEX_URL
Expand Down Expand Up @@ -799,6 +803,18 @@ RUN --mount=type=cache,target=/root/.cache/uv \
fi

ENV VLLM_USAGE_SOURCE production-docker-image
ENV VLLM_BUILD_COMMIT=${VLLM_BUILD_COMMIT:-unknown} \
VLLM_BUILD_PIPELINE=${VLLM_BUILD_PIPELINE:-local} \
VLLM_BUILD_URL=${VLLM_BUILD_URL:-} \
VLLM_IMAGE_TAG=${VLLM_IMAGE_TAG:-local/vllm-openai:dev}
LABEL org.opencontainers.image.source="https://github.com/vllm-project/vllm" \
org.opencontainers.image.revision="${VLLM_BUILD_COMMIT}" \
org.opencontainers.image.version="${VLLM_IMAGE_TAG}" \
org.opencontainers.image.url="${VLLM_BUILD_URL}" \
ai.vllm.build.commit="${VLLM_BUILD_COMMIT}" \
ai.vllm.build.pipeline="${VLLM_BUILD_PIPELINE}" \
ai.vllm.build.url="${VLLM_BUILD_URL}" \
ai.vllm.image.tag="${VLLM_IMAGE_TAG}"

# define sagemaker first, so it is not default from `docker build`
FROM vllm-openai-base AS vllm-sagemaker
Expand Down
1 change: 1 addition & 0 deletions docker/Dockerfile.cpu
Original file line number Diff line number Diff line change
Expand Up @@ -192,6 +192,7 @@ ADD ./tests/ ./tests/
ADD ./examples/ ./examples/
ADD ./benchmarks/ ./benchmarks/
ADD ./vllm/collect_env.py .
ADD ./docker/ ./docker/
ADD ./.buildkite/ ./.buildkite/

# install development dependencies (for testing)
Expand Down
30 changes: 28 additions & 2 deletions docker/docker-bake.hcl
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,22 @@ variable "COMMIT" {
default = ""
}

variable "VLLM_BUILD_COMMIT" {
default = "unknown"
}

variable "VLLM_BUILD_PIPELINE" {
default = "local"
}

variable "VLLM_BUILD_URL" {
default = ""
}

variable "VLLM_IMAGE_TAG" {
default = "local/vllm-openai:dev"
}

# Groups

group "default" {
Expand All @@ -46,6 +62,10 @@ target "_common" {
max_jobs = MAX_JOBS
nvcc_threads = NVCC_THREADS
torch_cuda_arch_list = TORCH_CUDA_ARCH_LIST
VLLM_BUILD_COMMIT = VLLM_BUILD_COMMIT != "unknown" ? VLLM_BUILD_COMMIT : (COMMIT != "" ? COMMIT : "unknown")
VLLM_BUILD_PIPELINE = VLLM_BUILD_PIPELINE
VLLM_BUILD_URL = VLLM_BUILD_URL
VLLM_IMAGE_TAG = VLLM_IMAGE_TAG
}
}

Expand All @@ -56,10 +76,16 @@ target "_labels" {
"org.opencontainers.image.title" = "vLLM"
"org.opencontainers.image.description" = "vLLM: A high-throughput and memory-efficient inference and serving engine for LLMs"
"org.opencontainers.image.licenses" = "Apache-2.0"
"org.opencontainers.image.revision" = COMMIT
"org.opencontainers.image.revision" = VLLM_BUILD_COMMIT != "unknown" ? VLLM_BUILD_COMMIT : (COMMIT != "" ? COMMIT : "unknown")
"org.opencontainers.image.version" = VLLM_IMAGE_TAG
"org.opencontainers.image.url" = VLLM_BUILD_URL
"ai.vllm.build.commit" = VLLM_BUILD_COMMIT != "unknown" ? VLLM_BUILD_COMMIT : (COMMIT != "" ? COMMIT : "unknown")
"ai.vllm.build.pipeline" = VLLM_BUILD_PIPELINE
"ai.vllm.build.url" = VLLM_BUILD_URL
"ai.vllm.image.tag" = VLLM_IMAGE_TAG
}
annotations = [
"index,manifest:org.opencontainers.image.revision=${COMMIT}",
"index,manifest:org.opencontainers.image.revision=${VLLM_BUILD_COMMIT != "unknown" ? VLLM_BUILD_COMMIT : (COMMIT != "" ? COMMIT : "unknown")}",
]
}

Expand Down
Loading
Loading