diff --git a/.github/workflows/build-notebooks-TEMPLATE.yaml b/.github/workflows/build-notebooks-TEMPLATE.yaml index f163200cfa..26715bac27 100644 --- a/.github/workflows/build-notebooks-TEMPLATE.yaml +++ b/.github/workflows/build-notebooks-TEMPLATE.yaml @@ -266,11 +266,11 @@ jobs: # region Pytest image tests # https://github.com/astral-sh/setup-uv - - name: Install the latest version of uv + - name: Install uv uses: astral-sh/setup-uv@v7 with: - version: "latest" - python-version: "3.14" + version-file: uv.toml + python-version-file: .python-version enable-cache: true cache-dependency-glob: "uv.lock" diff --git a/.github/workflows/code-quality.yaml b/.github/workflows/code-quality.yaml index 0658762d23..ee2efd2b5d 100644 --- a/.github/workflows/code-quality.yaml +++ b/.github/workflows/code-quality.yaml @@ -15,11 +15,11 @@ jobs: - uses: actions/checkout@v6 # https://github.com/astral-sh/setup-uv - - name: Install the latest version of uv + - name: Install uv uses: astral-sh/setup-uv@v7 with: - version: "latest" - python-version: "3.14" + version-file: uv.toml + python-version-file: .python-version enable-cache: true cache-dependency-glob: "uv.lock" @@ -44,11 +44,11 @@ jobs: - uses: actions/checkout@v6 # https://github.com/astral-sh/setup-uv - - name: Install the latest version of uv + - name: Install uv uses: astral-sh/setup-uv@v7 with: - version: "latest" - python-version: "3.14" + version-file: uv.toml + python-version-file: .python-version enable-cache: true cache-dependency-glob: "uv.lock" diff --git a/.github/workflows/docs.yaml b/.github/workflows/docs.yaml index 4519bc4b70..8c84ac71db 100644 --- a/.github/workflows/docs.yaml +++ b/.github/workflows/docs.yaml @@ -19,11 +19,11 @@ jobs: - uses: actions/checkout@v6 # https://github.com/astral-sh/setup-uv - - name: Install the latest version of uv + - name: Install uv uses: astral-sh/setup-uv@v7 with: - version: "latest" - python-version: "3.14" + version-file: uv.toml + python-version-file: .python-version enable-cache: true cache-dependency-glob: "uv.lock" diff --git a/.github/workflows/piplock-renewal.yaml b/.github/workflows/piplock-renewal.yaml index 9b50bdb29a..fcef598933 100644 --- a/.github/workflows/piplock-renewal.yaml +++ b/.github/workflows/piplock-renewal.yaml @@ -1,63 +1,60 @@ --- -# This GitHub action is meant to update the pipfile.locks -name: Pipfile.locks Renewal Action +# This GitHub action is meant to update the lock files (pylock.toml) +name: Lock Files Renewal Action on: # yamllint disable-line rule:truthy # Triggers the workflow every Wednesday at 1am UTC schedule: - - cron: "0 1 * * 3" + - cron: "0 1 * * 3" # Weekly lockfile update + - cron: "0 9,15 * * 1-5" # Auto-merge check at 9am and 3pm UTC on weekdays workflow_dispatch: # for manual trigger workflow from GH Web UI inputs: + operation: + description: 'Which operation to run' + required: true + default: 'update-lockfiles' + type: choice + options: + - 'update-lockfiles' + - 'auto-merge' branch: - description: 'Specify branch' + description: 'Specify branch (for update-lockfiles)' required: false default: 'main' - python_version: - description: 'Select a Python version to update Pipfile.lock' + index_mode: + description: 'Index mode for lock file generation (for update-lockfiles)' required: false - default: '["3.11", "3.12"]' + default: 'auto' type: choice options: - - '["3.11", "3.12"]' - - '["3.12"]' - - '["3.11"]' - - '["3.9"]' - - '["3.8"]' - lockfiles_upgrade: - description: 'Force full relock and upgrades for pylock.toml (manual runs)' + - 'auto' + - 
'public-index' + - 'rh-index' + force_upgrade: + description: 'Force upgrade all packages to latest versions (for update-lockfiles)' required: false default: 'false' type: choice options: - - 'true' - 'false' - update_optional_dirs: - description: 'Include optional directories in update' - required: false - default: 'false' - type: choice - options: - 'true' - - 'false' jobs: - refresh-pipfile-locks: + refresh-lock-files: + # Only run on Wednesday schedule or manual dispatch with 'update-lockfiles' operation + if: (github.event_name == 'workflow_dispatch' && github.event.inputs.operation == 'update-lockfiles') || (github.event_name == 'schedule' && github.event.schedule == '0 1 * * 3') runs-on: ubuntu-latest concurrency: - group: refresh-pipfile-locks-${{ matrix.python-version }}-${{ github.ref }} + group: refresh-lock-files-${{ github.ref }} cancel-in-progress: false - strategy: - fail-fast: false - matrix: - python-version: >- - ${{ fromJSON( github.event.inputs.python_version || '["3.11", "3.12"]' ) }} permissions: contents: write + pull-requests: write env: BRANCH: ${{ github.event.inputs.branch || 'main' }} - INCLUDE_OPT_DIRS: ${{ github.event.inputs.update_optional_dirs || 'false' }} - # Force full relock on scheduled runs. For manual runs, use the input toggle. - FORCE_LOCKFILES_UPGRADE: ${{ github.event_name == 'schedule' && '1' || (github.event.inputs.lockfiles_upgrade == 'true' && '1' || '0') }} + INDEX_MODE: ${{ github.event.inputs.index_mode || 'auto' }} + # Force upgrade on scheduled runs, or when explicitly requested + FORCE_LOCKFILES_UPGRADE: ${{ github.event_name == 'schedule' && '1' || (github.event.inputs.force_upgrade == 'true' && '1' || '0') }} steps: - name: Checkout code @@ -65,35 +62,179 @@ jobs: with: ref: ${{ env.BRANCH }} token: ${{ secrets.GH_ACCESS_TOKEN }} + persist-credentials: true - name: Configure Git run: | git config --global user.email "github-actions[bot]@users.noreply.github.com" git config --global user.name "GitHub Actions" - - name: Set up Python ${{ matrix.python-version }} + - name: Set up Python uses: actions/setup-python@v6 with: - python-version: ${{ matrix.python-version }} - - - name: Install pipenv - run: pip install "pipenv==2025.0.4" + python-version: '3.12' - name: Install uv - run: pip install "uv==0.9.6" + uses: astral-sh/setup-uv@v7 + with: + version-file: uv.toml - - name: Run make refresh-pipfilelock-files + - name: Run make refresh-lock-files run: | - make refresh-pipfilelock-files PYTHON_VERSION=${{ matrix.python-version }} INCLUDE_OPT_DIRS=${{ env.INCLUDE_OPT_DIRS }} + make refresh-lock-files INDEX_MODE=${{ env.INDEX_MODE }} env: FORCE_LOCKFILES_UPGRADE: ${{ env.FORCE_LOCKFILES_UPGRADE }} - - name: Commit changes (if any) + - name: Create Pull Request + env: + GH_TOKEN: ${{ secrets.GH_ACCESS_TOKEN }} run: | git add . - git diff --cached --quiet && echo "No changes to commit." || git commit -m "Update Pipfile.lock for Python ${{ matrix.python-version }}" + if git diff --cached --quiet; then + echo "No changes to commit." + exit 0 + fi - - name: Pull and push changes + BRANCH_NAME="lockfile-update-$(date +%Y%m%d-%H%M)" + git checkout -b "$BRANCH_NAME" + git commit -m "Update lock files" + git push -u origin "$BRANCH_NAME" + + gh pr create \ + --title "Update lock files" \ + --body "$(cat <<'EOF' + Automated lock file update. 
+
+          **Auto-merge policy:** This PR will be automatically merged after 1 working day unless:
+          - Moved to draft status
+          - Labeled with `do-not-merge/*`
+          - Manually merged or closed
+          EOF
+          )" \
+            --label "automated-lockfile-update" \
+            --base "${{ env.BRANCH }}"
+
+  auto-merge-lockfile-prs:
+    # Run on auto-merge schedule or manual dispatch with 'auto-merge' operation
+    if: (github.event_name == 'workflow_dispatch' && github.event.inputs.operation == 'auto-merge') || (github.event_name == 'schedule' && github.event.schedule == '0 9,15 * * 1-5')
+    runs-on: ubuntu-latest
+    permissions:
+      contents: write
+      pull-requests: write
+    steps:
+      - name: Auto-merge eligible lockfile PRs
+        env:
+          GH_TOKEN: ${{ secrets.GH_ACCESS_TOKEN }}
+          GITHUB_TOKEN_FOR_APPROVAL: ${{ github.token }}
+        run: |
+          set -euo pipefail
+
+          REPO="${{ github.repository }}"
+
+          echo "Searching for PRs with label 'automated-lockfile-update'..."
+
+          # Get all open PRs with the automated-lockfile-update label
+          PRS=$(gh pr list --repo "$REPO" --label "automated-lockfile-update" --state open --json number,title,createdAt,isDraft,labels --limit 50)
+
+          if [ "$PRS" = "[]" ] || [ -z "$PRS" ]; then
+            echo "No open PRs found with label 'automated-lockfile-update'"
+            exit 0
+          fi
+
+          echo "Found PRs: $PRS"
+
+          # Process each PR
+          echo "$PRS" | jq -c '.[]' | while read -r pr; do
+            PR_NUM=$(echo "$pr" | jq -r '.number')
+            PR_TITLE=$(echo "$pr" | jq -r '.title')
+            CREATED_AT=$(echo "$pr" | jq -r '.createdAt')
+            IS_DRAFT=$(echo "$pr" | jq -r '.isDraft')
+            LABELS=$(echo "$pr" | jq -r '.labels[].name' 2>/dev/null || echo "")
+
+            echo ""
+            echo "=== Processing PR #$PR_NUM: $PR_TITLE ==="
+            echo "Created at: $CREATED_AT"
+            echo "Is draft: $IS_DRAFT"
+            echo "Labels: $LABELS"
+
+            # Skip drafts
+            if [ "$IS_DRAFT" = "true" ]; then
+              echo "SKIP: PR #$PR_NUM is a draft"
+              continue
+            fi
+
+            # Skip if has do-not-merge/* label
+            if echo "$LABELS" | grep -q "^do-not-merge/"; then
+              echo "SKIP: PR #$PR_NUM has a do-not-merge/* label"
+              continue
+            fi
+
+            # Check if PR is at least 1 working day old
+            # Working day = Monday-Friday, so:
+            # - If created Mon-Thu, eligible next day
+            # - If created Fri, eligible Mon
+            # - If created Sat, eligible Mon
+            # - If created Sun, eligible Tue
+
+            # The trailing `|| echo ""` keeps `set -e` from aborting when every
+            # date fallback fails, so the explicit check below can report it.
+            CREATED_TS=$(date -d "$CREATED_AT" +%s 2>/dev/null || date -j -f "%Y-%m-%dT%H:%M:%SZ" "$CREATED_AT" +%s 2>/dev/null || date -j -f "%Y-%m-%dT%H:%M:%S" "${CREATED_AT%Z}" +%s 2>/dev/null || echo "")
+            NOW_TS=$(date +%s)
+            CREATED_DOW=$(date -d "$CREATED_AT" +%u 2>/dev/null || date -j -f "%Y-%m-%dT%H:%M:%SZ" "$CREATED_AT" +%u 2>/dev/null || date -j -f "%Y-%m-%dT%H:%M:%S" "${CREATED_AT%Z}" +%u 2>/dev/null || echo "")
+
+            if [ -z "$CREATED_TS" ] || [ -z "$CREATED_DOW" ]; then
+              echo "WARNING: Failed to parse date '$CREATED_AT' for PR #$PR_NUM. Skipping."
+              continue
+            fi
+
+            # Calculate minimum age in seconds for 1 working day.
+            # Base: 24 hours = 86400 seconds
+            # If created on Friday (5), add the weekend: eligible Monday, 72 hours
+            # If created on Saturday (6), eligible Monday: 48 hours
+            # If created on Sunday (7), eligible Tuesday: 48 hours
+
+            case "$CREATED_DOW" in
+              5) MIN_AGE_SECONDS=$((72 * 3600)) ;;   # Friday -> Monday (3 days)
+              6) MIN_AGE_SECONDS=$((48 * 3600)) ;;   # Saturday -> Monday (2 days)
+              7) MIN_AGE_SECONDS=$((48 * 3600)) ;;   # Sunday -> Tuesday (2 days)
+              *) MIN_AGE_SECONDS=$((24 * 3600)) ;;   # Mon-Thu -> next day (1 day)
+            esac
+
+            AGE_SECONDS=$((NOW_TS - CREATED_TS))
+            AGE_HOURS=$((AGE_SECONDS / 3600))
+
+            echo "PR age: ${AGE_HOURS} hours (minimum required: $((MIN_AGE_SECONDS / 3600)) hours)"
+
+            if [ "$AGE_SECONDS" -lt "$MIN_AGE_SECONDS" ]; then
+              echo "SKIP: PR #$PR_NUM is not old enough (created $AGE_HOURS hours ago, need $((MIN_AGE_SECONDS / 3600)) hours)"
+              continue
+            fi
+
+            echo "Checking review status for PR #$PR_NUM..."
+
+            # Check if PR has an approving review
+            REVIEWS=$(gh pr view "$PR_NUM" --repo "$REPO" --json reviews --jq '.reviews[] | select(.state == "APPROVED")' 2>/dev/null || echo "")
+
+            if [ -z "$REVIEWS" ]; then
+              echo "No approving review found. Adding approval using github-actions bot..."
+
+              # Log the PR author; the approval below comes from the
+              # github-actions bot, a different identity than the PR creator
+              PR_AUTHOR=$(gh pr view "$PR_NUM" --repo "$REPO" --json author --jq '.author.login')
+              echo "PR author: $PR_AUTHOR"
+
+              # Add approving review using GITHUB_TOKEN (github-actions bot)
+              # This is different from GH_ACCESS_TOKEN which created the PR
+              GH_TOKEN="$GITHUB_TOKEN_FOR_APPROVAL" gh pr review "$PR_NUM" --repo "$REPO" --approve --body "Auto-approved by lockfile renewal workflow after 1 working day waiting period." || {
+                echo "WARNING: Failed to add approval for PR #$PR_NUM. May need manual approval."
+                continue
+              }
+              echo "Approval added successfully."
+            else
+              echo "PR #$PR_NUM already has an approving review."
+            fi
+
+            echo "MERGING: PR #$PR_NUM meets all criteria"
+            gh pr merge "$PR_NUM" --repo "$REPO" --merge --admin || echo "WARNING: Failed to merge PR #$PR_NUM"
+
+          done
+
+          echo ""
+          echo "Auto-merge check complete."
diff --git a/.github/workflows/security.yaml b/.github/workflows/security.yaml
index 3b73e67544..0d3dd5ff2f 100644
--- a/.github/workflows/security.yaml
+++ b/.github/workflows/security.yaml
@@ -16,18 +16,18 @@ jobs:
       security-events: write

     steps:
+      - name: Checkout code
+        uses: actions/checkout@v6
+
       # https://github.com/astral-sh/setup-uv
-      - name: Install the latest version of uv
+      - name: Install uv
        uses: astral-sh/setup-uv@v7
         with:
-          version: "latest"
+          version-file: uv.toml
           activate-environment: false
           ignore-empty-workdir: true
           enable-cache: false

-      - name: Checkout code
-        uses: actions/checkout@v6
-
       # Trivy does not support pylock.toml https://github.com/aquasecurity/trivy/discussions/9408
       - run: find . -name pyproject.toml -execdir uv lock \;
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 54a98e461b..712ee933c5 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -2,15 +2,21 @@
 # https://github.com/pre-commit/pre-commit-hooks?tab=readme-ov-file#hooks-available
 repos:
   # https://docs.astral.sh/uv/guides/integration/pre-commit/
-  - repo: https://github.com/astral-sh/uv-pre-commit
-    rev: 0.9.18
+  # Using a local hook instead of uv-pre-commit so it goes through ./uv,
+  # which handles version mismatches without requiring the exact system uv.
+ - repo: local hooks: - id: uv-lock + name: uv-lock + entry: ./uv lock --locked + language: system + files: '(^uv\.lock$|^pyproject\.toml$|^uv\.toml$)' + pass_filenames: false # https://github.com/astral-sh/ruff-pre-commit - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.14.10 + rev: v0.15.4 hooks: - - id: ruff + - id: ruff-check types_or: [python, pyi] args: [--fix] files: 'ci/.*|tests/.*' @@ -25,7 +31,7 @@ repos: - id: pyright name: Run Pyright on all files # entry: /bin/bash -c 'find. -name "*.py" | xargs pyright --pythonversion 3.12' - entry: uv run pyright --pythonversion 3.12 + entry: ./uv run pyright --pythonversion 3.14 pass_filenames: true types_or: [python, pyi] language: system diff --git a/Makefile b/Makefile index c835c34127..41de6745a2 100644 --- a/Makefile +++ b/Makefile @@ -421,7 +421,7 @@ refresh-lock-files: @echo "===================================================================" @echo "🔁 Refreshing pylock.toml files using $(INDEX_MODE)" @echo "===================================================================" - @cd $(ROOT_DIR) && bash scripts/pylocks_generator.sh $(INDEX_MODE) $(DIR) + @cd $(ROOT_DIR) && ./uv run scripts/pylocks_generator.py $(INDEX_MODE) $(DIR) # This is only for the workflow action # For running manually, set the required environment variables @@ -467,5 +467,5 @@ print-release: .PHONY: test test: @echo "Running quick static tests" - uv run pytest -m 'not buildonlytest' + ./uv run pytest -m 'not buildonlytest' @./scripts/check_dockerfile_alignment.sh diff --git a/README.md b/README.md index 4aa2e0c2df..450d0b567c 100644 --- a/README.md +++ b/README.md @@ -62,17 +62,49 @@ Note: To ensure the GitHub Action runs successfully, users must add a `GH_ACCESS #### Prepare Python + uv + pytest env +This project pins its uv version in `uv.toml` (`required-version`). +Use the `./uv` wrapper script at the repo root — it reads the pinned +version and runs it via `uvx`, so your system uv version doesn't matter: + ```shell # Linux sudo dnf install python3.14 pip install --user uv -# MacOS +# macOS brew install python@3.14 uv -uv venv --python $(which python3.14) -uv sync --locked +./uv venv --python $(which python3.14) +./uv sync --locked ``` +
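+A quick sanity check that the wrapper resolves the pinned version (the
+version it prints should match the pin in `uv.toml`):
+
+```shell
+./uv --version
+```
+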
+##### Alternatives to `./uv`
+
+The `./uv` wrapper is the recommended way, but you can also use one of the
+following (replace `0.10.6` below with the version from `uv.toml`):
+
+- **Use `uvx` directly** with an explicit version:
+  ```shell
+  uvx uv@0.10.6 sync --locked
+  ```
+- **Use `uv tool run`** (equivalent, longer form):
+  ```shell
+  uv tool run uv@0.10.6 sync --locked
+  ```
+- **Install the exact version** so `uv` works directly:
+  ```shell
+  # Standalone installer (any OS)
+  curl -LsSf https://astral.sh/uv/0.10.6/install.sh | sh
+  # Or with pip
+  pip install uv==0.10.6
+  ```
+
+If your system uv matches the pinned version, you can use `uv` directly —
+`required-version` in `uv.toml` will let it through. If it doesn't match,
+uv exits with a clear error telling you which version is required.
+
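+For reference, the pin itself is a one-line version specifier in `uv.toml`
+at the repo root (the value here is illustrative; check the actual file):
+
+```toml
+required-version = "==0.10.6"
+```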
+ #### Running Python selftests in Pytest By completing configuration in previous section, you are able to run any tests that don't need to start a container using following command: @@ -106,7 +138,7 @@ sudo dnf install podman systemctl --user start podman.service systemctl --user status podman.service systemctl --user status podman.socket -DOCKER_HOST=unix:///run/user/$UID/podman/podman.sock uv run pytest tests/containers -m 'not openshift and not cuda and not rocm' --image quay.io/opendatahub/workbench-images@sha256:e98d19df346e7abb1fa3053f6d41f0d1fa9bab39e49b4cb90b510ca33452c2e4 +DOCKER_HOST=unix:///run/user/$UID/podman/podman.sock ./uv run pytest tests/containers -m 'not openshift and not cuda and not rocm' --image quay.io/opendatahub/workbench-images@sha256:e98d19df346e7abb1fa3053f6d41f0d1fa9bab39e49b4cb90b510ca33452c2e4 # Mac OS brew install podman @@ -114,7 +146,7 @@ podman machine init podman machine set --rootful=false sudo podman-mac-helper install podman machine start -uv run pytest tests/containers -m 'not openshift' --image quay.io/opendatahub/workbench-images@sha256:e98d19df346e7abb1fa3053f6d41f0d1fa9bab39e49b4cb90b510ca33452c2e4 +./uv run pytest tests/containers -m 'not openshift' --image quay.io/opendatahub/workbench-images@sha256:e98d19df346e7abb1fa3053f6d41f0d1fa9bab39e49b4cb90b510ca33452c2e4 ``` When using lima on macOS, it might be useful to give yourself access to rootful podman socket diff --git a/ci/check-software-versions.py b/ci/check-software-versions.py index d918fa1e9a..34654934c9 100755 --- a/ci/check-software-versions.py +++ b/ci/check-software-versions.py @@ -256,8 +256,9 @@ def process_tag(tag): if stop_and_remove_container(container_id) != 0: log.error(f"Failed to stop/remove the container '{container_id}' for the '{image_ref}' tag!") print_delimiter() - return 1 # noqa: B012 `return` inside `finally` blocks cause exceptions to be silenced - print_delimiter() + ret_code = 1 + else: + print_delimiter() return ret_code diff --git a/ci/generate_code.sh b/ci/generate_code.sh index c483636f79..c78a6b5b94 100755 --- a/ci/generate_code.sh +++ b/ci/generate_code.sh @@ -1,7 +1,10 @@ #!/usr/bin/env bash set -Eeuxo pipefail -uv --version || pip install "uv==0.9.6" +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +REPO_ROOT="$(cd "${SCRIPT_DIR}/.." && pwd)" -uv run scripts/dockerfile_fragments.py -bash scripts/pylocks_generator.sh +uv --version || pip install "uv==0.10.6" + +"${REPO_ROOT}/uv" run scripts/dockerfile_fragments.py +"${REPO_ROOT}/uv" run scripts/pylocks_generator.py diff --git a/dependencies/cve-constraints.txt b/dependencies/cve-constraints.txt new file mode 100644 index 0000000000..5651f2f27b --- /dev/null +++ b/dependencies/cve-constraints.txt @@ -0,0 +1,14 @@ +# CVE-induced minimum version constraints +# This file is used with `uv pip compile --constraints` or UV_CONSTRAINT env var +# +# Format: requirements.txt style (package>=version) +# +# When adding a new CVE fix: +# 1. Add the constraint below with CVE ID and issue reference in a comment +# 2. Regenerate all lock files with: make refresh-lock-files +# 3. 
The constraint applies to ALL images during resolution + +# RHAIENG-2458: CVE-2025-66418 urllib3 decompression vulnerability +# Upstream: https://github.com/elyra-ai/elyra/issues/3325 +urllib3>=2.6.0 +keras>=3.13.1 diff --git a/docs/cves/python.md b/docs/cves/python.md new file mode 100644 index 0000000000..c95b5a9203 --- /dev/null +++ b/docs/cves/python.md @@ -0,0 +1,226 @@ +# Python CVE Resolution Guide + +This guide documents the workflow for resolving CVEs in Python packages within the OpenDataHub Notebooks images. + +> **Acknowledgment**: This workflow was contributed by Adriana Theodorakopoulou. + +## Overview + +Python CVEs in notebook images can come from: +- **Direct dependencies**: Packages explicitly listed in `pyproject.toml` +- **Transitive dependencies**: Packages pulled in by direct dependencies + +The resolution strategy differs based on which type is affected. + +## Centralized CVE Constraints + +To prevent CVEs from returning through transitive dependencies, we maintain a centralized constraints file: + +``` +dependencies/cve-constraints.txt +``` + +This file is automatically applied during lock file generation via `uv pip compile --constraints`. It ensures that even packages not explicitly in `pyproject.toml` (transitive dependencies) never go below the fixed version for CVEs we've resolved. + +### How It Works + +1. **Constraints file format** (requirements.txt style): + ``` + # CVE-ID: Description + # Reference: https://... + package>=fixed_version + ``` + +2. **Automatic application**: The `pylocks_generator.py` script applies these constraints to all lock file generations. + +3. **Override for conflicts**: Some packages (like odh-elyra's appengine-python-standard) have conflicting version requirements. For these, use `override-dependencies` in the specific image's `pyproject.toml`. + +### Adding a New CVE Constraint + +1. Add the constraint to `dependencies/cve-constraints.txt`: + ``` + # RHAIENG-XXXX: CVE-YYYY-ZZZZZ package_name vulnerability description + # Upstream: https://github.com/... + package_name>=fixed_version + ``` + +2. Regenerate all lock files: + ```bash + make refresh-lock-files + # or + ./uv run scripts/pylocks_generator.py auto + ``` + +3. If resolution fails due to conflicts, add `override-dependencies` to the affected image's `pyproject.toml`. + +## CVE Resolution Workflow + +### Step 1: Identify the Package and Affected Images + +Example: RHAIENG-2448 - Tornado quadratic DoS repeated header + +1. Open the Jira ticket and identify the package name (e.g., "tornado") +2. Check which images are affected (often all images from minimal to trustyai, tensorflow, pytorch, etc.) +3. Open one of the linked Jiras from ProdSec to see the summary + +### Step 2: Determine the Fixed Version + +From the CVE summary, identify: +- **Affected versions**: e.g., "version 6.5.2 and below" +- **Fixed version**: e.g., "fixed in version 6.5.3" + +### Step 3: Search for the Package in the Repository + +```bash +# Search in pyproject.toml files +grep -r "tornado" --include="pyproject.toml" . + +# Search in pylock.toml files +grep -r "tornado" --include="pylock.toml" . 
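+
+# Per-flavor locks are named uv.lock.d/pylock.<flavor>.toml, so match those too
+grep -r "tornado" --include="pylock.*.toml" .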
+``` + +Determine if it's a: +- **Direct dependency**: Found in `pyproject.toml` +- **Transitive dependency**: Only found in `pylock.toml` + +### Step 4: Identify the Source of Transitive Dependencies + +For transitive dependencies, find which direct dependency pulls it in: + +```bash +# Using uv (preferred) +uv tree | grep -A5 -B5 tornado + +# Or check the package's dependents +uv tree --invert tornado +``` + +Example: Tornado is typically pulled in by `jupyter-server`. + +### Step 5: Resolve the CVE + +#### Option A: Upgrade the Direct Dependency + +1. Check the latest version on [pypi.org](https://pypi.org) +2. Check the upstream package's `pyproject.toml` to see their version constraints +3. Update the version in your `pyproject.toml`: + ```toml + "jupyter-server~=2.17.0", # Updated for tornado CVE fix + ``` + +#### Option B: Use Centralized CVE Constraints + +If the direct dependency can't be upgraded but the transitive package version is flexible: + +1. Add to `dependencies/cve-constraints.txt`: + ``` + # RHAIENG-2448: CVE-XXXX-YYYY tornado quadratic DoS + tornado>=6.5.3 + ``` + +2. Regenerate lock files - the constraint will be applied automatically. + +#### Option C: Use Override Dependencies (Last Resort) + +If there are version conflicts that prevent constraint-based resolution: + +```toml +[tool.uv] +override-dependencies = [ + # RHAIENG-2448: CVE-XXXX-YYYY tornado - override needed due to version conflict + "tornado>=6.5.3", +] +``` + +**Note**: Override dependencies force the specified version, potentially breaking packages that genuinely can't work with it. Use sparingly. + +### Step 6: Regenerate Lock Files and Build + +```bash +# Regenerate lock files +make refresh-lock-files + +# Build the affected image(s) +make jupyter-datascience-ubi9-python-3.12 +``` + +### Step 7: Validate the Fix + +#### Downstream (Konflux) - Clair Scan + +1. Go to Konflux and find the Tekton build pipeline for your image +2. Open the **clair-scan** task logs +3. Search for the CVE number (e.g., `CVE-2024-XXXXX`) +4. If the CVE is **not found** in the logs, the fix is validated + +#### Upstream (GitHub Actions) - Trivy + +1. Go to the "push build notebooks" GitHub Action +2. Check the "Vulnerability Report by Trivy" section +3. Search for the CVE number +4. If the CVE is **not present** after the fix, validation is successful + +**Note**: Trivy is more sensitive than Konflux's Clair scan. A CVE may appear in Trivy but not in Clair. Always validate against the downstream Konflux scans for production images. + +## Example: Complete CVE Resolution + +### Scenario: CVE-2025-66418 in urllib3 + +1. **Identify**: urllib3 decompression vulnerability, affects all images +2. **Fixed version**: urllib3 >= 2.6.0 +3. **Type**: Transitive dependency (pulled in by many packages) +4. **Conflict**: odh-elyra depends on appengine-python-standard which requires urllib3<2 + +**Solution**: +1. Add to `dependencies/cve-constraints.txt` for general protection: + ``` + # RHAIENG-2458: CVE-2025-66418 urllib3 decompression vulnerability + urllib3>=2.6.0 + ``` + +2. Add override to jupyter images with odh-elyra (due to conflict): + ```toml + override-dependencies = [ + # RHAIENG-2458: CVE-2025-66418 urllib3 - override needed because odh-elyra pulls in + # appengine-python-standard which has an obnoxious urllib3<2 constraint + "urllib3>=2.6.0", + ] + ``` + +## Best Practices + +1. **Always add to centralized constraints first** - This prevents CVEs from returning through any dependency path. + +2. 
**Use override-dependencies sparingly** - Only when there's a genuine conflict that constraints can't resolve. + +3. **Document the CVE** - Include RHAIENG ticket, CVE ID, and explanation in comments. + +4. **Validate in both Trivy and Clair** - Trivy may catch issues Clair misses. + +5. **Consider upstream fixes** - If a direct dependency has a newer version that fixes the transitive CVE, prefer upgrading the direct dependency. + +## Related Files + +- `dependencies/cve-constraints.txt` - Centralized CVE constraints +- `scripts/pylocks_generator.py` - Lock file generator (applies constraints) +- `pyproject.toml` - Direct dependencies and override-dependencies +- `pylock.toml` / `uv.lock.d/` - Generated lock files + +## Useful Commands + +```bash +# Regenerate all lock files +make refresh-lock-files + +# Regenerate lock files for specific directory +./uv run scripts/pylocks_generator.py auto jupyter/datascience/ubi9-python-3.12 + +# Check dependency tree +uv tree + +# Find what depends on a package +uv tree --invert package-name + +# Search for package in repository +grep -r "package-name" --include="*.toml" . +``` diff --git a/jupyter/datascience/ubi9-python-3.12/pyproject.toml b/jupyter/datascience/ubi9-python-3.12/pyproject.toml index f058af9765..b35870f43a 100644 --- a/jupyter/datascience/ubi9-python-3.12/pyproject.toml +++ b/jupyter/datascience/ubi9-python-3.12/pyproject.toml @@ -52,7 +52,7 @@ environments = [ "sys_platform == 'linux' and implementation_name == 'cpython'", ] override-dependencies = [ - # RHAIENG-2458: CVE-2025-66418 urllib3 decompression vulnerability - # Upstream: https://github.com/elyra-ai/elyra/issues/3325 + # RHAIENG-2458: CVE-2025-66418 urllib3 - override needed because odh-elyra pulls in + # appengine-python-standard which has an obnoxious urllib3<2 constraint "urllib3>=2.6.0", ] diff --git a/jupyter/pytorch+llmcompressor/ubi9-python-3.12/pyproject.toml b/jupyter/pytorch+llmcompressor/ubi9-python-3.12/pyproject.toml index ea77d4c283..610724a984 100644 --- a/jupyter/pytorch+llmcompressor/ubi9-python-3.12/pyproject.toml +++ b/jupyter/pytorch+llmcompressor/ubi9-python-3.12/pyproject.toml @@ -91,7 +91,7 @@ environments = [ "sys_platform == 'linux' and implementation_name == 'cpython'", ] override-dependencies = [ - # RHAIENG-2458: CVE-2025-66418 urllib3 decompression vulnerability - # Upstream: https://github.com/elyra-ai/elyra/issues/3325 + # RHAIENG-2458: CVE-2025-66418 urllib3 - override needed because odh-elyra pulls in + # appengine-python-standard which has an obnoxious urllib3<2 constraint "urllib3>=2.6.0", ] diff --git a/jupyter/pytorch/ubi9-python-3.12/pyproject.toml b/jupyter/pytorch/ubi9-python-3.12/pyproject.toml index 92f4da5829..154e1037fb 100644 --- a/jupyter/pytorch/ubi9-python-3.12/pyproject.toml +++ b/jupyter/pytorch/ubi9-python-3.12/pyproject.toml @@ -67,7 +67,7 @@ environments = [ "sys_platform == 'linux' and implementation_name == 'cpython'", ] override-dependencies = [ - # RHAIENG-2458: CVE-2025-66418 urllib3 decompression vulnerability - # Upstream: https://github.com/elyra-ai/elyra/issues/3325 + # RHAIENG-2458: CVE-2025-66418 urllib3 - override needed because odh-elyra pulls in + # appengine-python-standard which has an obnoxious urllib3<2 constraint "urllib3>=2.6.0", ] diff --git a/jupyter/rocm/pytorch/ubi9-python-3.12/pyproject.toml b/jupyter/rocm/pytorch/ubi9-python-3.12/pyproject.toml index 0a2e147c94..b374783c71 100644 --- a/jupyter/rocm/pytorch/ubi9-python-3.12/pyproject.toml +++ b/jupyter/rocm/pytorch/ubi9-python-3.12/pyproject.toml 
@@ -69,7 +69,7 @@ environments = [ "sys_platform == 'linux' and implementation_name == 'cpython'", ] override-dependencies = [ - # RHAIENG-2458: CVE-2025-66418 urllib3 decompression vulnerability - # Upstream: https://github.com/elyra-ai/elyra/issues/3325 + # RHAIENG-2458: CVE-2025-66418 urllib3 - override needed because odh-elyra pulls in + # appengine-python-standard which has an obnoxious urllib3<2 constraint "urllib3>=2.6.0", ] diff --git a/jupyter/rocm/tensorflow/ubi9-python-3.12/pyproject.toml b/jupyter/rocm/tensorflow/ubi9-python-3.12/pyproject.toml index ec1166bf07..3bc8522701 100644 --- a/jupyter/rocm/tensorflow/ubi9-python-3.12/pyproject.toml +++ b/jupyter/rocm/tensorflow/ubi9-python-3.12/pyproject.toml @@ -14,31 +14,21 @@ dependencies = [ "tensorboard~=2.18.0", # Datascience and useful extensions - "boto3~=1.40.52", - "kafka-python-ng~=2.2.3", - "kfp~=2.14.6", - "matplotlib~=3.10.7", - "numpy~=1.26.4", - "pandas~=2.3.3", - "plotly~=6.3.1", - "scikit-learn~=1.7.2", - "scipy~=1.16.2", - "skl2onnx~=1.19.1", - "onnxconverter-common~=1.13.0", # Required for skl2onnx, as upgraded version is not compatible with protobuf "codeflare-sdk~=0.34.0", - "kubeflow-training==1.9.3", + "skl2onnx~=1.19.1", + "odh-notebooks-meta-workbench-datascience-deps", # DB connectors - "pymongo~=4.15.3", - "psycopg~=3.2.10", - "pyodbc~=5.2.0", - "mysql-connector-python~=9.4.0", + "pymongo~=4.16.0", + "psycopg~=3.3.2", + "pyodbc~=5.3.0", + "mysql-connector-python~=9.5.0", # JupyterLab packages "odh-elyra==4.3.2", "odh-jupyter-trash-cleanup==0.1.1", - "jupyterlab==4.4.9", + "jupyterlab==4.5.2", "jupyter-bokeh~=4.0.5", "jupyter-server~=2.17.0", "jupyter-server-proxy~=4.4.0", @@ -51,19 +41,25 @@ dependencies = [ "nbgitpuller~=1.2.2", # Base packages - "wheel~=0.46.2", + "wheel~=0.46.3", "setuptools~=80.9.0", ] +[tool.uv.sources] +odh-notebooks-meta-workbench-datascience-deps = { path = "../../../../dependencies/odh-notebooks-meta-workbench-datascience-deps" } + [tool.uv] override-dependencies = [ # tf2onnx has pinned protobuf version, that causes conflict with other packages "protobuf==6.31.1", - "keras~=3.12.0", - # RHAIENG-2458: CVE-2025-66418 urllib3 decompression vulnerability - # Upstream: https://github.com/elyra-ai/elyra/issues/3325 + # RHAIENG-2458: CVE-2025-66418 urllib3 - override needed because odh-elyra pulls in + # appengine-python-standard which has an obnoxious urllib3<2 constraint "urllib3>=2.6.0", + # AIPCC-8698: python-lsp-server[all] has conflicting pyflakes/pycodestyle requirements + # Using >= instead of ~= to allow broader compatibility during resolution + # TODO: Remove this override before RHOAI 3.4 GA once AIPCC-8698 is resolved + "python-lsp-server>=1.11.0", ] constraint-dependencies = [ diff --git a/jupyter/tensorflow/ubi9-python-3.12/pyproject.toml b/jupyter/tensorflow/ubi9-python-3.12/pyproject.toml index 74bc4f5de8..7cebed68a9 100644 --- a/jupyter/tensorflow/ubi9-python-3.12/pyproject.toml +++ b/jupyter/tensorflow/ubi9-python-3.12/pyproject.toml @@ -11,31 +11,22 @@ dependencies = [ "tensorboard~=2.20.0", # Datascience and useful extensions - "boto3~=1.40.52", - "kafka-python-ng~=2.2.3", - "kfp~=2.14.6", - "matplotlib~=3.10.7", - "numpy~=2.1.3", - "pandas~=2.3.3", - "plotly~=6.3.1", - "scikit-learn~=1.7.2", - "scipy~=1.16.2", - "skl2onnx~=1.19.1", - "onnxconverter-common~=1.13.0", # Required for skl2onnx, as upgraded version is not compatible with protobuf "codeflare-sdk~=0.34.0", "feast~=0.59.0", + "skl2onnx~=1.19.1", + "odh-notebooks-meta-workbench-datascience-deps", # DB 
connectors - "pymongo~=4.15.3", - "psycopg~=3.2.10", - "pyodbc~=5.2.0", - "mysql-connector-python~=9.4.0", + "pymongo~=4.16.0", + "psycopg~=3.3.2", + "pyodbc~=5.3.0", + "mysql-connector-python~=9.5.0", # JupyterLab packages "odh-elyra==4.3.2", "odh-jupyter-trash-cleanup==0.1.1", - "jupyterlab==4.4.9", + "jupyterlab==4.5.2", "jupyter-bokeh~=4.0.5", "jupyter-server~=2.17.0", "jupyter-server-proxy~=4.4.0", @@ -48,19 +39,25 @@ dependencies = [ "nbgitpuller~=1.2.2", # Base packages - "wheel~=0.46.2", + "wheel~=0.46.3", "setuptools~=80.9.0", ] +[tool.uv.sources] +odh-notebooks-meta-workbench-datascience-deps = { path = "../../../dependencies/odh-notebooks-meta-workbench-datascience-deps" } + [tool.uv] override-dependencies = [ # tf2onnx has pinned protobuf version, that causes conflict with other packages "protobuf==6.31.1", - "keras~=3.12.0", - # RHAIENG-2458: CVE-2025-66418 urllib3 decompression vulnerability - # Upstream: https://github.com/elyra-ai/elyra/issues/3325 + # RHAIENG-2458: CVE-2025-66418 urllib3 - override needed because odh-elyra pulls in + # appengine-python-standard which has an obnoxious urllib3<2 constraint "urllib3>=2.6.0", + # AIPCC-8698: python-lsp-server[all] has conflicting pyflakes/pycodestyle requirements + # Using >= instead of ~= to allow broader compatibility during resolution + # TODO: Remove this override before RHOAI 3.4 GA once AIPCC-8698 is resolved + "python-lsp-server>=1.11.0", ] environments = [ diff --git a/jupyter/trustyai/ubi9-python-3.12/pyproject.toml b/jupyter/trustyai/ubi9-python-3.12/pyproject.toml index 9653799a12..65bf33ddd5 100644 --- a/jupyter/trustyai/ubi9-python-3.12/pyproject.toml +++ b/jupyter/trustyai/ubi9-python-3.12/pyproject.toml @@ -75,8 +75,8 @@ environments = [ "sys_platform == 'linux' and implementation_name == 'cpython'", ] override-dependencies = [ - # RHAIENG-2458: CVE-2025-66418 urllib3 decompression vulnerability - # Upstream: https://github.com/elyra-ai/elyra/issues/3325 + # RHAIENG-2458: CVE-2025-66418 urllib3 - override needed because odh-elyra pulls in + # appengine-python-standard which has an obnoxious urllib3<2 constraint "urllib3>=2.6.0", ] diff --git a/pyproject.toml b/pyproject.toml index 4b92a1acfe..6d9788f0a6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -31,6 +31,7 @@ dev = [ "podman", "kubernetes", "openshift-python-wrapper", + "typer", ] [tool.uv] diff --git a/runtimes/rocm-tensorflow/ubi9-python-3.12/pyproject.toml b/runtimes/rocm-tensorflow/ubi9-python-3.12/pyproject.toml index 8d9c640160..fd3e0e90c8 100644 --- a/runtimes/rocm-tensorflow/ubi9-python-3.12/pyproject.toml +++ b/runtimes/rocm-tensorflow/ubi9-python-3.12/pyproject.toml @@ -14,30 +14,20 @@ dependencies = [ "tensorboard~=2.18.0", # Datascience and useful extensions - "boto3~=1.40.52", - "kafka-python-ng~=2.2.3", - "matplotlib~=3.10.7", - "numpy~=1.26.4", - "pandas~=2.3.3", - "plotly~=6.3.1", - "scikit-learn~=1.7.2", - "scipy~=1.16.2", - "skl2onnx~=1.19.1", - # Required for skl2onnx, as upgraded version is not compatible with protobuf - "onnxconverter-common~=1.13.0", "codeflare-sdk~=0.34.0", + "odh-notebooks-meta-runtime-datascience-deps", # DB connectors - "pymongo~=4.15.3", - "psycopg~=3.2.10", - "pyodbc~=5.2.0", - "mysql-connector-python~=9.4.0", + "pymongo~=4.16.0", + "psycopg~=3.3.2", + "pyodbc~=5.3.0", + "mysql-connector-python~=9.5.0", "odh-notebooks-meta-runtime-elyra-deps", # Base packages "setuptools==80.9.0", - "wheel~=0.46.2", + "wheel==0.46.3", ] [tool.uv] @@ -45,7 +35,6 @@ dependencies = [ override-dependencies = [ # tf2onnx has 
pinned protobuf version, that causes conflict with other packages "protobuf==6.31.1", - "keras~=3.12.0" ] constraint-dependencies = [ @@ -59,3 +48,4 @@ environments = [ [tool.uv.sources] odh-notebooks-meta-runtime-elyra-deps = { path = "../../../dependencies/odh-notebooks-meta-runtime-elyra-deps" } +odh-notebooks-meta-runtime-datascience-deps = { path = "../../../dependencies/odh-notebooks-meta-runtime-datascience-deps" } diff --git a/runtimes/tensorflow/ubi9-python-3.12/pyproject.toml b/runtimes/tensorflow/ubi9-python-3.12/pyproject.toml index 62faa47067..5a7f1e25ae 100644 --- a/runtimes/tensorflow/ubi9-python-3.12/pyproject.toml +++ b/runtimes/tensorflow/ubi9-python-3.12/pyproject.toml @@ -11,31 +11,21 @@ dependencies = [ "tensorboard~=2.20.0", # Datascience and useful extensions - "boto3~=1.40.52", - "kafka-python-ng~=2.2.3", - "matplotlib~=3.10.7", - "numpy~=2.1.3", - "pandas~=2.3.3", - "plotly~=6.3.1", - "scikit-learn~=1.7.2", - "scipy~=1.16.2", - "skl2onnx~=1.19.1", - # Required for skl2onnx, as upgraded version is not compatible with protobuf - "onnxconverter-common~=1.13.0", "codeflare-sdk~=0.34.0", "feast~=0.59.0", + "odh-notebooks-meta-runtime-datascience-deps", # DB connectors - "pymongo~=4.15.3", - "psycopg~=3.2.10", - "pyodbc~=5.2.0", - "mysql-connector-python~=9.4.0", + "pymongo~=4.16.0", + "psycopg~=3.3.2", + "pyodbc~=5.3.0", + "mysql-connector-python~=9.5.0", "odh-notebooks-meta-runtime-elyra-deps", # Base packages "setuptools~=80.9.0", - "wheel~=0.46.2", + "wheel~=0.46.3", ] [tool.uv] @@ -43,7 +33,6 @@ dependencies = [ override-dependencies = [ # tf2onnx has pinned protobuf version, that causes conflict with other packages "protobuf==6.31.1", - "keras~=3.12.0" ] environments = [ @@ -52,3 +41,4 @@ environments = [ [tool.uv.sources] odh-notebooks-meta-runtime-elyra-deps = { path = "../../../dependencies/odh-notebooks-meta-runtime-elyra-deps" } +odh-notebooks-meta-runtime-datascience-deps = { path = "../../../dependencies/odh-notebooks-meta-runtime-datascience-deps" } diff --git a/scripts/pylocks_generator.py b/scripts/pylocks_generator.py new file mode 100644 index 0000000000..0ca5d134e0 --- /dev/null +++ b/scripts/pylocks_generator.py @@ -0,0 +1,411 @@ +#!/usr/bin/env python3 + +"""Generate Python dependency lock files (pylock.toml) using uv pip compile. + +This script generates Python dependency lock files (pylock.toml) for multiple +directories using either internal Red Hat wheel indexes or the public PyPI index. + +Features: + - Supports multiple Python project directories, detected by pyproject.toml. + - Detects available Dockerfile flavors (CPU, CUDA, ROCm) for rh-index mode. + - Validates Python version extracted from directory name (expects format .../ubi9-python-X.Y). + - Generates per-flavor locks in 'uv.lock.d/' for rh-index mode. + - Overwrites existing pylock.toml in-place for public PyPI index mode. + +Index Modes: + auto (default) -- Uses rh-index if uv.lock.d/ exists, public-index otherwise. + rh-index -- Uses internal Red Hat wheel indexes. Generates uv.lock.d/pylock..toml. + public-index -- Uses public PyPI index and updates pylock.toml in place. + +Fallback Index (RHAIENG-3071): + For CUDA and ROCm flavors, if CPU_INDEX_URL is defined in the build-args/*.conf file, + it will be added as a fallback index for packages not available in the specialized indexes. + +Usage: + 1. Lock using auto mode (default) for all projects in MAIN_DIRS:: + + python pylocks_generator.py + + 2. 
Lock using rh-index for a specific directory:: + + python pylocks_generator.py rh-index jupyter/minimal/ubi9-python-3.12 + + 3. Lock using public index for a specific directory:: + + python pylocks_generator.py public-index jupyter/minimal/ubi9-python-3.12 + + 4. Force upgrade all packages to latest versions:: + + FORCE_LOCKFILES_UPGRADE=1 python pylocks_generator.py + +Notes: + - If the script fails for a directory, it lists the failed directories at the end. + - Public index mode does not create uv.lock.d directories and keeps the old format. + - Python version extraction depends on directory naming convention; invalid formats are skipped. +""" + +from __future__ import annotations + +import os +import re +import subprocess +import sys +from enum import Enum +from pathlib import Path +from typing import Annotated + +import typer + +# ============================================================================= +# CONFIGURATION +# ============================================================================= + +ROOT_DIR = Path(__file__).resolve().parent.parent +UV = ROOT_DIR / "uv" +CVE_CONSTRAINTS_FILE = ROOT_DIR / "dependencies" / "cve-constraints.txt" +PUBLIC_INDEX = "--default-index=https://pypi.org/simple" +MAIN_DIRS = ("jupyter", "runtimes", "rstudio", "codeserver") +UV_MIN_VERSION = (0, 4, 0) + +NO_EMIT_PACKAGES = ( + "odh-notebooks-meta-llmcompressor-deps", + "odh-notebooks-meta-runtime-elyra-deps", + "odh-notebooks-meta-runtime-datascience-deps", + "odh-notebooks-meta-workbench-datascience-deps", +) + +FLAVORS = ("cpu", "cuda", "rocm") + + +class IndexMode(str, Enum): + auto = "auto" + rh_index = "rh-index" + public_index = "public-index" + + +# ============================================================================= +# HELPER FUNCTIONS +# ============================================================================= + +BLUE = "\033[1;34m" +YELLOW = "\033[1;33m" +RED = "\033[1;31m" +GREEN = "\033[1;32m" +RESET = "\033[0m" + + +def info(msg: str) -> None: + print(f"🔹 {BLUE}{msg}{RESET}") + + +def warn(msg: str) -> None: + print(f"⚠️ {YELLOW}{msg}{RESET}", file=sys.stderr) + + +def error(msg: str) -> None: + print(f"❌ {RED}{msg}{RESET}", file=sys.stderr) + + +def ok(msg: str) -> None: + print(f"✅ {GREEN}{msg}{RESET}", file=sys.stderr) + + +def read_conf_value(conf_file: Path, key: str) -> str | None: + """Read a key=value from a .conf file, skipping comments and blank lines.""" + for line in conf_file.read_text().splitlines(): + stripped = line.strip() + if stripped.startswith("#") or "=" not in stripped: + continue + k, _, v = stripped.partition("=") + if k.strip() == key: + return v.strip() + return None + + +# ============================================================================= +# PRE-FLIGHT CHECK +# ============================================================================= + + +def check_uv() -> None: + """Verify the uv wrapper exists and meets the minimum version requirement.""" + if not UV.is_file() or not os.access(UV, os.X_OK): + error(f"Expected uv wrapper at '{UV}' but it is missing or not executable.") + raise SystemExit(1) + + try: + result = subprocess.run( + [str(UV), "--version"], + capture_output=True, + text=True, + check=False, + ) + version_str = result.stdout.strip().split()[1] if result.stdout.strip() else "0.0.0" + except (IndexError, FileNotFoundError): + version_str = "0.0.0" + + version_tuple = tuple(int(x) for x in version_str.split(".")) + if version_tuple < UV_MIN_VERSION: + min_ver = ".".join(str(x) for x in UV_MIN_VERSION) + error(f"uv 
version {version_str} found, but >= {min_ver} is required.") + error("Please upgrade uv: https://github.com/astral-sh/uv") + raise SystemExit(1) + + +# ============================================================================= +# TARGET DIRECTORY DISCOVERY +# ============================================================================= + + +def find_target_dirs(target_dir: Path | None) -> list[Path]: + """Find directories containing pyproject.toml.""" + if target_dir is not None: + return [target_dir] + + info("Scanning main directories for Python projects...") + dirs: set[Path] = set() + for base_name in MAIN_DIRS: + base = ROOT_DIR / base_name + if base.is_dir(): + dirs.update(p.parent for p in base.rglob("pyproject.toml")) + return sorted(dirs) + + +# ============================================================================= +# FLAVOR DETECTION +# ============================================================================= + + +def detect_flavors(project_dir: Path) -> set[str]: + """Detect available Dockerfile flavors (cpu, cuda, rocm) in a directory.""" + return {f for f in FLAVORS if (project_dir / f"Dockerfile.{f}").is_file()} + + +def extract_python_version(project_dir: Path) -> str | None: + """Extract Python version from directory name suffix (e.g. ubi9-python-3.12 -> 3.12).""" + name = project_dir.resolve().name + # The version is everything after the last hyphen + version = name.rsplit("-", maxsplit=1)[-1] + if re.fullmatch(r"\d+\.\d+", version): + return version + return None + + +# ============================================================================= +# INDEX FLAGS +# ============================================================================= + + +def get_index_flags(project_dir: Path, flavor: str) -> list[str] | None: + """Build uv index flags from build-args/.conf. + + Returns None on failure (missing conf or INDEX_URL). + """ + conf_file = project_dir / "build-args" / f"{flavor}.conf" + if not conf_file.is_file(): + warn(f"Missing build-args config for {flavor}: {conf_file}") + return None + + index_url = read_conf_value(conf_file, "INDEX_URL") + if not index_url: + warn(f"INDEX_URL not found in {conf_file}") + return None + + flags = [f"--default-index={index_url}", f"--index={index_url}"] + + # For CUDA and ROCm flavors, add CPU index as fallback (RHAIENG-3071) + if flavor in ("cuda", "rocm"): + cpu_index_url = read_conf_value(conf_file, "CPU_INDEX_URL") + if cpu_index_url: + flags.append(f"--index={cpu_index_url}") + print(" 📎 Using CPU index as fallback", file=sys.stderr) + + return flags + + +# ============================================================================= +# LOCK FILE GENERATION +# ============================================================================= + + +def run_lock( + project_dir: Path, + flavor: str, + index_flags: list[str], + mode: IndexMode, + python_version: str, + upgrade: bool, +) -> bool: + """Run uv pip compile to generate a lock file. Returns True on success.""" + if mode == IndexMode.public_index: + output = "pylock.toml" + desc = "pylock.toml (public index)" + print("➡️ Generating pylock.toml from public PyPI index...") + else: + (project_dir / "uv.lock.d").mkdir(exist_ok=True) + output = f"uv.lock.d/pylock.{flavor}.toml" + desc = f"{flavor.upper()} lock file" + print(f"➡️ Generating {flavor.upper()} lock file...") + + # Tag filtering was added in uv 0.9.16 (https://github.com/astral-sh/uv/pull/16956) + # but bypassed in --universal mode. 
uv 0.10.5 (https://github.com/astral-sh/uv/pull/18081) + # now filters wheels by requires-python and marker disjointness even in --universal mode. + # Documentation at https://docs.astral.sh/uv/reference/cli/#uv-pip-compile--python-platform says that + # `--python-platform linux` is alias for `x86_64-unknown-linux-gnu`; we cannot use this to get a multiarch pylock + # Let's use --universal temporarily, and in the future we can switch to using uv.lock + # when https://github.com/astral-sh/uv/issues/6830 is resolved, or symlink `ln -s uv.lock.d/uv.${flavor}.lock uv.lock` + # Note: currently generating uv.lock.d/pylock.${flavor}.toml; future rename to uv.${flavor}.lock is planned + # See also --universal discussion with Gerard + # https://redhat-internal.slack.com/archives/C0961HQ858Q/p1757935641975969?thread_ts=1757542802.032519&cid=C0961HQ858Q + cmd: list[str] = [ + str(UV), + "pip", + "compile", + "pyproject.toml", + "--output-file", + output, + "--format", + "pylock.toml", + "--generate-hashes", + "--emit-index-url", + f"--python-version={python_version}", + "--universal", + "--no-annotate", + "--quiet", + ] + + for pkg in NO_EMIT_PACKAGES: + cmd.extend(["--no-emit-package", pkg]) + + if upgrade: + cmd.append("--upgrade") + + # Use relative path to avoid absolute paths in pylock.toml headers + if CVE_CONSTRAINTS_FILE.is_file(): + relative_constraints = os.path.relpath(CVE_CONSTRAINTS_FILE, project_dir) + cmd.extend(["--constraints", relative_constraints]) + + cmd.extend(index_flags) + + result = subprocess.run(cmd, cwd=project_dir, check=False) + + if result.returncode != 0: + warn(f"Failed to generate {desc} in {project_dir}") + output_path = project_dir / output + output_path.unlink(missing_ok=True) + return False + + ok(f"{desc} generated successfully.") + return True + + +# ============================================================================= +# MAIN +# ============================================================================= + +app = typer.Typer(add_completion=False) + + +@app.command() +def main( + index_mode: Annotated[ + IndexMode, typer.Argument(help="Index mode: auto, rh-index, or public-index") + ] = IndexMode.auto, + target_dir: Annotated[ + Path | None, typer.Argument(help="Specific project directory to process") + ] = None, +) -> None: + """Generate pylock.toml lock files for Python project directories.""" + # PRE-FLIGHT + check_uv() + + # UPGRADE FLAG + upgrade = os.environ.get("FORCE_LOCKFILES_UPGRADE", "0") == "1" + if upgrade: + info("FORCE_LOCKFILES_UPGRADE=1 detected. Will upgrade all packages to latest versions.") + + info(f"Using index mode: {index_mode.value}") + + # TARGET DIRECTORIES + target_dirs = find_target_dirs(target_dir) + if not target_dirs: + error("No directories containing pyproject.toml were found.") + raise SystemExit(1) + + # MAIN LOOP + success_dirs: list[Path] = [] + failed_dirs: list[Path] = [] + + for tdir in target_dirs: + print() + print("=" * 67) + info(f"Processing directory: {tdir}") + print("=" * 67) + + python_version = extract_python_version(tdir) + if python_version is None: + warn(f"Could not extract valid Python version from directory name: {tdir}") + warn("Expected directory format: .../ubi9-python-X.Y") + continue + + flavors = detect_flavors(tdir) + if not flavors: + warn(f"No Dockerfiles found in {tdir} (cpu/cuda/rocm). 
Skipping.") + continue + + print(f"📦 Python version: {python_version}") + print("🧩 Detected flavors:") + for f in sorted(flavors): + print(f" • {f.upper()}") + print() + + # Resolve effective mode + if index_mode == IndexMode.auto: + effective_mode = IndexMode.rh_index if (tdir / "uv.lock.d").is_dir() else IndexMode.public_index + else: + effective_mode = index_mode + info(f"Effective mode for this directory: {effective_mode.value}") + + dir_success = True + + if effective_mode == IndexMode.public_index: + if not run_lock(tdir, "cpu", [PUBLIC_INDEX], effective_mode, python_version, upgrade): + dir_success = False + else: + for flavor in ("cpu", "cuda", "rocm"): + if flavor not in flavors: + continue + flags = get_index_flags(tdir, flavor) + if flags is None: + dir_success = False + continue + if not run_lock(tdir, flavor, flags, effective_mode, python_version, upgrade): + dir_success = False + + if dir_success: + success_dirs.append(tdir) + else: + failed_dirs.append(tdir) + + # SUMMARY + print() + print("=" * 67) + ok("Lock generation complete.") + print("=" * 67) + + if success_dirs: + print("✅ Successfully generated locks for:") + for d in success_dirs: + print(f" • {d}") + + if failed_dirs: + print() + warn("Failed lock generation for:") + for d in failed_dirs: + print(f" • {d}") + print("Please comment out the missing package to continue and report the missing package to the RH index maintainers") + raise SystemExit(1) + + +if __name__ == "__main__": + app() diff --git a/scripts/pylocks_generator.sh b/scripts/pylocks_generator.sh deleted file mode 100755 index e107151db6..0000000000 --- a/scripts/pylocks_generator.sh +++ /dev/null @@ -1,258 +0,0 @@ -#!/usr/bin/env bash -set -euo pipefail - -# ============================================================================= -# pylocks_generator.sh -# -# This script generates Python dependency lock files (pylock.toml) for multiple -# directories using either internal AIPCC wheel indexes or the public PyPI index. -# -# Features: -# • Supports multiple Python project directories, detected by pyproject.toml. -# • Detects available Dockerfile flavors (CPU, CUDA, ROCm) for AIPCC index mode. -# • Validates Python version extracted from directory name (expects format .../ubi9-python-X.Y). -# • Generates per-flavor locks in 'uv.lock/' for AIPCC index mode. -# • Overwrites existing pylock.toml in-place for public PyPI index mode. -# -# Index Modes: -# • aipcc-index -> Uses internal Red Hat AIPCC wheel indexes. Generates uv.lock/pylock..toml for each detected flavor. -# • public-index -> Uses public PyPI index. Updates pylock.toml in the project directory. -# Default mode if not specified. -# -# Usage: -# 1. Lock using default public index for all projects in MAIN_DIRS: -# bash pylocks_generator.sh -# -# 2. Lock using AIPCC index for a specific directory: -# bash pylocks_generator.sh aipcc-index jupyter/minimal/ubi9-python-3.12 -# -# 3. Lock using public index for a specific directory: -# bash pylocks_generator.sh public-index jupyter/minimal/ubi9-python-3.12 -# -# Notes: -# • If the script fails for a directory, it lists the failed directories at the end. -# • Public index mode does not create uv.lock directories keeps the old format. -# • Python version extraction depends on directory naming convention; invalid formats are skipped. 
-# ============================================================================= - -# ---------------------------- -# CONFIGURATION -# ---------------------------- -CPU_INDEX="--index-url=https://console.redhat.com/api/pypi/public-rhai/rhoai/3.0/cpu-ubi9/simple/" -CUDA_INDEX="--index-url=https://console.redhat.com/api/pypi/public-rhai/rhoai/3.0/cuda-ubi9/simple/" -ROCM_INDEX="--index-url=https://console.redhat.com/api/pypi/public-rhai/rhoai/3.0/rocm-ubi9/simple/" -PUBLIC_INDEX="--index-url=https://pypi.org/simple" - -MAIN_DIRS=("jupyter" "runtimes" "rstudio" "codeserver") - -# ---------------------------- -# HELPER FUNCTIONS -# ---------------------------- -info() { echo -e "🔹 \033[1;34m$1\033[0m"; } -warn() { echo -e "⚠️ \033[1;33m$1\033[0m"; } -error() { echo -e "❌ \033[1;31m$1\033[0m"; } -ok() { echo -e "✅ \033[1;32m$1\033[0m"; } - -uppercase() { - echo "$1" | tr '[:lower:]' '[:upper:]' -} - -# ---------------------------- -# PRE-FLIGHT CHECK -# ---------------------------- -if ! command -v uv &>/dev/null; then - error "uv command not found. Please install uv: https://github.com/astral-sh/uv" - exit 1 -fi - -UV_MIN_VERSION="0.4.0" -UV_VERSION=$(uv --version 2>/dev/null | awk '{print $2}' || echo "0.0.0") - -version_ge() { - [ "$(printf '%s\n' "$2" "$1" | sort -V | head -n1)" = "$2" ] -} - -if ! version_ge "$UV_VERSION" "$UV_MIN_VERSION"; then - error "uv version $UV_VERSION found, but >= $UV_MIN_VERSION is required." - error "Please upgrade uv: https://github.com/astral-sh/uv" - exit 1 -fi - -# ---------------------------- -# ARGUMENT PARSING -# ---------------------------- -# default to public-index if not provided -INDEX_MODE="${1:-public-index}" -TARGET_DIR_ARG="${2:-}" - -# Validate mode -if [[ "$INDEX_MODE" != "aipcc-index" && "$INDEX_MODE" != "public-index" ]]; then - error "Invalid mode '$INDEX_MODE'. Valid options: aipcc-index, public-index" - exit 1 -fi -info "Using index mode: $INDEX_MODE" - -# ---------------------------- -# GET TARGET DIRECTORIES -# ---------------------------- -if [ -n "$TARGET_DIR_ARG" ]; then - TARGET_DIRS=("$TARGET_DIR_ARG") -else - info "Scanning main directories for Python projects..." - TARGET_DIRS=() - for base in "${MAIN_DIRS[@]}"; do - if [ -d "$base" ]; then - while IFS= read -r -d '' pyproj; do - TARGET_DIRS+=("$(dirname "$pyproj")") - done < <(find "$base" -type f -name "pyproject.toml" -print0) - fi - done -fi - -if [ ${#TARGET_DIRS[@]} -eq 0 ]; then - error "No directories containing pyproject.toml were found." - exit 1 -fi - -# ---------------------------- -# MAIN LOOP -# ---------------------------- -FAILED_DIRS=() -SUCCESS_DIRS=() - -for TARGET_DIR in "${TARGET_DIRS[@]}"; do - echo - echo "===================================================================" - info "Processing directory: $TARGET_DIR" - echo "===================================================================" - - cd "$TARGET_DIR" || continue - PYTHON_VERSION="${PWD##*-}" - - # Validate Python version extraction - if [[ ! "$PYTHON_VERSION" =~ ^[0-9]+\.[0-9]+$ ]]; then - warn "Could not extract valid Python version from directory name: $PWD" - warn "Expected directory format: .../ubi9-python-X.Y" - cd - >/dev/null - continue - fi - - # Detect available Dockerfiles (flavors) - HAS_CPU=false - HAS_CUDA=false - HAS_ROCM=false - [ -f "Dockerfile.cpu" ] && HAS_CPU=true - [ -f "Dockerfile.cuda" ] && HAS_CUDA=true - [ -f "Dockerfile.rocm" ] && HAS_ROCM=true - - if ! $HAS_CPU && ! $HAS_CUDA && ! $HAS_ROCM; then - warn "No Dockerfiles found in $TARGET_DIR (cpu/cuda/rocm). Skipping." 
-    cd - >/dev/null
-    continue
-  fi
-
-  echo "📦 Python version: $PYTHON_VERSION"
-  echo "🧩 Detected flavors:"
-  $HAS_CPU  && echo "   • CPU"
-  $HAS_CUDA && echo "   • CUDA"
-  $HAS_ROCM && echo "   • ROCm"
-  echo
-
-  DIR_SUCCESS=true
-
-  run_lock() {
-    local flavor="$1"
-    local index="$2"
-    local output
-    local desc
-
-    if [[ "$INDEX_MODE" == "public-index" ]]; then
-      output="pylock.toml"
-      desc="pylock.toml (public index)"
-      echo "➡️  Generating pylock.toml from public PyPI index..."
-    else
-      mkdir -p uv.lock
-      output="uv.lock/pylock.${flavor}.toml"
-      desc="$(uppercase "$flavor") lock file"
-      echo "➡️  Generating $(uppercase "$flavor") lock file..."
-    fi
-
-    # The behavior has changed in uv 0.9.17 (https://github.com/astral-sh/uv/pull/16956)
-    # Documentation at https://docs.astral.sh/uv/reference/cli/#uv-pip-compile--python-platform says that
-    # `--python-platform linux` is an alias for `x86_64-unknown-linux-gnu`; we cannot use this to get a multiarch pylock.
-    # Let's use --universal temporarily, and in the future we can switch to using uv.lock
-    # when https://github.com/astral-sh/uv/issues/6830 is resolved, or link `ln -s uv.lock/lock.${flavor}.toml uv.lock`
-    # See also the --universal discussion with Gerard:
-    # https://redhat-internal.slack.com/archives/C0961HQ858Q/p1757935641975969?thread_ts=1757542802.032519&cid=C0961HQ858Q
-    set +e
-    uv pip compile pyproject.toml \
-      --output-file "$output" \
-      --format pylock.toml \
-      --generate-hashes \
-      --emit-index-url \
-      --python-version="$PYTHON_VERSION" \
-      --universal \
-      --no-annotate \
-      --quiet \
-      --no-emit-package odh-notebooks-meta-llmcompressor-deps \
-      --no-emit-package odh-notebooks-meta-runtime-elyra-deps \
-      $index
-    local status=$?
-    set -e
-
-    if [ $status -ne 0 ]; then
-      warn "Failed to generate $desc in $TARGET_DIR"
-      rm -f "$output"
-      DIR_SUCCESS=false
-    else
-      if [[ "$INDEX_MODE" == "public-index" ]]; then
-        ok "pylock.toml generated successfully."
-      else
-        ok "$(uppercase "$flavor") lock generated successfully."
-      fi
-    fi
-  }
-
-  # Run lock generation
-  if [[ "$INDEX_MODE" == "public-index" ]]; then
-    # public-index always updates pylock.toml in place
-    run_lock "cpu" "$PUBLIC_INDEX"
-  else
-    $HAS_CPU  && run_lock "cpu"  "$CPU_INDEX"
-    $HAS_CUDA && run_lock "cuda" "$CUDA_INDEX"
-    $HAS_ROCM && run_lock "rocm" "$ROCM_INDEX"
-  fi
-
-  if $DIR_SUCCESS; then
-    SUCCESS_DIRS+=("$TARGET_DIR")
-  else
-    FAILED_DIRS+=("$TARGET_DIR")
-  fi
-
-  cd - >/dev/null
-done
-
-# ----------------------------
-# SUMMARY
-# ----------------------------
-echo
-echo "==================================================================="
-ok "Lock generation complete."
-echo "==================================================================="
-
-if [ ${#SUCCESS_DIRS[@]} -gt 0 ]; then
-  echo "✅ Successfully generated locks for:"
-  for d in "${SUCCESS_DIRS[@]}"; do
-    echo "   • $d"
-  done
-fi
-
-if [ ${#FAILED_DIRS[@]} -gt 0 ]; then
-  echo
-  warn "Failed lock generation for:"
-  for d in "${FAILED_DIRS[@]}"; do
-    echo "   • $d"
-    echo "Please comment out the missing package to continue and report the missing package to aipcc"
-  done
-  exit 1
-fi
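When a single flavor fails to resolve, the compile step can be reproduced by hand outside either wrapper. A minimal sketch using the CUDA index; the directory and output path are illustrative, and `uv.lock.d/` follows the layout the new generator checks for:

    cd jupyter/minimal/ubi9-python-3.12
    uv pip compile pyproject.toml \
      --output-file uv.lock.d/pylock.cuda.toml \
      --format pylock.toml \
      --generate-hashes \
      --emit-index-url \
      --python-version=3.12 \
      --universal \
      --no-annotate \
      --index-url=https://console.redhat.com/api/pypi/public-rhai/rhoai/3.0/cuda-ubi9/simple/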
-echo "===================================================================" - -if [ ${#SUCCESS_DIRS[@]} -gt 0 ]; then - echo "✅ Successfully generated locks for:" - for d in "${SUCCESS_DIRS[@]}"; do - echo " • $d" - done -fi - -if [ ${#FAILED_DIRS[@]} -gt 0 ]; then - echo - warn "Failed lock generation for:" - for d in "${FAILED_DIRS[@]}"; do - echo " • $d" - echo "Please comment out the missing package to continue and report the missing package to aipcc" - done - exit 1 -fi diff --git a/tests/__init__.py b/tests/__init__.py index bef952bc81..15a3ae61b3 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -1,6 +1,4 @@ -import pathlib - -PROJECT_ROOT = pathlib.Path(__file__).parent.parent +from tests._common import PROJECT_ROOT as PROJECT_ROOT __all__ = [ "PROJECT_ROOT", diff --git a/tests/_common.py b/tests/_common.py new file mode 100644 index 0000000000..98cb32eb5e --- /dev/null +++ b/tests/_common.py @@ -0,0 +1,3 @@ +import pathlib + +PROJECT_ROOT = pathlib.Path(__file__).parent.parent diff --git a/tests/containers/docker_utils.py b/tests/containers/docker_utils.py index 9d2801ad9f..109df7fb5d 100644 --- a/tests/containers/docker_utils.py +++ b/tests/containers/docker_utils.py @@ -232,7 +232,7 @@ def container_exec_with_stdin( raw_io._sock.shutdown(pysocket.SHUT_WR) else: stream.shutdown(pysocket.SHUT_WR) - except (OSError, AttributeError): + except OSError, AttributeError: # This is expected if the remote process closes the connection first. pass diff --git a/tests/containers/workbenches/workbench_image_test.py b/tests/containers/workbenches/workbench_image_test.py index ea0ce2d2a1..e999f09002 100644 --- a/tests/containers/workbenches/workbench_image_test.py +++ b/tests/containers/workbenches/workbench_image_test.py @@ -93,8 +93,9 @@ def test_ipv6_only(self, subtests: pytest_subtests.SubTests, workbench_image: st socket_path = os.path.realpath(docker_utils.get_socket_path(client.client)) logging.debug(f"{socket_path=}") process = podman_machine_utils.open_ssh_tunnel( - machine_predicate=lambda m: os.path.realpath(m.ConnectionInfo.PodmanSocket.Path) - == socket_path, + machine_predicate=lambda m: ( + os.path.realpath(m.ConnectionInfo.PodmanSocket.Path) == socket_path + ), local_port=port, remote_port=container.port, remote_interface=f"[{ipv6_address}]", diff --git a/uv b/uv new file mode 100755 index 0000000000..145dfea0a7 --- /dev/null +++ b/uv @@ -0,0 +1,40 @@ +#!/usr/bin/env -S bash --norc --noprofile +# ./uv — run the project-pinned version of uv. +# +# Reads required-version from uv.toml and delegates via `uv tool run`. +# This avoids version mismatch errors when your system uv (e.g. Homebrew) +# differs from the version pinned for this project. +# +# Usage: +# ./uv sync +# ./uv run pytest +# ./uv pip compile ... +# +# The pinned version is cached by uvx after the first run. +# Parsing uses bash =~ instead of forking sed to avoid a subprocess. +# +# Bash with built-in regex is ~3x faster than Python for this task (hyperfine, +# 50 runs): bash+builtin 18.7ms, bash+sed 25.0ms, python 55.4ms. 
diff --git a/uv.lock b/uv.lock
index d099d1d4e2..08e67703d8 100644
--- a/uv.lock
+++ b/uv.lock
@@ -943,6 +943,7 @@ dev = [
     { name = "ruamel-yaml", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
     { name = "ruff", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
     { name = "testcontainers", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
+    { name = "typer", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
 ]
 
 [package.metadata]
@@ -965,6 +966,7 @@ dev = [
     { name = "ruamel-yaml" },
     { name = "ruff" },
     { name = "testcontainers" },
+    { name = "typer" },
 ]
 
 [[package]]
diff --git a/uv.toml b/uv.toml
new file mode 100644
index 0000000000..05eb4364ab
--- /dev/null
+++ b/uv.toml
@@ -0,0 +1 @@
+required-version = "==0.10.6"
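A quick local consistency check across the pin, the wrapper, and the lock file; a sketch, with expected outputs assuming the `required-version` shown above:

    grep '^required-version' uv.toml   # expect: required-version = "==0.10.6"
    ./uv --version                     # expect: uv 0.10.6
    ./uv lock --check                  # fails if uv.lock is stale relative to pyproject.toml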