Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions tools/pre_commit/check_forbidden_imports.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,7 @@ class ForbiddenImport:
"vllm/transformers_utils/config.py",
"vllm/model_executor/models/registry.py",
"vllm/compilation/caching.py",
"vllm/env_override.py",
"vllm/compilation/piecewise_backend.py",
"vllm/distributed/utils.py",
"vllm/distributed/parallel_state.py",
Expand Down
44 changes: 43 additions & 1 deletion vllm/env_override.py
Original file line number Diff line number Diff line change
Expand Up @@ -87,7 +87,7 @@ def _maybe_set_cuda_compatibility_path():
import torch

from vllm.logger import init_logger
from vllm.utils.torch_utils import is_torch_equal
from vllm.utils.torch_utils import is_torch_equal, is_torch_equal_or_newer

logger = init_logger(__name__)

Expand Down Expand Up @@ -490,3 +490,45 @@ def _patch_get_raw_stream_if_needed():

PythonWrapperCodegen.memory_plan_reuse = memory_plan_reuse_patched
GraphLowering._update_scheduler = _update_scheduler_patched

# ===================================================
# torch <2.12 GraphCaptureOutput.get_runtime_env monkeypatch
# ===================================================
Comment on lines +494 to +496
Copy link
Copy Markdown
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

do you have a test that would exercise this? Otherwise we may just silently break it in the 2.12 update

Copy link
Copy Markdown
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This is in @hmellor's PR which surfaced the need for this monkeypatch

Copy link
Copy Markdown
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

tests/compile/fullgraph/test_multimodal_compile.py test with transformers backend fails without this PR's patch

Copy link
Copy Markdown
Member

@hmellor hmellor Mar 27, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Looks like we have a bit of a stalemate, could we merge this first then I'll be using it in #30518?

Copy link
Copy Markdown
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

yup let's do it

# PyTorch's AOT compile path leaves builtins out of used_globals, which
# surfaces as 'Missing required external references' errors for refs such
# as 'type' (hit by the transformers backend code paths).
# This mirrors the upstream fix in
# https://github.com/pytorch/pytorch/pull/177558 and can be dropped once
# torch >= 2.12 is the minimum supported version.

if not is_torch_equal_or_newer("2.12.0"):
    import builtins as _builtins
    import pickle

    from torch._dynamo.convert_frame import GraphCaptureOutput

    # Keep a handle on the unpatched method so the wrapper can delegate to it.
    _original_get_runtime_env = GraphCaptureOutput.get_runtime_env

    def _safe_builtins_dict(builtins_dict: dict) -> dict:
        """Return a copy of *builtins_dict* keeping only picklable entries."""
        safe: dict = {}
        for name, value in builtins_dict.items():
            try:
                pickle.dumps(value)
            except Exception:
                # Best-effort filter: anything that refuses to pickle is
                # simply omitted from the serialized environment.
                continue
            safe[name] = value
        return safe

    def _patched_get_runtime_env(self):  # type: ignore[no-untyped-def]
        """Backfill builtin external refs missing from used_globals."""
        runtime_env = _original_get_runtime_env(self)
        for ref in runtime_env.external_refs:
            if ref in runtime_env.used_globals:
                continue
            if ref.startswith("__builtins_dict__") and ref in self.f_globals:
                # Dynamo's synthesized builtins dict: serialize only the
                # entries that can survive pickling.
                runtime_env.used_globals[ref] = _safe_builtins_dict(
                    self.f_globals[ref]
                )
            elif hasattr(_builtins, ref):
                # A plain builtin name (e.g. 'type'): resolve it directly.
                runtime_env.used_globals[ref] = getattr(_builtins, ref)
        return runtime_env

    GraphCaptureOutput.get_runtime_env = _patched_get_runtime_env
Loading