[BUGFIX] Fix Pixtral consolidated format vision weight loading #39916
```diff
@@ -458,13 +458,27 @@ def get_eagle3_aux_hidden_state_layers(self) -> tuple[int, ...]:
     def load_weights(self, weights: Iterable[tuple[str, torch.Tensor]]):
         _vision_encoder_stacked_params = [
             # (param_name, shard_name, shard_id)
+            # HF format
             (".qkv_proj", ".q_proj", "q"),
             (".qkv_proj", ".k_proj", "k"),
             (".qkv_proj", ".v_proj", "v"),
             (".gate_up_proj", ".gate_proj", 0),
             (".gate_up_proj", ".up_proj", 1),
+            # Mistral native (consolidated) format
+            (".qkv_proj", ".wq", "q"),
+            (".qkv_proj", ".wk", "k"),
+            (".qkv_proj", ".wv", "v"),
+            (".gate_up_proj", ".w1", 0),
+            (".gate_up_proj", ".w3", 1),
         ]

+        # Remap Mistral native names to HF-style names
+        # used by the vLLM vision encoder modules.
+        _vision_encoder_name_remap = {
+            ".wo.": ".o_proj.",
+            ".w2.": ".down_proj.",
+        }
+
         def is_vision_encoder_weights(weight: tuple[str, torch.Tensor]):
             return weight[0].startswith(("vision_encoder", "vision_tower"))
```
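The first hunk extends the stacked-parameter table so that Mistral's consolidated attention and MLP weight names (`wq`/`wk`/`wv`, `w1`/`w3`) are folded into the same fused `qkv_proj` and `gate_up_proj` parameters as their HF-format counterparts. As a minimal sketch of how such a table drives shard loading, here is a toy `FusedQKV` stand-in for vLLM's fused linear layers; the class and the checkpoint keys below are hypothetical illustrations, not part of the PR:

```python
import torch

# Hypothetical stand-in for a fused QKV parameter: three checkpoint
# tensors (q, k, v) are written into one stacked weight matrix.
class FusedQKV:
    def __init__(self, head_dim: int, hidden: int):
        # Rows [0:head_dim] hold q, [head_dim:2*head_dim] hold k, etc.
        self.weight = torch.empty(3 * head_dim, hidden)
        self._block = {"q": 0, "k": 1, "v": 2}
        self._head_dim = head_dim

    def load_shard(self, w: torch.Tensor, shard_id: str):
        start = self._block[shard_id] * self._head_dim
        self.weight[start:start + self._head_dim].copy_(w)


stacked_params = [
    # (param_name, shard_name, shard_id) -- consolidated-format rows from the PR
    (".qkv_proj", ".wq", "q"),
    (".qkv_proj", ".wk", "k"),
    (".qkv_proj", ".wv", "v"),
]

qkv = FusedQKV(head_dim=4, hidden=8)
checkpoint = {
    # Hypothetical Mistral consolidated-format keys
    "vision_encoder.layers.0.attention.wq.weight": torch.randn(4, 8),
    "vision_encoder.layers.0.attention.wk.weight": torch.randn(4, 8),
    "vision_encoder.layers.0.attention.wv.weight": torch.randn(4, 8),
}

# Mirror the loader's matching loop: the first table row whose
# shard_name occurs in the checkpoint key decides where it lands.
for name, w in checkpoint.items():
    for param_name, shard_name, shard_id in stacked_params:
        if shard_name in name:
            qkv.load_shard(w, shard_id)
            break

print(qkv.weight.shape)  # torch.Size([12, 8])
```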
```diff
@@ -518,6 +532,11 @@ def llm_weights_generator():
                         weight_loader(param, w, shard_id)
                         break
                 else:
+                    for old, new in _vision_encoder_name_remap.items():
+                        if old in trimmed_name:
+                            trimmed_name = trimmed_name.replace(old, new)
+                            break
+
                     param = vision_encoder_dict.get(trimmed_name)
                     if param is not None:
                         weight_loader = getattr(
```
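The second hunk adds a fallback for weights that match no stacked-parameter entry: Mistral-native names are rewritten to the HF-style names (`wo` → `o_proj`, `w2` → `down_proj`) before the lookup in `vision_encoder_dict`. A minimal sketch of that rewrite in isolation, with hypothetical checkpoint keys:

```python
_vision_encoder_name_remap = {
    ".wo.": ".o_proj.",
    ".w2.": ".down_proj.",
}

def remap(trimmed_name: str) -> str:
    # Replace the first matching Mistral-native fragment with its
    # HF-style equivalent, mirroring the loop added by the PR.
    for old, new in _vision_encoder_name_remap.items():
        if old in trimmed_name:
            return trimmed_name.replace(old, new)
    return trimmed_name

# Hypothetical consolidated-format keys
print(remap("vision_encoder.layers.0.attention.wo.weight"))
# -> vision_encoder.layers.0.attention.o_proj.weight
print(remap("vision_encoder.layers.0.feed_forward.w2.weight"))
# -> vision_encoder.layers.0.feed_forward.down_proj.weight
```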