fix issue where mono-internvl failed to fall back to the pytorch engine (#2744)
lvhan028 authored Nov 13, 2024
1 parent 9f6ff9b commit 20544d3
Showing 1 changed file with 5 additions and 3 deletions.
lmdeploy/turbomind/supported_models.py (8 changes: 5 additions & 3 deletions)

@@ -96,10 +96,12 @@ def _is_head_dim_supported(cfg):
             # glm-4v-9b not supported
             support_by_turbomind = False
         elif arch == 'InternVLChatModel':
-            support_by_turbomind = _is_head_dim_supported(cfg.llm_config)
+            llm_arch = cfg.llm_config.architectures[0]
+            support_by_turbomind = (llm_arch in SUPPORTED_ARCHS and
+                                    _is_head_dim_supported(cfg.llm_config))
         elif arch == 'LlavaForConditionalGeneration':
-            sub_arch = cfg.text_config.architectures[0]
-            if sub_arch in ['Qwen2ForCausalLM', 'LlamaForCausalLM']:
+            llm_arch = cfg.text_config.architectures[0]
+            if llm_arch in ['Qwen2ForCausalLM', 'LlamaForCausalLM']:
                 support_by_turbomind = _is_head_dim_supported(
                     cfg.text_config)
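For context, a minimal, self-contained sketch of the patched check. Only the branch logic mirrors the diff above; the `SUPPORTED_ARCHS` subset, the stub `_is_head_dim_supported`, the `internvl_supported_by_turbomind` wrapper, and the stub configs (including the unsupported architecture name) are illustrative assumptions, not lmdeploy's real definitions. A Mono-InternVL-style checkpoint whose inner LLM architecture is not in `SUPPORTED_ARCHS` now yields `False`, letting lmdeploy fall back to the pytorch engine instead of routing the model to turbomind.

```python
from types import SimpleNamespace

# Illustrative subset of turbomind's SUPPORTED_ARCHS; the real table lives in
# lmdeploy/turbomind/supported_models.py.
SUPPORTED_ARCHS = {'InternLM2ForCausalLM', 'LlamaForCausalLM', 'Qwen2ForCausalLM'}


def _is_head_dim_supported(cfg):
    # Stand-in for the real helper: turbomind supports head_dim == 128.
    return cfg.hidden_size // cfg.num_attention_heads == 128


def internvl_supported_by_turbomind(cfg):
    """Mirrors the patched InternVLChatModel branch shown in the diff above."""
    llm_arch = cfg.llm_config.architectures[0]
    return llm_arch in SUPPORTED_ARCHS and _is_head_dim_supported(cfg.llm_config)


# Hypothetical configs for illustration only.
internvl2 = SimpleNamespace(llm_config=SimpleNamespace(
    architectures=['InternLM2ForCausalLM'], hidden_size=4096,
    num_attention_heads=32))
mono_style = SimpleNamespace(llm_config=SimpleNamespace(
    architectures=['SomeUnsupportedCausalLM'], hidden_size=2048,
    num_attention_heads=16))

print(internvl_supported_by_turbomind(internvl2))   # True  -> turbomind engine
print(internvl_supported_by_turbomind(mono_style))  # False -> pytorch engine fallback
```

Before this change, the branch checked only the head dimension, so a checkpoint like the second one could still be reported as turbomind-compatible and the pytorch fallback never triggered.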
