From 1647348910a1acb9fa79f73ed297ac4de7b6f7a9 Mon Sep 17 00:00:00 2001
From: Kourosh Hakhamaneshi
Date: Tue, 14 Oct 2025 10:11:54 -0700
Subject: [PATCH] Count prefix-token mismatches in benchmark dataset sampling

Account for token mismatches from the generated prefix tokens (not just
the suffix tokens) in token_mismatch_total, and fix the misspelled
token_mistmatch variable.

Signed-off-by: Kourosh Hakhamaneshi
---
 vllm/benchmarks/datasets.py | 7 ++++---
 1 file changed, 4 insertions(+), 3 deletions(-)

diff --git a/vllm/benchmarks/datasets.py b/vllm/benchmarks/datasets.py
index 7ffc21905924..f3b2560412c5 100644
--- a/vllm/benchmarks/datasets.py
+++ b/vllm/benchmarks/datasets.py
@@ -2850,13 +2850,14 @@ def _generate_exact_length_tokens(target_length: int) -> list[int]:
         requests = []
         token_mismatch_total = 0
         for _ in range(num_prefixes):
-            prefix_tokens = _generate_exact_length_tokens(prefix_len)
+            prefix_tokens, prefix_mismatch = _generate_exact_length_tokens(prefix_len)
+            token_mismatch_total += prefix_mismatch
 
             for _ in range(prompts_per_prefix):
-                suffix_tokens, token_mistmatch = _generate_exact_length_tokens(
+                suffix_tokens, suffix_mismatch = _generate_exact_length_tokens(
                     suffix_len
                 )
-                token_mismatch_total += token_mistmatch
+                token_mismatch_total += suffix_mismatch
                 combined_tokens = prefix_tokens + suffix_tokens
                 prompt = tokenizer.decode(combined_tokens)
                 prompt_len = len(combined_tokens)