Commit 104f08e

test: update max_beam_width to 1 due to torchsampler changes.
Signed-off-by: nv-guomingz <[email protected]>
1 parent: 2d2b8ba

1 file changed (+3 -3 lines)


tests/unittest/llmapi/test_llm_args.py

Lines changed: 3 additions & 3 deletions
@@ -372,18 +372,18 @@ class TestTorchLlmArgs:
     def test_runtime_sizes(self):
         llm = TorchLLM(
             llama_model_path,
-            max_beam_width=4,
+            max_beam_width=1,
             max_num_tokens=256,
             max_seq_len=128,
             max_batch_size=8,
         )

-        assert llm.args.max_beam_width == 4
+        assert llm.args.max_beam_width == 1
         assert llm.args.max_num_tokens == 256
         assert llm.args.max_seq_len == 128
         assert llm.args.max_batch_size == 8

-        assert llm._executor_config.max_beam_width == 4
+        assert llm._executor_config.max_beam_width == 1
         assert llm._executor_config.max_num_tokens == 256
         assert llm._executor_config.max_seq_len == 128
         assert llm._executor_config.max_batch_size == 8
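
For context, a minimal sketch of what the updated test now exercises. The import and checkpoint path below are placeholders for illustration only; the real test uses the module's own TorchLLM import and its llama_model_path fixture.

    # Sketch only: the import path and checkpoint path are assumptions,
    # not taken from the diff above.
    from tensorrt_llm import LLM as TorchLLM  # assumed alias for the torch-backend LLM

    llama_model_path = "/path/to/llama/checkpoint"  # placeholder path

    llm = TorchLLM(
        llama_model_path,
        max_beam_width=1,  # torch sampler change: a single beam is expected
        max_num_tokens=256,
        max_seq_len=128,
        max_batch_size=8,
    )

    # The runtime sizes should propagate to both the parsed args and the
    # executor config, matching the assertions in the test.
    assert llm.args.max_beam_width == 1
    assert llm._executor_config.max_beam_width == 1

To run only this case: pytest tests/unittest/llmapi/test_llm_args.py::TestTorchLlmArgs::test_runtime_sizes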
