Commit 9fceb60

Update throughput.py
When running `vllm serve ...` on vLLM 0.8.5 (Windows):

```
Lib\site-packages\vllm\benchmarks\throughput.py", line 13, in <module>
    import uvloop
ModuleNotFoundError: No module named 'uvloop'
```
1 parent d9fec81 commit 9fceb60


vllm/benchmarks/throughput.py

Lines changed: 11 additions & 2 deletions
```diff
@@ -10,7 +10,16 @@
 from typing import Any, Optional, Union
 
 import torch
-import uvloop
+import platform
+if platform.system() == "Windows":
+    import winloop as uvloop_impl
+    # Windows does not support fork
+    os.environ["VLLM_WORKER_MULTIPROC_METHOD"] = "spawn"
+
+    # Disable libuv on Windows by default
+    os.environ["USE_LIBUV"] = os.environ.get("USE_LIBUV", "0")
+else:
+    import uvloop as uvloop_impl
 from tqdm import tqdm
 from transformers import (AutoModelForCausalLM, AutoTokenizer,
                           PreTrainedTokenizerBase)
@@ -537,7 +546,7 @@ def main(args: argparse.Namespace):
     request_outputs: Optional[list[RequestOutput]] = None
     if args.backend == "vllm":
         if args.async_engine:
-            elapsed_time = uvloop.run(
+            elapsed_time = uvloop_impl.run(
                 run_vllm_async(
                     requests,
                     args.n,
```
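
The gist of the patch is to pick a uvloop-compatible event-loop runner per platform. Below is a rough, self-contained sketch of that pattern, not taken from the commit: it assumes winloop (Windows) and uvloop (elsewhere) expose a uvloop-style `run()`, falls back to stock `asyncio` when neither is installed, and uses a hypothetical `run_benchmark` coroutine in place of `run_vllm_async`.

```python
import asyncio
import os
import platform
import time


# Choose a uvloop-compatible runner for the current platform.
# Assumption: winloop (Windows) and uvloop (elsewhere) both expose a
# uvloop-style run(); if the import fails, fall back to plain asyncio.
if platform.system() == "Windows":
    try:
        import winloop as uvloop_impl
    except ImportError:
        uvloop_impl = asyncio
    # Windows has no fork(); the patch forces the spawn start method and
    # disables libuv unless USE_LIBUV is already set.
    os.environ["VLLM_WORKER_MULTIPROC_METHOD"] = "spawn"
    os.environ.setdefault("USE_LIBUV", "0")
else:
    try:
        import uvloop as uvloop_impl
    except ImportError:
        uvloop_impl = asyncio


async def run_benchmark() -> float:
    """Hypothetical stand-in for run_vllm_async(); returns elapsed seconds."""
    start = time.perf_counter()
    await asyncio.sleep(0.1)  # pretend to do asynchronous benchmark work
    return time.perf_counter() - start


if __name__ == "__main__":
    elapsed_time = uvloop_impl.run(run_benchmark())
    print(f"elapsed: {elapsed_time:.3f}s")
```

Whichever loop implementation is selected, the call site stays the same, which is what the `uvloop.run(...)` to `uvloop_impl.run(...)` change in the second hunk relies on.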
