Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 6 additions & 0 deletions vllm/engine/async_llm_engine.py
Original file line number Diff line number Diff line change
Expand Up @@ -506,3 +506,9 @@ def from_engine_args(cls,
max_log_len=engine_args.max_log_len,
start_engine_loop=start_engine_loop)
return engine

async def do_log_stats(self) -> None:
    """Ask the wrapped engine to emit its periodic system stats.

    When the engine lives in a Ray actor (``engine_use_ray``) the call
    is dispatched via the actor's ``.remote()`` handle and awaited;
    otherwise the in-process engine method is invoked directly.
    """
    engine = self.engine
    if self.engine_use_ray:
        await engine.do_log_stats.remote()
        return
    engine.do_log_stats()
3 changes: 3 additions & 0 deletions vllm/engine/llm_engine.py
Original file line number Diff line number Diff line change
Expand Up @@ -641,6 +641,9 @@ def step(self) -> List[RequestOutput]:

return self._process_model_outputs(output, scheduler_outputs)

def do_log_stats(self) -> None:
    """Emit system stats outside of a regular engine step.

    Delegates to ``_log_system_stats`` with ``prompt_run=False`` and 0
    for the second argument (presumably a batched-token count — the
    parameter name is not visible here; verify against the
    ``_log_system_stats`` signature). Intended for periodic callers
    such as the API server's background logging task.
    """
    self._log_system_stats(False, 0)

def _log_system_stats(
self,
prompt_run: bool,
Expand Down
20 changes: 19 additions & 1 deletion vllm/entrypoints/openai/api_server.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@
import codecs
import json
import time
from contextlib import asynccontextmanager
from http import HTTPStatus
from typing import AsyncGenerator, Dict, List, Optional, Tuple, Union

Expand Down Expand Up @@ -38,11 +39,28 @@

logger = init_logger(__name__)
served_model = None
app = fastapi.FastAPI()
engine_args = None
engine = None
response_role = None


@asynccontextmanager
async def lifespan(app: fastapi.FastAPI):
    """FastAPI lifespan hook: run a periodic stats logger for the app's lifetime.

    On startup, unless stats logging is disabled via ``engine_args``,
    spawns a background task that asks the engine to log its stats
    every 10 seconds. On shutdown the task is cancelled.
    """

    async def _force_log():
        # Runs until cancelled; reads the module-level `engine` global.
        while True:
            await asyncio.sleep(10)
            await engine.do_log_stats()

    task = None
    if not engine_args.disable_log_stats:
        # Keep a reference to the task: the event loop holds only weak
        # references, so an unreferenced task may be garbage-collected
        # and silently stop logging.
        task = asyncio.create_task(_force_log())
    try:
        yield
    finally:
        # Stop the background logger on shutdown instead of leaving it
        # to die with the loop.
        if task is not None:
            task.cancel()


# Register the lifespan context manager so the periodic stats logger is
# started on app startup (when enabled by the engine args).
app = fastapi.FastAPI(lifespan=lifespan)


def parse_args():
parser = argparse.ArgumentParser(
description="vLLM OpenAI-Compatible RESTful API server.")
Expand Down