From 8f34eb10bb45c8b41442e8f72f31caf91104a9d2 Mon Sep 17 00:00:00 2001 From: Galaxy-Husky <598756381@qq.com> Date: Fri, 13 Dec 2024 11:26:33 +0800 Subject: [PATCH] Fix args type in docstring (#2888) * Fix args type in docstring * fix linting --- lmdeploy/serve/async_engine.py | 21 ++++++++++++--------- 1 file changed, 12 insertions(+), 9 deletions(-) diff --git a/lmdeploy/serve/async_engine.py b/lmdeploy/serve/async_engine.py index f3c3432328..78574a38b1 100644 --- a/lmdeploy/serve/async_engine.py +++ b/lmdeploy/serve/async_engine.py @@ -223,9 +223,10 @@ def __call__(self, """Inference a batch of prompts. Args: - prompts (List[str] | str | List[Dict] | List[Dict]): a batch of - prompts. It accepts: string prompt, a list of string prompts, - a chat history in OpenAI format or a list of chat history. + prompts (List[str] | str | List[Dict] | List[List[Dict]]): a + batch of prompts. It accepts: string prompt, a list of string + prompts, a chat history in OpenAI format or a list of chat + history. gen_config (GenerationConfig | None): a instance of GenerationConfig. Default to None. do_preprocess (bool): whether pre-process the messages. Default to @@ -297,9 +298,10 @@ def batch_infer(self, """Inference a batch of prompts. Args: - prompts (List[str] | str | List[Dict] | List[Dict]): a batch of - prompts. It accepts: string prompt, a list of string prompts, - a chat history in OpenAI format or a list of chat history. + prompts (List[str] | str | List[Dict] | List[List[Dict]]): a + batch of prompts. It accepts: string prompt, a list of string + prompts, a chat history in OpenAI format or a list of chat + history. gen_config (GenerationConfig | None): a instance of or a list of GenerationConfig. Default to None. do_preprocess (bool): whether pre-process the messages. Default to @@ -374,9 +376,10 @@ def stream_infer( """Inference a batch of prompts with stream mode. Args: - prompts (List[str] | str | List[Dict] | List[Dict]): a batch of - prompts. 
It accepts: string prompt, a list of string prompts, - a chat history in OpenAI format or a list of chat history. + prompts (List[str] | str | List[Dict] | List[List[Dict]]): a + batch of prompts. It accepts: string prompt, a list of string + prompts, a chat history in OpenAI format or a list of chat + history. gen_config (GenerationConfig | None): a instance of or a list of GenerationConfig. Default to None. do_preprocess (bool): whether pre-process the messages. Default to