Check server input #2719
Changes from 7 commits: 3e257de, ad0ea23, 9d3face, 91504e2, a6a9a3b, ddeec0e, c4c4b7f, efc2599
```diff
@@ -3,7 +3,7 @@
 import json
 import uuid
 from abc import abstractmethod
-from typing import List, Literal, Optional
+from typing import List, Literal, Optional, Union

 from mmengine import Registry

```
```diff
@@ -18,6 +18,15 @@ def random_uuid() -> str:
     return str(uuid.uuid4().hex)


+def get_text(content: Union[str, List[dict]]):
+    """openai format support Union[str,
+    List[ChatCompletionContentPartTextParam]] input."""
+
+    if isinstance(content, str):
+        return content
+    return content[0]['text']
+
+
 @dataclasses.dataclass
 class ChatTemplateConfig:
     """Parameters for chat template.
```
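For context, a minimal sketch of what the new helper does with each input shape; the part dicts below are hypothetical stand-ins for OpenAI's `ChatCompletionContentPartTextParam`:

```python
from typing import List, Union


def get_text(content: Union[str, List[dict]]):
    """openai format support Union[str,
    List[ChatCompletionContentPartTextParam]] input."""
    if isinstance(content, str):
        return content
    return content[0]['text']


# Plain-string content passes through unchanged.
assert get_text('hello') == 'hello'

# List content: only the first part's text is used.
parts = [{'type': 'text', 'text': 'hello'},
         {'type': 'text', 'text': 'ignored'}]
assert get_text(parts) == 'hello'
```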
```diff
@@ -219,7 +228,7 @@ def messages2prompt(self, messages, sequence_start=True, **kwargs):
             ret += f'{self.system}{self.meta_instruction}{self.eosys}'
         for message in messages:
             role = message['role']
-            content = message['content']
+            content = get_text(message['content'])
             ret += f'{box_map[role]}{content}{eox_map[role]}'
         if len(messages) and messages[-1]['role'] == 'assistant':
             return ret[:-len(eox_map['assistant'])]  # prefix of response
```

> **Review comment:** Is it for the GPT4V messages?
>
> **Reply:** The content part could be a `str` or a `ChatCompletionContentPartTextParam`.
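The two request shapes the reply refers to, for illustration (the image URL and wording are made up):

```python
# Plain string content (the common chat format).
plain = {'role': 'user', 'content': 'Describe the image.'}

# Content as a list of parts (GPT-4V style). With this patch the chat
# template sees only the first part's text, i.e. 'Describe the image.'
multimodal = {
    'role': 'user',
    'content': [
        {'type': 'text', 'text': 'Describe the image.'},
        {'type': 'image_url',
         'image_url': {'url': 'https://example.com/cat.png'}},
    ],
}
```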
```diff
@@ -509,7 +518,7 @@ def messages2prompt(self,
             messages.insert(insert_index, tools_prompt)
         for message in messages:
             role = message['role']
-            content = message['content']
+            content = get_text(message['content'])
             if role == 'assistant' and message.get('tool_calls',
                                                    None) is not None:
                 for tool_call in message['tool_calls']:
```
```diff
@@ -861,7 +870,7 @@ def messages2prompt(self,
             ret += f'{self.system}{self.knowledge}{self.tools}{tool_prompt}{self.eotools}{self.meta_instruction}{self.eosys}'
         for message in messages:
             role = message['role']
-            content = message['content']
+            content = get_text(message['content'])
             if role == 'assistant' and ('<|python_tag|>' in content
                                         or '</function>' in content):
                 ret += f'{box_map[role]}{content}<|eom_id|>'
```
```diff
@@ -1037,7 +1046,7 @@ def messages2prompt(self, messages, sequence_start=True, **kwargs):
         count = 0
         for message in messages:
             role = message['role']
-            content = message['content']
+            content = get_text(message['content'])
             if role == 'user':
                 count += 1
                 ret += f'[Round {count}]\n\n'
```
```diff
@@ -114,7 +114,7 @@ class ChatCompletionRequest(BaseModel):
     temperature: Optional[float] = 0.7
     top_p: Optional[float] = 1.0
     tools: Optional[List[Tool]] = Field(default=None, examples=[None])
-    tool_choice: Union[ToolChoice, Literal['auto', 'required','none']] = Field(default='auto', examples=['none'])  # noqa
+    tool_choice: Union[ToolChoice, Literal['auto', 'required', 'none']] = Field(default='auto', examples=['none'])  # noqa
     logprobs: Optional[bool] = False
     top_logprobs: Optional[int] = None
     n: Optional[int] = 1
```
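For reference, `tool_choice` accepts either a `ToolChoice` object or one of the three literals; a hypothetical request body (the model name and message are made up):

```python
request = {
    'model': 'internlm2',
    'messages': [{'role': 'user', 'content': 'What is the weather?'}],
    'tool_choice': 'auto',  # or 'required', 'none', or a ToolChoice object
}
```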
```diff
@@ -240,7 +240,6 @@ class CompletionRequest(BaseModel):
     stream_options: Optional[StreamOptions] = Field(default=None,
                                                     examples=[None])
     top_p: Optional[float] = 1.0
-    logprobs: Optional[int] = None
     echo: Optional[bool] = False
     presence_penalty: Optional[float] = 0.0
     frequency_penalty: Optional[float] = 0.0
```

> **Review comment:** Is this as expected?
>
> **Reply:** There is a duplicated `logprobs` field; this removes the extra definition.
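Why a duplicated field is worth removing: in a Python class body the second binding silently shadows the first, so only one definition ever reaches the Pydantic schema. A minimal sketch assuming Pydantic v2 (the types here are made up, not the ones in lmdeploy's protocol file):

```python
from typing import Optional

from pydantic import BaseModel


class Demo(BaseModel):
    logprobs: Optional[bool] = False  # silently shadowed by the line below
    logprobs: Optional[int] = None  # only this definition survives


print(Demo().logprobs)  # -> None
print(Demo.model_fields['logprobs'].annotation)  # -> typing.Optional[int]
```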
> **Review comment:** may get updated
>
> **Reply:** @irexyc May update the docstring as follows:
>
> ```
> """Within the OpenAI API, the content field may be specified as either a string or a list of ChatCompletionContentPartTextParam (defined in openai). When a list is provided, lmdeploy selects the first element to incorporate into the chat template, as the manner in which OpenAI processes lists is not explicitly defined."""
> ```