Skip to content

Commit

Permalink
refactor(agents-api): Minor refactors to typespec types
Browse files Browse the repository at this point in the history
Signed-off-by: Diwank Tomer <[email protected]>
  • Loading branch information
Diwank Tomer committed Aug 20, 2024
1 parent 8263aea commit 744e2ef
Show file tree
Hide file tree
Showing 60 changed files with 561 additions and 405 deletions.
10 changes: 5 additions & 5 deletions agents-api/agents_api/autogen/Agents.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,6 @@

class Agent(BaseModel):
model_config = ConfigDict(
extra="allow",
populate_by_name=True,
)
id: Annotated[UUID, Field(json_schema_extra={"readOnly": True})]
Expand All @@ -30,6 +29,7 @@ class Agent(BaseModel):
str,
Field(
"",
max_length=120,
pattern="^[\\p{L}\\p{Nl}\\p{Pattern_Syntax}\\p{Pattern_White_Space}]+[\\p{ID_Start}\\p{Mn}\\p{Mc}\\p{Nd}\\p{Pc}\\p{Pattern_Syntax}\\p{Pattern_White_Space}]*$",
),
]
Expand Down Expand Up @@ -60,14 +60,14 @@ class CreateAgentRequest(BaseModel):
"""

model_config = ConfigDict(
extra="allow",
populate_by_name=True,
)
metadata: dict[str, Any] | None = None
name: Annotated[
str,
Field(
"",
max_length=120,
pattern="^[\\p{L}\\p{Nl}\\p{Pattern_Syntax}\\p{Pattern_White_Space}]+[\\p{ID_Start}\\p{Mn}\\p{Mc}\\p{Nd}\\p{Pc}\\p{Pattern_Syntax}\\p{Pattern_White_Space}]*$",
),
]
Expand All @@ -94,7 +94,6 @@ class CreateAgentRequest(BaseModel):

class CreateOrUpdateAgentRequest(CreateAgentRequest):
model_config = ConfigDict(
extra="allow",
populate_by_name=True,
)
id: UUID
Expand All @@ -103,6 +102,7 @@ class CreateOrUpdateAgentRequest(CreateAgentRequest):
str,
Field(
"",
max_length=120,
pattern="^[\\p{L}\\p{Nl}\\p{Pattern_Syntax}\\p{Pattern_White_Space}]+[\\p{ID_Start}\\p{Mn}\\p{Mc}\\p{Nd}\\p{Pc}\\p{Pattern_Syntax}\\p{Pattern_White_Space}]*$",
),
]
Expand Down Expand Up @@ -133,14 +133,14 @@ class PatchAgentRequest(BaseModel):
"""

model_config = ConfigDict(
extra="allow",
populate_by_name=True,
)
metadata: dict[str, Any] | None = None
name: Annotated[
str,
Field(
"",
max_length=120,
pattern="^[\\p{L}\\p{Nl}\\p{Pattern_Syntax}\\p{Pattern_White_Space}]+[\\p{ID_Start}\\p{Mn}\\p{Mc}\\p{Nd}\\p{Pc}\\p{Pattern_Syntax}\\p{Pattern_White_Space}]*$",
),
]
Expand Down Expand Up @@ -171,14 +171,14 @@ class UpdateAgentRequest(BaseModel):
"""

model_config = ConfigDict(
extra="allow",
populate_by_name=True,
)
metadata: dict[str, Any] | None = None
name: Annotated[
str,
Field(
"",
max_length=120,
pattern="^[\\p{L}\\p{Nl}\\p{Pattern_Syntax}\\p{Pattern_White_Space}]+[\\p{ID_Start}\\p{Mn}\\p{Mc}\\p{Nd}\\p{Pc}\\p{Pattern_Syntax}\\p{Pattern_White_Space}]*$",
),
]
Expand Down
104 changes: 82 additions & 22 deletions agents-api/agents_api/autogen/Chat.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,13 +10,12 @@

from .Common import LogitBias
from .Docs import DocReference
from .Entries import InputChatMLMessage
from .Entries import ChatMLImageContentPart
from .Tools import FunctionTool, NamedToolChoice


class BaseChatOutput(BaseModel):
model_config = ConfigDict(
extra="allow",
populate_by_name=True,
)
index: int
Expand All @@ -32,7 +31,6 @@ class BaseChatOutput(BaseModel):

class BaseChatResponse(BaseModel):
model_config = ConfigDict(
extra="allow",
populate_by_name=True,
)
usage: CompetionUsage | None = None
Expand All @@ -56,7 +54,6 @@ class BaseChatResponse(BaseModel):

class BaseTokenLogProb(BaseModel):
model_config = ConfigDict(
extra="allow",
populate_by_name=True,
)
token: str
Expand All @@ -69,10 +66,9 @@ class BaseTokenLogProb(BaseModel):

class ChatInputData(BaseModel):
model_config = ConfigDict(
extra="allow",
populate_by_name=True,
)
messages: Annotated[list[InputChatMLMessage], Field(min_length=1)]
messages: Annotated[list[Message], Field(min_length=1)]
"""
A list of new input messages comprising the conversation so far.
"""
Expand All @@ -92,18 +88,16 @@ class ChatOutputChunk(BaseChatOutput):
"""

model_config = ConfigDict(
extra="allow",
populate_by_name=True,
)
delta: InputChatMLMessage
delta: Delta
"""
The message generated by the model
"""


class ChunkChatResponse(BaseChatResponse):
model_config = ConfigDict(
extra="allow",
populate_by_name=True,
)
choices: list[ChatOutputChunk]
Expand All @@ -118,7 +112,6 @@ class CompetionUsage(BaseModel):
"""

model_config = ConfigDict(
extra="allow",
populate_by_name=True,
)
completion_tokens: Annotated[
Expand All @@ -143,7 +136,6 @@ class CompetionUsage(BaseModel):

class CompletionResponseFormat(BaseModel):
model_config = ConfigDict(
extra="allow",
populate_by_name=True,
)
type: Literal["text", "json_object"] = "text"
Expand All @@ -152,9 +144,53 @@ class CompletionResponseFormat(BaseModel):
"""


class Content(BaseModel):
    """A plain-text content part of a chat message (discriminated by ``type``)."""

    model_config = ConfigDict(
        # accept population by field name as well as by alias
        populate_by_name=True,
    )
    # the text payload of this content part
    text: str
    type: Literal["text"] = "text"
    """
    The type (fixed to 'text')
    """


class Delta(BaseModel):
    """
    The message generated by the model
    """

    model_config = ConfigDict(
        # accept population by field name as well as by alias
        # (needed because `continue_` is aliased to the keyword "continue")
        populate_by_name=True,
    )
    role: Literal[
        "user",
        "assistant",
        "system",
        "function",
        "function_response",
        "function_call",
        "auto",
    ]
    """
    The role of the message
    """
    # content may be a single string, a list of strings, or a list of
    # structured parts (text parts or image parts — ChatMLImageContentPart
    # is defined in .Entries)
    content: str | list[str] | list[Content | ChatMLImageContentPart]
    """
    The content parts of the message
    """
    name: str | None = None
    """
    Name
    """
    # trailing underscore avoids shadowing the Python keyword; the wire
    # format uses the alias "continue". Field(None, ...) makes it optional.
    continue_: Annotated[StrictBool | None, Field(None, alias="continue")]
    """
    Whether to continue this message or return a new one
    """


class LogProbResponse(BaseModel):
model_config = ConfigDict(
extra="allow",
populate_by_name=True,
)
content: Annotated[list[TokenLogProb] | None, Field(...)]
Expand All @@ -163,9 +199,38 @@ class LogProbResponse(BaseModel):
"""


class Message(BaseModel):
    """A chat message exchanged with the model (input or output)."""

    model_config = ConfigDict(
        # accept population by field name as well as by alias
        # (needed because `continue_` is aliased to the keyword "continue")
        populate_by_name=True,
    )
    role: Literal[
        "user",
        "assistant",
        "system",
        "function",
        "function_response",
        "function_call",
        "auto",
    ]
    """
    The role of the message
    """
    # content may be a single string, a list of strings, or a list of
    # structured parts (text parts or image parts — ChatMLImageContentPart
    # is defined in .Entries)
    content: str | list[str] | list[Content | ChatMLImageContentPart]
    """
    The content parts of the message
    """
    name: str | None = None
    """
    Name
    """
    # trailing underscore avoids shadowing the Python keyword; the wire
    # format uses the alias "continue". Field(None, ...) makes it optional.
    continue_: Annotated[StrictBool | None, Field(None, alias="continue")]
    """
    Whether to continue this message or return a new one
    """


class MessageChatResponse(BaseChatResponse):
model_config = ConfigDict(
extra="allow",
populate_by_name=True,
)
choices: list[SingleChatOutput | MultipleChatOutput]
Expand All @@ -180,15 +245,13 @@ class MultipleChatOutput(BaseChatOutput):
"""

model_config = ConfigDict(
extra="allow",
populate_by_name=True,
)
messages: list[InputChatMLMessage]
messages: list[Message]


class OpenAISettings(BaseModel):
model_config = ConfigDict(
extra="allow",
populate_by_name=True,
)
frequency_penalty: Annotated[float | None, Field(None, ge=-2.0, le=2.0)]
Expand All @@ -215,23 +278,20 @@ class SingleChatOutput(BaseChatOutput):
"""

model_config = ConfigDict(
extra="allow",
populate_by_name=True,
)
message: InputChatMLMessage
message: Message


class TokenLogProb(BaseTokenLogProb):
model_config = ConfigDict(
extra="allow",
populate_by_name=True,
)
top_logprobs: list[BaseTokenLogProb]


class ChatInput(ChatInputData):
model_config = ConfigDict(
extra="allow",
populate_by_name=True,
)
remember: Annotated[StrictBool, Field(False, json_schema_extra={"readOnly": True})]
Expand All @@ -250,6 +310,7 @@ class ChatInput(ChatInputData):
str | None,
Field(
None,
max_length=120,
pattern="^[\\p{L}\\p{Nl}\\p{Pattern_Syntax}\\p{Pattern_White_Space}]+[\\p{ID_Start}\\p{Mn}\\p{Mc}\\p{Nd}\\p{Pc}\\p{Pattern_Syntax}\\p{Pattern_White_Space}]*$",
),
]
Expand Down Expand Up @@ -320,7 +381,6 @@ class DefaultChatSettings(OpenAISettings):
"""

model_config = ConfigDict(
extra="allow",
populate_by_name=True,
)
repetition_penalty: Annotated[float | None, Field(None, ge=0.0, le=2.0)]
Expand All @@ -339,13 +399,13 @@ class DefaultChatSettings(OpenAISettings):

class ChatSettings(DefaultChatSettings):
model_config = ConfigDict(
extra="allow",
populate_by_name=True,
)
model: Annotated[
str | None,
Field(
None,
max_length=120,
pattern="^[\\p{L}\\p{Nl}\\p{Pattern_Syntax}\\p{Pattern_White_Space}]+[\\p{ID_Start}\\p{Mn}\\p{Mc}\\p{Nd}\\p{Pc}\\p{Pattern_Syntax}\\p{Pattern_White_Space}]*$",
),
]
Expand Down
3 changes: 0 additions & 3 deletions agents-api/agents_api/autogen/Common.py
Original file line number Diff line number Diff line change
Expand Up @@ -38,7 +38,6 @@ class Offset(RootModel[int]):

class ResourceCreatedResponse(BaseModel):
model_config = ConfigDict(
extra="allow",
populate_by_name=True,
)
id: UUID
Expand All @@ -57,7 +56,6 @@ class ResourceCreatedResponse(BaseModel):

class ResourceDeletedResponse(BaseModel):
model_config = ConfigDict(
extra="allow",
populate_by_name=True,
)
id: UUID
Expand All @@ -76,7 +74,6 @@ class ResourceDeletedResponse(BaseModel):

class ResourceUpdatedResponse(BaseModel):
model_config = ConfigDict(
extra="allow",
populate_by_name=True,
)
id: UUID
Expand Down
Loading

0 comments on commit 744e2ef

Please sign in to comment.