Native tool call support for Mistral AI API and topic notebook. #2135

Merged · 4 commits · Mar 25, 2024
38 changes: 27 additions & 11 deletions autogen/agentchat/conversable_agent.py
@@ -698,8 +698,8 @@ def _print_received_message(self, message: Union[Dict, str], sender: Agent):
                 id_key = "name"
             else:
                 id_key = "tool_call_id"
-
-            func_print = f"***** Response from calling {message['role']} \"{message[id_key]}\" *****"
+            id = message.get(id_key, "No id found")
+            func_print = f"***** Response from calling {message['role']} ({id}) *****"
             print(colored(func_print, "green"), flush=True)
             print(message["content"], flush=True)
             print(colored("*" * len(func_print), "green"), flush=True)
@@ -716,7 +716,7 @@ def _print_received_message(self, message: Union[Dict, str], sender: Agent):
             if "function_call" in message and message["function_call"]:
                 function_call = dict(message["function_call"])
                 func_print = (
-                    f"***** Suggested function Call: {function_call.get('name', '(No function name found)')} *****"
+                    f"***** Suggested function call: {function_call.get('name', '(No function name found)')} *****"
                 )
                 print(colored(func_print, "green"), flush=True)
                 print(
@@ -728,9 +728,9 @@ def _print_received_message(self, message: Union[Dict, str], sender: Agent):
                 print(colored("*" * len(func_print), "green"), flush=True)
             if "tool_calls" in message and message["tool_calls"]:
                 for tool_call in message["tool_calls"]:
-                    id = tool_call.get("id", "(No id found)")
+                    id = tool_call.get("id", "No tool call id found")
                     function_call = dict(tool_call.get("function", {}))
-                    func_print = f"***** Suggested tool Call ({id}): {function_call.get('name', '(No function name found)')} *****"
+                    func_print = f"***** Suggested tool call ({id}): {function_call.get('name', '(No function name found)')} *****"
                     print(colored(func_print, "green"), flush=True)
                     print(
                         "Arguments: \n",
@@ -1311,6 +1311,12 @@ def _generate_oai_reply_from_client(self, llm_client, messages, cache) -> Union[
                 )
             for tool_call in extracted_response.get("tool_calls") or []:
                 tool_call["function"]["name"] = self._normalize_name(tool_call["function"]["name"])
+                # Remove id and type if they are not present.
+                # This is to make the tool call object compatible with Mistral API.
+                if tool_call.get("id") is None:
+                    tool_call.pop("id")
+                if tool_call.get("type") is None:
+                    tool_call.pop("type")
         return extracted_response

     async def a_generate_oai_reply(
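The new stripping step can be exercised in isolation. A minimal sketch, assuming a client that returns `None` for `id` and `type` (the dict below is hypothetical; the sketch uses a defaulted `pop(key, None)` so it also tolerates a key that is missing entirely):

```python
# Hypothetical tool call as an OpenAI-compatible client might return it,
# with "id" and "type" present but set to None.
tool_call = {
    "id": None,
    "type": None,
    "function": {"name": "get_weather", "arguments": '{"city": "Paris"}'},
}

# Same idea as the merged hunk: drop None-valued keys so the serialized
# tool call validates against the Mistral API schema.
for key in ("id", "type"):
    if tool_call.get(key) is None:
        tool_call.pop(key, None)

assert tool_call == {
    "function": {"name": "get_weather", "arguments": '{"city": "Paris"}'}
}
```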
@@ -1527,7 +1533,6 @@ def generate_tool_calls_reply(
         message = messages[-1]
         tool_returns = []
         for tool_call in message.get("tool_calls", []):
-            id = tool_call["id"]
             function_call = tool_call.get("function", {})
             func = self._function_map.get(function_call.get("name", None), None)
             if inspect.iscoroutinefunction(func):
@@ -1545,13 +1550,24 @@ def generate_tool_calls_reply(
                 loop.close()
             else:
                 _, func_return = self.execute_function(function_call)
-            tool_returns.append(
-                {
-                    "tool_call_id": id,
+            content = func_return.get("content", "")
+            if content is None:
+                content = ""
+            tool_call_id = tool_call.get("id", None)
+            if tool_call_id is not None:
+                tool_call_response = {
+                    "tool_call_id": tool_call_id,
                     "role": "tool",
-                    "content": func_return.get("content", ""),
+                    "content": content,
                 }
-            )
+            else:
+                # Do not include tool_call_id if it is not present.
+                # This is to make the tool call object compatible with Mistral API.
+                tool_call_response = {
+                    "role": "tool",
+                    "content": content,
+                }
+            tool_returns.append(tool_call_response)
         if tool_returns:
             return True, {
                 "role": "tool",
2 changes: 2 additions & 0 deletions website/.gitignore
@@ -19,6 +19,8 @@ docs/topics/code-execution/*.mdx
 docs/topics/task_decomposition.mdx
 docs/topics/prompting-and-reasoning/*.mdx
 docs/topics/non-openai-models/*.mdx
+docs/topics/non-openai-models/**/*.py
+docs/topics/non-openai-models/**/*.svg

 # Misc
 .DS_Store
@@ -21,7 +21,8 @@ These proxy servers can be cloud-based or running locally within your environment
 By using cloud-based proxy servers, you are able to use models without requiring the hardware
 and software to run them.

-These providers can host open source/weight models, like [Hugging Face](https://huggingface.co/),
+These providers can host open source/weight models, like [Hugging Face](https://huggingface.co/)
+and [Mistral AI](https://mistral.ai/),
 or their own closed models.

 When cloud-based proxy servers provide an OpenAI-compatible API, using them in AutoGen
@@ -32,7 +33,8 @@ authentication which is usually handled through an API key.
 Examples of using cloud-based proxy servers providers that have an OpenAI-compatible API
 are provided below:

-- [together.ai example](/docs/topics/non-openai-models/cloud-togetherai)
+- [Together AI example](/docs/topics/non-openai-models/cloud-togetherai)
+- [Mistral AI example](/docs/topics/non-openai-models/cloud-mistralai)


 ### Locally run proxy servers
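To make the docs change concrete: with an OpenAI-compatible cloud endpoint, pointing AutoGen at it is typically just a matter of setting the base URL, API key, and model in the config list. A minimal sketch; the endpoint URL, model name, and environment variable below are illustrative assumptions, not taken from this PR:

```python
import os

import autogen

# Hypothetical OpenAI-compatible configuration for Mistral AI's cloud API;
# base_url, model, and the env var name are assumptions for illustration.
config_list = [
    {
        "model": "mistral-large-latest",
        "api_key": os.environ["MISTRAL_API_KEY"],
        "base_url": "https://api.mistral.ai/v1",
    }
]

# The agent then uses the proxy server exactly as it would use OpenAI.
assistant = autogen.AssistantAgent(
    name="assistant",
    llm_config={"config_list": config_list},
)
```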