Skip to content

Commit 78a76b1

Browse files
authored
Native tool call support for Mistral AI API and topic notebook. (microsoft#2135)
* Support for Mistral AI API and topic notebook. * formatting * formatting
1 parent e2b0bc3 commit 78a76b1

File tree

4 files changed

+1042
-13
lines changed

autogen/agentchat/conversable_agent.py

+27-11
Original file line numberDiff line numberDiff line change
@@ -696,8 +696,8 @@ def _print_received_message(self, message: Union[Dict, str], sender: Agent):
696696
id_key = "name"
697697
else:
698698
id_key = "tool_call_id"
699-
700-
func_print = f"***** Response from calling {message['role']} \"{message[id_key]}\" *****"
699+
id = message.get(id_key, "No id found")
700+
func_print = f"***** Response from calling {message['role']} ({id}) *****"
701701
print(colored(func_print, "green"), flush=True)
702702
print(message["content"], flush=True)
703703
print(colored("*" * len(func_print), "green"), flush=True)
@@ -714,7 +714,7 @@ def _print_received_message(self, message: Union[Dict, str], sender: Agent):
714714
if "function_call" in message and message["function_call"]:
715715
function_call = dict(message["function_call"])
716716
func_print = (
717-
f"***** Suggested function Call: {function_call.get('name', '(No function name found)')} *****"
717+
f"***** Suggested function call: {function_call.get('name', '(No function name found)')} *****"
718718
)
719719
print(colored(func_print, "green"), flush=True)
720720
print(
@@ -726,9 +726,9 @@ def _print_received_message(self, message: Union[Dict, str], sender: Agent):
726726
print(colored("*" * len(func_print), "green"), flush=True)
727727
if "tool_calls" in message and message["tool_calls"]:
728728
for tool_call in message["tool_calls"]:
729-
id = tool_call.get("id", "(No id found)")
729+
id = tool_call.get("id", "No tool call id found")
730730
function_call = dict(tool_call.get("function", {}))
731-
func_print = f"***** Suggested tool Call ({id}): {function_call.get('name', '(No function name found)')} *****"
731+
func_print = f"***** Suggested tool call ({id}): {function_call.get('name', '(No function name found)')} *****"
732732
print(colored(func_print, "green"), flush=True)
733733
print(
734734
"Arguments: \n",
@@ -1309,6 +1309,12 @@ def _generate_oai_reply_from_client(self, llm_client, messages, cache) -> Union[
13091309
)
13101310
for tool_call in extracted_response.get("tool_calls") or []:
13111311
tool_call["function"]["name"] = self._normalize_name(tool_call["function"]["name"])
1312+
# Remove id and type if they are not present.
1313+
# This is to make the tool call object compatible with Mistral API.
1314+
if tool_call.get("id") is None:
1315+
tool_call.pop("id")
1316+
if tool_call.get("type") is None:
1317+
tool_call.pop("type")
13121318
return extracted_response
13131319

13141320
async def a_generate_oai_reply(
@@ -1525,7 +1531,6 @@ def generate_tool_calls_reply(
15251531
message = messages[-1]
15261532
tool_returns = []
15271533
for tool_call in message.get("tool_calls", []):
1528-
id = tool_call["id"]
15291534
function_call = tool_call.get("function", {})
15301535
func = self._function_map.get(function_call.get("name", None), None)
15311536
if inspect.iscoroutinefunction(func):
@@ -1543,13 +1548,24 @@ def generate_tool_calls_reply(
15431548
loop.close()
15441549
else:
15451550
_, func_return = self.execute_function(function_call)
1546-
tool_returns.append(
1547-
{
1548-
"tool_call_id": id,
1551+
content = func_return.get("content", "")
1552+
if content is None:
1553+
content = ""
1554+
tool_call_id = tool_call.get("id", None)
1555+
if tool_call_id is not None:
1556+
tool_call_response = {
1557+
"tool_call_id": tool_call_id,
15491558
"role": "tool",
1550-
"content": func_return.get("content", ""),
1559+
"content": content,
15511560
}
1552-
)
1561+
else:
1562+
# Do not include tool_call_id if it is not present.
1563+
# This is to make the tool call object compatible with Mistral API.
1564+
tool_call_response = {
1565+
"role": "tool",
1566+
"content": content,
1567+
}
1568+
tool_returns.append(tool_call_response)
15531569
if tool_returns:
15541570
return True, {
15551571
"role": "tool",

website/.gitignore

+2
Original file line numberDiff line numberDiff line change
@@ -19,6 +19,8 @@ docs/topics/code-execution/*.mdx
1919
docs/topics/task_decomposition.mdx
2020
docs/topics/prompting-and-reasoning/*.mdx
2121
docs/topics/non-openai-models/*.mdx
22+
docs/topics/non-openai-models/**/*.py
23+
docs/topics/non-openai-models/**/*.svg
2224

2325
# Misc
2426
.DS_Store

website/docs/topics/non-openai-models/about-using-nonopenai-models.md

+4-2
Original file line numberDiff line numberDiff line change
@@ -21,7 +21,8 @@ These proxy servers can be cloud-based or running locally within your environmen
2121
By using cloud-based proxy servers, you are able to use models without requiring the hardware
2222
and software to run them.
2323

24-
These providers can host open source/weight models, like [Hugging Face](https://huggingface.co/),
24+
These providers can host open source/weight models, like [Hugging Face](https://huggingface.co/)
25+
and [Mistral AI](https://mistral.ai/),
2526
or their own closed models.
2627

2728
When cloud-based proxy servers provide an OpenAI-compatible API, using them in AutoGen
@@ -32,7 +33,8 @@ authentication which is usually handled through an API key.
3233
Examples of using cloud-based proxy servers providers that have an OpenAI-compatible API
3334
are provided below:
3435

35-
- [together.ai example](/docs/topics/non-openai-models/cloud-togetherai)
36+
- [Together AI example](/docs/topics/non-openai-models/cloud-togetherai)
37+
- [Mistral AI example](/docs/topics/non-openai-models/cloud-mistralai)
3638

3739

3840
### Locally run proxy servers

0 commit comments

Comments (0)