diff --git a/tests/dotnet/azure-openai/Program.cs b/tests/dotnet/azure-openai/Program.cs index d6987cb1..6ba44270 100644 --- a/tests/dotnet/azure-openai/Program.cs +++ b/tests/dotnet/azure-openai/Program.cs @@ -271,17 +271,13 @@ static async Task RunPrototype() }; options.Tools.Add(weatherTool); - // Derive from options.Tools — the same collection passed to CompleteChatAsync (OpenAI function-calling format) var toolDefinitionsJson = JsonSerializer.Serialize( options.Tools.Select(t => new Dictionary<string, object> { ["type"] = "function", - ["function"] = new - { - name = t.FunctionName, - description = t.FunctionDescription, - parameters = JsonSerializer.Deserialize<object>(t.FunctionParameters) - } + ["name"] = t.FunctionName, + ["description"] = t.FunctionDescription, + ["parameters"] = JsonSerializer.Deserialize<object>(t.FunctionParameters) }).ToArray() ); activity?.SetTag("gen_ai.operation.name", "chat"); diff --git a/tests/dotnet/extensions-ai/Program.cs b/tests/dotnet/extensions-ai/Program.cs index 6e5b030d..1785fa84 100644 --- a/tests/dotnet/extensions-ai/Program.cs +++ b/tests/dotnet/extensions-ai/Program.cs @@ -270,17 +270,13 @@ static async Task RunPrototype() }; options.Tools.Add(weatherTool); - // Derive from options.Tools — the same collection passed to CompleteChatAsync (OpenAI function-calling format) var toolDefinitionsJson = JsonSerializer.Serialize( options.Tools.Select(t => new Dictionary<string, object> { ["type"] = "function", - ["function"] = new - { - name = t.FunctionName, - description = t.FunctionDescription, - parameters = JsonSerializer.Deserialize<object>(t.FunctionParameters) - } + ["name"] = t.FunctionName, + ["description"] = t.FunctionDescription, + ["parameters"] = JsonSerializer.Deserialize<object>(t.FunctionParameters) }).ToArray() ); activity?.SetTag("gen_ai.operation.name", "chat"); diff --git a/tests/dotnet/semantic-kernel/Program.cs b/tests/dotnet/semantic-kernel/Program.cs index 7b938777..5c19f15b 100644 --- a/tests/dotnet/semantic-kernel/Program.cs +++ 
b/tests/dotnet/semantic-kernel/Program.cs @@ -292,28 +292,23 @@ static async Task RunPrototype() using (var activity = s_manualActivitySource.StartActivity("chat gpt-4o-mini")) { var endpoint = new Uri(mockBaseUrl); - // Semantic Kernel converts plugins to OpenAI function-calling format - // before sending to the API, so we mirror that shape here. var toolDefinitionsJson = JsonSerializer.Serialize( kernel.Plugins .SelectMany(p => p) .Select(f => new Dictionary<string, object> { ["type"] = "function", - ["function"] = new + ["name"] = f.Name, + ["description"] = f.Description, + ["parameters"] = new { - name = f.Name, - description = f.Description, - parameters = new - { - type = "object", - properties = f.Metadata.Parameters.ToDictionary( - p => p.Name, - p => new { type = ToJsonSchemaType(p.ParameterType) }), - required = f.Metadata.Parameters - .Where(p => p.IsRequired) - .Select(p => p.Name) - } + type = "object", + properties = f.Metadata.Parameters.ToDictionary( + p => p.Name, + p => new { type = ToJsonSchemaType(p.ParameterType) }), + required = f.Metadata.Parameters + .Where(p => p.IsRequired) + .Select(p => p.Name) } })); activity?.SetTag("gen_ai.operation.name", "chat"); diff --git a/tests/java/aws-bedrock/src/main/java/com/example/bedrocktest/AwsBedrockPrototypeTest.java b/tests/java/aws-bedrock/src/main/java/com/example/bedrocktest/AwsBedrockPrototypeTest.java index ac427013..b30302bf 100644 --- a/tests/java/aws-bedrock/src/main/java/com/example/bedrocktest/AwsBedrockPrototypeTest.java +++ b/tests/java/aws-bedrock/src/main/java/com/example/bedrocktest/AwsBedrockPrototypeTest.java @@ -161,9 +161,9 @@ static void runConverseToolCall(BedrockRuntimeClient client) { ToolConfiguration toolConfig = ToolConfiguration.builder() .tools(Tool.builder().toolSpec(toolSpec).build()) .build(); - String toolDefinitionsJson = "[{\"toolSpec\":{\"name\":\"get_weather\",\"description\":\"Get the current weather\"," + - 
"\"inputSchema\":{\"json\":{\"type\":\"object\",\"properties\":{\"location\":{\"type\":\"string\"," + - "\"description\":\"City name\"}},\"required\":[\"location\"]}}}}]"; + String toolDefinitionsJson = "[{\"type\":\"function\",\"name\":\"get_weather\",\"description\":\"Get the current weather\"," + + "\"parameters\":{\"type\":\"object\",\"properties\":{\"location\":{\"type\":\"string\"," + + "\"description\":\"City name\"}},\"required\":[\"location\"]}}]"; Span span = tracer.spanBuilder("chat " + modelId).startSpan(); try { try (var scope = span.makeCurrent()) { diff --git a/tests/java/openai/src/main/java/com/example/openaitest/OpenAiPrototypeTest.java b/tests/java/openai/src/main/java/com/example/openaitest/OpenAiPrototypeTest.java index 80184029..d10edbba 100644 --- a/tests/java/openai/src/main/java/com/example/openaitest/OpenAiPrototypeTest.java +++ b/tests/java/openai/src/main/java/com/example/openaitest/OpenAiPrototypeTest.java @@ -215,10 +215,18 @@ static void runChatToolCall(OpenAIClient client) { .build()) .build()) .build(); - // Derive from params.tools() — the same tools passed to the API call String toolDefinitionsJson; try { - toolDefinitionsJson = ObjectMappers.jsonMapper().writeValueAsString(params.tools().orElse(List.of())); + var toolDefs = params.tools().orElse(List.of()).stream().map(tool -> { + var fn = tool.asFunction().function(); + var def = new java.util.LinkedHashMap<String, Object>(); + def.put("type", "function"); + def.put("name", fn.name()); + fn.description().ifPresent(d -> def.put("description", d)); + fn.parameters().ifPresent(p -> def.put("parameters", p)); + return def; + }).toList(); + toolDefinitionsJson = ObjectMappers.jsonMapper().writeValueAsString(toolDefs); } catch (JsonProcessingException e) { throw new RuntimeException(e); } diff --git a/tests/js/anthropic/test_prototype.ts b/tests/js/anthropic/test_prototype.ts index 61768b7b..6adb3750 100644 --- a/tests/js/anthropic/test_prototype.ts +++ b/tests/js/anthropic/test_prototype.ts @@ 
-147,7 +147,12 @@ async function main() { span.setAttribute("gen_ai.provider.name", "anthropic"); span.setAttribute("gen_ai.request.model", requestModel); span.setAttribute("gen_ai.request.max_tokens", requestMaxTokens); - span.setAttribute("gen_ai.tool.definitions", JSON.stringify([requestTool])); + span.setAttribute("gen_ai.tool.definitions", JSON.stringify([{ + type: "function", + name: requestTool.name, + description: requestTool.description, + parameters: requestTool.input_schema, + }])); const resp = await client.messages.create({ model: requestModel, max_tokens: requestMaxTokens, diff --git a/tests/js/aws-bedrock/test_prototype.ts b/tests/js/aws-bedrock/test_prototype.ts index e9c598f3..d95bccfa 100644 --- a/tests/js/aws-bedrock/test_prototype.ts +++ b/tests/js/aws-bedrock/test_prototype.ts @@ -109,7 +109,12 @@ async function main() { }, }; const toolConfig = { tools: [toolSpec] }; - span.setAttribute("gen_ai.tool.definitions", JSON.stringify(toolConfig.tools)); + span.setAttribute("gen_ai.tool.definitions", JSON.stringify([{ + type: "function", + name: toolSpec.toolSpec.name, + description: toolSpec.toolSpec.description, + parameters: toolSpec.toolSpec.inputSchema.json, + }])); const messages = [ { role: "user" as const, content: [{ text: "What's the weather in Seattle?" 
}] }, ]; diff --git a/tests/js/azure-openai/test_prototype.ts b/tests/js/azure-openai/test_prototype.ts index 507d4daf..96d0428c 100644 --- a/tests/js/azure-openai/test_prototype.ts +++ b/tests/js/azure-openai/test_prototype.ts @@ -152,7 +152,12 @@ async function main() { span.setAttribute("gen_ai.operation.name", "chat"); span.setAttribute("gen_ai.provider.name", "openai"); span.setAttribute("gen_ai.request.model", requestModel); - span.setAttribute("gen_ai.tool.definitions", JSON.stringify([requestTool])); + span.setAttribute("gen_ai.tool.definitions", JSON.stringify([{ + type: requestTool.type, + name: requestTool.function.name, + description: requestTool.function.description, + parameters: requestTool.function.parameters, + }])); span.setAttribute("server.address", endpoint.hostname); if (endpoint.port) { span.setAttribute("server.port", Number(endpoint.port)); diff --git a/tests/js/cohere/test_prototype.ts b/tests/js/cohere/test_prototype.ts index 3a08ca8a..c5af1a50 100644 --- a/tests/js/cohere/test_prototype.ts +++ b/tests/js/cohere/test_prototype.ts @@ -86,7 +86,18 @@ async function main() { location: { description: "City name", type: "str" as const, required: true }, }, }; - span.setAttribute("gen_ai.tool.definitions", JSON.stringify([requestTool])); + span.setAttribute("gen_ai.tool.definitions", JSON.stringify([{ + type: "function", + name: requestTool.name, + description: requestTool.description, + parameters: { + type: "object", + properties: Object.fromEntries( + Object.entries(requestTool.parameterDefinitions).map(([k, v]: [string, any]) => [k, { type: v.type, description: v.description }]) + ), + required: Object.entries(requestTool.parameterDefinitions).filter(([, v]: [string, any]) => v.required).map(([k]) => k), + }, + }])); const resp = await client.chat({ model: requestModel, message: "What's the weather in Seattle?", diff --git a/tests/js/langchain/test_prototype.ts b/tests/js/langchain/test_prototype.ts index f072cb1f..5f506bca 100644 --- 
a/tests/js/langchain/test_prototype.ts +++ b/tests/js/langchain/test_prototype.ts @@ -129,10 +129,9 @@ async function main() { }), }, ]; - // Derive from the same tools array passed to bindTools (LangChain StructuredToolParams format) span.setAttribute( "gen_ai.tool.definitions", - JSON.stringify(tools.map((t) => ({ name: t.name, description: t.description, schema: toJSONSchema(t.schema) }))), + JSON.stringify(tools.map((t) => ({ type: "function", name: t.name, description: t.description, parameters: toJSONSchema(t.schema) }))), ); const llmWithTools = llm.bindTools(tools, { tool_choice: "auto" }); diff --git a/tests/js/llamaindex/test_prototype.ts b/tests/js/llamaindex/test_prototype.ts index c5961336..cfcbf357 100644 --- a/tests/js/llamaindex/test_prototype.ts +++ b/tests/js/llamaindex/test_prototype.ts @@ -98,7 +98,12 @@ async function main() { }, }, }; - span.setAttribute("gen_ai.tool.definitions", JSON.stringify([requestTool])); + span.setAttribute("gen_ai.tool.definitions", JSON.stringify([{ + type: requestTool.type, + name: requestTool.function.name, + description: requestTool.function.description, + parameters: requestTool.function.parameters, + }])); const resp = await llm.chat({ messages: [{ role: "user", content: "What's the weather in Seattle?" 
}], additionalChatOptions: { tools: [requestTool] }, diff --git a/tests/js/openai/test_prototype.ts b/tests/js/openai/test_prototype.ts index ebc21e2f..1cdf3b0b 100644 --- a/tests/js/openai/test_prototype.ts +++ b/tests/js/openai/test_prototype.ts @@ -148,7 +148,12 @@ async function main() { span.setAttribute("gen_ai.operation.name", "chat"); span.setAttribute("gen_ai.provider.name", "openai"); span.setAttribute("gen_ai.request.model", requestModel); - span.setAttribute("gen_ai.tool.definitions", JSON.stringify([requestTool])); + span.setAttribute("gen_ai.tool.definitions", JSON.stringify([{ + type: requestTool.type, + name: requestTool.function.name, + description: requestTool.function.description, + parameters: requestTool.function.parameters, + }])); span.setAttribute("server.address", endpoint.hostname); if (endpoint.port) { span.setAttribute("server.port", Number(endpoint.port)); diff --git a/tests/js/vercel-ai/test_prototype.ts b/tests/js/vercel-ai/test_prototype.ts index cea6eef3..3c8514c2 100644 --- a/tests/js/vercel-ai/test_prototype.ts +++ b/tests/js/vercel-ai/test_prototype.ts @@ -146,9 +146,10 @@ async function main() { "gen_ai.tool.definitions", JSON.stringify( Object.entries(tools).map(([name, t]) => ({ + type: "function", name: name, description: t.description, - inputSchema: toJSONSchema(t.inputSchema!), + parameters: toJSONSchema(t.inputSchema!), })), ), ); diff --git a/tests/js/vertexai/test_prototype.ts b/tests/js/vertexai/test_prototype.ts index d3eaccc5..04f47509 100644 --- a/tests/js/vertexai/test_prototype.ts +++ b/tests/js/vertexai/test_prototype.ts @@ -131,7 +131,14 @@ async function main() { }, }], }; - span.setAttribute("gen_ai.tool.definitions", JSON.stringify([requestTool])); + span.setAttribute("gen_ai.tool.definitions", JSON.stringify( + requestTool.functionDeclarations.map((fn: any) => ({ + type: "function", + name: fn.name, + description: fn.description, + parameters: fn.parameters, + })) + )); const result = await 
model.generateContent({ contents: [{ role: "user", parts: [{ text: "What's the weather in Seattle?" }] }], tools: [requestTool], diff --git a/tests/python/anthropic/test_prototype.py b/tests/python/anthropic/test_prototype.py index 2270ae5c..b42d0a3c 100644 --- a/tests/python/anthropic/test_prototype.py +++ b/tests/python/anthropic/test_prototype.py @@ -114,7 +114,12 @@ def run_chat_tool_call_prototype(client): span.set_attribute("gen_ai.operation.name", "chat") span.set_attribute("gen_ai.provider.name", "anthropic") span.set_attribute("gen_ai.request.model", request_model) - span.set_attribute("gen_ai.tool.definitions", json.dumps([request_tool])) + span.set_attribute("gen_ai.tool.definitions", json.dumps([{ + "type": "function", + "name": request_tool["name"], + "description": request_tool["description"], + "parameters": request_tool["input_schema"], + }])) resp = client.messages.create( model=request_model, max_tokens=100, diff --git a/tests/python/autogen/test_prototype.py b/tests/python/autogen/test_prototype.py index bde57f68..08914859 100644 --- a/tests/python/autogen/test_prototype.py +++ b/tests/python/autogen/test_prototype.py @@ -101,7 +101,15 @@ def run_chat_tool_call_prototype(): span.set_attribute("gen_ai.operation.name", "chat") span.set_attribute("gen_ai.provider.name", "openai") span.set_attribute("gen_ai.request.model", request_model) - span.set_attribute("gen_ai.tool.definitions", json.dumps(tools)) + span.set_attribute("gen_ai.tool.definitions", json.dumps([ + { + "type": t["type"], + "name": t["function"]["name"], + "description": t["function"]["description"], + "parameters": t["function"]["parameters"], + } + for t in tools + ])) if endpoint.hostname: span.set_attribute("server.address", endpoint.hostname) if endpoint.port is not None: diff --git a/tests/python/aws-bedrock/test_prototype.py b/tests/python/aws-bedrock/test_prototype.py index af7ec004..bb4e2106 100644 --- a/tests/python/aws-bedrock/test_prototype.py +++ 
b/tests/python/aws-bedrock/test_prototype.py @@ -113,7 +113,12 @@ def run_converse_tool_call_prototype(client): span.set_attribute("gen_ai.operation.name", "chat") span.set_attribute("gen_ai.provider.name", "aws.bedrock") span.set_attribute("gen_ai.request.model", request_model) - span.set_attribute("gen_ai.tool.definitions", json.dumps(tool_config["tools"])) + span.set_attribute("gen_ai.tool.definitions", json.dumps([{ + "type": "function", + "name": tool_spec["toolSpec"]["name"], + "description": tool_spec["toolSpec"]["description"], + "parameters": tool_spec["toolSpec"]["inputSchema"]["json"], + }])) messages = [ { "role": "user", diff --git a/tests/python/azure-ai-foundry/common.py b/tests/python/azure-ai-foundry/common.py index 2cd3d62e..38ce2e84 100644 --- a/tests/python/azure-ai-foundry/common.py +++ b/tests/python/azure-ai-foundry/common.py @@ -84,7 +84,15 @@ def run_invoke_agent(client): span.set_attribute("gen_ai.agent.id", agent.id) span.set_attribute("gen_ai.agent.name", agent.name or "") span.set_attribute("gen_ai.request.model", AGENT_MODEL) - span.set_attribute("gen_ai.tool.definitions", json.dumps(tool_defs)) + span.set_attribute("gen_ai.tool.definitions", json.dumps([ + { + "type": t["type"], + "name": t["function"]["name"], + "description": t["function"]["description"], + "parameters": t["function"]["parameters"], + } + for t in tool_defs + ])) span.set_attribute("server.address", _SERVER_ADDRESS) span.set_attribute("server.port", _SERVER_PORT) try: diff --git a/tests/python/azure-ai-inference/test_prototype.py b/tests/python/azure-ai-inference/test_prototype.py index 0f19851c..7c3684c5 100644 --- a/tests/python/azure-ai-inference/test_prototype.py +++ b/tests/python/azure-ai-inference/test_prototype.py @@ -110,11 +110,9 @@ def run_chat_tool_call_prototype(client): span.set_attribute("gen_ai.request.model", request_model) span.set_attribute("gen_ai.tool.definitions", json.dumps([{ "type": "function", - "function": { - "name": tool.function.name, - 
"description": tool.function.description, - "parameters": tool.function.parameters, - }, + "name": tool.function.name, + "description": tool.function.description, + "parameters": tool.function.parameters, }])) if endpoint.hostname: span.set_attribute("server.address", endpoint.hostname) diff --git a/tests/python/azure-openai/test_prototype.py b/tests/python/azure-openai/test_prototype.py index 9882a95f..385ae768 100644 --- a/tests/python/azure-openai/test_prototype.py +++ b/tests/python/azure-openai/test_prototype.py @@ -147,7 +147,15 @@ def run_chat_tool_call_prototype(client): span.set_attribute("gen_ai.operation.name", "chat") span.set_attribute("gen_ai.provider.name", "openai") span.set_attribute("gen_ai.request.model", request_model) - span.set_attribute("gen_ai.tool.definitions", json.dumps(tools)) + span.set_attribute("gen_ai.tool.definitions", json.dumps([ + { + "type": t["type"], + "name": t["function"]["name"], + "description": t["function"]["description"], + "parameters": t["function"]["parameters"], + } + for t in tools + ])) if endpoint.hostname: span.set_attribute("server.address", endpoint.hostname) if endpoint.port is not None: diff --git a/tests/python/cohere/test_prototype.py b/tests/python/cohere/test_prototype.py index 7a4a3122..a6b4bd86 100644 --- a/tests/python/cohere/test_prototype.py +++ b/tests/python/cohere/test_prototype.py @@ -98,7 +98,15 @@ def run_chat_tool_call(client): span.set_attribute("gen_ai.operation.name", "chat") span.set_attribute("gen_ai.provider.name", "cohere") span.set_attribute("gen_ai.request.model", request_model) - span.set_attribute("gen_ai.tool.definitions", json.dumps(tools)) + span.set_attribute("gen_ai.tool.definitions", json.dumps([ + { + "type": t["type"], + "name": t["function"]["name"], + "description": t["function"]["description"], + "parameters": t["function"]["parameters"], + } + for t in tools + ])) resp = client.chat( model=request_model, messages=[{"role": "user", "content": "What's the weather in 
Seattle?"}], diff --git a/tests/python/crewai/test_prototype.py b/tests/python/crewai/test_prototype.py index bf4c5be6..4ec648cd 100644 --- a/tests/python/crewai/test_prototype.py +++ b/tests/python/crewai/test_prototype.py @@ -54,16 +54,12 @@ def get_weather(location: str) -> str: span.set_attribute("gen_ai.operation.name", "chat") span.set_attribute("gen_ai.provider.name", "openai") span.set_attribute("gen_ai.request.model", request_model) - # CrewAI converts tools to OpenAI function-calling format before - # passing them to litellm, so we mirror that shape here. span.set_attribute("gen_ai.tool.definitions", json.dumps([ { "type": "function", - "function": { - "name": t.name, - "description": t.func.__doc__, - "parameters": t.args_schema.model_json_schema(), - }, + "name": t.name, + "description": t.func.__doc__, + "parameters": t.args_schema.model_json_schema(), } for t in tools ])) diff --git a/tests/python/dspy/test_prototype.py b/tests/python/dspy/test_prototype.py index efa3c708..b0088002 100644 --- a/tests/python/dspy/test_prototype.py +++ b/tests/python/dspy/test_prototype.py @@ -147,6 +147,7 @@ def run_tool_call(): dspy.configure(lm=lm) messages = [{"role": "user", "content": prompt_text}] tool_definition = { + "type": "function", "name": "get_weather", "description": "Get the current weather for a location.", "parameters": { @@ -157,14 +158,18 @@ def run_tool_call(): } request_tool = { "type": "function", - "function": tool_definition, + "function": { + "name": tool_definition["name"], + "description": tool_definition["description"], + "parameters": tool_definition["parameters"], + }, } with _prototype_tracer.start_as_current_span("chat gpt-4o-mini") as span: span.set_attribute("gen_ai.operation.name", "chat") span.set_attribute("gen_ai.provider.name", "openai") span.set_attribute("gen_ai.request.model", request_model) - span.set_attribute("gen_ai.tool.definitions", json.dumps([request_tool])) + span.set_attribute("gen_ai.tool.definitions", 
json.dumps([tool_definition])) result = lm( messages=messages, tools=[request_tool], diff --git a/tests/python/google-adk/test_prototype.py b/tests/python/google-adk/test_prototype.py index 3b2e02d4..ddf976c9 100644 --- a/tests/python/google-adk/test_prototype.py +++ b/tests/python/google-adk/test_prototype.py @@ -121,7 +121,15 @@ async def _run(): span.set_attribute("gen_ai.provider.name", "google_genai") span.set_attribute("gen_ai.conversation.id", session.id) span.set_attribute("gen_ai.request.model", request_model) - span.set_attribute("gen_ai.tool.definitions", json.dumps(tool_defs)) + span.set_attribute("gen_ai.tool.definitions", json.dumps([ + { + "type": "function", + "name": t["name"], + "description": t["description"], + "parameters": t["parameters"], + } + for t in tool_defs + ])) usage_metadata = None finish_reason = None try: diff --git a/tests/python/google-genai/test_prototype.py b/tests/python/google-genai/test_prototype.py index 1b1deda3..16b18ddc 100644 --- a/tests/python/google-genai/test_prototype.py +++ b/tests/python/google-genai/test_prototype.py @@ -115,17 +115,16 @@ def run_chat_tool_call(): span.set_attribute("gen_ai.provider.name", "google_genai") span.set_attribute("gen_ai.request.model", request_model) span.set_attribute("gen_ai.tool.definitions", json.dumps([{ - "function_declarations": [{ - "name": "get_weather", - "description": "Get the current weather", - "parameters": { - "type": "object", - "properties": { - "location": {"type": "string", "description": "City name"}, - }, - "required": ["location"], + "type": "function", + "name": "get_weather", + "description": "Get the current weather", + "parameters": { + "type": "object", + "properties": { + "location": {"type": "string", "description": "City name"}, }, - }] + "required": ["location"], + }, }])) response = client.models.generate_content( model=request_model, diff --git a/tests/python/groq/test_prototype.py b/tests/python/groq/test_prototype.py index 4e67633a..496dbdbe 100644 
--- a/tests/python/groq/test_prototype.py +++ b/tests/python/groq/test_prototype.py @@ -121,7 +121,15 @@ def run_chat_tool_call_prototype(client): span.set_attribute("gen_ai.operation.name", "chat") span.set_attribute("gen_ai.provider.name", "groq") span.set_attribute("gen_ai.request.model", request_model) - span.set_attribute("gen_ai.tool.definitions", json.dumps(tools)) + span.set_attribute("gen_ai.tool.definitions", json.dumps([ + { + "type": t["type"], + "name": t["function"]["name"], + "description": t["function"]["description"], + "parameters": t["function"]["parameters"], + } + for t in tools + ])) resp = client.chat.completions.create( model=request_model, messages=[{"role": "user", "content": "What's the weather in Seattle?"}], diff --git a/tests/python/haystack/test_prototype.py b/tests/python/haystack/test_prototype.py index c9d5811a..d90e2bef 100644 --- a/tests/python/haystack/test_prototype.py +++ b/tests/python/haystack/test_prototype.py @@ -119,6 +119,7 @@ def get_weather(location: str) -> str: }, ) tool_definition = { + "type": "function", "name": weather_tool.name, "description": weather_tool.description, "parameters": weather_tool.parameters, diff --git a/tests/python/instructor/test_prototype.py b/tests/python/instructor/test_prototype.py index dcf2c3f0..9a5fe069 100644 --- a/tests/python/instructor/test_prototype.py +++ b/tests/python/instructor/test_prototype.py @@ -101,7 +101,15 @@ class WeatherRequest(BaseModel): span.set_attribute("gen_ai.operation.name", "chat") span.set_attribute("gen_ai.provider.name", "openai") span.set_attribute("gen_ai.request.model", request_model) - span.set_attribute("gen_ai.tool.definitions", json.dumps(tools)) + span.set_attribute("gen_ai.tool.definitions", json.dumps([ + { + "type": t["type"], + "name": t["function"]["name"], + "description": t["function"]["description"], + "parameters": t["function"]["parameters"], + } + for t in tools + ])) resp, completion = client.chat.completions.create_with_completion( 
model=request_model, messages=[{"role": "user", "content": "What's the weather in Seattle?"}], diff --git a/tests/python/langchain/test_prototype.py b/tests/python/langchain/test_prototype.py index 4b0a31cb..123e1d41 100644 --- a/tests/python/langchain/test_prototype.py +++ b/tests/python/langchain/test_prototype.py @@ -120,9 +120,10 @@ def get_weather(location: str) -> str: return "Sunny, 72°F" tool_definition = { + "type": "function", "name": get_weather.name, "description": get_weather.description, - "args_schema": get_weather.args_schema.model_json_schema(), + "parameters": get_weather.args_schema.model_json_schema(), } with _prototype_tracer.start_as_current_span("chat gpt-4o-mini") as span: diff --git a/tests/python/litellm/test_prototype.py b/tests/python/litellm/test_prototype.py index 5a02684c..8de9ca6c 100644 --- a/tests/python/litellm/test_prototype.py +++ b/tests/python/litellm/test_prototype.py @@ -171,7 +171,12 @@ def run_chat_tool_call(): span.set_attribute("gen_ai.operation.name", "chat") span.set_attribute("gen_ai.provider.name", "openai") span.set_attribute("gen_ai.request.model", request_model) - span.set_attribute("gen_ai.tool.definitions", json.dumps([request_tool])) + span.set_attribute("gen_ai.tool.definitions", json.dumps([{ + "type": request_tool["type"], + "name": request_tool["function"]["name"], + "description": request_tool["function"]["description"], + "parameters": request_tool["function"]["parameters"], + }])) span.set_attribute( "gen_ai.input.messages", json.dumps([ diff --git a/tests/python/llamaindex/test_prototype.py b/tests/python/llamaindex/test_prototype.py index 47e5d665..28c697de 100644 --- a/tests/python/llamaindex/test_prototype.py +++ b/tests/python/llamaindex/test_prototype.py @@ -107,9 +107,10 @@ def get_weather(location: str) -> str: weather_tool = FunctionTool.from_defaults(fn=get_weather) tool_definition = { + "type": "function", "name": weather_tool.metadata.name, "description": weather_tool.metadata.description, - 
"fn_schema": weather_tool.metadata.fn_schema.model_json_schema(), + "parameters": weather_tool.metadata.fn_schema.model_json_schema(), } with _prototype_tracer.start_as_current_span("chat gpt-4o-mini") as span: span.set_attribute("gen_ai.operation.name", "chat") diff --git a/tests/python/mistralai/test_prototype.py b/tests/python/mistralai/test_prototype.py index 0c5ceb97..af73c662 100644 --- a/tests/python/mistralai/test_prototype.py +++ b/tests/python/mistralai/test_prototype.py @@ -98,7 +98,15 @@ def run_chat_tool_call(client): span.set_attribute("gen_ai.operation.name", "chat") span.set_attribute("gen_ai.provider.name", "mistral") span.set_attribute("gen_ai.request.model", request_model) - span.set_attribute("gen_ai.tool.definitions", json.dumps(tools)) + span.set_attribute("gen_ai.tool.definitions", json.dumps([ + { + "type": t["type"], + "name": t["function"]["name"], + "description": t["function"]["description"], + "parameters": t["function"]["parameters"], + } + for t in tools + ])) resp = client.chat.complete( model=request_model, messages=[{"role": "user", "content": "What's the weather in Seattle?"}], diff --git a/tests/python/openai-agents/test_prototype.py b/tests/python/openai-agents/test_prototype.py index 4d2abb0e..3dee469b 100644 --- a/tests/python/openai-agents/test_prototype.py +++ b/tests/python/openai-agents/test_prototype.py @@ -48,16 +48,12 @@ def get_weather(location: str) -> str: span.set_attribute("gen_ai.provider.name", "openai") span.set_attribute("gen_ai.request.model", request_model) span.set_attribute("gen_ai.response.model", request_model) - # OpenAI Agents SDK converts FunctionTools to OpenAI function-calling - # format before sending to the API, so we mirror that shape here. 
span.set_attribute("gen_ai.tool.definitions", json.dumps([ { "type": "function", - "function": { - "name": t.name, - "description": t.description, - "parameters": t.params_json_schema, - }, + "name": t.name, + "description": t.description, + "parameters": t.params_json_schema, } for t in tools if isinstance(t, FunctionTool) diff --git a/tests/python/openai-assistants/common.py b/tests/python/openai-assistants/common.py index 2daf3fda..9e359192 100644 --- a/tests/python/openai-assistants/common.py +++ b/tests/python/openai-assistants/common.py @@ -78,7 +78,15 @@ def run_invoke_agent(client): span.set_attribute("gen_ai.agent.id", assistant.id) span.set_attribute("gen_ai.agent.name", assistant.name or "") span.set_attribute("gen_ai.request.model", "gpt-4o-mini") - span.set_attribute("gen_ai.tool.definitions", json.dumps(tool_defs)) + span.set_attribute("gen_ai.tool.definitions", json.dumps([ + { + "type": t["type"], + "name": t["function"]["name"], + "description": t["function"]["description"], + "parameters": t["function"]["parameters"], + } + for t in tool_defs + ])) span.set_attribute("server.address", _SERVER_ADDRESS) span.set_attribute("server.port", _SERVER_PORT) try: diff --git a/tests/python/openai/test_prototype.py b/tests/python/openai/test_prototype.py index e7be94b5..7bc5bea3 100644 --- a/tests/python/openai/test_prototype.py +++ b/tests/python/openai/test_prototype.py @@ -147,7 +147,15 @@ def run_chat_tool_call_prototype(client): span.set_attribute("gen_ai.operation.name", "chat") span.set_attribute("gen_ai.provider.name", "openai") span.set_attribute("gen_ai.request.model", request_model) - span.set_attribute("gen_ai.tool.definitions", json.dumps(tools)) + span.set_attribute("gen_ai.tool.definitions", json.dumps([ + { + "type": t["type"], + "name": t["function"]["name"], + "description": t["function"]["description"], + "parameters": t["function"]["parameters"], + } + for t in tools + ])) if endpoint.hostname: span.set_attribute("server.address", 
endpoint.hostname) if endpoint.port is not None: diff --git a/tests/python/pydantic-ai/test_prototype.py b/tests/python/pydantic-ai/test_prototype.py index 63602f69..8db43a34 100644 --- a/tests/python/pydantic-ai/test_prototype.py +++ b/tests/python/pydantic-ai/test_prototype.py @@ -155,16 +155,12 @@ def get_weather(location: str) -> str: {"role": "user", "parts": [{"type": "text", "content": prompt_text}]}, ]), ) - # Pydantic AI converts tools to OpenAI function-calling format before - # sending to the API, so we mirror that shape here. span.set_attribute("gen_ai.tool.definitions", json.dumps([ { "type": "function", - "function": { - "name": t.name, - "description": t.description, - "parameters": t.function_schema.json_schema, - }, + "name": t.name, + "description": t.description, + "parameters": t.function_schema.json_schema, } for t in tools ])) diff --git a/tests/python/vertexai/test_prototype.py b/tests/python/vertexai/test_prototype.py index 47cbd8e6..2248fe1b 100644 --- a/tests/python/vertexai/test_prototype.py +++ b/tests/python/vertexai/test_prototype.py @@ -136,17 +136,16 @@ def run_chat_tool_call(): span.set_attribute("gen_ai.provider.name", "vertex_ai") span.set_attribute("gen_ai.request.model", request_model) span.set_attribute("gen_ai.tool.definitions", json.dumps([{ - "function_declarations": [{ - "name": "get_weather", - "description": "Get the current weather", - "parameters": { - "type": "object", - "properties": { - "location": {"type": "string", "description": "City name"}, - }, - "required": ["location"], + "type": "function", + "name": "get_weather", + "description": "Get the current weather", + "parameters": { + "type": "object", + "properties": { + "location": {"type": "string", "description": "City name"}, }, - }] + "required": ["location"], + }, }])) with warnings.catch_warnings(): warnings.simplefilter("ignore", DeprecationWarning)