Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions dotnet/agent-framework-dotnet.slnx
Original file line number Diff line number Diff line change
Expand Up @@ -67,6 +67,7 @@
<Project Path="samples/02-agents/Agents/Agent_Step17_AdditionalAIContext/Agent_Step17_AdditionalAIContext.csproj" />
<Project Path="samples/02-agents/Agents/Agent_Step18_CompactionPipeline/Agent_Step18_CompactionPipeline.csproj" />
<Project Path="samples/02-agents/Agents/Agent_Step19_InFunctionLoopCheckpointing/Agent_Step19_InFunctionLoopCheckpointing.csproj" />
<Project Path="samples/02-agents/Agents/Agent_Step20_DynamicFunctionTools/Agent_Step20_DynamicFunctionTools.csproj" />
</Folder>
<Folder Name="/Samples/02-agents/DeclarativeAgents/">
<Project Path="samples/02-agents/DeclarativeAgents/ChatClient/DeclarativeChatClientAgents.csproj" />
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,20 @@
<Project Sdk="Microsoft.NET.Sdk">

<PropertyGroup>
<OutputType>Exe</OutputType>
<TargetFrameworks>net10.0</TargetFrameworks>

<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
</PropertyGroup>

<ItemGroup>
<PackageReference Include="Azure.AI.OpenAI" />
<PackageReference Include="Azure.Identity" />
</ItemGroup>

<ItemGroup>
<ProjectReference Include="..\..\..\..\src\Microsoft.Agents.AI.OpenAI\Microsoft.Agents.AI.OpenAI.csproj" />
</ItemGroup>

</Project>
Original file line number Diff line number Diff line change
@@ -0,0 +1,281 @@
// Copyright (c) Microsoft. All rights reserved.

// This sample demonstrates how to dynamically expand the set of function tools available to an
// agent during a function-calling loop. The agent starts with a single "RequestTools" function.
// When the model calls RequestTools with a description of the capabilities needed, the function
// uses the ambient FunctionInvocationContext to add new tools to ChatOptions.Tools. The agent
// can then use the newly added tools in subsequent iterations of the same function-calling loop.

using System.ComponentModel;
using Azure.AI.OpenAI;
using Azure.Identity;
using Microsoft.Agents.AI;
using Microsoft.Extensions.AI;

// Read the Azure OpenAI connection settings from the environment.
// The endpoint is required; the deployment name falls back to a default model.
var endpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? throw new InvalidOperationException("AZURE_OPENAI_ENDPOINT is not set.");
var deploymentName = Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOYMENT_NAME") ?? "gpt-5.4-mini";

// Pre-defined tool implementations that can be loaded on demand.
// Returns canned weather data for a small, fixed set of cities; any other
// input yields a hint listing the supported city names.
[Description("Get the current weather for a city.")]
static string GetWeather([Description("The city name.")] string city)
{
    // Normalize once so the lookup is case-insensitive.
    var normalized = city.ToUpperInvariant();

    if (normalized == "SEATTLE")
    {
        return "Seattle: 55°F, cloudy with light rain.";
    }

    if (normalized == "NEW YORK")
    {
        return "New York: 72°F, sunny and warm.";
    }

    if (normalized == "LONDON")
    {
        return "London: 48°F, overcast with fog.";
    }

    return $"{city}: weather data not available, please provide one of the following city names: 'Seattle', 'New York', 'London'.";
}

// Returns a canned local-time string for a small, fixed set of cities; any
// other input yields a hint listing the supported city names.
[Description("Get the current local time for a city.")]
static string GetTime([Description("The city name.")] string city)
{
    // Case-insensitive dispatch over the supported cities.
    switch (city.ToUpperInvariant())
    {
        case "SEATTLE":
            return "Seattle: 9:00 AM PST";
        case "NEW YORK":
            return "New York: 12:00 PM EST";
        case "LONDON":
            return "London: 5:00 PM GMT";
        default:
            return $"{city}: time data not available, please provide one of the following city names: 'Seattle', 'New York', 'London'.";
    }
}

// Converts a Fahrenheit temperature to Celsius and formats both values in one string.
[Description("Convert a temperature from Fahrenheit to Celsius.")]
static string ConvertFahrenheitToCelsius([Description("The temperature in Fahrenheit.")] double fahrenheit) =>
    // FIX (CA1305): format with the invariant culture so the numeric output is stable
    // regardless of the host's regional settings (e.g. no "100,0" on comma-decimal locales).
    // The result is consumed by the model, not shown to a local user.
    FormattableString.Invariant($"{fahrenheit}°F = {(fahrenheit - 32) * 5 / 9:F1}°C");

// A registry of tool sets that can be loaded by description keyword.
// Keys are matched case-insensitively (OrdinalIgnoreCase) against the description
// passed to RequestTools; each value is the list of tools that keyword unlocks.
Dictionary<string, List<AITool>> toolCatalog = new(StringComparer.OrdinalIgnoreCase)
{
    ["weather"] = [AIFunctionFactory.Create(GetWeather, name: "GetWeather")],
    ["time"] = [AIFunctionFactory.Create(GetTime, name: "GetTime")],
    ["temperature"] = [AIFunctionFactory.Create(ConvertFahrenheitToCelsius, name: "ConvertFahrenheitToCelsius")],
};

// The RequestTools function uses the ambient FunctionInvocationContext to add tools dynamically.
// The model calls it with a free-text description; any catalog keyword contained in that
// description (case-insensitively) has its tools appended to ChatOptions.Tools, making them
// available on the next iteration of the same function-calling loop.
AIFunction requestToolsFunction = AIFunctionFactory.Create(
    [Description("Request additional tools to be loaded based on a description of the functionality needed. " +
        "Call this when you need capabilities that are not yet available in your current tool set.")] (
        [Description("A description of the functionality required, e.g. 'weather', 'time', or 'temperature conversion'.")] string description
    ) =>
    {
        // Access the ambient FunctionInvocationContext provided by FunctionInvokingChatClient.
        var context = FunctionInvokingChatClient.CurrentContext
            ?? throw new InvalidOperationException("No ambient FunctionInvocationContext available.");

        var tools = context.Options?.Tools;
        if (tools is null)
        {
            return "Unable to register new tools: ChatOptions.Tools is not available.";
        }

        // Find matching tool sets from the catalog and add any that are not already registered.
        List<string> addedToolNames = [];
        foreach (var (keyword, catalogTools) in toolCatalog)
        {
            if (!description.Contains(keyword, StringComparison.OrdinalIgnoreCase))
            {
                continue;
            }

            foreach (var tool in catalogTools)
            {
                // Avoid adding duplicates so repeated requests stay idempotent.
                if (tool is AIFunction fn && !tools.Any(t => t is AIFunction existing && existing.Name == fn.Name))
                {
                    tools.Add(tool);
                    addedToolNames.Add(fn.Name);
                }
            }
        }

        // FIX: name the tools that were just loaded so the model knows exactly what is now
        // available, instead of the uninformative generic "Successfully loaded tools" message.
        return addedToolNames.Count > 0
            ? $"Successfully loaded tools: {string.Join(", ", addedToolNames)}."
            : $"No tools matched the description '{description}'. Available categories: {string.Join(", ", toolCatalog.Keys)}.";
    },
    name: "RequestTools");

// Create the agent with only the RequestTools function initially.
// Insert chat client middleware that logs the tools available on each LLM call,
// making the dynamic expansion visible in the console output.
// WARNING: DefaultAzureCredential is convenient for development but requires careful consideration in production.
// In production, consider using a specific credential (e.g., ManagedIdentityCredential) to avoid
// latency issues, unintended credential probing, and potential security risks from fallback mechanisms.
AIAgent agent = new AzureOpenAIClient(
    new Uri(endpoint),
    new DefaultAzureCredential())
    .GetChatClient(deploymentName)
    // Adapt the Azure OpenAI chat client to the Microsoft.Extensions.AI IChatClient abstraction,
    // then wrap it with the tool-logging middleware defined at the bottom of this file.
    .AsIChatClient()
    .AsBuilder()
    .Use(getResponseFunc: ToolLoggingMiddleware, getStreamingResponseFunc: ToolLoggingStreamingMiddleware)
    .BuildAIAgent(
        instructions: """
        You are a helpful assistant. You start with limited tools.
        When you need functionality that you don't currently have, call RequestTools with a description
        of what you need. After new tools are loaded, use them to answer the user's question.
        """,
        tools: [requestToolsFunction])

// Run a conversation that triggers dynamic tool expansion.
Console.WriteLine("=== Dynamic Function Tools Sample ===\n");

// Each prompt needs a different tool category (weather, time, temperature conversion),
// so the model must call RequestTools before it can answer.
string[] prompts =
[
    "What's the weather like in Seattle and London?",
    "What time is it in New York?",
    "Can you convert those temperatures to Celsius?"
];

// --- Non-Streaming Mode ---
Console.ForegroundColor = ConsoleColor.Yellow;
Console.WriteLine("=== Non-Streaming Mode ===");
Console.ResetColor();
Console.WriteLine();

// One session carries the conversation history across all three prompts,
// so tools loaded for an earlier prompt remain available for later ones.
AgentSession session = await agent.CreateSessionAsync();

foreach (var prompt in prompts)
{
    // Echo the user's prompt in green.
    Console.ForegroundColor = ConsoleColor.Green;
    Console.Write("[User] ");
    Console.ResetColor();
    Console.WriteLine(prompt);

    var response = await agent.RunAsync(prompt, session);

    // Print all message contents including tool calls, tool results, and text.
    foreach (var message in response.Messages)
    {
        foreach (var content in message.Contents)
        {
            switch (content)
            {
                // A tool invocation requested by the model, with its arguments.
                case FunctionCallContent functionCall:
                    Console.ForegroundColor = ConsoleColor.Yellow;
                    Console.WriteLine($" [Tool Call] {functionCall.Name}({string.Join(", ", functionCall.Arguments?.Select(a => $"{a.Key}: {a.Value}") ?? [])})");
                    Console.ResetColor();
                    break;

                // The value returned by an executed tool, correlated by call id.
                case FunctionResultContent functionResult:
                    Console.ForegroundColor = ConsoleColor.DarkYellow;
                    Console.WriteLine($" [Tool Result] {functionResult.CallId} => {functionResult.Result}");
                    Console.ResetColor();
                    break;

                // Final (or intermediate) assistant text; skip whitespace-only fragments.
                case TextContent textContent when !string.IsNullOrWhiteSpace(textContent.Text):
                    Console.ForegroundColor = ConsoleColor.Cyan;
                    Console.Write("[Agent] ");
                    Console.ResetColor();
                    Console.WriteLine(textContent.Text);
                    break;
            }
        }
    }

    Console.WriteLine();
}

// --- Streaming Mode ---
Console.ForegroundColor = ConsoleColor.Yellow;
Console.WriteLine("=== Streaming Mode ===");
Console.ResetColor();
Console.WriteLine();

// Use a separate session so the streaming run starts from a clean history
// (and therefore re-demonstrates the dynamic tool loading).
AgentSession streamingSession = await agent.CreateSessionAsync();

foreach (var prompt in prompts)
{
    Console.ForegroundColor = ConsoleColor.Green;
    Console.Write("[User] ");
    Console.ResetColor();
    Console.WriteLine(prompt);

    // Tracks whether we are mid-way through streaming agent text, so that
    // tool-call/result logs start on a fresh line instead of mid-sentence.
    bool inAgentText = false;

    await foreach (var update in agent.RunStreamingAsync(prompt, streamingSession))
    {
        foreach (var content in update.Contents)
        {
            switch (content)
            {
                case FunctionCallContent functionCall:
                    // Close off any partially streamed agent text before logging the call.
                    if (inAgentText)
                    {
                        Console.WriteLine();
                        inAgentText = false;
                    }

                    Console.ForegroundColor = ConsoleColor.Yellow;
                    Console.WriteLine($" [Tool Call] {functionCall.Name}({string.Join(", ", functionCall.Arguments?.Select(a => $"{a.Key}: {a.Value}") ?? [])})");
                    Console.ResetColor();
                    break;

                case FunctionResultContent functionResult:
                    Console.ForegroundColor = ConsoleColor.DarkYellow;
                    Console.WriteLine($" [Tool Result] {functionResult.CallId} => {functionResult.Result}");
                    Console.ResetColor();
                    break;

                case TextContent textContent when !string.IsNullOrWhiteSpace(textContent.Text):
                    // Print the "[Agent]" prefix only once per contiguous run of text updates.
                    if (!inAgentText)
                    {
                        Console.ForegroundColor = ConsoleColor.Cyan;
                        Console.Write("[Agent] ");
                        Console.ResetColor();
                        inAgentText = true;
                    }

                    // Stream the text fragment without a newline; fragments join into sentences.
                    Console.Write(textContent.Text);
                    break;
            }
        }
    }

    // Terminate the last streamed line, if any text was printed.
    if (inAgentText)
    {
        Console.WriteLine();
    }

    Console.WriteLine();
}

// Chat client middleware that logs the number and names of tools on each LLM request,
// then delegates to the wrapped client unchanged.
async Task<ChatResponse> ToolLoggingMiddleware(
    IEnumerable<ChatMessage> chatMessages,
    ChatOptions? chatOptions,
    IChatClient inner,
    CancellationToken ct)
{
    // Surface the current tool set before the call so dynamic expansion is visible.
    LogTools(chatOptions);

    var response = await inner.GetResponseAsync(chatMessages, chatOptions, ct);
    return response;
}

// Streaming version of the tool logging middleware: logs the current tool set,
// then forwards every update from the wrapped client unchanged.
async IAsyncEnumerable<ChatResponseUpdate> ToolLoggingStreamingMiddleware(
    IEnumerable<ChatMessage> chatMessages,
    ChatOptions? chatOptions,
    IChatClient inner,
    [System.Runtime.CompilerServices.EnumeratorCancellation] CancellationToken ct)
{
    // Surface the current tool set before the call so dynamic expansion is visible.
    LogTools(chatOptions);

    var updates = inner.GetStreamingResponseAsync(chatMessages, chatOptions, ct);
    await foreach (var item in updates)
    {
        yield return item;
    }
}

// Shared helper to log the current tool set for a single LLM request.
// Writes one dark-gray line listing the tool count and AIFunction names (or "0 tools").
void LogTools(ChatOptions? options)
{
    var tools = options?.Tools;

    // FIX (DRY): both branches previously duplicated the color set/write/reset sequence.
    // Build the message first so a single write is bracketed by one set/reset pair.
    string message = tools is { Count: > 0 }
        ? $" [Middleware] LLM call with {tools.Count} tool(s): {string.Join(", ", tools.OfType<AIFunction>().Select(t => t.Name))}"
        : " [Middleware] LLM call with 0 tools";

    Console.ForegroundColor = ConsoleColor.DarkGray;
    Console.WriteLine(message);
    Console.ResetColor();
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,38 @@
# Dynamic Function Tools

This sample demonstrates how to dynamically expand the set of function tools available to an agent during a function-calling loop.

## What it demonstrates

- The agent starts with only a single `RequestTools` function
- When the model needs capabilities it doesn't have, it calls `RequestTools` with a description of the functionality needed
- The `RequestTools` function uses the ambient `FunctionInvokingChatClient.CurrentContext` to access `ChatOptions.Tools` and add new tools at runtime
- The agent then uses the newly added tools in subsequent iterations of the same function-calling loop

## How it works

1. A tool catalog maps keywords (e.g. "weather", "time", "temperature") to pre-built `AIFunction` instances
2. The `RequestTools` function matches the description against catalog keywords and adds matching tools to `ChatOptions.Tools`
3. `FunctionInvokingChatClient` automatically picks up the new tools on the next iteration of its loop

## Prerequisites

- .NET 10 SDK or later
- Azure OpenAI service endpoint and deployment configured
- Azure CLI installed and authenticated (`DefaultAzureCredential` can use your Azure CLI login to authenticate)
- User has the `Cognitive Services OpenAI Contributor` role for the Azure OpenAI resource

## Running the sample

Set the required environment variables:

```powershell
$env:AZURE_OPENAI_ENDPOINT="https://your-resource.openai.azure.com/"
$env:AZURE_OPENAI_DEPLOYMENT_NAME="gpt-5.4-mini" # Optional, defaults to gpt-5.4-mini
```

Run the sample:

```powershell
dotnet run
```
Comment thread
westey-m marked this conversation as resolved.
1 change: 1 addition & 0 deletions dotnet/samples/02-agents/Agents/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -46,6 +46,7 @@ Before you begin, ensure you have the following prerequisites:
|[Providing additional AI Context to an agent using multiple AIContextProviders](./Agent_Step17_AdditionalAIContext/)|This sample demonstrates how to inject additional AI context into a ChatClientAgent using multiple custom AIContextProvider components that are attached to the agent.|
|[Using compaction pipeline with an agent](./Agent_Step18_CompactionPipeline/)|This sample demonstrates how to use a compaction pipeline to efficiently limit the size of the conversation history for an agent.|
|[In-function-loop checkpointing](./Agent_Step19_InFunctionLoopCheckpointing/)|This sample demonstrates how to persist chat history after each service call during a tool-calling loop, enabling crash recovery and mid-run observability.|
|[Dynamic function tools](./Agent_Step20_DynamicFunctionTools/)|This sample demonstrates how to dynamically expand the set of function tools available to an agent during a function-calling loop using the ambient FunctionInvocationContext.|
Comment thread
westey-m marked this conversation as resolved.

## Running the samples from the console

Expand Down
Loading