Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions dotnet/agent-framework-dotnet.slnx
Original file line number Diff line number Diff line change
Expand Up @@ -57,6 +57,7 @@
<Project Path="samples/02-agents/Agents/Agent_Step16_Declarative/Agent_Step16_Declarative.csproj" />
<Project Path="samples/02-agents/Agents/Agent_Step17_AdditionalAIContext/Agent_Step17_AdditionalAIContext.csproj" />
<Project Path="samples/02-agents/Agents/Agent_Step18_CompactionPipeline/Agent_Step18_CompactionPipeline.csproj" />
<Project Path="samples/02-agents/Agents/Agent_Step19_InFunctionLoopCheckpointing/Agent_Step19_InFunctionLoopCheckpointing.csproj" />
</Folder>
<Folder Name="/Samples/02-agents/DeclarativeAgents/">
<Project Path="samples/02-agents/DeclarativeAgents/ChatClient/DeclarativeChatClientAgents.csproj" />
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,20 @@
<Project Sdk="Microsoft.NET.Sdk">

<PropertyGroup>
<!-- Console sample app targeting .NET 10. -->
<OutputType>Exe</OutputType>
<TargetFrameworks>net10.0</TargetFrameworks>

<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
</PropertyGroup>

<!-- Azure OpenAI client plus Azure.Identity for DefaultAzureCredential auth.
     Versions are centrally managed (no Version attributes here). -->
<ItemGroup>
<PackageReference Include="Azure.AI.OpenAI" />
<PackageReference Include="Azure.Identity" />
</ItemGroup>

<!-- Agent Framework OpenAI integration, referenced from source. -->
<ItemGroup>
<ProjectReference Include="..\..\..\..\src\Microsoft.Agents.AI.OpenAI\Microsoft.Agents.AI.OpenAI.csproj" />
</ItemGroup>

</Project>
Original file line number Diff line number Diff line change
@@ -0,0 +1,226 @@
// Copyright (c) Microsoft. All rights reserved.

// This sample demonstrates how the ChatClientAgent persists chat history after each individual
// call to the AI service.
// When an agent uses tools, FunctionInvokingChatClient may loop multiple times
// (service call → tool execution → service call), and intermediate messages (tool calls and
// results) are persisted after each service call. This allows you to inspect or recover them
// even if the process is interrupted mid-loop, but may also result in chat history that is not
// yet finalized (e.g., tool calls without results) being persisted, which may be undesirable in some cases.
//
// To opt into end-of-run persistence instead (atomic run semantics), set
// PersistChatHistoryAtEndOfRun = true on ChatClientAgentOptions.
//
// The sample runs two multi-turn conversations: one using non-streaming (RunAsync) and one
// using streaming (RunStreamingAsync), to demonstrate correct behavior in both modes.

using System.ComponentModel;
using Azure.AI.OpenAI;
using Azure.Identity;
using Microsoft.Agents.AI;
using Microsoft.Extensions.AI;
using OpenAI.Responses;

// Read configuration from the environment: the endpoint is required; the deployment
// name defaults to gpt-4o-mini.
var endpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? throw new InvalidOperationException("AZURE_OPENAI_ENDPOINT is not set.");
var deploymentName = Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOYMENT_NAME") ?? "gpt-4o-mini";
// When "true", the Responses service stores the chat history server-side; otherwise
// stored output is disabled and the history is kept in memory (see chatClient below).
var store = Environment.GetEnvironmentVariable("AZURE_OPENAI_RESPONSES_STORE") ?? "false";

// WARNING: DefaultAzureCredential is convenient for development but requires careful consideration in production.
// In production, consider using a specific credential (e.g., ManagedIdentityCredential) to avoid
// latency issues, unintended credential probing, and potential security risks from fallback mechanisms.
AzureOpenAIClient openAIClient = new(new Uri(endpoint), new DefaultAzureCredential());

// Define multiple tools so the model makes several tool calls in a single run.
// Returns a canned weather report for a few known cities (case-insensitive match);
// unknown cities get a "not available" message.
[Description("Get the current weather for a city.")]
static string GetWeather([Description("The city name.")] string city)
{
    var normalized = city.ToUpperInvariant();
    if (normalized == "SEATTLE")
    {
        return "Seattle: 55°F, cloudy with light rain.";
    }

    if (normalized == "NEW YORK")
    {
        return "New York: 72°F, sunny and warm.";
    }

    if (normalized == "LONDON")
    {
        return "London: 48°F, overcast with fog.";
    }

    if (normalized == "DUBLIN")
    {
        return "Dublin: 43°F, overcast with fog.";
    }

    return $"{city}: weather data not available.";
}

// Returns a canned local time for a few known cities (case-insensitive match);
// unknown cities get a "not available" message.
[Description("Get the current time in a city.")]
static string GetTime([Description("The city name.")] string city)
{
    switch (city.ToUpperInvariant())
    {
        case "SEATTLE":
            return "Seattle: 9:00 AM PST";
        case "NEW YORK":
            return "New York: 12:00 PM EST";
        case "LONDON":
            return "London: 5:00 PM GMT";
        case "DUBLIN":
            return "Dublin: 5:00 PM GMT";
        default:
            return $"{city}: time data not available.";
    }
}

// Create the agent — per-service-call persistence is the default behavior.
// The in-memory ChatHistoryProvider is used by default when the service does not require service stored chat
// history, so for those cases, we can inspect the chat history via session.TryGetInMemoryChatHistory().
// When AZURE_OPENAI_RESPONSES_STORE is "true", the service stores the history instead
// and the session exposes a conversation id rather than in-memory messages.
IChatClient chatClient = string.Equals(store, "TRUE", StringComparison.OrdinalIgnoreCase) ?
openAIClient.GetResponsesClient().AsIChatClient(deploymentName) :
openAIClient.GetResponsesClient().AsIChatClientWithStoredOutputDisabled(deploymentName);
// Register both tools so a single run triggers multiple service calls in the
// FunctionInvokingChatClient loop (the behavior this sample demonstrates).
AIAgent agent = chatClient.AsAIAgent(
new ChatClientAgentOptions
{
Name = "WeatherAssistant",
ChatOptions = new()
{
Instructions = "You are a helpful assistant. When asked about multiple cities, call the appropriate tool for each city.",
Tools = [AIFunctionFactory.Create(GetWeather), AIFunctionFactory.Create(GetTime)]
},
});

// Run the same three-turn conversation in both modes.
await RunNonStreamingAsync();
await RunStreamingAsync();

// Runs a three-turn tool-calling conversation with the non-streaming RunAsync API,
// dumping the persisted chat history after each turn.
async Task RunNonStreamingAsync()
{
    int lastChatHistorySize = 0;
    string lastConversationId = string.Empty;

    Console.ForegroundColor = ConsoleColor.Yellow;
    Console.WriteLine("\n=== Non-Streaming Mode ===");
    Console.ResetColor();

    AgentSession session = await agent.CreateSessionAsync();

    // Turn 1 asks about multiple cities so the model calls tools; turns 2 and 3 are
    // follow-ups that verify the persisted chat history carries context forward.
    (string Prompt, string Label)[] turns =
    [
        ("What's the weather and time in Seattle, New York, and London?", "After run"),
        ("And Dublin?", "After second run"),
        ("Which city is the warmest?", "After third run"),
    ];

    foreach (var (prompt, label) in turns)
    {
        PrintUserMessage(prompt);

        var response = await agent.RunAsync(prompt, session);
        PrintAgentResponse(response.Text);
        PrintChatHistory(session, label, ref lastChatHistorySize, ref lastConversationId);
    }
}

// Runs the same three-turn conversation with the streaming RunStreamingAsync API.
// The chat history is inspected *inside* the streaming loop to show that messages
// are persisted per service call, before the full run completes.
async Task RunStreamingAsync()
{
    int lastChatHistorySize = 0;
    string lastConversationId = string.Empty;

    Console.ForegroundColor = ConsoleColor.Yellow;
    Console.WriteLine("\n=== Streaming Mode ===");
    Console.ResetColor();

    AgentSession session = await agent.CreateSessionAsync();

    // Turn 1 asks about multiple cities so the model calls tools; turns 2 and 3 are
    // follow-ups that verify the persisted chat history carries context forward.
    (string Prompt, string DuringLabel, string AfterLabel)[] turns =
    [
        ("What's the weather and time in Seattle, New York, and London?", "During run", "After run"),
        ("And Dublin?", "During second run", "After second run"),
        ("Which city is the warmest?", "During third run", "After third run"),
    ];

    foreach (var (prompt, duringLabel, afterLabel) in turns)
    {
        PrintUserMessage(prompt);

        Console.ForegroundColor = ConsoleColor.Cyan;
        Console.Write("\n[Agent] ");
        Console.ResetColor();

        await foreach (var update in agent.RunStreamingAsync(prompt, session))
        {
            Console.Write(update);

            // Mid-run checkpoint: each service call persists its messages, so the
            // history can already have grown before the run finishes.
            PrintChatHistory(session, duringLabel, ref lastChatHistorySize, ref lastConversationId);
        }

        Console.WriteLine();
        PrintChatHistory(session, afterLabel, ref lastChatHistorySize, ref lastConversationId);
    }
}

// Prints a user prompt with the "[User]" tag highlighted in cyan.
void PrintUserMessage(string message)
{
    const string Tag = "\n[User] ";
    Console.ForegroundColor = ConsoleColor.Cyan;
    Console.Write(Tag);
    Console.ResetColor();
    Console.WriteLine(message);
}

// Prints the agent's final response with the "[Agent]" tag highlighted in cyan.
void PrintAgentResponse(string? text)
{
    const string Tag = "\n[Agent] ";
    Console.ForegroundColor = ConsoleColor.Cyan;
    Console.Write(Tag);
    Console.ResetColor();
    Console.WriteLine(text);
}

// Helper to dump the session's chat history and conversation id in dark gray,
// printing only when something changed since the previous call (tracked through
// the two ref parameters so repeated calls inside streaming loops stay quiet).
void PrintChatHistory(AgentSession session, string label, ref int lastChatHistorySize, ref string lastConversationId)
{
    // In-memory history is only available when stored output is disabled.
    bool hasHistory = session.TryGetInMemoryChatHistory(out var messages);
    if (hasHistory && messages.Count != lastChatHistorySize)
    {
        Console.ForegroundColor = ConsoleColor.DarkGray;
        Console.WriteLine($"\n [{label} — Chat history: {messages.Count} message(s)]");
        foreach (var message in messages)
        {
            var text = message.Text;
            // Truncate long texts to an 80-char preview; fall back to the content
            // type names for messages with no text (e.g., tool calls).
            var preview = text is { Length: > 80 } ? text[..80] + "…" : text;
            var contentTypes = string.Join(", ", message.Contents.Select(c => c.GetType().Name));
            Console.WriteLine($" {message.Role,-12} | {(string.IsNullOrWhiteSpace(preview) ? $"[{contentTypes}]" : preview)}");
        }

        Console.ResetColor();
        lastChatHistorySize = messages.Count;
    }

    // A conversation id appears when the service stores the chat history.
    if (session is ChatClientAgentSession ccaSession && ccaSession.ConversationId is { } conversationId && conversationId != lastConversationId)
    {
        Console.ForegroundColor = ConsoleColor.DarkGray;
        Console.WriteLine($" [{label} — Conversation ID: {conversationId}]");
        Console.ResetColor();
        lastConversationId = conversationId;
    }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,63 @@
# In-Function-Loop Checkpointing

This sample demonstrates how `ChatClientAgent` persists chat history after each individual call to the AI service by default. This per-service-call persistence ensures intermediate progress is saved during the function invocation loop.

## What This Sample Shows

When an agent uses tools, the `FunctionInvokingChatClient` loops multiple times (service call → tool execution → service call → …). By default, chat history is persisted after each service call via the `ChatHistoryPersistingChatClient` decorator:

- A `ChatHistoryPersistingChatClient` decorator is automatically inserted into the chat client pipeline
- After each service call, the decorator notifies the `ChatHistoryProvider` (and any `AIContextProvider` instances) with the new messages
- Only **new** messages are sent to providers on each notification — messages that were already persisted in an earlier call within the same run are deduplicated automatically

To opt into end-of-run persistence instead (atomic run semantics), set `PersistChatHistoryAtEndOfRun = true` on `ChatClientAgentOptions`. In that mode, the decorator marks messages with metadata rather than persisting them immediately, and `ChatClientAgent` persists only the marked messages at the end of the run.

Per-service-call persistence is useful for:
- **Crash recovery** — if the process is interrupted mid-loop, the intermediate tool calls and results are already persisted
- **Observability** — you can inspect the chat history while the agent is still running (e.g., during streaming)
- **Long-running tool loops** — agents with many sequential tool calls benefit from incremental persistence

## How It Works

The sample asks the agent about the weather and time in three cities. The model calls the `GetWeather` and `GetTime` tools for each city, resulting in multiple service calls within a single agent run (`RunAsync` or `RunStreamingAsync`). After each run completes, the sample prints the full chat history to show all the intermediate messages that were persisted along the way.

### Pipeline Architecture

```
ChatClientAgent
└─ FunctionInvokingChatClient (handles tool call loop)
└─ ChatHistoryPersistingChatClient (persists after each service call)
└─ Leaf IChatClient (Azure OpenAI)
```

## Prerequisites

- .NET 10 SDK or later
- Azure OpenAI service endpoint and model deployment
- Azure CLI installed and authenticated

**Note**: This sample uses `DefaultAzureCredential`. Sign in with `az login` before running. For production, prefer a specific credential such as `ManagedIdentityCredential`. For more information, see the [Azure CLI authentication documentation](https://learn.microsoft.com/cli/azure/authenticate-azure-cli-interactively).

## Environment Variables

```powershell
$env:AZURE_OPENAI_ENDPOINT="https://your-resource.openai.azure.com/" # Required
$env:AZURE_OPENAI_DEPLOYMENT_NAME="gpt-4o-mini" # Optional, defaults to gpt-4o-mini
$env:AZURE_OPENAI_RESPONSES_STORE="false" # Optional, set to "true" to use service-stored chat history
```

## Running the Sample

```powershell
cd dotnet/samples/02-agents/Agents/Agent_Step19_InFunctionLoopCheckpointing
dotnet run
```

## Expected Behavior

The sample runs the same conversation twice — once in non-streaming mode (`RunAsync`) and once in streaming mode (`RunStreamingAsync`) — with three turns per conversation:

1. **First turn** — asks about weather and time in three cities. The model calls `GetWeather` and `GetTime` tools (potentially in parallel or sequentially), then provides a summary. The chat history dump after the run shows all the intermediate tool call and result messages.

2. **Second turn** — asks a follow-up question ("And Dublin?") that relies on the persisted conversation context to know what is being asked.

3. **Third turn** — asks another follow-up ("Which city is the warmest?") that requires the tool results from the earlier turns. The chat history dump shows the full accumulated conversation.

The chat history printout uses `session.TryGetInMemoryChatHistory()` to inspect the in-memory storage.
1 change: 1 addition & 0 deletions dotnet/samples/02-agents/Agents/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -45,6 +45,7 @@ Before you begin, ensure you have the following prerequisites:
|[Declarative agent](./Agent_Step16_Declarative/)|This sample demonstrates how to declaratively define an agent.|
|[Providing additional AI Context to an agent using multiple AIContextProviders](./Agent_Step17_AdditionalAIContext/)|This sample demonstrates how to inject additional AI context into a ChatClientAgent using multiple custom AIContextProvider components that are attached to the agent.|
|[Using compaction pipeline with an agent](./Agent_Step18_CompactionPipeline/)|This sample demonstrates how to use a compaction pipeline to efficiently limit the size of the conversation history for an agent.|
|[In-function-loop checkpointing](./Agent_Step19_InFunctionLoopCheckpointing/)|This sample demonstrates how to persist chat history after each service call during a tool-calling loop, enabling crash recovery and mid-run observability.|

## Running the samples from the console

Expand Down
Loading
Loading