diff --git a/.github/skills/run-device-tests/SKILL.md b/.github/skills/run-device-tests/SKILL.md
index bd8d7b842ffa..5ad415535a11 100644
--- a/.github/skills/run-device-tests/SKILL.md
+++ b/.github/skills/run-device-tests/SKILL.md
@@ -54,6 +54,7 @@ These are automatically loaded by the Run-DeviceTests.ps1 script.
| Essentials | `src/Essentials/test/DeviceTests/Essentials.DeviceTests.csproj` |
| Graphics | `src/Graphics/tests/DeviceTests/Graphics.DeviceTests.csproj` |
| BlazorWebView | `src/BlazorWebView/tests/DeviceTests/MauiBlazorWebView.DeviceTests.csproj` |
+| AI | `src/AI/tests/Essentials.AI.DeviceTests/Essentials.AI.DeviceTests.csproj` |
## Scripts
diff --git a/.github/skills/run-device-tests/scripts/Run-DeviceTests.ps1 b/.github/skills/run-device-tests/scripts/Run-DeviceTests.ps1
index f94b2d5b044e..3f5620b79684 100644
--- a/.github/skills/run-device-tests/scripts/Run-DeviceTests.ps1
+++ b/.github/skills/run-device-tests/scripts/Run-DeviceTests.ps1
@@ -12,7 +12,7 @@
- Windows: android, windows
.PARAMETER Project
- The device test project to run. Valid values: Controls, Core, Essentials, Graphics, BlazorWebView
+ The device test project to run. Valid values: Controls, Core, Essentials, Graphics, BlazorWebView, AI
.PARAMETER Platform
Target platform. Valid values depend on OS:
@@ -65,7 +65,7 @@
[CmdletBinding()]
param(
[Parameter(Mandatory = $true, Position = 0)]
- [ValidateSet("Controls", "Core", "Essentials", "Graphics", "BlazorWebView")]
+ [ValidateSet("Controls", "Core", "Essentials", "Graphics", "BlazorWebView", "AI")]
[string]$Project,
[Parameter(Mandatory = $false)]
@@ -128,6 +128,7 @@ $ProjectPaths = @{
"Essentials" = "src/Essentials/test/DeviceTests/Essentials.DeviceTests.csproj"
"Graphics" = "src/Graphics/tests/DeviceTests/Graphics.DeviceTests.csproj"
"BlazorWebView" = "src/BlazorWebView/tests/DeviceTests/MauiBlazorWebView.DeviceTests.csproj"
+ "AI" = "src/AI/tests/Essentials.AI.DeviceTests/Essentials.AI.DeviceTests.csproj"
}
$AppNames = @{
@@ -136,6 +137,7 @@ $AppNames = @{
"Essentials" = "Microsoft.Maui.Essentials.DeviceTests"
"Graphics" = "Microsoft.Maui.Graphics.DeviceTests"
"BlazorWebView" = "Microsoft.Maui.MauiBlazorWebView.DeviceTests"
+ "AI" = "Microsoft.Maui.Essentials.AI.DeviceTests"
}
# Android package names (lowercase)
@@ -145,6 +147,7 @@ $AndroidPackageNames = @{
"Essentials" = "com.microsoft.maui.essentials.devicetests"
"Graphics" = "com.microsoft.maui.graphics.devicetests"
"BlazorWebView" = "com.microsoft.maui.mauiblazorwebview.devicetests"
+ "AI" = "com.microsoft.maui.ai.devicetests"
}
# Platform-specific configurations
@@ -239,6 +242,8 @@ try {
$projectPath = $ProjectPaths[$Project]
$appName = $AppNames[$Project]
+ # Derive artifact folder name from the project file name (e.g., "Essentials.AI.DeviceTests" from the .csproj)
+ $artifactName = [System.IO.Path]::GetFileNameWithoutExtension($projectPath)
Write-Host ""
Write-Host "Project: $Project" -ForegroundColor Yellow
@@ -316,11 +321,11 @@ try {
# Construct app path based on platform
switch ($Platform) {
"ios" {
- $appPath = "artifacts/bin/$Project.DeviceTests/$Configuration/$tfmFolder/$ridFolder/$appName.app"
+ $appPath = "artifacts/bin/$artifactName/$Configuration/$tfmFolder/$ridFolder/$appName.app"
}
"maccatalyst" {
# MacCatalyst apps may have different names - search for .app bundle
- $appSearchPath = "artifacts/bin/$Project.DeviceTests/$Configuration/$tfmFolder/$ridFolder"
+ $appSearchPath = "artifacts/bin/$artifactName/$Configuration/$tfmFolder/$ridFolder"
$appBundle = Get-ChildItem -Path $appSearchPath -Filter "*.app" -Directory -ErrorAction SilentlyContinue | Select-Object -First 1
if ($appBundle) {
$appPath = $appBundle.FullName
@@ -330,7 +335,7 @@ try {
}
"android" {
# Android APK path - look for signed APK
- $apkSearchPath = "artifacts/bin/$Project.DeviceTests/$Configuration/$tfmFolder"
+ $apkSearchPath = "artifacts/bin/$artifactName/$Configuration/$tfmFolder"
$apkFile = Get-ChildItem -Path $apkSearchPath -Filter "*-Signed.apk" -Recurse -ErrorAction SilentlyContinue | Select-Object -First 1
if ($apkFile) {
$appPath = $apkFile.FullName
@@ -345,14 +350,14 @@ try {
}
}
"windows" {
- $appPath = "artifacts/bin/$Project.DeviceTests/$Configuration/$tfmFolder/$ridFolder/$appName.exe"
+ $appPath = "artifacts/bin/$artifactName/$Configuration/$tfmFolder/$ridFolder/$appName.exe"
}
}
if (-not (Test-Path $appPath)) {
Write-Error "Built app not found at: $appPath"
Write-Info "Searching for app in artifacts..."
- Get-ChildItem -Path "artifacts/bin/$Project.DeviceTests" -Recurse -ErrorAction SilentlyContinue |
+ Get-ChildItem -Path "artifacts/bin/$artifactName" -Recurse -ErrorAction SilentlyContinue |
Where-Object { $_.Name -match "$appName" } |
ForEach-Object { Write-Host " Found: $($_.FullName)" }
exit 1
diff --git a/eng/Build.props b/eng/Build.props
index 691d93a5904d..26f2232be3dd 100644
--- a/eng/Build.props
+++ b/eng/Build.props
@@ -31,5 +31,6 @@
CodesignRequireProvisioningProfile=false
+
diff --git a/eng/Versions.props b/eng/Versions.props
index eb1d76746363..27b2954b2c4b 100644
--- a/eng/Versions.props
+++ b/eng/Versions.props
@@ -42,8 +42,8 @@
$(MicrosoftNETCoreAppRefPackageVersion)
$(MicrosoftNETCoreAppRefPackageVersion)
- 10.0.1
- 10.0.1
+ 10.3.0
+ 10.3.0
10.0.0
10.0.0
10.0.0
@@ -58,7 +58,10 @@
10.0.0
10.0.0
- 1.0.0-preview.251204.1
+ 1.0.0-rc2
+ 1.0.0-rc2
+ 1.0.0-rc2
+ 1.0.0-preview.260225.1
36.1.2
35.0.105
diff --git a/eng/cake/dotnet.cake b/eng/cake/dotnet.cake
index 918774f8fda3..09b7378ab229 100644
--- a/eng/cake/dotnet.cake
+++ b/eng/cake/dotnet.cake
@@ -270,6 +270,7 @@ Task("dotnet-test")
"**/Controls.BindingSourceGen.UnitTests.csproj",
"**/Core.UnitTests.csproj",
"**/Essentials.UnitTests.csproj",
+ "**/Essentials.AI.UnitTests.csproj",
"**/Resizetizer.UnitTests.csproj",
"**/Graphics.Tests.csproj",
"**/Compatibility.Core.UnitTests.csproj",
diff --git a/eng/helix.proj b/eng/helix.proj
index 5ba803fef9ba..565ce270a50c 100644
--- a/eng/helix.proj
+++ b/eng/helix.proj
@@ -35,6 +35,7 @@
+
diff --git a/eng/helix_xharness.proj b/eng/helix_xharness.proj
index c99d5a7526b5..ffba39148bb1 100644
--- a/eng/helix_xharness.proj
+++ b/eng/helix_xharness.proj
@@ -31,6 +31,11 @@
CollectionView;Shell;HybridWebView
+
+
+
+ AppleIntelligenceChatClient
+
@@ -119,6 +124,15 @@
Microsoft.Maui.MauiBlazorWebView.DeviceTests
src/BlazorWebView/tests/DeviceTests/MauiBlazorWebView.DeviceTests.csproj
+
+
+ Essentials.AI.DeviceTests
+ $(ScenariosDir)Essentials.AI.DeviceTests
+ Microsoft.Maui.Essentials.AI.DeviceTests
+ com.microsoft.maui.ai.devicetests
+ com.microsoft.maui.ai.devicetests
+ src/AI/tests/Essentials.AI.DeviceTests/Essentials.AI.DeviceTests.csproj
+
@@ -173,15 +187,33 @@
02:00:00
01:00:00
+
+ ios-simulator-64
+ 02:00:00
+ 01:00:00
+ xharness apple test --target "$target" --app "$app" --output-directory "$output_directory" --timeout "$timeout" --launch-timeout "$launch_timeout" --set-env="TestFilter=SkipCategories=$(AITestCategoriesToSkipOnCI)"
+
-
+
-
+
+ <_MAUIScenarioSearchMacCatalyst Include="@(_MAUIScenarioSearch)" />
+ <_MAUIScenarioSearchMacCatalyst Remove="EssentialsAI" />
+
+
+ maccatalyst
+ 02:00:00
+ 01:00:00
+ %(_MAUIScenarioSearchMacCatalyst.ScenarioDirectoryName)
+
+
+
+
maccatalyst
02:00:00
01:00:00
- %(_MAUIScenarioSearch.ScenarioDirectoryName)
+ xharness apple test --target "$target" --app "$app" --output-directory "$output_directory" --timeout "$timeout" --launch-timeout "$launch_timeout" --set-env="TestFilter=SkipCategories=$(AITestCategoriesToSkipOnCI)"
diff --git a/eng/pipelines/arcade/stage-device-tests.yml b/eng/pipelines/arcade/stage-device-tests.yml
index ac53318edca2..e4c84a19d9e0 100644
--- a/eng/pipelines/arcade/stage-device-tests.yml
+++ b/eng/pipelines/arcade/stage-device-tests.yml
@@ -79,6 +79,9 @@ parameters:
- name: MauiBlazorWebView.DeviceTests
path: src/BlazorWebView/tests/DeviceTests/MauiBlazorWebView.DeviceTests.csproj
packageId: Microsoft.Maui.MauiBlazorWebView.DeviceTests
+ - name: Essentials.AI.DeviceTests
+ path: src/AI/tests/Essentials.AI.DeviceTests/Essentials.AI.DeviceTests.csproj
+ packageId: com.microsoft.maui.ai.devicetests # NOTE(review): siblings use the assembly-style packageId (e.g. Microsoft.Maui.Essentials.AI.DeviceTests, see MauiBlazorWebView above) — confirm lowercase Android-style id is intended here
stages:
- stage: devicetests_build
@@ -465,7 +468,7 @@ stages:
# Save unpackaged publish output before packaged builds overwrite artifacts/bin
- pwsh: |
- $artifactNames = @("Controls.DeviceTests", "Core.DeviceTests", "Graphics.DeviceTests", "Essentials.DeviceTests", "MauiBlazorWebView.DeviceTests")
+ $artifactNames = @("Controls.DeviceTests", "Core.DeviceTests", "Graphics.DeviceTests", "Essentials.DeviceTests", "MauiBlazorWebView.DeviceTests", "Essentials.AI.DeviceTests")
foreach ($name in $artifactNames) {
$publishDir = Get-ChildItem -Path "$(Build.SourcesDirectory)/artifacts/bin/$name" -Filter "publish" -Recurse -Directory | Select-Object -First 1
if ($publishDir) {
@@ -498,7 +501,8 @@ stages:
@{ Name = "Core.DeviceTests"; ProjectDir = "$(Build.SourcesDirectory)/src/Core/tests/DeviceTests"; ArtifactDir = "$(Build.SourcesDirectory)/artifacts/bin/Core.DeviceTests" },
@{ Name = "Graphics.DeviceTests"; ProjectDir = "$(Build.SourcesDirectory)/src/Graphics/tests/DeviceTests"; ArtifactDir = "$(Build.SourcesDirectory)/artifacts/bin/Graphics.DeviceTests" },
@{ Name = "Essentials.DeviceTests"; ProjectDir = "$(Build.SourcesDirectory)/src/Essentials/test/DeviceTests"; ArtifactDir = "$(Build.SourcesDirectory)/artifacts/bin/Essentials.DeviceTests" },
- @{ Name = "MauiBlazorWebView.DeviceTests"; ProjectDir = "$(Build.SourcesDirectory)/src/BlazorWebView/tests/DeviceTests"; ArtifactDir = "$(Build.SourcesDirectory)/artifacts/bin/MauiBlazorWebView.DeviceTests" }
+ @{ Name = "MauiBlazorWebView.DeviceTests"; ProjectDir = "$(Build.SourcesDirectory)/src/BlazorWebView/tests/DeviceTests"; ArtifactDir = "$(Build.SourcesDirectory)/artifacts/bin/MauiBlazorWebView.DeviceTests" },
+ @{ Name = "Essentials.AI.DeviceTests"; ProjectDir = "$(Build.SourcesDirectory)/src/AI/tests/Essentials.AI.DeviceTests"; ArtifactDir = "$(Build.SourcesDirectory)/artifacts/bin/Essentials.AI.DeviceTests" }
)
foreach ($project in $projects) {
diff --git a/eng/pipelines/device-tests.yml b/eng/pipelines/device-tests.yml
index af6f271db91a..fd954605eecc 100644
--- a/eng/pipelines/device-tests.yml
+++ b/eng/pipelines/device-tests.yml
@@ -216,3 +216,15 @@ stages:
ios: $(System.DefaultWorkingDirectory)/src/BlazorWebView/tests/DeviceTests/MauiBlazorWebView.DeviceTests.csproj
catalyst: $(System.DefaultWorkingDirectory)/src/BlazorWebView/tests/DeviceTests/MauiBlazorWebView.DeviceTests.csproj
windows: $(System.DefaultWorkingDirectory)/src/BlazorWebView/tests/DeviceTests/MauiBlazorWebView.DeviceTests.csproj
+ - name: essentialsai
+ desc: Essentials.AI
+ androidApiLevelsExclude: [ 25, 27 ]
androidApiLevelsCoreClrExclude: [ 27, 25, 23 ]
+ androidConfiguration: 'Release'
+ iOSConfiguration: 'Debug'
+ windowsConfiguration: 'Debug'
+ windowsPackageId: 'com.microsoft.maui.ai.devicetests'
+ android: $(System.DefaultWorkingDirectory)/src/AI/tests/Essentials.AI.DeviceTests/Essentials.AI.DeviceTests.csproj
+ ios: $(System.DefaultWorkingDirectory)/src/AI/tests/Essentials.AI.DeviceTests/Essentials.AI.DeviceTests.csproj
+ catalyst: $(System.DefaultWorkingDirectory)/src/AI/tests/Essentials.AI.DeviceTests/Essentials.AI.DeviceTests.csproj
+ windows: $(System.DefaultWorkingDirectory)/src/AI/tests/Essentials.AI.DeviceTests/Essentials.AI.DeviceTests.csproj
diff --git a/src/AI/samples/Essentials.AI.Sample/AI/1_TravelPlannerExecutor.cs b/src/AI/samples/Essentials.AI.Sample/AI/1_TravelPlannerExecutor.cs
index b8522927d006..8f20bca343c0 100644
--- a/src/AI/samples/Essentials.AI.Sample/AI/1_TravelPlannerExecutor.cs
+++ b/src/AI/samples/Essentials.AI.Sample/AI/1_TravelPlannerExecutor.cs
@@ -1,4 +1,3 @@
-using System.Text.Json;
using Microsoft.Agents.AI;
using Microsoft.Agents.AI.Workflows;
using Microsoft.Extensions.AI;
@@ -9,30 +8,17 @@ namespace Maui.Controls.Sample.AI;
///
/// Agent 1: Travel Planner - Parses natural language to extract intent.
/// No tools - just NLP to extract destinationName, dayCount, language.
-/// Extends ChatProtocolExecutor to support the chat protocol for workflow-as-agent.
///
-internal sealed class TravelPlannerExecutor(AIAgent agent, JsonSerializerOptions jsonOptions, ILogger logger)
- : ChatProtocolExecutor("TravelPlannerExecutor")
+internal sealed class TravelPlannerExecutor(AIAgent agent, ILogger logger)
+ : ChatProtocolExecutor("TravelPlannerExecutor", new ChatProtocolExecutorOptions { AutoSendTurnToken = false })
{
- public const string Instructions = """
- You are a simple text parser.
-
- Extract ONLY these 3 values from the user's request:
- 1. destinationName: The place/location name mentioned (extract it exactly as written)
- 2. dayCount: The number of days mentioned (default: 3 if not specified)
- 3. language: The language mentioned for the output (default: English if not specified)
-
- Rules:
- 1. ALWAYS extract the raw values.
- 2. NEVER make up values or interpret the user's intent.
-
- Examples:
- - "5-day trip to Maui in French" → destinationName: "Maui", dayCount: 5, language: "French"
- - "Visit the Great Wall" → destinationName: "Great Wall", dayCount: 3, language: "English"
- - "Itinerary for Tokyo" → destinationName: "Tokyo", dayCount: 3, language: "English"
- - "Give me a Maui itinerary" → destinationName: "Maui", dayCount: 3, language: "English"
- - "Plan a 7 day Japan trip in Spanish" → destinationName: "Japan", dayCount: 7, language: "Spanish"
- """;
+ ///
+ /// Declares TravelPlanResult as a sent message type so the edge router can map it to downstream executors.
+ /// Without this, ChatProtocolExecutor only declares List<ChatMessage> and TurnToken, causing
+ /// TravelPlanResult to be silently dropped with DroppedTypeMismatch.
+ ///
+ protected override ProtocolBuilder ConfigureProtocol(ProtocolBuilder protocolBuilder)
+ => base.ConfigureProtocol(protocolBuilder).SendsMessage();
protected override async ValueTask TakeTurnAsync(
List messages,
@@ -42,18 +28,13 @@ protected override async ValueTask TakeTurnAsync(
{
logger.LogDebug("[TravelPlannerExecutor] Starting - parsing user intent");
- await context.AddEventAsync(new ExecutorStatusEvent("Analyzing your request..."));
+ await context.AddEventAsync(new ExecutorStatusEvent("Analyzing your request..."), cancellationToken);
- var runOptions = new ChatClientAgentRunOptions(new ChatOptions
- {
- ResponseFormat = ChatResponseFormat.ForJsonSchema(jsonOptions)
- });
-
- var response = await agent.RunAsync(messages, options: runOptions, cancellationToken: cancellationToken);
+ var response = await agent.RunAsync(messages, cancellationToken: cancellationToken);
logger.LogTrace("[TravelPlannerExecutor] Raw response: {Response}", response.Text);
- var result = JsonSerializer.Deserialize(response.Text, jsonOptions)!;
+ var result = response.Result;
logger.LogDebug("[TravelPlannerExecutor] Completed - extracted: destination={Destination}, days={Days}, language={Language}",
result.DestinationName, result.DayCount, result.Language);
@@ -61,7 +42,7 @@ protected override async ValueTask TakeTurnAsync(
var summary = result.Language != "English"
? $"Planning {result.DayCount}-day trip to {result.DestinationName} in {result.Language}"
: $"Planning {result.DayCount}-day trip to {result.DestinationName}";
- await context.AddEventAsync(new ExecutorStatusEvent(summary));
+ await context.AddEventAsync(new ExecutorStatusEvent(summary), cancellationToken);
await context.SendMessageAsync(result, cancellationToken);
}
diff --git a/src/AI/samples/Essentials.AI.Sample/AI/2_ResearcherExecutor.cs b/src/AI/samples/Essentials.AI.Sample/AI/2_ResearcherExecutor.cs
index 409968a245d7..ddc9cad04547 100644
--- a/src/AI/samples/Essentials.AI.Sample/AI/2_ResearcherExecutor.cs
+++ b/src/AI/samples/Essentials.AI.Sample/AI/2_ResearcherExecutor.cs
@@ -1,40 +1,20 @@
-using System.ComponentModel;
-using System.Text.Json;
-using Maui.Controls.Sample.Models;
-using Maui.Controls.Sample.Services;
using Microsoft.Agents.AI;
using Microsoft.Agents.AI.Workflows;
-using Microsoft.Extensions.AI;
using Microsoft.Extensions.Logging;
namespace Maui.Controls.Sample.AI;
///
-/// Agent 2: Researcher - Uses RAG to find candidate destinations, then AI selects the best match.
-/// Uses semantic search (embeddings) to pre-filter destinations, then LLM picks the best one.
+/// Agent 2: Researcher - Uses TextSearchProvider (RAG) to automatically inject matching destinations
+/// into the AI context before each invocation, then the AI selects the best match.
+/// The TextSearchProvider is configured in with BeforeAIInvoke mode, so candidate destinations are
+/// automatically searched and injected.
///
-internal sealed class ResearcherExecutor(AIAgent agent, DataService dataService, JsonSerializerOptions jsonOptions, ILogger logger)
- : Executor("ResearcherExecutor")
+internal sealed partial class ResearcherExecutor(AIAgent agent, ILogger logger)
+ : Executor("ResearcherExecutor")
{
- ///
- /// Maximum number of RAG candidates to return from semantic search.
- ///
- private const int MaxRagCandidates = 5;
-
- public const string Instructions = """
- You are a travel researcher.
- Your job is to select the best matching destination from a list of candidates.
-
- Rules:
- 1. You will be given a list of candidate destinations that semantically match the user's request.
- 2. Select the ONE destination that best matches what the user asked for.
- 3. NEVER make up destinations - only choose from the provided candidates.
- 4. If none of the candidates match well, pick the closest one.
-
- Return the exact name of the best matching destination from the candidates.
- """;
-
- public override async ValueTask HandleAsync(
+ [MessageHandler]
+ private async ValueTask HandleAsync(
TravelPlanResult input,
IWorkflowContext context,
CancellationToken cancellationToken = default)
@@ -42,72 +22,34 @@ public override async ValueTask HandleAsync(
logger.LogDebug("[ResearcherExecutor] Starting - finding best matching destination for '{DestinationName}'", input.DestinationName);
logger.LogTrace("[ResearcherExecutor] Input: {@Input}", input);
- await context.AddEventAsync(new ExecutorStatusEvent("Searching destinations..."));
-
- // Step 1: Use RAG to find semantically similar destinations
- var candidates = await dataService.SearchLandmarksAsync(input.DestinationName, MaxRagCandidates);
-
- logger.LogDebug("[ResearcherExecutor] RAG returned {Count} candidates: {Names}",
- candidates.Count, string.Join(", ", candidates.Select(c => c.Name)));
-
- if (candidates.Count == 0)
- {
- logger.LogDebug("[ResearcherExecutor] No candidates found");
- await context.AddEventAsync(new ExecutorStatusEvent("No matching destinations found"));
- return new ResearchResult(null, input.DayCount, input.Language);
- }
+ await context.AddEventAsync(new ExecutorStatusEvent("Searching destinations..."), cancellationToken);
- // If only one candidate, use it directly without LLM call
- if (candidates.Count == 1)
- {
- var singleMatch = candidates[0];
- logger.LogDebug("[ResearcherExecutor] Single candidate found: {Name}", singleMatch.Name);
- await context.AddEventAsync(new ExecutorStatusEvent($"Found destination: {singleMatch.Name}"));
- return new ResearchResult(singleMatch, input.DayCount, input.Language);
- }
-
- await context.AddEventAsync(new ExecutorStatusEvent($"Evaluating {candidates.Count} candidates..."));
-
- // Step 2: Ask LLM to pick the best match from RAG candidates
- var candidateDescriptions = string.Join("\n", candidates.Select(c =>
- $"- {c.Name}: {c.ShortDescription}"));
-
- var prompt = $"""
- The user wants to visit: "{input.DestinationName}"
-
- Here are the available destinations that might match:
- {candidateDescriptions}
-
- Which destination best matches what the user is looking for?
- """;
+ // TextSearchProvider (configured via CreateAgent) automatically searches
+ // DataService.SearchLandmarksAsync and injects results as context before
+ // the AI call. We just need to ask the AI to pick the best match.
+ var prompt = input.DestinationName;
logger.LogTrace("[ResearcherExecutor] Prompt: {Prompt}", prompt);
- var runOptions = new ChatClientAgentRunOptions(new ChatOptions
- {
- ResponseFormat = ChatResponseFormat.ForJsonSchema(jsonOptions)
- });
-
- var response = await agent.RunAsync(prompt, options: runOptions, cancellationToken: cancellationToken);
+ var response = await agent.RunAsync(prompt, cancellationToken: cancellationToken);
logger.LogTrace("[ResearcherExecutor] Raw response: {Response}", response.Text);
- // Parse the AI's response to get the matched destination name
- var matchResult = JsonSerializer.Deserialize(response.Text, jsonOptions);
- var matchedName = matchResult?.MatchedDestinationName ?? input.DestinationName;
-
- logger.LogDebug("[ResearcherExecutor] AI selected '{MatchedName}' from candidates", matchedName);
+ // Parse the AI's response — both name and description come from RAG context
+ var matchResult = response.Result;
- // Find the landmark from candidates (prefer exact match from candidates)
- var landmark = candidates.FirstOrDefault(l => l.Name.Equals(matchedName, StringComparison.OrdinalIgnoreCase))
- ?? candidates[0]; // Fallback to top RAG result if LLM returned unexpected name
+ logger.LogDebug("[ResearcherExecutor] AI selected '{MatchedName}'", matchResult.MatchedDestinationName);
- var result = new ResearchResult(landmark, input.DayCount, input.Language);
+ var result = new ResearchResult(
+ matchResult.MatchedDestinationName,
+ matchResult.MatchedDestinationDescription,
+ input.DayCount,
+ input.Language);
- logger.LogDebug("[ResearcherExecutor] Completed - selected destination: {Name}", landmark.Name);
+ logger.LogDebug("[ResearcherExecutor] Completed - selected destination: {Name}", matchResult.MatchedDestinationName);
logger.LogTrace("[ResearcherExecutor] Output: {@Result}", result);
- await context.AddEventAsync(new ExecutorStatusEvent($"Found destination: {landmark.Name}"));
+ await context.AddEventAsync(new ExecutorStatusEvent($"Found destination: {matchResult.MatchedDestinationName}"), cancellationToken);
return result;
}
diff --git a/src/AI/samples/Essentials.AI.Sample/AI/3_ItineraryPlannerExecutor.cs b/src/AI/samples/Essentials.AI.Sample/AI/3_ItineraryPlannerExecutor.cs
index 52c60c46c301..c70a0e32c0b5 100644
--- a/src/AI/samples/Essentials.AI.Sample/AI/3_ItineraryPlannerExecutor.cs
+++ b/src/AI/samples/Essentials.AI.Sample/AI/3_ItineraryPlannerExecutor.cs
@@ -1,7 +1,4 @@
-using System.ComponentModel;
using System.Text;
-using System.Text.Json;
-using Maui.Controls.Sample.Models;
using Microsoft.Agents.AI;
using Microsoft.Agents.AI.Workflows;
using Microsoft.Extensions.AI;
@@ -11,71 +8,42 @@ namespace Maui.Controls.Sample.AI;
///
/// Agent 3: Itinerary Planner - Builds the travel itinerary with streaming output.
-/// Tools: findPointsOfInterest(destinationName, category, query)
+/// Tools are used to assist in generating the itinerary.
/// Uses RunStreamingAsync to emit partial JSON as it's generated.
///
-internal sealed class ItineraryPlannerExecutor(AIAgent agent, JsonSerializerOptions jsonOptions, ILogger logger)
- : Executor("ItineraryPlannerExecutor")
+internal sealed partial class ItineraryPlannerExecutor(AIAgent agent, ILogger logger)
+ : Executor("ItineraryPlannerExecutor")
{
- private IWorkflowContext? _context;
-
- public const string Instructions = $"""
- You create detailed travel itineraries.
-
- For each day include these places:
- 1. An activity or attraction
- 2. A hotel recommendation
- 3. A restaurant recommendation
-
- Rules:
- 1. ALWAYS use the `{FindPointsOfInterestToolName}` tool to discover real places near the destination.
- 2. NEVER make up places or use your own knowledge.
- 3. ONLY use places returned by the `{FindPointsOfInterestToolName}` tool.
- 4. PREFER the places returned by the `{FindPointsOfInterestToolName}` tool instead of the destination description.
-
- Give the itinerary a fun, creative title and engaging description.
-
- Include a rationale explaining why you chose these activities for the traveler.
- """;
-
- public const string FindPointsOfInterestToolName = "findPointsOfInterest";
-
- public override async ValueTask HandleAsync(
+ [MessageHandler]
+ private async ValueTask HandleAsync(
ResearchResult input,
IWorkflowContext context,
CancellationToken cancellationToken = default)
{
- _context = context;
-
- logger.LogDebug("[ItineraryPlannerExecutor] Starting - building {Days}-day itinerary for '{Landmark}'",
- input.DayCount, input.Landmark?.Name ?? "unknown");
+ logger.LogDebug("[ItineraryPlannerExecutor] Starting - building {Days}-day itinerary for '{Destination}'",
+ input.DayCount, input.DestinationName ?? "unknown");
logger.LogTrace("[ItineraryPlannerExecutor] Input: {@Input}", input);
- await context.AddEventAsync(new ExecutorStatusEvent("Building your itinerary..."));
+ await context.AddEventAsync(new ExecutorStatusEvent("Building your itinerary..."), cancellationToken);
- if (input.Landmark is null)
+ if (input.DestinationName is null)
{
- logger.LogDebug("[ItineraryPlannerExecutor] No landmark found - returning error");
- await context.AddEventAsync(new ExecutorStatusEvent("Error: No destination found"));
- return new ItineraryResult(JsonSerializer.Serialize(new { error = "Landmark not found" }), input.Language);
+ logger.LogDebug("[ItineraryPlannerExecutor] No destination found - returning error");
+ await context.AddEventAsync(new ExecutorStatusEvent("Error: No destination found"), cancellationToken);
+ return new ItineraryResult(System.Text.Json.JsonSerializer.Serialize(new { error = "Destination not found" }), input.Language);
}
var prompt = $"""
- Generate a {input.DayCount}-day itinerary to {input.Landmark.Name}.
- Destination description: {input.Landmark.Description}
+ Generate a {input.DayCount}-day itinerary to {input.DestinationName}.
+ Destination description: {input.DestinationDescription}
""";
logger.LogTrace("[ItineraryPlannerExecutor] Prompt: {Prompt}", prompt);
- var runOptions = new ChatClientAgentRunOptions(new ChatOptions
- {
- Tools = [AIFunctionFactory.Create(FindPointsOfInterestAsync, name: FindPointsOfInterestToolName)],
- ResponseFormat = ChatResponseFormat.ForJsonSchema(jsonOptions)
- });
-
// Use streaming to emit partial JSON as it's generated
+ // Tools and ResponseFormat are configured at agent level in ItineraryWorkflowExtensions
var fullResponse = new StringBuilder();
- await foreach (var update in agent.RunStreamingAsync(prompt, options: runOptions, cancellationToken: cancellationToken))
+ await foreach (var update in agent.RunStreamingAsync(prompt, cancellationToken: cancellationToken))
{
foreach (var content in update.Contents)
{
@@ -92,53 +60,8 @@ Generate a {input.DayCount}-day itinerary to {input.Landmark.Name}.
logger.LogTrace("[ItineraryPlannerExecutor] Raw response: {Response}", responseText);
logger.LogDebug("[ItineraryPlannerExecutor] Completed - itinerary generated, language: {Language}", input.Language);
- await context.AddEventAsync(new ExecutorStatusEvent($"Created {input.DayCount}-day itinerary for {input.Landmark.Name}"));
+ await context.AddEventAsync(new ExecutorStatusEvent($"Created {input.DayCount}-day itinerary for {input.DestinationName}"), cancellationToken);
return new ItineraryResult(responseText, input.Language);
}
-
- [Description("Finds points of interest (hotels, restaurants, activities) near a destination.")]
- private async Task FindPointsOfInterestAsync(
- [Description("The name of the destination to search near.")]
- string destinationName,
- [Description("The category of place to find (Hotel, Restaurant, Cafe, Museum, etc.).")]
- PointOfInterestCategory category,
- [Description("A natural language query to refine the search.")]
- string additionalSearchQuery)
- {
- if (_context is not null)
- {
- await _context.AddEventAsync(new ExecutorStatusEvent($"Finding {category}s near {destinationName}..."));
- }
-
- var suggestions = GetSuggestions(category);
- var result = $"""
- These {category} options are available near {destinationName}:
-
- - {string.Join(Environment.NewLine + "- ", suggestions)}
- """;
-
- logger.LogTrace("[ItineraryPlannerExecutor] findPointsOfInterest tool called - destination={Destination}, category={Category}, query={Query}, result={Result}",
- destinationName, category, additionalSearchQuery ?? "(none)", result);
-
- if (_context is not null)
- {
- await _context.AddEventAsync(new ExecutorStatusEvent($"Found {suggestions.Length} {category} options"));
- }
-
- return result;
- }
-
- private static string[] GetSuggestions(PointOfInterestCategory category) =>
- category switch
- {
- PointOfInterestCategory.Cafe => ["Cafe 1", "Cafe 2", "Cafe 3"],
- PointOfInterestCategory.Campground => ["Campground 1", "Campground 2", "Campground 3"],
- PointOfInterestCategory.Hotel => ["Hotel 1", "Hotel 2", "Hotel 3"],
- PointOfInterestCategory.Marina => ["Marina 1", "Marina 2", "Marina 3"],
- PointOfInterestCategory.Museum => ["Museum 1", "Museum 2", "Museum 3"],
- PointOfInterestCategory.NationalMonument => ["The National Rock 1", "The National Rock 2", "The National Rock 3"],
- PointOfInterestCategory.Restaurant => ["Restaurant 1", "Restaurant 2", "Restaurant 3"],
- _ => []
- };
}
diff --git a/src/AI/samples/Essentials.AI.Sample/AI/4_TranslatorExecutor.cs b/src/AI/samples/Essentials.AI.Sample/AI/4_TranslatorExecutor.cs
index 4e582c7b8a28..122494ece73e 100644
--- a/src/AI/samples/Essentials.AI.Sample/AI/4_TranslatorExecutor.cs
+++ b/src/AI/samples/Essentials.AI.Sample/AI/4_TranslatorExecutor.cs
@@ -1,6 +1,4 @@
using System.Text;
-using System.Text.Json;
-using Maui.Controls.Sample.Models;
using Microsoft.Agents.AI;
using Microsoft.Agents.AI.Workflows;
using Microsoft.Extensions.AI;
@@ -10,22 +8,14 @@ namespace Maui.Controls.Sample.AI;
///
/// Agent 4: Translator - Translates the itinerary to target language (conditional) with streaming.
-/// No tools - just translation. Uses RunStreamingAsync to emit partial translated JSON.
+/// No tools - just translation.
+/// Uses RunStreamingAsync to emit partial translated JSON.
///
-internal sealed class TranslatorExecutor(AIAgent agent, JsonSerializerOptions jsonOptions, ILogger logger)
- : Executor("TranslatorExecutor")
+internal sealed partial class TranslatorExecutor(AIAgent agent, ILogger logger)
+ : Executor("TranslatorExecutor")
{
- public const string Instructions = """
- You are a professional translator.
- Translate the provided JSON content to the target language.
-
- Rules:
- 1. ALWAYS preserve the JSON format exactly.
- 2. ONLY translate the text values within the JSON.
- 3. NEVER add explanations or commentary.
- """;
-
- public override async ValueTask HandleAsync(
+ [MessageHandler]
+ private async ValueTask HandleAsync(
ItineraryResult input,
IWorkflowContext context,
CancellationToken cancellationToken = default)
@@ -33,12 +23,7 @@ public override async ValueTask HandleAsync(
logger.LogDebug("[TranslatorExecutor] Starting - translating to '{Language}'", input.TargetLanguage);
logger.LogTrace("[TranslatorExecutor] Input JSON: {Json}", input.ItineraryJson);
- await context.AddEventAsync(new ExecutorStatusEvent($"Translating to {input.TargetLanguage}..."));
-
- var runOptions = new ChatClientAgentRunOptions(new ChatOptions
- {
- ResponseFormat = ChatResponseFormat.ForJsonSchema(jsonOptions)
- });
+ await context.AddEventAsync(new ExecutorStatusEvent($"Translating to {input.TargetLanguage}..."), cancellationToken);
var prompt = $"""
Translate to {input.TargetLanguage}:
@@ -49,8 +34,9 @@ public override async ValueTask HandleAsync(
logger.LogTrace("[TranslatorExecutor] Prompt: {Prompt}", prompt);
// Use streaming to emit partial JSON as it's generated
+ // ResponseFormat is set at agent creation time in ItineraryWorkflowExtensions
var fullResponse = new StringBuilder();
- await foreach (var update in agent.RunStreamingAsync(prompt, options: runOptions, cancellationToken: cancellationToken))
+ await foreach (var update in agent.RunStreamingAsync(prompt, cancellationToken: cancellationToken))
{
foreach (var content in update.Contents)
{
@@ -67,7 +53,7 @@ public override async ValueTask HandleAsync(
logger.LogTrace("[TranslatorExecutor] Raw response: {Response}", responseText);
logger.LogDebug("[TranslatorExecutor] Completed - translation to '{Language}' finished", input.TargetLanguage);
- await context.AddEventAsync(new ExecutorStatusEvent($"Translated to {input.TargetLanguage}"));
+ await context.AddEventAsync(new ExecutorStatusEvent($"Translated to {input.TargetLanguage}"), cancellationToken);
return new ItineraryResult(responseText, input.TargetLanguage);
}
diff --git a/src/AI/samples/Essentials.AI.Sample/AI/5_OutputExecutor.cs b/src/AI/samples/Essentials.AI.Sample/AI/5_OutputExecutor.cs
index 8b046e0f2a56..85610945ad9e 100644
--- a/src/AI/samples/Essentials.AI.Sample/AI/5_OutputExecutor.cs
+++ b/src/AI/samples/Essentials.AI.Sample/AI/5_OutputExecutor.cs
@@ -7,10 +7,11 @@ namespace Maui.Controls.Sample.AI;
/// Final executor that marks the workflow as complete.
/// The itinerary JSON has already been streamed by ItineraryPlannerExecutor or TranslatorExecutor.
///
-internal sealed class OutputExecutor(ILogger logger)
- : Executor("OutputExecutor")
+internal sealed partial class OutputExecutor(ILogger logger)
+ : Executor("OutputExecutor")
{
- public override async ValueTask HandleAsync(
+ [MessageHandler]
+ private async ValueTask HandleAsync(
ItineraryResult input,
IWorkflowContext context,
CancellationToken cancellationToken = default)
@@ -19,7 +20,7 @@ public override async ValueTask HandleAsync(
logger.LogTrace("[OutputExecutor] Final JSON: {Json}", input.ItineraryJson);
// Don't re-emit the JSON - it was already streamed by ItineraryPlannerExecutor or TranslatorExecutor
- await context.AddEventAsync(new ExecutorStatusEvent("Your itinerary is ready!"));
+ await context.AddEventAsync(new ExecutorStatusEvent("Your itinerary is ready!"), cancellationToken);
logger.LogDebug("[OutputExecutor] Completed - workflow finished");
}
diff --git a/src/AI/samples/Essentials.AI.Sample/AI/ItineraryWorkflowExtensions.cs b/src/AI/samples/Essentials.AI.Sample/AI/ItineraryWorkflowExtensions.cs
index a69c36f711b2..0efbe8a04ec2 100644
--- a/src/AI/samples/Essentials.AI.Sample/AI/ItineraryWorkflowExtensions.cs
+++ b/src/AI/samples/Essentials.AI.Sample/AI/ItineraryWorkflowExtensions.cs
@@ -1,9 +1,11 @@
using System.Text.Json;
using System.Text.Json.Serialization;
+using Maui.Controls.Sample.Models;
using Maui.Controls.Sample.Services;
using Microsoft.Agents.AI;
using Microsoft.Agents.AI.Hosting;
using Microsoft.Agents.AI.Workflows;
+using Microsoft.Extensions.AI;
using Microsoft.Extensions.Hosting;
using Microsoft.Extensions.Logging;
@@ -11,6 +13,8 @@ namespace Maui.Controls.Sample.AI;
///
/// Extension methods to register the 4-agent itinerary workflow.
+/// All agent configuration (instructions, tools, response formats, content providers) is
+/// defined here. Executors contain only execution logic (streaming, status events, prompt assembly).
///
public static class ItineraryWorkflowExtensions
{
@@ -30,29 +34,131 @@ public static class ItineraryWorkflowExtensions
///
public static IHostApplicationBuilder AddItineraryWorkflow(this IHostApplicationBuilder builder)
{
+ // Tool: findPointsOfInterest - used by Agent 3
+ var findPoiTool = AIFunctionFactory.Create(
+ ItineraryWorkflowTools.FindPointsOfInterestAsync,
+ name: ItineraryWorkflowTools.FindPointsOfInterestToolName);
+
// Agent 1: Travel Planner - parses natural language, extracts intent
builder.AddAIAgent(
name: "travel-planner-agent",
- instructions: TravelPlannerExecutor.Instructions,
+ instructions: """
+ You are a simple text parser.
+
+ Extract ONLY these 3 values from the user's request:
+ 1. destinationName: The place/location name mentioned (extract it exactly as written)
+ 2. dayCount: The number of days mentioned (default: 3 if not specified)
+ 3. language: The language mentioned for the output (default: English if not specified)
+
+ Rules:
+ 1. ALWAYS extract the raw values.
+ 2. NEVER make up values or interpret the user's intent.
+
+ Examples:
+ - "5-day trip to Maui in French" → destinationName: "Maui", dayCount: 5, language: "French"
+ - "Visit the Great Wall" → destinationName: "Great Wall", dayCount: 3, language: "English"
+ - "Itinerary for Tokyo" → destinationName: "Tokyo", dayCount: 3, language: "English"
+ - "Give me a Maui itinerary" → destinationName: "Maui", dayCount: 3, language: "English"
+ - "Plan a 7 day Japan trip in Spanish" → destinationName: "Japan", dayCount: 7, language: "Spanish"
+ """,
chatClientServiceKey: "local-model");
- // Agent 2: Researcher - finds best matching destination
- builder.AddAIAgent(
- name: "researcher-agent",
- instructions: ResearcherExecutor.Instructions,
- chatClientServiceKey: "local-model");
+ // Agent 2: Researcher - finds best matching destination using RAG via TextSearchProvider
+ builder.AddAIAgent("researcher-agent", (sp, name) =>
+ {
+ var chatClient = sp.GetRequiredKeyedService("local-model");
+ var dataService = sp.GetRequiredService();
+ var loggerFactory = sp.GetRequiredService();
- // Agent 3: Itinerary Planner - builds detailed itineraries
- builder.AddAIAgent(
- name: "itinerary-planner-agent",
- instructions: ItineraryPlannerExecutor.Instructions,
- chatClientServiceKey: "local-model");
+ var searchProvider = ItineraryWorkflowTools.CreateLandmarkSearchProvider(dataService, loggerFactory);
- // Agent 4: Translator - translates content
- builder.AddAIAgent(
- name: "translator-agent",
- instructions: TranslatorExecutor.Instructions,
- chatClientServiceKey: "cloud-model");
+ return chatClient.AsAIAgent(
+ new ChatClientAgentOptions
+ {
+ Name = name,
+ ChatOptions = new ChatOptions
+ {
+ Instructions = """
+ You are a travel researcher.
+ Your job is to select the best matching destination from the additional context provided.
+
+ Rules:
+ 1. You will be given additional context containing candidate destinations that match the user's request.
+ 2. Select the ONE destination that best matches what the user asked for.
+ 3. NEVER make up destinations - only choose from the provided candidates.
+ 4. If none of the candidates match well, pick the closest one.
+ 5. Include the destination's description from the context in your response.
+
+ Return the exact name of the best matching destination from the candidates.
+ """
+ },
+ AIContextProviders = [searchProvider],
+ },
+ loggerFactory);
+ });
+
+ // Agent 3: Itinerary Planner - builds detailed itineraries with tool calling
+ builder.AddAIAgent("itinerary-planner-agent", (sp, name) =>
+ {
+ var chatClient = sp.GetRequiredKeyedService("local-model");
+ var loggerFactory = sp.GetRequiredService();
+ return chatClient.AsAIAgent(
+ new ChatClientAgentOptions
+ {
+ Name = name,
+ ChatOptions = new ChatOptions
+ {
+ Instructions = $"""
+ You create detailed travel itineraries.
+
+ For each day include these places:
+ 1. An activity or attraction
+ 2. A hotel recommendation
+ 3. A restaurant recommendation
+
+ Rules:
+ 1. ALWAYS use the `{ItineraryWorkflowTools.FindPointsOfInterestToolName}` tool to discover real places near the destination.
+ 2. NEVER make up places or use your own knowledge.
+ 3. ONLY use places returned by the `{ItineraryWorkflowTools.FindPointsOfInterestToolName}` tool.
+ 4. PREFER the places returned by the `{ItineraryWorkflowTools.FindPointsOfInterestToolName}` tool instead of the destination description.
+
+ Give the itinerary a fun, creative title and engaging description.
+
+ Include a rationale explaining why you chose these activities for the traveler.
+ """,
+ ResponseFormat = ChatResponseFormat.ForJsonSchema(JsonOptions),
+ Tools = [findPoiTool],
+ },
+ },
+ loggerFactory,
+ services: sp);
+ });
+
+ // Agent 4: Translator - translates content with streaming
+ builder.AddAIAgent("translator-agent", (sp, name) =>
+ {
+ var chatClient = sp.GetRequiredKeyedService("cloud-model");
+ var loggerFactory = sp.GetRequiredService();
+ return chatClient.AsAIAgent(
+ new ChatClientAgentOptions
+ {
+ Name = name,
+ ChatOptions = new ChatOptions
+ {
+ Instructions = """
+ You are a professional translator.
+ Translate the provided JSON content to the target language.
+
+ Rules:
+ 1. ALWAYS preserve the JSON format exactly.
+ 2. ONLY translate the text values within the JSON.
+ 3. NEVER add explanations or commentary.
+ """,
+ ResponseFormat = ChatResponseFormat.ForJsonSchema(JsonOptions),
+ },
+ },
+ loggerFactory);
+ });
// Register the workflow
var workflow = builder.AddWorkflow("itinerary-workflow", (sp, key) =>
@@ -61,14 +167,13 @@ public static IHostApplicationBuilder AddItineraryWorkflow(this IHostApplication
var researcherAgent = sp.GetRequiredKeyedService("researcher-agent");
var itineraryPlannerAgent = sp.GetRequiredKeyedService("itinerary-planner-agent");
var translatorAgent = sp.GetRequiredKeyedService("translator-agent");
- var landmarkService = sp.GetRequiredService();
var logger = sp.GetRequiredService().CreateLogger("ItineraryWorkflow");
- // Create executors for each agent with logging
- var travelPlannerExecutor = new TravelPlannerExecutor(travelPlannerAgent, JsonOptions, logger);
- var researcherExecutor = new ResearcherExecutor(researcherAgent, landmarkService, JsonOptions, logger);
- var itineraryPlannerExecutor = new ItineraryPlannerExecutor(itineraryPlannerAgent, JsonOptions, logger);
- var translatorExecutor = new TranslatorExecutor(translatorAgent, JsonOptions, logger);
+ // Create executors — thin wrappers with just execution logic
+ var travelPlannerExecutor = new TravelPlannerExecutor(travelPlannerAgent, logger);
+ var researcherExecutor = new ResearcherExecutor(researcherAgent, logger);
+ var itineraryPlannerExecutor = new ItineraryPlannerExecutor(itineraryPlannerAgent, logger);
+ var translatorExecutor = new TranslatorExecutor(translatorAgent, logger);
var outputExecutor = new OutputExecutor(logger);
// Build the 4-agent workflow with conditional translation:
@@ -77,10 +182,9 @@ public static IHostApplicationBuilder AddItineraryWorkflow(this IHostApplication
.WithName(key)
.AddEdge(travelPlannerExecutor, researcherExecutor)
.AddEdge(researcherExecutor, itineraryPlannerExecutor)
- // English path: skip translation
- .AddEdge(itineraryPlannerExecutor, outputExecutor, condition: IsEnglish)
- // Non-English path: translate first
- .AddEdge(itineraryPlannerExecutor, translatorExecutor, condition: NeedsTranslation)
+ .AddSwitch(itineraryPlannerExecutor, switch_ => switch_
+ .AddCase(r => r is not null && !string.Equals(r.TargetLanguage, "English", StringComparison.OrdinalIgnoreCase), translatorExecutor)
+ .WithDefault(outputExecutor))
.AddEdge(translatorExecutor, outputExecutor)
.WithOutputFrom(outputExecutor)
.Build();
@@ -93,10 +197,4 @@ public static IHostApplicationBuilder AddItineraryWorkflow(this IHostApplication
return builder;
}
-
- private static bool IsEnglish(ItineraryResult? result) =>
- result is not null && string.Equals(result.TargetLanguage, "English", StringComparison.OrdinalIgnoreCase);
-
- private static bool NeedsTranslation(ItineraryResult? result) =>
- result is not null && !string.Equals(result.TargetLanguage, "English", StringComparison.OrdinalIgnoreCase);
}
diff --git a/src/AI/samples/Essentials.AI.Sample/AI/ItineraryWorkflowTools.cs b/src/AI/samples/Essentials.AI.Sample/AI/ItineraryWorkflowTools.cs
new file mode 100644
index 000000000000..c84c6b3e85e9
--- /dev/null
+++ b/src/AI/samples/Essentials.AI.Sample/AI/ItineraryWorkflowTools.cs
@@ -0,0 +1,84 @@
+using System.ComponentModel;
+using Maui.Controls.Sample.Models;
+using Maui.Controls.Sample.Services;
+using Microsoft.Agents.AI;
+using Microsoft.Extensions.AI;
+using Microsoft.Extensions.Logging;
+
+namespace Maui.Controls.Sample.AI;
+
+///
+/// Static tool functions and content providers used by the itinerary workflow.
+/// Defined here so they can be registered centrally in .
+///
+internal static class ItineraryWorkflowTools
+{
+ public const string FindPointsOfInterestToolName = "findPointsOfInterest";
+
+ ///
+ /// Creates a that performs RAG via .
+ /// The provider runs in BeforeAIInvoke mode, automatically searching for matching landmarks
+ /// and injecting them as context before each AI call.
+ ///
+ public static TextSearchProvider CreateLandmarkSearchProvider(DataService dataService, ILoggerFactory loggerFactory)
+ {
+ var ragLogger = loggerFactory.CreateLogger();
+
+ return new TextSearchProvider(
+ async (query, ct) =>
+ {
+ ragLogger.LogDebug("[RAG] Searching landmarks for query: '{Query}'", query);
+ var results = await dataService.SearchLandmarksAsync(query, maxResults: 5);
+ ragLogger.LogDebug("[RAG] Found {Count} landmarks: {Names}",
+ results.Count, string.Join(", ", results.Select(r => r.Name)));
+ return results.Select(r => new TextSearchProvider.TextSearchResult
+ {
+ Text = $"{r.Name}: {r.ShortDescription}",
+ SourceName = r.Name,
+ });
+ },
+ new TextSearchProviderOptions
+ {
+ SearchTime = TextSearchProviderOptions.TextSearchBehavior.BeforeAIInvoke,
+ },
+ loggerFactory);
+ }
+
+ [Description("Finds points of interest (hotels, restaurants, activities) near a destination.")]
+ public static Task FindPointsOfInterestAsync(
+ [Description("The name of the destination to search near.")]
+ string destinationName,
+ [Description("The category of place to find (Hotel, Restaurant, Cafe, Museum, etc.).")]
+ PointOfInterestCategory category,
+ [Description("A natural language query to refine the search.")]
+ string additionalSearchQuery,
+ IServiceProvider services)
+ {
+ var logger = services.GetService()?.CreateLogger("ItineraryWorkflowTools");
+
+ var suggestions = GetSuggestions(category);
+ var result = $"""
+ These {category} options are available near {destinationName}:
+
+ - {string.Join(Environment.NewLine + "- ", suggestions)}
+ """;
+
+ logger?.LogTrace("[ItineraryWorkflowTools] findPointsOfInterest - destination={Destination}, category={Category}, query={Query}, results={Count}",
+ destinationName, category, additionalSearchQuery ?? "(none)", suggestions.Length);
+
+ return Task.FromResult(result);
+ }
+
+ private static string[] GetSuggestions(PointOfInterestCategory category) =>
+ category switch
+ {
+ PointOfInterestCategory.Cafe => ["Cafe 1", "Cafe 2", "Cafe 3"],
+ PointOfInterestCategory.Campground => ["Campground 1", "Campground 2", "Campground 3"],
+ PointOfInterestCategory.Hotel => ["Hotel 1", "Hotel 2", "Hotel 3"],
+ PointOfInterestCategory.Marina => ["Marina 1", "Marina 2", "Marina 3"],
+ PointOfInterestCategory.Museum => ["Museum 1", "Museum 2", "Museum 3"],
+ PointOfInterestCategory.NationalMonument => ["The National Rock 1", "The National Rock 2", "The National Rock 3"],
+ PointOfInterestCategory.Restaurant => ["Restaurant 1", "Restaurant 2", "Restaurant 3"],
+ _ => []
+ };
+}
diff --git a/src/AI/samples/Essentials.AI.Sample/AI/NonFunctionInvokingChatClient.cs b/src/AI/samples/Essentials.AI.Sample/AI/NonFunctionInvokingChatClient.cs
deleted file mode 100644
index d2cd6b29d93e..000000000000
--- a/src/AI/samples/Essentials.AI.Sample/AI/NonFunctionInvokingChatClient.cs
+++ /dev/null
@@ -1,230 +0,0 @@
-using System.Runtime.CompilerServices;
-using System.Text.Json;
-using Microsoft.Extensions.AI;
-using Microsoft.Extensions.Logging;
-using Microsoft.Extensions.Logging.Abstractions;
-
-namespace Maui.Controls.Sample.AI;
-
-///
-/// A chat client wrapper that prevents Agent Framework from adding its own function invocation layer.
-///
-///
-///
-/// Some chat clients handle tool invocation internally - when tools are registered, the underlying
-/// service invokes them automatically and returns the results. However, Agent Framework's
-/// ChatClientAgent also tries to invoke tools when it sees
-/// in the response, causing double invocation.
-///
-///
-/// This wrapper solves the problem by:
-///
-/// - The inner handler converts and
-/// to internal marker types that doesn't recognize
-/// - We wrap the handler with a real , satisfying
-/// Agent Framework's GetService<FunctionInvokingChatClient>() check so it won't create another
-/// - The outer layer unwraps the marker types back to the original content types for the caller
-///
-///
-///
-/// When the employed enables , the contents of
-/// function calls and results are logged. These may contain sensitive application data.
-/// is disabled by default and should never be enabled in a production environment.
-///
-///
-/// Use this wrapper for any that handles its own tool invocation, such as
-/// on-device models (Apple Intelligence, etc.) or remote services that invoke tools server-side.
-///
-///
-public sealed partial class NonFunctionInvokingChatClient : DelegatingChatClient
-{
- private readonly ILogger _logger;
-
- ///
- /// Initializes a new instance of the class.
- ///
- /// The to wrap.
- /// Optional logger factory for logging function invocations.
- /// Optional service provider for dependency resolution.
- public NonFunctionInvokingChatClient(
- IChatClient innerClient,
- ILoggerFactory? loggerFactory = null,
- IServiceProvider? serviceProvider = null)
- : base(CreateInnerClient(innerClient, loggerFactory, serviceProvider))
- {
- _logger = (ILogger?)loggerFactory?.CreateLogger() ?? NullLogger.Instance;
- }
-
- private static FunctionInvokingChatClient CreateInnerClient(
- IChatClient innerClient,
- ILoggerFactory? loggerFactory,
- IServiceProvider? serviceProvider)
- {
- ArgumentNullException.ThrowIfNull(innerClient);
- var handler = new ToolCallPassThroughHandler(innerClient);
- return new FunctionInvokingChatClient(handler, loggerFactory, serviceProvider);
- }
-
- ///
- public override async Task GetResponseAsync(
- IEnumerable messages,
- ChatOptions? options = null,
- CancellationToken cancellationToken = default)
- {
- var response = await base.GetResponseAsync(messages, options, cancellationToken).ConfigureAwait(false);
- foreach (var message in response.Messages)
- {
- message.Contents.Unwrap(this);
- }
- return response;
- }
-
- ///
- public override async IAsyncEnumerable GetStreamingResponseAsync(
- IEnumerable messages,
- ChatOptions? options = null,
- [EnumeratorCancellation] CancellationToken cancellationToken = default)
- {
- await foreach (var update in base.GetStreamingResponseAsync(messages, options, cancellationToken).ConfigureAwait(false))
- {
- update.Contents.Unwrap(this);
- yield return update;
- }
- }
-
- internal void LogFunctionInvoking(string functionName, string callId, IDictionary? arguments)
- {
- if (_logger.IsEnabled(LogLevel.Trace) && arguments is not null)
- {
- var argsJson = JsonSerializer.Serialize(arguments, AIJsonUtilities.DefaultOptions);
- LogToolInvokedSensitive(functionName, callId, argsJson);
- }
- else if (_logger.IsEnabled(LogLevel.Debug))
- {
- LogToolInvoked(functionName, callId);
- }
- }
-
- internal void LogFunctionInvocationCompleted(string callId, object? result)
- {
- if (_logger.IsEnabled(LogLevel.Trace) && result is not null)
- {
- var resultJson = result is string s ? s : JsonSerializer.Serialize(result, AIJsonUtilities.DefaultOptions);
- LogToolInvocationCompletedSensitive(callId, resultJson);
- }
- else if (_logger.IsEnabled(LogLevel.Debug))
- {
- LogToolInvocationCompleted(callId);
- }
- }
-
- [LoggerMessage(LogLevel.Debug, "Received tool call: {ToolName} (ID: {ToolCallId})")]
- private partial void LogToolInvoked(string toolName, string toolCallId);
-
- [LoggerMessage(LogLevel.Trace, "Received tool call: {ToolName} (ID: {ToolCallId}) with arguments: {Arguments}")]
- private partial void LogToolInvokedSensitive(string toolName, string toolCallId, string arguments);
-
- [LoggerMessage(LogLevel.Debug, "Received tool result for call ID: {ToolCallId}")]
- private partial void LogToolInvocationCompleted(string toolCallId);
-
- [LoggerMessage(LogLevel.Trace, "Received tool result for call ID: {ToolCallId}: {Result}")]
- private partial void LogToolInvocationCompletedSensitive(string toolCallId, string result);
-
- ///
- /// Handler that wraps the inner client and converts tool call/result content to server-handled types.
- ///
- private sealed class ToolCallPassThroughHandler(IChatClient innerClient) : DelegatingChatClient(innerClient)
- {
- public override async Task GetResponseAsync(
- IEnumerable messages,
- ChatOptions? options = null,
- CancellationToken cancellationToken = default)
- {
- var response = await base.GetResponseAsync(messages, options, cancellationToken).ConfigureAwait(false);
- foreach (var message in response.Messages)
- {
- message.Contents.Wrap();
- }
- return response;
- }
-
- public override async IAsyncEnumerable GetStreamingResponseAsync(
- IEnumerable messages,
- ChatOptions? options = null,
- [EnumeratorCancellation] CancellationToken cancellationToken = default)
- {
- await foreach (var update in base.GetStreamingResponseAsync(messages, options, cancellationToken).ConfigureAwait(false))
- {
- update.Contents.Wrap();
- yield return update;
- }
- }
- }
-}
-
-file static class Extensions
-{
- ///
- /// Wraps any or in the contents list.
- ///
- /// The list of contents to wrap.
- public static void Wrap(this IList contents)
- {
- for (var i = 0; i < contents.Count; i++)
- {
- if (contents[i] is FunctionCallContent fcc)
- {
- // The inner client already handled this tool call - wrap it so FICC ignores it
- contents[i] = new ServerFunctionCallContent(fcc);
- }
- else if (contents[i] is FunctionResultContent frc)
- {
- // The inner client already produced this result - wrap it so FICC ignores it
- contents[i] = new ServerFunctionResultContent(frc);
- }
- }
- }
-
- ///
- /// Unwraps any or in the contents list
- /// and logs the function invocations.
- ///
- /// The list of contents to unwrap.
- /// The client to use for logging.
- public static void Unwrap(this IList contents, NonFunctionInvokingChatClient client)
- {
- for (var i = 0; i < contents.Count; i++)
- {
- if (contents[i] is ServerFunctionCallContent serverFcc)
- {
- var fcc = serverFcc.FunctionCallContent;
- client.LogFunctionInvoking(fcc.Name, fcc.CallId, fcc.Arguments);
- contents[i] = fcc;
- }
- else if (contents[i] is ServerFunctionResultContent serverFrc)
- {
- var frc = serverFrc.FunctionResultContent;
- client.LogFunctionInvocationCompleted(frc.CallId, frc.Result);
- contents[i] = frc;
- }
- }
- }
-
- ///
- /// Marker type for function calls that were already handled by the inner client.
- /// only looks for ,
- /// so this type passes through without triggering function invocation.
- ///
- private sealed class ServerFunctionCallContent(FunctionCallContent functionCallContent) : AIContent
- {
- public FunctionCallContent FunctionCallContent { get; } = functionCallContent;
- }
-
- ///
- /// Marker type for function results that were already produced by the inner client.
- ///
- private sealed class ServerFunctionResultContent(FunctionResultContent functionResultContent) : AIContent
- {
- public FunctionResultContent FunctionResultContent { get; } = functionResultContent;
- }
-}
diff --git a/src/AI/samples/Essentials.AI.Sample/AI/WorkflowModels.cs b/src/AI/samples/Essentials.AI.Sample/AI/WorkflowModels.cs
index 7f9d982ff403..dea298dd0ee6 100644
--- a/src/AI/samples/Essentials.AI.Sample/AI/WorkflowModels.cs
+++ b/src/AI/samples/Essentials.AI.Sample/AI/WorkflowModels.cs
@@ -1,5 +1,4 @@
using System.ComponentModel;
-using Maui.Controls.Sample.Models;
namespace Maui.Controls.Sample.AI;
@@ -19,18 +18,22 @@ public record TravelPlanResult(
string Language);
///
-/// Result from the Researcher Agent - the best matching destination name (for JSON schema).
+/// Result from the Researcher Agent - the best matching destination (for JSON schema).
///
internal record DestinationMatchResult(
[property: DisplayName("matchedDestinationName")]
[property: Description("The exact name of the best matching destination from the available list.")]
- string MatchedDestinationName);
+ string MatchedDestinationName,
+ [property: DisplayName("matchedDestinationDescription")]
+ [property: Description("A brief description of the matched destination, based on the information provided in the additional context.")]
+ string MatchedDestinationDescription);
///
-/// Result from the Researcher Agent - includes full landmark details.
+/// Result from the Researcher Agent - includes destination name and description from RAG context.
///
public record ResearchResult(
- Landmark? Landmark,
+ string? DestinationName,
+ string? DestinationDescription,
int DayCount,
string Language);
diff --git a/src/AI/samples/Essentials.AI.Sample/Essentials.AI.Sample.csproj b/src/AI/samples/Essentials.AI.Sample/Essentials.AI.Sample.csproj
index 44e76794dac9..dd675e590bbf 100644
--- a/src/AI/samples/Essentials.AI.Sample/Essentials.AI.Sample.csproj
+++ b/src/AI/samples/Essentials.AI.Sample/Essentials.AI.Sample.csproj
@@ -32,14 +32,16 @@
-
-
+
+
+
+
diff --git a/src/AI/samples/Essentials.AI.Sample/MauiProgram.cs b/src/AI/samples/Essentials.AI.Sample/MauiProgram.cs
index 2346fb53c0ab..2d95fd1146f5 100644
--- a/src/AI/samples/Essentials.AI.Sample/MauiProgram.cs
+++ b/src/AI/samples/Essentials.AI.Sample/MauiProgram.cs
@@ -52,6 +52,7 @@ public static MauiApp CreateMauiApp()
// Register ViewModels
builder.Services.AddTransient();
builder.Services.AddTransient();
+ builder.Services.AddSingleton();
// Register Services
builder.Services.AddSingleton();
@@ -59,6 +60,7 @@ public static MauiApp CreateMauiApp()
builder.Services.AddTransient();
builder.Services.AddTransient();
builder.Services.AddHttpClient();
+ builder.Services.AddSingleton();
// Configure Logging
builder.Services.AddLogging();
@@ -110,9 +112,6 @@ private static MauiAppBuilder AddAppleIntelligenceServices(this MauiAppBuilder b
return appleClient
.AsBuilder()
.UseLogging(loggerFactory)
- // This prevents double tool invocation when using Microsoft Agent Framework
- // TODO: workaround for https://github.com/dotnet/extensions/issues/7204
- .Use(cc => new NonFunctionInvokingChatClient(cc, loggerFactory, sp))
.Build();
});
@@ -183,7 +182,6 @@ private static MauiAppBuilder AddOpenAIServices(this MauiAppBuilder builder)
});
// Add chat client for local model with function calling
- // TODO: Replace with actual local model client when available
builder.Services.AddKeyedSingleton("local-model", (provider, _) =>
{
var lf = provider.GetRequiredService();
diff --git a/src/AI/samples/Essentials.AI.Sample/Models/Landmark.cs b/src/AI/samples/Essentials.AI.Sample/Models/Landmark.cs
index 94b5457614c1..1edf16df6fa6 100644
--- a/src/AI/samples/Essentials.AI.Sample/Models/Landmark.cs
+++ b/src/AI/samples/Essentials.AI.Sample/Models/Landmark.cs
@@ -33,8 +33,9 @@ public record Landmark
public Location Location => new(Latitude, Longitude);
///
- /// Embedding vector generated from Name and ShortDescription for RAG search.
+ /// Embedding vectors generated from the name, short description, and individual
+ /// sentences of the full description for multi-granularity semantic search.
///
[JsonIgnore]
- public Embedding? Embedding { get; set; }
+ public IReadOnlyList>? Embeddings { get; set; }
}
diff --git a/src/AI/samples/Essentials.AI.Sample/Models/PointOfInterest.cs b/src/AI/samples/Essentials.AI.Sample/Models/PointOfInterest.cs
index 1f17752b7425..7d95e45d47d9 100644
--- a/src/AI/samples/Essentials.AI.Sample/Models/PointOfInterest.cs
+++ b/src/AI/samples/Essentials.AI.Sample/Models/PointOfInterest.cs
@@ -11,8 +11,11 @@ public class PointOfInterest
public string Description { get; set; } = string.Empty;
+ ///
+ /// Embedding vectors generated from the name and description for semantic search.
+ ///
[JsonIgnore]
- public Embedding? Embedding { get; set; }
+ public IReadOnlyList>? Embeddings { get; set; }
}
public enum PointOfInterestCategory
diff --git a/src/AI/samples/Essentials.AI.Sample/Pages/LandmarksPage.xaml b/src/AI/samples/Essentials.AI.Sample/Pages/LandmarksPage.xaml
index 8dabc74f7b16..d8e36611f2b9 100644
--- a/src/AI/samples/Essentials.AI.Sample/Pages/LandmarksPage.xaml
+++ b/src/AI/samples/Essentials.AI.Sample/Pages/LandmarksPage.xaml
@@ -69,6 +69,53 @@
FontSize="24"
Background="{AppThemeBinding Light=#80FFFFFF, Dark=#80000000}"
TextColor="{AppThemeBinding Light={StaticResource Gray900}, Dark=White}" />
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/src/AI/samples/Essentials.AI.Sample/Pages/LandmarksPage.xaml.cs b/src/AI/samples/Essentials.AI.Sample/Pages/LandmarksPage.xaml.cs
index 5cd318b64352..82fbf60c3a00 100644
--- a/src/AI/samples/Essentials.AI.Sample/Pages/LandmarksPage.xaml.cs
+++ b/src/AI/samples/Essentials.AI.Sample/Pages/LandmarksPage.xaml.cs
@@ -1,22 +1,53 @@
using Maui.Controls.Sample.Models;
using Maui.Controls.Sample.ViewModels;
+using Maui.Controls.Sample.Views;
namespace Maui.Controls.Sample.Pages;
public partial class LandmarksPage : ContentPage
{
private readonly LandmarksViewModel _viewModel;
+ private readonly ChatOverlayView _chatOverlay;
+ private readonly ChatViewModel _chatViewModel;
- public LandmarksPage(LandmarksViewModel viewModel)
+ public LandmarksPage(LandmarksViewModel viewModel, ChatViewModel chatViewModel)
{
InitializeComponent();
_viewModel = viewModel;
+ _chatViewModel = chatViewModel;
BindingContext = viewModel;
+ _chatOverlay = new ChatOverlayView();
+ _chatOverlay.Initialize(chatViewModel);
+
Loaded += async (_, _) => await viewModel.InitializeAsync();
}
+ protected override void OnAppearing()
+ {
+ base.OnAppearing();
+ _chatViewModel.ChatService.NavigateToTripRequested += OnNavigateToTrip;
+ }
+
+ protected override void OnDisappearing()
+ {
+ _chatViewModel.ChatService.NavigateToTripRequested -= OnNavigateToTrip;
+ base.OnDisappearing();
+ }
+
+ private async void OnNavigateToTrip(Landmark landmark)
+ {
+ // Close chat overlay first if open
+ await _chatOverlay.Hide();
+
+ var parameters = new Dictionary
+ {
+ { "Landmark", landmark }
+ };
+ await Shell.Current.GoToAsync(nameof(TripPlanningPage), parameters);
+ }
+
private async void OnLandmarkTapped(object? sender, Landmark landmark)
{
var parameters = new Dictionary
@@ -40,4 +71,21 @@ private async void OnLanguageButtonClicked(object? sender, EventArgs e)
_viewModel.SelectedLanguage = action;
}
}
+
+ private async void OnChatButtonClicked(object? sender, EventArgs e)
+ {
+ ChatFab.IsVisible = false;
+ var grid = (Grid)Content;
+ grid.Children.Add(_chatOverlay);
+ _chatOverlay.Closed += OnChatOverlayClosed;
+ await _chatOverlay.Show();
+ }
+
+ private void OnChatOverlayClosed(object? sender, EventArgs e)
+ {
+ _chatOverlay.Closed -= OnChatOverlayClosed;
+ var grid = (Grid)Content;
+ grid.Children.Remove(_chatOverlay);
+ ChatFab.IsVisible = true;
+ }
}
diff --git a/src/AI/samples/Essentials.AI.Sample/Pages/TripPlanningPage.xaml b/src/AI/samples/Essentials.AI.Sample/Pages/TripPlanningPage.xaml
index 56e4cc4b896a..1c0a5d4f8510 100644
--- a/src/AI/samples/Essentials.AI.Sample/Pages/TripPlanningPage.xaml
+++ b/src/AI/samples/Essentials.AI.Sample/Pages/TripPlanningPage.xaml
@@ -25,6 +25,24 @@
FontSize="24" Background="{AppThemeBinding Light=#80FFFFFF, Dark=#80000000}"
TextColor="{AppThemeBinding Light={StaticResource Gray900}, Dark=White}" />
+
+
+
diff --git a/src/AI/samples/Essentials.AI.Sample/Pages/TripPlanningPage.xaml.cs b/src/AI/samples/Essentials.AI.Sample/Pages/TripPlanningPage.xaml.cs
index 13108c05d78d..a64af342bf67 100644
--- a/src/AI/samples/Essentials.AI.Sample/Pages/TripPlanningPage.xaml.cs
+++ b/src/AI/samples/Essentials.AI.Sample/Pages/TripPlanningPage.xaml.cs
@@ -1,15 +1,21 @@
using Maui.Controls.Sample.ViewModels;
+using Maui.Controls.Sample.Views;
namespace Maui.Controls.Sample.Pages;
public partial class TripPlanningPage : ContentPage
{
- public TripPlanningPage(TripPlanningViewModel viewModel)
+ private readonly ChatOverlayView _chatOverlay;
+
+ public TripPlanningPage(TripPlanningViewModel viewModel, ChatViewModel chatViewModel)
{
InitializeComponent();
BindingContext = viewModel;
+ _chatOverlay = new ChatOverlayView();
+ _chatOverlay.Initialize(chatViewModel);
+
Loaded += async (_, _) => await viewModel.InitializeAsync();
NavigatingFrom += (_, _) => viewModel.Cancel();
@@ -19,4 +25,21 @@ private async void OnBackButtonClicked(object? sender, EventArgs e)
{
await Shell.Current.GoToAsync("..");
}
+
+ private async void OnChatButtonClicked(object? sender, EventArgs e)
+ {
+ ChatFab.IsVisible = false;
+ var grid = (Grid)Content;
+ grid.Children.Add(_chatOverlay);
+ _chatOverlay.Closed += OnChatOverlayClosed;
+ await _chatOverlay.Show();
+ }
+
+ private void OnChatOverlayClosed(object? sender, EventArgs e)
+ {
+ _chatOverlay.Closed -= OnChatOverlayClosed;
+ var grid = (Grid)Content;
+ grid.Children.Remove(_chatOverlay);
+ ChatFab.IsVisible = true;
+ }
}
diff --git a/src/AI/samples/Essentials.AI.Sample/Services/ChatService.cs b/src/AI/samples/Essentials.AI.Sample/Services/ChatService.cs
new file mode 100644
index 000000000000..22be98f02a9b
--- /dev/null
+++ b/src/AI/samples/Essentials.AI.Sample/Services/ChatService.cs
@@ -0,0 +1,229 @@
+using System.ComponentModel;
+using System.Text;
+using System.Text.Json;
+using Maui.Controls.Sample.Models;
+using Microsoft.Extensions.AI;
+
+namespace Maui.Controls.Sample.Services;
+
/// <summary>
/// Conversational travel-assistant service. Wraps an <see cref="IChatClient"/> and
/// exposes the app's data, weather, tagging and language services to the model as
/// callable tools, plus a navigation event for the "plan a trip" tool.
/// </summary>
public class ChatService
{
    // A property (not a cached field) so the embedded date is re-evaluated on
    // every conversation turn rather than frozen at first use.
    static string SystemPrompt
    {
        get
        {
            // Capture the clock once so the formatted date and day-of-week can
            // never disagree if this is read across a midnight boundary.
            var now = DateTime.Now;
            return $"""
                You are a helpful travel assistant for the .NET MAUI Trip Planner app. You have access to 21 world landmarks across 7 continents and can help users:
                - Search and discover destinations
                - Find nearby hotels, restaurants, cafes, and museums
                - Check weather forecasts
                - Generate social media hashtags for trips
                - Change the AI response language
                - Start planning a trip by navigating to the trip planner

                Today's date is {now:yyyy-MM-dd} ({now:dddd}).

                When users ask about destinations, search first to provide relevant results. When they want to plan a trip, use plan_trip to navigate them to the planning page.
                For weather requests with relative dates like "tomorrow" or "next week", calculate the actual date from today before calling get_weather.
                Be concise and helpful. Use the tools available to provide accurate information.
                """;
        }
    }

    readonly IChatClient _toolClient;
    readonly DataService _dataService;
    readonly WeatherService _weatherService;
    readonly TaggingService _taggingService;
    readonly LanguagePreferenceService _languageService;
    readonly IList<AITool> _tools;

    /// <summary>
    /// Raised (marshalled to the main thread) when the plan_trip tool asks to
    /// navigate to the trip planner for a landmark.
    /// </summary>
    public event Action<Landmark>? NavigateToTripRequested;

    public ChatService(
        IChatClient chatClient,
        DataService dataService,
        WeatherService weatherService,
        TaggingService taggingService,
        LanguagePreferenceService languageService)
    {
        _dataService = dataService;
        _weatherService = weatherService;
        _taggingService = taggingService;
        _languageService = languageService;

        _tools =
        [
            AIFunctionFactory.Create(SearchLandmarksAsync),
            AIFunctionFactory.Create(ListLandmarksByContinentAsync),
            AIFunctionFactory.Create(GetLandmarkDetailsAsync),
            AIFunctionFactory.Create(SearchPointsOfInterestAsync),
            AIFunctionFactory.Create(GetWeatherAsync),
            AIFunctionFactory.Create(GenerateTagsAsync),
            AIFunctionFactory.Create(SetLanguage),
            AIFunctionFactory.Create(PlanTripAsync),
        ];

        // Don't use FunctionInvokingChatClient here — Apple Intelligence handles
        // tool calling natively at the Swift layer. The tools are passed via ChatOptions
        // and invoked directly by FoundationModels through AIFunctionToolAdapter.
        _toolClient = chatClient;
    }

    /// <summary>
    /// Streams a model response for the given conversation, ensuring the system
    /// prompt is present without mutating the caller's message list.
    /// </summary>
    /// <param name="messages">Conversation so far; may or may not start with a system message.</param>
    /// <param name="cancellationToken">Cancels the streaming response.</param>
    public IAsyncEnumerable<ChatResponseUpdate> GetStreamingResponseAsync(
        IList<ChatMessage> messages, CancellationToken cancellationToken = default)
    {
        // Prepend system prompt without mutating the caller's list.
        IEnumerable<ChatMessage> effectiveMessages = (messages.Count == 0 || messages[0].Role != ChatRole.System)
            ? messages.Prepend(new ChatMessage(ChatRole.System, SystemPrompt))
            : messages;

        var options = new ChatOptions { Tools = _tools };

        return _toolClient.GetStreamingResponseAsync(effectiveMessages, options, cancellationToken);
    }

    [Description("Search for travel destinations by a natural language query. Uses semantic search to find the most relevant landmarks.")]
    async Task<string> SearchLandmarksAsync(
        [Description("A natural language search query, e.g. 'beaches in Asia' or 'ancient ruins'")] string query,
        [Description("Maximum number of results to return (1-10)")] int maxResults = 5)
    {
        // Clamp defensively — the model may request out-of-range counts.
        var results = await _dataService.SearchLandmarksAsync(query, Math.Clamp(maxResults, 1, 10));

        if (results.Count == 0)
            return "No landmarks found matching your query.";

        var sb = new StringBuilder();
        foreach (var landmark in results)
        {
            sb.AppendLine($"• **{landmark.Name}** ({landmark.Continent})");
            sb.AppendLine($"  {landmark.ShortDescription}");
        }
        return sb.ToString();
    }

    [Description("List all available landmarks in a specific continent.")]
    async Task<string> ListLandmarksByContinentAsync(
        [Description("The continent name, e.g. 'Europe', 'Asia', 'South America', 'Africa', 'North America', 'Australia/Oceania', 'Antarctica'")] string continent)
    {
        var groups = await _dataService.GetLandmarksByContinentAsync();

        // Substring match so e.g. "Australia" finds "Australia/Oceania".
        var match = groups.Keys.FirstOrDefault(k =>
            k.Contains(continent, StringComparison.OrdinalIgnoreCase));

        if (match is null || !groups.TryGetValue(match, out var landmarks))
            return $"No landmarks found for continent '{continent}'. Available: {string.Join(", ", groups.Keys)}";

        var sb = new StringBuilder();
        sb.AppendLine($"Landmarks in {match}:");
        foreach (var landmark in landmarks)
        {
            sb.AppendLine($"• **{landmark.Name}** — {landmark.ShortDescription}");
        }
        return sb.ToString();
    }

    [Description("Get detailed information about a specific landmark including its full description and location coordinates.")]
    async Task<string> GetLandmarkDetailsAsync(
        [Description("The name of the landmark to look up")] string landmarkName)
    {
        var landmark = await FindLandmarkByNameAsync(landmarkName);

        if (landmark is null)
            return $"Landmark '{landmarkName}' not found. Try searching with search_landmarks first.";

        return $"""
            **{landmark.Name}**
            Continent: {landmark.Continent}
            Coordinates: {landmark.Latitude:F4}, {landmark.Longitude:F4}

            {landmark.Description}
            """;
    }

    [Description("Find points of interest near a destination. Categories: Hotel, Restaurant, Cafe, Museum, Campground, Marina, NationalMonument.")]
    async Task<string> SearchPointsOfInterestAsync(
        [Description("The category of place to find")] PointOfInterestCategory category,
        [Description("A natural language query to refine the search, e.g. 'family friendly' or 'luxury'")] string query)
    {
        var results = await _dataService.SearchPointsOfInterestAsync(category, query, 5);

        if (results.Count == 0)
            return $"No {category} found matching your query.";

        var sb = new StringBuilder();
        sb.AppendLine($"Found {results.Count} {category} options:");
        foreach (var poi in results)
        {
            sb.AppendLine($"• **{poi.Name}** — {poi.Description}");
        }
        return sb.ToString();
    }

    [Description("Get the weather forecast for a landmark. Supports today and up to 7 days ahead.")]
    async Task<string> GetWeatherAsync(
        [Description("The name of the landmark to check weather for")] string landmarkName,
        [Description("The date to check weather for. Use today's date from the system prompt to calculate dates for 'tomorrow', 'next week', etc.")] DateTimeOffset date)
    {
        var landmark = await FindLandmarkByNameAsync(landmarkName);

        if (landmark is null)
            return $"Landmark '{landmarkName}' not found in the app database. Available landmarks can be found with search_landmarks or list_landmarks_by_continent.";

        var dateOnly = DateOnly.FromDateTime(date.DateTime);
        var today = DateOnly.FromDateTime(DateTime.Now);

        // Return explanatory strings (not exceptions) so the model can relay
        // range errors to the user and retry with a corrected date.
        if (dateOnly < today)
            return $"Cannot get weather for past date {dateOnly:yyyy-MM-dd}. The forecast supports today ({today:yyyy-MM-dd}) through {today.AddDays(7):yyyy-MM-dd}.";

        if (dateOnly > today.AddDays(7))
            return $"Cannot get weather for {dateOnly:yyyy-MM-dd} — too far ahead. The forecast supports today ({today:yyyy-MM-dd}) through {today.AddDays(7):yyyy-MM-dd}.";

        var weather = await _weatherService.GetWeatherForecastAsync(
            landmark.Latitude, landmark.Longitude, dateOnly);

        return $"Weather at {landmark.Name} on {dateOnly:yyyy-MM-dd} ({dateOnly:dddd}): {weather}";
    }

    [Description("Generate social media hashtags for a trip description or destination.")]
    async Task<string> GenerateTagsAsync(
        [Description("The text to generate hashtags for, e.g. a trip description or destination name")] string text)
    {
        try
        {
            var tags = await _taggingService.GenerateTagsAsync(text);
            return $"Suggested hashtags: {string.Join(" ", tags.Select(t => $"#{t}"))}";
        }
        catch
        {
            // Best-effort: tagging failures degrade to a polite message rather
            // than surfacing an exception through the tool-call pipeline.
            return "Unable to generate tags at this time.";
        }
    }

    [Description("Change the language for AI-generated responses. Supported: English, French, Spanish, German, Chinese, Japanese, Korean, Arabic, Indonesian, Italian, Portuguese.")]
    string SetLanguage(
        [Description("The language name to switch to, e.g. 'Spanish', 'French', 'Japanese'")] string language)
    {
        var match = _languageService.SupportedLanguages.Keys.FirstOrDefault(k =>
            k.Equals(language, StringComparison.OrdinalIgnoreCase));

        if (match is null)
            return $"Language '{language}' is not supported. Available: {string.Join(", ", _languageService.SupportedLanguages.Keys)}";

        _languageService.SelectedLanguage = match;
        return $"Language changed to {match}. AI-generated itineraries will now be in {match}.";
    }

    [Description("Navigate the user to the trip planning page to generate a detailed multi-day itinerary for a landmark. Use this when the user wants to plan or start a trip.")]
    async Task<string> PlanTripAsync(
        [Description("The name of the landmark to plan a trip to")] string landmarkName)
    {
        var landmark = await FindLandmarkByNameAsync(landmarkName);

        if (landmark is null)
            return $"Landmark '{landmarkName}' not found. Try searching with search_landmarks first.";

        // Tool calls may arrive on a background thread; navigation must run on UI.
        MainThread.BeginInvokeOnMainThread(() => NavigateToTripRequested?.Invoke(landmark));
        return $"Navigating to trip planner for {landmark.Name}! A multi-day itinerary will be generated for you.";
    }

    // Case-insensitive substring lookup so partial names ("Eiffel") resolve.
    async Task<Landmark?> FindLandmarkByNameAsync(string name)
    {
        var landmarks = await _dataService.GetLandmarksAsync();
        return landmarks.FirstOrDefault(l =>
            l.Name.Contains(name, StringComparison.OrdinalIgnoreCase));
    }
}
diff --git a/src/AI/samples/Essentials.AI.Sample/Services/DataService.cs b/src/AI/samples/Essentials.AI.Sample/Services/DataService.cs
index a6ee22b77b7b..c795b111fbbc 100644
--- a/src/AI/samples/Essentials.AI.Sample/Services/DataService.cs
+++ b/src/AI/samples/Essentials.AI.Sample/Services/DataService.cs
@@ -1,13 +1,14 @@
using System.Numerics.Tensors;
using System.Text.Json;
using System.Text.Json.Serialization;
+using System.Text.RegularExpressions;
using Maui.Controls.Sample.Models;
using Microsoft.Extensions.AI;
using Microsoft.Extensions.Logging;
namespace Maui.Controls.Sample.Services;
-public class DataService
+public partial class DataService
{
private static readonly JsonSerializerOptions _jsonSerializerOptions = new()
{
@@ -19,6 +20,7 @@ public class DataService
private readonly IEmbeddingGenerator> _generator;
private readonly ILogger _logger;
private readonly Task _initializationTask;
+ private Task? _embeddingsTask;
private List? _landmarks;
private List? _pointsOfInterest;
@@ -27,11 +29,18 @@ public class DataService
private Dictionary? _landmarksById;
private Landmark? _featuredLandmark;
+ ///
+ /// Raised on each embedding generated. Args: (current, total).
+ ///
+ public event Action? EmbeddingProgressChanged;
+
public DataService(IEmbeddingGenerator> generator, ILogger logger)
{
_generator = generator;
_logger = logger;
- _initializationTask = LoadLandmarksAsync();
+
+ _initializationTask = Task.Run(LoadLandmarksAsync);
+ _initializationTask.ContinueWith(_ => _embeddingsTask = Task.Run(GenerateEmbeddingsAsync));
}
///
@@ -62,13 +71,26 @@ public async Task>> GetLandmarksByCon
return _featuredLandmark;
}
+ ///
+ /// Waits for both data loading and embedding generation to complete.
+ /// Subscribe to before calling to receive progress updates.
+ ///
+ public async Task WaitUntilReadyAsync()
+ {
+ await _initializationTask;
+ if (_embeddingsTask is not null)
+ await _embeddingsTask;
+ }
+
public async Task> SearchLandmarksAsync(string query, int maxResults = 5)
{
await _initializationTask;
var candidates = _landmarks ?? [];
- return await SearchAsync(candidates, query, l => l.Embedding, maxResults);
+ return await SearchAsync(candidates, query,
+ l => l.Embeddings, maxResults,
+ l => $"{l.Name} {l.ShortDescription} {l.Description}");
}
public async Task> SearchPointsOfInterestAsync(PointOfInterestCategory category, string query, int maxResults = 3)
@@ -79,7 +101,9 @@ public async Task> SearchPointsOfInterestAsync(Po
? _pointsOfInterest ?? []
: _pointsOfInterest?.Where(p => p.Category == category).ToList() ?? [];
- return await SearchAsync(candidates, $"{category}: {query}", p => p.Embedding, maxResults);
+ return await SearchAsync(candidates, $"{category}: {query}",
+ p => p.Embeddings, maxResults,
+ p => $"{p.Name} {p.Description}");
}
private async Task LoadLandmarksAsync()
@@ -109,24 +133,36 @@ private async Task LoadLandmarksAsync()
_landmarksByContinent = new Dictionary>();
_landmarksById = new Dictionary();
}
-
- _ = GenerateEmbeddingsAsync();
}
private async Task GenerateEmbeddingsAsync()
{
try
{
+ var totalItems = (_landmarks?.Count ?? 0) + (_pointsOfInterest?.Count ?? 0);
+ var completed = 0;
+
foreach (var landmark in _landmarks!)
{
- var text = $"{landmark.Name}";
- landmark.Embedding = await _generator.GenerateAsync(text);
+ IEnumerable chunks = [
+ landmark.Name.ToLowerInvariant(),
+ $"{landmark.Name}. {landmark.ShortDescription}".ToLowerInvariant(),
+ .. SplitSentences(landmark.Description.ToLowerInvariant())];
+
+ landmark.Embeddings = await _generator.GenerateAsync(chunks);
+
+ EmbeddingProgressChanged?.Invoke(++completed, totalItems);
}
foreach (var poi in _pointsOfInterest!)
{
- var text = $"{poi.Name}. {poi.Description}";
- poi.Embedding = await _generator.GenerateAsync(text);
+ IEnumerable chunks = [
+ poi.Name.ToLowerInvariant(),
+ $"{poi.Name}. {poi.Description}".ToLowerInvariant()];
+
+ poi.Embeddings = await _generator.GenerateAsync(chunks);
+
+ EmbeddingProgressChanged?.Invoke(++completed, totalItems);
}
_logger.LogInformation("Successfully generated embeddings for {LandmarkCount} landmarks and {POICount} points of interest.", _landmarks?.Count ?? 0, _pointsOfInterest?.Count ?? 0);
@@ -137,11 +173,28 @@ private async Task GenerateEmbeddingsAsync()
}
}
+ [GeneratedRegex(@"(?<=[.!?])\s+", RegexOptions.Compiled)]
+ private static partial Regex SentenceBoundaryRegex();
+
+ private static IEnumerable SplitSentences(string text)
+ {
+ if (string.IsNullOrWhiteSpace(text))
+ yield break;
+
+ foreach (var sentence in SentenceBoundaryRegex().Split(text))
+ {
+ var trimmed = sentence.Trim();
+ if (trimmed.Length > 0)
+ yield return trimmed;
+ }
+ }
+
private async Task> SearchAsync(
IEnumerable candidates,
string query,
- Func?> embeddingSelector,
- int maxResults)
+ Func>?> embeddingsSelector,
+ int maxResults,
+ Func? textSelector = null)
{
var items = candidates as ICollection ?? [.. candidates];
if (items.Count == 0)
@@ -149,22 +202,51 @@ private async Task> SearchAsync(
return [];
}
- var searchEmbedding = await _generator.GenerateAsync(query);
+ var queryLower = query.ToLowerInvariant();
+ var searchEmbedding = await _generator.GenerateAsync(queryLower);
return items
- .Select(item => new
- {
- Item = item,
- Score = embeddingSelector(item) is Embedding embedding
- ? TensorPrimitives.CosineSimilarity(searchEmbedding.Vector.Span, embedding.Vector.Span)
- : -1f
- })
+ .Select(item => Similarity(item, queryLower, embeddingsSelector, textSelector, searchEmbedding))
.OrderByDescending(x => x.Score)
.Take(maxResults)
.Select(x => x.Item)
.ToList();
}
+ private static (T Item, float Score) Similarity(
+ T item,
+ string query,
+ Func>?> embeddingsSelector,
+ Func? textSelector,
+ Embedding searchEmbedding)
+ {
+ var embeddings = embeddingsSelector(item);
+ var score = -1f;
+
+ if (embeddings is not null)
+ {
+ foreach (var emb in embeddings)
+ {
+ var similarity = TensorPrimitives.CosineSimilarity(searchEmbedding.Vector.Span, emb.Vector.Span);
+ if (similarity > score)
+ {
+ score = similarity;
+ }
+ }
+ }
+
+ // Hybrid keyword boost: if the raw text contains the query as a
+ // substring, add a bonus to the embedding score. This follows the
+ // hybrid search pattern (keyword + vector) recommended by Azure AI
+ // Search — both signals contribute additively to the final rank.
+ if (textSelector is not null && textSelector(item).Contains(query, StringComparison.OrdinalIgnoreCase))
+ {
+ score += 0.5f;
+ }
+
+ return (Item: item, Score: score);
+ }
+
private static async Task> LoadDataAsync(string filename)
{
using var stream = await FileSystem.OpenAppPackageFileAsync(filename);
diff --git a/src/AI/samples/Essentials.AI.Sample/ViewModels/ChatBubbleViewModel.cs b/src/AI/samples/Essentials.AI.Sample/ViewModels/ChatBubbleViewModel.cs
new file mode 100644
index 000000000000..7ab18069046d
--- /dev/null
+++ b/src/AI/samples/Essentials.AI.Sample/ViewModels/ChatBubbleViewModel.cs
@@ -0,0 +1,33 @@
+using CommunityToolkit.Mvvm.ComponentModel;
+using CommunityToolkit.Mvvm.Input;
+
+namespace Maui.Controls.Sample.ViewModels;
+
/// <summary>Kinds of bubble rendered in the chat transcript.</summary>
public enum ChatBubbleType
{
    User,
    Assistant,
    ToolCall,
    ToolResult
}

/// <summary>
/// View model for a single chat bubble: its kind, display text, streaming state,
/// and (for tool bubbles) the tool name plus an expandable detail payload.
/// </summary>
public partial class ChatBubbleViewModel : ObservableObject
{
    /// <summary>Which template the bubble renders with; fixed at creation.</summary>
    public ChatBubbleType BubbleType { get; init; }

    /// <summary>Tool name for ToolCall/ToolResult bubbles; null otherwise.</summary>
    public string? ToolName { get; init; }

    /// <summary>Expandable detail (e.g. tool arguments or result); fixed at creation.</summary>
    public string? DetailText { get; init; }

    /// <summary>Main bubble text; appended to while a response streams in.</summary>
    [ObservableProperty]
    public partial string Text { get; set; } = string.Empty;

    /// <summary>True while response deltas are still arriving for this bubble.</summary>
    [ObservableProperty]
    public partial bool IsStreaming { get; set; }

    /// <summary>Whether the detail section is currently expanded.</summary>
    [ObservableProperty]
    public partial bool IsExpanded { get; set; }

    /// <summary>Flips the detail section open/closed.</summary>
    [RelayCommand]
    void ToggleExpanded()
    {
        IsExpanded = !IsExpanded;
    }
}
diff --git a/src/AI/samples/Essentials.AI.Sample/ViewModels/ChatViewModel.cs b/src/AI/samples/Essentials.AI.Sample/ViewModels/ChatViewModel.cs
new file mode 100644
index 000000000000..d28cf318a30c
--- /dev/null
+++ b/src/AI/samples/Essentials.AI.Sample/ViewModels/ChatViewModel.cs
@@ -0,0 +1,236 @@
+using System.Collections.ObjectModel;
+using System.Text.Json;
+using CommunityToolkit.Mvvm.ComponentModel;
+using Maui.Controls.Sample.Services;
+using Microsoft.Extensions.AI;
+
+namespace Maui.Controls.Sample.ViewModels;
+
/// <summary>
/// Drives the chat overlay. Owns the UI bubble collection (<see cref="Messages"/>),
/// the model-facing conversation history, and the streaming loop that converts
/// <see cref="ChatResponseUpdate"/> contents into bubbles as they arrive.
/// </summary>
public partial class ChatViewModel : ObservableObject
{
    // Reused across turns instead of allocating a new options object per tool call.
    static readonly JsonSerializerOptions s_indentedJsonOptions = new() { WriteIndented = true };

    readonly ChatService _chatService;

    // Full transcript sent to the model (user/assistant/tool messages) — distinct
    // from Messages, which holds only the UI bubbles.
    readonly List<ChatMessage> _conversationHistory = [];

    CancellationTokenSource? _cts;

    public ChatViewModel(ChatService chatService)
    {
        _chatService = chatService;
    }

    /// <summary>Underlying chat service, exposed so pages can hook its navigation event.</summary>
    public ChatService ChatService => _chatService;

    /// <summary>Bubbles rendered by the chat overlay, in display order.</summary>
    public ObservableCollection<ChatBubbleViewModel> Messages { get; } = [];

    [ObservableProperty]
    public partial string MessageText { get; set; } = string.Empty;

    [ObservableProperty]
    public partial bool IsOverlayVisible { get; set; }

    [ObservableProperty]
    public partial bool IsSending { get; set; }

    public Command SendMessageCommand => field ??= new Command(
        async () => await SendMessageAsync(),
        () => !string.IsNullOrWhiteSpace(MessageText) && !IsSending);

    public Command ToggleOverlayCommand => field ??= new Command(
        () => IsOverlayVisible = !IsOverlayVisible);

    public Command NewChatCommand => field ??= new Command(NewChat);

    /// <summary>Cancels any in-flight response and resets the conversation.</summary>
    void NewChat()
    {
        // NOTE(review): disposing the CTS immediately after Cancel can race with
        // an in-flight SendMessageAsync still observing the token — confirm the
        // chat client tolerates this before hardening.
        _cts?.Cancel();
        _cts?.Dispose();
        _cts = null;
        Messages.Clear();
        _conversationHistory.Clear();
        MessageText = string.Empty;
        IsSending = false;
    }

    // Re-evaluate CanExecute whenever the text or sending state changes.
    partial void OnMessageTextChanged(string value)
    {
        SendMessageCommand.ChangeCanExecute();
    }

    partial void OnIsSendingChanged(bool value)
    {
        SendMessageCommand.ChangeCanExecute();
    }

    /// <summary>
    /// Sends the current message and streams the response. Text deltas accumulate
    /// into one assistant bubble; tool calls/results become their own bubbles and
    /// are recorded in history in stream order.
    /// </summary>
    async Task SendMessageAsync()
    {
        var userText = MessageText.Trim();
        if (string.IsNullOrEmpty(userText))
            return;

        IsSending = true;
        MessageText = string.Empty;

        // Add user bubble
        Messages.Add(new ChatBubbleViewModel
        {
            BubbleType = ChatBubbleType.User,
            Text = userText
        });

        // Add to conversation history
        _conversationHistory.Add(new ChatMessage(ChatRole.User, userText));

        // Prepare streaming
        _cts = new CancellationTokenSource();
        ChatBubbleViewModel? assistantBubble = null;
        ChatMessage? textMessage = null; // Accumulates text deltas into one assistant message
        var seenCallIds = new HashSet<string>();
        var callIdToToolName = new Dictionary<string, string>(); // Cache tool names for result bubbles

        // Show "Thinking..." placeholder immediately
        var thinkingBubble = new ChatBubbleViewModel
        {
            BubbleType = ChatBubbleType.Assistant,
            Text = "Thinking...",
            IsStreaming = true
        };
        Messages.Add(thinkingBubble);

        try
        {
            await foreach (var update in _chatService.GetStreamingResponseAsync(
                _conversationHistory, _cts.Token))
            {
                foreach (var content in update.Contents)
                {
                    switch (content)
                    {
                        case TextContent textContent when !string.IsNullOrEmpty(textContent.Text):
                            // Add to history: accumulate all text deltas into one assistant message
                            if (textMessage is null)
                            {
                                textMessage = new ChatMessage(ChatRole.Assistant, [textContent]);
                                _conversationHistory.Add(textMessage);
                            }
                            else
                            {
                                textMessage.Contents.Add(textContent);
                            }

                            // Update UI (already on UI thread via await foreach)
                            if (assistantBubble is null)
                            {
                                if (Messages.Contains(thinkingBubble))
                                {
                                    // Repurpose the placeholder as the live assistant bubble.
                                    thinkingBubble.Text = textContent.Text;
                                    assistantBubble = thinkingBubble;
                                }
                                else
                                {
                                    assistantBubble = new ChatBubbleViewModel
                                    {
                                        BubbleType = ChatBubbleType.Assistant,
                                        Text = textContent.Text,
                                        IsStreaming = true
                                    };
                                    Messages.Add(assistantBubble);
                                }
                            }
                            else
                            {
                                assistantBubble.Text += textContent.Text;
                            }
                            break;

                        case FunctionCallContent functionCall:
                            // Some providers repeat call contents across updates; dedupe by id.
                            if (!seenCallIds.Add($"call:{functionCall.CallId}"))
                                break;

                            // Cache tool name for the result bubble
                            if (functionCall.CallId is not null)
                                callIdToToolName[functionCall.CallId] = functionCall.Name;

                            // Add to history immediately, preserving stream order
                            _conversationHistory.Add(new ChatMessage(ChatRole.Assistant, [functionCall]));
                            textMessage = null; // Next text starts a new message

                            // Update UI: always remove pre-tool bubble (thinking or partial text)
                            if (assistantBubble is not null)
                            {
                                Messages.Remove(assistantBubble);
                                assistantBubble = null;
                            }
                            else
                            {
                                Messages.Remove(thinkingBubble);
                            }

                            var argsJson = functionCall.Arguments is not null
                                ? JsonSerializer.Serialize(functionCall.Arguments, s_indentedJsonOptions)
                                : "{}";

                            Messages.Add(new ChatBubbleViewModel
                            {
                                BubbleType = ChatBubbleType.ToolCall,
                                Text = $"🔧 Called {functionCall.Name}",
                                ToolName = functionCall.Name,
                                DetailText = argsJson
                            });
                            break;

                        case FunctionResultContent functionResult:
                            if (!seenCallIds.Add($"result:{functionResult.CallId}"))
                                break;

                            // Add to history immediately, preserving stream order
                            _conversationHistory.Add(new ChatMessage(ChatRole.Tool, [functionResult]));

                            // Update UI — use cached tool name instead of raw CallId
                            var resultText = functionResult.Result?.ToString() ?? "(no result)";
                            var toolName = (functionResult.CallId is not null &&
                                callIdToToolName.TryGetValue(functionResult.CallId, out var cachedName))
                                ? cachedName
                                : "tool";

                            Messages.Add(new ChatBubbleViewModel
                            {
                                BubbleType = ChatBubbleType.ToolResult,
                                Text = $"📋 {toolName} responded",
                                ToolName = toolName,
                                DetailText = resultText
                            });
                            break;
                    }
                }
            }
        }
        catch (OperationCanceledException)
        {
            // User cancelled
        }
        catch (Exception ex)
        {
            Messages.Add(new ChatBubbleViewModel
            {
                BubbleType = ChatBubbleType.Assistant,
                Text = $"⚠️ Error: {ex.Message}"
            });
        }
        finally
        {
            // Clean up thinking bubble if it was never replaced
            if (assistantBubble is null && Messages.Contains(thinkingBubble))
                Messages.Remove(thinkingBubble);

            // Stop the streaming indicator on any remaining bubble. This runs on
            // success, cancellation and error alike, so no extra reset is needed
            // after the stream loop.
            if (assistantBubble is { IsStreaming: true })
                assistantBubble.IsStreaming = false;

            IsSending = false;
            _cts?.Dispose();
            _cts = null;
        }
    }
}
diff --git a/src/AI/samples/Essentials.AI.Sample/ViewModels/LandmarksViewModel.cs b/src/AI/samples/Essentials.AI.Sample/ViewModels/LandmarksViewModel.cs
index a91c7443de85..c0af14c5947e 100644
--- a/src/AI/samples/Essentials.AI.Sample/ViewModels/LandmarksViewModel.cs
+++ b/src/AI/samples/Essentials.AI.Sample/ViewModels/LandmarksViewModel.cs
@@ -17,6 +17,15 @@ public partial class LandmarksViewModel(
[ObservableProperty]
public partial bool IsLoading { get; set; }
+ [ObservableProperty]
+ public partial bool IsGeneratingEmbeddings { get; set; }
+
+ [ObservableProperty]
+ public partial double EmbeddingProgress { get; set; }
+
+ [ObservableProperty]
+ public partial string? EmbeddingStatusText { get; set; }
+
[ObservableProperty]
public partial string SelectedLanguage { get; set; } = "English";
@@ -36,6 +45,7 @@ public async Task InitializeAsync()
SelectedLanguage = languagePreference.SelectedLanguage;
await LoadLandmarksAsync();
+ await WaitForEmbeddingsAsync();
}
private async Task LoadLandmarksAsync()
@@ -46,13 +56,11 @@ private async Task LoadLandmarksAsync()
FeaturedLandmark = await dataService.GetFeaturedLandmarkAsync();
ContinentGroups.Clear();
+
var landmarksByContinent = await dataService.GetLandmarksByContinentAsync();
- foreach (var continent in landmarksByContinent.Keys.OrderBy(c => c))
+ foreach (var (continent, landmarks) in landmarksByContinent.OrderBy(kvp => kvp.Key))
{
- if (landmarksByContinent.TryGetValue(continent, out var landmarks))
- {
- ContinentGroups.Add(new ContinentGroup(continent, landmarks));
- }
+ ContinentGroups.Add(new ContinentGroup(continent, [.. landmarks.OrderBy(l => l.Name)]));
}
}
finally
@@ -60,4 +68,31 @@ private async Task LoadLandmarksAsync()
IsLoading = false;
}
}
+
+ private async Task WaitForEmbeddingsAsync()
+ {
+ IsGeneratingEmbeddings = true;
+ EmbeddingStatusText = "Generating search embeddings…";
+ EmbeddingProgress = 0;
+
+ dataService.EmbeddingProgressChanged += OnEmbeddingProgress;
+ try
+ {
+ await dataService.WaitUntilReadyAsync();
+ }
+ finally
+ {
+ dataService.EmbeddingProgressChanged -= OnEmbeddingProgress;
+ IsGeneratingEmbeddings = false;
+ }
+ }
+
+ private void OnEmbeddingProgress(int current, int total)
+ {
+ MainThread.BeginInvokeOnMainThread(() =>
+ {
+ EmbeddingProgress = (double)current / total;
+ EmbeddingStatusText = $"Generating search embeddings… {current}/{total}";
+ });
+ }
}
diff --git a/src/AI/samples/Essentials.AI.Sample/Views/ChatBubbleTemplateSelector.cs b/src/AI/samples/Essentials.AI.Sample/Views/ChatBubbleTemplateSelector.cs
new file mode 100644
index 000000000000..442c8d328dc1
--- /dev/null
+++ b/src/AI/samples/Essentials.AI.Sample/Views/ChatBubbleTemplateSelector.cs
@@ -0,0 +1,27 @@
+using Maui.Controls.Sample.ViewModels;
+
+namespace Maui.Controls.Sample.Views;
+
/// <summary>
/// Maps a <see cref="ChatBubbleViewModel"/> to the DataTemplate for its bubble
/// kind. Falls back to <see cref="AssistantTemplate"/> for unknown items.
/// </summary>
public class ChatBubbleTemplateSelector : DataTemplateSelector
{
    public DataTemplate? UserTemplate { get; set; }
    public DataTemplate? AssistantTemplate { get; set; }
    public DataTemplate? ToolCallTemplate { get; set; }
    public DataTemplate? ToolResultTemplate { get; set; }

    protected override DataTemplate? OnSelectTemplate(object item, BindableObject container)
    {
        // Anything that isn't a bubble view model renders with the assistant style.
        if (item is not ChatBubbleViewModel bubble)
            return AssistantTemplate;

        return bubble.BubbleType switch
        {
            ChatBubbleType.User => UserTemplate,
            ChatBubbleType.ToolCall => ToolCallTemplate,
            ChatBubbleType.ToolResult => ToolResultTemplate,
            ChatBubbleType.Assistant => AssistantTemplate,
            _ => AssistantTemplate
        };
    }
}
diff --git a/src/AI/samples/Essentials.AI.Sample/Views/ChatOverlayView.xaml b/src/AI/samples/Essentials.AI.Sample/Views/ChatOverlayView.xaml
new file mode 100644
index 000000000000..d21edc7e9494
--- /dev/null
+++ b/src/AI/samples/Essentials.AI.Sample/Views/ChatOverlayView.xaml
@@ -0,0 +1,250 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/src/AI/samples/Essentials.AI.Sample/Views/ChatOverlayView.xaml.cs b/src/AI/samples/Essentials.AI.Sample/Views/ChatOverlayView.xaml.cs
new file mode 100644
index 000000000000..75e0dc9e1d58
--- /dev/null
+++ b/src/AI/samples/Essentials.AI.Sample/Views/ChatOverlayView.xaml.cs
@@ -0,0 +1,84 @@
+using System.Collections.Specialized;
+using Maui.Controls.Sample.ViewModels;
+
+namespace Maui.Controls.Sample.Views;
+
/// <summary>
/// Sliding chat overlay: animates a backdrop fade plus panel slide on show/hide,
/// and auto-scrolls the message list as bubbles are added.
/// </summary>
public partial class ChatOverlayView : ContentView
{
    ChatViewModel? _viewModel;

    /// <summary>Raised after the hide animation completes.</summary>
    public event EventHandler? Closed;

    public ChatOverlayView()
    {
        InitializeComponent();
        SizeChanged += OnSizeChanged;
    }

    // Clamp the panel to the available space, capped at a comfortable maximum.
    void OnSizeChanged(object? sender, EventArgs e)
    {
        if (Width > 0)
            ChatPanel.WidthRequest = Math.Min(Width - 16, 500);
        if (Height > 0)
            ChatPanel.HeightRequest = Math.Min(Height - 16, 800);
    }

    /// <summary>Binds the overlay to a view model and hooks auto-scroll.</summary>
    public void Initialize(ChatViewModel viewModel)
    {
        // Unsubscribe from any previous VM so its event doesn't keep this view alive.
        if (_viewModel is not null)
            _viewModel.Messages.CollectionChanged -= OnMessagesChanged;

        _viewModel = viewModel;
        BindingContext = viewModel;
        viewModel.Messages.CollectionChanged += OnMessagesChanged;
    }

    void OnMessagesChanged(object? sender, NotifyCollectionChangedEventArgs e)
    {
        var vm = _viewModel;
        if (e.Action != NotifyCollectionChangedAction.Add || vm is null || vm.Messages.Count == 0)
            return;

        // Defer so the newly added item is measured before scrolling to it.
        Dispatcher.Dispatch(() =>
            MessagesView.ScrollTo(vm.Messages.Count - 1, position: ScrollToPosition.End, animate: true));
    }

    async void OnBackdropTapped(object? sender, TappedEventArgs e) => await Hide();

    async void OnCloseTapped(object? sender, EventArgs e) => await Hide();

    /// <summary>Animates the overlay in and focuses the message entry.</summary>
    public async Task Show()
    {
        // Hide() detaches the scroll handler; re-attach idempotently here.
        if (_viewModel is not null)
        {
            _viewModel.Messages.CollectionChanged -= OnMessagesChanged;
            _viewModel.Messages.CollectionChanged += OnMessagesChanged;
        }

        // Start fully off-screen; before first layout fall back to a large offset.
        var startOffset = ChatPanel.Height > 0 ? ChatPanel.Height : 1000;
        ChatPanel.TranslationY = startOffset;

        await Task.WhenAll(
            Backdrop.FadeToAsync(0.4, 250, Easing.CubicOut),
            ChatPanel.TranslateToAsync(0, 0, 300, Easing.CubicOut));

        MessageEntry.Focus();
    }

    /// <summary>Animates the overlay out and raises <see cref="Closed"/>.</summary>
    public async Task Hide()
    {
        // Detach so the singleton VM's event can't leak this view while hidden.
        if (_viewModel is not null)
            _viewModel.Messages.CollectionChanged -= OnMessagesChanged;

        MessageEntry.Unfocus();

        var offscreen = ChatPanel.Height > 0 ? ChatPanel.Height : 700;
        await Task.WhenAll(
            Backdrop.FadeToAsync(0, 200, Easing.CubicIn),
            ChatPanel.TranslateToAsync(0, offscreen, 250, Easing.CubicIn));

        Closed?.Invoke(this, EventArgs.Empty);
    }
}
diff --git a/src/AI/samples/Essentials.AI.Sample/Views/MarkdownConverter.cs b/src/AI/samples/Essentials.AI.Sample/Views/MarkdownConverter.cs
new file mode 100644
index 000000000000..f2cd96ccb244
--- /dev/null
+++ b/src/AI/samples/Essentials.AI.Sample/Views/MarkdownConverter.cs
@@ -0,0 +1,162 @@
+using System.Globalization;
+using Markdig;
+using Markdig.Syntax;
+using Markdig.Syntax.Inlines;
+
+namespace Maui.Controls.Sample.Views;
+
+/// <summary>
+/// Converts a markdown string to a FormattedString using Markdig's AST.
+/// Supports bold, italic, code, and renders everything else as plain text.
+/// </summary>
+public class MarkdownConverter : IValueConverter
+{
+ static readonly MarkdownPipeline s_pipeline = new MarkdownPipelineBuilder()
+ .UseEmphasisExtras()
+ .Build();
+
+ public object? Convert(object? value, Type targetType, object? parameter, CultureInfo culture)
+ {
+ if (value is not string text || string.IsNullOrEmpty(text))
+ return new FormattedString();
+
+ var isDark = Application.Current?.RequestedTheme == AppTheme.Dark;
+ var textColor = isDark ? Color.FromArgb("#E1E1E1") : Color.FromArgb("#1F1F1F");
+ var codeBackground = isDark ? Color.FromArgb("#3D3D3D") : Color.FromArgb("#E8E8E8");
+
+ var formatted = new FormattedString();
+ var doc = Markdown.Parse(text, s_pipeline);
+
+ foreach (var block in doc)
+ {
+ if (formatted.Spans.Count > 0)
+ formatted.Spans.Add(new Span { Text = "\n", TextColor = textColor, FontSize = 14 });
+
+ if (block is ParagraphBlock paragraph && paragraph.Inline is not null)
+ {
+ WalkInlines(paragraph.Inline, formatted, textColor, codeBackground, FontAttributes.None);
+ }
+ else if (block is HeadingBlock heading && heading.Inline is not null)
+ {
+ WalkInlines(heading.Inline, formatted, textColor, codeBackground, FontAttributes.Bold);
+ }
+ else if (block is ListBlock list)
+ {
+ int index = 1;
+ foreach (var item in list)
+ {
+ if (formatted.Spans.Count > 0)
+ formatted.Spans.Add(new Span { Text = "\n", TextColor = textColor, FontSize = 14 });
+
+ var bullet = list.IsOrdered ? $"{index++}. " : "• ";
+ formatted.Spans.Add(new Span { Text = bullet, TextColor = textColor, FontSize = 14 });
+
+ if (item is ListItemBlock listItem)
+ {
+ foreach (var subBlock in listItem)
+ {
+ if (subBlock is ParagraphBlock p && p.Inline is not null)
+ WalkInlines(p.Inline, formatted, textColor, codeBackground, FontAttributes.None);
+ }
+ }
+ }
+ }
+ else
+ {
+ // Fallback: render block as plain text
+ var start = Math.Min(block.Span.Start, text.Length);
+ var length = Math.Min(block.Span.Length, text.Length - start);
+ var plainText = block.GetType().Name == "FencedCodeBlock" || block.GetType().Name == "CodeBlock"
+ ? GetCodeBlockText(block)
+ : text.Substring(start, length);
+ formatted.Spans.Add(new Span
+ {
+ Text = plainText,
+ TextColor = textColor,
+ FontFamily = "Courier New",
+ BackgroundColor = codeBackground,
+ FontSize = 13,
+ });
+ }
+ }
+
+ if (formatted.Spans.Count == 0)
+ formatted.Spans.Add(new Span { Text = text, TextColor = textColor, FontSize = 14 });
+
+ return formatted;
+ }
+
+ public object? ConvertBack(object? value, Type targetType, object? parameter, CultureInfo culture)
+ => throw new NotSupportedException();
+
+ static void WalkInlines(ContainerInline container, FormattedString fs, Color textColor, Color codeBackground, FontAttributes inherited)
+ {
+ foreach (var inline in container)
+ {
+ switch (inline)
+ {
+ case EmphasisInline emphasis:
+ var attrs = inherited;
+ if (emphasis.DelimiterCount == 2 || (emphasis.DelimiterChar == '*' && emphasis.DelimiterCount >= 2))
+ attrs |= FontAttributes.Bold;
+ else
+ attrs |= FontAttributes.Italic;
+ WalkInlines(emphasis, fs, textColor, codeBackground, attrs);
+ break;
+
+ case CodeInline code:
+ fs.Spans.Add(new Span
+ {
+ Text = code.Content,
+ TextColor = textColor,
+ FontFamily = "Courier New",
+ BackgroundColor = codeBackground,
+ FontSize = 13,
+ });
+ break;
+
+ case LinkInline link:
+ // Render link text with underline
+ var linkText = link.FirstChild is LiteralInline lit ? lit.Content.ToString() : link.Url ?? "";
+ fs.Spans.Add(new Span
+ {
+ Text = linkText,
+ TextColor = Color.FromArgb("#512BD4"),
+ TextDecorations = TextDecorations.Underline,
+ FontAttributes = inherited,
+ FontSize = 14,
+ });
+ break;
+
+ case LineBreakInline:
+ fs.Spans.Add(new Span { Text = "\n", TextColor = textColor, FontSize = 14 });
+ break;
+
+ case LiteralInline literal:
+ fs.Spans.Add(new Span
+ {
+ Text = literal.Content.ToString(),
+ TextColor = textColor,
+ FontAttributes = inherited,
+ FontSize = 14,
+ });
+ break;
+
+ default:
+ // Any other inline — render as plain text
+ if (inline is ContainerInline ci)
+ WalkInlines(ci, fs, textColor, codeBackground, inherited);
+ break;
+ }
+ }
+ }
+
+ static string GetCodeBlockText(Block block)
+ {
+ if (block is FencedCodeBlock fenced)
+ return string.Join("\n", fenced.Lines);
+ if (block is CodeBlock code)
+ return string.Join("\n", code.Lines);
+ return block.Span.ToString() ?? "";
+ }
+}
diff --git a/src/AI/src/AppleNative/ApiDefinitions.cs b/src/AI/src/AppleNative/ApiDefinitions.cs
index 1bf62b8376b2..af2e2c16ff7d 100644
--- a/src/AI/src/AppleNative/ApiDefinitions.cs
+++ b/src/AI/src/AppleNative/ApiDefinitions.cs
@@ -225,14 +225,18 @@ interface FunctionResultContentNative
[Export("callId", ArgumentSemantic.Copy)]
string CallId { get; set; }
+ // @property (nonatomic, copy) NSString * _Nonnull name;
+ [Export("name", ArgumentSemantic.Copy)]
+ string Name { get; set; }
+
// @property (nonatomic, copy) NSString * _Nonnull result;
[Export("result", ArgumentSemantic.Copy)]
string Result { get; set; }
- // - (nonnull instancetype)initWithCallId:(NSString * _Nonnull)callId result:(NSString * _Nonnull)result OBJC_DESIGNATED_INITIALIZER;
- [Export("initWithCallId:result:")]
+ // - (nonnull instancetype)initWithCallId:(NSString * _Nonnull)callId name:(NSString * _Nonnull)name result:(NSString * _Nonnull)result OBJC_DESIGNATED_INITIALIZER;
+ [Export("initWithCallId:name:result:")]
[DesignatedInitializer]
- NativeHandle Constructor(string callId, string result);
+ NativeHandle Constructor(string callId, string name, string result);
}
// @interface TextContentNative : AIContentNative
diff --git a/src/AI/src/AppleNative/EssentialsAI/ChatClient.swift b/src/AI/src/AppleNative/EssentialsAI/ChatClient.swift
index 77aa18dae860..b4c9fa7a6b69 100644
--- a/src/AI/src/AppleNative/EssentialsAI/ChatClient.swift
+++ b/src/AI/src/AppleNative/EssentialsAI/ChatClient.swift
@@ -87,6 +87,7 @@ public class ChatClientNative: NSObject {
for try await response in responseStream {
try Task.checkCancellation()
let text = response.content.jsonString
+ guard !text.isEmpty else { continue }
#if APPLE_INTELLIGENCE_LOGGING_ENABLED
if let log = AppleIntelligenceLogger.log {
log("[\(methodName)] Streaming update: \(text)")
@@ -115,6 +116,7 @@ public class ChatClientNative: NSObject {
for try await response in responseStream {
try Task.checkCancellation()
let text = response.content
+ guard !text.isEmpty else { continue }
#if APPLE_INTELLIGENCE_LOGGING_ENABLED
if let log = AppleIntelligenceLogger.log {
log("[\(methodName)] Streaming update: \(text)")
@@ -236,8 +238,11 @@ public class ChatClientNative: NSObject {
}
#endif
- let lastMessage = messages.last!
- let otherMessages = messages.dropLast()
+ // The last message is the prompt; everything before is the transcript history.
+ guard let lastMessage = messages.last else {
+ throw NSError.chatError(.invalidRole, description: "No messages found in conversation")
+ }
+ let otherMessages = Array(messages.dropLast())
let model = SystemLanguageModel.default
let tools = options?.tools?.map { ToolNative($0, toolWatcher?.notifyToolCall, toolWatcher?.notifyToolResult) } ?? []
@@ -252,7 +257,7 @@ public class ChatClientNative: NSObject {
}
#endif
- let transcript = try Transcript(entries: otherMessages.map(self.toTranscriptEntry))
+ let transcript = try Transcript(entries: otherMessages.flatMap(self.toTranscriptEntries))
let prompt = try self.toPrompt(message: lastMessage)
// Parse the JSON schema from the options
@@ -375,25 +380,81 @@ public class ChatClientNative: NSObject {
}
}
- private func toTranscriptEntry(message: ChatMessageNative) throws -> Transcript.Entry {
+ private func toTranscriptEntries(message: ChatMessageNative) throws -> [Transcript.Entry] {
switch message.role {
case .user:
- return try .prompt(Transcript.Prompt(segments: message.contents.map(self.toTranscriptSegment)))
+ return [try toUserEntry(message)]
case .assistant:
- return try .response(Transcript.Response(assetIDs: [], segments: message.contents.map(self.toTranscriptSegment)))
+ return toAssistantEntries(message)
case .system:
- return try .instructions(Transcript.Instructions(segments: message.contents.map(self.toTranscriptSegment), toolDefinitions: []))
+ return [try toSystemEntry(message)]
+ case .tool:
+ return try toToolEntries(message)
default:
throw NSError.chatError(.invalidRole, description: "Unsupported role in transcript. Found: \(message.role)")
}
}
- private func toTranscriptSegment(content: AIContentNative) throws -> Transcript.Segment {
- switch content {
- case let textContent as TextContentNative:
+ private func toUserEntry(_ message: ChatMessageNative) throws -> Transcript.Entry {
+ let segments: [Transcript.Segment] = try message.contents.map { content in
+ guard let textContent = content as? TextContentNative else {
+ throw NSError.chatError(.invalidContent, description: "Unsupported content type in user message: \(type(of: content))")
+ }
return .text(Transcript.TextSegment(content: textContent.text))
- default:
- throw NSError.chatError(.invalidContent, description: "Unsupported content type in transcript. Found: \(type(of: content))")
+ }
+ return .prompt(Transcript.Prompt(segments: segments))
+ }
+
+ private func toAssistantEntries(_ message: ChatMessageNative) -> [Transcript.Entry] {
+ // Process contents in order, flushing batches when the content type changes.
+ // This preserves interleaving: [text, funcCall, text] → [.response, .toolCalls, .response]
+ var entries: [Transcript.Entry] = []
+ var pendingTextSegments: [Transcript.Segment] = []
+ var pendingToolCalls: [Transcript.ToolCall] = []
+
+ for content in message.contents {
+ if let textContent = content as? TextContentNative {
+ if !pendingToolCalls.isEmpty {
+ entries.append(.toolCalls(Transcript.ToolCalls(pendingToolCalls)))
+ pendingToolCalls = []
+ }
+ pendingTextSegments.append(.text(Transcript.TextSegment(content: textContent.text)))
+ } else if let funcCall = content as? FunctionCallContentNative {
+ if !pendingTextSegments.isEmpty {
+ entries.append(.response(Transcript.Response(assetIDs: [], segments: pendingTextSegments)))
+ pendingTextSegments = []
+ }
+ let argsContent = (try? GeneratedContent(json: funcCall.arguments)) ?? GeneratedContent(funcCall.arguments)
+ pendingToolCalls.append(Transcript.ToolCall(id: funcCall.callId, toolName: funcCall.name, arguments: argsContent))
+ }
+ }
+
+ if !pendingTextSegments.isEmpty {
+ entries.append(.response(Transcript.Response(assetIDs: [], segments: pendingTextSegments)))
+ }
+ if !pendingToolCalls.isEmpty {
+ entries.append(.toolCalls(Transcript.ToolCalls(pendingToolCalls)))
+ }
+ return entries
+ }
+
+ private func toSystemEntry(_ message: ChatMessageNative) throws -> Transcript.Entry {
+ let segments: [Transcript.Segment] = try message.contents.map { content in
+ guard let textContent = content as? TextContentNative else {
+ throw NSError.chatError(.invalidContent, description: "Unsupported content type in system message: \(type(of: content))")
+ }
+ return .text(Transcript.TextSegment(content: textContent.text))
+ }
+ return .instructions(Transcript.Instructions(segments: segments, toolDefinitions: []))
+ }
+
+ private func toToolEntries(_ message: ChatMessageNative) throws -> [Transcript.Entry] {
+ return try message.contents.map { content in
+ guard let funcResult = content as? FunctionResultContentNative else {
+ throw NSError.chatError(.invalidContent, description: "Unsupported content type in tool message: \(type(of: content))")
+ }
+ let segment = Transcript.Segment.text(Transcript.TextSegment(content: funcResult.result))
+ return .toolOutput(Transcript.ToolOutput(id: funcResult.callId, toolName: funcResult.name, segments: [segment]))
}
}
@@ -453,7 +514,7 @@ public class ChatClientNative: NSObject {
return nil
}
- return FunctionResultContentNative(callId: toolOutput.id, result: resultText)
+ return FunctionResultContentNative(callId: toolOutput.id, name: toolOutput.toolName, result: resultText)
}
}
diff --git a/src/AI/src/AppleNative/EssentialsAI/ChatMessageContent.swift b/src/AI/src/AppleNative/EssentialsAI/ChatMessageContent.swift
index 073c307a13ca..a80ccca240af 100644
--- a/src/AI/src/AppleNative/EssentialsAI/ChatMessageContent.swift
+++ b/src/AI/src/AppleNative/EssentialsAI/ChatMessageContent.swift
@@ -29,10 +29,12 @@ public class FunctionCallContentNative: AIContentNative {
@objc(FunctionResultContentNative)
public class FunctionResultContentNative: AIContentNative {
@objc public var callId: String
+ @objc public var name: String
@objc public var result: String
- @objc public init(callId: String, result: String) {
+ @objc public init(callId: String, name: String, result: String) {
self.callId = callId
+ self.name = name
self.result = result
super.init()
}
diff --git a/src/AI/src/AppleNative/EssentialsAI/EssentialsAI.xcodeproj/project.pbxproj b/src/AI/src/AppleNative/EssentialsAI/EssentialsAI.xcodeproj/project.pbxproj
index a6bc01feeab4..62f2cf8ed256 100644
--- a/src/AI/src/AppleNative/EssentialsAI/EssentialsAI.xcodeproj/project.pbxproj
+++ b/src/AI/src/AppleNative/EssentialsAI/EssentialsAI.xcodeproj/project.pbxproj
@@ -18,6 +18,7 @@
3400193C2EDF4D6800DAB0A3 /* ChatResponseUpdateNative.swift in Sources */ = {isa = PBXBuildFile; fileRef = 3400192D2EDF4D6800DAB0A3 /* ChatResponseUpdateNative.swift */; };
3400193D2EDF4D6800DAB0A3 /* Cancellation.swift in Sources */ = {isa = PBXBuildFile; fileRef = 340019272EDF4D6800DAB0A3 /* Cancellation.swift */; };
3400193E2EDF4D6800DAB0A3 /* ChatTool.swift in Sources */ = {isa = PBXBuildFile; fileRef = 3400192E2EDF4D6800DAB0A3 /* ChatTool.swift */; };
+ 34279CA12EC421CC00583050 /* FoundationModels.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 34279CA02EC421CC00583050 /* FoundationModels.framework */; settings = {ATTRIBUTES = (Weak, ); }; };
/* End PBXBuildFile section */
/* Begin PBXFileReference section */
@@ -33,6 +34,7 @@
340019312EDF4D6800DAB0A3 /* ToolCallWatcher.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ToolCallWatcher.swift; sourceTree = "<group>"; };
340019322EDF4D6800DAB0A3 /* ToolNative.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ToolNative.swift; sourceTree = "<group>"; };
34279C902EC421CC00583050 /* EssentialsAI.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = EssentialsAI.framework; sourceTree = BUILT_PRODUCTS_DIR; };
+ 34279CA02EC421CC00583050 /* FoundationModels.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = FoundationModels.framework; path = System/Library/Frameworks/FoundationModels.framework; sourceTree = SDKROOT; };
/* End PBXFileReference section */
/* Begin PBXFrameworksBuildPhase section */
@@ -40,6 +42,7 @@
isa = PBXFrameworksBuildPhase;
buildActionMask = 2147483647;
files = (
+ 34279CA12EC421CC00583050 /* FoundationModels.framework in Frameworks */,
);
runOnlyForDeploymentPostprocessing = 0;
};
@@ -49,6 +52,7 @@
34279C862EC421CC00583050 = {
isa = PBXGroup;
children = (
+ 34279CA02EC421CC00583050 /* FoundationModels.framework */,
340019272EDF4D6800DAB0A3 /* Cancellation.swift */,
340019282EDF4D6800DAB0A3 /* ChatClient.swift */,
340019292EDF4D6800DAB0A3 /* ChatMessage.swift */,
diff --git a/src/AI/src/AppleNative/EssentialsAI/JsonSchemaDecoder.swift b/src/AI/src/AppleNative/EssentialsAI/JsonSchemaDecoder.swift
index 2219d2a14a8e..13db2498a5e5 100644
--- a/src/AI/src/AppleNative/EssentialsAI/JsonSchemaDecoder.swift
+++ b/src/AI/src/AppleNative/EssentialsAI/JsonSchemaDecoder.swift
@@ -3,6 +3,23 @@ import FoundationModels
class JsonSchemaDecoder {
+ enum SchemaError: Error, LocalizedError {
+ case unsupportedType(String)
+ case missingObjectProperties
+ case missingArrayItems
+
+ var errorDescription: String? {
+ switch self {
+ case .unsupportedType(let type):
+ return "Unsupported JSON schema type '\(type)'"
+ case .missingObjectProperties:
+ return "Object schema missing 'properties'"
+ case .missingArrayItems:
+ return "Array schema missing 'items'"
+ }
+ }
+ }
+
/// Simple JSON Schema representation
private class JsonSchema: Codable {
var type: String?
@@ -50,23 +67,24 @@ class JsonSchemaDecoder {
else { return nil }
// Convert into a DynamicJsonSchema
- guard let dynamicSchema = toDynamicSchema(jsonSchema)
- else { return nil }
+ let dynamicSchema = try toDynamicSchema(jsonSchema)
// Get the final GenerationSchema
return try GenerationSchema(root: dynamicSchema, dependencies: [])
}
/// Convert the object representation of a JSON schema into a DynamicGenerationSchema
- private static func toDynamicSchema(_ schema: JsonSchema)
- -> DynamicGenerationSchema?
+ private static func toDynamicSchema(_ schema: JsonSchema) throws
+ -> DynamicGenerationSchema
{
switch schema.type {
// Handle objects with properties
case "object":
- guard let properties = schema.properties else { return nil }
- let props = properties.compactMap { (name, value) in
- parseJsonProperty(name, value, schema)
+ guard let properties = schema.properties else {
+ throw SchemaError.missingObjectProperties
+ }
+ let props = try properties.map { (name, value) in
+ try parseJsonProperty(name, value, schema)
}
return DynamicGenerationSchema(
name: schema.title ?? "Object",
@@ -75,10 +93,10 @@ class JsonSchemaDecoder {
)
// Handle arrays with items
case "array":
- guard
- let items = schema.items,
- let itemSchema = toDynamicSchema(items)
- else { return nil }
+ guard let items = schema.items else {
+ throw SchemaError.missingArrayItems
+ }
+ let itemSchema = try toDynamicSchema(items)
return DynamicGenerationSchema(
arrayOf: itemSchema,
minimumElements: schema.minItems,
@@ -95,7 +113,8 @@ class JsonSchemaDecoder {
case "integer": return DynamicGenerationSchema(type: Int.self)
case "number": return DynamicGenerationSchema(type: Double.self)
case "boolean": return DynamicGenerationSchema(type: Bool.self)
- default: return nil
+ default:
+ throw SchemaError.unsupportedType(schema.type ?? "unknown")
}
}
@@ -103,10 +122,8 @@ class JsonSchemaDecoder {
_ propertyName: String,
_ value: JsonSchema,
_ parentSchema: JsonSchema
- ) -> DynamicGenerationSchema.Property? {
- guard let nestedSchema = toDynamicSchema(value) else {
- return nil
- }
+ ) throws -> DynamicGenerationSchema.Property {
+ let nestedSchema = try toDynamicSchema(value)
let isRequired = parentSchema.required?.contains(propertyName) == true
return DynamicGenerationSchema.Property(
name: propertyName,
diff --git a/src/AI/src/Essentials.AI/Platform/JsonStreamChunker.cs b/src/AI/src/Essentials.AI/Platform/JsonStreamChunker.cs
index 2a0c51b36fb2..44fc364a1594 100644
--- a/src/AI/src/Essentials.AI/Platform/JsonStreamChunker.cs
+++ b/src/AI/src/Essentials.AI/Platform/JsonStreamChunker.cs
@@ -226,6 +226,23 @@ public override string Flush()
return sb.ToString();
}
+ /// <inheritdoc />
+ public override void Reset()
+ {
+ // Setting _prevState to null causes the next Process() call to treat the
+ // incoming JSON as a "first chunk" (see the _prevState == null check in
+ // Process), which is the intended behavior after a reset — the chunker
+ // should emit the full initial structure for the new stream rather than
+ // computing a delta against stale state from before the reset.
+ _prevState = null;
+ _openStringPath = null;
+ _emittedStrings.Clear();
+ _pendingStrings.Clear();
+ _pendingContainers.Clear();
+ _emittedPaths.Clear();
+ _openStructures.Clear();
+ }
+
// ═══════════════════════════════════════════════════════════════════════════════════════════
// FIRST CHUNK PROCESSING
// ═══════════════════════════════════════════════════════════════════════════════════════════
diff --git a/src/AI/src/Essentials.AI/Platform/MaciOS/AppleIntelligenceChatClient.cs b/src/AI/src/Essentials.AI/Platform/MaciOS/AppleIntelligenceChatClient.cs
index bda2a0e52fb0..759fdaf87952 100644
--- a/src/AI/src/Essentials.AI/Platform/MaciOS/AppleIntelligenceChatClient.cs
+++ b/src/AI/src/Essentials.AI/Platform/MaciOS/AppleIntelligenceChatClient.cs
@@ -2,8 +2,9 @@
using System.Runtime.Versioning;
using System.Text.Json;
using System.Text.Json.Nodes;
-using System.Threading.Channels;
using Microsoft.Extensions.AI;
+using Microsoft.Extensions.Logging;
+using Microsoft.Extensions.Logging.Abstractions;
namespace Microsoft.Maui.Essentials.AI;
@@ -14,7 +15,7 @@ namespace Microsoft.Maui.Essentials.AI;
[SupportedOSPlatform("maccatalyst26.0")]
[SupportedOSPlatform("macos26.0")]
[SupportedOSPlatform("tvos26.0")]
-public sealed class AppleIntelligenceChatClient : IChatClient
+public sealed partial class AppleIntelligenceChatClient : IChatClient
{
/// The provider name for this chat client.
private const string ProviderName = "apple";
@@ -22,6 +23,28 @@ public sealed class AppleIntelligenceChatClient : IChatClient
/// The default model identifier.
private const string DefaultModelId = "apple-intelligence";
+ private readonly ILogger _logger;
+ private readonly IServiceProvider? _functionInvocationServices;
+
+ /// <summary>
+ /// Initializes a new instance of the <see cref="AppleIntelligenceChatClient"/> class.
+ /// </summary>
+ public AppleIntelligenceChatClient()
+ : this(null, null)
+ {
+ }
+
+ /// <summary>
+ /// Initializes a new instance of the <see cref="AppleIntelligenceChatClient"/> class.
+ /// </summary>
+ /// <param name="loggerFactory">Optional logger factory for logging tool invocations.</param>
+ /// <param name="functionInvocationServices">Optional service provider for dependency injection in tool functions.</param>
+ public AppleIntelligenceChatClient(ILoggerFactory? loggerFactory = null, IServiceProvider? functionInvocationServices = null)
+ {
+ _logger = (ILogger?)loggerFactory?.CreateLogger<AppleIntelligenceChatClient>() ?? NullLogger.Instance;
+ _functionInvocationServices = functionInvocationServices;
+ }
+
// static AppleIntelligenceChatClient()
// {
// // Enable native logging for debugging purposes, this is quite verbose.
@@ -65,42 +88,46 @@ public Task GetResponseAsync(
var nativeMessages = ToNative(messages, options);
var nativeOptions = ToNative(options, cancellationToken);
var native = new ChatClientNative();
+ var handler = new NonStreamingResponseHandler();
- var tcs = new TaskCompletionSource(TaskCreationOptions.RunContinuationsAsynchronously);
+ // Set up cancellation registration before invoking native to avoid race
+ CancellationTokenNative? nativeToken = null;
+ var registration = cancellationToken.Register(() => nativeToken?.Cancel());
- CancellationTokenRegistration registration = default;
-
- var nativeToken = native.GetResponse(
+ nativeToken = native.GetResponse(
nativeMessages,
nativeOptions,
- onUpdate: (update) =>
- {
- // Updates are not used in non-streaming mode
- },
+ onUpdate: (_) => { },
onComplete: (response, error) =>
{
registration.Dispose();
if (error is not null)
{
if (error.Domain == nameof(ChatClientNative) && error.Code == (int)ChatClientError.Cancelled)
- {
- tcs.TrySetCanceled();
- }
+ handler.CompleteCancelled(cancellationToken);
else
- {
- tcs.TrySetException(new NSErrorException(error));
- }
+ handler.CompleteWithError(new NSErrorException(error));
}
else
{
- var chatResponse = FromNativeChatResponse(response);
- tcs.TrySetResult(chatResponse);
+ try
+ {
+ handler.Complete(FromNativeChatResponse(response));
+ }
+ catch (Exception ex)
+ {
+ handler.CompleteWithError(ex);
+ }
}
});
- registration = cancellationToken.Register(() => nativeToken?.Cancel());
+ // If cancellation was already requested before native call started
+ if (cancellationToken.IsCancellationRequested)
+ {
+ nativeToken?.Cancel();
+ }
- return tcs.Task;
+ return handler.Task;
}
///
@@ -111,68 +138,42 @@ public async IAsyncEnumerable GetStreamingResponseAsync(
{
var nativeMessages = ToNative(messages, options);
var nativeOptions = ToNative(options, cancellationToken);
-
var native = new ChatClientNative();
-
- var channel = Channel.CreateUnbounded();
-
- // Use appropriate stream chunker based on response format
StreamChunkerBase chunker = nativeOptions?.ResponseJsonSchema is not null
? new JsonStreamChunker()
: new PlainTextStreamChunker();
+ var handler = new StreamingResponseHandler(chunker);
- CancellationTokenRegistration registration = default;
+ // Set up cancellation registration before invoking native to avoid race
+ CancellationTokenNative? nativeToken = null;
+ var registration = cancellationToken.Register(() => nativeToken?.Cancel());
- var nativeToken = native.StreamResponse(
+ nativeToken = native.StreamResponse(
nativeMessages,
nativeOptions,
onUpdate: (update) =>
{
- switch (update.UpdateType)
+ try
{
- case ResponseUpdateTypeNative.Content:
- // Handle text updates
- if (update.Text is not null)
- {
- // Use stream chunker to compute delta - handles both JSON and plain text
- var delta = chunker.Process(update.Text);
-
- if (!string.IsNullOrEmpty(delta))
- {
- var chatUpdate = new ChatResponseUpdate
- {
- Role = ChatRole.Assistant,
- Contents = { new TextContent(delta) }
- };
-
- channel.Writer.TryWrite(chatUpdate);
- }
- }
- break;
-
- case ResponseUpdateTypeNative.ToolCall:
- var args = update.ToolCallArguments is null
- ? null
-#pragma warning disable IL3050, IL2026 // DefaultJsonTypeInfoResolver is only used when reflection-based serialization is enabled
- : JsonSerializer.Deserialize(update.ToolCallArguments, AIJsonUtilities.DefaultOptions);
-#pragma warning restore IL3050, IL2026
-
- var toolCallUpdate = new ChatResponseUpdate
- {
- Role = ChatRole.Assistant,
- Contents = { new FunctionCallContent(update.ToolCallId!, update.ToolCallName!, args) }
- };
- channel.Writer.TryWrite(toolCallUpdate);
- break;
-
- case ResponseUpdateTypeNative.ToolResult:
- var toolResultUpdate = new ChatResponseUpdate
- {
- Role = ChatRole.Assistant,
- Contents = { new FunctionResultContent(update.ToolCallId!, update.ToolCallResult!) }
- };
- channel.Writer.TryWrite(toolResultUpdate);
- break;
+ switch (update.UpdateType)
+ {
+ case ResponseUpdateTypeNative.Content:
+ handler.ProcessContent(update.Text);
+ break;
+ case ResponseUpdateTypeNative.ToolCall:
+ handler.ProcessToolCall(update.ToolCallId, update.ToolCallName, update.ToolCallArguments);
+ break;
+ case ResponseUpdateTypeNative.ToolResult:
+ handler.ProcessToolResult(update.ToolCallId, update.ToolCallResult);
+ break;
+ default:
+ throw new NotSupportedException($"Unsupported update type: {update.UpdateType}");
+ }
+ }
+ catch (Exception ex)
+ {
+ nativeToken?.Cancel();
+ handler.CompleteWithError(ex);
}
},
onComplete: (finalResult, error) =>
@@ -181,38 +182,41 @@ public async IAsyncEnumerable GetStreamingResponseAsync(
if (error is not null)
{
if (error.Domain == nameof(ChatClientNative) && error.Code == (int)ChatClientError.Cancelled)
- {
- channel.Writer.Complete(new OperationCanceledException());
- }
+ handler.CompleteWithError(new OperationCanceledException(cancellationToken));
else
- {
- channel.Writer.Complete(new NSErrorException(error));
- }
+ handler.CompleteWithError(new NSErrorException(error));
}
else
{
- // Flush any remaining content from the chunker
- var finalChunk = chunker.Flush();
- if (!string.IsNullOrEmpty(finalChunk))
+ try
{
- var finalUpdate = new ChatResponseUpdate
- {
- Role = ChatRole.Assistant,
- Contents = { new TextContent(finalChunk) }
- };
-
- channel.Writer.TryWrite(finalUpdate);
+ handler.Complete();
+ }
+ catch (Exception ex)
+ {
+ handler.CompleteWithError(ex);
}
-
- channel.Writer.Complete();
}
});
- registration = cancellationToken.Register(() => nativeToken?.Cancel());
+ // If cancellation was already requested before native call started
+ if (cancellationToken.IsCancellationRequested)
+ {
+ nativeToken?.Cancel();
+ }
- await foreach (var update in channel.Reader.ReadAllAsync(cancellationToken))
+ try
{
- yield return update;
+ await foreach (var update in handler.ReadAllAsync(cancellationToken))
+ {
+ yield return update;
+ }
+ }
+ finally
+ {
+ // Cancel native operation if consumer stopped iterating early (break, Take, etc.)
+ nativeToken?.Cancel();
+ registration.Dispose();
}
}
@@ -251,15 +255,31 @@ private static ChatMessageNative[] ToNative(IEnumerable messages, C
{
ArgumentNullException.ThrowIfNull(messages);
+ var messagesList = messages.ToList();
+
+ // Build a callId → name lookup from FunctionCallContent so FunctionResultContent can reference the tool name
+ var callIdToName = new Dictionary<string, string>();
+ foreach (var msg in messagesList)
+ {
+ foreach (var content in msg.Contents.OfType<FunctionCallContent>())
+ {
+ if (content.CallId is not null && content.Name is not null)
+ callIdToName[content.CallId] = content.Name;
+ }
+ }
+
var toConvert = options?.Instructions is not null
- ? messages.Prepend(new(ChatRole.System, options.Instructions))
- : messages;
+ ? messagesList.Prepend(new(ChatRole.System, options.Instructions))
+ : messagesList;
- ChatMessageNative[] nativeMessages = [.. toConvert.Select(ToNative)];
+ // Filter out any messages that produce empty native content as a safety net.
+ ChatMessageNative[] nativeMessages = [.. toConvert
+ .Select(m => ToNative(m, callIdToName))
+ .Where(m => m.Contents.Length > 0)];
if (nativeMessages.Length == 0)
{
- throw new ArgumentException("The messages collection must contain at least one message.", nameof(messages));
+ throw new ArgumentException("No messages with convertible content found. Ensure at least one message contains TextContent, FunctionCallContent, or FunctionResultContent.", nameof(messages));
}
return nativeMessages;
@@ -323,7 +343,8 @@ private static AIContent FromNative(AIContentNative content) =>
functionCall.Name,
JsonSerializer.Deserialize(
functionCall.Arguments,
- AIJsonUtilities.DefaultOptions)),
+ AIJsonUtilities.DefaultOptions))
+ { InformationalOnly = true },
#pragma warning restore IL3050, IL2026
FunctionResultContentNative functionResult =>
@@ -334,11 +355,11 @@ private static AIContent FromNative(AIContentNative content) =>
_ => throw new ArgumentException($"Unsupported content type: {content.GetType().Name}", nameof(content))
};
- private static ChatMessageNative ToNative(ChatMessage message) =>
+ private static ChatMessageNative ToNative(ChatMessage message, Dictionary<string, string>? callIdToName = null) =>
new()
{
Role = ToNative(message.Role),
- Contents = [.. message.Contents.SelectMany(ToNative)]
+ Contents = [.. message.Contents.SelectMany(c => ToNative(c, callIdToName))]
};
private static ChatRoleNative ToNative(ChatRole role)
@@ -355,7 +376,7 @@ private static ChatRoleNative ToNative(ChatRole role)
throw new ArgumentOutOfRangeException(nameof(role), $"The role '{role}' is not supported by Apple Intelligence chat APIs.");
}
- private static ChatOptionsNative? ToNative(ChatOptions? options, CancellationToken cancellationToken)
+ private ChatOptionsNative? ToNative(ChatOptions? options, CancellationToken cancellationToken)
{
if (options is null)
{
@@ -374,11 +395,11 @@ private static ChatRoleNative ToNative(ChatRole role)
Temperature = ToNative(options.Temperature),
MaxOutputTokens = ToNative(options.MaxOutputTokens),
ResponseJsonSchema = ToNative(options.ResponseFormat),
- Tools = ToNative(options.Tools, cancellationToken)
+ Tools = ToNative(options.Tools, cancellationToken, _functionInvocationServices)
};
}
- private static AIFunctionToolAdapter[]? ToNative(IList? tools, CancellationToken cancellationToken)
+ private AIFunctionToolAdapter[]? ToNative(IList<AITool>? tools, CancellationToken cancellationToken, IServiceProvider? services)
{
AIFunctionToolAdapter[]? adapters = null;
@@ -395,7 +416,7 @@ private static ChatRoleNative ToNative(ChatRole role)
adapters = tools
.OfType<AIFunction>()
- .Select(function => new AIFunctionToolAdapter(function, cancellationToken))
+ .Select(function => new AIFunctionToolAdapter(function, _logger, cancellationToken, services))
.ToArray();
}
@@ -406,7 +427,7 @@ private static ChatRoleNative ToNative(ChatRole role)
format switch
{
ChatResponseFormatJson jsonFormat when StrictSchemaTransformCache.GetOrCreateTransformedSchema(jsonFormat) is { } jsonSchema =>
- (NSString?)ChatResponseFormat.ForJsonSchema(jsonSchema, jsonFormat.SchemaName ?? "json_schema", jsonFormat.SchemaDescription).Schema.ToString(),
+ (NSString?)jsonSchema.GetRawText(),
ChatResponseFormatJson jsonFormat when jsonFormat.Schema is not null =>
throw new InvalidOperationException("Failed to transform JSON schema for Apple Intelligence chat API."),
ChatResponseFormatJson =>
@@ -414,13 +435,37 @@ ChatResponseFormatJson jsonFormat when StrictSchemaTransformCache.GetOrCreateTra
_ => null
};
- private static IEnumerable ToNative(AIContent content) =>
+ private static IEnumerable ToNative(AIContent content, Dictionary? callIdToName = null) =>
content switch
{
// Apple Intelligence performs better when each text content chunk is separated
TextContent textContent when textContent.Text is not null => [new TextContentNative(textContent.Text)],
TextContent => Array.Empty(),
+ // Function call/result content from prior tool-calling turns is converted to native types.
+ // The native Swift layer gracefully skips these when building the Transcript, since Apple's
+ // LanguageModelSession manages tool call state internally.
+ FunctionCallContent functionCall => [new FunctionCallContentNative(
+ functionCall.CallId ?? string.Empty,
+ functionCall.Name,
+#pragma warning disable IL3050, IL2026
+ functionCall.Arguments is not null ? JsonSerializer.Serialize(functionCall.Arguments, AIJsonUtilities.DefaultOptions) : "{}")],
+#pragma warning restore IL3050, IL2026
+
+ FunctionResultContent functionResult => [new FunctionResultContentNative(
+ functionResult.CallId ?? string.Empty,
+ // Look up the tool name from the corresponding FunctionCallContent via callId
+ (functionResult.CallId is not null && callIdToName?.TryGetValue(functionResult.CallId, out var name) == true) ? name : string.Empty,
+#pragma warning disable IL3050, IL2026
+ // If Result is already a string (common when replaying history), pass through to avoid double-serialization
+ functionResult.Result switch
+ {
+ string s => s,
+ not null => JsonSerializer.Serialize(functionResult.Result, AIJsonUtilities.DefaultOptions),
+ _ => "{}"
+ })],
+#pragma warning restore IL3050, IL2026
+
// Throw for unsupported content types
_ => throw new ArgumentException($"The content type '{content.GetType().FullName}' is not supported by Apple Intelligence chat APIs.", nameof(content))
};
@@ -434,7 +479,7 @@ private static IEnumerable ToNative(AIContent content) =>
private static NSNumber? ToNative(long? value) =>
value.HasValue ? NSNumber.FromInt64(value.Value) : null;
- private sealed class AIFunctionToolAdapter(AIFunction function, CancellationToken cancellationToken) : AIToolNative
+ private sealed partial class AIFunctionToolAdapter(AIFunction function, ILogger logger, CancellationToken cancellationToken, IServiceProvider? services) : AIToolNative
{
public override string Name => function.Name;
@@ -451,24 +496,54 @@ public override async void CallWithArguments(NSString arguments, AIToolCompletio
{
ArgumentNullException.ThrowIfNull(arguments);
- var aiArgs = JsonSerializer.Deserialize<AIFunctionArguments>((string)arguments, AIJsonUtilities.DefaultOptions);
+ var argsString = (string)arguments;
+
+ // Log before invocation — matches FunctionInvokingChatClient pattern
+ if (logger.IsEnabled(LogLevel.Trace))
+ {
+ LogInvokingSensitive(logger, function.Name, argsString);
+ }
+ else if (logger.IsEnabled(LogLevel.Debug))
+ {
+ LogInvoking(logger, function.Name);
+ }
+
+ var startingTimestamp = System.Diagnostics.Stopwatch.GetTimestamp();
+
+ var aiArgs = JsonSerializer.Deserialize<AIFunctionArguments>(argsString, AIJsonUtilities.DefaultOptions) ?? new();
+ aiArgs.Services = services;
var result = await function.InvokeAsync(aiArgs, cancellationToken: cancellationToken);
var resultJson = result is not null
- ? JsonSerializer.Serialize(result)
+ ? JsonSerializer.Serialize(result, AIJsonUtilities.DefaultOptions)
: "{}";
+ // Log after invocation
+ var duration = System.Diagnostics.Stopwatch.GetElapsedTime(startingTimestamp);
+ if (logger.IsEnabled(LogLevel.Trace))
+ {
+ LogInvocationCompletedSensitive(logger, function.Name, duration, resultJson);
+ }
+ else if (logger.IsEnabled(LogLevel.Debug))
+ {
+ LogInvocationCompleted(logger, function.Name, duration);
+ }
+
completionHandler(new NSString(resultJson), null);
}
catch (OperationCanceledException)
{
+ LogInvocationCanceled(logger, function.Name);
+
var error = new NSError(new NSString(nameof(ChatClientNative)), (int)ChatClientError.Cancelled);
completionHandler(null, error);
}
catch (Exception ex)
{
+ LogInvocationFailed(logger, function.Name, ex);
+
var userInfo = NSDictionary.FromObjectsAndKeys(
[new NSString(ex.Message)],
[NSError.LocalizedDescriptionKey]);
@@ -479,5 +554,24 @@ [new NSString(ex.Message)],
}
}
#pragma warning restore IL3050, IL2026
+
+ [LoggerMessage(LogLevel.Debug, "Invoking {MethodName}.", SkipEnabledCheck = true)]
+ private static partial void LogInvoking(ILogger logger, string methodName);
+
+ [LoggerMessage(LogLevel.Trace, "Invoking {MethodName}({Arguments}).", SkipEnabledCheck = true)]
+ private static partial void LogInvokingSensitive(ILogger logger, string methodName, string arguments);
+
+ [LoggerMessage(LogLevel.Debug, "{MethodName} invocation completed. Duration: {Duration}")]
+ private static partial void LogInvocationCompleted(ILogger logger, string methodName, TimeSpan duration);
+
+ [LoggerMessage(LogLevel.Trace, "{MethodName} invocation completed. Duration: {Duration}. Result: {Result}")]
+ private static partial void LogInvocationCompletedSensitive(ILogger logger, string methodName, TimeSpan duration, string result);
+
+ [LoggerMessage(LogLevel.Debug, "{MethodName} invocation canceled.")]
+ private static partial void LogInvocationCanceled(ILogger logger, string methodName);
+
+ [LoggerMessage(LogLevel.Error, "{MethodName} invocation failed.")]
+ private static partial void LogInvocationFailed(ILogger logger, string methodName, Exception error);
}
+
}
diff --git a/src/AI/src/Essentials.AI/Platform/NonStreamingResponseHandler.cs b/src/AI/src/Essentials.AI/Platform/NonStreamingResponseHandler.cs
new file mode 100644
index 000000000000..c2e4f49e4a2c
--- /dev/null
+++ b/src/AI/src/Essentials.AI/Platform/NonStreamingResponseHandler.cs
@@ -0,0 +1,42 @@
+using Microsoft.Extensions.AI;
+
+namespace Microsoft.Maui.Essentials.AI;
+
+/// <summary>
+/// Handles the TaskCompletionSource and completion for non-streaming responses.
+/// Extracted from the platform-specific chat client for testability.
+/// </summary>
+internal sealed class NonStreamingResponseHandler
+{
+ private readonly TaskCompletionSource<ChatResponse> _tcs =
+ new(TaskCreationOptions.RunContinuationsAsynchronously);
+
+ ///
+ /// The task that completes when the response is ready.
+ ///
+ public Task<ChatResponse> Task => _tcs.Task;
+
+ ///
+ /// Completes with a successful response.
+ ///
+ public void Complete(ChatResponse response)
+ {
+ _tcs.TrySetResult(response);
+ }
+
+ ///
+ /// Completes with an error. Safe to call multiple times.
+ ///
+ public void CompleteWithError(Exception exception)
+ {
+ _tcs.TrySetException(exception);
+ }
+
+ ///
+ /// Completes with a cancellation.
+ ///
+ public void CompleteCancelled(CancellationToken cancellationToken)
+ {
+ _tcs.TrySetCanceled(cancellationToken);
+ }
+}
diff --git a/src/AI/src/Essentials.AI/Platform/PlainTextStreamChunker.cs b/src/AI/src/Essentials.AI/Platform/PlainTextStreamChunker.cs
index 2ca710a6260e..76be9b2f61a7 100644
--- a/src/AI/src/Essentials.AI/Platform/PlainTextStreamChunker.cs
+++ b/src/AI/src/Essentials.AI/Platform/PlainTextStreamChunker.cs
@@ -45,4 +45,10 @@ public override string Flush()
// Plain text has no pending state to flush
return string.Empty;
}
+
+ ///
+ public override void Reset()
+ {
+ _lastResponse = "";
+ }
}
diff --git a/src/AI/src/Essentials.AI/Platform/StreamChunkerBase.cs b/src/AI/src/Essentials.AI/Platform/StreamChunkerBase.cs
index f134b6c30f49..c9815fd9cd96 100644
--- a/src/AI/src/Essentials.AI/Platform/StreamChunkerBase.cs
+++ b/src/AI/src/Essentials.AI/Platform/StreamChunkerBase.cs
@@ -22,4 +22,10 @@ internal abstract class StreamChunkerBase
///
/// Final chunk to complete the output (may be empty).
public abstract string Flush();
+
+ ///
+ /// Resets the chunker state. Call this when the streaming context changes
+ /// (e.g., after a tool call boundary) so the next text is treated as a fresh stream.
+ ///
+ public abstract void Reset();
}
diff --git a/src/AI/src/Essentials.AI/Platform/StreamingResponseHandler.cs b/src/AI/src/Essentials.AI/Platform/StreamingResponseHandler.cs
new file mode 100644
index 000000000000..bad126ae9deb
--- /dev/null
+++ b/src/AI/src/Essentials.AI/Platform/StreamingResponseHandler.cs
@@ -0,0 +1,115 @@
+using System.Text.Json;
+using System.Threading.Channels;
+using Microsoft.Extensions.AI;
+
+namespace Microsoft.Maui.Essentials.AI;
+
+/// <summary>
+/// Handles the channel, chunker, and update processing for streaming responses.
+/// Extracted from the platform-specific chat client for testability.
+/// </summary>
+internal sealed class StreamingResponseHandler
+{
+ private readonly Channel<ChatResponseUpdate> _channel;
+ private readonly StreamChunkerBase _chunker;
+
+ public StreamingResponseHandler(StreamChunkerBase chunker)
+ {
+ _channel = Channel.CreateUnbounded<ChatResponseUpdate>(
+ new UnboundedChannelOptions { SingleReader = true });
+ _chunker = chunker;
+ }
+
+ ///
+ /// Processes a content (text) streaming update.
+ ///
+ public void ProcessContent(string? text)
+ {
+ if (text is null)
+ return;
+
+ var delta = _chunker.Process(text);
+ if (!string.IsNullOrEmpty(delta))
+ {
+ _channel.Writer.TryWrite(new ChatResponseUpdate
+ {
+ Role = ChatRole.Assistant,
+ Contents = { new TextContent(delta) }
+ });
+ }
+ }
+
+ ///
+ /// Processes a tool call update. Flushes any pending content first.
+ ///
+ public void ProcessToolCall(string? toolCallId, string? toolCallName, string? toolCallArguments)
+ {
+ // Flush any pending content before resetting for tool call
+ var pendingContent = _chunker.Flush();
+ if (!string.IsNullOrEmpty(pendingContent))
+ {
+ _channel.Writer.TryWrite(new ChatResponseUpdate
+ {
+ Role = ChatRole.Assistant,
+ Contents = { new TextContent(pendingContent) }
+ });
+ }
+ _chunker.Reset();
+
+ var args = toolCallArguments is null
+ ? null
+#pragma warning disable IL3050, IL2026 // DefaultJsonTypeInfoResolver is only used when reflection-based serialization is enabled
+ : JsonSerializer.Deserialize<IDictionary<string, object?>>(toolCallArguments, AIJsonUtilities.DefaultOptions);
+#pragma warning restore IL3050, IL2026
+
+ _channel.Writer.TryWrite(new ChatResponseUpdate
+ {
+ Role = ChatRole.Assistant,
+ Contents = { new FunctionCallContent(toolCallId!, toolCallName!, args) { InformationalOnly = true } }
+ });
+ }
+
+ ///
+ /// Processes a tool result update.
+ ///
+ public void ProcessToolResult(string? toolCallId, string? toolCallResult)
+ {
+ _channel.Writer.TryWrite(new ChatResponseUpdate
+ {
+ Role = ChatRole.Tool,
+ Contents = { new FunctionResultContent(toolCallId!, toolCallResult!) }
+ });
+ }
+
+ ///
+ /// Flushes remaining chunker content and completes the channel successfully.
+ ///
+ public void Complete()
+ {
+ var finalChunk = _chunker.Flush();
+ if (!string.IsNullOrEmpty(finalChunk))
+ {
+ _channel.Writer.TryWrite(new ChatResponseUpdate
+ {
+ Role = ChatRole.Assistant,
+ Contents = { new TextContent(finalChunk) }
+ });
+ }
+
+ _channel.Writer.TryComplete();
+ }
+
+ ///
+ /// Completes the channel with an error. Safe to call multiple times.
+ ///
+ public void CompleteWithError(Exception exception)
+ {
+ _channel.Writer.TryComplete(exception);
+ }
+
+ ///
+ /// Returns an async enumerable that reads all updates from the channel.
+ ///
+ public IAsyncEnumerable<ChatResponseUpdate> ReadAllAsync(CancellationToken cancellationToken)
+ => _channel.Reader.ReadAllAsync(cancellationToken);
+}
diff --git a/src/AI/src/Essentials.AI/Properties/AssemblyInfo.cs b/src/AI/src/Essentials.AI/Properties/AssemblyInfo.cs
index b956bf51277d..721f6d9c78bd 100644
--- a/src/AI/src/Essentials.AI/Properties/AssemblyInfo.cs
+++ b/src/AI/src/Essentials.AI/Properties/AssemblyInfo.cs
@@ -7,3 +7,4 @@
[assembly: Experimental(DiagnosticIds.Experiments.EssentialsAI)]
[assembly: InternalsVisibleTo("Microsoft.Maui.Essentials.AI.UnitTests")]
+[assembly: InternalsVisibleTo("Microsoft.Maui.Essentials.AI.DeviceTests")]
diff --git a/src/AI/src/Essentials.AI/PublicAPI/net-ios/PublicAPI.Unshipped.txt b/src/AI/src/Essentials.AI/PublicAPI/net-ios/PublicAPI.Unshipped.txt
index 2994794f8a9d..3810e55ff5c5 100644
--- a/src/AI/src/Essentials.AI/PublicAPI/net-ios/PublicAPI.Unshipped.txt
+++ b/src/AI/src/Essentials.AI/PublicAPI/net-ios/PublicAPI.Unshipped.txt
@@ -2,6 +2,7 @@
Microsoft.Extensions.AI.NLEmbeddingExtensions
Microsoft.Maui.Essentials.AI.AppleIntelligenceChatClient
Microsoft.Maui.Essentials.AI.AppleIntelligenceChatClient.AppleIntelligenceChatClient() -> void
+Microsoft.Maui.Essentials.AI.AppleIntelligenceChatClient.AppleIntelligenceChatClient(Microsoft.Extensions.Logging.ILoggerFactory? loggerFactory = null, System.IServiceProvider? functionInvocationServices = null) -> void
Microsoft.Maui.Essentials.AI.AppleIntelligenceChatClient.GetResponseAsync(System.Collections.Generic.IEnumerable! messages, Microsoft.Extensions.AI.ChatOptions? options = null, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) -> System.Threading.Tasks.Task!
Microsoft.Maui.Essentials.AI.AppleIntelligenceChatClient.GetStreamingResponseAsync(System.Collections.Generic.IEnumerable! messages, Microsoft.Extensions.AI.ChatOptions? options = null, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) -> System.Collections.Generic.IAsyncEnumerable!
Microsoft.Maui.Essentials.AI.NLEmbeddingGenerator
diff --git a/src/AI/src/Essentials.AI/PublicAPI/net-maccatalyst/PublicAPI.Unshipped.txt b/src/AI/src/Essentials.AI/PublicAPI/net-maccatalyst/PublicAPI.Unshipped.txt
index 2994794f8a9d..3810e55ff5c5 100644
--- a/src/AI/src/Essentials.AI/PublicAPI/net-maccatalyst/PublicAPI.Unshipped.txt
+++ b/src/AI/src/Essentials.AI/PublicAPI/net-maccatalyst/PublicAPI.Unshipped.txt
@@ -2,6 +2,7 @@
Microsoft.Extensions.AI.NLEmbeddingExtensions
Microsoft.Maui.Essentials.AI.AppleIntelligenceChatClient
Microsoft.Maui.Essentials.AI.AppleIntelligenceChatClient.AppleIntelligenceChatClient() -> void
+Microsoft.Maui.Essentials.AI.AppleIntelligenceChatClient.AppleIntelligenceChatClient(Microsoft.Extensions.Logging.ILoggerFactory? loggerFactory = null, System.IServiceProvider? functionInvocationServices = null) -> void
Microsoft.Maui.Essentials.AI.AppleIntelligenceChatClient.GetResponseAsync(System.Collections.Generic.IEnumerable! messages, Microsoft.Extensions.AI.ChatOptions? options = null, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) -> System.Threading.Tasks.Task!
Microsoft.Maui.Essentials.AI.AppleIntelligenceChatClient.GetStreamingResponseAsync(System.Collections.Generic.IEnumerable! messages, Microsoft.Extensions.AI.ChatOptions? options = null, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) -> System.Collections.Generic.IAsyncEnumerable!
Microsoft.Maui.Essentials.AI.NLEmbeddingGenerator
diff --git a/src/AI/src/Essentials.AI/PublicAPI/net-macos/PublicAPI.Unshipped.txt b/src/AI/src/Essentials.AI/PublicAPI/net-macos/PublicAPI.Unshipped.txt
index 2994794f8a9d..3810e55ff5c5 100644
--- a/src/AI/src/Essentials.AI/PublicAPI/net-macos/PublicAPI.Unshipped.txt
+++ b/src/AI/src/Essentials.AI/PublicAPI/net-macos/PublicAPI.Unshipped.txt
@@ -2,6 +2,7 @@
Microsoft.Extensions.AI.NLEmbeddingExtensions
Microsoft.Maui.Essentials.AI.AppleIntelligenceChatClient
Microsoft.Maui.Essentials.AI.AppleIntelligenceChatClient.AppleIntelligenceChatClient() -> void
+Microsoft.Maui.Essentials.AI.AppleIntelligenceChatClient.AppleIntelligenceChatClient(Microsoft.Extensions.Logging.ILoggerFactory? loggerFactory = null, System.IServiceProvider? functionInvocationServices = null) -> void
Microsoft.Maui.Essentials.AI.AppleIntelligenceChatClient.GetResponseAsync(System.Collections.Generic.IEnumerable! messages, Microsoft.Extensions.AI.ChatOptions? options = null, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) -> System.Threading.Tasks.Task!
Microsoft.Maui.Essentials.AI.AppleIntelligenceChatClient.GetStreamingResponseAsync(System.Collections.Generic.IEnumerable! messages, Microsoft.Extensions.AI.ChatOptions? options = null, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) -> System.Collections.Generic.IAsyncEnumerable!
Microsoft.Maui.Essentials.AI.NLEmbeddingGenerator
diff --git a/src/AI/tests/Essentials.AI.DeviceTests/MauiProgram.cs b/src/AI/tests/Essentials.AI.DeviceTests/MauiProgram.cs
index a275613f40b4..b60dc580613a 100644
--- a/src/AI/tests/Essentials.AI.DeviceTests/MauiProgram.cs
+++ b/src/AI/tests/Essentials.AI.DeviceTests/MauiProgram.cs
@@ -1,4 +1,5 @@
-using System.Reflection;
+using System.Linq;
+using System.Reflection;
using Microsoft.Extensions.Configuration;
using Microsoft.Maui.Hosting;
using Microsoft.Maui.TestUtils.DeviceTests.Runners;
@@ -28,6 +29,9 @@ public static MauiApp CreateMauiApp()
{
typeof(MauiProgram).Assembly
},
+ SkipCategories = Traits
+ .GetSkipTraits()
+ .ToList(),
})
.UseHeadlessRunner(new HeadlessRunnerOptions
{
diff --git a/src/AI/tests/Essentials.AI.DeviceTests/Properties/launchSettings.json b/src/AI/tests/Essentials.AI.DeviceTests/Properties/launchSettings.json
new file mode 100644
index 000000000000..537e7f5c2051
--- /dev/null
+++ b/src/AI/tests/Essentials.AI.DeviceTests/Properties/launchSettings.json
@@ -0,0 +1,7 @@
+{
+ "profiles": {
+ "Windows Machine": {
+ "commandName": "MsixPackage"
+ }
+ }
+}
diff --git a/src/AI/tests/Essentials.AI.DeviceTests/Tests/ChatClientFunctionCallingTests.cs b/src/AI/tests/Essentials.AI.DeviceTests/Tests/ChatClientFunctionCallingTests.cs
index be475b3379ed..7e181c2c9014 100644
--- a/src/AI/tests/Essentials.AI.DeviceTests/Tests/ChatClientFunctionCallingTests.cs
+++ b/src/AI/tests/Essentials.AI.DeviceTests/Tests/ChatClientFunctionCallingTests.cs
@@ -196,6 +196,42 @@ public async Task GetStreamingResponseAsync_StreamsToolResultContent()
Assert.True(foundToolResultContent, "Should receive at least one update with FunctionResultContent");
}
+ [Fact]
+ public async Task GetStreamingResponseAsync_ToolResultHasCorrectRole()
+ {
+ // Verifies that streaming updates containing FunctionResultContent
+ // use ChatRole.Tool (not ChatRole.Assistant), matching M.E.AI conventions.
+
+ var weatherTool = AIFunctionFactory.Create(
+ (string location) => $"Rainy, 55°F in {location}",
+ name: "GetWeather",
+ description: "Gets the weather for a location");
+
+ var client = EnableFunctionCalling(new T());
+ var options = new ChatOptions { Tools = [weatherTool] };
+
+ ChatRole? toolResultRole = null;
+ ChatRole? toolCallRole = null;
+
+ await foreach (var update in client.GetStreamingResponseAsync(
+ [new ChatMessage(ChatRole.User, "What's the weather in Portland?")], options))
+ {
+ foreach (var content in update.Contents)
+ {
+ if (content is FunctionCallContent)
+ toolCallRole = update.Role;
+ else if (content is FunctionResultContent)
+ toolResultRole = update.Role;
+ }
+ }
+
+ Assert.NotNull(toolCallRole);
+ Assert.Equal(ChatRole.Assistant, toolCallRole);
+
+ Assert.NotNull(toolResultRole);
+ Assert.Equal(ChatRole.Tool, toolResultRole);
+ }
+
[Fact]
public async Task GetStreamingResponseAsync_StreamsToolCallBeforeToolResult()
{
@@ -621,6 +657,821 @@ public async Task GetStreamingResponseAsync_FunctionWithEnumParameter_CallsToolC
capturedQuery.Contains("maui", StringComparison.OrdinalIgnoreCase),
$"The natural language query should relate to the user's request, but got: {capturedQuery}");
}
+
+ [Fact]
+ public async Task GetResponseAsync_MultiTurnConversationWithToolCalling_SucceedsOnFollowUp()
+ {
+ var weatherTool = AIFunctionFactory.Create(
+ (string location) => $"Sunny, 72°F in {location}",
+ name: "GetWeather",
+ description: "Gets the weather for a location");
+
+ var client = EnableFunctionCalling(new T());
+ var options = new ChatOptions
+ {
+ Tools = [weatherTool]
+ };
+
+ // Turn 1: Ask about weather (triggers tool call)
+ var messages = new List<ChatMessage>
+ {
+ new(ChatRole.User, "What's the weather in Seattle?")
+ };
+
+ var firstResponse = await client.GetResponseAsync(messages, options);
+ Assert.NotNull(firstResponse);
+ Assert.NotNull(firstResponse.Messages);
+ Assert.True(firstResponse.Messages.Count > 0, "First response should have messages");
+
+ // Verify tool calling occurred in the response
+ bool hasFunctionContent = firstResponse.Messages
+ .Any(m => m.Contents.Any(c => c is FunctionCallContent || c is FunctionResultContent));
+ Assert.True(hasFunctionContent, "First response should contain function call/result content from tool calling");
+
+ // Turn 2: Build conversation history with all messages from first turn, then add follow-up
+ // This is the pattern that triggers the bug: FunctionCallContent/FunctionResultContent
+ // in the history causes ToNative to throw "content type not supported"
+ var followUpMessages = new List<ChatMessage>(firstResponse.Messages)
+ {
+ new(ChatRole.User, "What about in Portland?")
+ };
+
+ var secondResponse = await client.GetResponseAsync(followUpMessages, options);
+
+ Assert.NotNull(secondResponse);
+ Assert.NotNull(secondResponse.Messages);
+ Assert.True(secondResponse.Messages.Count > 0, "Second response should have messages");
+ }
+
+ [Fact]
+ public async Task GetStreamingResponseAsync_MultiTurnConversationWithToolCalling_SucceedsOnFollowUp()
+ {
+ var weatherTool = AIFunctionFactory.Create(
+ (string location) => $"Sunny, 72°F in {location}",
+ name: "GetWeather",
+ description: "Gets the weather for a location");
+
+ var client = EnableFunctionCalling(new T());
+ var options = new ChatOptions
+ {
+ Tools = [weatherTool]
+ };
+
+ // Turn 1: Ask about weather (triggers tool call)
+ var messages = new List<ChatMessage>
+ {
+ new(ChatRole.User, "What's the weather in Seattle?")
+ };
+
+ var firstResponse = await client.GetResponseAsync(messages, options);
+ Assert.NotNull(firstResponse);
+ Assert.NotNull(firstResponse.Messages);
+
+ // Verify tool calling occurred
+ bool hasFunctionContent = firstResponse.Messages
+ .Any(m => m.Contents.Any(c => c is FunctionCallContent || c is FunctionResultContent));
+ Assert.True(hasFunctionContent, "First response should contain function call/result content from tool calling");
+
+ // Turn 2: Stream follow-up with full conversation history including tool call/result content
+ var followUpMessages = new List<ChatMessage>(firstResponse.Messages)
+ {
+ new(ChatRole.User, "What about in Portland?")
+ };
+
+ bool receivedAnyUpdate = false;
+ await foreach (var update in client.GetStreamingResponseAsync(followUpMessages, options))
+ {
+ receivedAnyUpdate = true;
+ Assert.NotNull(update);
+ }
+
+ Assert.True(receivedAnyUpdate, "Should receive at least one streaming update for the follow-up");
+ }
+
+ [Fact]
+ public async Task GetStreamingResponseAsync_MultiTurnWithToolCalling_HistoryBuiltFromStreamedContent_SucceedsOnFollowUp()
+ {
+ // This test simulates the exact pattern ChatViewModel uses:
+ // Add each content item to history as it arrives, preserving stream order.
+ // This catches both the original bug (dropping tool content) and the ordering bug
+ // (grouping all calls together instead of interleaving call→result pairs).
+
+ var weatherTool = AIFunctionFactory.Create(
+ (string location) => $"Sunny, 72°F in {location}",
+ name: "GetWeather",
+ description: "Gets the weather for a location");
+
+ var client = EnableFunctionCalling(new T());
+ var options = new ChatOptions
+ {
+ Tools = [weatherTool]
+ };
+
+ // Turn 1: Stream the first response, building history inline
+ var turn1UserMessage = new ChatMessage(ChatRole.User, "What's the weather in Seattle?");
+ var history = new List<ChatMessage> { turn1UserMessage };
+
+ bool hasFunctionCall = false;
+ bool hasFunctionResult = false;
+ bool hasText = false;
+ ChatMessage? textMessage = null;
+
+ await foreach (var update in client.GetStreamingResponseAsync([turn1UserMessage], options))
+ {
+ foreach (var content in update.Contents)
+ {
+ switch (content)
+ {
+ case FunctionCallContent fc:
+ history.Add(new ChatMessage(ChatRole.Assistant, [fc]));
+ textMessage = null;
+ hasFunctionCall = true;
+ break;
+ case FunctionResultContent fr:
+ history.Add(new ChatMessage(ChatRole.Tool, [fr]));
+ hasFunctionResult = true;
+ break;
+ case TextContent tc when !string.IsNullOrEmpty(tc.Text):
+ if (textMessage is null)
+ {
+ textMessage = new ChatMessage(ChatRole.Assistant, [tc]);
+ history.Add(textMessage);
+ }
+ else
+ {
+ textMessage.Contents.Add(tc);
+ }
+ hasText = true;
+ break;
+ }
+ }
+ }
+
+ Assert.True(hasFunctionCall, "Streaming should produce FunctionCallContent");
+ Assert.True(hasFunctionResult, "Streaming should produce FunctionResultContent");
+ Assert.True(hasText, "Streaming should produce TextContent");
+
+ // Turn 2: Follow-up using the inline-built history
+ history.Add(new ChatMessage(ChatRole.User, "What about in Portland?"));
+
+ var secondResponse = await client.GetResponseAsync(history, options);
+
+ Assert.NotNull(secondResponse);
+ Assert.NotNull(secondResponse.Messages);
+ Assert.True(secondResponse.Messages.Count > 0, "Second response should have messages after streaming-based history");
+ }
+
+ [Fact]
+ public async Task GetStreamingResponseAsync_MultiTurnWithToolCalling_HistoryBuiltFromStreamedContent_ToolResultsPreservedInContext()
+ {
+ // Verifies tool results from streamed turn 1 are available in turn 2's context.
+ // Uses a distinctive value (47°F) that the model can't hallucinate.
+ // History is built inline as content arrives, preserving stream order.
+
+ var weatherTool = AIFunctionFactory.Create(
+ (string location) => $"The current weather in {location} is 47 degrees Fahrenheit and rainy",
+ name: "GetWeather",
+ description: "Gets the weather for a location");
+
+ var client = EnableFunctionCalling(new T());
+ var toolOptions = new ChatOptions
+ {
+ Tools = [weatherTool]
+ };
+
+ // Turn 1: Stream and build history inline
+ var turn1UserMessage = new ChatMessage(ChatRole.User, "What's the weather in Seattle?");
+ var history = new List<ChatMessage> { turn1UserMessage };
+ ChatMessage? textMessage = null;
+
+ await foreach (var update in client.GetStreamingResponseAsync([turn1UserMessage], toolOptions))
+ {
+ foreach (var content in update.Contents)
+ {
+ switch (content)
+ {
+ case FunctionCallContent fc:
+ history.Add(new ChatMessage(ChatRole.Assistant, [fc]));
+ textMessage = null;
+ break;
+ case FunctionResultContent fr:
+ history.Add(new ChatMessage(ChatRole.Tool, [fr]));
+ break;
+ case TextContent tc when !string.IsNullOrEmpty(tc.Text):
+ if (textMessage is null)
+ {
+ textMessage = new ChatMessage(ChatRole.Assistant, [tc]);
+ history.Add(textMessage);
+ }
+ else
+ {
+ textMessage.Contents.Add(tc);
+ }
+ break;
+ }
+ }
+ }
+
+ // Turn 2: Ask about the temperature WITHOUT tools — forces model to recall from context
+ history.Add(new ChatMessage(ChatRole.User,
+ "What was the exact temperature in Fahrenheit that the weather check returned for Seattle? Reply with just the number."));
+
+ var secondResponse = await client.GetResponseAsync(history);
+ Assert.NotNull(secondResponse);
+
+ var responseText = secondResponse.Text ?? string.Empty;
+ Assert.True(responseText.Contains("47", StringComparison.Ordinal),
+ $"Follow-up should reference the tool result temperature (47°F) from streamed history, proving tool context is preserved. Got: '{responseText}'");
+ }
+
+ [Fact]
+ public async Task GetResponseAsync_MultiTurnConversationWithToolCalling_ToolResultsPreservedInContext()
+ {
+ // Use a distinctive, unlikely-to-be-hallucinated temperature value
+ var weatherTool = AIFunctionFactory.Create(
+ (string location) => $"The current weather in {location} is 47 degrees Fahrenheit and rainy",
+ name: "GetWeather",
+ description: "Gets the weather for a location");
+
+ var client = EnableFunctionCalling(new T());
+ var toolOptions = new ChatOptions
+ {
+ Tools = [weatherTool]
+ };
+
+ // Turn 1: Ask about weather WITH tools (triggers tool call + result)
+ var messages = new List<ChatMessage>
+ {
+ new(ChatRole.User, "What's the weather in Seattle?")
+ };
+
+ var firstResponse = await client.GetResponseAsync(messages, toolOptions);
+ Assert.NotNull(firstResponse);
+
+ // Turn 2: Ask about the temperature WITHOUT tools — forces the model to recall from context.
+ // If tool results were dropped from the transcript, the model has no way to know "47".
+ var followUpMessages = new List<ChatMessage>(firstResponse.Messages)
+ {
+ new(ChatRole.User, "What was the exact temperature in Fahrenheit that the weather check returned for Seattle? Reply with just the number.")
+ };
+
+ // No tools on follow-up — model must rely on conversation history
+ var secondResponse = await client.GetResponseAsync(followUpMessages);
+ Assert.NotNull(secondResponse);
+
+ var responseText = secondResponse.Text ?? string.Empty;
+ Assert.True(responseText.Contains("47", StringComparison.Ordinal),
+ $"Follow-up response should reference the tool result temperature (47°F), proving tool results are preserved in context. Got: '{responseText}'");
+ }
+
+ [Fact]
+ public async Task GetStreamingResponseAsync_WithToolCalling_NoNullTextContent()
+ {
+ // Verifies that no text content with the literal value "null" leaks through
+ // the streaming pipeline during tool-calling conversations. This guards against
+ // both Apple framework sentinels and any serialization bugs in our code.
+
+ var weatherTool = AIFunctionFactory.Create(
+ (string location) => $"Sunny, 72°F in {location}",
+ name: "GetWeather",
+ description: "Gets the weather for a location");
+
+ var client = EnableFunctionCalling(new T());
+ var options = new ChatOptions
+ {
+ Tools = [weatherTool]
+ };
+
+ var allTextDeltas = new List<string>();
+
+ await foreach (var update in client.GetStreamingResponseAsync(
+ [new ChatMessage(ChatRole.User, "What's the weather in Seattle?")], options))
+ {
+ foreach (var content in update.Contents)
+ {
+ if (content is TextContent tc && tc.Text is not null)
+ {
+ allTextDeltas.Add(tc.Text);
+ }
+ }
+ }
+
+ // No text delta should be the literal string "null"
+ Assert.DoesNotContain("null", allTextDeltas);
+ Assert.True(allTextDeltas.Count > 0, "Should receive at least one text delta");
+ }
+
+ [Fact]
+ public async Task GetStreamingResponseAsync_WithToolCalling_StreamOrderIsToolsBeforeResponse()
+ {
+ // Records the exact order of content types in a tool-calling stream
+ // to verify that tool calls/results arrive BEFORE the final text response.
+
+ var weatherTool = AIFunctionFactory.Create(
+ (string location) => $"Sunny, 72°F in {location}",
+ name: "GetWeather",
+ description: "Gets the weather for a location");
+
+ var client = EnableFunctionCalling(new T());
+ var options = new ChatOptions
+ {
+ Tools = [weatherTool]
+ };
+
+ // Record every content item in order: (type, snippet)
+ var streamLog = new List<(string Type, string Snippet)>();
+
+ await foreach (var update in client.GetStreamingResponseAsync(
+ [new ChatMessage(ChatRole.User, "What's the weather in Seattle?")], options))
+ {
+ foreach (var content in update.Contents)
+ {
+ switch (content)
+ {
+ case FunctionCallContent fc:
+ streamLog.Add(("ToolCall", $"{fc.Name}({fc.CallId})"));
+ break;
+ case FunctionResultContent fr:
+ streamLog.Add(("ToolResult", $"{fr.CallId}: {fr.Result?.ToString()?[..Math.Min(fr.Result?.ToString()?.Length ?? 0, 40)]}"));
+ break;
+ case TextContent tc when !string.IsNullOrEmpty(tc.Text):
+ streamLog.Add(("Text", tc.Text.Length > 60 ? tc.Text[..60] + "..." : tc.Text));
+ break;
+ }
+ }
+ }
+
+ // Output the full stream log for diagnostics
+ var logSummary = string.Join("\n", streamLog.Select((item, i) => $" [{i}] {item.Type}: {item.Snippet}"));
+ var typeOrder = string.Join(" → ", streamLog.Select(x => x.Type));
+
+ // Basic sanity: we got tool calls and text
+ Assert.True(streamLog.Any(x => x.Type == "ToolCall"),
+ $"Expected at least one ToolCall in stream. Full log:\n{logSummary}");
+ Assert.True(streamLog.Any(x => x.Type == "Text"),
+ $"Expected at least one Text in stream. Full log:\n{logSummary}");
+
+ // Find the index of the first tool call and first text
+ int firstToolCallIndex = streamLog.FindIndex(x => x.Type == "ToolCall");
+ int firstTextIndex = streamLog.FindIndex(x => x.Type == "Text");
+
+ // Assert: tool calls should arrive BEFORE text (the model can't respond before calling tools)
+ Assert.True(firstToolCallIndex < firstTextIndex,
+ $"Expected ToolCall (index {firstToolCallIndex}) before Text (index {firstTextIndex}). " +
+ $"Stream order: {typeOrder}\nFull log:\n{logSummary}");
+ }
+
+ [Fact]
+ public async Task GetStreamingResponseAsync_WithToolCalling_StreamOrderPreservedThroughFICC()
+ {
+ // Tests the stream order through a FunctionInvokingChatClient middleware chain.
+ // The Apple Intelligence client sets InformationalOnly=true on FunctionCallContent,
+ // so FICC passes them through without trying to invoke them.
+ // The raw client sends ToolCall→ToolResult→Text. Verify FICC doesn't reorder.
+
+ var weatherTool = AIFunctionFactory.Create(
+ (string location) => $"Sunny, 72°F in {location}",
+ name: "GetWeather",
+ description: "Gets the weather for a location");
+
+ var rawClient = EnableFunctionCalling(new T());
+
+ // FICC wraps the raw client — InformationalOnly FunctionCallContent should pass through
+ var ficc = new FunctionInvokingChatClient(rawClient);
+
+ var options = new ChatOptions { Tools = [weatherTool] };
+ var streamLog = new List<(string Type, string Snippet)>();
+
+ await foreach (var update in ficc.GetStreamingResponseAsync(
+ [new ChatMessage(ChatRole.User, "What's the weather in Seattle?")], options))
+ {
+ foreach (var content in update.Contents)
+ {
+ switch (content)
+ {
+ case FunctionCallContent fc:
+ streamLog.Add(("ToolCall", $"{fc.Name}({fc.CallId})"));
+ break;
+ case FunctionResultContent fr:
+ streamLog.Add(("ToolResult", $"{fr.CallId}"));
+ break;
+ case TextContent tc when !string.IsNullOrEmpty(tc.Text):
+ streamLog.Add(("Text", tc.Text.Length > 60 ? tc.Text[..60] + "..." : tc.Text));
+ break;
+ }
+ }
+ }
+
+ var logSummary = string.Join("\n", streamLog.Select((item, i) => $" [{i}] {item.Type}: {item.Snippet}"));
+ var typeOrder = string.Join(" → ", streamLog.Select(x => x.Type));
+
+ Assert.True(streamLog.Any(x => x.Type == "ToolCall"),
+ $"Expected at least one ToolCall. Full log:\n{logSummary}");
+ Assert.True(streamLog.Any(x => x.Type == "Text"),
+ $"Expected at least one Text. Full log:\n{logSummary}");
+
+ int firstToolCallIndex = streamLog.FindIndex(x => x.Type == "ToolCall");
+ int firstTextIndex = streamLog.FindIndex(x => x.Type == "Text");
+
+ Assert.True(firstToolCallIndex < firstTextIndex,
+ $"FICC reordered stream! ToolCall (index {firstToolCallIndex}) should come before Text (index {firstTextIndex}). " +
+ $"Stream order: {typeOrder}\nFull log:\n{logSummary}");
+ }
+
+ [Fact]
+ public async Task GetStreamingResponseAsync_InformationalOnlyFunctionCalls_NotInvokedByFICC()
+ {
+ // The native Apple Intelligence framework invokes tools itself (via AIFunctionToolAdapter).
+ // InformationalOnly=true prevents FICC from invoking them AGAIN.
+ // So we expect exactly 1 invocation (native), not 2 (native + FICC).
+
+ int invocationCount = 0;
+ var weatherTool = AIFunctionFactory.Create(
+ (string location) =>
+ {
+ Interlocked.Increment(ref invocationCount);
+ return $"Clear skies, 72°F in {location}";
+ },
+ name: "GetWeather",
+ description: "Gets the weather for a location");
+
+ var rawClient = EnableFunctionCalling(new T());
+ var ficc = new FunctionInvokingChatClient(rawClient);
+
+ var options = new ChatOptions { Tools = [weatherTool] };
+ var functionCallsSeen = new List<FunctionCallContent>();
+
+ await foreach (var update in ficc.GetStreamingResponseAsync(
+ [new ChatMessage(ChatRole.User, "What's the weather in Seattle?")], options))
+ {
+ foreach (var content in update.Contents.OfType<FunctionCallContent>())
+ {
+ Assert.True(content.InformationalOnly,
+ $"FunctionCallContent '{content.Name}' should have InformationalOnly=true");
+ functionCallsSeen.Add(content);
+ }
+ }
+
+ Assert.NotEmpty(functionCallsSeen);
+ Assert.Equal(1, invocationCount); // 1 = native only; 2 would mean FICC also invoked
+ }
+
+ [Fact]
+ public async Task GetStreamingResponseAsync_MultiTurnWithToolCalling_ContentOrderPreserved()
+ {
+ // Verifies that conversation history built from streaming preserves the
+ // correct interleaving order: each FunctionCallContent (Assistant) is followed
+ // by its FunctionResultContent (Tool) before the next call or text.
+
+ var weatherTool = AIFunctionFactory.Create(
+ (string location) => $"Clear skies, 68°F in {location}",
+ name: "GetWeather",
+ description: "Gets the weather for a location");
+
+ var client = EnableFunctionCalling(new T());
+ var options = new ChatOptions
+ {
+ Tools = [weatherTool]
+ };
+
+ // Stream and build history, tracking the order of content types
+ var history = new List<ChatMessage>
+ {
+ new(ChatRole.User, "What's the weather in Seattle?")
+ };
+ var contentOrder = new List<string>(); // Track: "call", "result", "text"
+ ChatMessage? textMessage = null;
+
+ await foreach (var update in client.GetStreamingResponseAsync(
+ [new ChatMessage(ChatRole.User, "What's the weather in Seattle?")], options))
+ {
+ foreach (var content in update.Contents)
+ {
+ switch (content)
+ {
+ case FunctionCallContent fc:
+ contentOrder.Add("call");
+ history.Add(new ChatMessage(ChatRole.Assistant, [fc]));
+ textMessage = null;
+ break;
+ case FunctionResultContent fr:
+ contentOrder.Add("result");
+ history.Add(new ChatMessage(ChatRole.Tool, [fr]));
+ break;
+ case TextContent tc when !string.IsNullOrEmpty(tc.Text):
+ if (!contentOrder.Contains("text") || contentOrder.Last() != "text")
+ contentOrder.Add("text");
+ if (textMessage is null)
+ {
+ textMessage = new ChatMessage(ChatRole.Assistant, [tc]);
+ history.Add(textMessage);
+ }
+ else
+ {
+ textMessage.Contents.Add(tc);
+ }
+ break;
+ }
+ }
+ }
+
+ // Verify: every "call" is immediately followed by "result" (no reordering)
+ for (int i = 0; i < contentOrder.Count; i++)
+ {
+ if (contentOrder[i] == "call")
+ {
+ Assert.True(i + 1 < contentOrder.Count && contentOrder[i + 1] == "result",
+ $"FunctionCall at index {i} should be followed by FunctionResult. Order: [{string.Join(", ", contentOrder)}]");
+ }
+ }
+
+ // Verify the history can be used for a follow-up (ordering is valid for the transcript)
+ history.Add(new ChatMessage(ChatRole.User, "What about Portland?"));
+ var followUp = await client.GetResponseAsync(history, options);
+ Assert.NotNull(followUp);
+ Assert.True(followUp.Messages.Count > 0, "Follow-up should succeed with correctly ordered history");
+ }
+
+ [Fact]
+ public async Task GetStreamingResponseAsync_WithToolCalling_NoNullTextBeforeToolCalls()
+ {
+ // Captures the EXACT raw sequence of all content items during a tool-calling stream.
+ // Verifies: (1) no text delta contains "null" (case-insensitive substring),
+ // (2) all text is non-empty, (3) tool calls always have valid arguments.
+ // Uses a landmarks query that exercises search tools to maximize coverage.
+
+ var landmarkTool = AIFunctionFactory.Create(
+ (string query) => """
+ [
+ {"name": "Table Mountain", "country": "South Africa"},
+ {"name": "Victoria Falls", "country": "Zimbabwe"},
+ {"name": "Pyramids of Giza", "country": "Egypt"}
+ ]
+ """,
+ name: "SearchLandmarks",
+ description: "Searches for landmarks and points of interest by query");
+
+ var client = EnableFunctionCalling(new T());
+ var options = new ChatOptions
+ {
+ Tools = [landmarkTool]
+ };
+
+ // Capture every single content item with its exact value
+ var rawLog = new List<(string Type, string Value)>();
+
+ await foreach (var update in client.GetStreamingResponseAsync(
+ [new ChatMessage(ChatRole.User, "What are famous landmarks in Africa?")], options))
+ {
+ foreach (var content in update.Contents)
+ {
+ switch (content)
+ {
+ case TextContent tc:
+ rawLog.Add(("Text", tc.Text ?? ""));
+ break;
+ case FunctionCallContent fc:
+ rawLog.Add(("ToolCall", $"{fc.Name}({fc.CallId}): {fc.Arguments}"));
+ break;
+ case FunctionResultContent fr:
+ rawLog.Add(("ToolResult", $"{fr.CallId}: {fr.Result}"));
+ break;
+ default:
+ rawLog.Add((content.GetType().Name, content.ToString() ?? ""));
+ break;
+ }
+ }
+ }
+
+ var logSummary = string.Join("\n", rawLog.Select((item, i) => $" [{i}] {item.Type}: {item.Value}"));
+
+ // 1. No text should be the literal "null" or contain "null" as a value artifact
+ var textItems = rawLog.Where(x => x.Type == "Text").ToList();
+ foreach (var (_, value) in textItems)
+ {
+ Assert.False(string.Equals(value, "null", StringComparison.OrdinalIgnoreCase),
+ $"Found literal 'null' text in stream. Full log:\n{logSummary}");
+ Assert.False(string.Equals(value, "", StringComparison.Ordinal),
+ $"Found null TextContent.Text in stream. Full log:\n{logSummary}");
+ }
+
+ // 2. All text should be non-empty (Swift guard and C# filter should prevent this)
+ foreach (var (_, value) in textItems)
+ {
+ Assert.False(string.IsNullOrWhiteSpace(value),
+ $"Found empty/whitespace-only text in stream. Full log:\n{logSummary}");
+ }
+
+ // 3. Should have tool calls and text in the response
+ Assert.True(rawLog.Any(x => x.Type == "ToolCall"),
+ $"Expected at least one ToolCall. Full log:\n{logSummary}");
+ Assert.True(textItems.Count > 0,
+ $"Expected at least one Text item. Full log:\n{logSummary}");
+ }
+
+ [Fact]
+ public async Task GetStreamingResponseAsync_ViewModelSimulation_ThinkingBubbleRemovedBeforeToolCalls()
+ {
+ // Simulates the ChatViewModel's exact state machine to reproduce the "null thinking bubble" bug.
+ // The user reports: "what are landmarks in africa" → thinking bubble text goes to "null",
+ // then tool calls appear, then text streams — but the thinking bubble stays stuck.
+ //
+ // This test runs through the FULL middleware pipeline (wrap→FICC→unwrap) matching the app,
+ // then processes each update exactly as ChatViewModel does, recording what happens to the
+ // simulated "thinking bubble".
+
+ var landmarkTool = AIFunctionFactory.Create(
+ (string query) => """
+ [
+ {"name": "Table Mountain", "country": "South Africa"},
+ {"name": "Victoria Falls", "country": "Zimbabwe"},
+ {"name": "Pyramids of Giza", "country": "Egypt"}
+ ]
+ """,
+ name: "SearchLandmarks",
+ description: "Searches for landmarks and points of interest by query");
+
+ // Set up the pipeline: AppleClient → FunctionInvokingChatClient
+ // InformationalOnly=true on FunctionCallContent prevents FICC from invoking them
+ var rawClient = EnableFunctionCalling(new T());
+ var ficc = new FunctionInvokingChatClient(rawClient);
+ var pipeline = ficc;
+
+ var options = new ChatOptions { Tools = [landmarkTool] };
+
+ // === Simulate ChatViewModel state machine ===
+ string thinkingText = "Thinking...";
+ bool thinkingInMessages = true;
+ string? assistantText = null;
+ #pragma warning disable CS0219 // Variable assigned but never read — it's used for state tracking
+ var assistantIsThinking = false; // true when assistantBubble == thinkingBubble
+ #pragma warning restore CS0219
+ var preToolTextValues = new List<string>(); // Any text set on thinking bubble before tool calls
+ var stateLog = new List<string>(); // Full state transition log
+ bool toolCallSeen = false;
+
+ await foreach (var update in pipeline.GetStreamingResponseAsync(
+ [new ChatMessage(ChatRole.User, "what are landmarks in africa")], options))
+ {
+ foreach (var content in update.Contents)
+ {
+ switch (content)
+ {
+ case TextContent tc when !string.IsNullOrEmpty(tc.Text):
+ if (!toolCallSeen)
+ {
+ // Text arriving BEFORE tool calls — this is the scenario that causes the bug
+ if (assistantText is null)
+ {
+ if (thinkingInMessages)
+ {
+ // Line 122: thinkingBubble.Text = textContent.Text
+ thinkingText = tc.Text;
+ assistantText = thinkingText;
+ assistantIsThinking = true;
+ preToolTextValues.Add(tc.Text);
+ stateLog.Add($"PRE-TOOL TEXT on thinking bubble: \"{tc.Text}\"");
+ }
+ else
+ {
+ assistantText = tc.Text;
+ stateLog.Add($"PRE-TOOL TEXT new bubble: \"{tc.Text}\"");
+ }
+ }
+ else
+ {
+ assistantText += tc.Text;
+ stateLog.Add($"PRE-TOOL TEXT append: \"{tc.Text}\"");
+ }
+ }
+ else
+ {
+ stateLog.Add($"POST-TOOL TEXT: \"{(tc.Text.Length > 60 ? tc.Text[..60] + "..." : tc.Text)}\"");
+ }
+ break;
+
+ case FunctionCallContent fc:
+ toolCallSeen = true;
+ stateLog.Add($"TOOL CALL: {fc.Name}({fc.CallId})");
+
+ // Simulate the FunctionCallContent handler (FIXED version)
+ if (assistantText is not null)
+ {
+ stateLog.Add($" → Removing assistant bubble with text: \"{assistantText}\"");
+ // This is the bug: OLD code would keep the bubble if text.Trim() was not empty
+ // NEW code always removes it
+ thinkingInMessages = false;
+ assistantText = null;
+ assistantIsThinking = false;
+ }
+ else
+ {
+ stateLog.Add(" → Removing thinking bubble (no pre-tool text)");
+ thinkingInMessages = false;
+ }
+ break;
+
+ case FunctionResultContent fr:
+ stateLog.Add($"TOOL RESULT: {fr.CallId}");
+ break;
+ }
+ }
+ }
+
+ var fullLog = string.Join("\n", stateLog);
+
+ // KEY ASSERTIONS:
+
+ // 1. No pre-tool text should be the literal "null"
+ foreach (var text in preToolTextValues)
+ {
+ Assert.False(string.Equals(text, "null", StringComparison.OrdinalIgnoreCase),
+ $"Pre-tool text was literal 'null'! This means a jsonString or toString on a null object " +
+ $"is leaking through the streaming pipeline.\nFull state log:\n{fullLog}");
+ }
+
+ // 2. Should have seen tool calls (the model should use the tool for this query)
+ Assert.True(toolCallSeen,
+ $"Expected tool calls for 'what are landmarks in africa' query.\nFull state log:\n{fullLog}");
+
+ // 3. If ANY pre-tool text arrived, log it as a finding (this is what the user sees)
+ // This is informational — the fix (always remove) handles it, but we want to KNOW if it happens
+ if (preToolTextValues.Count > 0)
+ {
+ // Pre-tool text DID arrive. With the old code, this would have caused the stuck bubble.
+ // With the fix, the bubble is always removed. Log the values for diagnostics.
+ var preToolSummary = string.Join(", ", preToolTextValues.Select(v => $"\"{v}\""));
+ stateLog.Add($"\n=== FINDING: {preToolTextValues.Count} pre-tool text value(s): {preToolSummary} ===");
+ }
+ }
+
+ [Fact]
+ public async Task GetStreamingResponseAsync_ViewModelSimulation_NoNullTextInStream_RawClient()
+ {
+ // Same as above but uses the RAW client (no middleware) to isolate whether
+ // "null" text comes from Apple or from the middleware pipeline.
+ // Runs 3 times to account for model non-determinism.
+
+ var landmarkTool = AIFunctionFactory.Create(
+ (string query) => """
+ [
+ {"name": "Table Mountain", "country": "South Africa"},
+ {"name": "Victoria Falls", "country": "Zimbabwe"},
+ {"name": "Pyramids of Giza", "country": "Egypt"}
+ ]
+ """,
+ name: "SearchLandmarks",
+ description: "Searches for landmarks and points of interest by query");
+
+ var client = EnableFunctionCalling(new T());
+ var options = new ChatOptions { Tools = [landmarkTool] };
+
+ for (int run = 0; run < 3; run++)
+ {
+ var allContent = new List<(string Type, string Value)>();
+
+ await foreach (var update in client.GetStreamingResponseAsync(
+ [new ChatMessage(ChatRole.User, "what are landmarks in africa")], options))
+ {
+ foreach (var content in update.Contents)
+ {
+ switch (content)
+ {
+ case TextContent tc:
+ allContent.Add(("Text", tc.Text ?? ""));
+ break;
+ case FunctionCallContent fc:
+ allContent.Add(("ToolCall", fc.Name));
+ break;
+ case FunctionResultContent fr:
+ allContent.Add(("ToolResult", fr.CallId));
+ break;
+ default:
+ allContent.Add((content.GetType().Name, content.ToString() ?? ""));
+ break;
+ }
+ }
+ }
+
+ var log = string.Join("\n", allContent.Select((c, i) => $" [{i}] {c.Type}: {c.Value}"));
+
+ // Check for literal "null" text
+ var nullTexts = allContent
+ .Where(c => c.Type == "Text" && string.Equals(c.Value, "null", StringComparison.OrdinalIgnoreCase))
+ .ToList();
+
+ Assert.True(nullTexts.Count == 0,
+ $"Run {run + 1}/3: Found {nullTexts.Count} literal 'null' text item(s) in raw stream!\n{log}");
+
+ // Check for C# null text
+ var csharpNulls = allContent
+ .Where(c => c.Type == "Text" && c.Value == "")
+ .ToList();
+
+ Assert.True(csharpNulls.Count == 0,
+ $"Run {run + 1}/3: Found {csharpNulls.Count} C# null TextContent.Text in raw stream!\n{log}");
+ }
+ }
}
public enum PointOfInterestCategory
diff --git a/src/AI/tests/Essentials.AI.DeviceTests/Tests/MaciOS/AppleIntelligenceChatClientTests.cs b/src/AI/tests/Essentials.AI.DeviceTests/Tests/MaciOS/AppleIntelligenceChatClientTests.cs
index c55487ea9849..18111538d8b3 100644
--- a/src/AI/tests/Essentials.AI.DeviceTests/Tests/MaciOS/AppleIntelligenceChatClientTests.cs
+++ b/src/AI/tests/Essentials.AI.DeviceTests/Tests/MaciOS/AppleIntelligenceChatClientTests.cs
@@ -4,34 +4,34 @@
namespace Microsoft.Maui.Essentials.AI.DeviceTests;
-[Category("AppleIntelligenceChatClient")]
+[Category(Traits.AppleIntelligenceChatClient)]
public class AppleIntelligenceChatClientCancellationTests : ChatClientCancellationTestsBase
{
}
-[Category("AppleIntelligenceChatClient")]
+[Category(Traits.AppleIntelligenceChatClient)]
public class AppleIntelligenceChatClientFunctionCallingTestsBase : ChatClientFunctionCallingTestsBase
{
}
-[Category("AppleIntelligenceChatClient")]
+[Category(Traits.AppleIntelligenceChatClient)]
public class AppleIntelligenceChatClientGetServiceTests : ChatClientGetServiceTestsBase
{
protected override string ExpectedProviderName => "apple";
protected override string ExpectedDefaultModelId => "apple-intelligence";
}
-[Category("AppleIntelligenceChatClient")]
+[Category(Traits.AppleIntelligenceChatClient)]
public class AppleIntelligenceChatClientInstantiationTests : ChatClientInstantiationTestsBase
{
}
-[Category("AppleIntelligenceChatClient")]
+[Category(Traits.AppleIntelligenceChatClient)]
public class AppleIntelligenceChatClientMessagesTests : ChatClientMessagesTestsBase
{
}
-[Category("AppleIntelligenceChatClient")]
+[Category(Traits.AppleIntelligenceChatClient)]
public class AppleIntelligenceChatClientOptionsTests : ChatClientOptionsTestsBase
{
///
@@ -86,17 +86,17 @@ public override async Task GetStreamingResponseAsync_WithResponseFormat_AcceptsJ
}
}
-[Category("AppleIntelligenceChatClient")]
+[Category(Traits.AppleIntelligenceChatClient)]
public class AppleIntelligenceChatClientResponseTests : ChatClientResponseTestsBase
{
}
-[Category("AppleIntelligenceChatClient")]
+[Category(Traits.AppleIntelligenceChatClient)]
public class AppleIntelligenceChatClientStreamingTests : ChatClientStreamingTestsBase
{
}
-[Category("AppleIntelligenceChatClient")]
+[Category(Traits.AppleIntelligenceChatClient)]
public class AppleIntelligenceChatClientJsonSchemaTests : ChatClientJsonSchemaTestsBase
{
[Fact(Skip = "Apple Intelligence requires a JSON schema for structured responses, so this test is not applicable.")]
diff --git a/src/AI/tests/Essentials.AI.DeviceTests/Tests/MaciOS/AppleIntelligenceChatClientToolCallLoggingTests.cs b/src/AI/tests/Essentials.AI.DeviceTests/Tests/MaciOS/AppleIntelligenceChatClientToolCallLoggingTests.cs
new file mode 100644
index 000000000000..a8b65d058702
--- /dev/null
+++ b/src/AI/tests/Essentials.AI.DeviceTests/Tests/MaciOS/AppleIntelligenceChatClientToolCallLoggingTests.cs
@@ -0,0 +1,355 @@
+#if IOS || MACCATALYST
+using Microsoft.Extensions.AI;
+using Microsoft.Extensions.Logging;
+using Xunit;
+
+namespace Microsoft.Maui.Essentials.AI.DeviceTests;
+
+[Category(Traits.AppleIntelligenceChatClient)]
+public class AppleIntelligenceChatClientToolCallLoggingTests
+{
+ // ====================================================================
+ // Single tool, Debug level
+ // Expected: exactly 2 entries
+ // [0] Debug: "Invoking GetWeather."
+ // [1] Debug: "GetWeather invocation completed. Duration: {timespan}"
+ // ====================================================================
+ [Fact]
+ public async Task GetResponseAsync_SingleTool_Debug_ProducesExactly2Entries()
+ {
+ var logCollector = new DeviceTestLogCollector(LogLevel.Debug);
+ var client = new AppleIntelligenceChatClient(logCollector);
+
+ var weatherTool = AIFunctionFactory.Create(
+ (string location) => $"Clear skies, 72°F in {location}",
+ name: "GetWeather",
+ description: "Gets the weather for a location");
+
+ var options = new ChatOptions { Tools = [weatherTool] };
+ await client.GetResponseAsync(
+ [new ChatMessage(ChatRole.User, "What's the weather in Seattle?")], options);
+
+ var logs = logCollector.Entries;
+ Assert.Equal(2, logs.Count);
+
+ // Entry 0: "Invoking GetWeather."
+ Assert.Equal(LogLevel.Debug, logs[0].Level);
+ Assert.Equal("Invoking GetWeather.", logs[0].Message);
+
+ // Entry 1: "GetWeather invocation completed. Duration: ..."
+ Assert.Equal(LogLevel.Debug, logs[1].Level);
+ Assert.StartsWith("GetWeather invocation completed. Duration: ", logs[1].Message, StringComparison.Ordinal);
+
+ // Debug must NOT leak arguments or results
+ var allText = string.Join("\n", logs.Select(l => l.Message));
+ Assert.DoesNotContain("Seattle", allText, StringComparison.Ordinal);
+ Assert.DoesNotContain("72°F", allText, StringComparison.Ordinal);
+ }
+
+ // ====================================================================
+ // Single tool, Trace level
+ // Expected: exactly 2 entries
+ // [0] Trace: "Invoking GetWeather({"location": "Seattle"})."
+ // [1] Trace: "GetWeather invocation completed. Duration: {ts}. Result: \"Clear skies, 72°F in Seattle\""
+ // ====================================================================
+ [Fact]
+ public async Task GetResponseAsync_SingleTool_Trace_ProducesExactly2EntriesWithSensitiveData()
+ {
+ var logCollector = new DeviceTestLogCollector(LogLevel.Trace);
+ var client = new AppleIntelligenceChatClient(logCollector);
+
+ var weatherTool = AIFunctionFactory.Create(
+ (string location) => $"Clear skies, 72°F in {location}",
+ name: "GetWeather",
+ description: "Gets the weather for a location");
+
+ var options = new ChatOptions { Tools = [weatherTool] };
+ await client.GetResponseAsync(
+ [new ChatMessage(ChatRole.User, "What's the weather in Seattle?")], options);
+
+ var logs = logCollector.Entries;
+ Assert.Equal(2, logs.Count);
+
+ // Entry 0: Trace with arguments in parentheses
+ Assert.Equal(LogLevel.Trace, logs[0].Level);
+ Assert.StartsWith("Invoking GetWeather(", logs[0].Message, StringComparison.Ordinal);
+ Assert.EndsWith(").", logs[0].Message, StringComparison.Ordinal);
+ Assert.Contains("Seattle", logs[0].Message, StringComparison.Ordinal);
+
+ // Entry 1: Trace with duration AND result
+ Assert.Equal(LogLevel.Trace, logs[1].Level);
+ Assert.StartsWith("GetWeather invocation completed. Duration: ", logs[1].Message, StringComparison.Ordinal);
+ Assert.Contains(". Result: ", logs[1].Message, StringComparison.Ordinal);
+ Assert.Contains("72°F", logs[1].Message, StringComparison.Ordinal);
+ }
+
+ // ====================================================================
+ // No tools — must produce zero log entries even at Trace
+ // ====================================================================
+ [Fact]
+ public async Task GetResponseAsync_NoTools_ProducesZeroEntries()
+ {
+ var logCollector = new DeviceTestLogCollector(LogLevel.Trace);
+ var client = new AppleIntelligenceChatClient(logCollector);
+
+ await client.GetResponseAsync(
+ [new ChatMessage(ChatRole.User, "What is 2+2?")]);
+
+ Assert.Empty(logCollector.Entries);
+ }
+
+ // ====================================================================
+ // Information level — tool calls happen but produce zero log entries
+ // (our logging is Debug/Trace only)
+ // ====================================================================
+ [Fact]
+ public async Task GetResponseAsync_InformationLevel_ProducesZeroEntries()
+ {
+ var logCollector = new DeviceTestLogCollector(LogLevel.Information);
+ var client = new AppleIntelligenceChatClient(logCollector);
+
+ var weatherTool = AIFunctionFactory.Create(
+ (string location) => $"Clear skies, 72°F in {location}",
+ name: "GetWeather",
+ description: "Gets the weather for a location");
+
+ var options = new ChatOptions { Tools = [weatherTool] };
+ await client.GetResponseAsync(
+ [new ChatMessage(ChatRole.User, "What's the weather in Seattle?")], options);
+
+ Assert.Empty(logCollector.Entries);
+ }
+
+ // ====================================================================
+ // Streaming, Debug — same 2 entries as non-streaming
+ // ====================================================================
+ [Fact]
+ public async Task GetStreamingResponseAsync_SingleTool_Debug_ProducesExactly2Entries()
+ {
+ var logCollector = new DeviceTestLogCollector(LogLevel.Debug);
+ var client = new AppleIntelligenceChatClient(logCollector);
+
+ var weatherTool = AIFunctionFactory.Create(
+ (string location) => $"Clear skies, 72°F in {location}",
+ name: "GetWeather",
+ description: "Gets the weather for a location");
+
+ var options = new ChatOptions { Tools = [weatherTool] };
+ await foreach (var _ in client.GetStreamingResponseAsync(
+ [new ChatMessage(ChatRole.User, "What's the weather in Seattle?")], options))
+ { }
+
+ var logs = logCollector.Entries;
+ Assert.Equal(2, logs.Count);
+
+ Assert.Equal(LogLevel.Debug, logs[0].Level);
+ Assert.Equal("Invoking GetWeather.", logs[0].Message);
+
+ Assert.Equal(LogLevel.Debug, logs[1].Level);
+ Assert.StartsWith("GetWeather invocation completed. Duration: ", logs[1].Message, StringComparison.Ordinal);
+ }
+
+ // ====================================================================
+ // Streaming, Trace — same 2 entries as non-streaming, with sensitive data
+ // ====================================================================
+ [Fact]
+ public async Task GetStreamingResponseAsync_SingleTool_Trace_ProducesExactly2EntriesWithSensitiveData()
+ {
+ var logCollector = new DeviceTestLogCollector(LogLevel.Trace);
+ var client = new AppleIntelligenceChatClient(logCollector);
+
+ var weatherTool = AIFunctionFactory.Create(
+ (string location) => $"Clear skies, 72°F in {location}",
+ name: "GetWeather",
+ description: "Gets the weather for a location");
+
+ var options = new ChatOptions { Tools = [weatherTool] };
+ await foreach (var _ in client.GetStreamingResponseAsync(
+ [new ChatMessage(ChatRole.User, "What's the weather in Seattle?")], options))
+ { }
+
+ var logs = logCollector.Entries;
+ Assert.Equal(2, logs.Count);
+
+ Assert.Equal(LogLevel.Trace, logs[0].Level);
+ Assert.StartsWith("Invoking GetWeather(", logs[0].Message, StringComparison.Ordinal);
+ Assert.Contains("Seattle", logs[0].Message, StringComparison.Ordinal);
+
+ Assert.Equal(LogLevel.Trace, logs[1].Level);
+ Assert.Contains(". Result: ", logs[1].Message, StringComparison.Ordinal);
+ Assert.Contains("72°F", logs[1].Message, StringComparison.Ordinal);
+ }
+
+ // ====================================================================
+ // Ordering: for a single tool, "Invoking" must come before "completed"
+ // ====================================================================
+ [Fact]
+ public async Task GetResponseAsync_InvokingIsLoggedBeforeCompleted()
+ {
+ var logCollector = new DeviceTestLogCollector(LogLevel.Debug);
+ var client = new AppleIntelligenceChatClient(logCollector);
+
+ var weatherTool = AIFunctionFactory.Create(
+ (string location) => $"Clear skies, 72°F in {location}",
+ name: "GetWeather",
+ description: "Gets the weather for a location");
+
+ var options = new ChatOptions { Tools = [weatherTool] };
+ await client.GetResponseAsync(
+ [new ChatMessage(ChatRole.User, "What's the weather in Seattle?")], options);
+
+ var logs = logCollector.Entries;
+ Assert.Equal(2, logs.Count);
+ Assert.StartsWith("Invoking GetWeather", logs[0].Message, StringComparison.Ordinal);
+ Assert.StartsWith("GetWeather invocation completed", logs[1].Message, StringComparison.Ordinal);
+ }
+
+ // ====================================================================
+ // Multiple tools — native framework invokes concurrently.
+ // Exactly 4 entries: Invoking + completed for each tool.
+ // Order across tools is non-deterministic, but each tool's
+ // "Invoking" must precede its "completed".
+ // ====================================================================
+ [Fact]
+ public async Task GetResponseAsync_MultipleTools_ProducesExactly4Entries()
+ {
+ var logCollector = new DeviceTestLogCollector(LogLevel.Debug);
+ var client = new AppleIntelligenceChatClient(logCollector);
+
+ var weatherTool = AIFunctionFactory.Create(
+ (string location) => $"Clear skies, 72°F in {location}",
+ name: "GetWeather",
+ description: "Gets the weather for a location");
+
+ var timeTool = AIFunctionFactory.Create(
+ (string timezone) => $"3:00 PM in {timezone}",
+ name: "GetTime",
+ description: "Gets the current time in a timezone");
+
+ var options = new ChatOptions { Tools = [weatherTool, timeTool] };
+ await client.GetResponseAsync(
+ [new ChatMessage(ChatRole.User, "What's the weather in Seattle and the time in EST?")], options);
+
+ var logs = logCollector.Entries;
+ var messages = logs.Select(l => l.Message).ToList();
+
+ // Exactly 4 entries: 2 per tool (Invoking + completed)
+ Assert.Equal(4, logs.Count);
+
+ // All entries at Debug level
+ Assert.All(logs, l => Assert.Equal(LogLevel.Debug, l.Level));
+
+ // Exactly 1 "Invoking GetWeather." and 1 "GetWeather invocation completed..."
+ Assert.Single(messages, m => m == "Invoking GetWeather.");
+ Assert.Single(messages, m => m.StartsWith("GetWeather invocation completed. Duration: ", StringComparison.Ordinal));
+
+ // Exactly 1 "Invoking GetTime." and 1 "GetTime invocation completed..."
+ Assert.Single(messages, m => m == "Invoking GetTime.");
+ Assert.Single(messages, m => m.StartsWith("GetTime invocation completed. Duration: ", StringComparison.Ordinal));
+
+ // Each tool's "Invoking" precedes its "completed" (ordering within each tool)
+ var weatherInvoking = messages.FindIndex(m => m == "Invoking GetWeather.");
+ var weatherCompleted = messages.FindIndex(m => m.StartsWith("GetWeather invocation completed", StringComparison.Ordinal));
+ Assert.True(weatherInvoking < weatherCompleted, "GetWeather: Invoking should come before completed");
+
+ var timeInvoking = messages.FindIndex(m => m == "Invoking GetTime.");
+ var timeCompleted = messages.FindIndex(m => m.StartsWith("GetTime invocation completed", StringComparison.Ordinal));
+ Assert.True(timeInvoking < timeCompleted, "GetTime: Invoking should come before completed");
+ }
+
+ // ====================================================================
+ // Tool failure — exactly 2 entries: Invoking (Debug) + failed (Error)
+ // ====================================================================
+ [Fact]
+ public async Task GetResponseAsync_ToolFailure_Produces2EntriesWithErrorLevel()
+ {
+ var logCollector = new DeviceTestLogCollector(LogLevel.Debug);
+ var client = new AppleIntelligenceChatClient(logCollector);
+
+ var failingTool = AIFunctionFactory.Create(
+ string (string location) => throw new InvalidOperationException("API is down"),
+ name: "GetWeather",
+ description: "Gets the weather for a location");
+
+ var options = new ChatOptions { Tools = [failingTool] };
+ // Native framework propagates tool errors as NSErrorException
+ try
+ {
+ await client.GetResponseAsync(
+ [new ChatMessage(ChatRole.User, "What's the weather in Seattle?")], options);
+ }
+ catch
+ {
+ // Expected — native framework may propagate tool errors
+ }
+
+ var logs = logCollector.Entries;
+ Assert.Equal(2, logs.Count);
+
+ // Entry 0: Debug-level "Invoking GetWeather."
+ Assert.Equal(LogLevel.Debug, logs[0].Level);
+ Assert.Equal("Invoking GetWeather.", logs[0].Message);
+
+ // Entry 1: Error-level "GetWeather invocation failed."
+ Assert.Equal(LogLevel.Error, logs[1].Level);
+ Assert.Equal("GetWeather invocation failed.", logs[1].Message);
+ }
+
+ // ====================================================================
+ // No logger factory — tool invocation works without crashing
+ // ====================================================================
+ [Fact]
+ public async Task GetResponseAsync_NoLoggerFactory_CompletesSuccessfully()
+ {
+ var client = new AppleIntelligenceChatClient();
+
+ var weatherTool = AIFunctionFactory.Create(
+ (string location) => $"Clear skies, 72°F in {location}",
+ name: "GetWeather",
+ description: "Gets the weather for a location");
+
+ var options = new ChatOptions { Tools = [weatherTool] };
+ var response = await client.GetResponseAsync(
+ [new ChatMessage(ChatRole.User, "What's the weather in Seattle?")], options);
+
+ Assert.NotNull(response);
+ Assert.NotEmpty(response.Messages);
+ }
+
+ /// <summary>
+ /// Thread-safe log collector for device tests. Uses lock because native
+ /// Apple Intelligence invokes tools concurrently from different threads.
+ /// </summary>
+ private class DeviceTestLogCollector : ILoggerFactory, ILogger
+ {
+ private readonly LogLevel _minimumLevel;
+ private readonly object _lock = new();
+
+ public DeviceTestLogCollector(LogLevel minimumLevel) => _minimumLevel = minimumLevel;
+
+ public List<DeviceTestLogEntry> Entries { get; } = [];
+
+ public ILogger CreateLogger(string categoryName) => this;
+ public void AddProvider(ILoggerProvider provider) { }
+ void IDisposable.Dispose() { }
+
+ public IDisposable? BeginScope<TState>(TState state) where TState : notnull => null;
+ public bool IsEnabled(LogLevel logLevel) => logLevel >= _minimumLevel;
+
+ public void Log<TState>(LogLevel logLevel, EventId eventId, TState state, Exception? exception, Func<TState, Exception?, string> formatter)
+ {
+ if (IsEnabled(logLevel))
+ {
+ var entry = new DeviceTestLogEntry(logLevel, formatter(state, exception));
+ lock (_lock)
+ {
+ Entries.Add(entry);
+ }
+ }
+ }
+ }
+
+ private record DeviceTestLogEntry(LogLevel Level, string Message);
+}
+
+#endif
diff --git a/src/AI/tests/Essentials.AI.DeviceTests/Tests/MaciOS/AppleIntelligenceChatClientValidationTests.cs b/src/AI/tests/Essentials.AI.DeviceTests/Tests/MaciOS/AppleIntelligenceChatClientValidationTests.cs
new file mode 100644
index 000000000000..7b07f32e61ce
--- /dev/null
+++ b/src/AI/tests/Essentials.AI.DeviceTests/Tests/MaciOS/AppleIntelligenceChatClientValidationTests.cs
@@ -0,0 +1,186 @@
+#if IOS || MACCATALYST
+using Microsoft.Extensions.AI;
+using Xunit;
+
+namespace Microsoft.Maui.Essentials.AI.DeviceTests;
+
+/// <summary>
+/// Tests for AppleIntelligenceChatClient edge cases in message conversion,
+/// tool validation, and error handling paths.
+/// </summary>
+[Category(Traits.AppleIntelligenceChatClient)]
+public class AppleIntelligenceChatClientValidationTests
+{
+ /// <summary>
+ /// Verifies that passing a non-AIFunction tool (e.g., a custom AITool subclass)
+ /// throws NotSupportedException with a descriptive message listing the unsupported types.
+ /// </summary>
+ [Fact]
+ public async Task GetResponseAsync_WithNonAIFunctionTool_ThrowsNotSupportedException()
+ {
+ var client = new AppleIntelligenceChatClient();
+ var messages = new List<ChatMessage>
+ {
+ new(ChatRole.User, "Hello")
+ };
+ var options = new ChatOptions
+ {
+ Tools = [new UnsupportedToolForTesting()]
+ };
+
+ var ex = await Assert.ThrowsAsync<NotSupportedException>(
+ () => client.GetResponseAsync(messages, options));
+ Assert.Contains("AIFunction", ex.Message, StringComparison.Ordinal);
+ Assert.Contains("UnsupportedToolForTesting", ex.Message, StringComparison.Ordinal);
+ }
+
+ /// <summary>
+ /// Verifies that messages with TextContent(null) are handled gracefully.
+ /// In M.E.AI 10.3.0+, TextContent(null) defaults to empty text which
+ /// passes through content filtering to the native API without throwing.
+ /// </summary>
+ [Fact]
+ public async Task GetResponseAsync_WithOnlyNullTextContent_DoesNotThrow()
+ {
+ var client = new AppleIntelligenceChatClient();
+ var msg = new ChatMessage(ChatRole.User, [new TextContent(null)]);
+ var messages = new List<ChatMessage> { msg };
+
+ var response = await client.GetResponseAsync(messages);
+ Assert.NotNull(response);
+ }
+
+ /// <summary>
+ /// Verifies that messages with unsupported content types
+ /// throw ArgumentException with a descriptive message.
+ /// </summary>
+ [Fact]
+ public async Task GetResponseAsync_WithUnsupportedContentType_ThrowsArgumentException()
+ {
+ var client = new AppleIntelligenceChatClient();
+ var msg = new ChatMessage(ChatRole.User, [new UnsupportedContentForTesting()]);
+ var messages = new List<ChatMessage> { msg };
+
+ await Assert.ThrowsAsync<ArgumentException>(
+ () => client.GetResponseAsync(messages));
+ }
+
+ /// <summary>
+ /// Verifies that FunctionResultContent with a CallId that doesn't match any prior
+ /// FunctionCallContent is handled gracefully (empty tool name, no exception).
+ /// </summary>
+ [Fact]
+ public async Task GetResponseAsync_WithOrphanedFunctionResult_DoesNotThrow()
+ {
+ var client = new AppleIntelligenceChatClient();
+ var messages = new List<ChatMessage>
+ {
+ new(ChatRole.User, "What's the weather?"),
+ new(ChatRole.Assistant, [new FunctionCallContent("call-1", "GetWeather")]),
+ new(ChatRole.Tool, [new FunctionResultContent("call-1", "Sunny")]),
+ // Orphaned result — callId "call-999" was never in a FunctionCallContent
+ new(ChatRole.Tool, [new FunctionResultContent("call-999", "Unknown result")]),
+ new(ChatRole.User, "Tell me more")
+ };
+
+ // Should not throw — orphaned FunctionResultContent gets empty tool name
+ var response = await client.GetResponseAsync(messages);
+ Assert.NotNull(response);
+ }
+
+ /// <summary>
+ /// Verifies that FunctionResultContent with a CallId not matching any FunctionCallContent
+ /// is handled gracefully (empty tool name, no exception). This covers the null CallId path too.
+ /// </summary>
+ [Fact]
+ public async Task GetResponseAsync_WithFunctionResultOrphanedCallId_DoesNotThrow()
+ {
+ var client = new AppleIntelligenceChatClient();
+
+ // Build a FunctionResultContent with a CallId that has no matching FunctionCallContent
+ var orphanResult = new FunctionResultContent("orphan-call-id", "Sunny result");
+
+ var messages = new List<ChatMessage>
+ {
+ new(ChatRole.User, "What's the weather?"),
+ new(ChatRole.Assistant, [new FunctionCallContent("call-1", "GetWeather")]),
+ new(ChatRole.Tool, [new FunctionResultContent("call-1", "Sunny")]),
+ // Orphaned result — callId doesn't match any prior FunctionCallContent
+ new(ChatRole.Tool, [orphanResult]),
+ new(ChatRole.User, "Tell me more")
+ };
+
+ // Should not throw — orphaned FunctionResultContent gets empty tool name
+ var response = await client.GetResponseAsync(messages);
+ Assert.NotNull(response);
+ }
+
+ /// <summary>
+ /// Verifies that FunctionCallContent with empty Name populates callIdToName
+ /// with an empty string, and subsequent FunctionResultContent for that CallId
+ /// gets the empty name. This is the closest we can test to null since
+ /// FunctionCallContent validates name is not null in its constructor.
+ /// </summary>
+ [Fact]
+ public async Task GetResponseAsync_WithFunctionCallEmptyName_DoesNotThrow()
+ {
+ var client = new AppleIntelligenceChatClient();
+ var messages = new List<ChatMessage>
+ {
+ new(ChatRole.User, "What's the weather?"),
+ new(ChatRole.Assistant, [new FunctionCallContent("call-1", "")]),
+ new(ChatRole.Tool, [new FunctionResultContent("call-1", "Sunny")]),
+ new(ChatRole.User, "Tell me more")
+ };
+
+ // Should not throw — empty Name means callIdToName has empty value for "call-1"
+ var response = await client.GetResponseAsync(messages);
+ Assert.NotNull(response);
+ }
+
+ /// <summary>
+ /// Verifies that ChatOptions.Instructions is accepted and the response succeeds.
+ /// The Instructions string is prepended as a system message internally.
+ /// </summary>
+ [Fact]
+ public async Task GetResponseAsync_WithInstructions_Succeeds()
+ {
+ var client = new AppleIntelligenceChatClient();
+ var messages = new List<ChatMessage>
+ {
+ new(ChatRole.User, "Hello")
+ };
+ var options = new ChatOptions
+ {
+ Instructions = "You are a helpful assistant."
+ };
+
+ var response = await client.GetResponseAsync(messages, options);
+ Assert.NotNull(response);
+ Assert.NotEmpty(response.Messages);
+ }
+
+ /// <summary>
+ /// Verifies that GetService with null serviceType throws ArgumentNullException.
+ /// </summary>
+ [Fact]
+ public void GetService_WithNullServiceType_ThrowsArgumentNullException()
+ {
+ var client = new AppleIntelligenceChatClient();
+
+ Assert.Throws<ArgumentNullException>(() =>
+ ((IChatClient)client).GetService(null!, null));
+ }
+
+ /// <summary>
+ /// A custom AITool subclass that is NOT an AIFunction, used to test validation.
+ /// </summary>
+ private sealed class UnsupportedToolForTesting : AITool;
+
+ /// <summary>
+ /// A custom AIContent subclass that is not supported by Apple Intelligence.
+ /// </summary>
+ private sealed class UnsupportedContentForTesting : AIContent;
+}
+
+#endif
diff --git a/src/AI/tests/Essentials.AI.DeviceTests/Tests/MaciOS/NLEmbeddingGeneratorTests.cs b/src/AI/tests/Essentials.AI.DeviceTests/Tests/MaciOS/NLEmbeddingGeneratorTests.cs
index f0e32eda28de..0c1823fef2d1 100644
--- a/src/AI/tests/Essentials.AI.DeviceTests/Tests/MaciOS/NLEmbeddingGeneratorTests.cs
+++ b/src/AI/tests/Essentials.AI.DeviceTests/Tests/MaciOS/NLEmbeddingGeneratorTests.cs
@@ -6,17 +6,17 @@
namespace Microsoft.Maui.Essentials.AI.DeviceTests;
-[Category("NLEmbeddingGenerator")]
+[Category(Traits.NLEmbeddingGenerator)]
public class NLEmbeddingGeneratorCancellationTests : EmbeddingGeneratorCancellationTestsBase
{
}
-[Category("NLEmbeddingGenerator")]
+[Category(Traits.NLEmbeddingGenerator)]
public class NLEmbeddingGeneratorConcurrencyTests : EmbeddingGeneratorConcurrencyTestsBase
{
}
-[Category("NLEmbeddingGenerator")]
+[Category(Traits.NLEmbeddingGenerator)]
public class NLEmbeddingGeneratorDisposalTests : EmbeddingGeneratorDisposalTestsBase
{
[Fact]
@@ -55,12 +55,12 @@ public void AsIEmbeddingGenerator_CreatesGeneratorFromNLEmbedding()
}
}
-[Category("NLEmbeddingGenerator")]
+[Category(Traits.NLEmbeddingGenerator)]
public class NLEmbeddingGeneratorGenerateTests : EmbeddingGeneratorGenerateTestsBase
{
}
-[Category("NLEmbeddingGenerator")]
+[Category(Traits.NLEmbeddingGenerator)]
public class NLEmbeddingGeneratorGetServiceTests : EmbeddingGeneratorGetServiceTestsBase
{
protected override string ExpectedProviderName => "apple";
@@ -76,7 +76,7 @@ public void GetService_ReturnsUnderlyingNLEmbedding()
}
}
-[Category("NLEmbeddingGenerator")]
+[Category(Traits.NLEmbeddingGenerator)]
public class NLEmbeddingGeneratorInstantiationTests : EmbeddingGeneratorInstantiationTestsBase
{
[Fact]
@@ -103,7 +103,7 @@ public void EmbeddingConstructor_WithNull_ThrowsArgumentNullException()
}
}
-[Category("NLEmbeddingGenerator")]
+[Category(Traits.NLEmbeddingGenerator)]
public class NLEmbeddingGeneratorSimilarityTests : EmbeddingGeneratorSimilarityTestsBase
{
}
diff --git a/src/AI/tests/Essentials.AI.DeviceTests/Tests/OpenAI/OpenAIChatClientTests.cs b/src/AI/tests/Essentials.AI.DeviceTests/Tests/OpenAI/OpenAIChatClientTests.cs
index b11f25ff46ae..4329adbdf1ee 100644
--- a/src/AI/tests/Essentials.AI.DeviceTests/Tests/OpenAI/OpenAIChatClientTests.cs
+++ b/src/AI/tests/Essentials.AI.DeviceTests/Tests/OpenAI/OpenAIChatClientTests.cs
@@ -12,12 +12,12 @@ public OpenAIChatClient()
}
}
-[Category("OpenAIChatClient")]
+[Category(Traits.OpenAIChatClient)]
public class OpenAIChatClientCancellationTests : ChatClientCancellationTestsBase
{
}
-[Category("OpenAIChatClient")]
+[Category(Traits.OpenAIChatClient)]
public class OpenAIChatClientFunctionCallingTestsBase : ChatClientFunctionCallingTestsBase
{
protected override IChatClient EnableFunctionCalling(OpenAIChatClient client)
@@ -28,39 +28,39 @@ protected override IChatClient EnableFunctionCalling(OpenAIChatClient client)
}
}
-[Category("OpenAIChatClient")]
+[Category(Traits.OpenAIChatClient)]
public class OpenAIChatClientGetServiceTests : ChatClientGetServiceTestsBase
{
protected override string ExpectedProviderName => "openai";
protected override string ExpectedDefaultModelId => "gpt-4o";
}
-[Category("OpenAIChatClient")]
+[Category(Traits.OpenAIChatClient)]
public class OpenAIChatClientInstantiationTests : ChatClientInstantiationTestsBase
{
}
-[Category("OpenAIChatClient")]
+[Category(Traits.OpenAIChatClient)]
public class OpenAIChatClientMessagesTests : ChatClientMessagesTestsBase
{
}
-[Category("OpenAIChatClient")]
+[Category(Traits.OpenAIChatClient)]
public class OpenAIChatClientOptionsTests : ChatClientOptionsTestsBase
{
}
-[Category("OpenAIChatClient")]
+[Category(Traits.OpenAIChatClient)]
public class OpenAIChatClientResponseTests : ChatClientResponseTestsBase
{
}
-[Category("OpenAIChatClient")]
+[Category(Traits.OpenAIChatClient)]
public class OpenAIChatClientStreamingTests : ChatClientStreamingTestsBase
{
}
-[Category("OpenAIChatClient")]
+[Category(Traits.OpenAIChatClient)]
public class OpenAIChatClientJsonSchemaTests : ChatClientJsonSchemaTestsBase
{
}
diff --git a/src/AI/tests/Essentials.AI.DeviceTests/Tests/OpenAI/OpenAIEmbeddingGeneratorTests.cs b/src/AI/tests/Essentials.AI.DeviceTests/Tests/OpenAI/OpenAIEmbeddingGeneratorTests.cs
index bce76747a577..fb0a6806a181 100644
--- a/src/AI/tests/Essentials.AI.DeviceTests/Tests/OpenAI/OpenAIEmbeddingGeneratorTests.cs
+++ b/src/AI/tests/Essentials.AI.DeviceTests/Tests/OpenAI/OpenAIEmbeddingGeneratorTests.cs
@@ -12,39 +12,39 @@ public OpenAIEmbeddingGenerator()
}
}
-[Category("OpenAIEmbeddingGenerator")]
+[Category(Traits.OpenAIEmbeddingGenerator)]
public class OpenAIEmbeddingGeneratorCancellationTests : EmbeddingGeneratorCancellationTestsBase
{
}
-[Category("OpenAIEmbeddingGenerator")]
+[Category(Traits.OpenAIEmbeddingGenerator)]
public class OpenAIEmbeddingGeneratorConcurrencyTests : EmbeddingGeneratorConcurrencyTestsBase
{
}
-[Category("OpenAIEmbeddingGenerator")]
+[Category(Traits.OpenAIEmbeddingGenerator)]
public class OpenAIEmbeddingGeneratorDisposalTests : EmbeddingGeneratorDisposalTestsBase
{
}
-[Category("OpenAIEmbeddingGenerator")]
+[Category(Traits.OpenAIEmbeddingGenerator)]
public class OpenAIEmbeddingGeneratorGenerateTests : EmbeddingGeneratorGenerateTestsBase
{
}
-[Category("OpenAIEmbeddingGenerator")]
+[Category(Traits.OpenAIEmbeddingGenerator)]
public class OpenAIEmbeddingGeneratorGetServiceTests : EmbeddingGeneratorGetServiceTestsBase
{
protected override string ExpectedProviderName => "openai";
protected override string ExpectedDefaultModelId => "text-embedding-3-small";
}
-[Category("OpenAIEmbeddingGenerator")]
+[Category(Traits.OpenAIEmbeddingGenerator)]
public class OpenAIEmbeddingGeneratorInstantiationTests : EmbeddingGeneratorInstantiationTestsBase
{
}
-[Category("OpenAIEmbeddingGenerator")]
+[Category(Traits.OpenAIEmbeddingGenerator)]
public class OpenAIEmbeddingGeneratorSimilarityTests : EmbeddingGeneratorSimilarityTestsBase
{
}
diff --git a/src/AI/tests/Essentials.AI.DeviceTests/Tests/SmokeTests.cs b/src/AI/tests/Essentials.AI.DeviceTests/Tests/SmokeTests.cs
new file mode 100644
index 000000000000..ada3dd38271c
--- /dev/null
+++ b/src/AI/tests/Essentials.AI.DeviceTests/Tests/SmokeTests.cs
@@ -0,0 +1,16 @@
+using Xunit;
+
+namespace Microsoft.Maui.Essentials.AI.DeviceTests;
+
+/// <summary>
+/// Basic smoke tests that run on all platforms to ensure the test
+/// infrastructure is working correctly (e.g., test discovery on Windows).
+/// </summary>
+public class SmokeTests
+{
+ [Fact]
+ public void TestInfrastructureWorks()
+ {
+ Assert.True(true);
+ }
+}
diff --git a/src/AI/tests/Essentials.AI.DeviceTests/Traits.cs b/src/AI/tests/Essentials.AI.DeviceTests/Traits.cs
new file mode 100644
index 000000000000..f3d09c28249c
--- /dev/null
+++ b/src/AI/tests/Essentials.AI.DeviceTests/Traits.cs
@@ -0,0 +1,64 @@
+using System;
+using System.Collections.Generic;
+
+namespace Microsoft.Maui.Essentials.AI.DeviceTests;
+
+static class Traits
+{
+ public const string AppleIntelligenceChatClient = "AppleIntelligenceChatClient";
+ public const string NLEmbeddingGenerator = "NLEmbeddingGenerator";
+ public const string OpenAIChatClient = "OpenAIChatClient";
+ public const string OpenAIEmbeddingGenerator = "OpenAIEmbeddingGenerator";
+
+ internal static class FeatureSupport
+ {
+ public const string Supported = "Supported";
+ public const string NotSupported = "NotSupported";
+
+ internal static string ToExclude(bool hasFeature) =>
+ hasFeature ? NotSupported : Supported;
+ }
+
+ internal static IEnumerable<string> GetSkipTraits(IEnumerable<string>? additionalFilters = null)
+ {
+#if IOS || MACCATALYST
+ // Apple Intelligence (FoundationModels) requires iOS/MacCatalyst 26+
+ if (!OperatingSystem.IsIOSVersionAtLeast(26) && !OperatingSystem.IsMacCatalystVersionAtLeast(26))
+ {
+ yield return $"Category={AppleIntelligenceChatClient}";
+ }
+
+ // Read TestFilter from environment (set by Helix/XHarness via --set-env)
+ // Supports TestFilter=SkipCategories=X,Y,Z format
+ string? testFilter = null;
+ foreach (var en in Foundation.NSProcessInfo.ProcessInfo.Environment)
+ {
+ if ($"{en.Key}" == "TestFilter")
+ {
+ testFilter = $"{en.Value}";
+ break;
+ }
+ }
+
+ if (!string.IsNullOrEmpty(testFilter) && testFilter.StartsWith("SkipCategories=", StringComparison.Ordinal))
+ {
+ var parts = testFilter.Substring("SkipCategories=".Length)
+ .Split(new[] { ',', ';' }, StringSplitOptions.RemoveEmptyEntries);
+ foreach (var part in parts)
+ {
+ var cat = part.Trim();
+ if (!string.IsNullOrWhiteSpace(cat))
+ yield return $"Category={cat}";
+ }
+ }
+#endif
+
+ if (additionalFilters != null)
+ {
+ foreach (var filter in additionalFilters)
+ {
+ yield return filter;
+ }
+ }
+ }
+}
diff --git a/src/AI/tests/Essentials.AI.UnitTests/Essentials.AI.UnitTests.csproj b/src/AI/tests/Essentials.AI.UnitTests/Essentials.AI.UnitTests.csproj
index 4686f890f132..12607a9c1909 100644
--- a/src/AI/tests/Essentials.AI.UnitTests/Essentials.AI.UnitTests.csproj
+++ b/src/AI/tests/Essentials.AI.UnitTests/Essentials.AI.UnitTests.csproj
@@ -28,7 +28,6 @@
-
diff --git a/src/AI/tests/Essentials.AI.UnitTests/TestHelpers/DataStreamsHelper.cs b/src/AI/tests/Essentials.AI.UnitTests/TestHelpers/DataStreamsHelper.cs
index 088741575652..36da3a6332cc 100644
--- a/src/AI/tests/Essentials.AI.UnitTests/TestHelpers/DataStreamsHelper.cs
+++ b/src/AI/tests/Essentials.AI.UnitTests/TestHelpers/DataStreamsHelper.cs
@@ -11,7 +11,7 @@ public static string GetFile(string fileName)
public static string GetTxtItinerary(string fileName)
{
- var directory = Path.Combine("..", "..", "..", "..", "..", "src", "AI", "tests", "Essentials.AI.UnitTests", "TestData", "DataStreams", "Itinerary");
+ var directory = Path.Combine("TestData", "DataStreams", "Itinerary");
var path = Path.Combine(directory, Path.ChangeExtension(fileName, ".txt"));
return Path.GetFullPath(path);
}
diff --git a/src/AI/tests/Essentials.AI.UnitTests/Tests/JsonStreamChunkerTests/Reset.cs b/src/AI/tests/Essentials.AI.UnitTests/Tests/JsonStreamChunkerTests/Reset.cs
new file mode 100644
index 000000000000..b279f0541b5b
--- /dev/null
+++ b/src/AI/tests/Essentials.AI.UnitTests/Tests/JsonStreamChunkerTests/Reset.cs
@@ -0,0 +1,131 @@
+using System;
+using Xunit;
+
+namespace Microsoft.Maui.Essentials.AI.UnitTests;
+
+public partial class JsonStreamChunkerTests
+{
+ /// <summary>
+ /// Tests for <see cref="JsonStreamChunker.Reset"/> which clears all internal state
+ /// so the next JSON is treated as a fresh stream (e.g., after a tool call boundary).
+ /// </summary>
+ public class ResetTests
+ {
+ [Fact]
+ public void Reset_AfterProcessing_NextProcessStartsFresh()
+ {
+ var chunker = new JsonStreamChunker();
+
+ // Process partial JSON
+ chunker.Process("{\"name\":\"Alice\"");
+
+ // Reset
+ chunker.Reset();
+
+ // Next process should treat this as a completely new JSON stream
+ var chunk = chunker.Process("{\"city\":\"Seattle\"}");
+ Assert.NotNull(chunk);
+ Assert.Contains("Seattle", chunk, StringComparison.Ordinal);
+ }
+
+ [Fact]
+ public void Reset_WithoutPriorProcessing_IsNoOp()
+ {
+ var chunker = new JsonStreamChunker();
+
+ // Reset on fresh chunker should not throw
+ chunker.Reset();
+
+ var chunk = chunker.Process("{\"key\":\"value\"}");
+ Assert.NotNull(chunk);
+ Assert.Contains("value", chunk, StringComparison.Ordinal);
+ }
+
+ [Fact]
+ public void Reset_MultipleTimes_IsIdempotent()
+ {
+ var chunker = new JsonStreamChunker();
+
+ chunker.Process("{\"a\":\"b\"");
+ chunker.Reset();
+ chunker.Reset();
+ chunker.Reset();
+
+ var chunk = chunker.Process("{\"c\":\"d\"}");
+ Assert.NotNull(chunk);
+ Assert.Contains("d", chunk, StringComparison.Ordinal);
+ }
+
+ [Fact]
+ public void Reset_ThenFlush_ReturnsClosingBrackets()
+ {
+ var chunker = new JsonStreamChunker();
+
+ chunker.Process("{\"name\":\"Alice\"");
+ chunker.Reset();
+
+ // After reset, flush should not carry over old state
+ var flush = chunker.Flush();
+ // After reset with no new processing, flush may return empty or minimal
+ Assert.NotNull(flush);
+ }
+
+ [Fact]
+ public void Reset_ClearsEmittedStrings_AllowsReEmission()
+ {
+ var chunker = new JsonStreamChunker();
+
+ // Process a property - the string value is emitted and tracked
+ var chunk1 = chunker.Process("{\"greeting\":\"Hello\"}");
+
+ chunker.Reset();
+
+ // Same property name/value should be re-emitted after reset
+ var chunk2 = chunker.Process("{\"greeting\":\"Hello\"}");
+
+ Assert.Contains("Hello", chunk1, StringComparison.Ordinal);
+ Assert.Contains("Hello", chunk2, StringComparison.Ordinal);
+ }
+
+ [Fact]
+ public void Reset_BetweenCompleteJsonObjects_ProducesValidOutput()
+ {
+ var chunker = new JsonStreamChunker();
+
+ // First complete JSON object
+ var chunks1 = new List<string?>();
+ chunks1.Add(chunker.Process("{\"temp\":72}"));
+ chunks1.Add(chunker.Flush());
+
+ chunker.Reset();
+
+ // Second complete JSON object (different schema)
+ var chunks2 = new List<string?>();
+ chunks2.Add(chunker.Process("{\"city\":\"Seattle\",\"state\":\"WA\"}"));
+ chunks2.Add(chunker.Flush());
+
+ var result1 = string.Concat(chunks1);
+ var result2 = string.Concat(chunks2);
+
+ Assert.Contains("72", result1, StringComparison.Ordinal);
+ Assert.Contains("Seattle", result2, StringComparison.Ordinal);
+ }
+
+ [Fact]
+ public void Reset_DuringPartialString_DoesNotCorrupt()
+ {
+ var chunker = new JsonStreamChunker();
+
+ // Start processing with a partial string value
+ chunker.Process("{\"message\":\"This is a long mess");
+
+ // Reset mid-string
+ chunker.Reset();
+
+ // New JSON should work fine
+ var chunk = chunker.Process("{\"result\":\"OK\"}");
+ Assert.NotNull(chunk);
+ Assert.Contains("OK", chunk, StringComparison.Ordinal);
+ }
+ }
+}
diff --git a/src/AI/tests/Essentials.AI.UnitTests/Tests/NonFunctionInvokingChatClientTests/FunctionLoggingTests.cs b/src/AI/tests/Essentials.AI.UnitTests/Tests/NonFunctionInvokingChatClientTests/FunctionLoggingTests.cs
deleted file mode 100644
index 746f12e59d62..000000000000
--- a/src/AI/tests/Essentials.AI.UnitTests/Tests/NonFunctionInvokingChatClientTests/FunctionLoggingTests.cs
+++ /dev/null
@@ -1,281 +0,0 @@
-// Licensed to the .NET Foundation under one or more agreements.
-// The .NET Foundation licenses this file to you under the MIT license.
-
-using System.Runtime.CompilerServices;
-using Maui.Controls.Sample.AI;
-using Microsoft.Extensions.AI;
-using Microsoft.Extensions.Logging;
-using Xunit;
-
-namespace Microsoft.Maui.Essentials.AI.UnitTests;
-
-/// <summary>
-/// Tests for the NonFunctionInvokingChatClient function logging functionality.
-/// These tests verify that function calls and results are properly logged at the appropriate log levels.
-/// </summary>
-public class FunctionLoggingTests
-{
- [Theory]
- [InlineData(LogLevel.Trace)]
- [InlineData(LogLevel.Debug)]
- [InlineData(LogLevel.Information)]
- public async Task FunctionInvocationsLogged(LogLevel level)
- {
- // Arrange
- var logCollector = new LogCollector(level);
-
- var mockClient = new MockChatClient();
- mockClient.AddFunctionCallContent("Func1", "callId1", new Dictionary<string, object?> { ["arg1"] = "value1" });
- mockClient.AddFunctionResultContent("callId1", "Result 1");
- mockClient.AddTextContent("world");
-
- using var client = new NonFunctionInvokingChatClient(mockClient, logCollector);
-
- // Act
- var messages = new List<ChatMessage> { new(ChatRole.User, "hello") };
- var response = await client.GetResponseAsync(messages);
-
- // Assert
- var logs = logCollector.Entries;
- if (level is LogLevel.Trace)
- {
- Assert.Equal(2, logs.Count);
- Assert.True(logs[0].Message.Contains("Received tool call: Func1", StringComparison.Ordinal) && logs[0].Message.Contains("\"arg1\": \"value1\"", StringComparison.Ordinal));
- Assert.True(logs[1].Message.Contains("Received tool result for call ID: callId1", StringComparison.Ordinal) && logs[1].Message.Contains("Result 1", StringComparison.Ordinal));
- }
- else if (level is LogLevel.Debug)
- {
- Assert.Equal(2, logs.Count);
- Assert.True(logs[0].Message.Contains("Received tool call: Func1", StringComparison.Ordinal) && !logs[0].Message.Contains("arg1", StringComparison.Ordinal));
- Assert.True(logs[1].Message.Contains("Received tool result for call ID: callId1", StringComparison.Ordinal) && !logs[1].Message.Contains("Result", StringComparison.Ordinal));
- }
- else
- {
- Assert.Empty(logs);
- }
- }
-
- [Theory]
- [InlineData(LogLevel.Trace)]
- [InlineData(LogLevel.Debug)]
- [InlineData(LogLevel.Information)]
- public async Task FunctionInvocationsLoggedForStreaming(LogLevel level)
- {
- // Arrange
- var logCollector = new LogCollector(level);
-
- var mockClient = new MockChatClient();
- mockClient.AddFunctionCallContent("Func1", "callId1", new Dictionary<string, object?> { ["arg1"] = "value1" });
- mockClient.AddFunctionResultContent("callId1", "Result 1");
- mockClient.AddTextContent("world");
-
- using var client = new NonFunctionInvokingChatClient(mockClient, logCollector);
-
- // Act
- var messages = new List<ChatMessage> { new(ChatRole.User, "hello") };
- var updates = new List<ChatResponseUpdate>();
- await foreach (var update in client.GetStreamingResponseAsync(messages))
- {
- updates.Add(update);
- }
-
- // Assert
- var logs = logCollector.Entries;
- if (level is LogLevel.Trace)
- {
- Assert.Equal(2, logs.Count);
- Assert.True(logs[0].Message.Contains("Received tool call: Func1", StringComparison.Ordinal) && logs[0].Message.Contains("\"arg1\": \"value1\"", StringComparison.Ordinal));
- Assert.True(logs[1].Message.Contains("Received tool result for call ID: callId1", StringComparison.Ordinal) && logs[1].Message.Contains("Result 1", StringComparison.Ordinal));
- }
- else if (level is LogLevel.Debug)
- {
- Assert.Equal(2, logs.Count);
- Assert.True(logs[0].Message.Contains("Received tool call: Func1", StringComparison.Ordinal) && !logs[0].Message.Contains("arg1", StringComparison.Ordinal));
- Assert.True(logs[1].Message.Contains("Received tool result for call ID: callId1", StringComparison.Ordinal) && !logs[1].Message.Contains("Result", StringComparison.Ordinal));
- }
- else
- {
- Assert.Empty(logs);
- }
- }
-
- [Fact]
- public async Task NoLoggingWhenNoFunctionCalls()
- {
- // Arrange
- var logCollector = new LogCollector(LogLevel.Trace);
-
- var mockClient = new MockChatClient();
- mockClient.AddTextContent("Hello there!");
-
- using var client = new NonFunctionInvokingChatClient(mockClient, logCollector);
-
- // Act
- var messages = new List<ChatMessage> { new(ChatRole.User, "hello") };
- var response = await client.GetResponseAsync(messages);
-
- // Assert - no tool logs since there were no function calls
- Assert.Empty(logCollector.Entries);
- }
-
- [Fact]
- public async Task MultipleFunctionCallsLogged()
- {
- // Arrange
- var logCollector = new LogCollector(LogLevel.Debug);
-
- var mockClient = new MockChatClient();
- mockClient.AddFunctionCallContent("Func1", "callId1", new Dictionary<string, object?> { ["arg1"] = "value1" });
- mockClient.AddFunctionCallContent("Func2", "callId2", new Dictionary<string, object?> { ["arg2"] = "value2" });
- mockClient.AddFunctionResultContent("callId1", "Result 1");
- mockClient.AddFunctionResultContent("callId2", "Result 2");
- mockClient.AddTextContent("done");
-
- using var client = new NonFunctionInvokingChatClient(mockClient, logCollector);
-
- // Act
- var messages = new List<ChatMessage> { new(ChatRole.User, "hello") };
- var response = await client.GetResponseAsync(messages);
-
- // Assert
- var logs = logCollector.Entries;
- Assert.Equal(4, logs.Count);
- Assert.Contains(logs, l => l.Message.Contains("Func1", StringComparison.Ordinal));
- Assert.Contains(logs, l => l.Message.Contains("Func2", StringComparison.Ordinal));
- Assert.Contains(logs, l => l.Message.Contains("callId1", StringComparison.Ordinal));
- Assert.Contains(logs, l => l.Message.Contains("callId2", StringComparison.Ordinal));
- }
-
- /// <summary>
- /// Simple log collector for testing that captures log messages.
- /// </summary>
- private class LogCollector : ILoggerFactory, ILogger
- {
- private readonly LogLevel _minimumLevel;
-
- public LogCollector(LogLevel minimumLevel)
- {
- _minimumLevel = minimumLevel;
- }
-
- public List<LogEntry> Entries { get; } = [];
-
- // ILoggerFactory
- public ILogger CreateLogger(string categoryName) => this;
- public void AddProvider(ILoggerProvider provider) { }
- public void Dispose() { }
-
- // ILogger
- public IDisposable? BeginScope<TState>(TState state) where TState : notnull => null;
- public bool IsEnabled(LogLevel logLevel) => logLevel >= _minimumLevel;
-
- public void Log<TState>(LogLevel logLevel, EventId eventId, TState state, Exception? exception, Func<TState, Exception?, string> formatter)
- {
- if (IsEnabled(logLevel))
- {
- Entries.Add(new LogEntry(logLevel, formatter(state, exception)));
- }
- }
- }
-
- public record LogEntry(LogLevel Level, string Message);
-
- /// <summary>
- /// Mock chat client for testing that allows adding predefined responses including function calls and results.
- /// </summary>
- private class MockChatClient : IChatClient
- {
- private readonly List<AIContent> _streamContent = [];
- private string? _nonStreamingResponse;
-
- public ChatClientMetadata Metadata => new("MockClient");
-
- public void AddTextContent(string text)
- {
- _streamContent.Add(new TextContent(text));
- }
-
- public void AddFunctionCallContent(string name, string callId, Dictionary<string, object?>? arguments = null)
- {
- _streamContent.Add(new FunctionCallContent(callId, name, arguments));
- }
-
- public void AddFunctionResultContent(string callId, object? result)
- {
- _streamContent.Add(new FunctionResultContent(callId, result));
- }
-
- public void SetNonStreamingResponse(string response)
- {
- _nonStreamingResponse = response;
- }
-
- public Task<ChatResponse> GetResponseAsync(
- IEnumerable messages,
- ChatOptions? options = null,
- CancellationToken cancellationToken = default)
- {
- var responseMessages = new List();
-
- // Group contents by type for more realistic message structure
- var currentContents = new List<AIContent>();
- ChatRole currentRole = ChatRole.Assistant;
-
- foreach (var content in _streamContent)
- {
- if (content is FunctionResultContent)
- {
- // Function results should be in a Tool message
- if (currentContents.Count > 0)
- {
- responseMessages.Add(new ChatMessage(currentRole, [.. currentContents]));
- currentContents.Clear();
- }
- responseMessages.Add(new ChatMessage(ChatRole.Tool, [content]));
- }
- else
- {
- currentContents.Add(content);
- currentRole = ChatRole.Assistant;
- }
- }
-
- if (currentContents.Count > 0)
- {
- responseMessages.Add(new ChatMessage(currentRole, [.. currentContents]));
- }
-
- if (responseMessages.Count == 0 && _nonStreamingResponse is not null)
- {
- responseMessages.Add(new ChatMessage(ChatRole.Assistant, _nonStreamingResponse));
- }
-
- return Task.FromResult(new ChatResponse(responseMessages));
- }
-
- public async IAsyncEnumerable<ChatResponseUpdate> GetStreamingResponseAsync(
- IEnumerable messages,
- ChatOptions? options = null,
- [EnumeratorCancellation] CancellationToken cancellationToken = default)
- {
- foreach (var content in _streamContent)
- {
- await Task.Yield();
-
- var role = content is FunctionResultContent ? ChatRole.Tool : ChatRole.Assistant;
-
- yield return new ChatResponseUpdate
- {
- Role = role,
- Contents = [content]
- };
- }
- }
-
- public object? GetService(Type serviceType, object? serviceKey = null) => null;
-
- public TService? GetService<TService>(object? key = null) where TService : class => null;
-
- public void Dispose() { }
- }
-}
diff --git a/src/AI/tests/Essentials.AI.UnitTests/Tests/NonStreamingResponseHandlerTests/Completion.cs b/src/AI/tests/Essentials.AI.UnitTests/Tests/NonStreamingResponseHandlerTests/Completion.cs
new file mode 100644
index 000000000000..5f3dfc859163
--- /dev/null
+++ b/src/AI/tests/Essentials.AI.UnitTests/Tests/NonStreamingResponseHandlerTests/Completion.cs
@@ -0,0 +1,55 @@
+using Microsoft.Extensions.AI;
+using Xunit;
+
+namespace Microsoft.Maui.Essentials.AI.UnitTests;
+
+public partial class NonStreamingResponseHandlerTests
+{
+ /// <summary>
+ /// Tests for successful completion paths.
+ /// </summary>
+ public class CompletionTests
+ {
+ [Fact]
+ public async Task Complete_WithResponse_ReturnsIt()
+ {
+ var handler = new NonStreamingResponseHandler();
+ var expected = new ChatResponse([new ChatMessage(ChatRole.Assistant, "Hello world")]);
+
+ handler.Complete(expected);
+
+ var result = await handler.Task;
+ Assert.Same(expected, result);
+ }
+
+ [Fact]
+ public async Task Complete_WithMultipleMessages_ReturnsAll()
+ {
+ var handler = new NonStreamingResponseHandler();
+ var expected = new ChatResponse([
+ new ChatMessage(ChatRole.User, "What's the weather?"),
+ new ChatMessage(ChatRole.Assistant, "It's sunny in Boston")
+ ]);
+
+ handler.Complete(expected);
+
+ var result = await handler.Task;
+ Assert.Equal(2, result.Messages.Count);
+ Assert.Equal(ChatRole.User, result.Messages[0].Role);
+ Assert.Equal(ChatRole.Assistant, result.Messages[1].Role);
+ }
+
+ [Fact]
+ public async Task Complete_WithEmptyFallback_ReturnsIt()
+ {
+ var handler = new NonStreamingResponseHandler();
+ var fallback = new ChatResponse([new ChatMessage(ChatRole.Assistant, "")]);
+
+ handler.Complete(fallback);
+
+ var result = await handler.Task;
+ Assert.Single(result.Messages);
+ Assert.Equal("", result.Messages[0].Text);
+ }
+ }
+}
diff --git a/src/AI/tests/Essentials.AI.UnitTests/Tests/NonStreamingResponseHandlerTests/Errors.cs b/src/AI/tests/Essentials.AI.UnitTests/Tests/NonStreamingResponseHandlerTests/Errors.cs
new file mode 100644
index 000000000000..a72b0ec88282
--- /dev/null
+++ b/src/AI/tests/Essentials.AI.UnitTests/Tests/NonStreamingResponseHandlerTests/Errors.cs
@@ -0,0 +1,50 @@
+using Xunit;
+
+namespace Microsoft.Maui.Essentials.AI.UnitTests;
+
+public partial class NonStreamingResponseHandlerTests
+{
+	/// <summary>
+	/// Tests for error and cancellation paths.
+	/// </summary>
+	public class ErrorTests
+	{
+		[Fact]
+		public async Task CompleteWithError_SurfacesException()
+		{
+			var handler = new NonStreamingResponseHandler();
+
+			handler.CompleteWithError(new InvalidOperationException("test error"));
+
+			var ex = await Assert.ThrowsAsync<InvalidOperationException>(() => handler.Task);
+			Assert.Equal("test error", ex.Message);
+		}
+
+		[Fact]
+		public async Task CompleteCancelled_SurfacesCancellation()
+		{
+			var handler = new NonStreamingResponseHandler();
+			using var cts = new CancellationTokenSource();
+			cts.Cancel();
+
+			handler.CompleteCancelled(cts.Token);
+
+			await Assert.ThrowsAsync<TaskCanceledException>(() => handler.Task);
+		}
+
+		[Fact]
+		public async Task DoubleComplete_PreservesFirstResult()
+		{
+			var handler = new NonStreamingResponseHandler();
+			var response = new Microsoft.Extensions.AI.ChatResponse(
+				[new Microsoft.Extensions.AI.ChatMessage(Microsoft.Extensions.AI.ChatRole.Assistant, "first")]);
+
+			handler.Complete(response);
+			handler.CompleteWithError(new InvalidOperationException("should be ignored"));
+			handler.CompleteCancelled(CancellationToken.None);
+
+			var result = await handler.Task;
+			Assert.Same(response, result);
+		}
+	}
+}
diff --git a/src/AI/tests/Essentials.AI.UnitTests/Tests/NonStreamingResponseHandlerTests/NonStreamingResponseHandlerTests.cs b/src/AI/tests/Essentials.AI.UnitTests/Tests/NonStreamingResponseHandlerTests/NonStreamingResponseHandlerTests.cs
new file mode 100644
index 000000000000..c19e523e5087
--- /dev/null
+++ b/src/AI/tests/Essentials.AI.UnitTests/Tests/NonStreamingResponseHandlerTests/NonStreamingResponseHandlerTests.cs
@@ -0,0 +1,8 @@
+namespace Microsoft.Maui.Essentials.AI.UnitTests;
+
+/// <summary>
+/// Tests for <see cref="NonStreamingResponseHandler"/>.
+/// </summary>
+public partial class NonStreamingResponseHandlerTests
+{
+}
diff --git a/src/AI/tests/Essentials.AI.UnitTests/Tests/PlainTextStreamChunkerTests/Reset.cs b/src/AI/tests/Essentials.AI.UnitTests/Tests/PlainTextStreamChunkerTests/Reset.cs
new file mode 100644
index 000000000000..d03da2688e2c
--- /dev/null
+++ b/src/AI/tests/Essentials.AI.UnitTests/Tests/PlainTextStreamChunkerTests/Reset.cs
@@ -0,0 +1,145 @@
+using Xunit;
+
+namespace Microsoft.Maui.Essentials.AI.UnitTests;
+
+public partial class PlainTextStreamChunkerTests
+{
+	/// <summary>
+	/// Tests for <see cref="PlainTextStreamChunker.Reset"/> which clears internal state
+	/// so the next text is treated as a fresh stream (e.g., after a tool call boundary).
+	/// </summary>
+	public class ResetTests
+	{
+		[Fact]
+		public void Reset_AfterProcessing_NextProcessEmitsFullText()
+		{
+			var chunker = new PlainTextStreamChunker();
+
+			// Process some text
+			chunker.Process("Hello World");
+
+			// Reset
+			chunker.Reset();
+
+			// Next Process should emit the full text, not a delta from "Hello World"
+			var chunk = chunker.Process("Goodbye");
+			Assert.Equal("Goodbye", chunk);
+		}
+
+		[Fact]
+		public void Reset_SimulatesToolCallBoundary_PreservesFullPostToolText()
+		{
+			// This is the exact scenario that caused the bug:
+			// Before tool call: AI streams "Here are some landmarks..."
+			// Tool executes, AI resets text stream and starts fresh
+			// Without Reset(), chunker would compute delta from old text and drop characters
+			var chunker = new PlainTextStreamChunker();
+
+			// Pre-tool text streaming
+			chunker.Process("null");
+			chunker.Process("null"); // Apple Intelligence emits "null" before tool calls
+
+			// Tool call boundary - reset the chunker
+			chunker.Reset();
+
+			// Post-tool text streaming starts fresh
+			var chunk1 = chunker.Process("Here");
+			var chunk2 = chunker.Process("Here are some");
+			var chunk3 = chunker.Process("Here are some landmarks");
+
+			// Concatenate all post-reset chunks
+			var result = chunk1 + chunk2 + chunk3;
+			Assert.Equal("Here are some landmarks", result);
+		}
+
+		[Fact]
+		public void Reset_WithoutPriorProcessing_IsNoOp()
+		{
+			var chunker = new PlainTextStreamChunker();
+
+			// Reset on fresh chunker should not throw or break anything
+			chunker.Reset();
+
+			var chunk = chunker.Process("Hello");
+			Assert.Equal("Hello", chunk);
+		}
+
+		[Fact]
+		public void Reset_MultipleTimes_IsIdempotent()
+		{
+			var chunker = new PlainTextStreamChunker();
+
+			chunker.Process("First text");
+			chunker.Reset();
+			chunker.Reset();
+			chunker.Reset();
+
+			var chunk = chunker.Process("Second text");
+			Assert.Equal("Second text", chunk);
+		}
+
+		[Fact]
+		public void Reset_ThenFlush_ReturnsEmpty()
+		{
+			var chunker = new PlainTextStreamChunker();
+
+			chunker.Process("Some text");
+			chunker.Reset();
+
+			var flush = chunker.Flush();
+			Assert.Equal("", flush);
+		}
+
+		[Fact]
+		public void Reset_MultipleToolCallBoundaries_AllDeltasCorrect()
+		{
+			// Simulates multiple rounds of tool calling within one streaming response
+			var chunker = new PlainTextStreamChunker();
+
+			// Round 1: pre-tool text
+			var r1 = chunker.Process("Looking up weather...");
+			Assert.Equal("Looking up weather...", r1);
+
+			// Tool call 1 boundary
+			chunker.Reset();
+
+			// Round 1: post-tool text
+			var r1Post1 = chunker.Process("The weather");
+			var r1Post2 = chunker.Process("The weather in Seattle is sunny");
+			Assert.Equal("The weather", r1Post1);
+			Assert.Equal(" in Seattle is sunny", r1Post2);
+
+			// Tool call 2 boundary (chained tool)
+			chunker.Reset();
+
+			// Round 2: post-tool text
+			var r2Post1 = chunker.Process("Also");
+			var r2Post2 = chunker.Process("Also, the temperature is 72F");
+			Assert.Equal("Also", r2Post1);
+			Assert.Equal(", the temperature is 72F", r2Post2);
+		}
+
+		[Fact]
+		public void Reset_ConcatenatedChunks_ProduceCorrectOutput()
+		{
+			// Verify that concatenating all chunks from a multi-tool scenario
+			// produces the expected final text for each segment
+			var chunker = new PlainTextStreamChunker();
+
+			// Segment 1
+			var seg1Chunks = new List<string>();
+			seg1Chunks.Add(chunker.Process("Hello"));
+			seg1Chunks.Add(chunker.Process("Hello World"));
+
+			chunker.Reset();
+
+			// Segment 2
+			var seg2Chunks = new List<string>();
+			seg2Chunks.Add(chunker.Process("Goodbye"));
+			seg2Chunks.Add(chunker.Process("Goodbye Moon"));
+
+			Assert.Equal("Hello World", string.Concat(seg1Chunks));
+			Assert.Equal("Goodbye Moon", string.Concat(seg2Chunks));
+		}
+	}
+}
diff --git a/src/AI/tests/Essentials.AI.UnitTests/Tests/StreamingResponseHandlerTests/Cancellation.cs b/src/AI/tests/Essentials.AI.UnitTests/Tests/StreamingResponseHandlerTests/Cancellation.cs
new file mode 100644
index 000000000000..b98cd190157c
--- /dev/null
+++ b/src/AI/tests/Essentials.AI.UnitTests/Tests/StreamingResponseHandlerTests/Cancellation.cs
@@ -0,0 +1,99 @@
+using Xunit;
+
+namespace Microsoft.Maui.Essentials.AI.UnitTests;
+
+public partial class StreamingResponseHandlerTests
+{
+	/// <summary>
+	/// Tests for cancellation scenarios during streaming reads.
+	/// </summary>
+	public class CancellationTests
+	{
+		[Fact]
+		public async Task ReadAllAsync_WithPreCancelledToken_ThrowsOperationCanceled()
+		{
+			var handler = new StreamingResponseHandler(new PlainTextStreamChunker());
+			handler.ProcessContent("Hello");
+			handler.Complete();
+
+			using var cts = new CancellationTokenSource();
+			cts.Cancel();
+
+			await Assert.ThrowsAnyAsync<OperationCanceledException>(async () =>
+			{
+				await foreach (var _ in handler.ReadAllAsync(cts.Token))
+				{
+				}
+			});
+		}
+
+		[Fact]
+		public async Task ReadAllAsync_CancelledDuringRead_ThrowsOperationCanceled()
+		{
+			var handler = new StreamingResponseHandler(new PlainTextStreamChunker());
+
+			// Write one update but don't complete — the reader will block waiting for more
+			handler.ProcessContent("Hello");
+
+			using var cts = new CancellationTokenSource();
+			var readTask = Task.Run(async () =>
+			{
+				var updates = new List<Microsoft.Extensions.AI.ChatResponseUpdate>();
+				await foreach (var update in handler.ReadAllAsync(cts.Token))
+				{
+					updates.Add(update);
+				}
+				return updates;
+			});
+
+			// Give the reader time to start consuming, then cancel
+			await Task.Delay(50);
+			cts.Cancel();
+
+			await Assert.ThrowsAnyAsync<OperationCanceledException>(() => readTask);
+		}
+
+		[Fact]
+		public async Task ReadAllAsync_CancelledAfterToolCall_ThrowsOperationCanceled()
+		{
+			var handler = new StreamingResponseHandler(new PlainTextStreamChunker());
+
+			handler.ProcessContent("Checking weather...");
+			handler.ProcessToolCall("call-1", "GetWeather", "{\"location\":\"Boston\"}");
+			// Don't send tool result or complete — simulates cancellation during tool execution
+
+			using var cts = new CancellationTokenSource();
+			var readTask = Task.Run(async () =>
+			{
+				var updates = new List<Microsoft.Extensions.AI.ChatResponseUpdate>();
+				await foreach (var update in handler.ReadAllAsync(cts.Token))
+				{
+					updates.Add(update);
+				}
+				return updates;
+			});
+
+			// Let reader consume the queued items, then cancel while waiting for more
+			await Task.Delay(50);
+			cts.Cancel();
+
+			await Assert.ThrowsAnyAsync<OperationCanceledException>(() => readTask);
+		}
+
+		[Fact]
+		public async Task ReadAllAsync_WithTimeout_ThrowsWhenNeverCompleted()
+		{
+			var handler = new StreamingResponseHandler(new PlainTextStreamChunker());
+			// Handler is never completed — simulates a hung tool or stalled response
+
+			using var cts = new CancellationTokenSource(TimeSpan.FromMilliseconds(100));
+
+			await Assert.ThrowsAnyAsync<OperationCanceledException>(async () =>
+			{
+				await foreach (var _ in handler.ReadAllAsync(cts.Token))
+				{
+				}
+			});
+		}
+	}
+}
diff --git a/src/AI/tests/Essentials.AI.UnitTests/Tests/StreamingResponseHandlerTests/Completion.cs b/src/AI/tests/Essentials.AI.UnitTests/Tests/StreamingResponseHandlerTests/Completion.cs
new file mode 100644
index 000000000000..eb5e53f3665d
--- /dev/null
+++ b/src/AI/tests/Essentials.AI.UnitTests/Tests/StreamingResponseHandlerTests/Completion.cs
@@ -0,0 +1,71 @@
+using Xunit;
+
+namespace Microsoft.Maui.Essentials.AI.UnitTests;
+
+public partial class StreamingResponseHandlerTests
+{
+	/// <summary>
+	/// Tests for completion and error handling.
+	/// </summary>
+	public class CompletionTests
+	{
+		[Fact]
+		public async Task Complete_FlushesRemainingContent()
+		{
+			var handler = new StreamingResponseHandler(new PlainTextStreamChunker());
+
+			handler.ProcessContent("Hello");
+			handler.Complete();
+
+			var updates = await ReadAll(handler);
+
+			Assert.Single(updates);
+			Assert.Equal("Hello", updates[0].Contents.OfType<Microsoft.Extensions.AI.TextContent>().Single().Text);
+		}
+
+		[Fact]
+		public async Task CompleteWithError_SurfacesExceptionToReader()
+		{
+			var handler = new StreamingResponseHandler(new PlainTextStreamChunker());
+
+			handler.ProcessContent("Hello");
+			handler.CompleteWithError(new InvalidOperationException("test error"));
+
+			await Assert.ThrowsAsync<InvalidOperationException>(async () => await ReadAll(handler));
+		}
+
+		[Fact]
+		public void DoubleComplete_DoesNotThrow()
+		{
+			var handler = new StreamingResponseHandler(new PlainTextStreamChunker());
+
+			handler.CompleteWithError(new InvalidOperationException("first error"));
+			handler.Complete();
+			handler.CompleteWithError(new InvalidOperationException("second error"));
+		}
+
+		[Fact]
+		public async Task Complete_WithJsonChunker_FlushesRemainingJsonContent()
+		{
+			// Use JsonStreamChunker to ensure the Flush()-on-Complete path is exercised.
+			// JsonStreamChunker expects complete valid JSON at each step and tracks partial state.
+			var handler = new StreamingResponseHandler(new JsonStreamChunker());
+
+			// Feed progressive complete JSON snapshots — the chunker tracks partial strings
+			handler.ProcessContent("{\"greeting\":\"Hello\"}");
+			handler.ProcessContent("{\"greeting\":\"Hello world\"}");
+
+			// Complete should flush remaining content from JsonStreamChunker
+			handler.Complete();
+
+			var updates = await ReadAll(handler);
+
+			// Should have text updates from the progressive JSON
+			Assert.NotEmpty(updates);
+			var allText = string.Concat(updates
+				.SelectMany(u => u.Contents.OfType<Microsoft.Extensions.AI.TextContent>())
+				.Select(tc => tc.Text));
+			Assert.Contains("Hello", allText, StringComparison.Ordinal);
+		}
+	}
+}
diff --git a/src/AI/tests/Essentials.AI.UnitTests/Tests/StreamingResponseHandlerTests/Content.cs b/src/AI/tests/Essentials.AI.UnitTests/Tests/StreamingResponseHandlerTests/Content.cs
new file mode 100644
index 000000000000..13c292a43eb6
--- /dev/null
+++ b/src/AI/tests/Essentials.AI.UnitTests/Tests/StreamingResponseHandlerTests/Content.cs
@@ -0,0 +1,68 @@
+using Microsoft.Extensions.AI;
+using Xunit;
+
+namespace Microsoft.Maui.Essentials.AI.UnitTests;
+
+public partial class StreamingResponseHandlerTests
+{
+	/// <summary>
+	/// Tests for <see cref="StreamingResponseHandler.ProcessContent"/>.
+	/// </summary>
+	public class ContentTests
+	{
+		[Fact]
+		public async Task ProcessContent_EmitsTextDelta()
+		{
+			var handler = new StreamingResponseHandler(new PlainTextStreamChunker());
+
+			handler.ProcessContent("Hello");
+			handler.Complete();
+
+			var updates = await ReadAll(handler);
+
+			Assert.Single(updates);
+			Assert.Equal(ChatRole.Assistant, updates[0].Role);
+			Assert.Equal("Hello", updates[0].Contents.OfType<TextContent>().Single().Text);
+		}
+
+		[Fact]
+		public async Task ProcessContent_ProgressiveUpdates_EmitDeltas()
+		{
+			var handler = new StreamingResponseHandler(new PlainTextStreamChunker());
+
+			handler.ProcessContent("Hello");
+			handler.ProcessContent("Hello world");
+			handler.Complete();
+
+			var updates = await ReadAll(handler);
+
+			Assert.Equal(2, updates.Count);
+			Assert.Equal("Hello", updates[0].Contents.OfType<TextContent>().Single().Text);
+			Assert.Equal(" world", updates[1].Contents.OfType<TextContent>().Single().Text);
+		}
+
+		[Fact]
+		public async Task ProcessContent_EmptyText_ProducesNoOutput()
+		{
+			var handler = new StreamingResponseHandler(new PlainTextStreamChunker());
+
+			handler.ProcessContent("");
+			handler.Complete();
+
+			var updates = await ReadAll(handler);
+			Assert.Empty(updates);
+		}
+
+		[Fact]
+		public async Task ProcessContent_NullText_ProducesNoOutput()
+		{
+			var handler = new StreamingResponseHandler(new PlainTextStreamChunker());
+
+			handler.ProcessContent(null);
+			handler.Complete();
+
+			var updates = await ReadAll(handler);
+			Assert.Empty(updates);
+		}
+	}
+}
diff --git a/src/AI/tests/Essentials.AI.UnitTests/Tests/StreamingResponseHandlerTests/StreamingResponseHandlerTests.cs b/src/AI/tests/Essentials.AI.UnitTests/Tests/StreamingResponseHandlerTests/StreamingResponseHandlerTests.cs
new file mode 100644
index 000000000000..5f973d2c6b71
--- /dev/null
+++ b/src/AI/tests/Essentials.AI.UnitTests/Tests/StreamingResponseHandlerTests/StreamingResponseHandlerTests.cs
@@ -0,0 +1,19 @@
+using Microsoft.Extensions.AI;
+
+namespace Microsoft.Maui.Essentials.AI.UnitTests;
+
+/// <summary>
+/// Tests for <see cref="StreamingResponseHandler"/>.
+/// </summary>
+public partial class StreamingResponseHandlerTests
+{
+	static async Task<List<ChatResponseUpdate>> ReadAll(StreamingResponseHandler handler)
+	{
+		var updates = new List<ChatResponseUpdate>();
+		await foreach (var update in handler.ReadAllAsync(CancellationToken.None))
+		{
+			updates.Add(update);
+		}
+		return updates;
+	}
+}
diff --git a/src/AI/tests/Essentials.AI.UnitTests/Tests/StreamingResponseHandlerTests/Timeout.cs b/src/AI/tests/Essentials.AI.UnitTests/Tests/StreamingResponseHandlerTests/Timeout.cs
new file mode 100644
index 000000000000..cf90b3d44b36
--- /dev/null
+++ b/src/AI/tests/Essentials.AI.UnitTests/Tests/StreamingResponseHandlerTests/Timeout.cs
@@ -0,0 +1,137 @@
+using Xunit;
+
+namespace Microsoft.Maui.Essentials.AI.UnitTests;
+
+public partial class StreamingResponseHandlerTests
+{
+	/// <summary>
+	/// Tests for timeout and slow operation scenarios.
+	/// Verifies that the handler behaves correctly when operations are slow
+	/// or when the handler is never completed (simulating hung tools).
+	/// </summary>
+	public class TimeoutTests
+	{
+		[Fact]
+		public async Task ReadAllAsync_NeverCompleted_CanBeCancelledViaTimeout()
+		{
+			// Simulates a hung tool that never returns — the handler is never completed.
+			// The reader should be cancellable via CancellationToken timeout.
+			var handler = new StreamingResponseHandler(new PlainTextStreamChunker());
+
+			using var cts = new CancellationTokenSource(TimeSpan.FromMilliseconds(100));
+
+			await Assert.ThrowsAnyAsync<OperationCanceledException>(async () =>
+			{
+				await foreach (var _ in handler.ReadAllAsync(cts.Token))
+				{
+				}
+			});
+		}
+
+		[Fact]
+		public async Task ReadAllAsync_SlowProducer_ReadsAllUpdatesBeforeTimeout()
+		{
+			// Simulates a slow producer that writes content with delays.
+			// The reader should still get all updates before the channel is completed.
+			var handler = new StreamingResponseHandler(new PlainTextStreamChunker());
+
+			var writeTask = Task.Run(async () =>
+			{
+				handler.ProcessContent("Hello");
+				await Task.Delay(50);
+				handler.ProcessContent("Hello World");
+				await Task.Delay(50);
+				handler.Complete();
+			});
+
+			// Use a generous timeout — we expect completion before timeout
+			using var cts = new CancellationTokenSource(TimeSpan.FromSeconds(5));
+
+			var updates = new List<Microsoft.Extensions.AI.ChatResponseUpdate>();
+			await foreach (var update in handler.ReadAllAsync(cts.Token))
+			{
+				updates.Add(update);
+			}
+
+			await writeTask;
+
+			// Should have received text updates
+			Assert.NotEmpty(updates);
+			var allText = string.Concat(updates
+				.SelectMany(u => u.Contents.OfType<Microsoft.Extensions.AI.TextContent>())
+				.Select(tc => tc.Text));
+			Assert.Contains("World", allText, StringComparison.Ordinal);
+		}
+
+		[Fact]
+		public async Task ReadAllAsync_SlowToolExecution_ReceivesUpdatesBeforeAndAfterTool()
+		{
+			// Simulates a tool that takes time to execute — content before, tool call,
+			// slow tool result, content after.
+			var handler = new StreamingResponseHandler(new PlainTextStreamChunker());
+
+			var writeTask = Task.Run(async () =>
+			{
+				handler.ProcessContent("Let me check the weather...");
+				await Task.Delay(30);
+				handler.ProcessToolCall("call-1", "GetWeather", "{\"location\":\"Boston\"}");
+				// Simulate slow tool execution
+				await Task.Delay(200);
+				handler.ProcessToolResult("call-1", "Sunny, 72°F");
+				await Task.Delay(30);
+				handler.ProcessContent("The weather in Boston is sunny and 72°F.");
+				handler.Complete();
+			});
+
+			using var cts = new CancellationTokenSource(TimeSpan.FromSeconds(5));
+
+			var updates = new List<Microsoft.Extensions.AI.ChatResponseUpdate>();
+			await foreach (var update in handler.ReadAllAsync(cts.Token))
+			{
+				updates.Add(update);
+			}
+
+			await writeTask;
+
+			// Should have content, tool call, tool result, and final content
+			Assert.True(updates.Count >= 4, $"Expected at least 4 updates, got {updates.Count}");
+
+			var hasToolCall = updates.Any(u => u.Contents.OfType<Microsoft.Extensions.AI.FunctionCallContent>().Any());
+			var hasToolResult = updates.Any(u => u.Contents.OfType<Microsoft.Extensions.AI.FunctionResultContent>().Any());
+			Assert.True(hasToolCall, "Expected a tool call update");
+			Assert.True(hasToolResult, "Expected a tool result update");
+		}
+
+		[Fact]
+		public async Task NonStreamingHandler_NeverCompleted_TaskDoesNotCompleteWithinTimeout()
+		{
+			// Verifies that a NonStreamingResponseHandler that is never completed
+			// will not resolve its Task — and can be observed via a timeout.
+			var handler = new NonStreamingResponseHandler();
+
+			var completed = await Task.WhenAny(handler.Task, Task.Delay(200));
+
+			Assert.NotSame(handler.Task, completed);
+			Assert.False(handler.Task.IsCompleted, "Handler task should not complete when nothing calls Complete/CompleteWithError/CompleteCancelled");
+		}
+
+		[Fact]
+		public async Task NonStreamingHandler_SlowCompletion_TaskCompletesEventually()
+		{
+			// Verifies that a NonStreamingResponseHandler completes when Complete() is eventually called.
+			var handler = new NonStreamingResponseHandler();
+
+			_ = Task.Run(async () =>
+			{
+				await Task.Delay(200);
+				handler.Complete(new Microsoft.Extensions.AI.ChatResponse(
+					[new Microsoft.Extensions.AI.ChatMessage(Microsoft.Extensions.AI.ChatRole.Assistant, "Done")]));
+			});
+
+			using var cts = new CancellationTokenSource(TimeSpan.FromSeconds(5));
+			var result = await handler.Task.WaitAsync(cts.Token);
+
+			Assert.Equal("Done", result.Messages.First().Text);
+		}
+	}
+}
diff --git a/src/AI/tests/Essentials.AI.UnitTests/Tests/StreamingResponseHandlerTests/ToolResultEdgeCases.cs b/src/AI/tests/Essentials.AI.UnitTests/Tests/StreamingResponseHandlerTests/ToolResultEdgeCases.cs
new file mode 100644
index 000000000000..44e2b93eb582
--- /dev/null
+++ b/src/AI/tests/Essentials.AI.UnitTests/Tests/StreamingResponseHandlerTests/ToolResultEdgeCases.cs
@@ -0,0 +1,107 @@
+using Microsoft.Extensions.AI;
+using Xunit;
+
+namespace Microsoft.Maui.Essentials.AI.UnitTests;
+
+public partial class StreamingResponseHandlerTests
+{
+	/// <summary>
+	/// Tests for edge cases in tool result processing (empty, null, whitespace).
+	/// </summary>
+	public class ToolResultEdgeCaseTests
+	{
+		[Fact]
+		public async Task ProcessToolResult_EmptyString_EmitsWithEmptyResult()
+		{
+			var handler = new StreamingResponseHandler(new PlainTextStreamChunker());
+
+			handler.ProcessToolResult("call-1", string.Empty);
+			handler.Complete();
+
+			var updates = await ReadAll(handler);
+
+			Assert.Single(updates);
+			Assert.Equal(ChatRole.Tool, updates[0].Role);
+			var fr = Assert.Single(updates[0].Contents.OfType<FunctionResultContent>());
+			Assert.Equal("call-1", fr.CallId);
+			Assert.Equal(string.Empty, fr.Result?.ToString());
+		}
+
+		[Fact]
+		public async Task ProcessToolResult_NullResult_EmitsWithNullResult()
+		{
+			var handler = new StreamingResponseHandler(new PlainTextStreamChunker());
+
+			handler.ProcessToolResult("call-1", null);
+			handler.Complete();
+
+			var updates = await ReadAll(handler);
+
+			Assert.Single(updates);
+			Assert.Equal(ChatRole.Tool, updates[0].Role);
+
+			// The null-forgiving operator in ProcessToolResult converts null to null!
+			// FunctionResultContent should still be created
+			var fr = Assert.Single(updates[0].Contents.OfType<FunctionResultContent>());
+			Assert.Equal("call-1", fr.CallId);
+		}
+
+		[Fact]
+		public async Task ProcessToolResult_WhitespaceResult_EmitsWithWhitespace()
+		{
+			var handler = new StreamingResponseHandler(new PlainTextStreamChunker());
+
+			handler.ProcessToolResult("call-1", "   ");
+			handler.Complete();
+
+			var updates = await ReadAll(handler);
+
+			Assert.Single(updates);
+			var fr = Assert.Single(updates[0].Contents.OfType<FunctionResultContent>());
+			Assert.Equal("   ", fr.Result?.ToString());
+		}
+
+		[Fact]
+		public async Task ContentAfterEmptyToolResult_ContinuesNormally()
+		{
+			var handler = new StreamingResponseHandler(new PlainTextStreamChunker());
+
+			handler.ProcessToolCall("call-1", "GetWeather", null);
+			handler.ProcessToolResult("call-1", string.Empty);
+			handler.ProcessContent("No weather data available");
+			handler.Complete();
+
+			var updates = await ReadAll(handler);
+
+			Assert.Equal(3, updates.Count);
+			Assert.True(updates[0].Contents.OfType<FunctionCallContent>().Any());
+			Assert.True(updates[1].Contents.OfType<FunctionResultContent>().Any());
+			Assert.Equal("No weather data available", updates[2].Contents.OfType<TextContent>().Single().Text);
+		}
+
+		[Fact]
+		public async Task MultipleToolResults_MixedEmptyAndPopulated_AllEmitted()
+		{
+			var handler = new StreamingResponseHandler(new PlainTextStreamChunker());
+
+			handler.ProcessToolCall("call-1", "GetWeather", null);
+			handler.ProcessToolResult("call-1", "Sunny");
+			handler.ProcessToolCall("call-2", "GetTime", null);
+			handler.ProcessToolResult("call-2", string.Empty);
+			handler.Complete();
+
+			var updates = await ReadAll(handler);
+
+			var toolResults = updates.Where(u => u.Role == ChatRole.Tool).ToList();
+			Assert.Equal(2, toolResults.Count);
+
+			var fr1 = toolResults[0].Contents.OfType<FunctionResultContent>().Single();
+			Assert.Equal("call-1", fr1.CallId);
+			Assert.Equal("Sunny", fr1.Result?.ToString());
+
+			var fr2 = toolResults[1].Contents.OfType<FunctionResultContent>().Single();
+			Assert.Equal("call-2", fr2.CallId);
+			Assert.Equal(string.Empty, fr2.Result?.ToString());
+		}
+	}
+}
diff --git a/src/AI/tests/Essentials.AI.UnitTests/Tests/StreamingResponseHandlerTests/Tools.cs b/src/AI/tests/Essentials.AI.UnitTests/Tests/StreamingResponseHandlerTests/Tools.cs
new file mode 100644
index 000000000000..cdea4da0f601
--- /dev/null
+++ b/src/AI/tests/Essentials.AI.UnitTests/Tests/StreamingResponseHandlerTests/Tools.cs
@@ -0,0 +1,148 @@
+using Microsoft.Extensions.AI;
+using Xunit;
+
+namespace Microsoft.Maui.Essentials.AI.UnitTests;
+
+public partial class StreamingResponseHandlerTests
+{
+	/// <summary>
+	/// Tests for <see cref="StreamingResponseHandler.ProcessToolCall"/> and
+	/// <see cref="StreamingResponseHandler.ProcessToolResult"/>.
+	/// </summary>
+	public class ToolTests
+	{
+		[Fact]
+		public async Task ProcessToolCall_EmitsFunctionCallContent()
+		{
+			var handler = new StreamingResponseHandler(new PlainTextStreamChunker());
+
+			handler.ProcessToolCall("call-1", "GetWeather", "{\"location\":\"Boston\"}");
+			handler.Complete();
+
+			var updates = await ReadAll(handler);
+
+			Assert.Single(updates);
+			Assert.Equal(ChatRole.Assistant, updates[0].Role);
+			var fc = Assert.Single(updates[0].Contents.OfType<FunctionCallContent>());
+			Assert.Equal("call-1", fc.CallId);
+			Assert.Equal("GetWeather", fc.Name);
+			Assert.NotNull(fc.Arguments);
+			Assert.Equal("Boston", fc.Arguments["location"]?.ToString());
+			Assert.True(fc.InformationalOnly, "FunctionCallContent should have InformationalOnly=true");
+		}
+
+		[Fact]
+		public async Task ProcessToolResult_EmitsWithToolRole()
+		{
+			var handler = new StreamingResponseHandler(new PlainTextStreamChunker());
+
+			handler.ProcessToolResult("call-1", "Sunny, 72°F");
+			handler.Complete();
+
+			var updates = await ReadAll(handler);
+
+			Assert.Single(updates);
+			Assert.Equal(ChatRole.Tool, updates[0].Role);
+			var fr = Assert.Single(updates[0].Contents.OfType<FunctionResultContent>());
+			Assert.Equal("call-1", fr.CallId);
+			Assert.Equal("Sunny, 72°F", fr.Result?.ToString());
+		}
+
+		[Fact]
+		public async Task ProcessToolCall_AfterContent_FlushesContentFirst()
+		{
+			var handler = new StreamingResponseHandler(new PlainTextStreamChunker());
+
+			handler.ProcessContent("Let me check");
+			handler.ProcessToolCall("call-1", "GetWeather", "{\"location\":\"Boston\"}");
+			handler.Complete();
+
+			var updates = await ReadAll(handler);
+
+			Assert.Equal(2, updates.Count);
+
+			var textIndex = updates.FindIndex(u => u.Contents.OfType<TextContent>().Any());
+			var toolIndex = updates.FindIndex(u => u.Contents.OfType<FunctionCallContent>().Any());
+			Assert.True(textIndex < toolIndex, "Text content should be emitted before tool call");
+		}
+
+		[Fact]
+		public async Task ProcessToolCall_MalformedJson_Throws()
+		{
+			var handler = new StreamingResponseHandler(new PlainTextStreamChunker());
+
+			var ex = Assert.Throws<System.Text.Json.JsonException>(() =>
+				handler.ProcessToolCall("call-1", "GetWeather", "not valid json {"));
+
+			handler.CompleteWithError(ex);
+
+			await Assert.ThrowsAsync<System.Text.Json.JsonException>(async () => await ReadAll(handler));
+		}
+
+		[Fact]
+		public async Task ContentAfterToolResult_StartsNewStream()
+		{
+			var handler = new StreamingResponseHandler(new PlainTextStreamChunker());
+
+			handler.ProcessToolCall("call-1", "GetWeather", "{\"location\":\"Boston\"}");
+			handler.ProcessToolResult("call-1", "Sunny");
+			handler.ProcessContent("The weather is sunny");
+			handler.Complete();
+
+			var updates = await ReadAll(handler);
+
+			Assert.Equal(3, updates.Count);
+			Assert.True(updates[0].Contents.OfType<FunctionCallContent>().Any());
+			Assert.True(updates[1].Contents.OfType<FunctionResultContent>().Any());
+			Assert.Equal("The weather is sunny", updates[2].Contents.OfType<TextContent>().Single().Text);
+		}
+
+		[Fact]
+		public async Task ProcessToolCall_NullArguments_EmitsWithNullArgs()
+		{
+			var handler = new StreamingResponseHandler(new PlainTextStreamChunker());
+
+			handler.ProcessToolCall("call-1", "GetWeather", null);
+			handler.Complete();
+
+			var updates = await ReadAll(handler);
+
+			Assert.Single(updates);
+			var fc = Assert.Single(updates[0].Contents.OfType<FunctionCallContent>());
+			Assert.Equal("call-1", fc.CallId);
+			Assert.Equal("GetWeather", fc.Name);
+			Assert.Null(fc.Arguments);
+		}
+
+		[Fact]
+		public async Task ProcessToolCall_WithJsonChunker_FlushesPartialJsonBeforeToolCall()
+		{
+			// Use JsonStreamChunker — after processing progressive JSON, a tool call
+			// should flush the pending content before emitting the tool call.
+			var handler = new StreamingResponseHandler(new JsonStreamChunker());
+
+			// Feed progressive complete JSON snapshots
+			handler.ProcessContent("{\"greeting\":\"Hello\"}");
+			handler.ProcessContent("{\"greeting\":\"Hello world\"}");
+
+			// Now a tool call arrives — should flush pending JSON content first
+			handler.ProcessToolCall("call-1", "GetWeather", "{\"location\":\"Boston\"}");
+			handler.Complete();
+
+			var updates = await ReadAll(handler);
+
+			// Should have tool call update at minimum
+			var toolUpdates = updates.Where(u => u.Contents.OfType<FunctionCallContent>().Any()).ToList();
+			Assert.Single(toolUpdates);
+
+			// If there were flushed text updates, they should come before the tool call
+			var textUpdates = updates.Where(u => u.Contents.OfType<TextContent>().Any()).ToList();
+			if (textUpdates.Count > 0)
+			{
+				var textIndex = updates.IndexOf(textUpdates.First());
+				var toolIndex = updates.IndexOf(toolUpdates.First());
+				Assert.True(textIndex < toolIndex, "Flushed text content should precede tool call");
+			}
+		}
+	}
+}
diff --git a/src/AI/tests/Essentials.AI.UnitTests/Tests/ToolCallLoggingTests/InformationalOnlyToolCallLoggingTests.cs b/src/AI/tests/Essentials.AI.UnitTests/Tests/ToolCallLoggingTests/InformationalOnlyToolCallLoggingTests.cs
new file mode 100644
index 000000000000..168123af7576
--- /dev/null
+++ b/src/AI/tests/Essentials.AI.UnitTests/Tests/ToolCallLoggingTests/InformationalOnlyToolCallLoggingTests.cs
@@ -0,0 +1,133 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using Microsoft.Extensions.AI;
+using Microsoft.Extensions.Logging;
+using Xunit;
+using static Microsoft.Maui.Essentials.AI.UnitTests.ToolCallLoggingHelpers;
+
+namespace Microsoft.Maui.Essentials.AI.UnitTests;
+
+/// <summary>
+/// Tests the pipeline with InformationalOnly=true tool calls (our pattern).
+/// The native Apple Intelligence model invokes tools itself; FICC sees
+/// InformationalOnly and skips re-invocation. LoggingChatClient still
+/// serializes the full response at Trace level.
+/// Pipeline: MockClient → FunctionInvokingChatClient → LoggingChatClient
+/// </summary>
+public class InformationalOnlyToolCallLoggingTests
+{
+	[Fact]
+	public async Task Trace_LogsFullToolCallContent()
+	{
+		var (pipeline, logs, options) = BuildPipeline(LogLevel.Trace, informationalOnly: true);
+
+		await pipeline.GetResponseAsync([new ChatMessage(ChatRole.User, "weather?")], options);
+
+		var allLogs = CombineLogs(logs);
+
+		// LoggingChatClient serializes full response at Trace — tool details visible
+		Assert.Contains("GetWeather", allLogs, StringComparison.Ordinal);
+		Assert.Contains("call-1", allLogs, StringComparison.Ordinal);
+		Assert.Contains("Seattle", allLogs, StringComparison.Ordinal);
+		Assert.Contains("Sunny, 72", allLogs, StringComparison.Ordinal);
+
+		// FICC does NOT log "Invoking" because InformationalOnly=true → skipped
+		Assert.DoesNotContain("Invoking GetWeather", allLogs, StringComparison.Ordinal);
+	}
+
+	[Fact]
+	public async Task Debug_LogsLifecycleOnly()
+	{
+		var (pipeline, logs, options) = BuildPipeline(LogLevel.Debug, informationalOnly: true);
+
+		await pipeline.GetResponseAsync([new ChatMessage(ChatRole.User, "weather?")], options);
+
+		var debugMessages = logs.Entries.Where(e => e.Level == LogLevel.Debug).Select(e => e.Message).ToList();
+
+		// LoggingChatClient lifecycle at Debug
+		Assert.Contains(debugMessages, m => m.Contains("GetResponseAsync invoked", StringComparison.Ordinal));
+		Assert.Contains(debugMessages, m => m.Contains("GetResponseAsync completed", StringComparison.Ordinal));
+
+		// No content details at Debug — tool names, args, results are Trace-only
+		var allLogs = CombineLogs(logs);
+		Assert.DoesNotContain("GetWeather", allLogs, StringComparison.Ordinal);
+		Assert.DoesNotContain("Seattle", allLogs, StringComparison.Ordinal);
+	}
+
+	[Fact]
+	public async Task Information_NoLogsEmitted()
+	{
+		var (pipeline, logs, options) = BuildPipeline(LogLevel.Information, informationalOnly: true);
+
+		await pipeline.GetResponseAsync([new ChatMessage(ChatRole.User, "weather?")], options);
+
+		// Both LoggingChatClient and FICC only log at Debug/Trace — nothing at Information+
+		Assert.Empty(logs.Entries);
+	}
+
+	[Fact]
+	public async Task Streaming_Trace_LogsEachUpdate()
+	{
+		var (pipeline, logs, options) = BuildPipeline(LogLevel.Trace, informationalOnly: true);
+
+		await foreach (var _ in pipeline.GetStreamingResponseAsync([new ChatMessage(ChatRole.User, "weather?")], options))
+		{
+		}
+
+		var traceLogs = logs.Entries.Where(e => e.Level == LogLevel.Trace).Select(e => e.Message).ToList();
+		Assert.Contains(traceLogs, m => m.Contains("received update", StringComparison.Ordinal));
+
+		var allTrace = string.Join("\n", traceLogs);
+		Assert.Contains("GetWeather", allTrace, StringComparison.Ordinal);
+		Assert.Contains("call-1", allTrace, StringComparison.Ordinal);
+	}
+
+	[Fact]
+	public async Task MultipleFunctionCalls_AllLoggedAtTrace()
+	{
+		var logs = new LogCollector(LogLevel.Trace);
+		var loggerFactory = new SingleLoggerFactory(logs);
+
+		var mockClient = new MockToolCallClient(informationalOnly: true);
+		mockClient.AddFunctionCallContent("GetWeather", "call-1",
+			new Dictionary<string, object?> { ["location"] = "NYC" });
+		mockClient.AddFunctionCallContent("GetTime", "call-2",
+			new Dictionary<string, object?> { ["timezone"] = "EST" });
+		mockClient.AddFunctionResultContent("call-1", "Cloudy");
+		mockClient.AddFunctionResultContent("call-2", "3:00 PM");
+		mockClient.AddTextContent("Done.");
+
+		using var pipeline = new ChatClientBuilder(mockClient)
+			.UseFunctionInvocation(loggerFactory)
+			.UseLogging(loggerFactory)
+			.Build();
+
+		await pipeline.GetResponseAsync([new ChatMessage(ChatRole.User, "info?")]);
+
+		var allLogs = CombineLogs(logs);
+		Assert.Contains("GetWeather", allLogs, StringComparison.Ordinal);
+		Assert.Contains("GetTime", allLogs, StringComparison.Ordinal);
+		Assert.Contains("NYC", allLogs, StringComparison.Ordinal);
+		Assert.Contains("EST", allLogs, StringComparison.Ordinal);
+	}
+
+	[Fact]
+	public async Task NoTools_NoFunctionContentInLogs()
+	{
+		var logs = new LogCollector(LogLevel.Trace);
+		var loggerFactory = new SingleLoggerFactory(logs);
+		var mockClient = new MockToolCallClient(informationalOnly: true);
+		mockClient.AddTextContent("Hello there!");
+
+		using var pipeline = new ChatClientBuilder(mockClient)
+			.UseLogging(loggerFactory)
+			.Build();
+
+		await pipeline.GetResponseAsync([new ChatMessage(ChatRole.User, "hi")]);
+
+		var allLogs = CombineLogs(logs);
+		Assert.DoesNotContain("FunctionCallContent", allLogs, StringComparison.Ordinal);
+		Assert.DoesNotContain("FunctionResultContent", allLogs, StringComparison.Ordinal);
+	}
+}
diff --git a/src/AI/tests/Essentials.AI.UnitTests/Tests/ToolCallLoggingTests/InvocableToolCallLoggingTests.cs b/src/AI/tests/Essentials.AI.UnitTests/Tests/ToolCallLoggingTests/InvocableToolCallLoggingTests.cs
new file mode 100644
index 000000000000..f8b3efa4f7f9
--- /dev/null
+++ b/src/AI/tests/Essentials.AI.UnitTests/Tests/ToolCallLoggingTests/InvocableToolCallLoggingTests.cs
@@ -0,0 +1,66 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using Microsoft.Extensions.AI;
+using Microsoft.Extensions.Logging;
+using Xunit;
+using static Microsoft.Maui.Essentials.AI.UnitTests.ToolCallLoggingHelpers;
+
+namespace Microsoft.Maui.Essentials.AI.UnitTests;
+
+/// <summary>
+/// Tests the pipeline with normal (invocable) tool calls.
+/// FunctionInvokingChatClient detects the FunctionCallContent, invokes the
+/// matching tool, and logs the invocation at Debug/Trace.
+/// Pipeline: MockClient → FunctionInvokingChatClient → LoggingChatClient
+/// </summary>
+public class InvocableToolCallLoggingTests
+{
+ [Fact]
+ public async Task Trace_FICCLogsInvocationWithArguments()
+ {
+ var (pipeline, logs, options) = BuildPipeline(LogLevel.Trace, informationalOnly: false);
+
+ await pipeline.GetResponseAsync([new ChatMessage(ChatRole.User, "weather?")], options);
+
+ var allLogs = CombineLogs(logs);
+
+ // FICC logs "Invoking GetWeather({arguments})" at Trace
+ Assert.Contains("Invoking GetWeather", allLogs, StringComparison.Ordinal);
+
+ // FICC logs "GetWeather invocation completed. Duration: ..." at Debug (captured at Trace min)
+ Assert.Contains("invocation completed", allLogs, StringComparison.Ordinal);
+ Assert.Contains("Duration", allLogs, StringComparison.Ordinal);
+ }
+
+ [Fact]
+ public async Task Debug_FICCLogsInvocationWithDuration()
+ {
+ var (pipeline, logs, options) = BuildPipeline(LogLevel.Debug, informationalOnly: false);
+
+ await pipeline.GetResponseAsync([new ChatMessage(ChatRole.User, "weather?")], options);
+
+ var debugMessages = logs.Entries.Where(e => e.Level == LogLevel.Debug).Select(e => e.Message).ToList();
+
+ // FICC logs "GetWeather invocation completed. Duration: ..." at Debug
+ Assert.Contains(debugMessages, m => m.Contains("GetWeather", StringComparison.Ordinal)
+ && m.Contains("invocation completed", StringComparison.Ordinal)
+ && m.Contains("Duration", StringComparison.Ordinal));
+
+ // LoggingChatClient lifecycle still present
+ Assert.Contains(debugMessages, m => m.Contains("GetResponseAsync invoked", StringComparison.Ordinal));
+ }
+
+ [Fact]
+ public async Task Streaming_Trace_FICCLogsInvocation()
+ {
+ var (pipeline, logs, options) = BuildPipeline(LogLevel.Trace, informationalOnly: false);
+
+ await foreach (var _ in pipeline.GetStreamingResponseAsync([new ChatMessage(ChatRole.User, "weather?")], options))
+ {
+ }
+
+ var allLogs = CombineLogs(logs);
+ Assert.Contains("Invoking GetWeather", allLogs, StringComparison.Ordinal);
+ }
+}
diff --git a/src/AI/tests/Essentials.AI.UnitTests/Tests/ToolCallLoggingTests/ToolCallLoggingHelpers.cs b/src/AI/tests/Essentials.AI.UnitTests/Tests/ToolCallLoggingTests/ToolCallLoggingHelpers.cs
new file mode 100644
index 000000000000..fac75f5cae4f
--- /dev/null
+++ b/src/AI/tests/Essentials.AI.UnitTests/Tests/ToolCallLoggingTests/ToolCallLoggingHelpers.cs
@@ -0,0 +1,166 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System.Runtime.CompilerServices;
+using Microsoft.Extensions.AI;
+using Microsoft.Extensions.Logging;
+
+namespace Microsoft.Maui.Essentials.AI.UnitTests;
+
+/// <summary>
+/// Shared infrastructure for tool call logging tests.
+/// </summary>
+internal static class ToolCallLoggingHelpers
+{
+	/// <summary>
+	/// Builds a pipeline: MockClient → FunctionInvokingChatClient → LoggingChatClient.
+	/// </summary>
+ public static (IChatClient Pipeline, LogCollector Logs, ChatOptions Options) BuildPipeline(
+ LogLevel level, bool informationalOnly)
+ {
+ var logs = new LogCollector(level);
+ var loggerFactory = new SingleLoggerFactory(logs);
+
+ var mockClient = new MockToolCallClient(informationalOnly);
+ mockClient.AddFunctionCallContent("GetWeather", "call-1",
+			new Dictionary<string, object?> { ["location"] = "Seattle" });
+ mockClient.AddFunctionResultContent("call-1", "Sunny, 72°F");
+ mockClient.AddTextContent("The weather is sunny.");
+
+ var pipeline = new ChatClientBuilder(mockClient)
+ .UseFunctionInvocation(loggerFactory)
+ .UseLogging(loggerFactory)
+ .Build();
+
+ // For invocable tools, register the tool in ChatOptions so FICC can find it
+ var options = new ChatOptions();
+ if (!informationalOnly)
+ {
+ options.Tools = [AIFunctionFactory.Create(
+ (string location) => $"Sunny, 72°F in {location}",
+ name: "GetWeather",
+ description: "Gets the weather")];
+ }
+
+ return (pipeline, logs, options);
+ }
+
+ public static string CombineLogs(LogCollector logs) =>
+ string.Join("\n", logs.Entries.Select(e => e.Message));
+}
+
+/// <summary>
+/// Collects log entries with minimum level filtering.
+/// </summary>
+internal class LogCollector : ILogger
+{
+ private readonly LogLevel _minimumLevel;
+
+ public LogCollector(LogLevel minimumLevel) => _minimumLevel = minimumLevel;
+
+	public List<LogEntry> Entries { get; } = [];
+
+	public IDisposable? BeginScope<TState>(TState state) where TState : notnull => null;
+ public bool IsEnabled(LogLevel logLevel) => logLevel >= _minimumLevel;
+
+	public void Log<TState>(LogLevel logLevel, EventId eventId, TState state, Exception? exception, Func<TState, Exception?, string> formatter)
+ {
+ if (IsEnabled(logLevel))
+ Entries.Add(new LogEntry(logLevel, formatter(state, exception)));
+ }
+}
+
+internal record LogEntry(LogLevel Level, string Message);
+
+/// <summary>
+/// Minimal ILoggerFactory that returns a single shared logger instance.
+/// Required by FunctionInvokingChatClient constructor.
+/// </summary>
+internal class SingleLoggerFactory : ILoggerFactory
+{
+ private readonly ILogger _logger;
+ public SingleLoggerFactory(ILogger logger) => _logger = logger;
+ public ILogger CreateLogger(string categoryName) => _logger;
+ public void AddProvider(ILoggerProvider provider) { }
+ public void Dispose() { }
+}
+
+/// <summary>
+/// Mock chat client that returns predefined content.
+/// When informationalOnly=false, FunctionCallContent is invocable by FICC,
+/// and a matching tool is registered in ChatOptions.
+/// </summary>
+internal class MockToolCallClient : IChatClient
+{
+ private readonly bool _informationalOnly;
+	private readonly List<AIContent> _content = [];
+
+ public MockToolCallClient(bool informationalOnly) => _informationalOnly = informationalOnly;
+
+ public ChatClientMetadata Metadata => new("MockToolCallClient");
+
+ public void AddTextContent(string text) =>
+ _content.Add(new TextContent(text));
+
+	public void AddFunctionCallContent(string name, string callId, Dictionary<string, object?>? arguments = null) =>
+ _content.Add(new FunctionCallContent(callId, name, arguments) { InformationalOnly = _informationalOnly });
+
+ public void AddFunctionResultContent(string callId, object? result) =>
+ _content.Add(new FunctionResultContent(callId, result));
+
+	public Task<ChatResponse> GetResponseAsync(
+		IEnumerable<ChatMessage> messages, ChatOptions? options = null, CancellationToken cancellationToken = default)
+ {
+ // When FICC invokes tools and re-calls us, return just the text
+ if (messages.Any(m => m.Role == ChatRole.Tool))
+ return Task.FromResult(new ChatResponse([new ChatMessage(ChatRole.Assistant, "Weather result processed.")]));
+
+		var responseMessages = new List<ChatMessage>();
+		var currentContents = new List<AIContent>();
+
+ foreach (var content in _content)
+ {
+ if (content is FunctionResultContent)
+ {
+ if (currentContents.Count > 0)
+ {
+ responseMessages.Add(new ChatMessage(ChatRole.Assistant, [.. currentContents]));
+ currentContents.Clear();
+ }
+ responseMessages.Add(new ChatMessage(ChatRole.Tool, [content]));
+ }
+ else
+ {
+ currentContents.Add(content);
+ }
+ }
+
+ if (currentContents.Count > 0)
+ responseMessages.Add(new ChatMessage(ChatRole.Assistant, [.. currentContents]));
+
+ return Task.FromResult(new ChatResponse(responseMessages));
+ }
+
+	public async IAsyncEnumerable<ChatResponseUpdate> GetStreamingResponseAsync(
+		IEnumerable<ChatMessage> messages, ChatOptions? options = null,
+ [EnumeratorCancellation] CancellationToken cancellationToken = default)
+ {
+ // When FICC re-calls after invocation, return processed text
+ if (messages.Any(m => m.Role == ChatRole.Tool))
+ {
+ yield return new ChatResponseUpdate { Role = ChatRole.Assistant, Contents = [new TextContent("Weather result processed.")] };
+ yield break;
+ }
+
+ foreach (var content in _content)
+ {
+ await Task.Yield();
+ var role = content is FunctionResultContent ? ChatRole.Tool : ChatRole.Assistant;
+ yield return new ChatResponseUpdate { Role = role, Contents = [content] };
+ }
+ }
+
+ public object? GetService(Type serviceType, object? serviceKey = null) => null;
+	public TService? GetService<TService>(object? key = null) where TService : class => null;
+ public void Dispose() { }
+}