diff --git a/dotnet/agent-framework-dotnet.slnx b/dotnet/agent-framework-dotnet.slnx
index 002efdbab1..1bbb31cf15 100644
--- a/dotnet/agent-framework-dotnet.slnx
+++ b/dotnet/agent-framework-dotnet.slnx
@@ -171,6 +171,7 @@
+    <Project Path="samples/GettingStarted/Observability/AIClientHttpTrafficTracing/AIClientHttpTrafficTracing.csproj" />
diff --git a/dotnet/samples/GettingStarted/Observability/AIClientHttpTrafficTracing/AIClientHttpTrafficTracing.csproj b/dotnet/samples/GettingStarted/Observability/AIClientHttpTrafficTracing/AIClientHttpTrafficTracing.csproj
new file mode 100644
index 0000000000..1055397f4d
--- /dev/null
+++ b/dotnet/samples/GettingStarted/Observability/AIClientHttpTrafficTracing/AIClientHttpTrafficTracing.csproj
@@ -0,0 +1,22 @@
+<Project Sdk="Microsoft.NET.Sdk">
+
+  <PropertyGroup>
+    <OutputType>Exe</OutputType>
+    <TargetFramework>net10.0</TargetFramework>
+    <ImplicitUsings>enable</ImplicitUsings>
+    <Nullable>enable</Nullable>
+  </PropertyGroup>
+
+
+
+
+
+
+
+
+
+
+
+
+
+</Project>
diff --git a/dotnet/samples/GettingStarted/Observability/AIClientHttpTrafficTracing/Program.cs b/dotnet/samples/GettingStarted/Observability/AIClientHttpTrafficTracing/Program.cs
new file mode 100644
index 0000000000..7dd536486c
--- /dev/null
+++ b/dotnet/samples/GettingStarted/Observability/AIClientHttpTrafficTracing/Program.cs
@@ -0,0 +1,81 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+// This sample shows how to enable **HTTP request/response logging** for LLM calls (including request/response bodies) for any AI client built on the System.ClientModel pipeline (e.g. AzureOpenAIClient or OpenAIClient).
+
+using System.ClientModel.Primitives;
+using Azure.AI.OpenAI;
+using Azure.Identity;
+using Microsoft.Agents.AI;
+using Microsoft.Extensions.AI;
+using Microsoft.Extensions.DependencyInjection;
+using Microsoft.Extensions.Logging;
+
+string endpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? throw new InvalidOperationException("AZURE_OPENAI_ENDPOINT environment variable is not set.");
+string deploymentName = Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOYMENT_NAME") ?? "gpt-4o-mini";
+
+ServiceCollection services = new();
+services.AddLogging(loggingBuilder =>
+{
+ loggingBuilder.AddConsole();
+ loggingBuilder.AddFilter("System.ClientModel.Primitives.MessageLoggingPolicy", LogLevel.Debug); // For Request and Response body logging we need to set Debug level
+ /* If used in ASP.NET Core, with appsettings then this can be configured in appsettings.json as:
+ {
+ "Logging": {
+ "LogLevel": {
+ "Default": "Information",
+ "Microsoft.AspNetCore": "Warning",
+ "System.ClientModel.Primitives.MessageLoggingPolicy": "Debug"
+ }
+ }
+ }
+ */
+});
+
+services.AddChatClient(provider =>
+{
+ var clientLoggingOptions = new ClientLoggingOptions
+ {
+ EnableLogging = true, // Enable logging overall
+ EnableMessageContentLogging = true, // Enable request and response body logging
+ MessageContentSizeLimit = 5000, // Limit size of logged content. If Null or Not set, then default value will be 4 * 1024 characters
+ EnableMessageLogging = true, // Logging the Request and Response Url and Header information. If Null or Not set, then default value will be true
+ LoggerFactory = provider.GetRequiredService<ILoggerFactory>()
+ };
+ // WARNING: Do NOT log sensitive headers such as "Authorization" in production or shared environments.
+ // By default, sensitive headers are REDACTED. The following example shows how to override this behavior
+ // for controlled, non-production testing only.
+ clientLoggingOptions.AllowedHeaderNames.Add("Authorization");
+
+ /* Switch to OpenAI Compatible SDK using below code
+ var clientOptions = new OpenAIClientOptions()
+ {
+ Endpoint = new Uri("https://endpoint"),
+ ClientLoggingOptions = clientLoggingOptions
+ };
+ new OpenAIClient(new ApiKeyCredential(""), clientOptions)
+ .GetChatClient("modelName")
+ .AsIChatClient();
+ */
+
+ return new AzureOpenAIClient(new Uri(endpoint), new AzureCliCredential(), new AzureOpenAIClientOptions() // Use OpenAIClientOptions of OpenAIClient, similar options for other clients
+ {
+ ClientLoggingOptions = clientLoggingOptions
+ })
+ .GetChatClient(deploymentName)
+ .AsIChatClient();
+});
+
+ServiceProvider serviceProvider = services.BuildServiceProvider();
+
+IChatClient chatClient = serviceProvider.GetRequiredService<IChatClient>();
+ChatClientAgent pirateAssistant = chatClient.CreateAIAgent("You are a pirate assistant. Answer questions in short pirate speak.");
+
+string userInput = "Who are you?";
+Console.WriteLine($"You: {userInput}\n");
+AgentRunResponse response = await pirateAssistant.RunAsync(userInput);
+Console.WriteLine($"\nPirate Assistant: {response}");
+
+/*await foreach (var item in pirateAssistant.RunStreamingAsync(userInput)) // For Streaming responses (RunStreamingAsync), there will be multiple log entries
+{
+ Console.Write(item);
+}*/
diff --git a/dotnet/samples/GettingStarted/Observability/AIClientHttpTrafficTracing/README.md b/dotnet/samples/GettingStarted/Observability/AIClientHttpTrafficTracing/README.md
new file mode 100644
index 0000000000..91f25eca9c
--- /dev/null
+++ b/dotnet/samples/GettingStarted/Observability/AIClientHttpTrafficTracing/README.md
@@ -0,0 +1,78 @@
+# AIClient HTTP Traffic Tracing
+
+This sample shows how to enable **HTTP request/response logging** for LLM calls (including request/response bodies) for any AI client built on the System.ClientModel pipeline (for example, `AzureOpenAIClient` or `OpenAIClient`).
+
+It uses the `ClientLoggingOptions` pipeline to print HTTP details to the `ILogger` so you can troubleshoot prompts, headers, and responses.
+
+## Prerequisites
+
+- Azure CLI login (this sample uses `AzureCliCredential`):
+ - `az login`
+- Environment variables:
+ - `AZURE_OPENAI_ENDPOINT` (e.g. `https://{resource-name}.openai.azure.com/`)
+ - `AZURE_OPENAI_DEPLOYMENT_NAME` (optional; defaults to `gpt-4o-mini`)
+
+Switch to the OpenAI-compatible SDK using the code below:
+```csharp
+var clientOptions = new OpenAIClientOptions()
+{
+ Endpoint = new Uri("https://endpoint"),
+ ClientLoggingOptions = clientLoggingOptions
+};
+new OpenAIClient(new ApiKeyCredential(""), clientOptions)
+.GetChatClient("modelName")
+.AsIChatClient();
+```
+
+## Run
+
+From the repo root:
+
+```powershell
+cd dotnet/samples/GettingStarted/Observability/AIClientHttpTrafficTracing
+dotnet run
+```
+
+## Enable HTTP traffic logging
+
+This sample enables logging in two places:
+
+1. **Enable HTTP logging on the client**
+
+In [Program.cs](Program.cs), the sample configures:
+
+- `ClientLoggingOptions.EnableLogging = true`
+- `ClientLoggingOptions.EnableMessageLogging = true` (URL + headers + query parameters)
+- `ClientLoggingOptions.EnableMessageContentLogging = true` (request/response bodies)
+- `ClientLoggingOptions.MessageContentSizeLimit` to cap how much body content is written
+
+`ClientLoggingOptions` is a common pattern across SDK clients that expose these options (for example, via a `ClientLoggingOptions` property on client options like `AzureOpenAIClientOptions`).
+
+2. **Raise the log level to `Debug` only if you want request/response bodies**
+
+URL/headers/query parameter logging (step 1) is normally available at `Information` level and step 2 is not needed.
+
+Request/response *body* logging is emitted at `Debug` level by the underlying message logging policy. The sample sets:
+
+- `System.ClientModel.Primitives.MessageLoggingPolicy` → `Debug`
+
+## Security notes
+
+- Logging bodies can include sensitive prompt/response data. Use only in dev/test.
+- Headers like `Authorization` are **redacted by default**. While it is technically possible to allow logging a sensitive header (for example, via `clientLoggingOptions.AllowedHeaderNames.Add("Authorization")`), **do not enable this in production or long-lived environments**. If you must temporarily log such headers for debugging, do so only in tightly controlled, short-lived sessions, treat the logs as secrets, and securely delete them immediately after use.
+
+## Using ASP.NET Core configuration
+
+If you’re using ASP.NET Core, you can set the log level in `appsettings.json` instead of calling `AddFilter`, for example:
+
+```json
+{
+ "Logging": {
+ "LogLevel": {
+ "Default": "Information",
+ "Microsoft.AspNetCore": "Warning",
+ "System.ClientModel.Primitives.MessageLoggingPolicy": "Debug"
+ }
+ }
+}
+```