Skip to content

Commit 8aff8cb

Browse files
authored
.Net: Updated OpenTelemetry GenAI attributes (#11630)
### Motivation and Context <!-- Thank you for your contribution to the semantic-kernel repo! Please help reviewers and future users, providing the following information: 1. Why is this change required? 2. What problem does it solve? 3. What scenario does it contribute to? 4. If it fixes an open issue, please link to the issue here. --> Updated OpenTelemetry GenAI attributes to the latest version: https://opentelemetry.io/docs/specs/semconv/gen-ai/gen-ai-spans/ ### Contribution Checklist <!-- Before submitting this PR, please make sure: --> - [x] The code builds clean without any errors or warnings - [x] The PR follows the [SK Contribution Guidelines](https://github.com/microsoft/semantic-kernel/blob/main/CONTRIBUTING.md) and the [pre-submission formatting script](https://github.com/microsoft/semantic-kernel/blob/main/CONTRIBUTING.md#development-scripts) raises no violations - [x] All unit tests pass, and I have added new tests where possible - [x] I didn't break anyone 😄
1 parent 2a412a1 commit 8aff8cb

File tree

4 files changed

+28
-29
lines changed

4 files changed

+28
-29
lines changed

dotnet/src/Connectors/Connectors.Amazon/Bedrock/Core/Clients/BedrockChatCompletionClient.cs

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -76,8 +76,8 @@ internal async Task<IReadOnlyList<ChatMessageContent>> GenerateChatMessageAsync(
7676
{
7777
activityStatus = BedrockClientUtilities.ConvertHttpStatusCodeToActivityStatusCode(response.HttpStatusCode);
7878
activity.SetStatus(activityStatus);
79-
activity.SetPromptTokenUsage(response?.Usage?.InputTokens ?? default);
80-
activity.SetCompletionTokenUsage(response?.Usage?.OutputTokens ?? default);
79+
activity.SetInputTokensUsage(response?.Usage?.InputTokens ?? default);
80+
activity.SetOutputTokensUsage(response?.Usage?.OutputTokens ?? default);
8181
}
8282
}
8383
catch (Exception ex)
@@ -90,8 +90,8 @@ internal async Task<IReadOnlyList<ChatMessageContent>> GenerateChatMessageAsync(
9090
{
9191
activityStatus = BedrockClientUtilities.ConvertHttpStatusCodeToActivityStatusCode(response.HttpStatusCode);
9292
activity.SetStatus(activityStatus);
93-
activity.SetPromptTokenUsage(response?.Usage?.InputTokens ?? default);
94-
activity.SetCompletionTokenUsage(response?.Usage?.OutputTokens ?? default);
93+
activity.SetInputTokensUsage(response?.Usage?.InputTokens ?? default);
94+
activity.SetOutputTokensUsage(response?.Usage?.OutputTokens ?? default);
9595
}
9696
else
9797
{

dotnet/src/Connectors/Connectors.MistralAI/Client/MistralClient.cs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -89,11 +89,11 @@ internal async Task<IReadOnlyList<ChatMessageContent>> GetChatMessageContentsAsy
8989
{
9090
if (usage.PromptTokens is int promptTokens)
9191
{
92-
activity.SetPromptTokenUsage(promptTokens);
92+
activity.SetInputTokensUsage(promptTokens);
9393
}
9494
if (usage.CompletionTokens is int completionTokens)
9595
{
96-
activity.SetCompletionTokenUsage(completionTokens);
96+
activity.SetOutputTokensUsage(completionTokens);
9797
}
9898
}
9999
}

dotnet/src/Connectors/Connectors.OpenAI/Core/ClientCore.ChatCompletion.cs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -180,8 +180,8 @@ internal async Task<IReadOnlyList<ChatMessageContent>> GetChatMessageContentsAsy
180180
// Capture available metadata even if the operation failed.
181181
activity
182182
.SetResponseId(chatCompletion.Id)
183-
.SetPromptTokenUsage(chatCompletion.Usage.InputTokenCount)
184-
.SetCompletionTokenUsage(chatCompletion.Usage.OutputTokenCount);
183+
.SetInputTokensUsage(chatCompletion.Usage.InputTokenCount)
184+
.SetOutputTokensUsage(chatCompletion.Usage.OutputTokenCount);
185185
}
186186

187187
throw;

dotnet/src/InternalUtilities/src/Diagnostics/ModelDiagnostics.cs

Lines changed: 20 additions & 21 deletions
Original file line numberDiff line numberDiff line change
@@ -133,20 +133,20 @@ internal static void EndStreaming(
133133
internal static Activity SetResponseId(this Activity activity, string responseId) => activity.SetTag(ModelDiagnosticsTags.ResponseId, responseId);
134134

135135
/// <summary>
136-
/// Set the prompt token usage for a given activity.
136+
/// Set the input tokens usage for a given activity.
137137
/// </summary>
138-
/// <param name="activity">The activity to set the prompt token usage</param>
139-
/// <param name="promptTokens">The number of prompt tokens used</param>
140-
/// <returns>The activity with the prompt token usage set for chaining</returns>
141-
internal static Activity SetPromptTokenUsage(this Activity activity, int promptTokens) => activity.SetTag(ModelDiagnosticsTags.PromptToken, promptTokens);
138+
/// <param name="activity">The activity to set the input tokens usage</param>
139+
/// <param name="inputTokens">The number of input tokens used</param>
140+
/// <returns>The activity with the input tokens usage set for chaining</returns>
141+
internal static Activity SetInputTokensUsage(this Activity activity, int inputTokens) => activity.SetTag(ModelDiagnosticsTags.InputTokens, inputTokens);
142142

143143
/// <summary>
144-
/// Set the completion token usage for a given activity.
144+
/// Set the output tokens usage for a given activity.
145145
/// </summary>
146-
/// <param name="activity">The activity to set the completion token usage</param>
147-
/// <param name="completionTokens">The number of completion tokens used</param>
148-
/// <returns>The activity with the completion token usage set for chaining</returns>
149-
internal static Activity SetCompletionTokenUsage(this Activity activity, int completionTokens) => activity.SetTag(ModelDiagnosticsTags.CompletionToken, completionTokens);
146+
/// <param name="activity">The activity to set the output tokens usage</param>
147+
/// <param name="outputTokens">The number of output tokens used</param>
148+
/// <returns>The activity with the output tokens usage set for chaining</returns>
149+
internal static Activity SetOutputTokensUsage(this Activity activity, int outputTokens) => activity.SetTag(ModelDiagnosticsTags.OutputTokens, outputTokens);
150150

151151
/// <summary>
152152
/// Check if model diagnostics is enabled
@@ -316,23 +316,23 @@ private static void ToOpenAIFormat(StringBuilder sb, ChatMessageContentItemColle
316316
private static void SetCompletionResponse<T>(
317317
Activity activity,
318318
T completions,
319-
int? promptTokens,
320-
int? completionTokens,
319+
int? inputTokens,
320+
int? outputTokens,
321321
Func<T, string> formatCompletions) where T : IEnumerable<KernelContent>
322322
{
323323
if (!IsModelDiagnosticsEnabled())
324324
{
325325
return;
326326
}
327327

328-
if (promptTokens != null)
328+
if (inputTokens != null)
329329
{
330-
activity.SetTag(ModelDiagnosticsTags.PromptToken, promptTokens);
330+
activity.SetTag(ModelDiagnosticsTags.InputTokens, inputTokens);
331331
}
332332

333-
if (completionTokens != null)
333+
if (outputTokens != null)
334334
{
335-
activity.SetTag(ModelDiagnosticsTags.CompletionToken, completionTokens);
335+
activity.SetTag(ModelDiagnosticsTags.OutputTokens, outputTokens);
336336
}
337337

338338
activity
@@ -409,7 +409,8 @@ private static Activity SetFinishReasons(this Activity activity, IEnumerable<Ker
409409

410410
if (finishReasons.Any())
411411
{
412-
activity.SetTag(ModelDiagnosticsTags.FinishReason, $"{string.Join(",", finishReasons)}");
412+
activity.SetTag(ModelDiagnosticsTags.FinishReason, $"[{string.Join(",",
413+
finishReasons.Select(finishReason => $"\"{finishReason}\""))}]");
413414
}
414415

415416
return activity;
@@ -466,10 +467,8 @@ private static class ModelDiagnosticsTags
466467
public const string ResponseId = "gen_ai.response.id";
467468
public const string ResponseModel = "gen_ai.response.model";
468469
public const string FinishReason = "gen_ai.response.finish_reason";
469-
public const string PromptToken = "gen_ai.response.prompt_tokens";
470-
public const string CompletionToken = "gen_ai.response.completion_tokens";
471-
public const string Prompt = "gen_ai.content.prompt";
472-
public const string Completion = "gen_ai.content.completion";
470+
public const string InputTokens = "gen_ai.usage.input_tokens";
471+
public const string OutputTokens = "gen_ai.usage.output_tokens";
473472
public const string Address = "server.address";
474473
public const string Port = "server.port";
475474
public const string AgentId = "gen_ai.agent.id";

0 commit comments

Comments (0)