diff --git a/src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/ChatOptions.cs b/src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/ChatOptions.cs
index 4edbed900b4..0a4f6f58296 100644
--- a/src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/ChatOptions.cs
+++ b/src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/ChatOptions.cs
@@ -27,6 +27,9 @@ public class ChatOptions
/// Gets or sets the presence penalty for generating chat responses.
public float? PresencePenalty { get; set; }
+ /// Gets or sets a seed value used by a service to control the reproducibility of results.
+ public long? Seed { get; set; }
+
///
/// Gets or sets the response format for the chat request.
///
@@ -74,6 +77,7 @@ public virtual ChatOptions Clone()
TopK = TopK,
FrequencyPenalty = FrequencyPenalty,
PresencePenalty = PresencePenalty,
+ Seed = Seed,
ResponseFormat = ResponseFormat,
ModelId = ModelId,
ToolMode = ToolMode,
diff --git a/src/Libraries/Microsoft.Extensions.AI.AzureAIInference/AzureAIInferenceChatClient.cs b/src/Libraries/Microsoft.Extensions.AI.AzureAIInference/AzureAIInferenceChatClient.cs
index ecc41140b27..ba76f5c3c90 100644
--- a/src/Libraries/Microsoft.Extensions.AI.AzureAIInference/AzureAIInferenceChatClient.cs
+++ b/src/Libraries/Microsoft.Extensions.AI.AzureAIInference/AzureAIInferenceChatClient.cs
@@ -285,6 +285,7 @@ private ChatCompletionsOptions ToAzureAIOptions(IList chatContents,
result.NucleusSamplingFactor = options.TopP;
result.PresencePenalty = options.PresencePenalty;
result.Temperature = options.Temperature;
+ result.Seed = options.Seed;
if (options.StopSequences is { Count: > 0 } stopSequences)
{
@@ -306,11 +307,6 @@ private ChatCompletionsOptions ToAzureAIOptions(IList chatContents,
{
switch (prop.Key)
{
- // These properties are strongly-typed on the ChatCompletionsOptions class but not on the ChatOptions class.
- case nameof(result.Seed) when prop.Value is long seed:
- result.Seed = seed;
- break;
-
// Propagate everything else to the ChatCompletionOptions' AdditionalProperties.
default:
if (prop.Value is not null)
diff --git a/src/Libraries/Microsoft.Extensions.AI.Ollama/OllamaChatClient.cs b/src/Libraries/Microsoft.Extensions.AI.Ollama/OllamaChatClient.cs
index 72ddb13b2ac..18ff5d50b7c 100644
--- a/src/Libraries/Microsoft.Extensions.AI.Ollama/OllamaChatClient.cs
+++ b/src/Libraries/Microsoft.Extensions.AI.Ollama/OllamaChatClient.cs
@@ -273,7 +273,6 @@ private OllamaChatRequest ToOllamaChatRequest(IList chatMessages, C
TransferMetadataValue(nameof(OllamaRequestOptions.penalize_newline), (options, value) => options.penalize_newline = value);
TransferMetadataValue(nameof(OllamaRequestOptions.repeat_last_n), (options, value) => options.repeat_last_n = value);
TransferMetadataValue(nameof(OllamaRequestOptions.repeat_penalty), (options, value) => options.repeat_penalty = value);
- TransferMetadataValue(nameof(OllamaRequestOptions.seed), (options, value) => options.seed = value);
TransferMetadataValue(nameof(OllamaRequestOptions.tfs_z), (options, value) => options.tfs_z = value);
TransferMetadataValue(nameof(OllamaRequestOptions.typical_p), (options, value) => options.typical_p = value);
TransferMetadataValue(nameof(OllamaRequestOptions.use_mmap), (options, value) => options.use_mmap = value);
@@ -314,6 +313,11 @@ private OllamaChatRequest ToOllamaChatRequest(IList chatMessages, C
{
(request.Options ??= new()).top_k = topK;
}
+
+ if (options.Seed is long seed)
+ {
+ (request.Options ??= new()).seed = seed;
+ }
}
return request;
diff --git a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIChatClient.cs b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIChatClient.cs
index 0562352feb6..985060256f7 100644
--- a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIChatClient.cs
+++ b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIChatClient.cs
@@ -392,6 +392,9 @@ private static ChatCompletionOptions ToOpenAIOptions(ChatOptions? options)
result.TopP = options.TopP;
result.PresencePenalty = options.PresencePenalty;
result.Temperature = options.Temperature;
+#pragma warning disable OPENAI001 // Type is for evaluation purposes only and is subject to change or removal in future updates.
+ result.Seed = options.Seed;
+#pragma warning restore OPENAI001
if (options.StopSequences is { Count: > 0 } stopSequences)
{
@@ -426,13 +429,6 @@ private static ChatCompletionOptions ToOpenAIOptions(ChatOptions? options)
result.AllowParallelToolCalls = allowParallelToolCalls;
}
-#pragma warning disable OPENAI001 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed.
- if (additionalProperties.TryGetValue(nameof(result.Seed), out long seed))
- {
- result.Seed = seed;
- }
-#pragma warning restore OPENAI001
-
if (additionalProperties.TryGetValue(nameof(result.TopLogProbabilityCount), out int topLogProbabilityCountInt))
{
result.TopLogProbabilityCount = topLogProbabilityCountInt;
diff --git a/src/Libraries/Microsoft.Extensions.AI/ChatCompletion/OpenTelemetryChatClient.cs b/src/Libraries/Microsoft.Extensions.AI/ChatCompletion/OpenTelemetryChatClient.cs
index 905e756e246..a6dfe53adf5 100644
--- a/src/Libraries/Microsoft.Extensions.AI/ChatCompletion/OpenTelemetryChatClient.cs
+++ b/src/Libraries/Microsoft.Extensions.AI/ChatCompletion/OpenTelemetryChatClient.cs
@@ -322,7 +322,7 @@ private static ChatCompletion ComposeStreamingUpdatesIntoChatCompletion(
_ = activity.AddTag(OpenTelemetryConsts.GenAI.Request.PerProvider(_system, "response_format"), responseFormat);
}
- if (options.AdditionalProperties?.TryGetValue("seed", out long seed) is true)
+ if (options.Seed is long seed)
{
_ = activity.AddTag(OpenTelemetryConsts.GenAI.Request.PerProvider(_system, "seed"), seed);
}
diff --git a/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/ChatCompletion/ChatOptionsTests.cs b/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/ChatCompletion/ChatOptionsTests.cs
index f83169712c3..fcd40a2f446 100644
--- a/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/ChatCompletion/ChatOptionsTests.cs
+++ b/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/ChatCompletion/ChatOptionsTests.cs
@@ -19,6 +19,7 @@ public void Constructor_Parameterless_PropsDefaulted()
Assert.Null(options.TopK);
Assert.Null(options.FrequencyPenalty);
Assert.Null(options.PresencePenalty);
+ Assert.Null(options.Seed);
Assert.Null(options.ResponseFormat);
Assert.Null(options.ModelId);
Assert.Null(options.StopSequences);
@@ -33,6 +34,7 @@ public void Constructor_Parameterless_PropsDefaulted()
Assert.Null(clone.TopK);
Assert.Null(clone.FrequencyPenalty);
Assert.Null(clone.PresencePenalty);
+ Assert.Null(clone.Seed);
Assert.Null(clone.ResponseFormat);
Assert.Null(clone.ModelId);
Assert.Null(clone.StopSequences);
@@ -69,6 +71,7 @@ public void Properties_Roundtrip()
options.TopK = 42;
options.FrequencyPenalty = 0.4f;
options.PresencePenalty = 0.5f;
+ options.Seed = 12345;
options.ResponseFormat = ChatResponseFormat.Json;
options.ModelId = "modelId";
options.StopSequences = stopSequences;
@@ -82,6 +85,7 @@ public void Properties_Roundtrip()
Assert.Equal(42, options.TopK);
Assert.Equal(0.4f, options.FrequencyPenalty);
Assert.Equal(0.5f, options.PresencePenalty);
+ Assert.Equal(12345, options.Seed);
Assert.Same(ChatResponseFormat.Json, options.ResponseFormat);
Assert.Equal("modelId", options.ModelId);
Assert.Same(stopSequences, options.StopSequences);
@@ -96,6 +100,7 @@ public void Properties_Roundtrip()
Assert.Equal(42, clone.TopK);
Assert.Equal(0.4f, clone.FrequencyPenalty);
Assert.Equal(0.5f, clone.PresencePenalty);
+ Assert.Equal(12345, clone.Seed);
Assert.Same(ChatResponseFormat.Json, clone.ResponseFormat);
Assert.Equal("modelId", clone.ModelId);
Assert.Equal(stopSequences, clone.StopSequences);
@@ -126,6 +131,7 @@ public void JsonSerialization_Roundtrips()
options.TopK = 42;
options.FrequencyPenalty = 0.4f;
options.PresencePenalty = 0.5f;
+ options.Seed = 12345;
options.ResponseFormat = ChatResponseFormat.Json;
options.ModelId = "modelId";
options.StopSequences = stopSequences;
@@ -148,6 +154,7 @@ public void JsonSerialization_Roundtrips()
Assert.Equal(42, deserialized.TopK);
Assert.Equal(0.4f, deserialized.FrequencyPenalty);
Assert.Equal(0.5f, deserialized.PresencePenalty);
+ Assert.Equal(12345, deserialized.Seed);
Assert.Equal(ChatResponseFormat.Json, deserialized.ResponseFormat);
Assert.NotSame(ChatResponseFormat.Json, deserialized.ResponseFormat);
Assert.Equal("modelId", deserialized.ModelId);
diff --git a/test/Libraries/Microsoft.Extensions.AI.AzureAIInference.Tests/AzureAIInferenceChatClientTests.cs b/test/Libraries/Microsoft.Extensions.AI.AzureAIInference.Tests/AzureAIInferenceChatClientTests.cs
index 4fb5122cc93..f404f5e61ef 100644
--- a/test/Libraries/Microsoft.Extensions.AI.AzureAIInference.Tests/AzureAIInferenceChatClientTests.cs
+++ b/test/Libraries/Microsoft.Extensions.AI.AzureAIInference.Tests/AzureAIInferenceChatClientTests.cs
@@ -247,8 +247,8 @@ public async Task MultipleMessages_NonStreaming()
],
"presence_penalty": 0.5,
"frequency_penalty": 0.75,
- "model": "gpt-4o-mini",
- "seed": 42
+ "seed": 42,
+ "model": "gpt-4o-mini"
}
""";
@@ -303,7 +303,7 @@ public async Task MultipleMessages_NonStreaming()
FrequencyPenalty = 0.75f,
PresencePenalty = 0.5f,
StopSequences = ["great"],
- AdditionalProperties = new() { ["seed"] = 42L },
+ Seed = 42,
});
Assert.NotNull(response);
diff --git a/test/Libraries/Microsoft.Extensions.AI.Ollama.Tests/OllamaChatClientIntegrationTests.cs b/test/Libraries/Microsoft.Extensions.AI.Ollama.Tests/OllamaChatClientIntegrationTests.cs
index ac941623124..4c71690baaf 100644
--- a/test/Libraries/Microsoft.Extensions.AI.Ollama.Tests/OllamaChatClientIntegrationTests.cs
+++ b/test/Libraries/Microsoft.Extensions.AI.Ollama.Tests/OllamaChatClientIntegrationTests.cs
@@ -49,7 +49,7 @@ public async Task PromptBasedFunctionCalling_NoArgs()
ModelId = "llama3:8b",
Tools = [AIFunctionFactory.Create(() => secretNumber, "GetSecretNumber")],
Temperature = 0,
- AdditionalProperties = new() { ["seed"] = 0L },
+ Seed = 0,
});
Assert.Single(response.Choices);
@@ -83,7 +83,7 @@ public async Task PromptBasedFunctionCalling_WithArgs()
{
Tools = [stockPriceTool, irrelevantTool],
Temperature = 0,
- AdditionalProperties = new() { ["seed"] = 0L },
+ Seed = 0,
});
Assert.Single(response.Choices);
diff --git a/test/Libraries/Microsoft.Extensions.AI.Ollama.Tests/OllamaChatClientTests.cs b/test/Libraries/Microsoft.Extensions.AI.Ollama.Tests/OllamaChatClientTests.cs
index 3e281173c8b..67b10e3f24b 100644
--- a/test/Libraries/Microsoft.Extensions.AI.Ollama.Tests/OllamaChatClientTests.cs
+++ b/test/Libraries/Microsoft.Extensions.AI.Ollama.Tests/OllamaChatClientTests.cs
@@ -254,7 +254,7 @@ public async Task MultipleMessages_NonStreaming()
FrequencyPenalty = 0.75f,
PresencePenalty = 0.5f,
StopSequences = ["great"],
- AdditionalProperties = new() { ["seed"] = 42 },
+ Seed = 42,
});
Assert.NotNull(response);
diff --git a/test/Libraries/Microsoft.Extensions.AI.OpenAI.Tests/OpenAIChatClientTests.cs b/test/Libraries/Microsoft.Extensions.AI.OpenAI.Tests/OpenAIChatClientTests.cs
index 691804e5fb8..05d2f5a22ff 100644
--- a/test/Libraries/Microsoft.Extensions.AI.OpenAI.Tests/OpenAIChatClientTests.cs
+++ b/test/Libraries/Microsoft.Extensions.AI.OpenAI.Tests/OpenAIChatClientTests.cs
@@ -348,7 +348,7 @@ public async Task MultipleMessages_NonStreaming()
FrequencyPenalty = 0.75f,
PresencePenalty = 0.5f,
StopSequences = ["great"],
- AdditionalProperties = new() { ["seed"] = 42 },
+ Seed = 42,
});
Assert.NotNull(response);