Commit d6615ab

Prepare 2.2.0-beta.2 release (Part 1) (#349)
1 parent: 9c04e78 · commit: d6615ab

14 files changed (+3,524 −243 lines)

CHANGELOG.md

Lines changed: 8 additions & 0 deletions
@@ -1,5 +1,13 @@
 # Release History
 
+## 2.2.0-beta.2 (Unreleased)
+
+### Bugs fixed
+
+- Fixed a bug that caused `CompleteChatStreaming*()` with audio-enabled models to fail unless provided a `ChatCompletionOptions` instance that had previously been used for a non-streaming `CompleteChat*()` call
+- Addressed an issue that caused calls to `CompleteChatStreaming*()` to not report usage when provided a `ChatCompletionOptions` instance that was previously used in a non-streaming `CompleteChat*()` call
+- Addressed issues with standalone serialization and deserialization of `ChatCompletionOptions` that impacted the ability to manipulate chat completion requests via `System.ClientModel.Primitives.ModelReaderWriter` and related utilities
+
 ## 2.2.0-beta.1 (2025-02-07)
 
 ### Features added
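The first two bullets describe the streaming scenario this commit repairs. The sketch below is not part of the commit; it assumes the `gpt-4o-mini` model name and an `OPENAI_API_KEY` environment variable as placeholders, and that `StreamingChatCompletionUpdate` exposes the reported usage through its `Usage` property, as the factory changes further down suggest. Audio-specific configuration is sketched separately after the `ChatCompletionOptions.cs` diff.

```csharp
using System;
using System.Collections.Generic;
using OpenAI.Chat;

// Stream a completion with a freshly constructed ChatCompletionOptions instance that
// has never been passed to a non-streaming CompleteChat*() call, then read the token
// usage that the streaming response now reports.
ChatClient client = new("gpt-4o-mini", Environment.GetEnvironmentVariable("OPENAI_API_KEY"));

ChatCompletionOptions options = new();
List<ChatMessage> messages = [new UserChatMessage("Say hello.")];

foreach (StreamingChatCompletionUpdate update in client.CompleteChatStreaming(messages, options))
{
    foreach (ChatMessageContentPart part in update.ContentUpdate)
    {
        Console.Write(part.Text);
    }

    if (update.Usage is not null)
    {
        Console.WriteLine($"\nTotal tokens: {update.Usage.TotalTokenCount}");
    }
}
```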

api/OpenAI.net8.0.cs

Lines changed: 3181 additions & 0 deletions
Large diffs are not rendered by default.

api/OpenAI.netstandard2.0.cs

Lines changed: 12 additions & 124 deletions
Large diffs are not rendered by default.

src/Custom/Chat/ChatClient.cs

Lines changed: 13 additions & 4 deletions
@@ -251,9 +251,18 @@ private void CreateChatCompletionOptions(IEnumerable<ChatMessage> messages, ref
     {
         options.Messages = messages.ToList();
         options.Model = _model;
-        options.Stream = stream
-            ? true
-            : null;
-        options.StreamOptions = stream ? options.StreamOptions : null;
+        if (stream)
+        {
+            options.Stream = true;
+            options.StreamOptions = s_includeUsageStreamOptions;
+        }
+        else
+        {
+            options.Stream = null;
+            options.StreamOptions = null;
+        }
     }
+
+    private static readonly InternalChatCompletionStreamOptions s_includeUsageStreamOptions
+        = new(includeUsage: true, additionalBinaryDataProperties: null);
 }

src/Custom/Chat/ChatCompletionOptions.Serialization.cs

Lines changed: 4 additions & 11 deletions
@@ -13,19 +13,12 @@ public partial class ChatCompletionOptions
     [MethodImpl(MethodImplOptions.AggressiveInlining)]
     private void SerializeMessagesValue(Utf8JsonWriter writer, ModelReaderWriterOptions options)
     {
-        if (Messages is not null)
-        {
-            writer.WriteStartArray();
-            foreach (var item in Messages)
-            {
-                writer.WriteObjectValue<ChatMessage>(item, options);
-            }
-            writer.WriteEndArray();
-        }
-        else
+        writer.WriteStartArray();
+        foreach (var item in Messages)
         {
-            writer.WriteNullValue();
+            writer.WriteObjectValue<ChatMessage>(item, options);
         }
+        writer.WriteEndArray();
     }
 
     // CUSTOM: Added custom serialization to treat a single string as a collection of strings with one item.
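The hunk above drops the null branch because `Messages` is now always initialized (see the constructor change in `ChatCompletionOptions.cs` below), which is what makes standalone serialization viable. As a rough, non-authoritative sketch of the round-trip the third changelog bullet refers to, using the `System.ClientModel.Primitives.ModelReaderWriter` utilities it names (the `Temperature` value is an arbitrary placeholder):

```csharp
using System;
using System.ClientModel.Primitives;
using OpenAI.Chat;

// Serialize a ChatCompletionOptions instance on its own, without sending a request,
// then rehydrate it; Messages now always serializes as a JSON array rather than null.
ChatCompletionOptions options = new()
{
    Temperature = 0.5f,
};

BinaryData serialized = ModelReaderWriter.Write(options);
ChatCompletionOptions roundTripped = ModelReaderWriter.Read<ChatCompletionOptions>(serialized);
```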

src/Custom/Chat/ChatCompletionOptions.cs

Lines changed: 26 additions & 34 deletions
@@ -1,14 +1,16 @@
 using System;
+using System.ClientModel.Primitives;
 using System.Collections.Generic;
 using System.Diagnostics.CodeAnalysis;
+using System.Threading;
 
 namespace OpenAI.Chat;
 
 /// <summary>
 /// Request-level options for chat completion.
 /// </summary>
 [CodeGenModel("CreateChatCompletionRequest")]
-[CodeGenSuppress("ChatCompletionOptions", typeof(IEnumerable<ChatMessage>), typeof(InternalCreateChatCompletionRequestModel))]
+[CodeGenSuppress("ChatCompletionOptions", typeof(IEnumerable<ChatMessage>), typeof(InternalCreateChatCompletionRequestModel?))]
 [CodeGenSerialization(nameof(Messages), SerializationValueHook = nameof(SerializeMessagesValue))]
 [CodeGenSerialization(nameof(StopSequences), SerializationValueHook = nameof(SerializeStopSequencesValue), DeserializationValueHook = nameof(DeserializeStopSequencesValue))]
 [CodeGenSerialization(nameof(LogitBiases), SerializationValueHook = nameof(SerializeLogitBiasesValue), DeserializationValueHook = nameof(DeserializeLogitBiasesValue))]
@@ -32,7 +34,7 @@ public partial class ChatCompletionOptions
     /// ID of the model to use. See the <see href="https://platform.openai.com/docs/models/model-endpoint-compatibility">model endpoint compatibility</see> table for details on which models work with the Chat API.
     /// </summary>
     [CodeGenMember("Model")]
-    internal InternalCreateChatCompletionRequestModel Model { get; set; }
+    internal InternalCreateChatCompletionRequestModel? Model { get; set; }
 
     // CUSTOM: Made internal. We only ever request a single choice.
     /// <summary> How many chat completion choices to generate for each input message. Note that you will be charged based on the number of generated tokens across all of the choices. Keep `n` as `1` to minimize costs. </summary>
@@ -47,16 +49,18 @@ public partial class ChatCompletionOptions
     /// <summary> Gets or sets the stream options. </summary>
     [CodeGenMember("StreamOptions")]
     internal InternalChatCompletionStreamOptions StreamOptions { get; set; }
-        = new() { IncludeUsage = true };
 
     // CUSTOM: Made public now that there are no required properties.
     /// <summary> Initializes a new instance of <see cref="ChatCompletionOptions"/> for deserialization. </summary>
     public ChatCompletionOptions()
     {
+        Messages = new ChangeTrackingList<ChatMessage>();
         LogitBiases = new ChangeTrackingDictionary<int, int>();
        StopSequences = new ChangeTrackingList<string>();
         Tools = new ChangeTrackingList<ChatTool>();
         Functions = new ChangeTrackingList<ChatFunction>();
+        InternalModalities = new ChangeTrackingList<InternalCreateChatCompletionRequestModality>();
+        Metadata = new ChangeTrackingDictionary<string, string>();
     }
 
     // CUSTOM: Renamed.
@@ -113,20 +117,6 @@ public ChatCompletionOptions()
     [CodeGenMember("ParallelToolCalls")]
     public bool? AllowParallelToolCalls { get; set; }
 
-    /// <summary>
-    /// An object specifying the format that the model must output.
-    /// </summary>
-    /// <remarks>
-    /// <p>
-    /// Compatible with GPT-4o, GPT-4o mini, GPT-4 Turbo and all GPT-3.5 Turbo models newer than gpt-3.5-turbo-1106.
-    /// </p>
-    /// <p>
-    /// Learn more in the Structured Outputs guide.
-    /// </p>
-    /// </remarks>
-    //[CodeGenMember("ResponseFormat")]
-    //public ChatResponseFormat ResponseFormat { get; set; }
-
     [CodeGenMember("ServiceTier")]
     internal InternalCreateChatCompletionRequestServiceTier? _serviceTier;
 
@@ -166,7 +156,7 @@ public ChatCompletionOptions()
     /// <see href="https://platform.openai.com/chat-completions">OpenAI Platform dashboard</see>.
     /// </summary>
     [CodeGenMember("Metadata")]
-    public IDictionary<string, string> Metadata { get; } = new ChangeTrackingDictionary<string, string>();
+    public IDictionary<string, string> Metadata { get; }
 
     // CUSTOM: Renamed.
     /// <summary>
@@ -188,7 +178,16 @@ public ChatCompletionOptions()
 
     // CUSTOM: Made internal for automatic enablement via audio options.
     [CodeGenMember("Modalities")]
-    private IList<InternalCreateChatCompletionRequestModality> _internalModalities = new ChangeTrackingList<InternalCreateChatCompletionRequestModality>();
+    private IList<InternalCreateChatCompletionRequestModality> InternalModalities
+    {
+        get => _internalModalities;
+        set
+        {
+            _internalModalities = value;
+            _responseModalities = ChatResponseModalitiesExtensions.FromInternalModalities(value);
+        }
+    }
+    private IList<InternalCreateChatCompletionRequestModality> _internalModalities;
 
     /// <summary>
     /// Specifies the content types that the model should generate in its responses.
@@ -200,25 +199,18 @@ public ChatCompletionOptions()
     /// </remarks>
     public ChatResponseModalities ResponseModalities
     {
-        get => ChatResponseModalitiesExtensions.FromInternalModalities(_internalModalities);
-        set => _internalModalities = value.ToInternalModalities();
-    }
-
-    // CUSTOM: supplemented with custom setter to internally enable audio output via modalities.
-    [CodeGenMember("Audio")]
-    private ChatAudioOptions _audioOptions;
-
-    public ChatAudioOptions AudioOptions
-    {
-        get => _audioOptions;
+        get => _responseModalities;
         set
         {
-            _audioOptions = value;
-            _internalModalities = value is null
-                ? new ChangeTrackingList<InternalCreateChatCompletionRequestModality>()
-                : [InternalCreateChatCompletionRequestModality.Text, InternalCreateChatCompletionRequestModality.Audio];
+            _responseModalities = value;
+            _internalModalities = value.ToInternalModalities();
         }
     }
+    private ChatResponseModalities _responseModalities;
+
+    // CUSTOM: Renamed.
+    [CodeGenMember("Audio")]
+    public ChatAudioOptions AudioOptions { get; set; }
 
     // CUSTOM: rename.
     [CodeGenMember("Prediction")]
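Not part of the diff: a small sketch of the reshaped properties in use. It assumes the `Text` and `Audio` flags on `ChatResponseModalities` and the `ChatAudioOptions(voice, format)` constructor from the existing public surface; whether modalities still need to be set explicitly alongside `AudioOptions` is not something this file alone shows.

```csharp
using OpenAI.Chat;

// ResponseModalities is now backed by its own field that stays in sync with the
// internal modalities list, and AudioOptions is a plain auto-property, so setting
// both explicitly is the unambiguous way to request audio output.
ChatCompletionOptions options = new()
{
    ResponseModalities = ChatResponseModalities.Text | ChatResponseModalities.Audio,
    AudioOptions = new ChatAudioOptions(ChatOutputAudioVoice.Alloy, ChatOutputAudioFormat.Mp3),
};
```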

src/Custom/Chat/OpenAIChatModelFactory.cs

Lines changed: 109 additions & 13 deletions
@@ -1,12 +1,46 @@
 using System;
 using System.Collections.Generic;
+using System.ComponentModel;
 using System.Linq;
 
 namespace OpenAI.Chat;
 
 /// <summary> Model factory for models. </summary>
 public static partial class OpenAIChatModelFactory
 {
+    /// <summary> Initializes a new instance of <see cref="OpenAI.Chat.ChatCompletion"/>. </summary>
+    /// <returns> A new <see cref="OpenAI.Chat.ChatCompletion"/> instance for mocking. </returns>
+    [EditorBrowsable(EditorBrowsableState.Never)]
+    public static ChatCompletion ChatCompletion(
+        string id,
+        ChatFinishReason finishReason,
+        ChatMessageContent content,
+        string refusal,
+        IEnumerable<ChatToolCall> toolCalls,
+        ChatMessageRole role,
+        ChatFunctionCall functionCall,
+        IEnumerable<ChatTokenLogProbabilityDetails> contentTokenLogProbabilities,
+        IEnumerable<ChatTokenLogProbabilityDetails> refusalTokenLogProbabilities,
+        DateTimeOffset createdAt,
+        string model,
+        string systemFingerprint,
+        ChatTokenUsage usage) =>
+        ChatCompletion(
+            id: id,
+            finishReason: finishReason,
+            content:content,
+            refusal: refusal,
+            toolCalls: toolCalls,
+            role: role,
+            functionCall: functionCall,
+            contentTokenLogProbabilities: contentTokenLogProbabilities,
+            refusalTokenLogProbabilities: refusalTokenLogProbabilities,
+            createdAt: createdAt,
+            model: model,
+            systemFingerprint: systemFingerprint,
+            usage: usage,
+            outputAudio: default);
+
     /// <summary> Initializes a new instance of <see cref="OpenAI.Chat.ChatCompletion"/>. </summary>
     /// <returns> A new <see cref="OpenAI.Chat.ChatCompletion"/> instance for mocking. </returns>
     public static ChatCompletion ChatCompletion(
@@ -16,14 +50,14 @@ public static ChatCompletion ChatCompletion(
         string refusal = null,
         IEnumerable<ChatToolCall> toolCalls = null,
         ChatMessageRole role = default,
-        ChatFunctionCall functionCall = null,
+        ChatFunctionCall functionCall = default,
         IEnumerable<ChatTokenLogProbabilityDetails> contentTokenLogProbabilities = null,
         IEnumerable<ChatTokenLogProbabilityDetails> refusalTokenLogProbabilities = null,
         DateTimeOffset createdAt = default,
         string model = null,
         string systemFingerprint = null,
-        ChatTokenUsage usage = null,
-        ChatOutputAudio outputAudio = null)
+        ChatTokenUsage usage = default,
+        ChatOutputAudio outputAudio = default)
     {
         content ??= new ChatMessageContent();
         toolCalls ??= new List<ChatToolCall>();
@@ -91,16 +125,27 @@ public static ChatTokenTopLogProbabilityDetails ChatTokenTopLogProbabilityDetail
             additionalBinaryDataProperties: null);
     }
 
+    /// <summary> Initializes a new instance of <see cref="OpenAI.Chat.ChatTokenUsage"/>. </summary>
+    /// <returns> A new <see cref="OpenAI.Chat.ChatTokenUsage"/> instance for mocking. </returns>
+    [EditorBrowsable(EditorBrowsableState.Never)]
+    public static ChatTokenUsage ChatTokenUsage(int outputTokenCount, int inputTokenCount, int totalTokenCount, ChatOutputTokenUsageDetails outputTokenDetails) =>
+        ChatTokenUsage(
+            outputTokenCount: outputTokenCount,
+            inputTokenCount: inputTokenCount,
+            totalTokenCount: totalTokenCount,
+            outputTokenDetails: outputTokenDetails,
+            inputTokenDetails: default);
+
     /// <summary> Initializes a new instance of <see cref="OpenAI.Chat.ChatTokenUsage"/>. </summary>
     /// <returns> A new <see cref="OpenAI.Chat.ChatTokenUsage"/> instance for mocking. </returns>
     public static ChatTokenUsage ChatTokenUsage(int outputTokenCount = default, int inputTokenCount = default, int totalTokenCount = default, ChatOutputTokenUsageDetails outputTokenDetails = null, ChatInputTokenUsageDetails inputTokenDetails = null)
     {
         return new ChatTokenUsage(
-            outputTokenCount,
-            inputTokenCount,
-            totalTokenCount,
-            outputTokenDetails,
-            inputTokenDetails,
+            outputTokenCount: outputTokenCount,
+            inputTokenCount: inputTokenCount,
+            totalTokenCount: totalTokenCount,
+            outputTokenDetails: outputTokenDetails,
+            inputTokenDetails: inputTokenDetails,
             additionalBinaryDataProperties: null);
     }
 
@@ -116,7 +161,25 @@ public static ChatInputTokenUsageDetails ChatInputTokenUsageDetails(int audioTok
 
     /// <summary> Initializes a new instance of <see cref="OpenAI.Chat.ChatOutputTokenUsageDetails"/>. </summary>
     /// <returns> A new <see cref="OpenAI.Chat.ChatOutputTokenusageDetails"/> instance for mocking. </returns>
-    public static ChatOutputTokenUsageDetails ChatOutputTokenUsageDetails(int reasoningTokenCount = default, int audioTokenCount = default, int acceptedPredictionTokenCount = default, int rejectedPredictionTokenCount = 0)
+    [EditorBrowsable(EditorBrowsableState.Never)]
+    public static ChatOutputTokenUsageDetails ChatOutputTokenUsageDetails(int reasoningTokenCount) =>
+        ChatOutputTokenUsageDetails(
+            reasoningTokenCount: reasoningTokenCount,
+            audioTokenCount: default);
+
+    /// <summary> Initializes a new instance of <see cref="OpenAI.Chat.ChatOutputTokenUsageDetails"/>. </summary>
+    /// <returns> A new <see cref="OpenAI.Chat.ChatOutputTokenusageDetails"/> instance for mocking. </returns>
+    [EditorBrowsable(EditorBrowsableState.Never)]
+    public static ChatOutputTokenUsageDetails ChatOutputTokenUsageDetails(int reasoningTokenCount, int audioTokenCount) =>
+        ChatOutputTokenUsageDetails(
+            reasoningTokenCount: reasoningTokenCount,
+            audioTokenCount: audioTokenCount,
+            acceptedPredictionTokenCount: default,
+            rejectedPredictionTokenCount: default);
+
+    /// <summary> Initializes a new instance of <see cref="OpenAI.Chat.ChatOutputTokenUsageDetails"/>. </summary>
+    /// <returns> A new <see cref="OpenAI.Chat.ChatOutputTokenusageDetails"/> instance for mocking. </returns>
+    public static ChatOutputTokenUsageDetails ChatOutputTokenUsageDetails(int reasoningTokenCount = default, int audioTokenCount = default, int acceptedPredictionTokenCount = default, int rejectedPredictionTokenCount = default)
     {
         return new ChatOutputTokenUsageDetails(
             audioTokenCount: audioTokenCount,
@@ -136,23 +199,56 @@ public static ChatOutputAudio ChatOutputAudio(BinaryData audioBytes, string id =
             additionalBinaryDataProperties: null);
     }
 
+    /// <summary> Initializes a new instance of <see cref="OpenAI.Chat.StreamingChatCompletionUpdate"/>. </summary>
+    /// <returns> A new <see cref="OpenAI.Chat.StreamingChatCompletionUpdate"/> instance for mocking. </returns>
+    [EditorBrowsable(EditorBrowsableState.Never)]
+    public static StreamingChatCompletionUpdate StreamingChatCompletionUpdate(
+        string completionId,
+        ChatMessageContent contentUpdate,
+        StreamingChatFunctionCallUpdate functionCallUpdate,
+        IEnumerable<StreamingChatToolCallUpdate> toolCallUpdates,
+        ChatMessageRole? role,
+        string refusalUpdate,
+        IEnumerable<ChatTokenLogProbabilityDetails> contentTokenLogProbabilities,
+        IEnumerable<ChatTokenLogProbabilityDetails> refusalTokenLogProbabilities,
+        ChatFinishReason? finishReason,
+        DateTimeOffset createdAt,
+        string model,
+        string systemFingerprint,
+        ChatTokenUsage usage) =>
+        StreamingChatCompletionUpdate(
+            completionId: completionId,
+            contentUpdate: contentUpdate,
+            functionCallUpdate: functionCallUpdate,
+            toolCallUpdates: toolCallUpdates,
+            role: role,
+            refusalUpdate: refusalUpdate,
+            contentTokenLogProbabilities: contentTokenLogProbabilities,
+            refusalTokenLogProbabilities: refusalTokenLogProbabilities,
+            finishReason: finishReason,
+            createdAt: createdAt,
+            model: model,
+            systemFingerprint: systemFingerprint,
+            usage: usage,
+            outputAudioUpdate: default);
+
     /// <summary> Initializes a new instance of <see cref="OpenAI.Chat.StreamingChatCompletionUpdate"/>. </summary>
     /// <returns> A new <see cref="OpenAI.Chat.StreamingChatCompletionUpdate"/> instance for mocking. </returns>
     public static StreamingChatCompletionUpdate StreamingChatCompletionUpdate(
         string completionId = null,
         ChatMessageContent contentUpdate = null,
         StreamingChatFunctionCallUpdate functionCallUpdate = null,
         IEnumerable<StreamingChatToolCallUpdate> toolCallUpdates = null,
-        ChatMessageRole? role = null,
+        ChatMessageRole? role = default,
         string refusalUpdate = null,
         IEnumerable<ChatTokenLogProbabilityDetails> contentTokenLogProbabilities = null,
         IEnumerable<ChatTokenLogProbabilityDetails> refusalTokenLogProbabilities = null,
-        ChatFinishReason? finishReason = null,
+        ChatFinishReason? finishReason = default,
         DateTimeOffset createdAt = default,
         string model = null,
         string systemFingerprint = null,
-        ChatTokenUsage usage = null,
-        StreamingChatOutputAudioUpdate outputAudioUpdate = null)
+        ChatTokenUsage usage = default,
+        StreamingChatOutputAudioUpdate outputAudioUpdate = default)
     {
         contentUpdate ??= new ChatMessageContent();
         toolCallUpdates ??= new List<StreamingChatToolCallUpdate>();
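The hidden `[EditorBrowsable]` overloads above preserve compatibility for callers of the previous factory shapes while the primary overloads gain the audio and prediction parameters with defaults. As a non-authoritative sketch of what these factory methods are for (mocking responses in tests), with placeholder values and assuming `ChatMessageContent` accepts a string:

```csharp
using System;
using OpenAI.Chat;

// Build a ChatCompletion for a unit test without calling the service; omitted
// parameters fall back to the defaults on the primary overload, including outputAudio.
ChatCompletion mockCompletion = OpenAIChatModelFactory.ChatCompletion(
    id: "chatcmpl-test",
    finishReason: ChatFinishReason.Stop,
    content: new ChatMessageContent("Hello from a mock."),
    role: ChatMessageRole.Assistant,
    createdAt: DateTimeOffset.UtcNow,
    model: "gpt-4o-mini",
    usage: OpenAIChatModelFactory.ChatTokenUsage(
        outputTokenCount: 5,
        inputTokenCount: 10,
        totalTokenCount: 15));

Console.WriteLine(mockCompletion.Content[0].Text);
```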

src/Custom/Chat/Streaming/StreamingChatCompletionUpdate.cs

Lines changed: 1 addition & 1 deletion
@@ -11,7 +11,7 @@ public partial class StreamingChatCompletionUpdate
     private IReadOnlyList<StreamingChatToolCallUpdate> _toolCallUpdates;
     private IReadOnlyList<ChatTokenLogProbabilityDetails> _contentTokenLogProbabilities;
     private IReadOnlyList<ChatTokenLogProbabilityDetails> _refusalTokenLogProbabilities;
-    internal InternalCreateChatCompletionStreamResponseChoice InternalChoice => (Choices.Count > 0) ? Choices[0] : null;
+    internal InternalCreateChatCompletionStreamResponseChoice InternalChoice => (Choices?.Count > 0) ? Choices[0] : null;
     internal InternalChatCompletionStreamResponseDelta InternalChoiceDelta => InternalChoice?.Delta;
 
     // CUSTOM:
