-
Notifications
You must be signed in to change notification settings - Fork 5.1k
Add chat completions model factory methods #37536
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Closed
ralph-msft
wants to merge
1
commit into
Azure:main
from
ralph-msft:ralphe/add_chat_completions_factory
Closed
Changes from all commits
Commits
File filter
Filter by extension
Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
There are no files selected for viewing
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
97 changes: 97 additions & 0 deletions
97
sdk/openai/Azure.AI.OpenAI/tests/OpenAIInferenceModelFactoryTests.cs
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,97 @@ | ||
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.

using System;
using System.Linq;
using NUnit.Framework;

namespace Azure.AI.OpenAI.Tests
{
    /// <summary>
    /// Unit tests for the hand-written model factory methods used to create
    /// chat-completions-related models for mocking scenarios.
    /// </summary>
    [TestFixture]
    public class OpenAIInferenceModelFactoryTests
    {
        [Test]
        public void TestCompletionsLogProbabilityModel()
        {
            // Build a log-probability model from two tokens and their probabilities.
            var model = AIOpenAIModelFactory.CompletionsLogProbabilityModel(
                new[] { "one", "two" },
                new float?[] { 0.9f, 0.72f });

            Assert.That(model, Is.Not.Null);
            Assert.That(model.Tokens.Count, Is.EqualTo(2));
            Assert.That(model.Tokens[0], Is.EqualTo("one"));
            Assert.That(model.Tokens[1], Is.EqualTo("two"));
            Assert.That(model.TokenLogProbabilities.Count, Is.EqualTo(2));
            // Floating-point values are compared with a relative tolerance, not exact equality.
            Assert.That(model.TokenLogProbabilities[0], Is.EqualTo(0.9F).Within(2).Percent);
            Assert.That(model.TokenLogProbabilities[1], Is.EqualTo(0.72F).Within(2).Percent);
            // Collections that were not supplied to the factory should default to empty, not null.
            Assert.That(model.TopLogProbabilities, Is.Empty);
            Assert.That(model.TextOffsets, Is.Empty);
        }

        [Test]
        public void TestChatChoices()
        {
            // Anonymous-typed expectations: role/text feed the ChatMessage, index and
            // finish reason are passed straight through to the factory.
            var expectations = new[]
            {
                new { role = ChatRole.Assistant, text = "First one", index = 0, reason = CompletionsFinishReason.ContentFiltered },
                new { role = ChatRole.System, text = "Second one", index = -1, reason = CompletionsFinishReason.Stopped },
                new { role = ChatRole.User, text = "Final one", index = 3, reason = CompletionsFinishReason.TokenLimitReached },
            };

            var actualChoices = expectations
                .Select(e => AzureOpenAIModelFactory.ChatChoice(
                    new ChatMessage(e.role, e.text),
                    e.index,
                    e.reason))
                .ToArray();
            Assert.That(actualChoices, Is.All.Not.Null);

            // Each created choice must round-trip the values it was constructed with.
            for (int position = 0; position < actualChoices.Length; position++)
            {
                var actual = actualChoices[position];
                var expected = expectations[position];

                Assert.That(actual.Message, Is.Not.Null);
                Assert.That(actual.Message.Role, Is.EqualTo(expected.role));
                Assert.That(actual.Message.Content, Is.EqualTo(expected.text));
                Assert.That(actual.Index, Is.EqualTo(expected.index));
                Assert.That(actual.FinishReason, Is.EqualTo(expected.reason));
            }
        }

        [Test]
        public void TestChatCompletions()
        {
            string expectedId = Guid.NewGuid().ToString();
            DateTimeOffset expectedCreationTime = DateTimeOffset.Now;

            var expectations = new[]
            {
                new { role = ChatRole.Assistant, text = "First one", index = 0, reason = CompletionsFinishReason.ContentFiltered },
                new { role = ChatRole.System, text = "Second one", index = -1, reason = CompletionsFinishReason.Stopped },
                new { role = ChatRole.User, text = "Final one", index = 3, reason = CompletionsFinishReason.TokenLimitReached },
            };

            var choices = expectations
                .Select(e => AzureOpenAIModelFactory.ChatChoice(
                    new ChatMessage(e.role, e.text),
                    e.index,
                    e.reason))
                .ToArray();

            // Compose the full completions object: id + timestamp + choices + usage.
            var chatCompletions = AzureOpenAIModelFactory.ChatCompletions(
                expectedId,
                expectedCreationTime,
                choices,
                AIOpenAIModelFactory.CompletionsUsage(2, 5, 7));

            Assert.That(chatCompletions, Is.Not.Null);
            Assert.That(chatCompletions.Id, Is.EqualTo(expectedId));
            Assert.That(chatCompletions.Created, Is.EqualTo(expectedCreationTime).Within(TimeSpan.FromSeconds(1))); // Internally we use Unix time with second precision
            Assert.That(chatCompletions.Choices, Is.EquivalentTo(choices));
            Assert.That(chatCompletions.Usage, Is.Not.Null);
            Assert.That(chatCompletions.Usage.CompletionTokens, Is.EqualTo(2));
            Assert.That(chatCompletions.Usage.PromptTokens, Is.EqualTo(5));
            Assert.That(chatCompletions.Usage.TotalTokens, Is.EqualTo(7));
        }
    }
}
Add this suggestion to a batch that can be applied as a single commit.
This suggestion is invalid because no changes were made to the code.
Suggestions cannot be applied while the pull request is closed.
Suggestions cannot be applied while viewing a subset of changes.
Only one suggestion per line can be applied in a batch.
Add this suggestion to a batch that can be applied as a single commit.
Applying suggestions on deleted lines is not supported.
You must change the existing code in this line in order to create a valid suggestion.
Outdated suggestions cannot be applied.
This suggestion has been applied or marked resolved.
Suggestions cannot be applied from pending reviews.
Suggestions cannot be applied on multi-line comments.
Suggestions cannot be applied while the pull request is queued to merge.
Suggestion cannot be applied right now. Please check back later.
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Do we know the reason why these are not being auto-generated?
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
It would appear that having some custom code for these types is suppressing the auto-generation
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
that does seem to be the case; there are at least some interesting inconsistencies that crop up. E.g. in the big PR I just published for all the new capabilities, I don't think we got any auto-generated coverage for Chat Functions and only limited coverage for Image Generation: https://github.com/Azure/azure-sdk-for-net/pull/37539/files#diff-32e4c78ddf8afea7d023329cfde10e80cc9c1151343b23db73a8a4c9baa41e11
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
I see. I suspect that the root cause of this is the properties on these classes that have been customized to be `internal`, which makes sense, because a user wouldn't know what these values are or how to use them, but the generator cannot simply ignore them either. Let's try a few things:
1️⃣ For the `ChatCompletions` class, we can remove the customization entirely now that this bug in the generator has been fixed: 🔗 Azure/autorest.csharp#3492
The fix must be done in the definition of the ChatCompletions model in TypeSpec, where we have the "created" property defined as an `int32` that we manually convert to a `DateTimeOffset` in .NET. We should re-define it as a `utcDateTime` and specify that it is encoded as a Unix timestamp using the `@encode` decorator. We can then delete the custom ChatCompletions.cs and re-generate the library. This will correctly auto-generate the `Created` property of the `ChatCompletions` class as a `DateTimeOffset`, and in addition, it will also auto-generate the model factory method that we're trying to manually add as part of this PR.
2️⃣ The "created" property of the Completions model in the TypeSpec has the exact same problem, and the fix is the exact same too: the property must be fixed in the TypeSpec, changing it to a `utcDateTime` with `@encode(DateTimeKnownEncoding.unixTimestamp, int32)`, and re-generating.
3️⃣ The case with the `ChatChoice` class is a little trickier because customizing the property as `internal` is by design. This means we actually have to write this model factory method manually, but I have a few suggestions on how to do it: First, we only need one model factory method per class, and it should have parameters for all the properties. In other words, we can remove the second method for `ChatChoice` (see line 15), which does not have the `ChatMessage` parameter. Second, I believe this means that we also need a model factory method for the streaming classes. In this case, we would also need one for `StreamingChatChoice`, which is where that internal `InternalStreamingDeltaMessage` property is used. For more info on how to write model factories, check out our guidelines: 🔗 https://azure.github.io/azure-sdk/dotnet_introduction.html#dotnet-mocking-factory-builder