diff --git a/sdk/openai/Azure.AI.OpenAI/src/Custom/AzureCognitiveSearchChatExtensionConfiguration.Serialization.cs b/sdk/openai/Azure.AI.OpenAI/src/Custom/AzureCognitiveSearchChatExtensionConfiguration.Serialization.cs
index 5198f1881d9c..ec30b5c4f5c4 100644
--- a/sdk/openai/Azure.AI.OpenAI/src/Custom/AzureCognitiveSearchChatExtensionConfiguration.Serialization.cs
+++ b/sdk/openai/Azure.AI.OpenAI/src/Custom/AzureCognitiveSearchChatExtensionConfiguration.Serialization.cs
@@ -9,6 +9,7 @@
namespace Azure.AI.OpenAI
{
[CodeGenSuppress("global::Azure.Core.IUtf8JsonSerializable.Write", typeof(Utf8JsonWriter))]
+ [CodeGenSuppress("ToRequestContent")]
public partial class AzureCognitiveSearchChatExtensionConfiguration : IUtf8JsonSerializable
{
void IUtf8JsonSerializable.Write(Utf8JsonWriter writer)
@@ -17,17 +18,18 @@ void IUtf8JsonSerializable.Write(Utf8JsonWriter writer)
writer.WritePropertyName("type"u8);
writer.WriteStringValue(Type.ToString());
- // Custom code note: everything *except* type goes into 'parameters'
+ // CUSTOM CODE NOTE: Everything *except* 'type' goes into 'parameters'
writer.WriteStartObject("parameters"u8);
writer.WritePropertyName("endpoint"u8);
writer.WriteStringValue(SearchEndpoint.AbsoluteUri);
- writer.WriteString("key"u8, SearchKey);
+ writer.WritePropertyName("key"u8);
+ writer.WriteStringValue(SearchKey);
writer.WritePropertyName("indexName"u8);
writer.WriteStringValue(IndexName);
if (Optional.IsDefined(FieldMappingOptions))
{
- writer.WritePropertyName("fieldMappings"u8);
+ writer.WritePropertyName("fieldsMapping"u8);
writer.WriteObjectValue(FieldMappingOptions);
}
if (Optional.IsDefined(DocumentCount))
@@ -57,10 +59,13 @@ void IUtf8JsonSerializable.Write(Utf8JsonWriter writer)
}
if (Optional.IsDefined(EmbeddingKey))
{
- writer.WriteString("embeddingKey"u8, EmbeddingKey);
+ writer.WritePropertyName("embeddingKey"u8);
+ writer.WriteStringValue(EmbeddingKey);
}
- // CUSTOM CODE NOTE: end of induced 'parameters' first, then the parent object
+
+ // CUSTOM CODE NOTE: Close the nested 'parameters' object first, then the parent object
writer.WriteEndObject();
+
writer.WriteEndObject();
}
}
diff --git a/sdk/openai/Azure.AI.OpenAI/src/Custom/AzureCognitiveSearchChatExtensionConfiguration.cs b/sdk/openai/Azure.AI.OpenAI/src/Custom/AzureCognitiveSearchChatExtensionConfiguration.cs
index ed8d605e2b62..b9c563000650 100644
--- a/sdk/openai/Azure.AI.OpenAI/src/Custom/AzureCognitiveSearchChatExtensionConfiguration.cs
+++ b/sdk/openai/Azure.AI.OpenAI/src/Custom/AzureCognitiveSearchChatExtensionConfiguration.cs
@@ -28,18 +28,6 @@ public override AzureChatExtensionType Type
public Uri SearchEndpoint { get; set; }
/// The name of the index to use as available in the referenced Azure Cognitive Search resource.
public string IndexName { get; set; }
- /// Customized field mapping behavior to use when interacting with the search index.
- public AzureCognitiveSearchIndexFieldMappingOptions FieldMappingOptions { get; set; }
- /// The configured top number of documents to feature for the configured query.
- public int? DocumentCount { get; set; }
- /// The query type to use with Azure Cognitive Search.
- public AzureCognitiveSearchQueryType? QueryType { get; set; }
- /// Whether queries should be restricted to use of indexed data.
- public bool? ShouldRestrictResultScope { get; set; }
- /// The additional semantic configuration for the query.
- public string SemanticConfiguration { get; set; }
- /// When using embeddings for search, specifies the resource URL from which embeddings should be retrieved.
- public Uri EmbeddingEndpoint { get; set; }
/// The API key to use with the specified Azure Cognitive Search endpoint.
private string SearchKey { get; set; }
@@ -51,8 +39,14 @@ public override AzureChatExtensionType Type
///
public AzureCognitiveSearchChatExtensionConfiguration()
{
+ // CUSTOM CODE NOTE: Empty constructors are added to options classes to facilitate property-only use; this
+ // may be reconsidered for required payload constituents in the future.
}
+ // CUSTOM CODE NOTE: Users must set the search key via the SetSearchKey method, so we make the constructor
+ // that receives it as a parameter internal and instead expose a public constructor
+ // without it.
+
/// Initializes a new instance of AzureCognitiveSearchChatExtensionConfiguration.
///
/// The type label to use when configuring Azure OpenAI chat extensions. This should typically not be changed from its
@@ -71,6 +65,27 @@ public AzureCognitiveSearchChatExtensionConfiguration(AzureChatExtensionType typ
IndexName = indexName;
}
+ /// Initializes a new instance of AzureCognitiveSearchChatExtensionConfiguration.
+ ///
+ /// The type label to use when configuring Azure OpenAI chat extensions. This should typically not be changed from its
+ /// default value for Azure Cognitive Search.
+ ///
+ /// The absolute endpoint path for the Azure Cognitive Search resource to use.
+ /// The API admin key to use with the specified Azure Cognitive Search endpoint.
+ /// The name of the index to use as available in the referenced Azure Cognitive Search resource.
+ /// , or is null.
+ internal AzureCognitiveSearchChatExtensionConfiguration(AzureChatExtensionType type, Uri searchEndpoint, string searchKey, string indexName)
+ {
+ Argument.AssertNotNull(searchEndpoint, nameof(searchEndpoint));
+ Argument.AssertNotNull(searchKey, nameof(searchKey));
+ Argument.AssertNotNull(indexName, nameof(indexName));
+
+ Type = type;
+ SearchEndpoint = searchEndpoint;
+ SearchKey = searchKey;
+ IndexName = indexName;
+ }
+
///
/// Sets the API key to use with the specified Azure Cognitive Search endpoint.
///
diff --git a/sdk/openai/Azure.AI.OpenAI/src/Custom/AzureCognitiveSearchIndexFieldMappingOptions.Serialization.cs b/sdk/openai/Azure.AI.OpenAI/src/Custom/AzureCognitiveSearchIndexFieldMappingOptions.Serialization.cs
index 6e25ada2f36c..e732b098aea3 100644
--- a/sdk/openai/Azure.AI.OpenAI/src/Custom/AzureCognitiveSearchIndexFieldMappingOptions.Serialization.cs
+++ b/sdk/openai/Azure.AI.OpenAI/src/Custom/AzureCognitiveSearchIndexFieldMappingOptions.Serialization.cs
@@ -10,52 +10,6 @@ namespace Azure.AI.OpenAI
{
public partial class AzureCognitiveSearchIndexFieldMappingOptions : IUtf8JsonSerializable
{
- void IUtf8JsonSerializable.Write(Utf8JsonWriter writer)
- {
- writer.WriteStartObject();
- if (Optional.IsDefined(TitleFieldName))
- {
- writer.WritePropertyName("titleField"u8);
- writer.WriteStringValue(TitleFieldName);
- }
- if (Optional.IsDefined(UrlFieldName))
- {
- writer.WritePropertyName("urlField"u8);
- writer.WriteStringValue(UrlFieldName);
- }
- if (Optional.IsDefined(FilepathFieldName))
- {
- writer.WritePropertyName("filepathField"u8);
- writer.WriteStringValue(FilepathFieldName);
- }
- if (Optional.IsCollectionDefined(ContentFieldNames))
- {
- writer.WritePropertyName("contentFieldNames"u8);
- writer.WriteStartArray();
- foreach (var item in ContentFieldNames)
- {
- writer.WriteStringValue(item);
- }
- writer.WriteEndArray();
- }
- if (Optional.IsDefined(ContentFieldSeparator))
- {
- writer.WritePropertyName("contentFieldSeparator"u8);
- writer.WriteStringValue(ContentFieldSeparator);
- }
- if (Optional.IsCollectionDefined(VectorFieldNames))
- {
- writer.WritePropertyName("vectorFields"u8);
- writer.WriteStartArray();
- foreach (var item in VectorFieldNames)
- {
- writer.WriteStringValue(item);
- }
- writer.WriteEndArray();
- }
- writer.WriteEndObject();
- }
-
/// Convert into a Utf8JsonRequestContent.
internal virtual RequestContent ToRequestContent()
{
diff --git a/sdk/openai/Azure.AI.OpenAI/src/Custom/ImageSize.cs b/sdk/openai/Azure.AI.OpenAI/src/Custom/ImageSize.cs
deleted file mode 100644
index 6adba31850ef..000000000000
--- a/sdk/openai/Azure.AI.OpenAI/src/Custom/ImageSize.cs
+++ /dev/null
@@ -1,14 +0,0 @@
-// Copyright (c) Microsoft Corporation. All rights reserved.
-// Licensed under the MIT License.
-
-using System;
-
-namespace Azure.AI.OpenAI
-{
- /// The desired size of the generated images. Must be one of 256x256, 512x512, or 1024x1024.
- public readonly partial struct ImageSize : IEquatable
- {
- //Temp change need until we resolve usage issue.
- //https://github.com/Azure/autorest.csharp/issues/3836
- }
-}
diff --git a/sdk/openai/Azure.AI.OpenAI/src/Generated/AudioTranscriptionOptions.cs b/sdk/openai/Azure.AI.OpenAI/src/Generated/AudioTranscriptionOptions.cs
index f5460a934f13..5f28167f63a8 100644
--- a/sdk/openai/Azure.AI.OpenAI/src/Generated/AudioTranscriptionOptions.cs
+++ b/sdk/openai/Azure.AI.OpenAI/src/Generated/AudioTranscriptionOptions.cs
@@ -16,7 +16,7 @@ public partial class AudioTranscriptionOptions
/// Initializes a new instance of AudioTranscriptionOptions.
///
/// The audio data to transcribe. This must be the binary content of a file in one of the supported media formats:
- /// flac, mp3, mp4, mpeg, mpga, m4a, ogg, wav, webm.
+ /// flac, mp3, mp4, mpeg, mpga, m4a, ogg, wav, webm.
///
/// The requested format of the transcription response data, which will influence the content and detail of the result.
///
diff --git a/sdk/openai/Azure.AI.OpenAI/src/Generated/AudioTranslationOptions.cs b/sdk/openai/Azure.AI.OpenAI/src/Generated/AudioTranslationOptions.cs
index 0cf8ee9d3ce5..3d7945be012c 100644
--- a/sdk/openai/Azure.AI.OpenAI/src/Generated/AudioTranslationOptions.cs
+++ b/sdk/openai/Azure.AI.OpenAI/src/Generated/AudioTranslationOptions.cs
@@ -16,7 +16,7 @@ public partial class AudioTranslationOptions
/// Initializes a new instance of AudioTranslationOptions.
///
/// The audio data to translate. This must be the binary content of a file in one of the supported media formats:
- /// flac, mp3, mp4, mpeg, mpga, m4a, ogg, wav, webm.
+ /// flac, mp3, mp4, mpeg, mpga, m4a, ogg, wav, webm.
///
/// The requested format of the translation response data, which will influence the content and detail of the result.
///
diff --git a/sdk/openai/Azure.AI.OpenAI/src/Generated/AzureCognitiveSearchChatExtensionConfiguration.Serialization.cs b/sdk/openai/Azure.AI.OpenAI/src/Generated/AzureCognitiveSearchChatExtensionConfiguration.Serialization.cs
new file mode 100644
index 000000000000..da83a87c0c1d
--- /dev/null
+++ b/sdk/openai/Azure.AI.OpenAI/src/Generated/AzureCognitiveSearchChatExtensionConfiguration.Serialization.cs
@@ -0,0 +1,16 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+//
+
+#nullable disable
+
+using System.Text.Json;
+using Azure.Core;
+
+namespace Azure.AI.OpenAI
+{
+ public partial class AzureCognitiveSearchChatExtensionConfiguration : IUtf8JsonSerializable
+ {
+ }
+}
diff --git a/sdk/openai/Azure.AI.OpenAI/src/Generated/AzureCognitiveSearchChatExtensionConfiguration.cs b/sdk/openai/Azure.AI.OpenAI/src/Generated/AzureCognitiveSearchChatExtensionConfiguration.cs
index ec6088c145bf..588c2679c4b6 100644
--- a/sdk/openai/Azure.AI.OpenAI/src/Generated/AzureCognitiveSearchChatExtensionConfiguration.cs
+++ b/sdk/openai/Azure.AI.OpenAI/src/Generated/AzureCognitiveSearchChatExtensionConfiguration.cs
@@ -16,27 +16,6 @@ namespace Azure.AI.OpenAI
///
public partial class AzureCognitiveSearchChatExtensionConfiguration
{
- /// Initializes a new instance of AzureCognitiveSearchChatExtensionConfiguration.
- ///
- /// The type label to use when configuring Azure OpenAI chat extensions. This should typically not be changed from its
- /// default value for Azure Cognitive Search.
- ///
- /// The absolute endpoint path for the Azure Cognitive Search resource to use.
- /// The API admin key to use with the specified Azure Cognitive Search endpoint.
- /// The name of the index to use as available in the referenced Azure Cognitive Search resource.
- /// , or is null.
- internal AzureCognitiveSearchChatExtensionConfiguration(AzureChatExtensionType type, Uri searchEndpoint, string searchKey, string indexName)
- {
- Argument.AssertNotNull(searchEndpoint, nameof(searchEndpoint));
- Argument.AssertNotNull(searchKey, nameof(searchKey));
- Argument.AssertNotNull(indexName, nameof(indexName));
-
- Type = type;
- SearchEndpoint = searchEndpoint;
- SearchKey = searchKey;
- IndexName = indexName;
- }
-
/// Initializes a new instance of AzureCognitiveSearchChatExtensionConfiguration.
///
/// The type label to use when configuring Azure OpenAI chat extensions. This should typically not be changed from its
@@ -66,5 +45,17 @@ internal AzureCognitiveSearchChatExtensionConfiguration(AzureChatExtensionType t
EmbeddingEndpoint = embeddingEndpoint;
EmbeddingKey = embeddingKey;
}
+ /// Customized field mapping behavior to use when interacting with the search index.
+ public AzureCognitiveSearchIndexFieldMappingOptions FieldMappingOptions { get; set; }
+ /// The configured top number of documents to feature for the configured query.
+ public int? DocumentCount { get; set; }
+ /// The query type to use with Azure Cognitive Search.
+ public AzureCognitiveSearchQueryType? QueryType { get; set; }
+ /// Whether queries should be restricted to use of indexed data.
+ public bool? ShouldRestrictResultScope { get; set; }
+ /// The additional semantic configuration for the query.
+ public string SemanticConfiguration { get; set; }
+ /// When using embeddings for search, specifies the resource URL from which embeddings should be retrieved.
+ public Uri EmbeddingEndpoint { get; set; }
}
}
diff --git a/sdk/openai/Azure.AI.OpenAI/src/Generated/AzureCognitiveSearchIndexFieldMappingOptions.Serialization.cs b/sdk/openai/Azure.AI.OpenAI/src/Generated/AzureCognitiveSearchIndexFieldMappingOptions.Serialization.cs
new file mode 100644
index 000000000000..2f429d249ad0
--- /dev/null
+++ b/sdk/openai/Azure.AI.OpenAI/src/Generated/AzureCognitiveSearchIndexFieldMappingOptions.Serialization.cs
@@ -0,0 +1,61 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+//
+
+#nullable disable
+
+using System.Text.Json;
+using Azure.Core;
+
+namespace Azure.AI.OpenAI
+{
+ public partial class AzureCognitiveSearchIndexFieldMappingOptions : IUtf8JsonSerializable
+ {
+ void IUtf8JsonSerializable.Write(Utf8JsonWriter writer)
+ {
+ writer.WriteStartObject();
+ if (Optional.IsDefined(TitleFieldName))
+ {
+ writer.WritePropertyName("titleField"u8);
+ writer.WriteStringValue(TitleFieldName);
+ }
+ if (Optional.IsDefined(UrlFieldName))
+ {
+ writer.WritePropertyName("urlField"u8);
+ writer.WriteStringValue(UrlFieldName);
+ }
+ if (Optional.IsDefined(FilepathFieldName))
+ {
+ writer.WritePropertyName("filepathField"u8);
+ writer.WriteStringValue(FilepathFieldName);
+ }
+ if (Optional.IsCollectionDefined(ContentFieldNames))
+ {
+ writer.WritePropertyName("contentFieldNames"u8);
+ writer.WriteStartArray();
+ foreach (var item in ContentFieldNames)
+ {
+ writer.WriteStringValue(item);
+ }
+ writer.WriteEndArray();
+ }
+ if (Optional.IsDefined(ContentFieldSeparator))
+ {
+ writer.WritePropertyName("contentFieldSeparator"u8);
+ writer.WriteStringValue(ContentFieldSeparator);
+ }
+ if (Optional.IsCollectionDefined(VectorFieldNames))
+ {
+ writer.WritePropertyName("vectorFields"u8);
+ writer.WriteStartArray();
+ foreach (var item in VectorFieldNames)
+ {
+ writer.WriteStringValue(item);
+ }
+ writer.WriteEndArray();
+ }
+ writer.WriteEndObject();
+ }
+ }
+}
diff --git a/sdk/openai/Azure.AI.OpenAI/src/Generated/AzureOpenAIModelFactory.cs b/sdk/openai/Azure.AI.OpenAI/src/Generated/AzureOpenAIModelFactory.cs
index b73982221f51..8f3cbe8191c9 100644
--- a/sdk/openai/Azure.AI.OpenAI/src/Generated/AzureOpenAIModelFactory.cs
+++ b/sdk/openai/Azure.AI.OpenAI/src/Generated/AzureOpenAIModelFactory.cs
@@ -15,36 +15,54 @@ namespace Azure.AI.OpenAI
/// Model factory for models.
public static partial class AzureOpenAIModelFactory
{
- /// Initializes a new instance of Embeddings.
- /// Embedding values for the prompts submitted in the request.
- /// Usage counts for tokens input using the embeddings API.
- /// A new instance for mocking.
- public static Embeddings Embeddings(IEnumerable data = null, EmbeddingsUsage usage = null)
+ /// Initializes a new instance of AudioTranscriptionSegment.
+ /// The 0-based index of this segment within a transcription.
+ /// The time at which this segment started relative to the beginning of the transcribed audio.
+ /// The time at which this segment ended relative to the beginning of the transcribed audio.
+ /// The transcribed text that was part of this audio segment.
+ /// The temperature score associated with this audio segment.
+ /// The average log probability associated with this audio segment.
+ /// The compression ratio of this audio segment.
+ /// The probability of no speech detection within this audio segment.
+ /// The token IDs matching the transcribed text in this audio segment.
+ ///
+ /// The seek position associated with the processing of this audio segment.
+ /// Seek positions are expressed as hundredths of seconds.
+ /// The model may process several segments from a single seek position, so while the seek position will never represent
+ /// a later time than the segment's start, the segment's start may represent a significantly later time than the
+ /// segment's associated seek position.
+ ///
+ /// A new instance for mocking.
+ public static AudioTranscriptionSegment AudioTranscriptionSegment(int id = default, TimeSpan start = default, TimeSpan end = default, string text = null, float temperature = default, float averageLogProbability = default, float compressionRatio = default, float noSpeechProbability = default, IEnumerable tokens = null, int seek = default)
{
- data ??= new List();
+ tokens ??= new List();
- return new Embeddings(data?.ToList(), usage);
+ return new AudioTranscriptionSegment(id, start, end, text, temperature, averageLogProbability, compressionRatio, noSpeechProbability, tokens?.ToList(), seek);
}
- /// Initializes a new instance of EmbeddingItem.
- ///
- /// List of embeddings value for the input prompt. These represent a measurement of the
- /// vector-based relatedness of the provided input.
+ /// Initializes a new instance of AudioTranslationSegment.
+ /// The 0-based index of this segment within a translation.
+ /// The time at which this segment started relative to the beginning of the translated audio.
+ /// The time at which this segment ended relative to the beginning of the translated audio.
+ /// The translated text that was part of this audio segment.
+ /// The temperature score associated with this audio segment.
+ /// The average log probability associated with this audio segment.
+ /// The compression ratio of this audio segment.
+ /// The probability of no speech detection within this audio segment.
+ /// The token IDs matching the translated text in this audio segment.
+ ///
+ /// The seek position associated with the processing of this audio segment.
+ /// Seek positions are expressed as hundredths of seconds.
+ /// The model may process several segments from a single seek position, so while the seek position will never represent
+ /// a later time than the segment's start, the segment's start may represent a significantly later time than the
+ /// segment's associated seek position.
///
- /// Index of the prompt to which the EmbeddingItem corresponds.
- /// A new instance for mocking.
- public static EmbeddingItem EmbeddingItem(ReadOnlyMemory embedding = default, int index = default)
+ /// A new instance for mocking.
+ public static AudioTranslationSegment AudioTranslationSegment(int id = default, TimeSpan start = default, TimeSpan end = default, string text = null, float temperature = default, float averageLogProbability = default, float compressionRatio = default, float noSpeechProbability = default, IEnumerable tokens = null, int seek = default)
{
- return new EmbeddingItem(embedding, index);
- }
+ tokens ??= new List();
- /// Initializes a new instance of EmbeddingsUsage.
- /// Number of tokens sent in the original request.
- /// Total number of tokens transacted in this request/response.
- /// A new instance for mocking.
- public static EmbeddingsUsage EmbeddingsUsage(int promptTokens = default, int totalTokens = default)
- {
- return new EmbeddingsUsage(promptTokens, totalTokens);
+ return new AudioTranslationSegment(id, start, end, text, temperature, averageLogProbability, compressionRatio, noSpeechProbability, tokens?.ToList(), seek);
}
/// Initializes a new instance of Completions.
@@ -214,54 +232,36 @@ public static ImageLocation ImageLocation(Uri url = null)
return new ImageLocation(url);
}
- /// Initializes a new instance of AudioTranscriptionSegment.
- /// The 0-based index of this segment within a transcription.
- /// The time at which this segment started relative to the beginning of the transcribed audio.
- /// The time at which this segment ended relative to the beginning of the transcribed audio.
- /// The transcribed text that was part of this audio segment.
- /// The temperature score associated with this audio segment.
- /// The average log probability associated with this audio segment.
- /// The compression ratio of this audio segment.
- /// The probability of no speech detection within this audio segment.
- /// The token IDs matching the transcribed text in this audio segment.
- ///
- /// The seek position associated with the processing of this audio segment.
- /// Seek positions are expressed as hundredths of seconds.
- /// The model may process several segments from a single seek position, so while the seek position will never represent
- /// a later time than the segment's start, the segment's start may represent a significantly later time than the
- /// segment's associated seek position.
- ///
- /// A new instance for mocking.
- public static AudioTranscriptionSegment AudioTranscriptionSegment(int id = default, TimeSpan start = default, TimeSpan end = default, string text = null, float temperature = default, float averageLogProbability = default, float compressionRatio = default, float noSpeechProbability = default, IEnumerable tokens = null, int seek = default)
+ /// Initializes a new instance of Embeddings.
+ /// Embedding values for the prompts submitted in the request.
+ /// Usage counts for tokens input using the embeddings API.
+ /// A new instance for mocking.
+ public static Embeddings Embeddings(IEnumerable data = null, EmbeddingsUsage usage = null)
{
- tokens ??= new List();
+ data ??= new List();
- return new AudioTranscriptionSegment(id, start, end, text, temperature, averageLogProbability, compressionRatio, noSpeechProbability, tokens?.ToList(), seek);
+ return new Embeddings(data?.ToList(), usage);
}
- /// Initializes a new instance of AudioTranslationSegment.
- /// The 0-based index of this segment within a translation.
- /// The time at which this segment started relative to the beginning of the translated audio.
- /// The time at which this segment ended relative to the beginning of the translated audio.
- /// The translated text that was part of this audio segment.
- /// The temperature score associated with this audio segment.
- /// The average log probability associated with this audio segment.
- /// The compression ratio of this audio segment.
- /// The probability of no speech detection within this audio segment.
- /// The token IDs matching the translated text in this audio segment.
- ///
- /// The seek position associated with the processing of this audio segment.
- /// Seek positions are expressed as hundredths of seconds.
- /// The model may process several segments from a single seek position, so while the seek position will never represent
- /// a later time than the segment's start, the segment's start may represent a significantly later time than the
- /// segment's associated seek position.
+ /// Initializes a new instance of EmbeddingItem.
+ ///
+ /// List of embeddings value for the input prompt. These represent a measurement of the
+ /// vector-based relatedness of the provided input.
///
- /// A new instance for mocking.
- public static AudioTranslationSegment AudioTranslationSegment(int id = default, TimeSpan start = default, TimeSpan end = default, string text = null, float temperature = default, float averageLogProbability = default, float compressionRatio = default, float noSpeechProbability = default, IEnumerable tokens = null, int seek = default)
+ /// Index of the prompt to which the EmbeddingItem corresponds.
+ /// A new instance for mocking.
+ public static EmbeddingItem EmbeddingItem(ReadOnlyMemory embedding = default, int index = default)
{
- tokens ??= new List();
+ return new EmbeddingItem(embedding, index);
+ }
- return new AudioTranslationSegment(id, start, end, text, temperature, averageLogProbability, compressionRatio, noSpeechProbability, tokens?.ToList(), seek);
+ /// Initializes a new instance of EmbeddingsUsage.
+ /// Number of tokens sent in the original request.
+ /// Total number of tokens transacted in this request/response.
+ /// A new instance for mocking.
+ public static EmbeddingsUsage EmbeddingsUsage(int promptTokens = default, int totalTokens = default)
+ {
+ return new EmbeddingsUsage(promptTokens, totalTokens);
}
}
}
diff --git a/sdk/openai/Azure.AI.OpenAI/tsp-location.yaml b/sdk/openai/Azure.AI.OpenAI/tsp-location.yaml
index ff3fb0810607..4a55b57e2add 100644
--- a/sdk/openai/Azure.AI.OpenAI/tsp-location.yaml
+++ b/sdk/openai/Azure.AI.OpenAI/tsp-location.yaml
@@ -1,3 +1,3 @@
directory: specification/cognitiveservices/OpenAI.Inference
-commit: a66833cbdebb0574ba012f814ab271a382a7c500
+commit: 42e2c0bcd77b0de6c523404668fa63511484d485
repo: Azure/azure-rest-api-specs