diff --git a/sdk/ai/Azure.AI.VoiceLive/CHANGELOG.md b/sdk/ai/Azure.AI.VoiceLive/CHANGELOG.md index d693b973fea1..cde2f968bbbc 100644 --- a/sdk/ai/Azure.AI.VoiceLive/CHANGELOG.md +++ b/sdk/ai/Azure.AI.VoiceLive/CHANGELOG.md @@ -3,8 +3,15 @@ ## 1.0.0-beta.2 (Unreleased) ### Features Added +Added overloads for MessageItem creation to accept a single content part. ### Breaking Changes +AudioFormat was split into InputAudioFormat and OutputAudioFormat. +Emotion classes / options dropped. +Eou and TurnDetection classes renamed. +API properties that were duration based are now TimeSpans +Methods to configure session collapsed to ConfigureSession +Renamed ToolChoiceFunctionObjectFunction to ToolChoiceFunctionObject ### Bugs Fixed diff --git a/sdk/ai/Azure.AI.VoiceLive/README.md b/sdk/ai/Azure.AI.VoiceLive/README.md index e61c2d3d87d6..5f4212cc0cb4 100644 --- a/sdk/ai/Azure.AI.VoiceLive/README.md +++ b/sdk/ai/Azure.AI.VoiceLive/README.md @@ -151,14 +151,14 @@ VoiceLiveSessionOptions sessionOptions = new() Model = model, Instructions = "You are a helpful AI assistant. 
Respond naturally and conversationally.", Voice = new AzureStandardVoice("en-US-AvaNeural"), - TurnDetection = new ServerVad() + TurnDetection = new AzureSemanticVadTurnDetection() { Threshold = 0.5f, - PrefixPaddingMs = 300, - SilenceDurationMs = 500 + PrefixPadding = TimeSpan.FromMilliseconds(300), + SilenceDuration = TimeSpan.FromMilliseconds(500) }, - InputAudioFormat = AudioFormat.Pcm16, - OutputAudioFormat = AudioFormat.Pcm16 + InputAudioFormat = InputAudioFormat.Pcm16, + OutputAudioFormat = OutputAudioFormat.Pcm16 }; // Ensure modalities include audio @@ -166,7 +166,7 @@ sessionOptions.Modalities.Clear(); sessionOptions.Modalities.Add(InputModality.Text); sessionOptions.Modalities.Add(InputModality.Audio); -await session.ConfigureConversationSessionAsync(sessionOptions).ConfigureAwait(false); +await session.ConfigureSessionAsync(sessionOptions).ConfigureAwait(false); // Process events from the session await foreach (SessionUpdate serverEvent in session.GetUpdatesAsync().ConfigureAwait(false)) @@ -196,14 +196,12 @@ VoiceLiveSessionOptions sessionOptions = new() { Temperature = 0.8f }, - TurnDetection = new AzureSemanticVad() + TurnDetection = new AzureSemanticVadTurnDetection() { - NegThreshold = 0.3f, - WindowSize = 300, RemoveFillerWords = true }, - InputAudioFormat = AudioFormat.Pcm16, - OutputAudioFormat = AudioFormat.Pcm16 + InputAudioFormat = InputAudioFormat.Pcm16, + OutputAudioFormat = OutputAudioFormat.Pcm16 }; // Ensure modalities include audio @@ -211,7 +209,7 @@ sessionOptions.Modalities.Clear(); sessionOptions.Modalities.Add(InputModality.Text); sessionOptions.Modalities.Add(InputModality.Audio); -await session.ConfigureConversationSessionAsync(sessionOptions).ConfigureAwait(false); +await session.ConfigureSessionAsync(sessionOptions).ConfigureAwait(false); ``` ### Function calling example @@ -240,8 +238,8 @@ VoiceLiveSessionOptions sessionOptions = new() Model = model, Instructions = "You are a weather assistant. 
Use the get_current_weather function to help users with weather information.", Voice = new AzureStandardVoice("en-US-AvaNeural"), - InputAudioFormat = AudioFormat.Pcm16, - OutputAudioFormat = AudioFormat.Pcm16 + InputAudioFormat = InputAudioFormat.Pcm16, + OutputAudioFormat = OutputAudioFormat.Pcm16 }; // Add the function tool @@ -252,7 +250,7 @@ sessionOptions.Modalities.Clear(); sessionOptions.Modalities.Add(InputModality.Text); sessionOptions.Modalities.Add(InputModality.Audio); -await session.ConfigureConversationSessionAsync(sessionOptions).ConfigureAwait(false); +await session.ConfigureSessionAsync(sessionOptions).ConfigureAwait(false); ``` ## Troubleshooting diff --git a/sdk/ai/Azure.AI.VoiceLive/api/Azure.AI.VoiceLive.net8.0.cs b/sdk/ai/Azure.AI.VoiceLive/api/Azure.AI.VoiceLive.net8.0.cs index 9bf096094ee4..dbc03726611e 100644 --- a/sdk/ai/Azure.AI.VoiceLive/api/Azure.AI.VoiceLive.net8.0.cs +++ b/sdk/ai/Azure.AI.VoiceLive/api/Azure.AI.VoiceLive.net8.0.cs @@ -3,6 +3,7 @@ namespace Azure.AI.VoiceLive public partial class AnimationOptions : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel { public AnimationOptions() { } + public System.TimeSpan? EmotionDetectionInterval { get { throw null; } set { } } public int? 
EmotionDetectionIntervalMs { get { throw null; } set { } } public string ModelName { get { throw null; } set { } } public System.Collections.Generic.IList Outputs { get { throw null; } } @@ -20,10 +21,10 @@ public enum AnimationOutputType { Blendshapes = 0, VisemeId = 1, - Emotion = 2, } public partial class AssistantMessageItem : Azure.AI.VoiceLive.MessageItem, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel { + public AssistantMessageItem(Azure.AI.VoiceLive.OutputTextContentPart content) : base (default(string)) { } public AssistantMessageItem(System.Collections.Generic.IEnumerable content) : base (default(string)) { } public System.Collections.Generic.IList Content { get { throw null; } } protected override Azure.AI.VoiceLive.ConversationRequestItem JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -50,26 +51,6 @@ protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } } - [System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential)] - public readonly partial struct AudioFormat : System.IEquatable - { - private readonly object _dummy; - private readonly int _dummyPrimitive; - public AudioFormat(string value) { throw null; } - public static Azure.AI.VoiceLive.AudioFormat G711Alaw { get { throw null; } } - public static Azure.AI.VoiceLive.AudioFormat G711Ulaw { get { throw null; } } - public static Azure.AI.VoiceLive.AudioFormat Pcm16 { get { throw null; } } - public bool Equals(Azure.AI.VoiceLive.AudioFormat other) { throw null; } - 
[System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] - public override bool Equals(object obj) { throw null; } - [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] - public override int GetHashCode() { throw null; } - public static bool operator ==(Azure.AI.VoiceLive.AudioFormat left, Azure.AI.VoiceLive.AudioFormat right) { throw null; } - public static implicit operator Azure.AI.VoiceLive.AudioFormat (string value) { throw null; } - public static implicit operator Azure.AI.VoiceLive.AudioFormat? (string value) { throw null; } - public static bool operator !=(Azure.AI.VoiceLive.AudioFormat left, Azure.AI.VoiceLive.AudioFormat right) { throw null; } - public override string ToString() { throw null; } - } public partial class AudioInputTranscriptionSettings : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel { public AudioInputTranscriptionSettings(Azure.AI.VoiceLive.AudioInputTranscriptionSettingsModel model) { } @@ -93,7 +74,6 @@ protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer private readonly object _dummy; private readonly int _dummyPrimitive; public AudioInputTranscriptionSettingsModel(string value) { throw null; } - public static Azure.AI.VoiceLive.AudioInputTranscriptionSettingsModel AzureFastTranscription { get { throw null; } } public static Azure.AI.VoiceLive.AudioInputTranscriptionSettingsModel AzureSpeech { get { throw null; } } public static Azure.AI.VoiceLive.AudioInputTranscriptionSettingsModel Gpt4oMiniTranscribe { get { throw null; } } public static Azure.AI.VoiceLive.AudioInputTranscriptionSettingsModel Gpt4oTranscribe { get { throw null; } } @@ -111,8 +91,8 @@ protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer } public partial class AudioNoiseReduction : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel { - public 
AudioNoiseReduction() { } - public string Type { get { throw null; } } + public AudioNoiseReduction(Azure.AI.VoiceLive.AudioNoiseReductionType type) { } + public Azure.AI.VoiceLive.AudioNoiseReductionType Type { get { throw null; } set { } } protected virtual Azure.AI.VoiceLive.AudioNoiseReduction JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } protected virtual Azure.AI.VoiceLive.AudioNoiseReduction PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -123,6 +103,12 @@ protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } } + public enum AudioNoiseReductionType + { + AzureDeepNoiseSuppression = 0, + NearField = 1, + FarField = 2, + } [System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential)] public readonly partial struct AudioTimestampType : System.IEquatable { @@ -188,35 +174,10 @@ protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter write string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } } - public partial class AzureMultilingualSemanticVad : Azure.AI.VoiceLive.TurnDetection, 
System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel - { - public AzureMultilingualSemanticVad() { } - public bool? AutoTruncate { get { throw null; } set { } } - public int? DistinctCiPhones { get { throw null; } set { } } - public Azure.AI.VoiceLive.EouDetection EndOfUtteranceDetection { get { throw null; } set { } } - public System.Collections.Generic.IList Languages { get { throw null; } } - public float? NegThreshold { get { throw null; } set { } } - public int? PrefixPaddingMs { get { throw null; } set { } } - public bool? RemoveFillerWords { get { throw null; } set { } } - public bool? RequireVowel { get { throw null; } set { } } - public int? SilenceDurationMs { get { throw null; } set { } } - public int? SpeechDurationMs { get { throw null; } set { } } - public float? Threshold { get { throw null; } set { } } - public int? WindowSize { get { throw null; } set { } } - protected override Azure.AI.VoiceLive.TurnDetection JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } - protected override Azure.AI.VoiceLive.TurnDetection PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - protected override System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - Azure.AI.VoiceLive.AzureMultilingualSemanticVad System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } - 
Azure.AI.VoiceLive.AzureMultilingualSemanticVad System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - } public partial class AzurePersonalVoice : Azure.AI.VoiceLive.AzureVoice, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel { - public AzurePersonalVoice(string name, Azure.AI.VoiceLive.AzurePersonalVoiceModel model) { } - public Azure.AI.VoiceLive.AzurePersonalVoiceModel Model { get { throw null; } set { } } + public AzurePersonalVoice(string name, Azure.AI.VoiceLive.PersonalVoiceModels model) { } + public Azure.AI.VoiceLive.PersonalVoiceModels Model { get { throw null; } set { } } public string Name { get { throw null; } set { } } public float? 
Temperature { get { throw null; } set { } } protected override Azure.AI.VoiceLive.AzureVoice JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -229,168 +190,122 @@ protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter write string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } } - public enum AzurePersonalVoiceModel - { - DragonLatestNeural = 0, - PhoenixLatestNeural = 1, - PhoenixV2Neural = 2, - } - public partial class AzurePlatformVoice : Azure.AI.VoiceLive.AzureVoice, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel - { - public AzurePlatformVoice(string name) { } - public string CustomLexiconUrl { get { throw null; } set { } } - public string Locale { get { throw null; } set { } } - public string Name { get { throw null; } set { } } - public string Pitch { get { throw null; } set { } } - public System.Collections.Generic.IList PreferLocales { get { throw null; } } - public string Rate { get { throw null; } set { } } - public string Style { get { throw null; } set { } } - public float? 
Temperature { get { throw null; } set { } } - public string Volume { get { throw null; } set { } } - protected override Azure.AI.VoiceLive.AzureVoice JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } - protected override Azure.AI.VoiceLive.AzureVoice PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - protected override System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - Azure.AI.VoiceLive.AzurePlatformVoice System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } - Azure.AI.VoiceLive.AzurePlatformVoice System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - } - public partial class AzureSemanticDetection : Azure.AI.VoiceLive.EouDetection, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + public partial class AzureSemanticEnEouDetection : Azure.AI.VoiceLive.EouDetection, System.ClientModel.Primitives.IJsonModel, 
System.ClientModel.Primitives.IPersistableModel { - public AzureSemanticDetection() { } - public bool? DisableRules { get { throw null; } set { } } - public bool? ExtraImendCheck { get { throw null; } set { } } - public float? SecondaryThreshold { get { throw null; } set { } } - public float? SecondaryTimeout { get { throw null; } set { } } - public float? SrBoost { get { throw null; } set { } } + public AzureSemanticEnEouDetection() { } public float? Threshold { get { throw null; } set { } } - public float? Timeout { get { throw null; } set { } } + public float? TimeoutMs { get { throw null; } set { } } protected override Azure.AI.VoiceLive.EouDetection JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } protected override Azure.AI.VoiceLive.EouDetection PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } protected override System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - Azure.AI.VoiceLive.AzureSemanticDetection System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } - Azure.AI.VoiceLive.AzureSemanticDetection System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions 
options) { throw null; } - System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + Azure.AI.VoiceLive.AzureSemanticEnEouDetection System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.AI.VoiceLive.AzureSemanticEnEouDetection System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } } - public partial class AzureSemanticDetectionEn : Azure.AI.VoiceLive.EouDetection, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + public partial class AzureSemanticEouDetection : Azure.AI.VoiceLive.EouDetection, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel { - public AzureSemanticDetectionEn() { } - public bool? DisableRules { get { throw null; } set { } } - public bool? ExtraImendCheck { get { throw null; } set { } } - public float? SecondaryThreshold { get { throw null; } set { } } - public float? SecondaryTimeout { get { throw null; } set { } } - public float? SrBoost { get { throw null; } set { } } + public AzureSemanticEouDetection() { } public float? Threshold { get { throw null; } set { } } - public float? 
Timeout { get { throw null; } set { } } + public System.TimeSpan Timeout { get { throw null; } set { } } protected override Azure.AI.VoiceLive.EouDetection JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } protected override Azure.AI.VoiceLive.EouDetection PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } protected override System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - Azure.AI.VoiceLive.AzureSemanticDetectionEn System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } - Azure.AI.VoiceLive.AzureSemanticDetectionEn System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + Azure.AI.VoiceLive.AzureSemanticEouDetection System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, 
System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.AI.VoiceLive.AzureSemanticEouDetection System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } + public partial class AzureSemanticEouDetectionEn + { + public AzureSemanticEouDetectionEn() { } + public System.TimeSpan Timeout { get { throw null; } set { } } + } + public partial class AzureSemanticEouDetectionMultilingual + { + public AzureSemanticEouDetectionMultilingual() { } + public System.TimeSpan Timeout { get { throw null; } set { } } } - public partial class AzureSemanticDetectionMultilingual : Azure.AI.VoiceLive.EouDetection, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + public partial class AzureSemanticMultilingualEouDetection : Azure.AI.VoiceLive.EouDetection, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel { - public AzureSemanticDetectionMultilingual() { } - public bool? DisableRules { get { throw null; } set { } } - public bool? ExtraImendCheck { get { throw null; } set { } } - public float? SecondaryThreshold { get { throw null; } set { } } - public float? SecondaryTimeout { get { throw null; } set { } } - public float? SrBoost { get { throw null; } set { } } + public AzureSemanticMultilingualEouDetection() { } public float? Threshold { get { throw null; } set { } } - public float? Timeout { get { throw null; } set { } } + public float? 
TimeoutMs { get { throw null; } set { } } protected override Azure.AI.VoiceLive.EouDetection JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } protected override Azure.AI.VoiceLive.EouDetection PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } protected override System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - Azure.AI.VoiceLive.AzureSemanticDetectionMultilingual System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } - Azure.AI.VoiceLive.AzureSemanticDetectionMultilingual System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + Azure.AI.VoiceLive.AzureSemanticMultilingualEouDetection System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, 
System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.AI.VoiceLive.AzureSemanticMultilingualEouDetection System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } } - public partial class AzureSemanticVad : Azure.AI.VoiceLive.TurnDetection, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + public partial class AzureSemanticVadEnTurnDetection : Azure.AI.VoiceLive.TurnDetection, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel { - public AzureSemanticVad() { } + public AzureSemanticVadEnTurnDetection() { } public bool? AutoTruncate { get { throw null; } set { } } - public int? DistinctCiPhones { get { throw null; } set { } } public Azure.AI.VoiceLive.EouDetection EndOfUtteranceDetection { get { throw null; } set { } } - public System.Collections.Generic.IList Languages { get { throw null; } } - public float? NegThreshold { get { throw null; } set { } } - public int? PrefixPaddingMs { get { throw null; } set { } } + public System.TimeSpan PrefixPadding { get { throw null; } set { } } public bool? RemoveFillerWords { get { throw null; } set { } } - public bool? RequireVowel { get { throw null; } set { } } - public int? SilenceDurationMs { get { throw null; } set { } } - public int? SpeechDurationMs { get { throw null; } set { } } + public System.TimeSpan SilenceDuration { get { throw null; } set { } } + public System.TimeSpan SpeechDuration { get { throw null; } set { } } public float? Threshold { get { throw null; } set { } } - public int? 
WindowSize { get { throw null; } set { } } protected override Azure.AI.VoiceLive.TurnDetection JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } protected override Azure.AI.VoiceLive.TurnDetection PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } protected override System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - Azure.AI.VoiceLive.AzureSemanticVad System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } - Azure.AI.VoiceLive.AzureSemanticVad System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + Azure.AI.VoiceLive.AzureSemanticVadEnTurnDetection System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + 
Azure.AI.VoiceLive.AzureSemanticVadEnTurnDetection System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } } - public partial class AzureSemanticVadEn : Azure.AI.VoiceLive.TurnDetection, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + public partial class AzureSemanticVadMultilingualTurnDetection : Azure.AI.VoiceLive.TurnDetection, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel { - public AzureSemanticVadEn() { } + public AzureSemanticVadMultilingualTurnDetection() { } public bool? AutoTruncate { get { throw null; } set { } } - public int? DistinctCiPhones { get { throw null; } set { } } public Azure.AI.VoiceLive.EouDetection EndOfUtteranceDetection { get { throw null; } set { } } public System.Collections.Generic.IList Languages { get { throw null; } } - public float? NegThreshold { get { throw null; } set { } } - public int? PrefixPaddingMs { get { throw null; } set { } } + public System.TimeSpan PrefixPadding { get { throw null; } set { } } public bool? RemoveFillerWords { get { throw null; } set { } } - public bool? RequireVowel { get { throw null; } set { } } - public int? SilenceDurationMs { get { throw null; } set { } } - public int? SpeechDurationMs { get { throw null; } set { } } + public System.TimeSpan SilenceDuration { get { throw null; } set { } } + public System.TimeSpan SpeechDuration { get { throw null; } set { } } public float? Threshold { get { throw null; } set { } } - public int? 
WindowSize { get { throw null; } set { } } protected override Azure.AI.VoiceLive.TurnDetection JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } protected override Azure.AI.VoiceLive.TurnDetection PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } protected override System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - Azure.AI.VoiceLive.AzureSemanticVadEn System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } - Azure.AI.VoiceLive.AzureSemanticVadEn System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + Azure.AI.VoiceLive.AzureSemanticVadMultilingualTurnDetection System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + 
Azure.AI.VoiceLive.AzureSemanticVadMultilingualTurnDetection System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } } - public partial class AzureSemanticVadServer : Azure.AI.VoiceLive.TurnDetection, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + public partial class AzureSemanticVadTurnDetection : Azure.AI.VoiceLive.TurnDetection, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel { - public AzureSemanticVadServer() { } + public AzureSemanticVadTurnDetection() { } public bool? AutoTruncate { get { throw null; } set { } } - public int? DistinctCiPhones { get { throw null; } set { } } public Azure.AI.VoiceLive.EouDetection EndOfUtteranceDetection { get { throw null; } set { } } public System.Collections.Generic.IList Languages { get { throw null; } } - public float? NegThreshold { get { throw null; } set { } } - public int? PrefixPaddingMs { get { throw null; } set { } } + public System.TimeSpan PrefixPadding { get { throw null; } set { } } public bool? RemoveFillerWords { get { throw null; } set { } } - public bool? RequireVowel { get { throw null; } set { } } - public int? SilenceDurationMs { get { throw null; } set { } } - public int? SpeechDurationMs { get { throw null; } set { } } + public System.TimeSpan SilenceDuration { get { throw null; } set { } } + public System.TimeSpan SpeechDuration { get { throw null; } set { } } public float? Threshold { get { throw null; } set { } } - public int? 
WindowSize { get { throw null; } set { } } protected override Azure.AI.VoiceLive.TurnDetection JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } protected override Azure.AI.VoiceLive.TurnDetection PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } protected override System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - Azure.AI.VoiceLive.AzureSemanticVadServer System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } - Azure.AI.VoiceLive.AzureSemanticVadServer System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + Azure.AI.VoiceLive.AzureSemanticVadTurnDetection System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + 
Azure.AI.VoiceLive.AzureSemanticVadTurnDetection System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } } public partial class AzureStandardVoice : Azure.AI.VoiceLive.AzureVoice, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel { @@ -456,21 +371,6 @@ protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } } - public partial class EmotionCandidate : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel - { - internal EmotionCandidate() { } - public float Confidence { get { throw null; } } - public string Emotion { get { throw null; } } - protected virtual Azure.AI.VoiceLive.EmotionCandidate JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } - protected virtual Azure.AI.VoiceLive.EmotionCandidate PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - protected virtual System.BinaryData 
PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - Azure.AI.VoiceLive.EmotionCandidate System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } - Azure.AI.VoiceLive.EmotionCandidate System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - } public abstract partial class EouDetection : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel { internal EouDetection() { } @@ -487,9 +387,9 @@ protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer public partial class FunctionCallItem : Azure.AI.VoiceLive.ConversationRequestItem, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel { public FunctionCallItem(string name, string callId, string arguments) { } - public string Arguments { get { throw null; } } - public string CallId { get { throw null; } } - public string Name { get { throw null; } } + public string Arguments { get { throw null; } set { } } + public string CallId { get { throw null; } set { } } + public string Name { get { throw null; } set { } } public Azure.AI.VoiceLive.ItemParamStatus? 
Status { get { throw null; } set { } } protected override Azure.AI.VoiceLive.ConversationRequestItem JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } @@ -504,8 +404,8 @@ protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter write public partial class FunctionCallOutputItem : Azure.AI.VoiceLive.ConversationRequestItem, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel { public FunctionCallOutputItem(string callId, string output) { } - public string CallId { get { throw null; } } - public string Output { get { throw null; } } + public string CallId { get { throw null; } set { } } + public string Output { get { throw null; } set { } } public Azure.AI.VoiceLive.ItemParamStatus? Status { get { throw null; } set { } } protected override Azure.AI.VoiceLive.ConversationRequestItem JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } @@ -533,25 +433,10 @@ protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } } - public partial class InputAudio : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel - { - public InputAudio() { } - public string Model { get { throw null; } 
} - public System.Collections.Generic.IList PhraseList { get { throw null; } } - protected virtual Azure.AI.VoiceLive.InputAudio JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } - protected virtual Azure.AI.VoiceLive.InputAudio PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - protected virtual System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - Azure.AI.VoiceLive.InputAudio System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } - Azure.AI.VoiceLive.InputAudio System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - } public partial class InputAudioContentPart : Azure.AI.VoiceLive.UserContentPart, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel { public InputAudioContentPart(string audio) { } - public string Audio { get { throw null; } } + public string Audio { get { throw null; } set { } } public string Transcript { get { throw null; } set { } } 
protected override Azure.AI.VoiceLive.UserContentPart JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } @@ -564,6 +449,26 @@ protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter write System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } } [System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential)] + public readonly partial struct InputAudioFormat : System.IEquatable + { + private readonly object _dummy; + private readonly int _dummyPrimitive; + public InputAudioFormat(string value) { throw null; } + public static Azure.AI.VoiceLive.InputAudioFormat G711Alaw { get { throw null; } } + public static Azure.AI.VoiceLive.InputAudioFormat G711Ulaw { get { throw null; } } + public static Azure.AI.VoiceLive.InputAudioFormat Pcm16 { get { throw null; } } + public bool Equals(Azure.AI.VoiceLive.InputAudioFormat other) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override bool Equals(object obj) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override int GetHashCode() { throw null; } + public static bool operator ==(Azure.AI.VoiceLive.InputAudioFormat left, Azure.AI.VoiceLive.InputAudioFormat right) { throw null; } + public static implicit operator Azure.AI.VoiceLive.InputAudioFormat (string value) { throw null; } + public static implicit operator Azure.AI.VoiceLive.InputAudioFormat? 
(string value) { throw null; } + public static bool operator !=(Azure.AI.VoiceLive.InputAudioFormat left, Azure.AI.VoiceLive.InputAudioFormat right) { throw null; } + public override string ToString() { throw null; } + } + [System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential)] public readonly partial struct InputModality : System.IEquatable { private readonly object _dummy; @@ -587,7 +492,7 @@ protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter write public partial class InputTextContentPart : Azure.AI.VoiceLive.UserContentPart, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel { public InputTextContentPart(string text) { } - public string Text { get { throw null; } } + public string Text { get { throw null; } set { } } protected override Azure.AI.VoiceLive.UserContentPart JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } protected override Azure.AI.VoiceLive.UserContentPart PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -620,39 +525,6 @@ public enum ItemParamStatus Completed = 0, Incomplete = 1, } - public partial class LlmVoice : Azure.AI.VoiceLive.VoiceProvider, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel - { - public LlmVoice(Azure.AI.VoiceLive.LlmVoiceName name) { } - public Azure.AI.VoiceLive.LlmVoiceName Name { get { throw null; } set { } } - public string Type { get { throw null; } } - protected virtual Azure.AI.VoiceLive.LlmVoice JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } 
- protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } - protected virtual Azure.AI.VoiceLive.LlmVoice PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - protected virtual System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - Azure.AI.VoiceLive.LlmVoice System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } - Azure.AI.VoiceLive.LlmVoice System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - } - [System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential)] - public readonly partial struct LlmVoiceName : System.IEquatable - { - private readonly object _dummy; - private readonly int _dummyPrimitive; - public LlmVoiceName(string value) { throw null; } - public static Azure.AI.VoiceLive.LlmVoiceName Cosyvoice { get { throw null; } } - public bool Equals(Azure.AI.VoiceLive.LlmVoiceName other) { throw null; } - [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] - public override bool Equals(object obj) { throw null; } - 
[System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] - public override int GetHashCode() { throw null; } - public static bool operator ==(Azure.AI.VoiceLive.LlmVoiceName left, Azure.AI.VoiceLive.LlmVoiceName right) { throw null; } - public static implicit operator Azure.AI.VoiceLive.LlmVoiceName (string value) { throw null; } - public static implicit operator Azure.AI.VoiceLive.LlmVoiceName? (string value) { throw null; } - public static bool operator !=(Azure.AI.VoiceLive.LlmVoiceName left, Azure.AI.VoiceLive.LlmVoiceName right) { throw null; } - public override string ToString() { throw null; } - } public partial class LogProbProperties : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel { internal LogProbProperties() { } @@ -736,10 +608,32 @@ protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } } + [System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential)] + public readonly partial struct OutputAudioFormat : System.IEquatable + { + private readonly object _dummy; + private readonly int _dummyPrimitive; + public OutputAudioFormat(string value) { throw null; } + public static Azure.AI.VoiceLive.OutputAudioFormat G711Alaw { get { throw null; } } + public static Azure.AI.VoiceLive.OutputAudioFormat G711Ulaw { get { throw null; } } + public static Azure.AI.VoiceLive.OutputAudioFormat Pcm16 { get { throw null; } } + public static Azure.AI.VoiceLive.OutputAudioFormat Pcm1616000hz { get { throw null; } } + public static Azure.AI.VoiceLive.OutputAudioFormat Pcm168000hz { get { throw null; } } + public bool 
Equals(Azure.AI.VoiceLive.OutputAudioFormat other) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override bool Equals(object obj) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override int GetHashCode() { throw null; } + public static bool operator ==(Azure.AI.VoiceLive.OutputAudioFormat left, Azure.AI.VoiceLive.OutputAudioFormat right) { throw null; } + public static implicit operator Azure.AI.VoiceLive.OutputAudioFormat (string value) { throw null; } + public static implicit operator Azure.AI.VoiceLive.OutputAudioFormat? (string value) { throw null; } + public static bool operator !=(Azure.AI.VoiceLive.OutputAudioFormat left, Azure.AI.VoiceLive.OutputAudioFormat right) { throw null; } + public override string ToString() { throw null; } + } public partial class OutputTextContentPart : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel { public OutputTextContentPart(string text) { } - public string Text { get { throw null; } } + public string Text { get { throw null; } set { } } public string Type { get { throw null; } } protected virtual Azure.AI.VoiceLive.OutputTextContentPart JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } @@ -766,10 +660,30 @@ protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } 
} + [System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential)] + public readonly partial struct PersonalVoiceModels : System.IEquatable + { + private readonly object _dummy; + private readonly int _dummyPrimitive; + public PersonalVoiceModels(string value) { throw null; } + public static Azure.AI.VoiceLive.PersonalVoiceModels DragonLatestNeural { get { throw null; } } + public static Azure.AI.VoiceLive.PersonalVoiceModels PhoenixLatestNeural { get { throw null; } } + public static Azure.AI.VoiceLive.PersonalVoiceModels PhoenixV2Neural { get { throw null; } } + public bool Equals(Azure.AI.VoiceLive.PersonalVoiceModels other) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override bool Equals(object obj) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override int GetHashCode() { throw null; } + public static bool operator ==(Azure.AI.VoiceLive.PersonalVoiceModels left, Azure.AI.VoiceLive.PersonalVoiceModels right) { throw null; } + public static implicit operator Azure.AI.VoiceLive.PersonalVoiceModels (string value) { throw null; } + public static implicit operator Azure.AI.VoiceLive.PersonalVoiceModels? 
(string value) { throw null; } + public static bool operator !=(Azure.AI.VoiceLive.PersonalVoiceModels left, Azure.AI.VoiceLive.PersonalVoiceModels right) { throw null; } + public override string ToString() { throw null; } + } public partial class RequestAudioContentPart : Azure.AI.VoiceLive.VoiceLiveContentPart, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel { - internal RequestAudioContentPart() { } - public string Transcript { get { throw null; } } + public RequestAudioContentPart() { } + public string Transcript { get { throw null; } set { } } protected override Azure.AI.VoiceLive.VoiceLiveContentPart JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } protected override Azure.AI.VoiceLive.VoiceLiveContentPart PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -782,8 +696,8 @@ protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter write } public partial class RequestTextContentPart : Azure.AI.VoiceLive.VoiceLiveContentPart, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel { - internal RequestTextContentPart() { } - public string Text { get { throw null; } } + public RequestTextContentPart() { } + public string Text { get { throw null; } set { } } protected override Azure.AI.VoiceLive.VoiceLiveContentPart JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } protected override 
Azure.AI.VoiceLive.VoiceLiveContentPart PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -1025,23 +939,23 @@ protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } } - public partial class ServerVad : Azure.AI.VoiceLive.TurnDetection, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + public partial class ServerVadTurnDetection : Azure.AI.VoiceLive.TurnDetection, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel { - public ServerVad() { } + public ServerVadTurnDetection() { } public bool? AutoTruncate { get { throw null; } set { } } public Azure.AI.VoiceLive.EouDetection EndOfUtteranceDetection { get { throw null; } set { } } - public int? PrefixPaddingMs { get { throw null; } set { } } - public int? SilenceDurationMs { get { throw null; } set { } } + public System.TimeSpan PrefixPadding { get { throw null; } set { } } + public System.TimeSpan SilenceDuration { get { throw null; } set { } } public float? 
Threshold { get { throw null; } set { } } protected override Azure.AI.VoiceLive.TurnDetection JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } protected override Azure.AI.VoiceLive.TurnDetection PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } protected override System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - Azure.AI.VoiceLive.ServerVad System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } - Azure.AI.VoiceLive.ServerVad System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + Azure.AI.VoiceLive.ServerVadTurnDetection System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + 
Azure.AI.VoiceLive.ServerVadTurnDetection System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } } public abstract partial class SessionUpdate : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel { @@ -1057,6 +971,20 @@ protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } } + public partial class SessionUpdateAvatarConnecting : Azure.AI.VoiceLive.SessionUpdate, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + internal SessionUpdateAvatarConnecting() { } + public string ServerSdp { get { throw null; } } + protected override Azure.AI.VoiceLive.SessionUpdate JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + protected override Azure.AI.VoiceLive.SessionUpdate PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected override System.BinaryData 
PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + Azure.AI.VoiceLive.SessionUpdateAvatarConnecting System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.AI.VoiceLive.SessionUpdateAvatarConnecting System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } public partial class SessionUpdateConversationItemCreated : Azure.AI.VoiceLive.SessionUpdate, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel { internal SessionUpdateConversationItemCreated() { } @@ -1154,7 +1082,7 @@ protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter write public partial class SessionUpdateConversationItemTruncated : Azure.AI.VoiceLive.SessionUpdate, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel { internal SessionUpdateConversationItemTruncated() { } - public int AudioEndMs { get { throw null; } } + public System.TimeSpan AudioEnd { get { throw null; } } public int ContentIndex { get { throw null; } } public override string EventId { get { throw null; } } public string ItemId { get { throw null; } } @@ -1247,7 +1175,6 @@ public partial class SessionUpdateInputAudioBufferSpeechStopped : Azure.AI.Voice { internal 
SessionUpdateInputAudioBufferSpeechStopped() { } public System.TimeSpan AudioEnd { get { throw null; } } - public int AudioEndMs { get { throw null; } } public string ItemId { get { throw null; } } protected override Azure.AI.VoiceLive.SessionUpdate JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } @@ -1303,7 +1230,7 @@ protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter write public partial class SessionUpdateResponseAnimationVisemeDelta : Azure.AI.VoiceLive.SessionUpdate, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel { internal SessionUpdateResponseAnimationVisemeDelta() { } - public int AudioOffsetMs { get { throw null; } } + public System.TimeSpan AudioOffset { get { throw null; } } public int ContentIndex { get { throw null; } } public string ItemId { get { throw null; } } public int OutputIndex { get { throw null; } } @@ -1374,8 +1301,8 @@ protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter write public partial class SessionUpdateResponseAudioTimestampDelta : Azure.AI.VoiceLive.SessionUpdate, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel { internal SessionUpdateResponseAudioTimestampDelta() { } - public int AudioDurationMs { get { throw null; } } - public int AudioOffsetMs { get { throw null; } } + public System.TimeSpan AudioDuration { get { throw null; } } + public System.TimeSpan AudioOffset { get { throw null; } } public int ContentIndex { get { throw null; } } public string ItemId { get { throw null; } } public int OutputIndex { get { throw null; } } @@ -1509,25 +1436,6 @@ protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter write string 
System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } } - public partial class SessionUpdateResponseEmotionHypothesis : Azure.AI.VoiceLive.SessionUpdate, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel - { - internal SessionUpdateResponseEmotionHypothesis() { } - public int AudioDurationMs { get { throw null; } } - public int AudioOffsetMs { get { throw null; } } - public System.Collections.Generic.IList Candidates { get { throw null; } } - public string Emotion { get { throw null; } } - public string ItemId { get { throw null; } } - public string ResponseId { get { throw null; } } - protected override Azure.AI.VoiceLive.SessionUpdate JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } - protected override Azure.AI.VoiceLive.SessionUpdate PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - protected override System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - Azure.AI.VoiceLive.SessionUpdateResponseEmotionHypothesis System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } - 
Azure.AI.VoiceLive.SessionUpdateResponseEmotionHypothesis System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - } public partial class SessionUpdateResponseFunctionCallArgumentsDelta : Azure.AI.VoiceLive.SessionUpdate, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel { internal SessionUpdateResponseFunctionCallArgumentsDelta() { } @@ -1633,20 +1541,6 @@ protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter write string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } } - public partial class SessionUpdateSessionAvatarConnecting : Azure.AI.VoiceLive.SessionUpdate, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel - { - internal SessionUpdateSessionAvatarConnecting() { } - public string ServerSdp { get { throw null; } } - protected override Azure.AI.VoiceLive.SessionUpdate JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } - protected override Azure.AI.VoiceLive.SessionUpdate PersistableModelCreateCore(System.BinaryData data, 
System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - protected override System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - Azure.AI.VoiceLive.SessionUpdateSessionAvatarConnecting System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } - Azure.AI.VoiceLive.SessionUpdateSessionAvatarConnecting System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - } public partial class SessionUpdateSessionCreated : Azure.AI.VoiceLive.SessionUpdate, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel { internal SessionUpdateSessionCreated() { } @@ -1677,6 +1571,7 @@ protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter write } public partial class SystemMessageItem : Azure.AI.VoiceLive.MessageItem, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel { + public SystemMessageItem(Azure.AI.VoiceLive.InputTextContentPart content) : base (default(string)) { } public SystemMessageItem(System.Collections.Generic.IEnumerable content) : base (default(string)) { } public System.Collections.Generic.IList Content { get { throw null; } } protected override Azure.AI.VoiceLive.ConversationRequestItem 
JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -1751,6 +1646,7 @@ protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer } public partial class UserMessageItem : Azure.AI.VoiceLive.MessageItem, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel { + public UserMessageItem(Azure.AI.VoiceLive.UserContentPart content) : base (default(string)) { } public UserMessageItem(System.Collections.Generic.IEnumerable content) : base (default(string)) { } public System.Collections.Generic.IList Content { get { throw null; } } protected override Azure.AI.VoiceLive.ConversationRequestItem JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -1881,39 +1777,33 @@ protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter write } public static partial class VoiceLiveModelFactory { - public static Azure.AI.VoiceLive.AnimationOptions AnimationOptions(string modelName = null, System.Collections.Generic.IEnumerable outputs = null, int? emotionDetectionIntervalMs = default(int?)) { throw null; } + public static Azure.AI.VoiceLive.AnimationOptions AnimationOptions(string modelName = null, System.Collections.Generic.IEnumerable outputs = null) { throw null; } public static Azure.AI.VoiceLive.AssistantMessageItem AssistantMessageItem(string id = null, Azure.AI.VoiceLive.ItemParamStatus? 
status = default(Azure.AI.VoiceLive.ItemParamStatus?), System.Collections.Generic.IEnumerable content = null) { throw null; } public static Azure.AI.VoiceLive.AudioEchoCancellation AudioEchoCancellation(string type = null) { throw null; } public static Azure.AI.VoiceLive.AudioInputTranscriptionSettings AudioInputTranscriptionSettings(Azure.AI.VoiceLive.AudioInputTranscriptionSettingsModel model = default(Azure.AI.VoiceLive.AudioInputTranscriptionSettingsModel), string language = null, System.Collections.Generic.IDictionary customSpeech = null, System.Collections.Generic.IEnumerable phraseList = null) { throw null; } - public static Azure.AI.VoiceLive.AudioNoiseReduction AudioNoiseReduction(string type = null) { throw null; } + public static Azure.AI.VoiceLive.AudioNoiseReduction AudioNoiseReduction(Azure.AI.VoiceLive.AudioNoiseReductionType type = Azure.AI.VoiceLive.AudioNoiseReductionType.AzureDeepNoiseSuppression) { throw null; } public static Azure.AI.VoiceLive.AvatarConfiguration AvatarConfiguration(System.Collections.Generic.IEnumerable iceServers = null, string character = null, string style = null, bool customized = false, Azure.AI.VoiceLive.VideoParams video = null) { throw null; } public static Azure.AI.VoiceLive.AzureCustomVoice AzureCustomVoice(string name = null, string endpointId = null, float? temperature = default(float?), string customLexiconUri = null, System.Collections.Generic.IEnumerable preferLocales = null, string locale = null, string style = null, string pitch = null, string rate = null, string volume = null) { throw null; } - public static Azure.AI.VoiceLive.AzureMultilingualSemanticVad AzureMultilingualSemanticVad(float? threshold = default(float?), int? prefixPaddingMs = default(int?), int? silenceDurationMs = default(int?), Azure.AI.VoiceLive.EouDetection endOfUtteranceDetection = null, float? negThreshold = default(float?), int? speechDurationMs = default(int?), int? windowSize = default(int?), int? 
distinctCiPhones = default(int?), bool? requireVowel = default(bool?), bool? removeFillerWords = default(bool?), System.Collections.Generic.IEnumerable languages = null, bool? autoTruncate = default(bool?)) { throw null; } - public static Azure.AI.VoiceLive.AzurePersonalVoice AzurePersonalVoice(string name = null, float? temperature = default(float?), Azure.AI.VoiceLive.AzurePersonalVoiceModel model = Azure.AI.VoiceLive.AzurePersonalVoiceModel.DragonLatestNeural) { throw null; } - public static Azure.AI.VoiceLive.AzurePlatformVoice AzurePlatformVoice(string name = null, float? temperature = default(float?), string customLexiconUrl = null, System.Collections.Generic.IEnumerable preferLocales = null, string locale = null, string style = null, string pitch = null, string rate = null, string volume = null) { throw null; } - public static Azure.AI.VoiceLive.AzureSemanticDetection AzureSemanticDetection(float? threshold = default(float?), float? timeout = default(float?), float? secondaryThreshold = default(float?), float? secondaryTimeout = default(float?), bool? disableRules = default(bool?), float? srBoost = default(float?), bool? extraImendCheck = default(bool?)) { throw null; } - public static Azure.AI.VoiceLive.AzureSemanticDetectionEn AzureSemanticDetectionEn(float? threshold = default(float?), float? timeout = default(float?), float? secondaryThreshold = default(float?), float? secondaryTimeout = default(float?), bool? disableRules = default(bool?), float? srBoost = default(float?), bool? extraImendCheck = default(bool?)) { throw null; } - public static Azure.AI.VoiceLive.AzureSemanticDetectionMultilingual AzureSemanticDetectionMultilingual(float? threshold = default(float?), float? timeout = default(float?), float? secondaryThreshold = default(float?), float? secondaryTimeout = default(float?), bool? disableRules = default(bool?), float? srBoost = default(float?), bool? 
extraImendCheck = default(bool?)) { throw null; } - public static Azure.AI.VoiceLive.AzureSemanticVad AzureSemanticVad(float? threshold = default(float?), int? prefixPaddingMs = default(int?), int? silenceDurationMs = default(int?), Azure.AI.VoiceLive.EouDetection endOfUtteranceDetection = null, float? negThreshold = default(float?), int? speechDurationMs = default(int?), int? windowSize = default(int?), int? distinctCiPhones = default(int?), bool? requireVowel = default(bool?), bool? removeFillerWords = default(bool?), System.Collections.Generic.IEnumerable languages = null, bool? autoTruncate = default(bool?)) { throw null; } - public static Azure.AI.VoiceLive.AzureSemanticVadEn AzureSemanticVadEn(float? threshold = default(float?), int? prefixPaddingMs = default(int?), int? silenceDurationMs = default(int?), Azure.AI.VoiceLive.EouDetection endOfUtteranceDetection = null, float? negThreshold = default(float?), int? speechDurationMs = default(int?), int? windowSize = default(int?), int? distinctCiPhones = default(int?), bool? requireVowel = default(bool?), bool? removeFillerWords = default(bool?), System.Collections.Generic.IEnumerable languages = null, bool? autoTruncate = default(bool?)) { throw null; } - public static Azure.AI.VoiceLive.AzureSemanticVadServer AzureSemanticVadServer(float? threshold = default(float?), int? prefixPaddingMs = default(int?), int? silenceDurationMs = default(int?), Azure.AI.VoiceLive.EouDetection endOfUtteranceDetection = null, float? negThreshold = default(float?), int? speechDurationMs = default(int?), int? windowSize = default(int?), int? distinctCiPhones = default(int?), bool? requireVowel = default(bool?), bool? removeFillerWords = default(bool?), System.Collections.Generic.IEnumerable languages = null, bool? autoTruncate = default(bool?)) { throw null; } + public static Azure.AI.VoiceLive.AzurePersonalVoice AzurePersonalVoice(string name = null, float? 
temperature = default(float?), Azure.AI.VoiceLive.PersonalVoiceModels model = default(Azure.AI.VoiceLive.PersonalVoiceModels)) { throw null; } + public static Azure.AI.VoiceLive.AzureSemanticEnEouDetection AzureSemanticEnEouDetection(float? threshold = default(float?), float? timeoutMs = default(float?)) { throw null; } + public static Azure.AI.VoiceLive.AzureSemanticEouDetection AzureSemanticEouDetection(float? threshold = default(float?), float? timeoutMs = default(float?)) { throw null; } + public static Azure.AI.VoiceLive.AzureSemanticMultilingualEouDetection AzureSemanticMultilingualEouDetection(float? threshold = default(float?), float? timeoutMs = default(float?)) { throw null; } + public static Azure.AI.VoiceLive.AzureSemanticVadEnTurnDetection AzureSemanticVadEnTurnDetection(float? threshold = default(float?), int? prefixPaddingMs = default(int?), int? silenceDurationMs = default(int?), Azure.AI.VoiceLive.EouDetection endOfUtteranceDetection = null, int? speechDurationMs = default(int?), bool? removeFillerWords = default(bool?), bool? autoTruncate = default(bool?)) { throw null; } + public static Azure.AI.VoiceLive.AzureSemanticVadMultilingualTurnDetection AzureSemanticVadMultilingualTurnDetection(float? threshold = default(float?), int? prefixPaddingMs = default(int?), int? silenceDurationMs = default(int?), Azure.AI.VoiceLive.EouDetection endOfUtteranceDetection = null, int? speechDurationMs = default(int?), bool? removeFillerWords = default(bool?), System.Collections.Generic.IEnumerable languages = null, bool? autoTruncate = default(bool?)) { throw null; } + public static Azure.AI.VoiceLive.AzureSemanticVadTurnDetection AzureSemanticVadTurnDetection(float? threshold = default(float?), int? prefixPaddingMs = default(int?), int? silenceDurationMs = default(int?), Azure.AI.VoiceLive.EouDetection endOfUtteranceDetection = null, int? speechDurationMs = default(int?), bool? 
removeFillerWords = default(bool?), System.Collections.Generic.IEnumerable languages = null, bool? autoTruncate = default(bool?)) { throw null; } public static Azure.AI.VoiceLive.AzureStandardVoice AzureStandardVoice(string name = null, float? temperature = default(float?), string customLexiconUrl = null, System.Collections.Generic.IEnumerable preferLocales = null, string locale = null, string style = null, string pitch = null, string rate = null, string volume = null) { throw null; } public static Azure.AI.VoiceLive.AzureVoice AzureVoice(string type = null) { throw null; } public static Azure.AI.VoiceLive.CachedTokenDetails CachedTokenDetails(int textTokens = 0, int audioTokens = 0) { throw null; } public static Azure.AI.VoiceLive.ConversationRequestItem ConversationRequestItem(string type = null, string id = null) { throw null; } - public static Azure.AI.VoiceLive.EmotionCandidate EmotionCandidate(string emotion = null, float confidence = 0f) { throw null; } public static Azure.AI.VoiceLive.EouDetection EouDetection(string model = null) { throw null; } public static Azure.AI.VoiceLive.FunctionCallItem FunctionCallItem(string id = null, string name = null, string callId = null, string arguments = null, Azure.AI.VoiceLive.ItemParamStatus? status = default(Azure.AI.VoiceLive.ItemParamStatus?)) { throw null; } public static Azure.AI.VoiceLive.FunctionCallOutputItem FunctionCallOutputItem(string id = null, string callId = null, string output = null, Azure.AI.VoiceLive.ItemParamStatus? 
status = default(Azure.AI.VoiceLive.ItemParamStatus?)) { throw null; } public static Azure.AI.VoiceLive.IceServer IceServer(System.Collections.Generic.IEnumerable uris = null, string username = null, string credential = null) { throw null; } - public static Azure.AI.VoiceLive.InputAudio InputAudio(string model = null, System.Collections.Generic.IEnumerable phraseList = null) { throw null; } public static Azure.AI.VoiceLive.InputAudioContentPart InputAudioContentPart(string audio = null, string transcript = null) { throw null; } public static Azure.AI.VoiceLive.InputTextContentPart InputTextContentPart(string text = null) { throw null; } public static Azure.AI.VoiceLive.InputTokenDetails InputTokenDetails(int cachedTokens = 0, int textTokens = 0, int audioTokens = 0, Azure.AI.VoiceLive.CachedTokenDetails cachedTokensDetails = null) { throw null; } - public static Azure.AI.VoiceLive.LlmVoice LlmVoice(string type = null, Azure.AI.VoiceLive.LlmVoiceName name = default(Azure.AI.VoiceLive.LlmVoiceName)) { throw null; } public static Azure.AI.VoiceLive.LogProbProperties LogProbProperties(string token = null, float logprob = 0f, System.BinaryData bytes = null) { throw null; } public static Azure.AI.VoiceLive.MessageItem MessageItem(string id = null, Azure.AI.VoiceLive.ItemParamStatus? 
status = default(Azure.AI.VoiceLive.ItemParamStatus?)) { throw null; } - public static Azure.AI.VoiceLive.NoTurnDetection NoTurnDetection() { throw null; } public static Azure.AI.VoiceLive.OpenAIVoice OpenAIVoice(string type = null, Azure.AI.VoiceLive.OAIVoice name = default(Azure.AI.VoiceLive.OAIVoice)) { throw null; } public static Azure.AI.VoiceLive.OutputTextContentPart OutputTextContentPart(string type = null, string text = null) { throw null; } public static Azure.AI.VoiceLive.OutputTokenDetails OutputTokenDetails(int textTokens = 0, int audioTokens = 0) { throw null; } @@ -1931,8 +1821,9 @@ public static partial class VoiceLiveModelFactory public static Azure.AI.VoiceLive.ResponseStatusDetails ResponseStatusDetails(string type = null) { throw null; } public static Azure.AI.VoiceLive.ResponseTextContentPart ResponseTextContentPart(string text = null) { throw null; } public static Azure.AI.VoiceLive.ResponseTokenStatistics ResponseTokenStatistics(int totalTokens = 0, int inputTokens = 0, int outputTokens = 0, Azure.AI.VoiceLive.InputTokenDetails inputTokenDetails = null, Azure.AI.VoiceLive.OutputTokenDetails outputTokenDetails = null) { throw null; } - public static Azure.AI.VoiceLive.ServerVad ServerVad(float? threshold = default(float?), int? prefixPaddingMs = default(int?), int? silenceDurationMs = default(int?), Azure.AI.VoiceLive.EouDetection endOfUtteranceDetection = null, bool? autoTruncate = default(bool?)) { throw null; } + public static Azure.AI.VoiceLive.ServerVadTurnDetection ServerVadTurnDetection(float? threshold = default(float?), int? prefixPaddingMs = default(int?), int? silenceDurationMs = default(int?), Azure.AI.VoiceLive.EouDetection endOfUtteranceDetection = null, bool? 
autoTruncate = default(bool?)) { throw null; } public static Azure.AI.VoiceLive.SessionUpdate SessionUpdate(string type = null, string eventId = null) { throw null; } + public static Azure.AI.VoiceLive.SessionUpdateAvatarConnecting SessionUpdateAvatarConnecting(string eventId = null, string serverSdp = null) { throw null; } public static Azure.AI.VoiceLive.SessionUpdateConversationItemCreated SessionUpdateConversationItemCreated(string eventId = null, string previousItemId = null, Azure.AI.VoiceLive.ResponseItem item = null) { throw null; } public static Azure.AI.VoiceLive.SessionUpdateConversationItemDeleted SessionUpdateConversationItemDeleted(string itemId = null, string eventId = null) { throw null; } public static Azure.AI.VoiceLive.SessionUpdateConversationItemInputAudioTranscriptionCompleted SessionUpdateConversationItemInputAudioTranscriptionCompleted(string eventId = null, string itemId = null, int contentIndex = 0, string transcript = null) { throw null; } @@ -1960,14 +1851,12 @@ public static partial class VoiceLiveModelFactory public static Azure.AI.VoiceLive.SessionUpdateResponseContentPartDone SessionUpdateResponseContentPartDone(string eventId = null, string responseId = null, string itemId = null, int outputIndex = 0, int contentIndex = 0, Azure.AI.VoiceLive.VoiceLiveContentPart part = null) { throw null; } public static Azure.AI.VoiceLive.SessionUpdateResponseCreated SessionUpdateResponseCreated(string eventId = null, Azure.AI.VoiceLive.VoiceLiveResponse response = null) { throw null; } public static Azure.AI.VoiceLive.SessionUpdateResponseDone SessionUpdateResponseDone(string eventId = null, Azure.AI.VoiceLive.VoiceLiveResponse response = null) { throw null; } - public static Azure.AI.VoiceLive.SessionUpdateResponseEmotionHypothesis SessionUpdateResponseEmotionHypothesis(string eventId = null, string emotion = null, System.Collections.Generic.IEnumerable candidates = null, int audioOffsetMs = 0, int audioDurationMs = 0, string responseId = null, 
string itemId = null) { throw null; } public static Azure.AI.VoiceLive.SessionUpdateResponseFunctionCallArgumentsDelta SessionUpdateResponseFunctionCallArgumentsDelta(string eventId = null, string responseId = null, string itemId = null, int outputIndex = 0, string callId = null, string delta = null) { throw null; } public static Azure.AI.VoiceLive.SessionUpdateResponseFunctionCallArgumentsDone SessionUpdateResponseFunctionCallArgumentsDone(string eventId = null, string responseId = null, string itemId = null, int outputIndex = 0, string callId = null, string arguments = null, string name = null) { throw null; } public static Azure.AI.VoiceLive.SessionUpdateResponseOutputItemAdded SessionUpdateResponseOutputItemAdded(string eventId = null, string responseId = null, int outputIndex = 0, Azure.AI.VoiceLive.ResponseItem item = null) { throw null; } public static Azure.AI.VoiceLive.SessionUpdateResponseOutputItemDone SessionUpdateResponseOutputItemDone(string eventId = null, string responseId = null, int outputIndex = 0, Azure.AI.VoiceLive.ResponseItem item = null) { throw null; } public static Azure.AI.VoiceLive.SessionUpdateResponseTextDelta SessionUpdateResponseTextDelta(string eventId = null, string responseId = null, string itemId = null, int outputIndex = 0, int contentIndex = 0, string delta = null) { throw null; } public static Azure.AI.VoiceLive.SessionUpdateResponseTextDone SessionUpdateResponseTextDone(string eventId = null, string responseId = null, string itemId = null, int outputIndex = 0, int contentIndex = 0, string text = null) { throw null; } - public static Azure.AI.VoiceLive.SessionUpdateSessionAvatarConnecting SessionUpdateSessionAvatarConnecting(string eventId = null, string serverSdp = null) { throw null; } public static Azure.AI.VoiceLive.SessionUpdateSessionCreated SessionUpdateSessionCreated(string eventId = null, Azure.AI.VoiceLive.VoiceLiveSessionResponse session = null) { throw null; } public static 
Azure.AI.VoiceLive.SessionUpdateSessionUpdated SessionUpdateSessionUpdated(string eventId = null, Azure.AI.VoiceLive.VoiceLiveSessionResponse session = null) { throw null; } public static Azure.AI.VoiceLive.SystemMessageItem SystemMessageItem(string id = null, Azure.AI.VoiceLive.ItemParamStatus? status = default(Azure.AI.VoiceLive.ItemParamStatus?), System.Collections.Generic.IEnumerable content = null) { throw null; } @@ -1980,8 +1869,8 @@ public static partial class VoiceLiveModelFactory public static Azure.AI.VoiceLive.VoiceLiveContentPart VoiceLiveContentPart(string type = null) { throw null; } public static Azure.AI.VoiceLive.VoiceLiveErrorDetails VoiceLiveErrorDetails(string code = null, string message = null, string param = null, string type = null, string eventId = null) { throw null; } public static Azure.AI.VoiceLive.VoiceLiveFunctionDefinition VoiceLiveFunctionDefinition(string name = null, string description = null, System.BinaryData parameters = null) { throw null; } - public static Azure.AI.VoiceLive.VoiceLiveSessionOptions VoiceLiveSessionOptions(string model = null, System.Collections.Generic.IEnumerable modalities = null, Azure.AI.VoiceLive.AnimationOptions animation = null, string instructions = null, Azure.AI.VoiceLive.InputAudio inputAudio = null, int? inputAudioSamplingRate = default(int?), Azure.AI.VoiceLive.AudioFormat? inputAudioFormat = default(Azure.AI.VoiceLive.AudioFormat?), Azure.AI.VoiceLive.AudioFormat? outputAudioFormat = default(Azure.AI.VoiceLive.AudioFormat?), Azure.AI.VoiceLive.TurnDetection turnDetection = null, Azure.AI.VoiceLive.AudioNoiseReduction inputAudioNoiseReduction = null, Azure.AI.VoiceLive.AudioEchoCancellation inputAudioEchoCancellation = null, Azure.AI.VoiceLive.AvatarConfiguration avatar = null, Azure.AI.VoiceLive.AudioInputTranscriptionSettings inputAudioTranscription = null, System.Collections.Generic.IEnumerable outputAudioTimestampTypes = null, System.Collections.Generic.IEnumerable tools = null, float? 
temperature = default(float?), Azure.AI.VoiceLive.RespondingAgentOptions agent = null, System.BinaryData voiceInternal = null, System.BinaryData maxResponseOutputTokens = null, System.BinaryData toolChoice = null) { throw null; } - public static Azure.AI.VoiceLive.VoiceLiveSessionResponse VoiceLiveSessionResponse(string model = null, System.Collections.Generic.IEnumerable modalities = null, Azure.AI.VoiceLive.AnimationOptions animation = null, string instructions = null, Azure.AI.VoiceLive.InputAudio inputAudio = null, int? inputAudioSamplingRate = default(int?), Azure.AI.VoiceLive.AudioFormat? inputAudioFormat = default(Azure.AI.VoiceLive.AudioFormat?), Azure.AI.VoiceLive.AudioFormat? outputAudioFormat = default(Azure.AI.VoiceLive.AudioFormat?), Azure.AI.VoiceLive.TurnDetection turnDetection = null, Azure.AI.VoiceLive.AudioNoiseReduction inputAudioNoiseReduction = null, Azure.AI.VoiceLive.AudioEchoCancellation inputAudioEchoCancellation = null, Azure.AI.VoiceLive.AvatarConfiguration avatar = null, Azure.AI.VoiceLive.AudioInputTranscriptionSettings inputAudioTranscription = null, System.Collections.Generic.IEnumerable outputAudioTimestampTypes = null, System.Collections.Generic.IEnumerable tools = null, float? temperature = default(float?), Azure.AI.VoiceLive.RespondingAgentOptions agent = null, System.BinaryData voiceInternal = null, System.BinaryData maxResponseOutputTokens = null, System.BinaryData toolChoice = null, string id = null) { throw null; } + public static Azure.AI.VoiceLive.VoiceLiveSessionOptions VoiceLiveSessionOptions(string model = null, System.Collections.Generic.IEnumerable modalities = null, Azure.AI.VoiceLive.AnimationOptions animation = null, string instructions = null, int? inputAudioSamplingRate = default(int?), Azure.AI.VoiceLive.InputAudioFormat? inputAudioFormat = default(Azure.AI.VoiceLive.InputAudioFormat?), Azure.AI.VoiceLive.OutputAudioFormat? 
outputAudioFormat = default(Azure.AI.VoiceLive.OutputAudioFormat?), Azure.AI.VoiceLive.AudioNoiseReduction inputAudioNoiseReduction = null, Azure.AI.VoiceLive.AudioEchoCancellation inputAudioEchoCancellation = null, Azure.AI.VoiceLive.AvatarConfiguration avatar = null, Azure.AI.VoiceLive.AudioInputTranscriptionSettings inputAudioTranscription = null, System.Collections.Generic.IEnumerable outputAudioTimestampTypes = null, System.Collections.Generic.IEnumerable tools = null, float? temperature = default(float?), System.BinaryData voiceInternal = null, System.BinaryData maxResponseOutputTokens = null, System.BinaryData toolChoice = null, System.BinaryData turnDetection = null) { throw null; } + public static Azure.AI.VoiceLive.VoiceLiveSessionResponse VoiceLiveSessionResponse(string model = null, System.Collections.Generic.IEnumerable modalities = null, Azure.AI.VoiceLive.AnimationOptions animation = null, string instructions = null, int? inputAudioSamplingRate = default(int?), Azure.AI.VoiceLive.InputAudioFormat? inputAudioFormat = default(Azure.AI.VoiceLive.InputAudioFormat?), Azure.AI.VoiceLive.OutputAudioFormat? outputAudioFormat = default(Azure.AI.VoiceLive.OutputAudioFormat?), Azure.AI.VoiceLive.AudioNoiseReduction inputAudioNoiseReduction = null, Azure.AI.VoiceLive.AudioEchoCancellation inputAudioEchoCancellation = null, Azure.AI.VoiceLive.AvatarConfiguration avatar = null, Azure.AI.VoiceLive.AudioInputTranscriptionSettings inputAudioTranscription = null, System.Collections.Generic.IEnumerable outputAudioTimestampTypes = null, System.Collections.Generic.IEnumerable tools = null, float? 
temperature = default(float?), System.BinaryData voiceInternal = null, System.BinaryData maxResponseOutputTokens = null, System.BinaryData toolChoice = null, System.BinaryData turnDetection = null, Azure.AI.VoiceLive.RespondingAgentOptions agent = null, string id = null) { throw null; } public static Azure.AI.VoiceLive.VoiceLiveToolDefinition VoiceLiveToolDefinition(string type = null) { throw null; } } public partial class VoiceLiveResponse : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel @@ -1993,7 +1882,7 @@ internal VoiceLiveResponse() { } public Azure.AI.VoiceLive.SessionUpdateModality Modalities { get { throw null; } } public string Object { get { throw null; } } public System.Collections.Generic.IList Output { get { throw null; } } - public Azure.AI.VoiceLive.AudioFormat? OutputAudioFormat { get { throw null; } } + public Azure.AI.VoiceLive.OutputAudioFormat? OutputAudioFormat { get { throw null; } } public Azure.AI.VoiceLive.VoiceLiveResponseStatus? Status { get { throw null; } } public Azure.AI.VoiceLive.ResponseStatusDetails StatusDetails { get { throw null; } } public float? 
Temperature { get { throw null; } } @@ -2029,13 +1918,27 @@ protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer public static bool operator !=(Azure.AI.VoiceLive.VoiceLiveResponseItemStatus left, Azure.AI.VoiceLive.VoiceLiveResponseItemStatus right) { throw null; } public override string ToString() { throw null; } } - public enum VoiceLiveResponseStatus + [System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential)] + public readonly partial struct VoiceLiveResponseStatus : System.IEquatable { - Completed = 0, - Cancelled = 1, - Failed = 2, - Incomplete = 3, - InProgress = 4, + private readonly object _dummy; + private readonly int _dummyPrimitive; + public VoiceLiveResponseStatus(string value) { throw null; } + public static Azure.AI.VoiceLive.VoiceLiveResponseStatus Cancelled { get { throw null; } } + public static Azure.AI.VoiceLive.VoiceLiveResponseStatus Completed { get { throw null; } } + public static Azure.AI.VoiceLive.VoiceLiveResponseStatus Failed { get { throw null; } } + public static Azure.AI.VoiceLive.VoiceLiveResponseStatus Incomplete { get { throw null; } } + public static Azure.AI.VoiceLive.VoiceLiveResponseStatus InProgress { get { throw null; } } + public bool Equals(Azure.AI.VoiceLive.VoiceLiveResponseStatus other) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override bool Equals(object obj) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override int GetHashCode() { throw null; } + public static bool operator ==(Azure.AI.VoiceLive.VoiceLiveResponseStatus left, Azure.AI.VoiceLive.VoiceLiveResponseStatus right) { throw null; } + public static implicit operator Azure.AI.VoiceLive.VoiceLiveResponseStatus (string value) { throw null; } + public static implicit operator Azure.AI.VoiceLive.VoiceLiveResponseStatus? 
(string value) { throw null; } + public static bool operator !=(Azure.AI.VoiceLive.VoiceLiveResponseStatus left, Azure.AI.VoiceLive.VoiceLiveResponseStatus right) { throw null; } + public override string ToString() { throw null; } } public partial class VoiceLiveSession : System.IAsyncDisposable, System.IDisposable { @@ -2043,7 +1946,6 @@ protected internal VoiceLiveSession(Azure.AI.VoiceLive.VoiceLiveClient parentCli protected internal VoiceLiveSession(Azure.AI.VoiceLive.VoiceLiveClient parentClient, System.Uri endpoint, Azure.Core.TokenCredential credential) { } public System.Net.WebSockets.WebSocketState ConnectionState { get { throw null; } } public bool IsConnected { get { throw null; } } - public System.Net.WebSockets.WebSocket WebSocket { get { throw null; } protected set { } } public virtual void AddItem(Azure.AI.VoiceLive.ConversationRequestItem item, string previousItemId, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { } public virtual void AddItem(Azure.AI.VoiceLive.ConversationRequestItem item, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { } public virtual System.Threading.Tasks.Task AddItemAsync(Azure.AI.VoiceLive.ConversationRequestItem item, string previousItemId, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } @@ -2064,12 +1966,8 @@ protected internal VoiceLiveSession(Azure.AI.VoiceLive.VoiceLiveClient parentCli public virtual System.Threading.Tasks.Task CloseAsync(System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } public virtual void CommitInputAudio(System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { } public virtual System.Threading.Tasks.Task CommitInputAudioAsync(System.Threading.CancellationToken cancellationToken = 
default(System.Threading.CancellationToken)) { throw null; } - public virtual void ConfigureConversationSession(Azure.AI.VoiceLive.VoiceLiveSessionOptions sessionOptions, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { } - public virtual System.Threading.Tasks.Task ConfigureConversationSessionAsync(Azure.AI.VoiceLive.VoiceLiveSessionOptions sessionOptions, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } public virtual void ConfigureSession(Azure.AI.VoiceLive.VoiceLiveSessionOptions sessionOptions, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { } public virtual System.Threading.Tasks.Task ConfigureSessionAsync(Azure.AI.VoiceLive.VoiceLiveSessionOptions sessionOptions, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } - public virtual void ConfigureTranscriptionSession(Azure.AI.VoiceLive.VoiceLiveSessionOptions sessionOptions, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { } - public virtual System.Threading.Tasks.Task ConfigureTranscriptionSessionAsync(Azure.AI.VoiceLive.VoiceLiveSessionOptions sessionOptions, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } protected internal virtual void Connect(System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { } protected internal virtual System.Threading.Tasks.Task ConnectAsync(System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } public virtual void ConnectAvatar(string clientSdp, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { } @@ -2106,19 +2004,17 @@ protected virtual void Dispose(bool disposing) { } 
public virtual System.Threading.Tasks.Task StartResponseAsync(Azure.AI.VoiceLive.VoiceLiveSessionOptions responseOptions, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } public virtual System.Threading.Tasks.Task StartResponseAsync(string additionalInstructions, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } public virtual System.Threading.Tasks.Task StartResponseAsync(System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } - public virtual void TruncateConversation(string itemId, int contentIndex, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { } - public virtual System.Threading.Tasks.Task TruncateConversationAsync(string itemId, int contentIndex, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual void TruncateConversation(string itemId, int contentIndex, System.TimeSpan audioEnd = default(System.TimeSpan), System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { } + public virtual System.Threading.Tasks.Task TruncateConversationAsync(string itemId, int contentIndex, System.TimeSpan audioEnd = default(System.TimeSpan), System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } public System.Threading.Tasks.Task WaitForUpdateAsync(System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) where T : Azure.AI.VoiceLive.SessionUpdate { throw null; } } public partial class VoiceLiveSessionOptions : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel { public VoiceLiveSessionOptions() { } - public Azure.AI.VoiceLive.RespondingAgentOptions Agent { get { throw null; 
} set { } } public Azure.AI.VoiceLive.AnimationOptions Animation { get { throw null; } set { } } public Azure.AI.VoiceLive.AvatarConfiguration Avatar { get { throw null; } set { } } - public Azure.AI.VoiceLive.InputAudio InputAudio { get { throw null; } set { } } public Azure.AI.VoiceLive.AudioEchoCancellation InputAudioEchoCancellation { get { throw null; } set { } } - public Azure.AI.VoiceLive.AudioFormat? InputAudioFormat { get { throw null; } set { } } + public Azure.AI.VoiceLive.InputAudioFormat? InputAudioFormat { get { throw null; } set { } } public Azure.AI.VoiceLive.AudioNoiseReduction InputAudioNoiseReduction { get { throw null; } set { } } public int? InputAudioSamplingRate { get { throw null; } set { } } public Azure.AI.VoiceLive.AudioInputTranscriptionSettings InputAudioTranscription { get { throw null; } set { } } @@ -2126,7 +2022,7 @@ public VoiceLiveSessionOptions() { } public Azure.AI.VoiceLive.ResponseMaxOutputTokensOption MaxResponseOutputTokens { get { throw null; } set { } } public System.Collections.Generic.IList Modalities { get { throw null; } } public string Model { get { throw null; } set { } } - public Azure.AI.VoiceLive.AudioFormat? OutputAudioFormat { get { throw null; } set { } } + public Azure.AI.VoiceLive.OutputAudioFormat? OutputAudioFormat { get { throw null; } set { } } public System.Collections.Generic.IList OutputAudioTimestampTypes { get { throw null; } } public float? 
Temperature { get { throw null; } set { } } public Azure.AI.VoiceLive.ToolChoiceOption ToolChoice { get { throw null; } set { } } @@ -2146,6 +2042,7 @@ protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer public partial class VoiceLiveSessionResponse : Azure.AI.VoiceLive.VoiceLiveSessionOptions, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel { internal VoiceLiveSessionResponse() { } + public Azure.AI.VoiceLive.RespondingAgentOptions Agent { get { throw null; } } public string Id { get { throw null; } } protected override Azure.AI.VoiceLive.VoiceLiveSessionOptions JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } @@ -2176,11 +2073,3 @@ protected VoiceProvider() { } internal abstract System.BinaryData ToBinaryData(); } } -namespace Microsoft.Extensions.Azure -{ - public static partial class VoiceLiveClientBuilderExtensions - { - [System.Diagnostics.CodeAnalysis.RequiresDynamicCodeAttribute("Requires unreferenced code until we opt into EnableConfigurationBindingGenerator.")] - public static Azure.Core.Extensions.IAzureClientBuilder AddVoiceLiveClient(this TBuilder builder, TConfiguration configuration) where TBuilder : Azure.Core.Extensions.IAzureClientFactoryBuilderWithConfiguration { throw null; } - } -} diff --git a/sdk/ai/Azure.AI.VoiceLive/api/Azure.AI.VoiceLive.netstandard2.0.cs b/sdk/ai/Azure.AI.VoiceLive/api/Azure.AI.VoiceLive.netstandard2.0.cs index 60a9dbddc894..dbc03726611e 100644 --- a/sdk/ai/Azure.AI.VoiceLive/api/Azure.AI.VoiceLive.netstandard2.0.cs +++ b/sdk/ai/Azure.AI.VoiceLive/api/Azure.AI.VoiceLive.netstandard2.0.cs @@ -3,6 +3,7 @@ namespace Azure.AI.VoiceLive public partial class AnimationOptions : 
System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel { public AnimationOptions() { } + public System.TimeSpan? EmotionDetectionInterval { get { throw null; } set { } } public int? EmotionDetectionIntervalMs { get { throw null; } set { } } public string ModelName { get { throw null; } set { } } public System.Collections.Generic.IList Outputs { get { throw null; } } @@ -20,10 +21,10 @@ public enum AnimationOutputType { Blendshapes = 0, VisemeId = 1, - Emotion = 2, } public partial class AssistantMessageItem : Azure.AI.VoiceLive.MessageItem, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel { + public AssistantMessageItem(Azure.AI.VoiceLive.OutputTextContentPart content) : base (default(string)) { } public AssistantMessageItem(System.Collections.Generic.IEnumerable content) : base (default(string)) { } public System.Collections.Generic.IList Content { get { throw null; } } protected override Azure.AI.VoiceLive.ConversationRequestItem JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -50,26 +51,6 @@ protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } } - [System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential)] - public readonly partial struct AudioFormat : System.IEquatable - { - private readonly object _dummy; - private readonly int _dummyPrimitive; - public AudioFormat(string value) { throw null; } - public static Azure.AI.VoiceLive.AudioFormat G711Alaw { get { throw null; } } - public static 
Azure.AI.VoiceLive.AudioFormat G711Ulaw { get { throw null; } } - public static Azure.AI.VoiceLive.AudioFormat Pcm16 { get { throw null; } } - public bool Equals(Azure.AI.VoiceLive.AudioFormat other) { throw null; } - [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] - public override bool Equals(object obj) { throw null; } - [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] - public override int GetHashCode() { throw null; } - public static bool operator ==(Azure.AI.VoiceLive.AudioFormat left, Azure.AI.VoiceLive.AudioFormat right) { throw null; } - public static implicit operator Azure.AI.VoiceLive.AudioFormat (string value) { throw null; } - public static implicit operator Azure.AI.VoiceLive.AudioFormat? (string value) { throw null; } - public static bool operator !=(Azure.AI.VoiceLive.AudioFormat left, Azure.AI.VoiceLive.AudioFormat right) { throw null; } - public override string ToString() { throw null; } - } public partial class AudioInputTranscriptionSettings : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel { public AudioInputTranscriptionSettings(Azure.AI.VoiceLive.AudioInputTranscriptionSettingsModel model) { } @@ -93,7 +74,6 @@ protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer private readonly object _dummy; private readonly int _dummyPrimitive; public AudioInputTranscriptionSettingsModel(string value) { throw null; } - public static Azure.AI.VoiceLive.AudioInputTranscriptionSettingsModel AzureFastTranscription { get { throw null; } } public static Azure.AI.VoiceLive.AudioInputTranscriptionSettingsModel AzureSpeech { get { throw null; } } public static Azure.AI.VoiceLive.AudioInputTranscriptionSettingsModel Gpt4oMiniTranscribe { get { throw null; } } public static Azure.AI.VoiceLive.AudioInputTranscriptionSettingsModel Gpt4oTranscribe { get { throw null; } } @@ -111,8 +91,8 @@ protected 
virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer } public partial class AudioNoiseReduction : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel { - public AudioNoiseReduction() { } - public string Type { get { throw null; } } + public AudioNoiseReduction(Azure.AI.VoiceLive.AudioNoiseReductionType type) { } + public Azure.AI.VoiceLive.AudioNoiseReductionType Type { get { throw null; } set { } } protected virtual Azure.AI.VoiceLive.AudioNoiseReduction JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } protected virtual Azure.AI.VoiceLive.AudioNoiseReduction PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -123,6 +103,12 @@ protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } } + public enum AudioNoiseReductionType + { + AzureDeepNoiseSuppression = 0, + NearField = 1, + FarField = 2, + } [System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential)] public readonly partial struct AudioTimestampType : System.IEquatable { @@ -188,35 +174,10 @@ protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter write string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } System.BinaryData 
System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } } - public partial class AzureMultilingualSemanticVad : Azure.AI.VoiceLive.TurnDetection, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel - { - public AzureMultilingualSemanticVad() { } - public bool? AutoTruncate { get { throw null; } set { } } - public int? DistinctCiPhones { get { throw null; } set { } } - public Azure.AI.VoiceLive.EouDetection EndOfUtteranceDetection { get { throw null; } set { } } - public System.Collections.Generic.IList Languages { get { throw null; } } - public float? NegThreshold { get { throw null; } set { } } - public int? PrefixPaddingMs { get { throw null; } set { } } - public bool? RemoveFillerWords { get { throw null; } set { } } - public bool? RequireVowel { get { throw null; } set { } } - public int? SilenceDurationMs { get { throw null; } set { } } - public int? SpeechDurationMs { get { throw null; } set { } } - public float? Threshold { get { throw null; } set { } } - public int? 
WindowSize { get { throw null; } set { } } - protected override Azure.AI.VoiceLive.TurnDetection JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } - protected override Azure.AI.VoiceLive.TurnDetection PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - protected override System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - Azure.AI.VoiceLive.AzureMultilingualSemanticVad System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } - Azure.AI.VoiceLive.AzureMultilingualSemanticVad System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - } public partial class AzurePersonalVoice : Azure.AI.VoiceLive.AzureVoice, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel { - public AzurePersonalVoice(string name, Azure.AI.VoiceLive.AzurePersonalVoiceModel model) { } - public Azure.AI.VoiceLive.AzurePersonalVoiceModel Model { get { throw null; } set { } } + public 
AzurePersonalVoice(string name, Azure.AI.VoiceLive.PersonalVoiceModels model) { } + public Azure.AI.VoiceLive.PersonalVoiceModels Model { get { throw null; } set { } } public string Name { get { throw null; } set { } } public float? Temperature { get { throw null; } set { } } protected override Azure.AI.VoiceLive.AzureVoice JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -229,168 +190,122 @@ protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter write string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } } - public enum AzurePersonalVoiceModel - { - DragonLatestNeural = 0, - PhoenixLatestNeural = 1, - PhoenixV2Neural = 2, - } - public partial class AzurePlatformVoice : Azure.AI.VoiceLive.AzureVoice, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel - { - public AzurePlatformVoice(string name) { } - public string CustomLexiconUrl { get { throw null; } set { } } - public string Locale { get { throw null; } set { } } - public string Name { get { throw null; } set { } } - public string Pitch { get { throw null; } set { } } - public System.Collections.Generic.IList PreferLocales { get { throw null; } } - public string Rate { get { throw null; } set { } } - public string Style { get { throw null; } set { } } - public float? 
Temperature { get { throw null; } set { } } - public string Volume { get { throw null; } set { } } - protected override Azure.AI.VoiceLive.AzureVoice JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } - protected override Azure.AI.VoiceLive.AzureVoice PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - protected override System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - Azure.AI.VoiceLive.AzurePlatformVoice System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } - Azure.AI.VoiceLive.AzurePlatformVoice System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - } - public partial class AzureSemanticDetection : Azure.AI.VoiceLive.EouDetection, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + public partial class AzureSemanticEnEouDetection : Azure.AI.VoiceLive.EouDetection, System.ClientModel.Primitives.IJsonModel, 
System.ClientModel.Primitives.IPersistableModel { - public AzureSemanticDetection() { } - public bool? DisableRules { get { throw null; } set { } } - public bool? ExtraImendCheck { get { throw null; } set { } } - public float? SecondaryThreshold { get { throw null; } set { } } - public float? SecondaryTimeout { get { throw null; } set { } } - public float? SrBoost { get { throw null; } set { } } + public AzureSemanticEnEouDetection() { } public float? Threshold { get { throw null; } set { } } - public float? Timeout { get { throw null; } set { } } + public float? TimeoutMs { get { throw null; } set { } } protected override Azure.AI.VoiceLive.EouDetection JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } protected override Azure.AI.VoiceLive.EouDetection PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } protected override System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - Azure.AI.VoiceLive.AzureSemanticDetection System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } - Azure.AI.VoiceLive.AzureSemanticDetection System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions 
options) { throw null; } - System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + Azure.AI.VoiceLive.AzureSemanticEnEouDetection System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.AI.VoiceLive.AzureSemanticEnEouDetection System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } } - public partial class AzureSemanticDetectionEn : Azure.AI.VoiceLive.EouDetection, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + public partial class AzureSemanticEouDetection : Azure.AI.VoiceLive.EouDetection, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel { - public AzureSemanticDetectionEn() { } - public bool? DisableRules { get { throw null; } set { } } - public bool? ExtraImendCheck { get { throw null; } set { } } - public float? SecondaryThreshold { get { throw null; } set { } } - public float? SecondaryTimeout { get { throw null; } set { } } - public float? SrBoost { get { throw null; } set { } } + public AzureSemanticEouDetection() { } public float? Threshold { get { throw null; } set { } } - public float? 
Timeout { get { throw null; } set { } } + public System.TimeSpan Timeout { get { throw null; } set { } } protected override Azure.AI.VoiceLive.EouDetection JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } protected override Azure.AI.VoiceLive.EouDetection PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } protected override System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - Azure.AI.VoiceLive.AzureSemanticDetectionEn System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } - Azure.AI.VoiceLive.AzureSemanticDetectionEn System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + Azure.AI.VoiceLive.AzureSemanticEouDetection System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, 
System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.AI.VoiceLive.AzureSemanticEouDetection System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } + public partial class AzureSemanticEouDetectionEn + { + public AzureSemanticEouDetectionEn() { } + public System.TimeSpan Timeout { get { throw null; } set { } } + } + public partial class AzureSemanticEouDetectionMultilingual + { + public AzureSemanticEouDetectionMultilingual() { } + public System.TimeSpan Timeout { get { throw null; } set { } } } - public partial class AzureSemanticDetectionMultilingual : Azure.AI.VoiceLive.EouDetection, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + public partial class AzureSemanticMultilingualEouDetection : Azure.AI.VoiceLive.EouDetection, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel { - public AzureSemanticDetectionMultilingual() { } - public bool? DisableRules { get { throw null; } set { } } - public bool? ExtraImendCheck { get { throw null; } set { } } - public float? SecondaryThreshold { get { throw null; } set { } } - public float? SecondaryTimeout { get { throw null; } set { } } - public float? SrBoost { get { throw null; } set { } } + public AzureSemanticMultilingualEouDetection() { } public float? Threshold { get { throw null; } set { } } - public float? Timeout { get { throw null; } set { } } + public float? 
TimeoutMs { get { throw null; } set { } } protected override Azure.AI.VoiceLive.EouDetection JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } protected override Azure.AI.VoiceLive.EouDetection PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } protected override System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - Azure.AI.VoiceLive.AzureSemanticDetectionMultilingual System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } - Azure.AI.VoiceLive.AzureSemanticDetectionMultilingual System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + Azure.AI.VoiceLive.AzureSemanticMultilingualEouDetection System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, 
System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.AI.VoiceLive.AzureSemanticMultilingualEouDetection System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } } - public partial class AzureSemanticVad : Azure.AI.VoiceLive.TurnDetection, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + public partial class AzureSemanticVadEnTurnDetection : Azure.AI.VoiceLive.TurnDetection, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel { - public AzureSemanticVad() { } + public AzureSemanticVadEnTurnDetection() { } public bool? AutoTruncate { get { throw null; } set { } } - public int? DistinctCiPhones { get { throw null; } set { } } public Azure.AI.VoiceLive.EouDetection EndOfUtteranceDetection { get { throw null; } set { } } - public System.Collections.Generic.IList Languages { get { throw null; } } - public float? NegThreshold { get { throw null; } set { } } - public int? PrefixPaddingMs { get { throw null; } set { } } + public System.TimeSpan PrefixPadding { get { throw null; } set { } } public bool? RemoveFillerWords { get { throw null; } set { } } - public bool? RequireVowel { get { throw null; } set { } } - public int? SilenceDurationMs { get { throw null; } set { } } - public int? SpeechDurationMs { get { throw null; } set { } } + public System.TimeSpan SilenceDuration { get { throw null; } set { } } + public System.TimeSpan SpeechDuration { get { throw null; } set { } } public float? Threshold { get { throw null; } set { } } - public int? 
WindowSize { get { throw null; } set { } } protected override Azure.AI.VoiceLive.TurnDetection JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } protected override Azure.AI.VoiceLive.TurnDetection PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } protected override System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - Azure.AI.VoiceLive.AzureSemanticVad System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } - Azure.AI.VoiceLive.AzureSemanticVad System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + Azure.AI.VoiceLive.AzureSemanticVadEnTurnDetection System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + 
Azure.AI.VoiceLive.AzureSemanticVadEnTurnDetection System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } } - public partial class AzureSemanticVadEn : Azure.AI.VoiceLive.TurnDetection, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + public partial class AzureSemanticVadMultilingualTurnDetection : Azure.AI.VoiceLive.TurnDetection, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel { - public AzureSemanticVadEn() { } + public AzureSemanticVadMultilingualTurnDetection() { } public bool? AutoTruncate { get { throw null; } set { } } - public int? DistinctCiPhones { get { throw null; } set { } } public Azure.AI.VoiceLive.EouDetection EndOfUtteranceDetection { get { throw null; } set { } } public System.Collections.Generic.IList Languages { get { throw null; } } - public float? NegThreshold { get { throw null; } set { } } - public int? PrefixPaddingMs { get { throw null; } set { } } + public System.TimeSpan PrefixPadding { get { throw null; } set { } } public bool? RemoveFillerWords { get { throw null; } set { } } - public bool? RequireVowel { get { throw null; } set { } } - public int? SilenceDurationMs { get { throw null; } set { } } - public int? SpeechDurationMs { get { throw null; } set { } } + public System.TimeSpan SilenceDuration { get { throw null; } set { } } + public System.TimeSpan SpeechDuration { get { throw null; } set { } } public float? Threshold { get { throw null; } set { } } - public int? 
WindowSize { get { throw null; } set { } } protected override Azure.AI.VoiceLive.TurnDetection JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } protected override Azure.AI.VoiceLive.TurnDetection PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } protected override System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - Azure.AI.VoiceLive.AzureSemanticVadEn System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } - Azure.AI.VoiceLive.AzureSemanticVadEn System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + Azure.AI.VoiceLive.AzureSemanticVadMultilingualTurnDetection System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + 
Azure.AI.VoiceLive.AzureSemanticVadMultilingualTurnDetection System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } } - public partial class AzureSemanticVadServer : Azure.AI.VoiceLive.TurnDetection, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + public partial class AzureSemanticVadTurnDetection : Azure.AI.VoiceLive.TurnDetection, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel { - public AzureSemanticVadServer() { } + public AzureSemanticVadTurnDetection() { } public bool? AutoTruncate { get { throw null; } set { } } - public int? DistinctCiPhones { get { throw null; } set { } } public Azure.AI.VoiceLive.EouDetection EndOfUtteranceDetection { get { throw null; } set { } } public System.Collections.Generic.IList Languages { get { throw null; } } - public float? NegThreshold { get { throw null; } set { } } - public int? PrefixPaddingMs { get { throw null; } set { } } + public System.TimeSpan PrefixPadding { get { throw null; } set { } } public bool? RemoveFillerWords { get { throw null; } set { } } - public bool? RequireVowel { get { throw null; } set { } } - public int? SilenceDurationMs { get { throw null; } set { } } - public int? SpeechDurationMs { get { throw null; } set { } } + public System.TimeSpan SilenceDuration { get { throw null; } set { } } + public System.TimeSpan SpeechDuration { get { throw null; } set { } } public float? Threshold { get { throw null; } set { } } - public int? 
WindowSize { get { throw null; } set { } } protected override Azure.AI.VoiceLive.TurnDetection JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } protected override Azure.AI.VoiceLive.TurnDetection PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } protected override System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - Azure.AI.VoiceLive.AzureSemanticVadServer System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } - Azure.AI.VoiceLive.AzureSemanticVadServer System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + Azure.AI.VoiceLive.AzureSemanticVadTurnDetection System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + 
Azure.AI.VoiceLive.AzureSemanticVadTurnDetection System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } } public partial class AzureStandardVoice : Azure.AI.VoiceLive.AzureVoice, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel { @@ -456,21 +371,6 @@ protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } } - public partial class EmotionCandidate : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel - { - internal EmotionCandidate() { } - public float Confidence { get { throw null; } } - public string Emotion { get { throw null; } } - protected virtual Azure.AI.VoiceLive.EmotionCandidate JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } - protected virtual Azure.AI.VoiceLive.EmotionCandidate PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - protected virtual System.BinaryData 
PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - Azure.AI.VoiceLive.EmotionCandidate System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } - Azure.AI.VoiceLive.EmotionCandidate System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - } public abstract partial class EouDetection : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel { internal EouDetection() { } @@ -487,9 +387,9 @@ protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer public partial class FunctionCallItem : Azure.AI.VoiceLive.ConversationRequestItem, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel { public FunctionCallItem(string name, string callId, string arguments) { } - public string Arguments { get { throw null; } } - public string CallId { get { throw null; } } - public string Name { get { throw null; } } + public string Arguments { get { throw null; } set { } } + public string CallId { get { throw null; } set { } } + public string Name { get { throw null; } set { } } public Azure.AI.VoiceLive.ItemParamStatus? 
Status { get { throw null; } set { } } protected override Azure.AI.VoiceLive.ConversationRequestItem JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } @@ -504,8 +404,8 @@ protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter write public partial class FunctionCallOutputItem : Azure.AI.VoiceLive.ConversationRequestItem, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel { public FunctionCallOutputItem(string callId, string output) { } - public string CallId { get { throw null; } } - public string Output { get { throw null; } } + public string CallId { get { throw null; } set { } } + public string Output { get { throw null; } set { } } public Azure.AI.VoiceLive.ItemParamStatus? Status { get { throw null; } set { } } protected override Azure.AI.VoiceLive.ConversationRequestItem JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } @@ -533,25 +433,10 @@ protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } } - public partial class InputAudio : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel - { - public InputAudio() { } - public string Model { get { throw null; } 
} - public System.Collections.Generic.IList PhraseList { get { throw null; } } - protected virtual Azure.AI.VoiceLive.InputAudio JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } - protected virtual Azure.AI.VoiceLive.InputAudio PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - protected virtual System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - Azure.AI.VoiceLive.InputAudio System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } - Azure.AI.VoiceLive.InputAudio System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - } public partial class InputAudioContentPart : Azure.AI.VoiceLive.UserContentPart, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel { public InputAudioContentPart(string audio) { } - public string Audio { get { throw null; } } + public string Audio { get { throw null; } set { } } public string Transcript { get { throw null; } set { } } 
protected override Azure.AI.VoiceLive.UserContentPart JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } @@ -564,6 +449,26 @@ protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter write System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } } [System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential)] + public readonly partial struct InputAudioFormat : System.IEquatable + { + private readonly object _dummy; + private readonly int _dummyPrimitive; + public InputAudioFormat(string value) { throw null; } + public static Azure.AI.VoiceLive.InputAudioFormat G711Alaw { get { throw null; } } + public static Azure.AI.VoiceLive.InputAudioFormat G711Ulaw { get { throw null; } } + public static Azure.AI.VoiceLive.InputAudioFormat Pcm16 { get { throw null; } } + public bool Equals(Azure.AI.VoiceLive.InputAudioFormat other) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override bool Equals(object obj) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override int GetHashCode() { throw null; } + public static bool operator ==(Azure.AI.VoiceLive.InputAudioFormat left, Azure.AI.VoiceLive.InputAudioFormat right) { throw null; } + public static implicit operator Azure.AI.VoiceLive.InputAudioFormat (string value) { throw null; } + public static implicit operator Azure.AI.VoiceLive.InputAudioFormat? 
(string value) { throw null; } + public static bool operator !=(Azure.AI.VoiceLive.InputAudioFormat left, Azure.AI.VoiceLive.InputAudioFormat right) { throw null; } + public override string ToString() { throw null; } + } + [System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential)] public readonly partial struct InputModality : System.IEquatable { private readonly object _dummy; @@ -587,7 +492,7 @@ protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter write public partial class InputTextContentPart : Azure.AI.VoiceLive.UserContentPart, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel { public InputTextContentPart(string text) { } - public string Text { get { throw null; } } + public string Text { get { throw null; } set { } } protected override Azure.AI.VoiceLive.UserContentPart JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } protected override Azure.AI.VoiceLive.UserContentPart PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -620,39 +525,6 @@ public enum ItemParamStatus Completed = 0, Incomplete = 1, } - public partial class LlmVoice : Azure.AI.VoiceLive.VoiceProvider, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel - { - public LlmVoice(Azure.AI.VoiceLive.LlmVoiceName name) { } - public Azure.AI.VoiceLive.LlmVoiceName Name { get { throw null; } set { } } - public string Type { get { throw null; } } - protected virtual Azure.AI.VoiceLive.LlmVoice JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } 
- protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } - protected virtual Azure.AI.VoiceLive.LlmVoice PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - protected virtual System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - Azure.AI.VoiceLive.LlmVoice System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } - Azure.AI.VoiceLive.LlmVoice System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - } - [System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential)] - public readonly partial struct LlmVoiceName : System.IEquatable - { - private readonly object _dummy; - private readonly int _dummyPrimitive; - public LlmVoiceName(string value) { throw null; } - public static Azure.AI.VoiceLive.LlmVoiceName Cosyvoice { get { throw null; } } - public bool Equals(Azure.AI.VoiceLive.LlmVoiceName other) { throw null; } - [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] - public override bool Equals(object obj) { throw null; } - 
[System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] - public override int GetHashCode() { throw null; } - public static bool operator ==(Azure.AI.VoiceLive.LlmVoiceName left, Azure.AI.VoiceLive.LlmVoiceName right) { throw null; } - public static implicit operator Azure.AI.VoiceLive.LlmVoiceName (string value) { throw null; } - public static implicit operator Azure.AI.VoiceLive.LlmVoiceName? (string value) { throw null; } - public static bool operator !=(Azure.AI.VoiceLive.LlmVoiceName left, Azure.AI.VoiceLive.LlmVoiceName right) { throw null; } - public override string ToString() { throw null; } - } public partial class LogProbProperties : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel { internal LogProbProperties() { } @@ -736,10 +608,32 @@ protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } } + [System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential)] + public readonly partial struct OutputAudioFormat : System.IEquatable + { + private readonly object _dummy; + private readonly int _dummyPrimitive; + public OutputAudioFormat(string value) { throw null; } + public static Azure.AI.VoiceLive.OutputAudioFormat G711Alaw { get { throw null; } } + public static Azure.AI.VoiceLive.OutputAudioFormat G711Ulaw { get { throw null; } } + public static Azure.AI.VoiceLive.OutputAudioFormat Pcm16 { get { throw null; } } + public static Azure.AI.VoiceLive.OutputAudioFormat Pcm1616000hz { get { throw null; } } + public static Azure.AI.VoiceLive.OutputAudioFormat Pcm168000hz { get { throw null; } } + public bool 
Equals(Azure.AI.VoiceLive.OutputAudioFormat other) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override bool Equals(object obj) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override int GetHashCode() { throw null; } + public static bool operator ==(Azure.AI.VoiceLive.OutputAudioFormat left, Azure.AI.VoiceLive.OutputAudioFormat right) { throw null; } + public static implicit operator Azure.AI.VoiceLive.OutputAudioFormat (string value) { throw null; } + public static implicit operator Azure.AI.VoiceLive.OutputAudioFormat? (string value) { throw null; } + public static bool operator !=(Azure.AI.VoiceLive.OutputAudioFormat left, Azure.AI.VoiceLive.OutputAudioFormat right) { throw null; } + public override string ToString() { throw null; } + } public partial class OutputTextContentPart : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel { public OutputTextContentPart(string text) { } - public string Text { get { throw null; } } + public string Text { get { throw null; } set { } } public string Type { get { throw null; } } protected virtual Azure.AI.VoiceLive.OutputTextContentPart JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } @@ -766,10 +660,30 @@ protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } 
} + [System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential)] + public readonly partial struct PersonalVoiceModels : System.IEquatable + { + private readonly object _dummy; + private readonly int _dummyPrimitive; + public PersonalVoiceModels(string value) { throw null; } + public static Azure.AI.VoiceLive.PersonalVoiceModels DragonLatestNeural { get { throw null; } } + public static Azure.AI.VoiceLive.PersonalVoiceModels PhoenixLatestNeural { get { throw null; } } + public static Azure.AI.VoiceLive.PersonalVoiceModels PhoenixV2Neural { get { throw null; } } + public bool Equals(Azure.AI.VoiceLive.PersonalVoiceModels other) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override bool Equals(object obj) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override int GetHashCode() { throw null; } + public static bool operator ==(Azure.AI.VoiceLive.PersonalVoiceModels left, Azure.AI.VoiceLive.PersonalVoiceModels right) { throw null; } + public static implicit operator Azure.AI.VoiceLive.PersonalVoiceModels (string value) { throw null; } + public static implicit operator Azure.AI.VoiceLive.PersonalVoiceModels? 
(string value) { throw null; } + public static bool operator !=(Azure.AI.VoiceLive.PersonalVoiceModels left, Azure.AI.VoiceLive.PersonalVoiceModels right) { throw null; } + public override string ToString() { throw null; } + } public partial class RequestAudioContentPart : Azure.AI.VoiceLive.VoiceLiveContentPart, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel { - internal RequestAudioContentPart() { } - public string Transcript { get { throw null; } } + public RequestAudioContentPart() { } + public string Transcript { get { throw null; } set { } } protected override Azure.AI.VoiceLive.VoiceLiveContentPart JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } protected override Azure.AI.VoiceLive.VoiceLiveContentPart PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -782,8 +696,8 @@ protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter write } public partial class RequestTextContentPart : Azure.AI.VoiceLive.VoiceLiveContentPart, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel { - internal RequestTextContentPart() { } - public string Text { get { throw null; } } + public RequestTextContentPart() { } + public string Text { get { throw null; } set { } } protected override Azure.AI.VoiceLive.VoiceLiveContentPart JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } protected override 
Azure.AI.VoiceLive.VoiceLiveContentPart PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -1025,23 +939,23 @@ protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } } - public partial class ServerVad : Azure.AI.VoiceLive.TurnDetection, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + public partial class ServerVadTurnDetection : Azure.AI.VoiceLive.TurnDetection, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel { - public ServerVad() { } + public ServerVadTurnDetection() { } public bool? AutoTruncate { get { throw null; } set { } } public Azure.AI.VoiceLive.EouDetection EndOfUtteranceDetection { get { throw null; } set { } } - public int? PrefixPaddingMs { get { throw null; } set { } } - public int? SilenceDurationMs { get { throw null; } set { } } + public System.TimeSpan PrefixPadding { get { throw null; } set { } } + public System.TimeSpan SilenceDuration { get { throw null; } set { } } public float? 
Threshold { get { throw null; } set { } } protected override Azure.AI.VoiceLive.TurnDetection JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } protected override Azure.AI.VoiceLive.TurnDetection PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } protected override System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - Azure.AI.VoiceLive.ServerVad System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } - Azure.AI.VoiceLive.ServerVad System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + Azure.AI.VoiceLive.ServerVadTurnDetection System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + 
Azure.AI.VoiceLive.ServerVadTurnDetection System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } } public abstract partial class SessionUpdate : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel { @@ -1057,6 +971,20 @@ protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } } + public partial class SessionUpdateAvatarConnecting : Azure.AI.VoiceLive.SessionUpdate, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + internal SessionUpdateAvatarConnecting() { } + public string ServerSdp { get { throw null; } } + protected override Azure.AI.VoiceLive.SessionUpdate JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + protected override Azure.AI.VoiceLive.SessionUpdate PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected override System.BinaryData 
PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + Azure.AI.VoiceLive.SessionUpdateAvatarConnecting System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.AI.VoiceLive.SessionUpdateAvatarConnecting System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } public partial class SessionUpdateConversationItemCreated : Azure.AI.VoiceLive.SessionUpdate, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel { internal SessionUpdateConversationItemCreated() { } @@ -1154,7 +1082,7 @@ protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter write public partial class SessionUpdateConversationItemTruncated : Azure.AI.VoiceLive.SessionUpdate, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel { internal SessionUpdateConversationItemTruncated() { } - public int AudioEndMs { get { throw null; } } + public System.TimeSpan AudioEnd { get { throw null; } } public int ContentIndex { get { throw null; } } public override string EventId { get { throw null; } } public string ItemId { get { throw null; } } @@ -1247,7 +1175,6 @@ public partial class SessionUpdateInputAudioBufferSpeechStopped : Azure.AI.Voice { internal 
SessionUpdateInputAudioBufferSpeechStopped() { } public System.TimeSpan AudioEnd { get { throw null; } } - public int AudioEndMs { get { throw null; } } public string ItemId { get { throw null; } } protected override Azure.AI.VoiceLive.SessionUpdate JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } @@ -1303,7 +1230,7 @@ protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter write public partial class SessionUpdateResponseAnimationVisemeDelta : Azure.AI.VoiceLive.SessionUpdate, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel { internal SessionUpdateResponseAnimationVisemeDelta() { } - public int AudioOffsetMs { get { throw null; } } + public System.TimeSpan AudioOffset { get { throw null; } } public int ContentIndex { get { throw null; } } public string ItemId { get { throw null; } } public int OutputIndex { get { throw null; } } @@ -1374,8 +1301,8 @@ protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter write public partial class SessionUpdateResponseAudioTimestampDelta : Azure.AI.VoiceLive.SessionUpdate, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel { internal SessionUpdateResponseAudioTimestampDelta() { } - public int AudioDurationMs { get { throw null; } } - public int AudioOffsetMs { get { throw null; } } + public System.TimeSpan AudioDuration { get { throw null; } } + public System.TimeSpan AudioOffset { get { throw null; } } public int ContentIndex { get { throw null; } } public string ItemId { get { throw null; } } public int OutputIndex { get { throw null; } } @@ -1509,25 +1436,6 @@ protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter write string 
System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } } - public partial class SessionUpdateResponseEmotionHypothesis : Azure.AI.VoiceLive.SessionUpdate, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel - { - internal SessionUpdateResponseEmotionHypothesis() { } - public int AudioDurationMs { get { throw null; } } - public int AudioOffsetMs { get { throw null; } } - public System.Collections.Generic.IList Candidates { get { throw null; } } - public string Emotion { get { throw null; } } - public string ItemId { get { throw null; } } - public string ResponseId { get { throw null; } } - protected override Azure.AI.VoiceLive.SessionUpdate JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } - protected override Azure.AI.VoiceLive.SessionUpdate PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - protected override System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - Azure.AI.VoiceLive.SessionUpdateResponseEmotionHypothesis System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } - 
Azure.AI.VoiceLive.SessionUpdateResponseEmotionHypothesis System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - } public partial class SessionUpdateResponseFunctionCallArgumentsDelta : Azure.AI.VoiceLive.SessionUpdate, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel { internal SessionUpdateResponseFunctionCallArgumentsDelta() { } @@ -1633,20 +1541,6 @@ protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter write string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } } - public partial class SessionUpdateSessionAvatarConnecting : Azure.AI.VoiceLive.SessionUpdate, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel - { - internal SessionUpdateSessionAvatarConnecting() { } - public string ServerSdp { get { throw null; } } - protected override Azure.AI.VoiceLive.SessionUpdate JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } - protected override Azure.AI.VoiceLive.SessionUpdate PersistableModelCreateCore(System.BinaryData data, 
System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - protected override System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - Azure.AI.VoiceLive.SessionUpdateSessionAvatarConnecting System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } - Azure.AI.VoiceLive.SessionUpdateSessionAvatarConnecting System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } - } public partial class SessionUpdateSessionCreated : Azure.AI.VoiceLive.SessionUpdate, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel { internal SessionUpdateSessionCreated() { } @@ -1677,6 +1571,7 @@ protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter write } public partial class SystemMessageItem : Azure.AI.VoiceLive.MessageItem, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel { + public SystemMessageItem(Azure.AI.VoiceLive.InputTextContentPart content) : base (default(string)) { } public SystemMessageItem(System.Collections.Generic.IEnumerable content) : base (default(string)) { } public System.Collections.Generic.IList Content { get { throw null; } } protected override Azure.AI.VoiceLive.ConversationRequestItem 
JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -1751,6 +1646,7 @@ protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer } public partial class UserMessageItem : Azure.AI.VoiceLive.MessageItem, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel { + public UserMessageItem(Azure.AI.VoiceLive.UserContentPart content) : base (default(string)) { } public UserMessageItem(System.Collections.Generic.IEnumerable content) : base (default(string)) { } public System.Collections.Generic.IList Content { get { throw null; } } protected override Azure.AI.VoiceLive.ConversationRequestItem JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -1881,39 +1777,33 @@ protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter write } public static partial class VoiceLiveModelFactory { - public static Azure.AI.VoiceLive.AnimationOptions AnimationOptions(string modelName = null, System.Collections.Generic.IEnumerable outputs = null, int? emotionDetectionIntervalMs = default(int?)) { throw null; } + public static Azure.AI.VoiceLive.AnimationOptions AnimationOptions(string modelName = null, System.Collections.Generic.IEnumerable outputs = null) { throw null; } public static Azure.AI.VoiceLive.AssistantMessageItem AssistantMessageItem(string id = null, Azure.AI.VoiceLive.ItemParamStatus? 
status = default(Azure.AI.VoiceLive.ItemParamStatus?), System.Collections.Generic.IEnumerable content = null) { throw null; } public static Azure.AI.VoiceLive.AudioEchoCancellation AudioEchoCancellation(string type = null) { throw null; } public static Azure.AI.VoiceLive.AudioInputTranscriptionSettings AudioInputTranscriptionSettings(Azure.AI.VoiceLive.AudioInputTranscriptionSettingsModel model = default(Azure.AI.VoiceLive.AudioInputTranscriptionSettingsModel), string language = null, System.Collections.Generic.IDictionary customSpeech = null, System.Collections.Generic.IEnumerable phraseList = null) { throw null; } - public static Azure.AI.VoiceLive.AudioNoiseReduction AudioNoiseReduction(string type = null) { throw null; } + public static Azure.AI.VoiceLive.AudioNoiseReduction AudioNoiseReduction(Azure.AI.VoiceLive.AudioNoiseReductionType type = Azure.AI.VoiceLive.AudioNoiseReductionType.AzureDeepNoiseSuppression) { throw null; } public static Azure.AI.VoiceLive.AvatarConfiguration AvatarConfiguration(System.Collections.Generic.IEnumerable iceServers = null, string character = null, string style = null, bool customized = false, Azure.AI.VoiceLive.VideoParams video = null) { throw null; } public static Azure.AI.VoiceLive.AzureCustomVoice AzureCustomVoice(string name = null, string endpointId = null, float? temperature = default(float?), string customLexiconUri = null, System.Collections.Generic.IEnumerable preferLocales = null, string locale = null, string style = null, string pitch = null, string rate = null, string volume = null) { throw null; } - public static Azure.AI.VoiceLive.AzureMultilingualSemanticVad AzureMultilingualSemanticVad(float? threshold = default(float?), int? prefixPaddingMs = default(int?), int? silenceDurationMs = default(int?), Azure.AI.VoiceLive.EouDetection endOfUtteranceDetection = null, float? negThreshold = default(float?), int? speechDurationMs = default(int?), int? windowSize = default(int?), int? 
distinctCiPhones = default(int?), bool? requireVowel = default(bool?), bool? removeFillerWords = default(bool?), System.Collections.Generic.IEnumerable languages = null, bool? autoTruncate = default(bool?)) { throw null; } - public static Azure.AI.VoiceLive.AzurePersonalVoice AzurePersonalVoice(string name = null, float? temperature = default(float?), Azure.AI.VoiceLive.AzurePersonalVoiceModel model = Azure.AI.VoiceLive.AzurePersonalVoiceModel.DragonLatestNeural) { throw null; } - public static Azure.AI.VoiceLive.AzurePlatformVoice AzurePlatformVoice(string name = null, float? temperature = default(float?), string customLexiconUrl = null, System.Collections.Generic.IEnumerable preferLocales = null, string locale = null, string style = null, string pitch = null, string rate = null, string volume = null) { throw null; } - public static Azure.AI.VoiceLive.AzureSemanticDetection AzureSemanticDetection(float? threshold = default(float?), float? timeout = default(float?), float? secondaryThreshold = default(float?), float? secondaryTimeout = default(float?), bool? disableRules = default(bool?), float? srBoost = default(float?), bool? extraImendCheck = default(bool?)) { throw null; } - public static Azure.AI.VoiceLive.AzureSemanticDetectionEn AzureSemanticDetectionEn(float? threshold = default(float?), float? timeout = default(float?), float? secondaryThreshold = default(float?), float? secondaryTimeout = default(float?), bool? disableRules = default(bool?), float? srBoost = default(float?), bool? extraImendCheck = default(bool?)) { throw null; } - public static Azure.AI.VoiceLive.AzureSemanticDetectionMultilingual AzureSemanticDetectionMultilingual(float? threshold = default(float?), float? timeout = default(float?), float? secondaryThreshold = default(float?), float? secondaryTimeout = default(float?), bool? disableRules = default(bool?), float? srBoost = default(float?), bool? 
extraImendCheck = default(bool?)) { throw null; } - public static Azure.AI.VoiceLive.AzureSemanticVad AzureSemanticVad(float? threshold = default(float?), int? prefixPaddingMs = default(int?), int? silenceDurationMs = default(int?), Azure.AI.VoiceLive.EouDetection endOfUtteranceDetection = null, float? negThreshold = default(float?), int? speechDurationMs = default(int?), int? windowSize = default(int?), int? distinctCiPhones = default(int?), bool? requireVowel = default(bool?), bool? removeFillerWords = default(bool?), System.Collections.Generic.IEnumerable languages = null, bool? autoTruncate = default(bool?)) { throw null; } - public static Azure.AI.VoiceLive.AzureSemanticVadEn AzureSemanticVadEn(float? threshold = default(float?), int? prefixPaddingMs = default(int?), int? silenceDurationMs = default(int?), Azure.AI.VoiceLive.EouDetection endOfUtteranceDetection = null, float? negThreshold = default(float?), int? speechDurationMs = default(int?), int? windowSize = default(int?), int? distinctCiPhones = default(int?), bool? requireVowel = default(bool?), bool? removeFillerWords = default(bool?), System.Collections.Generic.IEnumerable languages = null, bool? autoTruncate = default(bool?)) { throw null; } - public static Azure.AI.VoiceLive.AzureSemanticVadServer AzureSemanticVadServer(float? threshold = default(float?), int? prefixPaddingMs = default(int?), int? silenceDurationMs = default(int?), Azure.AI.VoiceLive.EouDetection endOfUtteranceDetection = null, float? negThreshold = default(float?), int? speechDurationMs = default(int?), int? windowSize = default(int?), int? distinctCiPhones = default(int?), bool? requireVowel = default(bool?), bool? removeFillerWords = default(bool?), System.Collections.Generic.IEnumerable languages = null, bool? autoTruncate = default(bool?)) { throw null; } + public static Azure.AI.VoiceLive.AzurePersonalVoice AzurePersonalVoice(string name = null, float? 
temperature = default(float?), Azure.AI.VoiceLive.PersonalVoiceModels model = default(Azure.AI.VoiceLive.PersonalVoiceModels)) { throw null; } + public static Azure.AI.VoiceLive.AzureSemanticEnEouDetection AzureSemanticEnEouDetection(float? threshold = default(float?), float? timeoutMs = default(float?)) { throw null; } + public static Azure.AI.VoiceLive.AzureSemanticEouDetection AzureSemanticEouDetection(float? threshold = default(float?), float? timeoutMs = default(float?)) { throw null; } + public static Azure.AI.VoiceLive.AzureSemanticMultilingualEouDetection AzureSemanticMultilingualEouDetection(float? threshold = default(float?), float? timeoutMs = default(float?)) { throw null; } + public static Azure.AI.VoiceLive.AzureSemanticVadEnTurnDetection AzureSemanticVadEnTurnDetection(float? threshold = default(float?), int? prefixPaddingMs = default(int?), int? silenceDurationMs = default(int?), Azure.AI.VoiceLive.EouDetection endOfUtteranceDetection = null, int? speechDurationMs = default(int?), bool? removeFillerWords = default(bool?), bool? autoTruncate = default(bool?)) { throw null; } + public static Azure.AI.VoiceLive.AzureSemanticVadMultilingualTurnDetection AzureSemanticVadMultilingualTurnDetection(float? threshold = default(float?), int? prefixPaddingMs = default(int?), int? silenceDurationMs = default(int?), Azure.AI.VoiceLive.EouDetection endOfUtteranceDetection = null, int? speechDurationMs = default(int?), bool? removeFillerWords = default(bool?), System.Collections.Generic.IEnumerable languages = null, bool? autoTruncate = default(bool?)) { throw null; } + public static Azure.AI.VoiceLive.AzureSemanticVadTurnDetection AzureSemanticVadTurnDetection(float? threshold = default(float?), int? prefixPaddingMs = default(int?), int? silenceDurationMs = default(int?), Azure.AI.VoiceLive.EouDetection endOfUtteranceDetection = null, int? speechDurationMs = default(int?), bool? 
removeFillerWords = default(bool?), System.Collections.Generic.IEnumerable languages = null, bool? autoTruncate = default(bool?)) { throw null; } public static Azure.AI.VoiceLive.AzureStandardVoice AzureStandardVoice(string name = null, float? temperature = default(float?), string customLexiconUrl = null, System.Collections.Generic.IEnumerable preferLocales = null, string locale = null, string style = null, string pitch = null, string rate = null, string volume = null) { throw null; } public static Azure.AI.VoiceLive.AzureVoice AzureVoice(string type = null) { throw null; } public static Azure.AI.VoiceLive.CachedTokenDetails CachedTokenDetails(int textTokens = 0, int audioTokens = 0) { throw null; } public static Azure.AI.VoiceLive.ConversationRequestItem ConversationRequestItem(string type = null, string id = null) { throw null; } - public static Azure.AI.VoiceLive.EmotionCandidate EmotionCandidate(string emotion = null, float confidence = 0f) { throw null; } public static Azure.AI.VoiceLive.EouDetection EouDetection(string model = null) { throw null; } public static Azure.AI.VoiceLive.FunctionCallItem FunctionCallItem(string id = null, string name = null, string callId = null, string arguments = null, Azure.AI.VoiceLive.ItemParamStatus? status = default(Azure.AI.VoiceLive.ItemParamStatus?)) { throw null; } public static Azure.AI.VoiceLive.FunctionCallOutputItem FunctionCallOutputItem(string id = null, string callId = null, string output = null, Azure.AI.VoiceLive.ItemParamStatus? 
status = default(Azure.AI.VoiceLive.ItemParamStatus?)) { throw null; } public static Azure.AI.VoiceLive.IceServer IceServer(System.Collections.Generic.IEnumerable uris = null, string username = null, string credential = null) { throw null; } - public static Azure.AI.VoiceLive.InputAudio InputAudio(string model = null, System.Collections.Generic.IEnumerable phraseList = null) { throw null; } public static Azure.AI.VoiceLive.InputAudioContentPart InputAudioContentPart(string audio = null, string transcript = null) { throw null; } public static Azure.AI.VoiceLive.InputTextContentPart InputTextContentPart(string text = null) { throw null; } public static Azure.AI.VoiceLive.InputTokenDetails InputTokenDetails(int cachedTokens = 0, int textTokens = 0, int audioTokens = 0, Azure.AI.VoiceLive.CachedTokenDetails cachedTokensDetails = null) { throw null; } - public static Azure.AI.VoiceLive.LlmVoice LlmVoice(string type = null, Azure.AI.VoiceLive.LlmVoiceName name = default(Azure.AI.VoiceLive.LlmVoiceName)) { throw null; } public static Azure.AI.VoiceLive.LogProbProperties LogProbProperties(string token = null, float logprob = 0f, System.BinaryData bytes = null) { throw null; } public static Azure.AI.VoiceLive.MessageItem MessageItem(string id = null, Azure.AI.VoiceLive.ItemParamStatus? 
status = default(Azure.AI.VoiceLive.ItemParamStatus?)) { throw null; } - public static Azure.AI.VoiceLive.NoTurnDetection NoTurnDetection() { throw null; } public static Azure.AI.VoiceLive.OpenAIVoice OpenAIVoice(string type = null, Azure.AI.VoiceLive.OAIVoice name = default(Azure.AI.VoiceLive.OAIVoice)) { throw null; } public static Azure.AI.VoiceLive.OutputTextContentPart OutputTextContentPart(string type = null, string text = null) { throw null; } public static Azure.AI.VoiceLive.OutputTokenDetails OutputTokenDetails(int textTokens = 0, int audioTokens = 0) { throw null; } @@ -1931,8 +1821,9 @@ public static partial class VoiceLiveModelFactory public static Azure.AI.VoiceLive.ResponseStatusDetails ResponseStatusDetails(string type = null) { throw null; } public static Azure.AI.VoiceLive.ResponseTextContentPart ResponseTextContentPart(string text = null) { throw null; } public static Azure.AI.VoiceLive.ResponseTokenStatistics ResponseTokenStatistics(int totalTokens = 0, int inputTokens = 0, int outputTokens = 0, Azure.AI.VoiceLive.InputTokenDetails inputTokenDetails = null, Azure.AI.VoiceLive.OutputTokenDetails outputTokenDetails = null) { throw null; } - public static Azure.AI.VoiceLive.ServerVad ServerVad(float? threshold = default(float?), int? prefixPaddingMs = default(int?), int? silenceDurationMs = default(int?), Azure.AI.VoiceLive.EouDetection endOfUtteranceDetection = null, bool? autoTruncate = default(bool?)) { throw null; } + public static Azure.AI.VoiceLive.ServerVadTurnDetection ServerVadTurnDetection(float? threshold = default(float?), int? prefixPaddingMs = default(int?), int? silenceDurationMs = default(int?), Azure.AI.VoiceLive.EouDetection endOfUtteranceDetection = null, bool? 
autoTruncate = default(bool?)) { throw null; } public static Azure.AI.VoiceLive.SessionUpdate SessionUpdate(string type = null, string eventId = null) { throw null; } + public static Azure.AI.VoiceLive.SessionUpdateAvatarConnecting SessionUpdateAvatarConnecting(string eventId = null, string serverSdp = null) { throw null; } public static Azure.AI.VoiceLive.SessionUpdateConversationItemCreated SessionUpdateConversationItemCreated(string eventId = null, string previousItemId = null, Azure.AI.VoiceLive.ResponseItem item = null) { throw null; } public static Azure.AI.VoiceLive.SessionUpdateConversationItemDeleted SessionUpdateConversationItemDeleted(string itemId = null, string eventId = null) { throw null; } public static Azure.AI.VoiceLive.SessionUpdateConversationItemInputAudioTranscriptionCompleted SessionUpdateConversationItemInputAudioTranscriptionCompleted(string eventId = null, string itemId = null, int contentIndex = 0, string transcript = null) { throw null; } @@ -1960,14 +1851,12 @@ public static partial class VoiceLiveModelFactory public static Azure.AI.VoiceLive.SessionUpdateResponseContentPartDone SessionUpdateResponseContentPartDone(string eventId = null, string responseId = null, string itemId = null, int outputIndex = 0, int contentIndex = 0, Azure.AI.VoiceLive.VoiceLiveContentPart part = null) { throw null; } public static Azure.AI.VoiceLive.SessionUpdateResponseCreated SessionUpdateResponseCreated(string eventId = null, Azure.AI.VoiceLive.VoiceLiveResponse response = null) { throw null; } public static Azure.AI.VoiceLive.SessionUpdateResponseDone SessionUpdateResponseDone(string eventId = null, Azure.AI.VoiceLive.VoiceLiveResponse response = null) { throw null; } - public static Azure.AI.VoiceLive.SessionUpdateResponseEmotionHypothesis SessionUpdateResponseEmotionHypothesis(string eventId = null, string emotion = null, System.Collections.Generic.IEnumerable candidates = null, int audioOffsetMs = 0, int audioDurationMs = 0, string responseId = null, 
string itemId = null) { throw null; } public static Azure.AI.VoiceLive.SessionUpdateResponseFunctionCallArgumentsDelta SessionUpdateResponseFunctionCallArgumentsDelta(string eventId = null, string responseId = null, string itemId = null, int outputIndex = 0, string callId = null, string delta = null) { throw null; } public static Azure.AI.VoiceLive.SessionUpdateResponseFunctionCallArgumentsDone SessionUpdateResponseFunctionCallArgumentsDone(string eventId = null, string responseId = null, string itemId = null, int outputIndex = 0, string callId = null, string arguments = null, string name = null) { throw null; } public static Azure.AI.VoiceLive.SessionUpdateResponseOutputItemAdded SessionUpdateResponseOutputItemAdded(string eventId = null, string responseId = null, int outputIndex = 0, Azure.AI.VoiceLive.ResponseItem item = null) { throw null; } public static Azure.AI.VoiceLive.SessionUpdateResponseOutputItemDone SessionUpdateResponseOutputItemDone(string eventId = null, string responseId = null, int outputIndex = 0, Azure.AI.VoiceLive.ResponseItem item = null) { throw null; } public static Azure.AI.VoiceLive.SessionUpdateResponseTextDelta SessionUpdateResponseTextDelta(string eventId = null, string responseId = null, string itemId = null, int outputIndex = 0, int contentIndex = 0, string delta = null) { throw null; } public static Azure.AI.VoiceLive.SessionUpdateResponseTextDone SessionUpdateResponseTextDone(string eventId = null, string responseId = null, string itemId = null, int outputIndex = 0, int contentIndex = 0, string text = null) { throw null; } - public static Azure.AI.VoiceLive.SessionUpdateSessionAvatarConnecting SessionUpdateSessionAvatarConnecting(string eventId = null, string serverSdp = null) { throw null; } public static Azure.AI.VoiceLive.SessionUpdateSessionCreated SessionUpdateSessionCreated(string eventId = null, Azure.AI.VoiceLive.VoiceLiveSessionResponse session = null) { throw null; } public static 
Azure.AI.VoiceLive.SessionUpdateSessionUpdated SessionUpdateSessionUpdated(string eventId = null, Azure.AI.VoiceLive.VoiceLiveSessionResponse session = null) { throw null; } public static Azure.AI.VoiceLive.SystemMessageItem SystemMessageItem(string id = null, Azure.AI.VoiceLive.ItemParamStatus? status = default(Azure.AI.VoiceLive.ItemParamStatus?), System.Collections.Generic.IEnumerable content = null) { throw null; } @@ -1980,8 +1869,8 @@ public static partial class VoiceLiveModelFactory public static Azure.AI.VoiceLive.VoiceLiveContentPart VoiceLiveContentPart(string type = null) { throw null; } public static Azure.AI.VoiceLive.VoiceLiveErrorDetails VoiceLiveErrorDetails(string code = null, string message = null, string param = null, string type = null, string eventId = null) { throw null; } public static Azure.AI.VoiceLive.VoiceLiveFunctionDefinition VoiceLiveFunctionDefinition(string name = null, string description = null, System.BinaryData parameters = null) { throw null; } - public static Azure.AI.VoiceLive.VoiceLiveSessionOptions VoiceLiveSessionOptions(string model = null, System.Collections.Generic.IEnumerable modalities = null, Azure.AI.VoiceLive.AnimationOptions animation = null, string instructions = null, Azure.AI.VoiceLive.InputAudio inputAudio = null, int? inputAudioSamplingRate = default(int?), Azure.AI.VoiceLive.AudioFormat? inputAudioFormat = default(Azure.AI.VoiceLive.AudioFormat?), Azure.AI.VoiceLive.AudioFormat? outputAudioFormat = default(Azure.AI.VoiceLive.AudioFormat?), Azure.AI.VoiceLive.TurnDetection turnDetection = null, Azure.AI.VoiceLive.AudioNoiseReduction inputAudioNoiseReduction = null, Azure.AI.VoiceLive.AudioEchoCancellation inputAudioEchoCancellation = null, Azure.AI.VoiceLive.AvatarConfiguration avatar = null, Azure.AI.VoiceLive.AudioInputTranscriptionSettings inputAudioTranscription = null, System.Collections.Generic.IEnumerable outputAudioTimestampTypes = null, System.Collections.Generic.IEnumerable tools = null, float? 
temperature = default(float?), Azure.AI.VoiceLive.RespondingAgentOptions agent = null, System.BinaryData voiceInternal = null, System.BinaryData maxResponseOutputTokens = null, System.BinaryData toolChoice = null) { throw null; } - public static Azure.AI.VoiceLive.VoiceLiveSessionResponse VoiceLiveSessionResponse(string model = null, System.Collections.Generic.IEnumerable modalities = null, Azure.AI.VoiceLive.AnimationOptions animation = null, string instructions = null, Azure.AI.VoiceLive.InputAudio inputAudio = null, int? inputAudioSamplingRate = default(int?), Azure.AI.VoiceLive.AudioFormat? inputAudioFormat = default(Azure.AI.VoiceLive.AudioFormat?), Azure.AI.VoiceLive.AudioFormat? outputAudioFormat = default(Azure.AI.VoiceLive.AudioFormat?), Azure.AI.VoiceLive.TurnDetection turnDetection = null, Azure.AI.VoiceLive.AudioNoiseReduction inputAudioNoiseReduction = null, Azure.AI.VoiceLive.AudioEchoCancellation inputAudioEchoCancellation = null, Azure.AI.VoiceLive.AvatarConfiguration avatar = null, Azure.AI.VoiceLive.AudioInputTranscriptionSettings inputAudioTranscription = null, System.Collections.Generic.IEnumerable outputAudioTimestampTypes = null, System.Collections.Generic.IEnumerable tools = null, float? temperature = default(float?), Azure.AI.VoiceLive.RespondingAgentOptions agent = null, System.BinaryData voiceInternal = null, System.BinaryData maxResponseOutputTokens = null, System.BinaryData toolChoice = null, string id = null) { throw null; } + public static Azure.AI.VoiceLive.VoiceLiveSessionOptions VoiceLiveSessionOptions(string model = null, System.Collections.Generic.IEnumerable modalities = null, Azure.AI.VoiceLive.AnimationOptions animation = null, string instructions = null, int? inputAudioSamplingRate = default(int?), Azure.AI.VoiceLive.InputAudioFormat? inputAudioFormat = default(Azure.AI.VoiceLive.InputAudioFormat?), Azure.AI.VoiceLive.OutputAudioFormat? 
outputAudioFormat = default(Azure.AI.VoiceLive.OutputAudioFormat?), Azure.AI.VoiceLive.AudioNoiseReduction inputAudioNoiseReduction = null, Azure.AI.VoiceLive.AudioEchoCancellation inputAudioEchoCancellation = null, Azure.AI.VoiceLive.AvatarConfiguration avatar = null, Azure.AI.VoiceLive.AudioInputTranscriptionSettings inputAudioTranscription = null, System.Collections.Generic.IEnumerable outputAudioTimestampTypes = null, System.Collections.Generic.IEnumerable tools = null, float? temperature = default(float?), System.BinaryData voiceInternal = null, System.BinaryData maxResponseOutputTokens = null, System.BinaryData toolChoice = null, System.BinaryData turnDetection = null) { throw null; } + public static Azure.AI.VoiceLive.VoiceLiveSessionResponse VoiceLiveSessionResponse(string model = null, System.Collections.Generic.IEnumerable modalities = null, Azure.AI.VoiceLive.AnimationOptions animation = null, string instructions = null, int? inputAudioSamplingRate = default(int?), Azure.AI.VoiceLive.InputAudioFormat? inputAudioFormat = default(Azure.AI.VoiceLive.InputAudioFormat?), Azure.AI.VoiceLive.OutputAudioFormat? outputAudioFormat = default(Azure.AI.VoiceLive.OutputAudioFormat?), Azure.AI.VoiceLive.AudioNoiseReduction inputAudioNoiseReduction = null, Azure.AI.VoiceLive.AudioEchoCancellation inputAudioEchoCancellation = null, Azure.AI.VoiceLive.AvatarConfiguration avatar = null, Azure.AI.VoiceLive.AudioInputTranscriptionSettings inputAudioTranscription = null, System.Collections.Generic.IEnumerable outputAudioTimestampTypes = null, System.Collections.Generic.IEnumerable tools = null, float? 
temperature = default(float?), System.BinaryData voiceInternal = null, System.BinaryData maxResponseOutputTokens = null, System.BinaryData toolChoice = null, System.BinaryData turnDetection = null, Azure.AI.VoiceLive.RespondingAgentOptions agent = null, string id = null) { throw null; } public static Azure.AI.VoiceLive.VoiceLiveToolDefinition VoiceLiveToolDefinition(string type = null) { throw null; } } public partial class VoiceLiveResponse : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel @@ -1993,7 +1882,7 @@ internal VoiceLiveResponse() { } public Azure.AI.VoiceLive.SessionUpdateModality Modalities { get { throw null; } } public string Object { get { throw null; } } public System.Collections.Generic.IList Output { get { throw null; } } - public Azure.AI.VoiceLive.AudioFormat? OutputAudioFormat { get { throw null; } } + public Azure.AI.VoiceLive.OutputAudioFormat? OutputAudioFormat { get { throw null; } } public Azure.AI.VoiceLive.VoiceLiveResponseStatus? Status { get { throw null; } } public Azure.AI.VoiceLive.ResponseStatusDetails StatusDetails { get { throw null; } } public float? 
Temperature { get { throw null; } } @@ -2029,13 +1918,27 @@ protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer public static bool operator !=(Azure.AI.VoiceLive.VoiceLiveResponseItemStatus left, Azure.AI.VoiceLive.VoiceLiveResponseItemStatus right) { throw null; } public override string ToString() { throw null; } } - public enum VoiceLiveResponseStatus + [System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential)] + public readonly partial struct VoiceLiveResponseStatus : System.IEquatable { - Completed = 0, - Cancelled = 1, - Failed = 2, - Incomplete = 3, - InProgress = 4, + private readonly object _dummy; + private readonly int _dummyPrimitive; + public VoiceLiveResponseStatus(string value) { throw null; } + public static Azure.AI.VoiceLive.VoiceLiveResponseStatus Cancelled { get { throw null; } } + public static Azure.AI.VoiceLive.VoiceLiveResponseStatus Completed { get { throw null; } } + public static Azure.AI.VoiceLive.VoiceLiveResponseStatus Failed { get { throw null; } } + public static Azure.AI.VoiceLive.VoiceLiveResponseStatus Incomplete { get { throw null; } } + public static Azure.AI.VoiceLive.VoiceLiveResponseStatus InProgress { get { throw null; } } + public bool Equals(Azure.AI.VoiceLive.VoiceLiveResponseStatus other) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override bool Equals(object obj) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override int GetHashCode() { throw null; } + public static bool operator ==(Azure.AI.VoiceLive.VoiceLiveResponseStatus left, Azure.AI.VoiceLive.VoiceLiveResponseStatus right) { throw null; } + public static implicit operator Azure.AI.VoiceLive.VoiceLiveResponseStatus (string value) { throw null; } + public static implicit operator Azure.AI.VoiceLive.VoiceLiveResponseStatus? 
(string value) { throw null; } + public static bool operator !=(Azure.AI.VoiceLive.VoiceLiveResponseStatus left, Azure.AI.VoiceLive.VoiceLiveResponseStatus right) { throw null; } + public override string ToString() { throw null; } } public partial class VoiceLiveSession : System.IAsyncDisposable, System.IDisposable { @@ -2043,7 +1946,6 @@ protected internal VoiceLiveSession(Azure.AI.VoiceLive.VoiceLiveClient parentCli protected internal VoiceLiveSession(Azure.AI.VoiceLive.VoiceLiveClient parentClient, System.Uri endpoint, Azure.Core.TokenCredential credential) { } public System.Net.WebSockets.WebSocketState ConnectionState { get { throw null; } } public bool IsConnected { get { throw null; } } - public System.Net.WebSockets.WebSocket WebSocket { get { throw null; } protected set { } } public virtual void AddItem(Azure.AI.VoiceLive.ConversationRequestItem item, string previousItemId, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { } public virtual void AddItem(Azure.AI.VoiceLive.ConversationRequestItem item, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { } public virtual System.Threading.Tasks.Task AddItemAsync(Azure.AI.VoiceLive.ConversationRequestItem item, string previousItemId, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } @@ -2064,12 +1966,8 @@ protected internal VoiceLiveSession(Azure.AI.VoiceLive.VoiceLiveClient parentCli public virtual System.Threading.Tasks.Task CloseAsync(System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } public virtual void CommitInputAudio(System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { } public virtual System.Threading.Tasks.Task CommitInputAudioAsync(System.Threading.CancellationToken cancellationToken = 
default(System.Threading.CancellationToken)) { throw null; } - public virtual void ConfigureConversationSession(Azure.AI.VoiceLive.VoiceLiveSessionOptions sessionOptions, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { } - public virtual System.Threading.Tasks.Task ConfigureConversationSessionAsync(Azure.AI.VoiceLive.VoiceLiveSessionOptions sessionOptions, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } public virtual void ConfigureSession(Azure.AI.VoiceLive.VoiceLiveSessionOptions sessionOptions, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { } public virtual System.Threading.Tasks.Task ConfigureSessionAsync(Azure.AI.VoiceLive.VoiceLiveSessionOptions sessionOptions, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } - public virtual void ConfigureTranscriptionSession(Azure.AI.VoiceLive.VoiceLiveSessionOptions sessionOptions, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { } - public virtual System.Threading.Tasks.Task ConfigureTranscriptionSessionAsync(Azure.AI.VoiceLive.VoiceLiveSessionOptions sessionOptions, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } protected internal virtual void Connect(System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { } protected internal virtual System.Threading.Tasks.Task ConnectAsync(System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } public virtual void ConnectAvatar(string clientSdp, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { } @@ -2106,19 +2004,17 @@ protected virtual void Dispose(bool disposing) { } 
public virtual System.Threading.Tasks.Task StartResponseAsync(Azure.AI.VoiceLive.VoiceLiveSessionOptions responseOptions, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } public virtual System.Threading.Tasks.Task StartResponseAsync(string additionalInstructions, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } public virtual System.Threading.Tasks.Task StartResponseAsync(System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } - public virtual void TruncateConversation(string itemId, int contentIndex, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { } - public virtual System.Threading.Tasks.Task TruncateConversationAsync(string itemId, int contentIndex, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual void TruncateConversation(string itemId, int contentIndex, System.TimeSpan audioEnd = default(System.TimeSpan), System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { } + public virtual System.Threading.Tasks.Task TruncateConversationAsync(string itemId, int contentIndex, System.TimeSpan audioEnd = default(System.TimeSpan), System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } public System.Threading.Tasks.Task WaitForUpdateAsync(System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) where T : Azure.AI.VoiceLive.SessionUpdate { throw null; } } public partial class VoiceLiveSessionOptions : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel { public VoiceLiveSessionOptions() { } - public Azure.AI.VoiceLive.RespondingAgentOptions Agent { get { throw null; 
} set { } } public Azure.AI.VoiceLive.AnimationOptions Animation { get { throw null; } set { } } public Azure.AI.VoiceLive.AvatarConfiguration Avatar { get { throw null; } set { } } - public Azure.AI.VoiceLive.InputAudio InputAudio { get { throw null; } set { } } public Azure.AI.VoiceLive.AudioEchoCancellation InputAudioEchoCancellation { get { throw null; } set { } } - public Azure.AI.VoiceLive.AudioFormat? InputAudioFormat { get { throw null; } set { } } + public Azure.AI.VoiceLive.InputAudioFormat? InputAudioFormat { get { throw null; } set { } } public Azure.AI.VoiceLive.AudioNoiseReduction InputAudioNoiseReduction { get { throw null; } set { } } public int? InputAudioSamplingRate { get { throw null; } set { } } public Azure.AI.VoiceLive.AudioInputTranscriptionSettings InputAudioTranscription { get { throw null; } set { } } @@ -2126,7 +2022,7 @@ public VoiceLiveSessionOptions() { } public Azure.AI.VoiceLive.ResponseMaxOutputTokensOption MaxResponseOutputTokens { get { throw null; } set { } } public System.Collections.Generic.IList Modalities { get { throw null; } } public string Model { get { throw null; } set { } } - public Azure.AI.VoiceLive.AudioFormat? OutputAudioFormat { get { throw null; } set { } } + public Azure.AI.VoiceLive.OutputAudioFormat? OutputAudioFormat { get { throw null; } set { } } public System.Collections.Generic.IList OutputAudioTimestampTypes { get { throw null; } } public float? 
Temperature { get { throw null; } set { } } public Azure.AI.VoiceLive.ToolChoiceOption ToolChoice { get { throw null; } set { } } @@ -2146,6 +2042,7 @@ protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer public partial class VoiceLiveSessionResponse : Azure.AI.VoiceLive.VoiceLiveSessionOptions, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel { internal VoiceLiveSessionResponse() { } + public Azure.AI.VoiceLive.RespondingAgentOptions Agent { get { throw null; } } public string Id { get { throw null; } } protected override Azure.AI.VoiceLive.VoiceLiveSessionOptions JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } @@ -2176,10 +2073,3 @@ protected VoiceProvider() { } internal abstract System.BinaryData ToBinaryData(); } } -namespace Microsoft.Extensions.Azure -{ - public static partial class VoiceLiveClientBuilderExtensions - { - public static Azure.Core.Extensions.IAzureClientBuilder AddVoiceLiveClient(this TBuilder builder, TConfiguration configuration) where TBuilder : Azure.Core.Extensions.IAzureClientFactoryBuilderWithConfiguration { throw null; } - } -} diff --git a/sdk/ai/Azure.AI.VoiceLive/samples/BasicVoiceAssistant/BasicVoiceAssistant.cs b/sdk/ai/Azure.AI.VoiceLive/samples/BasicVoiceAssistant/BasicVoiceAssistant.cs index 72ea927376af..b13588afae7e 100644 --- a/sdk/ai/Azure.AI.VoiceLive/samples/BasicVoiceAssistant/BasicVoiceAssistant.cs +++ b/sdk/ai/Azure.AI.VoiceLive/samples/BasicVoiceAssistant/BasicVoiceAssistant.cs @@ -12,7 +12,7 @@ namespace Azure.AI.VoiceLive.Samples; /// This sample now demonstrates some of the new convenience methods added to the VoiceLive SDK: /// - ClearStreamingAudioAsync() - Clears all input audio currently being streamed 
/// - CancelResponseAsync() - Cancels the current response generation (existing method) -/// - ConfigureConversationSessionAsync() - Configures session options (existing method) +/// - ConfigureSessionAsync() - Configures session options (existing method) /// /// Additional convenience methods available but not shown in this sample: /// - StartAudioTurnAsync() / EndAudioTurnAsync() / CancelAudioTurnAsync() - Audio turn management @@ -123,11 +123,11 @@ private async Task SetupSessionAsync(CancellationToken cancellationToken) var azureVoice = new AzureStandardVoice(_voice); // Create strongly typed turn detection configuration - var turnDetectionConfig = new ServerVad + var turnDetectionConfig = new ServerVadTurnDetection { Threshold = 0.5f, - PrefixPaddingMs = 300, - SilenceDurationMs = 500 + PrefixPadding = TimeSpan.FromMilliseconds(300), + SilenceDuration = TimeSpan.FromMilliseconds(500) }; // Create conversation session options @@ -137,8 +137,8 @@ private async Task SetupSessionAsync(CancellationToken cancellationToken) Model = _model, Instructions = _instructions, Voice = azureVoice, - InputAudioFormat = AudioFormat.Pcm16, - OutputAudioFormat = AudioFormat.Pcm16, + InputAudioFormat = InputAudioFormat.Pcm16, + OutputAudioFormat = OutputAudioFormat.Pcm16, TurnDetection = turnDetectionConfig }; @@ -147,7 +147,7 @@ private async Task SetupSessionAsync(CancellationToken cancellationToken) sessionOptions.Modalities.Add(InputModality.Text); sessionOptions.Modalities.Add(InputModality.Audio); - await _session!.ConfigureConversationSessionAsync(sessionOptions, cancellationToken).ConfigureAwait(false); + await _session!.ConfigureSessionAsync(sessionOptions, cancellationToken).ConfigureAwait(false); _logger.LogInformation("Session configuration sent"); } diff --git a/sdk/ai/Azure.AI.VoiceLive/samples/BasicVoiceAssistant/README.md b/sdk/ai/Azure.AI.VoiceLive/samples/BasicVoiceAssistant/README.md index dcdef5f6a866..c3dda9da52c2 100644 --- 
a/sdk/ai/Azure.AI.VoiceLive/samples/BasicVoiceAssistant/README.md +++ b/sdk/ai/Azure.AI.VoiceLive/samples/BasicVoiceAssistant/README.md @@ -8,7 +8,7 @@ This sample now demonstrates some of the new convenience methods added to the Vo **Used in this sample:** - `ClearStreamingAudioAsync()` - Clears all input audio currently being streamed -- `ConfigureConversationSessionAsync()` - Configures conversation session options +- `ConfigureSessionAsync()` - Configures conversation session options - `CancelResponseAsync()` - Cancels the current response generation - `SendInputAudioAsync()` - Sends audio data to the service diff --git a/sdk/ai/Azure.AI.VoiceLive/samples/CustomerServiceBot/CustomerServiceBot.cs b/sdk/ai/Azure.AI.VoiceLive/samples/CustomerServiceBot/CustomerServiceBot.cs index 7533383430fd..f77969ad0c66 100644 --- a/sdk/ai/Azure.AI.VoiceLive/samples/CustomerServiceBot/CustomerServiceBot.cs +++ b/sdk/ai/Azure.AI.VoiceLive/samples/CustomerServiceBot/CustomerServiceBot.cs @@ -126,11 +126,11 @@ private async Task SetupSessionAsync(CancellationToken cancellationToken) var azureVoice = new AzureStandardVoice(_voice); // Create strongly typed turn detection configuration - var turnDetectionConfig = new ServerVad + var turnDetectionConfig = new ServerVadTurnDetection { Threshold = 0.5f, - PrefixPaddingMs = 300, - SilenceDurationMs = 500 + PrefixPadding = TimeSpan.FromMilliseconds(300), + SilenceDuration = TimeSpan.FromMilliseconds(500) }; // Create conversation session options with function tools @@ -139,8 +139,8 @@ private async Task SetupSessionAsync(CancellationToken cancellationToken) Model = _model, Instructions = _instructions, Voice = azureVoice, - InputAudioFormat = AudioFormat.Pcm16, - OutputAudioFormat = AudioFormat.Pcm16, + InputAudioFormat = InputAudioFormat.Pcm16, + OutputAudioFormat = OutputAudioFormat.Pcm16, TurnDetection = turnDetectionConfig }; @@ -157,7 +157,7 @@ private async Task SetupSessionAsync(CancellationToken cancellationToken) 
sessionOptions.Tools.Add(CreateUpdateShippingAddressTool()); - await _session!.ConfigureConversationSessionAsync(sessionOptions, cancellationToken).ConfigureAwait(false); + await _session!.ConfigureSessionAsync(sessionOptions, cancellationToken).ConfigureAwait(false); _logger.LogInformation("Session configuration sent with {ToolCount} customer service tools", sessionOptions.Tools.Count); } diff --git a/sdk/ai/Azure.AI.VoiceLive/samples/snippets/BasicUsageSnippets.cs b/sdk/ai/Azure.AI.VoiceLive/samples/snippets/BasicUsageSnippets.cs index af6d7a6b33f6..e0f8638b2736 100644 --- a/sdk/ai/Azure.AI.VoiceLive/samples/snippets/BasicUsageSnippets.cs +++ b/sdk/ai/Azure.AI.VoiceLive/samples/snippets/BasicUsageSnippets.cs @@ -35,14 +35,14 @@ public async Task BasicVoiceAssistantExample() Model = model, Instructions = "You are a helpful AI assistant. Respond naturally and conversationally.", Voice = new AzureStandardVoice("en-US-AvaNeural"), - TurnDetection = new ServerVad() + TurnDetection = new AzureSemanticVadTurnDetection() { Threshold = 0.5f, - PrefixPaddingMs = 300, - SilenceDurationMs = 500 + PrefixPadding = TimeSpan.FromMilliseconds(300), + SilenceDuration = TimeSpan.FromMilliseconds(500) }, - InputAudioFormat = AudioFormat.Pcm16, - OutputAudioFormat = AudioFormat.Pcm16 + InputAudioFormat = InputAudioFormat.Pcm16, + OutputAudioFormat = OutputAudioFormat.Pcm16 }; // Ensure modalities include audio @@ -50,7 +50,7 @@ public async Task BasicVoiceAssistantExample() sessionOptions.Modalities.Add(InputModality.Text); sessionOptions.Modalities.Add(InputModality.Audio); - await session.ConfigureConversationSessionAsync(sessionOptions).ConfigureAwait(false); + await session.ConfigureSessionAsync(sessionOptions).ConfigureAwait(false); // Process events from the session await foreach (SessionUpdate serverEvent in session.GetUpdatesAsync().ConfigureAwait(false)) @@ -92,14 +92,12 @@ public async Task AdvancedVoiceConfiguration() { Temperature = 0.8f }, - TurnDetection = new 
AzureSemanticVad() + TurnDetection = new AzureSemanticVadTurnDetection() { - NegThreshold = 0.3f, - WindowSize = 300, RemoveFillerWords = true }, - InputAudioFormat = AudioFormat.Pcm16, - OutputAudioFormat = AudioFormat.Pcm16 + InputAudioFormat = InputAudioFormat.Pcm16, + OutputAudioFormat = OutputAudioFormat.Pcm16 }; // Ensure modalities include audio @@ -107,7 +105,7 @@ public async Task AdvancedVoiceConfiguration() sessionOptions.Modalities.Add(InputModality.Text); sessionOptions.Modalities.Add(InputModality.Audio); - await session.ConfigureConversationSessionAsync(sessionOptions).ConfigureAwait(false); + await session.ConfigureSessionAsync(sessionOptions).ConfigureAwait(false); #endregion } @@ -148,8 +146,8 @@ public async Task FunctionCallingExample() Model = model, Instructions = "You are a weather assistant. Use the get_current_weather function to help users with weather information.", Voice = new AzureStandardVoice("en-US-AvaNeural"), - InputAudioFormat = AudioFormat.Pcm16, - OutputAudioFormat = AudioFormat.Pcm16 + InputAudioFormat = InputAudioFormat.Pcm16, + OutputAudioFormat = OutputAudioFormat.Pcm16 }; // Add the function tool @@ -160,7 +158,7 @@ public async Task FunctionCallingExample() sessionOptions.Modalities.Add(InputModality.Text); sessionOptions.Modalities.Add(InputModality.Audio); - await session.ConfigureConversationSessionAsync(sessionOptions).ConfigureAwait(false); + await session.ConfigureSessionAsync(sessionOptions).ConfigureAwait(false); #endregion } } diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Customizations/AnimationOptions.cs b/sdk/ai/Azure.AI.VoiceLive/src/Customizations/AnimationOptions.cs new file mode 100644 index 000000000000..9ccba6781e8d --- /dev/null +++ b/sdk/ai/Azure.AI.VoiceLive/src/Customizations/AnimationOptions.cs @@ -0,0 +1,24 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.VoiceLive +{ + /// Configuration for animation outputs including blendshapes, visemes, and emotion metadata. + public partial class AnimationOptions + { + /// Interval for emotion detection in milliseconds. If not set, emotion detection is disabled. + public int? EmotionDetectionIntervalMs { get; set; } + + /// Interval for emotion detection. If not set, emotion detection is disabled. + public TimeSpan? EmotionDetectionInterval + { + get => EmotionDetectionIntervalMs.HasValue ? TimeSpan.FromMilliseconds(EmotionDetectionIntervalMs.Value) : (TimeSpan?)null; + set => EmotionDetectionIntervalMs = value.HasValue ? (int?)value.Value.TotalMilliseconds : null; + } + } +} diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Customizations/AssistantMessageItem.cs b/sdk/ai/Azure.AI.VoiceLive/src/Customizations/AssistantMessageItem.cs new file mode 100644 index 000000000000..27d9b3694f1e --- /dev/null +++ b/sdk/ai/Azure.AI.VoiceLive/src/Customizations/AssistantMessageItem.cs @@ -0,0 +1,20 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +#nullable disable + +using System; +using System.Collections.Generic; +using System.Linq; + +namespace Azure.AI.VoiceLive +{ + /// The AssistantMessageItem. + public partial class AssistantMessageItem : MessageItem + { + /// Initializes a new instance of . + /// + /// is null. + public AssistantMessageItem(OutputTextContentPart content) : this(new[] { content }) { } + } +} diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Customizations/AzurePlatformVoice.cs b/sdk/ai/Azure.AI.VoiceLive/src/Customizations/AzurePlatformVoice.cs deleted file mode 100644 index 4e27edfd3bd4..000000000000 --- a/sdk/ai/Azure.AI.VoiceLive/src/Customizations/AzurePlatformVoice.cs +++ /dev/null @@ -1,17 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
- -using System; -using System.IO; - -namespace Azure.AI.VoiceLive -{ - public partial class AzurePlatformVoice - { - /// - /// - /// - /// - internal override BinaryData ToBinaryData() => this.PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions.Json); - } -} diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Customizations/AzureSemanticEouDetection.cs b/sdk/ai/Azure.AI.VoiceLive/src/Customizations/AzureSemanticEouDetection.cs new file mode 100644 index 000000000000..2a419e189ed4 --- /dev/null +++ b/sdk/ai/Azure.AI.VoiceLive/src/Customizations/AzureSemanticEouDetection.cs @@ -0,0 +1,23 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +#nullable disable + +using System; + +namespace Azure.AI.VoiceLive +{ + /// Azure semantic end-of-utterance detection (default). + public partial class AzureSemanticEouDetection + { + /// Gets or sets the Timeout. + internal float? TimeoutMs { get; set; } + + /// Gets or sets the Timeout. + public TimeSpan Timeout + { + get => TimeSpan.FromMilliseconds(TimeoutMs ?? 0); + set => TimeoutMs = (float)value.TotalMilliseconds; + } + } +} diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Customizations/AzureSemanticEouDetectionEn.cs b/sdk/ai/Azure.AI.VoiceLive/src/Customizations/AzureSemanticEouDetectionEn.cs new file mode 100644 index 000000000000..43573429b9db --- /dev/null +++ b/sdk/ai/Azure.AI.VoiceLive/src/Customizations/AzureSemanticEouDetectionEn.cs @@ -0,0 +1,23 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +#nullable disable + +using System; + +namespace Azure.AI.VoiceLive +{ + /// Azure semantic end-of-utterance detection (English). + public partial class AzureSemanticEouDetectionEn + { + /// Gets or sets the Timeout. + internal float? TimeoutMs { get; set; } + + /// Gets or sets the Timeout. + public TimeSpan Timeout + { + get => TimeSpan.FromMilliseconds(TimeoutMs ?? 
0); + set => TimeoutMs = (float)value.TotalMilliseconds; + } + } +} diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Customizations/AzureSemanticEouDetectionMultilingual.cs b/sdk/ai/Azure.AI.VoiceLive/src/Customizations/AzureSemanticEouDetectionMultilingual.cs new file mode 100644 index 000000000000..d38fef4ce6f4 --- /dev/null +++ b/sdk/ai/Azure.AI.VoiceLive/src/Customizations/AzureSemanticEouDetectionMultilingual.cs @@ -0,0 +1,23 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +#nullable disable + +using System; + +namespace Azure.AI.VoiceLive +{ + /// Azure semantic end-of-utterance detection (multilingual). + public partial class AzureSemanticEouDetectionMultilingual + { + /// Gets or sets the Timeout. + internal float? TimeoutMs { get; set; } + + /// Gets or sets the Timeout. + public TimeSpan Timeout + { + get => TimeSpan.FromMilliseconds(TimeoutMs ?? 0); + set => TimeoutMs = (float)value.TotalMilliseconds; + } + } +} diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Customizations/AzureSemanticVadEnTurnDetection.cs b/sdk/ai/Azure.AI.VoiceLive/src/Customizations/AzureSemanticVadEnTurnDetection.cs new file mode 100644 index 000000000000..75c4e0d27660 --- /dev/null +++ b/sdk/ai/Azure.AI.VoiceLive/src/Customizations/AzureSemanticVadEnTurnDetection.cs @@ -0,0 +1,43 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +#nullable disable + +using System; + +namespace Azure.AI.VoiceLive +{ + /// Azure semantic VAD-based turn detection (English). + public partial class AzureSemanticVadEnTurnDetection + { + /// Gets or sets the PrefixPaddingMs. + internal int? PrefixPaddingMs { get; set; } + + /// Gets or sets the PrefixPaddingMs. + public TimeSpan PrefixPadding + { + get => TimeSpan.FromMilliseconds(PrefixPaddingMs ?? 0); + set => PrefixPaddingMs = (int)value.TotalMilliseconds; + } + + /// Gets or sets the SilenceDurationMs. + internal int?
SilenceDurationMs { get; set; } + + /// Gets or sets the SilenceDurationMs. + public TimeSpan SilenceDuration + { + get => TimeSpan.FromMilliseconds(SilenceDurationMs ?? 0); + set => SilenceDurationMs = (int)value.TotalMilliseconds; + } + + /// Gets or sets the SpeechDurationMs. + internal int? SpeechDurationMs { get; set; } + + /// Gets or sets the SpeechDurationMs. + public TimeSpan SpeechDuration + { + get => TimeSpan.FromMilliseconds(SpeechDurationMs ?? 0); + set => SpeechDurationMs = (int)value.TotalMilliseconds; + } + } +} diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Customizations/AzureSemanticVadMultilingualTurnDetection.cs b/sdk/ai/Azure.AI.VoiceLive/src/Customizations/AzureSemanticVadMultilingualTurnDetection.cs new file mode 100644 index 000000000000..3dd4a0729b33 --- /dev/null +++ b/sdk/ai/Azure.AI.VoiceLive/src/Customizations/AzureSemanticVadMultilingualTurnDetection.cs @@ -0,0 +1,43 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +#nullable disable + +using System; + +namespace Azure.AI.VoiceLive +{ + /// Base model for VAD-based turn detection. + public partial class AzureSemanticVadMultilingualTurnDetection + { + /// Gets or sets the PrefixPaddingMs. + internal int? PrefixPaddingMs { get; set; } + + /// Gets or sets the PrefixPaddingMs. + public TimeSpan PrefixPadding + { + get => TimeSpan.FromMilliseconds(PrefixPaddingMs ?? 0); + set => PrefixPaddingMs = (int)value.TotalMilliseconds; + } + + /// Gets or sets the SilenceDurationMs. + internal int? SilenceDurationMs { get; set; } + + /// Gets or sets the SilenceDurationMs. + public TimeSpan SilenceDuration + { + get => TimeSpan.FromMilliseconds(SilenceDurationMs ?? 0); + set => SilenceDurationMs = (int)value.TotalMilliseconds; + } + + /// Gets or sets the SpeechDurationMs. + internal int? SpeechDurationMs { get; set; } + + /// Gets or sets the Speech Duration. 
+ public TimeSpan SpeechDuration + { + get => TimeSpan.FromMilliseconds(SpeechDurationMs ?? 0); + set => SpeechDurationMs = (int)value.TotalMilliseconds; + } + } +} diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Customizations/AzureSemanticVadTurnDetection.cs b/sdk/ai/Azure.AI.VoiceLive/src/Customizations/AzureSemanticVadTurnDetection.cs new file mode 100644 index 000000000000..e17356c6626e --- /dev/null +++ b/sdk/ai/Azure.AI.VoiceLive/src/Customizations/AzureSemanticVadTurnDetection.cs @@ -0,0 +1,43 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +#nullable disable + +using System; + +namespace Azure.AI.VoiceLive +{ + /// Base model for VAD-based turn detection. + public partial class AzureSemanticVadTurnDetection + { + /// Gets or sets the PrefixPaddingMs. + internal int? PrefixPaddingMs { get; set; } + + /// Gets or sets the PrefixPaddingMs. + public TimeSpan PrefixPadding + { + get => TimeSpan.FromMilliseconds(PrefixPaddingMs ?? 0); + set => PrefixPaddingMs = (int)value.TotalMilliseconds; + } + + /// Gets or sets the SilenceDurationMs. + internal int? SilenceDurationMs { get; set; } + + /// Gets or sets the SilenceDurationMs. + public TimeSpan SilenceDuration + { + get => TimeSpan.FromMilliseconds(SilenceDurationMs ?? 0); + set => SilenceDurationMs = (int)value.TotalMilliseconds; + } + + /// Gets or sets the SpeechDurationMs. + internal int? SpeechDurationMs { get; set; } + + /// Gets or sets the SpeechDurationMs. + public TimeSpan SpeechDuration + { + get => TimeSpan.FromMilliseconds(SpeechDurationMs ?? 0); + set => SpeechDurationMs = (int)value.TotalMilliseconds; + } + } +} diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Customizations/LLMVoice.cs b/sdk/ai/Azure.AI.VoiceLive/src/Customizations/LLMVoice.cs deleted file mode 100644 index 01414c7153fa..000000000000 --- a/sdk/ai/Azure.AI.VoiceLive/src/Customizations/LLMVoice.cs +++ /dev/null @@ -1,17 +0,0 @@ -// Copyright (c) Microsoft Corporation. 
All rights reserved. -// Licensed under the MIT License. - -using System; -using System.IO; - -namespace Azure.AI.VoiceLive -{ - public partial class LlmVoice : VoiceProvider - { - /// - /// - /// - /// - internal override BinaryData ToBinaryData() => this.PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions.Json); - } -} diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Generated/NoTurnDetection.Serialization.cs b/sdk/ai/Azure.AI.VoiceLive/src/Customizations/NoTurnDetection.Serialization.cs similarity index 86% rename from sdk/ai/Azure.AI.VoiceLive/src/Generated/NoTurnDetection.Serialization.cs rename to sdk/ai/Azure.AI.VoiceLive/src/Customizations/NoTurnDetection.Serialization.cs index 649b86745cc6..9959003afda6 100644 --- a/sdk/ai/Azure.AI.VoiceLive/src/Generated/NoTurnDetection.Serialization.cs +++ b/sdk/ai/Azure.AI.VoiceLive/src/Customizations/NoTurnDetection.Serialization.cs @@ -12,7 +12,7 @@ namespace Azure.AI.VoiceLive { - /// Disables turn detection. + /// Disables turn detection. public partial class NoTurnDetection : IJsonModel { /// The JSON writer. @@ -34,6 +34,7 @@ protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWri throw new FormatException($"The model {nameof(NoTurnDetection)} does not support writing '{format}' format."); } base.JsonModelWriteCore(writer, options); + } /// The JSON reader. 
@@ -61,21 +62,9 @@ internal static NoTurnDetection DeserializeNoTurnDetection(JsonElement element, { return null; } - TurnDetectionType @type = default; - IDictionary additionalBinaryDataProperties = new ChangeTrackingDictionary(); - foreach (var prop in element.EnumerateObject()) - { - if (prop.NameEquals("type"u8)) - { - @type = prop.Value.GetString().ToTurnDetectionType(); - continue; - } - if (options.Format != "W") - { - additionalBinaryDataProperties.Add(prop.Name, BinaryData.FromString(prop.Value.GetRawText())); - } - } - return new NoTurnDetection(@type, additionalBinaryDataProperties); + + return new NoTurnDetection( + new TurnDetectionType("None"), null); } /// The client options for reading and writing models. @@ -102,7 +91,7 @@ protected override BinaryData PersistableModelWriteCore(ModelReaderWriterOptions /// The client options for reading and writing models. protected override TurnDetection PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options) { - string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; switch (format) { case "J": diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Generated/NoTurnDetection.cs b/sdk/ai/Azure.AI.VoiceLive/src/Customizations/NoTurnDetection.cs similarity index 72% rename from sdk/ai/Azure.AI.VoiceLive/src/Generated/NoTurnDetection.cs rename to sdk/ai/Azure.AI.VoiceLive/src/Customizations/NoTurnDetection.cs index d1d1b7fe39de..d35fa090288c 100644 --- a/sdk/ai/Azure.AI.VoiceLive/src/Generated/NoTurnDetection.cs +++ b/sdk/ai/Azure.AI.VoiceLive/src/Customizations/NoTurnDetection.cs @@ -10,19 +10,17 @@ namespace Azure.AI.VoiceLive { - /// Disables turn detection. + /// Disables turn detection. public partial class NoTurnDetection : TurnDetection { /// Initializes a new instance of . 
- public NoTurnDetection() : base(TurnDetectionType.None) - { - } + public NoTurnDetection() : base(new TurnDetectionType("None")) { } /// Initializes a new instance of . /// /// Keeps track of any properties unknown to the library. - internal NoTurnDetection(TurnDetectionType @type, IDictionary additionalBinaryDataProperties) : base(@type, additionalBinaryDataProperties) - { - } + internal NoTurnDetection(TurnDetectionType @type, IDictionary additionalBinaryDataProperties) : + base(@type, additionalBinaryDataProperties) + { } } } diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Customizations/ResponseSession.cs b/sdk/ai/Azure.AI.VoiceLive/src/Customizations/ResponseSession.cs index 366c9447394f..fbb286c74bbd 100644 --- a/sdk/ai/Azure.AI.VoiceLive/src/Customizations/ResponseSession.cs +++ b/sdk/ai/Azure.AI.VoiceLive/src/Customizations/ResponseSession.cs @@ -29,12 +29,6 @@ public partial class ResponseSession /// /// . /// - /// - /// . - /// - /// - /// . - /// /// /// /// diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Customizations/ServerVadTurnDetection.cs b/sdk/ai/Azure.AI.VoiceLive/src/Customizations/ServerVadTurnDetection.cs new file mode 100644 index 000000000000..0a167f2e06d8 --- /dev/null +++ b/sdk/ai/Azure.AI.VoiceLive/src/Customizations/ServerVadTurnDetection.cs @@ -0,0 +1,33 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +#nullable disable + +using System; + +namespace Azure.AI.VoiceLive +{ + /// Base model for VAD-based turn detection. + public partial class ServerVadTurnDetection + { + /// Gets or sets the PrefixPaddingMs. + internal int? PrefixPaddingMs { get; set; } + + /// Gets or sets the PrefixPaddingMs. + public TimeSpan PrefixPadding + { + get => TimeSpan.FromMilliseconds(PrefixPaddingMs ?? 0); + set => PrefixPaddingMs = (int)value.TotalMilliseconds; + } + + /// Gets or sets the SilenceDurationMs. + internal int? SilenceDurationMs { get; set; } + + /// Gets or sets the SilenceDurationMs. 
+ public TimeSpan SilenceDuration + { + get => TimeSpan.FromMilliseconds(SilenceDurationMs ?? 0); + set => SilenceDurationMs = (int)value.TotalMilliseconds; + } + } +} diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Customizations/SessionUpdateConversationItemTruncated.cs b/sdk/ai/Azure.AI.VoiceLive/src/Customizations/SessionUpdateConversationItemTruncated.cs new file mode 100644 index 000000000000..ddcd8af4051a --- /dev/null +++ b/sdk/ai/Azure.AI.VoiceLive/src/Customizations/SessionUpdateConversationItemTruncated.cs @@ -0,0 +1,35 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.VoiceLive +{ + /// + /// Returned when an earlier assistant audio message item is truncated by the + /// client with a `conversation.item.truncate` event. + /// + /// + /// + /// This event is used to + /// synchronize the server's understanding of the audio with the client's playback. + /// + /// + /// This action will truncate the audio and remove the server-side text transcript + /// to ensure there is no text in the context that hasn't been heard by the user. + /// + /// + public partial class SessionUpdateConversationItemTruncated + { + /// The duration up to which the audio was truncated, in milliseconds. + internal int AudioEndMs { get; } + + /// + /// The duration up to which the audio was truncated. 
+ /// + public TimeSpan AudioEnd => TimeSpan.FromMilliseconds(AudioEndMs); + } +} diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Customizations/SessionUpdateInputAudioBufferSpeechStarted.cs b/sdk/ai/Azure.AI.VoiceLive/src/Customizations/SessionUpdateInputAudioBufferSpeechStarted.cs index c91baa32d548..72fb5995d97a 100644 --- a/sdk/ai/Azure.AI.VoiceLive/src/Customizations/SessionUpdateInputAudioBufferSpeechStarted.cs +++ b/sdk/ai/Azure.AI.VoiceLive/src/Customizations/SessionUpdateInputAudioBufferSpeechStarted.cs @@ -8,8 +8,8 @@ namespace Azure.AI.VoiceLive { - /// The SessionUpdateInputAudioBufferSpeechStopped. - public partial class SessionUpdateInputAudioBufferSpeechStopped + /// The SessionUpdateInputAudioBufferSpeechStarted. + public partial class SessionUpdateInputAudioBufferSpeechStarted { /// /// Time from the start of all audio written to the buffer during the @@ -17,8 +17,8 @@ public partial class SessionUpdateInputAudioBufferSpeechStopped /// beginning of audio sent to the model, and thus includes the /// `prefix_padding_ms` configured in the Session. /// - public TimeSpan AudioEnd { get => TimeSpan.FromMilliseconds(AudioEndMs); } + public TimeSpan AudioStart { get => TimeSpan.FromMilliseconds(AudioStartMs); } - internal int AudioEndMsAs { get; } + internal int AudioStartMs { get; } } } diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Customizations/SessionUpdateInputAudioBufferSpeechStopped.cs b/sdk/ai/Azure.AI.VoiceLive/src/Customizations/SessionUpdateInputAudioBufferSpeechStopped.cs index 72fb5995d97a..e478cfcac349 100644 --- a/sdk/ai/Azure.AI.VoiceLive/src/Customizations/SessionUpdateInputAudioBufferSpeechStopped.cs +++ b/sdk/ai/Azure.AI.VoiceLive/src/Customizations/SessionUpdateInputAudioBufferSpeechStopped.cs @@ -8,8 +8,8 @@ namespace Azure.AI.VoiceLive { - /// The SessionUpdateInputAudioBufferSpeechStarted. - public partial class SessionUpdateInputAudioBufferSpeechStarted + /// The SessionUpdateInputAudioBufferSpeechStopped. 
+ public partial class SessionUpdateInputAudioBufferSpeechStopped { /// /// Time from the start of all audio written to the buffer during the @@ -17,8 +17,8 @@ public partial class SessionUpdateInputAudioBufferSpeechStarted /// beginning of audio sent to the model, and thus includes the /// `prefix_padding_ms` configured in the Session. /// - public TimeSpan AudioStart { get => TimeSpan.FromMilliseconds(AudioStartMs); } + public TimeSpan AudioEnd { get => TimeSpan.FromMilliseconds(AudioEndMs); } - internal int AudioStartMs { get; } + internal int AudioEndMs { get; } } } diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Customizations/SessionUpdateResponseAnimationVisemeDelta.cs b/sdk/ai/Azure.AI.VoiceLive/src/Customizations/SessionUpdateResponseAnimationVisemeDelta.cs new file mode 100644 index 000000000000..f0734a66fee3 --- /dev/null +++ b/sdk/ai/Azure.AI.VoiceLive/src/Customizations/SessionUpdateResponseAnimationVisemeDelta.cs @@ -0,0 +1,22 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.VoiceLive +{ + /// Represents a viseme ID delta update for animation based on audio. + public partial class SessionUpdateResponseAnimationVisemeDelta : SessionUpdate + { + /// Gets the AudioOffsetMs. + internal int AudioOffsetMs { get; } + + /// + /// Gets the offset in the overall response audio where the viseme occurs. + /// + public TimeSpan AudioOffset => TimeSpan.FromMilliseconds(AudioOffsetMs); + } +} diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Customizations/SessionUpdateResponseAudioTimestampDelta.cs b/sdk/ai/Azure.AI.VoiceLive/src/Customizations/SessionUpdateResponseAudioTimestampDelta.cs new file mode 100644 index 000000000000..c4fba80d31b1 --- /dev/null +++ b/sdk/ai/Azure.AI.VoiceLive/src/Customizations/SessionUpdateResponseAudioTimestampDelta.cs @@ -0,0 +1,30 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. 
+// Licensed under the MIT License. + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.VoiceLive +{ + /// Represents a word-level audio timestamp delta for a response. + public partial class SessionUpdateResponseAudioTimestampDelta + { + /// Gets the AudioOffsetMs. + internal int AudioOffsetMs { get; } + + /// + /// Offset in the overall response audio where the word begins. + /// + public TimeSpan AudioOffset => TimeSpan.FromMilliseconds(AudioOffsetMs); + + /// Gets the AudioDurationMs. + internal int AudioDurationMs { get; } + + /// + /// Gets the duration of the audio. + /// + public TimeSpan AudioDuration => TimeSpan.FromMilliseconds(AudioDurationMs); + } +} diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Customizations/SystemMessageItem.cs b/sdk/ai/Azure.AI.VoiceLive/src/Customizations/SystemMessageItem.cs new file mode 100644 index 000000000000..e5c807d9ae0f --- /dev/null +++ b/sdk/ai/Azure.AI.VoiceLive/src/Customizations/SystemMessageItem.cs @@ -0,0 +1,22 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; +using System.Linq; + +namespace Azure.AI.VoiceLive +{ + /// The SystemMessageItem. + public partial class SystemMessageItem : MessageItem + { + /// Initializes a new instance of . + /// + /// is null. 
+ public SystemMessageItem(InputTextContentPart content) : this(new[] { content }) { } + } +} diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Customizations/ToolChoiceOption.Serialization.cs b/sdk/ai/Azure.AI.VoiceLive/src/Customizations/ToolChoiceOption.Serialization.cs index 899d2e6924c9..3cea67807730 100644 --- a/sdk/ai/Azure.AI.VoiceLive/src/Customizations/ToolChoiceOption.Serialization.cs +++ b/sdk/ai/Azure.AI.VoiceLive/src/Customizations/ToolChoiceOption.Serialization.cs @@ -39,7 +39,7 @@ internal static void SerializeConversationToolChoiceOption(ToolChoiceOption inst } else if (instance.FunctionName is not null) { - var functionObject = new ToolChoiceFunctionObjectFunction(name: instance.FunctionName); + var functionObject = new ToolChoiceFunctionObject(name: instance.FunctionName); writer.WriteObjectValue(functionObject, options); } } @@ -48,7 +48,7 @@ internal static ToolChoiceOption DeserializeConversationToolChoiceOption(JsonEle { if (element.ValueKind == JsonValueKind.Object) { - var functionObject = ToolChoiceFunctionObjectFunction.DeserializeToolChoiceFunctionObjectFunction(element, options); + var functionObject = ToolChoiceFunctionObject.DeserializeToolChoiceFunctionObject(element, options); return new ToolChoiceOption(functionObject.Name); } if (element.ValueKind == JsonValueKind.String) diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Customizations/UserMessageItem.cs b/sdk/ai/Azure.AI.VoiceLive/src/Customizations/UserMessageItem.cs new file mode 100644 index 000000000000..678c9e00242b --- /dev/null +++ b/sdk/ai/Azure.AI.VoiceLive/src/Customizations/UserMessageItem.cs @@ -0,0 +1,22 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +#nullable disable + +using System; +using System.Collections.Generic; +using System.Linq; + +namespace Azure.AI.VoiceLive +{ + /// The UserMessageItem. + public partial class UserMessageItem : MessageItem + { + /// Initializes a new instance of . + /// + /// is null. 
+ public UserMessageItem(UserContentPart content) : this(new[] { content }) + { + } + } +} diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Customizations/VoiceLiveClient.core.cs b/sdk/ai/Azure.AI.VoiceLive/src/Customizations/VoiceLiveClient.core.cs new file mode 100644 index 000000000000..c0e3315bd002 --- /dev/null +++ b/sdk/ai/Azure.AI.VoiceLive/src/Customizations/VoiceLiveClient.core.cs @@ -0,0 +1,53 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +#nullable disable + +using System; +using System.Threading; +using System.Threading.Tasks; +using Azure; +using Azure.Core; +using Azure.Core.Pipeline; + +namespace Azure.AI.VoiceLive +{ + /// The VoiceLiveClient. + public partial class VoiceLiveClient + { + private readonly Uri _endpoint; + /// A credential used to authenticate to the service. + private readonly AzureKeyCredential _keyCredential; + private const string AuthorizationHeader = "api-key"; + /// A credential used to authenticate to the service. + private readonly TokenCredential _tokenCredential; + private static readonly string[] AuthorizationScopes = new string[] { "https://cognitiveservices.azure.com/.default" }; + + /// Initializes a new instance of VoiceLiveClient for mocking. + protected VoiceLiveClient() + { + } + + /// Initializes a new instance of VoiceLiveClient. + /// Service endpoint. + /// A credential used to authenticate to the service. + /// or is null. + public VoiceLiveClient(Uri endpoint, AzureKeyCredential credential) : this(endpoint, credential, new VoiceLiveClientOptions()) + { + } + + /// Initializes a new instance of VoiceLiveClient. + /// Service endpoint. + /// A credential used to authenticate to the service. + /// or is null. + public VoiceLiveClient(Uri endpoint, TokenCredential credential) : this(endpoint, credential, new VoiceLiveClientOptions()) + { + } + + /// The HTTP pipeline for sending and receiving REST requests and responses. 
+ public virtual HttpPipeline Pipeline { get; } + + /// The ClientDiagnostics is used to provide tracing support for the client library. + internal ClientDiagnostics ClientDiagnostics { get; } + } +} diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Generated/VoiceLiveClientOptions.cs b/sdk/ai/Azure.AI.VoiceLive/src/Customizations/VoiceLiveClientOptions.cs similarity index 93% rename from sdk/ai/Azure.AI.VoiceLive/src/Generated/VoiceLiveClientOptions.cs rename to sdk/ai/Azure.AI.VoiceLive/src/Customizations/VoiceLiveClientOptions.cs index 38535b99f477..96fbfc03fd4a 100644 --- a/sdk/ai/Azure.AI.VoiceLive/src/Generated/VoiceLiveClientOptions.cs +++ b/sdk/ai/Azure.AI.VoiceLive/src/Customizations/VoiceLiveClientOptions.cs @@ -1,8 +1,6 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. +// Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. -// - #nullable disable using System; diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Customizations/VoiceLiveResponse.cs b/sdk/ai/Azure.AI.VoiceLive/src/Customizations/VoiceLiveResponse.cs index 80b2fca4b5b3..5911cb83a97f 100644 --- a/sdk/ai/Azure.AI.VoiceLive/src/Customizations/VoiceLiveResponse.cs +++ b/sdk/ai/Azure.AI.VoiceLive/src/Customizations/VoiceLiveResponse.cs @@ -29,12 +29,6 @@ public partial class VoiceLiveResponse /// /// . /// - /// - /// . - /// - /// - /// . - /// /// /// /// diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Customizations/VoiceLiveSession.Commands.cs b/sdk/ai/Azure.AI.VoiceLive/src/Customizations/VoiceLiveSession.Commands.cs index c896750f0539..5f9c263a625a 100644 --- a/sdk/ai/Azure.AI.VoiceLive/src/Customizations/VoiceLiveSession.Commands.cs +++ b/sdk/ai/Azure.AI.VoiceLive/src/Customizations/VoiceLiveSession.Commands.cs @@ -344,52 +344,6 @@ public virtual void ConfigureSession(VoiceLiveSessionOptions sessionOptions, Can ConfigureSessionAsync(sessionOptions, cancellationToken).EnsureCompleted(); } - /// - /// Updates the conversation session configuration. 
- /// - /// The session configuration options. - /// An optional cancellation token. - /// Thrown when is null. - /// A task that represents the asynchronous operation. - public virtual async Task ConfigureConversationSessionAsync(VoiceLiveSessionOptions sessionOptions, CancellationToken cancellationToken = default) - { - await ConfigureSessionAsync(sessionOptions, cancellationToken).ConfigureAwait(false); - } - - /// - /// Updates the conversation session configuration. - /// - /// The session configuration options. - /// An optional cancellation token. - /// Thrown when is null. - public virtual void ConfigureConversationSession(VoiceLiveSessionOptions sessionOptions, CancellationToken cancellationToken = default) - { - ConfigureConversationSessionAsync(sessionOptions, cancellationToken).EnsureCompleted(); - } - - /// - /// Updates the transcription session configuration. - /// - /// The session configuration options. - /// An optional cancellation token. - /// Thrown when is null. - /// A task that represents the asynchronous operation. - public virtual async Task ConfigureTranscriptionSessionAsync(VoiceLiveSessionOptions sessionOptions, CancellationToken cancellationToken = default) - { - await ConfigureSessionAsync(sessionOptions, cancellationToken).ConfigureAwait(false); - } - - /// - /// Updates the transcription session configuration. - /// - /// The session configuration options. - /// An optional cancellation token. - /// Thrown when is null. - public virtual void ConfigureTranscriptionSession(VoiceLiveSessionOptions sessionOptions, CancellationToken cancellationToken = default) - { - ConfigureTranscriptionSessionAsync(sessionOptions, cancellationToken).EnsureCompleted(); - } - #endregion #region Item Management @@ -518,15 +472,16 @@ public virtual void DeleteItem(string itemId, CancellationToken cancellationToke /// The ID of the item up to which to truncate the conversation. /// The content index within the item to truncate to. 
/// An optional cancellation token. + /// Inclusive duration up to which audio is truncated /// Thrown when is null. /// Thrown when is empty. /// A task that represents the asynchronous operation. - public virtual async Task TruncateConversationAsync(string itemId, int contentIndex, CancellationToken cancellationToken = default) + public virtual async Task TruncateConversationAsync(string itemId, int contentIndex, TimeSpan audioEnd = default, CancellationToken cancellationToken = default) { Argument.AssertNotNullOrEmpty(itemId, nameof(itemId)); ThrowIfDisposed(); - var truncateEvent = new ClientEventConversationItemTruncate(itemId, contentIndex, 0); + var truncateEvent = new ClientEventConversationItemTruncate(itemId, contentIndex, Convert.ToInt32(audioEnd.TotalMilliseconds)); await SendCommandAsync(truncateEvent, cancellationToken).ConfigureAwait(false); } @@ -537,11 +492,12 @@ public virtual async Task TruncateConversationAsync(string itemId, int contentIn /// The ID of the item up to which to truncate the conversation. /// The content index within the item to truncate to. /// An optional cancellation token. + /// Inclusive duration up to which audio is truncated /// Thrown when is null. /// Thrown when is empty. 
- public virtual void TruncateConversation(string itemId, int contentIndex, CancellationToken cancellationToken = default) + public virtual void TruncateConversation(string itemId, int contentIndex, TimeSpan audioEnd = default, CancellationToken cancellationToken = default) { - TruncateConversationAsync(itemId, contentIndex, cancellationToken).EnsureCompleted(); + TruncateConversationAsync(itemId, contentIndex, audioEnd, cancellationToken).EnsureCompleted(); } #endregion diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Customizations/VoiceLiveSession.cs b/sdk/ai/Azure.AI.VoiceLive/src/Customizations/VoiceLiveSession.cs index a1938b0a67ab..098da1878078 100644 --- a/sdk/ai/Azure.AI.VoiceLive/src/Customizations/VoiceLiveSession.cs +++ b/sdk/ai/Azure.AI.VoiceLive/src/Customizations/VoiceLiveSession.cs @@ -27,7 +27,7 @@ public partial class VoiceLiveSession : IDisposable, IAsyncDisposable /// /// Gets the underlying WebSocket connection. /// - public WebSocket WebSocket { get; protected set; } + internal WebSocket WebSocket { get; set; } private readonly VoiceLiveClient _parentClient; private readonly Uri _endpoint; diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Customizations/VoiceLiveSessionOptions.cs b/sdk/ai/Azure.AI.VoiceLive/src/Customizations/VoiceLiveSessionOptions.cs index 8d5057a3b713..56fae67791db 100644 --- a/sdk/ai/Azure.AI.VoiceLive/src/Customizations/VoiceLiveSessionOptions.cs +++ b/sdk/ai/Azure.AI.VoiceLive/src/Customizations/VoiceLiveSessionOptions.cs @@ -6,6 +6,7 @@ using System; using System.ClientModel.Primitives; using System.Collections.Generic; +using System.Text.Json; namespace Azure.AI.VoiceLive { @@ -79,5 +80,46 @@ public ToolChoiceOption ToolChoice _toolChoice = persistable?.Write(new ModelReaderWriterOptions("J")) ?? null; } } + + [CodeGenMember("TurnDetection")] + private BinaryData _turnDetection; + + /// + /// Gets or sets the TurnDetection. 
+ /// + public TurnDetection TurnDetection + { + get + { + var tdAsString = _turnDetection?.ToString(); + if (string.IsNullOrEmpty(tdAsString)) + { + return null; + } + else if ("null" == tdAsString.ToLower(System.Globalization.CultureInfo.InvariantCulture)) + { + return new NoTurnDetection(); + } + else + { + using (JsonDocument document = JsonDocument.Parse(_turnDetection)) + { + return TurnDetection.DeserializeTurnDetection(document.RootElement, new ModelReaderWriterOptions("J")); + } + } + } + set + { + if (value.Type == new TurnDetectionType("None")) + { + _turnDetection = BinaryData.FromString(" null"); + } + else + { + var persist = value as IPersistableModel; + _turnDetection = persist.Write(new ModelReaderWriterOptions("J")); + } + } + } } } diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Generated/AnimationOptions.Serialization.cs b/sdk/ai/Azure.AI.VoiceLive/src/Generated/AnimationOptions.Serialization.cs index 1e4c20e07b06..77fbca8c46e5 100644 --- a/sdk/ai/Azure.AI.VoiceLive/src/Generated/AnimationOptions.Serialization.cs +++ b/sdk/ai/Azure.AI.VoiceLive/src/Generated/AnimationOptions.Serialization.cs @@ -12,7 +12,7 @@ namespace Azure.AI.VoiceLive { - /// Configuration for animation outputs including blendshapes, visemes, and emotion metadata. + /// Configuration for animation outputs including blendshapes and visemes metadata. public partial class AnimationOptions : IJsonModel { /// The JSON writer. 
@@ -48,11 +48,6 @@ protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWrit } writer.WriteEndArray(); } - if (Optional.IsDefined(EmotionDetectionIntervalMs)) - { - writer.WritePropertyName("emotion_detection_interval_ms"u8); - writer.WriteNumberValue(EmotionDetectionIntervalMs.Value); - } if (options.Format != "W" && _additionalBinaryDataProperties != null) { foreach (var item in _additionalBinaryDataProperties) @@ -97,7 +92,6 @@ internal static AnimationOptions DeserializeAnimationOptions(JsonElement element } string modelName = default; IList outputs = default; - int? emotionDetectionIntervalMs = default; IDictionary additionalBinaryDataProperties = new ChangeTrackingDictionary(); foreach (var prop in element.EnumerateObject()) { @@ -120,21 +114,12 @@ internal static AnimationOptions DeserializeAnimationOptions(JsonElement element outputs = array; continue; } - if (prop.NameEquals("emotion_detection_interval_ms"u8)) - { - if (prop.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - emotionDetectionIntervalMs = prop.Value.GetInt32(); - continue; - } if (options.Format != "W") { additionalBinaryDataProperties.Add(prop.Name, BinaryData.FromString(prop.Value.GetRawText())); } } - return new AnimationOptions(modelName, outputs ?? new ChangeTrackingList(), emotionDetectionIntervalMs, additionalBinaryDataProperties); + return new AnimationOptions(modelName, outputs ?? new ChangeTrackingList(), additionalBinaryDataProperties); } /// The client options for reading and writing models. diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Generated/AnimationOptions.cs b/sdk/ai/Azure.AI.VoiceLive/src/Generated/AnimationOptions.cs index 053081c1e37c..acadf9f7372a 100644 --- a/sdk/ai/Azure.AI.VoiceLive/src/Generated/AnimationOptions.cs +++ b/sdk/ai/Azure.AI.VoiceLive/src/Generated/AnimationOptions.cs @@ -10,7 +10,7 @@ namespace Azure.AI.VoiceLive { - /// Configuration for animation outputs including blendshapes, visemes, and emotion metadata. 
+ /// Configuration for animation outputs including blendshapes and visemes metadata. public partial class AnimationOptions { /// Keeps track of any properties unknown to the library. @@ -25,13 +25,11 @@ public AnimationOptions() /// Initializes a new instance of . /// The name of the animation model to use. /// Set of output data types requested from the animation system. - /// Interval for emotion detection in milliseconds. If not set, emotion detection is disabled. /// Keeps track of any properties unknown to the library. - internal AnimationOptions(string modelName, IList outputs, int? emotionDetectionIntervalMs, IDictionary additionalBinaryDataProperties) + internal AnimationOptions(string modelName, IList outputs, IDictionary additionalBinaryDataProperties) { ModelName = modelName; Outputs = outputs; - EmotionDetectionIntervalMs = emotionDetectionIntervalMs; _additionalBinaryDataProperties = additionalBinaryDataProperties; } @@ -40,8 +38,5 @@ internal AnimationOptions(string modelName, IList outputs, /// Set of output data types requested from the animation system. public IList Outputs { get; } - - /// Interval for emotion detection in milliseconds. If not set, emotion detection is disabled. - public int? 
EmotionDetectionIntervalMs { get; set; } } } diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Generated/AnimationOutputType.Serialization.cs b/sdk/ai/Azure.AI.VoiceLive/src/Generated/AnimationOutputType.Serialization.cs index 58f297ae0a89..d662224a00bf 100644 --- a/sdk/ai/Azure.AI.VoiceLive/src/Generated/AnimationOutputType.Serialization.cs +++ b/sdk/ai/Azure.AI.VoiceLive/src/Generated/AnimationOutputType.Serialization.cs @@ -16,7 +16,6 @@ internal static partial class AnimationOutputTypeExtensions { AnimationOutputType.Blendshapes => "blendshapes", AnimationOutputType.VisemeId => "viseme_id", - AnimationOutputType.Emotion => "emotion", _ => throw new ArgumentOutOfRangeException(nameof(value), value, "Unknown AnimationOutputType value.") }; @@ -31,10 +30,6 @@ public static AnimationOutputType ToAnimationOutputType(this string value) { return AnimationOutputType.VisemeId; } - if (StringComparer.OrdinalIgnoreCase.Equals(value, "emotion")) - { - return AnimationOutputType.Emotion; - } throw new ArgumentOutOfRangeException(nameof(value), value, "Unknown AnimationOutputType value."); } } diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Generated/AnimationOutputType.cs b/sdk/ai/Azure.AI.VoiceLive/src/Generated/AnimationOutputType.cs index 8bda37e13e3e..87a6ffff23f3 100644 --- a/sdk/ai/Azure.AI.VoiceLive/src/Generated/AnimationOutputType.cs +++ b/sdk/ai/Azure.AI.VoiceLive/src/Generated/AnimationOutputType.cs @@ -13,8 +13,6 @@ public enum AnimationOutputType /// Blendshapes. Blendshapes, /// VisemeId. - VisemeId, - /// Emotion. 
- Emotion + VisemeId } } diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Generated/AudioInputTranscriptionSettings.cs b/sdk/ai/Azure.AI.VoiceLive/src/Generated/AudioInputTranscriptionSettings.cs index 20fc9eefdcf0..2367d1899a30 100644 --- a/sdk/ai/Azure.AI.VoiceLive/src/Generated/AudioInputTranscriptionSettings.cs +++ b/sdk/ai/Azure.AI.VoiceLive/src/Generated/AudioInputTranscriptionSettings.cs @@ -20,7 +20,7 @@ public partial class AudioInputTranscriptionSettings /// /// The transcription model to use. Supported values: /// 'whisper-1', 'gpt-4o-transcribe', 'gpt-4o-mini-transcribe', - /// 'azure-fast-transcription', 'azure-speech'. + /// 'azure-speech'. /// public AudioInputTranscriptionSettings(AudioInputTranscriptionSettingsModel model) { @@ -33,9 +33,9 @@ public AudioInputTranscriptionSettings(AudioInputTranscriptionSettingsModel mode /// /// The transcription model to use. Supported values: /// 'whisper-1', 'gpt-4o-transcribe', 'gpt-4o-mini-transcribe', - /// 'azure-fast-transcription', 'azure-speech'. + /// 'azure-speech'. /// - /// Optional BCP-47 language code (e.g., 'en-US'). + /// Optional language code in BCP-47 (e.g., 'en-US'), or ISO-639-1 (e.g., 'en'), or multi languages with auto detection, (e.g., 'en,zh'). /// Optional configuration for custom speech models. /// Optional list of phrase hints to bias recognition. /// Keeps track of any properties unknown to the library. @@ -51,11 +51,11 @@ internal AudioInputTranscriptionSettings(AudioInputTranscriptionSettingsModel mo /// /// The transcription model to use. Supported values: /// 'whisper-1', 'gpt-4o-transcribe', 'gpt-4o-mini-transcribe', - /// 'azure-fast-transcription', 'azure-speech'. + /// 'azure-speech'. /// public AudioInputTranscriptionSettingsModel Model { get; set; } - /// Optional BCP-47 language code (e.g., 'en-US'). + /// Optional language code in BCP-47 (e.g., 'en-US'), or ISO-639-1 (e.g., 'en'), or multi languages with auto detection, (e.g., 'en,zh'). 
public string Language { get; set; } /// Optional configuration for custom speech models. diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Generated/AudioInputTranscriptionSettingsModel.cs b/sdk/ai/Azure.AI.VoiceLive/src/Generated/AudioInputTranscriptionSettingsModel.cs index d1e8af355ed2..a8eb2bb5ccfb 100644 --- a/sdk/ai/Azure.AI.VoiceLive/src/Generated/AudioInputTranscriptionSettingsModel.cs +++ b/sdk/ai/Azure.AI.VoiceLive/src/Generated/AudioInputTranscriptionSettingsModel.cs @@ -17,7 +17,6 @@ namespace Azure.AI.VoiceLive private const string Whisper1Value = "whisper-1"; private const string Gpt4oTranscribeValue = "gpt-4o-transcribe"; private const string Gpt4oMiniTranscribeValue = "gpt-4o-mini-transcribe"; - private const string AzureFastTranscriptionValue = "azure-fast-transcription"; private const string AzureSpeechValue = "azure-speech"; /// Initializes a new instance of . @@ -39,9 +38,6 @@ public AudioInputTranscriptionSettingsModel(string value) /// Gets the Gpt4oMiniTranscribe. public static AudioInputTranscriptionSettingsModel Gpt4oMiniTranscribe { get; } = new AudioInputTranscriptionSettingsModel(Gpt4oMiniTranscribeValue); - /// Gets the AzureFastTranscription. - public static AudioInputTranscriptionSettingsModel AzureFastTranscription { get; } = new AudioInputTranscriptionSettingsModel(AzureFastTranscriptionValue); - /// Gets the AzureSpeech. public static AudioInputTranscriptionSettingsModel AzureSpeech { get; } = new AudioInputTranscriptionSettingsModel(AzureSpeechValue); diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Generated/AudioNoiseReduction.Serialization.cs b/sdk/ai/Azure.AI.VoiceLive/src/Generated/AudioNoiseReduction.Serialization.cs index 5994c7efa998..4da5a1314562 100644 --- a/sdk/ai/Azure.AI.VoiceLive/src/Generated/AudioNoiseReduction.Serialization.cs +++ b/sdk/ai/Azure.AI.VoiceLive/src/Generated/AudioNoiseReduction.Serialization.cs @@ -15,6 +15,11 @@ namespace Azure.AI.VoiceLive /// Configuration for input audio noise reduction. 
public partial class AudioNoiseReduction : IJsonModel { + /// Initializes a new instance of for deserialization. + internal AudioNoiseReduction() + { + } + /// The JSON writer. /// The client options for reading and writing models. void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) @@ -34,7 +39,7 @@ protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWrit throw new FormatException($"The model {nameof(AudioNoiseReduction)} does not support writing '{format}' format."); } writer.WritePropertyName("type"u8); - writer.WriteStringValue(Type); + writer.WriteStringValue(Type.ToSerialString()); if (options.Format != "W" && _additionalBinaryDataProperties != null) { foreach (var item in _additionalBinaryDataProperties) @@ -77,13 +82,13 @@ internal static AudioNoiseReduction DeserializeAudioNoiseReduction(JsonElement e { return null; } - string @type = default; + AudioNoiseReductionType @type = default; IDictionary additionalBinaryDataProperties = new ChangeTrackingDictionary(); foreach (var prop in element.EnumerateObject()) { if (prop.NameEquals("type"u8)) { - @type = prop.Value.GetString(); + @type = prop.Value.GetString().ToAudioNoiseReductionType(); continue; } if (options.Format != "W") diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Generated/AudioNoiseReduction.cs b/sdk/ai/Azure.AI.VoiceLive/src/Generated/AudioNoiseReduction.cs index 99edb1c44702..2e44eb7c18e7 100644 --- a/sdk/ai/Azure.AI.VoiceLive/src/Generated/AudioNoiseReduction.cs +++ b/sdk/ai/Azure.AI.VoiceLive/src/Generated/AudioNoiseReduction.cs @@ -17,20 +17,22 @@ public partial class AudioNoiseReduction private protected readonly IDictionary _additionalBinaryDataProperties; /// Initializes a new instance of . - public AudioNoiseReduction() + /// The type of noise reduction model. + public AudioNoiseReduction(AudioNoiseReductionType @type) { + Type = @type; } /// Initializes a new instance of . /// The type of noise reduction model. 
/// Keeps track of any properties unknown to the library. - internal AudioNoiseReduction(string @type, IDictionary additionalBinaryDataProperties) + internal AudioNoiseReduction(AudioNoiseReductionType @type, IDictionary additionalBinaryDataProperties) { Type = @type; _additionalBinaryDataProperties = additionalBinaryDataProperties; } /// The type of noise reduction model. - public string Type { get; } = "azure_deep_noise_suppression"; + public AudioNoiseReductionType Type { get; set; } } } diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Generated/AudioNoiseReductionType.Serialization.cs b/sdk/ai/Azure.AI.VoiceLive/src/Generated/AudioNoiseReductionType.Serialization.cs new file mode 100644 index 000000000000..1c745c721552 --- /dev/null +++ b/sdk/ai/Azure.AI.VoiceLive/src/Generated/AudioNoiseReductionType.Serialization.cs @@ -0,0 +1,41 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; + +namespace Azure.AI.VoiceLive +{ + internal static partial class AudioNoiseReductionTypeExtensions + { + /// The value to serialize. + public static string ToSerialString(this AudioNoiseReductionType value) => value switch + { + AudioNoiseReductionType.AzureDeepNoiseSuppression => "azure_deep_noise_suppression", + AudioNoiseReductionType.NearField => "near_field", + AudioNoiseReductionType.FarField => "far_field", + _ => throw new ArgumentOutOfRangeException(nameof(value), value, "Unknown AudioNoiseReductionType value.") + }; + + /// The value to deserialize. 
+ public static AudioNoiseReductionType ToAudioNoiseReductionType(this string value) + { + if (StringComparer.OrdinalIgnoreCase.Equals(value, "azure_deep_noise_suppression")) + { + return AudioNoiseReductionType.AzureDeepNoiseSuppression; + } + if (StringComparer.OrdinalIgnoreCase.Equals(value, "near_field")) + { + return AudioNoiseReductionType.NearField; + } + if (StringComparer.OrdinalIgnoreCase.Equals(value, "far_field")) + { + return AudioNoiseReductionType.FarField; + } + throw new ArgumentOutOfRangeException(nameof(value), value, "Unknown AudioNoiseReductionType value."); + } + } +} diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Generated/AudioNoiseReductionType.cs b/sdk/ai/Azure.AI.VoiceLive/src/Generated/AudioNoiseReductionType.cs new file mode 100644 index 000000000000..d0000b338668 --- /dev/null +++ b/sdk/ai/Azure.AI.VoiceLive/src/Generated/AudioNoiseReductionType.cs @@ -0,0 +1,20 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +namespace Azure.AI.VoiceLive +{ + /// + public enum AudioNoiseReductionType + { + /// AzureDeepNoiseSuppression. + AzureDeepNoiseSuppression, + /// NearField. + NearField, + /// FarField. + FarField + } +} diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Generated/AzureCustomVoice.Serialization.cs b/sdk/ai/Azure.AI.VoiceLive/src/Generated/AzureCustomVoice.Serialization.cs index bba557192c14..e7f1cd1b60ac 100644 --- a/sdk/ai/Azure.AI.VoiceLive/src/Generated/AzureCustomVoice.Serialization.cs +++ b/sdk/ai/Azure.AI.VoiceLive/src/Generated/AzureCustomVoice.Serialization.cs @@ -12,7 +12,7 @@ namespace Azure.AI.VoiceLive { - /// Azure custom voice configuration (preferred). + /// Azure custom voice configuration. public partial class AzureCustomVoice : IJsonModel { /// Initializes a new instance of for deserialization. 
diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Generated/AzureCustomVoice.cs b/sdk/ai/Azure.AI.VoiceLive/src/Generated/AzureCustomVoice.cs index f80d25ac40a0..8b041367a845 100644 --- a/sdk/ai/Azure.AI.VoiceLive/src/Generated/AzureCustomVoice.cs +++ b/sdk/ai/Azure.AI.VoiceLive/src/Generated/AzureCustomVoice.cs @@ -10,7 +10,7 @@ namespace Azure.AI.VoiceLive { - /// Azure custom voice configuration (preferred). + /// Azure custom voice configuration. public partial class AzureCustomVoice : AzureVoice { /// Initializes a new instance of . diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Generated/AzureMultilingualSemanticVad.Serialization.cs b/sdk/ai/Azure.AI.VoiceLive/src/Generated/AzureMultilingualSemanticVad.Serialization.cs deleted file mode 100644 index f6fbf3ee16bf..000000000000 --- a/sdk/ai/Azure.AI.VoiceLive/src/Generated/AzureMultilingualSemanticVad.Serialization.cs +++ /dev/null @@ -1,337 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; -using System.ClientModel.Primitives; -using System.Collections.Generic; -using System.Text.Json; - -namespace Azure.AI.VoiceLive -{ - /// Server Speech Detection (Azure semantic VAD). - public partial class AzureMultilingualSemanticVad : IJsonModel - { - /// The JSON writer. - /// The client options for reading and writing models. - void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) - { - writer.WriteStartObject(); - JsonModelWriteCore(writer, options); - writer.WriteEndObject(); - } - - /// The JSON writer. - /// The client options for reading and writing models. - protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) - { - string format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; - if (format != "J") - { - throw new FormatException($"The model {nameof(AzureMultilingualSemanticVad)} does not support writing '{format}' format."); - } - base.JsonModelWriteCore(writer, options); - if (Optional.IsDefined(Threshold)) - { - writer.WritePropertyName("threshold"u8); - writer.WriteNumberValue(Threshold.Value); - } - if (Optional.IsDefined(PrefixPaddingMs)) - { - writer.WritePropertyName("prefix_padding_ms"u8); - writer.WriteNumberValue(PrefixPaddingMs.Value); - } - if (Optional.IsDefined(SilenceDurationMs)) - { - writer.WritePropertyName("silence_duration_ms"u8); - writer.WriteNumberValue(SilenceDurationMs.Value); - } - if (Optional.IsDefined(EndOfUtteranceDetection)) - { - writer.WritePropertyName("end_of_utterance_detection"u8); - writer.WriteObjectValue(EndOfUtteranceDetection, options); - } - if (Optional.IsDefined(NegThreshold)) - { - writer.WritePropertyName("neg_threshold"u8); - writer.WriteNumberValue(NegThreshold.Value); - } - if (Optional.IsDefined(SpeechDurationMs)) - { - writer.WritePropertyName("speech_duration_ms"u8); - writer.WriteNumberValue(SpeechDurationMs.Value); - } - if (Optional.IsDefined(WindowSize)) - { - writer.WritePropertyName("window_size"u8); - writer.WriteNumberValue(WindowSize.Value); - } - if (Optional.IsDefined(DistinctCiPhones)) - { - writer.WritePropertyName("distinct_ci_phones"u8); - writer.WriteNumberValue(DistinctCiPhones.Value); - } - if (Optional.IsDefined(RequireVowel)) - { - writer.WritePropertyName("require_vowel"u8); - writer.WriteBooleanValue(RequireVowel.Value); - } - if (Optional.IsDefined(RemoveFillerWords)) - { - writer.WritePropertyName("remove_filler_words"u8); - writer.WriteBooleanValue(RemoveFillerWords.Value); - } - if (Optional.IsCollectionDefined(Languages)) - { - writer.WritePropertyName("languages"u8); - writer.WriteStartArray(); - foreach (string item in Languages) - { - if (item == null) - { - writer.WriteNullValue(); 
- continue; - } - writer.WriteStringValue(item); - } - writer.WriteEndArray(); - } - if (Optional.IsDefined(AutoTruncate)) - { - writer.WritePropertyName("auto_truncate"u8); - writer.WriteBooleanValue(AutoTruncate.Value); - } - } - - /// The JSON reader. - /// The client options for reading and writing models. - AzureMultilingualSemanticVad IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) => (AzureMultilingualSemanticVad)JsonModelCreateCore(ref reader, options); - - /// The JSON reader. - /// The client options for reading and writing models. - protected override TurnDetection JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options) - { - string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; - if (format != "J") - { - throw new FormatException($"The model {nameof(AzureMultilingualSemanticVad)} does not support reading '{format}' format."); - } - using JsonDocument document = JsonDocument.ParseValue(ref reader); - return DeserializeAzureMultilingualSemanticVad(document.RootElement, options); - } - - /// The JSON element to deserialize. - /// The client options for reading and writing models. - internal static AzureMultilingualSemanticVad DeserializeAzureMultilingualSemanticVad(JsonElement element, ModelReaderWriterOptions options) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - TurnDetectionType @type = default; - IDictionary additionalBinaryDataProperties = new ChangeTrackingDictionary(); - float? threshold = default; - int? prefixPaddingMs = default; - int? silenceDurationMs = default; - EouDetection endOfUtteranceDetection = default; - float? negThreshold = default; - int? speechDurationMs = default; - int? windowSize = default; - int? distinctCiPhones = default; - bool? requireVowel = default; - bool? removeFillerWords = default; - IList languages = default; - bool? 
autoTruncate = default; - foreach (var prop in element.EnumerateObject()) - { - if (prop.NameEquals("type"u8)) - { - @type = prop.Value.GetString().ToTurnDetectionType(); - continue; - } - if (prop.NameEquals("threshold"u8)) - { - if (prop.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - threshold = prop.Value.GetSingle(); - continue; - } - if (prop.NameEquals("prefix_padding_ms"u8)) - { - if (prop.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - prefixPaddingMs = prop.Value.GetInt32(); - continue; - } - if (prop.NameEquals("silence_duration_ms"u8)) - { - if (prop.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - silenceDurationMs = prop.Value.GetInt32(); - continue; - } - if (prop.NameEquals("end_of_utterance_detection"u8)) - { - if (prop.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - endOfUtteranceDetection = EouDetection.DeserializeEouDetection(prop.Value, options); - continue; - } - if (prop.NameEquals("neg_threshold"u8)) - { - if (prop.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - negThreshold = prop.Value.GetSingle(); - continue; - } - if (prop.NameEquals("speech_duration_ms"u8)) - { - if (prop.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - speechDurationMs = prop.Value.GetInt32(); - continue; - } - if (prop.NameEquals("window_size"u8)) - { - if (prop.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - windowSize = prop.Value.GetInt32(); - continue; - } - if (prop.NameEquals("distinct_ci_phones"u8)) - { - if (prop.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - distinctCiPhones = prop.Value.GetInt32(); - continue; - } - if (prop.NameEquals("require_vowel"u8)) - { - if (prop.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - requireVowel = prop.Value.GetBoolean(); - continue; - } - if (prop.NameEquals("remove_filler_words"u8)) - { - if (prop.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - removeFillerWords = prop.Value.GetBoolean(); - 
continue; - } - if (prop.NameEquals("languages"u8)) - { - if (prop.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - List array = new List(); - foreach (var item in prop.Value.EnumerateArray()) - { - if (item.ValueKind == JsonValueKind.Null) - { - array.Add(null); - } - else - { - array.Add(item.GetString()); - } - } - languages = array; - continue; - } - if (prop.NameEquals("auto_truncate"u8)) - { - if (prop.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - autoTruncate = prop.Value.GetBoolean(); - continue; - } - if (options.Format != "W") - { - additionalBinaryDataProperties.Add(prop.Name, BinaryData.FromString(prop.Value.GetRawText())); - } - } - return new AzureMultilingualSemanticVad( - @type, - additionalBinaryDataProperties, - threshold, - prefixPaddingMs, - silenceDurationMs, - endOfUtteranceDetection, - negThreshold, - speechDurationMs, - windowSize, - distinctCiPhones, - requireVowel, - removeFillerWords, - languages ?? new ChangeTrackingList(), - autoTruncate); - } - - /// The client options for reading and writing models. - BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) => PersistableModelWriteCore(options); - - /// The client options for reading and writing models. - protected override BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options) - { - string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; - switch (format) - { - case "J": - return ModelReaderWriter.Write(this, options, AzureAIVoiceLiveContext.Default); - default: - throw new FormatException($"The model {nameof(AzureMultilingualSemanticVad)} does not support writing '{options.Format}' format."); - } - } - - /// The data to parse. - /// The client options for reading and writing models. 
- AzureMultilingualSemanticVad IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) => (AzureMultilingualSemanticVad)PersistableModelCreateCore(data, options); - - /// The data to parse. - /// The client options for reading and writing models. - protected override TurnDetection PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options) - { - string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; - switch (format) - { - case "J": - using (JsonDocument document = JsonDocument.Parse(data)) - { - return DeserializeAzureMultilingualSemanticVad(document.RootElement, options); - } - default: - throw new FormatException($"The model {nameof(AzureMultilingualSemanticVad)} does not support reading '{options.Format}' format."); - } - } - - /// The client options for reading and writing models. - string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; - } -} diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Generated/AzureMultilingualSemanticVad.cs b/sdk/ai/Azure.AI.VoiceLive/src/Generated/AzureMultilingualSemanticVad.cs deleted file mode 100644 index adb9336c07df..000000000000 --- a/sdk/ai/Azure.AI.VoiceLive/src/Generated/AzureMultilingualSemanticVad.cs +++ /dev/null @@ -1,89 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; -using System.Collections.Generic; - -namespace Azure.AI.VoiceLive -{ - /// Server Speech Detection (Azure semantic VAD). - public partial class AzureMultilingualSemanticVad : TurnDetection - { - /// Initializes a new instance of . - public AzureMultilingualSemanticVad() : base(TurnDetectionType.AzureSemanticVadMultilingual) - { - Languages = new ChangeTrackingList(); - } - - /// Initializes a new instance of . - /// - /// Keeps track of any properties unknown to the library. 
- /// - /// - /// - /// - /// - /// - /// - /// - /// - /// - /// - /// - internal AzureMultilingualSemanticVad(TurnDetectionType @type, IDictionary additionalBinaryDataProperties, float? threshold, int? prefixPaddingMs, int? silenceDurationMs, EouDetection endOfUtteranceDetection, float? negThreshold, int? speechDurationMs, int? windowSize, int? distinctCiPhones, bool? requireVowel, bool? removeFillerWords, IList languages, bool? autoTruncate) : base(@type, additionalBinaryDataProperties) - { - Threshold = threshold; - PrefixPaddingMs = prefixPaddingMs; - SilenceDurationMs = silenceDurationMs; - EndOfUtteranceDetection = endOfUtteranceDetection; - NegThreshold = negThreshold; - SpeechDurationMs = speechDurationMs; - WindowSize = windowSize; - DistinctCiPhones = distinctCiPhones; - RequireVowel = requireVowel; - RemoveFillerWords = removeFillerWords; - Languages = languages; - AutoTruncate = autoTruncate; - } - - /// Gets or sets the Threshold. - public float? Threshold { get; set; } - - /// Gets or sets the PrefixPaddingMs. - public int? PrefixPaddingMs { get; set; } - - /// Gets or sets the SilenceDurationMs. - public int? SilenceDurationMs { get; set; } - - /// Gets or sets the EndOfUtteranceDetection. - public EouDetection EndOfUtteranceDetection { get; set; } - - /// Gets or sets the NegThreshold. - public float? NegThreshold { get; set; } - - /// Gets or sets the SpeechDurationMs. - public int? SpeechDurationMs { get; set; } - - /// Gets or sets the WindowSize. - public int? WindowSize { get; set; } - - /// Gets or sets the DistinctCiPhones. - public int? DistinctCiPhones { get; set; } - - /// Gets or sets the RequireVowel. - public bool? RequireVowel { get; set; } - - /// Gets or sets the RemoveFillerWords. - public bool? RemoveFillerWords { get; set; } - - /// Gets the Languages. - public IList Languages { get; } - - /// Gets or sets the AutoTruncate. - public bool? 
AutoTruncate { get; set; } - } -} diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Generated/AzurePersonalVoice.Serialization.cs b/sdk/ai/Azure.AI.VoiceLive/src/Generated/AzurePersonalVoice.Serialization.cs index e506119e7d3c..66de2198eb32 100644 --- a/sdk/ai/Azure.AI.VoiceLive/src/Generated/AzurePersonalVoice.Serialization.cs +++ b/sdk/ai/Azure.AI.VoiceLive/src/Generated/AzurePersonalVoice.Serialization.cs @@ -47,7 +47,7 @@ protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWri writer.WriteNumberValue(Temperature.Value); } writer.WritePropertyName("model"u8); - writer.WriteStringValue(Model.ToSerialString()); + writer.WriteStringValue(Model.ToString()); } /// The JSON reader. @@ -79,7 +79,7 @@ internal static AzurePersonalVoice DeserializeAzurePersonalVoice(JsonElement ele IDictionary additionalBinaryDataProperties = new ChangeTrackingDictionary(); string name = default; float? temperature = default; - AzurePersonalVoiceModel model = default; + PersonalVoiceModels model = default; foreach (var prop in element.EnumerateObject()) { if (prop.NameEquals("type"u8)) @@ -103,7 +103,7 @@ internal static AzurePersonalVoice DeserializeAzurePersonalVoice(JsonElement ele } if (prop.NameEquals("model"u8)) { - model = prop.Value.GetString().ToAzurePersonalVoiceModel(); + model = new PersonalVoiceModels(prop.Value.GetString()); continue; } if (options.Format != "W") diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Generated/AzurePersonalVoice.cs b/sdk/ai/Azure.AI.VoiceLive/src/Generated/AzurePersonalVoice.cs index 14ce03e7319d..de750f40a8cf 100644 --- a/sdk/ai/Azure.AI.VoiceLive/src/Generated/AzurePersonalVoice.cs +++ b/sdk/ai/Azure.AI.VoiceLive/src/Generated/AzurePersonalVoice.cs @@ -17,7 +17,7 @@ public partial class AzurePersonalVoice : AzureVoice /// Voice name cannot be empty. /// Underlying neural model to use for personal voice. /// is null. 
- public AzurePersonalVoice(string name, AzurePersonalVoiceModel model) : base("azure-personal") + public AzurePersonalVoice(string name, PersonalVoiceModels model) : base("azure-personal") { Argument.AssertNotNull(name, nameof(name)); @@ -31,7 +31,7 @@ public AzurePersonalVoice(string name, AzurePersonalVoiceModel model) : base("az /// Voice name cannot be empty. /// Temperature must be between 0.0 and 1.0. /// Underlying neural model to use for personal voice. - internal AzurePersonalVoice(string @type, IDictionary additionalBinaryDataProperties, string name, float? temperature, AzurePersonalVoiceModel model) : base(@type, additionalBinaryDataProperties) + internal AzurePersonalVoice(string @type, IDictionary additionalBinaryDataProperties, string name, float? temperature, PersonalVoiceModels model) : base(@type, additionalBinaryDataProperties) { Name = name; Temperature = temperature; @@ -45,6 +45,6 @@ internal AzurePersonalVoice(string @type, IDictionary additi public float? Temperature { get; set; } /// Underlying neural model to use for personal voice. - public AzurePersonalVoiceModel Model { get; set; } + public PersonalVoiceModels Model { get; set; } } } diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Generated/AzurePersonalVoiceModel.Serialization.cs b/sdk/ai/Azure.AI.VoiceLive/src/Generated/AzurePersonalVoiceModel.Serialization.cs deleted file mode 100644 index 139bd52ce965..000000000000 --- a/sdk/ai/Azure.AI.VoiceLive/src/Generated/AzurePersonalVoiceModel.Serialization.cs +++ /dev/null @@ -1,41 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; - -namespace Azure.AI.VoiceLive -{ - internal static partial class AzurePersonalVoiceModelExtensions - { - /// The value to serialize. 
- public static string ToSerialString(this AzurePersonalVoiceModel value) => value switch - { - AzurePersonalVoiceModel.DragonLatestNeural => "DragonLatestNeural", - AzurePersonalVoiceModel.PhoenixLatestNeural => "PhoenixLatestNeural", - AzurePersonalVoiceModel.PhoenixV2Neural => "PhoenixV2Neural", - _ => throw new ArgumentOutOfRangeException(nameof(value), value, "Unknown AzurePersonalVoiceModel value.") - }; - - /// The value to deserialize. - public static AzurePersonalVoiceModel ToAzurePersonalVoiceModel(this string value) - { - if (StringComparer.OrdinalIgnoreCase.Equals(value, "DragonLatestNeural")) - { - return AzurePersonalVoiceModel.DragonLatestNeural; - } - if (StringComparer.OrdinalIgnoreCase.Equals(value, "PhoenixLatestNeural")) - { - return AzurePersonalVoiceModel.PhoenixLatestNeural; - } - if (StringComparer.OrdinalIgnoreCase.Equals(value, "PhoenixV2Neural")) - { - return AzurePersonalVoiceModel.PhoenixV2Neural; - } - throw new ArgumentOutOfRangeException(nameof(value), value, "Unknown AzurePersonalVoiceModel value."); - } - } -} diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Generated/AzurePersonalVoiceModel.cs b/sdk/ai/Azure.AI.VoiceLive/src/Generated/AzurePersonalVoiceModel.cs deleted file mode 100644 index 3a22b4501abe..000000000000 --- a/sdk/ai/Azure.AI.VoiceLive/src/Generated/AzurePersonalVoiceModel.cs +++ /dev/null @@ -1,20 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -namespace Azure.AI.VoiceLive -{ - /// - public enum AzurePersonalVoiceModel - { - /// DragonLatestNeural. - DragonLatestNeural, - /// PhoenixLatestNeural. - PhoenixLatestNeural, - /// PhoenixV2Neural. 
- PhoenixV2Neural - } -} diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Generated/AzurePlatformVoice.Serialization.cs b/sdk/ai/Azure.AI.VoiceLive/src/Generated/AzurePlatformVoice.Serialization.cs deleted file mode 100644 index e6cc872fdf58..000000000000 --- a/sdk/ai/Azure.AI.VoiceLive/src/Generated/AzurePlatformVoice.Serialization.cs +++ /dev/null @@ -1,263 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; -using System.ClientModel.Primitives; -using System.Collections.Generic; -using System.Text.Json; - -namespace Azure.AI.VoiceLive -{ - /// Azure platform voice configuration (variant of standard). - public partial class AzurePlatformVoice : IJsonModel - { - /// Initializes a new instance of for deserialization. - internal AzurePlatformVoice() - { - } - - /// The JSON writer. - /// The client options for reading and writing models. - void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) - { - writer.WriteStartObject(); - JsonModelWriteCore(writer, options); - writer.WriteEndObject(); - } - - /// The JSON writer. - /// The client options for reading and writing models. - protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) - { - string format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; - if (format != "J") - { - throw new FormatException($"The model {nameof(AzurePlatformVoice)} does not support writing '{format}' format."); - } - base.JsonModelWriteCore(writer, options); - writer.WritePropertyName("name"u8); - writer.WriteStringValue(Name); - if (Optional.IsDefined(Temperature)) - { - writer.WritePropertyName("temperature"u8); - writer.WriteNumberValue(Temperature.Value); - } - if (Optional.IsDefined(CustomLexiconUrl)) - { - writer.WritePropertyName("custom_lexicon_url"u8); - writer.WriteStringValue(CustomLexiconUrl); - } - if (Optional.IsCollectionDefined(PreferLocales)) - { - writer.WritePropertyName("prefer_locales"u8); - writer.WriteStartArray(); - foreach (string item in PreferLocales) - { - if (item == null) - { - writer.WriteNullValue(); - continue; - } - writer.WriteStringValue(item); - } - writer.WriteEndArray(); - } - if (Optional.IsDefined(Locale)) - { - writer.WritePropertyName("locale"u8); - writer.WriteStringValue(Locale); - } - if (Optional.IsDefined(Style)) - { - writer.WritePropertyName("style"u8); - writer.WriteStringValue(Style); - } - if (Optional.IsDefined(Pitch)) - { - writer.WritePropertyName("pitch"u8); - writer.WriteStringValue(Pitch); - } - if (Optional.IsDefined(Rate)) - { - writer.WritePropertyName("rate"u8); - writer.WriteStringValue(Rate); - } - if (Optional.IsDefined(Volume)) - { - writer.WritePropertyName("volume"u8); - writer.WriteStringValue(Volume); - } - } - - /// The JSON reader. - /// The client options for reading and writing models. - AzurePlatformVoice IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) => (AzurePlatformVoice)JsonModelCreateCore(ref reader, options); - - /// The JSON reader. - /// The client options for reading and writing models. - protected override AzureVoice JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options) - { - string format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; - if (format != "J") - { - throw new FormatException($"The model {nameof(AzurePlatformVoice)} does not support reading '{format}' format."); - } - using JsonDocument document = JsonDocument.ParseValue(ref reader); - return DeserializeAzurePlatformVoice(document.RootElement, options); - } - - /// The JSON element to deserialize. - /// The client options for reading and writing models. - internal static AzurePlatformVoice DeserializeAzurePlatformVoice(JsonElement element, ModelReaderWriterOptions options) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - string @type = "azure-platform"; - IDictionary additionalBinaryDataProperties = new ChangeTrackingDictionary(); - string name = default; - float? temperature = default; - string customLexiconUrl = default; - IList preferLocales = default; - string locale = default; - string style = default; - string pitch = default; - string rate = default; - string volume = default; - foreach (var prop in element.EnumerateObject()) - { - if (prop.NameEquals("type"u8)) - { - @type = prop.Value.GetString(); - continue; - } - if (prop.NameEquals("name"u8)) - { - name = prop.Value.GetString(); - continue; - } - if (prop.NameEquals("temperature"u8)) - { - if (prop.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - temperature = prop.Value.GetSingle(); - continue; - } - if (prop.NameEquals("custom_lexicon_url"u8)) - { - customLexiconUrl = prop.Value.GetString(); - continue; - } - if (prop.NameEquals("prefer_locales"u8)) - { - if (prop.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - List array = new List(); - foreach (var item in prop.Value.EnumerateArray()) - { - if (item.ValueKind == JsonValueKind.Null) - { - array.Add(null); - } - else - { - array.Add(item.GetString()); - } - } - preferLocales = array; - continue; - } - if (prop.NameEquals("locale"u8)) - { - locale = prop.Value.GetString(); - continue; - } - if 
(prop.NameEquals("style"u8)) - { - style = prop.Value.GetString(); - continue; - } - if (prop.NameEquals("pitch"u8)) - { - pitch = prop.Value.GetString(); - continue; - } - if (prop.NameEquals("rate"u8)) - { - rate = prop.Value.GetString(); - continue; - } - if (prop.NameEquals("volume"u8)) - { - volume = prop.Value.GetString(); - continue; - } - if (options.Format != "W") - { - additionalBinaryDataProperties.Add(prop.Name, BinaryData.FromString(prop.Value.GetRawText())); - } - } - return new AzurePlatformVoice( - @type, - additionalBinaryDataProperties, - name, - temperature, - customLexiconUrl, - preferLocales ?? new ChangeTrackingList(), - locale, - style, - pitch, - rate, - volume); - } - - /// The client options for reading and writing models. - BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) => PersistableModelWriteCore(options); - - /// The client options for reading and writing models. - protected override BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options) - { - string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; - switch (format) - { - case "J": - return ModelReaderWriter.Write(this, options, AzureAIVoiceLiveContext.Default); - default: - throw new FormatException($"The model {nameof(AzurePlatformVoice)} does not support writing '{options.Format}' format."); - } - } - - /// The data to parse. - /// The client options for reading and writing models. - AzurePlatformVoice IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) => (AzurePlatformVoice)PersistableModelCreateCore(data, options); - - /// The data to parse. - /// The client options for reading and writing models. - protected override AzureVoice PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options) - { - string format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; - switch (format) - { - case "J": - using (JsonDocument document = JsonDocument.Parse(data)) - { - return DeserializeAzurePlatformVoice(document.RootElement, options); - } - default: - throw new FormatException($"The model {nameof(AzurePlatformVoice)} does not support reading '{options.Format}' format."); - } - } - - /// The client options for reading and writing models. - string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; - } -} diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Generated/AzurePlatformVoice.cs b/sdk/ai/Azure.AI.VoiceLive/src/Generated/AzurePlatformVoice.cs deleted file mode 100644 index 56a035f38ca5..000000000000 --- a/sdk/ai/Azure.AI.VoiceLive/src/Generated/AzurePlatformVoice.cs +++ /dev/null @@ -1,79 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; -using System.Collections.Generic; - -namespace Azure.AI.VoiceLive -{ - /// Azure platform voice configuration (variant of standard). - public partial class AzurePlatformVoice : AzureVoice - { - /// Initializes a new instance of . - /// Voice name cannot be empty. - /// is null. - public AzurePlatformVoice(string name) : base("azure-platform") - { - Argument.AssertNotNull(name, nameof(name)); - - Name = name; - PreferLocales = new ChangeTrackingList(); - } - - /// Initializes a new instance of . - /// - /// Keeps track of any properties unknown to the library. - /// Voice name cannot be empty. - /// Temperature must be between 0.0 and 1.0. - /// - /// - /// - /// - /// - /// - /// - internal AzurePlatformVoice(string @type, IDictionary additionalBinaryDataProperties, string name, float? 
temperature, string customLexiconUrl, IList preferLocales, string locale, string style, string pitch, string rate, string volume) : base(@type, additionalBinaryDataProperties) - { - Name = name; - Temperature = temperature; - CustomLexiconUrl = customLexiconUrl; - PreferLocales = preferLocales; - Locale = locale; - Style = style; - Pitch = pitch; - Rate = rate; - Volume = volume; - } - - /// Voice name cannot be empty. - public string Name { get; set; } - - /// Temperature must be between 0.0 and 1.0. - public float? Temperature { get; set; } - - /// Gets or sets the CustomLexiconUrl. - public string CustomLexiconUrl { get; set; } - - /// Gets the PreferLocales. - public IList PreferLocales { get; } - - /// Gets or sets the Locale. - public string Locale { get; set; } - - /// Gets or sets the Style. - public string Style { get; set; } - - /// Gets or sets the Pitch. - public string Pitch { get; set; } - - /// Gets or sets the Rate. - public string Rate { get; set; } - - /// Gets or sets the Volume. - public string Volume { get; set; } - } -} diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Generated/AzureSemanticDetection.cs b/sdk/ai/Azure.AI.VoiceLive/src/Generated/AzureSemanticDetection.cs deleted file mode 100644 index e4452f4a9d4d..000000000000 --- a/sdk/ai/Azure.AI.VoiceLive/src/Generated/AzureSemanticDetection.cs +++ /dev/null @@ -1,63 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; -using System.Collections.Generic; - -namespace Azure.AI.VoiceLive -{ - /// Azure semantic end-of-utterance detection (default). - public partial class AzureSemanticDetection : EouDetection - { - /// Initializes a new instance of . - public AzureSemanticDetection() : base(EOUDetectionModel.SemanticDetectionV1) - { - } - - /// Initializes a new instance of . - /// - /// Keeps track of any properties unknown to the library. 
- /// - /// - /// - /// - /// - /// - /// - internal AzureSemanticDetection(EOUDetectionModel model, IDictionary additionalBinaryDataProperties, float? threshold, float? timeout, float? secondaryThreshold, float? secondaryTimeout, bool? disableRules, float? srBoost, bool? extraImendCheck) : base(model, additionalBinaryDataProperties) - { - Threshold = threshold; - Timeout = timeout; - SecondaryThreshold = secondaryThreshold; - SecondaryTimeout = secondaryTimeout; - DisableRules = disableRules; - SrBoost = srBoost; - ExtraImendCheck = extraImendCheck; - } - - /// Gets or sets the Threshold. - public float? Threshold { get; set; } - - /// Gets or sets the Timeout. - public float? Timeout { get; set; } - - /// Gets or sets the SecondaryThreshold. - public float? SecondaryThreshold { get; set; } - - /// Gets or sets the SecondaryTimeout. - public float? SecondaryTimeout { get; set; } - - /// Gets or sets the DisableRules. - public bool? DisableRules { get; set; } - - /// Gets or sets the SrBoost. - public float? SrBoost { get; set; } - - /// Gets or sets the ExtraImendCheck. - public bool? ExtraImendCheck { get; set; } - } -} diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Generated/AzureSemanticDetectionEn.cs b/sdk/ai/Azure.AI.VoiceLive/src/Generated/AzureSemanticDetectionEn.cs deleted file mode 100644 index 9c3349c4830b..000000000000 --- a/sdk/ai/Azure.AI.VoiceLive/src/Generated/AzureSemanticDetectionEn.cs +++ /dev/null @@ -1,63 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; -using System.Collections.Generic; - -namespace Azure.AI.VoiceLive -{ - /// Azure semantic end-of-utterance detection (English-optimized). - public partial class AzureSemanticDetectionEn : EouDetection - { - /// Initializes a new instance of . - public AzureSemanticDetectionEn() : base(EOUDetectionModel.SemanticDetectionV1En) - { - } - - /// Initializes a new instance of . 
- /// - /// Keeps track of any properties unknown to the library. - /// - /// - /// - /// - /// - /// - /// - internal AzureSemanticDetectionEn(EOUDetectionModel model, IDictionary additionalBinaryDataProperties, float? threshold, float? timeout, float? secondaryThreshold, float? secondaryTimeout, bool? disableRules, float? srBoost, bool? extraImendCheck) : base(model, additionalBinaryDataProperties) - { - Threshold = threshold; - Timeout = timeout; - SecondaryThreshold = secondaryThreshold; - SecondaryTimeout = secondaryTimeout; - DisableRules = disableRules; - SrBoost = srBoost; - ExtraImendCheck = extraImendCheck; - } - - /// Gets or sets the Threshold. - public float? Threshold { get; set; } - - /// Gets or sets the Timeout. - public float? Timeout { get; set; } - - /// Gets or sets the SecondaryThreshold. - public float? SecondaryThreshold { get; set; } - - /// Gets or sets the SecondaryTimeout. - public float? SecondaryTimeout { get; set; } - - /// Gets or sets the DisableRules. - public bool? DisableRules { get; set; } - - /// Gets or sets the SrBoost. - public float? SrBoost { get; set; } - - /// Gets or sets the ExtraImendCheck. - public bool? ExtraImendCheck { get; set; } - } -} diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Generated/AzureSemanticDetectionMultilingual.Serialization.cs b/sdk/ai/Azure.AI.VoiceLive/src/Generated/AzureSemanticDetectionMultilingual.Serialization.cs deleted file mode 100644 index a44ad09c897d..000000000000 --- a/sdk/ai/Azure.AI.VoiceLive/src/Generated/AzureSemanticDetectionMultilingual.Serialization.cs +++ /dev/null @@ -1,235 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; -using System.ClientModel.Primitives; -using System.Collections.Generic; -using System.Text.Json; - -namespace Azure.AI.VoiceLive -{ - /// Azure semantic end-of-utterance detection (multilingual). 
- public partial class AzureSemanticDetectionMultilingual : IJsonModel - { - /// The JSON writer. - /// The client options for reading and writing models. - void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) - { - writer.WriteStartObject(); - JsonModelWriteCore(writer, options); - writer.WriteEndObject(); - } - - /// The JSON writer. - /// The client options for reading and writing models. - protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) - { - string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; - if (format != "J") - { - throw new FormatException($"The model {nameof(AzureSemanticDetectionMultilingual)} does not support writing '{format}' format."); - } - base.JsonModelWriteCore(writer, options); - if (Optional.IsDefined(Threshold)) - { - writer.WritePropertyName("threshold"u8); - writer.WriteNumberValue(Threshold.Value); - } - if (Optional.IsDefined(Timeout)) - { - writer.WritePropertyName("timeout"u8); - writer.WriteNumberValue(Timeout.Value); - } - if (Optional.IsDefined(SecondaryThreshold)) - { - writer.WritePropertyName("secondary_threshold"u8); - writer.WriteNumberValue(SecondaryThreshold.Value); - } - if (Optional.IsDefined(SecondaryTimeout)) - { - writer.WritePropertyName("secondary_timeout"u8); - writer.WriteNumberValue(SecondaryTimeout.Value); - } - if (Optional.IsDefined(DisableRules)) - { - writer.WritePropertyName("disable_rules"u8); - writer.WriteBooleanValue(DisableRules.Value); - } - if (Optional.IsDefined(SrBoost)) - { - writer.WritePropertyName("sr_boost"u8); - writer.WriteNumberValue(SrBoost.Value); - } - if (Optional.IsDefined(ExtraImendCheck)) - { - writer.WritePropertyName("extra_imend_check"u8); - writer.WriteBooleanValue(ExtraImendCheck.Value); - } - } - - /// The JSON reader. - /// The client options for reading and writing models. 
- AzureSemanticDetectionMultilingual IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) => (AzureSemanticDetectionMultilingual)JsonModelCreateCore(ref reader, options); - - /// The JSON reader. - /// The client options for reading and writing models. - protected override EouDetection JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options) - { - string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; - if (format != "J") - { - throw new FormatException($"The model {nameof(AzureSemanticDetectionMultilingual)} does not support reading '{format}' format."); - } - using JsonDocument document = JsonDocument.ParseValue(ref reader); - return DeserializeAzureSemanticDetectionMultilingual(document.RootElement, options); - } - - /// The JSON element to deserialize. - /// The client options for reading and writing models. - internal static AzureSemanticDetectionMultilingual DeserializeAzureSemanticDetectionMultilingual(JsonElement element, ModelReaderWriterOptions options) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - EOUDetectionModel model = default; - IDictionary additionalBinaryDataProperties = new ChangeTrackingDictionary(); - float? threshold = default; - float? timeout = default; - float? secondaryThreshold = default; - float? secondaryTimeout = default; - bool? disableRules = default; - float? srBoost = default; - bool? 
extraImendCheck = default; - foreach (var prop in element.EnumerateObject()) - { - if (prop.NameEquals("model"u8)) - { - model = prop.Value.GetString().ToEOUDetectionModel(); - continue; - } - if (prop.NameEquals("threshold"u8)) - { - if (prop.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - threshold = prop.Value.GetSingle(); - continue; - } - if (prop.NameEquals("timeout"u8)) - { - if (prop.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - timeout = prop.Value.GetSingle(); - continue; - } - if (prop.NameEquals("secondary_threshold"u8)) - { - if (prop.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - secondaryThreshold = prop.Value.GetSingle(); - continue; - } - if (prop.NameEquals("secondary_timeout"u8)) - { - if (prop.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - secondaryTimeout = prop.Value.GetSingle(); - continue; - } - if (prop.NameEquals("disable_rules"u8)) - { - if (prop.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - disableRules = prop.Value.GetBoolean(); - continue; - } - if (prop.NameEquals("sr_boost"u8)) - { - if (prop.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - srBoost = prop.Value.GetSingle(); - continue; - } - if (prop.NameEquals("extra_imend_check"u8)) - { - if (prop.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - extraImendCheck = prop.Value.GetBoolean(); - continue; - } - if (options.Format != "W") - { - additionalBinaryDataProperties.Add(prop.Name, BinaryData.FromString(prop.Value.GetRawText())); - } - } - return new AzureSemanticDetectionMultilingual( - model, - additionalBinaryDataProperties, - threshold, - timeout, - secondaryThreshold, - secondaryTimeout, - disableRules, - srBoost, - extraImendCheck); - } - - /// The client options for reading and writing models. - BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) => PersistableModelWriteCore(options); - - /// The client options for reading and writing models. 
- protected override BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options) - { - string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; - switch (format) - { - case "J": - return ModelReaderWriter.Write(this, options, AzureAIVoiceLiveContext.Default); - default: - throw new FormatException($"The model {nameof(AzureSemanticDetectionMultilingual)} does not support writing '{options.Format}' format."); - } - } - - /// The data to parse. - /// The client options for reading and writing models. - AzureSemanticDetectionMultilingual IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) => (AzureSemanticDetectionMultilingual)PersistableModelCreateCore(data, options); - - /// The data to parse. - /// The client options for reading and writing models. - protected override EouDetection PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options) - { - string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; - switch (format) - { - case "J": - using (JsonDocument document = JsonDocument.Parse(data)) - { - return DeserializeAzureSemanticDetectionMultilingual(document.RootElement, options); - } - default: - throw new FormatException($"The model {nameof(AzureSemanticDetectionMultilingual)} does not support reading '{options.Format}' format."); - } - } - - /// The client options for reading and writing models. - string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; - } -} diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Generated/AzureSemanticDetectionMultilingual.cs b/sdk/ai/Azure.AI.VoiceLive/src/Generated/AzureSemanticDetectionMultilingual.cs deleted file mode 100644 index a29604feee04..000000000000 --- a/sdk/ai/Azure.AI.VoiceLive/src/Generated/AzureSemanticDetectionMultilingual.cs +++ /dev/null @@ -1,63 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. 
-// Licensed under the MIT License. - -// - -#nullable disable - -using System; -using System.Collections.Generic; - -namespace Azure.AI.VoiceLive -{ - /// Azure semantic end-of-utterance detection (multilingual). - public partial class AzureSemanticDetectionMultilingual : EouDetection - { - /// Initializes a new instance of . - public AzureSemanticDetectionMultilingual() : base(EOUDetectionModel.SemanticDetectionV1Multilingual) - { - } - - /// Initializes a new instance of . - /// - /// Keeps track of any properties unknown to the library. - /// - /// - /// - /// - /// - /// - /// - internal AzureSemanticDetectionMultilingual(EOUDetectionModel model, IDictionary additionalBinaryDataProperties, float? threshold, float? timeout, float? secondaryThreshold, float? secondaryTimeout, bool? disableRules, float? srBoost, bool? extraImendCheck) : base(model, additionalBinaryDataProperties) - { - Threshold = threshold; - Timeout = timeout; - SecondaryThreshold = secondaryThreshold; - SecondaryTimeout = secondaryTimeout; - DisableRules = disableRules; - SrBoost = srBoost; - ExtraImendCheck = extraImendCheck; - } - - /// Gets or sets the Threshold. - public float? Threshold { get; set; } - - /// Gets or sets the Timeout. - public float? Timeout { get; set; } - - /// Gets or sets the SecondaryThreshold. - public float? SecondaryThreshold { get; set; } - - /// Gets or sets the SecondaryTimeout. - public float? SecondaryTimeout { get; set; } - - /// Gets or sets the DisableRules. - public bool? DisableRules { get; set; } - - /// Gets or sets the SrBoost. - public float? SrBoost { get; set; } - - /// Gets or sets the ExtraImendCheck. - public bool? 
ExtraImendCheck { get; set; } - } -} diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Generated/AzureSemanticDetectionEn.Serialization.cs b/sdk/ai/Azure.AI.VoiceLive/src/Generated/AzureSemanticEnEouDetection.Serialization.cs similarity index 50% rename from sdk/ai/Azure.AI.VoiceLive/src/Generated/AzureSemanticDetectionEn.Serialization.cs rename to sdk/ai/Azure.AI.VoiceLive/src/Generated/AzureSemanticEnEouDetection.Serialization.cs index d98e1a063ad1..e9b9a6c02ef2 100644 --- a/sdk/ai/Azure.AI.VoiceLive/src/Generated/AzureSemanticDetectionEn.Serialization.cs +++ b/sdk/ai/Azure.AI.VoiceLive/src/Generated/AzureSemanticEnEouDetection.Serialization.cs @@ -13,11 +13,11 @@ namespace Azure.AI.VoiceLive { /// Azure semantic end-of-utterance detection (English-optimized). - public partial class AzureSemanticDetectionEn : IJsonModel + public partial class AzureSemanticEnEouDetection : IJsonModel { /// The JSON writer. /// The client options for reading and writing models. - void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) { writer.WriteStartObject(); JsonModelWriteCore(writer, options); @@ -28,10 +28,10 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelRead /// The client options for reading and writing models. protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { - string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + string format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") { - throw new FormatException($"The model {nameof(AzureSemanticDetectionEn)} does not support writing '{format}' format."); + throw new FormatException($"The model {nameof(AzureSemanticEnEouDetection)} does not support writing '{format}' format."); } base.JsonModelWriteCore(writer, options); if (Optional.IsDefined(Threshold)) @@ -39,58 +39,33 @@ protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWri writer.WritePropertyName("threshold"u8); writer.WriteNumberValue(Threshold.Value); } - if (Optional.IsDefined(Timeout)) + if (Optional.IsDefined(TimeoutMs)) { writer.WritePropertyName("timeout"u8); - writer.WriteNumberValue(Timeout.Value); - } - if (Optional.IsDefined(SecondaryThreshold)) - { - writer.WritePropertyName("secondary_threshold"u8); - writer.WriteNumberValue(SecondaryThreshold.Value); - } - if (Optional.IsDefined(SecondaryTimeout)) - { - writer.WritePropertyName("secondary_timeout"u8); - writer.WriteNumberValue(SecondaryTimeout.Value); - } - if (Optional.IsDefined(DisableRules)) - { - writer.WritePropertyName("disable_rules"u8); - writer.WriteBooleanValue(DisableRules.Value); - } - if (Optional.IsDefined(SrBoost)) - { - writer.WritePropertyName("sr_boost"u8); - writer.WriteNumberValue(SrBoost.Value); - } - if (Optional.IsDefined(ExtraImendCheck)) - { - writer.WritePropertyName("extra_imend_check"u8); - writer.WriteBooleanValue(ExtraImendCheck.Value); + writer.WriteNumberValue(TimeoutMs.Value); } } /// The JSON reader. /// The client options for reading and writing models. 
- AzureSemanticDetectionEn IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) => (AzureSemanticDetectionEn)JsonModelCreateCore(ref reader, options); + AzureSemanticEnEouDetection IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) => (AzureSemanticEnEouDetection)JsonModelCreateCore(ref reader, options); /// The JSON reader. /// The client options for reading and writing models. protected override EouDetection JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options) { - string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") { - throw new FormatException($"The model {nameof(AzureSemanticDetectionEn)} does not support reading '{format}' format."); + throw new FormatException($"The model {nameof(AzureSemanticEnEouDetection)} does not support reading '{format}' format."); } using JsonDocument document = JsonDocument.ParseValue(ref reader); - return DeserializeAzureSemanticDetectionEn(document.RootElement, options); + return DeserializeAzureSemanticEnEouDetection(document.RootElement, options); } /// The JSON element to deserialize. /// The client options for reading and writing models. - internal static AzureSemanticDetectionEn DeserializeAzureSemanticDetectionEn(JsonElement element, ModelReaderWriterOptions options) + internal static AzureSemanticEnEouDetection DeserializeAzureSemanticEnEouDetection(JsonElement element, ModelReaderWriterOptions options) { if (element.ValueKind == JsonValueKind.Null) { @@ -99,12 +74,7 @@ internal static AzureSemanticDetectionEn DeserializeAzureSemanticDetectionEn(Jso EOUDetectionModel model = default; IDictionary additionalBinaryDataProperties = new ChangeTrackingDictionary(); float? threshold = default; - float? timeout = default; - float? 
secondaryThreshold = default; - float? secondaryTimeout = default; - bool? disableRules = default; - float? srBoost = default; - bool? extraImendCheck = default; + float? timeoutMs = default; foreach (var prop in element.EnumerateObject()) { if (prop.NameEquals("model"u8)) @@ -127,52 +97,7 @@ internal static AzureSemanticDetectionEn DeserializeAzureSemanticDetectionEn(Jso { continue; } - timeout = prop.Value.GetSingle(); - continue; - } - if (prop.NameEquals("secondary_threshold"u8)) - { - if (prop.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - secondaryThreshold = prop.Value.GetSingle(); - continue; - } - if (prop.NameEquals("secondary_timeout"u8)) - { - if (prop.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - secondaryTimeout = prop.Value.GetSingle(); - continue; - } - if (prop.NameEquals("disable_rules"u8)) - { - if (prop.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - disableRules = prop.Value.GetBoolean(); - continue; - } - if (prop.NameEquals("sr_boost"u8)) - { - if (prop.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - srBoost = prop.Value.GetSingle(); - continue; - } - if (prop.NameEquals("extra_imend_check"u8)) - { - if (prop.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - extraImendCheck = prop.Value.GetBoolean(); + timeoutMs = prop.Value.GetSingle(); continue; } if (options.Format != "W") @@ -180,56 +105,47 @@ internal static AzureSemanticDetectionEn DeserializeAzureSemanticDetectionEn(Jso additionalBinaryDataProperties.Add(prop.Name, BinaryData.FromString(prop.Value.GetRawText())); } } - return new AzureSemanticDetectionEn( - model, - additionalBinaryDataProperties, - threshold, - timeout, - secondaryThreshold, - secondaryTimeout, - disableRules, - srBoost, - extraImendCheck); + return new AzureSemanticEnEouDetection(model, additionalBinaryDataProperties, threshold, timeoutMs); } /// The client options for reading and writing models. 
- BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) => PersistableModelWriteCore(options); + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) => PersistableModelWriteCore(options); /// The client options for reading and writing models. protected override BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options) { - string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; switch (format) { case "J": return ModelReaderWriter.Write(this, options, AzureAIVoiceLiveContext.Default); default: - throw new FormatException($"The model {nameof(AzureSemanticDetectionEn)} does not support writing '{options.Format}' format."); + throw new FormatException($"The model {nameof(AzureSemanticEnEouDetection)} does not support writing '{options.Format}' format."); } } /// The data to parse. /// The client options for reading and writing models. - AzureSemanticDetectionEn IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) => (AzureSemanticDetectionEn)PersistableModelCreateCore(data, options); + AzureSemanticEnEouDetection IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) => (AzureSemanticEnEouDetection)PersistableModelCreateCore(data, options); /// The data to parse. /// The client options for reading and writing models. protected override EouDetection PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options) { - string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + string format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; switch (format) { case "J": using (JsonDocument document = JsonDocument.Parse(data)) { - return DeserializeAzureSemanticDetectionEn(document.RootElement, options); + return DeserializeAzureSemanticEnEouDetection(document.RootElement, options); } default: - throw new FormatException($"The model {nameof(AzureSemanticDetectionEn)} does not support reading '{options.Format}' format."); + throw new FormatException($"The model {nameof(AzureSemanticEnEouDetection)} does not support reading '{options.Format}' format."); } } /// The client options for reading and writing models. - string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; } } diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Generated/AzureSemanticEnEouDetection.cs b/sdk/ai/Azure.AI.VoiceLive/src/Generated/AzureSemanticEnEouDetection.cs new file mode 100644 index 000000000000..2610df46494b --- /dev/null +++ b/sdk/ai/Azure.AI.VoiceLive/src/Generated/AzureSemanticEnEouDetection.cs @@ -0,0 +1,38 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.VoiceLive +{ + /// Azure semantic end-of-utterance detection (English-optimized). + public partial class AzureSemanticEnEouDetection : EouDetection + { + /// Initializes a new instance of . + public AzureSemanticEnEouDetection() : base(EOUDetectionModel.SemanticDetectionV1En) + { + } + + /// Initializes a new instance of . + /// + /// Keeps track of any properties unknown to the library. + /// + /// + internal AzureSemanticEnEouDetection(EOUDetectionModel model, IDictionary additionalBinaryDataProperties, float? threshold, float? 
timeoutMs) : base(model, additionalBinaryDataProperties) + { + Threshold = threshold; + TimeoutMs = timeoutMs; + } + + /// Gets or sets the Threshold. + public float? Threshold { get; set; } + + /// Gets or sets the TimeoutMs. + public float? TimeoutMs { get; set; } + } +} diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Generated/AzureSemanticDetection.Serialization.cs b/sdk/ai/Azure.AI.VoiceLive/src/Generated/AzureSemanticEouDetection.Serialization.cs similarity index 50% rename from sdk/ai/Azure.AI.VoiceLive/src/Generated/AzureSemanticDetection.Serialization.cs rename to sdk/ai/Azure.AI.VoiceLive/src/Generated/AzureSemanticEouDetection.Serialization.cs index 031ddaf1f3e7..972dcb0a6def 100644 --- a/sdk/ai/Azure.AI.VoiceLive/src/Generated/AzureSemanticDetection.Serialization.cs +++ b/sdk/ai/Azure.AI.VoiceLive/src/Generated/AzureSemanticEouDetection.Serialization.cs @@ -13,11 +13,11 @@ namespace Azure.AI.VoiceLive { /// Azure semantic end-of-utterance detection (default). - public partial class AzureSemanticDetection : IJsonModel + public partial class AzureSemanticEouDetection : IJsonModel { /// The JSON writer. /// The client options for reading and writing models. - void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) { writer.WriteStartObject(); JsonModelWriteCore(writer, options); @@ -28,10 +28,10 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReader /// The client options for reading and writing models. protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { - string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + string format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") { - throw new FormatException($"The model {nameof(AzureSemanticDetection)} does not support writing '{format}' format."); + throw new FormatException($"The model {nameof(AzureSemanticEouDetection)} does not support writing '{format}' format."); } base.JsonModelWriteCore(writer, options); if (Optional.IsDefined(Threshold)) @@ -39,58 +39,33 @@ protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWri writer.WritePropertyName("threshold"u8); writer.WriteNumberValue(Threshold.Value); } - if (Optional.IsDefined(Timeout)) + if (Optional.IsDefined(TimeoutMs)) { writer.WritePropertyName("timeout"u8); - writer.WriteNumberValue(Timeout.Value); - } - if (Optional.IsDefined(SecondaryThreshold)) - { - writer.WritePropertyName("secondary_threshold"u8); - writer.WriteNumberValue(SecondaryThreshold.Value); - } - if (Optional.IsDefined(SecondaryTimeout)) - { - writer.WritePropertyName("secondary_timeout"u8); - writer.WriteNumberValue(SecondaryTimeout.Value); - } - if (Optional.IsDefined(DisableRules)) - { - writer.WritePropertyName("disable_rules"u8); - writer.WriteBooleanValue(DisableRules.Value); - } - if (Optional.IsDefined(SrBoost)) - { - writer.WritePropertyName("sr_boost"u8); - writer.WriteNumberValue(SrBoost.Value); - } - if (Optional.IsDefined(ExtraImendCheck)) - { - writer.WritePropertyName("extra_imend_check"u8); - writer.WriteBooleanValue(ExtraImendCheck.Value); + writer.WriteNumberValue(TimeoutMs.Value); } } /// The JSON reader. /// The client options for reading and writing models. - AzureSemanticDetection IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) => (AzureSemanticDetection)JsonModelCreateCore(ref reader, options); + AzureSemanticEouDetection IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) => (AzureSemanticEouDetection)JsonModelCreateCore(ref reader, options); /// The JSON reader. 
/// The client options for reading and writing models. protected override EouDetection JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options) { - string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") { - throw new FormatException($"The model {nameof(AzureSemanticDetection)} does not support reading '{format}' format."); + throw new FormatException($"The model {nameof(AzureSemanticEouDetection)} does not support reading '{format}' format."); } using JsonDocument document = JsonDocument.ParseValue(ref reader); - return DeserializeAzureSemanticDetection(document.RootElement, options); + return DeserializeAzureSemanticEouDetection(document.RootElement, options); } /// The JSON element to deserialize. /// The client options for reading and writing models. - internal static AzureSemanticDetection DeserializeAzureSemanticDetection(JsonElement element, ModelReaderWriterOptions options) + internal static AzureSemanticEouDetection DeserializeAzureSemanticEouDetection(JsonElement element, ModelReaderWriterOptions options) { if (element.ValueKind == JsonValueKind.Null) { @@ -99,12 +74,7 @@ internal static AzureSemanticDetection DeserializeAzureSemanticDetection(JsonEle EOUDetectionModel model = default; IDictionary additionalBinaryDataProperties = new ChangeTrackingDictionary(); float? threshold = default; - float? timeout = default; - float? secondaryThreshold = default; - float? secondaryTimeout = default; - bool? disableRules = default; - float? srBoost = default; - bool? extraImendCheck = default; + float? 
timeoutMs = default; foreach (var prop in element.EnumerateObject()) { if (prop.NameEquals("model"u8)) @@ -127,52 +97,7 @@ internal static AzureSemanticDetection DeserializeAzureSemanticDetection(JsonEle { continue; } - timeout = prop.Value.GetSingle(); - continue; - } - if (prop.NameEquals("secondary_threshold"u8)) - { - if (prop.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - secondaryThreshold = prop.Value.GetSingle(); - continue; - } - if (prop.NameEquals("secondary_timeout"u8)) - { - if (prop.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - secondaryTimeout = prop.Value.GetSingle(); - continue; - } - if (prop.NameEquals("disable_rules"u8)) - { - if (prop.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - disableRules = prop.Value.GetBoolean(); - continue; - } - if (prop.NameEquals("sr_boost"u8)) - { - if (prop.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - srBoost = prop.Value.GetSingle(); - continue; - } - if (prop.NameEquals("extra_imend_check"u8)) - { - if (prop.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - extraImendCheck = prop.Value.GetBoolean(); + timeoutMs = prop.Value.GetSingle(); continue; } if (options.Format != "W") @@ -180,56 +105,47 @@ internal static AzureSemanticDetection DeserializeAzureSemanticDetection(JsonEle additionalBinaryDataProperties.Add(prop.Name, BinaryData.FromString(prop.Value.GetRawText())); } } - return new AzureSemanticDetection( - model, - additionalBinaryDataProperties, - threshold, - timeout, - secondaryThreshold, - secondaryTimeout, - disableRules, - srBoost, - extraImendCheck); + return new AzureSemanticEouDetection(model, additionalBinaryDataProperties, threshold, timeoutMs); } /// The client options for reading and writing models. 
- BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) => PersistableModelWriteCore(options); + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) => PersistableModelWriteCore(options); /// The client options for reading and writing models. protected override BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options) { - string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; switch (format) { case "J": return ModelReaderWriter.Write(this, options, AzureAIVoiceLiveContext.Default); default: - throw new FormatException($"The model {nameof(AzureSemanticDetection)} does not support writing '{options.Format}' format."); + throw new FormatException($"The model {nameof(AzureSemanticEouDetection)} does not support writing '{options.Format}' format."); } } /// The data to parse. /// The client options for reading and writing models. - AzureSemanticDetection IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) => (AzureSemanticDetection)PersistableModelCreateCore(data, options); + AzureSemanticEouDetection IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) => (AzureSemanticEouDetection)PersistableModelCreateCore(data, options); /// The data to parse. /// The client options for reading and writing models. protected override EouDetection PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options) { - string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + string format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; switch (format) { case "J": using (JsonDocument document = JsonDocument.Parse(data)) { - return DeserializeAzureSemanticDetection(document.RootElement, options); + return DeserializeAzureSemanticEouDetection(document.RootElement, options); } default: - throw new FormatException($"The model {nameof(AzureSemanticDetection)} does not support reading '{options.Format}' format."); + throw new FormatException($"The model {nameof(AzureSemanticEouDetection)} does not support reading '{options.Format}' format."); } } /// The client options for reading and writing models. - string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; } } diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Generated/AzureSemanticEouDetection.cs b/sdk/ai/Azure.AI.VoiceLive/src/Generated/AzureSemanticEouDetection.cs new file mode 100644 index 000000000000..315085bae105 --- /dev/null +++ b/sdk/ai/Azure.AI.VoiceLive/src/Generated/AzureSemanticEouDetection.cs @@ -0,0 +1,35 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.VoiceLive +{ + /// Azure semantic end-of-utterance detection (default). + public partial class AzureSemanticEouDetection : EouDetection + { + /// Initializes a new instance of . + public AzureSemanticEouDetection() : base(EOUDetectionModel.SemanticDetectionV1) + { + } + + /// Initializes a new instance of . + /// + /// Keeps track of any properties unknown to the library. + /// + /// Gets or sets the Timeout. + internal AzureSemanticEouDetection(EOUDetectionModel model, IDictionary additionalBinaryDataProperties, float? threshold, float? 
timeoutMs) : base(model, additionalBinaryDataProperties) + { + Threshold = threshold; + TimeoutMs = timeoutMs; + } + + /// Gets or sets the Threshold. + public float? Threshold { get; set; } + } +} diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Generated/AzureSemanticMultilingualEouDetection.Serialization.cs b/sdk/ai/Azure.AI.VoiceLive/src/Generated/AzureSemanticMultilingualEouDetection.Serialization.cs new file mode 100644 index 000000000000..51b42ef9105e --- /dev/null +++ b/sdk/ai/Azure.AI.VoiceLive/src/Generated/AzureSemanticMultilingualEouDetection.Serialization.cs @@ -0,0 +1,151 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; + +namespace Azure.AI.VoiceLive +{ + /// Azure semantic end-of-utterance detection (multilingual). + public partial class AzureSemanticMultilingualEouDetection : IJsonModel + { + /// The JSON writer. + /// The client options for reading and writing models. + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(AzureSemanticMultilingualEouDetection)} does not support writing '{format}' format."); + } + base.JsonModelWriteCore(writer, options); + if (Optional.IsDefined(Threshold)) + { + writer.WritePropertyName("threshold"u8); + writer.WriteNumberValue(Threshold.Value); + } + if (Optional.IsDefined(TimeoutMs)) + { + writer.WritePropertyName("timeout"u8); + writer.WriteNumberValue(TimeoutMs.Value); + } + } + + /// The JSON reader. + /// The client options for reading and writing models. + AzureSemanticMultilingualEouDetection IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) => (AzureSemanticMultilingualEouDetection)JsonModelCreateCore(ref reader, options); + + /// The JSON reader. + /// The client options for reading and writing models. + protected override EouDetection JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(AzureSemanticMultilingualEouDetection)} does not support reading '{format}' format."); + } + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeAzureSemanticMultilingualEouDetection(document.RootElement, options); + } + + /// The JSON element to deserialize. + /// The client options for reading and writing models. + internal static AzureSemanticMultilingualEouDetection DeserializeAzureSemanticMultilingualEouDetection(JsonElement element, ModelReaderWriterOptions options) + { + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + EOUDetectionModel model = default; + IDictionary additionalBinaryDataProperties = new ChangeTrackingDictionary(); + float? threshold = default; + float? 
timeoutMs = default; + foreach (var prop in element.EnumerateObject()) + { + if (prop.NameEquals("model"u8)) + { + model = prop.Value.GetString().ToEOUDetectionModel(); + continue; + } + if (prop.NameEquals("threshold"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + threshold = prop.Value.GetSingle(); + continue; + } + if (prop.NameEquals("timeout"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + timeoutMs = prop.Value.GetSingle(); + continue; + } + if (options.Format != "W") + { + additionalBinaryDataProperties.Add(prop.Name, BinaryData.FromString(prop.Value.GetRawText())); + } + } + return new AzureSemanticMultilingualEouDetection(model, additionalBinaryDataProperties, threshold, timeoutMs); + } + + /// The client options for reading and writing models. + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) => PersistableModelWriteCore(options); + + /// The client options for reading and writing models. + protected override BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options, AzureAIVoiceLiveContext.Default); + default: + throw new FormatException($"The model {nameof(AzureSemanticMultilingualEouDetection)} does not support writing '{options.Format}' format."); + } + } + + /// The data to parse. + /// The client options for reading and writing models. + AzureSemanticMultilingualEouDetection IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) => (AzureSemanticMultilingualEouDetection)PersistableModelCreateCore(data, options); + + /// The data to parse. + /// The client options for reading and writing models. 
+ protected override EouDetection PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + using (JsonDocument document = JsonDocument.Parse(data)) + { + return DeserializeAzureSemanticMultilingualEouDetection(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(AzureSemanticMultilingualEouDetection)} does not support reading '{options.Format}' format."); + } + } + + /// The client options for reading and writing models. + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + } +} diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Generated/AzureSemanticMultilingualEouDetection.cs b/sdk/ai/Azure.AI.VoiceLive/src/Generated/AzureSemanticMultilingualEouDetection.cs new file mode 100644 index 000000000000..5025c0b0415b --- /dev/null +++ b/sdk/ai/Azure.AI.VoiceLive/src/Generated/AzureSemanticMultilingualEouDetection.cs @@ -0,0 +1,38 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.VoiceLive +{ + /// Azure semantic end-of-utterance detection (multilingual). + public partial class AzureSemanticMultilingualEouDetection : EouDetection + { + /// Initializes a new instance of . + public AzureSemanticMultilingualEouDetection() : base(EOUDetectionModel.SemanticDetectionV1Multilingual) + { + } + + /// Initializes a new instance of . + /// + /// Keeps track of any properties unknown to the library. + /// + /// + internal AzureSemanticMultilingualEouDetection(EOUDetectionModel model, IDictionary additionalBinaryDataProperties, float? threshold, float? 
timeoutMs) : base(model, additionalBinaryDataProperties) + { + Threshold = threshold; + TimeoutMs = timeoutMs; + } + + /// Gets or sets the Threshold. + public float? Threshold { get; set; } + + /// Gets or sets the TimeoutMs. + public float? TimeoutMs { get; set; } + } +} diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Generated/AzureSemanticVad.cs b/sdk/ai/Azure.AI.VoiceLive/src/Generated/AzureSemanticVad.cs deleted file mode 100644 index 96fa8c6f0a91..000000000000 --- a/sdk/ai/Azure.AI.VoiceLive/src/Generated/AzureSemanticVad.cs +++ /dev/null @@ -1,89 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; -using System.Collections.Generic; - -namespace Azure.AI.VoiceLive -{ - /// Server Speech Detection (Azure semantic VAD, default variant). - public partial class AzureSemanticVad : TurnDetection - { - /// Initializes a new instance of . - public AzureSemanticVad() : base(TurnDetectionType.AzureSemanticVad) - { - Languages = new ChangeTrackingList(); - } - - /// Initializes a new instance of . - /// - /// Keeps track of any properties unknown to the library. - /// - /// - /// - /// - /// - /// - /// - /// - /// - /// - /// - /// - internal AzureSemanticVad(TurnDetectionType @type, IDictionary additionalBinaryDataProperties, float? threshold, int? prefixPaddingMs, int? silenceDurationMs, EouDetection endOfUtteranceDetection, float? negThreshold, int? speechDurationMs, int? windowSize, int? distinctCiPhones, bool? requireVowel, bool? removeFillerWords, IList languages, bool? 
autoTruncate) : base(@type, additionalBinaryDataProperties) - { - Threshold = threshold; - PrefixPaddingMs = prefixPaddingMs; - SilenceDurationMs = silenceDurationMs; - EndOfUtteranceDetection = endOfUtteranceDetection; - NegThreshold = negThreshold; - SpeechDurationMs = speechDurationMs; - WindowSize = windowSize; - DistinctCiPhones = distinctCiPhones; - RequireVowel = requireVowel; - RemoveFillerWords = removeFillerWords; - Languages = languages; - AutoTruncate = autoTruncate; - } - - /// Gets or sets the Threshold. - public float? Threshold { get; set; } - - /// Gets or sets the PrefixPaddingMs. - public int? PrefixPaddingMs { get; set; } - - /// Gets or sets the SilenceDurationMs. - public int? SilenceDurationMs { get; set; } - - /// Gets or sets the EndOfUtteranceDetection. - public EouDetection EndOfUtteranceDetection { get; set; } - - /// Gets or sets the NegThreshold. - public float? NegThreshold { get; set; } - - /// Gets or sets the SpeechDurationMs. - public int? SpeechDurationMs { get; set; } - - /// Gets or sets the WindowSize. - public int? WindowSize { get; set; } - - /// Gets or sets the DistinctCiPhones. - public int? DistinctCiPhones { get; set; } - - /// Gets or sets the RequireVowel. - public bool? RequireVowel { get; set; } - - /// Gets or sets the RemoveFillerWords. - public bool? RemoveFillerWords { get; set; } - - /// Gets the Languages. - public IList Languages { get; } - - /// Gets or sets the AutoTruncate. - public bool? AutoTruncate { get; set; } - } -} diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Generated/AzureSemanticVadEn.cs b/sdk/ai/Azure.AI.VoiceLive/src/Generated/AzureSemanticVadEn.cs deleted file mode 100644 index 465c99bf1d4c..000000000000 --- a/sdk/ai/Azure.AI.VoiceLive/src/Generated/AzureSemanticVadEn.cs +++ /dev/null @@ -1,89 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
- -// - -#nullable disable - -using System; -using System.Collections.Generic; - -namespace Azure.AI.VoiceLive -{ - /// Server Speech Detection (Azure semantic VAD, English-only). - public partial class AzureSemanticVadEn : TurnDetection - { - /// Initializes a new instance of . - public AzureSemanticVadEn() : base(TurnDetectionType.AzureSemanticVadEn) - { - Languages = new ChangeTrackingList(); - } - - /// Initializes a new instance of . - /// - /// Keeps track of any properties unknown to the library. - /// - /// - /// - /// - /// - /// - /// - /// - /// - /// - /// - /// - internal AzureSemanticVadEn(TurnDetectionType @type, IDictionary additionalBinaryDataProperties, float? threshold, int? prefixPaddingMs, int? silenceDurationMs, EouDetection endOfUtteranceDetection, float? negThreshold, int? speechDurationMs, int? windowSize, int? distinctCiPhones, bool? requireVowel, bool? removeFillerWords, IList languages, bool? autoTruncate) : base(@type, additionalBinaryDataProperties) - { - Threshold = threshold; - PrefixPaddingMs = prefixPaddingMs; - SilenceDurationMs = silenceDurationMs; - EndOfUtteranceDetection = endOfUtteranceDetection; - NegThreshold = negThreshold; - SpeechDurationMs = speechDurationMs; - WindowSize = windowSize; - DistinctCiPhones = distinctCiPhones; - RequireVowel = requireVowel; - RemoveFillerWords = removeFillerWords; - Languages = languages; - AutoTruncate = autoTruncate; - } - - /// Gets or sets the Threshold. - public float? Threshold { get; set; } - - /// Gets or sets the PrefixPaddingMs. - public int? PrefixPaddingMs { get; set; } - - /// Gets or sets the SilenceDurationMs. - public int? SilenceDurationMs { get; set; } - - /// Gets or sets the EndOfUtteranceDetection. - public EouDetection EndOfUtteranceDetection { get; set; } - - /// Gets or sets the NegThreshold. - public float? NegThreshold { get; set; } - - /// Gets or sets the SpeechDurationMs. - public int? SpeechDurationMs { get; set; } - - /// Gets or sets the WindowSize. 
- public int? WindowSize { get; set; } - - /// Gets or sets the DistinctCiPhones. - public int? DistinctCiPhones { get; set; } - - /// Gets or sets the RequireVowel. - public bool? RequireVowel { get; set; } - - /// Gets or sets the RemoveFillerWords. - public bool? RemoveFillerWords { get; set; } - - /// Gets the Languages. - public IList Languages { get; } - - /// Gets or sets the AutoTruncate. - public bool? AutoTruncate { get; set; } - } -} diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Generated/AzureSemanticVadServer.Serialization.cs b/sdk/ai/Azure.AI.VoiceLive/src/Generated/AzureSemanticVadEnTurnDetection.Serialization.cs similarity index 59% rename from sdk/ai/Azure.AI.VoiceLive/src/Generated/AzureSemanticVadServer.Serialization.cs rename to sdk/ai/Azure.AI.VoiceLive/src/Generated/AzureSemanticVadEnTurnDetection.Serialization.cs index 65763ca2d721..a9f8cdf04112 100644 --- a/sdk/ai/Azure.AI.VoiceLive/src/Generated/AzureSemanticVadServer.Serialization.cs +++ b/sdk/ai/Azure.AI.VoiceLive/src/Generated/AzureSemanticVadEnTurnDetection.Serialization.cs @@ -12,12 +12,12 @@ namespace Azure.AI.VoiceLive { - /// Server Speech Detection (legacy `server_sd` alias). - public partial class AzureSemanticVadServer : IJsonModel + /// Server Speech Detection (Azure semantic VAD, English-only). + public partial class AzureSemanticVadEnTurnDetection : IJsonModel { /// The JSON writer. /// The client options for reading and writing models. - void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) { writer.WriteStartObject(); JsonModelWriteCore(writer, options); @@ -28,10 +28,10 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReader /// The client options for reading and writing models. protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { - string format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") { - throw new FormatException($"The model {nameof(AzureSemanticVadServer)} does not support writing '{format}' format."); + throw new FormatException($"The model {nameof(AzureSemanticVadEnTurnDetection)} does not support writing '{format}' format."); } base.JsonModelWriteCore(writer, options); if (Optional.IsDefined(Threshold)) @@ -54,51 +54,16 @@ protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWri writer.WritePropertyName("end_of_utterance_detection"u8); writer.WriteObjectValue(EndOfUtteranceDetection, options); } - if (Optional.IsDefined(NegThreshold)) - { - writer.WritePropertyName("neg_threshold"u8); - writer.WriteNumberValue(NegThreshold.Value); - } if (Optional.IsDefined(SpeechDurationMs)) { writer.WritePropertyName("speech_duration_ms"u8); writer.WriteNumberValue(SpeechDurationMs.Value); } - if (Optional.IsDefined(WindowSize)) - { - writer.WritePropertyName("window_size"u8); - writer.WriteNumberValue(WindowSize.Value); - } - if (Optional.IsDefined(DistinctCiPhones)) - { - writer.WritePropertyName("distinct_ci_phones"u8); - writer.WriteNumberValue(DistinctCiPhones.Value); - } - if (Optional.IsDefined(RequireVowel)) - { - writer.WritePropertyName("require_vowel"u8); - writer.WriteBooleanValue(RequireVowel.Value); - } if (Optional.IsDefined(RemoveFillerWords)) { writer.WritePropertyName("remove_filler_words"u8); writer.WriteBooleanValue(RemoveFillerWords.Value); } - if (Optional.IsCollectionDefined(Languages)) - { - writer.WritePropertyName("languages"u8); - writer.WriteStartArray(); - foreach (string item in Languages) - { - if (item == null) - { - writer.WriteNullValue(); - continue; - } - writer.WriteStringValue(item); - } - writer.WriteEndArray(); - } if (Optional.IsDefined(AutoTruncate)) { 
writer.WritePropertyName("auto_truncate"u8); @@ -108,24 +73,24 @@ protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWri /// The JSON reader. /// The client options for reading and writing models. - AzureSemanticVadServer IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) => (AzureSemanticVadServer)JsonModelCreateCore(ref reader, options); + AzureSemanticVadEnTurnDetection IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) => (AzureSemanticVadEnTurnDetection)JsonModelCreateCore(ref reader, options); /// The JSON reader. /// The client options for reading and writing models. protected override TurnDetection JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options) { - string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") { - throw new FormatException($"The model {nameof(AzureSemanticVadServer)} does not support reading '{format}' format."); + throw new FormatException($"The model {nameof(AzureSemanticVadEnTurnDetection)} does not support reading '{format}' format."); } using JsonDocument document = JsonDocument.ParseValue(ref reader); - return DeserializeAzureSemanticVadServer(document.RootElement, options); + return DeserializeAzureSemanticVadEnTurnDetection(document.RootElement, options); } /// The JSON element to deserialize. /// The client options for reading and writing models. 
- internal static AzureSemanticVadServer DeserializeAzureSemanticVadServer(JsonElement element, ModelReaderWriterOptions options) + internal static AzureSemanticVadEnTurnDetection DeserializeAzureSemanticVadEnTurnDetection(JsonElement element, ModelReaderWriterOptions options) { if (element.ValueKind == JsonValueKind.Null) { @@ -137,19 +102,14 @@ internal static AzureSemanticVadServer DeserializeAzureSemanticVadServer(JsonEle int? prefixPaddingMs = default; int? silenceDurationMs = default; EouDetection endOfUtteranceDetection = default; - float? negThreshold = default; int? speechDurationMs = default; - int? windowSize = default; - int? distinctCiPhones = default; - bool? requireVowel = default; bool? removeFillerWords = default; - IList languages = default; bool? autoTruncate = default; foreach (var prop in element.EnumerateObject()) { if (prop.NameEquals("type"u8)) { - @type = prop.Value.GetString().ToTurnDetectionType(); + @type = new TurnDetectionType(prop.Value.GetString()); continue; } if (prop.NameEquals("threshold"u8)) @@ -188,15 +148,6 @@ internal static AzureSemanticVadServer DeserializeAzureSemanticVadServer(JsonEle endOfUtteranceDetection = EouDetection.DeserializeEouDetection(prop.Value, options); continue; } - if (prop.NameEquals("neg_threshold"u8)) - { - if (prop.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - negThreshold = prop.Value.GetSingle(); - continue; - } if (prop.NameEquals("speech_duration_ms"u8)) { if (prop.Value.ValueKind == JsonValueKind.Null) @@ -206,33 +157,6 @@ internal static AzureSemanticVadServer DeserializeAzureSemanticVadServer(JsonEle speechDurationMs = prop.Value.GetInt32(); continue; } - if (prop.NameEquals("window_size"u8)) - { - if (prop.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - windowSize = prop.Value.GetInt32(); - continue; - } - if (prop.NameEquals("distinct_ci_phones"u8)) - { - if (prop.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - distinctCiPhones = prop.Value.GetInt32(); 
- continue; - } - if (prop.NameEquals("require_vowel"u8)) - { - if (prop.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - requireVowel = prop.Value.GetBoolean(); - continue; - } if (prop.NameEquals("remove_filler_words"u8)) { if (prop.Value.ValueKind == JsonValueKind.Null) @@ -242,27 +166,6 @@ internal static AzureSemanticVadServer DeserializeAzureSemanticVadServer(JsonEle removeFillerWords = prop.Value.GetBoolean(); continue; } - if (prop.NameEquals("languages"u8)) - { - if (prop.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - List array = new List(); - foreach (var item in prop.Value.EnumerateArray()) - { - if (item.ValueKind == JsonValueKind.Null) - { - array.Add(null); - } - else - { - array.Add(item.GetString()); - } - } - languages = array; - continue; - } if (prop.NameEquals("auto_truncate"u8)) { if (prop.Value.ValueKind == JsonValueKind.Null) @@ -277,61 +180,56 @@ internal static AzureSemanticVadServer DeserializeAzureSemanticVadServer(JsonEle additionalBinaryDataProperties.Add(prop.Name, BinaryData.FromString(prop.Value.GetRawText())); } } - return new AzureSemanticVadServer( + return new AzureSemanticVadEnTurnDetection( @type, additionalBinaryDataProperties, threshold, prefixPaddingMs, silenceDurationMs, endOfUtteranceDetection, - negThreshold, speechDurationMs, - windowSize, - distinctCiPhones, - requireVowel, removeFillerWords, - languages ?? new ChangeTrackingList(), autoTruncate); } /// The client options for reading and writing models. - BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) => PersistableModelWriteCore(options); + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) => PersistableModelWriteCore(options); /// The client options for reading and writing models. protected override BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options) { - string format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; switch (format) { case "J": return ModelReaderWriter.Write(this, options, AzureAIVoiceLiveContext.Default); default: - throw new FormatException($"The model {nameof(AzureSemanticVadServer)} does not support writing '{options.Format}' format."); + throw new FormatException($"The model {nameof(AzureSemanticVadEnTurnDetection)} does not support writing '{options.Format}' format."); } } /// The data to parse. /// The client options for reading and writing models. - AzureSemanticVadServer IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) => (AzureSemanticVadServer)PersistableModelCreateCore(data, options); + AzureSemanticVadEnTurnDetection IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) => (AzureSemanticVadEnTurnDetection)PersistableModelCreateCore(data, options); /// The data to parse. /// The client options for reading and writing models. protected override TurnDetection PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options) { - string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + string format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; switch (format) { case "J": using (JsonDocument document = JsonDocument.Parse(data)) { - return DeserializeAzureSemanticVadServer(document.RootElement, options); + return DeserializeAzureSemanticVadEnTurnDetection(document.RootElement, options); } default: - throw new FormatException($"The model {nameof(AzureSemanticVadServer)} does not support reading '{options.Format}' format."); + throw new FormatException($"The model {nameof(AzureSemanticVadEnTurnDetection)} does not support reading '{options.Format}' format."); } } /// The client options for reading and writing models. - string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; } } diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Generated/AzureSemanticVadEnTurnDetection.cs b/sdk/ai/Azure.AI.VoiceLive/src/Generated/AzureSemanticVadEnTurnDetection.cs new file mode 100644 index 000000000000..7c9ad87a3563 --- /dev/null +++ b/sdk/ai/Azure.AI.VoiceLive/src/Generated/AzureSemanticVadEnTurnDetection.cs @@ -0,0 +1,54 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.VoiceLive +{ + /// Server Speech Detection (Azure semantic VAD, English-only). + public partial class AzureSemanticVadEnTurnDetection : TurnDetection + { + /// Initializes a new instance of . + public AzureSemanticVadEnTurnDetection() : base(TurnDetectionType.AzureSemanticVadEn) + { + } + + /// Initializes a new instance of . + /// + /// Keeps track of any properties unknown to the library. + /// + /// Gets or sets the PrefixPaddingMs. + /// Gets or sets the SilenceDurationMs. + /// + /// Gets or sets the SpeechDurationMs. 
+ /// + /// + internal AzureSemanticVadEnTurnDetection(TurnDetectionType @type, IDictionary additionalBinaryDataProperties, float? threshold, int? prefixPaddingMs, int? silenceDurationMs, EouDetection endOfUtteranceDetection, int? speechDurationMs, bool? removeFillerWords, bool? autoTruncate) : base(@type, additionalBinaryDataProperties) + { + Threshold = threshold; + PrefixPaddingMs = prefixPaddingMs; + SilenceDurationMs = silenceDurationMs; + EndOfUtteranceDetection = endOfUtteranceDetection; + SpeechDurationMs = speechDurationMs; + RemoveFillerWords = removeFillerWords; + AutoTruncate = autoTruncate; + } + + /// Gets or sets the Threshold. + public float? Threshold { get; set; } + + /// Gets or sets the EndOfUtteranceDetection. + public EouDetection EndOfUtteranceDetection { get; set; } + + /// Gets or sets the RemoveFillerWords. + public bool? RemoveFillerWords { get; set; } + + /// Gets or sets the AutoTruncate. + public bool? AutoTruncate { get; set; } + } +} diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Generated/AzureSemanticVadEn.Serialization.cs b/sdk/ai/Azure.AI.VoiceLive/src/Generated/AzureSemanticVadMultilingualTurnDetection.Serialization.cs similarity index 70% rename from sdk/ai/Azure.AI.VoiceLive/src/Generated/AzureSemanticVadEn.Serialization.cs rename to sdk/ai/Azure.AI.VoiceLive/src/Generated/AzureSemanticVadMultilingualTurnDetection.Serialization.cs index 8d147e2ad601..52be31682aed 100644 --- a/sdk/ai/Azure.AI.VoiceLive/src/Generated/AzureSemanticVadEn.Serialization.cs +++ b/sdk/ai/Azure.AI.VoiceLive/src/Generated/AzureSemanticVadMultilingualTurnDetection.Serialization.cs @@ -12,12 +12,12 @@ namespace Azure.AI.VoiceLive { - /// Server Speech Detection (Azure semantic VAD, English-only). - public partial class AzureSemanticVadEn : IJsonModel + /// Server Speech Detection (Azure semantic VAD). + public partial class AzureSemanticVadMultilingualTurnDetection : IJsonModel { /// The JSON writer. /// The client options for reading and writing models. 
- void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) { writer.WriteStartObject(); JsonModelWriteCore(writer, options); @@ -28,10 +28,10 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWrit /// The client options for reading and writing models. protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { - string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") { - throw new FormatException($"The model {nameof(AzureSemanticVadEn)} does not support writing '{format}' format."); + throw new FormatException($"The model {nameof(AzureSemanticVadMultilingualTurnDetection)} does not support writing '{format}' format."); } base.JsonModelWriteCore(writer, options); if (Optional.IsDefined(Threshold)) @@ -54,31 +54,11 @@ protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWri writer.WritePropertyName("end_of_utterance_detection"u8); writer.WriteObjectValue(EndOfUtteranceDetection, options); } - if (Optional.IsDefined(NegThreshold)) - { - writer.WritePropertyName("neg_threshold"u8); - writer.WriteNumberValue(NegThreshold.Value); - } if (Optional.IsDefined(SpeechDurationMs)) { writer.WritePropertyName("speech_duration_ms"u8); writer.WriteNumberValue(SpeechDurationMs.Value); } - if (Optional.IsDefined(WindowSize)) - { - writer.WritePropertyName("window_size"u8); - writer.WriteNumberValue(WindowSize.Value); - } - if (Optional.IsDefined(DistinctCiPhones)) - { - writer.WritePropertyName("distinct_ci_phones"u8); - writer.WriteNumberValue(DistinctCiPhones.Value); - } - if (Optional.IsDefined(RequireVowel)) - { - writer.WritePropertyName("require_vowel"u8); - writer.WriteBooleanValue(RequireVowel.Value); - } if 
(Optional.IsDefined(RemoveFillerWords)) { writer.WritePropertyName("remove_filler_words"u8); @@ -108,24 +88,24 @@ protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWri /// The JSON reader. /// The client options for reading and writing models. - AzureSemanticVadEn IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) => (AzureSemanticVadEn)JsonModelCreateCore(ref reader, options); + AzureSemanticVadMultilingualTurnDetection IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) => (AzureSemanticVadMultilingualTurnDetection)JsonModelCreateCore(ref reader, options); /// The JSON reader. /// The client options for reading and writing models. protected override TurnDetection JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options) { - string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") { - throw new FormatException($"The model {nameof(AzureSemanticVadEn)} does not support reading '{format}' format."); + throw new FormatException($"The model {nameof(AzureSemanticVadMultilingualTurnDetection)} does not support reading '{format}' format."); } using JsonDocument document = JsonDocument.ParseValue(ref reader); - return DeserializeAzureSemanticVadEn(document.RootElement, options); + return DeserializeAzureSemanticVadMultilingualTurnDetection(document.RootElement, options); } /// The JSON element to deserialize. /// The client options for reading and writing models. 
- internal static AzureSemanticVadEn DeserializeAzureSemanticVadEn(JsonElement element, ModelReaderWriterOptions options) + internal static AzureSemanticVadMultilingualTurnDetection DeserializeAzureSemanticVadMultilingualTurnDetection(JsonElement element, ModelReaderWriterOptions options) { if (element.ValueKind == JsonValueKind.Null) { @@ -137,11 +117,7 @@ internal static AzureSemanticVadEn DeserializeAzureSemanticVadEn(JsonElement ele int? prefixPaddingMs = default; int? silenceDurationMs = default; EouDetection endOfUtteranceDetection = default; - float? negThreshold = default; int? speechDurationMs = default; - int? windowSize = default; - int? distinctCiPhones = default; - bool? requireVowel = default; bool? removeFillerWords = default; IList languages = default; bool? autoTruncate = default; @@ -149,7 +125,7 @@ internal static AzureSemanticVadEn DeserializeAzureSemanticVadEn(JsonElement ele { if (prop.NameEquals("type"u8)) { - @type = prop.Value.GetString().ToTurnDetectionType(); + @type = new TurnDetectionType(prop.Value.GetString()); continue; } if (prop.NameEquals("threshold"u8)) @@ -188,15 +164,6 @@ internal static AzureSemanticVadEn DeserializeAzureSemanticVadEn(JsonElement ele endOfUtteranceDetection = EouDetection.DeserializeEouDetection(prop.Value, options); continue; } - if (prop.NameEquals("neg_threshold"u8)) - { - if (prop.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - negThreshold = prop.Value.GetSingle(); - continue; - } if (prop.NameEquals("speech_duration_ms"u8)) { if (prop.Value.ValueKind == JsonValueKind.Null) @@ -206,33 +173,6 @@ internal static AzureSemanticVadEn DeserializeAzureSemanticVadEn(JsonElement ele speechDurationMs = prop.Value.GetInt32(); continue; } - if (prop.NameEquals("window_size"u8)) - { - if (prop.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - windowSize = prop.Value.GetInt32(); - continue; - } - if (prop.NameEquals("distinct_ci_phones"u8)) - { - if (prop.Value.ValueKind == JsonValueKind.Null) - 
{ - continue; - } - distinctCiPhones = prop.Value.GetInt32(); - continue; - } - if (prop.NameEquals("require_vowel"u8)) - { - if (prop.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - requireVowel = prop.Value.GetBoolean(); - continue; - } if (prop.NameEquals("remove_filler_words"u8)) { if (prop.Value.ValueKind == JsonValueKind.Null) @@ -277,61 +217,57 @@ internal static AzureSemanticVadEn DeserializeAzureSemanticVadEn(JsonElement ele additionalBinaryDataProperties.Add(prop.Name, BinaryData.FromString(prop.Value.GetRawText())); } } - return new AzureSemanticVadEn( + return new AzureSemanticVadMultilingualTurnDetection( @type, additionalBinaryDataProperties, threshold, prefixPaddingMs, silenceDurationMs, endOfUtteranceDetection, - negThreshold, speechDurationMs, - windowSize, - distinctCiPhones, - requireVowel, removeFillerWords, languages ?? new ChangeTrackingList(), autoTruncate); } /// The client options for reading and writing models. - BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) => PersistableModelWriteCore(options); + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) => PersistableModelWriteCore(options); /// The client options for reading and writing models. protected override BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options) { - string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; switch (format) { case "J": return ModelReaderWriter.Write(this, options, AzureAIVoiceLiveContext.Default); default: - throw new FormatException($"The model {nameof(AzureSemanticVadEn)} does not support writing '{options.Format}' format."); + throw new FormatException($"The model {nameof(AzureSemanticVadMultilingualTurnDetection)} does not support writing '{options.Format}' format."); } } /// The data to parse. 
/// The client options for reading and writing models. - AzureSemanticVadEn IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) => (AzureSemanticVadEn)PersistableModelCreateCore(data, options); + AzureSemanticVadMultilingualTurnDetection IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) => (AzureSemanticVadMultilingualTurnDetection)PersistableModelCreateCore(data, options); /// The data to parse. /// The client options for reading and writing models. protected override TurnDetection PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options) { - string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; switch (format) { case "J": using (JsonDocument document = JsonDocument.Parse(data)) { - return DeserializeAzureSemanticVadEn(document.RootElement, options); + return DeserializeAzureSemanticVadMultilingualTurnDetection(document.RootElement, options); } default: - throw new FormatException($"The model {nameof(AzureSemanticVadEn)} does not support reading '{options.Format}' format."); + throw new FormatException($"The model {nameof(AzureSemanticVadMultilingualTurnDetection)} does not support reading '{options.Format}' format."); } } /// The client options for reading and writing models. 
- string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; } } diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Generated/AzureSemanticVadMultilingualTurnDetection.cs b/sdk/ai/Azure.AI.VoiceLive/src/Generated/AzureSemanticVadMultilingualTurnDetection.cs new file mode 100644 index 000000000000..ce42a7aad8b9 --- /dev/null +++ b/sdk/ai/Azure.AI.VoiceLive/src/Generated/AzureSemanticVadMultilingualTurnDetection.cs @@ -0,0 +1,60 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.VoiceLive +{ + /// Server Speech Detection (Azure semantic VAD). + public partial class AzureSemanticVadMultilingualTurnDetection : TurnDetection + { + /// Initializes a new instance of . + public AzureSemanticVadMultilingualTurnDetection() : base(TurnDetectionType.AzureSemanticVadMultilingual) + { + Languages = new ChangeTrackingList(); + } + + /// Initializes a new instance of . + /// + /// Keeps track of any properties unknown to the library. + /// + /// Gets or sets the PrefixPaddingMs. + /// Gets or sets the SilenceDurationMs. + /// + /// Gets or sets the SpeechDurationMs. + /// + /// + /// + internal AzureSemanticVadMultilingualTurnDetection(TurnDetectionType @type, IDictionary additionalBinaryDataProperties, float? threshold, int? prefixPaddingMs, int? silenceDurationMs, EouDetection endOfUtteranceDetection, int? speechDurationMs, bool? removeFillerWords, IList languages, bool? 
autoTruncate) : base(@type, additionalBinaryDataProperties) + { + Threshold = threshold; + PrefixPaddingMs = prefixPaddingMs; + SilenceDurationMs = silenceDurationMs; + EndOfUtteranceDetection = endOfUtteranceDetection; + SpeechDurationMs = speechDurationMs; + RemoveFillerWords = removeFillerWords; + Languages = languages; + AutoTruncate = autoTruncate; + } + + /// Gets or sets the Threshold. + public float? Threshold { get; set; } + + /// Gets or sets the EndOfUtteranceDetection. + public EouDetection EndOfUtteranceDetection { get; set; } + + /// Gets or sets the RemoveFillerWords. + public bool? RemoveFillerWords { get; set; } + + /// Gets the Languages. + public IList Languages { get; } + + /// Gets or sets the AutoTruncate. + public bool? AutoTruncate { get; set; } + } +} diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Generated/AzureSemanticVadServer.cs b/sdk/ai/Azure.AI.VoiceLive/src/Generated/AzureSemanticVadServer.cs deleted file mode 100644 index 582542a96024..000000000000 --- a/sdk/ai/Azure.AI.VoiceLive/src/Generated/AzureSemanticVadServer.cs +++ /dev/null @@ -1,89 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; -using System.Collections.Generic; - -namespace Azure.AI.VoiceLive -{ - /// Server Speech Detection (legacy `server_sd` alias). - public partial class AzureSemanticVadServer : TurnDetection - { - /// Initializes a new instance of . - public AzureSemanticVadServer() : base(TurnDetectionType.ServerSd) - { - Languages = new ChangeTrackingList(); - } - - /// Initializes a new instance of . - /// - /// Keeps track of any properties unknown to the library. - /// - /// - /// - /// - /// - /// - /// - /// - /// - /// - /// - /// - internal AzureSemanticVadServer(TurnDetectionType @type, IDictionary additionalBinaryDataProperties, float? threshold, int? prefixPaddingMs, int? silenceDurationMs, EouDetection endOfUtteranceDetection, float? negThreshold, int? 
speechDurationMs, int? windowSize, int? distinctCiPhones, bool? requireVowel, bool? removeFillerWords, IList languages, bool? autoTruncate) : base(@type, additionalBinaryDataProperties) - { - Threshold = threshold; - PrefixPaddingMs = prefixPaddingMs; - SilenceDurationMs = silenceDurationMs; - EndOfUtteranceDetection = endOfUtteranceDetection; - NegThreshold = negThreshold; - SpeechDurationMs = speechDurationMs; - WindowSize = windowSize; - DistinctCiPhones = distinctCiPhones; - RequireVowel = requireVowel; - RemoveFillerWords = removeFillerWords; - Languages = languages; - AutoTruncate = autoTruncate; - } - - /// Gets or sets the Threshold. - public float? Threshold { get; set; } - - /// Gets or sets the PrefixPaddingMs. - public int? PrefixPaddingMs { get; set; } - - /// Gets or sets the SilenceDurationMs. - public int? SilenceDurationMs { get; set; } - - /// Gets or sets the EndOfUtteranceDetection. - public EouDetection EndOfUtteranceDetection { get; set; } - - /// Gets or sets the NegThreshold. - public float? NegThreshold { get; set; } - - /// Gets or sets the SpeechDurationMs. - public int? SpeechDurationMs { get; set; } - - /// Gets or sets the WindowSize. - public int? WindowSize { get; set; } - - /// Gets or sets the DistinctCiPhones. - public int? DistinctCiPhones { get; set; } - - /// Gets or sets the RequireVowel. - public bool? RequireVowel { get; set; } - - /// Gets or sets the RemoveFillerWords. - public bool? RemoveFillerWords { get; set; } - - /// Gets the Languages. - public IList Languages { get; } - - /// Gets or sets the AutoTruncate. - public bool? 
AutoTruncate { get; set; } - } -} diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Generated/AzureSemanticVad.Serialization.cs b/sdk/ai/Azure.AI.VoiceLive/src/Generated/AzureSemanticVadTurnDetection.Serialization.cs similarity index 70% rename from sdk/ai/Azure.AI.VoiceLive/src/Generated/AzureSemanticVad.Serialization.cs rename to sdk/ai/Azure.AI.VoiceLive/src/Generated/AzureSemanticVadTurnDetection.Serialization.cs index a346cbddc23b..071453f8ee41 100644 --- a/sdk/ai/Azure.AI.VoiceLive/src/Generated/AzureSemanticVad.Serialization.cs +++ b/sdk/ai/Azure.AI.VoiceLive/src/Generated/AzureSemanticVadTurnDetection.Serialization.cs @@ -13,11 +13,11 @@ namespace Azure.AI.VoiceLive { /// Server Speech Detection (Azure semantic VAD, default variant). - public partial class AzureSemanticVad : IJsonModel + public partial class AzureSemanticVadTurnDetection : IJsonModel { /// The JSON writer. /// The client options for reading and writing models. - void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) { writer.WriteStartObject(); JsonModelWriteCore(writer, options); @@ -28,10 +28,10 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriter /// The client options for reading and writing models. protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { - string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + string format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") { - throw new FormatException($"The model {nameof(AzureSemanticVad)} does not support writing '{format}' format."); + throw new FormatException($"The model {nameof(AzureSemanticVadTurnDetection)} does not support writing '{format}' format."); } base.JsonModelWriteCore(writer, options); if (Optional.IsDefined(Threshold)) @@ -54,31 +54,11 @@ protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWri writer.WritePropertyName("end_of_utterance_detection"u8); writer.WriteObjectValue(EndOfUtteranceDetection, options); } - if (Optional.IsDefined(NegThreshold)) - { - writer.WritePropertyName("neg_threshold"u8); - writer.WriteNumberValue(NegThreshold.Value); - } if (Optional.IsDefined(SpeechDurationMs)) { writer.WritePropertyName("speech_duration_ms"u8); writer.WriteNumberValue(SpeechDurationMs.Value); } - if (Optional.IsDefined(WindowSize)) - { - writer.WritePropertyName("window_size"u8); - writer.WriteNumberValue(WindowSize.Value); - } - if (Optional.IsDefined(DistinctCiPhones)) - { - writer.WritePropertyName("distinct_ci_phones"u8); - writer.WriteNumberValue(DistinctCiPhones.Value); - } - if (Optional.IsDefined(RequireVowel)) - { - writer.WritePropertyName("require_vowel"u8); - writer.WriteBooleanValue(RequireVowel.Value); - } if (Optional.IsDefined(RemoveFillerWords)) { writer.WritePropertyName("remove_filler_words"u8); @@ -108,24 +88,24 @@ protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWri /// The JSON reader. /// The client options for reading and writing models. - AzureSemanticVad IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) => (AzureSemanticVad)JsonModelCreateCore(ref reader, options); + AzureSemanticVadTurnDetection IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) => (AzureSemanticVadTurnDetection)JsonModelCreateCore(ref reader, options); /// The JSON reader. 
/// The client options for reading and writing models. protected override TurnDetection JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options) { - string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") { - throw new FormatException($"The model {nameof(AzureSemanticVad)} does not support reading '{format}' format."); + throw new FormatException($"The model {nameof(AzureSemanticVadTurnDetection)} does not support reading '{format}' format."); } using JsonDocument document = JsonDocument.ParseValue(ref reader); - return DeserializeAzureSemanticVad(document.RootElement, options); + return DeserializeAzureSemanticVadTurnDetection(document.RootElement, options); } /// The JSON element to deserialize. /// The client options for reading and writing models. - internal static AzureSemanticVad DeserializeAzureSemanticVad(JsonElement element, ModelReaderWriterOptions options) + internal static AzureSemanticVadTurnDetection DeserializeAzureSemanticVadTurnDetection(JsonElement element, ModelReaderWriterOptions options) { if (element.ValueKind == JsonValueKind.Null) { @@ -137,11 +117,7 @@ internal static AzureSemanticVad DeserializeAzureSemanticVad(JsonElement element int? prefixPaddingMs = default; int? silenceDurationMs = default; EouDetection endOfUtteranceDetection = default; - float? negThreshold = default; int? speechDurationMs = default; - int? windowSize = default; - int? distinctCiPhones = default; - bool? requireVowel = default; bool? removeFillerWords = default; IList languages = default; bool? 
autoTruncate = default; @@ -149,7 +125,7 @@ internal static AzureSemanticVad DeserializeAzureSemanticVad(JsonElement element { if (prop.NameEquals("type"u8)) { - @type = prop.Value.GetString().ToTurnDetectionType(); + @type = new TurnDetectionType(prop.Value.GetString()); continue; } if (prop.NameEquals("threshold"u8)) @@ -188,15 +164,6 @@ internal static AzureSemanticVad DeserializeAzureSemanticVad(JsonElement element endOfUtteranceDetection = EouDetection.DeserializeEouDetection(prop.Value, options); continue; } - if (prop.NameEquals("neg_threshold"u8)) - { - if (prop.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - negThreshold = prop.Value.GetSingle(); - continue; - } if (prop.NameEquals("speech_duration_ms"u8)) { if (prop.Value.ValueKind == JsonValueKind.Null) @@ -206,33 +173,6 @@ internal static AzureSemanticVad DeserializeAzureSemanticVad(JsonElement element speechDurationMs = prop.Value.GetInt32(); continue; } - if (prop.NameEquals("window_size"u8)) - { - if (prop.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - windowSize = prop.Value.GetInt32(); - continue; - } - if (prop.NameEquals("distinct_ci_phones"u8)) - { - if (prop.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - distinctCiPhones = prop.Value.GetInt32(); - continue; - } - if (prop.NameEquals("require_vowel"u8)) - { - if (prop.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - requireVowel = prop.Value.GetBoolean(); - continue; - } if (prop.NameEquals("remove_filler_words"u8)) { if (prop.Value.ValueKind == JsonValueKind.Null) @@ -277,61 +217,57 @@ internal static AzureSemanticVad DeserializeAzureSemanticVad(JsonElement element additionalBinaryDataProperties.Add(prop.Name, BinaryData.FromString(prop.Value.GetRawText())); } } - return new AzureSemanticVad( + return new AzureSemanticVadTurnDetection( @type, additionalBinaryDataProperties, threshold, prefixPaddingMs, silenceDurationMs, endOfUtteranceDetection, - negThreshold, speechDurationMs, - windowSize, 
- distinctCiPhones, - requireVowel, removeFillerWords, languages ?? new ChangeTrackingList(), autoTruncate); } /// The client options for reading and writing models. - BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) => PersistableModelWriteCore(options); + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) => PersistableModelWriteCore(options); /// The client options for reading and writing models. protected override BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options) { - string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; switch (format) { case "J": return ModelReaderWriter.Write(this, options, AzureAIVoiceLiveContext.Default); default: - throw new FormatException($"The model {nameof(AzureSemanticVad)} does not support writing '{options.Format}' format."); + throw new FormatException($"The model {nameof(AzureSemanticVadTurnDetection)} does not support writing '{options.Format}' format."); } } /// The data to parse. /// The client options for reading and writing models. - AzureSemanticVad IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) => (AzureSemanticVad)PersistableModelCreateCore(data, options); + AzureSemanticVadTurnDetection IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) => (AzureSemanticVadTurnDetection)PersistableModelCreateCore(data, options); /// The data to parse. /// The client options for reading and writing models. protected override TurnDetection PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options) { - string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + string format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; switch (format) { case "J": using (JsonDocument document = JsonDocument.Parse(data)) { - return DeserializeAzureSemanticVad(document.RootElement, options); + return DeserializeAzureSemanticVadTurnDetection(document.RootElement, options); } default: - throw new FormatException($"The model {nameof(AzureSemanticVad)} does not support reading '{options.Format}' format."); + throw new FormatException($"The model {nameof(AzureSemanticVadTurnDetection)} does not support reading '{options.Format}' format."); } } /// The client options for reading and writing models. - string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; } } diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Generated/AzureSemanticVadTurnDetection.cs b/sdk/ai/Azure.AI.VoiceLive/src/Generated/AzureSemanticVadTurnDetection.cs new file mode 100644 index 000000000000..c1e037bc0d3f --- /dev/null +++ b/sdk/ai/Azure.AI.VoiceLive/src/Generated/AzureSemanticVadTurnDetection.cs @@ -0,0 +1,60 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.VoiceLive +{ + /// Server Speech Detection (Azure semantic VAD, default variant). + public partial class AzureSemanticVadTurnDetection : TurnDetection + { + /// Initializes a new instance of . + public AzureSemanticVadTurnDetection() : base(TurnDetectionType.AzureSemanticVad) + { + Languages = new ChangeTrackingList(); + } + + /// Initializes a new instance of . + /// + /// Keeps track of any properties unknown to the library. + /// + /// Gets or sets the PrefixPaddingMs. + /// Gets or sets the SilenceDurationMs. + /// + /// Gets or sets the SpeechDurationMs. 
+ /// + /// + /// + internal AzureSemanticVadTurnDetection(TurnDetectionType @type, IDictionary additionalBinaryDataProperties, float? threshold, int? prefixPaddingMs, int? silenceDurationMs, EouDetection endOfUtteranceDetection, int? speechDurationMs, bool? removeFillerWords, IList languages, bool? autoTruncate) : base(@type, additionalBinaryDataProperties) + { + Threshold = threshold; + PrefixPaddingMs = prefixPaddingMs; + SilenceDurationMs = silenceDurationMs; + EndOfUtteranceDetection = endOfUtteranceDetection; + SpeechDurationMs = speechDurationMs; + RemoveFillerWords = removeFillerWords; + Languages = languages; + AutoTruncate = autoTruncate; + } + + /// Gets or sets the Threshold. + public float? Threshold { get; set; } + + /// Gets or sets the EndOfUtteranceDetection. + public EouDetection EndOfUtteranceDetection { get; set; } + + /// Gets or sets the RemoveFillerWords. + public bool? RemoveFillerWords { get; set; } + + /// Gets the Languages. + public IList Languages { get; } + + /// Gets or sets the AutoTruncate. + public bool? AutoTruncate { get; set; } + } +} diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Generated/AzureVoice.Serialization.cs b/sdk/ai/Azure.AI.VoiceLive/src/Generated/AzureVoice.Serialization.cs index feb13543d55d..dc117d99104b 100644 --- a/sdk/ai/Azure.AI.VoiceLive/src/Generated/AzureVoice.Serialization.cs +++ b/sdk/ai/Azure.AI.VoiceLive/src/Generated/AzureVoice.Serialization.cs @@ -13,7 +13,7 @@ namespace Azure.AI.VoiceLive { /// /// Base for Azure voice configurations. - /// Please note this is the abstract base class. The derived classes available for instantiation are: , , , and . + /// Please note this is the abstract base class. The derived classes available for instantiation are: , , and . 
/// [PersistableModelProxy(typeof(UnknownAzureVoice))] public abstract partial class AzureVoice : IJsonModel @@ -93,8 +93,6 @@ internal static AzureVoice DeserializeAzureVoice(JsonElement element, ModelReade return AzureCustomVoice.DeserializeAzureCustomVoice(element, options); case "azure-standard": return AzureStandardVoice.DeserializeAzureStandardVoice(element, options); - case "azure-platform": - return AzurePlatformVoice.DeserializeAzurePlatformVoice(element, options); case "azure-personal": return AzurePersonalVoice.DeserializeAzurePersonalVoice(element, options); } diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Generated/AzureVoice.cs b/sdk/ai/Azure.AI.VoiceLive/src/Generated/AzureVoice.cs index 96fbbb90c5cc..a603565dd918 100644 --- a/sdk/ai/Azure.AI.VoiceLive/src/Generated/AzureVoice.cs +++ b/sdk/ai/Azure.AI.VoiceLive/src/Generated/AzureVoice.cs @@ -12,7 +12,7 @@ namespace Azure.AI.VoiceLive { /// /// Base for Azure voice configurations. - /// Please note this is the abstract base class. The derived classes available for instantiation are: , , , and . + /// Please note this is the abstract base class. The derived classes available for instantiation are: , , and . /// public abstract partial class AzureVoice { diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Generated/EmotionCandidate.Serialization.cs b/sdk/ai/Azure.AI.VoiceLive/src/Generated/EmotionCandidate.Serialization.cs deleted file mode 100644 index 060b86f90ec9..000000000000 --- a/sdk/ai/Azure.AI.VoiceLive/src/Generated/EmotionCandidate.Serialization.cs +++ /dev/null @@ -1,150 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; -using System.ClientModel.Primitives; -using System.Collections.Generic; -using System.Text.Json; - -namespace Azure.AI.VoiceLive -{ - /// The EmotionCandidate. - public partial class EmotionCandidate : IJsonModel - { - /// Initializes a new instance of for deserialization. 
- internal EmotionCandidate() - { - } - - /// The JSON writer. - /// The client options for reading and writing models. - void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) - { - writer.WriteStartObject(); - JsonModelWriteCore(writer, options); - writer.WriteEndObject(); - } - - /// The JSON writer. - /// The client options for reading and writing models. - protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) - { - string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; - if (format != "J") - { - throw new FormatException($"The model {nameof(EmotionCandidate)} does not support writing '{format}' format."); - } - writer.WritePropertyName("emotion"u8); - writer.WriteStringValue(Emotion); - writer.WritePropertyName("confidence"u8); - writer.WriteNumberValue(Confidence); - if (options.Format != "W" && _additionalBinaryDataProperties != null) - { - foreach (var item in _additionalBinaryDataProperties) - { - writer.WritePropertyName(item.Key); -#if NET6_0_OR_GREATER - writer.WriteRawValue(item.Value); -#else - using (JsonDocument document = JsonDocument.Parse(item.Value)) - { - JsonSerializer.Serialize(writer, document.RootElement); - } -#endif - } - } - } - - /// The JSON reader. - /// The client options for reading and writing models. - EmotionCandidate IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) => JsonModelCreateCore(ref reader, options); - - /// The JSON reader. - /// The client options for reading and writing models. - protected virtual EmotionCandidate JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options) - { - string format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; - if (format != "J") - { - throw new FormatException($"The model {nameof(EmotionCandidate)} does not support reading '{format}' format."); - } - using JsonDocument document = JsonDocument.ParseValue(ref reader); - return DeserializeEmotionCandidate(document.RootElement, options); - } - - /// The JSON element to deserialize. - /// The client options for reading and writing models. - internal static EmotionCandidate DeserializeEmotionCandidate(JsonElement element, ModelReaderWriterOptions options) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - string emotion = default; - float confidence = default; - IDictionary additionalBinaryDataProperties = new ChangeTrackingDictionary(); - foreach (var prop in element.EnumerateObject()) - { - if (prop.NameEquals("emotion"u8)) - { - emotion = prop.Value.GetString(); - continue; - } - if (prop.NameEquals("confidence"u8)) - { - confidence = prop.Value.GetSingle(); - continue; - } - if (options.Format != "W") - { - additionalBinaryDataProperties.Add(prop.Name, BinaryData.FromString(prop.Value.GetRawText())); - } - } - return new EmotionCandidate(emotion, confidence, additionalBinaryDataProperties); - } - - /// The client options for reading and writing models. - BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) => PersistableModelWriteCore(options); - - /// The client options for reading and writing models. - protected virtual BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options) - { - string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; - switch (format) - { - case "J": - return ModelReaderWriter.Write(this, options, AzureAIVoiceLiveContext.Default); - default: - throw new FormatException($"The model {nameof(EmotionCandidate)} does not support writing '{options.Format}' format."); - } - } - - /// The data to parse. 
- /// The client options for reading and writing models. - EmotionCandidate IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) => PersistableModelCreateCore(data, options); - - /// The data to parse. - /// The client options for reading and writing models. - protected virtual EmotionCandidate PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options) - { - string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; - switch (format) - { - case "J": - using (JsonDocument document = JsonDocument.Parse(data)) - { - return DeserializeEmotionCandidate(document.RootElement, options); - } - default: - throw new FormatException($"The model {nameof(EmotionCandidate)} does not support reading '{options.Format}' format."); - } - } - - /// The client options for reading and writing models. - string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; - } -} diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Generated/EmotionCandidate.cs b/sdk/ai/Azure.AI.VoiceLive/src/Generated/EmotionCandidate.cs deleted file mode 100644 index bd8cc975738c..000000000000 --- a/sdk/ai/Azure.AI.VoiceLive/src/Generated/EmotionCandidate.cs +++ /dev/null @@ -1,45 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; -using System.Collections.Generic; - -namespace Azure.AI.VoiceLive -{ - /// The EmotionCandidate. - public partial class EmotionCandidate - { - /// Keeps track of any properties unknown to the library. - private protected readonly IDictionary _additionalBinaryDataProperties; - - /// Initializes a new instance of . - /// - /// - internal EmotionCandidate(string emotion, float confidence) - { - Emotion = emotion; - Confidence = confidence; - } - - /// Initializes a new instance of . - /// - /// - /// Keeps track of any properties unknown to the library. 
- internal EmotionCandidate(string emotion, float confidence, IDictionary additionalBinaryDataProperties) - { - Emotion = emotion; - Confidence = confidence; - _additionalBinaryDataProperties = additionalBinaryDataProperties; - } - - /// Gets the Emotion. - public string Emotion { get; } - - /// Gets the Confidence. - public float Confidence { get; } - } -} diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Generated/EouDetection.Serialization.cs b/sdk/ai/Azure.AI.VoiceLive/src/Generated/EouDetection.Serialization.cs index 5acad2e287a2..2d48b4f20c45 100644 --- a/sdk/ai/Azure.AI.VoiceLive/src/Generated/EouDetection.Serialization.cs +++ b/sdk/ai/Azure.AI.VoiceLive/src/Generated/EouDetection.Serialization.cs @@ -13,7 +13,7 @@ namespace Azure.AI.VoiceLive { /// /// Top-level union for end-of-utterance (EOU) semantic detection configuration. - /// Please note this is the abstract base class. The derived classes available for instantiation are: , , and . + /// Please note this is the abstract base class. The derived classes available for instantiation are: , , and . 
/// [PersistableModelProxy(typeof(UnknownEouDetection))] public abstract partial class EouDetection : IJsonModel @@ -90,11 +90,11 @@ internal static EouDetection DeserializeEouDetection(JsonElement element, ModelR switch (discriminator.GetString()) { case "semantic_detection_v1": - return AzureSemanticDetection.DeserializeAzureSemanticDetection(element, options); + return AzureSemanticEouDetection.DeserializeAzureSemanticEouDetection(element, options); case "semantic_detection_v1_en": - return AzureSemanticDetectionEn.DeserializeAzureSemanticDetectionEn(element, options); + return AzureSemanticEnEouDetection.DeserializeAzureSemanticEnEouDetection(element, options); case "semantic_detection_v1_multilingual": - return AzureSemanticDetectionMultilingual.DeserializeAzureSemanticDetectionMultilingual(element, options); + return AzureSemanticMultilingualEouDetection.DeserializeAzureSemanticMultilingualEouDetection(element, options); } } return UnknownEouDetection.DeserializeUnknownEouDetection(element, options); diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Generated/EouDetection.cs b/sdk/ai/Azure.AI.VoiceLive/src/Generated/EouDetection.cs index 1d015a20e9be..ee4faafb3b5d 100644 --- a/sdk/ai/Azure.AI.VoiceLive/src/Generated/EouDetection.cs +++ b/sdk/ai/Azure.AI.VoiceLive/src/Generated/EouDetection.cs @@ -12,7 +12,7 @@ namespace Azure.AI.VoiceLive { /// /// Top-level union for end-of-utterance (EOU) semantic detection configuration. - /// Please note this is the abstract base class. The derived classes available for instantiation are: , , and . + /// Please note this is the abstract base class. The derived classes available for instantiation are: , , and . 
/// public abstract partial class EouDetection { diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Generated/ForceModelsRequest.Serialization.cs b/sdk/ai/Azure.AI.VoiceLive/src/Generated/ForceModelsRequest.Serialization.cs deleted file mode 100644 index bf65afcfd288..000000000000 --- a/sdk/ai/Azure.AI.VoiceLive/src/Generated/ForceModelsRequest.Serialization.cs +++ /dev/null @@ -1,142 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; -using System.ClientModel.Primitives; -using System.Collections.Generic; -using System.Text.Json; -using Azure.Core; - -namespace Azure.AI.VoiceLive -{ - internal partial class ForceModelsRequest : IJsonModel - { - /// Initializes a new instance of for deserialization. - internal ForceModelsRequest() - { - } - - /// The JSON writer. - /// The client options for reading and writing models. - void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) - { - writer.WriteStartObject(); - JsonModelWriteCore(writer, options); - writer.WriteEndObject(); - } - - /// The JSON writer. - /// The client options for reading and writing models. - protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) - { - string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; - if (format != "J") - { - throw new FormatException($"The model {nameof(ForceModelsRequest)} does not support writing '{format}' format."); - } - writer.WritePropertyName("event"u8); -#if NET6_0_OR_GREATER - writer.WriteRawValue(Event); -#else - using (JsonDocument document = JsonDocument.Parse(Event)) - { - JsonSerializer.Serialize(writer, document.RootElement); - } -#endif - } - - /// The JSON reader. - /// The client options for reading and writing models. 
- ForceModelsRequest IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) => JsonModelCreateCore(ref reader, options); - - /// The JSON reader. - /// The client options for reading and writing models. - protected virtual ForceModelsRequest JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options) - { - string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; - if (format != "J") - { - throw new FormatException($"The model {nameof(ForceModelsRequest)} does not support reading '{format}' format."); - } - using JsonDocument document = JsonDocument.ParseValue(ref reader); - return DeserializeForceModelsRequest(document.RootElement, options); - } - - /// The JSON element to deserialize. - /// The client options for reading and writing models. - internal static ForceModelsRequest DeserializeForceModelsRequest(JsonElement element, ModelReaderWriterOptions options) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - BinaryData @event = default; - IDictionary additionalBinaryDataProperties = new ChangeTrackingDictionary(); - foreach (var prop in element.EnumerateObject()) - { - if (prop.NameEquals("event"u8)) - { - @event = BinaryData.FromString(prop.Value.GetRawText()); - continue; - } - } - return new ForceModelsRequest(@event, additionalBinaryDataProperties); - } - - /// The client options for reading and writing models. - BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) => PersistableModelWriteCore(options); - - /// The client options for reading and writing models. - protected virtual BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options) - { - string format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; - switch (format) - { - case "J": - return ModelReaderWriter.Write(this, options, AzureAIVoiceLiveContext.Default); - default: - throw new FormatException($"The model {nameof(ForceModelsRequest)} does not support writing '{options.Format}' format."); - } - } - - /// The data to parse. - /// The client options for reading and writing models. - ForceModelsRequest IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) => PersistableModelCreateCore(data, options); - - /// The data to parse. - /// The client options for reading and writing models. - protected virtual ForceModelsRequest PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options) - { - string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; - switch (format) - { - case "J": - using (JsonDocument document = JsonDocument.Parse(data)) - { - return DeserializeForceModelsRequest(document.RootElement, options); - } - default: - throw new FormatException($"The model {nameof(ForceModelsRequest)} does not support reading '{options.Format}' format."); - } - } - - /// The client options for reading and writing models. - string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; - - /// The to serialize into . 
- public static implicit operator RequestContent(ForceModelsRequest forceModelsRequest) - { - if (forceModelsRequest == null) - { - return null; - } - Utf8JsonRequestContent content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(forceModelsRequest, ModelSerializationExtensions.WireOptions); - return content; - } - } -} diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Generated/ForceModelsRequest.cs b/sdk/ai/Azure.AI.VoiceLive/src/Generated/ForceModelsRequest.cs deleted file mode 100644 index 75e7ced4e03b..000000000000 --- a/sdk/ai/Azure.AI.VoiceLive/src/Generated/ForceModelsRequest.cs +++ /dev/null @@ -1,13 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -namespace Azure.AI.VoiceLive -{ - internal partial class ForceModelsRequest - { - } -} diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Generated/FunctionCallItem.cs b/sdk/ai/Azure.AI.VoiceLive/src/Generated/FunctionCallItem.cs index 6c1aa6ff3590..f7cbe38b5e4e 100644 --- a/sdk/ai/Azure.AI.VoiceLive/src/Generated/FunctionCallItem.cs +++ b/sdk/ai/Azure.AI.VoiceLive/src/Generated/FunctionCallItem.cs @@ -45,14 +45,14 @@ internal FunctionCallItem(ItemType @type, string id, IDictionary Gets the Name. - public string Name { get; } + /// Gets or sets the Name. + public string Name { get; set; } - /// Gets the CallId. - public string CallId { get; } + /// Gets or sets the CallId. + public string CallId { get; set; } - /// Gets the Arguments. - public string Arguments { get; } + /// Gets or sets the Arguments. + public string Arguments { get; set; } /// Gets or sets the Status. public ItemParamStatus? 
Status { get; set; } diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Generated/FunctionCallOutputItem.cs b/sdk/ai/Azure.AI.VoiceLive/src/Generated/FunctionCallOutputItem.cs index 7f78c9bdf427..486c68397d54 100644 --- a/sdk/ai/Azure.AI.VoiceLive/src/Generated/FunctionCallOutputItem.cs +++ b/sdk/ai/Azure.AI.VoiceLive/src/Generated/FunctionCallOutputItem.cs @@ -40,11 +40,11 @@ internal FunctionCallOutputItem(ItemType @type, string id, IDictionary Gets the CallId. - public string CallId { get; } + /// Gets or sets the CallId. + public string CallId { get; set; } - /// Gets the Output. - public string Output { get; } + /// Gets or sets the Output. + public string Output { get; set; } /// Gets or sets the Status. public ItemParamStatus? Status { get; set; } diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Generated/InputAudio.Serialization.cs b/sdk/ai/Azure.AI.VoiceLive/src/Generated/InputAudio.Serialization.cs deleted file mode 100644 index 2e18df471b8e..000000000000 --- a/sdk/ai/Azure.AI.VoiceLive/src/Generated/InputAudio.Serialization.cs +++ /dev/null @@ -1,174 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; -using System.ClientModel.Primitives; -using System.Collections.Generic; -using System.Text.Json; - -namespace Azure.AI.VoiceLive -{ - /// Configuration for client audio input. Used to specify the audio model and optional phrase list. - public partial class InputAudio : IJsonModel - { - /// The JSON writer. - /// The client options for reading and writing models. - void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) - { - writer.WriteStartObject(); - JsonModelWriteCore(writer, options); - writer.WriteEndObject(); - } - - /// The JSON writer. - /// The client options for reading and writing models. - protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) - { - string format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; - if (format != "J") - { - throw new FormatException($"The model {nameof(InputAudio)} does not support writing '{format}' format."); - } - writer.WritePropertyName("model"u8); - writer.WriteStringValue(Model); - if (Optional.IsCollectionDefined(PhraseList)) - { - writer.WritePropertyName("phrase_list"u8); - writer.WriteStartArray(); - foreach (string item in PhraseList) - { - if (item == null) - { - writer.WriteNullValue(); - continue; - } - writer.WriteStringValue(item); - } - writer.WriteEndArray(); - } - if (options.Format != "W" && _additionalBinaryDataProperties != null) - { - foreach (var item in _additionalBinaryDataProperties) - { - writer.WritePropertyName(item.Key); -#if NET6_0_OR_GREATER - writer.WriteRawValue(item.Value); -#else - using (JsonDocument document = JsonDocument.Parse(item.Value)) - { - JsonSerializer.Serialize(writer, document.RootElement); - } -#endif - } - } - } - - /// The JSON reader. - /// The client options for reading and writing models. - InputAudio IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) => JsonModelCreateCore(ref reader, options); - - /// The JSON reader. - /// The client options for reading and writing models. - protected virtual InputAudio JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options) - { - string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; - if (format != "J") - { - throw new FormatException($"The model {nameof(InputAudio)} does not support reading '{format}' format."); - } - using JsonDocument document = JsonDocument.ParseValue(ref reader); - return DeserializeInputAudio(document.RootElement, options); - } - - /// The JSON element to deserialize. - /// The client options for reading and writing models. 
- internal static InputAudio DeserializeInputAudio(JsonElement element, ModelReaderWriterOptions options) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - string model = default; - IList phraseList = default; - IDictionary additionalBinaryDataProperties = new ChangeTrackingDictionary(); - foreach (var prop in element.EnumerateObject()) - { - if (prop.NameEquals("model"u8)) - { - model = prop.Value.GetString(); - continue; - } - if (prop.NameEquals("phrase_list"u8)) - { - if (prop.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - List array = new List(); - foreach (var item in prop.Value.EnumerateArray()) - { - if (item.ValueKind == JsonValueKind.Null) - { - array.Add(null); - } - else - { - array.Add(item.GetString()); - } - } - phraseList = array; - continue; - } - if (options.Format != "W") - { - additionalBinaryDataProperties.Add(prop.Name, BinaryData.FromString(prop.Value.GetRawText())); - } - } - return new InputAudio(model, phraseList ?? new ChangeTrackingList(), additionalBinaryDataProperties); - } - - /// The client options for reading and writing models. - BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) => PersistableModelWriteCore(options); - - /// The client options for reading and writing models. - protected virtual BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options) - { - string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; - switch (format) - { - case "J": - return ModelReaderWriter.Write(this, options, AzureAIVoiceLiveContext.Default); - default: - throw new FormatException($"The model {nameof(InputAudio)} does not support writing '{options.Format}' format."); - } - } - - /// The data to parse. - /// The client options for reading and writing models. - InputAudio IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) => PersistableModelCreateCore(data, options); - - /// The data to parse. 
- /// The client options for reading and writing models. - protected virtual InputAudio PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options) - { - string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; - switch (format) - { - case "J": - using (JsonDocument document = JsonDocument.Parse(data)) - { - return DeserializeInputAudio(document.RootElement, options); - } - default: - throw new FormatException($"The model {nameof(InputAudio)} does not support reading '{options.Format}' format."); - } - } - - /// The client options for reading and writing models. - string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; - } -} diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Generated/InputAudio.cs b/sdk/ai/Azure.AI.VoiceLive/src/Generated/InputAudio.cs deleted file mode 100644 index 3a2adf993df8..000000000000 --- a/sdk/ai/Azure.AI.VoiceLive/src/Generated/InputAudio.cs +++ /dev/null @@ -1,42 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; -using System.Collections.Generic; - -namespace Azure.AI.VoiceLive -{ - /// Configuration for client audio input. Used to specify the audio model and optional phrase list. - public partial class InputAudio - { - /// Keeps track of any properties unknown to the library. - private protected readonly IDictionary _additionalBinaryDataProperties; - - /// Initializes a new instance of . - public InputAudio() - { - PhraseList = new ChangeTrackingList(); - } - - /// Initializes a new instance of . - /// The name of the model to use for input audio (currently only 'azure-standard' is supported). - /// Optional list of phrases to bias the speech recognition engine. - /// Keeps track of any properties unknown to the library. 
- internal InputAudio(string model, IList phraseList, IDictionary additionalBinaryDataProperties) - { - Model = model; - PhraseList = phraseList; - _additionalBinaryDataProperties = additionalBinaryDataProperties; - } - - /// The name of the model to use for input audio (currently only 'azure-standard' is supported). - public string Model { get; } = "azure-standard"; - - /// Optional list of phrases to bias the speech recognition engine. - public IList PhraseList { get; } - } -} diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Generated/InputAudioContentPart.cs b/sdk/ai/Azure.AI.VoiceLive/src/Generated/InputAudioContentPart.cs index 8b7f30265b15..807a29c8375a 100644 --- a/sdk/ai/Azure.AI.VoiceLive/src/Generated/InputAudioContentPart.cs +++ b/sdk/ai/Azure.AI.VoiceLive/src/Generated/InputAudioContentPart.cs @@ -34,8 +34,8 @@ internal InputAudioContentPart(string @type, IDictionary add Transcript = transcript; } - /// Gets the Audio. - public string Audio { get; } + /// Gets or sets the Audio. + public string Audio { get; set; } /// Gets or sets the Transcript. public string Transcript { get; set; } diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Generated/AudioFormat.cs b/sdk/ai/Azure.AI.VoiceLive/src/Generated/InputAudioFormat.cs similarity index 51% rename from sdk/ai/Azure.AI.VoiceLive/src/Generated/AudioFormat.cs rename to sdk/ai/Azure.AI.VoiceLive/src/Generated/InputAudioFormat.cs index b80be462758b..8686f42542d7 100644 --- a/sdk/ai/Azure.AI.VoiceLive/src/Generated/AudioFormat.cs +++ b/sdk/ai/Azure.AI.VoiceLive/src/Generated/InputAudioFormat.cs @@ -11,17 +11,17 @@ namespace Azure.AI.VoiceLive { /// - public readonly partial struct AudioFormat : IEquatable + public readonly partial struct InputAudioFormat : IEquatable { private readonly string _value; private const string Pcm16Value = "pcm16"; private const string G711UlawValue = "g711_ulaw"; private const string G711AlawValue = "g711_alaw"; - /// Initializes a new instance of . + /// Initializes a new instance of . 
/// The value. /// is null. - public AudioFormat(string value) + public InputAudioFormat(string value) { Argument.AssertNotNull(value, nameof(value)); @@ -29,38 +29,38 @@ public AudioFormat(string value) } /// Gets the Pcm16. - public static AudioFormat Pcm16 { get; } = new AudioFormat(Pcm16Value); + public static InputAudioFormat Pcm16 { get; } = new InputAudioFormat(Pcm16Value); /// Gets the G711Ulaw. - public static AudioFormat G711Ulaw { get; } = new AudioFormat(G711UlawValue); + public static InputAudioFormat G711Ulaw { get; } = new InputAudioFormat(G711UlawValue); /// Gets the G711Alaw. - public static AudioFormat G711Alaw { get; } = new AudioFormat(G711AlawValue); + public static InputAudioFormat G711Alaw { get; } = new InputAudioFormat(G711AlawValue); - /// Determines if two values are the same. + /// Determines if two values are the same. /// The left value to compare. /// The right value to compare. - public static bool operator ==(AudioFormat left, AudioFormat right) => left.Equals(right); + public static bool operator ==(InputAudioFormat left, InputAudioFormat right) => left.Equals(right); - /// Determines if two values are not the same. + /// Determines if two values are not the same. /// The left value to compare. /// The right value to compare. - public static bool operator !=(AudioFormat left, AudioFormat right) => !left.Equals(right); + public static bool operator !=(InputAudioFormat left, InputAudioFormat right) => !left.Equals(right); - /// Converts a string to a . + /// Converts a string to a . /// The value. - public static implicit operator AudioFormat(string value) => new AudioFormat(value); + public static implicit operator InputAudioFormat(string value) => new InputAudioFormat(value); - /// Converts a string to a . + /// Converts a string to a . /// The value. - public static implicit operator AudioFormat?(string value) => value == null ? 
null : new AudioFormat(value); + public static implicit operator InputAudioFormat?(string value) => value == null ? null : new InputAudioFormat(value); /// [EditorBrowsable(EditorBrowsableState.Never)] - public override bool Equals(object obj) => obj is AudioFormat other && Equals(other); + public override bool Equals(object obj) => obj is InputAudioFormat other && Equals(other); /// - public bool Equals(AudioFormat other) => string.Equals(_value, other._value, StringComparison.InvariantCultureIgnoreCase); + public bool Equals(InputAudioFormat other) => string.Equals(_value, other._value, StringComparison.InvariantCultureIgnoreCase); /// [EditorBrowsable(EditorBrowsableState.Never)] diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Generated/InputTextContentPart.cs b/sdk/ai/Azure.AI.VoiceLive/src/Generated/InputTextContentPart.cs index b70e3d1f4353..feec2ec454aa 100644 --- a/sdk/ai/Azure.AI.VoiceLive/src/Generated/InputTextContentPart.cs +++ b/sdk/ai/Azure.AI.VoiceLive/src/Generated/InputTextContentPart.cs @@ -32,7 +32,7 @@ internal InputTextContentPart(string @type, IDictionary addi Text = text; } - /// Gets the Text. - public string Text { get; } + /// Gets or sets the Text. + public string Text { get; set; } } } diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Generated/Internal/ClientPipelineExtensions.cs b/sdk/ai/Azure.AI.VoiceLive/src/Generated/Internal/ClientPipelineExtensions.cs deleted file mode 100644 index d85dc928e30a..000000000000 --- a/sdk/ai/Azure.AI.VoiceLive/src/Generated/Internal/ClientPipelineExtensions.cs +++ /dev/null @@ -1,72 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
- -// - -#nullable disable - -using System.Threading; -using System.Threading.Tasks; -using Azure; -using Azure.Core; -using Azure.Core.Pipeline; - -namespace Azure.AI.VoiceLive -{ - internal static partial class ClientPipelineExtensions - { - public static async ValueTask ProcessMessageAsync(this HttpPipeline pipeline, HttpMessage message, RequestContext context) - { - (CancellationToken userCancellationToken, ErrorOptions statusOption) = context.Parse(); - await pipeline.SendAsync(message, userCancellationToken).ConfigureAwait(false); - - if (message.Response.IsError && (context?.ErrorOptions & ErrorOptions.NoThrow) != ErrorOptions.NoThrow) - { - throw new RequestFailedException(message.Response); - } - - return message.Response; - } - - public static Response ProcessMessage(this HttpPipeline pipeline, HttpMessage message, RequestContext context) - { - (CancellationToken userCancellationToken, ErrorOptions statusOption) = context.Parse(); - pipeline.Send(message, userCancellationToken); - - if (message.Response.IsError && (context?.ErrorOptions & ErrorOptions.NoThrow) != ErrorOptions.NoThrow) - { - throw new RequestFailedException(message.Response); - } - - return message.Response; - } - - public static async ValueTask> ProcessHeadAsBoolMessageAsync(this HttpPipeline pipeline, HttpMessage message, RequestContext context) - { - Response response = await pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); - switch (response.Status) - { - case >= 200 and < 300: - return Response.FromValue(true, response); - case >= 400 and < 500: - return Response.FromValue(false, response); - default: - return new ErrorResult(response, new RequestFailedException(response)); - } - } - - public static Response ProcessHeadAsBoolMessage(this HttpPipeline pipeline, HttpMessage message, RequestContext context) - { - Response response = pipeline.ProcessMessage(message, context); - switch (response.Status) - { - case >= 200 and < 300: - return Response.FromValue(true, 
response); - case >= 400 and < 500: - return Response.FromValue(false, response); - default: - return new ErrorResult(response, new RequestFailedException(response)); - } - } - } -} diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Generated/Internal/ErrorResult.cs b/sdk/ai/Azure.AI.VoiceLive/src/Generated/Internal/ErrorResult.cs deleted file mode 100644 index 7aaf11e39c53..000000000000 --- a/sdk/ai/Azure.AI.VoiceLive/src/Generated/Internal/ErrorResult.cs +++ /dev/null @@ -1,32 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using Azure; - -namespace Azure.AI.VoiceLive -{ - internal partial class ErrorResult : Response - { - private readonly Response _response; - private readonly RequestFailedException _exception; - - public ErrorResult(Response response, RequestFailedException exception) - { - _response = response; - _exception = exception; - } - - /// Gets the Value. - public override T Value => throw _exception; - - /// - public override Response GetRawResponse() - { - return _response; - } - } -} diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Generated/Internal/RequestContextExtensions.cs b/sdk/ai/Azure.AI.VoiceLive/src/Generated/Internal/RequestContextExtensions.cs deleted file mode 100644 index 0782995f019d..000000000000 --- a/sdk/ai/Azure.AI.VoiceLive/src/Generated/Internal/RequestContextExtensions.cs +++ /dev/null @@ -1,26 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; -using System.Threading; -using Azure; - -namespace Azure.AI.VoiceLive -{ - internal static partial class RequestContextExtensions - { - /// The request context, which can override default behaviors of the client pipeline on a per-call basis. 
- public static ValueTuple Parse(this RequestContext context) - { - if (context == null) - { - return (CancellationToken.None, ErrorOptions.Default); - } - return (context.CancellationToken, context.ErrorOptions); - } - } -} diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Generated/Internal/Utf8JsonRequestContent.cs b/sdk/ai/Azure.AI.VoiceLive/src/Generated/Internal/Utf8JsonRequestContent.cs deleted file mode 100644 index e996ca7e264c..000000000000 --- a/sdk/ai/Azure.AI.VoiceLive/src/Generated/Internal/Utf8JsonRequestContent.cs +++ /dev/null @@ -1,61 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.IO; -using System.Text.Json; -using System.Threading; -using System.Threading.Tasks; -using Azure.Core; - -namespace Azure.AI.VoiceLive -{ - internal partial class Utf8JsonRequestContent : RequestContent - { - private readonly MemoryStream _stream; - private readonly RequestContent _content; - - public Utf8JsonRequestContent() - { - _stream = new MemoryStream(); - _content = Create(_stream); - JsonWriter = new Utf8JsonWriter(_stream); - } - - /// Gets the JsonWriter. - public Utf8JsonWriter JsonWriter { get; } - - /// The stream containing the data to be written. - /// The cancellation token to use. - public override async Task WriteToAsync(Stream stream, CancellationToken cancellationToken = default) - { - await JsonWriter.FlushAsync().ConfigureAwait(false); - await _content.WriteToAsync(stream, cancellationToken).ConfigureAwait(false); - } - - /// The stream containing the data to be written. - /// The cancellation token to use. 
- public override void WriteTo(Stream stream, CancellationToken cancellationToken = default) - { - JsonWriter.Flush(); - _content.WriteTo(stream, cancellationToken); - } - - /// - public override bool TryComputeLength(out long length) - { - length = JsonWriter.BytesCommitted + JsonWriter.BytesPending; - return true; - } - - public override void Dispose() - { - JsonWriter.Dispose(); - _content.Dispose(); - _stream.Dispose(); - } - } -} diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Generated/LlmVoice.Serialization.cs b/sdk/ai/Azure.AI.VoiceLive/src/Generated/LlmVoice.Serialization.cs deleted file mode 100644 index 79197c17d1cf..000000000000 --- a/sdk/ai/Azure.AI.VoiceLive/src/Generated/LlmVoice.Serialization.cs +++ /dev/null @@ -1,150 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; -using System.ClientModel.Primitives; -using System.Collections.Generic; -using System.Text.Json; - -namespace Azure.AI.VoiceLive -{ - /// Voice configuration for LLM (Large Language Model) voices. - public partial class LlmVoice : IJsonModel - { - /// Initializes a new instance of for deserialization. - internal LlmVoice() - { - } - - /// The JSON writer. - /// The client options for reading and writing models. - void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) - { - writer.WriteStartObject(); - JsonModelWriteCore(writer, options); - writer.WriteEndObject(); - } - - /// The JSON writer. - /// The client options for reading and writing models. - protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) - { - string format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; - if (format != "J") - { - throw new FormatException($"The model {nameof(LlmVoice)} does not support writing '{format}' format."); - } - writer.WritePropertyName("type"u8); - writer.WriteStringValue(Type); - writer.WritePropertyName("name"u8); - writer.WriteStringValue(Name.ToString()); - if (options.Format != "W" && _additionalBinaryDataProperties != null) - { - foreach (var item in _additionalBinaryDataProperties) - { - writer.WritePropertyName(item.Key); -#if NET6_0_OR_GREATER - writer.WriteRawValue(item.Value); -#else - using (JsonDocument document = JsonDocument.Parse(item.Value)) - { - JsonSerializer.Serialize(writer, document.RootElement); - } -#endif - } - } - } - - /// The JSON reader. - /// The client options for reading and writing models. - LlmVoice IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) => JsonModelCreateCore(ref reader, options); - - /// The JSON reader. - /// The client options for reading and writing models. - protected virtual LlmVoice JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options) - { - string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; - if (format != "J") - { - throw new FormatException($"The model {nameof(LlmVoice)} does not support reading '{format}' format."); - } - using JsonDocument document = JsonDocument.ParseValue(ref reader); - return DeserializeLlmVoice(document.RootElement, options); - } - - /// The JSON element to deserialize. - /// The client options for reading and writing models. 
- internal static LlmVoice DeserializeLlmVoice(JsonElement element, ModelReaderWriterOptions options) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - string @type = default; - LlmVoiceName name = default; - IDictionary additionalBinaryDataProperties = new ChangeTrackingDictionary(); - foreach (var prop in element.EnumerateObject()) - { - if (prop.NameEquals("type"u8)) - { - @type = prop.Value.GetString(); - continue; - } - if (prop.NameEquals("name"u8)) - { - name = new LlmVoiceName(prop.Value.GetString()); - continue; - } - if (options.Format != "W") - { - additionalBinaryDataProperties.Add(prop.Name, BinaryData.FromString(prop.Value.GetRawText())); - } - } - return new LlmVoice(@type, name, additionalBinaryDataProperties); - } - - /// The client options for reading and writing models. - BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) => PersistableModelWriteCore(options); - - /// The client options for reading and writing models. - protected virtual BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options) - { - string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; - switch (format) - { - case "J": - return ModelReaderWriter.Write(this, options, AzureAIVoiceLiveContext.Default); - default: - throw new FormatException($"The model {nameof(LlmVoice)} does not support writing '{options.Format}' format."); - } - } - - /// The data to parse. - /// The client options for reading and writing models. - LlmVoice IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) => PersistableModelCreateCore(data, options); - - /// The data to parse. - /// The client options for reading and writing models. - protected virtual LlmVoice PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options) - { - string format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; - switch (format) - { - case "J": - using (JsonDocument document = JsonDocument.Parse(data)) - { - return DeserializeLlmVoice(document.RootElement, options); - } - default: - throw new FormatException($"The model {nameof(LlmVoice)} does not support reading '{options.Format}' format."); - } - } - - /// The client options for reading and writing models. - string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; - } -} diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Generated/LlmVoice.cs b/sdk/ai/Azure.AI.VoiceLive/src/Generated/LlmVoice.cs deleted file mode 100644 index f2633bc5598e..000000000000 --- a/sdk/ai/Azure.AI.VoiceLive/src/Generated/LlmVoice.cs +++ /dev/null @@ -1,43 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; -using System.Collections.Generic; - -namespace Azure.AI.VoiceLive -{ - /// Voice configuration for LLM (Large Language Model) voices. - public partial class LlmVoice - { - /// Keeps track of any properties unknown to the library. - private protected readonly IDictionary _additionalBinaryDataProperties; - - /// Initializes a new instance of . - /// - public LlmVoice(LlmVoiceName name) - { - Name = name; - } - - /// Initializes a new instance of . - /// - /// - /// Keeps track of any properties unknown to the library. - internal LlmVoice(string @type, LlmVoiceName name, IDictionary additionalBinaryDataProperties) - { - Type = @type; - Name = name; - _additionalBinaryDataProperties = additionalBinaryDataProperties; - } - - /// Gets the Type. - public string Type { get; } = "llm"; - - /// Gets or sets the Name. 
- public LlmVoiceName Name { get; set; } - } -} diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Generated/LlmVoiceName.cs b/sdk/ai/Azure.AI.VoiceLive/src/Generated/LlmVoiceName.cs deleted file mode 100644 index 9f9102129ea7..000000000000 --- a/sdk/ai/Azure.AI.VoiceLive/src/Generated/LlmVoiceName.cs +++ /dev/null @@ -1,64 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; -using System.ComponentModel; - -namespace Azure.AI.VoiceLive -{ - /// Voice identifier for Phi4mm voices. - public readonly partial struct LlmVoiceName : IEquatable - { - private readonly string _value; - private const string CosyvoiceValue = "cosyvoice"; - - /// Initializes a new instance of . - /// The value. - /// is null. - public LlmVoiceName(string value) - { - Argument.AssertNotNull(value, nameof(value)); - - _value = value; - } - - /// Gets the Cosyvoice. - public static LlmVoiceName Cosyvoice { get; } = new LlmVoiceName(CosyvoiceValue); - - /// Determines if two values are the same. - /// The left value to compare. - /// The right value to compare. - public static bool operator ==(LlmVoiceName left, LlmVoiceName right) => left.Equals(right); - - /// Determines if two values are not the same. - /// The left value to compare. - /// The right value to compare. - public static bool operator !=(LlmVoiceName left, LlmVoiceName right) => !left.Equals(right); - - /// Converts a string to a . - /// The value. - public static implicit operator LlmVoiceName(string value) => new LlmVoiceName(value); - - /// Converts a string to a . - /// The value. - public static implicit operator LlmVoiceName?(string value) => value == null ? 
null : new LlmVoiceName(value); - - /// - [EditorBrowsable(EditorBrowsableState.Never)] - public override bool Equals(object obj) => obj is LlmVoiceName other && Equals(other); - - /// - public bool Equals(LlmVoiceName other) => string.Equals(_value, other._value, StringComparison.InvariantCultureIgnoreCase); - - /// - [EditorBrowsable(EditorBrowsableState.Never)] - public override int GetHashCode() => _value != null ? StringComparer.InvariantCultureIgnoreCase.GetHashCode(_value) : 0; - - /// - public override string ToString() => _value; - } -} diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Generated/Models/AzureAIVoiceLiveContext.cs b/sdk/ai/Azure.AI.VoiceLive/src/Generated/Models/AzureAIVoiceLiveContext.cs index adcda161a2e5..a062fbc3f07b 100644 --- a/sdk/ai/Azure.AI.VoiceLive/src/Generated/Models/AzureAIVoiceLiveContext.cs +++ b/sdk/ai/Azure.AI.VoiceLive/src/Generated/Models/AzureAIVoiceLiveContext.cs @@ -20,15 +20,13 @@ namespace Azure.AI.VoiceLive [ModelReaderWriterBuildable(typeof(AudioNoiseReduction))] [ModelReaderWriterBuildable(typeof(AvatarConfiguration))] [ModelReaderWriterBuildable(typeof(AzureCustomVoice))] - [ModelReaderWriterBuildable(typeof(AzureMultilingualSemanticVad))] [ModelReaderWriterBuildable(typeof(AzurePersonalVoice))] - [ModelReaderWriterBuildable(typeof(AzurePlatformVoice))] - [ModelReaderWriterBuildable(typeof(AzureSemanticDetection))] - [ModelReaderWriterBuildable(typeof(AzureSemanticDetectionEn))] - [ModelReaderWriterBuildable(typeof(AzureSemanticDetectionMultilingual))] - [ModelReaderWriterBuildable(typeof(AzureSemanticVad))] - [ModelReaderWriterBuildable(typeof(AzureSemanticVadEn))] - [ModelReaderWriterBuildable(typeof(AzureSemanticVadServer))] + [ModelReaderWriterBuildable(typeof(AzureSemanticEnEouDetection))] + [ModelReaderWriterBuildable(typeof(AzureSemanticEouDetection))] + [ModelReaderWriterBuildable(typeof(AzureSemanticMultilingualEouDetection))] + [ModelReaderWriterBuildable(typeof(AzureSemanticVadEnTurnDetection))] + 
[ModelReaderWriterBuildable(typeof(AzureSemanticVadMultilingualTurnDetection))] + [ModelReaderWriterBuildable(typeof(AzureSemanticVadTurnDetection))] [ModelReaderWriterBuildable(typeof(AzureStandardVoice))] [ModelReaderWriterBuildable(typeof(AzureVoice))] [ModelReaderWriterBuildable(typeof(CachedTokenDetails))] @@ -50,20 +48,15 @@ namespace Azure.AI.VoiceLive [ModelReaderWriterBuildable(typeof(ClientEventSessionAvatarConnect))] [ModelReaderWriterBuildable(typeof(ClientEventSessionUpdate))] [ModelReaderWriterBuildable(typeof(ConversationRequestItem))] - [ModelReaderWriterBuildable(typeof(EmotionCandidate))] [ModelReaderWriterBuildable(typeof(EouDetection))] - [ModelReaderWriterBuildable(typeof(ForceModelsRequest))] [ModelReaderWriterBuildable(typeof(FunctionCallItem))] [ModelReaderWriterBuildable(typeof(FunctionCallOutputItem))] [ModelReaderWriterBuildable(typeof(IceServer))] - [ModelReaderWriterBuildable(typeof(InputAudio))] [ModelReaderWriterBuildable(typeof(InputAudioContentPart))] [ModelReaderWriterBuildable(typeof(InputTextContentPart))] [ModelReaderWriterBuildable(typeof(InputTokenDetails))] - [ModelReaderWriterBuildable(typeof(LlmVoice))] [ModelReaderWriterBuildable(typeof(LogProbProperties))] [ModelReaderWriterBuildable(typeof(MessageItem))] - [ModelReaderWriterBuildable(typeof(NoTurnDetection))] [ModelReaderWriterBuildable(typeof(OpenAIVoice))] [ModelReaderWriterBuildable(typeof(OutputTextContentPart))] [ModelReaderWriterBuildable(typeof(OutputTokenDetails))] @@ -82,8 +75,9 @@ namespace Azure.AI.VoiceLive [ModelReaderWriterBuildable(typeof(ResponseStatusDetails))] [ModelReaderWriterBuildable(typeof(ResponseTextContentPart))] [ModelReaderWriterBuildable(typeof(ResponseTokenStatistics))] - [ModelReaderWriterBuildable(typeof(ServerVad))] + [ModelReaderWriterBuildable(typeof(ServerVadTurnDetection))] [ModelReaderWriterBuildable(typeof(SessionUpdate))] + [ModelReaderWriterBuildable(typeof(SessionUpdateAvatarConnecting))] 
[ModelReaderWriterBuildable(typeof(SessionUpdateConversationItemCreated))] [ModelReaderWriterBuildable(typeof(SessionUpdateConversationItemDeleted))] [ModelReaderWriterBuildable(typeof(SessionUpdateConversationItemInputAudioTranscriptionCompleted))] @@ -111,19 +105,16 @@ namespace Azure.AI.VoiceLive [ModelReaderWriterBuildable(typeof(SessionUpdateResponseContentPartDone))] [ModelReaderWriterBuildable(typeof(SessionUpdateResponseCreated))] [ModelReaderWriterBuildable(typeof(SessionUpdateResponseDone))] - [ModelReaderWriterBuildable(typeof(SessionUpdateResponseEmotionHypothesis))] [ModelReaderWriterBuildable(typeof(SessionUpdateResponseFunctionCallArgumentsDelta))] [ModelReaderWriterBuildable(typeof(SessionUpdateResponseFunctionCallArgumentsDone))] [ModelReaderWriterBuildable(typeof(SessionUpdateResponseOutputItemAdded))] [ModelReaderWriterBuildable(typeof(SessionUpdateResponseOutputItemDone))] [ModelReaderWriterBuildable(typeof(SessionUpdateResponseTextDelta))] [ModelReaderWriterBuildable(typeof(SessionUpdateResponseTextDone))] - [ModelReaderWriterBuildable(typeof(SessionUpdateSessionAvatarConnecting))] [ModelReaderWriterBuildable(typeof(SessionUpdateSessionCreated))] [ModelReaderWriterBuildable(typeof(SessionUpdateSessionUpdated))] [ModelReaderWriterBuildable(typeof(SystemMessageItem))] [ModelReaderWriterBuildable(typeof(ToolChoiceFunctionObject))] - [ModelReaderWriterBuildable(typeof(ToolChoiceFunctionObjectFunction))] [ModelReaderWriterBuildable(typeof(ToolChoiceObject))] [ModelReaderWriterBuildable(typeof(TurnDetection))] [ModelReaderWriterBuildable(typeof(UnknownAzureVoice))] diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Generated/OutputAudioFormat.cs b/sdk/ai/Azure.AI.VoiceLive/src/Generated/OutputAudioFormat.cs new file mode 100644 index 000000000000..7ccd9392076d --- /dev/null +++ b/sdk/ai/Azure.AI.VoiceLive/src/Generated/OutputAudioFormat.cs @@ -0,0 +1,85 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +// + +#nullable disable + +using System; +using System.ComponentModel; + +namespace Azure.AI.VoiceLive +{ + /// + public readonly partial struct OutputAudioFormat : IEquatable + { + private readonly string _value; + /// 16-bit PCM audio format at default sampling rate (24kHz). + private const string Pcm16Value = "pcm16"; + /// 16-bit PCM audio format at 8kHz sampling rate. + private const string Pcm168000hzValue = "pcm16-8000hz"; + /// 16-bit PCM audio format at 16kHz sampling rate. + private const string Pcm1616000hzValue = "pcm16-16000hz"; + /// G.711 μ-law (mu-law) audio format at 8kHz sampling rate. + private const string G711UlawValue = "g711_ulaw"; + /// G.711 A-law audio format at 8kHz sampling rate. + private const string G711AlawValue = "g711_alaw"; + + /// Initializes a new instance of . + /// The value. + /// is null. + public OutputAudioFormat(string value) + { + Argument.AssertNotNull(value, nameof(value)); + + _value = value; + } + + /// 16-bit PCM audio format at default sampling rate (24kHz). + public static OutputAudioFormat Pcm16 { get; } = new OutputAudioFormat(Pcm16Value); + + /// 16-bit PCM audio format at 8kHz sampling rate. + public static OutputAudioFormat Pcm168000hz { get; } = new OutputAudioFormat(Pcm168000hzValue); + + /// 16-bit PCM audio format at 16kHz sampling rate. + public static OutputAudioFormat Pcm1616000hz { get; } = new OutputAudioFormat(Pcm1616000hzValue); + + /// G.711 μ-law (mu-law) audio format at 8kHz sampling rate. + public static OutputAudioFormat G711Ulaw { get; } = new OutputAudioFormat(G711UlawValue); + + /// G.711 A-law audio format at 8kHz sampling rate. + public static OutputAudioFormat G711Alaw { get; } = new OutputAudioFormat(G711AlawValue); + + /// Determines if two values are the same. + /// The left value to compare. + /// The right value to compare. + public static bool operator ==(OutputAudioFormat left, OutputAudioFormat right) => left.Equals(right); + + /// Determines if two values are not the same. 
+ /// The left value to compare. + /// The right value to compare. + public static bool operator !=(OutputAudioFormat left, OutputAudioFormat right) => !left.Equals(right); + + /// Converts a string to a . + /// The value. + public static implicit operator OutputAudioFormat(string value) => new OutputAudioFormat(value); + + /// Converts a string to a . + /// The value. + public static implicit operator OutputAudioFormat?(string value) => value == null ? null : new OutputAudioFormat(value); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override bool Equals(object obj) => obj is OutputAudioFormat other && Equals(other); + + /// + public bool Equals(OutputAudioFormat other) => string.Equals(_value, other._value, StringComparison.InvariantCultureIgnoreCase); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override int GetHashCode() => _value != null ? StringComparer.InvariantCultureIgnoreCase.GetHashCode(_value) : 0; + + /// + public override string ToString() => _value; + } +} diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Generated/OutputTextContentPart.cs b/sdk/ai/Azure.AI.VoiceLive/src/Generated/OutputTextContentPart.cs index e08ee05b7a58..deadae43c422 100644 --- a/sdk/ai/Azure.AI.VoiceLive/src/Generated/OutputTextContentPart.cs +++ b/sdk/ai/Azure.AI.VoiceLive/src/Generated/OutputTextContentPart.cs @@ -40,7 +40,7 @@ internal OutputTextContentPart(string @type, string text, IDictionary Gets the Type. public string Type { get; } = "text"; - /// Gets the Text. - public string Text { get; } + /// Gets or sets the Text. + public string Text { get; set; } } } diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Generated/PersonalVoiceModels.cs b/sdk/ai/Azure.AI.VoiceLive/src/Generated/PersonalVoiceModels.cs new file mode 100644 index 000000000000..1817b24bf1d6 --- /dev/null +++ b/sdk/ai/Azure.AI.VoiceLive/src/Generated/PersonalVoiceModels.cs @@ -0,0 +1,72 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. 
+// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ComponentModel; + +namespace Azure.AI.VoiceLive +{ + /// PersonalVoice models. + public readonly partial struct PersonalVoiceModels : IEquatable + { + private readonly string _value; + private const string DragonLatestNeuralValue = "DragonLatestNeural"; + private const string PhoenixLatestNeuralValue = "PhoenixLatestNeural"; + private const string PhoenixV2NeuralValue = "PhoenixV2Neural"; + + /// Initializes a new instance of . + /// The value. + /// is null. + public PersonalVoiceModels(string value) + { + Argument.AssertNotNull(value, nameof(value)); + + _value = value; + } + + /// Gets the DragonLatestNeural. + public static PersonalVoiceModels DragonLatestNeural { get; } = new PersonalVoiceModels(DragonLatestNeuralValue); + + /// Gets the PhoenixLatestNeural. + public static PersonalVoiceModels PhoenixLatestNeural { get; } = new PersonalVoiceModels(PhoenixLatestNeuralValue); + + /// Gets the PhoenixV2Neural. + public static PersonalVoiceModels PhoenixV2Neural { get; } = new PersonalVoiceModels(PhoenixV2NeuralValue); + + /// Determines if two values are the same. + /// The left value to compare. + /// The right value to compare. + public static bool operator ==(PersonalVoiceModels left, PersonalVoiceModels right) => left.Equals(right); + + /// Determines if two values are not the same. + /// The left value to compare. + /// The right value to compare. + public static bool operator !=(PersonalVoiceModels left, PersonalVoiceModels right) => !left.Equals(right); + + /// Converts a string to a . + /// The value. + public static implicit operator PersonalVoiceModels(string value) => new PersonalVoiceModels(value); + + /// Converts a string to a . + /// The value. + public static implicit operator PersonalVoiceModels?(string value) => value == null ? 
null : new PersonalVoiceModels(value); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override bool Equals(object obj) => obj is PersonalVoiceModels other && Equals(other); + + /// + public bool Equals(PersonalVoiceModels other) => string.Equals(_value, other._value, StringComparison.InvariantCultureIgnoreCase); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override int GetHashCode() => _value != null ? StringComparer.InvariantCultureIgnoreCase.GetHashCode(_value) : 0; + + /// + public override string ToString() => _value; + } +} diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Generated/RequestAudioContentPart.cs b/sdk/ai/Azure.AI.VoiceLive/src/Generated/RequestAudioContentPart.cs index f019acc37cfd..9ba48229a2b5 100644 --- a/sdk/ai/Azure.AI.VoiceLive/src/Generated/RequestAudioContentPart.cs +++ b/sdk/ai/Azure.AI.VoiceLive/src/Generated/RequestAudioContentPart.cs @@ -14,7 +14,7 @@ namespace Azure.AI.VoiceLive public partial class RequestAudioContentPart : VoiceLiveContentPart { /// Initializes a new instance of . - internal RequestAudioContentPart() : base(ContentPartType.InputAudio) + public RequestAudioContentPart() : base(ContentPartType.InputAudio) { } @@ -27,7 +27,7 @@ internal RequestAudioContentPart(ContentPartType @type, IDictionary Gets the Transcript. - public string Transcript { get; } + /// Gets or sets the Transcript. + public string Transcript { get; set; } } } diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Generated/RequestTextContentPart.cs b/sdk/ai/Azure.AI.VoiceLive/src/Generated/RequestTextContentPart.cs index 0578109be5e8..67bfa0348280 100644 --- a/sdk/ai/Azure.AI.VoiceLive/src/Generated/RequestTextContentPart.cs +++ b/sdk/ai/Azure.AI.VoiceLive/src/Generated/RequestTextContentPart.cs @@ -14,7 +14,7 @@ namespace Azure.AI.VoiceLive public partial class RequestTextContentPart : VoiceLiveContentPart { /// Initializes a new instance of . 
- internal RequestTextContentPart() : base(ContentPartType.InputText) + public RequestTextContentPart() : base(ContentPartType.InputText) { } @@ -27,7 +27,7 @@ internal RequestTextContentPart(ContentPartType @type, IDictionary Gets the Text. - public string Text { get; } + /// Gets or sets the Text. + public string Text { get; set; } } } diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Generated/ResponseCancelledDetails.Serialization.cs b/sdk/ai/Azure.AI.VoiceLive/src/Generated/ResponseCancelledDetails.Serialization.cs index 7a84a93e7d48..05f802dc2f1b 100644 --- a/sdk/ai/Azure.AI.VoiceLive/src/Generated/ResponseCancelledDetails.Serialization.cs +++ b/sdk/ai/Azure.AI.VoiceLive/src/Generated/ResponseCancelledDetails.Serialization.cs @@ -68,14 +68,14 @@ internal static ResponseCancelledDetails DeserializeResponseCancelledDetails(Jso { return null; } - string @type = "cancelled"; + VoiceLiveResponseStatus @type = default; IDictionary additionalBinaryDataProperties = new ChangeTrackingDictionary(); ResponseCancelledDetailsReason reason = default; foreach (var prop in element.EnumerateObject()) { if (prop.NameEquals("type"u8)) { - @type = prop.Value.GetString(); + @type = new VoiceLiveResponseStatus(prop.Value.GetString()); continue; } if (prop.NameEquals("reason"u8)) diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Generated/ResponseCancelledDetails.cs b/sdk/ai/Azure.AI.VoiceLive/src/Generated/ResponseCancelledDetails.cs index 58d183a6b979..4744e6cf152f 100644 --- a/sdk/ai/Azure.AI.VoiceLive/src/Generated/ResponseCancelledDetails.cs +++ b/sdk/ai/Azure.AI.VoiceLive/src/Generated/ResponseCancelledDetails.cs @@ -15,7 +15,7 @@ public partial class ResponseCancelledDetails : ResponseStatusDetails { /// Initializes a new instance of . 
/// - internal ResponseCancelledDetails(ResponseCancelledDetailsReason reason) : base("cancelled") + internal ResponseCancelledDetails(ResponseCancelledDetailsReason reason) : base(VoiceLiveResponseStatus.Cancelled) { Reason = reason; } @@ -24,7 +24,7 @@ internal ResponseCancelledDetails(ResponseCancelledDetailsReason reason) : base( /// /// Keeps track of any properties unknown to the library. /// - internal ResponseCancelledDetails(string @type, IDictionary additionalBinaryDataProperties, ResponseCancelledDetailsReason reason) : base(@type, additionalBinaryDataProperties) + internal ResponseCancelledDetails(VoiceLiveResponseStatus @type, IDictionary additionalBinaryDataProperties, ResponseCancelledDetailsReason reason) : base(@type, additionalBinaryDataProperties) { Reason = reason; } diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Generated/ResponseCreateParams.Serialization.cs b/sdk/ai/Azure.AI.VoiceLive/src/Generated/ResponseCreateParams.Serialization.cs index b773111b4c22..486f449982d7 100644 --- a/sdk/ai/Azure.AI.VoiceLive/src/Generated/ResponseCreateParams.Serialization.cs +++ b/sdk/ai/Azure.AI.VoiceLive/src/Generated/ResponseCreateParams.Serialization.cs @@ -176,7 +176,7 @@ internal static ResponseCreateParams DeserializeResponseCreateParams(JsonElement IList modalities = default; string instructions = default; BinaryData voice = default; - AudioFormat? outputAudioFormat = default; + OutputAudioFormat? outputAudioFormat = default; IList tools = default; string toolChoice = default; float? 
temperature = default; @@ -264,7 +264,7 @@ internal static ResponseCreateParams DeserializeResponseCreateParams(JsonElement { continue; } - outputAudioFormat = new AudioFormat(prop.Value.GetString()); + outputAudioFormat = new OutputAudioFormat(prop.Value.GetString()); continue; } if (prop.NameEquals("tools"u8)) diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Generated/ResponseCreateParams.cs b/sdk/ai/Azure.AI.VoiceLive/src/Generated/ResponseCreateParams.cs index 274b82ff8060..3252910a327c 100644 --- a/sdk/ai/Azure.AI.VoiceLive/src/Generated/ResponseCreateParams.cs +++ b/sdk/ai/Azure.AI.VoiceLive/src/Generated/ResponseCreateParams.cs @@ -67,7 +67,7 @@ public ResponseCreateParams() /// given model. Defaults to `inf`. /// /// Keeps track of any properties unknown to the library. - internal ResponseCreateParams(bool? commit, bool? cancelPrevious, IList appendInputItems, IList inputItems, IList modalities, string instructions, BinaryData voice, AudioFormat? outputAudioFormat, IList tools, string toolChoice, float? temperature, BinaryData maxOutputTokens, IDictionary additionalBinaryDataProperties) + internal ResponseCreateParams(bool? commit, bool? cancelPrevious, IList appendInputItems, IList inputItems, IList modalities, string instructions, BinaryData voice, OutputAudioFormat? outputAudioFormat, IList tools, string toolChoice, float? temperature, BinaryData maxOutputTokens, IDictionary additionalBinaryDataProperties) { Commit = commit; CancelPrevious = cancelPrevious; @@ -138,12 +138,6 @@ internal ResponseCreateParams(bool? commit, bool? cancelPrevious, IList /// . /// - /// - /// . - /// - /// - /// . - /// /// /// /// @@ -172,7 +166,7 @@ internal ResponseCreateParams(bool? commit, bool? cancelPrevious, IList The format of output audio. Options are `pcm16`, `g711_ulaw`, or `g711_alaw`. - public AudioFormat? OutputAudioFormat { get; set; } + public OutputAudioFormat? OutputAudioFormat { get; set; } /// Tools (functions) available to the model. 
public IList Tools { get; } diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Generated/ResponseFailedDetails.Serialization.cs b/sdk/ai/Azure.AI.VoiceLive/src/Generated/ResponseFailedDetails.Serialization.cs index deb0b73ec9a5..9ea047e4888d 100644 --- a/sdk/ai/Azure.AI.VoiceLive/src/Generated/ResponseFailedDetails.Serialization.cs +++ b/sdk/ai/Azure.AI.VoiceLive/src/Generated/ResponseFailedDetails.Serialization.cs @@ -75,14 +75,14 @@ internal static ResponseFailedDetails DeserializeResponseFailedDetails(JsonEleme { return null; } - string @type = "failed"; + VoiceLiveResponseStatus @type = default; IDictionary additionalBinaryDataProperties = new ChangeTrackingDictionary(); BinaryData error = default; foreach (var prop in element.EnumerateObject()) { if (prop.NameEquals("type"u8)) { - @type = prop.Value.GetString(); + @type = new VoiceLiveResponseStatus(prop.Value.GetString()); continue; } if (prop.NameEquals("error"u8)) diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Generated/ResponseFailedDetails.cs b/sdk/ai/Azure.AI.VoiceLive/src/Generated/ResponseFailedDetails.cs index 9975b28e24fd..30821154c9c6 100644 --- a/sdk/ai/Azure.AI.VoiceLive/src/Generated/ResponseFailedDetails.cs +++ b/sdk/ai/Azure.AI.VoiceLive/src/Generated/ResponseFailedDetails.cs @@ -16,7 +16,7 @@ public partial class ResponseFailedDetails : ResponseStatusDetails { /// Initializes a new instance of . /// - internal ResponseFailedDetails(BinaryData error) : base("failed") + internal ResponseFailedDetails(BinaryData error) : base(VoiceLiveResponseStatus.Failed) { Error = error; } @@ -25,7 +25,7 @@ internal ResponseFailedDetails(BinaryData error) : base("failed") /// /// Keeps track of any properties unknown to the library. 
/// - internal ResponseFailedDetails(string @type, IDictionary additionalBinaryDataProperties, BinaryData error) : base(@type, additionalBinaryDataProperties) + internal ResponseFailedDetails(VoiceLiveResponseStatus @type, IDictionary additionalBinaryDataProperties, BinaryData error) : base(@type, additionalBinaryDataProperties) { Error = error; } diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Generated/ResponseIncompleteDetails.Serialization.cs b/sdk/ai/Azure.AI.VoiceLive/src/Generated/ResponseIncompleteDetails.Serialization.cs index fada1a3f8f2e..0b4d573299bd 100644 --- a/sdk/ai/Azure.AI.VoiceLive/src/Generated/ResponseIncompleteDetails.Serialization.cs +++ b/sdk/ai/Azure.AI.VoiceLive/src/Generated/ResponseIncompleteDetails.Serialization.cs @@ -68,14 +68,14 @@ internal static ResponseIncompleteDetails DeserializeResponseIncompleteDetails(J { return null; } - string @type = "incomplete"; + VoiceLiveResponseStatus @type = default; IDictionary additionalBinaryDataProperties = new ChangeTrackingDictionary(); ResponseIncompleteDetailsReason reason = default; foreach (var prop in element.EnumerateObject()) { if (prop.NameEquals("type"u8)) { - @type = prop.Value.GetString(); + @type = new VoiceLiveResponseStatus(prop.Value.GetString()); continue; } if (prop.NameEquals("reason"u8)) diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Generated/ResponseIncompleteDetails.cs b/sdk/ai/Azure.AI.VoiceLive/src/Generated/ResponseIncompleteDetails.cs index 31bd7bc929e9..3bf06403a65b 100644 --- a/sdk/ai/Azure.AI.VoiceLive/src/Generated/ResponseIncompleteDetails.cs +++ b/sdk/ai/Azure.AI.VoiceLive/src/Generated/ResponseIncompleteDetails.cs @@ -15,7 +15,7 @@ public partial class ResponseIncompleteDetails : ResponseStatusDetails { /// Initializes a new instance of . 
/// - internal ResponseIncompleteDetails(ResponseIncompleteDetailsReason reason) : base("incomplete") + internal ResponseIncompleteDetails(ResponseIncompleteDetailsReason reason) : base(VoiceLiveResponseStatus.Incomplete) { Reason = reason; } @@ -24,7 +24,7 @@ internal ResponseIncompleteDetails(ResponseIncompleteDetailsReason reason) : bas /// /// Keeps track of any properties unknown to the library. /// - internal ResponseIncompleteDetails(string @type, IDictionary additionalBinaryDataProperties, ResponseIncompleteDetailsReason reason) : base(@type, additionalBinaryDataProperties) + internal ResponseIncompleteDetails(VoiceLiveResponseStatus @type, IDictionary additionalBinaryDataProperties, ResponseIncompleteDetailsReason reason) : base(@type, additionalBinaryDataProperties) { Reason = reason; } diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Generated/ResponseStatusDetails.Serialization.cs b/sdk/ai/Azure.AI.VoiceLive/src/Generated/ResponseStatusDetails.Serialization.cs index 38325792b07d..ca290d97726a 100644 --- a/sdk/ai/Azure.AI.VoiceLive/src/Generated/ResponseStatusDetails.Serialization.cs +++ b/sdk/ai/Azure.AI.VoiceLive/src/Generated/ResponseStatusDetails.Serialization.cs @@ -42,7 +42,7 @@ protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWrit throw new FormatException($"The model {nameof(ResponseStatusDetails)} does not support writing '{format}' format."); } writer.WritePropertyName("type"u8); - writer.WriteStringValue(Type); + writer.WriteStringValue(Type.ToString()); if (options.Format != "W" && _additionalBinaryDataProperties != null) { foreach (var item in _additionalBinaryDataProperties) diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Generated/ResponseStatusDetails.cs b/sdk/ai/Azure.AI.VoiceLive/src/Generated/ResponseStatusDetails.cs index 26139bf44187..0a3bdc19061d 100644 --- a/sdk/ai/Azure.AI.VoiceLive/src/Generated/ResponseStatusDetails.cs +++ b/sdk/ai/Azure.AI.VoiceLive/src/Generated/ResponseStatusDetails.cs @@ -21,7 +21,7 @@ public 
abstract partial class ResponseStatusDetails /// Initializes a new instance of . /// - private protected ResponseStatusDetails(string @type) + private protected ResponseStatusDetails(VoiceLiveResponseStatus @type) { Type = @type; } @@ -29,13 +29,13 @@ private protected ResponseStatusDetails(string @type) /// Initializes a new instance of . /// /// Keeps track of any properties unknown to the library. - internal ResponseStatusDetails(string @type, IDictionary additionalBinaryDataProperties) + internal ResponseStatusDetails(VoiceLiveResponseStatus @type, IDictionary additionalBinaryDataProperties) { Type = @type; _additionalBinaryDataProperties = additionalBinaryDataProperties; } /// Gets or sets the Type. - internal string Type { get; set; } + internal VoiceLiveResponseStatus Type { get; set; } } } diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Generated/ServerEventType.cs b/sdk/ai/Azure.AI.VoiceLive/src/Generated/ServerEventType.cs index d86547fd3317..ec5f72690288 100644 --- a/sdk/ai/Azure.AI.VoiceLive/src/Generated/ServerEventType.cs +++ b/sdk/ai/Azure.AI.VoiceLive/src/Generated/ServerEventType.cs @@ -43,7 +43,6 @@ namespace Azure.AI.VoiceLive private const string ResponseAudioDoneValue = "response.audio.done"; private const string ResponseAnimationBlendshapesDeltaValue = "response.animation_blendshapes.delta"; private const string ResponseAnimationBlendshapesDoneValue = "response.animation_blendshapes.done"; - private const string ResponseEmotionHypothesisValue = "response.emotion_hypothesis"; private const string ResponseAudioTimestampDeltaValue = "response.audio_timestamp.delta"; private const string ResponseAudioTimestampDoneValue = "response.audio_timestamp.done"; private const string ResponseAnimationVisemeDeltaValue = "response.animation_viseme.delta"; @@ -148,9 +147,6 @@ public ServerEventType(string value) /// Gets the ResponseAnimationBlendshapesDone. 
public static ServerEventType ResponseAnimationBlendshapesDone { get; } = new ServerEventType(ResponseAnimationBlendshapesDoneValue); - /// Gets the ResponseEmotionHypothesis. - public static ServerEventType ResponseEmotionHypothesis { get; } = new ServerEventType(ResponseEmotionHypothesisValue); - /// Gets the ResponseAudioTimestampDelta. public static ServerEventType ResponseAudioTimestampDelta { get; } = new ServerEventType(ResponseAudioTimestampDeltaValue); diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Generated/ServerVad.Serialization.cs b/sdk/ai/Azure.AI.VoiceLive/src/Generated/ServerVadTurnDetection.Serialization.cs similarity index 79% rename from sdk/ai/Azure.AI.VoiceLive/src/Generated/ServerVad.Serialization.cs rename to sdk/ai/Azure.AI.VoiceLive/src/Generated/ServerVadTurnDetection.Serialization.cs index 4508bacdb667..0ef28e1973fb 100644 --- a/sdk/ai/Azure.AI.VoiceLive/src/Generated/ServerVad.Serialization.cs +++ b/sdk/ai/Azure.AI.VoiceLive/src/Generated/ServerVadTurnDetection.Serialization.cs @@ -13,11 +13,11 @@ namespace Azure.AI.VoiceLive { /// Base model for VAD-based turn detection. - public partial class ServerVad : IJsonModel + public partial class ServerVadTurnDetection : IJsonModel { /// The JSON writer. /// The client options for reading and writing models. - void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) { writer.WriteStartObject(); JsonModelWriteCore(writer, options); @@ -28,10 +28,10 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions /// The client options for reading and writing models. protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { - string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + string format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") { - throw new FormatException($"The model {nameof(ServerVad)} does not support writing '{format}' format."); + throw new FormatException($"The model {nameof(ServerVadTurnDetection)} does not support writing '{format}' format."); } base.JsonModelWriteCore(writer, options); if (Optional.IsDefined(Threshold)) @@ -63,24 +63,24 @@ protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWri /// The JSON reader. /// The client options for reading and writing models. - ServerVad IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) => (ServerVad)JsonModelCreateCore(ref reader, options); + ServerVadTurnDetection IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) => (ServerVadTurnDetection)JsonModelCreateCore(ref reader, options); /// The JSON reader. /// The client options for reading and writing models. protected override TurnDetection JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options) { - string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") { - throw new FormatException($"The model {nameof(ServerVad)} does not support reading '{format}' format."); + throw new FormatException($"The model {nameof(ServerVadTurnDetection)} does not support reading '{format}' format."); } using JsonDocument document = JsonDocument.ParseValue(ref reader); - return DeserializeServerVad(document.RootElement, options); + return DeserializeServerVadTurnDetection(document.RootElement, options); } /// The JSON element to deserialize. /// The client options for reading and writing models. 
- internal static ServerVad DeserializeServerVad(JsonElement element, ModelReaderWriterOptions options) + internal static ServerVadTurnDetection DeserializeServerVadTurnDetection(JsonElement element, ModelReaderWriterOptions options) { if (element.ValueKind == JsonValueKind.Null) { @@ -97,7 +97,7 @@ internal static ServerVad DeserializeServerVad(JsonElement element, ModelReaderW { if (prop.NameEquals("type"u8)) { - @type = prop.Value.GetString().ToTurnDetectionType(); + @type = new TurnDetectionType(prop.Value.GetString()); continue; } if (prop.NameEquals("threshold"u8)) @@ -150,7 +150,7 @@ internal static ServerVad DeserializeServerVad(JsonElement element, ModelReaderW additionalBinaryDataProperties.Add(prop.Name, BinaryData.FromString(prop.Value.GetRawText())); } } - return new ServerVad( + return new ServerVadTurnDetection( @type, additionalBinaryDataProperties, threshold, @@ -161,43 +161,43 @@ internal static ServerVad DeserializeServerVad(JsonElement element, ModelReaderW } /// The client options for reading and writing models. - BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) => PersistableModelWriteCore(options); + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) => PersistableModelWriteCore(options); /// The client options for reading and writing models. protected override BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options) { - string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + string format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; switch (format) { case "J": return ModelReaderWriter.Write(this, options, AzureAIVoiceLiveContext.Default); default: - throw new FormatException($"The model {nameof(ServerVad)} does not support writing '{options.Format}' format."); + throw new FormatException($"The model {nameof(ServerVadTurnDetection)} does not support writing '{options.Format}' format."); } } /// The data to parse. /// The client options for reading and writing models. - ServerVad IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) => (ServerVad)PersistableModelCreateCore(data, options); + ServerVadTurnDetection IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) => (ServerVadTurnDetection)PersistableModelCreateCore(data, options); /// The data to parse. /// The client options for reading and writing models. protected override TurnDetection PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options) { - string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; switch (format) { case "J": using (JsonDocument document = JsonDocument.Parse(data)) { - return DeserializeServerVad(document.RootElement, options); + return DeserializeServerVadTurnDetection(document.RootElement, options); } default: - throw new FormatException($"The model {nameof(ServerVad)} does not support reading '{options.Format}' format."); + throw new FormatException($"The model {nameof(ServerVadTurnDetection)} does not support reading '{options.Format}' format."); } } /// The client options for reading and writing models. 
- string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; } } diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Generated/ServerVad.cs b/sdk/ai/Azure.AI.VoiceLive/src/Generated/ServerVadTurnDetection.cs similarity index 63% rename from sdk/ai/Azure.AI.VoiceLive/src/Generated/ServerVad.cs rename to sdk/ai/Azure.AI.VoiceLive/src/Generated/ServerVadTurnDetection.cs index cd27d964c652..955d6cd51d45 100644 --- a/sdk/ai/Azure.AI.VoiceLive/src/Generated/ServerVad.cs +++ b/sdk/ai/Azure.AI.VoiceLive/src/Generated/ServerVadTurnDetection.cs @@ -11,22 +11,22 @@ namespace Azure.AI.VoiceLive { /// Base model for VAD-based turn detection. - public partial class ServerVad : TurnDetection + public partial class ServerVadTurnDetection : TurnDetection { - /// Initializes a new instance of . - public ServerVad() : base(TurnDetectionType.ServerVad) + /// Initializes a new instance of . + public ServerVadTurnDetection() : base(TurnDetectionType.ServerVad) { } - /// Initializes a new instance of . + /// Initializes a new instance of . /// /// Keeps track of any properties unknown to the library. /// - /// - /// + /// Gets or sets the PrefixPaddingMs. + /// Gets or sets the SilenceDurationMs. /// /// - internal ServerVad(TurnDetectionType @type, IDictionary additionalBinaryDataProperties, float? threshold, int? prefixPaddingMs, int? silenceDurationMs, EouDetection endOfUtteranceDetection, bool? autoTruncate) : base(@type, additionalBinaryDataProperties) + internal ServerVadTurnDetection(TurnDetectionType @type, IDictionary additionalBinaryDataProperties, float? threshold, int? prefixPaddingMs, int? silenceDurationMs, EouDetection endOfUtteranceDetection, bool? 
autoTruncate) : base(@type, additionalBinaryDataProperties) { Threshold = threshold; PrefixPaddingMs = prefixPaddingMs; @@ -38,12 +38,6 @@ internal ServerVad(TurnDetectionType @type, IDictionary addi /// Gets or sets the Threshold. public float? Threshold { get; set; } - /// Gets or sets the PrefixPaddingMs. - public int? PrefixPaddingMs { get; set; } - - /// Gets or sets the SilenceDurationMs. - public int? SilenceDurationMs { get; set; } - /// Gets or sets the EndOfUtteranceDetection. public EouDetection EndOfUtteranceDetection { get; set; } diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Generated/SessionUpdate.Serialization.cs b/sdk/ai/Azure.AI.VoiceLive/src/Generated/SessionUpdate.Serialization.cs index 165aa52b6423..0cde2c3bd210 100644 --- a/sdk/ai/Azure.AI.VoiceLive/src/Generated/SessionUpdate.Serialization.cs +++ b/sdk/ai/Azure.AI.VoiceLive/src/Generated/SessionUpdate.Serialization.cs @@ -13,7 +13,7 @@ namespace Azure.AI.VoiceLive { /// /// A voicelive server event. - /// Please note this is the abstract base class. The derived classes available for instantiation are: , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , and . + /// Please note this is the abstract base class. The derived classes available for instantiation are: , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , and . 
/// [PersistableModelProxy(typeof(UnknownSessionUpdate))] public abstract partial class SessionUpdate : IJsonModel @@ -95,7 +95,7 @@ internal static SessionUpdate DeserializeSessionUpdate(JsonElement element, Mode switch (discriminator.GetString()) { case "session.avatar.connecting": - return SessionUpdateSessionAvatarConnecting.DeserializeSessionUpdateSessionAvatarConnecting(element, options); + return SessionUpdateAvatarConnecting.DeserializeSessionUpdateAvatarConnecting(element, options); case "session.created": return SessionUpdateSessionCreated.DeserializeSessionUpdateSessionCreated(element, options); case "session.updated": @@ -156,8 +156,6 @@ internal static SessionUpdate DeserializeSessionUpdate(JsonElement element, Mode return SessionUpdateResponseAnimationBlendshapeDelta.DeserializeSessionUpdateResponseAnimationBlendshapeDelta(element, options); case "response.animation_blendshapes.done": return SessionUpdateResponseAnimationBlendshapeDone.DeserializeSessionUpdateResponseAnimationBlendshapeDone(element, options); - case "response.emotion_hypothesis": - return SessionUpdateResponseEmotionHypothesis.DeserializeSessionUpdateResponseEmotionHypothesis(element, options); case "response.audio_timestamp.delta": return SessionUpdateResponseAudioTimestampDelta.DeserializeSessionUpdateResponseAudioTimestampDelta(element, options); case "response.audio_timestamp.done": diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Generated/SessionUpdate.cs b/sdk/ai/Azure.AI.VoiceLive/src/Generated/SessionUpdate.cs index e934c9edeffc..6d016e655529 100644 --- a/sdk/ai/Azure.AI.VoiceLive/src/Generated/SessionUpdate.cs +++ b/sdk/ai/Azure.AI.VoiceLive/src/Generated/SessionUpdate.cs @@ -12,7 +12,7 @@ namespace Azure.AI.VoiceLive { /// /// A voicelive server event. - /// Please note this is the abstract base class. The derived classes available for instantiation are: , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , and . + /// Please note this is the abstract base class. 
The derived classes available for instantiation are: , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , and . /// public abstract partial class SessionUpdate { diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Generated/SessionUpdateSessionAvatarConnecting.Serialization.cs b/sdk/ai/Azure.AI.VoiceLive/src/Generated/SessionUpdateAvatarConnecting.Serialization.cs similarity index 67% rename from sdk/ai/Azure.AI.VoiceLive/src/Generated/SessionUpdateSessionAvatarConnecting.Serialization.cs rename to sdk/ai/Azure.AI.VoiceLive/src/Generated/SessionUpdateAvatarConnecting.Serialization.cs index 619d2e934041..914c0b587e83 100644 --- a/sdk/ai/Azure.AI.VoiceLive/src/Generated/SessionUpdateSessionAvatarConnecting.Serialization.cs +++ b/sdk/ai/Azure.AI.VoiceLive/src/Generated/SessionUpdateAvatarConnecting.Serialization.cs @@ -13,16 +13,16 @@ namespace Azure.AI.VoiceLive { /// Sent when the server is in the process of establishing an avatar media connection and provides its SDP answer. - public partial class SessionUpdateSessionAvatarConnecting : IJsonModel + public partial class SessionUpdateAvatarConnecting : IJsonModel { - /// Initializes a new instance of for deserialization. - internal SessionUpdateSessionAvatarConnecting() + /// Initializes a new instance of for deserialization. + internal SessionUpdateAvatarConnecting() { } /// The JSON writer. /// The client options for reading and writing models. - void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) { writer.WriteStartObject(); JsonModelWriteCore(writer, options); @@ -33,10 +33,10 @@ void IJsonModel.Write(Utf8JsonWriter write /// The client options for reading and writing models. protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { - string format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") { - throw new FormatException($"The model {nameof(SessionUpdateSessionAvatarConnecting)} does not support writing '{format}' format."); + throw new FormatException($"The model {nameof(SessionUpdateAvatarConnecting)} does not support writing '{format}' format."); } base.JsonModelWriteCore(writer, options); writer.WritePropertyName("server_sdp"u8); @@ -45,24 +45,24 @@ protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWri /// The JSON reader. /// The client options for reading and writing models. - SessionUpdateSessionAvatarConnecting IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) => (SessionUpdateSessionAvatarConnecting)JsonModelCreateCore(ref reader, options); + SessionUpdateAvatarConnecting IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) => (SessionUpdateAvatarConnecting)JsonModelCreateCore(ref reader, options); /// The JSON reader. /// The client options for reading and writing models. protected override SessionUpdate JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options) { - string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + string format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") { - throw new FormatException($"The model {nameof(SessionUpdateSessionAvatarConnecting)} does not support reading '{format}' format."); + throw new FormatException($"The model {nameof(SessionUpdateAvatarConnecting)} does not support reading '{format}' format."); } using JsonDocument document = JsonDocument.ParseValue(ref reader); - return DeserializeSessionUpdateSessionAvatarConnecting(document.RootElement, options); + return DeserializeSessionUpdateAvatarConnecting(document.RootElement, options); } /// The JSON element to deserialize. /// The client options for reading and writing models. - internal static SessionUpdateSessionAvatarConnecting DeserializeSessionUpdateSessionAvatarConnecting(JsonElement element, ModelReaderWriterOptions options) + internal static SessionUpdateAvatarConnecting DeserializeSessionUpdateAvatarConnecting(JsonElement element, ModelReaderWriterOptions options) { if (element.ValueKind == JsonValueKind.Null) { @@ -94,47 +94,47 @@ internal static SessionUpdateSessionAvatarConnecting DeserializeSessionUpdateSes additionalBinaryDataProperties.Add(prop.Name, BinaryData.FromString(prop.Value.GetRawText())); } } - return new SessionUpdateSessionAvatarConnecting(@type, eventId, additionalBinaryDataProperties, serverSdp); + return new SessionUpdateAvatarConnecting(@type, eventId, additionalBinaryDataProperties, serverSdp); } /// The client options for reading and writing models. - BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) => PersistableModelWriteCore(options); + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) => PersistableModelWriteCore(options); /// The client options for reading and writing models. protected override BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options) { - string format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; switch (format) { case "J": return ModelReaderWriter.Write(this, options, AzureAIVoiceLiveContext.Default); default: - throw new FormatException($"The model {nameof(SessionUpdateSessionAvatarConnecting)} does not support writing '{options.Format}' format."); + throw new FormatException($"The model {nameof(SessionUpdateAvatarConnecting)} does not support writing '{options.Format}' format."); } } /// The data to parse. /// The client options for reading and writing models. - SessionUpdateSessionAvatarConnecting IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) => (SessionUpdateSessionAvatarConnecting)PersistableModelCreateCore(data, options); + SessionUpdateAvatarConnecting IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) => (SessionUpdateAvatarConnecting)PersistableModelCreateCore(data, options); /// The data to parse. /// The client options for reading and writing models. protected override SessionUpdate PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options) { - string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + string format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; switch (format) { case "J": using (JsonDocument document = JsonDocument.Parse(data)) { - return DeserializeSessionUpdateSessionAvatarConnecting(document.RootElement, options); + return DeserializeSessionUpdateAvatarConnecting(document.RootElement, options); } default: - throw new FormatException($"The model {nameof(SessionUpdateSessionAvatarConnecting)} does not support reading '{options.Format}' format."); + throw new FormatException($"The model {nameof(SessionUpdateAvatarConnecting)} does not support reading '{options.Format}' format."); } } /// The client options for reading and writing models. - string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; } } diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Generated/SessionUpdateSessionAvatarConnecting.cs b/sdk/ai/Azure.AI.VoiceLive/src/Generated/SessionUpdateAvatarConnecting.cs similarity index 68% rename from sdk/ai/Azure.AI.VoiceLive/src/Generated/SessionUpdateSessionAvatarConnecting.cs rename to sdk/ai/Azure.AI.VoiceLive/src/Generated/SessionUpdateAvatarConnecting.cs index 048d7d5694f0..14eb77d9a933 100644 --- a/sdk/ai/Azure.AI.VoiceLive/src/Generated/SessionUpdateSessionAvatarConnecting.cs +++ b/sdk/ai/Azure.AI.VoiceLive/src/Generated/SessionUpdateAvatarConnecting.cs @@ -11,21 +11,21 @@ namespace Azure.AI.VoiceLive { /// Sent when the server is in the process of establishing an avatar media connection and provides its SDP answer. - public partial class SessionUpdateSessionAvatarConnecting : SessionUpdate + public partial class SessionUpdateAvatarConnecting : SessionUpdate { - /// Initializes a new instance of . + /// Initializes a new instance of . /// The server's SDP answer for the avatar connection. 
- internal SessionUpdateSessionAvatarConnecting(string serverSdp) : base(ServerEventType.SessionAvatarConnecting) + internal SessionUpdateAvatarConnecting(string serverSdp) : base(ServerEventType.SessionAvatarConnecting) { ServerSdp = serverSdp; } - /// Initializes a new instance of . + /// Initializes a new instance of . /// The type of event. /// /// Keeps track of any properties unknown to the library. /// The server's SDP answer for the avatar connection. - internal SessionUpdateSessionAvatarConnecting(ServerEventType @type, string eventId, IDictionary additionalBinaryDataProperties, string serverSdp) : base(@type, eventId, additionalBinaryDataProperties) + internal SessionUpdateAvatarConnecting(ServerEventType @type, string eventId, IDictionary additionalBinaryDataProperties, string serverSdp) : base(@type, eventId, additionalBinaryDataProperties) { ServerSdp = serverSdp; } diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Generated/SessionUpdateConversationItemTruncated.cs b/sdk/ai/Azure.AI.VoiceLive/src/Generated/SessionUpdateConversationItemTruncated.cs index 42f3a35e8579..f84c00bdfd23 100644 --- a/sdk/ai/Azure.AI.VoiceLive/src/Generated/SessionUpdateConversationItemTruncated.cs +++ b/sdk/ai/Azure.AI.VoiceLive/src/Generated/SessionUpdateConversationItemTruncated.cs @@ -51,9 +51,6 @@ internal SessionUpdateConversationItemTruncated(ServerEventType @type, IDictiona /// The index of the content part that was truncated. public int ContentIndex { get; } - /// The duration up to which the audio was truncated, in milliseconds. - public int AudioEndMs { get; } - /// Gets the EventId. 
public override string EventId { get; } } diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Generated/SessionUpdateInputAudioBufferSpeechStopped.cs b/sdk/ai/Azure.AI.VoiceLive/src/Generated/SessionUpdateInputAudioBufferSpeechStopped.cs index d8c574a1566b..0d2d119daa25 100644 --- a/sdk/ai/Azure.AI.VoiceLive/src/Generated/SessionUpdateInputAudioBufferSpeechStopped.cs +++ b/sdk/ai/Azure.AI.VoiceLive/src/Generated/SessionUpdateInputAudioBufferSpeechStopped.cs @@ -18,11 +18,7 @@ namespace Azure.AI.VoiceLive public partial class SessionUpdateInputAudioBufferSpeechStopped : SessionUpdate { /// Initializes a new instance of . - /// - /// Milliseconds since the session started when speech stopped. This will - /// correspond to the end of audio sent to the model, and thus includes the - /// `min_silence_duration_ms` configured in the Session. - /// + /// /// The ID of the user message item that will be created. internal SessionUpdateInputAudioBufferSpeechStopped(int audioEndMs, string itemId) : base(ServerEventType.InputAudioBufferSpeechStopped) { @@ -34,11 +30,7 @@ internal SessionUpdateInputAudioBufferSpeechStopped(int audioEndMs, string itemI /// The type of event. /// /// Keeps track of any properties unknown to the library. - /// - /// Milliseconds since the session started when speech stopped. This will - /// correspond to the end of audio sent to the model, and thus includes the - /// `min_silence_duration_ms` configured in the Session. - /// + /// /// The ID of the user message item that will be created. internal SessionUpdateInputAudioBufferSpeechStopped(ServerEventType @type, string eventId, IDictionary additionalBinaryDataProperties, int audioEndMs, string itemId) : base(@type, eventId, additionalBinaryDataProperties) { @@ -46,13 +38,6 @@ internal SessionUpdateInputAudioBufferSpeechStopped(ServerEventType @type, strin ItemId = itemId; } - /// - /// Milliseconds since the session started when speech stopped. 
This will - /// correspond to the end of audio sent to the model, and thus includes the - /// `min_silence_duration_ms` configured in the Session. - /// - public int AudioEndMs { get; } - /// The ID of the user message item that will be created. public string ItemId { get; } } diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Generated/SessionUpdateResponseAnimationVisemeDelta.cs b/sdk/ai/Azure.AI.VoiceLive/src/Generated/SessionUpdateResponseAnimationVisemeDelta.cs index f721e0958274..4c096b5d4854 100644 --- a/sdk/ai/Azure.AI.VoiceLive/src/Generated/SessionUpdateResponseAnimationVisemeDelta.cs +++ b/sdk/ai/Azure.AI.VoiceLive/src/Generated/SessionUpdateResponseAnimationVisemeDelta.cs @@ -18,7 +18,7 @@ public partial class SessionUpdateResponseAnimationVisemeDelta : SessionUpdate /// /// /// - /// + /// Gets the AudioOffsetMs. /// internal SessionUpdateResponseAnimationVisemeDelta(string responseId, string itemId, int outputIndex, int contentIndex, int audioOffsetMs, int visemeId) : base(ServerEventType.ResponseAnimationVisemeDelta) { @@ -38,7 +38,7 @@ internal SessionUpdateResponseAnimationVisemeDelta(string responseId, string ite /// /// /// - /// + /// Gets the AudioOffsetMs. /// internal SessionUpdateResponseAnimationVisemeDelta(ServerEventType @type, string eventId, IDictionary additionalBinaryDataProperties, string responseId, string itemId, int outputIndex, int contentIndex, int audioOffsetMs, int visemeId) : base(@type, eventId, additionalBinaryDataProperties) { @@ -62,9 +62,6 @@ internal SessionUpdateResponseAnimationVisemeDelta(ServerEventType @type, string /// Gets the ContentIndex. public int ContentIndex { get; } - /// Gets the AudioOffsetMs. - public int AudioOffsetMs { get; } - /// Gets the VisemeId. 
public int VisemeId { get; } } diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Generated/SessionUpdateResponseAudioTimestampDelta.cs b/sdk/ai/Azure.AI.VoiceLive/src/Generated/SessionUpdateResponseAudioTimestampDelta.cs index 9cd65cce4942..044b4211ca6e 100644 --- a/sdk/ai/Azure.AI.VoiceLive/src/Generated/SessionUpdateResponseAudioTimestampDelta.cs +++ b/sdk/ai/Azure.AI.VoiceLive/src/Generated/SessionUpdateResponseAudioTimestampDelta.cs @@ -18,8 +18,8 @@ public partial class SessionUpdateResponseAudioTimestampDelta : SessionUpdate /// /// /// - /// - /// + /// Gets the AudioOffsetMs. + /// Gets the AudioDurationMs. /// internal SessionUpdateResponseAudioTimestampDelta(string responseId, string itemId, int outputIndex, int contentIndex, int audioOffsetMs, int audioDurationMs, string text) : base(ServerEventType.ResponseAudioTimestampDelta) { @@ -40,8 +40,8 @@ internal SessionUpdateResponseAudioTimestampDelta(string responseId, string item /// /// /// - /// - /// + /// Gets the AudioOffsetMs. + /// Gets the AudioDurationMs. /// /// internal SessionUpdateResponseAudioTimestampDelta(ServerEventType @type, string eventId, IDictionary additionalBinaryDataProperties, string responseId, string itemId, int outputIndex, int contentIndex, int audioOffsetMs, int audioDurationMs, string text, string timestampType) : base(@type, eventId, additionalBinaryDataProperties) @@ -68,12 +68,6 @@ internal SessionUpdateResponseAudioTimestampDelta(ServerEventType @type, string /// Gets the ContentIndex. public int ContentIndex { get; } - /// Gets the AudioOffsetMs. - public int AudioOffsetMs { get; } - - /// Gets the AudioDurationMs. - public int AudioDurationMs { get; } - /// Gets the Text. 
public string Text { get; } diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Generated/SessionUpdateResponseEmotionHypothesis.Serialization.cs b/sdk/ai/Azure.AI.VoiceLive/src/Generated/SessionUpdateResponseEmotionHypothesis.Serialization.cs deleted file mode 100644 index 86706be56547..000000000000 --- a/sdk/ai/Azure.AI.VoiceLive/src/Generated/SessionUpdateResponseEmotionHypothesis.Serialization.cs +++ /dev/null @@ -1,202 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; -using System.ClientModel.Primitives; -using System.Collections.Generic; -using System.Text.Json; - -namespace Azure.AI.VoiceLive -{ - /// Represents an emotion hypothesis detected from response audio with multiple candidates. - public partial class SessionUpdateResponseEmotionHypothesis : IJsonModel - { - /// Initializes a new instance of for deserialization. - internal SessionUpdateResponseEmotionHypothesis() - { - } - - /// The JSON writer. - /// The client options for reading and writing models. - void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) - { - writer.WriteStartObject(); - JsonModelWriteCore(writer, options); - writer.WriteEndObject(); - } - - /// The JSON writer. - /// The client options for reading and writing models. - protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) - { - string format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; - if (format != "J") - { - throw new FormatException($"The model {nameof(SessionUpdateResponseEmotionHypothesis)} does not support writing '{format}' format."); - } - base.JsonModelWriteCore(writer, options); - writer.WritePropertyName("emotion"u8); - writer.WriteStringValue(Emotion); - writer.WritePropertyName("candidates"u8); - writer.WriteStartArray(); - foreach (EmotionCandidate item in Candidates) - { - writer.WriteObjectValue(item, options); - } - writer.WriteEndArray(); - writer.WritePropertyName("audio_offset_ms"u8); - writer.WriteNumberValue(AudioOffsetMs); - writer.WritePropertyName("audio_duration_ms"u8); - writer.WriteNumberValue(AudioDurationMs); - if (Optional.IsDefined(ResponseId)) - { - writer.WritePropertyName("response_id"u8); - writer.WriteStringValue(ResponseId); - } - writer.WritePropertyName("item_id"u8); - writer.WriteStringValue(ItemId); - } - - /// The JSON reader. - /// The client options for reading and writing models. - SessionUpdateResponseEmotionHypothesis IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) => (SessionUpdateResponseEmotionHypothesis)JsonModelCreateCore(ref reader, options); - - /// The JSON reader. - /// The client options for reading and writing models. - protected override SessionUpdate JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options) - { - string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; - if (format != "J") - { - throw new FormatException($"The model {nameof(SessionUpdateResponseEmotionHypothesis)} does not support reading '{format}' format."); - } - using JsonDocument document = JsonDocument.ParseValue(ref reader); - return DeserializeSessionUpdateResponseEmotionHypothesis(document.RootElement, options); - } - - /// The JSON element to deserialize. - /// The client options for reading and writing models. 
- internal static SessionUpdateResponseEmotionHypothesis DeserializeSessionUpdateResponseEmotionHypothesis(JsonElement element, ModelReaderWriterOptions options) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - ServerEventType @type = default; - string eventId = default; - IDictionary additionalBinaryDataProperties = new ChangeTrackingDictionary(); - string emotion = default; - IList candidates = default; - int audioOffsetMs = default; - int audioDurationMs = default; - string responseId = default; - string itemId = default; - foreach (var prop in element.EnumerateObject()) - { - if (prop.NameEquals("type"u8)) - { - @type = new ServerEventType(prop.Value.GetString()); - continue; - } - if (prop.NameEquals("event_id"u8)) - { - eventId = prop.Value.GetString(); - continue; - } - if (prop.NameEquals("emotion"u8)) - { - emotion = prop.Value.GetString(); - continue; - } - if (prop.NameEquals("candidates"u8)) - { - List array = new List(); - foreach (var item in prop.Value.EnumerateArray()) - { - array.Add(EmotionCandidate.DeserializeEmotionCandidate(item, options)); - } - candidates = array; - continue; - } - if (prop.NameEquals("audio_offset_ms"u8)) - { - audioOffsetMs = prop.Value.GetInt32(); - continue; - } - if (prop.NameEquals("audio_duration_ms"u8)) - { - audioDurationMs = prop.Value.GetInt32(); - continue; - } - if (prop.NameEquals("response_id"u8)) - { - responseId = prop.Value.GetString(); - continue; - } - if (prop.NameEquals("item_id"u8)) - { - itemId = prop.Value.GetString(); - continue; - } - if (options.Format != "W") - { - additionalBinaryDataProperties.Add(prop.Name, BinaryData.FromString(prop.Value.GetRawText())); - } - } - return new SessionUpdateResponseEmotionHypothesis( - @type, - eventId, - additionalBinaryDataProperties, - emotion, - candidates, - audioOffsetMs, - audioDurationMs, - responseId, - itemId); - } - - /// The client options for reading and writing models. 
- BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) => PersistableModelWriteCore(options); - - /// The client options for reading and writing models. - protected override BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options) - { - string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; - switch (format) - { - case "J": - return ModelReaderWriter.Write(this, options, AzureAIVoiceLiveContext.Default); - default: - throw new FormatException($"The model {nameof(SessionUpdateResponseEmotionHypothesis)} does not support writing '{options.Format}' format."); - } - } - - /// The data to parse. - /// The client options for reading and writing models. - SessionUpdateResponseEmotionHypothesis IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) => (SessionUpdateResponseEmotionHypothesis)PersistableModelCreateCore(data, options); - - /// The data to parse. - /// The client options for reading and writing models. - protected override SessionUpdate PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options) - { - string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; - switch (format) - { - case "J": - using (JsonDocument document = JsonDocument.Parse(data)) - { - return DeserializeSessionUpdateResponseEmotionHypothesis(document.RootElement, options); - } - default: - throw new FormatException($"The model {nameof(SessionUpdateResponseEmotionHypothesis)} does not support reading '{options.Format}' format."); - } - } - - /// The client options for reading and writing models. 
- string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; - } -} diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Generated/SessionUpdateResponseEmotionHypothesis.cs b/sdk/ai/Azure.AI.VoiceLive/src/Generated/SessionUpdateResponseEmotionHypothesis.cs deleted file mode 100644 index c171e2344931..000000000000 --- a/sdk/ai/Azure.AI.VoiceLive/src/Generated/SessionUpdateResponseEmotionHypothesis.cs +++ /dev/null @@ -1,70 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; -using System.Collections.Generic; -using System.Linq; - -namespace Azure.AI.VoiceLive -{ - /// Represents an emotion hypothesis detected from response audio with multiple candidates. - public partial class SessionUpdateResponseEmotionHypothesis : SessionUpdate - { - /// Initializes a new instance of . - /// - /// - /// - /// - /// - internal SessionUpdateResponseEmotionHypothesis(string emotion, IEnumerable candidates, int audioOffsetMs, int audioDurationMs, string itemId) : base(ServerEventType.ResponseEmotionHypothesis) - { - Emotion = emotion; - Candidates = candidates.ToList(); - AudioOffsetMs = audioOffsetMs; - AudioDurationMs = audioDurationMs; - ItemId = itemId; - } - - /// Initializes a new instance of . - /// The type of event. - /// - /// Keeps track of any properties unknown to the library. - /// - /// - /// - /// - /// - /// - internal SessionUpdateResponseEmotionHypothesis(ServerEventType @type, string eventId, IDictionary additionalBinaryDataProperties, string emotion, IList candidates, int audioOffsetMs, int audioDurationMs, string responseId, string itemId) : base(@type, eventId, additionalBinaryDataProperties) - { - Emotion = emotion; - Candidates = candidates; - AudioOffsetMs = audioOffsetMs; - AudioDurationMs = audioDurationMs; - ResponseId = responseId; - ItemId = itemId; - } - - /// Gets the Emotion. 
- public string Emotion { get; } - - /// Gets the Candidates. - public IList Candidates { get; } - - /// Gets the AudioOffsetMs. - public int AudioOffsetMs { get; } - - /// Gets the AudioDurationMs. - public int AudioDurationMs { get; } - - /// Gets the ResponseId. - public string ResponseId { get; } - - /// Gets the ItemId. - public string ItemId { get; } - } -} diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Generated/ToolChoiceFunctionObject.Serialization.cs b/sdk/ai/Azure.AI.VoiceLive/src/Generated/ToolChoiceFunctionObject.Serialization.cs index acceeb9edf9e..69a9b73c58ef 100644 --- a/sdk/ai/Azure.AI.VoiceLive/src/Generated/ToolChoiceFunctionObject.Serialization.cs +++ b/sdk/ai/Azure.AI.VoiceLive/src/Generated/ToolChoiceFunctionObject.Serialization.cs @@ -38,8 +38,8 @@ protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWri throw new FormatException($"The model {nameof(ToolChoiceFunctionObject)} does not support writing '{format}' format."); } base.JsonModelWriteCore(writer, options); - writer.WritePropertyName("function"u8); - writer.WriteObjectValue(Function, options); + writer.WritePropertyName("name"u8); + writer.WriteStringValue(Name); } /// The JSON reader. 
@@ -69,7 +69,7 @@ internal static ToolChoiceFunctionObject DeserializeToolChoiceFunctionObject(Jso } ToolType @type = default; IDictionary additionalBinaryDataProperties = new ChangeTrackingDictionary(); - ToolChoiceFunctionObjectFunction function = default; + string name = default; foreach (var prop in element.EnumerateObject()) { if (prop.NameEquals("type"u8)) @@ -77,9 +77,9 @@ internal static ToolChoiceFunctionObject DeserializeToolChoiceFunctionObject(Jso @type = new ToolType(prop.Value.GetString()); continue; } - if (prop.NameEquals("function"u8)) + if (prop.NameEquals("name"u8)) { - function = ToolChoiceFunctionObjectFunction.DeserializeToolChoiceFunctionObjectFunction(prop.Value, options); + name = prop.Value.GetString(); continue; } if (options.Format != "W") @@ -87,7 +87,7 @@ internal static ToolChoiceFunctionObject DeserializeToolChoiceFunctionObject(Jso additionalBinaryDataProperties.Add(prop.Name, BinaryData.FromString(prop.Value.GetRawText())); } } - return new ToolChoiceFunctionObject(@type, additionalBinaryDataProperties, function); + return new ToolChoiceFunctionObject(@type, additionalBinaryDataProperties, name); } /// The client options for reading and writing models. diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Generated/ToolChoiceFunctionObject.cs b/sdk/ai/Azure.AI.VoiceLive/src/Generated/ToolChoiceFunctionObject.cs index c2000649cd8a..c2ff7fab566f 100644 --- a/sdk/ai/Azure.AI.VoiceLive/src/Generated/ToolChoiceFunctionObject.cs +++ b/sdk/ai/Azure.AI.VoiceLive/src/Generated/ToolChoiceFunctionObject.cs @@ -13,25 +13,25 @@ namespace Azure.AI.VoiceLive internal partial class ToolChoiceFunctionObject : ToolChoiceObject { /// Initializes a new instance of . - /// - /// is null. - public ToolChoiceFunctionObject(ToolChoiceFunctionObjectFunction function) : base(ToolType.Function) + /// + /// is null. 
+ public ToolChoiceFunctionObject(string name) : base(ToolType.Function) { - Argument.AssertNotNull(function, nameof(function)); + Argument.AssertNotNull(name, nameof(name)); - Function = function; + Name = name; } /// Initializes a new instance of . /// /// Keeps track of any properties unknown to the library. - /// - internal ToolChoiceFunctionObject(ToolType @type, IDictionary additionalBinaryDataProperties, ToolChoiceFunctionObjectFunction function) : base(@type, additionalBinaryDataProperties) + /// + internal ToolChoiceFunctionObject(ToolType @type, IDictionary additionalBinaryDataProperties, string name) : base(@type, additionalBinaryDataProperties) { - Function = function; + Name = name; } - /// Gets or sets the Function. - public ToolChoiceFunctionObjectFunction Function { get; set; } + /// Gets or sets the Name. + public string Name { get; set; } } } diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Generated/ToolChoiceFunctionObjectFunction.Serialization.cs b/sdk/ai/Azure.AI.VoiceLive/src/Generated/ToolChoiceFunctionObjectFunction.Serialization.cs deleted file mode 100644 index 4a777976b926..000000000000 --- a/sdk/ai/Azure.AI.VoiceLive/src/Generated/ToolChoiceFunctionObjectFunction.Serialization.cs +++ /dev/null @@ -1,142 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; -using System.ClientModel.Primitives; -using System.Collections.Generic; -using System.Text.Json; - -namespace Azure.AI.VoiceLive -{ - /// The ToolChoiceFunctionObjectFunction. - internal partial class ToolChoiceFunctionObjectFunction : IJsonModel - { - /// Initializes a new instance of for deserialization. - internal ToolChoiceFunctionObjectFunction() - { - } - - /// The JSON writer. - /// The client options for reading and writing models. 
- void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) - { - writer.WriteStartObject(); - JsonModelWriteCore(writer, options); - writer.WriteEndObject(); - } - - /// The JSON writer. - /// The client options for reading and writing models. - protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) - { - string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; - if (format != "J") - { - throw new FormatException($"The model {nameof(ToolChoiceFunctionObjectFunction)} does not support writing '{format}' format."); - } - writer.WritePropertyName("name"u8); - writer.WriteStringValue(Name); - if (options.Format != "W" && _additionalBinaryDataProperties != null) - { - foreach (var item in _additionalBinaryDataProperties) - { - writer.WritePropertyName(item.Key); -#if NET6_0_OR_GREATER - writer.WriteRawValue(item.Value); -#else - using (JsonDocument document = JsonDocument.Parse(item.Value)) - { - JsonSerializer.Serialize(writer, document.RootElement); - } -#endif - } - } - } - - /// The JSON reader. - /// The client options for reading and writing models. - ToolChoiceFunctionObjectFunction IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) => JsonModelCreateCore(ref reader, options); - - /// The JSON reader. - /// The client options for reading and writing models. - protected virtual ToolChoiceFunctionObjectFunction JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options) - { - string format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; - if (format != "J") - { - throw new FormatException($"The model {nameof(ToolChoiceFunctionObjectFunction)} does not support reading '{format}' format."); - } - using JsonDocument document = JsonDocument.ParseValue(ref reader); - return DeserializeToolChoiceFunctionObjectFunction(document.RootElement, options); - } - - /// The JSON element to deserialize. - /// The client options for reading and writing models. - internal static ToolChoiceFunctionObjectFunction DeserializeToolChoiceFunctionObjectFunction(JsonElement element, ModelReaderWriterOptions options) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - string name = default; - IDictionary additionalBinaryDataProperties = new ChangeTrackingDictionary(); - foreach (var prop in element.EnumerateObject()) - { - if (prop.NameEquals("name"u8)) - { - name = prop.Value.GetString(); - continue; - } - if (options.Format != "W") - { - additionalBinaryDataProperties.Add(prop.Name, BinaryData.FromString(prop.Value.GetRawText())); - } - } - return new ToolChoiceFunctionObjectFunction(name, additionalBinaryDataProperties); - } - - /// The client options for reading and writing models. - BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) => PersistableModelWriteCore(options); - - /// The client options for reading and writing models. - protected virtual BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options) - { - string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; - switch (format) - { - case "J": - return ModelReaderWriter.Write(this, options, AzureAIVoiceLiveContext.Default); - default: - throw new FormatException($"The model {nameof(ToolChoiceFunctionObjectFunction)} does not support writing '{options.Format}' format."); - } - } - - /// The data to parse. - /// The client options for reading and writing models. 
- ToolChoiceFunctionObjectFunction IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) => PersistableModelCreateCore(data, options); - - /// The data to parse. - /// The client options for reading and writing models. - protected virtual ToolChoiceFunctionObjectFunction PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options) - { - string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; - switch (format) - { - case "J": - using (JsonDocument document = JsonDocument.Parse(data)) - { - return DeserializeToolChoiceFunctionObjectFunction(document.RootElement, options); - } - default: - throw new FormatException($"The model {nameof(ToolChoiceFunctionObjectFunction)} does not support reading '{options.Format}' format."); - } - } - - /// The client options for reading and writing models. - string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; - } -} diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Generated/ToolChoiceFunctionObjectFunction.cs b/sdk/ai/Azure.AI.VoiceLive/src/Generated/ToolChoiceFunctionObjectFunction.cs deleted file mode 100644 index 918782cd9d6a..000000000000 --- a/sdk/ai/Azure.AI.VoiceLive/src/Generated/ToolChoiceFunctionObjectFunction.cs +++ /dev/null @@ -1,41 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; -using System.Collections.Generic; - -namespace Azure.AI.VoiceLive -{ - /// The ToolChoiceFunctionObjectFunction. - internal partial class ToolChoiceFunctionObjectFunction - { - /// Keeps track of any properties unknown to the library. - private protected readonly IDictionary _additionalBinaryDataProperties; - - /// Initializes a new instance of . - /// - /// is null. 
- public ToolChoiceFunctionObjectFunction(string name) - { - Argument.AssertNotNull(name, nameof(name)); - - Name = name; - } - - /// Initializes a new instance of . - /// - /// Keeps track of any properties unknown to the library. - internal ToolChoiceFunctionObjectFunction(string name, IDictionary additionalBinaryDataProperties) - { - Name = name; - _additionalBinaryDataProperties = additionalBinaryDataProperties; - } - - /// Gets or sets the Name. - public string Name { get; set; } - } -} diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Generated/TurnDetection.Serialization.cs b/sdk/ai/Azure.AI.VoiceLive/src/Generated/TurnDetection.Serialization.cs index d1d0f71ec8dc..96ae6ead3782 100644 --- a/sdk/ai/Azure.AI.VoiceLive/src/Generated/TurnDetection.Serialization.cs +++ b/sdk/ai/Azure.AI.VoiceLive/src/Generated/TurnDetection.Serialization.cs @@ -13,7 +13,7 @@ namespace Azure.AI.VoiceLive { /// /// Top-level union for turn detection configuration. - /// Please note this is the abstract base class. The derived classes available for instantiation are: , , , , , and . + /// Please note this is the abstract base class. The derived classes available for instantiation are: , , , and . 
/// [PersistableModelProxy(typeof(UnknownTurnDetection))] public abstract partial class TurnDetection : IJsonModel @@ -42,7 +42,7 @@ protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWrit throw new FormatException($"The model {nameof(TurnDetection)} does not support writing '{format}' format."); } writer.WritePropertyName("type"u8); - writer.WriteStringValue(Type.ToSerialString()); + writer.WriteStringValue(Type.ToString()); if (options.Format != "W" && _additionalBinaryDataProperties != null) { foreach (var item in _additionalBinaryDataProperties) @@ -89,18 +89,14 @@ internal static TurnDetection DeserializeTurnDetection(JsonElement element, Mode { switch (discriminator.GetString()) { - case "none": - return NoTurnDetection.DeserializeNoTurnDetection(element, options); case "server_vad": - return ServerVad.DeserializeServerVad(element, options); + return ServerVadTurnDetection.DeserializeServerVadTurnDetection(element, options); case "azure_semantic_vad": - return AzureSemanticVad.DeserializeAzureSemanticVad(element, options); + return AzureSemanticVadTurnDetection.DeserializeAzureSemanticVadTurnDetection(element, options); case "azure_semantic_vad_en": - return AzureSemanticVadEn.DeserializeAzureSemanticVadEn(element, options); - case "server_sd": - return AzureSemanticVadServer.DeserializeAzureSemanticVadServer(element, options); + return AzureSemanticVadEnTurnDetection.DeserializeAzureSemanticVadEnTurnDetection(element, options); case "azure_semantic_vad_multilingual": - return AzureMultilingualSemanticVad.DeserializeAzureMultilingualSemanticVad(element, options); + return AzureSemanticVadMultilingualTurnDetection.DeserializeAzureSemanticVadMultilingualTurnDetection(element, options); } } return UnknownTurnDetection.DeserializeUnknownTurnDetection(element, options); diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Generated/TurnDetection.cs b/sdk/ai/Azure.AI.VoiceLive/src/Generated/TurnDetection.cs index e1db3a7f9da6..d510024ac2f4 100644 --- 
a/sdk/ai/Azure.AI.VoiceLive/src/Generated/TurnDetection.cs +++ b/sdk/ai/Azure.AI.VoiceLive/src/Generated/TurnDetection.cs @@ -12,7 +12,7 @@ namespace Azure.AI.VoiceLive { /// /// Top-level union for turn detection configuration. - /// Please note this is the abstract base class. The derived classes available for instantiation are: , , , , , and . + /// Please note this is the abstract base class. The derived classes available for instantiation are: , , , and . /// public abstract partial class TurnDetection { diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Generated/TurnDetectionType.Serialization.cs b/sdk/ai/Azure.AI.VoiceLive/src/Generated/TurnDetectionType.Serialization.cs deleted file mode 100644 index 0f691418e479..000000000000 --- a/sdk/ai/Azure.AI.VoiceLive/src/Generated/TurnDetectionType.Serialization.cs +++ /dev/null @@ -1,56 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; - -namespace Azure.AI.VoiceLive -{ - internal static partial class TurnDetectionTypeExtensions - { - /// The value to serialize. - public static string ToSerialString(this TurnDetectionType value) => value switch - { - TurnDetectionType.None => "none", - TurnDetectionType.ServerVad => "server_vad", - TurnDetectionType.AzureSemanticVad => "azure_semantic_vad", - TurnDetectionType.AzureSemanticVadEn => "azure_semantic_vad_en", - TurnDetectionType.ServerSd => "server_sd", - TurnDetectionType.AzureSemanticVadMultilingual => "azure_semantic_vad_multilingual", - _ => throw new ArgumentOutOfRangeException(nameof(value), value, "Unknown TurnDetectionType value.") - }; - - /// The value to deserialize. 
- public static TurnDetectionType ToTurnDetectionType(this string value) - { - if (StringComparer.OrdinalIgnoreCase.Equals(value, "none")) - { - return TurnDetectionType.None; - } - if (StringComparer.OrdinalIgnoreCase.Equals(value, "server_vad")) - { - return TurnDetectionType.ServerVad; - } - if (StringComparer.OrdinalIgnoreCase.Equals(value, "azure_semantic_vad")) - { - return TurnDetectionType.AzureSemanticVad; - } - if (StringComparer.OrdinalIgnoreCase.Equals(value, "azure_semantic_vad_en")) - { - return TurnDetectionType.AzureSemanticVadEn; - } - if (StringComparer.OrdinalIgnoreCase.Equals(value, "server_sd")) - { - return TurnDetectionType.ServerSd; - } - if (StringComparer.OrdinalIgnoreCase.Equals(value, "azure_semantic_vad_multilingual")) - { - return TurnDetectionType.AzureSemanticVadMultilingual; - } - throw new ArgumentOutOfRangeException(nameof(value), value, "Unknown TurnDetectionType value."); - } - } -} diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Generated/TurnDetectionType.cs b/sdk/ai/Azure.AI.VoiceLive/src/Generated/TurnDetectionType.cs index 3c7298aa4902..eb26a3c8d36f 100644 --- a/sdk/ai/Azure.AI.VoiceLive/src/Generated/TurnDetectionType.cs +++ b/sdk/ai/Azure.AI.VoiceLive/src/Generated/TurnDetectionType.cs @@ -5,22 +5,72 @@ #nullable disable +using System; +using System.ComponentModel; + namespace Azure.AI.VoiceLive { /// - internal enum TurnDetectionType + internal readonly partial struct TurnDetectionType : IEquatable { - /// None. - None, - /// ServerVad. - ServerVad, - /// AzureSemanticVad. - AzureSemanticVad, - /// AzureSemanticVadEn. - AzureSemanticVadEn, - /// ServerSd. - ServerSd, - /// AzureSemanticVadMultilingual. 
- AzureSemanticVadMultilingual + private readonly string _value; + private const string ServerVadValue = "server_vad"; + private const string AzureSemanticVadValue = "azure_semantic_vad"; + private const string AzureSemanticVadEnValue = "azure_semantic_vad_en"; + private const string AzureSemanticVadMultilingualValue = "azure_semantic_vad_multilingual"; + + /// Initializes a new instance of . + /// The value. + /// is null. + public TurnDetectionType(string value) + { + Argument.AssertNotNull(value, nameof(value)); + + _value = value; + } + + /// Gets the ServerVad. + public static TurnDetectionType ServerVad { get; } = new TurnDetectionType(ServerVadValue); + + /// Gets the AzureSemanticVad. + public static TurnDetectionType AzureSemanticVad { get; } = new TurnDetectionType(AzureSemanticVadValue); + + /// Gets the AzureSemanticVadEn. + public static TurnDetectionType AzureSemanticVadEn { get; } = new TurnDetectionType(AzureSemanticVadEnValue); + + /// Gets the AzureSemanticVadMultilingual. + public static TurnDetectionType AzureSemanticVadMultilingual { get; } = new TurnDetectionType(AzureSemanticVadMultilingualValue); + + /// Determines if two values are the same. + /// The left value to compare. + /// The right value to compare. + public static bool operator ==(TurnDetectionType left, TurnDetectionType right) => left.Equals(right); + + /// Determines if two values are not the same. + /// The left value to compare. + /// The right value to compare. + public static bool operator !=(TurnDetectionType left, TurnDetectionType right) => !left.Equals(right); + + /// Converts a string to a . + /// The value. + public static implicit operator TurnDetectionType(string value) => new TurnDetectionType(value); + + /// Converts a string to a . + /// The value. + public static implicit operator TurnDetectionType?(string value) => value == null ? 
null : new TurnDetectionType(value); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override bool Equals(object obj) => obj is TurnDetectionType other && Equals(other); + + /// + public bool Equals(TurnDetectionType other) => string.Equals(_value, other._value, StringComparison.InvariantCultureIgnoreCase); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override int GetHashCode() => _value != null ? StringComparer.InvariantCultureIgnoreCase.GetHashCode(_value) : 0; + + /// + public override string ToString() => _value; } } diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Generated/UnknownResponseStatusDetails.Serialization.cs b/sdk/ai/Azure.AI.VoiceLive/src/Generated/UnknownResponseStatusDetails.Serialization.cs index 9e883c96ed48..994133102d0f 100644 --- a/sdk/ai/Azure.AI.VoiceLive/src/Generated/UnknownResponseStatusDetails.Serialization.cs +++ b/sdk/ai/Azure.AI.VoiceLive/src/Generated/UnknownResponseStatusDetails.Serialization.cs @@ -65,13 +65,13 @@ internal static UnknownResponseStatusDetails DeserializeUnknownResponseStatusDet { return null; } - string @type = "unknown"; + VoiceLiveResponseStatus @type = default; IDictionary additionalBinaryDataProperties = new ChangeTrackingDictionary(); foreach (var prop in element.EnumerateObject()) { if (prop.NameEquals("type"u8)) { - @type = prop.Value.GetString(); + @type = new VoiceLiveResponseStatus(prop.Value.GetString()); continue; } if (options.Format != "W") diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Generated/UnknownResponseStatusDetails.cs b/sdk/ai/Azure.AI.VoiceLive/src/Generated/UnknownResponseStatusDetails.cs index 17e080493878..10cfa1e4d93e 100644 --- a/sdk/ai/Azure.AI.VoiceLive/src/Generated/UnknownResponseStatusDetails.cs +++ b/sdk/ai/Azure.AI.VoiceLive/src/Generated/UnknownResponseStatusDetails.cs @@ -15,7 +15,7 @@ internal partial class UnknownResponseStatusDetails : ResponseStatusDetails /// Initializes a new instance of . 
/// /// Keeps track of any properties unknown to the library. - internal UnknownResponseStatusDetails(string @type, IDictionary additionalBinaryDataProperties) : base(@type ?? "unknown", additionalBinaryDataProperties) + internal UnknownResponseStatusDetails(VoiceLiveResponseStatus @type, IDictionary additionalBinaryDataProperties) : base(@type != default ? @type : "unknown", additionalBinaryDataProperties) { } } diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Generated/UnknownTurnDetection.Serialization.cs b/sdk/ai/Azure.AI.VoiceLive/src/Generated/UnknownTurnDetection.Serialization.cs index 7cf4a6ccf452..ec37fedd155a 100644 --- a/sdk/ai/Azure.AI.VoiceLive/src/Generated/UnknownTurnDetection.Serialization.cs +++ b/sdk/ai/Azure.AI.VoiceLive/src/Generated/UnknownTurnDetection.Serialization.cs @@ -71,7 +71,7 @@ internal static UnknownTurnDetection DeserializeUnknownTurnDetection(JsonElement { if (prop.NameEquals("type"u8)) { - @type = prop.Value.GetString().ToTurnDetectionType(); + @type = new TurnDetectionType(prop.Value.GetString()); continue; } if (options.Format != "W") diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Generated/UnknownTurnDetection.cs b/sdk/ai/Azure.AI.VoiceLive/src/Generated/UnknownTurnDetection.cs index 56eb1c45ffdf..630fea583034 100644 --- a/sdk/ai/Azure.AI.VoiceLive/src/Generated/UnknownTurnDetection.cs +++ b/sdk/ai/Azure.AI.VoiceLive/src/Generated/UnknownTurnDetection.cs @@ -15,7 +15,7 @@ internal partial class UnknownTurnDetection : TurnDetection /// Initializes a new instance of . /// /// Keeps track of any properties unknown to the library. - internal UnknownTurnDetection(TurnDetectionType @type, IDictionary additionalBinaryDataProperties) : base(@type, additionalBinaryDataProperties) + internal UnknownTurnDetection(TurnDetectionType @type, IDictionary additionalBinaryDataProperties) : base(@type != default ? 
@type : "unknown", additionalBinaryDataProperties) { } } diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Generated/VoiceLiveClient.RestClient.cs b/sdk/ai/Azure.AI.VoiceLive/src/Generated/VoiceLiveClient.RestClient.cs deleted file mode 100644 index d5aa31e6a3d4..000000000000 --- a/sdk/ai/Azure.AI.VoiceLive/src/Generated/VoiceLiveClient.RestClient.cs +++ /dev/null @@ -1,36 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using Azure; -using Azure.Core; - -namespace Azure.AI.VoiceLive -{ - /// - public partial class VoiceLiveClient - { - private static ResponseClassifier _pipelineMessageClassifier200; - - private static ResponseClassifier PipelineMessageClassifier200 => _pipelineMessageClassifier200 = new StatusCodeClassifier(stackalloc ushort[] { 200 }); - - internal HttpMessage CreateForceModelsRequest(string accept, RequestContent content, RequestContext context) - { - RawRequestUriBuilder uri = new RawRequestUriBuilder(); - uri.Reset(_endpoint); - uri.AppendPath("/voice-agent/realtime", false); - uri.AppendPath("/", false); - HttpMessage message = Pipeline.CreateMessage(context, PipelineMessageClassifier200); - Request request = message.Request; - request.Uri = uri; - request.Method = RequestMethod.Post; - request.Headers.SetValue("Content-Type", "application/json"); - request.Headers.SetValue("Accept", accept); - request.Content = content; - return message; - } - } -} diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Generated/VoiceLiveClient.cs b/sdk/ai/Azure.AI.VoiceLive/src/Generated/VoiceLiveClient.cs deleted file mode 100644 index b6c9320e1b35..000000000000 --- a/sdk/ai/Azure.AI.VoiceLive/src/Generated/VoiceLiveClient.cs +++ /dev/null @@ -1,137 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
- -// - -#nullable disable - -using System; -using System.Threading; -using System.Threading.Tasks; -using Azure; -using Azure.Core; -using Azure.Core.Pipeline; - -namespace Azure.AI.VoiceLive -{ - /// The VoiceLiveClient. - public partial class VoiceLiveClient - { - private readonly Uri _endpoint; - /// A credential used to authenticate to the service. - private readonly AzureKeyCredential _keyCredential; - private const string AuthorizationHeader = "api-key"; - /// A credential used to authenticate to the service. - private readonly TokenCredential _tokenCredential; - private static readonly string[] AuthorizationScopes = new string[] { "https://cognitiveservices.azure.com/.default" }; - - /// Initializes a new instance of VoiceLiveClient for mocking. - protected VoiceLiveClient() - { - } - - /// Initializes a new instance of VoiceLiveClient. - /// Service endpoint. - /// A credential used to authenticate to the service. - /// or is null. - public VoiceLiveClient(Uri endpoint, AzureKeyCredential credential) : this(endpoint, credential, new VoiceLiveClientOptions()) - { - } - - /// Initializes a new instance of VoiceLiveClient. - /// Service endpoint. - /// A credential used to authenticate to the service. - /// or is null. - public VoiceLiveClient(Uri endpoint, TokenCredential credential) : this(endpoint, credential, new VoiceLiveClientOptions()) - { - } - - /// The HTTP pipeline for sending and receiving REST requests and responses. - public virtual HttpPipeline Pipeline { get; } - - /// The ClientDiagnostics is used to provide tracing support for the client library. - internal ClientDiagnostics ClientDiagnostics { get; } - - /// - /// [Protocol Method] ForceModels - /// - /// - /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. - /// - /// - /// - /// - /// The content to send as the body of the request. 
- /// The request options, which can override default behaviors of the client pipeline on a per-call basis. - /// Service returned a non-success status code. - /// The response returned from the service. - internal virtual Response ForceModels(string accept, RequestContent content, RequestContext context = null) - { - using DiagnosticScope scope = ClientDiagnostics.CreateScope("VoiceLiveClient.ForceModels"); - scope.Start(); - try - { - using HttpMessage message = CreateForceModelsRequest(accept, content, context); - return Pipeline.ProcessMessage(message, context); - } - catch (Exception e) - { - scope.Failed(e); - throw; - } - } - - /// - /// [Protocol Method] ForceModels - /// - /// - /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. - /// - /// - /// - /// - /// The content to send as the body of the request. - /// The request options, which can override default behaviors of the client pipeline on a per-call basis. - /// Service returned a non-success status code. - /// The response returned from the service. - internal virtual async Task ForceModelsAsync(string accept, RequestContent content, RequestContext context = null) - { - using DiagnosticScope scope = ClientDiagnostics.CreateScope("VoiceLiveClient.ForceModels"); - scope.Start(); - try - { - using HttpMessage message = CreateForceModelsRequest(accept, content, context); - return await Pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); - } - catch (Exception e) - { - scope.Failed(e); - throw; - } - } - - /// ForceModels. - /// - /// - /// The cancellation token that can be used to cancel the operation. - /// Service returned a non-success status code. 
- internal virtual Response ForceModels(string accept, BinaryData @event, CancellationToken cancellationToken = default) - { - ForceModelsRequest spreadModel = new ForceModelsRequest(@event, null); - Response result = ForceModels(accept, spreadModel, cancellationToken.CanBeCanceled ? new RequestContext { CancellationToken = cancellationToken } : null); - return Response.FromValue(result.Content, result); - } - - /// ForceModels. - /// - /// - /// The cancellation token that can be used to cancel the operation. - /// Service returned a non-success status code. - internal virtual async Task> ForceModelsAsync(string accept, BinaryData @event, CancellationToken cancellationToken = default) - { - ForceModelsRequest spreadModel = new ForceModelsRequest(@event, null); - Response result = await ForceModelsAsync(accept, spreadModel, cancellationToken.CanBeCanceled ? new RequestContext { CancellationToken = cancellationToken } : null).ConfigureAwait(false); - return Response.FromValue(result.Content, result); - } - } -} diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Generated/VoiceLiveClientBuilderExtensions.cs b/sdk/ai/Azure.AI.VoiceLive/src/Generated/VoiceLiveClientBuilderExtensions.cs deleted file mode 100644 index ba91ded31add..000000000000 --- a/sdk/ai/Azure.AI.VoiceLive/src/Generated/VoiceLiveClientBuilderExtensions.cs +++ /dev/null @@ -1,28 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Diagnostics.CodeAnalysis; -using Azure.AI.VoiceLive; -using Azure.Core.Extensions; - -namespace Microsoft.Extensions.Azure -{ - /// Extension methods to add clients to . - public static partial class VoiceLiveClientBuilderExtensions - { - /// Registers a client with the specified . - /// The builder to register with. - /// The configuration to use for the client. 
- [RequiresUnreferencedCode("Requires unreferenced code until we opt into EnableConfigurationBindingGenerator.")] - [RequiresDynamicCode("Requires unreferenced code until we opt into EnableConfigurationBindingGenerator.")] - public static IAzureClientBuilder AddVoiceLiveClient(this TBuilder builder, TConfiguration configuration) - where TBuilder : IAzureClientFactoryBuilderWithConfiguration - { - return builder.RegisterClientFactory(configuration); - } - } -} diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Generated/VoiceLiveModelFactory.cs b/sdk/ai/Azure.AI.VoiceLive/src/Generated/VoiceLiveModelFactory.cs index 2eb26961e16e..afa2ef8c4ac6 100644 --- a/sdk/ai/Azure.AI.VoiceLive/src/Generated/VoiceLiveModelFactory.cs +++ b/sdk/ai/Azure.AI.VoiceLive/src/Generated/VoiceLiveModelFactory.cs @@ -19,11 +19,13 @@ public static partial class VoiceLiveModelFactory /// /// /// - /// - /// + /// + /// Input audio sampling rate in Hz. Available values: + /// - For pcm16: 8000, 16000, 24000 + /// - For g711_alaw/g711_ulaw: 8000 + /// /// /// - /// /// /// /// @@ -31,12 +33,12 @@ public static partial class VoiceLiveModelFactory /// /// /// - /// /// /// /// + /// /// A new instance for mocking. - public static VoiceLiveSessionOptions VoiceLiveSessionOptions(string model = default, IEnumerable modalities = default, AnimationOptions animation = default, string instructions = default, InputAudio inputAudio = default, int? inputAudioSamplingRate = default, AudioFormat? inputAudioFormat = default, AudioFormat? outputAudioFormat = default, TurnDetection turnDetection = default, AudioNoiseReduction inputAudioNoiseReduction = default, AudioEchoCancellation inputAudioEchoCancellation = default, AvatarConfiguration avatar = default, AudioInputTranscriptionSettings inputAudioTranscription = default, IEnumerable outputAudioTimestampTypes = default, IEnumerable tools = default, float? 
temperature = default, RespondingAgentOptions agent = default, BinaryData voiceInternal = default, BinaryData maxResponseOutputTokens = default, BinaryData toolChoice = default) + public static VoiceLiveSessionOptions VoiceLiveSessionOptions(string model = default, IEnumerable modalities = default, AnimationOptions animation = default, string instructions = default, int? inputAudioSamplingRate = default, InputAudioFormat? inputAudioFormat = default, OutputAudioFormat? outputAudioFormat = default, AudioNoiseReduction inputAudioNoiseReduction = default, AudioEchoCancellation inputAudioEchoCancellation = default, AvatarConfiguration avatar = default, AudioInputTranscriptionSettings inputAudioTranscription = default, IEnumerable outputAudioTimestampTypes = default, IEnumerable tools = default, float? temperature = default, BinaryData voiceInternal = default, BinaryData maxResponseOutputTokens = default, BinaryData toolChoice = default, BinaryData turnDetection = default) { modalities ??= new ChangeTrackingList(); outputAudioTimestampTypes ??= new ChangeTrackingList(); @@ -47,11 +49,9 @@ public static VoiceLiveSessionOptions VoiceLiveSessionOptions(string model = def modalities.ToList(), animation, instructions, - inputAudio, inputAudioSamplingRate, inputAudioFormat, outputAudioFormat, - turnDetection, inputAudioNoiseReduction, inputAudioEchoCancellation, avatar, @@ -59,23 +59,22 @@ public static VoiceLiveSessionOptions VoiceLiveSessionOptions(string model = def outputAudioTimestampTypes.ToList(), tools.ToList(), temperature, - agent, voiceInternal, maxResponseOutputTokens, toolChoice, + turnDetection, additionalBinaryDataProperties: null); } - /// Configuration for animation outputs including blendshapes, visemes, and emotion metadata. + /// Configuration for animation outputs including blendshapes and visemes metadata. /// The name of the animation model to use. /// Set of output data types requested from the animation system. 
- /// Interval for emotion detection in milliseconds. If not set, emotion detection is disabled. /// A new instance for mocking. - public static AnimationOptions AnimationOptions(string modelName = default, IEnumerable outputs = default, int? emotionDetectionIntervalMs = default) + public static AnimationOptions AnimationOptions(string modelName = default, IEnumerable outputs = default) { outputs ??= new ChangeTrackingList(); - return new AnimationOptions(modelName, outputs.ToList(), emotionDetectionIntervalMs, additionalBinaryDataProperties: null); + return new AnimationOptions(modelName, outputs.ToList(), additionalBinaryDataProperties: null); } /// @@ -94,7 +93,7 @@ public static OpenAIVoice OpenAIVoice(string @type = default, OAIVoice name = de /// /// Base for Azure voice configurations. - /// Please note this is the abstract base class. The derived classes available for instantiation are: , , , and . + /// Please note this is the abstract base class. The derived classes available for instantiation are: , , and . /// /// /// A new instance for mocking. @@ -103,7 +102,7 @@ public static AzureVoice AzureVoice(string @type = default) return new UnknownAzureVoice(@type, additionalBinaryDataProperties: null); } - /// Azure custom voice configuration (preferred). + /// Azure custom voice configuration. /// Voice name cannot be empty. /// Endpoint ID cannot be empty. /// Temperature must be between 0.0 and 1.0. @@ -163,326 +162,20 @@ public static AzureStandardVoice AzureStandardVoice(string name = default, float volume); } - /// Azure platform voice configuration (variant of standard). - /// Voice name cannot be empty. - /// Temperature must be between 0.0 and 1.0. - /// - /// - /// - /// - /// - /// - /// - /// A new instance for mocking. - public static AzurePlatformVoice AzurePlatformVoice(string name = default, float? 
temperature = default, string customLexiconUrl = default, IEnumerable preferLocales = default, string locale = default, string style = default, string pitch = default, string rate = default, string volume = default) - { - preferLocales ??= new ChangeTrackingList(); - - return new AzurePlatformVoice( - "azure-platform", - additionalBinaryDataProperties: null, - name, - temperature, - customLexiconUrl, - preferLocales.ToList(), - locale, - style, - pitch, - rate, - volume); - } - /// Azure personal voice configuration. /// Voice name cannot be empty. /// Temperature must be between 0.0 and 1.0. /// Underlying neural model to use for personal voice. /// A new instance for mocking. - public static AzurePersonalVoice AzurePersonalVoice(string name = default, float? temperature = default, AzurePersonalVoiceModel model = default) + public static AzurePersonalVoice AzurePersonalVoice(string name = default, float? temperature = default, PersonalVoiceModels model = default) { return new AzurePersonalVoice("azure-personal", additionalBinaryDataProperties: null, name, temperature, model); } - /// Voice configuration for LLM (Large Language Model) voices. - /// - /// - /// A new instance for mocking. - public static LlmVoice LlmVoice(string @type = default, LlmVoiceName name = default) - { - return new LlmVoice(@type, name, additionalBinaryDataProperties: null); - } - - /// Configuration for client audio input. Used to specify the audio model and optional phrase list. - /// The name of the model to use for input audio (currently only 'azure-standard' is supported). - /// Optional list of phrases to bias the speech recognition engine. - /// A new instance for mocking. - public static InputAudio InputAudio(string model = default, IEnumerable phraseList = default) - { - phraseList ??= new ChangeTrackingList(); - - return new InputAudio(model, phraseList.ToList(), additionalBinaryDataProperties: null); - } - - /// - /// Top-level union for turn detection configuration. 
- /// Please note this is the abstract base class. The derived classes available for instantiation are: , , , , , and . - /// - /// - /// A new instance for mocking. - public static TurnDetection TurnDetection(string @type = default) - { - return new UnknownTurnDetection(@type.ToTurnDetectionType(), additionalBinaryDataProperties: null); - } - - /// Disables turn detection. - /// A new instance for mocking. - public static NoTurnDetection NoTurnDetection() - { - return new NoTurnDetection(TurnDetectionType.None, additionalBinaryDataProperties: null); - } - - /// Base model for VAD-based turn detection. - /// - /// - /// - /// - /// - /// A new instance for mocking. - public static ServerVad ServerVad(float? threshold = default, int? prefixPaddingMs = default, int? silenceDurationMs = default, EouDetection endOfUtteranceDetection = default, bool? autoTruncate = default) - { - return new ServerVad( - TurnDetectionType.ServerVad, - additionalBinaryDataProperties: null, - threshold, - prefixPaddingMs, - silenceDurationMs, - endOfUtteranceDetection, - autoTruncate); - } - - /// - /// Top-level union for end-of-utterance (EOU) semantic detection configuration. - /// Please note this is the abstract base class. The derived classes available for instantiation are: , , and . - /// - /// - /// A new instance for mocking. - public static EouDetection EouDetection(string model = default) - { - return new UnknownEouDetection(model.ToEOUDetectionModel(), additionalBinaryDataProperties: null); - } - - /// Azure semantic end-of-utterance detection (default). - /// - /// - /// - /// - /// - /// - /// - /// A new instance for mocking. - public static AzureSemanticDetection AzureSemanticDetection(float? threshold = default, float? timeout = default, float? secondaryThreshold = default, float? secondaryTimeout = default, bool? disableRules = default, float? srBoost = default, bool? 
extraImendCheck = default) - { - return new AzureSemanticDetection( - EOUDetectionModel.SemanticDetectionV1, - additionalBinaryDataProperties: null, - threshold, - timeout, - secondaryThreshold, - secondaryTimeout, - disableRules, - srBoost, - extraImendCheck); - } - - /// Azure semantic end-of-utterance detection (English-optimized). - /// - /// - /// - /// - /// - /// - /// - /// A new instance for mocking. - public static AzureSemanticDetectionEn AzureSemanticDetectionEn(float? threshold = default, float? timeout = default, float? secondaryThreshold = default, float? secondaryTimeout = default, bool? disableRules = default, float? srBoost = default, bool? extraImendCheck = default) - { - return new AzureSemanticDetectionEn( - EOUDetectionModel.SemanticDetectionV1En, - additionalBinaryDataProperties: null, - threshold, - timeout, - secondaryThreshold, - secondaryTimeout, - disableRules, - srBoost, - extraImendCheck); - } - - /// Azure semantic end-of-utterance detection (multilingual). - /// - /// - /// - /// - /// - /// - /// - /// A new instance for mocking. - public static AzureSemanticDetectionMultilingual AzureSemanticDetectionMultilingual(float? threshold = default, float? timeout = default, float? secondaryThreshold = default, float? secondaryTimeout = default, bool? disableRules = default, float? srBoost = default, bool? extraImendCheck = default) - { - return new AzureSemanticDetectionMultilingual( - EOUDetectionModel.SemanticDetectionV1Multilingual, - additionalBinaryDataProperties: null, - threshold, - timeout, - secondaryThreshold, - secondaryTimeout, - disableRules, - srBoost, - extraImendCheck); - } - - /// Server Speech Detection (Azure semantic VAD, default variant). - /// - /// - /// - /// - /// - /// - /// - /// - /// - /// - /// - /// - /// A new instance for mocking. - public static AzureSemanticVad AzureSemanticVad(float? threshold = default, int? prefixPaddingMs = default, int? 
silenceDurationMs = default, EouDetection endOfUtteranceDetection = default, float? negThreshold = default, int? speechDurationMs = default, int? windowSize = default, int? distinctCiPhones = default, bool? requireVowel = default, bool? removeFillerWords = default, IEnumerable languages = default, bool? autoTruncate = default) - { - languages ??= new ChangeTrackingList(); - - return new AzureSemanticVad( - TurnDetectionType.AzureSemanticVad, - additionalBinaryDataProperties: null, - threshold, - prefixPaddingMs, - silenceDurationMs, - endOfUtteranceDetection, - negThreshold, - speechDurationMs, - windowSize, - distinctCiPhones, - requireVowel, - removeFillerWords, - languages.ToList(), - autoTruncate); - } - - /// Server Speech Detection (Azure semantic VAD, English-only). - /// - /// - /// - /// - /// - /// - /// - /// - /// - /// - /// - /// - /// A new instance for mocking. - public static AzureSemanticVadEn AzureSemanticVadEn(float? threshold = default, int? prefixPaddingMs = default, int? silenceDurationMs = default, EouDetection endOfUtteranceDetection = default, float? negThreshold = default, int? speechDurationMs = default, int? windowSize = default, int? distinctCiPhones = default, bool? requireVowel = default, bool? removeFillerWords = default, IEnumerable languages = default, bool? autoTruncate = default) - { - languages ??= new ChangeTrackingList(); - - return new AzureSemanticVadEn( - TurnDetectionType.AzureSemanticVadEn, - additionalBinaryDataProperties: null, - threshold, - prefixPaddingMs, - silenceDurationMs, - endOfUtteranceDetection, - negThreshold, - speechDurationMs, - windowSize, - distinctCiPhones, - requireVowel, - removeFillerWords, - languages.ToList(), - autoTruncate); - } - - /// Server Speech Detection (legacy `server_sd` alias). - /// - /// - /// - /// - /// - /// - /// - /// - /// - /// - /// - /// - /// A new instance for mocking. - public static AzureSemanticVadServer AzureSemanticVadServer(float? threshold = default, int? 
prefixPaddingMs = default, int? silenceDurationMs = default, EouDetection endOfUtteranceDetection = default, float? negThreshold = default, int? speechDurationMs = default, int? windowSize = default, int? distinctCiPhones = default, bool? requireVowel = default, bool? removeFillerWords = default, IEnumerable languages = default, bool? autoTruncate = default) - { - languages ??= new ChangeTrackingList(); - - return new AzureSemanticVadServer( - TurnDetectionType.ServerSd, - additionalBinaryDataProperties: null, - threshold, - prefixPaddingMs, - silenceDurationMs, - endOfUtteranceDetection, - negThreshold, - speechDurationMs, - windowSize, - distinctCiPhones, - requireVowel, - removeFillerWords, - languages.ToList(), - autoTruncate); - } - - /// Server Speech Detection (Azure semantic VAD). - /// - /// - /// - /// - /// - /// - /// - /// - /// - /// - /// - /// - /// A new instance for mocking. - public static AzureMultilingualSemanticVad AzureMultilingualSemanticVad(float? threshold = default, int? prefixPaddingMs = default, int? silenceDurationMs = default, EouDetection endOfUtteranceDetection = default, float? negThreshold = default, int? speechDurationMs = default, int? windowSize = default, int? distinctCiPhones = default, bool? requireVowel = default, bool? removeFillerWords = default, IEnumerable languages = default, bool? autoTruncate = default) - { - languages ??= new ChangeTrackingList(); - - return new AzureMultilingualSemanticVad( - TurnDetectionType.AzureSemanticVadMultilingual, - additionalBinaryDataProperties: null, - threshold, - prefixPaddingMs, - silenceDurationMs, - endOfUtteranceDetection, - negThreshold, - speechDurationMs, - windowSize, - distinctCiPhones, - requireVowel, - removeFillerWords, - languages.ToList(), - autoTruncate); - } - /// Configuration for input audio noise reduction. /// The type of noise reduction model. /// A new instance for mocking. 
- public static AudioNoiseReduction AudioNoiseReduction(string @type = default) + public static AudioNoiseReduction AudioNoiseReduction(AudioNoiseReductionType @type = default) { return new AudioNoiseReduction(@type, additionalBinaryDataProperties: null); } @@ -563,9 +256,9 @@ public static VideoResolution VideoResolution(int width = default, int height = /// /// The transcription model to use. Supported values: /// 'whisper-1', 'gpt-4o-transcribe', 'gpt-4o-mini-transcribe', - /// 'azure-fast-transcription', 'azure-speech'. + /// 'azure-speech'. /// - /// Optional BCP-47 language code (e.g., 'en-US'). + /// Optional language code in BCP-47 (e.g., 'en-US'), or ISO-639-1 (e.g., 'en'), or multi languages with auto detection, (e.g., 'en,zh'). /// Optional configuration for custom speech models. /// Optional list of phrase hints to bias recognition. /// A new instance for mocking. @@ -598,24 +291,6 @@ public static VoiceLiveFunctionDefinition VoiceLiveFunctionDefinition(string nam return new VoiceLiveFunctionDefinition(ToolType.Function, additionalBinaryDataProperties: null, name, description, parameters); } - /// The RespondingAgentOptions. - /// - /// - /// - /// - /// - /// A new instance for mocking. - public static RespondingAgentOptions RespondingAgentOptions(string @type = default, string name = default, string description = default, string agentId = default, string threadId = default) - { - return new RespondingAgentOptions( - @type, - name, - description, - agentId, - threadId, - additionalBinaryDataProperties: null); - } - /// /// Base for any response item; discriminated by `type`. /// Please note this is the abstract base class. The derived classes available for instantiation are: , , and . @@ -764,18 +439,154 @@ public static FunctionCallOutputItem FunctionCallOutputItem(string id = default, status); } - /// Sent when the server is in the process of establishing an avatar media connection and provides its SDP answer. 
- /// - /// The server's SDP answer for the avatar connection. - /// A new instance for mocking. - public static SessionUpdateSessionAvatarConnecting SessionUpdateSessionAvatarConnecting(string eventId = default, string serverSdp = default) + /// + /// Top-level union for turn detection configuration. + /// Please note this is the abstract base class. The derived classes available for instantiation are: , , , and . + /// + /// + /// A new instance for mocking. + public static TurnDetection TurnDetection(string @type = default) + { + return new UnknownTurnDetection(new TurnDetectionType(@type), additionalBinaryDataProperties: null); + } + + /// Base model for VAD-based turn detection. + /// + /// Gets or sets the PrefixPaddingMs. + /// Gets or sets the SilenceDurationMs. + /// + /// + /// A new instance for mocking. + public static ServerVadTurnDetection ServerVadTurnDetection(float? threshold = default, int? prefixPaddingMs = default, int? silenceDurationMs = default, EouDetection endOfUtteranceDetection = default, bool? autoTruncate = default) { - return new SessionUpdateSessionAvatarConnecting(ServerEventType.SessionAvatarConnecting, eventId, additionalBinaryDataProperties: null, serverSdp); + return new ServerVadTurnDetection( + TurnDetectionType.ServerVad, + additionalBinaryDataProperties: null, + threshold, + prefixPaddingMs, + silenceDurationMs, + endOfUtteranceDetection, + autoTruncate); + } + + /// + /// Top-level union for end-of-utterance (EOU) semantic detection configuration. + /// Please note this is the abstract base class. The derived classes available for instantiation are: , , and . + /// + /// + /// A new instance for mocking. + public static EouDetection EouDetection(string model = default) + { + return new UnknownEouDetection(model.ToEOUDetectionModel(), additionalBinaryDataProperties: null); + } + + /// Azure semantic end-of-utterance detection (default). + /// + /// Gets or sets the Timeout. + /// A new instance for mocking. 
+ public static AzureSemanticEouDetection AzureSemanticEouDetection(float? threshold = default, float? timeoutMs = default) + { + return new AzureSemanticEouDetection(EOUDetectionModel.SemanticDetectionV1, additionalBinaryDataProperties: null, threshold, timeoutMs); + } + + /// Azure semantic end-of-utterance detection (English-optimized). + /// + /// + /// A new instance for mocking. + public static AzureSemanticEnEouDetection AzureSemanticEnEouDetection(float? threshold = default, float? timeoutMs = default) + { + return new AzureSemanticEnEouDetection(EOUDetectionModel.SemanticDetectionV1En, additionalBinaryDataProperties: null, threshold, timeoutMs); + } + + /// Azure semantic end-of-utterance detection (multilingual). + /// + /// + /// A new instance for mocking. + public static AzureSemanticMultilingualEouDetection AzureSemanticMultilingualEouDetection(float? threshold = default, float? timeoutMs = default) + { + return new AzureSemanticMultilingualEouDetection(EOUDetectionModel.SemanticDetectionV1Multilingual, additionalBinaryDataProperties: null, threshold, timeoutMs); + } + + /// Server Speech Detection (Azure semantic VAD, default variant). + /// + /// Gets or sets the PrefixPaddingMs. + /// Gets or sets the SilenceDurationMs. + /// + /// Gets or sets the SpeechDurationMs. + /// + /// + /// + /// A new instance for mocking. + public static AzureSemanticVadTurnDetection AzureSemanticVadTurnDetection(float? threshold = default, int? prefixPaddingMs = default, int? silenceDurationMs = default, EouDetection endOfUtteranceDetection = default, int? speechDurationMs = default, bool? removeFillerWords = default, IEnumerable languages = default, bool? 
autoTruncate = default) + { + languages ??= new ChangeTrackingList(); + + return new AzureSemanticVadTurnDetection( + TurnDetectionType.AzureSemanticVad, + additionalBinaryDataProperties: null, + threshold, + prefixPaddingMs, + silenceDurationMs, + endOfUtteranceDetection, + speechDurationMs, + removeFillerWords, + languages.ToList(), + autoTruncate); + } + + /// Server Speech Detection (Azure semantic VAD, English-only). + /// + /// Gets or sets the PrefixPaddingMs. + /// Gets or sets the SilenceDurationMs. + /// + /// Gets or sets the SpeechDurationMs. + /// + /// + /// A new instance for mocking. + public static AzureSemanticVadEnTurnDetection AzureSemanticVadEnTurnDetection(float? threshold = default, int? prefixPaddingMs = default, int? silenceDurationMs = default, EouDetection endOfUtteranceDetection = default, int? speechDurationMs = default, bool? removeFillerWords = default, bool? autoTruncate = default) + { + return new AzureSemanticVadEnTurnDetection( + TurnDetectionType.AzureSemanticVadEn, + additionalBinaryDataProperties: null, + threshold, + prefixPaddingMs, + silenceDurationMs, + endOfUtteranceDetection, + speechDurationMs, + removeFillerWords, + autoTruncate); + } + + /// Server Speech Detection (Azure semantic VAD). + /// + /// Gets or sets the PrefixPaddingMs. + /// Gets or sets the SilenceDurationMs. + /// + /// Gets or sets the SpeechDurationMs. + /// + /// + /// + /// A new instance for mocking. + public static AzureSemanticVadMultilingualTurnDetection AzureSemanticVadMultilingualTurnDetection(float? threshold = default, int? prefixPaddingMs = default, int? silenceDurationMs = default, EouDetection endOfUtteranceDetection = default, int? speechDurationMs = default, bool? removeFillerWords = default, IEnumerable languages = default, bool? 
autoTruncate = default) + { + languages ??= new ChangeTrackingList(); + + return new AzureSemanticVadMultilingualTurnDetection( + TurnDetectionType.AzureSemanticVadMultilingual, + additionalBinaryDataProperties: null, + threshold, + prefixPaddingMs, + silenceDurationMs, + endOfUtteranceDetection, + speechDurationMs, + removeFillerWords, + languages.ToList(), + autoTruncate); } /// /// A voicelive server event. - /// Please note this is the abstract base class. The derived classes available for instantiation are: , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , and . + /// Please note this is the abstract base class. The derived classes available for instantiation are: , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , and . /// /// The type of event. /// @@ -785,6 +596,15 @@ public static SessionUpdate SessionUpdate(string @type = default, string eventId return new UnknownSessionUpdate(new ServerEventType(@type), eventId, additionalBinaryDataProperties: null); } + /// Sent when the server is in the process of establishing an avatar media connection and provides its SDP answer. + /// + /// The server's SDP answer for the avatar connection. + /// A new instance for mocking. + public static SessionUpdateAvatarConnecting SessionUpdateAvatarConnecting(string eventId = default, string serverSdp = default) + { + return new SessionUpdateAvatarConnecting(ServerEventType.SessionAvatarConnecting, eventId, additionalBinaryDataProperties: null, serverSdp); + } + /// /// Returned when a Session is created. Emitted automatically when a new /// connection is established as the first server event. This event will contain @@ -803,11 +623,13 @@ public static SessionUpdateSessionCreated SessionUpdateSessionCreated(string eve /// /// /// - /// - /// + /// + /// Input audio sampling rate in Hz. 
Available values: + /// - For pcm16: 8000, 16000, 24000 + /// - For g711_alaw/g711_ulaw: 8000 + /// /// /// - /// /// /// /// @@ -815,13 +637,14 @@ public static SessionUpdateSessionCreated SessionUpdateSessionCreated(string eve /// /// /// - /// /// /// /// + /// + /// /// /// A new instance for mocking. - public static VoiceLiveSessionResponse VoiceLiveSessionResponse(string model = default, IEnumerable modalities = default, AnimationOptions animation = default, string instructions = default, InputAudio inputAudio = default, int? inputAudioSamplingRate = default, AudioFormat? inputAudioFormat = default, AudioFormat? outputAudioFormat = default, TurnDetection turnDetection = default, AudioNoiseReduction inputAudioNoiseReduction = default, AudioEchoCancellation inputAudioEchoCancellation = default, AvatarConfiguration avatar = default, AudioInputTranscriptionSettings inputAudioTranscription = default, IEnumerable outputAudioTimestampTypes = default, IEnumerable tools = default, float? temperature = default, RespondingAgentOptions agent = default, BinaryData voiceInternal = default, BinaryData maxResponseOutputTokens = default, BinaryData toolChoice = default, string id = default) + public static VoiceLiveSessionResponse VoiceLiveSessionResponse(string model = default, IEnumerable modalities = default, AnimationOptions animation = default, string instructions = default, int? inputAudioSamplingRate = default, InputAudioFormat? inputAudioFormat = default, OutputAudioFormat? outputAudioFormat = default, AudioNoiseReduction inputAudioNoiseReduction = default, AudioEchoCancellation inputAudioEchoCancellation = default, AvatarConfiguration avatar = default, AudioInputTranscriptionSettings inputAudioTranscription = default, IEnumerable outputAudioTimestampTypes = default, IEnumerable tools = default, float? 
temperature = default, BinaryData voiceInternal = default, BinaryData maxResponseOutputTokens = default, BinaryData toolChoice = default, BinaryData turnDetection = default, RespondingAgentOptions agent = default, string id = default) { modalities ??= new ChangeTrackingList(); outputAudioTimestampTypes ??= new ChangeTrackingList(); @@ -832,11 +655,9 @@ public static VoiceLiveSessionResponse VoiceLiveSessionResponse(string model = d modalities.ToList(), animation, instructions, - inputAudio, inputAudioSamplingRate, inputAudioFormat, outputAudioFormat, - turnDetection, inputAudioNoiseReduction, inputAudioEchoCancellation, avatar, @@ -844,14 +665,33 @@ public static VoiceLiveSessionResponse VoiceLiveSessionResponse(string model = d outputAudioTimestampTypes.ToList(), tools.ToList(), temperature, - agent, voiceInternal, maxResponseOutputTokens, toolChoice, + turnDetection, additionalBinaryDataProperties: null, + agent, id); } + /// The RespondingAgentOptions. + /// + /// + /// + /// + /// + /// A new instance for mocking. + public static RespondingAgentOptions RespondingAgentOptions(string @type = default, string name = default, string description = default, string agentId = default, string threadId = default) + { + return new RespondingAgentOptions( + @type, + name, + description, + agentId, + threadId, + additionalBinaryDataProperties: null); + } + /// /// Returned when a session is updated with a `session.update` event, unless /// there is an error. @@ -1272,11 +1112,7 @@ public static SessionUpdateInputAudioBufferSpeechStarted SessionUpdateInputAudio /// event with the user message item that is created from the audio buffer. /// /// - /// - /// Milliseconds since the session started when speech stopped. This will - /// correspond to the end of audio sent to the model, and thus includes the - /// `min_silence_duration_ms` configured in the Session. - /// + /// /// The ID of the user message item that will be created. /// A new instance for mocking. 
public static SessionUpdateInputAudioBufferSpeechStopped SessionUpdateInputAudioBufferSpeechStopped(string eventId = default, int audioEndMs = default, string itemId = default) @@ -1304,7 +1140,7 @@ public static SessionUpdateResponseCreated SessionUpdateResponseCreated(string e /// A new instance for mocking. public static ResponseStatusDetails ResponseStatusDetails(string @type = default) { - return new UnknownResponseStatusDetails(@type, additionalBinaryDataProperties: null); + return new UnknownResponseStatusDetails(new VoiceLiveResponseStatus(@type), additionalBinaryDataProperties: null); } /// Details for a cancelled response. @@ -1312,7 +1148,7 @@ public static ResponseStatusDetails ResponseStatusDetails(string @type = default /// A new instance for mocking. public static ResponseCancelledDetails ResponseCancelledDetails(ResponseCancelledDetailsReason reason = default) { - return new ResponseCancelledDetails("cancelled", additionalBinaryDataProperties: null, reason); + return new ResponseCancelledDetails(VoiceLiveResponseStatus.Cancelled, additionalBinaryDataProperties: null, reason); } /// Details for an incomplete response. @@ -1320,7 +1156,7 @@ public static ResponseCancelledDetails ResponseCancelledDetails(ResponseCancelle /// A new instance for mocking. public static ResponseIncompleteDetails ResponseIncompleteDetails(ResponseIncompleteDetailsReason reason = default) { - return new ResponseIncompleteDetails("incomplete", additionalBinaryDataProperties: null, reason); + return new ResponseIncompleteDetails(VoiceLiveResponseStatus.Incomplete, additionalBinaryDataProperties: null, reason); } /// Details for a failed response. @@ -1328,7 +1164,7 @@ public static ResponseIncompleteDetails ResponseIncompleteDetails(ResponseIncomp /// A new instance for mocking. 
public static ResponseFailedDetails ResponseFailedDetails(BinaryData error = default) { - return new ResponseFailedDetails("failed", additionalBinaryDataProperties: null, error); + return new ResponseFailedDetails(VoiceLiveResponseStatus.Failed, additionalBinaryDataProperties: null, error); } /// Overall usage statistics for a response. @@ -1655,48 +1491,14 @@ public static SessionUpdateResponseAnimationBlendshapeDone SessionUpdateResponse outputIndex); } - /// Represents an emotion hypothesis detected from response audio with multiple candidates. - /// - /// - /// - /// - /// - /// - /// - /// A new instance for mocking. - public static SessionUpdateResponseEmotionHypothesis SessionUpdateResponseEmotionHypothesis(string eventId = default, string emotion = default, IEnumerable candidates = default, int audioOffsetMs = default, int audioDurationMs = default, string responseId = default, string itemId = default) - { - candidates ??= new ChangeTrackingList(); - - return new SessionUpdateResponseEmotionHypothesis( - ServerEventType.ResponseEmotionHypothesis, - eventId, - additionalBinaryDataProperties: null, - emotion, - candidates.ToList(), - audioOffsetMs, - audioDurationMs, - responseId, - itemId); - } - - /// The EmotionCandidate. - /// - /// - /// A new instance for mocking. - public static EmotionCandidate EmotionCandidate(string emotion = default, float confidence = default) - { - return new EmotionCandidate(emotion, confidence, additionalBinaryDataProperties: null); - } - /// Represents a word-level audio timestamp delta for a response. /// /// /// /// /// - /// - /// + /// Gets the AudioOffsetMs. + /// Gets the AudioDurationMs. /// /// /// A new instance for mocking. @@ -1741,7 +1543,7 @@ public static SessionUpdateResponseAudioTimestampDone SessionUpdateResponseAudio /// /// /// - /// + /// Gets the AudioOffsetMs. /// /// A new instance for mocking. 
public static SessionUpdateResponseAnimationVisemeDelta SessionUpdateResponseAnimationVisemeDelta(string eventId = default, string responseId = default, string itemId = default, int outputIndex = default, int contentIndex = default, int audioOffsetMs = default, int visemeId = default) diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Generated/VoiceLiveResponse.Serialization.cs b/sdk/ai/Azure.AI.VoiceLive/src/Generated/VoiceLiveResponse.Serialization.cs index db39eb8c3f2f..916db596b709 100644 --- a/sdk/ai/Azure.AI.VoiceLive/src/Generated/VoiceLiveResponse.Serialization.cs +++ b/sdk/ai/Azure.AI.VoiceLive/src/Generated/VoiceLiveResponse.Serialization.cs @@ -46,7 +46,7 @@ protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWrit if (Optional.IsDefined(Status)) { writer.WritePropertyName("status"u8); - writer.WriteStringValue(Status.Value.ToSerialString()); + writer.WriteStringValue(Status.Value.ToString()); } if (Optional.IsDefined(StatusDetails)) { @@ -168,7 +168,7 @@ internal static VoiceLiveResponse DeserializeVoiceLiveResponse(JsonElement eleme string conversationId = default; BinaryData voiceInternal = default; IList modalitiesInternal = default; - AudioFormat? outputAudioFormat = default; + OutputAudioFormat? outputAudioFormat = default; float? 
temperature = default; BinaryData maxOutputTokens = default; IDictionary additionalBinaryDataProperties = new ChangeTrackingDictionary(); @@ -190,7 +190,7 @@ internal static VoiceLiveResponse DeserializeVoiceLiveResponse(JsonElement eleme { continue; } - status = prop.Value.GetString().ToVoiceLiveResponseStatus(); + status = new VoiceLiveResponseStatus(prop.Value.GetString()); continue; } if (prop.NameEquals("status_details"u8)) @@ -259,7 +259,7 @@ internal static VoiceLiveResponse DeserializeVoiceLiveResponse(JsonElement eleme { continue; } - outputAudioFormat = new AudioFormat(prop.Value.GetString()); + outputAudioFormat = new OutputAudioFormat(prop.Value.GetString()); continue; } if (prop.NameEquals("temperature"u8)) diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Generated/VoiceLiveResponse.cs b/sdk/ai/Azure.AI.VoiceLive/src/Generated/VoiceLiveResponse.cs index e2157eff6754..c4b37ad5b193 100644 --- a/sdk/ai/Azure.AI.VoiceLive/src/Generated/VoiceLiveResponse.cs +++ b/sdk/ai/Azure.AI.VoiceLive/src/Generated/VoiceLiveResponse.cs @@ -51,7 +51,7 @@ internal VoiceLiveResponse() /// supported voice identifiers and configurations. /// To assign an object to this property use . To assign an already formatted json string to this property use . /// Supported types: - /// . . . . . + /// . . . /// Examples: /// BinaryData.FromObjectAsJson("foo"). Creates a payload of "foo". BinaryData.FromString("\"foo\""). Creates a payload of "foo". BinaryData.FromObjectAsJson(new { key = "value" }). Creates a payload of { "key": "value" }. BinaryData.FromString("{\"key\": \"value\"}"). Creates a payload of { "key": "value" }. /// @@ -60,7 +60,7 @@ internal VoiceLiveResponse() /// Sampling temperature for the model, limited to [0.6, 1.2]. Defaults to 0.8. /// /// Keeps track of any properties unknown to the library. - internal VoiceLiveResponse(string id, string @object, VoiceLiveResponseStatus? 
status, ResponseStatusDetails statusDetails, IList output, ResponseTokenStatistics usage, string conversationId, BinaryData voiceInternal, IList modalitiesInternal, AudioFormat? outputAudioFormat, float? temperature, BinaryData maxOutputTokens, IDictionary additionalBinaryDataProperties) + internal VoiceLiveResponse(string id, string @object, VoiceLiveResponseStatus? status, ResponseStatusDetails statusDetails, IList output, ResponseTokenStatistics usage, string conversationId, BinaryData voiceInternal, IList modalitiesInternal, OutputAudioFormat? outputAudioFormat, float? temperature, BinaryData maxOutputTokens, IDictionary additionalBinaryDataProperties) { Id = id; Object = @object; @@ -115,7 +115,7 @@ internal VoiceLiveResponse(string id, string @object, VoiceLiveResponseStatus? s public string ConversationId { get; } /// The format of output audio. Options are `pcm16`, `g711_ulaw`, or `g711_alaw`. - public AudioFormat? OutputAudioFormat { get; } + public OutputAudioFormat? OutputAudioFormat { get; } /// Sampling temperature for the model, limited to [0.6, 1.2]. Defaults to 0.8. public float? Temperature { get; } diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Generated/VoiceLiveResponseStatus.Serialization.cs b/sdk/ai/Azure.AI.VoiceLive/src/Generated/VoiceLiveResponseStatus.Serialization.cs deleted file mode 100644 index ff663583a365..000000000000 --- a/sdk/ai/Azure.AI.VoiceLive/src/Generated/VoiceLiveResponseStatus.Serialization.cs +++ /dev/null @@ -1,51 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; - -namespace Azure.AI.VoiceLive -{ - internal static partial class VoiceLiveResponseStatusExtensions - { - /// The value to serialize. 
- public static string ToSerialString(this VoiceLiveResponseStatus value) => value switch - { - VoiceLiveResponseStatus.Completed => "completed", - VoiceLiveResponseStatus.Cancelled => "cancelled", - VoiceLiveResponseStatus.Failed => "failed", - VoiceLiveResponseStatus.Incomplete => "incomplete", - VoiceLiveResponseStatus.InProgress => "in_progress", - _ => throw new ArgumentOutOfRangeException(nameof(value), value, "Unknown VoiceLiveResponseStatus value.") - }; - - /// The value to deserialize. - public static VoiceLiveResponseStatus ToVoiceLiveResponseStatus(this string value) - { - if (StringComparer.OrdinalIgnoreCase.Equals(value, "completed")) - { - return VoiceLiveResponseStatus.Completed; - } - if (StringComparer.OrdinalIgnoreCase.Equals(value, "cancelled")) - { - return VoiceLiveResponseStatus.Cancelled; - } - if (StringComparer.OrdinalIgnoreCase.Equals(value, "failed")) - { - return VoiceLiveResponseStatus.Failed; - } - if (StringComparer.OrdinalIgnoreCase.Equals(value, "incomplete")) - { - return VoiceLiveResponseStatus.Incomplete; - } - if (StringComparer.OrdinalIgnoreCase.Equals(value, "in_progress")) - { - return VoiceLiveResponseStatus.InProgress; - } - throw new ArgumentOutOfRangeException(nameof(value), value, "Unknown VoiceLiveResponseStatus value."); - } - } -} diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Generated/VoiceLiveResponseStatus.cs b/sdk/ai/Azure.AI.VoiceLive/src/Generated/VoiceLiveResponseStatus.cs index 7eca1b44a2da..01ef57eccdb2 100644 --- a/sdk/ai/Azure.AI.VoiceLive/src/Generated/VoiceLiveResponseStatus.cs +++ b/sdk/ai/Azure.AI.VoiceLive/src/Generated/VoiceLiveResponseStatus.cs @@ -5,20 +5,76 @@ #nullable disable +using System; +using System.ComponentModel; + namespace Azure.AI.VoiceLive { /// Terminal status of a response. - public enum VoiceLiveResponseStatus + public readonly partial struct VoiceLiveResponseStatus : IEquatable { - /// Completed. - Completed, - /// Cancelled. - Cancelled, - /// Failed. - Failed, - /// Incomplete. 
- Incomplete, - /// InProgress. - InProgress + private readonly string _value; + private const string CompletedValue = "completed"; + private const string CancelledValue = "cancelled"; + private const string FailedValue = "failed"; + private const string IncompleteValue = "incomplete"; + private const string InProgressValue = "in_progress"; + + /// Initializes a new instance of . + /// The value. + /// is null. + public VoiceLiveResponseStatus(string value) + { + Argument.AssertNotNull(value, nameof(value)); + + _value = value; + } + + /// Gets the Completed. + public static VoiceLiveResponseStatus Completed { get; } = new VoiceLiveResponseStatus(CompletedValue); + + /// Gets the Cancelled. + public static VoiceLiveResponseStatus Cancelled { get; } = new VoiceLiveResponseStatus(CancelledValue); + + /// Gets the Failed. + public static VoiceLiveResponseStatus Failed { get; } = new VoiceLiveResponseStatus(FailedValue); + + /// Gets the Incomplete. + public static VoiceLiveResponseStatus Incomplete { get; } = new VoiceLiveResponseStatus(IncompleteValue); + + /// Gets the InProgress. + public static VoiceLiveResponseStatus InProgress { get; } = new VoiceLiveResponseStatus(InProgressValue); + + /// Determines if two values are the same. + /// The left value to compare. + /// The right value to compare. + public static bool operator ==(VoiceLiveResponseStatus left, VoiceLiveResponseStatus right) => left.Equals(right); + + /// Determines if two values are not the same. + /// The left value to compare. + /// The right value to compare. + public static bool operator !=(VoiceLiveResponseStatus left, VoiceLiveResponseStatus right) => !left.Equals(right); + + /// Converts a string to a . + /// The value. + public static implicit operator VoiceLiveResponseStatus(string value) => new VoiceLiveResponseStatus(value); + + /// Converts a string to a . + /// The value. + public static implicit operator VoiceLiveResponseStatus?(string value) => value == null ? 
null : new VoiceLiveResponseStatus(value); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override bool Equals(object obj) => obj is VoiceLiveResponseStatus other && Equals(other); + + /// + public bool Equals(VoiceLiveResponseStatus other) => string.Equals(_value, other._value, StringComparison.InvariantCultureIgnoreCase); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override int GetHashCode() => _value != null ? StringComparer.InvariantCultureIgnoreCase.GetHashCode(_value) : 0; + + /// + public override string ToString() => _value; } } diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Generated/VoiceLiveSessionOptions.Serialization.cs b/sdk/ai/Azure.AI.VoiceLive/src/Generated/VoiceLiveSessionOptions.Serialization.cs index d732134d2008..308d91985930 100644 --- a/sdk/ai/Azure.AI.VoiceLive/src/Generated/VoiceLiveSessionOptions.Serialization.cs +++ b/sdk/ai/Azure.AI.VoiceLive/src/Generated/VoiceLiveSessionOptions.Serialization.cs @@ -58,11 +58,6 @@ protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWrit writer.WritePropertyName("instructions"u8); writer.WriteStringValue(Instructions); } - if (Optional.IsDefined(InputAudio)) - { - writer.WritePropertyName("input_audio"u8); - writer.WriteObjectValue(InputAudio, options); - } if (Optional.IsDefined(InputAudioSamplingRate)) { writer.WritePropertyName("input_audio_sampling_rate"u8); @@ -78,11 +73,6 @@ protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWrit writer.WritePropertyName("output_audio_format"u8); writer.WriteStringValue(OutputAudioFormat.Value.ToString()); } - if (Optional.IsDefined(TurnDetection)) - { - writer.WritePropertyName("turn_detection"u8); - writer.WriteObjectValue(TurnDetection, options); - } if (Optional.IsDefined(InputAudioNoiseReduction)) { writer.WritePropertyName("input_audio_noise_reduction"u8); @@ -128,11 +118,6 @@ protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWrit 
writer.WritePropertyName("temperature"u8); writer.WriteNumberValue(Temperature.Value); } - if (Optional.IsDefined(Agent)) - { - writer.WritePropertyName("agent"u8); - writer.WriteObjectValue(Agent, options); - } if (Optional.IsDefined(VoiceInternal)) { writer.WritePropertyName("voice"u8); @@ -167,6 +152,18 @@ protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWrit { JsonSerializer.Serialize(writer, document.RootElement); } +#endif + } + if (Optional.IsDefined(_turnDetection)) + { + writer.WritePropertyName("turn_detection"u8); +#if NET6_0_OR_GREATER + writer.WriteRawValue(_turnDetection); +#else + using (JsonDocument document = JsonDocument.Parse(_turnDetection)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } #endif } if (options.Format != "W" && _additionalBinaryDataProperties != null) @@ -215,11 +212,9 @@ internal static VoiceLiveSessionOptions DeserializeVoiceLiveSessionOptions(JsonE IList modalities = default; AnimationOptions animation = default; string instructions = default; - InputAudio inputAudio = default; int? inputAudioSamplingRate = default; - AudioFormat? inputAudioFormat = default; - AudioFormat? outputAudioFormat = default; - TurnDetection turnDetection = default; + InputAudioFormat? inputAudioFormat = default; + OutputAudioFormat? outputAudioFormat = default; AudioNoiseReduction inputAudioNoiseReduction = default; AudioEchoCancellation inputAudioEchoCancellation = default; AvatarConfiguration avatar = default; @@ -227,10 +222,10 @@ internal static VoiceLiveSessionOptions DeserializeVoiceLiveSessionOptions(JsonE IList outputAudioTimestampTypes = default; IList tools = default; float? 
temperature = default; - RespondingAgentOptions agent = default; BinaryData voiceInternal = default; BinaryData maxResponseOutputTokens = default; BinaryData toolChoice = default; + BinaryData turnDetection = default; IDictionary additionalBinaryDataProperties = new ChangeTrackingDictionary(); foreach (var prop in element.EnumerateObject()) { @@ -267,15 +262,6 @@ internal static VoiceLiveSessionOptions DeserializeVoiceLiveSessionOptions(JsonE instructions = prop.Value.GetString(); continue; } - if (prop.NameEquals("input_audio"u8)) - { - if (prop.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - inputAudio = InputAudio.DeserializeInputAudio(prop.Value, options); - continue; - } if (prop.NameEquals("input_audio_sampling_rate"u8)) { if (prop.Value.ValueKind == JsonValueKind.Null) @@ -291,7 +277,7 @@ internal static VoiceLiveSessionOptions DeserializeVoiceLiveSessionOptions(JsonE { continue; } - inputAudioFormat = new AudioFormat(prop.Value.GetString()); + inputAudioFormat = new InputAudioFormat(prop.Value.GetString()); continue; } if (prop.NameEquals("output_audio_format"u8)) @@ -300,17 +286,7 @@ internal static VoiceLiveSessionOptions DeserializeVoiceLiveSessionOptions(JsonE { continue; } - outputAudioFormat = new AudioFormat(prop.Value.GetString()); - continue; - } - if (prop.NameEquals("turn_detection"u8)) - { - if (prop.Value.ValueKind == JsonValueKind.Null) - { - turnDetection = null; - continue; - } - turnDetection = TurnDetection.DeserializeTurnDetection(prop.Value, options); + outputAudioFormat = new OutputAudioFormat(prop.Value.GetString()); continue; } if (prop.NameEquals("input_audio_noise_reduction"u8)) @@ -386,40 +362,40 @@ internal static VoiceLiveSessionOptions DeserializeVoiceLiveSessionOptions(JsonE temperature = prop.Value.GetSingle(); continue; } - if (prop.NameEquals("agent"u8)) + if (prop.NameEquals("voice"u8)) { if (prop.Value.ValueKind == JsonValueKind.Null) { continue; } - agent = 
RespondingAgentOptions.DeserializeRespondingAgentOptions(prop.Value, options); + voiceInternal = BinaryData.FromString(prop.Value.GetRawText()); continue; } - if (prop.NameEquals("voice"u8)) + if (prop.NameEquals("max_response_output_tokens"u8)) { if (prop.Value.ValueKind == JsonValueKind.Null) { continue; } - voiceInternal = BinaryData.FromString(prop.Value.GetRawText()); + maxResponseOutputTokens = BinaryData.FromString(prop.Value.GetRawText()); continue; } - if (prop.NameEquals("max_response_output_tokens"u8)) + if (prop.NameEquals("tool_choice"u8)) { if (prop.Value.ValueKind == JsonValueKind.Null) { continue; } - maxResponseOutputTokens = BinaryData.FromString(prop.Value.GetRawText()); + toolChoice = BinaryData.FromString(prop.Value.GetRawText()); continue; } - if (prop.NameEquals("tool_choice"u8)) + if (prop.NameEquals("turn_detection"u8)) { if (prop.Value.ValueKind == JsonValueKind.Null) { continue; } - toolChoice = BinaryData.FromString(prop.Value.GetRawText()); + turnDetection = BinaryData.FromString(prop.Value.GetRawText()); continue; } if (options.Format != "W") @@ -432,11 +408,9 @@ internal static VoiceLiveSessionOptions DeserializeVoiceLiveSessionOptions(JsonE modalities ?? new ChangeTrackingList(), animation, instructions, - inputAudio, inputAudioSamplingRate, inputAudioFormat, outputAudioFormat, - turnDetection, inputAudioNoiseReduction, inputAudioEchoCancellation, avatar, @@ -444,10 +418,10 @@ internal static VoiceLiveSessionOptions DeserializeVoiceLiveSessionOptions(JsonE outputAudioTimestampTypes ?? new ChangeTrackingList(), tools ?? 
new ChangeTrackingList(), temperature, - agent, voiceInternal, maxResponseOutputTokens, toolChoice, + turnDetection, additionalBinaryDataProperties); } diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Generated/VoiceLiveSessionOptions.cs b/sdk/ai/Azure.AI.VoiceLive/src/Generated/VoiceLiveSessionOptions.cs index 6550bbe4d3b9..5ea8505dd8a4 100644 --- a/sdk/ai/Azure.AI.VoiceLive/src/Generated/VoiceLiveSessionOptions.cs +++ b/sdk/ai/Azure.AI.VoiceLive/src/Generated/VoiceLiveSessionOptions.cs @@ -29,11 +29,13 @@ public VoiceLiveSessionOptions() /// /// /// - /// - /// + /// + /// Input audio sampling rate in Hz. Available values: + /// - For pcm16: 8000, 16000, 24000 + /// - For g711_alaw/g711_ulaw: 8000 + /// /// /// - /// /// /// /// @@ -41,22 +43,20 @@ public VoiceLiveSessionOptions() /// /// /// - /// /// /// /// + /// /// Keeps track of any properties unknown to the library. - internal VoiceLiveSessionOptions(string model, IList modalities, AnimationOptions animation, string instructions, InputAudio inputAudio, int? inputAudioSamplingRate, AudioFormat? inputAudioFormat, AudioFormat? outputAudioFormat, TurnDetection turnDetection, AudioNoiseReduction inputAudioNoiseReduction, AudioEchoCancellation inputAudioEchoCancellation, AvatarConfiguration avatar, AudioInputTranscriptionSettings inputAudioTranscription, IList outputAudioTimestampTypes, IList tools, float? temperature, RespondingAgentOptions agent, BinaryData voiceInternal, BinaryData maxResponseOutputTokens, BinaryData toolChoice, IDictionary additionalBinaryDataProperties) + internal VoiceLiveSessionOptions(string model, IList modalities, AnimationOptions animation, string instructions, int? inputAudioSamplingRate, InputAudioFormat? inputAudioFormat, OutputAudioFormat? 
outputAudioFormat, AudioNoiseReduction inputAudioNoiseReduction, AudioEchoCancellation inputAudioEchoCancellation, AvatarConfiguration avatar, AudioInputTranscriptionSettings inputAudioTranscription, IList outputAudioTimestampTypes, IList tools, float? temperature, BinaryData voiceInternal, BinaryData maxResponseOutputTokens, BinaryData toolChoice, BinaryData turnDetection, IDictionary additionalBinaryDataProperties) { Model = model; Modalities = modalities; Animation = animation; Instructions = instructions; - InputAudio = inputAudio; InputAudioSamplingRate = inputAudioSamplingRate; InputAudioFormat = inputAudioFormat; OutputAudioFormat = outputAudioFormat; - TurnDetection = turnDetection; InputAudioNoiseReduction = inputAudioNoiseReduction; InputAudioEchoCancellation = inputAudioEchoCancellation; Avatar = avatar; @@ -64,10 +64,10 @@ internal VoiceLiveSessionOptions(string model, IList modalities, OutputAudioTimestampTypes = outputAudioTimestampTypes; Tools = tools; Temperature = temperature; - Agent = agent; VoiceInternal = voiceInternal; _maxResponseOutputTokens = maxResponseOutputTokens; _toolChoice = toolChoice; + _turnDetection = turnDetection; _additionalBinaryDataProperties = additionalBinaryDataProperties; } @@ -83,20 +83,18 @@ internal VoiceLiveSessionOptions(string model, IList modalities, /// Gets or sets the Instructions. public string Instructions { get; set; } - /// Gets or sets the InputAudio. - public InputAudio InputAudio { get; set; } - - /// Gets or sets the InputAudioSamplingRate. + /// + /// Input audio sampling rate in Hz. Available values: + /// - For pcm16: 8000, 16000, 24000 + /// - For g711_alaw/g711_ulaw: 8000 + /// public int? InputAudioSamplingRate { get; set; } /// Gets or sets the InputAudioFormat. - public AudioFormat? InputAudioFormat { get; set; } + public InputAudioFormat? InputAudioFormat { get; set; } /// Gets or sets the OutputAudioFormat. - public AudioFormat? 
OutputAudioFormat { get; set; } - - /// Gets or sets the TurnDetection. - public TurnDetection TurnDetection { get; set; } + public OutputAudioFormat? OutputAudioFormat { get; set; } /// Gets or sets the InputAudioNoiseReduction. public AudioNoiseReduction InputAudioNoiseReduction { get; set; } @@ -118,8 +116,5 @@ internal VoiceLiveSessionOptions(string model, IList modalities, /// Gets or sets the Temperature. public float? Temperature { get; set; } - - /// Gets or sets the Agent. - public RespondingAgentOptions Agent { get; set; } } } diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Generated/VoiceLiveSessionResponse.Serialization.cs b/sdk/ai/Azure.AI.VoiceLive/src/Generated/VoiceLiveSessionResponse.Serialization.cs index b359ba6fc318..7cd88ad60b15 100644 --- a/sdk/ai/Azure.AI.VoiceLive/src/Generated/VoiceLiveSessionResponse.Serialization.cs +++ b/sdk/ai/Azure.AI.VoiceLive/src/Generated/VoiceLiveSessionResponse.Serialization.cs @@ -34,6 +34,11 @@ protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWri throw new FormatException($"The model {nameof(VoiceLiveSessionResponse)} does not support writing '{format}' format."); } base.JsonModelWriteCore(writer, options); + if (Optional.IsDefined(Agent)) + { + writer.WritePropertyName("agent"u8); + writer.WriteObjectValue(Agent, options); + } if (Optional.IsDefined(Id)) { writer.WritePropertyName("id"u8); @@ -70,11 +75,9 @@ internal static VoiceLiveSessionResponse DeserializeVoiceLiveSessionResponse(Jso IList modalities = default; AnimationOptions animation = default; string instructions = default; - InputAudio inputAudio = default; int? inputAudioSamplingRate = default; - AudioFormat? inputAudioFormat = default; - AudioFormat? outputAudioFormat = default; - TurnDetection turnDetection = default; + InputAudioFormat? inputAudioFormat = default; + OutputAudioFormat? 
outputAudioFormat = default; AudioNoiseReduction inputAudioNoiseReduction = default; AudioEchoCancellation inputAudioEchoCancellation = default; AvatarConfiguration avatar = default; @@ -82,11 +85,12 @@ internal static VoiceLiveSessionResponse DeserializeVoiceLiveSessionResponse(Jso IList outputAudioTimestampTypes = default; IList tools = default; float? temperature = default; - RespondingAgentOptions agent = default; BinaryData voiceInternal = default; BinaryData maxResponseOutputTokens = default; BinaryData toolChoice = default; + BinaryData turnDetection = default; IDictionary additionalBinaryDataProperties = new ChangeTrackingDictionary(); + RespondingAgentOptions agent = default; string id = default; foreach (var prop in element.EnumerateObject()) { @@ -123,15 +127,6 @@ internal static VoiceLiveSessionResponse DeserializeVoiceLiveSessionResponse(Jso instructions = prop.Value.GetString(); continue; } - if (prop.NameEquals("input_audio"u8)) - { - if (prop.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - inputAudio = InputAudio.DeserializeInputAudio(prop.Value, options); - continue; - } if (prop.NameEquals("input_audio_sampling_rate"u8)) { if (prop.Value.ValueKind == JsonValueKind.Null) @@ -147,7 +142,7 @@ internal static VoiceLiveSessionResponse DeserializeVoiceLiveSessionResponse(Jso { continue; } - inputAudioFormat = new AudioFormat(prop.Value.GetString()); + inputAudioFormat = new InputAudioFormat(prop.Value.GetString()); continue; } if (prop.NameEquals("output_audio_format"u8)) @@ -156,17 +151,7 @@ internal static VoiceLiveSessionResponse DeserializeVoiceLiveSessionResponse(Jso { continue; } - outputAudioFormat = new AudioFormat(prop.Value.GetString()); - continue; - } - if (prop.NameEquals("turn_detection"u8)) - { - if (prop.Value.ValueKind == JsonValueKind.Null) - { - turnDetection = null; - continue; - } - turnDetection = TurnDetection.DeserializeTurnDetection(prop.Value, options); + outputAudioFormat = new 
OutputAudioFormat(prop.Value.GetString()); continue; } if (prop.NameEquals("input_audio_noise_reduction"u8)) @@ -242,40 +227,49 @@ internal static VoiceLiveSessionResponse DeserializeVoiceLiveSessionResponse(Jso temperature = prop.Value.GetSingle(); continue; } - if (prop.NameEquals("agent"u8)) + if (prop.NameEquals("voice"u8)) { if (prop.Value.ValueKind == JsonValueKind.Null) { continue; } - agent = RespondingAgentOptions.DeserializeRespondingAgentOptions(prop.Value, options); + voiceInternal = BinaryData.FromString(prop.Value.GetRawText()); continue; } - if (prop.NameEquals("voice"u8)) + if (prop.NameEquals("max_response_output_tokens"u8)) { if (prop.Value.ValueKind == JsonValueKind.Null) { continue; } - voiceInternal = BinaryData.FromString(prop.Value.GetRawText()); + maxResponseOutputTokens = BinaryData.FromString(prop.Value.GetRawText()); continue; } - if (prop.NameEquals("max_response_output_tokens"u8)) + if (prop.NameEquals("tool_choice"u8)) { if (prop.Value.ValueKind == JsonValueKind.Null) { continue; } - maxResponseOutputTokens = BinaryData.FromString(prop.Value.GetRawText()); + toolChoice = BinaryData.FromString(prop.Value.GetRawText()); continue; } - if (prop.NameEquals("tool_choice"u8)) + if (prop.NameEquals("turn_detection"u8)) { if (prop.Value.ValueKind == JsonValueKind.Null) { continue; } - toolChoice = BinaryData.FromString(prop.Value.GetRawText()); + turnDetection = BinaryData.FromString(prop.Value.GetRawText()); + continue; + } + if (prop.NameEquals("agent"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + agent = RespondingAgentOptions.DeserializeRespondingAgentOptions(prop.Value, options); continue; } if (prop.NameEquals("id"u8)) @@ -293,11 +287,9 @@ internal static VoiceLiveSessionResponse DeserializeVoiceLiveSessionResponse(Jso modalities ?? 
new ChangeTrackingList(), animation, instructions, - inputAudio, inputAudioSamplingRate, inputAudioFormat, outputAudioFormat, - turnDetection, inputAudioNoiseReduction, inputAudioEchoCancellation, avatar, @@ -305,11 +297,12 @@ internal static VoiceLiveSessionResponse DeserializeVoiceLiveSessionResponse(Jso outputAudioTimestampTypes ?? new ChangeTrackingList(), tools ?? new ChangeTrackingList(), temperature, - agent, voiceInternal, maxResponseOutputTokens, toolChoice, + turnDetection, additionalBinaryDataProperties, + agent, id); } diff --git a/sdk/ai/Azure.AI.VoiceLive/src/Generated/VoiceLiveSessionResponse.cs b/sdk/ai/Azure.AI.VoiceLive/src/Generated/VoiceLiveSessionResponse.cs index dc2a7c597265..7de2c3e7e3d4 100644 --- a/sdk/ai/Azure.AI.VoiceLive/src/Generated/VoiceLiveSessionResponse.cs +++ b/sdk/ai/Azure.AI.VoiceLive/src/Generated/VoiceLiveSessionResponse.cs @@ -23,11 +23,13 @@ internal VoiceLiveSessionResponse() /// /// /// - /// - /// + /// + /// Input audio sampling rate in Hz. Available values: + /// - For pcm16: 8000, 16000, 24000 + /// - For g711_alaw/g711_ulaw: 8000 + /// /// /// - /// /// /// /// @@ -35,17 +37,22 @@ internal VoiceLiveSessionResponse() /// /// /// - /// /// /// /// + /// /// Keeps track of any properties unknown to the library. + /// /// - internal VoiceLiveSessionResponse(string model, IList modalities, AnimationOptions animation, string instructions, InputAudio inputAudio, int? inputAudioSamplingRate, AudioFormat? inputAudioFormat, AudioFormat? outputAudioFormat, TurnDetection turnDetection, AudioNoiseReduction inputAudioNoiseReduction, AudioEchoCancellation inputAudioEchoCancellation, AvatarConfiguration avatar, AudioInputTranscriptionSettings inputAudioTranscription, IList outputAudioTimestampTypes, IList tools, float? 
temperature, RespondingAgentOptions agent, BinaryData voiceInternal, BinaryData maxResponseOutputTokens, BinaryData toolChoice, IDictionary additionalBinaryDataProperties, string id) : base(model, modalities, animation, instructions, inputAudio, inputAudioSamplingRate, inputAudioFormat, outputAudioFormat, turnDetection, inputAudioNoiseReduction, inputAudioEchoCancellation, avatar, inputAudioTranscription, outputAudioTimestampTypes, tools, temperature, agent, voiceInternal, maxResponseOutputTokens, toolChoice, additionalBinaryDataProperties) + internal VoiceLiveSessionResponse(string model, IList modalities, AnimationOptions animation, string instructions, int? inputAudioSamplingRate, InputAudioFormat? inputAudioFormat, OutputAudioFormat? outputAudioFormat, AudioNoiseReduction inputAudioNoiseReduction, AudioEchoCancellation inputAudioEchoCancellation, AvatarConfiguration avatar, AudioInputTranscriptionSettings inputAudioTranscription, IList outputAudioTimestampTypes, IList tools, float? temperature, BinaryData voiceInternal, BinaryData maxResponseOutputTokens, BinaryData toolChoice, BinaryData turnDetection, IDictionary additionalBinaryDataProperties, RespondingAgentOptions agent, string id) : base(model, modalities, animation, instructions, inputAudioSamplingRate, inputAudioFormat, outputAudioFormat, inputAudioNoiseReduction, inputAudioEchoCancellation, avatar, inputAudioTranscription, outputAudioTimestampTypes, tools, temperature, voiceInternal, maxResponseOutputTokens, toolChoice, turnDetection, additionalBinaryDataProperties) { + Agent = agent; Id = id; } + /// Gets the Agent. + public RespondingAgentOptions Agent { get; } + /// Gets the Id. 
public string Id { get; } } diff --git a/sdk/ai/Azure.AI.VoiceLive/tests/FunctionCallingFlowTests.cs b/sdk/ai/Azure.AI.VoiceLive/tests/FunctionCallingFlowTests.cs index ccb68a2c9bf4..cde016e6fd5b 100644 --- a/sdk/ai/Azure.AI.VoiceLive/tests/FunctionCallingFlowTests.cs +++ b/sdk/ai/Azure.AI.VoiceLive/tests/FunctionCallingFlowTests.cs @@ -47,7 +47,7 @@ public async Task ConfigureConversationSession_WithFunctionTools_RegistersAll() options.Tools.Add(new VoiceLiveFunctionDefinition("apply_refund") { Description = "Applies a refund to an order." }); options.Tools.Add(new VoiceLiveFunctionDefinition("escalate_case") { Description = "Escalates the current case." }); - await session.ConfigureConversationSessionAsync(options); + await session.ConfigureSessionAsync(options); var updateMessages = GetSentMessagesOfType(fake, "session.update"); Assert.That(updateMessages, Is.Not.Empty, "Expected a session.update message containing tools."); diff --git a/sdk/ai/Azure.AI.VoiceLive/tests/LiveTests/BasicConversationTests.cs b/sdk/ai/Azure.AI.VoiceLive/tests/LiveTests/BasicConversationTests.cs index 74eca37fc911..1d2aa703c841 100644 --- a/sdk/ai/Azure.AI.VoiceLive/tests/LiveTests/BasicConversationTests.cs +++ b/sdk/ai/Azure.AI.VoiceLive/tests/LiveTests/BasicConversationTests.cs @@ -7,9 +7,10 @@ using System.Text; using System.Threading; using System.Threading.Tasks; +using Azure.AI.VoiceLive.Tests.Infrastructure; using Azure.Core.TestFramework; using Azure.Identity; -using Microsoft.Extensions.Azure; +//using Microsoft.Extensions.Azure; using Microsoft.Extensions.Logging; using NUnit.Framework; @@ -43,7 +44,7 @@ public async Task BasicHelloTest() var options = new VoiceLiveSessionOptions() { Model = "gpt-4o", - InputAudioFormat = AudioFormat.Pcm16 + InputAudioFormat = InputAudioFormat.Pcm16 }; var session = await vlc.StartSessionAsync(options, TimeoutToken).ConfigureAwait(false); @@ -54,7 +55,7 @@ public async Task BasicHelloTest() var sessionCreated = await 
GetNextUpdate(updatesEnum).ConfigureAwait(false); var sessionUpdated = await GetNextUpdate(updatesEnum).ConfigureAwait(false); - Assert.AreEqual(sessionUpdated.Session.InputAudioFormat, AudioFormat.Pcm16); + Assert.AreEqual(sessionUpdated.Session.InputAudioFormat, InputAudioFormat.Pcm16); Assert.AreEqual(sessionCreated.Session.Id, sessionUpdated.Session.Id); Assert.AreEqual(sessionCreated.Session.Model, sessionUpdated.Session.Model); Assert.AreEqual(sessionCreated.Session.Agent, sessionUpdated.Session.Agent); @@ -131,7 +132,7 @@ public async Task BasicToolCallTest() var sessionUpdated = await GetNextUpdate(updatesEnum).ConfigureAwait(false); - Assert.AreEqual(sessionUpdated.Session.InputAudioFormat, AudioFormat.Pcm16); + Assert.AreEqual(sessionUpdated.Session.InputAudioFormat, InputAudioFormat.Pcm16); Assert.AreEqual(sessionCreated.Session.Id, sessionUpdated.Session.Id); Assert.AreEqual(sessionCreated.Session.Model, sessionUpdated.Session.Model); Assert.AreEqual(sessionCreated.Session.Agent, sessionUpdated.Session.Agent); @@ -144,7 +145,7 @@ public async Task BasicToolCallTest() await session.AddItemAsync(new UserMessageItem(new[] { content }), null, TimeoutToken).ConfigureAwait(false); var conversationItemCreated = await GetNextUpdate(updatesEnum).ConfigureAwait(false); - Assert.IsTrue(conversationItemCreated.PreviousItemId == null); + Assert.IsTrue(string.IsNullOrEmpty(conversationItemCreated.PreviousItemId)); var message = SafeCast(conversationItemCreated.Item); Assert.AreEqual(ResponseMessageRole.User, message.Role); Assert.AreEqual(1, message.Content.Count); @@ -158,6 +159,21 @@ public async Task BasicToolCallTest() Assert.IsTrue(responseItems.Count() > 0); responseItems.Insert(0, responseCreated); ValidateResponseUpdates(responseItems, string.Empty); + + var callDone = responseItems.Where((s) => + { + return s is SessionUpdateResponseFunctionCallArgumentsDone; + }); + + Assert.IsTrue(callDone.Count() == 1); + var callInfo = SafeCast(callDone.First()); + + 
await session.AddItemAsync(new FunctionCallOutputItem(callInfo.CallId, "42"), TimeoutToken).ConfigureAwait(false); + var conversationItemCreated2 = await GetNextUpdate(updatesEnum).ConfigureAwait(false); + + await session.StartResponseAsync(TimeoutToken).ConfigureAwait(false); + + var functionResponses = await CollectResponseUpdates(updatesEnum, TimeoutToken).ConfigureAwait(false); } [LiveOnly] @@ -185,7 +201,7 @@ public async Task PrallelToolCallTest() var sessionUpdated = await GetNextUpdate(updatesEnum).ConfigureAwait(false); - Assert.AreEqual(sessionUpdated.Session.InputAudioFormat, AudioFormat.Pcm16); + Assert.AreEqual(sessionUpdated.Session.InputAudioFormat, InputAudioFormat.Pcm16); Assert.AreEqual(sessionCreated.Session.Id, sessionUpdated.Session.Id); Assert.AreEqual(sessionCreated.Session.Model, sessionUpdated.Session.Model); Assert.AreEqual(sessionCreated.Session.Agent, sessionUpdated.Session.Agent); @@ -199,7 +215,6 @@ public async Task PrallelToolCallTest() await session.AddItemAsync(new UserMessageItem(new[] { content1, content2 }), null, TimeoutToken).ConfigureAwait(false); var conversationItemCreated = await GetNextUpdate(updatesEnum).ConfigureAwait(false); - Assert.IsTrue(conversationItemCreated.PreviousItemId == null); var message = SafeCast(conversationItemCreated.Item); Assert.AreEqual(ResponseMessageRole.User, message.Role); Assert.AreEqual(2, message.Content.Count); @@ -216,6 +231,358 @@ public async Task PrallelToolCallTest() Assert.IsTrue(responseItems.Count() > 0); responseItems.Insert(0, responseCreated); ValidateResponseUpdates(responseItems, string.Empty); + + var callDones = responseItems.Where((s) => + { + return s is SessionUpdateResponseFunctionCallArgumentsDone; + }); + Assert.IsTrue(callDones.Count() == 2); + var callInfo1 = SafeCast(callDones.First()); + var callInfo2 = SafeCast(callDones.Last()); + await session.AddItemAsync(new FunctionCallOutputItem(callInfo1.CallId, "42"), TimeoutToken).ConfigureAwait(false); + await 
session.AddItemAsync(new FunctionCallOutputItem(callInfo2.CallId, "98"), TimeoutToken).ConfigureAwait(false); + await GetNextUpdate(updatesEnum).ConfigureAwait(false); + await GetNextUpdate(updatesEnum).ConfigureAwait(false); + + await session.StartResponseAsync(TimeoutToken).ConfigureAwait(false); + var functionResponses = await CollectResponseUpdates(updatesEnum, TimeoutToken).ConfigureAwait(false); + } + + [Ignore("Truncate isn't currently supported")] + [LiveOnly] + [TestCase] + public async Task Truncate() + { + var vlc = string.IsNullOrEmpty(TestEnvironment.ApiKey) ? + new VoiceLiveClient(new Uri(TestEnvironment.Endpoint), new DefaultAzureCredential(true)) : + new VoiceLiveClient(new Uri(TestEnvironment.Endpoint), new AzureKeyCredential(TestEnvironment.ApiKey)); + + var options = new VoiceLiveSessionOptions() + { + Model = "gpt-4o", + Modalities = { InputModality.Text } + }; + + var session = await vlc.StartSessionAsync(options, TimeoutToken).ConfigureAwait(false); + + // Should get two updates back. 
+ var updatesEnum = session.GetUpdatesAsync(TimeoutToken).GetAsyncEnumerator(); + + var sessionCreated = await GetNextUpdate(updatesEnum).ConfigureAwait(false); + var sessionUpdated = await GetNextUpdate(updatesEnum).ConfigureAwait(false); + + await session.AddItemAsync(new UserMessageItem(new InputTextContentPart("Hello")), null, TimeoutToken).ConfigureAwait(false); + await GetNextUpdate(updatesEnum).ConfigureAwait(false); + + await session.AddItemAsync(new AssistantMessageItem(new OutputTextContentPart("Hello, how can I help you?")), null, TimeoutToken).ConfigureAwait(false); + await GetNextUpdate(updatesEnum).ConfigureAwait(false); + + await session.AddItemAsync(new UserMessageItem(new InputTextContentPart("My name is Bill")), null, TimeoutToken).ConfigureAwait(false); + var q1 = await GetNextUpdate(updatesEnum).ConfigureAwait(false); + + await session.AddItemAsync(new AssistantMessageItem(new OutputTextContentPart("Hello Bill")), null, TimeoutToken).ConfigureAwait(false); + var q2 = await GetNextUpdate(updatesEnum).ConfigureAwait(false); + + await session.AddItemAsync(new UserMessageItem(new InputTextContentPart("My name is Ted")), null, TimeoutToken).ConfigureAwait(false); + await GetNextUpdate(updatesEnum).ConfigureAwait(false); + + await session.AddItemAsync(new AssistantMessageItem(new OutputTextContentPart("ok")), null, TimeoutToken).ConfigureAwait(false); + await GetNextUpdate(updatesEnum).ConfigureAwait(false); + + await session.TruncateConversationAsync(q1.Item.Id, 0, default, TimeoutToken).ConfigureAwait(false); + //await session.DeleteItemAsync(q1.Item.Id, TimeoutToken).ConfigureAwait(false); + + await session.AddItemAsync(new UserMessageItem(new InputTextContentPart("What's my name?")), null, TimeoutToken).ConfigureAwait(false); + await GetNextUpdate(updatesEnum).ConfigureAwait(false); + await session.StartResponseAsync(TimeoutToken).ConfigureAwait(false); + var responses = await CollectResponseUpdates(updatesEnum, 
TimeoutToken).ConfigureAwait(false); + Assert.IsTrue(responses.Count > 0); + var responseDone = responses.Where((r) => r is SessionUpdateResponseDone); + Assert.IsTrue(responseDone.Count() == 1); + var response = SafeCast(responseDone.First()); + Assert.IsNotNull(response.Response); + var outputItems = response.Response.Output.Where((item) => + { + if (item is not ResponseMessageItem) + { + return false; + } + var message = SafeCast(item); + return true; + }); + Assert.IsTrue(outputItems.Count() == 1); + var messageItem = SafeCast(outputItems.First()); + var textParts = messageItem.Content.Where((part) => part.Type == ContentPartType.Text); + Assert.IsTrue(textParts.Count() == 1); + var textPart = SafeCast(textParts.First()); + StringAssert.Contains("Ted", textPart.Text); + } + + [LiveOnly] + [TestCase] + public async Task DefaultAndUpdateTurnDetectionAzureSemanticVadEnTurnDetection() + { + var vlc = string.IsNullOrEmpty(TestEnvironment.ApiKey) ? + new VoiceLiveClient(new Uri(TestEnvironment.Endpoint), new DefaultAzureCredential(true)) : + new VoiceLiveClient(new Uri(TestEnvironment.Endpoint), new AzureKeyCredential(TestEnvironment.ApiKey)); + + var options = new VoiceLiveSessionOptions() + { + Model = "gpt-4o", + InputAudioFormat = InputAudioFormat.Pcm16, + TurnDetection = new AzureSemanticVadEnTurnDetection() + }; + + var session = await vlc.StartSessionAsync(options, TimeoutToken).ConfigureAwait(false); + + // Should get two updates back. 
+ var updatesEnum = session.GetUpdatesAsync(TimeoutToken).GetAsyncEnumerator(); + + var sessionCreated = await GetNextUpdate(updatesEnum).ConfigureAwait(false); + var sessionUpdated = await GetNextUpdate(updatesEnum).ConfigureAwait(false); + + var defaultTurnDetection = sessionCreated.Session.TurnDetection; + Assert.IsTrue(defaultTurnDetection is ServerVadTurnDetection, $"Default turn detection was {defaultTurnDetection.GetType().Name} and not {typeof(ServerVadTurnDetection).Name}"); + + var modifiedTurnDetection = sessionUpdated.Session.TurnDetection; + Assert.IsTrue(modifiedTurnDetection is AzureSemanticVadEnTurnDetection, $"Updated turn detection was {modifiedTurnDetection.GetType().Name} and not {typeof(AzureSemanticVadEnTurnDetection).Name}"); + } + + [LiveOnly] + [TestCase] + public async Task InstructionTest() + { + var vlc = string.IsNullOrEmpty(TestEnvironment.ApiKey) ? + new VoiceLiveClient(new Uri(TestEnvironment.Endpoint), new DefaultAzureCredential(true)) : + new VoiceLiveClient(new Uri(TestEnvironment.Endpoint), new AzureKeyCredential(TestEnvironment.ApiKey)); + + var options = new VoiceLiveSessionOptions() + { + Model = "gpt-4o", + Modalities = { InputModality.Text }, + Instructions = "Your name is Frank. Never forget that!" + }; + + var session = await vlc.StartSessionAsync(options, TimeoutToken).ConfigureAwait(false); + + // Should get two updates back. 
+ var updatesEnum = session.GetUpdatesAsync(TimeoutToken).GetAsyncEnumerator(); + + var sessionCreated = await GetNextUpdate(updatesEnum).ConfigureAwait(false); + var sessionUpdated = await GetNextUpdate(updatesEnum).ConfigureAwait(false); + + var um = new UserMessageItem(new InputTextContentPart("What is your name?")); + await session.AddItemAsync(um, null, TimeoutToken).ConfigureAwait(false); + var conversationItemCreated = await GetNextUpdate(updatesEnum).ConfigureAwait(false); + + await session.StartResponseAsync(TimeoutToken).ConfigureAwait(false); + var responses = await CollectResponseUpdates(updatesEnum, TimeoutToken).ConfigureAwait(false); + Assert.IsTrue(responses.Count > 0); + + var responseDone = responses.Where((r) => r is SessionUpdateResponseDone); + Assert.IsTrue(responseDone.Count() == 1); + var response = SafeCast(responseDone.First()); + Assert.IsNotNull(response.Response); + var outputItems = response.Response.Output.Where((item) => + { + if (item is not ResponseMessageItem) + { + return false; + } + var message = SafeCast(item); + return true; + }); + Assert.IsTrue(outputItems.Count() == 1); + var messageItem = SafeCast(outputItems.First()); + var textParts = messageItem.Content.Where((part) => part.Type == ContentPartType.Text); + Assert.IsTrue(textParts.Count() == 1); + var textPart = SafeCast(textParts.First()); + StringAssert.Contains("Frank", textPart.Text); + + // Update the instructions + options.Instructions = "Your name is Samantha. 
Never forget that!"; + await session.ConfigureSessionAsync(options, TimeoutToken).ConfigureAwait(false); + await GetNextUpdate(updatesEnum).ConfigureAwait(false); + um = new UserMessageItem(new InputTextContentPart("What is your name?")); + await session.AddItemAsync(um, null, TimeoutToken).ConfigureAwait(false); + conversationItemCreated = await GetNextUpdate(updatesEnum).ConfigureAwait(false); + await session.StartResponseAsync(TimeoutToken).ConfigureAwait(false); + responses = await CollectResponseUpdates(updatesEnum, TimeoutToken).ConfigureAwait(false); + Assert.IsTrue(responses.Count > 0); + responseDone = responses.Where((r) => r is SessionUpdateResponseDone); + Assert.IsTrue(responseDone.Count() == 1); + response = SafeCast(responseDone.First()); + Assert.IsNotNull(response.Response); + outputItems = response.Response.Output.Where((item) => + { + if (item is not ResponseMessageItem) + { + return false; + } + var message = SafeCast(item); + return true; + }); + Assert.IsTrue(outputItems.Count() == 1); + messageItem = SafeCast(outputItems.First()); + textParts = messageItem.Content.Where((part) => part.Type == ContentPartType.Text); + Assert.IsTrue(textParts.Count() == 1); + textPart = SafeCast(textParts.First()); + StringAssert.Contains("Samantha", textPart.Text); + } + + [Ignore("NoTurnDetection not returned on update, even though it works")] + [LiveOnly] + [TestCase] + public async Task DefaultAndUpdateTurnDetectionNoTurnDetection() + { + var vlc = string.IsNullOrEmpty(TestEnvironment.ApiKey) ? 
+ new VoiceLiveClient(new Uri(TestEnvironment.Endpoint), new DefaultAzureCredential(true)) : + new VoiceLiveClient(new Uri(TestEnvironment.Endpoint), new AzureKeyCredential(TestEnvironment.ApiKey)); + + var options = new VoiceLiveSessionOptions() + { + Model = "gpt-4o", + InputAudioFormat = InputAudioFormat.Pcm16, + TurnDetection = new NoTurnDetection() + }; + + var session = await vlc.StartSessionAsync(options, TimeoutToken).ConfigureAwait(false); + + // Should get two updates back. + var updatesEnum = session.GetUpdatesAsync(TimeoutToken).GetAsyncEnumerator(); + + var sessionCreated = await GetNextUpdate(updatesEnum).ConfigureAwait(false); + var sessionUpdated = await GetNextUpdate(updatesEnum).ConfigureAwait(false); + + var defaultTurnDetection = sessionCreated.Session.TurnDetection; + Assert.IsTrue(defaultTurnDetection is ServerVadTurnDetection, $"Default turn detection was {defaultTurnDetection.GetType().Name} and not {typeof(ServerVadTurnDetection).Name}"); + + var modifiedTurnDetection = sessionUpdated.Session.TurnDetection; + Assert.IsTrue(modifiedTurnDetection is NoTurnDetection, $"Updated turn detection was {modifiedTurnDetection?.GetType().Name} and not {typeof(NoTurnDetection).Name}"); + } + + [LiveOnly] + [TestCase] + public async Task DefaultAndUpdateTurnDetectionAzureSemanticVadMultilingualTurnDetection() + { + var vlc = string.IsNullOrEmpty(TestEnvironment.ApiKey) ? + new VoiceLiveClient(new Uri(TestEnvironment.Endpoint), new DefaultAzureCredential(true)) : + new VoiceLiveClient(new Uri(TestEnvironment.Endpoint), new AzureKeyCredential(TestEnvironment.ApiKey)); + + var options = new VoiceLiveSessionOptions() + { + Model = "gpt-4o", + InputAudioFormat = InputAudioFormat.Pcm16, + TurnDetection = new AzureSemanticVadMultilingualTurnDetection() + }; + + var session = await vlc.StartSessionAsync(options, TimeoutToken).ConfigureAwait(false); + + // Should get two updates back. 
+ var updatesEnum = session.GetUpdatesAsync(TimeoutToken).GetAsyncEnumerator(); + + var sessionCreated = await GetNextUpdate(updatesEnum).ConfigureAwait(false); + var sessionUpdated = await GetNextUpdate(updatesEnum).ConfigureAwait(false); + + var defaultTurnDetection = sessionCreated.Session.TurnDetection; + Assert.IsTrue(defaultTurnDetection is ServerVadTurnDetection, $"Default turn detection was {defaultTurnDetection.GetType().Name} and not {typeof(ServerVadTurnDetection).Name}"); + + var modifiedTurnDetection = sessionUpdated.Session.TurnDetection; + Assert.IsTrue(modifiedTurnDetection is AzureSemanticVadMultilingualTurnDetection, $"Updated turn detection was {modifiedTurnDetection.GetType().Name} and not {typeof(AzureSemanticVadMultilingualTurnDetection).Name}"); + } + + [LiveOnly] + [TestCase] + public async Task ClearBufferAndGetResult() + { + var vlc = string.IsNullOrEmpty(TestEnvironment.ApiKey) ? + new VoiceLiveClient(new Uri(TestEnvironment.Endpoint), new DefaultAzureCredential(true)) : + new VoiceLiveClient(new Uri(TestEnvironment.Endpoint), new AzureKeyCredential(TestEnvironment.ApiKey)); + + var options = new VoiceLiveSessionOptions() + { + Model = "gpt-4o", + InputAudioFormat = InputAudioFormat.Pcm16, + TurnDetection = new NoTurnDetection() + }; + + var session = await vlc.StartSessionAsync(options, TimeoutToken).ConfigureAwait(false); + + // Should get two updates back. 
+ var updatesEnum = session.GetUpdatesAsync(TimeoutToken).GetAsyncEnumerator(); + + var sessionCreated = await GetNextUpdate(updatesEnum).ConfigureAwait(false); + var sessionUpdated = await GetNextUpdate(updatesEnum).ConfigureAwait(false); + + // Now send audio: + await SendAudioAsync(session, "Weather.wav").ConfigureAwait(false); + await session.ClearInputAudioAsync(TimeoutToken).ConfigureAwait(false); + + await SendAudioAsync(session, "kws_howoldareyou.wav").ConfigureAwait(false); + + await session.CommitInputAudioAsync(TimeoutToken).ConfigureAwait(false); + await GetNextUpdate(updatesEnum).ConfigureAwait(false); + + await session.StartResponseAsync(TimeoutToken).ConfigureAwait(false); + + var responses = await CollectResponseUpdates(updatesEnum, TimeoutToken).ConfigureAwait(false); + Assert.IsTrue(responses.Count > 0); + + var responseDone = responses.Where((r) => r is SessionUpdateResponseDone); + Assert.IsTrue(responseDone.Count() == 1); + var response = SafeCast(responseDone.First()); + + Assert.IsNotNull(response.Response); + var outputItems = response.Response.Output.Where((item) => + { + if (item is not ResponseMessageItem) + { + return false; + } + var message = SafeCast(item); + + return true; + }); + } + + [LiveOnly] + [TestCase] + public async Task SendMultipleAudioFrames() + { + var vlc = string.IsNullOrEmpty(TestEnvironment.ApiKey) ? + new VoiceLiveClient(new Uri(TestEnvironment.Endpoint), new DefaultAzureCredential(true)) : + new VoiceLiveClient(new Uri(TestEnvironment.Endpoint), new AzureKeyCredential(TestEnvironment.ApiKey)); + + var options = new VoiceLiveSessionOptions() + { + Model = "gpt-4o", + InputAudioFormat = InputAudioFormat.Pcm16, + TurnDetection = new NoTurnDetection() + }; + + var session = await vlc.StartSessionAsync(options, TimeoutToken).ConfigureAwait(false); + + // Should get two updates back. 
+ var updatesEnum = session.GetUpdatesAsync(TimeoutToken).GetAsyncEnumerator(); + + var sessionCreated = await GetNextUpdate(updatesEnum).ConfigureAwait(false); + var sessionUpdated = await GetNextUpdate(updatesEnum).ConfigureAwait(false); + + for (int i = 0; i < 300; i++) + { + await session.SendInputAudioAsync(BinaryData.FromBytes(new byte[3200]), TimeoutToken).ConfigureAwait(false); + } + + // error + await session.CommitInputAudioAsync(TimeoutToken).ConfigureAwait(false); + await GetNextUpdate(updatesEnum).ConfigureAwait(false); + + await session.ClearInputAudioAsync(TimeoutToken).ConfigureAwait(false); + + // Now send audio: + await SendAudioAsync(session, "Weather.wav").ConfigureAwait(false); + + var speechDetected = await GetNextUpdate(updatesEnum).ConfigureAwait(false); } private void ValidateResponseUpdates(List responseItems, string previousItemId) diff --git a/sdk/ai/Azure.AI.VoiceLive/tests/LiveTests/ErrorTests.cs b/sdk/ai/Azure.AI.VoiceLive/tests/LiveTests/ErrorTests.cs index cb2f1a4e40ba..c5f3222ae281 100644 --- a/sdk/ai/Azure.AI.VoiceLive/tests/LiveTests/ErrorTests.cs +++ b/sdk/ai/Azure.AI.VoiceLive/tests/LiveTests/ErrorTests.cs @@ -35,7 +35,7 @@ public async Task BadModelName() var options = new VoiceLiveSessionOptions() { Model = "invalidModelName", - InputAudioFormat = AudioFormat.Pcm16, + InputAudioFormat = InputAudioFormat.Pcm16, Voice = voice }; @@ -60,7 +60,7 @@ public async Task BadVoiceName() var options = new VoiceLiveSessionOptions() { Model = "gpt-4o", - InputAudioFormat = AudioFormat.Pcm16, + InputAudioFormat = InputAudioFormat.Pcm16, Voice = voice }; @@ -82,7 +82,7 @@ public void BadEndpoint() var options = new VoiceLiveSessionOptions() { Model = "gpt-4o", - InputAudioFormat = AudioFormat.Pcm16, + InputAudioFormat = InputAudioFormat.Pcm16, }; Assert.ThrowsAsync(typeof(WebSocketException), () => vlc.StartSessionAsync(options, TimeoutToken)); } diff --git a/sdk/ai/Azure.AI.VoiceLive/tests/LiveTests/SessionConfigTests.cs 
b/sdk/ai/Azure.AI.VoiceLive/tests/LiveTests/SessionConfigTests.cs index 80abec650904..7563198b00f8 100644 --- a/sdk/ai/Azure.AI.VoiceLive/tests/LiveTests/SessionConfigTests.cs +++ b/sdk/ai/Azure.AI.VoiceLive/tests/LiveTests/SessionConfigTests.cs @@ -35,7 +35,7 @@ public async Task AzureStandardVoice() var options = new VoiceLiveSessionOptions() { Model = "gpt-4o", - InputAudioFormat = AudioFormat.Pcm16, + InputAudioFormat = InputAudioFormat.Pcm16, Voice = voice }; diff --git a/sdk/ai/Azure.AI.VoiceLive/tests/VoiceLiveSessionConfigurationTests.cs b/sdk/ai/Azure.AI.VoiceLive/tests/VoiceLiveSessionConfigurationTests.cs index 335446174c52..1c7f3f5765cd 100644 --- a/sdk/ai/Azure.AI.VoiceLive/tests/VoiceLiveSessionConfigurationTests.cs +++ b/sdk/ai/Azure.AI.VoiceLive/tests/VoiceLiveSessionConfigurationTests.cs @@ -81,16 +81,16 @@ public async Task ConfigureConversationSession_SetsModalitiesAndVoice() Voice = new AzureStandardVoice(TestConstants.VoiceName), Model = TestConstants.ModelName, Instructions = "You are a helpful assistant.", - TurnDetection = new ServerVad { Threshold = 0.5f, SilenceDurationMs = 500 }, - InputAudioFormat = AudioFormat.Pcm16, - OutputAudioFormat = AudioFormat.Pcm16 + TurnDetection = new ServerVadTurnDetection { Threshold = 0.5f, SilenceDurationMs = 500 }, + InputAudioFormat = InputAudioFormat.Pcm16, + OutputAudioFormat = OutputAudioFormat.Pcm16 }; // Ensure we control modalities explicitly (clear defaults then add back only text & audio) options.Modalities.Clear(); options.Modalities.Add(InputModality.Text); options.Modalities.Add(InputModality.Audio); - await session.ConfigureConversationSessionAsync(options); + await session.ConfigureSessionAsync(options); var updateMessages = GetSentMessagesOfType(fake, "session.update"); Assert.That(updateMessages, Is.Not.Empty, "Expected at least one session.update message to be sent."); @@ -129,7 +129,7 @@ public async Task ConfigureConversationSession_IncludesTools() options.Tools.Add(new 
VoiceLiveFunctionDefinition("get_weather") { Description = "Gets the weather." }); options.Tools.Add(new VoiceLiveFunctionDefinition("book_flight") { Description = "Books a flight." }); - await session.ConfigureConversationSessionAsync(options); + await session.ConfigureSessionAsync(options); var updateMessages = GetSentMessagesOfType(fake, "session.update"); Assert.That(updateMessages, Is.Not.Empty, "Expected session.update message."); @@ -146,7 +146,7 @@ public async Task ConfigureConversationSession_IncludesTools() public void ConfigureConversationSession_NullOptions_Throws() { var session = CreateSessionWithFakeSocket(out _); - Assert.ThrowsAsync(async () => await session.ConfigureConversationSessionAsync(null)); + Assert.ThrowsAsync(async () => await session.ConfigureSessionAsync(null)); } [Test] @@ -162,8 +162,8 @@ public async Task MultipleConfigureCalls_SendsMultipleUpdateMessages() options2.Modalities.Clear(); options2.Modalities.Add(InputModality.Audio); - await session.ConfigureConversationSessionAsync(options1); - await session.ConfigureConversationSessionAsync(options2); + await session.ConfigureSessionAsync(options1); + await session.ConfigureSessionAsync(options2); var updateMessages = GetSentMessagesOfType(fake, "session.update"); Assert.That(updateMessages.Count, Is.GreaterThanOrEqualTo(2), "Expected two session.update messages after two configuration calls."); diff --git a/sdk/ai/Azure.AI.VoiceLive/tests/VoiceLiveSessionDisposalTests.cs b/sdk/ai/Azure.AI.VoiceLive/tests/VoiceLiveSessionDisposalTests.cs index bbef59f5d09b..4d6bffa88854 100644 --- a/sdk/ai/Azure.AI.VoiceLive/tests/VoiceLiveSessionDisposalTests.cs +++ b/sdk/ai/Azure.AI.VoiceLive/tests/VoiceLiveSessionDisposalTests.cs @@ -73,9 +73,9 @@ public void MethodsAfterDispose_ThrowObjectDisposedException() // SendInputAudioAsync(byte[]) should throw Assert.ThrowsAsync(async () => await session.SendInputAudioAsync(new byte[] { 0x01 })); - // ConfigureConversationSessionAsync should throw + // 
ConfigureSessionAsync should throw var convoOptions = new VoiceLiveSessionOptions { Model = TestConstants.ModelName }; - Assert.ThrowsAsync(async () => await session.ConfigureConversationSessionAsync(convoOptions)); + Assert.ThrowsAsync(async () => await session.ConfigureSessionAsync(convoOptions)); } [Test] diff --git a/sdk/ai/Azure.AI.VoiceLive/tests/VoiceLiveTestBase.cs b/sdk/ai/Azure.AI.VoiceLive/tests/VoiceLiveTestBase.cs index 874d4743947f..a4358361f7b9 100644 --- a/sdk/ai/Azure.AI.VoiceLive/tests/VoiceLiveTestBase.cs +++ b/sdk/ai/Azure.AI.VoiceLive/tests/VoiceLiveTestBase.cs @@ -185,7 +185,7 @@ protected VoiceProvider CreateVoiceProvider(string voiceType = "azure-platform") switch (voiceType) { case "azure-platform": - return new AzurePlatformVoice("en-US-AriaNeural"); + return new AzureStandardVoice("en-US-AriaNeural"); case "azure-custom": RequireFeature(TestEnvironment.HasCustomVoice, @@ -199,14 +199,11 @@ protected VoiceProvider CreateVoiceProvider(string voiceType = "azure-platform") "Personal voice not configured"); return new AzurePersonalVoice( TestEnvironment.PersonalVoiceName, - (AzurePersonalVoiceModel)Enum.Parse(typeof(AzurePersonalVoiceModel), TestEnvironment.PersonalVoiceModel)); + new PersonalVoiceModels(TestEnvironment.PersonalVoiceModel)); case "openai": return new OpenAIVoice(OAIVoice.Alloy); - case "llm": - return new LlmVoice(LlmVoiceName.Cosyvoice); - default: throw new ArgumentException($"Unknown voice type: {voiceType}"); } @@ -219,11 +216,8 @@ protected TurnDetection CreateTurnDetection(string detectionType = "server-vad") { switch (detectionType) { - case "none": - return new NoTurnDetection(); - case "server-vad": - return new ServerVad + return new ServerVadTurnDetection { Threshold = 0.5f, SilenceDurationMs = 500, @@ -231,14 +225,14 @@ protected TurnDetection CreateTurnDetection(string detectionType = "server-vad") }; case "azure-semantic": - return new AzureSemanticVad + return new AzureSemanticVadTurnDetection { Languages = { 
"en-US" }, Threshold = 0.7f }; case "azure-multilingual": - return new AzureMultilingualSemanticVad + return new AzureSemanticVadMultilingualTurnDetection { Languages = { "en-US", "es-ES", "fr-FR" }, Threshold = 0.7f diff --git a/sdk/ai/Azure.AI.VoiceLive/tsp-location.yaml b/sdk/ai/Azure.AI.VoiceLive/tsp-location.yaml index e48f4b681c85..aa9752ed8dda 100644 --- a/sdk/ai/Azure.AI.VoiceLive/tsp-location.yaml +++ b/sdk/ai/Azure.AI.VoiceLive/tsp-location.yaml @@ -1,5 +1,5 @@ directory: specification/ai/data-plane/VoiceLive -commit: f99d32b66214505357e68b3523abd2ad209da678 +commit: c99a7e4d84dd21a02cee469ff70e4c147eee96e3 repo: rhurey/azure-rest-api-specs additionalDirectories: - specification/ai/data-plane/VoiceLive