diff --git a/sdk/cognitiveservices/Azure.AI.Speech.Transcription/Azure.AI.Speech.Transcription.sln b/sdk/cognitiveservices/Azure.AI.Speech.Transcription/Azure.AI.Speech.Transcription.sln
new file mode 100644
index 000000000000..0e27ceffdc2c
--- /dev/null
+++ b/sdk/cognitiveservices/Azure.AI.Speech.Transcription/Azure.AI.Speech.Transcription.sln
@@ -0,0 +1,56 @@
+Microsoft Visual Studio Solution File, Format Version 12.00
+# Visual Studio Version 16
+VisualStudioVersion = 16.0.29709.97
+MinimumVisualStudioVersion = 10.0.40219.1
+Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Azure.Core.TestFramework", "..\..\core\Azure.Core.TestFramework\src\Azure.Core.TestFramework.csproj", "{ECC730C1-4AEA-420C-916A-66B19B79E4DC}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Azure.AI.Speech.Transcription", "src\Azure.AI.Speech.Transcription.csproj", "{28FF4005-4467-4E36-92E7-DEA27DEB1519}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Azure.AI.Speech.Transcription.Tests", "tests\Azure.AI.Speech.Transcription.Tests.csproj", "{1F1CD1D4-9932-4B73-99D8-C252A67D4B46}"
+EndProject
+Global
+ GlobalSection(SolutionConfigurationPlatforms) = preSolution
+ Debug|Any CPU = Debug|Any CPU
+ Release|Any CPU = Release|Any CPU
+ EndGlobalSection
+ GlobalSection(ProjectConfigurationPlatforms) = postSolution
+ {B0C276D1-2930-4887-B29A-D1A33E7009A2}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {B0C276D1-2930-4887-B29A-D1A33E7009A2}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {B0C276D1-2930-4887-B29A-D1A33E7009A2}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {B0C276D1-2930-4887-B29A-D1A33E7009A2}.Release|Any CPU.Build.0 = Release|Any CPU
+ {8E9A77AC-792A-4432-8320-ACFD46730401}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {8E9A77AC-792A-4432-8320-ACFD46730401}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {8E9A77AC-792A-4432-8320-ACFD46730401}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {8E9A77AC-792A-4432-8320-ACFD46730401}.Release|Any CPU.Build.0 = Release|Any CPU
+ {ECC730C1-4AEA-420C-916A-66B19B79E4DC}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {ECC730C1-4AEA-420C-916A-66B19B79E4DC}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {ECC730C1-4AEA-420C-916A-66B19B79E4DC}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {ECC730C1-4AEA-420C-916A-66B19B79E4DC}.Release|Any CPU.Build.0 = Release|Any CPU
+ {A4241C1F-A53D-474C-9E4E-075054407E74}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {A4241C1F-A53D-474C-9E4E-075054407E74}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {A4241C1F-A53D-474C-9E4E-075054407E74}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {A4241C1F-A53D-474C-9E4E-075054407E74}.Release|Any CPU.Build.0 = Release|Any CPU
+ {FA8BD3F1-8616-47B6-974C-7576CDF4717E}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {FA8BD3F1-8616-47B6-974C-7576CDF4717E}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {FA8BD3F1-8616-47B6-974C-7576CDF4717E}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {FA8BD3F1-8616-47B6-974C-7576CDF4717E}.Release|Any CPU.Build.0 = Release|Any CPU
+ {85677AD3-C214-42FA-AE6E-49B956CAC8DC}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {85677AD3-C214-42FA-AE6E-49B956CAC8DC}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {85677AD3-C214-42FA-AE6E-49B956CAC8DC}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {85677AD3-C214-42FA-AE6E-49B956CAC8DC}.Release|Any CPU.Build.0 = Release|Any CPU
+ {28FF4005-4467-4E36-92E7-DEA27DEB1519}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {28FF4005-4467-4E36-92E7-DEA27DEB1519}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {28FF4005-4467-4E36-92E7-DEA27DEB1519}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {28FF4005-4467-4E36-92E7-DEA27DEB1519}.Release|Any CPU.Build.0 = Release|Any CPU
+ {1F1CD1D4-9932-4B73-99D8-C252A67D4B46}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {1F1CD1D4-9932-4B73-99D8-C252A67D4B46}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {1F1CD1D4-9932-4B73-99D8-C252A67D4B46}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {1F1CD1D4-9932-4B73-99D8-C252A67D4B46}.Release|Any CPU.Build.0 = Release|Any CPU
+ EndGlobalSection
+ GlobalSection(SolutionProperties) = preSolution
+ HideSolutionNode = FALSE
+ EndGlobalSection
+ GlobalSection(ExtensibilityGlobals) = postSolution
+ SolutionGuid = {A97F4B90-2591-4689-B1F8-5F21FE6D6CAE}
+ EndGlobalSection
+EndGlobal
diff --git a/sdk/cognitiveservices/Azure.AI.Speech.Transcription/CHANGELOG.md b/sdk/cognitiveservices/Azure.AI.Speech.Transcription/CHANGELOG.md
new file mode 100644
index 000000000000..8b33f0fedccc
--- /dev/null
+++ b/sdk/cognitiveservices/Azure.AI.Speech.Transcription/CHANGELOG.md
@@ -0,0 +1,11 @@
+# Release History
+
+## 1.0.0-beta.1 (Unreleased)
+
+### Features Added
+
+### Breaking Changes
+
+### Bugs Fixed
+
+### Other Changes
\ No newline at end of file
diff --git a/sdk/cognitiveservices/Azure.AI.Speech.Transcription/Directory.Build.props b/sdk/cognitiveservices/Azure.AI.Speech.Transcription/Directory.Build.props
new file mode 100644
index 000000000000..63bd836ad44b
--- /dev/null
+++ b/sdk/cognitiveservices/Azure.AI.Speech.Transcription/Directory.Build.props
@@ -0,0 +1,6 @@
+<Project ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
+  <!--
+    Add any shared properties you want for the projects under this package directory that need to be set before the auto imported Directory.Build.props
+  -->
+  <Import Project="$([MSBuild]::GetPathOfFileAbove(Directory.Build.props, $(MSBuildThisFileDirectory)..))" />
+</Project>
diff --git a/sdk/cognitiveservices/Azure.AI.Speech.Transcription/README.md b/sdk/cognitiveservices/Azure.AI.Speech.Transcription/README.md
new file mode 100644
index 000000000000..ef9c504869f2
--- /dev/null
+++ b/sdk/cognitiveservices/Azure.AI.Speech.Transcription/README.md
@@ -0,0 +1,107 @@
+# Azure AI Speech Transcription client library for .NET
+
+Azure.AI.Speech.Transcription is a client library for the Azure AI Speech transcription service, which converts spoken audio into text.
+
+Use the client library to:
+
+* [Transcribe audio files](https://docs.microsoft.com/azure)
+
+[Source code](https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/cognitiveservices/Azure.AI.Speech.Transcription/src) | [Package (NuGet)](https://www.nuget.org/packages) | [API reference documentation](https://azure.github.io/azure-sdk-for-net) | [Product documentation](https://docs.microsoft.com/azure)
+
+## Getting started
+
+This section shows how to install the package, authenticate, and create your first client.
+
+### Install the package
+
+Install the client library for .NET with [NuGet](https://www.nuget.org/):
+
+```dotnetcli
+dotnet add package Azure.AI.Speech.Transcription --prerelease
+```
+
+### Prerequisites
+
+You must have an [Azure subscription](https://azure.microsoft.com/free/dotnet/) and an Azure AI Foundry resource or Speech Service resource to use this package. You will also need the resource's endpoint and credentials to [authenticate](#authenticate-the-client) and run the snippets in the [Examples](#examples) section.
+
+### Authenticate the client
+
+`TranscriptionClient` accepts either an `AzureKeyCredential` (an API key for your resource) or a `TokenCredential` (for Microsoft Entra ID authentication); see the client constructors in the API surface above.
+
+Obtain the endpoint URI and API key from your resource in the Azure portal, store them securely (for example, in environment variables), and use them to initialize the client.
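+
+For example, a minimal sketch that reads the endpoint and key from environment variables (the variable names here are illustrative, not a convention the library defines):
+
+```C#
+// Read the endpoint and API key from environment variables; the names are illustrative.
+Uri endpoint = new Uri(Environment.GetEnvironmentVariable("SPEECH_TRANSCRIPTION_ENDPOINT"));
+AzureKeyCredential credential = new AzureKeyCredential(Environment.GetEnvironmentVariable("SPEECH_TRANSCRIPTION_KEY"));
+TranscriptionClient client = new TranscriptionClient(endpoint, credential);
+```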
+
+### Service API versions
+
+The client library targets the latest service API version by default. A client instance accepts an optional service API version in its options, which specifies the API version the client uses when communicating with the service.
+
+#### Select a service API version
+
+You have the flexibility to explicitly select a supported service API version when instantiating a client by configuring its associated options. This ensures that the client can communicate with services using the specified API version.
+
+For example,
+
+```C# Snippet:CreateTranscriptionClientForSpecificApiVersion
+Uri endpoint = new Uri("https://myaccount.api.cognitive.microsoft.com/");
+AzureKeyCredential credential = new("your apikey");
+TranscriptionClientOptions options = new TranscriptionClientOptions(TranscriptionClientOptions.ServiceVersion.V2025_10_15);
+TranscriptionClient client = new TranscriptionClient(endpoint, credential, options);
+```
+
+When selecting an API version, it's important to verify that there are no breaking changes compared to the latest API version. If there are significant differences, API calls may fail due to incompatibility.
+
+Always ensure that the chosen API version is fully supported and operational for your specific use case and that it aligns with the service's versioning policy.
+
+## Key concepts
+
+The main types in this library are:
+
+* `TranscriptionClient` - the client used to submit transcription requests, either synchronously with `Transcribe` or asynchronously with `TranscribeAsync`.
+* `TranscriptionOptions` - the request options, including the audio source (a stream or a URI), candidate locales, model overrides, diarization, active channels, profanity filtering, and phrase lists.
+* `TranscriptionResult` - the result of a transcription, exposing the total audio duration and the transcribed phrases grouped by channel.
+
+### Thread safety
+
+We guarantee that all client instance methods are thread-safe and independent of each other ([guideline](https://azure.github.io/azure-sdk/dotnet_introduction.html#dotnet-service-methods-thread-safety)). This ensures that the recommendation of reusing client instances is always safe, even across threads.
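+
+For example, a single client can safely serve concurrent transcriptions; a minimal sketch (the file names are illustrative):
+
+```C#
+// One shared client used from multiple concurrent tasks; the file names are illustrative.
+TranscriptionClient sharedClient = new TranscriptionClient(endpoint, credential);
+string[] files = { "first.wav", "second.wav" };
+Task[] tasks = files.Select(async path =>
+{
+    using FileStream audio = File.OpenRead(path);
+    Response<TranscriptionResult> result = await sharedClient.TranscribeAsync(new TranscriptionOptions(audio));
+    Console.WriteLine($"{path}: {result.Value.Duration}");
+}).ToArray();
+await Task.WhenAll(tasks);
+```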
+
+### Additional concepts
+
+[Client options](https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/core/Azure.Core/README.md#configuring-service-clients-using-clientoptions) |
+[Accessing the response](https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/core/Azure.Core/README.md#accessing-http-response-details-using-responset) |
+[Long-running operations](https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/core/Azure.Core/README.md#consuming-long-running-operations-using-operationt) |
+[Handling failures](https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/core/Azure.Core/README.md#reporting-errors-requestfailedexception) |
+[Diagnostics](https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/core/Azure.Core/samples/Diagnostics.md) |
+[Mocking](https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/core/Azure.Core/README.md#mocking) |
+[Client lifetime](https://devblogs.microsoft.com/azure-sdk/lifetime-management-and-thread-safety-guarantees-of-azure-sdk-net-clients/)
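+
+This library also ships `Microsoft.Extensions.Azure` registration extensions (`AddTranscriptionClient`, shown in the API surface above). A minimal dependency-injection sketch, assuming `services` is an `IServiceCollection`:
+
+```C#
+// Register TranscriptionClient with the Azure clients builder; endpoint and key values are illustrative.
+services.AddAzureClients(builder =>
+{
+    builder.AddTranscriptionClient(new Uri("https://myaccount.api.cognitive.microsoft.com/"), new AzureKeyCredential("your apikey"));
+});
+```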
+
+
+## Examples
+
+You can familiarize yourself with different APIs using [Samples](https://github.com/Azure/azure-sdk-for-net/tree/main/sdk/cognitiveservices/Azure.AI.Speech.Transcription/samples).
+
+## Troubleshooting
+
+When a request fails, the client throws a `RequestFailedException` carrying the HTTP status code and the error details returned by the service. Inspect the exception's `Status`, `ErrorCode`, and `Message` to diagnose failures.
+
+Transient failures and throttling are retried automatically by the pipeline; you can tune this behavior through the `Retry` property on `TranscriptionClientOptions`.
+
+To debug requests, enable [logging and other diagnostics](https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/core/Azure.Core/samples/Diagnostics.md) from Azure.Core.
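+
+A sketch of handling a failed request (the audio URL is illustrative):
+
+```C#
+// Service failures surface as RequestFailedException; inspect Status and ErrorCode.
+try
+{
+    var options = new TranscriptionOptions(new Uri("https://example.com/audio.wav"));
+    Response<TranscriptionResult> response = client.Transcribe(options);
+}
+catch (RequestFailedException ex)
+{
+    Console.WriteLine($"Transcription failed with status {ex.Status}: {ex.ErrorCode}");
+    Console.WriteLine(ex.Message);
+}
+```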
+
+## Next steps
+
+* Browse the [samples](https://github.com/Azure/azure-sdk-for-net/tree/main/sdk/cognitiveservices/Azure.AI.Speech.Transcription/samples) alongside this README for end-to-end examples.
+* For real-time (streaming) speech recognition, see the [Microsoft.CognitiveServices.Speech](https://www.nuget.org/packages/Microsoft.CognitiveServices.Speech) SDK instead.
+
+## Contributing
+
+See the [Azure SDK for .NET contributing guide](https://github.com/Azure/azure-sdk-for-net/blob/main/CONTRIBUTING.md) for details on building, testing, and contributing to this library.
+
\ No newline at end of file
diff --git a/sdk/cognitiveservices/Azure.AI.Speech.Transcription/api/Azure.AI.Speech.Transcription.net8.0.cs b/sdk/cognitiveservices/Azure.AI.Speech.Transcription/api/Azure.AI.Speech.Transcription.net8.0.cs
new file mode 100644
index 000000000000..dab85f3c4e19
--- /dev/null
+++ b/sdk/cognitiveservices/Azure.AI.Speech.Transcription/api/Azure.AI.Speech.Transcription.net8.0.cs
@@ -0,0 +1,170 @@
+namespace Azure.AI.Speech.Transcription
+{
+    public static partial class AISpeechTranscriptionModelFactory
+    {
+        public static Azure.AI.Speech.Transcription.EnhancedModeProperties EnhancedModeProperties(bool? enabled = default(bool?), string task = null, string targetLanguage = null, System.Collections.Generic.IEnumerable<string> prompt = null) { throw null; }
+        public static Azure.AI.Speech.Transcription.TranscriptionDiarizationOptions TranscriptionDiarizationOptions(bool? enabled = default(bool?), int? maxSpeakers = default(int?)) { throw null; }
+        public static Azure.AI.Speech.Transcription.TranscriptionOptions TranscriptionOptions(System.Uri audioUri = null, System.Collections.Generic.IEnumerable<string> locales = null, System.Collections.Generic.IDictionary<string, System.Uri> models = null, Azure.AI.Speech.Transcription.ProfanityFilterMode? profanityFilterMode = default(Azure.AI.Speech.Transcription.ProfanityFilterMode?), Azure.AI.Speech.Transcription.TranscriptionDiarizationOptions diarizationOptions = null, System.Collections.Generic.IEnumerable<int> activeChannels = null, Azure.AI.Speech.Transcription.EnhancedModeProperties enhancedMode = null, Azure.AI.Speech.Transcription.PhraseListProperties phraseList = null) { throw null; }
+        public static Azure.AI.Speech.Transcription.TranscriptionResult TranscriptionResult(int durationMilliseconds = 0) { throw null; }
+    }
+    public partial class AzureAISpeechTranscriptionContext : System.ClientModel.Primitives.ModelReaderWriterContext
+    {
+        internal AzureAISpeechTranscriptionContext() { }
+        public static Azure.AI.Speech.Transcription.AzureAISpeechTranscriptionContext Default { get { throw null; } }
+        protected override bool TryGetTypeBuilderCore(System.Type type, out System.ClientModel.Primitives.ModelReaderWriterTypeBuilder builder) { throw null; }
+    }
+    public partial class EnhancedModeProperties : System.ClientModel.Primitives.IJsonModel<Azure.AI.Speech.Transcription.EnhancedModeProperties>, System.ClientModel.Primitives.IPersistableModel<Azure.AI.Speech.Transcription.EnhancedModeProperties>
+    {
+        public EnhancedModeProperties() { }
+        public bool? Enabled { get { throw null; } }
+        public System.Collections.Generic.IList<string> Prompt { get { throw null; } }
+        public string TargetLanguage { get { throw null; } set { } }
+        public string Task { get { throw null; } set { } }
+        Azure.AI.Speech.Transcription.EnhancedModeProperties System.ClientModel.Primitives.IJsonModel<Azure.AI.Speech.Transcription.EnhancedModeProperties>.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; }
+        void System.ClientModel.Primitives.IJsonModel<Azure.AI.Speech.Transcription.EnhancedModeProperties>.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { }
+        Azure.AI.Speech.Transcription.EnhancedModeProperties System.ClientModel.Primitives.IPersistableModel<Azure.AI.Speech.Transcription.EnhancedModeProperties>.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; }
+        string System.ClientModel.Primitives.IPersistableModel<Azure.AI.Speech.Transcription.EnhancedModeProperties>.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; }
+        System.BinaryData System.ClientModel.Primitives.IPersistableModel<Azure.AI.Speech.Transcription.EnhancedModeProperties>.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; }
+    }
+    public partial class PhraseListProperties : System.ClientModel.Primitives.IJsonModel<Azure.AI.Speech.Transcription.PhraseListProperties>, System.ClientModel.Primitives.IPersistableModel<Azure.AI.Speech.Transcription.PhraseListProperties>
+    {
+        public PhraseListProperties() { }
+        public float? BiasingWeight { get { throw null; } set { } }
+        public System.Collections.Generic.IList<string> Phrases { get { throw null; } }
+        protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { }
+        Azure.AI.Speech.Transcription.PhraseListProperties System.ClientModel.Primitives.IJsonModel<Azure.AI.Speech.Transcription.PhraseListProperties>.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; }
+        void System.ClientModel.Primitives.IJsonModel<Azure.AI.Speech.Transcription.PhraseListProperties>.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { }
+        Azure.AI.Speech.Transcription.PhraseListProperties System.ClientModel.Primitives.IPersistableModel<Azure.AI.Speech.Transcription.PhraseListProperties>.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; }
+        string System.ClientModel.Primitives.IPersistableModel<Azure.AI.Speech.Transcription.PhraseListProperties>.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; }
+        System.BinaryData System.ClientModel.Primitives.IPersistableModel<Azure.AI.Speech.Transcription.PhraseListProperties>.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; }
+    }
+    [System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential)]
+    public readonly partial struct ProfanityFilterMode : System.IEquatable<Azure.AI.Speech.Transcription.ProfanityFilterMode>
+    {
+        private readonly object _dummy;
+        private readonly int _dummyPrimitive;
+        public ProfanityFilterMode(string value) { throw null; }
+        public static Azure.AI.Speech.Transcription.ProfanityFilterMode Masked { get { throw null; } }
+        public static Azure.AI.Speech.Transcription.ProfanityFilterMode None { get { throw null; } }
+        public static Azure.AI.Speech.Transcription.ProfanityFilterMode Removed { get { throw null; } }
+        public static Azure.AI.Speech.Transcription.ProfanityFilterMode Tags { get { throw null; } }
+        public bool Equals(Azure.AI.Speech.Transcription.ProfanityFilterMode other) { throw null; }
+        [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)]
+        public override bool Equals(object obj) { throw null; }
+        [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)]
+        public override int GetHashCode() { throw null; }
+        public static bool operator ==(Azure.AI.Speech.Transcription.ProfanityFilterMode left, Azure.AI.Speech.Transcription.ProfanityFilterMode right) { throw null; }
+        public static implicit operator Azure.AI.Speech.Transcription.ProfanityFilterMode (string value) { throw null; }
+        public static bool operator !=(Azure.AI.Speech.Transcription.ProfanityFilterMode left, Azure.AI.Speech.Transcription.ProfanityFilterMode right) { throw null; }
+        public override string ToString() { throw null; }
+    }
+    public partial class TranscribedPhrase : System.ClientModel.Primitives.IJsonModel<Azure.AI.Speech.Transcription.TranscribedPhrase>, System.ClientModel.Primitives.IPersistableModel<Azure.AI.Speech.Transcription.TranscribedPhrase>
+    {
+        internal TranscribedPhrase() { }
+        public float Confidence { get { throw null; } }
+        public System.TimeSpan Duration { get { throw null; } }
+        public string Locale { get { throw null; } }
+        public System.TimeSpan Offset { get { throw null; } }
+        public int? Speaker { get { throw null; } }
+        public string Text { get { throw null; } }
+        public System.Collections.Generic.IReadOnlyList<Azure.AI.Speech.Transcription.TranscribedWord> Words { get { throw null; } }
+        protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { }
+        Azure.AI.Speech.Transcription.TranscribedPhrase System.ClientModel.Primitives.IJsonModel<Azure.AI.Speech.Transcription.TranscribedPhrase>.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; }
+        void System.ClientModel.Primitives.IJsonModel<Azure.AI.Speech.Transcription.TranscribedPhrase>.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { }
+        Azure.AI.Speech.Transcription.TranscribedPhrase System.ClientModel.Primitives.IPersistableModel<Azure.AI.Speech.Transcription.TranscribedPhrase>.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; }
+        string System.ClientModel.Primitives.IPersistableModel<Azure.AI.Speech.Transcription.TranscribedPhrase>.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; }
+        System.BinaryData System.ClientModel.Primitives.IPersistableModel<Azure.AI.Speech.Transcription.TranscribedPhrase>.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; }
+    }
+    public partial class TranscribedPhrases
+    {
+        public int? Channel;
+        public System.Collections.Generic.IEnumerable<Azure.AI.Speech.Transcription.TranscribedPhrase> Phrases;
+        public TranscribedPhrases(int? Channel, string Text, System.Collections.Generic.IEnumerable<Azure.AI.Speech.Transcription.TranscribedPhrase> Phrases) { }
+        public string Text { get { throw null; } }
+    }
+    public partial class TranscribedWord : System.ClientModel.Primitives.IJsonModel<Azure.AI.Speech.Transcription.TranscribedWord>, System.ClientModel.Primitives.IPersistableModel<Azure.AI.Speech.Transcription.TranscribedWord>
+    {
+        internal TranscribedWord() { }
+        public System.TimeSpan Duration { get { throw null; } }
+        public System.TimeSpan Offset { get { throw null; } }
+        public string Text { get { throw null; } }
+        protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { }
+        Azure.AI.Speech.Transcription.TranscribedWord System.ClientModel.Primitives.IJsonModel<Azure.AI.Speech.Transcription.TranscribedWord>.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; }
+        void System.ClientModel.Primitives.IJsonModel<Azure.AI.Speech.Transcription.TranscribedWord>.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { }
+        Azure.AI.Speech.Transcription.TranscribedWord System.ClientModel.Primitives.IPersistableModel<Azure.AI.Speech.Transcription.TranscribedWord>.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; }
+        string System.ClientModel.Primitives.IPersistableModel<Azure.AI.Speech.Transcription.TranscribedWord>.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; }
+        System.BinaryData System.ClientModel.Primitives.IPersistableModel<Azure.AI.Speech.Transcription.TranscribedWord>.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; }
+    }
+    public partial class TranscriptionClient
+    {
+        protected TranscriptionClient() { }
+        public TranscriptionClient(System.Uri endpoint, Azure.AzureKeyCredential credential) { }
+        public TranscriptionClient(System.Uri endpoint, Azure.AzureKeyCredential credential, Azure.AI.Speech.Transcription.TranscriptionClientOptions options) { }
+        public TranscriptionClient(System.Uri endpoint, Azure.Core.TokenCredential credential) { }
+        public TranscriptionClient(System.Uri endpoint, Azure.Core.TokenCredential credential, Azure.AI.Speech.Transcription.TranscriptionClientOptions options) { }
+        public virtual Azure.Core.Pipeline.HttpPipeline Pipeline { get { throw null; } }
+        public virtual Azure.Response<Azure.AI.Speech.Transcription.TranscriptionResult> Transcribe(Azure.AI.Speech.Transcription.TranscriptionOptions options, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
+        public virtual System.Threading.Tasks.Task<Azure.Response<Azure.AI.Speech.Transcription.TranscriptionResult>> TranscribeAsync(Azure.AI.Speech.Transcription.TranscriptionOptions options, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
+    }
+    public partial class TranscriptionClientOptions : Azure.Core.ClientOptions
+    {
+        public TranscriptionClientOptions(Azure.AI.Speech.Transcription.TranscriptionClientOptions.ServiceVersion version = Azure.AI.Speech.Transcription.TranscriptionClientOptions.ServiceVersion.V2025_10_15) { }
+        public enum ServiceVersion
+        {
+            V2025_10_15 = 1,
+        }
+    }
+    public partial class TranscriptionDiarizationOptions : System.ClientModel.Primitives.IJsonModel<Azure.AI.Speech.Transcription.TranscriptionDiarizationOptions>, System.ClientModel.Primitives.IPersistableModel<Azure.AI.Speech.Transcription.TranscriptionDiarizationOptions>
+    {
+        public TranscriptionDiarizationOptions() { }
+        public bool? Enabled { get { throw null; } }
+        public int? MaxSpeakers { get { throw null; } set { } }
+        Azure.AI.Speech.Transcription.TranscriptionDiarizationOptions System.ClientModel.Primitives.IJsonModel<Azure.AI.Speech.Transcription.TranscriptionDiarizationOptions>.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; }
+        void System.ClientModel.Primitives.IJsonModel<Azure.AI.Speech.Transcription.TranscriptionDiarizationOptions>.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { }
+        Azure.AI.Speech.Transcription.TranscriptionDiarizationOptions System.ClientModel.Primitives.IPersistableModel<Azure.AI.Speech.Transcription.TranscriptionDiarizationOptions>.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; }
+        string System.ClientModel.Primitives.IPersistableModel<Azure.AI.Speech.Transcription.TranscriptionDiarizationOptions>.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; }
+        System.BinaryData System.ClientModel.Primitives.IPersistableModel<Azure.AI.Speech.Transcription.TranscriptionDiarizationOptions>.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; }
+    }
+    public partial class TranscriptionOptions : System.ClientModel.Primitives.IJsonModel<Azure.AI.Speech.Transcription.TranscriptionOptions>, System.ClientModel.Primitives.IPersistableModel<Azure.AI.Speech.Transcription.TranscriptionOptions>
+    {
+        public TranscriptionOptions() { }
+        public TranscriptionOptions(System.IO.Stream audioStream) { }
+        public TranscriptionOptions(System.Uri audioUri) { }
+        public System.Collections.Generic.IList<int> ActiveChannels { get { throw null; } }
+        public System.Uri AudioUri { get { throw null; } }
+        public Azure.AI.Speech.Transcription.TranscriptionDiarizationOptions DiarizationOptions { get { throw null; } set { } }
+        public Azure.AI.Speech.Transcription.EnhancedModeProperties EnhancedMode { get { throw null; } set { } }
+        public System.Collections.Generic.IList<string> Locales { get { throw null; } }
+        public System.Collections.Generic.IDictionary<string, System.Uri> Models { get { throw null; } }
+        public Azure.AI.Speech.Transcription.PhraseListProperties PhraseList { get { throw null; } set { } }
+        public Azure.AI.Speech.Transcription.ProfanityFilterMode? ProfanityFilterMode { get { throw null; } set { } }
+        protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { }
+        Azure.AI.Speech.Transcription.TranscriptionOptions System.ClientModel.Primitives.IJsonModel<Azure.AI.Speech.Transcription.TranscriptionOptions>.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; }
+        void System.ClientModel.Primitives.IJsonModel<Azure.AI.Speech.Transcription.TranscriptionOptions>.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { }
+        Azure.AI.Speech.Transcription.TranscriptionOptions System.ClientModel.Primitives.IPersistableModel<Azure.AI.Speech.Transcription.TranscriptionOptions>.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; }
+        string System.ClientModel.Primitives.IPersistableModel<Azure.AI.Speech.Transcription.TranscriptionOptions>.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; }
+        System.BinaryData System.ClientModel.Primitives.IPersistableModel<Azure.AI.Speech.Transcription.TranscriptionOptions>.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; }
+    }
+    public partial class TranscriptionResult : System.ClientModel.Primitives.IJsonModel<Azure.AI.Speech.Transcription.TranscriptionResult>, System.ClientModel.Primitives.IPersistableModel<Azure.AI.Speech.Transcription.TranscriptionResult>
+    {
+        internal TranscriptionResult() { }
+        public System.TimeSpan Duration { get { throw null; } }
+        public System.Collections.Generic.IEnumerable<Azure.AI.Speech.Transcription.TranscribedPhrases> PhrasesByChannel { get { throw null; } }
+        protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { }
+        Azure.AI.Speech.Transcription.TranscriptionResult System.ClientModel.Primitives.IJsonModel<Azure.AI.Speech.Transcription.TranscriptionResult>.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; }
+        void System.ClientModel.Primitives.IJsonModel<Azure.AI.Speech.Transcription.TranscriptionResult>.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { }
+        Azure.AI.Speech.Transcription.TranscriptionResult System.ClientModel.Primitives.IPersistableModel<Azure.AI.Speech.Transcription.TranscriptionResult>.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; }
+        string System.ClientModel.Primitives.IPersistableModel<Azure.AI.Speech.Transcription.TranscriptionResult>.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; }
+        System.BinaryData System.ClientModel.Primitives.IPersistableModel<Azure.AI.Speech.Transcription.TranscriptionResult>.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; }
+    }
+}
+namespace Microsoft.Extensions.Azure
+{
+    public static partial class AISpeechTranscriptionClientBuilderExtensions
+    {
+        public static Azure.Core.Extensions.IAzureClientBuilder<Azure.AI.Speech.Transcription.TranscriptionClient, Azure.AI.Speech.Transcription.TranscriptionClientOptions> AddTranscriptionClient<TBuilder>(this TBuilder builder, System.Uri endpoint) where TBuilder : Azure.Core.Extensions.IAzureClientFactoryBuilderWithCredential { throw null; }
+        public static Azure.Core.Extensions.IAzureClientBuilder<Azure.AI.Speech.Transcription.TranscriptionClient, Azure.AI.Speech.Transcription.TranscriptionClientOptions> AddTranscriptionClient<TBuilder>(this TBuilder builder, System.Uri endpoint, Azure.AzureKeyCredential credential) where TBuilder : Azure.Core.Extensions.IAzureClientFactoryBuilder { throw null; }
+        [System.Diagnostics.CodeAnalysis.RequiresDynamicCodeAttribute("Requires unreferenced code until we opt into EnableConfigurationBindingGenerator.")]
+        public static Azure.Core.Extensions.IAzureClientBuilder<Azure.AI.Speech.Transcription.TranscriptionClient, Azure.AI.Speech.Transcription.TranscriptionClientOptions> AddTranscriptionClient<TBuilder, TConfiguration>(this TBuilder builder, TConfiguration configuration) where TBuilder : Azure.Core.Extensions.IAzureClientFactoryBuilderWithConfiguration<TConfiguration> { throw null; }
+    }
+}
diff --git a/sdk/cognitiveservices/Azure.AI.Speech.Transcription/api/Azure.AI.Speech.Transcription.netstandard2.0.cs b/sdk/cognitiveservices/Azure.AI.Speech.Transcription/api/Azure.AI.Speech.Transcription.netstandard2.0.cs
new file mode 100644
index 000000000000..18307e0ee5ed
--- /dev/null
+++ b/sdk/cognitiveservices/Azure.AI.Speech.Transcription/api/Azure.AI.Speech.Transcription.netstandard2.0.cs
@@ -0,0 +1,169 @@
+namespace Azure.AI.Speech.Transcription
+{
+    public static partial class AISpeechTranscriptionModelFactory
+    {
+        public static Azure.AI.Speech.Transcription.EnhancedModeProperties EnhancedModeProperties(bool? enabled = default(bool?), string task = null, string targetLanguage = null, System.Collections.Generic.IEnumerable<string> prompt = null) { throw null; }
+        public static Azure.AI.Speech.Transcription.TranscriptionDiarizationOptions TranscriptionDiarizationOptions(bool? enabled = default(bool?), int? maxSpeakers = default(int?)) { throw null; }
+        public static Azure.AI.Speech.Transcription.TranscriptionOptions TranscriptionOptions(System.Uri audioUri = null, System.Collections.Generic.IEnumerable<string> locales = null, System.Collections.Generic.IDictionary<string, System.Uri> models = null, Azure.AI.Speech.Transcription.ProfanityFilterMode? profanityFilterMode = default(Azure.AI.Speech.Transcription.ProfanityFilterMode?), Azure.AI.Speech.Transcription.TranscriptionDiarizationOptions diarizationOptions = null, System.Collections.Generic.IEnumerable<int> activeChannels = null, Azure.AI.Speech.Transcription.EnhancedModeProperties enhancedMode = null, Azure.AI.Speech.Transcription.PhraseListProperties phraseList = null) { throw null; }
+        public static Azure.AI.Speech.Transcription.TranscriptionResult TranscriptionResult(int durationMilliseconds = 0) { throw null; }
+    }
+    public partial class AzureAISpeechTranscriptionContext : System.ClientModel.Primitives.ModelReaderWriterContext
+    {
+        internal AzureAISpeechTranscriptionContext() { }
+        public static Azure.AI.Speech.Transcription.AzureAISpeechTranscriptionContext Default { get { throw null; } }
+        protected override bool TryGetTypeBuilderCore(System.Type type, out System.ClientModel.Primitives.ModelReaderWriterTypeBuilder builder) { throw null; }
+    }
+    public partial class EnhancedModeProperties : System.ClientModel.Primitives.IJsonModel<Azure.AI.Speech.Transcription.EnhancedModeProperties>, System.ClientModel.Primitives.IPersistableModel<Azure.AI.Speech.Transcription.EnhancedModeProperties>
+    {
+        public EnhancedModeProperties() { }
+        public bool? Enabled { get { throw null; } }
+        public System.Collections.Generic.IList<string> Prompt { get { throw null; } }
+        public string TargetLanguage { get { throw null; } set { } }
+        public string Task { get { throw null; } set { } }
+        Azure.AI.Speech.Transcription.EnhancedModeProperties System.ClientModel.Primitives.IJsonModel<Azure.AI.Speech.Transcription.EnhancedModeProperties>.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; }
+        void System.ClientModel.Primitives.IJsonModel<Azure.AI.Speech.Transcription.EnhancedModeProperties>.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { }
+        Azure.AI.Speech.Transcription.EnhancedModeProperties System.ClientModel.Primitives.IPersistableModel<Azure.AI.Speech.Transcription.EnhancedModeProperties>.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; }
+        string System.ClientModel.Primitives.IPersistableModel<Azure.AI.Speech.Transcription.EnhancedModeProperties>.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; }
+        System.BinaryData System.ClientModel.Primitives.IPersistableModel<Azure.AI.Speech.Transcription.EnhancedModeProperties>.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; }
+    }
+    public partial class PhraseListProperties : System.ClientModel.Primitives.IJsonModel<Azure.AI.Speech.Transcription.PhraseListProperties>, System.ClientModel.Primitives.IPersistableModel<Azure.AI.Speech.Transcription.PhraseListProperties>
+    {
+        public PhraseListProperties() { }
+        public float? BiasingWeight { get { throw null; } set { } }
+        public System.Collections.Generic.IList<string> Phrases { get { throw null; } }
+        protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { }
+        Azure.AI.Speech.Transcription.PhraseListProperties System.ClientModel.Primitives.IJsonModel<Azure.AI.Speech.Transcription.PhraseListProperties>.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; }
+        void System.ClientModel.Primitives.IJsonModel<Azure.AI.Speech.Transcription.PhraseListProperties>.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { }
+        Azure.AI.Speech.Transcription.PhraseListProperties System.ClientModel.Primitives.IPersistableModel<Azure.AI.Speech.Transcription.PhraseListProperties>.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; }
+        string System.ClientModel.Primitives.IPersistableModel<Azure.AI.Speech.Transcription.PhraseListProperties>.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; }
+        System.BinaryData System.ClientModel.Primitives.IPersistableModel<Azure.AI.Speech.Transcription.PhraseListProperties>.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; }
+    }
+    [System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential)]
+    public readonly partial struct ProfanityFilterMode : System.IEquatable<Azure.AI.Speech.Transcription.ProfanityFilterMode>
+    {
+        private readonly object _dummy;
+        private readonly int _dummyPrimitive;
+        public ProfanityFilterMode(string value) { throw null; }
+        public static Azure.AI.Speech.Transcription.ProfanityFilterMode Masked { get { throw null; } }
+        public static Azure.AI.Speech.Transcription.ProfanityFilterMode None { get { throw null; } }
+        public static Azure.AI.Speech.Transcription.ProfanityFilterMode Removed { get { throw null; } }
+        public static Azure.AI.Speech.Transcription.ProfanityFilterMode Tags { get { throw null; } }
+        public bool Equals(Azure.AI.Speech.Transcription.ProfanityFilterMode other) { throw null; }
+        [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)]
+        public override bool Equals(object obj) { throw null; }
+        [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)]
+        public override int GetHashCode() { throw null; }
+        public static bool operator ==(Azure.AI.Speech.Transcription.ProfanityFilterMode left, Azure.AI.Speech.Transcription.ProfanityFilterMode right) { throw null; }
+        public static implicit operator Azure.AI.Speech.Transcription.ProfanityFilterMode (string value) { throw null; }
+        public static bool operator !=(Azure.AI.Speech.Transcription.ProfanityFilterMode left, Azure.AI.Speech.Transcription.ProfanityFilterMode right) { throw null; }
+        public override string ToString() { throw null; }
+    }
+    public partial class TranscribedPhrase : System.ClientModel.Primitives.IJsonModel<Azure.AI.Speech.Transcription.TranscribedPhrase>, System.ClientModel.Primitives.IPersistableModel<Azure.AI.Speech.Transcription.TranscribedPhrase>
+    {
+        internal TranscribedPhrase() { }
+        public float Confidence { get { throw null; } }
+        public System.TimeSpan Duration { get { throw null; } }
+        public string Locale { get { throw null; } }
+        public System.TimeSpan Offset { get { throw null; } }
+        public int? Speaker { get { throw null; } }
+        public string Text { get { throw null; } }
+        public System.Collections.Generic.IReadOnlyList<Azure.AI.Speech.Transcription.TranscribedWord> Words { get { throw null; } }
+        protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { }
+        Azure.AI.Speech.Transcription.TranscribedPhrase System.ClientModel.Primitives.IJsonModel<Azure.AI.Speech.Transcription.TranscribedPhrase>.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; }
+        void System.ClientModel.Primitives.IJsonModel<Azure.AI.Speech.Transcription.TranscribedPhrase>.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { }
+        Azure.AI.Speech.Transcription.TranscribedPhrase System.ClientModel.Primitives.IPersistableModel<Azure.AI.Speech.Transcription.TranscribedPhrase>.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; }
+        string System.ClientModel.Primitives.IPersistableModel<Azure.AI.Speech.Transcription.TranscribedPhrase>.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; }
+        System.BinaryData System.ClientModel.Primitives.IPersistableModel<Azure.AI.Speech.Transcription.TranscribedPhrase>.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; }
+    }
+    public partial class TranscribedPhrases
+    {
+        public int? Channel;
+        public System.Collections.Generic.IEnumerable<Azure.AI.Speech.Transcription.TranscribedPhrase> Phrases;
+        public TranscribedPhrases(int? Channel, string Text, System.Collections.Generic.IEnumerable<Azure.AI.Speech.Transcription.TranscribedPhrase> Phrases) { }
+        public string Text { get { throw null; } }
+    }
+    public partial class TranscribedWord : System.ClientModel.Primitives.IJsonModel<Azure.AI.Speech.Transcription.TranscribedWord>, System.ClientModel.Primitives.IPersistableModel<Azure.AI.Speech.Transcription.TranscribedWord>
+    {
+        internal TranscribedWord() { }
+        public System.TimeSpan Duration { get { throw null; } }
+        public System.TimeSpan Offset { get { throw null; } }
+        public string Text { get { throw null; } }
+        protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { }
+        Azure.AI.Speech.Transcription.TranscribedWord System.ClientModel.Primitives.IJsonModel<Azure.AI.Speech.Transcription.TranscribedWord>.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; }
+        void System.ClientModel.Primitives.IJsonModel<Azure.AI.Speech.Transcription.TranscribedWord>.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { }
+        Azure.AI.Speech.Transcription.TranscribedWord System.ClientModel.Primitives.IPersistableModel<Azure.AI.Speech.Transcription.TranscribedWord>.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; }
+        string System.ClientModel.Primitives.IPersistableModel<Azure.AI.Speech.Transcription.TranscribedWord>.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; }
+        System.BinaryData System.ClientModel.Primitives.IPersistableModel<Azure.AI.Speech.Transcription.TranscribedWord>.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; }
+    }
+    public partial class TranscriptionClient
+    {
+        protected TranscriptionClient() { }
+        public TranscriptionClient(System.Uri endpoint, Azure.AzureKeyCredential credential) { }
+        public TranscriptionClient(System.Uri endpoint, Azure.AzureKeyCredential credential, Azure.AI.Speech.Transcription.TranscriptionClientOptions options) { }
+        public TranscriptionClient(System.Uri endpoint, Azure.Core.TokenCredential credential) { }
+        public TranscriptionClient(System.Uri endpoint, Azure.Core.TokenCredential credential, Azure.AI.Speech.Transcription.TranscriptionClientOptions options) { }
+        public virtual Azure.Core.Pipeline.HttpPipeline Pipeline { get { throw null; } }
+        public virtual Azure.Response<Azure.AI.Speech.Transcription.TranscriptionResult> Transcribe(Azure.AI.Speech.Transcription.TranscriptionOptions options, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
+        public virtual System.Threading.Tasks.Task<Azure.Response<Azure.AI.Speech.Transcription.TranscriptionResult>> TranscribeAsync(Azure.AI.Speech.Transcription.TranscriptionOptions options, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
+    }
+    public partial class TranscriptionClientOptions : Azure.Core.ClientOptions
+    {
+        public TranscriptionClientOptions(Azure.AI.Speech.Transcription.TranscriptionClientOptions.ServiceVersion version = Azure.AI.Speech.Transcription.TranscriptionClientOptions.ServiceVersion.V2025_10_15) { }
+        public enum ServiceVersion
+        {
+            V2025_10_15 = 1,
+        }
+    }
+    public partial class TranscriptionDiarizationOptions : System.ClientModel.Primitives.IJsonModel<Azure.AI.Speech.Transcription.TranscriptionDiarizationOptions>, System.ClientModel.Primitives.IPersistableModel<Azure.AI.Speech.Transcription.TranscriptionDiarizationOptions>
+    {
+        public TranscriptionDiarizationOptions() { }
+        public bool? Enabled { get { throw null; } }
+        public int? MaxSpeakers { get { throw null; } set { } }
+        Azure.AI.Speech.Transcription.TranscriptionDiarizationOptions System.ClientModel.Primitives.IJsonModel<Azure.AI.Speech.Transcription.TranscriptionDiarizationOptions>.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; }
+        void System.ClientModel.Primitives.IJsonModel<Azure.AI.Speech.Transcription.TranscriptionDiarizationOptions>.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { }
+        Azure.AI.Speech.Transcription.TranscriptionDiarizationOptions System.ClientModel.Primitives.IPersistableModel<Azure.AI.Speech.Transcription.TranscriptionDiarizationOptions>.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; }
+        string System.ClientModel.Primitives.IPersistableModel<Azure.AI.Speech.Transcription.TranscriptionDiarizationOptions>.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; }
+        System.BinaryData System.ClientModel.Primitives.IPersistableModel<Azure.AI.Speech.Transcription.TranscriptionDiarizationOptions>.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; }
+    }
+    public partial class TranscriptionOptions : System.ClientModel.Primitives.IJsonModel<Azure.AI.Speech.Transcription.TranscriptionOptions>, System.ClientModel.Primitives.IPersistableModel<Azure.AI.Speech.Transcription.TranscriptionOptions>
+    {
+        public TranscriptionOptions() { }
+        public TranscriptionOptions(System.IO.Stream audioStream) { }
+        public TranscriptionOptions(System.Uri audioUri) { }
+        public System.Collections.Generic.IList<int> ActiveChannels { get { throw null; } }
+        public System.Uri AudioUri { get { throw null; } }
+        public Azure.AI.Speech.Transcription.TranscriptionDiarizationOptions DiarizationOptions { get { throw null; } set { } }
+        public Azure.AI.Speech.Transcription.EnhancedModeProperties EnhancedMode { get { throw null; } set { } }
+        public System.Collections.Generic.IList<string> Locales { get { throw null; } }
+        public System.Collections.Generic.IDictionary<string, System.Uri> Models { get { throw null; } }
+        public Azure.AI.Speech.Transcription.PhraseListProperties PhraseList { get { throw null; } set { } }
+        public Azure.AI.Speech.Transcription.ProfanityFilterMode? ProfanityFilterMode { get { throw null; } set { } }
+        protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { }
+        Azure.AI.Speech.Transcription.TranscriptionOptions System.ClientModel.Primitives.IJsonModel<Azure.AI.Speech.Transcription.TranscriptionOptions>.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; }
+        void System.ClientModel.Primitives.IJsonModel<Azure.AI.Speech.Transcription.TranscriptionOptions>.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { }
+        Azure.AI.Speech.Transcription.TranscriptionOptions System.ClientModel.Primitives.IPersistableModel<Azure.AI.Speech.Transcription.TranscriptionOptions>.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; }
+        string System.ClientModel.Primitives.IPersistableModel<Azure.AI.Speech.Transcription.TranscriptionOptions>.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; }
+        System.BinaryData System.ClientModel.Primitives.IPersistableModel<Azure.AI.Speech.Transcription.TranscriptionOptions>.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; }
+    }
+    public partial class TranscriptionResult : System.ClientModel.Primitives.IJsonModel<Azure.AI.Speech.Transcription.TranscriptionResult>, System.ClientModel.Primitives.IPersistableModel<Azure.AI.Speech.Transcription.TranscriptionResult>
+    {
+        internal TranscriptionResult() { }
+        public System.TimeSpan Duration { get { throw null; } }
+        public System.Collections.Generic.IEnumerable<Azure.AI.Speech.Transcription.TranscribedPhrases> PhrasesByChannel { get { throw null; } }
+        protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { }
+        Azure.AI.Speech.Transcription.TranscriptionResult System.ClientModel.Primitives.IJsonModel<Azure.AI.Speech.Transcription.TranscriptionResult>.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; }
+        void System.ClientModel.Primitives.IJsonModel<Azure.AI.Speech.Transcription.TranscriptionResult>.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { }
+        Azure.AI.Speech.Transcription.TranscriptionResult System.ClientModel.Primitives.IPersistableModel<Azure.AI.Speech.Transcription.TranscriptionResult>.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; }
+        string System.ClientModel.Primitives.IPersistableModel<Azure.AI.Speech.Transcription.TranscriptionResult>.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; }
+        System.BinaryData System.ClientModel.Primitives.IPersistableModel<Azure.AI.Speech.Transcription.TranscriptionResult>.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; }
+    }
+}
+namespace Microsoft.Extensions.Azure
+{
+    public static partial class AISpeechTranscriptionClientBuilderExtensions
+    {
+        public static Azure.Core.Extensions.IAzureClientBuilder<Azure.AI.Speech.Transcription.TranscriptionClient, Azure.AI.Speech.Transcription.TranscriptionClientOptions> AddTranscriptionClient<TBuilder>(this TBuilder builder, System.Uri endpoint) where TBuilder : Azure.Core.Extensions.IAzureClientFactoryBuilderWithCredential { throw null; }
+        public static Azure.Core.Extensions.IAzureClientBuilder<Azure.AI.Speech.Transcription.TranscriptionClient, Azure.AI.Speech.Transcription.TranscriptionClientOptions> AddTranscriptionClient<TBuilder>(this TBuilder builder, System.Uri endpoint, Azure.AzureKeyCredential credential) where TBuilder : Azure.Core.Extensions.IAzureClientFactoryBuilder { throw null; }
+        public static Azure.Core.Extensions.IAzureClientBuilder<Azure.AI.Speech.Transcription.TranscriptionClient, Azure.AI.Speech.Transcription.TranscriptionClientOptions> AddTranscriptionClient<TBuilder, TConfiguration>(this TBuilder builder, TConfiguration configuration) where TBuilder : Azure.Core.Extensions.IAzureClientFactoryBuilderWithConfiguration<TConfiguration> { throw null; }
+    }
+}
diff --git a/sdk/cognitiveservices/Azure.AI.Speech.Transcription/samples/README.md b/sdk/cognitiveservices/Azure.AI.Speech.Transcription/samples/README.md
new file mode 100644
index 000000000000..072ceaaa6277
--- /dev/null
+++ b/sdk/cognitiveservices/Azure.AI.Speech.Transcription/samples/README.md
@@ -0,0 +1,19 @@
+---
+page_type: sample
+languages:
+- csharp
+products:
+- azure
+- azure-speech
+name: Azure.AI.Speech.Transcription samples for .NET
+description: Samples for the Azure.AI.Speech.Transcription client library
+---
+
+# Azure AI Speech Transcription client SDK samples
+
+- [Transcribe a Local File](https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/cognitiveservices/Azure.AI.Speech.Transcription/samples/Sample_1_TranscribeLocalFile.md)
+- [Transcribe a Remote File](https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/cognitiveservices/Azure.AI.Speech.Transcription/samples/Sample_2_TranscribeRemoteFile.md)
+- [Transcribe with Options](https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/cognitiveservices/Azure.AI.Speech.Transcription/samples/Sample_3_TranscribeWithOptions.md)
+- [Advanced Remote File Transcription](https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/cognitiveservices/Azure.AI.Speech.Transcription/samples/Sample_4_AdvancedRemoteTranscription.md)
+- [Mock a Client for Testing](https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/cognitiveservices/Azure.AI.Speech.Transcription/samples/Sample_5_MockClient.md)
+
diff --git a/sdk/cognitiveservices/Azure.AI.Speech.Transcription/samples/Sample_1_TranscribeLocalFile.md b/sdk/cognitiveservices/Azure.AI.Speech.Transcription/samples/Sample_1_TranscribeLocalFile.md
new file mode 100644
index 000000000000..15b2ce65b74e
--- /dev/null
+++ b/sdk/cognitiveservices/Azure.AI.Speech.Transcription/samples/Sample_1_TranscribeLocalFile.md
@@ -0,0 +1,55 @@
+# Transcribe a Local File
+
+This sample shows how to transcribe a local file using the `Azure.AI.Speech.Transcription` SDK.
+
+## Create a Transcription Client
+
+To create a Transcription Client, you will need the service endpoint and credentials of your AI Foundry resource or Speech Service resource. You can specify the service version by providing a `TranscriptionClientOptions` instance.
+
+```C# Snippet:CreateTranscriptionClientForSpecificApiVersion
+Uri endpoint = new Uri("https://myaccount.api.cognitive.microsoft.com/");
+AzureKeyCredential credential = new("your apikey");
+TranscriptionClientOptions options = new TranscriptionClientOptions(TranscriptionClientOptions.ServiceVersion.V2025_10_15);
+TranscriptionClient client = new TranscriptionClient(endpoint, credential, options);
+```
+
+## Transcribe Local File (Synchronous)
+
+To transcribe a local file synchronously, create a stream from the file and call `Transcribe` on the `TranscriptionClient`, which returns the transcribed phrases and the total duration of the file.
+
+```C# Snippet:TranscribeLocalFileSync
+string filePath = "path/to/audio.wav";
+TranscriptionClient client = new TranscriptionClient(new Uri("https://myaccount.api.cognitive.microsoft.com/"), new AzureKeyCredential("your apikey"));
+using (FileStream fileStream = File.Open(filePath, FileMode.Open))
+{
+    var options = new TranscriptionOptions(fileStream);
+    var response = client.Transcribe(options);
+
+    Console.WriteLine($"File Duration: {response.Value.Duration}");
+    foreach (var phrase in response.Value.PhrasesByChannel.First().Phrases)
+    {
+        Console.WriteLine($"{phrase.Offset}-{phrase.Offset + phrase.Duration}: {phrase.Text}");
+    }
+}
+```
+
+## Transcribe Local File (Asynchronous)
+
+To transcribe a local file asynchronously, create a stream from the file and call `TranscribeAsync` on the `TranscriptionClient`, which returns the transcribed phrases and the total duration of the file.
+
+```C# Snippet:TranscribeLocalFileAsync
+string filePath = "path/to/audio.wav";
+TranscriptionClient client = new TranscriptionClient(new Uri("https://myaccount.api.cognitive.microsoft.com/"), new AzureKeyCredential("your apikey"));
+using (FileStream fileStream = File.Open(filePath, FileMode.Open))
+{
+    var options = new TranscriptionOptions(fileStream);
+    var response = await client.TranscribeAsync(options);
+
+    Console.WriteLine($"File Duration: {response.Value.Duration}");
+    foreach (var phrase in response.Value.PhrasesByChannel.First().Phrases)
+    {
+        Console.WriteLine($"{phrase.Offset}-{phrase.Offset + phrase.Duration}: {phrase.Text}");
+    }
+}
+```
+
diff --git a/sdk/cognitiveservices/Azure.AI.Speech.Transcription/samples/Sample_2_TranscribeRemoteFile.md b/sdk/cognitiveservices/Azure.AI.Speech.Transcription/samples/Sample_2_TranscribeRemoteFile.md
new file mode 100644
index 000000000000..20c366b84015
--- /dev/null
+++ b/sdk/cognitiveservices/Azure.AI.Speech.Transcription/samples/Sample_2_TranscribeRemoteFile.md
@@ -0,0 +1,54 @@
+# Transcribe a Remote File
+
+This sample shows how to transcribe a remote file using the `Azure.AI.Speech.Transcription` SDK.
+
+## Create a Transcription Client
+
+To create a Transcription Client, you will need the service endpoint and credentials of your AI Foundry resource or Speech Service resource. You can specify the service version by providing a `TranscriptionClientOptions` instance.
+
+```C# Snippet:CreateTranscriptionClientForSpecificApiVersion
+Uri endpoint = new Uri("https://myaccount.api.cognitive.microsoft.com/");
+AzureKeyCredential credential = new("your apikey");
+TranscriptionClientOptions options = new TranscriptionClientOptions(TranscriptionClientOptions.ServiceVersion.V2025_10_15);
+TranscriptionClient client = new TranscriptionClient(endpoint, credential, options);
+```
+
+## Transcribe Remote File (Synchronous)
+
+To transcribe a remote file synchronously, create a stream from the file's URL and call `Transcribe` on the `TranscriptionClient`, which returns the transcribed phrases and the total duration of the file.
+
+```C# Snippet:TranscribeRemoteFileSync
+TranscriptionClient client = new TranscriptionClient(new Uri("https://myaccount.api.cognitive.microsoft.com/"), new AzureKeyCredential("your apikey"));
+using HttpClient httpClient = new HttpClient();
+using HttpResponseMessage httpResponse = httpClient.GetAsync("https://your-domain.com/your-file.mp3").Result;
+using Stream stream = httpResponse.Content.ReadAsStreamAsync().Result;
+
+var options = new TranscriptionOptions(stream);
+var response = client.Transcribe(options);
+
+Console.WriteLine($"File Duration: {response.Value.Duration}");
+foreach (var phrase in response.Value.PhrasesByChannel.First().Phrases)
+{
+    Console.WriteLine($"{phrase.Offset}-{phrase.Offset + phrase.Duration}: {phrase.Text}");
+}
+```
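+
+Alternatively, `TranscriptionOptions` also accepts the file's URI directly, so you can point the service at the remote file without downloading it yourself; a minimal sketch (the URL is illustrative):
+
+```C#
+// Pass the remote file's URI directly instead of downloading it to a stream.
+var uriOptions = new TranscriptionOptions(new Uri("https://your-domain.com/your-file.mp3"));
+var uriResponse = client.Transcribe(uriOptions);
+Console.WriteLine($"File Duration: {uriResponse.Value.Duration}");
+```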
+
+## Transcribe Remote File (Asynchronous)
+
+To transcribe a remote file asynchronously, create a stream from the file's URL and call `TranscribeAsync` on the `TranscriptionClient`, which returns the transcribed phrases and the total duration of the file.
+
+```C# Snippet:TranscribeRemoteFileAsync
+TranscriptionClient client = new TranscriptionClient(new Uri("https://myaccount.api.cognitive.microsoft.com/"), new AzureKeyCredential("your apikey"));
+using HttpClient httpClient = new HttpClient();
+using HttpResponseMessage httpResponse = await httpClient.GetAsync("https://your-domain.com/your-file.mp3");
+using Stream stream = await httpResponse.Content.ReadAsStreamAsync();
+
+var options = new TranscriptionOptions(stream);
+var response = await client.TranscribeAsync(options);
+
+Console.WriteLine($"File Duration: {response.Value.Duration}");
+foreach (var phrase in response.Value.PhrasesByChannel.First().Phrases)
+{
+    Console.WriteLine($"{phrase.Offset}-{phrase.Offset + phrase.Duration}: {phrase.Text}");
+}
+```
diff --git a/sdk/cognitiveservices/Azure.AI.Speech.Transcription/samples/Sample_3_TranscribeWithOptions.md b/sdk/cognitiveservices/Azure.AI.Speech.Transcription/samples/Sample_3_TranscribeWithOptions.md
new file mode 100644
index 000000000000..c018c3e069c0
--- /dev/null
+++ b/sdk/cognitiveservices/Azure.AI.Speech.Transcription/samples/Sample_3_TranscribeWithOptions.md
@@ -0,0 +1,169 @@
+# Transcribe with Options
+
+This sample shows how to transcribe files using the options of the `Azure.AI.Speech.Transcription` SDK.
+
+## Create a Transcription Client
+
+To create a Transcription Client, you will need the service endpoint and credentials of your AI Foundry resource or Speech Service resource. You can specify the service version by providing a `TranscriptionClientOptions` instance.
+
+```C# Snippet:CreateTranscriptionClientForSpecificApiVersion
+Uri endpoint = new Uri("https://myaccount.api.cognitive.microsoft.com/");
+AzureKeyCredential credential = new("your apikey");
+TranscriptionClientOptions options = new TranscriptionClientOptions(TranscriptionClientOptions.ServiceVersion.V2025_10_15);
+TranscriptionClient client = new TranscriptionClient(endpoint, credential, options);
+```
+
+## Transcribe with Locale Options
+
+To transcribe a file using manually specified locales, create a stream from the file, add the locales to the `TranscriptionOptions`, and call `TranscribeAsync` on the `TranscriptionClient`. This method returns the transcribed phrases and the total duration of the file.
+
+If not specified, the locale of the speech in the audio is detected automatically from all supported locales.
+
+```C# Snippet:TranscribeWithLocales
+string filePath = "path/to/audio.wav";
+TranscriptionClient client = new TranscriptionClient(new Uri("https://myaccount.api.cognitive.microsoft.com/"), new AzureKeyCredential("your apikey"));
+using (FileStream fileStream = File.Open(filePath, FileMode.Open))
+{
+    var options = new TranscriptionOptions(fileStream);
+    options.Locales.Add("en-US");
+
+    var response = await client.TranscribeAsync(options);
+
+    Console.WriteLine($"File Duration: {response.Value.Duration}");
+    foreach (var phrase in response.Value.PhrasesByChannel.First().Phrases)
+    {
+        Console.WriteLine($"{phrase.Offset}-{phrase.Offset + phrase.Duration}: {phrase.Text}");
+    }
+}
+```
+
+## Transcribe with Model Options
+
+To transcribe a file using specific models for specific locales, create a stream from the file, add the model mapping to the `TranscriptionOptions`, and call `TranscribeAsync` on the `TranscriptionClient`. This method returns the transcribed phrases and the total duration of the file.
+
+If no mapping is given, the default model for the locale is used.
+
+```C# Snippet:TranscribeWithModels
+string filePath = "path/to/audio.wav";
+TranscriptionClient client = new TranscriptionClient(new Uri("https://myaccount.api.cognitive.microsoft.com/"), new AzureKeyCredential("your apikey"));
+using (FileStream fileStream = File.Open(filePath, FileMode.Open))
+{
+ var options = new TranscriptionOptions();
+ options.Models.Add("en-US", new Uri("https://myaccount.api.cognitive.microsoft.com/speechtotext/models/your-model-uuid"));
+
+ var request = new TranscriptionContent
+ {
+ Audio = fileStream,
+ Options = options
+ };
+
+ var response = await client.TranscribeAsync(request);
+
+ Console.WriteLine($"File Duration: {response.Value.Duration}");
+ foreach (var phrase in response.Value.PhrasesByChannel.First().Phrases)
+ {
+ Console.WriteLine($"{phrase.Offset}-{phrase.Offset+phrase.Duration}: {phrase.Text}");
+ }
+}
+```
+
+## Transcribe with Profanity Filter Options
+
+To transcribe a file with profanity filtering, create a stream from the file, specify the filter mode in the `TranscriptionOptions`, and call `TranscribeAsync` on the `TranscriptionClient`. This method returns the transcribed phrases and the total duration of the file.
+
+```C# Snippet:TranscribeWithProfanityFilter
+string filePath = "path/to/audio.wav";
+TranscriptionClient client = new TranscriptionClient(new Uri("https://myaccount.api.cognitive.microsoft.com/"), new AzureKeyCredential("your apikey"));
+using (FileStream fileStream = File.Open(filePath, FileMode.Open))
+{
+ var options = new TranscriptionOptions();
+ options.ProfanityFilterMode = ProfanityFilterMode.Masked;
+
+ var request = new TranscriptionContent
+ {
+ Audio = fileStream,
+ Options = options
+ };
+
+ var response = await client.TranscribeAsync(request);
+
+ Console.WriteLine($"File Duration: {response.Value.Duration}");
+ foreach (var phrase in response.Value.PhrasesByChannel.First().Phrases)
+ {
+ Console.WriteLine($"{phrase.Offset}-{phrase.Offset+phrase.Duration}: {phrase.Text}");
+ }
+}
+```
+
+## Transcribe with Active Channels Options
+
+To transcribe a file using only a subset of the channels, create a stream from the file, specify the 0-based indices of the active channels in the `TranscriptionOptions`, and call `TranscribeAsync` on the `TranscriptionClient`. This method returns the transcribed phrases and the total duration of the file.
+
+If not specified, multiple channels are merged and transcribed jointly. Only up to two channels are supported.
+
+```C# Snippet:TranscribeWithActiveChannels
+string filePath = "path/to/audio.wav";
+TranscriptionClient client = new TranscriptionClient(new Uri("https://myaccount.api.cognitive.microsoft.com/"), new AzureKeyCredential("your apikey"));
+using (FileStream fileStream = File.Open(filePath, FileMode.Open))
+{
+ var options = new TranscriptionOptions();
+ options.ActiveChannels.Add(0);
+
+ var request = new TranscriptionContent
+ {
+ Audio = fileStream,
+ Options = options
+ };
+
+ var response = await client.TranscribeAsync(request);
+
+ Console.WriteLine($"File Duration: {response.Value.Duration}");
+ foreach (var phrase in response.Value.PhrasesByChannel.First().Phrases)
+ {
+ Console.WriteLine($"{phrase.Offset}-{phrase.Offset+phrase.Duration}: {phrase.Text}");
+ }
+}
+```
+
+## Transcribe with Diarization Options
+
+To transcribe a file with speaker identification, create a stream from the file, specify the diarization options in the `TranscriptionOptions`, and call `TranscribeAsync` on the `TranscriptionClient`. This method returns the transcribed phrases and the total duration of the file.
+
+If not specified, no speaker information is included in the transcribed phrases.
+
+```C# Snippet:TranscribeWithDiarization
+string filePath = "path/to/audio.wav";
+TranscriptionClient client = new TranscriptionClient(new Uri("https://myaccount.api.cognitive.microsoft.com/"), new AzureKeyCredential("your apikey"));
+using (FileStream fileStream = File.Open(filePath, FileMode.Open))
+{
+ var options = new TranscriptionOptions()
+ {
+ DiarizationOptions = new()
+ {
+ // Enabled is automatically set to true when MaxSpeakers is specified
+ MaxSpeakers = 2
+ }
+ };
+
+ var request = new TranscriptionContent
+ {
+ Audio = fileStream,
+ Options = options
+ };
+
+ var response = await client.TranscribeAsync(request);
+
+ Console.WriteLine($"File Duration: {response.Value.Duration}");
+ foreach (var phrase in response.Value.PhrasesByChannel.First().Phrases)
+ {
+ Console.WriteLine($"{phrase.Offset}-{phrase.Offset+phrase.Duration} [{phrase.Speaker}]: {phrase.Text}");
+ }
+}
+```
+
diff --git a/sdk/cognitiveservices/Azure.AI.Speech.Transcription/samples/Sample_4_AdvancedRemoteTranscription.md b/sdk/cognitiveservices/Azure.AI.Speech.Transcription/samples/Sample_4_AdvancedRemoteTranscription.md
new file mode 100644
index 000000000000..b06513e1932f
--- /dev/null
+++ b/sdk/cognitiveservices/Azure.AI.Speech.Transcription/samples/Sample_4_AdvancedRemoteTranscription.md
@@ -0,0 +1,153 @@
+# Advanced Remote File Transcription
+
+This sample demonstrates advanced scenarios for transcribing audio files from remote locations using the `Azure.AI.Speech.Transcription` SDK.
+
+## Transcribe from URL
+
+Transcribe an audio file directly from a public URL without downloading it first.
+
+```C# Snippet:TranscribeFromUrl
+// Specify the URL of the audio file to transcribe
+Uri audioUrl = new Uri("https://example.com/audio/sample.wav");
+
+// Configure transcription to use the remote URL
+TranscriptionOptions options = new TranscriptionOptions(audioUrl);
+
+// No audio stream needed - the service fetches the file from the URL
+Response<TranscriptionResult> response = await client.TranscribeAsync(options);
+TranscriptionResult result = response.Value;
+
+Console.WriteLine($"Transcribed audio from URL: {audioUrl}");
+Console.WriteLine($"Duration: {result.Duration}");
+
+var channelPhrases = result.PhrasesByChannel.First();
+Console.WriteLine($"\nTranscription:\n{channelPhrases.Text}");
+```
+
+## Download and Transcribe from HTTP
+
+Download an audio file from a remote location and transcribe it.
+
+```C# Snippet:TranscribeFromHttpStream
+// Download the audio file from a remote location
+string audioUrl = "https://example.com/audio/sample.wav";
+
+using HttpClient httpClient = new HttpClient();
+using HttpResponseMessage httpResponse = await httpClient.GetAsync(audioUrl);
+httpResponse.EnsureSuccessStatusCode();
+
+// Get the audio stream from the HTTP response
+using Stream audioStream = await httpResponse.Content.ReadAsStreamAsync();
+
+// Create transcription request with the downloaded stream
+TranscriptionContent request = new TranscriptionContent
+{
+ Audio = audioStream
+};
+
+Response<TranscriptionResult> response = await client.TranscribeAsync(request);
+TranscriptionResult result = response.Value;
+
+Console.WriteLine($"Downloaded and transcribed audio from: {audioUrl}");
+Console.WriteLine($"Duration: {result.Duration}");
+
+var channelPhrases = result.PhrasesByChannel.First();
+foreach (TranscribedPhrase phrase in channelPhrases.Phrases)
+{
+ Console.WriteLine($"[{phrase.Offset}] {phrase.Text}");
+}
+```
+
+## Transcribe from Azure Blob Storage
+
+Transcribe audio files stored in Azure Blob Storage using SAS URLs.
+
+```C# Snippet:TranscribeFromBlobStorage
+// Azure Blob Storage URL with SAS token for access
+Uri blobSasUrl = new Uri(
+ "https://mystorageaccount.blob.core.windows.net/audio-files/recording.wav?sv=2021-06-08&st=...");
+
+TranscriptionOptions options = new TranscriptionOptions(blobSasUrl);
+
+Response<TranscriptionResult> response = await client.TranscribeAsync(options);
+TranscriptionResult result = response.Value;
+
+Console.WriteLine($"Transcribed audio from Azure Blob Storage");
+Console.WriteLine($"Duration: {result.Duration}");
+
+var channelPhrases = result.PhrasesByChannel.First();
+Console.WriteLine($"\nFull Transcription:\n{channelPhrases.Text}");
+```
+
+## Transcribe Remote File with Options
+
+Combine remote file transcription with transcription options like locale and diarization.
+
+```C# Snippet:TranscribeRemoteFileWithOptions
+Uri audioUrl = new Uri("https://example.com/audio/spanish-interview.mp3");
+
+// Configure transcription options for remote audio
+TranscriptionOptions options = new TranscriptionOptions(audioUrl)
+{
+ ProfanityFilterMode = ProfanityFilterMode.Masked,
+ DiarizationOptions = new TranscriptionDiarizationOptions
+ {
+ // Enabled is automatically set to true when MaxSpeakers is specified
+ MaxSpeakers = 2
+ }
+};
+
+// Add Spanish locale
+options.Locales.Add("es-ES");
+
+Response<TranscriptionResult> response = await client.TranscribeAsync(options);
+TranscriptionResult result = response.Value;
+
+Console.WriteLine("Remote transcription with options:");
+Console.WriteLine($"Duration: {result.Duration}");
+
+var channelPhrases = result.PhrasesByChannel.First();
+foreach (TranscribedPhrase phrase in channelPhrases.Phrases)
+{
+ Console.WriteLine($"Speaker {phrase.Speaker}: {phrase.Text}");
+}
+```
+
+## Process Multiple Remote Files
+
+Process multiple audio files from different sources in parallel.
+
+```C# Snippet:TranscribeMultipleRemoteFiles
+// List of audio files to transcribe
+Uri[] audioUrls = new[]
+{
+ new Uri("https://example.com/audio/file1.wav"),
+ new Uri("https://example.com/audio/file2.wav"),
+ new Uri("https://example.com/audio/file3.wav")
+};
+
+// Create tasks for parallel transcription
+Task<Response<TranscriptionResult>>[] transcriptionTasks = audioUrls
+ .Select(url =>
+ {
+ TranscriptionOptions options = new TranscriptionOptions(url);
+
+ return client.TranscribeAsync(options);
+ })
+ .ToArray();
+
+// Wait for all transcriptions to complete
+Response<TranscriptionResult>[] responses = await Task.WhenAll(transcriptionTasks);
+
+// Process results
+for (int i = 0; i < responses.Length; i++)
+{
+ TranscriptionResult result = responses[i].Value;
+ Console.WriteLine($"\nFile {i + 1} ({audioUrls[i]}):");
+ Console.WriteLine($"Duration: {result.Duration}");
+
+ var channelPhrases = result.PhrasesByChannel.First();
+ Console.WriteLine($"Text: {channelPhrases.Text}");
+}
+```
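+
+When transcribing many files, consider bounding the degree of parallelism so a large batch does not overwhelm the service or trip throttling. Below is a minimal sketch using `SemaphoreSlim`; the limit of 4 is an arbitrary illustration, not a service-documented value.
+
+```C#
+// Cap the number of concurrent transcription calls.
+using SemaphoreSlim throttle = new SemaphoreSlim(4);
+
+Task<Response<TranscriptionResult>>[] throttledTasks = audioUrls
+ .Select(async url =>
+ {
+ await throttle.WaitAsync();
+ try
+ {
+ TranscriptionOptions options = new TranscriptionOptions(url);
+ return await client.TranscribeAsync(options);
+ }
+ finally
+ {
+ // Release the slot so the next queued file can start.
+ throttle.Release();
+ }
+ })
+ .ToArray();
+
+Response<TranscriptionResult>[] throttledResponses = await Task.WhenAll(throttledTasks);
+```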
+
diff --git a/sdk/cognitiveservices/Azure.AI.Speech.Transcription/samples/Sample_5_MockClient.md b/sdk/cognitiveservices/Azure.AI.Speech.Transcription/samples/Sample_5_MockClient.md
new file mode 100644
index 000000000000..30ed92bc7155
--- /dev/null
+++ b/sdk/cognitiveservices/Azure.AI.Speech.Transcription/samples/Sample_5_MockClient.md
@@ -0,0 +1,208 @@
+# Mock Client for Testing
+
+This sample demonstrates how to mock the `TranscriptionClient` for unit testing without making actual API calls to Azure.
+
+## Why Mock the Client?
+
+Mocking the `TranscriptionClient` is useful for:
+
+- Unit testing application logic without network calls
+- Testing error handling scenarios
+- Running tests in CI/CD pipelines without Azure credentials
+- Faster test execution
+
+## Create a Mock Client
+
+The `TranscriptionClient` provides a protected constructor for creating mock instances.
+
+```C# Snippet:CreateMockTranscriptionClient
+// TranscriptionClient provides a protected constructor for mocking
+// You can create a derived class for testing purposes
+
+// Example: Create a test-specific derived class
+var mockClient = new MockTranscriptionClient();
+
+// Use the mock client in your tests
+// It won't make actual API calls
+```
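+
+`MockTranscriptionClient` is not part of the library; it is a test helper you would write yourself. A minimal sketch is shown below, assuming the `TranscribeAsync(TranscriptionContent, CancellationToken)` overload is `virtual` (the usual Azure SDK convention). Note that the model factory in this package only populates the duration, so the configured text is stored for assertions rather than surfaced through `PhrasesByChannel`.
+
+```C#
+// Hypothetical test double; not part of the library.
+public class MockTranscriptionClient : TranscriptionClient
+{
+ // Uses the protected parameterless constructor intended for mocking.
+ public MockTranscriptionClient() : base()
+ {
+ }
+
+ // Stores the text your test expects; exposed for assertions in test code.
+ public string MockText { get; private set; }
+
+ public void SetMockResult(string text) => MockText = text;
+
+ public override Task<Response<TranscriptionResult>> TranscribeAsync(TranscriptionContent content, CancellationToken cancellationToken = default)
+ {
+ // The model factory shown in this package only sets the duration, so the
+ // returned result has empty phrase collections (hence the null checks in
+ // the snippets below).
+ TranscriptionResult result = AISpeechTranscriptionModelFactory.TranscriptionResult(durationMilliseconds: 5000);
+ return Task.FromResult(Response.FromValue(result, response: null));
+ }
+}
+```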
+
+## Mock Transcription Behavior
+
+Configure the mock client to return predefined results for testing.
+
+```C# Snippet:MockTranscriptionBehavior
+// Create a mock client that returns predefined results
+var mockClient = new MockTranscriptionClient();
+
+// Configure the mock to return a specific result
+var expectedText = "This is a mock transcription result";
+mockClient.SetMockResult(expectedText);
+
+// Create a test request
+using var audioStream = new MemoryStream(new byte[] { 0x00, 0x01, 0x02 });
+TranscriptionContent request = new TranscriptionContent
+{
+ Audio = audioStream
+};
+
+// Call the mock client
+Response<TranscriptionResult> response = await mockClient.TranscribeAsync(request);
+
+// Verify the result
+Assert.IsNotNull(response);
+Assert.IsNotNull(response.Value);
+
+// The mock client returns the configured result
+var phrases = response.Value.PhrasesByChannel.FirstOrDefault();
+if (phrases != null)
+{
+ Console.WriteLine($"Mock transcription: {phrases.Text}");
+}
+```
+
+## Use MockTransport for Testing
+
+Use the Azure SDK's `MockTransport` (from the `Azure.Core.TestFramework` package) to test without network calls.
+
+```C# Snippet:UseInMemoryTransport
+// Create a mock response that the client will return
+var mockResponseContent = @"{
+ ""durationMilliseconds"": 5000,
+ ""combinedPhrases"": [
+ {
+ ""channel"": 0,
+ ""text"": ""This is a test transcription""
+ }
+ ],
+ ""phrases"": [
+ {
+ ""channel"": 0,
+ ""offsetMilliseconds"": 0,
+ ""durationMilliseconds"": 5000,
+ ""text"": ""This is a test transcription"",
+ ""words"": [],
+ ""locale"": ""en-US"",
+ ""confidence"": 0.95
+ }
+ ]
+}";
+
+// Create options with a mock transport
+var mockTransport = new MockTransport(new MockResponse(200)
+{
+ ContentStream = new MemoryStream(System.Text.Encoding.UTF8.GetBytes(mockResponseContent))
+});
+
+TranscriptionClientOptions options = new TranscriptionClientOptions();
+options.Transport = mockTransport;
+
+// Create client with mock transport
+Uri endpoint = new Uri("https://mock.api.cognitive.microsoft.com/");
+AzureKeyCredential credential = new AzureKeyCredential("mock-key");
+TranscriptionClient client = new TranscriptionClient(endpoint, credential, options);
+
+// Make a request - it will use the mock response
+using var audioStream = new MemoryStream(new byte[] { 0x00, 0x01, 0x02 });
+TranscriptionContent request = new TranscriptionContent
+{
+ Audio = audioStream
+};
+
+Response<TranscriptionResult> response = await client.TranscribeAsync(request);
+
+// Verify the mock response was returned
+Console.WriteLine($"Duration: {response.Value.Duration}");
+var phrases = response.Value.PhrasesByChannel.First();
+Console.WriteLine($"Transcription: {phrases.Text}");
+```
+
+## Mock Error Scenarios
+
+Test error handling by mocking error responses.
+
+```C# Snippet:MockErrorScenarios
+// Create a mock transport that returns an error
+var mockTransport = new MockTransport(new MockResponse(401)
+{
+ ContentStream = new MemoryStream(System.Text.Encoding.UTF8.GetBytes(
+ @"{""error"": {""code"": ""Unauthorized"", ""message"": ""Invalid API key""}}"
+ ))
+});
+
+TranscriptionClientOptions options = new TranscriptionClientOptions();
+options.Transport = mockTransport;
+
+Uri endpoint = new Uri("https://mock.api.cognitive.microsoft.com/");
+AzureKeyCredential credential = new AzureKeyCredential("invalid-key");
+TranscriptionClient client = new TranscriptionClient(endpoint, credential, options);
+
+// Test error handling
+try
+{
+ using var audioStream = new MemoryStream(new byte[] { 0x00, 0x01, 0x02 });
+ TranscriptionContent request = new TranscriptionContent
+ {
+ Audio = audioStream
+ };
+
+ await client.TranscribeAsync(request);
+ Assert.Fail("Expected RequestFailedException was not thrown");
+}
+catch (RequestFailedException ex)
+{
+ // Verify error handling works correctly
+ Assert.AreEqual(401, ex.Status);
+ Console.WriteLine($"Successfully caught error: {ex.Message}");
+}
+```
+
+## Example: Testing Application Logic
+
+Here's a complete example of testing application code that uses `TranscriptionClient`:
+
+```csharp
+// Your application code
+public class TranscriptionService
+{
+ private readonly TranscriptionClient _client;
+
+ public TranscriptionService(TranscriptionClient client)
+ {
+ _client = client;
+ }
+
+ public async Task<string> GetTranscriptionAsync(Stream audioStream)
+ {
+ var request = new TranscriptionContent { Audio = audioStream };
+ var response = await _client.TranscribeAsync(request);
+ return response.Value.PhrasesByChannel.First().Text;
+ }
+}
+
+// Your test code
+[Test]
+public async Task TestTranscriptionService()
+{
+ // Arrange: Create mock client
+ var mockClient = new MockTranscriptionClient();
+ mockClient.SetMockResult("Test transcription");
+
+ var service = new TranscriptionService(mockClient);
+
+ // Act: Call your service
+ using var audioStream = new MemoryStream(new byte[] { 0x00 });
+ string result = await service.GetTranscriptionAsync(audioStream);
+
+ // Assert: Verify the result
+ Assert.AreEqual("Test transcription", result);
+}
+```
+
+## Best Practices
+
+- Use mocks for unit tests; use real client for integration tests
+- Test both success and error scenarios
+- Verify your application handles all possible response states
+- Use dependency injection to make your code testable
+- Keep mock data realistic to catch serialization issues
+
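+If you use dependency injection, this package also ships `Microsoft.Extensions.Azure` client builder extensions (see `AddTranscriptionClient`). A minimal registration sketch, assuming an ASP.NET Core `WebApplicationBuilder` named `builder`; the endpoint and key are placeholders:
+
+```C#
+// Register the client with the Azure client factory at startup.
+builder.Services.AddAzureClients(clientBuilder =>
+{
+ clientBuilder.AddTranscriptionClient(
+ new Uri("https://myaccount.api.cognitive.microsoft.com/"),
+ new AzureKeyCredential("your apikey"));
+});
+```
+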
diff --git a/sdk/cognitiveservices/Azure.AI.Speech.Transcription/src/Azure.AI.Speech.Transcription.csproj b/sdk/cognitiveservices/Azure.AI.Speech.Transcription/src/Azure.AI.Speech.Transcription.csproj
new file mode 100644
index 000000000000..f0ad75e9138e
--- /dev/null
+++ b/sdk/cognitiveservices/Azure.AI.Speech.Transcription/src/Azure.AI.Speech.Transcription.csproj
@@ -0,0 +1,20 @@
+<Project Sdk="Microsoft.NET.Sdk">
+ <PropertyGroup>
+ <Description>This is the Azure.AI.Speech.Transcription client library for developing .NET applications with rich experience.</Description>
+ <AssemblyTitle>Azure SDK Code Generation Azure.AI.Speech.Transcription for Azure Data Plane</AssemblyTitle>
+ <Version>1.0.0-beta.1</Version>
+ <PackageTags>Azure.AI.Speech.Transcription</PackageTags>
+ <TargetFrameworks>$(RequiredTargetFrameworks)</TargetFrameworks>
+ <IncludeOperationsSharedSource>true</IncludeOperationsSharedSource>
+ </PropertyGroup>
+
+ <ItemGroup>
+ <PackageReference Include="Azure.Core" />
+ </ItemGroup>
+</Project>
diff --git a/sdk/cognitiveservices/Azure.AI.Speech.Transcription/src/Custom/AISpeechTranscriptionModelFactory.cs b/sdk/cognitiveservices/Azure.AI.Speech.Transcription/src/Custom/AISpeechTranscriptionModelFactory.cs
new file mode 100644
index 000000000000..f9a4af7acf63
--- /dev/null
+++ b/sdk/cognitiveservices/Azure.AI.Speech.Transcription/src/Custom/AISpeechTranscriptionModelFactory.cs
@@ -0,0 +1,20 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+using System;
+using System.Collections.Generic;
+
+namespace Azure.AI.Speech.Transcription
+{
+ /// <summary> Model factory for models. </summary>
+ public static partial class AISpeechTranscriptionModelFactory
+ {
+ /// <summary> Initializes a new instance of <see cref="TranscriptionResult"/>. </summary>
+ /// <param name="durationMilliseconds"> The duration of the audio in milliseconds. </param>
+ /// <returns> A new <see cref="TranscriptionResult"/> instance for mocking. </returns>
+ public static TranscriptionResult TranscriptionResult(int durationMilliseconds = default)
+ {
+ return new TranscriptionResult(durationMilliseconds, new List<ChannelCombinedPhrases>(), new List<TranscribedPhrase>(), serializedAdditionalRawData: null);
+ }
+ }
+}
diff --git a/sdk/cognitiveservices/Azure.AI.Speech.Transcription/src/Custom/EnhancedModeProperties.Serialization.cs b/sdk/cognitiveservices/Azure.AI.Speech.Transcription/src/Custom/EnhancedModeProperties.Serialization.cs
new file mode 100644
index 000000000000..ac944cf40f11
--- /dev/null
+++ b/sdk/cognitiveservices/Azure.AI.Speech.Transcription/src/Custom/EnhancedModeProperties.Serialization.cs
@@ -0,0 +1,81 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+#nullable disable
+
+using System;
+using System.ClientModel.Primitives;
+using System.Text.Json;
+using Azure.Core;
+
+namespace Azure.AI.Speech.Transcription
+{
+ public partial class EnhancedModeProperties
+ {
+ /// <summary>
+ /// Custom serialization to auto-enable enhanced mode when properties are set.
+ /// The 'enabled' property is automatically set to true when task, targetLanguage, or prompt are specified.
+ /// </summary>
+ private void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options)
+ {
+ var format = options.Format == "W" ? ((IPersistableModel<EnhancedModeProperties>)this).GetFormatFromOptions(options) : options.Format;
+ if (format != "J")
+ {
+ throw new FormatException($"The model {nameof(EnhancedModeProperties)} does not support writing '{format}' format.");
+ }
+
+ // Auto-enable if any property is configured
+ bool shouldEnable = Optional.IsDefined(Task) ||
+ Optional.IsDefined(TargetLanguage) ||
+ Optional.IsCollectionDefined(Prompt);
+
+ if (shouldEnable)
+ {
+ writer.WritePropertyName("enabled"u8);
+ writer.WriteBooleanValue(true);
+ }
+ else if (Optional.IsDefined(Enabled))
+ {
+ // Only write explicit enabled value if nothing else is set (for deserialization round-trip)
+ writer.WritePropertyName("enabled"u8);
+ writer.WriteBooleanValue(Enabled.Value);
+ }
+
+ if (Optional.IsDefined(Task))
+ {
+ writer.WritePropertyName("task"u8);
+ writer.WriteStringValue(Task);
+ }
+ if (Optional.IsDefined(TargetLanguage))
+ {
+ writer.WritePropertyName("targetLanguage"u8);
+ writer.WriteStringValue(TargetLanguage);
+ }
+ if (Optional.IsCollectionDefined(Prompt))
+ {
+ writer.WritePropertyName("prompt"u8);
+ writer.WriteStartArray();
+ foreach (var item in Prompt)
+ {
+ writer.WriteStringValue(item);
+ }
+ writer.WriteEndArray();
+ }
+ if (options.Format != "W" && _serializedAdditionalRawData != null)
+ {
+ foreach (var item in _serializedAdditionalRawData)
+ {
+ writer.WritePropertyName(item.Key);
+#if NET6_0_OR_GREATER
+ writer.WriteRawValue(item.Value);
+#else
+ using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions))
+ {
+ JsonSerializer.Serialize(writer, document.RootElement);
+ }
+#endif
+ }
+ }
+ }
+ }
+}
diff --git a/sdk/cognitiveservices/Azure.AI.Speech.Transcription/src/Custom/TranscribedPhrase.cs b/sdk/cognitiveservices/Azure.AI.Speech.Transcription/src/Custom/TranscribedPhrase.cs
new file mode 100644
index 000000000000..272deaf26008
--- /dev/null
+++ b/sdk/cognitiveservices/Azure.AI.Speech.Transcription/src/Custom/TranscribedPhrase.cs
@@ -0,0 +1,34 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+using System;
+
+namespace Azure.AI.Speech.Transcription;
+
+public partial class TranscribedPhrase
+{
+ /// The start offset of the phrase.
+ public TimeSpan Offset
+ {
+ get
+ {
+ return TimeSpan.FromMilliseconds(OffsetMilliseconds);
+ }
+ }
+
+ /// The duration of the phrase.
+ public TimeSpan Duration
+ {
+ get
+ {
+ return TimeSpan.FromMilliseconds(DurationMilliseconds);
+ }
+ }
+
+ /// The 0-based channel index. Only present if channel separation is enabled.
+ internal int? Channel { get; }
+ /// The start offset of the phrase in milliseconds.
+ internal int OffsetMilliseconds { get; }
+ /// The duration of the phrase in milliseconds.
+ internal int DurationMilliseconds { get; }
+}
diff --git a/sdk/cognitiveservices/Azure.AI.Speech.Transcription/src/Custom/TranscribedPhrases.cs b/sdk/cognitiveservices/Azure.AI.Speech.Transcription/src/Custom/TranscribedPhrases.cs
new file mode 100644
index 000000000000..e201d10d517c
--- /dev/null
+++ b/sdk/cognitiveservices/Azure.AI.Speech.Transcription/src/Custom/TranscribedPhrases.cs
@@ -0,0 +1,34 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+using System.Collections.Generic;
+
+namespace Azure.AI.Speech.Transcription
+{
+ /// TranscribedPhrases.
+ public partial class TranscribedPhrases
+ {
+ /// <summary>
+ /// Initializes a new instance of the <see cref="TranscribedPhrases"/> class.
+ /// </summary>
+ /// <param name="channel"> The 0-based channel index. Only present if channel separation is enabled. </param>
+ /// <param name="text"> The complete transcribed text for the channel. </param>
+ /// <param name="phrases"> The transcription results segmented into phrases. </param>
+ public TranscribedPhrases(int? channel, string text, IEnumerable<TranscribedPhrase> phrases)
+ {
+ Channel = channel;
+ Text = text;
+ Phrases = phrases;
+ }
+
+ /// The 0-based channel index. Only present if channel separation is enabled.
+ public int? Channel { get; }
+
+ /// The complete transcribed text for the channel.
+ public string Text { get; }
+
+ /// The transcription results segmented into phrases.
+ public IEnumerable<TranscribedPhrase> Phrases { get; }
+ }
+}
diff --git a/sdk/cognitiveservices/Azure.AI.Speech.Transcription/src/Custom/TranscribedWord.cs b/sdk/cognitiveservices/Azure.AI.Speech.Transcription/src/Custom/TranscribedWord.cs
new file mode 100644
index 000000000000..e76946264d18
--- /dev/null
+++ b/sdk/cognitiveservices/Azure.AI.Speech.Transcription/src/Custom/TranscribedWord.cs
@@ -0,0 +1,32 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+using System;
+
+namespace Azure.AI.Speech.Transcription;
+
+public partial class TranscribedWord
+{
+ /// The start offset of the word.
+ public TimeSpan Offset
+ {
+ get
+ {
+ return TimeSpan.FromMilliseconds(OffsetMilliseconds);
+ }
+ }
+
+ /// The duration of the word.
+ public TimeSpan Duration
+ {
+ get
+ {
+ return TimeSpan.FromMilliseconds(DurationMilliseconds);
+ }
+ }
+
+ /// The start offset of the word in milliseconds.
+ internal int OffsetMilliseconds { get; }
+ /// The duration of the word in milliseconds.
+ internal int DurationMilliseconds { get; }
+}
diff --git a/sdk/cognitiveservices/Azure.AI.Speech.Transcription/src/Custom/TranscriptionClient.cs b/sdk/cognitiveservices/Azure.AI.Speech.Transcription/src/Custom/TranscriptionClient.cs
new file mode 100644
index 000000000000..0a91498dde16
--- /dev/null
+++ b/sdk/cognitiveservices/Azure.AI.Speech.Transcription/src/Custom/TranscriptionClient.cs
@@ -0,0 +1,38 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+#nullable disable
+
+using System;
+using System.Threading;
+using System.Threading.Tasks;
+using Azure.Core;
+using Azure.Core.Pipeline;
+
+namespace Azure.AI.Speech.Transcription
+{
+ // Data plane generated client.
+ /// The Transcription service client.
+ public partial class TranscriptionClient
+ {
+ /// <summary> Transcribes the provided audio stream. </summary>
+ /// <param name="options"> The transcription options containing audio and configuration. </param>
+ /// <param name="cancellationToken"> The cancellation token to use. </param>
+ /// <exception cref="ArgumentNullException"> <paramref name="options"/> is null. </exception>
+ public virtual async Task<Response<TranscriptionResult>> TranscribeAsync(TranscriptionOptions options, CancellationToken cancellationToken = default)
+ {
+ Argument.AssertNotNull(options, nameof(options));
+
+ TranscriptionContent body = new TranscriptionContent(options, options.AudioStream, null);
+ return await TranscribeAsync(body, cancellationToken).ConfigureAwait(false);
+ }
+
+ /// <summary> Transcribes the provided audio stream. </summary>
+ /// <param name="options"> The transcription options containing audio and configuration. </param>
+ /// <param name="cancellationToken"> The cancellation token to use. </param>
+ /// <exception cref="ArgumentNullException"> <paramref name="options"/> is null. </exception>
+ public virtual Response<TranscriptionResult> Transcribe(TranscriptionOptions options, CancellationToken cancellationToken = default)
+ {
+ Argument.AssertNotNull(options, nameof(options));
+
+ TranscriptionContent body = new TranscriptionContent(options, options.AudioStream, null);
+ return Transcribe(body, cancellationToken);
+ }
+ }
+}
diff --git a/sdk/cognitiveservices/Azure.AI.Speech.Transcription/src/Custom/TranscriptionDiarizationOptions.Serialization.cs b/sdk/cognitiveservices/Azure.AI.Speech.Transcription/src/Custom/TranscriptionDiarizationOptions.Serialization.cs
new file mode 100644
index 000000000000..38fe8b5f0320
--- /dev/null
+++ b/sdk/cognitiveservices/Azure.AI.Speech.Transcription/src/Custom/TranscriptionDiarizationOptions.Serialization.cs
@@ -0,0 +1,64 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+#nullable disable
+
+using System;
+using System.ClientModel.Primitives;
+using System.Text.Json;
+using Azure.Core;
+
+namespace Azure.AI.Speech.Transcription
+{
+ public partial class TranscriptionDiarizationOptions
+ {
+ /// <summary>
+ /// Custom serialization to auto-enable diarization when MaxSpeakers is set.
+ /// The 'enabled' property is automatically set to true when maxSpeakers is specified.
+ /// </summary>
+ private void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options)
+ {
+ var format = options.Format == "W" ? ((IPersistableModel<TranscriptionDiarizationOptions>)this).GetFormatFromOptions(options) : options.Format;
+ if (format != "J")
+ {
+ throw new FormatException($"The model {nameof(TranscriptionDiarizationOptions)} does not support writing '{format}' format.");
+ }
+
+ // Auto-enable if MaxSpeakers is configured
+ bool shouldEnable = Optional.IsDefined(MaxSpeakers);
+
+ if (shouldEnable)
+ {
+ writer.WritePropertyName("enabled"u8);
+ writer.WriteBooleanValue(true);
+ }
+ else if (Optional.IsDefined(Enabled))
+ {
+ // Only write explicit enabled value if MaxSpeakers is not set (for deserialization round-trip)
+ writer.WritePropertyName("enabled"u8);
+ writer.WriteBooleanValue(Enabled.Value);
+ }
+
+ if (Optional.IsDefined(MaxSpeakers))
+ {
+ writer.WritePropertyName("maxSpeakers"u8);
+ writer.WriteNumberValue(MaxSpeakers.Value);
+ }
+ if (options.Format != "W" && _serializedAdditionalRawData != null)
+ {
+ foreach (var item in _serializedAdditionalRawData)
+ {
+ writer.WritePropertyName(item.Key);
+#if NET6_0_OR_GREATER
+ writer.WriteRawValue(item.Value);
+#else
+ using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions))
+ {
+ JsonSerializer.Serialize(writer, document.RootElement);
+ }
+#endif
+ }
+ }
+ }
+ }
+}
diff --git a/sdk/cognitiveservices/Azure.AI.Speech.Transcription/src/Custom/TranscriptionOptions.cs b/sdk/cognitiveservices/Azure.AI.Speech.Transcription/src/Custom/TranscriptionOptions.cs
new file mode 100644
index 000000000000..1b0867a710a4
--- /dev/null
+++ b/sdk/cognitiveservices/Azure.AI.Speech.Transcription/src/Custom/TranscriptionOptions.cs
@@ -0,0 +1,43 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+#nullable disable
+
+using System;
+using System.Collections.Generic;
+using System.IO;
+
+namespace Azure.AI.Speech.Transcription
+{
+ #pragma warning disable SCM0005
+ /// Metadata for a transcription request.
+ public partial class TranscriptionOptions
+ {
+ /// <summary> Initializes a new instance of <see cref="TranscriptionOptions"/> with an audio URI. </summary>
+ /// <param name="audioUri"> The URL of the audio to be transcribed. </param>
+ public TranscriptionOptions(Uri audioUri) : this()
+ {
+ AudioUri = audioUri;
+ }
+
+ /// <summary> Initializes a new instance of <see cref="TranscriptionOptions"/> with an audio stream. </summary>
+ /// <param name="audioStream"> The audio stream to be transcribed. </param>
+ public TranscriptionOptions(Stream audioStream) : this()
+ {
+ AudioStream = audioStream;
+ }
+
+ /// <summary> Initializes a new instance of <see cref="TranscriptionOptions"/>. </summary>
+ public TranscriptionOptions()
+ {
+ Locales = new ChangeTrackingList<string>();
+ Models = new ChangeTrackingDictionary<string, Uri>();
+ ActiveChannels = new ChangeTrackingList<int>();
+ }
+
+ /// The URL of the audio to be transcribed. The audio must be shorter than 2 hours in audio duration and smaller than 250 MB in size. If both Audio and AudioUrl are provided, Audio is used.
+ public Uri AudioUri { get; }
+ internal Stream AudioStream { get; }
+ }
+ #pragma warning restore SCM0005
+}
diff --git a/sdk/cognitiveservices/Azure.AI.Speech.Transcription/src/Custom/TranscriptionResult.cs b/sdk/cognitiveservices/Azure.AI.Speech.Transcription/src/Custom/TranscriptionResult.cs
new file mode 100644
index 000000000000..0dbd3229db86
--- /dev/null
+++ b/sdk/cognitiveservices/Azure.AI.Speech.Transcription/src/Custom/TranscriptionResult.cs
@@ -0,0 +1,55 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+using System;
+using System.Collections.Generic;
+using System.Linq;
+
+namespace Azure.AI.Speech.Transcription;
+
+public partial class TranscriptionResult
+{
+ /// The duration of the audio.
+ public TimeSpan Duration
+ {
+ get
+ {
+ return TimeSpan.FromMilliseconds(DurationMilliseconds);
+ }
+ }
+
+ private IEnumerable<TranscribedPhrases> _phrasesByChannel;
+
+ /// The transcribed phrases grouped by their channel.
+ public IEnumerable<TranscribedPhrases> PhrasesByChannel
+ {
+ get
+ {
+ if (_phrasesByChannel != null)
+ {
+ return _phrasesByChannel;
+ }
+ var phrasesByChannel = new List<TranscribedPhrases>();
+
+ // Key -1 stands in for "no channel" when channel separation is disabled.
+ var combinedPhrases = CombinedPhrases.ToDictionary((phrase) => phrase.Channel ?? -1);
+ var phrasesPerChannel = Phrases.GroupBy((phrase) => phrase.Channel).ToDictionary((e) => e.Key ?? -1, (e) => e.ToList());
+ foreach (var key in combinedPhrases.Keys)
+ {
+ var combinedPhrase = combinedPhrases[key];
+ var channelPhrases = phrasesPerChannel[key];
+ phrasesByChannel.Add(new TranscribedPhrases(key == -1 ? null : key, combinedPhrase.Text, channelPhrases));
+ }
+
+ _phrasesByChannel = phrasesByChannel;
+ return _phrasesByChannel;
+ }
+ }
+
+ /// The duration of the audio in milliseconds.
+ internal int DurationMilliseconds { get; }
+
+ /// The full transcript for each channel.
+ internal IReadOnlyList<ChannelCombinedPhrases> CombinedPhrases { get; }
+ /// The transcription results segmented into phrases.
+ internal IReadOnlyList<TranscribedPhrase> Phrases { get; }
+}
diff --git a/sdk/cognitiveservices/Azure.AI.Speech.Transcription/src/Generated/AISpeechTranscriptionClientBuilderExtensions.cs b/sdk/cognitiveservices/Azure.AI.Speech.Transcription/src/Generated/AISpeechTranscriptionClientBuilderExtensions.cs
new file mode 100644
index 000000000000..440116bb9441
--- /dev/null
+++ b/sdk/cognitiveservices/Azure.AI.Speech.Transcription/src/Generated/AISpeechTranscriptionClientBuilderExtensions.cs
@@ -0,0 +1,49 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+//
+
+#nullable disable
+
+using System;
+using System.Diagnostics.CodeAnalysis;
+using Azure;
+using Azure.AI.Speech.Transcription;
+using Azure.Core.Extensions;
+
+namespace Microsoft.Extensions.Azure
+{
+ /// <summary> Extension methods to add <see cref="TranscriptionClient"/> to client builder. </summary>
+ public static partial class AISpeechTranscriptionClientBuilderExtensions
+ {
+ /// <summary> Registers a <see cref="TranscriptionClient"/> instance. </summary>
+ /// <param name="builder"> The builder to register with. </param>
+ /// <param name="endpoint"> Supported Cognitive Services endpoints (protocol and hostname, for example: https://westus.api.cognitive.microsoft.com). </param>
+ /// <param name="credential"> A credential used to authenticate to an Azure Service. </param>
+ public static IAzureClientBuilder<TranscriptionClient, TranscriptionClientOptions> AddTranscriptionClient<TBuilder>(this TBuilder builder, Uri endpoint, AzureKeyCredential credential)
+ where TBuilder : IAzureClientFactoryBuilder
+ {
+ return builder.RegisterClientFactory<TranscriptionClient, TranscriptionClientOptions>((options) => new TranscriptionClient(endpoint, credential, options));
+ }
+
+ /// <summary> Registers a <see cref="TranscriptionClient"/> instance. </summary>
+ /// <param name="builder"> The builder to register with. </param>
+ /// <param name="endpoint"> Supported Cognitive Services endpoints (protocol and hostname, for example: https://westus.api.cognitive.microsoft.com). </param>
+ public static IAzureClientBuilder<TranscriptionClient, TranscriptionClientOptions> AddTranscriptionClient<TBuilder>(this TBuilder builder, Uri endpoint)
+ where TBuilder : IAzureClientFactoryBuilderWithCredential
+ {
+ return builder.RegisterClientFactory<TranscriptionClient, TranscriptionClientOptions>((options, cred) => new TranscriptionClient(endpoint, cred, options));
+ }
+
+ /// <summary> Registers a <see cref="TranscriptionClient"/> instance. </summary>
+ /// <param name="builder"> The builder to register with. </param>
+ /// <param name="configuration"> The configuration values. </param>
+ [RequiresUnreferencedCode("Requires unreferenced code until we opt into EnableConfigurationBindingGenerator.")]
+ [RequiresDynamicCode("Requires unreferenced code until we opt into EnableConfigurationBindingGenerator.")]
+ public static IAzureClientBuilder<TranscriptionClient, TranscriptionClientOptions> AddTranscriptionClient<TBuilder, TConfiguration>(this TBuilder builder, TConfiguration configuration)
+ where TBuilder : IAzureClientFactoryBuilderWithConfiguration<TConfiguration>
+ {
+ return builder.RegisterClientFactory<TranscriptionClient, TranscriptionClientOptions>(configuration);
+ }
+ }
+}
diff --git a/sdk/cognitiveservices/Azure.AI.Speech.Transcription/src/Generated/AISpeechTranscriptionModelFactory.cs b/sdk/cognitiveservices/Azure.AI.Speech.Transcription/src/Generated/AISpeechTranscriptionModelFactory.cs
new file mode 100644
index 000000000000..a6e1c8319275
--- /dev/null
+++ b/sdk/cognitiveservices/Azure.AI.Speech.Transcription/src/Generated/AISpeechTranscriptionModelFactory.cs
@@ -0,0 +1,67 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+//
+
+#nullable disable
+
+using System;
+using System.Collections.Generic;
+using System.Linq;
+
+namespace Azure.AI.Speech.Transcription
+{
+ /// <summary> Model factory for models. </summary>
+ public static partial class AISpeechTranscriptionModelFactory
+ {
+ /// <summary> Initializes a new instance of <see cref="TranscriptionOptions"/>. </summary>
+ /// <param name="audioUri"> The URL of the audio to be transcribed. The audio must be shorter than 2 hours in audio duration and smaller than 250 MB in size. If both Audio and AudioUrl are provided, Audio is used. </param>
+ /// <param name="locales"> A list of possible locales for the transcription. If not specified, the locale of the speech in the audio is detected automatically from all supported locales. </param>
+ /// <param name="models"> Maps some or all candidate locales to a model URI to be used for transcription. If no mapping is given, the default model for the locale is used. </param>
+ /// <param name="profanityFilterMode"> Mode of profanity filtering. </param>
+ /// <param name="diarizationOptions"> Mode of diarization. </param>
+ /// <param name="activeChannels"> The 0-based indices of the channels to be transcribed separately. If not specified, multiple channels are merged and transcribed jointly. Only up to two channels are supported. </param>
+ /// <param name="enhancedMode"> Enhanced mode properties. </param>
+ /// <param name="phraseList"> Phrase list properties. </param>
+ /// <returns> A new <see cref="TranscriptionOptions"/> instance for mocking. </returns>
+ public static TranscriptionOptions TranscriptionOptions(Uri audioUri = null, IEnumerable<string> locales = null, IDictionary<string, Uri> models = null, ProfanityFilterMode? profanityFilterMode = null, TranscriptionDiarizationOptions diarizationOptions = null, IEnumerable<int> activeChannels = null, EnhancedModeProperties enhancedMode = null, PhraseListProperties phraseList = null)
+ {
+ locales ??= new List<string>();
+ models ??= new Dictionary<string, Uri>();
+ activeChannels ??= new List<int>();
+
+ return new TranscriptionOptions(
+ audioUri,
+ locales?.ToList(),
+ models,
+ profanityFilterMode,
+ diarizationOptions,
+ activeChannels?.ToList(),
+ enhancedMode,
+ phraseList,
+ serializedAdditionalRawData: null);
+ }
+
+ /// <summary> Initializes a new instance of <see cref="TranscriptionDiarizationOptions"/>. </summary>
+ /// <param name="enabled"> Enable speaker diarization. This is automatically set to true when maxSpeakers is specified. </param>
+ /// <param name="maxSpeakers"> Gets or sets a hint for the maximum number of speakers for diarization. Must be greater than 1 and less than 36. </param>
+ /// <returns> A new <see cref="TranscriptionDiarizationOptions"/> instance for mocking. </returns>
+ public static TranscriptionDiarizationOptions TranscriptionDiarizationOptions(bool? enabled = null, int? maxSpeakers = null)
+ {
+ return new TranscriptionDiarizationOptions(enabled, maxSpeakers, serializedAdditionalRawData: null);
+ }
+
+ /// <summary> Initializes a new instance of <see cref="EnhancedModeProperties"/>. </summary>
+ /// <param name="enabled"> Enable enhanced mode for transcription. This is automatically set to true when task, targetLanguage, or prompt are specified. </param>
+ /// <param name="task"> Task type for enhanced mode. </param>
+ /// <param name="targetLanguage"> Target language for enhanced mode. </param>
+ /// <param name="prompt"> A list of user prompts. </param>
+ /// <returns> A new <see cref="EnhancedModeProperties"/> instance for mocking. </returns>
+ public static EnhancedModeProperties EnhancedModeProperties(bool? enabled = null, string task = null, string targetLanguage = null, IEnumerable<string> prompt = null)
+ {
+ prompt ??= new List<string>();
+
+ return new EnhancedModeProperties(enabled, task, targetLanguage, prompt?.ToList(), serializedAdditionalRawData: null);
+ }
+ }
+}
diff --git a/sdk/cognitiveservices/Azure.AI.Speech.Transcription/src/Generated/ChannelCombinedPhrases.Serialization.cs b/sdk/cognitiveservices/Azure.AI.Speech.Transcription/src/Generated/ChannelCombinedPhrases.Serialization.cs
new file mode 100644
index 000000000000..c04ef1bf1b9d
--- /dev/null
+++ b/sdk/cognitiveservices/Azure.AI.Speech.Transcription/src/Generated/ChannelCombinedPhrases.Serialization.cs
@@ -0,0 +1,157 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+//
+
+#nullable disable
+
+using System;
+using System.ClientModel.Primitives;
+using System.Collections.Generic;
+using System.Text.Json;
+using Azure.Core;
+
+namespace Azure.AI.Speech.Transcription
+{
+ internal partial class ChannelCombinedPhrases : IUtf8JsonSerializable, IJsonModel<ChannelCombinedPhrases>
+ {
+ void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel<ChannelCombinedPhrases>)this).Write(writer, ModelSerializationExtensions.WireOptions);
+
+ void IJsonModel<ChannelCombinedPhrases>.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options)
+ {
+ writer.WriteStartObject();
+ JsonModelWriteCore(writer, options);
+ writer.WriteEndObject();
+ }
+
+ /// <param name="writer"> The JSON writer. </param>
+ /// <param name="options"> The client options for reading and writing models. </param>
+ protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options)
+ {
+ var format = options.Format == "W" ? ((IPersistableModel<ChannelCombinedPhrases>)this).GetFormatFromOptions(options) : options.Format;
+ if (format != "J")
+ {
+ throw new FormatException($"The model {nameof(ChannelCombinedPhrases)} does not support writing '{format}' format.");
+ }
+
+ if (Optional.IsDefined(Channel))
+ {
+ writer.WritePropertyName("channel"u8);
+ writer.WriteNumberValue(Channel.Value);
+ }
+ writer.WritePropertyName("text"u8);
+ writer.WriteStringValue(Text);
+ if (options.Format != "W" && _serializedAdditionalRawData != null)
+ {
+ foreach (var item in _serializedAdditionalRawData)
+ {
+ writer.WritePropertyName(item.Key);
+#if NET6_0_OR_GREATER
+ writer.WriteRawValue(item.Value);
+#else
+ using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions))
+ {
+ JsonSerializer.Serialize(writer, document.RootElement);
+ }
+#endif
+ }
+ }
+ }
+
+ ChannelCombinedPhrases IJsonModel<ChannelCombinedPhrases>.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options)
+ {
+ var format = options.Format == "W" ? ((IPersistableModel<ChannelCombinedPhrases>)this).GetFormatFromOptions(options) : options.Format;
+ if (format != "J")
+ {
+ throw new FormatException($"The model {nameof(ChannelCombinedPhrases)} does not support reading '{format}' format.");
+ }
+
+ using JsonDocument document = JsonDocument.ParseValue(ref reader);
+ return DeserializeChannelCombinedPhrases(document.RootElement, options);
+ }
+
+ internal static ChannelCombinedPhrases DeserializeChannelCombinedPhrases(JsonElement element, ModelReaderWriterOptions options = null)
+ {
+ options ??= ModelSerializationExtensions.WireOptions;
+
+ if (element.ValueKind == JsonValueKind.Null)
+ {
+ return null;
+ }
+ int? channel = default;
+ string text = default;
+ IDictionary<string, BinaryData> serializedAdditionalRawData = default;
+ Dictionary<string, BinaryData> rawDataDictionary = new Dictionary<string, BinaryData>();
+ foreach (var property in element.EnumerateObject())
+ {
+ if (property.NameEquals("channel"u8))
+ {
+ if (property.Value.ValueKind == JsonValueKind.Null)
+ {
+ continue;
+ }
+ channel = property.Value.GetInt32();
+ continue;
+ }
+ if (property.NameEquals("text"u8))
+ {
+ text = property.Value.GetString();
+ continue;
+ }
+ if (options.Format != "W")
+ {
+ rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText()));
+ }
+ }
+ serializedAdditionalRawData = rawDataDictionary;
+ return new ChannelCombinedPhrases(channel, text, serializedAdditionalRawData);
+ }
+
+ BinaryData IPersistableModel<ChannelCombinedPhrases>.Write(ModelReaderWriterOptions options)
+ {
+ var format = options.Format == "W" ? ((IPersistableModel<ChannelCombinedPhrases>)this).GetFormatFromOptions(options) : options.Format;
+
+ switch (format)
+ {
+ case "J":
+ return ModelReaderWriter.Write(this, options, AzureAISpeechTranscriptionContext.Default);
+ default:
+ throw new FormatException($"The model {nameof(ChannelCombinedPhrases)} does not support writing '{options.Format}' format.");
+ }
+ }
+
+ ChannelCombinedPhrases IPersistableModel<ChannelCombinedPhrases>.Create(BinaryData data, ModelReaderWriterOptions options)
+ {
+ var format = options.Format == "W" ? ((IPersistableModel<ChannelCombinedPhrases>)this).GetFormatFromOptions(options) : options.Format;
+
+ switch (format)
+ {
+ case "J":
+ {
+ using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions);
+ return DeserializeChannelCombinedPhrases(document.RootElement, options);
+ }
+ default:
+ throw new FormatException($"The model {nameof(ChannelCombinedPhrases)} does not support reading '{options.Format}' format.");
+ }
+ }
+
+ string IPersistableModel<ChannelCombinedPhrases>.GetFormatFromOptions(ModelReaderWriterOptions options) => "J";
+
+ /// <summary> Deserializes the model from a raw response. </summary>
+ /// <param name="response"> The response to deserialize the model from. </param>
+ internal static ChannelCombinedPhrases FromResponse(Response response)
+ {
+ using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions);
+ return DeserializeChannelCombinedPhrases(document.RootElement);
+ }
+
+ /// <summary> Convert into a <see cref="RequestContent"/>. </summary>
+ internal virtual RequestContent ToRequestContent()
+ {
+ var content = new Utf8JsonRequestContent();
+ content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions);
+ return content;
+ }
+ }
+}
diff --git a/sdk/cognitiveservices/Azure.AI.Speech.Transcription/src/Generated/ChannelCombinedPhrases.cs b/sdk/cognitiveservices/Azure.AI.Speech.Transcription/src/Generated/ChannelCombinedPhrases.cs
new file mode 100644
index 000000000000..f17b153c52d6
--- /dev/null
+++ b/sdk/cognitiveservices/Azure.AI.Speech.Transcription/src/Generated/ChannelCombinedPhrases.cs
@@ -0,0 +1,79 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+//
+
+#nullable disable
+
+using System;
+using System.Collections.Generic;
+
+namespace Azure.AI.Speech.Transcription
+{
+ /// The full transcript per channel.
+ internal partial class ChannelCombinedPhrases
+ {
+ /// <summary>
+ /// Keeps track of any properties unknown to the library.
+ /// <para>
+ /// To assign an object to the value of this property use <see cref="BinaryData.FromObjectAsJson{T}(T, System.Text.Json.JsonSerializerOptions?)"/>.
+ /// </para>
+ /// <para>
+ /// To assign an already formatted json string to this property use <see cref="BinaryData.FromString(string)"/>.
+ /// </para>
+ /// <para>
+ /// Examples:
+ /// <list type="bullet">
+ /// <item>
+ /// <term>BinaryData.FromObjectAsJson("foo")</term>
+ /// <description>Creates a payload of "foo".</description>
+ /// </item>
+ /// <item>
+ /// <term>BinaryData.FromString("\"foo\"")</term>
+ /// <description>Creates a payload of "foo".</description>
+ /// </item>
+ /// <item>
+ /// <term>BinaryData.FromObjectAsJson(new { key = "value" })</term>
+ /// <description>Creates a payload of { "key": "value" }.</description>
+ /// </item>
+ /// <item>
+ /// <term>BinaryData.FromString("{\"key\": \"value\"}")</term>
+ /// <description>Creates a payload of { "key": "value" }.</description>
+ /// </item>
+ /// </list>
+ /// </para>
+ /// </summary>
+ private IDictionary<string, BinaryData> _serializedAdditionalRawData;
+
+ /// <summary> Initializes a new instance of <see cref="ChannelCombinedPhrases"/>. </summary>
+ /// <param name="text"> The complete transcribed text for the channel. </param>
+ /// <exception cref="ArgumentNullException"> <paramref name="text"/> is null. </exception>
+ internal ChannelCombinedPhrases(string text)
+ {
+ Argument.AssertNotNull(text, nameof(text));
+
+ Text = text;
+ }
+
+ /// <summary> Initializes a new instance of <see cref="ChannelCombinedPhrases"/>. </summary>
+ /// <param name="channel"> The 0-based channel index. Only present if channel separation is enabled. </param>
+ /// <param name="text"> The complete transcribed text for the channel. </param>
+ /// <param name="serializedAdditionalRawData"> Keeps track of any properties unknown to the library. </param>
+ internal ChannelCombinedPhrases(int? channel, string text, IDictionary<string, BinaryData> serializedAdditionalRawData)
+ {
+ Channel = channel;
+ Text = text;
+ _serializedAdditionalRawData = serializedAdditionalRawData;
+ }
+
+ /// <summary> Initializes a new instance of <see cref="ChannelCombinedPhrases"/> for deserialization. </summary>
+ internal ChannelCombinedPhrases()
+ {
+ }
+
+ /// The 0-based channel index. Only present if channel separation is enabled.
+ public int? Channel { get; }
+ /// The complete transcribed text for the channel.
+ public string Text { get; }
+ }
+}
diff --git a/sdk/cognitiveservices/Azure.AI.Speech.Transcription/src/Generated/EnhancedModeProperties.Serialization.cs b/sdk/cognitiveservices/Azure.AI.Speech.Transcription/src/Generated/EnhancedModeProperties.Serialization.cs
new file mode 100644
index 000000000000..127039c273e0
--- /dev/null
+++ b/sdk/cognitiveservices/Azure.AI.Speech.Transcription/src/Generated/EnhancedModeProperties.Serialization.cs
@@ -0,0 +1,144 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+//
+
+#nullable disable
+
+using System;
+using System.ClientModel.Primitives;
+using System.Collections.Generic;
+using System.Text.Json;
+using Azure.Core;
+
+namespace Azure.AI.Speech.Transcription
+{
+ public partial class EnhancedModeProperties : IUtf8JsonSerializable, IJsonModel<EnhancedModeProperties>
+ {
+ void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel<EnhancedModeProperties>)this).Write(writer, ModelSerializationExtensions.WireOptions);
+
+ void IJsonModel<EnhancedModeProperties>.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options)
+ {
+ writer.WriteStartObject();
+ JsonModelWriteCore(writer, options);
+ writer.WriteEndObject();
+ }
+
+ EnhancedModeProperties IJsonModel<EnhancedModeProperties>.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options)
+ {
+ var format = options.Format == "W" ? ((IPersistableModel<EnhancedModeProperties>)this).GetFormatFromOptions(options) : options.Format;
+ if (format != "J")
+ {
+ throw new FormatException($"The model {nameof(EnhancedModeProperties)} does not support reading '{format}' format.");
+ }
+
+ using JsonDocument document = JsonDocument.ParseValue(ref reader);
+ return DeserializeEnhancedModeProperties(document.RootElement, options);
+ }
+
+ internal static EnhancedModeProperties DeserializeEnhancedModeProperties(JsonElement element, ModelReaderWriterOptions options = null)
+ {
+ options ??= ModelSerializationExtensions.WireOptions;
+
+ if (element.ValueKind == JsonValueKind.Null)
+ {
+ return null;
+ }
+ bool? enabled = default;
+ string task = default;
+ string targetLanguage = default;
+ IList<string> prompt = default;
+ IDictionary<string, BinaryData> serializedAdditionalRawData = default;
+ Dictionary<string, BinaryData> rawDataDictionary = new Dictionary<string, BinaryData>();
+ foreach (var property in element.EnumerateObject())
+ {
+ if (property.NameEquals("enabled"u8))
+ {
+ if (property.Value.ValueKind == JsonValueKind.Null)
+ {
+ continue;
+ }
+ enabled = property.Value.GetBoolean();
+ continue;
+ }
+ if (property.NameEquals("task"u8))
+ {
+ task = property.Value.GetString();
+ continue;
+ }
+ if (property.NameEquals("targetLanguage"u8))
+ {
+ targetLanguage = property.Value.GetString();
+ continue;
+ }
+ if (property.NameEquals("prompt"u8))
+ {
+ if (property.Value.ValueKind == JsonValueKind.Null)
+ {
+ continue;
+ }
+ List<string> array = new List<string>();
+ foreach (var item in property.Value.EnumerateArray())
+ {
+ array.Add(item.GetString());
+ }
+ prompt = array;
+ continue;
+ }
+ if (options.Format != "W")
+ {
+ rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText()));
+ }
+ }
+ serializedAdditionalRawData = rawDataDictionary;
+ return new EnhancedModeProperties(enabled, task, targetLanguage, prompt ?? new ChangeTrackingList<string>(), serializedAdditionalRawData);
+ }
+
+ BinaryData IPersistableModel<EnhancedModeProperties>.Write(ModelReaderWriterOptions options)
+ {
+ var format = options.Format == "W" ? ((IPersistableModel<EnhancedModeProperties>)this).GetFormatFromOptions(options) : options.Format;
+
+ switch (format)
+ {
+ case "J":
+ return ModelReaderWriter.Write(this, options, AzureAISpeechTranscriptionContext.Default);
+ default:
+ throw new FormatException($"The model {nameof(EnhancedModeProperties)} does not support writing '{options.Format}' format.");
+ }
+ }
+
+ EnhancedModeProperties IPersistableModel<EnhancedModeProperties>.Create(BinaryData data, ModelReaderWriterOptions options)
+ {
+ var format = options.Format == "W" ? ((IPersistableModel<EnhancedModeProperties>)this).GetFormatFromOptions(options) : options.Format;
+
+ switch (format)
+ {
+ case "J":
+ {
+ using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions);
+ return DeserializeEnhancedModeProperties(document.RootElement, options);
+ }
+ default:
+ throw new FormatException($"The model {nameof(EnhancedModeProperties)} does not support reading '{options.Format}' format.");
+ }
+ }
+
+ string IPersistableModel<EnhancedModeProperties>.GetFormatFromOptions(ModelReaderWriterOptions options) => "J";
+
+ /// <summary> Deserializes the model from a raw response. </summary>
+ /// <param name="response"> The response to deserialize the model from. </param>
+ internal static EnhancedModeProperties FromResponse(Response response)
+ {
+ using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions);
+ return DeserializeEnhancedModeProperties(document.RootElement);
+ }
+
+ /// <summary> Convert into a <see cref="RequestContent"/>. </summary>
+ internal virtual RequestContent ToRequestContent()
+ {
+ var content = new Utf8JsonRequestContent();
+ content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions);
+ return content;
+ }
+ }
+}
diff --git a/sdk/cognitiveservices/Azure.AI.Speech.Transcription/src/Generated/EnhancedModeProperties.cs b/sdk/cognitiveservices/Azure.AI.Speech.Transcription/src/Generated/EnhancedModeProperties.cs
new file mode 100644
index 000000000000..765b7ecc7074
--- /dev/null
+++ b/sdk/cognitiveservices/Azure.AI.Speech.Transcription/src/Generated/EnhancedModeProperties.cs
@@ -0,0 +1,78 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+//
+
+#nullable disable
+
+using System;
+using System.Collections.Generic;
+
+namespace Azure.AI.Speech.Transcription
+{
+ /// Enhanced mode properties for transcription.
+ public partial class EnhancedModeProperties
+ {
+ /// <summary>
+ /// Keeps track of any properties unknown to the library.
+ /// <para>
+ /// To assign an object to the value of this property use <see cref="BinaryData.FromObjectAsJson{T}(T, System.Text.Json.JsonSerializerOptions?)"/>.
+ /// </para>
+ /// <para>
+ /// To assign an already formatted json string to this property use <see cref="BinaryData.FromString(string)"/>.
+ /// </para>
+ /// <para>
+ /// Examples:
+ /// <list type="bullet">
+ /// <item>
+ /// <term>BinaryData.FromObjectAsJson("foo")</term>
+ /// <description>Creates a payload of "foo".</description>
+ /// </item>
+ /// <item>
+ /// <term>BinaryData.FromString("\"foo\"")</term>
+ /// <description>Creates a payload of "foo".</description>
+ /// </item>
+ /// <item>
+ /// <term>BinaryData.FromObjectAsJson(new { key = "value" })</term>
+ /// <description>Creates a payload of { "key": "value" }.</description>
+ /// </item>
+ /// <item>
+ /// <term>BinaryData.FromString("{\"key\": \"value\"}")</term>
+ /// <description>Creates a payload of { "key": "value" }.</description>
+ /// </item>
+ /// </list>
+ /// </para>
+ /// </summary>
+ private IDictionary<string, BinaryData> _serializedAdditionalRawData;
+
+ /// <summary> Initializes a new instance of <see cref="EnhancedModeProperties"/>. </summary>
+ public EnhancedModeProperties()
+ {
+ Prompt = new ChangeTrackingList<string>();
+ }
+
+ /// <summary> Initializes a new instance of <see cref="EnhancedModeProperties"/>. </summary>
+ /// <param name="enabled"> Enable enhanced mode for transcription. This is automatically set to true when task, targetLanguage, or prompt are specified. </param>
+ /// <param name="task"> Task type for enhanced mode. </param>
+ /// <param name="targetLanguage"> Target language for enhanced mode. </param>
+ /// <param name="prompt"> A list of user prompts. </param>
+ /// <param name="serializedAdditionalRawData"> Keeps track of any properties unknown to the library. </param>
+ internal EnhancedModeProperties(bool? enabled, string task, string targetLanguage, IList<string> prompt, IDictionary<string, BinaryData> serializedAdditionalRawData)
+ {
+ Enabled = enabled;
+ Task = task;
+ TargetLanguage = targetLanguage;
+ Prompt = prompt;
+ _serializedAdditionalRawData = serializedAdditionalRawData;
+ }
+
+ /// Enable enhanced mode for transcription. This is automatically set to true when task, targetLanguage, or prompt are specified.
+ public bool? Enabled { get; }
+ /// Task type for enhanced mode.
+ public string Task { get; set; }
+ /// Target language for enhanced mode.
+ public string TargetLanguage { get; set; }
+ /// A list of user prompts.
+ public IList<string> Prompt { get; }
+ }
+}
diff --git a/sdk/cognitiveservices/Azure.AI.Speech.Transcription/src/Generated/Internal/Argument.cs b/sdk/cognitiveservices/Azure.AI.Speech.Transcription/src/Generated/Internal/Argument.cs
new file mode 100644
index 000000000000..f7300d83d72b
--- /dev/null
+++ b/sdk/cognitiveservices/Azure.AI.Speech.Transcription/src/Generated/Internal/Argument.cs
@@ -0,0 +1,129 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+//
+
+#nullable disable
+
+using System;
+using System.Collections;
+using System.Collections.Generic;
+
+namespace Azure.AI.Speech.Transcription
+{
+ internal static class Argument
+ {
+ public static void AssertNotNull<T>(T value, string name)
+ {
+ if (value is null)
+ {
+ throw new ArgumentNullException(name);
+ }
+ }
+
+ public static void AssertNotNull<T>(T? value, string name)
+ where T : struct
+ {
+ if (!value.HasValue)
+ {
+ throw new ArgumentNullException(name);
+ }
+ }
+
+ public static void AssertNotNullOrEmpty<T>(IEnumerable<T> value, string name)
+ {
+ if (value is null)
+ {
+ throw new ArgumentNullException(name);
+ }
+ if (value is ICollection<T> collectionOfT && collectionOfT.Count == 0)
+ {
+ throw new ArgumentException("Value cannot be an empty collection.", name);
+ }
+ if (value is ICollection collection && collection.Count == 0)
+ {
+ throw new ArgumentException("Value cannot be an empty collection.", name);
+ }
+ using IEnumerator<T> e = value.GetEnumerator();
+ if (!e.MoveNext())
+ {
+ throw new ArgumentException("Value cannot be an empty collection.", name);
+ }
+ }
+
+ public static void AssertNotNullOrEmpty(string value, string name)
+ {
+ if (value is null)
+ {
+ throw new ArgumentNullException(name);
+ }
+ if (value.Length == 0)
+ {
+ throw new ArgumentException("Value cannot be an empty string.", name);
+ }
+ }
+
+ public static void AssertNotNullOrWhiteSpace(string value, string name)
+ {
+ if (value is null)
+ {
+ throw new ArgumentNullException(name);
+ }
+ if (string.IsNullOrWhiteSpace(value))
+ {
+ throw new ArgumentException("Value cannot be empty or contain only white-space characters.", name);
+ }
+ }
+
+ public static void AssertNotDefault<T>(ref T value, string name)
+ where T : struct, IEquatable<T>
+ {
+ if (value.Equals(default))
+ {
+ throw new ArgumentException("Value cannot be empty.", name);
+ }
+ }
+
+ public static void AssertInRange<T>(T value, T minimum, T maximum, string name)
+ where T : notnull, IComparable<T>
+ {
+ if (minimum.CompareTo(value) > 0)
+ {
+ throw new ArgumentOutOfRangeException(name, "Value is less than the minimum allowed.");
+ }
+ if (maximum.CompareTo(value) < 0)
+ {
+ throw new ArgumentOutOfRangeException(name, "Value is greater than the maximum allowed.");
+ }
+ }
+
+ public static void AssertEnumDefined(Type enumType, object value, string name)
+ {
+ if (!Enum.IsDefined(enumType, value))
+ {
+ throw new ArgumentException($"Value not defined for {enumType.FullName}.", name);
+ }
+ }
+
+ public static T CheckNotNull<T>(T value, string name)
+ where T : class
+ {
+ AssertNotNull(value, name);
+ return value;
+ }
+
+ public static string CheckNotNullOrEmpty(string value, string name)
+ {
+ AssertNotNullOrEmpty(value, name);
+ return value;
+ }
+
+ public static void AssertNull<T>(T value, string name, string message = null)
+ {
+ if (value != null)
+ {
+ throw new ArgumentException(message ?? "Value must be null.", name);
+ }
+ }
+ }
+}
diff --git a/sdk/cognitiveservices/Azure.AI.Speech.Transcription/src/Generated/Internal/ChangeTrackingDictionary.cs b/sdk/cognitiveservices/Azure.AI.Speech.Transcription/src/Generated/Internal/ChangeTrackingDictionary.cs
new file mode 100644
index 000000000000..e01597dc4bf1
--- /dev/null
+++ b/sdk/cognitiveservices/Azure.AI.Speech.Transcription/src/Generated/Internal/ChangeTrackingDictionary.cs
@@ -0,0 +1,167 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+//
+
+#nullable disable
+
+using System;
+using System.Collections;
+using System.Collections.Generic;
+
+namespace Azure.AI.Speech.Transcription
+{
+ internal class ChangeTrackingDictionary<TKey, TValue> : IDictionary<TKey, TValue>, IReadOnlyDictionary<TKey, TValue> where TKey : notnull
+ {
+ private IDictionary<TKey, TValue> _innerDictionary;
+
+ public ChangeTrackingDictionary()
+ {
+ }
+
+ public ChangeTrackingDictionary(IDictionary<TKey, TValue> dictionary)
+ {
+ if (dictionary == null)
+ {
+ return;
+ }
+ _innerDictionary = new Dictionary<TKey, TValue>(dictionary);
+ }
+
+ public ChangeTrackingDictionary(IReadOnlyDictionary<TKey, TValue> dictionary)
+ {
+ if (dictionary == null)
+ {
+ return;
+ }
+ _innerDictionary = new Dictionary<TKey, TValue>();
+ foreach (var pair in dictionary)
+ {
+ _innerDictionary.Add(pair);
+ }
+ }
+
+ public bool IsUndefined => _innerDictionary == null;
+
+ public int Count => IsUndefined ? 0 : EnsureDictionary().Count;
+
+ public bool IsReadOnly => IsUndefined ? false : EnsureDictionary().IsReadOnly;
+
+ public ICollection<TKey> Keys => IsUndefined ? Array.Empty<TKey>() : EnsureDictionary().Keys;
+
+ public ICollection<TValue> Values => IsUndefined ? Array.Empty<TValue>() : EnsureDictionary().Values;
+
+ public TValue this[TKey key]
+ {
+ get
+ {
+ if (IsUndefined)
+ {
+ throw new KeyNotFoundException(nameof(key));
+ }
+ return EnsureDictionary()[key];
+ }
+ set
+ {
+ EnsureDictionary()[key] = value;
+ }
+ }
+
+ IEnumerable<TKey> IReadOnlyDictionary<TKey, TValue>.Keys => Keys;
+
+ IEnumerable<TValue> IReadOnlyDictionary<TKey, TValue>.Values => Values;
+
+ public IEnumerator<KeyValuePair<TKey, TValue>> GetEnumerator()
+ {
+ if (IsUndefined)
+ {
+ IEnumerator