-
Notifications
You must be signed in to change notification settings - Fork 5.1k
ADT perf tests #21944
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
ADT perf tests #21944
Changes from all commits
16b8e88
32c67f0
65b88c3
72d9208
338a82b
5d60e58
7f5f869
0e81911
11501d9
e4e47c4
e0d1723
1fcc72c
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,24 @@ | ||
<Project Sdk="Microsoft.NET.Sdk">
  <PropertyGroup>
    <OutputType>Exe</OutputType>
  </PropertyGroup>

  <!-- NOTE(review): no Version on the PackageReference — presumably package versions are
       centrally managed by the repository (e.g. Packages.Data.props); confirm. -->
  <ItemGroup>
    <PackageReference Include="FluentAssertions" />
  </ItemGroup>

  <!-- References the client library under test plus the shared perf and test-framework projects. -->
  <ItemGroup>
    <ProjectReference Include="$(MSBuildThisFileDirectory)..\..\src\Azure.DigitalTwins.Core.csproj" />
    <ProjectReference Include="$(MSBuildThisFileDirectory)..\..\..\..\..\common\Perf\Azure.Test.Perf\Azure.Test.Perf.csproj" />
    <ProjectReference Include="$(MSBuildThisFileDirectory)..\..\..\..\core\Azure.Core.TestFramework\src\Azure.Core.TestFramework.csproj" />
  </ItemGroup>

  <!-- DTDL model/twin templates read from disk at runtime; copy them next to the test binary. -->
  <ItemGroup>
    <None Update="Infrastructure\Models\Room.json">
      <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
    </None>
    <None Update="Infrastructure\Twins\RoomTwin.json">
      <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
    </None>
  </ItemGroup>
</Project>
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,87 @@ | ||
| // Copyright (c) Microsoft Corporation. All rights reserved. | ||
| // Licensed under the MIT License. | ||
|
|
||
| using System; | ||
| using System.Collections.Generic; | ||
| using System.IO; | ||
| using System.Linq; | ||
| using System.Net; | ||
| using System.Reflection; | ||
| using System.Text.Json; | ||
| using System.Threading.Tasks; | ||
|
|
||
namespace Azure.DigitalTwins.Core.Perf.Infrastructure
{
    /// <summary>
    /// Seeds an Azure Digital Twins instance with the model and twin assets used by the perf scenarios.
    /// </summary>
    internal class AdtInstancePopulator
    {
        // The DTDL json assets are copied next to the test binary at build time
        // (see the CopyToOutputDirectory items in the project file).
        private static readonly string s_dtdlDirectoryPath = Path.Combine(GetWorkingDirectory(), "Infrastructure");

        private static readonly string s_modelsPath = Path.Combine(s_dtdlDirectoryPath, "Models");
        private static readonly string s_twinsPath = Path.Combine(s_dtdlDirectoryPath, "Twins");

        private const string RoomModelFileName = "Room.json";
        private const string RoomTwinFileName = "RoomTwin.json";

        /// <summary>
        /// Creates the Room model on the service. A model that already exists is treated as success
        /// so repeated runs and parallel test instances do not fail the setup.
        /// </summary>
        /// <param name="client">The client used to talk to the Digital Twins instance.</param>
        public static async Task CreateRoomModelAsync(DigitalTwinsClient client)
        {
            try
            {
                await client.CreateModelsAsync(new List<string> { GetRoomModel() }).ConfigureAwait(false);
            }
            catch (RequestFailedException ex) when (ex.Status == (int)HttpStatusCode.Conflict)
            {
                // Another run (or a parallel test instance) already created the model; nothing to do.
                Console.WriteLine("Model already exists");
            }
        }

        /// <summary>
        /// Creates the requested number of Room twins stamped with <paramref name="testId"/> so a test
        /// instance can later query for exactly the twins it owns.
        /// </summary>
        /// <param name="client">The client used to talk to the Digital Twins instance.</param>
        /// <param name="testId">The unique Id of the test instance, written to each twin's TestId property.</param>
        /// <param name="countOftwins">The number of twins to create.</param>
        /// <returns>The list of twins that were created.</returns>
        public static async Task<List<BasicDigitalTwin>> CreateRoomTwinsForTestIdAsync(DigitalTwinsClient client, string testId, long countOftwins)
        {
            var createdTwins = new List<BasicDigitalTwin>();

            // A random prefix keeps twin Ids from colliding across batches that share a test Id.
            string batchTwinPrefix = $"room-{testId}-{Guid.NewGuid().ToString().Substring(0, 8)}";
            for (long i = 0; i < countOftwins; i++)
            {
                string twinId = $"{batchTwinPrefix}-{i}";
                createdTwins.Add(await client.CreateOrReplaceDigitalTwinAsync(twinId, GetRoomTwin(testId)).ConfigureAwait(false));
            }

            return createdTwins;
        }

        /// <summary>
        /// Loads the Room DTDL model definition from disk.
        /// </summary>
        public static string GetRoomModel()
        {
            return LoadFileFromPath(s_modelsPath, RoomModelFileName);
        }

        /// <summary>
        /// Loads the Room twin template from disk and stamps it with the supplied test Id.
        /// </summary>
        public static BasicDigitalTwin GetRoomTwin(string testId)
        {
            // The on-disk template contains the literal placeholder "TEST_ID" as the TestId value.
            string value = LoadFileFromPath(s_twinsPath, RoomTwinFileName).Replace("TEST_ID", testId);
            return JsonSerializer.Deserialize<BasicDigitalTwin>(value);
        }

        /// <summary>
        /// Gets the directory containing the test binary (and therefore the copied DTDL assets).
        /// </summary>
        private static string GetWorkingDirectory()
        {
            // Assembly.Location is already a local file path; the previous UriBuilder round-trip was
            // unnecessary and truncated paths containing URI-significant characters such as '#'.
            return Path.GetDirectoryName(Assembly.GetExecutingAssembly().Location);
        }

        /// <summary>
        /// Reads the named json file from <paramref name="path"/>, terminating the process on failure.
        /// </summary>
        private static string LoadFileFromPath(string path, string fileName)
        {
            // Resolve the file directly instead of enumerating the directory and filtering by name.
            string filePathOfInterest = Path.Combine(path, fileName);
            try
            {
                return File.ReadAllText(filePathOfInterest);
            }
            catch (Exception ex)
            {
                // ConsoleColor was previously passed as a composite-format argument, which silently
                // did nothing; set the console color explicitly instead.
                Console.ForegroundColor = ConsoleColor.Red;
                Console.WriteLine($"Error reading twin types from disk due to: {ex.Message}");
                Console.ResetColor();

                // Exit with a non-zero code so calling scripts/pipelines see the failure (was 0 = success).
                Environment.Exit(1);
            }

            return null;
        }
    }
}
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,13 @@ | ||
| { | ||
| "@id": "dtmi:com:samples:Room;1", | ||
| "@type": "Interface", | ||
| "@context": "dtmi:dtdl:context;2", | ||
| "displayName": "Room", | ||
| "contents": [ | ||
| { | ||
| "@type": "Property", | ||
| "name": "TestId", | ||
| "schema": "string" | ||
| } | ||
| ] | ||
| } |
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,48 @@ | ||
| // Copyright (c) Microsoft Corporation. All rights reserved. | ||
| // Licensed under the MIT License. | ||
|
|
||
| using Azure.Core.TestFramework; | ||
|
|
||
namespace Azure.DigitalTwins.Core.Perf
{
    /// <summary>
    /// Represents the ambient environment in which the test suite is being run, offering access to information such as environment variables.
    /// </summary>
    internal sealed class PerfTestEnvironment : TestEnvironment
    {
        /// <summary>
        /// The shared instance of the <see cref="PerfTestEnvironment"/> to be used during test runs.
        /// </summary>
        public static PerfTestEnvironment Instance { get; } = new PerfTestEnvironment();

        /// <summary>
        /// The Digital Twins instance endpoint to run the tests against.
        /// </summary>
        /// <value>The Digital Twins instance endpoint, read from the "DIGITALTWINS_URL" environment variable.</value>
        public string DigitalTwinsUrl => GetVariable("DIGITALTWINS_URL");

        /// <summary>
        /// The Microsoft tenant Id for the App registration.
        /// </summary>
        /// <value>The Microsoft tenant Id for the App registration, read from the "DIGITALTWINS_TENANT_ID" environment variable.</value>
        public string DigitalTwinsTenantId => GetVariable("DIGITALTWINS_TENANT_ID");

        /// <summary>
        /// The App registration client Id used to authenticate against the instance.
        /// </summary>
        /// <value>The App registration client Id used to authenticate against the instance, read from the "DIGITALTWINS_CLIENT_ID" environment variable.</value>
        public string DigitalTwinsClientId => GetVariable("DIGITALTWINS_CLIENT_ID");

        /// <summary>
        /// The App registration client secret.
        /// </summary>
        /// <value>The App registration client secret, read from the "DIGITALTWINS_CLIENT_SECRET" environment variable.</value>
        public string DigitalTwinsClientSecret => GetVariable("DIGITALTWINS_CLIENT_SECRET");

        /// <summary>
        /// Initializes a new instance of the <see cref="PerfTestEnvironment"/> class.
        /// </summary>
        public PerfTestEnvironment()
        {
            // No additional setup is required; the base class resolves environment variables on demand.
        }
    }
}
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,6 @@ | ||
| { | ||
| "$metadata": { | ||
| "$model": "dtmi:com:samples:Room;1" | ||
| }, | ||
| "TestId": "TEST_ID" | ||
| } |
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,7 @@ | ||
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.

using System.Reflection;
using Azure.Test.Perf;

// Hand control to the shared perf framework entry point, passing this assembly so it can
// discover the test scenarios defined here along with the command-line arguments.
await PerfProgram.Main(Assembly.GetEntryAssembly(), args);
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,125 @@ | ||
| // Copyright (c) Microsoft Corporation. All rights reserved. | ||
| // Licensed under the MIT License. | ||
|
|
||
| using System; | ||
| using System.Collections.Generic; | ||
| using System.Threading; | ||
| using System.Threading.Tasks; | ||
| using Azure.DigitalTwins.Core.Perf.Infrastructure; | ||
| using Azure.Identity; | ||
| using Azure.Test.Perf; | ||
| using NUnit.Framework; | ||
| using FluentAssertions; | ||
|
|
||
namespace Azure.DigitalTwins.Core.Perf.Scenarios
{
    /// <summary>
    /// The performance test scenario focused on running queries against digital twins instances.
    /// </summary>
    /// <seealso cref="PerfTest{SizeOptions}" />
    public sealed class QueryDigitalTwins : PerfTest<SizeOptions>
    {
        private readonly DigitalTwinsClient _digitalTwinsClient;

        // Unique per test instance; created twins are stamped with it so each instance queries only its own twins.
        private readonly string _testId;
        private readonly long _size;

        // Newly created twins take some time to become visible to queries; see SetupAsync.
        private readonly TimeSpan _delayPeriod = TimeSpan.FromMinutes(1);
        private List<BasicDigitalTwin> _createdTwins = new List<BasicDigitalTwin>();

        /// <summary>
        /// Initializes the scenario, building a client from the ambient perf test environment.
        /// </summary>
        /// <param name="options">The options supplied on the command line; Size controls the twin count.</param>
        public QueryDigitalTwins(SizeOptions options) : base(options)
        {
            _digitalTwinsClient = new DigitalTwinsClient(
                new Uri(PerfTestEnvironment.Instance.DigitalTwinsUrl),
                new ClientSecretCredential(
                    PerfTestEnvironment.Instance.DigitalTwinsTenantId,
                    PerfTestEnvironment.Instance.DigitalTwinsClientId,
                    PerfTestEnvironment.Instance.DigitalTwinsClientSecret));

            _size = options.Size;
            _testId = Guid.NewGuid().ToString().Substring(0, 8);
        }

        /// <summary>
        /// Global setup that runs once at the beginning of test execution; creates the shared Room model
        /// so all parallel test instances can take advantage of it.
        /// </summary>
        public override async Task GlobalSetupAsync()
        {
            await base.GlobalSetupAsync();

            await AdtInstancePopulator.CreateRoomModelAsync(_digitalTwinsClient).ConfigureAwait(false);
        }

        /// <summary>
        /// Per-instance setup; creates this instance's twins and waits for them to become queryable.
        /// </summary>
        public override async Task SetupAsync()
        {
            await base.SetupAsync();
            _createdTwins = await AdtInstancePopulator.CreateRoomTwinsForTestIdAsync(_digitalTwinsClient, _testId, _size).ConfigureAwait(false);

            // Since it takes some time for the newly created twins to be included in the query result, we have to wait some time.
            await Task.Delay(_delayPeriod);
        }

        /// <summary>
        /// Per-instance cleanup; deletes all twins created by this test instance.
        /// </summary>
        public override async Task CleanupAsync()
        {
            await base.CleanupAsync();

            foreach (BasicDigitalTwin twin in _createdTwins)
            {
                await _digitalTwinsClient.DeleteDigitalTwinAsync(twin.Id).ConfigureAwait(false);
            }
        }

        /// <summary>
        /// Global cleanup that runs once at the end of test execution; deletes the models on the instance.
        /// </summary>
        public override async Task GlobalCleanupAsync()
        {
            await base.GlobalCleanupAsync();

            // NOTE(review): this deletes every model in the instance, not only the Room model created by
            // GlobalSetupAsync — acceptable only if the instance is dedicated to these perf tests; confirm.
            AsyncPageable<DigitalTwinsModelData> allModels = _digitalTwinsClient.GetModelsAsync();

            await foreach (DigitalTwinsModelData model in allModels)
            {
                await _digitalTwinsClient.DeleteModelAsync(model.Id).ConfigureAwait(false);
            }
        }

        /// <summary>
        /// Queries for all digital twins using <see cref="DigitalTwinsClient.Query{T}(string, CancellationToken)"/>.
        /// </summary>
        /// <param name="cancellationToken">The token used to signal cancellation request.</param>
        public override void Run(CancellationToken cancellationToken)
        {
            // Flow the caller's token so the framework can abort an in-flight run
            // (previously CancellationToken.None was passed, making the query uncancelable).
            Pageable<BasicDigitalTwin> result = _digitalTwinsClient
                .Query<BasicDigitalTwin>($"SELECT * FROM DIGITALTWINS WHERE TestId = '{_testId}'", cancellationToken);

            long resultCount = 0;

            foreach (BasicDigitalTwin twin in result)
            {
                resultCount++;
            }

#if DEBUG
            // Sanity-check the result size in debug builds only, keeping the measured perf path lean.
            resultCount.Should().Be(_size);
#endif
        }

        /// <summary>
        /// Queries for all digital twins using <see cref="DigitalTwinsClient.QueryAsync{T}(string, CancellationToken)"/>.
        /// </summary>
        /// <param name="cancellationToken">The token used to signal cancellation request.</param>
        public override async Task RunAsync(CancellationToken cancellationToken)
        {
            // Flow the caller's token (was CancellationToken.None) so the async query can be canceled.
            AsyncPageable<BasicDigitalTwin> result = _digitalTwinsClient
                .QueryAsync<BasicDigitalTwin>($"SELECT * FROM DIGITALTWINS WHERE TestId = '{_testId}'", cancellationToken);
            long resultCount = 0;

            await foreach (BasicDigitalTwin twin in result)
            {
                resultCount++;
            }

#if DEBUG
            resultCount.Should().Be(_size);
#endif
        }
    }
}
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,43 @@ | ||
| # Azure Digital Twins performance tests | ||
|
|
||
| The assets in this area comprise a set of performance tests for the [Azure DigitalTwins client library for .NET](https://github.com/Azure/azure-sdk-for-net/tree/master/sdk/digitaltwins/Azure.DigitalTwins.Core) and its associated ecosystem. The artifacts in this library are intended to be used primarily with the Azure SDK engineering system's testing infrastructure, but may also be run as stand-alone applications from the command-line. | ||
|
|
||
| You can learn more about the project structure [here](https://github.com/Azure/azure-sdk-for-net/wiki/Writing-performance-tests-for-Client-libraries). | ||
| ## Purpose | ||
| Performance Testing using performance framework, in general, allows you to test throughput and latency offered to the customers via the SDKs. | ||
|
|
||
| Major Benefit: | ||
| - Performance Regressions are caught prior to release. Regressions can come in from new code changes that get merged between two releases, new dependencies that get introduced or old dependencies that are upgraded. | ||
|
|
||
| The Digital Twins performance tests will be plugged into performance automation pipelines automatically and will run the tests regularly to scan for any performance issues that should be fixed before releasing the SDK. | ||
|
|
||
| ## Perf test scenarios | ||
|
|
||
| ### QueryDigitalTwins | ||
|
|
||
| This scenario tests API calls to the DigitalTwins service to query for Twins and Relationships. | ||
| The `GlobalSetupAsync` method override creates a single model that is shared by all instances of the parallel test runs. It is invoked only once, before the parallel test run begins, rather than per test instance. | ||
| The `SetupAsync` method override creates multiple twins; the number created is configurable using the input options. Each test instance creates twins with a unique test Id and queries only that subset during each run. | ||
|
|
||
| ## Running the tests | ||
|
|
||
| Build a performance test project | ||
| ```bash | ||
| dotnet build -c Release -f <supported-framework> <path/to/project/file> | ||
| ``` | ||
|
|
||
| Run the executable output of a project | ||
| ```bash | ||
| dotnet run -c Release -f <supported-framework> --no-build -p <path/to/project/file> -- [parameters needed for the test] | ||
| ``` | ||
|
|
||
| \<supported-framework\> can be one of netcoreapp2.1, netcoreapp3.1, net461 or net5.0. Note the -- before any custom parameters to pass. This prevents dotnet from trying to handle any ambiguous command line switches. | ||
|
|
||
| You should use the scenario test class names as the first parameter that is needed for running the test. | ||
|
|
||
| ## Contributing | ||
| This project welcomes contributions and suggestions. Most contributions require you to agree to a Contributor License Agreement (CLA) declaring that you have the right to, and actually do, grant us the rights to use your contribution. For details, visit https://cla.microsoft.com. | ||
|
|
||
| When you submit a pull request, a CLA-bot will automatically determine whether you need to provide a CLA and decorate the PR appropriately (e.g., label, comment). Simply follow the instructions provided by the bot. You will only need to do this once across all repos using our CLA. | ||
|
|
||
| This project has adopted the [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/). For more information see the [Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/) or contact [opencode@microsoft.com](mailto:opencode@microsoft.com) with any additional questions or comments. |
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
These environment variables are set during the live test pipeline resource deployment. I assume we have to use the same ones for the perf tests (since it was mentioned that we write them just as we would write Live tests)
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
It turns out that there is currently no automation in perf test framework and the perf team will run the tests manually (with my help of course)