diff --git a/.config/dotnet-tools.json b/.config/dotnet-tools.json
index cd481982e26..97bddf872c0 100644
--- a/.config/dotnet-tools.json
+++ b/.config/dotnet-tools.json
@@ -15,7 +15,7 @@
]
},
"microsoft.visualstudio.slngen.tool": {
- "version": "9.5.3",
+ "version": "11.1.0",
"commands": [
"slngen"
]
diff --git a/.gitignore b/.gitignore
index 60836b26d3a..677a001a33b 100644
--- a/.gitignore
+++ b/.gitignore
@@ -312,4 +312,4 @@ BenchmarkDotNet.artifacts/
/_TEST
-*.binlog
\ No newline at end of file
+*.binlog
diff --git a/.vscode/settings.json b/.vscode/settings.json
new file mode 100644
index 00000000000..5e501ec13ee
--- /dev/null
+++ b/.vscode/settings.json
@@ -0,0 +1,3 @@
+{
+ "dotnet.automaticallyCreateSolutionInWorkspace": false
+}
diff --git a/Directory.Build.props b/Directory.Build.props
index cea28e22ade..7e4dc9ad808 100644
--- a/Directory.Build.props
+++ b/Directory.Build.props
@@ -5,7 +5,7 @@
net
- 8
+ 9
0
$(TargetFrameworkMajorVersion).$(TargetFrameworkMinorVersion)
@@ -13,10 +13,10 @@
$(TargetFrameworkName)$(TargetFrameworkVersion)
$(LatestTargetFramework)
- $(SupportedNetCoreTargetFrameworks);net6.0
+ $(SupportedNetCoreTargetFrameworks);net8.0
- net6.0
+ net8.0
diff --git a/Directory.Build.targets b/Directory.Build.targets
index cd5fab65acb..5fcf797523c 100644
--- a/Directory.Build.targets
+++ b/Directory.Build.targets
@@ -20,7 +20,7 @@
$(NoWarn);AD0001
- $(NoWarn);EXTEXP0001;EXTEXP0002;EXTEXP0003;EXTEXP0004;EXTEXP0005;EXTEXP0006;EXTEXP0007;EXTEXP0008;EXTEXP0009;EXTEXP0010;EXTEXP0011;EXTEXP0012;EXTEXP0013;EXTEXP0014;EXTEXP0015;EXTEXP0016;EXTEXP0017
+ $(NoWarn);EXTEXP0001;EXTEXP0002;EXTEXP0003;EXTEXP0004;EXTEXP0005;EXTEXP0006;EXTEXP0007;EXTEXP0008;EXTEXP0009;EXTEXP0010;EXTEXP0011;EXTEXP0012;EXTEXP0013;EXTEXP0014;EXTEXP0015;EXTEXP0016;EXTEXP0017;EXTEXP0018
$(NoWarn);EXTOBS0001;
@@ -30,6 +30,9 @@
$(NoWarn);SA1600;SA0001
+
+ $(NoWarn);S3236
+
$(NoWarn);CA1062
diff --git a/NuGet.config b/NuGet.config
index 9e48c557166..f91233ccab5 100644
--- a/NuGet.config
+++ b/NuGet.config
@@ -2,25 +2,16 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
+
+
+
+
@@ -42,6 +33,18 @@
+
+
+
+
+
+
+
+
+
+
+
+
@@ -49,15 +52,8 @@
-
-
-
-
-
-
-
diff --git a/azure-pipelines.yml b/azure-pipelines.yml
index c8ab2149b97..211058cf56a 100644
--- a/azure-pipelines.yml
+++ b/azure-pipelines.yml
@@ -7,6 +7,7 @@ trigger:
branches:
include:
- main
+ - dev
- release/*
- internal/release/*
paths:
@@ -28,6 +29,7 @@ pr:
branches:
include:
- main
+ - dev
- release/*
- internal/release/*
paths:
diff --git a/docs/building.md b/docs/building.md
index b7845cd6945..d7e179f1b6b 100644
--- a/docs/building.md
+++ b/docs/building.md
@@ -53,7 +53,7 @@ Here are few commands that you will likely use the most:
- `build.sh --build`: to build the solution¹.
- `build.sh --test`: to run all unit tests in the solution¹.
- `build.sh --vs <keywords>`: to generate a "filtered" solution and save it as `SDK.sln`. It also performs the "restore" operation. Keywords can be any part of the name or path of project files you want to include. For example: `./build.sh --vs Http,Fakes,AspNetCore`.
- If for some reason you wish to generate a solution with all projects you can pass `*` for the keyword, e.g.: `./build.sh --vs '*'` (Note: you have to escape the asterisk or use `set -f` to turn off expansion).
+ If for some reason you wish to generate a solution with all projects, you can pass `*` for the keyword, e.g.: `./build.sh -vs '*'` (Note: you have to escape the asterisk or use `set -f` to turn off expansion).
> Under the hood, this invokes `scripts/Slngen.ps1` script, which in turn executes [slngen tool][slngen-tool]. If you want to customize how the "filtered" solution is generated, you will need to invoke `scripts/Slngen.ps1` script directly.
Run `./scripts/Slngen.ps1 -help` for more details.
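For illustration, here is a minimal sketch of the filtered-solution workflow described above; the keyword list is only an example:

```bash
# Generate a filtered SDK.sln (restore included) from projects whose name or
# path matches any of the example keywords, then build and run the tests.
./build.sh --vs Http,Fakes,AspNetCore
./build.sh --build
./build.sh --test

# Include every project: quote the asterisk (or use `set -f`) so the shell
# does not expand it before the script sees it.
./build.sh --vs '*'
```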
diff --git a/docs/list-of-diagnostics.md b/docs/list-of-diagnostics.md
index ba8e170a878..4ba19ed1099 100644
--- a/docs/list-of-diagnostics.md
+++ b/docs/list-of-diagnostics.md
@@ -40,6 +40,7 @@ if desired.
| `EXTEXP0015` | Environmental probes experiments |
| `EXTEXP0016` | Hosting integration testing experiments |
| `EXTEXP0017` | Contextual options experiments |
+| `EXTEXP0018` | HybridCache experiments |
# Obsoletions
@@ -81,7 +82,7 @@ You may continue using obsolete APIs in your application, but we advise explorin
| `LOGGEN023` | Tag provider method is inaccessible |
| `LOGGEN024` | Property provider method has an invalid signature |
| `LOGGEN025` | Logging method parameters can't have "ref" or "out" modifiers |
-| `LOGGEN026` | Parameters with a custom tag provider are not subject to redaciton |
+| `LOGGEN026` | Parameters with a custom tag provider are not subject to redaction |
| `LOGGEN027` | Multiple logging methods shouldn't use the same event name |
| `LOGGEN028` | Logging method parameter's type has a hidden property |
| `LOGGEN029` | A logging method parameter causes name conflicts |
diff --git a/eng/MSBuild/LegacySupport.props b/eng/MSBuild/LegacySupport.props
index c96a83d34d6..8ebacbd60f7 100644
--- a/eng/MSBuild/LegacySupport.props
+++ b/eng/MSBuild/LegacySupport.props
@@ -7,11 +7,11 @@
-
+
-
+
@@ -47,7 +47,7 @@
-
+
diff --git a/eng/Tools/DiagPublisher/DiagPublisher.csproj b/eng/Tools/DiagPublisher/DiagPublisher.csproj
index 0679bc045d5..15c12e422cc 100644
--- a/eng/Tools/DiagPublisher/DiagPublisher.csproj
+++ b/eng/Tools/DiagPublisher/DiagPublisher.csproj
@@ -14,7 +14,7 @@
-
+
diff --git a/eng/Version.Details.xml b/eng/Version.Details.xml
index 674eca8d91e..e4dcd0226ff 100644
--- a/eng/Version.Details.xml
+++ b/eng/Version.Details.xml
@@ -1,150 +1,182 @@
-
- https://dev.azure.com/dnceng/internal/_git/dotnet-runtime
- bf5e279d9239bfef5bb1b8d6212f1b971c434606
+
+ https://github.com/dotnet/runtime
+ 0d44aea3696bab80b11a12c6bdfdbf8de9c4e815
+
+
+ https://github.com/dotnet/runtime
+ 0d44aea3696bab80b11a12c6bdfdbf8de9c4e815
+
+
+ https://github.com/dotnet/runtime
+ 0d44aea3696bab80b11a12c6bdfdbf8de9c4e815
+
+
+ https://github.com/dotnet/runtime
+ 0d44aea3696bab80b11a12c6bdfdbf8de9c4e815
+
+
+ https://github.com/dotnet/runtime
+ 0d44aea3696bab80b11a12c6bdfdbf8de9c4e815
+
+
+ https://github.com/dotnet/runtime
+ 0d44aea3696bab80b11a12c6bdfdbf8de9c4e815
+
+
+ https://github.com/dotnet/runtime
+ 0d44aea3696bab80b11a12c6bdfdbf8de9c4e815
+
+
+ https://github.com/dotnet/runtime
+ 0d44aea3696bab80b11a12c6bdfdbf8de9c4e815
+
+
+ https://github.com/dotnet/runtime
+ 0d44aea3696bab80b11a12c6bdfdbf8de9c4e815
+
+
+ https://github.com/dotnet/runtime
+ 0d44aea3696bab80b11a12c6bdfdbf8de9c4e815
+
+
+ https://github.com/dotnet/runtime
+ 0d44aea3696bab80b11a12c6bdfdbf8de9c4e815
+
+
+ https://github.com/dotnet/runtime
+ 0d44aea3696bab80b11a12c6bdfdbf8de9c4e815
+
+
+ https://github.com/dotnet/runtime
+ 0d44aea3696bab80b11a12c6bdfdbf8de9c4e815
+
+
+ https://github.com/dotnet/runtime
+ 0d44aea3696bab80b11a12c6bdfdbf8de9c4e815
+
+
+ https://github.com/dotnet/runtime
+ 0d44aea3696bab80b11a12c6bdfdbf8de9c4e815
+
+
+ https://github.com/dotnet/runtime
+ 0d44aea3696bab80b11a12c6bdfdbf8de9c4e815
+
+
+ https://github.com/dotnet/runtime
+ 0d44aea3696bab80b11a12c6bdfdbf8de9c4e815
+
+
+ https://github.com/dotnet/runtime
+ 0d44aea3696bab80b11a12c6bdfdbf8de9c4e815
+
+
+ https://github.com/dotnet/runtime
+ 0d44aea3696bab80b11a12c6bdfdbf8de9c4e815
+
+
+ https://github.com/dotnet/runtime
+ 0d44aea3696bab80b11a12c6bdfdbf8de9c4e815
+
+
+
+ https://github.com/dotnet/runtime
+ 0d44aea3696bab80b11a12c6bdfdbf8de9c4e815
+
+
+ https://github.com/dotnet/runtime
+ 0d44aea3696bab80b11a12c6bdfdbf8de9c4e815
+
+
+ https://github.com/dotnet/runtime
+ 0d44aea3696bab80b11a12c6bdfdbf8de9c4e815
+
+
+ https://github.com/dotnet/runtime
+ 0d44aea3696bab80b11a12c6bdfdbf8de9c4e815
+
+
+ https://github.com/dotnet/runtime
+ 0d44aea3696bab80b11a12c6bdfdbf8de9c4e815
-
- https://dev.azure.com/dnceng/internal/_git/dotnet-runtime
- 5535e31a712343a63f5d7d796cd874e563e5ac14
+
+ https://github.com/dotnet/runtime
+ 0d44aea3696bab80b11a12c6bdfdbf8de9c4e815
-
- https://dev.azure.com/dnceng/internal/_git/dotnet-runtime
- 5535e31a712343a63f5d7d796cd874e563e5ac14
+
+ https://github.com/dotnet/runtime
+ 0d44aea3696bab80b11a12c6bdfdbf8de9c4e815
-
- https://dev.azure.com/dnceng/internal/_git/dotnet-runtime
- 2aade6beb02ea367fd97c4070a4198802fe61c03
+
+ https://github.com/dotnet/runtime
+ 0d44aea3696bab80b11a12c6bdfdbf8de9c4e815
-
- https://dev.azure.com/dnceng/internal/_git/dotnet-runtime
- 5535e31a712343a63f5d7d796cd874e563e5ac14
+
+ https://github.com/dotnet/runtime
+ 0d44aea3696bab80b11a12c6bdfdbf8de9c4e815
-
- https://dev.azure.com/dnceng/internal/_git/dotnet-runtime
- 5535e31a712343a63f5d7d796cd874e563e5ac14
+
+ https://github.com/dotnet/runtime
+ 0d44aea3696bab80b11a12c6bdfdbf8de9c4e815
-
- https://dev.azure.com/dnceng/internal/_git/dotnet-runtime
- 9f4b1f5d664afdfc80e1508ab7ed099dff210fbd
+
+ https://github.com/dotnet/runtime
+ 0d44aea3696bab80b11a12c6bdfdbf8de9c4e815
-
- https://dev.azure.com/dnceng/internal/_git/dotnet-runtime
- 5535e31a712343a63f5d7d796cd874e563e5ac14
+
+ https://github.com/dotnet/runtime
+ 0d44aea3696bab80b11a12c6bdfdbf8de9c4e815
-
- https://dev.azure.com/dnceng/internal/_git/dotnet-runtime
- 5535e31a712343a63f5d7d796cd874e563e5ac14
+
+ https://github.com/dotnet/aspnetcore
+ 9a34a6e3c7975f41300bd2550a089a85810cafd1
-
- https://dev.azure.com/dnceng/internal/_git/dotnet-runtime
- 5535e31a712343a63f5d7d796cd874e563e5ac14
+
+ https://github.com/dotnet/aspnetcore
+ 9a34a6e3c7975f41300bd2550a089a85810cafd1
-
- https://dev.azure.com/dnceng/internal/_git/dotnet-runtime
- 5535e31a712343a63f5d7d796cd874e563e5ac14
+
+ https://github.com/dotnet/aspnetcore
+ 9a34a6e3c7975f41300bd2550a089a85810cafd1
-
- https://dev.azure.com/dnceng/internal/_git/dotnet-runtime
- 5535e31a712343a63f5d7d796cd874e563e5ac14
+
+ https://github.com/dotnet/aspnetcore
+ 9a34a6e3c7975f41300bd2550a089a85810cafd1
-
- https://dev.azure.com/dnceng/internal/_git/dotnet-runtime
- 9f4b1f5d664afdfc80e1508ab7ed099dff210fbd
+
+ https://github.com/dotnet/aspnetcore
+ 9a34a6e3c7975f41300bd2550a089a85810cafd1
-
- https://dev.azure.com/dnceng/internal/_git/dotnet-runtime
- 5535e31a712343a63f5d7d796cd874e563e5ac14
+
+ https://github.com/dotnet/aspnetcore
+ 9a34a6e3c7975f41300bd2550a089a85810cafd1
-
- https://dev.azure.com/dnceng/internal/_git/dotnet-runtime
- 5535e31a712343a63f5d7d796cd874e563e5ac14
+
+ https://github.com/dotnet/aspnetcore
+ 9a34a6e3c7975f41300bd2550a089a85810cafd1
-
- https://dev.azure.com/dnceng/internal/_git/dotnet-runtime
- 5535e31a712343a63f5d7d796cd874e563e5ac14
+
+ https://github.com/dotnet/aspnetcore
+ 9a34a6e3c7975f41300bd2550a089a85810cafd1
-
- https://dev.azure.com/dnceng/internal/_git/dotnet-runtime
- 5535e31a712343a63f5d7d796cd874e563e5ac14
-
-
- https://dev.azure.com/dnceng/internal/_git/dotnet-runtime
- 9f4b1f5d664afdfc80e1508ab7ed099dff210fbd
-
-
- https://dev.azure.com/dnceng/internal/_git/dotnet-runtime
- 5535e31a712343a63f5d7d796cd874e563e5ac14
-
-
- https://dev.azure.com/dnceng/internal/_git/dotnet-runtime
- 5535e31a712343a63f5d7d796cd874e563e5ac14
-
-
- https://dev.azure.com/dnceng/internal/_git/dotnet-runtime
- 2d7eea252964e69be94cb9c847b371b23e4dd470
-
-
- https://dev.azure.com/dnceng/internal/_git/dotnet-runtime
- 5535e31a712343a63f5d7d796cd874e563e5ac14
-
-
- https://dev.azure.com/dnceng/internal/_git/dotnet-runtime
- 5535e31a712343a63f5d7d796cd874e563e5ac14
-
-
- https://dev.azure.com/dnceng/internal/_git/dotnet-runtime
- 5535e31a712343a63f5d7d796cd874e563e5ac14
-
-
- https://dev.azure.com/dnceng/internal/_git/dotnet-runtime
- 2aade6beb02ea367fd97c4070a4198802fe61c03
-
-
- https://dev.azure.com/dnceng/internal/_git/dotnet-runtime
- 5535e31a712343a63f5d7d796cd874e563e5ac14
-
-
- https://dev.azure.com/dnceng/internal/_git/dotnet-runtime
- 5535e31a712343a63f5d7d796cd874e563e5ac14
-
-
- https://dev.azure.com/dnceng/internal/_git/dotnet-aspnetcore
- 954f61dd38b33caa2b736c73530bd5a294174437
-
-
- https://dev.azure.com/dnceng/internal/_git/dotnet-aspnetcore
- 954f61dd38b33caa2b736c73530bd5a294174437
-
-
- https://dev.azure.com/dnceng/internal/_git/dotnet-aspnetcore
- 954f61dd38b33caa2b736c73530bd5a294174437
-
-
- https://dev.azure.com/dnceng/internal/_git/dotnet-aspnetcore
- 954f61dd38b33caa2b736c73530bd5a294174437
-
-
- https://dev.azure.com/dnceng/internal/_git/dotnet-aspnetcore
- 954f61dd38b33caa2b736c73530bd5a294174437
-
-
- https://dev.azure.com/dnceng/internal/_git/dotnet-aspnetcore
- 954f61dd38b33caa2b736c73530bd5a294174437
-
-
- https://dev.azure.com/dnceng/internal/_git/dotnet-aspnetcore
- 954f61dd38b33caa2b736c73530bd5a294174437
+
+ https://github.com/dotnet/aspnetcore
+ 9a34a6e3c7975f41300bd2550a089a85810cafd1
-
+
https://github.com/dotnet/arcade
- 69abe6b2063083c0b35fc3a5b16cb2bdbaf5e8b0
+ 1230437de1ab7b3e15fe7cdfe7ffce2f65449959
-
+
https://github.com/dotnet/arcade
- 69abe6b2063083c0b35fc3a5b16cb2bdbaf5e8b0
+ 1230437de1ab7b3e15fe7cdfe7ffce2f65449959
diff --git a/eng/Versions.props b/eng/Versions.props
index 314b495eeb7..0d718b9059b 100644
--- a/eng/Versions.props
+++ b/eng/Versions.props
@@ -1,11 +1,10 @@
- 8
- 10
+ 9
+ 0
0
preview
-
-
+ 9
$(MajorVersion).$(MinorVersion).$(PatchVersion)
true
$(MajorVersion).$(MinorVersion).0.0
@@ -28,49 +27,48 @@
-->
- 8.0.1
- 8.0.0
- 8.0.0
- 8.0.0
- 8.0.2
- 8.0.0
- 8.0.0
- 8.0.0
- 8.0.1
- 8.0.0
- 8.0.0
- 8.0.0
- 8.0.0
- 8.0.0
- 8.0.1
- 8.0.0
- 8.0.0
- 8.0.0
- 8.0.0
- 8.0.0
- 8.0.2
- 8.0.0
- 8.0.0
- 8.0.0
- 8.0.1
- 8.0.0
- 8.0.0
- 8.0.0
- 8.0.0
- 8.0.1
- 8.0.0
- 8.0.3
- 8.0.0
+ 9.0.0-rtm.24476.4
+ 9.0.0-rtm.24476.4
+ 9.0.0-rtm.24476.4
+ 9.0.0-rtm.24476.4
+ 9.0.0-rtm.24476.4
+ 9.0.0-rtm.24476.4
+ 9.0.0-rtm.24476.4
+ 9.0.0-rtm.24476.4
+ 9.0.0-rtm.24476.4
+ 9.0.0-rtm.24476.4
+ 9.0.0-rtm.24476.4
+ 9.0.0-rtm.24476.4
+ 9.0.0-rtm.24476.4
+ 9.0.0-rtm.24476.4
+ 9.0.0-rtm.24476.4
+ 9.0.0-rtm.24476.4
+ 9.0.0-rtm.24476.4
+ 9.0.0-rtm.24476.4
+ 9.0.0-rtm.24476.4
+ 9.0.0-rtm.24476.4
+ 9.0.0-rtm.24476.4
+ 9.0.0-rtm.24476.4
+ 9.0.0-rtm.24476.4
+ 9.0.0-rtm.24476.4
+ 9.0.0-rtm.24476.4
+ 9.0.0-rtm.24476.4
+ 9.0.0-rtm.24476.4
+ 9.0.0-rtm.24476.4
+ 9.0.0-rtm.24476.4
+ 9.0.0-rtm.24476.4
+ 9.0.0-rtm.24476.4
+ 9.0.0-rtm.24476.4
- 8.0.8
- 8.0.8
- 8.0.8
- 8.0.8
- 8.0.5
- 8.0.8
- 8.0.5
- 8.0.8
- 8.0.8
+ 9.0.0-rtm.24477.5
+ 9.0.0-rtm.24477.5
+ 9.0.0-rtm.24477.5
+ 9.0.0-rtm.24477.5
+ 9.0.0-rtm.24477.5
+ 9.0.0-rtm.24477.5
+ 9.0.0-rtm.24477.5
+ 9.0.0-rtm.24477.5
+ 9.0.0-rtm.24477.5
+
+ $(MicrosoftNETCoreAppRuntimewinx64Version)
4.8.0
3.3.4
diff --git a/eng/common/SetupNugetSources.ps1 b/eng/common/SetupNugetSources.ps1
index efa2fd72bfa..5db4ad71ee2 100644
--- a/eng/common/SetupNugetSources.ps1
+++ b/eng/common/SetupNugetSources.ps1
@@ -1,17 +1,10 @@
-# This file is a temporary workaround for internal builds to be able to restore from private AzDO feeds.
-# This file should be removed as part of this issue: https://github.com/dotnet/arcade/issues/4080
+# This script adds internal feeds required to build commits that depend on internal package sources. For instance,
+# dotnet6-internal would be added automatically if dotnet6 was found in the nuget.config file. In addition, it also
+# enables disabled internal Maestro (darc-int*) feeds.
#
-# What the script does is iterate over all package sources in the pointed NuGet.config and add a credential entry
-# under <packageSourceCredentials> for each Maestro managed private feed. Two additional credential
-# entries are also added for the two private static internal feeds: dotnet3-internal and dotnet3-internal-transport.
+# Optionally, this script also adds a credential entry for each of the internal feeds if a credential is supplied.
#
-# This script needs to be called in every job that will restore packages and which the base repo has
-# private AzDO feeds in the NuGet.config.
-#
-# See example YAML call for this script below. Note the use of the variable `$(dn-bot-dnceng-artifact-feeds-rw)`
-# from the AzureDevOps-Artifact-Feeds-Pats variable group.
-#
-# Any disabledPackageSources entries which start with "darc-int" will be re-enabled as part of this script executing
+# See example call for this script below.
#
# - task: PowerShell@2
# displayName: Setup Private Feeds Credentials
@@ -21,11 +14,18 @@
# arguments: -ConfigFile $(Build.SourcesDirectory)/NuGet.config -Password $Env:Token
# env:
# Token: $(dn-bot-dnceng-artifact-feeds-rw)
+#
+# Note that the NuGetAuthenticate task should be called after SetupNugetSources.
+# This ensures that:
+# - Appropriate creds are set for the added internal feeds (if not supplied to the script)
+# - The credential provider is installed.
+#
+# This logic is also abstracted into enable-internal-sources.yml.
[CmdletBinding()]
param (
[Parameter(Mandatory = $true)][string]$ConfigFile,
- [Parameter(Mandatory = $true)][string]$Password
+ $Password
)
$ErrorActionPreference = "Stop"
@@ -48,11 +48,17 @@ function AddPackageSource($sources, $SourceName, $SourceEndPoint, $creds, $Usern
else {
Write-Host "Package source $SourceName already present."
}
+
AddCredential -Creds $creds -Source $SourceName -Username $Username -pwd $pwd
}
# Add a credential node for the specified source
function AddCredential($creds, $source, $username, $pwd) {
+ # If no cred supplied, don't do anything.
+ if (!$pwd) {
+ return;
+ }
+
# Looks for credential configuration for the given SourceName. Create it if none is found.
$sourceElement = $creds.SelectSingleNode($Source)
if ($sourceElement -eq $null)
@@ -110,11 +116,6 @@ if (!(Test-Path $ConfigFile -PathType Leaf)) {
ExitWithExitCode 1
}
-if (!$Password) {
- Write-PipelineTelemetryError -Category 'Build' -Message 'Eng/common/SetupNugetSources.ps1 returned a non-zero exit code. Please supply a valid PAT'
- ExitWithExitCode 1
-}
-
# Load NuGet.config
$doc = New-Object System.Xml.XmlDocument
$filename = (Get-Item $ConfigFile).FullName
@@ -127,11 +128,14 @@ if ($sources -eq $null) {
$doc.DocumentElement.AppendChild($sources) | Out-Null
}
-# Looks for a <packageSourceCredentials> node. Create it if none is found.
-$creds = $doc.DocumentElement.SelectSingleNode("packageSourceCredentials")
-if ($creds -eq $null) {
- $creds = $doc.CreateElement("packageSourceCredentials")
- $doc.DocumentElement.AppendChild($creds) | Out-Null
+$creds = $null
+if ($Password) {
+ # Looks for a <packageSourceCredentials> node. Create it if none is found.
+ $creds = $doc.DocumentElement.SelectSingleNode("packageSourceCredentials")
+ if ($creds -eq $null) {
+ $creds = $doc.CreateElement("packageSourceCredentials")
+ $doc.DocumentElement.AppendChild($creds) | Out-Null
+ }
}
# Check for disabledPackageSources; we'll enable any darc-int ones we find there
@@ -153,7 +157,7 @@ if ($dotnet31Source -ne $null) {
AddPackageSource -Sources $sources -SourceName "dotnet3.1-internal-transport" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal-transport/nuget/v2" -Creds $creds -Username $userName -pwd $Password
}
-$dotnetVersions = @('5','6','7','8')
+$dotnetVersions = @('5','6','7','8','9')
foreach ($dotnetVersion in $dotnetVersions) {
$feedPrefix = "dotnet" + $dotnetVersion;
@@ -164,4 +168,4 @@ foreach ($dotnetVersion in $dotnetVersions) {
}
}
-$doc.Save($filename)
\ No newline at end of file
+$doc.Save($filename)
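As a hedged sketch of the behavioral change above: `-Password` is no longer mandatory, and the `packageSourceCredentials` section is only created when a password is actually supplied. Assuming PowerShell 7 (`pwsh`) and a repo-root `NuGet.config` (the `$PAT` variable is illustrative):

```bash
# Feeds are added and darc-int feeds re-enabled either way; credentials are
# written only when -Password is given.
pwsh -NoProfile -File eng/common/SetupNugetSources.ps1 -ConfigFile ./NuGet.config
pwsh -NoProfile -File eng/common/SetupNugetSources.ps1 -ConfigFile ./NuGet.config -Password "$PAT"
```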
diff --git a/eng/common/SetupNugetSources.sh b/eng/common/SetupNugetSources.sh
index d387c7eac95..4604b61b032 100755
--- a/eng/common/SetupNugetSources.sh
+++ b/eng/common/SetupNugetSources.sh
@@ -1,28 +1,27 @@
#!/usr/bin/env bash
-# This file is a temporary workaround for internal builds to be able to restore from private AzDO feeds.
-# This file should be removed as part of this issue: https://github.com/dotnet/arcade/issues/4080
+# This script adds internal feeds required to build commits that depend on internal package sources. For instance,
+# dotnet6-internal would be added automatically if dotnet6 was found in the nuget.config file. In addition, it also
+# enables disabled internal Maestro (darc-int*) feeds.
+#
+# Optionally, this script also adds a credential entry for each of the internal feeds if a credential is supplied.
#
-# What the script does is iterate over all package sources in the pointed NuGet.config and add a credential entry
-# under <packageSourceCredentials> for each Maestro's managed private feed. Two additional credential
-# entries are also added for the two private static internal feeds: dotnet3-internal and dotnet3-internal-transport.
-#
-# This script needs to be called in every job that will restore packages and which the base repo has
-# private AzDO feeds in the NuGet.config.
-#
-# See example YAML call for this script below. Note the use of the variable `$(dn-bot-dnceng-artifact-feeds-rw)`
-# from the AzureDevOps-Artifact-Feeds-Pats variable group.
-#
-# Any disabledPackageSources entries which start with "darc-int" will be re-enabled as part of this script executing.
+# See example call for this script below.
#
# - task: Bash@3
-# displayName: Setup Private Feeds Credentials
+# displayName: Setup Internal Feeds
# inputs:
# filePath: $(Build.SourcesDirectory)/eng/common/SetupNugetSources.sh
-# arguments: $(Build.SourcesDirectory)/NuGet.config $Token
+# arguments: $(Build.SourcesDirectory)/NuGet.config
# condition: ne(variables['Agent.OS'], 'Windows_NT')
-# env:
-# Token: $(dn-bot-dnceng-artifact-feeds-rw)
+# - task: NuGetAuthenticate@1
+#
+# Note that the NuGetAuthenticate task should be called after SetupNugetSources.
+# This ensures that:
+# - Appropriate creds are set for the added internal feeds (if not supplied to the script)
+# - The credential provider is installed.
+#
+# This logic is also abstracted into enable-internal-sources.yml.
ConfigFile=$1
CredToken=$2
@@ -48,11 +47,6 @@ if [ ! -f "$ConfigFile" ]; then
ExitWithExitCode 1
fi
-if [ -z "$CredToken" ]; then
- Write-PipelineTelemetryError -category 'Build' "Error: Eng/common/SetupNugetSources.sh returned a non-zero exit code. Please supply a valid PAT"
- ExitWithExitCode 1
-fi
-
if [[ `uname -s` == "Darwin" ]]; then
NL=$'\\\n'
TB=''
@@ -105,7 +99,7 @@ if [ "$?" == "0" ]; then
PackageSources+=('dotnet3.1-internal-transport')
fi
-DotNetVersions=('5' '6' '7' '8')
+DotNetVersions=('5' '6' '7' '8' '9')
for DotNetVersion in ${DotNetVersions[@]} ; do
FeedPrefix="dotnet${DotNetVersion}";
@@ -140,18 +134,20 @@ PackageSources+="$IFS"
PackageSources+=$(grep -oh '"darc-int-[^"]*"' $ConfigFile | tr -d '"')
IFS=$PrevIFS
-for FeedName in ${PackageSources[@]} ; do
- # Check if there is no existing credential for this FeedName
- grep -i "<$FeedName>" $ConfigFile
- if [ "$?" != "0" ]; then
- echo "Adding credentials for $FeedName."
+if [ "$CredToken" ]; then
+ for FeedName in ${PackageSources[@]} ; do
+ # Check if there is no existing credential for this FeedName
+ grep -i "<$FeedName>" $ConfigFile
+ if [ "$?" != "0" ]; then
+ echo "Adding credentials for $FeedName."
- PackageSourceCredentialsNodeFooter="</packageSourceCredentials>"
- NewCredential="${TB}${TB}<$FeedName>${NL}<add key=\"Username\" value=\"dn-bot\" />${NL}<add key=\"ClearTextPassword\" value=\"$CredToken\" />${NL}</$FeedName>"
+ PackageSourceCredentialsNodeFooter="</packageSourceCredentials>"
+ NewCredential="${TB}${TB}<$FeedName>${NL}<add key=\"Username\" value=\"dn-bot\" />${NL}<add key=\"ClearTextPassword\" value=\"$CredToken\" />${NL}</$FeedName>"
- sed -i.bak "s|$PackageSourceCredentialsNodeFooter|$NewCredential${NL}$PackageSourceCredentialsNodeFooter|" $ConfigFile
- fi
-done
+ sed -i.bak "s|$PackageSourceCredentialsNodeFooter|$NewCredential${NL}$PackageSourceCredentialsNodeFooter|" $ConfigFile
+ fi
+ done
+fi
# Re-enable any entries in disabledPackageSources where the feed name contains darc-int
grep -i "" $ConfigFile
diff --git a/eng/common/build.cmd b/eng/common/build.cmd
new file mode 100644
index 00000000000..99daf368aba
--- /dev/null
+++ b/eng/common/build.cmd
@@ -0,0 +1,3 @@
+@echo off
+powershell -ExecutionPolicy ByPass -NoProfile -command "& """%~dp0build.ps1""" %*"
+exit /b %ErrorLevel%
diff --git a/eng/common/build.ps1 b/eng/common/build.ps1
index 33a6f2d0e24..438f9920c43 100644
--- a/eng/common/build.ps1
+++ b/eng/common/build.ps1
@@ -19,6 +19,7 @@ Param(
[switch] $pack,
[switch] $publish,
[switch] $clean,
+ [switch][Alias('pb')]$productBuild,
[switch][Alias('bl')]$binaryLog,
[switch][Alias('nobl')]$excludeCIBinarylog,
[switch] $ci,
@@ -58,6 +59,7 @@ function Print-Usage() {
Write-Host " -sign Sign build outputs"
Write-Host " -publish Publish artifacts (e.g. symbols)"
Write-Host " -clean Clean the solution"
+ Write-Host " -productBuild Build the solution in the way it will be built in the full .NET product (VMR) build (short: -pb)"
Write-Host ""
Write-Host "Advanced settings:"
@@ -120,6 +122,7 @@ function Build {
/p:Deploy=$deploy `
/p:Test=$test `
/p:Pack=$pack `
+ /p:DotNetBuildRepo=$productBuild `
/p:IntegrationTest=$integrationTest `
/p:PerformanceTest=$performanceTest `
/p:Sign=$sign `
diff --git a/eng/common/build.sh b/eng/common/build.sh
index 50af40cdd2c..ac1ee8620cd 100755
--- a/eng/common/build.sh
+++ b/eng/common/build.sh
@@ -22,6 +22,9 @@ usage()
echo " --sourceBuild Source-build the solution (short: -sb)"
echo " Will additionally trigger the following actions: --restore, --build, --pack"
echo " If --configuration is not set explicitly, will also set it to 'Release'"
+ echo " --productBuild Build the solution in the way it will be built in the full .NET product (VMR) build (short: -pb)"
+ echo " Will additionally trigger the following actions: --restore, --build, --pack"
+ echo " If --configuration is not set explicitly, will also set it to 'Release'"
echo " --rebuild Rebuild solution"
echo " --test Run all unit tests in the solution (short: -t)"
echo " --integrationTest Run all integration tests in the solution"
@@ -59,6 +62,7 @@ scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
restore=false
build=false
source_build=false
+product_build=false
rebuild=false
test=false
integration_test=false
@@ -105,7 +109,7 @@ while [[ $# > 0 ]]; do
-binarylog|-bl)
binary_log=true
;;
- -excludeCIBinarylog|-nobl)
+ -excludecibinarylog|-nobl)
exclude_ci_binary_log=true
;;
-pipelineslog|-pl)
@@ -126,6 +130,13 @@ while [[ $# > 0 ]]; do
-sourcebuild|-sb)
build=true
source_build=true
+ product_build=true
+ restore=true
+ pack=true
+ ;;
+ -productbuild|-pb)
+ build=true
+ product_build=true
restore=true
pack=true
;;
@@ -219,7 +230,9 @@ function Build {
/p:RepoRoot="$repo_root" \
/p:Restore=$restore \
/p:Build=$build \
+ /p:DotNetBuildRepo=$product_build \
/p:ArcadeBuildFromSource=$source_build \
+ /p:DotNetBuildSourceOnly=$source_build \
/p:Rebuild=$rebuild \
/p:Test=$test \
/p:Pack=$pack \
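A short usage sketch of the new switch (flag spellings per the usage text above; `-c` is the script's existing configuration option):

```bash
# --productBuild (short: -pb) implies --restore, --build and --pack, and
# defaults the configuration to Release when none is set explicitly.
./eng/common/build.sh --productBuild
./eng/common/build.sh -pb -c Debug
```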
diff --git a/eng/common/core-templates/job/job.yml b/eng/common/core-templates/job/job.yml
new file mode 100644
index 00000000000..ba53ebfbd51
--- /dev/null
+++ b/eng/common/core-templates/job/job.yml
@@ -0,0 +1,247 @@
+parameters:
+# Job schema parameters - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job
+ cancelTimeoutInMinutes: ''
+ condition: ''
+ container: ''
+ continueOnError: false
+ dependsOn: ''
+ displayName: ''
+ pool: ''
+ steps: []
+ strategy: ''
+ timeoutInMinutes: ''
+ variables: []
+ workspace: ''
+ templateContext: {}
+
+# Job base template specific parameters
+ # See schema documentation - https://github.com/dotnet/arcade/blob/master/Documentation/AzureDevOps/TemplateSchema.md
+ # publishing defaults
+ artifacts: ''
+ enableMicrobuild: false
+ enablePublishBuildArtifacts: false
+ enablePublishBuildAssets: false
+ enablePublishTestResults: false
+ enablePublishUsingPipelines: false
+ enableBuildRetry: false
+ mergeTestResults: false
+ testRunTitle: ''
+ testResultsFormat: ''
+ name: ''
+ componentGovernanceSteps: []
+ preSteps: []
+ artifactPublishSteps: []
+ runAsPublic: false
+
+# 1es specific parameters
+ is1ESPipeline: ''
+
+jobs:
+- job: ${{ parameters.name }}
+
+ ${{ if ne(parameters.cancelTimeoutInMinutes, '') }}:
+ cancelTimeoutInMinutes: ${{ parameters.cancelTimeoutInMinutes }}
+
+ ${{ if ne(parameters.condition, '') }}:
+ condition: ${{ parameters.condition }}
+
+ ${{ if ne(parameters.container, '') }}:
+ container: ${{ parameters.container }}
+
+ ${{ if ne(parameters.continueOnError, '') }}:
+ continueOnError: ${{ parameters.continueOnError }}
+
+ ${{ if ne(parameters.dependsOn, '') }}:
+ dependsOn: ${{ parameters.dependsOn }}
+
+ ${{ if ne(parameters.displayName, '') }}:
+ displayName: ${{ parameters.displayName }}
+
+ ${{ if ne(parameters.pool, '') }}:
+ pool: ${{ parameters.pool }}
+
+ ${{ if ne(parameters.strategy, '') }}:
+ strategy: ${{ parameters.strategy }}
+
+ ${{ if ne(parameters.timeoutInMinutes, '') }}:
+ timeoutInMinutes: ${{ parameters.timeoutInMinutes }}
+
+ ${{ if ne(parameters.templateContext, '') }}:
+ templateContext: ${{ parameters.templateContext }}
+
+ variables:
+ - ${{ if ne(parameters.enableTelemetry, 'false') }}:
+ - name: DOTNET_CLI_TELEMETRY_PROFILE
+ value: '$(Build.Repository.Uri)'
+ - ${{ if eq(parameters.enableRichCodeNavigation, 'true') }}:
+ - name: EnableRichCodeNavigation
+ value: 'true'
+ # Retry signature validation up to three times, waiting 2 seconds between attempts.
+ # See https://learn.microsoft.com/en-us/nuget/reference/errors-and-warnings/nu3028#retry-untrusted-root-failures
+ - name: NUGET_EXPERIMENTAL_CHAIN_BUILD_RETRY_POLICY
+ value: 3,2000
+ - ${{ each variable in parameters.variables }}:
+ # handle name-value variable syntax
+ # example:
+ # - name: [key]
+ # value: [value]
+ - ${{ if ne(variable.name, '') }}:
+ - name: ${{ variable.name }}
+ value: ${{ variable.value }}
+
+ # handle variable groups
+ - ${{ if ne(variable.group, '') }}:
+ - group: ${{ variable.group }}
+
+ # handle template variable syntax
+ # example:
+ # - template: path/to/template.yml
+ # parameters:
+ # [key]: [value]
+ - ${{ if ne(variable.template, '') }}:
+ - template: ${{ variable.template }}
+ ${{ if ne(variable.parameters, '') }}:
+ parameters: ${{ variable.parameters }}
+
+ # handle key-value variable syntax.
+ # example:
+ # - [key]: [value]
+ - ${{ if and(eq(variable.name, ''), eq(variable.group, ''), eq(variable.template, '')) }}:
+ - ${{ each pair in variable }}:
+ - name: ${{ pair.key }}
+ value: ${{ pair.value }}
+
+ # DotNet-HelixApi-Access provides 'HelixApiAccessToken' for internal builds
+ - ${{ if and(eq(parameters.enableTelemetry, 'true'), eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
+ - group: DotNet-HelixApi-Access
+
+ ${{ if ne(parameters.workspace, '') }}:
+ workspace: ${{ parameters.workspace }}
+
+ steps:
+ - ${{ if eq(parameters.is1ESPipeline, '') }}:
+ - 'Illegal entry point, is1ESPipeline is not defined. Repository yaml should not directly reference templates in core-templates folder.': error
+
+ - ${{ if ne(parameters.preSteps, '') }}:
+ - ${{ each preStep in parameters.preSteps }}:
+ - ${{ preStep }}
+
+ - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
+ - ${{ if eq(parameters.enableMicrobuild, 'true') }}:
+ - task: MicroBuildSigningPlugin@4
+ displayName: Install MicroBuild plugin
+ inputs:
+ signType: $(_SignType)
+ zipSources: false
+ feedSource: https://dnceng.pkgs.visualstudio.com/_packaging/MicroBuildToolset/nuget/v3/index.json
+ env:
+ TeamName: $(_TeamName)
+ MicroBuildOutputFolderOverride: '$(Agent.TempDirectory)'
+ continueOnError: ${{ parameters.continueOnError }}
+ condition: and(succeeded(), in(variables['_SignType'], 'real', 'test'), eq(variables['Agent.Os'], 'Windows_NT'))
+
+ - ${{ if and(eq(parameters.runAsPublic, 'false'), eq(variables['System.TeamProject'], 'internal')) }}:
+ - task: NuGetAuthenticate@1
+
+ - ${{ if and(ne(parameters.artifacts.download, 'false'), ne(parameters.artifacts.download, '')) }}:
+ - task: DownloadPipelineArtifact@2
+ inputs:
+ buildType: current
+ artifactName: ${{ coalesce(parameters.artifacts.download.name, 'Artifacts_$(Agent.OS)_$(_BuildConfig)') }}
+ targetPath: ${{ coalesce(parameters.artifacts.download.path, 'artifacts') }}
+ itemPattern: ${{ coalesce(parameters.artifacts.download.pattern, '**') }}
+
+ - ${{ each step in parameters.steps }}:
+ - ${{ step }}
+
+ - ${{ if eq(parameters.enableRichCodeNavigation, true) }}:
+ - task: RichCodeNavIndexer@0
+ displayName: RichCodeNav Upload
+ inputs:
+ languages: ${{ coalesce(parameters.richCodeNavigationLanguage, 'csharp') }}
+ environment: ${{ coalesce(parameters.richCodeNavigationEnvironment, 'internal') }}
+ richNavLogOutputDirectory: $(Build.SourcesDirectory)/artifacts/bin
+ uploadRichNavArtifacts: ${{ coalesce(parameters.richCodeNavigationUploadArtifacts, false) }}
+ continueOnError: true
+
+ - ${{ each step in parameters.componentGovernanceSteps }}:
+ - ${{ step }}
+
+ - ${{ if eq(parameters.enableMicrobuild, 'true') }}:
+ - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
+ - task: MicroBuildCleanup@1
+ displayName: Execute Microbuild cleanup tasks
+ condition: and(always(), in(variables['_SignType'], 'real', 'test'), eq(variables['Agent.Os'], 'Windows_NT'))
+ continueOnError: ${{ parameters.continueOnError }}
+ env:
+ TeamName: $(_TeamName)
+
+ # Publish test results
+ - ${{ if or(and(eq(parameters.enablePublishTestResults, 'true'), eq(parameters.testResultsFormat, '')), eq(parameters.testResultsFormat, 'xunit')) }}:
+ - task: PublishTestResults@2
+ displayName: Publish XUnit Test Results
+ inputs:
+ testResultsFormat: 'xUnit'
+ testResultsFiles: '*.xml'
+ searchFolder: '$(Build.SourcesDirectory)/artifacts/TestResults/$(_BuildConfig)'
+ testRunTitle: ${{ coalesce(parameters.testRunTitle, parameters.name, '$(System.JobName)') }}-xunit
+ mergeTestResults: ${{ parameters.mergeTestResults }}
+ continueOnError: true
+ condition: always()
+ - ${{ if or(and(eq(parameters.enablePublishTestResults, 'true'), eq(parameters.testResultsFormat, '')), eq(parameters.testResultsFormat, 'vstest')) }}:
+ - task: PublishTestResults@2
+ displayName: Publish TRX Test Results
+ inputs:
+ testResultsFormat: 'VSTest'
+ testResultsFiles: '*.trx'
+ searchFolder: '$(Build.SourcesDirectory)/artifacts/TestResults/$(_BuildConfig)'
+ testRunTitle: ${{ coalesce(parameters.testRunTitle, parameters.name, '$(System.JobName)') }}-trx
+ mergeTestResults: ${{ parameters.mergeTestResults }}
+ continueOnError: true
+ condition: always()
+
+ # gather artifacts
+ - ${{ if ne(parameters.artifacts.publish, '') }}:
+ - ${{ if and(ne(parameters.artifacts.publish.artifacts, 'false'), ne(parameters.artifacts.publish.artifacts, '')) }}:
+ - task: CopyFiles@2
+ displayName: Gather binaries for publish to artifacts
+ inputs:
+ SourceFolder: 'artifacts/bin'
+ Contents: '**'
+ TargetFolder: '$(Build.ArtifactStagingDirectory)/artifacts/bin'
+ - task: CopyFiles@2
+ displayName: Gather packages for publish to artifacts
+ inputs:
+ SourceFolder: 'artifacts/packages'
+ Contents: '**'
+ TargetFolder: '$(Build.ArtifactStagingDirectory)/artifacts/packages'
+ - ${{ if and(ne(parameters.artifacts.publish.logs, 'false'), ne(parameters.artifacts.publish.logs, '')) }}:
+ - task: CopyFiles@2
+ displayName: Gather logs for publish to artifacts
+ inputs:
+ SourceFolder: 'artifacts/log'
+ Contents: '**'
+ TargetFolder: '$(Build.ArtifactStagingDirectory)/artifacts/log'
+ continueOnError: true
+ condition: always()
+
+ - ${{ if eq(parameters.enablePublishBuildArtifacts, 'true') }}:
+ - task: CopyFiles@2
+ displayName: Gather logs for publish to artifacts
+ inputs:
+ SourceFolder: 'artifacts/log/$(_BuildConfig)'
+ Contents: '**'
+ TargetFolder: '$(Build.ArtifactStagingDirectory)/artifacts/log/$(_BuildConfig)'
+ continueOnError: true
+ condition: always()
+ - ${{ if eq(parameters.enableBuildRetry, 'true') }}:
+ - task: CopyFiles@2
+ displayName: Gather buildconfiguration for build retry
+ inputs:
+ SourceFolder: '$(Build.SourcesDirectory)/eng/common/BuildConfiguration'
+ Contents: '**'
+ TargetFolder: '$(Build.ArtifactStagingDirectory)/eng/common/BuildConfiguration'
+ continueOnError: true
+ condition: always()
+ - ${{ each step in parameters.artifactPublishSteps }}:
+ - ${{ step }}
diff --git a/eng/common/core-templates/job/onelocbuild.yml b/eng/common/core-templates/job/onelocbuild.yml
new file mode 100644
index 00000000000..00feec8ebbc
--- /dev/null
+++ b/eng/common/core-templates/job/onelocbuild.yml
@@ -0,0 +1,121 @@
+parameters:
+ # Optional: dependencies of the job
+ dependsOn: ''
+
+ # Optional: A defined YAML pool - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#pool
+ pool: ''
+
+ CeapexPat: $(dn-bot-ceapex-package-r) # PAT for the loc AzDO instance https://dev.azure.com/ceapex
+ GithubPat: $(BotAccount-dotnet-bot-repo-PAT)
+
+ SourcesDirectory: $(Build.SourcesDirectory)
+ CreatePr: true
+ AutoCompletePr: false
+ ReusePr: true
+ UseLfLineEndings: true
+ UseCheckedInLocProjectJson: false
+ SkipLocProjectJsonGeneration: false
+ LanguageSet: VS_Main_Languages
+ LclSource: lclFilesInRepo
+ LclPackageId: ''
+ RepoType: gitHub
+ GitHubOrg: dotnet
+ MirrorRepo: ''
+ MirrorBranch: main
+ condition: ''
+ JobNameSuffix: ''
+ is1ESPipeline: ''
+jobs:
+- job: OneLocBuild${{ parameters.JobNameSuffix }}
+
+ dependsOn: ${{ parameters.dependsOn }}
+
+ displayName: OneLocBuild${{ parameters.JobNameSuffix }}
+
+ variables:
+ - group: OneLocBuildVariables # Contains the CeapexPat and GithubPat
+ - name: _GenerateLocProjectArguments
+ value: -SourcesDirectory ${{ parameters.SourcesDirectory }}
+ -LanguageSet "${{ parameters.LanguageSet }}"
+ -CreateNeutralXlfs
+ - ${{ if eq(parameters.UseCheckedInLocProjectJson, 'true') }}:
+ - name: _GenerateLocProjectArguments
+ value: ${{ variables._GenerateLocProjectArguments }} -UseCheckedInLocProjectJson
+ - template: /eng/common/core-templates/variables/pool-providers.yml
+ parameters:
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+
+ ${{ if ne(parameters.pool, '') }}:
+ pool: ${{ parameters.pool }}
+ ${{ if eq(parameters.pool, '') }}:
+ pool:
+ # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
+ ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
+ name: AzurePipelines-EO
+ image: 1ESPT-Windows2022
+ demands: Cmd
+ os: windows
+ # If it's not devdiv, it's dnceng
+ ${{ if ne(variables['System.TeamProject'], 'DevDiv') }}:
+ name: $(DncEngInternalBuildPool)
+ image: 1es-windows-2022
+ os: windows
+
+ steps:
+ - ${{ if eq(parameters.is1ESPipeline, '') }}:
+ - 'Illegal entry point, is1ESPipeline is not defined. Repository yaml should not directly reference templates in core-templates folder.': error
+
+ - ${{ if ne(parameters.SkipLocProjectJsonGeneration, 'true') }}:
+ - task: Powershell@2
+ inputs:
+ filePath: $(Build.SourcesDirectory)/eng/common/generate-locproject.ps1
+ arguments: $(_GenerateLocProjectArguments)
+ displayName: Generate LocProject.json
+ condition: ${{ parameters.condition }}
+
+ - task: OneLocBuild@2
+ displayName: OneLocBuild
+ env:
+ SYSTEM_ACCESSTOKEN: $(System.AccessToken)
+ inputs:
+ locProj: eng/Localize/LocProject.json
+ outDir: $(Build.ArtifactStagingDirectory)
+ lclSource: ${{ parameters.LclSource }}
+ lclPackageId: ${{ parameters.LclPackageId }}
+ isCreatePrSelected: ${{ parameters.CreatePr }}
+ isAutoCompletePrSelected: ${{ parameters.AutoCompletePr }}
+ ${{ if eq(parameters.CreatePr, true) }}:
+ isUseLfLineEndingsSelected: ${{ parameters.UseLfLineEndings }}
+ ${{ if eq(parameters.RepoType, 'gitHub') }}:
+ isShouldReusePrSelected: ${{ parameters.ReusePr }}
+ packageSourceAuth: patAuth
+ patVariable: ${{ parameters.CeapexPat }}
+ ${{ if eq(parameters.RepoType, 'gitHub') }}:
+ repoType: ${{ parameters.RepoType }}
+ gitHubPatVariable: "${{ parameters.GithubPat }}"
+ ${{ if ne(parameters.MirrorRepo, '') }}:
+ isMirrorRepoSelected: true
+ gitHubOrganization: ${{ parameters.GitHubOrg }}
+ mirrorRepo: ${{ parameters.MirrorRepo }}
+ mirrorBranch: ${{ parameters.MirrorBranch }}
+ condition: ${{ parameters.condition }}
+
+ - template: /eng/common/core-templates/steps/publish-build-artifacts.yml
+ parameters:
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+ args:
+ displayName: Publish Localization Files
+ pathToPublish: '$(Build.ArtifactStagingDirectory)/loc'
+ publishLocation: Container
+ artifactName: Loc
+ condition: ${{ parameters.condition }}
+
+ - template: /eng/common/core-templates/steps/publish-build-artifacts.yml
+ parameters:
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+ args:
+ displayName: Publish LocProject.json
+ pathToPublish: '$(Build.SourcesDirectory)/eng/Localize/'
+ publishLocation: Container
+ artifactName: Loc
+ condition: ${{ parameters.condition }}
\ No newline at end of file
diff --git a/eng/common/core-templates/job/publish-build-assets.yml b/eng/common/core-templates/job/publish-build-assets.yml
new file mode 100644
index 00000000000..3d3356e3196
--- /dev/null
+++ b/eng/common/core-templates/job/publish-build-assets.yml
@@ -0,0 +1,158 @@
+parameters:
+ configuration: 'Debug'
+
+ # Optional: condition for the job to run
+ condition: ''
+
+ # Optional: 'true' if future jobs should run even if this job fails
+ continueOnError: false
+
+ # Optional: dependencies of the job
+ dependsOn: ''
+
+ # Optional: Include PublishBuildArtifacts task
+ enablePublishBuildArtifacts: false
+
+ # Optional: A defined YAML pool - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#pool
+ pool: {}
+
+ # Optional: should run as a public build even in the internal project
+ # if 'true', the build won't run any of the internal only steps, even if it is running in non-public projects.
+ runAsPublic: false
+
+ # Optional: whether the build's artifacts will be published using release pipelines or direct feed publishing
+ publishUsingPipelines: false
+
+ # Optional: whether to publish the assets as soon as the publish-to-BAR step completes, rather than in a separate stage
+ publishAssetsImmediately: false
+
+ artifactsPublishingAdditionalParameters: ''
+
+ signingValidationAdditionalParameters: ''
+
+ is1ESPipeline: ''
+
+jobs:
+- job: Asset_Registry_Publish
+
+ dependsOn: ${{ parameters.dependsOn }}
+ timeoutInMinutes: 150
+
+ ${{ if eq(parameters.publishAssetsImmediately, 'true') }}:
+ displayName: Publish Assets
+ ${{ else }}:
+ displayName: Publish to Build Asset Registry
+
+ variables:
+ - template: /eng/common/core-templates/variables/pool-providers.yml
+ parameters:
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+ - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
+ - group: Publish-Build-Assets
+ - group: AzureDevOps-Artifact-Feeds-Pats
+ - name: runCodesignValidationInjection
+ value: false
+ # unconditional - needed for logs publishing (redactor tool version)
+ - template: /eng/common/core-templates/post-build/common-variables.yml
+
+ pool:
+ # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
+ ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
+ name: AzurePipelines-EO
+ image: 1ESPT-Windows2022
+ demands: Cmd
+ os: windows
+ # If it's not devdiv, it's dnceng
+ ${{ if ne(variables['System.TeamProject'], 'DevDiv') }}:
+ name: NetCore1ESPool-Publishing-Internal
+ image: windows.vs2019.amd64
+ os: windows
+ steps:
+ - ${{ if eq(parameters.is1ESPipeline, '') }}:
+ - 'Illegal entry point, is1ESPipeline is not defined. Repository yaml should not directly reference templates in core-templates folder.': error
+
+ - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
+ - checkout: self
+ fetchDepth: 3
+ clean: true
+
+ - task: DownloadBuildArtifacts@0
+ displayName: Download artifact
+ inputs:
+ artifactName: AssetManifests
+ downloadPath: '$(Build.StagingDirectory)/Download'
+ checkDownloadedFiles: true
+ condition: ${{ parameters.condition }}
+ continueOnError: ${{ parameters.continueOnError }}
+
+ - task: NuGetAuthenticate@1
+
+ - task: AzureCLI@2
+ displayName: Publish Build Assets
+ inputs:
+ azureSubscription: "Darc: Maestro Production"
+ scriptType: ps
+ scriptLocation: scriptPath
+ scriptPath: $(Build.SourcesDirectory)/eng/common/sdk-task.ps1
+ arguments: -task PublishBuildAssets -restore -msbuildEngine dotnet
+ /p:ManifestsPath='$(Build.StagingDirectory)/Download/AssetManifests'
+ /p:MaestroApiEndpoint=https://maestro.dot.net
+ /p:PublishUsingPipelines=${{ parameters.publishUsingPipelines }}
+ /p:OfficialBuildId=$(Build.BuildNumber)
+ condition: ${{ parameters.condition }}
+ continueOnError: ${{ parameters.continueOnError }}
+
+ - task: powershell@2
+ displayName: Create ReleaseConfigs Artifact
+ inputs:
+ targetType: inline
+ script: |
+ New-Item -Path "$(Build.StagingDirectory)/ReleaseConfigs" -ItemType Directory -Force
+ $filePath = "$(Build.StagingDirectory)/ReleaseConfigs/ReleaseConfigs.txt"
+ Add-Content -Path $filePath -Value $(BARBuildId)
+ Add-Content -Path $filePath -Value "$(DefaultChannels)"
+ Add-Content -Path $filePath -Value $(IsStableBuild)
+
+ $symbolExclusionfile = "$(Build.SourcesDirectory)/eng/SymbolPublishingExclusionsFile.txt"
+ if (Test-Path -Path $symbolExclusionfile)
+ {
+ Write-Host "SymbolExclusionFile exists"
+ Copy-Item -Path $symbolExclusionfile -Destination "$(Build.StagingDirectory)/ReleaseConfigs"
+ }
+
+ - template: /eng/common/core-templates/steps/publish-build-artifacts.yml
+ parameters:
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+ args:
+ displayName: Publish ReleaseConfigs Artifact
+ pathToPublish: '$(Build.StagingDirectory)/ReleaseConfigs'
+ publishLocation: Container
+ artifactName: ReleaseConfigs
+
+ - ${{ if eq(parameters.publishAssetsImmediately, 'true') }}:
+ - template: /eng/common/core-templates/post-build/setup-maestro-vars.yml
+ parameters:
+ BARBuildId: ${{ parameters.BARBuildId }}
+ PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+
+ - task: AzureCLI@2
+ displayName: Publish Using Darc
+ inputs:
+ azureSubscription: "Darc: Maestro Production"
+ scriptType: ps
+ scriptLocation: scriptPath
+ scriptPath: $(Build.SourcesDirectory)/eng/common/post-build/publish-using-darc.ps1
+ arguments: >
+ -BuildId $(BARBuildId)
+ -PublishingInfraVersion 3
+ -AzdoToken '$(System.AccessToken)'
+ -WaitPublishingFinish true
+ -ArtifactsPublishingAdditionalParameters '${{ parameters.artifactsPublishingAdditionalParameters }}'
+ -SymbolPublishingAdditionalParameters '${{ parameters.symbolPublishingAdditionalParameters }}'
+
+ - ${{ if eq(parameters.enablePublishBuildArtifacts, 'true') }}:
+ - template: /eng/common/core-templates/steps/publish-logs.yml
+ parameters:
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+ JobLabel: 'Publish_Artifacts_Logs'
diff --git a/eng/common/core-templates/job/source-build.yml b/eng/common/core-templates/job/source-build.yml
new file mode 100644
index 00000000000..c4713c8b6ed
--- /dev/null
+++ b/eng/common/core-templates/job/source-build.yml
@@ -0,0 +1,93 @@
+parameters:
+ # This template adds arcade-powered source-build to CI. The template produces a server job with a
+ # default ID 'Source_Build_Complete' to put in a dependency list if necessary.
+
+ # Specifies the prefix for source-build jobs added to pipeline. Use this if disambiguation needed.
+ jobNamePrefix: 'Source_Build'
+
+ # Defines the platform on which to run the job. By default, a linux-x64 machine, suitable for
+ # managed-only repositories. This is an object with these properties:
+ #
+ # name: ''
+ # The name of the job. This is included in the job ID.
+ # targetRID: ''
+ # The name of the target RID to use, instead of the one auto-detected by Arcade.
+ # nonPortable: false
+ # Enables non-portable mode. This means a more specific RID (e.g. fedora.32-x64 rather than
+ # linux-x64), and compiling against distro-provided packages rather than portable ones.
+ # skipPublishValidation: false
+ # Disables publishing validation. By default, a check is performed to ensure no packages are
+ # published by source-build.
+ # container: ''
+ # A container to use. Runs in docker.
+ # pool: {}
+ # A pool to use. Runs directly on an agent.
+ # buildScript: ''
+ # Specifies the build script to invoke to perform the build in the repo. The default
+ # './build.sh' should work for typical Arcade repositories, but this is customizable for
+ # difficult situations.
+ # jobProperties: {}
+ # A list of job properties to inject at the top level, for potential extensibility beyond
+ # container and pool.
+ platform: {}
+
+ is1ESPipeline: ''
+
+ # If set to true and running on a non-public project,
+ # Internal nuget and blob storage locations will be enabled.
+ # This is not enabled by default because many repositories do not need internal sources
+ # and do not need to have the required service connections approved in the pipeline.
+ enableInternalSources: false
+
+jobs:
+- job: ${{ parameters.jobNamePrefix }}_${{ parameters.platform.name }}
+ displayName: Source-Build (${{ parameters.platform.name }})
+
+ ${{ each property in parameters.platform.jobProperties }}:
+ ${{ property.key }}: ${{ property.value }}
+
+ ${{ if ne(parameters.platform.container, '') }}:
+ container: ${{ parameters.platform.container }}
+
+ ${{ if eq(parameters.platform.pool, '') }}:
+ # The default VM host AzDO pool. This should be capable of running Docker containers: almost all
+ # source-build builds run in Docker, including the default managed platform.
+ # /eng/common/core-templates/variables/pool-providers.yml can't be used here (some customers declare variables already), so duplicate its logic
+ ${{ if eq(parameters.is1ESPipeline, 'true') }}:
+ pool:
+ ${{ if eq(variables['System.TeamProject'], 'public') }}:
+ name: $[replace(replace(eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'), True, 'NetCore-Svc-Public' ), False, 'NetCore-Public')]
+ demands: ImageOverride -equals build.ubuntu.2004.amd64
+ ${{ if eq(variables['System.TeamProject'], 'internal') }}:
+ name: $[replace(replace(eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'), True, 'NetCore1ESPool-Svc-Internal'), False, 'NetCore1ESPool-Internal')]
+ image: 1es-mariner-2
+ os: linux
+ ${{ else }}:
+ pool:
+ ${{ if eq(variables['System.TeamProject'], 'public') }}:
+ name: $[replace(replace(eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'), True, 'NetCore-Svc-Public' ), False, 'NetCore-Public')]
+ demands: ImageOverride -equals Build.Ubuntu.2204.Amd64.Open
+ ${{ if eq(variables['System.TeamProject'], 'internal') }}:
+ name: $[replace(replace(eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'), True, 'NetCore1ESPool-Svc-Internal'), False, 'NetCore1ESPool-Internal')]
+ demands: ImageOverride -equals Build.Ubuntu.2204.Amd64
+ ${{ if ne(parameters.platform.pool, '') }}:
+ pool: ${{ parameters.platform.pool }}
+
+ workspace:
+ clean: all
+
+ steps:
+ - ${{ if eq(parameters.is1ESPipeline, '') }}:
+ - 'Illegal entry point, is1ESPipeline is not defined. Repository yaml should not directly reference templates in core-templates folder.': error
+
+ - ${{ if eq(parameters.enableInternalSources, true) }}:
+ - template: /eng/common/core-templates/steps/enable-internal-sources.yml
+ parameters:
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+ - template: /eng/common/core-templates/steps/enable-internal-runtimes.yml
+ parameters:
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+ - template: /eng/common/core-templates/steps/source-build.yml
+ parameters:
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+ platform: ${{ parameters.platform }}
diff --git a/eng/common/core-templates/job/source-index-stage1.yml b/eng/common/core-templates/job/source-index-stage1.yml
new file mode 100644
index 00000000000..205fb5b3a39
--- /dev/null
+++ b/eng/common/core-templates/job/source-index-stage1.yml
@@ -0,0 +1,81 @@
+parameters:
+ runAsPublic: false
+ sourceIndexUploadPackageVersion: 2.0.0-20240522.1
+ sourceIndexProcessBinlogPackageVersion: 1.0.1-20240522.1
+ sourceIndexPackageSource: https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json
+ sourceIndexBuildCommand: powershell -NoLogo -NoProfile -ExecutionPolicy Bypass -Command "eng/common/build.ps1 -restore -build -binarylog -ci"
+ preSteps: []
+ binlogPath: artifacts/log/Debug/Build.binlog
+ condition: ''
+ dependsOn: ''
+ pool: ''
+ is1ESPipeline: ''
+
+jobs:
+- job: SourceIndexStage1
+ dependsOn: ${{ parameters.dependsOn }}
+ condition: ${{ parameters.condition }}
+ variables:
+ - name: SourceIndexUploadPackageVersion
+ value: ${{ parameters.sourceIndexUploadPackageVersion }}
+ - name: SourceIndexProcessBinlogPackageVersion
+ value: ${{ parameters.sourceIndexProcessBinlogPackageVersion }}
+ - name: SourceIndexPackageSource
+ value: ${{ parameters.sourceIndexPackageSource }}
+ - name: BinlogPath
+ value: ${{ parameters.binlogPath }}
+ - template: /eng/common/core-templates/variables/pool-providers.yml
+ parameters:
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+
+ ${{ if ne(parameters.pool, '') }}:
+ pool: ${{ parameters.pool }}
+ ${{ if eq(parameters.pool, '') }}:
+ pool:
+ ${{ if eq(variables['System.TeamProject'], 'public') }}:
+ name: $(DncEngPublicBuildPool)
+ image: 1es-windows-2022-open
+ os: windows
+ ${{ if eq(variables['System.TeamProject'], 'internal') }}:
+ name: $(DncEngInternalBuildPool)
+ image: 1es-windows-2022
+ os: windows
+
+ steps:
+ - ${{ if eq(parameters.is1ESPipeline, '') }}:
+ - 'Illegal entry point, is1ESPipeline is not defined. Repository yaml should not directly reference templates in core-templates folder.': error
+
+ - ${{ each preStep in parameters.preSteps }}:
+ - ${{ preStep }}
+
+ - task: UseDotNet@2
+ displayName: Use .NET 8 SDK
+ inputs:
+ packageType: sdk
+ version: 8.0.x
+ installationPath: $(Agent.TempDirectory)/dotnet
+ workingDirectory: $(Agent.TempDirectory)
+
+ - script: |
+ $(Agent.TempDirectory)/dotnet/dotnet tool install BinLogToSln --version $(sourceIndexProcessBinlogPackageVersion) --add-source $(SourceIndexPackageSource) --tool-path $(Agent.TempDirectory)/.source-index/tools
+ $(Agent.TempDirectory)/dotnet/dotnet tool install UploadIndexStage1 --version $(sourceIndexUploadPackageVersion) --add-source $(SourceIndexPackageSource) --tool-path $(Agent.TempDirectory)/.source-index/tools
+ displayName: Download Tools
+ # Set working directory to the temp directory so 'dotnet' doesn't pick up the repo's global.json and try to use its SDK.
+ workingDirectory: $(Agent.TempDirectory)
+
+ - script: ${{ parameters.sourceIndexBuildCommand }}
+ displayName: Build Repository
+
+ - script: $(Agent.TempDirectory)/.source-index/tools/BinLogToSln -i $(BinlogPath) -r $(Build.SourcesDirectory) -n $(Build.Repository.Name) -o .source-index/stage1output
+ displayName: Process Binlog into indexable sln
+
+ - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
+ - task: AzureCLI@2
+ displayName: Log in to Azure and upload stage1 artifacts to source index
+ inputs:
+ azureSubscription: 'SourceDotNet Stage1 Publish'
+ addSpnToEnvironment: true
+ scriptType: 'ps'
+ scriptLocation: 'inlineScript'
+ inlineScript: |
+ $(Agent.TempDirectory)/.source-index/tools/UploadIndexStage1 -i .source-index/stage1output -n $(Build.Repository.Name) -s netsourceindexstage1 -b stage1
diff --git a/eng/common/core-templates/jobs/codeql-build.yml b/eng/common/core-templates/jobs/codeql-build.yml
new file mode 100644
index 00000000000..f2144252cc6
--- /dev/null
+++ b/eng/common/core-templates/jobs/codeql-build.yml
@@ -0,0 +1,33 @@
+parameters:
+ # See schema documentation in /Documentation/AzureDevOps/TemplateSchema.md
+ continueOnError: false
+ # Required: A collection of jobs to run - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job
+ jobs: []
+ # Optional: if specified, restore and use this version of Guardian instead of the default.
+ overrideGuardianVersion: ''
+ is1ESPipeline: ''
+
+jobs:
+- template: /eng/common/core-templates/jobs/jobs.yml
+ parameters:
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+ enableMicrobuild: false
+ enablePublishBuildArtifacts: false
+ enablePublishTestResults: false
+ enablePublishBuildAssets: false
+ enablePublishUsingPipelines: false
+ enableTelemetry: true
+
+ variables:
+ - group: Publish-Build-Assets
+ # The Guardian version specified in 'eng/common/sdl/packages.config'. This value must be kept in
+ # sync with the packages.config file.
+ - name: DefaultGuardianVersion
+ value: 0.109.0
+ - name: GuardianPackagesConfigFile
+ value: $(Build.SourcesDirectory)\eng\common\sdl\packages.config
+ - name: GuardianVersion
+ value: ${{ coalesce(parameters.overrideGuardianVersion, '$(DefaultGuardianVersion)') }}
+
+ jobs: ${{ parameters.jobs }}
+
diff --git a/eng/common/core-templates/jobs/jobs.yml b/eng/common/core-templates/jobs/jobs.yml
new file mode 100644
index 00000000000..ea69be4341c
--- /dev/null
+++ b/eng/common/core-templates/jobs/jobs.yml
@@ -0,0 +1,119 @@
+parameters:
+ # See schema documentation in /Documentation/AzureDevOps/TemplateSchema.md
+ continueOnError: false
+
+ # Optional: Include PublishBuildArtifacts task
+ enablePublishBuildArtifacts: false
+
+ # Optional: Enable publishing using release pipelines
+ enablePublishUsingPipelines: false
+
+ # Optional: Enable running the source-build jobs to build repo from source
+ enableSourceBuild: false
+
+ # Optional: Parameters for source-build template.
+ # See /eng/common/core-templates/jobs/source-build.yml for options
+ sourceBuildParameters: []
+
+ graphFileGeneration:
+ # Optional: Enable generating the graph files at the end of the build
+ enabled: false
+ # Optional: Include toolset dependencies in the generated graph files
+ includeToolset: false
+
+ # Required: A collection of jobs to run - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job
+ jobs: []
+
+ # Optional: Override automatically derived dependsOn value for "publish build assets" job
+ publishBuildAssetsDependsOn: ''
+
+  # Optional: Publish the assets as soon as the publish to BAR stage is complete, rather than doing so in a separate stage.
+ publishAssetsImmediately: false
+
+ # Optional: If using publishAssetsImmediately and additional parameters are needed, can be used to send along additional parameters (normally sent to post-build.yml)
+ artifactsPublishingAdditionalParameters: ''
+ signingValidationAdditionalParameters: ''
+
+  # Optional: run as a public build even in the internal project.
+  # If 'true', the build won't run any of the internal-only steps, even when running in non-public projects.
+ runAsPublic: false
+
+ enableSourceIndex: false
+ sourceIndexParams: {}
+
+ artifacts: {}
+ is1ESPipeline: ''
+
+# Internal resources (telemetry, microbuild) can only be accessed from non-public projects,
+# and some (Microbuild) should only be applied to non-PR cases for internal builds.
+
+jobs:
+- ${{ each job in parameters.jobs }}:
+ - ${{ if eq(parameters.is1ESPipeline, 'true') }}:
+ - template: /eng/common/templates-official/job/job.yml
+ parameters:
+ # pass along parameters
+ ${{ each parameter in parameters }}:
+ ${{ if ne(parameter.key, 'jobs') }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
+
+ # pass along job properties
+ ${{ each property in job }}:
+ ${{ if ne(property.key, 'job') }}:
+ ${{ property.key }}: ${{ property.value }}
+
+ name: ${{ job.job }}
+
+ - ${{ else }}:
+ - template: /eng/common/templates/job/job.yml
+ parameters:
+ # pass along parameters
+ ${{ each parameter in parameters }}:
+ ${{ if ne(parameter.key, 'jobs') }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
+
+ # pass along job properties
+ ${{ each property in job }}:
+ ${{ if ne(property.key, 'job') }}:
+ ${{ property.key }}: ${{ property.value }}
+
+ name: ${{ job.job }}
+
+- ${{ if eq(parameters.enableSourceBuild, true) }}:
+ - template: /eng/common/core-templates/jobs/source-build.yml
+ parameters:
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+ allCompletedJobId: Source_Build_Complete
+ ${{ each parameter in parameters.sourceBuildParameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
+
+- ${{ if eq(parameters.enableSourceIndex, 'true') }}:
+ - template: ../job/source-index-stage1.yml
+ parameters:
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+ runAsPublic: ${{ parameters.runAsPublic }}
+ ${{ each parameter in parameters.sourceIndexParams }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
+
+- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
+ - ${{ if or(eq(parameters.enablePublishBuildAssets, true), eq(parameters.artifacts.publish.manifests, 'true'), ne(parameters.artifacts.publish.manifests, '')) }}:
+ - template: ../job/publish-build-assets.yml
+ parameters:
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+ continueOnError: ${{ parameters.continueOnError }}
+ dependsOn:
+ - ${{ if ne(parameters.publishBuildAssetsDependsOn, '') }}:
+ - ${{ each job in parameters.publishBuildAssetsDependsOn }}:
+ - ${{ job.job }}
+ - ${{ if eq(parameters.publishBuildAssetsDependsOn, '') }}:
+ - ${{ each job in parameters.jobs }}:
+ - ${{ job.job }}
+ - ${{ if eq(parameters.enableSourceBuild, true) }}:
+ - Source_Build_Complete
+
+ runAsPublic: ${{ parameters.runAsPublic }}
+ publishUsingPipelines: ${{ parameters.enablePublishUsingPipelines }}
+ publishAssetsImmediately: ${{ parameters.publishAssetsImmediately }}
+ enablePublishBuildArtifacts: ${{ parameters.enablePublishBuildArtifacts }}
+ artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
+ signingValidationAdditionalParameters: ${{ parameters.signingValidationAdditionalParameters }}
diff --git a/eng/common/core-templates/jobs/source-build.yml b/eng/common/core-templates/jobs/source-build.yml
new file mode 100644
index 00000000000..a10ccfbee6d
--- /dev/null
+++ b/eng/common/core-templates/jobs/source-build.yml
@@ -0,0 +1,58 @@
+parameters:
+ # This template adds arcade-powered source-build to CI. A job is created for each platform, as
+ # well as an optional server job that completes when all platform jobs complete.
+
+ # The name of the "join" job for all source-build platforms. If set to empty string, the job is
+  # not included. Existing repo pipelines can use this job to depend on all source-build jobs
+ # completing without maintaining a separate list of every single job ID: just depend on this one
+ # server job. By default, not included. Recommended name if used: 'Source_Build_Complete'.
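+  # (For example, a downstream job can set "dependsOn: Source_Build_Complete" rather than listing each platform job.)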
+ allCompletedJobId: ''
+
+ # See /eng/common/core-templates/job/source-build.yml
+ jobNamePrefix: 'Source_Build'
+
+ # This is the default platform provided by Arcade, intended for use by a managed-only repo.
+ defaultManagedPlatform:
+ name: 'Managed'
+ container: 'mcr.microsoft.com/dotnet-buildtools/prereqs:centos-stream9'
+
+ # Defines the platforms on which to run build jobs. One job is created for each platform, and the
+ # object in this array is sent to the job template as 'platform'. If no platforms are specified,
+ # one job runs on 'defaultManagedPlatform'.
+ platforms: []
+
+ is1ESPipeline: ''
+
+ # If set to true and running on a non-public project,
+  # internal NuGet and blob storage locations will be enabled.
+ # This is not enabled by default because many repositories do not need internal sources
+ # and do not need to have the required service connections approved in the pipeline.
+ enableInternalSources: false
+
+jobs:
+
+- ${{ if ne(parameters.allCompletedJobId, '') }}:
+ - job: ${{ parameters.allCompletedJobId }}
+ displayName: Source-Build Complete
+ pool: server
+ dependsOn:
+ - ${{ each platform in parameters.platforms }}:
+ - ${{ parameters.jobNamePrefix }}_${{ platform.name }}
+ - ${{ if eq(length(parameters.platforms), 0) }}:
+ - ${{ parameters.jobNamePrefix }}_${{ parameters.defaultManagedPlatform.name }}
+
+- ${{ each platform in parameters.platforms }}:
+ - template: /eng/common/core-templates/job/source-build.yml
+ parameters:
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+ jobNamePrefix: ${{ parameters.jobNamePrefix }}
+ platform: ${{ platform }}
+ enableInternalSources: ${{ parameters.enableInternalSources }}
+
+- ${{ if eq(length(parameters.platforms), 0) }}:
+ - template: /eng/common/core-templates/job/source-build.yml
+ parameters:
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+ jobNamePrefix: ${{ parameters.jobNamePrefix }}
+ platform: ${{ parameters.defaultManagedPlatform }}
+ enableInternalSources: ${{ parameters.enableInternalSources }}
diff --git a/eng/common/core-templates/post-build/common-variables.yml b/eng/common/core-templates/post-build/common-variables.yml
new file mode 100644
index 00000000000..d5627a994ae
--- /dev/null
+++ b/eng/common/core-templates/post-build/common-variables.yml
@@ -0,0 +1,22 @@
+variables:
+ - group: Publish-Build-Assets
+
+ # Whether the build is internal or not
+ - name: IsInternalBuild
+ value: ${{ and(ne(variables['System.TeamProject'], 'public'), contains(variables['Build.SourceBranch'], 'internal')) }}
+
+ # Default Maestro++ API Endpoint and API Version
+ - name: MaestroApiEndPoint
+ value: "https://maestro.dot.net"
+ - name: MaestroApiVersion
+ value: "2020-02-20"
+
+ - name: SourceLinkCLIVersion
+ value: 3.0.0
+ - name: SymbolToolVersion
+ value: 1.0.1
+ - name: BinlogToolVersion
+ value: 1.0.11
+
+ - name: runCodesignValidationInjection
+ value: false
diff --git a/eng/common/core-templates/post-build/post-build.yml b/eng/common/core-templates/post-build/post-build.yml
new file mode 100644
index 00000000000..454fd75c7af
--- /dev/null
+++ b/eng/common/core-templates/post-build/post-build.yml
@@ -0,0 +1,316 @@
+parameters:
+ # Which publishing infra should be used. THIS SHOULD MATCH THE VERSION ON THE BUILD MANIFEST.
+ # Publishing V1 is no longer supported
+ # Publishing V2 is no longer supported
+ # Publishing V3 is the default
+ - name: publishingInfraVersion
+ displayName: Which version of publishing should be used to promote the build definition?
+ type: number
+ default: 3
+ values:
+ - 3
+
+ - name: BARBuildId
+ displayName: BAR Build Id
+ type: number
+ default: 0
+
+ - name: PromoteToChannelIds
+ displayName: Channel to promote BARBuildId to
+ type: string
+ default: ''
+
+ - name: enableSourceLinkValidation
+ displayName: Enable SourceLink validation
+ type: boolean
+ default: false
+
+ - name: enableSigningValidation
+ displayName: Enable signing validation
+ type: boolean
+ default: true
+
+ - name: enableSymbolValidation
+ displayName: Enable symbol validation
+ type: boolean
+ default: false
+
+ - name: enableNugetValidation
+ displayName: Enable NuGet validation
+ type: boolean
+ default: true
+
+ - name: publishInstallersAndChecksums
+ displayName: Publish installers and checksums
+ type: boolean
+ default: true
+
+ - name: SDLValidationParameters
+ type: object
+ default:
+ enable: false
+ publishGdn: false
+ continueOnError: false
+ params: ''
+ artifactNames: ''
+ downloadArtifacts: true
+
+ # These parameters let the user customize the call to sdk-task.ps1 for publishing
+ # symbols & general artifacts as well as for signing validation
+ - name: symbolPublishingAdditionalParameters
+ displayName: Symbol publishing additional parameters
+ type: string
+ default: ''
+
+ - name: artifactsPublishingAdditionalParameters
+ displayName: Artifact publishing additional parameters
+ type: string
+ default: ''
+
+ - name: signingValidationAdditionalParameters
+ displayName: Signing validation additional parameters
+ type: string
+ default: ''
+
+ # Which stages should finish execution before post-build stages start
+ - name: validateDependsOn
+ type: object
+ default:
+ - build
+
+ - name: publishDependsOn
+ type: object
+ default:
+ - Validate
+
+ # Optional: Call asset publishing rather than running in a separate stage
+ - name: publishAssetsImmediately
+ type: boolean
+ default: false
+
+ - name: is1ESPipeline
+ type: boolean
+ default: false
+
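+# Note: repository pipelines are expected to consume this file through the wrapper templates
+# /eng/common/templates/post-build/post-build.yml or /eng/common/templates-official/post-build/post-build.yml,
+# rather than referencing the core-templates folder directly.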
+stages:
+- ${{ if or(eq( parameters.enableNugetValidation, 'true'), eq(parameters.enableSigningValidation, 'true'), eq(parameters.enableSourceLinkValidation, 'true'), eq(parameters.SDLValidationParameters.enable, 'true')) }}:
+ - stage: Validate
+ dependsOn: ${{ parameters.validateDependsOn }}
+ displayName: Validate Build Assets
+ variables:
+ - template: /eng/common/core-templates/post-build/common-variables.yml
+ - template: /eng/common/core-templates/variables/pool-providers.yml
+ parameters:
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+ jobs:
+ - job:
+ displayName: NuGet Validation
+ condition: and(succeededOrFailed(), eq( ${{ parameters.enableNugetValidation }}, 'true'))
+ pool:
+ # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
+ ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
+ name: AzurePipelines-EO
+ image: 1ESPT-Windows2022
+ demands: Cmd
+ os: windows
+ # If it's not devdiv, it's dnceng
+ ${{ else }}:
+ ${{ if eq(parameters.is1ESPipeline, true) }}:
+ name: $(DncEngInternalBuildPool)
+ image: windows.vs2022.amd64
+ os: windows
+ ${{ else }}:
+ name: $(DncEngInternalBuildPool)
+ demands: ImageOverride -equals windows.vs2022.amd64
+
+ steps:
+ - template: /eng/common/core-templates/post-build/setup-maestro-vars.yml
+ parameters:
+ BARBuildId: ${{ parameters.BARBuildId }}
+ PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+
+ - task: DownloadBuildArtifacts@0
+ displayName: Download Package Artifacts
+ inputs:
+ buildType: specific
+ buildVersionToDownload: specific
+ project: $(AzDOProjectName)
+ pipeline: $(AzDOPipelineId)
+ buildId: $(AzDOBuildId)
+ artifactName: PackageArtifacts
+ checkDownloadedFiles: true
+
+ - task: PowerShell@2
+ displayName: Validate
+ inputs:
+ filePath: $(Build.SourcesDirectory)/eng/common/post-build/nuget-validation.ps1
+ arguments: -PackagesPath $(Build.ArtifactStagingDirectory)/PackageArtifacts/
+
+ - job:
+ displayName: Signing Validation
+ condition: and( eq( ${{ parameters.enableSigningValidation }}, 'true'), ne( variables['PostBuildSign'], 'true'))
+ pool:
+ # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
+ ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
+ name: AzurePipelines-EO
+ image: 1ESPT-Windows2022
+ demands: Cmd
+ os: windows
+ # If it's not devdiv, it's dnceng
+ ${{ else }}:
+ ${{ if eq(parameters.is1ESPipeline, true) }}:
+ name: $(DncEngInternalBuildPool)
+ image: 1es-windows-2022
+ os: windows
+ ${{ else }}:
+ name: $(DncEngInternalBuildPool)
+ demands: ImageOverride -equals windows.vs2022.amd64
+ steps:
+ - template: /eng/common/core-templates/post-build/setup-maestro-vars.yml
+ parameters:
+ BARBuildId: ${{ parameters.BARBuildId }}
+ PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+
+ - task: DownloadBuildArtifacts@0
+ displayName: Download Package Artifacts
+ inputs:
+ buildType: specific
+ buildVersionToDownload: specific
+ project: $(AzDOProjectName)
+ pipeline: $(AzDOPipelineId)
+ buildId: $(AzDOBuildId)
+ artifactName: PackageArtifacts
+ checkDownloadedFiles: true
+ itemPattern: |
+ **
+ !**/Microsoft.SourceBuild.Intermediate.*.nupkg
+
+ # This is necessary whenever we want to publish/restore to an AzDO private feed
+      # Since sdk-task.ps1 tries to restore packages, we need to do this authentication here;
+      # otherwise it'll complain about accessing a private feed.
+ - task: NuGetAuthenticate@1
+ displayName: 'Authenticate to AzDO Feeds'
+
+ # Signing validation will optionally work with the buildmanifest file which is downloaded from
+ # Azure DevOps above.
+ - task: PowerShell@2
+ displayName: Validate
+ inputs:
+ filePath: eng\common\sdk-task.ps1
+ arguments: -task SigningValidation -restore -msbuildEngine vs
+ /p:PackageBasePath='$(Build.ArtifactStagingDirectory)/PackageArtifacts'
+ /p:SignCheckExclusionsFile='$(Build.SourcesDirectory)/eng/SignCheckExclusionsFile.txt'
+ ${{ parameters.signingValidationAdditionalParameters }}
+
+ - template: /eng/common/core-templates/steps/publish-logs.yml
+ parameters:
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+ StageLabel: 'Validation'
+ JobLabel: 'Signing'
+ BinlogToolVersion: $(BinlogToolVersion)
+
+ - job:
+ displayName: SourceLink Validation
+ condition: eq( ${{ parameters.enableSourceLinkValidation }}, 'true')
+ pool:
+ # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
+ ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
+ name: AzurePipelines-EO
+ image: 1ESPT-Windows2022
+ demands: Cmd
+ os: windows
+ # If it's not devdiv, it's dnceng
+ ${{ else }}:
+ ${{ if eq(parameters.is1ESPipeline, true) }}:
+ name: $(DncEngInternalBuildPool)
+ image: 1es-windows-2022
+ os: windows
+ ${{ else }}:
+ name: $(DncEngInternalBuildPool)
+ demands: ImageOverride -equals windows.vs2022.amd64
+ steps:
+ - template: /eng/common/core-templates/post-build/setup-maestro-vars.yml
+ parameters:
+ BARBuildId: ${{ parameters.BARBuildId }}
+ PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+
+ - task: DownloadBuildArtifacts@0
+ displayName: Download Blob Artifacts
+ inputs:
+ buildType: specific
+ buildVersionToDownload: specific
+ project: $(AzDOProjectName)
+ pipeline: $(AzDOPipelineId)
+ buildId: $(AzDOBuildId)
+ artifactName: BlobArtifacts
+ checkDownloadedFiles: true
+
+ - task: PowerShell@2
+ displayName: Validate
+ inputs:
+ filePath: $(Build.SourcesDirectory)/eng/common/post-build/sourcelink-validation.ps1
+ arguments: -InputPath $(Build.ArtifactStagingDirectory)/BlobArtifacts/
+ -ExtractPath $(Agent.BuildDirectory)/Extract/
+ -GHRepoName $(Build.Repository.Name)
+ -GHCommit $(Build.SourceVersion)
+ -SourcelinkCliVersion $(SourceLinkCLIVersion)
+ continueOnError: true
+
+- ${{ if ne(parameters.publishAssetsImmediately, 'true') }}:
+ - stage: publish_using_darc
+ ${{ if or(eq(parameters.enableNugetValidation, 'true'), eq(parameters.enableSigningValidation, 'true'), eq(parameters.enableSourceLinkValidation, 'true'), eq(parameters.SDLValidationParameters.enable, 'true')) }}:
+ dependsOn: ${{ parameters.publishDependsOn }}
+ ${{ else }}:
+ dependsOn: ${{ parameters.validateDependsOn }}
+ displayName: Publish using Darc
+ variables:
+ - template: /eng/common/core-templates/post-build/common-variables.yml
+ - template: /eng/common/core-templates/variables/pool-providers.yml
+ parameters:
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+ jobs:
+ - job:
+ displayName: Publish Using Darc
+ timeoutInMinutes: 120
+ pool:
+ # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
+ ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
+ name: AzurePipelines-EO
+ image: 1ESPT-Windows2022
+ demands: Cmd
+ os: windows
+ # If it's not devdiv, it's dnceng
+ ${{ else }}:
+ ${{ if eq(parameters.is1ESPipeline, true) }}:
+ name: NetCore1ESPool-Publishing-Internal
+ image: windows.vs2019.amd64
+ os: windows
+ ${{ else }}:
+ name: NetCore1ESPool-Publishing-Internal
+ demands: ImageOverride -equals windows.vs2019.amd64
+ steps:
+ - template: /eng/common/core-templates/post-build/setup-maestro-vars.yml
+ parameters:
+ BARBuildId: ${{ parameters.BARBuildId }}
+ PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+
+ - task: NuGetAuthenticate@1
+
+ - task: AzureCLI@2
+ displayName: Publish Using Darc
+ inputs:
+ azureSubscription: "Darc: Maestro Production"
+ scriptType: ps
+ scriptLocation: scriptPath
+ scriptPath: $(Build.SourcesDirectory)/eng/common/post-build/publish-using-darc.ps1
+ arguments: >
+ -BuildId $(BARBuildId)
+ -PublishingInfraVersion ${{ parameters.publishingInfraVersion }}
+ -AzdoToken '$(System.AccessToken)'
+ -WaitPublishingFinish true
+ -ArtifactsPublishingAdditionalParameters '${{ parameters.artifactsPublishingAdditionalParameters }}'
+ -SymbolPublishingAdditionalParameters '${{ parameters.symbolPublishingAdditionalParameters }}'
diff --git a/eng/common/core-templates/post-build/setup-maestro-vars.yml b/eng/common/core-templates/post-build/setup-maestro-vars.yml
new file mode 100644
index 00000000000..f7602980dbe
--- /dev/null
+++ b/eng/common/core-templates/post-build/setup-maestro-vars.yml
@@ -0,0 +1,74 @@
+parameters:
+ BARBuildId: ''
+ PromoteToChannelIds: ''
+ is1ESPipeline: ''
+
+steps:
+ - ${{ if eq(parameters.is1ESPipeline, '') }}:
+ - 'Illegal entry point, is1ESPipeline is not defined. Repository yaml should not directly reference templates in core-templates folder.': error
+
+ - ${{ if eq(coalesce(parameters.PromoteToChannelIds, 0), 0) }}:
+ - task: DownloadBuildArtifacts@0
+ displayName: Download Release Configs
+ inputs:
+ buildType: current
+ artifactName: ReleaseConfigs
+ checkDownloadedFiles: true
+
+ - task: AzureCLI@2
+ name: setReleaseVars
+ displayName: Set Release Configs Vars
+ inputs:
+ azureSubscription: "Darc: Maestro Production"
+ scriptType: pscore
+ scriptLocation: inlineScript
+ inlineScript: |
+ try {
+ if (!$Env:PromoteToMaestroChannels -or $Env:PromoteToMaestroChannels.Trim() -eq '') {
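+              # ReleaseConfigs.txt layout (as consumed below): line 1 = BAR build id, line 2 = channel id(s), line 3 = stable-build flag.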
+ $Content = Get-Content $(Build.StagingDirectory)/ReleaseConfigs/ReleaseConfigs.txt
+
+ $BarId = $Content | Select -Index 0
+ $Channels = $Content | Select -Index 1
+ $IsStableBuild = $Content | Select -Index 2
+
+ $AzureDevOpsProject = $Env:System_TeamProject
+ $AzureDevOpsBuildDefinitionId = $Env:System_DefinitionId
+ $AzureDevOpsBuildId = $Env:Build_BuildId
+ }
+ else {
+ . $(Build.SourcesDirectory)\eng\common\tools.ps1
+ $darc = Get-Darc
+ $buildInfo = & $darc get-build `
+ --id ${{ parameters.BARBuildId }} `
+ --extended `
+ --output-format json `
+ --ci `
+              | ConvertFrom-Json
+
+ $BarId = ${{ parameters.BARBuildId }}
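+              # Convert the comma-separated channel list (e.g. '123,456') into the '[123][456]' form expected downstream.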
+ $Channels = $Env:PromoteToMaestroChannels -split ","
+ $Channels = $Channels -join "]["
+ $Channels = "[$Channels]"
+
+ $IsStableBuild = $buildInfo.stable
+ $AzureDevOpsProject = $buildInfo.azureDevOpsProject
+ $AzureDevOpsBuildDefinitionId = $buildInfo.azureDevOpsBuildDefinitionId
+ $AzureDevOpsBuildId = $buildInfo.azureDevOpsBuildId
+ }
+
+ Write-Host "##vso[task.setvariable variable=BARBuildId]$BarId"
+ Write-Host "##vso[task.setvariable variable=TargetChannels]$Channels"
+ Write-Host "##vso[task.setvariable variable=IsStableBuild]$IsStableBuild"
+
+ Write-Host "##vso[task.setvariable variable=AzDOProjectName]$AzureDevOpsProject"
+ Write-Host "##vso[task.setvariable variable=AzDOPipelineId]$AzureDevOpsBuildDefinitionId"
+ Write-Host "##vso[task.setvariable variable=AzDOBuildId]$AzureDevOpsBuildId"
+ }
+ catch {
+ Write-Host $_
+ Write-Host $_.Exception
+ Write-Host $_.ScriptStackTrace
+ exit 1
+ }
+ env:
+ PromoteToMaestroChannels: ${{ parameters.PromoteToChannelIds }}
diff --git a/eng/common/core-templates/steps/component-governance.yml b/eng/common/core-templates/steps/component-governance.yml
new file mode 100644
index 00000000000..cf0649aa956
--- /dev/null
+++ b/eng/common/core-templates/steps/component-governance.yml
@@ -0,0 +1,16 @@
+parameters:
+ disableComponentGovernance: false
+ componentGovernanceIgnoreDirectories: ''
+ is1ESPipeline: false
+ displayName: 'Component Detection'
+
+steps:
+- ${{ if eq(parameters.disableComponentGovernance, 'true') }}:
+ - script: echo "##vso[task.setvariable variable=skipComponentGovernanceDetection]true"
+ displayName: Set skipComponentGovernanceDetection variable
+- ${{ if ne(parameters.disableComponentGovernance, 'true') }}:
+ - task: ComponentGovernanceComponentDetection@0
+ continueOnError: true
+ displayName: ${{ parameters.displayName }}
+ inputs:
+ ignoreDirectories: ${{ parameters.componentGovernanceIgnoreDirectories }}
diff --git a/eng/common/core-templates/steps/enable-internal-runtimes.yml b/eng/common/core-templates/steps/enable-internal-runtimes.yml
new file mode 100644
index 00000000000..6bdbf62ac50
--- /dev/null
+++ b/eng/common/core-templates/steps/enable-internal-runtimes.yml
@@ -0,0 +1,32 @@
+# Obtains internal runtime download credentials and populates the 'dotnetbuilds-internal-container-read-token-base64'
+# variable (by default) with the base64-encoded SAS token.
+
+parameters:
+- name: federatedServiceConnection
+ type: string
+ default: 'dotnetbuilds-internal-read'
+- name: outputVariableName
+ type: string
+ default: 'dotnetbuilds-internal-container-read-token-base64'
+- name: expiryInHours
+ type: number
+ default: 1
+- name: base64Encode
+ type: boolean
+ default: true
+- name: is1ESPipeline
+ type: boolean
+ default: false
+
+steps:
+- ${{ if ne(variables['System.TeamProject'], 'public') }}:
+ - template: /eng/common/core-templates/steps/get-delegation-sas.yml
+ parameters:
+ federatedServiceConnection: ${{ parameters.federatedServiceConnection }}
+ outputVariableName: ${{ parameters.outputVariableName }}
+ expiryInHours: ${{ parameters.expiryInHours }}
+ base64Encode: ${{ parameters.base64Encode }}
+ storageAccount: dotnetbuilds
+ container: internal
+ permissions: rl
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
\ No newline at end of file
diff --git a/eng/common/core-templates/steps/enable-internal-sources.yml b/eng/common/core-templates/steps/enable-internal-sources.yml
new file mode 100644
index 00000000000..64f881bffc3
--- /dev/null
+++ b/eng/common/core-templates/steps/enable-internal-sources.yml
@@ -0,0 +1,47 @@
+parameters:
+# This is the Azure federated service connection that we log into to get an access token.
+- name: nugetFederatedServiceConnection
+ type: string
+ default: 'dnceng-artifacts-feeds-read'
+- name: is1ESPipeline
+ type: boolean
+ default: false
+# Legacy parameters to allow for PAT usage
+- name: legacyCredential
+ type: string
+ default: ''
+
+steps:
+- ${{ if ne(variables['System.TeamProject'], 'public') }}:
+ - ${{ if ne(parameters.legacyCredential, '') }}:
+ - task: PowerShell@2
+ displayName: Setup Internal Feeds
+ inputs:
+ filePath: $(Build.SourcesDirectory)/eng/common/SetupNugetSources.ps1
+ arguments: -ConfigFile $(Build.SourcesDirectory)/NuGet.config -Password $Env:Token
+ env:
+ Token: ${{ parameters.legacyCredential }}
+ # If running on dnceng (internal project), just use the default behavior for NuGetAuthenticate.
+ # If running on DevDiv, NuGetAuthenticate is not really an option. It's scoped to a single feed, and we have many feeds that
+ # may be added. Instead, we'll use the traditional approach (add cred to nuget.config), but use an account token.
+ - ${{ else }}:
+ - ${{ if eq(variables['System.TeamProject'], 'internal') }}:
+ - task: PowerShell@2
+ displayName: Setup Internal Feeds
+ inputs:
+ filePath: $(Build.SourcesDirectory)/eng/common/SetupNugetSources.ps1
+ arguments: -ConfigFile $(Build.SourcesDirectory)/NuGet.config
+ - ${{ else }}:
+ - template: /eng/common/templates/steps/get-federated-access-token.yml
+ parameters:
+ federatedServiceConnection: ${{ parameters.nugetFederatedServiceConnection }}
+ outputVariableName: 'dnceng-artifacts-feeds-read-access-token'
+ - task: PowerShell@2
+ displayName: Setup Internal Feeds
+ inputs:
+ filePath: $(Build.SourcesDirectory)/eng/common/SetupNugetSources.ps1
+ arguments: -ConfigFile $(Build.SourcesDirectory)/NuGet.config -Password $(dnceng-artifacts-feeds-read-access-token)
+ # This is required in certain scenarios to install the ADO credential provider.
+  # It is installed by default in some msbuild invocations (e.g. VS msbuild), but needs to be installed for others
+ # (e.g. dotnet msbuild).
+ - task: NuGetAuthenticate@1
diff --git a/eng/common/core-templates/steps/generate-sbom.yml b/eng/common/core-templates/steps/generate-sbom.yml
new file mode 100644
index 00000000000..d938b60e1bb
--- /dev/null
+++ b/eng/common/core-templates/steps/generate-sbom.yml
@@ -0,0 +1,54 @@
+# BuildDropPath - The root folder of the drop directory for which the manifest file will be generated.
+# PackageName - The name of the package this SBOM represents.
+# PackageVersion - The version of the package this SBOM represents.
+# ManifestDirPath - The path of the directory where the generated manifest files will be placed
+# IgnoreDirectories - Directories to ignore for SBOM generation. This will be passed through to the CG component detector.
+
+parameters:
+ PackageVersion: 9.0.0
+ BuildDropPath: '$(Build.SourcesDirectory)/artifacts'
+ PackageName: '.NET'
+ ManifestDirPath: $(Build.ArtifactStagingDirectory)/sbom
+ IgnoreDirectories: ''
+ sbomContinueOnError: true
+ is1ESPipeline: false
+ # disable publishArtifacts if some other step is publishing the artifacts (like job.yml).
+ publishArtifacts: true
+
+steps:
+- task: PowerShell@2
+  displayName: Prep for SBOM generation (non-Linux)
+ condition: or(eq(variables['Agent.Os'], 'Windows_NT'), eq(variables['Agent.Os'], 'Darwin'))
+ inputs:
+ filePath: ./eng/common/generate-sbom-prep.ps1
+ arguments: ${{parameters.manifestDirPath}}
+
+# Chmodding is a workaround for https://github.com/dotnet/arcade/issues/8461
+- script: |
+ chmod +x ./eng/common/generate-sbom-prep.sh
+ ./eng/common/generate-sbom-prep.sh ${{parameters.manifestDirPath}}
+  displayName: Prep for SBOM generation (Linux)
+ condition: eq(variables['Agent.Os'], 'Linux')
+ continueOnError: ${{ parameters.sbomContinueOnError }}
+
+- task: AzureArtifacts.manifest-generator-task.manifest-generator-task.ManifestGeneratorTask@0
+ displayName: 'Generate SBOM manifest'
+ continueOnError: ${{ parameters.sbomContinueOnError }}
+ inputs:
+ PackageName: ${{ parameters.packageName }}
+ BuildDropPath: ${{ parameters.buildDropPath }}
+ PackageVersion: ${{ parameters.packageVersion }}
+ ManifestDirPath: ${{ parameters.manifestDirPath }}
+ ${{ if ne(parameters.IgnoreDirectories, '') }}:
+ AdditionalComponentDetectorArgs: '--IgnoreDirectories ${{ parameters.IgnoreDirectories }}'
+
+- ${{ if eq(parameters.publishArtifacts, 'true') }}:
+ - template: /eng/common/core-templates/steps/publish-pipeline-artifacts.yml
+ parameters:
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+ args:
+ displayName: Publish SBOM manifest
+ continueOnError: ${{parameters.sbomContinueOnError}}
+ targetPath: '${{ parameters.manifestDirPath }}'
+ artifactName: $(ARTIFACT_NAME)
+
diff --git a/eng/common/core-templates/steps/get-delegation-sas.yml b/eng/common/core-templates/steps/get-delegation-sas.yml
new file mode 100644
index 00000000000..d2901470a7f
--- /dev/null
+++ b/eng/common/core-templates/steps/get-delegation-sas.yml
@@ -0,0 +1,46 @@
+parameters:
+- name: federatedServiceConnection
+ type: string
+- name: outputVariableName
+ type: string
+- name: expiryInHours
+ type: number
+ default: 1
+- name: base64Encode
+ type: boolean
+ default: false
+- name: storageAccount
+ type: string
+- name: container
+ type: string
+- name: permissions
+ type: string
+ default: 'rl'
+- name: is1ESPipeline
+ type: boolean
+ default: false
+
+steps:
+- task: AzureCLI@2
+ displayName: 'Generate delegation SAS Token for ${{ parameters.storageAccount }}/${{ parameters.container }}'
+ inputs:
+ azureSubscription: ${{ parameters.federatedServiceConnection }}
+ scriptType: 'pscore'
+ scriptLocation: 'inlineScript'
+ inlineScript: |
+ # Calculate the expiration of the SAS token and convert to UTC
+ $expiry = (Get-Date).AddHours(${{ parameters.expiryInHours }}).ToUniversalTime().ToString("yyyy-MM-ddTHH:mm:ssZ")
+
+ $sas = az storage container generate-sas --account-name ${{ parameters.storageAccount }} --name ${{ parameters.container }} --permissions ${{ parameters.permissions }} --expiry $expiry --auth-mode login --as-user -o tsv
+
+ if ($LASTEXITCODE -ne 0) {
+ Write-Error "Failed to generate SAS token."
+ exit 1
+ }
+
+ if ('${{ parameters.base64Encode }}' -eq 'true') {
+ $sas = [Convert]::ToBase64String([System.Text.Encoding]::UTF8.GetBytes($sas))
+ }
+
+ Write-Host "Setting '${{ parameters.outputVariableName }}' with the access token value"
+ Write-Host "##vso[task.setvariable variable=${{ parameters.outputVariableName }};issecret=true]$sas"
diff --git a/eng/common/core-templates/steps/get-federated-access-token.yml b/eng/common/core-templates/steps/get-federated-access-token.yml
new file mode 100644
index 00000000000..3a4d4410c48
--- /dev/null
+++ b/eng/common/core-templates/steps/get-federated-access-token.yml
@@ -0,0 +1,42 @@
+parameters:
+- name: federatedServiceConnection
+ type: string
+- name: outputVariableName
+ type: string
+- name: is1ESPipeline
+ type: boolean
+- name: stepName
+ type: string
+ default: 'getFederatedAccessToken'
+- name: condition
+ type: string
+ default: ''
+# Resource to get a token for. Common values include:
+# - '499b84ac-1321-427f-aa17-267ca6975798' for Azure DevOps
+# - 'https://storage.azure.com/' for storage
+# Defaults to Azure DevOps
+- name: resource
+ type: string
+ default: '499b84ac-1321-427f-aa17-267ca6975798'
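+# If true, the variable is also set as a step output (isOutput=true), so other jobs can reference it via the step name.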
+- name: isStepOutputVariable
+ type: boolean
+ default: false
+
+steps:
+- task: AzureCLI@2
+ displayName: 'Getting federated access token for feeds'
+ name: ${{ parameters.stepName }}
+ ${{ if ne(parameters.condition, '') }}:
+ condition: ${{ parameters.condition }}
+ inputs:
+ azureSubscription: ${{ parameters.federatedServiceConnection }}
+ scriptType: 'pscore'
+ scriptLocation: 'inlineScript'
+ inlineScript: |
+ $accessToken = az account get-access-token --query accessToken --resource ${{ parameters.resource }} --output tsv
+ if ($LASTEXITCODE -ne 0) {
+ Write-Error "Failed to get access token for resource '${{ parameters.resource }}'"
+ exit 1
+ }
+ Write-Host "Setting '${{ parameters.outputVariableName }}' with the access token value"
+ Write-Host "##vso[task.setvariable variable=${{ parameters.outputVariableName }};issecret=true;isOutput=${{ parameters.isStepOutputVariable }}]$accessToken"
\ No newline at end of file
diff --git a/eng/common/core-templates/steps/publish-build-artifacts.yml b/eng/common/core-templates/steps/publish-build-artifacts.yml
new file mode 100644
index 00000000000..f24ce346684
--- /dev/null
+++ b/eng/common/core-templates/steps/publish-build-artifacts.yml
@@ -0,0 +1,20 @@
+parameters:
+- name: is1ESPipeline
+ type: boolean
+ default: false
+- name: args
+ type: object
+ default: {}
+steps:
+- ${{ if ne(parameters.is1ESPipeline, true) }}:
+ - template: /eng/common/templates/steps/publish-build-artifacts.yml
+ parameters:
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+ ${{ each parameter in parameters.args }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
+- ${{ else }}:
+ - template: /eng/common/templates-official/steps/publish-build-artifacts.yml
+ parameters:
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+ ${{ each parameter in parameters.args }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
\ No newline at end of file
diff --git a/eng/common/core-templates/steps/publish-logs.yml b/eng/common/core-templates/steps/publish-logs.yml
new file mode 100644
index 00000000000..80788c52319
--- /dev/null
+++ b/eng/common/core-templates/steps/publish-logs.yml
@@ -0,0 +1,58 @@
+parameters:
+ StageLabel: ''
+ JobLabel: ''
+ CustomSensitiveDataList: ''
+  # A default, in case the value from eng/common/core-templates/post-build/common-variables.yml is not passed
+ BinlogToolVersion: '1.0.11'
+ is1ESPipeline: false
+
+steps:
+- task: PowerShell@2
+ displayName: Prepare Binlogs to Upload
+ inputs:
+ targetType: inline
+ script: |
+ New-Item -ItemType Directory $(Build.SourcesDirectory)/PostBuildLogs/${{parameters.StageLabel}}/${{parameters.JobLabel}}/
+ Move-Item -Path $(Build.SourcesDirectory)/artifacts/log/Debug/* $(Build.SourcesDirectory)/PostBuildLogs/${{parameters.StageLabel}}/${{parameters.JobLabel}}/
+ continueOnError: true
+ condition: always()
+
+- task: PowerShell@2
+ displayName: Redact Logs
+ inputs:
+ filePath: $(Build.SourcesDirectory)/eng/common/post-build/redact-logs.ps1
+    # For now this needs an explicit list of all sensitive data. Taken from eng/publishing/v3/publish.yml.
+    # Sensitive data can also be added to '$(Build.SourcesDirectory)/eng/BinlogSecretsRedactionFile.txt';
+    # if the file exists, sensitive data for redaction will be sourced from it
+    # (single entry per line; lines starting with '# ' are considered comments and skipped).
+ arguments: -InputPath '$(Build.SourcesDirectory)/PostBuildLogs'
+ -BinlogToolVersion ${{parameters.BinlogToolVersion}}
+ -TokensFilePath '$(Build.SourcesDirectory)/eng/BinlogSecretsRedactionFile.txt'
+ '$(publishing-dnceng-devdiv-code-r-build-re)'
+ '$(MaestroAccessToken)'
+ '$(dn-bot-all-orgs-artifact-feeds-rw)'
+ '$(akams-client-id)'
+ '$(microsoft-symbol-server-pat)'
+ '$(symweb-symbol-server-pat)'
+ '$(dn-bot-all-orgs-build-rw-code-rw)'
+ ${{parameters.CustomSensitiveDataList}}
+ continueOnError: true
+ condition: always()
+
+- task: CopyFiles@2
+ displayName: Gather post build logs
+ inputs:
+ SourceFolder: '$(Build.SourcesDirectory)/PostBuildLogs'
+ Contents: '**'
+ TargetFolder: '$(Build.ArtifactStagingDirectory)/PostBuildLogs'
+
+- template: /eng/common/core-templates/steps/publish-build-artifacts.yml
+ parameters:
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+ args:
+ displayName: Publish Logs
+ pathToPublish: '$(Build.ArtifactStagingDirectory)/PostBuildLogs'
+ publishLocation: Container
+ artifactName: PostBuildLogs
+ continueOnError: true
+ condition: always()
diff --git a/eng/common/core-templates/steps/publish-pipeline-artifacts.yml b/eng/common/core-templates/steps/publish-pipeline-artifacts.yml
new file mode 100644
index 00000000000..2efec04dc2c
--- /dev/null
+++ b/eng/common/core-templates/steps/publish-pipeline-artifacts.yml
@@ -0,0 +1,20 @@
+parameters:
+- name: is1ESPipeline
+ type: boolean
+ default: false
+
+- name: args
+ type: object
+ default: {}
+
+steps:
+- ${{ if ne(parameters.is1ESPipeline, true) }}:
+ - template: /eng/common/templates/steps/publish-pipeline-artifacts.yml
+ parameters:
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
+- ${{ else }}:
+ - template: /eng/common/templates-official/steps/publish-pipeline-artifacts.yml
+ parameters:
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
diff --git a/eng/common/core-templates/steps/retain-build.yml b/eng/common/core-templates/steps/retain-build.yml
new file mode 100644
index 00000000000..83d97a26a01
--- /dev/null
+++ b/eng/common/core-templates/steps/retain-build.yml
@@ -0,0 +1,28 @@
+parameters:
+  # Optional Azure DevOps PAT with build execute permissions for the build's organization,
+ # only needed if the build that should be retained ran on a different organization than
+ # the pipeline where this template is executing from
+ Token: ''
+ # Optional BuildId to retain, defaults to the current running build
+ BuildId: ''
+  # Azure DevOps organization URI for the build, in the https://dev.azure.com/ format.
+ # Defaults to the organization the current pipeline is running on
+ AzdoOrgUri: '$(System.CollectionUri)'
+  # Azure DevOps project for the build. Defaults to the project the current pipeline is running on
+ AzdoProject: '$(System.TeamProject)'
+
+steps:
+  - task: PowerShell@2
+ inputs:
+ targetType: 'filePath'
+ filePath: eng/common/retain-build.ps1
+ pwsh: true
+ arguments: >
+        -AzdoOrgUri ${{parameters.AzdoOrgUri}}
+ -AzdoProject ${{parameters.AzdoProject}}
+ -Token ${{coalesce(parameters.Token, '$env:SYSTEM_ACCESSTOKEN') }}
+ -BuildId ${{coalesce(parameters.BuildId, '$env:BUILD_ID')}}
+ displayName: Enable permanent build retention
+ env:
+ SYSTEM_ACCESSTOKEN: $(System.AccessToken)
+ BUILD_ID: $(Build.BuildId)
\ No newline at end of file
diff --git a/eng/common/core-templates/steps/send-to-helix.yml b/eng/common/core-templates/steps/send-to-helix.yml
new file mode 100644
index 00000000000..68fa739c4ab
--- /dev/null
+++ b/eng/common/core-templates/steps/send-to-helix.yml
@@ -0,0 +1,93 @@
+# Please remember to update the documentation if you make changes to these parameters!
+parameters:
+ HelixSource: 'pr/default' # required -- sources must start with pr/, official/, prodcon/, or agent/
+ HelixType: 'tests/default/' # required -- Helix telemetry which identifies what type of data this is; should include "test" for clarity and must end in '/'
+ HelixBuild: $(Build.BuildNumber) # required -- the build number Helix will use to identify this -- automatically set to the AzDO build number
+ HelixTargetQueues: '' # required -- semicolon-delimited list of Helix queues to test on; see https://helix.dot.net/ for a list of queues
+ HelixAccessToken: '' # required -- access token to make Helix API requests; should be provided by the appropriate variable group
+ HelixProjectPath: 'eng/common/helixpublish.proj' # optional -- path to the project file to build relative to BUILD_SOURCESDIRECTORY
+ HelixProjectArguments: '' # optional -- arguments passed to the build command
+ HelixConfiguration: '' # optional -- additional property attached to a job
+ HelixPreCommands: '' # optional -- commands to run before Helix work item execution
+ HelixPostCommands: '' # optional -- commands to run after Helix work item execution
+ WorkItemDirectory: '' # optional -- a payload directory to zip up and send to Helix; requires WorkItemCommand; incompatible with XUnitProjects
+ WorkItemCommand: '' # optional -- a command to execute on the payload; requires WorkItemDirectory; incompatible with XUnitProjects
+ WorkItemTimeout: '' # optional -- a timeout in TimeSpan.Parse-ready value (e.g. 00:02:00) for the work item command; requires WorkItemDirectory; incompatible with XUnitProjects
+ CorrelationPayloadDirectory: '' # optional -- a directory to zip up and send to Helix as a correlation payload
+ XUnitProjects: '' # optional -- semicolon-delimited list of XUnitProjects to parse and send to Helix; requires XUnitRuntimeTargetFramework, XUnitPublishTargetFramework, XUnitRunnerVersion, and IncludeDotNetCli=true
+ XUnitWorkItemTimeout: '' # optional -- the workitem timeout in seconds for all workitems created from the xUnit projects specified by XUnitProjects
+ XUnitPublishTargetFramework: '' # optional -- framework to use to publish your xUnit projects
+ XUnitRuntimeTargetFramework: '' # optional -- framework to use for the xUnit console runner
+ XUnitRunnerVersion: '' # optional -- version of the xUnit nuget package you wish to use on Helix; required for XUnitProjects
+ IncludeDotNetCli: false # optional -- true will download a version of the .NET CLI onto the Helix machine as a correlation payload; requires DotNetCliPackageType and DotNetCliVersion
+ DotNetCliPackageType: '' # optional -- either 'sdk', 'runtime' or 'aspnetcore-runtime'; determines whether the sdk or runtime will be sent to Helix; see https://raw.githubusercontent.com/dotnet/core/main/release-notes/releases-index.json
+ DotNetCliVersion: '' # optional -- version of the CLI to send to Helix; based on this: https://raw.githubusercontent.com/dotnet/core/main/release-notes/releases-index.json
+ WaitForWorkItemCompletion: true # optional -- true will make the task wait until work items have been completed and fail the build if work items fail. False is "fire and forget."
+ IsExternal: false # [DEPRECATED] -- doesn't do anything, jobs are external if HelixAccessToken is empty and Creator is set
+ HelixBaseUri: 'https://helix.dot.net/' # optional -- sets the Helix API base URI (allows targeting https://helix.int-dot.net )
+ Creator: '' # optional -- if the build is external, use this to specify who is sending the job
+ DisplayNamePrefix: 'Run Tests' # optional -- rename the beginning of the displayName of the steps in AzDO
+ condition: succeeded() # optional -- condition for step to execute; defaults to succeeded()
+ continueOnError: false # optional -- determines whether to continue the build if the step errors; defaults to false
+
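+# Example invocation from a repo pipeline (illustrative values only; the queue and payload names are hypothetical):
+#   - template: /eng/common/core-templates/steps/send-to-helix.yml
+#     parameters:
+#       HelixTargetQueues: 'Windows.10.Amd64.Open'
+#       Creator: 'dotnet-bot'
+#       WorkItemDirectory: '$(Build.SourcesDirectory)/artifacts/helix'
+#       WorkItemCommand: 'run-tests.cmd'
+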
+steps:
+ - powershell: 'powershell "$env:BUILD_SOURCESDIRECTORY\eng\common\msbuild.ps1 $env:BUILD_SOURCESDIRECTORY/${{ parameters.HelixProjectPath }} /restore /p:TreatWarningsAsErrors=false ${{ parameters.HelixProjectArguments }} /t:Test /bl:$env:BUILD_SOURCESDIRECTORY\artifacts\log\$env:BuildConfig\SendToHelix.binlog"'
+ displayName: ${{ parameters.DisplayNamePrefix }} (Windows)
+ env:
+ BuildConfig: $(_BuildConfig)
+ HelixSource: ${{ parameters.HelixSource }}
+ HelixType: ${{ parameters.HelixType }}
+ HelixBuild: ${{ parameters.HelixBuild }}
+ HelixConfiguration: ${{ parameters.HelixConfiguration }}
+ HelixTargetQueues: ${{ parameters.HelixTargetQueues }}
+ HelixAccessToken: ${{ parameters.HelixAccessToken }}
+ HelixPreCommands: ${{ parameters.HelixPreCommands }}
+ HelixPostCommands: ${{ parameters.HelixPostCommands }}
+ WorkItemDirectory: ${{ parameters.WorkItemDirectory }}
+ WorkItemCommand: ${{ parameters.WorkItemCommand }}
+ WorkItemTimeout: ${{ parameters.WorkItemTimeout }}
+ CorrelationPayloadDirectory: ${{ parameters.CorrelationPayloadDirectory }}
+ XUnitProjects: ${{ parameters.XUnitProjects }}
+ XUnitWorkItemTimeout: ${{ parameters.XUnitWorkItemTimeout }}
+ XUnitPublishTargetFramework: ${{ parameters.XUnitPublishTargetFramework }}
+ XUnitRuntimeTargetFramework: ${{ parameters.XUnitRuntimeTargetFramework }}
+ XUnitRunnerVersion: ${{ parameters.XUnitRunnerVersion }}
+ IncludeDotNetCli: ${{ parameters.IncludeDotNetCli }}
+ DotNetCliPackageType: ${{ parameters.DotNetCliPackageType }}
+ DotNetCliVersion: ${{ parameters.DotNetCliVersion }}
+ WaitForWorkItemCompletion: ${{ parameters.WaitForWorkItemCompletion }}
+ HelixBaseUri: ${{ parameters.HelixBaseUri }}
+ Creator: ${{ parameters.Creator }}
+ SYSTEM_ACCESSTOKEN: $(System.AccessToken)
+ condition: and(${{ parameters.condition }}, eq(variables['Agent.Os'], 'Windows_NT'))
+ continueOnError: ${{ parameters.continueOnError }}
+ - script: $BUILD_SOURCESDIRECTORY/eng/common/msbuild.sh $BUILD_SOURCESDIRECTORY/${{ parameters.HelixProjectPath }} /restore /p:TreatWarningsAsErrors=false ${{ parameters.HelixProjectArguments }} /t:Test /bl:$BUILD_SOURCESDIRECTORY/artifacts/log/$BuildConfig/SendToHelix.binlog
+ displayName: ${{ parameters.DisplayNamePrefix }} (Unix)
+ env:
+ BuildConfig: $(_BuildConfig)
+ HelixSource: ${{ parameters.HelixSource }}
+ HelixType: ${{ parameters.HelixType }}
+ HelixBuild: ${{ parameters.HelixBuild }}
+ HelixConfiguration: ${{ parameters.HelixConfiguration }}
+ HelixTargetQueues: ${{ parameters.HelixTargetQueues }}
+ HelixAccessToken: ${{ parameters.HelixAccessToken }}
+ HelixPreCommands: ${{ parameters.HelixPreCommands }}
+ HelixPostCommands: ${{ parameters.HelixPostCommands }}
+ WorkItemDirectory: ${{ parameters.WorkItemDirectory }}
+ WorkItemCommand: ${{ parameters.WorkItemCommand }}
+ WorkItemTimeout: ${{ parameters.WorkItemTimeout }}
+ CorrelationPayloadDirectory: ${{ parameters.CorrelationPayloadDirectory }}
+ XUnitProjects: ${{ parameters.XUnitProjects }}
+ XUnitWorkItemTimeout: ${{ parameters.XUnitWorkItemTimeout }}
+ XUnitPublishTargetFramework: ${{ parameters.XUnitPublishTargetFramework }}
+ XUnitRuntimeTargetFramework: ${{ parameters.XUnitRuntimeTargetFramework }}
+ XUnitRunnerVersion: ${{ parameters.XUnitRunnerVersion }}
+ IncludeDotNetCli: ${{ parameters.IncludeDotNetCli }}
+ DotNetCliPackageType: ${{ parameters.DotNetCliPackageType }}
+ DotNetCliVersion: ${{ parameters.DotNetCliVersion }}
+ WaitForWorkItemCompletion: ${{ parameters.WaitForWorkItemCompletion }}
+ HelixBaseUri: ${{ parameters.HelixBaseUri }}
+ Creator: ${{ parameters.Creator }}
+ SYSTEM_ACCESSTOKEN: $(System.AccessToken)
+ condition: and(${{ parameters.condition }}, ne(variables['Agent.Os'], 'Windows_NT'))
+ continueOnError: ${{ parameters.continueOnError }}
diff --git a/eng/common/core-templates/steps/source-build.yml b/eng/common/core-templates/steps/source-build.yml
new file mode 100644
index 00000000000..2915d29bb7f
--- /dev/null
+++ b/eng/common/core-templates/steps/source-build.yml
@@ -0,0 +1,129 @@
+parameters:
+ # This template adds arcade-powered source-build to CI.
+
+ # This is a 'steps' template, and is intended for advanced scenarios where the existing build
+ # infra has a careful build methodology that must be followed. For example, a repo
+ # (dotnet/runtime) might choose to clone the GitHub repo only once and store it as a pipeline
+ # artifact for all subsequent jobs to use, to reduce dependence on a strong network connection to
+ # GitHub. Using this steps template leaves room for that infra to be included.
+
+ # Defines the platform on which to run the steps. See 'eng/common/core-templates/job/source-build.yml'
+ # for details. The entire object is described in the 'job' template for simplicity, even though
+ # the usage of the properties on this object is split between the 'job' and 'steps' templates.
+ platform: {}
+ is1ESPipeline: false
+
+steps:
+# Build. Keep it self-contained for simple reusability. (No source-build-specific job variables.)
+- script: |
+ set -x
+ df -h
+
+ # If file changes are detected, set CopyWipIntoInnerSourceBuildRepo to copy the WIP changes into the inner source build repo.
+ internalRestoreArgs=
+ if ! git diff --quiet; then
+ internalRestoreArgs='/p:CopyWipIntoInnerSourceBuildRepo=true'
+ # The 'Copy WIP' feature of source build uses git stash to apply changes from the original repo.
+ # This only works if there is a username/email configured, which won't be the case in most CI runs.
+ git config --get user.email
+ if [ $? -ne 0 ]; then
+ git config user.email dn-bot@microsoft.com
+ git config user.name dn-bot
+ fi
+ fi
+
+ # If building on the internal project, the internal storage variable may be available (usually only if needed)
+ # In that case, add variables to allow the download of internal runtimes if the specified versions are not found
+ # in the default public locations.
+ internalRuntimeDownloadArgs=
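+    # (If the variable was not defined in the pipeline, AzDO leaves the literal '$(name)' text in place; the comparison below detects that.)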
+ if [ '$(dotnetbuilds-internal-container-read-token-base64)' != '$''(dotnetbuilds-internal-container-read-token-base64)' ]; then
+ internalRuntimeDownloadArgs='/p:DotNetRuntimeSourceFeed=https://dotnetbuilds.blob.core.windows.net/internal /p:DotNetRuntimeSourceFeedKey=$(dotnetbuilds-internal-container-read-token-base64) --runtimesourcefeed https://dotnetbuilds.blob.core.windows.net/internal --runtimesourcefeedkey $(dotnetbuilds-internal-container-read-token-base64)'
+ fi
+
+ buildConfig=Release
+ # Check if AzDO substitutes in a build config from a variable, and use it if so.
+ if [ '$(_BuildConfig)' != '$''(_BuildConfig)' ]; then
+ buildConfig='$(_BuildConfig)'
+ fi
+
+ officialBuildArgs=
+ if [ '${{ and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}' = 'True' ]; then
+ officialBuildArgs='/p:DotNetPublishUsingPipelines=true /p:OfficialBuildId=$(BUILD.BUILDNUMBER)'
+ fi
+
+ targetRidArgs=
+ if [ '${{ parameters.platform.targetRID }}' != '' ]; then
+ targetRidArgs='/p:TargetRid=${{ parameters.platform.targetRID }}'
+ fi
+
+ runtimeOsArgs=
+ if [ '${{ parameters.platform.runtimeOS }}' != '' ]; then
+ runtimeOsArgs='/p:RuntimeOS=${{ parameters.platform.runtimeOS }}'
+ fi
+
+ baseOsArgs=
+ if [ '${{ parameters.platform.baseOS }}' != '' ]; then
+ baseOsArgs='/p:BaseOS=${{ parameters.platform.baseOS }}'
+ fi
+
+ publishArgs=
+ if [ '${{ parameters.platform.skipPublishValidation }}' != 'true' ]; then
+ publishArgs='--publish'
+ fi
+
+ assetManifestFileName=SourceBuild_RidSpecific.xml
+ if [ '${{ parameters.platform.name }}' != '' ]; then
+ assetManifestFileName=SourceBuild_${{ parameters.platform.name }}.xml
+ fi
+
+ ${{ coalesce(parameters.platform.buildScript, './build.sh') }} --ci \
+ --configuration $buildConfig \
+ --restore --build --pack $publishArgs -bl \
+ $officialBuildArgs \
+ $internalRuntimeDownloadArgs \
+ $internalRestoreArgs \
+ $targetRidArgs \
+ $runtimeOsArgs \
+ $baseOsArgs \
+ /p:SourceBuildNonPortable=${{ parameters.platform.nonPortable }} \
+ /p:ArcadeBuildFromSource=true \
+ /p:DotNetBuildSourceOnly=true \
+ /p:DotNetBuildRepo=true \
+ /p:AssetManifestFileName=$assetManifestFileName
+ displayName: Build
+
+# Upload build logs for diagnosis.
+- task: CopyFiles@2
+ displayName: Prepare BuildLogs staging directory
+ inputs:
+ SourceFolder: '$(Build.SourcesDirectory)'
+ Contents: |
+ **/*.log
+ **/*.binlog
+ artifacts/sb/prebuilt-report/**
+ TargetFolder: '$(Build.StagingDirectory)/BuildLogs'
+ CleanTargetFolder: true
+ continueOnError: true
+ condition: succeededOrFailed()
+
+- template: /eng/common/core-templates/steps/publish-pipeline-artifacts.yml
+ parameters:
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+ args:
+ displayName: Publish BuildLogs
+ targetPath: '$(Build.StagingDirectory)/BuildLogs'
+ artifactName: BuildLogs_SourceBuild_${{ parameters.platform.name }}_Attempt$(System.JobAttempt)
+ continueOnError: true
+ condition: succeededOrFailed()
+ sbomEnabled: false # we don't need SBOM for logs
+
+# Manually inject component detection so that we can ignore the source build upstream cache, which contains
+# a nupkg cache of input packages (a local feed).
+# This path must match the upstream cache path in property 'CurrentRepoSourceBuiltNupkgCacheDir'
+# in src\Microsoft.DotNet.Arcade.Sdk\tools\SourceBuild\SourceBuildArcade.targets
+- template: /eng/common/core-templates/steps/component-governance.yml
+ parameters:
+ displayName: Component Detection (Exclude upstream cache)
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+ componentGovernanceIgnoreDirectories: '$(Build.SourcesDirectory)/artifacts/sb/src/artifacts/obj/source-built-upstream-cache'
+ disableComponentGovernance: ${{ eq(variables['System.TeamProject'], 'public') }}
diff --git a/eng/common/core-templates/variables/pool-providers.yml b/eng/common/core-templates/variables/pool-providers.yml
new file mode 100644
index 00000000000..41053d382a2
--- /dev/null
+++ b/eng/common/core-templates/variables/pool-providers.yml
@@ -0,0 +1,8 @@
+parameters:
+ is1ESPipeline: false
+
+variables:
+ - ${{ if eq(parameters.is1ESPipeline, 'true') }}:
+ - template: /eng/common/templates-official/variables/pool-providers.yml
+ - ${{ else }}:
+ - template: /eng/common/templates/variables/pool-providers.yml
\ No newline at end of file
diff --git a/eng/common/cross/arm/sources.list.bionic b/eng/common/cross/arm/sources.list.bionic
deleted file mode 100644
index 21095574095..00000000000
--- a/eng/common/cross/arm/sources.list.bionic
+++ /dev/null
@@ -1,11 +0,0 @@
-deb http://ports.ubuntu.com/ubuntu-ports/ bionic main restricted universe
-deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic main restricted universe
-
-deb http://ports.ubuntu.com/ubuntu-ports/ bionic-updates main restricted universe
-deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic-updates main restricted universe
-
-deb http://ports.ubuntu.com/ubuntu-ports/ bionic-backports main restricted
-deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic-backports main restricted
-
-deb http://ports.ubuntu.com/ubuntu-ports/ bionic-security main restricted universe multiverse
-deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic-security main restricted universe multiverse
diff --git a/eng/common/cross/arm/sources.list.focal b/eng/common/cross/arm/sources.list.focal
deleted file mode 100644
index 4de2600c174..00000000000
--- a/eng/common/cross/arm/sources.list.focal
+++ /dev/null
@@ -1,11 +0,0 @@
-deb http://ports.ubuntu.com/ubuntu-ports/ focal main restricted universe
-deb-src http://ports.ubuntu.com/ubuntu-ports/ focal main restricted universe
-
-deb http://ports.ubuntu.com/ubuntu-ports/ focal-updates main restricted universe
-deb-src http://ports.ubuntu.com/ubuntu-ports/ focal-updates main restricted universe
-
-deb http://ports.ubuntu.com/ubuntu-ports/ focal-backports main restricted
-deb-src http://ports.ubuntu.com/ubuntu-ports/ focal-backports main restricted
-
-deb http://ports.ubuntu.com/ubuntu-ports/ focal-security main restricted universe multiverse
-deb-src http://ports.ubuntu.com/ubuntu-ports/ focal-security main restricted universe multiverse
diff --git a/eng/common/cross/arm/sources.list.jammy b/eng/common/cross/arm/sources.list.jammy
deleted file mode 100644
index 6bb0453029c..00000000000
--- a/eng/common/cross/arm/sources.list.jammy
+++ /dev/null
@@ -1,11 +0,0 @@
-deb http://ports.ubuntu.com/ubuntu-ports/ jammy main restricted universe
-deb-src http://ports.ubuntu.com/ubuntu-ports/ jammy main restricted universe
-
-deb http://ports.ubuntu.com/ubuntu-ports/ jammy-updates main restricted universe
-deb-src http://ports.ubuntu.com/ubuntu-ports/ jammy-updates main restricted universe
-
-deb http://ports.ubuntu.com/ubuntu-ports/ jammy-backports main restricted
-deb-src http://ports.ubuntu.com/ubuntu-ports/ jammy-backports main restricted
-
-deb http://ports.ubuntu.com/ubuntu-ports/ jammy-security main restricted universe multiverse
-deb-src http://ports.ubuntu.com/ubuntu-ports/ jammy-security main restricted universe multiverse
diff --git a/eng/common/cross/arm/sources.list.jessie b/eng/common/cross/arm/sources.list.jessie
deleted file mode 100644
index 4d142ac9b10..00000000000
--- a/eng/common/cross/arm/sources.list.jessie
+++ /dev/null
@@ -1,3 +0,0 @@
-# Debian (sid) # UNSTABLE
-deb http://ftp.debian.org/debian/ sid main contrib non-free
-deb-src http://ftp.debian.org/debian/ sid main contrib non-free
diff --git a/eng/common/cross/arm/sources.list.xenial b/eng/common/cross/arm/sources.list.xenial
deleted file mode 100644
index 56fbb36a59f..00000000000
--- a/eng/common/cross/arm/sources.list.xenial
+++ /dev/null
@@ -1,11 +0,0 @@
-deb http://ports.ubuntu.com/ubuntu-ports/ xenial main restricted universe
-deb-src http://ports.ubuntu.com/ubuntu-ports/ xenial main restricted universe
-
-deb http://ports.ubuntu.com/ubuntu-ports/ xenial-updates main restricted universe
-deb-src http://ports.ubuntu.com/ubuntu-ports/ xenial-updates main restricted universe
-
-deb http://ports.ubuntu.com/ubuntu-ports/ xenial-backports main restricted
-deb-src http://ports.ubuntu.com/ubuntu-ports/ xenial-backports main restricted
-
-deb http://ports.ubuntu.com/ubuntu-ports/ xenial-security main restricted universe multiverse
-deb-src http://ports.ubuntu.com/ubuntu-ports/ xenial-security main restricted universe multiverse
diff --git a/eng/common/cross/arm/sources.list.zesty b/eng/common/cross/arm/sources.list.zesty
deleted file mode 100644
index ea2c14a7874..00000000000
--- a/eng/common/cross/arm/sources.list.zesty
+++ /dev/null
@@ -1,11 +0,0 @@
-deb http://ports.ubuntu.com/ubuntu-ports/ zesty main restricted universe
-deb-src http://ports.ubuntu.com/ubuntu-ports/ zesty main restricted universe
-
-deb http://ports.ubuntu.com/ubuntu-ports/ zesty-updates main restricted universe
-deb-src http://ports.ubuntu.com/ubuntu-ports/ zesty-updates main restricted universe
-
-deb http://ports.ubuntu.com/ubuntu-ports/ zesty-backports main restricted
-deb-src http://ports.ubuntu.com/ubuntu-ports/ zesty-backports main restricted
-
-deb http://ports.ubuntu.com/ubuntu-ports/ zesty-security main restricted universe multiverse
-deb-src http://ports.ubuntu.com/ubuntu-ports/ zesty-security main restricted universe multiverse
diff --git a/eng/common/cross/arm64/sources.list.bionic b/eng/common/cross/arm64/sources.list.bionic
deleted file mode 100644
index 21095574095..00000000000
--- a/eng/common/cross/arm64/sources.list.bionic
+++ /dev/null
@@ -1,11 +0,0 @@
-deb http://ports.ubuntu.com/ubuntu-ports/ bionic main restricted universe
-deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic main restricted universe
-
-deb http://ports.ubuntu.com/ubuntu-ports/ bionic-updates main restricted universe
-deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic-updates main restricted universe
-
-deb http://ports.ubuntu.com/ubuntu-ports/ bionic-backports main restricted
-deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic-backports main restricted
-
-deb http://ports.ubuntu.com/ubuntu-ports/ bionic-security main restricted universe multiverse
-deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic-security main restricted universe multiverse
diff --git a/eng/common/cross/arm64/sources.list.buster b/eng/common/cross/arm64/sources.list.buster
deleted file mode 100644
index 7194ac64a96..00000000000
--- a/eng/common/cross/arm64/sources.list.buster
+++ /dev/null
@@ -1,11 +0,0 @@
-deb http://deb.debian.org/debian buster main
-deb-src http://deb.debian.org/debian buster main
-
-deb http://deb.debian.org/debian-security/ buster/updates main
-deb-src http://deb.debian.org/debian-security/ buster/updates main
-
-deb http://deb.debian.org/debian buster-updates main
-deb-src http://deb.debian.org/debian buster-updates main
-
-deb http://deb.debian.org/debian buster-backports main contrib non-free
-deb-src http://deb.debian.org/debian buster-backports main contrib non-free
diff --git a/eng/common/cross/arm64/sources.list.focal b/eng/common/cross/arm64/sources.list.focal
deleted file mode 100644
index 4de2600c174..00000000000
--- a/eng/common/cross/arm64/sources.list.focal
+++ /dev/null
@@ -1,11 +0,0 @@
-deb http://ports.ubuntu.com/ubuntu-ports/ focal main restricted universe
-deb-src http://ports.ubuntu.com/ubuntu-ports/ focal main restricted universe
-
-deb http://ports.ubuntu.com/ubuntu-ports/ focal-updates main restricted universe
-deb-src http://ports.ubuntu.com/ubuntu-ports/ focal-updates main restricted universe
-
-deb http://ports.ubuntu.com/ubuntu-ports/ focal-backports main restricted
-deb-src http://ports.ubuntu.com/ubuntu-ports/ focal-backports main restricted
-
-deb http://ports.ubuntu.com/ubuntu-ports/ focal-security main restricted universe multiverse
-deb-src http://ports.ubuntu.com/ubuntu-ports/ focal-security main restricted universe multiverse
diff --git a/eng/common/cross/arm64/sources.list.jammy b/eng/common/cross/arm64/sources.list.jammy
deleted file mode 100644
index 6bb0453029c..00000000000
--- a/eng/common/cross/arm64/sources.list.jammy
+++ /dev/null
@@ -1,11 +0,0 @@
-deb http://ports.ubuntu.com/ubuntu-ports/ jammy main restricted universe
-deb-src http://ports.ubuntu.com/ubuntu-ports/ jammy main restricted universe
-
-deb http://ports.ubuntu.com/ubuntu-ports/ jammy-updates main restricted universe
-deb-src http://ports.ubuntu.com/ubuntu-ports/ jammy-updates main restricted universe
-
-deb http://ports.ubuntu.com/ubuntu-ports/ jammy-backports main restricted
-deb-src http://ports.ubuntu.com/ubuntu-ports/ jammy-backports main restricted
-
-deb http://ports.ubuntu.com/ubuntu-ports/ jammy-security main restricted universe multiverse
-deb-src http://ports.ubuntu.com/ubuntu-ports/ jammy-security main restricted universe multiverse
diff --git a/eng/common/cross/arm64/sources.list.stretch b/eng/common/cross/arm64/sources.list.stretch
deleted file mode 100644
index 0e121577436..00000000000
--- a/eng/common/cross/arm64/sources.list.stretch
+++ /dev/null
@@ -1,12 +0,0 @@
-deb http://deb.debian.org/debian stretch main
-deb-src http://deb.debian.org/debian stretch main
-
-deb http://deb.debian.org/debian-security/ stretch/updates main
-deb-src http://deb.debian.org/debian-security/ stretch/updates main
-
-deb http://deb.debian.org/debian stretch-updates main
-deb-src http://deb.debian.org/debian stretch-updates main
-
-deb http://deb.debian.org/debian stretch-backports main contrib non-free
-deb-src http://deb.debian.org/debian stretch-backports main contrib non-free
-
diff --git a/eng/common/cross/arm64/sources.list.xenial b/eng/common/cross/arm64/sources.list.xenial
deleted file mode 100644
index 56fbb36a59f..00000000000
--- a/eng/common/cross/arm64/sources.list.xenial
+++ /dev/null
@@ -1,11 +0,0 @@
-deb http://ports.ubuntu.com/ubuntu-ports/ xenial main restricted universe
-deb-src http://ports.ubuntu.com/ubuntu-ports/ xenial main restricted universe
-
-deb http://ports.ubuntu.com/ubuntu-ports/ xenial-updates main restricted universe
-deb-src http://ports.ubuntu.com/ubuntu-ports/ xenial-updates main restricted universe
-
-deb http://ports.ubuntu.com/ubuntu-ports/ xenial-backports main restricted
-deb-src http://ports.ubuntu.com/ubuntu-ports/ xenial-backports main restricted
-
-deb http://ports.ubuntu.com/ubuntu-ports/ xenial-security main restricted universe multiverse
-deb-src http://ports.ubuntu.com/ubuntu-ports/ xenial-security main restricted universe multiverse
diff --git a/eng/common/cross/arm64/sources.list.zesty b/eng/common/cross/arm64/sources.list.zesty
deleted file mode 100644
index ea2c14a7874..00000000000
--- a/eng/common/cross/arm64/sources.list.zesty
+++ /dev/null
@@ -1,11 +0,0 @@
-deb http://ports.ubuntu.com/ubuntu-ports/ zesty main restricted universe
-deb-src http://ports.ubuntu.com/ubuntu-ports/ zesty main restricted universe
-
-deb http://ports.ubuntu.com/ubuntu-ports/ zesty-updates main restricted universe
-deb-src http://ports.ubuntu.com/ubuntu-ports/ zesty-updates main restricted universe
-
-deb http://ports.ubuntu.com/ubuntu-ports/ zesty-backports main restricted
-deb-src http://ports.ubuntu.com/ubuntu-ports/ zesty-backports main restricted
-
-deb http://ports.ubuntu.com/ubuntu-ports/ zesty-security main restricted universe multiverse
-deb-src http://ports.ubuntu.com/ubuntu-ports/ zesty-security main restricted universe multiverse
diff --git a/eng/common/cross/armel/sources.list.jessie b/eng/common/cross/armel/sources.list.jessie
deleted file mode 100644
index 3d9c3059d89..00000000000
--- a/eng/common/cross/armel/sources.list.jessie
+++ /dev/null
@@ -1,3 +0,0 @@
-# Debian (jessie) # Stable
-deb http://ftp.debian.org/debian/ jessie main contrib non-free
-deb-src http://ftp.debian.org/debian/ jessie main contrib non-free
diff --git a/eng/common/cross/armv6/sources.list.buster b/eng/common/cross/armv6/sources.list.buster
deleted file mode 100644
index f27fc4fb346..00000000000
--- a/eng/common/cross/armv6/sources.list.buster
+++ /dev/null
@@ -1,2 +0,0 @@
-deb http://raspbian.raspberrypi.org/raspbian/ buster main contrib non-free rpi
-deb-src http://raspbian.raspberrypi.org/raspbian/ buster main contrib non-free rpi
diff --git a/eng/common/cross/build-android-rootfs.sh b/eng/common/cross/build-android-rootfs.sh
index f163fb9dae9..7e9ba2b75ed 100755
--- a/eng/common/cross/build-android-rootfs.sh
+++ b/eng/common/cross/build-android-rootfs.sh
@@ -5,15 +5,15 @@ __NDK_Version=r21
usage()
{
echo "Creates a toolchain and sysroot used for cross-compiling for Android."
- echo.
+ echo
echo "Usage: $0 [BuildArch] [ApiLevel]"
- echo.
+ echo
echo "BuildArch is the target architecture of Android. Currently only arm64 is supported."
echo "ApiLevel is the target Android API level. API levels usually match to Android releases. See https://source.android.com/source/build-numbers.html"
- echo.
+ echo
echo "By default, the toolchain and sysroot will be generated in cross/android-rootfs/toolchain/[BuildArch]. You can change this behavior"
echo "by setting the TOOLCHAIN_DIR environment variable"
- echo.
+ echo
echo "By default, the NDK will be downloaded into the cross/android-rootfs/android-ndk-$__NDK_Version directory. If you already have an NDK installation,"
echo "you can set the NDK_DIR environment variable to have this script use that installation of the NDK."
echo "By default, this script will generate a file, android_platform, in the root of the ROOTFS_DIR directory that contains the RID for the supported and tested Android build: android.28-arm64. This file is to replace '/etc/os-release', which is not available for Android."
diff --git a/eng/common/cross/build-rootfs.sh b/eng/common/cross/build-rootfs.sh
index 9caf9b021db..4b5e8d7166b 100755
--- a/eng/common/cross/build-rootfs.sh
+++ b/eng/common/cross/build-rootfs.sh
@@ -8,7 +8,7 @@ usage()
echo "BuildArch can be: arm(default), arm64, armel, armv6, ppc64le, riscv64, s390x, x64, x86"
echo "CodeName - optional, Code name for Linux, can be: xenial(default), zesty, bionic, alpine"
echo " for alpine can be specified with version: alpineX.YY or alpineedge"
- echo " for FreeBSD can be: freebsd12, freebsd13"
+ echo " for FreeBSD can be: freebsd13, freebsd14"
echo " for illumos can be: illumos"
echo " for Haiku can be: haiku."
echo "lldbx.y - optional, LLDB version, can be: lldb3.9(default), lldb4.0, lldb5.0, lldb6.0 no-lldb. Ignored for alpine and FreeBSD"
@@ -30,7 +30,8 @@ __IllumosArch=arm7
__HaikuArch=arm
__QEMUArch=arm
__UbuntuArch=armhf
-__UbuntuRepo="http://ports.ubuntu.com/"
+__UbuntuRepo=
+__UbuntuSuites="updates security backports"
__LLDB_Package="liblldb-3.9-dev"
__SkipUnmount=0
@@ -71,9 +72,9 @@ __AlpinePackages+=" krb5-dev"
__AlpinePackages+=" openssl-dev"
__AlpinePackages+=" zlib-dev"
-__FreeBSDBase="12.4-RELEASE"
+__FreeBSDBase="13.3-RELEASE"
__FreeBSDPkg="1.17.0"
-__FreeBSDABI="12"
+__FreeBSDABI="13"
__FreeBSDPackages="libunwind"
__FreeBSDPackages+=" icu"
__FreeBSDPackages+=" libinotify"
@@ -129,6 +130,7 @@ __AlpineKeys='
616db30d:MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAnpUpyWDWjlUk3smlWeA0\nlIMW+oJ38t92CRLHH3IqRhyECBRW0d0aRGtq7TY8PmxjjvBZrxTNDpJT6KUk4LRm\na6A6IuAI7QnNK8SJqM0DLzlpygd7GJf8ZL9SoHSH+gFsYF67Cpooz/YDqWrlN7Vw\ntO00s0B+eXy+PCXYU7VSfuWFGK8TGEv6HfGMALLjhqMManyvfp8hz3ubN1rK3c8C\nUS/ilRh1qckdbtPvoDPhSbTDmfU1g/EfRSIEXBrIMLg9ka/XB9PvWRrekrppnQzP\nhP9YE3x/wbFc5QqQWiRCYyQl/rgIMOXvIxhkfe8H5n1Et4VAorkpEAXdsfN8KSVv\nLSMazVlLp9GYq5SUpqYX3KnxdWBgN7BJoZ4sltsTpHQ/34SXWfu3UmyUveWj7wp0\nx9hwsPirVI00EEea9AbP7NM2rAyu6ukcm4m6ATd2DZJIViq2es6m60AE6SMCmrQF\nwmk4H/kdQgeAELVfGOm2VyJ3z69fQuywz7xu27S6zTKi05Qlnohxol4wVb6OB7qG\nLPRtK9ObgzRo/OPumyXqlzAi/Yvyd1ZQk8labZps3e16bQp8+pVPiumWioMFJDWV\nGZjCmyMSU8V6MB6njbgLHoyg2LCukCAeSjbPGGGYhnKLm1AKSoJh3IpZuqcKCk5C\n8CM1S15HxV78s9dFntEqIokCAwEAAQ==
'
__Keyring=
+__KeyringFile="/usr/share/keyrings/ubuntu-archive-keyring.gpg"
__SkipSigCheck=0
__UseMirror=0
@@ -142,7 +144,6 @@ while :; do
case $lowerI in
-\?|-h|--help)
usage
- exit 1
;;
arm)
__BuildArch=arm
@@ -163,6 +164,7 @@ while :; do
__UbuntuArch=armel
__UbuntuRepo="http://ftp.debian.org/debian/"
__CodeName=jessie
+ __KeyringFile="/usr/share/keyrings/debian-archive-keyring.gpg"
;;
armv6)
__BuildArch=armv6
@@ -170,10 +172,12 @@ while :; do
__QEMUArch=arm
__UbuntuRepo="http://raspbian.raspberrypi.org/raspbian/"
__CodeName=buster
+ __KeyringFile="/usr/share/keyrings/raspbian-archive-keyring.gpg"
__LLDB_Package="liblldb-6.0-dev"
+ __UbuntuSuites=
- if [[ -e "/usr/share/keyrings/raspbian-archive-keyring.gpg" ]]; then
- __Keyring="--keyring /usr/share/keyrings/raspbian-archive-keyring.gpg"
+ if [[ -e "$__KeyringFile" ]]; then
+ __Keyring="--keyring $__KeyringFile"
fi
;;
riscv64)
@@ -182,13 +186,8 @@ while :; do
__AlpinePackages="${__AlpinePackages// lldb-dev/}"
__QEMUArch=riscv64
__UbuntuArch=riscv64
- __UbuntuRepo="http://deb.debian.org/debian-ports"
__UbuntuPackages="${__UbuntuPackages// libunwind8-dev/}"
unset __LLDB_Package
-
- if [[ -e "/usr/share/keyrings/debian-ports-archive-keyring.gpg" ]]; then
- __Keyring="--keyring /usr/share/keyrings/debian-ports-archive-keyring.gpg --include=debian-ports-archive-keyring"
- fi
;;
ppc64le)
__BuildArch=ppc64le
@@ -229,12 +228,19 @@ while :; do
__UbuntuRepo="http://archive.ubuntu.com/ubuntu/"
;;
lldb*)
- version="${lowerI/lldb/}"
- parts=(${version//./ })
+ version="$(echo "$lowerI" | tr -d '[:alpha:]-=')"
+ majorVersion="${version%%.*}"
+
+ [ -z "${version##*.*}" ] && minorVersion="${version#*.}"
+ if [ -z "$minorVersion" ]; then
+ minorVersion=0
+ fi
# for versions > 6.0, lldb has dropped the minor version
- if [[ "${parts[0]}" -gt 6 ]]; then
- version="${parts[0]}"
+ if [ "$majorVersion" -le 6 ]; then
+ version="$majorVersion.$minorVersion"
+ else
+ version="$majorVersion"
fi
__LLDB_Package="liblldb-${version}-dev"
@@ -243,15 +249,19 @@ while :; do
unset __LLDB_Package
;;
llvm*)
- version="${lowerI/llvm/}"
- parts=(${version//./ })
- __LLVM_MajorVersion="${parts[0]}"
- __LLVM_MinorVersion="${parts[1]}"
-
- # for versions > 6.0, llvm has dropped the minor version
- if [[ -z "$__LLVM_MinorVersion" && "$__LLVM_MajorVersion" -le 6 ]]; then
- __LLVM_MinorVersion=0;
+ version="$(echo "$lowerI" | tr -d '[:alpha:]-=')"
+ __LLVM_MajorVersion="${version%%.*}"
+
+ [ -z "${version##*.*}" ] && __LLVM_MinorVersion="${version#*.}"
+ if [ -z "$__LLVM_MinorVersion" ]; then
+ __LLVM_MinorVersion=0
+ fi
+
+    # for versions > 6.0, llvm has dropped the minor version
+ if [ "$__LLVM_MajorVersion" -gt 6 ]; then
+ __LLVM_MinorVersion=
fi
+
;;
xenial) # Ubuntu 16.04
if [[ "$__CodeName" != "jessie" ]]; then
@@ -278,8 +288,17 @@ while :; do
__CodeName=jammy
fi
;;
+ noble) # Ubuntu 24.04
+ if [[ "$__CodeName" != "jessie" ]]; then
+ __CodeName=noble
+ fi
+ if [[ -n "$__LLDB_Package" ]]; then
+ __LLDB_Package="liblldb-18-dev"
+ fi
+ ;;
jessie) # Debian 8
__CodeName=jessie
+ __KeyringFile="/usr/share/keyrings/debian-archive-keyring.gpg"
if [[ -z "$__UbuntuRepo" ]]; then
__UbuntuRepo="http://ftp.debian.org/debian/"
@@ -288,6 +307,7 @@ while :; do
stretch) # Debian 9
__CodeName=stretch
__LLDB_Package="liblldb-6.0-dev"
+ __KeyringFile="/usr/share/keyrings/debian-archive-keyring.gpg"
if [[ -z "$__UbuntuRepo" ]]; then
__UbuntuRepo="http://ftp.debian.org/debian/"
@@ -296,6 +316,7 @@ while :; do
buster) # Debian 10
__CodeName=buster
__LLDB_Package="liblldb-6.0-dev"
+ __KeyringFile="/usr/share/keyrings/debian-archive-keyring.gpg"
if [[ -z "$__UbuntuRepo" ]]; then
__UbuntuRepo="http://ftp.debian.org/debian/"
@@ -303,6 +324,15 @@ while :; do
;;
bullseye) # Debian 11
__CodeName=bullseye
+ __KeyringFile="/usr/share/keyrings/debian-archive-keyring.gpg"
+
+ if [[ -z "$__UbuntuRepo" ]]; then
+ __UbuntuRepo="http://ftp.debian.org/debian/"
+ fi
+ ;;
+ bookworm) # Debian 12
+ __CodeName=bookworm
+ __KeyringFile="/usr/share/keyrings/debian-archive-keyring.gpg"
if [[ -z "$__UbuntuRepo" ]]; then
__UbuntuRepo="http://ftp.debian.org/debian/"
@@ -310,6 +340,7 @@ while :; do
;;
sid) # Debian sid
__CodeName=sid
+ __KeyringFile="/usr/share/keyrings/debian-archive-keyring.gpg"
if [[ -z "$__UbuntuRepo" ]]; then
__UbuntuRepo="http://ftp.debian.org/debian/"
@@ -323,25 +354,24 @@ while :; do
alpine*)
__CodeName=alpine
__UbuntuRepo=
- version="${lowerI/alpine/}"
- if [[ "$version" == "edge" ]]; then
+ if [[ "$lowerI" == "alpineedge" ]]; then
__AlpineVersion=edge
else
- parts=(${version//./ })
- __AlpineMajorVersion="${parts[0]}"
- __AlpineMinoVersion="${parts[1]}"
- __AlpineVersion="$__AlpineMajorVersion.$__AlpineMinoVersion"
+ version="$(echo "$lowerI" | tr -d '[:alpha:]-=')"
+ __AlpineMajorVersion="${version%%.*}"
+ __AlpineMinorVersion="${version#*.}"
+ __AlpineVersion="$__AlpineMajorVersion.$__AlpineMinorVersion"
fi
;;
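The lldb*, llvm*, and alpine* arms above now share one POSIX-sh version-parsing idiom (strip letters, dashes, and '=' with tr, then split on the first dot with parameter expansion) instead of the old bash arrays. A standalone sketch of how it behaves; the sample inputs are illustrative only:

    #!/bin/sh
    for input in lldb-13.0 llvm18 alpine3.17; do
        version="$(echo "$input" | tr -d '[:alpha:]-=')"   # keep only digits and dots
        major="${version%%.*}"                             # text before the first '.'
        minor=
        [ -z "${version##*.*}" ] && minor="${version#*.}"  # only set when a '.' exists
        echo "$input => major=$major minor=${minor:-none}"
    done
    # lldb-13.0 => major=13 minor=0
    # llvm18 => major=18 minor=none
    # alpine3.17 => major=3 minor=17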
- freebsd12)
+ freebsd13)
__CodeName=freebsd
__SkipUnmount=1
;;
- freebsd13)
+ freebsd14)
__CodeName=freebsd
- __FreeBSDBase="13.2-RELEASE"
- __FreeBSDABI="13"
+ __FreeBSDBase="14.0-RELEASE"
+ __FreeBSDABI="14"
__SkipUnmount=1
;;
illumos)
@@ -420,6 +450,10 @@ fi
__UbuntuPackages+=" ${__LLDB_Package:-}"
+if [[ -z "$__UbuntuRepo" ]]; then
+ __UbuntuRepo="http://ports.ubuntu.com/"
+fi
+
if [[ -n "$__LLVM_MajorVersion" ]]; then
__UbuntuPackages+=" libclang-common-${__LLVM_MajorVersion}${__LLVM_MinorVersion:+.$__LLVM_MinorVersion}-dev"
fi
@@ -442,13 +476,39 @@ fi
mkdir -p "$__RootfsDir"
__RootfsDir="$( cd "$__RootfsDir" && pwd )"
+__hasWget=
+ensureDownloadTool()
+{
+ if command -v wget &> /dev/null; then
+ __hasWget=1
+ elif command -v curl &> /dev/null; then
+ __hasWget=0
+ else
+ >&2 echo "ERROR: either wget or curl is required by this script."
+ exit 1
+ fi
+}
+
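Every network fetch below funnels through this helper, so the script now also works on hosts that ship only curl. A minimal sketch of the same wget/curl gate; the URL and archive are placeholders:

    #!/bin/sh
    if command -v wget > /dev/null 2>&1; then
        hasWget=1
    elif command -v curl > /dev/null 2>&1; then
        hasWget=0
    else
        >&2 echo "ERROR: either wget or curl is required."
        exit 1
    fi
    url="https://example.invalid/rootfs.tar.gz"  # placeholder
    if [ "$hasWget" = 1 ]; then
        wget -O- "$url" | tar -xzf -   # wget streams to stdout with -O-
    else
        curl -SL "$url" | tar -xzf -   # curl needs -L to follow redirects
    fi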
if [[ "$__CodeName" == "alpine" ]]; then
__ApkToolsVersion=2.12.11
- __ApkToolsSHA512SUM=53e57b49230da07ef44ee0765b9592580308c407a8d4da7125550957bb72cb59638e04f8892a18b584451c8d841d1c7cb0f0ab680cc323a3015776affaa3be33
__ApkToolsDir="$(mktemp -d)"
__ApkKeysDir="$(mktemp -d)"
+ arch="$(uname -m)"
- wget "https://gitlab.alpinelinux.org/api/v4/projects/5/packages/generic//v$__ApkToolsVersion/x86_64/apk.static" -P "$__ApkToolsDir"
+ ensureDownloadTool
+
+ if [[ "$__hasWget" == 1 ]]; then
+ wget -P "$__ApkToolsDir" "https://gitlab.alpinelinux.org/api/v4/projects/5/packages/generic/v$__ApkToolsVersion/$arch/apk.static"
+ else
+ curl -SLO --create-dirs --output-dir "$__ApkToolsDir" "https://gitlab.alpinelinux.org/api/v4/projects/5/packages/generic/v$__ApkToolsVersion/$arch/apk.static"
+ fi
+ if [[ "$arch" == "x86_64" ]]; then
+ __ApkToolsSHA512SUM="53e57b49230da07ef44ee0765b9592580308c407a8d4da7125550957bb72cb59638e04f8892a18b584451c8d841d1c7cb0f0ab680cc323a3015776affaa3be33"
+ elif [[ "$arch" == "aarch64" ]]; then
+ __ApkToolsSHA512SUM="9e2b37ecb2b56c05dad23d379be84fd494c14bd730b620d0d576bda760588e1f2f59a7fcb2f2080577e0085f23a0ca8eadd993b4e61c2ab29549fdb71969afd0"
+ else
+ echo "WARNING: add missing hash for your host architecture. To find the value, use: 'find /tmp -name apk.static -exec sha512sum {} \;'"
+ fi
echo "$__ApkToolsSHA512SUM $__ApkToolsDir/apk.static" | sha512sum -c
chmod +x "$__ApkToolsDir/apk.static"
@@ -477,20 +537,23 @@ if [[ "$__CodeName" == "alpine" ]]; then
fi
# initialize DB
+ # shellcheck disable=SC2086
"$__ApkToolsDir/apk.static" \
-X "http://dl-cdn.alpinelinux.org/alpine/$version/main" \
-X "http://dl-cdn.alpinelinux.org/alpine/$version/community" \
-U $__ApkSignatureArg --root "$__RootfsDir" --arch "$__AlpineArch" --initdb add
if [[ "$__AlpineLlvmLibsLookup" == 1 ]]; then
+ # shellcheck disable=SC2086
__AlpinePackages+=" $("$__ApkToolsDir/apk.static" \
-X "http://dl-cdn.alpinelinux.org/alpine/$version/main" \
-X "http://dl-cdn.alpinelinux.org/alpine/$version/community" \
-U $__ApkSignatureArg --root "$__RootfsDir" --arch "$__AlpineArch" \
- search 'llvm*-libs' | sort | tail -1 | sed 's/-[^-]*//2g')"
+ search 'llvm*-libs' | grep -E '^llvm' | sort | tail -1 | sed 's/-[^-]*//2g')"
fi
# install all packages in one go
+ # shellcheck disable=SC2086
"$__ApkToolsDir/apk.static" \
-X "http://dl-cdn.alpinelinux.org/alpine/$version/main" \
-X "http://dl-cdn.alpinelinux.org/alpine/$version/community" \
@@ -501,12 +564,23 @@ if [[ "$__CodeName" == "alpine" ]]; then
elif [[ "$__CodeName" == "freebsd" ]]; then
mkdir -p "$__RootfsDir"/usr/local/etc
JOBS=${MAXJOBS:="$(getconf _NPROCESSORS_ONLN)"}
- wget -O - "https://download.freebsd.org/ftp/releases/${__FreeBSDArch}/${__FreeBSDMachineArch}/${__FreeBSDBase}/base.txz" | tar -C "$__RootfsDir" -Jxf - ./lib ./usr/lib ./usr/libdata ./usr/include ./usr/share/keys ./etc ./bin/freebsd-version
+
+ ensureDownloadTool
+
+ if [[ "$__hasWget" == 1 ]]; then
+ wget -O- "https://download.freebsd.org/ftp/releases/${__FreeBSDArch}/${__FreeBSDMachineArch}/${__FreeBSDBase}/base.txz" | tar -C "$__RootfsDir" -Jxf - ./lib ./usr/lib ./usr/libdata ./usr/include ./usr/share/keys ./etc ./bin/freebsd-version
+ else
+ curl -SL "https://download.freebsd.org/ftp/releases/${__FreeBSDArch}/${__FreeBSDMachineArch}/${__FreeBSDBase}/base.txz" | tar -C "$__RootfsDir" -Jxf - ./lib ./usr/lib ./usr/libdata ./usr/include ./usr/share/keys ./etc ./bin/freebsd-version
+ fi
echo "ABI = \"FreeBSD:${__FreeBSDABI}:${__FreeBSDMachineArch}\"; FINGERPRINTS = \"${__RootfsDir}/usr/share/keys\"; REPOS_DIR = [\"${__RootfsDir}/etc/pkg\"]; REPO_AUTOUPDATE = NO; RUN_SCRIPTS = NO;" > "${__RootfsDir}"/usr/local/etc/pkg.conf
echo "FreeBSD: { url: \"pkg+http://pkg.FreeBSD.org/\${ABI}/quarterly\", mirror_type: \"srv\", signature_type: \"fingerprints\", fingerprints: \"${__RootfsDir}/usr/share/keys/pkg\", enabled: yes }" > "${__RootfsDir}"/etc/pkg/FreeBSD.conf
mkdir -p "$__RootfsDir"/tmp
# get and build package manager
- wget -O - "https://github.com/freebsd/pkg/archive/${__FreeBSDPkg}.tar.gz" | tar -C "$__RootfsDir"/tmp -zxf -
+ if [[ "$__hasWget" == 1 ]]; then
+ wget -O- "https://github.com/freebsd/pkg/archive/${__FreeBSDPkg}.tar.gz" | tar -C "$__RootfsDir"/tmp -zxf -
+ else
+ curl -SL "https://github.com/freebsd/pkg/archive/${__FreeBSDPkg}.tar.gz" | tar -C "$__RootfsDir"/tmp -zxf -
+ fi
cd "$__RootfsDir/tmp/pkg-${__FreeBSDPkg}"
# needed for install to succeed
mkdir -p "$__RootfsDir"/host/etc
@@ -514,27 +588,43 @@ elif [[ "$__CodeName" == "freebsd" ]]; then
rm -rf "$__RootfsDir/tmp/pkg-${__FreeBSDPkg}"
# install packages we need.
INSTALL_AS_USER=$(whoami) "$__RootfsDir"/host/sbin/pkg -r "$__RootfsDir" -C "$__RootfsDir"/usr/local/etc/pkg.conf update
+ # shellcheck disable=SC2086
INSTALL_AS_USER=$(whoami) "$__RootfsDir"/host/sbin/pkg -r "$__RootfsDir" -C "$__RootfsDir"/usr/local/etc/pkg.conf install --yes $__FreeBSDPackages
elif [[ "$__CodeName" == "illumos" ]]; then
mkdir "$__RootfsDir/tmp"
pushd "$__RootfsDir/tmp"
JOBS=${MAXJOBS:="$(getconf _NPROCESSORS_ONLN)"}
+
+ ensureDownloadTool
+
echo "Downloading sysroot."
- wget -O - https://github.com/illumos/sysroot/releases/download/20181213-de6af22ae73b-v1/illumos-sysroot-i386-20181213-de6af22ae73b-v1.tar.gz | tar -C "$__RootfsDir" -xzf -
+ if [[ "$__hasWget" == 1 ]]; then
+ wget -O- https://github.com/illumos/sysroot/releases/download/20181213-de6af22ae73b-v1/illumos-sysroot-i386-20181213-de6af22ae73b-v1.tar.gz | tar -C "$__RootfsDir" -xzf -
+ else
+ curl -SL https://github.com/illumos/sysroot/releases/download/20181213-de6af22ae73b-v1/illumos-sysroot-i386-20181213-de6af22ae73b-v1.tar.gz | tar -C "$__RootfsDir" -xzf -
+ fi
echo "Building binutils. Please wait.."
- wget -O - https://ftp.gnu.org/gnu/binutils/binutils-2.33.1.tar.bz2 | tar -xjf -
+ if [[ "$__hasWget" == 1 ]]; then
+ wget -O- https://ftp.gnu.org/gnu/binutils/binutils-2.42.tar.xz | tar -xJf -
+ else
+ curl -SL https://ftp.gnu.org/gnu/binutils/binutils-2.42.tar.xz | tar -xJf -
+ fi
mkdir build-binutils && cd build-binutils
- ../binutils-2.33.1/configure --prefix="$__RootfsDir" --target="${__illumosArch}-sun-solaris2.10" --program-prefix="${__illumosArch}-illumos-" --with-sysroot="$__RootfsDir"
+ ../binutils-2.42/configure --prefix="$__RootfsDir" --target="${__illumosArch}-sun-solaris2.11" --program-prefix="${__illumosArch}-illumos-" --with-sysroot="$__RootfsDir"
make -j "$JOBS" && make install && cd ..
echo "Building gcc. Please wait.."
- wget -O - https://ftp.gnu.org/gnu/gcc/gcc-8.4.0/gcc-8.4.0.tar.xz | tar -xJf -
+ if [[ "$__hasWget" == 1 ]]; then
+ wget -O- https://ftp.gnu.org/gnu/gcc/gcc-13.3.0/gcc-13.3.0.tar.xz | tar -xJf -
+ else
+ curl -SL https://ftp.gnu.org/gnu/gcc/gcc-13.3.0/gcc-13.3.0.tar.xz | tar -xJf -
+ fi
CFLAGS="-fPIC"
CXXFLAGS="-fPIC"
CXXFLAGS_FOR_TARGET="-fPIC"
CFLAGS_FOR_TARGET="-fPIC"
export CFLAGS CXXFLAGS CXXFLAGS_FOR_TARGET CFLAGS_FOR_TARGET
mkdir build-gcc && cd build-gcc
- ../gcc-8.4.0/configure --prefix="$__RootfsDir" --target="${__illumosArch}-sun-solaris2.10" --program-prefix="${__illumosArch}-illumos-" --with-sysroot="$__RootfsDir" --with-gnu-as \
+ ../gcc-13.3.0/configure --prefix="$__RootfsDir" --target="${__illumosArch}-sun-solaris2.11" --program-prefix="${__illumosArch}-illumos-" --with-sysroot="$__RootfsDir" --with-gnu-as \
--with-gnu-ld --disable-nls --disable-libgomp --disable-libquadmath --disable-libssp --disable-libvtv --disable-libcilkrts --disable-libada --disable-libsanitizer \
--disable-libquadmath-support --disable-shared --enable-tls
make -j "$JOBS" && make install && cd ..
@@ -542,9 +632,13 @@ elif [[ "$__CodeName" == "illumos" ]]; then
if [[ "$__UseMirror" == 1 ]]; then
BaseUrl=https://pkgsrc.smartos.skylime.net
fi
- BaseUrl="$BaseUrl/packages/SmartOS/trunk/${__illumosArch}/All"
+ BaseUrl="$BaseUrl/packages/SmartOS/2019Q4/${__illumosArch}/All"
echo "Downloading manifest"
- wget "$BaseUrl"
+ if [[ "$__hasWget" == 1 ]]; then
+ wget "$BaseUrl"
+ else
+ curl -SLO "$BaseUrl"
+ fi
echo "Downloading dependencies."
read -ra array <<<"$__IllumosPackages"
for package in "${array[@]}"; do
@@ -552,7 +646,11 @@ elif [[ "$__CodeName" == "illumos" ]]; then
# find last occurrence of package in listing and extract its name
package="$(sed -En '/.*href="('"$package"'-[0-9].*).tgz".*/h;$!d;g;s//\1/p' All)"
echo "Resolved name '$package'"
- wget "$BaseUrl"/"$package".tgz
+ if [[ "$__hasWget" == 1 ]]; then
+ wget "$BaseUrl"/"$package".tgz
+ else
+ curl -SLO "$BaseUrl"/"$package".tgz
+ fi
ar -x "$package".tgz
tar --skip-old-files -xzf "$package".tmp.tg* -C "$__RootfsDir" 2>/dev/null
done
@@ -561,10 +659,17 @@ elif [[ "$__CodeName" == "illumos" ]]; then
rm -rf "$__RootfsDir"/{tmp,+*}
mkdir -p "$__RootfsDir"/usr/include/net
mkdir -p "$__RootfsDir"/usr/include/netpacket
- wget -P "$__RootfsDir"/usr/include/net https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/io/bpf/net/bpf.h
- wget -P "$__RootfsDir"/usr/include/net https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/io/bpf/net/dlt.h
- wget -P "$__RootfsDir"/usr/include/netpacket https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/inet/sockmods/netpacket/packet.h
- wget -P "$__RootfsDir"/usr/include/sys https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/sys/sdt.h
+ if [[ "$__hasWget" == 1 ]]; then
+ wget -P "$__RootfsDir"/usr/include/net https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/io/bpf/net/bpf.h
+ wget -P "$__RootfsDir"/usr/include/net https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/io/bpf/net/dlt.h
+ wget -P "$__RootfsDir"/usr/include/netpacket https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/inet/sockmods/netpacket/packet.h
+ wget -P "$__RootfsDir"/usr/include/sys https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/sys/sdt.h
+ else
+ curl -SLO --create-dirs --output-dir "$__RootfsDir"/usr/include/net https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/io/bpf/net/bpf.h
+ curl -SLO --create-dirs --output-dir "$__RootfsDir"/usr/include/net https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/io/bpf/net/dlt.h
+ curl -SLO --create-dirs --output-dir "$__RootfsDir"/usr/include/netpacket https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/inet/sockmods/netpacket/packet.h
+ curl -SLO --create-dirs --output-dir "$__RootfsDir"/usr/include/sys https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/sys/sdt.h
+ fi
elif [[ "$__CodeName" == "haiku" ]]; then
JOBS=${MAXJOBS:="$(getconf _NPROCESSORS_ONLN)"}
@@ -574,9 +679,16 @@ elif [[ "$__CodeName" == "haiku" ]]; then
mkdir "$__RootfsDir/tmp/download"
+ ensureDownloadTool
+
echo "Downloading Haiku package tool"
- git clone https://github.com/haiku/haiku-toolchains-ubuntu --depth 1 $__RootfsDir/tmp/script
- wget -O "$__RootfsDir/tmp/download/hosttools.zip" $($__RootfsDir/tmp/script/fetch.sh --hosttools)
+ git clone https://github.com/haiku/haiku-toolchains-ubuntu --depth 1 "$__RootfsDir/tmp/script"
+ if [[ "$__hasWget" == 1 ]]; then
+ wget -O "$__RootfsDir/tmp/download/hosttools.zip" "$("$__RootfsDir/tmp/script/fetch.sh" --hosttools)"
+ else
+ curl -SLo "$__RootfsDir/tmp/download/hosttools.zip" "$("$__RootfsDir/tmp/script/fetch.sh" --hosttools)"
+ fi
+
unzip -o "$__RootfsDir/tmp/download/hosttools.zip" -d "$__RootfsDir/tmp/bin"
DepotBaseUrl="https://depot.haiku-os.org/__api/v2/pkg/get-pkg"
@@ -589,14 +701,25 @@ elif [[ "$__CodeName" == "haiku" ]]; then
echo "Downloading $package..."
# API documented here: https://github.com/haiku/haikudepotserver/blob/master/haikudepotserver-api2/src/main/resources/api2/pkg.yaml#L60
# The schema here: https://github.com/haiku/haikudepotserver/blob/master/haikudepotserver-api2/src/main/resources/api2/pkg.yaml#L598
- hpkgDownloadUrl="$(wget -qO- --post-data='{"name":"'"$package"'","repositorySourceCode":"haikuports_'$__HaikuArch'","versionType":"LATEST","naturalLanguageCode":"en"}' \
- --header='Content-Type:application/json' "$DepotBaseUrl" | jq -r '.result.versions[].hpkgDownloadURL')"
- wget -P "$__RootfsDir/tmp/download" "$hpkgDownloadUrl"
+ if [[ "$__hasWget" == 1 ]]; then
+ hpkgDownloadUrl="$(wget -qO- --post-data '{"name":"'"$package"'","repositorySourceCode":"haikuports_'$__HaikuArch'","versionType":"LATEST","naturalLanguageCode":"en"}' \
+ --header 'Content-Type:application/json' "$DepotBaseUrl" | jq -r '.result.versions[].hpkgDownloadURL')"
+ wget -P "$__RootfsDir/tmp/download" "$hpkgDownloadUrl"
+ else
+ hpkgDownloadUrl="$(curl -sSL -XPOST --data '{"name":"'"$package"'","repositorySourceCode":"haikuports_'$__HaikuArch'","versionType":"LATEST","naturalLanguageCode":"en"}' \
+ --header 'Content-Type:application/json' "$DepotBaseUrl" | jq -r '.result.versions[].hpkgDownloadURL')"
+ curl -SLO --create-dirs --output-dir "$__RootfsDir/tmp/download" "$hpkgDownloadUrl"
+ fi
done
for package in haiku haiku_devel; do
echo "Downloading $package..."
- hpkgVersion="$(wget -qO- $HpkgBaseUrl | sed -n 's/^.*version: "\([^"]*\)".*$/\1/p')"
- wget -P "$__RootfsDir/tmp/download" "$HpkgBaseUrl/packages/$package-$hpkgVersion-1-$__HaikuArch.hpkg"
+ if [[ "$__hasWget" == 1 ]]; then
+ hpkgVersion="$(wget -qO- "$HpkgBaseUrl" | sed -n 's/^.*version: "\([^"]*\)".*$/\1/p')"
+ wget -P "$__RootfsDir/tmp/download" "$HpkgBaseUrl/packages/$package-$hpkgVersion-1-$__HaikuArch.hpkg"
+ else
+ hpkgVersion="$(curl -sSL "$HpkgBaseUrl" | sed -n 's/^.*version: "\([^"]*\)".*$/\1/p')"
+ curl -SLO --create-dirs --output-dir "$__RootfsDir/tmp/download" "$HpkgBaseUrl/packages/$package-$hpkgVersion-1-$__HaikuArch.hpkg"
+ fi
done
# Set up the sysroot
@@ -609,7 +732,11 @@ elif [[ "$__CodeName" == "haiku" ]]; then
# Download buildtools
echo "Downloading Haiku buildtools"
- wget -O "$__RootfsDir/tmp/download/buildtools.zip" $($__RootfsDir/tmp/script/fetch.sh --buildtools --arch=$__HaikuArch)
+ if [[ "$__hasWget" == 1 ]]; then
+ wget -O "$__RootfsDir/tmp/download/buildtools.zip" "$("$__RootfsDir/tmp/script/fetch.sh" --buildtools --arch=$__HaikuArch)"
+ else
+ curl -SLo "$__RootfsDir/tmp/download/buildtools.zip" "$("$__RootfsDir/tmp/script/fetch.sh" --buildtools --arch=$__HaikuArch)"
+ fi
unzip -o "$__RootfsDir/tmp/download/buildtools.zip" -d "$__RootfsDir"
# Cleaning up temporary files
@@ -622,10 +749,22 @@ elif [[ -n "$__CodeName" ]]; then
__Keyring="$__Keyring --force-check-gpg"
fi
+ # shellcheck disable=SC2086
+ echo running debootstrap "--variant=minbase" $__Keyring --arch "$__UbuntuArch" "$__CodeName" "$__RootfsDir" "$__UbuntuRepo"
debootstrap "--variant=minbase" $__Keyring --arch "$__UbuntuArch" "$__CodeName" "$__RootfsDir" "$__UbuntuRepo"
- cp "$__CrossDir/$__BuildArch/sources.list.$__CodeName" "$__RootfsDir/etc/apt/sources.list"
+
+ mkdir -p "$__RootfsDir/etc/apt/sources.list.d/"
+ cat > "$__RootfsDir/etc/apt/sources.list.d/$__CodeName.sources" <>Start configuring Tizen rootfs"
ln -sfn asm-${LINK_ARCH} ./usr/include/asm
patch -p1 < $__TIZEN_CROSSDIR/tizen.patch
+if [[ "$TIZEN_ARCH" == "riscv64" ]]; then
+ echo "Fixing broken symlinks in $PWD"
+ rm ./usr/lib64/libresolv.so
+ ln -s ../../lib64/libresolv.so.2 ./usr/lib64/libresolv.so
+ rm ./usr/lib64/libpthread.so
+ ln -s ../../lib64/libpthread.so.0 ./usr/lib64/libpthread.so
+ rm ./usr/lib64/libdl.so
+ ln -s ../../lib64/libdl.so.2 ./usr/lib64/libdl.so
+ rm ./usr/lib64/libutil.so
+ ln -s ../../lib64/libutil.so.1 ./usr/lib64/libutil.so
+ rm ./usr/lib64/libm.so
+ ln -s ../../lib64/libm.so.6 ./usr/lib64/libm.so
+ rm ./usr/lib64/librt.so
+ ln -s ../../lib64/librt.so.1 ./usr/lib64/librt.so
+ rm ./lib/ld-linux-riscv64-lp64d.so.1
+ ln -s ../lib64/ld-linux-riscv64-lp64d.so.1 ./lib/ld-linux-riscv64-lp64d.so.1
+fi
echo "<:--stdlib=${CLR_CMAKE_CXX_STANDARD_LIBRARY}>)
+ add_link_options($<$:--stdlib=${CLR_CMAKE_CXX_STANDARD_LIBRARY}>)
+endif()
+
+option(CLR_CMAKE_CXX_STANDARD_LIBRARY_STATIC "Statically link against the C++ standard library" OFF)
+if(CLR_CMAKE_CXX_STANDARD_LIBRARY_STATIC)
+  add_link_options($<$<LINK_LANGUAGE:CXX>:-static-libstdc++>)
+endif()
+
+set(CLR_CMAKE_CXX_ABI_LIBRARY "" CACHE STRING "C++ ABI implementation library to link against. Only supported with the Clang compiler.")
+if (CLR_CMAKE_CXX_ABI_LIBRARY)
+ # The user may specify the ABI library with the 'lib' prefix, like 'libstdc++'. Strip the prefix here so the linker finds the right library.
+ string(REGEX REPLACE "^lib(.+)" "\\1" CLR_CMAKE_CXX_ABI_LIBRARY ${CLR_CMAKE_CXX_ABI_LIBRARY})
+ # We need to specify this as a linker-backend option as Clang will filter this option out when linking to libc++.
+ add_link_options("LINKER:-l${CLR_CMAKE_CXX_ABI_LIBRARY}")
+endif()
+
set(CMAKE_FIND_ROOT_PATH_MODE_PROGRAM NEVER)
set(CMAKE_FIND_ROOT_PATH_MODE_LIBRARY ONLY)
set(CMAKE_FIND_ROOT_PATH_MODE_INCLUDE ONLY)
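Taken together, the new cache options let a cross build pick the C++ standard library flavor, link it statically, and name an ABI library. A hypothetical configure invocation under the usual ROOTFS_DIR convention; all paths and values here are placeholders, not part of the patch:

    #!/bin/sh
    export ROOTFS_DIR="$HOME/rootfs/arm64"   # placeholder rootfs location
    cmake -S . -B build \
        -DCMAKE_TOOLCHAIN_FILE=eng/common/cross/toolchain.cmake \
        -DCLR_CMAKE_CXX_STANDARD_LIBRARY=libc++ \
        -DCLR_CMAKE_CXX_STANDARD_LIBRARY_STATIC=ON \
        -DCLR_CMAKE_CXX_ABI_LIBRARY=libc++abi   # the 'lib' prefix is stripped by the REGEX REPLACE above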
diff --git a/eng/common/darc-init.ps1 b/eng/common/darc-init.ps1
index 8fda30bdce2..e3374310563 100644
--- a/eng/common/darc-init.ps1
+++ b/eng/common/darc-init.ps1
@@ -1,6 +1,6 @@
param (
$darcVersion = $null,
- $versionEndpoint = 'https://maestro.dot.net/api/assets/darc-version?api-version=2019-01-16',
+ $versionEndpoint = 'https://maestro.dot.net/api/assets/darc-version?api-version=2020-02-20',
$verbosity = 'minimal',
$toolpath = $null
)
diff --git a/eng/common/darc-init.sh b/eng/common/darc-init.sh
index c305ae6bd77..36dbd45e1ce 100755
--- a/eng/common/darc-init.sh
+++ b/eng/common/darc-init.sh
@@ -2,7 +2,7 @@
source="${BASH_SOURCE[0]}"
darcVersion=''
-versionEndpoint='https://maestro.dot.net/api/assets/darc-version?api-version=2019-01-16'
+versionEndpoint='https://maestro.dot.net/api/assets/darc-version?api-version=2020-02-20'
verbosity='minimal'
while [[ $# > 0 ]]; do
diff --git a/eng/common/dotnet-install.sh b/eng/common/dotnet-install.sh
index 7e69e3a9e24..7b9d97e3bd4 100755
--- a/eng/common/dotnet-install.sh
+++ b/eng/common/dotnet-install.sh
@@ -71,6 +71,9 @@ case $cpuname in
i[3-6]86)
buildarch=x86
;;
+ riscv64)
+ buildarch=riscv64
+ ;;
*)
echo "Unknown CPU $cpuname detected, treating it as x64"
buildarch=x64
@@ -82,7 +85,7 @@ if [[ $architecture != "" ]] && [[ $architecture != $buildarch ]]; then
dotnetRoot="$dotnetRoot/$architecture"
fi
-InstallDotNet $dotnetRoot $version "$architecture" $runtime true $runtimeSourceFeed $runtimeSourceFeedKey || {
+InstallDotNet "$dotnetRoot" $version "$architecture" $runtime true $runtimeSourceFeed $runtimeSourceFeedKey || {
local exit_code=$?
Write-PipelineTelemetryError -Category 'InitializeToolset' -Message "dotnet-install.sh failed (exit code '$exit_code')." >&2
ExitWithExitCode $exit_code
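The added quotes around $dotnetRoot matter because the install root derives from the repository path, which may contain spaces; unquoted, the path would be word-split into multiple arguments. A tiny illustration with a hypothetical stand-in function:

    #!/bin/sh
    countArgs() { echo "$# args"; }
    dotnetRoot="/home/user/my repo/.dotnet"   # hypothetical path with a space
    countArgs $dotnetRoot     # prints: 2 args (split at the space)
    countArgs "$dotnetRoot"   # prints: 1 args (path kept intact)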
diff --git a/eng/common/helixpublish.proj b/eng/common/helixpublish.proj
index d7f185856e7..c1323bf4121 100644
--- a/eng/common/helixpublish.proj
+++ b/eng/common/helixpublish.proj
@@ -1,3 +1,4 @@
+<!-- Licensed to the .NET Foundation under one or more agreements. The .NET Foundation licenses this file to you under the MIT license. -->
diff --git a/eng/common/internal/Directory.Build.props b/eng/common/internal/Directory.Build.props
index dbf99d82a5c..f1d041c33da 100644
--- a/eng/common/internal/Directory.Build.props
+++ b/eng/common/internal/Directory.Build.props
@@ -1,4 +1,11 @@
+<!-- Licensed to the .NET Foundation under one or more agreements. The .NET Foundation licenses this file to you under the MIT license. -->
+
+ false
+ false
+
+
+
diff --git a/eng/common/internal/Tools.csproj b/eng/common/internal/Tools.csproj
index 7f5ce6d6081..32f79dfb340 100644
--- a/eng/common/internal/Tools.csproj
+++ b/eng/common/internal/Tools.csproj
@@ -1,9 +1,10 @@
+<!-- Licensed to the .NET Foundation under one or more agreements. The .NET Foundation licenses this file to you under the MIT license. -->
net472
- false
false
+ false
@@ -27,4 +28,5 @@
+
diff --git a/eng/common/native/CommonLibrary.psm1 b/eng/common/native/CommonLibrary.psm1
index ca38268c44d..f71f6af6cdb 100644
--- a/eng/common/native/CommonLibrary.psm1
+++ b/eng/common/native/CommonLibrary.psm1
@@ -277,7 +277,8 @@ function Get-MachineArchitecture {
if (($ProcessorArchitecture -Eq "AMD64") -Or
($ProcessorArchitecture -Eq "IA64") -Or
($ProcessorArchitecture -Eq "ARM64") -Or
- ($ProcessorArchitecture -Eq "LOONGARCH64")) {
+ ($ProcessorArchitecture -Eq "LOONGARCH64") -Or
+ ($ProcessorArchitecture -Eq "RISCV64")) {
return "x64"
}
return "x86"
diff --git a/eng/common/native/init-compiler.sh b/eng/common/native/init-compiler.sh
index 2d5660642b8..9a0e1f2b456 100755
--- a/eng/common/native/init-compiler.sh
+++ b/eng/common/native/init-compiler.sh
@@ -2,7 +2,9 @@
#
# This file detects the C/C++ compiler and exports it to the CC/CXX environment variables
#
-# NOTE: some scripts source this file and rely on stdout being empty, make sure to not output anything here!
+# NOTE: some scripts source this file and rely on stdout being empty, make sure
+# to not output *anything* here, unless it is an error message that fails the
+# build.
if [ -z "$build_arch" ] || [ -z "$compiler" ]; then
echo "Usage..."
@@ -17,11 +19,9 @@ case "$compiler" in
# clangx.y or clang-x.y
version="$(echo "$compiler" | tr -d '[:alpha:]-=')"
majorVersion="${version%%.*}"
- [ -z "${version##*.*}" ] && minorVersion="${version#*.}"
- if [ -z "$minorVersion" ] && [ -n "$majorVersion" ] && [ "$majorVersion" -le 6 ]; then
- minorVersion=0;
- fi
+ # LLVM based on v18 released in early 2024, with two releases per year
+ maxVersion="$((18 + ((($(date +%Y) - 2024) * 12 + $(date +%-m) - 3) / 6)))"
compiler=clang
;;
@@ -29,7 +29,9 @@ case "$compiler" in
# gccx.y or gcc-x.y
version="$(echo "$compiler" | tr -d '[:alpha:]-=')"
majorVersion="${version%%.*}"
- [ -z "${version##*.*}" ] && minorVersion="${version#*.}"
+
+ # GCC based on v14 released in early 2024, with one release per year
+ maxVersion="$((14 + ((($(date +%Y) - 2024) * 12 + $(date +%-m) - 3) / 12)))"
compiler=gcc
;;
esac
@@ -47,91 +49,98 @@ check_version_exists() {
desired_version=-1
# Set up the environment to be used for building with the desired compiler.
- if command -v "$compiler-$1.$2" > /dev/null; then
- desired_version="-$1.$2"
- elif command -v "$compiler$1$2" > /dev/null; then
- desired_version="$1$2"
- elif command -v "$compiler-$1$2" > /dev/null; then
- desired_version="-$1$2"
+ if command -v "$compiler-$1" > /dev/null; then
+ desired_version="-$1"
+ elif command -v "$compiler$1" > /dev/null; then
+ desired_version="$1"
fi
echo "$desired_version"
}
+__baseOS="$(uname)"
+set_compiler_version_from_CC() {
+ if [ "$__baseOS" = "Darwin" ]; then
+ # On Darwin, the versions from -version/-dumpversion refer to Xcode
+ # versions, not llvm versions, so we can't rely on them.
+ return
+ fi
+
+ version="$("$CC" -dumpversion)"
+ if [ -z "$version" ]; then
+ echo "Error: $CC -dumpversion didn't provide a version"
+ exit 1
+ fi
+
+ # gcc and clang often display 3 part versions. However, gcc can show only 1 part in some environments.
+    IFS=. read -r majorVersion _ <<EOF
+$version
+EOF
+}
+
-    if command -v "$compiler" > /dev/null; then
- if [ "$(uname)" != "Darwin" ]; then
- echo "Warning: Specific version of $compiler not found, falling back to use the one in PATH."
- fi
- CC="$(command -v "$compiler")"
- CXX="$(command -v "$cxxCompiler")"
- else
- echo "No usable version of $compiler found."
+ if ! command -v "$compiler" > /dev/null; then
+ echo "Error: No compatible version of $compiler was found within the range of $minVersion to $maxVersion. Please upgrade your toolchain or specify the compiler explicitly using CLR_CC and CLR_CXX environment variables."
exit 1
fi
- else
- if [ "$compiler" = "clang" ] && [ "$majorVersion" -lt 5 ]; then
- if [ "$build_arch" = "arm" ] || [ "$build_arch" = "armel" ]; then
- if command -v "$compiler" > /dev/null; then
- echo "Warning: Found clang version $majorVersion which is not supported on arm/armel architectures, falling back to use clang from PATH."
- CC="$(command -v "$compiler")"
- CXX="$(command -v "$cxxCompiler")"
- else
- echo "Found clang version $majorVersion which is not supported on arm/armel architectures, and there is no clang in PATH."
- exit 1
- fi
- fi
- fi
+
+ CC="$(command -v "$compiler" 2> /dev/null)"
+ CXX="$(command -v "$cxxCompiler" 2> /dev/null)"
+ set_compiler_version_from_CC
fi
else
- desired_version="$(check_version_exists "$majorVersion" "$minorVersion")"
+ desired_version="$(check_version_exists "$majorVersion")"
if [ "$desired_version" = "-1" ]; then
- echo "Could not find specific version of $compiler: $majorVersion $minorVersion."
+ echo "Error: Could not find specific version of $compiler: $majorVersion."
exit 1
fi
fi
if [ -z "$CC" ]; then
- CC="$(command -v "$compiler$desired_version")"
- CXX="$(command -v "$cxxCompiler$desired_version")"
- if [ -z "$CXX" ]; then CXX="$(command -v "$cxxCompiler")"; fi
+ CC="$(command -v "$compiler$desired_version" 2> /dev/null)"
+ CXX="$(command -v "$cxxCompiler$desired_version" 2> /dev/null)"
+ if [ -z "$CXX" ]; then CXX="$(command -v "$cxxCompiler" 2> /dev/null)"; fi
+ set_compiler_version_from_CC
fi
else
if [ ! -f "$CLR_CC" ]; then
- echo "CLR_CC is set but path '$CLR_CC' does not exist"
+ echo "Error: CLR_CC is set but path '$CLR_CC' does not exist"
exit 1
fi
CC="$CLR_CC"
CXX="$CLR_CXX"
+ set_compiler_version_from_CC
fi
if [ -z "$CC" ]; then
- echo "Unable to find $compiler."
+ echo "Error: Unable to find $compiler."
exit 1
fi
-# Only lld version >= 9 can be considered stable. lld doesn't support s390x.
-if [ "$compiler" = "clang" ] && [ -n "$majorVersion" ] && [ "$majorVersion" -ge 9 ] && [ "$build_arch" != "s390x" ]; then
- if "$CC" -fuse-ld=lld -Wl,--version >/dev/null 2>&1; then
- LDFLAGS="-fuse-ld=lld"
+if [ "$__baseOS" != "Darwin" ]; then
+ # On Darwin, we always want to use the Apple linker.
+
+ # Only lld version >= 9 can be considered stable. lld supports s390x starting from 18.0.
+ if [ "$compiler" = "clang" ] && [ -n "$majorVersion" ] && [ "$majorVersion" -ge 9 ] && { [ "$build_arch" != "s390x" ] || [ "$majorVersion" -ge 18 ]; }; then
+ if "$CC" -fuse-ld=lld -Wl,--version >/dev/null 2>&1; then
+ LDFLAGS="-fuse-ld=lld"
+ fi
fi
fi
-SCAN_BUILD_COMMAND="$(command -v "scan-build$desired_version")"
+SCAN_BUILD_COMMAND="$(command -v "scan-build$desired_version" 2> /dev/null)"
export CC CXX LDFLAGS SCAN_BUILD_COMMAND
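Instead of hard-coding a ceiling, init-compiler.sh now derives maxVersion from the current date: clang is assumed to cut two releases per year starting from v18 in March 2024, gcc one per year from v14. A worked example with the date pinned rather than read from date(1); the formulas are copied from the hunks above:

    #!/bin/sh
    year=2025; month=9   # pinned sample date
    clangMax="$((18 + (((year - 2024) * 12 + month - 3) / 6)))"
    gccMax="$((14 + (((year - 2024) * 12 + month - 3) / 12)))"
    echo "clang <= $clangMax"   # (12 + 9 - 3) / 6  = 3 -> 21
    echo "gcc   <= $gccMax"     # (12 + 9 - 3) / 12 = 1 -> 15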
diff --git a/eng/common/native/init-distro-rid.sh b/eng/common/native/init-distro-rid.sh
index de1687b2ccb..83ea7aab0e0 100644
--- a/eng/common/native/init-distro-rid.sh
+++ b/eng/common/native/init-distro-rid.sh
@@ -1,4 +1,4 @@
-#!/usr/bin/env bash
+#!/bin/sh
# getNonPortableDistroRid
#
@@ -11,21 +11,16 @@
# non-portable rid
getNonPortableDistroRid()
{
- local targetOs="$1"
- local targetArch="$2"
- local rootfsDir="$3"
- local nonPortableRid=""
+ targetOs="$1"
+ targetArch="$2"
+ rootfsDir="$3"
+ nonPortableRid=""
if [ "$targetOs" = "linux" ]; then
+ # shellcheck disable=SC1091
if [ -e "${rootfsDir}/etc/os-release" ]; then
- source "${rootfsDir}/etc/os-release"
-
- if [[ "${ID}" == "rhel" || "${ID}" == "rocky" || "${ID}" == "alpine" ]]; then
- # remove the last version digit
- VERSION_ID="${VERSION_ID%.*}"
- fi
-
- if [[ "${VERSION_ID:-}" =~ ^([[:digit:]]|\.)+$ ]]; then
+ . "${rootfsDir}/etc/os-release"
+ if echo "${VERSION_ID:-}" | grep -qE '^([[:digit:]]|\.)+$'; then
nonPortableRid="${ID}.${VERSION_ID}-${targetArch}"
else
# Rolling release distros either do not set VERSION_ID, set it as blank or
@@ -33,45 +28,33 @@ getNonPortableDistroRid()
# so omit it here to be consistent with everything else.
nonPortableRid="${ID}-${targetArch}"
fi
-
elif [ -e "${rootfsDir}/android_platform" ]; then
- source "$rootfsDir"/android_platform
+ # shellcheck disable=SC1091
+ . "${rootfsDir}/android_platform"
nonPortableRid="$RID"
fi
fi
if [ "$targetOs" = "freebsd" ]; then
- # $rootfsDir can be empty. freebsd-version is shell script and it should always work.
- __freebsd_major_version=$($rootfsDir/bin/freebsd-version | { read v; echo "${v%%.*}"; })
+ # $rootfsDir can be empty. freebsd-version is a shell script and should always work.
+ __freebsd_major_version=$("$rootfsDir"/bin/freebsd-version | cut -d'.' -f1)
nonPortableRid="freebsd.$__freebsd_major_version-${targetArch}"
- elif command -v getprop && getprop ro.product.system.model 2>&1 | grep -qi android; then
+ elif command -v getprop >/dev/null && getprop ro.product.system.model | grep -qi android; then
__android_sdk_version=$(getprop ro.build.version.sdk)
nonPortableRid="android.$__android_sdk_version-${targetArch}"
elif [ "$targetOs" = "illumos" ]; then
__uname_version=$(uname -v)
- case "$__uname_version" in
- omnios-*)
- __omnios_major_version=$(echo "${__uname_version:8:2}")
- nonPortableRid=omnios."$__omnios_major_version"-"$targetArch"
- ;;
- joyent_*)
- __smartos_major_version=$(echo "${__uname_version:7:4}")
- nonPortableRid=smartos."$__smartos_major_version"-"$targetArch"
- ;;
- illumos_*)
- nonPortableRid=openindiana-"$targetArch"
- ;;
- esac
+ nonPortableRid="illumos-${targetArch}"
elif [ "$targetOs" = "solaris" ]; then
__uname_version=$(uname -v)
- __solaris_major_version=$(echo "${__uname_version%.*}")
- nonPortableRid=solaris."$__solaris_major_version"-"$targetArch"
+ __solaris_major_version=$(echo "$__uname_version" | cut -d'.' -f1)
+ nonPortableRid="solaris.$__solaris_major_version-${targetArch}"
elif [ "$targetOs" = "haiku" ]; then
- __uname_release=$(uname -r)
+ __uname_release="$(uname -r)"
nonPortableRid=haiku.r"$__uname_release"-"$targetArch"
fi
- echo "$(echo $nonPortableRid | tr '[:upper:]' '[:lower:]')"
+ echo "$nonPortableRid" | tr '[:upper:]' '[:lower:]'
}
# initDistroRidGlobal
@@ -85,26 +68,23 @@ getNonPortableDistroRid()
# None
#
# Notes:
-#
-# It is important to note that the function does not return anything, but it
-# exports the following variables on success:
-#
-# __DistroRid : Non-portable rid of the target platform.
-# __PortableTargetOS : OS-part of the portable rid that corresponds to the target platform.
-#
+# It is important to note that the function does not return anything, but it
+# exports the following variables on success:
+# __DistroRid : Non-portable rid of the target platform.
+# __PortableTargetOS : OS-part of the portable rid that corresponds to the target platform.
initDistroRidGlobal()
{
- local targetOs="$1"
- local targetArch="$2"
- local rootfsDir=""
- if [ "$#" -ge 3 ]; then
+ targetOs="$1"
+ targetArch="$2"
+ rootfsDir=""
+ if [ $# -ge 3 ]; then
rootfsDir="$3"
fi
if [ -n "${rootfsDir}" ]; then
# We may have a cross build. Check for the existence of the rootfsDir
if [ ! -e "${rootfsDir}" ]; then
- echo "Error rootfsDir has been passed, but the location is not valid."
+ echo "Error: rootfsDir has been passed, but the location is not valid."
exit 1
fi
fi
@@ -119,7 +99,7 @@ initDistroRidGlobal()
STRINGS="$(command -v llvm-strings || true)"
fi
- # Check for musl-based distros (e.g Alpine Linux, Void Linux).
+ # Check for musl-based distros (e.g. Alpine Linux, Void Linux).
if "${rootfsDir}/usr/bin/ldd" --version 2>&1 | grep -q musl ||
( [ -n "$STRINGS" ] && "$STRINGS" "${rootfsDir}/usr/bin/ldd" 2>&1 | grep -q musl ); then
__PortableTargetOS="linux-musl"
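With the bashisms removed, the Linux branch boils down to sourcing os-release and checking whether VERSION_ID is purely numeric; note that the old rhel/rocky/alpine last-digit trimming is gone, so the full VERSION_ID now lands in the rid. A condensed sketch of the mapping; the sample ID/VERSION_ID pairs are illustrative:

    #!/bin/sh
    targetArch=arm64
    for sample in "alpine=3.19.1" "arch="; do
        ID="${sample%%=*}"; VERSION_ID="${sample#*=}"
        if echo "${VERSION_ID:-}" | grep -qE '^([[:digit:]]|\.)+$'; then
            rid="${ID}.${VERSION_ID}-${targetArch}"
        else
            rid="${ID}-${targetArch}"   # rolling releases have no usable VERSION_ID
        fi
        echo "$rid" | tr '[:upper:]' '[:lower:]'
    done
    # alpine.3.19.1-arm64
    # arch-arm64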
diff --git a/eng/common/native/init-os-and-arch.sh b/eng/common/native/init-os-and-arch.sh
index e693617a6c2..38921d4338f 100644
--- a/eng/common/native/init-os-and-arch.sh
+++ b/eng/common/native/init-os-and-arch.sh
@@ -1,4 +1,4 @@
-#!/usr/bin/env bash
+#!/bin/sh
# Use uname to determine what the OS is.
OSName=$(uname -s | tr '[:upper:]' '[:lower:]')
@@ -35,6 +35,10 @@ fi
case "$CPUName" in
arm64|aarch64)
arch=arm64
+ if [ "$(getconf LONG_BIT)" -lt 64 ]; then
+ # This is 32-bit OS running on 64-bit CPU (for example Raspberry Pi OS)
+ arch=arm
+ fi
;;
loongarch64)
@@ -50,6 +54,7 @@ case "$CPUName" in
;;
armv7l|armv8l)
+ # shellcheck disable=SC1091
if (NAME=""; . /etc/os-release; test "$NAME" = "Tizen"); then
arch=armel
else
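The getconf LONG_BIT probe added above catches a 32-bit userland running on a 64-bit CPU (for example Raspberry Pi OS on an aarch64 board), where uname -m alone would over-report the target. A sketch of the decision in isolation:

    #!/bin/sh
    CPUName="$(uname -m)"
    case "$CPUName" in
        arm64|aarch64)
            arch=arm64
            if [ "$(getconf LONG_BIT)" -lt 64 ]; then
                arch=arm   # 64-bit CPU, but the OS/userland is 32-bit
            fi
            ;;
    esac
    echo "target arch: ${arch:-$CPUName}"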
diff --git a/eng/common/post-build/add-build-to-channel.ps1 b/eng/common/post-build/add-build-to-channel.ps1
deleted file mode 100644
index 49938f0c89f..00000000000
--- a/eng/common/post-build/add-build-to-channel.ps1
+++ /dev/null
@@ -1,48 +0,0 @@
-param(
- [Parameter(Mandatory=$true)][int] $BuildId,
- [Parameter(Mandatory=$true)][int] $ChannelId,
- [Parameter(Mandatory=$true)][string] $MaestroApiAccessToken,
- [Parameter(Mandatory=$false)][string] $MaestroApiEndPoint = 'https://maestro.dot.net',
- [Parameter(Mandatory=$false)][string] $MaestroApiVersion = '2019-01-16'
-)
-
-try {
- . $PSScriptRoot\post-build-utils.ps1
-
- # Check that the channel we are going to promote the build to exist
- $channelInfo = Get-MaestroChannel -ChannelId $ChannelId
-
- if (!$channelInfo) {
- Write-PipelineTelemetryCategory -Category 'PromoteBuild' -Message "Channel with BAR ID $ChannelId was not found in BAR!"
- ExitWithExitCode 1
- }
-
- # Get info about which channel(s) the build has already been promoted to
- $buildInfo = Get-MaestroBuild -BuildId $BuildId
-
- if (!$buildInfo) {
- Write-PipelineTelemetryError -Category 'PromoteBuild' -Message "Build with BAR ID $BuildId was not found in BAR!"
- ExitWithExitCode 1
- }
-
- # Find whether the build is already assigned to the channel or not
- if ($buildInfo.channels) {
- foreach ($channel in $buildInfo.channels) {
- if ($channel.Id -eq $ChannelId) {
- Write-Host "The build with BAR ID $BuildId is already on channel $ChannelId!"
- ExitWithExitCode 0
- }
- }
- }
-
- Write-Host "Promoting build '$BuildId' to channel '$ChannelId'."
-
- Assign-BuildToChannel -BuildId $BuildId -ChannelId $ChannelId
-
- Write-Host 'done.'
-}
-catch {
- Write-Host $_
- Write-PipelineTelemetryError -Category 'PromoteBuild' -Message "There was an error while trying to promote build '$BuildId' to channel '$ChannelId'"
- ExitWithExitCode 1
-}
diff --git a/eng/common/post-build/check-channel-consistency.ps1 b/eng/common/post-build/check-channel-consistency.ps1
index 63f3464c986..61208d2d135 100644
--- a/eng/common/post-build/check-channel-consistency.ps1
+++ b/eng/common/post-build/check-channel-consistency.ps1
@@ -4,10 +4,18 @@ param(
)
try {
- . $PSScriptRoot\post-build-utils.ps1
+ $ErrorActionPreference = 'Stop'
+ Set-StrictMode -Version 2.0
+
+ # `tools.ps1` checks $ci to perform some actions. Since the post-build
+  # scripts don't necessarily execute in the same agent that ran the
+  # build.ps1/sh script, this variable isn't automatically set.
+ $ci = $true
+ $disableConfigureToolsetImport = $true
+ . $PSScriptRoot\..\tools.ps1
if ($PromoteToChannels -eq "") {
- Write-PipelineTaskError -Type 'warning' -Message "This build won't publish assets as it's not configured to any Maestro channel. If that wasn't intended use Darc to configure a default channel using add-default-channel for this branch or to promote it to a channel using add-build-to-channel. See https://github.com/dotnet/arcade/blob/master/Documentation/Darc.md#assigning-an-individual-build-to-a-channel for more info."
+ Write-PipelineTaskError -Type 'warning' -Message "This build won't publish assets as it's not configured to any Maestro channel. If that wasn't intended use Darc to configure a default channel using add-default-channel for this branch or to promote it to a channel using add-build-to-channel. See https://github.com/dotnet/arcade/blob/main/Documentation/Darc.md#assigning-an-individual-build-to-a-channel for more info."
ExitWithExitCode 0
}
diff --git a/eng/common/post-build/nuget-validation.ps1 b/eng/common/post-build/nuget-validation.ps1
index dab3534ab53..e5de00c8983 100644
--- a/eng/common/post-build/nuget-validation.ps1
+++ b/eng/common/post-build/nuget-validation.ps1
@@ -2,20 +2,18 @@
# tool: https://github.com/NuGet/NuGetGallery/tree/jver-verify/src/VerifyMicrosoftPackage
param(
- [Parameter(Mandatory=$true)][string] $PackagesPath, # Path to where the packages to be validated are
- [Parameter(Mandatory=$true)][string] $ToolDestinationPath # Where the validation tool should be downloaded to
+ [Parameter(Mandatory=$true)][string] $PackagesPath # Path to where the packages to be validated are
)
-try {
- . $PSScriptRoot\post-build-utils.ps1
-
- $url = 'https://raw.githubusercontent.com/NuGet/NuGetGallery/3e25ad135146676bcab0050a516939d9958bfa5d/src/VerifyMicrosoftPackage/verify.ps1'
-
- New-Item -ItemType 'directory' -Path ${ToolDestinationPath} -Force
+# `tools.ps1` checks $ci to perform some actions. Since the post-build
+# scripts don't necessarily execute in the same agent that ran the
+# build.ps1/sh script, this variable isn't automatically set.
+$ci = $true
+$disableConfigureToolsetImport = $true
+. $PSScriptRoot\..\tools.ps1
- Invoke-WebRequest $url -OutFile ${ToolDestinationPath}\verify.ps1
-
- & ${ToolDestinationPath}\verify.ps1 ${PackagesPath}\*.nupkg
+try {
+ & $PSScriptRoot\nuget-verification.ps1 ${PackagesPath}\*.nupkg
}
catch {
Write-Host $_.ScriptStackTrace
diff --git a/eng/common/post-build/nuget-verification.ps1 b/eng/common/post-build/nuget-verification.ps1
new file mode 100644
index 00000000000..a365194a938
--- /dev/null
+++ b/eng/common/post-build/nuget-verification.ps1
@@ -0,0 +1,121 @@
+<#
+.SYNOPSIS
+ Verifies that Microsoft NuGet packages have proper metadata.
+.DESCRIPTION
+ Downloads a verification tool and runs metadata validation on the provided NuGet packages. This script writes an
+ error if any of the provided packages fail validation. All arguments provided to this PowerShell script that do not
+ match PowerShell parameters are passed on to the verification tool downloaded during the execution of this script.
+.PARAMETER NuGetExePath
+ The path to the nuget.exe binary to use. If not provided, nuget.exe will be downloaded into the -DownloadPath
+ directory.
+.PARAMETER PackageSource
+ The package source to use to download the verification tool. If not provided, nuget.org will be used.
+.PARAMETER DownloadPath
+ The directory path to download the verification tool and nuget.exe to. If not provided,
+ %TEMP%\NuGet.VerifyNuGetPackage will be used.
+.PARAMETER args
+ Arguments that will be passed to the verification tool.
+.EXAMPLE
+ PS> .\verify.ps1 *.nupkg
+    Verifies the metadata of all .nupkg files in the current working directory.
+.EXAMPLE
+ PS> .\verify.ps1 --help
+    Displays the help text of the downloaded verification tool.
+.LINK
+ https://github.com/NuGet/NuGetGallery/blob/master/src/VerifyMicrosoftPackage/README.md
+#>
+
+# This script was copied from https://github.com/NuGet/NuGetGallery/blob/3e25ad135146676bcab0050a516939d9958bfa5d/src/VerifyMicrosoftPackage/verify.ps1
+
+[CmdletBinding(PositionalBinding = $false)]
+param(
+ [string]$NuGetExePath,
+ [string]$PackageSource = "https://api.nuget.org/v3/index.json",
+ [string]$DownloadPath,
+ [Parameter(ValueFromRemainingArguments = $true)]
+ [string[]]$args
+)
+
+# The URL to download nuget.exe.
+$nugetExeUrl = "https://dist.nuget.org/win-x86-commandline/v4.9.4/nuget.exe"
+
+# The package ID of the verification tool.
+$packageId = "NuGet.VerifyMicrosoftPackage"
+
+# The location that nuget.exe and the verification tool will be downloaded to.
+if (!$DownloadPath) {
+ $DownloadPath = (Join-Path $env:TEMP "NuGet.VerifyMicrosoftPackage")
+}
+
+$fence = New-Object -TypeName string -ArgumentList '=', 80
+
+# Create the download directory, if it doesn't already exist.
+if (!(Test-Path $DownloadPath)) {
+ New-Item -ItemType Directory $DownloadPath | Out-Null
+}
+Write-Host "Using download path: $DownloadPath"
+
+if ($NuGetExePath) {
+ $nuget = $NuGetExePath
+} else {
+ $downloadedNuGetExe = Join-Path $DownloadPath "nuget.exe"
+
+ # Download nuget.exe, if it doesn't already exist.
+ if (!(Test-Path $downloadedNuGetExe)) {
+ Write-Host "Downloading nuget.exe from $nugetExeUrl..."
+ $ProgressPreference = 'SilentlyContinue'
+ try {
+ Invoke-WebRequest $nugetExeUrl -OutFile $downloadedNuGetExe
+ $ProgressPreference = 'Continue'
+ } catch {
+ $ProgressPreference = 'Continue'
+ Write-Error $_
+ Write-Error "nuget.exe failed to download."
+ exit
+ }
+ }
+
+ $nuget = $downloadedNuGetExe
+}
+
+Write-Host "Using nuget.exe path: $nuget"
+Write-Host " "
+
+# Download the latest version of the verification tool.
+Write-Host "Downloading the latest version of $packageId from $packageSource..."
+Write-Host $fence
+& $nuget install $packageId `
+ -Prerelease `
+ -OutputDirectory $DownloadPath `
+ -Source $PackageSource
+Write-Host $fence
+Write-Host " "
+
+if ($LASTEXITCODE -ne 0) {
+ Write-Error "nuget.exe failed to fetch the verify tool."
+  exit 1
+}
+
+# Find the most recently downloaded tool
+Write-Host "Finding the most recently downloaded verification tool."
+$verifyProbePath = Join-Path $DownloadPath "$packageId.*"
+$verifyPath = Get-ChildItem -Path $verifyProbePath -Directory `
+ | Sort-Object -Property LastWriteTime -Descending `
+ | Select-Object -First 1
+$verify = Join-Path $verifyPath "tools\NuGet.VerifyMicrosoftPackage.exe"
+Write-Host "Using verification tool: $verify"
+Write-Host " "
+
+# Execute the verification tool.
+Write-Host "Executing the verify tool..."
+Write-Host $fence
+& $verify $args
+Write-Host $fence
+Write-Host " "
+
+# Respond to the exit code.
+if ($LASTEXITCODE -ne 0) {
+ Write-Error "The verify tool found some problems."
+} else {
+ Write-Output "The verify tool succeeded."
+}
diff --git a/eng/common/post-build/post-build-utils.ps1 b/eng/common/post-build/post-build-utils.ps1
deleted file mode 100644
index 534f6988d5b..00000000000
--- a/eng/common/post-build/post-build-utils.ps1
+++ /dev/null
@@ -1,91 +0,0 @@
-# Most of the functions in this file require the variables `MaestroApiEndPoint`,
-# `MaestroApiVersion` and `MaestroApiAccessToken` to be globally available.
-
-$ErrorActionPreference = 'Stop'
-Set-StrictMode -Version 2.0
-
-# `tools.ps1` checks $ci to perform some actions. Since the post-build
-# scripts don't necessarily execute in the same agent that run the
-# build.ps1/sh script this variable isn't automatically set.
-$ci = $true
-$disableConfigureToolsetImport = $true
-. $PSScriptRoot\..\tools.ps1
-
-function Create-MaestroApiRequestHeaders([string]$ContentType = 'application/json') {
- Validate-MaestroVars
-
- $headers = New-Object 'System.Collections.Generic.Dictionary[[String],[String]]'
- $headers.Add('Accept', $ContentType)
- $headers.Add('Authorization',"Bearer $MaestroApiAccessToken")
- return $headers
-}
-
-function Get-MaestroChannel([int]$ChannelId) {
- Validate-MaestroVars
-
- $apiHeaders = Create-MaestroApiRequestHeaders
- $apiEndpoint = "$MaestroApiEndPoint/api/channels/${ChannelId}?api-version=$MaestroApiVersion"
-
- $result = try { Invoke-WebRequest -Method Get -Uri $apiEndpoint -Headers $apiHeaders | ConvertFrom-Json } catch { Write-Host "Error: $_" }
- return $result
-}
-
-function Get-MaestroBuild([int]$BuildId) {
- Validate-MaestroVars
-
- $apiHeaders = Create-MaestroApiRequestHeaders -AuthToken $MaestroApiAccessToken
- $apiEndpoint = "$MaestroApiEndPoint/api/builds/${BuildId}?api-version=$MaestroApiVersion"
-
- $result = try { return Invoke-WebRequest -Method Get -Uri $apiEndpoint -Headers $apiHeaders | ConvertFrom-Json } catch { Write-Host "Error: $_" }
- return $result
-}
-
-function Get-MaestroSubscriptions([string]$SourceRepository, [int]$ChannelId) {
- Validate-MaestroVars
-
- $SourceRepository = [System.Web.HttpUtility]::UrlEncode($SourceRepository)
- $apiHeaders = Create-MaestroApiRequestHeaders -AuthToken $MaestroApiAccessToken
- $apiEndpoint = "$MaestroApiEndPoint/api/subscriptions?sourceRepository=$SourceRepository&channelId=$ChannelId&api-version=$MaestroApiVersion"
-
- $result = try { Invoke-WebRequest -Method Get -Uri $apiEndpoint -Headers $apiHeaders | ConvertFrom-Json } catch { Write-Host "Error: $_" }
- return $result
-}
-
-function Assign-BuildToChannel([int]$BuildId, [int]$ChannelId) {
- Validate-MaestroVars
-
- $apiHeaders = Create-MaestroApiRequestHeaders -AuthToken $MaestroApiAccessToken
- $apiEndpoint = "$MaestroApiEndPoint/api/channels/${ChannelId}/builds/${BuildId}?api-version=$MaestroApiVersion"
- Invoke-WebRequest -Method Post -Uri $apiEndpoint -Headers $apiHeaders | Out-Null
-}
-
-function Trigger-Subscription([string]$SubscriptionId) {
- Validate-MaestroVars
-
- $apiHeaders = Create-MaestroApiRequestHeaders -AuthToken $MaestroApiAccessToken
- $apiEndpoint = "$MaestroApiEndPoint/api/subscriptions/$SubscriptionId/trigger?api-version=$MaestroApiVersion"
- Invoke-WebRequest -Uri $apiEndpoint -Headers $apiHeaders -Method Post | Out-Null
-}
-
-function Validate-MaestroVars {
- try {
- Get-Variable MaestroApiEndPoint | Out-Null
- Get-Variable MaestroApiVersion | Out-Null
- Get-Variable MaestroApiAccessToken | Out-Null
-
- if (!($MaestroApiEndPoint -Match '^http[s]?://maestro-(int|prod).westus2.cloudapp.azure.com$')) {
- Write-PipelineTelemetryError -Category 'MaestroVars' -Message "MaestroApiEndPoint is not a valid Maestro URL. '$MaestroApiEndPoint'"
- ExitWithExitCode 1
- }
-
- if (!($MaestroApiVersion -Match '^[0-9]{4}-[0-9]{2}-[0-9]{2}$')) {
- Write-PipelineTelemetryError -Category 'MaestroVars' -Message "MaestroApiVersion does not match a version string in the format yyyy-MM-DD. '$MaestroApiVersion'"
- ExitWithExitCode 1
- }
- }
- catch {
- Write-PipelineTelemetryError -Category 'MaestroVars' -Message 'Error: Variables `MaestroApiEndPoint`, `MaestroApiVersion` and `MaestroApiAccessToken` are required while using this script.'
- Write-Host $_
- ExitWithExitCode 1
- }
-}
diff --git a/eng/common/post-build/publish-using-darc.ps1 b/eng/common/post-build/publish-using-darc.ps1
index 238945cb5ab..90b58e32a87 100644
--- a/eng/common/post-build/publish-using-darc.ps1
+++ b/eng/common/post-build/publish-using-darc.ps1
@@ -9,7 +9,12 @@ param(
)
try {
- . $PSScriptRoot\post-build-utils.ps1
+ # `tools.ps1` checks $ci to perform some actions. Since the post-build
+ # scripts don't necessarily execute on the same agent that ran the
+ # build.ps1/sh script, this variable isn't automatically set.
+ $ci = $true
+ $disableConfigureToolsetImport = $true
+ . $PSScriptRoot\..\tools.ps1
$darc = Get-Darc
@@ -37,6 +42,7 @@ try {
--azdev-pat "$AzdoToken" `
--bar-uri "$MaestroApiEndPoint" `
--ci `
+ --verbose `
@optionalParams
if ($LastExitCode -ne 0) {
diff --git a/eng/common/post-build/redact-logs.ps1 b/eng/common/post-build/redact-logs.ps1
new file mode 100644
index 00000000000..b7fc1959150
--- /dev/null
+++ b/eng/common/post-build/redact-logs.ps1
@@ -0,0 +1,89 @@
+[CmdletBinding(PositionalBinding=$False)]
+param(
+ [Parameter(Mandatory=$true, Position=0)][string] $InputPath,
+ [Parameter(Mandatory=$true)][string] $BinlogToolVersion,
+ [Parameter(Mandatory=$false)][string] $DotnetPath,
+ [Parameter(Mandatory=$false)][string] $PackageFeed = 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-public/nuget/v3/index.json',
+ # File with strings to redact - separated by newlines.
+  # Comment lines start with '# ' and are ignored (see the example below).
+ [Parameter(Mandatory=$false)][string] $TokensFilePath,
+ [Parameter(ValueFromRemainingArguments=$true)][String[]]$TokensToRedact
+)
+
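+# A hypothetical example of a file passed via -TokensFilePath (the values are placeholders):
+#
+#   # lines starting with '# ' are treated as comments and skipped
+#   my-secret-connection-string
+#   some-access-token-value
+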
+try {
+ $ErrorActionPreference = 'Stop'
+ Set-StrictMode -Version 2.0
+
+  # `tools.ps1` checks $ci to perform some actions. Since the post-build
+  # scripts don't necessarily execute on the same agent that ran the
+  # build.ps1/sh script, this variable isn't automatically set.
+ $ci = $true
+ $disableConfigureToolsetImport = $true
+ . $PSScriptRoot\..\tools.ps1
+
+ $packageName = 'binlogtool'
+
+ $dotnet = $DotnetPath
+
+ if (!$dotnet) {
+ $dotnetRoot = InitializeDotNetCli -install:$true
+ $dotnet = "$dotnetRoot\dotnet.exe"
+ }
+
+ $toolList = & "$dotnet" tool list -g
+
+ if ($toolList -like "*$packageName*") {
+ & "$dotnet" tool uninstall $packageName -g
+ }
+
+ $toolPath = "$PSScriptRoot\..\..\..\.tools"
+ $verbosity = 'minimal'
+
+  New-Item -ItemType Directory -Force -Path $toolPath | Out-Null
+
+ Push-Location -Path $toolPath
+
+ try {
+ Write-Host "Installing Binlog redactor CLI..."
+ Write-Host "'$dotnet' new tool-manifest"
+ & "$dotnet" new tool-manifest
+ Write-Host "'$dotnet' tool install $packageName --local --add-source '$PackageFeed' -v $verbosity --version $BinlogToolVersion"
+ & "$dotnet" tool install $packageName --local --add-source "$PackageFeed" -v $verbosity --version $BinlogToolVersion
+
+ if (Test-Path $TokensFilePath) {
+ Write-Host "Adding additional sensitive data for redaction from file: " $TokensFilePath
+ $TokensToRedact += Get-Content -Path $TokensFilePath | Foreach {$_.Trim()} | Where { $_ -notmatch "^# " }
+ }
+
+ $optionalParams = [System.Collections.ArrayList]::new()
+
+ Foreach ($p in $TokensToRedact)
+ {
+ if($p -match '^\$\(.*\)$')
+ {
+ Write-Host ("Ignoring token {0} as it is probably unexpanded AzDO variable" -f $p)
+ }
+ elseif($p)
+ {
+ $optionalParams.Add("-p:" + $p) | Out-Null
+ }
+ }
+
+ & $dotnet binlogtool redact --input:$InputPath --recurse --in-place `
+ @optionalParams
+
+ if ($LastExitCode -ne 0) {
+      Write-PipelineTelemetryError -Category 'Redactor' -Type 'warning' -Message "Problems using the redactor tool (exit code: $LastExitCode); continuing anyway."
+ }
+ }
+ finally {
+ Pop-Location
+ }
+
+ Write-Host 'done.'
+}
+catch {
+ Write-Host $_
+ Write-PipelineTelemetryError -Category 'Redactor' -Message "There was an error while trying to redact logs. Error: $_"
+ ExitWithExitCode 1
+}
diff --git a/eng/common/post-build/sourcelink-validation.ps1 b/eng/common/post-build/sourcelink-validation.ps1
index 4011d324e73..1976ef70fb8 100644
--- a/eng/common/post-build/sourcelink-validation.ps1
+++ b/eng/common/post-build/sourcelink-validation.ps1
@@ -6,7 +6,15 @@ param(
[Parameter(Mandatory=$true)][string] $SourcelinkCliVersion # Version of SourceLink CLI to use
)
-. $PSScriptRoot\post-build-utils.ps1
+$ErrorActionPreference = 'Stop'
+Set-StrictMode -Version 2.0
+
+# `tools.ps1` checks $ci to perform some actions. Since the post-build
+# scripts don't necessarily execute on the same agent that ran the
+# build.ps1/sh script, this variable isn't automatically set.
+$ci = $true
+$disableConfigureToolsetImport = $true
+. $PSScriptRoot\..\tools.ps1
# Cache/HashMap (File -> Exist flag) used to consult whether a file exist
# in the repository at a specific commit point. This is populated by inserting
diff --git a/eng/common/post-build/symbols-validation.ps1 b/eng/common/post-build/symbols-validation.ps1
index cd2181bafa0..7146e593ffa 100644
--- a/eng/common/post-build/symbols-validation.ps1
+++ b/eng/common/post-build/symbols-validation.ps1
@@ -322,8 +322,6 @@ function InstallDotnetSymbol {
}
try {
- . $PSScriptRoot\post-build-utils.ps1
-
InstallDotnetSymbol
foreach ($Job in @(Get-Job)) {
diff --git a/eng/common/post-build/trigger-subscriptions.ps1 b/eng/common/post-build/trigger-subscriptions.ps1
deleted file mode 100644
index ac9a95778fc..00000000000
--- a/eng/common/post-build/trigger-subscriptions.ps1
+++ /dev/null
@@ -1,64 +0,0 @@
-param(
- [Parameter(Mandatory=$true)][string] $SourceRepo,
- [Parameter(Mandatory=$true)][int] $ChannelId,
- [Parameter(Mandatory=$true)][string] $MaestroApiAccessToken,
- [Parameter(Mandatory=$false)][string] $MaestroApiEndPoint = 'https://maestro.dot.net',
- [Parameter(Mandatory=$false)][string] $MaestroApiVersion = '2019-01-16'
-)
-
-try {
- . $PSScriptRoot\post-build-utils.ps1
-
- # Get all the $SourceRepo subscriptions
- $normalizedSourceRepo = $SourceRepo.Replace('dnceng@', '')
- $subscriptions = Get-MaestroSubscriptions -SourceRepository $normalizedSourceRepo -ChannelId $ChannelId
-
- if (!$subscriptions) {
- Write-PipelineTelemetryError -Category 'TriggerSubscriptions' -Message "No subscriptions found for source repo '$normalizedSourceRepo' in channel '$ChannelId'"
- ExitWithExitCode 0
- }
-
- $subscriptionsToTrigger = New-Object System.Collections.Generic.List[string]
- $failedTriggeredSubscription = $false
-
- # Get all enabled subscriptions that need dependency flow on 'everyBuild'
- foreach ($subscription in $subscriptions) {
- if ($subscription.enabled -and $subscription.policy.updateFrequency -like 'everyBuild' -and $subscription.channel.id -eq $ChannelId) {
- Write-Host "Should trigger this subscription: ${$subscription.id}"
- [void]$subscriptionsToTrigger.Add($subscription.id)
- }
- }
-
- foreach ($subscriptionToTrigger in $subscriptionsToTrigger) {
- try {
- Write-Host "Triggering subscription '$subscriptionToTrigger'."
-
- Trigger-Subscription -SubscriptionId $subscriptionToTrigger
-
- Write-Host 'done.'
- }
- catch
- {
- Write-Host "There was an error while triggering subscription '$subscriptionToTrigger'"
- Write-Host $_
- Write-Host $_.ScriptStackTrace
- $failedTriggeredSubscription = $true
- }
- }
-
- if ($subscriptionsToTrigger.Count -eq 0) {
- Write-Host "No subscription matched source repo '$normalizedSourceRepo' and channel ID '$ChannelId'."
- }
- elseif ($failedTriggeredSubscription) {
- Write-PipelineTelemetryError -Category 'TriggerSubscriptions' -Message 'At least one subscription failed to be triggered...'
- ExitWithExitCode 1
- }
- else {
- Write-Host 'All subscriptions were triggered successfully!'
- }
-}
-catch {
- Write-Host $_.ScriptStackTrace
- Write-PipelineTelemetryError -Category 'TriggerSubscriptions' -Message $_
- ExitWithExitCode 1
-}
diff --git a/eng/common/sdk-task.ps1 b/eng/common/sdk-task.ps1
index 73828dd30d3..aab40de3fd9 100644
--- a/eng/common/sdk-task.ps1
+++ b/eng/common/sdk-task.ps1
@@ -64,7 +64,7 @@ try {
$GlobalJson.tools | Add-Member -Name "vs" -Value (ConvertFrom-Json "{ `"version`": `"16.5`" }") -MemberType NoteProperty
}
if( -not ($GlobalJson.tools.PSObject.Properties.Name -match "xcopy-msbuild" )) {
- $GlobalJson.tools | Add-Member -Name "xcopy-msbuild" -Value "17.8.1-2" -MemberType NoteProperty
+ $GlobalJson.tools | Add-Member -Name "xcopy-msbuild" -Value "17.10.0-pre.4.0" -MemberType NoteProperty
}
if ($GlobalJson.tools."xcopy-msbuild".Trim() -ine "none") {
$xcopyMSBuildToolsFolder = InitializeXCopyMSBuild $GlobalJson.tools."xcopy-msbuild" -install $true
diff --git a/eng/common/sdl/NuGet.config b/eng/common/sdl/NuGet.config
index 5bfbb02ef04..3849bdb3cf5 100644
--- a/eng/common/sdl/NuGet.config
+++ b/eng/common/sdl/NuGet.config
@@ -5,11 +5,11 @@
-
+
-
+
diff --git a/eng/common/sdl/execute-all-sdl-tools.ps1 b/eng/common/sdl/execute-all-sdl-tools.ps1
index 81ded5b7f47..4715d75e974 100644
--- a/eng/common/sdl/execute-all-sdl-tools.ps1
+++ b/eng/common/sdl/execute-all-sdl-tools.ps1
@@ -6,6 +6,7 @@ Param(
[string] $BranchName=$env:BUILD_SOURCEBRANCH, # Optional: name of branch or version of gdn settings; defaults to master
[string] $SourceDirectory=$env:BUILD_SOURCESDIRECTORY, # Required: the directory where source files are located
[string] $ArtifactsDirectory = (Join-Path $env:BUILD_ARTIFACTSTAGINGDIRECTORY ('artifacts')), # Required: the directory where build artifacts are located
+ [string] $AzureDevOpsAccessToken, # Required: access token for dnceng; should be provided via KeyVault
# Optional: list of SDL tools to run on source code. See 'configure-sdl-tool.ps1' for tools list
# format.
@@ -74,7 +75,7 @@ try {
}
Exec-BlockVerbosely {
- & $(Join-Path $PSScriptRoot 'init-sdl.ps1') -GuardianCliLocation $guardianCliLocation -Repository $RepoName -BranchName $BranchName -WorkingDirectory $workingDirectory -GuardianLoggerLevel $GuardianLoggerLevel
+ & $(Join-Path $PSScriptRoot 'init-sdl.ps1') -GuardianCliLocation $guardianCliLocation -Repository $RepoName -BranchName $BranchName -WorkingDirectory $workingDirectory -AzureDevOpsAccessToken $AzureDevOpsAccessToken -GuardianLoggerLevel $GuardianLoggerLevel
}
$gdnFolder = Join-Path $workingDirectory '.gdn'
@@ -103,6 +104,7 @@ try {
-TargetDirectory $targetDirectory `
-GdnFolder $gdnFolder `
-ToolsList $tools `
+ -AzureDevOpsAccessToken $AzureDevOpsAccessToken `
-GuardianLoggerLevel $GuardianLoggerLevel `
-CrScanAdditionalRunConfigParams $CrScanAdditionalRunConfigParams `
-PoliCheckAdditionalRunConfigParams $PoliCheckAdditionalRunConfigParams `
diff --git a/eng/common/sdl/init-sdl.ps1 b/eng/common/sdl/init-sdl.ps1
index 588ff8e22fb..3ac1d92b370 100644
--- a/eng/common/sdl/init-sdl.ps1
+++ b/eng/common/sdl/init-sdl.ps1
@@ -3,6 +3,7 @@ Param(
[string] $Repository,
[string] $BranchName='master',
[string] $WorkingDirectory,
+ [string] $AzureDevOpsAccessToken,
[string] $GuardianLoggerLevel='Standard'
)
@@ -20,7 +21,14 @@ $ci = $true
# Don't display the console progress UI - it's a huge perf hit
$ProgressPreference = 'SilentlyContinue'
+# Construct basic auth from the AzDO access token; construct the URI to the repository's .gdn folder (stored in the sdl-tool-cfg repository); construct the location of the zip file
+$encodedPat = [Convert]::ToBase64String([System.Text.Encoding]::ASCII.GetBytes(":$AzureDevOpsAccessToken"))
+$escapedRepository = [Uri]::EscapeDataString("/$Repository/$BranchName/.gdn")
+$uri = "https://dev.azure.com/dnceng/internal/_apis/git/repositories/sdl-tool-cfg/Items?path=$escapedRepository&versionDescriptor[versionOptions]=0&`$format=zip&api-version=5.0"
+$zipFile = "$WorkingDirectory/gdn.zip"
+
Add-Type -AssemblyName System.IO.Compression.FileSystem
+$gdnFolder = (Join-Path $WorkingDirectory '.gdn')
try {
# if the folder does not exist, we'll do a guardian init and push it to the remote repository
diff --git a/eng/common/sdl/sdl.ps1 b/eng/common/sdl/sdl.ps1
index 7fe603fe995..648c5068d7d 100644
--- a/eng/common/sdl/sdl.ps1
+++ b/eng/common/sdl/sdl.ps1
@@ -4,8 +4,6 @@ function Install-Gdn {
[Parameter(Mandatory=$true)]
[string]$Path,
- [string]$Source = "https://pkgs.dev.azure.com/dnceng/_packaging/Guardian1ESPTUpstreamOrgFeed/nuget/v3/index.json",
-
# If omitted, install the latest version of Guardian, otherwise install that specific version.
[string]$Version
)
@@ -21,7 +19,7 @@ function Install-Gdn {
$ci = $true
. $PSScriptRoot\..\tools.ps1
- $argumentList = @("install", "Microsoft.Guardian.Cli.win-x64", "-Source $Source", "-OutputDirectory $Path", "-NonInteractive", "-NoCache")
+ $argumentList = @("install", "Microsoft.Guardian.Cli", "-Source https://securitytools.pkgs.visualstudio.com/_packaging/Guardian/nuget/v3/index.json", "-OutputDirectory $Path", "-NonInteractive", "-NoCache")
if ($Version) {
$argumentList += "-Version $Version"
diff --git a/eng/common/sdl/trim-assets-version.ps1 b/eng/common/sdl/trim-assets-version.ps1
index a2e00487704..0daa2a9e946 100644
--- a/eng/common/sdl/trim-assets-version.ps1
+++ b/eng/common/sdl/trim-assets-version.ps1
@@ -72,4 +72,4 @@ catch {
Write-Host $_
Write-PipelineTelemetryError -Force -Category 'Sdl' -Message $_
ExitWithExitCode 1
-}
\ No newline at end of file
+}
diff --git a/eng/common/template-guidance.md b/eng/common/template-guidance.md
new file mode 100644
index 00000000000..5ef6c30ba92
--- /dev/null
+++ b/eng/common/template-guidance.md
@@ -0,0 +1,133 @@
+# Overview
+
+Arcade provides templates for public (`/templates`) and 1ES pipeline templates (`/templates-official`) scenarios. Pipelines that are required to be managed by 1ES pipeline templates should reference `/templates-official`; all other pipelines may reference `/templates`.
+
+## How to use
+
+Basic guidance is:
+
+- 1ES Pipeline Template or 1ES Microbuild template runs should reference `eng/common/templates-official`. Any internal production-grade pipeline should use these templates.
+
+- All other runs should reference `eng/common/templates`.
+
+See [azure-pipelines.yml](../../azure-pipelines.yml) (templates-official example) or [azure-pipelines-pr.yml](../../azure-pipelines-pr.yml) (templates example) for examples.
+
+### The `templateIs1ESManaged` parameter
+
+The `templateIs1ESManaged` parameter is available on most templates and affects which of the variants is used for nested templates. See [Development Notes](#development-notes) below for more information on the `templateIs1ESManaged` parameter.
+
+- For templates under `job/`, `jobs/`, `steps`, or `post-build/`, this parameter must be explicitly set; see the sketch below.
+
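+A minimal sketch of setting the parameter when referencing a steps-level template (the template path and value here are illustrative, not prescriptive):
+
+``` yaml
+steps:
+- template: /eng/common/templates/steps/send-to-helix.yml@self
+  parameters:
+    # steps-level templates cannot infer the variant, so declare it explicitly
+    templateIs1ESManaged: false
+```
+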
+## Multiple outputs
+
+1ES pipeline templates impose a policy where every artifact-publishing step results in additional security scans being injected into your pipeline. When using `templates-official/jobs/jobs.yml`, Arcade reduces the number of injected scans by gathering all publishing outputs into [Build.ArtifactStagingDirectory](https://learn.microsoft.com/en-us/azure/devops/pipelines/build/variables?view=azure-devops&tabs=yaml#build-variables-devops-services) and utilizing the [outputParentDirectory](https://eng.ms/docs/cloud-ai-platform/devdiv/one-engineering-system-1es/1es-docs/1es-pipeline-templates/features/outputs#multiple-outputs) feature of 1ES pipeline templates. If your pipeline places publish artifacts under `$(Build.ArtifactStagingDirectory)` and uses the 1ES-provided template context, you can reduce the number of security scans injected into it.
+
+Example:
+``` yaml
+# azure-pipelines.yml
+extends:
+ template: azure-pipelines/MicroBuild.1ES.Official.yml@MicroBuildTemplate
+ parameters:
+ stages:
+ - stage: build
+ jobs:
+ - template: /eng/common/templates-official/jobs/jobs.yml@self
+ parameters:
+ # 1ES makes use of outputs to reduce security task injection overhead
+ templateContext:
+ outputs:
+ - output: pipelineArtifact
+ displayName: 'Publish logs from source'
+ continueOnError: true
+ condition: always()
+ targetPath: $(Build.ArtifactStagingDirectory)/artifacts/log
+ artifactName: Logs
+ jobs:
+ - job: Windows
+ steps:
+ - script: echo "friendly neighborhood" > artifacts/marvel/spiderman.txt
+ # copy build outputs to artifact staging directory for publishing
+ - task: CopyFiles@2
+ displayName: Gather build output
+ inputs:
+ SourceFolder: '$(Build.SourcesDirectory)/artifacts/marvel'
+ Contents: '**'
+ TargetFolder: '$(Build.ArtifactStagingDirectory)/artifacts/marvel'
+```
+
+Note: Multiple outputs are ONLY applicable to 1ES PT publishing (only usable when referencing `templates-official`).
+
+# Development notes
+
+**Folder / file structure**
+
+``` text
+eng\common\
+ [templates || templates-official]\
+ job\
+ job.yml (shim + artifact publishing logic)
+ onelocbuild.yml (shim)
+ publish-build-assets.yml (shim)
+ source-build.yml (shim)
+ source-index-stage1.yml (shim)
+ jobs\
+ codeql-build.yml (shim)
+ jobs.yml (shim)
+ source-build.yml (shim)
+ post-build\
+ post-build.yml (shim)
+      common-variables.yml (shim)
+ setup-maestro-vars.yml (shim)
+ steps\
+ publish-build-artifacts.yml (logic)
+ publish-pipeline-artifacts.yml (logic)
+ component-governance.yml (shim)
+ generate-sbom.yml (shim)
+ publish-logs.yml (shim)
+ retain-build.yml (shim)
+ send-to-helix.yml (shim)
+ source-build.yml (shim)
+ variables\
+ pool-providers.yml (logic + redirect) # templates/variables/pool-providers.yml will redirect to templates-official/variables/pool-providers.yml if you are running in the internal project
+ sdl-variables.yml (logic)
+ core-templates\
+ job\
+ job.yml (logic)
+ onelocbuild.yml (logic)
+ publish-build-assets.yml (logic)
+ source-build.yml (logic)
+ source-index-stage1.yml (logic)
+ jobs\
+ codeql-build.yml (logic)
+ jobs.yml (logic)
+ source-build.yml (logic)
+ post-build\
+      common-variables.yml (logic)
+ post-build.yml (logic)
+ setup-maestro-vars.yml (logic)
+ steps\
+ component-governance.yml (logic)
+ generate-sbom.yml (logic)
+ publish-build-artifacts.yml (redirect)
+ publish-logs.yml (logic)
+ publish-pipeline-artifacts.yml (redirect)
+ retain-build.yml (logic)
+ send-to-helix.yml (logic)
+ source-build.yml (logic)
+ variables\
+ pool-providers.yml (redirect)
+```
+
+In the listing above, each file is designated as "shim", "logic", or "redirect".
+
+- shim - represents a YAML file which sits between pipeline logic and .NET Core Engineering's templates (`core-templates`) and defines the `is1ESPipeline` parameter value.
+
+- logic - represents actual base template logic.
+
+- redirect - represents a file in `core-templates` which redirects to the "logic" file in either `templates` or `templates-official`.
+
+Logic for Arcade's templates lives **primarily** in the `core-templates` folder. The main exception is artifact publishing, which is handled differently between 1ES pipeline templates and standard templates. `templates` and `templates-official` provide shim entry points which redirect to `core-templates` while also defining the `is1ESPipeline` parameter. If a shim is referenced in `templates`, then `is1ESPipeline` is set to `false`. If a shim is referenced in `templates-official`, then `is1ESPipeline` is set to `true`.
+
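+As a concrete sketch, a shim is typically just a parameter-forwarding wrapper around the corresponding `core-templates` file (the path and exact contents below are illustrative):
+
+``` yaml
+# eng/common/templates/jobs/jobs.yml (non-1ES variant of the shim)
+jobs:
+- template: /eng/common/core-templates/jobs/jobs.yml
+  parameters:
+    is1ESPipeline: false
+
+    ${{ each parameter in parameters }}:
+      ${{ parameter.key }}: ${{ parameter.value }}
+```
+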
+Within `templates` and `templates-official`, the templates at the "stages" and "jobs" / "job" levels have been replaced with shims. Templates at the "steps" and "variables" levels are typically too granular to be replaced with shims and instead contain logic which is directly applicable to either scenario.
+
+Within `core-templates`, there are a handful of places where logic is dependent on which shim entry point was used. In those places, we redirect back to the respective logic file in `templates` or `templates-official`.
diff --git a/eng/common/templates-official/job/job.yml b/eng/common/templates-official/job/job.yml
index 1f035fee73f..3d16b41c78c 100644
--- a/eng/common/templates-official/job/job.yml
+++ b/eng/common/templates-official/job/job.yml
@@ -1,264 +1,79 @@
-# Internal resources (telemetry, microbuild) can only be accessed from non-public projects,
-# and some (Microbuild) should only be applied to non-PR cases for internal builds.
-
parameters:
-# Job schema parameters - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job
- cancelTimeoutInMinutes: ''
- condition: ''
- container: ''
- continueOnError: false
- dependsOn: ''
- displayName: ''
- pool: ''
- steps: []
- strategy: ''
- timeoutInMinutes: ''
- variables: []
- workspace: ''
- templateContext: ''
-
-# Job base template specific parameters
- # See schema documentation - https://github.com/dotnet/arcade/blob/master/Documentation/AzureDevOps/TemplateSchema.md
- artifacts: ''
- enableMicrobuild: false
- enablePublishBuildArtifacts: false
- enablePublishBuildAssets: false
- enablePublishTestResults: false
- enablePublishUsingPipelines: false
- enableBuildRetry: false
- disableComponentGovernance: ''
- componentGovernanceIgnoreDirectories: ''
- mergeTestResults: false
- testRunTitle: ''
- testResultsFormat: ''
- name: ''
- preSteps: []
- runAsPublic: false
# Sbom related params
enableSbom: true
- PackageVersion: 7.0.0
+ PackageVersion: 9.0.0
BuildDropPath: '$(Build.SourcesDirectory)/artifacts'
jobs:
-- job: ${{ parameters.name }}
-
- ${{ if ne(parameters.cancelTimeoutInMinutes, '') }}:
- cancelTimeoutInMinutes: ${{ parameters.cancelTimeoutInMinutes }}
-
- ${{ if ne(parameters.condition, '') }}:
- condition: ${{ parameters.condition }}
-
- ${{ if ne(parameters.container, '') }}:
- container: ${{ parameters.container }}
-
- ${{ if ne(parameters.continueOnError, '') }}:
- continueOnError: ${{ parameters.continueOnError }}
-
- ${{ if ne(parameters.dependsOn, '') }}:
- dependsOn: ${{ parameters.dependsOn }}
-
- ${{ if ne(parameters.displayName, '') }}:
- displayName: ${{ parameters.displayName }}
-
- ${{ if ne(parameters.pool, '') }}:
- pool: ${{ parameters.pool }}
-
- ${{ if ne(parameters.strategy, '') }}:
- strategy: ${{ parameters.strategy }}
-
- ${{ if ne(parameters.timeoutInMinutes, '') }}:
- timeoutInMinutes: ${{ parameters.timeoutInMinutes }}
-
- ${{ if ne(parameters.templateContext, '') }}:
- templateContext: ${{ parameters.templateContext }}
-
- variables:
- - ${{ if ne(parameters.enableTelemetry, 'false') }}:
- - name: DOTNET_CLI_TELEMETRY_PROFILE
- value: '$(Build.Repository.Uri)'
- - ${{ if eq(parameters.enableRichCodeNavigation, 'true') }}:
- - name: EnableRichCodeNavigation
- value: 'true'
- # Retry signature validation up to three times, waiting 2 seconds between attempts.
- # See https://learn.microsoft.com/en-us/nuget/reference/errors-and-warnings/nu3028#retry-untrusted-root-failures
- - name: NUGET_EXPERIMENTAL_CHAIN_BUILD_RETRY_POLICY
- value: 3,2000
- - ${{ each variable in parameters.variables }}:
- # handle name-value variable syntax
- # example:
- # - name: [key]
- # value: [value]
- - ${{ if ne(variable.name, '') }}:
- - name: ${{ variable.name }}
- value: ${{ variable.value }}
-
- # handle variable groups
- - ${{ if ne(variable.group, '') }}:
- - group: ${{ variable.group }}
-
- # handle template variable syntax
- # example:
- # - template: path/to/template.yml
- # parameters:
- # [key]: [value]
- - ${{ if ne(variable.template, '') }}:
- - template: ${{ variable.template }}
- ${{ if ne(variable.parameters, '') }}:
- parameters: ${{ variable.parameters }}
-
- # handle key-value variable syntax.
- # example:
- # - [key]: [value]
- - ${{ if and(eq(variable.name, ''), eq(variable.group, ''), eq(variable.template, '')) }}:
- - ${{ each pair in variable }}:
- - name: ${{ pair.key }}
- value: ${{ pair.value }}
-
- # DotNet-HelixApi-Access provides 'HelixApiAccessToken' for internal builds
- - ${{ if and(eq(parameters.enableTelemetry, 'true'), eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- - group: DotNet-HelixApi-Access
-
- ${{ if ne(parameters.workspace, '') }}:
- workspace: ${{ parameters.workspace }}
-
- steps:
- - ${{ if ne(parameters.preSteps, '') }}:
- - ${{ each preStep in parameters.preSteps }}:
- - ${{ preStep }}
-
- - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- - ${{ if eq(parameters.enableMicrobuild, 'true') }}:
- - task: MicroBuildSigningPlugin@4
- displayName: Install MicroBuild plugin
- inputs:
- signType: $(_SignType)
- zipSources: false
- feedSource: https://dnceng.pkgs.visualstudio.com/_packaging/MicroBuildToolset/nuget/v3/index.json
- env:
- TeamName: $(_TeamName)
- MicroBuildOutputFolderOverride: '$(Agent.TempDirectory)'
- continueOnError: ${{ parameters.continueOnError }}
- condition: and(succeeded(), in(variables['_SignType'], 'real', 'test'), eq(variables['Agent.Os'], 'Windows_NT'))
-
- - ${{ if and(eq(parameters.runAsPublic, 'false'), eq(variables['System.TeamProject'], 'internal')) }}:
- - task: NuGetAuthenticate@1
-
- - ${{ if and(ne(parameters.artifacts.download, 'false'), ne(parameters.artifacts.download, '')) }}:
- - task: DownloadPipelineArtifact@2
- inputs:
- buildType: current
- artifactName: ${{ coalesce(parameters.artifacts.download.name, 'Artifacts_$(Agent.OS)_$(_BuildConfig)') }}
- targetPath: ${{ coalesce(parameters.artifacts.download.path, 'artifacts') }}
- itemPattern: ${{ coalesce(parameters.artifacts.download.pattern, '**') }}
-
- - ${{ each step in parameters.steps }}:
- - ${{ step }}
-
- - ${{ if eq(parameters.enableRichCodeNavigation, true) }}:
- - task: RichCodeNavIndexer@0
- displayName: RichCodeNav Upload
- inputs:
- languages: ${{ coalesce(parameters.richCodeNavigationLanguage, 'csharp') }}
- environment: ${{ coalesce(parameters.richCodeNavigationEnvironment, 'production') }}
- richNavLogOutputDirectory: $(Build.SourcesDirectory)/artifacts/bin
- uploadRichNavArtifacts: ${{ coalesce(parameters.richCodeNavigationUploadArtifacts, false) }}
- continueOnError: true
-
- - template: /eng/common/templates-official/steps/component-governance.yml
- parameters:
- ${{ if eq(parameters.disableComponentGovernance, '') }}:
- ${{ if and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest'), eq(parameters.runAsPublic, 'false'), or(startsWith(variables['Build.SourceBranch'], 'refs/heads/release/'), startsWith(variables['Build.SourceBranch'], 'refs/heads/dotnet/'), startsWith(variables['Build.SourceBranch'], 'refs/heads/microsoft/'), eq(variables['Build.SourceBranch'], 'refs/heads/main'))) }}:
- disableComponentGovernance: false
- ${{ else }}:
- disableComponentGovernance: true
- ${{ else }}:
- disableComponentGovernance: ${{ parameters.disableComponentGovernance }}
- componentGovernanceIgnoreDirectories: ${{ parameters.componentGovernanceIgnoreDirectories }}
-
- - ${{ if eq(parameters.enableMicrobuild, 'true') }}:
- - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- - task: MicroBuildCleanup@1
- displayName: Execute Microbuild cleanup tasks
- condition: and(always(), in(variables['_SignType'], 'real', 'test'), eq(variables['Agent.Os'], 'Windows_NT'))
- continueOnError: ${{ parameters.continueOnError }}
- env:
- TeamName: $(_TeamName)
-
- - ${{ if ne(parameters.artifacts.publish, '') }}:
- - ${{ if and(ne(parameters.artifacts.publish.artifacts, 'false'), ne(parameters.artifacts.publish.artifacts, '')) }}:
- - task: CopyFiles@2
- displayName: Gather binaries for publish to artifacts
- inputs:
- SourceFolder: 'artifacts/bin'
- Contents: '**'
- TargetFolder: '$(Build.ArtifactStagingDirectory)/artifacts/bin'
- - task: CopyFiles@2
- displayName: Gather packages for publish to artifacts
- inputs:
- SourceFolder: 'artifacts/packages'
- Contents: '**'
- TargetFolder: '$(Build.ArtifactStagingDirectory)/artifacts/packages'
- - task: 1ES.PublishBuildArtifacts@1
- displayName: Publish pipeline artifacts
- inputs:
- PathtoPublish: '$(Build.ArtifactStagingDirectory)/artifacts'
- PublishLocation: Container
- ArtifactName: ${{ coalesce(parameters.artifacts.publish.artifacts.name , 'Artifacts_$(Agent.Os)_$(_BuildConfig)') }}
- continueOnError: true
- condition: always()
- - ${{ if and(ne(parameters.artifacts.publish.logs, 'false'), ne(parameters.artifacts.publish.logs, '')) }}:
- - task: 1ES.PublishPipelineArtifact@1
- inputs:
- targetPath: 'artifacts/log'
- artifactName: ${{ coalesce(parameters.artifacts.publish.logs.name, 'Logs_Build_$(Agent.Os)_$(_BuildConfig)') }}
- displayName: 'Publish logs'
- continueOnError: true
- condition: always()
-
- - ${{ if ne(parameters.enablePublishBuildArtifacts, 'false') }}:
- - task: 1ES.PublishBuildArtifacts@1
- displayName: Publish Logs
- inputs:
- PathtoPublish: '$(Build.SourcesDirectory)/artifacts/log/$(_BuildConfig)'
- PublishLocation: Container
- ArtifactName: ${{ coalesce(parameters.enablePublishBuildArtifacts.artifactName, '$(Agent.Os)_$(Agent.JobName)' ) }}
- continueOnError: true
- condition: always()
-
- - ${{ if or(and(eq(parameters.enablePublishTestResults, 'true'), eq(parameters.testResultsFormat, '')), eq(parameters.testResultsFormat, 'xunit')) }}:
- - task: PublishTestResults@2
- displayName: Publish XUnit Test Results
- inputs:
- testResultsFormat: 'xUnit'
- testResultsFiles: '*.xml'
- searchFolder: '$(Build.SourcesDirectory)/artifacts/TestResults/$(_BuildConfig)'
- testRunTitle: ${{ coalesce(parameters.testRunTitle, parameters.name, '$(System.JobName)') }}-xunit
- mergeTestResults: ${{ parameters.mergeTestResults }}
- continueOnError: true
- condition: always()
- - ${{ if or(and(eq(parameters.enablePublishTestResults, 'true'), eq(parameters.testResultsFormat, '')), eq(parameters.testResultsFormat, 'vstest')) }}:
- - task: PublishTestResults@2
- displayName: Publish TRX Test Results
- inputs:
- testResultsFormat: 'VSTest'
- testResultsFiles: '*.trx'
- searchFolder: '$(Build.SourcesDirectory)/artifacts/TestResults/$(_BuildConfig)'
- testRunTitle: ${{ coalesce(parameters.testRunTitle, parameters.name, '$(System.JobName)') }}-trx
- mergeTestResults: ${{ parameters.mergeTestResults }}
- continueOnError: true
- condition: always()
-
- - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest'), eq(parameters.enableSbom, 'true')) }}:
- - template: /eng/common/templates-official/steps/generate-sbom.yml
- parameters:
- PackageVersion: ${{ parameters.packageVersion}}
- BuildDropPath: ${{ parameters.buildDropPath }}
- IgnoreDirectories: ${{ parameters.componentGovernanceIgnoreDirectories }}
-
- - ${{ if eq(parameters.enableBuildRetry, 'true') }}:
- - task: 1ES.PublishPipelineArtifact@1
- inputs:
- targetPath: '$(Build.SourcesDirectory)\eng\common\BuildConfiguration'
- artifactName: 'BuildConfiguration'
- displayName: 'Publish build retry configuration'
- continueOnError: true
\ No newline at end of file
+- template: /eng/common/core-templates/job/job.yml
+ parameters:
+ is1ESPipeline: true
+
+ componentGovernanceSteps:
+ - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest'), eq(parameters.enableSbom, 'true')) }}:
+ - template: /eng/common/templates/steps/generate-sbom.yml
+ parameters:
+ PackageVersion: ${{ parameters.packageVersion }}
+ BuildDropPath: ${{ parameters.buildDropPath }}
+ publishArtifacts: false
+
+ # publish artifacts
+  # for 1ES managed templates, use the templateContext.outputs list to handle multiple outputs.
+ templateContext:
+ outputParentDirectory: $(Build.ArtifactStagingDirectory)
+ outputs:
+ - ${{ if ne(parameters.artifacts.publish, '') }}:
+ - ${{ if and(ne(parameters.artifacts.publish.artifacts, 'false'), ne(parameters.artifacts.publish.artifacts, '')) }}:
+ - output: buildArtifacts
+ displayName: Publish pipeline artifacts
+ PathtoPublish: '$(Build.ArtifactStagingDirectory)/artifacts'
+ ArtifactName: ${{ coalesce(parameters.artifacts.publish.artifacts.name , 'Artifacts_$(Agent.Os)_$(_BuildConfig)') }}
+ condition: always()
+ continueOnError: true
+ - ${{ if and(ne(parameters.artifacts.publish.logs, 'false'), ne(parameters.artifacts.publish.logs, '')) }}:
+ - output: pipelineArtifact
+ targetPath: '$(Build.ArtifactStagingDirectory)/artifacts/log'
+ artifactName: ${{ coalesce(parameters.artifacts.publish.logs.name, 'Logs_Build_$(Agent.Os)_$(_BuildConfig)_Attempt$(System.JobAttempt)') }}
+ displayName: 'Publish logs'
+ continueOnError: true
+ condition: always()
+ sbomEnabled: false # we don't need SBOM for logs
+
+ - ${{ if eq(parameters.enablePublishBuildArtifacts, true) }}:
+ - output: buildArtifacts
+ displayName: Publish Logs
+ PathtoPublish: '$(Build.ArtifactStagingDirectory)/artifacts/log/$(_BuildConfig)'
+ publishLocation: Container
+ ArtifactName: ${{ coalesce(parameters.enablePublishBuildArtifacts.artifactName, '$(Agent.Os)_$(Agent.JobName)' ) }}
+ continueOnError: true
+ condition: always()
+ sbomEnabled: false # we don't need SBOM for logs
+
+ - ${{ if eq(parameters.enableBuildRetry, 'true') }}:
+ - output: pipelineArtifact
+ targetPath: '$(Build.ArtifactStagingDirectory)/artifacts/eng/common/BuildConfiguration'
+ artifactName: 'BuildConfiguration'
+ displayName: 'Publish build retry configuration'
+ continueOnError: true
+ sbomEnabled: false # we don't need SBOM for BuildConfiguration
+
+ - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest'), eq(parameters.enableSbom, 'true')) }}:
+ - output: pipelineArtifact
+ displayName: Publish SBOM manifest
+ continueOnError: true
+ targetPath: $(Build.ArtifactStagingDirectory)/sbom
+ artifactName: $(ARTIFACT_NAME)
+
+ # add any outputs provided via root yaml
+ - ${{ if ne(parameters.templateContext.outputs, '') }}:
+ - ${{ each output in parameters.templateContext.outputs }}:
+ - ${{ output }}
+
+ # add any remaining templateContext properties
+ ${{ each context in parameters.templateContext }}:
+ ${{ if and(ne(context.key, 'outputParentDirectory'), ne(context.key, 'outputs')) }}:
+ ${{ context.key }}: ${{ context.value }}
+
+ ${{ each parameter in parameters }}:
+ ${{ if and(ne(parameter.key, 'templateContext'), ne(parameter.key, 'is1ESPipeline')) }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
diff --git a/eng/common/templates-official/job/onelocbuild.yml b/eng/common/templates-official/job/onelocbuild.yml
index 52b4d05d3f8..0f0c514b912 100644
--- a/eng/common/templates-official/job/onelocbuild.yml
+++ b/eng/common/templates-official/job/onelocbuild.yml
@@ -1,112 +1,7 @@
-parameters:
- # Optional: dependencies of the job
- dependsOn: ''
-
- # Optional: A defined YAML pool - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#pool
- pool: ''
-
- CeapexPat: $(dn-bot-ceapex-package-r) # PAT for the loc AzDO instance https://dev.azure.com/ceapex
- GithubPat: $(BotAccount-dotnet-bot-repo-PAT)
-
- SourcesDirectory: $(Build.SourcesDirectory)
- CreatePr: true
- AutoCompletePr: false
- ReusePr: true
- UseLfLineEndings: true
- UseCheckedInLocProjectJson: false
- SkipLocProjectJsonGeneration: false
- LanguageSet: VS_Main_Languages
- LclSource: lclFilesInRepo
- LclPackageId: ''
- RepoType: gitHub
- GitHubOrg: dotnet
- MirrorRepo: ''
- MirrorBranch: main
- condition: ''
- JobNameSuffix: ''
-
jobs:
-- job: OneLocBuild${{ parameters.JobNameSuffix }}
-
- dependsOn: ${{ parameters.dependsOn }}
-
- displayName: OneLocBuild${{ parameters.JobNameSuffix }}
-
- variables:
- - group: OneLocBuildVariables # Contains the CeapexPat and GithubPat
- - name: _GenerateLocProjectArguments
- value: -SourcesDirectory ${{ parameters.SourcesDirectory }}
- -LanguageSet "${{ parameters.LanguageSet }}"
- -CreateNeutralXlfs
- - ${{ if eq(parameters.UseCheckedInLocProjectJson, 'true') }}:
- - name: _GenerateLocProjectArguments
- value: ${{ variables._GenerateLocProjectArguments }} -UseCheckedInLocProjectJson
- - template: /eng/common/templates-official/variables/pool-providers.yml
-
- ${{ if ne(parameters.pool, '') }}:
- pool: ${{ parameters.pool }}
- ${{ if eq(parameters.pool, '') }}:
- pool:
- # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
- ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
- name: AzurePipelines-EO
- image: 1ESPT-Windows2022
- demands: Cmd
- os: windows
- # If it's not devdiv, it's dnceng
- ${{ if ne(variables['System.TeamProject'], 'DevDiv') }}:
- name: $(DncEngInternalBuildPool)
- image: 1es-windows-2022
- os: windows
-
- steps:
- - ${{ if ne(parameters.SkipLocProjectJsonGeneration, 'true') }}:
- - task: Powershell@2
- inputs:
- filePath: $(Build.SourcesDirectory)/eng/common/generate-locproject.ps1
- arguments: $(_GenerateLocProjectArguments)
- displayName: Generate LocProject.json
- condition: ${{ parameters.condition }}
-
- - task: OneLocBuild@2
- displayName: OneLocBuild
- env:
- SYSTEM_ACCESSTOKEN: $(System.AccessToken)
- inputs:
- locProj: eng/Localize/LocProject.json
- outDir: $(Build.ArtifactStagingDirectory)
- lclSource: ${{ parameters.LclSource }}
- lclPackageId: ${{ parameters.LclPackageId }}
- isCreatePrSelected: ${{ parameters.CreatePr }}
- isAutoCompletePrSelected: ${{ parameters.AutoCompletePr }}
- ${{ if eq(parameters.CreatePr, true) }}:
- isUseLfLineEndingsSelected: ${{ parameters.UseLfLineEndings }}
- ${{ if eq(parameters.RepoType, 'gitHub') }}:
- isShouldReusePrSelected: ${{ parameters.ReusePr }}
- packageSourceAuth: patAuth
- patVariable: ${{ parameters.CeapexPat }}
- ${{ if eq(parameters.RepoType, 'gitHub') }}:
- repoType: ${{ parameters.RepoType }}
- gitHubPatVariable: "${{ parameters.GithubPat }}"
- ${{ if ne(parameters.MirrorRepo, '') }}:
- isMirrorRepoSelected: true
- gitHubOrganization: ${{ parameters.GitHubOrg }}
- mirrorRepo: ${{ parameters.MirrorRepo }}
- mirrorBranch: ${{ parameters.MirrorBranch }}
- condition: ${{ parameters.condition }}
-
- - task: 1ES.PublishBuildArtifacts@1
- displayName: Publish Localization Files
- inputs:
- PathtoPublish: '$(Build.ArtifactStagingDirectory)/loc'
- PublishLocation: Container
- ArtifactName: Loc
- condition: ${{ parameters.condition }}
+- template: /eng/common/core-templates/job/onelocbuild.yml
+ parameters:
+ is1ESPipeline: true
- - task: 1ES.PublishBuildArtifacts@1
- displayName: Publish LocProject.json
- inputs:
- PathtoPublish: '$(Build.SourcesDirectory)/eng/Localize/'
- PublishLocation: Container
- ArtifactName: Loc
- condition: ${{ parameters.condition }}
\ No newline at end of file
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
diff --git a/eng/common/templates-official/job/publish-build-assets.yml b/eng/common/templates-official/job/publish-build-assets.yml
index 0117328800c..d667a70e8de 100644
--- a/eng/common/templates-official/job/publish-build-assets.yml
+++ b/eng/common/templates-official/job/publish-build-assets.yml
@@ -1,160 +1,7 @@
-parameters:
- configuration: 'Debug'
-
- # Optional: condition for the job to run
- condition: ''
-
- # Optional: 'true' if future jobs should run even if this job fails
- continueOnError: false
-
- # Optional: dependencies of the job
- dependsOn: ''
-
- # Optional: Include PublishBuildArtifacts task
- enablePublishBuildArtifacts: false
-
- # Optional: A defined YAML pool - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#pool
- pool: {}
-
- # Optional: should run as a public build even in the internal project
- # if 'true', the build won't run any of the internal only steps, even if it is running in non-public projects.
- runAsPublic: false
-
- # Optional: whether the build's artifacts will be published using release pipelines or direct feed publishing
- publishUsingPipelines: false
-
- # Optional: whether the build's artifacts will be published using release pipelines or direct feed publishing
- publishAssetsImmediately: false
-
- artifactsPublishingAdditionalParameters: ''
-
- signingValidationAdditionalParameters: ''
-
jobs:
-- job: Asset_Registry_Publish
-
- dependsOn: ${{ parameters.dependsOn }}
- timeoutInMinutes: 150
-
- ${{ if eq(parameters.publishAssetsImmediately, 'true') }}:
- displayName: Publish Assets
- ${{ else }}:
- displayName: Publish to Build Asset Registry
-
- variables:
- - template: /eng/common/templates-official/variables/pool-providers.yml
- - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- - group: Publish-Build-Assets
- - group: AzureDevOps-Artifact-Feeds-Pats
- - name: runCodesignValidationInjection
- value: false
- - ${{ if eq(parameters.publishAssetsImmediately, 'true') }}:
- - template: /eng/common/templates-official/post-build/common-variables.yml
-
- pool:
- # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
- ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
- name: AzurePipelines-EO
- image: 1ESPT-Windows2022
- demands: Cmd
- os: windows
- # If it's not devdiv, it's dnceng
- ${{ if ne(variables['System.TeamProject'], 'DevDiv') }}:
- name: NetCore1ESPool-Publishing-Internal
- image: windows.vs2019.amd64
- os: windows
- steps:
- - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- - task: DownloadBuildArtifacts@0
- displayName: Download artifact
- inputs:
- artifactName: AssetManifests
- downloadPath: '$(Build.StagingDirectory)/Download'
- checkDownloadedFiles: true
- condition: ${{ parameters.condition }}
- continueOnError: ${{ parameters.continueOnError }}
-
- - task: NuGetAuthenticate@1
-
- - task: AzureCLI@2
- displayName: Publish Build Assets
- inputs:
- azureSubscription: "Darc: Maestro Production"
- scriptType: ps
- scriptLocation: scriptPath
- scriptPath: $(Build.SourcesDirectory)/eng/common/sdk-task.ps1
- arguments: >
- -task PublishBuildAssets -restore -msbuildEngine dotnet
- /p:ManifestsPath='$(Build.StagingDirectory)/Download/AssetManifests'
- /p:MaestroApiEndpoint=https://maestro-prod.westus2.cloudapp.azure.com
- /p:PublishUsingPipelines=${{ parameters.publishUsingPipelines }}
- /p:OfficialBuildId=$(Build.BuildNumber)
- condition: ${{ parameters.condition }}
- continueOnError: ${{ parameters.continueOnError }}
-
- - task: powershell@2
- displayName: Create ReleaseConfigs Artifact
- inputs:
- targetType: inline
- script: |
- New-Item -Path "$(Build.StagingDirectory)/ReleaseConfigs" -ItemType Directory -Force
- $filePath = "$(Build.StagingDirectory)/ReleaseConfigs/ReleaseConfigs.txt"
- Add-Content -Path $filePath -Value $(BARBuildId)
- Add-Content -Path $filePath -Value "$(DefaultChannels)"
- Add-Content -Path $filePath -Value $(IsStableBuild)
-
- - task: 1ES.PublishBuildArtifacts@1
- displayName: Publish ReleaseConfigs Artifact
- inputs:
- PathtoPublish: '$(Build.StagingDirectory)/ReleaseConfigs'
- PublishLocation: Container
- ArtifactName: ReleaseConfigs
-
- - task: powershell@2
- displayName: Check if SymbolPublishingExclusionsFile.txt exists
- inputs:
- targetType: inline
- script: |
- $symbolExclusionfile = "$(Build.SourcesDirectory)/eng/SymbolPublishingExclusionsFile.txt"
- if(Test-Path -Path $symbolExclusionfile)
- {
- Write-Host "SymbolExclusionFile exists"
- Write-Host "##vso[task.setvariable variable=SymbolExclusionFile]true"
- }
- else{
- Write-Host "Symbols Exclusion file does not exists"
- Write-Host "##vso[task.setvariable variable=SymbolExclusionFile]false"
- }
-
- - task: 1ES.PublishBuildArtifacts@1
- displayName: Publish SymbolPublishingExclusionsFile Artifact
- condition: eq(variables['SymbolExclusionFile'], 'true')
- inputs:
- PathtoPublish: '$(Build.SourcesDirectory)/eng/SymbolPublishingExclusionsFile.txt'
- PublishLocation: Container
- ArtifactName: ReleaseConfigs
-
- - ${{ if eq(parameters.publishAssetsImmediately, 'true') }}:
- - template: /eng/common/templates-official/post-build/setup-maestro-vars.yml
- parameters:
- BARBuildId: ${{ parameters.BARBuildId }}
- PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
-
- - task: AzureCLI@2
- displayName: Publish Using Darc
- inputs:
- azureSubscription: "Darc: Maestro Production"
- scriptType: ps
- scriptLocation: scriptPath
- scriptPath: $(Build.SourcesDirectory)/eng/common/post-build/publish-using-darc.ps1
- arguments: -BuildId $(BARBuildId)
- -PublishingInfraVersion 3
- -AzdoToken '$(System.AccessToken)'
- -WaitPublishingFinish true
- -ArtifactsPublishingAdditionalParameters '${{ parameters.artifactsPublishingAdditionalParameters }}'
- -SymbolPublishingAdditionalParameters '${{ parameters.symbolPublishingAdditionalParameters }}'
+- template: /eng/common/core-templates/job/publish-build-assets.yml
+ parameters:
+ is1ESPipeline: true
- - ${{ if eq(parameters.enablePublishBuildArtifacts, 'true') }}:
- - template: /eng/common/templates-official/steps/publish-logs.yml
- parameters:
- JobLabel: 'Publish_Artifacts_Logs'
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
diff --git a/eng/common/templates-official/job/source-build.yml b/eng/common/templates-official/job/source-build.yml
index f983033bb02..1a480034b67 100644
--- a/eng/common/templates-official/job/source-build.yml
+++ b/eng/common/templates-official/job/source-build.yml
@@ -1,75 +1,7 @@
-parameters:
- # This template adds arcade-powered source-build to CI. The template produces a server job with a
- # default ID 'Source_Build_Complete' to put in a dependency list if necessary.
-
- # Specifies the prefix for source-build jobs added to pipeline. Use this if disambiguation needed.
- jobNamePrefix: 'Source_Build'
-
- # Defines the platform on which to run the job. By default, a linux-x64 machine, suitable for
- # managed-only repositories. This is an object with these properties:
- #
- # name: ''
- # The name of the job. This is included in the job ID.
- # targetRID: ''
- # The name of the target RID to use, instead of the one auto-detected by Arcade.
- # nonPortable: false
- # Enables non-portable mode. This means a more specific RID (e.g. fedora.32-x64 rather than
- # linux-x64), and compiling against distro-provided packages rather than portable ones.
- # skipPublishValidation: false
- # Disables publishing validation. By default, a check is performed to ensure no packages are
- # published by source-build.
- # container: ''
- # A container to use. Runs in docker.
- # pool: {}
- # A pool to use. Runs directly on an agent.
- # buildScript: ''
- # Specifies the build script to invoke to perform the build in the repo. The default
- # './build.sh' should work for typical Arcade repositories, but this is customizable for
- # difficult situations.
- # jobProperties: {}
- # A list of job properties to inject at the top level, for potential extensibility beyond
- # container and pool.
- platform: {}
-
- # If set to true and running on a non-public project,
- # Internal blob storage locations will be enabled.
- # This is not enabled by default because many repositories do not need internal sources
- # and do not need to have the required service connections approved in the pipeline.
- enableInternalSources: false
-
jobs:
-- job: ${{ parameters.jobNamePrefix }}_${{ parameters.platform.name }}
- displayName: Source-Build (${{ parameters.platform.name }})
-
- ${{ each property in parameters.platform.jobProperties }}:
- ${{ property.key }}: ${{ property.value }}
-
- ${{ if ne(parameters.platform.container, '') }}:
- container: ${{ parameters.platform.container }}
-
- ${{ if eq(parameters.platform.pool, '') }}:
- # The default VM host AzDO pool. This should be capable of running Docker containers: almost all
- # source-build builds run in Docker, including the default managed platform.
- # /eng/common/templates-official/variables/pool-providers.yml can't be used here (some customers declare variables already), so duplicate its logic
- pool:
- ${{ if eq(variables['System.TeamProject'], 'public') }}:
- name: $[replace(replace(eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'), True, 'NetCore-Svc-Public' ), False, 'NetCore-Public')]
- demands: ImageOverride -equals Build.Ubuntu.1804.Amd64.Open
-
- ${{ if eq(variables['System.TeamProject'], 'internal') }}:
- name: $[replace(replace(eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'), True, 'NetCore1ESPool-Svc-Internal'), False, 'NetCore1ESPool-Internal')]
- image: 1es-mariner-2
- os: linux
-
- ${{ if ne(parameters.platform.pool, '') }}:
- pool: ${{ parameters.platform.pool }}
-
- workspace:
- clean: all
+- template: /eng/common/core-templates/job/source-build.yml
+ parameters:
+ is1ESPipeline: true
- steps:
- - ${{ if eq(parameters.enableInternalSources, true) }}:
- - template: /eng/common/templates-official/steps/enable-internal-runtimes.yml
- - template: /eng/common/templates-official/steps/source-build.yml
- parameters:
- platform: ${{ parameters.platform }}
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
diff --git a/eng/common/templates-official/job/source-index-stage1.yml b/eng/common/templates-official/job/source-index-stage1.yml
index 60dfb6b2d1c..6d5ead316f9 100644
--- a/eng/common/templates-official/job/source-index-stage1.yml
+++ b/eng/common/templates-official/job/source-index-stage1.yml
@@ -1,83 +1,7 @@
-parameters:
- runAsPublic: false
- sourceIndexUploadPackageVersion: 2.0.0-20240502.12
- sourceIndexProcessBinlogPackageVersion: 1.0.1-20240129.2
- sourceIndexPackageSource: https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json
- sourceIndexBuildCommand: powershell -NoLogo -NoProfile -ExecutionPolicy Bypass -Command "eng/common/build.ps1 -restore -build -binarylog -ci"
- preSteps: []
- binlogPath: artifacts/log/Debug/Build.binlog
- condition: ''
- dependsOn: ''
- pool: ''
-
jobs:
-- job: SourceIndexStage1
- dependsOn: ${{ parameters.dependsOn }}
- condition: ${{ parameters.condition }}
- variables:
- - name: SourceIndexUploadPackageVersion
- value: ${{ parameters.sourceIndexUploadPackageVersion }}
- - name: SourceIndexProcessBinlogPackageVersion
- value: ${{ parameters.sourceIndexProcessBinlogPackageVersion }}
- - name: SourceIndexPackageSource
- value: ${{ parameters.sourceIndexPackageSource }}
- - name: BinlogPath
- value: ${{ parameters.binlogPath }}
- - template: /eng/common/templates-official/variables/pool-providers.yml
-
- ${{ if ne(parameters.pool, '') }}:
- pool: ${{ parameters.pool }}
- ${{ if eq(parameters.pool, '') }}:
- pool:
- ${{ if eq(variables['System.TeamProject'], 'public') }}:
- name: $(DncEngPublicBuildPool)
- demands: ImageOverride -equals windows.vs2019.amd64.open
- ${{ if eq(variables['System.TeamProject'], 'internal') }}:
- name: $(DncEngInternalBuildPool)
- image: windows.vs2022.amd64
- os: windows
-
- steps:
- - ${{ each preStep in parameters.preSteps }}:
- - ${{ preStep }}
-
- - task: UseDotNet@2
- displayName: Use .NET 8 SDK
- inputs:
- packageType: sdk
- version: 8.0.x
- installationPath: $(Agent.TempDirectory)/dotnet
- workingDirectory: $(Agent.TempDirectory)
-
- - script: |
- $(Agent.TempDirectory)/dotnet/dotnet tool install BinLogToSln --version $(sourceIndexProcessBinlogPackageVersion) --add-source $(SourceIndexPackageSource) --tool-path $(Agent.TempDirectory)/.source-index/tools
- $(Agent.TempDirectory)/dotnet/dotnet tool install UploadIndexStage1 --version $(sourceIndexUploadPackageVersion) --add-source $(SourceIndexPackageSource) --tool-path $(Agent.TempDirectory)/.source-index/tools
- displayName: Download Tools
- # Set working directory to temp directory so 'dotnet' doesn't try to use global.json and use the repo's sdk.
- workingDirectory: $(Agent.TempDirectory)
-
- - script: ${{ parameters.sourceIndexBuildCommand }}
- displayName: Build Repository
-
- - script: $(Agent.TempDirectory)/.source-index/tools/BinLogToSln -i $(BinlogPath) -r $(Build.SourcesDirectory) -n $(Build.Repository.Name) -o .source-index/stage1output
- displayName: Process Binlog into indexable sln
-
- - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- - task: AzureCLI@2
- displayName: Get stage 1 auth token
- inputs:
- azureSubscription: 'SourceDotNet Stage1 Publish'
- addSpnToEnvironment: true
- scriptType: 'ps'
- scriptLocation: 'inlineScript'
- inlineScript: |
- echo "##vso[task.setvariable variable=ARM_CLIENT_ID;issecret=true]$env:servicePrincipalId"
- echo "##vso[task.setvariable variable=ARM_ID_TOKEN;issecret=true]$env:idToken"
- echo "##vso[task.setvariable variable=ARM_TENANT_ID;issecret=true]$env:tenantId"
-
- - script: |
- az login --service-principal -u $(ARM_CLIENT_ID) --tenant $(ARM_TENANT_ID) --allow-no-subscriptions --federated-token $(ARM_ID_TOKEN)
- displayName: "Login to Azure"
-
-  - script: $(Agent.TempDirectory)/.source-index/tools/UploadIndexStage1 -i .source-index/stage1output -n $(Build.Repository.Name) -s netsourceindexstage1 -b stage1
-    displayName: Upload stage1 artifacts to source index
+- template: /eng/common/core-templates/job/source-index-stage1.yml
+  parameters:
+    is1ESPipeline: true
+
+    ${{ each parameter in parameters }}:
+      ${{ parameter.key }}: ${{ parameter.value }}
diff --git a/eng/common/templates-official/jobs/codeql-build.yml b/eng/common/templates-official/jobs/codeql-build.yml
index b68d3c2f319..a726322ecfe 100644
--- a/eng/common/templates-official/jobs/codeql-build.yml
+++ b/eng/common/templates-official/jobs/codeql-build.yml
@@ -1,31 +1,7 @@
-parameters:
- # See schema documentation in /Documentation/AzureDevOps/TemplateSchema.md
- continueOnError: false
- # Required: A collection of jobs to run - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job
- jobs: []
- # Optional: if specified, restore and use this version of Guardian instead of the default.
- overrideGuardianVersion: ''
-
jobs:
-- template: /eng/common/templates-official/jobs/jobs.yml
+- template: /eng/common/core-templates/jobs/codeql-build.yml
parameters:
- enableMicrobuild: false
- enablePublishBuildArtifacts: false
- enablePublishTestResults: false
- enablePublishBuildAssets: false
- enablePublishUsingPipelines: false
- enableTelemetry: true
-
-    variables:
-      - group: Publish-Build-Assets
-      # The Guardian version specified in 'eng/common/sdl/packages.config'. This value must be kept in
-      # sync with the packages.config file.
-      - name: DefaultGuardianVersion
-        value: 0.109.0
-      - name: GuardianPackagesConfigFile
-        value: $(Build.SourcesDirectory)\eng\common\sdl\packages.config
-      - name: GuardianVersion
-        value: ${{ coalesce(parameters.overrideGuardianVersion, '$(DefaultGuardianVersion)') }}
-
-    jobs: ${{ parameters.jobs }}
-
+    is1ESPipeline: true
+
+    ${{ each parameter in parameters }}:
+      ${{ parameter.key }}: ${{ parameter.value }}
diff --git a/eng/common/templates-official/jobs/jobs.yml b/eng/common/templates-official/jobs/jobs.yml
index 857a0f8ba43..007deddaea0 100644
--- a/eng/common/templates-official/jobs/jobs.yml
+++ b/eng/common/templates-official/jobs/jobs.yml
@@ -1,97 +1,7 @@
-parameters:
- # See schema documentation in /Documentation/AzureDevOps/TemplateSchema.md
- continueOnError: false
-
- # Optional: Include PublishBuildArtifacts task
- enablePublishBuildArtifacts: false
-
- # Optional: Enable publishing using release pipelines
- enablePublishUsingPipelines: false
-
- # Optional: Enable running the source-build jobs to build repo from source
- enableSourceBuild: false
-
- # Optional: Parameters for source-build template.
- # See /eng/common/templates-official/jobs/source-build.yml for options
- sourceBuildParameters: []
-
- graphFileGeneration:
- # Optional: Enable generating the graph files at the end of the build
- enabled: false
- # Optional: Include toolset dependencies in the generated graph files
- includeToolset: false
-
- # Required: A collection of jobs to run - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job
- jobs: []
-
- # Optional: Override automatically derived dependsOn value for "publish build assets" job
- publishBuildAssetsDependsOn: ''
-
-  # Optional: Publish the assets as soon as the publish to BAR stage is complete, rather than doing so in a separate stage.
- publishAssetsImmediately: false
-
- # Optional: If using publishAssetsImmediately and additional parameters are needed, can be used to send along additional parameters (normally sent to post-build.yml)
- artifactsPublishingAdditionalParameters: ''
- signingValidationAdditionalParameters: ''
-
- # Optional: should run as a public build even in the internal project
- # if 'true', the build won't run any of the internal only steps, even if it is running in non-public projects.
- runAsPublic: false
-
- enableSourceIndex: false
- sourceIndexParams: {}
-
-# Internal resources (telemetry, microbuild) can only be accessed from non-public projects,
-# and some (Microbuild) should only be applied to non-PR cases for internal builds.
-
jobs:
-- ${{ each job in parameters.jobs }}:
- - template: ../job/job.yml
- parameters:
- # pass along parameters
- ${{ each parameter in parameters }}:
- ${{ if ne(parameter.key, 'jobs') }}:
- ${{ parameter.key }}: ${{ parameter.value }}
-
- # pass along job properties
- ${{ each property in job }}:
- ${{ if ne(property.key, 'job') }}:
- ${{ property.key }}: ${{ property.value }}
-
- name: ${{ job.job }}
-
-- ${{ if eq(parameters.enableSourceBuild, true) }}:
- - template: /eng/common/templates-official/jobs/source-build.yml
- parameters:
- allCompletedJobId: Source_Build_Complete
- ${{ each parameter in parameters.sourceBuildParameters }}:
- ${{ parameter.key }}: ${{ parameter.value }}
-
-- ${{ if eq(parameters.enableSourceIndex, 'true') }}:
- - template: ../job/source-index-stage1.yml
- parameters:
- runAsPublic: ${{ parameters.runAsPublic }}
- ${{ each parameter in parameters.sourceIndexParams }}:
- ${{ parameter.key }}: ${{ parameter.value }}
-
-- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- - ${{ if or(eq(parameters.enablePublishBuildAssets, true), eq(parameters.artifacts.publish.manifests, 'true'), ne(parameters.artifacts.publish.manifests, '')) }}:
- - template: ../job/publish-build-assets.yml
- parameters:
- continueOnError: ${{ parameters.continueOnError }}
- dependsOn:
- - ${{ if ne(parameters.publishBuildAssetsDependsOn, '') }}:
- - ${{ each job in parameters.publishBuildAssetsDependsOn }}:
- - ${{ job.job }}
- - ${{ if eq(parameters.publishBuildAssetsDependsOn, '') }}:
- - ${{ each job in parameters.jobs }}:
- - ${{ job.job }}
- - ${{ if eq(parameters.enableSourceBuild, true) }}:
- - Source_Build_Complete
-
-      runAsPublic: ${{ parameters.runAsPublic }}
-      publishUsingPipelines: ${{ parameters.enablePublishUsingPipelines }}
-      publishAssetsImmediately: ${{ parameters.publishAssetsImmediately }}
-      enablePublishBuildArtifacts: ${{ parameters.enablePublishBuildArtifacts }}
-      artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
-      signingValidationAdditionalParameters: ${{ parameters.signingValidationAdditionalParameters }}
+- template: /eng/common/core-templates/jobs/jobs.yml
+  parameters:
+    is1ESPipeline: true
+
+    ${{ each parameter in parameters }}:
+      ${{ parameter.key }}: ${{ parameter.value }}
diff --git a/eng/common/templates-official/jobs/source-build.yml b/eng/common/templates-official/jobs/source-build.yml
index 5cf6a269c0b..483e7b611f3 100644
--- a/eng/common/templates-official/jobs/source-build.yml
+++ b/eng/common/templates-official/jobs/source-build.yml
@@ -1,54 +1,7 @@
-parameters:
- # This template adds arcade-powered source-build to CI. A job is created for each platform, as
- # well as an optional server job that completes when all platform jobs complete.
-
- # The name of the "join" job for all source-build platforms. If set to empty string, the job is
-  # not included. Existing repo pipelines can use this job to depend on all source-build jobs
- # completing without maintaining a separate list of every single job ID: just depend on this one
- # server job. By default, not included. Recommended name if used: 'Source_Build_Complete'.
- allCompletedJobId: ''
-
- # See /eng/common/templates-official/job/source-build.yml
- jobNamePrefix: 'Source_Build'
-
- # This is the default platform provided by Arcade, intended for use by a managed-only repo.
- defaultManagedPlatform:
- name: 'Managed'
- container: 'mcr.microsoft.com/dotnet-buildtools/prereqs:centos-stream8'
-
- # Defines the platforms on which to run build jobs. One job is created for each platform, and the
- # object in this array is sent to the job template as 'platform'. If no platforms are specified,
- # one job runs on 'defaultManagedPlatform'.
- platforms: []
-
- # If set to true and running on a non-public project,
- # Internal nuget and blob storage locations will be enabled.
- # This is not enabled by default because many repositories do not need internal sources
- # and do not need to have the required service connections approved in the pipeline.
- enableInternalSources: false
-
jobs:
-
-- ${{ if ne(parameters.allCompletedJobId, '') }}:
-  - job: ${{ parameters.allCompletedJobId }}
-    displayName: Source-Build Complete
-    pool: server
-    dependsOn:
-    - ${{ each platform in parameters.platforms }}:
-      - ${{ parameters.jobNamePrefix }}_${{ platform.name }}
-    - ${{ if eq(length(parameters.platforms), 0) }}:
-      - ${{ parameters.jobNamePrefix }}_${{ parameters.defaultManagedPlatform.name }}
-
-- ${{ each platform in parameters.platforms }}:
-  - template: /eng/common/templates-official/job/source-build.yml
-    parameters:
-      jobNamePrefix: ${{ parameters.jobNamePrefix }}
-      platform: ${{ platform }}
-      enableInternalSources: ${{ parameters.enableInternalSources }}
-
-- ${{ if eq(length(parameters.platforms), 0) }}:
-  - template: /eng/common/templates-official/job/source-build.yml
-    parameters:
-      jobNamePrefix: ${{ parameters.jobNamePrefix }}
-      platform: ${{ parameters.defaultManagedPlatform }}
-      enableInternalSources: ${{ parameters.enableInternalSources }}
+- template: /eng/common/core-templates/jobs/source-build.yml
+  parameters:
+    is1ESPipeline: true
+
+    ${{ each parameter in parameters }}:
+      ${{ parameter.key }}: ${{ parameter.value }}
\ No newline at end of file
diff --git a/eng/common/templates-official/post-build/common-variables.yml b/eng/common/templates-official/post-build/common-variables.yml
index c24193acfc9..c32fc49233f 100644
--- a/eng/common/templates-official/post-build/common-variables.yml
+++ b/eng/common/templates-official/post-build/common-variables.yml
@@ -1,22 +1,8 @@
variables:
- - group: Publish-Build-Assets
-
-  # Whether the build is internal or not
-  - name: IsInternalBuild
-    value: ${{ and(ne(variables['System.TeamProject'], 'public'), contains(variables['Build.SourceBranch'], 'internal')) }}
-
-  # Default Maestro++ API Endpoint and API Version
-  - name: MaestroApiEndPoint
-    value: "https://maestro-prod.westus2.cloudapp.azure.com"
-  - name: MaestroApiAccessToken
-    value: $(MaestroAccessToken)
-  - name: MaestroApiVersion
-    value: "2020-02-20"
-
-  - name: SourceLinkCLIVersion
-    value: 3.0.0
-  - name: SymbolToolVersion
-    value: 1.0.1
-
-  - name: runCodesignValidationInjection
-    value: false
+- template: /eng/common/core-templates/post-build/common-variables.yml
+  parameters:
+    # Specifies whether to use 1ES
+    is1ESPipeline: true
+
+    ${{ each parameter in parameters }}:
+      ${{ parameter.key }}: ${{ parameter.value }}
\ No newline at end of file
diff --git a/eng/common/templates-official/post-build/post-build.yml b/eng/common/templates-official/post-build/post-build.yml
index b81b8770b34..2364c0fd4a5 100644
--- a/eng/common/templates-official/post-build/post-build.yml
+++ b/eng/common/templates-official/post-build/post-build.yml
@@ -1,287 +1,8 @@
-parameters:
- # Which publishing infra should be used. THIS SHOULD MATCH THE VERSION ON THE BUILD MANIFEST.
- # Publishing V1 is no longer supported
- # Publishing V2 is no longer supported
- # Publishing V3 is the default
- - name: publishingInfraVersion
- displayName: Which version of publishing should be used to promote the build definition?
- type: number
- default: 3
- values:
- - 3
-
- - name: BARBuildId
- displayName: BAR Build Id
- type: number
- default: 0
-
- - name: PromoteToChannelIds
- displayName: Channel to promote BARBuildId to
- type: string
- default: ''
-
- - name: enableSourceLinkValidation
- displayName: Enable SourceLink validation
- type: boolean
- default: false
-
- - name: enableSigningValidation
- displayName: Enable signing validation
- type: boolean
- default: true
-
- - name: enableSymbolValidation
- displayName: Enable symbol validation
- type: boolean
- default: false
-
- - name: enableNugetValidation
- displayName: Enable NuGet validation
- type: boolean
- default: true
-
- - name: publishInstallersAndChecksums
- displayName: Publish installers and checksums
- type: boolean
- default: true
-
- - name: SDLValidationParameters
- type: object
- default:
- enable: false
- publishGdn: false
- continueOnError: false
- params: ''
- artifactNames: ''
- downloadArtifacts: true
-
- # These parameters let the user customize the call to sdk-task.ps1 for publishing
- # symbols & general artifacts as well as for signing validation
- - name: symbolPublishingAdditionalParameters
- displayName: Symbol publishing additional parameters
- type: string
- default: ''
-
- - name: artifactsPublishingAdditionalParameters
- displayName: Artifact publishing additional parameters
- type: string
- default: ''
-
- - name: signingValidationAdditionalParameters
- displayName: Signing validation additional parameters
- type: string
- default: ''
-
- # Which stages should finish execution before post-build stages start
- - name: validateDependsOn
- type: object
- default:
- - build
-
- - name: publishDependsOn
- type: object
- default:
- - Validate
-
- # Optional: Call asset publishing rather than running in a separate stage
- - name: publishAssetsImmediately
- type: boolean
- default: false
-
stages:
-- ${{ if or(eq( parameters.enableNugetValidation, 'true'), eq(parameters.enableSigningValidation, 'true'), eq(parameters.enableSourceLinkValidation, 'true'), eq(parameters.SDLValidationParameters.enable, 'true')) }}:
- - stage: Validate
- dependsOn: ${{ parameters.validateDependsOn }}
- displayName: Validate Build Assets
- variables:
- - template: common-variables.yml
- - template: /eng/common/templates-official/variables/pool-providers.yml
- jobs:
- - job:
- displayName: NuGet Validation
- condition: and(succeededOrFailed(), eq( ${{ parameters.enableNugetValidation }}, 'true'))
- pool:
- # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
- ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
- name: AzurePipelines-EO
- image: 1ESPT-Windows2022
- demands: Cmd
- os: windows
- # If it's not devdiv, it's dnceng
- ${{ else }}:
- name: $(DncEngInternalBuildPool)
- image: 1es-windows-2022
- os: windows
-
- steps:
- - template: setup-maestro-vars.yml
- parameters:
- BARBuildId: ${{ parameters.BARBuildId }}
- PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
-
- - task: DownloadBuildArtifacts@0
- displayName: Download Package Artifacts
- inputs:
- buildType: specific
- buildVersionToDownload: specific
- project: $(AzDOProjectName)
- pipeline: $(AzDOPipelineId)
- buildId: $(AzDOBuildId)
- artifactName: PackageArtifacts
- checkDownloadedFiles: true
-
- - task: PowerShell@2
- displayName: Validate
- inputs:
- filePath: $(Build.SourcesDirectory)/eng/common/post-build/nuget-validation.ps1
- arguments: -PackagesPath $(Build.ArtifactStagingDirectory)/PackageArtifacts/
- -ToolDestinationPath $(Agent.BuildDirectory)/Extract/
-
- - job:
- displayName: Signing Validation
- condition: and( eq( ${{ parameters.enableSigningValidation }}, 'true'), ne( variables['PostBuildSign'], 'true'))
- pool:
- # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
- ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
- name: AzurePipelines-EO
- image: 1ESPT-Windows2022
- demands: Cmd
- os: windows
- # If it's not devdiv, it's dnceng
- ${{ else }}:
- name: $(DncEngInternalBuildPool)
- image: 1es-windows-2022
- os: windows
- steps:
- - template: setup-maestro-vars.yml
- parameters:
- BARBuildId: ${{ parameters.BARBuildId }}
- PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
-
- - task: DownloadBuildArtifacts@0
- displayName: Download Package Artifacts
- inputs:
- buildType: specific
- buildVersionToDownload: specific
- project: $(AzDOProjectName)
- pipeline: $(AzDOPipelineId)
- buildId: $(AzDOBuildId)
- artifactName: PackageArtifacts
- checkDownloadedFiles: true
- itemPattern: |
- **
- !**/Microsoft.SourceBuild.Intermediate.*.nupkg
-
- # This is necessary whenever we want to publish/restore to an AzDO private feed
- # Since sdk-task.ps1 tries to restore packages we need to do this authentication here
- # otherwise it'll complain about accessing a private feed.
- - task: NuGetAuthenticate@1
- displayName: 'Authenticate to AzDO Feeds'
-
- # Signing validation will optionally work with the buildmanifest file which is downloaded from
- # Azure DevOps above.
- - task: PowerShell@2
- displayName: Validate
- inputs:
- filePath: eng\common\sdk-task.ps1
- arguments: -task SigningValidation -restore -msbuildEngine vs
- /p:PackageBasePath='$(Build.ArtifactStagingDirectory)/PackageArtifacts'
- /p:SignCheckExclusionsFile='$(Build.SourcesDirectory)/eng/SignCheckExclusionsFile.txt'
- ${{ parameters.signingValidationAdditionalParameters }}
-
- - template: ../steps/publish-logs.yml
- parameters:
- StageLabel: 'Validation'
- JobLabel: 'Signing'
- BinlogToolVersion: $(BinlogToolVersion)
-
- - job:
- displayName: SourceLink Validation
- condition: eq( ${{ parameters.enableSourceLinkValidation }}, 'true')
- pool:
- # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
- ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
- name: AzurePipelines-EO
- image: 1ESPT-Windows2022
- demands: Cmd
- os: windows
- # If it's not devdiv, it's dnceng
- ${{ else }}:
- name: $(DncEngInternalBuildPool)
- image: 1es-windows-2022
- os: windows
- steps:
- - template: setup-maestro-vars.yml
- parameters:
- BARBuildId: ${{ parameters.BARBuildId }}
- PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
-
- - task: DownloadBuildArtifacts@0
- displayName: Download Blob Artifacts
- inputs:
- buildType: specific
- buildVersionToDownload: specific
- project: $(AzDOProjectName)
- pipeline: $(AzDOPipelineId)
- buildId: $(AzDOBuildId)
- artifactName: BlobArtifacts
- checkDownloadedFiles: true
-
- - task: PowerShell@2
- displayName: Validate
- inputs:
- filePath: $(Build.SourcesDirectory)/eng/common/post-build/sourcelink-validation.ps1
- arguments: -InputPath $(Build.ArtifactStagingDirectory)/BlobArtifacts/
- -ExtractPath $(Agent.BuildDirectory)/Extract/
- -GHRepoName $(Build.Repository.Name)
- -GHCommit $(Build.SourceVersion)
- -SourcelinkCliVersion $(SourceLinkCLIVersion)
- continueOnError: true
-
-- ${{ if ne(parameters.publishAssetsImmediately, 'true') }}:
- - stage: publish_using_darc
- ${{ if or(eq(parameters.enableNugetValidation, 'true'), eq(parameters.enableSigningValidation, 'true'), eq(parameters.enableSourceLinkValidation, 'true'), eq(parameters.SDLValidationParameters.enable, 'true')) }}:
- dependsOn: ${{ parameters.publishDependsOn }}
- ${{ else }}:
- dependsOn: ${{ parameters.validateDependsOn }}
- displayName: Publish using Darc
- variables:
- - template: common-variables.yml
- - template: /eng/common/templates-official/variables/pool-providers.yml
- jobs:
- - job:
- displayName: Publish Using Darc
- timeoutInMinutes: 120
- pool:
- # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
- ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
- name: AzurePipelines-EO
- image: 1ESPT-Windows2022
- demands: Cmd
- os: windows
- # If it's not devdiv, it's dnceng
- ${{ else }}:
- name: NetCore1ESPool-Publishing-Internal
- image: windows.vs2019.amd64
- os: windows
- steps:
- - template: setup-maestro-vars.yml
- parameters:
- BARBuildId: ${{ parameters.BARBuildId }}
- PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
-
- - task: NuGetAuthenticate@1
-
-      - task: AzureCLI@2
-        displayName: Publish Using Darc
-        inputs:
-          azureSubscription: "Darc: Maestro Production"
-          scriptType: ps
-          scriptLocation: scriptPath
-          scriptPath: $(Build.SourcesDirectory)/eng/common/post-build/publish-using-darc.ps1
-          arguments: -BuildId $(BARBuildId)
-            -PublishingInfraVersion ${{ parameters.publishingInfraVersion }}
-            -AzdoToken '$(System.AccessToken)'
-            -WaitPublishingFinish true
-            -ArtifactsPublishingAdditionalParameters '${{ parameters.artifactsPublishingAdditionalParameters }}'
-            -SymbolPublishingAdditionalParameters '${{ parameters.symbolPublishingAdditionalParameters }}'
+- template: /eng/common/core-templates/post-build/post-build.yml
+  parameters:
+    # Specifies whether to use 1ES
+    is1ESPipeline: true
+
+    ${{ each parameter in parameters }}:
+      ${{ parameter.key }}: ${{ parameter.value }}
diff --git a/eng/common/templates-official/post-build/setup-maestro-vars.yml b/eng/common/templates-official/post-build/setup-maestro-vars.yml
index 0c87f149a4a..024397d8786 100644
--- a/eng/common/templates-official/post-build/setup-maestro-vars.yml
+++ b/eng/common/templates-official/post-build/setup-maestro-vars.yml
@@ -1,70 +1,8 @@
-parameters:
- BARBuildId: ''
- PromoteToChannelIds: ''
-
steps:
- - ${{ if eq(coalesce(parameters.PromoteToChannelIds, 0), 0) }}:
- - task: DownloadBuildArtifacts@0
- displayName: Download Release Configs
- inputs:
- buildType: current
- artifactName: ReleaseConfigs
- checkDownloadedFiles: true
-
- - task: PowerShell@2
- name: setReleaseVars
- displayName: Set Release Configs Vars
- inputs:
- targetType: inline
- pwsh: true
- script: |
- try {
- if (!$Env:PromoteToMaestroChannels -or $Env:PromoteToMaestroChannels.Trim() -eq '') {
- $Content = Get-Content $(Build.StagingDirectory)/ReleaseConfigs/ReleaseConfigs.txt
-
- $BarId = $Content | Select -Index 0
- $Channels = $Content | Select -Index 1
- $IsStableBuild = $Content | Select -Index 2
-
- $AzureDevOpsProject = $Env:System_TeamProject
- $AzureDevOpsBuildDefinitionId = $Env:System_DefinitionId
- $AzureDevOpsBuildId = $Env:Build_BuildId
- }
- else {
- $buildApiEndpoint = "${Env:MaestroApiEndPoint}/api/builds/${Env:BARBuildId}?api-version=${Env:MaestroApiVersion}"
-
- $apiHeaders = New-Object 'System.Collections.Generic.Dictionary[[String],[String]]'
- $apiHeaders.Add('Accept', 'application/json')
- $apiHeaders.Add('Authorization',"Bearer ${Env:MAESTRO_API_TOKEN}")
-
- $buildInfo = try { Invoke-WebRequest -Method Get -Uri $buildApiEndpoint -Headers $apiHeaders | ConvertFrom-Json } catch { Write-Host "Error: $_" }
-
- $BarId = $Env:BARBuildId
- $Channels = $Env:PromoteToMaestroChannels -split ","
- $Channels = $Channels -join "]["
- $Channels = "[$Channels]"
-
- $IsStableBuild = $buildInfo.stable
- $AzureDevOpsProject = $buildInfo.azureDevOpsProject
- $AzureDevOpsBuildDefinitionId = $buildInfo.azureDevOpsBuildDefinitionId
- $AzureDevOpsBuildId = $buildInfo.azureDevOpsBuildId
- }
-
- Write-Host "##vso[task.setvariable variable=BARBuildId]$BarId"
- Write-Host "##vso[task.setvariable variable=TargetChannels]$Channels"
- Write-Host "##vso[task.setvariable variable=IsStableBuild]$IsStableBuild"
-
-          Write-Host "##vso[task.setvariable variable=AzDOProjectName]$AzureDevOpsProject"
-          Write-Host "##vso[task.setvariable variable=AzDOPipelineId]$AzureDevOpsBuildDefinitionId"
-          Write-Host "##vso[task.setvariable variable=AzDOBuildId]$AzureDevOpsBuildId"
-        }
-        catch {
-          Write-Host $_
-          Write-Host $_.Exception
-          Write-Host $_.ScriptStackTrace
-          exit 1
-        }
-    env:
-      MAESTRO_API_TOKEN: $(MaestroApiAccessToken)
-      BARBuildId: ${{ parameters.BARBuildId }}
-      PromoteToMaestroChannels: ${{ parameters.PromoteToChannelIds }}
+- template: /eng/common/core-templates/post-build/setup-maestro-vars.yml
+  parameters:
+    # Specifies whether to use 1ES
+    is1ESPipeline: true
+
+    ${{ each parameter in parameters }}:
+      ${{ parameter.key }}: ${{ parameter.value }}
\ No newline at end of file
diff --git a/eng/common/templates-official/post-build/trigger-subscription.yml b/eng/common/templates-official/post-build/trigger-subscription.yml
deleted file mode 100644
index da669030daf..00000000000
--- a/eng/common/templates-official/post-build/trigger-subscription.yml
+++ /dev/null
@@ -1,13 +0,0 @@
-parameters:
- ChannelId: 0
-
-steps:
-- task: PowerShell@2
- displayName: Triggering subscriptions
- inputs:
- filePath: $(Build.SourcesDirectory)/eng/common/post-build/trigger-subscriptions.ps1
- arguments: -SourceRepo $(Build.Repository.Uri)
- -ChannelId ${{ parameters.ChannelId }}
- -MaestroApiAccessToken $(MaestroAccessToken)
- -MaestroApiEndPoint $(MaestroApiEndPoint)
- -MaestroApiVersion $(MaestroApiVersion)
diff --git a/eng/common/templates-official/steps/add-build-to-channel.yml b/eng/common/templates-official/steps/add-build-to-channel.yml
deleted file mode 100644
index f67a210d62f..00000000000
--- a/eng/common/templates-official/steps/add-build-to-channel.yml
+++ /dev/null
@@ -1,13 +0,0 @@
-parameters:
- ChannelId: 0
-
-steps:
-- task: PowerShell@2
- displayName: Add Build to Channel
- inputs:
- filePath: $(Build.SourcesDirectory)/eng/common/post-build/add-build-to-channel.ps1
- arguments: -BuildId $(BARBuildId)
- -ChannelId ${{ parameters.ChannelId }}
- -MaestroApiAccessToken $(MaestroApiAccessToken)
- -MaestroApiEndPoint $(MaestroApiEndPoint)
- -MaestroApiVersion $(MaestroApiVersion)
diff --git a/eng/common/templates-official/steps/build-reason.yml b/eng/common/templates-official/steps/build-reason.yml
deleted file mode 100644
index eba58109b52..00000000000
--- a/eng/common/templates-official/steps/build-reason.yml
+++ /dev/null
@@ -1,12 +0,0 @@
-# build-reason.yml
-# Description: runs the given steps if the build.reason condition is met. 'conditions' is a
-# comma-separated string of build reasons for which the steps should be included.
-parameters:
- conditions: ''
- steps: []
-
-steps:
- - ${{ if and( not(startsWith(parameters.conditions, 'not')), contains(parameters.conditions, variables['build.reason'])) }}:
- - ${{ parameters.steps }}
- - ${{ if and( startsWith(parameters.conditions, 'not'), not(contains(parameters.conditions, variables['build.reason']))) }}:
- - ${{ parameters.steps }}
diff --git a/eng/common/templates-official/steps/component-governance.yml b/eng/common/templates-official/steps/component-governance.yml
index cbba0596709..30bb3985ca2 100644
--- a/eng/common/templates-official/steps/component-governance.yml
+++ b/eng/common/templates-official/steps/component-governance.yml
@@ -1,13 +1,7 @@
-parameters:
- disableComponentGovernance: false
- componentGovernanceIgnoreDirectories: ''
-
steps:
-- ${{ if eq(parameters.disableComponentGovernance, 'true') }}:
- - script: echo "##vso[task.setvariable variable=skipComponentGovernanceDetection]true"
- displayName: Set skipComponentGovernanceDetection variable
-- ${{ if ne(parameters.disableComponentGovernance, 'true') }}:
- - task: ComponentGovernanceComponentDetection@0
- continueOnError: true
- inputs:
- ignoreDirectories: ${{ parameters.componentGovernanceIgnoreDirectories }}
\ No newline at end of file
+- template: /eng/common/core-templates/steps/component-governance.yml
+ parameters:
+ is1ESPipeline: true
+
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
diff --git a/eng/common/templates-official/steps/enable-internal-runtimes.yml b/eng/common/templates-official/steps/enable-internal-runtimes.yml
index 93a8394a666..f9dd238c6cd 100644
--- a/eng/common/templates-official/steps/enable-internal-runtimes.yml
+++ b/eng/common/templates-official/steps/enable-internal-runtimes.yml
@@ -1,28 +1,9 @@
# Obtains internal runtime download credentials and populates the 'dotnetbuilds-internal-container-read-token-base64'
# variable with the base64-encoded SAS token, by default
-
-parameters:
-- name: federatedServiceConnection
- type: string
- default: 'dotnetbuilds-internal-read'
-- name: outputVariableName
- type: string
- default: 'dotnetbuilds-internal-container-read-token-base64'
-- name: expiryInHours
- type: number
- default: 1
-- name: base64Encode
- type: boolean
- default: true
-
steps:
-- ${{ if ne(variables['System.TeamProject'], 'public') }}:
- - template: /eng/common/templates-official/steps/get-delegation-sas.yml
- parameters:
- federatedServiceConnection: ${{ parameters.federatedServiceConnection }}
- outputVariableName: ${{ parameters.outputVariableName }}
- expiryInHours: ${{ parameters.expiryInHours }}
- base64Encode: ${{ parameters.base64Encode }}
- storageAccount: dotnetbuilds
- container: internal
- permissions: rl
+- template: /eng/common/core-templates/steps/enable-internal-runtimes.yml
+ parameters:
+ is1ESPipeline: true
+
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
diff --git a/eng/common/templates-official/steps/enable-internal-sources.yml b/eng/common/templates-official/steps/enable-internal-sources.yml
new file mode 100644
index 00000000000..e6d57182284
--- /dev/null
+++ b/eng/common/templates-official/steps/enable-internal-sources.yml
@@ -0,0 +1,7 @@
+steps:
+- template: /eng/common/core-templates/steps/enable-internal-sources.yml
+ parameters:
+ is1ESPipeline: true
+
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
\ No newline at end of file
diff --git a/eng/common/templates-official/steps/execute-codeql.yml b/eng/common/templates-official/steps/execute-codeql.yml
deleted file mode 100644
index 9b4a5ffa30a..00000000000
--- a/eng/common/templates-official/steps/execute-codeql.yml
+++ /dev/null
@@ -1,32 +0,0 @@
-parameters:
- # Language that should be analyzed. Defaults to csharp
- language: csharp
- # Build Commands
- buildCommands: ''
- overrideParameters: '' # Optional: to override values for parameters.
- additionalParameters: '' # Optional: parameters that need user specific values eg: '-SourceToolsList @("abc","def") -ArtifactToolsList @("ghi","jkl")'
- # Optional: if specified, restore and use this version of Guardian instead of the default.
- overrideGuardianVersion: ''
- # Optional: if true, publish the '.gdn' folder as a pipeline artifact. This can help with in-depth
- # diagnosis of problems with specific tool configurations.
- publishGuardianDirectoryToPipeline: false
- # The script to run to execute all SDL tools. Use this if you want to use a script to define SDL
- # parameters rather than relying on YAML. It may be better to use a local script, because you can
- # reproduce results locally without piecing together a command based on the YAML.
- executeAllSdlToolsScript: 'eng/common/sdl/execute-all-sdl-tools.ps1'
- # There is some sort of bug (has been reported) in Azure DevOps where if this parameter is named
- # 'continueOnError', the parameter value is not correctly picked up.
- # This can also be remedied by the caller (post-build.yml) if it does not use a nested parameter
- # optional: determines whether to continue the build if the step errors;
- sdlContinueOnError: false
-
-steps:
-- template: /eng/common/templates-official/steps/execute-sdl.yml
- parameters:
- overrideGuardianVersion: ${{ parameters.overrideGuardianVersion }}
- executeAllSdlToolsScript: ${{ parameters.executeAllSdlToolsScript }}
- overrideParameters: ${{ parameters.overrideParameters }}
- additionalParameters: '${{ parameters.additionalParameters }}
- -CodeQLAdditionalRunConfigParams @("BuildCommands < ${{ parameters.buildCommands }}", "Language < ${{ parameters.language }}")'
- publishGuardianDirectoryToPipeline: ${{ parameters.publishGuardianDirectoryToPipeline }}
- sdlContinueOnError: ${{ parameters.sdlContinueOnError }}
\ No newline at end of file
diff --git a/eng/common/templates-official/steps/execute-sdl.yml b/eng/common/templates-official/steps/execute-sdl.yml
deleted file mode 100644
index 301d5c591eb..00000000000
--- a/eng/common/templates-official/steps/execute-sdl.yml
+++ /dev/null
@@ -1,86 +0,0 @@
-parameters:
- overrideGuardianVersion: ''
- executeAllSdlToolsScript: ''
- overrideParameters: ''
- additionalParameters: ''
- publishGuardianDirectoryToPipeline: false
- sdlContinueOnError: false
- condition: ''
-
-steps:
-- task: NuGetAuthenticate@1
-
-- task: NuGetToolInstaller@1
- displayName: 'Install NuGet.exe'
-
-- ${{ if ne(parameters.overrideGuardianVersion, '') }}:
- - pwsh: |
- Set-Location -Path $(Build.SourcesDirectory)\eng\common\sdl
- . .\sdl.ps1
- $guardianCliLocation = Install-Gdn -Path $(Build.SourcesDirectory)\.artifacts -Version ${{ parameters.overrideGuardianVersion }}
- Write-Host "##vso[task.setvariable variable=GuardianCliLocation]$guardianCliLocation"
- displayName: Install Guardian (Overridden)
-
-- ${{ if eq(parameters.overrideGuardianVersion, '') }}:
- - pwsh: |
- Set-Location -Path $(Build.SourcesDirectory)\eng\common\sdl
- . .\sdl.ps1
- $guardianCliLocation = Install-Gdn -Path $(Build.SourcesDirectory)\.artifacts
- Write-Host "##vso[task.setvariable variable=GuardianCliLocation]$guardianCliLocation"
- displayName: Install Guardian
-
-- ${{ if ne(parameters.overrideParameters, '') }}:
- - powershell: ${{ parameters.executeAllSdlToolsScript }} ${{ parameters.overrideParameters }}
- displayName: Execute SDL (Overridden)
- continueOnError: ${{ parameters.sdlContinueOnError }}
- condition: ${{ parameters.condition }}
-
-- ${{ if eq(parameters.overrideParameters, '') }}:
- - powershell: ${{ parameters.executeAllSdlToolsScript }}
- -GuardianCliLocation $(GuardianCliLocation)
- -NugetPackageDirectory $(Build.SourcesDirectory)\.packages
- -AzureDevOpsAccessToken $(dn-bot-dotnet-build-rw-code-rw)
- ${{ parameters.additionalParameters }}
- displayName: Execute SDL
- continueOnError: ${{ parameters.sdlContinueOnError }}
- condition: ${{ parameters.condition }}
-
-- ${{ if ne(parameters.publishGuardianDirectoryToPipeline, 'false') }}:
- # We want to publish the Guardian results and configuration for easy diagnosis. However, the
- # '.gdn' dir is a mix of configuration, results, extracted dependencies, and Guardian default
- # tooling files. Some of these files are large and aren't useful during an investigation, so
- # exclude them by simply deleting them before publishing. (As of writing, there is no documented
- # way to selectively exclude a dir from the pipeline artifact publish task.)
- - task: DeleteFiles@1
- displayName: Delete Guardian dependencies to avoid uploading
- inputs:
- SourceFolder: $(Agent.BuildDirectory)/.gdn
- Contents: |
- c
- i
- condition: succeededOrFailed()
-
- - publish: $(Agent.BuildDirectory)/.gdn
- artifact: GuardianConfiguration
- displayName: Publish GuardianConfiguration
- condition: succeededOrFailed()
-
- # Publish the SARIF files in a container named CodeAnalysisLogs to enable integration
- # with the "SARIF SAST Scans Tab" Azure DevOps extension
- - task: CopyFiles@2
- displayName: Copy SARIF files
- inputs:
- flattenFolders: true
- sourceFolder: $(Agent.BuildDirectory)/.gdn/rc/
- contents: '**/*.sarif'
- targetFolder: $(Build.SourcesDirectory)/CodeAnalysisLogs
- condition: succeededOrFailed()
-
- # Use PublishBuildArtifacts because the SARIF extension only checks this case
- # see microsoft/sarif-azuredevops-extension#4
- - task: PublishBuildArtifacts@1
- displayName: Publish SARIF files to CodeAnalysisLogs container
- inputs:
- pathToPublish: $(Build.SourcesDirectory)/CodeAnalysisLogs
- artifactName: CodeAnalysisLogs
- condition: succeededOrFailed()
\ No newline at end of file
diff --git a/eng/common/templates-official/steps/generate-sbom.yml b/eng/common/templates-official/steps/generate-sbom.yml
index 1bf43bf807a..9a89a4706d9 100644
--- a/eng/common/templates-official/steps/generate-sbom.yml
+++ b/eng/common/templates-official/steps/generate-sbom.yml
@@ -1,48 +1,7 @@
-# BuildDropPath - The root folder of the drop directory for which the manifest file will be generated.
-# PackageName - The name of the package this SBOM represents.
-# PackageVersion - The version of the package this SBOM represents.
-# ManifestDirPath - The path of the directory where the generated manifest files will be placed
-# IgnoreDirectories - Directories to ignore for SBOM generation. This will be passed through to the CG component detector.
-
-parameters:
- PackageVersion: 8.0.0
- BuildDropPath: '$(Build.SourcesDirectory)/artifacts'
- PackageName: '.NET'
- ManifestDirPath: $(Build.ArtifactStagingDirectory)/sbom
- IgnoreDirectories: ''
- sbomContinueOnError: true
-
steps:
-- task: PowerShell@2
-  displayName: Prep for SBOM generation (Non-Linux)
- condition: or(eq(variables['Agent.Os'], 'Windows_NT'), eq(variables['Agent.Os'], 'Darwin'))
- inputs:
- filePath: ./eng/common/generate-sbom-prep.ps1
- arguments: ${{parameters.manifestDirPath}}
-
-# Chmodding is a workaround for https://github.com/dotnet/arcade/issues/8461
-- script: |
- chmod +x ./eng/common/generate-sbom-prep.sh
- ./eng/common/generate-sbom-prep.sh ${{parameters.manifestDirPath}}
-  displayName: Prep for SBOM generation (Linux)
- condition: eq(variables['Agent.Os'], 'Linux')
- continueOnError: ${{ parameters.sbomContinueOnError }}
-
-- task: AzureArtifacts.manifest-generator-task.manifest-generator-task.ManifestGeneratorTask@0
- displayName: 'Generate SBOM manifest'
- continueOnError: ${{ parameters.sbomContinueOnError }}
- inputs:
- PackageName: ${{ parameters.packageName }}
- BuildDropPath: ${{ parameters.buildDropPath }}
- PackageVersion: ${{ parameters.packageVersion }}
- ManifestDirPath: ${{ parameters.manifestDirPath }}
- ${{ if ne(parameters.IgnoreDirectories, '') }}:
- AdditionalComponentDetectorArgs: '--IgnoreDirectories ${{ parameters.IgnoreDirectories }}'
-
-- task: 1ES.PublishPipelineArtifact@1
- displayName: Publish SBOM manifest
- continueOnError: ${{parameters.sbomContinueOnError}}
- inputs:
- targetPath: '${{parameters.manifestDirPath}}'
- artifactName: $(ARTIFACT_NAME)
+- template: /eng/common/core-templates/steps/generate-sbom.yml
+  parameters:
+    is1ESPipeline: true
+
+    ${{ each parameter in parameters }}:
+      ${{ parameter.key }}: ${{ parameter.value }}
diff --git a/eng/common/templates-official/steps/get-delegation-sas.yml b/eng/common/templates-official/steps/get-delegation-sas.yml
index c0e8f91317f..c5a9c1f8275 100644
--- a/eng/common/templates-official/steps/get-delegation-sas.yml
+++ b/eng/common/templates-official/steps/get-delegation-sas.yml
@@ -1,43 +1,7 @@
-parameters:
-- name: federatedServiceConnection
- type: string
-- name: outputVariableName
- type: string
-- name: expiryInHours
- type: number
- default: 1
-- name: base64Encode
- type: boolean
- default: false
-- name: storageAccount
- type: string
-- name: container
- type: string
-- name: permissions
- type: string
- default: 'rl'
-
steps:
-- task: AzureCLI@2
- displayName: 'Generate delegation SAS Token for ${{ parameters.storageAccount }}/${{ parameters.container }}'
- inputs:
- azureSubscription: ${{ parameters.federatedServiceConnection }}
- scriptType: 'pscore'
- scriptLocation: 'inlineScript'
- inlineScript: |
- # Calculate the expiration of the SAS token and convert to UTC
- $expiry = (Get-Date).AddHours(${{ parameters.expiryInHours }}).ToUniversalTime().ToString("yyyy-MM-ddTHH:mm:ssZ")
-
- $sas = az storage container generate-sas --account-name ${{ parameters.storageAccount }} --name ${{ parameters.container }} --permissions ${{ parameters.permissions }} --expiry $expiry --auth-mode login --as-user -o tsv
-
- if ($LASTEXITCODE -ne 0) {
- Write-Error "Failed to generate SAS token."
- exit 1
- }
-
- if ('${{ parameters.base64Encode }}' -eq 'true') {
- $sas = [Convert]::ToBase64String([System.Text.Encoding]::UTF8.GetBytes($sas))
- }
-
-    Write-Host "Setting '${{ parameters.outputVariableName }}' with the access token value"
-    Write-Host "##vso[task.setvariable variable=${{ parameters.outputVariableName }};issecret=true]$sas"
+- template: /eng/common/core-templates/steps/get-delegation-sas.yml
+  parameters:
+    is1ESPipeline: true
+
+    ${{ each parameter in parameters }}:
+      ${{ parameter.key }}: ${{ parameter.value }}
diff --git a/eng/common/templates-official/steps/get-federated-access-token.yml b/eng/common/templates-official/steps/get-federated-access-token.yml
index 55e33bd38f7..c8dcf6b8139 100644
--- a/eng/common/templates-official/steps/get-federated-access-token.yml
+++ b/eng/common/templates-official/steps/get-federated-access-token.yml
@@ -1,40 +1,7 @@
-parameters:
-- name: federatedServiceConnection
- type: string
-- name: outputVariableName
- type: string
-- name: stepName
- type: string
- default: 'getFederatedAccessToken'
-- name: condition
- type: string
- default: ''
-# Resource to get a token for. Common values include:
-# - '499b84ac-1321-427f-aa17-267ca6975798' for Azure DevOps
-# - 'https://storage.azure.com/' for storage
-# Defaults to Azure DevOps
-- name: resource
- type: string
- default: '499b84ac-1321-427f-aa17-267ca6975798'
-- name: isStepOutputVariable
- type: boolean
- default: false
-
steps:
-- task: AzureCLI@2
- displayName: 'Getting federated access token for feeds'
- name: ${{ parameters.stepName }}
- ${{ if ne(parameters.condition, '') }}:
- condition: ${{ parameters.condition }}
- inputs:
- azureSubscription: ${{ parameters.federatedServiceConnection }}
- scriptType: 'pscore'
- scriptLocation: 'inlineScript'
- inlineScript: |
- $accessToken = az account get-access-token --query accessToken --resource ${{ parameters.resource }} --output tsv
- if ($LASTEXITCODE -ne 0) {
- Write-Error "Failed to get access token for resource '${{ parameters.resource }}'"
- exit 1
- }
- Write-Host "Setting '${{ parameters.outputVariableName }}' with the access token value"
- Write-Host "##vso[task.setvariable variable=${{ parameters.outputVariableName }};issecret=true;isOutput=${{ parameters.isStepOutputVariable }}]$accessToken"
\ No newline at end of file
+- template: /eng/common/core-templates/steps/get-federated-access-token.yml
+ parameters:
+ is1ESPipeline: true
+
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
\ No newline at end of file
diff --git a/eng/common/templates-official/steps/publish-build-artifacts.yml b/eng/common/templates-official/steps/publish-build-artifacts.yml
new file mode 100644
index 00000000000..100a3fc9849
--- /dev/null
+++ b/eng/common/templates-official/steps/publish-build-artifacts.yml
@@ -0,0 +1,41 @@
+parameters:
+- name: displayName
+ type: string
+ default: 'Publish to Build Artifact'
+
+- name: condition
+ type: string
+ default: succeeded()
+
+- name: artifactName
+ type: string
+
+- name: pathToPublish
+ type: string
+
+- name: continueOnError
+ type: boolean
+ default: false
+
+- name: publishLocation
+ type: string
+ default: 'Container'
+
+- name: is1ESPipeline
+ type: boolean
+ default: true
+
+steps:
+- ${{ if ne(parameters.is1ESPipeline, true) }}:
+ - 'eng/common/templates-official cannot be referenced from a non-1ES managed template': error
+- task: 1ES.PublishBuildArtifacts@1
+ displayName: ${{ parameters.displayName }}
+ condition: ${{ parameters.condition }}
+ ${{ if parameters.continueOnError }}:
+ continueOnError: ${{ parameters.continueOnError }}
+ inputs:
+ PublishLocation: ${{ parameters.publishLocation }}
+ PathtoPublish: ${{ parameters.pathToPublish }}
+ ${{ if parameters.artifactName }}:
+ ArtifactName: ${{ parameters.artifactName }}
+
diff --git a/eng/common/templates-official/steps/publish-logs.yml b/eng/common/templates-official/steps/publish-logs.yml
index 04012fed182..579fd531e94 100644
--- a/eng/common/templates-official/steps/publish-logs.yml
+++ b/eng/common/templates-official/steps/publish-logs.yml
@@ -1,23 +1,7 @@
-parameters:
- StageLabel: ''
- JobLabel: ''
-
steps:
-- task: Powershell@2
- displayName: Prepare Binlogs to Upload
- inputs:
- targetType: inline
- script: |
- New-Item -ItemType Directory $(Build.SourcesDirectory)/PostBuildLogs/${{parameters.StageLabel}}/${{parameters.JobLabel}}/
- Move-Item -Path $(Build.SourcesDirectory)/artifacts/log/Debug/* $(Build.SourcesDirectory)/PostBuildLogs/${{parameters.StageLabel}}/${{parameters.JobLabel}}/
- continueOnError: true
- condition: always()
-
-- task: 1ES.PublishBuildArtifacts@1
-  displayName: Publish Logs
-  inputs:
-    PathtoPublish: '$(Build.SourcesDirectory)/PostBuildLogs'
-    PublishLocation: Container
-    ArtifactName: PostBuildLogs
-  continueOnError: true
-  condition: always()
+- template: /eng/common/core-templates/steps/publish-logs.yml
+  parameters:
+    is1ESPipeline: true
+
+    ${{ each parameter in parameters }}:
+      ${{ parameter.key }}: ${{ parameter.value }}
diff --git a/eng/common/templates-official/steps/publish-pipeline-artifacts.yml b/eng/common/templates-official/steps/publish-pipeline-artifacts.yml
new file mode 100644
index 00000000000..172f9f0fdc9
--- /dev/null
+++ b/eng/common/templates-official/steps/publish-pipeline-artifacts.yml
@@ -0,0 +1,28 @@
+parameters:
+- name: is1ESPipeline
+ type: boolean
+ default: true
+
+- name: args
+ type: object
+ default: {}
+
+steps:
+- ${{ if ne(parameters.is1ESPipeline, true) }}:
+ - 'eng/common/templates-official cannot be referenced from a non-1ES managed template': error
+- task: 1ES.PublishPipelineArtifact@1
+ displayName: ${{ coalesce(parameters.args.displayName, 'Publish to Build Artifact') }}
+ ${{ if parameters.args.condition }}:
+ condition: ${{ parameters.args.condition }}
+ ${{ else }}:
+ condition: succeeded()
+ ${{ if parameters.args.continueOnError }}:
+ continueOnError: ${{ parameters.args.continueOnError }}
+ inputs:
+ targetPath: ${{ parameters.args.targetPath }}
+ ${{ if parameters.args.artifactName }}:
+ artifactName: ${{ parameters.args.artifactName }}
+ ${{ if parameters.args.properties }}:
+ properties: ${{ parameters.args.properties }}
+ ${{ if parameters.args.sbomEnabled }}:
+ sbomEnabled: ${{ parameters.args.sbomEnabled }}
diff --git a/eng/common/templates-official/steps/retain-build.yml b/eng/common/templates-official/steps/retain-build.yml
index 83d97a26a01..5594551508a 100644
--- a/eng/common/templates-official/steps/retain-build.yml
+++ b/eng/common/templates-official/steps/retain-build.yml
@@ -1,28 +1,7 @@
-parameters:
- # Optional azure devops PAT with build execute permissions for the build's organization,
- # only needed if the build that should be retained ran on a different organization than
- # the pipeline where this template is executing from
- Token: ''
- # Optional BuildId to retain, defaults to the current running build
- BuildId: ''
- # Azure devops Organization URI for the build in the https://dev.azure.com/ format.
- # Defaults to the organization the current pipeline is running on
- AzdoOrgUri: '$(System.CollectionUri)'
- # Azure devops project for the build. Defaults to the project the current pipeline is running on
- AzdoProject: '$(System.TeamProject)'
-
steps:
- - task: powershell@2
- inputs:
- targetType: 'filePath'
- filePath: eng/common/retain-build.ps1
- pwsh: true
- arguments: >
- -AzdoOrgUri: ${{parameters.AzdoOrgUri}}
- -AzdoProject ${{parameters.AzdoProject}}
- -Token ${{coalesce(parameters.Token, '$env:SYSTEM_ACCESSTOKEN') }}
- -BuildId ${{coalesce(parameters.BuildId, '$env:BUILD_ID')}}
- displayName: Enable permanent build retention
- env:
- SYSTEM_ACCESSTOKEN: $(System.AccessToken)
- BUILD_ID: $(Build.BuildId)
\ No newline at end of file
+- template: /eng/common/core-templates/steps/retain-build.yml
+ parameters:
+ is1ESPipeline: true
+
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
diff --git a/eng/common/templates-official/steps/send-to-helix.yml b/eng/common/templates-official/steps/send-to-helix.yml
index 3eb7e2d5f84..6500f21bf84 100644
--- a/eng/common/templates-official/steps/send-to-helix.yml
+++ b/eng/common/templates-official/steps/send-to-helix.yml
@@ -1,91 +1,7 @@
-# Please remember to update the documentation if you make changes to these parameters!
-parameters:
- HelixSource: 'pr/default' # required -- sources must start with pr/, official/, prodcon/, or agent/
- HelixType: 'tests/default/' # required -- Helix telemetry which identifies what type of data this is; should include "test" for clarity and must end in '/'
- HelixBuild: $(Build.BuildNumber) # required -- the build number Helix will use to identify this -- automatically set to the AzDO build number
- HelixTargetQueues: '' # required -- semicolon-delimited list of Helix queues to test on; see https://helix.dot.net/ for a list of queues
- HelixAccessToken: '' # required -- access token to make Helix API requests; should be provided by the appropriate variable group
- HelixConfiguration: '' # optional -- additional property attached to a job
- HelixPreCommands: '' # optional -- commands to run before Helix work item execution
- HelixPostCommands: '' # optional -- commands to run after Helix work item execution
- WorkItemDirectory: '' # optional -- a payload directory to zip up and send to Helix; requires WorkItemCommand; incompatible with XUnitProjects
- WorkItemCommand: '' # optional -- a command to execute on the payload; requires WorkItemDirectory; incompatible with XUnitProjects
- WorkItemTimeout: '' # optional -- a timeout in TimeSpan.Parse-ready value (e.g. 00:02:00) for the work item command; requires WorkItemDirectory; incompatible with XUnitProjects
- CorrelationPayloadDirectory: '' # optional -- a directory to zip up and send to Helix as a correlation payload
- XUnitProjects: '' # optional -- semicolon-delimited list of XUnitProjects to parse and send to Helix; requires XUnitRuntimeTargetFramework, XUnitPublishTargetFramework, XUnitRunnerVersion, and IncludeDotNetCli=true
- XUnitWorkItemTimeout: '' # optional -- the workitem timeout in seconds for all workitems created from the xUnit projects specified by XUnitProjects
- XUnitPublishTargetFramework: '' # optional -- framework to use to publish your xUnit projects
- XUnitRuntimeTargetFramework: '' # optional -- framework to use for the xUnit console runner
- XUnitRunnerVersion: '' # optional -- version of the xUnit nuget package you wish to use on Helix; required for XUnitProjects
- IncludeDotNetCli: false # optional -- true will download a version of the .NET CLI onto the Helix machine as a correlation payload; requires DotNetCliPackageType and DotNetCliVersion
- DotNetCliPackageType: '' # optional -- either 'sdk', 'runtime' or 'aspnetcore-runtime'; determines whether the sdk or runtime will be sent to Helix; see https://raw.githubusercontent.com/dotnet/core/main/release-notes/releases-index.json
- DotNetCliVersion: '' # optional -- version of the CLI to send to Helix; based on this: https://raw.githubusercontent.com/dotnet/core/main/release-notes/releases-index.json
- WaitForWorkItemCompletion: true # optional -- true will make the task wait until work items have been completed and fail the build if work items fail. False is "fire and forget."
- IsExternal: false # [DEPRECATED] -- doesn't do anything, jobs are external if HelixAccessToken is empty and Creator is set
- HelixBaseUri: 'https://helix.dot.net/' # optional -- sets the Helix API base URI (allows targeting https://helix.int-dot.net )
- Creator: '' # optional -- if the build is external, use this to specify who is sending the job
- DisplayNamePrefix: 'Run Tests' # optional -- rename the beginning of the displayName of the steps in AzDO
- condition: succeeded() # optional -- condition for step to execute; defaults to succeeded()
- continueOnError: false # optional -- determines whether to continue the build if the step errors; defaults to false
-
steps:
- - powershell: 'powershell "$env:BUILD_SOURCESDIRECTORY\eng\common\msbuild.ps1 $env:BUILD_SOURCESDIRECTORY\eng\common\helixpublish.proj /restore /p:TreatWarningsAsErrors=false /t:Test /bl:$env:BUILD_SOURCESDIRECTORY\artifacts\log\$env:BuildConfig\SendToHelix.binlog"'
- displayName: ${{ parameters.DisplayNamePrefix }} (Windows)
- env:
- BuildConfig: $(_BuildConfig)
- HelixSource: ${{ parameters.HelixSource }}
- HelixType: ${{ parameters.HelixType }}
- HelixBuild: ${{ parameters.HelixBuild }}
- HelixConfiguration: ${{ parameters.HelixConfiguration }}
- HelixTargetQueues: ${{ parameters.HelixTargetQueues }}
- HelixAccessToken: ${{ parameters.HelixAccessToken }}
- HelixPreCommands: ${{ parameters.HelixPreCommands }}
- HelixPostCommands: ${{ parameters.HelixPostCommands }}
- WorkItemDirectory: ${{ parameters.WorkItemDirectory }}
- WorkItemCommand: ${{ parameters.WorkItemCommand }}
- WorkItemTimeout: ${{ parameters.WorkItemTimeout }}
- CorrelationPayloadDirectory: ${{ parameters.CorrelationPayloadDirectory }}
- XUnitProjects: ${{ parameters.XUnitProjects }}
- XUnitWorkItemTimeout: ${{ parameters.XUnitWorkItemTimeout }}
- XUnitPublishTargetFramework: ${{ parameters.XUnitPublishTargetFramework }}
- XUnitRuntimeTargetFramework: ${{ parameters.XUnitRuntimeTargetFramework }}
- XUnitRunnerVersion: ${{ parameters.XUnitRunnerVersion }}
- IncludeDotNetCli: ${{ parameters.IncludeDotNetCli }}
- DotNetCliPackageType: ${{ parameters.DotNetCliPackageType }}
- DotNetCliVersion: ${{ parameters.DotNetCliVersion }}
- WaitForWorkItemCompletion: ${{ parameters.WaitForWorkItemCompletion }}
- HelixBaseUri: ${{ parameters.HelixBaseUri }}
- Creator: ${{ parameters.Creator }}
- SYSTEM_ACCESSTOKEN: $(System.AccessToken)
- condition: and(${{ parameters.condition }}, eq(variables['Agent.Os'], 'Windows_NT'))
- continueOnError: ${{ parameters.continueOnError }}
- - script: $BUILD_SOURCESDIRECTORY/eng/common/msbuild.sh $BUILD_SOURCESDIRECTORY/eng/common/helixpublish.proj /restore /p:TreatWarningsAsErrors=false /t:Test /bl:$BUILD_SOURCESDIRECTORY/artifacts/log/$BuildConfig/SendToHelix.binlog
- displayName: ${{ parameters.DisplayNamePrefix }} (Unix)
- env:
- BuildConfig: $(_BuildConfig)
- HelixSource: ${{ parameters.HelixSource }}
- HelixType: ${{ parameters.HelixType }}
- HelixBuild: ${{ parameters.HelixBuild }}
- HelixConfiguration: ${{ parameters.HelixConfiguration }}
- HelixTargetQueues: ${{ parameters.HelixTargetQueues }}
- HelixAccessToken: ${{ parameters.HelixAccessToken }}
- HelixPreCommands: ${{ parameters.HelixPreCommands }}
- HelixPostCommands: ${{ parameters.HelixPostCommands }}
- WorkItemDirectory: ${{ parameters.WorkItemDirectory }}
- WorkItemCommand: ${{ parameters.WorkItemCommand }}
- WorkItemTimeout: ${{ parameters.WorkItemTimeout }}
- CorrelationPayloadDirectory: ${{ parameters.CorrelationPayloadDirectory }}
- XUnitProjects: ${{ parameters.XUnitProjects }}
- XUnitWorkItemTimeout: ${{ parameters.XUnitWorkItemTimeout }}
- XUnitPublishTargetFramework: ${{ parameters.XUnitPublishTargetFramework }}
- XUnitRuntimeTargetFramework: ${{ parameters.XUnitRuntimeTargetFramework }}
- XUnitRunnerVersion: ${{ parameters.XUnitRunnerVersion }}
- IncludeDotNetCli: ${{ parameters.IncludeDotNetCli }}
- DotNetCliPackageType: ${{ parameters.DotNetCliPackageType }}
- DotNetCliVersion: ${{ parameters.DotNetCliVersion }}
- WaitForWorkItemCompletion: ${{ parameters.WaitForWorkItemCompletion }}
- HelixBaseUri: ${{ parameters.HelixBaseUri }}
- Creator: ${{ parameters.Creator }}
- SYSTEM_ACCESSTOKEN: $(System.AccessToken)
- condition: and(${{ parameters.condition }}, ne(variables['Agent.Os'], 'Windows_NT'))
- continueOnError: ${{ parameters.continueOnError }}
+- template: /eng/common/core-templates/steps/send-to-helix.yml
+ parameters:
+ is1ESPipeline: true
+
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
diff --git a/eng/common/templates-official/steps/source-build.yml b/eng/common/templates-official/steps/source-build.yml
index 829f17c34d1..8f92c49e7b0 100644
--- a/eng/common/templates-official/steps/source-build.yml
+++ b/eng/common/templates-official/steps/source-build.yml
@@ -1,129 +1,7 @@
-parameters:
- # This template adds arcade-powered source-build to CI.
-
- # This is a 'steps' template, and is intended for advanced scenarios where the existing build
- # infra has a careful build methodology that must be followed. For example, a repo
- # (dotnet/runtime) might choose to clone the GitHub repo only once and store it as a pipeline
- # artifact for all subsequent jobs to use, to reduce dependence on a strong network connection to
- # GitHub. Using this steps template leaves room for that infra to be included.
-
- # Defines the platform on which to run the steps. See 'eng/common/templates-official/job/source-build.yml'
- # for details. The entire object is described in the 'job' template for simplicity, even though
- # the usage of the properties on this object is split between the 'job' and 'steps' templates.
- platform: {}
-
steps:
-# Build. Keep it self-contained for simple reusability. (No source-build-specific job variables.)
-- script: |
- set -x
- df -h
-
- # If building on the internal project, the artifact feeds variable may be available (usually only if needed)
- # In that case, call the feed setup script to add internal feeds corresponding to public ones.
- # In addition, add an msbuild argument to copy the WIP from the repo to the target build location.
- # This is because SetupNuGetSources.sh will alter the current NuGet.config file, and we need to preserve those
- # changes.
- internalRestoreArgs=
- if [ '$(dn-bot-dnceng-artifact-feeds-rw)' != '$''(dn-bot-dnceng-artifact-feeds-rw)' ]; then
- # Temporarily work around https://github.com/dotnet/arcade/issues/7709
- chmod +x $(Build.SourcesDirectory)/eng/common/SetupNugetSources.sh
- $(Build.SourcesDirectory)/eng/common/SetupNugetSources.sh $(Build.SourcesDirectory)/NuGet.config $(dn-bot-dnceng-artifact-feeds-rw)
- internalRestoreArgs='/p:CopyWipIntoInnerSourceBuildRepo=true'
-
- # The 'Copy WIP' feature of source build uses git stash to apply changes from the original repo.
- # This only works if there is a username/email configured, which won't be the case in most CI runs.
- git config --get user.email
- if [ $? -ne 0 ]; then
- git config user.email dn-bot@microsoft.com
- git config user.name dn-bot
- fi
- fi
-
- # If building on the internal project, the internal storage variable may be available (usually only if needed)
- # In that case, add variables to allow the download of internal runtimes if the specified versions are not found
- # in the default public locations.
- internalRuntimeDownloadArgs=
- if [ '$(dotnetbuilds-internal-container-read-token-base64)' != '$''(dotnetbuilds-internal-container-read-token-base64)' ]; then
- internalRuntimeDownloadArgs='/p:DotNetRuntimeSourceFeed=https://dotnetbuilds.blob.core.windows.net/internal /p:DotNetRuntimeSourceFeedKey=$(dotnetbuilds-internal-container-read-token-base64) --runtimesourcefeed https://dotnetbuilds.blob.core.windows.net/internal --runtimesourcefeedkey $(dotnetbuilds-internal-container-read-token-base64)'
- fi
-
- buildConfig=Release
- # Check if AzDO substitutes in a build config from a variable, and use it if so.
- if [ '$(_BuildConfig)' != '$''(_BuildConfig)' ]; then
- buildConfig='$(_BuildConfig)'
- fi
-
- officialBuildArgs=
- if [ '${{ and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}' = 'True' ]; then
- officialBuildArgs='/p:DotNetPublishUsingPipelines=true /p:OfficialBuildId=$(BUILD.BUILDNUMBER)'
- fi
-
- targetRidArgs=
- if [ '${{ parameters.platform.targetRID }}' != '' ]; then
- targetRidArgs='/p:TargetRid=${{ parameters.platform.targetRID }}'
- fi
-
- runtimeOsArgs=
- if [ '${{ parameters.platform.runtimeOS }}' != '' ]; then
- runtimeOsArgs='/p:RuntimeOS=${{ parameters.platform.runtimeOS }}'
- fi
-
- baseOsArgs=
- if [ '${{ parameters.platform.baseOS }}' != '' ]; then
- baseOsArgs='/p:BaseOS=${{ parameters.platform.baseOS }}'
- fi
-
- publishArgs=
- if [ '${{ parameters.platform.skipPublishValidation }}' != 'true' ]; then
- publishArgs='--publish'
- fi
-
- assetManifestFileName=SourceBuild_RidSpecific.xml
- if [ '${{ parameters.platform.name }}' != '' ]; then
- assetManifestFileName=SourceBuild_${{ parameters.platform.name }}.xml
- fi
-
- ${{ coalesce(parameters.platform.buildScript, './build.sh') }} --ci \
- --configuration $buildConfig \
- --restore --build --pack $publishArgs -bl \
- $officialBuildArgs \
- $internalRuntimeDownloadArgs \
- $internalRestoreArgs \
- $targetRidArgs \
- $runtimeOsArgs \
- $baseOsArgs \
- /p:SourceBuildNonPortable=${{ parameters.platform.nonPortable }} \
- /p:ArcadeBuildFromSource=true \
- /p:AssetManifestFileName=$assetManifestFileName
- displayName: Build
-
-# Upload build logs for diagnosis.
-- task: CopyFiles@2
- displayName: Prepare BuildLogs staging directory
- inputs:
- SourceFolder: '$(Build.SourcesDirectory)'
- Contents: |
- **/*.log
- **/*.binlog
- artifacts/source-build/self/prebuilt-report/**
- TargetFolder: '$(Build.StagingDirectory)/BuildLogs'
- CleanTargetFolder: true
- continueOnError: true
- condition: succeededOrFailed()
-
-- task: 1ES.PublishPipelineArtifact@1
- displayName: Publish BuildLogs
- inputs:
- targetPath: '$(Build.StagingDirectory)/BuildLogs'
- artifactName: BuildLogs_SourceBuild_${{ parameters.platform.name }}_Attempt$(System.JobAttempt)
- continueOnError: true
- condition: succeededOrFailed()
+- template: /eng/common/core-templates/steps/source-build.yml
+ parameters:
+ is1ESPipeline: true
-# Manually inject component detection so that we can ignore the source build upstream cache, which contains
-# a nupkg cache of input packages (a local feed).
-# This path must match the upstream cache path in property 'CurrentRepoSourceBuiltNupkgCacheDir'
-# in src\Microsoft.DotNet.Arcade.Sdk\tools\SourceBuild\SourceBuildArcade.targets
-- task: ComponentGovernanceComponentDetection@0
- displayName: Component Detection (Exclude upstream cache)
- inputs:
- ignoreDirectories: '$(Build.SourcesDirectory)/artifacts/source-build/self/src/artifacts/obj/source-built-upstream-cache'
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
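
One idiom worth calling out in the script deleted above: comparisons such as [ '$(dn-bot-dnceng-artifact-feeds-rw)' != '$''(dn-bot-dnceng-artifact-feeds-rw)' ] detect whether a pipeline variable was ever defined. Azure DevOps macro-expands $(name) before the shell runs, but only when it appears as the exact literal; the right-hand side is spliced from two shell strings ('$' plus '(name)'), so it always survives as the literal text $(name). A self-contained sketch of the idiom, with a placeholder variable name:

steps:
- script: |
    # If 'optional-feed-token' was never defined, the left side is NOT expanded
    # and equals the literal "$(optional-feed-token)", so the branch is skipped.
    if [ '$(optional-feed-token)' != '$''(optional-feed-token)' ]; then
      echo "variable defined: enabling internal feeds"
    else
      echo "variable undefined: public feeds only"
    fi
  displayName: Detect whether an optional variable was substituted
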
diff --git a/eng/common/templates/job/execute-sdl.yml b/eng/common/templates/job/execute-sdl.yml
deleted file mode 100644
index 7870f93bc17..00000000000
--- a/eng/common/templates/job/execute-sdl.yml
+++ /dev/null
@@ -1,139 +0,0 @@
-parameters:
- enable: 'false' # Whether the SDL validation job should execute or not
- overrideParameters: '' # Optional: to override values for parameters.
- additionalParameters: '' # Optional: parameters that need user specific values eg: '-SourceToolsList @("abc","def") -ArtifactToolsList @("ghi","jkl")'
- # Optional: if specified, restore and use this version of Guardian instead of the default.
- overrideGuardianVersion: ''
- # Optional: if true, publish the '.gdn' folder as a pipeline artifact. This can help with in-depth
- # diagnosis of problems with specific tool configurations.
- publishGuardianDirectoryToPipeline: false
- # The script to run to execute all SDL tools. Use this if you want to use a script to define SDL
- # parameters rather than relying on YAML. It may be better to use a local script, because you can
- # reproduce results locally without piecing together a command based on the YAML.
- executeAllSdlToolsScript: 'eng/common/sdl/execute-all-sdl-tools.ps1'
- # There is some sort of bug (has been reported) in Azure DevOps where if this parameter is named
- # 'continueOnError', the parameter value is not correctly picked up.
- # This can also be remedied by the caller (post-build.yml) if it does not use a nested parameter
- sdlContinueOnError: false # optional: determines whether to continue the build if the step errors;
- # optional: determines if build artifacts should be downloaded.
- downloadArtifacts: true
- # optional: determines if this job should search the directory of downloaded artifacts for
- # 'tar.gz' and 'zip' archive files and extract them before running SDL validation tasks.
- extractArchiveArtifacts: false
- dependsOn: '' # Optional: dependencies of the job
- artifactNames: '' # Optional: patterns supplied to DownloadBuildArtifacts
- # Usage:
- # artifactNames:
- # - 'BlobArtifacts'
- # - 'Artifacts_Windows_NT_Release'
- # Optional: download a list of pipeline artifacts. 'downloadArtifacts' controls build artifacts,
- # not pipeline artifacts, so doesn't affect the use of this parameter.
- pipelineArtifactNames: []
-
-jobs:
-- job: Run_SDL
- dependsOn: ${{ parameters.dependsOn }}
- displayName: Run SDL tool
- condition: and(succeededOrFailed(), eq( ${{ parameters.enable }}, 'true'))
- variables:
- - group: DotNet-VSTS-Bot
- - name: AzDOProjectName
- value: ${{ parameters.AzDOProjectName }}
- - name: AzDOPipelineId
- value: ${{ parameters.AzDOPipelineId }}
- - name: AzDOBuildId
- value: ${{ parameters.AzDOBuildId }}
- - template: /eng/common/templates/variables/sdl-variables.yml
- - name: GuardianVersion
- value: ${{ coalesce(parameters.overrideGuardianVersion, '$(DefaultGuardianVersion)') }}
- - template: /eng/common/templates/variables/pool-providers.yml
- pool:
- # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
- ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
- name: VSEngSS-MicroBuild2022-1ES
- demands: Cmd
- # If it's not devdiv, it's dnceng
- ${{ if ne(variables['System.TeamProject'], 'DevDiv') }}:
- name: $(DncEngInternalBuildPool)
- demands: ImageOverride -equals windows.vs2019.amd64
- steps:
- - checkout: self
- clean: true
-
- # If the template caller didn't provide an AzDO parameter, set them all up as Maestro vars.
- - ${{ if not(and(parameters.AzDOProjectName, parameters.AzDOPipelineId, parameters.AzDOBuildId)) }}:
- - template: /eng/common/templates/post-build/setup-maestro-vars.yml
-
- - ${{ if ne(parameters.downloadArtifacts, 'false')}}:
- - ${{ if ne(parameters.artifactNames, '') }}:
- - ${{ each artifactName in parameters.artifactNames }}:
- - task: DownloadBuildArtifacts@0
- displayName: Download Build Artifacts
- inputs:
- buildType: specific
- buildVersionToDownload: specific
- project: $(AzDOProjectName)
- pipeline: $(AzDOPipelineId)
- buildId: $(AzDOBuildId)
- artifactName: ${{ artifactName }}
- downloadPath: $(Build.ArtifactStagingDirectory)\artifacts
- checkDownloadedFiles: true
- - ${{ if eq(parameters.artifactNames, '') }}:
- - task: DownloadBuildArtifacts@0
- displayName: Download Build Artifacts
- inputs:
- buildType: specific
- buildVersionToDownload: specific
- project: $(AzDOProjectName)
- pipeline: $(AzDOPipelineId)
- buildId: $(AzDOBuildId)
- downloadType: specific files
- itemPattern: "**"
- downloadPath: $(Build.ArtifactStagingDirectory)\artifacts
- checkDownloadedFiles: true
-
- - ${{ each artifactName in parameters.pipelineArtifactNames }}:
- - task: DownloadPipelineArtifact@2
- displayName: Download Pipeline Artifacts
- inputs:
- buildType: specific
- buildVersionToDownload: specific
- project: $(AzDOProjectName)
- pipeline: $(AzDOPipelineId)
- buildId: $(AzDOBuildId)
- artifactName: ${{ artifactName }}
- downloadPath: $(Build.ArtifactStagingDirectory)\artifacts
- checkDownloadedFiles: true
-
- - powershell: eng/common/sdl/trim-assets-version.ps1
- -InputPath $(Build.ArtifactStagingDirectory)\artifacts
- displayName: Trim the version from the NuGet packages
- continueOnError: ${{ parameters.sdlContinueOnError }}
-
- - powershell: eng/common/sdl/extract-artifact-packages.ps1
- -InputPath $(Build.ArtifactStagingDirectory)\artifacts\BlobArtifacts
- -ExtractPath $(Build.ArtifactStagingDirectory)\artifacts\BlobArtifacts
- displayName: Extract Blob Artifacts
- continueOnError: ${{ parameters.sdlContinueOnError }}
-
- - powershell: eng/common/sdl/extract-artifact-packages.ps1
- -InputPath $(Build.ArtifactStagingDirectory)\artifacts\PackageArtifacts
- -ExtractPath $(Build.ArtifactStagingDirectory)\artifacts\PackageArtifacts
- displayName: Extract Package Artifacts
- continueOnError: ${{ parameters.sdlContinueOnError }}
-
- - ${{ if ne(parameters.extractArchiveArtifacts, 'false') }}:
- - powershell: eng/common/sdl/extract-artifact-archives.ps1
- -InputPath $(Build.ArtifactStagingDirectory)\artifacts
- -ExtractPath $(Build.ArtifactStagingDirectory)\artifacts
- displayName: Extract Archive Artifacts
- continueOnError: ${{ parameters.sdlContinueOnError }}
-
- - template: /eng/common/templates/steps/execute-sdl.yml
- parameters:
- overrideGuardianVersion: ${{ parameters.overrideGuardianVersion }}
- executeAllSdlToolsScript: ${{ parameters.executeAllSdlToolsScript }}
- overrideParameters: ${{ parameters.overrideParameters }}
- additionalParameters: ${{ parameters.additionalParameters }}
- publishGuardianDirectoryToPipeline: ${{ parameters.publishGuardianDirectoryToPipeline }}
- sdlContinueOnError: ${{ parameters.sdlContinueOnError }}
diff --git a/eng/common/templates/job/job.yml b/eng/common/templates/job/job.yml
index 8ec5c4f2d9f..07d317bf8f9 100644
--- a/eng/common/templates/job/job.yml
+++ b/eng/common/templates/job/job.yml
@@ -1,259 +1,81 @@
-# Internal resources (telemetry, microbuild) can only be accessed from non-public projects,
-# and some (Microbuild) should only be applied to non-PR cases for internal builds.
-
-parameters:
-# Job schema parameters - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job
- cancelTimeoutInMinutes: ''
- condition: ''
- container: ''
- continueOnError: false
- dependsOn: ''
- displayName: ''
- pool: ''
- steps: []
- strategy: ''
- timeoutInMinutes: ''
- variables: []
- workspace: ''
- templateContext: ''
-
-# Job base template specific parameters
- # See schema documentation - https://github.com/dotnet/arcade/blob/master/Documentation/AzureDevOps/TemplateSchema.md
- artifacts: ''
- enableMicrobuild: false
+parameters:
enablePublishBuildArtifacts: false
- enablePublishBuildAssets: false
- enablePublishTestResults: false
- enablePublishUsingPipelines: false
- enableBuildRetry: false
disableComponentGovernance: ''
componentGovernanceIgnoreDirectories: ''
- mergeTestResults: false
- testRunTitle: ''
- testResultsFormat: ''
- name: ''
- preSteps: []
- runAsPublic: false
# Sbom related params
enableSbom: true
- PackageVersion: 7.0.0
+ PackageVersion: 9.0.0
BuildDropPath: '$(Build.SourcesDirectory)/artifacts'
jobs:
-- job: ${{ parameters.name }}
-
- ${{ if ne(parameters.cancelTimeoutInMinutes, '') }}:
- cancelTimeoutInMinutes: ${{ parameters.cancelTimeoutInMinutes }}
-
- ${{ if ne(parameters.condition, '') }}:
- condition: ${{ parameters.condition }}
-
- ${{ if ne(parameters.container, '') }}:
- container: ${{ parameters.container }}
-
- ${{ if ne(parameters.continueOnError, '') }}:
- continueOnError: ${{ parameters.continueOnError }}
-
- ${{ if ne(parameters.dependsOn, '') }}:
- dependsOn: ${{ parameters.dependsOn }}
-
- ${{ if ne(parameters.displayName, '') }}:
- displayName: ${{ parameters.displayName }}
-
- ${{ if ne(parameters.pool, '') }}:
- pool: ${{ parameters.pool }}
-
- ${{ if ne(parameters.strategy, '') }}:
- strategy: ${{ parameters.strategy }}
-
- ${{ if ne(parameters.timeoutInMinutes, '') }}:
- timeoutInMinutes: ${{ parameters.timeoutInMinutes }}
-
- ${{ if ne(parameters.templateContext, '') }}:
- templateContext: ${{ parameters.templateContext }}
-
- variables:
- - ${{ if ne(parameters.enableTelemetry, 'false') }}:
- - name: DOTNET_CLI_TELEMETRY_PROFILE
- value: '$(Build.Repository.Uri)'
- - ${{ if eq(parameters.enableRichCodeNavigation, 'true') }}:
- - name: EnableRichCodeNavigation
- value: 'true'
- # Retry signature validation up to three times, waiting 2 seconds between attempts.
- # See https://learn.microsoft.com/en-us/nuget/reference/errors-and-warnings/nu3028#retry-untrusted-root-failures
- - name: NUGET_EXPERIMENTAL_CHAIN_BUILD_RETRY_POLICY
- value: 3,2000
- - ${{ each variable in parameters.variables }}:
- # handle name-value variable syntax
- # example:
- # - name: [key]
- # value: [value]
- - ${{ if ne(variable.name, '') }}:
- - name: ${{ variable.name }}
- value: ${{ variable.value }}
-
- # handle variable groups
- - ${{ if ne(variable.group, '') }}:
- - group: ${{ variable.group }}
-
- # handle template variable syntax
- # example:
- # - template: path/to/template.yml
- # parameters:
- # [key]: [value]
- - ${{ if ne(variable.template, '') }}:
- - template: ${{ variable.template }}
- ${{ if ne(variable.parameters, '') }}:
- parameters: ${{ variable.parameters }}
+- template: /eng/common/core-templates/job/job.yml
+ parameters:
+ is1ESPipeline: false
- # handle key-value variable syntax.
- # example:
- # - [key]: [value]
- - ${{ if and(eq(variable.name, ''), eq(variable.group, ''), eq(variable.template, '')) }}:
- - ${{ each pair in variable }}:
- - name: ${{ pair.key }}
- value: ${{ pair.value }}
+ ${{ each parameter in parameters }}:
+ ${{ if and(ne(parameter.key, 'steps'), ne(parameter.key, 'is1ESPipeline')) }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
- # DotNet-HelixApi-Access provides 'HelixApiAccessToken' for internal builds
- - ${{ if and(eq(parameters.enableTelemetry, 'true'), eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- - group: DotNet-HelixApi-Access
+ steps:
+ - ${{ each step in parameters.steps }}:
+ - ${{ step }}
- ${{ if ne(parameters.workspace, '') }}:
- workspace: ${{ parameters.workspace }}
-
- steps:
- - ${{ if ne(parameters.preSteps, '') }}:
- - ${{ each preStep in parameters.preSteps }}:
- - ${{ preStep }}
-
- - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- - ${{ if eq(parameters.enableMicrobuild, 'true') }}:
- - task: MicroBuildSigningPlugin@3
- displayName: Install MicroBuild plugin
- inputs:
- signType: $(_SignType)
- zipSources: false
- feedSource: https://dnceng.pkgs.visualstudio.com/_packaging/MicroBuildToolset/nuget/v3/index.json
- env:
- TeamName: $(_TeamName)
- continueOnError: ${{ parameters.continueOnError }}
- condition: and(succeeded(), in(variables['_SignType'], 'real', 'test'), eq(variables['Agent.Os'], 'Windows_NT'))
-
- - ${{ if and(eq(parameters.runAsPublic, 'false'), eq(variables['System.TeamProject'], 'internal')) }}:
- - task: NuGetAuthenticate@1
-
- - ${{ if and(ne(parameters.artifacts.download, 'false'), ne(parameters.artifacts.download, '')) }}:
- - task: DownloadPipelineArtifact@2
- inputs:
- buildType: current
- artifactName: ${{ coalesce(parameters.artifacts.download.name, 'Artifacts_$(Agent.OS)_$(_BuildConfig)') }}
- targetPath: ${{ coalesce(parameters.artifacts.download.path, 'artifacts') }}
- itemPattern: ${{ coalesce(parameters.artifacts.download.pattern, '**') }}
-
- - ${{ each step in parameters.steps }}:
- - ${{ step }}
-
- - ${{ if eq(parameters.enableRichCodeNavigation, true) }}:
- - task: RichCodeNavIndexer@0
- displayName: RichCodeNav Upload
- inputs:
- languages: ${{ coalesce(parameters.richCodeNavigationLanguage, 'csharp') }}
- environment: ${{ coalesce(parameters.richCodeNavigationEnvironment, 'production') }}
- richNavLogOutputDirectory: $(Build.SourcesDirectory)/artifacts/bin
- uploadRichNavArtifacts: ${{ coalesce(parameters.richCodeNavigationUploadArtifacts, false) }}
- continueOnError: true
-
- - template: /eng/common/templates/steps/component-governance.yml
- parameters:
- ${{ if eq(parameters.disableComponentGovernance, '') }}:
- ${{ if and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest'), eq(parameters.runAsPublic, 'false'), or(startsWith(variables['Build.SourceBranch'], 'refs/heads/release/'), startsWith(variables['Build.SourceBranch'], 'refs/heads/dotnet/'), startsWith(variables['Build.SourceBranch'], 'refs/heads/microsoft/'), eq(variables['Build.SourceBranch'], 'refs/heads/main'))) }}:
- disableComponentGovernance: false
- ${{ else }}:
- disableComponentGovernance: true
- ${{ else }}:
- disableComponentGovernance: ${{ parameters.disableComponentGovernance }}
- componentGovernanceIgnoreDirectories: ${{ parameters.componentGovernanceIgnoreDirectories }}
-
- - ${{ if eq(parameters.enableMicrobuild, 'true') }}:
- - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- - task: MicroBuildCleanup@1
- displayName: Execute Microbuild cleanup tasks
- condition: and(always(), in(variables['_SignType'], 'real', 'test'), eq(variables['Agent.Os'], 'Windows_NT'))
- continueOnError: ${{ parameters.continueOnError }}
- env:
- TeamName: $(_TeamName)
-
- - ${{ if ne(parameters.artifacts.publish, '') }}:
- - ${{ if and(ne(parameters.artifacts.publish.artifacts, 'false'), ne(parameters.artifacts.publish.artifacts, '')) }}:
- - task: CopyFiles@2
- displayName: Gather binaries for publish to artifacts
- inputs:
- SourceFolder: 'artifacts/bin'
- Contents: '**'
- TargetFolder: '$(Build.ArtifactStagingDirectory)/artifacts/bin'
- - task: CopyFiles@2
- displayName: Gather packages for publish to artifacts
- inputs:
- SourceFolder: 'artifacts/packages'
- Contents: '**'
- TargetFolder: '$(Build.ArtifactStagingDirectory)/artifacts/packages'
- - task: PublishBuildArtifacts@1
- displayName: Publish pipeline artifacts
- inputs:
- PathtoPublish: '$(Build.ArtifactStagingDirectory)/artifacts'
- PublishLocation: Container
- ArtifactName: ${{ coalesce(parameters.artifacts.publish.artifacts.name , 'Artifacts_$(Agent.Os)_$(_BuildConfig)') }}
- continueOnError: true
- condition: always()
- - ${{ if and(ne(parameters.artifacts.publish.logs, 'false'), ne(parameters.artifacts.publish.logs, '')) }}:
- - publish: artifacts/log
- artifact: ${{ coalesce(parameters.artifacts.publish.logs.name, 'Logs_Build_$(Agent.Os)_$(_BuildConfig)') }}
- displayName: Publish logs
- continueOnError: true
- condition: always()
-
- - ${{ if ne(parameters.enablePublishBuildArtifacts, 'false') }}:
- - task: PublishBuildArtifacts@1
- displayName: Publish Logs
- inputs:
- PathtoPublish: '$(Build.SourcesDirectory)/artifacts/log/$(_BuildConfig)'
- PublishLocation: Container
- ArtifactName: ${{ coalesce(parameters.enablePublishBuildArtifacts.artifactName, '$(Agent.Os)_$(Agent.JobName)' ) }}
- continueOnError: true
- condition: always()
-
- - ${{ if or(and(eq(parameters.enablePublishTestResults, 'true'), eq(parameters.testResultsFormat, '')), eq(parameters.testResultsFormat, 'xunit')) }}:
- - task: PublishTestResults@2
- displayName: Publish XUnit Test Results
- inputs:
- testResultsFormat: 'xUnit'
- testResultsFiles: '*.xml'
- searchFolder: '$(Build.SourcesDirectory)/artifacts/TestResults/$(_BuildConfig)'
- testRunTitle: ${{ coalesce(parameters.testRunTitle, parameters.name, '$(System.JobName)') }}-xunit
- mergeTestResults: ${{ parameters.mergeTestResults }}
- continueOnError: true
- condition: always()
- - ${{ if or(and(eq(parameters.enablePublishTestResults, 'true'), eq(parameters.testResultsFormat, '')), eq(parameters.testResultsFormat, 'vstest')) }}:
- - task: PublishTestResults@2
- displayName: Publish TRX Test Results
- inputs:
- testResultsFormat: 'VSTest'
- testResultsFiles: '*.trx'
- searchFolder: '$(Build.SourcesDirectory)/artifacts/TestResults/$(_BuildConfig)'
- testRunTitle: ${{ coalesce(parameters.testRunTitle, parameters.name, '$(System.JobName)') }}-trx
- mergeTestResults: ${{ parameters.mergeTestResults }}
- continueOnError: true
- condition: always()
-
- - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest'), eq(parameters.enableSbom, 'true')) }}:
- - template: /eng/common/templates/steps/generate-sbom.yml
+ componentGovernanceSteps:
+ - template: /eng/common/templates/steps/component-governance.yml
parameters:
- PackageVersion: ${{ parameters.packageVersion}}
- BuildDropPath: ${{ parameters.buildDropPath }}
- IgnoreDirectories: ${{ parameters.componentGovernanceIgnoreDirectories }}
-
- - ${{ if eq(parameters.enableBuildRetry, 'true') }}:
- - publish: $(Build.SourcesDirectory)\eng\common\BuildConfiguration
- artifact: BuildConfiguration
- displayName: Publish build retry configuration
- continueOnError: true
+ ${{ if eq(parameters.disableComponentGovernance, '') }}:
+ ${{ if and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest'), eq(parameters.runAsPublic, 'false'), or(startsWith(variables['Build.SourceBranch'], 'refs/heads/release/'), startsWith(variables['Build.SourceBranch'], 'refs/heads/dotnet/'), startsWith(variables['Build.SourceBranch'], 'refs/heads/microsoft/'), eq(variables['Build.SourceBranch'], 'refs/heads/main'))) }}:
+ disableComponentGovernance: false
+ ${{ else }}:
+ disableComponentGovernance: true
+ ${{ else }}:
+ disableComponentGovernance: ${{ parameters.disableComponentGovernance }}
+ componentGovernanceIgnoreDirectories: ${{ parameters.componentGovernanceIgnoreDirectories }}
+
+ artifactPublishSteps:
+ - ${{ if ne(parameters.artifacts.publish, '') }}:
+ - ${{ if and(ne(parameters.artifacts.publish.artifacts, 'false'), ne(parameters.artifacts.publish.artifacts, '')) }}:
+ - template: /eng/common/core-templates/steps/publish-build-artifacts.yml
+ parameters:
+ is1ESPipeline: false
+ args:
+ displayName: Publish pipeline artifacts
+ pathToPublish: '$(Build.ArtifactStagingDirectory)/artifacts'
+ publishLocation: Container
+ artifactName: ${{ coalesce(parameters.artifacts.publish.artifacts.name , 'Artifacts_$(Agent.Os)_$(_BuildConfig)') }}
+ continueOnError: true
+ condition: always()
+ - ${{ if and(ne(parameters.artifacts.publish.logs, 'false'), ne(parameters.artifacts.publish.logs, '')) }}:
+ - template: /eng/common/core-templates/steps/publish-pipeline-artifacts.yml
+ parameters:
+ is1ESPipeline: false
+ args:
+ targetPath: '$(Build.ArtifactStagingDirectory)/artifacts/log'
+ artifactName: ${{ coalesce(parameters.artifacts.publish.logs.name, 'Logs_Build_$(Agent.Os)_$(_BuildConfig)') }}
+ displayName: 'Publish logs'
+ continueOnError: true
+ condition: always()
+ sbomEnabled: false # we don't need SBOM for logs
+
+ - ${{ if ne(parameters.enablePublishBuildArtifacts, 'false') }}:
+ - template: /eng/common/core-templates/steps/publish-build-artifacts.yml
+ parameters:
+ is1ESPipeline: false
+ args:
+ displayName: Publish Logs
+ pathToPublish: '$(Build.ArtifactStagingDirectory)/artifacts/log/$(_BuildConfig)'
+ publishLocation: Container
+ artifactName: ${{ coalesce(parameters.enablePublishBuildArtifacts.artifactName, '$(Agent.Os)_$(Agent.JobName)' ) }}
+ continueOnError: true
+ condition: always()
+
+ - ${{ if eq(parameters.enableBuildRetry, 'true') }}:
+ - template: /eng/common/core-templates/steps/publish-pipeline-artifacts.yml
+ parameters:
+ is1ESPipeline: false
+ args:
+ targetPath: '$(Build.SourcesDirectory)\eng\common\BuildConfiguration'
+ artifactName: 'BuildConfiguration'
+ displayName: 'Publish build retry configuration'
+ continueOnError: true
+ sbomEnabled: false # we don't need SBOM for BuildConfiguration
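
Unlike the simple shims, the job.yml rewrite above forwards parameters selectively: `steps` and `is1ESPipeline` are excluded from the generic pass-through because the wrapper emits them itself, and extra step lists (componentGovernanceSteps, artifactPublishSteps) are injected alongside. A stripped-down sketch of that filtering, against a hypothetical core job template:

jobs:
- template: /eng/common/core-templates/job/example-job.yml  # hypothetical path
  parameters:
    is1ESPipeline: false
    # Forward everything except the keys this wrapper sets explicitly.
    ${{ each parameter in parameters }}:
      ${{ if and(ne(parameter.key, 'steps'), ne(parameter.key, 'is1ESPipeline')) }}:
        ${{ parameter.key }}: ${{ parameter.value }}
    # Re-emit the caller's steps item by item as a plain list.
    steps:
    - ${{ each step in parameters.steps }}:
      - ${{ step }}
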
diff --git a/eng/common/templates/job/onelocbuild.yml b/eng/common/templates/job/onelocbuild.yml
index 60ab00c4de3..ff829dc4c70 100644
--- a/eng/common/templates/job/onelocbuild.yml
+++ b/eng/common/templates/job/onelocbuild.yml
@@ -1,109 +1,7 @@
-parameters:
- # Optional: dependencies of the job
- dependsOn: ''
-
- # Optional: A defined YAML pool - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#pool
- pool: ''
-
- CeapexPat: $(dn-bot-ceapex-package-r) # PAT for the loc AzDO instance https://dev.azure.com/ceapex
- GithubPat: $(BotAccount-dotnet-bot-repo-PAT)
-
- SourcesDirectory: $(Build.SourcesDirectory)
- CreatePr: true
- AutoCompletePr: false
- ReusePr: true
- UseLfLineEndings: true
- UseCheckedInLocProjectJson: false
- SkipLocProjectJsonGeneration: false
- LanguageSet: VS_Main_Languages
- LclSource: lclFilesInRepo
- LclPackageId: ''
- RepoType: gitHub
- GitHubOrg: dotnet
- MirrorRepo: ''
- MirrorBranch: main
- condition: ''
- JobNameSuffix: ''
-
jobs:
-- job: OneLocBuild${{ parameters.JobNameSuffix }}
-
- dependsOn: ${{ parameters.dependsOn }}
-
- displayName: OneLocBuild${{ parameters.JobNameSuffix }}
-
- variables:
- - group: OneLocBuildVariables # Contains the CeapexPat and GithubPat
- - name: _GenerateLocProjectArguments
- value: -SourcesDirectory ${{ parameters.SourcesDirectory }}
- -LanguageSet "${{ parameters.LanguageSet }}"
- -CreateNeutralXlfs
- - ${{ if eq(parameters.UseCheckedInLocProjectJson, 'true') }}:
- - name: _GenerateLocProjectArguments
- value: ${{ variables._GenerateLocProjectArguments }} -UseCheckedInLocProjectJson
- - template: /eng/common/templates/variables/pool-providers.yml
-
- ${{ if ne(parameters.pool, '') }}:
- pool: ${{ parameters.pool }}
- ${{ if eq(parameters.pool, '') }}:
- pool:
- # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
- ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
- name: VSEngSS-MicroBuild2022-1ES
- demands: Cmd
- # If it's not devdiv, it's dnceng
- ${{ if ne(variables['System.TeamProject'], 'DevDiv') }}:
- name: $(DncEngInternalBuildPool)
- demands: ImageOverride -equals windows.vs2019.amd64
-
- steps:
- - ${{ if ne(parameters.SkipLocProjectJsonGeneration, 'true') }}:
- - task: Powershell@2
- inputs:
- filePath: $(Build.SourcesDirectory)/eng/common/generate-locproject.ps1
- arguments: $(_GenerateLocProjectArguments)
- displayName: Generate LocProject.json
- condition: ${{ parameters.condition }}
-
- - task: OneLocBuild@2
- displayName: OneLocBuild
- env:
- SYSTEM_ACCESSTOKEN: $(System.AccessToken)
- inputs:
- locProj: eng/Localize/LocProject.json
- outDir: $(Build.ArtifactStagingDirectory)
- lclSource: ${{ parameters.LclSource }}
- lclPackageId: ${{ parameters.LclPackageId }}
- isCreatePrSelected: ${{ parameters.CreatePr }}
- isAutoCompletePrSelected: ${{ parameters.AutoCompletePr }}
- ${{ if eq(parameters.CreatePr, true) }}:
- isUseLfLineEndingsSelected: ${{ parameters.UseLfLineEndings }}
- ${{ if eq(parameters.RepoType, 'gitHub') }}:
- isShouldReusePrSelected: ${{ parameters.ReusePr }}
- packageSourceAuth: patAuth
- patVariable: ${{ parameters.CeapexPat }}
- ${{ if eq(parameters.RepoType, 'gitHub') }}:
- repoType: ${{ parameters.RepoType }}
- gitHubPatVariable: "${{ parameters.GithubPat }}"
- ${{ if ne(parameters.MirrorRepo, '') }}:
- isMirrorRepoSelected: true
- gitHubOrganization: ${{ parameters.GitHubOrg }}
- mirrorRepo: ${{ parameters.MirrorRepo }}
- mirrorBranch: ${{ parameters.MirrorBranch }}
- condition: ${{ parameters.condition }}
-
- - task: PublishBuildArtifacts@1
- displayName: Publish Localization Files
- inputs:
- PathtoPublish: '$(Build.ArtifactStagingDirectory)/loc'
- PublishLocation: Container
- ArtifactName: Loc
- condition: ${{ parameters.condition }}
+- template: /eng/common/core-templates/job/onelocbuild.yml
+ parameters:
+ is1ESPipeline: false
- - task: PublishBuildArtifacts@1
- displayName: Publish LocProject.json
- inputs:
- PathtoPublish: '$(Build.SourcesDirectory)/eng/Localize/'
- PublishLocation: Container
- ArtifactName: Loc
- condition: ${{ parameters.condition }}
\ No newline at end of file
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
diff --git a/eng/common/templates/job/publish-build-assets.yml b/eng/common/templates/job/publish-build-assets.yml
index cc2b346ba8b..ab2edec2adb 100644
--- a/eng/common/templates/job/publish-build-assets.yml
+++ b/eng/common/templates/job/publish-build-assets.yml
@@ -1,156 +1,7 @@
-parameters:
- configuration: 'Debug'
-
- # Optional: condition for the job to run
- condition: ''
-
- # Optional: 'true' if future jobs should run even if this job fails
- continueOnError: false
-
- # Optional: dependencies of the job
- dependsOn: ''
-
- # Optional: Include PublishBuildArtifacts task
- enablePublishBuildArtifacts: false
-
- # Optional: A defined YAML pool - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#pool
- pool: {}
-
- # Optional: should run as a public build even in the internal project
- # if 'true', the build won't run any of the internal only steps, even if it is running in non-public projects.
- runAsPublic: false
-
- # Optional: whether the build's artifacts will be published using release pipelines or direct feed publishing
- publishUsingPipelines: false
-
- # Optional: whether the build's artifacts will be published using release pipelines or direct feed publishing
- publishAssetsImmediately: false
-
- artifactsPublishingAdditionalParameters: ''
-
- signingValidationAdditionalParameters: ''
-
jobs:
-- job: Asset_Registry_Publish
-
- dependsOn: ${{ parameters.dependsOn }}
- timeoutInMinutes: 150
-
- ${{ if eq(parameters.publishAssetsImmediately, 'true') }}:
- displayName: Publish Assets
- ${{ else }}:
- displayName: Publish to Build Asset Registry
-
- variables:
- - template: /eng/common/templates/variables/pool-providers.yml
- - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- - group: Publish-Build-Assets
- - group: AzureDevOps-Artifact-Feeds-Pats
- - name: runCodesignValidationInjection
- value: false
- - ${{ if eq(parameters.publishAssetsImmediately, 'true') }}:
- - template: /eng/common/templates/post-build/common-variables.yml
-
- pool:
- # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
- ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
- name: VSEngSS-MicroBuild2022-1ES
- demands: Cmd
- # If it's not devdiv, it's dnceng
- ${{ if ne(variables['System.TeamProject'], 'DevDiv') }}:
- name: NetCore1ESPool-Publishing-Internal
- demands: ImageOverride -equals windows.vs2019.amd64
-
- steps:
- - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- - task: DownloadBuildArtifacts@0
- displayName: Download artifact
- inputs:
- artifactName: AssetManifests
- downloadPath: '$(Build.StagingDirectory)/Download'
- checkDownloadedFiles: true
- condition: ${{ parameters.condition }}
- continueOnError: ${{ parameters.continueOnError }}
-
- - task: NuGetAuthenticate@1
-
- - task: AzureCLI@2
- displayName: Publish Build Assets
- inputs:
- azureSubscription: "Darc: Maestro Production"
- scriptType: ps
- scriptLocation: scriptPath
- scriptPath: $(Build.SourcesDirectory)/eng/common/sdk-task.ps1
- arguments: >
- -task PublishBuildAssets -restore -msbuildEngine dotnet
- /p:ManifestsPath='$(Build.StagingDirectory)/Download/AssetManifests'
- /p:MaestroApiEndpoint=https://maestro.dot.net
- /p:PublishUsingPipelines=${{ parameters.publishUsingPipelines }}
- /p:OfficialBuildId=$(Build.BuildNumber)
- condition: ${{ parameters.condition }}
- continueOnError: ${{ parameters.continueOnError }}
-
- - task: powershell@2
- displayName: Create ReleaseConfigs Artifact
- inputs:
- targetType: inline
- script: |
- Add-Content -Path "$(Build.StagingDirectory)/ReleaseConfigs.txt" -Value $(BARBuildId)
- Add-Content -Path "$(Build.StagingDirectory)/ReleaseConfigs.txt" -Value "$(DefaultChannels)"
- Add-Content -Path "$(Build.StagingDirectory)/ReleaseConfigs.txt" -Value $(IsStableBuild)
-
- - task: PublishBuildArtifacts@1
- displayName: Publish ReleaseConfigs Artifact
- inputs:
- PathtoPublish: '$(Build.StagingDirectory)/ReleaseConfigs.txt'
- PublishLocation: Container
- ArtifactName: ReleaseConfigs
-
- - task: powershell@2
- displayName: Check if SymbolPublishingExclusionsFile.txt exists
- inputs:
- targetType: inline
- script: |
- $symbolExclusionfile = "$(Build.SourcesDirectory)/eng/SymbolPublishingExclusionsFile.txt"
- if(Test-Path -Path $symbolExclusionfile)
- {
- Write-Host "SymbolExclusionFile exists"
- Write-Host "##vso[task.setvariable variable=SymbolExclusionFile]true"
- }
- else{
- Write-Host "Symbols Exclusion file does not exists"
- Write-Host "##vso[task.setvariable variable=SymbolExclusionFile]false"
- }
-
- - task: PublishBuildArtifacts@1
- displayName: Publish SymbolPublishingExclusionsFile Artifact
- condition: eq(variables['SymbolExclusionFile'], 'true')
- inputs:
- PathtoPublish: '$(Build.SourcesDirectory)/eng/SymbolPublishingExclusionsFile.txt'
- PublishLocation: Container
- ArtifactName: ReleaseConfigs
-
- - ${{ if eq(parameters.publishAssetsImmediately, 'true') }}:
- - template: /eng/common/templates/post-build/setup-maestro-vars.yml
- parameters:
- BARBuildId: ${{ parameters.BARBuildId }}
- PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
-
- - task: AzureCLI@2
- displayName: Publish Using Darc
- inputs:
- azureSubscription: "Darc: Maestro Production"
- scriptType: ps
- scriptLocation: scriptPath
- scriptPath: $(Build.SourcesDirectory)/eng/common/post-build/publish-using-darc.ps1
- arguments: -BuildId $(BARBuildId)
- -PublishingInfraVersion 3
- -AzdoToken '$(System.AccessToken)'
- -WaitPublishingFinish true
- -ArtifactsPublishingAdditionalParameters '${{ parameters.artifactsPublishingAdditionalParameters }}'
- -SymbolPublishingAdditionalParameters '${{ parameters.symbolPublishingAdditionalParameters }}'
+- template: /eng/common/core-templates/job/publish-build-assets.yml
+ parameters:
+ is1ESPipeline: false
- - ${{ if eq(parameters.enablePublishBuildArtifacts, 'true') }}:
- - template: /eng/common/templates/steps/publish-logs.yml
- parameters:
- JobLabel: 'Publish_Artifacts_Logs'
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
diff --git a/eng/common/templates/job/source-build.yml b/eng/common/templates/job/source-build.yml
index c0ff472b697..e44d47b1d76 100644
--- a/eng/common/templates/job/source-build.yml
+++ b/eng/common/templates/job/source-build.yml
@@ -1,74 +1,7 @@
-parameters:
- # This template adds arcade-powered source-build to CI. The template produces a server job with a
- # default ID 'Source_Build_Complete' to put in a dependency list if necessary.
-
- # Specifies the prefix for source-build jobs added to pipeline. Use this if disambiguation needed.
- jobNamePrefix: 'Source_Build'
-
- # Defines the platform on which to run the job. By default, a linux-x64 machine, suitable for
- # managed-only repositories. This is an object with these properties:
- #
- # name: ''
- # The name of the job. This is included in the job ID.
- # targetRID: ''
- # The name of the target RID to use, instead of the one auto-detected by Arcade.
- # nonPortable: false
- # Enables non-portable mode. This means a more specific RID (e.g. fedora.32-x64 rather than
- # linux-x64), and compiling against distro-provided packages rather than portable ones.
- # skipPublishValidation: false
- # Disables publishing validation. By default, a check is performed to ensure no packages are
- # published by source-build.
- # container: ''
- # A container to use. Runs in docker.
- # pool: {}
- # A pool to use. Runs directly on an agent.
- # buildScript: ''
- # Specifies the build script to invoke to perform the build in the repo. The default
- # './build.sh' should work for typical Arcade repositories, but this is customizable for
- # difficult situations.
- # jobProperties: {}
- # A list of job properties to inject at the top level, for potential extensibility beyond
- # container and pool.
- platform: {}
-
- # If set to true and running on a non-public project,
- # Internal blob storage locations will be enabled.
- # This is not enabled by default because many repositories do not need internal sources
- # and do not need to have the required service connections approved in the pipeline.
- enableInternalSources: false
-
jobs:
-- job: ${{ parameters.jobNamePrefix }}_${{ parameters.platform.name }}
- displayName: Source-Build (${{ parameters.platform.name }})
-
- ${{ each property in parameters.platform.jobProperties }}:
- ${{ property.key }}: ${{ property.value }}
-
- ${{ if ne(parameters.platform.container, '') }}:
- container: ${{ parameters.platform.container }}
-
- ${{ if eq(parameters.platform.pool, '') }}:
- # The default VM host AzDO pool. This should be capable of running Docker containers: almost all
- # source-build builds run in Docker, including the default managed platform.
- # /eng/common/templates/variables/pool-providers.yml can't be used here (some customers declare variables already), so duplicate its logic
- pool:
- ${{ if eq(variables['System.TeamProject'], 'public') }}:
- name: $[replace(replace(eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'), True, 'NetCore-Svc-Public' ), False, 'NetCore-Public')]
- demands: ImageOverride -equals Build.Ubuntu.1804.Amd64.Open
-
- ${{ if eq(variables['System.TeamProject'], 'internal') }}:
- name: $[replace(replace(eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'), True, 'NetCore1ESPool-Svc-Internal'), False, 'NetCore1ESPool-Internal')]
- demands: ImageOverride -equals Build.Ubuntu.1804.Amd64
-
- ${{ if ne(parameters.platform.pool, '') }}:
- pool: ${{ parameters.platform.pool }}
-
- workspace:
- clean: all
+- template: /eng/common/core-templates/job/source-build.yml
+ parameters:
+ is1ESPipeline: false
- steps:
- - ${{ if eq(parameters.enableInternalSources, true) }}:
- - template: /eng/common/templates/steps/enable-internal-runtimes.yml
- - template: /eng/common/templates/steps/source-build.yml
- parameters:
- platform: ${{ parameters.platform }}
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
diff --git a/eng/common/templates/job/source-index-stage1.yml b/eng/common/templates/job/source-index-stage1.yml
index 0b6bb89dc78..89f3291593c 100644
--- a/eng/common/templates/job/source-index-stage1.yml
+++ b/eng/common/templates/job/source-index-stage1.yml
@@ -1,82 +1,7 @@
-parameters:
- runAsPublic: false
- sourceIndexUploadPackageVersion: 2.0.0-20240502.12
- sourceIndexProcessBinlogPackageVersion: 1.0.1-20240129.2
- sourceIndexPackageSource: https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json
- sourceIndexBuildCommand: powershell -NoLogo -NoProfile -ExecutionPolicy Bypass -Command "eng/common/build.ps1 -restore -build -binarylog -ci"
- preSteps: []
- binlogPath: artifacts/log/Debug/Build.binlog
- condition: ''
- dependsOn: ''
- pool: ''
-
jobs:
-- job: SourceIndexStage1
- dependsOn: ${{ parameters.dependsOn }}
- condition: ${{ parameters.condition }}
- variables:
- - name: SourceIndexUploadPackageVersion
- value: ${{ parameters.sourceIndexUploadPackageVersion }}
- - name: SourceIndexProcessBinlogPackageVersion
- value: ${{ parameters.sourceIndexProcessBinlogPackageVersion }}
- - name: SourceIndexPackageSource
- value: ${{ parameters.sourceIndexPackageSource }}
- - name: BinlogPath
- value: ${{ parameters.binlogPath }}
- - template: /eng/common/templates/variables/pool-providers.yml
-
- ${{ if ne(parameters.pool, '') }}:
- pool: ${{ parameters.pool }}
- ${{ if eq(parameters.pool, '') }}:
- pool:
- ${{ if eq(variables['System.TeamProject'], 'public') }}:
- name: $(DncEngPublicBuildPool)
- demands: ImageOverride -equals windows.vs2019.amd64.open
- ${{ if eq(variables['System.TeamProject'], 'internal') }}:
- name: $(DncEngInternalBuildPool)
- demands: ImageOverride -equals windows.vs2019.amd64
-
- steps:
- - ${{ each preStep in parameters.preSteps }}:
- - ${{ preStep }}
-
- - task: UseDotNet@2
- displayName: Use .NET 8 SDK
- inputs:
- packageType: sdk
- version: 8.0.x
- installationPath: $(Agent.TempDirectory)/dotnet
- workingDirectory: $(Agent.TempDirectory)
-
- - script: |
- $(Agent.TempDirectory)/dotnet/dotnet tool install BinLogToSln --version $(sourceIndexProcessBinlogPackageVersion) --add-source $(SourceIndexPackageSource) --tool-path $(Agent.TempDirectory)/.source-index/tools
- $(Agent.TempDirectory)/dotnet/dotnet tool install UploadIndexStage1 --version $(sourceIndexUploadPackageVersion) --add-source $(SourceIndexPackageSource) --tool-path $(Agent.TempDirectory)/.source-index/tools
- displayName: Download Tools
- # Set working directory to temp directory so 'dotnet' doesn't try to use global.json and use the repo's sdk.
- workingDirectory: $(Agent.TempDirectory)
-
- - script: ${{ parameters.sourceIndexBuildCommand }}
- displayName: Build Repository
-
- - script: $(Agent.TempDirectory)/.source-index/tools/BinLogToSln -i $(BinlogPath) -r $(Build.SourcesDirectory) -n $(Build.Repository.Name) -o .source-index/stage1output
- displayName: Process Binlog into indexable sln
-
- - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- - task: AzureCLI@2
- displayName: Get stage 1 auth token
- inputs:
- azureSubscription: 'SourceDotNet Stage1 Publish'
- addSpnToEnvironment: true
- scriptType: 'ps'
- scriptLocation: 'inlineScript'
- inlineScript: |
- echo "##vso[task.setvariable variable=ARM_CLIENT_ID;issecret=true]$env:servicePrincipalId"
- echo "##vso[task.setvariable variable=ARM_ID_TOKEN;issecret=true]$env:idToken"
- echo "##vso[task.setvariable variable=ARM_TENANT_ID;issecret=true]$env:tenantId"
-
- - script: |
- az login --service-principal -u $(ARM_CLIENT_ID) --tenant $(ARM_TENANT_ID) --allow-no-subscriptions --federated-token $(ARM_ID_TOKEN)
- displayName: "Login to Azure"
+- template: /eng/common/core-templates/job/source-index-stage1.yml
+ parameters:
+ is1ESPipeline: false
- - script: $(Agent.TempDirectory)/.source-index/tools/UploadIndexStage1 -i .source-index/stage1output -n $(Build.Repository.Name) -s netsourceindexstage1 -b stage1
- displayName: Upload stage1 artifacts to source index
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
diff --git a/eng/common/templates/jobs/codeql-build.yml b/eng/common/templates/jobs/codeql-build.yml
index f7dc5ea4aaa..517f24d6a52 100644
--- a/eng/common/templates/jobs/codeql-build.yml
+++ b/eng/common/templates/jobs/codeql-build.yml
@@ -1,31 +1,7 @@
-parameters:
- # See schema documentation in /Documentation/AzureDevOps/TemplateSchema.md
- continueOnError: false
- # Required: A collection of jobs to run - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job
- jobs: []
- # Optional: if specified, restore and use this version of Guardian instead of the default.
- overrideGuardianVersion: ''
-
jobs:
-- template: /eng/common/templates/jobs/jobs.yml
+- template: /eng/common/core-templates/jobs/codeql-build.yml
parameters:
- enableMicrobuild: false
- enablePublishBuildArtifacts: false
- enablePublishTestResults: false
- enablePublishBuildAssets: false
- enablePublishUsingPipelines: false
- enableTelemetry: true
+ is1ESPipeline: false
- variables:
- - group: Publish-Build-Assets
- # The Guardian version specified in 'eng/common/sdl/packages.config'. This value must be kept in
- # sync with the packages.config file.
- - name: DefaultGuardianVersion
- value: 0.109.0
- - name: GuardianPackagesConfigFile
- value: $(Build.SourcesDirectory)\eng\common\sdl\packages.config
- - name: GuardianVersion
- value: ${{ coalesce(parameters.overrideGuardianVersion, '$(DefaultGuardianVersion)') }}
-
- jobs: ${{ parameters.jobs }}
-
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
diff --git a/eng/common/templates/jobs/jobs.yml b/eng/common/templates/jobs/jobs.yml
index 289bb2396ce..388e9037b3e 100644
--- a/eng/common/templates/jobs/jobs.yml
+++ b/eng/common/templates/jobs/jobs.yml
@@ -1,97 +1,7 @@
-parameters:
- # See schema documentation in /Documentation/AzureDevOps/TemplateSchema.md
- continueOnError: false
-
- # Optional: Include PublishBuildArtifacts task
- enablePublishBuildArtifacts: false
-
- # Optional: Enable publishing using release pipelines
- enablePublishUsingPipelines: false
-
- # Optional: Enable running the source-build jobs to build repo from source
- enableSourceBuild: false
-
- # Optional: Parameters for source-build template.
- # See /eng/common/templates/jobs/source-build.yml for options
- sourceBuildParameters: []
-
- graphFileGeneration:
- # Optional: Enable generating the graph files at the end of the build
- enabled: false
- # Optional: Include toolset dependencies in the generated graph files
- includeToolset: false
-
- # Required: A collection of jobs to run - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job
- jobs: []
-
- # Optional: Override automatically derived dependsOn value for "publish build assets" job
- publishBuildAssetsDependsOn: ''
-
- # Optional: Publish the assets as soon as the publish to BAR stage is complete, rather than doing so in a separate stage.
- publishAssetsImmediately: false
-
- # Optional: If using publishAssetsImmediately and additional parameters are needed, this can be used to send along additional parameters (normally sent to post-build.yml)
- artifactsPublishingAdditionalParameters: ''
- signingValidationAdditionalParameters: ''
-
- # Optional: should run as a public build even in the internal project
- # if 'true', the build won't run any of the internal only steps, even if it is running in non-public projects.
- runAsPublic: false
-
- enableSourceIndex: false
- sourceIndexParams: {}
-
-# Internal resources (telemetry, microbuild) can only be accessed from non-public projects,
-# and some (Microbuild) should only be applied to non-PR cases for internal builds.
-
jobs:
-- ${{ each job in parameters.jobs }}:
- - template: ../job/job.yml
- parameters:
- # pass along parameters
- ${{ each parameter in parameters }}:
- ${{ if ne(parameter.key, 'jobs') }}:
- ${{ parameter.key }}: ${{ parameter.value }}
-
- # pass along job properties
- ${{ each property in job }}:
- ${{ if ne(property.key, 'job') }}:
- ${{ property.key }}: ${{ property.value }}
-
- name: ${{ job.job }}
-
-- ${{ if eq(parameters.enableSourceBuild, true) }}:
- - template: /eng/common/templates/jobs/source-build.yml
- parameters:
- allCompletedJobId: Source_Build_Complete
- ${{ each parameter in parameters.sourceBuildParameters }}:
- ${{ parameter.key }}: ${{ parameter.value }}
-
-- ${{ if eq(parameters.enableSourceIndex, 'true') }}:
- - template: ../job/source-index-stage1.yml
- parameters:
- runAsPublic: ${{ parameters.runAsPublic }}
- ${{ each parameter in parameters.sourceIndexParams }}:
- ${{ parameter.key }}: ${{ parameter.value }}
-
-- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- - ${{ if or(eq(parameters.enablePublishBuildAssets, true), eq(parameters.artifacts.publish.manifests, 'true'), ne(parameters.artifacts.publish.manifests, '')) }}:
- - template: ../job/publish-build-assets.yml
- parameters:
- continueOnError: ${{ parameters.continueOnError }}
- dependsOn:
- - ${{ if ne(parameters.publishBuildAssetsDependsOn, '') }}:
- - ${{ each job in parameters.publishBuildAssetsDependsOn }}:
- - ${{ job.job }}
- - ${{ if eq(parameters.publishBuildAssetsDependsOn, '') }}:
- - ${{ each job in parameters.jobs }}:
- - ${{ job.job }}
- - ${{ if eq(parameters.enableSourceBuild, true) }}:
- - Source_Build_Complete
+- template: /eng/common/core-templates/jobs/jobs.yml
+ parameters:
+ is1ESPipeline: false
- runAsPublic: ${{ parameters.runAsPublic }}
- publishUsingPipelines: ${{ parameters.enablePublishUsingPipelines }}
- publishAssetsImmediately: ${{ parameters.publishAssetsImmediately }}
- enablePublishBuildArtifacts: ${{ parameters.enablePublishBuildArtifacts }}
- artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
- signingValidationAdditionalParameters: ${{ parameters.signingValidationAdditionalParameters }}
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
diff --git a/eng/common/templates/jobs/source-build.yml b/eng/common/templates/jobs/source-build.yml
index 5f46bfa895c..818d4c326db 100644
--- a/eng/common/templates/jobs/source-build.yml
+++ b/eng/common/templates/jobs/source-build.yml
@@ -1,54 +1,7 @@
-parameters:
- # This template adds arcade-powered source-build to CI. A job is created for each platform, as
- # well as an optional server job that completes when all platform jobs complete.
-
- # The name of the "join" job for all source-build platforms. If set to empty string, the job is
- # not included. Existing repo pipelines can use this job to depend on all source-build jobs
- # completing without maintaining a separate list of every single job ID: just depend on this one
- # server job. By default, not included. Recommended name if used: 'Source_Build_Complete'.
- allCompletedJobId: ''
-
- # See /eng/common/templates/job/source-build.yml
- jobNamePrefix: 'Source_Build'
-
- # This is the default platform provided by Arcade, intended for use by a managed-only repo.
- defaultManagedPlatform:
- name: 'Managed'
- container: 'mcr.microsoft.com/dotnet-buildtools/prereqs:centos-stream8'
-
- # Defines the platforms on which to run build jobs. One job is created for each platform, and the
- # object in this array is sent to the job template as 'platform'. If no platforms are specified,
- # one job runs on 'defaultManagedPlatform'.
- platforms: []
-
- # If set to true and running on a non-public project,
- # Internal nuget and blob storage locations will be enabled.
- # This is not enabled by default because many repositories do not need internal sources
- # and do not need to have the required service connections approved in the pipeline.
- enableInternalSources: false
-
jobs:
+- template: /eng/common/core-templates/jobs/source-build.yml
+ parameters:
+ is1ESPipeline: false
-- ${{ if ne(parameters.allCompletedJobId, '') }}:
- - job: ${{ parameters.allCompletedJobId }}
- displayName: Source-Build Complete
- pool: server
- dependsOn:
- - ${{ each platform in parameters.platforms }}:
- - ${{ parameters.jobNamePrefix }}_${{ platform.name }}
- - ${{ if eq(length(parameters.platforms), 0) }}:
- - ${{ parameters.jobNamePrefix }}_${{ parameters.defaultManagedPlatform.name }}
-
-- ${{ each platform in parameters.platforms }}:
- - template: /eng/common/templates/job/source-build.yml
- parameters:
- jobNamePrefix: ${{ parameters.jobNamePrefix }}
- platform: ${{ platform }}
- enableInternalSources: ${{ parameters.enableInternalSources }}
-
-- ${{ if eq(length(parameters.platforms), 0) }}:
- - template: /eng/common/templates/job/source-build.yml
- parameters:
- jobNamePrefix: ${{ parameters.jobNamePrefix }}
- platform: ${{ parameters.defaultManagedPlatform }}
- enableInternalSources: ${{ parameters.enableInternalSources }}
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
\ No newline at end of file
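
Among the logic deleted above is the inline copy of the "join" job (presumably now provided by the core template): an agentless job on the server pool that does no work itself and exists only so downstream stages can depend on one well-known ID instead of enumerating every per-platform source-build job. A sketch of the pattern, with illustrative job names:

jobs:
- job: Source_Build_Complete     # the single ID consumers put in dependsOn
  displayName: Source-Build Complete
  pool: server                   # agentless; no steps, no build agent allocated
  dependsOn:
  - Source_Build_Managed         # illustrative per-platform job names
  - Source_Build_Linux_Portable
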
diff --git a/eng/common/templates/post-build/common-variables.yml b/eng/common/templates/post-build/common-variables.yml
index 173914f2364..7fa10587559 100644
--- a/eng/common/templates/post-build/common-variables.yml
+++ b/eng/common/templates/post-build/common-variables.yml
@@ -1,22 +1,8 @@
variables:
- - group: Publish-Build-Assets
+- template: /eng/common/core-templates/post-build/common-variables.yml
+ parameters:
+ # Specifies whether to use 1ES
+ is1ESPipeline: false
- # Whether the build is internal or not
- - name: IsInternalBuild
- value: ${{ and(ne(variables['System.TeamProject'], 'public'), contains(variables['Build.SourceBranch'], 'internal')) }}
-
- # Default Maestro++ API Endpoint and API Version
- - name: MaestroApiEndPoint
- value: "https://maestro.dot.net"
- - name: MaestroApiAccessToken
- value: $(MaestroAccessToken)
- - name: MaestroApiVersion
- value: "2020-02-20"
-
- - name: SourceLinkCLIVersion
- value: 3.0.0
- - name: SymbolToolVersion
- value: 1.0.1
-
- - name: runCodesignValidationInjection
- value: false
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
\ No newline at end of file
diff --git a/eng/common/templates/post-build/post-build.yml b/eng/common/templates/post-build/post-build.yml
index c3b6a3012fe..53ede714bdd 100644
--- a/eng/common/templates/post-build/post-build.yml
+++ b/eng/common/templates/post-build/post-build.yml
@@ -1,283 +1,8 @@
-parameters:
- # Which publishing infra should be used. THIS SHOULD MATCH THE VERSION ON THE BUILD MANIFEST.
- # Publishing V1 is no longer supported
- # Publishing V2 is no longer supported
- # Publishing V3 is the default
- - name: publishingInfraVersion
- displayName: Which version of publishing should be used to promote the build definition?
- type: number
- default: 3
- values:
- - 3
-
- - name: BARBuildId
- displayName: BAR Build Id
- type: number
- default: 0
-
- - name: PromoteToChannelIds
- displayName: Channel to promote BARBuildId to
- type: string
- default: ''
-
- - name: enableSourceLinkValidation
- displayName: Enable SourceLink validation
- type: boolean
- default: false
-
- - name: enableSigningValidation
- displayName: Enable signing validation
- type: boolean
- default: true
-
- - name: enableSymbolValidation
- displayName: Enable symbol validation
- type: boolean
- default: false
-
- - name: enableNugetValidation
- displayName: Enable NuGet validation
- type: boolean
- default: true
-
- - name: publishInstallersAndChecksums
- displayName: Publish installers and checksums
- type: boolean
- default: true
-
- - name: SDLValidationParameters
- type: object
- default:
- enable: false
- publishGdn: false
- continueOnError: false
- params: ''
- artifactNames: ''
- downloadArtifacts: true
-
- # These parameters let the user customize the call to sdk-task.ps1 for publishing
- # symbols & general artifacts as well as for signing validation
- - name: symbolPublishingAdditionalParameters
- displayName: Symbol publishing additional parameters
- type: string
- default: ''
-
- - name: artifactsPublishingAdditionalParameters
- displayName: Artifact publishing additional parameters
- type: string
- default: ''
-
- - name: signingValidationAdditionalParameters
- displayName: Signing validation additional parameters
- type: string
- default: ''
-
- # Which stages should finish execution before post-build stages start
- - name: validateDependsOn
- type: object
- default:
- - build
-
- - name: publishDependsOn
- type: object
- default:
- - Validate
-
- # Optional: Call asset publishing rather than running in a separate stage
- - name: publishAssetsImmediately
- type: boolean
- default: false
-
stages:
-- ${{ if or(eq( parameters.enableNugetValidation, 'true'), eq(parameters.enableSigningValidation, 'true'), eq(parameters.enableSourceLinkValidation, 'true'), eq(parameters.SDLValidationParameters.enable, 'true')) }}:
- - stage: Validate
- dependsOn: ${{ parameters.validateDependsOn }}
- displayName: Validate Build Assets
- variables:
- - template: common-variables.yml
- - template: /eng/common/templates/variables/pool-providers.yml
- jobs:
- - job:
- displayName: NuGet Validation
- condition: and(succeededOrFailed(), eq( ${{ parameters.enableNugetValidation }}, 'true'))
- pool:
- # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
- ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
- name: VSEngSS-MicroBuild2022-1ES
- demands: Cmd
- # If it's not devdiv, it's dnceng
- ${{ else }}:
- name: $(DncEngInternalBuildPool)
- demands: ImageOverride -equals windows.vs2019.amd64
-
- steps:
- - template: setup-maestro-vars.yml
- parameters:
- BARBuildId: ${{ parameters.BARBuildId }}
- PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
-
- - task: DownloadBuildArtifacts@0
- displayName: Download Package Artifacts
- inputs:
- buildType: specific
- buildVersionToDownload: specific
- project: $(AzDOProjectName)
- pipeline: $(AzDOPipelineId)
- buildId: $(AzDOBuildId)
- artifactName: PackageArtifacts
- checkDownloadedFiles: true
-
- - task: PowerShell@2
- displayName: Validate
- inputs:
- filePath: $(Build.SourcesDirectory)/eng/common/post-build/nuget-validation.ps1
- arguments: -PackagesPath $(Build.ArtifactStagingDirectory)/PackageArtifacts/
- -ToolDestinationPath $(Agent.BuildDirectory)/Extract/
-
- - job:
- displayName: Signing Validation
- condition: and( eq( ${{ parameters.enableSigningValidation }}, 'true'), ne( variables['PostBuildSign'], 'true'))
- pool:
- # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
- ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
- name: VSEngSS-MicroBuild2022-1ES
- demands: Cmd
- # If it's not devdiv, it's dnceng
- ${{ else }}:
- name: $(DncEngInternalBuildPool)
- demands: ImageOverride -equals windows.vs2019.amd64
- steps:
- - template: setup-maestro-vars.yml
- parameters:
- BARBuildId: ${{ parameters.BARBuildId }}
- PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
-
- - task: DownloadBuildArtifacts@0
- displayName: Download Package Artifacts
- inputs:
- buildType: specific
- buildVersionToDownload: specific
- project: $(AzDOProjectName)
- pipeline: $(AzDOPipelineId)
- buildId: $(AzDOBuildId)
- artifactName: PackageArtifacts
- checkDownloadedFiles: true
- itemPattern: |
- **
- !**/Microsoft.SourceBuild.Intermediate.*.nupkg
-
- # This is necessary whenever we want to publish/restore to an AzDO private feed
- # Since sdk-task.ps1 tries to restore packages we need to do this authentication here
- # otherwise it'll complain about accessing a private feed.
- - task: NuGetAuthenticate@1
- displayName: 'Authenticate to AzDO Feeds'
-
- # Signing validation will optionally work with the buildmanifest file which is downloaded from
- # Azure DevOps above.
- - task: PowerShell@2
- displayName: Validate
- inputs:
- filePath: eng\common\sdk-task.ps1
- arguments: -task SigningValidation -restore -msbuildEngine vs
- /p:PackageBasePath='$(Build.ArtifactStagingDirectory)/PackageArtifacts'
- /p:SignCheckExclusionsFile='$(Build.SourcesDirectory)/eng/SignCheckExclusionsFile.txt'
- ${{ parameters.signingValidationAdditionalParameters }}
-
- - template: ../steps/publish-logs.yml
- parameters:
- StageLabel: 'Validation'
- JobLabel: 'Signing'
-
- - job:
- displayName: SourceLink Validation
- condition: eq( ${{ parameters.enableSourceLinkValidation }}, 'true')
- pool:
- # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
- ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
- name: VSEngSS-MicroBuild2022-1ES
- demands: Cmd
- # If it's not devdiv, it's dnceng
- ${{ else }}:
- name: $(DncEngInternalBuildPool)
- demands: ImageOverride -equals windows.vs2019.amd64
- steps:
- - template: setup-maestro-vars.yml
- parameters:
- BARBuildId: ${{ parameters.BARBuildId }}
- PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
-
- - task: DownloadBuildArtifacts@0
- displayName: Download Blob Artifacts
- inputs:
- buildType: specific
- buildVersionToDownload: specific
- project: $(AzDOProjectName)
- pipeline: $(AzDOPipelineId)
- buildId: $(AzDOBuildId)
- artifactName: BlobArtifacts
- checkDownloadedFiles: true
-
- - task: PowerShell@2
- displayName: Validate
- inputs:
- filePath: $(Build.SourcesDirectory)/eng/common/post-build/sourcelink-validation.ps1
- arguments: -InputPath $(Build.ArtifactStagingDirectory)/BlobArtifacts/
- -ExtractPath $(Agent.BuildDirectory)/Extract/
- -GHRepoName $(Build.Repository.Name)
- -GHCommit $(Build.SourceVersion)
- -SourcelinkCliVersion $(SourceLinkCLIVersion)
- continueOnError: true
-
- - template: /eng/common/templates/job/execute-sdl.yml
- parameters:
- enable: ${{ parameters.SDLValidationParameters.enable }}
- publishGuardianDirectoryToPipeline: ${{ parameters.SDLValidationParameters.publishGdn }}
- additionalParameters: ${{ parameters.SDLValidationParameters.params }}
- continueOnError: ${{ parameters.SDLValidationParameters.continueOnError }}
- artifactNames: ${{ parameters.SDLValidationParameters.artifactNames }}
- downloadArtifacts: ${{ parameters.SDLValidationParameters.downloadArtifacts }}
-
-- ${{ if ne(parameters.publishAssetsImmediately, 'true') }}:
- - stage: publish_using_darc
- ${{ if or(eq(parameters.enableNugetValidation, 'true'), eq(parameters.enableSigningValidation, 'true'), eq(parameters.enableSourceLinkValidation, 'true'), eq(parameters.SDLValidationParameters.enable, 'true')) }}:
- dependsOn: ${{ parameters.publishDependsOn }}
- ${{ else }}:
- dependsOn: ${{ parameters.validateDependsOn }}
- displayName: Publish using Darc
- variables:
- - template: common-variables.yml
- - template: /eng/common/templates/variables/pool-providers.yml
- jobs:
- - job:
- displayName: Publish Using Darc
- timeoutInMinutes: 120
- pool:
- # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
- ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
- name: VSEngSS-MicroBuild2022-1ES
- demands: Cmd
- # If it's not devdiv, it's dnceng
- ${{ else }}:
- name: NetCore1ESPool-Publishing-Internal
- demands: ImageOverride -equals windows.vs2019.amd64
- steps:
- - template: setup-maestro-vars.yml
- parameters:
- BARBuildId: ${{ parameters.BARBuildId }}
- PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
-
- - task: NuGetAuthenticate@1
+- template: /eng/common/core-templates/post-build/post-build.yml
+ parameters:
+ # Specifies whether to use 1ES
+ is1ESPipeline: false
- - task: AzureCLI@2
- displayName: Publish Using Darc
- inputs:
- azureSubscription: "Darc: Maestro Production"
- scriptType: ps
- scriptLocation: scriptPath
- scriptPath: $(Build.SourcesDirectory)/eng/common/post-build/publish-using-darc.ps1
- arguments: -BuildId $(BARBuildId)
- -PublishingInfraVersion ${{ parameters.publishingInfraVersion }}
- -AzdoToken '$(System.AccessToken)'
- -WaitPublishingFinish true
- -ArtifactsPublishingAdditionalParameters '${{ parameters.artifactsPublishingAdditionalParameters }}'
- -SymbolPublishingAdditionalParameters '${{ parameters.symbolPublishingAdditionalParameters }}'
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
\ No newline at end of file
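
Because the shim forwards everything, the deleted parameter block above still documents the call shape of the post-build entry point, which is consumed from a repo pipeline as a stage template. A minimal hypothetical invocation (values are illustrative):

    stages:
    - template: /eng/common/templates/post-build/post-build.yml
      parameters:
        publishingInfraVersion: 3
        enableSigningValidation: false
        enableSourceLinkValidation: false
        publishAssetsImmediately: true
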
diff --git a/eng/common/templates/post-build/setup-maestro-vars.yml b/eng/common/templates/post-build/setup-maestro-vars.yml
index 64b9abc6850..a79fab5b441 100644
--- a/eng/common/templates/post-build/setup-maestro-vars.yml
+++ b/eng/common/templates/post-build/setup-maestro-vars.yml
@@ -1,70 +1,8 @@
-parameters:
- BARBuildId: ''
- PromoteToChannelIds: ''
-
steps:
- - ${{ if eq(coalesce(parameters.PromoteToChannelIds, 0), 0) }}:
- - task: DownloadBuildArtifacts@0
- displayName: Download Release Configs
- inputs:
- buildType: current
- artifactName: ReleaseConfigs
- checkDownloadedFiles: true
-
- - task: AzureCLI@2
- name: setReleaseVars
- displayName: Set Release Configs Vars
- inputs:
- azureSubscription: "Darc: Maestro Production"
- scriptType: pscore
- scriptLocation: inlineScript
- inlineScript: |
- try {
- if (!$Env:PromoteToMaestroChannels -or $Env:PromoteToMaestroChannels.Trim() -eq '') {
- $Content = Get-Content $(Build.StagingDirectory)/ReleaseConfigs/ReleaseConfigs.txt
-
- $BarId = $Content | Select -Index 0
- $Channels = $Content | Select -Index 1
- $IsStableBuild = $Content | Select -Index 2
-
- $AzureDevOpsProject = $Env:System_TeamProject
- $AzureDevOpsBuildDefinitionId = $Env:System_DefinitionId
- $AzureDevOpsBuildId = $Env:Build_BuildId
- }
- else {
- . $(Build.SourcesDirectory)\eng\common\tools.ps1
- $darc = Get-Darc
- $buildInfo = & $darc get-build `
- --id ${{ parameters.BARBuildId }} `
- --extended `
- --output-format json `
- --ci `
- | convertFrom-Json
-
- $BarId = ${{ parameters.BARBuildId }}
- $Channels = $Env:PromoteToMaestroChannels -split ","
- $Channels = $Channels -join "]["
- $Channels = "[$Channels]"
-
- $IsStableBuild = $buildInfo.stable
- $AzureDevOpsProject = $buildInfo.azureDevOpsProject
- $AzureDevOpsBuildDefinitionId = $buildInfo.azureDevOpsBuildDefinitionId
- $AzureDevOpsBuildId = $buildInfo.azureDevOpsBuildId
- }
-
- Write-Host "##vso[task.setvariable variable=BARBuildId]$BarId"
- Write-Host "##vso[task.setvariable variable=TargetChannels]$Channels"
- Write-Host "##vso[task.setvariable variable=IsStableBuild]$IsStableBuild"
+- template: /eng/common/core-templates/post-build/setup-maestro-vars.yml
+ parameters:
+ # Specifies whether to use 1ES
+ is1ESPipeline: false
- Write-Host "##vso[task.setvariable variable=AzDOProjectName]$AzureDevOpsProject"
- Write-Host "##vso[task.setvariable variable=AzDOPipelineId]$AzureDevOpsBuildDefinitionId"
- Write-Host "##vso[task.setvariable variable=AzDOBuildId]$AzureDevOpsBuildId"
- }
- catch {
- Write-Host $_
- Write-Host $_.Exception
- Write-Host $_.ScriptStackTrace
- exit 1
- }
- env:
- PromoteToMaestroChannels: ${{ parameters.PromoteToChannelIds }}
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
\ No newline at end of file
diff --git a/eng/common/templates/post-build/trigger-subscription.yml b/eng/common/templates/post-build/trigger-subscription.yml
deleted file mode 100644
index da669030daf..00000000000
--- a/eng/common/templates/post-build/trigger-subscription.yml
+++ /dev/null
@@ -1,13 +0,0 @@
-parameters:
- ChannelId: 0
-
-steps:
-- task: PowerShell@2
- displayName: Triggering subscriptions
- inputs:
- filePath: $(Build.SourcesDirectory)/eng/common/post-build/trigger-subscriptions.ps1
- arguments: -SourceRepo $(Build.Repository.Uri)
- -ChannelId ${{ parameters.ChannelId }}
- -MaestroApiAccessToken $(MaestroAccessToken)
- -MaestroApiEndPoint $(MaestroApiEndPoint)
- -MaestroApiVersion $(MaestroApiVersion)
diff --git a/eng/common/templates/steps/add-build-to-channel.yml b/eng/common/templates/steps/add-build-to-channel.yml
deleted file mode 100644
index f67a210d62f..00000000000
--- a/eng/common/templates/steps/add-build-to-channel.yml
+++ /dev/null
@@ -1,13 +0,0 @@
-parameters:
- ChannelId: 0
-
-steps:
-- task: PowerShell@2
- displayName: Add Build to Channel
- inputs:
- filePath: $(Build.SourcesDirectory)/eng/common/post-build/add-build-to-channel.ps1
- arguments: -BuildId $(BARBuildId)
- -ChannelId ${{ parameters.ChannelId }}
- -MaestroApiAccessToken $(MaestroApiAccessToken)
- -MaestroApiEndPoint $(MaestroApiEndPoint)
- -MaestroApiVersion $(MaestroApiVersion)
diff --git a/eng/common/templates/steps/build-reason.yml b/eng/common/templates/steps/build-reason.yml
deleted file mode 100644
index eba58109b52..00000000000
--- a/eng/common/templates/steps/build-reason.yml
+++ /dev/null
@@ -1,12 +0,0 @@
-# build-reason.yml
-# Description: runs steps if build.reason condition is valid. conditions is a string of valid build reasons
-# to include steps (',' separated).
-parameters:
- conditions: ''
- steps: []
-
-steps:
- - ${{ if and( not(startsWith(parameters.conditions, 'not')), contains(parameters.conditions, variables['build.reason'])) }}:
- - ${{ parameters.steps }}
- - ${{ if and( startsWith(parameters.conditions, 'not'), not(contains(parameters.conditions, variables['build.reason']))) }}:
- - ${{ parameters.steps }}
diff --git a/eng/common/templates/steps/component-governance.yml b/eng/common/templates/steps/component-governance.yml
index cbba0596709..c12a5f8d21d 100644
--- a/eng/common/templates/steps/component-governance.yml
+++ b/eng/common/templates/steps/component-governance.yml
@@ -1,13 +1,7 @@
-parameters:
- disableComponentGovernance: false
- componentGovernanceIgnoreDirectories: ''
-
steps:
-- ${{ if eq(parameters.disableComponentGovernance, 'true') }}:
- - script: echo "##vso[task.setvariable variable=skipComponentGovernanceDetection]true"
- displayName: Set skipComponentGovernanceDetection variable
-- ${{ if ne(parameters.disableComponentGovernance, 'true') }}:
- - task: ComponentGovernanceComponentDetection@0
- continueOnError: true
- inputs:
- ignoreDirectories: ${{ parameters.componentGovernanceIgnoreDirectories }}
\ No newline at end of file
+- template: /eng/common/core-templates/steps/component-governance.yml
+ parameters:
+ is1ESPipeline: false
+
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
diff --git a/eng/common/templates/steps/enable-internal-runtimes.yml b/eng/common/templates/steps/enable-internal-runtimes.yml
index 54dc9416c51..b21a8038cc1 100644
--- a/eng/common/templates/steps/enable-internal-runtimes.yml
+++ b/eng/common/templates/steps/enable-internal-runtimes.yml
@@ -1,28 +1,10 @@
# Obtains internal runtime download credentials and populates the 'dotnetbuilds-internal-container-read-token-base64'
# variable with the base64-encoded SAS token, by default
-parameters:
-- name: federatedServiceConnection
- type: string
- default: 'dotnetbuilds-internal-read'
-- name: outputVariableName
- type: string
- default: 'dotnetbuilds-internal-container-read-token-base64'
-- name: expiryInHours
- type: number
- default: 1
-- name: base64Encode
- type: boolean
- default: true
-
steps:
-- ${{ if ne(variables['System.TeamProject'], 'public') }}:
- - template: /eng/common/templates/steps/get-delegation-sas.yml
- parameters:
- federatedServiceConnection: ${{ parameters.federatedServiceConnection }}
- outputVariableName: ${{ parameters.outputVariableName }}
- expiryInHours: ${{ parameters.expiryInHours }}
- base64Encode: ${{ parameters.base64Encode }}
- storageAccount: dotnetbuilds
- container: internal
- permissions: rl
+- template: /eng/common/core-templates/steps/enable-internal-runtimes.yml
+ parameters:
+ is1ESPipeline: false
+
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
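
Per the comment retained at the top of this file, the shim still populates 'dotnetbuilds-internal-container-read-token-base64' with the base64-encoded SAS token on internal builds. A hypothetical consumer sketch (the echo step is illustrative only):

    steps:
    - template: /eng/common/templates/steps/enable-internal-runtimes.yml
    - script: echo "SAS token variable is populated on non-public projects"
      env:
        TOKEN_B64: $(dotnetbuilds-internal-container-read-token-base64)
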
diff --git a/eng/common/templates/steps/enable-internal-sources.yml b/eng/common/templates/steps/enable-internal-sources.yml
new file mode 100644
index 00000000000..5f87e9abb8a
--- /dev/null
+++ b/eng/common/templates/steps/enable-internal-sources.yml
@@ -0,0 +1,7 @@
+steps:
+- template: /eng/common/core-templates/steps/enable-internal-sources.yml
+ parameters:
+ is1ESPipeline: false
+
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
\ No newline at end of file
diff --git a/eng/common/templates/steps/execute-codeql.yml b/eng/common/templates/steps/execute-codeql.yml
deleted file mode 100644
index 3930b163021..00000000000
--- a/eng/common/templates/steps/execute-codeql.yml
+++ /dev/null
@@ -1,32 +0,0 @@
-parameters:
- # Language that should be analyzed. Defaults to csharp
- language: csharp
- # Build Commands
- buildCommands: ''
- overrideParameters: '' # Optional: to override values for parameters.
- additionalParameters: '' # Optional: parameters that need user specific values eg: '-SourceToolsList @("abc","def") -ArtifactToolsList @("ghi","jkl")'
- # Optional: if specified, restore and use this version of Guardian instead of the default.
- overrideGuardianVersion: ''
- # Optional: if true, publish the '.gdn' folder as a pipeline artifact. This can help with in-depth
- # diagnosis of problems with specific tool configurations.
- publishGuardianDirectoryToPipeline: false
- # The script to run to execute all SDL tools. Use this if you want to use a script to define SDL
- # parameters rather than relying on YAML. It may be better to use a local script, because you can
- # reproduce results locally without piecing together a command based on the YAML.
- executeAllSdlToolsScript: 'eng/common/sdl/execute-all-sdl-tools.ps1'
- # There is some sort of bug (has been reported) in Azure DevOps where if this parameter is named
- # 'continueOnError', the parameter value is not correctly picked up.
- # This can also be remedied by the caller (post-build.yml) if it does not use a nested parameter
- # optional: determines whether to continue the build if the step errors;
- sdlContinueOnError: false
-
-steps:
-- template: /eng/common/templates/steps/execute-sdl.yml
- parameters:
- overrideGuardianVersion: ${{ parameters.overrideGuardianVersion }}
- executeAllSdlToolsScript: ${{ parameters.executeAllSdlToolsScript }}
- overrideParameters: ${{ parameters.overrideParameters }}
- additionalParameters: '${{ parameters.additionalParameters }}
- -CodeQLAdditionalRunConfigParams @("BuildCommands < ${{ parameters.buildCommands }}", "Language < ${{ parameters.language }}")'
- publishGuardianDirectoryToPipeline: ${{ parameters.publishGuardianDirectoryToPipeline }}
- sdlContinueOnError: ${{ parameters.sdlContinueOnError }}
\ No newline at end of file
diff --git a/eng/common/templates/steps/execute-sdl.yml b/eng/common/templates/steps/execute-sdl.yml
deleted file mode 100644
index fe0ebf8c904..00000000000
--- a/eng/common/templates/steps/execute-sdl.yml
+++ /dev/null
@@ -1,89 +0,0 @@
-parameters:
- overrideGuardianVersion: ''
- executeAllSdlToolsScript: ''
- overrideParameters: ''
- additionalParameters: ''
- publishGuardianDirectoryToPipeline: false
- sdlContinueOnError: false
- condition: ''
-
-steps:
-- task: NuGetAuthenticate@1
-
-- task: NuGetToolInstaller@1
- displayName: 'Install NuGet.exe'
-
-- ${{ if ne(parameters.overrideGuardianVersion, '') }}:
- - pwsh: |
- Set-Location -Path $(Build.SourcesDirectory)\eng\common\sdl
- . .\sdl.ps1
- $guardianCliLocation = Install-Gdn -Path $(Build.SourcesDirectory)\.artifacts -Version ${{ parameters.overrideGuardianVersion }}
- Write-Host "##vso[task.setvariable variable=GuardianCliLocation]$guardianCliLocation"
- displayName: Install Guardian (Overridden)
-
-- ${{ if eq(parameters.overrideGuardianVersion, '') }}:
- - pwsh: |
- Set-Location -Path $(Build.SourcesDirectory)\eng\common\sdl
- . .\sdl.ps1
- $guardianCliLocation = Install-Gdn -Path $(Build.SourcesDirectory)\.artifacts
- Write-Host "##vso[task.setvariable variable=GuardianCliLocation]$guardianCliLocation"
- displayName: Install Guardian
-
-- ${{ if ne(parameters.overrideParameters, '') }}:
- - powershell: ${{ parameters.executeAllSdlToolsScript }} ${{ parameters.overrideParameters }}
- displayName: Execute SDL (Overridden)
- continueOnError: ${{ parameters.sdlContinueOnError }}
- condition: ${{ parameters.condition }}
- env:
- GUARDIAN_DEFAULT_PACKAGE_SOURCE_SECRET: $(System.AccessToken)
-
-- ${{ if eq(parameters.overrideParameters, '') }}:
- - powershell: ${{ parameters.executeAllSdlToolsScript }}
- -GuardianCliLocation $(GuardianCliLocation)
- -NugetPackageDirectory $(Build.SourcesDirectory)\.packages
- ${{ parameters.additionalParameters }}
- displayName: Execute SDL
- continueOnError: ${{ parameters.sdlContinueOnError }}
- condition: ${{ parameters.condition }}
- env:
- GUARDIAN_DEFAULT_PACKAGE_SOURCE_SECRET: $(System.AccessToken)
-
-- ${{ if ne(parameters.publishGuardianDirectoryToPipeline, 'false') }}:
- # We want to publish the Guardian results and configuration for easy diagnosis. However, the
- # '.gdn' dir is a mix of configuration, results, extracted dependencies, and Guardian default
- # tooling files. Some of these files are large and aren't useful during an investigation, so
- # exclude them by simply deleting them before publishing. (As of writing, there is no documented
- # way to selectively exclude a dir from the pipeline artifact publish task.)
- - task: DeleteFiles@1
- displayName: Delete Guardian dependencies to avoid uploading
- inputs:
- SourceFolder: $(Agent.BuildDirectory)/.gdn
- Contents: |
- c
- i
- condition: succeededOrFailed()
-
- - publish: $(Agent.BuildDirectory)/.gdn
- artifact: GuardianConfiguration
- displayName: Publish GuardianConfiguration
- condition: succeededOrFailed()
-
- # Publish the SARIF files in a container named CodeAnalysisLogs to enable integration
- # with the "SARIF SAST Scans Tab" Azure DevOps extension
- - task: CopyFiles@2
- displayName: Copy SARIF files
- inputs:
- flattenFolders: true
- sourceFolder: $(Agent.BuildDirectory)/.gdn/rc/
- contents: '**/*.sarif'
- targetFolder: $(Build.SourcesDirectory)/CodeAnalysisLogs
- condition: succeededOrFailed()
-
- # Use PublishBuildArtifacts because the SARIF extension only checks this case
- # see microsoft/sarif-azuredevops-extension#4
- - task: PublishBuildArtifacts@1
- displayName: Publish SARIF files to CodeAnalysisLogs container
- inputs:
- pathToPublish: $(Build.SourcesDirectory)/CodeAnalysisLogs
- artifactName: CodeAnalysisLogs
- condition: succeededOrFailed()
\ No newline at end of file
diff --git a/eng/common/templates/steps/generate-sbom.yml b/eng/common/templates/steps/generate-sbom.yml
index 2b21eae4273..26dc00a2e0f 100644
--- a/eng/common/templates/steps/generate-sbom.yml
+++ b/eng/common/templates/steps/generate-sbom.yml
@@ -1,48 +1,7 @@
-# BuildDropPath - The root folder of the drop directory for which the manifest file will be generated.
-# PackageName - The name of the package this SBOM represents.
-# PackageVersion - The version of the package this SBOM represents.
-# ManifestDirPath - The path of the directory where the generated manifest files will be placed
-# IgnoreDirectories - Directories to ignore for SBOM generation. This will be passed through to the CG component detector.
-
-parameters:
- PackageVersion: 8.0.0
- BuildDropPath: '$(Build.SourcesDirectory)/artifacts'
- PackageName: '.NET'
- ManifestDirPath: $(Build.ArtifactStagingDirectory)/sbom
- IgnoreDirectories: ''
- sbomContinueOnError: true
-
steps:
-- task: PowerShell@2
- displayName: Prep for SBOM generation in (Non-linux)
- condition: or(eq(variables['Agent.Os'], 'Windows_NT'), eq(variables['Agent.Os'], 'Darwin'))
- inputs:
- filePath: ./eng/common/generate-sbom-prep.ps1
- arguments: ${{parameters.manifestDirPath}}
-
-# Chmodding is a workaround for https://github.com/dotnet/arcade/issues/8461
-- script: |
- chmod +x ./eng/common/generate-sbom-prep.sh
- ./eng/common/generate-sbom-prep.sh ${{parameters.manifestDirPath}}
- displayName: Prep for SBOM generation in (Linux)
- condition: eq(variables['Agent.Os'], 'Linux')
- continueOnError: ${{ parameters.sbomContinueOnError }}
-
-- task: AzureArtifacts.manifest-generator-task.manifest-generator-task.ManifestGeneratorTask@0
- displayName: 'Generate SBOM manifest'
- continueOnError: ${{ parameters.sbomContinueOnError }}
- inputs:
- PackageName: ${{ parameters.packageName }}
- BuildDropPath: ${{ parameters.buildDropPath }}
- PackageVersion: ${{ parameters.packageVersion }}
- ManifestDirPath: ${{ parameters.manifestDirPath }}
- ${{ if ne(parameters.IgnoreDirectories, '') }}:
- AdditionalComponentDetectorArgs: '--IgnoreDirectories ${{ parameters.IgnoreDirectories }}'
-
-- task: PublishPipelineArtifact@1
- displayName: Publish SBOM manifest
- continueOnError: ${{parameters.sbomContinueOnError}}
- inputs:
- targetPath: '${{parameters.manifestDirPath}}'
- artifactName: $(ARTIFACT_NAME)
+- template: /eng/common/core-templates/steps/generate-sbom.yml
+ parameters:
+ is1ESPipeline: false
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
diff --git a/eng/common/templates/steps/get-delegation-sas.yml b/eng/common/templates/steps/get-delegation-sas.yml
index c0e8f91317f..83760c9798e 100644
--- a/eng/common/templates/steps/get-delegation-sas.yml
+++ b/eng/common/templates/steps/get-delegation-sas.yml
@@ -1,43 +1,7 @@
-parameters:
-- name: federatedServiceConnection
- type: string
-- name: outputVariableName
- type: string
-- name: expiryInHours
- type: number
- default: 1
-- name: base64Encode
- type: boolean
- default: false
-- name: storageAccount
- type: string
-- name: container
- type: string
-- name: permissions
- type: string
- default: 'rl'
-
steps:
-- task: AzureCLI@2
- displayName: 'Generate delegation SAS Token for ${{ parameters.storageAccount }}/${{ parameters.container }}'
- inputs:
- azureSubscription: ${{ parameters.federatedServiceConnection }}
- scriptType: 'pscore'
- scriptLocation: 'inlineScript'
- inlineScript: |
- # Calculate the expiration of the SAS token and convert to UTC
- $expiry = (Get-Date).AddHours(${{ parameters.expiryInHours }}).ToUniversalTime().ToString("yyyy-MM-ddTHH:mm:ssZ")
-
- $sas = az storage container generate-sas --account-name ${{ parameters.storageAccount }} --name ${{ parameters.container }} --permissions ${{ parameters.permissions }} --expiry $expiry --auth-mode login --as-user -o tsv
-
- if ($LASTEXITCODE -ne 0) {
- Write-Error "Failed to generate SAS token."
- exit 1
- }
-
- if ('${{ parameters.base64Encode }}' -eq 'true') {
- $sas = [Convert]::ToBase64String([System.Text.Encoding]::UTF8.GetBytes($sas))
- }
+- template: /eng/common/core-templates/steps/get-delegation-sas.yml
+ parameters:
+ is1ESPipeline: false
- Write-Host "Setting '${{ parameters.outputVariableName }}' with the access token value"
- Write-Host "##vso[task.setvariable variable=${{ parameters.outputVariableName }};issecret=true]$sas"
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
diff --git a/eng/common/templates/steps/get-federated-access-token.yml b/eng/common/templates/steps/get-federated-access-token.yml
index 55e33bd38f7..31e151d9d9e 100644
--- a/eng/common/templates/steps/get-federated-access-token.yml
+++ b/eng/common/templates/steps/get-federated-access-token.yml
@@ -1,40 +1,7 @@
-parameters:
-- name: federatedServiceConnection
- type: string
-- name: outputVariableName
- type: string
-- name: stepName
- type: string
- default: 'getFederatedAccessToken'
-- name: condition
- type: string
- default: ''
-# Resource to get a token for. Common values include:
-# - '499b84ac-1321-427f-aa17-267ca6975798' for Azure DevOps
-# - 'https://storage.azure.com/' for storage
-# Defaults to Azure DevOps
-- name: resource
- type: string
- default: '499b84ac-1321-427f-aa17-267ca6975798'
-- name: isStepOutputVariable
- type: boolean
- default: false
-
steps:
-- task: AzureCLI@2
- displayName: 'Getting federated access token for feeds'
- name: ${{ parameters.stepName }}
- ${{ if ne(parameters.condition, '') }}:
- condition: ${{ parameters.condition }}
- inputs:
- azureSubscription: ${{ parameters.federatedServiceConnection }}
- scriptType: 'pscore'
- scriptLocation: 'inlineScript'
- inlineScript: |
- $accessToken = az account get-access-token --query accessToken --resource ${{ parameters.resource }} --output tsv
- if ($LASTEXITCODE -ne 0) {
- Write-Error "Failed to get access token for resource '${{ parameters.resource }}'"
- exit 1
- }
- Write-Host "Setting '${{ parameters.outputVariableName }}' with the access token value"
- Write-Host "##vso[task.setvariable variable=${{ parameters.outputVariableName }};issecret=true;isOutput=${{ parameters.isStepOutputVariable }}]$accessToken"
\ No newline at end of file
+- template: /eng/common/core-templates/steps/get-federated-access-token.yml
+ parameters:
+ is1ESPipeline: false
+
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
\ No newline at end of file
diff --git a/eng/common/templates/steps/publish-build-artifacts.yml b/eng/common/templates/steps/publish-build-artifacts.yml
new file mode 100644
index 00000000000..6428a98dfef
--- /dev/null
+++ b/eng/common/templates/steps/publish-build-artifacts.yml
@@ -0,0 +1,40 @@
+parameters:
+- name: is1ESPipeline
+ type: boolean
+ default: false
+
+- name: displayName
+ type: string
+ default: 'Publish to Build Artifact'
+
+- name: condition
+ type: string
+ default: succeeded()
+
+- name: artifactName
+ type: string
+
+- name: pathToPublish
+ type: string
+
+- name: continueOnError
+ type: boolean
+ default: false
+
+- name: publishLocation
+ type: string
+ default: 'Container'
+
+steps:
+- ${{ if eq(parameters.is1ESPipeline, true) }}:
+ - 'eng/common/templates cannot be referenced from a 1ES managed template': error
+- task: PublishBuildArtifacts@1
+ displayName: ${{ parameters.displayName }}
+ condition: ${{ parameters.condition }}
+ ${{ if parameters.continueOnError }}:
+ continueOnError: ${{ parameters.continueOnError }}
+ inputs:
+ PublishLocation: ${{ parameters.publishLocation }}
+ PathtoPublish: ${{ parameters.pathToPublish }}
+ ${{ if parameters.artifactName }}:
+ ArtifactName: ${{ parameters.artifactName }}
\ No newline at end of file
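
A hypothetical call site for this new step template (the path and artifact name are placeholders):

    steps:
    - template: /eng/common/templates/steps/publish-build-artifacts.yml
      parameters:
        displayName: Publish Build Logs
        pathToPublish: '$(Build.SourcesDirectory)/artifacts/log'
        artifactName: Logs
        continueOnError: true
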
diff --git a/eng/common/templates/steps/publish-logs.yml b/eng/common/templates/steps/publish-logs.yml
index 88f238f36bf..4ea86bd8823 100644
--- a/eng/common/templates/steps/publish-logs.yml
+++ b/eng/common/templates/steps/publish-logs.yml
@@ -1,23 +1,7 @@
-parameters:
- StageLabel: ''
- JobLabel: ''
-
steps:
-- task: Powershell@2
- displayName: Prepare Binlogs to Upload
- inputs:
- targetType: inline
- script: |
- New-Item -ItemType Directory $(Build.SourcesDirectory)/PostBuildLogs/${{parameters.StageLabel}}/${{parameters.JobLabel}}/
- Move-Item -Path $(Build.SourcesDirectory)/artifacts/log/Debug/* $(Build.SourcesDirectory)/PostBuildLogs/${{parameters.StageLabel}}/${{parameters.JobLabel}}/
- continueOnError: true
- condition: always()
+- template: /eng/common/core-templates/steps/publish-logs.yml
+ parameters:
+ is1ESPipeline: false
-- task: PublishBuildArtifacts@1
- displayName: Publish Logs
- inputs:
- PathtoPublish: '$(Build.SourcesDirectory)/PostBuildLogs'
- PublishLocation: Container
- ArtifactName: PostBuildLogs
- continueOnError: true
- condition: always()
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
diff --git a/eng/common/templates/steps/publish-pipeline-artifacts.yml b/eng/common/templates/steps/publish-pipeline-artifacts.yml
new file mode 100644
index 00000000000..5dd698b212f
--- /dev/null
+++ b/eng/common/templates/steps/publish-pipeline-artifacts.yml
@@ -0,0 +1,34 @@
+parameters:
+- name: is1ESPipeline
+ type: boolean
+ default: false
+
+- name: args
+ type: object
+ default: {}
+
+steps:
+- ${{ if eq(parameters.is1ESPipeline, true) }}:
+ - 'eng/common/templates cannot be referenced from a 1ES managed template': error
+- task: PublishPipelineArtifact@1
+ displayName: ${{ coalesce(parameters.args.displayName, 'Publish to Build Artifact') }}
+ ${{ if parameters.args.condition }}:
+ condition: ${{ parameters.args.condition }}
+ ${{ else }}:
+ condition: succeeded()
+ ${{ if parameters.args.continueOnError }}:
+ continueOnError: ${{ parameters.args.continueOnError }}
+ inputs:
+ targetPath: ${{ parameters.args.targetPath }}
+ ${{ if parameters.args.artifactName }}:
+ artifactName: ${{ parameters.args.artifactName }}
+ ${{ if parameters.args.publishLocation }}:
+ publishLocation: ${{ parameters.args.publishLocation }}
+ ${{ if parameters.args.fileSharePath }}:
+ fileSharePath: ${{ parameters.args.fileSharePath }}
+ ${{ if parameters.args.Parallel }}:
+ parallel: ${{ parameters.args.Parallel }}
+ ${{ if parameters.args.parallelCount }}:
+ parallelCount: ${{ parameters.args.parallelCount }}
+ ${{ if parameters.args.properties }}:
+ properties: ${{ parameters.args.properties }}
\ No newline at end of file
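
Unlike publish-build-artifacts.yml above, this template takes one 'args' object instead of individual parameters, so optional task inputs can be forwarded without enumerating them. A hypothetical call site (the path and artifact name are placeholders):

    steps:
    - template: /eng/common/templates/steps/publish-pipeline-artifacts.yml
      parameters:
        args:
          displayName: Publish BuildLogs
          targetPath: '$(Build.StagingDirectory)/BuildLogs'
          artifactName: BuildLogs
          continueOnError: true
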
diff --git a/eng/common/templates/steps/retain-build.yml b/eng/common/templates/steps/retain-build.yml
index 83d97a26a01..8e841ace3d2 100644
--- a/eng/common/templates/steps/retain-build.yml
+++ b/eng/common/templates/steps/retain-build.yml
@@ -1,28 +1,7 @@
-parameters:
- # Optional azure devops PAT with build execute permissions for the build's organization,
- # only needed if the build that should be retained ran on a different organization than
- # the pipeline where this template is executing from
- Token: ''
- # Optional BuildId to retain, defaults to the current running build
- BuildId: ''
- # Azure devops Organization URI for the build in the https://dev.azure.com/ format.
- # Defaults to the organization the current pipeline is running on
- AzdoOrgUri: '$(System.CollectionUri)'
- # Azure devops project for the build. Defaults to the project the current pipeline is running on
- AzdoProject: '$(System.TeamProject)'
-
steps:
- - task: powershell@2
- inputs:
- targetType: 'filePath'
- filePath: eng/common/retain-build.ps1
- pwsh: true
- arguments: >
- -AzdoOrgUri: ${{parameters.AzdoOrgUri}}
- -AzdoProject ${{parameters.AzdoProject}}
- -Token ${{coalesce(parameters.Token, '$env:SYSTEM_ACCESSTOKEN') }}
- -BuildId ${{coalesce(parameters.BuildId, '$env:BUILD_ID')}}
- displayName: Enable permanent build retention
- env:
- SYSTEM_ACCESSTOKEN: $(System.AccessToken)
- BUILD_ID: $(Build.BuildId)
\ No newline at end of file
+- template: /eng/common/core-templates/steps/retain-build.yml
+ parameters:
+ is1ESPipeline: false
+
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
diff --git a/eng/common/templates/steps/run-on-unix.yml b/eng/common/templates/steps/run-on-unix.yml
deleted file mode 100644
index e1733814f65..00000000000
--- a/eng/common/templates/steps/run-on-unix.yml
+++ /dev/null
@@ -1,7 +0,0 @@
-parameters:
- agentOs: ''
- steps: []
-
-steps:
-- ${{ if ne(parameters.agentOs, 'Windows_NT') }}:
- - ${{ parameters.steps }}
diff --git a/eng/common/templates/steps/run-on-windows.yml b/eng/common/templates/steps/run-on-windows.yml
deleted file mode 100644
index 73e7e9c275a..00000000000
--- a/eng/common/templates/steps/run-on-windows.yml
+++ /dev/null
@@ -1,7 +0,0 @@
-parameters:
- agentOs: ''
- steps: []
-
-steps:
-- ${{ if eq(parameters.agentOs, 'Windows_NT') }}:
- - ${{ parameters.steps }}
diff --git a/eng/common/templates/steps/run-script-ifequalelse.yml b/eng/common/templates/steps/run-script-ifequalelse.yml
deleted file mode 100644
index 3d1242f5587..00000000000
--- a/eng/common/templates/steps/run-script-ifequalelse.yml
+++ /dev/null
@@ -1,33 +0,0 @@
-parameters:
- # if parameter1 equals parameter 2, run 'ifScript' command, else run 'elsescript' command
- parameter1: ''
- parameter2: ''
- ifScript: ''
- elseScript: ''
-
- # name of script step
- name: Script
-
- # display name of script step
- displayName: If-Equal-Else Script
-
- # environment
- env: {}
-
- # conditional expression for step execution
- condition: ''
-
-steps:
-- ${{ if and(ne(parameters.ifScript, ''), eq(parameters.parameter1, parameters.parameter2)) }}:
- - script: ${{ parameters.ifScript }}
- name: ${{ parameters.name }}
- displayName: ${{ parameters.displayName }}
- env: ${{ parameters.env }}
- condition: ${{ parameters.condition }}
-
-- ${{ if and(ne(parameters.elseScript, ''), ne(parameters.parameter1, parameters.parameter2)) }}:
- - script: ${{ parameters.elseScript }}
- name: ${{ parameters.name }}
- displayName: ${{ parameters.displayName }}
- env: ${{ parameters.env }}
- condition: ${{ parameters.condition }}
\ No newline at end of file
diff --git a/eng/common/templates/steps/send-to-helix.yml b/eng/common/templates/steps/send-to-helix.yml
index 3eb7e2d5f84..39f99fc2762 100644
--- a/eng/common/templates/steps/send-to-helix.yml
+++ b/eng/common/templates/steps/send-to-helix.yml
@@ -1,91 +1,7 @@
-# Please remember to update the documentation if you make changes to these parameters!
-parameters:
- HelixSource: 'pr/default' # required -- sources must start with pr/, official/, prodcon/, or agent/
- HelixType: 'tests/default/' # required -- Helix telemetry which identifies what type of data this is; should include "test" for clarity and must end in '/'
- HelixBuild: $(Build.BuildNumber) # required -- the build number Helix will use to identify this -- automatically set to the AzDO build number
- HelixTargetQueues: '' # required -- semicolon-delimited list of Helix queues to test on; see https://helix.dot.net/ for a list of queues
- HelixAccessToken: '' # required -- access token to make Helix API requests; should be provided by the appropriate variable group
- HelixConfiguration: '' # optional -- additional property attached to a job
- HelixPreCommands: '' # optional -- commands to run before Helix work item execution
- HelixPostCommands: '' # optional -- commands to run after Helix work item execution
- WorkItemDirectory: '' # optional -- a payload directory to zip up and send to Helix; requires WorkItemCommand; incompatible with XUnitProjects
- WorkItemCommand: '' # optional -- a command to execute on the payload; requires WorkItemDirectory; incompatible with XUnitProjects
- WorkItemTimeout: '' # optional -- a timeout in TimeSpan.Parse-ready value (e.g. 00:02:00) for the work item command; requires WorkItemDirectory; incompatible with XUnitProjects
- CorrelationPayloadDirectory: '' # optional -- a directory to zip up and send to Helix as a correlation payload
- XUnitProjects: '' # optional -- semicolon-delimited list of XUnitProjects to parse and send to Helix; requires XUnitRuntimeTargetFramework, XUnitPublishTargetFramework, XUnitRunnerVersion, and IncludeDotNetCli=true
- XUnitWorkItemTimeout: '' # optional -- the workitem timeout in seconds for all workitems created from the xUnit projects specified by XUnitProjects
- XUnitPublishTargetFramework: '' # optional -- framework to use to publish your xUnit projects
- XUnitRuntimeTargetFramework: '' # optional -- framework to use for the xUnit console runner
- XUnitRunnerVersion: '' # optional -- version of the xUnit nuget package you wish to use on Helix; required for XUnitProjects
- IncludeDotNetCli: false # optional -- true will download a version of the .NET CLI onto the Helix machine as a correlation payload; requires DotNetCliPackageType and DotNetCliVersion
- DotNetCliPackageType: '' # optional -- either 'sdk', 'runtime' or 'aspnetcore-runtime'; determines whether the sdk or runtime will be sent to Helix; see https://raw.githubusercontent.com/dotnet/core/main/release-notes/releases-index.json
- DotNetCliVersion: '' # optional -- version of the CLI to send to Helix; based on this: https://raw.githubusercontent.com/dotnet/core/main/release-notes/releases-index.json
- WaitForWorkItemCompletion: true # optional -- true will make the task wait until work items have been completed and fail the build if work items fail. False is "fire and forget."
- IsExternal: false # [DEPRECATED] -- doesn't do anything, jobs are external if HelixAccessToken is empty and Creator is set
- HelixBaseUri: 'https://helix.dot.net/' # optional -- sets the Helix API base URI (allows targeting https://helix.int-dot.net )
- Creator: '' # optional -- if the build is external, use this to specify who is sending the job
- DisplayNamePrefix: 'Run Tests' # optional -- rename the beginning of the displayName of the steps in AzDO
- condition: succeeded() # optional -- condition for step to execute; defaults to succeeded()
- continueOnError: false # optional -- determines whether to continue the build if the step errors; defaults to false
-
steps:
- - powershell: 'powershell "$env:BUILD_SOURCESDIRECTORY\eng\common\msbuild.ps1 $env:BUILD_SOURCESDIRECTORY\eng\common\helixpublish.proj /restore /p:TreatWarningsAsErrors=false /t:Test /bl:$env:BUILD_SOURCESDIRECTORY\artifacts\log\$env:BuildConfig\SendToHelix.binlog"'
- displayName: ${{ parameters.DisplayNamePrefix }} (Windows)
- env:
- BuildConfig: $(_BuildConfig)
- HelixSource: ${{ parameters.HelixSource }}
- HelixType: ${{ parameters.HelixType }}
- HelixBuild: ${{ parameters.HelixBuild }}
- HelixConfiguration: ${{ parameters.HelixConfiguration }}
- HelixTargetQueues: ${{ parameters.HelixTargetQueues }}
- HelixAccessToken: ${{ parameters.HelixAccessToken }}
- HelixPreCommands: ${{ parameters.HelixPreCommands }}
- HelixPostCommands: ${{ parameters.HelixPostCommands }}
- WorkItemDirectory: ${{ parameters.WorkItemDirectory }}
- WorkItemCommand: ${{ parameters.WorkItemCommand }}
- WorkItemTimeout: ${{ parameters.WorkItemTimeout }}
- CorrelationPayloadDirectory: ${{ parameters.CorrelationPayloadDirectory }}
- XUnitProjects: ${{ parameters.XUnitProjects }}
- XUnitWorkItemTimeout: ${{ parameters.XUnitWorkItemTimeout }}
- XUnitPublishTargetFramework: ${{ parameters.XUnitPublishTargetFramework }}
- XUnitRuntimeTargetFramework: ${{ parameters.XUnitRuntimeTargetFramework }}
- XUnitRunnerVersion: ${{ parameters.XUnitRunnerVersion }}
- IncludeDotNetCli: ${{ parameters.IncludeDotNetCli }}
- DotNetCliPackageType: ${{ parameters.DotNetCliPackageType }}
- DotNetCliVersion: ${{ parameters.DotNetCliVersion }}
- WaitForWorkItemCompletion: ${{ parameters.WaitForWorkItemCompletion }}
- HelixBaseUri: ${{ parameters.HelixBaseUri }}
- Creator: ${{ parameters.Creator }}
- SYSTEM_ACCESSTOKEN: $(System.AccessToken)
- condition: and(${{ parameters.condition }}, eq(variables['Agent.Os'], 'Windows_NT'))
- continueOnError: ${{ parameters.continueOnError }}
- - script: $BUILD_SOURCESDIRECTORY/eng/common/msbuild.sh $BUILD_SOURCESDIRECTORY/eng/common/helixpublish.proj /restore /p:TreatWarningsAsErrors=false /t:Test /bl:$BUILD_SOURCESDIRECTORY/artifacts/log/$BuildConfig/SendToHelix.binlog
- displayName: ${{ parameters.DisplayNamePrefix }} (Unix)
- env:
- BuildConfig: $(_BuildConfig)
- HelixSource: ${{ parameters.HelixSource }}
- HelixType: ${{ parameters.HelixType }}
- HelixBuild: ${{ parameters.HelixBuild }}
- HelixConfiguration: ${{ parameters.HelixConfiguration }}
- HelixTargetQueues: ${{ parameters.HelixTargetQueues }}
- HelixAccessToken: ${{ parameters.HelixAccessToken }}
- HelixPreCommands: ${{ parameters.HelixPreCommands }}
- HelixPostCommands: ${{ parameters.HelixPostCommands }}
- WorkItemDirectory: ${{ parameters.WorkItemDirectory }}
- WorkItemCommand: ${{ parameters.WorkItemCommand }}
- WorkItemTimeout: ${{ parameters.WorkItemTimeout }}
- CorrelationPayloadDirectory: ${{ parameters.CorrelationPayloadDirectory }}
- XUnitProjects: ${{ parameters.XUnitProjects }}
- XUnitWorkItemTimeout: ${{ parameters.XUnitWorkItemTimeout }}
- XUnitPublishTargetFramework: ${{ parameters.XUnitPublishTargetFramework }}
- XUnitRuntimeTargetFramework: ${{ parameters.XUnitRuntimeTargetFramework }}
- XUnitRunnerVersion: ${{ parameters.XUnitRunnerVersion }}
- IncludeDotNetCli: ${{ parameters.IncludeDotNetCli }}
- DotNetCliPackageType: ${{ parameters.DotNetCliPackageType }}
- DotNetCliVersion: ${{ parameters.DotNetCliVersion }}
- WaitForWorkItemCompletion: ${{ parameters.WaitForWorkItemCompletion }}
- HelixBaseUri: ${{ parameters.HelixBaseUri }}
- Creator: ${{ parameters.Creator }}
- SYSTEM_ACCESSTOKEN: $(System.AccessToken)
- condition: and(${{ parameters.condition }}, ne(variables['Agent.Os'], 'Windows_NT'))
- continueOnError: ${{ parameters.continueOnError }}
+- template: /eng/common/core-templates/steps/send-to-helix.yml
+ parameters:
+ is1ESPipeline: false
+
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
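
The deleted comment block above remains the reference for the shim's contract. A minimal hypothetical invocation (queue and creator values are illustrative; per the documentation above, HelixSource must start with pr/, official/, prodcon/, or agent/):

    steps:
    - template: /eng/common/templates/steps/send-to-helix.yml
      parameters:
        HelixSource: 'pr/dotnet/extensions'
        HelixType: 'tests/default/'
        HelixTargetQueues: 'Windows.10.Amd64.Open'
        Creator: 'dotnet-bot'
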
diff --git a/eng/common/templates/steps/source-build.yml b/eng/common/templates/steps/source-build.yml
index 41bbb915736..23c1d6f4e9f 100644
--- a/eng/common/templates/steps/source-build.yml
+++ b/eng/common/templates/steps/source-build.yml
@@ -1,129 +1,7 @@
-parameters:
- # This template adds arcade-powered source-build to CI.
-
- # This is a 'steps' template, and is intended for advanced scenarios where the existing build
- # infra has a careful build methodology that must be followed. For example, a repo
- # (dotnet/runtime) might choose to clone the GitHub repo only once and store it as a pipeline
- # artifact for all subsequent jobs to use, to reduce dependence on a strong network connection to
- # GitHub. Using this steps template leaves room for that infra to be included.
-
- # Defines the platform on which to run the steps. See 'eng/common/templates/job/source-build.yml'
- # for details. The entire object is described in the 'job' template for simplicity, even though
- # the usage of the properties on this object is split between the 'job' and 'steps' templates.
- platform: {}
-
steps:
-# Build. Keep it self-contained for simple reusability. (No source-build-specific job variables.)
-- script: |
- set -x
- df -h
-
- # If building on the internal project, the artifact feeds variable may be available (usually only if needed)
- # In that case, call the feed setup script to add internal feeds corresponding to public ones.
- # In addition, add an msbuild argument to copy the WIP from the repo to the target build location.
- # This is because SetupNuGetSources.sh will alter the current NuGet.config file, and we need to preserve those
- # changes.
- internalRestoreArgs=
- if [ '$(dn-bot-dnceng-artifact-feeds-rw)' != '$''(dn-bot-dnceng-artifact-feeds-rw)' ]; then
- # Temporarily work around https://github.com/dotnet/arcade/issues/7709
- chmod +x $(Build.SourcesDirectory)/eng/common/SetupNugetSources.sh
- $(Build.SourcesDirectory)/eng/common/SetupNugetSources.sh $(Build.SourcesDirectory)/NuGet.config $(dn-bot-dnceng-artifact-feeds-rw)
- internalRestoreArgs='/p:CopyWipIntoInnerSourceBuildRepo=true'
-
- # The 'Copy WIP' feature of source build uses git stash to apply changes from the original repo.
- # This only works if there is a username/email configured, which won't be the case in most CI runs.
- git config --get user.email
- if [ $? -ne 0 ]; then
- git config user.email dn-bot@microsoft.com
- git config user.name dn-bot
- fi
- fi
-
- # If building on the internal project, the internal storage variable may be available (usually only if needed)
- # In that case, add variables to allow the download of internal runtimes if the specified versions are not found
- # in the default public locations.
- internalRuntimeDownloadArgs=
- if [ '$(dotnetbuilds-internal-container-read-token-base64)' != '$''(dotnetbuilds-internal-container-read-token-base64)' ]; then
- internalRuntimeDownloadArgs='/p:DotNetRuntimeSourceFeed=https://dotnetbuilds.blob.core.windows.net/internal /p:DotNetRuntimeSourceFeedKey=$(dotnetbuilds-internal-container-read-token-base64) --runtimesourcefeed https://dotnetbuilds.blob.core.windows.net/internal --runtimesourcefeedkey $(dotnetbuilds-internal-container-read-token-base64)'
- fi
-
- buildConfig=Release
- # Check if AzDO substitutes in a build config from a variable, and use it if so.
- if [ '$(_BuildConfig)' != '$''(_BuildConfig)' ]; then
- buildConfig='$(_BuildConfig)'
- fi
-
- officialBuildArgs=
- if [ '${{ and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}' = 'True' ]; then
- officialBuildArgs='/p:DotNetPublishUsingPipelines=true /p:OfficialBuildId=$(BUILD.BUILDNUMBER)'
- fi
-
- targetRidArgs=
- if [ '${{ parameters.platform.targetRID }}' != '' ]; then
- targetRidArgs='/p:TargetRid=${{ parameters.platform.targetRID }}'
- fi
-
- runtimeOsArgs=
- if [ '${{ parameters.platform.runtimeOS }}' != '' ]; then
- runtimeOsArgs='/p:RuntimeOS=${{ parameters.platform.runtimeOS }}'
- fi
-
- baseOsArgs=
- if [ '${{ parameters.platform.baseOS }}' != '' ]; then
- baseOsArgs='/p:BaseOS=${{ parameters.platform.baseOS }}'
- fi
-
- publishArgs=
- if [ '${{ parameters.platform.skipPublishValidation }}' != 'true' ]; then
- publishArgs='--publish'
- fi
-
- assetManifestFileName=SourceBuild_RidSpecific.xml
- if [ '${{ parameters.platform.name }}' != '' ]; then
- assetManifestFileName=SourceBuild_${{ parameters.platform.name }}.xml
- fi
-
- ${{ coalesce(parameters.platform.buildScript, './build.sh') }} --ci \
- --configuration $buildConfig \
- --restore --build --pack $publishArgs -bl \
- $officialBuildArgs \
- $internalRuntimeDownloadArgs \
- $internalRestoreArgs \
- $targetRidArgs \
- $runtimeOsArgs \
- $baseOsArgs \
- /p:SourceBuildNonPortable=${{ parameters.platform.nonPortable }} \
- /p:ArcadeBuildFromSource=true \
- /p:AssetManifestFileName=$assetManifestFileName
- displayName: Build
-
-# Upload build logs for diagnosis.
-- task: CopyFiles@2
- displayName: Prepare BuildLogs staging directory
- inputs:
- SourceFolder: '$(Build.SourcesDirectory)'
- Contents: |
- **/*.log
- **/*.binlog
- artifacts/source-build/self/prebuilt-report/**
- TargetFolder: '$(Build.StagingDirectory)/BuildLogs'
- CleanTargetFolder: true
- continueOnError: true
- condition: succeededOrFailed()
-
-- task: PublishPipelineArtifact@1
- displayName: Publish BuildLogs
- inputs:
- targetPath: '$(Build.StagingDirectory)/BuildLogs'
- artifactName: BuildLogs_SourceBuild_${{ parameters.platform.name }}_Attempt$(System.JobAttempt)
- continueOnError: true
- condition: succeededOrFailed()
+- template: /eng/common/core-templates/steps/source-build.yml
+ parameters:
+ is1ESPipeline: false
-# Manually inject component detection so that we can ignore the source build upstream cache, which contains
-# a nupkg cache of input packages (a local feed).
-# This path must match the upstream cache path in property 'CurrentRepoSourceBuiltNupkgCacheDir'
-# in src\Microsoft.DotNet.Arcade.Sdk\tools\SourceBuild\SourceBuildArcade.targets
-- task: ComponentGovernanceComponentDetection@0
- displayName: Component Detection (Exclude upstream cache)
- inputs:
- ignoreDirectories: '$(Build.SourcesDirectory)/artifacts/source-build/self/src/artifacts/obj/source-built-upstream-cache'
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
diff --git a/eng/common/templates/steps/telemetry-end.yml b/eng/common/templates/steps/telemetry-end.yml
deleted file mode 100644
index fadc04ca1b9..00000000000
--- a/eng/common/templates/steps/telemetry-end.yml
+++ /dev/null
@@ -1,102 +0,0 @@
-parameters:
- maxRetries: 5
- retryDelay: 10 # in seconds
-
-steps:
-- bash: |
- if [ "$AGENT_JOBSTATUS" = "Succeeded" ] || [ "$AGENT_JOBSTATUS" = "PartiallySucceeded" ]; then
- errorCount=0
- else
- errorCount=1
- fi
- warningCount=0
-
- curlStatus=1
- retryCount=0
- # retry loop to harden against spotty telemetry connections
- # we don't retry successes and 4xx client errors
- until [[ $curlStatus -eq 0 || ( $curlStatus -ge 400 && $curlStatus -le 499 ) || $retryCount -ge $MaxRetries ]]
- do
- if [ $retryCount -gt 0 ]; then
- echo "Failed to send telemetry to Helix; waiting $RetryDelay seconds before retrying..."
- sleep $RetryDelay
- fi
-
- # create a temporary file for curl output
- res=`mktemp`
-
- curlResult=`
- curl --verbose --output $res --write-out "%{http_code}"\
- -H 'Content-Type: application/json' \
- -H "X-Helix-Job-Token: $Helix_JobToken" \
- -H 'Content-Length: 0' \
- -X POST -G "https://helix.dot.net/api/2018-03-14/telemetry/job/build/$Helix_WorkItemId/finish" \
- --data-urlencode "errorCount=$errorCount" \
- --data-urlencode "warningCount=$warningCount"`
- curlStatus=$?
-
- if [ $curlStatus -eq 0 ]; then
- if [ $curlResult -gt 299 ] || [ $curlResult -lt 200 ]; then
- curlStatus=$curlResult
- fi
- fi
-
- let retryCount++
- done
-
- if [ $curlStatus -ne 0 ]; then
- echo "Failed to Send Build Finish information after $retryCount retries"
- vstsLogOutput="vso[task.logissue type=error;sourcepath=templates/steps/telemetry-end.yml;code=1;]Failed to Send Build Finish information: $curlStatus"
- echo "##$vstsLogOutput"
- exit 1
- fi
- displayName: Send Unix Build End Telemetry
- env:
- # defined via VSTS variables in start-job.sh
- Helix_JobToken: $(Helix_JobToken)
- Helix_WorkItemId: $(Helix_WorkItemId)
- MaxRetries: ${{ parameters.maxRetries }}
- RetryDelay: ${{ parameters.retryDelay }}
- condition: and(always(), ne(variables['Agent.Os'], 'Windows_NT'))
-- powershell: |
- if (($env:Agent_JobStatus -eq 'Succeeded') -or ($env:Agent_JobStatus -eq 'PartiallySucceeded')) {
- $ErrorCount = 0
- } else {
- $ErrorCount = 1
- }
- $WarningCount = 0
-
- # Basic retry loop to harden against server flakiness
- $retryCount = 0
- while ($retryCount -lt $env:MaxRetries) {
- try {
- Invoke-RestMethod -Uri "https://helix.dot.net/api/2018-03-14/telemetry/job/build/$env:Helix_WorkItemId/finish?errorCount=$ErrorCount&warningCount=$WarningCount" -Method Post -ContentType "application/json" -Body "" `
- -Headers @{ 'X-Helix-Job-Token'=$env:Helix_JobToken }
- break
- }
- catch {
- $statusCode = $_.Exception.Response.StatusCode.value__
- if ($statusCode -ge 400 -and $statusCode -le 499) {
- Write-Host "##vso[task.logissue]error Failed to send telemetry to Helix (status code $statusCode); not retrying (4xx client error)"
- Write-Host "##vso[task.logissue]error ", $_.Exception.GetType().FullName, $_.Exception.Message
- exit 1
- }
- Write-Host "Failed to send telemetry to Helix (status code $statusCode); waiting $env:RetryDelay seconds before retrying..."
- $retryCount++
- sleep $env:RetryDelay
- continue
- }
- }
-
- if ($retryCount -ge $env:MaxRetries) {
- Write-Host "##vso[task.logissue]error Failed to send telemetry to Helix after $retryCount retries."
- exit 1
- }
- displayName: Send Windows Build End Telemetry
- env:
- # defined via VSTS variables in start-job.ps1
- Helix_JobToken: $(Helix_JobToken)
- Helix_WorkItemId: $(Helix_WorkItemId)
- MaxRetries: ${{ parameters.maxRetries }}
- RetryDelay: ${{ parameters.retryDelay }}
- condition: and(always(),eq(variables['Agent.Os'], 'Windows_NT'))
diff --git a/eng/common/templates/steps/telemetry-start.yml b/eng/common/templates/steps/telemetry-start.yml
deleted file mode 100644
index 6abbcb33a67..00000000000
--- a/eng/common/templates/steps/telemetry-start.yml
+++ /dev/null
@@ -1,241 +0,0 @@
-parameters:
- helixSource: 'undefined_defaulted_in_telemetry.yml'
- helixType: 'undefined_defaulted_in_telemetry.yml'
- buildConfig: ''
- runAsPublic: false
- maxRetries: 5
- retryDelay: 10 # in seconds
-
-steps:
-- ${{ if and(eq(parameters.runAsPublic, 'false'), not(eq(variables['System.TeamProject'], 'public'))) }}:
- - task: AzureKeyVault@2
- inputs:
- azureSubscription: 'HelixProd_KeyVault'
- KeyVaultName: HelixProdKV
- SecretsFilter: 'HelixApiAccessToken'
- condition: always()
-- bash: |
- # create a temporary file
- jobInfo=`mktemp`
-
- # write job info content to temporary file
-  cat > $jobInfo <<JobListStuff
diff --git a/eng/common/tools.sh b/eng/common/tools.sh
--- a/eng/common/tools.sh
+++ b/eng/common/tools.sh
 if command -v openssl &> /dev/null; then
echo "Curl failed; dumping some information about dotnet.microsoft.com for later investigation"
- echo | openssl s_client -showcerts -servername dotnet.microsoft.com -connect dotnet.microsoft.com:443
+ echo | openssl s_client -showcerts -servername dotnet.microsoft.com -connect dotnet.microsoft.com:443 || true
fi
echo "Will now retry the same URL with verbose logging."
with_retries curl "$install_script_url" -sSL --verbose --retry 10 --create-dirs -o "$install_script" || {
@@ -343,20 +341,20 @@ function InitializeBuildTool {
_InitializeBuildToolCommand="msbuild"
# use override if it exists - commonly set by source-build
if [[ "${_OverrideArcadeInitializeBuildToolFramework:-x}" == "x" ]]; then
- _InitializeBuildToolFramework="net8.0"
+ _InitializeBuildToolFramework="net9.0"
else
_InitializeBuildToolFramework="${_OverrideArcadeInitializeBuildToolFramework}"
fi
}
-# Set RestoreNoCache as a workaround for https://github.com/NuGet/Home/issues/3116
+# Set RestoreNoHttpCache as a workaround for https://github.com/NuGet/Home/issues/3116
function GetNuGetPackageCachePath {
if [[ -z ${NUGET_PACKAGES:-} ]]; then
if [[ "$use_global_nuget_cache" == true ]]; then
- export NUGET_PACKAGES="$HOME/.nuget/packages"
+ export NUGET_PACKAGES="$HOME/.nuget/packages/"
else
- export NUGET_PACKAGES="$repo_root/.packages"
- export RESTORENOCACHE=true
+ export NUGET_PACKAGES="$repo_root/.packages/"
+ export RESTORENOHTTPCACHE=true
fi
fi
@@ -440,7 +438,7 @@ function StopProcesses {
}
function MSBuild {
- local args=$@
+ local args=( "$@" )
if [[ "$pipelines_log" == true ]]; then
InitializeBuildTool
InitializeToolset
@@ -458,12 +456,10 @@ function MSBuild {
local possiblePaths=()
possiblePaths+=( "$toolset_dir/$_InitializeBuildToolFramework/Microsoft.DotNet.ArcadeLogging.dll" )
possiblePaths+=( "$toolset_dir/$_InitializeBuildToolFramework/Microsoft.DotNet.Arcade.Sdk.dll" )
- possiblePaths+=( "$toolset_dir/netcoreapp2.1/Microsoft.DotNet.ArcadeLogging.dll" )
- possiblePaths+=( "$toolset_dir/netcoreapp2.1/Microsoft.DotNet.Arcade.Sdk.dll" )
- possiblePaths+=( "$toolset_dir/netcoreapp3.1/Microsoft.DotNet.ArcadeLogging.dll" )
- possiblePaths+=( "$toolset_dir/netcoreapp3.1/Microsoft.DotNet.Arcade.Sdk.dll" )
possiblePaths+=( "$toolset_dir/net7.0/Microsoft.DotNet.ArcadeLogging.dll" )
possiblePaths+=( "$toolset_dir/net7.0/Microsoft.DotNet.Arcade.Sdk.dll" )
+ possiblePaths+=( "$toolset_dir/net8.0/Microsoft.DotNet.ArcadeLogging.dll" )
+ possiblePaths+=( "$toolset_dir/net8.0/Microsoft.DotNet.Arcade.Sdk.dll" )
for path in "${possiblePaths[@]}"; do
if [[ -f $path ]]; then
selectedPath=$path
@@ -477,7 +473,7 @@ function MSBuild {
args+=( "-logger:$selectedPath" )
fi
- MSBuild-Core ${args[@]}
+ MSBuild-Core "${args[@]}"
}
function MSBuild-Core {
@@ -510,7 +506,8 @@ function MSBuild-Core {
echo "Build failed with exit code $exit_code. Check errors above."
# When running on Azure Pipelines, override the returned exit code to avoid double logging.
- if [[ "$ci" == "true" && -n ${SYSTEM_TEAMPROJECT:-} ]]; then
+ # Skip this when the build is a child of the VMR orchestrator build.
+ if [[ "$ci" == true && -n ${SYSTEM_TEAMPROJECT:-} && "$product_build" != true && "$properties" != *"DotNetBuildRepo=true"* ]]; then
Write-PipelineSetResult -result "Failed" -message "msbuild execution failed."
# Exiting with an exit code causes the azure pipelines task to log yet another "noise" error
# The above Write-PipelineSetResult will cause the task to be marked as failure without adding yet another error
diff --git a/eng/packages/General.props b/eng/packages/General.props
index 2d727376c67..b7e3259930f 100644
--- a/eng/packages/General.props
+++ b/eng/packages/General.props
@@ -10,6 +10,9 @@
+
+
+
@@ -42,9 +45,11 @@
+
+
@@ -62,7 +67,6 @@
-
diff --git a/eng/packages/TestOnly.props b/eng/packages/TestOnly.props
index 680b9e8c5a7..9e9fefae39d 100644
--- a/eng/packages/TestOnly.props
+++ b/eng/packages/TestOnly.props
@@ -6,6 +6,7 @@
+
@@ -32,6 +33,11 @@
+
+
+
+
+
diff --git a/eng/spellchecking_exclusions.dic b/eng/spellchecking_exclusions.dic
index 2f00ad64f92..2fc9b74699b 100644
Binary files a/eng/spellchecking_exclusions.dic and b/eng/spellchecking_exclusions.dic differ
diff --git a/github-merge-flow.jsonc b/github-merge-flow.jsonc
index 2045a8a6989..3071846fbe1 100644
--- a/github-merge-flow.jsonc
+++ b/github-merge-flow.jsonc
@@ -2,9 +2,10 @@
{
"merge-flow-configurations": {
// Automate opening PRs to merge extensions main branch back to dev
- "main":{
- "MergeToBranch": "dev",
- "ExtraSwitches": "-QuietComments"
- }
+ // Disable this for now as the dev branch has been merged to main
+ // "main":{
+ // "MergeToBranch": "dev",
+ // "ExtraSwitches": "-QuietComments"
+ // }
}
}
\ No newline at end of file
diff --git a/global.json b/global.json
index 556fb52a7df..bffeb4e2565 100644
--- a/global.json
+++ b/global.json
@@ -1,22 +1,24 @@
{
"sdk": {
- "version": "8.0.300"
+ "version": "9.0.100-rtm.24479.2"
},
"tools": {
- "dotnet": "8.0.300",
+ "dotnet": "9.0.100-rtm.24479.2",
"runtimes": {
"dotnet/x64": [
- "6.0.22"
+ "8.0.0",
+ "$(MicrosoftNETCoreAppRuntimewinx64Version)"
],
"aspnetcore/x64": [
- "6.0.22"
+ "8.0.0",
+ "$(MicrosoftAspNetCoreAppRuntimewinx64Version)"
]
}
},
"msbuild-sdks": {
"Microsoft.Build.NoTargets": "3.7.0",
"Microsoft.Build.Traversal": "3.2.0",
- "Microsoft.DotNet.Arcade.Sdk": "8.0.0-beta.24475.3",
- "Microsoft.DotNet.Helix.Sdk": "8.0.0-beta.24475.3"
+ "Microsoft.DotNet.Arcade.Sdk": "9.0.0-beta.24473.1",
+ "Microsoft.DotNet.Helix.Sdk": "9.0.0-beta.24473.1"
}
}
diff --git a/src/Libraries/Microsoft.AspNetCore.HeaderParsing/Microsoft.AspNetCore.HeaderParsing.csproj b/src/Libraries/Microsoft.AspNetCore.HeaderParsing/Microsoft.AspNetCore.HeaderParsing.csproj
index 6993d36408f..dfff6aaffa1 100644
--- a/src/Libraries/Microsoft.AspNetCore.HeaderParsing/Microsoft.AspNetCore.HeaderParsing.csproj
+++ b/src/Libraries/Microsoft.AspNetCore.HeaderParsing/Microsoft.AspNetCore.HeaderParsing.csproj
@@ -8,14 +8,11 @@
$(NetCoreTargetFrameworks)
+ true
true
true
true
true
-
-
- false
- $(NoWarn);IL2026
diff --git a/src/Libraries/Microsoft.Extensions.Caching.Hybrid/HybridCacheBuilderExtensions.cs b/src/Libraries/Microsoft.Extensions.Caching.Hybrid/HybridCacheBuilderExtensions.cs
new file mode 100644
index 00000000000..d8fa3a3a3ad
--- /dev/null
+++ b/src/Libraries/Microsoft.Extensions.Caching.Hybrid/HybridCacheBuilderExtensions.cs
@@ -0,0 +1,62 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System.Diagnostics.CodeAnalysis;
+using Microsoft.Extensions.Caching.Hybrid;
+using Microsoft.Shared.Diagnostics;
+
+namespace Microsoft.Extensions.DependencyInjection;
+
+/// <summary>
+/// Configuration extension methods for <see cref="IHybridCacheBuilder"/> / <see cref="HybridCache"/>.
+/// </summary>
+public static class HybridCacheBuilderExtensions
+{
+ /// <summary>
+ /// Serialize values of type <typeparamref name="T"/> with the specified serializer from <paramref name="serializer"/>.
+ /// </summary>
+ /// <typeparam name="T">The type to be serialized.</typeparam>
+ /// <returns>The <see cref="IHybridCacheBuilder"/> instance.</returns>
+ public static IHybridCacheBuilder AddSerializer<T>(this IHybridCacheBuilder builder, IHybridCacheSerializer<T> serializer)
+ {
+ _ = Throw.IfNull(builder).Services.AddSingleton<IHybridCacheSerializer<T>>(serializer);
+ return builder;
+ }
+
+ /// <summary>
+ /// Serialize values of type <typeparamref name="T"/> with the serializer of type <typeparamref name="TImplementation"/>.
+ /// </summary>
+ /// <typeparam name="T">The type to be serialized.</typeparam>
+ /// <typeparam name="TImplementation">The serializer to use for this type.</typeparam>
+ /// <returns>The <see cref="IHybridCacheBuilder"/> instance.</returns>
+ public static IHybridCacheBuilder AddSerializer<T,
+ [DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicConstructors)] TImplementation>(this IHybridCacheBuilder builder)
+ where TImplementation : class, IHybridCacheSerializer<T>
+ {
+ _ = Throw.IfNull(builder).Services.AddSingleton<IHybridCacheSerializer<T>, TImplementation>();
+ return builder;
+ }
+
+ /// <summary>
+ /// Add <paramref name="factory"/> as an additional serializer factory, which can provide serializers for multiple types.
+ /// </summary>
+ /// <returns>The <see cref="IHybridCacheBuilder"/> instance.</returns>
+ public static IHybridCacheBuilder AddSerializerFactory(this IHybridCacheBuilder builder, IHybridCacheSerializerFactory factory)
+ {
+ _ = Throw.IfNull(builder).Services.AddSingleton(factory);
+ return builder;
+ }
+
+ /// <summary>
+ /// Add a factory of type <typeparamref name="TImplementation"/> as an additional serializer factory, which can provide serializers for multiple types.
+ /// </summary>
+ /// <typeparam name="TImplementation">The type of the serializer factory.</typeparam>
+ /// <returns>The <see cref="IHybridCacheBuilder"/> instance.</returns>
+ public static IHybridCacheBuilder AddSerializerFactory<
+ [DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicConstructors)] TImplementation>(this IHybridCacheBuilder builder)
+ where TImplementation : class, IHybridCacheSerializerFactory
+ {
+ _ = Throw.IfNull(builder).Services.AddSingleton<IHybridCacheSerializerFactory, TImplementation>();
+ return builder;
+ }
+}
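
To make the registration surface above concrete, here is a minimal usage sketch. It assumes the `IHybridCacheSerializer<T>` shape used throughout this diff (`Serialize` into an `IBufferWriter<byte>`, `Deserialize` from a `ReadOnlySequence<byte>`); the `WeatherReport` type and its serializer are hypothetical, illustrative names only:

```csharp
using System.Buffers;
using System.Text.Json;
using Microsoft.Extensions.Caching.Hybrid;
using Microsoft.Extensions.DependencyInjection;

// Bind a dedicated serializer for one value type via the builder API above.
var services = new ServiceCollection();
services.AddHybridCache()
    .AddSerializer<WeatherReport>(new WeatherReportSerializer());

// Hypothetical payload type and serializer; not part of this diff.
public sealed record WeatherReport(double TemperatureC, string Summary);

public sealed class WeatherReportSerializer : IHybridCacheSerializer<WeatherReport>
{
    public WeatherReport Deserialize(ReadOnlySequence<byte> source)
    {
        var reader = new Utf8JsonReader(source);
        return JsonSerializer.Deserialize<WeatherReport>(ref reader)!;
    }

    public void Serialize(WeatherReport value, IBufferWriter<byte> target)
    {
        using var writer = new Utf8JsonWriter(target); // writes straight into the target buffer
        JsonSerializer.Serialize(writer, value);
    }
}
```
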
diff --git a/src/Libraries/Microsoft.Extensions.Caching.Hybrid/HybridCacheOptions.cs b/src/Libraries/Microsoft.Extensions.Caching.Hybrid/HybridCacheOptions.cs
new file mode 100644
index 00000000000..982ea55a6af
--- /dev/null
+++ b/src/Libraries/Microsoft.Extensions.Caching.Hybrid/HybridCacheOptions.cs
@@ -0,0 +1,44 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+namespace Microsoft.Extensions.Caching.Hybrid;
+
+/// <summary>
+/// Options for configuring the default <see cref="HybridCache"/> implementation.
+/// </summary>
+public class HybridCacheOptions
+{
+ private const int ShiftBytesToMibiBytes = 20;
+
+ /// <summary>
+ /// Gets or sets the default global options to be applied to <see cref="HybridCache"/> operations; if options are
+ /// specified at the individual call level, the non-null values are merged (with the per-call
+ /// options being used in preference to the global options). If no value is specified for a given
+ /// option (globally or per-call), the implementation may choose a reasonable default.
+ /// </summary>
+ public HybridCacheEntryOptions? DefaultEntryOptions { get; set; }
+
+ /// <summary>
+ /// Gets or sets a value indicating whether compression for this <see cref="HybridCache"/> instance is disabled.
+ /// </summary>
+ public bool DisableCompression { get; set; }
+
+ /// <summary>
+ /// Gets or sets the maximum size of cache items; attempts to store values over this size will be logged
+ /// and the value will not be stored in cache.
+ /// </summary>
+ /// <remarks>The default value is 1 MiB.</remarks>
+ public long MaximumPayloadBytes { get; set; } = 1 << ShiftBytesToMibiBytes; // 1MiB
+
+ /// <summary>
+ /// Gets or sets the maximum permitted length (in characters) of keys; attempts to use keys over this size will be logged.
+ /// </summary>
+ /// <remarks>The default value is 1024 characters.</remarks>
+ public int MaximumKeyLength { get; set; } = 1024; // characters
+
+ /// <summary>
+ /// Gets or sets a value indicating whether to use "tags" data as dimensions on metric reporting; if enabled, care should be used to ensure that
+ /// tags do not contain data that should not be visible in metrics systems.
+ /// </summary>
+ public bool ReportTagMetrics { get; set; }
+}
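
A short configuration sketch for these options, using the delegate overload of `AddHybridCache` added later in this diff (the values shown are arbitrary examples):

```csharp
using System;
using Microsoft.Extensions.Caching.Hybrid;
using Microsoft.Extensions.DependencyInjection;

var services = new ServiceCollection();
services.AddHybridCache(options =>
{
    options.MaximumPayloadBytes = 2 << 20; // raise the 1 MiB default to 2 MiB
    options.MaximumKeyLength = 512;        // tighten the 1024-character default
    options.DefaultEntryOptions = new HybridCacheEntryOptions
    {
        Expiration = TimeSpan.FromMinutes(5),           // distributed (L2) lifetime
        LocalCacheExpiration = TimeSpan.FromMinutes(1), // in-process (L1) lifetime
    };
});
```
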
diff --git a/src/Libraries/Microsoft.Extensions.Caching.Hybrid/HybridCacheServiceExtensions.cs b/src/Libraries/Microsoft.Extensions.Caching.Hybrid/HybridCacheServiceExtensions.cs
new file mode 100644
index 00000000000..d28dc4e47d5
--- /dev/null
+++ b/src/Libraries/Microsoft.Extensions.Caching.Hybrid/HybridCacheServiceExtensions.cs
@@ -0,0 +1,44 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System;
+using Microsoft.Extensions.Caching.Hybrid;
+using Microsoft.Extensions.Caching.Hybrid.Internal;
+using Microsoft.Extensions.DependencyInjection.Extensions;
+using Microsoft.Shared.Diagnostics;
+
+namespace Microsoft.Extensions.DependencyInjection;
+
+/// <summary>
+/// Configuration extension methods for <see cref="HybridCache"/>.
+/// </summary>
+public static class HybridCacheServiceExtensions
+{
+ /// <summary>
+ /// Adds support for multi-tier caching services.
+ /// </summary>
+ /// <returns>A builder instance that allows further configuration of the <see cref="HybridCache"/> system.</returns>
+ public static IHybridCacheBuilder AddHybridCache(this IServiceCollection services, Action<HybridCacheOptions> setupAction)
+ {
+ _ = Throw.IfNull(setupAction);
+ _ = AddHybridCache(services);
+ _ = services.Configure(setupAction);
+ return new HybridCacheBuilder(services);
+ }
+
+ /// <summary>
+ /// Adds support for multi-tier caching services.
+ /// </summary>
+ /// <returns>A builder instance that allows further configuration of the <see cref="HybridCache"/> system.</returns>
+ public static IHybridCacheBuilder AddHybridCache(this IServiceCollection services)
+ {
+ _ = Throw.IfNull(services);
+ services.TryAddSingleton(TimeProvider.System);
+ _ = services.AddOptions().AddMemoryCache();
+ services.TryAddSingleton<IHybridCacheSerializerFactory, DefaultJsonSerializerFactory>();
+ services.TryAddSingleton<IHybridCacheSerializer<string>>(InbuiltTypeSerializer.Instance);
+ services.TryAddSingleton<IHybridCacheSerializer<byte[]>>(InbuiltTypeSerializer.Instance);
+ services.TryAddSingleton<HybridCache, DefaultHybridCache>();
+ return new HybridCacheBuilder(services);
+ }
+}
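
For orientation, a consumption-side sketch; it assumes the public `HybridCache.GetOrCreateAsync` abstraction that `DefaultHybridCache` implements (the abstraction itself is not part of this diff), and the data source is hypothetical:

```csharp
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Caching.Hybrid;
using Microsoft.Extensions.DependencyInjection;

var services = new ServiceCollection();
services.AddHybridCache();
await using var provider = services.BuildServiceProvider();

HybridCache cache = provider.GetRequiredService<HybridCache>();

// Concurrent callers for the same key share a single factory invocation
// (the stampede protection implemented later in this diff).
string report = await cache.GetOrCreateAsync(
    "weather:oslo",
    async token => await FetchReportAsync("oslo", token));

static async Task<string> FetchReportAsync(string city, CancellationToken token)
{
    await Task.Delay(10, token); // stand-in for a real upstream call
    return $"cloudy in {city}";
}
```
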
diff --git a/src/Libraries/Microsoft.Extensions.Caching.Hybrid/IHybridCacheBuilder.cs b/src/Libraries/Microsoft.Extensions.Caching.Hybrid/IHybridCacheBuilder.cs
new file mode 100644
index 00000000000..55c1f47ae3e
--- /dev/null
+++ b/src/Libraries/Microsoft.Extensions.Caching.Hybrid/IHybridCacheBuilder.cs
@@ -0,0 +1,17 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using Microsoft.Extensions.DependencyInjection;
+
+namespace Microsoft.Extensions.Caching.Hybrid;
+
+/// <summary>
+/// Helper API for configuring <see cref="HybridCache"/>.
+/// </summary>
+public interface IHybridCacheBuilder
+{
+ /// <summary>
+ /// Gets the services collection associated with this <see cref="HybridCache"/> instance.
+ /// </summary>
+ IServiceCollection Services { get; }
+}
diff --git a/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/BufferChunk.cs b/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/BufferChunk.cs
new file mode 100644
index 00000000000..0d7d54cfdd6
--- /dev/null
+++ b/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/BufferChunk.cs
@@ -0,0 +1,94 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System;
+using System.Buffers;
+using System.Diagnostics;
+using System.Runtime.CompilerServices;
+
+namespace Microsoft.Extensions.Caching.Hybrid.Internal;
+
+// Used to convey buffer status; like ArraySegment, but Offset is always
+// zero, and we use the most significant bit of the length (usually the sign flag,
+// but we do not need to support negative length) to track whether or not
+// to recycle this value.
+internal readonly struct BufferChunk
+{
+ private const int FlagReturnToPool = (1 << 31);
+
+ private readonly int _lengthAndPoolFlag;
+
+ public byte[]? Array { get; } // null for default
+
+ public int Length => _lengthAndPoolFlag & ~FlagReturnToPool;
+
+ public bool ReturnToPool => (_lengthAndPoolFlag & FlagReturnToPool) != 0;
+
+ public BufferChunk(byte[] array)
+ {
+ Debug.Assert(array is not null, "expected valid array input");
+ Array = array;
+ _lengthAndPoolFlag = array!.Length;
+
+ // assume not pooled, if exact-sized
+ // (we don't expect array.Length to be negative; we're really just saying
+ // "we expect the result of assigning array.Length to _lengthAndPoolFlag
+ // to give the expected Length *and* not have the MSB set; we're just
+ // checking that we haven't fat-fingered our MSB logic)
+ Debug.Assert(!ReturnToPool, "do not return right-sized arrays");
+ Debug.Assert(Length == array.Length, "array length not respected");
+ }
+
+ public BufferChunk(byte[] array, int length, bool returnToPool)
+ {
+ Debug.Assert(array is not null, "expected valid array input");
+ Debug.Assert(length >= 0, "expected valid length");
+ Array = array;
+ _lengthAndPoolFlag = length | (returnToPool ? FlagReturnToPool : 0);
+ Debug.Assert(ReturnToPool == returnToPool, "return-to-pool not respected");
+ Debug.Assert(Length == length, "length not respected");
+ }
+
+ public byte[] ToArray()
+ {
+ var length = Length;
+ if (length == 0)
+ {
+ return [];
+ }
+
+ var copy = new byte[length];
+ Buffer.BlockCopy(Array!, 0, copy, 0, length);
+ return copy;
+
+ // Note on nullability of Array; the usage here is that a non-null array
+ // is always provided during construction, so the only null scenario is for default(BufferChunk).
+ // Since the constructor explicitly accesses array.Length, any null array passed to the constructor
+ // will cause an exception, even in release (the Debug.Assert only covers debug) - although in
+ // reality we do not expect this to ever occur (internal type, usage checked, etc). In the case of
+ // default(BufferChunk), we know that Length will be zero, which means we will hit the [] case.
+ }
+
+ internal void RecycleIfAppropriate()
+ {
+ if (ReturnToPool)
+ {
+ ArrayPool<byte>.Shared.Return(Array!);
+ }
+
+ Unsafe.AsRef(in this) = default; // anti foot-shotgun double-return guard; not 100%, but worth doing
+ Debug.Assert(Array is null && !ReturnToPool, "expected clean slate after recycle");
+ }
+
+ // get the data as a ROS; for note on null-logic of Array!, see comment in ToArray
+ internal ReadOnlySequence<byte> AsSequence() => Length == 0 ? default : new ReadOnlySequence<byte>(Array!, 0, Length);
+
+ internal BufferChunk DoNotReturnToPool()
+ {
+ var copy = this;
+ Unsafe.AsRef(in copy._lengthAndPoolFlag) &= ~FlagReturnToPool;
+ Debug.Assert(copy.Length == Length, "same length expected");
+ Debug.Assert(!copy.ReturnToPool, "do not return to pool");
+ return copy;
+ }
+}
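
A tiny worked example of the length-plus-flag packing used above (same constant, standalone):

```csharp
using System;

const int FlagReturnToPool = 1 << 31; // most significant bit only

int packed = 1024 | FlagReturnToPool;           // pooled buffer holding 1024 payload bytes
int length = packed & ~FlagReturnToPool;        // strips the flag: 1024
bool pooled = (packed & FlagReturnToPool) != 0; // true

Console.WriteLine($"length={length}, pooled={pooled}"); // length=1024, pooled=True
```
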
diff --git a/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultHybridCache.CacheItem.cs b/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultHybridCache.CacheItem.cs
new file mode 100644
index 00000000000..5585b9b2a29
--- /dev/null
+++ b/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultHybridCache.CacheItem.cs
@@ -0,0 +1,111 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System;
+using System.Diagnostics;
+using System.Threading;
+using Microsoft.Extensions.Caching.Memory;
+
+namespace Microsoft.Extensions.Caching.Hybrid.Internal;
+
+internal partial class DefaultHybridCache
+{
+ internal abstract class CacheItem
+ {
+ private int _refCount = 1; // the number of pending operations against this cache item
+
+ public abstract bool DebugIsImmutable { get; }
+
+ // Note: the ref count is the number of callers anticipating this value at any given time. Initially,
+ // it is one for a simple "get the value" flow, but if another call joins with us, it'll be incremented.
+ // If either cancels, it will get decremented, with the entire flow being cancelled if it ever becomes
+ // zero.
+ // This counter also drives cache lifetime, with the cache itself incrementing the count by one. In the
+ // case of mutable data, cache eviction may reduce this to zero (in cooperation with any concurrent readers,
+ // who incr/decr around their fetch), allowing safe buffer recycling.
+
+ internal int RefCount => Volatile.Read(ref _refCount);
+
+ internal static readonly PostEvictionDelegate SharedOnEviction = static (key, value, reason, state) =>
+ {
+ if (value is CacheItem item)
+ {
+ _ = item.Release();
+ }
+ };
+
+ public virtual bool NeedsEvictionCallback => false; // do we need to call Release when evicted?
+
+ public abstract bool TryReserveBuffer(out BufferChunk buffer);
+
+ /// <summary>
+ /// Signal that the consumer is done with this item (ref-count decr).
+ /// </summary>
+ /// <returns>True if this is the final release.</returns>
+ public bool Release()
+ {
+ int newCount = Interlocked.Decrement(ref _refCount);
+ Debug.Assert(newCount >= 0, "over-release detected");
+ if (newCount == 0)
+ {
+ // perform per-item clean-up, i.e. buffer recycling (if defensive copies needed)
+ OnFinalRelease();
+ return true;
+ }
+
+ return false;
+ }
+
+ public bool TryReserve()
+ {
+ // This is basically interlocked increment, but with a check against:
+ // a) incrementing upwards from zero
+ // b) overflowing *back* to zero
+ int oldValue = Volatile.Read(ref _refCount);
+ do
+ {
+ if (oldValue is 0 or -1)
+ {
+ return false; // already burned, or about to roll around back to zero
+ }
+
+ var updated = Interlocked.CompareExchange(ref _refCount, oldValue + 1, oldValue);
+ if (updated == oldValue)
+ {
+ return true; // we exchanged
+ }
+
+ oldValue = updated; // we failed, but we have an updated state
+ }
+ while (true);
+ }
+
+ protected virtual void OnFinalRelease() // any required release semantics
+ {
+ }
+ }
+
+ internal abstract class CacheItem<T> : CacheItem
+ {
+ public abstract bool TryGetSize(out long size);
+
+ // attempt to get a value that was *not* previously reserved
+ public abstract bool TryGetValue(out T value);
+
+ // get a value that *was* reserved, countermanding our reservation in the process
+ public T GetReservedValue()
+ {
+ if (!TryGetValue(out var value))
+ {
+ Throw();
+ }
+
+ _ = Release();
+ return value;
+
+ static void Throw() => throw new ObjectDisposedException("The cache item has been recycled before the value was obtained");
+ }
+
+ internal static CacheItem<T> Create() => ImmutableTypeCache<T>.IsImmutable ? new ImmutableCacheItem<T>() : new MutableCacheItem<T>();
+ }
+}
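
The `TryReserve` loop above is a guarded interlocked increment; the same pattern in isolation, as a sketch:

```csharp
using System;
using System.Threading;

int refCount = 1; // one pending consumer, as in the CacheItem constructor

Console.WriteLine(TryReserve(ref refCount)); // True; count is now 2
refCount = 0;                                // simulate the final Release
Console.WriteLine(TryReserve(ref refCount)); // False; a dead item stays dead

// Increment only while the count is still positive; once it has hit zero
// (or would wrap back around via -1), reservation permanently fails.
static bool TryReserve(ref int refCount)
{
    int observed = Volatile.Read(ref refCount);
    while (observed is not (0 or -1))
    {
        int previous = Interlocked.CompareExchange(ref refCount, observed + 1, observed);
        if (previous == observed)
        {
            return true; // exchange succeeded
        }

        observed = previous; // lost a race; retry against the fresh value
    }

    return false;
}
```
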
diff --git a/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultHybridCache.Debug.cs b/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultHybridCache.Debug.cs
new file mode 100644
index 00000000000..a9901103555
--- /dev/null
+++ b/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultHybridCache.Debug.cs
@@ -0,0 +1,81 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System.Diagnostics;
+using System.Diagnostics.CodeAnalysis;
+
+#if DEBUG
+using System.Threading;
+#endif
+
+namespace Microsoft.Extensions.Caching.Hybrid.Internal;
+
+internal partial class DefaultHybridCache
+{
+ /// <summary>
+ /// Auxiliary API for testing purposes, allowing confirmation of the internal state independent of the public API.
+ /// </summary>
+ internal bool DebugTryGetCacheItem<T>(string key, [NotNullWhen(true)] out CacheItem<T>? value)
+ {
+ if (_localCache.TryGetValue(key, out var untyped) && untyped is CacheItem<T> typed)
+ {
+ value = typed;
+ return true;
+ }
+
+ value = null;
+ return false;
+ }
+
+#if DEBUG // enable ref-counted buffers
+
+ private int _outstandingBufferCount;
+
+ internal int DebugOnlyGetOutstandingBuffers(bool flush = false)
+ => flush ? Interlocked.Exchange(ref _outstandingBufferCount, 0) : Volatile.Read(ref _outstandingBufferCount);
+
+ [Conditional("DEBUG")]
+ internal void DebugOnlyDecrementOutstandingBuffers()
+ {
+ _ = Interlocked.Decrement(ref _outstandingBufferCount);
+ }
+
+ [Conditional("DEBUG")]
+ internal void DebugOnlyIncrementOutstandingBuffers()
+ {
+ _ = Interlocked.Increment(ref _outstandingBufferCount);
+ }
+#endif
+
+ private partial class MutableCacheItem<T>
+ {
+#if DEBUG
+ private DefaultHybridCache? _cache; // for buffer-tracking - only needed in DEBUG
+#endif
+
+ [Conditional("DEBUG")]
+ [SuppressMessage("Performance", "CA1822:Mark members as static", Justification = "Instance state used in debug")]
+ internal void DebugOnlyTrackBuffer(DefaultHybridCache cache)
+ {
+#if DEBUG
+ _cache = cache;
+ if (_buffer.ReturnToPool)
+ {
+ _cache?.DebugOnlyIncrementOutstandingBuffers();
+ }
+#endif
+ }
+
+ [Conditional("DEBUG")]
+ [SuppressMessage("Performance", "CA1822:Mark members as static", Justification = "Instance state used in debug")]
+ private void DebugOnlyDecrementOutstandingBuffers()
+ {
+#if DEBUG
+ if (_buffer.ReturnToPool)
+ {
+ _cache?.DebugOnlyDecrementOutstandingBuffers();
+ }
+#endif
+ }
+ }
+}
diff --git a/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultHybridCache.ImmutableCacheItem.cs b/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultHybridCache.ImmutableCacheItem.cs
new file mode 100644
index 00000000000..9ae8468ba29
--- /dev/null
+++ b/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultHybridCache.ImmutableCacheItem.cs
@@ -0,0 +1,59 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System.Threading;
+
+namespace Microsoft.Extensions.Caching.Hybrid.Internal;
+
+internal partial class DefaultHybridCache
+{
+ private sealed class ImmutableCacheItem<T> : CacheItem<T> // used to hold types that do not require defensive copies
+ {
+ private static ImmutableCacheItem<T>? _sharedDefault;
+
+ private T _value = default!; // deferred until SetValue
+
+ public long Size { get; private set; } = -1;
+
+ public override bool DebugIsImmutable => true;
+
+ // get a shared instance that passes as "reserved"; doesn't need to be 100% singleton,
+ // but we don't want to break the reservation rules either; if we can't reserve: create new
+ public static ImmutableCacheItem<T> GetReservedShared()
+ {
+ ImmutableCacheItem<T>? obj = Volatile.Read(ref _sharedDefault);
+ if (obj is null || !obj.TryReserve())
+ {
+ obj = new();
+ _ = obj.TryReserve(); // this is reliable on a new instance
+ Volatile.Write(ref _sharedDefault, obj);
+ }
+
+ return obj;
+ }
+
+ public void SetValue(T value, long size)
+ {
+ _value = value;
+ Size = size;
+ }
+
+ public override bool TryGetValue(out T value)
+ {
+ value = _value;
+ return true; // always available
+ }
+
+ public override bool TryGetSize(out long size)
+ {
+ size = Size;
+ return size >= 0;
+ }
+
+ public override bool TryReserveBuffer(out BufferChunk buffer)
+ {
+ buffer = default;
+ return false; // we don't have one to reserve!
+ }
+ }
+}
diff --git a/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultHybridCache.L2.cs b/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultHybridCache.L2.cs
new file mode 100644
index 00000000000..1e694448737
--- /dev/null
+++ b/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultHybridCache.L2.cs
@@ -0,0 +1,167 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System;
+using System.Diagnostics;
+using System.Diagnostics.CodeAnalysis;
+using System.Runtime.CompilerServices;
+using System.Threading;
+using System.Threading.Tasks;
+using Microsoft.Extensions.Caching.Distributed;
+using Microsoft.Extensions.Caching.Memory;
+
+namespace Microsoft.Extensions.Caching.Hybrid.Internal;
+
+internal partial class DefaultHybridCache
+{
+ [SuppressMessage("Performance", "CA1849:Call async methods when in an async method", Justification = "Manual sync check")]
+ [SuppressMessage("Usage", "VSTHRD003:Avoid awaiting foreign Tasks", Justification = "Manual sync check")]
+ internal ValueTask<BufferChunk> GetFromL2Async(string key, CancellationToken token)
+ {
+ switch (GetFeatures(CacheFeatures.BackendCache | CacheFeatures.BackendBuffers))
+ {
+ case CacheFeatures.BackendCache: // legacy byte[]-based
+ var pendingLegacy = _backendCache!.GetAsync(key, token);
+#if NETCOREAPP2_0_OR_GREATER || NETSTANDARD2_1_OR_GREATER
+ if (!pendingLegacy.IsCompletedSuccessfully)
+#else
+ if (pendingLegacy.Status != TaskStatus.RanToCompletion)
+#endif
+ {
+ return new(AwaitedLegacyAsync(pendingLegacy, this));
+ }
+
+ return new(GetValidPayloadSegment(pendingLegacy.Result)); // already complete
+
+ case CacheFeatures.BackendCache | CacheFeatures.BackendBuffers: // IBufferWriter-based
+ RecyclableArrayBufferWriter<byte> writer = RecyclableArrayBufferWriter<byte>.Create(MaximumPayloadBytes);
+ var cache = Unsafe.As<IBufferDistributedCache>(_backendCache!); // type-checked already
+ var pendingBuffers = cache.TryGetAsync(key, writer, token);
+ if (!pendingBuffers.IsCompletedSuccessfully)
+ {
+ return new(AwaitedBuffersAsync(pendingBuffers, writer));
+ }
+
+ BufferChunk result = pendingBuffers.GetAwaiter().GetResult()
+ ? new(writer.DetachCommitted(out var length), length, returnToPool: true)
+ : default;
+ writer.Dispose(); // it is not accidental that this isn't "using"; avoid recycling if not 100% sure what happened
+ return new(result);
+ }
+
+ return default;
+
+ static async Task<BufferChunk> AwaitedLegacyAsync(Task<byte[]?> pending, DefaultHybridCache @this)
+ {
+ var bytes = await pending.ConfigureAwait(false);
+ return @this.GetValidPayloadSegment(bytes);
+ }
+
+ static async Task<BufferChunk> AwaitedBuffersAsync(ValueTask<bool> pending, RecyclableArrayBufferWriter<byte> writer)
+ {
+ BufferChunk result = await pending.ConfigureAwait(false)
+ ? new(writer.DetachCommitted(out var length), length, returnToPool: true)
+ : default;
+ writer.Dispose(); // it is not accidental that this isn't "using"; avoid recycling if not 100% sure what happened
+ return result;
+ }
+ }
+
+ internal ValueTask SetL2Async(string key, in BufferChunk buffer, HybridCacheEntryOptions? options, CancellationToken token)
+ {
+ Debug.Assert(buffer.Array is not null, "array should be non-null");
+ switch (GetFeatures(CacheFeatures.BackendCache | CacheFeatures.BackendBuffers))
+ {
+ case CacheFeatures.BackendCache: // legacy byte[]-based
+ var arr = buffer.Array!;
+ if (arr.Length != buffer.Length)
+ {
+ // we'll need a right-sized snapshot
+ arr = buffer.ToArray();
+ }
+
+ return new(_backendCache!.SetAsync(key, arr, GetOptions(options), token));
+ case CacheFeatures.BackendCache | CacheFeatures.BackendBuffers: // ReadOnlySequence-based
+ var cache = Unsafe.As<IBufferDistributedCache>(_backendCache!); // type-checked already
+ return cache.SetAsync(key, buffer.AsSequence(), GetOptions(options), token);
+ }
+
+ return default;
+ }
+
+ internal void SetL1<T>(string key, CacheItem<T> value, HybridCacheEntryOptions? options)
+ {
+ // incr ref-count for the cache itself; this *may* be released via the NeedsEvictionCallback path
+ if (value.TryReserve())
+ {
+ // based on CacheExtensions.Set, but with post-eviction recycling
+
+ // intentionally use manual Dispose rather than "using"; confusingly, it is Dispose()
+ // that actually commits the add - so: if we fault, we don't want to try
+ // committing a partially configured cache entry
+ ICacheEntry cacheEntry = _localCache.CreateEntry(key);
+ cacheEntry.AbsoluteExpirationRelativeToNow = options?.LocalCacheExpiration ?? _defaultLocalCacheExpiration;
+ cacheEntry.Value = value;
+
+ if (value.TryGetSize(out var size))
+ {
+ cacheEntry = cacheEntry.SetSize(size);
+ }
+
+ if (value.NeedsEvictionCallback)
+ {
+ cacheEntry = cacheEntry.RegisterPostEvictionCallback(CacheItem.SharedOnEviction);
+ }
+
+ // commit
+ cacheEntry.Dispose();
+ }
+ }
+
+ private BufferChunk GetValidPayloadSegment(byte[]? payload)
+ {
+ if (payload is not null)
+ {
+ if (payload.Length > MaximumPayloadBytes)
+ {
+ ThrowPayloadLengthExceeded(payload.Length);
+ }
+
+ return new(payload);
+ }
+
+ return default;
+ }
+
+ [DoesNotReturn]
+ [MethodImpl(MethodImplOptions.NoInlining)]
+ private void ThrowPayloadLengthExceeded(int size) // splitting the exception bits out to a different method
+ {
+ // also add via logger when possible
+ throw new InvalidOperationException($"Maximum cache length ({MaximumPayloadBytes} bytes) exceeded");
+ }
+
+#if NET8_0_OR_GREATER
+ [SuppressMessage("Maintainability", "CA1508:Avoid dead conditional code", Justification = "False positive from unsafe accessor")]
+#endif
+ private DistributedCacheEntryOptions GetOptions(HybridCacheEntryOptions? options)
+ {
+ DistributedCacheEntryOptions? result = null;
+ if (options is not null && options.Expiration.HasValue && options.Expiration.GetValueOrDefault() != _defaultExpiration)
+ {
+ result = ToDistributedCacheEntryOptions(options);
+ }
+
+ return result ?? _defaultDistributedCacheExpiration;
+
+#if NET8_0_OR_GREATER
+ // internal method memoizes this allocation; since it is "init", it is immutable (outside reflection)
+ [UnsafeAccessor(UnsafeAccessorKind.Method, Name = nameof(ToDistributedCacheEntryOptions))]
+ extern static DistributedCacheEntryOptions? ToDistributedCacheEntryOptions(HybridCacheEntryOptions options);
+#else
+ // without that helper method, we'll just eat the alloc (down-level TFMs)
+ static DistributedCacheEntryOptions ToDistributedCacheEntryOptions(HybridCacheEntryOptions options)
+ => new() { AbsoluteExpirationRelativeToNow = options.Expiration };
+#endif
+ }
+}
diff --git a/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultHybridCache.MutableCacheItem.cs b/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultHybridCache.MutableCacheItem.cs
new file mode 100644
index 00000000000..2d02c23b6d8
--- /dev/null
+++ b/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultHybridCache.MutableCacheItem.cs
@@ -0,0 +1,77 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+namespace Microsoft.Extensions.Caching.Hybrid.Internal;
+
+internal partial class DefaultHybridCache
+{
+ private sealed partial class MutableCacheItem<T> : CacheItem<T> // used to hold types that require defensive copies
+ {
+ private IHybridCacheSerializer<T> _serializer = null!; // deferred until SetValue
+ private BufferChunk _buffer;
+
+ public override bool NeedsEvictionCallback => _buffer.ReturnToPool;
+
+ public override bool DebugIsImmutable => false;
+
+ public void SetValue(ref BufferChunk buffer, IHybridCacheSerializer<T> serializer)
+ {
+ _serializer = serializer;
+ _buffer = buffer;
+ buffer = default; // we're taking over the lifetime; the caller no longer has it!
+ }
+
+ public override bool TryGetValue(out T value)
+ {
+ // only if we haven't already burned
+ if (TryReserve())
+ {
+ try
+ {
+ value = _serializer.Deserialize(_buffer.AsSequence());
+ return true;
+ }
+ finally
+ {
+ _ = Release();
+ }
+ }
+
+ value = default!;
+ return false;
+ }
+
+ public override bool TryGetSize(out long size)
+ {
+ // only if we haven't already burned
+ if (TryReserve())
+ {
+ size = _buffer.Length;
+ _ = Release();
+ return true;
+ }
+
+ size = 0;
+ return false;
+ }
+
+ public override bool TryReserveBuffer(out BufferChunk buffer)
+ {
+ // only if we haven't already burned
+ if (TryReserve())
+ {
+ buffer = _buffer.DoNotReturnToPool(); // not up to them!
+ return true;
+ }
+
+ buffer = default;
+ return false;
+ }
+
+ protected override void OnFinalRelease()
+ {
+ DebugOnlyDecrementOutstandingBuffers();
+ _buffer.RecycleIfAppropriate();
+ }
+ }
+}
diff --git a/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultHybridCache.Serialization.cs b/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultHybridCache.Serialization.cs
new file mode 100644
index 00000000000..523a95e279a
--- /dev/null
+++ b/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultHybridCache.Serialization.cs
@@ -0,0 +1,54 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System;
+using System.Collections.Concurrent;
+using System.Reflection;
+using System.Runtime.CompilerServices;
+using Microsoft.Extensions.DependencyInjection;
+
+namespace Microsoft.Extensions.Caching.Hybrid.Internal;
+
+internal partial class DefaultHybridCache
+{
+ // Per instance cache of typed serializers; each serializer is a
+ // IHybridCacheSerializer<T> for the corresponding Type, but we can't
+ // know which here - and undesirable to add an artificial non-generic
+ // IHybridCacheSerializer base that serves no other purpose.
+ private readonly ConcurrentDictionary<Type, object> _serializers = new();
+
+ internal int MaximumPayloadBytes { get; }
+
+ internal IHybridCacheSerializer<T> GetSerializer<T>()
+ {
+ return _serializers.TryGetValue(typeof(T), out var serializer)
+ ? Unsafe.As<IHybridCacheSerializer<T>>(serializer) : ResolveAndAddSerializer(this);
+
+ static IHybridCacheSerializer<T> ResolveAndAddSerializer(DefaultHybridCache @this)
+ {
+ // It isn't critical that we get only one serializer instance during start-up; what matters
+ // is that we don't get a new serializer instance *every time*.
+ var serializer = @this._services.GetService<IHybridCacheSerializer<T>>();
+ if (serializer is null)
+ {
+ foreach (var factory in @this._serializerFactories)
+ {
+ if (factory.TryCreateSerializer<T>(out var current))
+ {
+ serializer = current;
+ break; // we've already reversed the factories, so: the first hit is what we want
+ }
+ }
+ }
+
+ if (serializer is null)
+ {
+ throw new InvalidOperationException($"No {nameof(IHybridCacheSerializer)} configured for type '{typeof(T).Name}'");
+ }
+
+ // store the result so we don't repeat this in future
+ @this._serializers[typeof(T)] = serializer;
+ return serializer;
+ }
+ }
+}
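
For reference, a sketch of a factory that would participate in the resolution loop above; the `TryCreateSerializer<T>` signature is assumed from this package's abstractions, and the factory/serializer names are hypothetical:

```csharp
using System.Buffers;
using System.Diagnostics.CodeAnalysis;
using System.Text;
using Microsoft.Extensions.Caching.Hybrid;

// Claims only string; for any other T it returns false so that resolution
// falls through to the next factory (the loop above probes factories in
// reverse registration order, so the most recently registered one wins).
public sealed class Utf8StringSerializerFactory : IHybridCacheSerializerFactory
{
    public bool TryCreateSerializer<T>([NotNullWhen(true)] out IHybridCacheSerializer<T>? serializer)
    {
        if (typeof(T) == typeof(string))
        {
            serializer = (IHybridCacheSerializer<T>)(object)new Utf8StringSerializer();
            return true;
        }

        serializer = null;
        return false;
    }

    private sealed class Utf8StringSerializer : IHybridCacheSerializer<string>
    {
        public string Deserialize(ReadOnlySequence<byte> source) => Encoding.UTF8.GetString(source.ToArray());

        public void Serialize(string value, IBufferWriter<byte> target) => target.Write(Encoding.UTF8.GetBytes(value));
    }
}
```
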
diff --git a/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultHybridCache.Stampede.cs b/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultHybridCache.Stampede.cs
new file mode 100644
index 00000000000..ef5c570c670
--- /dev/null
+++ b/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultHybridCache.Stampede.cs
@@ -0,0 +1,111 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System;
+using System.Collections.Concurrent;
+using System.Diagnostics;
+using System.Diagnostics.CodeAnalysis;
+
+namespace Microsoft.Extensions.Caching.Hybrid.Internal;
+
+internal partial class DefaultHybridCache
+{
+ private readonly ConcurrentDictionary<StampedeKey, StampedeState> _currentOperations = new();
+
+ // returns true for a new session (in which case: we need to start the work), false for a pre-existing session
+ public bool GetOrCreateStampedeState<TState, T>(string key, HybridCacheEntryFlags flags, out StampedeState<TState, T> stampedeState, bool canBeCanceled)
+ {
+ var stampedeKey = new StampedeKey(key, flags);
+
+ // Double-checked locking to try to avoid unnecessary sessions in race conditions,
+ // while avoiding the lock completely whenever possible.
+ if (TryJoinExistingSession(this, stampedeKey, out var existing))
+ {
+ stampedeState = existing;
+ return false; // someone ELSE is running the work
+ }
+
+ // Most common scenario here, then, is that we're not fighting with anyone else
+ // go ahead and create a placeholder state object and *try* to add it.
+ stampedeState = new StampedeState<TState, T>(this, stampedeKey, canBeCanceled);
+ if (_currentOperations.TryAdd(stampedeKey, stampedeState))
+ {
+ // successfully added; indeed, no-one else was fighting: we're done
+ return true; // the CURRENT caller is responsible for making the work happen
+ }
+
+ // Hmmm, failed to add - there's concurrent activity on the same key; we're now
+ // in very rare race condition territory; go ahead and take a lock while we
+ // collect our thoughts.
+
+ // see notes in SyncLock.cs
+ lock (GetPartitionedSyncLock(in stampedeKey))
+ {
+ // check again while we hold the lock
+ if (TryJoinExistingSession(this, stampedeKey, out existing))
+ {
+ // we found an existing state we can join; do that
+ stampedeState.SetCanceled(); // to be thorough: mark our speculative one as doomed (no-one has seen it, though)
+ stampedeState = existing; // and replace with the one we found
+ return false; // someone ELSE is running the work
+
+ // Note that in this case we allocated a StampedeState that got dropped on
+ // the floor; in the grand scheme of things, that's OK; this is a rare outcome.
+ }
+
+ // Check whether the value was L1-cached by an outgoing operation (for *us* to check needs local-cache-read,
+ // and for *them* to have updated needs local-cache-write, but since the shared us/them key includes flags,
+ // we can skip this if *either* flag is set).
+ if ((flags & HybridCacheEntryFlags.DisableLocalCache) == 0 && _localCache.TryGetValue(key, out var untyped)
+ && untyped is CacheItem typed && typed.TryReserve())
+ {
+ stampedeState.SetResultDirect(typed);
+ return false; // the work has ALREADY been done
+ }
+
+ // Otherwise, either nothing existed - or the thing that already exists can't be joined
+ // in that case, go ahead and use the state that we invented a moment ago (outside of the lock).
+ _currentOperations[stampedeKey] = stampedeState;
+ return true; // the CURRENT caller is responsible for making the work happen
+ }
+
+ static bool TryJoinExistingSession(DefaultHybridCache @this, in StampedeKey stampedeKey,
+ [NotNullWhen(true)] out StampedeState<TState, T>? stampedeState)
+ {
+ if (@this._currentOperations.TryGetValue(stampedeKey, out var found))
+ {
+ if (found is not StampedeState<TState, T> tmp)
+ {
+ ThrowWrongType(stampedeKey.Key, found.Type, typeof(T));
+ }
+
+ if (tmp.TryAddCaller())
+ {
+ // we joined an existing session
+ stampedeState = tmp;
+ return true;
+ }
+ }
+
+ stampedeState = null;
+ return false;
+ }
+
+ [DoesNotReturn]
+ static void ThrowWrongType(string key, Type existingType, Type newType)
+ {
+ Debug.Assert(existingType != newType, "should be different types");
+ throw new InvalidOperationException(
+ $"All calls to {nameof(HybridCache)} with the same key should use the same data type; the same key is being used for '{existingType.FullName}' and '{newType.FullName}' data")
+ {
+ Data = { { "CacheKey", key } }
+ };
+ }
+ }
+
+ internal int DebugGetCallerCount(string key, HybridCacheEntryFlags? flags = null)
+ {
+ var stampedeKey = new StampedeKey(key, flags ?? _defaultFlags);
+ return _currentOperations.TryGetValue(stampedeKey, out var state) ? state.DebugCallerCount : 0;
+ }
+}
diff --git a/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultHybridCache.StampedeKey.cs b/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultHybridCache.StampedeKey.cs
new file mode 100644
index 00000000000..bbb519b2992
--- /dev/null
+++ b/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultHybridCache.StampedeKey.cs
@@ -0,0 +1,54 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System;
+using System.Diagnostics.CodeAnalysis;
+
+namespace Microsoft.Extensions.Caching.Hybrid.Internal;
+
+internal partial class DefaultHybridCache
+{
+ [SuppressMessage("Style", "IDE0032:Use auto property", Justification = "Prefer explicit field in this case")]
+ internal readonly struct StampedeKey : IEquatable<StampedeKey>
+ {
+ private readonly string _key;
+ private readonly HybridCacheEntryFlags _flags;
+ private readonly int _hashCode; // we know we'll need it; compute it once only
+ public StampedeKey(string key, HybridCacheEntryFlags flags)
+ {
+ // We'll use both the key *and* the flags as combined flag; in reality, we *expect*
+ // the flags to be consistent between calls on the same operation, and it must be
+ // noted that the *cache items* only use the key (not the flags), but: it gets
+ // very hard to grok what the correct behaviour should be if combining two calls
+ // with different flags, since they could have mutually exclusive behaviours!
+
+ // As such, we'll treat conflicting calls entirely separately from a stampede
+ // perspective.
+ _key = key;
+ _flags = flags;
+#if NETCOREAPP2_1_OR_GREATER || NETSTANDARD2_1_OR_GREATER
+ _hashCode = System.HashCode.Combine(key, flags);
+#else
+ _hashCode = key.GetHashCode() ^ (int)flags;
+#endif
+ }
+
+ public string Key => _key;
+ public HybridCacheEntryFlags Flags => _flags;
+
+ // Allow direct access to the pre-computed hash-code, semantically emphasizing that
+ // this is a constant-time operation against a known value.
+ internal int HashCode => _hashCode;
+
+ [System.Diagnostics.CodeAnalysis.SuppressMessage("Blocker Code Smell", "S2178:Short-circuit logic should be used in boolean contexts",
+ Justification = "Non-short-circuiting intentional to remove unnecessary branch")]
+ public bool Equals(StampedeKey other) => _flags == other._flags & _key == other._key;
+
+ public override bool Equals([NotNullWhen(true)] object? obj)
+ => obj is StampedeKey other && Equals(other);
+
+ public override int GetHashCode() => _hashCode;
+
+ public override string ToString() => $"{_key} ({_flags})";
+ }
+}
diff --git a/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultHybridCache.StampedeState.cs b/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultHybridCache.StampedeState.cs
new file mode 100644
index 00000000000..eba71774395
--- /dev/null
+++ b/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultHybridCache.StampedeState.cs
@@ -0,0 +1,109 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System;
+using System.Threading;
+
+#if !NETCOREAPP3_0_OR_GREATER
+using System.Runtime.CompilerServices;
+#endif
+
+namespace Microsoft.Extensions.Caching.Hybrid.Internal;
+
+internal partial class DefaultHybridCache
+{
+ internal abstract class StampedeState
+#if NETCOREAPP3_0_OR_GREATER
+ : IThreadPoolWorkItem
+#endif
+ {
+ internal readonly CancellationToken SharedToken; // this might have a value even when _sharedCancellation is null
+
+ // Because multiple callers can enlist, we need to track when the *last* caller cancels
+ // (and keep going until then); that means we need to run with custom cancellation.
+ private readonly CancellationTokenSource? _sharedCancellation;
+
+ [System.Diagnostics.CodeAnalysis.SuppressMessage("Style", "IDE0032:Use auto property", Justification = "Keep usage explicit")]
+ private readonly DefaultHybridCache _cache;
+ [System.Diagnostics.CodeAnalysis.SuppressMessage("Style", "IDE0032:Use auto property", Justification = "Keep usage explicit")]
+ private readonly CacheItem _cacheItem;
+
+ // we expose the key as a by-ref readonly; this minimizes the stack work involved in passing the key around
+ // (both in terms of width and copy-semantics)
+ private readonly StampedeKey _key;
+ public ref readonly StampedeKey Key => ref _key;
+ protected CacheItem CacheItem => _cacheItem;
+
+ /// <summary>
+ /// Initializes a new instance of the <see cref="StampedeState"/> class optionally with shared cancellation support.
+ /// </summary>
+ protected StampedeState(DefaultHybridCache cache, in StampedeKey key, CacheItem cacheItem, bool canBeCanceled)
+ {
+ _cache = cache;
+ _key = key;
+ _cacheItem = cacheItem;
+ if (canBeCanceled)
+ {
+ // If the first (or any) caller can't be cancelled, we'll never get to zero: no point tracking.
+ // (in reality, all callers usually use the same path, so cancellation is usually "all" or "none")
+ _sharedCancellation = new();
+ SharedToken = _sharedCancellation.Token;
+ }
+ else
+ {
+ SharedToken = CancellationToken.None;
+ }
+ }
+
+ /// <summary>
+ /// Initializes a new instance of the <see cref="StampedeState"/> class using a fixed cancellation token.
+ /// </summary>
+ protected StampedeState(DefaultHybridCache cache, in StampedeKey key, CacheItem cacheItem, CancellationToken token)
+ {
+ _cache = cache;
+ _key = key;
+ _cacheItem = cacheItem;
+ SharedToken = token;
+ }
+
+#if !NETCOREAPP3_0_OR_GREATER
+ protected static readonly WaitCallback SharedWaitCallback = static obj => Unsafe.As<StampedeState>(obj!).Execute();
+#endif
+
+ protected DefaultHybridCache Cache => _cache;
+
+ public abstract void Execute();
+
+ protected int MaximumPayloadBytes => _cache.MaximumPayloadBytes;
+
+ public override string ToString() => Key.ToString();
+
+ public abstract void SetCanceled();
+
+ public int DebugCallerCount => _cacheItem.RefCount;
+
+ public abstract Type Type { get; }
+
+ public void CancelCaller()
+ {
+ // note that TryAddCaller has protections to avoid getting back from zero
+ if (_cacheItem.Release())
+ {
+ // we're the last to leave; turn off the lights
+ _sharedCancellation?.Cancel();
+ SetCanceled();
+ }
+ }
+
+ public bool TryAddCaller() => _cacheItem.TryReserve();
+ }
+
+ private void RemoveStampedeState(in StampedeKey key)
+ {
+ // see notes in SyncLock.cs
+ lock (GetPartitionedSyncLock(in key))
+ {
+ _ = _currentOperations.TryRemove(key, out _);
+ }
+ }
+}
diff --git a/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultHybridCache.StampedeStateT.cs b/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultHybridCache.StampedeStateT.cs
new file mode 100644
index 00000000000..4e45acae930
--- /dev/null
+++ b/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultHybridCache.StampedeStateT.cs
@@ -0,0 +1,372 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System;
+using System.Diagnostics;
+using System.Diagnostics.CodeAnalysis;
+using System.Threading;
+using System.Threading.Tasks;
+using static Microsoft.Extensions.Caching.Hybrid.Internal.DefaultHybridCache;
+
+namespace Microsoft.Extensions.Caching.Hybrid.Internal;
+
+internal partial class DefaultHybridCache
+{
+ internal sealed class StampedeState<TState, T> : StampedeState
+ {
+ private const HybridCacheEntryFlags FlagsDisableL1AndL2 = HybridCacheEntryFlags.DisableLocalCacheWrite | HybridCacheEntryFlags.DisableDistributedCacheWrite;
+
+ private readonly TaskCompletionSource<CacheItem<T>>? _result;
+ private TState? _state;
+ private Func<TState, CancellationToken, ValueTask<T>>? _underlying; // main data factory
+ private HybridCacheEntryOptions? _options;
+ private Task<T>? _sharedUnwrap; // allows multiple non-cancellable callers to share a single task (when no defensive copy needed)
+
+ // ONLY set the result, without any other side-effects
+ internal void SetResultDirect(CacheItem<T> value)
+ => _result?.TrySetResult(value);
+
+ public StampedeState(DefaultHybridCache cache, in StampedeKey key, bool canBeCanceled)
+ : base(cache, key, CacheItem<T>.Create(), canBeCanceled)
+ {
+ _result = new(TaskCreationOptions.RunContinuationsAsynchronously);
+ }
+
+ public StampedeState(DefaultHybridCache cache, in StampedeKey key, CancellationToken token)
+ : base(cache, key, CacheItem<T>.Create(), token)
+ {
+ // no TCS in this case - this is for SetValue only
+ }
+
+ public override Type Type => typeof(T);
+
+ public void QueueUserWorkItem(in TState state, Func<TState, CancellationToken, ValueTask<T>> underlying, HybridCacheEntryOptions? options)
+ {
+ Debug.Assert(_underlying is null, "should not already have factory field");
+ Debug.Assert(underlying is not null, "factory argument should be meaningful");
+
+ // initialize the callback state
+ _state = state;
+ _underlying = underlying;
+ _options = options;
+
+#if NETCOREAPP3_0_OR_GREATER
+ ThreadPool.UnsafeQueueUserWorkItem(this, false);
+#else
+ ThreadPool.UnsafeQueueUserWorkItem(SharedWaitCallback, this);
+#endif
+ }
+
+ [SuppressMessage("Resilience", "EA0014:The async method doesn't support cancellation", Justification = "Cancellation is handled separately via SharedToken")]
+ public Task ExecuteDirectAsync(in TState state, Func<TState, CancellationToken, ValueTask<T>> underlying, HybridCacheEntryOptions? options)
+ {
+ Debug.Assert(_underlying is null, "should not already have factory field");
+ Debug.Assert(underlying is not null, "factory argument should be meaningful");
+
+ // initialize the callback state
+ _state = state;
+ _underlying = underlying;
+ _options = options;
+
+ return BackgroundFetchAsync();
+ }
+
+ public override void Execute() => _ = BackgroundFetchAsync();
+
+ public override void SetCanceled() => _result?.TrySetCanceled(SharedToken);
+
+ [SuppressMessage("Usage", "VSTHRD003:Avoid awaiting foreign Tasks", Justification = "Custom task management")]
+ public ValueTask<T> JoinAsync(CancellationToken token)
+ {
+ // If the underlying has already completed, and/or our local token can't cancel: we
+ // can simply wrap the shared task; otherwise, we need our own cancellation state.
+ return token.CanBeCanceled && !Task.IsCompleted ? WithCancellationAsync(this, token) : UnwrapReservedAsync();
+
+ static async ValueTask<T> WithCancellationAsync(StampedeState<TState, T> stampede, CancellationToken token)
+ {
+ var cancelStub = new TaskCompletionSource<bool>(TaskCreationOptions.RunContinuationsAsynchronously);
+ using var reg = token.Register(static obj =>
+ {
+ _ = ((TaskCompletionSource<bool>)obj!).TrySetResult(true);
+ }, cancelStub);
+
+ CacheItem<T> result;
+ try
+ {
+ var first = await System.Threading.Tasks.Task.WhenAny(stampede.Task, cancelStub.Task).ConfigureAwait(false);
+ if (ReferenceEquals(first, cancelStub.Task))
+ {
+ // we expect this to throw, because otherwise we wouldn't have gotten here
+ token.ThrowIfCancellationRequested(); // get an appropriate exception
+ }
+
+ Debug.Assert(ReferenceEquals(first, stampede.Task), "should not be cancelled");
+
+ // this has already completed, but we'll get the stack nicely
+ result = await stampede.Task.ConfigureAwait(false);
+ }
+ catch
+ {
+ stampede.CancelCaller();
+ throw;
+ }
+
+ // outside the catch, so we know we only decrement one way or the other
+ return result.GetReservedValue();
+ }
+ }
+
+ [SuppressMessage("Maintainability", "CA1508:Avoid dead conditional code", Justification = "Reliability")]
+ public Task<CacheItem<T>> Task
+ {
+ get
+ {
+ Debug.Assert(_result is not null, "result should be assigned");
+ return _result is null ? InvalidAsync() : _result.Task;
+
+ static Task<CacheItem<T>> InvalidAsync() => System.Threading.Tasks.Task.FromException<CacheItem<T>>(
+ new InvalidOperationException("Task should not be accessed for non-shared instances"));
+ }
+ }
+
+ [SuppressMessage("Resilience", "EA0014:The async method doesn't support cancellation", Justification = "No cancellable operation")]
+ [SuppressMessage("Performance", "CA1849:Call async methods when in an async method", Justification = "Checked manual unwrap")]
+ [SuppressMessage("Usage", "VSTHRD003:Avoid awaiting foreign Tasks", Justification = "Checked manual unwrap")]
+ [SuppressMessage("Major Code Smell", "S1121:Assignments should not be made from within sub-expressions", Justification = "Unusual, but legit here")]
+ internal ValueTask<T> UnwrapReservedAsync()
+ {
+ var task = Task;
+#if NETCOREAPP2_0_OR_GREATER || NETSTANDARD2_1_OR_GREATER
+ if (task.IsCompletedSuccessfully)
+#else
+ if (task.Status == TaskStatus.RanToCompletion)
+#endif
+ {
+ return new(task.Result.GetReservedValue());
+ }
+
+ // if the type is immutable, callers can share the final step too (this may leave dangling
+ // reservation counters, but that's OK)
+ var result = ImmutableTypeCache<T>.IsImmutable ? (_sharedUnwrap ??= AwaitedAsync(Task)) : AwaitedAsync(Task);
+ return new(result);
+
+ static async Task<T> AwaitedAsync(Task<CacheItem<T>> task)
+ => (await task.ConfigureAwait(false)).GetReservedValue();
+ }
+
+ [DoesNotReturn]
+ private static CacheItem<T> ThrowUnexpectedCacheItem() => throw new InvalidOperationException("Unexpected cache item");
+
+ [SuppressMessage("Resilience", "EA0014:The async method doesn't support cancellation", Justification = "In this case the cancellation token is provided internally via SharedToken")]
+ [SuppressMessage("Design", "CA1031:Do not catch general exception types", Justification = "Exception is passed through to faulted task result")]
+ private async Task BackgroundFetchAsync()
+ {
+ try
+ {
+ // read from L2 if appropriate
+ if ((Key.Flags & HybridCacheEntryFlags.DisableDistributedCacheRead) == 0)
+ {
+ var result = await Cache.GetFromL2Async(Key.Key, SharedToken).ConfigureAwait(false);
+
+ if (result.Array is not null)
+ {
+ SetResultAndRecycleIfAppropriate(ref result);
+ return;
+ }
+ }
+
+ // nothing from L2; invoke the underlying data store
+ if ((Key.Flags & HybridCacheEntryFlags.DisableUnderlyingData) == 0)
+ {
+ // invoke the callback supplied by the caller
+ T newValue = await _underlying!(_state!, SharedToken).ConfigureAwait(false);
+
+ // If we're writing this value *anywhere*, we're going to need to serialize; this is obvious
+ // in the case of L2, but we also need it for L1, because MemoryCache might be enforcing
+ // SizeLimit (we can't know - it is an abstraction), and for *that* we need to know the item size.
+ // Likewise, if we're writing to a MutableCacheItem, we'll be serializing *anyway* for the payload.
+ //
+ // Rephrasing that: the only scenario in which we *do not* need to serialize is if:
+ // - it is an ImmutableCacheItem
+ // - we're writing neither to L1 nor L2
+
+ CacheItem cacheItem = CacheItem;
+ bool skipSerialize = cacheItem is ImmutableCacheItem<T> && (Key.Flags & FlagsDisableL1AndL2) == FlagsDisableL1AndL2;
+
+ if (skipSerialize)
+ {
+ SetImmutableResultWithoutSerialize(newValue);
+ }
+ else if (cacheItem.TryReserve())
+ {
+ // ^^^ The first thing we need to do is make sure we're not getting into a thread race over buffer disposal.
+ // In particular, if this cache item is somehow so short-lived that the buffers would be released *before* we're
+ // done writing them to L2, which happens *after* we've provided the value to consumers.
+ RecyclableArrayBufferWriter<byte> writer = RecyclableArrayBufferWriter<byte>.Create(MaximumPayloadBytes); // note this lifetime spans the SetL2Async
+ IHybridCacheSerializer<T> serializer = Cache.GetSerializer<T>();
+ serializer.Serialize(newValue, writer);
+ BufferChunk buffer = new(writer.DetachCommitted(out var length), length, returnToPool: true); // remove buffer ownership from the writer
+ writer.Dispose(); // we're done with the writer
+
+ // protect "buffer" (this is why we "reserved") for writing to L2 if needed; SetResultPreSerialized
+ // *may* (depending on context) claim this buffer, in which case "bufferToRelease" gets reset, and
+ // the final RecycleIfAppropriate() is a no-op; however, the buffer is valid in either event,
+ // (with TryReserve above guaranteeing that we aren't in a race condition).
+ BufferChunk bufferToRelease = buffer;
+
+ // and since "bufferToRelease" is the thing that will be returned at some point, we can make it explicit
+ // that we do not need or want "buffer" to do any recycling (they're the same memory)
+ buffer = buffer.DoNotReturnToPool();
+
+ // set the underlying result for this operation (includes L1 write if appropriate)
+ SetResultPreSerialized(newValue, ref bufferToRelease, serializer);
+
+ // Note that at this point we've already released most or all of the waiting callers. Everything
+ // from this point onwards happens in the background, from the perspective of the calling code.
+
+ // Write to L2 if appropriate.
+ if ((Key.Flags & HybridCacheEntryFlags.DisableDistributedCacheWrite) == 0)
+ {
+ // We already have the payload serialized, so this is trivial to do.
+ await Cache.SetL2Async(Key.Key, in buffer, _options, SharedToken).ConfigureAwait(false);
+ }
+
+ // Release our hook on the CacheItem (only really important for "mutable").
+ _ = cacheItem.Release();
+
+ // Finally, recycle whatever was left over from SetResultPreSerialized; using "bufferToRelease"
+ // here is NOT a typo; if SetResultPreSerialized left this value alone (immutable), then
+ // this is our recycle step; if SetResultPreSerialized transferred ownership to the (mutable)
+ // CacheItem, then this becomes a no-op, and the buffer only gets fully recycled when the
+ // CacheItem itself is fully clear.
+ bufferToRelease.RecycleIfAppropriate();
+ }
+ else
+ {
+ throw new InvalidOperationException("Internal HybridCache failure: unable to reserve cache item to assign result");
+ }
+ }
+ else
+ {
+ // can't read from data store; implies we shouldn't write
+ // back to anywhere else, either
+ SetDefaultResult();
+ }
+ }
+ catch (Exception ex)
+ {
+ SetException(ex);
+ }
+ }
+
+ private void SetException(Exception ex)
+ {
+ if (_result is not null)
+ {
+ Cache.RemoveStampedeState(in Key);
+ _ = _result.TrySetException(ex);
+ }
+ }
+
+ private void SetDefaultResult()
+ {
+ // note we don't store this dummy result in L1 or L2
+ if (_result is not null)
+ {
+ Cache.RemoveStampedeState(in Key);
+ _ = _result.TrySetResult(ImmutableCacheItem<T>.GetReservedShared());
+ }
+ }
+
+ private void SetResultAndRecycleIfAppropriate(ref BufferChunk value)
+ {
+ // set a result from L2 cache
+ Debug.Assert(value.Array is not null, "expected buffer");
+
+ IHybridCacheSerializer<T> serializer = Cache.GetSerializer<T>();
+ CacheItem<T> cacheItem;
+ switch (CacheItem)
+ {
+ case ImmutableCacheItem<T> immutable:
+ // deserialize; and store object; buffer can be recycled now
+ immutable.SetValue(serializer.Deserialize(new(value.Array!, 0, value.Length)), value.Length);
+ value.RecycleIfAppropriate();
+ cacheItem = immutable;
+ break;
+ case MutableCacheItem<T> mutable:
+ // use the buffer directly as the backing in the cache-item; do *not* recycle now
+ mutable.SetValue(ref value, serializer);
+ mutable.DebugOnlyTrackBuffer(Cache);
+ cacheItem = mutable;
+ break;
+ default:
+ cacheItem = ThrowUnexpectedCacheItem();
+ break;
+ }
+
+ SetResult(cacheItem);
+ }
+
+ private void SetImmutableResultWithoutSerialize(T value)
+ {
+ Debug.Assert((Key.Flags & FlagsDisableL1AndL2) == FlagsDisableL1AndL2, "Only expected if L1+L2 disabled");
+
+ // set a result from a value we calculated directly
+ CacheItem<T> cacheItem;
+ switch (CacheItem)
+ {
+ case ImmutableCacheItem<T> immutable:
+ // no serialize needed
+ immutable.SetValue(value, size: -1);
+ cacheItem = immutable;
+ break;
+ default:
+ cacheItem = ThrowUnexpectedCacheItem();
+ break;
+ }
+
+ SetResult(cacheItem);
+ }
+
+ private void SetResultPreSerialized(T value, ref BufferChunk buffer, IHybridCacheSerializer<T> serializer)
+ {
+ // set a result from a value we calculated directly that
+ // has ALREADY BEEN SERIALIZED (we can optionally consume this buffer)
+ CacheItem<T> cacheItem;
+ switch (CacheItem)
+ {
+ case ImmutableCacheItem<T> immutable:
+ // no serialize needed
+ immutable.SetValue(value, size: buffer.Length);
+ cacheItem = immutable;
+
+ // (but leave the buffer alone)
+ break;
+ case MutableCacheItem<T> mutable:
+ mutable.SetValue(ref buffer, serializer);
+ mutable.DebugOnlyTrackBuffer(Cache);
+ cacheItem = mutable;
+ break;
+ default:
+ cacheItem = ThrowUnexpectedCacheItem();
+ break;
+ }
+
+ SetResult(cacheItem);
+ }
+
+ private void SetResult(CacheItem<T> value)
+ {
+ if ((Key.Flags & HybridCacheEntryFlags.DisableLocalCacheWrite) == 0)
+ {
+ Cache.SetL1(Key.Key, value, _options); // we can do this without a TCS, for SetValue
+ }
+
+ if (_result is not null)
+ {
+ Cache.RemoveStampedeState(in Key);
+ _ = _result.TrySetResult(value);
+ }
+ }
+ }
+}
diff --git a/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultHybridCache.SyncLock.cs b/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultHybridCache.SyncLock.cs
new file mode 100644
index 00000000000..4672818d056
--- /dev/null
+++ b/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultHybridCache.SyncLock.cs
@@ -0,0 +1,44 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+namespace Microsoft.Extensions.Caching.Hybrid.Internal;
+
+internal partial class DefaultHybridCache
+{
+ // HybridCache's stampede protection requires some level of synchronization to avoid unnecessary runs
+ // of the underlying data fetch; this is *minimized* by the use of double-checked locking and
+ // interlocked join (adding a new request to an existing execution), but: that would leave a race
+ // condition where the *remove* step of the stampede would be in a race with the *add new* step; the
+ // *add new* step is inside a lock, but we need the *remove* step to share that lock, to avoid
+ // the race. We deal with that by taking the same lock during remove, but *that* means we're locking
+ // on all executions.
+ //
+ // To minimize lock contention, we will therefore use partitioning of the lock-token, by using the
+ // low 3 bits of the hash-code (which we calculate eagerly only once, so: already known). This gives
+ // us a fast way to split contention by 8, almost an order-of-magnitude, which is sufficient. We *could*
+ // use an array for this, but: for directness, let's inline it instead (avoiding bounds-checks,
+ // an extra layer of dereferencing, and the allocation; I will acknowledge these are minuscule, but:
+ // it costs us nothing to do)
+
+ private readonly object _syncLock0 = new();
+ private readonly object _syncLock1 = new();
+ private readonly object _syncLock2 = new();
+ private readonly object _syncLock3 = new();
+ private readonly object _syncLock4 = new();
+ private readonly object _syncLock5 = new();
+ private readonly object _syncLock6 = new();
+ private readonly object _syncLock7 = new();
+
+ [System.Diagnostics.CodeAnalysis.SuppressMessage("Major Code Smell", "S109:Magic numbers should not be used", Justification = "Trivial low 3 bits")]
+ internal object GetPartitionedSyncLock(in StampedeKey key) => (key.HashCode & 0b111) switch // generate 8 partitions using the low 3 bits
+ {
+ 0 => _syncLock0,
+ 1 => _syncLock1,
+ 2 => _syncLock2,
+ 3 => _syncLock3,
+ 4 => _syncLock4,
+ 5 => _syncLock5,
+ 6 => _syncLock6,
+ _ => _syncLock7,
+ };
+}
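
To make the partitioning above concrete, here is a minimal standalone sketch of the same idea (names are hypothetical; the shipped code inlines eight fields rather than using an array, to avoid the bounds check and extra dereference):

```csharp
// Sketch only: hash-partitioned locking, assuming the hash is computed once and cached
// (as StampedeKey does with its eagerly computed hash code).
internal sealed class PartitionedSyncLock
{
    private readonly object[] _locks = { new(), new(), new(), new(), new(), new(), new(), new() };

    // The low 3 bits of the precomputed hash select one of 8 lock objects,
    // splitting contention across independent monitors.
    public object For(int hashCode) => _locks[hashCode & 0b111];
}

// usage:
// lock (syncLocks.For(key.HashCode))
// {
//     // add or remove the stampede entry for "key"
// }
```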
diff --git a/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultHybridCache.cs b/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultHybridCache.cs
new file mode 100644
index 00000000000..c789e7c6652
--- /dev/null
+++ b/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultHybridCache.cs
@@ -0,0 +1,170 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Runtime.CompilerServices;
+using System.Threading;
+using System.Threading.Tasks;
+using Microsoft.Extensions.Caching.Distributed;
+using Microsoft.Extensions.Caching.Memory;
+using Microsoft.Extensions.DependencyInjection;
+using Microsoft.Extensions.Logging;
+using Microsoft.Extensions.Logging.Abstractions;
+using Microsoft.Extensions.Options;
+using Microsoft.Shared.Diagnostics;
+
+namespace Microsoft.Extensions.Caching.Hybrid.Internal;
+
+///
+/// The inbuilt implementation of , as registered via .
+///
+internal sealed partial class DefaultHybridCache : HybridCache
+{
+ [System.Diagnostics.CodeAnalysis.SuppressMessage("Style", "IDE0032:Use auto property", Justification = "Keep usage explicit")]
+ private readonly IDistributedCache? _backendCache;
+ [System.Diagnostics.CodeAnalysis.SuppressMessage("Style", "IDE0032:Use auto property", Justification = "Keep usage explicit")]
+ private readonly IMemoryCache _localCache;
+ private readonly IServiceProvider _services; // we can't resolve per-type serializers until we see each T
+ private readonly IHybridCacheSerializerFactory[] _serializerFactories;
+ [System.Diagnostics.CodeAnalysis.SuppressMessage("Style", "IDE0032:Use auto property", Justification = "Keep usage explicit")]
+ private readonly HybridCacheOptions _options;
+ private readonly ILogger _logger;
+ private readonly CacheFeatures _features; // used to avoid constant type-testing
+
+ private readonly HybridCacheEntryFlags _hardFlags; // *always* present (for example, because no L2)
+ private readonly HybridCacheEntryFlags _defaultFlags; // note this already includes hardFlags
+ private readonly TimeSpan _defaultExpiration;
+ private readonly TimeSpan _defaultLocalCacheExpiration;
+
+ private readonly DistributedCacheEntryOptions _defaultDistributedCacheExpiration;
+
+ [Flags]
+ internal enum CacheFeatures
+ {
+ None = 0,
+ BackendCache = 1 << 0,
+ BackendBuffers = 1 << 1,
+ }
+
+ internal CacheFeatures GetFeatures() => _features;
+
+ // used to restrict features in test suite
+ internal void DebugRemoveFeatures(CacheFeatures features) => Unsafe.AsRef(in _features) &= ~features;
+
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ private CacheFeatures GetFeatures(CacheFeatures mask) => _features & mask;
+
+ public DefaultHybridCache(IOptions<HybridCacheOptions> options, IServiceProvider services)
+ {
+ _services = Throw.IfNull(services);
+ _localCache = services.GetRequiredService<IMemoryCache>();
+ _options = options.Value;
+ _logger = services.GetService<ILoggerFactory>()?.CreateLogger(typeof(HybridCache)) ?? NullLogger.Instance;
+
+ _backendCache = services.GetService<IDistributedCache>(); // note optional
+
+ // ignore L2 if it is really just the same L1, wrapped
+ // (note not just an "is" test; if someone has a custom subclass, who knows what it does?)
+ if (_backendCache is not null
+ && _backendCache.GetType() == typeof(MemoryDistributedCache)
+ && _localCache.GetType() == typeof(MemoryCache))
+ {
+ _backendCache = null;
+ }
+
+ // perform type-tests on the backend once only
+ _features |= _backendCache switch
+ {
+ IBufferDistributedCache => CacheFeatures.BackendCache | CacheFeatures.BackendBuffers,
+ not null => CacheFeatures.BackendCache,
+ _ => CacheFeatures.None
+ };
+
+ // When resolving serializers via the factory API, we will want the *last* instance,
+ // i.e. "last added wins"; we can optimize by reversing the array ahead of time, and
+ // taking the first match
+ var factories = services.GetServices<IHybridCacheSerializerFactory>().ToArray();
+ Array.Reverse(factories);
+ _serializerFactories = factories;
+
+ MaximumPayloadBytes = checked((int)_options.MaximumPayloadBytes); // for now hard-limit to 2GiB
+
+ var defaultEntryOptions = _options.DefaultEntryOptions;
+
+ if (_backendCache is null)
+ {
+ _hardFlags |= HybridCacheEntryFlags.DisableDistributedCache;
+ }
+
+ _defaultFlags = (defaultEntryOptions?.Flags ?? HybridCacheEntryFlags.None) | _hardFlags;
+ _defaultExpiration = defaultEntryOptions?.Expiration ?? TimeSpan.FromMinutes(5);
+ _defaultLocalCacheExpiration = defaultEntryOptions?.LocalCacheExpiration ?? TimeSpan.FromMinutes(1);
+ _defaultDistributedCacheExpiration = new DistributedCacheEntryOptions { AbsoluteExpirationRelativeToNow = _defaultExpiration };
+ }
+
+ internal IDistributedCache? BackendCache => _backendCache;
+ internal IMemoryCache LocalCache => _localCache;
+
+ internal HybridCacheOptions Options => _options;
+
+ public override ValueTask<T> GetOrCreateAsync<TState, T>(string key, TState state, Func<TState, CancellationToken, ValueTask<T>> underlyingDataCallback,
+ HybridCacheEntryOptions? options = null, IEnumerable<string>? tags = null, CancellationToken cancellationToken = default)
+ {
+ var canBeCanceled = cancellationToken.CanBeCanceled;
+ if (canBeCanceled)
+ {
+ cancellationToken.ThrowIfCancellationRequested();
+ }
+
+ var flags = GetEffectiveFlags(options);
+ if ((flags & HybridCacheEntryFlags.DisableLocalCacheRead) == 0 && _localCache.TryGetValue(key, out var untyped)
+ && untyped is CacheItem<T> typed && typed.TryGetValue(out var value))
+ {
+ // short-circuit
+ return new(value);
+ }
+
+ if (GetOrCreateStampedeState<TState, T>(key, flags, out var stampede, canBeCanceled))
+ {
+ // new query; we're responsible for making it happen
+ if (canBeCanceled)
+ {
+ // *we* might cancel, but someone else might be depending on the result; start the
+ // work independently, then we'll join with the outcome
+ stampede.QueueUserWorkItem(in state, underlyingDataCallback, options);
+ }
+ else
+ {
+ // we're going to run to completion; no need to get complicated
+ _ = stampede.ExecuteDirectAsync(in state, underlyingDataCallback, options); // this larger task includes L2 write etc
+ return stampede.UnwrapReservedAsync();
+ }
+ }
+
+ return stampede.JoinAsync(cancellationToken);
+ }
+
+ public override ValueTask RemoveAsync(string key, CancellationToken token = default)
+ {
+ _localCache.Remove(key);
+ return _backendCache is null ? default : new(_backendCache.RemoveAsync(key, token));
+ }
+
+ public override ValueTask RemoveByTagAsync(string tag, CancellationToken token = default)
+ => default; // tags not yet implemented
+
+ public override ValueTask SetAsync<T>(string key, T value, HybridCacheEntryOptions? options = null, IEnumerable<string>? tags = null, CancellationToken token = default)
+ {
+ // since we're forcing a write: disable L1+L2 read; we'll use a direct pass-thru of the value as the callback, to reuse all the code
+ // note also that stampede token is not shared with anyone else
+ var flags = GetEffectiveFlags(options) | (HybridCacheEntryFlags.DisableLocalCacheRead | HybridCacheEntryFlags.DisableDistributedCacheRead);
+ var state = new StampedeState<T, T>(this, new StampedeKey(key, flags), token);
+ return new(state.ExecuteDirectAsync(value, static (state, _) => new(state), options)); // note this spans L2 write etc
+ }
+
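+ // Note: "options?.Flags | _hardFlags" is a lifted "|": when options (or options.Flags) is null the
+ // result is null, so "??" falls back to _defaultFlags, which already includes _hardFlags.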
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ private HybridCacheEntryFlags GetEffectiveFlags(HybridCacheEntryOptions? options)
+ => (options?.Flags | _hardFlags) ?? _defaultFlags;
+}
diff --git a/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultJsonSerializerFactory.cs b/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultJsonSerializerFactory.cs
new file mode 100644
index 00000000000..63ce186e1ec
--- /dev/null
+++ b/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/DefaultJsonSerializerFactory.cs
@@ -0,0 +1,42 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System.Buffers;
+using System.Diagnostics.CodeAnalysis;
+using System.Text.Json;
+
+namespace Microsoft.Extensions.Caching.Hybrid.Internal;
+
+internal sealed class DefaultJsonSerializerFactory : IHybridCacheSerializerFactory
+{
+ public bool TryCreateSerializer<T>([NotNullWhen(true)] out IHybridCacheSerializer<T>? serializer)
+ {
+ // no restriction
+ serializer = new DefaultJsonSerializer<T>();
+ return true;
+ }
+
+ internal sealed class DefaultJsonSerializer<T> : IHybridCacheSerializer<T>
+ {
+ T IHybridCacheSerializer<T>.Deserialize(ReadOnlySequence<byte> source)
+ {
+ var reader = new Utf8JsonReader(source);
+#pragma warning disable IDE0079 // unnecessary suppression: TFM-dependent
+#pragma warning disable IL2026, IL3050 // AOT bits
+ return JsonSerializer.Deserialize<T>(ref reader)!;
+#pragma warning restore IL2026, IL3050
+#pragma warning restore IDE0079
+ }
+
+ void IHybridCacheSerializer<T>.Serialize(T value, IBufferWriter<byte> target)
+ {
+ using var writer = new Utf8JsonWriter(target);
+#pragma warning disable IDE0079 // unnecessary suppression: TFM-dependent
+#pragma warning disable IL2026, IL3050 // AOT bits
+ JsonSerializer.Serialize(writer, value, JsonSerializerOptions.Default);
+#pragma warning restore IL2026, IL3050
+#pragma warning restore IDE0079
+ }
+ }
+
+}
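
The factory above volunteers for every `T`; a factory can instead opt in per type by returning `false` and leaving the serializer `null`. A hedged sketch under assumed names (`Widget` and `WidgetSerializer` are hypothetical stand-ins):

```csharp
using System.Diagnostics.CodeAnalysis;

// Sketch only: a serializer factory that handles exactly one type and defers otherwise.
internal sealed class WidgetSerializerFactory : IHybridCacheSerializerFactory
{
    public bool TryCreateSerializer<T>([NotNullWhen(true)] out IHybridCacheSerializer<T>? serializer)
    {
        if (typeof(T) == typeof(Widget))
        {
            // double-cast via object: the compiler can't see that T == Widget on this path
            serializer = (IHybridCacheSerializer<T>)(object)new WidgetSerializer();
            return true;
        }

        serializer = null; // defer to the next factory (ultimately the JSON default above)
        return false;
    }
}
```

Because `DefaultHybridCache` reverses the registered factories and takes the first match ("last added wins"), a factory registered after the default wins for the types it claims.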
diff --git a/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/HybridCacheBuilder.cs b/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/HybridCacheBuilder.cs
new file mode 100644
index 00000000000..814bd4c84a8
--- /dev/null
+++ b/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/HybridCacheBuilder.cs
@@ -0,0 +1,16 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using Microsoft.Extensions.DependencyInjection;
+
+namespace Microsoft.Extensions.Caching.Hybrid.Internal;
+
+internal sealed class HybridCacheBuilder : IHybridCacheBuilder
+{
+ public HybridCacheBuilder(IServiceCollection services)
+ {
+ Services = services;
+ }
+
+ public IServiceCollection Services { get; }
+}
diff --git a/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/ImmutableTypeCache.T.cs b/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/ImmutableTypeCache.T.cs
new file mode 100644
index 00000000000..51566cd0e68
--- /dev/null
+++ b/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/ImmutableTypeCache.T.cs
@@ -0,0 +1,15 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+namespace Microsoft.Extensions.Caching.Hybrid.Internal;
+
+///
+/// Simple memoize storage for whether the type is blittable, in particular to avoid repeated runtime tests
+/// in down-level TFMs where this is trickier to determine. The JIT is very effective at accessing this memoized value.
+///
+/// The type being processed.
+ internal static class ImmutableTypeCache<T> // lazy memoize; T doesn't change per cache instance
+{
+ // note for blittable types: a pure struct will be a full copy every time - nothing shared to mutate
+ public static readonly bool IsImmutable = (typeof(T).IsValueType && ImmutableTypeCache.IsBlittable<T>()) || ImmutableTypeCache.IsTypeImmutable(typeof(T));
+}
diff --git a/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/ImmutableTypeCache.cs b/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/ImmutableTypeCache.cs
new file mode 100644
index 00000000000..87b86e56cf1
--- /dev/null
+++ b/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/ImmutableTypeCache.cs
@@ -0,0 +1,79 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System;
+using System.ComponentModel;
+using System.Reflection;
+
+#if NETCOREAPP2_0_OR_GREATER || NETSTANDARD2_1_OR_GREATER
+using System.Runtime.CompilerServices;
+#else
+using System.Runtime.InteropServices;
+using System.Runtime.Serialization;
+#endif
+
+namespace Microsoft.Extensions.Caching.Hybrid.Internal;
+
+///
+/// Utility type for determining whether a type is blittable; the logic for this is very
+/// TFM dependent.
+///
+internal static class ImmutableTypeCache
+{
+ internal static bool IsBlittable<T>() // minimize the generic portion (twinned with IsTypeImmutable)
+ {
+#if NETCOREAPP2_0_OR_GREATER || NETSTANDARD2_1_OR_GREATER
+ return !RuntimeHelpers.IsReferenceOrContainsReferences<T>();
+#else
+ // down-level: only blittable types can be pinned
+ try
+ {
+ // get a typed, zeroed, non-null boxed instance of the appropriate type
+ // (can't use (object)default(T), as that would box to null for nullable types)
+ var obj = FormatterServices.GetUninitializedObject(Nullable.GetUnderlyingType(typeof(T)) ?? typeof(T));
+ GCHandle.Alloc(obj, GCHandleType.Pinned).Free();
+ return true;
+ }
+#pragma warning disable CA1031 // Do not catch general exception types: interpret any failure here as "nope"
+ catch
+ {
+ return false;
+ }
+#pragma warning restore CA1031
+
+#endif
+ }
+
+ [System.Diagnostics.CodeAnalysis.SuppressMessage("Blocker Code Smell", "S2178:Short-circuit logic should be used in boolean contexts",
+ Justification = "Non-short-circuiting intentional to remove unnecessary branch")]
+ internal static bool IsTypeImmutable(Type type)
+ {
+ // check for known types
+ if (type == typeof(string))
+ {
+ return true;
+ }
+
+ if (type.IsValueType)
+ {
+ // switch from Foo? to Foo if necessary
+ if (Nullable.GetUnderlyingType(type) is { } nullable)
+ {
+ type = nullable;
+ }
+ }
+
+ if (type.IsValueType || (type.IsClass & type.IsSealed))
+ {
+ // check for [ImmutableObject(true)]; note we're looking at this as a statement about
+ // the overall immutability; for example, a type could contain a private int[] field,
+ // where the field is mutable and the array is mutable; but if the type is annotated:
+ // we're trusting that the API and use-case is such that the type is immutable
+ return type.GetCustomAttribute<ImmutableObjectAttribute>() is { Immutable: true };
+ }
+
+ // don't trust interfaces and non-sealed types; we might have any concrete
+ // type that has different behaviour
+ return false;
+ }
+}
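
Per `IsTypeImmutable` above, a *sealed* class annotated `[ImmutableObject(true)]` is trusted as immutable even though the runtime cannot prove it. A small sketch of what that opt-in looks like (the type is hypothetical):

```csharp
using System.ComponentModel;

// Sketch only: a sealed type opting in to immutable treatment.
[ImmutableObject(true)]
public sealed class Money
{
    public Money(decimal amount, string currency)
    {
        Amount = amount;
        Currency = currency;
    }

    public decimal Amount { get; }
    public string Currency { get; }
}

// ImmutableTypeCache<Money>.IsImmutable then evaluates to true, so the cache can hand
// out a single shared instance instead of a per-call defensive copy.
```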
diff --git a/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/InbuiltTypeSerializer.cs b/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/InbuiltTypeSerializer.cs
new file mode 100644
index 00000000000..3ef26341433
--- /dev/null
+++ b/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/InbuiltTypeSerializer.cs
@@ -0,0 +1,58 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System.Buffers;
+using System.Text;
+
+#if !NET5_0_OR_GREATER
+using System;
+using System.Diagnostics;
+using System.Runtime.InteropServices;
+#endif
+
+namespace Microsoft.Extensions.Caching.Hybrid.Internal;
+
+ internal sealed class InbuiltTypeSerializer : IHybridCacheSerializer<string>, IHybridCacheSerializer<byte[]>
+{
+ public static InbuiltTypeSerializer Instance { get; } = new();
+
+ string IHybridCacheSerializer<string>.Deserialize(ReadOnlySequence<byte> source)
+ {
+#if NET5_0_OR_GREATER
+ return Encoding.UTF8.GetString(source);
+#else
+ if (source.IsSingleSegment && MemoryMarshal.TryGetArray(source.First, out var segment))
+ {
+ // we can use the existing single chunk as-is
+ return Encoding.UTF8.GetString(segment.Array, segment.Offset, segment.Count);
+ }
+
+ var length = checked((int)source.Length);
+ var oversized = ArrayPool<byte>.Shared.Rent(length);
+ source.CopyTo(oversized);
+ var s = Encoding.UTF8.GetString(oversized, 0, length);
+ ArrayPool<byte>.Shared.Return(oversized);
+ return s;
+#endif
+ }
+
+ void IHybridCacheSerializer<string>.Serialize(string value, IBufferWriter<byte> target)
+ {
+#if NET5_0_OR_GREATER
+ Encoding.UTF8.GetBytes(value, target);
+#else
+ var length = Encoding.UTF8.GetByteCount(value);
+ var oversized = ArrayPool<byte>.Shared.Rent(length);
+ var actual = Encoding.UTF8.GetBytes(value, 0, value.Length, oversized, 0);
+ Debug.Assert(actual == length, "encoding length mismatch");
+ target.Write(new(oversized, 0, length));
+ ArrayPool<byte>.Shared.Return(oversized);
+#endif
+ }
+
+ byte[] IHybridCacheSerializer<byte[]>.Deserialize(ReadOnlySequence<byte> source)
+ => source.ToArray();
+
+ void IHybridCacheSerializer<byte[]>.Serialize(byte[] value, IBufferWriter<byte> target)
+ => target.Write(value);
+}
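
A quick round-trip through the string serializer shows the buffer-oriented contract; a sketch, with `ArrayBufferWriter<byte>` standing in for the pooled writer used elsewhere in this change:

```csharp
using System.Buffers;
using System.Diagnostics;

// Sketch only: UTF-8 round-trip through the inbuilt string serializer.
IHybridCacheSerializer<string> serializer = InbuiltTypeSerializer.Instance;

var target = new ArrayBufferWriter<byte>();
serializer.Serialize("héllo", target); // UTF-8 encode into the IBufferWriter<byte>

string decoded = serializer.Deserialize(new ReadOnlySequence<byte>(target.WrittenMemory));
Debug.Assert(decoded == "héllo", "round-trip should preserve the value");
```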
diff --git a/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/RecyclableArrayBufferWriter.cs b/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/RecyclableArrayBufferWriter.cs
new file mode 100644
index 00000000000..2f2da2c7019
--- /dev/null
+++ b/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/RecyclableArrayBufferWriter.cs
@@ -0,0 +1,202 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System;
+using System.Buffers;
+using System.Diagnostics;
+using System.Threading;
+using Microsoft.Shared.Diagnostics;
+
+namespace Microsoft.Extensions.Caching.Hybrid.Internal;
+
+// this is effectively a cut-down re-implementation of ArrayBufferWriter
+// from https://github.com/dotnet/runtime/blob/6cd9bf1937c3b4d2f7304a6c534aacde58a202b6/src/libraries/Common/src/System/Buffers/ArrayBufferWriter.cs
+// except it uses the array pool for allocations
+ internal sealed class RecyclableArrayBufferWriter<T> : IBufferWriter<T>, IDisposable
+{
+ // Usage note: *normally* you might want to use "using" for this, and that is fine
+ // however, caution should be exercised in exception scenarios where we don't 100%
+ // know that the caller has stopped touching the buffer; in particular, this means
+ // scenarios involving a combination of external code and (for example) "async".
+ // In those cases, it may be preferable to manually dispose in the success case,
+ // and just drop the buffers in the failure case, i.e. instead of:
+ //
+ // using (writer)
+ // { DoStuff(); }
+ //
+ // simply:
+ //
+ // DoStuff();
+ // writer.Dispose();
+ //
+ // This does not represent a problem, and is consistent with many ArrayPool use-cases.
+
+ // Copy of Array.MaxLength.
+ // Used by projects targeting .NET Framework.
+ [System.Diagnostics.CodeAnalysis.SuppressMessage("Major Code Smell", "S125:Sections of code should not be commented out", Justification = "Usage example, please retain")]
+ private const int ArrayMaxLength = 0x7FFFFFC7;
+
+ private const int DefaultInitialBufferSize = 256;
+
+ private T[] _buffer;
+ [System.Diagnostics.CodeAnalysis.SuppressMessage("Style", "IDE0032:Use auto property", Justification = "It is preferable to keep this usage explicit")]
+ private int _index;
+ private int _maxLength;
+
+ public int CommittedBytes => _index;
+ public int FreeCapacity => _buffer.Length - _index;
+
+ private static RecyclableArrayBufferWriter<T>? _spare;
+ public static RecyclableArrayBufferWriter<T> Create(int maxLength)
+ {
+ var obj = Interlocked.Exchange(ref _spare, null) ?? new();
+ Debug.Assert(obj._index == 0, "index should be zero initially");
+ obj._maxLength = maxLength;
+ return obj;
+ }
+
+ private RecyclableArrayBufferWriter()
+ {
+ _buffer = [];
+ _index = 0;
+ _maxLength = int.MaxValue;
+ }
+
+ public void Dispose()
+ {
+ // attempt to reuse everything via "spare"; if that isn't possible,
+ // recycle the buffers instead
+ _index = 0;
+ if (Interlocked.CompareExchange(ref _spare, this, null) != null)
+ {
+ var tmp = _buffer;
+ _buffer = [];
+ if (tmp.Length != 0)
+ {
+ ArrayPool<T>.Shared.Return(tmp);
+ }
+ }
+ }
+
+ [System.Diagnostics.CodeAnalysis.SuppressMessage("Major Code Smell", "S3928:Parameter names used into ArgumentException constructors should match an existing one ",
+ Justification = "False positive; parameter exists")]
+ public void Advance(int count)
+ {
+ _ = Throw.IfLessThan(count, 0);
+
+ if (_index > _buffer.Length - count)
+ {
+ ThrowCount();
+ }
+
+ if (_index + count > _maxLength)
+ {
+ ThrowQuota();
+ }
+
+ _index += count;
+
+ static void ThrowCount()
+ => throw new ArgumentOutOfRangeException(nameof(count));
+
+ static void ThrowQuota()
+ => throw new InvalidOperationException("Max length exceeded");
+ }
+
+ public void ResetInPlace()
+ {
+ // resets the writer *without* resetting the buffer.
+ // the existing memory should be considered "gone"
+ // (to claim the buffer instead, use DetachCommitted)
+ _index = 0;
+ }
+
+ public ReadOnlyMemory<T> GetCommittedMemory() => new(_buffer, 0, _index); // could also directly expose a ReadOnlySpan<T> if useful
+
+ public Memory<T> GetMemory(int sizeHint = 0)
+ {
+ CheckAndResizeBuffer(sizeHint);
+ Debug.Assert(_buffer.Length > _index, "should have some space");
+ return _buffer.AsMemory(_index);
+ }
+
+ public Span<T> GetSpan(int sizeHint = 0)
+ {
+ CheckAndResizeBuffer(sizeHint);
+ Debug.Assert(_buffer.Length > _index, "should have some space");
+ return _buffer.AsSpan(_index);
+ }
+
+ // create a standalone isolated copy of the buffer
+ public T[] ToArray() => _buffer.AsSpan(0, _index).ToArray();
+
+ /// <summary>
+ /// Disconnect the current buffer so that we can store it without it being recycled.
+ /// </summary>
+ internal T[] DetachCommitted(out int length)
+ {
+ T[] tmp = _index == 0 ? [] : _buffer;
+ length = _index;
+
+ _buffer = [];
+ _index = 0;
+
+ return tmp;
+ }
+
+ internal T[] GetBuffer(out int length)
+ {
+ length = _index;
+ return _index == 0 ? [] : _buffer;
+ }
+
+ private void CheckAndResizeBuffer(int sizeHint)
+ {
+ if (sizeHint <= 0)
+ {
+ sizeHint = 1;
+ }
+
+ if (sizeHint > FreeCapacity)
+ {
+ var currentLength = _buffer.Length;
+
+ // Attempt to grow by the larger of the sizeHint and double the current size.
+ var growBy = Math.Max(sizeHint, currentLength);
+
+ if (currentLength == 0)
+ {
+ growBy = Math.Max(growBy, DefaultInitialBufferSize);
+ }
+
+ var newSize = currentLength + growBy;
+
+ if ((uint)newSize > int.MaxValue)
+ {
+ // Attempt to grow to ArrayMaxLength.
+ var needed = (uint)(currentLength - FreeCapacity + sizeHint);
+ Debug.Assert(needed > currentLength, "should need to grow");
+
+ if (needed > ArrayMaxLength)
+ {
+ ThrowOutOfMemoryException();
+ }
+
+ newSize = ArrayMaxLength;
+ }
+
+ // resize the backing buffer
+ var oldArray = _buffer;
+ _buffer = ArrayPool<T>.Shared.Rent(newSize);
+ oldArray.AsSpan(0, _index).CopyTo(_buffer);
+ if (oldArray.Length != 0)
+ {
+ ArrayPool<T>.Shared.Return(oldArray);
+ }
+ }
+
+ Debug.Assert(FreeCapacity > 0 && FreeCapacity >= sizeHint, "should be space");
+
+ static void ThrowOutOfMemoryException() => throw new InvalidOperationException("Unable to grow buffer as requested");
+ }
+}
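
Tying the pieces together, the expected lifetime is create, write, detach (or read), dispose. A sketch of the detach path used by the stampede code earlier in this diff:

```csharp
using System;
using System.Buffers;

// Sketch only: rent, write, take ownership of the committed bytes, recycle the writer.
var writer = RecyclableArrayBufferWriter<byte>.Create(maxLength: 1024);

Span<byte> span = writer.GetSpan(sizeHint: 5); // grows from ArrayPool<byte> as needed
"hello"u8.CopyTo(span);
writer.Advance(5); // commit 5 bytes (throws if the max-length quota would be exceeded)

byte[] rented = writer.DetachCommitted(out int length); // the writer forgets this buffer
writer.Dispose(); // recycles the writer itself; the detached buffer is now ours

// ... use rented[0..length] ...
ArrayPool<byte>.Shared.Return(rented); // the caller now owns recycling
```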
diff --git a/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/readme.md b/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/readme.md
new file mode 100644
index 00000000000..8d6a7d87848
--- /dev/null
+++ b/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Internal/readme.md
@@ -0,0 +1,27 @@
+# HybridCache internal design
+
+`HybridCache` encapsulates serialization, caching and stampede protection.
+
+The `DefaultHybridCache` implementation keeps a collection of `StampedeState` entries
+that represent the current in-flight operations (keyed by `StampedeKey`); if a duplicate
+operation occurs during the execution, the second operation will be joined with that
+same flow, rather than executing independently. When attempting to merge with an
+existing flow, interlocked counting is used: we can only join if we can successfully
+increment the value from a non-zero value (zero meaning all existing consumers have
+canceled, and the shared token is therefore canceled).
+
+The `StampedeState<>` performs back-end fetch operations, resulting not in a `T` (the final
+value), but instead in a `CacheItem<T>`; this is the object that gets put into L1 cache,
+and can describe both mutable and immutable types; the significance here is that for
+mutable types, we need a defensive copy per call to prevent callers impacting each other.
+
+`StampedeState<>` combines the callers' cancellation (so that operations proceed as long as *a* caller
+is still active); this covers all L2 access and serialization operations, releasing all pending
+shared callers for the same operation. Note that L2 storage can occur *after* callers
+have been released.
+
+To ensure correct buffer recycling, when dealing with cache entries that need defensive copies
+we use additional ref-counting while reading the buffer, combined with an eviction callback which
+decrements that counter. This means that we recycle buffers on eviction, without impacting
+in-progress deserialize operations. To simplify tracking, `BufferChunk` acts like a `byte[]`+`int`
+(we don't need a non-zero offset), while also tracking whether the buffer should be returned to the pool.
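
The "interlocked counting" join described above can be sketched as a CAS loop that only succeeds while the count is non-zero (illustration only; member names are hypothetical):

```csharp
using System.Threading;

// Sketch only: join an in-flight operation iff at least one caller is still attached.
internal sealed class StampedeRefCount
{
    private int _activeCallers = 1; // the initiating caller

    public bool TryAddCaller()
    {
        int current;
        do
        {
            current = Volatile.Read(ref _activeCallers);
            if (current == 0)
            {
                return false; // everyone left; the shared token is canceled, start a fresh entry
            }
        }
        while (Interlocked.CompareExchange(ref _activeCallers, current + 1, current) != current);

        return true; // joined; decrement again when this caller exits
    }

    public void RemoveCaller()
    {
        if (Interlocked.Decrement(ref _activeCallers) == 0)
        {
            // last one out: cancel the shared token here
        }
    }
}
```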
diff --git a/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Microsoft.Extensions.Caching.Hybrid.csproj b/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Microsoft.Extensions.Caching.Hybrid.csproj
new file mode 100644
index 00000000000..f460c4ee0cc
--- /dev/null
+++ b/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Microsoft.Extensions.Caching.Hybrid.csproj
@@ -0,0 +1,39 @@
+
+
+
+ Multi-level caching implementation building on and extending IDistributedCache
+ $(NetCoreTargetFrameworks)$(ConditionalNet462);netstandard2.0;netstandard2.1
+ true
+ cache;distributedcache;hybrid
+ true
+ true
+ true
+ true
+ true
+ true
+ true
+ dev
+ EXTEXP0018
+ 75
+ 50
+ Fundamentals
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Microsoft.Extensions.Caching.Hybrid.json b/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Microsoft.Extensions.Caching.Hybrid.json
new file mode 100644
index 00000000000..2c1a811b223
Binary files /dev/null and b/src/Libraries/Microsoft.Extensions.Caching.Hybrid/Microsoft.Extensions.Caching.Hybrid.json differ
diff --git a/src/Libraries/Microsoft.Extensions.Caching.Hybrid/README.md b/src/Libraries/Microsoft.Extensions.Caching.Hybrid/README.md
new file mode 100644
index 00000000000..02dc3e5bae5
--- /dev/null
+++ b/src/Libraries/Microsoft.Extensions.Caching.Hybrid/README.md
@@ -0,0 +1,81 @@
+# Microsoft.Extensions.Caching.Hybrid
+
+This package contains a concrete implementation of [the `HybridCache` API](https://learn.microsoft.com/dotnet/api/microsoft.extensions.caching.hybrid),
+simplifying and enhancing cache usage that might previously have been built on top of [`IDistributedCache`](https://learn.microsoft.com/dotnet/api/microsoft.extensions.caching.distributed.idistributedcache).
+
+Key features:
+
+- built on top of `IDistributedCache` - all existing cache backends (Redis, SQL Server, CosmosDB, etc) should work immediately
+- simple API (all the cache, serialization, etc. details are encapsulated)
+- cache-stampede protection (combining of concurrent requests for the same data)
+- performance enhancements such as inbuilt support for the newer [`IBufferDistributedCache`](https://learn.microsoft.com/dotnet/api/microsoft.extensions.caching.distributed.ibufferdistributedcache) API
+- fully configurable serialization
+
+Full `HybridCache` documentation is [here](https://learn.microsoft.com/aspnet/core/performance/caching/hybrid).
+
+## Full documentation
+
+See [learn.microsoft.com](https://learn.microsoft.com/aspnet/core/performance/caching/hybrid) for full discussion of `HybridCache`.
+
+## Install the package
+
+From the command-line:
+
+```console
+dotnet add package Microsoft.Extensions.Caching.Hybrid
+```
+
+Or directly in the C# project file:
+
+```xml
+<ItemGroup>
+  <PackageReference Include="Microsoft.Extensions.Caching.Hybrid" Version="[CURRENTVERSION]" />
+</ItemGroup>
+```
+
+## Usage example
+
+The `HybridCache` service can be registered and configured via `IServiceCollection`, for example:
+
+```csharp
+builder.Services.AddHybridCache(/* optional configuration */);
+```
+
+Note that in many cases you may also wish to register a distributed cache backend, as
+[discussed here](https://learn.microsoft.com/aspnet/core/performance/caching/distributed); for example
+a Redis instance:
+
+```csharp
+builder.Services.AddStackExchangeRedisCache(options =>
+{
+ options.Configuration = builder.Configuration.GetConnectionString("MyRedisConStr");
+});
+```
+
+Once registered, the `HybridCache` instance can be obtained via dependency-injection, allowing the
+`GetOrCreateAsync` API to be used to obtain data:
+
+```csharp
+public class SomeService(HybridCache cache)
+{
+ private HybridCache _cache = cache;
+
+ public async Task<string> GetSomeInfoAsync(string name, int id, CancellationToken token = default)
+ {
+ return await _cache.GetOrCreateAsync(
+ $"{name}-{id}", // Unique key to the cache entry
+ async cancel => await GetDataFromTheSourceAsync(name, id, cancel),
+ cancellationToken: token
+ );
+ }
+
+ private async Task<string> GetDataFromTheSourceAsync(string name, int id, CancellationToken token)
+ {
+ // talk to the underlying data store here - could be SQL, gRPC, HTTP, etc
+ return $"someinfo-{name}-{id}"; // placeholder result so the example compiles
+ }
+}
+```
+
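Expiration can also be controlled per call via `HybridCacheEntryOptions`; for example (the durations below are illustrative, reusing the names from the example above):

```csharp
var result = await _cache.GetOrCreateAsync(
    $"{name}-{id}",
    async cancel => await GetDataFromTheSourceAsync(name, id, cancel),
    options: new HybridCacheEntryOptions
    {
        Expiration = TimeSpan.FromMinutes(5),           // overall (distributed/L2) lifetime
        LocalCacheExpiration = TimeSpan.FromMinutes(1)  // in-process (L1) lifetime
    },
    cancellationToken: token);
```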
+Additional usage guidance - including expiration, custom serialization support, and alternate usage
+to reduce delegate allocation - is available
+on [learn.microsoft.com](https://learn.microsoft.com/aspnet/core/performance/caching/hybrid).
diff --git a/src/Libraries/Microsoft.Extensions.Compliance.Redaction/Microsoft.Extensions.Compliance.Redaction.csproj b/src/Libraries/Microsoft.Extensions.Compliance.Redaction/Microsoft.Extensions.Compliance.Redaction.csproj
index 8a6a526acc1..d331d10ff32 100644
--- a/src/Libraries/Microsoft.Extensions.Compliance.Redaction/Microsoft.Extensions.Compliance.Redaction.csproj
+++ b/src/Libraries/Microsoft.Extensions.Compliance.Redaction/Microsoft.Extensions.Compliance.Redaction.csproj
@@ -7,12 +7,15 @@
true
- true
true
true
true
true
true
+
+
+ false
+ $(NoWarn);IL2026
diff --git a/src/Libraries/Microsoft.Extensions.Telemetry.Abstractions/buildTransitive/net8.0/Microsoft.Extensions.Telemetry.Abstractions.props b/src/Libraries/Microsoft.Extensions.Telemetry.Abstractions/buildTransitive/net8.0/Microsoft.Extensions.Telemetry.Abstractions.props
new file mode 100644
index 00000000000..7009535fdef
--- /dev/null
+++ b/src/Libraries/Microsoft.Extensions.Telemetry.Abstractions/buildTransitive/net8.0/Microsoft.Extensions.Telemetry.Abstractions.props
@@ -0,0 +1,6 @@
+
+
+
+ true
+
+
diff --git a/src/Libraries/Microsoft.Extensions.Telemetry.Abstractions/buildTransitive/net8.0/Microsoft.Extensions.Telemetry.Abstractions.targets b/src/Libraries/Microsoft.Extensions.Telemetry.Abstractions/buildTransitive/net8.0/Microsoft.Extensions.Telemetry.Abstractions.targets
new file mode 100644
index 00000000000..650e0e6729b
--- /dev/null
+++ b/src/Libraries/Microsoft.Extensions.Telemetry.Abstractions/buildTransitive/net8.0/Microsoft.Extensions.Telemetry.Abstractions.targets
@@ -0,0 +1,36 @@
+
+
+
+
+ <_Microsoft_Extensions_Logging_AbstractionsAnalyzer
+ Include="@(Analyzer)"
+ Condition="'%(Analyzer.AssemblyName)' == 'Microsoft.Extensions.Logging.Generators' Or
+ '%(Analyzer.NuGetPackageId)' == 'Microsoft.Extensions.Logging.Abstractions'" />
+
+
+
+
+
+
+
+
+
+
+
+ <_Microsoft_Extensions_Logging_AbstractionsAnalyzerWPF
+ Include="@(Analyzer)"
+ Condition="'%(Analyzer.AssemblyName)' == 'Microsoft.Extensions.Logging.Generators' Or
+ '%(Analyzer.NuGetPackageId)' == 'Microsoft.Extensions.Logging.Abstractions'" />
+
+
+
+
+
+
+
diff --git a/test/Analyzers/Microsoft.Analyzers.Extra.Tests/CallAnalysis/LegacyLoggingTests.cs b/test/Analyzers/Microsoft.Analyzers.Extra.Tests/CallAnalysis/LegacyLoggingTests.cs
index afe68ce5685..d74f279aef6 100644
--- a/test/Analyzers/Microsoft.Analyzers.Extra.Tests/CallAnalysis/LegacyLoggingTests.cs
+++ b/test/Analyzers/Microsoft.Analyzers.Extra.Tests/CallAnalysis/LegacyLoggingTests.cs
@@ -73,7 +73,7 @@ public static partial class Log
new LegacyLoggingFixer(),
new[] { Assembly.GetAssembly(typeof(ILogger))!, Assembly.GetAssembly(typeof(LoggerMessageAttribute))! },
new[] { OriginalSource, OriginalTarget },
- defaultNamespace: "Example").ConfigureAwait(false);
+ defaultNamespace: "Example");
var actualSource = l[0];
var actualTarget = l[1];
@@ -166,7 +166,7 @@ public static partial class Log
new LegacyLoggingFixer(),
new[] { Assembly.GetAssembly(typeof(ILogger))!, Assembly.GetAssembly(typeof(LoggerMessageAttribute))! },
new[] { OriginalSource, OriginalTarget },
- defaultNamespace: "Example").ConfigureAwait(false);
+ defaultNamespace: "Example");
var actualSource = l[0];
var actualTarget = l[1];
@@ -230,7 +230,7 @@ static partial class Log
new[] { Assembly.GetAssembly(typeof(ILogger))!, Assembly.GetAssembly(typeof(LoggerMessageAttribute))! },
new[] { OriginalSource },
extraFile: "Log.cs",
- defaultNamespace: "Example").ConfigureAwait(false);
+ defaultNamespace: "Example");
var actualSource = l[0];
var actualTarget = l[1];
@@ -290,7 +290,7 @@ public static partial class Log
new LegacyLoggingFixer(),
new[] { Assembly.GetAssembly(typeof(ILogger))!, Assembly.GetAssembly(typeof(LoggerMessageAttribute))! },
new[] { OriginalSource, OriginalTarget },
- defaultNamespace: "Example").ConfigureAwait(false);
+ defaultNamespace: "Example");
var actualSource = l[0];
var actualTarget = l[1];
@@ -364,7 +364,7 @@ static partial class Log
new LegacyLoggingFixer(),
new[] { Assembly.GetAssembly(typeof(ILogger))!, Assembly.GetAssembly(typeof(LoggerMessageAttribute))! },
new[] { OriginalSource, OriginalTarget },
- defaultNamespace: "Example.Example2").ConfigureAwait(false);
+ defaultNamespace: "Example.Example2");
var actualSource = l[0];
var actualTarget = l[1];
@@ -441,7 +441,7 @@ static partial class Log
new[] { Assembly.GetAssembly(typeof(ILogger))!, Assembly.GetAssembly(typeof(LoggerMessageAttribute))! },
new[] { OriginalSource, OriginalTarget },
extraFile: "Log.cs",
- defaultNamespace: "Example.Example2").ConfigureAwait(false);
+ defaultNamespace: "Example.Example2");
var actualSource = l[0];
var actualTarget = l[2];
@@ -523,7 +523,7 @@ public static partial class Log
new LegacyLoggingFixer(),
new[] { Assembly.GetAssembly(typeof(ILogger))!, Assembly.GetAssembly(typeof(LoggerMessageAttribute))! },
new[] { OriginalSource, OriginalTarget },
- defaultNamespace: "Example").ConfigureAwait(false);
+ defaultNamespace: "Example");
var actualSource = l[0];
var actualTarget = l[1];
@@ -611,7 +611,7 @@ public static partial class Log
new LegacyLoggingFixer(),
new[] { Assembly.GetAssembly(typeof(ILogger))!, Assembly.GetAssembly(typeof(LoggerMessageAttribute))! },
new[] { OriginalSource, OriginalTarget },
- defaultNamespace: "Example").ConfigureAwait(false);
+ defaultNamespace: "Example");
var actualSource = l[0];
var actualTarget = l[1];
@@ -683,7 +683,7 @@ static partial class Log
new[] { OriginalSource, OriginalTarget },
sourceNames: new[] { "primary.cs", "Log.cs" },
extraFile: "Log2.cs",
- defaultNamespace: "Example").ConfigureAwait(false);
+ defaultNamespace: "Example");
var actualSource = l[0];
var actualTarget = l[2];
@@ -754,7 +754,7 @@ public static partial class Log
},
new[] { Assembly.GetAssembly(typeof(ILogger))!, Assembly.GetAssembly(typeof(LoggerMessageAttribute))! },
new[] { OriginalSource, OriginalTarget },
- defaultNamespace: "Example").ConfigureAwait(false);
+ defaultNamespace: "Example");
var actualSource = l[0];
var actualTarget = l[1];
@@ -819,7 +819,7 @@ public static void TestMethod(ILogger logger)
proj.CommitChanges();
var targetDoc = proj.FindDocument("target.cs");
- var targetRoot = await targetDoc.GetSyntaxRootAsync(CancellationToken.None).ConfigureAwait(false);
+ var targetRoot = await targetDoc.GetSyntaxRootAsync(CancellationToken.None);
var targetClass = targetRoot!.FindNode(RoslynTestUtils.MakeTextSpan(TargetSourceCode, 0)) as ClassDeclarationSyntax;
var invocationDoc = proj.FindDocument("invocation.cs");
@@ -828,7 +828,7 @@ public static void TestMethod(ILogger logger)
var (invocationExpression, details) = await f.CheckIfCanFixAsync(
invocationDoc,
InvocationSourceCode.MakeTextSpan(0),
- CancellationToken.None).ConfigureAwait(false);
+ CancellationToken.None);
Assert.NotNull(invocationExpression);
Assert.NotNull(details);
@@ -839,7 +839,7 @@ public static void TestMethod(ILogger logger)
invocationDoc,
invocationExpression!,
details!,
- CancellationToken.None).ConfigureAwait(false);
+ CancellationToken.None);
Assert.Equal("TestA", methodName);
Assert.False(existing);
@@ -852,7 +852,7 @@ public static void TestMethod(ILogger logger)
(invocationExpression, details) = await f.CheckIfCanFixAsync(
invocationDoc,
RoslynTestUtils.MakeTextSpan(InvocationSourceCode, 0),
- CancellationToken.None).ConfigureAwait(false);
+ CancellationToken.None);
Assert.Null(invocationExpression);
Assert.Null(details);
@@ -865,7 +865,7 @@ public static void TestMethod(ILogger logger)
(invocationExpression, details) = await f.CheckIfCanFixAsync(
invocationDoc,
RoslynTestUtils.MakeTextSpan(InvocationSourceCode, 0),
- CancellationToken.None).ConfigureAwait(false);
+ CancellationToken.None);
Assert.Null(invocationExpression);
Assert.Null(details);
@@ -878,7 +878,7 @@ public static void TestMethod(ILogger logger)
(invocationExpression, details) = await f.CheckIfCanFixAsync(
invocationDoc,
RoslynTestUtils.MakeTextSpan(InvocationSourceCode, 0),
- CancellationToken.None).ConfigureAwait(false);
+ CancellationToken.None);
Assert.Null(invocationExpression);
Assert.Null(details);
@@ -891,7 +891,7 @@ public static void TestMethod(ILogger logger)
(invocationExpression, details) = await f.CheckIfCanFixAsync(
invocationDoc,
RoslynTestUtils.MakeTextSpan(InvocationSourceCode, 0),
- CancellationToken.None).ConfigureAwait(false);
+ CancellationToken.None);
Assert.NotNull(invocationExpression);
Assert.NotNull(details);
@@ -902,7 +902,7 @@ public static void TestMethod(ILogger logger)
invocationDoc,
invocationExpression!,
details!,
- CancellationToken.None).ConfigureAwait(false);
+ CancellationToken.None);
Assert.Equal("TestA", methodName);
Assert.False(existing);
@@ -915,7 +915,7 @@ public static void TestMethod(ILogger logger)
(invocationExpression, details) = await f.CheckIfCanFixAsync(
invocationDoc,
RoslynTestUtils.MakeTextSpan(InvocationSourceCode, 0),
- CancellationToken.None).ConfigureAwait(false);
+ CancellationToken.None);
Assert.Null(invocationExpression);
Assert.Null(details);
@@ -928,7 +928,7 @@ public static void TestMethod(ILogger logger)
(invocationExpression, details) = await f.CheckIfCanFixAsync(
invocationDoc,
RoslynTestUtils.MakeTextSpan(InvocationSourceCode, 0),
- CancellationToken.None).ConfigureAwait(false);
+ CancellationToken.None);
Assert.NotNull(invocationExpression);
Assert.NotNull(details);
@@ -939,7 +939,7 @@ public static void TestMethod(ILogger logger)
invocationDoc,
invocationExpression!,
details!,
- CancellationToken.None).ConfigureAwait(false);
+ CancellationToken.None);
Assert.Equal("TestA", methodName);
Assert.False(existing);
diff --git a/test/Analyzers/Microsoft.Analyzers.Extra.Tests/CallAnalysis/SplitTests.cs b/test/Analyzers/Microsoft.Analyzers.Extra.Tests/CallAnalysis/SplitTests.cs
index cad16f25dea..f6efeab1261 100644
--- a/test/Analyzers/Microsoft.Analyzers.Extra.Tests/CallAnalysis/SplitTests.cs
+++ b/test/Analyzers/Microsoft.Analyzers.Extra.Tests/CallAnalysis/SplitTests.cs
@@ -29,9 +29,9 @@ public static void TestMethod()
var d = await RoslynTestUtils.RunAnalyzer(
new CallAnalyzer(),
null,
- new[] { Source }).ConfigureAwait(false);
+ new[] { Source });
- Assert.Equal(1, d.Count);
+ Assert.Single(d);
for (int i = 0; i < d.Count; i++)
{
Source.AssertDiagnostic(i, DiagDescriptors.Split, d[i]);
diff --git a/test/Analyzers/Microsoft.Analyzers.Extra.Tests/CallAnalysis/StartsEndsWithTests.cs b/test/Analyzers/Microsoft.Analyzers.Extra.Tests/CallAnalysis/StartsEndsWithTests.cs
index e46a7cb6f7b..c026aa6be8a 100644
--- a/test/Analyzers/Microsoft.Analyzers.Extra.Tests/CallAnalysis/StartsEndsWithTests.cs
+++ b/test/Analyzers/Microsoft.Analyzers.Extra.Tests/CallAnalysis/StartsEndsWithTests.cs
@@ -56,7 +56,7 @@ public static void DontFlagMe()
var d = await RoslynTestUtils.RunAnalyzer(
new CallAnalyzer(),
null,
- new[] { Source }).ConfigureAwait(false);
+ new[] { Source });
Assert.Equal(8, d.Count);
@@ -114,7 +114,7 @@ public static void DontFlagMe()
var d = await RoslynTestUtils.RunAnalyzer(
new CallAnalyzer(),
null,
- new[] { Source }).ConfigureAwait(false);
+ new[] { Source });
Assert.Equal(8, d.Count);
diff --git a/test/Analyzers/Microsoft.Analyzers.Extra.Tests/CallAnalysis/StaticTimeTests.cs b/test/Analyzers/Microsoft.Analyzers.Extra.Tests/CallAnalysis/StaticTimeTests.cs
index 807e097aedc..bd8c77f9295 100644
--- a/test/Analyzers/Microsoft.Analyzers.Extra.Tests/CallAnalysis/StaticTimeTests.cs
+++ b/test/Analyzers/Microsoft.Analyzers.Extra.Tests/CallAnalysis/StaticTimeTests.cs
@@ -77,7 +77,7 @@ private DateTimeOffset GetTimeOffset(bool condition)
var d = await RoslynTestUtils.RunAnalyzer(
new CallAnalyzer(),
_staticTimeReferences,
- new[] { Source }).ConfigureAwait(false);
+ new[] { Source });
Assert.Equal(17, d.Count);
for (int i = 0; i < d.Count; i++)
diff --git a/test/Analyzers/Microsoft.Analyzers.Extra.Tests/CallAnalysis/ValueTupleTests.cs b/test/Analyzers/Microsoft.Analyzers.Extra.Tests/CallAnalysis/ValueTupleTests.cs
index 2e76eb83d03..cf4f7782d1b 100644
--- a/test/Analyzers/Microsoft.Analyzers.Extra.Tests/CallAnalysis/ValueTupleTests.cs
+++ b/test/Analyzers/Microsoft.Analyzers.Extra.Tests/CallAnalysis/ValueTupleTests.cs
@@ -46,7 +46,7 @@ public void Foo()
var d = await RoslynTestUtils.RunAnalyzer(
new CallAnalyzer(),
null,
- new[] { Source }).ConfigureAwait(false);
+ new[] { Source });
Assert.Equal(16, d.Count);
for (int i = 0; i < d.Count; i++)
diff --git a/test/Analyzers/Microsoft.Analyzers.Extra.Tests/CoalesceAnalyzerTests.cs b/test/Analyzers/Microsoft.Analyzers.Extra.Tests/CoalesceAnalyzerTests.cs
index debc2a7272a..4495c5491ff 100644
--- a/test/Analyzers/Microsoft.Analyzers.Extra.Tests/CoalesceAnalyzerTests.cs
+++ b/test/Analyzers/Microsoft.Analyzers.Extra.Tests/CoalesceAnalyzerTests.cs
@@ -37,9 +37,9 @@ public class Test
var d = await RoslynTestUtils.RunAnalyzer(
new CoalesceAnalyzer(),
null,
- new[] { Source }).ConfigureAwait(false);
+ new[] { Source });
- Assert.Equal(0, d.Count);
+ Assert.Empty(d);
}
[Fact]
@@ -92,7 +92,7 @@ public void Method()
var d = await RoslynTestUtils.RunAnalyzer(
new CoalesceAnalyzer(),
null,
- new[] { Source }).ConfigureAwait(false);
+ new[] { Source });
Assert.Equal(4, d.Count);
for (int i = 0; i < d.Count; i++)
@@ -160,7 +160,7 @@ public void Method()
var d = await RoslynTestUtils.RunAnalyzer(
new CoalesceAnalyzer(),
null,
- new[] { Source }).ConfigureAwait(false);
+ new[] { Source });
Assert.Equal(5, d.Count);
for (int i = 0; i < d.Count; i++)
diff --git a/test/Analyzers/Microsoft.Analyzers.Extra.Tests/ConditionalAccessAnalyzerTests.cs b/test/Analyzers/Microsoft.Analyzers.Extra.Tests/ConditionalAccessAnalyzerTests.cs
index bb27a76eb2f..d561e32c4a2 100644
--- a/test/Analyzers/Microsoft.Analyzers.Extra.Tests/ConditionalAccessAnalyzerTests.cs
+++ b/test/Analyzers/Microsoft.Analyzers.Extra.Tests/ConditionalAccessAnalyzerTests.cs
@@ -104,7 +104,7 @@ public Arg(TParameter arg1)
var d = await RoslynTestUtils.RunAnalyzer(
new ConditionalAccessAnalyzer(),
null,
- new[] { Source }).ConfigureAwait(false);
+ new[] { Source });
#if NET6_0_OR_GREATER
Assert.Equal(8, d.Count);
diff --git a/test/Analyzers/Microsoft.Analyzers.Extra.Tests/MakeExeTypesInternalTests.cs b/test/Analyzers/Microsoft.Analyzers.Extra.Tests/MakeExeTypesInternalTests.cs
index 204d3e83e69..078978bc07f 100644
--- a/test/Analyzers/Microsoft.Analyzers.Extra.Tests/MakeExeTypesInternalTests.cs
+++ b/test/Analyzers/Microsoft.Analyzers.Extra.Tests/MakeExeTypesInternalTests.cs
@@ -70,7 +70,7 @@ internal class Test4
new MakeExeTypesInternalFixer(),
null,
new[] { Source },
- asExecutable: true).ConfigureAwait(false);
+ asExecutable: true);
Assert.Equal(ExpectedFixedSource.Replace("\r\n", "\n", StringComparison.Ordinal), actualFixedSources[0]);
}
@@ -156,9 +156,9 @@ public class Test6
new MakeExeTypesInternalAnalyzer(),
null,
new[] { Source },
- asExecutable: true).ConfigureAwait(false);
+ asExecutable: true);
- Assert.Equal(1, d.Count);
+ Assert.Single(d);
for (int i = 0; i < d.Count; i++)
{
Source.AssertDiagnostic(i, DiagDescriptors.MakeExeTypesInternal, d[i]);
diff --git a/test/Analyzers/Microsoft.Analyzers.Extra.Tests/UsingToStringInLoggersTests.cs b/test/Analyzers/Microsoft.Analyzers.Extra.Tests/UsingToStringInLoggersTests.cs
index 9f41a1945ea..c000711a6ab 100644
--- a/test/Analyzers/Microsoft.Analyzers.Extra.Tests/UsingToStringInLoggersTests.cs
+++ b/test/Analyzers/Microsoft.Analyzers.Extra.Tests/UsingToStringInLoggersTests.cs
@@ -58,14 +58,14 @@ public static void LogTest(ILogger log, object something)
var generated = await RoslynTestUtils.RunGenerator(
new LoggingGenerator(),
References,
- new[] { Source }).ConfigureAwait(false);
+ new[] { Source });
- Assert.Equal(0, generated.diagnostics.Count);
+ Assert.Empty(generated.diagnostics);
var fullDiags = await RoslynTestUtils.RunAnalyzer(
new UsingToStringInLoggersAnalyzer(),
References,
- new[] { Source, generated.generatedSources[0].SourceText.ToString() }).ConfigureAwait(false);
+ new[] { Source, generated.generatedSources[0].SourceText.ToString() });
var d = RoslynTestUtils.FilterDiagnostics(fullDiags, DiagDescriptors.UsingToStringInLoggers);
@@ -113,8 +113,8 @@ public static void LogTest(ILogger log, object something)
var d = await RoslynTestUtils.RunAnalyzer(
new UsingToStringInLoggersAnalyzer(),
References,
- new[] { Source }).ConfigureAwait(false);
+ new[] { Source });
- Assert.Equal(0, d.Count);
+ Assert.Empty(d);
}
}
diff --git a/test/Analyzers/Microsoft.Analyzers.Local.Tests/ApiLifecycle/ApiLifecycleAnalyzerTest.cs b/test/Analyzers/Microsoft.Analyzers.Local.Tests/ApiLifecycle/ApiLifecycleAnalyzerTest.cs
index 87afb05f182..e771b8355d8 100644
--- a/test/Analyzers/Microsoft.Analyzers.Local.Tests/ApiLifecycle/ApiLifecycleAnalyzerTest.cs
+++ b/test/Analyzers/Microsoft.Analyzers.Local.Tests/ApiLifecycle/ApiLifecycleAnalyzerTest.cs
@@ -39,7 +39,7 @@ public async Task Analyzer_Reports_Diagnostics_When_Code_Was_Not_Annotated_Corre
},
options: options,
testAssemblyName: testAssemblyName)
- .ConfigureAwait(false);
+;
Assert.Equal(expectedDiagnostics, diagnostics.Count);
@@ -65,7 +65,7 @@ public async Task Analyzer_Reports_Diagnostics_When_StableCode_Was_Not_Found_In_
},
options: options,
testAssemblyName: testAssemblyName)
- .ConfigureAwait(false);
+;
Assert.Equal(expectedDiagnostics, diagnostics.Count);
diff --git a/test/Analyzers/Microsoft.Analyzers.Local.Tests/CallAnalysis/ToInvariantStringTests.cs b/test/Analyzers/Microsoft.Analyzers.Local.Tests/CallAnalysis/ToInvariantStringTests.cs
index cfc642956e8..21f920c8f7c 100644
--- a/test/Analyzers/Microsoft.Analyzers.Local.Tests/CallAnalysis/ToInvariantStringTests.cs
+++ b/test/Analyzers/Microsoft.Analyzers.Local.Tests/CallAnalysis/ToInvariantStringTests.cs
@@ -37,7 +37,7 @@ public void Foo()
var d = await RoslynTestUtils.RunAnalyzer(
new CallAnalyzer(),
null,
- new[] { Source }).ConfigureAwait(false);
+ new[] { Source });
Assert.Equal(4, d.Count);
for (int i = 0; i < d.Count; i++)
diff --git a/test/Analyzers/Microsoft.Analyzers.Local.Tests/CallAnalysis/UseThrowsTests.cs b/test/Analyzers/Microsoft.Analyzers.Local.Tests/CallAnalysis/UseThrowsTests.cs
index 6a07221d3e7..4046a680dab 100644
--- a/test/Analyzers/Microsoft.Analyzers.Local.Tests/CallAnalysis/UseThrowsTests.cs
+++ b/test/Analyzers/Microsoft.Analyzers.Local.Tests/CallAnalysis/UseThrowsTests.cs
@@ -28,7 +28,7 @@ public static async Task ShouldFindSingleWarning(string original, string excepti
var d = await RoslynTestUtils.RunAnalyzer(
new CallAnalyzer(),
_references,
- new[] { original }).ConfigureAwait(false);
+ new[] { original });
Assert.Single(d);
original.AssertDiagnostic(0, DiagDescriptors.ThrowsStatement, d[0]);
@@ -42,7 +42,7 @@ public static async Task ShouldNotProduceWarnings(string original)
var d = await RoslynTestUtils.RunAnalyzer(
new CallAnalyzer(),
_references,
- new[] { original }).ConfigureAwait(false);
+ new[] { original });
Assert.Empty(d);
}
@@ -276,7 +276,7 @@ public static async Task Expression_ShouldFindSingleWarning(string original)
var d = await RoslynTestUtils.RunAnalyzer(
new CallAnalyzer(),
_references,
- new[] { original }).ConfigureAwait(false);
+ new[] { original });
Assert.Single(d);
original.AssertDiagnostic(0, DiagDescriptors.ThrowsExpression, d[0]);
diff --git a/test/Analyzers/Microsoft.Analyzers.Local.Tests/Resources/RoslynTestUtils.cs b/test/Analyzers/Microsoft.Analyzers.Local.Tests/Resources/RoslynTestUtils.cs
index 223d0702008..6ae88197e4a 100644
--- a/test/Analyzers/Microsoft.Analyzers.Local.Tests/Resources/RoslynTestUtils.cs
+++ b/test/Analyzers/Microsoft.Analyzers.Local.Tests/Resources/RoslynTestUtils.cs
@@ -146,7 +146,7 @@ public static void AssertDiagnostic(this string text, int spanNum, DiagnosticDes
}
catch (ArgumentOutOfRangeException)
{
- Assert.True(false, $"Unexpected warning {actual}");
+ Assert.Fail($"Unexpected warning {actual}");
}
}
diff --git a/test/Generators/Microsoft.Gen.ComplianceReports/Unit/GeneratorTests.cs b/test/Generators/Microsoft.Gen.ComplianceReports/Unit/GeneratorTests.cs
index 0959430a641..28dbb80a36c 100644
--- a/test/Generators/Microsoft.Gen.ComplianceReports/Unit/GeneratorTests.cs
+++ b/test/Generators/Microsoft.Gen.ComplianceReports/Unit/GeneratorTests.cs
@@ -108,7 +108,7 @@ public async Task MissingDataClassificationSymbol()
Source,
},
new OptionsProvider(null))
- .ConfigureAwait(false);
+;
Assert.Empty(d);
}
diff --git a/test/Generators/Microsoft.Gen.ContextualOptions/Unit/EmitterTests.cs b/test/Generators/Microsoft.Gen.ContextualOptions/Unit/EmitterTests.cs
index f5fc3d8b87f..1e1c3eb91be 100644
--- a/test/Generators/Microsoft.Gen.ContextualOptions/Unit/EmitterTests.cs
+++ b/test/Generators/Microsoft.Gen.ContextualOptions/Unit/EmitterTests.cs
@@ -141,7 +141,7 @@ public async Task TestEmitter()
typeof(ReadOnlySpan<>).Assembly
},
sources)
- .ConfigureAwait(false);
+;
Assert.Empty(d);
_ = Assert.Single(r);
diff --git a/test/Generators/Microsoft.Gen.Logging/Unit/EmitterTests.cs b/test/Generators/Microsoft.Gen.Logging/Unit/EmitterTests.cs
index c2075b51f8e..58012c4915b 100644
--- a/test/Generators/Microsoft.Gen.Logging/Unit/EmitterTests.cs
+++ b/test/Generators/Microsoft.Gen.Logging/Unit/EmitterTests.cs
@@ -3,7 +3,6 @@
using System.Collections.Generic;
using System.IO;
-using System.Linq;
using System.Numerics;
using System.Reflection;
using System.Threading.Tasks;
@@ -49,16 +48,16 @@ public async Task TestEmitter()
},
sources,
symbols)
- .ConfigureAwait(false);
+;
// we need this "Where()" hack because Roslyn 4.0 doesn't recognize #pragma warning disable for generator-produced warnings
#pragma warning disable S1067 // Expressions should not be too complex
- Assert.Empty(d.Where(diag
+ Assert.DoesNotContain(d, diag
=> diag.Id != DiagDescriptors.ShouldntMentionExceptionInMessage.Id
&& diag.Id != DiagDescriptors.ShouldntMentionLoggerInMessage.Id
&& diag.Id != DiagDescriptors.ShouldntMentionLogLevelInMessage.Id
&& diag.Id != DiagDescriptors.EmptyLoggingMethod.Id
- && diag.Id != DiagDescriptors.ParameterHasNoCorrespondingTemplate.Id));
+ && diag.Id != DiagDescriptors.ParameterHasNoCorrespondingTemplate.Id);
#pragma warning restore S1067 // Expressions should not be too complex
_ = Assert.Single(r);
diff --git a/test/Generators/Microsoft.Gen.Logging/Unit/ParserTests.cs b/test/Generators/Microsoft.Gen.Logging/Unit/ParserTests.cs
index 66828e25b66..0baa988b85d 100644
--- a/test/Generators/Microsoft.Gen.Logging/Unit/ParserTests.cs
+++ b/test/Generators/Microsoft.Gen.Logging/Unit/ParserTests.cs
@@ -747,7 +747,7 @@ private static async Task RunGenerator(
}
else if (d.Count > 0)
{
- Assert.True(false, $"Expected no diagnostics, got {d.Count} diagnostics");
+ Assert.Fail($"Expected no diagnostics, got {d.Count} diagnostics");
}
}
}
diff --git a/test/Generators/Microsoft.Gen.Metrics/Unit/EmitterTests.cs b/test/Generators/Microsoft.Gen.Metrics/Unit/EmitterTests.cs
index c98d83a5ad7..0bcdb43b0b5 100644
--- a/test/Generators/Microsoft.Gen.Metrics/Unit/EmitterTests.cs
+++ b/test/Generators/Microsoft.Gen.Metrics/Unit/EmitterTests.cs
@@ -37,7 +37,7 @@ public async Task TestEmitter()
Assembly.GetAssembly(typeof(HistogramAttribute<>))!,
},
- sources)
- .ConfigureAwait(false);
+ sources);
Assert.Empty(d);
Assert.Equal(2, r.Length);
diff --git a/test/Generators/Shared/RoslynTestUtils.cs b/test/Generators/Shared/RoslynTestUtils.cs
index facdd3791a1..e9c79d115ae 100644
--- a/test/Generators/Shared/RoslynTestUtils.cs
+++ b/test/Generators/Shared/RoslynTestUtils.cs
@@ -144,7 +144,7 @@ public static void AssertDiagnostic(this string text, int spanNum, DiagnosticDes
}
else
{
- Assert.True(false, $"Unexpected diagnostics {actual}");
+ Assert.Fail($"Unexpected diagnostics {actual}");
}
}
@@ -159,7 +159,7 @@ public static void AssertDiagnostics(this string text, DiagnosticDescriptor expe
expectedSpan = text.MakeTextSpan(spanNum);
if (expectedSpan == null)
{
- Assert.True(false, $"No span detected for diagnostic #{spanNum}, {d}");
+ Assert.Fail($"No span detected for diagnostic #{spanNum}, {d}");
}
}
@@ -177,7 +177,7 @@ public static void AssertDiagnostics(this string text, DiagnosticDescriptor expe
if (text.MakeTextSpan(spanNum) != null)
{
- Assert.True(false, $"Diagnostic {spanNum} was not detected");
+ Assert.Fail($"Diagnostic {spanNum} was not detected");
}
}
diff --git a/test/Libraries/Microsoft.AspNetCore.Diagnostics.Middleware.Tests/Latency/AcceptanceTests.cs b/test/Libraries/Microsoft.AspNetCore.Diagnostics.Middleware.Tests/Latency/AcceptanceTests.cs
index c3184e20372..d0d3069ed8a 100644
--- a/test/Libraries/Microsoft.AspNetCore.Diagnostics.Middleware.Tests/Latency/AcceptanceTests.cs
+++ b/test/Libraries/Microsoft.AspNetCore.Diagnostics.Middleware.Tests/Latency/AcceptanceTests.cs
@@ -73,9 +73,9 @@ public async Task RequestLatency_LatencyContextIsStarted()
});
});
}))
- .StartAsync().ConfigureAwait(false);
+ .StartAsync();
- _ = await host.GetTestClient().GetAsync("/").ConfigureAwait(false);
+ _ = await host.GetTestClient().GetAsync("/");
await host.StopAsync();
Assert.True(isInLambda);
diff --git a/test/Libraries/Microsoft.AspNetCore.Diagnostics.Middleware.Tests/Latency/AddServerTimingHeaderMiddlewareTests.cs b/test/Libraries/Microsoft.AspNetCore.Diagnostics.Middleware.Tests/Latency/AddServerTimingHeaderMiddlewareTests.cs
index 75a5192b5f1..d3795677585 100644
--- a/test/Libraries/Microsoft.AspNetCore.Diagnostics.Middleware.Tests/Latency/AddServerTimingHeaderMiddlewareTests.cs
+++ b/test/Libraries/Microsoft.AspNetCore.Diagnostics.Middleware.Tests/Latency/AddServerTimingHeaderMiddlewareTests.cs
@@ -43,7 +43,7 @@ public async Task Middleware_ReturnsTotalMillisecondsElapsed_InsteadOfFraction()
await fakeHttpResponseFeature.StartAsync();
var header = context.Response.Headers[AddServerTimingHeaderMiddleware.ServerTimingHeaderName];
- Assert.NotEmpty(header);
+ Assert.True(header.Count > 0);
Assert.Equal($"reqlatency;dur={TimeAdvanceMs}", header[0]);
}
diff --git a/test/Libraries/Microsoft.AspNetCore.Diagnostics.Middleware.Tests/Latency/ChecpointAcceptanceTests.cs b/test/Libraries/Microsoft.AspNetCore.Diagnostics.Middleware.Tests/Latency/ChecpointAcceptanceTests.cs
index f43d52731a6..dc96ee74b4c 100644
--- a/test/Libraries/Microsoft.AspNetCore.Diagnostics.Middleware.Tests/Latency/ChecpointAcceptanceTests.cs
+++ b/test/Libraries/Microsoft.AspNetCore.Diagnostics.Middleware.Tests/Latency/ChecpointAcceptanceTests.cs
@@ -59,7 +59,7 @@ public async Task RequestCheckpoint_CanMeasureMiddlewarePipeTime()
}))
.StartAsync();
- _ = await host.GetTestClient().GetAsync("/").ConfigureAwait(false);
+ _ = await host.GetTestClient().GetAsync("/");
Assert.True(reachedLambda);
Assert.InRange(exitPipelineValue, 0, 10_000);
diff --git a/test/Libraries/Microsoft.AspNetCore.Diagnostics.Middleware.Tests/Latency/RequestLatencyTelemetryMiddlewareTests.cs b/test/Libraries/Microsoft.AspNetCore.Diagnostics.Middleware.Tests/Latency/RequestLatencyTelemetryMiddlewareTests.cs
index 696d81edaa0..ac256a2cb8a 100644
--- a/test/Libraries/Microsoft.AspNetCore.Diagnostics.Middleware.Tests/Latency/RequestLatencyTelemetryMiddlewareTests.cs
+++ b/test/Libraries/Microsoft.AspNetCore.Diagnostics.Middleware.Tests/Latency/RequestLatencyTelemetryMiddlewareTests.cs
@@ -42,7 +42,7 @@ public async Task RequestLatency_GivenContext_InvokesOperations()
await fakeHttpResponseFeature.StartAsync();
lc.Verify(c => c.Freeze());
var header = httpContextMock.Response.Headers[TelemetryConstants.ServerApplicationNameHeader];
- Assert.NotEmpty(header);
+ Assert.True(header.Count > 0);
Assert.Equal(serverName, header[0]);
Assert.True(nextInvoked);
Assert.True(ex1.Invoked == 1);
@@ -100,7 +100,7 @@ public async Task RequestLatency_WithServerNameHeadersSet_ReturnsLastServerName(
await fakeHttpResponseFeature.StartAsync();
lc.Verify(c => c.Freeze());
var header = httpContextMock.Response.Headers[TelemetryConstants.ServerApplicationNameHeader];
- Assert.NotEmpty(header);
+ Assert.True(header.Count > 0);
Assert.Equal(serverName, header[0]);
Assert.True(nextInvoked);
Assert.True(ex1.Invoked == 1);
diff --git a/test/Libraries/Microsoft.AspNetCore.Diagnostics.Middleware.Tests/Logging/AcceptanceTests.cs b/test/Libraries/Microsoft.AspNetCore.Diagnostics.Middleware.Tests/Logging/AcceptanceTests.cs
index 05860ddaae9..5794709560f 100644
--- a/test/Libraries/Microsoft.AspNetCore.Diagnostics.Middleware.Tests/Logging/AcceptanceTests.cs
+++ b/test/Libraries/Microsoft.AspNetCore.Diagnostics.Middleware.Tests/Logging/AcceptanceTests.cs
@@ -534,20 +534,20 @@ async static (logCollector, client) =>
var sixthRecord = logRecords[5].StructuredState;
Assert.Equal(5, firstRecord!.Count);
- Assert.Equal(1, secondRecord!.Count);
+ Assert.Single(secondRecord!);
Assert.Equal(5, fourthRecord!.Count);
- Assert.Equal(1, fithRecord!.Count);
+ Assert.Single(fithRecord!);
Assert.DoesNotContain(firstRecord, x => x.Key == HttpLoggingTagNames.StatusCode);
Assert.DoesNotContain(firstRecord, x => x.Key == HttpLoggingTagNames.Duration);
- Assert.DoesNotContain(secondRecord, x => x.Key == HttpLoggingTagNames.Duration);
+ Assert.DoesNotContain(secondRecord!, x => x.Key == HttpLoggingTagNames.Duration);
Assert.DoesNotContain(fourthRecord, x => x.Key == HttpLoggingTagNames.StatusCode);
Assert.DoesNotContain(fourthRecord, x => x.Key == HttpLoggingTagNames.Duration);
- Assert.DoesNotContain(fithRecord, x => x.Key == HttpLoggingTagNames.Duration);
+ Assert.DoesNotContain(fithRecord!, x => x.Key == HttpLoggingTagNames.Duration);
- Assert.Equal(1, secondRecord!.Count);
- Assert.Equal(1, fithRecord!.Count);
- Assert.Single(secondRecord, x => x.Key == HttpLoggingTagNames.StatusCode && x.Value == responseStatus);
- Assert.Single(fithRecord, x => x.Key == HttpLoggingTagNames.StatusCode && x.Value == responseStatus);
+ Assert.Single(secondRecord!);
+ Assert.Single(fithRecord!);
+ Assert.Single(secondRecord!, x => x.Key == HttpLoggingTagNames.StatusCode && x.Value == responseStatus);
+ Assert.Single(fithRecord!, x => x.Key == HttpLoggingTagNames.StatusCode && x.Value == responseStatus);
Assert.Equal(2, thirdRecord!.Count);
Assert.Equal(2, sixthRecord!.Count);
diff --git a/test/Libraries/Microsoft.AspNetCore.HeaderParsing.Tests/ParserTests.cs b/test/Libraries/Microsoft.AspNetCore.HeaderParsing.Tests/ParserTests.cs
index c5637869c33..0932aab6ac0 100644
--- a/test/Libraries/Microsoft.AspNetCore.HeaderParsing.Tests/ParserTests.cs
+++ b/test/Libraries/Microsoft.AspNetCore.HeaderParsing.Tests/ParserTests.cs
@@ -76,7 +76,7 @@ public void Cookkie_ReturnsParsedValue()
{
var sv = new StringValues("csrftoken=u32t4o3tb3gg43");
Assert.True(CookieHeaderValueListParser.Instance.TryParse(sv, out var result, out var error));
- Assert.Equal(1, result.Count);
+ Assert.Single(result);
Assert.Equal("csrftoken", result[0].Name.Value);
Assert.Equal("u32t4o3tb3gg43", result[0].Value.Value);
Assert.Null(error);
@@ -202,7 +202,7 @@ public void MediaTypes_ReturnsParsedValue()
{
var sv = new StringValues("text/html; charset=UTF-8");
Assert.True(MediaTypeHeaderValueListParser.Instance.TryParse(sv, out var result, out var error));
- Assert.Equal(1, result.Count);
+ Assert.Single(result);
Assert.Equal("text/html", result[0].MediaType);
Assert.Equal("UTF-8", result[0].Charset);
Assert.Null(error);
@@ -222,7 +222,7 @@ public void EntityTag_ReturnsParsedValue()
{
var sv = new StringValues("\"HelloWorld\"");
Assert.True(EntityTagHeaderValueListParser.Instance.TryParse(sv, out var result, out var error));
- Assert.Equal(1, result!.Count);
+ Assert.Single(result!);
Assert.Equal("\"HelloWorld\"", result[0].Tag);
Assert.Null(error);
}
@@ -241,7 +241,7 @@ public void StringQuality_ReturnsParsedValue()
{
var sv = new StringValues("en-US");
Assert.True(StringWithQualityHeaderValueListParser.Instance.TryParse(sv, out var result, out var error));
- Assert.Equal(1, result!.Count);
+ Assert.Single(result!);
Assert.Equal("en-US", result[0].Value);
Assert.Null(error);
}
@@ -301,7 +301,7 @@ public void Range_ReturnsParsedValue()
var sv = new StringValues("bytes=200-1000");
Assert.True(RangeHeaderValueParser.Instance.TryParse(sv, out var result, out var error));
Assert.Equal("bytes", result!.Unit);
- Assert.Equal(1, result.Ranges.Count);
+ Assert.Single(result.Ranges);
Assert.Equal(200, result.Ranges.Single().From);
Assert.Equal(1000, result.Ranges.Single().To);
Assert.Null(error);
diff --git a/test/Libraries/Microsoft.Extensions.AsyncState.Tests/AsyncStateTests.cs b/test/Libraries/Microsoft.Extensions.AsyncState.Tests/AsyncStateTests.cs
index 268109dafbf..05277a72897 100644
--- a/test/Libraries/Microsoft.Extensions.AsyncState.Tests/AsyncStateTests.cs
+++ b/test/Libraries/Microsoft.Extensions.AsyncState.Tests/AsyncStateTests.cs
@@ -30,7 +30,7 @@ static Task SetAsyncContext(AsyncState state, IThing context, AsyncStateToken to
return Task.CompletedTask;
}
- await SetAsyncContext(state, context, token).ConfigureAwait(false);
+ await SetAsyncContext(state, context, token);
Assert.Same(context, state.Get(token));
}
diff --git a/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/BasicConfig.json b/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/BasicConfig.json
new file mode 100644
index 00000000000..374114fb1db
--- /dev/null
+++ b/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/BasicConfig.json
@@ -0,0 +1,12 @@
+{
+ "no_entry_options": {
+ "MaximumKeyLength": 937
+ },
+ "with_entry_options": {
+ "MaximumKeyLength": 937,
+ "DefaultEntryOptions": {
+ "LocalCacheExpiration": "00:02:00",
+ "Flags": "DisableCompression,DisableLocalCacheRead"
+ }
+ }
+}
diff --git a/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/BufferReleaseTests.cs b/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/BufferReleaseTests.cs
new file mode 100644
index 00000000000..4996406c09a
--- /dev/null
+++ b/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/BufferReleaseTests.cs
@@ -0,0 +1,235 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System.Buffers;
+using System.Runtime.CompilerServices;
+using Microsoft.Extensions.Caching.Distributed;
+using Microsoft.Extensions.Caching.Hybrid.Internal;
+using Microsoft.Extensions.Caching.Memory;
+using Microsoft.Extensions.DependencyInjection;
+using Microsoft.Extensions.Options;
+using static Microsoft.Extensions.Caching.Hybrid.Internal.DefaultHybridCache;
+
+namespace Microsoft.Extensions.Caching.Hybrid.Tests;
+
+public class BufferReleaseTests // note that buffer ref-counting is only enabled for DEBUG builds; can only verify general behaviour without that
+{
+ private static ServiceProvider GetDefaultCache(out DefaultHybridCache cache, Action<ServiceCollection>? config = null)
+ {
+ var services = new ServiceCollection();
+ config?.Invoke(services);
+ services.AddHybridCache();
+ ServiceProvider provider = services.BuildServiceProvider();
+ cache = Assert.IsType<DefaultHybridCache>(provider.GetRequiredService<HybridCache>());
+ return provider;
+ }
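+
+ // note: the helper resolves the concrete DefaultHybridCache rather than the public
+ // HybridCache abstraction, so the tests can reach internal debug hooks such as
+ // DebugOnlyGetOutstandingBuffers and DebugTryGetCacheItem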
+
+ [Fact]
+ public async Task BufferGetsReleased_NoL2()
+ {
+ using var provider = GetDefaultCache(out var cache);
+#if DEBUG
+ cache.DebugOnlyGetOutstandingBuffers(flush: true);
+#endif
+
+ var key = Me();
+#if DEBUG
+ Assert.Equal(0, cache.DebugOnlyGetOutstandingBuffers());
+#endif
+ var first = await cache.GetOrCreateAsync(key, _ => GetAsync());
+ Assert.NotNull(first);
+#if DEBUG
+ Assert.Equal(1, cache.DebugOnlyGetOutstandingBuffers());
+#endif
+ Assert.True(cache.DebugTryGetCacheItem(key, out var cacheItem));
+
+ // assert that we can reserve the buffer *now* (mostly to see that it behaves differently later)
+ Assert.True(cacheItem.NeedsEvictionCallback, "should be pooled memory");
+ Assert.True(cacheItem.TryReserveBuffer(out _));
+ cacheItem.Release(); // for the above reserve
+
+ var second = await cache.GetOrCreateAsync(key, _ => GetAsync(), _noUnderlying);
+ Assert.NotNull(second);
+ Assert.NotSame(first, second);
+
+ Assert.Equal(1, cacheItem.RefCount);
+ await cache.RemoveAsync(key);
+ var third = await cache.GetOrCreateAsync(key, _ => GetAsync(), _noUnderlying);
+ Assert.Null(third);
+
+ // give it a moment for the eviction callback to kick in
+ for (var i = 0; i < 10 && cacheItem.NeedsEvictionCallback; i++)
+ {
+ await Task.Delay(250);
+ }
+#if DEBUG
+ Assert.Equal(0, cache.DebugOnlyGetOutstandingBuffers());
+#endif
+
+ // assert that we can *no longer* reserve this buffer, because we've already recycled it
+ Assert.False(cacheItem.TryReserveBuffer(out _));
+ Assert.Equal(0, cacheItem.RefCount);
+ Assert.False(cacheItem.NeedsEvictionCallback, "should be recycled now");
+ static ValueTask<Customer> GetAsync() => new(new Customer { Id = 42, Name = "Fred" });
+ }
+
+ private static readonly HybridCacheEntryOptions _noUnderlying = new() { Flags = HybridCacheEntryFlags.DisableUnderlyingData };
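+
+ // DisableUnderlyingData stops GetOrCreateAsync from invoking the supplied factory on a
+ // miss, so an absent key surfaces as null - see the Assert.Null checks in these tests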
+
+ private class TestCache : MemoryDistributedCache, IBufferDistributedCache
+ {
+ public TestCache(IOptions<MemoryDistributedCacheOptions> options)
+ : base(options)
+ {
+ }
+
+ void IBufferDistributedCache.Set(string key, ReadOnlySequence<byte> value, DistributedCacheEntryOptions options)
+ => Set(key, value.ToArray(), options); // efficiency not important for this
+
+ ValueTask IBufferDistributedCache.SetAsync(string key, ReadOnlySequence<byte> value, DistributedCacheEntryOptions options, CancellationToken token)
+ => new(SetAsync(key, value.ToArray(), options, token)); // efficiency not important for this
+
+ bool IBufferDistributedCache.TryGet(string key, IBufferWriter<byte> destination)
+ => Write(destination, Get(key));
+
+ async ValueTask<bool> IBufferDistributedCache.TryGetAsync(string key, IBufferWriter<byte> destination, CancellationToken token)
+ => Write(destination, await GetAsync(key, token));
+
+ private static bool Write(IBufferWriter<byte> destination, byte[]? buffer)
+ {
+ if (buffer is null)
+ {
+ return false;
+ }
+
+ destination.Write(buffer);
+ return true;
+ }
+ }
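+
+ // TestCache adapts the byte[]-based MemoryDistributedCache to IBufferDistributedCache by
+ // round-tripping through arrays; as the inline comments note, correctness rather than
+ // efficiency is the point here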
+
+ [Fact]
+ public async Task BufferDoesNotNeedRelease_LegacyL2() // byte[] API; not pooled
+ {
+ using var provider = GetDefaultCache(out var cache,
+ services => services.AddSingleton<IDistributedCache, TestCache>());
+
+ cache.DebugRemoveFeatures(CacheFeatures.BackendBuffers);
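+
+ // removing the BackendBuffers feature presumably makes the cache treat L2 as a plain
+ // byte[] backend even though TestCache also implements IBufferDistributedCache, which
+ // is the "legacy" scenario this test targets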
+
+ // prep the backend with our data
+ var key = Me();
+ Assert.NotNull(cache.BackendCache);
+ IHybridCacheSerializer<Customer> serializer = cache.GetSerializer<Customer>();
+ using (RecyclableArrayBufferWriter<byte> writer = RecyclableArrayBufferWriter<byte>.Create(int.MaxValue))
+ {
+ serializer.Serialize(await GetAsync(), writer);
+ cache.BackendCache.Set(key, writer.ToArray());
+ }
+#if DEBUG
+ cache.DebugOnlyGetOutstandingBuffers(flush: true);
+ Assert.Equal(0, cache.DebugOnlyGetOutstandingBuffers());
+#endif
+ var first = await cache.GetOrCreateAsync(key, _ => GetAsync(), _noUnderlying); // we expect this to come from L2, hence NoUnderlying
+ Assert.NotNull(first);
+#if DEBUG
+ Assert.Equal(0, cache.DebugOnlyGetOutstandingBuffers());
+#endif
+ Assert.True(cache.DebugTryGetCacheItem(key, out var cacheItem));
+
+ // assert that we can reserve the buffer *now* (mostly to see that it behaves differently later)
+ Assert.False(cacheItem.NeedsEvictionCallback, "should NOT be pooled memory");
+ Assert.True(cacheItem.TryReserveBuffer(out _));
+ cacheItem.Release(); // for the above reserve
+
+ var second = await cache.GetOrCreateAsync(key, _ => GetAsync(), _noUnderlying);
+ Assert.NotNull(second);
+ Assert.NotSame(first, second);
+
+ Assert.Equal(1, cacheItem.RefCount);
+ await cache.RemoveAsync(key);
+ var third = await cache.GetOrCreateAsync(key, _ => GetAsync(), _noUnderlying);
+ Assert.Null(third);
+ Assert.Null(await cache.BackendCache.GetAsync(key)); // should be gone from L2 too
+
+ // give it a moment for the eviction callback to kick in
+ for (var i = 0; i < 10 && cacheItem.NeedsEvictionCallback; i++)
+ {
+ await Task.Delay(250);
+ }
+#if DEBUG
+ Assert.Equal(0, cache.DebugOnlyGetOutstandingBuffers());
+#endif
+
+ // assert that we can *still* reserve this buffer - it was never pooled, so there is nothing to recycle
+ Assert.True(cacheItem.TryReserveBuffer(out _)); // always readable
+ cacheItem.Release();
+ Assert.Equal(1, cacheItem.RefCount); // not decremented because there was no need to add the hook
+
+ Assert.False(cacheItem.NeedsEvictionCallback, "should still not need recycling");
+ static ValueTask GetAsync() => new(new Customer { Id = 42, Name = "Fred" });
+ }
+
+ [Fact]
+ public async Task BufferGetsReleased_BufferL2() // IBufferWriter API; pooled
+ {
+ using var provider = GetDefaultCache(out var cache,
+ services => services.AddSingleton<IDistributedCache, TestCache>());
+
+ // prep the backend with our data
+ var key = Me();
+ Assert.NotNull(cache.BackendCache);
+ IHybridCacheSerializer<Customer> serializer = cache.GetSerializer<Customer>();
+ using (RecyclableArrayBufferWriter<byte> writer = RecyclableArrayBufferWriter<byte>.Create(int.MaxValue))
+ {
+ serializer.Serialize(await GetAsync(), writer);
+ cache.BackendCache.Set(key, writer.ToArray());
+ }
+#if DEBUG
+ cache.DebugOnlyGetOutstandingBuffers(flush: true);
+ Assert.Equal(0, cache.DebugOnlyGetOutstandingBuffers());
+#endif
+ var first = await cache.GetOrCreateAsync(key, _ => GetAsync(), _noUnderlying); // we expect this to come from L2, hence NoUnderlying
+ Assert.NotNull(first);
+#if DEBUG
+ Assert.Equal(1, cache.DebugOnlyGetOutstandingBuffers());
+#endif
+ Assert.True(cache.DebugTryGetCacheItem(key, out var cacheItem));
+
+ // assert that we can reserve the buffer *now* (mostly to see that it behaves differently later)
+ Assert.True(cacheItem.NeedsEvictionCallback, "should be pooled memory");
+ Assert.True(cacheItem.TryReserveBuffer(out _));
+ cacheItem.Release(); // for the above reserve
+
+ var second = await cache.GetOrCreateAsync(key, _ => GetAsync(), _noUnderlying);
+ Assert.NotNull(second);
+ Assert.NotSame(first, second);
+
+ Assert.Equal(1, cacheItem.RefCount);
+ await cache.RemoveAsync(key);
+ var third = await cache.GetOrCreateAsync(key, _ => GetAsync(), _noUnderlying);
+ Assert.Null(third);
+ Assert.Null(await cache.BackendCache.GetAsync(key)); // should be gone from L2 too
+
+ // give it a moment for the eviction callback to kick in
+ for (var i = 0; i < 10 && cacheItem.NeedsEvictionCallback; i++)
+ {
+ await Task.Delay(250);
+ }
+#if DEBUG
+ Assert.Equal(0, cache.DebugOnlyGetOutstandingBuffers());
+#endif
+
+ // assert that we can *no longer* reserve this buffer, because we've already recycled it
+ Assert.False(cacheItem.TryReserveBuffer(out _)); // released now
+ Assert.Equal(0, cacheItem.RefCount);
+
+ Assert.False(cacheItem.NeedsEvictionCallback, "should be recycled by now");
+ static ValueTask GetAsync() => new(new Customer { Id = 42, Name = "Fred" });
+ }
+
+ public class Customer
+ {
+ public int Id { get; set; }
+ public string Name { get; set; } = "";
+ }
+
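+ // [CallerMemberName] gives every test its own cache key, so tests cannot observe each
+ // other's entries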
+ private static string Me([CallerMemberName] string caller = "") => caller;
+}
diff --git a/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/DistributedCacheTests.cs b/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/DistributedCacheTests.cs
new file mode 100644
index 00000000000..5a565866f63
--- /dev/null
+++ b/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/DistributedCacheTests.cs
@@ -0,0 +1,397 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System.Buffers;
+using System.Runtime.CompilerServices;
+using System.Runtime.InteropServices;
+using Microsoft.Extensions.Caching.Distributed;
+using Microsoft.Extensions.Caching.Hybrid.Internal;
+using Microsoft.Extensions.DependencyInjection;
+using Microsoft.Extensions.Internal;
+using Xunit.Abstractions;
+
+namespace Microsoft.Extensions.Caching.Hybrid.Tests;
+
+/// <summary>
+/// Validates overarching expectations of IDistributedCache implementations, in particular the IBufferDistributedCache behaviour added for HybridCache.
+/// </summary>
+public abstract class DistributedCacheTests
+{
+ protected DistributedCacheTests(ITestOutputHelper log)
+ {
+ Log = log;
+ }
+
+ protected ITestOutputHelper Log { get; }
+ protected abstract ValueTask ConfigureAsync(IServiceCollection services);
+ protected abstract bool CustomClockSupported { get; }
+
+ protected FakeTime Clock { get; } = new();
+
+ protected sealed class FakeTime : TimeProvider, ISystemClock
+ {
+ private DateTimeOffset _now = DateTimeOffset.UtcNow;
+ public void Reset() => _now = DateTimeOffset.UtcNow;
+
+ DateTimeOffset ISystemClock.UtcNow => _now;
+
+ public override DateTimeOffset GetUtcNow() => _now;
+
+ public void Add(TimeSpan delta) => _now += delta;
+ }
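+
+ // FakeTime implements both the modern TimeProvider and the legacy ISystemClock, so
+ // whichever abstraction a given cache consumes, InitAsync registers the same
+ // deterministic clock and tests can advance time explicitly via Add()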
+
+ private async ValueTask<IServiceCollection> InitAsync()
+ {
+ Clock.Reset();
+ var services = new ServiceCollection();
+ services.AddSingleton<TimeProvider>(Clock);
+ services.AddSingleton<ISystemClock>(Clock);
+ await ConfigureAsync(services);
+ return services;
+ }
+
+ [Theory]
+ [InlineData(0)]
+ [InlineData(128)]
+ [InlineData(1024)]
+ [InlineData(16 * 1024)]
+ public async Task SimpleBufferRoundtrip(int size)
+ {
+ var cache = (await InitAsync()).BuildServiceProvider().GetService<IDistributedCache>();
+ if (cache is null)
+ {
+ Log.WriteLine("Cache is not available");
+ return; // inconclusive
+ }
+
+ var key = $"{Me()}:{size}";
+ cache.Remove(key);
+ Assert.Null(cache.Get(key));
+
+ var expected = new byte[size];
+ new Random().NextBytes(expected);
+ cache.Set(key, expected, _fiveMinutes);
+
+ var actual = cache.Get(key);
+ Assert.NotNull(actual);
+ Assert.True(expected.SequenceEqual(actual));
+ Log.WriteLine("Data validated");
+
+ if (CustomClockSupported)
+ {
+ Clock.Add(TimeSpan.FromMinutes(4));
+ actual = cache.Get(key);
+ Assert.NotNull(actual);
+ Assert.True(expected.SequenceEqual(actual));
+
+ Clock.Add(TimeSpan.FromMinutes(2));
+ actual = cache.Get(key);
+ Assert.Null(actual);
+
+ Log.WriteLine("Expiration validated");
+ }
+ else
+ {
+ Log.WriteLine("Expiration not validated - TimeProvider not supported");
+ }
+ }
+
+ [Theory]
+ [InlineData(0)]
+ [InlineData(128)]
+ [InlineData(1024)]
+ [InlineData(16 * 1024)]
+ public async Task SimpleBufferRoundtripAsync(int size)
+ {
+ var cache = (await InitAsync()).BuildServiceProvider().GetService<IDistributedCache>();
+ if (cache is null)
+ {
+ Log.WriteLine("Cache is not available");
+ return; // inconclusive
+ }
+
+ var key = $"{Me()}:{size}";
+ await cache.RemoveAsync(key);
+ Assert.Null(cache.Get(key));
+
+ var expected = new byte[size];
+ new Random().NextBytes(expected);
+ await cache.SetAsync(key, expected, _fiveMinutes);
+
+ var actual = await cache.GetAsync(key);
+ Assert.NotNull(actual);
+ Assert.True(expected.SequenceEqual(actual));
+ Log.WriteLine("Data validated");
+
+ if (CustomClockSupported)
+ {
+ Clock.Add(TimeSpan.FromMinutes(4));
+ actual = await cache.GetAsync(key);
+ Assert.NotNull(actual);
+ Assert.True(expected.SequenceEqual(actual));
+
+ Clock.Add(TimeSpan.FromMinutes(2));
+ actual = await cache.GetAsync(key);
+ Assert.Null(actual);
+
+ Log.WriteLine("Expiration validated");
+ }
+ else
+ {
+ Log.WriteLine("Expiration not validated - TimeProvider not supported");
+ }
+ }
+
+ public enum SequenceKind
+ {
+ FullArray,
+ PaddedArray,
+ CustomMemory,
+ MultiSegment,
+ }
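+
+ // the four kinds cover the buffer shapes an implementation may special-case: a whole
+ // array (MemoryMarshal.TryGetArray succeeds at offset 0), a slice of a larger array,
+ // memory with no array backing, and a genuinely multi-segment sequence; Invent() below
+ // asserts that each shape really has those characteristics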
+
+ [Theory]
+ [InlineData(0, SequenceKind.FullArray)]
+ [InlineData(128, SequenceKind.FullArray)]
+ [InlineData(1024, SequenceKind.FullArray)]
+ [InlineData(16 * 1024, SequenceKind.FullArray)]
+ [InlineData(0, SequenceKind.PaddedArray)]
+ [InlineData(128, SequenceKind.PaddedArray)]
+ [InlineData(1024, SequenceKind.PaddedArray)]
+ [InlineData(16 * 1024, SequenceKind.PaddedArray)]
+ [InlineData(0, SequenceKind.CustomMemory)]
+ [InlineData(128, SequenceKind.CustomMemory)]
+ [InlineData(1024, SequenceKind.CustomMemory)]
+ [InlineData(16 * 1024, SequenceKind.CustomMemory)]
+ [InlineData(0, SequenceKind.MultiSegment)]
+ [InlineData(128, SequenceKind.MultiSegment)]
+ [InlineData(1024, SequenceKind.MultiSegment)]
+ [InlineData(16 * 1024, SequenceKind.MultiSegment)]
+ public async Task ReadOnlySequenceBufferRoundtrip(int size, SequenceKind kind)
+ {
+ var cache = (await InitAsync()).BuildServiceProvider().GetService<IDistributedCache>() as IBufferDistributedCache;
+ if (cache is null)
+ {
+ Log.WriteLine("Cache is not available or does not support IBufferDistributedCache");
+ return; // inconclusive
+ }
+
+ var key = $"{Me()}:{size}/{kind}";
+ cache.Remove(key);
+ Assert.Null(cache.Get(key));
+
+ var payload = Invent(size, kind);
+ ReadOnlyMemory<byte> expected = payload.ToArray(); // simplify for testing
+ Assert.Equal(size, expected.Length);
+ cache.Set(key, payload, _fiveMinutes);
+
+ RecyclableArrayBufferWriter<byte> writer = RecyclableArrayBufferWriter<byte>.Create(int.MaxValue);
+ Assert.True(cache.TryGet(key, writer));
+ Assert.True(expected.Span.SequenceEqual(writer.GetCommittedMemory().Span));
+ writer.ResetInPlace();
+ Log.WriteLine("Data validated");
+
+ if (CustomClockSupported)
+ {
+ Clock.Add(TimeSpan.FromMinutes(4));
+ Assert.True(cache.TryGet(key, writer));
+ Assert.True(expected.Span.SequenceEqual(writer.GetCommittedMemory().Span));
+ writer.ResetInPlace();
+
+ Clock.Add(TimeSpan.FromMinutes(2));
+ Assert.False(cache.TryGet(key, writer));
+ Assert.Equal(0, writer.CommittedBytes);
+
+ Log.WriteLine("Expiration validated");
+ }
+ else
+ {
+ Log.WriteLine("Expiration not validated - TimeProvider not supported");
+ }
+
+ writer.Dispose(); // intentionally only recycle on success
+ }
+
+ [Theory]
+ [InlineData(0, SequenceKind.FullArray)]
+ [InlineData(128, SequenceKind.FullArray)]
+ [InlineData(1024, SequenceKind.FullArray)]
+ [InlineData(16 * 1024, SequenceKind.FullArray)]
+ [InlineData(0, SequenceKind.PaddedArray)]
+ [InlineData(128, SequenceKind.PaddedArray)]
+ [InlineData(1024, SequenceKind.PaddedArray)]
+ [InlineData(16 * 1024, SequenceKind.PaddedArray)]
+ [InlineData(0, SequenceKind.CustomMemory)]
+ [InlineData(128, SequenceKind.CustomMemory)]
+ [InlineData(1024, SequenceKind.CustomMemory)]
+ [InlineData(16 * 1024, SequenceKind.CustomMemory)]
+ [InlineData(0, SequenceKind.MultiSegment)]
+ [InlineData(128, SequenceKind.MultiSegment)]
+ [InlineData(1024, SequenceKind.MultiSegment)]
+ [InlineData(16 * 1024, SequenceKind.MultiSegment)]
+ public async Task ReadOnlySequenceBufferRoundtripAsync(int size, SequenceKind kind)
+ {
+ var cache = (await InitAsync()).BuildServiceProvider().GetService<IDistributedCache>() as IBufferDistributedCache;
+ if (cache is null)
+ {
+ Log.WriteLine("Cache is not available or does not support IBufferDistributedCache");
+ return; // inconclusive
+ }
+
+ var key = $"{Me()}:{size}/{kind}";
+ await cache.RemoveAsync(key);
+ Assert.Null(await cache.GetAsync(key));
+
+ var payload = Invent(size, kind);
+ ReadOnlyMemory<byte> expected = payload.ToArray(); // simplify for testing
+ Assert.Equal(size, expected.Length);
+ await cache.SetAsync(key, payload, _fiveMinutes);
+
+ RecyclableArrayBufferWriter<byte> writer = RecyclableArrayBufferWriter<byte>.Create(int.MaxValue);
+ Assert.True(await cache.TryGetAsync(key, writer));
+ Assert.True(expected.Span.SequenceEqual(writer.GetCommittedMemory().Span));
+ writer.ResetInPlace();
+ Log.WriteLine("Data validated");
+
+ if (CustomClockSupported)
+ {
+ Clock.Add(TimeSpan.FromMinutes(4));
+ Assert.True(await cache.TryGetAsync(key, writer));
+ Assert.True(expected.Span.SequenceEqual(writer.GetCommittedMemory().Span));
+ writer.ResetInPlace();
+
+ Clock.Add(TimeSpan.FromMinutes(2));
+ Assert.False(await cache.TryGetAsync(key, writer));
+ Assert.Equal(0, writer.CommittedBytes);
+
+ Log.WriteLine("Expiration validated");
+ }
+ else
+ {
+ Log.WriteLine("Expiration not validated - TimeProvider not supported");
+ }
+
+ writer.Dispose(); // intentionally only recycle on success
+ }
+
+ [System.Diagnostics.CodeAnalysis.SuppressMessage("Reliability", "CA2000:Dispose objects before losing scope", Justification = "Not relevant for this test - no-op")]
+ private static ReadOnlySequence<byte> Invent(int size, SequenceKind kind)
+ {
+ var rand = new Random();
+ ReadOnlySequence<byte> payload;
+ switch (kind)
+ {
+ case SequenceKind.FullArray:
+ var arr = new byte[size];
+ rand.NextBytes(arr);
+ payload = new(arr);
+ break;
+ case SequenceKind.PaddedArray:
+ arr = new byte[size + 10];
+ rand.NextBytes(arr);
+ payload = new(arr, 5, arr.Length - 10);
+ break;
+ case SequenceKind.CustomMemory:
+ var mem = new CustomMemory(size, rand).Memory;
+ payload = new(mem);
+ break;
+ case SequenceKind.MultiSegment:
+ if (size == 0)
+ {
+ payload = default;
+ break;
+ }
+
+ if (size < 10)
+ {
+ throw new ArgumentException("small segments not considered"); // a pain to construct
+ }
+
+ CustomSegment first = new(10, rand, null); // we'll take the last 3 of this 10
+ CustomSegment second = new(size - 7, rand, first); // we'll take all of this one
+ CustomSegment third = new(10, rand, second); // we'll take the first 4 of this 10
+ payload = new(first, 7, third, 4);
+ break;
+ default:
+ throw new ArgumentOutOfRangeException(nameof(kind));
+ }
+
+ // now validate what we expect of that payload
+ Assert.Equal(size, payload.Length);
+ switch (kind)
+ {
+ case SequenceKind.CustomMemory or SequenceKind.MultiSegment when size == 0:
+ Assert.True(payload.IsSingleSegment);
+ Assert.True(MemoryMarshal.TryGetArray(payload.First, out _));
+ break;
+ case SequenceKind.MultiSegment:
+ Assert.False(payload.IsSingleSegment);
+ break;
+ case SequenceKind.CustomMemory:
+ Assert.True(payload.IsSingleSegment);
+ Assert.False(MemoryMarshal.TryGetArray(payload.First, out _));
+ break;
+ case SequenceKind.FullArray:
+ Assert.True(payload.IsSingleSegment);
+ Assert.True(MemoryMarshal.TryGetArray(payload.First, out var segment));
+ Assert.Equal(0, segment.Offset);
+ Assert.NotNull(segment.Array);
+ Assert.Equal(size, segment.Count);
+ Assert.Equal(size, segment.Array.Length);
+ break;
+ case SequenceKind.PaddedArray:
+ Assert.True(payload.IsSingleSegment);
+ Assert.True(MemoryMarshal.TryGetArray(payload.First, out segment));
+ Assert.NotEqual(0, segment.Offset);
+ Assert.NotNull(segment.Array);
+ Assert.Equal(size, segment.Count);
+ Assert.NotEqual(size, segment.Array.Length);
+ break;
+ }
+
+ return payload;
+ }
+
+ private class CustomSegment : ReadOnlySequenceSegment<byte>
+ {
+ public CustomSegment(int size, Random? rand, CustomSegment? previous)
+ {
+ var arr = new byte[size + 10];
+ rand?.NextBytes(arr);
+ Memory = new(arr, 5, arr.Length - 10);
+ if (previous is not null)
+ {
+ RunningIndex = previous.RunningIndex + previous.Memory.Length;
+ previous.Next = this;
+ }
+ }
+ }
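+
+ // segments chain by assigning Next on the previous segment and accumulating
+ // RunningIndex; the ReadOnlySequence<byte>(first, startIndex, last, endIndex)
+ // constructor then slices across the chain, which is how the MultiSegment payload
+ // above is assembled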
+
+ private class CustomMemory : MemoryManager<byte>
+ {
+ private readonly byte[] _data;
+ public CustomMemory(int size, Random? rand = null)
+ {
+ _data = new byte[size + 10];
+ rand?.NextBytes(_data);
+ }
+
+ public override Span<byte> GetSpan() => new(_data, 5, _data.Length - 10);
+ public override MemoryHandle Pin(int elementIndex = 0) => throw new NotSupportedException();
+ public override void Unpin() => throw new NotSupportedException();
+ protected override void Dispose(bool disposing)
+ {
+ }
+
+ protected override bool TryGetArray(out ArraySegment<byte> segment)
+ {
+ segment = default;
+ return false;
+ }
+ }
+
+ private static readonly DistributedCacheEntryOptions _fiveMinutes
+ = new() { AbsoluteExpirationRelativeToNow = TimeSpan.FromMinutes(5) };
+
+ protected static string Me([CallerMemberName] string caller = "") => caller;
+}
diff --git a/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/FunctionalTests.cs b/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/FunctionalTests.cs
new file mode 100644
index 00000000000..5edd99722ac
--- /dev/null
+++ b/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/FunctionalTests.cs
@@ -0,0 +1,82 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System.Runtime.CompilerServices;
+using Microsoft.Extensions.Caching.Hybrid.Internal;
+using Microsoft.Extensions.DependencyInjection;
+
+namespace Microsoft.Extensions.Caching.Hybrid.Tests;
+public class FunctionalTests
+{
+ private static ServiceProvider GetDefaultCache(out DefaultHybridCache cache, Action<ServiceCollection>? config = null)
+ {
+ var services = new ServiceCollection();
+ config?.Invoke(services);
+ services.AddHybridCache();
+ ServiceProvider provider = services.BuildServiceProvider();
+ cache = Assert.IsType<DefaultHybridCache>(provider.GetRequiredService<HybridCache>());
+ return provider;
+ }
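+
+ // pattern used below: seed a key with 42, then call GetOrCreateAsync again with a
+ // factory that would produce 96; while the entry is still cached we keep observing 42,
+ // and only after a matching remove does 96 appear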
+
+ [Fact]
+ public async Task RemoveSingleKey()
+ {
+ using var provider = GetDefaultCache(out var cache);
+ var key = Me();
+ Assert.Equal(42, await cache.GetOrCreateAsync(key, _ => new ValueTask<int>(42)));
+
+ // now slightly different func to show delta; should use cached value initially
+ await cache.RemoveAsync("unrelated");
+ Assert.Equal(42, await cache.GetOrCreateAsync(key, _ => new ValueTask<int>(96)));
+
+ // now remove and repeat - should get updated value
+ await cache.RemoveAsync(key);
+ Assert.Equal(96, await cache.GetOrCreateAsync(key, _ => new ValueTask<int>(96)));
+ }
+
+ [Fact]
+ public async Task RemoveNoKeyViaArray()
+ {
+ using var provider = GetDefaultCache(out var cache);
+ var key = Me();
+ Assert.Equal(42, await cache.GetOrCreateAsync(key, _ => new ValueTask<int>(42)));
+
+ // now slightly different func to show delta; should use same cached value
+ await cache.RemoveAsync([]);
+ Assert.Equal(42, await cache.GetOrCreateAsync(key, _ => new ValueTask<int>(96)));
+ }
+
+ [Fact]
+ public async Task RemoveSingleKeyViaArray()
+ {
+ using var provider = GetDefaultCache(out var cache);
+ var key = Me();
+ Assert.Equal(42, await cache.GetOrCreateAsync(key, _ => new ValueTask<int>(42)));
+
+ // now slightly different func to show delta; should use cached value initially
+ await cache.RemoveAsync(["unrelated"]);
+ Assert.Equal(42, await cache.GetOrCreateAsync(key, _ => new ValueTask<int>(96)));
+
+ // now remove and repeat - should get updated value
+ await cache.RemoveAsync([key]);
+ Assert.Equal(96, await cache.GetOrCreateAsync(key, _ => new ValueTask<int>(96)));
+ }
+
+ [Fact]
+ public async Task RemoveMultipleKeysViaArray()
+ {
+ using var provider = GetDefaultCache(out var cache);
+ var key = Me();
+ Assert.Equal(42, await cache.GetOrCreateAsync(key, _ => new ValueTask<int>(42)));
+
+ // now slightly different func to show delta; should use cached value initially
+ Assert.Equal(42, await cache.GetOrCreateAsync(key, _ => new ValueTask<int>(96)));
+
+ // now remove and repeat - should get updated value
+ await cache.RemoveAsync([key, "unrelated"]);
+ Assert.Equal(96, await cache.GetOrCreateAsync(key, _ => new ValueTask<int>(96)));
+ }
+
+ private static string Me([CallerMemberName] string caller = "") => caller;
+}
diff --git a/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/L2Tests.cs b/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/L2Tests.cs
new file mode 100644
index 00000000000..850c6a054b9
--- /dev/null
+++ b/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/L2Tests.cs
@@ -0,0 +1,274 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System.Buffers;
+using System.Runtime.CompilerServices;
+using Microsoft.Extensions.Caching.Distributed;
+using Microsoft.Extensions.Caching.Hybrid.Internal;
+using Microsoft.Extensions.Caching.Memory;
+using Microsoft.Extensions.DependencyInjection;
+using Microsoft.Extensions.Options;
+using Xunit.Abstractions;
+
+namespace Microsoft.Extensions.Caching.Hybrid.Tests;
+public class L2Tests(ITestOutputHelper log)
+{
+ private static string CreateString(bool work = false)
+ {
+ Assert.True(work, "we didn't expect this to be invoked");
+ return Guid.NewGuid().ToString();
+ }
+
+ private static readonly HybridCacheEntryOptions _expiry = new() { Expiration = TimeSpan.FromMinutes(3.5) };
+
+ private static readonly HybridCacheEntryOptions _expiryNoL1 = new() { Flags = HybridCacheEntryFlags.DisableLocalCache, Expiration = TimeSpan.FromMinutes(3.5) };
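+
+ // DisableLocalCache forces every read through the backend (L2), which is why the
+ // "without L1" loops below add one backend operation per iteration while the L1 loops
+ // leave OpCount unchanged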
+
+ private ITestOutputHelper Log => log;
+
+ private class Options<T>(T value) : IOptions<T>
+ where T : class
+ {
+ T IOptions<T>.Value => value;
+ }
+
+ private ServiceProvider GetDefaultCache(bool buffers, out DefaultHybridCache cache)
+ {
+ var services = new ServiceCollection();
+ var localCacheOptions = new Options<MemoryDistributedCacheOptions>(new());
+ var localCache = new MemoryDistributedCache(localCacheOptions);
+ services.AddSingleton<IDistributedCache>(buffers ? new BufferLoggingCache(Log, localCache) : new LoggingCache(Log, localCache));
+ services.AddHybridCache();
+ ServiceProvider provider = services.BuildServiceProvider();
+ cache = Assert.IsType<DefaultHybridCache>(provider.GetRequiredService<HybridCache>());
+ return provider;
+ }
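+
+ // LoggingCache wraps a MemoryDistributedCache and counts every backend call; the
+ // 'buffers' flag swaps in BufferLoggingCache so the IBufferDistributedCache code path
+ // is exercised in the same scenarios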
+
+ [Theory]
+ [InlineData(true)]
+ [InlineData(false)]
+ public async Task AssertL2Operations_Immutable(bool buffers)
+ {
+ using var provider = GetDefaultCache(buffers, out var cache);
+ var backend = Assert.IsAssignableFrom<LoggingCache>(cache.BackendCache);
+ Log.WriteLine("Inventing key...");
+ var s = await cache.GetOrCreateAsync(Me(), ct => new ValueTask<string>(CreateString(true)));
+ Assert.Equal(2, backend.OpCount); // GET, SET
+
+ Log.WriteLine("Reading with L1...");
+ for (var i = 0; i < 5; i++)
+ {
+ var x = await cache.GetOrCreateAsync(Me(), ct => new ValueTask<string>(CreateString()));
+ Assert.Equal(s, x);
+ Assert.Same(s, x);
+ }
+
+ Assert.Equal(2, backend.OpCount); // shouldn't be hit
+
+ Log.WriteLine("Reading without L1...");
+ for (var i = 0; i < 5; i++)
+ {
+ var x = await cache.GetOrCreateAsync(Me(), ct => new ValueTask<string>(CreateString()), _expiryNoL1);
+ Assert.Equal(s, x);
+ Assert.NotSame(s, x);
+ }
+
+ Assert.Equal(7, backend.OpCount); // should be read every time
+
+ Log.WriteLine("Setting value directly");
+ s = CreateString(true);
+ await cache.SetAsync(Me(), s);
+ for (var i = 0; i < 5; i++)
+ {
+ var x = await cache.GetOrCreateAsync(Me(), ct => new ValueTask<string>(CreateString()));
+ Assert.Equal(s, x);
+ Assert.Same(s, x);
+ }
+
+ Assert.Equal(8, backend.OpCount); // SET
+
+ Log.WriteLine("Removing key...");
+ await cache.RemoveAsync(Me());
+ Assert.Equal(9, backend.OpCount); // DEL
+
+ Log.WriteLine("Fetching new...");
+ var t = await cache.GetOrCreateAsync(Me(), ct => new ValueTask<string>(CreateString(true)));
+ Assert.NotEqual(s, t);
+ Assert.Equal(11, backend.OpCount); // GET, SET
+ }
+
+ public sealed class Foo
+ {
+ public string Value { get; set; } = "";
+ }
+
+ [Theory]
+ [InlineData(true)]
+ [InlineData(false)]
+ public async Task AssertL2Operations_Mutable(bool buffers)
+ {
+ using var provider = GetDefaultCache(buffers, out var cache);
+ var backend = Assert.IsAssignableFrom<LoggingCache>(cache.BackendCache);
+ Log.WriteLine("Inventing key...");
+ var s = await cache.GetOrCreateAsync(Me(), ct => new ValueTask<Foo>(new Foo { Value = CreateString(true) }), _expiry);
+ Assert.Equal(2, backend.OpCount); // GET, SET
+
+ Log.WriteLine("Reading with L1...");
+ for (var i = 0; i < 5; i++)
+ {
+ var x = await cache.GetOrCreateAsync(Me(), ct => new ValueTask<Foo>(new Foo { Value = CreateString() }), _expiry);
+ Assert.Equal(s.Value, x.Value);
+ Assert.NotSame(s, x);
+ }
+
+ Assert.Equal(2, backend.OpCount); // shouldn't be hit
+
+ Log.WriteLine("Reading without L1...");
+ for (var i = 0; i < 5; i++)
+ {
+ var x = await cache.GetOrCreateAsync(Me(), ct => new ValueTask<Foo>(new Foo { Value = CreateString() }), _expiryNoL1);
+ Assert.Equal(s.Value, x.Value);
+ Assert.NotSame(s, x);
+ }
+
+ Assert.Equal(7, backend.OpCount); // should be read every time
+
+ Log.WriteLine("Setting value directly");
+ s = new Foo { Value = CreateString(true) };
+ await cache.SetAsync(Me(), s);
+ for (var i = 0; i < 5; i++)
+ {
+ var x = await cache.GetOrCreateAsync(Me(), ct => new ValueTask<Foo>(new Foo { Value = CreateString() }), _expiry);
+ Assert.Equal(s.Value, x.Value);
+ Assert.NotSame(s, x);
+ }
+
+ Assert.Equal(8, backend.OpCount); // SET
+
+ Log.WriteLine("Removing key...");
+ await cache.RemoveAsync(Me());
+ Assert.Equal(9, backend.OpCount); // DEL
+
+ Log.WriteLine("Fetching new...");
+ var t = await cache.GetOrCreateAsync(Me(), ct => new ValueTask<Foo>(new Foo { Value = CreateString(true) }), _expiry);
+ Assert.NotEqual(s.Value, t.Value);
+ Assert.Equal(11, backend.OpCount); // GET, SET
+ }
+
+ private class BufferLoggingCache : LoggingCache, IBufferDistributedCache
+ {
+ public BufferLoggingCache(ITestOutputHelper log, IDistributedCache tail)
+ : base(log, tail)
+ {
+ }
+
+ void IBufferDistributedCache.Set(string key, ReadOnlySequence<byte> value, DistributedCacheEntryOptions options)
+ {
+ Interlocked.Increment(ref ProtectedOpCount);
+ Log.WriteLine($"Set (ROS-byte): {key}");
+ Tail.Set(key, value.ToArray(), options);
+ }
+
+ ValueTask IBufferDistributedCache.SetAsync(string key, ReadOnlySequence<byte> value, DistributedCacheEntryOptions options, CancellationToken token)
+ {
+ Interlocked.Increment(ref ProtectedOpCount);
+ Log.WriteLine($"SetAsync (ROS-byte): {key}");
+ return new(Tail.SetAsync(key, value.ToArray(), options, token));
+ }
+
+ bool IBufferDistributedCache.TryGet(string key, IBufferWriter<byte> destination)
+ {
+ Interlocked.Increment(ref ProtectedOpCount);
+ Log.WriteLine($"TryGet: {key}");
+ var buffer = Tail.Get(key);
+ if (buffer is null)
+ {
+ return false;
+ }
+
+ destination.Write(buffer);
+ return true;
+ }
+
+ async ValueTask<bool> IBufferDistributedCache.TryGetAsync(string key, IBufferWriter<byte> destination, CancellationToken token)
+ {
+ Interlocked.Increment(ref ProtectedOpCount);
+ Log.WriteLine($"TryGetAsync: {key}");
+ var buffer = await Tail.GetAsync(key, token);
+ if (buffer is null)
+ {
+ return false;
+ }
+
+ destination.Write(buffer);
+ return true;
+ }
+ }
+
+ private class LoggingCache(ITestOutputHelper log, IDistributedCache tail) : IDistributedCache
+ {
+ protected ITestOutputHelper Log => log;
+ protected IDistributedCache Tail => tail;
+
+ protected int ProtectedOpCount;
+
+ public int OpCount => Volatile.Read(ref ProtectedOpCount);
+
+ byte[]? IDistributedCache.Get(string key)
+ {
+ Interlocked.Increment(ref ProtectedOpCount);
+ Log.WriteLine($"Get: {key}");
+ return Tail.Get(key);
+ }
+
+ Task<byte[]?> IDistributedCache.GetAsync(string key, CancellationToken token)
+ {
+ Interlocked.Increment(ref ProtectedOpCount);
+ Log.WriteLine($"GetAsync: {key}");
+ return Tail.GetAsync(key, token);
+ }
+
+ void IDistributedCache.Refresh(string key)
+ {
+ Interlocked.Increment(ref ProtectedOpCount);
+ Log.WriteLine($"Refresh: {key}");
+ Tail.Refresh(key);
+ }
+
+ Task IDistributedCache.RefreshAsync(string key, CancellationToken token)
+ {
+ Interlocked.Increment(ref ProtectedOpCount);
+ Log.WriteLine($"RefreshAsync: {key}");
+ return Tail.RefreshAsync(key, token);
+ }
+
+ void IDistributedCache.Remove(string key)
+ {
+ Interlocked.Increment(ref ProtectedOpCount);
+ Log.WriteLine($"Remove: {key}");
+ Tail.Remove(key);
+ }
+
+ Task IDistributedCache.RemoveAsync(string key, CancellationToken token)
+ {
+ Interlocked.Increment(ref ProtectedOpCount);
+ Log.WriteLine($"RemoveAsync: {key}");
+ return Tail.RemoveAsync(key, token);
+ }
+
+ void IDistributedCache.Set(string key, byte[] value, DistributedCacheEntryOptions options)
+ {
+ Interlocked.Increment(ref ProtectedOpCount);
+ Log.WriteLine($"Set (byte[]): {key}");
+ Tail.Set(key, value, options);
+ }
+
+ Task IDistributedCache.SetAsync(string key, byte[] value, DistributedCacheEntryOptions options, CancellationToken token)
+ {
+ Interlocked.Increment(ref ProtectedOpCount);
+ Log.WriteLine($"SetAsync (byte[]): {key}");
+ return Tail.SetAsync(key, value, options, token);
+ }
+ }
+
+ private static string Me([CallerMemberName] string caller = "") => caller;
+}
diff --git a/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/Microsoft.Extensions.Caching.Hybrid.Tests.csproj b/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/Microsoft.Extensions.Caching.Hybrid.Tests.csproj
new file mode 100644
index 00000000000..ef80a84eee9
--- /dev/null
+++ b/test/Libraries/Microsoft.Extensions.Caching.Hybrid.Tests/Microsoft.Extensions.Caching.Hybrid.Tests.csproj
@@ -0,0 +1,30 @@
+
+
+