diff --git a/.editorconfig b/.editorconfig
index a46e3e379f..b567338281 100644
--- a/.editorconfig
+++ b/.editorconfig
@@ -26,3 +26,8 @@ dotnet_diagnostic.MSML_ExtendBaseTestClass.severity = none
# The MSML_RelaxTestNaming suppressor for VSTHRD200 is not active for CodeAnalyzer.Tests, so we disable it altogether.
# VSTHRD200: Use "Async" suffix for async methods
dotnet_diagnostic.VSTHRD200.severity = none
+
+# Xml project files
+[*.{csproj}]
+indent_size = 2
+charset = utf-8
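
Note: the brace form in an .editorconfig section header is a list, so [*.{csproj}] with a single entry behaves the same as [*.csproj], and the new section can later grow to cover other XML-based project flavors. A sketch of the extended form (the extra extensions are illustrative, not part of this commit):

    # Xml project files
    [*.{csproj,vbproj,proj,nativeproj}]
    indent_size = 2
    charset = utf-8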
diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
deleted file mode 100644
index 724f77e913..0000000000
--- a/.github/CODEOWNERS
+++ /dev/null
@@ -1,14 +0,0 @@
-# This file contains the default reviewers for ML .NET code
-# For more information on CODEOWNERS file see : https://help.github.com/en/articles/about-code-owners
-
-# For the entire repository
-* @dotnet/mlnet-core
-
-# Reviewers for files owned by AutoML team
-src/Microsoft.ML.AutoML @dotnet/mlnet-automl
-src/Microsoft.ML.CodeGenerator @dotnet/mlnet-automl
-test/Microsoft.ML.AutoML.Tests @dotnet/mlnet-automl
-test/Microsoft.ML.CodeGenerator.Tests @dotnet/mlnet-automl
-pkg/Microsoft.ML.AutoML @dotnet/mlnet-automl
-pkg/Microsoft.ML.CodeGenerator @dotnet/mlnet-automl
-docs/samples/Microsoft.ML.AutoML.Samples @dotnet/mlnet-automl
diff --git a/.gitignore b/.gitignore
index 36b327cc99..654e29215f 100644
--- a/.gitignore
+++ b/.gitignore
@@ -4,7 +4,8 @@
## Get latest from https://github.com/github/gitignore/blob/master/VisualStudio.gitignore
# Tool Runtime Dir
-/[Tt]ools/
+/.dotnet/
+/.packages/
# User-specific files
*.suo
@@ -226,7 +227,7 @@ ClientBin/
*.publishsettings
orleans.codegen.cs
-# Including strong name files can present a security risk
+# Including strong name files can present a security risk
# (https://github.com/github/gitignore/pull/2483#issue-259490424)
#*.snk
@@ -323,7 +324,7 @@ __pycache__/
# OpenCover UI analysis results
OpenCover/
-# Azure Stream Analytics local run output
+# Azure Stream Analytics local run output
ASALocalRun/
# MSBuild Binary and Structured Log
diff --git a/.vsts-dotnet-ci.yml b/.vsts-dotnet-ci.yml
index 5c2dcf30cb..d3315c00c5 100644
--- a/.vsts-dotnet-ci.yml
+++ b/.vsts-dotnet-ci.yml
@@ -6,7 +6,7 @@ resources:
containers:
- container: CentosContainer
image: mcr.microsoft.com/dotnet-buildtools/prereqs:centos-7-mlnet-8bba86b-20190314145033
-
+
- container: UbuntuContainer
image: mcr.microsoft.com/dotnet-buildtools/prereqs:ubuntu-16.04-mlnet-20200515184230-2c829e8
@@ -29,7 +29,7 @@ jobs:
_targetFramework: netcoreapp3.1
innerLoop: true
pool:
- name: Hosted Ubuntu 1604
+ name: Hosted Ubuntu 1604
- template: /build/ci/job-template.yml
parameters:
@@ -38,7 +38,7 @@ jobs:
container: UbuntuContainer
innerLoop: true
pool:
- name: Hosted Ubuntu 1604
+ name: Hosted Ubuntu 1604
- template: /build/ci/job-template.yml
parameters:
diff --git a/BuildToolsVersion.txt b/BuildToolsVersion.txt
deleted file mode 100644
index 61c8d2d4e3..0000000000
--- a/BuildToolsVersion.txt
+++ /dev/null
@@ -1 +0,0 @@
-3.0.0-preview4-04926-01
diff --git a/Directory.Build.props b/Directory.Build.props
index 5ef129c098..dbe8844455 100644
--- a/Directory.Build.props
+++ b/Directory.Build.props
@@ -1,13 +1,24 @@
+
-
-
-
+
+
+
+
+
true
+
+ $(CopyrightNetFoundation)
+ True
+ portable
+ true
+ latest
+
+
Debug
Debug;Release;Debug-netcoreapp3_1;Release-netcoreapp3_1;Debug-netfx;Release-netfx
@@ -15,103 +26,12 @@
x64
$(TargetArchitecture)
$(Platform).$(Configuration)
+ Open
-
- https://api.nuget.org/v3/index.json;
- https://dotnetfeed.blob.core.windows.net/dotnet-core/index.json;
- https://dotnet.myget.org/F/dotnet-core/api/v3/index.json;
- https://dotnet.myget.org/F/roslyn-analyzers/api/v3/index.json;
- https://pkgs.dev.azure.com/dnceng/public/_packaging/MachineLearning/nuget/v3/index.json;
- https://pkgs.dev.azure.com/dnceng/public/_packaging/machinelearning-testdata/nuget/v3/index.json;
-
-
-
-
-
- $(MSBuildThisFileDirectory)
- $(RepoRoot)src/
+ $(ArtifactsDir)pkgassets/
$(RepoRoot)pkg/
-
-
- $([MSBuild]::NormalizeDirectory('$(RepoRoot)', 'bin'))
- $(BinDir)
- $([MSBuild]::NormalizeDirectory('$(BinDir)', 'obj'))
- $(ObjDir)
-
- $(RootIntermediateOutputPath)$(PlatformConfig)\
- $(IntermediateOutputRootPath)$(MSBuildProjectName)\
- $(IntermediateOutputPath)
-
- $(BaseOutputPath)$(PlatformConfig)\$(MSBuildProjectName)\
-
- $(ObjDir)/packages/
-
- $(BinDir)packages_noship/
- $(BinDir)packages/
-
- $(BaseOutputPath)$(NativeTargetArchitecture).$(Configuration)\Native\
-
-
- $(DotNetRestorePackagesPath)
- $(RepoRoot)packages/
- $(PackagesDir)
- $(RepoRoot)Tools/
-
-
-
-
-
-
- $(MajorVersion).$(MinorVersion).$(PatchVersion)
- 00001
- 0
- $(MajorVersion).$(MinorVersion).$(BuildNumberMajor).$(BuildNumberMinor)
-
- false
- true
-
- $(BuildNumberMajor)-$(BuildNumberMinor)
- true
-
-
-
-
- true
- machinelearning
-
-
-
-
- https://github.com/dotnet/$(GitHubRepositoryName)
- true
- $(LatestCommit)
-
-
-
-
-
-
- 8.0
- 4.7
- true
-
-
-
- true
-
-
-
-
- $(ToolsDir)Open.snk
- true
- true
@@ -125,4 +45,34 @@
true
+
+
+ true
+
+
+ true
+
+
+ true
+ snupkg
+
+
+
+
+ 9.0.1
+
+
+
+
+ $(MicrosoftCodeAnalysisCSharpVersion)
+
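
The rendered hunk above has lost its XML markup, so only property values survive. Read together with the removed lines, the change drops the repo-local package feeds, output-path computation, and versioning/signing properties (now supplied by the shared engineering scripts that build.cmd, later in this diff, delegates to) and adds a small set of common build properties. A minimal sketch of the added block, reconstructed from the visible values; the element names are assumptions, not copied from the repo:

    <PropertyGroup>
      <Copyright>$(CopyrightNetFoundation)</Copyright>
      <GenerateDocumentationFile>True</GenerateDocumentationFile>
      <DebugType>portable</DebugType>
      <LangVersion>latest</LangVersion>
    </PropertyGroup>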
diff --git a/Directory.Build.targets b/Directory.Build.targets
index 985a15f00a..bbb9d56b9b 100644
--- a/Directory.Build.targets
+++ b/Directory.Build.targets
@@ -1,51 +1,83 @@
-
+
+
+
-
-
+
+
-
-
-
+
+
+ lib
+ .dll
+ .so
+ .dylib
-
+ x64
+ $(TargetArchitecture)
+ $([MSBuild]::NormalizeDirectory('$(RepoRoot)', 'artifacts', 'bin'))
+ $(BinDir)Native\$(NativeTargetArchitecture).$(Configuration)\
-
- lib
- .dll
- .so
- .dylib
-
-
-
-
- $(NativeOutputPath)$(LibPrefix)%(NativeAssemblyReference.Identity)$(LibExtension)
-
-
-
-
-
-
+ AnyCPU
+ $(Platform).$(Configuration)
+ $(BinDir)$(MSBuildProjectName)\Debug
+
+
+
+ $(NativeOutputPath)$(LibPrefix)%(NativeAssemblyReference.Identity)$(LibExtension)
+
+
+
+
+
+
-
-
+
+
+ false
+
+
+ true
-
-
-
- $([System.IO.Path]::Combine('$(IntermediateOutputPath)','$(TargetFrameworkMoniker).AssemblyAttributes$(DefaultLanguageSourceExtension)'))
+ true
+ true
+ true
+ opencover
+ $(BaseOutputPath)$(PlatformConfig)\coverage\coverage.opencover.xml
+
+
+ ExcludeFromCodeCoverage
+
-
-
-
-
\ No newline at end of file
+
+
+
+
+
+
+
+
+
+
+
+
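
The Directory.Build.targets hunk carries the same rendering caveat: the markup is stripped, but the visible values (opencover, a coverage.opencover.xml output path, and an ExcludeFromCodeCoverage attribute filter) match Coverlet's MSBuild integration. A hedged sketch of what such a block typically looks like; the property names are Coverlet's, their grouping here is assumed:

    <PropertyGroup>
      <CollectCoverage>true</CollectCoverage>
      <CoverletOutputFormat>opencover</CoverletOutputFormat>
      <CoverletOutput>$(BaseOutputPath)$(PlatformConfig)\coverage\coverage.opencover.xml</CoverletOutput>
      <ExcludeByAttribute>ExcludeFromCodeCoverage</ExcludeByAttribute>
    </PropertyGroup>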
diff --git a/DotnetCLIVersion.txt b/DotnetCLIVersion.txt
deleted file mode 100644
index 6b0555c8c7..0000000000
--- a/DotnetCLIVersion.txt
+++ /dev/null
@@ -1 +0,0 @@
-3.1.102
diff --git a/DotnetExtraRuntimeVersion.txt b/DotnetExtraRuntimeVersion.txt
deleted file mode 100644
index 30b1cc8834..0000000000
--- a/DotnetExtraRuntimeVersion.txt
+++ /dev/null
@@ -1 +0,0 @@
-2.1.12
\ No newline at end of file
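
Deleting BuildToolsVersion.txt, DotnetCLIVersion.txt, and DotnetExtraRuntimeVersion.txt removes the old text-file toolchain pins; in repos built this way the SDK pin normally lives in global.json instead. That file is not shown in this diff; the sketch below reuses the 3.1.102 version from the deleted file and is illustrative only:

    {
      "tools": {
        "dotnet": "3.1.102"
      }
    }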
diff --git a/Microsoft.ML.sln b/Microsoft.ML.sln
index 8c6c84b14c..e52f9d2014 100644
--- a/Microsoft.ML.sln
+++ b/Microsoft.ML.sln
@@ -33,7 +33,7 @@ Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.ML.TestFramework"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.ML.Predictor.Tests", "test\Microsoft.ML.Predictor.Tests\Microsoft.ML.Predictor.Tests.csproj", "{6B047E09-39C9-4583-96F3-685D84CA4117}"
EndProject
-Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.ML.Functional.Tests", "test\Microsoft.ML.Functional.Tests\Microsoft.ML.Functional.Tests.csproj", "{CFED9F0C-FF81-4C96-8D5E-0436264CA7B5}"
+Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.ML.IntegrationTests", "test\Microsoft.ML.IntegrationTests\Microsoft.ML.IntegrationTests.csproj", "{CFED9F0C-FF81-4C96-8D5E-0436264CA7B5}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.ML.ResultProcessor", "src\Microsoft.ML.ResultProcessor\Microsoft.ML.ResultProcessor.csproj", "{3769FCC3-9AFF-4C37-97E9-6854324681DF}"
EndProject
@@ -43,47 +43,18 @@ Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.ML.Parquet", "src
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.ML.Sweeper", "src\Microsoft.ML.Sweeper\Microsoft.ML.Sweeper.csproj", "{55C8122D-79EA-48AB-85D0-EB551FC1C427}"
EndProject
-Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "pkg", "pkg", "{D3D38B03-B557-484D-8348-8BADEE4DF592}"
- ProjectSection(SolutionItems) = preProject
- pkg\Directory.Build.props = pkg\Directory.Build.props
- EndProjectSection
-EndProject
-Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Microsoft.ML", "Microsoft.ML", "{DEC8F776-49F7-4D87-836C-FE4DC057D08C}"
- ProjectSection(SolutionItems) = preProject
- pkg\Microsoft.ML\Microsoft.ML.nupkgproj = pkg\Microsoft.ML\Microsoft.ML.nupkgproj
- pkg\Microsoft.ML\Microsoft.ML.symbols.nupkgproj = pkg\Microsoft.ML\Microsoft.ML.symbols.nupkgproj
- EndProjectSection
-EndProject
-Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Microsoft.ML.Parquet", "Microsoft.ML.Parquet", "{6C95FC87-F5F2-4EEF-BB97-567F2F5DD141}"
- ProjectSection(SolutionItems) = preProject
- pkg\Microsoft.ML.Parquet\Microsoft.ML.Parquet.nupkgproj = pkg\Microsoft.ML.Parquet\Microsoft.ML.Parquet.nupkgproj
- EndProjectSection
-EndProject
-Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.ML.Benchmarks", "test\Microsoft.ML.Benchmarks\Microsoft.ML.Benchmarks.csproj", "{7A9DB75F-2CA5-4184-9EF5-1F17EB39483F}"
+Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.ML.PerformanceTests", "test\Microsoft.ML.PerformanceTests\Microsoft.ML.PerformanceTests.csproj", "{7A9DB75F-2CA5-4184-9EF5-1F17EB39483F}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.ML.Maml", "src\Microsoft.ML.Maml\Microsoft.ML.Maml.csproj", "{64F40A0D-D4C2-4AA7-8470-E9CC437827E4}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.ML.Console", "src\Microsoft.ML.Console\Microsoft.ML.Console.csproj", "{362A98CF-FBF7-4EBB-A11B-990BBF845B15}"
EndProject
-Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "build", "build", "{487213C9-E8A9-4F94-85D7-28A05DBBFE3A}"
-EndProject
-Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "netstandard2.0", "netstandard2.0", "{9252A8EB-ABFB-440C-AB4D-1D562753CE0F}"
- ProjectSection(SolutionItems) = preProject
- pkg\Microsoft.ML\build\netstandard2.0\Microsoft.ML.targets = pkg\Microsoft.ML\build\netstandard2.0\Microsoft.ML.targets
- EndProjectSection
-EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.ML.Sweeper.Tests", "test\Microsoft.ML.Sweeper.Tests\Microsoft.ML.Sweeper.Tests.csproj", "{3DEB504D-7A07-48CE-91A2-8047461CB3D4}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.ML.LightGbm", "src\Microsoft.ML.LightGbm\Microsoft.ML.LightGbm.csproj", "{001F3B4E-FBE4-4001-AFD2-A6A989CD1C25}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.ML.Ensemble", "src\Microsoft.ML.Ensemble\Microsoft.ML.Ensemble.csproj", "{DCF46B79-1FDB-4DBA-A263-D3D64E3AAA27}"
EndProject
-Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Microsoft.ML.CpuMath", "Microsoft.ML.CpuMath", "{BF66A305-DF10-47E4-8D81-42049B149D2B}"
- ProjectSection(SolutionItems) = preProject
- pkg\Microsoft.ML.CpuMath\Microsoft.ML.CpuMath.nupkgproj = pkg\Microsoft.ML.CpuMath\Microsoft.ML.CpuMath.nupkgproj
- pkg\Microsoft.ML.CpuMath\Microsoft.ML.CpuMath.symbols.nupkgproj = pkg\Microsoft.ML.CpuMath\Microsoft.ML.CpuMath.symbols.nupkgproj
- EndProjectSection
-EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "tools-local", "tools-local", "{7F13E156-3EBA-4021-84A5-CD56BA72F99E}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.ML.InternalCodeAnalyzer", "tools-local\Microsoft.ML.InternalCodeAnalyzer\Microsoft.ML.InternalCodeAnalyzer.csproj", "{B4E55B2D-2A92-46E7-B72F-E76D6FD83440}"
@@ -132,177 +103,51 @@ Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.ML.DnnImageFeatur
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.ML.EntryPoints", "src\Microsoft.ML.EntryPoints\Microsoft.ML.EntryPoints.csproj", "{7504D46F-E4B3-43CB-9B1C-82F3131F1C99}"
EndProject
-Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Microsoft.ML.Mkl.Components", "Microsoft.ML.Mkl.Components", "{63006A14-B924-48C5-83C9-CFE9DA22B01F}"
- ProjectSection(SolutionItems) = preProject
- pkg\Microsoft.ML.Mkl.Components\Microsoft.ML.Mkl.Components.nupkgproj = pkg\Microsoft.ML.Mkl.Components\Microsoft.ML.Mkl.Components.nupkgproj
- pkg\Microsoft.ML.Mkl.Components\Microsoft.ML.Mkl.Components.symbols.nupkgproj = pkg\Microsoft.ML.Mkl.Components\Microsoft.ML.Mkl.Components.symbols.nupkgproj
- EndProjectSection
-EndProject
-Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Microsoft.ML.ImageAnalytics", "Microsoft.ML.ImageAnalytics", "{1229F799-37F0-4282-B9F0-74BFA97CC362}"
- ProjectSection(SolutionItems) = preProject
- pkg\Microsoft.ML.ImageAnalytics\Microsoft.ML.ImageAnalytics.nupkgproj = pkg\Microsoft.ML.ImageAnalytics\Microsoft.ML.ImageAnalytics.nupkgproj
- pkg\Microsoft.ML.ImageAnalytics\Microsoft.ML.ImageAnalytics.symbols.nupkgproj = pkg\Microsoft.ML.ImageAnalytics\Microsoft.ML.ImageAnalytics.symbols.nupkgproj
- EndProjectSection
-EndProject
-Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Microsoft.ML.LightGbm", "Microsoft.ML.LightGbm", "{DE95FE65-9FF7-4233-93DF-7A8F2805624A}"
- ProjectSection(SolutionItems) = preProject
- pkg\Microsoft.ML.LightGbm\Microsoft.ML.LightGbm.nupkgproj = pkg\Microsoft.ML.LightGbm\Microsoft.ML.LightGbm.nupkgproj
- pkg\Microsoft.ML.LightGbm\Microsoft.ML.LightGbm.symbols.nupkgproj = pkg\Microsoft.ML.LightGbm\Microsoft.ML.LightGbm.symbols.nupkgproj
- EndProjectSection
-EndProject
-Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Microsoft.ML.Mkl.Redist", "Microsoft.ML.Mkl.Redist", "{4CF8095E-B4A3-4326-A550-43098E447288}"
- ProjectSection(SolutionItems) = preProject
- pkg\Microsoft.ML.Mkl.Redist\Microsoft.ML.Mkl.Redist.nupkgproj = pkg\Microsoft.ML.Mkl.Redist\Microsoft.ML.Mkl.Redist.nupkgproj
- EndProjectSection
-EndProject
-Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Microsoft.ML.OnnxConverter", "Microsoft.ML.OnnxConverter", "{19AC192B-75FE-45D5-B219-898E401D5904}"
- ProjectSection(SolutionItems) = preProject
- pkg\Microsoft.ML.OnnxConverter\Microsoft.ML.OnnxConverter.nupkgproj = pkg\Microsoft.ML.OnnxConverter\Microsoft.ML.OnnxConverter.nupkgproj
- pkg\Microsoft.ML.OnnxConverter\Microsoft.ML.OnnxConverter.symbols.nupkgproj = pkg\Microsoft.ML.OnnxConverter\Microsoft.ML.OnnxConverter.symbols.nupkgproj
- EndProjectSection
-EndProject
-Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Microsoft.ML.OnnxTransformer", "Microsoft.ML.OnnxTransformer", "{93FF16AA-635E-421D-96C1-008818C143A2}"
- ProjectSection(SolutionItems) = preProject
- pkg\Microsoft.ML.OnnxTransformer\Microsoft.ML.OnnxTransformer.nupkgproj = pkg\Microsoft.ML.OnnxTransformer\Microsoft.ML.OnnxTransformer.nupkgproj
- pkg\Microsoft.ML.OnnxTransformer\Microsoft.ML.OnnxTransformer.symbols.nupkgproj = pkg\Microsoft.ML.OnnxTransformer\Microsoft.ML.OnnxTransformer.symbols.nupkgproj
- EndProjectSection
-EndProject
-Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Microsoft.ML.Recommender", "Microsoft.ML.Recommender", "{320AF46A-4809-486E-8F9E-A00C8AE47751}"
- ProjectSection(SolutionItems) = preProject
- pkg\Microsoft.ML.Recommender\Microsoft.ML.Recommender.nupkgproj = pkg\Microsoft.ML.Recommender\Microsoft.ML.Recommender.nupkgproj
- pkg\Microsoft.ML.Recommender\Microsoft.ML.Recommender.symbols.nupkgproj = pkg\Microsoft.ML.Recommender\Microsoft.ML.Recommender.symbols.nupkgproj
- EndProjectSection
-EndProject
-Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Microsoft.ML.TensorFlow", "Microsoft.ML.TensorFlow", "{11894B4A-78B4-4523-A6DD-4495722E244F}"
- ProjectSection(SolutionItems) = preProject
- pkg\Microsoft.ML.TensorFlow\Microsoft.ML.TensorFlow.nupkgproj = pkg\Microsoft.ML.TensorFlow\Microsoft.ML.TensorFlow.nupkgproj
- pkg\Microsoft.ML.TensorFlow\Microsoft.ML.TensorFlow.symbols.nupkgproj = pkg\Microsoft.ML.TensorFlow\Microsoft.ML.TensorFlow.symbols.nupkgproj
- EndProjectSection
-EndProject
-Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Microsoft.ML.TimeSeries", "Microsoft.ML.TimeSeries", "{B836F712-7FB6-4B75-A3EB-FB05F8E0D15E}"
- ProjectSection(SolutionItems) = preProject
- pkg\Microsoft.ML.TimeSeries\Microsoft.ML.TimeSeries.nupkgproj = pkg\Microsoft.ML.TimeSeries\Microsoft.ML.TimeSeries.nupkgproj
- pkg\Microsoft.ML.TimeSeries\Microsoft.ML.TimeSeries.symbols.nupkgproj = pkg\Microsoft.ML.TimeSeries\Microsoft.ML.TimeSeries.symbols.nupkgproj
- EndProjectSection
-EndProject
-Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Microsoft.ML.DnnImageFeaturizer.AlexNet", "Microsoft.ML.DnnImageFeaturizer.AlexNet", "{B00098E4-771E-41DF-A3AA-A606AAB334B7}"
- ProjectSection(SolutionItems) = preProject
- pkg\Microsoft.ML.DnnImageFeaturizer.AlexNet\Microsoft.ML.DnnImageFeaturizer.AlexNet.nupkgproj = pkg\Microsoft.ML.DnnImageFeaturizer.AlexNet\Microsoft.ML.DnnImageFeaturizer.AlexNet.nupkgproj
- pkg\Microsoft.ML.DnnImageFeaturizer.AlexNet\Microsoft.ML.DnnImageFeaturizer.AlexNet.symbols.nupkgproj = pkg\Microsoft.ML.DnnImageFeaturizer.AlexNet\Microsoft.ML.DnnImageFeaturizer.AlexNet.symbols.nupkgproj
- EndProjectSection
-EndProject
-Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Microsoft.ML.DnnImageFeaturizer.ResNet18", "Microsoft.ML.DnnImageFeaturizer.ResNet18", "{BD93C0F3-3CED-4BE8-9389-4234250FBFB1}"
- ProjectSection(SolutionItems) = preProject
- pkg\Microsoft.ML.DnnImageFeaturizer.ResNet18\Microsoft.ML.DnnImageFeaturizer.ResNet18.nupkgproj = pkg\Microsoft.ML.DnnImageFeaturizer.ResNet18\Microsoft.ML.DnnImageFeaturizer.ResNet18.nupkgproj
- pkg\Microsoft.ML.DnnImageFeaturizer.ResNet18\Microsoft.ML.DnnImageFeaturizer.ResNet18.symbols.nupkgproj = pkg\Microsoft.ML.DnnImageFeaturizer.ResNet18\Microsoft.ML.DnnImageFeaturizer.ResNet18.symbols.nupkgproj
- EndProjectSection
-EndProject
-Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Microsoft.ML.DnnImageFeaturizer.ResNet50", "Microsoft.ML.DnnImageFeaturizer.ResNet50", "{8EDFB7E5-7E7E-411D-99C5-7A4895D0F9CB}"
- ProjectSection(SolutionItems) = preProject
- pkg\Microsoft.ML.DnnImageFeaturizer.ResNet50\Microsoft.ML.DnnImageFeaturizer.ResNet50.nupkgproj = pkg\Microsoft.ML.DnnImageFeaturizer.ResNet50\Microsoft.ML.DnnImageFeaturizer.ResNet50.nupkgproj
- pkg\Microsoft.ML.DnnImageFeaturizer.ResNet50\Microsoft.ML.DnnImageFeaturizer.ResNet50.symbols.nupkgproj = pkg\Microsoft.ML.DnnImageFeaturizer.ResNet50\Microsoft.ML.DnnImageFeaturizer.ResNet50.symbols.nupkgproj
- EndProjectSection
-EndProject
-Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Microsoft.ML.DnnImageFeaturizer.ResNet101", "Microsoft.ML.DnnImageFeaturizer.ResNet101", "{9E689AD4-F908-493C-B882-B1B33E8F7696}"
- ProjectSection(SolutionItems) = preProject
- pkg\Microsoft.ML.DnnImageFeaturizer.ResNet101\Microsoft.ML.DnnImageFeaturizer.ResNet101.nupkgproj = pkg\Microsoft.ML.DnnImageFeaturizer.ResNet101\Microsoft.ML.DnnImageFeaturizer.ResNet101.nupkgproj
- pkg\Microsoft.ML.DnnImageFeaturizer.ResNet101\Microsoft.ML.DnnImageFeaturizer.ResNet101.symbols.nupkgproj = pkg\Microsoft.ML.DnnImageFeaturizer.ResNet101\Microsoft.ML.DnnImageFeaturizer.ResNet101.symbols.nupkgproj
- EndProjectSection
-EndProject
-Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Microsoft.ML.EntryPoints", "Microsoft.ML.EntryPoints", "{8D8CC016-0020-40EC-BD8E-73F1CE0F9662}"
- ProjectSection(SolutionItems) = preProject
- pkg\Microsoft.ML.EntryPoints\Microsoft.ML.EntryPoints.nupkgproj = pkg\Microsoft.ML.EntryPoints\Microsoft.ML.EntryPoints.nupkgproj
- pkg\Microsoft.ML.EntryPoints\Microsoft.ML.EntryPoints.symbols.nupkgproj = pkg\Microsoft.ML.EntryPoints\Microsoft.ML.EntryPoints.symbols.nupkgproj
- EndProjectSection
-EndProject
-Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "common", "common", "{A84717CB-F11A-41C5-A74D-C0F1D47B7431}"
- ProjectSection(SolutionItems) = preProject
- pkg\common\CommonPackage.props = pkg\common\CommonPackage.props
- pkg\common\DnnImageFeaturizer.props = pkg\common\DnnImageFeaturizer.props
- EndProjectSection
-EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.ML.DataView", "src\Microsoft.ML.DataView\Microsoft.ML.DataView.csproj", "{85D0CAFD-2FE8-496A-88C7-585D35B94243}"
EndProject
-Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Microsoft.ML.DataView", "Microsoft.ML.DataView", "{31D38B21-102B-41C0-9E0A-2FE0BF68D123}"
- ProjectSection(SolutionItems) = preProject
- pkg\Microsoft.ML.DataView\Microsoft.ML.DataView.nupkgproj = pkg\Microsoft.ML.DataView\Microsoft.ML.DataView.nupkgproj
- pkg\Microsoft.ML.DataView\Microsoft.ML.DataView.symbols.nupkgproj = pkg\Microsoft.ML.DataView\Microsoft.ML.DataView.symbols.nupkgproj
- EndProjectSection
-EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "RemoteExecutorConsoleApp", "test\RemoteExecutorConsoleApp\RemoteExecutorConsoleApp.csproj", "{5E920CAC-5A28-42FB-936E-49C472130953}"
EndProject
-Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Microsoft.ML.Ensemble", "Microsoft.ML.Ensemble", "{AD7058C9-5608-49A8-BE23-58C33A74EE91}"
- ProjectSection(SolutionItems) = preProject
- pkg\Microsoft.ML.Ensemble\Microsoft.ML.Ensemble.nupkgproj = pkg\Microsoft.ML.Ensemble\Microsoft.ML.Ensemble.nupkgproj
- pkg\Microsoft.ML.Ensemble\Microsoft.ML.Ensemble.symbols.nupkgproj = pkg\Microsoft.ML.Ensemble\Microsoft.ML.Ensemble.symbols.nupkgproj
- EndProjectSection
-EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.ML.Experimental", "src\Microsoft.ML.Experimental\Microsoft.ML.Experimental.csproj", "{E02DA82D-3FEE-4C60-BD80-9EC3C3448DFC}"
EndProject
-Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Microsoft.ML.FastTree", "Microsoft.ML.FastTree", "{B1B3F284-FA3D-4D76-A712-FF04495D244B}"
- ProjectSection(SolutionItems) = preProject
- pkg\Microsoft.ML.FastTree\Microsoft.ML.FastTree.nupkgproj = pkg\Microsoft.ML.FastTree\Microsoft.ML.FastTree.nupkgproj
- pkg\Microsoft.ML.FastTree\Microsoft.ML.FastTree.symbols.nupkgproj = pkg\Microsoft.ML.FastTree\Microsoft.ML.FastTree.symbols.nupkgproj
- EndProjectSection
-EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.Extensions.ML", "src\Microsoft.Extensions.ML\Microsoft.Extensions.ML.csproj", "{D6741C37-B5E6-4050-BCBA-9715809EA15B}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.Extensions.ML.Tests", "test\Microsoft.Extensions.ML.Tests\Microsoft.Extensions.ML.Tests.csproj", "{21CAD3A1-5E1F-42C1-BB73-46B6E67F4206}"
EndProject
-Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Microsoft.Extensions.ML", "Microsoft.Extensions.ML", "{AE4F7569-26F3-4160-8A8B-7A57D0DA3350}"
- ProjectSection(SolutionItems) = preProject
- pkg\Microsoft.Extensions.ML\Microsoft.Extensions.ML.nupkgproj = pkg\Microsoft.Extensions.ML\Microsoft.Extensions.ML.nupkgproj
- pkg\Microsoft.Extensions.ML\Microsoft.Extensions.ML.symbols.nupkgproj = pkg\Microsoft.Extensions.ML\Microsoft.Extensions.ML.symbols.nupkgproj
- EndProjectSection
-EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.ML.StableApi", "tools-local\Microsoft.ML.StableApi\Microsoft.ML.StableApi.csproj", "{F308DC6B-7E59-40D7-A581-834E8CD99CFE}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.ML.AutoML.Tests", "test\Microsoft.ML.AutoML.Tests\Microsoft.ML.AutoML.Tests.csproj", "{C2652287-CD6D-40FB-B042-95FB56D09DB8}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.ML.AutoML", "src\Microsoft.ML.AutoML\Microsoft.ML.AutoML.csproj", "{E48285BF-F49A-4EA3-AED0-1BDDBF77EB80}"
EndProject
-Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Microsoft.ML.AutoML", "Microsoft.ML.AutoML", "{F5D11F71-2D61-4AE9-99D7-0F0B54649B15}"
- ProjectSection(SolutionItems) = preProject
- pkg\Microsoft.ML.AutoML\Microsoft.ML.AutoML.nupkgproj = pkg\Microsoft.ML.AutoML\Microsoft.ML.AutoML.nupkgproj
- pkg\Microsoft.ML.AutoML\Microsoft.ML.AutoML.symbols.nupkgproj = pkg\Microsoft.ML.AutoML\Microsoft.ML.AutoML.symbols.nupkgproj
- EndProjectSection
-EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.ML.AutoML.Samples", "docs\samples\Microsoft.ML.AutoML.Samples\Microsoft.ML.AutoML.Samples.csproj", "{A6924919-9E37-4023-8B7F-E85C8E3CC9B3}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.ML.Samples.GPU", "docs\samples\Microsoft.ML.Samples.GPU\Microsoft.ML.Samples.GPU.csproj", "{3C8F910B-7F23-4D25-B521-6D5AC9570ADD}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.ML.Featurizers", "src\Microsoft.ML.Featurizers\Microsoft.ML.Featurizers.csproj", "{E2DD0721-5B0F-4606-8182-4C7EFB834518}"
EndProject
-Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Microsoft.ML.Featurizers", "Microsoft.ML.Featurizers", "{1BA5C784-52E8-4A87-8525-26B2452F2882}"
- ProjectSection(SolutionItems) = preProject
- pkg\Microsoft.ML.Featurizers\Microsoft.ML.Featurizers.nupkgproj = pkg\Microsoft.ML.Featurizers\Microsoft.ML.Featurizers.nupkgproj
- pkg\Microsoft.ML.Featurizers\Microsoft.ML.Featurizers.symbols.nupkgproj = pkg\Microsoft.ML.Featurizers\Microsoft.ML.Featurizers.symbols.nupkgproj
- EndProjectSection
-EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.ML.CodeGenerator", "src\Microsoft.ML.CodeGenerator\Microsoft.ML.CodeGenerator.csproj", "{56CB0850-7341-4D71-9AE4-9EFC472D93DD}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.ML.CodeGenerator.Tests", "test\Microsoft.ML.CodeGenerator.Tests\Microsoft.ML.CodeGenerator.Tests.csproj", "{46CC5637-3DDF-4100-93FC-44BB87B2DB81}"
EndProject
-Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Microsoft.ML.CodeGenerator", "Microsoft.ML.CodeGenerator", "{3817A875-278C-4140-BF66-3C4A8CA55F0D}"
- ProjectSection(SolutionItems) = preProject
- pkg\Microsoft.ML.CodeGenerator\Microsoft.ML.CodeGenerator.nupkgproj = pkg\Microsoft.ML.CodeGenerator\Microsoft.ML.CodeGenerator.nupkgproj
- pkg\Microsoft.ML.CodeGenerator\Microsoft.ML.CodeGenerator.symbols.nupkgproj = pkg\Microsoft.ML.CodeGenerator\Microsoft.ML.CodeGenerator.symbols.nupkgproj
- EndProjectSection
-EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.ML.Vision", "src\Microsoft.ML.Vision\Microsoft.ML.Vision.csproj", "{419F93D5-4135-4DA0-A76E-EFC23E04093D}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.ML.TestFrameworkCommon", "test\Microsoft.ML.TestFrameworkCommon\Microsoft.ML.TestFrameworkCommon.csproj", "{A22FAD27-77E8-4460-8B92-EC7090B7173A}"
EndProject
-Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Microsoft.ML.NightlyBuild.Tests", "test\Microsoft.ML.NightlyBuild.Tests\Microsoft.ML.NightlyBuild.Tests.csproj", "{A1CAC86F-F4BB-4B6D-9D18-E9AE15B3C66E}"
+Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.ML.NightlyBuild.Tests", "test\Microsoft.ML.NightlyBuild.Tests\Microsoft.ML.NightlyBuild.Tests.csproj", "{A1CAC86F-F4BB-4B6D-9D18-E9AE15B3C66E}"
EndProject
-Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Microsoft.ML.NugetPackageVersionUpdater", "test\Microsoft.ML.NugetPackageVersionUpdater\Microsoft.ML.NugetPackageVersionUpdater.csproj", "{C8DB58DC-6434-4431-A81F-263D86E2A5F3}"
+Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.ML.NugetPackageVersionUpdater", "test\Microsoft.ML.NugetPackageVersionUpdater\Microsoft.ML.NugetPackageVersionUpdater.csproj", "{C8DB58DC-6434-4431-A81F-263D86E2A5F3}"
EndProject
-Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "build", "build", "{C91F81E3-B900-4968-A6DF-F53B515E97E1}"
+Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.ML", "src\Microsoft.ML\Microsoft.ML.csproj", "{6CF88209-69DB-4B36-9604-3ECD9F163E96}"
EndProject
-Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "netstandard2.0", "netstandard2.0", "{027DBA48-85B6-46F1-9487-0B49B5057FC0}"
- ProjectSection(SolutionItems) = preProject
- pkg\Microsoft.ML.CpuMath\build\netstandard2.0\Microsoft.ML.CpuMath.props = pkg\Microsoft.ML.CpuMath\build\netstandard2.0\Microsoft.ML.CpuMath.props
- EndProjectSection
+Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.ML.Mkl.Redist", "src\Microsoft.ML.Mkl.Redist\Microsoft.ML.Mkl.Redist.csproj", "{4584326B-C5B3-4CAE-B98A-34C5F5AA16F3}"
+EndProject
+Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.Data.Analysis", "src\Microsoft.Data.Analysis\Microsoft.Data.Analysis.csproj", "{84150C22-0627-4A11-81C9-F214762855EA}"
+EndProject
+Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.Data.Analysis.Interactive", "src\Microsoft.Data.Analysis.Interactive\Microsoft.Data.Analysis.Interactive.csproj", "{D9FDD2D5-BFFC-4A4D-8589-7F63AA3EA923}"
+EndProject
+Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.Data.Analysis.Tests", "test\Microsoft.Data.Analysis.Tests\Microsoft.Data.Analysis.Tests.csproj", "{0B765344-11A4-4738-9759-5060599DC134}"
+EndProject
+Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.Data.Analysis.Interactive.Tests", "test\Microsoft.Data.Analysis.Interactive.Tests\Microsoft.Data.Analysis.Interactive.Tests.csproj", "{8AFB8CC3-DA0B-4364-BFB3-296A7C54CC25}"
EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
@@ -1639,6 +1484,30 @@ Global
{3C8F910B-7F23-4D25-B521-6D5AC9570ADD}.Release-netfx|Any CPU.Build.0 = Release-netfx|Any CPU
{3C8F910B-7F23-4D25-B521-6D5AC9570ADD}.Release-netfx|x64.ActiveCfg = Release-netfx|Any CPU
{3C8F910B-7F23-4D25-B521-6D5AC9570ADD}.Release-netfx|x64.Build.0 = Release-netfx|Any CPU
+ {E2DD0721-5B0F-4606-8182-4C7EFB834518}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {E2DD0721-5B0F-4606-8182-4C7EFB834518}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {E2DD0721-5B0F-4606-8182-4C7EFB834518}.Debug|x64.ActiveCfg = Debug|Any CPU
+ {E2DD0721-5B0F-4606-8182-4C7EFB834518}.Debug|x64.Build.0 = Debug|Any CPU
+ {E2DD0721-5B0F-4606-8182-4C7EFB834518}.Debug-netcoreapp3_1|Any CPU.ActiveCfg = Debug-netcoreapp3_1|Any CPU
+ {E2DD0721-5B0F-4606-8182-4C7EFB834518}.Debug-netcoreapp3_1|Any CPU.Build.0 = Debug-netcoreapp3_1|Any CPU
+ {E2DD0721-5B0F-4606-8182-4C7EFB834518}.Debug-netcoreapp3_1|x64.ActiveCfg = Debug-netcoreapp3_1|Any CPU
+ {E2DD0721-5B0F-4606-8182-4C7EFB834518}.Debug-netcoreapp3_1|x64.Build.0 = Debug-netcoreapp3_1|Any CPU
+ {E2DD0721-5B0F-4606-8182-4C7EFB834518}.Debug-netfx|Any CPU.ActiveCfg = Debug-netfx|Any CPU
+ {E2DD0721-5B0F-4606-8182-4C7EFB834518}.Debug-netfx|Any CPU.Build.0 = Debug-netfx|Any CPU
+ {E2DD0721-5B0F-4606-8182-4C7EFB834518}.Debug-netfx|x64.ActiveCfg = Debug-netfx|Any CPU
+ {E2DD0721-5B0F-4606-8182-4C7EFB834518}.Debug-netfx|x64.Build.0 = Debug-netfx|Any CPU
+ {E2DD0721-5B0F-4606-8182-4C7EFB834518}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {E2DD0721-5B0F-4606-8182-4C7EFB834518}.Release|Any CPU.Build.0 = Release|Any CPU
+ {E2DD0721-5B0F-4606-8182-4C7EFB834518}.Release|x64.ActiveCfg = Release|Any CPU
+ {E2DD0721-5B0F-4606-8182-4C7EFB834518}.Release|x64.Build.0 = Release|Any CPU
+ {E2DD0721-5B0F-4606-8182-4C7EFB834518}.Release-netcoreapp3_1|Any CPU.ActiveCfg = Release-netcoreapp3_1|Any CPU
+ {E2DD0721-5B0F-4606-8182-4C7EFB834518}.Release-netcoreapp3_1|Any CPU.Build.0 = Release-netcoreapp3_1|Any CPU
+ {E2DD0721-5B0F-4606-8182-4C7EFB834518}.Release-netcoreapp3_1|x64.ActiveCfg = Release-netcoreapp3_1|Any CPU
+ {E2DD0721-5B0F-4606-8182-4C7EFB834518}.Release-netcoreapp3_1|x64.Build.0 = Release-netcoreapp3_1|Any CPU
+ {E2DD0721-5B0F-4606-8182-4C7EFB834518}.Release-netfx|Any CPU.ActiveCfg = Release-netfx|Any CPU
+ {E2DD0721-5B0F-4606-8182-4C7EFB834518}.Release-netfx|Any CPU.Build.0 = Release-netfx|Any CPU
+ {E2DD0721-5B0F-4606-8182-4C7EFB834518}.Release-netfx|x64.ActiveCfg = Release-netfx|Any CPU
+ {E2DD0721-5B0F-4606-8182-4C7EFB834518}.Release-netfx|x64.Build.0 = Release-netfx|Any CPU
{56CB0850-7341-4D71-9AE4-9EFC472D93DD}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{56CB0850-7341-4D71-9AE4-9EFC472D93DD}.Debug|Any CPU.Build.0 = Debug|Any CPU
{56CB0850-7341-4D71-9AE4-9EFC472D93DD}.Debug|x64.ActiveCfg = Debug|Any CPU
@@ -1771,30 +1640,150 @@ Global
{C8DB58DC-6434-4431-A81F-263D86E2A5F3}.Release-netfx|Any CPU.Build.0 = Release-netfx|Any CPU
{C8DB58DC-6434-4431-A81F-263D86E2A5F3}.Release-netfx|x64.ActiveCfg = Release-netfx|Any CPU
{C8DB58DC-6434-4431-A81F-263D86E2A5F3}.Release-netfx|x64.Build.0 = Release-netfx|Any CPU
- {E2DD0721-5B0F-4606-8182-4C7EFB834518}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
- {E2DD0721-5B0F-4606-8182-4C7EFB834518}.Debug|Any CPU.Build.0 = Debug|Any CPU
- {E2DD0721-5B0F-4606-8182-4C7EFB834518}.Debug|x64.ActiveCfg = Debug|Any CPU
- {E2DD0721-5B0F-4606-8182-4C7EFB834518}.Debug|x64.Build.0 = Debug|Any CPU
- {E2DD0721-5B0F-4606-8182-4C7EFB834518}.Debug-netcoreapp3_1|Any CPU.ActiveCfg = Debug-netcoreapp3_1|Any CPU
- {E2DD0721-5B0F-4606-8182-4C7EFB834518}.Debug-netcoreapp3_1|Any CPU.Build.0 = Debug-netcoreapp3_1|Any CPU
- {E2DD0721-5B0F-4606-8182-4C7EFB834518}.Debug-netcoreapp3_1|x64.ActiveCfg = Debug-netcoreapp3_1|Any CPU
- {E2DD0721-5B0F-4606-8182-4C7EFB834518}.Debug-netcoreapp3_1|x64.Build.0 = Debug-netcoreapp3_1|Any CPU
- {E2DD0721-5B0F-4606-8182-4C7EFB834518}.Debug-netfx|Any CPU.ActiveCfg = Debug-netfx|Any CPU
- {E2DD0721-5B0F-4606-8182-4C7EFB834518}.Debug-netfx|Any CPU.Build.0 = Debug-netfx|Any CPU
- {E2DD0721-5B0F-4606-8182-4C7EFB834518}.Debug-netfx|x64.ActiveCfg = Debug-netfx|Any CPU
- {E2DD0721-5B0F-4606-8182-4C7EFB834518}.Debug-netfx|x64.Build.0 = Debug-netfx|Any CPU
- {E2DD0721-5B0F-4606-8182-4C7EFB834518}.Release|Any CPU.ActiveCfg = Release|Any CPU
- {E2DD0721-5B0F-4606-8182-4C7EFB834518}.Release|Any CPU.Build.0 = Release|Any CPU
- {E2DD0721-5B0F-4606-8182-4C7EFB834518}.Release|x64.ActiveCfg = Release|Any CPU
- {E2DD0721-5B0F-4606-8182-4C7EFB834518}.Release|x64.Build.0 = Release|Any CPU
- {E2DD0721-5B0F-4606-8182-4C7EFB834518}.Release-netcoreapp3_1|Any CPU.ActiveCfg = Release-netcoreapp3_1|Any CPU
- {E2DD0721-5B0F-4606-8182-4C7EFB834518}.Release-netcoreapp3_1|Any CPU.Build.0 = Release-netcoreapp3_1|Any CPU
- {E2DD0721-5B0F-4606-8182-4C7EFB834518}.Release-netcoreapp3_1|x64.ActiveCfg = Release-netcoreapp3_1|Any CPU
- {E2DD0721-5B0F-4606-8182-4C7EFB834518}.Release-netcoreapp3_1|x64.Build.0 = Release-netcoreapp3_1|Any CPU
- {E2DD0721-5B0F-4606-8182-4C7EFB834518}.Release-netfx|Any CPU.ActiveCfg = Release-netfx|Any CPU
- {E2DD0721-5B0F-4606-8182-4C7EFB834518}.Release-netfx|Any CPU.Build.0 = Release-netfx|Any CPU
- {E2DD0721-5B0F-4606-8182-4C7EFB834518}.Release-netfx|x64.ActiveCfg = Release-netfx|Any CPU
- {E2DD0721-5B0F-4606-8182-4C7EFB834518}.Release-netfx|x64.Build.0 = Release-netfx|Any CPU
+ {6CF88209-69DB-4B36-9604-3ECD9F163E96}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {6CF88209-69DB-4B36-9604-3ECD9F163E96}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {6CF88209-69DB-4B36-9604-3ECD9F163E96}.Debug|x64.ActiveCfg = Debug|Any CPU
+ {6CF88209-69DB-4B36-9604-3ECD9F163E96}.Debug|x64.Build.0 = Debug|Any CPU
+ {6CF88209-69DB-4B36-9604-3ECD9F163E96}.Debug-netcoreapp3_1|Any CPU.ActiveCfg = Debug-netcoreapp3_1|Any CPU
+ {6CF88209-69DB-4B36-9604-3ECD9F163E96}.Debug-netcoreapp3_1|Any CPU.Build.0 = Debug-netcoreapp3_1|Any CPU
+ {6CF88209-69DB-4B36-9604-3ECD9F163E96}.Debug-netcoreapp3_1|x64.ActiveCfg = Debug-netcoreapp3_1|Any CPU
+ {6CF88209-69DB-4B36-9604-3ECD9F163E96}.Debug-netcoreapp3_1|x64.Build.0 = Debug-netcoreapp3_1|Any CPU
+ {6CF88209-69DB-4B36-9604-3ECD9F163E96}.Debug-netfx|Any CPU.ActiveCfg = Debug-netfx|Any CPU
+ {6CF88209-69DB-4B36-9604-3ECD9F163E96}.Debug-netfx|Any CPU.Build.0 = Debug-netfx|Any CPU
+ {6CF88209-69DB-4B36-9604-3ECD9F163E96}.Debug-netfx|x64.ActiveCfg = Debug-netfx|Any CPU
+ {6CF88209-69DB-4B36-9604-3ECD9F163E96}.Debug-netfx|x64.Build.0 = Debug-netfx|Any CPU
+ {6CF88209-69DB-4B36-9604-3ECD9F163E96}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {6CF88209-69DB-4B36-9604-3ECD9F163E96}.Release|Any CPU.Build.0 = Release|Any CPU
+ {6CF88209-69DB-4B36-9604-3ECD9F163E96}.Release|x64.ActiveCfg = Release|Any CPU
+ {6CF88209-69DB-4B36-9604-3ECD9F163E96}.Release|x64.Build.0 = Release|Any CPU
+ {6CF88209-69DB-4B36-9604-3ECD9F163E96}.Release-netcoreapp3_1|Any CPU.ActiveCfg = Release-netcoreapp3_1|Any CPU
+ {6CF88209-69DB-4B36-9604-3ECD9F163E96}.Release-netcoreapp3_1|Any CPU.Build.0 = Release-netcoreapp3_1|Any CPU
+ {6CF88209-69DB-4B36-9604-3ECD9F163E96}.Release-netcoreapp3_1|x64.ActiveCfg = Release-netcoreapp3_1|Any CPU
+ {6CF88209-69DB-4B36-9604-3ECD9F163E96}.Release-netcoreapp3_1|x64.Build.0 = Release-netcoreapp3_1|Any CPU
+ {6CF88209-69DB-4B36-9604-3ECD9F163E96}.Release-netfx|Any CPU.ActiveCfg = Release-netfx|Any CPU
+ {6CF88209-69DB-4B36-9604-3ECD9F163E96}.Release-netfx|Any CPU.Build.0 = Release-netfx|Any CPU
+ {6CF88209-69DB-4B36-9604-3ECD9F163E96}.Release-netfx|x64.ActiveCfg = Release-netfx|Any CPU
+ {6CF88209-69DB-4B36-9604-3ECD9F163E96}.Release-netfx|x64.Build.0 = Release-netfx|Any CPU
+ {4584326B-C5B3-4CAE-B98A-34C5F5AA16F3}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {4584326B-C5B3-4CAE-B98A-34C5F5AA16F3}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {4584326B-C5B3-4CAE-B98A-34C5F5AA16F3}.Debug|x64.ActiveCfg = Debug|Any CPU
+ {4584326B-C5B3-4CAE-B98A-34C5F5AA16F3}.Debug|x64.Build.0 = Debug|Any CPU
+ {4584326B-C5B3-4CAE-B98A-34C5F5AA16F3}.Debug-netcoreapp3_1|Any CPU.ActiveCfg = Debug-netcoreapp3_1|Any CPU
+ {4584326B-C5B3-4CAE-B98A-34C5F5AA16F3}.Debug-netcoreapp3_1|Any CPU.Build.0 = Debug-netcoreapp3_1|Any CPU
+ {4584326B-C5B3-4CAE-B98A-34C5F5AA16F3}.Debug-netcoreapp3_1|x64.ActiveCfg = Debug-netcoreapp3_1|Any CPU
+ {4584326B-C5B3-4CAE-B98A-34C5F5AA16F3}.Debug-netcoreapp3_1|x64.Build.0 = Debug-netcoreapp3_1|Any CPU
+ {4584326B-C5B3-4CAE-B98A-34C5F5AA16F3}.Debug-netfx|Any CPU.ActiveCfg = Debug-netfx|Any CPU
+ {4584326B-C5B3-4CAE-B98A-34C5F5AA16F3}.Debug-netfx|Any CPU.Build.0 = Debug-netfx|Any CPU
+ {4584326B-C5B3-4CAE-B98A-34C5F5AA16F3}.Debug-netfx|x64.ActiveCfg = Debug-netfx|Any CPU
+ {4584326B-C5B3-4CAE-B98A-34C5F5AA16F3}.Debug-netfx|x64.Build.0 = Debug-netfx|Any CPU
+ {4584326B-C5B3-4CAE-B98A-34C5F5AA16F3}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {4584326B-C5B3-4CAE-B98A-34C5F5AA16F3}.Release|Any CPU.Build.0 = Release|Any CPU
+ {4584326B-C5B3-4CAE-B98A-34C5F5AA16F3}.Release|x64.ActiveCfg = Release|Any CPU
+ {4584326B-C5B3-4CAE-B98A-34C5F5AA16F3}.Release|x64.Build.0 = Release|Any CPU
+ {4584326B-C5B3-4CAE-B98A-34C5F5AA16F3}.Release-netcoreapp3_1|Any CPU.ActiveCfg = Release-netcoreapp3_1|Any CPU
+ {4584326B-C5B3-4CAE-B98A-34C5F5AA16F3}.Release-netcoreapp3_1|Any CPU.Build.0 = Release-netcoreapp3_1|Any CPU
+ {4584326B-C5B3-4CAE-B98A-34C5F5AA16F3}.Release-netcoreapp3_1|x64.ActiveCfg = Release-netcoreapp3_1|Any CPU
+ {4584326B-C5B3-4CAE-B98A-34C5F5AA16F3}.Release-netcoreapp3_1|x64.Build.0 = Release-netcoreapp3_1|Any CPU
+ {4584326B-C5B3-4CAE-B98A-34C5F5AA16F3}.Release-netfx|Any CPU.ActiveCfg = Release-netfx|Any CPU
+ {4584326B-C5B3-4CAE-B98A-34C5F5AA16F3}.Release-netfx|Any CPU.Build.0 = Release-netfx|Any CPU
+ {4584326B-C5B3-4CAE-B98A-34C5F5AA16F3}.Release-netfx|x64.ActiveCfg = Release-netfx|Any CPU
+ {4584326B-C5B3-4CAE-B98A-34C5F5AA16F3}.Release-netfx|x64.Build.0 = Release-netfx|Any CPU
+ {84150C22-0627-4A11-81C9-F214762855EA}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {84150C22-0627-4A11-81C9-F214762855EA}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {84150C22-0627-4A11-81C9-F214762855EA}.Debug|x64.ActiveCfg = Debug|Any CPU
+ {84150C22-0627-4A11-81C9-F214762855EA}.Debug|x64.Build.0 = Debug|Any CPU
+ {84150C22-0627-4A11-81C9-F214762855EA}.Debug-netcoreapp3_1|Any CPU.ActiveCfg = Debug-netcoreapp3_1|Any CPU
+ {84150C22-0627-4A11-81C9-F214762855EA}.Debug-netcoreapp3_1|Any CPU.Build.0 = Debug-netcoreapp3_1|Any CPU
+ {84150C22-0627-4A11-81C9-F214762855EA}.Debug-netcoreapp3_1|x64.ActiveCfg = Debug-netcoreapp3_1|Any CPU
+ {84150C22-0627-4A11-81C9-F214762855EA}.Debug-netcoreapp3_1|x64.Build.0 = Debug-netcoreapp3_1|Any CPU
+ {84150C22-0627-4A11-81C9-F214762855EA}.Debug-netfx|Any CPU.ActiveCfg = Debug-netfx|Any CPU
+ {84150C22-0627-4A11-81C9-F214762855EA}.Debug-netfx|Any CPU.Build.0 = Debug-netfx|Any CPU
+ {84150C22-0627-4A11-81C9-F214762855EA}.Debug-netfx|x64.ActiveCfg = Debug-netfx|Any CPU
+ {84150C22-0627-4A11-81C9-F214762855EA}.Debug-netfx|x64.Build.0 = Debug-netfx|Any CPU
+ {84150C22-0627-4A11-81C9-F214762855EA}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {84150C22-0627-4A11-81C9-F214762855EA}.Release|Any CPU.Build.0 = Release|Any CPU
+ {84150C22-0627-4A11-81C9-F214762855EA}.Release|x64.ActiveCfg = Release|Any CPU
+ {84150C22-0627-4A11-81C9-F214762855EA}.Release|x64.Build.0 = Release|Any CPU
+ {84150C22-0627-4A11-81C9-F214762855EA}.Release-netcoreapp3_1|Any CPU.ActiveCfg = Release-netcoreapp3_1|Any CPU
+ {84150C22-0627-4A11-81C9-F214762855EA}.Release-netcoreapp3_1|Any CPU.Build.0 = Release-netcoreapp3_1|Any CPU
+ {84150C22-0627-4A11-81C9-F214762855EA}.Release-netcoreapp3_1|x64.ActiveCfg = Release-netcoreapp3_1|Any CPU
+ {84150C22-0627-4A11-81C9-F214762855EA}.Release-netcoreapp3_1|x64.Build.0 = Release-netcoreapp3_1|Any CPU
+ {84150C22-0627-4A11-81C9-F214762855EA}.Release-netfx|Any CPU.ActiveCfg = Release-netfx|Any CPU
+ {84150C22-0627-4A11-81C9-F214762855EA}.Release-netfx|Any CPU.Build.0 = Release-netfx|Any CPU
+ {84150C22-0627-4A11-81C9-F214762855EA}.Release-netfx|x64.ActiveCfg = Release-netfx|Any CPU
+ {84150C22-0627-4A11-81C9-F214762855EA}.Release-netfx|x64.Build.0 = Release-netfx|Any CPU
+ {D9FDD2D5-BFFC-4A4D-8589-7F63AA3EA923}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {D9FDD2D5-BFFC-4A4D-8589-7F63AA3EA923}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {D9FDD2D5-BFFC-4A4D-8589-7F63AA3EA923}.Debug|x64.ActiveCfg = Debug|Any CPU
+ {D9FDD2D5-BFFC-4A4D-8589-7F63AA3EA923}.Debug|x64.Build.0 = Debug|Any CPU
+ {D9FDD2D5-BFFC-4A4D-8589-7F63AA3EA923}.Debug-netcoreapp3_1|Any CPU.ActiveCfg = Debug-netcoreapp3_1|Any CPU
+ {D9FDD2D5-BFFC-4A4D-8589-7F63AA3EA923}.Debug-netcoreapp3_1|Any CPU.Build.0 = Debug-netcoreapp3_1|Any CPU
+ {D9FDD2D5-BFFC-4A4D-8589-7F63AA3EA923}.Debug-netcoreapp3_1|x64.ActiveCfg = Debug-netcoreapp3_1|Any CPU
+ {D9FDD2D5-BFFC-4A4D-8589-7F63AA3EA923}.Debug-netcoreapp3_1|x64.Build.0 = Debug-netcoreapp3_1|Any CPU
+ {D9FDD2D5-BFFC-4A4D-8589-7F63AA3EA923}.Debug-netfx|Any CPU.ActiveCfg = Debug-netfx|Any CPU
+ {D9FDD2D5-BFFC-4A4D-8589-7F63AA3EA923}.Debug-netfx|Any CPU.Build.0 = Debug-netfx|Any CPU
+ {D9FDD2D5-BFFC-4A4D-8589-7F63AA3EA923}.Debug-netfx|x64.ActiveCfg = Debug-netfx|Any CPU
+ {D9FDD2D5-BFFC-4A4D-8589-7F63AA3EA923}.Debug-netfx|x64.Build.0 = Debug-netfx|Any CPU
+ {D9FDD2D5-BFFC-4A4D-8589-7F63AA3EA923}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {D9FDD2D5-BFFC-4A4D-8589-7F63AA3EA923}.Release|Any CPU.Build.0 = Release|Any CPU
+ {D9FDD2D5-BFFC-4A4D-8589-7F63AA3EA923}.Release|x64.ActiveCfg = Release|Any CPU
+ {D9FDD2D5-BFFC-4A4D-8589-7F63AA3EA923}.Release|x64.Build.0 = Release|Any CPU
+ {D9FDD2D5-BFFC-4A4D-8589-7F63AA3EA923}.Release-netcoreapp3_1|Any CPU.ActiveCfg = Release-netcoreapp3_1|Any CPU
+ {D9FDD2D5-BFFC-4A4D-8589-7F63AA3EA923}.Release-netcoreapp3_1|Any CPU.Build.0 = Release-netcoreapp3_1|Any CPU
+ {D9FDD2D5-BFFC-4A4D-8589-7F63AA3EA923}.Release-netcoreapp3_1|x64.ActiveCfg = Release-netcoreapp3_1|Any CPU
+ {D9FDD2D5-BFFC-4A4D-8589-7F63AA3EA923}.Release-netcoreapp3_1|x64.Build.0 = Release-netcoreapp3_1|Any CPU
+ {D9FDD2D5-BFFC-4A4D-8589-7F63AA3EA923}.Release-netfx|Any CPU.ActiveCfg = Release-netfx|Any CPU
+ {D9FDD2D5-BFFC-4A4D-8589-7F63AA3EA923}.Release-netfx|Any CPU.Build.0 = Release-netfx|Any CPU
+ {D9FDD2D5-BFFC-4A4D-8589-7F63AA3EA923}.Release-netfx|x64.ActiveCfg = Release-netfx|Any CPU
+ {D9FDD2D5-BFFC-4A4D-8589-7F63AA3EA923}.Release-netfx|x64.Build.0 = Release-netfx|Any CPU
+ {0B765344-11A4-4738-9759-5060599DC134}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {0B765344-11A4-4738-9759-5060599DC134}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {0B765344-11A4-4738-9759-5060599DC134}.Debug|x64.ActiveCfg = Debug|Any CPU
+ {0B765344-11A4-4738-9759-5060599DC134}.Debug|x64.Build.0 = Debug|Any CPU
+ {0B765344-11A4-4738-9759-5060599DC134}.Debug-netcoreapp3_1|Any CPU.ActiveCfg = Debug-netcoreapp3_1|Any CPU
+ {0B765344-11A4-4738-9759-5060599DC134}.Debug-netcoreapp3_1|Any CPU.Build.0 = Debug-netcoreapp3_1|Any CPU
+ {0B765344-11A4-4738-9759-5060599DC134}.Debug-netcoreapp3_1|x64.ActiveCfg = Debug-netcoreapp3_1|Any CPU
+ {0B765344-11A4-4738-9759-5060599DC134}.Debug-netcoreapp3_1|x64.Build.0 = Debug-netcoreapp3_1|Any CPU
+ {0B765344-11A4-4738-9759-5060599DC134}.Debug-netfx|Any CPU.ActiveCfg = Debug-netfx|Any CPU
+ {0B765344-11A4-4738-9759-5060599DC134}.Debug-netfx|Any CPU.Build.0 = Debug-netfx|Any CPU
+ {0B765344-11A4-4738-9759-5060599DC134}.Debug-netfx|x64.ActiveCfg = Debug-netfx|Any CPU
+ {0B765344-11A4-4738-9759-5060599DC134}.Debug-netfx|x64.Build.0 = Debug-netfx|Any CPU
+ {0B765344-11A4-4738-9759-5060599DC134}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {0B765344-11A4-4738-9759-5060599DC134}.Release|Any CPU.Build.0 = Release|Any CPU
+ {0B765344-11A4-4738-9759-5060599DC134}.Release|x64.ActiveCfg = Release|Any CPU
+ {0B765344-11A4-4738-9759-5060599DC134}.Release|x64.Build.0 = Release|Any CPU
+ {0B765344-11A4-4738-9759-5060599DC134}.Release-netcoreapp3_1|Any CPU.ActiveCfg = Release-netcoreapp3_1|Any CPU
+ {0B765344-11A4-4738-9759-5060599DC134}.Release-netcoreapp3_1|Any CPU.Build.0 = Release-netcoreapp3_1|Any CPU
+ {0B765344-11A4-4738-9759-5060599DC134}.Release-netcoreapp3_1|x64.ActiveCfg = Release-netcoreapp3_1|Any CPU
+ {0B765344-11A4-4738-9759-5060599DC134}.Release-netcoreapp3_1|x64.Build.0 = Release-netcoreapp3_1|Any CPU
+ {0B765344-11A4-4738-9759-5060599DC134}.Release-netfx|Any CPU.ActiveCfg = Release-netfx|Any CPU
+ {0B765344-11A4-4738-9759-5060599DC134}.Release-netfx|Any CPU.Build.0 = Release-netfx|Any CPU
+ {0B765344-11A4-4738-9759-5060599DC134}.Release-netfx|x64.ActiveCfg = Release-netfx|Any CPU
+ {0B765344-11A4-4738-9759-5060599DC134}.Release-netfx|x64.Build.0 = Release-netfx|Any CPU
+ {8AFB8CC3-DA0B-4364-BFB3-296A7C54CC25}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {8AFB8CC3-DA0B-4364-BFB3-296A7C54CC25}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {8AFB8CC3-DA0B-4364-BFB3-296A7C54CC25}.Debug|x64.ActiveCfg = Debug|Any CPU
+ {8AFB8CC3-DA0B-4364-BFB3-296A7C54CC25}.Debug|x64.Build.0 = Debug|Any CPU
+ {8AFB8CC3-DA0B-4364-BFB3-296A7C54CC25}.Debug-netcoreapp3_1|Any CPU.ActiveCfg = Debug-netcoreapp3_1|Any CPU
+ {8AFB8CC3-DA0B-4364-BFB3-296A7C54CC25}.Debug-netcoreapp3_1|Any CPU.Build.0 = Debug-netcoreapp3_1|Any CPU
+ {8AFB8CC3-DA0B-4364-BFB3-296A7C54CC25}.Debug-netcoreapp3_1|x64.ActiveCfg = Debug-netcoreapp3_1|Any CPU
+ {8AFB8CC3-DA0B-4364-BFB3-296A7C54CC25}.Debug-netcoreapp3_1|x64.Build.0 = Debug-netcoreapp3_1|Any CPU
+ {8AFB8CC3-DA0B-4364-BFB3-296A7C54CC25}.Debug-netfx|Any CPU.ActiveCfg = Debug-netfx|Any CPU
+ {8AFB8CC3-DA0B-4364-BFB3-296A7C54CC25}.Debug-netfx|Any CPU.Build.0 = Debug-netfx|Any CPU
+ {8AFB8CC3-DA0B-4364-BFB3-296A7C54CC25}.Debug-netfx|x64.ActiveCfg = Debug-netfx|Any CPU
+ {8AFB8CC3-DA0B-4364-BFB3-296A7C54CC25}.Debug-netfx|x64.Build.0 = Debug-netfx|Any CPU
+ {8AFB8CC3-DA0B-4364-BFB3-296A7C54CC25}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {8AFB8CC3-DA0B-4364-BFB3-296A7C54CC25}.Release|Any CPU.Build.0 = Release|Any CPU
+ {8AFB8CC3-DA0B-4364-BFB3-296A7C54CC25}.Release|x64.ActiveCfg = Release|Any CPU
+ {8AFB8CC3-DA0B-4364-BFB3-296A7C54CC25}.Release|x64.Build.0 = Release|Any CPU
+ {8AFB8CC3-DA0B-4364-BFB3-296A7C54CC25}.Release-netcoreapp3_1|Any CPU.ActiveCfg = Release-netcoreapp3_1|Any CPU
+ {8AFB8CC3-DA0B-4364-BFB3-296A7C54CC25}.Release-netcoreapp3_1|Any CPU.Build.0 = Release-netcoreapp3_1|Any CPU
+ {8AFB8CC3-DA0B-4364-BFB3-296A7C54CC25}.Release-netcoreapp3_1|x64.ActiveCfg = Release-netcoreapp3_1|Any CPU
+ {8AFB8CC3-DA0B-4364-BFB3-296A7C54CC25}.Release-netcoreapp3_1|x64.Build.0 = Release-netcoreapp3_1|Any CPU
+ {8AFB8CC3-DA0B-4364-BFB3-296A7C54CC25}.Release-netfx|Any CPU.ActiveCfg = Release-netfx|Any CPU
+ {8AFB8CC3-DA0B-4364-BFB3-296A7C54CC25}.Release-netfx|Any CPU.Build.0 = Release-netfx|Any CPU
+ {8AFB8CC3-DA0B-4364-BFB3-296A7C54CC25}.Release-netfx|x64.ActiveCfg = Release-netfx|Any CPU
+ {8AFB8CC3-DA0B-4364-BFB3-296A7C54CC25}.Release-netfx|x64.Build.0 = Release-netfx|Any CPU
EndGlobalSection
GlobalSection(SolutionProperties) = preSolution
HideSolutionNode = FALSE
@@ -1817,17 +1806,12 @@ Global
{B7B593C5-FB8C-4ADA-A638-5B53B47D087E} = {09EADF06-BE25-4228-AB53-95AE3E15B530}
{16BB1454-2108-40E5-B3A6-594654005303} = {09EADF06-BE25-4228-AB53-95AE3E15B530}
{55C8122D-79EA-48AB-85D0-EB551FC1C427} = {09EADF06-BE25-4228-AB53-95AE3E15B530}
- {DEC8F776-49F7-4D87-836C-FE4DC057D08C} = {D3D38B03-B557-484D-8348-8BADEE4DF592}
- {6C95FC87-F5F2-4EEF-BB97-567F2F5DD141} = {D3D38B03-B557-484D-8348-8BADEE4DF592}
{7A9DB75F-2CA5-4184-9EF5-1F17EB39483F} = {AED9C836-31E3-4F3F-8ABC-929555D3F3C4}
{64F40A0D-D4C2-4AA7-8470-E9CC437827E4} = {09EADF06-BE25-4228-AB53-95AE3E15B530}
{362A98CF-FBF7-4EBB-A11B-990BBF845B15} = {09EADF06-BE25-4228-AB53-95AE3E15B530}
- {487213C9-E8A9-4F94-85D7-28A05DBBFE3A} = {DEC8F776-49F7-4D87-836C-FE4DC057D08C}
- {9252A8EB-ABFB-440C-AB4D-1D562753CE0F} = {487213C9-E8A9-4F94-85D7-28A05DBBFE3A}
{3DEB504D-7A07-48CE-91A2-8047461CB3D4} = {AED9C836-31E3-4F3F-8ABC-929555D3F3C4}
{001F3B4E-FBE4-4001-AFD2-A6A989CD1C25} = {09EADF06-BE25-4228-AB53-95AE3E15B530}
{DCF46B79-1FDB-4DBA-A263-D3D64E3AAA27} = {09EADF06-BE25-4228-AB53-95AE3E15B530}
- {BF66A305-DF10-47E4-8D81-42049B149D2B} = {D3D38B03-B557-484D-8348-8BADEE4DF592}
{B4E55B2D-2A92-46E7-B72F-E76D6FD83440} = {7F13E156-3EBA-4021-84A5-CD56BA72F99E}
{3E4ABF07-7970-4BE6-B45B-A13D3C397545} = {AED9C836-31E3-4F3F-8ABC-929555D3F3C4}
{7333EDEF-4144-405C-A5EC-6F42201857D8} = {AED9C836-31E3-4F3F-8ABC-929555D3F3C4}
@@ -1850,47 +1834,29 @@ Global
{4805129D-78C8-46D4-9519-0AD9B0574D6D} = {09EADF06-BE25-4228-AB53-95AE3E15B530}
{DB7CEB5E-8BE6-48A7-87BE-B91D9AE96F71} = {09EADF06-BE25-4228-AB53-95AE3E15B530}
{7504D46F-E4B3-43CB-9B1C-82F3131F1C99} = {09EADF06-BE25-4228-AB53-95AE3E15B530}
- {63006A14-B924-48C5-83C9-CFE9DA22B01F} = {D3D38B03-B557-484D-8348-8BADEE4DF592}
- {1229F799-37F0-4282-B9F0-74BFA97CC362} = {D3D38B03-B557-484D-8348-8BADEE4DF592}
- {DE95FE65-9FF7-4233-93DF-7A8F2805624A} = {D3D38B03-B557-484D-8348-8BADEE4DF592}
- {4CF8095E-B4A3-4326-A550-43098E447288} = {D3D38B03-B557-484D-8348-8BADEE4DF592}
- {19AC192B-75FE-45D5-B219-898E401D5904} = {D3D38B03-B557-484D-8348-8BADEE4DF592}
- {93FF16AA-635E-421D-96C1-008818C143A2} = {D3D38B03-B557-484D-8348-8BADEE4DF592}
- {320AF46A-4809-486E-8F9E-A00C8AE47751} = {D3D38B03-B557-484D-8348-8BADEE4DF592}
- {11894B4A-78B4-4523-A6DD-4495722E244F} = {D3D38B03-B557-484D-8348-8BADEE4DF592}
- {B836F712-7FB6-4B75-A3EB-FB05F8E0D15E} = {D3D38B03-B557-484D-8348-8BADEE4DF592}
- {B00098E4-771E-41DF-A3AA-A606AAB334B7} = {D3D38B03-B557-484D-8348-8BADEE4DF592}
- {BD93C0F3-3CED-4BE8-9389-4234250FBFB1} = {D3D38B03-B557-484D-8348-8BADEE4DF592}
- {8EDFB7E5-7E7E-411D-99C5-7A4895D0F9CB} = {D3D38B03-B557-484D-8348-8BADEE4DF592}
- {9E689AD4-F908-493C-B882-B1B33E8F7696} = {D3D38B03-B557-484D-8348-8BADEE4DF592}
- {8D8CC016-0020-40EC-BD8E-73F1CE0F9662} = {D3D38B03-B557-484D-8348-8BADEE4DF592}
- {A84717CB-F11A-41C5-A74D-C0F1D47B7431} = {D3D38B03-B557-484D-8348-8BADEE4DF592}
{85D0CAFD-2FE8-496A-88C7-585D35B94243} = {09EADF06-BE25-4228-AB53-95AE3E15B530}
- {31D38B21-102B-41C0-9E0A-2FE0BF68D123} = {D3D38B03-B557-484D-8348-8BADEE4DF592}
{5E920CAC-5A28-42FB-936E-49C472130953} = {AED9C836-31E3-4F3F-8ABC-929555D3F3C4}
- {AD7058C9-5608-49A8-BE23-58C33A74EE91} = {D3D38B03-B557-484D-8348-8BADEE4DF592}
{E02DA82D-3FEE-4C60-BD80-9EC3C3448DFC} = {09EADF06-BE25-4228-AB53-95AE3E15B530}
- {B1B3F284-FA3D-4D76-A712-FF04495D244B} = {D3D38B03-B557-484D-8348-8BADEE4DF592}
{D6741C37-B5E6-4050-BCBA-9715809EA15B} = {09EADF06-BE25-4228-AB53-95AE3E15B530}
{21CAD3A1-5E1F-42C1-BB73-46B6E67F4206} = {AED9C836-31E3-4F3F-8ABC-929555D3F3C4}
- {AE4F7569-26F3-4160-8A8B-7A57D0DA3350} = {D3D38B03-B557-484D-8348-8BADEE4DF592}
{F308DC6B-7E59-40D7-A581-834E8CD99CFE} = {7F13E156-3EBA-4021-84A5-CD56BA72F99E}
{C2652287-CD6D-40FB-B042-95FB56D09DB8} = {AED9C836-31E3-4F3F-8ABC-929555D3F3C4}
{E48285BF-F49A-4EA3-AED0-1BDDBF77EB80} = {09EADF06-BE25-4228-AB53-95AE3E15B530}
- {F5D11F71-2D61-4AE9-99D7-0F0B54649B15} = {D3D38B03-B557-484D-8348-8BADEE4DF592}
{A6924919-9E37-4023-8B7F-E85C8E3CC9B3} = {DA452A53-2E94-4433-B08C-041EDEC729E6}
{3C8F910B-7F23-4D25-B521-6D5AC9570ADD} = {DA452A53-2E94-4433-B08C-041EDEC729E6}
+ {E2DD0721-5B0F-4606-8182-4C7EFB834518} = {09EADF06-BE25-4228-AB53-95AE3E15B530}
{56CB0850-7341-4D71-9AE4-9EFC472D93DD} = {09EADF06-BE25-4228-AB53-95AE3E15B530}
{46CC5637-3DDF-4100-93FC-44BB87B2DB81} = {AED9C836-31E3-4F3F-8ABC-929555D3F3C4}
- {3817A875-278C-4140-BF66-3C4A8CA55F0D} = {D3D38B03-B557-484D-8348-8BADEE4DF592}
{419F93D5-4135-4DA0-A76E-EFC23E04093D} = {09EADF06-BE25-4228-AB53-95AE3E15B530}
{A22FAD27-77E8-4460-8B92-EC7090B7173A} = {AED9C836-31E3-4F3F-8ABC-929555D3F3C4}
{A1CAC86F-F4BB-4B6D-9D18-E9AE15B3C66E} = {AED9C836-31E3-4F3F-8ABC-929555D3F3C4}
{C8DB58DC-6434-4431-A81F-263D86E2A5F3} = {AED9C836-31E3-4F3F-8ABC-929555D3F3C4}
- {C91F81E3-B900-4968-A6DF-F53B515E97E1} = {BF66A305-DF10-47E4-8D81-42049B149D2B}
- {027DBA48-85B6-46F1-9487-0B49B5057FC0} = {C91F81E3-B900-4968-A6DF-F53B515E97E1}
- {E2DD0721-5B0F-4606-8182-4C7EFB834518} = {09EADF06-BE25-4228-AB53-95AE3E15B530}
- {1BA5C784-52E8-4A87-8525-26B2452F2882} = {D3D38B03-B557-484D-8348-8BADEE4DF592}
+ {6CF88209-69DB-4B36-9604-3ECD9F163E96} = {09EADF06-BE25-4228-AB53-95AE3E15B530}
+ {4584326B-C5B3-4CAE-B98A-34C5F5AA16F3} = {09EADF06-BE25-4228-AB53-95AE3E15B530}
+ {84150C22-0627-4A11-81C9-F214762855EA} = {09EADF06-BE25-4228-AB53-95AE3E15B530}
+ {D9FDD2D5-BFFC-4A4D-8589-7F63AA3EA923} = {09EADF06-BE25-4228-AB53-95AE3E15B530}
+ {0B765344-11A4-4738-9759-5060599DC134} = {AED9C836-31E3-4F3F-8ABC-929555D3F3C4}
+ {8AFB8CC3-DA0B-4364-BFB3-296A7C54CC25} = {AED9C836-31E3-4F3F-8ABC-929555D3F3C4}
EndGlobalSection
GlobalSection(ExtensibilityGlobals) = postSolution
SolutionGuid = {41165AF1-35BB-4832-A189-73060F82B01D}
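
Two project-type GUID changes in the solution are easy to miss: Microsoft.ML.NightlyBuild.Tests and Microsoft.ML.NugetPackageVersionUpdater switch from the classic C# project type to the SDK-style one. The two GUIDs decode as:

    {FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}  classic (non-SDK) C# project
    {9A19103F-16F7-4668-BE54-9A1E7A4F7556}  SDK-style C# project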
diff --git a/NuGet.config b/NuGet.config
new file mode 100644
index 0000000000..976d8d3441
--- /dev/null
+++ b/NuGet.config
@@ -0,0 +1,20 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
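
The body of the new NuGet.config is blank in this rendering (its XML was stripped), but the RestoreSources list removed from Directory.Build.props above suggests the feeds moved here. A plausible shape, using the feed URLs visible in the removed lines; the source key names are assumptions:

    <?xml version="1.0" encoding="utf-8"?>
    <configuration>
      <packageSources>
        <clear />
        <add key="nuget.org" value="https://api.nuget.org/v3/index.json" />
        <add key="machinelearning" value="https://pkgs.dev.azure.com/dnceng/public/_packaging/MachineLearning/nuget/v3/index.json" />
        <add key="machinelearning-testdata" value="https://pkgs.dev.azure.com/dnceng/public/_packaging/machinelearning-testdata/nuget/v3/index.json" />
      </packageSources>
    </configuration>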
diff --git a/README.md b/README.md
index 474113b97c..06b5634b50 100644
--- a/README.md
+++ b/README.md
@@ -1,6 +1,6 @@
# Machine Learning for .NET
-[ML.NET](https://www.microsoft.com/net/learn/apps/machine-learning-and-ai/ml-dotnet) is a cross-platform open-source machine learning framework which makes machine learning accessible to .NET developers with the same code that powers machine learning across many Microsoft products, including Power BI, Windows Defender, and Azure.
+[ML.NET](https://www.microsoft.com/net/learn/apps/machine-learning-and-ai/ml-dotnet) is a cross-platform open-source machine learning framework which makes machine learning accessible to .NET developers with the same code that powers machine learning across many Microsoft products, including Power BI, Windows Defender, and Azure.
ML.NET allows .NET developers to develop/train their own models and infuse custom machine learning into their applications using .NET, even without prior expertise in developing or tuning machine learning models. It provides data loading from files and databases, enables data transformations and includes many ML algorithms.
@@ -14,15 +14,15 @@ If you are new to machine learning, start by learning the basics from this colle
## ML.NET Documentation, tutorials and reference
-Please check our [documentation and tutorials](https://docs.microsoft.com/en-us/dotnet/machine-learning/).
+Please check our [documentation and tutorials](https://docs.microsoft.com/en-us/dotnet/machine-learning/).
See the [API Reference documentation](https://docs.microsoft.com/en-us/dotnet/api/?view=ml-dotnet).
## Sample apps
-We have a GitHub repo with [ML.NET sample apps](https://github.com/dotnet/machinelearning-samples) with many scenarios such as Sentiment analysis, Fraud detection, Product Recommender, Price Prediction, Anomaly Detection, Image Classification, Object Detection and many more.
+We have a GitHub repo with [ML.NET sample apps](https://github.com/dotnet/machinelearning-samples) with many scenarios such as Sentiment analysis, Fraud detection, Product Recommender, Price Prediction, Anomaly Detection, Image Classification, Object Detection and many more.
-In addition to the ML.NET samples provided by Microsoft, we're also highlighting many more samples created by the community showcased in this separate page [ML.NET Community Samples](https://github.com/dotnet/machinelearning-samples/blob/master/docs/COMMUNITY-SAMPLES.md)
+In addition to the ML.NET samples provided by Microsoft, we're also highlighting many more samples created by the community showcased in this separate page [ML.NET Community Samples](https://github.com/dotnet/machinelearning-samples/blob/main/docs/COMMUNITY-SAMPLES.md)
## ML.NET videos playlist at YouTube
@@ -31,7 +31,7 @@ The [ML.NET videos playlist](https://aka.ms/mlnetyoutube) on YouTube contains se
## Operating systems and processor architectures supported by ML.NET
-ML.NET runs on Windows, Linux, and macOS using [.NET Core](https://github.com/dotnet/core), or Windows using .NET Framework.
+ML.NET runs on Windows, Linux, and macOS using [.NET Core](https://github.com/dotnet/core), or Windows using .NET Framework.
64 bit is supported on all platforms. 32 bit is supported on Windows, except for TensorFlow and LightGBM related functionality.
@@ -67,17 +67,17 @@ Daily NuGet builds of the project are also available in our Azure DevOps feed:
To build ML.NET from source please visit our [developers guide](docs/project-docs/developer-guide.md).
-[](https://codecov.io/gh/dotnet/machinelearning)
+[](https://codecov.io/gh/dotnet/machinelearning)
| | Debug | Release |
|:---|----------------:|------------------:|
-|**CentOS**|[](https://dev.azure.com/dnceng/public/_build/latest?definitionId=104&branchName=master)|[](https://dev.azure.com/dnceng/public/_build/latest?definitionId=104&branchName=master)|
-|**Ubuntu**|[](https://dev.azure.com/dnceng/public/_build/latest?definitionId=104&branchName=master)|[](https://dev.azure.com/dnceng/public/_build/latest?definitionId=104&branchName=master)|
-|**macOS**|[](https://dev.azure.com/dnceng/public/_build/latest?definitionId=104&branchName=master)|[](https://dev.azure.com/dnceng/public/_build/latest?definitionId=104&branchName=master)|
-|**Windows x64**|[](https://dev.azure.com/dnceng/public/_build/latest?definitionId=104&branchName=master)|[](https://dev.azure.com/dnceng/public/_build/latest?definitionId=104&branchName=master)|
-|**Windows FullFramework**|[](https://dev.azure.com/dnceng/public/_build/latest?definitionId=104&branchName=master)|[](https://dev.azure.com/dnceng/public/_build/latest?definitionId=104&branchName=master)|
-|**Windows x86**|[](https://dev.azure.com/dnceng/public/_build/latest?definitionId=104&branchName=master)|[](https://dev.azure.com/dnceng/public/_build/latest?definitionId=104&branchName=master)|
-|**Windows NetCore3.1**|[](https://dev.azure.com/dnceng/public/_build/latest?definitionId=104&branchName=master)|[](https://dev.azure.com/dnceng/public/_build/latest?definitionId=104&branchName=master)|
+|**CentOS**|[](https://dev.azure.com/dnceng/public/_build/latest?definitionId=104&branchName=main)|[](https://dev.azure.com/dnceng/public/_build/latest?definitionId=104&branchName=main)|
+|**Ubuntu**|[](https://dev.azure.com/dnceng/public/_build/latest?definitionId=104&branchName=main)|[](https://dev.azure.com/dnceng/public/_build/latest?definitionId=104&branchName=main)|
+|**macOS**|[](https://dev.azure.com/dnceng/public/_build/latest?definitionId=104&branchName=main)|[](https://dev.azure.com/dnceng/public/_build/latest?definitionId=104&branchName=main)|
+|**Windows x64**|[](https://dev.azure.com/dnceng/public/_build/latest?definitionId=104&branchName=main)|[](https://dev.azure.com/dnceng/public/_build/latest?definitionId=104&branchName=main)|
+|**Windows FullFramework**|[](https://dev.azure.com/dnceng/public/_build/latest?definitionId=104&branchName=main)|[](https://dev.azure.com/dnceng/public/_build/latest?definitionId=104&branchName=main)|
+|**Windows x86**|[](https://dev.azure.com/dnceng/public/_build/latest?definitionId=104&branchName=main)|[](https://dev.azure.com/dnceng/public/_build/latest?definitionId=104&branchName=main)|
+|**Windows NetCore3.1**|[](https://dev.azure.com/dnceng/public/_build/latest?definitionId=104&branchName=main)|[](https://dev.azure.com/dnceng/public/_build/latest?definitionId=104&branchName=main)|
## Release process and versioning
diff --git a/build.cmd b/build.cmd
index c020999ade..9aef81fd1f 100644
--- a/build.cmd
+++ b/build.cmd
@@ -1,2 +1,3 @@
-@call "%~dp0run.cmd" build %*
-@exit /b %ERRORLEVEL%
+@echo off
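+:: Delegates to Arcade's shared Build.ps1; any extra arguments pass straight through,
+:: e.g.: build.cmd -configuration Release -test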
+powershell -ExecutionPolicy ByPass -NoProfile -command "& """%~dp0eng\common\Build.ps1""" -restore -build -warnAsError 0 %*"
+exit /b %ErrorLevel%
diff --git a/build.proj b/build.proj
deleted file mode 100644
index 1d95743b60..0000000000
--- a/build.proj
+++ /dev/null
@@ -1,159 +0,0 @@
-
-
-
-
- true
-
-
-
-
-
-
-
-
-
-
- true
-
-
-
-
- $(RepoRoot)
-
-
-
-
-
-
-
-
-
-
-
-
- CreateOrUpdateCurrentVersionFile;
- RestoreProjects;
- BuildRedist;
- BuildNative;
- $(TraversalBuildDependsOn);
- DownloadExternalTestFiles;
- DownloadTensorflowMetaFiles;
- DeleteTestHost;
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- https://aka.ms/mlnet-resources/benchmarks/%(Identity)
- $(MSBuildThisFileDirectory)/test/data/external/%(Identity)
-
-
-
-
-
-
-
-
-
-
-
-
-
- https://aka.ms/mlnet-resources/meta/%(Identity)
- $([System.IO.Path]::GetTempPath())/MLNET/
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/build.sh b/build.sh
index dc1acca8df..87cb508fc7 100755
--- a/build.sh
+++ b/build.sh
@@ -10,4 +10,4 @@ while [ -h "$SOURCE" ]; do # resolve $SOURCE until the file is no longer a symli
done
DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"
-"$DIR/run.sh" build "$@"
+"$DIR/eng/common/build.sh" --restore --build --warnAsError false "$@"
diff --git a/build/.night-build.yml b/build/.night-build.yml
index f68a4e5ff3..4b90a32cf2 100644
--- a/build/.night-build.yml
+++ b/build/.night-build.yml
@@ -8,14 +8,14 @@ pr: none
# no CI builds
trigger: none
-# scheduled trigger, runs at UTC 8:00 every day which is midnight of GMT-8
+# scheduled trigger, runs at 8:00 UTC every day, which is midnight GMT-8
schedules:
- cron: "0 8 * * *"
displayName: Nightly Build at midnight
branches:
include:
- - master
- - releases/1.0
+ - main
+ - releases/1.5.5
- features/automl
- features/integrationPackage
always: true
diff --git a/build/.outer-loop-build.yml b/build/.outer-loop-build.yml
index 92edc2ac5e..984fd67945 100644
--- a/build/.outer-loop-build.yml
+++ b/build/.outer-loop-build.yml
@@ -8,14 +8,14 @@ pr: none
# no CI builds
trigger: none
-# scheduled trigger, runs at UTC 8:00 every day which is midnight of GMT-8
+# scheduled trigger, runs at 8:00 UTC every day, which is midnight GMT-8
schedules:
- cron: "0 8 * * *"
displayName: outer loop build at midnight
branches:
include:
- - master
- - releases/1.0
+ - main
+ - releases/1.5.5
- features/automl
- features/integrationPackage
always: true
@@ -25,7 +25,7 @@ resources:
containers:
- container: CentosContainer
image: mcr.microsoft.com/dotnet-buildtools/prereqs:centos-7-mlnet-8bba86b-20190314145033
-
+
- container: UbuntuContainer
image: mcr.microsoft.com/dotnet-buildtools/prereqs:ubuntu-16.04-mlnet-20200515184230-2c829e8
diff --git a/build/Codecoverage.proj b/build/Codecoverage.proj
index 03595f7ca8..c8c68a762b 100644
--- a/build/Codecoverage.proj
+++ b/build/Codecoverage.proj
@@ -16,12 +16,12 @@
<_ReportGeneratorPath>$(PkgReportGenerator)\tools\net47\ReportGenerator.exe
-
-
+
+
- <_CodecovArgs Include="-f;$(BaseOutputPath)$(PlatformConfig)\coverage\Cobertura.xml" />
-
+ <_CodecovArgs Include="-f;$(ArtifactsDir)bin\coverage\Cobertura.xml" />
+
<_CodecovArgs Include="--required" />
diff --git a/build/Dependencies.props b/build/Dependencies.props
deleted file mode 100644
index 196d7b9bad..0000000000
--- a/build/Dependencies.props
+++ /dev/null
@@ -1,65 +0,0 @@
-
-
-
-
- 10.0.3
- 4.4.0
- 4.4.0
- 1.5.0
- 4.5.1
- 4.3.0
- 4.7.1
-
-
-
-
- 3.10.1
- 2.2.3
- 2.1.0
- 1.5.1
- 0.0.0.9
- 2.1.3
- 4.5.0
- 4.5.0
- 4.5.0
- 1.14.0
- 1
- 0.11.8.1
-
-
-
-
- 2.9.0
-
-
-
-
- 3.3.1
- 4.5.0
- 1.2.0
-
-
-
-
- 1.0.0-beta-62824-02
- 1.9.0
- 1.2.1
- 4.3.6
- 1.0.0-beta.19225.5
-
-
-
-
- 0.12.0
- 1.0.1-beta1.20080.1
- 3.0.1
- 0.0.6-test
- 0.0.6-test
- 0.0.12-test
- 0.0.6-test
- 4.6.1
- 1.2.7
- 1.0.112.2
-
-
-
diff --git a/build/ci/job-template.yml b/build/ci/job-template.yml
index 52abf2dc99..4944eb6c6c 100644
--- a/build/ci/job-template.yml
+++ b/build/ci/job-template.yml
@@ -1,3 +1,4 @@
+# TODO: Need to update build documentation.
parameters:
name: ''
architecture: x64
@@ -21,13 +22,18 @@ jobs:
timeoutInMinutes: 120
cancelTimeoutInMinutes: 10
variables:
- dotnetPath: $(Build.SourcesDirectory)/Tools/dotnetcli/dotnet
+ dotnetPath: $(Build.SourcesDirectory)/.dotnet/dotnet
nugetFeed: https://pkgs.dev.azure.com/dnceng/public/_packaging/MachineLearning/nuget/v3/index.json
nightlyBuildProjPath: $(Build.SourcesDirectory)/test/Microsoft.ML.NightlyBuild.Tests/Microsoft.ML.NightlyBuild.Tests.csproj
- nightlyBuildRunPath: $(Build.SourcesDirectory)/bin/AnyCPU.$(_configuration)/Microsoft.ML.NightlyBuild.Tests/$(_targetFramework)
+ nightlyBuildRunPath: $(Build.SourcesDirectory)/artifacts/bin/Microsoft.ML.NightlyBuild.Tests/$(_configuration)/$(_targetFramework)
+ runNightlyBuildProj: $(Build.SourcesDirectory)/test/run-night-build-tests.proj
packageUpdaterProjPath: $(Build.SourcesDirectory)/test/Microsoft.ML.NugetPackageVersionUpdater/Microsoft.ML.NugetPackageVersionUpdater.csproj
versionFilePath: $(Build.SourcesDirectory)/test/Microsoft.ML.NugetPackageVersionUpdater/latest_versions.txt
PROCDUMP_PATH: '$(Build.SourcesDirectory)/Tools/ProcDump/'
+ ${{ if eq(parameters.buildScript, 'build.cmd') }}:
+ spaceValue: ' '
+ ${{ if eq(parameters.buildScript, './build.sh') }}:
+ spaceValue: '%20'
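+ # MSBuild unescapes %20 back to a space, so non-Windows builds can embed a space in a
+ # single /p:TestRunnerAdditionalArguments value without the shell splitting it apart.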
strategy:
matrix:
${{ if eq(parameters.customMatrixes, '') }}:
@@ -43,19 +49,26 @@ jobs:
_targetFramework: netcoreapp2.1
${{ if ne(parameters.customMatrixes, '') }}:
${{ insert }}: ${{ parameters.customMatrixes }}
-
+
pool: ${{ parameters.pool }}
${{ if ne(parameters.container, '') }}:
container: ${{ parameters.container }}
steps:
+ # Work around MacOS Homebrew image/environment bug: https://github.com/actions/virtual-environments/issues/2322#issuecomment-749211076
+ - ${{ if eq(parameters.pool.name, 'Hosted macOS') }}:
+ - script: |
+ rm -rf /usr/local/bin/2to3
+ displayName: MacOS Homebrew bug Workaround
+ continueOnError: true
+ # Extra MacOS step required to install OS-specific dependencies
- ${{ if eq(parameters.pool.name, 'Hosted macOS') }}:
- - script: brew update && brew install $(Build.SourcesDirectory)/build/libomp.rb && brew unlink python@2 && brew install mono-libgdiplus gettext && brew link gettext --force && brew link libomp --force
- displayName: Install build dependencies
+ - script: brew update && brew install mono-libgdiplus && brew install $(Build.SourcesDirectory)/build/libomp.rb && brew link libomp --force
+ displayName: Install MacOS build dependencies
- ${{ if and( eq(parameters.nightlyBuild, 'true'), eq(parameters.pool.name, 'Hosted Ubuntu 1604')) }}:
- bash: echo "##vso[task.setvariable variable=LD_LIBRARY_PATH]$(nightlyBuildRunPath):$LD_LIBRARY_PATH"
displayName: Set LD_LIBRARY_PATH for Ubuntu and CentOS to locate Native shared library in current running path
- - script: ${{ parameters.buildScript }} -$(_configuration) -buildArch=${{ parameters.architecture }}
+ - script: ${{ parameters.buildScript }} -configuration $(_configuration) /p:TargetArchitecture=${{ parameters.architecture }} /p:TestArchitectures=${{ parameters.architecture }} /p:RestorePackagesPath=$(Build.SourcesDirectory)\packages /p:NUGET_PACKAGES=$(Build.SourcesDirectory)\packages
displayName: Build
- ${{ if eq(parameters.pool.name, 'Hosted macOS') }}:
- task: Bash@3
@@ -70,13 +83,13 @@ jobs:
script: cd packages;find . -type d -path "*/runtimes/osx-*" -exec rm -rv {} +;find . -type d -path "*/runtimes/win-*" -exec rm -rv {} +;cd ..
displayName: Clean up non-Linux runtime folders of NuGet Packages to save disk space
- ${{ if eq(parameters.buildScript, 'build.cmd') }}:
+ - script: dir /s "$(Build.SourcesDirectory)"
+ displayName: show bin folder disk usage
- task: PowerShell@2
inputs:
targetType: inline
- script: Get-ChildItem -Path '.\packages\*\runtimes\*' -Recurse | Select -ExpandProperty FullName | Where {$_ -notlike '*\win-*'} | sort length -Descending | Remove-Item -Recurse -Confirm:$false -Force
+ script: Get-ChildItem -Path '$(Build.SourcesDirectory)\packages\*\runtimes\*' -Recurse | Select -ExpandProperty FullName | Where {$_ -notlike '*\win-*'} | sort length -Descending | Remove-Item -Recurse -Confirm:$false -Force
displayName: Clean up non-Windows runtime folders of NuGet Packages to save disk space
- - script: dir /s "bin"
- displayName: show bin folder disk usage
- ${{ if eq(parameters.nightlyBuild, 'true') }}:
- script: $(dotnetPath) restore $(nightlyBuildProjPath)
displayName: Restore nightly build project
@@ -86,30 +99,25 @@ jobs:
displayName: Update package versions for nightly build
- ${{ if eq(parameters.buildScript, 'build.cmd') }}:
- powershell: |
- Get-ChildItem -Path '.\bin\AnyCPU.*' -Recurse |
+ Get-ChildItem -Path '.\artifacts\bin\*' -Recurse |
Select -ExpandProperty FullName |
- Where {$_ -notlike '*\Microsoft.ML.NightlyBuild.Tests*'} |
- sort length -Descending |
- Remove-Item -force
+ Where {$_ -NotMatch '.*\\Microsoft\.ML\.NightlyBuild\.Tests.*|.*\\Native.*'} |
+ sort length -Descending |
+ Remove-Item -force
Write-Output "Done cleaning up usless project..."
displayName: Clean up useless project
- script: $(dotnetPath) msbuild -restore $(nightlyBuildProjPath) /p:ReferenceTypeForTestFramework="Nuget" /p:Configuration=$(_configuration) /p:TargetArchitecture=${{ parameters.architecture }}
displayName: Build Nightly-Build Project with latest package versions
- - script: ${{ parameters.buildScript }} -$(_configuration) -runnightlybuildtests
+ - script: $(dotnetPath) msbuild $(runNightlyBuildProj) /t:RunNightlyBuildTests /p:Configuration=$(_configuration) /p:TargetArchitecture=${{ parameters.architecture }} /p:TestArchitectures=${{ parameters.architecture }}
displayName: Run Nightly Build Tests
- ${{ if eq(parameters.nightlyBuild, 'false') }}:
- - script: ${{ parameters.buildScript }} -- /t:DownloadExternalTestFiles /p:IncludeBenchmarkData=$(_includeBenchmarkData)
- displayName: Download Benchmark Data
- timeoutInMinutes: 10
- - script: ${{ parameters.buildScript }} -- /t:DownloadTensorflowMetaFiles /p:IncludeTensorflowMetaFile=true
- displayName: Download Tensorflow Meta File
- timeoutInMinutes: 20
- ${{ if eq(parameters.innerLoop, 'false') }}:
- ${{ if and(eq(parameters.runSpecific, 'false'), eq(parameters.useVSTestTask, 'false')) }}:
- - script: ${{ parameters.buildScript }} -$(_configuration) -runtests -coverage=${{ parameters.codeCoverage }}
+ # TODO: Code coverage needs to be fixed.
+ - script: ${{ parameters.buildScript }} /p:Build=false -configuration $(_configuration) /p:TargetArchitecture=${{ parameters.architecture }} /p:TestArchitectures=${{ parameters.architecture }} -test -integrationTest -ci /p:RestorePackagesPath=$(Build.SourcesDirectory)\packages /p:NUGET_PACKAGES=$(Build.SourcesDirectory)\packages /p:Coverage=${{ parameters.codeCoverage }}
displayName: Run All Tests.
- ${{ if and(eq(parameters.runSpecific, 'true'), eq(parameters.useVSTestTask, 'false')) }}:
- - script: ${{ parameters.buildScript }} -$(_configuration) -runSpecificTests -coverage=${{ parameters.codeCoverage }}
+ - script: ${{ parameters.buildScript }} /p:Build=false -configuration $(_configuration) /p:TargetArchitecture=${{ parameters.architecture }} /p:TestArchitectures=${{ parameters.architecture }} -test -integrationTest -ci /p:TestRunnerAdditionalArguments='-trait$(spaceValue)Category=RunSpecificTest' /p:RestorePackagesPath=$(Build.SourcesDirectory)\packages /p:NUGET_PACKAGES=$(Build.SourcesDirectory)\packages /p:Coverage=${{ parameters.codeCoverage }}
displayName: Run Specific Tests.
- ${{ if and(eq(parameters.buildScript, 'build.cmd'), eq(parameters.useVSTestTask, 'true')) }}:
- task: VSTest@2
@@ -119,10 +127,10 @@ jobs:
testAssemblyVer2: |
**\*test.dll
**\*tests.dll
- !**\obj\**
+ !**\obj\**
runSettingsFile: $(Build.SourcesDirectory)/tools-local/vstest.runsettings
searchFolder: '$(System.DefaultWorkingDirectory)'
- vstestLocationMethod: 'version'
+ vstestLocationMethod: 'version'
vsTestVersion: 'latest'
runInParallel: False
runTestsInIsolation: True
@@ -133,18 +141,23 @@ jobs:
collectDumpOn: onAbortOnly
publishRunAttachments: true
- ${{ if eq(parameters.innerLoop, 'true') }}:
- - script: ${{ parameters.buildScript }} -$(_configuration) -runCITests -coverage=${{ parameters.codeCoverage }}
+ - script: ${{ parameters.buildScript }} /p:Build=false -configuration $(_configuration) /p:TargetArchitecture=${{ parameters.architecture }} /p:TestArchitectures=${{ parameters.architecture }} -test -integrationTest -ci /p:TestRunnerAdditionalArguments='-notrait$(spaceValue)Category=SkipInCI' /p:RestorePackagesPath=$(Build.SourcesDirectory)\packages /p:NUGET_PACKAGES=$(Build.SourcesDirectory)\packages /p:Coverage=${{ parameters.codeCoverage }}
displayName: Run CI Tests.
- - script: $(Build.SourcesDirectory)/Tools/dotnetcli/dotnet msbuild -restore build/Codecoverage.proj
+ - script: $(dotnetPath) msbuild -restore build/Codecoverage.proj
displayName: Upload coverage to codecov.io
condition: and(succeeded(), eq(${{ parameters.codeCoverage }}, True))
- task: PublishTestResults@2
displayName: Publish Test Results
condition: succeededOrFailed()
inputs:
- testRunner: 'vSTest'
- searchFolder: '$(System.DefaultWorkingDirectory)/bin'
- testResultsFiles: '**/*.trx'
+ testRunner: 'xUnit'
+ searchFolder: '$(Build.SourcesDirectory)/artifacts/TestResults'
+ # Upload all test results except those from the performance test project. On CI, performance
+ # tests do not run by default, but their result files are still generated without details.
+ # Avoid uploading performance test results to prevent warnings at the publish-test-results stage.
+ testResultsFiles: |
+ **/*.xml
+ !**/*PerformanceTests*.xml
testRunTitle: Machinelearning_Tests_${{ parameters.name }}_$(_configuration)_$(Build.BuildNumber)
configuration: $(_configuration)
mergeTestResults: true
@@ -153,16 +166,15 @@ jobs:
condition: not(succeeded())
inputs:
sourceFolder: $(Build.SourcesDirectory)
- contents: '?(msbuild.*|binclash.log|init-tools.log)'
+ contents: 'artifacts/log/**'
targetFolder: $(Build.ArtifactStagingDirectory)
- task: CopyFiles@2
displayName: Stage test output
condition: not(succeeded())
inputs:
- sourceFolder: $(Build.SourcesDirectory)/bin
+ sourceFolder: $(Build.SourcesDirectory)
contents: |
- **/TestOutput/**/*
- **/*.trx
+ artifacts/TestResults/**
targetFolder: $(Build.ArtifactStagingDirectory)
- task: CopyFiles@2
displayName: Stage process dump and pdb if any
@@ -172,7 +184,7 @@ jobs:
contents: |
*.dmp
CrashDumps/*.dmp
- bin/**/*.pdb
+ artifacts/bin/**/*.pdb
targetFolder: $(Build.ArtifactStagingDirectory)
- task: PublishBuildArtifacts@1
displayName: Publish build and test logs
@@ -182,5 +194,5 @@ jobs:
artifactName: ${{ parameters.name }} $(_config_short)
artifactType: container
- ${{ if eq(parameters.nightlyBuild, 'false') }}:
- - script: ${{ parameters.buildScript }} -buildPackages
- displayName: Build Packages
+ - script: ${{ parameters.buildScript }} /p:Build=false -pack -ci -configuration $(_configuration) /p:TargetArchitecture=${{ parameters.architecture }} /p:TestArchitectures=${{ parameters.architecture }} /p:RestorePackagesPath=$(Build.SourcesDirectory)\packages /p:NUGET_PACKAGES=$(Build.SourcesDirectory)\packages
+ displayName: Build Packages
\ No newline at end of file
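The test legs above can be reproduced locally with the same Arcade switches the template passes, e.g. (a sketch, without the CI-only package-path overrides):

```sh
./build.sh -configuration Release -test -integrationTest
```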
diff --git a/build/publish.proj b/build/publish.proj
index 065f5294cf..e35ac8f123 100644
--- a/build/publish.proj
+++ b/build/publish.proj
@@ -1,52 +1,20 @@
+
-
-
Microsoft.SymbolUploader.Build.Task
true
600
-
-
-
-
-
-
-
-
-
-
-
- Pushing took too long
-
-
-
-
-
-
- $(ToolsDir)dotnetcli/dotnet
- $(DotnetToolCommand) nuget push --source $(NuGetFeedUrl) --api-key $(NuGetApiKey) --timeout $(NuGetPushTimeoutSeconds)
-
-
-
-
+
-
+
@@ -55,15 +23,15 @@
180
- true
+ false
-
+
-
+
\ No newline at end of file
diff --git a/build/sign.proj b/build/sign.proj
deleted file mode 100644
index 90666e989a..0000000000
--- a/build/sign.proj
+++ /dev/null
@@ -1,50 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- $(PackageAssetsPath)
- $(PackageOutputPath)
- $(IntermediateOutputRootPath)
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- Microsoft400
-
-
-
-
-
-
- NuGet
-
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/build/vsts-ci.yml b/build/vsts-ci.yml
index 36874b2105..a6c730106f 100644
--- a/build/vsts-ci.yml
+++ b/build/vsts-ci.yml
@@ -1,5 +1,5 @@
################################################################################
-# ML.NET's official, signed build
+# ML.NET's official, signed build
################################################################################
resources:
@@ -23,14 +23,18 @@ phases:
- agent.os -equals linux
container: CentosContainer
steps:
+ - script: ./restore.sh
+ displayName: restore all projects
+ - script: ./build.sh -configuration $(BuildConfig) /p:CopyPackageAssets=true /p:SkipRIDAgnosticAssets=true -projects $(Build.SourcesDirectory)/src/Redist/Microsoft.ML.DnnImageFeaturizer.ModelRedist/Microsoft.ML.DnnImageFeaturizer.ModelRedist.proj
+ displayName: build redist
# Only build native assets to avoid conflicts.
- - script: ./build.sh -buildNative -$(BuildConfig) -skipRIDAgnosticAssets
+ - script: ./build.sh -configuration $(BuildConfig) -projects $(Build.SourcesDirectory)/src/Native/Native.proj /p:TargetArchitecture=x64 /p:CopyPackageAssets=true
displayName: Build
- task: PublishBuildArtifacts@1
displayName: Publish Linux package assets
inputs:
- pathToPublish: $(Build.SourcesDirectory)/bin/obj/packages
+ pathToPublish: $(Build.SourcesDirectory)/artifacts/pkgassets
artifactName: PackageAssets
artifactType: container
@@ -46,16 +50,25 @@ phases:
queue:
name: Hosted macOS
steps:
- - script: brew update && brew install $(Build.SourcesDirectory)/build/libomp.rb && brew link libomp --force
+ # Work around MacOS Homebrew image/environment bug: https://github.com/actions/virtual-environments/issues/2322#issuecomment-749211076
+ - script: |
+ rm -rf /usr/local/bin/2to3
+ displayName: MacOS Homebrew bug Workaround
+ continueOnError: true
+ - script: brew update && brew unlink python@3.8 && brew install mono-libgdiplus && brew install $(Build.SourcesDirectory)/build/libomp.rb && brew link libomp --force
displayName: Install build dependencies
+ - script: ./restore.sh
+ displayName: restore all projects
+ - script: ./build.sh -configuration $(BuildConfig) /p:CopyPackageAssets=true /p:SkipRIDAgnosticAssets=true -projects $(Build.SourcesDirectory)/src/Redist/Microsoft.ML.DnnImageFeaturizer.ModelRedist/Microsoft.ML.DnnImageFeaturizer.ModelRedist.proj
+ displayName: build redist
# Only build native assets to avoid conflicts.
- - script: ./build.sh -buildNative -$(BuildConfig) -skipRIDAgnosticAssets
+ - script: ./build.sh -projects $(Build.SourcesDirectory)/src/Native/Native.proj -configuration $(BuildConfig) /p:TargetArchitecture=x64 /p:CopyPackageAssets=true
displayName: Build
- task: PublishBuildArtifacts@1
displayName: Publish macOS package assets
inputs:
- pathToPublish: $(Build.SourcesDirectory)/bin/obj/packages
+ pathToPublish: $(Build.SourcesDirectory)/artifacts/pkgassets
artifactName: PackageAssets
artifactType: container
@@ -73,7 +86,7 @@ phases:
_TeamName: DotNetCore
queue:
name: DotNetCore-Build
- demands:
+ demands:
- agent.os -equals Windows_NT
steps:
@@ -88,27 +101,26 @@ phases:
continueOnError: false
condition: and(succeeded(), in(variables._SignType, 'real', 'test'))
+ - script: ./restore.cmd
+ displayName: restore all projects
+ - script: ./build.cmd -configuration $(BuildConfig) /p:CopyPackageAssets=true /p:SkipRIDAgnosticAssets=true -projects $(Build.SourcesDirectory)/src/Redist/Microsoft.ML.DnnImageFeaturizer.ModelRedist/Microsoft.ML.DnnImageFeaturizer.ModelRedist.proj
+ displayName: build redist
# Only build native assets to avoid conflicts.
- - script: ./build.cmd -buildNative -$(BuildConfig) -buildArch=x86 -skipRIDAgnosticAssets
+ - script: ./build.cmd -projects $(Build.SourcesDirectory)/src/Native/Native.proj -configuration $(BuildConfig) /p:TargetArchitecture=x86 /p:CopyPackageAssets=true
displayName: Build
-
- - task: MSBuild@1
- displayName: Sign Windows_x86 Binaries
- inputs:
- solution: build/sign.proj
- msbuildArguments: /p:SignType=$(_SignType)
- msbuildVersion: 15.0
- continueOnError: false
+
+ - script: ./sign.cmd -configuration $(BuildConfig) /p:TargetArchitecture=x86 /p:SignBinaries=true
+ displayName: sign binaries
- task: PublishBuildArtifacts@1
displayName: Publish Windows_x86 package assets
inputs:
- pathToPublish: $(Build.SourcesDirectory)/bin/obj/packages
+ pathToPublish: $(Build.SourcesDirectory)/artifacts/pkgassets
artifactName: PackageAssets
artifactType: container
# Terminate all dotnet build processes.
- - script: $(Build.SourcesDirectory)/Tools/dotnetcli/dotnet.exe build-server shutdown
+ - script: $(Build.SourcesDirectory)/.dotnet/dotnet.exe build-server shutdown
displayName: Dotnet Server Shutdown
################################################################################
@@ -125,7 +137,7 @@ phases:
_TeamName: DotNetCore
queue:
name: DotNetCore-Build
- demands:
+ demands:
- agent.os -equals Windows_NT
steps:
@@ -140,8 +152,8 @@ phases:
continueOnError: false
condition: and(succeeded(), in(variables._SignType, 'real', 'test'))
- # Build both native and managed assets.
- - script: ./build.cmd -$(BuildConfig)
+ # Build both native and managed assets.
+ - script: ./build.cmd -configuration $(BuildConfig) /p:TargetArchitecture=x64 /p:CopyPackageAssets=true
displayName: Build
- task: ComponentGovernanceComponentDetection@0
@@ -150,24 +162,19 @@ phases:
verbosity: 'Verbose'
alertWarningLevel: 'High'
-
- - task: MSBuild@1
- displayName: Sign Windows_x64 Binaries
- inputs:
- solution: build/sign.proj
- msbuildArguments: /p:SignType=$(_SignType)
- msbuildVersion: 15.0
- continueOnError: false
+
+ - script: ./sign.cmd -configuration $(BuildConfig) /p:TargetArchitecture=x64 /p:SignBinaries=true
+ displayName: sign binaries
- task: PublishBuildArtifacts@1
displayName: Publish Windows_x64 package assets
inputs:
- pathToPublish: $(Build.SourcesDirectory)/bin/obj/packages
+ pathToPublish: $(Build.SourcesDirectory)/artifacts/pkgassets
artifactName: PackageAssets
artifactType: container
# Terminate all dotnet build processes.
- - script: $(Build.SourcesDirectory)/Tools/dotnetcli/dotnet.exe build-server shutdown
+ - script: $(Build.SourcesDirectory)/.dotnet/dotnet.exe build-server shutdown
displayName: Dotnet Server Shutdown
################################################################################
@@ -181,6 +188,7 @@ phases:
variables:
BuildConfig: Release
OfficialBuildId: $(BUILD.BUILDNUMBER)
+ DotnetVersionKind: $[variables.VERSIONKIND] # If no "VERSIONKIND" variable is set when queuing the publishing task, this defaults to an empty string.
DOTNET_CLI_TELEMETRY_OPTOUT: 1
DOTNET_SKIP_FIRST_TIME_EXPERIENCE: 1
DOTNET_MULTILEVEL_LOOKUP: 0
@@ -192,7 +200,7 @@ phases:
_MsdlSymbolServerPath: https://microsoftpublicsymbols.artifacts.visualstudio.com/DefaultCollection
queue:
name: DotNetCore-Build
- demands:
+ demands:
- agent.os -equals Windows_NT
steps:
@@ -213,38 +221,53 @@ phases:
displayName: Download package assets
inputs:
artifactName: PackageAssets
- downloadPath: $(Build.SourcesDirectory)/bin/obj/packages
+ downloadPath: $(Build.SourcesDirectory)/artifacts/pkgassets
# Workaround https://github.com/Microsoft/vsts-tasks/issues/6739
- task: CopyFiles@2
displayName: Copy package assets to correct folder
inputs:
- sourceFolder: $(Build.SourcesDirectory)/bin/obj/packages/PackageAssets
- targetFolder: $(Build.SourcesDirectory)/bin/obj/packages
-
- - script: ./build.cmd -buildPackages
- displayName: Create Packages
+ sourceFolder: $(Build.SourcesDirectory)/artifacts/pkgassets/PackageAssets
+ targetFolder: $(Build.SourcesDirectory)/artifacts/pkgassets
- - task: MSBuild@1
- displayName: Sign Packages
- inputs:
- solution: build/sign.proj
- msbuildArguments: /p:SignType=$(_SignType) /p:SignNugetPackages=true
- msbuildVersion: 15.0
+ # Depending on the value of DotNetFinalVersionKind, the name of the package will change.
+ # For our nightly builds we want it to be empty, and when creating the official nugets we want it to be "release".
+ # The value of the version kind is set when queuing the publishing job on AzureDevOps by adding a VERSIONKIND variable.
+ # See more info in: https://github.com/dotnet/arcade/blob/master/Documentation/CorePackages/Versioning.md#package-version
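+ # E.g., assuming the default pre-release label, leaving VERSIONKIND unset yields versions like
+ # 1.5.5-preview.<date>.<revision>, while VERSIONKIND=release yields the stable 1.5.5.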
+ - script: ./build.cmd -configuration $(BuildConfig) -pack -ci /p:OfficialBuildId=$(OfficialBuildId) /p:DotNetFinalVersionKind=$(DotnetVersionKind) /p:RestorePackagesPath=$(Build.SourcesDirectory)\packages /p:NUGET_PACKAGES=$(Build.SourcesDirectory)\packages
+ displayName: Build Packages
+
+ - script: ./sign.cmd /p:SignNugetPackages=true /p:RestorePackagesPath=$(Build.SourcesDirectory)\packages /p:NUGET_PACKAGES=$(Build.SourcesDirectory)\packages
+ displayName: sign packages
continueOnError: false
+ # The generated .nupkg and .snupkg packages are published to Azure artifacts,
+ # in case we need to debug them. They're found under Release/Shipping.
+ - task: PublishBuildArtifacts@1
+ displayName: Push packages and symbol packages to Azure Artifacts
+ inputs:
+ pathToPublish: $(Build.SourcesDirectory)/artifacts/packages
+ artifactName: PackageAssets
+ artifactType: container
+ continueOnError: true
+
- task: NuGetAuthenticate@0
inputs:
nuGetServiceConnections: machinelearning-dnceng-public-feed # To allow publishing to a feed of another organization
- - script: Tools\dotnetcli\dotnet msbuild build\publish.proj /t:PublishPackages /p:NuGetFeedUrl=$(_AzureDevopsFeedUrl) /p:NuGetApiKey=AzureArtifacts
- displayName: Publish Packages to AzureDevOps Feed
+ - task: NuGetCommand@2
+ displayName: Push packages to AzureDevOps feed
+ inputs:
+ command: push
+ packagesToPush: $(Build.SourcesDirectory)/artifacts/**/*.nupkg;!$(Build.SourcesDirectory)/artifacts/**/*.snupkg
+ nuGetFeedType: external
+ publishFeedCredentials: machinelearning-dnceng-public-feed
- task: MSBuild@1
displayName: Publish Symbols to SymWeb Symbol Server
inputs:
solution: build/publish.proj
- msbuildArguments: /t:PublishSymbolPackages /p:SymbolServerPath=$(_SymwebSymbolServerPath) /p:SymbolServerPAT=$(SymwebSymbolServerPAT)
+ msbuildArguments: /t:PublishSymbolPackages /p:SymbolServerPath=$(_SymwebSymbolServerPath) /p:SymbolServerPAT=$(SymwebSymbolServerPAT) /p:RestorePackagesPath=$(Build.SourcesDirectory)\packages /p:NUGET_PACKAGES=$(Build.SourcesDirectory)\packages
msbuildVersion: 15.0
continueOnError: true
@@ -252,10 +275,10 @@ phases:
displayName: Publish Symbols to Msdl Symbol Server
inputs:
solution: build/publish.proj
- msbuildArguments: /t:PublishSymbolPackages /p:SymbolServerPath=$(_MsdlSymbolServerPath) /p:SymbolServerPAT=$(MsdlSymbolServerPAT)
+ msbuildArguments: /t:PublishSymbolPackages /p:SymbolServerPath=$(_MsdlSymbolServerPath) /p:SymbolServerPAT=$(MsdlSymbolServerPAT) /p:RestorePackagesPath=$(Build.SourcesDirectory)\packages /p:NUGET_PACKAGES=$(Build.SourcesDirectory)\packages
msbuildVersion: 15.0
continueOnError: true
# Terminate all dotnet build processes.
- - script: $(Build.SourcesDirectory)/Tools/dotnetcli/dotnet.exe build-server shutdown
- displayName: Dotnet Server Shutdown
\ No newline at end of file
+ - script: $(Build.SourcesDirectory)/.dotnet/dotnet.exe build-server shutdown
+ displayName: Dotnet Server Shutdown
diff --git a/config.json b/config.json
deleted file mode 100644
index f484ae92c6..0000000000
--- a/config.json
+++ /dev/null
@@ -1,250 +0,0 @@
-{
- "settings": {
- "Configuration": {
- "description": "Sets the optimization level for the Build Configuration you want to build.",
- "valueType": "property",
- "values": [ "Debug", "Release", "Debug-netcoreapp3_1", "Release-netcoreapp3_1", "Debug-netfx", "Release-netfx" ],
- "defaultValue": "Debug"
- },
- "TargetArchitecture": {
- "description": "Sets the architecture for the native assets you want to build.",
- "valueType": "property",
- "values": [ "x64", "x86" ],
- "defaultValue": "x64"
- },
- "OfficialBuildId": {
- "description": "Specifies the SeedDate and the revision of the build to generate the version of the libraries.",
- "valueType": "property",
- "values": [],
- "defaultValue": ""
- },
- "BuildNumberMajor": {
- "description": "Product build major number.",
- "valueType": "property",
- "values": [],
- "defaultValue": ""
- },
- "BuildNumberMinor": {
- "description": "Product build minor number.",
- "valueType": "property",
- "values": [],
- "defaultValue": ""
- },
- "SkipRIDAgnosticAssets": {
- "description": "Prevents RID agnostic assets in redist from being built.",
- "valueType": "property",
- "values": [],
- "defaultValue": ""
- },
- "MsBuildLogging": {
- "description": "MsBuild logging options.",
- "valueType": "passThrough",
- "values": [],
- "defaultValue": "/flp:v=normal"
- },
- "MsBuildWarning": {
- "description": "MsBuild warning logging.",
- "valueType": "passThrough",
- "values": [],
- "defaultValue": "/flp2:warningsonly;logfile=msbuild.wrn"
- },
- "MsBuildError": {
- "description": "MsBuild error logging.",
- "valueType": "passThrough",
- "values": [],
- "defaultValue": "/flp3:errorsonly;logfile=msbuild.err"
- },
- "Project": {
- "description": "Project where the commands are going to be applied.",
- "valueType": "passThrough",
- "values": [],
- "defaultValue": ""
- },
- "BuildNative": {
- "description": "MsBuild target that builds the native assets.",
- "valueType": "target",
- "values": [],
- "defaultValue": ""
- },
- "BuildPackages": {
- "description": "MsBuild target that builds packages.",
- "valueType": "target",
- "values": [],
- "defaultValue": ""
- },
- "RunTests": {
- "description": "MsBuild target that run the tests. Call this after building.",
- "valueType": "target",
- "values": [],
- "defaultValue": ""
- },
- "RunCITests": {
- "description": "MsBuild target that run CI tests. Call this after building.",
- "valueType": "target",
- "values": [],
- "defaultValue": ""
- },
- "RunSpecificTests": {
- "description": "MsBuild target that run specific tests only. Call this after building.",
- "valueType": "target",
- "values": [],
- "defaultValue": ""
- },
- "RunNightlyBuildTests": {
- "description": "MsBuild target that run the nightly build tests. Call this after building.",
- "valueType": "target",
- "values": [],
- "defaultValue": ""
- },
- "Coverage": {
- "description": "Turn on code coverge.",
- "valueType": "property",
- "values": ["false", "true"],
- "defaultValue": "false"
- },
- "CleanAllProjects": {
- "description": "MsBuild target that deletes the binary output directory.",
- "valueType": "target",
- "values": [],
- "defaultValue": ""
- }
- },
- "commands": {
- "build": {
- "alias": {
- "debug": {
- "description": "Sets optimization level to debug for managed build configuration. (/p:Configuration=Debug)",
- "settings": {
- "Configuration": "Debug"
- }
- },
- "release": {
- "description": "Sets optimization level to release for managed build configuration. (/p:Configuration=Release)",
- "settings": {
- "Configuration": "Release"
- }
- },
- "debug-netcoreapp3_1": {
- "description": "Sets optimization level to debug for managed build configuration and builds against netcoreapp3.1. (/p:Configuration=Debug-netcoreapp3_1)",
- "settings": {
- "Configuration": "Debug-netcoreapp3_1"
- }
- },
- "release-netcoreapp3_1": {
- "description": "Sets optimization level to release for managed build configuration and builds against netcoreapp3.1. (/p:Configuration=Release-netcoreapp3_1)",
- "settings": {
- "Configuration": "Release-netcoreapp3_1"
- }
- },
- "debug-netfx": {
- "description": "Sets optimization level to debug for managed build configuration and builds against fullframework. (/p:Configuration=Debug-netfx)",
- "settings": {
- "Configuration": "Debug-netfx"
- }
- },
- "release-netfx": {
- "description": "Sets optimization level to release for managed build configuration and builds against fullframework. (/p:Configuration=Release-netfx)",
- "settings": {
- "Configuration": "Release-netfx"
- }
- },
- "buildArch": {
- "description": "Sets the architecture for the native build. (/p:TargetArchitecture=[value])",
- "settings": {
- "TargetArchitecture": "default"
- }
- },
- "buildNative": {
- "description": "Builds the native assets.",
- "settings": {
- "BuildNative": "default"
- }
- },
- "skipRIDAgnosticAssets": {
- "description": "Avoid building RID agnostic assets in redist.",
- "settings": {
- "SkipRIDAgnosticAssets": "default"
- }
- },
- "buildPackages": {
- "description": "Builds the NuGet packages.",
- "settings": {
- "BuildPackages": "default"
- }
- },
- "runtests": {
- "description": "Runs the tests. Call this after building.",
- "settings": {
- "RunTests": "default"
- }
- },
- "runCITests": {
- "description": "Runs CI tests. Call this after building.",
- "settings": {
- "RunCITests": "default"
- }
- },
- "runSpecificTests": {
- "description": "Runs Specific tests. Call this after building.",
- "settings": {
- "RunSpecificTests": "default"
- }
- },
- "runnightlybuildtests": {
- "description": "Runs the nightly build tests. Call this after building.",
- "settings": {
- "RunNightlyBuildTests": "default"
- }
- },
- "verbose": {
- "description": "Passes /flp:v=diag to the msbuild command or the value passed by the user.",
- "settings": {
- "MsBuildLogging": "/flp:v=diag;LogFile=build-managed.log"
- }
- }
- },
- "defaultValues": {
- "toolName": "msbuild",
- "settings": {
- "Project": "build.proj",
- "Configuration": "default",
- "MsBuildLogging": "default",
- "MsBuildWarning": "default",
- "MsBuildError": "default"
- }
- }
- }
- },
- "tools": {
- "msbuild": {
- "osSpecific": {
- "windows": {
- "defaultParameters": "msbuild /nologo /verbosity:minimal /clp:Summary /maxcpucount /l:BinClashLogger,Tools\\Microsoft.DotNet.Build.Tasks.dll;LogFile=binclash.log",
- "path": "Tools/dotnetcli/dotnet"
- },
- "unix": {
- "defaultParameters": "msbuild /nologo /verbosity:minimal /clp:Summary /maxcpucount /l:BinClashLogger,Tools/Microsoft.DotNet.Build.Tasks.dll;LogFile=binclash.log",
- "path": "Tools/dotnetcli/dotnet"
- }
- },
- "valueTypes": {
- "property": "/p:{name}={value}",
- "target": "/t:{name}",
- "internal": "/{name}"
- }
- },
- "terminal": {
- "osSpecific": {
- "windows": {
- "filesExtension": "cmd"
- },
- "unix": {
- "filesExtension": "sh"
- }
- },
- "valueTypes": {
- "property": "--{name}={value}"
- }
- }
- }
-}
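With config.json and the old run tool removed, the former command aliases map onto Arcade switches roughly as follows (an approximate sketch, based on the documentation changes in this diff; the authoritative flags live under eng/common):

```sh
# Replacements for the old run-tool aliases (approximate):
./build.sh -configuration Release             # was: build.sh -release
./build.sh -test -integrationTest             # was: build.sh -runtests
./build.sh -pack                              # was: build.sh -buildPackages
./build.sh -projects src/Native/Native.proj   # was: build.sh -buildNative
```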
diff --git a/docs/README.md b/docs/README.md
index 44ed54f306..da5ab98236 100644
--- a/docs/README.md
+++ b/docs/README.md
@@ -11,10 +11,10 @@ Project Docs
- [Developer Guide](project-docs/developer-guide.md)
- [Contributing to ML.NET](project-docs/contributing.md)
-- [Strong Name Signing](https://github.com/dotnet/corefx/blob/master/Documentation/project-docs/strong-name-signing.md)
-- [Public Signing](https://github.com/dotnet/corefx/blob/master/Documentation/project-docs/public-signing.md)
+- [Strong Name Signing](https://github.com/dotnet/runtime/blob/main/docs/project/strong-name-signing.md)
+- [Public Signing](https://github.com/dotnet/runtime/blob/main/docs/project/public-signing.md)
- [Project NuGet Dependencies](https://github.com/dotnet/buildtools/blob/master/Documentation/project-nuget-dependencies.md)
-- [ML.NET Roadmap](https://github.com/dotnet/machinelearning/blob/master/README.md)
+- [ML.NET Roadmap](https://github.com/dotnet/machinelearning/blob/main/README.md)
- [ML.NET Cookbook](code/MlNetCookBook.md)
- [ML.NET API Reference Documentation](https://docs.microsoft.com/en-us/dotnet/api/?view=ml-dotnet)
@@ -27,7 +27,7 @@ Building from Source
Repo of Samples
====================
-- [ML.NET Samples](https://github.com/dotnet/machinelearning-samples/blob/master/README.md)
+- [ML.NET Samples](https://github.com/dotnet/machinelearning-samples/blob/main/README.md)
Extensions for ML.NET
====================
diff --git a/docs/api-reference/tensorflow-usage.md b/docs/api-reference/tensorflow-usage.md
index d054365f1f..0f4b434566 100644
--- a/docs/api-reference/tensorflow-usage.md
+++ b/docs/api-reference/tensorflow-usage.md
@@ -1,6 +1,6 @@
## Using TensorFlow based APIs
-In order to run any TensorFlow based ML.Net APIs you must first add a NuGet dependency
-on the TensorFlow redist library. There are currently two versions you can use. One which is
+In order to run any TensorFlow-based ML.NET APIs you must first add a NuGet dependency
+on the TensorFlow redist library. There are currently two versions you can use: one which is
compiled for GPU support, and one which has CPU support only.
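+
+For example, the CPU-only dependency can be added with the .NET CLI (a minimal sketch; the exact package names are listed in the sections below):
+
+```sh
+dotnet add package SciSharp.TensorFlow.Redist
+```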
### CPU only
@@ -22,18 +22,18 @@ As of now TensorFlow does not support running on GPUs for MacOS, so we cannot su
You must have at least one CUDA compatible GPU, for a list of compatible GPUs see
[Nvidia's Guide](https://developer.nvidia.com/cuda-gpus).
-Install [CUDA v10.0](https://developer.nvidia.com/cuda-10.0-download-archive) and [CUDNN v7.6.4](https://developer.nvidia.com/rdp/cudnn-download).
+Install [CUDA v10.1](https://developer.nvidia.com/cuda-10.1-download-archive-update2) and [CUDNN v7.6.4](https://developer.nvidia.com/rdp/cudnn-download).
-Make sure you install CUDA v10.0, not any other newer version.
+Make sure you install CUDA v10.1, not any newer version.
After downloading CUDNN v7.6.4 .zip file and unpacking it, you need to do the following steps:
-`copy \cuda\bin\cudnn64_7.dll to \Program Files\NVIDIA GPU Computing Toolkit\CUDA\v10.0\bin`
+`copy \cuda\bin\cudnn64_7.dll to \Program Files\NVIDIA GPU Computing Toolkit\CUDA\v10.1\bin`
For C/C++ development:
-`Copy \cuda\ include\cudnn.h to \Program Files\NVIDIA GPU Computing Toolkit\CUDA\v10.0\include`
+`Copy \cuda\ include\cudnn.h to \Program Files\NVIDIA GPU Computing Toolkit\CUDA\v10.1\include`
-`Copy \cuda\lib\x64\cudnn.lib to \Program Files\NVIDIA GPU Computing Toolkit\CUDA\v10.0\lib\x64`
+`Copy \cuda\lib\x64\cudnn.lib to \Program Files\NVIDIA GPU Computing Toolkit\CUDA\v10.1\lib\x64`
For further details in cuDNN you can follow the [cuDNN Installation guide](https://docs.nvidia.com/deeplearning/sdk/cudnn-install/index.html#installwindows).
diff --git a/docs/building/MlNetMklDeps/README.md b/docs/building/MlNetMklDeps/README.md
index 0095553cc1..27e54cd98d 100644
--- a/docs/building/MlNetMklDeps/README.md
+++ b/docs/building/MlNetMklDeps/README.md
@@ -1,54 +1,69 @@
-#Instructions to build a custom DLL from Intel's MKL SDK
-ML.NET MKL implementation uses Intel MKL Custom DLL builder to produce a single DLL which contains all of the MKL functions used by it.
-To update the DLL, follow the steps below:
+# The MlNetMklDeps nuget
+ML.NET's repository takes a dependency on the MlNetMklDeps nuget, which contains the binaries for the MKL functions that ML.NET uses. This nuget is also built and managed by the team. This section describes the contents of the nuget; the next section describes the steps to create its binaries.
-Windows (32 and 64 bit):
-- Ensure you have Intel's MKL SDK installed, you can find it here: https://software.intel.com/en-us/mkl.
-- Open an admin command prompt and run the following commands, CAREFULLY INSPECTING THE COMMAND OUTPUT FOR ERRORS.
-- TLCROOT should be the root of your TLC_Resources folder.
-
-Directory layout for nuget file is as follows:
+**The MlNetMklDeps nuget follows this layout:**
* licensing (contains Intel's license.txt they ship MKL with along with any third party licenses)
+* MlNetMklDeps.nuspec
* runtimes
-** linux-x64
-*** native (contains linux binaries)
-** osx-x64
-*** native (cntains osx binaries)
-** win-x64
-*** native (contains windows x64 binaries)
-** win-x86
-*** native (contains windows ia32 binaries)
-
-##Windows
-1. In the Intel install directory, go to compilers_and_libraries\windows\mkl\tools\builder
-2. Modify user_example_list file in directory to contain all required functions, that are present in the [mlnetmkl.list
-](mlnetmkl.list) file
-3. "C:\Program Files (x86)\Microsoft Visual Studio 14.0\VC\vcvarsall.bat" x86
-4. nmake libia32 name=MklImports (add threading=sequential if you are building without openmp)
-5. Copy MKL library: copy /Y MklImports.* to the folder that will host the x86 binaries.
-6. Copy openmp library: copy /Y ..\..\..\redist\ia32_win\compiler\libiomp5md* to the folder for x86 binaries.
-7. del MklImports.*
-8. "C:\Program Files (x86)\Microsoft Visual Studio 14.0\VC\vcvarsall.bat" amd64
-9. nmake intel64 name=MklImports (add threading=sequential if you are building without openmp)
-10. Copy mkl library: copy /Y MklImports.* to the folder that will host the x64 binaries.
-11. Copy openmp library: copy /Y ..\..\..\redist\intel64_win\compiler\libiomp5md* to the folder for x86 binaries.
-
-##Linux
-NOTE: Do not copy the libiomp5 file for Linux builds as this relies on OpenMP to be installed on the system.
-1. untar the linux sdk (tar -zxvf name_of_downloaded_file)
-2. Run installation script and follow the instuctions in the dialog screens that are presented ./install.sh
-3. Go to /opt/intel/mkl/tools/builder.
-4. Modify makefile add -Wl,-rpath,'$$ORIGIN' \ -Wl,-z,origin \ after -Wl,--end-group \
-5. Modify user_example_list file in directory to contain all the required functions, that are present in the [mlnetmkl.list](mlnetmkl.list) file
-6. Run make intel64 name=libMklImports (add threading=sequential if you are building without openmp)
-
-##OSX
-NOTE: Do not copy the libiomp5 file for OSX builds as this relies on OpenMP to be installed on the system.
-1. extract and install the dmg (double-click and drag it in the Applications folder)
-2. Go to /opt/mkl/tools/builder.
-3. Modify user_example_list file in directory to contain all the required functions, that are present in the [mlnetmkl.list](mlnetmkl.list) file
-4. Run make libintel64 name=libMklImports (add threading=sequential if you are building without openmp)
-5. Copy libMklImports.dylib from the builder directory to the folder containign the OSX binaries.
+ * linux-x64
+ * native (contains linux binaries)
+ * osx-x64
+ * native (contains osx binaries)
+ * win-x64
+ * native (contains windows x64 binaries)
+ * win-x86
+ * native (contains windows ia32 binaries)
+
+The .nuspec can be found in this folder:
+https://github.com/dotnet/machinelearning/tree/main/docs/building/MlNetMklDeps
+
+When publishing a new version of MlNetMklDeps, remember to update this file to document any changes:
+https://github.com/dotnet/machinelearning/blob/main/docs/building/MlNetMklDeps/version.md
+
+# Instructions to build the binaries using Intel's MKL SDK
+ML.NET's MKL implementation uses Intel MKL Custom Builder to produce the binaries for the functions that we select. Follow the instructions below to produce the binaries for each platform; these binaries will then be added to the MlNetMklDeps nuget described in the previous section.
+
+**Download Intel MKL SDK** before following the instructions below on each platform:
+https://software.intel.com/en-us/mkl
+
+**NOTE about TLC**: A previous version of these instructions said to set the `TLCROOT` variable to "your TLC_Resources folder". Since ML.NET has no `TLC_Resources` folder, that instruction appears to be stale and is no longer needed, but it might become relevant when testing anything MKL-related with TLC.
+
+## Windows
+
+1. In the Intel MKL SDK install directory, go to the Builder folder, found in `compilers_and_libraries\windows\mkl\tools\builder`
+2. Replace the contents of the `user_example_list` file, found in that folder, with the contents of the [mlnetmkl.list](mlnetmkl.list) file.
+3. Initialize your environment by running the `vcvarsall.bat x86` command found in your Visual Studio installation directory. E.g., it might be found at a path similar to these:
+ * `"C:\Program Files (x86)\Microsoft Visual Studio 14.0\VC\vcvarsall.bat" x86`
+ * `"C:\Program Files (x86)\Microsoft Visual Studio\2017\Enterprise\VC\Auxiliary\Build\vcvarsall.bat" x86`
+4. Back to the Builder folder run the following command to create the binary files:
+`nmake libia32 name=MklImports`. Add `threading=sequential` if you are building without openmp (**NOTE:** it seems that starting with [PR #2867](https://github.com/dotnet/machinelearning/pull/2867) we always want to build using openmp, so there's no need to use this `threading` flag). This will produce `MklImports.dll, MklImports.dll.manifest, MklImports.exp, and MklImports.lib`. To also create `MklImports.pdb` see the note at the end of this section.
+5. Copy the MklImports files to the folder that will host the binaries inside the nuget: `copy /Y MklImports.*`
+6. Also copy the Openmp library found inside the MKL SDK installation folder to the folder with the binaries: `copy /Y ..\..\..\redist\ia32_win\compiler\libiomp5md*`
+7. Delete the x86 MklImports files from the Builder folder: `del MklImports.*`
+8. Initialize your environment with `vcvarsall.bat amd64`. It should be in the same path as found on step 3.
+9. On the Builder folder: `nmake intel64 name=MklImports` (add `threading=sequential` if you are building without openmp)
+10. Copy the MKL files to the folder for x64 binaries: `copy /Y MklImports.*`
+11. Copy the Openmp library to the folder with the x64 binaries: `copy /Y ..\..\..\redist\intel64_win\compiler\libiomp5md* `
+
+**NOTE to create MklImports.pdb:** If the symbols for the built MklImports.dlls are required, add `/DEBUG:FULL /PDB:MklImports.pdb \` in the makefile after `mkl_custom_vers.res \`, in both the `libintel64` and `libia32` targets, to get the symbols for both x86 and x64 binaries.
+
+## Linux
+**NOTE:** Do not copy the libiomp5 file for Linux builds as this relies on OpenMP to be installed on the system.
+1. Untar the Linux Intel MKL SDK: `tar -zxvf name_of_downloaded_file`
+2. Run the installation script and follow the instructions in the dialog screens that are presented: `./install.sh`
+3. Go to the Builder directory found in `/opt/intel/mkl/tools/builder` (it might be in another path, such as `/home/username/intel/compilers_and_libraries/linux/mkl/tools/builder` depending on your installation).
+4. Modify the makefile found in the Builder directory: add `-Wl,-rpath,'$$ORIGIN' \ -Wl,-z,origin \` after `-Wl,--end-group \`
+5. Modify the `user_example_list` file in the Builder directory to contain all the required functions present in the [mlnetmkl.list](mlnetmkl.list) file
+6. Run `make intel64 name=libMklImports` (add `threading=sequential` if you are building without openmp)
+
+## OSX
+**NOTE:** Do not copy the libiomp5 file for OSX builds as this relies on OpenMP to be installed on the system.
+1. Extract and install the Intel MKL SDK dmg (double-click and drag it into the `Applications` folder)
+2. Go to the Builder directory: `/opt/mkl/tools/builder`.
+3. Modify the `user_example_list` file in the Builder directory to contain all the required functions present in the [mlnetmkl.list](mlnetmkl.list) file
+4. Run `make libintel64 name=libMklImports` (add `threading=sequential` if you are building without openmp)
+5. Copy `libMklImports.dylib` from the builder directory to the folder containing the OSX binaries.
6. Fix the id and the rpath running the following commands:
- sudo install_name_tool -id "@loader_path/libMklImports.dylib" libMklImports.dylib
- sudo install_name_tool -id "@rpath/libMklImports.dylib" libMklImports.dylib
+ * `sudo install_name_tool -id "@loader_path/libMklImports.dylib" libMklImports.dylib`
+ * `sudo install_name_tool -id "@rpath/libMklImports.dylib" libMklImports.dylib`
\ No newline at end of file
diff --git a/docs/building/assets/process_architecture_run_tests_vs.png b/docs/building/assets/process_architecture_run_tests_vs.png
new file mode 100644
index 0000000000..99989d82d5
Binary files /dev/null and b/docs/building/assets/process_architecture_run_tests_vs.png differ
diff --git a/docs/building/netcoreapp3.1-instructions.md b/docs/building/netcoreapp3.1-instructions.md
index d912a8cb9a..d46c13a7ae 100644
--- a/docs/building/netcoreapp3.1-instructions.md
+++ b/docs/building/netcoreapp3.1-instructions.md
@@ -5,8 +5,8 @@ ML.NET source code files build for .NET Core 3.1 and .NET Standard 2.0. However,
To run tests on .NET Core 3.1, you need to do a few manual steps.
-1. Run `.\build.cmd -- /p:Configuration=Release-netcoreapp3_1` or `.\build.cmd -Release-netcoreapp3_1` from the root of the repo.
-2. If you want to build the NuGet packages, `.\build.cmd -buildPackages` after step 1.
+1. Run `.\build.cmd -configuration Debug-netcoreapp3_1` or `.\build.cmd -configuration Release-netcoreapp3_1` from the root of the repo.
+2. If you want to build the NuGet packages, `.\build.cmd -pack` after step 1.
If you are using Visual Studio, you will need to do the following:
diff --git a/docs/building/unix-instructions.md b/docs/building/unix-instructions.md
index acc4ecc034..2d13dd7348 100644
--- a/docs/building/unix-instructions.md
+++ b/docs/building/unix-instructions.md
@@ -48,9 +48,11 @@ On macOS a few components are needed which are not provided by a default develop
* libomp 7
* libgdiplus
* gettext
-* All the requirements necessary to run .NET Core 3.1 applications. To view macOS prerequisites click [here](https://docs.microsoft.com/en-us/dotnet/core/macos-prerequisites?tabs=netcore30).
+* All the requirements necessary to run .NET Core 3.1 applications. To view macOS prerequisites click [here](https://docs.microsoft.com/en-us/dotnet/core/install/macos?tabs=netcore31#dependencies).
One way of obtaining CMake and other required libraries is via [Homebrew](https://brew.sh):
```sh
-$ brew update && brew install cmake https://raw.githubusercontent.com/dotnet/machinelearning/master/build/libomp.rb mono-libgdiplus gettext && brew link gettext --force && brew link libomp --force
+$ brew update && brew install cmake https://raw.githubusercontent.com/dotnet/machinelearning/main/build/libomp.rb mono-libgdiplus gettext && brew link gettext --force && brew link libomp --force
```
+
+Please note that newer versions of Homebrew [don't allow installing directly from a URL](https://github.com/Homebrew/brew/issues/8791). If you run into this issue, you may need to download libomp.rb first and install it with the local file instead.
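+
+A sketch of that local-file workaround:
+
+```sh
+curl -LO https://raw.githubusercontent.com/dotnet/machinelearning/main/build/libomp.rb
+brew install ./libomp.rb
+```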
diff --git a/docs/building/windows-instructions.md b/docs/building/windows-instructions.md
index 00d2c4a5e7..18a463fb49 100644
--- a/docs/building/windows-instructions.md
+++ b/docs/building/windows-instructions.md
@@ -56,8 +56,8 @@ You can use the Developer Command Prompt, Powershell or work in any regular cmd.
From a (non-admin) Command Prompt window:
- `build.cmd` - builds the assemblies
-- `build.cmd -runTests` - called after a normal "build.cmd" will run all tests
-- `build.cmd -buildPackages` called after a normal “build.cmd” will create the NuGet packages with the assemblies in “bin"
+- `build.cmd -test -integrationTest` - builds the assemblies and runs all tests, including integration tests.
+- `build.cmd -pack` - builds the assemblies and generates the corresponding NuGet packages with the assemblies in `artifacts\packages`
**Note**: Before working on individual projects or test projects you **must** run `build.cmd` from the root once before beginning that work. It is also a good idea to run `build.cmd` whenever you pull a large set of unknown changes into your branch.
@@ -65,12 +65,20 @@ From a (non-admin) Command Prompt window:
### Running tests from Visual Studio
-After successfully building, run tests in the Visual Studio Test Explorer window.
+After successfully building, run tests through the Visual Studio Test Explorer window.
+
+Before running tests in Visual Studio, make sure you have selected the processor architecture (`x64` or `x86`) that your machine supports and that you built ML.NET for. To check, click the settings icon in the Test Explorer window, then "Process Architecture for AnyCPU Projects", and then the correct architecture type, as demonstrated in the image below:
+
+
### Running tests from the command line
-From the root, run `build.cmd` and then `build.cmd -runTests`.
-For more details, or to test an individual project, you can navigate to the test project directory and then use `dotnet test`
+From the root, run `build.cmd -test -integrationTest`.
+For more details, or to test an individual project, you can navigate to the test project directory and then use `dotnet test`.
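+
+For example, to run a single test from one test project (the project and filter names here are illustrative):
+
+```sh
+cd test/Microsoft.ML.Tests
+dotnet test --filter "FullyQualifiedName~TrainerEstimators"
+```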
+
+## Running Benchmarks
+
+For more information on running ML.NET benchmarks, please visit the [benchmarking instructions](../../test/Microsoft.ML.PerformanceTests/README.md).
## Known Issues
diff --git a/docs/code/EntryPoints.md b/docs/code/EntryPoints.md
index a59fee1414..c868f9099b 100644
--- a/docs/code/EntryPoints.md
+++ b/docs/code/EntryPoints.md
@@ -1,23 +1,23 @@
-# Entry Points And Helper Classes
+# Entry Points And Helper Classes
## Overview
Entry points are a way to interface with ML.NET components, by specifying an execution graph of connected inputs and outputs of those components.
-Both the manifest describing available components and their inputs/outputs, and an "experiment" graph description, are expressed in JSON.
-The recommended way of interacting with ML.NET through other, non-.NET programming languages, is by composing, and exchanging pipelines or experiment graphs.
+Both the manifest describing available components and their inputs/outputs, and an "experiment" graph description, are expressed in JSON.
+The recommended way of interacting with ML.NET from other, non-.NET programming languages is by composing and exchanging pipelines or experiment graphs.
-Through the documentation, we also refer to entry points as 'entry points nodes', and that is because they are the nodes of the graph representing the experiment.
-The graph 'variables', the various values of the experiment graph JSON properties, serve to describe the relationship between the entry point nodes.
-The 'variables' are therefore the edges of the DAG (Directed Acyclic Graph).
+Throughout the documentation, we also refer to entry points as 'entry point nodes', because they are the nodes of the graph representing the experiment.
+The graph 'variables', the various values of the experiment graph JSON properties, serve to describe the relationship between the entry point nodes.
+The 'variables' are therefore the edges of the DAG (Directed Acyclic Graph).
-All of ML.NET entry points are described by their manifest. The manifest is another JSON object that documents and describes the structure of an entry points.
-Manifests are referenced to understand what an entry point does, and how it should be constructed, in a graph.
+All ML.NET entry points are described by their manifests. The manifest is another JSON object that documents and describes the structure of an entry point.
+Manifests are referenced to understand what an entry point does, and how it should be constructed in a graph.
This document briefly describes the structure of the entry points, the structure of an entry point manifest, and mentions the ML.NET classes that help construct an entry point graph.
## EntryPoint manifest - the definition of an entry point
-The components manifest is build by scanning the ML.NET assemblies through reflection and searching for types having the: `SignatureEntryPointModule` signature in their `LoadableClass` assembly attribute definition.
+The component manifest is built by scanning the ML.NET assemblies through reflection and searching for types having the `SignatureEntryPointModule` signature in their `LoadableClass` assembly attribute definition.
An example of an entry point manifest object, specifically for the `ColumnTypeConverter` transform, is:
```javascript
@@ -115,7 +115,7 @@ An example of an entry point manifest object, specifically for the `ColumnTypeCo
{
"Name": "OutputData",
"Type": "DataView",
- "Desc": "Transformed dataset"
+ "Desc": "Transformed dataset"
},
{
"Name": "Model",
@@ -134,7 +134,7 @@ The respective entry point, constructed based on this manifest would be:
{
"Name": "Transforms.ColumnTypeConverter",
"Inputs": {
- "Column": [{
+ "Column": [{
"Name": "Features",
"Source": "Features"
}],
@@ -220,11 +220,11 @@ parameter.
## How to create an entry point for an existing ML.NET component
-1. Add a `LoadableClass` assembly attribute with the `SignatureEntryPointModule` signature as shown [here](https://github.com/dotnet/machinelearning/blob/005fe05ebd8b0ffe66fe7e3d5b10983d363a4c35/src/Microsoft.ML.StandardTrainers/Standard/LogisticRegression/LogisticRegression.cs#L26).
+1. Add a `LoadableClass` assembly attribute with the `SignatureEntryPointModule` signature as shown [here](https://github.com/dotnet/machinelearning/blob/005fe05ebd8b0ffe66fe7e3d5b10983d363a4c35/src/Microsoft.ML.StandardTrainers/Standard/LogisticRegression/LogisticRegression.cs#L26).
2. Create a public static method, that:
1. Takes an object representing the arguments of the component you want to expose as shown [here](https://github.com/dotnet/machinelearning/blob/005fe05ebd8b0ffe66fe7e3d5b10983d363a4c35/src/Microsoft.ML.StandardTrainers/Standard/LogisticRegression/LogisticRegression.cs#L416)
- 2. Initializes and runs the component, returning one of the nested classes of [`Microsoft.ML.EntryPoints.CommonOutputs`](https://github.com/dotnet/machinelearning/blob/master/src/Microsoft.ML.Data/EntryPoints/CommonOutputs.cs)
+ 2. Initializes and runs the component, returning one of the nested classes of [`Microsoft.ML.EntryPoints.CommonOutputs`](https://github.com/dotnet/machinelearning/blob/main/src/Microsoft.ML.Data/EntryPoints/CommonOutputs.cs)
3. Is annotated with the [`TlcModule.EntryPoint`](https://github.com/dotnet/machinelearning/blob/005fe05ebd8b0ffe66fe7e3d5b10983d363a4c35/src/Microsoft.ML.StandardTrainers/Standard/LogisticRegression/LogisticRegression.cs#L411) attribute
-For an example of a transformer as an entrypoint, see [OneHotVectorizer](https://github.com/dotnet/machinelearning/blob/9db16c85888e7163c671543faee6ba1f47015d68/src/Microsoft.ML.Transforms/OneHotEncoding.cs#L283).
+For an example of a transformer as an entrypoint, see [OneHotVectorizer](https://github.com/dotnet/machinelearning/blob/9db16c85888e7163c671543faee6ba1f47015d68/src/Microsoft.ML.Transforms/OneHotEncoding.cs#L283).
For a trainer-estimator, see [LogisticRegression](https://github.com/dotnet/machinelearning/blob/005fe05ebd8b0ffe66fe7e3d5b10983d363a4c35/src/Microsoft.ML.StandardTrainers/Standard/LogisticRegression/LogisticRegression.cs#L411).
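+
+As a schematic sketch of these steps (the component name, options type, and method body below are illustrative, not an actual ML.NET component; the internal types involved are only visible from inside the ML.NET codebase):
+
+```cs
+using Microsoft.ML;
+using Microsoft.ML.Data;
+using Microsoft.ML.EntryPoints;
+using Microsoft.ML.Runtime;
+
+[assembly: LoadableClass(typeof(void), typeof(MyTransformEntryPoint), null,
+    typeof(SignatureEntryPointModule), "MyTransform")]
+
+internal static class MyTransformEntryPoint
+{
+    public sealed class Options : TransformInputBase
+    {
+        // The arguments of the component being exposed go here.
+    }
+
+    [TlcModule.EntryPoint(Name = "Transforms.MyTransform",
+        Desc = "Illustrative entry point wrapping a hypothetical transform.")]
+    public static CommonOutputs.TransformOutput Apply(IHostEnvironment env, Options input)
+    {
+        // Initialize and run the component here, then return its outputs
+        // through one of the CommonOutputs nested classes.
+        IDataView outputData = input.Data; // placeholder: a real entry point transforms this
+        return new CommonOutputs.TransformOutput { OutputData = outputData, Model = null };
+    }
+}
+```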
diff --git a/docs/project-docs/contributing.md b/docs/project-docs/contributing.md
index d34ef1a123..1e2b0d94e1 100644
--- a/docs/project-docs/contributing.md
+++ b/docs/project-docs/contributing.md
@@ -6,7 +6,7 @@ This document describes contribution guidelines that are specific to Machine Lea
Coding Style Changes
--------------------
-We intend to bring dotnet/machinelearning into full conformance with the style guidelines described in [Coding Style](https://github.com/dotnet/corefx/blob/master/Documentation/coding-guidelines/coding-style.md). We plan to do that with tooling, in a holistic way. In the meantime, please:
+We intend to bring dotnet/machinelearning into full conformance with the style guidelines described in [Coding Style](https://github.com/dotnet/runtime/blob/main/docs/coding-guidelines/coding-style.md). We plan to do that with tooling, in a holistic way. In the meantime, please:
* **DO NOT** send PRs for style changes. For example, do not send PRs that are focused on changing usage of ```Int32``` to ```int```.
* **DO NOT** send PRs for upgrading code to use newer language features, though it's ok to use newer language features as part of new code that's written. For example, it's ok to use expression-bodied members as part of new code you write, but do not send a PR focused on changing existing properties or methods to use the feature.
diff --git a/docs/project-docs/developer-guide.md b/docs/project-docs/developer-guide.md
index b3531e339e..31c9932351 100644
--- a/docs/project-docs/developer-guide.md
+++ b/docs/project-docs/developer-guide.md
@@ -18,9 +18,9 @@ Developer Workflow
The dev workflow describes the [development process](https://github.com/dotnet/buildtools/blob/master/Documentation/Dev-workflow.md) to follow. It is divided into specific tasks that are fast, transparent and easy to understand.
The tasks are represented in scripts (cmd/sh) in the root of the repo.
-For more information about the different options that each task has, use the argument `-?` when calling the script. For example:
+For more information about the different options that each task has, use the argument `-help` when calling the script. For example:
```
-build -?
+build -help
```
**Examples**
@@ -33,20 +33,19 @@ git submodule update --init
- Building in release mode for platform x64
```
-build.cmd -Release -TargetArchitecture:x64
+build.cmd -configuration Release /p:TargetArchitecture=x64
```
- Building the src and then building and running the tests
```
-build.cmd
-build.cmd -runTests
+build.cmd -test
```
### Building individual projects
**Note**: Before working on individual projects or test projects you **must** run `build` from the root once before beginning that work. It is also a good idea to run `build` whenever you pull a large set of unknown changes into your branch.
-Under the src directory is a set of directories, each of which represents a particular assembly in ML.NET.
+Under the src directory is a set of directories, each of which represents a particular assembly in ML.NET.
For example the src\Microsoft.MachineLearning.Core directory holds the source code for the Microsoft.MachineLearning.Core.dll assembly.
@@ -60,11 +59,19 @@ You can build the tests for Microsoft.MachineLearning.Core.dll by going to
### Building in Release or Debug
By default, building from the root or within a project will build the libraries in Debug mode.
-One can build in Debug or Release mode from the root by doing `build.cmd -Release` or `build.cmd -Debug`.
+One can build in Debug or Release mode from the root by doing `build.cmd -configuration Release` or `build.cmd -configuration Debug`.
+
+Currently, the full list of supported configurations is:
+- `Debug`, `Release` (for .NET Core 2.1)
+- `Debug-netcoreapp3_1`, `Release-netcoreapp3_1` (for .NET Core 3.1)
+- `Debug-netfx`, `Release-netfx` (for .NET Framework 4.6.1)
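+
+For example, to build the .NET Framework flavor in release mode:
+```
+build.cmd -configuration Release-netfx
+```
+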
### Building other Architectures
-We only support 64-bit binaries right now.
+We support both 32-bit and 64-bit binaries. To build 32-bit binaries, pass the `TargetArchitecture` flag as shown below:
+```
+build.cmd -configuration Debug /p:TargetArchitecture=x86
+```
### Updating manifest and ep-list files
@@ -79,6 +86,20 @@ Steps to update `core_manifest.json` and `core_ep-list.tsv`:
4. Re-enable the skip attribute on the `RegenerateEntryPointCatalog` test.
5. Commit the updated `core_manifest.json` and `core_ep-list.tsv` files to your branch.
+### Running specific unit tests on CI
+
+It may be necessary to run only specific unit tests on CI, and perhaps even run these tests back to back multiple times. The steps to run one or more unit tests are as follows:
+1. Set `runSpecific: true` and `innerLoop: false` in [.vsts-dotnet-ci.yml](https://github.com/dotnet/machinelearning/blob/main/.vsts-dotnet-ci.yml) for each build on which you'd like to run the specific tests on CI.
+2. Import `Microsoft.ML.TestFrameworkCommon.Attributes` in the unit test files that contain specific unit tests to be run.
+3. Add the `[TestCategory("RunSpecificTest")]` attribute to the unit test(s) you'd like to run.
+
+If you would like to run these specific unit test(s) multiple times, do the following for each unit test to run:
+1. Replace the `[Fact]` attribute with `[Theory, IterationData(X)]` where `X` is the number of times to run the unit test.
+2. Add the `int iteration` argument to the unit test you'd like to run multiple times.
+3. Use the `iteration` parameter at least once in the unit test. This may be as simple as printing the `iteration` parameter's value to the console.
+
+These steps are demonstrated in this [commit](https://github.com/dotnet/machinelearning/commit/2fb5f8cfcd2a81f27bc22ac6749f1ce2045e925b), and a minimal sketch follows below.
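+
+A minimal sketch (the class, test name, and iteration count are illustrative, and the attributes are assumed to behave as described above):
+
+```cs
+using Microsoft.ML.TestFrameworkCommon.Attributes;
+using Xunit;
+
+public class SpecificCiTests
+{
+    // Runs only when runSpecific is enabled on CI, five times back to back.
+    [TestCategory("RunSpecificTest")]
+    [Theory, IterationData(5)]
+    public void FlakyScenario(int iteration)
+    {
+        // Use the iteration parameter at least once.
+        System.Console.WriteLine($"FlakyScenario iteration {iteration}");
+    }
+}
+```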
+
### Running unit tests through VSTest Task & Collecting memory dumps
During development, there may also arise a need to debug hanging tests. In this scenario, it can be beneficial to collect the memory dump while a given test is hanging.
diff --git a/docs/project-docs/release-process.md b/docs/project-docs/release-process.md
index 7428b6b582..c7f6a670ba 100644
--- a/docs/project-docs/release-process.md
+++ b/docs/project-docs/release-process.md
@@ -6,7 +6,7 @@ This document describes the different kinds of ML. NET releases, how those relea
Types of releases
--------------------
-ML. NET NuGets (of which there are
+ML.NET NuGets (of which there are
approximately 25) are versioned with the following format: `A.B.C<-D>`, where `A`, `B`, and `C` are integers, and `D` is an optional string.
- `A` - **version number**: If `A` is 0, this NuGet is considered a **work in progress (WIP)**, and could be deleted at any time. If `A` is greater than 0, then we plan to support the corresponding NuGet indefinitely.
@@ -16,7 +16,7 @@ approximately 25) are versioned with the following format: `A.B.C<-D>`, where `A
ML. NET has four kinds of releases: daily builds, previews, periodic general availability (GA), and fix. We detail each kind of release below.
-1. **Daily builds:** these can be downloaded from [this NuGet feed](https://dev.azure.com/dnceng/public/_packaging?_a=feed&feed=MachineLearning), and are built automatically each time a commit is made to the `master` branch.
+1. **Daily builds:** these can be downloaded from [this NuGet feed](https://dev.azure.com/dnceng/public/_packaging?_a=feed&feed=MachineLearning), and are built automatically each time a commit is made to the `main` branch.
1. **Preview:** These releases are built from the corresponding `A.B-preview-X` GitHub branch, and are expected to meet a higher quality bar than the daily builds. These can also be downloaded from [this NuGet feed](https://dev.azure.com/dnceng/public/_packaging?_a=feed&feed=MachineLearning), or within Visual Studio, as detailed below. When we introduce new APIs in a preview release, we avoid doing a GA release at the same time (unless there are patches required for the last GA release). If there are no new APIs, then we go straight to a GA release and skip the preview release.
1. **GA:** These releases are built from the corresponding `A.B` GitHub branch. They are rigorously tested, stable, and meant for general use. They are also the default choice when installing ML. NET via the `Install-Package Microsoft.ML` command, and are published to [nuget.org](https://www.nuget.org/packages/Microsoft.ML/)
1. **Fix:** These releases include patches for bugs in either the preview or GA releases.
diff --git a/docs/release-notes/0.10/release-0.10.md b/docs/release-notes/0.10/release-0.10.md
index ab363e7f9b..922673600d 100644
--- a/docs/release-notes/0.10/release-0.10.md
+++ b/docs/release-notes/0.10/release-0.10.md
@@ -4,7 +4,7 @@
We have also instrumented [code coverage](https://codecov.io/gh/dotnet/machinelearning) tools as part of our CI systems and will continue to push for stability and quality in the code.
-One of the milestones that we have achieved in this release is moving `IDataView` into a new and separate assembly under `Microsoft.Data.DataView` namespace. For detailed documentation on `IDataView` please take a look at [IDataView design principles](https://github.com/dotnet/machinelearning/blob/master/docs/code/IDataViewDesignPrinciples.md).
+One of the milestones that we have achieved in this release is moving `IDataView` into a new and separate assembly under `Microsoft.Data.DataView` namespace. For detailed documentation on `IDataView` please take a look at [IDataView design principles](https://github.com/dotnet/machinelearning/blob/main/docs/code/IDataViewDesignPrinciples.md).
### Installation
@@ -27,7 +27,7 @@ Install-Package Microsoft.ML
Below are a few of the highlights from this release. There are many other improvements in the API.
-* DataView moved into a separate assembly and NuGet package
+* DataView moved into a separate assembly and NuGet package
([#2220](https://github.com/dotnet/machinelearning/pull/2220))
* Improvements in the API for prediction engine
@@ -38,20 +38,20 @@ Below are a few of the highlights from this release. There are many other improv
- Better naming for NuGet packages based on the scenario (Recommendations) instead of the trainer's name
* Support multiple 'feature columns' in FFM (Field-aware Factorization Machines)
-([#2205](https://github.com/dotnet/machinelearning/pull/2205))
+([#2205](https://github.com/dotnet/machinelearning/pull/2205))
 - Allows multiple feature column names in advanced trainer arguments so certain FFM trainers can support multiple feature columns, as explained in issue [#2179](https://github.com/dotnet/machinelearning/issues/2179)
* Added support for loading map from file through dataview by using ValueMapperTransformer
-([#2232](https://github.com/dotnet/machinelearning/pull/2232))
+([#2232](https://github.com/dotnet/machinelearning/pull/2232))
- This provides support for additional scenarios like a Text/NLP scenario ([#747](https://github.com/dotnet/machinelearning/issues/747)) in TensorFlowTransform where model's expected input is vector of integers
* Added support for running benchmarks on .NET Framework in addition to .NET Core.
-([#2157](https://github.com/dotnet/machinelearning/pull/2157))
- - Benchmarks can be based on [Microsoft.ML.Benchmarks](https://github.com/dotnet/machinelearning/tree/master/test/Microsoft.ML.Benchmarks)
+([#2157](https://github.com/dotnet/machinelearning/pull/2157))
+ - Benchmarks can be based on [Microsoft.ML.Benchmarks](https://github.com/dotnet/machinelearning/tree/main/test/Microsoft.ML.Benchmarks)
- This fixes issues like [#1945](https://github.com/dotnet/machinelearning/issues/1945)
-* Added Tensorflow unfrozen models support in GetModelSchema
-([#2112](https://github.com/dotnet/machinelearning/pull/2112))
+* Added support for TensorFlow unfrozen models in GetModelSchema
+([#2112](https://github.com/dotnet/machinelearning/pull/2112))
- Fixes issue [#2102](https://github.com/dotnet/machinelearning/issues/2102)
* Providing API for properly inspecting trees ([#2243](https://github.com/dotnet/machinelearning/pull/2243))
diff --git a/docs/release-notes/1.1.0/release-1.1.0.md b/docs/release-notes/1.1.0/release-1.1.0.md
index 17931dd10b..44baea3935 100644
--- a/docs/release-notes/1.1.0/release-1.1.0.md
+++ b/docs/release-notes/1.1.0/release-1.1.0.md
@@ -1,6 +1,6 @@
-# [ML.NET](http://dot.net/ml) 1.1.0
+# [ML.NET](http://dot.net/ml) 1.1.0
## **New Features**
-- **Image type support in IDataView**
+- **Image type support in IDataView**
[PR#3263](https://github.com/dotnet/machinelearning/pull/3263) added support
for in-memory image as a type in IDataView. Previously it was not possible to
use an image directly in IDataView, and the user had to specify the file path
@@ -14,16 +14,16 @@
[3460](https://github.com/dotnet/machinelearning/issues/3460),
[2121](https://github.com/dotnet/machinelearning/issues/2121),
[2495](https://github.com/dotnet/machinelearning/issues/2495),
- [3784](https://github.com/dotnet/machinelearning/issues/3784).
+ [3784](https://github.com/dotnet/machinelearning/issues/3784).
- Image type support in IDataView was a much requested feature by the users.
+  Image type support in IDataView was a much-requested feature by users.
[Sample to convert gray scale image
in-Memory](https://github.com/dotnet/machinelearning/blob/02a857a7646188fec2d1cba5e187a6c9d0838e23/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/ImageAnalytics/ConvertToGrayScaleInMemory.cs)
| [Sample for custom mapping with in-memory using custom
type](https://github.com/dotnet/machinelearning/blob/02a857a7646188fec2d1cba5e187a6c9d0838e23/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/CustomMappingWithInMemoryCustomType.cs)
-- **Super-Resolution based Anomaly Detector (preview, please provide feedback)**
+- **Super-Resolution based Anomaly Detector (preview, please provide feedback)**
[PR#3693](https://github.com/dotnet/machinelearning/pull/3693) adds a new
anomaly detection algorithm to the
[Microsoft.ML.TimeSeries](https://www.nuget.org/packages/Microsoft.ML.TimeSeries/)
@@ -44,11 +44,11 @@
SR | 0.601 | 0.670 | 0.634 | 2625 | 4370 | 3915 | WindowSize=64, BackAddWindowSize=5, LookaheadWindowSize=5, AveragingWindowSize=3, JudgementWindowSize=64, Threshold=0.45
[Sample for anomaly detection by
- SRCNN](https://github.com/dotnet/machinelearning/blob/master/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/TimeSeries/DetectAnomalyBySrCnn.cs)
+ SRCNN](https://github.com/dotnet/machinelearning/blob/main/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/TimeSeries/DetectAnomalyBySrCnn.cs)
| [Sample for anomaly detection by SRCNN using batch
- prediction](https://github.com/dotnet/machinelearning/blob/master/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/TimeSeries/DetectAnomalyBySrCnnBatchPrediction.cs)
+ prediction](https://github.com/dotnet/machinelearning/blob/main/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/TimeSeries/DetectAnomalyBySrCnnBatchPrediction.cs)
-- **Time Series Forecasting (preview, please provide feedback)**
+- **Time Series Forecasting (preview, please provide feedback)**
[PR#1900](https://github.com/dotnet/machinelearning/pull/1900) introduces a
framework for time series forecasting models and exposes an API for Singular
Spectrum Analysis(SSA) based forecasting model in the
@@ -61,18 +61,18 @@
requested feature by the github community since September 2018. With this
change
[Microsoft.ML.TimeSeries](https://www.nuget.org/packages/Microsoft.ML.TimeSeries/)
- nuget is feature complete for RTM.
+ nuget is feature complete for RTM.
[Sample for
- forecasting](https://github.com/dotnet/machinelearning/blob/master/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/TimeSeries/Forecasting.cs)
+ forecasting](https://github.com/dotnet/machinelearning/blob/main/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/TimeSeries/Forecasting.cs)
| [Sample for forecasting using confidence
- intervals](https://github.com/dotnet/machinelearning/blob/master/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/TimeSeries/ForecastingWithConfidenceInterval.cs)
+ intervals](https://github.com/dotnet/machinelearning/blob/main/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/TimeSeries/ForecastingWithConfidenceInterval.cs)
## **Bug Fixes**
### Serious
- **Math Kernel Library fails to load with latest libomp:** Fixed by
[PR#3721](https://github.com/dotnet/machinelearning/pull/3721) this bug made
- it impossible for anyone to check code into master branch because it was
+  it impossible for anyone to check code into the main branch because it was
causing build failures.
- **Transform Wrapper fails at deserialization:** Fixed by
@@ -80,13 +80,13 @@
affected first party(1P) customer. A model trained using
[NimbusML](https://github.com/microsoft/NimbusML)(Python bindings for
[ML.NET](http://dot.net/ml)) and then loaded for scoring/inferencing using
- ML.NET will hit this bug.
+ ML.NET will hit this bug.
- **Index out of bounds exception in KeyToVector transformer:** Fixed by
[PR#3763](https://github.com/dotnet/machinelearning/pull/3763) this bug closes
following github issues:
[3757](https://github.com/dotnet/machinelearning/issues/3757),[1751](https://github.com/dotnet/machinelearning/issues/1751),[2678](https://github.com/dotnet/machinelearning/issues/2678).
- It affected first party customer and also github users.
+  It affected a first-party customer as well as GitHub users.
### Other
- Download images only when not present on disk and print warning messages when
@@ -125,7 +125,7 @@ None
- Sample for WithOnFitDelegate by
[PR#3738](https://github.com/dotnet/machinelearning/pull/3738)
- Sample for loading data using text loader using various techniques by
- [PR#3793](https://github.com/dotnet/machinelearning/pull/3793)
+ [PR#3793](https://github.com/dotnet/machinelearning/pull/3793)
## **Remarks**
- [Microsoft.ML.TensorFlow](https://www.nuget.org/packages/Microsoft.ML.TensorFlow/),
diff --git a/docs/release-notes/1.2.0/release-1.2.0.md b/docs/release-notes/1.2.0/release-1.2.0.md
index c6b703e434..8214ede8d8 100644
--- a/docs/release-notes/1.2.0/release-1.2.0.md
+++ b/docs/release-notes/1.2.0/release-1.2.0.md
@@ -1,6 +1,6 @@
# [ML.NET](http://dot.net/ml) 1.2.0
## **General Availability**
-- **Microsoft.ML.TimeSeries**
+- **Microsoft.ML.TimeSeries**
- Anomaly detection algorithms (Spike and Change Point):
- Independent and identically distributed.
- Singular spectrum analysis.
@@ -10,17 +10,17 @@
- Prediction Engine for online learning
- Enables updating time series model with new observations at scoring so that the user does not have to re-train the time series with old data each time.
- [Samples](https://github.com/dotnet/machinelearning/tree/master/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/TimeSeries)
+ [Samples](https://github.com/dotnet/machinelearning/tree/main/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/TimeSeries)
-- **Microsoft.ML.OnnxTransformer**
+- **Microsoft.ML.OnnxTransformer**
Enables scoring of ONNX models in the learning pipeline. Uses ONNX Runtime v0.4.
-
- [Sample](https://github.com/dotnet/machinelearning/blob/master/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/ApplyOnnxModel.cs)
-- **Microsoft.ML.TensorFlow**
- Enables scoring of TensorFlow models in the learning pipeline. Uses TensorFlow v1.13. Very useful for image and text classification. Users can featurize images or text using DNN models and feed the result into a classical machine learning model like a decision tree or logistic regression trainer.
+ [Sample](https://github.com/dotnet/machinelearning/blob/main/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/ApplyOnnxModel.cs)
- [Samples](https://github.com/dotnet/machinelearning/tree/master/docs/samples/Microsoft.ML.Samples/Dynamic/TensorFlow)
+- **Microsoft.ML.TensorFlow**
+ Enables scoring of TensorFlow models in the learning pipeline. Uses TensorFlow v1.13. Very useful for image and text classification. Users can featurize images or text using DNN models and feed the result into a classical machine learning model like a decision tree or logistic regression trainer.
+
+ [Samples](https://github.com/dotnet/machinelearning/tree/main/docs/samples/Microsoft.ML.Samples/Dynamic/TensorFlow)
## **New Features**
- **Tree-based featurization** ([#3812](https://github.com/dotnet/machinelearning/pull/3812))
@@ -29,13 +29,13 @@
 - The leaves it falls into. It's a binary vector with ones at the indices of the reached leaves,
- The paths that the input vector passes before hitting the leaves, and
- The reached leaves values.
-
+
Here are two references.
- [p. 9](https://www.csie.ntu.edu.tw/~r01922136/kaggle-2014-criteo.pdf) (a Kaggle solution adopted by FB below).
- [Section 3](http://www.quinonero.net/Publications/predicting-clicks-facebook.pdf). (Facebook)
- [Section of Entity-level personalization with GLMix](https://engineering.linkedin.com/blog/2019/04/ai-behind-linkedin-recruiter-search-and-recommendation-systems). (LinkedIn)
- [Samples](https://github.com/dotnet/machinelearning/tree/master/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/TreeFeaturization)
+ [Samples](https://github.com/dotnet/machinelearning/tree/main/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/TreeFeaturization)
- **Microsoft.Extensions.ML integration package.** ([#3827](https://github.com/dotnet/machinelearning/pull/3827))
@@ -51,7 +51,7 @@
### Serious
- **Time series Sequential Transform needs to have a binding mechanism:** This bug made it impossible to use time series in NimbusML. ([#3875](https://github.com/dotnet/machinelearning/pull/3875))
-- **Build errors resulting from upgrading to VS2019 compilers:** The default CMAKE_C_FLAG for debug configuration sets /ZI to generate a PDB capable of edit and continue. In the new compilers, this is incompatible with /guard:cf which we set for security reasons. ([#3894](https://github.com/dotnet/machinelearning/pull/3894))
+- **Build errors resulting from upgrading to VS2019 compilers:** The default CMAKE_C_FLAG for debug configuration sets /ZI to generate a PDB capable of edit and continue. In the new compilers, this is incompatible with /guard:cf which we set for security reasons. ([#3894](https://github.com/dotnet/machinelearning/pull/3894))
- **LightGBM Evaluation metric parameters:** In LightGbm EvaluateMetricType where if a user specified EvaluateMetricType.Default, the metric would not get added to the options Dictionary, and LightGbmWrappedTraining would throw because of that. ([#3815](https://github.com/dotnet/machinelearning/pull/3815))
@@ -66,10 +66,10 @@ None
- Fixes the Hardcoded Sigmoid value from -0.5 to the value specified during training. ([#3850](https://github.com/dotnet/machinelearning/pull/3850))
- Fix TextLoader constructor and add exception message. ([#3788](https://github.com/dotnet/machinelearning/pull/3788))
- Introduce the `FixZero` argument to the LogMeanVariance normalizer. ([#3916](https://github.com/dotnet/machinelearning/pull/3916))
-- Ensembles trainer now work with ITrainerEstimators instead of ITrainers. ([#3796](https://github.com/dotnet/machinelearning/pull/3796))
+- Ensemble trainers now work with ITrainerEstimators instead of ITrainers. ([#3796](https://github.com/dotnet/machinelearning/pull/3796))
- LightGBM Unbalanced Data Argument. ([#3925](https://github.com/dotnet/machinelearning/pull/3925))
- Tree based trainers implement ICanGetSummaryAsIDataView. ([#3892](https://github.com/dotnet/machinelearning/pull/3892))
-
+
- **CLI and AutoML API**
- Internationalization fixes to generate proper [ML.NET](dot.net/ml) C# code. ([#3725](https://github.com/dotnet/machinelearning/pull/3725))
- Automatic Cross Validation for small datasets, and CV stability fixes. ([#3794](https://github.com/dotnet/machinelearning/pull/3794))
diff --git a/docs/release-notes/1.3.1/release-1.3.1.md b/docs/release-notes/1.3.1/release-1.3.1.md
index 30c2e05525..1ae4b64ef6 100644
--- a/docs/release-notes/1.3.1/release-1.3.1.md
+++ b/docs/release-notes/1.3.1/release-1.3.1.md
@@ -1,7 +1,7 @@
# [ML.NET](http://dot.net/ml) 1.3.1
## **New Features**
-- **Deep Neural Networks Training (PREVIEW)** ([#4057](https://github.com/dotnet/machinelearning/pull/4057))
+- **Deep Neural Networks Training (PREVIEW)** ([#4057](https://github.com/dotnet/machinelearning/pull/4057))
Introduces in-preview 0.15.1 `Microsoft.ML.DNN` package that enables full DNN model retraining and transfer learning in .NET using C# bindings for tensorflow provided by Tensorflow .NET. The goal of this package is to allow high level DNN training and scoring tasks such as image classification, text classification, object detection, etc using simple yet powerful APIs that are framework agnostic but currently they only uses Tensorflow as the backend. The below APIs are in early preview and we hope to get customer feedback that we can incorporate in the next iteration.

@@ -43,21 +43,21 @@
[Design specification](https://github.com/dotnet/machinelearning/blob/cd591dd492833964b6829e8bb2411fb81665ac6d/docs/specs/DNN/dnn_api_spec.md)
- [Image classification (Inception V3) sample](https://github.com/dotnet/machinelearning/blob/master/docs/samples/Microsoft.ML.Samples/Dynamic/ImageClassification/InceptionV3TransferLearning.cs)
+ [Image classification (Inception V3) sample](https://github.com/dotnet/machinelearning/blob/main/docs/samples/Microsoft.ML.Samples/Dynamic/ImageClassification/InceptionV3TransferLearning.cs)
- [Image classification (Resnet V2 101) sample](https://github.com/dotnet/machinelearning/blob/master/docs/samples/Microsoft.ML.Samples/Dynamic/ImageClassification/ResnetV2101TransferLearning.cs)
+ [Image classification (Resnet V2 101) sample](https://github.com/dotnet/machinelearning/blob/main/docs/samples/Microsoft.ML.Samples/Dynamic/ImageClassification/ResnetV2101TransferLearning.cs)
-- **Database Loader (PREVIEW)** ([#4035](https://github.com/dotnet/machinelearning/pull/4035))
- Introduces Database loader that enables training on databases. This loader supports any relational database supported by System.Data in .NET Framework or .NET Core, meaning that you can use many RDBMS such as SQL Server, Azure SQL Database, Oracle, PostgreSQL, MySQL, etc. This feature is in early preview and can be accessed via `Microsoft.ML.Experimental` nuget.
+- **Database Loader (PREVIEW)** ([#4035](https://github.com/dotnet/machinelearning/pull/4035))
+  Introduces a database loader that enables training on databases. This loader supports any relational database supported by System.Data in .NET Framework or .NET Core, meaning that you can use many RDBMS such as SQL Server, Azure SQL Database, Oracle, PostgreSQL, MySQL, etc. This feature is in early preview and can be accessed via the `Microsoft.ML.Experimental` nuget.
- [Design specification](https://github.com/dotnet/machinelearning/pull/3857)
-
- [Sample](https://github.com/dotnet/machinelearning/blob/master/test/Microsoft.ML.Tests/DatabaseLoaderTests.cs)
+ [Design specification](https://github.com/dotnet/machinelearning/pull/3857)
+
+ [Sample](https://github.com/dotnet/machinelearning/blob/main/test/Microsoft.ML.Tests/DatabaseLoaderTests.cs)
```cs
public static DatabaseLoader CreateDatabaseLoader(this DataOperationsCatalog catalog,
params DatabaseLoader.Column[] columns)
- ```
+ ```
## **Bug Fixes**
### Serious
@@ -73,7 +73,7 @@
- **Stop LightGbm Warning for Default Metric Input:** Fixes warning, LightGBM `Warning Unknown parameter metric=` is produced when the default metric is used. ([#3965](https://github.com/dotnet/machinelearning/pull/40079))
## **Samples**
-- [**Fraud Detection using the anomaly detection PCA trainer**](https://github.com/dotnet/machinelearning-samples/tree/master/samples/csharp/getting-started/AnomalyDetection_CreditCardFraudDetection)
+- [**Fraud Detection using the anomaly detection PCA trainer**](https://github.com/dotnet/machinelearning-samples/tree/main/samples/csharp/getting-started/AnomalyDetection_CreditCardFraudDetection)
## **Breaking Changes**
None
@@ -82,7 +82,7 @@ None
- Farewell to the Static API ([4009](https://github.com/dotnet/machinelearning/pull/4009))
- AVX and FMA intrinsics in Factorization Machine ([3940](https://github.com/dotnet/machinelearning/pull/3940))
-
+
## **CLI and AutoML API**
- Bug fixes.
diff --git a/docs/release-notes/1.4.0-preview/release-1.4.0-preview.md b/docs/release-notes/1.4.0-preview/release-1.4.0-preview.md
index 003fbcde6c..d51a683409 100644
--- a/docs/release-notes/1.4.0-preview/release-1.4.0-preview.md
+++ b/docs/release-notes/1.4.0-preview/release-1.4.0-preview.md
@@ -1,7 +1,7 @@
# [ML.NET](http://dot.net/ml) 1.4.0-preview
## **New Features**
-- **Deep Neural Networks Training (0.16.0-preview)** ([#4151](https://github.com/dotnet/machinelearning/pull/4151))
+- **Deep Neural Networks Training (0.16.0-preview)** ([#4151](https://github.com/dotnet/machinelearning/pull/4151))
Improves the in-preview `ImageClassification` API further:
- Increases DNN training speed by ~10x compared to the same API in 0.15.1 release.
@@ -39,18 +39,18 @@
[Design specification](https://github.com/dotnet/machinelearning/blob/cd591dd492833964b6829e8bb2411fb81665ac6d/docs/specs/DNN/dnn_api_spec.md)
- [Sample](https://github.com/dotnet/machinelearning/blob/master/docs/samples/Microsoft.ML.Samples/Dynamic/ImageClassification/ResnetV2101TransferLearningTrainTestSplit.cs)
+ [Sample](https://github.com/dotnet/machinelearning/blob/main/docs/samples/Microsoft.ML.Samples/Dynamic/ImageClassification/ResnetV2101TransferLearningTrainTestSplit.cs)
-- **Database Loader (0.16.0-preview)** ([#4070](https://github.com/dotnet/machinelearning/pull/4070),[#4091](https://github.com/dotnet/machinelearning/pull/4091),[#4138](https://github.com/dotnet/machinelearning/pull/4138))
+- **Database Loader (0.16.0-preview)** ([#4070](https://github.com/dotnet/machinelearning/pull/4070),[#4091](https://github.com/dotnet/machinelearning/pull/4091),[#4138](https://github.com/dotnet/machinelearning/pull/4138))
Additional DatabaseLoader support:
- Support DBNull.
- Add `CreateDatabaseLoader` to map columns from a .NET Type.
- Read multiple columns into a single vector
- [Design specification](https://github.com/dotnet/machinelearning/pull/3857)
-
- [Sample](https://github.com/dotnet/machinelearning-samples/tree/master/samples/csharp/getting-started/DatabaseLoader)
+ [Design specification](https://github.com/dotnet/machinelearning/pull/3857)
+
+ [Sample](https://github.com/dotnet/machinelearning-samples/tree/main/samples/csharp/getting-started/DatabaseLoader)
```cs
string connectionString = "YOUR_RELATIONAL_DATABASE_CONNECTION_STRING";
@@ -58,10 +58,10 @@
string commandText = "SELECT * from URLClicks";
DatabaseLoader loader = mlContext.Data.CreateDatabaseLoader();
-
- DatabaseSource dbSource = new DatabaseSource(SqlClientFactory.Instance,
- connectionString,
- commandText);
+
+ DatabaseSource dbSource = new DatabaseSource(SqlClientFactory.Instance,
+ connectionString,
+ commandText);
IDataView dataView = loader.Load(dbSource);
```
@@ -83,7 +83,7 @@ None
## **Enhancements**
None.
-
+
## **CLI and AutoML API**
- AutoML codebase has moved from feature branch to master branch ([#3882](https://github.com/dotnet/machinelearning/pull/3882)).
diff --git a/docs/release-notes/1.4.0-preview2/release-1.4.0-preview2.md b/docs/release-notes/1.4.0-preview2/release-1.4.0-preview2.md
index 584e27b78d..cd6e6eb697 100644
--- a/docs/release-notes/1.4.0-preview2/release-1.4.0-preview2.md
+++ b/docs/release-notes/1.4.0-preview2/release-1.4.0-preview2.md
@@ -10,9 +10,9 @@
- GPU support on Windows and Linux ([#4270](https://github.com/dotnet/machinelearning/pull/4270), [#4277](https://github.com/dotnet/machinelearning/pull/4277))
- Upgraded [TensorFlow .NET](https://github.com/SciSharp/TensorFlow.NET) version to 0.11.3 ([#4205](https://github.com/dotnet/machinelearning/pull/4205))
- [In-memory image inferencing sample](https://github.com/dotnet/machinelearning/blob/master/docs/samples/Microsoft.ML.Samples/Dynamic/ImageClassification/ResnetV2101TransferLearningTrainTestSplit.cs)
- [Early stopping sample](https://github.com/dotnet/machinelearning/blob/master/docs/samples/Microsoft.ML.Samples/Dynamic/ImageClassification/ResnetV2101TransferLearningEarlyStopping.cs)
- [GPU samples](https://github.com/dotnet/machinelearning/tree/master/docs/samples/Microsoft.ML.Samples.GPU)
+ [In-memory image inferencing sample](https://github.com/dotnet/machinelearning/blob/main/docs/samples/Microsoft.ML.Samples/Dynamic/ImageClassification/ResnetV2101TransferLearningTrainTestSplit.cs)
+ [Early stopping sample](https://github.com/dotnet/machinelearning/blob/main/docs/samples/Microsoft.ML.Samples/Dynamic/ImageClassification/ResnetV2101TransferLearningEarlyStopping.cs)
+ [GPU samples](https://github.com/dotnet/machinelearning/tree/main/docs/samples/Microsoft.ML.Samples.GPU)
- **New ONNX Exporters (1.4.0-preview2)**
- LpNormNormalizing transformer ([#4161](https://github.com/dotnet/machinelearning/pull/4161))
@@ -39,13 +39,13 @@ None.
## **Enhancements**
- Improve exception message in LightGBM ([#4214](https://github.com/dotnet/machinelearning/pull/4214))
- FeaturizeText should allow only outputColumnName to be defined ([#4211](https://github.com/dotnet/machinelearning/pull/4211))
-- Fix NgramExtractingTransformer GetSlotNames to not allocate a new delegate on every invoke ([#4247](https://github.com/dotnet/machinelearning/pull/4247))
+- Fix NgramExtractingTransformer GetSlotNames to not allocate a new delegate on every invoke ([#4247](https://github.com/dotnet/machinelearning/pull/4247))
- Resurrect broken code coverage build and re-enable code coverage for pull request ([#4261](https://github.com/dotnet/machinelearning/pull/4261))
- NimbusML entrypoint for permutation feature importance ([#4232](https://github.com/dotnet/machinelearning/pull/4232))
- Reuse memory when copying outputs from TensorFlow graph ([#4260](https://github.com/dotnet/machinelearning/pull/4260))
- DateTime to DateTime standard conversion ([#4273](https://github.com/dotnet/machinelearning/pull/4273))
- CodeCov version upgraded to 1.7.2 ([#4291](https://github.com/dotnet/machinelearning/pull/4291))
-
+
## **CLI and AutoML API**
None.
diff --git a/docs/release-notes/1.4.0/release-1.4.0.md b/docs/release-notes/1.4.0/release-1.4.0.md
index 2b343cc958..4210d9b9f5 100644
--- a/docs/release-notes/1.4.0/release-1.4.0.md
+++ b/docs/release-notes/1.4.0/release-1.4.0.md
@@ -1,10 +1,10 @@
# [ML.NET](http://dot.net/ml) 1.4.0
## **New Features**
-- **General Availability of [Image Classification API](https://docs.microsoft.com/en-us/dotnet/api/microsoft.ml.visioncatalog.imageclassification?view=ml-dotnet#Microsoft_ML_VisionCatalog_ImageClassification_Microsoft_ML_MulticlassClassificationCatalog_MulticlassClassificationTrainers_System_String_System_String_System_String_System_String_Microsoft_ML_IDataView_)**
+- **General Availability of [Image Classification API](https://docs.microsoft.com/en-us/dotnet/api/microsoft.ml.visioncatalog.imageclassification?view=ml-dotnet#Microsoft_ML_VisionCatalog_ImageClassification_Microsoft_ML_MulticlassClassificationCatalog_MulticlassClassificationTrainers_System_String_System_String_System_String_System_String_Microsoft_ML_IDataView_)**
Introduces [`Microsoft.ML.Vision`](https://docs.microsoft.com/en-us/dotnet/api/microsoft.ml.vision?view=ml-dotnet) package that enables image classification by leveraging an existing pre-trained deep neural network model. Here the API trains the last classification layer using TensorFlow by using its C# bindings from TensorFlow .NET. This is a high level API that is simple yet powerful. Below are some of the key features:
- - `GPU training`: Supported on Windows and Linux, more information [here](https://github.com/dotnet/machinelearning/blob/master/docs/api-reference/tensorflow-usage.md).
- - `Early stopping`: Saves time by stopping training automatically when model has been stabelized.
+ - `GPU training`: Supported on Windows and Linux, more information [here](https://github.com/dotnet/machinelearning/blob/main/docs/api-reference/tensorflow-usage.md).
+ - `Early stopping`: Saves time by stopping training automatically when the model has stabilized.
- `Learning rate scheduler`: Learning rate is an integral and potentially difficult part of deep learning. By providing learning rate schedulers, we give users a way to optimize the learning rate with high initial values which can decay over time. High initial learning rate helps to introduce randomness into the system, allowing the Loss function to better find the global minima. While the decayed learning rate helps to stabilize the loss over time. We have implemented [Exponential Decay Learning rate scheduler](https://www.tensorflow.org/api_docs/python/tf/compat/v1/train/exponential_decay) and [Polynomial Decay Learning rate scheduler](https://www.tensorflow.org/api_docs/python/tf/compat/v1/train/polynomial_decay).
- `Pre-trained DNN Architectures`: The supported DNN architectures used internally for `transfer learning` are below:
- Inception V3.
@@ -24,17 +24,17 @@
#### Samples
- [Defaults](https://github.com/dotnet/machinelearning/blob/master/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/MulticlassClassification/ImageClassification/ImageClassificationDefault.cs)
+ [Defaults](https://github.com/dotnet/machinelearning/blob/main/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/MulticlassClassification/ImageClassification/ImageClassificationDefault.cs)
- [Learning rate scheduling](https://github.com/dotnet/machinelearning/blob/master/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/MulticlassClassification/ImageClassification/LearningRateSchedulingCifarResnetTransferLearning.cs)
+ [Learning rate scheduling](https://github.com/dotnet/machinelearning/blob/main/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/MulticlassClassification/ImageClassification/LearningRateSchedulingCifarResnetTransferLearning.cs)
- [Early stopping](https://github.com/dotnet/machinelearning/blob/master/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/MulticlassClassification/ImageClassification/ResnetV2101TransferLearningEarlyStopping.cs)
+ [Early stopping](https://github.com/dotnet/machinelearning/blob/main/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/MulticlassClassification/ImageClassification/ResnetV2101TransferLearningEarlyStopping.cs)
- [ResNet V2 101 train-test split](https://github.com/dotnet/machinelearning/blob/master/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/MulticlassClassification/ImageClassification/ResnetV2101TransferLearningTrainTestSplit.cs)
+ [ResNet V2 101 train-test split](https://github.com/dotnet/machinelearning/blob/main/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/MulticlassClassification/ImageClassification/ResnetV2101TransferLearningTrainTestSplit.cs)
- [End-to-End](https://github.com/dotnet/machinelearning-samples/tree/master/samples/csharp/getting-started/DeepLearning_ImageClassification_Training)
+ [End-to-End](https://github.com/dotnet/machinelearning-samples/tree/main/samples/csharp/getting-started/DeepLearning_ImageClassification_Training)
-- **General Availability of [Database Loader](https://docs.microsoft.com/en-us/dotnet/api/microsoft.ml.databaseloadercatalog?view=ml-dotnet)**
+- **General Availability of [Database Loader](https://docs.microsoft.com/en-us/dotnet/api/microsoft.ml.databaseloadercatalog?view=ml-dotnet)**
The database loader enables loading data from databases into the `IDataView`, and therefore enables model training directly against relational databases. This loader supports any relational database provider supported by System.Data in .NET Core or .NET Framework, meaning that you can use any RDBMS such as SQL Server, Azure SQL Database, Oracle, SQLite, PostgreSQL, MySQL, Progress, etc.
It is important to highlight that in the same way as when training from files, when training with a database ML .NET also supports data streaming, meaning that the whole database doesn’t need to fit into memory, it’ll be reading from the database as it needs so you can handle very large databases (i.e. 50GB, 100GB or larger).
@@ -61,20 +61,20 @@
public string FeedbackText;
public string Label;
}
- ```
+ ```
+
+ [Design specification](https://github.com/dotnet/machinelearning/pull/3857)
- [Design specification](https://github.com/dotnet/machinelearning/pull/3857)
-
- [Sample](https://github.com/dotnet/machinelearning-samples/tree/master/samples/csharp/getting-started/DatabaseLoader)
+ [Sample](https://github.com/dotnet/machinelearning-samples/tree/main/samples/csharp/getting-started/DatabaseLoader)
[How to doc](https://docs.microsoft.com/en-us/dotnet/machine-learning/how-to-guides/load-data-ml-net#load-data-from-a-relational-database)
-- **General Availability of PredictionEnginePool for scalable deployment**
+- **General Availability of PredictionEnginePool for scalable deployment**
When deploying an ML model into multi-threaded and scalable .NET Core web applications and services (such as ASP .NET Core web apps, WebAPIs or an Azure Function) it is recommended to use the PredictionEnginePool instead of directly creating the PredictionEngine object on every request due to performance and scalability reasons. For further background information on why the PredictionEnginePool is recommended, read [this](https://devblogs.microsoft.com/cesardelatorre/how-to-optimize-and-run-ml-net-models-on-scalable-asp-net-core-webapis-or-web-apps/) blog post.
-
- [Sample](https://github.com/dotnet/machinelearning-samples/tree/master/samples/csharp/end-to-end-apps/ScalableMLModelOnWebAPI-IntegrationPkg)
-- **General Availability of Enhanced for .NET Core 3.0**
+ [Sample](https://github.com/dotnet/machinelearning-samples/tree/main/samples/csharp/end-to-end-apps/ScalableMLModelOnWebAPI-IntegrationPkg); a minimal registration sketch also appears at the end of this feature list.
+
+- **General Availability of Enhanced Support for .NET Core 3.0**
This means ML .NET can take advantage of the new features when running in a .NET Core 3.0 application. The first new feature we are using is the new hardware intrinsics feature, which allows .NET code to accelerate math operations by using processor specific instructions.
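+
+A minimal PredictionEnginePool registration sketch for an ASP.NET Core app (the input/output types and model path below are illustrative):
+
+```cs
+using Microsoft.Extensions.DependencyInjection;
+using Microsoft.Extensions.ML;
+
+public class ModelInput { public string FeedbackText { get; set; } }
+public class ModelOutput { public bool Prediction { get; set; } }
+
+public class Startup
+{
+    public void ConfigureServices(IServiceCollection services)
+    {
+        // Register a pool of prediction engines instead of creating a
+        // PredictionEngine on every request.
+        services.AddPredictionEnginePool<ModelInput, ModelOutput>()
+            .FromFile("MLModel.zip");
+    }
+}
+```
+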
## **Bug Fixes**
diff --git a/docs/release-notes/1.5.0-preview/release-1.5.0-preview.md b/docs/release-notes/1.5.0-preview/release-1.5.0-preview.md
index d445544af4..55a928fefb 100644
--- a/docs/release-notes/1.5.0-preview/release-1.5.0-preview.md
+++ b/docs/release-notes/1.5.0-preview/release-1.5.0-preview.md
@@ -3,7 +3,7 @@
## **New Features (IN-PREVIEW, please provide feedback)**
- **Export-to-ONNX for below components:**
- WordTokenizingTransformer ([#4451](https://github.com/dotnet/machinelearning/pull/4451))
- - NgramExtractingTransformer ([#4451](https://github.com/dotnet/machinelearning/pull/4451))
+ - NgramExtractingTransformer ([#4451](https://github.com/dotnet/machinelearning/pull/4451))
- OptionalColumnTransform ([#4454](https://github.com/dotnet/machinelearning/pull/4454))
- KeyToValueMappingTransformer ([#4455](https://github.com/dotnet/machinelearning/pull/4455))
- LbfgsMaximumEntropyMulticlassTrainer ([4462](https://github.com/dotnet/machinelearning/pull/4462))
@@ -17,13 +17,13 @@
- **DateTime Transformer** ([#4521](https://github.com/dotnet/machinelearning/pull/4521))
- **Loader and Saver for [SVMLight file format](http://svmlight.joachims.org/)** ([#4190](https://github.com/dotnet/machinelearning/pull/4190))
-
- [Sample](https://github.com/dotnet/machinelearning/blob/master/docs/samples/Microsoft.ML.Samples/Dynamic/DataOperations/LoadingSvmLight.cs)
-- **Expression transformer** ([#4548](https://github.com/dotnet/machinelearning/pull/4548))
+
+ [Sample](https://github.com/dotnet/machinelearning/blob/main/docs/samples/Microsoft.ML.Samples/Dynamic/DataOperations/LoadingSvmLight.cs)
+- **Expression transformer** ([#4548](https://github.com/dotnet/machinelearning/pull/4548))
The expression transformer takes the expression in the form of text using syntax of a simple expression language, and performs the operation defined in the expression on the input columns in each row of the data. The transformer supports having a vector input column, in which case it applies the expression to each slot of the vector independently. The expression language is extendable to user defined operations.
-
- [Sample](https://github.com/dotnet/machinelearning/blob/master/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/Expression.cs)
-
+
+ [Sample](https://github.com/dotnet/machinelearning/blob/main/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/Expression.cs); a short usage sketch follows below.
+
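+A minimal usage sketch (the data, column names, and expression are illustrative; see the linked sample for the full expression language):
+
+```cs
+using Microsoft.ML;
+
+var mlContext = new MLContext();
+var data = mlContext.Data.LoadFromEnumerable(new[]
+{
+    new Row { X = 1f, Y = 2f },
+    new Row { X = 3f, Y = 4f },
+});
+
+// Compute Sum = X + Y for every row using the expression language.
+var pipeline = mlContext.Transforms.Expression("Sum", "(x, y) => x + y", "X", "Y");
+var transformed = pipeline.Fit(data).Transform(data);
+
+class Row { public float X; public float Y; }
+```
+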
## **Bug Fixes**
- Fix using permutation feature importance with Binary Prediction Transformer and CalibratedModelParametersBase loaded from disk. ([#4306](https://github.com/dotnet/machinelearning/pull/4306))
- Fixed model saving and loading of OneVersusAllTrainer to include SoftMax. ([#4472](https://github.com/dotnet/machinelearning/pull/4472))
@@ -55,7 +55,7 @@
- Create SafeBoosterHandle and SafeDataSetHandle. ([#4539](https://github.com/dotnet/machinelearning/pull/4539))
- Add IterationDataAttribute. ([#4561](https://github.com/dotnet/machinelearning/pull/4561))
- Add tests for ParameterSet equality. ([#4550](https://github.com/dotnet/machinelearning/pull/4550))
-- Add a test handler for AppDomain.UnhandledException. ([#4557](https://github.com/dotnet/machinelearning/commit/f1f8942a8272a9c87373d11bc89467461c8ecad1))
+- Add a test handler for AppDomain.UnhandledException. ([#4557](https://github.com/dotnet/machinelearning/commit/f1f8942a8272a9c87373d11bc89467461c8ecad1))
## **Breaking Changes**
None
diff --git a/docs/release-notes/1.5.4/release-1.5.4.md b/docs/release-notes/1.5.4/release-1.5.4.md
new file mode 100644
index 0000000000..d33df6b718
--- /dev/null
+++ b/docs/release-notes/1.5.4/release-1.5.4.md
@@ -0,0 +1,45 @@
+# [ML.NET](http://dot.net/ml) 1.5.4
+
+## **New Features**
+- **New API for exporting models to ONNX**. ([#5544](https://github.com/dotnet/machinelearning/pull/5544)) A new API has been added to the ONNX converter that lets you specify the output columns you care about; in many cases this exports a smaller, more performant model. A minimal usage sketch follows below.
+
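+A minimal sketch, assuming the new overload takes the desired output column names after the stream (the pipeline and column below are illustrative):
+
+```cs
+using System.IO;
+using Microsoft.ML;
+
+var mlContext = new MLContext();
+var data = mlContext.Data.LoadFromEnumerable(new[] { new Row { Value = 1f } });
+var transformer = mlContext.Transforms.NormalizeMinMax(nameof(Row.Value)).Fit(data);
+
+// Only the listed output columns are kept in the exported ONNX graph,
+// which in many cases yields a smaller, more performant model.
+using (var stream = File.Create("model.onnx"))
+    mlContext.Model.ConvertToOnnx(transformer, data, stream, nameof(Row.Value));
+
+class Row { public float Value; }
+```
+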
+## **Enhancements**
+- Perf improvement for TopK Accuracy and return all topK in Classification Evaluator ([#5395](https://github.com/dotnet/machinelearning/pull/5395)) (**Thank you @jasallen**)
+- Update OnnxRuntime to 1.6 ([#5529](https://github.com/dotnet/machinelearning/pull/5529))
+- Updated tensorflow.net to 0.20.0 ([#5404](https://github.com/dotnet/machinelearning/pull/5404))
+- Added DcgTruncationLevel to the AutoML API and increased the default level to 10 ([#5433](https://github.com/dotnet/machinelearning/pull/5433))
+
+## **Bug Fixes**
+- **AutoML.NET specific fixes**.
+ - Fixed AutoFitMaxExperimentTimeTest ([#5506](https://github.com/dotnet/machinelearning/pull/5506))
+ - Fixed code generator tests failure ([#5520](https://github.com/dotnet/machinelearning/pull/5520))
+ - Use Timer and ctx.CancelExecution() to fix AutoML max-time experiment bug ([#5445](https://github.com/dotnet/machinelearning/pull/5445))
+ - Handled exception during GetNextPipeline for AutoML ([#5455](https://github.com/dotnet/machinelearning/pull/5455))
+ - Fixed an internationalization bug in AutoML parameter sweeping caused by culture-dependent float parsing. ([#5163](https://github.com/dotnet/machinelearning/pull/5163))
+ - Fixed MaxModels exit criteria for AutoML unit test ([#5471](https://github.com/dotnet/machinelearning/pull/5471))
+ - Fixed AutoML CrossValSummaryRunner for TopKAccuracyForAllK ([#5548](https://github.com/dotnet/machinelearning/pull/5548))
+- Fixed bug in TensorFlow Transformer when handling primitive types ([#5547](https://github.com/dotnet/machinelearning/pull/5547))
+- Fixed MLNet.CLI build error ([#5546](https://github.com/dotnet/machinelearning/pull/5546))
+- Fixed memory leaks from OnnxTransformer ([#5518](https://github.com/dotnet/machinelearning/pull/5518))
+- Fixed memory leak in object pool ([#5521](https://github.com/dotnet/machinelearning/pull/5521))
+- Fixed Onnx Export for ProduceWordBags ([#5435](https://github.com/dotnet/machinelearning/pull/5435))
+- Upgraded boundary calculation and expected value calculation in SrCnnEntireAnomalyDetector ([#5436](https://github.com/dotnet/machinelearning/pull/5436))
+- Fixed SR anomaly score calculation at beginning ([#5502](https://github.com/dotnet/machinelearning/pull/5502))
+- Improved error message in ColumnConcatenatingEstimator ([#5444](https://github.com/dotnet/machinelearning/pull/5444))
+- Fixed issue 5020, allowing ML.NET to load TF models with primitive input and output columns ([#5468](https://github.com/dotnet/machinelearning/pull/5468))
+- Fixed issue 4322, enabling LDA summary output ([#5260](https://github.com/dotnet/machinelearning/pull/5260))
+- Fixed perf regression in ShuffleRows ([#5417](https://github.com/dotnet/machinelearning/pull/5417))
+- Changed the _maxCalibrationExamples default on CalibratorUtils ([#5415](https://github.com/dotnet/machinelearning/pull/5415))
+
+
+## **Build / Test updates**
+- Migrated to the [Arcade](https://github.com/dotnet/arcade/) build system that is used by multiple dotnet projects. This will give increased build/CI efficiencies going forward. Updated build instructions can be found in the docs/building folder.
+- Fixed MacOS builds ([#5467](https://github.com/dotnet/machinelearning/pull/5467) and [#5457](https://github.com/dotnet/machinelearning/pull/5457))
+
+## **Documentation Updates**
+- Fixed Spelling on stopwords ([#5524](https://github.com/dotnet/machinelearning/pull/5524))(**Thank you @LeoGaunt**)
+- Changed LoadRawImages Sample ([#5460](https://github.com/dotnet/machinelearning/pull/5460))
+
+
+## **Breaking Changes**
+- None
diff --git a/docs/release-notes/1.5.5/release-1.5.5.md b/docs/release-notes/1.5.5/release-1.5.5.md
new file mode 100644
index 0000000000..c9d5874256
--- /dev/null
+++ b/docs/release-notes/1.5.5/release-1.5.5.md
@@ -0,0 +1,39 @@
+# [ML.NET](http://dot.net/ml) 1.5.5
+
+## **New Features**
+- **New API allowing the confidence parameter to be a double**. ([#5623](https://github.com/dotnet/machinelearning/pull/5623)) A new API has been added to accept a double for the confidence level, which helps when you need higher precision than an int allows. (**Thank you @esso23**)
+- **Support to export ValueMapping estimator to ONNX was added** ([#5577](https://github.com/dotnet/machinelearning/pull/5577))
+- **New API to treat TensorFlow output as batched/not-batched** ([#5634](https://github.com/dotnet/machinelearning/pull/5634)) A new API has been added so you can specify whether the output from TensorFlow is batched or not.
+
+
+## **Enhancements**
+- Made ColumnInference serializable ([#5611](https://github.com/dotnet/machinelearning/pull/5611))
+
+
+## **Bug Fixes**
+- **AutoML.NET specific fixes**.
+ - Fixed an AutoML aggregate timeout exception ([#5631](https://github.com/dotnet/machinelearning/pull/5631))
+ - Offer suggestions for possibly mistyped label column names in AutoML ([#5624](https://github.com/dotnet/machinelearning/pull/5624)) (**Thank you @Crabzmatic**)
+- Update some ToString conversions ([#5627](https://github.com/dotnet/machinelearning/pull/5627)) (**Thanks @4201104140**)
+- Fixed an issue in SRCnnEntireAnomalyDetector ([#5579](https://github.com/dotnet/machinelearning/pull/5579))
+- Fixed nuget.config multi-feed issue ([#5614](https://github.com/dotnet/machinelearning/pull/5614))
+- Remove references to Microsoft.ML.Scoring ([#5602](https://github.com/dotnet/machinelearning/pull/5602))
+- Fixed Averaged Perceptron default value ([#5586](https://github.com/dotnet/machinelearning/pull/5586))
+
+
+## **Build / Test updates**
+- Fixed the official build by adding a homebrew bug workaround ([#5596](https://github.com/dotnet/machinelearning/pull/5596))
+- Nuget.config url fix for roslyn compilers ([#5584](https://github.com/dotnet/machinelearning/pull/5584))
+- Add SymSgdNative reference to AutoML.Tests.csproj ([#5559](https://github.com/dotnet/machinelearning/pull/5559))
+
+
+## **Documentation Updates**
+- Updated documentation for the correct version of CUDA for TensorFlow. ([#5635](https://github.com/dotnet/machinelearning/pull/5635))
+- Updated documentation for an issue with brew and installing libomp. ([#5635](https://github.com/dotnet/machinelearning/pull/5635))
+- Updated an ONNX url to the correct url. ([#5635](https://github.com/dotnet/machinelearning/pull/5635))
+- Added a note in the documentation that the PredictionEngine is not thread safe. ([#5583](https://github.com/dotnet/machinelearning/pull/5583))
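+
+Because the `PredictionEngine` is not thread safe, one common option for concurrent callers is the `PredictionEnginePool` in the `Microsoft.Extensions.ML` package. A minimal sketch (the model path and the empty input/output classes are placeholders):
+
+```csharp
+using Microsoft.Extensions.DependencyInjection;
+using Microsoft.Extensions.ML;
+
+var services = new ServiceCollection();
+services.AddPredictionEnginePool<ModelInput, ModelOutput>()
+    .FromFile("model.zip"); // placeholder model path
+
+var provider = services.BuildServiceProvider();
+var pool = provider.GetRequiredService<PredictionEnginePool<ModelInput, ModelOutput>>();
+
+// Predict() borrows a pooled engine, so concurrent callers are safe.
+var prediction = pool.Predict(new ModelInput());
+
+// Placeholder input/output types for the sketch.
+public class ModelInput { }
+public class ModelOutput { }
+```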
+
+
+## **Breaking Changes**
+- None
diff --git a/docs/samples/Microsoft.ML.AutoML.Samples/Microsoft.ML.AutoML.Samples.csproj b/docs/samples/Microsoft.ML.AutoML.Samples/Microsoft.ML.AutoML.Samples.csproj
index 9d951867e1..4628e6b6e2 100644
--- a/docs/samples/Microsoft.ML.AutoML.Samples/Microsoft.ML.AutoML.Samples.csproj
+++ b/docs/samples/Microsoft.ML.AutoML.Samples/Microsoft.ML.AutoML.Samples.csproj
@@ -7,6 +7,14 @@
+
+ all
+
+
+
+ all
+
+
diff --git a/docs/samples/Microsoft.ML.Samples.GPU/Microsoft.ML.Samples.GPU.csproj b/docs/samples/Microsoft.ML.Samples.GPU/Microsoft.ML.Samples.GPU.csproj
index 0c01186724..2b54ea24b0 100644
--- a/docs/samples/Microsoft.ML.Samples.GPU/Microsoft.ML.Samples.GPU.csproj
+++ b/docs/samples/Microsoft.ML.Samples.GPU/Microsoft.ML.Samples.GPU.csproj
@@ -20,9 +20,13 @@
+
+
+
+
@@ -48,14 +52,14 @@
-
+
DnnImageModels\ResNet18Onnx\ResNet18.onnx
PreserveNewest
-
+
DnnImageModels\ResNetPrepOnnx\ResNetPreprocess.onnx
PreserveNewest
diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/ModelOperations/OnnxConversion.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/ModelOperations/OnnxConversion.cs
index 8a55b8fc64..cbd4162385 100644
--- a/docs/samples/Microsoft.ML.Samples/Dynamic/ModelOperations/OnnxConversion.cs
+++ b/docs/samples/Microsoft.ML.Samples/Dynamic/ModelOperations/OnnxConversion.cs
@@ -74,7 +74,7 @@ public static void Example()
//However, you can also specify a custom OpSet version by using the following code
//Currently, we support OpSet versions 9 for most transformers, but there are certain transformers that require a higher OpSet version
            //Please refer to the following link for the most up-to-date information on which OpSet versions we support
- //https://github.com/dotnet/machinelearning/blob/master/src/Microsoft.ML.OnnxConverter/OnnxExportExtensions.cs
+ //https://github.com/dotnet/machinelearning/blob/main/src/Microsoft.ML.OnnxConverter/OnnxExportExtensions.cs
int customOpSetVersion = 9;
using (var stream = File.Create("sample_onnx_conversion_2.onnx"))
mlContext.Model.ConvertToOnnx(transformer, originalData, customOpSetVersion, stream);
@@ -82,7 +82,8 @@ public static void Example()
//Create the pipeline using onnx file.
var onnxModelPath = "your_path_to_sample_onnx_conversion_1.onnx";
var onnxEstimator = mlContext.Transforms.ApplyOnnxModel(onnxModelPath);
- var onnxTransformer = onnxEstimator.Fit(trainTestOriginalData.TrainSet);
+ //Make sure to either use the 'using' clause or explicitly dispose the returned onnxTransformer to prevent memory leaks
+ using var onnxTransformer = onnxEstimator.Fit(trainTestOriginalData.TrainSet);
            //Run inference on the test set
var output = transformer.Transform(trainTestOriginalData.TestSet);
diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/ApplyONNXModelWithInMemoryImages.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/ApplyONNXModelWithInMemoryImages.cs
index 4c1c3a3d4b..71bd61bf90 100644
--- a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/ApplyONNXModelWithInMemoryImages.cs
+++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/ApplyONNXModelWithInMemoryImages.cs
@@ -13,7 +13,7 @@ public static class ApplyOnnxModelWithInMemoryImages
public static void Example()
{
            // Download the squeezenet image model from ONNX model zoo, version 1.2
- // https://github.com/onnx/models/tree/master/squeezenet or use
+ // https://github.com/onnx/models/tree/master/vision/classification/squeezenet or use
// Microsoft.ML.Onnx.TestModels nuget.
// It's a multiclass classifier. It consumes an input "data_0" and
// produces an output "softmaxout_1".
@@ -45,7 +45,7 @@ public static void Example()
// Map column "data_0" to column "softmaxout_1"
var pipeline = mlContext.Transforms.ExtractPixels("data_0", "Image")
.Append(mlContext.Transforms.ApplyOnnxModel("softmaxout_1",
- "data_0", modelPath));
+ "data_0", modelPath));
var model = pipeline.Fit(dataView);
var onnx = model.Transform(dataView);
@@ -60,12 +60,12 @@ public static void Example()
ImageDataPoint>(onnx, false).ToList();
// The scores are probabilities of all possible classes, so they should
- // all be positive.
+ // all be positive.
foreach (var dataPoint in transformedDataPoints)
{
var firstClassProb = dataPoint.Scores.First();
var lastClassProb = dataPoint.Scores.Last();
- Console.WriteLine("The probability of being the first class is " +
+ Console.WriteLine("The probability of being the first class is " +
(firstClassProb * 100) + "%.");
Console.WriteLine($"The probability of being the last class is " +
diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/Conversion/MapKeyToBinaryVector.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/Conversion/MapKeyToBinaryVector.cs
index 57ae091124..9933ef030e 100644
--- a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/Conversion/MapKeyToBinaryVector.cs
+++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/Conversion/MapKeyToBinaryVector.cs
@@ -8,11 +8,11 @@ namespace Samples.Dynamic
class MapKeyToBinaryVector
{
/// This example demonstrates the use of MapKeyToVector by mapping keys to
- /// floats[] of 0 and 1, representing the number in binary format.
+ /// floats[] of 0 and 1, representing the number in binary format.
/// Because the ML.NET KeyType maps the missing value to zero, counting
/// starts at 1, so the uint values converted to KeyTypes will appear
- /// skewed by one.
- /// See https://github.com/dotnet/machinelearning/blob/master/docs/code/IDataViewTypeSystem.md#key-types
+ /// skewed by one.
+ /// See https://github.com/dotnet/machinelearning/blob/main/docs/code/IDataViewTypeSystem.md#key-types
public static void Example()
{
// Create a new ML context, for ML.NET operations. It can be used for
diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/Conversion/MapKeyToValueMultiColumn.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/Conversion/MapKeyToValueMultiColumn.cs
index bc7e2fded9..41eb6929b4 100644
--- a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/Conversion/MapKeyToValueMultiColumn.cs
+++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/Conversion/MapKeyToValueMultiColumn.cs
@@ -7,7 +7,7 @@ namespace Samples.Dynamic
{
/// This example demonstrates the use of the ValueToKeyMappingEstimator, by
/// mapping KeyType values to the original strings. For more on ML.NET KeyTypes
- /// see: https://github.com/dotnet/machinelearning/blob/master/docs/code/IDataViewTypeSystem.md#key-types
+ /// see: https://github.com/dotnet/machinelearning/blob/main/docs/code/IDataViewTypeSystem.md#key-types
public class MapKeyToValueMultiColumn
{
public static void Example()
@@ -26,7 +26,7 @@ public static void Example()
// by ML.NET API.
var dataView = mlContext.Data.LoadFromEnumerable(examples);
- // Create a pipeline.
+ // Create a pipeline.
var pipeline =
// Convert the string labels into key types.
mlContext.Transforms.Conversion.MapValueToKey("Label")
@@ -34,8 +34,8 @@ public static void Example()
.Append(mlContext.MulticlassClassification.Trainers.
SdcaMaximumEntropy());
- // Train the model and do predictions on same data set.
- // Typically predictions would be in a different, validation set.
+ // Train the model and do predictions on same data set.
+ // Typically predictions would be in a different, validation set.
var dataWithPredictions = pipeline.Fit(dataView).Transform(dataView);
// At this point, the Label column is transformed from strings, to
diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/Conversion/MapKeyToVector.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/Conversion/MapKeyToVector.cs
index f9a4809fa5..fd1646f014 100644
--- a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/Conversion/MapKeyToVector.cs
+++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/Conversion/MapKeyToVector.cs
@@ -11,7 +11,7 @@ class MapKeyToVector
/// This example demonstrates the use of MapKeyToVector by mapping keys to
/// floats[]. Because the ML.NET KeyType maps the missing value to zero,
/// counting starts at 1, so the uint values converted to KeyTypes will
- /// appear skewed by one. See https://github.com/dotnet/machinelearning/blob/master/docs/code/IDataViewTypeSystem.md#key-types
+ /// appear skewed by one. See https://github.com/dotnet/machinelearning/blob/main/docs/code/IDataViewTypeSystem.md#key-types
public static void Example()
{
// Create a new ML context, for ML.NET operations. It can be used for
@@ -50,7 +50,7 @@ public static void Example()
// Fits the pipeline to the data.
IDataView transformedData = pipeline.Fit(data).Transform(data);
-
+
// Getting the resulting data as an IEnumerable.
// This will contain the newly created columns.
IEnumerable features = mlContext.Data.CreateEnumerable<
@@ -62,7 +62,7 @@ public static void Example()
foreach (var featureRow in features)
Console.WriteLine(featureRow.Timeframe + " " +
string.Join(',', featureRow.TimeframeVector.Select(x=>x)) + " "
- + string.Join(',', featureRow.PartsCount.Select(x => x)) +
+ + string.Join(',', featureRow.PartsCount.Select(x => x)) +
" " + string.Join(',', featureRow.PartsNoCount.Select(
x => x)));
diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/Conversion/MapKeyToVectorMultiColumn.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/Conversion/MapKeyToVectorMultiColumn.cs
index 606c371497..aa8df148aa 100644
--- a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/Conversion/MapKeyToVectorMultiColumn.cs
+++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/Conversion/MapKeyToVectorMultiColumn.cs
@@ -10,8 +10,8 @@ public class MapKeyToVectorMultiColumn
/// This example demonstrates the use of MapKeyToVector by mapping keys to
/// floats[] for multiple columns at once. Because the ML.NET KeyType maps
/// the missing value to zero, counting starts at 1, so the uint values
- /// converted to KeyTypes will appear skewed by one.
- /// See https://github.com/dotnet/machinelearning/blob/master/docs/code/IDataViewTypeSystem.md#key-types
+ /// converted to KeyTypes will appear skewed by one.
+ /// See https://github.com/dotnet/machinelearning/blob/main/docs/code/IDataViewTypeSystem.md#key-types
public static void Example()
{
// Create a new ML context, for ML.NET operations. It can be used for
diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/Conversion/MapValueToKeyMultiColumn.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/Conversion/MapValueToKeyMultiColumn.cs
index 8e8c0e62bd..6d94eb53c6 100644
--- a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/Conversion/MapValueToKeyMultiColumn.cs
+++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/Conversion/MapValueToKeyMultiColumn.cs
@@ -8,7 +8,7 @@ public static class MapValueToKeyMultiColumn
{
/// This example demonstrates the use of the ValueToKeyMappingEstimator, by
/// mapping strings to KeyType values. For more on ML.NET KeyTypes see:
- /// https://github.com/dotnet/machinelearning/blob/master/docs/code/IDataViewTypeSystem.md#key-types
+ /// https://github.com/dotnet/machinelearning/blob/main/docs/code/IDataViewTypeSystem.md#key-types
/// It is possible to have multiple values map to the same category.
public static void Example()
{
@@ -64,7 +64,7 @@ public static void Example()
// are not found in the lookup IDataView they will get mapped to the
// missing value, 0. The keyData are shared among the columns, therefore
// the keys are not contiguous for the column. Create the lookup map
- // data IEnumerable.
+ // data IEnumerable.
var lookupData = new[] {
new LookupMap { Key = "0-4yrs" },
new LookupMap { Key = "6-11yrs" },
diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/TimeSeries/DetectChangePointBySsa.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/TimeSeries/DetectChangePointBySsa.cs
index 7d545770f9..23fb7e5c9c 100644
--- a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/TimeSeries/DetectChangePointBySsa.cs
+++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/TimeSeries/DetectChangePointBySsa.cs
@@ -52,7 +52,7 @@ public static void Example()
// Setup SsaChangePointDetector arguments
var inputColumnName = nameof(TimeSeriesData.Value);
var outputColumnName = nameof(ChangePointPrediction.Prediction);
- int confidence = 95;
+ double confidence = 95;
int changeHistoryLength = 8;
// Train the change point detector.
diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/TimeSeries/DetectChangePointBySsaBatchPrediction.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/TimeSeries/DetectChangePointBySsaBatchPrediction.cs
index 25819052d8..85732f9259 100644
--- a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/TimeSeries/DetectChangePointBySsaBatchPrediction.cs
+++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/TimeSeries/DetectChangePointBySsaBatchPrediction.cs
@@ -59,7 +59,7 @@ public static void Example()
// The transformed data.
var transformedData = ml.Transforms.DetectChangePointBySsa(
- outputColumnName, inputColumnName, 95, 8, TrainingSize,
+ outputColumnName, inputColumnName, 95.0d, 8, TrainingSize,
SeasonalitySize + 1).Fit(dataView).Transform(dataView);
// Getting the data of the newly created column as an IEnumerable of
diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/TimeSeries/DetectChangePointBySsaStream.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/TimeSeries/DetectChangePointBySsaStream.cs
index c65d3af987..dfab85aee2 100644
--- a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/TimeSeries/DetectChangePointBySsaStream.cs
+++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/TimeSeries/DetectChangePointBySsaStream.cs
@@ -52,7 +52,7 @@ public static void Example()
// Setup SsaChangePointDetector arguments
var inputColumnName = nameof(TimeSeriesData.Value);
var outputColumnName = nameof(ChangePointPrediction.Prediction);
- int confidence = 95;
+ double confidence = 95;
int changeHistoryLength = 8;
// Train the change point detector.
diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/TimeSeries/DetectIidChangePoint.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/TimeSeries/DetectIidChangePoint.cs
index 4e44a73607..852fc9f8e9 100644
--- a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/TimeSeries/DetectIidChangePoint.cs
+++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/TimeSeries/DetectIidChangePoint.cs
@@ -55,7 +55,7 @@ public static void Example()
// Time Series model.
ITransformer model = ml.Transforms.DetectIidChangePoint(
- outputColumnName, inputColumnName, 95, Size / 4).Fit(dataView);
+ outputColumnName, inputColumnName, 95.0d, Size / 4).Fit(dataView);
// Create a time series prediction engine from the model.
var engine = model.CreateTimeSeriesEngine
+
+
+
@@ -977,14 +980,14 @@
-
+
DnnImageModels\ResNet18Onnx\ResNet18.onnx
PreserveNewest
-
+
DnnImageModels\ResNetPrepOnnx\ResNetPreprocess.onnx
PreserveNewest
diff --git a/docs/specs/mlnet-cli/MLNET-CLI-Specs.md b/docs/specs/mlnet-cli/MLNET-CLI-Specs.md
index 6a4f13469a..a0c013d405 100644
--- a/docs/specs/mlnet-cli/MLNET-CLI-Specs.md
+++ b/docs/specs/mlnet-cli/MLNET-CLI-Specs.md
@@ -9,7 +9,7 @@ The CLI will be branded as the ML.NET CLI since this CLI will also have addition
The .NET AutoML API (.NET based) will be part of the [ML.NET](https://github.com/dotnet/machinelearning) API.
AutoML features will be used for certain important foundational features of the ML.NET CLI.
-This specs-doc focuses most of all on the CLI features related to AutoML, but it will also consider (in less detail) the scenarios where AutoML is not needed, so the CLI syntax will be consistent end-to-end for all the possible scenarios in the future.
+This specs-doc focuses most of all on the CLI features related to AutoML, but it will also consider (in less detail) the scenarios where AutoML is not needed, so the CLI syntax will be consistent end-to-end for all the possible scenarios in the future.
# Problem to solve
@@ -17,9 +17,9 @@ Customers (.NET developers) have told us through many channels that they can ge
The subset of .NET developers who at the same time are skilled in data science and machine learning in general is very small compared to the total number of regular .NET developers.
-We need a way to enable regular .NET developers to easily use [ML.NET](https://github.com/dotnet/machinelearning) to create custom models solving typical ML scenarios in the enterprise.
+We need a way to enable regular .NET developers to easily use [ML.NET](https://github.com/dotnet/machinelearning) to create custom models solving typical ML scenarios in the enterprise.
-If we don't provide a really simple way to use [ML.NET](https://github.com/dotnet/machinelearning) for regular developers (almost no data science knowledge at all), then we won't be able to really "democratize" machine learning for .NET developers.
+If we don't provide a really simple way to use [ML.NET](https://github.com/dotnet/machinelearning) for regular developers (almost no data science knowledge at all), then we won't be able to really "democratize" machine learning for .NET developers.
## Evidence
@@ -66,24 +66,24 @@ The business goals are the following, depending on the possible scenarios:
The goals for the features is to automate the following steps when building a model to achieve the above business goals:
-**Foundational features:**
+**Foundational features:**
- Provide an end-to-end **ML.NET CLI** for developers (i.e. *"mlnet new"*) to generate either the final trained model and the pipeline's C#/ML.NET implementation code in a similar fashion to the [.NET Core CLI](https://docs.microsoft.com/en-us/dotnet/core/tools/?tabs=netcore2x). The CLI is also a foundation upon which higher-level tools, such as Integrated Development Environments (IDEs) can rest.
- Automatic selection of best columns (input variables) to be used by the model
-- Automatic featurization and feature engineering
+- Automatic featurization and feature engineering
- Automatic learner sweeping and selection
- Automatic hyperparameter sweeping and selection
-
+
# Solution
-The solution that can target any kind of developer and fits especially well when generating code is a CLI (Command-Line Interface).
+The solution that can target any kind of developer and fits especially well when generating code is a CLI (Command-Line Interface).
The main supporting reasons are:
- A CLI can be the foundation where other higher level tooling (UI, IDEs) can also rest on.
- A CLI makes sense as a tool for "code generation". For the end-user experience, just using a C# API to generate C# code would not make sense and would feel a bit weird for developers used to .NET approaches.
-- A CLI is aligned to modern cross-platform frameworks experience such as .NET Core, Python, Node, etc.
+- A CLI is aligned to modern cross-platform frameworks experience such as .NET Core, Python, Node, etc.
## Positioning the CLI and AutoML in ML.NET
@@ -110,7 +110,7 @@ The .NET AutoML API will be used for the most important foundational features th
- The CLI proposed here will not provide for “continue sweeping” after sweeping has ended.
- When running locally with the by default behavior (no Azure), the CLI will be able to work without needing to make any remote service call or requiring any authentication from the user.
- The CLI will provide feedback output (such as % work done or high level details on what's happening under the covers) while working on the long-running tasks.
-- The ML.NET CLI will be aligned and integrated to the [.NET Core CLI](https://docs.microsoft.com/en-us/dotnet/core/tools/?tabs=netcore2x). A good approach is to implement the ML.NET CLI as a [.NET Core Global Tool](https://docs.microsoft.com/en-us/dotnet/core/tools/global-tools) (i.e. named "mlnet" package) on top of the "dotnet CLI".
+- The ML.NET CLI will be aligned and integrated to the [.NET Core CLI](https://docs.microsoft.com/en-us/dotnet/core/tools/?tabs=netcore2x). A good approach is to implement the ML.NET CLI as a [.NET Core Global Tool](https://docs.microsoft.com/en-us/dotnet/core/tools/global-tools) (i.e. named "mlnet" package) on top of the "dotnet CLI".
- [System-Command-Line](https://github.com/dotnet/command-line-api) package can precisely help with those typical CLI console app features (CLI plumbing), so it can be implemented a lot more easily than a CLI app written from scratch for features like command line parsing, invocation and rendering of terminal output. Otherwise, a CLI implementation starting from scratch can be significantly more costly to develop.
### CLI default behaviour and overridability
@@ -136,13 +136,13 @@ The conditions for each version are incremental on top of previous versions.
- NuGet package available at any private NuGet feed ([Azure DevOps?](https://docs.microsoft.com/en-us/azure/devops/artifacts/get-started-nuget?view=azure-devops&tabs=new-nav) [MyGet?](https://docs.myget.org/docs/reference/security)) with daily drops from CI/CD pipelines.
-- Add telemetry for the CLI NuGet package.
+- Add telemetry for the CLI NuGet package.
- Minimum MVP functionality for users to test custom datasets. Implementation of the CLI arguments specified for v0.1 in the syntax section and code generation section.
- Documentation: Have needed documentation for users to get started autonomously and provide feedback (Installation, getting started and test without help from the dev team).
-- Test with ML.NET Samples: Quality of generated code/models targeting the samples should improve the current published samples.
+- Test with ML.NET Samples: Quality of generated code/models targeting the samples should improve the current published samples.
Nice to have for v0.1:
@@ -188,7 +188,7 @@ Nice to have for v0.1:
- `weight-column` // indicates the weight/importance of a training example. Related: `sample_weight` argument for training data such as in [Python AutoML here](https://docs.microsoft.com/en-us/python/api/azureml-train-automl/azureml.train.automl.automl?view=azure-ml-py).
- Add additional commands to do *"machine learning without code"*:
- - *train*: It will only generate the best model .ZIP file. For example:
+ - *train*: It will only generate the best model .ZIP file. For example:
- `mlnet train --ml-task Regression --dataset "/MyDataSets/Sales.csv"`
- *predict*: Having a serialized model .ZIP file, you can test a single prediction. For example:
@@ -229,7 +229,7 @@ Nice to have for v0.1:
## Tool name
-**mlnet**
+**mlnet**
## Installing the tool
@@ -255,9 +255,9 @@ mlnet
- A new project or group of projects (by default, a single console app) with the generated .NET code.
- The "best model" (or multiple "best models") as serialized files.
-- An analysis report of the generated models.
+- An analysis report of the generated models.
-By default (with no app template parameter), the CLI will create a single console application containing either the training code and the model scoring code. However, in next ML.NET CLI versions (heading v1.0), when the user provides an app-type template argument (such as `mlnet new web`), it will be able to generate different application type projects (web, console, desktop, Unity, etc.) for the model scoring code.
+By default (with no app template parameter), the CLI will create a single console application containing both the training code and the model scoring code. However, in later ML.NET CLI versions (heading toward v1.0), when the user provides an app-type template argument (such as `mlnet new web`), it will be able to generate different application type projects (web, console, desktop, Unity, etc.) for the model scoring code.
## Examples
@@ -293,7 +293,7 @@ Create and train a model based on parameters specified in the .rsp file plus mor
(*Release v1.0 examples*)
-CLI command specifying to run AutoML compute in Azure's cloud
+CLI command specifying to run AutoML compute in Azure's cloud
` mlnet new --ml-task BinaryClassification --dataset "/MyDataSets/Cars.csv" --label-column-name "InsuranceRisk" --azure-automl-service-key "key-value"`
@@ -304,7 +304,7 @@ Syntax should be aligned to [dotnet new](https://docs.microsoft.com/en-us/dotnet
#### Syntax
```console
-mlnet new
+mlnet new
--------------- (v0.1) -------------------
@@ -380,7 +380,7 @@ There should be a pre-validation of the arguments with basic rules in the CLI to
`--ml-task` (string) (*Release 0.1*)
-A single string providing the ML problem to solve. For instance, any of the following depending on the supported tasks in .NET AutoML:
+A single string providing the ML problem to solve. For instance, any of the following depending on the supported tasks in .NET AutoML:
*Release 0.1*
- `regression` - Choose if the ML Model will be used to predict a numeric value
@@ -417,7 +417,7 @@ This argument provides the filepath to either one of the following:
File path pointing to the test dataset file, for example when using an 80% - 20% approach when making regular validations to obtain accuracy metrics.
-If using `--test-dataset`, then `--dataset` is also required.
+If using `--test-dataset`, then `--dataset` is also required.
The `--test-dataset` argument is optional unless the `--validation-dataset` is used. In that case, the user must use all three arguments, as in the hypothetical example below.
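
A hypothetical invocation combining the three dataset arguments (the file paths and label name are placeholders):

```console
mlnet new --ml-task Regression --dataset "train.csv" --validation-dataset "validation.csv" --test-dataset "test.csv" --label-column-name Price
```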
@@ -431,7 +431,7 @@ If using a `validation dataset`, this is how the behaviour should be:
- The `test-dataset` and `--dataset` arguments are also required.
-- The `validation-dataset` dataset is used to estimate prediction error for model selection.
+- The `validation-dataset` dataset is used to estimate prediction error for model selection.
- The `test-dataset` is used for assessment of the generalization error of the final chosen model. Ideally, the test set should be kept in a “vault,” and be brought out only at the end of the data analysis.
@@ -439,7 +439,7 @@ Basically, when using a `validation dataset` plus the `test dataset`, the valida
1. In the first part you just look at your models and select the best performing approach using the validation data (=validation)
2. Then you estimate the accuracy of the selected approach (=test).
-
+
Hence, the separation of data could be 80/10/10 or 75/15/10. For example:
- `training-dataset` file should have 75% of the data.
@@ -452,7 +452,7 @@ In any case, those percentages will be decided by the user using the CLI who wil
`--label-column-name` (string) (*Release 0.1*)
-With this argument, a specifc objective/target column (the variable that you want to predict) can be specified by using the column's name set in the dataset's header.
+With this argument, a specific objective/target column (the variable that you want to predict) can be specified by using the column's name set in the dataset's header.
If you do not explicitly specify a label column, the CLI will automatically infer which column in the dataset is the label (to be implemented in v0.2 or v1.0).
@@ -475,11 +475,11 @@ This argument is used only for supervised ML task such as a *classification prob
`--has-header` (bool) (*Release 0.1*)
Specify if the dataset file(s) have a header row.
-Possible values are:
+Possible values are:
- `true`
- `false`
-The by default value is `true`.
+The default value is `true`.
In order to use the `--label-column-name` argument you need to have a header in the file.
@@ -498,7 +498,7 @@ Therefore, if `--max-exploration-time` is very short (say less than 1 minute), t
`--name` (string) (*Release 0.1*)
-The name for the created output project or solution. If no name is specified, the name 'Sample' is used.
+The name for the created output project or solution. If no name is specified, the name 'Sample' is used.
The ML.NET model file (.ZIP file) will get the same name, as well.
@@ -514,11 +514,11 @@ Location/folder to place the generated output. The default is the current direct
`--verbosity` (string) - (*Release 0.1*)
-Sets the verbosity level of the in the std output.
+Sets the verbosity level of the std output.
Allowed values are:
-- `q[uiet]`
+- `q[uiet]`
- `m[inimal]` (by default)
- `diag[nostic]` (logging information level)
@@ -586,13 +586,13 @@ Path to .YAML file (internally it might be easier and more robust to internally
All these parameters must be optional (opt-in). If not provided by the user, AutoML should infer all of them based on the provided dataset.
-However, this advanced configuration allows the users with knowledge on its dataset and ML skills to provide "shortcuts" and improvements so AutoML can work on a more oppinionated direction for a specific model in less time.
+However, this advanced configuration allows users with knowledge of their dataset and ML skills to provide "shortcuts" and improvements so AutoML can work in a more opinionated direction for a specific model in less time.
-These parameters should be similar to the ones provided by the .NET AutoML API, probably also similar to the [Python AutoMLConfig class](https://docs.microsoft.com/en-us/python/api/azureml-train-automl/azureml.train.automl.automlconfig(class)?view=azure-ml-py) where the user can drill down and specify more specific configuration such as number of `iterations`, `primary-metric`, `n_cross_validations`, `enable_cache`, etc.
+These parameters should be similar to the ones provided by the .NET AutoML API, probably also similar to the [Python AutoMLConfig class](https://docs.microsoft.com/en-us/python/api/azureml-train-automl/azureml.train.automl.automlconfig(class)?view=azure-ml-py) where the user can drill down and specify more specific configuration such as number of `iterations`, `primary-metric`, `n_cross_validations`, `enable_cache`, etc.
In cases where a parameter in this file coincides with a CLI argument (i.e. label-column-name, etc.), the parameter in the .YAML file will be overridden by the CLI argument, which has higher priority.
-*TO BE DEFINED*: If there's a 1:1 mapping between the first level of paramenters in the .YAML file and the CLI arguments, how do we deal with expected syntax/naming of the args?:
+*TO BE DEFINED*: If there's a 1:1 mapping between the first level of parameters in the .YAML file and the CLI arguments, how do we deal with expected syntax/naming of the args?:
- `--test-dataset` vs. `test_dataset`
*IMPORTANT: The whole list and explanation of each advanced AutoML config parameters still have to be defined.*
@@ -675,11 +675,11 @@ Sample JSON
**Matching CLI arguments with first level of .YAML file elements**
The first level items in the YAML file must match the CLI arguments.
-The CLI arguments will always override the items in the YAML file.
+The CLI arguments will always override the items in the YAML file.
**Embedding .YAML text as inline text in the CLI** (v1.0)
-Since the first level elements of the .YAML file should match the arguments, it should be possible to embed YAML text directly in-line as arguments, in a similar way than the original MAML.exe CLI does.
+Since the first level elements of the .YAML file should match the arguments, it should be possible to embed YAML text directly in-line as arguments, in a similar way to what the original MAML.exe CLI does.
Sample CLI command with embedded extended arguments:
@@ -691,15 +691,15 @@ mlnet new --ml-task Regression --dataset "/MyDataSets/Sales.csv" --label-column
----------------------------------------------------------
`@args-response-file` (string) (*Release 0.2*)
-Path to `.rsp` file containing the arguments/parameters to be used by the CLI.
+Path to `.rsp` file containing the arguments/parameters to be used by the CLI.
In this case we use the `@` syntax so it is consistent with other CLIs using the `.rsp` files, such as the [C# Compiler Options](https://docs.microsoft.com/en-us/dotnet/csharp/language-reference/compiler-options/response-file-compiler-option), etc.
This `.rsp` file should provide the same arguments as the ones supported by the CLI command (`mlnet new` arguments), except the `--args-config-file` argument, logically.
-The advantage of using an `.rsp` file for loading existing arguments, compared to a .YAML or .JSON file, is that we can use exactly the same argument names than the ones used in the CLI, such as `--label-column-name`, `--test-dataset` instead of having to map argument names to JSON-style names such as `labelColumnName` or `testDataset` or .YAML style and having to maitain two different schemas.
+The advantage of using an `.rsp` file for loading existing arguments, compared to a .YAML or .JSON file, is that we can use exactly the same argument names as the ones used in the CLI, such as `--label-column-name` and `--test-dataset`, instead of having to map argument names to JSON-style names such as `labelColumnName` or `testDataset` (or .YAML style) and having to maintain two different schemas.
-The parameters provided in the `.rsp` file will be overriden by any parameter provided in the CLI command itself.
+The parameters provided in the `.rsp` file will be overridden by any parameter provided in the CLI command itself.
*Sample .rsp file with CLI arguments and optional configuration*
@@ -730,11 +730,11 @@ Lists application types supported, such as 'console', 'web', etc.
*NOTE: For the 0.1 preview version, only the `console` app-type (by default) will be implemented.*
-The app-type project to generate the code when the command is invoked.
+The app-type project to generate the code when the command is invoked.
If `--app-type` argument is not provided, the `console` template will be used by default.
-The command `new` will contain a default list of templates. Use `--list-app-types` to obtain a list of the available templates. The following table shows the templates that could come built-in with the ML.NET CLI.
+The command `new` will contain a default list of templates. Use `--list-app-types` to obtain a list of the available templates. The following table shows the templates that could come built-in with the ML.NET CLI.
The default language for the template is shown inside the brackets.
@@ -782,9 +782,9 @@ As introduced, the CLI will generate the following assets as result of the opera
The training code for a single generated console app project should be similar to the following sample app:
-- Sentiment Analysis sample: https://github.com/dotnet/machinelearning-samples/blob/master/samples/csharp/getting-started/BinaryClassification_SentimentAnalysis/SentimentAnalysis/SentimentAnalysisConsoleApp/Program.cs
-
- Notes:
+- Sentiment Analysis sample: https://github.com/dotnet/machinelearning-samples/blob/main/samples/csharp/getting-started/BinaryClassification_SentimentAnalysis/SentimentAnalysis/SentimentAnalysisConsoleApp/Program.cs
+
+ Notes:
  - This sample code is evolving when upgrading to upcoming preview versions (such as 0.10, 0.11, etc.)
  - Columns being loaded should not be explicit in the TextLoader; instead, they should use a class such as this:
@@ -796,7 +796,7 @@ For v0.1, it must load data from text file using the data-structure (observation
// Read the data into a data view.
var dataView = mlContext.Data.ReadFromTextFile<InspectedRow>(dataPath, hasHeader: true);
-// The data model. This type will be used from multiple code.
+// The data model. This type will be used from multiple places in the code.
private class InspectedRow
{
[LoadColumn(0)]
@@ -819,7 +819,7 @@ When having tens, hundreds or thousands of contiguous numeric columns (usually o
hasHeader: true
);
- var trainData = reader.Read(trainDatasetFileNamePath));
+    var trainData = reader.Read(trainDatasetFileNamePath);
```
Rules to follow:
@@ -844,11 +844,11 @@ If you have more than a few dozens of columns, those columns should be grouped i
Related sample code is available in the following ML.NET tests:
-Loading data:
-https://github.com/dotnet/machinelearning/blob/master/test/Microsoft.ML.Tests/ScenariosWithDirectInstantiation/TensorflowTests.cs#L375
+Loading data:
+https://github.com/dotnet/machinelearning/blob/main/test/Microsoft.ML.Tests/ScenariosWithDirectInstantiation/TensorflowTests.cs#L375
Data observation class:
-https://github.com/dotnet/machinelearning/blob/master/test/Microsoft.ML.Tests/ScenariosWithDirectInstantiation/TensorflowTests.cs#L700
+https://github.com/dotnet/machinelearning/blob/main/test/Microsoft.ML.Tests/ScenariosWithDirectInstantiation/TensorflowTests.cs#L700
### Generated code for solution with multiple projects (Since v1.0)
@@ -857,12 +857,12 @@ By v1.0, the CLI should be able to generate solutions with multiple projects. Fo
Solution:
- Training project: Console project with model-training ML.NET code
- Class library project with common code (Data/Observation class, Prediction class, etc.)
-- End-user project with model-scoring ML.NET code, such as any of the following (Depending on the supported app-types):
+- End-user project with model-scoring ML.NET code, such as any of the following (Depending on the supported app-types):
- Web app project (ASP.NET Core Razor app)
- Web API project (ASP.NET Core Web API)
- Console project
- Unity project
- - WPF project
+ - WPF project
## Results report (HTML)
@@ -885,7 +885,7 @@ The visualization of results by class is very nice.
- Clustering plotting showing the identified clusters per model.
- Other related plotting for Ranking, Recommendations and Anomaly detection.
-It is important to always show reports and visual charts comparing quality metrics and performance for the best ranked models, meaning having reports with comparisons between multiple models.
+It is important to always show reports and visual charts comparing quality metrics and performance for the best ranked models, meaning having reports with comparisons between multiple models.
Some report results can be comparable to the [results in AutoML for Python](https://docs.microsoft.com/en-us/azure/machine-learning/service/tutorial-auto-train-models#explore-the-results):
@@ -902,17 +902,17 @@ Some report results can be comparable to the [results in AutoML for Python](http
### Results/reports: Viewable and exportable (v1.0 or vNext)
-Results (reports) should be both viewable and exportable. Often you want to do results processing automation so would be nice to export the reports data as a .json file, then later can be viewed by a vieweer.
+Results (reports) should be both viewable and exportable. Often you want to automate results processing, so it would be nice to export the report data as a .json file that can later be viewed by a viewer.
Comparable references from other CLIs:
- TLC exe's results processor and result visualization tab.
-- [Uber Ludwig](https://uber.github.io/ludwig/getting_started/#programmatic-api):
+- [Uber Ludwig](https://uber.github.io/ludwig/getting_started/#programmatic-api):
`ludwig visualize --visualization compare_performance --test_statistics path/to/test_statistics_model_1.json path/to/test_statistics_model_2.json`
# Open questions
-- If there's a 1:1 mapping between the first level of paramenters in the .YAML file and the CLI arguments, how do we deal with expected syntax/naming of the args? Same IDs/text?:
+- If there's a 1:1 mapping between the first level of parameters in the .YAML file and the CLI arguments, how do we deal with expected syntax/naming of the args? Same IDs/text?:
- `--test-dataset` vs. `test_dataset`
- If we were using a sub-command for the ML-Task (such as `mlnet new regression`) instead of a regular argument (such as `mlnet new --ml-task regression`), how can a sub-command be specified in the response file?
diff --git a/docs/specs/mlnet-database-loader/mlnet-database-loader-specs.md b/docs/specs/mlnet-database-loader/mlnet-database-loader-specs.md
index b84d428b3a..3254250a26 100644
--- a/docs/specs/mlnet-database-loader/mlnet-database-loader-specs.md
+++ b/docs/specs/mlnet-database-loader/mlnet-database-loader-specs.md
@@ -1,7 +1,7 @@
# Specs for ML.NET Relational Database Loader
-This specs-doc focuses on the features needed for the base **ML.NET API**, most of all.
-The scenarios related to ML.NET **AutoML API**, the **CLI** and **VS Model Builder** will also be considered and covered in this document by in a significantly less detail since there should be different spec docs for those additional tools and APIs.
+This specs-doc focuses, most of all, on the features needed for the base **ML.NET API**.
+The scenarios related to the ML.NET **AutoML API**, the **CLI** and **VS Model Builder** will also be considered and covered in this document, but in significantly less detail, since there should be separate spec docs for those additional tools and APIs.
# Problem to solve
@@ -11,7 +11,7 @@ ML.NET 1.0 and 1.1 only supports the [IDataView LoadFromEnumerable()](https://do
Within the 'databases scope' problem there are multiple areas.
-The **scope** for this feature is initially limited to **relational databases** with higher priority on SQL Server and Azure SQL Database, but one of the goals is to make this loader/connector compatible with any relational database which is supported by .NET providers.
+The **scope** for this feature is initially limited to **relational databases** with higher priority on SQL Server and Azure SQL Database, but one of the goals is to make this loader/connector compatible with any relational database which is supported by .NET providers.
- Scope to support in this feature:
- Relational Databases, such as:
@@ -49,11 +49,11 @@ The business goals are the following, depending on the possible scenarios:
- Ability for developers to load and automatically stream data from relational databases in order to train/evaluate ML.NET models.
- The code to load from a database should be extremely easy, a single line of code in most cases.
-- Tooling (Model Builder in VS and the CLI) and AutoML API should also support this feature.
+- Tooling (Model Builder in VS and the CLI) and AutoML API should also support this feature.
# Solution
-The solution is to create an ML.NET database loader classes supporting the above scenarios.
+The solution is to create an ML.NET database loader classes supporting the above scenarios.
The main supported features are:
@@ -69,7 +69,7 @@ The main supported features are:
- Cross-validation scenario. Single database source. Internally it'll be split in multiple folds (such as 5 folds) for multiple trains and tests. This should be transparent from a database connection point of view which only needs one database source.
-- **Additional support for AutoML API, CLI and Model Builder:** Loading data from databases should be supported by AutoML API, Model Builder in VS and the ML.NET CLI.
+- **Additional support for AutoML API, CLI and Model Builder:** Loading data from databases should be supported by AutoML API, Model Builder in VS and the ML.NET CLI.
--------------------------------------
@@ -101,7 +101,7 @@ The way to support those frameworks would be by creating a **.NET Standard 2.0 l
- PostgreSQL providers - Test on:
- Npgsql open source ADO.NET Data Provider for PostgreSQL
- This ML.NET database loader won't probably need Entity Framework, but for a relationship, see [EF providers](https://docs.microsoft.com/en-us/ef/core/providers/) for a relationship to ADO.NET providers.
+  This ML.NET database loader probably won't need Entity Framework, but see [EF providers](https://docs.microsoft.com/en-us/ef/core/providers/) for their relationship to ADO.NET providers.
- **P2 RDBMS support/tested priorities:**
@@ -112,8 +112,8 @@ The way to support those frameworks would be by creating a **.NET Standard 2.0 l
- Data Provider for EntityClient Provider (Entity Data Model (EDM))
- **CRITICAL: Implement support for 'handle and continue' after transient errors happening in Azure SQL Database (or any DB):** When using Azure SQL Database as the source of your training database, because databases in Azure SQL DB can be moved to different servers across the internal Azure SQL Database cluster, transient failures (usually for just a few seconds) in the form of connectivity exceptions can happen. Even further, by design in Azure SQL Database, if a process is blocking too many resources in SQL, sometimes the database connection can be thrown away in favor of other customers/databases.
-There are several strategies in order to handle database transient errors (see [Working with SQL Database connection issues and transient errors](https://docs.microsoft.com/en-us/azure/sql-database/sql-database-connectivity-issues)) like doing a 'Retry strategy' and start with a new connection again. But that strategy is only okay for short/fast queries. That simple strategy which throws away all the progress made and start the same query again wouldn't be good when training with a very large table because it could mean that the training operation "never finishes" if you have at least one transient error on every "training try".
-We'll need to come up with a reasonably general pattern (probably something that reasons about primary keys), but this scenario is not simple.
+There are several strategies for handling database transient errors (see [Working with SQL Database connection issues and transient errors](https://docs.microsoft.com/en-us/azure/sql-database/sql-database-connectivity-issues)), such as a 'retry strategy' that starts again with a new connection. But that strategy is only okay for short/fast queries: a simple retry that throws away all the progress made and starts the same query again wouldn't be good when training with a very large table, because it could mean that the training operation "never finishes" if at least one transient error happens on every "training try".
+We'll need to come up with a reasonably general pattern (probably something that reasons about primary keys), but this scenario is not simple (see the sketch below).
See [related issue](https://github.com/dotnet/machinelearning-samples/issues/507)
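
A minimal 'resume from the last key' sketch of that idea (illustrative only, not a shipped API; `IsTransient` is a hypothetical helper, and `myConnString` and the table/column names are the placeholders used elsewhere in this doc):

```
using System.Data.SqlClient;

long lastId = 0; // highest primary key value that was successfully read
var finished = false;
while (!finished)
{
    try
    {
        using var conn = new SqlConnection(myConnString);
        conn.Open();
        using var cmd = new SqlCommand(
            "SELECT Id, Features, Label FROM TrainingDataTable " +
            "WHERE Id > @lastId ORDER BY Id", conn);
        cmd.Parameters.AddWithValue("@lastId", lastId);
        using var reader = cmd.ExecuteReader();
        while (reader.Read())
        {
            lastId = reader.GetInt64(0);
            // ...hand the row to the training cursor...
        }
        finished = true; // full scan completed
    }
    catch (SqlException ex) when (IsTransient(ex))
    {
        // Transient failure: reconnect and resume from lastId
        // instead of restarting the whole query from scratch.
    }
}
```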
@@ -123,17 +123,17 @@ We'll need to come up with a reasonably general pattern (probably something that
2. RDBMS-server running database and .NET code using ML.NET code
-- **NuGet packages and libraries design**:
+- **NuGet packages and libraries design**:
The implementation of this feature should be packaged following the approach below, which is aligned and consistent with the current approach used by the .NET Framework and .NET Core in System.Data.Common and System.Data.SqlClient:
 - Implementation code with NO dependencies on specific database providers (such as SQL Server, Oracle, MySQL, etc.) will be packaged in the same NuGet package and library as the existing TextLoader-related classes, which is in the Microsoft.ML.Data library. This code is basically the foundational API for the Database loader, where the user has to provide any specific database connection (so dependencies are taken in user's code).
 - Implementation code WITH dependencies on data providers (such as SQL Server, Oracle, MySQL, etc.), which might be created when adding additional convenient APIs where the user only needs to provide a connection string and table-name or SQL statement, will be placed in a segregated class library and NuGet package, so that ML.NET core packages don't depend on specific database providers.
-
+
- **Support for sparse data**: The database loader should support sparse data, at least up to the maximum number of columns in SQL Server (1,024 columns per nonwide table, 30,000 columns per wide table or 4,096 columns per SELECT statement). A sketch of the .NET-side representation follows below.
- ML.NET supports sparse data such as in the following example using a [sparse matrix](https://en.wikipedia.org/wiki/Sparse_matrix) of thousands or even millions of columns even when in this example only 200 columns have real data (sparse data):
+ ML.NET supports sparse data such as in the following example using a [sparse matrix](https://en.wikipedia.org/wiki/Sparse_matrix) of thousands or even millions of columns even when in this example only 200 columns have real data (sparse data):
- - [ML.NET sample using millions of columns with sparse data](https://github.com/dotnet/machinelearning-samples/tree/master/samples/csharp/getting-started/LargeDatasets)
+ - [ML.NET sample using millions of columns with sparse data](https://github.com/dotnet/machinelearning-samples/tree/main/samples/csharp/getting-started/LargeDatasets)
SQL Server supports [Sparse columns](https://docs.microsoft.com/en-us/sql/relational-databases/tables/use-sparse-columns?view=sql-server-2017), however, it is just a way to optimize storage for null values. It still needs to have a real column created in the table per each logical column (i.e. 1,000 columns defined in the SQL table) even when it might not have data.
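
On the ML.NET side, a wide sparse feature set is already compact to declare: a single vector-typed property can stand in for thousands of logical columns. A sketch (the class name and column counts are illustrative):

```
using Microsoft.ML.Data;

public class SparseInputRow
{
    [LoadColumn(0)]
    public float Label { get; set; }

    // One vector property standing in for up to 30,000 logical columns;
    // most entries can be zero (sparse) without a per-column cost in .NET.
    [LoadColumn(1, 30000)]
    [VectorType(30000)]
    public float[] Features { get; set; }
}
```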
@@ -180,7 +180,7 @@ Example code using it:
MLContext mlContext = new MLContext();
//Example loading from a SQL Server or SQL Azure database with a SQL query sentence
-IDataView trainingDataView = mlContext.Data.LoadFromDbSqlQuery(connString: myConnString, sqlQuerySentence: "Select * from InputMLModelDataset where InputMLModelDataset.CompanyName = 'MSFT'");
+IDataView trainingDataView = mlContext.Data.LoadFromDbSqlQuery(connString: myConnString, sqlQuerySentence: "Select * from InputMLModelDataset where InputMLModelDataset.CompanyName = 'MSFT'");
```
**2. (Foundational method) Data loading from a database with a System.Data.IDataReader object:**
@@ -229,8 +229,8 @@ Example code using it:
MLContext mlContext = new MLContext();
//Example loading from a SQL Server or SQL Azure database table
-IDataView trainingDataView = mlContext.Data.LoadFromDbTable(connString: myConnString,
- tableName: "TrainingDataTable");
+IDataView trainingDataView = mlContext.Data.LoadFromDbTable(connString: myConnString,
+ tableName: "TrainingDataTable");
```
**4. ('Nice to have') Data loading from a database view:**
@@ -248,8 +248,8 @@ Example code using it:
MLContext mlContext = new MLContext();
//Example loading from a SQL Server or SQL Azure database view
-IDataView trainingDataView = mlContext.Data.LoadFromDbView(connString: myConnString,
- viewName: "TrainingDatabaseView");
+IDataView trainingDataView = mlContext.Data.LoadFromDbView(connString: myConnString,
+ viewName: "TrainingDatabaseView");
```
## Support connectivity from .NET assemblies embedded into the RDBMS server
@@ -258,7 +258,7 @@ As introduced, the database loader should not only support remote/network connec
The only difference is the way you define the connection string, which simply provides a **'context' string** (instead of server name, user, etc. when using the network), such as:
-- Code example running on [SQL Server CLR integration](https://docs.microsoft.com/en-us/sql/relational-databases/clr-integration/clr-integration-overview?view=sql-server-2017)
+- Code example running on [SQL Server CLR integration](https://docs.microsoft.com/en-us/sql/relational-databases/clr-integration/clr-integration-overview?view=sql-server-2017)
```
//SQL Server
@@ -273,7 +273,7 @@ The only difference is the way you define the connection string, which simply pr
```
//Oracle
OracleConnection con = new OracleConnection();
- con.ConnectionString = "context connection=true";
+ con.ConnectionString = "context connection=true";
```
- See here an [example of a C# stored procedure in Oracle](https://www.oracle.com/technetwork/articles/dotnet/williams-sps-089817.html?printOnly=1)
@@ -286,7 +286,7 @@ ML.NET won't implement components creating concrete database objects such as **C
Also, note that the fact that ML.NET is supported within user components using CLR integration doesn't mean that the user can do it on any RDBMS. Some RDBMS, such as Azure SQL Database (single databases and elastic pools), don't support that feature. Others, such as SQL Server on-premises, Azure SQL Database Managed Instances, Oracle, etc., do support it.
-For instance:
+For instance:
- [Feature comparison: Azure SQL Database versus SQL Server](https://docs.microsoft.com/en-us/azure/sql-database/sql-database-features)
@@ -295,7 +295,7 @@ For instance:
There can be two different approaches here:
- Use similar input data classes/types to ML.NET 1.x input data classes
-- Use similar input data classes/types to Entity Framework POCO entity data model classes
+- Use similar input data classes/types to Entity Framework POCO entity data model classes
## Approach A: Using ML.NET input data classes
@@ -358,7 +358,7 @@ This last approach is similar to the Entity Framework POCO entity class approach
## Approach B: Using Entity Framework POCO entity data model classes
-When using Entity Framework, a POCO entity is a class that doesn't depend on any framework-specific base class. This is also why they are persistence-ignorant objects following the [persistence ignorance principle](https://deviq.com/persistence-ignorance/).
+When using Entity Framework, a POCO entity is a class that doesn't depend on any framework-specific base class. This is also why they are persistence-ignorant objects following the [persistence ignorance principle](https://deviq.com/persistence-ignorance/).
It is like any other normal .NET CLR class, which is why it is called POCO ("Plain Old CLR Object").
@@ -373,7 +373,7 @@ public class ModelInputData
public byte[] Photo { get; set; }
public decimal Height { get; set; }
public float Weight { get; set; }
-
+
public StudentAddress StudentAddress { get; set; }
public Grade Grade { get; set; }
}
@@ -387,10 +387,10 @@ Cons:
- EF does not support loading many columns at the same time into a single vector column.
- EF requires a mandatory ID property in the POCO class
- ML.NET might not support certain .NET types allowed by EF POCO classes (i.e. DateTime, etc.).
-- ML.NET doesn't support embedded/navigation/relationship entity types such as `StudentAddress` in the sample above, neither complex-types in EF.
+- ML.NET doesn't support embedded/navigation/relationship entity types such as `StudentAddress` in the sample above, nor complex types in EF.
- Input data classes won't be consistent/similar to ML.NET input data classes when using dataset files.
-### Selected approach for input data class when reading from a database
+### Selected approach for input data class when reading from a database
*TO BE DISCUSSED/CONFIRMED:*
@@ -406,7 +406,7 @@ Supporting the same scope of POCO entities supported by entity Framework seems p
Sample CLI command:
```
-> mlnet auto-train --task regression --db-conn-string "YOUR-DATABASE-CONNECTION-STRING" --db-table "MyTrainingDbTable" --label-column-name Price
+> mlnet auto-train --task regression --db-conn-string "YOUR-DATABASE-CONNECTION-STRING" --db-table "MyTrainingDbTable" --label-column-name Price
```
**2. CLI training from a database view:**
@@ -414,7 +414,7 @@ Sample CLI command:
Sample CLI command:
```
-> mlnet auto-train --task regression --db-conn-string "YOUR-DATABASE-CONNECTION-STRING" --db-view "MyTrainingDbView" --label-column-name Price
+> mlnet auto-train --task regression --db-conn-string "YOUR-DATABASE-CONNECTION-STRING" --db-view "MyTrainingDbView" --label-column-name Price
```
**3. CLI training from a database with a SQL query sentence:**
@@ -422,7 +422,7 @@ Sample CLI command:
Sample CLI command:
```
-> mlnet auto-train --task regression --db-conn-string "YOUR-DATABASE-CONNECTION-STRING" --sql-query "SELECT * FROM MyTrainingDbTable WHERE Company = 'MSFT'" --label-column-name Price
+> mlnet auto-train --task regression --db-conn-string "YOUR-DATABASE-CONNECTION-STRING" --sql-query "SELECT * FROM MyTrainingDbTable WHERE Company = 'MSFT'" --label-column-name Price
```
@@ -434,10 +434,10 @@ For ML.NET AutoML the C# code to use is the same than for regular ML.NET code si
MLContext mlContext = new MLContext();
//Load train dataset from a database table
-IDataView trainDataView = mlContext.Data.LoadFromDatabaseTable(connString: myConnString, tableName: "MyTrainDataTable");
+IDataView trainDataView = mlContext.Data.LoadFromDatabaseTable(connString: myConnString, tableName: "MyTrainDataTable");
//Load test dataset from a database table
-IDataView testDataView = mlContext.Data.LoadFromDatabaseTable(connString: myConnString, tableName: "MyTestDataTable");
+IDataView testDataView = mlContext.Data.LoadFromDatabaseTable(connString: myConnString, tableName: "MyTestDataTable");
// Run AutoML experiment
var progressHandler = new BinaryExperimentProgressHandler();
@@ -447,7 +447,7 @@ ExperimentResult experimentResult = mlContext.Auto(
.Execute(trainingDataView, progressHandler: progressHandler);
```
-Therefore, most of the code above is regular AutoML API code and the only pieces of code using the DatabaseLoader are using the same API than when using regular ML.NET code for loading data from a database.
+Therefore, most of the code above is regular AutoML API code; the only pieces that use the DatabaseLoader rely on the same API as regular ML.NET code for loading data from a database (see the sketch below).
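For reference, the DatabaseLoader that ultimately shipped in ML.NET 1.4+ exposes this scenario through `CreateDatabaseLoader` and `DatabaseSource` rather than a `LoadFromDatabaseTable` extension. A minimal sketch, assuming the connection string and the `ModelInputData` class are defined elsewhere:

```
using System.Data.SqlClient;
using Microsoft.ML;
using Microsoft.ML.Data;

MLContext mlContext = new MLContext();

// The loader's schema is inferred from the flat input class.
DatabaseLoader loader = mlContext.Data.CreateDatabaseLoader<ModelInputData>();

// DatabaseSource pairs an ADO.NET provider factory with a connection string and a command text.
DatabaseSource dbSource = new DatabaseSource(
    SqlClientFactory.Instance,
    myConnString,                      // assumed to be defined elsewhere
    "SELECT * FROM MyTrainDataTable");

IDataView trainDataView = loader.Load(dbSource);
```

The resulting `IDataView` can then be fed to the AutoML `Execute` call exactly as in the snippet above.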
# Model Builder for Visual Studio mock UI samples
@@ -458,7 +458,7 @@ TBD
# Open questions
-- QUESTION 1 TBD:
+- QUESTION 1 TBD:
# References
diff --git a/build/BranchInfo.props b/eng/BranchInfo.props
similarity index 95%
rename from build/BranchInfo.props
rename to eng/BranchInfo.props
index 5ed7f0fb5f..fe62447068 100644
--- a/build/BranchInfo.props
+++ b/eng/BranchInfo.props
@@ -30,11 +30,11 @@
1
5
- 3
+ 6
0
17
- 3
+ 6
diff --git a/eng/Build.props b/eng/Build.props
new file mode 100644
index 0000000000..de6a651461
--- /dev/null
+++ b/eng/Build.props
@@ -0,0 +1,17 @@
+
+
+
+
+
+ true
+ false
+
+
+
+
+
+
+
+
+
+
diff --git a/build/ExternalBenchmarkDataFiles.props b/eng/ExternalBenchmarkDataFiles.props
similarity index 100%
rename from build/ExternalBenchmarkDataFiles.props
rename to eng/ExternalBenchmarkDataFiles.props
diff --git a/eng/Signing.props b/eng/Signing.props
new file mode 100644
index 0000000000..5213afc61e
--- /dev/null
+++ b/eng/Signing.props
@@ -0,0 +1,14 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/build/TensorflowMetaFiles.props b/eng/TensorflowMetaFiles.props
similarity index 100%
rename from build/TensorflowMetaFiles.props
rename to eng/TensorflowMetaFiles.props
diff --git a/eng/Tools.props b/eng/Tools.props
new file mode 100644
index 0000000000..a0890081f2
--- /dev/null
+++ b/eng/Tools.props
@@ -0,0 +1,9 @@
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/eng/Version.Details.xml b/eng/Version.Details.xml
new file mode 100644
index 0000000000..e348d0998a
--- /dev/null
+++ b/eng/Version.Details.xml
@@ -0,0 +1,67 @@
+
+
+
+
+ https://dev.azure.com/dnceng/internal/_git/dotnet-symuploader
+ 9bdfdb0af37d2e93bdecf238a8a51c0a965444d6
+
+
+
+
+ https://github.com/dotnet/arcade
+ a81e6e87cf21837abfde2da6eb9b057bea9f49fc
+
+
+ https://github.com/dotnet/arcade
+ a81e6e87cf21837abfde2da6eb9b057bea9f49fc
+
+
+ https://github.com/dotnet/arcade
+ a81e6e87cf21837abfde2da6eb9b057bea9f49fc
+
+
+ https://github.com/dotnet/arcade
+ a81e6e87cf21837abfde2da6eb9b057bea9f49fc
+
+
+ https://github.com/dotnet/arcade
+ a81e6e87cf21837abfde2da6eb9b057bea9f49fc
+
+
+ https://github.com/dotnet/arcade-services
+ 869869342f1ec338de96adcea6e003b61f195256
+
+
+ https://github.com/dotnet/arcade-services
+ 592654f4a6855d7738a7c7c780355ac54457fdae
+
+
+ https://github.com/dotnet/xharness
+ 3b64ab7ab565cfd19fe7102e3d76271f16f0fc6d
+
+
+ https://github.com/dotnet/roslyn
+ d57cda76c2b76cff75487a085d289cfadd99150b
+
+
+ https://github.com/dotnet/sourcelink
+ 8a3edd1902dbfe3adba65f22e3bb7aa2cc73e97f
+
+
+ https://github.com/dotnet/sourcelink
+ 8a3edd1902dbfe3adba65f22e3bb7aa2cc73e97f
+
+
+ https://github.com/dotnet/symreader-converter
+ c5ba7c88f92e2dde156c324a8c8edc04d9fa4fe0
+
+
+ https://github.com/dotnet/symreader-converter
+ c5ba7c88f92e2dde156c324a8c8edc04d9fa4fe0
+
+
+ https://github.com/dotnet/xliff-tasks
+ 975065e08307a459dc2649b1c852f5c4cafd2f91
+
+
+
diff --git a/eng/Versions.props b/eng/Versions.props
new file mode 100644
index 0000000000..96685cc388
--- /dev/null
+++ b/eng/Versions.props
@@ -0,0 +1,141 @@
+
+
+
+
+
+ true
+ 1.5.6
+ preview
+ 1.0.0.0
+
+
+ 10.0.3
+ 4.4.0
+ 4.3.0
+ 4.7.1
+
+
+ 3.10.1
+ 2.2.3
+ 2.1.0
+ 1.6.0
+ 0.0.0.9
+ 2.1.3
+ 4.5.0
+ 4.5.0
+ 4.5.0
+ 2.3.1
+ 2
+ 0.20.1
+ 3.3.1
+ 1.0.0-beta.21155.3
+ 1.0.0-beta.21155.3
+ 2.0.0
+ 4.3.0
+ 4.5.0
+ 1.2.0
+
+
+ 1.0.0-beta-62824-02
+ 1.9.0
+ 1.2.1
+ 4.3.6
+ 1.0.0-beta.19225.5
+ 1.1.0-beta-20206-02
+ 3.0.0-preview4-04926-01
+
+
+ 0.12.0
+ 1.0.1-beta1.20374.2
+ 3.0.1
+ 0.0.6-test
+ 0.0.7-test
+ 0.0.13-test
+ 0.0.6-test
+ 4.6.1
+ 1.2.7
+ 1.0.112.2
+
+
+ false
+ true
+
+ 2.2.1
+ 1.0.2
+ 4.0.0-rc3-24214-00
+ 1.10.1
+ 0.25.2
+ 2.0.8
+ 4.3.4
+ 12.3.0
+ 3.0.0
+ 1.3.1
+ 0.1.0
+ 15.7.179
+ 15.7.179
+ 15.7.179
+ 15.7.179
+ 2.6.3
+ 2.9.0
+ 3.4.0
+ 3.19.8
+ 2.3.13
+ 2.1.0
+ 2.0.0
+ 2.1.0
+ 3.1.0
+ 3.8.0-3.20460.2
+ 4.8.3
+ 5.3.0.1
+ 2.3.0
+ 9.0.1
+ 4.7.0
+ 4.4.0
+ 5.6.0-preview.2.6489
+ 0.32.0
+ 2.2.143
+ 3.0.0
+ 4.5.0
+ 1.5.0
+ 4.0.0
+ 4.3.0
+ 4.5.0
+ 4.3.0
+ 4.5.3
+ 4.5.0
+ 1.6.0
+ 4.7.0
+ 4.3.0
+ 4.5.0
+ 4.5.2
+ 4.4.0
+ 8.5.0
+ 2.4.0
+ 2.0.3
+ 2.4.0
+ 5.0.0-beta.20461.7
+ 5.0.0-beta.20461.7
+ 1.22.0
+ 1.1.2
+ 2.0.0
+ 1.6.0
+ 1.0.0
+ 2.0.4
+ 1.1.0-beta2-19575-01
+ 1.1.0-beta2-19575-01
+ 1.7.0
+ 1.1.0-beta.20258.6
+ 1.1.0-beta-20464-02
+ 1.1.0-beta-20464-02
+ 5.0.0-beta.20461.7
+ 1.0.0-beta.20420.1
+ 1.1.0-beta.20461.2
+ 1.0.0-prerelease.20457.1
+ 1.1.145102
+
+
diff --git a/eng/common/CIBuild.cmd b/eng/common/CIBuild.cmd
new file mode 100644
index 0000000000..56c2f25ac2
--- /dev/null
+++ b/eng/common/CIBuild.cmd
@@ -0,0 +1,2 @@
+@echo off
+powershell -ExecutionPolicy ByPass -NoProfile -command "& """%~dp0Build.ps1""" -restore -build -test -sign -pack -publish -ci %*"
\ No newline at end of file
diff --git a/eng/common/PSScriptAnalyzerSettings.psd1 b/eng/common/PSScriptAnalyzerSettings.psd1
new file mode 100644
index 0000000000..4c1ea7c98e
--- /dev/null
+++ b/eng/common/PSScriptAnalyzerSettings.psd1
@@ -0,0 +1,11 @@
+@{
+ IncludeRules=@('PSAvoidUsingCmdletAliases',
+ 'PSAvoidUsingWMICmdlet',
+ 'PSAvoidUsingPositionalParameters',
+ 'PSAvoidUsingInvokeExpression',
+ 'PSUseDeclaredVarsMoreThanAssignments',
+ 'PSUseCmdletCorrectly',
+ 'PSStandardDSCFunctionsInResource',
+ 'PSUseIdenticalMandatoryParametersForDSC',
+ 'PSUseIdenticalParametersForDSC')
+}
\ No newline at end of file
diff --git a/eng/common/README.md b/eng/common/README.md
new file mode 100644
index 0000000000..ff49c37152
--- /dev/null
+++ b/eng/common/README.md
@@ -0,0 +1,28 @@
+# Don't touch this folder
+
+ uuuuuuuuuuuuuuuuuuuu
+ u" uuuuuuuuuuuuuuuuuu "u
+ u" u$$$$$$$$$$$$$$$$$$$$u "u
+ u" u$$$$$$$$$$$$$$$$$$$$$$$$u "u
+ u" u$$$$$$$$$$$$$$$$$$$$$$$$$$$$u "u
+ u" u$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$u "u
+ u" u$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$u "u
+ $ $$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$ $
+ $ $$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$ $
+ $ $$$" ... "$... ...$" ... "$$$ ... "$$$ $
+ $ $$$u `"$$$$$$$ $$$ $$$$$ $$ $$$ $$$ $
+ $ $$$$$$uu "$$$$ $$$ $$$$$ $$ """ u$$$ $
+ $ $$$""$$$ $$$$ $$$u "$$$" u$$ $$$$$$$$ $
+ $ $$$$....,$$$$$..$$$$$....,$$$$..$$$$$$$$ $
+ $ $$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$ $
+ "u "$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$" u"
+ "u "$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$" u"
+ "u "$$$$$$$$$$$$$$$$$$$$$$$$$$$$" u"
+ "u "$$$$$$$$$$$$$$$$$$$$$$$$" u"
+ "u "$$$$$$$$$$$$$$$$$$$$" u"
+ "u """""""""""""""""" u"
+ """"""""""""""""""""
+
+!!! Changes made in this directory are subject to being overwritten by automation !!!
+
+The files in this directory are shared by all Arcade repos and managed by automation. If you need to make changes to these files, open an issue or submit a pull request to https://github.com/dotnet/arcade first.
diff --git a/eng/common/SetupNugetSources.ps1 b/eng/common/SetupNugetSources.ps1
new file mode 100644
index 0000000000..bb3617133f
--- /dev/null
+++ b/eng/common/SetupNugetSources.ps1
@@ -0,0 +1,160 @@
+# This file is a temporary workaround for internal builds to be able to restore from private AzDO feeds.
+# This file should be removed as part of this issue: https://github.com/dotnet/arcade/issues/4080
+#
+# What the script does is iterate over all package sources in the pointed NuGet.config and add a credential entry
+# under for each Maestro managed private feed. Two additional credential
+# entries are also added for the two private static internal feeds: dotnet3-internal and dotnet3-internal-transport.
+#
+# This script needs to be called in every job that will restore packages and which the base repo has
+# private AzDO feeds in the NuGet.config.
+#
+# See example YAML call for this script below. Note the use of the variable `$(dn-bot-dnceng-artifact-feeds-rw)`
+# from the AzureDevOps-Artifact-Feeds-Pats variable group.
+#
+# Any disabledPackageSources entries which start with "darc-int" will be re-enabled as part of this script executing
+#
+# - task: PowerShell@2
+# displayName: Setup Private Feeds Credentials
+# condition: eq(variables['Agent.OS'], 'Windows_NT')
+# inputs:
+# filePath: $(Build.SourcesDirectory)/eng/common/SetupNugetSources.ps1
+# arguments: -ConfigFile $(Build.SourcesDirectory)/NuGet.config -Password $Env:Token
+# env:
+# Token: $(dn-bot-dnceng-artifact-feeds-rw)
+
+[CmdletBinding()]
+param (
+ [Parameter(Mandatory = $true)][string]$ConfigFile,
+ [Parameter(Mandatory = $true)][string]$Password
+)
+
+$ErrorActionPreference = "Stop"
+Set-StrictMode -Version 2.0
+[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12
+
+. $PSScriptRoot\tools.ps1
+
+# Add source entry to PackageSources
+function AddPackageSource($sources, $SourceName, $SourceEndPoint, $creds, $Username, $Password) {
+ $packageSource = $sources.SelectSingleNode("add[@key='$SourceName']")
+
+ if ($packageSource -eq $null)
+ {
+ $packageSource = $doc.CreateElement("add")
+ $packageSource.SetAttribute("key", $SourceName)
+ $packageSource.SetAttribute("value", $SourceEndPoint)
+ $sources.AppendChild($packageSource) | Out-Null
+ }
+ else {
+ Write-Host "Package source $SourceName already present."
+ }
+
+ AddCredential -Creds $creds -Source $SourceName -Username $Username -Password $Password
+}
+
+# Add a credential node for the specified source
+function AddCredential($creds, $source, $username, $password) {
+ # Looks for credential configuration for the given SourceName. Create it if none is found.
+ $sourceElement = $creds.SelectSingleNode($Source)
+ if ($sourceElement -eq $null)
+ {
+ $sourceElement = $doc.CreateElement($Source)
+ $creds.AppendChild($sourceElement) | Out-Null
+ }
+
+ # Add the <Username> node to the credential if none is found.
+ $usernameElement = $sourceElement.SelectSingleNode("add[@key='Username']")
+ if ($usernameElement -eq $null)
+ {
+ $usernameElement = $doc.CreateElement("add")
+ $usernameElement.SetAttribute("key", "Username")
+ $sourceElement.AppendChild($usernameElement) | Out-Null
+ }
+ $usernameElement.SetAttribute("value", $Username)
+
+ # Add the <ClearTextPassword> to the credential if none is found.
+ # Add it as a clear text because there is no support for encrypted ones in non-windows .Net SDKs.
+ # -> https://github.com/NuGet/Home/issues/5526
+ $passwordElement = $sourceElement.SelectSingleNode("add[@key='ClearTextPassword']")
+ if ($passwordElement -eq $null)
+ {
+ $passwordElement = $doc.CreateElement("add")
+ $passwordElement.SetAttribute("key", "ClearTextPassword")
+ $sourceElement.AppendChild($passwordElement) | Out-Null
+ }
+ $passwordElement.SetAttribute("value", $Password)
+}
+
+function InsertMaestroPrivateFeedCredentials($Sources, $Creds, $Username, $Password) {
+ $maestroPrivateSources = $Sources.SelectNodes("add[contains(@key,'darc-int')]")
+
+ Write-Host "Inserting credentials for $($maestroPrivateSources.Count) Maestro's private feeds."
+
+ ForEach ($PackageSource in $maestroPrivateSources) {
+ Write-Host "`tInserting credential for Maestro's feed:" $PackageSource.Key
+ AddCredential -Creds $creds -Source $PackageSource.Key -Username $Username -Password $Password
+ }
+}
+
+function EnablePrivatePackageSources($DisabledPackageSources) {
+ $maestroPrivateSources = $DisabledPackageSources.SelectNodes("add[contains(@key,'darc-int')]")
+ ForEach ($DisabledPackageSource in $maestroPrivateSources) {
+ Write-Host "`tEnsuring private source '$($DisabledPackageSource.key)' is enabled"
+ $DisabledPackageSource.SetAttribute("value", "false")
+ }
+}
+
+if (!(Test-Path $ConfigFile -PathType Leaf)) {
+ Write-PipelineTelemetryError -Category 'Build' -Message "Eng/common/SetupNugetSources.ps1 returned a non-zero exit code. Couldn't find the NuGet config file: $ConfigFile"
+ ExitWithExitCode 1
+}
+
+if (!$Password) {
+ Write-PipelineTelemetryError -Category 'Build' -Message 'Eng/common/SetupNugetSources.ps1 returned a non-zero exit code. Please supply a valid PAT'
+ ExitWithExitCode 1
+}
+
+# Load NuGet.config
+$doc = New-Object System.Xml.XmlDocument
+$filename = (Get-Item $ConfigFile).FullName
+$doc.Load($filename)
+
+# Get reference to <packageSources> or create one if none exist already
+$sources = $doc.DocumentElement.SelectSingleNode("packageSources")
+if ($sources -eq $null) {
+ $sources = $doc.CreateElement("packageSources")
+ $doc.DocumentElement.AppendChild($sources) | Out-Null
+}
+
+# Looks for a <packageSourceCredentials> node. Create it if none is found.
+$creds = $doc.DocumentElement.SelectSingleNode("packageSourceCredentials")
+if ($creds -eq $null) {
+ $creds = $doc.CreateElement("packageSourceCredentials")
+ $doc.DocumentElement.AppendChild($creds) | Out-Null
+}
+
+# Check for disabledPackageSources; we'll enable any darc-int ones we find there
+$disabledSources = $doc.DocumentElement.SelectSingleNode("disabledPackageSources")
+if ($disabledSources -ne $null) {
+ Write-Host "Checking for any darc-int disabled package sources in the disabledPackageSources node"
+ EnablePrivatePackageSources -DisabledPackageSources $disabledSources
+}
+
+$userName = "dn-bot"
+
+# Insert credential nodes for Maestro's private feeds
+InsertMaestroPrivateFeedCredentials -Sources $sources -Creds $creds -Username $userName -Password $Password
+
+$dotnet31Source = $sources.SelectSingleNode("add[@key='dotnet3.1']")
+if ($dotnet31Source -ne $null) {
+ AddPackageSource -Sources $sources -SourceName "dotnet3.1-internal" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal/nuget/v2" -Creds $creds -Username $userName -Password $Password
+ AddPackageSource -Sources $sources -SourceName "dotnet3.1-internal-transport" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal-transport/nuget/v2" -Creds $creds -Username $userName -Password $Password
+}
+
+$dotnet5Source = $sources.SelectSingleNode("add[@key='dotnet5']")
+if ($dotnet5Source -ne $null) {
+ AddPackageSource -Sources $sources -SourceName "dotnet5-internal" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/internal/_packaging/dotnet5-internal/nuget/v2" -Creds $creds -Username $userName -Password $Password
+ AddPackageSource -Sources $sources -SourceName "dotnet5-internal-transport" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/internal/_packaging/dotnet5-internal-transport/nuget/v2" -Creds $creds -Username $userName -Password $Password
+}
+
+$doc.Save($filename)
diff --git a/eng/common/SetupNugetSources.sh b/eng/common/SetupNugetSources.sh
new file mode 100755
index 0000000000..ef33382954
--- /dev/null
+++ b/eng/common/SetupNugetSources.sh
@@ -0,0 +1,167 @@
+#!/usr/bin/env bash
+
+# This file is a temporary workaround for internal builds to be able to restore from private AzDO feeds.
+# This file should be removed as part of this issue: https://github.com/dotnet/arcade/issues/4080
+#
+# What the script does is iterate over all package sources in the pointed NuGet.config and add a credential entry
+# under <packageSourceCredentials> for each Maestro's managed private feed. Two additional credential
+# entries are also added for the two private static internal feeds: dotnet3-internal and dotnet3-internal-transport.
+#
+# This script needs to be called in every job that will restore packages and which the base repo has
+# private AzDO feeds in the NuGet.config.
+#
+# See example YAML call for this script below. Note the use of the variable `$(dn-bot-dnceng-artifact-feeds-rw)`
+# from the AzureDevOps-Artifact-Feeds-Pats variable group.
+#
+# Any disabledPackageSources entries which start with "darc-int" will be re-enabled as part of this script executing.
+#
+# - task: Bash@3
+# displayName: Setup Private Feeds Credentials
+# inputs:
+# filePath: $(Build.SourcesDirectory)/eng/common/SetupNugetSources.sh
+# arguments: $(Build.SourcesDirectory)/NuGet.config $Token
+# condition: ne(variables['Agent.OS'], 'Windows_NT')
+# env:
+# Token: $(dn-bot-dnceng-artifact-feeds-rw)
+
+ConfigFile=$1
+CredToken=$2
+NL='\n'
+TB=' '
+
+source="${BASH_SOURCE[0]}"
+
+# resolve $source until the file is no longer a symlink
+while [[ -h "$source" ]]; do
+ scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
+ source="$(readlink "$source")"
+ # if $source was a relative symlink, we need to resolve it relative to the path where the
+ # symlink file was located
+ [[ $source != /* ]] && source="$scriptroot/$source"
+done
+scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
+
+. "$scriptroot/tools.sh"
+
+if [ ! -f "$ConfigFile" ]; then
+ Write-PipelineTelemetryError -Category 'Build' "Error: Eng/common/SetupNugetSources.sh returned a non-zero exit code. Couldn't find the NuGet config file: $ConfigFile"
+ ExitWithExitCode 1
+fi
+
+if [ -z "$CredToken" ]; then
+ Write-PipelineTelemetryError -category 'Build' "Error: Eng/common/SetupNugetSources.sh returned a non-zero exit code. Please supply a valid PAT"
+ ExitWithExitCode 1
+fi
+
+if [[ `uname -s` == "Darwin" ]]; then
+ NL=$'\\\n'
+ TB=''
+fi
+
+# Ensure there is a <packageSources>...</packageSources> section.
+grep -i "<packageSources>" $ConfigFile
+if [ "$?" != "0" ]; then
+ echo "Adding <packageSources>...</packageSources> section."
+ ConfigNodeHeader="<configuration>"
+ PackageSourcesTemplate="${TB}<packageSources>${NL}${TB}</packageSources>"
+
+ sed -i.bak "s|$ConfigNodeHeader|$ConfigNodeHeader${NL}$PackageSourcesTemplate|" $ConfigFile
+fi
+
+# Ensure there is a <packageSourceCredentials>...</packageSourceCredentials> section.
+grep -i "<packageSourceCredentials>" $ConfigFile
+if [ "$?" != "0" ]; then
+ echo "Adding <packageSourceCredentials>...</packageSourceCredentials> section."
+
+ PackageSourcesNodeFooter="</packageSources>"
+ PackageSourceCredentialsTemplate="${TB}<packageSourceCredentials>${NL}${TB}</packageSourceCredentials>"
+
+ sed -i.bak "s|$PackageSourcesNodeFooter|$PackageSourcesNodeFooter${NL}$PackageSourceCredentialsTemplate|" $ConfigFile
+fi
+
+PackageSources=()
+
+# Ensure dotnet3.1-internal and dotnet3.1-internal-transport are in the packageSources if the public dotnet3.1 feeds are present
+grep -i ""
+
+ sed -i.bak "s|$PackageSourcesNodeFooter|$PackageSourceTemplate${NL}$PackageSourcesNodeFooter|" $ConfigFile
+ fi
+ PackageSources+=('dotnet3.1-internal')
+
+ grep -i "" $ConfigFile
+ if [ "$?" != "0" ]; then
+ echo "Adding dotnet3.1-internal-transport to the packageSources."
+ PackageSourcesNodeFooter=""
+ PackageSourceTemplate="${TB}"
+
+ sed -i.bak "s|$PackageSourcesNodeFooter|$PackageSourceTemplate${NL}$PackageSourcesNodeFooter|" $ConfigFile
+ fi
+ PackageSources+=('dotnet3.1-internal-transport')
+fi
+
+# Ensure dotnet5-internal and dotnet5-internal-transport are in the packageSources if the public dotnet5 feeds are present
+grep -i ""
+
+ sed -i.bak "s|$PackageSourcesNodeFooter|$PackageSourceTemplate${NL}$PackageSourcesNodeFooter|" $ConfigFile
+ fi
+ PackageSources+=('dotnet5-internal')
+
+ grep -i "" $ConfigFile
+ if [ "$?" != "0" ]; then
+ echo "Adding dotnet5-internal-transport to the packageSources."
+ PackageSourcesNodeFooter=""
+ PackageSourceTemplate="${TB}"
+
+ sed -i.bak "s|$PackageSourcesNodeFooter|$PackageSourceTemplate${NL}$PackageSourcesNodeFooter|" $ConfigFile
+ fi
+ PackageSources+=('dotnet5-internal-transport')
+fi
+
+# I want things split line by line
+PrevIFS=$IFS
+IFS=$'\n'
+PackageSources+="$IFS"
+PackageSources+=$(grep -oh '"darc-int-[^"]*"' $ConfigFile | tr -d '"')
+IFS=$PrevIFS
+
+for FeedName in ${PackageSources[@]} ; do
+ # Check if there is no existing credential for this FeedName
+ grep -i "<$FeedName>" $ConfigFile
+ if [ "$?" != "0" ]; then
+ echo "Adding credentials for $FeedName."
+
+ PackageSourceCredentialsNodeFooter=""
+ NewCredential="${TB}${TB}<$FeedName>${NL}${NL}${NL}$FeedName>"
+
+ sed -i.bak "s|$PackageSourceCredentialsNodeFooter|$NewCredential${NL}$PackageSourceCredentialsNodeFooter|" $ConfigFile
+ fi
+done
+
+# Re-enable any entries in disabledPackageSources where the feed name contains darc-int
+grep -i "" $ConfigFile
+if [ "$?" == "0" ]; then
+ DisabledDarcIntSources=()
+ echo "Re-enabling any disabled \"darc-int\" package sources in $ConfigFile"
+ DisabledDarcIntSources+=$(grep -oh '"darc-int-[^"]*" value="true"' $ConfigFile | tr -d '"')
+ for DisabledSourceName in ${DisabledDarcIntSources[@]} ; do
+ if [[ $DisabledSourceName == darc-int* ]]
+ then
+ OldDisableValue="add key=\"$DisabledSourceName\" value=\"true\""
+ NewDisableValue="add key=\"$DisabledSourceName\" value=\"false\""
+ sed -i.bak "s|$OldDisableValue|$NewDisableValue|" $ConfigFile
+ echo "Neutralized disablePackageSources entry for '$DisabledSourceName'"
+ fi
+ done
+fi
diff --git a/eng/common/build.ps1 b/eng/common/build.ps1
new file mode 100644
index 0000000000..1fd7f686fa
--- /dev/null
+++ b/eng/common/build.ps1
@@ -0,0 +1,161 @@
+[CmdletBinding(PositionalBinding=$false)]
+Param(
+ [string][Alias('c')]$configuration = "Debug",
+ [string]$platform = $null,
+ [string] $projects,
+ [string][Alias('v')]$verbosity = "minimal",
+ [string] $msbuildEngine = $null,
+ [bool] $warnAsError = $true,
+ [bool] $nodeReuse = $true,
+ [bool] $useDefaultDotnetInstall = $false,
+ [switch][Alias('r')]$restore,
+ [switch] $deployDeps,
+ [switch][Alias('b')]$build,
+ [switch] $rebuild,
+ [switch] $deploy,
+ [switch][Alias('t')]$test,
+ [switch] $integrationTest,
+ [switch] $performanceTest,
+ [switch] $sign,
+ [switch] $pack,
+ [switch] $publish,
+ [switch] $clean,
+ [switch][Alias('bl')]$binaryLog,
+ [switch][Alias('nobl')]$excludeCIBinarylog,
+ [switch] $ci,
+ [switch] $prepareMachine,
+ [string] $runtimeSourceFeed = '',
+ [string] $runtimeSourceFeedKey = '',
+ [switch] $help,
+ [Parameter(ValueFromRemainingArguments=$true)][String[]]$properties
+)
+
+# Unset 'Platform' environment variable to avoid unwanted collision in InstallDotNetCore.targets file
+# some computers have this env var defined (e.g. some HP machines)
+if($env:Platform) {
+ $env:Platform=""
+}
+function Print-Usage() {
+ Write-Host "Common settings:"
+ Write-Host " -configuration Build configuration: 'Debug' or 'Release' (short: -c)"
+ Write-Host " -platform Platform configuration: 'x86', 'x64' or any valid Platform value to pass to msbuild"
+ Write-Host " -verbosity Msbuild verbosity: q[uiet], m[inimal], n[ormal], d[etailed], and diag[nostic] (short: -v)"
+ Write-Host " -binaryLog Output binary log (short: -bl)"
+ Write-Host " -help Print help and exit"
+ Write-Host ""
+
+ Write-Host "Actions:"
+ Write-Host " -restore Restore dependencies (short: -r)"
+ Write-Host " -build Build solution (short: -b)"
+ Write-Host " -rebuild Rebuild solution"
+ Write-Host " -deploy Deploy built VSIXes"
+ Write-Host " -deployDeps Deploy dependencies (e.g. VSIXes for integration tests)"
+ Write-Host " -test Run all unit tests in the solution (short: -t)"
+ Write-Host " -integrationTest Run all integration tests in the solution"
+ Write-Host " -performanceTest Run all performance tests in the solution"
+ Write-Host " -pack Package build outputs into NuGet packages and Willow components"
+ Write-Host " -sign Sign build outputs"
+ Write-Host " -publish Publish artifacts (e.g. symbols)"
+ Write-Host " -clean Clean the solution"
+ Write-Host ""
+
+ Write-Host "Advanced settings:"
+ Write-Host " -projects Semi-colon delimited list of sln/proj's to build. Globbing is supported (*.sln)"
+ Write-Host " -ci Set when running on CI server"
+ Write-Host " -excludeCIBinarylog Don't output binary log (short: -nobl)"
+ Write-Host " -prepareMachine Prepare machine for CI run, clean up processes after build"
+ Write-Host " -warnAsError Sets warnaserror msbuild parameter ('true' or 'false')"
+ Write-Host " -msbuildEngine Msbuild engine to use to run build ('dotnet', 'vs', or unspecified)."
+ Write-Host " -useDefaultDotnetInstall Use dotnet-install.* scripts from public location as opposed to from eng common folder"
+ Write-Host ""
+
+ Write-Host "Command line arguments not listed above are passed thru to msbuild."
+ Write-Host "The above arguments can be shortened as much as to be unambiguous (e.g. -co for configuration, -t for test, etc.)."
+}
+
+. $PSScriptRoot\tools.ps1
+
+function InitializeCustomToolset {
+ if (-not $restore) {
+ return
+ }
+
+ $script = Join-Path $EngRoot 'restore-toolset.ps1'
+
+ if (Test-Path $script) {
+ . $script
+ }
+}
+
+function Build {
+ $toolsetBuildProj = InitializeToolset
+ InitializeCustomToolset
+
+ $bl = if ($binaryLog) { '/bl:' + (Join-Path $LogDir 'Build.binlog') } else { '' }
+ $platformArg = if ($platform) { "/p:Platform=$platform" } else { '' }
+
+ if ($projects) {
+ # Re-assign properties to a new variable because PowerShell doesn't let us append properties directly for unclear reasons.
+ # Explicitly set the type as string[] because otherwise PowerShell would make this char[] if $properties is empty.
+ [string[]] $msbuildArgs = $properties
+
+ # Resolve relative project paths into full paths
+ $projects = ($projects.Split(';').ForEach({Resolve-Path $_}) -join ';')
+
+ $msbuildArgs += "/p:Projects=$projects"
+ $properties = $msbuildArgs
+ }
+
+ MSBuild $toolsetBuildProj `
+ $bl `
+ $platformArg `
+ /p:Configuration=$configuration `
+ /p:RepoRoot=$RepoRoot `
+ /p:Restore=$restore `
+ /p:DeployDeps=$deployDeps `
+ /p:Build=$build `
+ /p:Rebuild=$rebuild `
+ /p:Deploy=$deploy `
+ /p:Test=$test `
+ /p:Pack=$pack `
+ /p:IntegrationTest=$integrationTest `
+ /p:PerformanceTest=$performanceTest `
+ /p:Sign=$sign `
+ /p:Publish=$publish `
+ @properties
+}
+
+try {
+ if ($clean) {
+ if (Test-Path $ArtifactsDir) {
+ Remove-Item -Recurse -Force $ArtifactsDir
+ Write-Host 'Artifacts directory deleted.'
+ }
+ exit 0
+ }
+
+ if ($help -or (($null -ne $properties) -and ($properties.Contains('/help') -or $properties.Contains('/?')))) {
+ Print-Usage
+ exit 0
+ }
+
+ if ($ci) {
+ if (-not $excludeCIBinarylog) {
+ $binaryLog = $true
+ }
+ $nodeReuse = $false
+ }
+
+ if ($restore) {
+ InitializeNativeTools
+ }
+
+ Build
+}
+catch {
+ Write-Host $_.ScriptStackTrace
+ Write-PipelineTelemetryError -Category 'InitializeToolset' -Message $_
+ ExitWithExitCode 1
+}
+
+ExitWithExitCode 0
diff --git a/eng/common/build.sh b/eng/common/build.sh
new file mode 100755
index 0000000000..19849adbee
--- /dev/null
+++ b/eng/common/build.sh
@@ -0,0 +1,239 @@
+#!/usr/bin/env bash
+
+# Stop script if unbound variable found (use ${var:-} if intentional)
+set -u
+
+# Stop script if command returns non-zero exit code.
+# Prevents hidden errors caused by missing error code propagation.
+set -e
+
+usage()
+{
+ echo "Common settings:"
+ echo " --configuration Build configuration: 'Debug' or 'Release' (short: -c)"
+ echo " --verbosity Msbuild verbosity: q[uiet], m[inimal], n[ormal], d[etailed], and diag[nostic] (short: -v)"
+ echo " --binaryLog Create MSBuild binary log (short: -bl)"
+ echo " --help Print help and exit (short: -h)"
+ echo ""
+
+ echo "Actions:"
+ echo " --restore Restore dependencies (short: -r)"
+ echo " --build Build solution (short: -b)"
+ echo " --rebuild Rebuild solution"
+ echo " --test Run all unit tests in the solution (short: -t)"
+ echo " --integrationTest Run all integration tests in the solution"
+ echo " --performanceTest Run all performance tests in the solution"
+ echo " --pack Package build outputs into NuGet packages and Willow components"
+ echo " --sign Sign build outputs"
+ echo " --publish Publish artifacts (e.g. symbols)"
+ echo " --clean Clean the solution"
+ echo ""
+
+ echo "Advanced settings:"
+ echo " --projects Project or solution file(s) to build"
+ echo " --ci Set when running on CI server"
+ echo " --excludeCIBinarylog Don't output binary log (short: -nobl)"
+ echo " --prepareMachine Prepare machine for CI run, clean up processes after build"
+ echo " --nodeReuse Sets nodereuse msbuild parameter ('true' or 'false')"
+ echo " --warnAsError Sets warnaserror msbuild parameter ('true' or 'false')"
+ echo " --useDefaultDotnetInstall Use dotnet-install.* scripts from public location as opposed to from eng common folder"
+
+ echo ""
+ echo "Command line arguments not listed above are passed thru to msbuild."
+ echo "Arguments can also be passed in with a single hyphen."
+}
+
+source="${BASH_SOURCE[0]}"
+
+# resolve $source until the file is no longer a symlink
+while [[ -h "$source" ]]; do
+ scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
+ source="$(readlink "$source")"
+ # if $source was a relative symlink, we need to resolve it relative to the path where the
+ # symlink file was located
+ [[ $source != /* ]] && source="$scriptroot/$source"
+done
+scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
+
+restore=false
+build=false
+rebuild=false
+test=false
+integration_test=false
+performance_test=false
+pack=false
+publish=false
+sign=false
+public=false
+ci=false
+clean=false
+
+warn_as_error=true
+node_reuse=true
+binary_log=false
+exclude_ci_binary_log=false
+pipelines_log=false
+
+projects=''
+configuration='Debug'
+prepare_machine=false
+verbosity='minimal'
+runtime_source_feed=''
+runtime_source_feed_key=''
+use_default_dotnet_install=false
+
+properties=''
+while [[ $# > 0 ]]; do
+ opt="$(echo "${1/#--/-}" | awk '{print tolower($0)}')"
+ case "$opt" in
+ -help|-h)
+ usage
+ exit 0
+ ;;
+ -clean)
+ clean=true
+ ;;
+ -configuration|-c)
+ configuration=$2
+ shift
+ ;;
+ -verbosity|-v)
+ verbosity=$2
+ shift
+ ;;
+ -binarylog|-bl)
+ binary_log=true
+ ;;
+ -excludeCIBinarylog|-nobl)
+ exclude_ci_binary_log=true
+ ;;
+ -pipelineslog|-pl)
+ pipelines_log=true
+ ;;
+ -restore|-r)
+ restore=true
+ ;;
+ -build|-b)
+ build=true
+ ;;
+ -rebuild)
+ rebuild=true
+ ;;
+ -pack)
+ pack=true
+ ;;
+ -test|-t)
+ test=true
+ ;;
+ -integrationtest)
+ integration_test=true
+ ;;
+ -performancetest)
+ performance_test=true
+ ;;
+ -sign)
+ sign=true
+ ;;
+ -publish)
+ publish=true
+ ;;
+ -preparemachine)
+ prepare_machine=true
+ ;;
+ -projects)
+ projects=$2
+ shift
+ ;;
+ -ci)
+ ci=true
+ ;;
+ -warnaserror)
+ warn_as_error=$2
+ shift
+ ;;
+ -nodereuse)
+ node_reuse=$2
+ shift
+ ;;
+ -runtimesourcefeed)
+ runtime_source_feed=$2
+ shift
+ ;;
+ -runtimesourcefeedkey)
+ runtime_source_feed_key=$2
+ shift
+ ;;
+ -usedefaultdotnetinstall)
+ use_default_dotnet_install=$2
+ shift
+ ;;
+ *)
+ properties="$properties $1"
+ ;;
+ esac
+
+ shift
+done
+
+if [[ "$ci" == true ]]; then
+ pipelines_log=true
+ node_reuse=false
+ if [[ "$exclude_ci_binary_log" == false ]]; then
+ binary_log=true
+ fi
+fi
+
+. "$scriptroot/tools.sh"
+
+function InitializeCustomToolset {
+ local script="$eng_root/restore-toolset.sh"
+
+ if [[ -a "$script" ]]; then
+ . "$script"
+ fi
+}
+
+function Build {
+ InitializeToolset
+ InitializeCustomToolset
+
+ if [[ ! -z "$projects" ]]; then
+ properties="$properties /p:Projects=$projects"
+ fi
+
+ local bl=""
+ if [[ "$binary_log" == true ]]; then
+ bl="/bl:\"$log_dir/Build.binlog\""
+ fi
+
+ MSBuild $_InitializeToolset \
+ $bl \
+ /p:Configuration=$configuration \
+ /p:RepoRoot="$repo_root" \
+ /p:Restore=$restore \
+ /p:Build=$build \
+ /p:Rebuild=$rebuild \
+ /p:Test=$test \
+ /p:Pack=$pack \
+ /p:IntegrationTest=$integration_test \
+ /p:PerformanceTest=$performance_test \
+ /p:Sign=$sign \
+ /p:Publish=$publish \
+ $properties
+
+ ExitWithExitCode 0
+}
+
+if [[ "$clean" == true ]]; then
+ if [ -d "$artifacts_dir" ]; then
+ rm -rf $artifacts_dir
+ echo "Artifacts directory deleted."
+ fi
+ exit 0
+fi
+
+if [[ "$restore" == true ]]; then
+ InitializeNativeTools
+fi
+
+Build
diff --git a/eng/common/cibuild.sh b/eng/common/cibuild.sh
new file mode 100755
index 0000000000..1a02c0dec8
--- /dev/null
+++ b/eng/common/cibuild.sh
@@ -0,0 +1,16 @@
+#!/usr/bin/env bash
+
+source="${BASH_SOURCE[0]}"
+
+# resolve $SOURCE until the file is no longer a symlink
+while [[ -h $source ]]; do
+ scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
+ source="$(readlink "$source")"
+
+ # if $source was a relative symlink, we need to resolve it relative to the path where
+ # the symlink file was located
+ [[ $source != /* ]] && source="$scriptroot/$source"
+done
+scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
+
+. "$scriptroot/build.sh" --restore --build --test --pack --publish --ci $@
\ No newline at end of file
diff --git a/eng/common/cross/arm/sources.list.bionic b/eng/common/cross/arm/sources.list.bionic
new file mode 100644
index 0000000000..2109557409
--- /dev/null
+++ b/eng/common/cross/arm/sources.list.bionic
@@ -0,0 +1,11 @@
+deb http://ports.ubuntu.com/ubuntu-ports/ bionic main restricted universe
+deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic main restricted universe
+
+deb http://ports.ubuntu.com/ubuntu-ports/ bionic-updates main restricted universe
+deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic-updates main restricted universe
+
+deb http://ports.ubuntu.com/ubuntu-ports/ bionic-backports main restricted
+deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic-backports main restricted
+
+deb http://ports.ubuntu.com/ubuntu-ports/ bionic-security main restricted universe multiverse
+deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic-security main restricted universe multiverse
diff --git a/eng/common/cross/arm/sources.list.jessie b/eng/common/cross/arm/sources.list.jessie
new file mode 100644
index 0000000000..4d142ac9b1
--- /dev/null
+++ b/eng/common/cross/arm/sources.list.jessie
@@ -0,0 +1,3 @@
+# Debian (sid) # UNSTABLE
+deb http://ftp.debian.org/debian/ sid main contrib non-free
+deb-src http://ftp.debian.org/debian/ sid main contrib non-free
diff --git a/eng/common/cross/arm/sources.list.trusty b/eng/common/cross/arm/sources.list.trusty
new file mode 100644
index 0000000000..07d8f88d82
--- /dev/null
+++ b/eng/common/cross/arm/sources.list.trusty
@@ -0,0 +1,11 @@
+deb http://ports.ubuntu.com/ubuntu-ports/ trusty main restricted universe
+deb-src http://ports.ubuntu.com/ubuntu-ports/ trusty main restricted universe
+
+deb http://ports.ubuntu.com/ubuntu-ports/ trusty-updates main restricted universe
+deb-src http://ports.ubuntu.com/ubuntu-ports/ trusty-updates main restricted universe
+
+deb http://ports.ubuntu.com/ubuntu-ports/ trusty-backports main restricted
+deb-src http://ports.ubuntu.com/ubuntu-ports/ trusty-backports main restricted
+
+deb http://ports.ubuntu.com/ubuntu-ports/ trusty-security main restricted universe multiverse
+deb-src http://ports.ubuntu.com/ubuntu-ports/ trusty-security main restricted universe multiverse
\ No newline at end of file
diff --git a/eng/common/cross/arm/sources.list.xenial b/eng/common/cross/arm/sources.list.xenial
new file mode 100644
index 0000000000..eacd86b7df
--- /dev/null
+++ b/eng/common/cross/arm/sources.list.xenial
@@ -0,0 +1,11 @@
+deb http://ports.ubuntu.com/ubuntu-ports/ xenial main restricted universe
+deb-src http://ports.ubuntu.com/ubuntu-ports/ xenial main restricted universe
+
+deb http://ports.ubuntu.com/ubuntu-ports/ xenial-updates main restricted universe
+deb-src http://ports.ubuntu.com/ubuntu-ports/ xenial-updates main restricted universe
+
+deb http://ports.ubuntu.com/ubuntu-ports/ xenial-backports main restricted
+deb-src http://ports.ubuntu.com/ubuntu-ports/ xenial-backports main restricted
+
+deb http://ports.ubuntu.com/ubuntu-ports/ xenial-security main restricted universe multiverse
+deb-src http://ports.ubuntu.com/ubuntu-ports/ xenial-security main restricted universe multiverse
\ No newline at end of file
diff --git a/eng/common/cross/arm/sources.list.zesty b/eng/common/cross/arm/sources.list.zesty
new file mode 100644
index 0000000000..ea2c14a787
--- /dev/null
+++ b/eng/common/cross/arm/sources.list.zesty
@@ -0,0 +1,11 @@
+deb http://ports.ubuntu.com/ubuntu-ports/ zesty main restricted universe
+deb-src http://ports.ubuntu.com/ubuntu-ports/ zesty main restricted universe
+
+deb http://ports.ubuntu.com/ubuntu-ports/ zesty-updates main restricted universe
+deb-src http://ports.ubuntu.com/ubuntu-ports/ zesty-updates main restricted universe
+
+deb http://ports.ubuntu.com/ubuntu-ports/ zesty-backports main restricted
+deb-src http://ports.ubuntu.com/ubuntu-ports/ zesty-backports main restricted
+
+deb http://ports.ubuntu.com/ubuntu-ports/ zesty-security main restricted universe multiverse
+deb-src http://ports.ubuntu.com/ubuntu-ports/ zesty-security main restricted universe multiverse
diff --git a/eng/common/cross/arm/trusty-lttng-2.4.patch b/eng/common/cross/arm/trusty-lttng-2.4.patch
new file mode 100644
index 0000000000..8e4dd7ae71
--- /dev/null
+++ b/eng/common/cross/arm/trusty-lttng-2.4.patch
@@ -0,0 +1,71 @@
+From e72c9d7ead60e3317bd6d1fade995c07021c947b Mon Sep 17 00:00:00 2001
+From: Mathieu Desnoyers
+Date: Thu, 7 May 2015 13:25:04 -0400
+Subject: [PATCH] Fix: building probe providers with C++ compiler
+
+Robert Daniels wrote:
+> > I'm attempting to use lttng userspace tracing with a C++ application
+> > on an ARM platform. I'm using GCC 4.8.4 on Linux 3.14 with the 2.6
+> > release of lttng. I've compiled lttng-modules, lttng-ust, and
+> > lttng-tools and have been able to get a simple test working with C
+> > code. When I attempt to run the hello.cxx test on my target it will
+> > segfault.
+>
+>
+> I spent a little time digging into this issue and finally discovered the
+> cause of my segfault with ARM C++ tracepoints.
+>
+> There is a struct called 'lttng_event' in ust-events.h which contains an
+> empty union 'u'. This was the cause of my issue. Under C, this empty union
+> compiles to a zero byte member while under C++ it compiles to a one byte
+> member, and in my case was four-byte aligned which caused my C++ code to
+> have the 'cds_list_head node' offset incorrectly by four bytes. This lead
+> to an incorrect linked list structure which caused my issue.
+>
+> Since this union is empty, I simply removed it from the struct and everything
+> worked correctly.
+>
+> I don't know the history or purpose behind this empty union so I'd like to
+> know if this is a safe fix. If it is I can submit a patch with the union
+> removed.
+
+That's a very nice catch!
+
+We do not support building tracepoint probe provider with
+g++ yet, as stated in lttng-ust(3):
+
+"- Note for C++ support: although an application instrumented with
+ tracepoints can be compiled with g++, tracepoint probes should be
+ compiled with gcc (only tested with gcc so far)."
+
+However, if it works fine with this fix, then I'm tempted to take it,
+especially because removing the empty union does not appear to affect
+the layout of struct lttng_event as seen from liblttng-ust, which must
+be compiled with a C compiler, and from probe providers compiled with
+a C compiler. So all we are changing is the layout of a probe provider
+compiled with a C++ compiler, which is anyway buggy at the moment,
+because it is not compatible with the layout expected by liblttng-ust
+compiled with a C compiler.
+
+Reported-by: Robert Daniels
+Signed-off-by: Mathieu Desnoyers
+---
+ include/lttng/ust-events.h | 2 --
+ 1 file changed, 2 deletions(-)
+
+diff --git a/usr/include/lttng/ust-events.h b/usr/include/lttng/ust-events.h
+index 328a875..3d7a274 100644
+--- a/usr/include/lttng/ust-events.h
++++ b/usr/include/lttng/ust-events.h
+@@ -407,8 +407,6 @@ struct lttng_event {
+ void *_deprecated1;
+ struct lttng_ctx *ctx;
+ enum lttng_ust_instrumentation instrumentation;
+- union {
+- } u;
+ struct cds_list_head node; /* Event list in session */
+ struct cds_list_head _deprecated2;
+ void *_deprecated3;
+--
+2.7.4
+
diff --git a/eng/common/cross/arm/trusty.patch b/eng/common/cross/arm/trusty.patch
new file mode 100644
index 0000000000..2f2972f8eb
--- /dev/null
+++ b/eng/common/cross/arm/trusty.patch
@@ -0,0 +1,97 @@
+diff -u -r a/usr/include/urcu/uatomic/generic.h b/usr/include/urcu/uatomic/generic.h
+--- a/usr/include/urcu/uatomic/generic.h 2014-03-28 06:04:42.000000000 +0900
++++ b/usr/include/urcu/uatomic/generic.h 2017-02-13 10:35:21.189927116 +0900
+@@ -65,17 +65,17 @@
+ switch (len) {
+ #ifdef UATOMIC_HAS_ATOMIC_BYTE
+ case 1:
+- return __sync_val_compare_and_swap_1(addr, old, _new);
++ return __sync_val_compare_and_swap_1((uint8_t *) addr, old, _new);
+ #endif
+ #ifdef UATOMIC_HAS_ATOMIC_SHORT
+ case 2:
+- return __sync_val_compare_and_swap_2(addr, old, _new);
++ return __sync_val_compare_and_swap_2((uint16_t *) addr, old, _new);
+ #endif
+ case 4:
+- return __sync_val_compare_and_swap_4(addr, old, _new);
++ return __sync_val_compare_and_swap_4((uint32_t *) addr, old, _new);
+ #if (CAA_BITS_PER_LONG == 64)
+ case 8:
+- return __sync_val_compare_and_swap_8(addr, old, _new);
++ return __sync_val_compare_and_swap_8((uint64_t *) addr, old, _new);
+ #endif
+ }
+ _uatomic_link_error();
+@@ -100,20 +100,20 @@
+ switch (len) {
+ #ifdef UATOMIC_HAS_ATOMIC_BYTE
+ case 1:
+- __sync_and_and_fetch_1(addr, val);
++ __sync_and_and_fetch_1((uint8_t *) addr, val);
+ return;
+ #endif
+ #ifdef UATOMIC_HAS_ATOMIC_SHORT
+ case 2:
+- __sync_and_and_fetch_2(addr, val);
++ __sync_and_and_fetch_2((uint16_t *) addr, val);
+ return;
+ #endif
+ case 4:
+- __sync_and_and_fetch_4(addr, val);
++ __sync_and_and_fetch_4((uint32_t *) addr, val);
+ return;
+ #if (CAA_BITS_PER_LONG == 64)
+ case 8:
+- __sync_and_and_fetch_8(addr, val);
++ __sync_and_and_fetch_8((uint64_t *) addr, val);
+ return;
+ #endif
+ }
+@@ -139,20 +139,20 @@
+ switch (len) {
+ #ifdef UATOMIC_HAS_ATOMIC_BYTE
+ case 1:
+- __sync_or_and_fetch_1(addr, val);
++ __sync_or_and_fetch_1((uint8_t *) addr, val);
+ return;
+ #endif
+ #ifdef UATOMIC_HAS_ATOMIC_SHORT
+ case 2:
+- __sync_or_and_fetch_2(addr, val);
++ __sync_or_and_fetch_2((uint16_t *) addr, val);
+ return;
+ #endif
+ case 4:
+- __sync_or_and_fetch_4(addr, val);
++ __sync_or_and_fetch_4((uint32_t *) addr, val);
+ return;
+ #if (CAA_BITS_PER_LONG == 64)
+ case 8:
+- __sync_or_and_fetch_8(addr, val);
++ __sync_or_and_fetch_8((uint64_t *) addr, val);
+ return;
+ #endif
+ }
+@@ -180,17 +180,17 @@
+ switch (len) {
+ #ifdef UATOMIC_HAS_ATOMIC_BYTE
+ case 1:
+- return __sync_add_and_fetch_1(addr, val);
++ return __sync_add_and_fetch_1((uint8_t *) addr, val);
+ #endif
+ #ifdef UATOMIC_HAS_ATOMIC_SHORT
+ case 2:
+- return __sync_add_and_fetch_2(addr, val);
++ return __sync_add_and_fetch_2((uint16_t *) addr, val);
+ #endif
+ case 4:
+- return __sync_add_and_fetch_4(addr, val);
++ return __sync_add_and_fetch_4((uint32_t *) addr, val);
+ #if (CAA_BITS_PER_LONG == 64)
+ case 8:
+- return __sync_add_and_fetch_8(addr, val);
++ return __sync_add_and_fetch_8((uint64_t *) addr, val);
+ #endif
+ }
+ _uatomic_link_error();
diff --git a/eng/common/cross/arm64/sources.list.bionic b/eng/common/cross/arm64/sources.list.bionic
new file mode 100644
index 0000000000..2109557409
--- /dev/null
+++ b/eng/common/cross/arm64/sources.list.bionic
@@ -0,0 +1,11 @@
+deb http://ports.ubuntu.com/ubuntu-ports/ bionic main restricted universe
+deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic main restricted universe
+
+deb http://ports.ubuntu.com/ubuntu-ports/ bionic-updates main restricted universe
+deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic-updates main restricted universe
+
+deb http://ports.ubuntu.com/ubuntu-ports/ bionic-backports main restricted
+deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic-backports main restricted
+
+deb http://ports.ubuntu.com/ubuntu-ports/ bionic-security main restricted universe multiverse
+deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic-security main restricted universe multiverse
diff --git a/eng/common/cross/arm64/sources.list.buster b/eng/common/cross/arm64/sources.list.buster
new file mode 100644
index 0000000000..7194ac64a9
--- /dev/null
+++ b/eng/common/cross/arm64/sources.list.buster
@@ -0,0 +1,11 @@
+deb http://deb.debian.org/debian buster main
+deb-src http://deb.debian.org/debian buster main
+
+deb http://deb.debian.org/debian-security/ buster/updates main
+deb-src http://deb.debian.org/debian-security/ buster/updates main
+
+deb http://deb.debian.org/debian buster-updates main
+deb-src http://deb.debian.org/debian buster-updates main
+
+deb http://deb.debian.org/debian buster-backports main contrib non-free
+deb-src http://deb.debian.org/debian buster-backports main contrib non-free
diff --git a/eng/common/cross/arm64/sources.list.stretch b/eng/common/cross/arm64/sources.list.stretch
new file mode 100644
index 0000000000..0e12157743
--- /dev/null
+++ b/eng/common/cross/arm64/sources.list.stretch
@@ -0,0 +1,12 @@
+deb http://deb.debian.org/debian stretch main
+deb-src http://deb.debian.org/debian stretch main
+
+deb http://deb.debian.org/debian-security/ stretch/updates main
+deb-src http://deb.debian.org/debian-security/ stretch/updates main
+
+deb http://deb.debian.org/debian stretch-updates main
+deb-src http://deb.debian.org/debian stretch-updates main
+
+deb http://deb.debian.org/debian stretch-backports main contrib non-free
+deb-src http://deb.debian.org/debian stretch-backports main contrib non-free
+
diff --git a/eng/common/cross/arm64/sources.list.trusty b/eng/common/cross/arm64/sources.list.trusty
new file mode 100644
index 0000000000..07d8f88d82
--- /dev/null
+++ b/eng/common/cross/arm64/sources.list.trusty
@@ -0,0 +1,11 @@
+deb http://ports.ubuntu.com/ubuntu-ports/ trusty main restricted universe
+deb-src http://ports.ubuntu.com/ubuntu-ports/ trusty main restricted universe
+
+deb http://ports.ubuntu.com/ubuntu-ports/ trusty-updates main restricted universe
+deb-src http://ports.ubuntu.com/ubuntu-ports/ trusty-updates main restricted universe
+
+deb http://ports.ubuntu.com/ubuntu-ports/ trusty-backports main restricted
+deb-src http://ports.ubuntu.com/ubuntu-ports/ trusty-backports main restricted
+
+deb http://ports.ubuntu.com/ubuntu-ports/ trusty-security main restricted universe multiverse
+deb-src http://ports.ubuntu.com/ubuntu-ports/ trusty-security main restricted universe multiverse
\ No newline at end of file
diff --git a/eng/common/cross/arm64/sources.list.xenial b/eng/common/cross/arm64/sources.list.xenial
new file mode 100644
index 0000000000..eacd86b7df
--- /dev/null
+++ b/eng/common/cross/arm64/sources.list.xenial
@@ -0,0 +1,11 @@
+deb http://ports.ubuntu.com/ubuntu-ports/ xenial main restricted universe
+deb-src http://ports.ubuntu.com/ubuntu-ports/ xenial main restricted universe
+
+deb http://ports.ubuntu.com/ubuntu-ports/ xenial-updates main restricted universe
+deb-src http://ports.ubuntu.com/ubuntu-ports/ xenial-updates main restricted universe
+
+deb http://ports.ubuntu.com/ubuntu-ports/ xenial-backports main restricted
+deb-src http://ports.ubuntu.com/ubuntu-ports/ xenial-backports main restricted
+
+deb http://ports.ubuntu.com/ubuntu-ports/ xenial-security main restricted universe multiverse
+deb-src http://ports.ubuntu.com/ubuntu-ports/ xenial-security main restricted universe multiverse
\ No newline at end of file
diff --git a/eng/common/cross/arm64/sources.list.zesty b/eng/common/cross/arm64/sources.list.zesty
new file mode 100644
index 0000000000..ea2c14a787
--- /dev/null
+++ b/eng/common/cross/arm64/sources.list.zesty
@@ -0,0 +1,11 @@
+deb http://ports.ubuntu.com/ubuntu-ports/ zesty main restricted universe
+deb-src http://ports.ubuntu.com/ubuntu-ports/ zesty main restricted universe
+
+deb http://ports.ubuntu.com/ubuntu-ports/ zesty-updates main restricted universe
+deb-src http://ports.ubuntu.com/ubuntu-ports/ zesty-updates main restricted universe
+
+deb http://ports.ubuntu.com/ubuntu-ports/ zesty-backports main restricted
+deb-src http://ports.ubuntu.com/ubuntu-ports/ zesty-backports main restricted
+
+deb http://ports.ubuntu.com/ubuntu-ports/ zesty-security main restricted universe multiverse
+deb-src http://ports.ubuntu.com/ubuntu-ports/ zesty-security main restricted universe multiverse
diff --git a/eng/common/cross/arm64/tizen-build-rootfs.sh b/eng/common/cross/arm64/tizen-build-rootfs.sh
new file mode 100755
index 0000000000..13bfddb5e2
--- /dev/null
+++ b/eng/common/cross/arm64/tizen-build-rootfs.sh
@@ -0,0 +1,35 @@
+#!/usr/bin/env bash
+set -e
+
+__CrossDir=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
+__TIZEN_CROSSDIR="$__CrossDir/tizen"
+
+if [[ -z "$ROOTFS_DIR" ]]; then
+ echo "ROOTFS_DIR is not defined."
+ exit 1;
+fi
+
+TIZEN_TMP_DIR=$ROOTFS_DIR/tizen_tmp
+mkdir -p $TIZEN_TMP_DIR
+
+# Download files
+echo ">>Start downloading files"
+VERBOSE=1 $__CrossDir/tizen-fetch.sh $TIZEN_TMP_DIR
+echo "<>Start constructing Tizen rootfs"
+TIZEN_RPM_FILES=`ls $TIZEN_TMP_DIR/*.rpm`
+cd $ROOTFS_DIR
+for f in $TIZEN_RPM_FILES; do
+ rpm2cpio $f | cpio -idm --quiet
+done
+echo "<>Start configuring Tizen rootfs"
+ln -sfn asm-arm64 ./usr/include/asm
+patch -p1 < $__TIZEN_CROSSDIR/tizen.patch
+echo "</dev/null; then
+ VERBOSE=0
+fi
+
+Log()
+{
+ if [ $VERBOSE -ge $1 ]; then
+ echo ${@:2}
+ fi
+}
+
+Inform()
+{
+ Log 1 -e "\x1B[0;34m$@\x1B[m"
+}
+
+Debug()
+{
+ Log 2 -e "\x1B[0;32m$@\x1B[m"
+}
+
+Error()
+{
+ >&2 Log 0 -e "\x1B[0;31m$@\x1B[m"
+}
+
+Fetch()
+{
+ URL=$1
+ FILE=$2
+ PROGRESS=$3
+ if [ $VERBOSE -ge 1 ] && [ $PROGRESS ]; then
+ CURL_OPT="--progress-bar"
+ else
+ CURL_OPT="--silent"
+ fi
+ curl $CURL_OPT $URL > $FILE
+}
+
+hash curl 2> /dev/null || { Error "Require 'curl' Aborting."; exit 1; }
+hash xmllint 2> /dev/null || { Error "Require 'xmllint' Aborting."; exit 1; }
+hash sha256sum 2> /dev/null || { Error "Require 'sha256sum' Aborting."; exit 1; }
+
+TMPDIR=$1
+if [ ! -d $TMPDIR ]; then
+ TMPDIR=./tizen_tmp
+ Debug "Create temporary directory : $TMPDIR"
+ mkdir -p $TMPDIR
+fi
+
+TIZEN_URL=http://download.tizen.org/snapshots/tizen/
+BUILD_XML=build.xml
+REPOMD_XML=repomd.xml
+PRIMARY_XML=primary.xml
+TARGET_URL="http://__not_initialized"
+
+Xpath_get()
+{
+ XPATH_RESULT=''
+ XPATH=$1
+ XML_FILE=$2
+ RESULT=$(xmllint --xpath $XPATH $XML_FILE)
+ if [[ -z ${RESULT// } ]]; then
+ Error "Can not find target from $XML_FILE"
+ Debug "Xpath = $XPATH"
+ exit 1
+ fi
+ XPATH_RESULT=$RESULT
+}
+
+fetch_tizen_pkgs_init()
+{
+ TARGET=$1
+ PROFILE=$2
+ Debug "Initialize TARGET=$TARGET, PROFILE=$PROFILE"
+
+ TMP_PKG_DIR=$TMPDIR/tizen_${PROFILE}_pkgs
+ if [ -d $TMP_PKG_DIR ]; then rm -rf $TMP_PKG_DIR; fi
+ mkdir -p $TMP_PKG_DIR
+
+ PKG_URL=$TIZEN_URL/$PROFILE/latest
+
+ BUILD_XML_URL=$PKG_URL/$BUILD_XML
+ TMP_BUILD=$TMP_PKG_DIR/$BUILD_XML
+ TMP_REPOMD=$TMP_PKG_DIR/$REPOMD_XML
+ TMP_PRIMARY=$TMP_PKG_DIR/$PRIMARY_XML
+ TMP_PRIMARYGZ=${TMP_PRIMARY}.gz
+
+ Fetch $BUILD_XML_URL $TMP_BUILD
+
+ Debug "fetch $BUILD_XML_URL to $TMP_BUILD"
+
+ TARGET_XPATH="//build/buildtargets/buildtarget[@name=\"$TARGET\"]/repo[@type=\"binary\"]/text()"
+ Xpath_get $TARGET_XPATH $TMP_BUILD
+ TARGET_PATH=$XPATH_RESULT
+ TARGET_URL=$PKG_URL/$TARGET_PATH
+
+ REPOMD_URL=$TARGET_URL/repodata/repomd.xml
+ PRIMARY_XPATH='string(//*[local-name()="data"][@type="primary"]/*[local-name()="location"]/@href)'
+
+ Fetch $REPOMD_URL $TMP_REPOMD
+
+ Debug "fetch $REPOMD_URL to $TMP_REPOMD"
+
+ Xpath_get $PRIMARY_XPATH $TMP_REPOMD
+ PRIMARY_XML_PATH=$XPATH_RESULT
+ PRIMARY_URL=$TARGET_URL/$PRIMARY_XML_PATH
+
+ Fetch $PRIMARY_URL $TMP_PRIMARYGZ
+
+ Debug "fetch $PRIMARY_URL to $TMP_PRIMARYGZ"
+
+ gunzip $TMP_PRIMARYGZ
+
+ Debug "unzip $TMP_PRIMARYGZ to $TMP_PRIMARY"
+}
+
+fetch_tizen_pkgs()
+{
+ ARCH=$1
+ PACKAGE_XPATH_TPL='string(//*[local-name()="metadata"]/*[local-name()="package"][*[local-name()="name"][text()="_PKG_"]][*[local-name()="arch"][text()="_ARCH_"]]/*[local-name()="location"]/@href)'
+
+ PACKAGE_CHECKSUM_XPATH_TPL='string(//*[local-name()="metadata"]/*[local-name()="package"][*[local-name()="name"][text()="_PKG_"]][*[local-name()="arch"][text()="_ARCH_"]]/*[local-name()="checksum"]/text())'
+
+ for pkg in ${@:2}
+ do
+ Inform "Fetching... $pkg"
+ XPATH=${PACKAGE_XPATH_TPL/_PKG_/$pkg}
+ XPATH=${XPATH/_ARCH_/$ARCH}
+ Xpath_get $XPATH $TMP_PRIMARY
+ PKG_PATH=$XPATH_RESULT
+
+ XPATH=${PACKAGE_CHECKSUM_XPATH_TPL/_PKG_/$pkg}
+ XPATH=${XPATH/_ARCH_/$ARCH}
+ Xpath_get $XPATH $TMP_PRIMARY
+ CHECKSUM=$XPATH_RESULT
+
+ PKG_URL=$TARGET_URL/$PKG_PATH
+ PKG_FILE=$(basename $PKG_PATH)
+ PKG_PATH=$TMPDIR/$PKG_FILE
+
+ Debug "Download $PKG_URL to $PKG_PATH"
+ Fetch $PKG_URL $PKG_PATH true
+
+ echo "$CHECKSUM $PKG_PATH" | sha256sum -c - > /dev/null
+ if [ $? -ne 0 ]; then
+ Error "Fail to fetch $PKG_URL to $PKG_PATH"
+ Debug "Checksum = $CHECKSUM"
+ exit 1
+ fi
+ done
+}
+
+Inform "Initialize arm base"
+fetch_tizen_pkgs_init standard base
+Inform "fetch common packages"
+fetch_tizen_pkgs aarch64 gcc glibc glibc-devel libicu libicu-devel libatomic linux-glibc-devel
+Inform "fetch coreclr packages"
+fetch_tizen_pkgs aarch64 lldb lldb-devel libgcc libstdc++ libstdc++-devel libunwind libunwind-devel lttng-ust-devel lttng-ust userspace-rcu-devel userspace-rcu
+Inform "fetch corefx packages"
+fetch_tizen_pkgs aarch64 libcom_err libcom_err-devel zlib zlib-devel libopenssl11 libopenssl1.1-devel krb5 krb5-devel
+
+Inform "Initialize standard unified"
+fetch_tizen_pkgs_init standard unified
+Inform "fetch corefx packages"
+fetch_tizen_pkgs aarch64 gssdp gssdp-devel tizen-release
+
diff --git a/eng/common/cross/arm64/tizen/tizen.patch b/eng/common/cross/arm64/tizen/tizen.patch
new file mode 100644
index 0000000000..af7c8be059
--- /dev/null
+++ b/eng/common/cross/arm64/tizen/tizen.patch
@@ -0,0 +1,9 @@
+diff -u -r a/usr/lib/libc.so b/usr/lib/libc.so
+--- a/usr/lib64/libc.so 2016-12-30 23:00:08.284951863 +0900
++++ b/usr/lib64/libc.so 2016-12-30 23:00:32.140951815 +0900
+@@ -2,4 +2,4 @@
+ Use the shared library, but some functions are only in
+ the static library, so try that secondarily. */
+ OUTPUT_FORMAT(elf64-littleaarch64)
+-GROUP ( /lib64/libc.so.6 /usr/lib64/libc_nonshared.a AS_NEEDED ( /lib/ld-linux-aarch64.so.1 ) )
++GROUP ( libc.so.6 libc_nonshared.a AS_NEEDED ( ld-linux-aarch64.so.1 ) )
diff --git a/eng/common/cross/armel/sources.list.jessie b/eng/common/cross/armel/sources.list.jessie
new file mode 100644
index 0000000000..3d9c3059d8
--- /dev/null
+++ b/eng/common/cross/armel/sources.list.jessie
@@ -0,0 +1,3 @@
+# Debian (jessie) # Stable
+deb http://ftp.debian.org/debian/ jessie main contrib non-free
+deb-src http://ftp.debian.org/debian/ jessie main contrib non-free
diff --git a/eng/common/cross/armel/tizen-build-rootfs.sh b/eng/common/cross/armel/tizen-build-rootfs.sh
new file mode 100755
index 0000000000..9a4438af61
--- /dev/null
+++ b/eng/common/cross/armel/tizen-build-rootfs.sh
@@ -0,0 +1,35 @@
+#!/usr/bin/env bash
+set -e
+
+__ARM_SOFTFP_CrossDir=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
+__TIZEN_CROSSDIR="$__ARM_SOFTFP_CrossDir/tizen"
+
+if [[ -z "$ROOTFS_DIR" ]]; then
+ echo "ROOTFS_DIR is not defined."
+ exit 1;
+fi
+
+TIZEN_TMP_DIR=$ROOTFS_DIR/tizen_tmp
+mkdir -p $TIZEN_TMP_DIR
+
+# Download files
+echo ">>Start downloading files"
+VERBOSE=1 $__ARM_SOFTFP_CrossDir/tizen-fetch.sh $TIZEN_TMP_DIR
+echo "<>Start constructing Tizen rootfs"
+TIZEN_RPM_FILES=`ls $TIZEN_TMP_DIR/*.rpm`
+cd $ROOTFS_DIR
+for f in $TIZEN_RPM_FILES; do
+ rpm2cpio $f | cpio -idm --quiet
+done
+echo "<>Start configuring Tizen rootfs"
+ln -sfn asm-arm ./usr/include/asm
+patch -p1 < $__TIZEN_CROSSDIR/tizen.patch
+echo "</dev/null; then
+ VERBOSE=0
+fi
+
+Log()
+{
+ if [ $VERBOSE -ge $1 ]; then
+ echo ${@:2}
+ fi
+}
+
+Inform()
+{
+ Log 1 -e "\x1B[0;34m$@\x1B[m"
+}
+
+Debug()
+{
+ Log 2 -e "\x1B[0;32m$@\x1B[m"
+}
+
+Error()
+{
+ >&2 Log 0 -e "\x1B[0;31m$@\x1B[m"
+}
+
+Fetch()
+{
+ URL=$1
+ FILE=$2
+ PROGRESS=$3
+ if [ $VERBOSE -ge 1 ] && [ $PROGRESS ]; then
+ CURL_OPT="--progress-bar"
+ else
+ CURL_OPT="--silent"
+ fi
+ curl $CURL_OPT $URL > $FILE
+}
+
+hash curl 2> /dev/null || { Error "Require 'curl' Aborting."; exit 1; }
+hash xmllint 2> /dev/null || { Error "Require 'xmllint' Aborting."; exit 1; }
+hash sha256sum 2> /dev/null || { Error "Require 'sha256sum' Aborting."; exit 1; }
+
+TMPDIR=$1
+if [ ! -d $TMPDIR ]; then
+ TMPDIR=./tizen_tmp
+ Debug "Create temporary directory : $TMPDIR"
+ mkdir -p $TMPDIR
+fi
+
+TIZEN_URL=http://download.tizen.org/snapshots/tizen
+BUILD_XML=build.xml
+REPOMD_XML=repomd.xml
+PRIMARY_XML=primary.xml
+TARGET_URL="http://__not_initialized"
+
+Xpath_get()
+{
+ XPATH_RESULT=''
+ XPATH=$1
+ XML_FILE=$2
+ RESULT=$(xmllint --xpath $XPATH $XML_FILE)
+ if [[ -z ${RESULT// } ]]; then
+ Error "Can not find target from $XML_FILE"
+ Debug "Xpath = $XPATH"
+ exit 1
+ fi
+ XPATH_RESULT=$RESULT
+}
+
+fetch_tizen_pkgs_init()
+{
+ TARGET=$1
+ PROFILE=$2
+ Debug "Initialize TARGET=$TARGET, PROFILE=$PROFILE"
+
+ TMP_PKG_DIR=$TMPDIR/tizen_${PROFILE}_pkgs
+ if [ -d $TMP_PKG_DIR ]; then rm -rf $TMP_PKG_DIR; fi
+ mkdir -p $TMP_PKG_DIR
+
+ PKG_URL=$TIZEN_URL/$PROFILE/latest
+
+ BUILD_XML_URL=$PKG_URL/$BUILD_XML
+ TMP_BUILD=$TMP_PKG_DIR/$BUILD_XML
+ TMP_REPOMD=$TMP_PKG_DIR/$REPOMD_XML
+ TMP_PRIMARY=$TMP_PKG_DIR/$PRIMARY_XML
+ TMP_PRIMARYGZ=${TMP_PRIMARY}.gz
+
+ Fetch $BUILD_XML_URL $TMP_BUILD
+
+ Debug "fetch $BUILD_XML_URL to $TMP_BUILD"
+
+ TARGET_XPATH="//build/buildtargets/buildtarget[@name=\"$TARGET\"]/repo[@type=\"binary\"]/text()"
+ Xpath_get $TARGET_XPATH $TMP_BUILD
+ TARGET_PATH=$XPATH_RESULT
+ TARGET_URL=$PKG_URL/$TARGET_PATH
+
+ REPOMD_URL=$TARGET_URL/repodata/repomd.xml
+ PRIMARY_XPATH='string(//*[local-name()="data"][@type="primary"]/*[local-name()="location"]/@href)'
+
+ Fetch $REPOMD_URL $TMP_REPOMD
+
+ Debug "fetch $REPOMD_URL to $TMP_REPOMD"
+
+ Xpath_get $PRIMARY_XPATH $TMP_REPOMD
+ PRIMARY_XML_PATH=$XPATH_RESULT
+ PRIMARY_URL=$TARGET_URL/$PRIMARY_XML_PATH
+
+ Fetch $PRIMARY_URL $TMP_PRIMARYGZ
+
+ Debug "fetch $PRIMARY_URL to $TMP_PRIMARYGZ"
+
+ gunzip $TMP_PRIMARYGZ
+
+ Debug "unzip $TMP_PRIMARYGZ to $TMP_PRIMARY"
+}
+
+fetch_tizen_pkgs()
+{
+ ARCH=$1
+ PACKAGE_XPATH_TPL='string(//*[local-name()="metadata"]/*[local-name()="package"][*[local-name()="name"][text()="_PKG_"]][*[local-name()="arch"][text()="_ARCH_"]]/*[local-name()="location"]/@href)'
+
+ PACKAGE_CHECKSUM_XPATH_TPL='string(//*[local-name()="metadata"]/*[local-name()="package"][*[local-name()="name"][text()="_PKG_"]][*[local-name()="arch"][text()="_ARCH_"]]/*[local-name()="checksum"]/text())'
+
+ for pkg in ${@:2}
+ do
+ Inform "Fetching... $pkg"
+ XPATH=${PACKAGE_XPATH_TPL/_PKG_/$pkg}
+ XPATH=${XPATH/_ARCH_/$ARCH}
+ Xpath_get $XPATH $TMP_PRIMARY
+ PKG_PATH=$XPATH_RESULT
+
+ XPATH=${PACKAGE_CHECKSUM_XPATH_TPL/_PKG_/$pkg}
+ XPATH=${XPATH/_ARCH_/$ARCH}
+ Xpath_get $XPATH $TMP_PRIMARY
+ CHECKSUM=$XPATH_RESULT
+
+ PKG_URL=$TARGET_URL/$PKG_PATH
+ PKG_FILE=$(basename $PKG_PATH)
+ PKG_PATH=$TMPDIR/$PKG_FILE
+
+ Debug "Download $PKG_URL to $PKG_PATH"
+ Fetch $PKG_URL $PKG_PATH true
+
+ echo "$CHECKSUM $PKG_PATH" | sha256sum -c - > /dev/null
+ if [ $? -ne 0 ]; then
+ Error "Fail to fetch $PKG_URL to $PKG_PATH"
+ Debug "Checksum = $CHECKSUM"
+ exit 1
+ fi
+ done
+}
+
+Inform "Initialize arm base"
+fetch_tizen_pkgs_init standard base
+Inform "fetch common packages"
+fetch_tizen_pkgs armv7l gcc glibc glibc-devel libicu libicu-devel libatomic linux-glibc-devel
+Inform "fetch coreclr packages"
+fetch_tizen_pkgs armv7l lldb lldb-devel libgcc libstdc++ libstdc++-devel libunwind libunwind-devel lttng-ust-devel lttng-ust userspace-rcu-devel userspace-rcu
+Inform "fetch corefx packages"
+fetch_tizen_pkgs armv7l libcom_err libcom_err-devel zlib zlib-devel libopenssl11 libopenssl1.1-devel krb5 krb5-devel
+
+Inform "Initialize standard unified"
+fetch_tizen_pkgs_init standard unified
+Inform "fetch corefx packages"
+fetch_tizen_pkgs armv7l gssdp gssdp-devel tizen-release
+
diff --git a/eng/common/cross/armel/tizen/tizen-dotnet.ks b/eng/common/cross/armel/tizen/tizen-dotnet.ks
new file mode 100644
index 0000000000..506d455bd4
--- /dev/null
+++ b/eng/common/cross/armel/tizen/tizen-dotnet.ks
@@ -0,0 +1,50 @@
+lang en_US.UTF-8
+keyboard us
+timezone --utc Asia/Seoul
+
+part / --fstype="ext4" --size=3500 --ondisk=mmcblk0 --label rootfs --fsoptions=defaults,noatime
+
+rootpw tizen
+desktop --autologinuser=root
+user --name root --groups audio,video --password 'tizen'
+
+repo --name=standard --baseurl=http://download.tizen.org/releases/milestone/tizen/unified/latest/repos/standard/packages/ --ssl_verify=no
+repo --name=base --baseurl=http://download.tizen.org/releases/milestone/tizen/base/latest/repos/standard/packages/ --ssl_verify=no
+
+%packages
+tar
+gzip
+
+sed
+grep
+gawk
+perl
+
+binutils
+findutils
+util-linux
+lttng-ust
+userspace-rcu
+procps-ng
+tzdata
+ca-certificates
+
+
+### Core FX
+libicu
+libunwind
+iputils
+zlib
+krb5
+libcurl
+libopenssl
+
+%end
+
+%post
+
+### Update /tmp privilege
+chmod 777 /tmp
+####################################
+
+%end
diff --git a/eng/common/cross/armel/tizen/tizen.patch b/eng/common/cross/armel/tizen/tizen.patch
new file mode 100644
index 0000000000..ca7c7c1ff7
--- /dev/null
+++ b/eng/common/cross/armel/tizen/tizen.patch
@@ -0,0 +1,9 @@
+diff -u -r a/usr/lib/libc.so b/usr/lib/libc.so
+--- a/usr/lib/libc.so 2016-12-30 23:00:08.284951863 +0900
++++ b/usr/lib/libc.so 2016-12-30 23:00:32.140951815 +0900
+@@ -2,4 +2,4 @@
+ Use the shared library, but some functions are only in
+ the static library, so try that secondarily. */
+ OUTPUT_FORMAT(elf32-littlearm)
+-GROUP ( /lib/libc.so.6 /usr/lib/libc_nonshared.a AS_NEEDED ( /lib/ld-linux.so.3 ) )
++GROUP ( libc.so.6 libc_nonshared.a AS_NEEDED ( ld-linux.so.3 ) )
diff --git a/eng/common/cross/build-android-rootfs.sh b/eng/common/cross/build-android-rootfs.sh
new file mode 100755
index 0000000000..e7f12edb56
--- /dev/null
+++ b/eng/common/cross/build-android-rootfs.sh
@@ -0,0 +1,131 @@
+#!/usr/bin/env bash
+set -e
+__NDK_Version=r21
+
+usage()
+{
+ echo "Creates a toolchain and sysroot used for cross-compiling for Android."
+ echo.
+ echo "Usage: $0 [BuildArch] [ApiLevel]"
+ echo.
+ echo "BuildArch is the target architecture of Android. Currently only arm64 is supported."
+ echo "ApiLevel is the target Android API level. API levels usually match to Android releases. See https://source.android.com/source/build-numbers.html"
+ echo.
+ echo "By default, the toolchain and sysroot will be generated in cross/android-rootfs/toolchain/[BuildArch]. You can change this behavior"
+ echo "by setting the TOOLCHAIN_DIR environment variable"
+ echo.
+ echo "By default, the NDK will be downloaded into the cross/android-rootfs/android-ndk-$__NDK_Version directory. If you already have an NDK installation,"
+ echo "you can set the NDK_DIR environment variable to have this script use that installation of the NDK."
+ echo "By default, this script will generate a file, android_platform, in the root of the ROOTFS_DIR directory that contains the RID for the supported and tested Android build: android.28-arm64. This file is to replace '/etc/os-release', which is not available for Android."
+ exit 1
+}
+
+__ApiLevel=28 # The minimum platform for arm64 is API level 21 but the minimum version that support glob(3) is 28. See $ANDROID_NDK/toolchains/llvm/prebuilt/linux-x86_64/sysroot/usr/include/glob.h
+__BuildArch=arm64
+__AndroidArch=aarch64
+__AndroidToolchain=aarch64-linux-android
+
+for i in "$@"
+ do
+ lowerI="$(echo $i | awk '{print tolower($0)}')"
+ case $lowerI in
+ -?|-h|--help)
+ usage
+ exit 1
+ ;;
+ arm64)
+ __BuildArch=arm64
+ __AndroidArch=aarch64
+ __AndroidToolchain=aarch64-linux-android
+ ;;
+ arm)
+ __BuildArch=arm
+ __AndroidArch=arm
+ __AndroidToolchain=arm-linux-androideabi
+ ;;
+ *[0-9])
+ __ApiLevel=$i
+ ;;
+ *)
+ __UnprocessedBuildArgs="$__UnprocessedBuildArgs $i"
+ ;;
+ esac
+done
+
+# Obtain the location of the bash script to figure out where the root of the repo is.
+__ScriptBaseDir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+
+__CrossDir="$__ScriptBaseDir/../../../.tools/android-rootfs"
+
+if [[ ! -f "$__CrossDir" ]]; then
+ mkdir -p "$__CrossDir"
+fi
+
+# Resolve absolute path to avoid `../` in build logs
+__CrossDir="$( cd "$__CrossDir" && pwd )"
+
+__NDK_Dir="$__CrossDir/android-ndk-$__NDK_Version"
+__lldb_Dir="$__CrossDir/lldb"
+__ToolchainDir="$__CrossDir/android-ndk-$__NDK_Version"
+
+if [[ -n "$TOOLCHAIN_DIR" ]]; then
+ __ToolchainDir=$TOOLCHAIN_DIR
+fi
+
+if [[ -n "$NDK_DIR" ]]; then
+ __NDK_Dir=$NDK_DIR
+fi
+
+echo "Target API level: $__ApiLevel"
+echo "Target architecture: $__BuildArch"
+echo "NDK location: $__NDK_Dir"
+echo "Target Toolchain location: $__ToolchainDir"
+
+# Download the NDK if required
+if [ ! -d $__NDK_Dir ]; then
+ echo Downloading the NDK into $__NDK_Dir
+ mkdir -p $__NDK_Dir
+ wget -q --progress=bar:force:noscroll --show-progress https://dl.google.com/android/repository/android-ndk-$__NDK_Version-linux-x86_64.zip -O $__CrossDir/android-ndk-$__NDK_Version-linux-x86_64.zip
+ unzip -q $__CrossDir/android-ndk-$__NDK_Version-linux-x86_64.zip -d $__CrossDir
+fi
+
+if [ ! -d $__lldb_Dir ]; then
+ mkdir -p $__lldb_Dir
+ echo Downloading LLDB into $__lldb_Dir
+ wget -q --progress=bar:force:noscroll --show-progress https://dl.google.com/android/repository/lldb-2.3.3614996-linux-x86_64.zip -O $__CrossDir/lldb-2.3.3614996-linux-x86_64.zip
+ unzip -q $__CrossDir/lldb-2.3.3614996-linux-x86_64.zip -d $__lldb_Dir
+fi
+
+echo "Download dependencies..."
+__TmpDir=$__CrossDir/tmp/$__BuildArch/
+mkdir -p "$__TmpDir"
+
+# combined dependencies for coreclr, installer and libraries
+__AndroidPackages="libicu"
+__AndroidPackages+=" libandroid-glob"
+__AndroidPackages+=" liblzma"
+__AndroidPackages+=" krb5"
+__AndroidPackages+=" openssl"
+
+for path in $(wget -qO- http://termux.net/dists/stable/main/binary-$__AndroidArch/Packages |\
+ grep -A15 "Package: \(${__AndroidPackages// /\\|}\)" | grep -v "static\|tool" | grep Filename); do
+
+ if [[ "$path" != "Filename:" ]]; then
+ echo "Working on: $path"
+ wget -qO- http://termux.net/$path | dpkg -x - "$__TmpDir"
+ fi
+done
+
+cp -R "$__TmpDir/data/data/com.termux/files/usr/"* "$__ToolchainDir/sysroot/usr/"
+
+# Generate platform file for build.sh script to assign to __DistroRid
+echo "Generating platform file..."
+echo "RID=android.${__ApiLevel}-${__BuildArch}" > $__ToolchainDir/sysroot/android_platform
+
+echo "Now to build coreclr, libraries and installers; run:"
+echo ROOTFS_DIR=\$\(realpath $__ToolchainDir/sysroot\) ./build.sh --cross --arch $__BuildArch \
+ --subsetCategory coreclr
+echo ROOTFS_DIR=\$\(realpath $__ToolchainDir/sysroot\) ./build.sh --cross --arch $__BuildArch \
+ --subsetCategory libraries
+echo ROOTFS_DIR=\$\(realpath $__ToolchainDir/sysroot\) ./build.sh --cross --arch $__BuildArch \
+ --subsetCategory installer
diff --git a/eng/common/cross/build-rootfs.sh b/eng/common/cross/build-rootfs.sh
new file mode 100755
index 0000000000..ffdff38542
--- /dev/null
+++ b/eng/common/cross/build-rootfs.sh
@@ -0,0 +1,349 @@
+#!/usr/bin/env bash
+
+set -e
+
+usage()
+{
+ echo "Usage: $0 [BuildArch] [CodeName] [lldbx.y] [--skipunmount] --rootfsdir ]"
+ echo "BuildArch can be: arm(default), armel, arm64, x86"
+ echo "CodeName - optional, Code name for Linux, can be: trusty, xenial(default), zesty, bionic, alpine. If BuildArch is armel, LinuxCodeName is jessie(default) or tizen."
+ echo " for FreeBSD can be: freebsd11 or freebsd12."
+ echo " for illumos can be: illumos."
+ echo "lldbx.y - optional, LLDB version, can be: lldb3.9(default), lldb4.0, lldb5.0, lldb6.0 no-lldb. Ignored for alpine and FReeBSD"
+ echo "--skipunmount - optional, will skip the unmount of rootfs folder."
+ echo "--use-mirror - optional, use mirror URL to fetch resources, when available."
+ exit 1
+}
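+
+# Example (illustrative): `./build-rootfs.sh arm64 bionic --rootfsdir /tmp/rootfs`
+# would build an Ubuntu 18.04 arm64 rootfs under /tmp/rootfs.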
+
+__CodeName=xenial
+__CrossDir=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
+__InitialDir=$PWD
+__BuildArch=arm
+__AlpineArch=armv7
+__QEMUArch=arm
+__UbuntuArch=armhf
+__UbuntuRepo="http://ports.ubuntu.com/"
+__LLDB_Package="liblldb-3.9-dev"
+__SkipUnmount=0
+
+# base development support
+__UbuntuPackages="build-essential"
+
+__AlpinePackages="alpine-base"
+__AlpinePackages+=" build-base"
+__AlpinePackages+=" linux-headers"
+__AlpinePackagesEdgeCommunity=" lldb-dev"
+__AlpinePackagesEdgeMain=" llvm10-libs"
+__AlpinePackagesEdgeMain+=" python3"
+__AlpinePackagesEdgeMain+=" libedit"
+
+# symlinks fixer
+__UbuntuPackages+=" symlinks"
+
+# CoreCLR and CoreFX dependencies
+__UbuntuPackages+=" libicu-dev"
+__UbuntuPackages+=" liblttng-ust-dev"
+__UbuntuPackages+=" libunwind8-dev"
+
+__AlpinePackages+=" gettext-dev"
+__AlpinePackages+=" icu-dev"
+__AlpinePackages+=" libunwind-dev"
+__AlpinePackages+=" lttng-ust-dev"
+
+# CoreFX dependencies
+__UbuntuPackages+=" libcurl4-openssl-dev"
+__UbuntuPackages+=" libkrb5-dev"
+__UbuntuPackages+=" libssl-dev"
+__UbuntuPackages+=" zlib1g-dev"
+
+__AlpinePackages+=" curl-dev"
+__AlpinePackages+=" krb5-dev"
+__AlpinePackages+=" openssl-dev"
+__AlpinePackages+=" zlib-dev"
+
+__FreeBSDBase="12.1-RELEASE"
+__FreeBSDPkg="1.12.0"
+__FreeBSDPackages="libunwind"
+__FreeBSDPackages+=" icu"
+__FreeBSDPackages+=" libinotify"
+__FreeBSDPackages+=" lttng-ust"
+__FreeBSDPackages+=" krb5"
+
+__IllumosPackages="icu-64.2nb2"
+__IllumosPackages+=" mit-krb5-1.16.2nb4"
+__IllumosPackages+=" openssl-1.1.1e"
+__IllumosPackages+=" zlib-1.2.11"
+
+__UseMirror=0
+
+__UnprocessedBuildArgs=
+while :; do
+ if [ $# -le 0 ]; then
+ break
+ fi
+
+ lowerI="$(echo $1 | awk '{print tolower($0)}')"
+ case $lowerI in
+ -?|-h|--help)
+ usage
+ exit 1
+ ;;
+ arm)
+ __BuildArch=arm
+ __UbuntuArch=armhf
+ __AlpineArch=armv7
+ __QEMUArch=arm
+ ;;
+ arm64)
+ __BuildArch=arm64
+ __UbuntuArch=arm64
+ __AlpineArch=aarch64
+ __QEMUArch=aarch64
+ ;;
+ armel)
+ __BuildArch=armel
+ __UbuntuArch=armel
+ __UbuntuRepo="http://ftp.debian.org/debian/"
+ __CodeName=jessie
+ ;;
+ x86)
+ __BuildArch=x86
+ __UbuntuArch=i386
+ __UbuntuRepo="http://archive.ubuntu.com/ubuntu/"
+ ;;
+ lldb3.6)
+ __LLDB_Package="lldb-3.6-dev"
+ ;;
+ lldb3.8)
+ __LLDB_Package="lldb-3.8-dev"
+ ;;
+ lldb3.9)
+ __LLDB_Package="liblldb-3.9-dev"
+ ;;
+ lldb4.0)
+ __LLDB_Package="liblldb-4.0-dev"
+ ;;
+ lldb5.0)
+ __LLDB_Package="liblldb-5.0-dev"
+ ;;
+ lldb6.0)
+ __LLDB_Package="liblldb-6.0-dev"
+ ;;
+ no-lldb)
+ unset __LLDB_Package
+ ;;
+ trusty) # Ubuntu 14.04
+ if [ "$__CodeName" != "jessie" ]; then
+ __CodeName=trusty
+ fi
+ ;;
+ xenial) # Ubuntu 16.04
+ if [ "$__CodeName" != "jessie" ]; then
+ __CodeName=xenial
+ fi
+ ;;
+ zesty) # Ubuntu 17.04
+ if [ "$__CodeName" != "jessie" ]; then
+ __CodeName=zesty
+ fi
+ ;;
+ bionic) # Ubuntu 18.04
+ if [ "$__CodeName" != "jessie" ]; then
+ __CodeName=bionic
+ fi
+ ;;
+ jessie) # Debian 8
+ __CodeName=jessie
+ __UbuntuRepo="http://ftp.debian.org/debian/"
+ ;;
+ stretch) # Debian 9
+ __CodeName=stretch
+ __UbuntuRepo="http://ftp.debian.org/debian/"
+ __LLDB_Package="liblldb-6.0-dev"
+ ;;
+ buster) # Debian 10
+ __CodeName=buster
+ __UbuntuRepo="http://ftp.debian.org/debian/"
+ __LLDB_Package="liblldb-6.0-dev"
+ ;;
+ tizen)
+ if [ "$__BuildArch" != "armel" ] && [ "$__BuildArch" != "arm64" ]; then
+ echo "Tizen is available only for armel and arm64."
+ usage;
+ exit 1;
+ fi
+ __CodeName=
+ __UbuntuRepo=
+ __Tizen=tizen
+ ;;
+ alpine)
+ __CodeName=alpine
+ __UbuntuRepo=
+ ;;
+ freebsd11)
+ __FreeBSDBase="11.3-RELEASE"
+ ;&
+ freebsd12)
+ __CodeName=freebsd
+ __BuildArch=x64
+ __SkipUnmount=1
+ ;;
+ illumos)
+ __CodeName=illumos
+ __BuildArch=x64
+ __SkipUnmount=1
+ ;;
+ --skipunmount)
+ __SkipUnmount=1
+ ;;
+ --rootfsdir|-rootfsdir)
+ shift
+ __RootfsDir=$1
+ ;;
+ --use-mirror)
+ __UseMirror=1
+ ;;
+ *)
+ __UnprocessedBuildArgs="$__UnprocessedBuildArgs $1"
+ ;;
+ esac
+
+ shift
+done
+
+if [ "$__BuildArch" == "armel" ]; then
+ __LLDB_Package="lldb-3.5-dev"
+fi
+__UbuntuPackages+=" ${__LLDB_Package:-}"
+
+if [ -z "$__RootfsDir" ] && [ ! -z "$ROOTFS_DIR" ]; then
+ __RootfsDir=$ROOTFS_DIR
+fi
+
+if [ -z "$__RootfsDir" ]; then
+ __RootfsDir="$__CrossDir/../../../.tools/rootfs/$__BuildArch"
+fi
+
+if [ -d "$__RootfsDir" ]; then
+ if [ $__SkipUnmount == 0 ]; then
+ umount $__RootfsDir/* || true
+ fi
+ rm -rf $__RootfsDir
+fi
+
+mkdir -p $__RootfsDir
+__RootfsDir="$( cd "$__RootfsDir" && pwd )"
+
+if [[ "$__CodeName" == "alpine" ]]; then
+ __ApkToolsVersion=2.9.1
+ __AlpineVersion=3.9
+ __ApkToolsDir=$(mktemp -d)
+ wget https://github.com/alpinelinux/apk-tools/releases/download/v$__ApkToolsVersion/apk-tools-$__ApkToolsVersion-x86_64-linux.tar.gz -P $__ApkToolsDir
+ tar -xf $__ApkToolsDir/apk-tools-$__ApkToolsVersion-x86_64-linux.tar.gz -C $__ApkToolsDir
+ mkdir -p $__RootfsDir/usr/bin
+ cp -v /usr/bin/qemu-$__QEMUArch-static $__RootfsDir/usr/bin
+
+ $__ApkToolsDir/apk-tools-$__ApkToolsVersion/apk \
+ -X http://dl-cdn.alpinelinux.org/alpine/v$__AlpineVersion/main \
+ -X http://dl-cdn.alpinelinux.org/alpine/v$__AlpineVersion/community \
+ -U --allow-untrusted --root $__RootfsDir --arch $__AlpineArch --initdb \
+ add $__AlpinePackages
+
+ $__ApkToolsDir/apk-tools-$__ApkToolsVersion/apk \
+ -X http://dl-cdn.alpinelinux.org/alpine/edge/main \
+ -U --allow-untrusted --root $__RootfsDir --arch $__AlpineArch --initdb \
+ add $__AlpinePackagesEdgeMain
+
+ $__ApkToolsDir/apk-tools-$__ApkToolsVersion/apk \
+ -X http://dl-cdn.alpinelinux.org/alpine/edge/community \
+ -U --allow-untrusted --root $__RootfsDir --arch $__AlpineArch --initdb \
+ add $__AlpinePackagesEdgeCommunity
+
+ rm -r $__ApkToolsDir
+elif [[ "$__CodeName" == "freebsd" ]]; then
+ mkdir -p $__RootfsDir/usr/local/etc
+ wget -O - https://download.freebsd.org/ftp/releases/amd64/${__FreeBSDBase}/base.txz | tar -C $__RootfsDir -Jxf - ./lib ./usr/lib ./usr/libdata ./usr/include ./usr/share/keys ./etc ./bin/freebsd-version
+ # For now, ask for 11 ABI even on 12. This can be revisited later.
+ echo "ABI = \"FreeBSD:11:amd64\"; FINGERPRINTS = \"${__RootfsDir}/usr/share/keys\"; REPOS_DIR = [\"${__RootfsDir}/etc/pkg\"]; REPO_AUTOUPDATE = NO; RUN_SCRIPTS = NO;" > ${__RootfsDir}/usr/local/etc/pkg.conf
+ echo "FreeBSD: { url: "pkg+http://pkg.FreeBSD.org/\${ABI}/quarterly", mirror_type: \"srv\", signature_type: \"fingerprints\", fingerprints: \"${__RootfsDir}/usr/share/keys/pkg\", enabled: yes }" > ${__RootfsDir}/etc/pkg/FreeBSD.conf
+ mkdir -p $__RootfsDir/tmp
+ # get and build package manager
+ wget -O - https://github.com/freebsd/pkg/archive/${__FreeBSDPkg}.tar.gz | tar -C $__RootfsDir/tmp -zxf -
+ cd $__RootfsDir/tmp/pkg-${__FreeBSDPkg}
+ # needed for install to succeed
+ mkdir -p $__RootfsDir/host/etc
+ ./autogen.sh && ./configure --prefix=$__RootfsDir/host && make && make install
+ rm -rf $__RootfsDir/tmp/pkg-${__FreeBSDPkg}
+ # install packages we need.
+ INSTALL_AS_USER=$(whoami) $__RootfsDir/host/sbin/pkg -r $__RootfsDir -C $__RootfsDir/usr/local/etc/pkg.conf update
+ INSTALL_AS_USER=$(whoami) $__RootfsDir/host/sbin/pkg -r $__RootfsDir -C $__RootfsDir/usr/local/etc/pkg.conf install --yes $__FreeBSDPackages
+elif [[ "$__CodeName" == "illumos" ]]; then
+ mkdir "$__RootfsDir/tmp"
+ pushd "$__RootfsDir/tmp"
+ JOBS="$(getconf _NPROCESSORS_ONLN)"
+ echo "Downloading sysroot."
+ wget -O - https://github.com/illumos/sysroot/releases/download/20181213-de6af22ae73b-v1/illumos-sysroot-i386-20181213-de6af22ae73b-v1.tar.gz | tar -C "$__RootfsDir" -xzf -
+ echo "Building binutils. Please wait.."
+ wget -O - https://ftp.gnu.org/gnu/binutils/binutils-2.33.1.tar.bz2 | tar -xjf -
+ mkdir build-binutils && cd build-binutils
+ ../binutils-2.33.1/configure --prefix="$__RootfsDir" --target="x86_64-sun-solaris2.10" --program-prefix="x86_64-illumos-" --with-sysroot="$__RootfsDir"
+ make -j "$JOBS" && make install && cd ..
+ echo "Building gcc. Please wait.."
+ wget -O - https://ftp.gnu.org/gnu/gcc/gcc-8.4.0/gcc-8.4.0.tar.xz | tar -xJf -
+ CFLAGS="-fPIC"
+ CXXFLAGS="-fPIC"
+ CXXFLAGS_FOR_TARGET="-fPIC"
+ CFLAGS_FOR_TARGET="-fPIC"
+ export CFLAGS CXXFLAGS CXXFLAGS_FOR_TARGET CFLAGS_FOR_TARGET
+ mkdir build-gcc && cd build-gcc
+ ../gcc-8.4.0/configure --prefix="$__RootfsDir" --target="x86_64-sun-solaris2.10" --program-prefix="x86_64-illumos-" --with-sysroot="$__RootfsDir" --with-gnu-as \
+ --with-gnu-ld --disable-nls --disable-libgomp --disable-libquadmath --disable-libssp --disable-libvtv --disable-libcilkrts --disable-libada --disable-libsanitizer \
+ --disable-libquadmath-support --disable-shared --enable-tls
+ make -j "$JOBS" && make install && cd ..
+ BaseUrl=https://pkgsrc.joyent.com
+ if [[ "$__UseMirror" == 1 ]]; then
+ BaseUrl=http://pkgsrc.smartos.skylime.net
+ fi
+ BaseUrl="$BaseUrl"/packages/SmartOS/2020Q1/x86_64/All
+ echo "Downloading dependencies."
+ read -ra array <<<"$__IllumosPackages"
+ for package in "${array[@]}"; do
+ echo "Installing $package..."
+ wget "$BaseUrl"/"$package".tgz
+ ar -x "$package".tgz
+ tar --skip-old-files -xzf "$package".tmp.tgz -C "$__RootfsDir" 2>/dev/null
+ done
+ echo "Cleaning up temporary files."
+ popd
+ rm -rf "$__RootfsDir"/{tmp,+*}
+ mkdir -p "$__RootfsDir"/usr/include/net
+ mkdir -p "$__RootfsDir"/usr/include/netpacket
+ wget -P "$__RootfsDir"/usr/include/net https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/io/bpf/net/bpf.h
+ wget -P "$__RootfsDir"/usr/include/net https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/io/bpf/net/dlt.h
+ wget -P "$__RootfsDir"/usr/include/netpacket https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/inet/sockmods/netpacket/packet.h
+ wget -P "$__RootfsDir"/usr/include/sys https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/sys/sdt.h
+elif [[ -n $__CodeName ]]; then
+ qemu-debootstrap --arch $__UbuntuArch $__CodeName $__RootfsDir $__UbuntuRepo
+ cp $__CrossDir/$__BuildArch/sources.list.$__CodeName $__RootfsDir/etc/apt/sources.list
+ chroot $__RootfsDir apt-get update
+ chroot $__RootfsDir apt-get -f -y install
+ chroot $__RootfsDir apt-get -y install $__UbuntuPackages
+ chroot $__RootfsDir symlinks -cr /usr
+
+ if [ $__SkipUnmount == 0 ]; then
+ umount $__RootfsDir/* || true
+ fi
+
+ if [[ "$__BuildArch" == "arm" && "$__CodeName" == "trusty" ]]; then
+ pushd $__RootfsDir
+ patch -p1 < $__CrossDir/$__BuildArch/trusty.patch
+ patch -p1 < $__CrossDir/$__BuildArch/trusty-lttng-2.4.patch
+ popd
+ fi
+elif [[ "$__Tizen" == "tizen" ]]; then
+ ROOTFS_DIR=$__RootfsDir $__CrossDir/$__BuildArch/tizen-build-rootfs.sh
+else
+ echo "Unsupported target platform."
+ usage;
+ exit 1
+fi
diff --git a/eng/common/cross/toolchain.cmake b/eng/common/cross/toolchain.cmake
new file mode 100644
index 0000000000..137736c0a2
--- /dev/null
+++ b/eng/common/cross/toolchain.cmake
@@ -0,0 +1,235 @@
+set(CROSS_ROOTFS $ENV{ROOTFS_DIR})
+
+set(TARGET_ARCH_NAME $ENV{TARGET_BUILD_ARCH})
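+
+# The target OS is inferred from the rootfs contents: a FreeBSD base system
+# ships bin/freebsd-version, an illumos sysroot carries usr/platform/i86pc,
+# and anything else is treated as Linux.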
+if(EXISTS ${CROSS_ROOTFS}/bin/freebsd-version)
+ set(CMAKE_SYSTEM_NAME FreeBSD)
+elseif(EXISTS ${CROSS_ROOTFS}/usr/platform/i86pc)
+ set(CMAKE_SYSTEM_NAME SunOS)
+ set(ILLUMOS 1)
+else()
+ set(CMAKE_SYSTEM_NAME Linux)
+endif()
+set(CMAKE_SYSTEM_VERSION 1)
+
+if(TARGET_ARCH_NAME STREQUAL "armel")
+ set(CMAKE_SYSTEM_PROCESSOR armv7l)
+ set(TOOLCHAIN "arm-linux-gnueabi")
+ if("$ENV{__DistroRid}" MATCHES "tizen.*")
+ set(TIZEN_TOOLCHAIN "armv7l-tizen-linux-gnueabi/9.2.0")
+ endif()
+elseif(TARGET_ARCH_NAME STREQUAL "arm")
+ set(CMAKE_SYSTEM_PROCESSOR armv7l)
+ if(EXISTS ${CROSS_ROOTFS}/usr/lib/gcc/armv7-alpine-linux-musleabihf)
+ set(TOOLCHAIN "armv7-alpine-linux-musleabihf")
+ elseif(EXISTS ${CROSS_ROOTFS}/usr/lib/gcc/armv6-alpine-linux-musleabihf)
+ set(TOOLCHAIN "armv6-alpine-linux-musleabihf")
+ else()
+ set(TOOLCHAIN "arm-linux-gnueabihf")
+ endif()
+elseif(TARGET_ARCH_NAME STREQUAL "arm64")
+ set(CMAKE_SYSTEM_PROCESSOR aarch64)
+ if(EXISTS ${CROSS_ROOTFS}/usr/lib/gcc/aarch64-alpine-linux-musl)
+ set(TOOLCHAIN "aarch64-alpine-linux-musl")
+ else()
+ set(TOOLCHAIN "aarch64-linux-gnu")
+ endif()
+ if("$ENV{__DistroRid}" MATCHES "tizen.*")
+ set(TIZEN_TOOLCHAIN "aarch64-tizen-linux-gnu/9.2.0")
+ endif()
+elseif(TARGET_ARCH_NAME STREQUAL "x86")
+ set(CMAKE_SYSTEM_PROCESSOR i686)
+ set(TOOLCHAIN "i686-linux-gnu")
+elseif (CMAKE_SYSTEM_NAME STREQUAL "FreeBSD")
+ set(CMAKE_SYSTEM_PROCESSOR "x86_64")
+ set(triple "x86_64-unknown-freebsd11")
+elseif (ILLUMOS)
+ set(CMAKE_SYSTEM_PROCESSOR "x86_64")
+ set(TOOLCHAIN "x86_64-illumos")
+else()
+ message(FATAL_ERROR "Arch is ${TARGET_ARCH_NAME}. Only armel, arm, arm64 and x86 are supported!")
+endif()
+
+if(DEFINED ENV{TOOLCHAIN})
+ set(TOOLCHAIN $ENV{TOOLCHAIN})
+endif()
+
+# Specify include paths
+if(DEFINED TIZEN_TOOLCHAIN)
+ if(TARGET_ARCH_NAME STREQUAL "armel")
+ include_directories(SYSTEM ${CROSS_ROOTFS}/usr/lib/gcc/${TIZEN_TOOLCHAIN}/include/c++/)
+ include_directories(SYSTEM ${CROSS_ROOTFS}/usr/lib/gcc/${TIZEN_TOOLCHAIN}/include/c++/armv7l-tizen-linux-gnueabi)
+ endif()
+ if(TARGET_ARCH_NAME STREQUAL "arm64")
+ include_directories(SYSTEM ${CROSS_ROOTFS}/usr/lib64/gcc/${TIZEN_TOOLCHAIN}/include/c++/)
+ include_directories(SYSTEM ${CROSS_ROOTFS}/usr/lib64/gcc/${TIZEN_TOOLCHAIN}/include/c++/aarch64-tizen-linux-gnu)
+ endif()
+endif()
+
+if("$ENV{__DistroRid}" MATCHES "android.*")
+ if(TARGET_ARCH_NAME STREQUAL "arm")
+ set(ANDROID_ABI armeabi-v7a)
+ elseif(TARGET_ARCH_NAME STREQUAL "arm64")
+ set(ANDROID_ABI arm64-v8a)
+ endif()
+
+ # extract platform number required by the NDK's toolchain
+ string(REGEX REPLACE ".*\\.([0-9]+)-.*" "\\1" ANDROID_PLATFORM "$ENV{__DistroRid}")
+
+ set(ANDROID_TOOLCHAIN clang)
+ set(FEATURE_EVENT_TRACE 0) # disable event trace as there is no lttng-ust package in termux repository
+ set(CMAKE_SYSTEM_LIBRARY_PATH "${CROSS_ROOTFS}/usr/lib")
+ set(CMAKE_SYSTEM_INCLUDE_PATH "${CROSS_ROOTFS}/usr/include")
+
+ # include official NDK toolchain script
+ include(${CROSS_ROOTFS}/../build/cmake/android.toolchain.cmake)
+elseif(CMAKE_SYSTEM_NAME STREQUAL "FreeBSD")
+ # we cross-compile by instructing clang
+ set(CMAKE_C_COMPILER_TARGET ${triple})
+ set(CMAKE_CXX_COMPILER_TARGET ${triple})
+ set(CMAKE_ASM_COMPILER_TARGET ${triple})
+ set(CMAKE_SYSROOT "${CROSS_ROOTFS}")
+elseif(ILLUMOS)
+ set(CMAKE_SYSROOT "${CROSS_ROOTFS}")
+
+ include_directories(SYSTEM ${CROSS_ROOTFS}/include)
+
+ set(TOOLSET_PREFIX ${TOOLCHAIN}-)
+ function(locate_toolchain_exec exec var)
+ string(TOUPPER ${exec} EXEC_UPPERCASE)
+ if(NOT "$ENV{CLR_${EXEC_UPPERCASE}}" STREQUAL "")
+ set(${var} "$ENV{CLR_${EXEC_UPPERCASE}}" PARENT_SCOPE)
+ return()
+ endif()
+
+ find_program(EXEC_LOCATION_${exec}
+ NAMES
+ "${TOOLSET_PREFIX}${exec}${CLR_CMAKE_COMPILER_FILE_NAME_VERSION}"
+ "${TOOLSET_PREFIX}${exec}")
+
+ if (EXEC_LOCATION_${exec} STREQUAL "EXEC_LOCATION_${exec}-NOTFOUND")
+ message(FATAL_ERROR "Unable to find toolchain executable. Name: ${exec}, Prefix: ${TOOLSET_PREFIX}.")
+ endif()
+ set(${var} ${EXEC_LOCATION_${exec}} PARENT_SCOPE)
+ endfunction()
+
+ set(CMAKE_SYSTEM_PREFIX_PATH "${CROSS_ROOTFS}")
+
+ locate_toolchain_exec(gcc CMAKE_C_COMPILER)
+ locate_toolchain_exec(g++ CMAKE_CXX_COMPILER)
+
+ set(CMAKE_C_STANDARD_LIBRARIES "${CMAKE_C_STANDARD_LIBRARIES} -lssp")
+ set(CMAKE_CXX_STANDARD_LIBRARIES "${CMAKE_CXX_STANDARD_LIBRARIES} -lssp")
+else()
+ set(CMAKE_SYSROOT "${CROSS_ROOTFS}")
+
+ set(CMAKE_C_COMPILER_EXTERNAL_TOOLCHAIN "${CROSS_ROOTFS}/usr")
+ set(CMAKE_CXX_COMPILER_EXTERNAL_TOOLCHAIN "${CROSS_ROOTFS}/usr")
+ set(CMAKE_ASM_COMPILER_EXTERNAL_TOOLCHAIN "${CROSS_ROOTFS}/usr")
+endif()
+
+# Specify link flags
+
+function(add_toolchain_linker_flag Flag)
+ set(Config "${ARGV1}")
+ set(CONFIG_SUFFIX "")
+ if (NOT Config STREQUAL "")
+ set(CONFIG_SUFFIX "_${Config}")
+ endif()
+ set("CMAKE_EXE_LINKER_FLAGS${CONFIG_SUFFIX}" "${CMAKE_EXE_LINKER_FLAGS${CONFIG_SUFFIX}} ${Flag}" PARENT_SCOPE)
+ set("CMAKE_SHARED_LINKER_FLAGS${CONFIG_SUFFIX}" "${CMAKE_SHARED_LINKER_FLAGS${CONFIG_SUFFIX}} ${Flag}" PARENT_SCOPE)
+endfunction()
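+
+# Example: add_toolchain_linker_flag("-m32" DEBUG) would append -m32 to
+# CMAKE_EXE_LINKER_FLAGS_DEBUG and CMAKE_SHARED_LINKER_FLAGS_DEBUG only;
+# omitting the config argument applies the flag to all configurations.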
+
+
+if(TARGET_ARCH_NAME STREQUAL "armel")
+ if(DEFINED TIZEN_TOOLCHAIN) # For Tizen only
+ add_toolchain_linker_flag("-B${CROSS_ROOTFS}/usr/lib/gcc/${TIZEN_TOOLCHAIN}")
+ add_toolchain_linker_flag("-L${CROSS_ROOTFS}/lib")
+ add_toolchain_linker_flag("-L${CROSS_ROOTFS}/usr/lib")
+ add_toolchain_linker_flag("-L${CROSS_ROOTFS}/usr/lib/gcc/${TIZEN_TOOLCHAIN}")
+ endif()
+elseif(TARGET_ARCH_NAME STREQUAL "arm64")
+ if(DEFINED TIZEN_TOOLCHAIN) # For Tizen only
+ add_toolchain_linker_flag("-B${CROSS_ROOTFS}/usr/lib64/gcc/${TIZEN_TOOLCHAIN}")
+ add_toolchain_linker_flag("-L${CROSS_ROOTFS}/lib64")
+ add_toolchain_linker_flag("-L${CROSS_ROOTFS}/usr/lib64")
+ add_toolchain_linker_flag("-L${CROSS_ROOTFS}/usr/lib64/gcc/${TIZEN_TOOLCHAIN}")
+
+ add_toolchain_linker_flag("-Wl,--rpath-link=${CROSS_ROOTFS}/lib64")
+ add_toolchain_linker_flag("-Wl,--rpath-link=${CROSS_ROOTFS}/usr/lib64")
+ add_toolchain_linker_flag("-Wl,--rpath-link=${CROSS_ROOTFS}/usr/lib64/gcc/${TIZEN_TOOLCHAIN}")
+ endif()
+elseif(TARGET_ARCH_NAME STREQUAL "x86")
+ add_toolchain_linker_flag(-m32)
+elseif(ILLUMOS)
+ add_toolchain_linker_flag("-L${CROSS_ROOTFS}/lib/amd64")
+ add_toolchain_linker_flag("-L${CROSS_ROOTFS}/usr/amd64/lib")
+endif()
+
+# Specify compile options
+
+if((TARGET_ARCH_NAME MATCHES "^(arm|armel|arm64)$" AND NOT "$ENV{__DistroRid}" MATCHES "android.*") OR ILLUMOS)
+ set(CMAKE_C_COMPILER_TARGET ${TOOLCHAIN})
+ set(CMAKE_CXX_COMPILER_TARGET ${TOOLCHAIN})
+ set(CMAKE_ASM_COMPILER_TARGET ${TOOLCHAIN})
+endif()
+
+if(TARGET_ARCH_NAME MATCHES "^(arm|armel)$")
+ add_compile_options(-mthumb)
+ if (NOT DEFINED CLR_ARM_FPU_TYPE)
+ set (CLR_ARM_FPU_TYPE vfpv3)
+ endif (NOT DEFINED CLR_ARM_FPU_TYPE)
+
+ add_compile_options (-mfpu=${CLR_ARM_FPU_TYPE})
+ if (NOT DEFINED CLR_ARM_FPU_CAPABILITY)
+ set (CLR_ARM_FPU_CAPABILITY 0x7)
+ endif (NOT DEFINED CLR_ARM_FPU_CAPABILITY)
+
+ add_definitions (-DCLR_ARM_FPU_CAPABILITY=${CLR_ARM_FPU_CAPABILITY})
+
+ if(TARGET_ARCH_NAME STREQUAL "armel")
+ add_compile_options(-mfloat-abi=softfp)
+ endif()
+elseif(TARGET_ARCH_NAME STREQUAL "x86")
+ add_compile_options(-m32)
+ add_compile_options(-Wno-error=unused-command-line-argument)
+endif()
+
+if(DEFINED TIZEN_TOOLCHAIN)
+ if(TARGET_ARCH_NAME MATCHES "^(armel|arm64)$")
+ add_compile_options(-Wno-deprecated-declarations) # compile-time option
+ add_compile_options(-D__extern_always_inline=inline) # compile-time option
+ endif()
+endif()
+
+# Set LLDB include and library paths for builds that need lldb.
+if(TARGET_ARCH_NAME MATCHES "^(arm|armel|x86)$")
+ if(TARGET_ARCH_NAME STREQUAL "x86")
+ set(LLVM_CROSS_DIR "$ENV{LLVM_CROSS_HOME}")
+ else() # arm/armel case
+ set(LLVM_CROSS_DIR "$ENV{LLVM_ARM_HOME}")
+ endif()
+ if(LLVM_CROSS_DIR)
+ set(WITH_LLDB_LIBS "${LLVM_CROSS_DIR}/lib/" CACHE STRING "")
+ set(WITH_LLDB_INCLUDES "${LLVM_CROSS_DIR}/include" CACHE STRING "")
+ set(LLDB_H "${WITH_LLDB_INCLUDES}" CACHE STRING "")
+ set(LLDB "${LLVM_CROSS_DIR}/lib/liblldb.so" CACHE STRING "")
+ else()
+ if(TARGET_ARCH_NAME STREQUAL "x86")
+ set(WITH_LLDB_LIBS "${CROSS_ROOTFS}/usr/lib/i386-linux-gnu" CACHE STRING "")
+ set(CHECK_LLVM_DIR "${CROSS_ROOTFS}/usr/lib/llvm-3.8/include")
+ if(EXISTS "${CHECK_LLVM_DIR}" AND IS_DIRECTORY "${CHECK_LLVM_DIR}")
+ set(WITH_LLDB_INCLUDES "${CHECK_LLVM_DIR}")
+ else()
+ set(WITH_LLDB_INCLUDES "${CROSS_ROOTFS}/usr/lib/llvm-3.6/include")
+ endif()
+ else() # arm/armel case
+ set(WITH_LLDB_LIBS "${CROSS_ROOTFS}/usr/lib/${TOOLCHAIN}" CACHE STRING "")
+ set(WITH_LLDB_INCLUDES "${CROSS_ROOTFS}/usr/lib/llvm-3.6/include" CACHE STRING "")
+ endif()
+ endif()
+endif()
+
+set(CMAKE_FIND_ROOT_PATH_MODE_PROGRAM NEVER)
+set(CMAKE_FIND_ROOT_PATH_MODE_LIBRARY ONLY)
+set(CMAKE_FIND_ROOT_PATH_MODE_INCLUDE ONLY)
+set(CMAKE_FIND_ROOT_PATH_MODE_PACKAGE ONLY)
diff --git a/eng/common/darc-init.ps1 b/eng/common/darc-init.ps1
new file mode 100644
index 0000000000..435e764134
--- /dev/null
+++ b/eng/common/darc-init.ps1
@@ -0,0 +1,47 @@
+param (
+ $darcVersion = $null,
+ $versionEndpoint = 'https://maestro-prod.westus2.cloudapp.azure.com/api/assets/darc-version?api-version=2019-01-16',
+ $verbosity = 'minimal',
+ $toolpath = $null
+)
+
+. $PSScriptRoot\tools.ps1
+
+function InstallDarcCli ($darcVersion, $toolpath) {
+ $darcCliPackageName = 'microsoft.dotnet.darc'
+
+ $dotnetRoot = InitializeDotNetCli -install:$true
+ $dotnet = "$dotnetRoot\dotnet.exe"
+ $toolList = & "$dotnet" tool list -g
+
+ if ($toolList -like "*$darcCliPackageName*") {
+ & "$dotnet" tool uninstall $darcCliPackageName -g
+ }
+
+ # If the user didn't explicitly specify the darc version,
+ # query the Maestro API for the correct version of darc to install.
+ if (-not $darcVersion) {
+ $darcVersion = $(Invoke-WebRequest -Uri $versionEndpoint -UseBasicParsing).Content
+ }
+
+ $arcadeServicesSource = 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng/nuget/v3/index.json'
+
+ Write-Host "Installing Darc CLI version $darcVersion..."
+ Write-Host 'You may need to restart your command window if this is the first dotnet tool you have installed.'
+ if (-not $toolpath) {
+ Write-Host "'$dotnet' tool install $darcCliPackageName --version $darcVersion --add-source '$arcadeServicesSource' -v $verbosity -g"
+ & "$dotnet" tool install $darcCliPackageName --version $darcVersion --add-source "$arcadeServicesSource" -v $verbosity -g
+ } else {
+ Write-Host "'$dotnet' tool install $darcCliPackageName --version $darcVersion --add-source '$arcadeServicesSource' -v $verbosity --tool-path '$toolpath'"
+ & "$dotnet" tool install $darcCliPackageName --version $darcVersion --add-source "$arcadeServicesSource" -v $verbosity --tool-path "$toolpath"
+ }
+}
+
+try {
+ InstallDarcCli $darcVersion $toolpath
+}
+catch {
+ Write-Host $_.ScriptStackTrace
+ Write-PipelineTelemetryError -Category 'Darc' -Message $_
+ ExitWithExitCode 1
+}
\ No newline at end of file
diff --git a/eng/common/darc-init.sh b/eng/common/darc-init.sh
new file mode 100755
index 0000000000..d981d7bbf3
--- /dev/null
+++ b/eng/common/darc-init.sh
@@ -0,0 +1,82 @@
+#!/usr/bin/env bash
+
+source="${BASH_SOURCE[0]}"
+darcVersion=''
+versionEndpoint='https://maestro-prod.westus2.cloudapp.azure.com/api/assets/darc-version?api-version=2019-01-16'
+verbosity='minimal'
+
+while [[ $# > 0 ]]; do
+ opt="$(echo "$1" | awk '{print tolower($0)}')"
+ case "$opt" in
+ --darcversion)
+ darcVersion=$2
+ shift
+ ;;
+ --versionendpoint)
+ versionEndpoint=$2
+ shift
+ ;;
+ --verbosity)
+ verbosity=$2
+ shift
+ ;;
+ --toolpath)
+ toolpath=$2
+ shift
+ ;;
+ *)
+ echo "Invalid argument: $1"
+ exit 1
+ ;;
+ esac
+
+ shift
+done
+
+# resolve $source until the file is no longer a symlink
+while [[ -h "$source" ]]; do
+ scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
+ source="$(readlink "$source")"
+ # if $source was a relative symlink, we need to resolve it relative to the path where the
+ # symlink file was located
+ [[ $source != /* ]] && source="$scriptroot/$source"
+done
+scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
+
+. "$scriptroot/tools.sh"
+
+if [ -z "$darcVersion" ]; then
+ darcVersion=$(curl -X GET "$versionEndpoint" -H "accept: text/plain")
+fi
+
+function InstallDarcCli {
+ local darc_cli_package_name="microsoft.dotnet.darc"
+
+ InitializeDotNetCli
+ local dotnet_root=$_InitializeDotNetCli
+
+ if [ -z "$toolpath" ]; then
+ local tool_list=$($dotnet_root/dotnet tool list -g)
+ if [[ $tool_list = *$darc_cli_package_name* ]]; then
+ echo $($dotnet_root/dotnet tool uninstall $darc_cli_package_name -g)
+ fi
+ else
+ local tool_list=$($dotnet_root/dotnet tool list --tool-path "$toolpath")
+ if [[ $tool_list = *$darc_cli_package_name* ]]; then
+ echo $($dotnet_root/dotnet tool uninstall $darc_cli_package_name --tool-path "$toolpath")
+ fi
+ fi
+
+ local arcadeServicesSource="https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json"
+
+ echo "Installing Darc CLI version $darcVersion..."
+ echo "You may need to restart your command shell if this is the first dotnet tool you have installed."
+ if [ -z "$toolpath" ]; then
+ echo $($dotnet_root/dotnet tool install $darc_cli_package_name --version $darcVersion --add-source "$arcadeServicesSource" -v $verbosity -g)
+ else
+ echo $($dotnet_root/dotnet tool install $darc_cli_package_name --version $darcVersion --add-source "$arcadeServicesSource" -v $verbosity --tool-path "$toolpath")
+ fi
+}
+
+InstallDarcCli
diff --git a/eng/common/dotnet-install-scripts/dotnet-install.ps1 b/eng/common/dotnet-install-scripts/dotnet-install.ps1
new file mode 100644
index 0000000000..f63b533f25
--- /dev/null
+++ b/eng/common/dotnet-install-scripts/dotnet-install.ps1
@@ -0,0 +1,774 @@
+#
+# Copyright (c) .NET Foundation and contributors. All rights reserved.
+# Licensed under the MIT license. See LICENSE file in the project root for full license information.
+#
+
+# Copied from https://dot.net/v1/dotnet-install.ps1 on 8/26/2020
+
+<#
+.SYNOPSIS
+ Installs dotnet cli
+.DESCRIPTION
+ Installs dotnet cli. If dotnet installation already exists in the given directory
+ it will update it only if the requested version differs from the one already installed.
+.PARAMETER Channel
+ Default: LTS
+ Download from the Channel specified. Possible values:
+ - Current - most current release
+ - LTS - most current supported release
+ - 2-part version in a format A.B - represents a specific release
+ examples: 2.0, 1.0
+ - Branch name
+ examples: release/2.0.0, Master
+ Note: The version parameter overrides the channel parameter.
+.PARAMETER Version
+ Default: latest
+ Represents a build version on the specified channel. Possible values:
+ - latest - the latest build on the specified channel
+ - coherent - the latest coherent build on the specified channel
+ coherent applies only to SDK downloads
+ - 3-part version in a format A.B.C - represents specific version of build
+ examples: 2.0.0-preview2-006120, 1.1.0
+.PARAMETER InstallDir
+ Default: %LocalAppData%\Microsoft\dotnet
+ Path to where to install dotnet. Note that binaries will be placed directly in a given directory.
+.PARAMETER Architecture
+ Default: - this value represents currently running OS architecture
+ Architecture of dotnet binaries to be installed.
+ Possible values are: , amd64, x64, x86, arm64, arm
+.PARAMETER SharedRuntime
+ This parameter is obsolete and may be removed in a future version of this script.
+ The recommended alternative is '-Runtime dotnet'.
+ Installs just the shared runtime bits, not the entire SDK.
+.PARAMETER Runtime
+ Installs just a shared runtime, not the entire SDK.
+ Possible values:
+ - dotnet - the Microsoft.NETCore.App shared runtime
+ - aspnetcore - the Microsoft.AspNetCore.App shared runtime
+ - windowsdesktop - the Microsoft.WindowsDesktop.App shared runtime
+.PARAMETER DryRun
+ If set it will not perform installation but instead display what command line to use to consistently install
+ the currently requested version of the dotnet cli. For example, if you specify version 'latest' it will display a link
+ with the specific version so that this command can be used deterministically in a build script.
+ It also displays the binaries location if you prefer to install or download it yourself.
+.PARAMETER NoPath
+ By default this script will set environment variable PATH for the current process to the binaries folder inside installation folder.
+ If set it will display binaries location but not set any environment variable.
+.PARAMETER Verbose
+ Displays diagnostics information.
+.PARAMETER AzureFeed
+ Default: https://dotnetcli.azureedge.net/dotnet
+ This parameter typically is not changed by the user.
+ It allows changing the URL for the Azure feed used by this installer.
+.PARAMETER UncachedFeed
+ This parameter typically is not changed by the user.
+ It allows changing the URL for the Uncached feed used by this installer.
+.PARAMETER FeedCredential
+ Used as a query string to append to the Azure feed.
+ It allows changing the URL to use non-public blob storage accounts.
+.PARAMETER ProxyAddress
+ If set, the installer will use the proxy when making web requests
+.PARAMETER ProxyUseDefaultCredentials
+ Default: false
+ Use default credentials when using the proxy address.
+.PARAMETER ProxyBypassList
+ If set with ProxyAddress, provides a comma-separated list of URLs that bypass the proxy
+.PARAMETER SkipNonVersionedFiles
+ Default: false
+ Skips installing non-versioned files if they already exist, such as dotnet.exe.
+.PARAMETER NoCdn
+ Disable downloading from the Azure CDN, and use the uncached feed directly.
+.PARAMETER JSonFile
+ Determines the SDK version from a user specified global.json file
+ Note: global.json must have a value for 'SDK:Version'
+#>
+[cmdletbinding()]
+param(
+ [string]$Channel="LTS",
+ [string]$Version="Latest",
+ [string]$JSonFile,
+ [string]$InstallDir="",
+ [string]$Architecture="",
+ [ValidateSet("dotnet", "aspnetcore", "windowsdesktop", IgnoreCase = $false)]
+ [string]$Runtime,
+ [Obsolete("This parameter may be removed in a future version of this script. The recommended alternative is '-Runtime dotnet'.")]
+ [switch]$SharedRuntime,
+ [switch]$DryRun,
+ [switch]$NoPath,
+ [string]$AzureFeed="https://dotnetcli.azureedge.net/dotnet",
+ [string]$UncachedFeed="https://dotnetcli.blob.core.windows.net/dotnet",
+ [string]$FeedCredential,
+ [string]$ProxyAddress,
+ [switch]$ProxyUseDefaultCredentials,
+ [string[]]$ProxyBypassList=@(),
+ [switch]$SkipNonVersionedFiles,
+ [switch]$NoCdn
+)
+
+Set-StrictMode -Version Latest
+$ErrorActionPreference="Stop"
+$ProgressPreference="SilentlyContinue"
+
+if ($NoCdn) {
+ $AzureFeed = $UncachedFeed
+}
+
+$BinFolderRelativePath=""
+
+if ($SharedRuntime -and (-not $Runtime)) {
+ $Runtime = "dotnet"
+}
+
+# example path with regex: shared/1.0.0-beta-12345/somepath
+$VersionRegEx="/\d+\.\d+[^/]+/"
+$OverrideNonVersionedFiles = !$SkipNonVersionedFiles
+
+function Say($str) {
+ try
+ {
+ Write-Host "dotnet-install: $str"
+ }
+ catch
+ {
+ # Some platforms cannot utilize Write-Host (Azure Functions, for instance). Fall back to Write-Output
+ Write-Output "dotnet-install: $str"
+ }
+}
+
+function Say-Verbose($str) {
+ try
+ {
+ Write-Verbose "dotnet-install: $str"
+ }
+ catch
+ {
+ # Some platforms cannot utilize Write-Verbose (Azure Functions, for instance). Fall back to Write-Output
+ Write-Output "dotnet-install: $str"
+ }
+}
+
+function Say-Invocation($Invocation) {
+ $command = $Invocation.MyCommand;
+ $args = (($Invocation.BoundParameters.Keys | foreach { "-$_ `"$($Invocation.BoundParameters[$_])`"" }) -join " ")
+ Say-Verbose "$command $args"
+}
+
+function Invoke-With-Retry([ScriptBlock]$ScriptBlock, [int]$MaxAttempts = 3, [int]$SecondsBetweenAttempts = 1) {
+ $Attempts = 0
+
+ while ($true) {
+ try {
+ return $ScriptBlock.Invoke()
+ }
+ catch {
+ $Attempts++
+ if ($Attempts -lt $MaxAttempts) {
+ Start-Sleep $SecondsBetweenAttempts
+ }
+ else {
+ throw
+ }
+ }
+ }
+}
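+
+# Example (hypothetical script block): Invoke-With-Retry({ SomeFlakyCall })
+# re-runs the block up to 3 times by default, sleeping 1 second between
+# attempts and rethrowing the last failure.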
+
+function Get-Machine-Architecture() {
+ Say-Invocation $MyInvocation
+
+ # On PS x86, PROCESSOR_ARCHITECTURE reports x86 even on x64 systems.
+ # To get the correct architecture, we need to use PROCESSOR_ARCHITEW6432.
+ # PS x64 doesn't define this, so we fall back to PROCESSOR_ARCHITECTURE.
+ # Possible values: amd64, x64, x86, arm64, arm
+
+ if( $ENV:PROCESSOR_ARCHITEW6432 -ne $null )
+ {
+ return $ENV:PROCESSOR_ARCHITEW6432
+ }
+
+ return $ENV:PROCESSOR_ARCHITECTURE
+}
+
+function Get-CLIArchitecture-From-Architecture([string]$Architecture) {
+ Say-Invocation $MyInvocation
+
+ switch ($Architecture.ToLower()) {
+ { $_ -eq "" } { return Get-CLIArchitecture-From-Architecture $(Get-Machine-Architecture) }
+ { ($_ -eq "amd64") -or ($_ -eq "x64") } { return "x64" }
+ { $_ -eq "x86" } { return "x86" }
+ { $_ -eq "arm" } { return "arm" }
+ { $_ -eq "arm64" } { return "arm64" }
+ default { throw "Architecture not supported. If you think this is a bug, report it at https://github.com/dotnet/sdk/issues" }
+ }
+}
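+
+# Example: "AMD64" and "x64" both normalize to "x64"; an empty string falls
+# back to the machine architecture reported by Get-Machine-Architecture.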
+
+# The version text returned from the feeds is a 1-line or 2-line string:
+# For the SDK and the dotnet runtime (2 lines):
+# Line 1: # commit_hash
+# Line 2: # 4-part version
+# For the aspnetcore runtime (1 line):
+# Line 1: # 4-part version
+function Get-Version-Info-From-Version-Text([string]$VersionText) {
+ Say-Invocation $MyInvocation
+
+ $Data = -split $VersionText
+
+ $VersionInfo = @{
+ CommitHash = $(if ($Data.Count -gt 1) { $Data[0] })
+ Version = $Data[-1] # last line is always the version number.
+ }
+ return $VersionInfo
+}
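+
+# Example: a two-line response such as "abc123" then "5.0.100" (hypothetical
+# commit hash, then version) parses to CommitHash = 'abc123' and
+# Version = '5.0.100'; a one-line aspnetcore response sets only Version.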
+
+function Load-Assembly([string] $Assembly) {
+ try {
+ Add-Type -Assembly $Assembly | Out-Null
+ }
+ catch {
+ # On Nano Server, Powershell Core Edition is used. Add-Type is unable to resolve base class assemblies because they are not GAC'd.
+ # Loading the base class assemblies is not necessary as the types will automatically get resolved.
+ }
+}
+
+function GetHTTPResponse([Uri] $Uri)
+{
+ Invoke-With-Retry(
+ {
+
+ $HttpClient = $null
+
+ try {
+ # HttpClient is used vs Invoke-WebRequest in order to support Nano Server which doesn't support the Invoke-WebRequest cmdlet.
+ Load-Assembly -Assembly System.Net.Http
+
+ if(-not $ProxyAddress) {
+ try {
+ # Despite no proxy being explicitly specified, we may still be behind a default proxy
+ $DefaultProxy = [System.Net.WebRequest]::DefaultWebProxy;
+ if($DefaultProxy -and (-not $DefaultProxy.IsBypassed($Uri))) {
+ $ProxyAddress = $DefaultProxy.GetProxy($Uri).OriginalString
+ $ProxyUseDefaultCredentials = $true
+ }
+ } catch {
+ # Eat the exception and move forward as the above code is an attempt
+ # at resolving the DefaultProxy that may not have been a problem.
+ $ProxyAddress = $null
+ Say-Verbose("Exception ignored: $_.Exception.Message - moving forward...")
+ }
+ }
+
+ if($ProxyAddress) {
+ $HttpClientHandler = New-Object System.Net.Http.HttpClientHandler
+ $HttpClientHandler.Proxy = New-Object System.Net.WebProxy -Property @{
+ Address=$ProxyAddress;
+ UseDefaultCredentials=$ProxyUseDefaultCredentials;
+ BypassList = $ProxyBypassList;
+ }
+ $HttpClient = New-Object System.Net.Http.HttpClient -ArgumentList $HttpClientHandler
+ }
+ else {
+
+ $HttpClient = New-Object System.Net.Http.HttpClient
+ }
+ # Default timeout for HttpClient is 100s. For a 50 MB download this assumes 500 KB/s average, any less will time out
+ # 20 minutes allows it to work over much slower connections.
+ $HttpClient.Timeout = New-TimeSpan -Minutes 20
+ $Response = $HttpClient.GetAsync("${Uri}${FeedCredential}").Result
+ if (($Response -eq $null) -or (-not ($Response.IsSuccessStatusCode))) {
+ # The feed credential is potentially sensitive info. Do not log FeedCredential to console output.
+ $ErrorMsg = "Failed to download $Uri."
+ if ($Response -ne $null) {
+ $ErrorMsg += " $Response"
+ }
+
+ throw $ErrorMsg
+ }
+
+ return $Response
+ }
+ finally {
+ if ($HttpClient -ne $null) {
+ $HttpClient.Dispose()
+ }
+ }
+ })
+}
+
+function Get-Latest-Version-Info([string]$AzureFeed, [string]$Channel, [bool]$Coherent) {
+ Say-Invocation $MyInvocation
+
+ $VersionFileUrl = $null
+ if ($Runtime -eq "dotnet") {
+ $VersionFileUrl = "$UncachedFeed/Runtime/$Channel/latest.version"
+ }
+ elseif ($Runtime -eq "aspnetcore") {
+ $VersionFileUrl = "$UncachedFeed/aspnetcore/Runtime/$Channel/latest.version"
+ }
+ # Currently, the WindowsDesktop runtime is manufactured with the .NET Core runtime
+ elseif ($Runtime -eq "windowsdesktop") {
+ $VersionFileUrl = "$UncachedFeed/Runtime/$Channel/latest.version"
+ }
+ elseif (-not $Runtime) {
+ if ($Coherent) {
+ $VersionFileUrl = "$UncachedFeed/Sdk/$Channel/latest.coherent.version"
+ }
+ else {
+ $VersionFileUrl = "$UncachedFeed/Sdk/$Channel/latest.version"
+ }
+ }
+ else {
+ throw "Invalid value for `$Runtime"
+ }
+ try {
+ $Response = GetHTTPResponse -Uri $VersionFileUrl
+ }
+ catch {
+ throw "Could not resolve version information."
+ }
+ $StringContent = $Response.Content.ReadAsStringAsync().Result
+
+ switch ($Response.Content.Headers.ContentType) {
+ { ($_ -eq "application/octet-stream") } { $VersionText = $StringContent }
+ { ($_ -eq "text/plain") } { $VersionText = $StringContent }
+ { ($_ -eq "text/plain; charset=UTF-8") } { $VersionText = $StringContent }
+ default { throw "``$Response.Content.Headers.ContentType`` is an unknown .version file content type." }
+ }
+
+ $VersionInfo = Get-Version-Info-From-Version-Text $VersionText
+
+ return $VersionInfo
+}
+
+function Parse-Jsonfile-For-Version([string]$JSonFile) {
+ Say-Invocation $MyInvocation
+
+ If (-Not (Test-Path $JSonFile)) {
+ throw "Unable to find '$JSonFile'"
+ }
+ try {
+ $JSonContent = Get-Content($JSonFile) -Raw | ConvertFrom-Json | Select-Object -expand "sdk" -ErrorAction SilentlyContinue
+ }
+ catch {
+ throw "Json file unreadable: '$JSonFile'"
+ }
+ if ($JSonContent) {
+ try {
+ $JSonContent.PSObject.Properties | ForEach-Object {
+ $PropertyName = $_.Name
+ if ($PropertyName -eq "version") {
+ $Version = $_.Value
+ Say-Verbose "Version = $Version"
+ }
+ }
+ }
+ catch {
+ throw "Unable to parse the SDK node in '$JSonFile'"
+ }
+ }
+ else {
+ throw "Unable to find the SDK node in '$JSonFile'"
+ }
+ If ($Version -eq $null) {
+ throw "Unable to find the SDK:version node in '$JSonFile'"
+ }
+ return $Version
+}
+
+function Get-Specific-Version-From-Version([string]$AzureFeed, [string]$Channel, [string]$Version, [string]$JSonFile) {
+ Say-Invocation $MyInvocation
+
+ if (-not $JSonFile) {
+ switch ($Version.ToLower()) {
+ { $_ -eq "latest" } {
+ $LatestVersionInfo = Get-Latest-Version-Info -AzureFeed $AzureFeed -Channel $Channel -Coherent $False
+ return $LatestVersionInfo.Version
+ }
+ { $_ -eq "coherent" } {
+ $LatestVersionInfo = Get-Latest-Version-Info -AzureFeed $AzureFeed -Channel $Channel -Coherent $True
+ return $LatestVersionInfo.Version
+ }
+ default { return $Version }
+ }
+ }
+ else {
+ return Parse-Jsonfile-For-Version $JSonFile
+ }
+}
+
+function Get-Download-Link([string]$AzureFeed, [string]$SpecificVersion, [string]$CLIArchitecture) {
+ Say-Invocation $MyInvocation
+
+ # If anything fails in this lookup it will default to $SpecificVersion
+ $SpecificProductVersion = Get-Product-Version -AzureFeed $AzureFeed -SpecificVersion $SpecificVersion
+
+ if ($Runtime -eq "dotnet") {
+ $PayloadURL = "$AzureFeed/Runtime/$SpecificVersion/dotnet-runtime-$SpecificProductVersion-win-$CLIArchitecture.zip"
+ }
+ elseif ($Runtime -eq "aspnetcore") {
+ $PayloadURL = "$AzureFeed/aspnetcore/Runtime/$SpecificVersion/aspnetcore-runtime-$SpecificProductVersion-win-$CLIArchitecture.zip"
+ }
+ elseif ($Runtime -eq "windowsdesktop") {
+ $PayloadURL = "$AzureFeed/Runtime/$SpecificVersion/windowsdesktop-runtime-$SpecificProductVersion-win-$CLIArchitecture.zip"
+ }
+ elseif (-not $Runtime) {
+ $PayloadURL = "$AzureFeed/Sdk/$SpecificVersion/dotnet-sdk-$SpecificProductVersion-win-$CLIArchitecture.zip"
+ }
+ else {
+ throw "Invalid value for `$Runtime"
+ }
+
+ Say-Verbose "Constructed primary named payload URL: $PayloadURL"
+
+ return $PayloadURL, $SpecificProductVersion
+}
+
+function Get-LegacyDownload-Link([string]$AzureFeed, [string]$SpecificVersion, [string]$CLIArchitecture) {
+ Say-Invocation $MyInvocation
+
+ if (-not $Runtime) {
+ $PayloadURL = "$AzureFeed/Sdk/$SpecificVersion/dotnet-dev-win-$CLIArchitecture.$SpecificVersion.zip"
+ }
+ elseif ($Runtime -eq "dotnet") {
+ $PayloadURL = "$AzureFeed/Runtime/$SpecificVersion/dotnet-win-$CLIArchitecture.$SpecificVersion.zip"
+ }
+ else {
+ return $null
+ }
+
+ Say-Verbose "Constructed legacy named payload URL: $PayloadURL"
+
+ return $PayloadURL
+}
+
+function Get-Product-Version([string]$AzureFeed, [string]$SpecificVersion) {
+ Say-Invocation $MyInvocation
+
+ if ($Runtime -eq "dotnet") {
+ $ProductVersionTxtURL = "$AzureFeed/Runtime/$SpecificVersion/productVersion.txt"
+ }
+ elseif ($Runtime -eq "aspnetcore") {
+ $ProductVersionTxtURL = "$AzureFeed/aspnetcore/Runtime/$SpecificVersion/productVersion.txt"
+ }
+ elseif ($Runtime -eq "windowsdesktop") {
+ $ProductVersionTxtURL = "$AzureFeed/Runtime/$SpecificVersion/productVersion.txt"
+ }
+ elseif (-not $Runtime) {
+ $ProductVersionTxtURL = "$AzureFeed/Sdk/$SpecificVersion/productVersion.txt"
+ }
+ else {
+ throw "Invalid value specified for `$Runtime"
+ }
+
+ Say-Verbose "Checking for existence of $ProductVersionTxtURL"
+
+ try {
+ $productVersionResponse = GetHTTPResponse($productVersionTxtUrl)
+
+ if ($productVersionResponse.StatusCode -eq 200) {
+ $productVersion = $productVersionResponse.Content.ReadAsStringAsync().Result.Trim()
+ if ($productVersion -ne $SpecificVersion)
+ {
+ Say "Using alternate version $productVersion found in $ProductVersionTxtURL"
+ }
+
+ return $productVersion
+ }
+ else {
+ Say-Verbose "Got StatusCode $($productVersionResponse.StatusCode) trying to get productVersion.txt at $productVersionTxtUrl, so using default value of $SpecificVersion"
+ $productVersion = $SpecificVersion
+ }
+ } catch {
+ Say-Verbose "Could not read productVersion.txt at $productVersionTxtUrl, so using default value of $SpecificVersion"
+ $productVersion = $SpecificVersion
+ }
+
+ return $productVersion
+}
+
+function Get-User-Share-Path() {
+ Say-Invocation $MyInvocation
+
+ $InstallRoot = $env:DOTNET_INSTALL_DIR
+ if (!$InstallRoot) {
+ $InstallRoot = "$env:LocalAppData\Microsoft\dotnet"
+ }
+ return $InstallRoot
+}
+
+function Resolve-Installation-Path([string]$InstallDir) {
+ Say-Invocation $MyInvocation
+
+ if ($InstallDir -eq "") {
+ return Get-User-Share-Path
+ }
+ return $InstallDir
+}
+
+function Is-Dotnet-Package-Installed([string]$InstallRoot, [string]$RelativePathToPackage, [string]$SpecificVersion) {
+ Say-Invocation $MyInvocation
+
+ $DotnetPackagePath = Join-Path -Path $InstallRoot -ChildPath $RelativePathToPackage | Join-Path -ChildPath $SpecificVersion
+ Say-Verbose "Is-Dotnet-Package-Installed: DotnetPackagePath=$DotnetPackagePath"
+ return Test-Path $DotnetPackagePath -PathType Container
+}
+
+function Get-Absolute-Path([string]$RelativeOrAbsolutePath) {
+ # Too much spam
+ # Say-Invocation $MyInvocation
+
+ return $ExecutionContext.SessionState.Path.GetUnresolvedProviderPathFromPSPath($RelativeOrAbsolutePath)
+}
+
+function Get-Path-Prefix-With-Version($path) {
+ $match = [regex]::match($path, $VersionRegEx)
+ if ($match.Success) {
+ return $path.Substring(0, $match.Index + $match.Length)
+ }
+
+ return $null
+}
+
+function Get-List-Of-Directories-And-Versions-To-Unpack-From-Dotnet-Package([System.IO.Compression.ZipArchive]$Zip, [string]$OutPath) {
+ Say-Invocation $MyInvocation
+
+ $ret = @()
+ foreach ($entry in $Zip.Entries) {
+ $dir = Get-Path-Prefix-With-Version $entry.FullName
+ if ($dir -ne $null) {
+ $path = Get-Absolute-Path $(Join-Path -Path $OutPath -ChildPath $dir)
+ if (-Not (Test-Path $path -PathType Container)) {
+ $ret += $dir
+ }
+ }
+ }
+
+ $ret = $ret | Sort-Object | Get-Unique
+
+ $values = ($ret | foreach { "$_" }) -join ";"
+ Say-Verbose "Directories to unpack: $values"
+
+ return $ret
+}
+
+# Example zip content and extraction algorithm:
+# Rule: files, if extracted, are always extracted to the same relative path locally
+# .\
+# a.exe # file does not exist locally, extract
+# b.dll # file exists locally, override only if $OverrideFiles set
+# aaa\ # same rules as for files
+# ...
+# abc\1.0.0\ # directory contains version and exists locally
+# ... # do not extract content under versioned part
+# abc\asd\ # same rules as for files
+# ...
+# def\ghi\1.0.1\ # directory contains version and does not exist locally
+# ... # extract content
+function Extract-Dotnet-Package([string]$ZipPath, [string]$OutPath) {
+ Say-Invocation $MyInvocation
+
+ Load-Assembly -Assembly System.IO.Compression.FileSystem
+ Set-Variable -Name Zip
+ try {
+ $Zip = [System.IO.Compression.ZipFile]::OpenRead($ZipPath)
+
+ $DirectoriesToUnpack = Get-List-Of-Directories-And-Versions-To-Unpack-From-Dotnet-Package -Zip $Zip -OutPath $OutPath
+
+ foreach ($entry in $Zip.Entries) {
+ $PathWithVersion = Get-Path-Prefix-With-Version $entry.FullName
+ if (($PathWithVersion -eq $null) -Or ($DirectoriesToUnpack -contains $PathWithVersion)) {
+ $DestinationPath = Get-Absolute-Path $(Join-Path -Path $OutPath -ChildPath $entry.FullName)
+ $DestinationDir = Split-Path -Parent $DestinationPath
+ $OverrideFiles=$OverrideNonVersionedFiles -Or (-Not (Test-Path $DestinationPath))
+ if ((-Not $DestinationPath.EndsWith("\")) -And $OverrideFiles) {
+ New-Item -ItemType Directory -Force -Path $DestinationDir | Out-Null
+ [System.IO.Compression.ZipFileExtensions]::ExtractToFile($entry, $DestinationPath, $OverrideNonVersionedFiles)
+ }
+ }
+ }
+ }
+ finally {
+ if ($Zip -ne $null) {
+ $Zip.Dispose()
+ }
+ }
+}
+
+function DownloadFile($Source, [string]$OutPath) {
+ if ($Source -notlike "http*") {
+ # Using System.IO.Path.GetFullPath to get the current directory
+ # does not work in this context - $pwd gives the current directory
+ if (![System.IO.Path]::IsPathRooted($Source)) {
+ $Source = $(Join-Path -Path $pwd -ChildPath $Source)
+ }
+ $Source = Get-Absolute-Path $Source
+ Say "Copying file from $Source to $OutPath"
+ Copy-Item $Source $OutPath
+ return
+ }
+
+ $Stream = $null
+
+ try {
+ $Response = GetHTTPResponse -Uri $Source
+ $Stream = $Response.Content.ReadAsStreamAsync().Result
+ $File = [System.IO.File]::Create($OutPath)
+ $Stream.CopyTo($File)
+ $File.Close()
+ }
+ finally {
+ if ($Stream -ne $null) {
+ $Stream.Dispose()
+ }
+ }
+}
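+
+# Note: DownloadFile doubles as a copy helper; any $Source not starting with
+# "http" is resolved to an absolute path and copied with Copy-Item instead of
+# being streamed over HttpClient.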
+
+function Prepend-Sdk-InstallRoot-To-Path([string]$InstallRoot, [string]$BinFolderRelativePath) {
+ $BinPath = Get-Absolute-Path $(Join-Path -Path $InstallRoot -ChildPath $BinFolderRelativePath)
+ if (-Not $NoPath) {
+ $SuffixedBinPath = "$BinPath;"
+ if (-Not $env:path.Contains($SuffixedBinPath)) {
+ Say "Adding to current process PATH: `"$BinPath`". Note: This change will not be visible if PowerShell was run as a child process."
+ $env:path = $SuffixedBinPath + $env:path
+ } else {
+ Say-Verbose "Current process PATH already contains `"$BinPath`""
+ }
+ }
+ else {
+ Say "Binaries of dotnet can be found in $BinPath"
+ }
+}
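+
+# Note: the PATH change above affects only the current process; with -NoPath
+# the function merely reports where the binaries were placed.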
+
+$CLIArchitecture = Get-CLIArchitecture-From-Architecture $Architecture
+$SpecificVersion = Get-Specific-Version-From-Version -AzureFeed $AzureFeed -Channel $Channel -Version $Version -JSonFile $JSonFile
+$DownloadLink, $EffectiveVersion = Get-Download-Link -AzureFeed $AzureFeed -SpecificVersion $SpecificVersion -CLIArchitecture $CLIArchitecture
+$LegacyDownloadLink = Get-LegacyDownload-Link -AzureFeed $AzureFeed -SpecificVersion $SpecificVersion -CLIArchitecture $CLIArchitecture
+
+$InstallRoot = Resolve-Installation-Path $InstallDir
+Say-Verbose "InstallRoot: $InstallRoot"
+$ScriptName = $MyInvocation.MyCommand.Name
+
+if ($DryRun) {
+ Say "Payload URLs:"
+ Say "Primary named payload URL: $DownloadLink"
+ if ($LegacyDownloadLink) {
+ Say "Legacy named payload URL: $LegacyDownloadLink"
+ }
+ $RepeatableCommand = ".\$ScriptName -Version `"$SpecificVersion`" -InstallDir `"$InstallRoot`" -Architecture `"$CLIArchitecture`""
+ if ($Runtime -eq "dotnet") {
+ $RepeatableCommand+=" -Runtime `"dotnet`""
+ }
+ elseif ($Runtime -eq "aspnetcore") {
+ $RepeatableCommand+=" -Runtime `"aspnetcore`""
+ }
+ foreach ($key in $MyInvocation.BoundParameters.Keys) {
+ if (-not (@("Architecture","Channel","DryRun","InstallDir","Runtime","SharedRuntime","Version") -contains $key)) {
+ $RepeatableCommand+=" -$key `"$($MyInvocation.BoundParameters[$key])`""
+ }
+ }
+ Say "Repeatable invocation: $RepeatableCommand"
+ exit 0
+}
+
+if ($Runtime -eq "dotnet") {
+ $assetName = ".NET Core Runtime"
+ $dotnetPackageRelativePath = "shared\Microsoft.NETCore.App"
+}
+elseif ($Runtime -eq "aspnetcore") {
+ $assetName = "ASP.NET Core Runtime"
+ $dotnetPackageRelativePath = "shared\Microsoft.AspNetCore.App"
+}
+elseif ($Runtime -eq "windowsdesktop") {
+ $assetName = ".NET Core Windows Desktop Runtime"
+ $dotnetPackageRelativePath = "shared\Microsoft.WindowsDesktop.App"
+}
+elseif (-not $Runtime) {
+ $assetName = ".NET Core SDK"
+ $dotnetPackageRelativePath = "sdk"
+}
+else {
+ throw "Invalid value for `$Runtime"
+}
+
+if ($SpecificVersion -ne $EffectiveVersion)
+{
+ Say "Performing installation checks for effective version: $EffectiveVersion"
+ $SpecificVersion = $EffectiveVersion
+}
+
+# Check if the SDK version is already installed.
+$isAssetInstalled = Is-Dotnet-Package-Installed -InstallRoot $InstallRoot -RelativePathToPackage $dotnetPackageRelativePath -SpecificVersion $SpecificVersion
+if ($isAssetInstalled) {
+ Say "$assetName version $SpecificVersion is already installed."
+ Prepend-Sdk-InstallRoot-To-Path -InstallRoot $InstallRoot -BinFolderRelativePath $BinFolderRelativePath
+ exit 0
+}
+
+New-Item -ItemType Directory -Force -Path $InstallRoot | Out-Null
+
+$installDrive = $((Get-Item $InstallRoot).PSDrive.Name);
+$diskInfo = Get-PSDrive -Name $installDrive
+if ($diskInfo.Free / 1MB -le 100) {
+ Say "There is not enough disk space on drive ${installDrive}:"
+ exit 0
+}
+
+$ZipPath = [System.IO.Path]::combine([System.IO.Path]::GetTempPath(), [System.IO.Path]::GetRandomFileName())
+Say-Verbose "Zip path: $ZipPath"
+
+$DownloadFailed = $false
+Say "Downloading link: $DownloadLink"
+try {
+ DownloadFile -Source $DownloadLink -OutPath $ZipPath
+}
+catch {
+ Say "Cannot download: $DownloadLink"
+ if ($LegacyDownloadLink) {
+ $DownloadLink = $LegacyDownloadLink
+ $ZipPath = [System.IO.Path]::combine([System.IO.Path]::GetTempPath(), [System.IO.Path]::GetRandomFileName())
+ Say-Verbose "Legacy zip path: $ZipPath"
+ Say "Downloading legacy link: $DownloadLink"
+ try {
+ DownloadFile -Source $DownloadLink -OutPath $ZipPath
+ }
+ catch {
+ Say "Cannot download: $DownloadLink"
+ $DownloadFailed = $true
+ }
+ }
+ else {
+ $DownloadFailed = $true
+ }
+}
+
+if ($DownloadFailed) {
+ throw "Could not find/download: `"$assetName`" with version = $SpecificVersion`nRefer to: https://aka.ms/dotnet-os-lifecycle for information on .NET Core support"
+}
+
+Say "Extracting zip from $DownloadLink"
+Extract-Dotnet-Package -ZipPath $ZipPath -OutPath $InstallRoot
+
+# Check if the SDK version is installed; if not, fail the installation.
+$isAssetInstalled = $false
+
+# if the version contains "RTM" or "servicing"; check if a 'release-type' SDK version is installed.
+if ($SpecificVersion -Match "rtm" -or $SpecificVersion -Match "servicing") {
+ $ReleaseVersion = $SpecificVersion.Split("-")[0]
+ Say-Verbose "Checking installation: version = $ReleaseVersion"
+ $isAssetInstalled = Is-Dotnet-Package-Installed -InstallRoot $InstallRoot -RelativePathToPackage $dotnetPackageRelativePath -SpecificVersion $ReleaseVersion
+}
+
+# Check if the SDK version is installed.
+if (!$isAssetInstalled) {
+ Say-Verbose "Checking installation: version = $SpecificVersion"
+ $isAssetInstalled = Is-Dotnet-Package-Installed -InstallRoot $InstallRoot -RelativePathToPackage $dotnetPackageRelativePath -SpecificVersion $SpecificVersion
+}
+
+if (!$isAssetInstalled) {
+ throw "`"$assetName`" with version = $SpecificVersion failed to install with an unknown error."
+}
+
+Remove-Item $ZipPath
+
+Prepend-Sdk-InstallRoot-To-Path -InstallRoot $InstallRoot -BinFolderRelativePath $BinFolderRelativePath
+
+Say "Installation finished"
+exit 0
\ No newline at end of file
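Before the Bash counterpart below, it is worth spelling out the idempotency check the PowerShell installer relies on: an asset counts as installed when its versioned directory already exists under the install root, in which case the script only re-adds the bin folder to PATH and exits. A minimal sketch of the same probe (all paths and the version are illustrative):

```bash
#!/usr/bin/env bash
# Sketch of the installed-version probe: an asset is considered installed
# when its versioned folder exists under the install root.
install_root="$HOME/.dotnet"
relative_path="shared/Microsoft.NETCore.App"   # "sdk" for the full SDK
specific_version="3.1.0"                       # illustrative version

if [ -d "$install_root/$relative_path/$specific_version" ]; then
  echo "Already installed: $specific_version (skipping download)"
else
  echo "Not installed: $specific_version (would download and extract)"
fi
```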
diff --git a/eng/common/dotnet-install-scripts/dotnet-install.sh b/eng/common/dotnet-install-scripts/dotnet-install.sh
new file mode 100755
index 0000000000..92161141f6
--- /dev/null
+++ b/eng/common/dotnet-install-scripts/dotnet-install.sh
@@ -0,0 +1,1133 @@
+#!/usr/bin/env bash
+# Copyright (c) .NET Foundation and contributors. All rights reserved.
+# Licensed under the MIT license. See LICENSE file in the project root for full license information.
+#
+
+# Stop script on NZEC
+set -e
+# Stop script if unbound variable found (use ${var:-} if intentional)
+set -u
+# By default cmd1 | cmd2 returns the exit code of cmd2 regardless of cmd1's success;
+# pipefail makes the whole pipeline fail when any command in it fails
+set -o pipefail
+
+# Use in the functions: eval $invocation
+invocation='say_verbose "Calling: ${yellow:-}${FUNCNAME[0]} ${green:-}$*${normal:-}"'
+
+# standard output may be used as a return value in the functions
+# we need a way to write text on the screen in the functions so that
+# it won't interfere with the return value.
+# Exposing stream 3 as a pipe to standard output of the script itself
+exec 3>&1
+
+# Setup some colors to use. These need to work in fairly limited shells, like the Ubuntu Docker container where there are only 8 colors.
+# See if stdout is a terminal
+if [ -t 1 ] && command -v tput > /dev/null; then
+ # see if it supports colors
+ ncolors=$(tput colors)
+ if [ -n "$ncolors" ] && [ $ncolors -ge 8 ]; then
+ bold="$(tput bold || echo)"
+ normal="$(tput sgr0 || echo)"
+ black="$(tput setaf 0 || echo)"
+ red="$(tput setaf 1 || echo)"
+ green="$(tput setaf 2 || echo)"
+ yellow="$(tput setaf 3 || echo)"
+ blue="$(tput setaf 4 || echo)"
+ magenta="$(tput setaf 5 || echo)"
+ cyan="$(tput setaf 6 || echo)"
+ white="$(tput setaf 7 || echo)"
+ fi
+fi
+
+say_warning() {
+ printf "%b\n" "${yellow:-}dotnet_install: Warning: $1${normal:-}"
+}
+
+say_err() {
+ printf "%b\n" "${red:-}dotnet_install: Error: $1${normal:-}" >&2
+}
+
+say() {
+ # using stream 3 (defined in the beginning) to not interfere with stdout of functions
+ # which may be used as return value
+ printf "%b\n" "${cyan:-}dotnet-install:${normal:-} $1" >&3
+}
+
+say_verbose() {
+ if [ "$verbose" = true ]; then
+ say "$1"
+ fi
+}
+
+# This platform list is finite - if the SDK/Runtime has supported Linux distribution-specific assets,
+# then and only then should the Linux distribution appear in this list.
+# Adding a Linux distribution to this list does not imply distribution-specific support.
+get_legacy_os_name_from_platform() {
+ eval $invocation
+
+ platform="$1"
+ case "$platform" in
+ "centos.7")
+ echo "centos"
+ return 0
+ ;;
+ "debian.8")
+ echo "debian"
+ return 0
+ ;;
+ "debian.9")
+ echo "debian.9"
+ return 0
+ ;;
+ "fedora.23")
+ echo "fedora.23"
+ return 0
+ ;;
+ "fedora.24")
+ echo "fedora.24"
+ return 0
+ ;;
+ "fedora.27")
+ echo "fedora.27"
+ return 0
+ ;;
+ "fedora.28")
+ echo "fedora.28"
+ return 0
+ ;;
+ "opensuse.13.2")
+ echo "opensuse.13.2"
+ return 0
+ ;;
+ "opensuse.42.1")
+ echo "opensuse.42.1"
+ return 0
+ ;;
+ "opensuse.42.3")
+ echo "opensuse.42.3"
+ return 0
+ ;;
+ "rhel.7"*)
+ echo "rhel"
+ return 0
+ ;;
+ "ubuntu.14.04")
+ echo "ubuntu"
+ return 0
+ ;;
+ "ubuntu.16.04")
+ echo "ubuntu.16.04"
+ return 0
+ ;;
+ "ubuntu.16.10")
+ echo "ubuntu.16.10"
+ return 0
+ ;;
+ "ubuntu.18.04")
+ echo "ubuntu.18.04"
+ return 0
+ ;;
+ "alpine.3.4.3")
+ echo "alpine"
+ return 0
+ ;;
+ esac
+ return 1
+}
+
+get_linux_platform_name() {
+ eval $invocation
+
+ if [ -n "$runtime_id" ]; then
+ echo "${runtime_id%-*}"
+ return 0
+ else
+ if [ -e /etc/os-release ]; then
+ . /etc/os-release
+ echo "$ID${VERSION_ID:+.${VERSION_ID}}"
+ return 0
+    elif [ -e /etc/redhat-release ]; then
+      local redhatRelease=$(</etc/redhat-release)
+      if [[ $redhatRelease == "CentOS release 6."* || $redhatRelease == "Red Hat Enterprise Linux Server release 6."* ]]; then
+        echo "rhel.6"
+      else
+        echo "rhel"
+      fi
+      return 0
+    else
+      echo "linux"
+      return 0
+    fi
+  fi
+
+  say_verbose "Linux specific platform name and version could not be detected: UName = $uname"
+  return 1
+}
+
+is_musl_based_distro() {
+  (ldd --version 2>&1 || true) | grep -q musl
+}
+
+get_current_os_name() {
+ eval $invocation
+
+ local uname=$(uname)
+ if [ "$uname" = "Darwin" ]; then
+ echo "osx"
+ return 0
+ elif [ "$uname" = "FreeBSD" ]; then
+ echo "freebsd"
+ return 0
+ elif [ "$uname" = "Linux" ]; then
+ local linux_platform_name
+ linux_platform_name="$(get_linux_platform_name)" || { echo "linux" && return 0 ; }
+
+ if [ "$linux_platform_name" = "rhel.6" ]; then
+ echo $linux_platform_name
+ return 0
+ elif is_musl_based_distro; then
+ echo "linux-musl"
+ return 0
+ else
+ echo "linux"
+ return 0
+ fi
+ fi
+
+ say_err "OS name could not be detected: UName = $uname"
+ return 1
+}
+
+get_legacy_os_name() {
+ eval $invocation
+
+ local uname=$(uname)
+ if [ "$uname" = "Darwin" ]; then
+ echo "osx"
+ return 0
+ elif [ -n "$runtime_id" ]; then
+ echo $(get_legacy_os_name_from_platform "${runtime_id%-*}" || echo "${runtime_id%-*}")
+ return 0
+ else
+ if [ -e /etc/os-release ]; then
+ . /etc/os-release
+ os=$(get_legacy_os_name_from_platform "$ID${VERSION_ID:+.${VERSION_ID}}" || echo "")
+ if [ -n "$os" ]; then
+ echo "$os"
+ return 0
+ fi
+ fi
+ fi
+
+ say_verbose "Distribution specific OS name and version could not be detected: UName = $uname"
+ return 1
+}
+
+machine_has() {
+ eval $invocation
+
+ hash "$1" > /dev/null 2>&1
+ return $?
+}
+
+
+check_min_reqs() {
+ local hasMinimum=false
+ if machine_has "curl"; then
+ hasMinimum=true
+ elif machine_has "wget"; then
+ hasMinimum=true
+ fi
+
+ if [ "$hasMinimum" = "false" ]; then
+ say_err "curl (recommended) or wget are required to download dotnet. Install missing prerequisite to proceed."
+ return 1
+ fi
+ return 0
+}
+
+check_pre_reqs() {
+ eval $invocation
+
+ if [ "${DOTNET_INSTALL_SKIP_PREREQS:-}" = "1" ]; then
+ return 0
+ fi
+
+ if [ "$(uname)" = "Linux" ]; then
+ if is_musl_based_distro; then
+ if ! command -v scanelf > /dev/null; then
+ say_warning "scanelf not found, please install pax-utils package."
+ return 0
+ fi
+ LDCONFIG_COMMAND="scanelf --ldpath -BF '%f'"
+ [ -z "$($LDCONFIG_COMMAND 2>/dev/null | grep libintl)" ] && say_warning "Unable to locate libintl. Probable prerequisite missing; install libintl (or gettext)."
+ else
+ if [ ! -x "$(command -v ldconfig)" ]; then
+ say_verbose "ldconfig is not in PATH, trying /sbin/ldconfig."
+ LDCONFIG_COMMAND="/sbin/ldconfig"
+ else
+ LDCONFIG_COMMAND="ldconfig"
+ fi
+ local librarypath=${LD_LIBRARY_PATH:-}
+ LDCONFIG_COMMAND="$LDCONFIG_COMMAND -NXv ${librarypath//:/ }"
+ fi
+
+ [ -z "$($LDCONFIG_COMMAND 2>/dev/null | grep zlib)" ] && say_warning "Unable to locate zlib. Probable prerequisite missing; install zlib."
+ [ -z "$($LDCONFIG_COMMAND 2>/dev/null | grep ssl)" ] && say_warning "Unable to locate libssl. Probable prerequisite missing; install libssl."
+ [ -z "$($LDCONFIG_COMMAND 2>/dev/null | grep libicu)" ] && say_warning "Unable to locate libicu. Probable prerequisite missing; install libicu."
+ [ -z "$($LDCONFIG_COMMAND 2>/dev/null | grep lttng)" ] && say_warning "Unable to locate liblttng. Probable prerequisite missing; install libcurl."
+ [ -z "$($LDCONFIG_COMMAND 2>/dev/null | grep libcurl)" ] && say_warning "Unable to locate libcurl. Probable prerequisite missing; install libcurl."
+ fi
+
+ return 0
+}
+
+# args:
+# input - $1
+to_lowercase() {
+ #eval $invocation
+
+ echo "$1" | tr '[:upper:]' '[:lower:]'
+ return 0
+}
+
+# args:
+# input - $1
+remove_trailing_slash() {
+ #eval $invocation
+
+ local input="${1:-}"
+ echo "${input%/}"
+ return 0
+}
+
+# args:
+# input - $1
+remove_beginning_slash() {
+ #eval $invocation
+
+ local input="${1:-}"
+ echo "${input#/}"
+ return 0
+}
+
+# args:
+# root_path - $1
+# child_path - $2 - this parameter can be empty
+combine_paths() {
+ eval $invocation
+
+ # TODO: Consider making it work with any number of paths. For now:
+ if [ ! -z "${3:-}" ]; then
+ say_err "combine_paths: Function takes two parameters."
+ return 1
+ fi
+
+ local root_path="$(remove_trailing_slash "$1")"
+ local child_path="$(remove_beginning_slash "${2:-}")"
+ say_verbose "combine_paths: root_path=$root_path"
+ say_verbose "combine_paths: child_path=$child_path"
+ echo "$root_path/$child_path"
+ return 0
+}
+
+get_machine_architecture() {
+ eval $invocation
+
+ if command -v uname > /dev/null; then
+ CPUName=$(uname -m)
+ case $CPUName in
+ armv7l)
+ echo "arm"
+ return 0
+ ;;
+ aarch64)
+ echo "arm64"
+ return 0
+ ;;
+ esac
+ fi
+
+ # Always default to 'x64'
+ echo "x64"
+ return 0
+}
+
+# args:
+# architecture - $1
+get_normalized_architecture_from_architecture() {
+ eval $invocation
+
+ local architecture="$(to_lowercase "$1")"
+ case "$architecture" in
+    \<auto\>)
+ echo "$(get_normalized_architecture_from_architecture "$(get_machine_architecture)")"
+ return 0
+ ;;
+ amd64|x64)
+ echo "x64"
+ return 0
+ ;;
+ arm)
+ echo "arm"
+ return 0
+ ;;
+ arm64)
+ echo "arm64"
+ return 0
+ ;;
+ esac
+
+ say_err "Architecture \`$architecture\` not supported. If you think this is a bug, report it at https://github.com/dotnet/sdk/issues"
+ return 1
+}
+
+# The version text returned from the feeds is a 1-line or 2-line string:
+# For the SDK and the dotnet runtime (2 lines):
+# Line 1: # commit_hash
+# Line 2: # 4-part version
+# For the aspnetcore runtime (1 line):
+# Line 1: # 4-part version
+
+# args:
+# version_text - stdin
+get_version_from_version_info() {
+ eval $invocation
+
+ cat | tail -n 1 | sed 's/\r$//'
+ return 0
+}
+
+# args:
+# install_root - $1
+# relative_path_to_package - $2
+# specific_version - $3
+is_dotnet_package_installed() {
+ eval $invocation
+
+ local install_root="$1"
+ local relative_path_to_package="$2"
+ local specific_version="${3//[$'\t\r\n']}"
+
+ local dotnet_package_path="$(combine_paths "$(combine_paths "$install_root" "$relative_path_to_package")" "$specific_version")"
+ say_verbose "is_dotnet_package_installed: dotnet_package_path=$dotnet_package_path"
+
+ if [ -d "$dotnet_package_path" ]; then
+ return 0
+ else
+ return 1
+ fi
+}
+
+# args:
+# azure_feed - $1
+# channel - $2
+# normalized_architecture - $3
+# coherent - $4
+get_latest_version_info() {
+ eval $invocation
+
+ local azure_feed="$1"
+ local channel="$2"
+ local normalized_architecture="$3"
+ local coherent="$4"
+
+ local version_file_url=null
+ if [[ "$runtime" == "dotnet" ]]; then
+ version_file_url="$uncached_feed/Runtime/$channel/latest.version"
+ elif [[ "$runtime" == "aspnetcore" ]]; then
+ version_file_url="$uncached_feed/aspnetcore/Runtime/$channel/latest.version"
+ elif [ -z "$runtime" ]; then
+ if [ "$coherent" = true ]; then
+ version_file_url="$uncached_feed/Sdk/$channel/latest.coherent.version"
+ else
+ version_file_url="$uncached_feed/Sdk/$channel/latest.version"
+ fi
+ else
+ say_err "Invalid value for \$runtime"
+ return 1
+ fi
+ say_verbose "get_latest_version_info: latest url: $version_file_url"
+
+ download "$version_file_url"
+ return $?
+}
+
+# args:
+# json_file - $1
+parse_jsonfile_for_version() {
+ eval $invocation
+
+ local json_file="$1"
+ if [ ! -f "$json_file" ]; then
+ say_err "Unable to find \`$json_file\`"
+ return 1
+ fi
+
+ sdk_section=$(cat $json_file | awk '/"sdk"/,/}/')
+ if [ -z "$sdk_section" ]; then
+ say_err "Unable to parse the SDK node in \`$json_file\`"
+ return 1
+ fi
+
+ sdk_list=$(echo $sdk_section | awk -F"[{}]" '{print $2}')
+ sdk_list=${sdk_list//[\" ]/}
+ sdk_list=${sdk_list//,/$'\n'}
+ sdk_list="$(echo -e "${sdk_list}" | tr -d '[[:space:]]')"
+
+ local version_info=""
+ while read -r line; do
+ IFS=:
+ while read -r key value; do
+ if [[ "$key" == "version" ]]; then
+ version_info=$value
+ fi
+ done <<< "$line"
+ done <<< "$sdk_list"
+ if [ -z "$version_info" ]; then
+ say_err "Unable to find the SDK:version node in \`$json_file\`"
+ return 1
+ fi
+
+ unset IFS;
+ echo "$version_info"
+ return 0
+}
+
+# args:
+# azure_feed - $1
+# channel - $2
+# normalized_architecture - $3
+# version - $4
+# json_file - $5
+get_specific_version_from_version() {
+ eval $invocation
+
+ local azure_feed="$1"
+ local channel="$2"
+ local normalized_architecture="$3"
+ local version="$(to_lowercase "$4")"
+ local json_file="$5"
+
+ if [ -z "$json_file" ]; then
+ case "$version" in
+ latest)
+ local version_info
+ version_info="$(get_latest_version_info "$azure_feed" "$channel" "$normalized_architecture" false)" || return 1
+ say_verbose "get_specific_version_from_version: version_info=$version_info"
+ echo "$version_info" | get_version_from_version_info
+ return 0
+ ;;
+ coherent)
+ local version_info
+ version_info="$(get_latest_version_info "$azure_feed" "$channel" "$normalized_architecture" true)" || return 1
+ say_verbose "get_specific_version_from_version: version_info=$version_info"
+ echo "$version_info" | get_version_from_version_info
+ return 0
+ ;;
+ *)
+ echo "$version"
+ return 0
+ ;;
+ esac
+ else
+ local version_info
+ version_info="$(parse_jsonfile_for_version "$json_file")" || return 1
+ echo "$version_info"
+ return 0
+ fi
+}
+
+# args:
+# azure_feed - $1
+# channel - $2
+# normalized_architecture - $3
+# specific_version - $4
+construct_download_link() {
+ eval $invocation
+
+ local azure_feed="$1"
+ local channel="$2"
+ local normalized_architecture="$3"
+ local specific_version="${4//[$'\t\r\n']}"
+ local specific_product_version="$(get_specific_product_version "$1" "$4")"
+
+ local osname
+ osname="$(get_current_os_name)" || return 1
+
+ local download_link=null
+ if [[ "$runtime" == "dotnet" ]]; then
+ download_link="$azure_feed/Runtime/$specific_version/dotnet-runtime-$specific_product_version-$osname-$normalized_architecture.tar.gz"
+ elif [[ "$runtime" == "aspnetcore" ]]; then
+ download_link="$azure_feed/aspnetcore/Runtime/$specific_version/aspnetcore-runtime-$specific_product_version-$osname-$normalized_architecture.tar.gz"
+ elif [ -z "$runtime" ]; then
+ download_link="$azure_feed/Sdk/$specific_version/dotnet-sdk-$specific_product_version-$osname-$normalized_architecture.tar.gz"
+ else
+ return 1
+ fi
+
+ echo "$download_link"
+ return 0
+}
+
+# args:
+# azure_feed - $1
+# specific_version - $2
+get_specific_product_version() {
+ # If we find a 'productVersion.txt' at the root of any folder, we'll use its contents
+ # to resolve the version of what's in the folder, superseding the specified version.
+ eval $invocation
+
+ local azure_feed="$1"
+ local specific_version="${2//[$'\t\r\n']}"
+ local specific_product_version=$specific_version
+
+ local download_link=null
+ if [[ "$runtime" == "dotnet" ]]; then
+ download_link="$azure_feed/Runtime/$specific_version/productVersion.txt${feed_credential}"
+ elif [[ "$runtime" == "aspnetcore" ]]; then
+ download_link="$azure_feed/aspnetcore/Runtime/$specific_version/productVersion.txt${feed_credential}"
+ elif [ -z "$runtime" ]; then
+ download_link="$azure_feed/Sdk/$specific_version/productVersion.txt${feed_credential}"
+ else
+ return 1
+ fi
+
+ specific_product_version=$(curl -s --fail "$download_link")
+ if [ $? -ne 0 ]
+ then
+ specific_product_version=$(wget -qO- "$download_link")
+ if [ $? -ne 0 ]
+ then
+ specific_product_version=$specific_version
+ fi
+ fi
+ specific_product_version="${specific_product_version//[$'\t\r\n']}"
+
+ echo "$specific_product_version"
+ return 0
+}
+
+# args:
+# azure_feed - $1
+# channel - $2
+# normalized_architecture - $3
+# specific_version - $4
+construct_legacy_download_link() {
+ eval $invocation
+
+ local azure_feed="$1"
+ local channel="$2"
+ local normalized_architecture="$3"
+ local specific_version="${4//[$'\t\r\n']}"
+
+ local distro_specific_osname
+ distro_specific_osname="$(get_legacy_os_name)" || return 1
+
+ local legacy_download_link=null
+ if [[ "$runtime" == "dotnet" ]]; then
+ legacy_download_link="$azure_feed/Runtime/$specific_version/dotnet-$distro_specific_osname-$normalized_architecture.$specific_version.tar.gz"
+ elif [ -z "$runtime" ]; then
+ legacy_download_link="$azure_feed/Sdk/$specific_version/dotnet-dev-$distro_specific_osname-$normalized_architecture.$specific_version.tar.gz"
+ else
+ return 1
+ fi
+
+ echo "$legacy_download_link"
+ return 0
+}
+
+get_user_install_path() {
+ eval $invocation
+
+ if [ ! -z "${DOTNET_INSTALL_DIR:-}" ]; then
+ echo "$DOTNET_INSTALL_DIR"
+ else
+ echo "$HOME/.dotnet"
+ fi
+ return 0
+}
+
+# args:
+# install_dir - $1
+resolve_installation_path() {
+ eval $invocation
+
+ local install_dir=$1
+ if [ "$install_dir" = "" ]; then
+ local user_install_path="$(get_user_install_path)"
+ say_verbose "resolve_installation_path: user_install_path=$user_install_path"
+ echo "$user_install_path"
+ return 0
+ fi
+
+ echo "$install_dir"
+ return 0
+}
+
+# args:
+# relative_or_absolute_path - $1
+get_absolute_path() {
+ eval $invocation
+
+ local relative_or_absolute_path=$1
+ echo "$(cd "$(dirname "$1")" && pwd -P)/$(basename "$1")"
+ return 0
+}
+
+# args:
+# input_files - stdin
+# root_path - $1
+# out_path - $2
+# override - $3
+copy_files_or_dirs_from_list() {
+ eval $invocation
+
+ local root_path="$(remove_trailing_slash "$1")"
+ local out_path="$(remove_trailing_slash "$2")"
+ local override="$3"
+ local osname="$(get_current_os_name)"
+ local override_switch=$(
+ if [ "$override" = false ]; then
+ if [ "$osname" = "linux-musl" ]; then
+ printf -- "-u";
+ else
+ printf -- "-n";
+ fi
+ fi)
+
+ cat | uniq | while read -r file_path; do
+ local path="$(remove_beginning_slash "${file_path#$root_path}")"
+ local target="$out_path/$path"
+ if [ "$override" = true ] || (! ([ -d "$target" ] || [ -e "$target" ])); then
+ mkdir -p "$out_path/$(dirname "$path")"
+ if [ -d "$target" ]; then
+ rm -rf "$target"
+ fi
+ cp -R $override_switch "$root_path/$path" "$target"
+ fi
+ done
+}
+
+# args:
+# zip_path - $1
+# out_path - $2
+extract_dotnet_package() {
+ eval $invocation
+
+ local zip_path="$1"
+ local out_path="$2"
+
+ local temp_out_path="$(mktemp -d "$temporary_file_template")"
+
+ local failed=false
+ tar -xzf "$zip_path" -C "$temp_out_path" > /dev/null || failed=true
+
+ local folders_with_version_regex='^.*/[0-9]+\.[0-9]+[^/]+/'
+ find "$temp_out_path" -type f | grep -Eo "$folders_with_version_regex" | sort | copy_files_or_dirs_from_list "$temp_out_path" "$out_path" false
+ find "$temp_out_path" -type f | grep -Ev "$folders_with_version_regex" | copy_files_or_dirs_from_list "$temp_out_path" "$out_path" "$override_non_versioned_files"
+
+ rm -rf "$temp_out_path"
+
+ if [ "$failed" = true ]; then
+ say_err "Extraction failed"
+ return 1
+ fi
+}
+
+# args:
+# remote_path - $1
+# [out_path] - $2 - stdout if not provided
+download() {
+ eval $invocation
+
+ local remote_path="$1"
+ local out_path="${2:-}"
+
+ if [[ "$remote_path" != "http"* ]]; then
+ cp "$remote_path" "$out_path"
+ return $?
+ fi
+
+ local failed=false
+ if machine_has "curl"; then
+ downloadcurl "$remote_path" "$out_path" || failed=true
+ elif machine_has "wget"; then
+ downloadwget "$remote_path" "$out_path" || failed=true
+ else
+ failed=true
+ fi
+ if [ "$failed" = true ]; then
+ say_verbose "Download failed: $remote_path"
+ return 1
+ fi
+ return 0
+}
+
+downloadcurl() {
+ eval $invocation
+ local remote_path="$1"
+ local out_path="${2:-}"
+
+ # Append feed_credential as late as possible before calling curl to avoid logging feed_credential
+ remote_path="${remote_path}${feed_credential}"
+
+ local curl_options="--retry 20 --retry-delay 2 --connect-timeout 15 -sSL -f --create-dirs "
+ local failed=false
+ if [ -z "$out_path" ]; then
+ curl $curl_options "$remote_path" || failed=true
+ else
+ curl $curl_options -o "$out_path" "$remote_path" || failed=true
+ fi
+ if [ "$failed" = true ]; then
+ say_verbose "Curl download failed"
+ return 1
+ fi
+ return 0
+}
+
+downloadwget() {
+ eval $invocation
+ local remote_path="$1"
+ local out_path="${2:-}"
+
+ # Append feed_credential as late as possible before calling wget to avoid logging feed_credential
+ remote_path="${remote_path}${feed_credential}"
+ local wget_options="--tries 20 --waitretry 2 --connect-timeout 15 "
+ local failed=false
+ if [ -z "$out_path" ]; then
+ wget -q $wget_options -O - "$remote_path" || failed=true
+ else
+ wget $wget_options -O "$out_path" "$remote_path" || failed=true
+ fi
+ if [ "$failed" = true ]; then
+ say_verbose "Wget download failed"
+ return 1
+ fi
+ return 0
+}
+
+calculate_vars() {
+ eval $invocation
+ valid_legacy_download_link=true
+
+ normalized_architecture="$(get_normalized_architecture_from_architecture "$architecture")"
+ say_verbose "normalized_architecture=$normalized_architecture"
+
+ specific_version="$(get_specific_version_from_version "$azure_feed" "$channel" "$normalized_architecture" "$version" "$json_file")"
+ specific_product_version="$(get_specific_product_version "$azure_feed" "$specific_version")"
+ say_verbose "specific_version=$specific_version"
+ if [ -z "$specific_version" ]; then
+ say_err "Could not resolve version information."
+ return 1
+ fi
+
+ download_link="$(construct_download_link "$azure_feed" "$channel" "$normalized_architecture" "$specific_version")"
+ say_verbose "Constructed primary named payload URL: $download_link"
+
+ legacy_download_link="$(construct_legacy_download_link "$azure_feed" "$channel" "$normalized_architecture" "$specific_version")" || valid_legacy_download_link=false
+
+ if [ "$valid_legacy_download_link" = true ]; then
+ say_verbose "Constructed legacy named payload URL: $legacy_download_link"
+ else
+ say_verbose "Cound not construct a legacy_download_link; omitting..."
+ fi
+
+ install_root="$(resolve_installation_path "$install_dir")"
+ say_verbose "InstallRoot: $install_root"
+}
+
+install_dotnet() {
+ eval $invocation
+ local download_failed=false
+ local asset_name=''
+ local asset_relative_path=''
+
+ if [[ "$runtime" == "dotnet" ]]; then
+ asset_relative_path="shared/Microsoft.NETCore.App"
+ asset_name=".NET Core Runtime"
+ elif [[ "$runtime" == "aspnetcore" ]]; then
+ asset_relative_path="shared/Microsoft.AspNetCore.App"
+ asset_name="ASP.NET Core Runtime"
+ elif [ -z "$runtime" ]; then
+ asset_relative_path="sdk"
+ asset_name=".NET Core SDK"
+ else
+ say_err "Invalid value for \$runtime"
+ return 1
+ fi
+
+ # Check if the SDK version is already installed.
+ if is_dotnet_package_installed "$install_root" "$asset_relative_path" "$specific_version"; then
+ say "$asset_name version $specific_version is already installed."
+ return 0
+ fi
+
+ mkdir -p "$install_root"
+ zip_path="$(mktemp "$temporary_file_template")"
+ say_verbose "Zip path: $zip_path"
+
+ say "Downloading link: $download_link"
+
+  # A failed primary download is expected when only a legacy-named payload exists,
+  # so do not output to stderr here; output to stderr is considered an error.
+ download "$download_link" "$zip_path" 2>&1 || download_failed=true
+
+ # if the download fails, download the legacy_download_link
+ if [ "$download_failed" = true ]; then
+ say "Cannot download: $download_link"
+
+ if [ "$valid_legacy_download_link" = true ]; then
+ download_failed=false
+ download_link="$legacy_download_link"
+ zip_path="$(mktemp "$temporary_file_template")"
+ say_verbose "Legacy zip path: $zip_path"
+ say "Downloading legacy link: $download_link"
+ download "$download_link" "$zip_path" 2>&1 || download_failed=true
+
+ if [ "$download_failed" = true ]; then
+ say "Cannot download: $download_link"
+ fi
+ fi
+ fi
+
+ if [ "$download_failed" = true ]; then
+ say_err "Could not find/download: \`$asset_name\` with version = $specific_version"
+ say_err "Refer to: https://aka.ms/dotnet-os-lifecycle for information on .NET Core support"
+ return 1
+ fi
+
+ say "Extracting zip from $download_link"
+ extract_dotnet_package "$zip_path" "$install_root"
+
+ # Check if the SDK version is installed; if not, fail the installation.
+ # if the version contains "RTM" or "servicing"; check if a 'release-type' SDK version is installed.
+ if [[ $specific_version == *"rtm"* || $specific_version == *"servicing"* ]]; then
+ IFS='-'
+ read -ra verArr <<< "$specific_version"
+ release_version="${verArr[0]}"
+ unset IFS;
+ say_verbose "Checking installation: version = $release_version"
+ if is_dotnet_package_installed "$install_root" "$asset_relative_path" "$release_version"; then
+ return 0
+ fi
+ fi
+
+ # Check if the standard SDK version is installed.
+ say_verbose "Checking installation: version = $specific_product_version"
+ if is_dotnet_package_installed "$install_root" "$asset_relative_path" "$specific_product_version"; then
+ return 0
+ fi
+
+ say_err "\`$asset_name\` with version = $specific_product_version failed to install with an unknown error."
+ return 1
+}
+
+args=("$@")
+
+local_version_file_relative_path="/.version"
+bin_folder_relative_path=""
+temporary_file_template="${TMPDIR:-/tmp}/dotnet.XXXXXXXXX"
+
+channel="LTS"
+version="Latest"
+json_file=""
+install_dir=""
+architecture=""
+dry_run=false
+no_path=false
+no_cdn=false
+azure_feed="https://dotnetcli.azureedge.net/dotnet"
+uncached_feed="https://dotnetcli.blob.core.windows.net/dotnet"
+feed_credential=""
+verbose=false
+runtime=""
+runtime_id=""
+override_non_versioned_files=true
+non_dynamic_parameters=""
+
+while [ $# -ne 0 ]
+do
+ name="$1"
+ case "$name" in
+ -c|--channel|-[Cc]hannel)
+ shift
+ channel="$1"
+ ;;
+ -v|--version|-[Vv]ersion)
+ shift
+ version="$1"
+ ;;
+ -i|--install-dir|-[Ii]nstall[Dd]ir)
+ shift
+ install_dir="$1"
+ ;;
+ --arch|--architecture|-[Aa]rch|-[Aa]rchitecture)
+ shift
+ architecture="$1"
+ ;;
+ --shared-runtime|-[Ss]hared[Rr]untime)
+ say_warning "The --shared-runtime flag is obsolete and may be removed in a future version of this script. The recommended usage is to specify '--runtime dotnet'."
+ if [ -z "$runtime" ]; then
+ runtime="dotnet"
+ fi
+ ;;
+ --runtime|-[Rr]untime)
+ shift
+ runtime="$1"
+ if [[ "$runtime" != "dotnet" ]] && [[ "$runtime" != "aspnetcore" ]]; then
+ say_err "Unsupported value for --runtime: '$1'. Valid values are 'dotnet' and 'aspnetcore'."
+ if [[ "$runtime" == "windowsdesktop" ]]; then
+ say_err "WindowsDesktop archives are manufactured for Windows platforms only."
+ fi
+ exit 1
+ fi
+ ;;
+ --dry-run|-[Dd]ry[Rr]un)
+ dry_run=true
+ ;;
+ --no-path|-[Nn]o[Pp]ath)
+ no_path=true
+ non_dynamic_parameters+=" $name"
+ ;;
+ --verbose|-[Vv]erbose)
+ verbose=true
+ non_dynamic_parameters+=" $name"
+ ;;
+ --no-cdn|-[Nn]o[Cc]dn)
+ no_cdn=true
+ non_dynamic_parameters+=" $name"
+ ;;
+ --azure-feed|-[Aa]zure[Ff]eed)
+ shift
+ azure_feed="$1"
+ non_dynamic_parameters+=" $name "\""$1"\"""
+ ;;
+ --uncached-feed|-[Uu]ncached[Ff]eed)
+ shift
+ uncached_feed="$1"
+ non_dynamic_parameters+=" $name "\""$1"\"""
+ ;;
+ --feed-credential|-[Ff]eed[Cc]redential)
+ shift
+ feed_credential="$1"
+ non_dynamic_parameters+=" $name "\""$1"\"""
+ ;;
+ --runtime-id|-[Rr]untime[Ii]d)
+ shift
+ runtime_id="$1"
+ non_dynamic_parameters+=" $name "\""$1"\"""
+ ;;
+ --jsonfile|-[Jj][Ss]on[Ff]ile)
+ shift
+ json_file="$1"
+ ;;
+ --skip-non-versioned-files|-[Ss]kip[Nn]on[Vv]ersioned[Ff]iles)
+ override_non_versioned_files=false
+ non_dynamic_parameters+=" $name"
+ ;;
+    -?|--?|-h|--help|-[Hh]elp)
+      script_name="$(basename "$0")"
+      echo ".NET Tools Installer"
+      echo "Usage: $script_name [-c|--channel <CHANNEL>] [-v|--version <VERSION>] [-p|--prefix <DESTINATION>]"
+      echo "       $script_name -h|-?|--help"
+      echo ""
+      echo "$script_name is a simple command line interface for obtaining dotnet cli."
+      echo ""
+      echo "Options:"
+      echo "  -c,--channel <CHANNEL>         Download from the channel specified. Defaults to \`$channel\`."
+      echo "      -Channel"
+      echo "          Possible values:"
+      echo "          - Current - most current release"
+      echo "          - LTS - most current supported release"
+      echo "          - 2-part version in a format A.B - represents a specific release"
+      echo "              examples: 2.0; 1.0"
+      echo "          - Branch name"
+      echo "              examples: release/2.0.0; Master"
+      echo "          Note: The version parameter overrides the channel parameter."
+      echo "  -v,--version <VERSION>         Use specific VERSION. Defaults to \`$version\`."
+      echo "      -Version"
+      echo "          Possible values:"
+      echo "          - latest - the latest build on the specific channel"
+      echo "          - coherent - the latest coherent build on the specific channel"
+      echo "              coherent applies only to SDK downloads"
+      echo "          - 3-part version in a format A.B.C - represents specific version of build"
+      echo "              examples: 2.0.0-preview2-006120; 1.1.0"
+      echo "  -i,--install-dir <DIR>         Install under specified location (see Install Location below)"
+      echo "      -InstallDir"
+      echo "  --architecture <ARCHITECTURE>  Architecture of dotnet binaries to be installed. Defaults to \`$architecture\`."
+      echo "      --arch,-Architecture,-Arch"
+      echo "          Possible values: x64, arm, and arm64"
+      echo "  --runtime <RUNTIME_TYPE>       Installs a shared runtime only, without the SDK."
+      echo "      -Runtime"
+      echo "          Possible values:"
+      echo "          - dotnet     - the Microsoft.NETCore.App shared runtime"
+      echo "          - aspnetcore - the Microsoft.AspNetCore.App shared runtime"
+      echo "  --dry-run,-DryRun              Do not perform installation. Display download link."
+      echo "  --no-path, -NoPath             Do not set PATH for the current process."
+      echo "  --verbose,-Verbose             Display diagnostics information."
+      echo "  --azure-feed,-AzureFeed        Azure feed location. Defaults to $azure_feed. This parameter typically is not changed by the user."
+      echo "  --uncached-feed,-UncachedFeed  Uncached feed location. This parameter typically is not changed by the user."
+      echo "  --feed-credential,-FeedCredential  Azure feed shared access token. This parameter typically is not specified."
+      echo "  --skip-non-versioned-files     Skips non-versioned files if they already exist, such as the dotnet executable."
+      echo "      -SkipNonVersionedFiles"
+      echo "  --no-cdn,-NoCdn                Disable downloading from the Azure CDN, and use the uncached feed directly."
+      echo "  --jsonfile <JSONFILE>          Determines the SDK version from a user-specified global.json file."
+      echo "                                 Note: global.json must have a value for 'SDK:Version'"
+      echo "  --runtime-id                   Installs the .NET Tools for the given platform (use linux-x64 for portable linux)."
+      echo "      -RuntimeId"
+      echo "  -?,--?,-h,--help,-Help         Shows this help message"
+      echo ""
+      echo "Obsolete parameters:"
+      echo "  --shared-runtime               The recommended alternative is '--runtime dotnet'."
+      echo "                                 This parameter is obsolete and may be removed in a future version of this script."
+      echo "                                 Installs just the shared runtime bits, not the entire SDK."
+      echo ""
+      echo "Install Location:"
+      echo "  Location is chosen in following order:"
+      echo "    - --install-dir option"
+      echo "    - Environmental variable DOTNET_INSTALL_DIR"
+      echo "    - $HOME/.dotnet"
+      exit 0
+ ;;
+ *)
+ say_err "Unknown argument \`$name\`"
+ exit 1
+ ;;
+ esac
+
+ shift
+done
+
+if [ "$no_cdn" = true ]; then
+ azure_feed="$uncached_feed"
+fi
+
+check_min_reqs
+calculate_vars
+script_name=$(basename "$0")
+
+if [ "$dry_run" = true ]; then
+ say "Payload URLs:"
+ say "Primary named payload URL: $download_link"
+ if [ "$valid_legacy_download_link" = true ]; then
+ say "Legacy named payload URL: $legacy_download_link"
+ fi
+ repeatable_command="./$script_name --version "\""$specific_version"\"" --install-dir "\""$install_root"\"" --architecture "\""$normalized_architecture"\"""
+ if [[ "$runtime" == "dotnet" ]]; then
+ repeatable_command+=" --runtime "\""dotnet"\"""
+ elif [[ "$runtime" == "aspnetcore" ]]; then
+ repeatable_command+=" --runtime "\""aspnetcore"\"""
+ fi
+ repeatable_command+="$non_dynamic_parameters"
+ say "Repeatable invocation: $repeatable_command"
+ exit 0
+fi
+
+check_pre_reqs
+install_dotnet
+
+bin_path="$(get_absolute_path "$(combine_paths "$install_root" "$bin_folder_relative_path")")"
+if [ "$no_path" = false ]; then
+ say "Adding to current process PATH: \`$bin_path\`. Note: This change will be visible only when sourcing script."
+ export PATH="$bin_path":"$PATH"
+else
+ say "Binaries of dotnet can be found in $bin_path"
+fi
+
+say "Installation finished successfully."
diff --git a/eng/common/dotnet-install.cmd b/eng/common/dotnet-install.cmd
new file mode 100644
index 0000000000..b1c2642e76
--- /dev/null
+++ b/eng/common/dotnet-install.cmd
@@ -0,0 +1,2 @@
+@echo off
+powershell -ExecutionPolicy ByPass -NoProfile -command "& """%~dp0dotnet-install.ps1""" %*"
\ No newline at end of file
diff --git a/eng/common/dotnet-install.ps1 b/eng/common/dotnet-install.ps1
new file mode 100644
index 0000000000..811f0f717f
--- /dev/null
+++ b/eng/common/dotnet-install.ps1
@@ -0,0 +1,28 @@
+[CmdletBinding(PositionalBinding=$false)]
+Param(
+ [string] $verbosity = 'minimal',
+ [string] $architecture = '',
+ [string] $version = 'Latest',
+ [string] $runtime = 'dotnet',
+ [string] $RuntimeSourceFeed = '',
+ [string] $RuntimeSourceFeedKey = ''
+)
+
+. $PSScriptRoot\tools.ps1
+
+$dotnetRoot = Join-Path $RepoRoot '.dotnet'
+
+$installdir = $dotnetRoot
+try {
+ if ($architecture -and $architecture.Trim() -eq 'x86') {
+ $installdir = Join-Path $installdir 'x86'
+ }
+ InstallDotNet $installdir $version $architecture $runtime $true -RuntimeSourceFeed $RuntimeSourceFeed -RuntimeSourceFeedKey $RuntimeSourceFeedKey
+}
+catch {
+ Write-Host $_.ScriptStackTrace
+ Write-PipelineTelemetryError -Category 'InitializeToolset' -Message $_
+ ExitWithExitCode 1
+}
+
+ExitWithExitCode 0
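This wrapper resolves $RepoRoot via tools.ps1 and delegates to its InstallDotNet helper, so the surface is just the parameters declared at the top. A plausible invocation from a repo checkout (the version shown is a placeholder):

```bash
powershell -ExecutionPolicy ByPass -NoProfile -File eng/common/dotnet-install.ps1 -version 3.1.402 -runtime dotnet -architecture x64
```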
diff --git a/eng/common/dotnet-install.sh b/eng/common/dotnet-install.sh
new file mode 100755
index 0000000000..ead6a1d9a2
--- /dev/null
+++ b/eng/common/dotnet-install.sh
@@ -0,0 +1,89 @@
+#!/usr/bin/env bash
+
+source="${BASH_SOURCE[0]}"
+# resolve $source until the file is no longer a symlink
+while [[ -h "$source" ]]; do
+ scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
+ source="$(readlink "$source")"
+ # if $source was a relative symlink, we need to resolve it relative to the path where the
+ # symlink file was located
+ [[ $source != /* ]] && source="$scriptroot/$source"
+done
+scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
+
+. "$scriptroot/tools.sh"
+
+version='Latest'
+architecture=''
+runtime='dotnet'
+runtimeSourceFeed=''
+runtimeSourceFeedKey=''
+while [[ $# -gt 0 ]]; do
+ opt="$(echo "$1" | awk '{print tolower($0)}')"
+ case "$opt" in
+ -version|-v)
+ shift
+ version="$1"
+ ;;
+ -architecture|-a)
+ shift
+ architecture="$1"
+ ;;
+ -runtime|-r)
+ shift
+ runtime="$1"
+ ;;
+ -runtimesourcefeed)
+ shift
+ runtimeSourceFeed="$1"
+ ;;
+ -runtimesourcefeedkey)
+ shift
+ runtimeSourceFeedKey="$1"
+ ;;
+ *)
+ Write-PipelineTelemetryError -Category 'Build' -Message "Invalid argument: $1"
+ exit 1
+ ;;
+ esac
+ shift
+done
+
+# Use uname to determine what the CPU is.
+cpuname=$(uname -p)
+# Some Linux platforms report unknown for platform, but the arch for machine.
+if [[ "$cpuname" == "unknown" ]]; then
+ cpuname=$(uname -m)
+fi
+
+case $cpuname in
+ aarch64)
+ buildarch=arm64
+ ;;
+ amd64|x86_64)
+ buildarch=x64
+ ;;
+ armv*l)
+ buildarch=arm
+ ;;
+ i686)
+ buildarch=x86
+ ;;
+ *)
+ echo "Unknown CPU $cpuname detected, treating it as x64"
+ buildarch=x64
+ ;;
+esac
+
+dotnetRoot="$repo_root/.dotnet"
+if [[ $architecture != "" ]] && [[ $architecture != $buildarch ]]; then
+ dotnetRoot="$dotnetRoot/$architecture"
+fi
+
+InstallDotNet $dotnetRoot $version "$architecture" $runtime true $runtimeSourceFeed $runtimeSourceFeedKey || {
+  exit_code=$?  # 'local' is only valid inside a function; use a plain assignment here
+ Write-PipelineTelemetryError -Category 'InitializeToolset' -Message "dotnet-install.sh failed (exit code '$exit_code')." >&2
+ ExitWithExitCode $exit_code
+}
+
+ExitWithExitCode 0
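As with the PowerShell wrapper, the arguments are thin aliases over the shared InstallDotNet helper; for example (the version is illustrative):

```bash
# Pinned shared runtime into <repo>/.dotnet:
./eng/common/dotnet-install.sh -runtime dotnet -version 3.1.0

# Cross-install for a non-native architecture; lands in <repo>/.dotnet/arm64:
./eng/common/dotnet-install.sh -runtime dotnet -architecture arm64
```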
diff --git a/eng/common/enable-cross-org-publishing.ps1 b/eng/common/enable-cross-org-publishing.ps1
new file mode 100644
index 0000000000..da09da4f1f
--- /dev/null
+++ b/eng/common/enable-cross-org-publishing.ps1
@@ -0,0 +1,13 @@
+param(
+ [string] $token
+)
+
+
+. $PSScriptRoot\pipeline-logging-functions.ps1
+
+# Write-PipelineSetVariable will no-op if a variable named $ci is not defined
+# Since this script is only ever called in AzDO builds, just universally set it
+$ci = $true
+
+Write-PipelineSetVariable -Name 'VSS_NUGET_ACCESSTOKEN' -Value $token -IsMultiJobVariable $false
+Write-PipelineSetVariable -Name 'VSS_NUGET_URI_PREFIXES' -Value 'https://dnceng.pkgs.visualstudio.com/;https://pkgs.dev.azure.com/dnceng/;https://devdiv.pkgs.visualstudio.com/;https://pkgs.dev.azure.com/devdiv/' -IsMultiJobVariable $false
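Outside an AzDO job you can approximate what this script wires up by exporting the same two variables yourself; the variable names are the real ones set above, while the token value is a placeholder:

```bash
export VSS_NUGET_ACCESSTOKEN='<personal-access-token>'
export VSS_NUGET_URI_PREFIXES='https://dnceng.pkgs.visualstudio.com/;https://pkgs.dev.azure.com/dnceng/;https://devdiv.pkgs.visualstudio.com/;https://pkgs.dev.azure.com/devdiv/'
```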
diff --git a/eng/common/generate-graph-files.ps1 b/eng/common/generate-graph-files.ps1
new file mode 100644
index 0000000000..0728b1a8b5
--- /dev/null
+++ b/eng/common/generate-graph-files.ps1
@@ -0,0 +1,86 @@
+Param(
+ [Parameter(Mandatory=$true)][string] $barToken, # Token generated at https://maestro-prod.westus2.cloudapp.azure.com/Account/Tokens
+ [Parameter(Mandatory=$true)][string] $gitHubPat, # GitHub personal access token from https://github.com/settings/tokens (no auth scopes needed)
+ [Parameter(Mandatory=$true)][string] $azdoPat, # Azure Dev Ops tokens from https://dev.azure.com/dnceng/_details/security/tokens (code read scope needed)
+ [Parameter(Mandatory=$true)][string] $outputFolder, # Where the graphviz.txt file will be created
+ [string] $darcVersion, # darc's version
+ [string] $graphvizVersion = '2.38', # GraphViz version
+ [switch] $includeToolset # Whether the graph should include toolset dependencies or not. i.e. arcade, optimization. For more about
+ # toolset dependencies see https://github.com/dotnet/arcade/blob/master/Documentation/Darc.md#toolset-vs-product-dependencies
+)
+
+function CheckExitCode ([string]$stage)
+{
+ $exitCode = $LASTEXITCODE
+ if ($exitCode -ne 0) {
+ Write-PipelineTelemetryError -Category 'Arcade' -Message "Something failed in stage: '$stage'. Check for errors above. Exiting now..."
+ ExitWithExitCode $exitCode
+ }
+}
+
+try {
+ $ErrorActionPreference = 'Stop'
+ . $PSScriptRoot\tools.ps1
+
+ Import-Module -Name (Join-Path $PSScriptRoot 'native\CommonLibrary.psm1')
+
+ Push-Location $PSScriptRoot
+
+ Write-Host 'Installing darc...'
+ . .\darc-init.ps1 -darcVersion $darcVersion
+ CheckExitCode 'Running darc-init'
+
+ $engCommonBaseDir = Join-Path $PSScriptRoot 'native\'
+ $graphvizInstallDir = CommonLibrary\Get-NativeInstallDirectory
+ $nativeToolBaseUri = 'https://netcorenativeassets.blob.core.windows.net/resource-packages/external'
+ $installBin = Join-Path $graphvizInstallDir 'bin'
+
+ Write-Host 'Installing dot...'
+ .\native\install-tool.ps1 -ToolName graphviz -InstallPath $installBin -BaseUri $nativeToolBaseUri -CommonLibraryDirectory $engCommonBaseDir -Version $graphvizVersion -Verbose
+
+ $darcExe = "$env:USERPROFILE\.dotnet\tools"
+ $darcExe = Resolve-Path "$darcExe\darc.exe"
+
+ Create-Directory $outputFolder
+
+ # Generate 3 graph descriptions:
+ # 1. Flat with coherency information
+ # 2. Graphviz (dot) file
+ # 3. Standard dependency graph
+ $graphVizFilePath = "$outputFolder\graphviz.txt"
+ $graphVizImageFilePath = "$outputFolder\graph.png"
+ $normalGraphFilePath = "$outputFolder\graph-full.txt"
+ $flatGraphFilePath = "$outputFolder\graph-flat.txt"
+ $baseOptions = @( '--github-pat', "$gitHubPat", '--azdev-pat', "$azdoPat", '--password', "$barToken" )
+
+ if ($includeToolset) {
+ Write-Host 'Toolsets will be included in the graph...'
+ $baseOptions += @( '--include-toolset' )
+ }
+
+ Write-Host 'Generating standard dependency graph...'
+ & "$darcExe" get-dependency-graph @baseOptions --output-file $normalGraphFilePath
+ CheckExitCode 'Generating normal dependency graph'
+
+ Write-Host 'Generating flat dependency graph and graphviz file...'
+ & "$darcExe" get-dependency-graph @baseOptions --flat --coherency --graphviz $graphVizFilePath --output-file $flatGraphFilePath
+ CheckExitCode 'Generating flat and graphviz dependency graph'
+
+ Write-Host "Generating graph image $graphVizFilePath"
+ $dotFilePath = Join-Path $installBin "graphviz\$graphvizVersion\release\bin\dot.exe"
+ & "$dotFilePath" -Tpng -o"$graphVizImageFilePath" "$graphVizFilePath"
+ CheckExitCode 'Generating graphviz image'
+
+ Write-Host "'$graphVizFilePath', '$flatGraphFilePath', '$normalGraphFilePath' and '$graphVizImageFilePath' created!"
+}
+catch {
+ if (!$includeToolset) {
+ Write-Host 'This might be a toolset repo which includes only toolset dependencies. ' -NoNewline -ForegroundColor Yellow
+ Write-Host 'Since -includeToolset is not set there is no graph to create. Include -includeToolset and try again...' -ForegroundColor Yellow
+ }
+ Write-Host $_.ScriptStackTrace
+ Write-PipelineTelemetryError -Category 'Arcade' -Message $_
+ ExitWithExitCode 1
+} finally {
+ Pop-Location
+}
\ No newline at end of file
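An end-to-end run needs the three tokens described in the parameter comments; a sketch of such an invocation (all secret values and the output path are placeholders):

```bash
pwsh ./eng/common/generate-graph-files.ps1 \
  -barToken '<maestro-token>' \
  -gitHubPat '<github-pat>' \
  -azdoPat '<azdo-pat>' \
  -outputFolder ./graphs \
  -includeToolset
```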
diff --git a/eng/common/helixpublish.proj b/eng/common/helixpublish.proj
new file mode 100644
index 0000000000..d7f185856e
--- /dev/null
+++ b/eng/common/helixpublish.proj
@@ -0,0 +1,26 @@
+<Project Sdk="Microsoft.DotNet.Helix.Sdk" DefaultTargets="Test">
+
+  <PropertyGroup Condition="'$(AGENT_OS)' != 'Windows_NT'">
+    <WorkItemCommand>msbuild</WorkItemCommand>
+  </PropertyGroup>
+
+  <ItemGroup>
+    <HelixCorrelationPayload Include="$(CorrelationPayloadDirectory)">
+      <PayloadDirectory>%(Identity)</PayloadDirectory>
+    </HelixCorrelationPayload>
+  </ItemGroup>
+
+  <ItemGroup>
+    <HelixWorkItem Include="WorkItem" Condition="'$(WorkItemDirectory)' != ''">
+      <PayloadDirectory>$(WorkItemDirectory)</PayloadDirectory>
+      <Command>$(WorkItemCommand)</Command>
+      <Timeout>$(WorkItemTimeout)</Timeout>
+    </HelixWorkItem>
+  </ItemGroup>
+
+  <ItemGroup>
+    <XUnitProject Include="$(XUnitProjects.Split(';'))">
+      <Arguments />
+    </XUnitProject>
+  </ItemGroup>
+
+</Project>
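The project reads its work-item settings from MSBuild properties; a hypothetical publish invocation (values are placeholders, and a real run also needs the Helix SDK's connection properties, such as the target queues):

```bash
dotnet msbuild eng/common/helixpublish.proj \
  /p:WorkItemDirectory=artifacts/helix-payload \
  /p:WorkItemCommand="msbuild RunTests.proj" \
  /p:WorkItemTimeout=00:30:00
```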
diff --git a/eng/common/init-tools-native.cmd b/eng/common/init-tools-native.cmd
new file mode 100644
index 0000000000..438cd548c4
--- /dev/null
+++ b/eng/common/init-tools-native.cmd
@@ -0,0 +1,3 @@
+@echo off
+powershell -NoProfile -NoLogo -ExecutionPolicy ByPass -command "& """%~dp0init-tools-native.ps1""" %*"
+exit /b %ErrorLevel%
\ No newline at end of file
diff --git a/eng/common/init-tools-native.ps1 b/eng/common/init-tools-native.ps1
new file mode 100644
index 0000000000..db830c00a6
--- /dev/null
+++ b/eng/common/init-tools-native.ps1
@@ -0,0 +1,152 @@
+<#
+.SYNOPSIS
+Entry point script for installing native tools
+
+.DESCRIPTION
+Reads $RepoRoot\global.json file to determine native assets to install
+and executes installers for those tools
+
+.PARAMETER BaseUri
+Base file directory or Url from which to acquire tool archives
+
+.PARAMETER InstallDirectory
+Directory to install native toolset. This is a command-line override for the default
+Install directory precedence order:
+- InstallDirectory command-line override
+- NETCOREENG_INSTALL_DIRECTORY environment variable
+- (default) %USERPROFILE%/.netcoreeng/native
+
+.PARAMETER Clean
+Switch specifying to not install anything, but cleanup native asset folders
+
+.PARAMETER Force
+Clean and then install tools
+
+.PARAMETER DownloadRetries
+Total number of retry attempts
+
+.PARAMETER RetryWaitTimeInSeconds
+Wait time between retry attempts in seconds
+
+.PARAMETER GlobalJsonFile
+File path to global.json file
+
+.NOTES
+#>
+[CmdletBinding(PositionalBinding=$false)]
+Param (
+ [string] $BaseUri = 'https://netcorenativeassets.blob.core.windows.net/resource-packages/external',
+ [string] $InstallDirectory,
+ [switch] $Clean = $False,
+ [switch] $Force = $False,
+ [int] $DownloadRetries = 5,
+ [int] $RetryWaitTimeInSeconds = 30,
+ [string] $GlobalJsonFile
+)
+
+if (!$GlobalJsonFile) {
+ $GlobalJsonFile = Join-Path (Get-Item $PSScriptRoot).Parent.Parent.FullName 'global.json'
+}
+
+Set-StrictMode -version 2.0
+$ErrorActionPreference='Stop'
+
+. $PSScriptRoot\pipeline-logging-functions.ps1
+Import-Module -Name (Join-Path $PSScriptRoot 'native\CommonLibrary.psm1')
+
+try {
+ # Define verbose switch if undefined
+ $Verbose = $VerbosePreference -Eq 'Continue'
+
+ $EngCommonBaseDir = Join-Path $PSScriptRoot 'native\'
+ $NativeBaseDir = $InstallDirectory
+ if (!$NativeBaseDir) {
+ $NativeBaseDir = CommonLibrary\Get-NativeInstallDirectory
+ }
+ $Env:CommonLibrary_NativeInstallDir = $NativeBaseDir
+ $InstallBin = Join-Path $NativeBaseDir 'bin'
+ $InstallerPath = Join-Path $EngCommonBaseDir 'install-tool.ps1'
+
+ # Process tools list
+ Write-Host "Processing $GlobalJsonFile"
+ If (-Not (Test-Path $GlobalJsonFile)) {
+ Write-Host "Unable to find '$GlobalJsonFile'"
+ exit 0
+ }
+ $NativeTools = Get-Content($GlobalJsonFile) -Raw |
+ ConvertFrom-Json |
+ Select-Object -Expand 'native-tools' -ErrorAction SilentlyContinue
+ if ($NativeTools) {
+ $NativeTools.PSObject.Properties | ForEach-Object {
+ $ToolName = $_.Name
+ $ToolVersion = $_.Value
+ $LocalInstallerArguments = @{ ToolName = "$ToolName" }
+ $LocalInstallerArguments += @{ InstallPath = "$InstallBin" }
+ $LocalInstallerArguments += @{ BaseUri = "$BaseUri" }
+ $LocalInstallerArguments += @{ CommonLibraryDirectory = "$EngCommonBaseDir" }
+ $LocalInstallerArguments += @{ Version = "$ToolVersion" }
+
+ if ($Verbose) {
+ $LocalInstallerArguments += @{ Verbose = $True }
+ }
+ if (Get-Variable 'Force' -ErrorAction 'SilentlyContinue') {
+ if($Force) {
+ $LocalInstallerArguments += @{ Force = $True }
+ }
+ }
+ if ($Clean) {
+ $LocalInstallerArguments += @{ Clean = $True }
+ }
+
+ Write-Verbose "Installing $ToolName version $ToolVersion"
+ Write-Verbose "Executing '$InstallerPath $($LocalInstallerArguments.Keys.ForEach({"-$_ '$($LocalInstallerArguments.$_)'"}) -join ' ')'"
+ & $InstallerPath @LocalInstallerArguments
+ if ($LASTEXITCODE -Ne "0") {
+ $errMsg = "$ToolName installation failed"
+ if ((Get-Variable 'DoNotAbortNativeToolsInstallationOnFailure' -ErrorAction 'SilentlyContinue') -and $DoNotAbortNativeToolsInstallationOnFailure) {
+ $showNativeToolsWarning = $true
+ if ((Get-Variable 'DoNotDisplayNativeToolsInstallationWarnings' -ErrorAction 'SilentlyContinue') -and $DoNotDisplayNativeToolsInstallationWarnings) {
+ $showNativeToolsWarning = $false
+ }
+ if ($showNativeToolsWarning) {
+ Write-Warning $errMsg
+ }
+ $toolInstallationFailure = $true
+ } else {
+ # We cannot change this to Write-PipelineTelemetryError because of https://github.com/dotnet/arcade/issues/4482
+ Write-Host $errMsg
+ exit 1
+ }
+ }
+ }
+
+ if ((Get-Variable 'toolInstallationFailure' -ErrorAction 'SilentlyContinue') -and $toolInstallationFailure) {
+ # We cannot change this to Write-PipelineTelemetryError because of https://github.com/dotnet/arcade/issues/4482
+ Write-Host 'Native tools bootstrap failed'
+ exit 1
+ }
+ }
+ else {
+ Write-Host 'No native tools defined in global.json'
+ exit 0
+ }
+
+ if ($Clean) {
+ exit 0
+ }
+ if (Test-Path $InstallBin) {
+ Write-Host 'Native tools are available from ' (Convert-Path -Path $InstallBin)
+ Write-Host "##vso[task.prependpath]$(Convert-Path -Path $InstallBin)"
+ return $InstallBin
+ }
+ else {
+ Write-PipelineTelemetryError -Category 'NativeToolsBootstrap' -Message 'Native tools install directory does not exist, installation failed'
+ exit 1
+ }
+ exit 0
+}
+catch {
+ Write-Host $_.ScriptStackTrace
+ Write-PipelineTelemetryError -Category 'NativeToolsBootstrap' -Message $_
+ ExitWithExitCode 1
+}
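The installer is driven by a "native-tools" object in global.json, iterated as tool-name/version pairs; a minimal example of that section (the tool and version are illustrative):

```bash
# Print the shape the script expects to find in global.json:
cat <<'EOF'
{
  "native-tools": {
    "cmake": "3.16.4"
  }
}
EOF
```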
diff --git a/eng/common/init-tools-native.sh b/eng/common/init-tools-native.sh
new file mode 100755
index 0000000000..29fc5db8ae
--- /dev/null
+++ b/eng/common/init-tools-native.sh
@@ -0,0 +1,173 @@
+#!/usr/bin/env bash
+
+source="${BASH_SOURCE[0]}"
+scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
+
+base_uri='https://netcorenativeassets.blob.core.windows.net/resource-packages/external'
+install_directory=''
+clean=false
+force=false
+download_retries=5
+retry_wait_time_seconds=30
+global_json_file="$(dirname "$(dirname "${scriptroot}")")/global.json"
+declare -A native_assets
+
+. $scriptroot/pipeline-logging-functions.sh
+. $scriptroot/native/common-library.sh
+
+while (($# > 0)); do
+ lowerI="$(echo $1 | awk '{print tolower($0)}')"
+ case $lowerI in
+ --baseuri)
+ base_uri=$2
+ shift 2
+ ;;
+ --installdirectory)
+ install_directory=$2
+ shift 2
+ ;;
+ --clean)
+ clean=true
+ shift 1
+ ;;
+ --force)
+ force=true
+ shift 1
+ ;;
+ --donotabortonfailure)
+ donotabortonfailure=true
+ shift 1
+ ;;
+ --donotdisplaywarnings)
+ donotdisplaywarnings=true
+ shift 1
+ ;;
+ --downloadretries)
+ download_retries=$2
+ shift 2
+ ;;
+ --retrywaittimeseconds)
+ retry_wait_time_seconds=$2
+ shift 2
+ ;;
+ --help)
+ echo "Common settings:"
+ echo " --installdirectory Directory to install native toolset."
+ echo " This is a command-line override for the default"
+ echo " Install directory precedence order:"
+ echo " - InstallDirectory command-line override"
+ echo " - NETCOREENG_INSTALL_DIRECTORY environment variable"
+ echo " - (default) %USERPROFILE%/.netcoreeng/native"
+ echo ""
+ echo " --clean Switch specifying not to install anything, but cleanup native asset folders"
+ echo " --donotabortonfailure Switch specifiying whether to abort native tools installation on failure"
+ echo " --donotdisplaywarnings Switch specifiying whether to display warnings during native tools installation on failure"
+ echo " --force Clean and then install tools"
+ echo " --help Print help and exit"
+ echo ""
+ echo "Advanced settings:"
+ echo " --baseuri Base URI for where to download native tools from"
+ echo " --downloadretries Number of times a download should be attempted"
+ echo " --retrywaittimeseconds Wait time between download attempts"
+ echo ""
+ exit 0
+ ;;
+ esac
+done
+
+function ReadGlobalJsonNativeTools {
+ # Get the native-tools section from the global.json.
+ local native_tools_section=$(cat $global_json_file | awk '/"native-tools"/,/}/')
+ # Only extract the contents of the object.
+ local native_tools_list=$(echo $native_tools_section | awk -F"[{}]" '{print $2}')
+ native_tools_list=${native_tools_list//[\" ]/}
+ native_tools_list=$( echo "$native_tools_list" | sed 's/\s//g' | sed 's/,/\n/g' )
+
+ local old_IFS=$IFS
+ while read -r line; do
+ # Lines are of the form: 'tool:version'
+ IFS=:
+ while read -r key value; do
+ native_assets[$key]=$value
+ done <<< "$line"
+ done <<< "$native_tools_list"
+ IFS=$old_IFS
+
+ return 0;
+}
+
+native_base_dir=$install_directory
+if [[ -z $install_directory ]]; then
+ native_base_dir=$(GetNativeInstallDirectory)
+fi
+
+install_bin="${native_base_dir}/bin"
+installed_any=false
+
+ReadGlobalJsonNativeTools
+
+if [[ ${#native_assets[@]} -eq 0 ]]; then
+ echo "No native tools defined in global.json"
+ exit 0;
+else
+ native_installer_dir="$scriptroot/native"
+ for tool in "${!native_assets[@]}"
+ do
+ tool_version=${native_assets[$tool]}
+ installer_path="$native_installer_dir/install-$tool.sh"
+ installer_command="$installer_path"
+ installer_command+=" --baseuri $base_uri"
+ installer_command+=" --installpath $install_bin"
+ installer_command+=" --version $tool_version"
+ echo $installer_command
+
+ if [[ $force = true ]]; then
+ installer_command+=" --force"
+ fi
+
+ if [[ $clean = true ]]; then
+ installer_command+=" --clean"
+ fi
+
+ if [[ -a $installer_path ]]; then
+ $installer_command
+ if [[ $? != 0 ]]; then
+ if [[ $donotabortonfailure = true ]]; then
+ if [[ $donotdisplaywarnings != true ]]; then
+ Write-PipelineTelemetryError -category 'NativeToolsBootstrap' "Execution Failed"
+ fi
+ else
+ Write-PipelineTelemetryError -category 'NativeToolsBootstrap' "Execution Failed"
+ exit 1
+ fi
+ else
+        installed_any=true
+ fi
+ else
+ if [[ $donotabortonfailure == true ]]; then
+ if [[ $donotdisplaywarnings != true ]]; then
+ Write-PipelineTelemetryError -category 'NativeToolsBootstrap' "Execution Failed: no install script"
+ fi
+ else
+ Write-PipelineTelemetryError -category 'NativeToolsBootstrap' "Execution Failed: no install script"
+ exit 1
+ fi
+ fi
+ done
+fi
+
+if [[ $clean = true ]]; then
+ exit 0
+fi
+
+if [[ -d $install_bin ]]; then
+ echo "Native tools are available from $install_bin"
+ echo "##vso[task.prependpath]$install_bin"
+else
+ if [[ $installed_any = true ]]; then
+ Write-PipelineTelemetryError -category 'NativeToolsBootstrap' "Native tools install directory does not exist, installation failed"
+ exit 1
+ fi
+fi
+
+exit 0
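Typical invocations, using the flags handled by the parser above:

```bash
# Install everything listed under native-tools in global.json:
./eng/common/init-tools-native.sh

# Remove cached tool folders, or force a clean reinstall with fewer retries:
./eng/common/init-tools-native.sh --clean
./eng/common/init-tools-native.sh --force --downloadretries 3
```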
diff --git a/eng/common/internal-feed-operations.ps1 b/eng/common/internal-feed-operations.ps1
new file mode 100644
index 0000000000..b8f6529fdc
--- /dev/null
+++ b/eng/common/internal-feed-operations.ps1
@@ -0,0 +1,134 @@
+param(
+ [Parameter(Mandatory=$true)][string] $Operation,
+ [string] $AuthToken,
+ [string] $CommitSha,
+ [string] $RepoName,
+ [switch] $IsFeedPrivate
+)
+
+$ErrorActionPreference = 'Stop'
+Set-StrictMode -Version 2.0
+. $PSScriptRoot\tools.ps1
+
+# Sets VSS_NUGET_EXTERNAL_FEED_ENDPOINTS based on the "darc-int-*" feeds defined in NuGet.config. This is needed
+# in build agents by CredProvider to authenticate the restore requests to internal feeds as specified in
+# https://github.com/microsoft/artifacts-credprovider/blob/0f53327cd12fd893d8627d7b08a2171bf5852a41/README.md#environment-variables. This should ONLY be called from identified
+# internal builds
+function SetupCredProvider {
+ param(
+ [string] $AuthToken
+ )
+
+ # Install the Cred Provider NuGet plugin
+ Write-Host 'Setting up Cred Provider NuGet plugin in the agent...'
+ Write-Host "Getting 'installcredprovider.ps1' from 'https://github.com/microsoft/artifacts-credprovider'..."
+
+ $url = 'https://raw.githubusercontent.com/microsoft/artifacts-credprovider/master/helpers/installcredprovider.ps1'
+
+ Write-Host "Writing the contents of 'installcredprovider.ps1' locally..."
+ Invoke-WebRequest $url -OutFile installcredprovider.ps1
+
+ Write-Host 'Installing plugin...'
+ .\installcredprovider.ps1 -Force
+
+ Write-Host "Deleting local copy of 'installcredprovider.ps1'..."
+ Remove-Item .\installcredprovider.ps1
+
+ if (-Not("$env:USERPROFILE\.nuget\plugins\netcore")) {
+ Write-PipelineTelemetryError -Category 'Arcade' -Message 'CredProvider plugin was not installed correctly!'
+ ExitWithExitCode 1
+ }
+ else {
+ Write-Host 'CredProvider plugin was installed correctly!'
+ }
+
+ # Then, we set the 'VSS_NUGET_EXTERNAL_FEED_ENDPOINTS' environment variable to restore from the stable
+ # feeds successfully
+
+ $nugetConfigPath = "$RepoRoot\NuGet.config"
+
+ if (-Not (Test-Path -Path $nugetConfigPath)) {
+ Write-PipelineTelemetryError -Category 'Build' -Message 'NuGet.config file not found in repo root!'
+ ExitWithExitCode 1
+ }
+
+ $endpoints = New-Object System.Collections.ArrayList
+ $nugetConfigPackageSources = Select-Xml -Path $nugetConfigPath -XPath "//packageSources/add[contains(@key, 'darc-int-')]/@value" | foreach{$_.Node.Value}
+
+ if (($nugetConfigPackageSources | Measure-Object).Count -gt 0 ) {
+ foreach ($stableRestoreResource in $nugetConfigPackageSources) {
+ $trimmedResource = ([string]$stableRestoreResource).Trim()
+ [void]$endpoints.Add(@{endpoint="$trimmedResource"; password="$AuthToken"})
+ }
+ }
+
+ if (($endpoints | Measure-Object).Count -gt 0) {
+ # [SuppressMessage("Microsoft.Security", "CS002:SecretInNextLine", Justification="Endpoint code example with no real credentials.")]
+ # Create the JSON object. It should look like '{"endpointCredentials": [{"endpoint":"http://example.index.json", "username":"optional", "password":"accesstoken"}]}'
+ $endpointCredentials = @{endpointCredentials=$endpoints} | ConvertTo-Json -Compress
+
+ # Create the environment variables the AzDo way
+ Write-LoggingCommand -Area 'task' -Event 'setvariable' -Data $endpointCredentials -Properties @{
+ 'variable' = 'VSS_NUGET_EXTERNAL_FEED_ENDPOINTS'
+ 'issecret' = 'false'
+ }
+
+ # We don't want sessions cached since we will be updating the endpoints quite frequently
+ Write-LoggingCommand -Area 'task' -Event 'setvariable' -Data 'False' -Properties @{
+ 'variable' = 'NUGET_CREDENTIALPROVIDER_SESSIONTOKENCACHE_ENABLED'
+ 'issecret' = 'false'
+ }
+ }
+ else
+ {
+ Write-Host 'No internal endpoints found in NuGet.config'
+ }
+}
+
+#Workaround for https://github.com/microsoft/msbuild/issues/4430
+function InstallDotNetSdkAndRestoreArcade {
+ $dotnetTempDir = "$RepoRoot\dotnet"
+ $dotnetSdkVersion="2.1.507" # After experimentation we know this version works when restoring the SDK (compared to 3.0.*)
+ $dotnet = "$dotnetTempDir\dotnet.exe"
+ $restoreProjPath = "$PSScriptRoot\restore.proj"
+
+ Write-Host "Installing dotnet SDK version $dotnetSdkVersion to restore Arcade SDK..."
+ InstallDotNetSdk "$dotnetTempDir" "$dotnetSdkVersion"
+
+  '<Project Sdk="Microsoft.DotNet.Arcade.Sdk"/>' | Out-File "$restoreProjPath"
+
+ & $dotnet restore $restoreProjPath
+
+ Write-Host 'Arcade SDK restored!'
+
+ if (Test-Path -Path $restoreProjPath) {
+ Remove-Item $restoreProjPath
+ }
+
+ if (Test-Path -Path $dotnetTempDir) {
+ Remove-Item $dotnetTempDir -Recurse
+ }
+}
+
+try {
+ Push-Location $PSScriptRoot
+
+ if ($Operation -like 'setup') {
+ SetupCredProvider $AuthToken
+ }
+ elseif ($Operation -like 'install-restore') {
+ InstallDotNetSdkAndRestoreArcade
+ }
+ else {
+ Write-PipelineTelemetryError -Category 'Arcade' -Message "Unknown operation '$Operation'!"
+ ExitWithExitCode 1
+ }
+}
+catch {
+ Write-Host $_.ScriptStackTrace
+ Write-PipelineTelemetryError -Category 'Arcade' -Message $_
+ ExitWithExitCode 1
+}
+finally {
+ Pop-Location
+}
diff --git a/eng/common/internal-feed-operations.sh b/eng/common/internal-feed-operations.sh
new file mode 100755
index 0000000000..9ed225e7e5
--- /dev/null
+++ b/eng/common/internal-feed-operations.sh
@@ -0,0 +1,143 @@
+#!/usr/bin/env bash
+
+set -e
+
+# Sets VSS_NUGET_EXTERNAL_FEED_ENDPOINTS based on the "darc-int-*" feeds defined in NuGet.config. This is needed
+# in build agents by CredProvider to authenticate the restore requests to internal feeds as specified in
+# https://github.com/microsoft/artifacts-credprovider/blob/0f53327cd12fd893d8627d7b08a2171bf5852a41/README.md#environment-variables.
+# This should ONLY be called from identified internal builds
+function SetupCredProvider {
+ local authToken=$1
+
+ # Install the Cred Provider NuGet plugin
+ echo "Setting up Cred Provider NuGet plugin in the agent..."...
+ echo "Getting 'installcredprovider.ps1' from 'https://github.com/microsoft/artifacts-credprovider'..."
+
+ local url="https://raw.githubusercontent.com/microsoft/artifacts-credprovider/master/helpers/installcredprovider.sh"
+
+ echo "Writing the contents of 'installcredprovider.ps1' locally..."
+ local installcredproviderPath="installcredprovider.sh"
+ if command -v curl > /dev/null; then
+ curl $url > "$installcredproviderPath"
+ else
+ wget -q -O "$installcredproviderPath" "$url"
+ fi
+
+ echo "Installing plugin..."
+ . "$installcredproviderPath"
+
+ echo "Deleting local copy of 'installcredprovider.sh'..."
+ rm installcredprovider.sh
+
+ if [ ! -d "$HOME/.nuget/plugins" ]; then
+ Write-PipelineTelemetryError -category 'Build' 'CredProvider plugin was not installed correctly!'
+ ExitWithExitCode 1
+ else
+ echo "CredProvider plugin was installed correctly!"
+ fi
+
+ # Then, we set the 'VSS_NUGET_EXTERNAL_FEED_ENDPOINTS' environment variable to restore from the stable
+ # feeds successfully
+
+ local nugetConfigPath="$repo_root/NuGet.config"
+
+ if [ ! "$nugetConfigPath" ]; then
+ Write-PipelineTelemetryError -category 'Build' "NuGet.config file not found in repo's root!"
+ ExitWithExitCode 1
+ fi
+
+ local endpoints='['
+ local nugetConfigPackageValues=`cat "$nugetConfigPath" | grep "key=\"darc-int-"`
+ local pattern="value=\"(.*)\""
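+  # Each matched line has roughly this shape (hypothetical feed):
+  #   <add key="darc-int-dotnet-runtime-0abc123" value="https://.../nuget/v3/index.json" />
+  # so the pattern extracts the feed URL from the value attribute in the loop below.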
+
+ for value in $nugetConfigPackageValues
+ do
+ if [[ $value =~ $pattern ]]; then
+ local endpoint="${BASH_REMATCH[1]}"
+ endpoints+="{\"endpoint\": \"$endpoint\", \"password\": \"$authToken\"},"
+ fi
+ done
+
+ endpoints=${endpoints%?}
+ endpoints+=']'
+
+ if [ ${#endpoints} -gt 2 ]; then
+ # [SuppressMessage("Microsoft.Security", "CS002:SecretInNextLine", Justification="Endpoint code example with no real credentials.")]
+ # Create the JSON object. It should look like '{"endpointCredentials": [{"endpoint":"http://example.index.json", "username":"optional", "password":"accesstoken"}]}'
+ local endpointCredentials="{\"endpointCredentials\": "$endpoints"}"
+
+ echo "##vso[task.setvariable variable=VSS_NUGET_EXTERNAL_FEED_ENDPOINTS]$endpointCredentials"
+ echo "##vso[task.setvariable variable=NUGET_CREDENTIALPROVIDER_SESSIONTOKENCACHE_ENABLED]False"
+ else
+ echo "No internal endpoints found in NuGet.config"
+ fi
+}
+
+# Workaround for https://github.com/microsoft/msbuild/issues/4430
+function InstallDotNetSdkAndRestoreArcade {
+ local dotnetTempDir="$repo_root/dotnet"
+ local dotnetSdkVersion="2.1.507" # After experimentation we know this version works when restoring the SDK (compared to 3.0.*)
+ local restoreProjPath="$repo_root/eng/common/restore.proj"
+
+ echo "Installing dotnet SDK version $dotnetSdkVersion to restore Arcade SDK..."
+ echo "" > "$restoreProjPath"
+
+ InstallDotNetSdk "$dotnetTempDir" "$dotnetSdkVersion"
+
+ local res=`$dotnetTempDir/dotnet restore $restoreProjPath`
+ echo "Arcade SDK restored!"
+
+ # Cleanup
+ if [ "$restoreProjPath" ]; then
+ rm "$restoreProjPath"
+ fi
+
+ if [ "$dotnetTempDir" ]; then
+ rm -r $dotnetTempDir
+ fi
+}
+
+source="${BASH_SOURCE[0]}"
+operation=''
+authToken=''
+repoName=''
+
+while [[ $# -gt 0 ]]; do
+ opt="$(echo "$1" | awk '{print tolower($0)}')"
+ case "$opt" in
+ --operation)
+ operation=$2
+ shift
+ ;;
+ --authtoken)
+ authToken=$2
+ shift
+ ;;
+ *)
+ echo "Invalid argument: $1"
+ usage
+ exit 1
+ ;;
+ esac
+
+ shift
+done
+
+while [[ -h "$source" ]]; do
+ scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
+ source="$(readlink "$source")"
+ # if $source was a relative symlink, we need to resolve it relative to the path where the
+ # symlink file was located
+ [[ $source != /* ]] && source="$scriptroot/$source"
+done
+scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
+
+. "$scriptroot/tools.sh"
+
+if [ "$operation" = "setup" ]; then
+ SetupCredProvider $authToken
+elif [ "$operation" = "install-restore" ]; then
+ InstallDotNetSdkAndRestoreArcade
+else
+ echo "Unknown operation '$operation'!"
+fi
diff --git a/eng/common/internal/Directory.Build.props b/eng/common/internal/Directory.Build.props
new file mode 100644
index 0000000000..dbf99d82a5
--- /dev/null
+++ b/eng/common/internal/Directory.Build.props
@@ -0,0 +1,4 @@
+
+
+
+
diff --git a/eng/common/internal/Tools.csproj b/eng/common/internal/Tools.csproj
new file mode 100644
index 0000000000..f46d5efe2e
--- /dev/null
+++ b/eng/common/internal/Tools.csproj
@@ -0,0 +1,28 @@
+
+
+
+
+ net472
+ false
+ false
+
+
+
+
+
+
+
+
+
+
+ https://devdiv.pkgs.visualstudio.com/_packaging/dotnet-core-internal-tooling/nuget/v3/index.json;
+
+
+ $(RestoreSources);
+ https://devdiv.pkgs.visualstudio.com/_packaging/VS/nuget/v3/index.json;
+
+
+
+
+
+
diff --git a/eng/common/msbuild.ps1 b/eng/common/msbuild.ps1
new file mode 100644
index 0000000000..c640123000
--- /dev/null
+++ b/eng/common/msbuild.ps1
@@ -0,0 +1,26 @@
+[CmdletBinding(PositionalBinding=$false)]
+Param(
+ [string] $verbosity = 'minimal',
+ [bool] $warnAsError = $true,
+ [bool] $nodeReuse = $true,
+ [switch] $ci,
+ [switch] $prepareMachine,
+ [Parameter(ValueFromRemainingArguments=$true)][String[]]$extraArgs
+)
+
+. $PSScriptRoot\tools.ps1
+
+try {
+ if ($ci) {
+ $nodeReuse = $false
+ }
+
+ MSBuild @extraArgs
+}
+catch {
+ Write-Host $_.ScriptStackTrace
+ Write-PipelineTelemetryError -Category 'Build' -Message $_
+ ExitWithExitCode 1
+}
+
+ExitWithExitCode 0
\ No newline at end of file
diff --git a/eng/common/msbuild.sh b/eng/common/msbuild.sh
new file mode 100755
index 0000000000..8160cd5a59
--- /dev/null
+++ b/eng/common/msbuild.sh
@@ -0,0 +1,58 @@
+#!/usr/bin/env bash
+
+source="${BASH_SOURCE[0]}"
+
+# resolve $source until the file is no longer a symlink
+while [[ -h "$source" ]]; do
+ scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
+ source="$(readlink "$source")"
+ # if $source was a relative symlink, we need to resolve it relative to the path where the
+ # symlink file was located
+ [[ $source != /* ]] && source="$scriptroot/$source"
+done
+scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
+
+verbosity='minimal'
+warn_as_error=true
+node_reuse=true
+prepare_machine=false
+extra_args=''
+
+while (($# > 0)); do
+ lowerI="$(echo $1 | awk '{print tolower($0)}')"
+ case $lowerI in
+ --verbosity)
+ verbosity=$2
+ shift 2
+ ;;
+ --warnaserror)
+ warn_as_error=$2
+ shift 2
+ ;;
+ --nodereuse)
+ node_reuse=$2
+ shift 2
+ ;;
+ --ci)
+ ci=true
+ shift 1
+ ;;
+ --preparemachine)
+ prepare_machine=true
+ shift 1
+ ;;
+ *)
+ extra_args="$extra_args $1"
+ shift 1
+ ;;
+ esac
+done
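+
+# Example invocation (hypothetical project and properties):
+#   ./eng/common/msbuild.sh --verbosity minimal --ci /p:Configuration=Release MyProject.proj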
+
+. "$scriptroot/tools.sh"
+
+if [[ "$ci" == true ]]; then
+ node_reuse=false
+fi
+
+MSBuild $extra_args
+ExitWithExitCode 0
diff --git a/eng/common/native/CommonLibrary.psm1 b/eng/common/native/CommonLibrary.psm1
new file mode 100644
index 0000000000..d7d1a65109
--- /dev/null
+++ b/eng/common/native/CommonLibrary.psm1
@@ -0,0 +1,399 @@
+<#
+.SYNOPSIS
+Helper module to install an archive to a directory
+
+.DESCRIPTION
+Helper module to download and extract an archive to a specified directory
+
+.PARAMETER Uri
+Uri of artifact to download
+
+.PARAMETER InstallDirectory
+Directory to extract artifact contents to
+
+.PARAMETER Force
+Force download / extraction if file or contents already exist. Default = False
+
+.PARAMETER DownloadRetries
+Total number of retry attempts. Default = 5
+
+.PARAMETER RetryWaitTimeInSeconds
+Wait time between retry attempts in seconds. Default = 30
+
+.NOTES
+Returns False if download or extraction fail, True otherwise
+#>
+function DownloadAndExtract {
+ [CmdletBinding(PositionalBinding=$false)]
+ Param (
+ [Parameter(Mandatory=$True)]
+ [string] $Uri,
+ [Parameter(Mandatory=$True)]
+ [string] $InstallDirectory,
+ [switch] $Force = $False,
+ [int] $DownloadRetries = 5,
+ [int] $RetryWaitTimeInSeconds = 30
+ )
+ # Define verbose switch if undefined
+ $Verbose = $VerbosePreference -Eq "Continue"
+
+ $TempToolPath = CommonLibrary\Get-TempPathFilename -Path $Uri
+
+ # Download native tool
+ $DownloadStatus = CommonLibrary\Get-File -Uri $Uri `
+ -Path $TempToolPath `
+ -DownloadRetries $DownloadRetries `
+ -RetryWaitTimeInSeconds $RetryWaitTimeInSeconds `
+ -Force:$Force `
+ -Verbose:$Verbose
+
+ if ($DownloadStatus -Eq $False) {
+ Write-Error "Download failed"
+ return $False
+ }
+
+ # Extract native tool
+ $UnzipStatus = CommonLibrary\Expand-Zip -ZipPath $TempToolPath `
+ -OutputDirectory $InstallDirectory `
+ -Force:$Force `
+ -Verbose:$Verbose
+
+ if ($UnzipStatus -Eq $False) {
+ # Retry Download one more time with Force=true
+ $DownloadRetryStatus = CommonLibrary\Get-File -Uri $Uri `
+ -Path $TempToolPath `
+ -DownloadRetries 1 `
+ -RetryWaitTimeInSeconds $RetryWaitTimeInSeconds `
+ -Force:$True `
+ -Verbose:$Verbose
+
+ if ($DownloadRetryStatus -Eq $False) {
+ Write-Error "Last attempt of download failed as well"
+ return $False
+ }
+
+ # Retry unzip again one more time with Force=true
+ $UnzipRetryStatus = CommonLibrary\Expand-Zip -ZipPath $TempToolPath `
+ -OutputDirectory $InstallDirectory `
+ -Force:$True `
+ -Verbose:$Verbose
+ if ($UnzipRetryStatus -Eq $False)
+ {
+ Write-Error "Last attempt of unzip failed as well"
+ # Clean up partial zips and extracts
+ if (Test-Path $TempToolPath) {
+ Remove-Item $TempToolPath -Force
+ }
+ if (Test-Path $InstallDirectory) {
+ Remove-Item $InstallDirectory -Force -Recurse
+ }
+ return $False
+ }
+ }
+
+ return $True
+}
+
+<#
+.SYNOPSIS
+Download a file, retry on failure
+
+.DESCRIPTION
+Download specified file and retry if attempt fails
+
+.PARAMETER Uri
+Uri of file to download. If Uri is a local path, the file will be copied instead of downloaded
+
+.PARAMETER Path
+Path to download or copy uri file to
+
+.PARAMETER Force
+Overwrite existing file if present. Default = False
+
+.PARAMETER DownloadRetries
+Total number of retry attempts. Default = 5
+
+.PARAMETER RetryWaitTimeInSeconds
+Wait time between retry attempts in seconds Default = 30
+
+#>
+function Get-File {
+ [CmdletBinding(PositionalBinding=$false)]
+ Param (
+ [Parameter(Mandatory=$True)]
+ [string] $Uri,
+ [Parameter(Mandatory=$True)]
+ [string] $Path,
+ [int] $DownloadRetries = 5,
+ [int] $RetryWaitTimeInSeconds = 30,
+ [switch] $Force = $False
+ )
+ $Attempt = 0
+
+ if ($Force) {
+ if (Test-Path $Path) {
+ Remove-Item $Path -Force
+ }
+ }
+ if (Test-Path $Path) {
+ Write-Host "File '$Path' already exists, skipping download"
+ return $True
+ }
+
+ $DownloadDirectory = Split-Path -ErrorAction Ignore -Path "$Path" -Parent
+ if (-Not (Test-Path $DownloadDirectory)) {
+ New-Item -path $DownloadDirectory -force -itemType "Directory" | Out-Null
+ }
+
+ $TempPath = "$Path.tmp"
+ if (Test-Path -IsValid -Path $Uri) {
+ Write-Verbose "'$Uri' is a file path, copying temporarily to '$TempPath'"
+ Copy-Item -Path $Uri -Destination $TempPath
+ Write-Verbose "Moving temporary file to '$Path'"
+ Move-Item -Path $TempPath -Destination $Path
+ return $?
+ }
+ else {
+ Write-Verbose "Downloading $Uri"
+ # Don't display the console progress UI - it's a huge perf hit
+ $ProgressPreference = 'SilentlyContinue'
+ while($Attempt -Lt $DownloadRetries)
+ {
+ try {
+ Invoke-WebRequest -UseBasicParsing -Uri $Uri -OutFile $TempPath
+ Write-Verbose "Downloaded to temporary location '$TempPath'"
+ Move-Item -Path $TempPath -Destination $Path
+ Write-Verbose "Moved temporary file to '$Path'"
+ return $True
+ }
+ catch {
+ $Attempt++
+ if ($Attempt -Lt $DownloadRetries) {
+ $AttemptsLeft = $DownloadRetries - $Attempt
+ Write-Warning "Download failed, $AttemptsLeft attempts remaining, will retry in $RetryWaitTimeInSeconds seconds"
+ Start-Sleep -Seconds $RetryWaitTimeInSeconds
+ }
+ else {
+ Write-Error $_
+ Write-Error $_.Exception
+ }
+ }
+ }
+ }
+
+ return $False
+}
+
+<#
+.SYNOPSIS
+Generate a shim for a native tool
+
+.DESCRIPTION
+Creates a wrapper script (shim) that passes arguments forward to native tool assembly
+
+.PARAMETER ShimName
+The name of the shim
+
+.PARAMETER ShimDirectory
+The directory where shims are stored
+
+.PARAMETER ToolFilePath
+Path to file that shim forwards to
+
+.PARAMETER Force
+Replace shim if already present. Default = False
+
+.NOTES
+Returns $True if generating shim succeeds, $False otherwise
+#>
+function New-ScriptShim {
+ [CmdletBinding(PositionalBinding=$false)]
+ Param (
+ [Parameter(Mandatory=$True)]
+ [string] $ShimName,
+ [Parameter(Mandatory=$True)]
+ [string] $ShimDirectory,
+ [Parameter(Mandatory=$True)]
+ [string] $ToolFilePath,
+ [Parameter(Mandatory=$True)]
+ [string] $BaseUri,
+ [switch] $Force
+ )
+ try {
+ Write-Verbose "Generating '$ShimName' shim"
+
+ if (-Not (Test-Path $ToolFilePath)){
+ Write-Error "Specified tool file path '$ToolFilePath' does not exist"
+ return $False
+ }
+
+ # WinShimmer is a small .NET Framework program that creates .exe shims to bootstrapped programs
+ # Many of the checks for installed programs expect a .exe extension for Windows tools, rather
+ # than a .bat or .cmd file.
+ # Source: https://github.com/dotnet/arcade/tree/master/src/WinShimmer
+ if (-Not (Test-Path "$ShimDirectory\WinShimmer\winshimmer.exe")) {
+ $InstallStatus = DownloadAndExtract -Uri "$BaseUri/windows/winshimmer/WinShimmer.zip" `
+ -InstallDirectory $ShimDirectory\WinShimmer `
+ -Force:$Force `
+ -DownloadRetries 2 `
+ -RetryWaitTimeInSeconds 5 `
+ -Verbose:$Verbose
+ }
+
+ if ((Test-Path (Join-Path $ShimDirectory "$ShimName.exe"))) {
+ Write-Host "$ShimName.exe already exists; replacing..."
+ Remove-Item (Join-Path $ShimDirectory "$ShimName.exe")
+ }
+
+ & "$ShimDirectory\WinShimmer\winshimmer.exe" $ShimName $ToolFilePath $ShimDirectory
+ return $True
+ }
+ catch {
+ Write-Host $_
+ Write-Host $_.Exception
+ return $False
+ }
+}
+
+<#
+.SYNOPSIS
+Returns the machine architecture of the host machine
+
+.NOTES
+Returns 'x64' on 64 bit machines
+ Returns 'x86' on 32 bit machines
+#>
+function Get-MachineArchitecture {
+ $ProcessorArchitecture = $Env:PROCESSOR_ARCHITECTURE
+ $ProcessorArchitectureW6432 = $Env:PROCESSOR_ARCHITEW6432
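+  # In a 32-bit process on 64-bit Windows (WOW64), PROCESSOR_ARCHITECTURE reports "X86"
+  # while PROCESSOR_ARCHITEW6432 carries the real machine architecture (e.g. "AMD64").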
+ if($ProcessorArchitecture -Eq "X86")
+ {
+ if(($ProcessorArchitectureW6432 -Eq "") -Or
+ ($ProcessorArchitectureW6432 -Eq "X86")) {
+ return "x86"
+ }
+ $ProcessorArchitecture = $ProcessorArchitectureW6432
+ }
+ if (($ProcessorArchitecture -Eq "AMD64") -Or
+ ($ProcessorArchitecture -Eq "IA64") -Or
+ ($ProcessorArchitecture -Eq "ARM64")) {
+ return "x64"
+ }
+ return "x86"
+}
+
+<#
+.SYNOPSIS
+Get the name of a temporary folder under the native install directory
+#>
+function Get-TempDirectory {
+ return Join-Path (Get-NativeInstallDirectory) "temp/"
+}
+
+function Get-TempPathFilename {
+ [CmdletBinding(PositionalBinding=$false)]
+ Param (
+ [Parameter(Mandatory=$True)]
+ [string] $Path
+ )
+ $TempDir = CommonLibrary\Get-TempDirectory
+ $TempFilename = Split-Path $Path -leaf
+ $TempPath = Join-Path $TempDir $TempFilename
+ return $TempPath
+}
+
+<#
+.SYNOPSIS
+Returns the base directory to use for native tool installation
+
+.NOTES
+Returns the value of the NETCOREENG_INSTALL_DIRECTORY if that environment variable
+is set, or otherwise returns an install directory under the %USERPROFILE%
+#>
+function Get-NativeInstallDirectory {
+ $InstallDir = $Env:NETCOREENG_INSTALL_DIRECTORY
+ if (!$InstallDir) {
+ $InstallDir = Join-Path $Env:USERPROFILE ".netcoreeng/native/"
+ }
+ return $InstallDir
+}
+
+<#
+.SYNOPSIS
+Unzip an archive
+
+.DESCRIPTION
+Powershell module to unzip an archive to a specified directory
+
+.PARAMETER ZipPath (Required)
+Path to archive to unzip
+
+.PARAMETER OutputDirectory (Required)
+Output directory for archive contents
+
+.PARAMETER Force
+Overwrite output directory contents if they already exist
+
+.NOTES
+- Returns True and does not perform an extraction if output directory already exists but Overwrite is not True.
+- Returns True if unzip operation is successful
+- Returns False if Overwrite is True and it is unable to remove contents of OutputDirectory
+- Returns False if unable to extract zip archive
+#>
+function Expand-Zip {
+ [CmdletBinding(PositionalBinding=$false)]
+ Param (
+ [Parameter(Mandatory=$True)]
+ [string] $ZipPath,
+ [Parameter(Mandatory=$True)]
+ [string] $OutputDirectory,
+ [switch] $Force
+ )
+
+ Write-Verbose "Extracting '$ZipPath' to '$OutputDirectory'"
+ try {
+ if ((Test-Path $OutputDirectory) -And (-Not $Force)) {
+ Write-Host "Directory '$OutputDirectory' already exists, skipping extract"
+ return $True
+ }
+ if (Test-Path $OutputDirectory) {
+ Write-Verbose "'Force' is 'True', but '$OutputDirectory' exists, removing directory"
+ Remove-Item $OutputDirectory -Force -Recurse
+ if ($? -Eq $False) {
+ Write-Error "Unable to remove '$OutputDirectory'"
+ return $False
+ }
+ }
+
+ $TempOutputDirectory = Join-Path "$(Split-Path -Parent $OutputDirectory)" "$(Split-Path -Leaf $OutputDirectory).tmp"
+ if (Test-Path $TempOutputDirectory) {
+ Remove-Item $TempOutputDirectory -Force -Recurse
+ }
+ New-Item -Path $TempOutputDirectory -Force -ItemType "Directory" | Out-Null
+
+ Add-Type -assembly "system.io.compression.filesystem"
+ [io.compression.zipfile]::ExtractToDirectory("$ZipPath", "$TempOutputDirectory")
+ if ($? -Eq $False) {
+ Write-Error "Unable to extract '$ZipPath'"
+ return $False
+ }
+
+ Move-Item -Path $TempOutputDirectory -Destination $OutputDirectory
+ }
+ catch {
+ Write-Host $_
+ Write-Host $_.Exception
+
+ return $False
+ }
+ return $True
+}
+
+export-modulemember -function DownloadAndExtract
+export-modulemember -function Expand-Zip
+export-modulemember -function Get-File
+export-modulemember -function Get-MachineArchitecture
+export-modulemember -function Get-NativeInstallDirectory
+export-modulemember -function Get-TempDirectory
+export-modulemember -function Get-TempPathFilename
+export-modulemember -function New-ScriptShim
diff --git a/eng/common/native/common-library.sh b/eng/common/native/common-library.sh
new file mode 100755
index 0000000000..bf272dcf55
--- /dev/null
+++ b/eng/common/native/common-library.sh
@@ -0,0 +1,168 @@
+#!/usr/bin/env bash
+
+function GetNativeInstallDirectory {
+ local install_dir
+
+ if [[ -z $NETCOREENG_INSTALL_DIRECTORY ]]; then
+ install_dir=$HOME/.netcoreeng/native/
+ else
+ install_dir=$NETCOREENG_INSTALL_DIRECTORY
+ fi
+
+ echo $install_dir
+ return 0
+}
+
+function GetTempDirectory {
+
+ echo $(GetNativeInstallDirectory)temp/
+ return 0
+}
+
+function ExpandZip {
+ local zip_path=$1
+ local output_directory=$2
+ local force=${3:-false}
+
+ echo "Extracting $zip_path to $output_directory"
+ if [[ -d $output_directory ]] && [[ $force = false ]]; then
+ echo "Directory '$output_directory' already exists, skipping extract"
+ return 0
+ fi
+
+ if [[ -d $output_directory ]]; then
+ echo "'Force flag enabled, but '$output_directory' exists. Removing directory"
+ rm -rf $output_directory
+ if [[ $? != 0 ]]; then
+ Write-PipelineTelemetryError -category 'NativeToolsBootstrap' "Unable to remove '$output_directory'"
+ return 1
+ fi
+ fi
+
+ echo "Creating directory: '$output_directory'"
+ mkdir -p $output_directory
+
+ echo "Extracting archive"
+ tar -xf $zip_path -C $output_directory
+ if [[ $? != 0 ]]; then
+ Write-PipelineTelemetryError -category 'NativeToolsBootstrap' "Unable to extract '$zip_path'"
+ return 1
+ fi
+
+ return 0
+}
+
+function GetCurrentOS {
+ local unameOut="$(uname -s)"
+ case $unameOut in
+ Linux*) echo "Linux";;
+ Darwin*) echo "MacOS";;
+ esac
+ return 0
+}
+
+function GetFile {
+ local uri=$1
+ local path=$2
+ local force=${3:-false}
+ local download_retries=${4:-5}
+ local retry_wait_time_seconds=${5:-30}
+
+ if [[ -f $path ]]; then
+ if [[ $force = false ]]; then
+ echo "File '$path' already exists. Skipping download"
+ return 0
+ else
+ rm -rf $path
+ fi
+ fi
+
+ if [[ -f $uri ]]; then
+ echo "'$uri' is a file path, copying file to '$path'"
+ cp $uri $path
+ return $?
+ fi
+
+ echo "Downloading $uri"
+ # Use curl if available, otherwise use wget
+ if command -v curl > /dev/null; then
+ curl "$uri" -sSL --retry $download_retries --retry-delay $retry_wait_time_seconds --create-dirs -o "$path" --fail
+ else
+ wget -q -O "$path" "$uri" --tries="$download_retries"
+ fi
+
+ return $?
+}
+
+function GetTempPathFileName {
+ local path=$1
+
+ local temp_dir=$(GetTempDirectory)
+ local temp_file_name=$(basename $path)
+ echo $temp_dir$temp_file_name
+ return 0
+}
+
+function DownloadAndExtract {
+ local uri=$1
+ local installDir=$2
+ local force=${3:-false}
+ local download_retries=${4:-5}
+ local retry_wait_time_seconds=${5:-30}
+
+ local temp_tool_path=$(GetTempPathFileName $uri)
+
+ echo "downloading to: $temp_tool_path"
+
+ # Download file
+ GetFile "$uri" "$temp_tool_path" $force $download_retries $retry_wait_time_seconds
+ if [[ $? != 0 ]]; then
+ Write-PipelineTelemetryError -category 'NativeToolsBootstrap' "Failed to download '$uri' to '$temp_tool_path'."
+ return 1
+ fi
+
+ # Extract File
+ echo "extracting from $temp_tool_path to $installDir"
+ ExpandZip "$temp_tool_path" "$installDir" $force $download_retries $retry_wait_time_seconds
+ if [[ $? != 0 ]]; then
+ Write-PipelineTelemetryError -category 'NativeToolsBootstrap' "Failed to extract '$temp_tool_path' to '$installDir'."
+ return 1
+ fi
+
+ return 0
+}
+
+function NewScriptShim {
+ local shimpath=$1
+ local tool_file_path=$2
+ local force=${3:-false}
+
+ echo "Generating '$shimpath' shim"
+ if [[ -f $shimpath ]]; then
+ if [[ $force = false ]]; then
+ echo "File '$shimpath' already exists." >&2
+ return 1
+ else
+ rm -rf $shimpath
+ fi
+ fi
+
+ if [[ ! -f $tool_file_path ]]; then
+ Write-PipelineTelemetryError -category 'NativeToolsBootstrap' "Specified tool file path:'$tool_file_path' does not exist"
+ return 1
+ fi
+
+ local shim_contents=$'#!/usr/bin/env bash\n'
+ shim_contents+="SHIMARGS="$'$1\n'
+ shim_contents+="$tool_file_path"$' $SHIMARGS\n'
+
+ # Write shim file
+ echo "$shim_contents" > $shimpath
+
+ chmod +x $shimpath
+
+ echo "Finished generating shim '$shimpath'"
+
+ return $?
+}
+
diff --git a/eng/common/native/find-native-compiler.sh b/eng/common/native/find-native-compiler.sh
new file mode 100755
index 0000000000..aed19d07d5
--- /dev/null
+++ b/eng/common/native/find-native-compiler.sh
@@ -0,0 +1,121 @@
+#!/usr/bin/env bash
+#
+# This file locates the native compiler with the given name and version and sets the environment variables to locate it.
+#
+
+source="${BASH_SOURCE[0]}"
+
+# resolve $SOURCE until the file is no longer a symlink
+while [[ -h $source ]]; do
+ scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
+ source="$(readlink "$source")"
+
+ # if $source was a relative symlink, we need to resolve it relative to the path where the
+ # symlink file was located
+ [[ $source != /* ]] && source="$scriptroot/$source"
+done
+scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
+
+if [ $# -lt 1 ]
+then
+  echo "Usage..."
+  echo "find-native-compiler.sh <compiler> <compiler major version> <compiler minor version>"
+ echo "Specify the name of compiler (clang or gcc)."
+ echo "Specify the major version of compiler."
+ echo "Specify the minor version of compiler."
+ exit 1
+fi
+
+. $scriptroot/../pipeline-logging-functions.sh
+
+compiler="$1"
+cxxCompiler="$compiler++"
+majorVersion="$2"
+minorVersion="$3"
+
+if [ "$compiler" = "gcc" ]; then cxxCompiler="g++"; fi
+
+check_version_exists() {
+ desired_version=-1
+
+ # Set up the environment to be used for building with the desired compiler.
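+  # Distros package compilers under differing names; clang 6.0, for example, may be on
+  # PATH as "clang-6.0", "clang60", or "clang-60", so probe each convention in turn.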
+ if command -v "$compiler-$1.$2" > /dev/null; then
+ desired_version="-$1.$2"
+ elif command -v "$compiler$1$2" > /dev/null; then
+ desired_version="$1$2"
+ elif command -v "$compiler-$1$2" > /dev/null; then
+ desired_version="-$1$2"
+ fi
+
+ echo "$desired_version"
+}
+
+if [ -z "$CLR_CC" ]; then
+
+ # Set default versions
+ if [ -z "$majorVersion" ]; then
+ # note: gcc (all versions) and clang versions higher than 6 do not have minor version in file name, if it is zero.
+ if [ "$compiler" = "clang" ]; then versions=( 9 8 7 6.0 5.0 4.0 3.9 3.8 3.7 3.6 3.5 )
+ elif [ "$compiler" = "gcc" ]; then versions=( 9 8 7 6 5 4.9 ); fi
+
+ for version in "${versions[@]}"; do
+ parts=(${version//./ })
+ desired_version="$(check_version_exists "${parts[0]}" "${parts[1]}")"
+ if [ "$desired_version" != "-1" ]; then majorVersion="${parts[0]}"; break; fi
+ done
+
+ if [ -z "$majorVersion" ]; then
+ if command -v "$compiler" > /dev/null; then
+ if [ "$(uname)" != "Darwin" ]; then
+ Write-PipelineTelemetryError -category "Build" -type "warning" "Specific version of $compiler not found, falling back to use the one in PATH."
+ fi
+ export CC="$(command -v "$compiler")"
+ export CXX="$(command -v "$cxxCompiler")"
+ else
+ Write-PipelineTelemetryError -category "Build" "No usable version of $compiler found."
+ exit 1
+ fi
+ else
+ if [ "$compiler" = "clang" ] && [ "$majorVersion" -lt 5 ]; then
+ if [ "$build_arch" = "arm" ] || [ "$build_arch" = "armel" ]; then
+ if command -v "$compiler" > /dev/null; then
+ Write-PipelineTelemetryError -category "Build" -type "warning" "Found clang version $majorVersion which is not supported on arm/armel architectures, falling back to use clang from PATH."
+ export CC="$(command -v "$compiler")"
+ export CXX="$(command -v "$cxxCompiler")"
+ else
+ Write-PipelineTelemetryError -category "Build" "Found clang version $majorVersion which is not supported on arm/armel architectures, and there is no clang in PATH."
+ exit 1
+ fi
+ fi
+ fi
+ fi
+ else
+ desired_version="$(check_version_exists "$majorVersion" "$minorVersion")"
+ if [ "$desired_version" = "-1" ]; then
+ Write-PipelineTelemetryError -category "Build" "Could not find specific version of $compiler: $majorVersion $minorVersion."
+ exit 1
+ fi
+ fi
+
+ if [ -z "$CC" ]; then
+ export CC="$(command -v "$compiler$desired_version")"
+ export CXX="$(command -v "$cxxCompiler$desired_version")"
+ if [ -z "$CXX" ]; then export CXX="$(command -v "$cxxCompiler")"; fi
+ fi
+else
+ if [ ! -f "$CLR_CC" ]; then
+ Write-PipelineTelemetryError -category "Build" "CLR_CC is set but path '$CLR_CC' does not exist"
+ exit 1
+ fi
+ export CC="$CLR_CC"
+ export CXX="$CLR_CXX"
+fi
+
+if [ -z "$CC" ]; then
+ Write-PipelineTelemetryError -category "Build" "Unable to find $compiler."
+ exit 1
+fi
+
+export CCC_CC="$CC"
+export CCC_CXX="$CXX"
+export SCAN_BUILD_COMMAND="$(command -v "scan-build$desired_version")"
diff --git a/eng/common/native/install-cmake-test.sh b/eng/common/native/install-cmake-test.sh
new file mode 100755
index 0000000000..12339a4076
--- /dev/null
+++ b/eng/common/native/install-cmake-test.sh
@@ -0,0 +1,117 @@
+#!/usr/bin/env bash
+
+source="${BASH_SOURCE[0]}"
+scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
+
+. $scriptroot/common-library.sh
+
+base_uri=
+install_path=
+version=
+clean=false
+force=false
+download_retries=5
+retry_wait_time_seconds=30
+
+while (($# > 0)); do
+ lowerI="$(echo $1 | awk '{print tolower($0)}')"
+ case $lowerI in
+ --baseuri)
+ base_uri=$2
+ shift 2
+ ;;
+ --installpath)
+ install_path=$2
+ shift 2
+ ;;
+ --version)
+ version=$2
+ shift 2
+ ;;
+ --clean)
+ clean=true
+ shift 1
+ ;;
+ --force)
+ force=true
+ shift 1
+ ;;
+ --downloadretries)
+ download_retries=$2
+ shift 2
+ ;;
+ --retrywaittimeseconds)
+ retry_wait_time_seconds=$2
+ shift 2
+ ;;
+ --help)
+ echo "Common settings:"
+ echo " --baseuri Base file directory or Url wrom which to acquire tool archives"
+ echo " --installpath Base directory to install native tool to"
+ echo " --clean Don't install the tool, just clean up the current install of the tool"
+ echo " --force Force install of tools even if they previously exist"
+ echo " --help Print help and exit"
+ echo ""
+ echo "Advanced settings:"
+ echo " --downloadretries Total number of retry attempts"
+ echo " --retrywaittimeseconds Wait time between retry attempts in seconds"
+ echo ""
+ exit 0
+ ;;
+ esac
+done
+
+tool_name="cmake-test"
+tool_os=$(GetCurrentOS)
+tool_folder=$(echo $tool_os | awk '{print tolower($0)}')
+tool_arch="x86_64"
+tool_name_moniker="$tool_name-$version-$tool_os-$tool_arch"
+tool_install_directory="$install_path/$tool_name/$version"
+tool_file_path="$tool_install_directory/$tool_name_moniker/bin/$tool_name"
+shim_path="$install_path/$tool_name.sh"
+uri="${base_uri}/$tool_folder/$tool_name/$tool_name_moniker.tar.gz"
+
+# Clean up tool and installers
+if [[ $clean = true ]]; then
+ echo "Cleaning $tool_install_directory"
+ if [[ -d $tool_install_directory ]]; then
+ rm -rf $tool_install_directory
+ fi
+
+ echo "Cleaning $shim_path"
+ if [[ -f $shim_path ]]; then
+ rm -rf $shim_path
+ fi
+
+ tool_temp_path=$(GetTempPathFileName $uri)
+ echo "Cleaning $tool_temp_path"
+ if [[ -f $tool_temp_path ]]; then
+ rm -rf $tool_temp_path
+ fi
+
+ exit 0
+fi
+
+# Install tool
+if [[ -f $tool_file_path ]] && [[ $force = false ]]; then
+ echo "$tool_name ($version) already exists, skipping install"
+ exit 0
+fi
+
+DownloadAndExtract $uri $tool_install_directory $force $download_retries $retry_wait_time_seconds
+
+if [[ $? != 0 ]]; then
+ Write-PipelineTelemetryError -category 'NativeToolsBootstrap' 'Installation failed'
+ exit 1
+fi
+
+# Generate Shim
+# Always rewrite shims so that we are referencing the expected version
+NewScriptShim $shim_path $tool_file_path true
+
+if [[ $? != 0 ]]; then
+ Write-PipelineTelemetryError -category 'NativeToolsBootstrap' 'Shim generation failed'
+ exit 1
+fi
+
+exit 0
\ No newline at end of file
diff --git a/eng/common/native/install-cmake.sh b/eng/common/native/install-cmake.sh
new file mode 100755
index 0000000000..18041be876
--- /dev/null
+++ b/eng/common/native/install-cmake.sh
@@ -0,0 +1,117 @@
+#!/usr/bin/env bash
+
+source="${BASH_SOURCE[0]}"
+scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
+
+. $scriptroot/common-library.sh
+
+base_uri=
+install_path=
+version=
+clean=false
+force=false
+download_retries=5
+retry_wait_time_seconds=30
+
+while (($# > 0)); do
+ lowerI="$(echo $1 | awk '{print tolower($0)}')"
+ case $lowerI in
+ --baseuri)
+ base_uri=$2
+ shift 2
+ ;;
+ --installpath)
+ install_path=$2
+ shift 2
+ ;;
+ --version)
+ version=$2
+ shift 2
+ ;;
+ --clean)
+ clean=true
+ shift 1
+ ;;
+ --force)
+ force=true
+ shift 1
+ ;;
+ --downloadretries)
+ download_retries=$2
+ shift 2
+ ;;
+ --retrywaittimeseconds)
+ retry_wait_time_seconds=$2
+ shift 2
+ ;;
+ --help)
+ echo "Common settings:"
+ echo " --baseuri Base file directory or Url wrom which to acquire tool archives"
+ echo " --installpath Base directory to install native tool to"
+ echo " --clean Don't install the tool, just clean up the current install of the tool"
+ echo " --force Force install of tools even if they previously exist"
+ echo " --help Print help and exit"
+ echo ""
+ echo "Advanced settings:"
+ echo " --downloadretries Total number of retry attempts"
+ echo " --retrywaittimeseconds Wait time between retry attempts in seconds"
+ echo ""
+ exit 0
+ ;;
+ esac
+done
+
+tool_name="cmake"
+tool_os=$(GetCurrentOS)
+tool_folder=$(echo $tool_os | awk '{print tolower($0)}')
+tool_arch="x86_64"
+tool_name_moniker="$tool_name-$version-$tool_os-$tool_arch"
+tool_install_directory="$install_path/$tool_name/$version"
+tool_file_path="$tool_install_directory/$tool_name_moniker/bin/$tool_name"
+shim_path="$install_path/$tool_name.sh"
+uri="${base_uri}/$tool_folder/$tool_name/$tool_name_moniker.tar.gz"
+
+# Clean up tool and installers
+if [[ $clean = true ]]; then
+ echo "Cleaning $tool_install_directory"
+ if [[ -d $tool_install_directory ]]; then
+ rm -rf $tool_install_directory
+ fi
+
+ echo "Cleaning $shim_path"
+ if [[ -f $shim_path ]]; then
+ rm -rf $shim_path
+ fi
+
+ tool_temp_path=$(GetTempPathFileName $uri)
+ echo "Cleaning $tool_temp_path"
+ if [[ -f $tool_temp_path ]]; then
+ rm -rf $tool_temp_path
+ fi
+
+ exit 0
+fi
+
+# Install tool
+if [[ -f $tool_file_path ]] && [[ $force = false ]]; then
+ echo "$tool_name ($version) already exists, skipping install"
+ exit 0
+fi
+
+DownloadAndExtract $uri $tool_install_directory $force $download_retries $retry_wait_time_seconds
+
+if [[ $? != 0 ]]; then
+ Write-PipelineTelemetryError -category 'NativeToolsBootstrap' 'Installation failed'
+ exit 1
+fi
+
+# Generate Shim
+# Always rewrite shims so that we are referencing the expected version
+NewScriptShim $shim_path $tool_file_path true
+
+if [[ $? != 0 ]]; then
+ Write-PipelineTelemetryError -category 'NativeToolsBootstrap' 'Shim generation failed'
+ exit 1
+fi
+
+exit 0
\ No newline at end of file
diff --git a/eng/common/native/install-tool.ps1 b/eng/common/native/install-tool.ps1
new file mode 100644
index 0000000000..f397e1c75d
--- /dev/null
+++ b/eng/common/native/install-tool.ps1
@@ -0,0 +1,132 @@
+<#
+.SYNOPSIS
+Install native tool
+
+.DESCRIPTION
+Install cmake native tool from Azure blob storage
+
+.PARAMETER InstallPath
+Base directory to install native tool to
+
+.PARAMETER BaseUri
+Base file directory or Url from which to acquire tool archives
+
+.PARAMETER CommonLibraryDirectory
+Path to folder containing common library modules
+
+.PARAMETER Force
+Force install of tools even if they previously exist
+
+.PARAMETER Clean
+Don't install the tool, just clean up the current install of the tool
+
+.PARAMETER DownloadRetries
+Total number of retry attempts
+
+.PARAMETER RetryWaitTimeInSeconds
+Wait time between retry attempts in seconds
+
+.NOTES
+Returns 0 if install succeeds, 1 otherwise
+#>
+[CmdletBinding(PositionalBinding=$false)]
+Param (
+ [Parameter(Mandatory=$True)]
+ [string] $ToolName,
+ [Parameter(Mandatory=$True)]
+ [string] $InstallPath,
+ [Parameter(Mandatory=$True)]
+ [string] $BaseUri,
+ [Parameter(Mandatory=$True)]
+ [string] $Version,
+ [string] $CommonLibraryDirectory = $PSScriptRoot,
+ [switch] $Force = $False,
+ [switch] $Clean = $False,
+ [int] $DownloadRetries = 5,
+ [int] $RetryWaitTimeInSeconds = 30
+)
+
+. $PSScriptRoot\..\pipeline-logging-functions.ps1
+
+# Import common library modules
+Import-Module -Name (Join-Path $CommonLibraryDirectory "CommonLibrary.psm1")
+
+try {
+ # Define verbose switch if undefined
+ $Verbose = $VerbosePreference -Eq "Continue"
+
+ $Arch = CommonLibrary\Get-MachineArchitecture
+ $ToolOs = "win64"
+ if($Arch -Eq "x32") {
+ $ToolOs = "win32"
+ }
+ $ToolNameMoniker = "$ToolName-$Version-$ToolOs-$Arch"
+ $ToolInstallDirectory = Join-Path $InstallPath "$ToolName\$Version\"
+ $Uri = "$BaseUri/windows/$ToolName/$ToolNameMoniker.zip"
+ $ShimPath = Join-Path $InstallPath "$ToolName.exe"
+
+ if ($Clean) {
+ Write-Host "Cleaning $ToolInstallDirectory"
+ if (Test-Path $ToolInstallDirectory) {
+ Remove-Item $ToolInstallDirectory -Force -Recurse
+ }
+ Write-Host "Cleaning $ShimPath"
+ if (Test-Path $ShimPath) {
+ Remove-Item $ShimPath -Force
+ }
+ $ToolTempPath = CommonLibrary\Get-TempPathFilename -Path $Uri
+ Write-Host "Cleaning $ToolTempPath"
+ if (Test-Path $ToolTempPath) {
+ Remove-Item $ToolTempPath -Force
+ }
+ exit 0
+ }
+
+ # Install tool
+ if ((Test-Path $ToolInstallDirectory) -And (-Not $Force)) {
+ Write-Verbose "$ToolName ($Version) already exists, skipping install"
+ }
+ else {
+ $InstallStatus = CommonLibrary\DownloadAndExtract -Uri $Uri `
+ -InstallDirectory $ToolInstallDirectory `
+ -Force:$Force `
+ -DownloadRetries $DownloadRetries `
+ -RetryWaitTimeInSeconds $RetryWaitTimeInSeconds `
+ -Verbose:$Verbose
+
+ if ($InstallStatus -Eq $False) {
+ Write-PipelineTelemetryError "Installation failed" -Category "NativeToolsetBootstrapping"
+ exit 1
+ }
+ }
+
+ $ToolFilePath = Get-ChildItem $ToolInstallDirectory -Recurse -Filter "$ToolName.exe" | % { $_.FullName }
+ if (@($ToolFilePath).Length -Gt 1) {
+ Write-Error "There are multiple copies of $ToolName in $($ToolInstallDirectory): `n$(@($ToolFilePath | out-string))"
+ exit 1
+ } elseif (@($ToolFilePath).Length -Lt 1) {
+ Write-Host "$ToolName was not found in $ToolFilePath."
+ exit 1
+ }
+
+ # Generate shim
+ # Always rewrite shims so that we are referencing the expected version
+ $GenerateShimStatus = CommonLibrary\New-ScriptShim -ShimName $ToolName `
+ -ShimDirectory $InstallPath `
+ -ToolFilePath "$ToolFilePath" `
+ -BaseUri $BaseUri `
+ -Force:$Force `
+ -Verbose:$Verbose
+
+ if ($GenerateShimStatus -Eq $False) {
+ Write-PipelineTelemetryError "Generate shim failed" -Category "NativeToolsetBootstrapping"
+    exit 1
+ }
+
+ exit 0
+}
+catch {
+ Write-Host $_.ScriptStackTrace
+ Write-PipelineTelemetryError -Category "NativeToolsetBootstrapping" -Message $_
+ exit 1
+}
diff --git a/eng/common/performance/blazor_perf.proj b/eng/common/performance/blazor_perf.proj
new file mode 100644
index 0000000000..3b25359c43
--- /dev/null
+++ b/eng/common/performance/blazor_perf.proj
@@ -0,0 +1,30 @@
+
+
+ python3
+ $(HelixPreCommands);chmod +x $HELIX_WORKITEM_PAYLOAD/SOD/SizeOnDisk
+
+
+
+
+ %(Identity)
+
+
+
+
+ %HELIX_CORRELATION_PAYLOAD%\performance\src\scenarios\
+ $(ScenarioDirectory)blazor\
+
+
+ $HELIX_CORRELATION_PAYLOAD/performance/src/scenarios/
+ $(ScenarioDirectory)blazor/
+
+
+
+
+ $(WorkItemDirectory)
+ cd $(BlazorDirectory);$(Python) pre.py publish --msbuild %27/p:_TrimmerDumpDependencies=true%27 --msbuild-static AdditionalMonoLinkerOptions=%27"%24(AdditionalMonoLinkerOptions) --dump-dependencies"%27 --binlog %27./traces/blazor_publish.binlog%27
+ $(Python) test.py sod --scenario-name "%(Identity)"
+ $(Python) post.py
+
+
+
\ No newline at end of file
diff --git a/eng/common/performance/crossgen_perf.proj b/eng/common/performance/crossgen_perf.proj
new file mode 100644
index 0000000000..4264920382
--- /dev/null
+++ b/eng/common/performance/crossgen_perf.proj
@@ -0,0 +1,69 @@
+
+
+
+
+ %(Identity)
+
+
+
+
+
+ py -3
+ $(HelixPreCommands)
+ %HELIX_CORRELATION_PAYLOAD%\Core_Root
+ %HELIX_CORRELATION_PAYLOAD%\performance\src\scenarios\
+ $(ScenarioDirectory)crossgen\
+ $(ScenarioDirectory)crossgen2\
+
+
+ python3
+ $(HelixPreCommands);chmod +x $HELIX_WORKITEM_PAYLOAD/startup/Startup;chmod +x $HELIX_WORKITEM_PAYLOAD/startup/perfcollect;sudo apt update
+ $HELIX_CORRELATION_PAYLOAD/Core_Root
+ $HELIX_CORRELATION_PAYLOAD/performance/src/scenarios/
+ $(ScenarioDirectory)crossgen/
+ $(ScenarioDirectory)crossgen2/
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ $(WorkItemDirectory)
+ $(Python) $(CrossgenDirectory)test.py crossgen --core-root $(CoreRoot) --test-name %(Identity)
+
+
+
+
+
+ $(WorkItemDirectory)
+ $(Python) $(Crossgen2Directory)test.py crossgen2 --core-root $(CoreRoot) --single %(Identity)
+
+
+
+
+
+
+ 4:00
+
+
+
+ 4:00
+
+
+ $(WorkItemDirectory)
+ $(Python) $(Crossgen2Directory)test.py crossgen2 --core-root $(CoreRoot) --composite $(Crossgen2Directory)framework-r2r.dll.rsp
+ 1:00
+
+
+
\ No newline at end of file
diff --git a/eng/common/performance/microbenchmarks.proj b/eng/common/performance/microbenchmarks.proj
new file mode 100644
index 0000000000..94b6efbc92
--- /dev/null
+++ b/eng/common/performance/microbenchmarks.proj
@@ -0,0 +1,144 @@
+
+
+
+ %HELIX_CORRELATION_PAYLOAD%\performance\scripts\benchmarks_ci.py --csproj %HELIX_CORRELATION_PAYLOAD%\performance\$(TargetCsproj)
+ --dotnet-versions %DOTNET_VERSION% --cli-source-info args --cli-branch %PERFLAB_BRANCH% --cli-commit-sha %PERFLAB_HASH% --cli-repository https://github.com/%PERFLAB_REPO% --cli-source-timestamp %PERFLAB_BUILDTIMESTAMP%
+ py -3
+ %HELIX_CORRELATION_PAYLOAD%\Core_Root\CoreRun.exe
+ %HELIX_CORRELATION_PAYLOAD%\Baseline_Core_Root\CoreRun.exe
+
+ $(HelixPreCommands);call %HELIX_CORRELATION_PAYLOAD%\performance\tools\machine-setup.cmd;set PYTHONPATH=%HELIX_WORKITEM_PAYLOAD%\scripts%3B%HELIX_WORKITEM_PAYLOAD%
+ %HELIX_CORRELATION_PAYLOAD%\artifacts\BenchmarkDotNet.Artifacts
+ %HELIX_CORRELATION_PAYLOAD%\artifacts\BenchmarkDotNet.Artifacts_Baseline
+ %HELIX_CORRELATION_PAYLOAD%\performance\src\tools\ResultsComparer\ResultsComparer.csproj
+ %HELIX_CORRELATION_PAYLOAD%\performance\tools\dotnet\$(Architecture)\dotnet.exe
+ %25%25
+ %HELIX_WORKITEM_ROOT%\testResults.xml
+
+
+
+ $HELIX_CORRELATION_PAYLOAD
+ $(BaseDirectory)/performance
+
+
+
+ $HELIX_WORKITEM_PAYLOAD
+ $(BaseDirectory)
+
+
+
+ $(PerformanceDirectory)/scripts/benchmarks_ci.py --csproj $(PerformanceDirectory)/$(TargetCsproj)
+ --dotnet-versions $DOTNET_VERSION --cli-source-info args --cli-branch $PERFLAB_BRANCH --cli-commit-sha $PERFLAB_HASH --cli-repository https://github.com/$PERFLAB_REPO --cli-source-timestamp $PERFLAB_BUILDTIMESTAMP
+ python3
+ $(BaseDirectory)/Core_Root/corerun
+ $(BaseDirectory)/Baseline_Core_Root/corerun
+ $(HelixPreCommands);chmod +x $(PerformanceDirectory)/tools/machine-setup.sh;. $(PerformanceDirectory)/tools/machine-setup.sh
+ $(BaseDirectory)/artifacts/BenchmarkDotNet.Artifacts
+ $(BaseDirectory)/artifacts/BenchmarkDotNet.Artifacts_Baseline
+ $(PerformanceDirectory)/src/tools/ResultsComparer/ResultsComparer.csproj
+ $(PerformanceDirectory)/tools/dotnet/$(Architecture)/dotnet
+ %25
+ $HELIX_WORKITEM_ROOT/testResults.xml
+
+
+
+ $(CliArguments) --wasm
+
+
+
+ --corerun %HELIX_CORRELATION_PAYLOAD%\dotnet-mono\shared\Microsoft.NETCore.App\6.0.0\corerun.exe
+
+
+ --corerun $(BaseDirectory)/dotnet-mono/shared/Microsoft.NETCore.App/6.0.0/corerun
+
+
+
+ --corerun $(CoreRun)
+
+
+
+ --corerun $(BaselineCoreRun)
+
+
+
+ $(Python) $(WorkItemCommand) --incremental no --architecture $(Architecture) -f $(_Framework) $(PerfLabArguments)
+
+
+
+ $(WorkItemCommand) $(CliArguments)
+
+
+
+ 2:30
+ 0:15
+
+
+
+
+ %(Identity)
+
+
+
+
+ 30
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ false
+
+
+
+
+
+ $(WorkItemDirectory)
+ $(WorkItemCommand) --bdn-artifacts $(BaselineArtifactsDirectory) --bdn-arguments="--anyCategories $(BDNCategories) $(ExtraBenchmarkDotNetArguments) $(BaselineCoreRunArgument) --partition-count $(PartitionCount) --partition-index %(HelixWorkItem.Index)"
+ $(WorkItemCommand) --bdn-artifacts $(ArtifactsDirectory) --bdn-arguments="--anyCategories $(BDNCategories) $(ExtraBenchmarkDotNetArguments) $(CoreRunArgument) --partition-count $(PartitionCount) --partition-index %(HelixWorkItem.Index)"
+ $(DotnetExe) run -f $(_Framework) -p $(ResultsComparer) --base $(BaselineArtifactsDirectory) --diff $(ArtifactsDirectory) --threshold 2$(Percent) --xml $(XMLResults);$(FinalCommand)
+ $(WorkItemTimeout)
+
+
+
+
+
+ $(WorkItemDirectory)
+ $(WorkItemCommand) --bdn-artifacts $(BaselineArtifactsDirectory) --bdn-arguments="--anyCategories $(BDNCategories) $(ExtraBenchmarkDotNetArguments) $(BaselineCoreRunArgument)"
+ $(WorkItemCommand) --bdn-artifacts $(ArtifactsDirectory) --bdn-arguments="--anyCategories $(BDNCategories) $(ExtraBenchmarkDotNetArguments) $(CoreRunArgument)"
+ $(DotnetExe) run -f $(_Framework) -p $(ResultsComparer) --base $(BaselineArtifactsDirectory) --diff $(ArtifactsDirectory) --threshold 2$(Percent) --xml $(XMLResults)
+ 4:00
+
+
+
\ No newline at end of file
diff --git a/eng/common/performance/performance-setup.ps1 b/eng/common/performance/performance-setup.ps1
new file mode 100644
index 0000000000..656c0bd902
--- /dev/null
+++ b/eng/common/performance/performance-setup.ps1
@@ -0,0 +1,147 @@
+Param(
+ [string] $SourceDirectory=$env:BUILD_SOURCESDIRECTORY,
+ [string] $CoreRootDirectory,
+ [string] $BaselineCoreRootDirectory,
+ [string] $Architecture="x64",
+ [string] $Framework="net5.0",
+ [string] $CompilationMode="Tiered",
+ [string] $Repository=$env:BUILD_REPOSITORY_NAME,
+ [string] $Branch=$env:BUILD_SOURCEBRANCH,
+ [string] $CommitSha=$env:BUILD_SOURCEVERSION,
+ [string] $BuildNumber=$env:BUILD_BUILDNUMBER,
+ [string] $RunCategories="Libraries Runtime",
+ [string] $Csproj="src\benchmarks\micro\MicroBenchmarks.csproj",
+ [string] $Kind="micro",
+ [switch] $LLVM,
+ [switch] $MonoInterpreter,
+ [switch] $MonoAOT,
+ [switch] $Internal,
+ [switch] $Compare,
+ [string] $MonoDotnet="",
+ [string] $Configurations="CompilationMode=$CompilationMode RunKind=$Kind"
+)
+
+$RunFromPerformanceRepo = ($Repository -eq "dotnet/performance") -or ($Repository -eq "dotnet-performance")
+$UseCoreRun = ($CoreRootDirectory -ne [string]::Empty)
+$UseBaselineCoreRun = ($BaselineCoreRootDirectory -ne [string]::Empty)
+
+$PayloadDirectory = (Join-Path $SourceDirectory "Payload")
+$PerformanceDirectory = (Join-Path $PayloadDirectory "performance")
+$WorkItemDirectory = (Join-Path $SourceDirectory "workitem")
+$ExtraBenchmarkDotNetArguments = "--iterationCount 1 --warmupCount 0 --invocationCount 1 --unrollFactor 1 --strategy ColdStart --stopOnFirstError true"
+$Creator = $env:BUILD_DEFINITIONNAME
+$PerfLabArguments = ""
+$HelixSourcePrefix = "pr"
+
+$Queue = "Windows.10.Amd64.ClientRS4.DevEx.15.8.Open"
+
+# TODO: Implement a better logic to determine if Framework is .NET Core or >= .NET 5.
+if ($Framework.StartsWith("netcoreapp") -or ($Framework -eq "net5.0")) {
+ $Queue = "Windows.10.Amd64.ClientRS5.Open"
+}
+
+if ($Compare) {
+ $Queue = "Windows.10.Amd64.19H1.Tiger.Perf.Open"
+ $PerfLabArguments = ""
+ $ExtraBenchmarkDotNetArguments = ""
+}
+
+if ($Internal) {
+ $Queue = "Windows.10.Amd64.19H1.Tiger.Perf"
+ $PerfLabArguments = "--upload-to-perflab-container"
+ $ExtraBenchmarkDotNetArguments = ""
+ $Creator = ""
+ $HelixSourcePrefix = "official"
+}
+
+if($MonoInterpreter)
+{
+ $ExtraBenchmarkDotNetArguments = "--category-exclusion-filter NoInterpreter"
+}
+
+if($MonoDotnet -ne "")
+{
+ $Configurations += " LLVM=$LLVM MonoInterpreter=$MonoInterpreter MonoAOT=$MonoAOT"
+ if($ExtraBenchmarkDotNetArguments -eq "")
+ {
+ #FIX ME: We need to block these tests as they don't run on mono for now
+ $ExtraBenchmarkDotNetArguments = "--exclusion-filter *Perf_Image* *Perf_NamedPipeStream*"
+ }
+ else
+ {
+ #FIX ME: We need to block these tests as they don't run on mono for now
+ $ExtraBenchmarkDotNetArguments += " --exclusion-filter *Perf_Image* *Perf_NamedPipeStream*"
+ }
+}
+
+# FIX ME: This is a workaround until we get this from the actual pipeline
+$CommonSetupArguments="--channel master --queue $Queue --build-number $BuildNumber --build-configs $Configurations --architecture $Architecture"
+$SetupArguments = "--repository https://github.com/$Repository --branch $Branch --get-perf-hash --commit-sha $CommitSha $CommonSetupArguments"
+
+
+#This grabs the LKG version number of dotnet and passes it to our scripts
+$VersionJSON = Get-Content global.json | ConvertFrom-Json
+$DotNetVersion = $VersionJSON.tools.dotnet
+$SetupArguments = "--dotnet-versions $DotNetVersion $SetupArguments"
+
+
+if ($RunFromPerformanceRepo) {
+ $SetupArguments = "--perf-hash $CommitSha $CommonSetupArguments"
+
+ robocopy $SourceDirectory $PerformanceDirectory /E /XD $PayloadDirectory $SourceDirectory\artifacts $SourceDirectory\.git
+}
+else {
+ git clone --branch master --depth 1 --quiet https://github.com/dotnet/performance $PerformanceDirectory
+}
+
+if($MonoDotnet -ne "")
+{
+ $UsingMono = "true"
+ $MonoDotnetPath = (Join-Path $PayloadDirectory "dotnet-mono")
+ Move-Item -Path $MonoDotnet -Destination $MonoDotnetPath
+}
+
+if ($UseCoreRun) {
+ $NewCoreRoot = (Join-Path $PayloadDirectory "Core_Root")
+ Move-Item -Path $CoreRootDirectory -Destination $NewCoreRoot
+}
+if ($UseBaselineCoreRun) {
+ $NewBaselineCoreRoot = (Join-Path $PayloadDirectory "Baseline_Core_Root")
+ Move-Item -Path $BaselineCoreRootDirectory -Destination $NewBaselineCoreRoot
+}
+
+$DocsDir = (Join-Path $PerformanceDirectory "docs")
+robocopy $DocsDir $WorkItemDirectory
+
+# Set variables that we will need to have in future steps
+$ci = $true
+
+. "$PSScriptRoot\..\pipeline-logging-functions.ps1"
+
+# Directories
+Write-PipelineSetVariable -Name 'PayloadDirectory' -Value "$PayloadDirectory" -IsMultiJobVariable $false
+Write-PipelineSetVariable -Name 'PerformanceDirectory' -Value "$PerformanceDirectory" -IsMultiJobVariable $false
+Write-PipelineSetVariable -Name 'WorkItemDirectory' -Value "$WorkItemDirectory" -IsMultiJobVariable $false
+
+# Script Arguments
+Write-PipelineSetVariable -Name 'Python' -Value "py -3" -IsMultiJobVariable $false
+Write-PipelineSetVariable -Name 'ExtraBenchmarkDotNetArguments' -Value "$ExtraBenchmarkDotNetArguments" -IsMultiJobVariable $false
+Write-PipelineSetVariable -Name 'SetupArguments' -Value "$SetupArguments" -IsMultiJobVariable $false
+Write-PipelineSetVariable -Name 'PerfLabArguments' -Value "$PerfLabArguments" -IsMultiJobVariable $false
+Write-PipelineSetVariable -Name 'BDNCategories' -Value "$RunCategories" -IsMultiJobVariable $false
+Write-PipelineSetVariable -Name 'TargetCsproj' -Value "$Csproj" -IsMultiJobVariable $false
+Write-PipelineSetVariable -Name 'Kind' -Value "$Kind" -IsMultiJobVariable $false
+Write-PipelineSetVariable -Name 'Architecture' -Value "$Architecture" -IsMultiJobVariable $false
+Write-PipelineSetVariable -Name 'UseCoreRun' -Value "$UseCoreRun" -IsMultiJobVariable $false
+Write-PipelineSetVariable -Name 'UseBaselineCoreRun' -Value "$UseBaselineCoreRun" -IsMultiJobVariable $false
+Write-PipelineSetVariable -Name 'RunFromPerfRepo' -Value "$RunFromPerformanceRepo" -IsMultiJobVariable $false
+Write-PipelineSetVariable -Name 'Compare' -Value "$Compare" -IsMultiJobVariable $false
+Write-PipelineSetVariable -Name 'MonoDotnet' -Value "$UsingMono" -IsMultiJobVariable $false
+
+# Helix Arguments
+Write-PipelineSetVariable -Name 'Creator' -Value "$Creator" -IsMultiJobVariable $false
+Write-PipelineSetVariable -Name 'Queue' -Value "$Queue" -IsMultiJobVariable $false
+Write-PipelineSetVariable -Name 'HelixSourcePrefix' -Value "$HelixSourcePrefix" -IsMultiJobVariable $false
+Write-PipelineSetVariable -Name '_BuildConfig' -Value "$Architecture.$Kind.$Framework" -IsMultiJobVariable $false
+
+exit 0
\ No newline at end of file
diff --git a/eng/common/performance/performance-setup.sh b/eng/common/performance/performance-setup.sh
new file mode 100755
index 0000000000..806e56c612
--- /dev/null
+++ b/eng/common/performance/performance-setup.sh
@@ -0,0 +1,289 @@
+#!/usr/bin/env bash
+
+source_directory=$BUILD_SOURCESDIRECTORY
+core_root_directory=
+baseline_core_root_directory=
+architecture=x64
+framework=net5.0
+compilation_mode=tiered
+repository=$BUILD_REPOSITORY_NAME
+branch=$BUILD_SOURCEBRANCH
+commit_sha=$BUILD_SOURCEVERSION
+build_number=$BUILD_BUILDNUMBER
+internal=false
+compare=false
+mono_dotnet=
+kind="micro"
+llvm=false
+monointerpreter=false
+monoaot=false
+run_categories="Libraries Runtime"
+csproj="src\benchmarks\micro\MicroBenchmarks.csproj"
+configurations="CompliationMode=$compilation_mode RunKind=$kind"
+run_from_perf_repo=false
+use_core_run=true
+use_baseline_core_run=true
+using_mono=false
+wasm_runtime_loc=
+using_wasm=false
+use_latest_dotnet=false
+
+while (($# > 0)); do
+ lowerI="$(echo $1 | awk '{print tolower($0)}')"
+ case $lowerI in
+ --sourcedirectory)
+ source_directory=$2
+ shift 2
+ ;;
+ --corerootdirectory)
+ core_root_directory=$2
+ shift 2
+ ;;
+ --baselinecorerootdirectory)
+ baseline_core_root_directory=$2
+ shift 2
+ ;;
+ --architecture)
+ architecture=$2
+ shift 2
+ ;;
+ --framework)
+ framework=$2
+ shift 2
+ ;;
+ --compilationmode)
+ compilation_mode=$2
+ shift 2
+ ;;
+ --repository)
+ repository=$2
+ shift 2
+ ;;
+ --branch)
+ branch=$2
+ shift 2
+ ;;
+ --commitsha)
+ commit_sha=$2
+ shift 2
+ ;;
+ --buildnumber)
+ build_number=$2
+ shift 2
+ ;;
+ --kind)
+ kind=$2
+ configurations="CompilationMode=$compilation_mode RunKind=$kind"
+ shift 2
+ ;;
+ --runcategories)
+ run_categories=$2
+ shift 2
+ ;;
+ --csproj)
+ csproj=$2
+ shift 2
+ ;;
+ --internal)
+ internal=true
+ shift 1
+ ;;
+ --llvm)
+ llvm=true
+ shift 1
+ ;;
+ --monointerpreter)
+ monointerpreter=true
+ shift 1
+ ;;
+ --monoaot)
+ monoaot=true
+ shift 1
+ ;;
+ --monodotnet)
+ mono_dotnet=$2
+ shift 2
+ ;;
+ --wasm)
+ wasm_runtime_loc=$2
+ shift 2
+ ;;
+ --compare)
+ compare=true
+ shift 1
+ ;;
+ --configurations)
+ configurations=$2
+ shift 2
+ ;;
+ --latestdotnet)
+ use_latest_dotnet=true
+ shift 1
+ ;;
+ *)
+ echo "Common settings:"
+ echo " --corerootdirectory Directory where Core_Root exists, if running perf testing with --corerun"
+ echo " --architecture Architecture of the testing being run"
+ echo " --configurations List of key=value pairs that will be passed to perf testing infrastructure."
+ echo " ex: --configurations \"CompilationMode=Tiered OptimzationLevel=PGO\""
+ echo " --help Print help and exit"
+ echo ""
+ echo "Advanced settings:"
+ echo " --framework The framework to run, if not running in master"
+ echo " --compliationmode The compilation mode if not passing --configurations"
+ echo " --sourcedirectory The directory of the sources. Defaults to env:BUILD_SOURCESDIRECTORY"
+ echo " --repository The name of the repository in the / format. Defaults to env:BUILD_REPOSITORY_NAME"
+ echo " --branch The name of the branch. Defaults to env:BUILD_SOURCEBRANCH"
+ echo " --commitsha The commit sha1 to run against. Defaults to env:BUILD_SOURCEVERSION"
+ echo " --buildnumber The build number currently running. Defaults to env:BUILD_BUILDNUMBER"
+ echo " --csproj The relative path to the benchmark csproj whose tests should be run. Defaults to src\benchmarks\micro\MicroBenchmarks.csproj"
+ echo " --kind Related to csproj. The kind of benchmarks that should be run. Defaults to micro"
+ echo " --runcategories Related to csproj. Categories of benchmarks to run. Defaults to \"coreclr corefx\""
+ echo " --internal If the benchmarks are running as an official job."
+ echo " --monodotnet Pass the path to the mono dotnet for mono performance testing."
+ echo " --wasm Path to the unpacked wasm runtime pack."
+ echo " --latestdotnet --dotnet-versions will not be specified. --dotnet-versions defaults to LKG version in global.json "
+ echo ""
+ exit 0
+ ;;
+ esac
+done
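+
+# Example invocation (hypothetical paths and values):
+#   ./performance-setup.sh --corerootdirectory /artifacts/Core_Root --architecture x64 --kind micro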
+
+if [ "$repository" == "dotnet/performance" ] || [ "$repository" == "dotnet-performance" ]; then
+ run_from_perf_repo=true
+fi
+
+if [ -z "$configurations" ]; then
+ configurations="CompilationMode=$compilation_mode"
+fi
+
+if [ -z "$core_root_directory" ]; then
+ use_core_run=false
+fi
+
+if [ -z "$baseline_core_root_directory" ]; then
+ use_baseline_core_run=false
+fi
+
+payload_directory=$source_directory/Payload
+performance_directory=$payload_directory/performance
+workitem_directory=$source_directory/workitem
+extra_benchmark_dotnet_arguments="--iterationCount 1 --warmupCount 0 --invocationCount 1 --unrollFactor 1 --strategy ColdStart --stopOnFirstError true"
+perflab_arguments=
+queue=Ubuntu.1804.Amd64.Open
+creator=$BUILD_DEFINITIONNAME
+helix_source_prefix="pr"
+
+if [[ "$compare" == true ]]; then
+ extra_benchmark_dotnet_arguments=
+ perflab_arguments=
+
+ # No open queues for arm64
+ if [[ "$architecture" = "arm64" ]]; then
+ echo "Compare not available for arm64"
+ exit 1
+ fi
+
+ queue=Ubuntu.1804.Amd64.Tiger.Perf.Open
+fi
+
+if [[ "$internal" == true ]]; then
+ perflab_arguments="--upload-to-perflab-container"
+ helix_source_prefix="official"
+ creator=
+ extra_benchmark_dotnet_arguments=
+
+ if [[ "$architecture" = "arm64" ]]; then
+ queue=Ubuntu.1804.Arm64.Perf
+ else
+ queue=Ubuntu.1804.Amd64.Tiger.Perf
+ fi
+fi
+
+if [[ "$mono_dotnet" != "" ]] && [[ "$monointerpreter" == "false" ]]; then
+ configurations="$configurations LLVM=$llvm MonoInterpreter=$monointerpreter MonoAOT=$monoaot"
+ extra_benchmark_dotnet_arguments="$extra_benchmark_dotnet_arguments --category-exclusion-filter NoMono"
+fi
+
+if [[ "$wasm_runtime_loc" != "" ]]; then
+ configurations="CompilationMode=wasm RunKind=$kind"
+ extra_benchmark_dotnet_arguments="$extra_benchmark_dotnet_arguments --category-exclusion-filter NoInterpreter NoWASM NoMono"
+fi
+
+if [[ "$mono_dotnet" != "" ]] && [[ "$monointerpreter" == "true" ]]; then
+ extra_benchmark_dotnet_arguments="$extra_benchmark_dotnet_arguments --category-exclusion-filter NoInterpreter NoMono"
+fi
+
+common_setup_arguments="--channel master --queue $queue --build-number $build_number --build-configs $configurations --architecture $architecture"
+setup_arguments="--repository https://github.com/$repository --branch $branch --get-perf-hash --commit-sha $commit_sha $common_setup_arguments"
+
+
+if [[ "$use_latest_dotnet" = false ]]; then
+ # Get the tools section from the global.json.
+ # This grabs the LKG version number of dotnet and passes it to our scripts
+ dotnet_version=`cat global.json | python3 -c 'import json,sys;obj=json.load(sys.stdin);print(obj["tools"]["dotnet"])'`
+ setup_arguments="--dotnet-versions $dotnet_version $setup_arguments"
+fi
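+
+# For example, a global.json containing { "tools": { "dotnet": "3.1.102" } }
+# (version hypothetical) makes the block above prepend "--dotnet-versions 3.1.102".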
+
+if [[ "$run_from_perf_repo" = true ]]; then
+ payload_directory=
+ workitem_directory=$source_directory
+ performance_directory=$workitem_directory
+ setup_arguments="--perf-hash $commit_sha $common_setup_arguments"
+else
+ git clone --branch master --depth 1 --quiet https://github.com/dotnet/performance $performance_directory
+
+ docs_directory=$performance_directory/docs
+ mv $docs_directory $workitem_directory
+fi
+
+if [[ "$wasm_runtime_loc" != "" ]]; then
+ using_wasm=true
+ wasm_dotnet_path=$payload_directory/dotnet-wasm
+ mv $wasm_runtime_loc $wasm_dotnet_path
+ extra_benchmark_dotnet_arguments="$extra_benchmark_dotnet_arguments --wasmMainJS \$HELIX_CORRELATION_PAYLOAD/dotnet-wasm/runtime-test.js --wasmEngine /home/helixbot/.jsvu/v8 --customRuntimePack \$HELIX_CORRELATION_PAYLOAD/dotnet-wasm"
+fi
+
+if [[ "$mono_dotnet" != "" ]]; then
+ using_mono=true
+ mono_dotnet_path=$payload_directory/dotnet-mono
+ mv $mono_dotnet $mono_dotnet_path
+fi
+
+if [[ "$use_core_run" = true ]]; then
+ new_core_root=$payload_directory/Core_Root
+ mv $core_root_directory $new_core_root
+fi
+
+if [[ "$use_baseline_core_run" = true ]]; then
+ new_baseline_core_root=$payload_directory/Baseline_Core_Root
+ mv $baseline_core_root_directory $new_baseline_core_root
+fi
+
+ci=true
+
+_script_dir=$(pwd)/eng/common
+. "$_script_dir/pipeline-logging-functions.sh"
+
+# Make sure all of our variables are available for future steps
+Write-PipelineSetVariable -name "UseCoreRun" -value "$use_core_run" -is_multi_job_variable false
+Write-PipelineSetVariable -name "UseBaselineCoreRun" -value "$use_baseline_core_run" -is_multi_job_variable false
+Write-PipelineSetVariable -name "Architecture" -value "$architecture" -is_multi_job_variable false
+Write-PipelineSetVariable -name "PayloadDirectory" -value "$payload_directory" -is_multi_job_variable false
+Write-PipelineSetVariable -name "PerformanceDirectory" -value "$performance_directory" -is_multi_job_variable false
+Write-PipelineSetVariable -name "WorkItemDirectory" -value "$workitem_directory" -is_multi_job_variable false
+Write-PipelineSetVariable -name "Queue" -value "$queue" -is_multi_job_variable false
+Write-PipelineSetVariable -name "SetupArguments" -value "$setup_arguments" -is_multi_job_variable false
+Write-PipelineSetVariable -name "Python" -value "python3" -is_multi_job_variable false
+Write-PipelineSetVariable -name "PerfLabArguments" -value "$perflab_arguments" -is_multi_job_variable false
+Write-PipelineSetVariable -name "ExtraBenchmarkDotNetArguments" -value "$extra_benchmark_dotnet_arguments" -is_multi_job_variable false
+Write-PipelineSetVariable -name "BDNCategories" -value "$run_categories" -is_multi_job_variable false
+Write-PipelineSetVariable -name "TargetCsproj" -value "$csproj" -is_multi_job_variable false
+Write-PipelineSetVariable -name "RunFromPerfRepo" -value "$run_from_perf_repo" -is_multi_job_variable false
+Write-PipelineSetVariable -name "Creator" -value "$creator" -is_multi_job_variable false
+Write-PipelineSetVariable -name "HelixSourcePrefix" -value "$helix_source_prefix" -is_multi_job_variable false
+Write-PipelineSetVariable -name "Kind" -value "$kind" -is_multi_job_variable false
+Write-PipelineSetVariable -name "_BuildConfig" -value "$architecture.$kind.$framework" -is_multi_job_variable false
+Write-PipelineSetVariable -name "Compare" -value "$compare" -is_multi_job_variable false
+Write-PipelineSetVariable -name "MonoDotnet" -value "$using_mono" -is_multi_job_variable false
+Write-PipelineSetVariable -name "WasmDotnet" -value "$using_wasm" -is_multi_job_variable false
diff --git a/eng/common/pipeline-logging-functions.ps1 b/eng/common/pipeline-logging-functions.ps1
new file mode 100644
index 0000000000..8484451f3a
--- /dev/null
+++ b/eng/common/pipeline-logging-functions.ps1
@@ -0,0 +1,242 @@
+# Source for this file was taken from https://github.com/microsoft/azure-pipelines-task-lib/blob/11c9439d4af17e6475d9fe058e6b2e03914d17e6/powershell/VstsTaskSdk/LoggingCommandFunctions.ps1 and modified.
+
+# NOTE: You should not be calling these methods directly, as they are likely to change. Instead you should be calling the Write-Pipeline* functions defined in tools.ps1
+
+$script:loggingCommandPrefix = '##vso['
+$script:loggingCommandEscapeMappings = @( # TODO: WHAT ABOUT "="? WHAT ABOUT "%"?
+ New-Object psobject -Property @{ Token = ';' ; Replacement = '%3B' }
+ New-Object psobject -Property @{ Token = "`r" ; Replacement = '%0D' }
+ New-Object psobject -Property @{ Token = "`n" ; Replacement = '%0A' }
+ New-Object psobject -Property @{ Token = "]" ; Replacement = '%5D' }
+)
+# TODO: BUG: Escape % ???
+# TODO: Add test to verify don't need to escape "=".
+
+# Specify "-Force" to force pipeline formatted output even if "$ci" is false or not set
+function Write-PipelineTelemetryError {
+ [CmdletBinding()]
+ param(
+ [Parameter(Mandatory = $true)]
+ [string]$Category,
+ [Parameter(Mandatory = $true)]
+ [string]$Message,
+ [Parameter(Mandatory = $false)]
+ [string]$Type = 'error',
+ [string]$ErrCode,
+ [string]$SourcePath,
+ [string]$LineNumber,
+ [string]$ColumnNumber,
+ [switch]$AsOutput,
+ [switch]$Force)
+
+ $PSBoundParameters.Remove('Category') | Out-Null
+
+ if($Force -Or ((Test-Path variable:ci) -And $ci)) {
+ $Message = "(NETCORE_ENGINEERING_TELEMETRY=$Category) $Message"
+ }
+ $PSBoundParameters.Remove('Message') | Out-Null
+ $PSBoundParameters.Add('Message', $Message)
+ Write-PipelineTaskError @PSBoundParameters
+}
+
+# Specify "-Force" to force pipeline formatted output even if "$ci" is false or not set
+function Write-PipelineTaskError {
+ [CmdletBinding()]
+ param(
+ [Parameter(Mandatory = $true)]
+ [string]$Message,
+ [Parameter(Mandatory = $false)]
+ [string]$Type = 'error',
+ [string]$ErrCode,
+ [string]$SourcePath,
+ [string]$LineNumber,
+ [string]$ColumnNumber,
+ [switch]$AsOutput,
+ [switch]$Force
+ )
+
+ if(!$Force -And (-Not (Test-Path variable:ci) -Or !$ci)) {
+ if($Type -eq 'error') {
+ Write-Host $Message -ForegroundColor Red
+ return
+ }
+ elseif ($Type -eq 'warning') {
+ Write-Host $Message -ForegroundColor Yellow
+ return
+ }
+ }
+
+ if(($Type -ne 'error') -and ($Type -ne 'warning')) {
+ Write-Host $Message
+ return
+ }
+ $PSBoundParameters.Remove('Force') | Out-Null
+ if(-not $PSBoundParameters.ContainsKey('Type')) {
+ $PSBoundParameters.Add('Type', 'error')
+ }
+ Write-LogIssue @PSBoundParameters
+ }
+
+ function Write-PipelineSetVariable {
+ [CmdletBinding()]
+ param(
+ [Parameter(Mandatory = $true)]
+ [string]$Name,
+ [string]$Value,
+ [switch]$Secret,
+ [switch]$AsOutput,
+ [bool]$IsMultiJobVariable=$true)
+
+ if((Test-Path variable:ci) -And $ci) {
+ Write-LoggingCommand -Area 'task' -Event 'setvariable' -Data $Value -Properties @{
+ 'variable' = $Name
+ 'isSecret' = $Secret
+ 'isOutput' = $IsMultiJobVariable
+ } -AsOutput:$AsOutput
+ }
+ }
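+
+ # For reference, with $ci set, a call such as
+ #   Write-PipelineSetVariable -Name 'Queue' -Value 'Ubuntu.1804'
+ # emits roughly: ##vso[task.setvariable variable=Queue;isSecret=False;isOutput=True]Ubuntu.1804
+ # (property ordering may vary, since the properties travel in a hashtable).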
+
+ function Write-PipelinePrependPath {
+ [CmdletBinding()]
+ param(
+ [Parameter(Mandatory=$true)]
+ [string]$Path,
+ [switch]$AsOutput)
+
+ if((Test-Path variable:ci) -And $ci) {
+ Write-LoggingCommand -Area 'task' -Event 'prependpath' -Data $Path -AsOutput:$AsOutput
+ }
+ }
+
+<########################################
+# Private functions.
+########################################>
+function Format-LoggingCommandData {
+ [CmdletBinding()]
+ param([string]$Value, [switch]$Reverse)
+
+ if (!$Value) {
+ return ''
+ }
+
+ if (!$Reverse) {
+ foreach ($mapping in $script:loggingCommandEscapeMappings) {
+ $Value = $Value.Replace($mapping.Token, $mapping.Replacement)
+ }
+ } else {
+ for ($i = $script:loggingCommandEscapeMappings.Length - 1 ; $i -ge 0 ; $i--) {
+ $mapping = $script:loggingCommandEscapeMappings[$i]
+ $Value = $Value.Replace($mapping.Replacement, $mapping.Token)
+ }
+ }
+
+ return $Value
+}
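+
+# For example, Format-LoggingCommandData 'a;b]c' returns 'a%3Bb%5Dc', and passing
+# that result back with -Reverse restores the original 'a;b]c'.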
+
+function Format-LoggingCommand {
+ [CmdletBinding()]
+ param(
+ [Parameter(Mandatory = $true)]
+ [string]$Area,
+ [Parameter(Mandatory = $true)]
+ [string]$Event,
+ [string]$Data,
+ [hashtable]$Properties)
+
+ # Append the preamble.
+ [System.Text.StringBuilder]$sb = New-Object -TypeName System.Text.StringBuilder
+ $null = $sb.Append($script:loggingCommandPrefix).Append($Area).Append('.').Append($Event)
+
+ # Append the properties.
+ if ($Properties) {
+ $first = $true
+ foreach ($key in $Properties.Keys) {
+ [string]$value = Format-LoggingCommandData $Properties[$key]
+ if ($value) {
+ if ($first) {
+ $null = $sb.Append(' ')
+ $first = $false
+ } else {
+ $null = $sb.Append(';')
+ }
+
+ $null = $sb.Append("$key=$value")
+ }
+ }
+ }
+
+ # Append the tail and output the value.
+ $Data = Format-LoggingCommandData $Data
+ $sb.Append(']').Append($Data).ToString()
+}
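+
+# For example:
+#   Format-LoggingCommand -Area 'task' -Event 'complete' -Data 'Done' -Properties @{ result = 'Succeeded' }
+# returns: ##vso[task.complete result=Succeeded]Done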
+
+function Write-LoggingCommand {
+ [CmdletBinding(DefaultParameterSetName = 'Parameters')]
+ param(
+ [Parameter(Mandatory = $true, ParameterSetName = 'Parameters')]
+ [string]$Area,
+ [Parameter(Mandatory = $true, ParameterSetName = 'Parameters')]
+ [string]$Event,
+ [Parameter(ParameterSetName = 'Parameters')]
+ [string]$Data,
+ [Parameter(ParameterSetName = 'Parameters')]
+ [hashtable]$Properties,
+ [Parameter(Mandatory = $true, ParameterSetName = 'Object')]
+ $Command,
+ [switch]$AsOutput)
+
+ if ($PSCmdlet.ParameterSetName -eq 'Object') {
+ Write-LoggingCommand -Area $Command.Area -Event $Command.Event -Data $Command.Data -Properties $Command.Properties -AsOutput:$AsOutput
+ return
+ }
+
+ $command = Format-LoggingCommand -Area $Area -Event $Event -Data $Data -Properties $Properties
+ if ($AsOutput) {
+ $command
+ } else {
+ Write-Host $command
+ }
+}
+
+function Write-LogIssue {
+ [CmdletBinding()]
+ param(
+ [ValidateSet('warning', 'error')]
+ [Parameter(Mandatory = $true)]
+ [string]$Type,
+ [string]$Message,
+ [string]$ErrCode,
+ [string]$SourcePath,
+ [string]$LineNumber,
+ [string]$ColumnNumber,
+ [switch]$AsOutput)
+
+ $command = Format-LoggingCommand -Area 'task' -Event 'logissue' -Data $Message -Properties @{
+ 'type' = $Type
+ 'code' = $ErrCode
+ 'sourcepath' = $SourcePath
+ 'linenumber' = $LineNumber
+ 'columnnumber' = $ColumnNumber
+ }
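+ # The formatted command looks something like the following (paths/numbers are
+ # examples only, and property order may vary):
+ #   ##vso[task.logissue type=error;sourcepath=src/Program.cs;linenumber=12]<message>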
+ if ($AsOutput) {
+ return $command
+ }
+
+ if ($Type -eq 'error') {
+ $foregroundColor = $host.PrivateData.ErrorForegroundColor
+ $backgroundColor = $host.PrivateData.ErrorBackgroundColor
+ if ($foregroundColor -isnot [System.ConsoleColor] -or $backgroundColor -isnot [System.ConsoleColor]) {
+ $foregroundColor = [System.ConsoleColor]::Red
+ $backgroundColor = [System.ConsoleColor]::Black
+ }
+ } else {
+ $foregroundColor = $host.PrivateData.WarningForegroundColor
+ $backgroundColor = $host.PrivateData.WarningBackgroundColor
+ if ($foregroundColor -isnot [System.ConsoleColor] -or $backgroundColor -isnot [System.ConsoleColor]) {
+ $foregroundColor = [System.ConsoleColor]::Yellow
+ $backgroundColor = [System.ConsoleColor]::Black
+ }
+ }
+
+ Write-Host $command -ForegroundColor $foregroundColor -BackgroundColor $backgroundColor
+}
diff --git a/eng/common/pipeline-logging-functions.sh b/eng/common/pipeline-logging-functions.sh
new file mode 100755
index 0000000000..6cd0a3400e
--- /dev/null
+++ b/eng/common/pipeline-logging-functions.sh
@@ -0,0 +1,182 @@
+#!/usr/bin/env bash
+
+function Write-PipelineTelemetryError {
+ local telemetry_category=''
+ local force=false
+ local function_args=()
+ local message=''
+ while [[ $# -gt 0 ]]; do
+ opt="$(echo "${1/#--/-}" | awk '{print tolower($0)}')"
+ case "$opt" in
+ -category|-c)
+ telemetry_category=$2
+ shift
+ ;;
+ -force|-f)
+ force=true
+ ;;
+ -*)
+ function_args+=("$1 $2")
+ shift
+ ;;
+ *)
+ message=$*
+ ;;
+ esac
+ shift
+ done
+
+ if [[ $force != true ]] && [[ "$ci" != true ]]; then
+ echo "$message" >&2
+ return
+ fi
+
+ if [[ $force == true ]]; then
+ function_args+=("-force")
+ fi
+ message="(NETCORE_ENGINEERING_TELEMETRY=$telemetry_category) $message"
+ function_args+=("$message")
+ Write-PipelineTaskError ${function_args[@]}
+}
+
+function Write-PipelineTaskError {
+ local message_type="error"
+ local sourcepath=''
+ local linenumber=''
+ local columnnumber=''
+ local error_code=''
+ local force=false
+
+ while [[ $# -gt 0 ]]; do
+ opt="$(echo "${1/#--/-}" | awk '{print tolower($0)}')"
+ case "$opt" in
+ -type|-t)
+ message_type=$2
+ shift
+ ;;
+ -sourcepath|-s)
+ sourcepath=$2
+ shift
+ ;;
+ -linenumber|-ln)
+ linenumber=$2
+ shift
+ ;;
+ -columnnumber|-cn)
+ columnnumber=$2
+ shift
+ ;;
+ -errcode|-e)
+ error_code=$2
+ shift
+ ;;
+ -force|-f)
+ force=true
+ ;;
+ *)
+ break
+ ;;
+ esac
+
+ shift
+ done
+
+ if [[ $force != true ]] && [[ "$ci" != true ]]; then
+ echo "$@" >&2
+ return
+ fi
+
+ local message="##vso[task.logissue"
+
+ message="$message type=$message_type"
+
+ if [ -n "$sourcepath" ]; then
+ message="$message;sourcepath=$sourcepath"
+ fi
+
+ if [ -n "$linenumber" ]; then
+ message="$message;linenumber=$linenumber"
+ fi
+
+ if [ -n "$columnnumber" ]; then
+ message="$message;columnnumber=$columnnumber"
+ fi
+
+ if [ -n "$error_code" ]; then
+ message="$message;code=$error_code"
+ fi
+
+ message="$message]$*"
+ echo "$message"
+}
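+
+# For example, with $ci set,
+#   Write-PipelineTaskError -type warning "Disk space is low"
+# prints: ##vso[task.logissue type=warning]Disk space is low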
+
+function Write-PipelineSetVariable {
+ if [[ "$ci" != true ]]; then
+ return
+ fi
+
+ local name=''
+ local value=''
+ local secret=false
+ local as_output=false
+ local is_multi_job_variable=true
+
+ while [[ $# -gt 0 ]]; do
+ opt="$(echo "${1/#--/-}" | awk '{print tolower($0)}')"
+ case "$opt" in
+ -name|-n)
+ name=$2
+ shift
+ ;;
+ -value|-v)
+ value=$2
+ shift
+ ;;
+ -secret|-s)
+ secret=true
+ ;;
+ -as_output|-a)
+ as_output=true
+ ;;
+ -is_multi_job_variable|-i)
+ is_multi_job_variable=$2
+ shift
+ ;;
+ esac
+ shift
+ done
+
+ # Escape characters that are significant in logging commands (all occurrences,
+ # matching the PowerShell implementation above).
+ value=${value//;/%3B}
+ value=${value//$'\r'/%0D}
+ value=${value//$'\n'/%0A}
+ value=${value//]/%5D}
+
+ local message="##vso[task.setvariable variable=$name;isSecret=$secret;isOutput=$is_multi_job_variable]$value"
+
+ if [[ "$as_output" == true ]]; then
+ echo "$message"
+ else
+ echo "$message"
+ fi
+}
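+
+# For example, with $ci set,
+#   Write-PipelineSetVariable -name "Queue" -value "Ubuntu.1804"
+# prints: ##vso[task.setvariable variable=Queue;isSecret=false;isOutput=true]Ubuntu.1804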
+
+function Write-PipelinePrependPath {
+ local prepend_path=''
+
+ while [[ $# -gt 0 ]]; do
+ opt="$(echo "${1/#--/-}" | awk '{print tolower($0)}')"
+ case "$opt" in
+ -path|-p)
+ prepend_path=$2
+ shift
+ ;;
+ esac
+ shift
+ done
+
+ export PATH="$prepend_path:$PATH"
+
+ if [[ "$ci" == true ]]; then
+ echo "##vso[task.prependpath]$prepend_path"
+ fi
+}
\ No newline at end of file
diff --git a/eng/common/post-build/add-build-to-channel.ps1 b/eng/common/post-build/add-build-to-channel.ps1
new file mode 100644
index 0000000000..de2d957922
--- /dev/null
+++ b/eng/common/post-build/add-build-to-channel.ps1
@@ -0,0 +1,48 @@
+param(
+ [Parameter(Mandatory=$true)][int] $BuildId,
+ [Parameter(Mandatory=$true)][int] $ChannelId,
+ [Parameter(Mandatory=$true)][string] $MaestroApiAccessToken,
+ [Parameter(Mandatory=$false)][string] $MaestroApiEndPoint = 'https://maestro-prod.westus2.cloudapp.azure.com',
+ [Parameter(Mandatory=$false)][string] $MaestroApiVersion = '2019-01-16'
+)
+
+try {
+ . $PSScriptRoot\post-build-utils.ps1
+
+ # Check that the channel we are going to promote the build to exists
+ $channelInfo = Get-MaestroChannel -ChannelId $ChannelId
+
+ if (!$channelInfo) {
+ Write-PipelineTelemetryError -Category 'PromoteBuild' -Message "Channel with BAR ID $ChannelId was not found in BAR!"
+ ExitWithExitCode 1
+ }
+
+ # Get info about which channel(s) the build has already been promoted to
+ $buildInfo = Get-MaestroBuild -BuildId $BuildId
+
+ if (!$buildInfo) {
+ Write-PipelineTelemetryError -Category 'PromoteBuild' -Message "Build with BAR ID $BuildId was not found in BAR!"
+ ExitWithExitCode 1
+ }
+
+ # Find whether the build is already assigned to the channel or not
+ if ($buildInfo.channels) {
+ foreach ($channel in $buildInfo.channels) {
+ if ($channel.Id -eq $ChannelId) {
+ Write-Host "The build with BAR ID $BuildId is already on channel $ChannelId!"
+ ExitWithExitCode 0
+ }
+ }
+ }
+
+ Write-Host "Promoting build '$BuildId' to channel '$ChannelId'."
+
+ Assign-BuildToChannel -BuildId $BuildId -ChannelId $ChannelId
+
+ Write-Host 'done.'
+}
+catch {
+ Write-Host $_
+ Write-PipelineTelemetryError -Category 'PromoteBuild' -Message "There was an error while trying to promote build '$BuildId' to channel '$ChannelId'"
+ ExitWithExitCode 1
+}
diff --git a/eng/common/post-build/check-channel-consistency.ps1 b/eng/common/post-build/check-channel-consistency.ps1
new file mode 100644
index 0000000000..63f3464c98
--- /dev/null
+++ b/eng/common/post-build/check-channel-consistency.ps1
@@ -0,0 +1,40 @@
+param(
+ [Parameter(Mandatory=$true)][string] $PromoteToChannels, # List of channels that the build should be promoted to
+ [Parameter(Mandatory=$true)][array] $AvailableChannelIds # List of channel IDs available in the YAML implementation
+)
+
+try {
+ . $PSScriptRoot\post-build-utils.ps1
+
+ if ($PromoteToChannels -eq "") {
+ Write-PipelineTaskError -Type 'warning' -Message "This build won't publish assets as it's not configured to any Maestro channel. If that wasn't intended use Darc to configure a default channel using add-default-channel for this branch or to promote it to a channel using add-build-to-channel. See https://github.com/dotnet/arcade/blob/master/Documentation/Darc.md#assigning-an-individual-build-to-a-channel for more info."
+ ExitWithExitCode 0
+ }
+
+ # Check that every channel that Maestro told to promote the build to
+ # is available in YAML
+ $PromoteToChannelsIds = $PromoteToChannels -split "\D" | Where-Object { $_ }
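+ # e.g. a $PromoteToChannels value of "529;548" (any non-digit acts as a
+ # delimiter) yields the IDs 529 and 548 here.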
+
+ $hasErrors = $false
+
+ foreach ($id in $PromoteToChannelsIds) {
+ if (($id -ne 0) -and ($id -notin $AvailableChannelIds)) {
+ Write-PipelineTaskError -Message "Channel $id is not present in the post-build YAML configuration! This is an error scenario. Please contact @dnceng."
+ $hasErrors = $true
+ }
+ }
+
+ # `Write-PipelineTaskError` doesn't by itself fail the script, and the lines above
+ # may have reported several errors. The check below makes sure that we return an
+ # error state from the script if any validation error was reported.
+ if ($hasErrors) {
+ ExitWithExitCode 1
+ }
+
+ Write-Host 'done.'
+}
+catch {
+ Write-Host $_
+ Write-PipelineTelemetryError -Category 'CheckChannelConsistency' -Message "There was an error while trying to check consistency of Maestro default channels for the build and post-build YAML configuration."
+ ExitWithExitCode 1
+}
diff --git a/eng/common/post-build/nuget-validation.ps1 b/eng/common/post-build/nuget-validation.ps1
new file mode 100644
index 0000000000..dab3534ab5
--- /dev/null
+++ b/eng/common/post-build/nuget-validation.ps1
@@ -0,0 +1,24 @@
+# This script validates NuGet package metadata information using this
+# tool: https://github.com/NuGet/NuGetGallery/tree/jver-verify/src/VerifyMicrosoftPackage
+
+param(
+ [Parameter(Mandatory=$true)][string] $PackagesPath, # Path to where the packages to be validated are
+ [Parameter(Mandatory=$true)][string] $ToolDestinationPath # Where the validation tool should be downloaded to
+)
+
+try {
+ . $PSScriptRoot\post-build-utils.ps1
+
+ $url = 'https://raw.githubusercontent.com/NuGet/NuGetGallery/3e25ad135146676bcab0050a516939d9958bfa5d/src/VerifyMicrosoftPackage/verify.ps1'
+
+ New-Item -ItemType 'directory' -Path ${ToolDestinationPath} -Force
+
+ Invoke-WebRequest $url -OutFile ${ToolDestinationPath}\verify.ps1
+
+ & ${ToolDestinationPath}\verify.ps1 ${PackagesPath}\*.nupkg
+}
+catch {
+ Write-Host $_.ScriptStackTrace
+ Write-PipelineTelemetryError -Category 'NuGetValidation' -Message $_
+ ExitWithExitCode 1
+}
diff --git a/eng/common/post-build/post-build-utils.ps1 b/eng/common/post-build/post-build-utils.ps1
new file mode 100644
index 0000000000..7d49744795
--- /dev/null
+++ b/eng/common/post-build/post-build-utils.ps1
@@ -0,0 +1,91 @@
+# Most of the functions in this file require the variables `MaestroApiEndPoint`,
+# `MaestroApiVersion` and `MaestroApiAccessToken` to be globally available.
+
+$ErrorActionPreference = 'Stop'
+Set-StrictMode -Version 2.0
+
+# `tools.ps1` checks $ci to perform some actions. Since the post-build
+# scripts don't necessarily execute on the same agent that runs the
+# build.ps1/sh script, this variable isn't automatically set.
+$ci = $true
+$disableConfigureToolsetImport = $true
+. $PSScriptRoot\..\tools.ps1
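+
+# For example (hypothetical values; the endpoint and version mirror the defaults
+# in the param blocks of the scripts in this directory), a caller is expected to
+# have set:
+#   $global:MaestroApiEndPoint = 'https://maestro-prod.westus2.cloudapp.azure.com'
+#   $global:MaestroApiVersion = '2019-01-16'
+#   $global:MaestroApiAccessToken = '<token provided by the pipeline>'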
+
+function Create-MaestroApiRequestHeaders([string]$ContentType = 'application/json') {
+ Validate-MaestroVars
+
+ $headers = New-Object 'System.Collections.Generic.Dictionary[[String],[String]]'
+ $headers.Add('Accept', $ContentType)
+ $headers.Add('Authorization',"Bearer $MaestroApiAccessToken")
+ return $headers
+}
+
+function Get-MaestroChannel([int]$ChannelId) {
+ Validate-MaestroVars
+
+ $apiHeaders = Create-MaestroApiRequestHeaders
+ $apiEndpoint = "$MaestroApiEndPoint/api/channels/${ChannelId}?api-version=$MaestroApiVersion"
+
+ $result = try { Invoke-WebRequest -Method Get -Uri $apiEndpoint -Headers $apiHeaders | ConvertFrom-Json } catch { Write-Host "Error: $_" }
+ return $result
+}
+
+function Get-MaestroBuild([int]$BuildId) {
+ Validate-MaestroVars
+
+ $apiHeaders = Create-MaestroApiRequestHeaders
+ $apiEndpoint = "$MaestroApiEndPoint/api/builds/${BuildId}?api-version=$MaestroApiVersion"
+
+ $result = try { return Invoke-WebRequest -Method Get -Uri $apiEndpoint -Headers $apiHeaders | ConvertFrom-Json } catch { Write-Host "Error: $_" }
+ return $result
+}
+
+function Get-MaestroSubscriptions([string]$SourceRepository, [int]$ChannelId) {
+ Validate-MaestroVars
+
+ $SourceRepository = [System.Web.HttpUtility]::UrlEncode($SourceRepository)
+ $apiHeaders = Create-MaestroApiRequestHeaders
+ $apiEndpoint = "$MaestroApiEndPoint/api/subscriptions?sourceRepository=$SourceRepository&channelId=$ChannelId&api-version=$MaestroApiVersion"
+
+ $result = try { Invoke-WebRequest -Method Get -Uri $apiEndpoint -Headers $apiHeaders | ConvertFrom-Json } catch { Write-Host "Error: $_" }
+ return $result
+}
+
+function Assign-BuildToChannel([int]$BuildId, [int]$ChannelId) {
+ Validate-MaestroVars
+
+ $apiHeaders = Create-MaestroApiRequestHeaders
+ $apiEndpoint = "$MaestroApiEndPoint/api/channels/${ChannelId}/builds/${BuildId}?api-version=$MaestroApiVersion"
+ Invoke-WebRequest -Method Post -Uri $apiEndpoint -Headers $apiHeaders | Out-Null
+}
+
+function Trigger-Subscription([string]$SubscriptionId) {
+ Validate-MaestroVars
+
+ $apiHeaders = Create-MaestroApiRequestHeaders
+ $apiEndpoint = "$MaestroApiEndPoint/api/subscriptions/$SubscriptionId/trigger?api-version=$MaestroApiVersion"
+ Invoke-WebRequest -Uri $apiEndpoint -Headers $apiHeaders -Method Post | Out-Null
+}
+
+function Validate-MaestroVars {
+ try {
+ Get-Variable MaestroApiEndPoint -Scope Global | Out-Null
+ Get-Variable MaestroApiVersion -Scope Global | Out-Null
+ Get-Variable MaestroApiAccessToken -Scope Global | Out-Null
+
+ if (!($MaestroApiEndPoint -Match '^http[s]?://maestro-(int|prod).westus2.cloudapp.azure.com$')) {
+ Write-PipelineTelemetryError -Category 'MaestroVars' -Message "MaestroApiEndPoint is not a valid Maestro URL. '$MaestroApiEndPoint'"
+ ExitWithExitCode 1
+ }
+
+ if (!($MaestroApiVersion -Match '^[0-9]{4}-[0-9]{2}-[0-9]{2}$')) {
+ Write-PipelineTelemetryError -Category 'MaestroVars' -Message "MaestroApiVersion does not match a version string in the format yyyy-MM-DD. '$MaestroApiVersion'"
+ ExitWithExitCode 1
+ }
+ }
+ catch {
+ Write-PipelineTelemetryError -Category 'MaestroVars' -Message 'Error: Variables `MaestroApiEndPoint`, `MaestroApiVersion` and `MaestroApiAccessToken` are required while using this script.'
+ Write-Host $_
+ ExitWithExitCode 1
+ }
+}
diff --git a/eng/common/post-build/publish-using-darc.ps1 b/eng/common/post-build/publish-using-darc.ps1
new file mode 100644
index 0000000000..650b13b089
--- /dev/null
+++ b/eng/common/post-build/publish-using-darc.ps1
@@ -0,0 +1,74 @@
+param(
+ [Parameter(Mandatory=$true)][int] $BuildId,
+ [Parameter(Mandatory=$true)][int] $PublishingInfraVersion,
+ [Parameter(Mandatory=$true)][string] $AzdoToken,
+ [Parameter(Mandatory=$true)][string] $MaestroToken,
+ [Parameter(Mandatory=$false)][string] $MaestroApiEndPoint = 'https://maestro-prod.westus2.cloudapp.azure.com',
+ [Parameter(Mandatory=$true)][string] $WaitPublishingFinish,
+ [Parameter(Mandatory=$false)][string] $EnableSourceLinkValidation,
+ [Parameter(Mandatory=$false)][string] $EnableSigningValidation,
+ [Parameter(Mandatory=$false)][string] $EnableNugetValidation,
+ [Parameter(Mandatory=$false)][string] $PublishInstallersAndChecksums,
+ [Parameter(Mandatory=$false)][string] $ArtifactsPublishingAdditionalParameters,
+ [Parameter(Mandatory=$false)][string] $SigningValidationAdditionalParameters
+)
+
+try {
+ . $PSScriptRoot\post-build-utils.ps1
+ # Hard coding the darc version until the next arcade-services rollout, because this version has the required API changes for darc add-build-to-channel
+ $darc = Get-Darc "1.1.0-beta.20418.1"
+
+ $optionalParams = [System.Collections.ArrayList]::new()
+
+ if ("" -ne $ArtifactsPublishingAdditionalParameters) {
+ $optionalParams.Add("artifact-publishing-parameters") | Out-Null
+ $optionalParams.Add($ArtifactsPublishingAdditionalParameters) | Out-Null
+ }
+
+ if ("false" -eq $WaitPublishingFinish) {
+ $optionalParams.Add("--no-wait") | Out-Null
+ }
+
+ if ("false" -ne $PublishInstallersAndChecksums) {
+ $optionalParams.Add("--publish-installers-and-checksums") | Out-Null
+ }
+
+ if ("true" -eq $EnableNugetValidation) {
+ $optionalParams.Add("--validate-nuget") | Out-Null
+ }
+
+ if ("true" -eq $EnableSourceLinkValidation) {
+ $optionalParams.Add("--validate-sourcelinkchecksums") | Out-Null
+ }
+
+ if ("true" -eq $EnableSigningValidation) {
+ $optionalParams.Add("--validate-signingchecksums") | Out-Null
+
+ if ("" -ne $SigningValidationAdditionalParameters) {
+ $optionalParams.Add("--signing-validation-parameters") | Out-Null
+ $optionalParams.Add($SigningValidationAdditionalParameters) | Out-Null
+ }
+ }
+
+ & $darc add-build-to-channel `
+ --id $buildId `
+ --publishing-infra-version $PublishingInfraVersion `
+ --default-channels `
+ --source-branch master `
+ --azdev-pat $AzdoToken `
+ --bar-uri $MaestroApiEndPoint `
+ --password $MaestroToken `
+ @optionalParams
+
+ if ($LastExitCode -ne 0) {
+ Write-Host "Problems using Darc to promote build ${buildId} to default channels. Stopping execution..."
+ exit 1
+ }
+
+ Write-Host 'done.'
+}
+catch {
+ Write-Host $_
+ Write-PipelineTelemetryError -Category 'PromoteBuild' -Message "There was an error while trying to publish build '$BuildId' to default channels."
+ ExitWithExitCode 1
+}
diff --git a/eng/common/post-build/sourcelink-validation.ps1 b/eng/common/post-build/sourcelink-validation.ps1
new file mode 100644
index 0000000000..c7e7ae67d8
--- /dev/null
+++ b/eng/common/post-build/sourcelink-validation.ps1
@@ -0,0 +1,276 @@
+param(
+ [Parameter(Mandatory=$true)][string] $InputPath, # Full path to directory where Symbols.NuGet packages to be checked are stored
+ [Parameter(Mandatory=$true)][string] $ExtractPath, # Full path to directory where the packages will be extracted during validation
+ [Parameter(Mandatory=$false)][string] $GHRepoName, # GitHub name of the repo including the Org. E.g., dotnet/arcade
+ [Parameter(Mandatory=$false)][string] $GHCommit, # GitHub commit SHA used to build the packages
+ [Parameter(Mandatory=$true)][string] $SourcelinkCliVersion # Version of SourceLink CLI to use
+)
+
+. $PSScriptRoot\post-build-utils.ps1
+
+# Cache/HashMap (File -> Exist flag) used to consult whether a file exists
+# in the repository at a specific commit point. This is populated by inserting
+# all files present in the repo at a specific commit point.
+$global:RepoFiles = @{}
+
+# Maximum number of jobs to run in parallel
+$MaxParallelJobs = 6
+
+# Wait time between checks for system load
+$SecondsBetweenLoadChecks = 10
+
+$ValidatePackage = {
+ param(
+ [string] $PackagePath # Full path to a Symbols.NuGet package
+ )
+
+ . $using:PSScriptRoot\..\tools.ps1
+
+ # Ensure the input file exists
+ if (!(Test-Path $PackagePath)) {
+ Write-Host "Input file does not exist: $PackagePath"
+ return 1
+ }
+
+ # Extensions for which we'll look for SourceLink information
+ # For now we'll only care about Portable & Embedded PDBs
+ $RelevantExtensions = @('.dll', '.exe', '.pdb')
+
+ Write-Host -NoNewLine 'Validating ' ([System.IO.Path]::GetFileName($PackagePath)) '...'
+
+ $PackageId = [System.IO.Path]::GetFileNameWithoutExtension($PackagePath)
+ $ExtractPath = Join-Path -Path $using:ExtractPath -ChildPath $PackageId
+ $FailedFiles = 0
+
+ Add-Type -AssemblyName System.IO.Compression.FileSystem
+
+ [System.IO.Directory]::CreateDirectory($ExtractPath) | Out-Null
+
+ try {
+ $zip = [System.IO.Compression.ZipFile]::OpenRead($PackagePath)
+
+ $zip.Entries |
+ Where-Object {$RelevantExtensions -contains [System.IO.Path]::GetExtension($_.Name)} |
+ ForEach-Object {
+ $FileName = $_.FullName
+ $Extension = [System.IO.Path]::GetExtension($_.Name)
+ $FakeName = -Join((New-Guid), $Extension)
+ $TargetFile = Join-Path -Path $ExtractPath -ChildPath $FakeName
+
+ # We ignore resource DLLs
+ if ($FileName.EndsWith('.resources.dll')) {
+ return
+ }
+
+ [System.IO.Compression.ZipFileExtensions]::ExtractToFile($_, $TargetFile, $true)
+
+ $ValidateFile = {
+ param(
+ [string] $FullPath, # Full path to the module that has to be checked
+ [string] $RealPath,
+ [ref] $FailedFiles
+ )
+
+ $sourcelinkExe = "$env:USERPROFILE\.dotnet\tools"
+ $sourcelinkExe = Resolve-Path "$sourcelinkExe\sourcelink.exe"
+ $SourceLinkInfos = & $sourcelinkExe print-urls $FullPath | Out-String
+
+ if ($LASTEXITCODE -eq 0 -and -not ([string]::IsNullOrEmpty($SourceLinkInfos))) {
+ $NumFailedLinks = 0
+
+ # We only care about Http addresses
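+ # e.g. this captures links of the form https://raw.githubusercontent.com/<org>/<repo>/<commit>/<path>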
+ $Matches = (Select-String '(http[s]?)(:\/\/)([^\s,]+)' -Input $SourceLinkInfos -AllMatches).Matches
+
+ if ($Matches.Count -ne 0) {
+ $Matches.Value |
+ ForEach-Object {
+ $Link = $_
+ $CommitUrl = "https://raw.githubusercontent.com/${using:GHRepoName}/${using:GHCommit}/"
+
+ $FilePath = $Link.Replace($CommitUrl, "")
+ $Status = 200
+ $Cache = $using:RepoFiles
+
+ if ( !($Cache.ContainsKey($FilePath)) ) {
+ try {
+ $Uri = $Link -as [System.URI]
+
+ # Only GitHub links are valid
+ if ($Uri.AbsoluteURI -ne $null -and ($Uri.Host -match 'github' -or $Uri.Host -match 'githubusercontent')) {
+ $Status = (Invoke-WebRequest -Uri $Link -UseBasicParsing -Method HEAD -TimeoutSec 5).StatusCode
+ }
+ else {
+ $Status = 0
+ }
+ }
+ catch {
+ write-host $_
+ $Status = 0
+ }
+ }
+
+ if ($Status -ne 200) {
+ if ($NumFailedLinks -eq 0) {
+ if ($FailedFiles.Value -eq 0) {
+ Write-Host
+ }
+
+ Write-Host "`tFile $RealPath has broken links:"
+ }
+
+ Write-Host "`t`tFailed to retrieve $Link"
+
+ $NumFailedLinks++
+ }
+ }
+ }
+
+ if ($NumFailedLinks -ne 0) {
+ $FailedFiles.value++
+ $global:LASTEXITCODE = 1
+ }
+ }
+ }
+
+ &$ValidateFile $TargetFile $FileName ([ref]$FailedFiles)
+ }
+ }
+ catch {
+ # Ignore failures while inspecting individual entries; any broken links found
+ # so far are still surfaced through $FailedFiles below.
+ }
+ finally {
+ $zip.Dispose()
+ }
+
+ if ($FailedFiles -eq 0) {
+ Write-Host 'Passed.'
+ return [pscustomobject]@{
+ result = 0
+ packagePath = $PackagePath
+ }
+ }
+ else {
+ Write-PipelineTelemetryError -Category 'SourceLink' -Message "$PackagePath has broken SourceLink links."
+ return [pscustomobject]@{
+ result = 1
+ packagePath = $PackagePath
+ }
+ }
+}
+
+function CheckJobResult(
+ $result,
+ $packagePath,
+ [ref]$ValidationFailures) {
+ if ($result -ne '0') {
+ Write-PipelineTelemetryError -Category 'SourceLink' -Message "$packagePath has broken SourceLink links."
+ $ValidationFailures.Value++
+ }
+}
+
+function ValidateSourceLinkLinks {
+ if ($GHRepoName -ne '' -and !($GHRepoName -Match '^[^\s\/]+/[^\s\/]+$')) {
+ if (!($GHRepoName -Match '^[^\s-]+-[^\s]+$')) {
+ Write-PipelineTelemetryError -Category 'SourceLink' -Message "GHRepoName should be in the format / or -. '$GHRepoName'"
+ ExitWithExitCode 1
+ }
+ else {
+ $GHRepoName = $GHRepoName -replace '^([^\s-]+)-([^\s]+)$', '$1/$2';
+ }
+ }
+
+ if ($GHCommit -ne '' -and !($GHCommit -Match '^[0-9a-fA-F]{40}$')) {
+ Write-PipelineTelemetryError -Category 'SourceLink' -Message "GHCommit should be a 40 chars hexadecimal string. '$GHCommit'"
+ ExitWithExitCode 1
+ }
+
+ if ($GHRepoName -ne '' -and $GHCommit -ne '') {
+ $RepoTreeURL = -Join('https://api.github.com/repos/', $GHRepoName, '/git/trees/', $GHCommit, '?recursive=1')
+ $CodeExtensions = @('.cs', '.vb', '.fs', '.fsi', '.fsx', '.fsscript')
+
+ try {
+ # Retrieve the list of files in the repo at that particular commit point and store them in the RepoFiles hash
+ $Data = Invoke-WebRequest $RepoTreeURL -UseBasicParsing | ConvertFrom-Json | Select-Object -ExpandProperty tree
+
+ foreach ($file in $Data) {
+ $Extension = [System.IO.Path]::GetExtension($file.path)
+
+ if ($CodeExtensions.Contains($Extension)) {
+ $RepoFiles[$file.path] = 1
+ }
+ }
+ }
+ catch {
+ Write-Host "Problems downloading the list of files from the repo. Url used: $RepoTreeURL . Execution will proceed without caching."
+ }
+ }
+ elseif ($GHRepoName -ne '' -or $GHCommit -ne '') {
+ Write-Host 'To use the http caching mechanism, both GHRepoName and GHCommit must be provided.'
+ }
+
+ if (Test-Path $ExtractPath) {
+ Remove-Item $ExtractPath -Force -Recurse -ErrorAction SilentlyContinue
+ }
+
+ $ValidationFailures = 0
+
+ # Process each NuGet package in parallel
+ Get-ChildItem "$InputPath\*.symbols.nupkg" |
+ ForEach-Object {
+ Start-Job -ScriptBlock $ValidatePackage -ArgumentList $_.FullName | Out-Null
+ $NumJobs = @(Get-Job -State 'Running').Count
+
+ while ($NumJobs -ge $MaxParallelJobs) {
+ Write-Host "There are $NumJobs validation jobs running right now. Waiting $SecondsBetweenLoadChecks seconds to check again."
+ sleep $SecondsBetweenLoadChecks
+ $NumJobs = @(Get-Job -State 'Running').Count
+ }
+
+ foreach ($Job in @(Get-Job -State 'Completed')) {
+ $jobResult = Wait-Job -Id $Job.Id | Receive-Job
+ CheckJobResult $jobResult.result $jobResult.packagePath ([ref]$ValidationFailures)
+ Remove-Job -Id $Job.Id
+ }
+ }
+
+ foreach ($Job in @(Get-Job)) {
+ $jobResult = Wait-Job -Id $Job.Id | Receive-Job
+ if ($jobResult.result -ne '0') {
+ $ValidationFailures++
+ }
+ Remove-Job -Id $Job.Id
+ }
+ if ($ValidationFailures -gt 0) {
+ Write-PipelineTelemetryError -Category 'SourceLink' -Message "$ValidationFailures package(s) failed validation."
+ ExitWithExitCode 1
+ }
+}
+
+function InstallSourcelinkCli {
+ $sourcelinkCliPackageName = 'sourcelink'
+
+ $dotnetRoot = InitializeDotNetCli -install:$true
+ $dotnet = "$dotnetRoot\dotnet.exe"
+ $toolList = & "$dotnet" tool list --global
+
+ if (($toolList -like "*$sourcelinkCliPackageName*") -and ($toolList -like "*$sourcelinkCliVersion*")) {
+ Write-Host "SourceLink CLI version $sourcelinkCliVersion is already installed."
+ }
+ else {
+ Write-Host "Installing SourceLink CLI version $sourcelinkCliVersion..."
+ Write-Host 'You may need to restart your command window if this is the first dotnet tool you have installed.'
+ & "$dotnet" tool install $sourcelinkCliPackageName --version $sourcelinkCliVersion --verbosity "minimal" --global
+ }
+}
+
+try {
+ InstallSourcelinkCli
+
+ ValidateSourceLinkLinks
+}
+catch {
+ Write-Host $_.Exception
+ Write-Host $_.ScriptStackTrace
+ Write-PipelineTelemetryError -Category 'SourceLink' -Message $_
+ ExitWithExitCode 1
+}
diff --git a/eng/common/post-build/symbols-validation.ps1 b/eng/common/post-build/symbols-validation.ps1
new file mode 100644
index 0000000000..fcc6019b49
--- /dev/null
+++ b/eng/common/post-build/symbols-validation.ps1
@@ -0,0 +1,268 @@
+param(
+ [Parameter(Mandatory=$true)][string] $InputPath, # Full path to directory where NuGet packages to be checked are stored
+ [Parameter(Mandatory=$true)][string] $ExtractPath, # Full path to directory where the packages will be extracted during validation
+ [Parameter(Mandatory=$true)][string] $DotnetSymbolVersion, # Version of dotnet symbol to use
+ [Parameter(Mandatory=$false)][switch] $ContinueOnError, # If we should keep checking symbols after an error
+ [Parameter(Mandatory=$false)][switch] $Clean # Clean extracted symbols directory after checking symbols
+)
+
+# Maximum number of jobs to run in parallel
+$MaxParallelJobs = 6
+
+# Wait time between check for system load
+$SecondsBetweenLoadChecks = 10
+
+$CountMissingSymbols = {
+ param(
+ [string] $PackagePath # Path to a NuGet package
+ )
+
+ . $using:PSScriptRoot\..\tools.ps1
+
+ Add-Type -AssemblyName System.IO.Compression.FileSystem
+
+ # Ensure the input file exists
+ if (!(Test-Path $PackagePath)) {
+ Write-PipelineTaskError "Input file does not exist: $PackagePath"
+ return -2
+ }
+
+ # Extensions for which we'll look for symbols
+ $RelevantExtensions = @('.dll', '.exe', '.so', '.dylib')
+
+ # How many files are missing symbol information
+ $MissingSymbols = 0
+
+ $PackageId = [System.IO.Path]::GetFileNameWithoutExtension($PackagePath)
+ $PackageGuid = New-Guid
+ $ExtractPath = Join-Path -Path $using:ExtractPath -ChildPath $PackageGuid
+ $SymbolsPath = Join-Path -Path $ExtractPath -ChildPath 'Symbols'
+
+ try {
+ [System.IO.Compression.ZipFile]::ExtractToDirectory($PackagePath, $ExtractPath)
+ }
+ catch {
+ Write-Host "Something went wrong extracting $PackagePath"
+ Write-Host $_
+ return [pscustomobject]@{
+ result = -1
+ packagePath = $PackagePath
+ }
+ }
+
+ Get-ChildItem -Recurse $ExtractPath |
+ Where-Object {$RelevantExtensions -contains $_.Extension} |
+ ForEach-Object {
+ $FileName = $_.FullName
+ if ($FileName -Match '\\ref\\') {
+ Write-Host "`t Ignoring reference assembly file " $FileName
+ return
+ }
+
+ $FirstMatchingSymbolDescriptionOrDefault = {
+ param(
+ [string] $FullPath, # Full path to the module that has to be checked
+ [string] $TargetServerParam, # Parameter to pass to `Symbol Tool` indicating the server to lookup for symbols
+ [string] $SymbolsPath
+ )
+
+ $FileName = [System.IO.Path]::GetFileName($FullPath)
+ $Extension = [System.IO.Path]::GetExtension($FullPath)
+
+ # Those below are potential symbol files that the `dotnet symbol` might
+ # return. Which one is returned depends on the type of file we are
+ # checking and which type of file was uploaded.
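+ # For example, for an input module Foo.dll the candidates probed below are
+ # Foo.dll itself, Foo.pdb, Foo.ni.pdb, Foo.so.dbg and Foo.dylib.dwarf.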
+
+ # The file itself is returned
+ $SymbolPath = $SymbolsPath + '\' + $FileName
+
+ # PDB file for the module
+ $PdbPath = $SymbolPath.Replace($Extension, '.pdb')
+
+ # PDB file for R2R module (created by crossgen)
+ $NGenPdb = $SymbolPath.Replace($Extension, '.ni.pdb')
+
+ # DBG file for a .so library
+ $SODbg = $SymbolPath.Replace($Extension, '.so.dbg')
+
+ # DWARF file for a .dylib
+ $DylibDwarf = $SymbolPath.Replace($Extension, '.dylib.dwarf')
+
+ $dotnetSymbolExe = "$env:USERPROFILE\.dotnet\tools"
+ $dotnetSymbolExe = Resolve-Path "$dotnetSymbolExe\dotnet-symbol.exe"
+
+ & $dotnetSymbolExe --symbols --modules --windows-pdbs $TargetServerParam $FullPath -o $SymbolsPath | Out-Null
+
+ if (Test-Path $PdbPath) {
+ return 'PDB'
+ }
+ elseif (Test-Path $NGenPdb) {
+ return 'NGen PDB'
+ }
+ elseif (Test-Path $SODbg) {
+ return 'DBG for SO'
+ }
+ elseif (Test-Path $DylibDwarf) {
+ return 'Dwarf for Dylib'
+ }
+ elseif (Test-Path $SymbolPath) {
+ return 'Module'
+ }
+ else {
+ return $null
+ }
+ }
+
+ $SymbolsOnMSDL = & $FirstMatchingSymbolDescriptionOrDefault $FileName '--microsoft-symbol-server' $SymbolsPath
+ $SymbolsOnSymWeb = & $FirstMatchingSymbolDescriptionOrDefault $FileName '--internal-server' $SymbolsPath
+
+ Write-Host -NoNewLine "`t Checking file " $FileName "... "
+
+ if ($SymbolsOnMSDL -ne $null -and $SymbolsOnSymWeb -ne $null) {
+ Write-Host "Symbols found on MSDL ($SymbolsOnMSDL) and SymWeb ($SymbolsOnSymWeb)"
+ }
+ else {
+ $MissingSymbols++
+
+ if ($SymbolsOnMSDL -eq $null -and $SymbolsOnSymWeb -eq $null) {
+ Write-Host 'No symbols found on MSDL or SymWeb!'
+ }
+ else {
+ if ($SymbolsOnMSDL -eq $null) {
+ Write-Host 'No symbols found on MSDL!'
+ }
+ else {
+ Write-Host 'No symbols found on SymWeb!'
+ }
+ }
+ }
+ }
+
+ if ($using:Clean) {
+ Remove-Item $ExtractPath -Recurse -Force
+ }
+
+ Pop-Location
+
+ return [pscustomobject]@{
+ result = $MissingSymbols
+ packagePath = $PackagePath
+ }
+}
+
+function CheckJobResult(
+ $result,
+ $packagePath,
+ [ref]$DupedSymbols,
+ [ref]$TotalFailures) {
+ if ($result -eq '-1') {
+ Write-PipelineTelemetryError -Category 'CheckSymbols' -Message "$packagePath has duplicated symbol files"
+ $DupedSymbols.Value++
+ }
+ elseif ($result -ne '0') {
+ Write-PipelineTelemetryError -Category 'CheckSymbols' -Message "Missing symbols for $result modules in the package $packagePath"
+ $TotalFailures.Value++
+ }
+}
+
+function CheckSymbolsAvailable {
+ if (Test-Path $ExtractPath) {
+ Remove-Item $ExtractPath -Force -Recurse -ErrorAction SilentlyContinue
+ }
+
+ $TotalFailures = 0
+ $DupedSymbols = 0
+
+ Get-ChildItem "$InputPath\*.nupkg" |
+ ForEach-Object {
+ $FileName = $_.Name
+ $FullName = $_.FullName
+
+ # These packages from Arcade-Services include some native libraries that
+ # our current symbol uploader can't handle. Below is a workaround until
+ # we get issue: https://github.com/dotnet/arcade/issues/2457 sorted.
+ if ($FileName -Match 'Microsoft\.DotNet\.Darc\.') {
+ Write-Host "Ignoring Arcade-services file: $FileName"
+ Write-Host
+ return
+ }
+ elseif ($FileName -Match 'Microsoft\.DotNet\.Maestro\.Tasks\.') {
+ Write-Host "Ignoring Arcade-services file: $FileName"
+ Write-Host
+ return
+ }
+
+ Write-Host "Validating $FileName "
+
+ Start-Job -ScriptBlock $CountMissingSymbols -ArgumentList $FullName | Out-Null
+
+ $NumJobs = @(Get-Job -State 'Running').Count
+
+ while ($NumJobs -ge $MaxParallelJobs) {
+ Write-Host "There are $NumJobs validation jobs running right now. Waiting $SecondsBetweenLoadChecks seconds to check again."
+ sleep $SecondsBetweenLoadChecks
+ $NumJobs = @(Get-Job -State 'Running').Count
+ }
+
+ foreach ($Job in @(Get-Job -State 'Completed')) {
+ $jobResult = Wait-Job -Id $Job.Id | Receive-Job
+ CheckJobResult $jobResult.result $jobResult.packagePath ([ref]$DupedSymbols) ([ref]$TotalFailures)
+ Remove-Job -Id $Job.Id
+ }
+ Write-Host
+ }
+
+ foreach ($Job in @(Get-Job)) {
+ $jobResult = Wait-Job -Id $Job.Id | Receive-Job
+ CheckJobResult $jobResult.result $jobResult.packagePath ([ref]$DupedSymbols) ([ref]$TotalFailures)
+ }
+
+ if ($TotalFailures -gt 0 -or $DupedSymbols -gt 0) {
+ if ($TotalFailures -gt 0) {
+ Write-PipelineTelemetryError -Category 'CheckSymbols' -Message "Symbols missing for $TotalFailures packages"
+ }
+
+ if ($DupedSymbols -gt 0) {
+ Write-PipelineTelemetryError -Category 'CheckSymbols' -Message "$DupedSymbols packages had duplicated symbol files"
+ }
+
+ ExitWithExitCode 1
+ }
+ else {
+ Write-Host "All symbols validated!"
+ }
+}
+
+function InstallDotnetSymbol {
+ $dotnetSymbolPackageName = 'dotnet-symbol'
+
+ $dotnetRoot = InitializeDotNetCli -install:$true
+ $dotnet = "$dotnetRoot\dotnet.exe"
+ $toolList = & "$dotnet" tool list --global
+
+ if (($toolList -like "*$dotnetSymbolPackageName*") -and ($toolList -like "*$dotnetSymbolVersion*")) {
+ Write-Host "dotnet-symbol version $dotnetSymbolVersion is already installed."
+ }
+ else {
+ Write-Host "Installing dotnet-symbol version $dotnetSymbolVersion..."
+ Write-Host 'You may need to restart your command window if this is the first dotnet tool you have installed.'
+ & "$dotnet" tool install $dotnetSymbolPackageName --version $dotnetSymbolVersion --verbosity "minimal" --global
+ }
+}
+
+try {
+ . $PSScriptRoot\post-build-utils.ps1
+
+ InstallDotnetSymbol
+
+ foreach ($Job in @(Get-Job)) {
+ Remove-Job -Id $Job.Id
+ }
+
+ CheckSymbolsAvailable
+}
+catch {
+ Write-Host $_.ScriptStackTrace
+ Write-PipelineTelemetryError -Category 'CheckSymbols' -Message $_
+ ExitWithExitCode 1
+}
diff --git a/eng/common/post-build/trigger-subscriptions.ps1 b/eng/common/post-build/trigger-subscriptions.ps1
new file mode 100644
index 0000000000..55dea518ac
--- /dev/null
+++ b/eng/common/post-build/trigger-subscriptions.ps1
@@ -0,0 +1,64 @@
+param(
+ [Parameter(Mandatory=$true)][string] $SourceRepo,
+ [Parameter(Mandatory=$true)][int] $ChannelId,
+ [Parameter(Mandatory=$true)][string] $MaestroApiAccessToken,
+ [Parameter(Mandatory=$false)][string] $MaestroApiEndPoint = 'https://maestro-prod.westus2.cloudapp.azure.com',
+ [Parameter(Mandatory=$false)][string] $MaestroApiVersion = '2019-01-16'
+)
+
+try {
+ . $PSScriptRoot\post-build-utils.ps1
+
+ # Get all the $SourceRepo subscriptions
+ $normalizedSourceRepo = $SourceRepo.Replace('dnceng@', '')
+ $subscriptions = Get-MaestroSubscriptions -SourceRepository $normalizedSourceRepo -ChannelId $ChannelId
+
+ if (!$subscriptions) {
+ Write-PipelineTelemetryError -Category 'TriggerSubscriptions' -Message "No subscriptions found for source repo '$normalizedSourceRepo' in channel '$ChannelId'"
+ ExitWithExitCode 0
+ }
+
+ $subscriptionsToTrigger = New-Object System.Collections.Generic.List[string]
+ $failedTriggeredSubscription = $false
+
+ # Get all enabled subscriptions that need dependency flow on 'everyBuild'
+ foreach ($subscription in $subscriptions) {
+ if ($subscription.enabled -and $subscription.policy.updateFrequency -like 'everyBuild' -and $subscription.channel.id -eq $ChannelId) {
+ Write-Host "Should trigger this subscription: ${$subscription.id}"
+ [void]$subscriptionsToTrigger.Add($subscription.id)
+ }
+ }
+
+ foreach ($subscriptionToTrigger in $subscriptionsToTrigger) {
+ try {
+ Write-Host "Triggering subscription '$subscriptionToTrigger'."
+
+ Trigger-Subscription -SubscriptionId $subscriptionToTrigger
+
+ Write-Host 'done.'
+ }
+ catch
+ {
+ Write-Host "There was an error while triggering subscription '$subscriptionToTrigger'"
+ Write-Host $_
+ Write-Host $_.ScriptStackTrace
+ $failedTriggeredSubscription = $true
+ }
+ }
+
+ if ($subscriptionsToTrigger.Count -eq 0) {
+ Write-Host "No subscription matched source repo '$normalizedSourceRepo' and channel ID '$ChannelId'."
+ }
+ elseif ($failedTriggeredSubscription) {
+ Write-PipelineTelemetryError -Category 'TriggerSubscriptions' -Message 'At least one subscription failed to be triggered...'
+ ExitWithExitCode 1
+ }
+ else {
+ Write-Host 'All subscriptions were triggered successfully!'
+ }
+}
+catch {
+ Write-Host $_.ScriptStackTrace
+ Write-PipelineTelemetryError -Category 'TriggerSubscriptions' -Message $_
+ ExitWithExitCode 1
+}
diff --git a/eng/common/sdk-task.ps1 b/eng/common/sdk-task.ps1
new file mode 100644
index 0000000000..e159c6f184
--- /dev/null
+++ b/eng/common/sdk-task.ps1
@@ -0,0 +1,97 @@
+[CmdletBinding(PositionalBinding=$false)]
+Param(
+ [string] $configuration = 'Debug',
+ [string] $task,
+ [string] $verbosity = 'minimal',
+ [string] $msbuildEngine = $null,
+ [switch] $restore,
+ [switch] $prepareMachine,
+ [switch] $help,
+ [Parameter(ValueFromRemainingArguments=$true)][String[]]$properties
+)
+
+$ci = $true
+$binaryLog = $true
+$warnAsError = $true
+
+. $PSScriptRoot\tools.ps1
+
+function Print-Usage() {
+ Write-Host "Common settings:"
+ Write-Host " -task Name of Arcade task (name of a project in SdkTasks directory of the Arcade SDK package)"
+ Write-Host " -restore Restore dependencies"
+ Write-Host " -verbosity Msbuild verbosity: q[uiet], m[inimal], n[ormal], d[etailed], and diag[nostic]"
+ Write-Host " -help Print help and exit"
+ Write-Host ""
+
+ Write-Host "Advanced settings:"
+ Write-Host " -prepareMachine Prepare machine for CI run"
+ Write-Host " -msbuildEngine Msbuild engine to use to run build ('dotnet', 'vs', or unspecified)."
+ Write-Host ""
+ Write-Host "Command line arguments not listed above are passed thru to msbuild."
+}
+
+function Build([string]$target) {
+ $logSuffix = if ($target -eq 'Execute') { '' } else { ".$target" }
+ $log = Join-Path $LogDir "$task$logSuffix.binlog"
+ $outputPath = Join-Path $ToolsetDir "$task\\"
+
+ MSBuild $taskProject `
+ /bl:$log `
+ /t:$target `
+ /p:Configuration=$configuration `
+ /p:RepoRoot=$RepoRoot `
+ /p:BaseIntermediateOutputPath=$outputPath `
+ /v:$verbosity `
+ @properties
+}
+
+try {
+ if ($help -or (($null -ne $properties) -and ($properties.Contains('/help') -or $properties.Contains('/?')))) {
+ Print-Usage
+ exit 0
+ }
+
+ if ($task -eq "") {
+ Write-PipelineTelemetryError -Category 'Build' -Message "Missing required parameter '-task '" -ForegroundColor Red
+ Print-Usage
+ ExitWithExitCode 1
+ }
+
+ if( $msbuildEngine -eq "vs") {
+ # Ensure desktop MSBuild is available for sdk tasks.
+ if( -not ($GlobalJson.tools.PSObject.Properties.Name -contains "vs" )) {
+ $GlobalJson.tools | Add-Member -Name "vs" -Value (ConvertFrom-Json "{ `"version`": `"16.5`" }") -MemberType NoteProperty
+ }
+ if( -not ($GlobalJson.tools.PSObject.Properties.Name -match "xcopy-msbuild" )) {
+ $GlobalJson.tools | Add-Member -Name "xcopy-msbuild" -Value "16.5.0-alpha" -MemberType NoteProperty
+ }
+ if ($GlobalJson.tools."xcopy-msbuild".Trim() -ine "none") {
+ $xcopyMSBuildToolsFolder = InitializeXCopyMSBuild $GlobalJson.tools."xcopy-msbuild" -install $true
+ }
+ if ($xcopyMSBuildToolsFolder -eq $null) {
+ throw 'Unable to get xcopy downloadable version of msbuild'
+ }
+
+ $global:_MSBuildExe = "$($xcopyMSBuildToolsFolder)\MSBuild\Current\Bin\MSBuild.exe"
+ }
+
+ $taskProject = GetSdkTaskProject $task
+ if (!(Test-Path $taskProject)) {
+ Write-PipelineTelemetryError -Category 'Build' -Message "Unknown task: $task" -ForegroundColor Red
+ ExitWithExitCode 1
+ }
+
+ if ($restore) {
+ Build 'Restore'
+ }
+
+ Build 'Execute'
+}
+catch {
+ Write-Host $_.ScriptStackTrace
+ Write-PipelineTelemetryError -Category 'Build' -Message $_
+ ExitWithExitCode 1
+}
+
+ExitWithExitCode 0
diff --git a/eng/common/sdl/NuGet.config b/eng/common/sdl/NuGet.config
new file mode 100644
index 0000000000..0c5451c114
--- /dev/null
+++ b/eng/common/sdl/NuGet.config
@@ -0,0 +1,13 @@
+<?xml version="1.0" encoding="utf-8"?>
+<configuration>
+  <solution>
+    <add key="disableSourceControlIntegration" value="true" />
+  </solution>
+  <packageSources>
+    <clear />
+    <add key="guardian" value="https://securitytools.pkgs.visualstudio.com/_packaging/Guardian/nuget/v3/index.json" />
+  </packageSources>
+  <disabledPackageSources>
+    <clear />
+  </disabledPackageSources>
+</configuration>
diff --git a/eng/common/sdl/execute-all-sdl-tools.ps1 b/eng/common/sdl/execute-all-sdl-tools.ps1
new file mode 100644
index 0000000000..b681d797cd
--- /dev/null
+++ b/eng/common/sdl/execute-all-sdl-tools.ps1
@@ -0,0 +1,120 @@
+Param(
+ [string] $GuardianPackageName, # Required: the name of guardian CLI package (not needed if GuardianCliLocation is specified)
+ [string] $NugetPackageDirectory, # Required: directory where NuGet packages are installed (not needed if GuardianCliLocation is specified)
+ [string] $GuardianCliLocation, # Optional: Direct location of Guardian CLI executable if GuardianPackageName & NugetPackageDirectory are not specified
+ [string] $Repository=$env:BUILD_REPOSITORY_NAME, # Required: the name of the repository (e.g. dotnet/arcade)
+ [string] $BranchName=$env:BUILD_SOURCEBRANCH, # Optional: name of branch or version of gdn settings; defaults to master
+ [string] $SourceDirectory=$env:BUILD_SOURCESDIRECTORY, # Required: the directory where source files are located
+ [string] $ArtifactsDirectory = (Join-Path $env:BUILD_ARTIFACTSTAGINGDIRECTORY ('artifacts')), # Required: the directory where build artifacts are located
+ [string] $AzureDevOpsAccessToken, # Required: access token for dnceng; should be provided via KeyVault
+ [string[]] $SourceToolsList, # Optional: list of SDL tools to run on source code
+ [string[]] $ArtifactToolsList, # Optional: list of SDL tools to run on built artifacts
+ [bool] $TsaPublish=$False, # Optional: true will publish results to TSA; only set to true after onboarding to TSA; TSA is the automated framework used to upload test results as bugs.
+ [string] $TsaBranchName=$env:BUILD_SOURCEBRANCH, # Optional: required for TSA publish; defaults to $(Build.SourceBranchName); TSA is the automated framework used to upload test results as bugs.
+ [string] $TsaRepositoryName=$env:BUILD_REPOSITORY_NAME, # Optional: TSA repository name; will be generated automatically if not submitted; TSA is the automated framework used to upload test results as bugs.
+ [string] $BuildNumber=$env:BUILD_BUILDNUMBER, # Optional: required for TSA publish; defaults to $(Build.BuildNumber)
+ [bool] $UpdateBaseline=$False, # Optional: if true, will update the baseline in the repository; should only be run after fixing any issues which need to be fixed
+ [bool] $TsaOnboard=$False, # Optional: if true, will onboard the repository to TSA; should only be run once; TSA is the automated framework used to upload test results as bugs.
+ [string] $TsaInstanceUrl, # Optional: only needed if TsaOnboard or TsaPublish is true; the instance-url registered with TSA; TSA is the automated framework used to upload test results as bugs.
+ [string] $TsaCodebaseName, # Optional: only needed if TsaOnboard or TsaPublish is true; the name of the codebase registered with TSA; TSA is the automated framework used to upload test results as bugs.
+ [string] $TsaProjectName, # Optional: only needed if TsaOnboard or TsaPublish is true; the name of the project registered with TSA; TSA is the automated framework used to upload test results as bugs.
+ [string] $TsaNotificationEmail, # Optional: only needed if TsaOnboard is true; the email(s) which will receive notifications of TSA bug filings (e.g. alias@microsoft.com); TSA is the automated framework used to upload test results as bugs.
+ [string] $TsaCodebaseAdmin, # Optional: only needed if TsaOnboard is true; the aliases which are admins of the TSA codebase (e.g. DOMAIN\alias); TSA is the automated framework used to upload test results as bugs.
+ [string] $TsaBugAreaPath, # Optional: only needed if TsaOnboard is true; the area path where TSA will file bugs in AzDO; TSA is the automated framework used to upload test results as bugs.
+ [string] $TsaIterationPath, # Optional: only needed if TsaOnboard is true; the iteration path where TSA will file bugs in AzDO; TSA is the automated framework used to upload test results as bugs.
+ [string] $GuardianLoggerLevel='Standard', # Optional: the logger level for the Guardian CLI; options are Trace, Verbose, Standard, Warning, and Error
+ [string[]] $CrScanAdditionalRunConfigParams, # Optional: Additional Params to custom build a CredScan run config in the format @("xyz:abc","sdf:1")
+ [string[]] $PoliCheckAdditionalRunConfigParams, # Optional: Additional Params to custom build a Policheck run config in the format @("xyz:abc","sdf:1")
+ [bool] $BreakOnFailure=$False # Optional: Fail the build if there were errors during the run
+)
+
+try {
+ $ErrorActionPreference = 'Stop'
+ Set-StrictMode -Version 2.0
+ $disableConfigureToolsetImport = $true
+ $LASTEXITCODE = 0
+
+ # `tools.ps1` checks $ci to perform some actions. Since the SDL
+ # scripts don't necessarily execute on the same agent that runs the
+ # build.ps1/sh script, this variable isn't automatically set.
+ $ci = $true
+ . $PSScriptRoot\..\tools.ps1
+
+  # Convert the repository name to the org/repo format, e.g. 'dotnet-arcade' -> 'dotnet/arcade'
+  if (!($Repository.Contains('/'))) {
+    $RepoName = $Repository -replace '(.*?)-(.*)', '$1/$2';
+  }
+  else {
+    $RepoName = $Repository;
+  }
+
+ if ($GuardianPackageName) {
+ $guardianCliLocation = Join-Path $NugetPackageDirectory (Join-Path $GuardianPackageName (Join-Path 'tools' 'guardian.cmd'))
+ } else {
+ $guardianCliLocation = $GuardianCliLocation
+ }
+
+ $workingDirectory = (Split-Path $SourceDirectory -Parent)
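+  # Guardian keeps its state in a .gdn folder under this parent directory (created/downloaded by init-sdl.ps1 below).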
+ $ValidPath = Test-Path $guardianCliLocation
+
+  if ($ValidPath -eq $False) {
+    Write-PipelineTelemetryError -Force -Category 'Sdl' -Message 'Invalid Guardian CLI Location.'
+    ExitWithExitCode 1
+  }
+
+ & $(Join-Path $PSScriptRoot 'init-sdl.ps1') -GuardianCliLocation $guardianCliLocation -Repository $RepoName -BranchName $BranchName -WorkingDirectory $workingDirectory -AzureDevOpsAccessToken $AzureDevOpsAccessToken -GuardianLoggerLevel $GuardianLoggerLevel
+ $gdnFolder = Join-Path $workingDirectory '.gdn'
+
+ if ($TsaOnboard) {
+ if ($TsaCodebaseName -and $TsaNotificationEmail -and $TsaCodebaseAdmin -and $TsaBugAreaPath) {
+ Write-Host "$guardianCliLocation tsa-onboard --codebase-name `"$TsaCodebaseName`" --notification-alias `"$TsaNotificationEmail`" --codebase-admin `"$TsaCodebaseAdmin`" --instance-url `"$TsaInstanceUrl`" --project-name `"$TsaProjectName`" --area-path `"$TsaBugAreaPath`" --iteration-path `"$TsaIterationPath`" --working-directory $workingDirectory --logger-level $GuardianLoggerLevel"
+ & $guardianCliLocation tsa-onboard --codebase-name "$TsaCodebaseName" --notification-alias "$TsaNotificationEmail" --codebase-admin "$TsaCodebaseAdmin" --instance-url "$TsaInstanceUrl" --project-name "$TsaProjectName" --area-path "$TsaBugAreaPath" --iteration-path "$TsaIterationPath" --working-directory $workingDirectory --logger-level $GuardianLoggerLevel
+ if ($LASTEXITCODE -ne 0) {
+ Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "Guardian tsa-onboard failed with exit code $LASTEXITCODE."
+ ExitWithExitCode $LASTEXITCODE
+ }
+ } else {
+ Write-PipelineTelemetryError -Force -Category 'Sdl' -Message 'Could not onboard to TSA -- not all required values ($TsaCodebaseName, $TsaNotificationEmail, $TsaCodebaseAdmin, $TsaBugAreaPath) were specified.'
+ ExitWithExitCode 1
+ }
+ }
+
+ if ($ArtifactToolsList -and $ArtifactToolsList.Count -gt 0) {
+ & $(Join-Path $PSScriptRoot 'run-sdl.ps1') -GuardianCliLocation $guardianCliLocation -WorkingDirectory $workingDirectory -TargetDirectory $ArtifactsDirectory -GdnFolder $gdnFolder -ToolsList $ArtifactToolsList -AzureDevOpsAccessToken $AzureDevOpsAccessToken -UpdateBaseline $UpdateBaseline -GuardianLoggerLevel $GuardianLoggerLevel -CrScanAdditionalRunConfigParams $CrScanAdditionalRunConfigParams -PoliCheckAdditionalRunConfigParams $PoliCheckAdditionalRunConfigParams
+ }
+ if ($SourceToolsList -and $SourceToolsList.Count -gt 0) {
+ & $(Join-Path $PSScriptRoot 'run-sdl.ps1') -GuardianCliLocation $guardianCliLocation -WorkingDirectory $workingDirectory -TargetDirectory $SourceDirectory -GdnFolder $gdnFolder -ToolsList $SourceToolsList -AzureDevOpsAccessToken $AzureDevOpsAccessToken -UpdateBaseline $UpdateBaseline -GuardianLoggerLevel $GuardianLoggerLevel -CrScanAdditionalRunConfigParams $CrScanAdditionalRunConfigParams -PoliCheckAdditionalRunConfigParams $PoliCheckAdditionalRunConfigParams
+ }
+
+ if ($UpdateBaseline) {
+ & (Join-Path $PSScriptRoot 'push-gdn.ps1') -Repository $RepoName -BranchName $BranchName -GdnFolder $GdnFolder -AzureDevOpsAccessToken $AzureDevOpsAccessToken -PushReason 'Update baseline'
+ }
+
+ if ($TsaPublish) {
+ if ($TsaBranchName -and $BuildNumber) {
+ if (-not $TsaRepositoryName) {
+ $TsaRepositoryName = "$($Repository)-$($BranchName)"
+ }
+ Write-Host "$guardianCliLocation tsa-publish --all-tools --repository-name `"$TsaRepositoryName`" --branch-name `"$TsaBranchName`" --build-number `"$BuildNumber`" --codebase-name `"$TsaCodebaseName`" --notification-alias `"$TsaNotificationEmail`" --codebase-admin `"$TsaCodebaseAdmin`" --instance-url `"$TsaInstanceUrl`" --project-name `"$TsaProjectName`" --area-path `"$TsaBugAreaPath`" --iteration-path `"$TsaIterationPath`" --working-directory $workingDirectory --logger-level $GuardianLoggerLevel"
+ & $guardianCliLocation tsa-publish --all-tools --repository-name "$TsaRepositoryName" --branch-name "$TsaBranchName" --build-number "$BuildNumber" --onboard $True --codebase-name "$TsaCodebaseName" --notification-alias "$TsaNotificationEmail" --codebase-admin "$TsaCodebaseAdmin" --instance-url "$TsaInstanceUrl" --project-name "$TsaProjectName" --area-path "$TsaBugAreaPath" --iteration-path "$TsaIterationPath" --working-directory $workingDirectory --logger-level $GuardianLoggerLevel
+ if ($LASTEXITCODE -ne 0) {
+ Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "Guardian tsa-publish failed with exit code $LASTEXITCODE."
+ ExitWithExitCode $LASTEXITCODE
+ }
+ } else {
+ Write-PipelineTelemetryError -Force -Category 'Sdl' -Message 'Could not publish to TSA -- not all required values ($TsaBranchName, $BuildNumber) were specified.'
+ ExitWithExitCode 1
+ }
+ }
+
+ if ($BreakOnFailure) {
+ Write-Host "Failing the build in case of breaking results..."
+ & $guardianCliLocation break
+ }
+}
+catch {
+ Write-Host $_.ScriptStackTrace
+ Write-PipelineTelemetryError -Force -Category 'Sdl' -Message $_
+ exit 1
+}
diff --git a/eng/common/sdl/extract-artifact-packages.ps1 b/eng/common/sdl/extract-artifact-packages.ps1
new file mode 100644
index 0000000000..7f28d9c59e
--- /dev/null
+++ b/eng/common/sdl/extract-artifact-packages.ps1
@@ -0,0 +1,80 @@
+param(
+ [Parameter(Mandatory=$true)][string] $InputPath, # Full path to directory where artifact packages are stored
+ [Parameter(Mandatory=$true)][string] $ExtractPath # Full path to directory where the packages will be extracted
+)
+
+$ErrorActionPreference = 'Stop'
+Set-StrictMode -Version 2.0
+
+$disableConfigureToolsetImport = $true
+
+function ExtractArtifacts {
+ if (!(Test-Path $InputPath)) {
+ Write-Host "Input Path does not exist: $InputPath"
+ ExitWithExitCode 0
+ }
+ $Jobs = @()
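+  # Extract each package in its own background job so large artifact sets unpack in parallel.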
+ Get-ChildItem "$InputPath\*.nupkg" |
+ ForEach-Object {
+ $Jobs += Start-Job -ScriptBlock $ExtractPackage -ArgumentList $_.FullName
+ }
+
+ foreach ($Job in $Jobs) {
+ Wait-Job -Id $Job.Id | Receive-Job
+ }
+}
+
+try {
+ # `tools.ps1` checks $ci to perform some actions. Since the SDL
+  # scripts don't necessarily execute on the same agent that runs the
+  # build.ps1/sh script, this variable isn't set automatically.
+ $ci = $true
+ . $PSScriptRoot\..\tools.ps1
+
+ $ExtractPackage = {
+ param(
+ [string] $PackagePath # Full path to a NuGet package
+ )
+
+ if (!(Test-Path $PackagePath)) {
+ Write-PipelineTelemetryError -Category 'Build' -Message "Input file does not exist: $PackagePath"
+ ExitWithExitCode 1
+ }
+
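+    # Only binaries and symbols need to be extracted; everything else in the package is skipped.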
+ $RelevantExtensions = @('.dll', '.exe', '.pdb')
+ Write-Host -NoNewLine 'Extracting ' ([System.IO.Path]::GetFileName($PackagePath)) '...'
+
+ $PackageId = [System.IO.Path]::GetFileNameWithoutExtension($PackagePath)
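+    # $using: pulls the caller's $ExtractPath into this background job's scope.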
+ $ExtractPath = Join-Path -Path $using:ExtractPath -ChildPath $PackageId
+
+ Add-Type -AssemblyName System.IO.Compression.FileSystem
+
+ [System.IO.Directory]::CreateDirectory($ExtractPath);
+
+ try {
+ $zip = [System.IO.Compression.ZipFile]::OpenRead($PackagePath)
+
+ $zip.Entries |
+ Where-Object {$RelevantExtensions -contains [System.IO.Path]::GetExtension($_.Name)} |
+ ForEach-Object {
+ $TargetFile = Join-Path -Path $ExtractPath -ChildPath $_.Name
+
+ [System.IO.Compression.ZipFileExtensions]::ExtractToFile($_, $TargetFile, $true)
+ }
+ }
+ catch {
+ Write-Host $_
+ Write-PipelineTelemetryError -Force -Category 'Sdl' -Message $_
+ ExitWithExitCode 1
+ }
+ finally {
+ $zip.Dispose()
+ }
+ }
+ Measure-Command { ExtractArtifacts }
+}
+catch {
+ Write-Host $_
+ Write-PipelineTelemetryError -Force -Category 'Sdl' -Message $_
+ ExitWithExitCode 1
+}
diff --git a/eng/common/sdl/init-sdl.ps1 b/eng/common/sdl/init-sdl.ps1
new file mode 100644
index 0000000000..a68bf0b88e
--- /dev/null
+++ b/eng/common/sdl/init-sdl.ps1
@@ -0,0 +1,67 @@
+Param(
+ [string] $GuardianCliLocation,
+ [string] $Repository,
+ [string] $BranchName='master',
+ [string] $WorkingDirectory,
+ [string] $AzureDevOpsAccessToken,
+ [string] $GuardianLoggerLevel='Standard'
+)
+
+$ErrorActionPreference = 'Stop'
+Set-StrictMode -Version 2.0
+$disableConfigureToolsetImport = $true
+$LASTEXITCODE = 0
+
+# `tools.ps1` checks $ci to perform some actions. Since the SDL
+# scripts don't necessarily execute on the same agent that runs the
+# build.ps1/sh script, this variable isn't set automatically.
+$ci = $true
+. $PSScriptRoot\..\tools.ps1
+
+# Don't display the console progress UI - it's a huge perf hit
+$ProgressPreference = 'SilentlyContinue'
+
+# Construct basic auth from AzDO access token; construct URI to the repository's gdn folder stored in that repository; construct location of zip file
+$encodedPat = [Convert]::ToBase64String([System.Text.Encoding]::ASCII.GetBytes(":$AzureDevOpsAccessToken"))
+$escapedRepository = [Uri]::EscapeDataString("/$Repository/$BranchName/.gdn")
+$uri = "https://dev.azure.com/dnceng/internal/_apis/git/repositories/sdl-tool-cfg/Items?path=$escapedRepository&versionDescriptor[versionOptions]=0&`$format=zip&api-version=5.0"
+$zipFile = "$WorkingDirectory/gdn.zip"
+
+Add-Type -AssemblyName System.IO.Compression.FileSystem
+$gdnFolder = (Join-Path $WorkingDirectory '.gdn')
+try {
+ # We try to download the zip; if the request fails (e.g. the file doesn't exist), we catch it and init guardian instead
+  Write-Host 'Downloading gdn folder from internal config repository...'
+ Invoke-WebRequest -Headers @{ "Accept"="application/zip"; "Authorization"="Basic $encodedPat" } -Uri $uri -OutFile $zipFile
+ if (Test-Path $gdnFolder) {
+ # Remove the gdn folder if it exists (it shouldn't unless there's too much caching; this is just in case)
+ Remove-Item -Force -Recurse $gdnFolder
+ }
+ [System.IO.Compression.ZipFile]::ExtractToDirectory($zipFile, $WorkingDirectory)
+ Write-Host $gdnFolder
+ ExitWithExitCode 0
+} catch [System.Net.WebException] { } # Catch and ignore the WebException; if the download failed, the folder doesn't exist yet and is initialized below
+try {
+  # If the folder does not exist in the config repository, do a guardian init and push it to the remote repository
+ Write-Host 'Initializing Guardian...'
+ Write-Host "$GuardianCliLocation init --working-directory $WorkingDirectory --logger-level $GuardianLoggerLevel"
+ & $GuardianCliLocation init --working-directory $WorkingDirectory --logger-level $GuardianLoggerLevel
+ if ($LASTEXITCODE -ne 0) {
+ Write-PipelineTelemetryError -Force -Category 'Build' -Message "Guardian init failed with exit code $LASTEXITCODE."
+ ExitWithExitCode $LASTEXITCODE
+ }
+ # We create the mainbaseline so it can be edited later
+ Write-Host "$GuardianCliLocation baseline --working-directory $WorkingDirectory --name mainbaseline"
+ & $GuardianCliLocation baseline --working-directory $WorkingDirectory --name mainbaseline
+ if ($LASTEXITCODE -ne 0) {
+ Write-PipelineTelemetryError -Force -Category 'Build' -Message "Guardian baseline failed with exit code $LASTEXITCODE."
+ ExitWithExitCode $LASTEXITCODE
+ }
+ & $(Join-Path $PSScriptRoot 'push-gdn.ps1') -Repository $Repository -BranchName $BranchName -GdnFolder $gdnFolder -AzureDevOpsAccessToken $AzureDevOpsAccessToken -PushReason 'Initialize gdn folder'
+ ExitWithExitCode 0
+}
+catch {
+ Write-Host $_.ScriptStackTrace
+ Write-PipelineTelemetryError -Force -Category 'Sdl' -Message $_
+ ExitWithExitCode 1
+}
diff --git a/eng/common/sdl/packages.config b/eng/common/sdl/packages.config
new file mode 100644
index 0000000000..968b39bef5
--- /dev/null
+++ b/eng/common/sdl/packages.config
@@ -0,0 +1,4 @@
+<?xml version="1.0" encoding="utf-8"?>
+<packages>
+  <package id="Microsoft.Guardian.Cli.win10-x64" version="0.20.1" />
+</packages>
diff --git a/eng/common/sdl/push-gdn.ps1 b/eng/common/sdl/push-gdn.ps1
new file mode 100644
index 0000000000..d8fd2d82a6
--- /dev/null
+++ b/eng/common/sdl/push-gdn.ps1
@@ -0,0 +1,69 @@
+Param(
+ [string] $Repository,
+ [string] $BranchName='master',
+ [string] $GdnFolder,
+ [string] $AzureDevOpsAccessToken,
+ [string] $PushReason
+)
+
+$ErrorActionPreference = 'Stop'
+Set-StrictMode -Version 2.0
+$disableConfigureToolsetImport = $true
+$LASTEXITCODE = 0
+
+try {
+ # `tools.ps1` checks $ci to perform some actions. Since the SDL
+  # scripts don't necessarily execute on the same agent that runs the
+  # build.ps1/sh script, this variable isn't set automatically.
+ $ci = $true
+ . $PSScriptRoot\..\tools.ps1
+
+ # We create the temp directory where we'll store the sdl-config repository
+ $sdlDir = Join-Path $env:TEMP 'sdl'
+ if (Test-Path $sdlDir) {
+ Remove-Item -Force -Recurse $sdlDir
+ }
+
+ Write-Host "git clone https://dnceng:`$AzureDevOpsAccessToken@dev.azure.com/dnceng/internal/_git/sdl-tool-cfg $sdlDir"
+ git clone https://dnceng:$AzureDevOpsAccessToken@dev.azure.com/dnceng/internal/_git/sdl-tool-cfg $sdlDir
+ if ($LASTEXITCODE -ne 0) {
+ Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "Git clone failed with exit code $LASTEXITCODE."
+ ExitWithExitCode $LASTEXITCODE
+ }
+ # We copy the .gdn folder from our local run into the git repository so it can be committed
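+  # The config repository is laid out as <org>/<repo>/<branch>/.gdn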
+ $sdlRepositoryFolder = Join-Path (Join-Path (Join-Path $sdlDir $Repository) $BranchName) '.gdn'
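+  # Robocopy is used when available (Windows agents); rsync covers the non-Windows case.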
+ if (Get-Command Robocopy) {
+ Robocopy /S $GdnFolder $sdlRepositoryFolder
+ } else {
+ rsync -r $GdnFolder $sdlRepositoryFolder
+ }
+ # cd to the sdl-config directory so we can run git there
+ Push-Location $sdlDir
+ # git add . --> git commit --> git push
+ Write-Host 'git add .'
+ git add .
+ if ($LASTEXITCODE -ne 0) {
+ Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "Git add failed with exit code $LASTEXITCODE."
+ ExitWithExitCode $LASTEXITCODE
+ }
+ Write-Host "git -c user.email=`"dn-bot@microsoft.com`" -c user.name=`"Dotnet Bot`" commit -m `"$PushReason for $Repository/$BranchName`""
+ git -c user.email="dn-bot@microsoft.com" -c user.name="Dotnet Bot" commit -m "$PushReason for $Repository/$BranchName"
+ if ($LASTEXITCODE -ne 0) {
+ Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "Git commit failed with exit code $LASTEXITCODE."
+ ExitWithExitCode $LASTEXITCODE
+ }
+ Write-Host 'git push'
+ git push
+ if ($LASTEXITCODE -ne 0) {
+ Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "Git push failed with exit code $LASTEXITCODE."
+ ExitWithExitCode $LASTEXITCODE
+ }
+
+ # Return to the original directory
+ Pop-Location
+}
+catch {
+ Write-Host $_.ScriptStackTrace
+ Write-PipelineTelemetryError -Category 'Sdl' -Message $_
+ ExitWithExitCode 1
+}
diff --git a/eng/common/sdl/run-sdl.ps1 b/eng/common/sdl/run-sdl.ps1
new file mode 100644
index 0000000000..fe95ab35aa
--- /dev/null
+++ b/eng/common/sdl/run-sdl.ps1
@@ -0,0 +1,73 @@
+Param(
+ [string] $GuardianCliLocation,
+ [string] $WorkingDirectory,
+ [string] $TargetDirectory,
+ [string] $GdnFolder,
+ [string[]] $ToolsList,
+ [string] $UpdateBaseline,
+ [string] $GuardianLoggerLevel='Standard',
+ [string[]] $CrScanAdditionalRunConfigParams,
+ [string[]] $PoliCheckAdditionalRunConfigParams
+)
+
+$ErrorActionPreference = 'Stop'
+Set-StrictMode -Version 2.0
+$disableConfigureToolsetImport = $true
+$LASTEXITCODE = 0
+
+try {
+ # `tools.ps1` checks $ci to perform some actions. Since the SDL
+  # scripts don't necessarily execute on the same agent that runs the
+  # build.ps1/sh script, this variable isn't set automatically.
+ $ci = $true
+ . $PSScriptRoot\..\tools.ps1
+
+ # We store config files in the r directory of .gdn
+ Write-Host $ToolsList
+ $gdnConfigPath = Join-Path $GdnFolder 'r'
+ $ValidPath = Test-Path $GuardianCliLocation
+
+  if ($ValidPath -eq $False) {
+    Write-PipelineTelemetryError -Force -Category 'Sdl' -Message 'Invalid Guardian CLI Location.'
+    ExitWithExitCode 1
+  }
+
+ $configParam = @('--config')
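+  # Each tool contributes one gdnconfig file; they are all passed to a single 'guardian run' below.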
+
+ foreach ($tool in $ToolsList) {
+ $gdnConfigFile = Join-Path $gdnConfigPath "$tool-configure.gdnconfig"
+ Write-Host $tool
+ # We have to manually configure tools that run on source to look at the source directory only
+ if ($tool -eq 'credscan') {
+ Write-Host "$GuardianCliLocation configure --working-directory $WorkingDirectory --tool $tool --output-path $gdnConfigFile --logger-level $GuardianLoggerLevel --noninteractive --force --args `" TargetDirectory < $TargetDirectory `" `" OutputType < pre `" $(If ($CrScanAdditionalRunConfigParams) {$CrScanAdditionalRunConfigParams})"
+ & $GuardianCliLocation configure --working-directory $WorkingDirectory --tool $tool --output-path $gdnConfigFile --logger-level $GuardianLoggerLevel --noninteractive --force --args " TargetDirectory < $TargetDirectory " "OutputType < pre" $(If ($CrScanAdditionalRunConfigParams) {$CrScanAdditionalRunConfigParams})
+ if ($LASTEXITCODE -ne 0) {
+ Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "Guardian configure for $tool failed with exit code $LASTEXITCODE."
+ ExitWithExitCode $LASTEXITCODE
+ }
+ }
+ if ($tool -eq 'policheck') {
+ Write-Host "$GuardianCliLocation configure --working-directory $WorkingDirectory --tool $tool --output-path $gdnConfigFile --logger-level $GuardianLoggerLevel --noninteractive --force --args `" Target < $TargetDirectory `" $(If ($PoliCheckAdditionalRunConfigParams) {$PoliCheckAdditionalRunConfigParams})"
+ & $GuardianCliLocation configure --working-directory $WorkingDirectory --tool $tool --output-path $gdnConfigFile --logger-level $GuardianLoggerLevel --noninteractive --force --args " Target < $TargetDirectory " $(If ($PoliCheckAdditionalRunConfigParams) {$PoliCheckAdditionalRunConfigParams})
+ if ($LASTEXITCODE -ne 0) {
+ Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "Guardian configure for $tool failed with exit code $LASTEXITCODE."
+ ExitWithExitCode $LASTEXITCODE
+ }
+ }
+
+    $configParam += $gdnConfigFile
+ }
+
+ Write-Host "$GuardianCliLocation run --working-directory $WorkingDirectory --baseline mainbaseline --update-baseline $UpdateBaseline --logger-level $GuardianLoggerLevel $configParam"
+ & $GuardianCliLocation run --working-directory $WorkingDirectory --tool $tool --baseline mainbaseline --update-baseline $UpdateBaseline --logger-level $GuardianLoggerLevel $configParam
+ if ($LASTEXITCODE -ne 0) {
+ Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "Guardian run for $ToolsList using $configParam failed with exit code $LASTEXITCODE."
+ ExitWithExitCode $LASTEXITCODE
+ }
+}
+catch {
+ Write-Host $_.ScriptStackTrace
+ Write-PipelineTelemetryError -Force -Category 'Sdl' -Message $_
+ ExitWithExitCode 1
+}
diff --git a/eng/common/templates/job/execute-sdl.yml b/eng/common/templates/job/execute-sdl.yml
new file mode 100644
index 0000000000..c64c4f5686
--- /dev/null
+++ b/eng/common/templates/job/execute-sdl.yml
@@ -0,0 +1,91 @@
+parameters:
+ enable: 'false' # Whether the SDL validation job should execute or not
+ overrideParameters: '' # Optional: to override values for parameters.
+  additionalParameters: '' # Optional: parameters that need user-specific values, e.g. '-SourceToolsList @("abc","def") -ArtifactToolsList @("ghi","jkl")'
+  # There is a known (already reported) bug in Azure DevOps: if this parameter is named
+  # 'continueOnError', the parameter value is not picked up correctly.
+  # The caller (post-build.yml) can also remedy this by not using a nested parameter.
+  sdlContinueOnError: false # Optional: determines whether to continue the build if the step errors
+  downloadArtifacts: true # Optional: determines whether the artifacts should be downloaded
+ dependsOn: '' # Optional: dependencies of the job
+ artifactNames: '' # Optional: patterns supplied to DownloadBuildArtifacts
+ # Usage:
+ # artifactNames:
+ # - 'BlobArtifacts'
+ # - 'Artifacts_Windows_NT_Release'
+
+jobs:
+- job: Run_SDL
+ dependsOn: ${{ parameters.dependsOn }}
+ displayName: Run SDL tool
+ condition: eq( ${{ parameters.enable }}, 'true')
+ variables:
+ - group: DotNet-VSTS-Bot
+ - name: AzDOProjectName
+ value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOProjectName'] ]
+ - name: AzDOPipelineId
+ value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOPipelineId'] ]
+ - name: AzDOBuildId
+ value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOBuildId'] ]
+ pool:
+ name: Hosted VS2017
+ steps:
+ - checkout: self
+ clean: true
+ - ${{ if ne(parameters.downloadArtifacts, 'false')}}:
+ - ${{ if ne(parameters.artifactNames, '') }}:
+ - ${{ each artifactName in parameters.artifactNames }}:
+ - task: DownloadBuildArtifacts@0
+ displayName: Download Build Artifacts
+ inputs:
+ buildType: specific
+ buildVersionToDownload: specific
+ project: $(AzDOProjectName)
+ pipeline: $(AzDOPipelineId)
+ buildId: $(AzDOBuildId)
+ artifactName: ${{ artifactName }}
+ downloadPath: $(Build.ArtifactStagingDirectory)\artifacts
+ - ${{ if eq(parameters.artifactNames, '') }}:
+ - task: DownloadBuildArtifacts@0
+ displayName: Download Build Artifacts
+ inputs:
+ buildType: specific
+ buildVersionToDownload: specific
+ project: $(AzDOProjectName)
+ pipeline: $(AzDOPipelineId)
+ buildId: $(AzDOBuildId)
+ downloadType: specific files
+ itemPattern: "**"
+ downloadPath: $(Build.ArtifactStagingDirectory)\artifacts
+ - powershell: eng/common/sdl/extract-artifact-packages.ps1
+ -InputPath $(Build.ArtifactStagingDirectory)\artifacts\BlobArtifacts
+ -ExtractPath $(Build.ArtifactStagingDirectory)\artifacts\BlobArtifacts
+ displayName: Extract Blob Artifacts
+ continueOnError: ${{ parameters.sdlContinueOnError }}
+ - powershell: eng/common/sdl/extract-artifact-packages.ps1
+ -InputPath $(Build.ArtifactStagingDirectory)\artifacts\PackageArtifacts
+ -ExtractPath $(Build.ArtifactStagingDirectory)\artifacts\PackageArtifacts
+ displayName: Extract Package Artifacts
+ continueOnError: ${{ parameters.sdlContinueOnError }}
+ - task: NuGetToolInstaller@1
+ displayName: 'Install NuGet.exe'
+ - task: NuGetCommand@2
+ displayName: 'Install Guardian'
+ inputs:
+ restoreSolution: $(Build.SourcesDirectory)\eng\common\sdl\packages.config
+ feedsToUse: config
+ nugetConfigPath: $(Build.SourcesDirectory)\eng\common\sdl\NuGet.config
+ externalFeedCredentials: GuardianConnect
+ restoreDirectory: $(Build.SourcesDirectory)\.packages
+ - ${{ if ne(parameters.overrideParameters, '') }}:
+ - powershell: eng/common/sdl/execute-all-sdl-tools.ps1 ${{ parameters.overrideParameters }}
+ displayName: Execute SDL
+ continueOnError: ${{ parameters.sdlContinueOnError }}
+ - ${{ if eq(parameters.overrideParameters, '') }}:
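+    # Default invocation; the Guardian package name/version must stay in sync with eng/common/sdl/packages.config.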
+ - powershell: eng/common/sdl/execute-all-sdl-tools.ps1
+ -GuardianPackageName Microsoft.Guardian.Cli.win10-x64.0.20.1
+ -NugetPackageDirectory $(Build.SourcesDirectory)\.packages
+ -AzureDevOpsAccessToken $(dn-bot-dotnet-build-rw-code-rw)
+ ${{ parameters.additionalParameters }}
+ displayName: Execute SDL
+ continueOnError: ${{ parameters.sdlContinueOnError }}
diff --git a/eng/common/templates/job/generate-graph-files.yml b/eng/common/templates/job/generate-graph-files.yml
new file mode 100644
index 0000000000..e54ce956f9
--- /dev/null
+++ b/eng/common/templates/job/generate-graph-files.yml
@@ -0,0 +1,48 @@
+parameters:
+ # Optional: dependencies of the job
+ dependsOn: ''
+
+ # Optional: A defined YAML pool - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#pool
+ pool: {}
+
+ # Optional: Include toolset dependencies in the generated graph files
+ includeToolset: false
+
+jobs:
+- job: Generate_Graph_Files
+
+ dependsOn: ${{ parameters.dependsOn }}
+
+ displayName: Generate Graph Files
+
+ pool: ${{ parameters.pool }}
+
+ variables:
+ # Publish-Build-Assets provides: MaestroAccessToken, BotAccount-dotnet-maestro-bot-PAT
+ # DotNet-AllOrgs-Darc-Pats provides: dn-bot-devdiv-dnceng-rw-code-pat
+ - group: Publish-Build-Assets
+ - group: DotNet-AllOrgs-Darc-Pats
+ - name: _GraphArguments
+ value: -gitHubPat $(BotAccount-dotnet-maestro-bot-PAT)
+ -azdoPat $(dn-bot-devdiv-dnceng-rw-code-pat)
+ -barToken $(MaestroAccessToken)
+ -outputFolder '$(Build.StagingDirectory)/GraphFiles/'
+ - ${{ if ne(parameters.includeToolset, 'false') }}:
+ - name: _GraphArguments
+ value: ${{ variables._GraphArguments }} -includeToolset
+
+ steps:
+ - task: PowerShell@2
+ displayName: Generate Graph Files
+ inputs:
+ filePath: eng\common\generate-graph-files.ps1
+ arguments: $(_GraphArguments)
+ continueOnError: true
+ - task: PublishBuildArtifacts@1
+ displayName: Publish Graph to Artifacts
+ inputs:
+ PathtoPublish: '$(Build.StagingDirectory)/GraphFiles'
+ PublishLocation: Container
+ ArtifactName: GraphFiles
+ continueOnError: true
+ condition: always()
diff --git a/eng/common/templates/job/job.yml b/eng/common/templates/job/job.yml
new file mode 100644
index 0000000000..e78ed9a1c6
--- /dev/null
+++ b/eng/common/templates/job/job.yml
@@ -0,0 +1,242 @@
+# Internal resources (telemetry, microbuild) can only be accessed from non-public projects,
+# and some (Microbuild) should only be applied to non-PR cases for internal builds.
+
+parameters:
+# Job schema parameters - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job
+ cancelTimeoutInMinutes: ''
+ condition: ''
+ container: ''
+ continueOnError: false
+ dependsOn: ''
+ displayName: ''
+ pool: ''
+ steps: []
+ strategy: ''
+ timeoutInMinutes: ''
+ variables: []
+ workspace: ''
+
+# Job base template specific parameters
+ # See schema documentation - https://github.com/dotnet/arcade/blob/master/Documentation/AzureDevOps/TemplateSchema.md
+ artifacts: ''
+ enableMicrobuild: false
+ enablePublishBuildArtifacts: false
+ enablePublishBuildAssets: false
+ enablePublishTestResults: false
+  enablePublishUsingPipelines: false
+  # Declared explicitly because the variables/steps below reference them
+  enableRichCodeNavigation: false
+  enableTelemetry: false
+ useBuildManifest: false
+ mergeTestResults: false
+ testRunTitle: ''
+ name: ''
+ preSteps: []
+ runAsPublic: false
+
+jobs:
+- job: ${{ parameters.name }}
+
+ ${{ if ne(parameters.cancelTimeoutInMinutes, '') }}:
+ cancelTimeoutInMinutes: ${{ parameters.cancelTimeoutInMinutes }}
+
+ ${{ if ne(parameters.condition, '') }}:
+ condition: ${{ parameters.condition }}
+
+ ${{ if ne(parameters.container, '') }}:
+ container: ${{ parameters.container }}
+
+ ${{ if ne(parameters.continueOnError, '') }}:
+ continueOnError: ${{ parameters.continueOnError }}
+
+ ${{ if ne(parameters.dependsOn, '') }}:
+ dependsOn: ${{ parameters.dependsOn }}
+
+ ${{ if ne(parameters.displayName, '') }}:
+ displayName: ${{ parameters.displayName }}
+
+ ${{ if ne(parameters.pool, '') }}:
+ pool: ${{ parameters.pool }}
+
+ ${{ if ne(parameters.strategy, '') }}:
+ strategy: ${{ parameters.strategy }}
+
+ ${{ if ne(parameters.timeoutInMinutes, '') }}:
+ timeoutInMinutes: ${{ parameters.timeoutInMinutes }}
+
+ variables:
+ - ${{ if ne(parameters.enableTelemetry, 'false') }}:
+ - name: DOTNET_CLI_TELEMETRY_PROFILE
+ value: '$(Build.Repository.Uri)'
+ - ${{ if eq(parameters.enableRichCodeNavigation, 'true') }}:
+ - name: EnableRichCodeNavigation
+ value: 'true'
+ - ${{ each variable in parameters.variables }}:
+ # handle name-value variable syntax
+ # example:
+ # - name: [key]
+ # value: [value]
+ - ${{ if ne(variable.name, '') }}:
+ - name: ${{ variable.name }}
+ value: ${{ variable.value }}
+
+ # handle variable groups
+ - ${{ if ne(variable.group, '') }}:
+ - group: ${{ variable.group }}
+
+ # handle key-value variable syntax.
+ # example:
+ # - [key]: [value]
+ - ${{ if and(eq(variable.name, ''), eq(variable.group, '')) }}:
+ - ${{ each pair in variable }}:
+ - name: ${{ pair.key }}
+ value: ${{ pair.value }}
+
+ # DotNet-HelixApi-Access provides 'HelixApiAccessToken' for internal builds
+ - ${{ if and(eq(parameters.enableTelemetry, 'true'), eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
+ - group: DotNet-HelixApi-Access
+
+ ${{ if ne(parameters.workspace, '') }}:
+ workspace: ${{ parameters.workspace }}
+
+ steps:
+ - ${{ if ne(parameters.preSteps, '') }}:
+ - ${{ each preStep in parameters.preSteps }}:
+ - ${{ preStep }}
+
+ - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
+ - ${{ if eq(parameters.enableMicrobuild, 'true') }}:
+ - task: MicroBuildSigningPlugin@2
+ displayName: Install MicroBuild plugin
+ inputs:
+ signType: $(_SignType)
+ zipSources: false
+ feedSource: https://dnceng.pkgs.visualstudio.com/_packaging/MicroBuildToolset/nuget/v3/index.json
+ env:
+ TeamName: $(_TeamName)
+ continueOnError: ${{ parameters.continueOnError }}
+ condition: and(succeeded(), in(variables['_SignType'], 'real', 'test'), eq(variables['Agent.Os'], 'Windows_NT'))
+
+ - task: NuGetAuthenticate@0
+
+ - ${{ if or(eq(parameters.artifacts.download, 'true'), ne(parameters.artifacts.download, '')) }}:
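+    # 'artifacts.download: true' uses the default artifact name/path/pattern; an object value can override each of them.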
+ - task: DownloadPipelineArtifact@2
+ inputs:
+ buildType: current
+ artifactName: ${{ coalesce(parameters.artifacts.download.name, 'Artifacts_$(Agent.OS)_$(_BuildConfig)') }}
+ targetPath: ${{ coalesce(parameters.artifacts.download.path, 'artifacts') }}
+ itemPattern: ${{ coalesce(parameters.artifacts.download.pattern, '**') }}
+
+ - ${{ each step in parameters.steps }}:
+ - ${{ step }}
+
+ - ${{ if eq(parameters.enableRichCodeNavigation, true) }}:
+ - task: RichCodeNavIndexer@0
+ displayName: RichCodeNav Upload
+ inputs:
+ languages: 'csharp'
+ environment: ${{ coalesce(parameters.richCodeNavigationEnvironment, 'prod') }}
+ richNavLogOutputDirectory: $(Build.SourcesDirectory)/artifacts/bin
+ continueOnError: true
+
+ - ${{ if eq(parameters.enableMicrobuild, 'true') }}:
+ - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
+ - task: MicroBuildCleanup@1
+ displayName: Execute Microbuild cleanup tasks
+ condition: and(always(), in(variables['_SignType'], 'real', 'test'), eq(variables['Agent.Os'], 'Windows_NT'))
+ continueOnError: ${{ parameters.continueOnError }}
+ env:
+ TeamName: $(_TeamName)
+
+ - ${{ if ne(parameters.artifacts.publish, '') }}:
+ - ${{ if or(eq(parameters.artifacts.publish.artifacts, 'true'), ne(parameters.artifacts.publish.artifacts, '')) }}:
+ - task: CopyFiles@2
+ displayName: Gather binaries for publish to artifacts
+ inputs:
+ SourceFolder: 'artifacts/bin'
+ Contents: '**'
+ TargetFolder: '$(Build.ArtifactStagingDirectory)/artifacts/bin'
+ - task: CopyFiles@2
+ displayName: Gather packages for publish to artifacts
+ inputs:
+ SourceFolder: 'artifacts/packages'
+ Contents: '**'
+ TargetFolder: '$(Build.ArtifactStagingDirectory)/artifacts/packages'
+ - task: PublishBuildArtifacts@1
+ displayName: Publish pipeline artifacts
+ inputs:
+ PathtoPublish: '$(Build.ArtifactStagingDirectory)/artifacts'
+ PublishLocation: Container
+          ArtifactName: ${{ coalesce(parameters.artifacts.publish.artifacts.name, 'Artifacts_$(Agent.Os)_$(_BuildConfig)') }}
+ continueOnError: true
+ condition: always()
+ - ${{ if or(eq(parameters.artifacts.publish.logs, 'true'), ne(parameters.artifacts.publish.logs, '')) }}:
+ - publish: artifacts/log
+ artifact: ${{ coalesce(parameters.artifacts.publish.logs.name, 'Logs_Build_$(Agent.Os)_$(_BuildConfig)') }}
+ displayName: Publish logs
+ continueOnError: true
+ condition: always()
+ - ${{ if or(eq(parameters.artifacts.publish.manifests, 'true'), ne(parameters.artifacts.publish.manifests, '')) }}:
+ - ${{ if and(ne(parameters.enablePublishUsingPipelines, 'true'), eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
+ - task: CopyFiles@2
+ displayName: Gather Asset Manifests
+ inputs:
+ SourceFolder: '$(Build.SourcesDirectory)/artifacts/log/$(_BuildConfig)/AssetManifest'
+ TargetFolder: '$(Build.ArtifactStagingDirectory)/AssetManifests'
+ continueOnError: ${{ parameters.continueOnError }}
+ condition: and(succeeded(), eq(variables['_DotNetPublishToBlobFeed'], 'true'))
+
+ - task: PublishBuildArtifacts@1
+ displayName: Push Asset Manifests
+ inputs:
+ PathtoPublish: '$(Build.ArtifactStagingDirectory)/AssetManifests'
+ PublishLocation: Container
+ ArtifactName: AssetManifests
+ continueOnError: ${{ parameters.continueOnError }}
+ condition: and(succeeded(), eq(variables['_DotNetPublishToBlobFeed'], 'true'))
+
+ - ${{ if ne(parameters.enablePublishBuildArtifacts, 'false') }}:
+ - task: PublishBuildArtifacts@1
+ displayName: Publish Logs
+ inputs:
+ PathtoPublish: '$(Build.SourcesDirectory)/artifacts/log/$(_BuildConfig)'
+ PublishLocation: Container
+ ArtifactName: ${{ coalesce(parameters.enablePublishBuildArtifacts.artifactName, '$(Agent.Os)_$(Agent.JobName)' ) }}
+ continueOnError: true
+ condition: always()
+
+ - ${{ if eq(parameters.enablePublishTestResults, 'true') }}:
+ - task: PublishTestResults@2
+ displayName: Publish Test Results
+ inputs:
+ testResultsFormat: 'xUnit'
+ testResultsFiles: '*.xml'
+ searchFolder: '$(Build.SourcesDirectory)/artifacts/TestResults/$(_BuildConfig)'
+ testRunTitle: ${{ coalesce(parameters.testRunTitle, parameters.name, '$(System.JobName)') }}-xunit
+ mergeTestResults: ${{ parameters.mergeTestResults }}
+ continueOnError: true
+ condition: always()
+
+ - ${{ if and(eq(parameters.enablePublishBuildAssets, true), ne(parameters.enablePublishUsingPipelines, 'true'), eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
+ - task: CopyFiles@2
+ displayName: Gather Asset Manifests
+ inputs:
+ SourceFolder: '$(Build.SourcesDirectory)/artifacts/log/$(_BuildConfig)/AssetManifest'
+ TargetFolder: '$(Build.StagingDirectory)/AssetManifests'
+ continueOnError: ${{ parameters.continueOnError }}
+ condition: and(succeeded(), eq(variables['_DotNetPublishToBlobFeed'], 'true'))
+
+ - task: PublishBuildArtifacts@1
+ displayName: Push Asset Manifests
+ inputs:
+ PathtoPublish: '$(Build.StagingDirectory)/AssetManifests'
+ PublishLocation: Container
+ ArtifactName: AssetManifests
+ continueOnError: ${{ parameters.continueOnError }}
+ condition: and(succeeded(), eq(variables['_DotNetPublishToBlobFeed'], 'true'))
+
+ - ${{ if eq(parameters.useBuildManifest, true) }}:
+ - task: PublishBuildArtifacts@1
+ displayName: Publish Build Manifest
+ inputs:
+ PathToPublish: '$(Build.SourcesDirectory)/artifacts/log/$(_BuildConfig)/manifest.props'
+ PublishLocation: Container
+ ArtifactName: BuildManifests
+ continueOnError: ${{ parameters.continueOnError }}
diff --git a/eng/common/templates/job/performance.yml b/eng/common/templates/job/performance.yml
new file mode 100644
index 0000000000..f877fd7a89
--- /dev/null
+++ b/eng/common/templates/job/performance.yml
@@ -0,0 +1,95 @@
+parameters:
+  steps: [] # optional -- any additional steps that need to happen before pulling down the performance repo and sending the performance benchmarks to helix (i.e., building your repo)
+ variables: [] # optional -- list of additional variables to send to the template
+ jobName: '' # required -- job name
+ displayName: '' # optional -- display name for the job. Will use jobName if not passed
+ pool: '' # required -- name of the Build pool
+ container: '' # required -- name of the container
+ osGroup: '' # required -- operating system for the job
+ extraSetupParameters: '' # optional -- extra arguments to pass to the setup script
+ frameworks: ['netcoreapp3.0'] # optional -- list of frameworks to run against
+ continueOnError: 'false' # optional -- determines whether to continue the build if the step errors
+ dependsOn: '' # optional -- dependencies of the job
+ timeoutInMinutes: 320 # optional -- timeout for the job
+  enableTelemetry: false # optional -- enable telemetry
+
+jobs:
+- template: ../jobs/jobs.yml
+ parameters:
+ dependsOn: ${{ parameters.dependsOn }}
+ enableTelemetry: ${{ parameters.enableTelemetry }}
+ enablePublishBuildArtifacts: true
+ continueOnError: ${{ parameters.continueOnError }}
+
+ jobs:
+ - job: '${{ parameters.jobName }}'
+
+ ${{ if ne(parameters.displayName, '') }}:
+ displayName: '${{ parameters.displayName }}'
+ ${{ if eq(parameters.displayName, '') }}:
+ displayName: '${{ parameters.jobName }}'
+
+ timeoutInMinutes: ${{ parameters.timeoutInMinutes }}
+
+ variables:
+
+ - ${{ each variable in parameters.variables }}:
+ - ${{ if ne(variable.name, '') }}:
+ - name: ${{ variable.name }}
+ value: ${{ variable.value }}
+ - ${{ if ne(variable.group, '') }}:
+ - group: ${{ variable.group }}
+
+ - IsInternal: ''
+ - HelixApiAccessToken: ''
+ - HelixPreCommand: ''
+
+ - ${{ if and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
+ - ${{ if eq( parameters.osGroup, 'Windows_NT') }}:
+ - HelixPreCommand: 'set "PERFLAB_UPLOAD_TOKEN=$(PerfCommandUploadToken)"'
+ - IsInternal: -Internal
+ - ${{ if ne(parameters.osGroup, 'Windows_NT') }}:
+ - HelixPreCommand: 'export PERFLAB_UPLOAD_TOKEN="$(PerfCommandUploadTokenLinux)"'
+ - IsInternal: --internal
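+      # Note the switch spelling differs: performance-setup.ps1 takes -Internal, performance-setup.sh takes --internal.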
+
+ - group: DotNet-HelixApi-Access
+ - group: dotnet-benchview
+
+ workspace:
+ clean: all
+ pool:
+ ${{ parameters.pool }}
+ container: ${{ parameters.container }}
+ strategy:
+ matrix:
+ ${{ each framework in parameters.frameworks }}:
+ ${{ framework }}:
+ _Framework: ${{ framework }}
+ steps:
+ - checkout: self
+ clean: true
+ # Run all of the steps to setup repo
+ - ${{ each step in parameters.steps }}:
+ - ${{ step }}
+ - powershell: $(Build.SourcesDirectory)\eng\common\performance\performance-setup.ps1 $(IsInternal) -Framework $(_Framework) ${{ parameters.extraSetupParameters }}
+ displayName: Performance Setup (Windows)
+ condition: and(succeeded(), eq(variables['Agent.Os'], 'Windows_NT'))
+ continueOnError: ${{ parameters.continueOnError }}
+ - script: $(Build.SourcesDirectory)/eng/common/performance/performance-setup.sh $(IsInternal) --framework $(_Framework) ${{ parameters.extraSetupParameters }}
+ displayName: Performance Setup (Unix)
+ condition: and(succeeded(), ne(variables['Agent.Os'], 'Windows_NT'))
+ continueOnError: ${{ parameters.continueOnError }}
+ - script: $(Python) $(PerformanceDirectory)/scripts/ci_setup.py $(SetupArguments)
+ displayName: Run ci setup script
+ # Run perf testing in helix
+ - template: /eng/common/templates/steps/perf-send-to-helix.yml
+ parameters:
+ HelixSource: '$(HelixSourcePrefix)/$(Build.Repository.Name)/$(Build.SourceBranch)' # sources must start with pr/, official/, prodcon/, or agent/
+ HelixType: 'test/performance/$(Kind)/$(_Framework)/$(Architecture)'
+ HelixAccessToken: $(HelixApiAccessToken)
+ HelixTargetQueues: $(Queue)
+ HelixPreCommands: $(HelixPreCommand)
+ Creator: $(Creator)
+ WorkItemTimeout: 4:00 # 4 hours
+      WorkItemDirectory: '$(WorkItemDirectory)' # WorkItemDirectory cannot be empty, so we send it some docs to keep it happy
+ CorrelationPayloadDirectory: '$(PayloadDirectory)' # it gets checked out to a folder with shorter path than WorkItemDirectory so we can avoid file name too long exceptions
\ No newline at end of file
diff --git a/eng/common/templates/job/publish-build-assets.yml b/eng/common/templates/job/publish-build-assets.yml
new file mode 100644
index 0000000000..d0c3cc2b3b
--- /dev/null
+++ b/eng/common/templates/job/publish-build-assets.yml
@@ -0,0 +1,93 @@
+parameters:
+ configuration: 'Debug'
+
+ # Optional: condition for the job to run
+ condition: ''
+
+ # Optional: 'true' if future jobs should run even if this job fails
+ continueOnError: false
+
+ # Optional: dependencies of the job
+ dependsOn: ''
+
+ # Optional: Include PublishBuildArtifacts task
+ enablePublishBuildArtifacts: false
+
+ # Optional: A defined YAML pool - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#pool
+ pool: {}
+
+ # Optional: should run as a public build even in the internal project
+ # if 'true', the build won't run any of the internal only steps, even if it is running in non-public projects.
+ runAsPublic: false
+
+ # Optional: whether the build's artifacts will be published using release pipelines or direct feed publishing
+ publishUsingPipelines: false
+
+jobs:
+- job: Asset_Registry_Publish
+
+ dependsOn: ${{ parameters.dependsOn }}
+
+ displayName: Publish to Build Asset Registry
+
+ pool: ${{ parameters.pool }}
+
+ variables:
+ - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
+ - name: _BuildConfig
+ value: ${{ parameters.configuration }}
+ - group: Publish-Build-Assets
+ # Skip component governance and codesign validation for SDL. These jobs
+ # create no content.
+ - name: skipComponentGovernanceDetection
+ value: true
+ - name: runCodesignValidationInjection
+ value: false
+
+ steps:
+ - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
+ - task: DownloadBuildArtifacts@0
+ displayName: Download artifact
+ inputs:
+ artifactName: AssetManifests
+ downloadPath: '$(Build.StagingDirectory)/Download'
+ condition: ${{ parameters.condition }}
+ continueOnError: ${{ parameters.continueOnError }}
+
+ - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
+ - task: NuGetAuthenticate@0
+
+ - task: PowerShell@2
+ displayName: Publish Build Assets
+ inputs:
+ filePath: eng\common\sdk-task.ps1
+ arguments: -task PublishBuildAssets -restore -msbuildEngine dotnet
+ /p:ManifestsPath='$(Build.StagingDirectory)/Download/AssetManifests'
+ /p:BuildAssetRegistryToken=$(MaestroAccessToken)
+ /p:MaestroApiEndpoint=https://maestro-prod.westus2.cloudapp.azure.com
+ /p:PublishUsingPipelines=${{ parameters.publishUsingPipelines }}
+ /p:Configuration=$(_BuildConfig)
+ /p:OfficialBuildId=$(Build.BuildNumber)
+ condition: ${{ parameters.condition }}
+ continueOnError: ${{ parameters.continueOnError }}
+
+ - task: powershell@2
+ displayName: Create ReleaseConfigs Artifact
+ inputs:
+ targetType: inline
+ script: |
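+        # One value per line: the BAR build id, the default channel id(s), and the IsStableBuild flag.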
+ Add-Content -Path "$(Build.StagingDirectory)/ReleaseConfigs.txt" -Value $(BARBuildId)
+ Add-Content -Path "$(Build.StagingDirectory)/ReleaseConfigs.txt" -Value "$(DefaultChannels)"
+ Add-Content -Path "$(Build.StagingDirectory)/ReleaseConfigs.txt" -Value $(IsStableBuild)
+
+ - task: PublishBuildArtifacts@1
+ displayName: Publish ReleaseConfigs Artifact
+ inputs:
+ PathtoPublish: '$(Build.StagingDirectory)/ReleaseConfigs.txt'
+ PublishLocation: Container
+ ArtifactName: ReleaseConfigs
+
+ - ${{ if eq(parameters.enablePublishBuildArtifacts, 'true') }}:
+ - template: /eng/common/templates/steps/publish-logs.yml
+ parameters:
+ JobLabel: 'Publish_Artifacts_Logs'
diff --git a/eng/common/templates/jobs/jobs.yml b/eng/common/templates/jobs/jobs.yml
new file mode 100644
index 0000000000..c08225a9a9
--- /dev/null
+++ b/eng/common/templates/jobs/jobs.yml
@@ -0,0 +1,72 @@
+parameters:
+ # See schema documentation in /Documentation/AzureDevOps/TemplateSchema.md
+ continueOnError: false
+
+ # Optional: Include PublishBuildArtifacts task
+ enablePublishBuildArtifacts: false
+
+ # Optional: Enable publishing using release pipelines
+ enablePublishUsingPipelines: false
+
+ graphFileGeneration:
+ # Optional: Enable generating the graph files at the end of the build
+ enabled: false
+ # Optional: Include toolset dependencies in the generated graph files
+ includeToolset: false
+
+ # Required: A collection of jobs to run - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job
+ jobs: []
+
+ # Optional: Override automatically derived dependsOn value for "publish build assets" job
+ publishBuildAssetsDependsOn: ''
+
+ # Optional: should run as a public build even in the internal project
+ # if 'true', the build won't run any of the internal only steps, even if it is running in non-public projects.
+ runAsPublic: false
+
+# Internal resources (telemetry, microbuild) can only be accessed from non-public projects,
+# and some (Microbuild) should only be applied to non-PR cases for internal builds.
+
+jobs:
+- ${{ each job in parameters.jobs }}:
+ - template: ../job/job.yml
+ parameters:
+ # pass along parameters
+ ${{ each parameter in parameters }}:
+ ${{ if ne(parameter.key, 'jobs') }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
+
+ # pass along job properties
+ ${{ each property in job }}:
+ ${{ if ne(property.key, 'job') }}:
+ ${{ property.key }}: ${{ property.value }}
+
+ name: ${{ job.job }}
+
+- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
+ - ${{ if or(eq(parameters.enablePublishBuildAssets, true), eq(parameters.artifacts.publish.manifests, 'true'), ne(parameters.artifacts.publish.manifests, '')) }}:
+ - template: ../job/publish-build-assets.yml
+ parameters:
+ continueOnError: ${{ parameters.continueOnError }}
+ dependsOn:
+ - ${{ if ne(parameters.publishBuildAssetsDependsOn, '') }}:
+ - ${{ each job in parameters.publishBuildAssetsDependsOn }}:
+ - ${{ job.job }}
+ - ${{ if eq(parameters.publishBuildAssetsDependsOn, '') }}:
+ - ${{ each job in parameters.jobs }}:
+ - ${{ job.job }}
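+      # With no explicit publishBuildAssetsDependsOn, the publish job depends on every job in parameters.jobs.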
+ pool:
+ vmImage: vs2017-win2016
+ runAsPublic: ${{ parameters.runAsPublic }}
+ publishUsingPipelines: ${{ parameters.enablePublishUsingPipelines }}
+ enablePublishBuildArtifacts: ${{ parameters.enablePublishBuildArtifacts }}
+
+ - ${{ if eq(parameters.graphFileGeneration.enabled, true) }}:
+ - template: ../job/generate-graph-files.yml
+ parameters:
+ continueOnError: ${{ parameters.continueOnError }}
+ includeToolset: ${{ parameters.graphFileGeneration.includeToolset }}
+ dependsOn:
+ - Asset_Registry_Publish
+ pool:
+ vmImage: vs2017-win2016
diff --git a/eng/common/templates/phases/base.yml b/eng/common/templates/phases/base.yml
new file mode 100644
index 0000000000..0123cf43b1
--- /dev/null
+++ b/eng/common/templates/phases/base.yml
@@ -0,0 +1,130 @@
+parameters:
+ # Optional: Clean sources before building
+ clean: true
+
+ # Optional: Git fetch depth
+ fetchDepth: ''
+
+ # Optional: name of the phase (not specifying phase name may cause name collisions)
+ name: ''
+ # Optional: display name of the phase
+ displayName: ''
+
+ # Optional: condition for the job to run
+ condition: ''
+
+ # Optional: dependencies of the phase
+ dependsOn: ''
+
+ # Required: A defined YAML queue
+ queue: {}
+
+ # Required: build steps
+ steps: []
+
+ # Optional: variables
+ variables: {}
+
+ # Optional: should run as a public build even in the internal project
+ # if 'true', the build won't run any of the internal only steps, even if it is running in non-public projects.
+ runAsPublic: false
+
+ ## Telemetry variables
+
+ # Optional: enable sending telemetry
+ # if 'true', these "variables" must be specified in the variables object or as part of the queue matrix
+ # _HelixBuildConfig - differentiate between Debug, Release, other
+ # _HelixSource - Example: build/product
+ # _HelixType - Example: official/dotnet/arcade/$(Build.SourceBranch)
+ enableTelemetry: false
+
+ # Optional: Enable installing Microbuild plugin
+ # if 'true', these "variables" must be specified in the variables object or as part of the queue matrix
+ # _TeamName - the name of your team
+ # _SignType - 'test' or 'real'
+ enableMicrobuild: false
+
+# Internal resources (telemetry, microbuild) can only be accessed from non-public projects,
+# and some (Microbuild) should only be applied to non-PR cases for internal builds.
+
+phases:
+- phase: ${{ parameters.name }}
+
+ ${{ if ne(parameters.displayName, '') }}:
+ displayName: ${{ parameters.displayName }}
+
+ ${{ if ne(parameters.condition, '') }}:
+ condition: ${{ parameters.condition }}
+
+ ${{ if ne(parameters.dependsOn, '') }}:
+ dependsOn: ${{ parameters.dependsOn }}
+
+ queue: ${{ parameters.queue }}
+
+ ${{ if ne(parameters.variables, '') }}:
+ variables:
+ ${{ insert }}: ${{ parameters.variables }}
+
+ steps:
+ - checkout: self
+ clean: ${{ parameters.clean }}
+ ${{ if ne(parameters.fetchDepth, '') }}:
+ fetchDepth: ${{ parameters.fetchDepth }}
+
+ - ${{ if eq(parameters.enableTelemetry, 'true') }}:
+ - template: /eng/common/templates/steps/telemetry-start.yml
+ parameters:
+ buildConfig: $(_HelixBuildConfig)
+ helixSource: $(_HelixSource)
+ helixType: $(_HelixType)
+ runAsPublic: ${{ parameters.runAsPublic }}
+
+ - ${{ if eq(parameters.enableMicrobuild, 'true') }}:
+ # Internal only resource, and Microbuild signing shouldn't be applied to PRs.
+ - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
+ - task: MicroBuildSigningPlugin@2
+ displayName: Install MicroBuild plugin
+ inputs:
+ signType: $(_SignType)
+ zipSources: false
+ feedSource: https://dnceng.pkgs.visualstudio.com/_packaging/MicroBuildToolset/nuget/v3/index.json
+
+ env:
+ TeamName: $(_TeamName)
+ continueOnError: false
+ condition: and(succeeded(), in(variables['_SignType'], 'real', 'test'), eq(variables['Agent.Os'], 'Windows_NT'))
+
+ # Run provided build steps
+ - ${{ parameters.steps }}
+
+ - ${{ if eq(parameters.enableMicrobuild, 'true') }}:
+ # Internal only resources
+ - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
+ - task: MicroBuildCleanup@1
+ displayName: Execute Microbuild cleanup tasks
+ condition: and(always(), in(variables['_SignType'], 'real', 'test'), eq(variables['Agent.Os'], 'Windows_NT'))
+ env:
+ TeamName: $(_TeamName)
+
+ - ${{ if eq(parameters.enableTelemetry, 'true') }}:
+ - template: /eng/common/templates/steps/telemetry-end.yml
+ parameters:
+ helixSource: $(_HelixSource)
+ helixType: $(_HelixType)
+
+ - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
+ - task: CopyFiles@2
+ displayName: Gather Asset Manifests
+ inputs:
+ SourceFolder: '$(Build.SourcesDirectory)/artifacts/log/$(_BuildConfig)/AssetManifest'
+ TargetFolder: '$(Build.StagingDirectory)/AssetManifests'
+ continueOnError: false
+ condition: and(succeeded(), eq(variables['_DotNetPublishToBlobFeed'], 'true'))
+ - task: PublishBuildArtifacts@1
+ displayName: Push Asset Manifests
+ inputs:
+ PathtoPublish: '$(Build.StagingDirectory)/AssetManifests'
+ PublishLocation: Container
+ ArtifactName: AssetManifests
+ continueOnError: false
+ condition: and(succeeded(), eq(variables['_DotNetPublishToBlobFeed'], 'true'))
diff --git a/eng/common/templates/phases/publish-build-assets.yml b/eng/common/templates/phases/publish-build-assets.yml
new file mode 100644
index 0000000000..a0a8074282
--- /dev/null
+++ b/eng/common/templates/phases/publish-build-assets.yml
@@ -0,0 +1,51 @@
+parameters:
+ dependsOn: ''
+ queue: {}
+ configuration: 'Debug'
+ condition: succeeded()
+ continueOnError: false
+ runAsPublic: false
+ publishUsingPipelines: false
+phases:
+ - phase: Asset_Registry_Publish
+ displayName: Publish to Build Asset Registry
+ dependsOn: ${{ parameters.dependsOn }}
+ queue: ${{ parameters.queue }}
+ variables:
+ _BuildConfig: ${{ parameters.configuration }}
+ steps:
+ - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
+ - task: DownloadBuildArtifacts@0
+ displayName: Download artifact
+ inputs:
+ artifactName: AssetManifests
+ downloadPath: '$(Build.StagingDirectory)/Download'
+ condition: ${{ parameters.condition }}
+ continueOnError: ${{ parameters.continueOnError }}
+ - task: AzureKeyVault@1
+ inputs:
+ azureSubscription: 'DotNet-Engineering-Services_KeyVault'
+ KeyVaultName: EngKeyVault
+ SecretsFilter: 'MaestroAccessToken'
+ condition: ${{ parameters.condition }}
+ continueOnError: ${{ parameters.continueOnError }}
+ - task: PowerShell@2
+ displayName: Publish Build Assets
+ inputs:
+ filePath: eng\common\sdk-task.ps1
+ arguments: -task PublishBuildAssets -restore -msbuildEngine dotnet
+ /p:ManifestsPath='$(Build.StagingDirectory)/Download/AssetManifests'
+ /p:BuildAssetRegistryToken=$(MaestroAccessToken)
+ /p:MaestroApiEndpoint=https://maestro-prod.westus2.cloudapp.azure.com
+ /p:PublishUsingPipelines=${{ parameters.publishUsingPipelines }}
+ /p:Configuration=$(_BuildConfig)
+ condition: ${{ parameters.condition }}
+ continueOnError: ${{ parameters.continueOnError }}
+ - task: PublishBuildArtifacts@1
+ displayName: Publish Logs to VSTS
+ inputs:
+ PathtoPublish: '$(Build.SourcesDirectory)/artifacts/log/$(_BuildConfig)'
+ PublishLocation: Container
+ ArtifactName: $(Agent.Os)_Asset_Registry_Publish
+ continueOnError: true
+ condition: always()
diff --git a/eng/common/templates/post-build/channels/generic-internal-channel.yml b/eng/common/templates/post-build/channels/generic-internal-channel.yml
new file mode 100644
index 0000000000..7ae5255921
--- /dev/null
+++ b/eng/common/templates/post-build/channels/generic-internal-channel.yml
@@ -0,0 +1,182 @@
+parameters:
+ BARBuildId: ''
+ PromoteToChannelIds: ''
+ artifactsPublishingAdditionalParameters: ''
+ dependsOn:
+ - Validate
+ publishInstallersAndChecksums: true
+ symbolPublishingAdditionalParameters: ''
+ stageName: ''
+ channelName: ''
+ channelId: ''
+ transportFeed: ''
+ shippingFeed: ''
+ symbolsFeed: ''
+
+stages:
+- stage: ${{ parameters.stageName }}
+ dependsOn: ${{ parameters.dependsOn }}
+ variables:
+ - template: ../common-variables.yml
+ displayName: ${{ parameters.channelName }} Publishing
+ jobs:
+ - template: ../setup-maestro-vars.yml
+ parameters:
+ BARBuildId: ${{ parameters.BARBuildId }}
+ PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
+
+ - job: publish_symbols
+ displayName: Symbol Publishing
+ dependsOn: setupMaestroVars
+ condition: contains(dependencies.setupMaestroVars.outputs['setReleaseVars.TargetChannels'], format('[{0}]', ${{ parameters.channelId }} ))
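+    # Runs only when this channel's id appears in the TargetChannels list computed by setupMaestroVars.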
+ variables:
+ - group: DotNet-Symbol-Server-Pats
+ - name: AzDOProjectName
+ value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOProjectName'] ]
+ - name: AzDOPipelineId
+ value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOPipelineId'] ]
+ - name: AzDOBuildId
+ value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOBuildId'] ]
+ pool:
+ vmImage: 'windows-2019'
+ steps:
+ # This is necessary whenever we want to publish/restore to an AzDO private feed
+ - task: NuGetAuthenticate@0
+ displayName: 'Authenticate to AzDO Feeds'
+
+ - task: DownloadBuildArtifacts@0
+ displayName: Download Build Assets
+ continueOnError: true
+ inputs:
+ buildType: specific
+ buildVersionToDownload: specific
+ project: $(AzDOProjectName)
+ pipeline: $(AzDOPipelineId)
+ buildId: $(AzDOBuildId)
+ downloadType: 'specific'
+ itemPattern: |
+ PdbArtifacts/**
+ BlobArtifacts/**
+ downloadPath: '$(Build.ArtifactStagingDirectory)'
+
+ # This is necessary whenever we want to publish/restore to an AzDO private feed
+ # Since sdk-task.ps1 tries to restore packages we need to do this authentication here
+ # otherwise it'll complain about accessing a private feed.
+ - task: NuGetAuthenticate@0
+ displayName: 'Authenticate to AzDO Feeds'
+
+ - task: PowerShell@2
+ displayName: Enable cross-org publishing
+ inputs:
+ filePath: eng\common\enable-cross-org-publishing.ps1
+ arguments: -token $(dn-bot-dnceng-artifact-feeds-rw)
+
+ - task: PowerShell@2
+ displayName: Publish
+ inputs:
+ filePath: eng\common\sdk-task.ps1
+ arguments: -task PublishToSymbolServers -restore -msbuildEngine dotnet
+ /p:DotNetSymbolServerTokenMsdl=$(microsoft-symbol-server-pat)
+ /p:DotNetSymbolServerTokenSymWeb=$(symweb-symbol-server-pat)
+ /p:PDBArtifactsDirectory='$(Build.ArtifactStagingDirectory)/PDBArtifacts/'
+ /p:BlobBasePath='$(Build.ArtifactStagingDirectory)/BlobArtifacts/'
+ /p:SymbolPublishingExclusionsFile='$(Build.SourcesDirectory)/eng/SymbolPublishingExclusionsFile.txt'
+ /p:Configuration=Release
+ /p:PublishToMSDL=false
+ ${{ parameters.symbolPublishingAdditionalParameters }}
+
+ - template: ../../steps/publish-logs.yml
+ parameters:
+ StageLabel: '${{ parameters.stageName }}'
+ JobLabel: 'SymbolPublishing'
+
+ - job: publish_assets
+ displayName: Publish Assets
+ dependsOn: setupMaestroVars
+ timeoutInMinutes: 120
+ variables:
+ - name: BARBuildId
+ value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.BARBuildId'] ]
+ - name: IsStableBuild
+ value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.IsStableBuild'] ]
+ - name: AzDOProjectName
+ value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOProjectName'] ]
+ - name: AzDOPipelineId
+ value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOPipelineId'] ]
+ - name: AzDOBuildId
+ value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOBuildId'] ]
+ condition: contains(dependencies.setupMaestroVars.outputs['setReleaseVars.TargetChannels'], format('[{0}]', ${{ parameters.channelId }} ))
+ pool:
+ vmImage: 'windows-2019'
+ steps:
+ - task: DownloadBuildArtifacts@0
+ displayName: Download Build Assets
+ continueOnError: true
+ inputs:
+ buildType: specific
+ buildVersionToDownload: specific
+ project: $(AzDOProjectName)
+ pipeline: $(AzDOPipelineId)
+ buildId: $(AzDOBuildId)
+ downloadType: 'specific'
+ itemPattern: |
+ PackageArtifacts/**
+ BlobArtifacts/**
+ AssetManifests/**
+ downloadPath: '$(Build.ArtifactStagingDirectory)'
+
+ - task: NuGetToolInstaller@1
+ displayName: 'Install NuGet.exe'
+
+ # This is necessary whenever we want to publish/restore to an AzDO private feed
+ - task: NuGetAuthenticate@0
+ displayName: 'Authenticate to AzDO Feeds'
+
+ - task: PowerShell@2
+ displayName: Enable cross-org publishing
+ inputs:
+ filePath: eng\common\enable-cross-org-publishing.ps1
+ arguments: -token $(dn-bot-dnceng-artifact-feeds-rw)
+
+ - task: PowerShell@2
+ displayName: Publish Assets
+ inputs:
+ filePath: eng\common\sdk-task.ps1
+ arguments: -task PublishArtifactsInManifest -restore -msbuildEngine dotnet
+ /p:PublishingInfraVersion=2
+ /p:IsStableBuild=$(IsStableBuild)
+ /p:IsInternalBuild=$(IsInternalBuild)
+ /p:RepositoryName=$(Build.Repository.Name)
+ /p:CommitSha=$(Build.SourceVersion)
+ /p:NugetPath=$(NuGetExeToolPath)
+ /p:AzdoTargetFeedPAT='$(dn-bot-dnceng-universal-packages-rw)'
+ /p:AzureStorageTargetFeedPAT='$(dotnetfeed-storage-access-key-1)'
+ /p:BARBuildId=$(BARBuildId)
+ /p:MaestroApiEndpoint='$(MaestroApiEndPoint)'
+ /p:BuildAssetRegistryToken='$(MaestroApiAccessToken)'
+ /p:ManifestsBasePath='$(Build.ArtifactStagingDirectory)/AssetManifests/'
+ /p:BlobBasePath='$(Build.ArtifactStagingDirectory)/BlobArtifacts/'
+ /p:PackageBasePath='$(Build.ArtifactStagingDirectory)/PackageArtifacts/'
+ /p:Configuration=Release
+ /p:PublishInstallersAndChecksums=${{ parameters.publishInstallersAndChecksums }}
+ /p:ChecksumsTargetStaticFeed=$(InternalChecksumsBlobFeedUrl)
+ /p:ChecksumsAzureAccountKey=$(InternalChecksumsBlobFeedKey)
+ /p:InstallersTargetStaticFeed=$(InternalInstallersBlobFeedUrl)
+ /p:InstallersAzureAccountKey=$(InternalInstallersBlobFeedKey)
+ /p:AzureDevOpsStaticShippingFeed='${{ parameters.shippingFeed }}'
+ /p:AzureDevOpsStaticShippingFeedKey='$(dn-bot-dnceng-artifact-feeds-rw)'
+ /p:AzureDevOpsStaticTransportFeed='${{ parameters.transportFeed }}'
+ /p:AzureDevOpsStaticTransportFeedKey='$(dn-bot-dnceng-artifact-feeds-rw)'
+ /p:AzureDevOpsStaticSymbolsFeed='${{ parameters.symbolsFeed }}'
+ /p:AzureDevOpsStaticSymbolsFeedKey='$(dn-bot-dnceng-artifact-feeds-rw)'
+ /p:PublishToMSDL=false
+ ${{ parameters.artifactsPublishingAdditionalParameters }}
+
+ - template: ../../steps/publish-logs.yml
+ parameters:
+ StageLabel: '${{ parameters.stageName }}'
+ JobLabel: 'AssetsPublishing'
+
+ - template: ../../steps/add-build-to-channel.yml
+ parameters:
+ ChannelId: ${{ parameters.channelId }}
diff --git a/eng/common/templates/post-build/channels/generic-public-channel.yml b/eng/common/templates/post-build/channels/generic-public-channel.yml
new file mode 100644
index 0000000000..6cf39dbb29
--- /dev/null
+++ b/eng/common/templates/post-build/channels/generic-public-channel.yml
@@ -0,0 +1,184 @@
+parameters:
+ BARBuildId: ''
+ PromoteToChannelIds: ''
+ artifactsPublishingAdditionalParameters: ''
+ dependsOn:
+ - Validate
+ publishInstallersAndChecksums: true
+ symbolPublishingAdditionalParameters: ''
+ stageName: ''
+ channelName: ''
+ channelId: ''
+ transportFeed: ''
+ shippingFeed: ''
+ symbolsFeed: ''
+ # If the channel name is empty, no links will be generated
+ akaMSChannelName: ''
+
+stages:
+- stage: ${{ parameters.stageName }}
+ dependsOn: ${{ parameters.dependsOn }}
+ variables:
+ - template: ../common-variables.yml
+ displayName: ${{ parameters.channelName }} Publishing
+ jobs:
+ - template: ../setup-maestro-vars.yml
+ parameters:
+ BARBuildId: ${{ parameters.BARBuildId }}
+ PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
+
+ - job: publish_symbols
+ displayName: Symbol Publishing
+ dependsOn: setupMaestroVars
+ condition: contains(dependencies.setupMaestroVars.outputs['setReleaseVars.TargetChannels'], format('[{0}]', ${{ parameters.channelId }} ))
+ variables:
+ - group: DotNet-Symbol-Server-Pats
+ - name: AzDOProjectName
+ value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOProjectName'] ]
+ - name: AzDOPipelineId
+ value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOPipelineId'] ]
+ - name: AzDOBuildId
+ value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOBuildId'] ]
+ pool:
+ vmImage: 'windows-2019'
+ steps:
+ - task: DownloadBuildArtifacts@0
+ displayName: Download Build Assets
+ continueOnError: true
+ inputs:
+ buildType: specific
+ buildVersionToDownload: specific
+ project: $(AzDOProjectName)
+ pipeline: $(AzDOPipelineId)
+ buildId: $(AzDOBuildId)
+ downloadType: 'specific'
+ itemPattern: |
+ PdbArtifacts/**
+ BlobArtifacts/**
+ downloadPath: '$(Build.ArtifactStagingDirectory)'
+
+ # This is necessary whenever we want to publish/restore to an AzDO private feed
+      # Since sdk-task.ps1 tries to restore packages, we need to do this authentication here;
+      # otherwise it'll complain about accessing a private feed.
+ - task: NuGetAuthenticate@0
+ displayName: 'Authenticate to AzDO Feeds'
+
+ - task: PowerShell@2
+ displayName: Enable cross-org publishing
+ inputs:
+ filePath: eng\common\enable-cross-org-publishing.ps1
+ arguments: -token $(dn-bot-dnceng-artifact-feeds-rw)
+
+ - task: PowerShell@2
+ displayName: Publish
+ inputs:
+ filePath: eng\common\sdk-task.ps1
+ arguments: -task PublishToSymbolServers -restore -msbuildEngine dotnet
+ /p:DotNetSymbolServerTokenMsdl=$(microsoft-symbol-server-pat)
+ /p:DotNetSymbolServerTokenSymWeb=$(symweb-symbol-server-pat)
+ /p:PDBArtifactsDirectory='$(Build.ArtifactStagingDirectory)/PDBArtifacts/'
+ /p:BlobBasePath='$(Build.ArtifactStagingDirectory)/BlobArtifacts/'
+ /p:SymbolPublishingExclusionsFile='$(Build.SourcesDirectory)/eng/SymbolPublishingExclusionsFile.txt'
+ /p:Configuration=Release
+ ${{ parameters.symbolPublishingAdditionalParameters }}
+
+ - template: ../../steps/publish-logs.yml
+ parameters:
+ StageLabel: '${{ parameters.stageName }}'
+ JobLabel: 'SymbolPublishing'
+
+ - job: publish_assets
+ displayName: Publish Assets
+ dependsOn: setupMaestroVars
+ timeoutInMinutes: 120
+ variables:
+ - name: BARBuildId
+ value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.BARBuildId'] ]
+ - name: IsStableBuild
+ value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.IsStableBuild'] ]
+ - name: AzDOProjectName
+ value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOProjectName'] ]
+ - name: AzDOPipelineId
+ value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOPipelineId'] ]
+ - name: AzDOBuildId
+ value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOBuildId'] ]
+ - name: ArtifactsCategory
+ value: ${{ coalesce(variables._DotNetArtifactsCategory, '.NETCore') }}
+ condition: contains(dependencies.setupMaestroVars.outputs['setReleaseVars.TargetChannels'], format('[{0}]', ${{ parameters.channelId }} ))
+ pool:
+ vmImage: 'windows-2019'
+ steps:
+ - task: DownloadBuildArtifacts@0
+ displayName: Download Build Assets
+ continueOnError: true
+ inputs:
+ buildType: specific
+ buildVersionToDownload: specific
+ project: $(AzDOProjectName)
+ pipeline: $(AzDOPipelineId)
+ buildId: $(AzDOBuildId)
+ downloadType: 'specific'
+ itemPattern: |
+ PackageArtifacts/**
+ BlobArtifacts/**
+ AssetManifests/**
+ downloadPath: '$(Build.ArtifactStagingDirectory)'
+
+ - task: NuGetToolInstaller@1
+ displayName: 'Install NuGet.exe'
+
+ # This is necessary whenever we want to publish/restore to an AzDO private feed
+ - task: NuGetAuthenticate@0
+ displayName: 'Authenticate to AzDO Feeds'
+
+ - task: PowerShell@2
+ displayName: Enable cross-org publishing
+ inputs:
+ filePath: eng\common\enable-cross-org-publishing.ps1
+ arguments: -token $(dn-bot-dnceng-artifact-feeds-rw)
+
+ - task: PowerShell@2
+ displayName: Publish Assets
+ inputs:
+ filePath: eng\common\sdk-task.ps1
+ arguments: -task PublishArtifactsInManifest -restore -msbuildEngine dotnet
+ /p:PublishingInfraVersion=2
+ /p:ArtifactsCategory=$(ArtifactsCategory)
+ /p:IsStableBuild=$(IsStableBuild)
+ /p:IsInternalBuild=$(IsInternalBuild)
+ /p:RepositoryName=$(Build.Repository.Name)
+ /p:CommitSha=$(Build.SourceVersion)
+ /p:NugetPath=$(NuGetExeToolPath)
+ /p:AzdoTargetFeedPAT='$(dn-bot-dnceng-universal-packages-rw)'
+ /p:AzureStorageTargetFeedPAT='$(dotnetfeed-storage-access-key-1)'
+ /p:BARBuildId=$(BARBuildId)
+ /p:MaestroApiEndpoint='$(MaestroApiEndPoint)'
+ /p:BuildAssetRegistryToken='$(MaestroApiAccessToken)'
+ /p:ManifestsBasePath='$(Build.ArtifactStagingDirectory)/AssetManifests/'
+ /p:BlobBasePath='$(Build.ArtifactStagingDirectory)/BlobArtifacts/'
+ /p:PackageBasePath='$(Build.ArtifactStagingDirectory)/PackageArtifacts/'
+ /p:Configuration=Release
+ /p:PublishInstallersAndChecksums=${{ parameters.publishInstallersAndChecksums }}
+ /p:InstallersTargetStaticFeed=$(InstallersBlobFeedUrl)
+ /p:InstallersAzureAccountKey=$(dotnetcli-storage-key)
+ /p:ChecksumsTargetStaticFeed=$(ChecksumsBlobFeedUrl)
+ /p:ChecksumsAzureAccountKey=$(dotnetclichecksums-storage-key)
+ /p:AzureDevOpsStaticShippingFeed='${{ parameters.shippingFeed }}'
+ /p:AzureDevOpsStaticShippingFeedKey='$(dn-bot-dnceng-artifact-feeds-rw)'
+ /p:AzureDevOpsStaticTransportFeed='${{ parameters.transportFeed }}'
+ /p:AzureDevOpsStaticTransportFeedKey='$(dn-bot-dnceng-artifact-feeds-rw)'
+ /p:AzureDevOpsStaticSymbolsFeed='${{ parameters.symbolsFeed }}'
+ /p:AzureDevOpsStaticSymbolsFeedKey='$(dn-bot-dnceng-artifact-feeds-rw)'
+ /p:LatestLinkShortUrlPrefix=dotnet/'${{ parameters.akaMSChannelName }}'
+ /p:AkaMSClientId=$(akams-client-id)
+ /p:AkaMSClientSecret=$(akams-client-secret)
+ ${{ parameters.artifactsPublishingAdditionalParameters }}
+
+ - template: ../../steps/publish-logs.yml
+ parameters:
+ StageLabel: '${{ parameters.stageName }}'
+ JobLabel: 'AssetsPublishing'
+
+ - template: ../../steps/add-build-to-channel.yml
+ parameters:
+ ChannelId: ${{ parameters.channelId }}
diff --git a/eng/common/templates/post-build/common-variables.yml b/eng/common/templates/post-build/common-variables.yml
new file mode 100644
index 0000000000..c99fd75037
--- /dev/null
+++ b/eng/common/templates/post-build/common-variables.yml
@@ -0,0 +1,99 @@
+variables:
+ - group: AzureDevOps-Artifact-Feeds-Pats
+ - group: DotNet-Blob-Feed
+ - group: DotNet-DotNetCli-Storage
+ - group: DotNet-MSRC-Storage
+ - group: Publish-Build-Assets
+
+ # .NET Core 3.1 Dev
+ - name: PublicDevRelease_31_Channel_Id
+ value: 128
+
+ # .NET 5 Dev
+ - name: Net_5_Dev_Channel_Id
+ value: 131
+
+ # .NET Eng - Validation
+ - name: Net_Eng_Validation_Channel_Id
+ value: 9
+
+ # .NET Eng - Latest
+ - name: Net_Eng_Latest_Channel_Id
+ value: 2
+
+ # .NET 3 Eng - Validation
+ - name: NET_3_Eng_Validation_Channel_Id
+ value: 390
+
+ # .NET 3 Eng
+ - name: NetCore_3_Tools_Channel_Id
+ value: 344
+
+ # .NET Core 3.0 Internal Servicing
+ - name: InternalServicing_30_Channel_Id
+ value: 184
+
+ # .NET Core 3.0 Release
+ - name: PublicRelease_30_Channel_Id
+ value: 19
+
+ # .NET Core 3.1 Release
+ - name: PublicRelease_31_Channel_Id
+ value: 129
+
+ # General Testing
+ - name: GeneralTesting_Channel_Id
+ value: 529
+
+ # .NET Core 3.1 Blazor Features
+ - name: NetCore_31_Blazor_Features_Channel_Id
+ value: 531
+
+ # .NET Core Experimental
+ - name: NetCore_Experimental_Channel_Id
+ value: 562
+
+ # Whether the build is internal or not
+ - name: IsInternalBuild
+ value: ${{ and(ne(variables['System.TeamProject'], 'public'), contains(variables['Build.SourceBranch'], 'internal')) }}
+
+ # Default Maestro++ API Endpoint and API Version
+ - name: MaestroApiEndPoint
+ value: "https://maestro-prod.westus2.cloudapp.azure.com"
+ - name: MaestroApiAccessToken
+ value: $(MaestroAccessToken)
+ - name: MaestroApiVersion
+ value: "2020-02-20"
+
+ - name: SourceLinkCLIVersion
+ value: 3.0.0
+ - name: SymbolToolVersion
+ value: 1.0.1
+
+ # Feed Configurations
+ # These should include the suffix "/index.json"
+
+ # Default locations for Installers and checksums
+ # Public Locations
+ - name: ChecksumsBlobFeedUrl
+ value: https://dotnetclichecksums.blob.core.windows.net/dotnet/index.json
+ - name: InstallersBlobFeedUrl
+ value: https://dotnetcli.blob.core.windows.net/dotnet/index.json
+
+ # Private Locations
+ - name: InternalChecksumsBlobFeedUrl
+ value: https://dotnetclichecksumsmsrc.blob.core.windows.net/dotnet/index.json
+ - name: InternalChecksumsBlobFeedKey
+ value: $(dotnetclichecksumsmsrc-storage-key)
+
+ - name: InternalInstallersBlobFeedUrl
+ value: https://dotnetclimsrc.blob.core.windows.net/dotnet/index.json
+ - name: InternalInstallersBlobFeedKey
+ value: $(dotnetclimsrc-access-key)
+
+ # Skip component governance and codesign validation for SDL. These jobs
+ # create no content.
+ - name: skipComponentGovernanceDetection
+ value: true
+ - name: runCodesignValidationInjection
+ value: false
diff --git a/eng/common/templates/post-build/post-build.yml b/eng/common/templates/post-build/post-build.yml
new file mode 100644
index 0000000000..df06f5371e
--- /dev/null
+++ b/eng/common/templates/post-build/post-build.yml
@@ -0,0 +1,605 @@
+parameters:
+ # Which publishing infra should be used. THIS SHOULD MATCH THE VERSION ON THE BUILD MANIFEST.
+  # Publishing V2 optionally accepts outlining the publishing stages; the default is inline.
+ # Publishing V3 DOES NOT accept inlining the publishing stages.
+ publishingInfraVersion: 2
+  # When set to true, the publishing templates from the repo will be used;
+  # otherwise, Darc add-build-to-channel will be used to trigger the promotion pipeline.
+ inline: true
+
+  # Only used if inline==false. When set to true, the current build will stall until
+  # the Promotion Pipeline build finishes; otherwise, the current build will continue
+  # executing concurrently with the promotion build.
+ waitPublishingFinish: true
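+
+  # For example (hypothetical values), a consuming pipeline could opt into Darc-driven,
+  # outlined publishing like this:
+  #   - template: eng/common/templates/post-build/post-build.yml
+  #     parameters:
+  #       publishingInfraVersion: 3
+  #       inline: false
+  #       waitPublishingFinish: false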
+
+ BARBuildId: ''
+ PromoteToChannelIds: ''
+
+ enableSourceLinkValidation: false
+ enableSigningValidation: true
+ enableSymbolValidation: false
+ enableNugetValidation: true
+ publishInstallersAndChecksums: true
+ SDLValidationParameters:
+ enable: false
+ continueOnError: false
+ params: ''
+ artifactNames: ''
+ downloadArtifacts: true
+
+ # These parameters let the user customize the call to sdk-task.ps1 for publishing
+ # symbols & general artifacts as well as for signing validation
+ symbolPublishingAdditionalParameters: ''
+ artifactsPublishingAdditionalParameters: ''
+ signingValidationAdditionalParameters: ''
+ useBuildManifest: false
+
+ # Which stages should finish execution before post-build stages start
+ validateDependsOn:
+ - build
+ publishDependsOn:
+ - Validate
+
+  # Channel IDs instantiated in this file.
+  # When adding a new channel implementation, the call to `check-channel-consistency.ps1`
+  # needs to be updated with the new channel ID.
+ NetEngLatestChannelId: 2
+ NetEngValidationChannelId: 9
+ NetDev5ChannelId: 131
+ NetDev6ChannelId: 1296
+ GeneralTestingChannelId: 529
+ NETCoreToolingDevChannelId: 548
+ NETCoreToolingReleaseChannelId: 549
+ NETInternalToolingChannelId: 551
+ NETCoreExperimentalChannelId: 562
+ NetEngServicesIntChannelId: 678
+ NetEngServicesProdChannelId: 679
+ Net5Preview8ChannelId: 1155
+ Net5RC1ChannelId: 1157
+ Net5RC2ChannelId: 1329
+ NetCoreSDK313xxChannelId: 759
+ NetCoreSDK313xxInternalChannelId: 760
+ NetCoreSDK314xxChannelId: 921
+ NetCoreSDK314xxInternalChannelId: 922
+ VS166ChannelId: 1010
+ VS167ChannelId: 1011
+ VS168ChannelId: 1154
+ VSMasterChannelId: 1012
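+
+  # For example, wiring up a hypothetical new channel would mean adding its id here
+  # (say NetDev7ChannelId: 9999), appending ${{parameters.NetDev7ChannelId}} to the
+  # -AvailableChannelIds argument below, and instantiating a channel template for it
+  # at the bottom of this file.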
+
+stages:
+- stage: Validate
+ dependsOn: ${{ parameters.validateDependsOn }}
+ displayName: Validate Build Assets
+ variables:
+ - template: common-variables.yml
+ jobs:
+ - template: setup-maestro-vars.yml
+ parameters:
+ BARBuildId: ${{ parameters.BARBuildId }}
+ PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
+
+ - job:
+ displayName: Post-build Checks
+ dependsOn: setupMaestroVars
+ variables:
+ - name: TargetChannels
+ value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.TargetChannels'] ]
+ pool:
+ vmImage: 'windows-2019'
+ steps:
+ - task: PowerShell@2
+ displayName: Maestro Channels Consistency
+ inputs:
+ filePath: $(Build.SourcesDirectory)/eng/common/post-build/check-channel-consistency.ps1
+ arguments: -PromoteToChannels "$(TargetChannels)"
+ -AvailableChannelIds ${{parameters.NetEngLatestChannelId}},${{parameters.NetEngValidationChannelId}},${{parameters.NetDev5ChannelId}},${{parameters.NetDev6ChannelId}},${{parameters.GeneralTestingChannelId}},${{parameters.NETCoreToolingDevChannelId}},${{parameters.NETCoreToolingReleaseChannelId}},${{parameters.NETInternalToolingChannelId}},${{parameters.NETCoreExperimentalChannelId}},${{parameters.NetEngServicesIntChannelId}},${{parameters.NetEngServicesProdChannelId}},${{parameters.Net5Preview8ChannelId}},${{parameters.Net5RC1ChannelId}},${{parameters.Net5RC2ChannelId}},${{parameters.NetCoreSDK313xxChannelId}},${{parameters.NetCoreSDK313xxInternalChannelId}},${{parameters.NetCoreSDK314xxChannelId}},${{parameters.NetCoreSDK314xxInternalChannelId}},${{parameters.VS166ChannelId}},${{parameters.VS167ChannelId}},${{parameters.VS168ChannelId}},${{parameters.VSMasterChannelId}}
+
+ - job:
+ displayName: NuGet Validation
+ dependsOn: setupMaestroVars
+ condition: eq( ${{ parameters.enableNugetValidation }}, 'true')
+ pool:
+ vmImage: 'windows-2019'
+ variables:
+ - name: AzDOProjectName
+ value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOProjectName'] ]
+ - name: AzDOPipelineId
+ value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOPipelineId'] ]
+ - name: AzDOBuildId
+ value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOBuildId'] ]
+ steps:
+ - task: DownloadBuildArtifacts@0
+ displayName: Download Package Artifacts
+ inputs:
+ buildType: specific
+ buildVersionToDownload: specific
+ project: $(AzDOProjectName)
+ pipeline: $(AzDOPipelineId)
+ buildId: $(AzDOBuildId)
+ artifactName: PackageArtifacts
+
+ - task: PowerShell@2
+ displayName: Validate
+ inputs:
+ filePath: $(Build.SourcesDirectory)/eng/common/post-build/nuget-validation.ps1
+ arguments: -PackagesPath $(Build.ArtifactStagingDirectory)/PackageArtifacts/
+ -ToolDestinationPath $(Agent.BuildDirectory)/Extract/
+
+ - job:
+ displayName: Signing Validation
+ dependsOn: setupMaestroVars
+ condition: eq( ${{ parameters.enableSigningValidation }}, 'true')
+ variables:
+ - template: common-variables.yml
+ - name: AzDOProjectName
+ value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOProjectName'] ]
+ - name: AzDOPipelineId
+ value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOPipelineId'] ]
+ - name: AzDOBuildId
+ value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOBuildId'] ]
+ pool:
+ vmImage: 'windows-2019'
+ steps:
+ - ${{ if eq(parameters.useBuildManifest, true) }}:
+ - task: DownloadBuildArtifacts@0
+ displayName: Download build manifest
+ inputs:
+ buildType: specific
+ buildVersionToDownload: specific
+ project: $(AzDOProjectName)
+ pipeline: $(AzDOPipelineId)
+ buildId: $(AzDOBuildId)
+ artifactName: BuildManifests
+ - task: DownloadBuildArtifacts@0
+ displayName: Download Package Artifacts
+ inputs:
+ buildType: specific
+ buildVersionToDownload: specific
+ project: $(AzDOProjectName)
+ pipeline: $(AzDOPipelineId)
+ buildId: $(AzDOBuildId)
+ artifactName: PackageArtifacts
+
+ # This is necessary whenever we want to publish/restore to an AzDO private feed
+    # Since sdk-task.ps1 tries to restore packages, we need to do this authentication here;
+    # otherwise it'll complain about accessing a private feed.
+ - task: NuGetAuthenticate@0
+ displayName: 'Authenticate to AzDO Feeds'
+
+ - task: PowerShell@2
+ displayName: Enable cross-org publishing
+ inputs:
+ filePath: eng\common\enable-cross-org-publishing.ps1
+ arguments: -token $(dn-bot-dnceng-artifact-feeds-rw)
+
+    # Signing validation will optionally work with the build manifest file, which is
+    # downloaded from Azure DevOps above.
+ - task: PowerShell@2
+ displayName: Validate
+ inputs:
+ filePath: eng\common\sdk-task.ps1
+ arguments: -task SigningValidation -restore -msbuildEngine vs
+ /p:PackageBasePath='$(Build.ArtifactStagingDirectory)/PackageArtifacts'
+ /p:SignCheckExclusionsFile='$(Build.SourcesDirectory)/eng/SignCheckExclusionsFile.txt'
+ ${{ parameters.signingValidationAdditionalParameters }}
+
+ - template: ../steps/publish-logs.yml
+ parameters:
+ StageLabel: 'Validation'
+ JobLabel: 'Signing'
+
+ - job:
+ displayName: SourceLink Validation
+ dependsOn: setupMaestroVars
+ condition: eq( ${{ parameters.enableSourceLinkValidation }}, 'true')
+ variables:
+ - template: common-variables.yml
+ - name: AzDOProjectName
+ value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOProjectName'] ]
+ - name: AzDOPipelineId
+ value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOPipelineId'] ]
+ - name: AzDOBuildId
+ value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOBuildId'] ]
+ pool:
+ vmImage: 'windows-2019'
+ steps:
+ - task: DownloadBuildArtifacts@0
+ displayName: Download Blob Artifacts
+ inputs:
+ buildType: specific
+ buildVersionToDownload: specific
+ project: $(AzDOProjectName)
+ pipeline: $(AzDOPipelineId)
+ buildId: $(AzDOBuildId)
+ artifactName: BlobArtifacts
+
+ - task: PowerShell@2
+ displayName: Validate
+ inputs:
+ filePath: $(Build.SourcesDirectory)/eng/common/post-build/sourcelink-validation.ps1
+ arguments: -InputPath $(Build.ArtifactStagingDirectory)/BlobArtifacts/
+ -ExtractPath $(Agent.BuildDirectory)/Extract/
+ -GHRepoName $(Build.Repository.Name)
+ -GHCommit $(Build.SourceVersion)
+ -SourcelinkCliVersion $(SourceLinkCLIVersion)
+ continueOnError: true
+
+ - template: /eng/common/templates/job/execute-sdl.yml
+ parameters:
+ enable: ${{ parameters.SDLValidationParameters.enable }}
+ dependsOn: setupMaestroVars
+ additionalParameters: ${{ parameters.SDLValidationParameters.params }}
+ continueOnError: ${{ parameters.SDLValidationParameters.continueOnError }}
+ artifactNames: ${{ parameters.SDLValidationParameters.artifactNames }}
+ downloadArtifacts: ${{ parameters.SDLValidationParameters.downloadArtifacts }}
+
+- ${{ if or(ge(parameters.publishingInfraVersion, 3), eq(parameters.inline, 'false')) }}:
+ - stage: publish_using_darc
+ dependsOn: Validate
+ displayName: Publish using Darc
+ variables:
+ - template: common-variables.yml
+ jobs:
+ - template: setup-maestro-vars.yml
+ parameters:
+ BARBuildId: ${{ parameters.BARBuildId }}
+ PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
+
+ - job:
+ displayName: Publish Using Darc
+ dependsOn: setupMaestroVars
+ variables:
+ - name: BARBuildId
+ value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.BARBuildId'] ]
+ pool:
+ vmImage: 'windows-2019'
+ steps:
+ - task: PowerShell@2
+ displayName: Publish Using Darc
+ inputs:
+ filePath: $(Build.SourcesDirectory)/eng/common/post-build/publish-using-darc.ps1
+ arguments: -BuildId $(BARBuildId)
+ -PublishingInfraVersion ${{ parameters.PublishingInfraVersion }}
+ -AzdoToken '$(publishing-dnceng-devdiv-code-r-build-re)'
+ -MaestroToken '$(MaestroApiAccessToken)'
+ -WaitPublishingFinish ${{ parameters.waitPublishingFinish }}
+ -PublishInstallersAndChecksums ${{ parameters.publishInstallersAndChecksums }}
+
+- ${{ if and(le(parameters.publishingInfraVersion, 2), eq(parameters.inline, 'true')) }}:
+ - template: \eng\common\templates\post-build\channels\generic-public-channel.yml
+ parameters:
+ BARBuildId: ${{ parameters.BARBuildId }}
+ PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
+ artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
+ dependsOn: ${{ parameters.publishDependsOn }}
+ publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
+ symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
+ stageName: 'NetCore_Dev5_Publish'
+ channelName: '.NET 5 Dev'
+ akaMSChannelName: 'net5/dev'
+ channelId: ${{ parameters.NetDev5ChannelId }}
+ transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet5-transport/nuget/v3/index.json'
+ shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet5/nuget/v3/index.json'
+ symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet5-symbols/nuget/v3/index.json'
+
+ - template: \eng\common\templates\post-build\channels\generic-public-channel.yml
+ parameters:
+ BARBuildId: ${{ parameters.BARBuildId }}
+ PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
+ artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
+ dependsOn: ${{ parameters.publishDependsOn }}
+ publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
+ symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
+ stageName: 'NetCore_Dev6_Publish'
+ channelName: '.NET 6 Dev'
+ akaMSChannelName: 'net6/dev'
+ channelId: ${{ parameters.NetDev6ChannelId }}
+ transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet6-transport/nuget/v3/index.json'
+ shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet6/nuget/v3/index.json'
+ symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet6-symbols/nuget/v3/index.json'
+
+ - template: \eng\common\templates\post-build\channels\generic-internal-channel.yml
+ parameters:
+ BARBuildId: ${{ parameters.BARBuildId }}
+ PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
+ artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
+ dependsOn: ${{ parameters.publishDependsOn }}
+ publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
+ symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
+ stageName: 'Net5_Preview8_Publish'
+ channelName: '.NET 5 Preview 8'
+ akaMSChannelName: 'net5/preview8'
+ channelId: ${{ parameters.Net5Preview8ChannelId }}
+ transportFeed: 'https://pkgs.dev.azure.com/dnceng/internal/_packaging/dotnet5-internal-transport/nuget/v3/index.json'
+ shippingFeed: 'https://pkgs.dev.azure.com/dnceng/internal/_packaging/dotnet5-internal/nuget/v3/index.json'
+ symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/internal/_packaging/dotnet5-internal-symbols/nuget/v3/index.json'
+
+ - template: \eng\common\templates\post-build\channels\generic-public-channel.yml
+ parameters:
+ BARBuildId: ${{ parameters.BARBuildId }}
+ PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
+ artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
+ dependsOn: ${{ parameters.publishDependsOn }}
+ publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
+ symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
+ stageName: 'Net5_RC1_Publish'
+ channelName: '.NET 5 RC 1'
+ akaMSChannelName: 'net5/rc1'
+ channelId: ${{ parameters.Net5RC1ChannelId }}
+ transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet5-transport/nuget/v3/index.json'
+ shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet5/nuget/v3/index.json'
+ symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet5-symbols/nuget/v3/index.json'
+
+ - template: \eng\common\templates\post-build\channels\generic-public-channel.yml
+ parameters:
+ BARBuildId: ${{ parameters.BARBuildId }}
+ PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
+ artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
+ dependsOn: ${{ parameters.publishDependsOn }}
+ publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
+ symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
+ stageName: 'Net5_RC2_Publish'
+ channelName: '.NET 5 RC 2'
+ akaMSChannelName: 'net5/rc2'
+ channelId: ${{ parameters.Net5RC2ChannelId }}
+ transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet5-transport/nuget/v3/index.json'
+ shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet5/nuget/v3/index.json'
+ symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet5-symbols/nuget/v3/index.json'
+
+ - template: \eng\common\templates\post-build\channels\generic-public-channel.yml
+ parameters:
+ BARBuildId: ${{ parameters.BARBuildId }}
+ PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
+ artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
+ dependsOn: ${{ parameters.publishDependsOn }}
+ publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
+ symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
+ stageName: 'Net_Eng_Latest_Publish'
+ channelName: '.NET Eng - Latest'
+ akaMSChannelName: 'eng/daily'
+ channelId: ${{ parameters.NetEngLatestChannelId }}
+ transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng/nuget/v3/index.json'
+ shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng/nuget/v3/index.json'
+ symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng-symbols/nuget/v3/index.json'
+
+ - template: \eng\common\templates\post-build\channels\generic-public-channel.yml
+ parameters:
+ BARBuildId: ${{ parameters.BARBuildId }}
+ PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
+ artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
+ dependsOn: ${{ parameters.publishDependsOn }}
+ publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
+ symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
+ stageName: 'Net_Eng_Validation_Publish'
+ channelName: '.NET Eng - Validation'
+ akaMSChannelName: 'eng/validation'
+ channelId: ${{ parameters.NetEngValidationChannelId }}
+ transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng/nuget/v3/index.json'
+ shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng/nuget/v3/index.json'
+ symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng-symbols/nuget/v3/index.json'
+
+ - template: \eng\common\templates\post-build\channels\generic-public-channel.yml
+ parameters:
+ BARBuildId: ${{ parameters.BARBuildId }}
+ PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
+ artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
+ dependsOn: ${{ parameters.publishDependsOn }}
+ publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
+ symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
+ stageName: 'General_Testing_Publish'
+ channelName: 'General Testing'
+ akaMSChannelName: 'generaltesting'
+ channelId: ${{ parameters.GeneralTestingChannelId }}
+ transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/general-testing/nuget/v3/index.json'
+ shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/general-testing/nuget/v3/index.json'
+ symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/general-testing-symbols/nuget/v3/index.json'
+
+ - template: \eng\common\templates\post-build\channels\generic-public-channel.yml
+ parameters:
+ BARBuildId: ${{ parameters.BARBuildId }}
+ PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
+ artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
+ dependsOn: ${{ parameters.publishDependsOn }}
+ publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
+ symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
+ stageName: 'NETCore_Tooling_Dev_Publishing'
+ channelName: '.NET Core Tooling Dev'
+ channelId: ${{ parameters.NETCoreToolingDevChannelId }}
+ transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json'
+ shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json'
+ symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools-symbols/nuget/v3/index.json'
+
+ - template: \eng\common\templates\post-build\channels\generic-public-channel.yml
+ parameters:
+ BARBuildId: ${{ parameters.BARBuildId }}
+ PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
+ artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
+ dependsOn: ${{ parameters.publishDependsOn }}
+ publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
+ symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
+ stageName: 'NETCore_Tooling_Release_Publishing'
+ channelName: '.NET Core Tooling Release'
+ channelId: ${{ parameters.NETCoreToolingReleaseChannelId }}
+ transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json'
+ shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json'
+ symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools-symbols/nuget/v3/index.json'
+
+ - template: \eng\common\templates\post-build\channels\generic-internal-channel.yml
+ parameters:
+ BARBuildId: ${{ parameters.BARBuildId }}
+ PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
+ artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
+ dependsOn: ${{ parameters.publishDependsOn }}
+ publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
+ symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
+ stageName: 'NET_Internal_Tooling_Publishing'
+ channelName: '.NET Internal Tooling'
+ channelId: ${{ parameters.NETInternalToolingChannelId }}
+ transportFeed: 'https://pkgs.dev.azure.com/dnceng/internal/_packaging/dotnet-tools-internal/nuget/v3/index.json'
+ shippingFeed: 'https://pkgs.dev.azure.com/dnceng/internal/_packaging/dotnet-tools-internal/nuget/v3/index.json'
+ symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/internal/_packaging/dotnet-tools-internal-symbols/nuget/v3/index.json'
+
+ - template: \eng\common\templates\post-build\channels\generic-public-channel.yml
+ parameters:
+ BARBuildId: ${{ parameters.BARBuildId }}
+ PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
+ artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
+ dependsOn: ${{ parameters.publishDependsOn }}
+ publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
+ symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
+ stageName: 'NETCore_Experimental_Publishing'
+ channelName: '.NET Core Experimental'
+ channelId: ${{ parameters.NETCoreExperimentalChannelId }}
+ transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-experimental/nuget/v3/index.json'
+ shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-experimental/nuget/v3/index.json'
+ symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-experimental-symbols/nuget/v3/index.json'
+
+ - template: \eng\common\templates\post-build\channels\generic-public-channel.yml
+ parameters:
+ BARBuildId: ${{ parameters.BARBuildId }}
+ PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
+ artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
+ dependsOn: ${{ parameters.publishDependsOn }}
+ publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
+ symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
+ stageName: 'Net_Eng_Services_Int_Publish'
+ channelName: '.NET Eng Services - Int'
+ channelId: ${{ parameters.NetEngServicesIntChannelId }}
+ transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng/nuget/v3/index.json'
+ shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng/nuget/v3/index.json'
+ symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng-symbols/nuget/v3/index.json'
+
+ - template: \eng\common\templates\post-build\channels\generic-public-channel.yml
+ parameters:
+ BARBuildId: ${{ parameters.BARBuildId }}
+ PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
+ artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
+ dependsOn: ${{ parameters.publishDependsOn }}
+ publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
+ symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
+ stageName: 'Net_Eng_Services_Prod_Publish'
+ channelName: '.NET Eng Services - Prod'
+ channelId: ${{ parameters.NetEngServicesProdChannelId }}
+ transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng/nuget/v3/index.json'
+ shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng/nuget/v3/index.json'
+ symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng-symbols/nuget/v3/index.json'
+
+ - template: \eng\common\templates\post-build\channels\generic-public-channel.yml
+ parameters:
+ BARBuildId: ${{ parameters.BARBuildId }}
+ PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
+ artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
+ dependsOn: ${{ parameters.publishDependsOn }}
+ publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
+ symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
+ stageName: 'NETCore_SDK_314xx_Publishing'
+ channelName: '.NET Core SDK 3.1.4xx'
+ channelId: ${{ parameters.NetCoreSDK314xxChannelId }}
+ transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet3.1-transport/nuget/v3/index.json'
+ shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet3.1/nuget/v3/index.json'
+ symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet3.1-symbols/nuget/v3/index.json'
+
+ - template: \eng\common\templates\post-build\channels\generic-internal-channel.yml
+ parameters:
+ BARBuildId: ${{ parameters.BARBuildId }}
+ PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
+ artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
+ dependsOn: ${{ parameters.publishDependsOn }}
+ publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
+ symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
+ stageName: 'NETCore_SDK_314xx_Internal_Publishing'
+ channelName: '.NET Core SDK 3.1.4xx Internal'
+ channelId: ${{ parameters.NetCoreSDK314xxInternalChannelId }}
+ transportFeed: 'https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal-transport/nuget/v3/index.json'
+ shippingFeed: 'https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal/nuget/v3/index.json'
+ symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal-symbols/nuget/v3/index.json'
+
+ - template: \eng\common\templates\post-build\channels\generic-public-channel.yml
+ parameters:
+ BARBuildId: ${{ parameters.BARBuildId }}
+ PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
+ artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
+ dependsOn: ${{ parameters.publishDependsOn }}
+ publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
+ symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
+ stageName: 'NETCore_SDK_313xx_Publishing'
+ channelName: '.NET Core SDK 3.1.3xx'
+ channelId: ${{ parameters.NetCoreSDK313xxChannelId }}
+ transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet3.1-transport/nuget/v3/index.json'
+ shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet3.1/nuget/v3/index.json'
+ symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet3.1-symbols/nuget/v3/index.json'
+
+ - template: \eng\common\templates\post-build\channels\generic-internal-channel.yml
+ parameters:
+ BARBuildId: ${{ parameters.BARBuildId }}
+ PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
+ artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
+ dependsOn: ${{ parameters.publishDependsOn }}
+ publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
+ symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
+ stageName: 'NETCore_SDK_313xx_Internal_Publishing'
+ channelName: '.NET Core SDK 3.1.3xx Internal'
+ channelId: ${{ parameters.NetCoreSDK313xxInternalChannelId }}
+ transportFeed: 'https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal-transport/nuget/v3/index.json'
+ shippingFeed: 'https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal/nuget/v3/index.json'
+ symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal-symbols/nuget/v3/index.json'
+
+ - template: \eng\common\templates\post-build\channels\generic-public-channel.yml
+ parameters:
+ BARBuildId: ${{ parameters.BARBuildId }}
+ PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
+ artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
+ dependsOn: ${{ parameters.publishDependsOn }}
+ publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
+ symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
+ stageName: 'VS16_6_Publishing'
+ channelName: 'VS 16.6'
+ channelId: ${{ parameters.VS166ChannelId }}
+ transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools-transport/nuget/v3/index.json'
+ shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json'
+ symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools-symbols/nuget/v3/index.json'
+
+ - template: \eng\common\templates\post-build\channels\generic-public-channel.yml
+ parameters:
+ BARBuildId: ${{ parameters.BARBuildId }}
+ PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
+ artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
+ dependsOn: ${{ parameters.publishDependsOn }}
+ publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
+ symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
+ stageName: 'VS16_7_Publishing'
+ channelName: 'VS 16.7'
+ channelId: ${{ parameters.VS167ChannelId }}
+ transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools-transport/nuget/v3/index.json'
+ shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json'
+ symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools-symbols/nuget/v3/index.json'
+
+ - template: \eng\common\templates\post-build\channels\generic-public-channel.yml
+ parameters:
+ BARBuildId: ${{ parameters.BARBuildId }}
+ PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
+ artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
+ dependsOn: ${{ parameters.publishDependsOn }}
+ publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
+ symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
+ stageName: 'VS16_8_Publishing'
+ channelName: 'VS 16.8'
+ channelId: ${{ parameters.VS168ChannelId }}
+ transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools-transport/nuget/v3/index.json'
+ shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json'
+ symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools-symbols/nuget/v3/index.json'
+
+ - template: \eng\common\templates\post-build\channels\generic-public-channel.yml
+ parameters:
+ BARBuildId: ${{ parameters.BARBuildId }}
+ PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
+ artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
+ dependsOn: ${{ parameters.publishDependsOn }}
+ publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
+ symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
+ stageName: 'VS_Master_Publishing'
+ channelName: 'VS Master'
+ channelId: ${{ parameters.VSMasterChannelId }}
+ transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools-transport/nuget/v3/index.json'
+ shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json'
+ symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools-symbols/nuget/v3/index.json'
diff --git a/eng/common/templates/post-build/setup-maestro-vars.yml b/eng/common/templates/post-build/setup-maestro-vars.yml
new file mode 100644
index 0000000000..d0cbfb6c6f
--- /dev/null
+++ b/eng/common/templates/post-build/setup-maestro-vars.yml
@@ -0,0 +1,77 @@
+parameters:
+ BARBuildId: ''
+ PromoteToChannelIds: ''
+
+jobs:
+- job: setupMaestroVars
+ displayName: Setup Maestro Vars
+ variables:
+ - template: common-variables.yml
+ pool:
+ vmImage: 'windows-2019'
+ steps:
+ - checkout: none
+
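+  # When no explicit PromoteToChannelIds are passed in, read the channels to promote to
+  # from the ReleaseConfigs artifact produced by the current build.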
+ - ${{ if eq(coalesce(parameters.PromoteToChannelIds, 0), 0) }}:
+ - task: DownloadBuildArtifacts@0
+ displayName: Download Release Configs
+ inputs:
+ buildType: current
+ artifactName: ReleaseConfigs
+
+ - task: PowerShell@2
+ name: setReleaseVars
+ displayName: Set Release Configs Vars
+ inputs:
+ targetType: inline
+ script: |
+ try {
+ if (!$Env:PromoteToMaestroChannels -or $Env:PromoteToMaestroChannels.Trim() -eq '') {
+ $Content = Get-Content $(Build.StagingDirectory)/ReleaseConfigs/ReleaseConfigs.txt
+
+ $BarId = $Content | Select -Index 0
+ $Channels = $Content | Select -Index 1
+ $IsStableBuild = $Content | Select -Index 2
+
+ $AzureDevOpsProject = $Env:System_TeamProject
+ $AzureDevOpsBuildDefinitionId = $Env:System_DefinitionId
+ $AzureDevOpsBuildId = $Env:Build_BuildId
+ }
+ else {
+ $buildApiEndpoint = "${Env:MaestroApiEndPoint}/api/builds/${Env:BARBuildId}?api-version=${Env:MaestroApiVersion}"
+
+ $apiHeaders = New-Object 'System.Collections.Generic.Dictionary[[String],[String]]'
+ $apiHeaders.Add('Accept', 'application/json')
+ $apiHeaders.Add('Authorization',"Bearer ${Env:MAESTRO_API_TOKEN}")
+
+ $buildInfo = try { Invoke-WebRequest -Method Get -Uri $buildApiEndpoint -Headers $apiHeaders | ConvertFrom-Json } catch { Write-Host "Error: $_" }
+
+ $BarId = $Env:BARBuildId
+ $Channels = $Env:PromoteToMaestroChannels -split ","
+ $Channels = $Channels -join "]["
+ $Channels = "[$Channels]"
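+            # e.g. "128,131" becomes "[128][131]", which is the format the channel
+            # templates' contains(..., format('[{0}]', <channelId>)) conditions match.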
+
+ $IsStableBuild = $buildInfo.stable
+ $AzureDevOpsProject = $buildInfo.azureDevOpsProject
+ $AzureDevOpsBuildDefinitionId = $buildInfo.azureDevOpsBuildDefinitionId
+ $AzureDevOpsBuildId = $buildInfo.azureDevOpsBuildId
+ }
+
+ Write-Host "##vso[task.setvariable variable=BARBuildId;isOutput=true]$BarId"
+ Write-Host "##vso[task.setvariable variable=TargetChannels;isOutput=true]$Channels"
+ Write-Host "##vso[task.setvariable variable=IsStableBuild;isOutput=true]$IsStableBuild"
+
+ Write-Host "##vso[task.setvariable variable=AzDOProjectName;isOutput=true]$AzureDevOpsProject"
+ Write-Host "##vso[task.setvariable variable=AzDOPipelineId;isOutput=true]$AzureDevOpsBuildDefinitionId"
+ Write-Host "##vso[task.setvariable variable=AzDOBuildId;isOutput=true]$AzureDevOpsBuildId"
+ }
+ catch {
+ Write-Host $_
+ Write-Host $_.Exception
+ Write-Host $_.ScriptStackTrace
+ exit 1
+ }
+ env:
+ MAESTRO_API_TOKEN: $(MaestroApiAccessToken)
+ BARBuildId: ${{ parameters.BARBuildId }}
+ PromoteToMaestroChannels: ${{ parameters.PromoteToChannelIds }}
diff --git a/eng/common/templates/post-build/trigger-subscription.yml b/eng/common/templates/post-build/trigger-subscription.yml
new file mode 100644
index 0000000000..da669030da
--- /dev/null
+++ b/eng/common/templates/post-build/trigger-subscription.yml
@@ -0,0 +1,13 @@
+parameters:
+ ChannelId: 0
+
+steps:
+- task: PowerShell@2
+ displayName: Triggering subscriptions
+ inputs:
+ filePath: $(Build.SourcesDirectory)/eng/common/post-build/trigger-subscriptions.ps1
+ arguments: -SourceRepo $(Build.Repository.Uri)
+ -ChannelId ${{ parameters.ChannelId }}
+ -MaestroApiAccessToken $(MaestroAccessToken)
+ -MaestroApiEndPoint $(MaestroApiEndPoint)
+ -MaestroApiVersion $(MaestroApiVersion)
diff --git a/eng/common/templates/steps/add-build-to-channel.yml b/eng/common/templates/steps/add-build-to-channel.yml
new file mode 100644
index 0000000000..f67a210d62
--- /dev/null
+++ b/eng/common/templates/steps/add-build-to-channel.yml
@@ -0,0 +1,13 @@
+parameters:
+ ChannelId: 0
+
+steps:
+- task: PowerShell@2
+ displayName: Add Build to Channel
+ inputs:
+ filePath: $(Build.SourcesDirectory)/eng/common/post-build/add-build-to-channel.ps1
+ arguments: -BuildId $(BARBuildId)
+ -ChannelId ${{ parameters.ChannelId }}
+ -MaestroApiAccessToken $(MaestroApiAccessToken)
+ -MaestroApiEndPoint $(MaestroApiEndPoint)
+ -MaestroApiVersion $(MaestroApiVersion)
diff --git a/eng/common/templates/steps/build-reason.yml b/eng/common/templates/steps/build-reason.yml
new file mode 100644
index 0000000000..eba58109b5
--- /dev/null
+++ b/eng/common/templates/steps/build-reason.yml
@@ -0,0 +1,12 @@
+# build-reason.yml
+# Description: runs the given steps only when Build.Reason matches the 'conditions'
+# parameter, a comma-separated list of build reasons. Prefix the list with 'not' to
+# run the steps for every build reason except those listed.
+parameters:
+ conditions: ''
+ steps: []
+
+steps:
+ - ${{ if and( not(startsWith(parameters.conditions, 'not')), contains(parameters.conditions, variables['build.reason'])) }}:
+ - ${{ parameters.steps }}
+ - ${{ if and( startsWith(parameters.conditions, 'not'), not(contains(parameters.conditions, variables['build.reason']))) }}:
+ - ${{ parameters.steps }}
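+
+# Example (hypothetical): run a step for every build reason except scheduled builds:
+#   - template: /eng/common/templates/steps/build-reason.yml
+#     parameters:
+#       conditions: not Schedule
+#       steps:
+#       - script: echo this build was not triggered by a schedule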
diff --git a/eng/common/templates/steps/perf-send-to-helix.yml b/eng/common/templates/steps/perf-send-to-helix.yml
new file mode 100644
index 0000000000..e003fe2ef2
--- /dev/null
+++ b/eng/common/templates/steps/perf-send-to-helix.yml
@@ -0,0 +1,68 @@
+# Please remember to update the documentation if you make changes to these parameters!
+parameters:
+ ProjectFile: '' # required -- project file that specifies the helix workitems
+ HelixSource: 'pr/default' # required -- sources must start with pr/, official/, prodcon/, or agent/
+ HelixType: 'tests/default/' # required -- Helix telemetry which identifies what type of data this is; should include "test" for clarity and must end in '/'
+ HelixBuild: $(Build.BuildNumber) # required -- the build number Helix will use to identify this -- automatically set to the AzDO build number
+ HelixTargetQueues: '' # required -- semicolon delimited list of Helix queues to test on; see https://helix.dot.net/ for a list of queues
+ HelixAccessToken: '' # required -- access token to make Helix API requests; should be provided by the appropriate variable group
+ HelixPreCommands: '' # optional -- commands to run before Helix work item execution
+ HelixPostCommands: '' # optional -- commands to run after Helix work item execution
+ WorkItemDirectory: '' # optional -- a payload directory to zip up and send to Helix; requires WorkItemCommand; incompatible with XUnitProjects
+ CorrelationPayloadDirectory: '' # optional -- a directory to zip up and send to Helix as a correlation payload
+ IncludeDotNetCli: false # optional -- true will download a version of the .NET CLI onto the Helix machine as a correlation payload; requires DotNetCliPackageType and DotNetCliVersion
+ DotNetCliPackageType: '' # optional -- either 'sdk' or 'runtime'; determines whether the sdk or runtime will be sent to Helix; see https://raw.githubusercontent.com/dotnet/core/master/release-notes/releases.json
+ DotNetCliVersion: '' # optional -- version of the CLI to send to Helix; based on this: https://raw.githubusercontent.com/dotnet/core/master/release-notes/releases.json
+ EnableXUnitReporter: false # optional -- true enables XUnit result reporting to Mission Control
+ WaitForWorkItemCompletion: true # optional -- true will make the task wait until work items have been completed and fail the build if work items fail. False is "fire and forget."
+ Creator: '' # optional -- if the build is external, use this to specify who is sending the job
+ DisplayNamePrefix: 'Send job to Helix' # optional -- rename the beginning of the displayName of the steps in AzDO
+ condition: succeeded() # optional -- condition for step to execute; defaults to succeeded()
+ continueOnError: false # optional -- determines whether to continue the build if the step errors; defaults to false
+
+
+steps:
+ - powershell: $(Build.SourcesDirectory)\eng\common\msbuild.ps1 $(Build.SourcesDirectory)\eng\common\performance\${{ parameters.ProjectFile }} /restore /t:Test /bl:$(Build.SourcesDirectory)\artifacts\log\$env:BuildConfig\SendToHelix.binlog
+ displayName: ${{ parameters.DisplayNamePrefix }} (Windows)
+ env:
+ BuildConfig: $(_BuildConfig)
+ HelixSource: ${{ parameters.HelixSource }}
+ HelixType: ${{ parameters.HelixType }}
+ HelixBuild: ${{ parameters.HelixBuild }}
+ HelixTargetQueues: ${{ parameters.HelixTargetQueues }}
+ HelixAccessToken: ${{ parameters.HelixAccessToken }}
+ HelixPreCommands: ${{ parameters.HelixPreCommands }}
+ HelixPostCommands: ${{ parameters.HelixPostCommands }}
+ WorkItemDirectory: ${{ parameters.WorkItemDirectory }}
+ CorrelationPayloadDirectory: ${{ parameters.CorrelationPayloadDirectory }}
+ IncludeDotNetCli: ${{ parameters.IncludeDotNetCli }}
+ DotNetCliPackageType: ${{ parameters.DotNetCliPackageType }}
+ DotNetCliVersion: ${{ parameters.DotNetCliVersion }}
+ EnableXUnitReporter: ${{ parameters.EnableXUnitReporter }}
+ WaitForWorkItemCompletion: ${{ parameters.WaitForWorkItemCompletion }}
+ Creator: ${{ parameters.Creator }}
+ SYSTEM_ACCESSTOKEN: $(System.AccessToken)
+ condition: and(${{ parameters.condition }}, eq(variables['Agent.Os'], 'Windows_NT'))
+ continueOnError: ${{ parameters.continueOnError }}
+ - script: $BUILD_SOURCESDIRECTORY/eng/common/msbuild.sh $BUILD_SOURCESDIRECTORY/eng/common/performance/${{ parameters.ProjectFile }} /restore /t:Test /bl:$BUILD_SOURCESDIRECTORY/artifacts/log/$BuildConfig/SendToHelix.binlog
+ displayName: ${{ parameters.DisplayNamePrefix }} (Unix)
+ env:
+ BuildConfig: $(_BuildConfig)
+ HelixSource: ${{ parameters.HelixSource }}
+ HelixType: ${{ parameters.HelixType }}
+ HelixBuild: ${{ parameters.HelixBuild }}
+ HelixTargetQueues: ${{ parameters.HelixTargetQueues }}
+ HelixAccessToken: ${{ parameters.HelixAccessToken }}
+ HelixPreCommands: ${{ parameters.HelixPreCommands }}
+ HelixPostCommands: ${{ parameters.HelixPostCommands }}
+ WorkItemDirectory: ${{ parameters.WorkItemDirectory }}
+ CorrelationPayloadDirectory: ${{ parameters.CorrelationPayloadDirectory }}
+ IncludeDotNetCli: ${{ parameters.IncludeDotNetCli }}
+ DotNetCliPackageType: ${{ parameters.DotNetCliPackageType }}
+ DotNetCliVersion: ${{ parameters.DotNetCliVersion }}
+ EnableXUnitReporter: ${{ parameters.EnableXUnitReporter }}
+ WaitForWorkItemCompletion: ${{ parameters.WaitForWorkItemCompletion }}
+ Creator: ${{ parameters.Creator }}
+ SYSTEM_ACCESSTOKEN: $(System.AccessToken)
+ condition: and(${{ parameters.condition }}, ne(variables['Agent.Os'], 'Windows_NT'))
+ continueOnError: ${{ parameters.continueOnError }}
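+
+# Example (hypothetical values) of sending a performance project to Helix from a
+# consuming job's steps:
+#   - template: /eng/common/templates/steps/perf-send-to-helix.yml
+#     parameters:
+#       ProjectFile: microbenchmarks.proj
+#       HelixTargetQueues: Windows.10.Amd64.ClientRS4.Open
+#       HelixAccessToken: $(HelixApiAccessToken)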
diff --git a/eng/common/templates/steps/publish-logs.yml b/eng/common/templates/steps/publish-logs.yml
new file mode 100644
index 0000000000..88f238f36b
--- /dev/null
+++ b/eng/common/templates/steps/publish-logs.yml
@@ -0,0 +1,23 @@
+parameters:
+ StageLabel: ''
+ JobLabel: ''
+
+steps:
+- task: PowerShell@2
+ displayName: Prepare Binlogs to Upload
+ inputs:
+ targetType: inline
+ script: |
+ New-Item -ItemType Directory $(Build.SourcesDirectory)/PostBuildLogs/${{parameters.StageLabel}}/${{parameters.JobLabel}}/
+ Move-Item -Path $(Build.SourcesDirectory)/artifacts/log/Debug/* $(Build.SourcesDirectory)/PostBuildLogs/${{parameters.StageLabel}}/${{parameters.JobLabel}}/
+ continueOnError: true
+ condition: always()
+
+- task: PublishBuildArtifacts@1
+ displayName: Publish Logs
+ inputs:
+ PathtoPublish: '$(Build.SourcesDirectory)/PostBuildLogs'
+ PublishLocation: Container
+ ArtifactName: PostBuildLogs
+ continueOnError: true
+ condition: always()
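A job would typically append this template as its final step so binlogs survive failed builds; a minimal caller sketch, with illustrative labels:

  steps:
  - template: /eng/common/templates/steps/publish-logs.yml
    parameters:
      StageLabel: Build
      JobLabel: Windows_NT_Release

Note that the PowerShell step above only moves artifacts/log/Debug, so as written the template does not collect logs from other configurations.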
diff --git a/eng/common/templates/steps/run-on-unix.yml b/eng/common/templates/steps/run-on-unix.yml
new file mode 100644
index 0000000000..e1733814f6
--- /dev/null
+++ b/eng/common/templates/steps/run-on-unix.yml
@@ -0,0 +1,7 @@
+parameters:
+ agentOs: ''
+ steps: []
+
+steps:
+- ${{ if ne(parameters.agentOs, 'Windows_NT') }}:
+ - ${{ parameters.steps }}
diff --git a/eng/common/templates/steps/run-on-windows.yml b/eng/common/templates/steps/run-on-windows.yml
new file mode 100644
index 0000000000..73e7e9c275
--- /dev/null
+++ b/eng/common/templates/steps/run-on-windows.yml
@@ -0,0 +1,7 @@
+parameters:
+ agentOs: ''
+ steps: []
+
+steps:
+- ${{ if eq(parameters.agentOs, 'Windows_NT') }}:
+ - ${{ parameters.steps }}
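Both wrappers filter their step list at template-expansion time rather than attaching a runtime condition to every step; a minimal usage sketch, with illustrative inner scripts:

  steps:
  - template: /eng/common/templates/steps/run-on-windows.yml
    parameters:
      agentOs: ${{ parameters.agentOs }}
      steps:
      - script: build.cmd -ci
        displayName: Build (Windows)
  - template: /eng/common/templates/steps/run-on-unix.yml
    parameters:
      agentOs: ${{ parameters.agentOs }}
      steps:
      - script: ./build.sh --ci
        displayName: Build (Unix)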
diff --git a/eng/common/templates/steps/run-script-ifequalelse.yml b/eng/common/templates/steps/run-script-ifequalelse.yml
new file mode 100644
index 0000000000..3d1242f558
--- /dev/null
+++ b/eng/common/templates/steps/run-script-ifequalelse.yml
@@ -0,0 +1,33 @@
+parameters:
+  # if parameter1 equals parameter2, run the 'ifScript' command, else run the 'elseScript' command
+ parameter1: ''
+ parameter2: ''
+ ifScript: ''
+ elseScript: ''
+
+ # name of script step
+ name: Script
+
+ # display name of script step
+ displayName: If-Equal-Else Script
+
+ # environment
+ env: {}
+
+ # conditional expression for step execution
+ condition: ''
+
+steps:
+- ${{ if and(ne(parameters.ifScript, ''), eq(parameters.parameter1, parameters.parameter2)) }}:
+ - script: ${{ parameters.ifScript }}
+ name: ${{ parameters.name }}
+ displayName: ${{ parameters.displayName }}
+ env: ${{ parameters.env }}
+ condition: ${{ parameters.condition }}
+
+- ${{ if and(ne(parameters.elseScript, ''), ne(parameters.parameter1, parameters.parameter2)) }}:
+ - script: ${{ parameters.elseScript }}
+ name: ${{ parameters.name }}
+ displayName: ${{ parameters.displayName }}
+ env: ${{ parameters.env }}
+ condition: ${{ parameters.condition }}
\ No newline at end of file
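A minimal caller sketch, assuming the surrounding template exposes an agentOs parameter:

  steps:
  - template: /eng/common/templates/steps/run-script-ifequalelse.yml
    parameters:
      parameter1: ${{ parameters.agentOs }}
      parameter2: Windows_NT
      ifScript: build.cmd -ci
      elseScript: ./build.sh --ci
      name: Build
      displayName: Build Repository

Because the comparison happens at expansion time, only one of the two script steps ends up in the rendered job.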
diff --git a/eng/common/templates/steps/send-to-helix.yml b/eng/common/templates/steps/send-to-helix.yml
new file mode 100644
index 0000000000..5eceb48725
--- /dev/null
+++ b/eng/common/templates/steps/send-to-helix.yml
@@ -0,0 +1,94 @@
+# Please remember to update the documentation if you make changes to these parameters!
+parameters:
+ HelixSource: 'pr/default' # required -- sources must start with pr/, official/, prodcon/, or agent/
+ HelixType: 'tests/default/' # required -- Helix telemetry which identifies what type of data this is; should include "test" for clarity and must end in '/'
+ HelixBuild: $(Build.BuildNumber) # required -- the build number Helix will use to identify this -- automatically set to the AzDO build number
+ HelixTargetQueues: '' # required -- semicolon delimited list of Helix queues to test on; see https://helix.dot.net/ for a list of queues
+ HelixAccessToken: '' # required -- access token to make Helix API requests; should be provided by the appropriate variable group
+ HelixConfiguration: '' # optional -- additional property attached to a job
+ HelixPreCommands: '' # optional -- commands to run before Helix work item execution
+ HelixPostCommands: '' # optional -- commands to run after Helix work item execution
+ WorkItemDirectory: '' # optional -- a payload directory to zip up and send to Helix; requires WorkItemCommand; incompatible with XUnitProjects
+ WorkItemCommand: '' # optional -- a command to execute on the payload; requires WorkItemDirectory; incompatible with XUnitProjects
+ WorkItemTimeout: '' # optional -- a timeout in TimeSpan.Parse-ready value (e.g. 00:02:00) for the work item command; requires WorkItemDirectory; incompatible with XUnitProjects
+ CorrelationPayloadDirectory: '' # optional -- a directory to zip up and send to Helix as a correlation payload
+ XUnitProjects: '' # optional -- semicolon delimited list of XUnitProjects to parse and send to Helix; requires XUnitRuntimeTargetFramework, XUnitPublishTargetFramework, XUnitRunnerVersion, and IncludeDotNetCli=true
+ XUnitWorkItemTimeout: '' # optional -- the workitem timeout in seconds for all workitems created from the xUnit projects specified by XUnitProjects
+ XUnitPublishTargetFramework: '' # optional -- framework to use to publish your xUnit projects
+ XUnitRuntimeTargetFramework: '' # optional -- framework to use for the xUnit console runner
+ XUnitRunnerVersion: '' # optional -- version of the xUnit nuget package you wish to use on Helix; required for XUnitProjects
+ IncludeDotNetCli: false # optional -- true will download a version of the .NET CLI onto the Helix machine as a correlation payload; requires DotNetCliPackageType and DotNetCliVersion
+ DotNetCliPackageType: '' # optional -- either 'sdk' or 'runtime'; determines whether the sdk or runtime will be sent to Helix; see https://raw.githubusercontent.com/dotnet/core/master/release-notes/releases-index.json
+ DotNetCliVersion: '' # optional -- version of the CLI to send to Helix; based on this: https://raw.githubusercontent.com/dotnet/core/master/release-notes/releases-index.json
+ EnableXUnitReporter: false # optional -- true enables XUnit result reporting to Mission Control
+ WaitForWorkItemCompletion: true # optional -- true will make the task wait until work items have been completed and fail the build if work items fail. False is "fire and forget."
+ IsExternal: false # [DEPRECATED] -- doesn't do anything, jobs are external if HelixAccessToken is empty and Creator is set
+ HelixBaseUri: 'https://helix.dot.net/' # optional -- sets the Helix API base URI (allows targeting int)
+ Creator: '' # optional -- if the build is external, use this to specify who is sending the job
+ DisplayNamePrefix: 'Run Tests' # optional -- rename the beginning of the displayName of the steps in AzDO
+ condition: succeeded() # optional -- condition for step to execute; defaults to succeeded()
+ continueOnError: false # optional -- determines whether to continue the build if the step errors; defaults to false
+
+steps:
+ - powershell: 'powershell "$env:BUILD_SOURCESDIRECTORY\eng\common\msbuild.ps1 $env:BUILD_SOURCESDIRECTORY\eng\common\helixpublish.proj /restore /t:Test /bl:$env:BUILD_SOURCESDIRECTORY\artifacts\log\$env:BuildConfig\SendToHelix.binlog"'
+ displayName: ${{ parameters.DisplayNamePrefix }} (Windows)
+ env:
+ BuildConfig: $(_BuildConfig)
+ HelixSource: ${{ parameters.HelixSource }}
+ HelixType: ${{ parameters.HelixType }}
+ HelixBuild: ${{ parameters.HelixBuild }}
+ HelixConfiguration: ${{ parameters.HelixConfiguration }}
+ HelixTargetQueues: ${{ parameters.HelixTargetQueues }}
+ HelixAccessToken: ${{ parameters.HelixAccessToken }}
+ HelixPreCommands: ${{ parameters.HelixPreCommands }}
+ HelixPostCommands: ${{ parameters.HelixPostCommands }}
+ WorkItemDirectory: ${{ parameters.WorkItemDirectory }}
+ WorkItemCommand: ${{ parameters.WorkItemCommand }}
+ WorkItemTimeout: ${{ parameters.WorkItemTimeout }}
+ CorrelationPayloadDirectory: ${{ parameters.CorrelationPayloadDirectory }}
+ XUnitProjects: ${{ parameters.XUnitProjects }}
+ XUnitWorkItemTimeout: ${{ parameters.XUnitWorkItemTimeout }}
+ XUnitPublishTargetFramework: ${{ parameters.XUnitPublishTargetFramework }}
+ XUnitRuntimeTargetFramework: ${{ parameters.XUnitRuntimeTargetFramework }}
+ XUnitRunnerVersion: ${{ parameters.XUnitRunnerVersion }}
+ IncludeDotNetCli: ${{ parameters.IncludeDotNetCli }}
+ DotNetCliPackageType: ${{ parameters.DotNetCliPackageType }}
+ DotNetCliVersion: ${{ parameters.DotNetCliVersion }}
+ EnableXUnitReporter: ${{ parameters.EnableXUnitReporter }}
+ WaitForWorkItemCompletion: ${{ parameters.WaitForWorkItemCompletion }}
+ HelixBaseUri: ${{ parameters.HelixBaseUri }}
+ Creator: ${{ parameters.Creator }}
+ SYSTEM_ACCESSTOKEN: $(System.AccessToken)
+ condition: and(${{ parameters.condition }}, eq(variables['Agent.Os'], 'Windows_NT'))
+ continueOnError: ${{ parameters.continueOnError }}
+ - script: $BUILD_SOURCESDIRECTORY/eng/common/msbuild.sh $BUILD_SOURCESDIRECTORY/eng/common/helixpublish.proj /restore /t:Test /bl:$BUILD_SOURCESDIRECTORY/artifacts/log/$BuildConfig/SendToHelix.binlog
+ displayName: ${{ parameters.DisplayNamePrefix }} (Unix)
+ env:
+ BuildConfig: $(_BuildConfig)
+ HelixSource: ${{ parameters.HelixSource }}
+ HelixType: ${{ parameters.HelixType }}
+ HelixBuild: ${{ parameters.HelixBuild }}
+ HelixConfiguration: ${{ parameters.HelixConfiguration }}
+ HelixTargetQueues: ${{ parameters.HelixTargetQueues }}
+ HelixAccessToken: ${{ parameters.HelixAccessToken }}
+ HelixPreCommands: ${{ parameters.HelixPreCommands }}
+ HelixPostCommands: ${{ parameters.HelixPostCommands }}
+ WorkItemDirectory: ${{ parameters.WorkItemDirectory }}
+ WorkItemCommand: ${{ parameters.WorkItemCommand }}
+ WorkItemTimeout: ${{ parameters.WorkItemTimeout }}
+ CorrelationPayloadDirectory: ${{ parameters.CorrelationPayloadDirectory }}
+ XUnitProjects: ${{ parameters.XUnitProjects }}
+ XUnitWorkItemTimeout: ${{ parameters.XUnitWorkItemTimeout }}
+ XUnitPublishTargetFramework: ${{ parameters.XUnitPublishTargetFramework }}
+ XUnitRuntimeTargetFramework: ${{ parameters.XUnitRuntimeTargetFramework }}
+ XUnitRunnerVersion: ${{ parameters.XUnitRunnerVersion }}
+ IncludeDotNetCli: ${{ parameters.IncludeDotNetCli }}
+ DotNetCliPackageType: ${{ parameters.DotNetCliPackageType }}
+ DotNetCliVersion: ${{ parameters.DotNetCliVersion }}
+ EnableXUnitReporter: ${{ parameters.EnableXUnitReporter }}
+ WaitForWorkItemCompletion: ${{ parameters.WaitForWorkItemCompletion }}
+ HelixBaseUri: ${{ parameters.HelixBaseUri }}
+ Creator: ${{ parameters.Creator }}
+ SYSTEM_ACCESSTOKEN: $(System.AccessToken)
+ condition: and(${{ parameters.condition }}, ne(variables['Agent.Os'], 'Windows_NT'))
+ continueOnError: ${{ parameters.continueOnError }}
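Pulling the required and xUnit-related parameters together, a minimal sketch of a caller; the queues, versions, and test project path are illustrative, not prescriptions:

  steps:
  - template: /eng/common/templates/steps/send-to-helix.yml
    parameters:
      HelixSource: pr/dotnet/machinelearning/$(Build.SourceBranch)
      HelixType: tests/default/
      HelixTargetQueues: Ubuntu.1604.Amd64.Open;Windows.10.Amd64.Open
      Creator: ml-net
      XUnitProjects: $(Build.SourcesDirectory)/test/Example.Tests/Example.Tests.csproj
      XUnitPublishTargetFramework: netcoreapp3.1
      XUnitRuntimeTargetFramework: netcoreapp2.0
      XUnitRunnerVersion: 2.4.1
      IncludeDotNetCli: true
      DotNetCliPackageType: sdk
      DotNetCliVersion: 3.1.102
      EnableXUnitReporter: true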
diff --git a/eng/common/templates/steps/telemetry-end.yml b/eng/common/templates/steps/telemetry-end.yml
new file mode 100644
index 0000000000..fadc04ca1b
--- /dev/null
+++ b/eng/common/templates/steps/telemetry-end.yml
@@ -0,0 +1,102 @@
+parameters:
+ maxRetries: 5
+ retryDelay: 10 # in seconds
+
+steps:
+- bash: |
+ if [ "$AGENT_JOBSTATUS" = "Succeeded" ] || [ "$AGENT_JOBSTATUS" = "PartiallySucceeded" ]; then
+ errorCount=0
+ else
+ errorCount=1
+ fi
+ warningCount=0
+
+ curlStatus=1
+ retryCount=0
+ # retry loop to harden against spotty telemetry connections
+ # we don't retry successes and 4xx client errors
+ until [[ $curlStatus -eq 0 || ( $curlStatus -ge 400 && $curlStatus -le 499 ) || $retryCount -ge $MaxRetries ]]
+ do
+ if [ $retryCount -gt 0 ]; then
+ echo "Failed to send telemetry to Helix; waiting $RetryDelay seconds before retrying..."
+ sleep $RetryDelay
+ fi
+
+ # create a temporary file for curl output
+ res=`mktemp`
+
+ curlResult=`
+ curl --verbose --output $res --write-out "%{http_code}"\
+ -H 'Content-Type: application/json' \
+ -H "X-Helix-Job-Token: $Helix_JobToken" \
+ -H 'Content-Length: 0' \
+ -X POST -G "https://helix.dot.net/api/2018-03-14/telemetry/job/build/$Helix_WorkItemId/finish" \
+ --data-urlencode "errorCount=$errorCount" \
+ --data-urlencode "warningCount=$warningCount"`
+ curlStatus=$?
+
+ if [ $curlStatus -eq 0 ]; then
+ if [ $curlResult -gt 299 ] || [ $curlResult -lt 200 ]; then
+ curlStatus=$curlResult
+ fi
+ fi
+
+ let retryCount++
+ done
+
+ if [ $curlStatus -ne 0 ]; then
+ echo "Failed to Send Build Finish information after $retryCount retries"
+ vstsLogOutput="vso[task.logissue type=error;sourcepath=templates/steps/telemetry-end.yml;code=1;]Failed to Send Build Finish information: $curlStatus"
+ echo "##$vstsLogOutput"
+ exit 1
+ fi
+ displayName: Send Unix Build End Telemetry
+ env:
+ # defined via VSTS variables in start-job.sh
+ Helix_JobToken: $(Helix_JobToken)
+ Helix_WorkItemId: $(Helix_WorkItemId)
+ MaxRetries: ${{ parameters.maxRetries }}
+ RetryDelay: ${{ parameters.retryDelay }}
+ condition: and(always(), ne(variables['Agent.Os'], 'Windows_NT'))
+- powershell: |
+ if (($env:Agent_JobStatus -eq 'Succeeded') -or ($env:Agent_JobStatus -eq 'PartiallySucceeded')) {
+ $ErrorCount = 0
+ } else {
+ $ErrorCount = 1
+ }
+ $WarningCount = 0
+
+ # Basic retry loop to harden against server flakiness
+ $retryCount = 0
+ while ($retryCount -lt $env:MaxRetries) {
+ try {
+ Invoke-RestMethod -Uri "https://helix.dot.net/api/2018-03-14/telemetry/job/build/$env:Helix_WorkItemId/finish?errorCount=$ErrorCount&warningCount=$WarningCount" -Method Post -ContentType "application/json" -Body "" `
+ -Headers @{ 'X-Helix-Job-Token'=$env:Helix_JobToken }
+ break
+ }
+ catch {
+ $statusCode = $_.Exception.Response.StatusCode.value__
+ if ($statusCode -ge 400 -and $statusCode -le 499) {
+ Write-Host "##vso[task.logissue]error Failed to send telemetry to Helix (status code $statusCode); not retrying (4xx client error)"
+ Write-Host "##vso[task.logissue]error ", $_.Exception.GetType().FullName, $_.Exception.Message
+ exit 1
+ }
+ Write-Host "Failed to send telemetry to Helix (status code $statusCode); waiting $env:RetryDelay seconds before retrying..."
+ $retryCount++
+ sleep $env:RetryDelay
+ continue
+ }
+ }
+
+ if ($retryCount -ge $env:MaxRetries) {
+ Write-Host "##vso[task.logissue]error Failed to send telemetry to Helix after $retryCount retries."
+ exit 1
+ }
+ displayName: Send Windows Build End Telemetry
+ env:
+ # defined via VSTS variables in start-job.ps1
+ Helix_JobToken: $(Helix_JobToken)
+ Helix_WorkItemId: $(Helix_WorkItemId)
+ MaxRetries: ${{ parameters.maxRetries }}
+ RetryDelay: ${{ parameters.retryDelay }}
+ condition: and(always(),eq(variables['Agent.Os'], 'Windows_NT'))
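This end step is designed to pair with telemetry-start.yml (added next in this diff), which creates the Helix job and is expected to publish Helix_JobToken and Helix_WorkItemId as pipeline variables; a minimal sketch of the intended bracket around a build, with an illustrative build step:

  steps:
  - template: /eng/common/templates/steps/telemetry-start.yml
    parameters:
      helixSource: official/dotnet/machinelearning/$(Build.SourceBranch)
      helixType: build/product/
      buildConfig: $(_BuildConfig)
  - script: ./build.sh --ci
    displayName: Build
  - template: /eng/common/templates/steps/telemetry-end.yml
    parameters:
      maxRetries: 5
      retryDelay: 10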
diff --git a/eng/common/templates/steps/telemetry-start.yml b/eng/common/templates/steps/telemetry-start.yml
new file mode 100644
index 0000000000..32c01ef0b5
--- /dev/null
+++ b/eng/common/templates/steps/telemetry-start.yml
@@ -0,0 +1,241 @@
+parameters:
+ helixSource: 'undefined_defaulted_in_telemetry.yml'
+ helixType: 'undefined_defaulted_in_telemetry.yml'
+ buildConfig: ''
+ runAsPublic: false
+ maxRetries: 5
+ retryDelay: 10 # in seconds
+
+steps:
+- ${{ if and(eq(parameters.runAsPublic, 'false'), not(eq(variables['System.TeamProject'], 'public'))) }}:
+ - task: AzureKeyVault@1
+ inputs:
+ azureSubscription: 'HelixProd_KeyVault'
+ KeyVaultName: HelixProdKV
+ SecretsFilter: 'HelixApiAccessToken'
+ condition: always()
+- bash: |
+ # create a temporary file
+ jobInfo=`mktemp`
+
+ # write job info content to temporary file
+  cat > $jobInfo <<
[... remainder of eng/common/templates/steps/telemetry-start.yml truncated in source ...]
diff --git a/eng/common/tools.ps1 b/eng/common/tools.ps1
new file mode 100644
--- /dev/null
+++ b/eng/common/tools.ps1
[... opening of eng/common/tools.ps1 truncated in source ...]
+# createSdkLocationFile parameter enables a file being generated under the toolset directory
+# which writes the sdk's location into. This is only necessary for cmd --> powershell invocations
+# as dot sourcing isn't possible.
+function InitializeDotNetCli([bool]$install, [bool]$createSdkLocationFile) {
+ if (Test-Path variable:global:_DotNetInstallDir) {
+ return $global:_DotNetInstallDir
+ }
+
+ # Don't resolve runtime, shared framework, or SDK from other locations to ensure build determinism
+ $env:DOTNET_MULTILEVEL_LOOKUP=0
+
+ # Disable first run since we do not need all ASP.NET packages restored.
+ $env:DOTNET_SKIP_FIRST_TIME_EXPERIENCE=1
+
+ # Disable telemetry on CI.
+ if ($ci) {
+ $env:DOTNET_CLI_TELEMETRY_OPTOUT=1
+ }
+
+ # Source Build uses DotNetCoreSdkDir variable
+ if ($env:DotNetCoreSdkDir -ne $null) {
+ $env:DOTNET_INSTALL_DIR = $env:DotNetCoreSdkDir
+ }
+
+ # Find the first path on %PATH% that contains the dotnet.exe
+ if ($useInstalledDotNetCli -and (-not $globalJsonHasRuntimes) -and ($env:DOTNET_INSTALL_DIR -eq $null)) {
+ $dotnetExecutable = GetExecutableFileName 'dotnet'
+ $dotnetCmd = Get-Command $dotnetExecutable -ErrorAction SilentlyContinue
+
+ if ($dotnetCmd -ne $null) {
+ $env:DOTNET_INSTALL_DIR = Split-Path $dotnetCmd.Path -Parent
+ }
+ }
+
+ $dotnetSdkVersion = $GlobalJson.tools.dotnet
+
+ # Use dotnet installation specified in DOTNET_INSTALL_DIR if it contains the required SDK version,
+ # otherwise install the dotnet CLI and SDK to repo local .dotnet directory to avoid potential permission issues.
+ if ((-not $globalJsonHasRuntimes) -and ($env:DOTNET_INSTALL_DIR -ne $null) -and (Test-Path(Join-Path $env:DOTNET_INSTALL_DIR "sdk\$dotnetSdkVersion"))) {
+ $dotnetRoot = $env:DOTNET_INSTALL_DIR
+ } else {
+ $dotnetRoot = Join-Path $RepoRoot '.dotnet'
+
+ if (-not (Test-Path(Join-Path $dotnetRoot "sdk\$dotnetSdkVersion"))) {
+ if ($install) {
+ InstallDotNetSdk $dotnetRoot $dotnetSdkVersion
+ } else {
+ Write-PipelineTelemetryError -Category 'InitializeToolset' -Message "Unable to find dotnet with SDK version '$dotnetSdkVersion'"
+ ExitWithExitCode 1
+ }
+ }
+
+ $env:DOTNET_INSTALL_DIR = $dotnetRoot
+ }
+
+ # Creates a temporary file under the toolset dir.
+ # The following code block is protecting against concurrent access so that this function can
+ # be called in parallel.
+ if ($createSdkLocationFile) {
+ do {
+ $sdkCacheFileTemp = Join-Path $ToolsetDir $([System.IO.Path]::GetRandomFileName())
+ }
+ until (!(Test-Path $sdkCacheFileTemp))
+ Set-Content -Path $sdkCacheFileTemp -Value $dotnetRoot
+
+ try {
+ Rename-Item -Force -Path $sdkCacheFileTemp 'sdk.txt'
+ } catch {
+ # Somebody beat us
+ Remove-Item -Path $sdkCacheFileTemp
+ }
+ }
+
+ # Add dotnet to PATH. This prevents any bare invocation of dotnet in custom
+ # build steps from using anything other than what we've downloaded.
+ # It also ensures that VS msbuild will use the downloaded sdk targets.
+ $env:PATH = "$dotnetRoot;$env:PATH"
+
+ # Make Sure that our bootstrapped dotnet cli is available in future steps of the Azure Pipelines build
+ Write-PipelinePrependPath -Path $dotnetRoot
+
+ Write-PipelineSetVariable -Name 'DOTNET_MULTILEVEL_LOOKUP' -Value '0'
+ Write-PipelineSetVariable -Name 'DOTNET_SKIP_FIRST_TIME_EXPERIENCE' -Value '1'
+
+ return $global:_DotNetInstallDir = $dotnetRoot
+}
+
+function GetDotNetInstallScript([string] $dotnetRoot) {
+ $installScript = Join-Path $dotnetRoot 'dotnet-install.ps1'
+ if (!(Test-Path $installScript)) {
+    Create-Directory $dotnetRoot
+
+    if ($useDefaultDotnetInstall)
+    {
+      $ProgressPreference = 'SilentlyContinue' # Don't display the console progress UI - it's a huge perf hit
+
+      $maxRetries = 5
+      $retries = 1
+
+      $uri = "https://dot.net/$dotnetInstallScriptVersion/dotnet-install.ps1"
+
+      while($true) {
+        try {
+          Write-Host "GET $uri"
+          Invoke-WebRequest $uri -OutFile $installScript
+          break
+        }
+        catch {
+          Write-Host "Failed to download '$uri'"
+          Write-Error $_.Exception.Message -ErrorAction Continue
+        }
+
+        if (++$retries -le $maxRetries) {
+          $delayInSeconds = [math]::Pow(2, $retries) - 1 # Exponential backoff
+          Write-Host "Retrying. Waiting for $delayInSeconds seconds before next attempt ($retries of $maxRetries)."
+          Start-Sleep -Seconds $delayInSeconds
+        }
+        else {
+          throw "Unable to download file in $maxRetries attempts."
+        }
+      }
+ }
+ else
+ {
+ # Use a special version of the script from eng/common that understands the existence of a "productVersion.txt" in a dotnet path.
+ # See https://github.com/dotnet/arcade/issues/6047 for details
+ $engCommonCopy = Resolve-Path (Join-Path $PSScriptRoot 'dotnet-install-scripts\dotnet-install.ps1')
+ Copy-Item $engCommonCopy -Destination $installScript -Force
+ }
+ }
+ return $installScript
+}
+
+function InstallDotNetSdk([string] $dotnetRoot, [string] $version, [string] $architecture = '', [switch] $noPath) {
+ InstallDotNet $dotnetRoot $version $architecture '' $false $runtimeSourceFeed $runtimeSourceFeedKey -noPath:$noPath
+}
+
+function InstallDotNet([string] $dotnetRoot,
+ [string] $version,
+ [string] $architecture = '',
+ [string] $runtime = '',
+ [bool] $skipNonVersionedFiles = $false,
+ [string] $runtimeSourceFeed = '',
+ [string] $runtimeSourceFeedKey = '',
+ [switch] $noPath) {
+
+ $installScript = GetDotNetInstallScript $dotnetRoot
+ $installParameters = @{
+ Version = $version
+ InstallDir = $dotnetRoot
+ }
+
+ if ($architecture) { $installParameters.Architecture = $architecture }
+ if ($runtime) { $installParameters.Runtime = $runtime }
+ if ($skipNonVersionedFiles) { $installParameters.SkipNonVersionedFiles = $skipNonVersionedFiles }
+ if ($noPath) { $installParameters.NoPath = $True }
+
+ try {
+ & $installScript @installParameters
+ }
+ catch {
+ if ($runtimeSourceFeed -or $runtimeSourceFeedKey) {
+ Write-Host "Failed to install dotnet from public location. Trying from '$runtimeSourceFeed'"
+ if ($runtimeSourceFeed) { $installParameters.AzureFeed = $runtimeSourceFeed }
+
+ if ($runtimeSourceFeedKey) {
+ $decodedBytes = [System.Convert]::FromBase64String($runtimeSourceFeedKey)
+ $decodedString = [System.Text.Encoding]::UTF8.GetString($decodedBytes)
+ $installParameters.FeedCredential = $decodedString
+ }
+
+ try {
+ & $installScript @installParameters
+ }
+ catch {
+ Write-PipelineTelemetryError -Category 'InitializeToolset' -Message "Failed to install dotnet from custom location '$runtimeSourceFeed'."
+ ExitWithExitCode 1
+ }
+ } else {
+ Write-PipelineTelemetryError -Category 'InitializeToolset' -Message "Failed to install dotnet from public location."
+ ExitWithExitCode 1
+ }
+ }
+}
+
+#
+# Locates Visual Studio MSBuild installation.
+# The preference order for MSBuild to use is as follows:
+#
+# 1. MSBuild from an active VS command prompt
+# 2. MSBuild from a compatible VS installation
+# 3. MSBuild from the xcopy tool package
+#
+# Returns full path to msbuild.exe.
+# Throws on failure.
+#
+function InitializeVisualStudioMSBuild([bool]$install, [object]$vsRequirements = $null) {
+ if (-not (IsWindowsPlatform)) {
+ throw "Cannot initialize Visual Studio on non-Windows"
+ }
+
+ if (Test-Path variable:global:_MSBuildExe) {
+ return $global:_MSBuildExe
+ }
+
+ $vsMinVersionReqdStr = '16.5'
+ $vsMinVersionReqd = [Version]::new($vsMinVersionReqdStr)
+
+ if (!$vsRequirements) { $vsRequirements = $GlobalJson.tools.vs }
+ $vsMinVersionStr = if ($vsRequirements.version) { $vsRequirements.version } else { $vsMinVersionReqdStr }
+ $vsMinVersion = [Version]::new($vsMinVersionStr)
+
+ # Try msbuild command available in the environment.
+ if ($env:VSINSTALLDIR -ne $null) {
+ $msbuildCmd = Get-Command 'msbuild.exe' -ErrorAction SilentlyContinue
+ if ($msbuildCmd -ne $null) {
+ # Workaround for https://github.com/dotnet/roslyn/issues/35793
+ # Due to this issue $msbuildCmd.Version returns 0.0.0.0 for msbuild.exe 16.2+
+ $msbuildVersion = [Version]::new((Get-Item $msbuildCmd.Path).VersionInfo.ProductVersion.Split([char[]]@('-', '+'))[0])
+
+ if ($msbuildVersion -ge $vsMinVersion) {
+ return $global:_MSBuildExe = $msbuildCmd.Path
+ }
+
+ # Report error - the developer environment is initialized with incompatible VS version.
+ throw "Developer Command Prompt for VS $($env:VisualStudioVersion) is not recent enough. Please upgrade to $vsMinVersionStr or build from a plain CMD window"
+ }
+ }
+
+ # Locate Visual Studio installation or download x-copy msbuild.
+ $vsInfo = LocateVisualStudio $vsRequirements
+ if ($vsInfo -ne $null) {
+ $vsInstallDir = $vsInfo.installationPath
+ $vsMajorVersion = $vsInfo.installationVersion.Split('.')[0]
+
+ InitializeVisualStudioEnvironmentVariables $vsInstallDir $vsMajorVersion
+ } else {
+
+ if (Get-Member -InputObject $GlobalJson.tools -Name 'xcopy-msbuild') {
+ $xcopyMSBuildVersion = $GlobalJson.tools.'xcopy-msbuild'
+ $vsMajorVersion = $xcopyMSBuildVersion.Split('.')[0]
+ } else {
+ #if vs version provided in global.json is incompatible then use the default version for xcopy msbuild download
+ if($vsMinVersion -lt $vsMinVersionReqd){
+ Write-Host "Using xcopy-msbuild version of $vsMinVersionReqdStr.0-alpha since VS version $vsMinVersionStr provided in global.json is not compatible"
+ $vsMajorVersion = $vsMinVersionReqd.Major
+ $vsMinorVersion = $vsMinVersionReqd.Minor
+ }
+ else{
+ $vsMajorVersion = $vsMinVersion.Major
+ $vsMinorVersion = $vsMinVersion.Minor
+ }
+
+ $xcopyMSBuildVersion = "$vsMajorVersion.$vsMinorVersion.0-alpha"
+ }
+
+ $vsInstallDir = $null
+ if ($xcopyMSBuildVersion.Trim() -ine "none") {
+ $vsInstallDir = InitializeXCopyMSBuild $xcopyMSBuildVersion $install
+ }
+ if ($vsInstallDir -eq $null) {
+ throw 'Unable to find Visual Studio that has required version and components installed'
+ }
+ }
+
+ $msbuildVersionDir = if ([int]$vsMajorVersion -lt 16) { "$vsMajorVersion.0" } else { "Current" }
+ return $global:_MSBuildExe = Join-Path $vsInstallDir "MSBuild\$msbuildVersionDir\Bin\msbuild.exe"
+}
+
+function InitializeVisualStudioEnvironmentVariables([string] $vsInstallDir, [string] $vsMajorVersion) {
+ $env:VSINSTALLDIR = $vsInstallDir
+ Set-Item "env:VS$($vsMajorVersion)0COMNTOOLS" (Join-Path $vsInstallDir "Common7\Tools\")
+
+ $vsSdkInstallDir = Join-Path $vsInstallDir "VSSDK\"
+ if (Test-Path $vsSdkInstallDir) {
+ Set-Item "env:VSSDK$($vsMajorVersion)0Install" $vsSdkInstallDir
+ $env:VSSDKInstall = $vsSdkInstallDir
+ }
+}
+
+function InstallXCopyMSBuild([string]$packageVersion) {
+ return InitializeXCopyMSBuild $packageVersion -install $true
+}
+
+function InitializeXCopyMSBuild([string]$packageVersion, [bool]$install) {
+ $packageName = 'RoslynTools.MSBuild'
+ $packageDir = Join-Path $ToolsDir "msbuild\$packageVersion"
+ $packagePath = Join-Path $packageDir "$packageName.$packageVersion.nupkg"
+
+ if (!(Test-Path $packageDir)) {
+ if (!$install) {
+ return $null
+ }
+
+ Create-Directory $packageDir
+ Write-Host "Downloading $packageName $packageVersion"
+ $ProgressPreference = 'SilentlyContinue' # Don't display the console progress UI - it's a huge perf hit
+ Invoke-WebRequest "https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng/nuget/v3/flat2/$packageName/$packageVersion/$packageName.$packageVersion.nupkg" -OutFile $packagePath
+ Unzip $packagePath $packageDir
+ }
+
+ return Join-Path $packageDir 'tools'
+}
+
+#
+# Locates Visual Studio instance that meets the minimal requirements specified by tools.vs object in global.json.
+#
+# The following properties of tools.vs are recognized:
+# "version": "{major}.{minor}"
+# Two part minimal VS version, e.g. "15.9", "16.0", etc.
+# "components": ["componentId1", "componentId2", ...]
+# Array of ids of workload components that must be available in the VS instance.
+# See e.g. https://docs.microsoft.com/en-us/visualstudio/install/workload-component-id-vs-enterprise?view=vs-2017
+#
+# Returns JSON describing the located VS instance (same format as returned by vswhere),
+# or $null if no instance meeting the requirements is found on the machine.
+#
+function LocateVisualStudio([object]$vsRequirements = $null){
+ if (-not (IsWindowsPlatform)) {
+ throw "Cannot run vswhere on non-Windows platforms."
+ }
+
+ if (Get-Member -InputObject $GlobalJson.tools -Name 'vswhere') {
+ $vswhereVersion = $GlobalJson.tools.vswhere
+ } else {
+ $vswhereVersion = '2.5.2'
+ }
+
+ $vsWhereDir = Join-Path $ToolsDir "vswhere\$vswhereVersion"
+ $vsWhereExe = Join-Path $vsWhereDir 'vswhere.exe'
+
+ if (!(Test-Path $vsWhereExe)) {
+ Create-Directory $vsWhereDir
+ Write-Host 'Downloading vswhere'
+ try {
+ Invoke-WebRequest "https://netcorenativeassets.blob.core.windows.net/resource-packages/external/windows/vswhere/$vswhereVersion/vswhere.exe" -OutFile $vswhereExe
+ }
+ catch {
+ Write-PipelineTelemetryError -Category 'InitializeToolset' -Message $_
+ }
+ }
+
+ if (!$vsRequirements) { $vsRequirements = $GlobalJson.tools.vs }
+ $args = @('-latest', '-prerelease', '-format', 'json', '-requires', 'Microsoft.Component.MSBuild', '-products', '*')
+
+ if (Get-Member -InputObject $vsRequirements -Name 'version') {
+ $args += '-version'
+ $args += $vsRequirements.version
+ }
+
+ if (Get-Member -InputObject $vsRequirements -Name 'components') {
+ foreach ($component in $vsRequirements.components) {
+ $args += '-requires'
+ $args += $component
+ }
+ }
+
+ $vsInfo =& $vsWhereExe $args | ConvertFrom-Json
+
+ if ($lastExitCode -ne 0) {
+ return $null
+ }
+
+ # use first matching instance
+ return $vsInfo[0]
+}
+
+function InitializeBuildTool() {
+ if (Test-Path variable:global:_BuildTool) {
+ return $global:_BuildTool
+ }
+
+ if (-not $msbuildEngine) {
+ $msbuildEngine = GetDefaultMSBuildEngine
+ }
+
+ # Initialize dotnet cli if listed in 'tools'
+ $dotnetRoot = $null
+ if (Get-Member -InputObject $GlobalJson.tools -Name 'dotnet') {
+ $dotnetRoot = InitializeDotNetCli -install:$restore
+ }
+
+ if ($msbuildEngine -eq 'dotnet') {
+ if (!$dotnetRoot) {
+ Write-PipelineTelemetryError -Category 'InitializeToolset' -Message "/global.json must specify 'tools.dotnet'."
+ ExitWithExitCode 1
+ }
+ $dotnetPath = Join-Path $dotnetRoot (GetExecutableFileName 'dotnet')
+ $buildTool = @{ Path = $dotnetPath; Command = 'msbuild'; Tool = 'dotnet'; Framework = 'netcoreapp2.1' }
+ } elseif ($msbuildEngine -eq "vs") {
+ try {
+ $msbuildPath = InitializeVisualStudioMSBuild -install:$restore
+ } catch {
+ Write-PipelineTelemetryError -Category 'InitializeToolset' -Message $_
+ ExitWithExitCode 1
+ }
+
+ $buildTool = @{ Path = $msbuildPath; Command = ""; Tool = "vs"; Framework = "net472" }
+ } else {
+ Write-PipelineTelemetryError -Category 'InitializeToolset' -Message "Unexpected value of -msbuildEngine: '$msbuildEngine'."
+ ExitWithExitCode 1
+ }
+
+ return $global:_BuildTool = $buildTool
+}
+
+function GetDefaultMSBuildEngine() {
+ # Presence of tools.vs indicates the repo needs to build using VS msbuild on Windows.
+ if (Get-Member -InputObject $GlobalJson.tools -Name 'vs') {
+ return 'vs'
+ }
+
+ if (Get-Member -InputObject $GlobalJson.tools -Name 'dotnet') {
+ return 'dotnet'
+ }
+
+ Write-PipelineTelemetryError -Category 'InitializeToolset' -Message "-msbuildEngine must be specified, or /global.json must specify 'tools.dotnet' or 'tools.vs'."
+ ExitWithExitCode 1
+}
+
+function GetNuGetPackageCachePath() {
+ if ($env:NUGET_PACKAGES -eq $null) {
+ # Use local cache on CI to ensure deterministic build,
+ # use global cache in dev builds to avoid cost of downloading packages.
+ # For directory normalization, see also: https://github.com/NuGet/Home/issues/7968
+ if ($useGlobalNuGetCache) {
+ $env:NUGET_PACKAGES = Join-Path $env:UserProfile '.nuget\packages\'
+ } else {
+ $env:NUGET_PACKAGES = Join-Path $RepoRoot '.packages\'
+ }
+ }
+
+ return $env:NUGET_PACKAGES
+}
+
+# Returns a full path to an Arcade SDK task project file.
+function GetSdkTaskProject([string]$taskName) {
+ return Join-Path (Split-Path (InitializeToolset) -Parent) "SdkTasks\$taskName.proj"
+}
+
+function InitializeNativeTools() {
+ if (-Not (Test-Path variable:DisableNativeToolsetInstalls) -And (Get-Member -InputObject $GlobalJson -Name "native-tools")) {
+ $nativeArgs= @{}
+ if ($ci) {
+ $nativeArgs = @{
+ InstallDirectory = "$ToolsDir"
+ }
+ }
+ & "$PSScriptRoot/init-tools-native.ps1" @nativeArgs
+ }
+}
+
+function InitializeToolset() {
+ if (Test-Path variable:global:_ToolsetBuildProj) {
+ return $global:_ToolsetBuildProj
+ }
+
+ $nugetCache = GetNuGetPackageCachePath
+
+ $toolsetVersion = $GlobalJson.'msbuild-sdks'.'Microsoft.DotNet.Arcade.Sdk'
+ $toolsetLocationFile = Join-Path $ToolsetDir "$toolsetVersion.txt"
+
+ if (Test-Path $toolsetLocationFile) {
+ $path = Get-Content $toolsetLocationFile -TotalCount 1
+ if (Test-Path $path) {
+ return $global:_ToolsetBuildProj = $path
+ }
+ }
+
+ if (-not $restore) {
+ Write-PipelineTelemetryError -Category 'InitializeToolset' -Message "Toolset version $toolsetVersion has not been restored."
+ ExitWithExitCode 1
+ }
+
+ $buildTool = InitializeBuildTool
+
+ $proj = Join-Path $ToolsetDir 'restore.proj'
+ $bl = if ($binaryLog) { '/bl:' + (Join-Path $LogDir 'ToolsetRestore.binlog') } else { '' }
+
+  '<Project Sdk="Microsoft.DotNet.Arcade.Sdk"/>' | Set-Content $proj
+
+ MSBuild-Core $proj $bl /t:__WriteToolsetLocation /clp:ErrorsOnly`;NoSummary /p:__ToolsetLocationOutputFile=$toolsetLocationFile
+
+ $path = Get-Content $toolsetLocationFile -Encoding UTF8 -TotalCount 1
+ if (!(Test-Path $path)) {
+ throw "Invalid toolset path: $path"
+ }
+
+ return $global:_ToolsetBuildProj = $path
+}
+
+function ExitWithExitCode([int] $exitCode) {
+ if ($ci -and $prepareMachine) {
+ Stop-Processes
+ }
+ exit $exitCode
+}
+
+function Stop-Processes() {
+ Write-Host 'Killing running build processes...'
+ foreach ($processName in $processesToStopOnExit) {
+ Get-Process -Name $processName -ErrorAction SilentlyContinue | Stop-Process
+ }
+}
+
+#
+# Executes msbuild (or 'dotnet msbuild') with arguments passed to the function.
+# The arguments are automatically quoted.
+# Terminates the script if the build fails.
+#
+function MSBuild() {
+ if ($pipelinesLog) {
+ $buildTool = InitializeBuildTool
+
+ if ($ci -and $buildTool.Tool -eq 'dotnet') {
+ $env:NUGET_PLUGIN_HANDSHAKE_TIMEOUT_IN_SECONDS = 20
+ $env:NUGET_PLUGIN_REQUEST_TIMEOUT_IN_SECONDS = 20
+ Write-PipelineSetVariable -Name 'NUGET_PLUGIN_HANDSHAKE_TIMEOUT_IN_SECONDS' -Value '20'
+ Write-PipelineSetVariable -Name 'NUGET_PLUGIN_REQUEST_TIMEOUT_IN_SECONDS' -Value '20'
+ }
+
+ $toolsetBuildProject = InitializeToolset
+ $path = Split-Path -parent $toolsetBuildProject
+ $path = Join-Path $path (Join-Path $buildTool.Framework 'Microsoft.DotNet.Arcade.Sdk.dll')
+ $args += "/logger:$path"
+ }
+
+ MSBuild-Core @args
+}
+
+#
+# Executes msbuild (or 'dotnet msbuild') with arguments passed to the function.
+# The arguments are automatically quoted.
+# Terminates the script if the build fails.
+#
+function MSBuild-Core() {
+ if ($ci) {
+ if (!$binaryLog -and !$excludeCIBinarylog) {
+ Write-PipelineTelemetryError -Category 'Build' -Message 'Binary log must be enabled in CI build, or explicitly opted-out from with the -excludeCIBinarylog switch.'
+ ExitWithExitCode 1
+ }
+
+ if ($nodeReuse) {
+ Write-PipelineTelemetryError -Category 'Build' -Message 'Node reuse must be disabled in CI build.'
+ ExitWithExitCode 1
+ }
+ }
+
+ $buildTool = InitializeBuildTool
+
+ $cmdArgs = "$($buildTool.Command) /m /nologo /clp:Summary /v:$verbosity /nr:$nodeReuse /p:ContinuousIntegrationBuild=$ci"
+
+ if ($warnAsError) {
+ $cmdArgs += ' /warnaserror /p:TreatWarningsAsErrors=true'
+ }
+ else {
+ $cmdArgs += ' /p:TreatWarningsAsErrors=false'
+ }
+
+ foreach ($arg in $args) {
+ if ($arg -ne $null -and $arg.Trim() -ne "") {
+ $cmdArgs += " `"$arg`""
+ }
+ }
+
+ $env:ARCADE_BUILD_TOOL_COMMAND = "$($buildTool.Path) $cmdArgs"
+
+ $exitCode = Exec-Process $buildTool.Path $cmdArgs
+
+ if ($exitCode -ne 0) {
+ Write-PipelineTelemetryError -Category 'Build' -Message 'Build failed.'
+
+ $buildLog = GetMSBuildBinaryLogCommandLineArgument $args
+ if ($buildLog -ne $null) {
+ Write-Host "See log: $buildLog" -ForegroundColor DarkGray
+ }
+
+ ExitWithExitCode $exitCode
+ }
+}
+
+function GetMSBuildBinaryLogCommandLineArgument($arguments) {
+ foreach ($argument in $arguments) {
+ if ($argument -ne $null) {
+ $arg = $argument.Trim()
+ if ($arg.StartsWith('/bl:', "OrdinalIgnoreCase")) {
+ return $arg.Substring('/bl:'.Length)
+ }
+
+ if ($arg.StartsWith('/binaryLogger:', 'OrdinalIgnoreCase')) {
+ return $arg.Substring('/binaryLogger:'.Length)
+ }
+ }
+ }
+
+ return $null
+}
+
+function GetExecutableFileName($baseName) {
+ if (IsWindowsPlatform) {
+ return "$baseName.exe"
+ }
+ else {
+ return $baseName
+ }
+}
+
+function IsWindowsPlatform() {
+ return [environment]::OSVersion.Platform -eq [PlatformID]::Win32NT
+}
+
+function Get-Darc($version) {
+ $darcPath = "$TempDir\darc\$(New-Guid)"
+ if ($version -ne $null) {
+ & $PSScriptRoot\darc-init.ps1 -toolpath $darcPath -darcVersion $version | Out-Host
+ } else {
+ & $PSScriptRoot\darc-init.ps1 -toolpath $darcPath | Out-Host
+ }
+ return "$darcPath\darc.exe"
+}
+
+. $PSScriptRoot\pipeline-logging-functions.ps1
+
+$RepoRoot = Resolve-Path (Join-Path $PSScriptRoot '..\..')
+$EngRoot = Resolve-Path (Join-Path $PSScriptRoot '..')
+$ArtifactsDir = Join-Path $RepoRoot 'artifacts'
+$ToolsetDir = Join-Path $ArtifactsDir 'toolset'
+$ToolsDir = Join-Path $RepoRoot '.tools'
+$LogDir = Join-Path (Join-Path $ArtifactsDir 'log') $configuration
+$TempDir = Join-Path (Join-Path $ArtifactsDir 'tmp') $configuration
+$GlobalJson = Get-Content -Raw -Path (Join-Path $RepoRoot 'global.json') | ConvertFrom-Json
+# true if global.json contains a "runtimes" section
+$globalJsonHasRuntimes = if ($GlobalJson.tools.PSObject.Properties.Name -Match 'runtimes') { $true } else { $false }
+
+Create-Directory $ToolsetDir
+Create-Directory $TempDir
+Create-Directory $LogDir
+
+Write-PipelineSetVariable -Name 'Artifacts' -Value $ArtifactsDir
+Write-PipelineSetVariable -Name 'Artifacts.Toolset' -Value $ToolsetDir
+Write-PipelineSetVariable -Name 'Artifacts.Log' -Value $LogDir
+Write-PipelineSetVariable -Name 'TEMP' -Value $TempDir
+Write-PipelineSetVariable -Name 'TMP' -Value $TempDir
+
+# Import custom tools configuration, if present in the repo.
+# Note: Import in global scope so that the script can set top-level variables without qualification.
+if (!$disableConfigureToolsetImport) {
+ $configureToolsetScript = Join-Path $EngRoot 'configure-toolset.ps1'
+ if (Test-Path $configureToolsetScript) {
+ . $configureToolsetScript
+ if ((Test-Path variable:failOnConfigureToolsetError) -And $failOnConfigureToolsetError) {
+ if ((Test-Path variable:LastExitCode) -And ($LastExitCode -ne 0)) {
+ Write-PipelineTelemetryError -Category 'Build' -Message 'configure-toolset.ps1 returned a non-zero exit code'
+ ExitWithExitCode $LastExitCode
+ }
+ }
+ }
+}
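tools.ps1 is normally dot-sourced by eng/common/build.ps1, which defines control variables such as $ci, $restore, and $configuration before the initialization code at the bottom of the file runs. A minimal sketch of driving it directly from a pipeline step, with those variables assumed rather than taken from any real entry point:

  steps:
  - powershell: |
      # Variables build.ps1 would normally define before dot-sourcing.
      $ci = $true; $restore = $true; $configuration = 'Debug'
      $binaryLog = $true; $pipelinesLog = $true
      $nodeReuse = $false; $warnAsError = $true; $verbosity = 'minimal'
      . eng\common\tools.ps1
      # MSBuild wires in the Arcade logger, then delegates to MSBuild-Core.
      MSBuild eng\common\helixpublish.proj /restore /t:Test
    displayName: Build via tools.ps1 (sketch)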
diff --git a/eng/common/tools.sh b/eng/common/tools.sh
new file mode 100755
index 0000000000..c722a05853
--- /dev/null
+++ b/eng/common/tools.sh
@@ -0,0 +1,507 @@
+#!/usr/bin/env bash
+
+# Initialize variables if they aren't already defined.
+
+# CI mode - set to true on CI server for PR validation build or official build.
+ci=${ci:-false}
+
+# Set to true to use the pipelines logger which will enable Azure logging output.
+# https://github.com/Microsoft/azure-pipelines-tasks/blob/master/docs/authoring/commands.md
+# This flag is meant as a temporary opt-out for the feature while we validate it across
+# our consumers. It will be deleted in the future.
+if [[ "$ci" == true ]]; then
+ pipelines_log=${pipelines_log:-true}
+else
+ pipelines_log=${pipelines_log:-false}
+fi
+
+# Build configuration. Common values include 'Debug' and 'Release', but the repository may use other names.
+configuration=${configuration:-'Debug'}
+
+# Set to true to opt out of outputting binary log while running in CI
+exclude_ci_binary_log=${exclude_ci_binary_log:-false}
+
+if [[ "$ci" == true && "$exclude_ci_binary_log" == false ]]; then
+ binary_log_default=true
+else
+ binary_log_default=false
+fi
+
+# Set to true to output binary log from msbuild. Note that emitting binary log slows down the build.
+binary_log=${binary_log:-$binary_log_default}
+
+# Turns on machine preparation/clean up code that changes the machine state (e.g. kills build processes).
+prepare_machine=${prepare_machine:-false}
+
+# True to restore toolsets and dependencies.
+restore=${restore:-true}
+
+# Adjusts msbuild verbosity level.
+verbosity=${verbosity:-'minimal'}
+
+# Set to true to reuse msbuild nodes. Recommended to not reuse on CI.
+if [[ "$ci" == true ]]; then
+ node_reuse=${node_reuse:-false}
+else
+ node_reuse=${node_reuse:-true}
+fi
+
+# Configures warning treatment in msbuild.
+warn_as_error=${warn_as_error:-true}
+
+# True to attempt using a .NET Core SDK already installed on the machine that meets
+# the requirements specified in global.json, instead of downloading one.
+use_installed_dotnet_cli=${use_installed_dotnet_cli:-true}
+
+# Enable repos to use a particular version of the on-line dotnet-install scripts.
+# default URL: https://dot.net/v1/dotnet-install.sh
+dotnetInstallScriptVersion=${dotnetInstallScriptVersion:-'v1'}
+
+# True to use global NuGet cache instead of restoring packages to repository-local directory.
+if [[ "$ci" == true ]]; then
+ use_global_nuget_cache=${use_global_nuget_cache:-false}
+else
+ use_global_nuget_cache=${use_global_nuget_cache:-true}
+fi
+
+# Used when restoring .NET SDK from alternative feeds
+runtime_source_feed=${runtime_source_feed:-''}
+runtime_source_feed_key=${runtime_source_feed_key:-''}
+
+# Determines if dotnet-install.sh comes from the eng/common folder or the internet
+# (default = public version)
+use_default_dotnet_install=${use_default_dotnet_install:-false}
+
+# Resolve any symlinks in the given path.
+function ResolvePath {
+ local path=$1
+
+ while [[ -h $path ]]; do
+ local dir="$( cd -P "$( dirname "$path" )" && pwd )"
+ path="$(readlink "$path")"
+
+ # if $path was a relative symlink, we need to resolve it relative to the path where the
+ # symlink file was located
+ [[ $path != /* ]] && path="$dir/$path"
+ done
+
+ # return value
+ _ResolvePath="$path"
+}
+
+# ReadVersionFromJson [json key]
+function ReadGlobalVersion {
+ local key=$1
+
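+  # awk returns the first line of global.json that mentions the key; the
+  # regex below then captures the quoted value, e.g. '"dotnet": "3.1.102"'
+  # yields '3.1.102'.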
+ local line=$(awk "/$key/ {print; exit}" "$global_json_file")
+ local pattern="\"$key\" *: *\"(.*)\""
+
+ if [[ ! $line =~ $pattern ]]; then
+ Write-PipelineTelemetryError -category 'Build' "Error: Cannot find \"$key\" in $global_json_file"
+ ExitWithExitCode 1
+ fi
+
+ # return value
+ _ReadGlobalVersion=${BASH_REMATCH[1]}
+}
+
+function InitializeDotNetCli {
+ if [[ -n "${_InitializeDotNetCli:-}" ]]; then
+ return
+ fi
+
+ local install=$1
+
+ # Don't resolve runtime, shared framework, or SDK from other locations to ensure build determinism
+ export DOTNET_MULTILEVEL_LOOKUP=0
+
+ # Disable first run since we want to control all package sources
+ export DOTNET_SKIP_FIRST_TIME_EXPERIENCE=1
+
+ # Disable telemetry on CI
+ if [[ $ci == true ]]; then
+ export DOTNET_CLI_TELEMETRY_OPTOUT=1
+ fi
+
+ # LTTNG is the logging infrastructure used by Core CLR. Need this variable set
+ # so it doesn't output warnings to the console.
+ export LTTNG_HOME="$HOME"
+
+ # Source Build uses DotNetCoreSdkDir variable
+ if [[ -n "${DotNetCoreSdkDir:-}" ]]; then
+ export DOTNET_INSTALL_DIR="$DotNetCoreSdkDir"
+ fi
+
+ # Find the first path on $PATH that contains the dotnet.exe
+ if [[ "$use_installed_dotnet_cli" == true && $global_json_has_runtimes == false && -z "${DOTNET_INSTALL_DIR:-}" ]]; then
+ local dotnet_path=`command -v dotnet`
+ if [[ -n "$dotnet_path" ]]; then
+ ResolvePath "$dotnet_path"
+ export DOTNET_INSTALL_DIR=`dirname "$_ResolvePath"`
+ fi
+ fi
+
+ ReadGlobalVersion "dotnet"
+ local dotnet_sdk_version=$_ReadGlobalVersion
+ local dotnet_root=""
+
+ # Use dotnet installation specified in DOTNET_INSTALL_DIR if it contains the required SDK version,
+ # otherwise install the dotnet CLI and SDK to repo local .dotnet directory to avoid potential permission issues.
+ if [[ $global_json_has_runtimes == false && -n "${DOTNET_INSTALL_DIR:-}" && -d "$DOTNET_INSTALL_DIR/sdk/$dotnet_sdk_version" ]]; then
+ dotnet_root="$DOTNET_INSTALL_DIR"
+ else
+ dotnet_root="$repo_root/.dotnet"
+
+ export DOTNET_INSTALL_DIR="$dotnet_root"
+
+ if [[ ! -d "$DOTNET_INSTALL_DIR/sdk/$dotnet_sdk_version" ]]; then
+ if [[ "$install" == true ]]; then
+ InstallDotNetSdk "$dotnet_root" "$dotnet_sdk_version"
+ else
+ Write-PipelineTelemetryError -category 'InitializeToolset' "Unable to find dotnet with SDK version '$dotnet_sdk_version'"
+ ExitWithExitCode 1
+ fi
+ fi
+ fi
+
+ # Add dotnet to PATH. This prevents any bare invocation of dotnet in custom
+ # build steps from using anything other than what we've downloaded.
+ Write-PipelinePrependPath -path "$dotnet_root"
+
+ Write-PipelineSetVariable -name "DOTNET_MULTILEVEL_LOOKUP" -value "0"
+ Write-PipelineSetVariable -name "DOTNET_SKIP_FIRST_TIME_EXPERIENCE" -value "1"
+
+ # return value
+ _InitializeDotNetCli="$dotnet_root"
+}
+
+function InstallDotNetSdk {
+ local root=$1
+ local version=$2
+ local architecture="unset"
+ if [[ $# -ge 3 ]]; then
+ architecture=$3
+ fi
+ InstallDotNet "$root" "$version" $architecture 'sdk' 'false' $runtime_source_feed $runtime_source_feed_key
+}
+
+function InstallDotNet {
+ local root=$1
+ local version=$2
+
+ GetDotNetInstallScript "$root"
+ local install_script=$_GetDotNetInstallScript
+
+ local archArg=''
+ if [[ -n "${3:-}" ]] && [ "$3" != 'unset' ]; then
+ archArg="--architecture $3"
+ fi
+ local runtimeArg=''
+ if [[ -n "${4:-}" ]] && [ "$4" != 'sdk' ]; then
+ runtimeArg="--runtime $4"
+ fi
+ local skipNonVersionedFilesArg=""
+ if [[ "$#" -ge "5" ]] && [[ "$5" != 'false' ]]; then
+ skipNonVersionedFilesArg="--skip-non-versioned-files"
+ fi
+ bash "$install_script" --version $version --install-dir "$root" $archArg $runtimeArg $skipNonVersionedFilesArg || {
+ local exit_code=$?
+ echo "Failed to install dotnet SDK from public location (exit code '$exit_code')."
+
+ local runtimeSourceFeed=''
+ if [[ -n "${6:-}" ]]; then
+ runtimeSourceFeed="--azure-feed $6"
+ fi
+
+ local runtimeSourceFeedKey=''
+ if [[ -n "${7:-}" ]]; then
+      # The 'base64' binary on alpine uses '-d' and doesn't support '--decode'.
+      # To work around this, do a simple detection and switch the parameter
+      # accordingly.
+ decodeArg="--decode"
+ if base64 --help 2>&1 | grep -q "BusyBox"; then
+ decodeArg="-d"
+ fi
+ decodedFeedKey=`echo $7 | base64 $decodeArg`
+ runtimeSourceFeedKey="--feed-credential $decodedFeedKey"
+ fi
+
+ if [[ -n "$runtimeSourceFeed" || -n "$runtimeSourceFeedKey" ]]; then
+ bash "$install_script" --version $version --install-dir "$root" $archArg $runtimeArg $skipNonVersionedFilesArg $runtimeSourceFeed $runtimeSourceFeedKey || {
+ local exit_code=$?
+ Write-PipelineTelemetryError -category 'InitializeToolset' "Failed to install dotnet SDK from custom location '$runtimeSourceFeed' (exit code '$exit_code')."
+ ExitWithExitCode $exit_code
+ }
+ else
+ if [[ $exit_code != 0 ]]; then
+ Write-PipelineTelemetryError -category 'InitializeToolset' "Failed to install dotnet SDK from public location (exit code '$exit_code')."
+ fi
+ ExitWithExitCode $exit_code
+ fi
+ }
+}
+
+function with_retries {
+ local maxRetries=5
+ local retries=1
+ echo "Trying to run '$@' for maximum of $maxRetries attempts."
+ while [[ $((retries++)) -le $maxRetries ]]; do
+ "$@"
+
+ if [[ $? == 0 ]]; then
+ echo "Ran '$@' successfully."
+ return 0
+ fi
+
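+    # exponential backoff: wait 2^retries-1 seconds (3, 7, 15, ...) before retrying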
+ timeout=$((2**$retries-1))
+ echo "Failed to execute '$@'. Waiting $timeout seconds before next attempt ($retries out of $maxRetries)." 1>&2
+ sleep $timeout
+ done
+
+ echo "Failed to execute '$@' for $maxRetries times." 1>&2
+
+ return 1
+}
+
+function GetDotNetInstallScript {
+ local root=$1
+ local install_script="$root/dotnet-install.sh"
+ local install_script_url="https://dot.net/$dotnetInstallScriptVersion/dotnet-install.sh"
+
+ if [[ ! -a "$install_script" ]]; then
+ mkdir -p "$root"
+
+ if [[ "$use_default_dotnet_install" == true ]]; then
+ echo "Downloading '$install_script_url'"
+
+ # Use curl if available, otherwise use wget
+ if command -v curl > /dev/null; then
+ with_retries curl "$install_script_url" -sSL --retry 10 --create-dirs -o "$install_script" || {
+ local exit_code=$?
+ Write-PipelineTelemetryError -category 'InitializeToolset' "Failed to acquire dotnet install script (exit code '$exit_code')."
+ ExitWithExitCode $exit_code
+ }
+ else
+ with_retries wget -v -O "$install_script" "$install_script_url" || {
+ local exit_code=$?
+ Write-PipelineTelemetryError -category 'InitializeToolset' "Failed to acquire dotnet install script (exit code '$exit_code')."
+ ExitWithExitCode $exit_code
+ }
+ fi
+ else
+ # Use a special version of the script from eng/common that understands the existence of a "productVersion.txt" in a dotnet path.
+ # See https://github.com/dotnet/arcade/issues/6047 for details
+ cp $repo_root/eng/common/dotnet-install-scripts/dotnet-install.sh $install_script
+ fi
+ fi
+
+ # return value
+ _GetDotNetInstallScript="$install_script"
+}
+
+function InitializeBuildTool {
+ if [[ -n "${_InitializeBuildTool:-}" ]]; then
+ return
+ fi
+
+ InitializeDotNetCli $restore
+
+ # return values
+ _InitializeBuildTool="$_InitializeDotNetCli/dotnet"
+ _InitializeBuildToolCommand="msbuild"
+ _InitializeBuildToolFramework="netcoreapp2.1"
+}
+
+function GetNuGetPackageCachePath {
+ if [[ -z ${NUGET_PACKAGES:-} ]]; then
+ if [[ "$use_global_nuget_cache" == true ]]; then
+ export NUGET_PACKAGES="$HOME/.nuget/packages"
+ else
+ export NUGET_PACKAGES="$repo_root/.packages"
+ fi
+ fi
+
+ # return value
+ _GetNuGetPackageCachePath=$NUGET_PACKAGES
+}
+
+function InitializeNativeTools() {
+ if [[ -n "${DisableNativeToolsetInstalls:-}" ]]; then
+ return
+ fi
+ if grep -Fq "native-tools" $global_json_file
+ then
+ local nativeArgs=""
+ if [[ "$ci" == true ]]; then
+ nativeArgs="--installDirectory $tools_dir"
+ fi
+ "$_script_dir/init-tools-native.sh" $nativeArgs
+ fi
+}
+
+function InitializeToolset {
+ if [[ -n "${_InitializeToolset:-}" ]]; then
+ return
+ fi
+
+ GetNuGetPackageCachePath
+
+ ReadGlobalVersion "Microsoft.DotNet.Arcade.Sdk"
+
+ local toolset_version=$_ReadGlobalVersion
+ local toolset_location_file="$toolset_dir/$toolset_version.txt"
+
+ if [[ -a "$toolset_location_file" ]]; then
+ local path=`cat "$toolset_location_file"`
+ if [[ -a "$path" ]]; then
+ # return value
+ _InitializeToolset="$path"
+ return
+ fi
+ fi
+
+ if [[ "$restore" != true ]]; then
+ Write-PipelineTelemetryError -category 'InitializeToolset' "Toolset version $toolset_version has not been restored."
+ ExitWithExitCode 2
+ fi
+
+ local proj="$toolset_dir/restore.proj"
+
+ local bl=""
+ if [[ "$binary_log" == true ]]; then
+ bl="/bl:$log_dir/ToolsetRestore.binlog"
+ fi
+
+  echo '<Project Sdk="Microsoft.DotNet.Arcade.Sdk"/>' > "$proj"
+ MSBuild-Core "$proj" $bl /t:__WriteToolsetLocation /clp:ErrorsOnly\;NoSummary /p:__ToolsetLocationOutputFile="$toolset_location_file"
+
+ local toolset_build_proj=`cat "$toolset_location_file"`
+
+ if [[ ! -a "$toolset_build_proj" ]]; then
+ Write-PipelineTelemetryError -category 'Build' "Invalid toolset path: $toolset_build_proj"
+ ExitWithExitCode 3
+ fi
+
+ # return value
+ _InitializeToolset="$toolset_build_proj"
+}
+
+function ExitWithExitCode {
+ if [[ "$ci" == true && "$prepare_machine" == true ]]; then
+ StopProcesses
+ fi
+ exit $1
+}
+
+function StopProcesses {
+ echo "Killing running build processes..."
+ pkill -9 "dotnet" || true
+ pkill -9 "vbcscompiler" || true
+ return 0
+}
+
+function MSBuild {
+ local args=$@
+ if [[ "$pipelines_log" == true ]]; then
+ InitializeBuildTool
+ InitializeToolset
+
+ if [[ "$ci" == true ]]; then
+ export NUGET_PLUGIN_HANDSHAKE_TIMEOUT_IN_SECONDS=20
+ export NUGET_PLUGIN_REQUEST_TIMEOUT_IN_SECONDS=20
+ Write-PipelineSetVariable -name "NUGET_PLUGIN_HANDSHAKE_TIMEOUT_IN_SECONDS" -value "20"
+ Write-PipelineSetVariable -name "NUGET_PLUGIN_REQUEST_TIMEOUT_IN_SECONDS" -value "20"
+ fi
+
+ local toolset_dir="${_InitializeToolset%/*}"
+ local logger_path="$toolset_dir/$_InitializeBuildToolFramework/Microsoft.DotNet.Arcade.Sdk.dll"
+ args=( "${args[@]}" "-logger:$logger_path" )
+ fi
+
+ MSBuild-Core ${args[@]}
+}
+
+function MSBuild-Core {
+ if [[ "$ci" == true ]]; then
+ if [[ "$binary_log" != true && "$exclude_ci_binary_log" != true ]]; then
+ Write-PipelineTelemetryError -category 'Build' "Binary log must be enabled in CI build, or explicitly opted-out from with the -noBinaryLog switch."
+ ExitWithExitCode 1
+ fi
+
+ if [[ "$node_reuse" == true ]]; then
+ Write-PipelineTelemetryError -category 'Build' "Node reuse must be disabled in CI build."
+ ExitWithExitCode 1
+ fi
+ fi
+
+ InitializeBuildTool
+
+ local warnaserror_switch=""
+ if [[ $warn_as_error == true ]]; then
+ warnaserror_switch="/warnaserror"
+ fi
+
+ function RunBuildTool {
+ export ARCADE_BUILD_TOOL_COMMAND="$_InitializeBuildTool $@"
+
+ "$_InitializeBuildTool" "$@" || {
+ local exit_code=$?
+ Write-PipelineTaskError "Build failed (exit code '$exit_code')."
+ ExitWithExitCode $exit_code
+ }
+ }
+
+ RunBuildTool "$_InitializeBuildToolCommand" /m /nologo /clp:Summary /v:$verbosity /nr:$node_reuse $warnaserror_switch /p:TreatWarningsAsErrors=$warn_as_error /p:ContinuousIntegrationBuild=$ci "$@"
+}
+
+ResolvePath "${BASH_SOURCE[0]}"
+_script_dir=`dirname "$_ResolvePath"`
+
+. "$_script_dir/pipeline-logging-functions.sh"
+
+eng_root=`cd -P "$_script_dir/.." && pwd`
+repo_root=`cd -P "$_script_dir/../.." && pwd`
+artifacts_dir="$repo_root/artifacts"
+toolset_dir="$artifacts_dir/toolset"
+tools_dir="$repo_root/.tools"
+log_dir="$artifacts_dir/log/$configuration"
+temp_dir="$artifacts_dir/tmp/$configuration"
+
+global_json_file="$repo_root/global.json"
+# determine if global.json contains a "runtimes" entry
+global_json_has_runtimes=false
+dotnetlocal_key=$(awk "/runtimes/ {print; exit}" "$global_json_file") || true
+if [[ -n "$dotnetlocal_key" ]]; then
+ global_json_has_runtimes=true
+fi
+
+# HOME may not be defined in some scenarios, but it is required by NuGet
+if [[ -z $HOME ]]; then
+ export HOME="$repo_root/artifacts/.home/"
+ mkdir -p "$HOME"
+fi
+
+mkdir -p "$toolset_dir"
+mkdir -p "$temp_dir"
+mkdir -p "$log_dir"
+
+Write-PipelineSetVariable -name "Artifacts" -value "$artifacts_dir"
+Write-PipelineSetVariable -name "Artifacts.Toolset" -value "$toolset_dir"
+Write-PipelineSetVariable -name "Artifacts.Log" -value "$log_dir"
+Write-PipelineSetVariable -name "Temp" -value "$temp_dir"
+Write-PipelineSetVariable -name "TMP" -value "$temp_dir"
+
+# Import custom tools configuration, if present in the repo.
+if [ -z "${disable_configure_toolset_import:-}" ]; then
+ configure_toolset_script="$eng_root/configure-toolset.sh"
+ if [[ -a "$configure_toolset_script" ]]; then
+ . "$configure_toolset_script"
+ fi
+fi
+
+# TODO: https://github.com/dotnet/arcade/issues/1468
+# Temporary workaround to avoid breaking change.
+# Remove once repos are updated.
+if [[ -n "${useInstalledDotNetCli:-}" ]]; then
+ use_installed_dotnet_cli="$useInstalledDotNetCli"
+fi
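The shell twin mirrors build.sh's contract: export the control variables, source tools.sh (which creates the artifacts directories and registers pipeline variables), then call the MSBuild wrapper. A minimal sketch with assumed values:

  steps:
  - script: |
      # Variables build.sh would normally set before sourcing tools.sh.
      export ci=true restore=true configuration=Debug
      export binary_log=true pipelines_log=true node_reuse=false
      . eng/common/tools.sh
      MSBuild eng/common/helixpublish.proj /restore /t:Test
    displayName: Build via tools.sh (sketch)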
diff --git a/pkg/common/CommonPackage.props b/eng/pkg/CommonPackage.props
similarity index 100%
rename from pkg/common/CommonPackage.props
rename to eng/pkg/CommonPackage.props
diff --git a/pkg/common/DnnImageFeaturizer.props b/eng/pkg/DnnImageFeaturizer.props
similarity index 100%
rename from pkg/common/DnnImageFeaturizer.props
rename to eng/pkg/DnnImageFeaturizer.props
diff --git a/eng/pkg/Pack.props b/eng/pkg/Pack.props
new file mode 100644
index 0000000000..496cb2d50e
--- /dev/null
+++ b/eng/pkg/Pack.props
@@ -0,0 +1,78 @@
+<Project>
+
+  <PropertyGroup>
+    <PackageAssetsPath>$(ArtifactsDir)pkgassets/</PackageAssetsPath>
+    <IncludeSymbols>true</IncludeSymbols>
+    <SymbolPackageFormat>snupkg</SymbolPackageFormat>
+    <IsPackable>true</IsPackable>
+    <PackageId>$(MSBuildProjectName.Replace('.symbols', ''))</PackageId>
+    <!-- (two further 'true' properties; element names lost in extraction) -->
+  </PropertyGroup>
+
+  <PropertyGroup>
+    <Authors>Microsoft</Authors>
+    <PackageLicenseFile>LICENSE</PackageLicenseFile>
+    <PackageProjectUrl>https://dot.net/ml</PackageProjectUrl>
+    <PackageIcon>mlnetlogo.png</PackageIcon>
+    <PackageReleaseNotes>https://aka.ms/mlnetreleasenotes</PackageReleaseNotes>
+    <PackageTags>ML.NET ML Machine Learning</PackageTags>
+    <NativeLibExtension Condition="'$(OS)' == 'Windows_NT'">.dll</NativeLibExtension>
+    <NativeLibExtension Condition="$([MSBuild]::IsOSPlatform('Linux'))">.so</NativeLibExtension>
+    <NativeLibExtension Condition="$([MSBuild]::IsOSPlatform('OSX'))">.dylib</NativeLibExtension>
+  </PropertyGroup>
+
+  <!-- (several ItemGroup declarations lost in extraction; only Content metadata survives) -->
+  <ItemGroup>
+    <Content>
+      <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
+      <Visible>false</Visible>
+      <Link>%(Filename)%(Extension)</Link>
+    </Content>
+    <Content>
+      <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
+      <Visible>false</Visible>
+      <Link>%(Filename)%(Extension)</Link>
+    </Content>
+  </ItemGroup>
+
+  <!-- (remaining markup lost in extraction; a surviving property value: 'None') -->
+</Project>
\ No newline at end of file
diff --git a/pkg/_._ b/eng/pkg/_._
similarity index 100%
rename from pkg/_._
rename to eng/pkg/_._
diff --git a/pkg/mlnetlogo.png b/eng/pkg/mlnetlogo.png
similarity index 100%
rename from pkg/mlnetlogo.png
rename to eng/pkg/mlnetlogo.png
diff --git a/eng/snk/Test.snk b/eng/snk/Test.snk
new file mode 100644
index 0000000000..8082d18e9d
Binary files /dev/null and b/eng/snk/Test.snk differ
diff --git a/global.json b/global.json
new file mode 100644
index 0000000000..ee6291512f
--- /dev/null
+++ b/global.json
@@ -0,0 +1,16 @@
+{
+ "tools": {
+ "dotnet": "3.1.102",
+ "runtimes": {
+ "dotnet/x64": ["$(MicrosoftNETCorePlatformsVersion)", "$(MicrosoftNETCore3PlatformsVersion)"],
+ "dotnet/x86": ["$(MicrosoftNETCorePlatformsVersion)", "$(MicrosoftNETCore3PlatformsVersion)"]
+ }
+ },
+ "msbuild-sdks": {
+ "Microsoft.DotNet.Arcade.Sdk": "5.0.0-beta.20461.7",
+ "Microsoft.DotNet.Helix.Sdk": "5.0.0-beta.20461.7",
+ "Microsoft.Build.Traversal": "2.1.1",
+ "Microsoft.SourceLink.GitHub": "1.1.0-beta-20206-02",
+ "Microsoft.SourceLink.Common": "1.1.0-beta-20206-02"
+ }
+}
diff --git a/init-tools.cmd b/init-tools.cmd
deleted file mode 100644
index 4c7893ec49..0000000000
--- a/init-tools.cmd
+++ /dev/null
@@ -1,128 +0,0 @@
-@if not defined _echo @echo off
-setlocal
-
-set INIT_TOOLS_LOG=%~dp0init-tools.log
-if [%PACKAGES_DIR%]==[] set PACKAGES_DIR=%~dp0packages
-if [%TOOLRUNTIME_DIR%]==[] set TOOLRUNTIME_DIR=%~dp0Tools
-set DOTNET_PATH=%TOOLRUNTIME_DIR%\dotnetcli\
-if [%DOTNET_CMD%]==[] set DOTNET_CMD=%DOTNET_PATH%dotnet.exe
-if [%BUILDTOOLS_SOURCE%]==[] set BUILDTOOLS_SOURCE=https://dotnet.myget.org/F/dotnet-buildtools/api/v3/index.json
-set /P BUILDTOOLS_VERSION=< "%~dp0BuildToolsVersion.txt"
-set BUILD_TOOLS_PATH=%PACKAGES_DIR%\Microsoft.DotNet.BuildTools\%BUILDTOOLS_VERSION%\lib
-set INIT_TOOLS_RESTORE_PROJECT=%~dp0init-tools.msbuild
-set BUILD_TOOLS_SEMAPHORE_DIR=%TOOLRUNTIME_DIR%\%BUILDTOOLS_VERSION%
-set BUILD_TOOLS_SEMAPHORE=%BUILD_TOOLS_SEMAPHORE_DIR%\init-tools.completed
-set ARCH=x64
-
-:: if force option is specified then clean the tool runtime and build tools package directory to force it to get recreated
-if [%1]==[force] (
- if exist "%TOOLRUNTIME_DIR%" rmdir /S /Q "%TOOLRUNTIME_DIR%"
- if exist "%PACKAGES_DIR%\Microsoft.DotNet.BuildTools" rmdir /S /Q "%PACKAGES_DIR%\Microsoft.DotNet.BuildTools"
-)
-
-:: If semaphore exists do nothing
-if exist "%BUILD_TOOLS_SEMAPHORE%" (
- echo Tools are already initialized.
- goto :EOF
-)
-
-if exist "%TOOLRUNTIME_DIR%" rmdir /S /Q "%TOOLRUNTIME_DIR%"
-
-if exist "%DotNetBuildToolsDir%" (
- echo Using tools from '%DotNetBuildToolsDir%'.
- mklink /j "%TOOLRUNTIME_DIR%" "%DotNetBuildToolsDir%"
-
- if not exist "%DOTNET_CMD%" (
- echo ERROR: Ensure that '%DotNetBuildToolsDir%' contains the .NET Core SDK at '%DOTNET_PATH%'
- exit /b 1
- )
-
- echo Done initializing tools.
- if NOT exist "%BUILD_TOOLS_SEMAPHORE_DIR%" mkdir "%BUILD_TOOLS_SEMAPHORE_DIR%"
- echo Using tools from '%DotNetBuildToolsDir%'. > "%BUILD_TOOLS_SEMAPHORE%"
- exit /b 0
-)
-
-echo Running %0 > "%INIT_TOOLS_LOG%"
-
-set /p DOTNET_VERSION=< "%~dp0DotnetCLIVersion.txt"
-set /p DOTNET_EXTRA_RUNTIME_VERSION=< "%~dp0DotnetExtraRuntimeVersion.txt"
-
-:Arg_Loop
-if [%1] == [] goto :ArchSet
-if /i [%1] == [x86] ( set ARCH=x86)
-shift
-goto :Arg_Loop
-
-:ArchSet
-if exist "%DOTNET_CMD%" goto :afterdotnetrestore
-
-if NOT exist "%DOTNET_PATH%" mkdir "%DOTNET_PATH%"
-
-:: set registry to take dump automatically when test process crashes
-if NOT [%AGENT_ID%] == [] (
- reg add "HKEY_LOCAL_MACHINE\SOFTWARE\Microsoft\Windows\Windows Error Reporting\LocalDumps" /f
- reg add "HKEY_LOCAL_MACHINE\SOFTWARE\Microsoft\Windows\Windows Error Reporting\LocalDumps" /f /v DumpType /t REG_DWORD /d 2
- reg add "HKEY_LOCAL_MACHINE\SOFTWARE\Microsoft\Windows\Windows Error Reporting\LocalDumps" /f /v DumpCount /t REG_DWORD /d 2
- reg add "HKEY_LOCAL_MACHINE\SOFTWARE\Microsoft\Windows\Windows Error Reporting\LocalDumps" /f /v DumpFolder /t REG_SZ /d "%~dp0CrashDumps"
-)
-
-:: install procdump.exe to take process dump when test crashes, hangs or fails
-echo Installing procdump.exe
-powershell -Command "Invoke-WebRequest https://download.sysinternals.com/files/Procdump.zip -UseBasicParsing -outfile procdump.zip | Out-Null"
-powershell -Command "Expand-Archive -Force procdump.zip Tools\ProcDump"
-del /f procdump.zip
-echo Finish install procdump.exe
-
-:: install the extra runtime first, so the SDK install will overwrite the root dotnet executable
-echo Installing dotnet runtime %DOTNET_EXTRA_RUNTIME_VERSION%...
-set DOTNET_EXTRA_RUNTIME_ZIP_NAME=dotnet-runtime-%DOTNET_EXTRA_RUNTIME_VERSION%-win-%ARCH%.zip
-set DOTNET_EXTRA_RUNTIME_REMOTE_PATH=https://dotnetcli.azureedge.net/dotnet/Runtime/%DOTNET_EXTRA_RUNTIME_VERSION%/%DOTNET_EXTRA_RUNTIME_ZIP_NAME%
-set DOTNET_EXTRA_RUNTIME_LOCAL_PATH=%DOTNET_PATH%%DOTNET_EXTRA_RUNTIME_ZIP_NAME%
-echo Installing '%DOTNET_EXTRA_RUNTIME_REMOTE_PATH%' to '%DOTNET_EXTRA_RUNTIME_LOCAL_PATH%' >> "%INIT_TOOLS_LOG%"
-powershell -NoProfile -ExecutionPolicy unrestricted -Command "[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12;$retryCount = 0; $success = $false; $proxyCredentialsRequired = $false; do { try { $wc = New-Object Net.WebClient; if ($proxyCredentialsRequired) { [Net.WebRequest]::DefaultWebProxy.Credentials = [Net.CredentialCache]::DefaultNetworkCredentials; } $wc.DownloadFile('%DOTNET_EXTRA_RUNTIME_REMOTE_PATH%', '%DOTNET_EXTRA_RUNTIME_LOCAL_PATH%'); $success = $true; } catch { if ($retryCount -ge 6) { throw; } else { $we = $_.Exception.InnerException -as [Net.WebException]; $proxyCredentialsRequired = ($we -ne $null -and ([Net.HttpWebResponse]$we.Response).StatusCode -eq [Net.HttpStatusCode]::ProxyAuthenticationRequired); Start-Sleep -Seconds (5 * $retryCount); $retryCount++; } } } while ($success -eq $false); Expand-Archive '%DOTNET_EXTRA_RUNTIME_LOCAL_PATH%' '%DOTNET_PATH%';" >> "%INIT_TOOLS_LOG%"
-
-echo Installing dotnet cli %DOTNET_VERSION%...
-set DOTNET_ZIP_NAME=dotnet-sdk-%DOTNET_VERSION%-win-%ARCH%.zip
-set DOTNET_REMOTE_PATH=https://dotnetcli.azureedge.net/dotnet/Sdk/%DOTNET_VERSION%/%DOTNET_ZIP_NAME%
-set DOTNET_LOCAL_PATH=%DOTNET_PATH%%DOTNET_ZIP_NAME%
-echo Installing '%DOTNET_REMOTE_PATH%' to '%DOTNET_LOCAL_PATH%' >> "%INIT_TOOLS_LOG%"
-powershell -NoProfile -ExecutionPolicy unrestricted -Command "[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12;$retryCount = 0; $success = $false; $proxyCredentialsRequired = $false; do { try { $wc = New-Object Net.WebClient; if ($proxyCredentialsRequired) { [Net.WebRequest]::DefaultWebProxy.Credentials = [Net.CredentialCache]::DefaultNetworkCredentials; } $wc.DownloadFile('%DOTNET_REMOTE_PATH%', '%DOTNET_LOCAL_PATH%'); $success = $true; } catch { if ($retryCount -ge 6) { throw; } else { $we = $_.Exception.InnerException -as [Net.WebException]; $proxyCredentialsRequired = ($we -ne $null -and ([Net.HttpWebResponse]$we.Response).StatusCode -eq [Net.HttpStatusCode]::ProxyAuthenticationRequired); Start-Sleep -Seconds (5 * $retryCount); $retryCount++; } } } while ($success -eq $false); Expand-Archive '%DOTNET_LOCAL_PATH%' '%DOTNET_PATH%' -Force; " >> "%INIT_TOOLS_LOG%"
-
-if NOT exist "%DOTNET_LOCAL_PATH%" (
- echo ERROR: Could not install dotnet cli correctly. 1>&2
- goto :error
-)
-
-:afterdotnetrestore
-
-if exist "%BUILD_TOOLS_PATH%" goto :afterbuildtoolsrestore
-echo Restoring BuildTools version %BUILDTOOLS_VERSION%...
-echo Running: "%DOTNET_CMD%" restore "%INIT_TOOLS_RESTORE_PROJECT%" --no-cache --packages "%PACKAGES_DIR%" --source "%BUILDTOOLS_SOURCE%" /p:BuildToolsPackageVersion=%BUILDTOOLS_VERSION% /p:ToolsDir=%TOOLRUNTIME_DIR% >> "%INIT_TOOLS_LOG%"
-call "%DOTNET_CMD%" restore "%INIT_TOOLS_RESTORE_PROJECT%" --no-cache --packages "%PACKAGES_DIR%" --source "%BUILDTOOLS_SOURCE%" /p:BuildToolsPackageVersion=%BUILDTOOLS_VERSION% /p:ToolsDir="%TOOLRUNTIME_DIR%" >> "%INIT_TOOLS_LOG%"
-if NOT exist "%BUILD_TOOLS_PATH%\init-tools.cmd" (
- echo ERROR: Could not restore build tools correctly. 1>&2
- goto :error
-)
-
-:afterbuildtoolsrestore
-
-echo Initializing BuildTools...
-echo Running: "%BUILD_TOOLS_PATH%\init-tools.cmd" "%~dp0" "%DOTNET_CMD%" "%TOOLRUNTIME_DIR%" "%PACKAGES_DIR%" >> "%INIT_TOOLS_LOG%"
-call "%BUILD_TOOLS_PATH%\init-tools.cmd" "%~dp0" "%DOTNET_CMD%" "%TOOLRUNTIME_DIR%" "%PACKAGES_DIR%" >> "%INIT_TOOLS_LOG%"
-set INIT_TOOLS_ERRORLEVEL=%ERRORLEVEL%
-if not [%INIT_TOOLS_ERRORLEVEL%]==[0] (
-  echo ERROR: An error occurred when trying to initialize the tools. 1>&2
- goto :error
-)
-
-:: Create semaphore file
-echo Done initializing tools.
-if NOT exist "%BUILD_TOOLS_SEMAPHORE_DIR%" mkdir "%BUILD_TOOLS_SEMAPHORE_DIR%"
-echo Init-Tools.cmd completed for BuildTools Version: %BUILDTOOLS_VERSION% > "%BUILD_TOOLS_SEMAPHORE%"
-exit /b 0
-
-:error
-echo Please check the detailed log that follows. 1>&2
-type "%INIT_TOOLS_LOG%" 1>&2
-exit /b 1
diff --git a/init-tools.msbuild b/init-tools.msbuild
deleted file mode 100644
index 7bb7fa0043..0000000000
--- a/init-tools.msbuild
+++ /dev/null
@@ -1,13 +0,0 @@
-
-
- netcoreapp1.0
- false
- true
- $(MSBuildThisFileDirectory)Tools/$(BuildToolsPackageVersion)
- Microsoft.SymbolUploader.Build.Task
-
-
-
-
-
-
\ No newline at end of file
diff --git a/init-tools.sh b/init-tools.sh
deleted file mode 100755
index 492df5711f..0000000000
--- a/init-tools.sh
+++ /dev/null
@@ -1,207 +0,0 @@
-#!/usr/bin/env bash
-
-__scriptpath=$(cd "$(dirname "$0")"; pwd -P)
-__init_tools_log="$__scriptpath/init-tools.log"
-__PACKAGES_DIR="$__scriptpath/packages"
-__TOOLRUNTIME_DIR="$__scriptpath/Tools"
-__DOTNET_PATH="$__TOOLRUNTIME_DIR/dotnetcli"
-__DOTNET_CMD="$__DOTNET_PATH/dotnet"
-if [ -z "${__BUILDTOOLS_SOURCE:-}" ]; then __BUILDTOOLS_SOURCE=https://dotnet.myget.org/F/dotnet-buildtools/api/v3/index.json; fi
-export __BUILDTOOLS_USE_CSPROJ=true
-__BUILD_TOOLS_PACKAGE_VERSION=$(cat "$__scriptpath/BuildToolsVersion.txt" | sed 's/\r$//') # remove CR if mounted repo on Windows drive
-
-DotNetCliFileName="DotnetCLIVersion.txt"
-DotNetExtraRuntimeFileName="DotnetExtraRuntimeVersion.txt"
-
-__DOTNET_TOOLS_VERSION=$(cat "$__scriptpath/$DotNetCliFileName" | sed 's/\r$//') # remove CR if mounted repo on Windows drive
-__DOTNET_EXTRA_RUNTIME_VERSION=$(cat "$__scriptpath/$DotNetExtraRuntimeFileName" | sed 's/\r$//') # remove CR if mounted repo on Windows drive
-__BUILD_TOOLS_PATH="$__PACKAGES_DIR/microsoft.dotnet.buildtools/$__BUILD_TOOLS_PACKAGE_VERSION/lib"
-__INIT_TOOLS_RESTORE_PROJECT="$__scriptpath/init-tools.msbuild"
-__BUILD_TOOLS_SEMAPHORE="$__TOOLRUNTIME_DIR/$__BUILD_TOOLS_PACKAGE_VERSION/init-tools.complete"
-
-if [ -e "$__BUILD_TOOLS_SEMAPHORE" ]; then
- echo "Tools are already initialized"
- return #return instead of exit because this script is inlined in other scripts which we don't want to exit
-fi
-
-if [ -e "$__TOOLRUNTIME_DIR" ]; then rm -rf -- "$__TOOLRUNTIME_DIR"; fi
-
-if [ -d "${DotNetBuildToolsDir:-}" ]; then
- echo "Using tools from '$DotNetBuildToolsDir'."
- ln -s "$DotNetBuildToolsDir" "$__TOOLRUNTIME_DIR"
-
- if [ ! -e "$__DOTNET_CMD" ]; then
- echo "ERROR: Ensure that $DotNetBuildToolsDir contains the .NET Core SDK at $__DOTNET_PATH"
- exit 1
- fi
-
- echo "Done initializing tools."
- mkdir -p "$(dirname "$__BUILD_TOOLS_SEMAPHORE")" && touch "$__BUILD_TOOLS_SEMAPHORE"
- return #return instead of exit because this script is inlined in other scripts which we don't want to exit
-fi
-
-echo "Running: $__scriptpath/init-tools.sh" > "$__init_tools_log"
-
-display_error_message()
-{
- echo "Please check the detailed log that follows." 1>&2
- cat "$__init_tools_log" 1>&2
-}
-
-# Executes a command and retries if it fails.
-execute_with_retry() {
- local count=0
- local retries=${retries:-5}
- local waitFactor=${waitFactor:-6}
- until "$@"; do
- local exit=$?
- count=$(( $count + 1 ))
- if [ $count -lt $retries ]; then
- local wait=$(( waitFactor ** (( count - 1 )) ))
- echo "Retry $count/$retries exited $exit, retrying in $wait seconds..."
- sleep $wait
- else
- say_err "Retry $count/$retries exited $exit, no more retries left."
- return $exit
- fi
- done
-
- return 0
-}
-
-if [ ! -e "$__DOTNET_PATH" ]; then
- if [ -z "${__DOTNET_PKG:-}" ]; then
- if [ "$(uname -m | grep "i[3456]86")" = "i686" ]; then
- echo "Warning: build not supported on 32 bit Unix"
- fi
-
- __PKG_ARCH=x64
-
- OSName=$(uname -s)
- case $OSName in
- Darwin)
- OS=OSX
- __PKG_RID=osx
- ulimit -n 2048
- # Format x.y.z as single integer with three digits for each part
- VERSION=`sw_vers -productVersion| sed -e 's/\./ /g' | xargs printf "%03d%03d%03d"`
- if [ "$VERSION" -lt 010012000 ]; then
- echo error: macOS version `sw_vers -productVersion` is too old. 10.12 is needed as minimum.
- exit 1
- fi
- ;;
-
- Linux)
- __PKG_RID=linux
- OS=Linux
-
- if [ -e /etc/os-release ]; then
- source /etc/os-release
- if [[ $ID == "alpine" ]]; then
- __PKG_RID=linux-musl
- fi
- elif [ -e /etc/redhat-release ]; then
-      redhatRelease=$(</etc/redhat-release)
-      if command -v curl > /dev/null; then
- curl --retry 10 -sSL --create-dirs -o $__DOTNET_PATH/dotnet.extra.runtime.tar ${__DOTNET_EXTRA_RUNTIME_LOCATION}
- else
- wget -q -O $__DOTNET_PATH/dotnet.extra.runtime.tar ${__DOTNET_EXTRA_RUNTIME_LOCATION}
- fi
- else
- echo "Copying '$DotNetExtraRuntimeTarPath' to '$__DOTNET_PATH/dotnet.extra.runtime.tar'"
- cp $DotNetExtraRuntimeTarPath $__DOTNET_PATH/dotnet.extra.runtime.tar
- fi
- cd "$__DOTNET_PATH"
- tar -xf "$__DOTNET_PATH/dotnet.extra.runtime.tar"
- }
- execute_with_retry install_dotnet_extra_runtime >> "$__init_tools_log" 2>&1
-
- echo "Installing dotnet cli ${__DOTNET_TOOLS_VERSION}..."
- __DOTNET_LOCATION="https://dotnetcli.azureedge.net/dotnet/Sdk/${__DOTNET_TOOLS_VERSION}/${__DOTNET_PKG}.tar.gz"
-
- install_dotnet_cli() {
- if [[ -z "${DotNetBootstrapCliTarPath-}" ]]; then
- echo "Installing '${__DOTNET_LOCATION}' to '$__DOTNET_PATH/dotnet.tar'"
- rm -rf -- "$__DOTNET_PATH/*"
-      # curl has HTTPS CA trust issues less often than wget, so let's try that first.
- if command -v curl > /dev/null; then
- curl --retry 10 -sSL --create-dirs -o $__DOTNET_PATH/dotnet.tar ${__DOTNET_LOCATION}
- else
- wget -q -O $__DOTNET_PATH/dotnet.tar ${__DOTNET_LOCATION}
- fi
- else
- echo "Copying '$DotNetBootstrapCliTarPath' to '$__DOTNET_PATH/dotnet.tar'"
- cp $DotNetBootstrapCliTarPath $__DOTNET_PATH/dotnet.tar
- fi
- cd "$__DOTNET_PATH"
- tar -xf "$__DOTNET_PATH/dotnet.tar"
- }
- execute_with_retry install_dotnet_cli >> "$__init_tools_log" 2>&1
-
- cd "$__scriptpath"
-fi
-
-if [ ! -e "$__BUILD_TOOLS_PATH" ]; then
- echo "Restoring BuildTools version $__BUILD_TOOLS_PACKAGE_VERSION..."
- echo "Running: $__DOTNET_CMD restore \"$__INIT_TOOLS_RESTORE_PROJECT\" --no-cache --packages $__PACKAGES_DIR --source $__BUILDTOOLS_SOURCE /p:BuildToolsPackageVersion=$__BUILD_TOOLS_PACKAGE_VERSION /p:ToolsDir=$__TOOLRUNTIME_DIR" >> "$__init_tools_log"
- "$__DOTNET_CMD" restore "$__INIT_TOOLS_RESTORE_PROJECT" --no-cache --packages "$__PACKAGES_DIR" --source "$__BUILDTOOLS_SOURCE" /p:BuildToolsPackageVersion=$__BUILD_TOOLS_PACKAGE_VERSION /p:ToolsDir="$__TOOLRUNTIME_DIR" >> "$__init_tools_log"
- if [ ! -e "$__BUILD_TOOLS_PATH/init-tools.sh" ]; then
- echo "ERROR: Could not restore build tools correctly." 1>&2
- display_error_message
- fi
-fi
-
-echo "Initializing BuildTools..."
-echo "Running: $__BUILD_TOOLS_PATH/init-tools.sh $__scriptpath $__DOTNET_CMD $__TOOLRUNTIME_DIR $__PACKAGES_DIR" >> "$__init_tools_log"
-
-# Executables restored with .NET Core 2.0 do not have executable permission flags. https://github.com/NuGet/Home/issues/4424
-chmod +x "$__BUILD_TOOLS_PATH/init-tools.sh"
-"$__BUILD_TOOLS_PATH/init-tools.sh" "$__scriptpath" "$__DOTNET_CMD" "$__TOOLRUNTIME_DIR" "$__PACKAGES_DIR" >> "$__init_tools_log"
-if [ "$?" != "0" ]; then
- echo "ERROR: An error occurred when trying to initialize the tools." 1>&2
- display_error_message
- exit 1
-fi
-
-echo "Making all .sh files executable under Tools."
-# Executables restored with .NET Core 2.0 do not have executable permission flags. https://github.com/NuGet/Home/issues/4424
-ls "$__scriptpath/Tools/"*.sh | xargs chmod +x
-ls "$__scriptpath/Tools/scripts/docker/"*.sh | xargs chmod +x
-
-mkdir -p "$(dirname "$__BUILD_TOOLS_SEMAPHORE")" && touch "$__BUILD_TOOLS_SEMAPHORE"
-
-echo "Done initializing tools."
diff --git a/pkg/Directory.Build.props b/pkg/Directory.Build.props
deleted file mode 100644
index 16ad2d7b8d..0000000000
--- a/pkg/Directory.Build.props
+++ /dev/null
@@ -1,59 +0,0 @@
-
-
-
-
- true
- false
- false
-
- true
- $(MSBuildProjectName.Replace('.symbols', ''))
-
-
- $(IntermediateOutputRootPath)$(MSBuildProjectName).NupkgProj\
- $(IntermediateOutputPath)
-
-
-
-
- Microsoft
- LICENSE
- https://dot.net/ml
- mlnetlogo.png
- https://aka.ms/mlnetreleasenotes
-
- ML.NET ML Machine Learning
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- None
-
-
-
-
\ No newline at end of file
diff --git a/pkg/Microsoft.Extensions.ML/Microsoft.Extensions.ML.nupkgproj b/pkg/Microsoft.Extensions.ML/Microsoft.Extensions.ML.nupkgproj
deleted file mode 100644
index 4dbb257b8f..0000000000
--- a/pkg/Microsoft.Extensions.ML/Microsoft.Extensions.ML.nupkgproj
+++ /dev/null
@@ -1,16 +0,0 @@
-
-
-
- netstandard2.0
- An integration package for ML.NET models on scalable web apps and services.
-
-
-
-
-
-
-
-
-
-
-
diff --git a/pkg/Microsoft.Extensions.ML/Microsoft.Extensions.ML.symbols.nupkgproj b/pkg/Microsoft.Extensions.ML/Microsoft.Extensions.ML.symbols.nupkgproj
deleted file mode 100644
index 0b7af4d817..0000000000
--- a/pkg/Microsoft.Extensions.ML/Microsoft.Extensions.ML.symbols.nupkgproj
+++ /dev/null
@@ -1,5 +0,0 @@
-
-
-
-
-
diff --git a/pkg/Microsoft.ML.AutoML/Microsoft.ML.AutoML.nupkgproj b/pkg/Microsoft.ML.AutoML/Microsoft.ML.AutoML.nupkgproj
deleted file mode 100644
index 8a9fc5db71..0000000000
--- a/pkg/Microsoft.ML.AutoML/Microsoft.ML.AutoML.nupkgproj
+++ /dev/null
@@ -1,17 +0,0 @@
-
-
-
- netstandard2.0
- ML.NET AutoML: Optimizes an ML pipeline for your dataset, by automatically locating the best feature engineering, model, and hyperparameters
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/pkg/Microsoft.ML.AutoML/Microsoft.ML.AutoML.symbols.nupkgproj b/pkg/Microsoft.ML.AutoML/Microsoft.ML.AutoML.symbols.nupkgproj
deleted file mode 100644
index a648ab1d59..0000000000
--- a/pkg/Microsoft.ML.AutoML/Microsoft.ML.AutoML.symbols.nupkgproj
+++ /dev/null
@@ -1,3 +0,0 @@
-
-
-
diff --git a/pkg/Microsoft.ML.CodeGenerator/Microsoft.ML.CodeGenerator.nupkgproj b/pkg/Microsoft.ML.CodeGenerator/Microsoft.ML.CodeGenerator.nupkgproj
deleted file mode 100644
index 48c288fb6b..0000000000
--- a/pkg/Microsoft.ML.CodeGenerator/Microsoft.ML.CodeGenerator.nupkgproj
+++ /dev/null
@@ -1,17 +0,0 @@
-
-
-
- netstandard2.0
- ML.NET Code Generator
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/pkg/Microsoft.ML.CodeGenerator/Microsoft.ML.CodeGenerator.symbols.nupkgproj b/pkg/Microsoft.ML.CodeGenerator/Microsoft.ML.CodeGenerator.symbols.nupkgproj
deleted file mode 100644
index cc330b9cbb..0000000000
--- a/pkg/Microsoft.ML.CodeGenerator/Microsoft.ML.CodeGenerator.symbols.nupkgproj
+++ /dev/null
@@ -1,3 +0,0 @@
-
-
-
diff --git a/pkg/Microsoft.ML.CpuMath/Microsoft.ML.CpuMath.nupkgproj b/pkg/Microsoft.ML.CpuMath/Microsoft.ML.CpuMath.nupkgproj
deleted file mode 100644
index e60e2b232d..0000000000
--- a/pkg/Microsoft.ML.CpuMath/Microsoft.ML.CpuMath.nupkgproj
+++ /dev/null
@@ -1,16 +0,0 @@
-
-
-
- netstandard2.0;netcoreapp3.1
- Microsoft.ML.CpuMath contains optimized math routines for ML.NET.
-
-
-
-
-
-
-
-
-
-
-
diff --git a/pkg/Microsoft.ML.CpuMath/Microsoft.ML.CpuMath.symbols.nupkgproj b/pkg/Microsoft.ML.CpuMath/Microsoft.ML.CpuMath.symbols.nupkgproj
deleted file mode 100644
index 8b44c50d7f..0000000000
--- a/pkg/Microsoft.ML.CpuMath/Microsoft.ML.CpuMath.symbols.nupkgproj
+++ /dev/null
@@ -1,13 +0,0 @@
-
-
-
-
-
-
- $(NoWarn);NU5129
-
-
-
diff --git a/pkg/Microsoft.ML.DataView/Microsoft.ML.DataView.nupkgproj b/pkg/Microsoft.ML.DataView/Microsoft.ML.DataView.nupkgproj
deleted file mode 100644
index ab500b904e..0000000000
--- a/pkg/Microsoft.ML.DataView/Microsoft.ML.DataView.nupkgproj
+++ /dev/null
@@ -1,13 +0,0 @@
-
-
-
- netstandard2.0
- Contains the IDataView system which is a set of interfaces and components that provide efficient, compositional processing of schematized data for machine learning and advanced analytics applications.
-
-
-
-
-
-
-
-
diff --git a/pkg/Microsoft.ML.DataView/Microsoft.ML.DataView.symbols.nupkgproj b/pkg/Microsoft.ML.DataView/Microsoft.ML.DataView.symbols.nupkgproj
deleted file mode 100644
index 988c4c5f72..0000000000
--- a/pkg/Microsoft.ML.DataView/Microsoft.ML.DataView.symbols.nupkgproj
+++ /dev/null
@@ -1,5 +0,0 @@
-
-
-
-
-
diff --git a/pkg/Microsoft.ML.DnnImageFeaturizer.AlexNet/Microsoft.ML.DnnImageFeaturizer.AlexNet.nupkgproj b/pkg/Microsoft.ML.DnnImageFeaturizer.AlexNet/Microsoft.ML.DnnImageFeaturizer.AlexNet.nupkgproj
deleted file mode 100644
index 4fb4e52d0a..0000000000
--- a/pkg/Microsoft.ML.DnnImageFeaturizer.AlexNet/Microsoft.ML.DnnImageFeaturizer.AlexNet.nupkgproj
+++ /dev/null
@@ -1,17 +0,0 @@
-
-
-
- netstandard2.0
- ML.NET component for pretrained AlexNet image featurization
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/pkg/Microsoft.ML.DnnImageFeaturizer.AlexNet/Microsoft.ML.DnnImageFeaturizer.AlexNet.symbols.nupkgproj b/pkg/Microsoft.ML.DnnImageFeaturizer.AlexNet/Microsoft.ML.DnnImageFeaturizer.AlexNet.symbols.nupkgproj
deleted file mode 100644
index 8c6a7fcc4c..0000000000
--- a/pkg/Microsoft.ML.DnnImageFeaturizer.AlexNet/Microsoft.ML.DnnImageFeaturizer.AlexNet.symbols.nupkgproj
+++ /dev/null
@@ -1,5 +0,0 @@
-
-
-
-
-
diff --git a/pkg/Microsoft.ML.DnnImageFeaturizer.ResNet101/Microsoft.ML.DnnImageFeaturizer.ResNet101.nupkgproj b/pkg/Microsoft.ML.DnnImageFeaturizer.ResNet101/Microsoft.ML.DnnImageFeaturizer.ResNet101.nupkgproj
deleted file mode 100644
index 3d667604cd..0000000000
--- a/pkg/Microsoft.ML.DnnImageFeaturizer.ResNet101/Microsoft.ML.DnnImageFeaturizer.ResNet101.nupkgproj
+++ /dev/null
@@ -1,16 +0,0 @@
-
-
-
- netstandard2.0
- ML.NET component for pretrained ResNet101 image featurization
-
-
-
-
-
-
-
-
-
-
-
diff --git a/pkg/Microsoft.ML.DnnImageFeaturizer.ResNet101/Microsoft.ML.DnnImageFeaturizer.ResNet101.symbols.nupkgproj b/pkg/Microsoft.ML.DnnImageFeaturizer.ResNet101/Microsoft.ML.DnnImageFeaturizer.ResNet101.symbols.nupkgproj
deleted file mode 100644
index 7035bef747..0000000000
--- a/pkg/Microsoft.ML.DnnImageFeaturizer.ResNet101/Microsoft.ML.DnnImageFeaturizer.ResNet101.symbols.nupkgproj
+++ /dev/null
@@ -1,5 +0,0 @@
-
-
-
-
-
diff --git a/pkg/Microsoft.ML.DnnImageFeaturizer.ResNet18/Microsoft.ML.DnnImageFeaturizer.ResNet18.nupkgproj b/pkg/Microsoft.ML.DnnImageFeaturizer.ResNet18/Microsoft.ML.DnnImageFeaturizer.ResNet18.nupkgproj
deleted file mode 100644
index 1055b4cbbe..0000000000
--- a/pkg/Microsoft.ML.DnnImageFeaturizer.ResNet18/Microsoft.ML.DnnImageFeaturizer.ResNet18.nupkgproj
+++ /dev/null
@@ -1,16 +0,0 @@
-
-
-
- netstandard2.0
- ML.NET component for pretrained ResNet18 image featurization
-
-
-
-
-
-
-
-
-
-
-
diff --git a/pkg/Microsoft.ML.DnnImageFeaturizer.ResNet18/Microsoft.ML.DnnImageFeaturizer.ResNet18.symbols.nupkgproj b/pkg/Microsoft.ML.DnnImageFeaturizer.ResNet18/Microsoft.ML.DnnImageFeaturizer.ResNet18.symbols.nupkgproj
deleted file mode 100644
index 9fb3f5ca75..0000000000
--- a/pkg/Microsoft.ML.DnnImageFeaturizer.ResNet18/Microsoft.ML.DnnImageFeaturizer.ResNet18.symbols.nupkgproj
+++ /dev/null
@@ -1,5 +0,0 @@
-
-
-
-
-
diff --git a/pkg/Microsoft.ML.DnnImageFeaturizer.ResNet50/Microsoft.ML.DnnImageFeaturizer.ResNet50.nupkgproj b/pkg/Microsoft.ML.DnnImageFeaturizer.ResNet50/Microsoft.ML.DnnImageFeaturizer.ResNet50.nupkgproj
deleted file mode 100644
index 2c33df9ff8..0000000000
--- a/pkg/Microsoft.ML.DnnImageFeaturizer.ResNet50/Microsoft.ML.DnnImageFeaturizer.ResNet50.nupkgproj
+++ /dev/null
@@ -1,16 +0,0 @@
-
-
-
- netstandard2.0
- ML.NET component for pretrained ResNet50 image featurization
-
-
-
-
-
-
-
-
-
-
-
diff --git a/pkg/Microsoft.ML.DnnImageFeaturizer.ResNet50/Microsoft.ML.DnnImageFeaturizer.ResNet50.symbols.nupkgproj b/pkg/Microsoft.ML.DnnImageFeaturizer.ResNet50/Microsoft.ML.DnnImageFeaturizer.ResNet50.symbols.nupkgproj
deleted file mode 100644
index 2b04e494f9..0000000000
--- a/pkg/Microsoft.ML.DnnImageFeaturizer.ResNet50/Microsoft.ML.DnnImageFeaturizer.ResNet50.symbols.nupkgproj
+++ /dev/null
@@ -1,5 +0,0 @@
-
-
-
-
-
diff --git a/pkg/Microsoft.ML.Ensemble/Microsoft.ML.Ensemble.nupkgproj b/pkg/Microsoft.ML.Ensemble/Microsoft.ML.Ensemble.nupkgproj
deleted file mode 100644
index f8b8082047..0000000000
--- a/pkg/Microsoft.ML.Ensemble/Microsoft.ML.Ensemble.nupkgproj
+++ /dev/null
@@ -1,12 +0,0 @@
-
-
-
- netstandard2.0
- ML.NET component for Ensembles
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/pkg/Microsoft.ML.Ensemble/Microsoft.ML.Ensemble.symbols.nupkgproj b/pkg/Microsoft.ML.Ensemble/Microsoft.ML.Ensemble.symbols.nupkgproj
deleted file mode 100644
index bb48a51cab..0000000000
--- a/pkg/Microsoft.ML.Ensemble/Microsoft.ML.Ensemble.symbols.nupkgproj
+++ /dev/null
@@ -1,3 +0,0 @@
-
-
-
diff --git a/pkg/Microsoft.ML.EntryPoints/Microsoft.ML.EntryPoints.nupkgproj b/pkg/Microsoft.ML.EntryPoints/Microsoft.ML.EntryPoints.nupkgproj
deleted file mode 100644
index b845fdeb45..0000000000
--- a/pkg/Microsoft.ML.EntryPoints/Microsoft.ML.EntryPoints.nupkgproj
+++ /dev/null
@@ -1,12 +0,0 @@
-
-
-
- netstandard2.0
- Microsoft.ML.EntryPoints contains the ML.NET entry point API catalog.
-
-
-
-
-
-
-
diff --git a/pkg/Microsoft.ML.EntryPoints/Microsoft.ML.EntryPoints.symbols.nupkgproj b/pkg/Microsoft.ML.EntryPoints/Microsoft.ML.EntryPoints.symbols.nupkgproj
deleted file mode 100644
index 3fa0255960..0000000000
--- a/pkg/Microsoft.ML.EntryPoints/Microsoft.ML.EntryPoints.symbols.nupkgproj
+++ /dev/null
@@ -1,5 +0,0 @@
-
-
-
-
-
diff --git a/pkg/Microsoft.ML.Experimental/Microsoft.ML.Experimental.nupkgproj b/pkg/Microsoft.ML.Experimental/Microsoft.ML.Experimental.nupkgproj
deleted file mode 100644
index edf80ad475..0000000000
--- a/pkg/Microsoft.ML.Experimental/Microsoft.ML.Experimental.nupkgproj
+++ /dev/null
@@ -1,12 +0,0 @@
-
-
-
- netstandard2.0
-    Microsoft.ML.Experimental contains experimental work such as extension methods to access internal methods.
-
-
-
-
-
-
-
diff --git a/pkg/Microsoft.ML.Experimental/Microsoft.ML.Experimental.symbols.nupkgproj b/pkg/Microsoft.ML.Experimental/Microsoft.ML.Experimental.symbols.nupkgproj
deleted file mode 100644
index c869da5d2b..0000000000
--- a/pkg/Microsoft.ML.Experimental/Microsoft.ML.Experimental.symbols.nupkgproj
+++ /dev/null
@@ -1,5 +0,0 @@
-
-
-
-
-
diff --git a/pkg/Microsoft.ML.FastTree/Microsoft.ML.FastTree.nupkgproj b/pkg/Microsoft.ML.FastTree/Microsoft.ML.FastTree.nupkgproj
deleted file mode 100644
index ac6f2452ad..0000000000
--- a/pkg/Microsoft.ML.FastTree/Microsoft.ML.FastTree.nupkgproj
+++ /dev/null
@@ -1,16 +0,0 @@
-
-
-
- netstandard2.0
- ML.NET component for FastTree
-
-
-
-
-
-
-
-
-
-
-
diff --git a/pkg/Microsoft.ML.FastTree/Microsoft.ML.FastTree.symbols.nupkgproj b/pkg/Microsoft.ML.FastTree/Microsoft.ML.FastTree.symbols.nupkgproj
deleted file mode 100644
index 9f4c5712ff..0000000000
--- a/pkg/Microsoft.ML.FastTree/Microsoft.ML.FastTree.symbols.nupkgproj
+++ /dev/null
@@ -1,5 +0,0 @@
-
-
-
-
-
diff --git a/pkg/Microsoft.ML.Featurizers/Microsoft.ML.Featurizers.nupkgproj b/pkg/Microsoft.ML.Featurizers/Microsoft.ML.Featurizers.nupkgproj
deleted file mode 100644
index ea35d7d019..0000000000
--- a/pkg/Microsoft.ML.Featurizers/Microsoft.ML.Featurizers.nupkgproj
+++ /dev/null
@@ -1,14 +0,0 @@
-
-
-
- netstandard2.0;netcoreapp2.1
- ML.NET featurizers with native code implementation
-
-
-
-
-
-
-
-
-
diff --git a/pkg/Microsoft.ML.Featurizers/Microsoft.ML.Featurizers.symbols.nupkgproj b/pkg/Microsoft.ML.Featurizers/Microsoft.ML.Featurizers.symbols.nupkgproj
deleted file mode 100644
index 483e51c61a..0000000000
--- a/pkg/Microsoft.ML.Featurizers/Microsoft.ML.Featurizers.symbols.nupkgproj
+++ /dev/null
@@ -1,5 +0,0 @@
-
-
-
-
-
diff --git a/pkg/Microsoft.ML.ImageAnalytics/Microsoft.ML.ImageAnalytics.nupkgproj b/pkg/Microsoft.ML.ImageAnalytics/Microsoft.ML.ImageAnalytics.nupkgproj
deleted file mode 100644
index bb59e06653..0000000000
--- a/pkg/Microsoft.ML.ImageAnalytics/Microsoft.ML.ImageAnalytics.nupkgproj
+++ /dev/null
@@ -1,13 +0,0 @@
-
-
-
- netstandard2.0
- ML.NET component for Image support
-
-
-
-
-
-
-
-
diff --git a/pkg/Microsoft.ML.ImageAnalytics/Microsoft.ML.ImageAnalytics.symbols.nupkgproj b/pkg/Microsoft.ML.ImageAnalytics/Microsoft.ML.ImageAnalytics.symbols.nupkgproj
deleted file mode 100644
index b36800ea0b..0000000000
--- a/pkg/Microsoft.ML.ImageAnalytics/Microsoft.ML.ImageAnalytics.symbols.nupkgproj
+++ /dev/null
@@ -1,5 +0,0 @@
-
-
-
-
-
diff --git a/pkg/Microsoft.ML.LightGbm/Microsoft.ML.LightGbm.nupkgproj b/pkg/Microsoft.ML.LightGbm/Microsoft.ML.LightGbm.nupkgproj
deleted file mode 100644
index d4e48e8dd0..0000000000
--- a/pkg/Microsoft.ML.LightGbm/Microsoft.ML.LightGbm.nupkgproj
+++ /dev/null
@@ -1,14 +0,0 @@
-
-
-
- netstandard2.0
- ML.NET component for LightGBM
-
-
-
-
-
-
-
-
-
diff --git a/pkg/Microsoft.ML.LightGbm/Microsoft.ML.LightGbm.symbols.nupkgproj b/pkg/Microsoft.ML.LightGbm/Microsoft.ML.LightGbm.symbols.nupkgproj
deleted file mode 100644
index 91a20f3ed8..0000000000
--- a/pkg/Microsoft.ML.LightGbm/Microsoft.ML.LightGbm.symbols.nupkgproj
+++ /dev/null
@@ -1,5 +0,0 @@
-
-
-
-
-
diff --git a/pkg/Microsoft.ML.Mkl.Components/Microsoft.ML.Mkl.Components.nupkgproj b/pkg/Microsoft.ML.Mkl.Components/Microsoft.ML.Mkl.Components.nupkgproj
deleted file mode 100644
index 1bd16d235d..0000000000
--- a/pkg/Microsoft.ML.Mkl.Components/Microsoft.ML.Mkl.Components.nupkgproj
+++ /dev/null
@@ -1,17 +0,0 @@
-
-
-
- netstandard2.0
- ML.NET additional learners making use of Intel Mkl.
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/pkg/Microsoft.ML.Mkl.Components/Microsoft.ML.Mkl.Components.symbols.nupkgproj b/pkg/Microsoft.ML.Mkl.Components/Microsoft.ML.Mkl.Components.symbols.nupkgproj
deleted file mode 100644
index 0eaabcdd6e..0000000000
--- a/pkg/Microsoft.ML.Mkl.Components/Microsoft.ML.Mkl.Components.symbols.nupkgproj
+++ /dev/null
@@ -1,5 +0,0 @@
-
-
-
-
-
diff --git a/pkg/Microsoft.ML.Mkl.Redist/Microsoft.ML.Mkl.Redist.nupkgproj b/pkg/Microsoft.ML.Mkl.Redist/Microsoft.ML.Mkl.Redist.nupkgproj
deleted file mode 100644
index bda6fb1277..0000000000
--- a/pkg/Microsoft.ML.Mkl.Redist/Microsoft.ML.Mkl.Redist.nupkgproj
+++ /dev/null
@@ -1,20 +0,0 @@
-
-
-
- Intel
- netstandard2.0
- LICENSE.txt
- $(MSBuildProjectName) contains the MKL library redistributed as a NuGet package.
- $(PackageTags) MLNET MKL
-
-
-
- false
-
-
-
-
-
-
-
-
diff --git a/pkg/Microsoft.ML.OnnxConverter/Microsoft.ML.OnnxConverter.nupkgproj b/pkg/Microsoft.ML.OnnxConverter/Microsoft.ML.OnnxConverter.nupkgproj
deleted file mode 100644
index bcc86939e2..0000000000
--- a/pkg/Microsoft.ML.OnnxConverter/Microsoft.ML.OnnxConverter.nupkgproj
+++ /dev/null
@@ -1,13 +0,0 @@
-
-
-
- netstandard2.0
- ML.NET component for exporting ONNX Models
-
-
-
-
-
-
-
-
diff --git a/pkg/Microsoft.ML.OnnxConverter/Microsoft.ML.OnnxConverter.symbols.nupkgproj b/pkg/Microsoft.ML.OnnxConverter/Microsoft.ML.OnnxConverter.symbols.nupkgproj
deleted file mode 100644
index ccc3e94770..0000000000
--- a/pkg/Microsoft.ML.OnnxConverter/Microsoft.ML.OnnxConverter.symbols.nupkgproj
+++ /dev/null
@@ -1,5 +0,0 @@
-
-
-
-
-
diff --git a/pkg/Microsoft.ML.OnnxTransformer/Microsoft.ML.OnnxTransformer.nupkgproj b/pkg/Microsoft.ML.OnnxTransformer/Microsoft.ML.OnnxTransformer.nupkgproj
deleted file mode 100644
index 3c7d9f2ccd..0000000000
--- a/pkg/Microsoft.ML.OnnxTransformer/Microsoft.ML.OnnxTransformer.nupkgproj
+++ /dev/null
@@ -1,14 +0,0 @@
-
-
-
- netstandard2.0
- ML.NET component for Microsoft.ML.Scoring library
-
-
-
-
-
-
-
-
-
diff --git a/pkg/Microsoft.ML.OnnxTransformer/Microsoft.ML.OnnxTransformer.symbols.nupkgproj b/pkg/Microsoft.ML.OnnxTransformer/Microsoft.ML.OnnxTransformer.symbols.nupkgproj
deleted file mode 100644
index 6358a82311..0000000000
--- a/pkg/Microsoft.ML.OnnxTransformer/Microsoft.ML.OnnxTransformer.symbols.nupkgproj
+++ /dev/null
@@ -1,5 +0,0 @@
-
-
-
-
-
diff --git a/pkg/Microsoft.ML.Parquet/Microsoft.ML.Parquet.nupkgproj b/pkg/Microsoft.ML.Parquet/Microsoft.ML.Parquet.nupkgproj
deleted file mode 100644
index 750926db34..0000000000
--- a/pkg/Microsoft.ML.Parquet/Microsoft.ML.Parquet.nupkgproj
+++ /dev/null
@@ -1,13 +0,0 @@
-
-
-
- netstandard2.0
- ML.NET components for Apache Parquet support.
-
-
-
-
-
-
-
-
diff --git a/pkg/Microsoft.ML.Parquet/Microsoft.ML.Parquet.symbols.nupkgproj b/pkg/Microsoft.ML.Parquet/Microsoft.ML.Parquet.symbols.nupkgproj
deleted file mode 100644
index bc14894823..0000000000
--- a/pkg/Microsoft.ML.Parquet/Microsoft.ML.Parquet.symbols.nupkgproj
+++ /dev/null
@@ -1,5 +0,0 @@
-
-
-
-
-
diff --git a/pkg/Microsoft.ML.Recommender/Microsoft.ML.Recommender.nupkgproj b/pkg/Microsoft.ML.Recommender/Microsoft.ML.Recommender.nupkgproj
deleted file mode 100644
index bc0e57f8c0..0000000000
--- a/pkg/Microsoft.ML.Recommender/Microsoft.ML.Recommender.nupkgproj
+++ /dev/null
@@ -1,14 +0,0 @@
-
-
-
- netstandard2.0
- LIBMF, the core computation library for matrix factorization in ML.NET
-
-
-
-
-
-
-
-
-
diff --git a/pkg/Microsoft.ML.Recommender/Microsoft.ML.Recommender.symbols.nupkgproj b/pkg/Microsoft.ML.Recommender/Microsoft.ML.Recommender.symbols.nupkgproj
deleted file mode 100644
index d82f31ad89..0000000000
--- a/pkg/Microsoft.ML.Recommender/Microsoft.ML.Recommender.symbols.nupkgproj
+++ /dev/null
@@ -1,5 +0,0 @@
-
-
-
-
-
diff --git a/pkg/Microsoft.ML.SampleUtils/Microsoft.ML.SampleUtils.nupkgproj b/pkg/Microsoft.ML.SampleUtils/Microsoft.ML.SampleUtils.nupkgproj
deleted file mode 100644
index e2b85df502..0000000000
--- a/pkg/Microsoft.ML.SampleUtils/Microsoft.ML.SampleUtils.nupkgproj
+++ /dev/null
@@ -1,12 +0,0 @@
-
-
-
- netstandard2.0
- Sample utils for Microsoft.ML.Samples
-
-
-
-
-
-
-
diff --git a/pkg/Microsoft.ML.SampleUtils/Microsoft.ML.SampleUtils.symbols.nupkgproj b/pkg/Microsoft.ML.SampleUtils/Microsoft.ML.SampleUtils.symbols.nupkgproj
deleted file mode 100644
index 2a2228e6af..0000000000
--- a/pkg/Microsoft.ML.SampleUtils/Microsoft.ML.SampleUtils.symbols.nupkgproj
+++ /dev/null
@@ -1,5 +0,0 @@
-
-
-
-
-
diff --git a/pkg/Microsoft.ML.TensorFlow/Microsoft.ML.TensorFlow.nupkgproj b/pkg/Microsoft.ML.TensorFlow/Microsoft.ML.TensorFlow.nupkgproj
deleted file mode 100644
index ac054f6136..0000000000
--- a/pkg/Microsoft.ML.TensorFlow/Microsoft.ML.TensorFlow.nupkgproj
+++ /dev/null
@@ -1,15 +0,0 @@
-
-
-
- netstandard2.0
- Microsoft.ML.TensorFlow contains ML.NET integration of TensorFlow.
-
-
-
-
-
-
-
-
-
-
diff --git a/pkg/Microsoft.ML.TensorFlow/Microsoft.ML.TensorFlow.symbols.nupkgproj b/pkg/Microsoft.ML.TensorFlow/Microsoft.ML.TensorFlow.symbols.nupkgproj
deleted file mode 100644
index a2a2a153f7..0000000000
--- a/pkg/Microsoft.ML.TensorFlow/Microsoft.ML.TensorFlow.symbols.nupkgproj
+++ /dev/null
@@ -1,5 +0,0 @@
-
-
-
-
-
diff --git a/pkg/Microsoft.ML.TimeSeries/Microsoft.ML.TimeSeries.nupkgproj b/pkg/Microsoft.ML.TimeSeries/Microsoft.ML.TimeSeries.nupkgproj
deleted file mode 100644
index d0325da023..0000000000
--- a/pkg/Microsoft.ML.TimeSeries/Microsoft.ML.TimeSeries.nupkgproj
+++ /dev/null
@@ -1,13 +0,0 @@
-
-
-
- netstandard2.0
- Microsoft.ML.TimeSeries contains ML.NET Time Series prediction algorithms. Uses Intel Mkl.
-
-
-
-
-
-
-
-
diff --git a/pkg/Microsoft.ML.TimeSeries/Microsoft.ML.TimeSeries.symbols.nupkgproj b/pkg/Microsoft.ML.TimeSeries/Microsoft.ML.TimeSeries.symbols.nupkgproj
deleted file mode 100644
index 05f361fa6c..0000000000
--- a/pkg/Microsoft.ML.TimeSeries/Microsoft.ML.TimeSeries.symbols.nupkgproj
+++ /dev/null
@@ -1,5 +0,0 @@
-
-
-
-
-
diff --git a/pkg/Microsoft.ML.Vision/Microsoft.ML.Vision.nupkgproj b/pkg/Microsoft.ML.Vision/Microsoft.ML.Vision.nupkgproj
deleted file mode 100644
index f3f00e368c..0000000000
--- a/pkg/Microsoft.ML.Vision/Microsoft.ML.Vision.nupkgproj
+++ /dev/null
@@ -1,12 +0,0 @@
-
-
-
- netstandard2.0
- Microsoft.ML.Vision contains high level APIs for vision tasks like image classification.
-
-
-
-
-
-
-
diff --git a/pkg/Microsoft.ML.Vision/Microsoft.ML.Vision.symbols.nupkgproj b/pkg/Microsoft.ML.Vision/Microsoft.ML.Vision.symbols.nupkgproj
deleted file mode 100644
index 1ba48a28b9..0000000000
--- a/pkg/Microsoft.ML.Vision/Microsoft.ML.Vision.symbols.nupkgproj
+++ /dev/null
@@ -1,5 +0,0 @@
-
-
-
-
-
diff --git a/pkg/Microsoft.ML/Microsoft.ML.nupkgproj b/pkg/Microsoft.ML/Microsoft.ML.nupkgproj
deleted file mode 100644
index 43ad73f248..0000000000
--- a/pkg/Microsoft.ML/Microsoft.ML.nupkgproj
+++ /dev/null
@@ -1,24 +0,0 @@
-
-
-
- netstandard2.0
- ML.NET is a cross-platform open-source machine learning framework which makes machine learning accessible to .NET developers.
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/pkg/Microsoft.ML/Microsoft.ML.symbols.nupkgproj b/pkg/Microsoft.ML/Microsoft.ML.symbols.nupkgproj
deleted file mode 100644
index fc08dc12dc..0000000000
--- a/pkg/Microsoft.ML/Microsoft.ML.symbols.nupkgproj
+++ /dev/null
@@ -1,13 +0,0 @@
-
-
-
-
-
-
- $(NoWarn);NU5129
-
-
-
diff --git a/restore.cmd b/restore.cmd
new file mode 100644
index 0000000000..185ccb3178
--- /dev/null
+++ b/restore.cmd
@@ -0,0 +1,3 @@
+@echo off
+powershell -ExecutionPolicy ByPass -NoProfile -command "& """%~dp0eng\common\Build.ps1""" -restore -warnAsError 0 %*"
+exit /b %ErrorLevel%
\ No newline at end of file
diff --git a/restore.sh b/restore.sh
new file mode 100755
index 0000000000..e14c4b82ec
--- /dev/null
+++ b/restore.sh
@@ -0,0 +1,13 @@
+#!/usr/bin/env bash
+
+set -e
+
+SOURCE="${BASH_SOURCE[0]}"
+while [ -h "$SOURCE" ]; do # resolve $SOURCE until the file is no longer a symlink
+ DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"
+ SOURCE="$(readlink "$SOURCE")"
+ [[ "$SOURCE" != /* ]] && SOURCE="$DIR/$SOURCE" # if $SOURCE was a relative symlink, we need to resolve it relative to the path where the symlink file was located
+done
+DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"
+
+"$DIR/eng/common/build.sh" --restore --warnAsError false "$@"
\ No newline at end of file
diff --git a/run.cmd b/run.cmd
deleted file mode 100644
index ccbe714db8..0000000000
--- a/run.cmd
+++ /dev/null
@@ -1,28 +0,0 @@
-@if not defined _echo @echo off
-setlocal
-
-:: Clear the 'Platform' env variable for this session, as it's a per-project setting within the build, and a
-:: misleading value (such as 'MCD' on HP PCs) may lead to build breakage (corefx issue: #69).
-set Platform=
-
-:: Disable telemetry, first time experience, and global sdk look for the CLI
-set DOTNET_CLI_TELEMETRY_OPTOUT=1
-set DOTNET_SKIP_FIRST_TIME_EXPERIENCE=1
-set DOTNET_MULTILEVEL_LOOKUP=0
-
-:: Restore the Tools directory
-call "%~dp0init-tools.cmd" %*
-if NOT [%ERRORLEVEL%]==[0] exit /b 1
-
-set _toolRuntime=%~dp0Tools
-set _dotnet=%_toolRuntime%\dotnetcli\dotnet.exe
-set _json=%~dp0config.json
-
-:: run.exe depends on running in the root directory, notably because the config.json specifies
-:: a relative path to the binclash logger
-
-pushd "%~dp0"
-call "%_dotnet%" "%_toolRuntime%\run.exe" "%_json%" %*
-popd
-
-exit /b %ERRORLEVEL%
\ No newline at end of file
diff --git a/run.sh b/run.sh
deleted file mode 100755
index fe936e3ecc..0000000000
--- a/run.sh
+++ /dev/null
@@ -1,21 +0,0 @@
-#!/usr/bin/env bash
-
-__scriptpath=$(cd "$(dirname "$0")"; pwd -P)
-
-# Disable telemetry, first time experience, and global sdk look for the CLI
-export DOTNET_CLI_TELEMETRY_OPTOUT=1
-export DOTNET_SKIP_FIRST_TIME_EXPERIENCE=1
-export DOTNET_MULTILEVEL_LOOKUP=0
-
-# Set the terminal language to en-US in order to avoid problems with MSBuild compareversion command
-export LANG=en_US.UTF-8
-
-# Source the init-tools.sh script rather than execute in order to preserve ulimit values in child-processes. https://github.com/dotnet/corefx/issues/19152
-. "$__scriptpath/init-tools.sh"
-
-__toolRuntime=$__scriptpath/Tools
-__dotnet=$__toolRuntime/dotnetcli/dotnet
-
-cd "$__scriptpath"
-"$__dotnet" "$__toolRuntime/run.exe" "$__scriptpath/config.json" "$@"
-exit $?
diff --git a/sign.cmd b/sign.cmd
new file mode 100644
index 0000000000..6e75c6d287
--- /dev/null
+++ b/sign.cmd
@@ -0,0 +1,3 @@
+@echo off
+powershell -ExecutionPolicy ByPass -NoProfile -command "& """%~dp0eng\common\Build.ps1""" -sign -warnAsError 0 %*"
+exit /b %ErrorLevel%
\ No newline at end of file
diff --git a/src/Directory.Build.props b/src/Directory.Build.props
index c686486a81..71c462977b 100644
--- a/src/Directory.Build.props
+++ b/src/Directory.Build.props
@@ -8,12 +8,12 @@
When you are ready to tackle adding them, remove
below, and when you get clean, also remove
-->
- $(NoWarn);1591
+ $(NoWarn);1591;NU5118
$(WarningsNotAsErrors);1591
$(MSBuildThisFileDirectory)\Source.ruleset
- $(BaseOutputPath)$(TargetArchitecture).$(Configuration)\Native
+ $(BaseOutputPath)$(TargetArchitecture).$(Configuration)
win
linux
@@ -25,7 +25,6 @@
false
$(IsStableProject)
- $(ToolsDir)dotnetcli/dotnet
+ $(NoWarn);1591;NU5100;MSML_GeneralName;MSML_ParameterLocalVarName;MSML_PrivateFieldName;MSML_TypeParamName;SA1028;SA1507;SX1101;MSML_NoInstanceInitializers
+ $(TargetsForTfmSpecificContentInPackage);AddMDAIToInteractiveExtensionsFolder
+
+
+
+
+
+
+
+
+
+
+ <_ItemsToIncludeForInteractive Update="@(_ItemsToIncludeForInteractive)" PackagePath="interactive-extensions/dotnet" />
+
+
+
+
+
+
+ True
+ True
+ Converters.tt
+
+
+ True
+ True
+ PrimitiveDataFrameColumn.BinaryOperators.tt
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ TextTemplatingFileGenerator
+ Converters.cs
+
+
+ TextTemplatingFileGenerator
+ PrimitiveDataFrameColumn.BinaryOperationAPIs.ExplodedColumns.cs
+
+
+ TextTemplatingFileGenerator
+ PrimitiveDataFrameColumn.BinaryOperationImplementations.Exploded.cs
+
+
+ TextTemplatingFileGenerator
+ DataFrameColumn.BinaryOperations.cs
+
+
+ TextTemplatingFileGenerator
+ DataFrameColumn.BinaryOperators.cs
+
+
+ TextTemplatingFileGenerator
+ DataFrameColumn.Computations.cs
+
+
+ TextTemplatingFileGenerator
+ DataFrame.BinaryOperations.cs
+
+
+ TextTemplatingFileGenerator
+ DataFrame.BinaryOperators.cs
+
+
+ TextTemplatingFileGenerator
+ DataFrameBinaryOperators.cs
+
+
+ TextTemplatingFileGenerator
+ PrimitiveDataFrameColumn.BinaryOperations.Combinations.ttinclude
+
+
+ True
+ True
+ PrimitiveDataFrameColumn.BinaryOperations.Combinations.tt
+
+
+ TextTemplatingFileGenerator
+ PrimitiveDataFrameColumn.BinaryOperations.cs
+
+
+ TextTemplatingFileGenerator
+ PrimitiveDataFrameColumn.BinaryOperators.cs
+
+
+ TextTemplatingFileGenerator
+ PrimitiveDataFrameColumn.Computations.cs
+
+
+ TextTemplatingFileGenerator
+ PrimitiveDataFrameColumn.ReversedBinaryOperations.cs
+
+
+ TextTemplatingFileGenerator
+ PrimitiveColumnArithmetic.cs
+
+
+ TextTemplatingFileGenerator
+ PrimitiveDataFrameColumnComputations.cs
+
+
+ TextTemplatingFileGenerator
+ PrimitiveColumnContainer.BinaryOperations.cs
+
+
+ TextTemplatingFileGenerator
+ PrimitiveDataFrameColumnArithmetic.cs
+
+
+ TextTemplatingFileGenerator
+ PrimitiveColumnContainer.BinaryOperations.cs
+
+
+
+
+
+
+
+
+
+ True
+ True
+ Converters.tt
+
+
+ True
+ True
+ PrimitiveDataFrameColumn.BinaryOperationImplementations.Exploded.tt
+
+
+ True
+ True
+ DataFrameColumn.BinaryOperations.tt
+
+
+ True
+ True
+ PrimitiveDataFrameColumn.BinaryOperationAPIs.ExplodedColumns.tt
+
+
+ True
+ True
+ DataFrameColumn.BinaryOperators.tt
+
+
+ True
+ True
+ DataFrameColumn.Computations.tt
+
+
+ True
+ True
+ DataFrame.BinaryOperations.tt
+
+
+ True
+ True
+ DataFrame.BinaryOperators.tt
+
+
+ True
+ True
+ DataFrameBinaryOperators.tt
+
+
+ True
+ True
+ PrimitiveDataFrameColumn.BinaryOperations.tt
+
+
+ True
+ True
+ PrimitiveDataFrameColumn.BinaryOperators.tt
+
+
+ True
+ True
+ PrimitiveDataFrameColumn.Computations.tt
+
+
+ True
+ True
+ PrimitiveDataFrameColumn.ReversedBinaryOperations.tt
+
+
+ True
+ True
+ PrimitiveColumnArithmetic.tt
+
+
+ True
+ True
+ PrimitiveDataFrameColumnComputations.tt
+
+
+ True
+ True
+ PrimitiveColumnContainer.BinaryOperations.tt
+
+
+ True
+ True
+ PrimitiveDataFrameColumnArithmetic.tt
+
+
+ True
+ True
+ PrimitiveColumnContainer.BinaryOperations.tt
+
+
+ True
+ True
+ Strings.resx
+
+
+
+
+
+ ResXFileCodeGenerator
+ Strings.Designer.cs
+ Microsoft.Data
+
+
+
diff --git a/src/Microsoft.Data.Analysis/PrimitiveColumnContainer.BinaryOperations.cs b/src/Microsoft.Data.Analysis/PrimitiveColumnContainer.BinaryOperations.cs
new file mode 100644
index 0000000000..f8a8410a39
--- /dev/null
+++ b/src/Microsoft.Data.Analysis/PrimitiveColumnContainer.BinaryOperations.cs
@@ -0,0 +1,234 @@
+
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+// See the LICENSE file in the project root for more information.
+
+// Generated from PrimitiveColumnContainer.BinaryOperations.tt. Do not modify directly
+
+namespace Microsoft.Data.Analysis
+{
+    internal partial class PrimitiveColumnContainer<T>
+        where T : struct
+    {
+        public PrimitiveColumnContainer<T> Add(PrimitiveColumnContainer<T> right)
+        {
+            PrimitiveDataFrameColumnArithmetic<T>.Instance.Add(this, right);
+            return this;
+        }
+
+        public PrimitiveColumnContainer<T> Add(T scalar)
+        {
+            PrimitiveDataFrameColumnArithmetic<T>.Instance.Add(this, scalar);
+            return this;
+        }
+
+        public PrimitiveColumnContainer<T> Subtract(PrimitiveColumnContainer<T> right)
+        {
+            PrimitiveDataFrameColumnArithmetic<T>.Instance.Subtract(this, right);
+            return this;
+        }
+
+        public PrimitiveColumnContainer<T> Subtract(T scalar)
+        {
+            PrimitiveDataFrameColumnArithmetic<T>.Instance.Subtract(this, scalar);
+            return this;
+        }
+
+        public PrimitiveColumnContainer<T> Multiply(PrimitiveColumnContainer<T> right)
+        {
+            PrimitiveDataFrameColumnArithmetic<T>.Instance.Multiply(this, right);
+            return this;
+        }
+
+        public PrimitiveColumnContainer<T> Multiply(T scalar)
+        {
+            PrimitiveDataFrameColumnArithmetic<T>.Instance.Multiply(this, scalar);
+            return this;
+        }
+
+        public PrimitiveColumnContainer<T> Divide(PrimitiveColumnContainer<T> right)
+        {
+            PrimitiveDataFrameColumnArithmetic<T>.Instance.Divide(this, right);
+            return this;
+        }
+
+        public PrimitiveColumnContainer<T> Divide(T scalar)
+        {
+            PrimitiveDataFrameColumnArithmetic<T>.Instance.Divide(this, scalar);
+            return this;
+        }
+
+        public PrimitiveColumnContainer<T> Modulo(PrimitiveColumnContainer<T> right)
+        {
+            PrimitiveDataFrameColumnArithmetic<T>.Instance.Modulo(this, right);
+            return this;
+        }
+
+        public PrimitiveColumnContainer<T> Modulo(T scalar)
+        {
+            PrimitiveDataFrameColumnArithmetic<T>.Instance.Modulo(this, scalar);
+            return this;
+        }
+
+        public PrimitiveColumnContainer<T> And(PrimitiveColumnContainer<T> right)
+        {
+            PrimitiveDataFrameColumnArithmetic<T>.Instance.And(this, right);
+            return this;
+        }
+
+        public PrimitiveColumnContainer<T> And(T scalar)
+        {
+            PrimitiveDataFrameColumnArithmetic<T>.Instance.And(this, scalar);
+            return this;
+        }
+
+        public PrimitiveColumnContainer<T> Or(PrimitiveColumnContainer<T> right)
+        {
+            PrimitiveDataFrameColumnArithmetic<T>.Instance.Or(this, right);
+            return this;
+        }
+
+        public PrimitiveColumnContainer<T> Or(T scalar)
+        {
+            PrimitiveDataFrameColumnArithmetic<T>.Instance.Or(this, scalar);
+            return this;
+        }
+
+        public PrimitiveColumnContainer<T> Xor(PrimitiveColumnContainer<T> right)
+        {
+            PrimitiveDataFrameColumnArithmetic<T>.Instance.Xor(this, right);
+            return this;
+        }
+
+        public PrimitiveColumnContainer<T> Xor(T scalar)
+        {
+            PrimitiveDataFrameColumnArithmetic<T>.Instance.Xor(this, scalar);
+            return this;
+        }
+
+        public PrimitiveColumnContainer<T> LeftShift(int value)
+        {
+            PrimitiveDataFrameColumnArithmetic<T>.Instance.LeftShift(this, value);
+            return this;
+        }
+
+        public PrimitiveColumnContainer<T> RightShift(int value)
+        {
+            PrimitiveDataFrameColumnArithmetic<T>.Instance.RightShift(this, value);
+            return this;
+        }
+
+        public PrimitiveColumnContainer<T> ElementwiseEquals(PrimitiveColumnContainer<T> right, PrimitiveColumnContainer<bool> ret)
+        {
+            PrimitiveDataFrameColumnArithmetic<T>.Instance.ElementwiseEquals(this, right, ret);
+            return this;
+        }
+
+        public PrimitiveColumnContainer<T> ElementwiseEquals(T scalar, PrimitiveColumnContainer<bool> ret)
+        {
+            PrimitiveDataFrameColumnArithmetic<T>.Instance.ElementwiseEquals(this, scalar, ret);
+            return this;
+        }
+
+        public PrimitiveColumnContainer<T> ElementwiseNotEquals(PrimitiveColumnContainer<T> right, PrimitiveColumnContainer<bool> ret)
+        {
+            PrimitiveDataFrameColumnArithmetic<T>.Instance.ElementwiseNotEquals(this, right, ret);
+            return this;
+        }
+
+        public PrimitiveColumnContainer<T> ElementwiseNotEquals(T scalar, PrimitiveColumnContainer<bool> ret)
+        {
+            PrimitiveDataFrameColumnArithmetic<T>.Instance.ElementwiseNotEquals(this, scalar, ret);
+            return this;
+        }
+
+        public PrimitiveColumnContainer<T> ElementwiseGreaterThanOrEqual(PrimitiveColumnContainer<T> right, PrimitiveColumnContainer<bool> ret)
+        {
+            PrimitiveDataFrameColumnArithmetic<T>.Instance.ElementwiseGreaterThanOrEqual(this, right, ret);
+            return this;
+        }
+
+        public PrimitiveColumnContainer<T> ElementwiseGreaterThanOrEqual(T scalar, PrimitiveColumnContainer<bool> ret)
+        {
+            PrimitiveDataFrameColumnArithmetic<T>.Instance.ElementwiseGreaterThanOrEqual(this, scalar, ret);
+            return this;
+        }
+
+        public PrimitiveColumnContainer<T> ElementwiseLessThanOrEqual(PrimitiveColumnContainer<T> right, PrimitiveColumnContainer<bool> ret)
+        {
+            PrimitiveDataFrameColumnArithmetic<T>.Instance.ElementwiseLessThanOrEqual(this, right, ret);
+            return this;
+        }
+
+        public PrimitiveColumnContainer<T> ElementwiseLessThanOrEqual(T scalar, PrimitiveColumnContainer<bool> ret)
+        {
+            PrimitiveDataFrameColumnArithmetic<T>.Instance.ElementwiseLessThanOrEqual(this, scalar, ret);
+            return this;
+        }
+
+        public PrimitiveColumnContainer<T> ElementwiseGreaterThan(PrimitiveColumnContainer<T> right, PrimitiveColumnContainer<bool> ret)
+        {
+            PrimitiveDataFrameColumnArithmetic<T>.Instance.ElementwiseGreaterThan(this, right, ret);
+            return this;
+        }
+
+        public PrimitiveColumnContainer<T> ElementwiseGreaterThan(T scalar, PrimitiveColumnContainer<bool> ret)
+        {
+            PrimitiveDataFrameColumnArithmetic<T>.Instance.ElementwiseGreaterThan(this, scalar, ret);
+            return this;
+        }
+
+        public PrimitiveColumnContainer<T> ElementwiseLessThan(PrimitiveColumnContainer<T> right, PrimitiveColumnContainer<bool> ret)
+        {
+            PrimitiveDataFrameColumnArithmetic<T>.Instance.ElementwiseLessThan(this, right, ret);
+            return this;
+        }
+
+        public PrimitiveColumnContainer<T> ElementwiseLessThan(T scalar, PrimitiveColumnContainer<bool> ret)
+        {
+            PrimitiveDataFrameColumnArithmetic<T>.Instance.ElementwiseLessThan(this, scalar, ret);
+            return this;
+        }
+
+        public PrimitiveColumnContainer<T> ReverseAdd(T scalar)
+        {
+            PrimitiveDataFrameColumnArithmetic<T>.Instance.Add(scalar, this);
+            return this;
+        }
+        public PrimitiveColumnContainer<T> ReverseSubtract(T scalar)
+        {
+            PrimitiveDataFrameColumnArithmetic<T>.Instance.Subtract(scalar, this);
+            return this;
+        }
+        public PrimitiveColumnContainer<T> ReverseMultiply(T scalar)
+        {
+            PrimitiveDataFrameColumnArithmetic<T>.Instance.Multiply(scalar, this);
+            return this;
+        }
+        public PrimitiveColumnContainer<T> ReverseDivide(T scalar)
+        {
+            PrimitiveDataFrameColumnArithmetic<T>.Instance.Divide(scalar, this);
+            return this;
+        }
+        public PrimitiveColumnContainer<T> ReverseModulo(T scalar)
+        {
+            PrimitiveDataFrameColumnArithmetic<T>.Instance.Modulo(scalar, this);
+            return this;
+        }
+        public PrimitiveColumnContainer<T> ReverseAnd(T scalar)
+        {
+            PrimitiveDataFrameColumnArithmetic<T>.Instance.And(scalar, this);
+            return this;
+        }
+        public PrimitiveColumnContainer<T> ReverseOr(T scalar)
+        {
+            PrimitiveDataFrameColumnArithmetic<T>.Instance.Or(scalar, this);
+            return this;
+        }
+        public PrimitiveColumnContainer<T> ReverseXor(T scalar)
+        {
+            PrimitiveDataFrameColumnArithmetic<T>.Instance.Xor(scalar, this);
+            return this;
+        }
+    }
+}
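
The generated operations above all mutate the container's buffers in place and return `this`, so calls chain without allocating a new container. A minimal sketch of the calling pattern, assuming internal visibility (illustrative only; callers normally reach these methods through the public `PrimitiveDataFrameColumn<T>` arithmetic operators):

    // Illustrative: PrimitiveColumnContainer<T> is internal to Microsoft.Data.Analysis.
    var container = new PrimitiveColumnContainer<int>(new[] { 1, 2, 3 });
    container.Add(10).Multiply(2);   // in place: { 22, 24, 26 }
    container.ReverseSubtract(100);  // scalar on the left, 100 - x: { 78, 76, 74 }
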
diff --git a/src/Microsoft.Data.Analysis/PrimitiveColumnContainer.BinaryOperations.tt b/src/Microsoft.Data.Analysis/PrimitiveColumnContainer.BinaryOperations.tt
new file mode 100644
index 0000000000..1006508524
--- /dev/null
+++ b/src/Microsoft.Data.Analysis/PrimitiveColumnContainer.BinaryOperations.tt
@@ -0,0 +1,56 @@
+<#@ template debug="false" hostspecific="false" language="C#" #>
+<#@ assembly name="System.Core" #>
+<#@ import namespace="System.Linq" #>
+<#@ import namespace="System.Text" #>
+<#@ import namespace="System.Collections.Generic" #>
+<#@ output extension=".cs" #>
+<#@ include file="ColumnArithmeticTemplate.ttinclude" #>
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+// See the LICENSE file in the project root for more information.
+
+// Generated from PrimitiveColumnContainer.BinaryOperations.tt. Do not modify directly
+
+namespace Microsoft.Data.Analysis
+{
+    internal partial class PrimitiveColumnContainer<T>
+        where T : struct
+    {
+<# foreach (MethodConfiguration method in methodConfiguration) { #>
+<# if (method.MethodType == MethodType.Comparison || method.MethodType == MethodType.ComparisonScalar) { #>
+        public <#= method.GetSingleArgumentMethodSignature("PrimitiveColumnContainer<T>", "T") #>
+        {
+<# if (method.MethodType == MethodType.ComparisonScalar ) { #>
+            PrimitiveDataFrameColumnArithmetic<T>.Instance.<#=method.MethodName#>(this, scalar, ret);
+<# } else { #>
+            PrimitiveDataFrameColumnArithmetic<T>.Instance.<#=method.MethodName#>(this, right, ret);
+<# } #>
+            return this;
+        }
+
+<# } else { #>
+        public <#= method.GetSingleArgumentMethodSignature("PrimitiveColumnContainer<T>", "T")#>
+        {
+<# if (method.MethodType == MethodType.BinaryScalar) { #>
+            PrimitiveDataFrameColumnArithmetic<T>.Instance.<#=method.MethodName#>(this, scalar);
+<# } else if (method.MethodType == MethodType.BinaryInt) { #>
+            PrimitiveDataFrameColumnArithmetic<T>.Instance.<#=method.MethodName#>(this, value);
+<# } else { #>
+            PrimitiveDataFrameColumnArithmetic<T>.Instance.<#=method.MethodName#>(this, right);
+<# } #>
+            return this;
+        }
+
+<# } #>
+<# } #>
+<# foreach (MethodConfiguration method in methodConfiguration) { #>
+<# if (method.MethodType == MethodType.BinaryScalar) { #>
+        public PrimitiveColumnContainer<T> Reverse<#=method.MethodName#>(T scalar)
+        {
+            PrimitiveDataFrameColumnArithmetic<T>.Instance.<#=method.MethodName#>(scalar, this);
+            return this;
+        }
+<# } #>
+<# } #>
+ }
+}
diff --git a/src/Microsoft.Data.Analysis/PrimitiveColumnContainer.cs b/src/Microsoft.Data.Analysis/PrimitiveColumnContainer.cs
new file mode 100644
index 0000000000..2e04f63fee
--- /dev/null
+++ b/src/Microsoft.Data.Analysis/PrimitiveColumnContainer.cs
@@ -0,0 +1,850 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+// See the LICENSE file in the project root for more information.
+
+using System;
+using System.Collections;
+using System.Collections.Generic;
+using System.Diagnostics;
+using System.Runtime.CompilerServices;
+using System.Runtime.InteropServices;
+using System.Text;
+
+namespace Microsoft.Data.Analysis
+{
+    /// <summary>
+    /// PrimitiveColumnContainer is just a store for the column data. APIs that want to change the data must be defined in PrimitiveDataFrameColumn.
+    /// </summary>
+    /// <typeparam name="T"></typeparam>
+    internal partial class PrimitiveColumnContainer<T> : IEnumerable<T?>
+        where T : struct
+    {
+        public IList<ReadOnlyDataFrameBuffer<T>> Buffers = new List<ReadOnlyDataFrameBuffer<T>>();
+
+        // To keep the mapping simple, each buffer is mapped 1v1 to a nullBitMapBuffer
+        // A set bit implies a valid value. An unset bit => null value
+        public IList<ReadOnlyDataFrameBuffer<byte>> NullBitMapBuffers = new List<ReadOnlyDataFrameBuffer<byte>>();
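+        // For example, appending the values 1, null, 3 stores { 1, 0, 3 } in the data
+        // buffer and 0b0000_0101 in the null bitmap (bits 0 and 2 set), so NullCount == 1.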
+
+        // Need a way to differentiate between columns initialized with default values and those with null values in SetValidityBit
+        internal bool _modifyNullCountWhileIndexing = true;
+
+        public PrimitiveColumnContainer(T[] values)
+        {
+            values = values ?? throw new ArgumentNullException(nameof(values));
+            long length = values.LongLength;
+            DataFrameBuffer<T> curBuffer;
+            if (Buffers.Count == 0)
+            {
+                curBuffer = new DataFrameBuffer<T>();
+                Buffers.Add(curBuffer);
+                NullBitMapBuffers.Add(new DataFrameBuffer<byte>());
+            }
+            else
+            {
+                curBuffer = (DataFrameBuffer<T>)Buffers[Buffers.Count - 1];
+            }
+            for (long i = 0; i < length; i++)
+            {
+                if (curBuffer.Length == ReadOnlyDataFrameBuffer<T>.MaxCapacity)
+                {
+                    curBuffer = new DataFrameBuffer<T>();
+                    Buffers.Add(curBuffer);
+                    NullBitMapBuffers.Add(new DataFrameBuffer<byte>());
+                }
+                curBuffer.Append(values[i]);
+                SetValidityBit(Length, true);
+                Length++;
+            }
+        }
+
+        public PrimitiveColumnContainer(IEnumerable<T> values)
+        {
+            values = values ?? throw new ArgumentNullException(nameof(values));
+            foreach (T value in values)
+            {
+                Append(value);
+            }
+        }
+        public PrimitiveColumnContainer(IEnumerable<T?> values)
+        {
+            values = values ?? throw new ArgumentNullException(nameof(values));
+            foreach (T? value in values)
+            {
+                Append(value);
+            }
+        }
+
+        public PrimitiveColumnContainer(ReadOnlyMemory<byte> buffer, ReadOnlyMemory<byte> nullBitMap, int length, int nullCount)
+        {
+            ReadOnlyDataFrameBuffer<T> dataBuffer;
+            if (buffer.IsEmpty)
+            {
+                DataFrameBuffer<T> mutableBuffer = new DataFrameBuffer<T>();
+                mutableBuffer.EnsureCapacity(length);
+                mutableBuffer.Length = length;
+                mutableBuffer.RawSpan.Fill(default(T));
+                dataBuffer = mutableBuffer;
+            }
+            else
+            {
+                dataBuffer = new ReadOnlyDataFrameBuffer<T>(buffer, length);
+            }
+            Buffers.Add(dataBuffer);
+            int bitMapBufferLength = (length + 7) / 8;
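+            // e.g. length == 10 => (10 + 7) / 8 == 2 bytes, i.e. 16 bits to cover 10 values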
+            ReadOnlyDataFrameBuffer<byte> nullDataFrameBuffer;
+            if (nullBitMap.IsEmpty)
+            {
+                if (nullCount != 0)
+                {
+                    throw new ArgumentNullException(Strings.InconsistentNullBitMapAndNullCount, nameof(nullBitMap));
+                }
+                if (!buffer.IsEmpty)
+                {
+                    // Create a new bitMap with all the bits up to length set
+                    var bitMap = new byte[bitMapBufferLength];
+                    bitMap.AsSpan().Fill(255);
+                    int lastByte = 1 << (length - (bitMapBufferLength - 1) * 8);
+                    bitMap[bitMapBufferLength - 1] = (byte)(lastByte - 1);
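+                    // e.g. length == 10: the last byte covers 10 - 8 == 2 values, so
+                    // lastByte == 1 << 2 == 4 and the final byte becomes 0b0000_0011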
+                    nullDataFrameBuffer = new DataFrameBuffer<byte>(bitMap, bitMapBufferLength);
+                }
+                else
+                {
+                    nullDataFrameBuffer = new DataFrameBuffer<byte>();
+                }
+            }
+            else
+            {
+                if (nullBitMap.Length < bitMapBufferLength)
+                {
+                    throw new ArgumentException(Strings.InconsistentNullBitMapAndLength, nameof(nullBitMap));
+                }
+                nullDataFrameBuffer = new ReadOnlyDataFrameBuffer<byte>(nullBitMap, bitMapBufferLength);
+            }
+            NullBitMapBuffers.Add(nullDataFrameBuffer);
+            Length = length;
+            NullCount = nullCount;
+        }
+
+        public PrimitiveColumnContainer(long length = 0)
+        {
+            while (length > 0)
+            {
+                if (Buffers.Count == 0)
+                {
+                    Buffers.Add(new DataFrameBuffer<T>());
+                    NullBitMapBuffers.Add(new DataFrameBuffer<byte>());
+                }
+                DataFrameBuffer<T> lastBuffer = (DataFrameBuffer<T>)Buffers[Buffers.Count - 1];
+                if (lastBuffer.Length == ReadOnlyDataFrameBuffer<T>.MaxCapacity)
+                {
+                    lastBuffer = new DataFrameBuffer<T>();
+                    Buffers.Add(lastBuffer);
+                    NullBitMapBuffers.Add(new DataFrameBuffer<byte>());
+                }
+                int allocatable = (int)Math.Min(length, ReadOnlyDataFrameBuffer<T>.MaxCapacity);
+                lastBuffer.EnsureCapacity(allocatable);
+                DataFrameBuffer<byte> lastNullBitMapBuffer = (DataFrameBuffer<byte>)(NullBitMapBuffers[NullBitMapBuffers.Count - 1]);
+                int nullBufferAllocatable = (allocatable + 7) / 8;
+                lastNullBitMapBuffer.EnsureCapacity(nullBufferAllocatable);
+                lastBuffer.Length = allocatable;
+                lastNullBitMapBuffer.Length = nullBufferAllocatable;
+                length -= allocatable;
+                Length += lastBuffer.Length;
+                NullCount += lastBuffer.Length;
+            }
+        }
+
+        public void Resize(long length)
+        {
+            if (length < Length)
+                throw new ArgumentException(Strings.CannotResizeDown, nameof(length));
+            AppendMany(default, length - Length);
+        }
+
+        public void Append(T? value)
+        {
+            if (Buffers.Count == 0)
+            {
+                Buffers.Add(new DataFrameBuffer<T>());
+                NullBitMapBuffers.Add(new DataFrameBuffer<byte>());
+            }
+            int bufferIndex = Buffers.Count - 1;
+            ReadOnlyDataFrameBuffer<T> lastBuffer = Buffers[bufferIndex];
+            if (lastBuffer.Length == ReadOnlyDataFrameBuffer<T>.MaxCapacity)
+            {
+                lastBuffer = new DataFrameBuffer<T>();
+                Buffers.Add(lastBuffer);
+                NullBitMapBuffers.Add(new DataFrameBuffer<byte>());
+            }
+            DataFrameBuffer<T> mutableLastBuffer = DataFrameBuffer<T>.GetMutableBuffer(lastBuffer);
+            Buffers[bufferIndex] = mutableLastBuffer;
+            mutableLastBuffer.Append(value ?? default);
+            SetValidityBit(Length, value.HasValue);
+            Length++;
+        }
+
+        public void AppendMany(T? value, long count)
+        {
+            if (!value.HasValue)
+            {
+                NullCount += count;
+            }
+
+            while (count > 0)
+            {
+                if (Buffers.Count == 0)
+                {
+                    Buffers.Add(new DataFrameBuffer<T>());
+                    NullBitMapBuffers.Add(new DataFrameBuffer<byte>());
+                }
+                int bufferIndex = Buffers.Count - 1;
+                ReadOnlyDataFrameBuffer<T> lastBuffer = Buffers[bufferIndex];
+                if (lastBuffer.Length == ReadOnlyDataFrameBuffer<T>.MaxCapacity)
+                {
+                    lastBuffer = new DataFrameBuffer<T>();
+                    Buffers.Add(lastBuffer);
+                    NullBitMapBuffers.Add(new DataFrameBuffer<byte>());
+                }
+                DataFrameBuffer<T> mutableLastBuffer = DataFrameBuffer<T>.GetMutableBuffer(lastBuffer);
+                Buffers[bufferIndex] = mutableLastBuffer;
+                int allocatable = (int)Math.Min(count, ReadOnlyDataFrameBuffer<T>.MaxCapacity);
+                mutableLastBuffer.EnsureCapacity(allocatable);
+                mutableLastBuffer.RawSpan.Slice(lastBuffer.Length, allocatable).Fill(value ?? default);
+                mutableLastBuffer.Length += allocatable;
+                Length += allocatable;
+
+                int nullBitMapBufferIndex = NullBitMapBuffers.Count - 1;
+                ReadOnlyDataFrameBuffer<byte> lastNullBitMapBuffer = NullBitMapBuffers[nullBitMapBufferIndex];
+                DataFrameBuffer<byte> mutableLastNullBitMapBuffer = DataFrameBuffer<byte>.GetMutableBuffer(lastNullBitMapBuffer);
+                NullBitMapBuffers[nullBitMapBufferIndex] = mutableLastNullBitMapBuffer;
+                int nullBitMapAllocatable = (int)(((uint)allocatable) / 8) + 1;
+                mutableLastNullBitMapBuffer.EnsureCapacity(nullBitMapAllocatable);
+                _modifyNullCountWhileIndexing = false;
+                for (long i = Length - count; i < Length; i++)
+                {
+                    SetValidityBit(i, value.HasValue ? true : false);
+                }
+                _modifyNullCountWhileIndexing = true;
+                count -= allocatable;
+            }
+        }
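+        // e.g. starting from an empty container, AppendMany(null, 3) fills the data
+        // buffer with default(T), leaves all three validity bits unset, and ends with
+        // Length == 3 and NullCount == 3.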
+
+        public void ApplyElementwise(Func<T?, long, T?> func)
+        {
+            for (int b = 0; b < Buffers.Count; b++)
+            {
+                ReadOnlyDataFrameBuffer<T> buffer = Buffers[b];
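+                // Every buffer before the last is filled to capacity, so the number of
+                // elements preceding buffer b is b * Buffers[0].Length.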
+                long prevLength = checked(Buffers[0].Length * b);
+                DataFrameBuffer<T> mutableBuffer = DataFrameBuffer<T>.GetMutableBuffer(buffer);
+                Buffers[b] = mutableBuffer;
+                Span<T> span = mutableBuffer.Span;
+                DataFrameBuffer<byte> mutableNullBitMapBuffer = DataFrameBuffer<byte>.GetMutableBuffer(NullBitMapBuffers[b]);
+                NullBitMapBuffers[b] = mutableNullBitMapBuffer;
+                Span<byte> nullBitMapSpan = mutableNullBitMapBuffer.Span;
+                for (int i = 0; i < span.Length; i++)
+                {
+                    long curIndex = i + prevLength;
+                    bool isValid = IsValid(nullBitMapSpan, i);
+                    T? value = func(isValid ? span[i] : default(T?), curIndex);
+                    span[i] = value.GetValueOrDefault();
+                    SetValidityBit(nullBitMapSpan, i, value != null);
+                }
+            }
+        }
+
+        public void Apply<TResult>(Func<T?, TResult?> func, PrimitiveColumnContainer<TResult> resultContainer)
+            where TResult : unmanaged
+        {
+            for (int b = 0; b < Buffers.Count; b++)
+            {
+                ReadOnlyDataFrameBuffer<T> buffer = Buffers[b];
+                long prevLength = checked(Buffers[0].Length * b);
+                DataFrameBuffer<T> mutableBuffer = DataFrameBuffer