From 0b976692d87584fde47b4f50923465e1f71b8643 Mon Sep 17 00:00:00 2001
From: "dotnet-maestro[bot]"
Date: Wed, 5 Jun 2024 22:33:28 +0000
Subject: [PATCH 01/20] Update dependencies from https://github.com/dotnet/arcade
 build 20240520.4

Microsoft.DotNet.Arcade.Sdk , Microsoft.DotNet.Helix.Sdk
 From Version 8.0.0-beta.23168.1 -> To Version 8.0.0-beta.24270.4
---
 eng/Version.Details.xml | 8 +-
 eng/common/SetupNugetSources.ps1 | 28 +-
 eng/common/SetupNugetSources.sh | 2 +-
 eng/common/cross/arm/sources.list.xenial | 2 +-
 eng/common/cross/arm64/sources.list.xenial | 2 +-
 eng/common/cross/build-rootfs.sh | 166 ++++++----
 eng/common/cross/toolchain.cmake | 52 +++-
 eng/common/cross/x64/sources.list.bionic | 11 +
 eng/common/cross/x64/sources.list.xenial | 11 +
 eng/common/darc-init.ps1 | 2 +-
 eng/common/darc-init.sh | 2 +-
 eng/common/dotnet-install.sh | 4 +
 eng/common/loc/P22DotNetHtmlLocalization.lss | Bin 3810 -> 1876 bytes
 eng/common/native/init-compiler.sh | 4 +-
 eng/common/native/init-distro-rid.sh | 130 ++++++++
 eng/common/native/init-os-and-arch.sh | 80 +++++
 .../post-build/add-build-to-channel.ps1 | 2 +-
 eng/common/post-build/publish-using-darc.ps1 | 6 +-
 .../post-build/trigger-subscriptions.ps1 | 2 +-
 eng/common/sdk-task.ps1 | 2 +-
 eng/common/sdl/configure-sdl-tool.ps1 | 42 ++-
 eng/common/sdl/execute-all-sdl-tools.ps1 | 4 +-
 eng/common/sdl/extract-artifact-packages.ps1 | 20 +-
 eng/common/sdl/trim-assets-version.ps1 | 75 +++++
 eng/common/templates-official/job/job.yml | 264 ++++++++++++++++
 .../templates-official/job/onelocbuild.yml | 112 +++++++
 .../job/publish-build-assets.yml | 155 ++++++++++
 .../templates-official/job/source-build.yml | 67 ++++
 .../job/source-index-stage1.yml | 85 ++++++
 .../templates-official/jobs/codeql-build.yml | 31 ++
 eng/common/templates-official/jobs/jobs.yml | 97 ++++++
 .../templates-official/jobs/source-build.yml | 46 +++
 .../post-build/common-variables.yml | 22 ++
 .../post-build/post-build.yml | 285 ++++++++++++++++++
 .../post-build/setup-maestro-vars.yml | 70 +++++
 .../post-build/trigger-subscription.yml | 13 +
 .../steps/add-build-to-channel.yml | 13 +
 .../templates-official/steps/build-reason.yml | 12 +
 .../steps/component-governance.yml | 13 +
 .../steps/execute-codeql.yml | 32 ++
 .../templates-official/steps/execute-sdl.yml | 88 ++++++
 .../steps/generate-sbom.yml | 48 +++
 .../templates-official/steps/publish-logs.yml | 23 ++
 .../templates-official/steps/retain-build.yml | 28 ++
 .../steps/send-to-helix.yml | 91 ++++++
 .../templates-official/steps/source-build.yml | 129 ++++++++
 .../variables/pool-providers.yml | 45 +++
 .../variables/sdl-variables.yml | 7 +
 eng/common/templates/job/execute-sdl.yml | 5 +
 eng/common/templates/job/job.yml | 23 +-
 .../templates/job/publish-build-assets.yml | 18 +-
 .../templates/job/source-index-stage1.yml | 44 ++-
 .../templates/post-build/common-variables.yml | 2 +-
 .../templates/post-build/post-build.yml | 20 +-
 .../templates/steps/component-governance.yml | 13 +
 eng/common/templates/steps/execute-sdl.yml | 2 +-
 eng/common/templates/steps/generate-sbom.yml | 2 +-
 eng/common/templates/steps/source-build.yml | 15 +
 .../templates/variables/pool-providers.yml | 12 +-
 eng/common/tools.ps1 | 50 ++-
 eng/common/tools.sh | 42 ++-
 global.json | 10 +-
 62 files changed, 2500 insertions(+), 191 deletions(-)
 create mode 100644 eng/common/cross/x64/sources.list.bionic
 create mode 100644 eng/common/cross/x64/sources.list.xenial
 create mode 100644 eng/common/native/init-distro-rid.sh
 create mode 100644 eng/common/native/init-os-and-arch.sh
 create
mode 100644 eng/common/sdl/trim-assets-version.ps1 create mode 100644 eng/common/templates-official/job/job.yml create mode 100644 eng/common/templates-official/job/onelocbuild.yml create mode 100644 eng/common/templates-official/job/publish-build-assets.yml create mode 100644 eng/common/templates-official/job/source-build.yml create mode 100644 eng/common/templates-official/job/source-index-stage1.yml create mode 100644 eng/common/templates-official/jobs/codeql-build.yml create mode 100644 eng/common/templates-official/jobs/jobs.yml create mode 100644 eng/common/templates-official/jobs/source-build.yml create mode 100644 eng/common/templates-official/post-build/common-variables.yml create mode 100644 eng/common/templates-official/post-build/post-build.yml create mode 100644 eng/common/templates-official/post-build/setup-maestro-vars.yml create mode 100644 eng/common/templates-official/post-build/trigger-subscription.yml create mode 100644 eng/common/templates-official/steps/add-build-to-channel.yml create mode 100644 eng/common/templates-official/steps/build-reason.yml create mode 100644 eng/common/templates-official/steps/component-governance.yml create mode 100644 eng/common/templates-official/steps/execute-codeql.yml create mode 100644 eng/common/templates-official/steps/execute-sdl.yml create mode 100644 eng/common/templates-official/steps/generate-sbom.yml create mode 100644 eng/common/templates-official/steps/publish-logs.yml create mode 100644 eng/common/templates-official/steps/retain-build.yml create mode 100644 eng/common/templates-official/steps/send-to-helix.yml create mode 100644 eng/common/templates-official/steps/source-build.yml create mode 100644 eng/common/templates-official/variables/pool-providers.yml create mode 100644 eng/common/templates-official/variables/sdl-variables.yml create mode 100644 eng/common/templates/steps/component-governance.yml diff --git a/eng/Version.Details.xml b/eng/Version.Details.xml index 72caae2cec902..0d3c5a6747382 100644 --- a/eng/Version.Details.xml +++ b/eng/Version.Details.xml @@ -13,18 +13,18 @@ - + https://github.com/dotnet/arcade - b12f035e893c34ec2c965d75f6e21b7a2667e98d + f2b2071632d5d4c46d0f904f2b0d917b1752551b https://github.com/dotnet/roslyn 5d10d428050c0d6afef30a072c4ae68776621877 - + https://github.com/dotnet/arcade - b12f035e893c34ec2c965d75f6e21b7a2667e98d + f2b2071632d5d4c46d0f904f2b0d917b1752551b https://github.com/dotnet/roslyn-analyzers diff --git a/eng/common/SetupNugetSources.ps1 b/eng/common/SetupNugetSources.ps1 index 6e99723945183..efa2fd72bfaa2 100644 --- a/eng/common/SetupNugetSources.ps1 +++ b/eng/common/SetupNugetSources.ps1 @@ -35,7 +35,7 @@ Set-StrictMode -Version 2.0 . $PSScriptRoot\tools.ps1 # Add source entry to PackageSources -function AddPackageSource($sources, $SourceName, $SourceEndPoint, $creds, $Username, $Password) { +function AddPackageSource($sources, $SourceName, $SourceEndPoint, $creds, $Username, $pwd) { $packageSource = $sources.SelectSingleNode("add[@key='$SourceName']") if ($packageSource -eq $null) @@ -48,12 +48,11 @@ function AddPackageSource($sources, $SourceName, $SourceEndPoint, $creds, $Usern else { Write-Host "Package source $SourceName already present." 
} - - AddCredential -Creds $creds -Source $SourceName -Username $Username -Password $Password + AddCredential -Creds $creds -Source $SourceName -Username $Username -pwd $pwd } # Add a credential node for the specified source -function AddCredential($creds, $source, $username, $password) { +function AddCredential($creds, $source, $username, $pwd) { # Looks for credential configuration for the given SourceName. Create it if none is found. $sourceElement = $creds.SelectSingleNode($Source) if ($sourceElement -eq $null) @@ -82,17 +81,18 @@ function AddCredential($creds, $source, $username, $password) { $passwordElement.SetAttribute("key", "ClearTextPassword") $sourceElement.AppendChild($passwordElement) | Out-Null } - $passwordElement.SetAttribute("value", $Password) + + $passwordElement.SetAttribute("value", $pwd) } -function InsertMaestroPrivateFeedCredentials($Sources, $Creds, $Username, $Password) { +function InsertMaestroPrivateFeedCredentials($Sources, $Creds, $Username, $pwd) { $maestroPrivateSources = $Sources.SelectNodes("add[contains(@key,'darc-int')]") Write-Host "Inserting credentials for $($maestroPrivateSources.Count) Maestro's private feeds." ForEach ($PackageSource in $maestroPrivateSources) { Write-Host "`tInserting credential for Maestro's feed:" $PackageSource.Key - AddCredential -Creds $creds -Source $PackageSource.Key -Username $Username -Password $Password + AddCredential -Creds $creds -Source $PackageSource.Key -Username $Username -pwd $pwd } } @@ -144,24 +144,24 @@ if ($disabledSources -ne $null) { $userName = "dn-bot" # Insert credential nodes for Maestro's private feeds -InsertMaestroPrivateFeedCredentials -Sources $sources -Creds $creds -Username $userName -Password $Password +InsertMaestroPrivateFeedCredentials -Sources $sources -Creds $creds -Username $userName -pwd $Password # 3.1 uses a different feed url format so it's handled differently here $dotnet31Source = $sources.SelectSingleNode("add[@key='dotnet3.1']") if ($dotnet31Source -ne $null) { - AddPackageSource -Sources $sources -SourceName "dotnet3.1-internal" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal/nuget/v2" -Creds $creds -Username $userName -Password $Password - AddPackageSource -Sources $sources -SourceName "dotnet3.1-internal-transport" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal-transport/nuget/v2" -Creds $creds -Username $userName -Password $Password + AddPackageSource -Sources $sources -SourceName "dotnet3.1-internal" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal/nuget/v2" -Creds $creds -Username $userName -pwd $Password + AddPackageSource -Sources $sources -SourceName "dotnet3.1-internal-transport" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal-transport/nuget/v2" -Creds $creds -Username $userName -pwd $Password } -$dotnetVersions = @('5','6','7') +$dotnetVersions = @('5','6','7','8') foreach ($dotnetVersion in $dotnetVersions) { $feedPrefix = "dotnet" + $dotnetVersion; $dotnetSource = $sources.SelectSingleNode("add[@key='$feedPrefix']") if ($dotnetSource -ne $null) { - AddPackageSource -Sources $sources -SourceName "$feedPrefix-internal" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/internal/_packaging/$feedPrefix-internal/nuget/v2" -Creds $creds -Username $userName -Password $Password - AddPackageSource -Sources $sources -SourceName "$feedPrefix-internal-transport" -SourceEndPoint 
"https://pkgs.dev.azure.com/dnceng/internal/_packaging/$feedPrefix-internal-transport/nuget/v2" -Creds $creds -Username $userName -Password $Password + AddPackageSource -Sources $sources -SourceName "$feedPrefix-internal" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/internal/_packaging/$feedPrefix-internal/nuget/v2" -Creds $creds -Username $userName -pwd $Password + AddPackageSource -Sources $sources -SourceName "$feedPrefix-internal-transport" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/internal/_packaging/$feedPrefix-internal-transport/nuget/v2" -Creds $creds -Username $userName -pwd $Password } } -$doc.Save($filename) +$doc.Save($filename) \ No newline at end of file diff --git a/eng/common/SetupNugetSources.sh b/eng/common/SetupNugetSources.sh index 8af7d899db121..d387c7eac95e5 100644 --- a/eng/common/SetupNugetSources.sh +++ b/eng/common/SetupNugetSources.sh @@ -105,7 +105,7 @@ if [ "$?" == "0" ]; then PackageSources+=('dotnet3.1-internal-transport') fi -DotNetVersions=('5' '6' '7') +DotNetVersions=('5' '6' '7' '8') for DotNetVersion in ${DotNetVersions[@]} ; do FeedPrefix="dotnet${DotNetVersion}"; diff --git a/eng/common/cross/arm/sources.list.xenial b/eng/common/cross/arm/sources.list.xenial index eacd86b7df3c1..56fbb36a59f62 100644 --- a/eng/common/cross/arm/sources.list.xenial +++ b/eng/common/cross/arm/sources.list.xenial @@ -8,4 +8,4 @@ deb http://ports.ubuntu.com/ubuntu-ports/ xenial-backports main restricted deb-src http://ports.ubuntu.com/ubuntu-ports/ xenial-backports main restricted deb http://ports.ubuntu.com/ubuntu-ports/ xenial-security main restricted universe multiverse -deb-src http://ports.ubuntu.com/ubuntu-ports/ xenial-security main restricted universe multiverse \ No newline at end of file +deb-src http://ports.ubuntu.com/ubuntu-ports/ xenial-security main restricted universe multiverse diff --git a/eng/common/cross/arm64/sources.list.xenial b/eng/common/cross/arm64/sources.list.xenial index eacd86b7df3c1..56fbb36a59f62 100644 --- a/eng/common/cross/arm64/sources.list.xenial +++ b/eng/common/cross/arm64/sources.list.xenial @@ -8,4 +8,4 @@ deb http://ports.ubuntu.com/ubuntu-ports/ xenial-backports main restricted deb-src http://ports.ubuntu.com/ubuntu-ports/ xenial-backports main restricted deb http://ports.ubuntu.com/ubuntu-ports/ xenial-security main restricted universe multiverse -deb-src http://ports.ubuntu.com/ubuntu-ports/ xenial-security main restricted universe multiverse \ No newline at end of file +deb-src http://ports.ubuntu.com/ubuntu-ports/ xenial-security main restricted universe multiverse diff --git a/eng/common/cross/build-rootfs.sh b/eng/common/cross/build-rootfs.sh index ff113f7335329..9caf9b021dbdd 100755 --- a/eng/common/cross/build-rootfs.sh +++ b/eng/common/cross/build-rootfs.sh @@ -14,6 +14,7 @@ usage() echo "lldbx.y - optional, LLDB version, can be: lldb3.9(default), lldb4.0, lldb5.0, lldb6.0 no-lldb. Ignored for alpine and FreeBSD" echo "llvmx[.y] - optional, LLVM version for LLVM related packages." echo "--skipunmount - optional, will skip the unmount of rootfs folder." + echo "--skipsigcheck - optional, will skip package signature checks (allowing untrusted packages)." echo "--use-mirror - optional, use mirror URL to fetch resources, when available." echo "--jobs N - optional, restrict to N jobs." 
exit 1 @@ -26,6 +27,7 @@ __AlpineArch=armv7 __FreeBSDArch=arm __FreeBSDMachineArch=armv7 __IllumosArch=arm7 +__HaikuArch=arm __QEMUArch=arm __UbuntuArch=armhf __UbuntuRepo="http://ports.ubuntu.com/" @@ -69,7 +71,7 @@ __AlpinePackages+=" krb5-dev" __AlpinePackages+=" openssl-dev" __AlpinePackages+=" zlib-dev" -__FreeBSDBase="12.3-RELEASE" +__FreeBSDBase="12.4-RELEASE" __FreeBSDPkg="1.17.0" __FreeBSDABI="12" __FreeBSDPackages="libunwind" @@ -84,8 +86,12 @@ __IllumosPackages+=" mit-krb5" __IllumosPackages+=" openssl" __IllumosPackages+=" zlib" -__HaikuPackages="gmp" +__HaikuPackages="gcc_syslibs" +__HaikuPackages+=" gcc_syslibs_devel" +__HaikuPackages+=" gmp" __HaikuPackages+=" gmp_devel" +__HaikuPackages+=" icu66" +__HaikuPackages+=" icu66_devel" __HaikuPackages+=" krb5" __HaikuPackages+=" krb5_devel" __HaikuPackages+=" libiconv" @@ -94,12 +100,36 @@ __HaikuPackages+=" llvm12_libunwind" __HaikuPackages+=" llvm12_libunwind_devel" __HaikuPackages+=" mpfr" __HaikuPackages+=" mpfr_devel" +__HaikuPackages+=" openssl" +__HaikuPackages+=" openssl_devel" +__HaikuPackages+=" zlib" +__HaikuPackages+=" zlib_devel" # ML.NET dependencies __UbuntuPackages+=" libomp5" __UbuntuPackages+=" libomp-dev" +# Taken from https://github.com/alpinelinux/alpine-chroot-install/blob/6d08f12a8a70dd9b9dc7d997c88aa7789cc03c42/alpine-chroot-install#L85-L133 +__AlpineKeys=' +4a6a0840:MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA1yHJxQgsHQREclQu4Ohe\nqxTxd1tHcNnvnQTu/UrTky8wWvgXT+jpveroeWWnzmsYlDI93eLI2ORakxb3gA2O\nQ0Ry4ws8vhaxLQGC74uQR5+/yYrLuTKydFzuPaS1dK19qJPXB8GMdmFOijnXX4SA\njixuHLe1WW7kZVtjL7nufvpXkWBGjsfrvskdNA/5MfxAeBbqPgaq0QMEfxMAn6/R\nL5kNepi/Vr4S39Xvf2DzWkTLEK8pcnjNkt9/aafhWqFVW7m3HCAII6h/qlQNQKSo\nGuH34Q8GsFG30izUENV9avY7hSLq7nggsvknlNBZtFUcmGoQrtx3FmyYsIC8/R+B\nywIDAQAB +5243ef4b:MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAvNijDxJ8kloskKQpJdx+\nmTMVFFUGDoDCbulnhZMJoKNkSuZOzBoFC94omYPtxnIcBdWBGnrm6ncbKRlR+6oy\nDO0W7c44uHKCFGFqBhDasdI4RCYP+fcIX/lyMh6MLbOxqS22TwSLhCVjTyJeeH7K\naA7vqk+QSsF4TGbYzQDDpg7+6aAcNzg6InNePaywA6hbT0JXbxnDWsB+2/LLSF2G\nmnhJlJrWB1WGjkz23ONIWk85W4S0XB/ewDefd4Ly/zyIciastA7Zqnh7p3Ody6Q0\nsS2MJzo7p3os1smGjUF158s6m/JbVh4DN6YIsxwl2OjDOz9R0OycfJSDaBVIGZzg\ncQIDAQAB +524d27bb:MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAr8s1q88XpuJWLCZALdKj\nlN8wg2ePB2T9aIcaxryYE/Jkmtu+ZQ5zKq6BT3y/udt5jAsMrhHTwroOjIsF9DeG\ne8Y3vjz+Hh4L8a7hZDaw8jy3CPag47L7nsZFwQOIo2Cl1SnzUc6/owoyjRU7ab0p\niWG5HK8IfiybRbZxnEbNAfT4R53hyI6z5FhyXGS2Ld8zCoU/R4E1P0CUuXKEN4p0\n64dyeUoOLXEWHjgKiU1mElIQj3k/IF02W89gDj285YgwqA49deLUM7QOd53QLnx+\nxrIrPv3A+eyXMFgexNwCKQU9ZdmWa00MjjHlegSGK8Y2NPnRoXhzqSP9T9i2HiXL\nVQIDAQAB +5261cecb:MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAwlzMkl7b5PBdfMzGdCT0\ncGloRr5xGgVmsdq5EtJvFkFAiN8Ac9MCFy/vAFmS8/7ZaGOXoCDWbYVLTLOO2qtX\nyHRl+7fJVh2N6qrDDFPmdgCi8NaE+3rITWXGrrQ1spJ0B6HIzTDNEjRKnD4xyg4j\ng01FMcJTU6E+V2JBY45CKN9dWr1JDM/nei/Pf0byBJlMp/mSSfjodykmz4Oe13xB\nCa1WTwgFykKYthoLGYrmo+LKIGpMoeEbY1kuUe04UiDe47l6Oggwnl+8XD1MeRWY\nsWgj8sF4dTcSfCMavK4zHRFFQbGp/YFJ/Ww6U9lA3Vq0wyEI6MCMQnoSMFwrbgZw\nwwIDAQAB +58199dcc:MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA3v8/ye/V/t5xf4JiXLXa\nhWFRozsnmn3hobON20GdmkrzKzO/eUqPOKTpg2GtvBhK30fu5oY5uN2ORiv2Y2ht\neLiZ9HVz3XP8Fm9frha60B7KNu66FO5P2o3i+E+DWTPqqPcCG6t4Znk2BypILcit\nwiPKTsgbBQR2qo/cO01eLLdt6oOzAaF94NH0656kvRewdo6HG4urbO46tCAizvCR\nCA7KGFMyad8WdKkTjxh8YLDLoOCtoZmXmQAiwfRe9pKXRH/XXGop8SYptLqyVVQ+\ntegOD9wRs2tOlgcLx4F/uMzHN7uoho6okBPiifRX+Pf38Vx+ozXh056tjmdZkCaV\naQIDAQAB 
+58cbb476:MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAoSPnuAGKtRIS5fEgYPXD\n8pSGvKAmIv3A08LBViDUe+YwhilSHbYXUEAcSH1KZvOo1WT1x2FNEPBEFEFU1Eyc\n+qGzbA03UFgBNvArurHQ5Z/GngGqE7IarSQFSoqewYRtFSfp+TL9CUNBvM0rT7vz\n2eMu3/wWG+CBmb92lkmyWwC1WSWFKO3x8w+Br2IFWvAZqHRt8oiG5QtYvcZL6jym\nY8T6sgdDlj+Y+wWaLHs9Fc+7vBuyK9C4O1ORdMPW15qVSl4Lc2Wu1QVwRiKnmA+c\nDsH/m7kDNRHM7TjWnuj+nrBOKAHzYquiu5iB3Qmx+0gwnrSVf27Arc3ozUmmJbLj\nzQIDAQAB +58e4f17d:MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAvBxJN9ErBgdRcPr5g4hV\nqyUSGZEKuvQliq2Z9SRHLh2J43+EdB6A+yzVvLnzcHVpBJ+BZ9RV30EM9guck9sh\nr+bryZcRHyjG2wiIEoduxF2a8KeWeQH7QlpwGhuobo1+gA8L0AGImiA6UP3LOirl\nI0G2+iaKZowME8/tydww4jx5vG132JCOScMjTalRsYZYJcjFbebQQolpqRaGB4iG\nWqhytWQGWuKiB1A22wjmIYf3t96l1Mp+FmM2URPxD1gk/BIBnX7ew+2gWppXOK9j\n1BJpo0/HaX5XoZ/uMqISAAtgHZAqq+g3IUPouxTphgYQRTRYpz2COw3NF43VYQrR\nbQIDAQAB +60ac2099:MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAwR4uJVtJOnOFGchnMW5Y\nj5/waBdG1u5BTMlH+iQMcV5+VgWhmpZHJCBz3ocD+0IGk2I68S5TDOHec/GSC0lv\n6R9o6F7h429GmgPgVKQsc8mPTPtbjJMuLLs4xKc+viCplXc0Nc0ZoHmCH4da6fCV\ntdpHQjVe6F9zjdquZ4RjV6R6JTiN9v924dGMAkbW/xXmamtz51FzondKC52Gh8Mo\n/oA0/T0KsCMCi7tb4QNQUYrf+Xcha9uus4ww1kWNZyfXJB87a2kORLiWMfs2IBBJ\nTmZ2Fnk0JnHDb8Oknxd9PvJPT0mvyT8DA+KIAPqNvOjUXP4bnjEHJcoCP9S5HkGC\nIQIDAQAB +6165ee59:MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAutQkua2CAig4VFSJ7v54\nALyu/J1WB3oni7qwCZD3veURw7HxpNAj9hR+S5N/pNeZgubQvJWyaPuQDm7PTs1+\ntFGiYNfAsiibX6Rv0wci3M+z2XEVAeR9Vzg6v4qoofDyoTbovn2LztaNEjTkB+oK\ntlvpNhg1zhou0jDVYFniEXvzjckxswHVb8cT0OMTKHALyLPrPOJzVtM9C1ew2Nnc\n3848xLiApMu3NBk0JqfcS3Bo5Y2b1FRVBvdt+2gFoKZix1MnZdAEZ8xQzL/a0YS5\nHd0wj5+EEKHfOd3A75uPa/WQmA+o0cBFfrzm69QDcSJSwGpzWrD1ScH3AK8nWvoj\nv7e9gukK/9yl1b4fQQ00vttwJPSgm9EnfPHLAtgXkRloI27H6/PuLoNvSAMQwuCD\nhQRlyGLPBETKkHeodfLoULjhDi1K2gKJTMhtbnUcAA7nEphkMhPWkBpgFdrH+5z4\nLxy+3ek0cqcI7K68EtrffU8jtUj9LFTUC8dERaIBs7NgQ/LfDbDfGh9g6qVj1hZl\nk9aaIPTm/xsi8v3u+0qaq7KzIBc9s59JOoA8TlpOaYdVgSQhHHLBaahOuAigH+VI\nisbC9vmqsThF2QdDtQt37keuqoda2E6sL7PUvIyVXDRfwX7uMDjlzTxHTymvq2Ck\nhtBqojBnThmjJQFgZXocHG8CAwEAAQ== +61666e3f:MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAlEyxkHggKCXC2Wf5Mzx4\nnZLFZvU2bgcA3exfNPO/g1YunKfQY+Jg4fr6tJUUTZ3XZUrhmLNWvpvSwDS19ZmC\nIXOu0+V94aNgnhMsk9rr59I8qcbsQGIBoHzuAl8NzZCgdbEXkiY90w1skUw8J57z\nqCsMBydAueMXuWqF5nGtYbi5vHwK42PffpiZ7G5Kjwn8nYMW5IZdL6ZnMEVJUWC9\nI4waeKg0yskczYDmZUEAtrn3laX9677ToCpiKrvmZYjlGl0BaGp3cxggP2xaDbUq\nqfFxWNgvUAb3pXD09JM6Mt6HSIJaFc9vQbrKB9KT515y763j5CC2KUsilszKi3mB\nHYe5PoebdjS7D1Oh+tRqfegU2IImzSwW3iwA7PJvefFuc/kNIijfS/gH/cAqAK6z\nbhdOtE/zc7TtqW2Wn5Y03jIZdtm12CxSxwgtCF1NPyEWyIxAQUX9ACb3M0FAZ61n\nfpPrvwTaIIxxZ01L3IzPLpbc44x/DhJIEU+iDt6IMTrHOphD9MCG4631eIdB0H1b\n6zbNX1CXTsafqHRFV9XmYYIeOMggmd90s3xIbEujA6HKNP/gwzO6CDJ+nHFDEqoF\nSkxRdTkEqjTjVKieURW7Swv7zpfu5PrsrrkyGnsRrBJJzXlm2FOOxnbI2iSL1B5F\nrO5kbUxFeZUIDq+7Yv4kLWcCAwEAAQ== +616a9724:MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAnC+bR4bHf/L6QdU4puhQ\ngl1MHePszRC38bzvVFDUJsmCaMCL2suCs2A2yxAgGb9pu9AJYLAmxQC4mM3jNqhg\n/E7yuaBbek3O02zN/ctvflJ250wZCy+z0ZGIp1ak6pu1j14IwHokl9j36zNfGtfv\nADVOcdpWITFFlPqwq1qt/H3UsKVmtiF3BNWWTeUEQwKvlU8ymxgS99yn0+4OPyNT\nL3EUeS+NQJtDS01unau0t7LnjUXn+XIneWny8bIYOQCuVR6s/gpIGuhBaUqwaJOw\n7jkJZYF2Ij7uPb4b5/R3vX2FfxxqEHqssFSg8FFUNTZz3qNZs0CRVyfA972g9WkJ\nhPfn31pQYil4QGRibCMIeU27YAEjXoqfJKEPh4UWMQsQLrEfdGfb8VgwrPbniGfU\nL3jKJR3VAafL9330iawzVQDlIlwGl6u77gEXMl9K0pfazunYhAp+BMP+9ot5ckK+\nosmrqj11qMESsAj083GeFdfV3pXEIwUytaB0AKEht9DbqUfiE/oeZ/LAXgySMtVC\nsbC4ESmgVeY2xSBIJdDyUap7FR49GGrw0W49NUv9gRgQtGGaNVQQO9oGL2PBC41P\niWF9GLoX30HIz1P8PF/cZvicSSPkQf2Z6TV+t0ebdGNS5DjapdnCrq8m9Z0pyKsQ\nuxAL2a7zX8l5i1CZh1ycUGsCAwEAAQ== 
+616abc23:MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA0MfCDrhODRCIxR9Dep1s\neXafh5CE5BrF4WbCgCsevyPIdvTeyIaW4vmO3bbG4VzhogDZju+R3IQYFuhoXP5v\nY+zYJGnwrgz3r5wYAvPnLEs1+dtDKYOgJXQj+wLJBW1mzRDL8FoRXOe5iRmn1EFS\nwZ1DoUvyu7/J5r0itKicZp3QKED6YoilXed+1vnS4Sk0mzN4smuMR9eO1mMCqNp9\n9KTfRDHTbakIHwasECCXCp50uXdoW6ig/xUAFanpm9LtK6jctNDbXDhQmgvAaLXZ\nLvFqoaYJ/CvWkyYCgL6qxvMvVmPoRv7OPcyni4xR/WgWa0MSaEWjgPx3+yj9fiMA\n1S02pFWFDOr5OUF/O4YhFJvUCOtVsUPPfA/Lj6faL0h5QI9mQhy5Zb9TTaS9jB6p\nLw7u0dJlrjFedk8KTJdFCcaGYHP6kNPnOxMylcB/5WcztXZVQD5WpCicGNBxCGMm\nW64SgrV7M07gQfL/32QLsdqPUf0i8hoVD8wfQ3EpbQzv6Fk1Cn90bZqZafg8XWGY\nwddhkXk7egrr23Djv37V2okjzdqoyLBYBxMz63qQzFoAVv5VoY2NDTbXYUYytOvG\nGJ1afYDRVWrExCech1mX5ZVUB1br6WM+psFLJFoBFl6mDmiYt0vMYBddKISsvwLl\nIJQkzDwtXzT2cSjoj3T5QekCAwEAAQ== +616ac3bc:MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAvaaoSLab+IluixwKV5Od\n0gib2YurjPatGIbn5Ov2DLUFYiebj2oJINXJSwUOO+4WcuHFEqiL/1rya+k5hLZt\nhnPL1tn6QD4rESznvGSasRCQNT2vS/oyZbTYJRyAtFkEYLlq0t3S3xBxxHWuvIf0\nqVxVNYpQWyM3N9RIeYBR/euXKJXileSHk/uq1I5wTC0XBIHWcthczGN0m9wBEiWS\n0m3cnPk4q0Ea8mUJ91Rqob19qETz6VbSPYYpZk3qOycjKosuwcuzoMpwU8KRiMFd\n5LHtX0Hx85ghGsWDVtS0c0+aJa4lOMGvJCAOvDfqvODv7gKlCXUpgumGpLdTmaZ8\n1RwqspAe3IqBcdKTqRD4m2mSg23nVx2FAY3cjFvZQtfooT7q1ItRV5RgH6FhQSl7\n+6YIMJ1Bf8AAlLdRLpg+doOUGcEn+pkDiHFgI8ylH1LKyFKw+eXaAml/7DaWZk1d\ndqggwhXOhc/UUZFQuQQ8A8zpA13PcbC05XxN2hyP93tCEtyynMLVPtrRwDnHxFKa\nqKzs3rMDXPSXRn3ZZTdKH3069ApkEjQdpcwUh+EmJ1Ve/5cdtzT6kKWCjKBFZP/s\n91MlRrX2BTRdHaU5QJkUheUtakwxuHrdah2F94lRmsnQlpPr2YseJu6sIE+Dnx4M\nCfhdVbQL2w54R645nlnohu8CAwEAAQ== +616adfeb:MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAq0BFD1D4lIxQcsqEpQzU\npNCYM3aP1V/fxxVdT4DWvSI53JHTwHQamKdMWtEXetWVbP5zSROniYKFXd/xrD9X\n0jiGHey3lEtylXRIPxe5s+wXoCmNLcJVnvTcDtwx/ne2NLHxp76lyc25At+6RgE6\nADjLVuoD7M4IFDkAsd8UQ8zM0Dww9SylIk/wgV3ZkifecvgUQRagrNUdUjR56EBZ\nraQrev4hhzOgwelT0kXCu3snbUuNY/lU53CoTzfBJ5UfEJ5pMw1ij6X0r5S9IVsy\nKLWH1hiO0NzU2c8ViUYCly4Fe9xMTFc6u2dy/dxf6FwERfGzETQxqZvSfrRX+GLj\n/QZAXiPg5178hT/m0Y3z5IGenIC/80Z9NCi+byF1WuJlzKjDcF/TU72zk0+PNM/H\nKuppf3JT4DyjiVzNC5YoWJT2QRMS9KLP5iKCSThwVceEEg5HfhQBRT9M6KIcFLSs\nmFjx9kNEEmc1E8hl5IR3+3Ry8G5/bTIIruz14jgeY9u5jhL8Vyyvo41jgt9sLHR1\n/J1TxKfkgksYev7PoX6/ZzJ1ksWKZY5NFoDXTNYUgzFUTOoEaOg3BAQKadb3Qbbq\nXIrxmPBdgrn9QI7NCgfnAY3Tb4EEjs3ON/BNyEhUENcXOH6I1NbcuBQ7g9P73kE4\nVORdoc8MdJ5eoKBpO8Ww8HECAwEAAQ== +616ae350:MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAyduVzi1mWm+lYo2Tqt/0\nXkCIWrDNP1QBMVPrE0/ZlU2bCGSoo2Z9FHQKz/mTyMRlhNqTfhJ5qU3U9XlyGOPJ\npiM+b91g26pnpXJ2Q2kOypSgOMOPA4cQ42PkHBEqhuzssfj9t7x47ppS94bboh46\nxLSDRff/NAbtwTpvhStV3URYkxFG++cKGGa5MPXBrxIp+iZf9GnuxVdST5PGiVGP\nODL/b69sPJQNbJHVquqUTOh5Ry8uuD2WZuXfKf7/C0jC/ie9m2+0CttNu9tMciGM\nEyKG1/Xhk5iIWO43m4SrrT2WkFlcZ1z2JSf9Pjm4C2+HovYpihwwdM/OdP8Xmsnr\nDzVB4YvQiW+IHBjStHVuyiZWc+JsgEPJzisNY0Wyc/kNyNtqVKpX6dRhMLanLmy+\nf53cCSI05KPQAcGj6tdL+D60uKDkt+FsDa0BTAobZ31OsFVid0vCXtsbplNhW1IF\nHwsGXBTVcfXg44RLyL8Lk/2dQxDHNHzAUslJXzPxaHBLmt++2COa2EI1iWlvtznk\nOk9WP8SOAIj+xdqoiHcC4j72BOVVgiITIJNHrbppZCq6qPR+fgXmXa+sDcGh30m6\n9Wpbr28kLMSHiENCWTdsFij+NQTd5S47H7XTROHnalYDuF1RpS+DpQidT5tUimaT\nJZDr++FjKrnnijbyNF8b98UCAwEAAQ== 
+616db30d:MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAnpUpyWDWjlUk3smlWeA0\nlIMW+oJ38t92CRLHH3IqRhyECBRW0d0aRGtq7TY8PmxjjvBZrxTNDpJT6KUk4LRm\na6A6IuAI7QnNK8SJqM0DLzlpygd7GJf8ZL9SoHSH+gFsYF67Cpooz/YDqWrlN7Vw\ntO00s0B+eXy+PCXYU7VSfuWFGK8TGEv6HfGMALLjhqMManyvfp8hz3ubN1rK3c8C\nUS/ilRh1qckdbtPvoDPhSbTDmfU1g/EfRSIEXBrIMLg9ka/XB9PvWRrekrppnQzP\nhP9YE3x/wbFc5QqQWiRCYyQl/rgIMOXvIxhkfe8H5n1Et4VAorkpEAXdsfN8KSVv\nLSMazVlLp9GYq5SUpqYX3KnxdWBgN7BJoZ4sltsTpHQ/34SXWfu3UmyUveWj7wp0\nx9hwsPirVI00EEea9AbP7NM2rAyu6ukcm4m6ATd2DZJIViq2es6m60AE6SMCmrQF\nwmk4H/kdQgeAELVfGOm2VyJ3z69fQuywz7xu27S6zTKi05Qlnohxol4wVb6OB7qG\nLPRtK9ObgzRo/OPumyXqlzAi/Yvyd1ZQk8labZps3e16bQp8+pVPiumWioMFJDWV\nGZjCmyMSU8V6MB6njbgLHoyg2LCukCAeSjbPGGGYhnKLm1AKSoJh3IpZuqcKCk5C\n8CM1S15HxV78s9dFntEqIokCAwEAAQ== +' __Keyring= +__SkipSigCheck=0 __UseMirror=0 __UnprocessedBuildArgs= @@ -184,11 +214,13 @@ while :; do ;; x64) __BuildArch=x64 + __AlpineArch=x86_64 __UbuntuArch=amd64 __FreeBSDArch=amd64 __FreeBSDMachineArch=amd64 __illumosArch=x86_64 - __UbuntuRepo= + __HaikuArch=x86_64 + __UbuntuRepo="http://archive.ubuntu.com/ubuntu/" ;; x86) __BuildArch=x86 @@ -308,7 +340,7 @@ while :; do ;; freebsd13) __CodeName=freebsd - __FreeBSDBase="13.0-RELEASE" + __FreeBSDBase="13.2-RELEASE" __FreeBSDABI="13" __SkipUnmount=1 ;; @@ -318,12 +350,14 @@ while :; do ;; haiku) __CodeName=haiku - __BuildArch=x64 __SkipUnmount=1 ;; --skipunmount) __SkipUnmount=1 ;; + --skipsigcheck) + __SkipSigCheck=1 + ;; --rootfsdir|-rootfsdir) shift __RootfsDir="$1" @@ -351,7 +385,6 @@ case "$__AlpineVersion" in edge) __AlpineLlvmLibsLookup=1 ;; *) if [[ "$__AlpineArch" =~ s390x|ppc64le ]]; then - echo boo __AlpineVersion=3.15 # minimum version that supports lldb-dev __AlpinePackages+=" llvm12-libs" elif [[ "$__AlpineArch" == "x86" ]]; then @@ -380,6 +413,11 @@ if [[ "$__BuildArch" == "armel" ]]; then __LLDB_Package="lldb-3.5-dev" fi +if [[ "$__CodeName" == "xenial" && "$__UbuntuArch" == "armhf" ]]; then + # libnuma-dev is not available on armhf for xenial + __UbuntuPackages="${__UbuntuPackages//libnuma-dev/}" +fi + __UbuntuPackages+=" ${__LLDB_Package:-}" if [[ -n "$__LLVM_MajorVersion" ]]; then @@ -406,13 +444,18 @@ __RootfsDir="$( cd "$__RootfsDir" && pwd )" if [[ "$__CodeName" == "alpine" ]]; then __ApkToolsVersion=2.12.11 + __ApkToolsSHA512SUM=53e57b49230da07ef44ee0765b9592580308c407a8d4da7125550957bb72cb59638e04f8892a18b584451c8d841d1c7cb0f0ab680cc323a3015776affaa3be33 __ApkToolsDir="$(mktemp -d)" + __ApkKeysDir="$(mktemp -d)" wget "https://gitlab.alpinelinux.org/api/v4/projects/5/packages/generic//v$__ApkToolsVersion/x86_64/apk.static" -P "$__ApkToolsDir" + echo "$__ApkToolsSHA512SUM $__ApkToolsDir/apk.static" | sha512sum -c chmod +x "$__ApkToolsDir/apk.static" - mkdir -p "$__RootfsDir"/usr/bin - cp -v "/usr/bin/qemu-$__QEMUArch-static" "$__RootfsDir/usr/bin" + if [[ -f "/usr/bin/qemu-$__QEMUArch-static" ]]; then + mkdir -p "$__RootfsDir"/usr/bin + cp -v "/usr/bin/qemu-$__QEMUArch-static" "$__RootfsDir/usr/bin" + fi if [[ "$__AlpineVersion" == "edge" ]]; then version=edge @@ -420,17 +463,30 @@ if [[ "$__CodeName" == "alpine" ]]; then version="v$__AlpineVersion" fi + for line in $__AlpineKeys; do + id="${line%%:*}" + content="${line#*:}" + + echo -e "-----BEGIN PUBLIC KEY-----\n$content\n-----END PUBLIC KEY-----" > "$__ApkKeysDir/alpine-devel@lists.alpinelinux.org-$id.rsa.pub" + done + + if [[ "$__SkipSigCheck" == "1" ]]; then + __ApkSignatureArg="--allow-untrusted" + else + __ApkSignatureArg="--keys-dir $__ApkKeysDir" + fi + # initialize DB "$__ApkToolsDir/apk.static" \ 
-X "http://dl-cdn.alpinelinux.org/alpine/$version/main" \ -X "http://dl-cdn.alpinelinux.org/alpine/$version/community" \ - -U --allow-untrusted --root "$__RootfsDir" --arch "$__AlpineArch" --initdb add + -U $__ApkSignatureArg --root "$__RootfsDir" --arch "$__AlpineArch" --initdb add if [[ "$__AlpineLlvmLibsLookup" == 1 ]]; then __AlpinePackages+=" $("$__ApkToolsDir/apk.static" \ -X "http://dl-cdn.alpinelinux.org/alpine/$version/main" \ -X "http://dl-cdn.alpinelinux.org/alpine/$version/community" \ - -U --allow-untrusted --root "$__RootfsDir" --arch "$__AlpineArch" \ + -U $__ApkSignatureArg --root "$__RootfsDir" --arch "$__AlpineArch" \ search 'llvm*-libs' | sort | tail -1 | sed 's/-[^-]*//2g')" fi @@ -438,7 +494,7 @@ if [[ "$__CodeName" == "alpine" ]]; then "$__ApkToolsDir/apk.static" \ -X "http://dl-cdn.alpinelinux.org/alpine/$version/main" \ -X "http://dl-cdn.alpinelinux.org/alpine/$version/community" \ - -U --allow-untrusted --root "$__RootfsDir" --arch "$__AlpineArch" \ + -U $__ApkSignatureArg --root "$__RootfsDir" --arch "$__AlpineArch" \ add $__AlpinePackages rm -r "$__ApkToolsDir" @@ -512,69 +568,61 @@ elif [[ "$__CodeName" == "illumos" ]]; then elif [[ "$__CodeName" == "haiku" ]]; then JOBS=${MAXJOBS:="$(getconf _NPROCESSORS_ONLN)"} - echo "Building Haiku sysroot for x86_64" + echo "Building Haiku sysroot for $__HaikuArch" mkdir -p "$__RootfsDir/tmp" - cd "$__RootfsDir/tmp" - git clone -b hrev56235 https://review.haiku-os.org/haiku - git clone -b btrev43195 https://review.haiku-os.org/buildtools - cd "$__RootfsDir/tmp/buildtools" && git checkout 7487388f5110021d400b9f3b88e1a7f310dc066d - - # Fetch some unmerged patches - cd "$__RootfsDir/tmp/haiku" - ## Add development build profile (slimmer than nightly) - git fetch origin refs/changes/64/4164/1 && git -c commit.gpgsign=false cherry-pick FETCH_HEAD - - # Build jam - cd "$__RootfsDir/tmp/buildtools/jam" - make - - # Configure cross tools - echo "Building cross-compiler" - mkdir -p "$__RootfsDir/generated" - cd "$__RootfsDir/generated" - "$__RootfsDir/tmp/haiku/configure" -j"$JOBS" --sysroot "$__RootfsDir" --cross-tools-source "$__RootfsDir/tmp/buildtools" --build-cross-tools x86_64 - - # Build Haiku packages - echo "Building Haiku" - echo 'HAIKU_BUILD_PROFILE = "development-raw" ;' > UserProfileConfig - "$__RootfsDir/tmp/buildtools/jam/jam0" -j"$JOBS" -q 'package' 'Haiku' - - BaseUrl="https://depot.haiku-os.org/__api/v2/pkg/get-pkg" - - # Download additional packages - echo "Downloading additional required packages" + pushd "$__RootfsDir/tmp" + + mkdir "$__RootfsDir/tmp/download" + + echo "Downloading Haiku package tool" + git clone https://github.com/haiku/haiku-toolchains-ubuntu --depth 1 $__RootfsDir/tmp/script + wget -O "$__RootfsDir/tmp/download/hosttools.zip" $($__RootfsDir/tmp/script/fetch.sh --hosttools) + unzip -o "$__RootfsDir/tmp/download/hosttools.zip" -d "$__RootfsDir/tmp/bin" + + DepotBaseUrl="https://depot.haiku-os.org/__api/v2/pkg/get-pkg" + HpkgBaseUrl="https://eu.hpkg.haiku-os.org/haiku/master/$__HaikuArch/current" + + # Download Haiku packages + echo "Downloading Haiku packages" read -ra array <<<"$__HaikuPackages" for package in "${array[@]}"; do echo "Downloading $package..." 
# API documented here: https://github.com/haiku/haikudepotserver/blob/master/haikudepotserver-api2/src/main/resources/api2/pkg.yaml#L60 # The schema here: https://github.com/haiku/haikudepotserver/blob/master/haikudepotserver-api2/src/main/resources/api2/pkg.yaml#L598 - hpkgDownloadUrl="$(wget -qO- --post-data='{"name":"'"$package"'","repositorySourceCode":"haikuports_x86_64","versionType":"LATEST","naturalLanguageCode":"en"}' \ - --header='Content-Type:application/json' "$BaseUrl" | jq -r '.result.versions[].hpkgDownloadURL')" - wget -P "$__RootfsDir/generated/download" "$hpkgDownloadUrl" + hpkgDownloadUrl="$(wget -qO- --post-data='{"name":"'"$package"'","repositorySourceCode":"haikuports_'$__HaikuArch'","versionType":"LATEST","naturalLanguageCode":"en"}' \ + --header='Content-Type:application/json' "$DepotBaseUrl" | jq -r '.result.versions[].hpkgDownloadURL')" + wget -P "$__RootfsDir/tmp/download" "$hpkgDownloadUrl" + done + for package in haiku haiku_devel; do + echo "Downloading $package..." + hpkgVersion="$(wget -qO- $HpkgBaseUrl | sed -n 's/^.*version: "\([^"]*\)".*$/\1/p')" + wget -P "$__RootfsDir/tmp/download" "$HpkgBaseUrl/packages/$package-$hpkgVersion-1-$__HaikuArch.hpkg" done - # Setup the sysroot - echo "Setting up sysroot and extracting needed packages" + # Set up the sysroot + echo "Setting up sysroot and extracting required packages" mkdir -p "$__RootfsDir/boot/system" - for file in "$__RootfsDir/generated/objects/haiku/x86_64/packaging/packages/"*.hpkg; do - "$__RootfsDir/generated/objects/linux/x86_64/release/tools/package/package" extract -C "$__RootfsDir/boot/system" "$file" - done - for file in "$__RootfsDir/generated/download/"*.hpkg; do - "$__RootfsDir/generated/objects/linux/x86_64/release/tools/package/package" extract -C "$__RootfsDir/boot/system" "$file" + for file in "$__RootfsDir/tmp/download/"*.hpkg; do + echo "Extracting $file..." 
+ LD_LIBRARY_PATH="$__RootfsDir/tmp/bin" "$__RootfsDir/tmp/bin/package" extract -C "$__RootfsDir/boot/system" "$file" done + # Download buildtools + echo "Downloading Haiku buildtools" + wget -O "$__RootfsDir/tmp/download/buildtools.zip" $($__RootfsDir/tmp/script/fetch.sh --buildtools --arch=$__HaikuArch) + unzip -o "$__RootfsDir/tmp/download/buildtools.zip" -d "$__RootfsDir" + # Cleaning up temporary files echo "Cleaning up temporary files" + popd rm -rf "$__RootfsDir/tmp" - for name in "$__RootfsDir/generated/"*; do - if [[ "$name" =~ "cross-tools-" ]]; then - : # Keep the cross-compiler - else - rm -rf "$name" - fi - done elif [[ -n "$__CodeName" ]]; then - qemu-debootstrap $__Keyring --arch "$__UbuntuArch" "$__CodeName" "$__RootfsDir" "$__UbuntuRepo" + + if [[ "$__SkipSigCheck" == "0" ]]; then + __Keyring="$__Keyring --force-check-gpg" + fi + + debootstrap "--variant=minbase" $__Keyring --arch "$__UbuntuArch" "$__CodeName" "$__RootfsDir" "$__UbuntuRepo" cp "$__CrossDir/$__BuildArch/sources.list.$__CodeName" "$__RootfsDir/etc/apt/sources.list" chroot "$__RootfsDir" apt-get update chroot "$__RootfsDir" apt-get -f -y install diff --git a/eng/common/cross/toolchain.cmake b/eng/common/cross/toolchain.cmake index ccfb9951a52cb..0998e875e5f78 100644 --- a/eng/common/cross/toolchain.cmake +++ b/eng/common/cross/toolchain.cmake @@ -6,6 +6,7 @@ unset(FREEBSD) unset(ILLUMOS) unset(ANDROID) unset(TIZEN) +unset(HAIKU) set(TARGET_ARCH_NAME $ENV{TARGET_BUILD_ARCH}) if(EXISTS ${CROSS_ROOTFS}/bin/freebsd-version) @@ -16,6 +17,7 @@ elseif(EXISTS ${CROSS_ROOTFS}/usr/platform/i86pc) set(ILLUMOS 1) elseif(EXISTS ${CROSS_ROOTFS}/boot/system/develop/headers/config/HaikuConfig.h) set(CMAKE_SYSTEM_NAME Haiku) + set(HAIKU 1) else() set(CMAKE_SYSTEM_NAME Linux) set(LINUX 1) @@ -67,16 +69,30 @@ elseif(TARGET_ARCH_NAME STREQUAL "armv6") endif() elseif(TARGET_ARCH_NAME STREQUAL "ppc64le") set(CMAKE_SYSTEM_PROCESSOR ppc64le) - set(TOOLCHAIN "powerpc64le-linux-gnu") + if(EXISTS ${CROSS_ROOTFS}/usr/lib/gcc/powerpc64le-alpine-linux-musl) + set(TOOLCHAIN "powerpc64le-alpine-linux-musl") + else() + set(TOOLCHAIN "powerpc64le-linux-gnu") + endif() elseif(TARGET_ARCH_NAME STREQUAL "riscv64") set(CMAKE_SYSTEM_PROCESSOR riscv64) - set(TOOLCHAIN "riscv64-linux-gnu") + if(EXISTS ${CROSS_ROOTFS}/usr/lib/gcc/riscv64-alpine-linux-musl) + set(TOOLCHAIN "riscv64-alpine-linux-musl") + else() + set(TOOLCHAIN "riscv64-linux-gnu") + endif() elseif(TARGET_ARCH_NAME STREQUAL "s390x") set(CMAKE_SYSTEM_PROCESSOR s390x) - set(TOOLCHAIN "s390x-linux-gnu") + if(EXISTS ${CROSS_ROOTFS}/usr/lib/gcc/s390x-alpine-linux-musl) + set(TOOLCHAIN "s390x-alpine-linux-musl") + else() + set(TOOLCHAIN "s390x-linux-gnu") + endif() elseif(TARGET_ARCH_NAME STREQUAL "x64") set(CMAKE_SYSTEM_PROCESSOR x86_64) - if(LINUX) + if(EXISTS ${CROSS_ROOTFS}/usr/lib/gcc/x86_64-alpine-linux-musl) + set(TOOLCHAIN "x86_64-alpine-linux-musl") + elseif(LINUX) set(TOOLCHAIN "x86_64-linux-gnu") if(TIZEN) set(TIZEN_TOOLCHAIN "x86_64-tizen-linux-gnu/9.2.0") @@ -86,11 +102,15 @@ elseif(TARGET_ARCH_NAME STREQUAL "x64") elseif(ILLUMOS) set(TOOLCHAIN "x86_64-illumos") elseif(HAIKU) - set(TOOLCHAIN "x64_64-unknown-haiku") + set(TOOLCHAIN "x86_64-unknown-haiku") endif() elseif(TARGET_ARCH_NAME STREQUAL "x86") set(CMAKE_SYSTEM_PROCESSOR i686) - set(TOOLCHAIN "i686-linux-gnu") + if(EXISTS ${CROSS_ROOTFS}/usr/lib/gcc/i586-alpine-linux-musl) + set(TOOLCHAIN "i586-alpine-linux-musl") + else() + set(TOOLCHAIN "i686-linux-gnu") + endif() if(TIZEN) set(TIZEN_TOOLCHAIN 
"i586-tizen-linux-gnu/9.2.0") endif() @@ -187,6 +207,7 @@ elseif(ILLUMOS) set(CMAKE_CXX_STANDARD_LIBRARIES "${CMAKE_CXX_STANDARD_LIBRARIES} -lssp") elseif(HAIKU) set(CMAKE_SYSROOT "${CROSS_ROOTFS}") + set(CMAKE_PROGRAM_PATH "${CMAKE_PROGRAM_PATH};${CROSS_ROOTFS}/cross-tools-x86_64/bin") set(TOOLSET_PREFIX ${TOOLCHAIN}-) function(locate_toolchain_exec exec var) @@ -196,10 +217,7 @@ elseif(HAIKU) return() endif() - set(SEARCH_PATH "${CROSS_ROOTFS}/generated/cross-tools-x86_64/bin") - find_program(EXEC_LOCATION_${exec} - PATHS ${SEARCH_PATH} NAMES "${TOOLSET_PREFIX}${exec}${CLR_CMAKE_COMPILER_FILE_NAME_VERSION}" "${TOOLSET_PREFIX}${exec}") @@ -264,8 +282,11 @@ elseif(TARGET_ARCH_NAME MATCHES "^(arm64|x64)$") add_toolchain_linker_flag("-Wl,--rpath-link=${CROSS_ROOTFS}/usr/lib64/gcc/${TIZEN_TOOLCHAIN}") endif() elseif(TARGET_ARCH_NAME STREQUAL "x86") + if(EXISTS ${CROSS_ROOTFS}/usr/lib/gcc/i586-alpine-linux-musl) + add_toolchain_linker_flag("--target=${TOOLCHAIN}") + add_toolchain_linker_flag("-Wl,--rpath-link=${CROSS_ROOTFS}/usr/lib/gcc/${TOOLCHAIN}") + endif() add_toolchain_linker_flag(-m32) - if(TIZEN) add_toolchain_linker_flag("-B${CROSS_ROOTFS}/usr/lib/gcc/${TIZEN_TOOLCHAIN}") add_toolchain_linker_flag("-L${CROSS_ROOTFS}/lib") @@ -275,11 +296,14 @@ elseif(TARGET_ARCH_NAME STREQUAL "x86") elseif(ILLUMOS) add_toolchain_linker_flag("-L${CROSS_ROOTFS}/lib/amd64") add_toolchain_linker_flag("-L${CROSS_ROOTFS}/usr/amd64/lib") +elseif(HAIKU) + add_toolchain_linker_flag("-lnetwork") + add_toolchain_linker_flag("-lroot") endif() # Specify compile options -if((TARGET_ARCH_NAME MATCHES "^(arm|arm64|armel|armv6|ppc64le|riscv64|s390x)$" AND NOT ANDROID AND NOT FREEBSD) OR ILLUMOS OR HAIKU) +if((TARGET_ARCH_NAME MATCHES "^(arm|arm64|armel|armv6|ppc64le|riscv64|s390x|x64|x86)$" AND NOT ANDROID AND NOT FREEBSD) OR ILLUMOS OR HAIKU) set(CMAKE_C_COMPILER_TARGET ${TOOLCHAIN}) set(CMAKE_CXX_COMPILER_TARGET ${TOOLCHAIN}) set(CMAKE_ASM_COMPILER_TARGET ${TOOLCHAIN}) @@ -298,10 +322,16 @@ if(TARGET_ARCH_NAME MATCHES "^(arm|armel)$") add_definitions (-DCLR_ARM_FPU_CAPABILITY=${CLR_ARM_FPU_CAPABILITY}) + # persist variables across multiple try_compile passes + list(APPEND CMAKE_TRY_COMPILE_PLATFORM_VARIABLES CLR_ARM_FPU_TYPE CLR_ARM_FPU_CAPABILITY) + if(TARGET_ARCH_NAME STREQUAL "armel") add_compile_options(-mfloat-abi=softfp) endif() elseif(TARGET_ARCH_NAME STREQUAL "x86") + if(EXISTS ${CROSS_ROOTFS}/usr/lib/gcc/i586-alpine-linux-musl) + add_compile_options(--target=${TOOLCHAIN}) + endif() add_compile_options(-m32) add_compile_options(-Wno-error=unused-command-line-argument) endif() diff --git a/eng/common/cross/x64/sources.list.bionic b/eng/common/cross/x64/sources.list.bionic new file mode 100644 index 0000000000000..a71ccadcffaf8 --- /dev/null +++ b/eng/common/cross/x64/sources.list.bionic @@ -0,0 +1,11 @@ +deb http://archive.ubuntu.com/ubuntu/ bionic main restricted universe +deb-src http://archive.ubuntu.com/ubuntu/ bionic main restricted universe + +deb http://archive.ubuntu.com/ubuntu/ bionic-updates main restricted universe +deb-src http://archive.ubuntu.com/ubuntu/ bionic-updates main restricted universe + +deb http://archive.ubuntu.com/ubuntu/ bionic-backports main restricted +deb-src http://archive.ubuntu.com/ubuntu/ bionic-backports main restricted + +deb http://archive.ubuntu.com/ubuntu/ bionic-security main restricted universe multiverse +deb-src http://archive.ubuntu.com/ubuntu/ bionic-security main restricted universe multiverse diff --git a/eng/common/cross/x64/sources.list.xenial 
b/eng/common/cross/x64/sources.list.xenial new file mode 100644 index 0000000000000..ad9c5a0144ef0 --- /dev/null +++ b/eng/common/cross/x64/sources.list.xenial @@ -0,0 +1,11 @@ +deb http://archive.ubuntu.com/ubuntu/ xenial main restricted universe +deb-src http://archive.ubuntu.com/ubuntu/ xenial main restricted universe + +deb http://archive.ubuntu.com/ubuntu/ xenial-updates main restricted universe +deb-src http://archive.ubuntu.com/ubuntu/ xenial-updates main restricted universe + +deb http://archive.ubuntu.com/ubuntu/ xenial-backports main restricted +deb-src http://archive.ubuntu.com/ubuntu/ xenial-backports main restricted + +deb http://archive.ubuntu.com/ubuntu/ xenial-security main restricted universe multiverse +deb-src http://archive.ubuntu.com/ubuntu/ xenial-security main restricted universe multiverse diff --git a/eng/common/darc-init.ps1 b/eng/common/darc-init.ps1 index 435e7641341b1..8fda30bdce2b0 100644 --- a/eng/common/darc-init.ps1 +++ b/eng/common/darc-init.ps1 @@ -1,6 +1,6 @@ param ( $darcVersion = $null, - $versionEndpoint = 'https://maestro-prod.westus2.cloudapp.azure.com/api/assets/darc-version?api-version=2019-01-16', + $versionEndpoint = 'https://maestro.dot.net/api/assets/darc-version?api-version=2019-01-16', $verbosity = 'minimal', $toolpath = $null ) diff --git a/eng/common/darc-init.sh b/eng/common/darc-init.sh index 84c1d0cc2e75a..c305ae6bd771e 100755 --- a/eng/common/darc-init.sh +++ b/eng/common/darc-init.sh @@ -2,7 +2,7 @@ source="${BASH_SOURCE[0]}" darcVersion='' -versionEndpoint='https://maestro-prod.westus2.cloudapp.azure.com/api/assets/darc-version?api-version=2019-01-16' +versionEndpoint='https://maestro.dot.net/api/assets/darc-version?api-version=2019-01-16' verbosity='minimal' while [[ $# > 0 ]]; do diff --git a/eng/common/dotnet-install.sh b/eng/common/dotnet-install.sh index abd045a3247f0..7e69e3a9e24a7 100755 --- a/eng/common/dotnet-install.sh +++ b/eng/common/dotnet-install.sh @@ -54,6 +54,10 @@ cpuname=$(uname -m) case $cpuname in arm64|aarch64) buildarch=arm64 + if [ "$(getconf LONG_BIT)" -lt 64 ]; then + # This is 32-bit OS running on 64-bit CPU (for example Raspberry Pi OS) + buildarch=arm + fi ;; loongarch64) buildarch=loongarch64 diff --git a/eng/common/loc/P22DotNetHtmlLocalization.lss b/eng/common/loc/P22DotNetHtmlLocalization.lss index 6661fed566e49b0c206665bc21f135e06c9b89c4..5d892d619398f9feeac4cefc1c53ff18a807f732 100644 GIT binary patch literal 1876 zcmd5-O>Y`85WVMDO!tse0|`=3Kq5qFvJwFroLr(PGHdUyD`QjJ8$y14$Ln3lZW2{J zwGs!I&o^(Lhrz|mTEa6oR%kVh&N`>j#re@-x_nwr2#IG`%ct-0baH+K&@f(3mgC!a zLE`z$`_TL4VSG6vqld@GGPwJ;L@RorHxAn^xdXw5(R4X4f7_@k)pTF-lorz22$--N zNp~~4=EJDUfxSBovWjDy#&0y*A$Y%{951Lgf#O!hPmkZU}#I!)Wng z1f<%)R3}u5SL$^NOII+VJC!`W+V>zz0i%$|xtEKJRoWJsS2-4>j6oFy0;9}2)ZXS? 
z!4+5B;BS?^g*W;n#c6Okuah`~R7l#LlUM`Wwgi;X`GgRCXg2Aj93+p!!wlyufdm#b2J0d!FWGIX~B#Nn{?*T z-%qWy16eI?CAp^@t!rT|vicQ-vsH(!kP=oLa9cnNr~Mvq*K+4!SBpR#0ixlQVLn!4 z_-b`9x-gH#c?z0gti{WerraQ#kQ%)%r?a{+35PnWsUFnNN^8lf0v68Oy&oSBa0E$V ze5J~88to5MDZA?699~0Him^%vH|v?chtS*Ws!BWhyEn^bjuBh5M+wg@h`3AQ cw&^?j6MgS4IM#l5)SHHef6(}&EB`Bh1GJK71^@s6 literal 3810 zcmd^CT~8BH5S?ce|HG9Bo&v?0;P_m6l= znU-wb=}VLT7UH{>{5H;0M4iLmRE8QeBRr|>&k=uV*L;heKY+dq>B$0^=0I}|x`)s{ zht4t9zaiEh5Fe>E-;zVT;c4G?v;9N0G=rWwo~*(Cs`OS6ZL`HL;vsL0J@I%$+0YvE zx{9ukK|FtFx1PlPD5M;6ZM>f;1BhCf?`8y6QH*?RT9T>XwF z#~m_N+i^UKE^j{e;KdNW`kH9Rbj{G8tDY}mafCgG+m3H`I@_PhrDmcIzxD&IX@s083kV|lLUE^0(h6wWRPN0QN1n^PU5eX8r6OZ*s^g)tt77#SZCB}znxye#U$Dtinr6lnVu z!LzA{A}0~no7p$thFGJAnI}oSW||9H=Bz}I7kD#2MLg7WfrlE5o9sQjePc>qmv+6iQCmdp(y}(Vr diff --git a/eng/common/native/init-compiler.sh b/eng/common/native/init-compiler.sh index 7aee4213e1b06..2d5660642b8d4 100644 --- a/eng/common/native/init-compiler.sh +++ b/eng/common/native/init-compiler.sh @@ -63,8 +63,8 @@ if [ -z "$CLR_CC" ]; then # Set default versions if [ -z "$majorVersion" ]; then # note: gcc (all versions) and clang versions higher than 6 do not have minor version in file name, if it is zero. - if [ "$compiler" = "clang" ]; then versions="16 15 14 13 12 11 10 9 8 7 6.0 5.0 4.0 3.9 3.8 3.7 3.6 3.5" - elif [ "$compiler" = "gcc" ]; then versions="12 11 10 9 8 7 6 5 4.9"; fi + if [ "$compiler" = "clang" ]; then versions="18 17 16 15 14 13 12 11 10 9 8 7 6.0 5.0 4.0 3.9 3.8 3.7 3.6 3.5" + elif [ "$compiler" = "gcc" ]; then versions="13 12 11 10 9 8 7 6 5 4.9"; fi for version in $versions; do _major="${version%%.*}" diff --git a/eng/common/native/init-distro-rid.sh b/eng/common/native/init-distro-rid.sh new file mode 100644 index 0000000000000..de1687b2ccbe7 --- /dev/null +++ b/eng/common/native/init-distro-rid.sh @@ -0,0 +1,130 @@ +#!/usr/bin/env bash + +# getNonPortableDistroRid +# +# Input: +# targetOs: (str) +# targetArch: (str) +# rootfsDir: (str) +# +# Return: +# non-portable rid +getNonPortableDistroRid() +{ + local targetOs="$1" + local targetArch="$2" + local rootfsDir="$3" + local nonPortableRid="" + + if [ "$targetOs" = "linux" ]; then + if [ -e "${rootfsDir}/etc/os-release" ]; then + source "${rootfsDir}/etc/os-release" + + if [[ "${ID}" == "rhel" || "${ID}" == "rocky" || "${ID}" == "alpine" ]]; then + # remove the last version digit + VERSION_ID="${VERSION_ID%.*}" + fi + + if [[ "${VERSION_ID:-}" =~ ^([[:digit:]]|\.)+$ ]]; then + nonPortableRid="${ID}.${VERSION_ID}-${targetArch}" + else + # Rolling release distros either do not set VERSION_ID, set it as blank or + # set it to non-version looking string (such as TEMPLATE_VERSION_ID on ArchLinux); + # so omit it here to be consistent with everything else. + nonPortableRid="${ID}-${targetArch}" + fi + + elif [ -e "${rootfsDir}/android_platform" ]; then + source "$rootfsDir"/android_platform + nonPortableRid="$RID" + fi + fi + + if [ "$targetOs" = "freebsd" ]; then + # $rootfsDir can be empty. freebsd-version is shell script and it should always work. 
+ __freebsd_major_version=$($rootfsDir/bin/freebsd-version | { read v; echo "${v%%.*}"; }) + nonPortableRid="freebsd.$__freebsd_major_version-${targetArch}" + elif command -v getprop && getprop ro.product.system.model 2>&1 | grep -qi android; then + __android_sdk_version=$(getprop ro.build.version.sdk) + nonPortableRid="android.$__android_sdk_version-${targetArch}" + elif [ "$targetOs" = "illumos" ]; then + __uname_version=$(uname -v) + case "$__uname_version" in + omnios-*) + __omnios_major_version=$(echo "${__uname_version:8:2}") + nonPortableRid=omnios."$__omnios_major_version"-"$targetArch" + ;; + joyent_*) + __smartos_major_version=$(echo "${__uname_version:7:4}") + nonPortableRid=smartos."$__smartos_major_version"-"$targetArch" + ;; + illumos_*) + nonPortableRid=openindiana-"$targetArch" + ;; + esac + elif [ "$targetOs" = "solaris" ]; then + __uname_version=$(uname -v) + __solaris_major_version=$(echo "${__uname_version%.*}") + nonPortableRid=solaris."$__solaris_major_version"-"$targetArch" + elif [ "$targetOs" = "haiku" ]; then + __uname_release=$(uname -r) + nonPortableRid=haiku.r"$__uname_release"-"$targetArch" + fi + + echo "$(echo $nonPortableRid | tr '[:upper:]' '[:lower:]')" +} + +# initDistroRidGlobal +# +# Input: +# os: (str) +# arch: (str) +# rootfsDir?: (nullable:string) +# +# Return: +# None +# +# Notes: +# +# It is important to note that the function does not return anything, but it +# exports the following variables on success: +# +# __DistroRid : Non-portable rid of the target platform. +# __PortableTargetOS : OS-part of the portable rid that corresponds to the target platform. +# +initDistroRidGlobal() +{ + local targetOs="$1" + local targetArch="$2" + local rootfsDir="" + if [ "$#" -ge 3 ]; then + rootfsDir="$3" + fi + + if [ -n "${rootfsDir}" ]; then + # We may have a cross build. Check for the existence of the rootfsDir + if [ ! -e "${rootfsDir}" ]; then + echo "Error rootfsDir has been passed, but the location is not valid." + exit 1 + fi + fi + + __DistroRid=$(getNonPortableDistroRid "${targetOs}" "${targetArch}" "${rootfsDir}") + + if [ -z "${__PortableTargetOS:-}" ]; then + __PortableTargetOS="$targetOs" + + STRINGS="$(command -v strings || true)" + if [ -z "$STRINGS" ]; then + STRINGS="$(command -v llvm-strings || true)" + fi + + # Check for musl-based distros (e.g Alpine Linux, Void Linux). + if "${rootfsDir}/usr/bin/ldd" --version 2>&1 | grep -q musl || + ( [ -n "$STRINGS" ] && "$STRINGS" "${rootfsDir}/usr/bin/ldd" 2>&1 | grep -q musl ); then + __PortableTargetOS="linux-musl" + fi + fi + + export __DistroRid __PortableTargetOS +} diff --git a/eng/common/native/init-os-and-arch.sh b/eng/common/native/init-os-and-arch.sh new file mode 100644 index 0000000000000..e693617a6c2b6 --- /dev/null +++ b/eng/common/native/init-os-and-arch.sh @@ -0,0 +1,80 @@ +#!/usr/bin/env bash + +# Use uname to determine what the OS is. +OSName=$(uname -s | tr '[:upper:]' '[:lower:]') + +if command -v getprop && getprop ro.product.system.model 2>&1 | grep -qi android; then + OSName="android" +fi + +case "$OSName" in +freebsd|linux|netbsd|openbsd|sunos|android|haiku) + os="$OSName" ;; +darwin) + os=osx ;; +*) + echo "Unsupported OS $OSName detected!" + exit 1 ;; +esac + +# On Solaris, `uname -m` is discouraged, see https://docs.oracle.com/cd/E36784_01/html/E36870/uname-1.html +# and `uname -p` returns processor type (e.g. i386 on amd64). +# The appropriate tool to determine CPU is isainfo(1) https://docs.oracle.com/cd/E36784_01/html/E36870/isainfo-1.html. 
+if [ "$os" = "sunos" ]; then + if uname -o 2>&1 | grep -q illumos; then + os="illumos" + else + os="solaris" + fi + CPUName=$(isainfo -n) +else + # For the rest of the operating systems, use uname(1) to determine what the CPU is. + CPUName=$(uname -m) +fi + +case "$CPUName" in + arm64|aarch64) + arch=arm64 + ;; + + loongarch64) + arch=loongarch64 + ;; + + riscv64) + arch=riscv64 + ;; + + amd64|x86_64) + arch=x64 + ;; + + armv7l|armv8l) + if (NAME=""; . /etc/os-release; test "$NAME" = "Tizen"); then + arch=armel + else + arch=arm + fi + ;; + + armv6l) + arch=armv6 + ;; + + i[3-6]86) + echo "Unsupported CPU $CPUName detected, build might not succeed!" + arch=x86 + ;; + + s390x) + arch=s390x + ;; + + ppc64le) + arch=ppc64le + ;; + *) + echo "Unknown CPU $CPUName detected!" + exit 1 + ;; +esac diff --git a/eng/common/post-build/add-build-to-channel.ps1 b/eng/common/post-build/add-build-to-channel.ps1 index de2d957922a65..49938f0c89f76 100644 --- a/eng/common/post-build/add-build-to-channel.ps1 +++ b/eng/common/post-build/add-build-to-channel.ps1 @@ -2,7 +2,7 @@ param( [Parameter(Mandatory=$true)][int] $BuildId, [Parameter(Mandatory=$true)][int] $ChannelId, [Parameter(Mandatory=$true)][string] $MaestroApiAccessToken, - [Parameter(Mandatory=$false)][string] $MaestroApiEndPoint = 'https://maestro-prod.westus2.cloudapp.azure.com', + [Parameter(Mandatory=$false)][string] $MaestroApiEndPoint = 'https://maestro.dot.net', [Parameter(Mandatory=$false)][string] $MaestroApiVersion = '2019-01-16' ) diff --git a/eng/common/post-build/publish-using-darc.ps1 b/eng/common/post-build/publish-using-darc.ps1 index 8508397d77640..5a3a32ea8d75b 100644 --- a/eng/common/post-build/publish-using-darc.ps1 +++ b/eng/common/post-build/publish-using-darc.ps1 @@ -3,7 +3,7 @@ param( [Parameter(Mandatory=$true)][int] $PublishingInfraVersion, [Parameter(Mandatory=$true)][string] $AzdoToken, [Parameter(Mandatory=$true)][string] $MaestroToken, - [Parameter(Mandatory=$false)][string] $MaestroApiEndPoint = 'https://maestro-prod.westus2.cloudapp.azure.com', + [Parameter(Mandatory=$false)][string] $MaestroApiEndPoint = 'https://maestro.dot.net', [Parameter(Mandatory=$true)][string] $WaitPublishingFinish, [Parameter(Mandatory=$false)][string] $ArtifactsPublishingAdditionalParameters, [Parameter(Mandatory=$false)][string] $SymbolPublishingAdditionalParameters @@ -12,7 +12,7 @@ param( try { . $PSScriptRoot\post-build-utils.ps1 - $darc = Get-Darc + $darc = Get-Darc $optionalParams = [System.Collections.ArrayList]::new() @@ -46,7 +46,7 @@ try { } Write-Host 'done.' -} +} catch { Write-Host $_ Write-PipelineTelemetryError -Category 'PromoteBuild' -Message "There was an error while trying to publish build '$BuildId' to default channels." 
diff --git a/eng/common/post-build/trigger-subscriptions.ps1 b/eng/common/post-build/trigger-subscriptions.ps1 index 55dea518ac585..ac9a95778fcd9 100644 --- a/eng/common/post-build/trigger-subscriptions.ps1 +++ b/eng/common/post-build/trigger-subscriptions.ps1 @@ -2,7 +2,7 @@ param( [Parameter(Mandatory=$true)][string] $SourceRepo, [Parameter(Mandatory=$true)][int] $ChannelId, [Parameter(Mandatory=$true)][string] $MaestroApiAccessToken, - [Parameter(Mandatory=$false)][string] $MaestroApiEndPoint = 'https://maestro-prod.westus2.cloudapp.azure.com', + [Parameter(Mandatory=$false)][string] $MaestroApiEndPoint = 'https://maestro.dot.net', [Parameter(Mandatory=$false)][string] $MaestroApiVersion = '2019-01-16' ) diff --git a/eng/common/sdk-task.ps1 b/eng/common/sdk-task.ps1 index e10a596879746..73828dd30d317 100644 --- a/eng/common/sdk-task.ps1 +++ b/eng/common/sdk-task.ps1 @@ -64,7 +64,7 @@ try { $GlobalJson.tools | Add-Member -Name "vs" -Value (ConvertFrom-Json "{ `"version`": `"16.5`" }") -MemberType NoteProperty } if( -not ($GlobalJson.tools.PSObject.Properties.Name -match "xcopy-msbuild" )) { - $GlobalJson.tools | Add-Member -Name "xcopy-msbuild" -Value "17.4.1" -MemberType NoteProperty + $GlobalJson.tools | Add-Member -Name "xcopy-msbuild" -Value "17.8.1-2" -MemberType NoteProperty } if ($GlobalJson.tools."xcopy-msbuild".Trim() -ine "none") { $xcopyMSBuildToolsFolder = InitializeXCopyMSBuild $GlobalJson.tools."xcopy-msbuild" -install $true diff --git a/eng/common/sdl/configure-sdl-tool.ps1 b/eng/common/sdl/configure-sdl-tool.ps1 index bdbf49e6c71de..27f5a4115fc74 100644 --- a/eng/common/sdl/configure-sdl-tool.ps1 +++ b/eng/common/sdl/configure-sdl-tool.ps1 @@ -17,7 +17,9 @@ Param( # Optional: Additional params to add to any tool using PoliCheck. [string[]] $PoliCheckAdditionalRunConfigParams, # Optional: Additional params to add to any tool using CodeQL/Semmle. - [string[]] $CodeQLAdditionalRunConfigParams + [string[]] $CodeQLAdditionalRunConfigParams, + # Optional: Additional params to add to any tool using Binskim. + [string[]] $BinskimAdditionalRunConfigParams ) $ErrorActionPreference = 'Stop' @@ -69,22 +71,34 @@ try { $gdnConfigFile = Join-Path $gdnConfigPath "$toolConfigName-configure.gdnconfig" # For some tools, add default and automatic args. - if ($tool.Name -eq 'credscan') { - if ($targetDirectory) { - $tool.Args += "`"TargetDirectory < $TargetDirectory`"" + switch -Exact ($tool.Name) { + 'credscan' { + if ($targetDirectory) { + $tool.Args += "`"TargetDirectory < $TargetDirectory`"" + } + $tool.Args += "`"OutputType < pre`"" + $tool.Args += $CrScanAdditionalRunConfigParams } - $tool.Args += "`"OutputType < pre`"" - $tool.Args += $CrScanAdditionalRunConfigParams - } elseif ($tool.Name -eq 'policheck') { - if ($targetDirectory) { - $tool.Args += "`"Target < $TargetDirectory`"" + 'policheck' { + if ($targetDirectory) { + $tool.Args += "`"Target < $TargetDirectory`"" + } + $tool.Args += $PoliCheckAdditionalRunConfigParams } - $tool.Args += $PoliCheckAdditionalRunConfigParams - } elseif ($tool.Name -eq 'semmle' -or $tool.Name -eq 'codeql') { - if ($targetDirectory) { - $tool.Args += "`"SourceCodeDirectory < $TargetDirectory`"" + {$_ -in 'semmle', 'codeql'} { + if ($targetDirectory) { + $tool.Args += "`"SourceCodeDirectory < $TargetDirectory`"" + } + $tool.Args += $CodeQLAdditionalRunConfigParams + } + 'binskim' { + if ($targetDirectory) { + # Binskim crashes due to specific PDBs. GitHub issue: https://github.com/microsoft/binskim/issues/924. 
+ # We are excluding all `_.pdb` files from the scan. + $tool.Args += "`"Target < $TargetDirectory\**;-:file|$TargetDirectory\**\_.pdb`"" + } + $tool.Args += $BinskimAdditionalRunConfigParams } - $tool.Args += $CodeQLAdditionalRunConfigParams } # Create variable pointing to the args array directly so we can use splat syntax later. diff --git a/eng/common/sdl/execute-all-sdl-tools.ps1 b/eng/common/sdl/execute-all-sdl-tools.ps1 index 4797e012c7d2f..4715d75e974d3 100644 --- a/eng/common/sdl/execute-all-sdl-tools.ps1 +++ b/eng/common/sdl/execute-all-sdl-tools.ps1 @@ -35,6 +35,7 @@ Param( [string[]] $CrScanAdditionalRunConfigParams, # Optional: Additional Params to custom build a CredScan run config in the format @("xyz:abc","sdf:1") [string[]] $PoliCheckAdditionalRunConfigParams, # Optional: Additional Params to custom build a Policheck run config in the format @("xyz:abc","sdf:1") [string[]] $CodeQLAdditionalRunConfigParams, # Optional: Additional Params to custom build a Semmle/CodeQL run config in the format @("xyz < abc","sdf < 1") + [string[]] $BinskimAdditionalRunConfigParams, # Optional: Additional Params to custom build a Binskim run config in the format @("xyz < abc","sdf < 1") [bool] $BreakOnFailure=$False # Optional: Fail the build if there were errors during the run ) @@ -107,7 +108,8 @@ try { -GuardianLoggerLevel $GuardianLoggerLevel ` -CrScanAdditionalRunConfigParams $CrScanAdditionalRunConfigParams ` -PoliCheckAdditionalRunConfigParams $PoliCheckAdditionalRunConfigParams ` - -CodeQLAdditionalRunConfigParams $CodeQLAdditionalRunConfigParams + -CodeQLAdditionalRunConfigParams $CodeQLAdditionalRunConfigParams ` + -BinskimAdditionalRunConfigParams $BinskimAdditionalRunConfigParams if ($BreakOnFailure) { Exit-IfNZEC "Sdl" } diff --git a/eng/common/sdl/extract-artifact-packages.ps1 b/eng/common/sdl/extract-artifact-packages.ps1 index 7f28d9c59ec69..f031ed5b25e9f 100644 --- a/eng/common/sdl/extract-artifact-packages.ps1 +++ b/eng/common/sdl/extract-artifact-packages.ps1 @@ -35,31 +35,33 @@ try { param( [string] $PackagePath # Full path to a NuGet package ) - + if (!(Test-Path $PackagePath)) { Write-PipelineTelemetryError -Category 'Build' -Message "Input file does not exist: $PackagePath" ExitWithExitCode 1 } - + $RelevantExtensions = @('.dll', '.exe', '.pdb') Write-Host -NoNewLine 'Extracting ' ([System.IO.Path]::GetFileName($PackagePath)) '...' 
- + $PackageId = [System.IO.Path]::GetFileNameWithoutExtension($PackagePath) $ExtractPath = Join-Path -Path $using:ExtractPath -ChildPath $PackageId - + Add-Type -AssemblyName System.IO.Compression.FileSystem - + [System.IO.Directory]::CreateDirectory($ExtractPath); - + try { $zip = [System.IO.Compression.ZipFile]::OpenRead($PackagePath) $zip.Entries | Where-Object {$RelevantExtensions -contains [System.IO.Path]::GetExtension($_.Name)} | ForEach-Object { - $TargetFile = Join-Path -Path $ExtractPath -ChildPath $_.Name - - [System.IO.Compression.ZipFileExtensions]::ExtractToFile($_, $TargetFile, $true) + $TargetPath = Join-Path -Path $ExtractPath -ChildPath (Split-Path -Path $_.FullName) + [System.IO.Directory]::CreateDirectory($TargetPath); + + $TargetFile = Join-Path -Path $ExtractPath -ChildPath $_.FullName + [System.IO.Compression.ZipFileExtensions]::ExtractToFile($_, $TargetFile) } } catch { diff --git a/eng/common/sdl/trim-assets-version.ps1 b/eng/common/sdl/trim-assets-version.ps1 new file mode 100644 index 0000000000000..a2e0048770452 --- /dev/null +++ b/eng/common/sdl/trim-assets-version.ps1 @@ -0,0 +1,75 @@ +<# +.SYNOPSIS +Install and run the 'Microsoft.DotNet.VersionTools.Cli' tool with the 'trim-artifacts-version' command to trim the version from the NuGet assets file name. + +.PARAMETER InputPath +Full path to directory where artifact packages are stored + +.PARAMETER Recursive +Search for NuGet packages recursively + +#> + +Param( + [string] $InputPath, + [bool] $Recursive = $true +) + +$CliToolName = "Microsoft.DotNet.VersionTools.Cli" + +function Install-VersionTools-Cli { + param( + [Parameter(Mandatory=$true)][string]$Version + ) + + Write-Host "Installing the package '$CliToolName' with a version of '$version' ..." + $feed = "https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng/nuget/v3/index.json" + + $argumentList = @("tool", "install", "--local", "$CliToolName", "--add-source $feed", "--no-cache", "--version $Version", "--create-manifest-if-needed") + Start-Process "$dotnet" -Verbose -ArgumentList $argumentList -NoNewWindow -Wait +} + +# ------------------------------------------------------------------- + +if (!(Test-Path $InputPath)) { + Write-Host "Input Path '$InputPath' does not exist" + ExitWithExitCode 1 +} + +$ErrorActionPreference = 'Stop' +Set-StrictMode -Version 2.0 + +$disableConfigureToolsetImport = $true +$global:LASTEXITCODE = 0 + +# `tools.ps1` checks $ci to perform some actions. Since the SDL +# scripts don't necessarily execute in the same agent that run the +# build.ps1/sh script this variable isn't automatically set. +$ci = $true +. $PSScriptRoot\..\tools.ps1 + +try { + $dotnetRoot = InitializeDotNetCli -install:$true + $dotnet = "$dotnetRoot\dotnet.exe" + + $toolsetVersion = Read-ArcadeSdkVersion + Install-VersionTools-Cli -Version $toolsetVersion + + $cliToolFound = (& "$dotnet" tool list --local | Where-Object {$_.Split(' ')[0] -eq $CliToolName}) + if ($null -eq $cliToolFound) { + Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "The '$CliToolName' tool is not installed." 
+ ExitWithExitCode 1 + } + + Exec-BlockVerbosely { + & "$dotnet" $CliToolName trim-assets-version ` + --assets-path $InputPath ` + --recursive $Recursive + Exit-IfNZEC "Sdl" + } +} +catch { + Write-Host $_ + Write-PipelineTelemetryError -Force -Category 'Sdl' -Message $_ + ExitWithExitCode 1 +} \ No newline at end of file diff --git a/eng/common/templates-official/job/job.yml b/eng/common/templates-official/job/job.yml new file mode 100644 index 0000000000000..1f035fee73f4a --- /dev/null +++ b/eng/common/templates-official/job/job.yml @@ -0,0 +1,264 @@ +# Internal resources (telemetry, microbuild) can only be accessed from non-public projects, +# and some (Microbuild) should only be applied to non-PR cases for internal builds. + +parameters: +# Job schema parameters - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job + cancelTimeoutInMinutes: '' + condition: '' + container: '' + continueOnError: false + dependsOn: '' + displayName: '' + pool: '' + steps: [] + strategy: '' + timeoutInMinutes: '' + variables: [] + workspace: '' + templateContext: '' + +# Job base template specific parameters + # See schema documentation - https://github.com/dotnet/arcade/blob/master/Documentation/AzureDevOps/TemplateSchema.md + artifacts: '' + enableMicrobuild: false + enablePublishBuildArtifacts: false + enablePublishBuildAssets: false + enablePublishTestResults: false + enablePublishUsingPipelines: false + enableBuildRetry: false + disableComponentGovernance: '' + componentGovernanceIgnoreDirectories: '' + mergeTestResults: false + testRunTitle: '' + testResultsFormat: '' + name: '' + preSteps: [] + runAsPublic: false +# Sbom related params + enableSbom: true + PackageVersion: 7.0.0 + BuildDropPath: '$(Build.SourcesDirectory)/artifacts' + +jobs: +- job: ${{ parameters.name }} + + ${{ if ne(parameters.cancelTimeoutInMinutes, '') }}: + cancelTimeoutInMinutes: ${{ parameters.cancelTimeoutInMinutes }} + + ${{ if ne(parameters.condition, '') }}: + condition: ${{ parameters.condition }} + + ${{ if ne(parameters.container, '') }}: + container: ${{ parameters.container }} + + ${{ if ne(parameters.continueOnError, '') }}: + continueOnError: ${{ parameters.continueOnError }} + + ${{ if ne(parameters.dependsOn, '') }}: + dependsOn: ${{ parameters.dependsOn }} + + ${{ if ne(parameters.displayName, '') }}: + displayName: ${{ parameters.displayName }} + + ${{ if ne(parameters.pool, '') }}: + pool: ${{ parameters.pool }} + + ${{ if ne(parameters.strategy, '') }}: + strategy: ${{ parameters.strategy }} + + ${{ if ne(parameters.timeoutInMinutes, '') }}: + timeoutInMinutes: ${{ parameters.timeoutInMinutes }} + + ${{ if ne(parameters.templateContext, '') }}: + templateContext: ${{ parameters.templateContext }} + + variables: + - ${{ if ne(parameters.enableTelemetry, 'false') }}: + - name: DOTNET_CLI_TELEMETRY_PROFILE + value: '$(Build.Repository.Uri)' + - ${{ if eq(parameters.enableRichCodeNavigation, 'true') }}: + - name: EnableRichCodeNavigation + value: 'true' + # Retry signature validation up to three times, waiting 2 seconds between attempts. 
+ # See https://learn.microsoft.com/en-us/nuget/reference/errors-and-warnings/nu3028#retry-untrusted-root-failures + - name: NUGET_EXPERIMENTAL_CHAIN_BUILD_RETRY_POLICY + value: 3,2000 + - ${{ each variable in parameters.variables }}: + # handle name-value variable syntax + # example: + # - name: [key] + # value: [value] + - ${{ if ne(variable.name, '') }}: + - name: ${{ variable.name }} + value: ${{ variable.value }} + + # handle variable groups + - ${{ if ne(variable.group, '') }}: + - group: ${{ variable.group }} + + # handle template variable syntax + # example: + # - template: path/to/template.yml + # parameters: + # [key]: [value] + - ${{ if ne(variable.template, '') }}: + - template: ${{ variable.template }} + ${{ if ne(variable.parameters, '') }}: + parameters: ${{ variable.parameters }} + + # handle key-value variable syntax. + # example: + # - [key]: [value] + - ${{ if and(eq(variable.name, ''), eq(variable.group, ''), eq(variable.template, '')) }}: + - ${{ each pair in variable }}: + - name: ${{ pair.key }} + value: ${{ pair.value }} + + # DotNet-HelixApi-Access provides 'HelixApiAccessToken' for internal builds + - ${{ if and(eq(parameters.enableTelemetry, 'true'), eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: + - group: DotNet-HelixApi-Access + + ${{ if ne(parameters.workspace, '') }}: + workspace: ${{ parameters.workspace }} + + steps: + - ${{ if ne(parameters.preSteps, '') }}: + - ${{ each preStep in parameters.preSteps }}: + - ${{ preStep }} + + - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: + - ${{ if eq(parameters.enableMicrobuild, 'true') }}: + - task: MicroBuildSigningPlugin@4 + displayName: Install MicroBuild plugin + inputs: + signType: $(_SignType) + zipSources: false + feedSource: https://dnceng.pkgs.visualstudio.com/_packaging/MicroBuildToolset/nuget/v3/index.json + env: + TeamName: $(_TeamName) + MicroBuildOutputFolderOverride: '$(Agent.TempDirectory)' + continueOnError: ${{ parameters.continueOnError }} + condition: and(succeeded(), in(variables['_SignType'], 'real', 'test'), eq(variables['Agent.Os'], 'Windows_NT')) + + - ${{ if and(eq(parameters.runAsPublic, 'false'), eq(variables['System.TeamProject'], 'internal')) }}: + - task: NuGetAuthenticate@1 + + - ${{ if and(ne(parameters.artifacts.download, 'false'), ne(parameters.artifacts.download, '')) }}: + - task: DownloadPipelineArtifact@2 + inputs: + buildType: current + artifactName: ${{ coalesce(parameters.artifacts.download.name, 'Artifacts_$(Agent.OS)_$(_BuildConfig)') }} + targetPath: ${{ coalesce(parameters.artifacts.download.path, 'artifacts') }} + itemPattern: ${{ coalesce(parameters.artifacts.download.pattern, '**') }} + + - ${{ each step in parameters.steps }}: + - ${{ step }} + + - ${{ if eq(parameters.enableRichCodeNavigation, true) }}: + - task: RichCodeNavIndexer@0 + displayName: RichCodeNav Upload + inputs: + languages: ${{ coalesce(parameters.richCodeNavigationLanguage, 'csharp') }} + environment: ${{ coalesce(parameters.richCodeNavigationEnvironment, 'production') }} + richNavLogOutputDirectory: $(Build.SourcesDirectory)/artifacts/bin + uploadRichNavArtifacts: ${{ coalesce(parameters.richCodeNavigationUploadArtifacts, false) }} + continueOnError: true + + - template: /eng/common/templates-official/steps/component-governance.yml + parameters: + ${{ if eq(parameters.disableComponentGovernance, '') }}: + ${{ 
if and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest'), eq(parameters.runAsPublic, 'false'), or(startsWith(variables['Build.SourceBranch'], 'refs/heads/release/'), startsWith(variables['Build.SourceBranch'], 'refs/heads/dotnet/'), startsWith(variables['Build.SourceBranch'], 'refs/heads/microsoft/'), eq(variables['Build.SourceBranch'], 'refs/heads/main'))) }}: + disableComponentGovernance: false + ${{ else }}: + disableComponentGovernance: true + ${{ else }}: + disableComponentGovernance: ${{ parameters.disableComponentGovernance }} + componentGovernanceIgnoreDirectories: ${{ parameters.componentGovernanceIgnoreDirectories }} + + - ${{ if eq(parameters.enableMicrobuild, 'true') }}: + - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: + - task: MicroBuildCleanup@1 + displayName: Execute Microbuild cleanup tasks + condition: and(always(), in(variables['_SignType'], 'real', 'test'), eq(variables['Agent.Os'], 'Windows_NT')) + continueOnError: ${{ parameters.continueOnError }} + env: + TeamName: $(_TeamName) + + - ${{ if ne(parameters.artifacts.publish, '') }}: + - ${{ if and(ne(parameters.artifacts.publish.artifacts, 'false'), ne(parameters.artifacts.publish.artifacts, '')) }}: + - task: CopyFiles@2 + displayName: Gather binaries for publish to artifacts + inputs: + SourceFolder: 'artifacts/bin' + Contents: '**' + TargetFolder: '$(Build.ArtifactStagingDirectory)/artifacts/bin' + - task: CopyFiles@2 + displayName: Gather packages for publish to artifacts + inputs: + SourceFolder: 'artifacts/packages' + Contents: '**' + TargetFolder: '$(Build.ArtifactStagingDirectory)/artifacts/packages' + - task: 1ES.PublishBuildArtifacts@1 + displayName: Publish pipeline artifacts + inputs: + PathtoPublish: '$(Build.ArtifactStagingDirectory)/artifacts' + PublishLocation: Container + ArtifactName: ${{ coalesce(parameters.artifacts.publish.artifacts.name , 'Artifacts_$(Agent.Os)_$(_BuildConfig)') }} + continueOnError: true + condition: always() + - ${{ if and(ne(parameters.artifacts.publish.logs, 'false'), ne(parameters.artifacts.publish.logs, '')) }}: + - task: 1ES.PublishPipelineArtifact@1 + inputs: + targetPath: 'artifacts/log' + artifactName: ${{ coalesce(parameters.artifacts.publish.logs.name, 'Logs_Build_$(Agent.Os)_$(_BuildConfig)') }} + displayName: 'Publish logs' + continueOnError: true + condition: always() + + - ${{ if ne(parameters.enablePublishBuildArtifacts, 'false') }}: + - task: 1ES.PublishBuildArtifacts@1 + displayName: Publish Logs + inputs: + PathtoPublish: '$(Build.SourcesDirectory)/artifacts/log/$(_BuildConfig)' + PublishLocation: Container + ArtifactName: ${{ coalesce(parameters.enablePublishBuildArtifacts.artifactName, '$(Agent.Os)_$(Agent.JobName)' ) }} + continueOnError: true + condition: always() + + - ${{ if or(and(eq(parameters.enablePublishTestResults, 'true'), eq(parameters.testResultsFormat, '')), eq(parameters.testResultsFormat, 'xunit')) }}: + - task: PublishTestResults@2 + displayName: Publish XUnit Test Results + inputs: + testResultsFormat: 'xUnit' + testResultsFiles: '*.xml' + searchFolder: '$(Build.SourcesDirectory)/artifacts/TestResults/$(_BuildConfig)' + testRunTitle: ${{ coalesce(parameters.testRunTitle, parameters.name, '$(System.JobName)') }}-xunit + mergeTestResults: ${{ parameters.mergeTestResults }} + continueOnError: true + condition: always() + - ${{ if or(and(eq(parameters.enablePublishTestResults, 'true'), 
eq(parameters.testResultsFormat, '')), eq(parameters.testResultsFormat, 'vstest')) }}: + - task: PublishTestResults@2 + displayName: Publish TRX Test Results + inputs: + testResultsFormat: 'VSTest' + testResultsFiles: '*.trx' + searchFolder: '$(Build.SourcesDirectory)/artifacts/TestResults/$(_BuildConfig)' + testRunTitle: ${{ coalesce(parameters.testRunTitle, parameters.name, '$(System.JobName)') }}-trx + mergeTestResults: ${{ parameters.mergeTestResults }} + continueOnError: true + condition: always() + + - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest'), eq(parameters.enableSbom, 'true')) }}: + - template: /eng/common/templates-official/steps/generate-sbom.yml + parameters: + PackageVersion: ${{ parameters.packageVersion}} + BuildDropPath: ${{ parameters.buildDropPath }} + IgnoreDirectories: ${{ parameters.componentGovernanceIgnoreDirectories }} + + - ${{ if eq(parameters.enableBuildRetry, 'true') }}: + - task: 1ES.PublishPipelineArtifact@1 + inputs: + targetPath: '$(Build.SourcesDirectory)\eng\common\BuildConfiguration' + artifactName: 'BuildConfiguration' + displayName: 'Publish build retry configuration' + continueOnError: true \ No newline at end of file diff --git a/eng/common/templates-official/job/onelocbuild.yml b/eng/common/templates-official/job/onelocbuild.yml new file mode 100644 index 0000000000000..52b4d05d3f8dd --- /dev/null +++ b/eng/common/templates-official/job/onelocbuild.yml @@ -0,0 +1,112 @@ +parameters: + # Optional: dependencies of the job + dependsOn: '' + + # Optional: A defined YAML pool - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#pool + pool: '' + + CeapexPat: $(dn-bot-ceapex-package-r) # PAT for the loc AzDO instance https://dev.azure.com/ceapex + GithubPat: $(BotAccount-dotnet-bot-repo-PAT) + + SourcesDirectory: $(Build.SourcesDirectory) + CreatePr: true + AutoCompletePr: false + ReusePr: true + UseLfLineEndings: true + UseCheckedInLocProjectJson: false + SkipLocProjectJsonGeneration: false + LanguageSet: VS_Main_Languages + LclSource: lclFilesInRepo + LclPackageId: '' + RepoType: gitHub + GitHubOrg: dotnet + MirrorRepo: '' + MirrorBranch: main + condition: '' + JobNameSuffix: '' + +jobs: +- job: OneLocBuild${{ parameters.JobNameSuffix }} + + dependsOn: ${{ parameters.dependsOn }} + + displayName: OneLocBuild${{ parameters.JobNameSuffix }} + + variables: + - group: OneLocBuildVariables # Contains the CeapexPat and GithubPat + - name: _GenerateLocProjectArguments + value: -SourcesDirectory ${{ parameters.SourcesDirectory }} + -LanguageSet "${{ parameters.LanguageSet }}" + -CreateNeutralXlfs + - ${{ if eq(parameters.UseCheckedInLocProjectJson, 'true') }}: + - name: _GenerateLocProjectArguments + value: ${{ variables._GenerateLocProjectArguments }} -UseCheckedInLocProjectJson + - template: /eng/common/templates-official/variables/pool-providers.yml + + ${{ if ne(parameters.pool, '') }}: + pool: ${{ parameters.pool }} + ${{ if eq(parameters.pool, '') }}: + pool: + # We don't use the collection uri here because it might vary (.visualstudio.com vs. 
dev.azure.com) + ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}: + name: AzurePipelines-EO + image: 1ESPT-Windows2022 + demands: Cmd + os: windows + # If it's not devdiv, it's dnceng + ${{ if ne(variables['System.TeamProject'], 'DevDiv') }}: + name: $(DncEngInternalBuildPool) + image: 1es-windows-2022 + os: windows + + steps: + - ${{ if ne(parameters.SkipLocProjectJsonGeneration, 'true') }}: + - task: Powershell@2 + inputs: + filePath: $(Build.SourcesDirectory)/eng/common/generate-locproject.ps1 + arguments: $(_GenerateLocProjectArguments) + displayName: Generate LocProject.json + condition: ${{ parameters.condition }} + + - task: OneLocBuild@2 + displayName: OneLocBuild + env: + SYSTEM_ACCESSTOKEN: $(System.AccessToken) + inputs: + locProj: eng/Localize/LocProject.json + outDir: $(Build.ArtifactStagingDirectory) + lclSource: ${{ parameters.LclSource }} + lclPackageId: ${{ parameters.LclPackageId }} + isCreatePrSelected: ${{ parameters.CreatePr }} + isAutoCompletePrSelected: ${{ parameters.AutoCompletePr }} + ${{ if eq(parameters.CreatePr, true) }}: + isUseLfLineEndingsSelected: ${{ parameters.UseLfLineEndings }} + ${{ if eq(parameters.RepoType, 'gitHub') }}: + isShouldReusePrSelected: ${{ parameters.ReusePr }} + packageSourceAuth: patAuth + patVariable: ${{ parameters.CeapexPat }} + ${{ if eq(parameters.RepoType, 'gitHub') }}: + repoType: ${{ parameters.RepoType }} + gitHubPatVariable: "${{ parameters.GithubPat }}" + ${{ if ne(parameters.MirrorRepo, '') }}: + isMirrorRepoSelected: true + gitHubOrganization: ${{ parameters.GitHubOrg }} + mirrorRepo: ${{ parameters.MirrorRepo }} + mirrorBranch: ${{ parameters.MirrorBranch }} + condition: ${{ parameters.condition }} + + - task: 1ES.PublishBuildArtifacts@1 + displayName: Publish Localization Files + inputs: + PathtoPublish: '$(Build.ArtifactStagingDirectory)/loc' + PublishLocation: Container + ArtifactName: Loc + condition: ${{ parameters.condition }} + + - task: 1ES.PublishBuildArtifacts@1 + displayName: Publish LocProject.json + inputs: + PathtoPublish: '$(Build.SourcesDirectory)/eng/Localize/' + PublishLocation: Container + ArtifactName: Loc + condition: ${{ parameters.condition }} \ No newline at end of file diff --git a/eng/common/templates-official/job/publish-build-assets.yml b/eng/common/templates-official/job/publish-build-assets.yml new file mode 100644 index 0000000000000..589ac80a18b74 --- /dev/null +++ b/eng/common/templates-official/job/publish-build-assets.yml @@ -0,0 +1,155 @@ +parameters: + configuration: 'Debug' + + # Optional: condition for the job to run + condition: '' + + # Optional: 'true' if future jobs should run even if this job fails + continueOnError: false + + # Optional: dependencies of the job + dependsOn: '' + + # Optional: Include PublishBuildArtifacts task + enablePublishBuildArtifacts: false + + # Optional: A defined YAML pool - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#pool + pool: {} + + # Optional: should run as a public build even in the internal project + # if 'true', the build won't run any of the internal only steps, even if it is running in non-public projects. 
+ runAsPublic: false + + # Optional: whether the build's artifacts will be published using release pipelines or direct feed publishing + publishUsingPipelines: false + + # Optional: whether the build's artifacts will be published using release pipelines or direct feed publishing + publishAssetsImmediately: false + + artifactsPublishingAdditionalParameters: '' + + signingValidationAdditionalParameters: '' + +jobs: +- job: Asset_Registry_Publish + + dependsOn: ${{ parameters.dependsOn }} + timeoutInMinutes: 150 + + ${{ if eq(parameters.publishAssetsImmediately, 'true') }}: + displayName: Publish Assets + ${{ else }}: + displayName: Publish to Build Asset Registry + + variables: + - template: /eng/common/templates-official/variables/pool-providers.yml + - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: + - group: Publish-Build-Assets + - group: AzureDevOps-Artifact-Feeds-Pats + - name: runCodesignValidationInjection + value: false + - ${{ if eq(parameters.publishAssetsImmediately, 'true') }}: + - template: /eng/common/templates-official/post-build/common-variables.yml + + pool: + # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com) + ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}: + name: AzurePipelines-EO + image: 1ESPT-Windows2022 + demands: Cmd + os: windows + # If it's not devdiv, it's dnceng + ${{ if ne(variables['System.TeamProject'], 'DevDiv') }}: + name: NetCore1ESPool-Publishing-Internal + image: windows.vs2019.amd64 + os: windows + steps: + - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: + - task: DownloadBuildArtifacts@0 + displayName: Download artifact + inputs: + artifactName: AssetManifests + downloadPath: '$(Build.StagingDirectory)/Download' + checkDownloadedFiles: true + condition: ${{ parameters.condition }} + continueOnError: ${{ parameters.continueOnError }} + + - task: NuGetAuthenticate@1 + + - task: PowerShell@2 + displayName: Publish Build Assets + inputs: + filePath: eng\common\sdk-task.ps1 + arguments: -task PublishBuildAssets -restore -msbuildEngine dotnet + /p:ManifestsPath='$(Build.StagingDirectory)/Download/AssetManifests' + /p:BuildAssetRegistryToken=$(MaestroAccessToken) + /p:MaestroApiEndpoint=https://maestro-prod.westus2.cloudapp.azure.com + /p:PublishUsingPipelines=${{ parameters.publishUsingPipelines }} + /p:OfficialBuildId=$(Build.BuildNumber) + condition: ${{ parameters.condition }} + continueOnError: ${{ parameters.continueOnError }} + + - task: powershell@2 + displayName: Create ReleaseConfigs Artifact + inputs: + targetType: inline + script: | + New-Item -Path "$(Build.StagingDirectory)/ReleaseConfigs" -ItemType Directory -Force + $filePath = "$(Build.StagingDirectory)/ReleaseConfigs/ReleaseConfigs.txt" + Add-Content -Path $filePath -Value $(BARBuildId) + Add-Content -Path $filePath -Value "$(DefaultChannels)" + Add-Content -Path $filePath -Value $(IsStableBuild) + + - task: 1ES.PublishBuildArtifacts@1 + displayName: Publish ReleaseConfigs Artifact + inputs: + PathtoPublish: '$(Build.StagingDirectory)/ReleaseConfigs' + PublishLocation: Container + ArtifactName: ReleaseConfigs + + - task: powershell@2 + displayName: Check if SymbolPublishingExclusionsFile.txt exists + inputs: + targetType: inline + script: | + $symbolExclusionfile = "$(Build.SourcesDirectory)/eng/SymbolPublishingExclusionsFile.txt" + 
if(Test-Path -Path $symbolExclusionfile) + { + Write-Host "SymbolExclusionFile exists" + Write-Host "##vso[task.setvariable variable=SymbolExclusionFile]true" + } + else{ + Write-Host "Symbols Exclusion file does not exists" + Write-Host "##vso[task.setvariable variable=SymbolExclusionFile]false" + } + + - task: 1ES.PublishBuildArtifacts@1 + displayName: Publish SymbolPublishingExclusionsFile Artifact + condition: eq(variables['SymbolExclusionFile'], 'true') + inputs: + PathtoPublish: '$(Build.SourcesDirectory)/eng/SymbolPublishingExclusionsFile.txt' + PublishLocation: Container + ArtifactName: ReleaseConfigs + + - ${{ if eq(parameters.publishAssetsImmediately, 'true') }}: + - template: /eng/common/templates-official/post-build/setup-maestro-vars.yml + parameters: + BARBuildId: ${{ parameters.BARBuildId }} + PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }} + + - task: PowerShell@2 + displayName: Publish Using Darc + inputs: + filePath: $(Build.SourcesDirectory)/eng/common/post-build/publish-using-darc.ps1 + arguments: -BuildId $(BARBuildId) + -PublishingInfraVersion 3 + -AzdoToken '$(publishing-dnceng-devdiv-code-r-build-re)' + -MaestroToken '$(MaestroApiAccessToken)' + -WaitPublishingFinish true + -ArtifactsPublishingAdditionalParameters '${{ parameters.artifactsPublishingAdditionalParameters }}' + -SymbolPublishingAdditionalParameters '${{ parameters.symbolPublishingAdditionalParameters }}' + + - ${{ if eq(parameters.enablePublishBuildArtifacts, 'true') }}: + - template: /eng/common/templates-official/steps/publish-logs.yml + parameters: + JobLabel: 'Publish_Artifacts_Logs' diff --git a/eng/common/templates-official/job/source-build.yml b/eng/common/templates-official/job/source-build.yml new file mode 100644 index 0000000000000..f193dfbe23668 --- /dev/null +++ b/eng/common/templates-official/job/source-build.yml @@ -0,0 +1,67 @@ +parameters: + # This template adds arcade-powered source-build to CI. The template produces a server job with a + # default ID 'Source_Build_Complete' to put in a dependency list if necessary. + + # Specifies the prefix for source-build jobs added to pipeline. Use this if disambiguation needed. + jobNamePrefix: 'Source_Build' + + # Defines the platform on which to run the job. By default, a linux-x64 machine, suitable for + # managed-only repositories. This is an object with these properties: + # + # name: '' + # The name of the job. This is included in the job ID. + # targetRID: '' + # The name of the target RID to use, instead of the one auto-detected by Arcade. + # nonPortable: false + # Enables non-portable mode. This means a more specific RID (e.g. fedora.32-x64 rather than + # linux-x64), and compiling against distro-provided packages rather than portable ones. + # skipPublishValidation: false + # Disables publishing validation. By default, a check is performed to ensure no packages are + # published by source-build. + # container: '' + # A container to use. Runs in docker. + # pool: {} + # A pool to use. Runs directly on an agent. + # buildScript: '' + # Specifies the build script to invoke to perform the build in the repo. The default + # './build.sh' should work for typical Arcade repositories, but this is customizable for + # difficult situations. + # jobProperties: {} + # A list of job properties to inject at the top level, for potential extensibility beyond + # container and pool. 
+ platform: {} + +jobs: +- job: ${{ parameters.jobNamePrefix }}_${{ parameters.platform.name }} + displayName: Source-Build (${{ parameters.platform.name }}) + + ${{ each property in parameters.platform.jobProperties }}: + ${{ property.key }}: ${{ property.value }} + + ${{ if ne(parameters.platform.container, '') }}: + container: ${{ parameters.platform.container }} + + ${{ if eq(parameters.platform.pool, '') }}: + # The default VM host AzDO pool. This should be capable of running Docker containers: almost all + # source-build builds run in Docker, including the default managed platform. + # /eng/common/templates-official/variables/pool-providers.yml can't be used here (some customers declare variables already), so duplicate its logic + pool: + ${{ if eq(variables['System.TeamProject'], 'public') }}: + name: $[replace(replace(eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'), True, 'NetCore-Svc-Public' ), False, 'NetCore-Public')] + demands: ImageOverride -equals Build.Ubuntu.1804.Amd64.Open + + ${{ if eq(variables['System.TeamProject'], 'internal') }}: + name: $[replace(replace(eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'), True, 'NetCore1ESPool-Svc-Internal'), False, 'NetCore1ESPool-Internal')] + image: 1es-mariner-2 + os: linux + + ${{ if ne(parameters.platform.pool, '') }}: + pool: ${{ parameters.platform.pool }} + + workspace: + clean: all + + steps: + - template: /eng/common/templates-official/steps/source-build.yml + parameters: + platform: ${{ parameters.platform }} diff --git a/eng/common/templates-official/job/source-index-stage1.yml b/eng/common/templates-official/job/source-index-stage1.yml new file mode 100644 index 0000000000000..43ee0c202fc79 --- /dev/null +++ b/eng/common/templates-official/job/source-index-stage1.yml @@ -0,0 +1,85 @@ +parameters: + runAsPublic: false + sourceIndexUploadPackageVersion: 2.0.0-20240502.12 + sourceIndexProcessBinlogPackageVersion: 1.0.1-20240129.2 + sourceIndexPackageSource: https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json + sourceIndexBuildCommand: powershell -NoLogo -NoProfile -ExecutionPolicy Bypass -Command "eng/common/build.ps1 -restore -build -binarylog -ci" + preSteps: [] + binlogPath: artifacts/log/Debug/Build.binlog + condition: '' + dependsOn: '' + pool: '' + +jobs: +- job: SourceIndexStage1 + dependsOn: ${{ parameters.dependsOn }} + condition: ${{ parameters.condition }} + variables: + - name: SourceIndexUploadPackageVersion + value: ${{ parameters.sourceIndexUploadPackageVersion }} + - name: SourceIndexProcessBinlogPackageVersion + value: ${{ parameters.sourceIndexProcessBinlogPackageVersion }} + - name: SourceIndexPackageSource + value: ${{ parameters.sourceIndexPackageSource }} + - name: BinlogPath + value: ${{ parameters.binlogPath }} + - template: /eng/common/templates/variables/pool-providers.yml + + ${{ if ne(parameters.pool, '') }}: + pool: ${{ parameters.pool }} + ${{ if eq(parameters.pool, '') }}: + pool: + ${{ if eq(variables['System.TeamProject'], 'public') }}: + name: $(DncEngPublicBuildPool) + demands: ImageOverride -equals windows.vs2019.amd64.open + ${{ if eq(variables['System.TeamProject'], 'internal') }}: + name: $(DncEngInternalBuildPool) + demands: ImageOverride -equals windows.vs2019.amd64 + + steps: + - ${{ each preStep in parameters.preSteps }}: + - ${{ preStep }} + + - task: UseDotNet@2 + 
displayName: Use .NET 8 SDK + inputs: + packageType: sdk + version: 8.0.x + installationPath: $(Agent.TempDirectory)/dotnet + workingDirectory: $(Agent.TempDirectory) + + - script: | + $(Agent.TempDirectory)/dotnet/dotnet tool install BinLogToSln --version $(sourceIndexProcessBinlogPackageVersion) --add-source $(SourceIndexPackageSource) --tool-path $(Agent.TempDirectory)/.source-index/tools + $(Agent.TempDirectory)/dotnet/dotnet tool install UploadIndexStage1 --version $(sourceIndexUploadPackageVersion) --add-source $(SourceIndexPackageSource) --tool-path $(Agent.TempDirectory)/.source-index/tools + displayName: Download Tools + # Set working directory to temp directory so 'dotnet' doesn't try to use global.json and use the repo's sdk. + workingDirectory: $(Agent.TempDirectory) + + - script: ${{ parameters.sourceIndexBuildCommand }} + displayName: Build Repository + + - script: $(Agent.TempDirectory)/.source-index/tools/BinLogToSln -i $(BinlogPath) -r $(Build.SourcesDirectory) -n $(Build.Repository.Name) -o .source-index/stage1output + displayName: Process Binlog into indexable sln + + - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: + - task: AzureCLI@2 + displayName: Get stage 1 auth token + inputs: + azureSubscription: 'SourceDotNet Stage1 Publish' + addSpnToEnvironment: true + scriptType: 'ps' + scriptLocation: 'inlineScript' + inlineScript: | + echo "##vso[task.setvariable variable=ARM_CLIENT_ID]$env:servicePrincipalId" + echo "##vso[task.setvariable variable=ARM_ID_TOKEN]$env:idToken" + echo "##vso[task.setvariable variable=ARM_TENANT_ID]$env:tenantId" + + - script: | + echo "Client ID: $(ARM_CLIENT_ID)" + echo "ID Token: $(ARM_ID_TOKEN)" + echo "Tenant ID: $(ARM_TENANT_ID)" + az login --service-principal -u $(ARM_CLIENT_ID) --tenant $(ARM_TENANT_ID) --allow-no-subscriptions --federated-token $(ARM_ID_TOKEN) + displayName: "Login to Azure" + + - script: $(Agent.TempDirectory)/.source-index/tools/UploadIndexStage1 -i .source-index/stage1output -n $(Build.Repository.Name) -s netsourceindexstage1 -b stage1 + displayName: Upload stage1 artifacts to source index \ No newline at end of file diff --git a/eng/common/templates-official/jobs/codeql-build.yml b/eng/common/templates-official/jobs/codeql-build.yml new file mode 100644 index 0000000000000..b68d3c2f31990 --- /dev/null +++ b/eng/common/templates-official/jobs/codeql-build.yml @@ -0,0 +1,31 @@ +parameters: + # See schema documentation in /Documentation/AzureDevOps/TemplateSchema.md + continueOnError: false + # Required: A collection of jobs to run - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job + jobs: [] + # Optional: if specified, restore and use this version of Guardian instead of the default. + overrideGuardianVersion: '' + +jobs: +- template: /eng/common/templates-official/jobs/jobs.yml + parameters: + enableMicrobuild: false + enablePublishBuildArtifacts: false + enablePublishTestResults: false + enablePublishBuildAssets: false + enablePublishUsingPipelines: false + enableTelemetry: true + + variables: + - group: Publish-Build-Assets + # The Guardian version specified in 'eng/common/sdl/packages.config'. This value must be kept in + # sync with the packages.config file. 
+ - name: DefaultGuardianVersion + value: 0.109.0 + - name: GuardianPackagesConfigFile + value: $(Build.SourcesDirectory)\eng\common\sdl\packages.config + - name: GuardianVersion + value: ${{ coalesce(parameters.overrideGuardianVersion, '$(DefaultGuardianVersion)') }} + + jobs: ${{ parameters.jobs }} + diff --git a/eng/common/templates-official/jobs/jobs.yml b/eng/common/templates-official/jobs/jobs.yml new file mode 100644 index 0000000000000..857a0f8ba43e8 --- /dev/null +++ b/eng/common/templates-official/jobs/jobs.yml @@ -0,0 +1,97 @@ +parameters: + # See schema documentation in /Documentation/AzureDevOps/TemplateSchema.md + continueOnError: false + + # Optional: Include PublishBuildArtifacts task + enablePublishBuildArtifacts: false + + # Optional: Enable publishing using release pipelines + enablePublishUsingPipelines: false + + # Optional: Enable running the source-build jobs to build repo from source + enableSourceBuild: false + + # Optional: Parameters for source-build template. + # See /eng/common/templates-official/jobs/source-build.yml for options + sourceBuildParameters: [] + + graphFileGeneration: + # Optional: Enable generating the graph files at the end of the build + enabled: false + # Optional: Include toolset dependencies in the generated graph files + includeToolset: false + + # Required: A collection of jobs to run - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job + jobs: [] + + # Optional: Override automatically derived dependsOn value for "publish build assets" job + publishBuildAssetsDependsOn: '' + + # Optional: Publish the assets as soon as the publish to BAR stage is complete, rather doing so in a separate stage. + publishAssetsImmediately: false + + # Optional: If using publishAssetsImmediately and additional parameters are needed, can be used to send along additional parameters (normally sent to post-build.yml) + artifactsPublishingAdditionalParameters: '' + signingValidationAdditionalParameters: '' + + # Optional: should run as a public build even in the internal project + # if 'true', the build won't run any of the internal only steps, even if it is running in non-public projects. + runAsPublic: false + + enableSourceIndex: false + sourceIndexParams: {} + +# Internal resources (telemetry, microbuild) can only be accessed from non-public projects, +# and some (Microbuild) should only be applied to non-PR cases for internal builds. 
+ +jobs: +- ${{ each job in parameters.jobs }}: + - template: ../job/job.yml + parameters: + # pass along parameters + ${{ each parameter in parameters }}: + ${{ if ne(parameter.key, 'jobs') }}: + ${{ parameter.key }}: ${{ parameter.value }} + + # pass along job properties + ${{ each property in job }}: + ${{ if ne(property.key, 'job') }}: + ${{ property.key }}: ${{ property.value }} + + name: ${{ job.job }} + +- ${{ if eq(parameters.enableSourceBuild, true) }}: + - template: /eng/common/templates-official/jobs/source-build.yml + parameters: + allCompletedJobId: Source_Build_Complete + ${{ each parameter in parameters.sourceBuildParameters }}: + ${{ parameter.key }}: ${{ parameter.value }} + +- ${{ if eq(parameters.enableSourceIndex, 'true') }}: + - template: ../job/source-index-stage1.yml + parameters: + runAsPublic: ${{ parameters.runAsPublic }} + ${{ each parameter in parameters.sourceIndexParams }}: + ${{ parameter.key }}: ${{ parameter.value }} + +- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: + - ${{ if or(eq(parameters.enablePublishBuildAssets, true), eq(parameters.artifacts.publish.manifests, 'true'), ne(parameters.artifacts.publish.manifests, '')) }}: + - template: ../job/publish-build-assets.yml + parameters: + continueOnError: ${{ parameters.continueOnError }} + dependsOn: + - ${{ if ne(parameters.publishBuildAssetsDependsOn, '') }}: + - ${{ each job in parameters.publishBuildAssetsDependsOn }}: + - ${{ job.job }} + - ${{ if eq(parameters.publishBuildAssetsDependsOn, '') }}: + - ${{ each job in parameters.jobs }}: + - ${{ job.job }} + - ${{ if eq(parameters.enableSourceBuild, true) }}: + - Source_Build_Complete + + runAsPublic: ${{ parameters.runAsPublic }} + publishUsingPipelines: ${{ parameters.enablePublishUsingPipelines }} + publishAssetsImmediately: ${{ parameters.publishAssetsImmediately }} + enablePublishBuildArtifacts: ${{ parameters.enablePublishBuildArtifacts }} + artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }} + signingValidationAdditionalParameters: ${{ parameters.signingValidationAdditionalParameters }} diff --git a/eng/common/templates-official/jobs/source-build.yml b/eng/common/templates-official/jobs/source-build.yml new file mode 100644 index 0000000000000..08e5db9bb1161 --- /dev/null +++ b/eng/common/templates-official/jobs/source-build.yml @@ -0,0 +1,46 @@ +parameters: + # This template adds arcade-powered source-build to CI. A job is created for each platform, as + # well as an optional server job that completes when all platform jobs complete. + + # The name of the "join" job for all source-build platforms. If set to empty string, the job is + # not included. Existing repo pipelines can use this job depend on all source-build jobs + # completing without maintaining a separate list of every single job ID: just depend on this one + # server job. By default, not included. Recommended name if used: 'Source_Build_Complete'. + allCompletedJobId: '' + + # See /eng/common/templates-official/job/source-build.yml + jobNamePrefix: 'Source_Build' + + # This is the default platform provided by Arcade, intended for use by a managed-only repo. + defaultManagedPlatform: + name: 'Managed' + container: 'mcr.microsoft.com/dotnet-buildtools/prereqs:centos-stream8' + + # Defines the platforms on which to run build jobs. 
One job is created for each platform, and the + # object in this array is sent to the job template as 'platform'. If no platforms are specified, + # one job runs on 'defaultManagedPlatform'. + platforms: [] + +jobs: + +- ${{ if ne(parameters.allCompletedJobId, '') }}: + - job: ${{ parameters.allCompletedJobId }} + displayName: Source-Build Complete + pool: server + dependsOn: + - ${{ each platform in parameters.platforms }}: + - ${{ parameters.jobNamePrefix }}_${{ platform.name }} + - ${{ if eq(length(parameters.platforms), 0) }}: + - ${{ parameters.jobNamePrefix }}_${{ parameters.defaultManagedPlatform.name }} + +- ${{ each platform in parameters.platforms }}: + - template: /eng/common/templates-official/job/source-build.yml + parameters: + jobNamePrefix: ${{ parameters.jobNamePrefix }} + platform: ${{ platform }} + +- ${{ if eq(length(parameters.platforms), 0) }}: + - template: /eng/common/templates-official/job/source-build.yml + parameters: + jobNamePrefix: ${{ parameters.jobNamePrefix }} + platform: ${{ parameters.defaultManagedPlatform }} diff --git a/eng/common/templates-official/post-build/common-variables.yml b/eng/common/templates-official/post-build/common-variables.yml new file mode 100644 index 0000000000000..c24193acfc981 --- /dev/null +++ b/eng/common/templates-official/post-build/common-variables.yml @@ -0,0 +1,22 @@ +variables: + - group: Publish-Build-Assets + + # Whether the build is internal or not + - name: IsInternalBuild + value: ${{ and(ne(variables['System.TeamProject'], 'public'), contains(variables['Build.SourceBranch'], 'internal')) }} + + # Default Maestro++ API Endpoint and API Version + - name: MaestroApiEndPoint + value: "https://maestro-prod.westus2.cloudapp.azure.com" + - name: MaestroApiAccessToken + value: $(MaestroAccessToken) + - name: MaestroApiVersion + value: "2020-02-20" + + - name: SourceLinkCLIVersion + value: 3.0.0 + - name: SymbolToolVersion + value: 1.0.1 + + - name: runCodesignValidationInjection + value: false diff --git a/eng/common/templates-official/post-build/post-build.yml b/eng/common/templates-official/post-build/post-build.yml new file mode 100644 index 0000000000000..da1f40958b450 --- /dev/null +++ b/eng/common/templates-official/post-build/post-build.yml @@ -0,0 +1,285 @@ +parameters: + # Which publishing infra should be used. THIS SHOULD MATCH THE VERSION ON THE BUILD MANIFEST. + # Publishing V1 is no longer supported + # Publishing V2 is no longer supported + # Publishing V3 is the default + - name: publishingInfraVersion + displayName: Which version of publishing should be used to promote the build definition? 
+ type: number + default: 3 + values: + - 3 + + - name: BARBuildId + displayName: BAR Build Id + type: number + default: 0 + + - name: PromoteToChannelIds + displayName: Channel to promote BARBuildId to + type: string + default: '' + + - name: enableSourceLinkValidation + displayName: Enable SourceLink validation + type: boolean + default: false + + - name: enableSigningValidation + displayName: Enable signing validation + type: boolean + default: true + + - name: enableSymbolValidation + displayName: Enable symbol validation + type: boolean + default: false + + - name: enableNugetValidation + displayName: Enable NuGet validation + type: boolean + default: true + + - name: publishInstallersAndChecksums + displayName: Publish installers and checksums + type: boolean + default: true + + - name: SDLValidationParameters + type: object + default: + enable: false + publishGdn: false + continueOnError: false + params: '' + artifactNames: '' + downloadArtifacts: true + + # These parameters let the user customize the call to sdk-task.ps1 for publishing + # symbols & general artifacts as well as for signing validation + - name: symbolPublishingAdditionalParameters + displayName: Symbol publishing additional parameters + type: string + default: '' + + - name: artifactsPublishingAdditionalParameters + displayName: Artifact publishing additional parameters + type: string + default: '' + + - name: signingValidationAdditionalParameters + displayName: Signing validation additional parameters + type: string + default: '' + + # Which stages should finish execution before post-build stages start + - name: validateDependsOn + type: object + default: + - build + + - name: publishDependsOn + type: object + default: + - Validate + + # Optional: Call asset publishing rather than running in a separate stage + - name: publishAssetsImmediately + type: boolean + default: false + +stages: +- ${{ if or(eq( parameters.enableNugetValidation, 'true'), eq(parameters.enableSigningValidation, 'true'), eq(parameters.enableSourceLinkValidation, 'true'), eq(parameters.SDLValidationParameters.enable, 'true')) }}: + - stage: Validate + dependsOn: ${{ parameters.validateDependsOn }} + displayName: Validate Build Assets + variables: + - template: common-variables.yml + - template: /eng/common/templates-official/variables/pool-providers.yml + jobs: + - job: + displayName: NuGet Validation + condition: and(succeededOrFailed(), eq( ${{ parameters.enableNugetValidation }}, 'true')) + pool: + # We don't use the collection uri here because it might vary (.visualstudio.com vs. 
dev.azure.com) + ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}: + name: AzurePipelines-EO + image: 1ESPT-Windows2022 + demands: Cmd + os: windows + # If it's not devdiv, it's dnceng + ${{ else }}: + name: $(DncEngInternalBuildPool) + image: 1es-windows-2022 + os: windows + + steps: + - template: setup-maestro-vars.yml + parameters: + BARBuildId: ${{ parameters.BARBuildId }} + PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }} + + - task: DownloadBuildArtifacts@0 + displayName: Download Package Artifacts + inputs: + buildType: specific + buildVersionToDownload: specific + project: $(AzDOProjectName) + pipeline: $(AzDOPipelineId) + buildId: $(AzDOBuildId) + artifactName: PackageArtifacts + checkDownloadedFiles: true + + - task: PowerShell@2 + displayName: Validate + inputs: + filePath: $(Build.SourcesDirectory)/eng/common/post-build/nuget-validation.ps1 + arguments: -PackagesPath $(Build.ArtifactStagingDirectory)/PackageArtifacts/ + -ToolDestinationPath $(Agent.BuildDirectory)/Extract/ + + - job: + displayName: Signing Validation + condition: and( eq( ${{ parameters.enableSigningValidation }}, 'true'), ne( variables['PostBuildSign'], 'true')) + pool: + # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com) + ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}: + name: AzurePipelines-EO + image: 1ESPT-Windows2022 + demands: Cmd + os: windows + # If it's not devdiv, it's dnceng + ${{ else }}: + name: $(DncEngInternalBuildPool) + image: 1es-windows-2022 + os: windows + steps: + - template: setup-maestro-vars.yml + parameters: + BARBuildId: ${{ parameters.BARBuildId }} + PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }} + + - task: DownloadBuildArtifacts@0 + displayName: Download Package Artifacts + inputs: + buildType: specific + buildVersionToDownload: specific + project: $(AzDOProjectName) + pipeline: $(AzDOPipelineId) + buildId: $(AzDOBuildId) + artifactName: PackageArtifacts + checkDownloadedFiles: true + itemPattern: | + ** + !**/Microsoft.SourceBuild.Intermediate.*.nupkg + + # This is necessary whenever we want to publish/restore to an AzDO private feed + # Since sdk-task.ps1 tries to restore packages we need to do this authentication here + # otherwise it'll complain about accessing a private feed. + - task: NuGetAuthenticate@1 + displayName: 'Authenticate to AzDO Feeds' + + # Signing validation will optionally work with the buildmanifest file which is downloaded from + # Azure DevOps above. + - task: PowerShell@2 + displayName: Validate + inputs: + filePath: eng\common\sdk-task.ps1 + arguments: -task SigningValidation -restore -msbuildEngine vs + /p:PackageBasePath='$(Build.ArtifactStagingDirectory)/PackageArtifacts' + /p:SignCheckExclusionsFile='$(Build.SourcesDirectory)/eng/SignCheckExclusionsFile.txt' + ${{ parameters.signingValidationAdditionalParameters }} + + - template: ../steps/publish-logs.yml + parameters: + StageLabel: 'Validation' + JobLabel: 'Signing' + BinlogToolVersion: $(BinlogToolVersion) + + - job: + displayName: SourceLink Validation + condition: eq( ${{ parameters.enableSourceLinkValidation }}, 'true') + pool: + # We don't use the collection uri here because it might vary (.visualstudio.com vs. 
dev.azure.com) + ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}: + name: AzurePipelines-EO + image: 1ESPT-Windows2022 + demands: Cmd + os: windows + # If it's not devdiv, it's dnceng + ${{ else }}: + name: $(DncEngInternalBuildPool) + image: 1es-windows-2022 + os: windows + steps: + - template: setup-maestro-vars.yml + parameters: + BARBuildId: ${{ parameters.BARBuildId }} + PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }} + + - task: DownloadBuildArtifacts@0 + displayName: Download Blob Artifacts + inputs: + buildType: specific + buildVersionToDownload: specific + project: $(AzDOProjectName) + pipeline: $(AzDOPipelineId) + buildId: $(AzDOBuildId) + artifactName: BlobArtifacts + checkDownloadedFiles: true + + - task: PowerShell@2 + displayName: Validate + inputs: + filePath: $(Build.SourcesDirectory)/eng/common/post-build/sourcelink-validation.ps1 + arguments: -InputPath $(Build.ArtifactStagingDirectory)/BlobArtifacts/ + -ExtractPath $(Agent.BuildDirectory)/Extract/ + -GHRepoName $(Build.Repository.Name) + -GHCommit $(Build.SourceVersion) + -SourcelinkCliVersion $(SourceLinkCLIVersion) + continueOnError: true + +- ${{ if ne(parameters.publishAssetsImmediately, 'true') }}: + - stage: publish_using_darc + ${{ if or(eq(parameters.enableNugetValidation, 'true'), eq(parameters.enableSigningValidation, 'true'), eq(parameters.enableSourceLinkValidation, 'true'), eq(parameters.SDLValidationParameters.enable, 'true')) }}: + dependsOn: ${{ parameters.publishDependsOn }} + ${{ else }}: + dependsOn: ${{ parameters.validateDependsOn }} + displayName: Publish using Darc + variables: + - template: common-variables.yml + - template: /eng/common/templates-official/variables/pool-providers.yml + jobs: + - job: + displayName: Publish Using Darc + timeoutInMinutes: 120 + pool: + # We don't use the collection uri here because it might vary (.visualstudio.com vs. 
dev.azure.com) + ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}: + name: AzurePipelines-EO + image: 1ESPT-Windows2022 + demands: Cmd + os: windows + # If it's not devdiv, it's dnceng + ${{ else }}: + name: NetCore1ESPool-Publishing-Internal + image: windows.vs2019.amd64 + os: windows + steps: + - template: setup-maestro-vars.yml + parameters: + BARBuildId: ${{ parameters.BARBuildId }} + PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }} + + - task: NuGetAuthenticate@1 + + - task: PowerShell@2 + displayName: Publish Using Darc + inputs: + filePath: $(Build.SourcesDirectory)/eng/common/post-build/publish-using-darc.ps1 + arguments: -BuildId $(BARBuildId) + -PublishingInfraVersion ${{ parameters.publishingInfraVersion }} + -AzdoToken '$(publishing-dnceng-devdiv-code-r-build-re)' + -MaestroToken '$(MaestroApiAccessToken)' + -WaitPublishingFinish true + -ArtifactsPublishingAdditionalParameters '${{ parameters.artifactsPublishingAdditionalParameters }}' + -SymbolPublishingAdditionalParameters '${{ parameters.symbolPublishingAdditionalParameters }}' diff --git a/eng/common/templates-official/post-build/setup-maestro-vars.yml b/eng/common/templates-official/post-build/setup-maestro-vars.yml new file mode 100644 index 0000000000000..0c87f149a4ad7 --- /dev/null +++ b/eng/common/templates-official/post-build/setup-maestro-vars.yml @@ -0,0 +1,70 @@ +parameters: + BARBuildId: '' + PromoteToChannelIds: '' + +steps: + - ${{ if eq(coalesce(parameters.PromoteToChannelIds, 0), 0) }}: + - task: DownloadBuildArtifacts@0 + displayName: Download Release Configs + inputs: + buildType: current + artifactName: ReleaseConfigs + checkDownloadedFiles: true + + - task: PowerShell@2 + name: setReleaseVars + displayName: Set Release Configs Vars + inputs: + targetType: inline + pwsh: true + script: | + try { + if (!$Env:PromoteToMaestroChannels -or $Env:PromoteToMaestroChannels.Trim() -eq '') { + $Content = Get-Content $(Build.StagingDirectory)/ReleaseConfigs/ReleaseConfigs.txt + + $BarId = $Content | Select -Index 0 + $Channels = $Content | Select -Index 1 + $IsStableBuild = $Content | Select -Index 2 + + $AzureDevOpsProject = $Env:System_TeamProject + $AzureDevOpsBuildDefinitionId = $Env:System_DefinitionId + $AzureDevOpsBuildId = $Env:Build_BuildId + } + else { + $buildApiEndpoint = "${Env:MaestroApiEndPoint}/api/builds/${Env:BARBuildId}?api-version=${Env:MaestroApiVersion}" + + $apiHeaders = New-Object 'System.Collections.Generic.Dictionary[[String],[String]]' + $apiHeaders.Add('Accept', 'application/json') + $apiHeaders.Add('Authorization',"Bearer ${Env:MAESTRO_API_TOKEN}") + + $buildInfo = try { Invoke-WebRequest -Method Get -Uri $buildApiEndpoint -Headers $apiHeaders | ConvertFrom-Json } catch { Write-Host "Error: $_" } + + $BarId = $Env:BARBuildId + $Channels = $Env:PromoteToMaestroChannels -split "," + $Channels = $Channels -join "][" + $Channels = "[$Channels]" + + $IsStableBuild = $buildInfo.stable + $AzureDevOpsProject = $buildInfo.azureDevOpsProject + $AzureDevOpsBuildDefinitionId = $buildInfo.azureDevOpsBuildDefinitionId + $AzureDevOpsBuildId = $buildInfo.azureDevOpsBuildId + } + + Write-Host "##vso[task.setvariable variable=BARBuildId]$BarId" + Write-Host "##vso[task.setvariable variable=TargetChannels]$Channels" + Write-Host "##vso[task.setvariable variable=IsStableBuild]$IsStableBuild" + + Write-Host "##vso[task.setvariable variable=AzDOProjectName]$AzureDevOpsProject" + Write-Host "##vso[task.setvariable variable=AzDOPipelineId]$AzureDevOpsBuildDefinitionId" + Write-Host 
"##vso[task.setvariable variable=AzDOBuildId]$AzureDevOpsBuildId" + } + catch { + Write-Host $_ + Write-Host $_.Exception + Write-Host $_.ScriptStackTrace + exit 1 + } + env: + MAESTRO_API_TOKEN: $(MaestroApiAccessToken) + BARBuildId: ${{ parameters.BARBuildId }} + PromoteToMaestroChannels: ${{ parameters.PromoteToChannelIds }} diff --git a/eng/common/templates-official/post-build/trigger-subscription.yml b/eng/common/templates-official/post-build/trigger-subscription.yml new file mode 100644 index 0000000000000..da669030daf6e --- /dev/null +++ b/eng/common/templates-official/post-build/trigger-subscription.yml @@ -0,0 +1,13 @@ +parameters: + ChannelId: 0 + +steps: +- task: PowerShell@2 + displayName: Triggering subscriptions + inputs: + filePath: $(Build.SourcesDirectory)/eng/common/post-build/trigger-subscriptions.ps1 + arguments: -SourceRepo $(Build.Repository.Uri) + -ChannelId ${{ parameters.ChannelId }} + -MaestroApiAccessToken $(MaestroAccessToken) + -MaestroApiEndPoint $(MaestroApiEndPoint) + -MaestroApiVersion $(MaestroApiVersion) diff --git a/eng/common/templates-official/steps/add-build-to-channel.yml b/eng/common/templates-official/steps/add-build-to-channel.yml new file mode 100644 index 0000000000000..f67a210d62f3e --- /dev/null +++ b/eng/common/templates-official/steps/add-build-to-channel.yml @@ -0,0 +1,13 @@ +parameters: + ChannelId: 0 + +steps: +- task: PowerShell@2 + displayName: Add Build to Channel + inputs: + filePath: $(Build.SourcesDirectory)/eng/common/post-build/add-build-to-channel.ps1 + arguments: -BuildId $(BARBuildId) + -ChannelId ${{ parameters.ChannelId }} + -MaestroApiAccessToken $(MaestroApiAccessToken) + -MaestroApiEndPoint $(MaestroApiEndPoint) + -MaestroApiVersion $(MaestroApiVersion) diff --git a/eng/common/templates-official/steps/build-reason.yml b/eng/common/templates-official/steps/build-reason.yml new file mode 100644 index 0000000000000..eba58109b52c9 --- /dev/null +++ b/eng/common/templates-official/steps/build-reason.yml @@ -0,0 +1,12 @@ +# build-reason.yml +# Description: runs steps if build.reason condition is valid. conditions is a string of valid build reasons +# to include steps (',' separated). 
+parameters: + conditions: '' + steps: [] + +steps: + - ${{ if and( not(startsWith(parameters.conditions, 'not')), contains(parameters.conditions, variables['build.reason'])) }}: + - ${{ parameters.steps }} + - ${{ if and( startsWith(parameters.conditions, 'not'), not(contains(parameters.conditions, variables['build.reason']))) }}: + - ${{ parameters.steps }} diff --git a/eng/common/templates-official/steps/component-governance.yml b/eng/common/templates-official/steps/component-governance.yml new file mode 100644 index 0000000000000..cbba0596709da --- /dev/null +++ b/eng/common/templates-official/steps/component-governance.yml @@ -0,0 +1,13 @@ +parameters: + disableComponentGovernance: false + componentGovernanceIgnoreDirectories: '' + +steps: +- ${{ if eq(parameters.disableComponentGovernance, 'true') }}: + - script: echo "##vso[task.setvariable variable=skipComponentGovernanceDetection]true" + displayName: Set skipComponentGovernanceDetection variable +- ${{ if ne(parameters.disableComponentGovernance, 'true') }}: + - task: ComponentGovernanceComponentDetection@0 + continueOnError: true + inputs: + ignoreDirectories: ${{ parameters.componentGovernanceIgnoreDirectories }} \ No newline at end of file diff --git a/eng/common/templates-official/steps/execute-codeql.yml b/eng/common/templates-official/steps/execute-codeql.yml new file mode 100644 index 0000000000000..9b4a5ffa30a78 --- /dev/null +++ b/eng/common/templates-official/steps/execute-codeql.yml @@ -0,0 +1,32 @@ +parameters: + # Language that should be analyzed. Defaults to csharp + language: csharp + # Build Commands + buildCommands: '' + overrideParameters: '' # Optional: to override values for parameters. + additionalParameters: '' # Optional: parameters that need user specific values eg: '-SourceToolsList @("abc","def") -ArtifactToolsList @("ghi","jkl")' + # Optional: if specified, restore and use this version of Guardian instead of the default. + overrideGuardianVersion: '' + # Optional: if true, publish the '.gdn' folder as a pipeline artifact. This can help with in-depth + # diagnosis of problems with specific tool configurations. + publishGuardianDirectoryToPipeline: false + # The script to run to execute all SDL tools. Use this if you want to use a script to define SDL + # parameters rather than relying on YAML. It may be better to use a local script, because you can + # reproduce results locally without piecing together a command based on the YAML. + executeAllSdlToolsScript: 'eng/common/sdl/execute-all-sdl-tools.ps1' + # There is some sort of bug (has been reported) in Azure DevOps where if this parameter is named + # 'continueOnError', the parameter value is not correctly picked up. 
+ # This can also be remedied by the caller (post-build.yml) if it does not use a nested parameter + # optional: determines whether to continue the build if the step errors; + sdlContinueOnError: false + +steps: +- template: /eng/common/templates-official/steps/execute-sdl.yml + parameters: + overrideGuardianVersion: ${{ parameters.overrideGuardianVersion }} + executeAllSdlToolsScript: ${{ parameters.executeAllSdlToolsScript }} + overrideParameters: ${{ parameters.overrideParameters }} + additionalParameters: '${{ parameters.additionalParameters }} + -CodeQLAdditionalRunConfigParams @("BuildCommands < ${{ parameters.buildCommands }}", "Language < ${{ parameters.language }}")' + publishGuardianDirectoryToPipeline: ${{ parameters.publishGuardianDirectoryToPipeline }} + sdlContinueOnError: ${{ parameters.sdlContinueOnError }} \ No newline at end of file diff --git a/eng/common/templates-official/steps/execute-sdl.yml b/eng/common/templates-official/steps/execute-sdl.yml new file mode 100644 index 0000000000000..07426fde05d82 --- /dev/null +++ b/eng/common/templates-official/steps/execute-sdl.yml @@ -0,0 +1,88 @@ +parameters: + overrideGuardianVersion: '' + executeAllSdlToolsScript: '' + overrideParameters: '' + additionalParameters: '' + publishGuardianDirectoryToPipeline: false + sdlContinueOnError: false + condition: '' + +steps: +- task: NuGetAuthenticate@1 + inputs: + nuGetServiceConnections: GuardianConnect + +- task: NuGetToolInstaller@1 + displayName: 'Install NuGet.exe' + +- ${{ if ne(parameters.overrideGuardianVersion, '') }}: + - pwsh: | + Set-Location -Path $(Build.SourcesDirectory)\eng\common\sdl + . .\sdl.ps1 + $guardianCliLocation = Install-Gdn -Path $(Build.SourcesDirectory)\.artifacts -Version ${{ parameters.overrideGuardianVersion }} + Write-Host "##vso[task.setvariable variable=GuardianCliLocation]$guardianCliLocation" + displayName: Install Guardian (Overridden) + +- ${{ if eq(parameters.overrideGuardianVersion, '') }}: + - pwsh: | + Set-Location -Path $(Build.SourcesDirectory)\eng\common\sdl + . .\sdl.ps1 + $guardianCliLocation = Install-Gdn -Path $(Build.SourcesDirectory)\.artifacts + Write-Host "##vso[task.setvariable variable=GuardianCliLocation]$guardianCliLocation" + displayName: Install Guardian + +- ${{ if ne(parameters.overrideParameters, '') }}: + - powershell: ${{ parameters.executeAllSdlToolsScript }} ${{ parameters.overrideParameters }} + displayName: Execute SDL (Overridden) + continueOnError: ${{ parameters.sdlContinueOnError }} + condition: ${{ parameters.condition }} + +- ${{ if eq(parameters.overrideParameters, '') }}: + - powershell: ${{ parameters.executeAllSdlToolsScript }} + -GuardianCliLocation $(GuardianCliLocation) + -NugetPackageDirectory $(Build.SourcesDirectory)\.packages + -AzureDevOpsAccessToken $(dn-bot-dotnet-build-rw-code-rw) + ${{ parameters.additionalParameters }} + displayName: Execute SDL + continueOnError: ${{ parameters.sdlContinueOnError }} + condition: ${{ parameters.condition }} + +- ${{ if ne(parameters.publishGuardianDirectoryToPipeline, 'false') }}: + # We want to publish the Guardian results and configuration for easy diagnosis. However, the + # '.gdn' dir is a mix of configuration, results, extracted dependencies, and Guardian default + # tooling files. Some of these files are large and aren't useful during an investigation, so + # exclude them by simply deleting them before publishing. (As of writing, there is no documented + # way to selectively exclude a dir from the pipeline artifact publish task.) 
+ - task: DeleteFiles@1 + displayName: Delete Guardian dependencies to avoid uploading + inputs: + SourceFolder: $(Agent.BuildDirectory)/.gdn + Contents: | + c + i + condition: succeededOrFailed() + + - publish: $(Agent.BuildDirectory)/.gdn + artifact: GuardianConfiguration + displayName: Publish GuardianConfiguration + condition: succeededOrFailed() + + # Publish the SARIF files in a container named CodeAnalysisLogs to enable integration + # with the "SARIF SAST Scans Tab" Azure DevOps extension + - task: CopyFiles@2 + displayName: Copy SARIF files + inputs: + flattenFolders: true + sourceFolder: $(Agent.BuildDirectory)/.gdn/rc/ + contents: '**/*.sarif' + targetFolder: $(Build.SourcesDirectory)/CodeAnalysisLogs + condition: succeededOrFailed() + + # Use PublishBuildArtifacts because the SARIF extension only checks this case + # see microsoft/sarif-azuredevops-extension#4 + - task: PublishBuildArtifacts@1 + displayName: Publish SARIF files to CodeAnalysisLogs container + inputs: + pathToPublish: $(Build.SourcesDirectory)/CodeAnalysisLogs + artifactName: CodeAnalysisLogs + condition: succeededOrFailed() \ No newline at end of file diff --git a/eng/common/templates-official/steps/generate-sbom.yml b/eng/common/templates-official/steps/generate-sbom.yml new file mode 100644 index 0000000000000..1bf43bf807af3 --- /dev/null +++ b/eng/common/templates-official/steps/generate-sbom.yml @@ -0,0 +1,48 @@ +# BuildDropPath - The root folder of the drop directory for which the manifest file will be generated. +# PackageName - The name of the package this SBOM represents. +# PackageVersion - The version of the package this SBOM represents. +# ManifestDirPath - The path of the directory where the generated manifest files will be placed +# IgnoreDirectories - Directories to ignore for SBOM generation. This will be passed through to the CG component detector. 
+ +parameters: + PackageVersion: 8.0.0 + BuildDropPath: '$(Build.SourcesDirectory)/artifacts' + PackageName: '.NET' + ManifestDirPath: $(Build.ArtifactStagingDirectory)/sbom + IgnoreDirectories: '' + sbomContinueOnError: true + +steps: +- task: PowerShell@2 + displayName: Prep for SBOM generation in (Non-linux) + condition: or(eq(variables['Agent.Os'], 'Windows_NT'), eq(variables['Agent.Os'], 'Darwin')) + inputs: + filePath: ./eng/common/generate-sbom-prep.ps1 + arguments: ${{parameters.manifestDirPath}} + +# Chmodding is a workaround for https://github.com/dotnet/arcade/issues/8461 +- script: | + chmod +x ./eng/common/generate-sbom-prep.sh + ./eng/common/generate-sbom-prep.sh ${{parameters.manifestDirPath}} + displayName: Prep for SBOM generation in (Linux) + condition: eq(variables['Agent.Os'], 'Linux') + continueOnError: ${{ parameters.sbomContinueOnError }} + +- task: AzureArtifacts.manifest-generator-task.manifest-generator-task.ManifestGeneratorTask@0 + displayName: 'Generate SBOM manifest' + continueOnError: ${{ parameters.sbomContinueOnError }} + inputs: + PackageName: ${{ parameters.packageName }} + BuildDropPath: ${{ parameters.buildDropPath }} + PackageVersion: ${{ parameters.packageVersion }} + ManifestDirPath: ${{ parameters.manifestDirPath }} + ${{ if ne(parameters.IgnoreDirectories, '') }}: + AdditionalComponentDetectorArgs: '--IgnoreDirectories ${{ parameters.IgnoreDirectories }}' + +- task: 1ES.PublishPipelineArtifact@1 + displayName: Publish SBOM manifest + continueOnError: ${{parameters.sbomContinueOnError}} + inputs: + targetPath: '${{parameters.manifestDirPath}}' + artifactName: $(ARTIFACT_NAME) + diff --git a/eng/common/templates-official/steps/publish-logs.yml b/eng/common/templates-official/steps/publish-logs.yml new file mode 100644 index 0000000000000..04012fed182a1 --- /dev/null +++ b/eng/common/templates-official/steps/publish-logs.yml @@ -0,0 +1,23 @@ +parameters: + StageLabel: '' + JobLabel: '' + +steps: +- task: Powershell@2 + displayName: Prepare Binlogs to Upload + inputs: + targetType: inline + script: | + New-Item -ItemType Directory $(Build.SourcesDirectory)/PostBuildLogs/${{parameters.StageLabel}}/${{parameters.JobLabel}}/ + Move-Item -Path $(Build.SourcesDirectory)/artifacts/log/Debug/* $(Build.SourcesDirectory)/PostBuildLogs/${{parameters.StageLabel}}/${{parameters.JobLabel}}/ + continueOnError: true + condition: always() + +- task: 1ES.PublishBuildArtifacts@1 + displayName: Publish Logs + inputs: + PathtoPublish: '$(Build.SourcesDirectory)/PostBuildLogs' + PublishLocation: Container + ArtifactName: PostBuildLogs + continueOnError: true + condition: always() diff --git a/eng/common/templates-official/steps/retain-build.yml b/eng/common/templates-official/steps/retain-build.yml new file mode 100644 index 0000000000000..83d97a26a01ff --- /dev/null +++ b/eng/common/templates-official/steps/retain-build.yml @@ -0,0 +1,28 @@ +parameters: + # Optional azure devops PAT with build execute permissions for the build's organization, + # only needed if the build that should be retained ran on a different organization than + # the pipeline where this template is executing from + Token: '' + # Optional BuildId to retain, defaults to the current running build + BuildId: '' + # Azure devops Organization URI for the build in the https://dev.azure.com/ format. + # Defaults to the organization the current pipeline is running on + AzdoOrgUri: '$(System.CollectionUri)' + # Azure devops project for the build. 
Defaults to the project the current pipeline is running on + AzdoProject: '$(System.TeamProject)' + +steps: + - task: powershell@2 + inputs: + targetType: 'filePath' + filePath: eng/common/retain-build.ps1 + pwsh: true + arguments: > + -AzdoOrgUri: ${{parameters.AzdoOrgUri}} + -AzdoProject ${{parameters.AzdoProject}} + -Token ${{coalesce(parameters.Token, '$env:SYSTEM_ACCESSTOKEN') }} + -BuildId ${{coalesce(parameters.BuildId, '$env:BUILD_ID')}} + displayName: Enable permanent build retention + env: + SYSTEM_ACCESSTOKEN: $(System.AccessToken) + BUILD_ID: $(Build.BuildId) \ No newline at end of file diff --git a/eng/common/templates-official/steps/send-to-helix.yml b/eng/common/templates-official/steps/send-to-helix.yml new file mode 100644 index 0000000000000..3eb7e2d5f840c --- /dev/null +++ b/eng/common/templates-official/steps/send-to-helix.yml @@ -0,0 +1,91 @@ +# Please remember to update the documentation if you make changes to these parameters! +parameters: + HelixSource: 'pr/default' # required -- sources must start with pr/, official/, prodcon/, or agent/ + HelixType: 'tests/default/' # required -- Helix telemetry which identifies what type of data this is; should include "test" for clarity and must end in '/' + HelixBuild: $(Build.BuildNumber) # required -- the build number Helix will use to identify this -- automatically set to the AzDO build number + HelixTargetQueues: '' # required -- semicolon-delimited list of Helix queues to test on; see https://helix.dot.net/ for a list of queues + HelixAccessToken: '' # required -- access token to make Helix API requests; should be provided by the appropriate variable group + HelixConfiguration: '' # optional -- additional property attached to a job + HelixPreCommands: '' # optional -- commands to run before Helix work item execution + HelixPostCommands: '' # optional -- commands to run after Helix work item execution + WorkItemDirectory: '' # optional -- a payload directory to zip up and send to Helix; requires WorkItemCommand; incompatible with XUnitProjects + WorkItemCommand: '' # optional -- a command to execute on the payload; requires WorkItemDirectory; incompatible with XUnitProjects + WorkItemTimeout: '' # optional -- a timeout in TimeSpan.Parse-ready value (e.g. 
00:02:00) for the work item command; requires WorkItemDirectory; incompatible with XUnitProjects + CorrelationPayloadDirectory: '' # optional -- a directory to zip up and send to Helix as a correlation payload + XUnitProjects: '' # optional -- semicolon-delimited list of XUnitProjects to parse and send to Helix; requires XUnitRuntimeTargetFramework, XUnitPublishTargetFramework, XUnitRunnerVersion, and IncludeDotNetCli=true + XUnitWorkItemTimeout: '' # optional -- the workitem timeout in seconds for all workitems created from the xUnit projects specified by XUnitProjects + XUnitPublishTargetFramework: '' # optional -- framework to use to publish your xUnit projects + XUnitRuntimeTargetFramework: '' # optional -- framework to use for the xUnit console runner + XUnitRunnerVersion: '' # optional -- version of the xUnit nuget package you wish to use on Helix; required for XUnitProjects + IncludeDotNetCli: false # optional -- true will download a version of the .NET CLI onto the Helix machine as a correlation payload; requires DotNetCliPackageType and DotNetCliVersion + DotNetCliPackageType: '' # optional -- either 'sdk', 'runtime' or 'aspnetcore-runtime'; determines whether the sdk or runtime will be sent to Helix; see https://raw.githubusercontent.com/dotnet/core/main/release-notes/releases-index.json + DotNetCliVersion: '' # optional -- version of the CLI to send to Helix; based on this: https://raw.githubusercontent.com/dotnet/core/main/release-notes/releases-index.json + WaitForWorkItemCompletion: true # optional -- true will make the task wait until work items have been completed and fail the build if work items fail. False is "fire and forget." + IsExternal: false # [DEPRECATED] -- doesn't do anything, jobs are external if HelixAccessToken is empty and Creator is set + HelixBaseUri: 'https://helix.dot.net/' # optional -- sets the Helix API base URI (allows targeting https://helix.int-dot.net ) + Creator: '' # optional -- if the build is external, use this to specify who is sending the job + DisplayNamePrefix: 'Run Tests' # optional -- rename the beginning of the displayName of the steps in AzDO + condition: succeeded() # optional -- condition for step to execute; defaults to succeeded() + continueOnError: false # optional -- determines whether to continue the build if the step errors; defaults to false + +steps: + - powershell: 'powershell "$env:BUILD_SOURCESDIRECTORY\eng\common\msbuild.ps1 $env:BUILD_SOURCESDIRECTORY\eng\common\helixpublish.proj /restore /p:TreatWarningsAsErrors=false /t:Test /bl:$env:BUILD_SOURCESDIRECTORY\artifacts\log\$env:BuildConfig\SendToHelix.binlog"' + displayName: ${{ parameters.DisplayNamePrefix }} (Windows) + env: + BuildConfig: $(_BuildConfig) + HelixSource: ${{ parameters.HelixSource }} + HelixType: ${{ parameters.HelixType }} + HelixBuild: ${{ parameters.HelixBuild }} + HelixConfiguration: ${{ parameters.HelixConfiguration }} + HelixTargetQueues: ${{ parameters.HelixTargetQueues }} + HelixAccessToken: ${{ parameters.HelixAccessToken }} + HelixPreCommands: ${{ parameters.HelixPreCommands }} + HelixPostCommands: ${{ parameters.HelixPostCommands }} + WorkItemDirectory: ${{ parameters.WorkItemDirectory }} + WorkItemCommand: ${{ parameters.WorkItemCommand }} + WorkItemTimeout: ${{ parameters.WorkItemTimeout }} + CorrelationPayloadDirectory: ${{ parameters.CorrelationPayloadDirectory }} + XUnitProjects: ${{ parameters.XUnitProjects }} + XUnitWorkItemTimeout: ${{ parameters.XUnitWorkItemTimeout }} + XUnitPublishTargetFramework: ${{ 
parameters.XUnitPublishTargetFramework }} + XUnitRuntimeTargetFramework: ${{ parameters.XUnitRuntimeTargetFramework }} + XUnitRunnerVersion: ${{ parameters.XUnitRunnerVersion }} + IncludeDotNetCli: ${{ parameters.IncludeDotNetCli }} + DotNetCliPackageType: ${{ parameters.DotNetCliPackageType }} + DotNetCliVersion: ${{ parameters.DotNetCliVersion }} + WaitForWorkItemCompletion: ${{ parameters.WaitForWorkItemCompletion }} + HelixBaseUri: ${{ parameters.HelixBaseUri }} + Creator: ${{ parameters.Creator }} + SYSTEM_ACCESSTOKEN: $(System.AccessToken) + condition: and(${{ parameters.condition }}, eq(variables['Agent.Os'], 'Windows_NT')) + continueOnError: ${{ parameters.continueOnError }} + - script: $BUILD_SOURCESDIRECTORY/eng/common/msbuild.sh $BUILD_SOURCESDIRECTORY/eng/common/helixpublish.proj /restore /p:TreatWarningsAsErrors=false /t:Test /bl:$BUILD_SOURCESDIRECTORY/artifacts/log/$BuildConfig/SendToHelix.binlog + displayName: ${{ parameters.DisplayNamePrefix }} (Unix) + env: + BuildConfig: $(_BuildConfig) + HelixSource: ${{ parameters.HelixSource }} + HelixType: ${{ parameters.HelixType }} + HelixBuild: ${{ parameters.HelixBuild }} + HelixConfiguration: ${{ parameters.HelixConfiguration }} + HelixTargetQueues: ${{ parameters.HelixTargetQueues }} + HelixAccessToken: ${{ parameters.HelixAccessToken }} + HelixPreCommands: ${{ parameters.HelixPreCommands }} + HelixPostCommands: ${{ parameters.HelixPostCommands }} + WorkItemDirectory: ${{ parameters.WorkItemDirectory }} + WorkItemCommand: ${{ parameters.WorkItemCommand }} + WorkItemTimeout: ${{ parameters.WorkItemTimeout }} + CorrelationPayloadDirectory: ${{ parameters.CorrelationPayloadDirectory }} + XUnitProjects: ${{ parameters.XUnitProjects }} + XUnitWorkItemTimeout: ${{ parameters.XUnitWorkItemTimeout }} + XUnitPublishTargetFramework: ${{ parameters.XUnitPublishTargetFramework }} + XUnitRuntimeTargetFramework: ${{ parameters.XUnitRuntimeTargetFramework }} + XUnitRunnerVersion: ${{ parameters.XUnitRunnerVersion }} + IncludeDotNetCli: ${{ parameters.IncludeDotNetCli }} + DotNetCliPackageType: ${{ parameters.DotNetCliPackageType }} + DotNetCliVersion: ${{ parameters.DotNetCliVersion }} + WaitForWorkItemCompletion: ${{ parameters.WaitForWorkItemCompletion }} + HelixBaseUri: ${{ parameters.HelixBaseUri }} + Creator: ${{ parameters.Creator }} + SYSTEM_ACCESSTOKEN: $(System.AccessToken) + condition: and(${{ parameters.condition }}, ne(variables['Agent.Os'], 'Windows_NT')) + continueOnError: ${{ parameters.continueOnError }} diff --git a/eng/common/templates-official/steps/source-build.yml b/eng/common/templates-official/steps/source-build.yml new file mode 100644 index 0000000000000..829f17c34d118 --- /dev/null +++ b/eng/common/templates-official/steps/source-build.yml @@ -0,0 +1,129 @@ +parameters: + # This template adds arcade-powered source-build to CI. + + # This is a 'steps' template, and is intended for advanced scenarios where the existing build + # infra has a careful build methodology that must be followed. For example, a repo + # (dotnet/runtime) might choose to clone the GitHub repo only once and store it as a pipeline + # artifact for all subsequent jobs to use, to reduce dependence on a strong network connection to + # GitHub. Using this steps template leaves room for that infra to be included. + + # Defines the platform on which to run the steps. See 'eng/common/templates-official/job/source-build.yml' + # for details. 
The entire object is described in the 'job' template for simplicity, even though
+  # the usage of the properties on this object is split between the 'job' and 'steps' templates.
+  platform: {}
+
+steps:
+# Build. Keep it self-contained for simple reusability. (No source-build-specific job variables.)
+- script: |
+    set -x
+    df -h
+
+    # If building on the internal project, the artifact feeds variable may be available (usually only if needed)
+    # In that case, call the feed setup script to add internal feeds corresponding to public ones.
+    # In addition, add an msbuild argument to copy the WIP from the repo to the target build location.
+    # This is because SetupNugetSources.sh will alter the current NuGet.config file, and we need to preserve those
+    # changes.
+    internalRestoreArgs=
+    if [ '$(dn-bot-dnceng-artifact-feeds-rw)' != '$''(dn-bot-dnceng-artifact-feeds-rw)' ]; then
+      # Temporarily work around https://github.com/dotnet/arcade/issues/7709
+      chmod +x $(Build.SourcesDirectory)/eng/common/SetupNugetSources.sh
+      $(Build.SourcesDirectory)/eng/common/SetupNugetSources.sh $(Build.SourcesDirectory)/NuGet.config $(dn-bot-dnceng-artifact-feeds-rw)
+      internalRestoreArgs='/p:CopyWipIntoInnerSourceBuildRepo=true'
+
+      # The 'Copy WIP' feature of source build uses git stash to apply changes from the original repo.
+      # This only works if there is a username/email configured, which won't be the case in most CI runs.
+      git config --get user.email
+      if [ $? -ne 0 ]; then
+        git config user.email dn-bot@microsoft.com
+        git config user.name dn-bot
+      fi
+    fi
+
+    # If building on the internal project, the internal storage variable may be available (usually only if needed)
+    # In that case, add variables to allow the download of internal runtimes if the specified versions are not found
+    # in the default public locations.
+    internalRuntimeDownloadArgs=
+    if [ '$(dotnetbuilds-internal-container-read-token-base64)' != '$''(dotnetbuilds-internal-container-read-token-base64)' ]; then
+      internalRuntimeDownloadArgs='/p:DotNetRuntimeSourceFeed=https://dotnetbuilds.blob.core.windows.net/internal /p:DotNetRuntimeSourceFeedKey=$(dotnetbuilds-internal-container-read-token-base64) --runtimesourcefeed https://dotnetbuilds.blob.core.windows.net/internal --runtimesourcefeedkey $(dotnetbuilds-internal-container-read-token-base64)'
+    fi
+
+    buildConfig=Release
+    # Check if AzDO substitutes in a build config from a variable, and use it if so.
+ if [ '$(_BuildConfig)' != '$''(_BuildConfig)' ]; then + buildConfig='$(_BuildConfig)' + fi + + officialBuildArgs= + if [ '${{ and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}' = 'True' ]; then + officialBuildArgs='/p:DotNetPublishUsingPipelines=true /p:OfficialBuildId=$(BUILD.BUILDNUMBER)' + fi + + targetRidArgs= + if [ '${{ parameters.platform.targetRID }}' != '' ]; then + targetRidArgs='/p:TargetRid=${{ parameters.platform.targetRID }}' + fi + + runtimeOsArgs= + if [ '${{ parameters.platform.runtimeOS }}' != '' ]; then + runtimeOsArgs='/p:RuntimeOS=${{ parameters.platform.runtimeOS }}' + fi + + baseOsArgs= + if [ '${{ parameters.platform.baseOS }}' != '' ]; then + baseOsArgs='/p:BaseOS=${{ parameters.platform.baseOS }}' + fi + + publishArgs= + if [ '${{ parameters.platform.skipPublishValidation }}' != 'true' ]; then + publishArgs='--publish' + fi + + assetManifestFileName=SourceBuild_RidSpecific.xml + if [ '${{ parameters.platform.name }}' != '' ]; then + assetManifestFileName=SourceBuild_${{ parameters.platform.name }}.xml + fi + + ${{ coalesce(parameters.platform.buildScript, './build.sh') }} --ci \ + --configuration $buildConfig \ + --restore --build --pack $publishArgs -bl \ + $officialBuildArgs \ + $internalRuntimeDownloadArgs \ + $internalRestoreArgs \ + $targetRidArgs \ + $runtimeOsArgs \ + $baseOsArgs \ + /p:SourceBuildNonPortable=${{ parameters.platform.nonPortable }} \ + /p:ArcadeBuildFromSource=true \ + /p:AssetManifestFileName=$assetManifestFileName + displayName: Build + +# Upload build logs for diagnosis. +- task: CopyFiles@2 + displayName: Prepare BuildLogs staging directory + inputs: + SourceFolder: '$(Build.SourcesDirectory)' + Contents: | + **/*.log + **/*.binlog + artifacts/source-build/self/prebuilt-report/** + TargetFolder: '$(Build.StagingDirectory)/BuildLogs' + CleanTargetFolder: true + continueOnError: true + condition: succeededOrFailed() + +- task: 1ES.PublishPipelineArtifact@1 + displayName: Publish BuildLogs + inputs: + targetPath: '$(Build.StagingDirectory)/BuildLogs' + artifactName: BuildLogs_SourceBuild_${{ parameters.platform.name }}_Attempt$(System.JobAttempt) + continueOnError: true + condition: succeededOrFailed() + +# Manually inject component detection so that we can ignore the source build upstream cache, which contains +# a nupkg cache of input packages (a local feed). +# This path must match the upstream cache path in property 'CurrentRepoSourceBuiltNupkgCacheDir' +# in src\Microsoft.DotNet.Arcade.Sdk\tools\SourceBuild\SourceBuildArcade.targets +- task: ComponentGovernanceComponentDetection@0 + displayName: Component Detection (Exclude upstream cache) + inputs: + ignoreDirectories: '$(Build.SourcesDirectory)/artifacts/source-build/self/src/artifacts/obj/source-built-upstream-cache' diff --git a/eng/common/templates-official/variables/pool-providers.yml b/eng/common/templates-official/variables/pool-providers.yml new file mode 100644 index 0000000000000..1f308b24efc43 --- /dev/null +++ b/eng/common/templates-official/variables/pool-providers.yml @@ -0,0 +1,45 @@ +# Select a pool provider based off branch name. Anything with branch name containing 'release' must go into an -Svc pool, +# otherwise it should go into the "normal" pools. This separates out the queueing and billing of released branches. 
+
+# Motivation:
+# Once a given branch of a repository's output has been officially "shipped" once, it is then considered to be COGS
+# (Cost of goods sold) and should be moved to a servicing pool provider. This allows both separation of queueing
+# (allowing release builds and main PR builds to not interfere with each other) and billing (required for COGS).
+# Additionally, the pool provider name itself may be subject to change when the .NET Core Engineering Services
+# team needs to move resources around and create new and potentially differently-named pools. Using this template
+# file from an Arcade-ified repo helps guard against both having to update one's release/* branches and renaming.
+
+# How to use:
+# This yaml assumes your shipped product branches use the naming convention "release/..." (which many do).
+# If we find alternate naming conventions in broad usage it can be added to the condition below.
+#
+# First, import the template in an arcade-ified repo to pick up the variables, e.g.:
+#
+# variables:
+# - template: /eng/common/templates-official/variables/pool-providers.yml
+#
+# ... then anywhere specifying the pool provider use the runtime variable,
+# $(DncEngInternalBuildPool)
+#
+# pool:
+#   name: $(DncEngInternalBuildPool)
+#   image: 1es-windows-2022
+
+variables:
+  # Coalesce the target and source branches so we know when a PR targets a release branch
+  # If these variables are somehow missing, fall back to main (tends to have more capacity)
+
+  # Any new -Svc alternative pools should have variables added here to allow for splitting work
+
+  - name: DncEngInternalBuildPool
+    value: $[
+      replace(
+        replace(
+          eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'),
+          True,
+          'NetCore1ESPool-Svc-Internal'
+        ),
+        False,
+        'NetCore1ESPool-Internal'
+      )
+    ]
\ No newline at end of file
diff --git a/eng/common/templates-official/variables/sdl-variables.yml b/eng/common/templates-official/variables/sdl-variables.yml
new file mode 100644
index 0000000000000..dbdd66d4a4b3a
--- /dev/null
+++ b/eng/common/templates-official/variables/sdl-variables.yml
@@ -0,0 +1,7 @@
+variables:
+# The Guardian version specified in 'eng/common/sdl/packages.config'. This value must be kept in
+# sync with the packages.config file.
+- name: DefaultGuardianVersion + value: 0.109.0 +- name: GuardianPackagesConfigFile + value: $(Build.SourcesDirectory)\eng\common\sdl\packages.config \ No newline at end of file diff --git a/eng/common/templates/job/execute-sdl.yml b/eng/common/templates/job/execute-sdl.yml index 7aabaa18017bf..7870f93bc1765 100644 --- a/eng/common/templates/job/execute-sdl.yml +++ b/eng/common/templates/job/execute-sdl.yml @@ -105,6 +105,11 @@ jobs: downloadPath: $(Build.ArtifactStagingDirectory)\artifacts checkDownloadedFiles: true + - powershell: eng/common/sdl/trim-assets-version.ps1 + -InputPath $(Build.ArtifactStagingDirectory)\artifacts + displayName: Trim the version from the NuGet packages + continueOnError: ${{ parameters.sdlContinueOnError }} + - powershell: eng/common/sdl/extract-artifact-packages.ps1 -InputPath $(Build.ArtifactStagingDirectory)\artifacts\BlobArtifacts -ExtractPath $(Build.ArtifactStagingDirectory)\artifacts\BlobArtifacts diff --git a/eng/common/templates/job/job.yml b/eng/common/templates/job/job.yml index 7b714ec2a8097..8ec5c4f2d9f91 100644 --- a/eng/common/templates/job/job.yml +++ b/eng/common/templates/job/job.yml @@ -15,6 +15,7 @@ parameters: timeoutInMinutes: '' variables: [] workspace: '' + templateContext: '' # Job base template specific parameters # See schema documentation - https://github.com/dotnet/arcade/blob/master/Documentation/AzureDevOps/TemplateSchema.md @@ -25,7 +26,7 @@ parameters: enablePublishTestResults: false enablePublishUsingPipelines: false enableBuildRetry: false - disableComponentGovernance: false + disableComponentGovernance: '' componentGovernanceIgnoreDirectories: '' mergeTestResults: false testRunTitle: '' @@ -68,6 +69,9 @@ jobs: ${{ if ne(parameters.timeoutInMinutes, '') }}: timeoutInMinutes: ${{ parameters.timeoutInMinutes }} + ${{ if ne(parameters.templateContext, '') }}: + templateContext: ${{ parameters.templateContext }} + variables: - ${{ if ne(parameters.enableTelemetry, 'false') }}: - name: DOTNET_CLI_TELEMETRY_PROFILE @@ -136,7 +140,7 @@ jobs: condition: and(succeeded(), in(variables['_SignType'], 'real', 'test'), eq(variables['Agent.Os'], 'Windows_NT')) - ${{ if and(eq(parameters.runAsPublic, 'false'), eq(variables['System.TeamProject'], 'internal')) }}: - - task: NuGetAuthenticate@0 + - task: NuGetAuthenticate@1 - ${{ if and(ne(parameters.artifacts.download, 'false'), ne(parameters.artifacts.download, '')) }}: - task: DownloadPipelineArtifact@2 @@ -159,11 +163,16 @@ jobs: uploadRichNavArtifacts: ${{ coalesce(parameters.richCodeNavigationUploadArtifacts, false) }} continueOnError: true - - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest'), ne(parameters.disableComponentGovernance, 'true')) }}: - - task: ComponentGovernanceComponentDetection@0 - continueOnError: true - inputs: - ignoreDirectories: ${{ parameters.componentGovernanceIgnoreDirectories }} + - template: /eng/common/templates/steps/component-governance.yml + parameters: + ${{ if eq(parameters.disableComponentGovernance, '') }}: + ${{ if and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest'), eq(parameters.runAsPublic, 'false'), or(startsWith(variables['Build.SourceBranch'], 'refs/heads/release/'), startsWith(variables['Build.SourceBranch'], 'refs/heads/dotnet/'), startsWith(variables['Build.SourceBranch'], 'refs/heads/microsoft/'), eq(variables['Build.SourceBranch'], 'refs/heads/main'))) }}: + disableComponentGovernance: false + ${{ else 
}}: + disableComponentGovernance: true + ${{ else }}: + disableComponentGovernance: ${{ parameters.disableComponentGovernance }} + componentGovernanceIgnoreDirectories: ${{ parameters.componentGovernanceIgnoreDirectories }} - ${{ if eq(parameters.enableMicrobuild, 'true') }}: - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: diff --git a/eng/common/templates/job/publish-build-assets.yml b/eng/common/templates/job/publish-build-assets.yml index 42017109f374d..8ec0151def21a 100644 --- a/eng/common/templates/job/publish-build-assets.yml +++ b/eng/common/templates/job/publish-build-assets.yml @@ -58,7 +58,7 @@ jobs: demands: Cmd # If it's not devdiv, it's dnceng ${{ if ne(variables['System.TeamProject'], 'DevDiv') }}: - name: $(DncEngInternalBuildPool) + name: NetCore1ESPool-Publishing-Internal demands: ImageOverride -equals windows.vs2019.amd64 steps: @@ -71,8 +71,8 @@ jobs: checkDownloadedFiles: true condition: ${{ parameters.condition }} continueOnError: ${{ parameters.continueOnError }} - - - task: NuGetAuthenticate@0 + + - task: NuGetAuthenticate@1 - task: PowerShell@2 displayName: Publish Build Assets @@ -81,12 +81,12 @@ jobs: arguments: -task PublishBuildAssets -restore -msbuildEngine dotnet /p:ManifestsPath='$(Build.StagingDirectory)/Download/AssetManifests' /p:BuildAssetRegistryToken=$(MaestroAccessToken) - /p:MaestroApiEndpoint=https://maestro-prod.westus2.cloudapp.azure.com + /p:MaestroApiEndpoint=https://maestro.dot.net /p:PublishUsingPipelines=${{ parameters.publishUsingPipelines }} /p:OfficialBuildId=$(Build.BuildNumber) condition: ${{ parameters.condition }} continueOnError: ${{ parameters.continueOnError }} - + - task: powershell@2 displayName: Create ReleaseConfigs Artifact inputs: @@ -95,7 +95,7 @@ jobs: Add-Content -Path "$(Build.StagingDirectory)/ReleaseConfigs.txt" -Value $(BARBuildId) Add-Content -Path "$(Build.StagingDirectory)/ReleaseConfigs.txt" -Value "$(DefaultChannels)" Add-Content -Path "$(Build.StagingDirectory)/ReleaseConfigs.txt" -Value $(IsStableBuild) - + - task: PublishBuildArtifacts@1 displayName: Publish ReleaseConfigs Artifact inputs: @@ -121,7 +121,7 @@ jobs: - task: PublishBuildArtifacts@1 displayName: Publish SymbolPublishingExclusionsFile Artifact - condition: eq(variables['SymbolExclusionFile'], 'true') + condition: eq(variables['SymbolExclusionFile'], 'true') inputs: PathtoPublish: '$(Build.SourcesDirectory)/eng/SymbolPublishingExclusionsFile.txt' PublishLocation: Container @@ -137,7 +137,7 @@ jobs: displayName: Publish Using Darc inputs: filePath: $(Build.SourcesDirectory)/eng/common/post-build/publish-using-darc.ps1 - arguments: -BuildId $(BARBuildId) + arguments: -BuildId $(BARBuildId) -PublishingInfraVersion 3 -AzdoToken '$(publishing-dnceng-devdiv-code-r-build-re)' -MaestroToken '$(MaestroApiAccessToken)' @@ -148,4 +148,4 @@ jobs: - ${{ if eq(parameters.enablePublishBuildArtifacts, 'true') }}: - template: /eng/common/templates/steps/publish-logs.yml parameters: - JobLabel: 'Publish_Artifacts_Logs' + JobLabel: 'Publish_Artifacts_Logs' diff --git a/eng/common/templates/job/source-index-stage1.yml b/eng/common/templates/job/source-index-stage1.yml index b98202aa02d82..43ee0c202fc79 100644 --- a/eng/common/templates/job/source-index-stage1.yml +++ b/eng/common/templates/job/source-index-stage1.yml @@ -1,6 +1,7 @@ parameters: runAsPublic: false - sourceIndexPackageVersion: 1.0.1-20230228.2 + sourceIndexUploadPackageVersion: 2.0.0-20240502.12 + 
sourceIndexProcessBinlogPackageVersion: 1.0.1-20240129.2 sourceIndexPackageSource: https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json sourceIndexBuildCommand: powershell -NoLogo -NoProfile -ExecutionPolicy Bypass -Command "eng/common/build.ps1 -restore -build -binarylog -ci" preSteps: [] @@ -14,14 +15,14 @@ jobs: dependsOn: ${{ parameters.dependsOn }} condition: ${{ parameters.condition }} variables: - - name: SourceIndexPackageVersion - value: ${{ parameters.sourceIndexPackageVersion }} + - name: SourceIndexUploadPackageVersion + value: ${{ parameters.sourceIndexUploadPackageVersion }} + - name: SourceIndexProcessBinlogPackageVersion + value: ${{ parameters.sourceIndexProcessBinlogPackageVersion }} - name: SourceIndexPackageSource value: ${{ parameters.sourceIndexPackageSource }} - name: BinlogPath value: ${{ parameters.binlogPath }} - - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: - - group: source-dot-net stage1 variables - template: /eng/common/templates/variables/pool-providers.yml ${{ if ne(parameters.pool, '') }}: @@ -40,16 +41,16 @@ jobs: - ${{ preStep }} - task: UseDotNet@2 - displayName: Use .NET Core SDK 6 + displayName: Use .NET 8 SDK inputs: packageType: sdk - version: 6.0.x + version: 8.0.x installationPath: $(Agent.TempDirectory)/dotnet workingDirectory: $(Agent.TempDirectory) - script: | - $(Agent.TempDirectory)/dotnet/dotnet tool install BinLogToSln --version $(SourceIndexPackageVersion) --add-source $(SourceIndexPackageSource) --tool-path $(Agent.TempDirectory)/.source-index/tools - $(Agent.TempDirectory)/dotnet/dotnet tool install UploadIndexStage1 --version $(SourceIndexPackageVersion) --add-source $(SourceIndexPackageSource) --tool-path $(Agent.TempDirectory)/.source-index/tools + $(Agent.TempDirectory)/dotnet/dotnet tool install BinLogToSln --version $(sourceIndexProcessBinlogPackageVersion) --add-source $(SourceIndexPackageSource) --tool-path $(Agent.TempDirectory)/.source-index/tools + $(Agent.TempDirectory)/dotnet/dotnet tool install UploadIndexStage1 --version $(sourceIndexUploadPackageVersion) --add-source $(SourceIndexPackageSource) --tool-path $(Agent.TempDirectory)/.source-index/tools displayName: Download Tools # Set working directory to temp directory so 'dotnet' doesn't try to use global.json and use the repo's sdk. 
workingDirectory: $(Agent.TempDirectory) @@ -61,7 +62,24 @@ jobs: displayName: Process Binlog into indexable sln - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: - - script: $(Agent.TempDirectory)/.source-index/tools/UploadIndexStage1 -i .source-index/stage1output -n $(Build.Repository.Name) - displayName: Upload stage1 artifacts to source index - env: - BLOB_CONTAINER_URL: $(source-dot-net-stage1-blob-container-url) + - task: AzureCLI@2 + displayName: Get stage 1 auth token + inputs: + azureSubscription: 'SourceDotNet Stage1 Publish' + addSpnToEnvironment: true + scriptType: 'ps' + scriptLocation: 'inlineScript' + inlineScript: | + echo "##vso[task.setvariable variable=ARM_CLIENT_ID]$env:servicePrincipalId" + echo "##vso[task.setvariable variable=ARM_ID_TOKEN]$env:idToken" + echo "##vso[task.setvariable variable=ARM_TENANT_ID]$env:tenantId" + + - script: | + echo "Client ID: $(ARM_CLIENT_ID)" + echo "ID Token: $(ARM_ID_TOKEN)" + echo "Tenant ID: $(ARM_TENANT_ID)" + az login --service-principal -u $(ARM_CLIENT_ID) --tenant $(ARM_TENANT_ID) --allow-no-subscriptions --federated-token $(ARM_ID_TOKEN) + displayName: "Login to Azure" + + - script: $(Agent.TempDirectory)/.source-index/tools/UploadIndexStage1 -i .source-index/stage1output -n $(Build.Repository.Name) -s netsourceindexstage1 -b stage1 + displayName: Upload stage1 artifacts to source index \ No newline at end of file diff --git a/eng/common/templates/post-build/common-variables.yml b/eng/common/templates/post-build/common-variables.yml index c24193acfc981..173914f2364a7 100644 --- a/eng/common/templates/post-build/common-variables.yml +++ b/eng/common/templates/post-build/common-variables.yml @@ -7,7 +7,7 @@ variables: # Default Maestro++ API Endpoint and API Version - name: MaestroApiEndPoint - value: "https://maestro-prod.westus2.cloudapp.azure.com" + value: "https://maestro.dot.net" - name: MaestroApiAccessToken value: $(MaestroAccessToken) - name: MaestroApiVersion diff --git a/eng/common/templates/post-build/post-build.yml b/eng/common/templates/post-build/post-build.yml index ef720f9d78198..aba44a25a3387 100644 --- a/eng/common/templates/post-build/post-build.yml +++ b/eng/common/templates/post-build/post-build.yml @@ -39,7 +39,7 @@ parameters: displayName: Enable NuGet validation type: boolean default: true - + - name: publishInstallersAndChecksums displayName: Publish installers and checksums type: boolean @@ -131,8 +131,8 @@ stages: displayName: Validate inputs: filePath: $(Build.SourcesDirectory)/eng/common/post-build/nuget-validation.ps1 - arguments: -PackagesPath $(Build.ArtifactStagingDirectory)/PackageArtifacts/ - -ToolDestinationPath $(Agent.BuildDirectory)/Extract/ + arguments: -PackagesPath $(Build.ArtifactStagingDirectory)/PackageArtifacts/ + -ToolDestinationPath $(Agent.BuildDirectory)/Extract/ - job: displayName: Signing Validation @@ -169,7 +169,7 @@ stages: # This is necessary whenever we want to publish/restore to an AzDO private feed # Since sdk-task.ps1 tries to restore packages we need to do this authentication here # otherwise it'll complain about accessing a private feed. 
- - task: NuGetAuthenticate@0 + - task: NuGetAuthenticate@1 displayName: 'Authenticate to AzDO Feeds' # Signing validation will optionally work with the buildmanifest file which is downloaded from @@ -221,9 +221,9 @@ stages: displayName: Validate inputs: filePath: $(Build.SourcesDirectory)/eng/common/post-build/sourcelink-validation.ps1 - arguments: -InputPath $(Build.ArtifactStagingDirectory)/BlobArtifacts/ - -ExtractPath $(Agent.BuildDirectory)/Extract/ - -GHRepoName $(Build.Repository.Name) + arguments: -InputPath $(Build.ArtifactStagingDirectory)/BlobArtifacts/ + -ExtractPath $(Agent.BuildDirectory)/Extract/ + -GHRepoName $(Build.Repository.Name) -GHCommit $(Build.SourceVersion) -SourcelinkCliVersion $(SourceLinkCLIVersion) continueOnError: true @@ -258,7 +258,7 @@ stages: demands: Cmd # If it's not devdiv, it's dnceng ${{ else }}: - name: $(DncEngInternalBuildPool) + name: NetCore1ESPool-Publishing-Internal demands: ImageOverride -equals windows.vs2019.amd64 steps: - template: setup-maestro-vars.yml @@ -266,13 +266,13 @@ stages: BARBuildId: ${{ parameters.BARBuildId }} PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }} - - task: NuGetAuthenticate@0 + - task: NuGetAuthenticate@1 - task: PowerShell@2 displayName: Publish Using Darc inputs: filePath: $(Build.SourcesDirectory)/eng/common/post-build/publish-using-darc.ps1 - arguments: -BuildId $(BARBuildId) + arguments: -BuildId $(BARBuildId) -PublishingInfraVersion ${{ parameters.publishingInfraVersion }} -AzdoToken '$(publishing-dnceng-devdiv-code-r-build-re)' -MaestroToken '$(MaestroApiAccessToken)' diff --git a/eng/common/templates/steps/component-governance.yml b/eng/common/templates/steps/component-governance.yml new file mode 100644 index 0000000000000..cbba0596709da --- /dev/null +++ b/eng/common/templates/steps/component-governance.yml @@ -0,0 +1,13 @@ +parameters: + disableComponentGovernance: false + componentGovernanceIgnoreDirectories: '' + +steps: +- ${{ if eq(parameters.disableComponentGovernance, 'true') }}: + - script: echo "##vso[task.setvariable variable=skipComponentGovernanceDetection]true" + displayName: Set skipComponentGovernanceDetection variable +- ${{ if ne(parameters.disableComponentGovernance, 'true') }}: + - task: ComponentGovernanceComponentDetection@0 + continueOnError: true + inputs: + ignoreDirectories: ${{ parameters.componentGovernanceIgnoreDirectories }} \ No newline at end of file diff --git a/eng/common/templates/steps/execute-sdl.yml b/eng/common/templates/steps/execute-sdl.yml index 9dd5709f66dc7..07426fde05d82 100644 --- a/eng/common/templates/steps/execute-sdl.yml +++ b/eng/common/templates/steps/execute-sdl.yml @@ -33,7 +33,7 @@ steps: - ${{ if ne(parameters.overrideParameters, '') }}: - powershell: ${{ parameters.executeAllSdlToolsScript }} ${{ parameters.overrideParameters }} - displayName: Execute SDL + displayName: Execute SDL (Overridden) continueOnError: ${{ parameters.sdlContinueOnError }} condition: ${{ parameters.condition }} diff --git a/eng/common/templates/steps/generate-sbom.yml b/eng/common/templates/steps/generate-sbom.yml index a06373f38fa5d..2b21eae427328 100644 --- a/eng/common/templates/steps/generate-sbom.yml +++ b/eng/common/templates/steps/generate-sbom.yml @@ -5,7 +5,7 @@ # IgnoreDirectories - Directories to ignore for SBOM generation. This will be passed through to the CG component detector. 
parameters: - PackageVersion: 7.0.0 + PackageVersion: 8.0.0 BuildDropPath: '$(Build.SourcesDirectory)/artifacts' PackageName: '.NET' ManifestDirPath: $(Build.ArtifactStagingDirectory)/sbom diff --git a/eng/common/templates/steps/source-build.yml b/eng/common/templates/steps/source-build.yml index a97a185a367a9..41bbb915736a6 100644 --- a/eng/common/templates/steps/source-build.yml +++ b/eng/common/templates/steps/source-build.yml @@ -68,6 +68,11 @@ steps: runtimeOsArgs='/p:RuntimeOS=${{ parameters.platform.runtimeOS }}' fi + baseOsArgs= + if [ '${{ parameters.platform.baseOS }}' != '' ]; then + baseOsArgs='/p:BaseOS=${{ parameters.platform.baseOS }}' + fi + publishArgs= if [ '${{ parameters.platform.skipPublishValidation }}' != 'true' ]; then publishArgs='--publish' @@ -86,6 +91,7 @@ steps: $internalRestoreArgs \ $targetRidArgs \ $runtimeOsArgs \ + $baseOsArgs \ /p:SourceBuildNonPortable=${{ parameters.platform.nonPortable }} \ /p:ArcadeBuildFromSource=true \ /p:AssetManifestFileName=$assetManifestFileName @@ -112,3 +118,12 @@ steps: artifactName: BuildLogs_SourceBuild_${{ parameters.platform.name }}_Attempt$(System.JobAttempt) continueOnError: true condition: succeededOrFailed() + +# Manually inject component detection so that we can ignore the source build upstream cache, which contains +# a nupkg cache of input packages (a local feed). +# This path must match the upstream cache path in property 'CurrentRepoSourceBuiltNupkgCacheDir' +# in src\Microsoft.DotNet.Arcade.Sdk\tools\SourceBuild\SourceBuildArcade.targets +- task: ComponentGovernanceComponentDetection@0 + displayName: Component Detection (Exclude upstream cache) + inputs: + ignoreDirectories: '$(Build.SourcesDirectory)/artifacts/source-build/self/src/artifacts/obj/source-built-upstream-cache' diff --git a/eng/common/templates/variables/pool-providers.yml b/eng/common/templates/variables/pool-providers.yml index 9cc5c550d3b36..d236f9fdbb153 100644 --- a/eng/common/templates/variables/pool-providers.yml +++ b/eng/common/templates/variables/pool-providers.yml @@ -1,15 +1,15 @@ -# Select a pool provider based off branch name. Anything with branch name containing 'release' must go into an -Svc pool, +# Select a pool provider based off branch name. Anything with branch name containing 'release' must go into an -Svc pool, # otherwise it should go into the "normal" pools. This separates out the queueing and billing of released branches. -# Motivation: +# Motivation: # Once a given branch of a repository's output has been officially "shipped" once, it is then considered to be COGS # (Cost of goods sold) and should be moved to a servicing pool provider. This allows both separation of queueing # (allowing release builds and main PR builds to not intefere with each other) and billing (required for COGS. -# Additionally, the pool provider name itself may be subject to change when the .NET Core Engineering Services -# team needs to move resources around and create new and potentially differently-named pools. Using this template +# Additionally, the pool provider name itself may be subject to change when the .NET Core Engineering Services +# team needs to move resources around and create new and potentially differently-named pools. Using this template # file from an Arcade-ified repo helps guard against both having to update one's release/* branches and renaming. -# How to use: +# How to use: # This yaml assumes your shipped product branches use the naming convention "release/..." (which many do). 
# If we find alternate naming conventions in broad usage it can be added to the condition below. # @@ -54,4 +54,4 @@ variables: False, 'NetCore1ESPool-Internal' ) - ] \ No newline at end of file + ] diff --git a/eng/common/tools.ps1 b/eng/common/tools.ps1 index 8ad03be3eccb2..eb188cfda415b 100644 --- a/eng/common/tools.ps1 +++ b/eng/common/tools.ps1 @@ -287,6 +287,25 @@ function InstallDotNet([string] $dotnetRoot, [string] $runtimeSourceFeedKey = '', [switch] $noPath) { + $dotnetVersionLabel = "'sdk v$version'" + + if ($runtime -ne '' -and $runtime -ne 'sdk') { + $runtimePath = $dotnetRoot + $runtimePath = $runtimePath + "\shared" + if ($runtime -eq "dotnet") { $runtimePath = $runtimePath + "\Microsoft.NETCore.App" } + if ($runtime -eq "aspnetcore") { $runtimePath = $runtimePath + "\Microsoft.AspNetCore.App" } + if ($runtime -eq "windowsdesktop") { $runtimePath = $runtimePath + "\Microsoft.WindowsDesktop.App" } + $runtimePath = $runtimePath + "\" + $version + + $dotnetVersionLabel = "runtime toolset '$runtime/$architecture v$version'" + + if (Test-Path $runtimePath) { + Write-Host " Runtime toolset '$runtime/$architecture v$version' already installed." + $installSuccess = $true + Exit + } + } + $installScript = GetDotNetInstallScript $dotnetRoot $installParameters = @{ Version = $version @@ -323,18 +342,18 @@ function InstallDotNet([string] $dotnetRoot, } else { $location = "public location"; } - Write-Host "Attempting to install dotnet from $location." + Write-Host " Attempting to install $dotnetVersionLabel from $location." try { & $installScript @variation $installSuccess = $true break } catch { - Write-Host "Failed to install dotnet from $location." + Write-Host " Failed to install $dotnetVersionLabel from $location." } } if (-not $installSuccess) { - Write-PipelineTelemetryError -Category 'InitializeToolset' -Message "Failed to install dotnet from any of the specified locations." + Write-PipelineTelemetryError -Category 'InitializeToolset' -Message "Failed to install $dotnetVersionLabel from any of the specified locations." ExitWithExitCode 1 } } @@ -360,13 +379,13 @@ function InitializeVisualStudioMSBuild([bool]$install, [object]$vsRequirements = } # Minimum VS version to require. - $vsMinVersionReqdStr = '16.8' + $vsMinVersionReqdStr = '17.7' $vsMinVersionReqd = [Version]::new($vsMinVersionReqdStr) # If the version of msbuild is going to be xcopied, # use this version. Version matches a package here: - # https://dev.azure.com/dnceng/public/_packaging?_a=package&feed=dotnet-eng&package=RoslynTools.MSBuild&protocolType=NuGet&version=17.4.1&view=overview - $defaultXCopyMSBuildVersion = '17.4.1' + # https://dev.azure.com/dnceng/public/_artifacts/feed/dotnet-eng/NuGet/RoslynTools.MSBuild/versions/17.8.1-2 + $defaultXCopyMSBuildVersion = '17.8.1-2' if (!$vsRequirements) { if (Get-Member -InputObject $GlobalJson.tools -Name 'vs') { @@ -399,7 +418,8 @@ function InitializeVisualStudioMSBuild([bool]$install, [object]$vsRequirements = # Locate Visual Studio installation or download x-copy msbuild. 
$vsInfo = LocateVisualStudio $vsRequirements if ($vsInfo -ne $null) { - $vsInstallDir = $vsInfo.installationPath + # Ensure vsInstallDir has a trailing slash + $vsInstallDir = Join-Path $vsInfo.installationPath "\" $vsMajorVersion = $vsInfo.installationVersion.Split('.')[0] InitializeVisualStudioEnvironmentVariables $vsInstallDir $vsMajorVersion @@ -581,7 +601,15 @@ function InitializeBuildTool() { ExitWithExitCode 1 } $dotnetPath = Join-Path $dotnetRoot (GetExecutableFileName 'dotnet') - $buildTool = @{ Path = $dotnetPath; Command = 'msbuild'; Tool = 'dotnet'; Framework = 'net8.0' } + + # Use override if it exists - commonly set by source-build + if ($null -eq $env:_OverrideArcadeInitializeBuildToolFramework) { + $initializeBuildToolFramework="net8.0" + } else { + $initializeBuildToolFramework=$env:_OverrideArcadeInitializeBuildToolFramework + } + + $buildTool = @{ Path = $dotnetPath; Command = 'msbuild'; Tool = 'dotnet'; Framework = $initializeBuildToolFramework } } elseif ($msbuildEngine -eq "vs") { try { $msbuildPath = InitializeVisualStudioMSBuild -install:$restore @@ -651,6 +679,10 @@ function InitializeNativeTools() { } } +function Read-ArcadeSdkVersion() { + return $GlobalJson.'msbuild-sdks'.'Microsoft.DotNet.Arcade.Sdk' +} + function InitializeToolset() { if (Test-Path variable:global:_ToolsetBuildProj) { return $global:_ToolsetBuildProj @@ -658,7 +690,7 @@ function InitializeToolset() { $nugetCache = GetNuGetPackageCachePath - $toolsetVersion = $GlobalJson.'msbuild-sdks'.'Microsoft.DotNet.Arcade.Sdk' + $toolsetVersion = Read-ArcadeSdkVersion $toolsetLocationFile = Join-Path $ToolsetDir "$toolsetVersion.txt" if (Test-Path $toolsetLocationFile) { diff --git a/eng/common/tools.sh b/eng/common/tools.sh index cf9fb1ea2d32b..3392e3a99921f 100755 --- a/eng/common/tools.sh +++ b/eng/common/tools.sh @@ -184,6 +184,35 @@ function InstallDotNetSdk { function InstallDotNet { local root=$1 local version=$2 + local runtime=$4 + + local dotnetVersionLabel="'$runtime v$version'" + if [[ -n "${4:-}" ]] && [ "$4" != 'sdk' ]; then + runtimePath="$root" + runtimePath="$runtimePath/shared" + case "$runtime" in + dotnet) + runtimePath="$runtimePath/Microsoft.NETCore.App" + ;; + aspnetcore) + runtimePath="$runtimePath/Microsoft.AspNetCore.App" + ;; + windowsdesktop) + runtimePath="$runtimePath/Microsoft.WindowsDesktop.App" + ;; + *) + ;; + esac + runtimePath="$runtimePath/$version" + + dotnetVersionLabel="runtime toolset '$runtime/$architecture v$version'" + + if [ -d "$runtimePath" ]; then + echo " Runtime toolset '$runtime/$architecture v$version' already installed." + local installSuccess=1 + return + fi + fi GetDotNetInstallScript "$root" local install_script=$_GetDotNetInstallScript @@ -228,17 +257,17 @@ function InstallDotNet { for variationName in "${variations[@]}"; do local name="$variationName[@]" local variation=("${!name}") - echo "Attempting to install dotnet from $variationName." + echo " Attempting to install $dotnetVersionLabel from $variationName." bash "$install_script" "${variation[@]}" && installSuccess=1 if [[ "$installSuccess" -eq 1 ]]; then break fi - echo "Failed to install dotnet from $variationName." + echo " Failed to install $dotnetVersionLabel from $variationName." done if [[ "$installSuccess" -eq 0 ]]; then - Write-PipelineTelemetryError -category 'InitializeToolset' "Failed to install dotnet SDK from any of the specified locations." + Write-PipelineTelemetryError -category 'InitializeToolset' "Failed to install $dotnetVersionLabel from any of the specified locations." 
ExitWithExitCode 1 fi } @@ -312,7 +341,12 @@ function InitializeBuildTool { # return values _InitializeBuildTool="$_InitializeDotNetCli/dotnet" _InitializeBuildToolCommand="msbuild" - _InitializeBuildToolFramework="net8.0" + # use override if it exists - commonly set by source-build + if [[ "${_OverrideArcadeInitializeBuildToolFramework:-x}" == "x" ]]; then + _InitializeBuildToolFramework="net8.0" + else + _InitializeBuildToolFramework="${_OverrideArcadeInitializeBuildToolFramework}" + fi } # Set RestoreNoCache as a workaround for https://github.com/NuGet/Home/issues/3116 diff --git a/global.json b/global.json index aa76203043e47..36585c32da5e1 100644 --- a/global.json +++ b/global.json @@ -1,18 +1,18 @@ { "sdk": { - "version": "8.0.100-preview.1.23115.2", + "version": "8.0.101", "allowPrerelease": false, "rollForward": "latestPatch" }, "tools": { - "dotnet": "8.0.100-preview.1.23115.2", + "dotnet": "8.0.101", "vs": { "version": "17.4.1" }, "xcopy-msbuild": "17.4.1" }, "msbuild-sdks": { - "Microsoft.DotNet.Arcade.Sdk": "8.0.0-beta.23168.1", - "Microsoft.DotNet.Helix.Sdk": "8.0.0-beta.23168.1" + "Microsoft.DotNet.Arcade.Sdk": "8.0.0-beta.24270.4", + "Microsoft.DotNet.Helix.Sdk": "8.0.0-beta.24270.4" } -} \ No newline at end of file +} From 1276451046cd0c387c05661f45d004bd87d6095c Mon Sep 17 00:00:00 2001 From: Joey Robichaud Date: Wed, 5 Jun 2024 16:17:38 -0700 Subject: [PATCH 02/20] Backport test fixes from #70248 --- .../Collections/ImmutableSegmentedListBuilderTest.cs | 2 +- .../CodeAnalysisTest/Collections/ImmutableSegmentedListTest.cs | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/Compilers/Core/CodeAnalysisTest/Collections/ImmutableSegmentedListBuilderTest.cs b/src/Compilers/Core/CodeAnalysisTest/Collections/ImmutableSegmentedListBuilderTest.cs index 90c8bfb3be889..f369a082b2b58 100644 --- a/src/Compilers/Core/CodeAnalysisTest/Collections/ImmutableSegmentedListBuilderTest.cs +++ b/src/Compilers/Core/CodeAnalysisTest/Collections/ImmutableSegmentedListBuilderTest.cs @@ -380,7 +380,7 @@ public void ItemRef() var builder = new ImmutableSegmentedList.Builder(list); ref readonly var safeRef = ref builder.ItemRef(1); - ref var unsafeRef = ref Unsafe.AsRef(safeRef); + ref var unsafeRef = ref Unsafe.AsRef(in safeRef); Assert.Equal(2, builder.ItemRef(1)); diff --git a/src/Compilers/Core/CodeAnalysisTest/Collections/ImmutableSegmentedListTest.cs b/src/Compilers/Core/CodeAnalysisTest/Collections/ImmutableSegmentedListTest.cs index 87f16a050f0e5..b153618443d3e 100644 --- a/src/Compilers/Core/CodeAnalysisTest/Collections/ImmutableSegmentedListTest.cs +++ b/src/Compilers/Core/CodeAnalysisTest/Collections/ImmutableSegmentedListTest.cs @@ -815,7 +815,7 @@ public void ItemRef() var list = new[] { 1, 2, 3 }.ToImmutableSegmentedList(); ref readonly var safeRef = ref list.ItemRef(1); - ref var unsafeRef = ref Unsafe.AsRef(safeRef); + ref var unsafeRef = ref Unsafe.AsRef(in safeRef); Assert.Equal(2, list.ItemRef(1)); From 5e7daff4bb6fe7cf6485e7d0662ca7589e9cb08a Mon Sep 17 00:00:00 2001 From: Joey Robichaud Date: Wed, 5 Jun 2024 16:22:23 -0700 Subject: [PATCH 03/20] Backport pragma disable from #70248 --- src/Workspaces/Core/Portable/Workspace/Solution/Checksum.cs | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/Workspaces/Core/Portable/Workspace/Solution/Checksum.cs b/src/Workspaces/Core/Portable/Workspace/Solution/Checksum.cs index c3ae1f521642b..b10c316ccac35 100644 --- a/src/Workspaces/Core/Portable/Workspace/Solution/Checksum.cs +++ 
b/src/Workspaces/Core/Portable/Workspace/Solution/Checksum.cs @@ -119,7 +119,9 @@ public void WriteTo(ObjectWriter writer) public void WriteTo(Span span) { Contract.ThrowIfFalse(span.Length >= HashSize); +#pragma warning disable CS9191 // The 'ref' modifier for an argument corresponding to 'in' parameter is equivalent to 'in'. Consider using 'in' instead. Contract.ThrowIfFalse(MemoryMarshal.TryWrite(span, ref Unsafe.AsRef(in _checksum))); +#pragma warning restore CS9191 } public static Checksum ReadFrom(ObjectReader reader) From e7ebb8af8141cfefe09be7c0d0b348be6196349c Mon Sep 17 00:00:00 2001 From: Joey Robichaud Date: Wed, 5 Jun 2024 16:47:00 -0700 Subject: [PATCH 04/20] Pass Refs as ref --- .../Collections/ImmutableSegmentedListBuilderTest.cs | 2 +- .../CodeAnalysisTest/Collections/ImmutableSegmentedListTest.cs | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/Compilers/Core/CodeAnalysisTest/Collections/ImmutableSegmentedListBuilderTest.cs b/src/Compilers/Core/CodeAnalysisTest/Collections/ImmutableSegmentedListBuilderTest.cs index f369a082b2b58..edfe41c4f1043 100644 --- a/src/Compilers/Core/CodeAnalysisTest/Collections/ImmutableSegmentedListBuilderTest.cs +++ b/src/Compilers/Core/CodeAnalysisTest/Collections/ImmutableSegmentedListBuilderTest.cs @@ -380,7 +380,7 @@ public void ItemRef() var builder = new ImmutableSegmentedList.Builder(list); ref readonly var safeRef = ref builder.ItemRef(1); - ref var unsafeRef = ref Unsafe.AsRef(in safeRef); + ref var unsafeRef = ref Unsafe.AsRef(ref safeRef); Assert.Equal(2, builder.ItemRef(1)); diff --git a/src/Compilers/Core/CodeAnalysisTest/Collections/ImmutableSegmentedListTest.cs b/src/Compilers/Core/CodeAnalysisTest/Collections/ImmutableSegmentedListTest.cs index b153618443d3e..6595b66ffc87a 100644 --- a/src/Compilers/Core/CodeAnalysisTest/Collections/ImmutableSegmentedListTest.cs +++ b/src/Compilers/Core/CodeAnalysisTest/Collections/ImmutableSegmentedListTest.cs @@ -815,7 +815,7 @@ public void ItemRef() var list = new[] { 1, 2, 3 }.ToImmutableSegmentedList(); ref readonly var safeRef = ref list.ItemRef(1); - ref var unsafeRef = ref Unsafe.AsRef(in safeRef); + ref var unsafeRef = ref Unsafe.AsRef(ref safeRef); Assert.Equal(2, list.ItemRef(1)); From 7efe8d82d992df5a03e2b01015aaee8cefbce850 Mon Sep 17 00:00:00 2001 From: Joey Robichaud Date: Wed, 5 Jun 2024 16:53:51 -0700 Subject: [PATCH 05/20] Backport XUnit.targets change from #68573 --- eng/targets/XUnit.targets | 1 - 1 file changed, 1 deletion(-) diff --git a/eng/targets/XUnit.targets b/eng/targets/XUnit.targets index e2e574a4290b6..e803a80906a25 100644 --- a/eng/targets/XUnit.targets +++ b/eng/targets/XUnit.targets @@ -7,7 +7,6 @@ - From d0fc8b730d13d510b43c42f72ba4c878ec5ea8b5 Mon Sep 17 00:00:00 2001 From: Joey Robichaud Date: Wed, 5 Jun 2024 17:07:00 -0700 Subject: [PATCH 06/20] Revert "Pass Refs as ref" --- .../Collections/ImmutableSegmentedListBuilderTest.cs | 2 +- .../CodeAnalysisTest/Collections/ImmutableSegmentedListTest.cs | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/Compilers/Core/CodeAnalysisTest/Collections/ImmutableSegmentedListBuilderTest.cs b/src/Compilers/Core/CodeAnalysisTest/Collections/ImmutableSegmentedListBuilderTest.cs index edfe41c4f1043..f369a082b2b58 100644 --- a/src/Compilers/Core/CodeAnalysisTest/Collections/ImmutableSegmentedListBuilderTest.cs +++ b/src/Compilers/Core/CodeAnalysisTest/Collections/ImmutableSegmentedListBuilderTest.cs @@ -380,7 +380,7 @@ public void ItemRef() var builder = new 
ImmutableSegmentedList.Builder(list); ref readonly var safeRef = ref builder.ItemRef(1); - ref var unsafeRef = ref Unsafe.AsRef(ref safeRef); + ref var unsafeRef = ref Unsafe.AsRef(in safeRef); Assert.Equal(2, builder.ItemRef(1)); diff --git a/src/Compilers/Core/CodeAnalysisTest/Collections/ImmutableSegmentedListTest.cs b/src/Compilers/Core/CodeAnalysisTest/Collections/ImmutableSegmentedListTest.cs index 6595b66ffc87a..b153618443d3e 100644 --- a/src/Compilers/Core/CodeAnalysisTest/Collections/ImmutableSegmentedListTest.cs +++ b/src/Compilers/Core/CodeAnalysisTest/Collections/ImmutableSegmentedListTest.cs @@ -815,7 +815,7 @@ public void ItemRef() var list = new[] { 1, 2, 3 }.ToImmutableSegmentedList(); ref readonly var safeRef = ref list.ItemRef(1); - ref var unsafeRef = ref Unsafe.AsRef(ref safeRef); + ref var unsafeRef = ref Unsafe.AsRef(in safeRef); Assert.Equal(2, list.ItemRef(1)); From 544f07f5e0e1f7ea5890d69a01aa7c24bd1b9f74 Mon Sep 17 00:00:00 2001 From: Joey Robichaud Date: Thu, 6 Jun 2024 07:52:09 -0700 Subject: [PATCH 07/20] Backport removing xunit.assert from #68573 --- ...oft.VisualStudio.LanguageServices.New.IntegrationTests.csproj | 1 - .../Microsoft.CodeAnalysis.Workspaces.Test.Utilities.csproj | 1 - 2 files changed, 2 deletions(-) diff --git a/src/VisualStudio/IntegrationTest/New.IntegrationTests/Microsoft.VisualStudio.LanguageServices.New.IntegrationTests.csproj b/src/VisualStudio/IntegrationTest/New.IntegrationTests/Microsoft.VisualStudio.LanguageServices.New.IntegrationTests.csproj index 43eb3d7562387..64fa338b5a8e2 100644 --- a/src/VisualStudio/IntegrationTest/New.IntegrationTests/Microsoft.VisualStudio.LanguageServices.New.IntegrationTests.csproj +++ b/src/VisualStudio/IntegrationTest/New.IntegrationTests/Microsoft.VisualStudio.LanguageServices.New.IntegrationTests.csproj @@ -61,7 +61,6 @@ - diff --git a/src/Workspaces/CoreTestUtilities/Microsoft.CodeAnalysis.Workspaces.Test.Utilities.csproj b/src/Workspaces/CoreTestUtilities/Microsoft.CodeAnalysis.Workspaces.Test.Utilities.csproj index 0eab42b807afb..04e389484f68e 100644 --- a/src/Workspaces/CoreTestUtilities/Microsoft.CodeAnalysis.Workspaces.Test.Utilities.csproj +++ b/src/Workspaces/CoreTestUtilities/Microsoft.CodeAnalysis.Workspaces.Test.Utilities.csproj @@ -32,7 +32,6 @@ - From 3333f90d9f329d7a0fe34502524bf802a798eb61 Mon Sep 17 00:00:00 2001 From: Joey Robichaud Date: Thu, 6 Jun 2024 08:22:01 -0700 Subject: [PATCH 08/20] Disable a warning. 
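For context on the Unsafe.AsRef churn in the surrounding ItemRef test commits: a minimal standalone sketch, not part of any patch in this series, of what Unsafe.AsRef(in ...) does. Class and variable names are illustrative; only the call pattern mirrors the test code.

    using System;
    using System.Runtime.CompilerServices;

    class AsRefSketch
    {
        static void Main()
        {
            int[] values = { 1, 2, 3 };
            ref readonly int safeRef = ref values[1];          // read-only view of values[1]
            ref int unsafeRef = ref Unsafe.AsRef(in safeRef);  // same location, now writable
            unsafeRef = 4;
            Console.WriteLine(values[1]);                      // prints 4; the write is visible through the array
        }
    }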
--- .../Collections/ImmutableSegmentedListBuilderTest.cs | 3 ++- .../CodeAnalysisTest/Collections/ImmutableSegmentedListTest.cs | 2 ++ 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/src/Compilers/Core/CodeAnalysisTest/Collections/ImmutableSegmentedListBuilderTest.cs b/src/Compilers/Core/CodeAnalysisTest/Collections/ImmutableSegmentedListBuilderTest.cs index f369a082b2b58..f4dfcda5f16e5 100644 --- a/src/Compilers/Core/CodeAnalysisTest/Collections/ImmutableSegmentedListBuilderTest.cs +++ b/src/Compilers/Core/CodeAnalysisTest/Collections/ImmutableSegmentedListBuilderTest.cs @@ -380,8 +380,9 @@ public void ItemRef() var builder = new ImmutableSegmentedList.Builder(list); ref readonly var safeRef = ref builder.ItemRef(1); +#pragma warning disable CS1620 // Argument 1 must be passed with the 'ref' keyword ref var unsafeRef = ref Unsafe.AsRef(in safeRef); - +#pragma warning restore CS1620 Assert.Equal(2, builder.ItemRef(1)); unsafeRef = 4; diff --git a/src/Compilers/Core/CodeAnalysisTest/Collections/ImmutableSegmentedListTest.cs b/src/Compilers/Core/CodeAnalysisTest/Collections/ImmutableSegmentedListTest.cs index b153618443d3e..d380f8ef426c9 100644 --- a/src/Compilers/Core/CodeAnalysisTest/Collections/ImmutableSegmentedListTest.cs +++ b/src/Compilers/Core/CodeAnalysisTest/Collections/ImmutableSegmentedListTest.cs @@ -815,7 +815,9 @@ public void ItemRef() var list = new[] { 1, 2, 3 }.ToImmutableSegmentedList(); ref readonly var safeRef = ref list.ItemRef(1); +#pragma warning disable CS1620 // Argument 1 must be passed with the 'ref' keyword ref var unsafeRef = ref Unsafe.AsRef(in safeRef); +#pragma warning restore CS1620 Assert.Equal(2, list.ItemRef(1)); From 8f8e3d1d2e47c5a1c93858964ecc8ca6a9b98080 Mon Sep 17 00:00:00 2001 From: Joey Robichaud Date: Thu, 6 Jun 2024 09:00:38 -0700 Subject: [PATCH 09/20] Revert "Disable a warning." 
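Related context for the CS9191 suppression added in PATCH 03: a hedged sketch, built around a hypothetical stand-in type, of the Checksum.WriteTo pattern. On SDKs where MemoryMarshal.TryWrite takes its value parameter by 'in', the explicit 'ref' argument draws CS9191, which is why the real change wraps the call in a pragma.

    using System;
    using System.Runtime.CompilerServices;
    using System.Runtime.InteropServices;

    // Hypothetical stand-in for the real Checksum type; only the WriteTo pattern matters.
    readonly struct ChecksumSketch
    {
        private readonly long _checksum;
        public ChecksumSketch(long value) => _checksum = value;

        public void WriteTo(Span<byte> span)
        {
            // Unsafe.AsRef(in _checksum) yields a writable ref to the readonly field so it
            // can be passed by 'ref' to MemoryMarshal.TryWrite; newer SDKs flag that 'ref'
            // with CS9191, but the behavior is unchanged.
            if (!MemoryMarshal.TryWrite(span, ref Unsafe.AsRef(in _checksum)))
                throw new InvalidOperationException("destination span is too small");
        }
    }

    class Program
    {
        static void Main()
        {
            Span<byte> buffer = stackalloc byte[sizeof(long)];
            new ChecksumSketch(0x1234).WriteTo(buffer);
            Console.WriteLine(BitConverter.ToString(buffer.ToArray()));
        }
    }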
--- .../Collections/ImmutableSegmentedListBuilderTest.cs | 2 -- .../CodeAnalysisTest/Collections/ImmutableSegmentedListTest.cs | 2 -- 2 files changed, 4 deletions(-) diff --git a/src/Compilers/Core/CodeAnalysisTest/Collections/ImmutableSegmentedListBuilderTest.cs b/src/Compilers/Core/CodeAnalysisTest/Collections/ImmutableSegmentedListBuilderTest.cs index f4dfcda5f16e5..38108885df288 100644 --- a/src/Compilers/Core/CodeAnalysisTest/Collections/ImmutableSegmentedListBuilderTest.cs +++ b/src/Compilers/Core/CodeAnalysisTest/Collections/ImmutableSegmentedListBuilderTest.cs @@ -380,9 +380,7 @@ public void ItemRef() var builder = new ImmutableSegmentedList.Builder(list); ref readonly var safeRef = ref builder.ItemRef(1); -#pragma warning disable CS1620 // Argument 1 must be passed with the 'ref' keyword ref var unsafeRef = ref Unsafe.AsRef(in safeRef); -#pragma warning restore CS1620 Assert.Equal(2, builder.ItemRef(1)); unsafeRef = 4; diff --git a/src/Compilers/Core/CodeAnalysisTest/Collections/ImmutableSegmentedListTest.cs b/src/Compilers/Core/CodeAnalysisTest/Collections/ImmutableSegmentedListTest.cs index d380f8ef426c9..b153618443d3e 100644 --- a/src/Compilers/Core/CodeAnalysisTest/Collections/ImmutableSegmentedListTest.cs +++ b/src/Compilers/Core/CodeAnalysisTest/Collections/ImmutableSegmentedListTest.cs @@ -815,9 +815,7 @@ public void ItemRef() var list = new[] { 1, 2, 3 }.ToImmutableSegmentedList(); ref readonly var safeRef = ref list.ItemRef(1); -#pragma warning disable CS1620 // Argument 1 must be passed with the 'ref' keyword ref var unsafeRef = ref Unsafe.AsRef(in safeRef); -#pragma warning restore CS1620 Assert.Equal(2, list.ItemRef(1)); From 995239d89dd4c17a186fb6d9a166a4f2b5a0c215 Mon Sep 17 00:00:00 2001 From: Joey Robichaud Date: Thu, 6 Jun 2024 09:01:20 -0700 Subject: [PATCH 10/20] Restore newline --- .../Collections/ImmutableSegmentedListBuilderTest.cs | 1 + 1 file changed, 1 insertion(+) diff --git a/src/Compilers/Core/CodeAnalysisTest/Collections/ImmutableSegmentedListBuilderTest.cs b/src/Compilers/Core/CodeAnalysisTest/Collections/ImmutableSegmentedListBuilderTest.cs index 38108885df288..f369a082b2b58 100644 --- a/src/Compilers/Core/CodeAnalysisTest/Collections/ImmutableSegmentedListBuilderTest.cs +++ b/src/Compilers/Core/CodeAnalysisTest/Collections/ImmutableSegmentedListBuilderTest.cs @@ -381,6 +381,7 @@ public void ItemRef() ref readonly var safeRef = ref builder.ItemRef(1); ref var unsafeRef = ref Unsafe.AsRef(in safeRef); + Assert.Equal(2, builder.ItemRef(1)); unsafeRef = 4; From de68f73d264e6b0a5226e81a4b32bb1519c0bee1 Mon Sep 17 00:00:00 2001 From: Joey Robichaud Date: Thu, 6 Jun 2024 09:34:39 -0700 Subject: [PATCH 11/20] Move to a newer SDK --- global.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/global.json b/global.json index 36585c32da5e1..020b20b3a411d 100644 --- a/global.json +++ b/global.json @@ -1,11 +1,11 @@ { "sdk": { - "version": "8.0.101", + "version": "8.0.306", "allowPrerelease": false, "rollForward": "latestPatch" }, "tools": { - "dotnet": "8.0.101", + "dotnet": "8.0.306", "vs": { "version": "17.4.1" }, From 119a8d4217499d1af6f5f846cdd5015a7d3704b1 Mon Sep 17 00:00:00 2001 From: Joey Robichaud Date: Thu, 6 Jun 2024 09:41:48 -0700 Subject: [PATCH 12/20] Fix sdk version --- global.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/global.json b/global.json index 020b20b3a411d..f6fcb3b29b638 100644 --- a/global.json +++ b/global.json @@ -1,11 +1,11 @@ { "sdk": { - "version": "8.0.306", + "version": 
"8.0.301", "allowPrerelease": false, "rollForward": "latestPatch" }, "tools": { - "dotnet": "8.0.306", + "dotnet": "8.0.301", "vs": { "version": "17.4.1" }, From 8644b2c88722270fca615278dc1ccb0def3e2374 Mon Sep 17 00:00:00 2001 From: Joey Robichaud Date: Thu, 6 Jun 2024 10:23:52 -0700 Subject: [PATCH 13/20] Use a more recent VS --- global.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/global.json b/global.json index f6fcb3b29b638..369a1e097dfbe 100644 --- a/global.json +++ b/global.json @@ -7,9 +7,9 @@ "tools": { "dotnet": "8.0.301", "vs": { - "version": "17.4.1" + "version": "17.6.0" }, - "xcopy-msbuild": "17.4.1" + "xcopy-msbuild": "17.6.0-2" }, "msbuild-sdks": { "Microsoft.DotNet.Arcade.Sdk": "8.0.0-beta.24270.4", From 91a5a0f103c76d582a906e64609354179659331a Mon Sep 17 00:00:00 2001 From: Joey Robichaud Date: Thu, 6 Jun 2024 12:53:30 -0700 Subject: [PATCH 14/20] Revert to the SDK used in Arcade and update VS version --- global.json | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/global.json b/global.json index 369a1e097dfbe..431b9e41ef4ab 100644 --- a/global.json +++ b/global.json @@ -1,15 +1,15 @@ { "sdk": { - "version": "8.0.301", + "version": "8.0.101", "allowPrerelease": false, - "rollForward": "latestPatch" + "rollForward": "disable" }, "tools": { - "dotnet": "8.0.301", + "dotnet": "8.0.101", "vs": { - "version": "17.6.0" + "version": "17.8.0" }, - "xcopy-msbuild": "17.6.0-2" + "xcopy-msbuild": "17.8.1-2" }, "msbuild-sdks": { "Microsoft.DotNet.Arcade.Sdk": "8.0.0-beta.24270.4", From 01c76369437f99c09ef0f44a898b6d6a87301205 Mon Sep 17 00:00:00 2001 From: Joey Robichaud Date: Thu, 6 Jun 2024 16:19:25 -0700 Subject: [PATCH 15/20] Go back to the .NET 8p7 SDK which predates the ref readonly changes made in rc1. 
--- global.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/global.json b/global.json index 431b9e41ef4ab..8c63a12fd823f 100644 --- a/global.json +++ b/global.json @@ -1,11 +1,11 @@ { "sdk": { - "version": "8.0.101", + "version": "8.0.100-preview.7.23376.3", "allowPrerelease": false, "rollForward": "disable" }, "tools": { - "dotnet": "8.0.101", + "dotnet": "8.0.100-preview.7.23376.3", "vs": { "version": "17.8.0" }, From f435dbbc97f1b6671c694065f854a3e06e1ed005 Mon Sep 17 00:00:00 2001 From: Joey Robichaud Date: Fri, 7 Jun 2024 13:59:48 -0700 Subject: [PATCH 16/20] Revert "Backport test fixes from #70428" --- .../Collections/ImmutableSegmentedListBuilderTest.cs | 2 +- .../CodeAnalysisTest/Collections/ImmutableSegmentedListTest.cs | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/Compilers/Core/CodeAnalysisTest/Collections/ImmutableSegmentedListBuilderTest.cs b/src/Compilers/Core/CodeAnalysisTest/Collections/ImmutableSegmentedListBuilderTest.cs index f369a082b2b58..90c8bfb3be889 100644 --- a/src/Compilers/Core/CodeAnalysisTest/Collections/ImmutableSegmentedListBuilderTest.cs +++ b/src/Compilers/Core/CodeAnalysisTest/Collections/ImmutableSegmentedListBuilderTest.cs @@ -380,7 +380,7 @@ public void ItemRef() var builder = new ImmutableSegmentedList.Builder(list); ref readonly var safeRef = ref builder.ItemRef(1); - ref var unsafeRef = ref Unsafe.AsRef(in safeRef); + ref var unsafeRef = ref Unsafe.AsRef(safeRef); Assert.Equal(2, builder.ItemRef(1)); diff --git a/src/Compilers/Core/CodeAnalysisTest/Collections/ImmutableSegmentedListTest.cs b/src/Compilers/Core/CodeAnalysisTest/Collections/ImmutableSegmentedListTest.cs index b153618443d3e..87f16a050f0e5 100644 --- a/src/Compilers/Core/CodeAnalysisTest/Collections/ImmutableSegmentedListTest.cs +++ b/src/Compilers/Core/CodeAnalysisTest/Collections/ImmutableSegmentedListTest.cs @@ -815,7 +815,7 @@ public void ItemRef() var list = new[] { 1, 2, 3 }.ToImmutableSegmentedList(); ref readonly var safeRef = ref list.ItemRef(1); - ref var unsafeRef = ref Unsafe.AsRef(in safeRef); + ref var unsafeRef = ref Unsafe.AsRef(safeRef); Assert.Equal(2, list.ItemRef(1)); From b9076bb817e96b73ab04f910ba72711f51368847 Mon Sep 17 00:00:00 2001 From: Joey Robichaud Date: Fri, 7 Jun 2024 14:00:08 -0700 Subject: [PATCH 17/20] Require VS 17.6 for build --- global.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/global.json b/global.json index 8c63a12fd823f..616d08766c61a 100644 --- a/global.json +++ b/global.json @@ -7,9 +7,9 @@ "tools": { "dotnet": "8.0.100-preview.7.23376.3", "vs": { - "version": "17.8.0" + "version": "17.6.0" }, - "xcopy-msbuild": "17.8.1-2" + "xcopy-msbuild": "17.6.0-2" }, "msbuild-sdks": { "Microsoft.DotNet.Arcade.Sdk": "8.0.0-beta.24270.4", From 1c9c74745e41b2849a50fe54b73058564beb7a27 Mon Sep 17 00:00:00 2001 From: Joey Robichaud Date: Fri, 7 Jun 2024 16:50:24 -0700 Subject: [PATCH 18/20] Backport test fixes from #68212 --- .../CodeGen/CodeGenFunctionPointersTests.cs | 2 +- .../Semantic/Semantics/DelegateTypeTests.cs | 31 +++++++++++++------ 2 files changed, 22 insertions(+), 11 deletions(-) diff --git a/src/Compilers/CSharp/Test/Emit/CodeGen/CodeGenFunctionPointersTests.cs b/src/Compilers/CSharp/Test/Emit/CodeGen/CodeGenFunctionPointersTests.cs index 0ac5eae823bc5..7b852b5010787 100644 --- a/src/Compilers/CSharp/Test/Emit/CodeGen/CodeGenFunctionPointersTests.cs +++ b/src/Compilers/CSharp/Test/Emit/CodeGen/CodeGenFunctionPointersTests.cs @@ -2479,7 +2479,7 @@ static void 
Main() Console.WriteLine(t.ToString()); } } -", expectedOutput: "System.IntPtr"); +", expectedOutput: ExecutionConditionUtil.IsCoreClr ? "System.Void()" : "System.IntPtr"); verifier.VerifyIL("C.Main()", expectedIL: @" { diff --git a/src/Compilers/CSharp/Test/Semantic/Semantics/DelegateTypeTests.cs b/src/Compilers/CSharp/Test/Semantic/Semantics/DelegateTypeTests.cs index 43f066abf8992..ec0019dfd8b5e 100644 --- a/src/Compilers/CSharp/Test/Semantic/Semantics/DelegateTypeTests.cs +++ b/src/Compilers/CSharp/Test/Semantic/Semantics/DelegateTypeTests.cs @@ -9110,16 +9110,27 @@ static void Main() var comp = CreateCompilation(source, options: TestOptions.UnsafeReleaseExe); comp.VerifyDiagnostics(); - CompileAndVerify(comp, expectedOutput: -@"System.Func`1[(fnptr)[]] -System.Action`1[(fnptr)[]] -<>F{00000001}`2[System.Int32,(fnptr)[]] -<>A{00000001}`1[(fnptr)[]] -System.Func`1[(fnptr)[]] -System.Action`1[(fnptr)[]] -<>F{00000001}`2[System.Int32,(fnptr)[]] -<>A{00000001}`1[(fnptr)[]] -"); + CompileAndVerify(comp, expectedOutput: ExecutionConditionUtil.IsCoreClr + ? """ + System.Func`1[System.Int32&(System.Int32)[]] + System.Action`1[System.Void(System.Int32, System.Int32&)[]] + <>F{00000001}`2[System.Int32,System.Int32&(System.Int32)[]] + <>A{00000001}`1[System.Void(System.Int32, System.Int32&)[]] + System.Func`1[System.Int32&()[]] + System.Action`1[System.Void(System.Int32&)[]] + <>F{00000001}`2[System.Int32,System.Int32&()[]] + <>A{00000001}`1[System.Void(System.Int32&)[]] + """ + : """ + System.Func`1[(fnptr)[]] + System.Action`1[(fnptr)[]] + <>F{00000001}`2[System.Int32,(fnptr)[]] + <>A{00000001}`1[(fnptr)[]] + System.Func`1[(fnptr)[]] + System.Action`1[(fnptr)[]] + <>F{00000001}`2[System.Int32,(fnptr)[]] + <>A{00000001}`1[(fnptr)[]] + """); } [Fact] From 7cdcef36a9ce50fa8e93b942fb93c858fe204b0c Mon Sep 17 00:00:00 2001 From: Joey Robichaud Date: Tue, 11 Jun 2024 15:14:48 -0700 Subject: [PATCH 19/20] Skip failing test. The EndToEnd OverflowOnFluentCall test is fixed in #67913, but would be a significant change to backport. --- src/Compilers/CSharp/Test/EndToEnd/EndToEndTests.cs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/Compilers/CSharp/Test/EndToEnd/EndToEndTests.cs b/src/Compilers/CSharp/Test/EndToEnd/EndToEndTests.cs index 15446d280c917..3a9c6bfef3659 100644 --- a/src/Compilers/CSharp/Test/EndToEnd/EndToEndTests.cs +++ b/src/Compilers/CSharp/Test/EndToEnd/EndToEndTests.cs @@ -98,7 +98,7 @@ bool runTestAndCatch(int depth) // This test is a canary attempting to make sure that we don't regress the # of fluent calls that // the compiler can handle. 
[WorkItem(16669, "https://github.com/dotnet/roslyn/issues/16669")] - [ConditionalFact(typeof(WindowsOrLinuxOnly)), WorkItem(34880, "https://github.com/dotnet/roslyn/issues/34880")] + [ConditionalFact(typeof(WindowsOrLinuxOnly), AlwaysSkip = "https://github.com/dotnet/roslyn/issues/67912"), WorkItem(34880, "https://github.com/dotnet/roslyn/issues/34880")] public void OverflowOnFluentCall() { int numberFluentCalls = (IntPtr.Size, ExecutionConditionUtil.Configuration) switch From 30c81a5a066b4171eced3be044977816d84be13e Mon Sep 17 00:00:00 2001 From: Joey Robichaud Date: Tue, 11 Jun 2024 15:49:34 -0700 Subject: [PATCH 20/20] Backport RestoreSource from #73532 --- eng/InternalTools.props | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/eng/InternalTools.props b/eng/InternalTools.props index 1bb4d145598c3..e25171cebacde 100644 --- a/eng/InternalTools.props +++ b/eng/InternalTools.props @@ -6,6 +6,10 @@ https://devdiv.pkgs.visualstudio.com/_packaging/Engineering/nuget/v3/index.json; https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-public/nuget/v3/index.json; + + $(RestoreSources); + https://pkgs.dev.azure.com/devdiv/_packaging/dotnet-core-internal-tooling/nuget/v3/index.json +
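Looking back at PATCH 18, the expectedOutput split reflects how reflection renders function pointer types on different runtimes. A small sketch, not taken from the patches, follows; it must be compiled with unsafe code enabled, the type and member names are illustrative, and the two output strings in the comments are quoted from the updated test expectations rather than re-verified here.

    using System;

    unsafe class FnPtrReflectionSketch
    {
        public static delegate*<void> F;

        static void Noop() { }

        static void Main()
        {
            F = &Noop;
            Type t = typeof(FnPtrReflectionSketch).GetField("F")!.FieldType;
            Console.WriteLine(t.ToString());
            // Recent CoreCLR (with function pointer reflection): "System.Void()"
            // .NET Framework and older runtimes:                 "System.IntPtr"
        }
    }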