diff --git a/.vsts-ci.yml b/.vsts-ci.yml
index 06e7588923..962c50693b 100644
--- a/.vsts-ci.yml
+++ b/.vsts-ci.yml
@@ -10,6 +10,7 @@ trigger:
branches:
include:
- main
+ - release/*
pr:
autoCancel: false
@@ -38,11 +39,11 @@ stages:
# For public or PR jobs, use the hosted pool. For internal jobs use the internal pool.
# Will eventually change this to two BYOC pools.
${{ if ne(variables['System.TeamProject'], 'internal') }}:
- name: NetCorePublic-Pool
- queue: BuildPool.Windows.10.Amd64.VS2019.Open
+ name: NetCore1ESPool-Svc-Public
+ demands: ImageOverride -equals Build.Windows.10.Amd64.VS2019.Open
${{ if eq(variables['System.TeamProject'], 'internal') }}:
- name: NetCoreInternal-Pool
- queue: BuildPool.Windows.10.Amd64.VS2019
+ name: NetCore1ESPool-Svc-Internal
+ demands: ImageOverride -equals Build.Windows.10.Amd64.VS2019
variables:
# Only enable publishing in official builds.
- ${{ if and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
@@ -94,6 +95,7 @@ stages:
- script: eng\common\cibuild.cmd
-configuration $(_BuildConfig)
-prepareMachine
+ -msbuildEngine dotnet
$(_BuildArgs)
displayName: Build and Publish
- task: PublishBuildArtifacts@1
diff --git a/build.cmd b/build.cmd
index 5dca62a2a5..f0bb763d1b 100644
--- a/build.cmd
+++ b/build.cmd
@@ -1,3 +1,3 @@
@echo off
-powershell -NoLogo -NoProfile -ExecutionPolicy ByPass %~dp0eng\common\build.ps1 -build -restore %*
+powershell -NoLogo -NoProfile -ExecutionPolicy ByPass %~dp0eng\common\build.ps1 -build -restore -msbuildEngine dotnet %*
exit /b %ErrorLevel%
diff --git a/docs/src/Binding/Binding.csproj b/docs/src/Binding/Binding.csproj
index e0f271acf4..b24eeb3877 100644
--- a/docs/src/Binding/Binding.csproj
+++ b/docs/src/Binding/Binding.csproj
@@ -2,7 +2,7 @@
     <OutputType>Exe</OutputType>
-    <TargetFramework>net5.0</TargetFramework>
+    <TargetFramework>net6.0</TargetFramework>
     <LangVersion>9</LangVersion>
diff --git a/eng/Version.Details.xml b/eng/Version.Details.xml
index 6ca4714942..9bcc98b8fb 100644
--- a/eng/Version.Details.xml
+++ b/eng/Version.Details.xml
@@ -3,9 +3,9 @@
-
+
       <Uri>https://github.com/dotnet/arcade</Uri>
-      <Sha>0ca849f0b71866b007fedaaa938cee63f8d056a6</Sha>
+      <Sha>9b7027ba718462aa6410cef61a8be5a4283e7528</Sha>
diff --git a/eng/common/SetupNugetSources.ps1 b/eng/common/SetupNugetSources.ps1
index a0b5fc37f4..18823840b1 100644
--- a/eng/common/SetupNugetSources.ps1
+++ b/eng/common/SetupNugetSources.ps1
@@ -158,4 +158,10 @@ if ($dotnet5Source -ne $null) {
AddPackageSource -Sources $sources -SourceName "dotnet5-internal-transport" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/internal/_packaging/dotnet5-internal-transport/nuget/v2" -Creds $creds -Username $userName -Password $Password
}
+$dotnet6Source = $sources.SelectSingleNode("add[@key='dotnet6']")
+if ($dotnet6Source -ne $null) {
+ AddPackageSource -Sources $sources -SourceName "dotnet6-internal" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/internal/_packaging/dotnet6-internal/nuget/v2" -Creds $creds -Username $userName -Password $Password
+ AddPackageSource -Sources $sources -SourceName "dotnet6-internal-transport" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/internal/_packaging/dotnet6-internal-transport/nuget/v2" -Creds $creds -Username $userName -Password $Password
+}
+
$doc.Save($filename)
diff --git a/eng/common/SetupNugetSources.sh b/eng/common/SetupNugetSources.sh
index 2734601c13..ad3fb74fd2 100644
--- a/eng/common/SetupNugetSources.sh
+++ b/eng/common/SetupNugetSources.sh
@@ -129,6 +129,30 @@ if [ "$?" == "0" ]; then
PackageSources+=('dotnet5-internal-transport')
fi
+# Ensure dotnet6-internal and dotnet6-internal-transport are in the packageSources if the public dotnet6 feeds are present
+grep -i "<add key=\"dotnet6\"" $ConfigFile
+if [ "$?" == "0" ]; then
+  grep -i "<add key=\"dotnet6-internal\">" $ConfigFile
+  if [ "$?" != "0" ]; then
+    echo "Adding dotnet6-internal to the packageSources."
+    PackageSourcesNodeFooter="</packageSources>"
+    PackageSourceTemplate="${TB}<add key=\"dotnet6-internal\" value=\"https://pkgs.dev.azure.com/dnceng/internal/_packaging/dotnet6-internal/nuget/v2\" />"
+
+    sed -i.bak "s|$PackageSourcesNodeFooter|$PackageSourceTemplate${NL}$PackageSourcesNodeFooter|" $ConfigFile
+  fi
+  PackageSources+=('dotnet6-internal')
+
+  grep -i "<add key=\"dotnet6-internal-transport\">" $ConfigFile
+  if [ "$?" != "0" ]; then
+    echo "Adding dotnet6-internal-transport to the packageSources."
+    PackageSourcesNodeFooter="</packageSources>"
+    PackageSourceTemplate="${TB}<add key=\"dotnet6-internal-transport\" value=\"https://pkgs.dev.azure.com/dnceng/internal/_packaging/dotnet6-internal-transport/nuget/v2\" />"
+
+    sed -i.bak "s|$PackageSourcesNodeFooter|$PackageSourceTemplate${NL}$PackageSourcesNodeFooter|" $ConfigFile
+  fi
+  PackageSources+=('dotnet6-internal-transport')
+fi
+
# I want things split line by line
PrevIFS=$IFS
IFS=$'\n'
diff --git a/eng/common/build.ps1 b/eng/common/build.ps1
index 94a91c0817..8943da242f 100644
--- a/eng/common/build.ps1
+++ b/eng/common/build.ps1
@@ -25,6 +25,7 @@ Param(
[switch] $prepareMachine,
[string] $runtimeSourceFeed = '',
[string] $runtimeSourceFeedKey = '',
+ [switch] $excludePrereleaseVS,
[switch] $help,
[Parameter(ValueFromRemainingArguments=$true)][String[]]$properties
)
@@ -65,6 +66,7 @@ function Print-Usage() {
Write-Host " -prepareMachine Prepare machine for CI run, clean up processes after build"
Write-Host " -warnAsError Sets warnaserror msbuild parameter ('true' or 'false')"
Write-Host " -msbuildEngine Msbuild engine to use to run build ('dotnet', 'vs', or unspecified)."
+ Write-Host " -excludePrereleaseVS Set to exclude build engines in prerelease versions of Visual Studio"
Write-Host ""
Write-Host "Command line arguments not listed above are passed thru to msbuild."
diff --git a/eng/common/cross/arm/sources.list.trusty b/eng/common/cross/arm/sources.list.trusty
deleted file mode 100644
index 07d8f88d82..0000000000
--- a/eng/common/cross/arm/sources.list.trusty
+++ /dev/null
@@ -1,11 +0,0 @@
-deb http://ports.ubuntu.com/ubuntu-ports/ trusty main restricted universe
-deb-src http://ports.ubuntu.com/ubuntu-ports/ trusty main restricted universe
-
-deb http://ports.ubuntu.com/ubuntu-ports/ trusty-updates main restricted universe
-deb-src http://ports.ubuntu.com/ubuntu-ports/ trusty-updates main restricted universe
-
-deb http://ports.ubuntu.com/ubuntu-ports/ trusty-backports main restricted
-deb-src http://ports.ubuntu.com/ubuntu-ports/ trusty-backports main restricted
-
-deb http://ports.ubuntu.com/ubuntu-ports/ trusty-security main restricted universe multiverse
-deb-src http://ports.ubuntu.com/ubuntu-ports/ trusty-security main restricted universe multiverse
\ No newline at end of file
diff --git a/eng/common/cross/arm/trusty-lttng-2.4.patch b/eng/common/cross/arm/trusty-lttng-2.4.patch
deleted file mode 100644
index 8e4dd7ae71..0000000000
--- a/eng/common/cross/arm/trusty-lttng-2.4.patch
+++ /dev/null
@@ -1,71 +0,0 @@
-From e72c9d7ead60e3317bd6d1fade995c07021c947b Mon Sep 17 00:00:00 2001
-From: Mathieu Desnoyers
-Date: Thu, 7 May 2015 13:25:04 -0400
-Subject: [PATCH] Fix: building probe providers with C++ compiler
-
-Robert Daniels wrote:
-> > I'm attempting to use lttng userspace tracing with a C++ application
-> > on an ARM platform. I'm using GCC 4.8.4 on Linux 3.14 with the 2.6
-> > release of lttng. I've compiled lttng-modules, lttng-ust, and
-> > lttng-tools and have been able to get a simple test working with C
-> > code. When I attempt to run the hello.cxx test on my target it will
-> > segfault.
->
->
-> I spent a little time digging into this issue and finally discovered the
-> cause of my segfault with ARM C++ tracepoints.
->
-> There is a struct called 'lttng_event' in ust-events.h which contains an
-> empty union 'u'. This was the cause of my issue. Under C, this empty union
-> compiles to a zero byte member while under C++ it compiles to a one byte
-> member, and in my case was four-byte aligned which caused my C++ code to
-> have the 'cds_list_head node' offset incorrectly by four bytes. This lead
-> to an incorrect linked list structure which caused my issue.
->
-> Since this union is empty, I simply removed it from the struct and everything
-> worked correctly.
->
-> I don't know the history or purpose behind this empty union so I'd like to
-> know if this is a safe fix. If it is I can submit a patch with the union
-> removed.
-
-That's a very nice catch!
-
-We do not support building tracepoint probe provider with
-g++ yet, as stated in lttng-ust(3):
-
-"- Note for C++ support: although an application instrumented with
- tracepoints can be compiled with g++, tracepoint probes should be
- compiled with gcc (only tested with gcc so far)."
-
-However, if it works fine with this fix, then I'm tempted to take it,
-especially because removing the empty union does not appear to affect
-the layout of struct lttng_event as seen from liblttng-ust, which must
-be compiled with a C compiler, and from probe providers compiled with
-a C compiler. So all we are changing is the layout of a probe provider
-compiled with a C++ compiler, which is anyway buggy at the moment,
-because it is not compatible with the layout expected by liblttng-ust
-compiled with a C compiler.
-
-Reported-by: Robert Daniels
-Signed-off-by: Mathieu Desnoyers
----
- include/lttng/ust-events.h | 2 --
- 1 file changed, 2 deletions(-)
-
-diff --git a/usr/include/lttng/ust-events.h b/usr/include/lttng/ust-events.h
-index 328a875..3d7a274 100644
---- a/usr/include/lttng/ust-events.h
-+++ b/usr/include/lttng/ust-events.h
-@@ -407,8 +407,6 @@ struct lttng_event {
- void *_deprecated1;
- struct lttng_ctx *ctx;
- enum lttng_ust_instrumentation instrumentation;
-- union {
-- } u;
- struct cds_list_head node; /* Event list in session */
- struct cds_list_head _deprecated2;
- void *_deprecated3;
---
-2.7.4
-
diff --git a/eng/common/cross/arm/trusty.patch b/eng/common/cross/arm/trusty.patch
deleted file mode 100644
index 2f2972f8eb..0000000000
--- a/eng/common/cross/arm/trusty.patch
+++ /dev/null
@@ -1,97 +0,0 @@
-diff -u -r a/usr/include/urcu/uatomic/generic.h b/usr/include/urcu/uatomic/generic.h
---- a/usr/include/urcu/uatomic/generic.h 2014-03-28 06:04:42.000000000 +0900
-+++ b/usr/include/urcu/uatomic/generic.h 2017-02-13 10:35:21.189927116 +0900
-@@ -65,17 +65,17 @@
- switch (len) {
- #ifdef UATOMIC_HAS_ATOMIC_BYTE
- case 1:
-- return __sync_val_compare_and_swap_1(addr, old, _new);
-+ return __sync_val_compare_and_swap_1((uint8_t *) addr, old, _new);
- #endif
- #ifdef UATOMIC_HAS_ATOMIC_SHORT
- case 2:
-- return __sync_val_compare_and_swap_2(addr, old, _new);
-+ return __sync_val_compare_and_swap_2((uint16_t *) addr, old, _new);
- #endif
- case 4:
-- return __sync_val_compare_and_swap_4(addr, old, _new);
-+ return __sync_val_compare_and_swap_4((uint32_t *) addr, old, _new);
- #if (CAA_BITS_PER_LONG == 64)
- case 8:
-- return __sync_val_compare_and_swap_8(addr, old, _new);
-+ return __sync_val_compare_and_swap_8((uint64_t *) addr, old, _new);
- #endif
- }
- _uatomic_link_error();
-@@ -100,20 +100,20 @@
- switch (len) {
- #ifdef UATOMIC_HAS_ATOMIC_BYTE
- case 1:
-- __sync_and_and_fetch_1(addr, val);
-+ __sync_and_and_fetch_1((uint8_t *) addr, val);
- return;
- #endif
- #ifdef UATOMIC_HAS_ATOMIC_SHORT
- case 2:
-- __sync_and_and_fetch_2(addr, val);
-+ __sync_and_and_fetch_2((uint16_t *) addr, val);
- return;
- #endif
- case 4:
-- __sync_and_and_fetch_4(addr, val);
-+ __sync_and_and_fetch_4((uint32_t *) addr, val);
- return;
- #if (CAA_BITS_PER_LONG == 64)
- case 8:
-- __sync_and_and_fetch_8(addr, val);
-+ __sync_and_and_fetch_8((uint64_t *) addr, val);
- return;
- #endif
- }
-@@ -139,20 +139,20 @@
- switch (len) {
- #ifdef UATOMIC_HAS_ATOMIC_BYTE
- case 1:
-- __sync_or_and_fetch_1(addr, val);
-+ __sync_or_and_fetch_1((uint8_t *) addr, val);
- return;
- #endif
- #ifdef UATOMIC_HAS_ATOMIC_SHORT
- case 2:
-- __sync_or_and_fetch_2(addr, val);
-+ __sync_or_and_fetch_2((uint16_t *) addr, val);
- return;
- #endif
- case 4:
-- __sync_or_and_fetch_4(addr, val);
-+ __sync_or_and_fetch_4((uint32_t *) addr, val);
- return;
- #if (CAA_BITS_PER_LONG == 64)
- case 8:
-- __sync_or_and_fetch_8(addr, val);
-+ __sync_or_and_fetch_8((uint64_t *) addr, val);
- return;
- #endif
- }
-@@ -180,17 +180,17 @@
- switch (len) {
- #ifdef UATOMIC_HAS_ATOMIC_BYTE
- case 1:
-- return __sync_add_and_fetch_1(addr, val);
-+ return __sync_add_and_fetch_1((uint8_t *) addr, val);
- #endif
- #ifdef UATOMIC_HAS_ATOMIC_SHORT
- case 2:
-- return __sync_add_and_fetch_2(addr, val);
-+ return __sync_add_and_fetch_2((uint16_t *) addr, val);
- #endif
- case 4:
-- return __sync_add_and_fetch_4(addr, val);
-+ return __sync_add_and_fetch_4((uint32_t *) addr, val);
- #if (CAA_BITS_PER_LONG == 64)
- case 8:
-- return __sync_add_and_fetch_8(addr, val);
-+ return __sync_add_and_fetch_8((uint64_t *) addr, val);
- #endif
- }
- _uatomic_link_error();
diff --git a/eng/common/cross/arm64/sources.list.trusty b/eng/common/cross/arm64/sources.list.trusty
deleted file mode 100644
index 07d8f88d82..0000000000
--- a/eng/common/cross/arm64/sources.list.trusty
+++ /dev/null
@@ -1,11 +0,0 @@
-deb http://ports.ubuntu.com/ubuntu-ports/ trusty main restricted universe
-deb-src http://ports.ubuntu.com/ubuntu-ports/ trusty main restricted universe
-
-deb http://ports.ubuntu.com/ubuntu-ports/ trusty-updates main restricted universe
-deb-src http://ports.ubuntu.com/ubuntu-ports/ trusty-updates main restricted universe
-
-deb http://ports.ubuntu.com/ubuntu-ports/ trusty-backports main restricted
-deb-src http://ports.ubuntu.com/ubuntu-ports/ trusty-backports main restricted
-
-deb http://ports.ubuntu.com/ubuntu-ports/ trusty-security main restricted universe multiverse
-deb-src http://ports.ubuntu.com/ubuntu-ports/ trusty-security main restricted universe multiverse
\ No newline at end of file
diff --git a/eng/common/cross/arm64/tizen-fetch.sh b/eng/common/cross/arm64/tizen-fetch.sh
index a48a6f51c4..16d1301f21 100644
--- a/eng/common/cross/arm64/tizen-fetch.sh
+++ b/eng/common/cross/arm64/tizen-fetch.sh
@@ -157,7 +157,7 @@ fetch_tizen_pkgs()
Inform "Initialize arm base"
fetch_tizen_pkgs_init standard base
Inform "fetch common packages"
-fetch_tizen_pkgs aarch64 gcc glibc glibc-devel libicu libicu-devel libatomic linux-glibc-devel
+fetch_tizen_pkgs aarch64 gcc glibc glibc-devel libicu libicu-devel libatomic linux-glibc-devel keyutils keyutils-devel libkeyutils
Inform "fetch coreclr packages"
fetch_tizen_pkgs aarch64 lldb lldb-devel libgcc libstdc++ libstdc++-devel libunwind libunwind-devel lttng-ust-devel lttng-ust userspace-rcu-devel userspace-rcu
Inform "fetch corefx packages"
diff --git a/eng/common/cross/build-rootfs.sh b/eng/common/cross/build-rootfs.sh
index b26622444f..5c05b39f10 100644
--- a/eng/common/cross/build-rootfs.sh
+++ b/eng/common/cross/build-rootfs.sh
@@ -6,10 +6,10 @@ usage()
{
echo "Usage: $0 [BuildArch] [CodeName] [lldbx.y] [--skipunmount] --rootfsdir ]"
echo "BuildArch can be: arm(default), armel, arm64, x86"
- echo "CodeName - optional, Code name for Linux, can be: trusty, xenial(default), zesty, bionic, alpine, alpine3.9 or alpine3.13. If BuildArch is armel, LinuxCodeName is jessie(default) or tizen."
- echo " for FreeBSD can be: freebsd11 or freebsd12."
+ echo "CodeName - optional, Code name for Linux, can be: xenial(default), zesty, bionic, alpine, alpine3.9 or alpine3.13. If BuildArch is armel, LinuxCodeName is jessie(default) or tizen."
+ echo " for FreeBSD can be: freebsd11, freebsd12, freebsd13"
echo " for illumos can be: illumos."
- echo "lldbx.y - optional, LLDB version, can be: lldb3.9(default), lldb4.0, lldb5.0, lldb6.0 no-lldb. Ignored for alpine and FReeBSD"
+ echo "lldbx.y - optional, LLDB version, can be: lldb3.9(default), lldb4.0, lldb5.0, lldb6.0 no-lldb. Ignored for alpine and FreeBSD"
echo "--skipunmount - optional, will skip the unmount of rootfs folder."
echo "--use-mirror - optional, use mirror URL to fetch resources, when available."
exit 1
@@ -33,7 +33,6 @@ __AlpinePackages="alpine-base"
__AlpinePackages+=" build-base"
__AlpinePackages+=" linux-headers"
__AlpinePackagesEdgeCommunity=" lldb-dev"
-__AlpinePackagesEdgeMain=" llvm10-libs"
__AlpinePackagesEdgeMain+=" python3"
__AlpinePackagesEdgeMain+=" libedit"
@@ -61,19 +60,25 @@ __AlpinePackages+=" krb5-dev"
__AlpinePackages+=" openssl-dev"
__AlpinePackages+=" zlib-dev"
-__FreeBSDBase="12.1-RELEASE"
+__FreeBSDBase="12.2-RELEASE"
__FreeBSDPkg="1.12.0"
+__FreeBSDABI="12"
__FreeBSDPackages="libunwind"
__FreeBSDPackages+=" icu"
__FreeBSDPackages+=" libinotify"
__FreeBSDPackages+=" lttng-ust"
__FreeBSDPackages+=" krb5"
+__FreeBSDPackages+=" terminfo-db"
__IllumosPackages="icu-64.2nb2"
__IllumosPackages+=" mit-krb5-1.16.2nb4"
__IllumosPackages+=" openssl-1.1.1e"
__IllumosPackages+=" zlib-1.2.11"
+# ML.NET dependencies
+__UbuntuPackages+=" libomp5"
+__UbuntuPackages+=" libomp-dev"
+
__UseMirror=0
__UnprocessedBuildArgs=
@@ -111,6 +116,8 @@ while :; do
__UbuntuArch=s390x
__UbuntuRepo="http://ports.ubuntu.com/ubuntu-ports/"
__UbuntuPackages=$(echo ${__UbuntuPackages} | sed 's/ libunwind8-dev//')
+ __UbuntuPackages=$(echo ${__UbuntuPackages} | sed 's/ libomp-dev//')
+ __UbuntuPackages=$(echo ${__UbuntuPackages} | sed 's/ libomp5//')
unset __LLDB_Package
;;
x86)
@@ -139,11 +146,6 @@ while :; do
no-lldb)
unset __LLDB_Package
;;
- trusty) # Ubuntu 14.04
- if [ "$__CodeName" != "jessie" ]; then
- __CodeName=trusty
- fi
- ;;
xenial) # Ubuntu 16.04
if [ "$__CodeName" != "jessie" ]; then
__CodeName=xenial
@@ -187,6 +189,8 @@ while :; do
__CodeName=alpine
__UbuntuRepo=
__AlpineVersion=3.9
+ __AlpinePackagesEdgeMain+=" llvm11-libs"
+ __AlpinePackagesEdgeMain+=" clang-libs"
;;
alpine3.13)
__CodeName=alpine
@@ -197,15 +201,24 @@ while :; do
__AlpinePackagesEdgeCommunity=
__AlpinePackages+=$__AlpinePackagesEdgeMain
__AlpinePackagesEdgeMain=
+ __AlpinePackages+=" llvm10-libs"
;;
freebsd11)
__FreeBSDBase="11.3-RELEASE"
+ __FreeBSDABI="11"
;&
freebsd12)
__CodeName=freebsd
__BuildArch=x64
__SkipUnmount=1
;;
+ freebsd13)
+ __CodeName=freebsd
+ __FreeBSDBase="13.0-RELEASE"
+ __FreeBSDABI="13"
+ __BuildArch=x64
+ __SkipUnmount=1
+ ;;
illumos)
__CodeName=illumos
__BuildArch=x64
@@ -283,9 +296,9 @@ if [[ "$__CodeName" == "alpine" ]]; then
rm -r $__ApkToolsDir
elif [[ "$__CodeName" == "freebsd" ]]; then
mkdir -p $__RootfsDir/usr/local/etc
+ JOBS="$(getconf _NPROCESSORS_ONLN)"
wget -O - https://download.freebsd.org/ftp/releases/amd64/${__FreeBSDBase}/base.txz | tar -C $__RootfsDir -Jxf - ./lib ./usr/lib ./usr/libdata ./usr/include ./usr/share/keys ./etc ./bin/freebsd-version
- # For now, ask for 11 ABI even on 12. This can be revisited later.
- echo "ABI = \"FreeBSD:11:amd64\"; FINGERPRINTS = \"${__RootfsDir}/usr/share/keys\"; REPOS_DIR = [\"${__RootfsDir}/etc/pkg\"]; REPO_AUTOUPDATE = NO; RUN_SCRIPTS = NO;" > ${__RootfsDir}/usr/local/etc/pkg.conf
+ echo "ABI = \"FreeBSD:${__FreeBSDABI}:amd64\"; FINGERPRINTS = \"${__RootfsDir}/usr/share/keys\"; REPOS_DIR = [\"${__RootfsDir}/etc/pkg\"]; REPO_AUTOUPDATE = NO; RUN_SCRIPTS = NO;" > ${__RootfsDir}/usr/local/etc/pkg.conf
echo "FreeBSD: { url: "pkg+http://pkg.FreeBSD.org/\${ABI}/quarterly", mirror_type: \"srv\", signature_type: \"fingerprints\", fingerprints: \"${__RootfsDir}/usr/share/keys/pkg\", enabled: yes }" > ${__RootfsDir}/etc/pkg/FreeBSD.conf
mkdir -p $__RootfsDir/tmp
# get and build package manager
@@ -293,7 +306,7 @@ elif [[ "$__CodeName" == "freebsd" ]]; then
cd $__RootfsDir/tmp/pkg-${__FreeBSDPkg}
# needed for install to succeed
mkdir -p $__RootfsDir/host/etc
- ./autogen.sh && ./configure --prefix=$__RootfsDir/host && make && make install
+ ./autogen.sh && ./configure --prefix=$__RootfsDir/host && make -j "$JOBS" && make install
rm -rf $__RootfsDir/tmp/pkg-${__FreeBSDPkg}
# install packages we need.
INSTALL_AS_USER=$(whoami) $__RootfsDir/host/sbin/pkg -r $__RootfsDir -C $__RootfsDir/usr/local/etc/pkg.conf update
@@ -356,13 +369,6 @@ elif [[ -n $__CodeName ]]; then
umount $__RootfsDir/* || true
fi
- if [[ "$__BuildArch" == "arm" && "$__CodeName" == "trusty" ]]; then
- pushd $__RootfsDir
- patch -p1 < $__CrossDir/$__BuildArch/trusty.patch
- patch -p1 < $__CrossDir/$__BuildArch/trusty-lttng-2.4.patch
- popd
- fi
-
if [[ "$__BuildArch" == "armel" && "$__CodeName" == "jessie" ]]; then
pushd $__RootfsDir
patch -p1 < $__CrossDir/$__BuildArch/armel.jessie.patch
diff --git a/eng/common/cross/x86/sources.list.trusty b/eng/common/cross/x86/sources.list.trusty
deleted file mode 100644
index 9b3085436e..0000000000
--- a/eng/common/cross/x86/sources.list.trusty
+++ /dev/null
@@ -1,11 +0,0 @@
-deb http://archive.ubuntu.com/ubuntu/ trusty main restricted universe
-deb-src http://archive.ubuntu.com/ubuntu/ trusty main restricted universe
-
-deb http://archive.ubuntu.com/ubuntu/ trusty-updates main restricted universe
-deb-src http://archive.ubuntu.com/ubuntu/ trusty-updates main restricted universe
-
-deb http://archive.ubuntu.com/ubuntu/ trusty-backports main restricted
-deb-src http://archive.ubuntu.com/ubuntu/ trusty-backports main restricted
-
-deb http://archive.ubuntu.com/ubuntu/ trusty-security main restricted universe multiverse
-deb-src http://archive.ubuntu.com/ubuntu/ trusty-security main restricted universe multiverse
diff --git a/eng/common/dotnet-install.sh b/eng/common/dotnet-install.sh
index d6efeb4434..fdfeea66e7 100755
--- a/eng/common/dotnet-install.sh
+++ b/eng/common/dotnet-install.sh
@@ -70,7 +70,7 @@ case $cpuname in
;;
esac
-dotnetRoot="$repo_root/.dotnet"
+dotnetRoot="${repo_root}.dotnet"
if [[ $architecture != "" ]] && [[ $architecture != $buildarch ]]; then
dotnetRoot="$dotnetRoot/$architecture"
fi
diff --git a/eng/common/generate-locproject.ps1 b/eng/common/generate-locproject.ps1
index 9e2f7ad98b..25e97ac007 100644
--- a/eng/common/generate-locproject.ps1
+++ b/eng/common/generate-locproject.ps1
@@ -14,7 +14,7 @@ $ErrorActionPreference = "Stop"
Import-Module -Name (Join-Path $PSScriptRoot 'native\CommonLibrary.psm1')
-$exclusionsFilePath = "$SourcesDirectory\Localize\LocExclusions.json"
+$exclusionsFilePath = "$SourcesDirectory\eng\Localize\LocExclusions.json"
$exclusions = @{ Exclusions = @() }
if (Test-Path -Path $exclusionsFilePath)
{
@@ -25,8 +25,15 @@ Push-Location "$SourcesDirectory" # push location for Resolve-Path -Relative to
# Template files
$jsonFiles = @()
-$jsonFiles += Get-ChildItem -Recurse -Path "$SourcesDirectory" | Where-Object { $_.FullName -Match "\.template\.config\\localize\\en\..+\.json" } # .NET templating pattern
-$jsonFiles += Get-ChildItem -Recurse -Path "$SourcesDirectory" | Where-Object { $_.FullName -Match "en\\strings\.json" } # current winforms pattern
+$jsonTemplateFiles = Get-ChildItem -Recurse -Path "$SourcesDirectory" | Where-Object { $_.FullName -Match "\.template\.config\\localize\\.+\.en\.json" } # .NET templating pattern
+$jsonTemplateFiles | ForEach-Object {
+ $null = $_.Name -Match "(.+)\.[\w-]+\.json" # matches '[filename].[langcode].json
+
+ $destinationFile = "$($_.Directory.FullName)\$($Matches.1).json"
+ $jsonFiles += Copy-Item "$($_.FullName)" -Destination $destinationFile -PassThru
+}
+
+$jsonWinformsTemplateFiles = Get-ChildItem -Recurse -Path "$SourcesDirectory" | Where-Object { $_.FullName -Match "en\\strings\.json" } # current winforms pattern
$xlfFiles = @()
@@ -38,13 +45,13 @@ if ($allXlfFiles) {
$langXlfFiles = Get-ChildItem -Recurse -Path "$SourcesDirectory\*\*.$firstLangCode.xlf"
}
$langXlfFiles | ForEach-Object {
- $null = $_.Name -Match "([^.]+)\.[\w-]+\.xlf" # matches '[filename].[langcode].xlf'
+ $null = $_.Name -Match "(.+)\.[\w-]+\.xlf" # matches '[filename].[langcode].xlf
$destinationFile = "$($_.Directory.FullName)\$($Matches.1).xlf"
$xlfFiles += Copy-Item "$($_.FullName)" -Destination $destinationFile -PassThru
}
-$locFiles = $jsonFiles + $xlfFiles
+$locFiles = $jsonFiles + $jsonWinformsTemplateFiles + $xlfFiles
$locJson = @{
Projects = @(
@@ -52,7 +59,7 @@ $locJson = @{
LanguageSet = $LanguageSet
LocItems = @(
$locFiles | ForEach-Object {
- $outputPath = "Localize\$(($_.DirectoryName | Resolve-Path -Relative) + "\")"
+ $outputPath = "$(($_.DirectoryName | Resolve-Path -Relative) + "\")"
$continue = $true
foreach ($exclusion in $exclusions.Exclusions) {
if ($outputPath.Contains($exclusion))
@@ -66,10 +73,19 @@ $locJson = @{
}
if ($continue)
{
- return @{
- SourceFile = $sourceFile
- CopyOption = "LangIDOnName"
- OutputPath = $outputPath
+ if ($_.Directory.Name -eq 'en' -and $_.Extension -eq '.json') {
+ return @{
+ SourceFile = $sourceFile
+ CopyOption = "LangIDOnPath"
+ OutputPath = "$($_.Directory.Parent.FullName | Resolve-Path -Relative)\"
+ }
+ }
+ else {
+ return @{
+ SourceFile = $sourceFile
+ CopyOption = "LangIDOnName"
+ OutputPath = $outputPath
+ }
}
}
}
@@ -79,19 +95,19 @@ $locJson = @{
}
$json = ConvertTo-Json $locJson -Depth 5
-Write-Host "(NETCORE_ENGINEERING_TELEMETRY=Build) LocProject.json generated:`n`n$json`n`n"
+Write-Host "LocProject.json generated:`n`n$json`n`n"
Pop-Location
if (!$UseCheckedInLocProjectJson) {
- New-Item "$SourcesDirectory\Localize\LocProject.json" -Force # Need this to make sure the Localize directory is created
- Set-Content "$SourcesDirectory\Localize\LocProject.json" $json
+ New-Item "$SourcesDirectory\eng\Localize\LocProject.json" -Force # Need this to make sure the Localize directory is created
+ Set-Content "$SourcesDirectory\eng\Localize\LocProject.json" $json
}
else {
- New-Item "$SourcesDirectory\Localize\LocProject-generated.json" -Force # Need this to make sure the Localize directory is created
- Set-Content "$SourcesDirectory\Localize\LocProject-generated.json" $json
+ New-Item "$SourcesDirectory\eng\Localize\LocProject-generated.json" -Force # Need this to make sure the Localize directory is created
+ Set-Content "$SourcesDirectory\eng\Localize\LocProject-generated.json" $json
- if ((Get-FileHash "$SourcesDirectory\Localize\LocProject-generated.json").Hash -ne (Get-FileHash "$SourcesDirectory\Localize\LocProject.json").Hash) {
- Write-PipelineTaskError -Type "warning" -Message "Existing LocProject.json differs from generated LocProject.json. Download LocProject-generated.json and compare them."
+ if ((Get-FileHash "$SourcesDirectory\eng\Localize\LocProject-generated.json").Hash -ne (Get-FileHash "$SourcesDirectory\eng\Localize\LocProject.json").Hash) {
+ Write-PipelineTelemetryError -Category "OneLocBuild" -Message "Existing LocProject.json differs from generated LocProject.json. Download LocProject-generated.json and compare them."
exit 1
}
diff --git a/eng/common/internal-feed-operations.ps1 b/eng/common/internal-feed-operations.ps1
index 418c09930c..92b77347d9 100644
--- a/eng/common/internal-feed-operations.ps1
+++ b/eng/common/internal-feed-operations.ps1
@@ -45,11 +45,11 @@ function SetupCredProvider {
# Then, we set the 'VSS_NUGET_EXTERNAL_FEED_ENDPOINTS' environment variable to restore from the stable
# feeds successfully
- $nugetConfigPath = "$RepoRoot\NuGet.config"
+ $nugetConfigPath = Join-Path $RepoRoot "NuGet.config"
if (-Not (Test-Path -Path $nugetConfigPath)) {
Write-PipelineTelemetryError -Category 'Build' -Message 'NuGet.config file not found in repo root!'
- ExitWithExitCode 1
+ ExitWithExitCode 1
}
$endpoints = New-Object System.Collections.ArrayList
@@ -85,7 +85,7 @@ function SetupCredProvider {
#Workaround for https://github.com/microsoft/msbuild/issues/4430
function InstallDotNetSdkAndRestoreArcade {
- $dotnetTempDir = "$RepoRoot\dotnet"
+ $dotnetTempDir = Join-Path $RepoRoot "dotnet"
$dotnetSdkVersion="2.1.507" # After experimentation we know this version works when restoring the SDK (compared to 3.0.*)
$dotnet = "$dotnetTempDir\dotnet.exe"
$restoreProjPath = "$PSScriptRoot\restore.proj"
diff --git a/eng/common/internal-feed-operations.sh b/eng/common/internal-feed-operations.sh
index e2233e7812..9378223ba0 100644
--- a/eng/common/internal-feed-operations.sh
+++ b/eng/common/internal-feed-operations.sh
@@ -39,7 +39,7 @@ function SetupCredProvider {
# Then, we set the 'VSS_NUGET_EXTERNAL_FEED_ENDPOINTS' environment variable to restore from the stable
# feeds successfully
- local nugetConfigPath="$repo_root/NuGet.config"
+ local nugetConfigPath="{$repo_root}NuGet.config"
if [ ! "$nugetConfigPath" ]; then
Write-PipelineTelemetryError -category 'Build' "NuGet.config file not found in repo's root!"
diff --git a/eng/common/internal/Tools.csproj b/eng/common/internal/Tools.csproj
index f46d5efe2e..beb9c4648e 100644
--- a/eng/common/internal/Tools.csproj
+++ b/eng/common/internal/Tools.csproj
@@ -1,5 +1,4 @@
-
-
+
net472
diff --git a/eng/common/msbuild.ps1 b/eng/common/msbuild.ps1
index c640123000..eea19cd845 100644
--- a/eng/common/msbuild.ps1
+++ b/eng/common/msbuild.ps1
@@ -5,6 +5,7 @@ Param(
[bool] $nodeReuse = $true,
[switch] $ci,
[switch] $prepareMachine,
+ [switch] $excludePrereleaseVS,
[Parameter(ValueFromRemainingArguments=$true)][String[]]$extraArgs
)
diff --git a/eng/common/performance/blazor_perf.proj b/eng/common/performance/blazor_perf.proj
deleted file mode 100644
index 3b25359c43..0000000000
--- a/eng/common/performance/blazor_perf.proj
+++ /dev/null
@@ -1,30 +0,0 @@
-
-
- python3
- $(HelixPreCommands);chmod +x $HELIX_WORKITEM_PAYLOAD/SOD/SizeOnDisk
-
-
-
-
- %(Identity)
-
-
-
-
- %HELIX_CORRELATION_PAYLOAD%\performance\src\scenarios\
- $(ScenarioDirectory)blazor\
-
-
- $HELIX_CORRELATION_PAYLOAD/performance/src/scenarios/
- $(ScenarioDirectory)blazor/
-
-
-
-
- $(WorkItemDirectory)
- cd $(BlazorDirectory);$(Python) pre.py publish --msbuild %27/p:_TrimmerDumpDependencies=true%27 --msbuild-static AdditionalMonoLinkerOptions=%27"%24(AdditionalMonoLinkerOptions) --dump-dependencies"%27 --binlog %27./traces/blazor_publish.binlog%27
- $(Python) test.py sod --scenario-name "%(Identity)"
- $(Python) post.py
-
-
-
\ No newline at end of file
diff --git a/eng/common/performance/crossgen_perf.proj b/eng/common/performance/crossgen_perf.proj
deleted file mode 100644
index eb8bdd9c44..0000000000
--- a/eng/common/performance/crossgen_perf.proj
+++ /dev/null
@@ -1,110 +0,0 @@
-
-
-
-
- %(Identity)
-
-
-
-
-
- py -3
- $(HelixPreCommands)
- %HELIX_CORRELATION_PAYLOAD%\Core_Root
- %HELIX_CORRELATION_PAYLOAD%\performance\src\scenarios\
- $(ScenarioDirectory)crossgen\
- $(ScenarioDirectory)crossgen2\
-
-
- python3
- $(HelixPreCommands);chmod +x $HELIX_WORKITEM_PAYLOAD/startup/Startup;chmod +x $HELIX_WORKITEM_PAYLOAD/startup/perfcollect;sudo apt update;chmod +x $HELIX_WORKITEM_PAYLOAD/SOD/SizeOnDisk
- $HELIX_CORRELATION_PAYLOAD/Core_Root
- $HELIX_CORRELATION_PAYLOAD/performance/src/scenarios/
- $(ScenarioDirectory)crossgen/
- $(ScenarioDirectory)crossgen2/
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- $(WorkItemDirectory)
- $(Python) $(CrossgenDirectory)test.py crossgen --core-root $(CoreRoot) --test-name %(Identity)
-
-
-
-
-
- $(WorkItemDirectory)
- $(Python) $(Crossgen2Directory)test.py crossgen2 --core-root $(CoreRoot) --single %(Identity)
-
-
-
-
-
- $(WorkItemDirectory)
- $(Python) $(Crossgen2Directory)test.py crossgen2 --core-root $(CoreRoot) --single %(Identity) --singlethreaded True
-
-
-
-
-
- $(WorkItemDirectory)
- $(Python) $(CrossgenDirectory)pre.py crossgen --core-root $(CoreRoot) --single %(Identity)
- $(Python) $(CrossgenDirectory)test.py sod --scenario-name "Crossgen %(Identity) Size" --dirs ./crossgen.out/
- $(Python) $(CrossgenDirectory)post.py
-
-
-
-
-
- $(WorkItemDirectory)
- $(Python) $(Crossgen2Directory)pre.py crossgen2 --core-root $(CoreRoot) --single %(Identity)
- $(Python) $(Crossgen2Directory)test.py sod --scenario-name "Crossgen2 %(Identity) Size" --dirs ./crossgen.out/
- $(Python) $(Crossgen2Directory)post.py
-
-
-
-
-
-
- 4:00
-
-
-
- 4:00
-
-
- 4:00
-
-
- $(WorkItemDirectory)
- $(Python) $(Crossgen2Directory)test.py crossgen2 --core-root $(CoreRoot) --composite $(Crossgen2Directory)framework-r2r.dll.rsp
- 1:00
-
-
- 4:00
-
-
- 4:00
-
-
-
\ No newline at end of file
diff --git a/eng/common/performance/microbenchmarks.proj b/eng/common/performance/microbenchmarks.proj
deleted file mode 100644
index 318ca5f1b8..0000000000
--- a/eng/common/performance/microbenchmarks.proj
+++ /dev/null
@@ -1,144 +0,0 @@
-
-
-
- %HELIX_CORRELATION_PAYLOAD%\performance\scripts\benchmarks_ci.py --csproj %HELIX_CORRELATION_PAYLOAD%\performance\$(TargetCsproj)
- --dotnet-versions %DOTNET_VERSION% --cli-source-info args --cli-branch %PERFLAB_BRANCH% --cli-commit-sha %PERFLAB_HASH% --cli-repository https://github.com/%PERFLAB_REPO% --cli-source-timestamp %PERFLAB_BUILDTIMESTAMP%
- py -3
- %HELIX_CORRELATION_PAYLOAD%\Core_Root\CoreRun.exe
- %HELIX_CORRELATION_PAYLOAD%\Baseline_Core_Root\CoreRun.exe
-
- $(HelixPreCommands);call %HELIX_CORRELATION_PAYLOAD%\performance\tools\machine-setup.cmd;set PYTHONPATH=%HELIX_WORKITEM_PAYLOAD%\scripts%3B%HELIX_WORKITEM_PAYLOAD%
- %HELIX_CORRELATION_PAYLOAD%\artifacts\BenchmarkDotNet.Artifacts
- %HELIX_CORRELATION_PAYLOAD%\artifacts\BenchmarkDotNet.Artifacts_Baseline
- %HELIX_CORRELATION_PAYLOAD%\performance\src\tools\ResultsComparer\ResultsComparer.csproj
- %HELIX_CORRELATION_PAYLOAD%\performance\tools\dotnet\$(Architecture)\dotnet.exe
- %25%25
- %HELIX_WORKITEM_ROOT%\testResults.xml
-
-
-
- $HELIX_CORRELATION_PAYLOAD
- $(BaseDirectory)/performance
-
-
-
- $HELIX_WORKITEM_PAYLOAD
- $(BaseDirectory)
-
-
-
- $(PerformanceDirectory)/scripts/benchmarks_ci.py --csproj $(PerformanceDirectory)/$(TargetCsproj)
- --dotnet-versions $DOTNET_VERSION --cli-source-info args --cli-branch $PERFLAB_BRANCH --cli-commit-sha $PERFLAB_HASH --cli-repository https://github.com/$PERFLAB_REPO --cli-source-timestamp $PERFLAB_BUILDTIMESTAMP
- python3
- $(BaseDirectory)/Core_Root/corerun
- $(BaseDirectory)/Baseline_Core_Root/corerun
- $(HelixPreCommands);chmod +x $(PerformanceDirectory)/tools/machine-setup.sh;. $(PerformanceDirectory)/tools/machine-setup.sh
- $(BaseDirectory)/artifacts/BenchmarkDotNet.Artifacts
- $(BaseDirectory)/artifacts/BenchmarkDotNet.Artifacts_Baseline
- $(PerformanceDirectory)/src/tools/ResultsComparer/ResultsComparer.csproj
- $(PerformanceDirectory)/tools/dotnet/$(Architecture)/dotnet
- %25
- $HELIX_WORKITEM_ROOT/testResults.xml
-
-
-
- $(CliArguments) --wasm
-
-
-
- --corerun %HELIX_CORRELATION_PAYLOAD%\dotnet-mono\shared\Microsoft.NETCore.App\6.0.0\corerun.exe
-
-
- --corerun $(BaseDirectory)/dotnet-mono/shared/Microsoft.NETCore.App/6.0.0/corerun
-
-
-
- --corerun $(CoreRun)
-
-
-
- --corerun $(BaselineCoreRun)
-
-
-
- $(Python) $(WorkItemCommand) --incremental no --architecture $(Architecture) -f $(_Framework) $(PerfLabArguments)
-
-
-
- $(WorkItemCommand) $(CliArguments)
-
-
-
- 2:30
- 0:15
-
-
-
-
- %(Identity)
-
-
-
-
- 30
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- false
-
-
-
-
-
- $(WorkItemDirectory)
- $(WorkItemCommand) --bdn-artifacts $(BaselineArtifactsDirectory) --bdn-arguments="--anyCategories $(BDNCategories) $(ExtraBenchmarkDotNetArguments) $(BaselineCoreRunArgument) --partition-count $(PartitionCount) --partition-index %(HelixWorkItem.Index)"
- $(WorkItemCommand) --bdn-artifacts $(ArtifactsDirectory) --bdn-arguments="--anyCategories $(BDNCategories) $(ExtraBenchmarkDotNetArguments) $(CoreRunArgument) --partition-count $(PartitionCount) --partition-index %(HelixWorkItem.Index)"
- $(DotnetExe) run -f $(_Framework) -p $(ResultsComparer) --base $(BaselineArtifactsDirectory) --diff $(ArtifactsDirectory) --threshold 2$(Percent) --xml $(XMLResults);$(FinalCommand)
- $(WorkItemTimeout)
-
-
-
-
-
- $(WorkItemDirectory)
- $(WorkItemCommand) --bdn-artifacts $(BaselineArtifactsDirectory) --bdn-arguments="--anyCategories $(BDNCategories) $(ExtraBenchmarkDotNetArguments) $(BaselineCoreRunArgument)"
- $(WorkItemCommand) --bdn-artifacts $(ArtifactsDirectory) --bdn-arguments="--anyCategories $(BDNCategories) $(ExtraBenchmarkDotNetArguments) $(CoreRunArgument)"
- $(DotnetExe) run -f $(_Framework) -p $(ResultsComparer) --base $(BaselineArtifactsDirectory) --diff $(ArtifactsDirectory) --threshold 2$(Percent) --xml $(XMLResults)
- 4:00
-
-
-
diff --git a/eng/common/performance/performance-setup.ps1 b/eng/common/performance/performance-setup.ps1
deleted file mode 100644
index 9a64b07e69..0000000000
--- a/eng/common/performance/performance-setup.ps1
+++ /dev/null
@@ -1,139 +0,0 @@
-Param(
- [string] $SourceDirectory=$env:BUILD_SOURCESDIRECTORY,
- [string] $CoreRootDirectory,
- [string] $BaselineCoreRootDirectory,
- [string] $Architecture="x64",
- [string] $Framework="net5.0",
- [string] $CompilationMode="Tiered",
- [string] $Repository=$env:BUILD_REPOSITORY_NAME,
- [string] $Branch=$env:BUILD_SOURCEBRANCH,
- [string] $CommitSha=$env:BUILD_SOURCEVERSION,
- [string] $BuildNumber=$env:BUILD_BUILDNUMBER,
- [string] $RunCategories="Libraries Runtime",
- [string] $Csproj="src\benchmarks\micro\MicroBenchmarks.csproj",
- [string] $Kind="micro",
- [switch] $LLVM,
- [switch] $MonoInterpreter,
- [switch] $MonoAOT,
- [switch] $Internal,
- [switch] $Compare,
- [string] $MonoDotnet="",
- [string] $Configurations="CompilationMode=$CompilationMode RunKind=$Kind",
- [string] $LogicalMachine=""
-)
-
-$RunFromPerformanceRepo = ($Repository -eq "dotnet/performance") -or ($Repository -eq "dotnet-performance")
-$UseCoreRun = ($CoreRootDirectory -ne [string]::Empty)
-$UseBaselineCoreRun = ($BaselineCoreRootDirectory -ne [string]::Empty)
-
-$PayloadDirectory = (Join-Path $SourceDirectory "Payload")
-$PerformanceDirectory = (Join-Path $PayloadDirectory "performance")
-$WorkItemDirectory = (Join-Path $SourceDirectory "workitem")
-$ExtraBenchmarkDotNetArguments = "--iterationCount 1 --warmupCount 0 --invocationCount 1 --unrollFactor 1 --strategy ColdStart --stopOnFirstError true"
-$Creator = $env:BUILD_DEFINITIONNAME
-$PerfLabArguments = ""
-$HelixSourcePrefix = "pr"
-
-$Queue = ""
-
-if ($Internal) {
- switch ($LogicalMachine) {
- "perftiger" { $Queue = "Windows.10.Amd64.19H1.Tiger.Perf" }
- "perfowl" { $Queue = "Windows.10.Amd64.20H2.Owl.Perf" }
- "perfsurf" { $Queue = "Windows.10.Arm64.Perf.Surf" }
- Default { $Queue = "Windows.10.Amd64.19H1.Tiger.Perf" }
- }
- $PerfLabArguments = "--upload-to-perflab-container"
- $ExtraBenchmarkDotNetArguments = ""
- $Creator = ""
- $HelixSourcePrefix = "official"
-}
-else {
- $Queue = "Windows.10.Amd64.ClientRS4.DevEx.15.8.Open"
-}
-
-if($MonoInterpreter)
-{
- $ExtraBenchmarkDotNetArguments = "--category-exclusion-filter NoInterpreter"
-}
-
-if($MonoDotnet -ne "")
-{
- $Configurations += " LLVM=$LLVM MonoInterpreter=$MonoInterpreter MonoAOT=$MonoAOT"
- if($ExtraBenchmarkDotNetArguments -eq "")
- {
- #FIX ME: We need to block these tests as they don't run on mono for now
- $ExtraBenchmarkDotNetArguments = "--exclusion-filter *Perf_Image* *Perf_NamedPipeStream*"
- }
- else
- {
- #FIX ME: We need to block these tests as they don't run on mono for now
- $ExtraBenchmarkDotNetArguments += " --exclusion-filter *Perf_Image* *Perf_NamedPipeStream*"
- }
-}
-
-# FIX ME: This is a workaround until we get this from the actual pipeline
-$CommonSetupArguments="--channel master --queue $Queue --build-number $BuildNumber --build-configs $Configurations --architecture $Architecture"
-$SetupArguments = "--repository https://github.com/$Repository --branch $Branch --get-perf-hash --commit-sha $CommitSha $CommonSetupArguments"
-
-
-if ($RunFromPerformanceRepo) {
- $SetupArguments = "--perf-hash $CommitSha $CommonSetupArguments"
-
- robocopy $SourceDirectory $PerformanceDirectory /E /XD $PayloadDirectory $SourceDirectory\artifacts $SourceDirectory\.git
-}
-else {
- git clone --branch master --depth 1 --quiet https://github.com/dotnet/performance $PerformanceDirectory
-}
-
-if($MonoDotnet -ne "")
-{
- $UsingMono = "true"
- $MonoDotnetPath = (Join-Path $PayloadDirectory "dotnet-mono")
- Move-Item -Path $MonoDotnet -Destination $MonoDotnetPath
-}
-
-if ($UseCoreRun) {
- $NewCoreRoot = (Join-Path $PayloadDirectory "Core_Root")
- Move-Item -Path $CoreRootDirectory -Destination $NewCoreRoot
-}
-if ($UseBaselineCoreRun) {
- $NewBaselineCoreRoot = (Join-Path $PayloadDirectory "Baseline_Core_Root")
- Move-Item -Path $BaselineCoreRootDirectory -Destination $NewBaselineCoreRoot
-}
-
-$DocsDir = (Join-Path $PerformanceDirectory "docs")
-robocopy $DocsDir $WorkItemDirectory
-
-# Set variables that we will need to have in future steps
-$ci = $true
-
-. "$PSScriptRoot\..\pipeline-logging-functions.ps1"
-
-# Directories
-Write-PipelineSetVariable -Name 'PayloadDirectory' -Value "$PayloadDirectory" -IsMultiJobVariable $false
-Write-PipelineSetVariable -Name 'PerformanceDirectory' -Value "$PerformanceDirectory" -IsMultiJobVariable $false
-Write-PipelineSetVariable -Name 'WorkItemDirectory' -Value "$WorkItemDirectory" -IsMultiJobVariable $false
-
-# Script Arguments
-Write-PipelineSetVariable -Name 'Python' -Value "py -3" -IsMultiJobVariable $false
-Write-PipelineSetVariable -Name 'ExtraBenchmarkDotNetArguments' -Value "$ExtraBenchmarkDotNetArguments" -IsMultiJobVariable $false
-Write-PipelineSetVariable -Name 'SetupArguments' -Value "$SetupArguments" -IsMultiJobVariable $false
-Write-PipelineSetVariable -Name 'PerfLabArguments' -Value "$PerfLabArguments" -IsMultiJobVariable $false
-Write-PipelineSetVariable -Name 'BDNCategories' -Value "$RunCategories" -IsMultiJobVariable $false
-Write-PipelineSetVariable -Name 'TargetCsproj' -Value "$Csproj" -IsMultiJobVariable $false
-Write-PipelineSetVariable -Name 'Kind' -Value "$Kind" -IsMultiJobVariable $false
-Write-PipelineSetVariable -Name 'Architecture' -Value "$Architecture" -IsMultiJobVariable $false
-Write-PipelineSetVariable -Name 'UseCoreRun' -Value "$UseCoreRun" -IsMultiJobVariable $false
-Write-PipelineSetVariable -Name 'UseBaselineCoreRun' -Value "$UseBaselineCoreRun" -IsMultiJobVariable $false
-Write-PipelineSetVariable -Name 'RunFromPerfRepo' -Value "$RunFromPerformanceRepo" -IsMultiJobVariable $false
-Write-PipelineSetVariable -Name 'Compare' -Value "$Compare" -IsMultiJobVariable $false
-Write-PipelineSetVariable -Name 'MonoDotnet' -Value "$UsingMono" -IsMultiJobVariable $false
-
-# Helix Arguments
-Write-PipelineSetVariable -Name 'Creator' -Value "$Creator" -IsMultiJobVariable $false
-Write-PipelineSetVariable -Name 'Queue' -Value "$Queue" -IsMultiJobVariable $false
-Write-PipelineSetVariable -Name 'HelixSourcePrefix' -Value "$HelixSourcePrefix" -IsMultiJobVariable $false
-Write-PipelineSetVariable -Name '_BuildConfig' -Value "$Architecture.$Kind.$Framework" -IsMultiJobVariable $false
-
-exit 0
\ No newline at end of file
diff --git a/eng/common/performance/performance-setup.sh b/eng/common/performance/performance-setup.sh
deleted file mode 100644
index 33b60b5033..0000000000
--- a/eng/common/performance/performance-setup.sh
+++ /dev/null
@@ -1,297 +0,0 @@
-#!/usr/bin/env bash
-
-source_directory=$BUILD_SOURCESDIRECTORY
-core_root_directory=
-baseline_core_root_directory=
-architecture=x64
-framework=net5.0
-compilation_mode=tiered
-repository=$BUILD_REPOSITORY_NAME
-branch=$BUILD_SOURCEBRANCH
-commit_sha=$BUILD_SOURCEVERSION
-build_number=$BUILD_BUILDNUMBER
-internal=false
-compare=false
-mono_dotnet=
-kind="micro"
-llvm=false
-monointerpreter=false
-monoaot=false
-run_categories="Libraries Runtime"
-csproj="src\benchmarks\micro\MicroBenchmarks.csproj"
-configurations="CompliationMode=$compilation_mode RunKind=$kind"
-run_from_perf_repo=false
-use_core_run=true
-use_baseline_core_run=true
-using_mono=false
-wasm_runtime_loc=
-using_wasm=false
-use_latest_dotnet=false
-logical_machine=
-
-while (($# > 0)); do
- lowerI="$(echo $1 | tr "[:upper:]" "[:lower:]")"
- case $lowerI in
- --sourcedirectory)
- source_directory=$2
- shift 2
- ;;
- --corerootdirectory)
- core_root_directory=$2
- shift 2
- ;;
- --baselinecorerootdirectory)
- baseline_core_root_directory=$2
- shift 2
- ;;
- --architecture)
- architecture=$2
- shift 2
- ;;
- --framework)
- framework=$2
- shift 2
- ;;
- --compilationmode)
- compilation_mode=$2
- shift 2
- ;;
- --logicalmachine)
- logical_machine=$2
- shift 2
- ;;
- --repository)
- repository=$2
- shift 2
- ;;
- --branch)
- branch=$2
- shift 2
- ;;
- --commitsha)
- commit_sha=$2
- shift 2
- ;;
- --buildnumber)
- build_number=$2
- shift 2
- ;;
- --kind)
- kind=$2
- configurations="CompilationMode=$compilation_mode RunKind=$kind"
- shift 2
- ;;
- --runcategories)
- run_categories=$2
- shift 2
- ;;
- --csproj)
- csproj=$2
- shift 2
- ;;
- --internal)
- internal=true
- shift 1
- ;;
- --alpine)
- alpine=true
- shift 1
- ;;
- --llvm)
- llvm=true
- shift 1
- ;;
- --monointerpreter)
- monointerpreter=true
- shift 1
- ;;
- --monoaot)
- monoaot=true
- shift 1
- ;;
- --monodotnet)
- mono_dotnet=$2
- shift 2
- ;;
- --wasm)
- wasm_runtime_loc=$2
- shift 2
- ;;
- --compare)
- compare=true
- shift 1
- ;;
- --configurations)
- configurations=$2
- shift 2
- ;;
- --latestdotnet)
- use_latest_dotnet=true
- shift 1
- ;;
- *)
- echo "Common settings:"
- echo " --corerootdirectory Directory where Core_Root exists, if running perf testing with --corerun"
- echo " --architecture Architecture of the testing being run"
- echo " --configurations List of key=value pairs that will be passed to perf testing infrastructure."
- echo " ex: --configurations \"CompilationMode=Tiered OptimzationLevel=PGO\""
- echo " --help Print help and exit"
- echo ""
- echo "Advanced settings:"
- echo " --framework The framework to run, if not running in master"
- echo " --compliationmode The compilation mode if not passing --configurations"
- echo " --sourcedirectory The directory of the sources. Defaults to env:BUILD_SOURCESDIRECTORY"
- echo " --repository The name of the repository in the / format. Defaults to env:BUILD_REPOSITORY_NAME"
- echo " --branch The name of the branch. Defaults to env:BUILD_SOURCEBRANCH"
- echo " --commitsha The commit sha1 to run against. Defaults to env:BUILD_SOURCEVERSION"
- echo " --buildnumber The build number currently running. Defaults to env:BUILD_BUILDNUMBER"
- echo " --csproj The relative path to the benchmark csproj whose tests should be run. Defaults to src\benchmarks\micro\MicroBenchmarks.csproj"
- echo " --kind Related to csproj. The kind of benchmarks that should be run. Defaults to micro"
- echo " --runcategories Related to csproj. Categories of benchmarks to run. Defaults to \"coreclr corefx\""
- echo " --internal If the benchmarks are running as an official job."
- echo " --monodotnet Pass the path to the mono dotnet for mono performance testing."
- echo " --wasm Path to the unpacked wasm runtime pack."
- echo " --latestdotnet --dotnet-versions will not be specified. --dotnet-versions defaults to LKG version in global.json "
- echo " --alpine Set for runs on Alpine"
- echo ""
- exit 0
- ;;
- esac
-done
-
-if [ "$repository" == "dotnet/performance" ] || [ "$repository" == "dotnet-performance" ]; then
- run_from_perf_repo=true
-fi
-
-if [ -z "$configurations" ]; then
- configurations="CompilationMode=$compilation_mode"
-fi
-
-if [ -z "$core_root_directory" ]; then
- use_core_run=false
-fi
-
-if [ -z "$baseline_core_root_directory" ]; then
- use_baseline_core_run=false
-fi
-
-payload_directory=$source_directory/Payload
-performance_directory=$payload_directory/performance
-workitem_directory=$source_directory/workitem
-extra_benchmark_dotnet_arguments="--iterationCount 1 --warmupCount 0 --invocationCount 1 --unrollFactor 1 --strategy ColdStart --stopOnFirstError true"
-perflab_arguments=
-queue=Ubuntu.1804.Amd64.Open
-creator=$BUILD_DEFINITIONNAME
-helix_source_prefix="pr"
-
-if [[ "$internal" == true ]]; then
- perflab_arguments="--upload-to-perflab-container"
- helix_source_prefix="official"
- creator=
- extra_benchmark_dotnet_arguments=
-
- if [[ "$architecture" = "arm64" ]]; then
- queue=Ubuntu.1804.Arm64.Perf
- else
- if [[ "$logical_machine" = "perfowl" ]]; then
- queue=Ubuntu.1804.Amd64.Owl.Perf
- else
- queue=Ubuntu.1804.Amd64.Tiger.Perf
- fi
- fi
-
- if [[ "$alpine" = "true" ]]; then
- queue=alpine.amd64.tiger.perf
- fi
-else
- if [[ "$architecture" = "arm64" ]]; then
- queue=ubuntu.1804.armarch.open
- else
- queue=Ubuntu.1804.Amd64.Open
- fi
-
- if [[ "$alpine" = "true" ]]; then
- queue=alpine.amd64.tiger.perf
- fi
-fi
-
-if [[ "$mono_dotnet" != "" ]] && [[ "$monointerpreter" == "false" ]]; then
- configurations="$configurations LLVM=$llvm MonoInterpreter=$monointerpreter MonoAOT=$monoaot"
- extra_benchmark_dotnet_arguments="$extra_benchmark_dotnet_arguments --category-exclusion-filter NoMono"
-fi
-
-if [[ "$wasm_runtime_loc" != "" ]]; then
- configurations="CompilationMode=wasm RunKind=$kind"
- extra_benchmark_dotnet_arguments="$extra_benchmark_dotnet_arguments --category-exclusion-filter NoInterpreter NoWASM NoMono"
-fi
-
-if [[ "$mono_dotnet" != "" ]] && [[ "$monointerpreter" == "true" ]]; then
- configurations="$configurations LLVM=$llvm MonoInterpreter=$monointerpreter MonoAOT=$monoaot"
- extra_benchmark_dotnet_arguments="$extra_benchmark_dotnet_arguments --category-exclusion-filter NoInterpreter NoMono"
-fi
-
-common_setup_arguments="--channel master --queue $queue --build-number $build_number --build-configs $configurations --architecture $architecture"
-setup_arguments="--repository https://github.com/$repository --branch $branch --get-perf-hash --commit-sha $commit_sha $common_setup_arguments"
-
-if [[ "$run_from_perf_repo" = true ]]; then
- payload_directory=
- workitem_directory=$source_directory
- performance_directory=$workitem_directory
- setup_arguments="--perf-hash $commit_sha $common_setup_arguments"
-else
- git clone --branch master --depth 1 --quiet https://github.com/dotnet/performance $performance_directory
-
- docs_directory=$performance_directory/docs
- mv $docs_directory $workitem_directory
-fi
-
-if [[ "$wasm_runtime_loc" != "" ]]; then
- using_wasm=true
- wasm_dotnet_path=$payload_directory/dotnet-wasm
- mv $wasm_runtime_loc $wasm_dotnet_path
- extra_benchmark_dotnet_arguments="$extra_benchmark_dotnet_arguments --wasmMainJS \$HELIX_CORRELATION_PAYLOAD/dotnet-wasm/runtime-test.js --wasmEngine /home/helixbot/.jsvu/v8 --customRuntimePack \$HELIX_CORRELATION_PAYLOAD/dotnet-wasm"
-fi
-
-if [[ "$mono_dotnet" != "" ]]; then
- using_mono=true
- mono_dotnet_path=$payload_directory/dotnet-mono
- mv $mono_dotnet $mono_dotnet_path
-fi
-
-if [[ "$use_core_run" = true ]]; then
- new_core_root=$payload_directory/Core_Root
- mv $core_root_directory $new_core_root
-fi
-
-if [[ "$use_baseline_core_run" = true ]]; then
- new_baseline_core_root=$payload_directory/Baseline_Core_Root
- mv $baseline_core_root_directory $new_baseline_core_root
-fi
-
-ci=true
-
-_script_dir=$(pwd)/eng/common
-. "$_script_dir/pipeline-logging-functions.sh"
-
-# Make sure all of our variables are available for future steps
-Write-PipelineSetVariable -name "UseCoreRun" -value "$use_core_run" -is_multi_job_variable false
-Write-PipelineSetVariable -name "UseBaselineCoreRun" -value "$use_baseline_core_run" -is_multi_job_variable false
-Write-PipelineSetVariable -name "Architecture" -value "$architecture" -is_multi_job_variable false
-Write-PipelineSetVariable -name "PayloadDirectory" -value "$payload_directory" -is_multi_job_variable false
-Write-PipelineSetVariable -name "PerformanceDirectory" -value "$performance_directory" -is_multi_job_variable false
-Write-PipelineSetVariable -name "WorkItemDirectory" -value "$workitem_directory" -is_multi_job_variable false
-Write-PipelineSetVariable -name "Queue" -value "$queue" -is_multi_job_variable false
-Write-PipelineSetVariable -name "SetupArguments" -value "$setup_arguments" -is_multi_job_variable false
-Write-PipelineSetVariable -name "Python" -value "python3" -is_multi_job_variable false
-Write-PipelineSetVariable -name "PerfLabArguments" -value "$perflab_arguments" -is_multi_job_variable false
-Write-PipelineSetVariable -name "ExtraBenchmarkDotNetArguments" -value "$extra_benchmark_dotnet_arguments" -is_multi_job_variable false
-Write-PipelineSetVariable -name "BDNCategories" -value "$run_categories" -is_multi_job_variable false
-Write-PipelineSetVariable -name "TargetCsproj" -value "$csproj" -is_multi_job_variable false
-Write-PipelineSetVariable -name "RunFromPerfRepo" -value "$run_from_perf_repo" -is_multi_job_variable false
-Write-PipelineSetVariable -name "Creator" -value "$creator" -is_multi_job_variable false
-Write-PipelineSetVariable -name "HelixSourcePrefix" -value "$helix_source_prefix" -is_multi_job_variable false
-Write-PipelineSetVariable -name "Kind" -value "$kind" -is_multi_job_variable false
-Write-PipelineSetVariable -name "_BuildConfig" -value "$architecture.$kind.$framework" -is_multi_job_variable false
-Write-PipelineSetVariable -name "Compare" -value "$compare" -is_multi_job_variable false
-Write-PipelineSetVariable -name "MonoDotnet" -value "$using_mono" -is_multi_job_variable false
-Write-PipelineSetVariable -name "WasmDotnet" -value "$using_wasm" -is_multi_job_variable false
diff --git a/eng/common/post-build/sourcelink-validation.ps1 b/eng/common/post-build/sourcelink-validation.ps1
index 1c46f7b634..85c8986171 100644
--- a/eng/common/post-build/sourcelink-validation.ps1
+++ b/eng/common/post-build/sourcelink-validation.ps1
@@ -14,7 +14,9 @@ param(
$global:RepoFiles = @{}
# Maximum number of jobs to run in parallel
-$MaxParallelJobs = 6
+$MaxParallelJobs = 16
+
+$MaxRetries = 5
# Wait time between check for system load
$SecondsBetweenLoadChecks = 10
@@ -29,7 +31,10 @@ $ValidatePackage = {
# Ensure input file exist
if (!(Test-Path $PackagePath)) {
Write-Host "Input file does not exist: $PackagePath"
- return 1
+ return [pscustomobject]@{
+ result = 1
+ packagePath = $PackagePath
+ }
}
# Extensions for which we'll look for SourceLink information
@@ -59,7 +64,10 @@ $ValidatePackage = {
# We ignore resource DLLs
if ($FileName.EndsWith('.resources.dll')) {
- return
+ return [pscustomobject]@{
+ result = 0
+ packagePath = $PackagePath
+ }
}
[System.IO.Compression.ZipFileExtensions]::ExtractToFile($_, $TargetFile, $true)
@@ -91,36 +99,49 @@ $ValidatePackage = {
$Status = 200
$Cache = $using:RepoFiles
- if ( !($Cache.ContainsKey($FilePath)) ) {
- try {
- $Uri = $Link -as [System.URI]
-
- # Only GitHub links are valid
- if ($Uri.AbsoluteURI -ne $null -and ($Uri.Host -match 'github' -or $Uri.Host -match 'githubusercontent')) {
- $Status = (Invoke-WebRequest -Uri $Link -UseBasicParsing -Method HEAD -TimeoutSec 5).StatusCode
+ $totalRetries = 0
+
+ while ($totalRetries -lt $using:MaxRetries) {
+ if ( !($Cache.ContainsKey($FilePath)) ) {
+ try {
+ $Uri = $Link -as [System.URI]
+
+ # Only GitHub links are valid
+ if ($Uri.AbsoluteURI -ne $null -and ($Uri.Host -match 'github' -or $Uri.Host -match 'githubusercontent')) {
+ $Status = (Invoke-WebRequest -Uri $Link -UseBasicParsing -Method HEAD -TimeoutSec 5).StatusCode
+ }
+ else {
+ # If it's not a github link, we want to break out of the loop and not retry.
+ $Status = 0
+ $totalRetries = $using:MaxRetries
+ }
}
- else {
+ catch {
+ Write-Host $_
$Status = 0
}
}
- catch {
- write-host $_
- $Status = 0
- }
- }
- if ($Status -ne 200) {
- if ($NumFailedLinks -eq 0) {
- if ($FailedFiles.Value -eq 0) {
- Write-Host
+ if ($Status -ne 200) {
+ $totalRetries++
+
+ if ($totalRetries -ge $using:MaxRetries) {
+ if ($NumFailedLinks -eq 0) {
+ if ($FailedFiles.Value -eq 0) {
+ Write-Host
+ }
+
+ Write-Host "`tFile $RealPath has broken links:"
+ }
+
+ Write-Host "`t`tFailed to retrieve $Link"
+
+ $NumFailedLinks++
}
-
- Write-Host "`tFile $RealPath has broken links:"
}
-
- Write-Host "`t`tFailed to retrieve $Link"
-
- $NumFailedLinks++
+ else {
+ break
+ }
}
}
}
@@ -136,7 +157,7 @@ $ValidatePackage = {
}
}
catch {
-
+ Write-Host $_
}
finally {
$zip.Dispose()
@@ -220,6 +241,7 @@ function ValidateSourceLinkLinks {
# Process each NuGet package in parallel
Get-ChildItem "$InputPath\*.symbols.nupkg" |
ForEach-Object {
+ Write-Host "Starting $($_.FullName)"
Start-Job -ScriptBlock $ValidatePackage -ArgumentList $_.FullName | Out-Null
$NumJobs = @(Get-Job -State 'Running').Count
@@ -267,6 +289,10 @@ function InstallSourcelinkCli {
try {
InstallSourcelinkCli
+ foreach ($Job in @(Get-Job)) {
+ Remove-Job -Id $Job.Id
+ }
+
ValidateSourceLinkLinks
}
catch {
diff --git a/eng/common/post-build/symbols-validation.ps1 b/eng/common/post-build/symbols-validation.ps1
index 99bf28cd5c..a5af041ba7 100644
--- a/eng/common/post-build/symbols-validation.ps1
+++ b/eng/common/post-build/symbols-validation.ps1
@@ -1,13 +1,14 @@
param(
- [Parameter(Mandatory=$true)][string] $InputPath, # Full path to directory where NuGet packages to be checked are stored
- [Parameter(Mandatory=$true)][string] $ExtractPath, # Full path to directory where the packages will be extracted during validation
- [Parameter(Mandatory=$true)][string] $DotnetSymbolVersion, # Version of dotnet symbol to use
- [Parameter(Mandatory=$false)][switch] $ContinueOnError, # If we should keep checking symbols after an error
- [Parameter(Mandatory=$false)][switch] $Clean # Clean extracted symbols directory after checking symbols
+ [Parameter(Mandatory = $true)][string] $InputPath, # Full path to directory where NuGet packages to be checked are stored
+ [Parameter(Mandatory = $true)][string] $ExtractPath, # Full path to directory where the packages will be extracted during validation
+ [Parameter(Mandatory = $true)][string] $DotnetSymbolVersion, # Version of dotnet symbol to use
+ [Parameter(Mandatory = $false)][switch] $CheckForWindowsPdbs, # If we should check for the existence of windows pdbs in addition to portable PDBs
+ [Parameter(Mandatory = $false)][switch] $ContinueOnError, # If we should keep checking symbols after an error
+ [Parameter(Mandatory = $false)][switch] $Clean # Clean extracted symbols directory after checking symbols
)
# Maximum number of jobs to run in parallel
-$MaxParallelJobs = 6
+$MaxParallelJobs = 16
# Max number of retries
$MaxRetry = 5
@@ -19,9 +20,15 @@ $SecondsBetweenLoadChecks = 10
Set-Variable -Name "ERROR_BADEXTRACT" -Option Constant -Value -1
Set-Variable -Name "ERROR_FILEDOESNOTEXIST" -Option Constant -Value -2
+$WindowsPdbVerificationParam = ""
+if ($CheckForWindowsPdbs) {
+ $WindowsPdbVerificationParam = "--windows-pdbs"
+}
+
$CountMissingSymbols = {
param(
- [string] $PackagePath # Path to a NuGet package
+ [string] $PackagePath, # Path to a NuGet package
+ [string] $WindowsPdbVerificationParam # If we should check for the existence of windows pdbs in addition to portable PDBs
)
. $using:PSScriptRoot\..\tools.ps1
@@ -34,7 +41,7 @@ $CountMissingSymbols = {
if (!(Test-Path $PackagePath)) {
Write-PipelineTaskError "Input file does not exist: $PackagePath"
return [pscustomobject]@{
- result = $using:ERROR_FILEDOESNOTEXIST
+ result = $using:ERROR_FILEDOESNOTEXIST
packagePath = $PackagePath
}
}
@@ -57,24 +64,25 @@ $CountMissingSymbols = {
Write-Host "Something went wrong extracting $PackagePath"
Write-Host $_
return [pscustomobject]@{
- result = $using:ERROR_BADEXTRACT
+ result = $using:ERROR_BADEXTRACT
packagePath = $PackagePath
}
}
Get-ChildItem -Recurse $ExtractPath |
- Where-Object {$RelevantExtensions -contains $_.Extension} |
- ForEach-Object {
- $FileName = $_.FullName
- if ($FileName -Match '\\ref\\') {
- Write-Host "`t Ignoring reference assembly file " $FileName
- return
- }
+ Where-Object { $RelevantExtensions -contains $_.Extension } |
+ ForEach-Object {
+ $FileName = $_.FullName
+ if ($FileName -Match '\\ref\\') {
+ Write-Host "`t Ignoring reference assembly file " $FileName
+ return
+ }
- $FirstMatchingSymbolDescriptionOrDefault = {
+ $FirstMatchingSymbolDescriptionOrDefault = {
param(
- [string] $FullPath, # Full path to the module that has to be checked
- [string] $TargetServerParam, # Parameter to pass to `Symbol Tool` indicating the server to lookup for symbols
+ [string] $FullPath, # Full path to the module that has to be checked
+ [string] $TargetServerParam, # Parameter to pass to `Symbol Tool` indicating the server to lookup for symbols
+ [string] $WindowsPdbVerificationParam, # Parameter to pass to potential check for windows-pdbs.
[string] $SymbolsPath
)
@@ -99,15 +107,16 @@ $CountMissingSymbols = {
# DWARF file for a .dylib
$DylibDwarf = $SymbolPath.Replace($Extension, '.dylib.dwarf')
-
+
$dotnetSymbolExe = "$env:USERPROFILE\.dotnet\tools"
$dotnetSymbolExe = Resolve-Path "$dotnetSymbolExe\dotnet-symbol.exe"
$totalRetries = 0
while ($totalRetries -lt $using:MaxRetry) {
+
# Save the output and get diagnostic output
- $output = & $dotnetSymbolExe --symbols --modules --windows-pdbs $TargetServerParam $FullPath -o $SymbolsPath --diagnostics | Out-String
+ $output = & $dotnetSymbolExe --symbols --modules $WindowsPdbVerificationParam $TargetServerParam $FullPath -o $SymbolsPath --diagnostics | Out-String
if (Test-Path $PdbPath) {
return 'PDB'
@@ -124,42 +133,50 @@ $CountMissingSymbols = {
elseif (Test-Path $SymbolPath) {
return 'Module'
}
- elseif ($output.Contains("503 Service Unavailable")) {
- # If we got a 503 error, we should retry.
+ else
+ {
$totalRetries++
}
- else {
- return $null
- }
}
return $null
}
- $SymbolsOnMSDL = & $FirstMatchingSymbolDescriptionOrDefault $FileName '--microsoft-symbol-server' $SymbolsPath
- $SymbolsOnSymWeb = & $FirstMatchingSymbolDescriptionOrDefault $FileName '--internal-server' $SymbolsPath
-
- Write-Host -NoNewLine "`t Checking file " $FileName "... "
+ $FileGuid = New-Guid
+ $ExpandedSymbolsPath = Join-Path -Path $SymbolsPath -ChildPath $FileGuid
+
+ $SymbolsOnMSDL = & $FirstMatchingSymbolDescriptionOrDefault `
+ -FullPath $FileName `
+ -TargetServerParam '--microsoft-symbol-server' `
+ -SymbolsPath "$ExpandedSymbolsPath-msdl" `
+ -WindowsPdbVerificationParam $WindowsPdbVerificationParam
+ $SymbolsOnSymWeb = & $FirstMatchingSymbolDescriptionOrDefault `
+ -FullPath $FileName `
+ -TargetServerParam '--internal-server' `
+ -SymbolsPath "$ExpandedSymbolsPath-symweb" `
+ -WindowsPdbVerificationParam $WindowsPdbVerificationParam
+
+ Write-Host -NoNewLine "`t Checking file " $FileName "... "
- if ($SymbolsOnMSDL -ne $null -and $SymbolsOnSymWeb -ne $null) {
- Write-Host "Symbols found on MSDL ($SymbolsOnMSDL) and SymWeb ($SymbolsOnSymWeb)"
+ if ($SymbolsOnMSDL -ne $null -and $SymbolsOnSymWeb -ne $null) {
+ Write-Host "Symbols found on MSDL ($SymbolsOnMSDL) and SymWeb ($SymbolsOnSymWeb)"
+ }
+ else {
+ $MissingSymbols++
+
+ if ($SymbolsOnMSDL -eq $null -and $SymbolsOnSymWeb -eq $null) {
+ Write-Host 'No symbols found on MSDL or SymWeb!'
}
else {
- $MissingSymbols++
-
- if ($SymbolsOnMSDL -eq $null -and $SymbolsOnSymWeb -eq $null) {
- Write-Host 'No symbols found on MSDL or SymWeb!'
+ if ($SymbolsOnMSDL -eq $null) {
+ Write-Host 'No symbols found on MSDL!'
}
else {
- if ($SymbolsOnMSDL -eq $null) {
- Write-Host 'No symbols found on MSDL!'
- }
- else {
- Write-Host 'No symbols found on SymWeb!'
- }
+ Write-Host 'No symbols found on SymWeb!'
}
}
}
+ }
if ($using:Clean) {
Remove-Item $ExtractPath -Recurse -Force
@@ -168,16 +185,16 @@ $CountMissingSymbols = {
Pop-Location
return [pscustomobject]@{
- result = $MissingSymbols
- packagePath = $PackagePath
- }
+ result = $MissingSymbols
+ packagePath = $PackagePath
+ }
}
function CheckJobResult(
- $result,
- $packagePath,
- [ref]$DupedSymbols,
- [ref]$TotalFailures) {
+ $result,
+ $packagePath,
+ [ref]$DupedSymbols,
+ [ref]$TotalFailures) {
if ($result -eq $ERROR_BADEXTRACT) {
Write-PipelineTelemetryError -Category 'CheckSymbols' -Message "$packagePath has duplicated symbol files"
$DupedSymbols.Value++
@@ -200,6 +217,7 @@ function CheckSymbolsAvailable {
Remove-Item $ExtractPath -Force -Recurse -ErrorAction SilentlyContinue
}
+ $TotalPackages = 0
$TotalFailures = 0
$DupedSymbols = 0
@@ -222,7 +240,9 @@ function CheckSymbolsAvailable {
return
}
- Start-Job -ScriptBlock $CountMissingSymbols -ArgumentList $FullName | Out-Null
+ $TotalPackages++
+
+ Start-Job -ScriptBlock $CountMissingSymbols -ArgumentList @($FullName,$WindowsPdbVerificationParam) | Out-Null
$NumJobs = @(Get-Job -State 'Running').Count
@@ -247,11 +267,11 @@ function CheckSymbolsAvailable {
if ($TotalFailures -gt 0 -or $DupedSymbols -gt 0) {
if ($TotalFailures -gt 0) {
- Write-PipelineTelemetryError -Category 'CheckSymbols' -Message "Symbols missing for $TotalFailures packages"
+ Write-PipelineTelemetryError -Category 'CheckSymbols' -Message "Symbols missing for $TotalFailures/$TotalPackages packages"
}
if ($DupedSymbols -gt 0) {
- Write-PipelineTelemetryError -Category 'CheckSymbols' -Message "$DupedSymbols packages had duplicated symbol files"
+ Write-PipelineTelemetryError -Category 'CheckSymbols' -Message "$DupedSymbols/$TotalPackages packages had duplicated symbol files and could not be extracted"
}
ExitWithExitCode 1
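
A hedged invocation sketch for the updated symbols-validation.ps1, showing the new -CheckForWindowsPdbs switch (the switch is what causes --windows-pdbs to be passed to dotnet-symbol); the paths and tool version below are placeholders, not values from this repository:

.\eng\common\post-build\symbols-validation.ps1 `
    -InputPath 'D:\artifacts\ShippingPackages' `
    -ExtractPath 'D:\temp\symbols-extract' `
    -DotnetSymbolVersion '1.0.0' `
    -CheckForWindowsPdbs `
    -Clean
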
diff --git a/eng/common/sdk-task.ps1 b/eng/common/sdk-task.ps1
index f55c43c6f4..b1bca63ab1 100644
--- a/eng/common/sdk-task.ps1
+++ b/eng/common/sdk-task.ps1
@@ -34,7 +34,7 @@ function Print-Usage() {
function Build([string]$target) {
$logSuffix = if ($target -eq 'Execute') { '' } else { ".$target" }
$log = Join-Path $LogDir "$task$logSuffix.binlog"
- $outputPath = Join-Path $ToolsetDir "$task\\"
+ $outputPath = Join-Path $ToolsetDir "$task\"
MSBuild $taskProject `
/bl:$log `
@@ -53,7 +53,7 @@ try {
}
if ($task -eq "") {
- Write-PipelineTelemetryError -Category 'Build' -Message "Missing required parameter '-task '" -ForegroundColor Red
+ Write-PipelineTelemetryError -Category 'Build' -Message "Missing required parameter '-task '"
Print-Usage
ExitWithExitCode 1
}
@@ -64,7 +64,7 @@ try {
$GlobalJson.tools | Add-Member -Name "vs" -Value (ConvertFrom-Json "{ `"version`": `"16.5`" }") -MemberType NoteProperty
}
if( -not ($GlobalJson.tools.PSObject.Properties.Name -match "xcopy-msbuild" )) {
- $GlobalJson.tools | Add-Member -Name "xcopy-msbuild" -Value "16.8.0-preview3" -MemberType NoteProperty
+ $GlobalJson.tools | Add-Member -Name "xcopy-msbuild" -Value "16.10.0-preview2" -MemberType NoteProperty
}
if ($GlobalJson.tools."xcopy-msbuild".Trim() -ine "none") {
$xcopyMSBuildToolsFolder = InitializeXCopyMSBuild $GlobalJson.tools."xcopy-msbuild" -install $true
@@ -78,7 +78,7 @@ try {
$taskProject = GetSdkTaskProject $task
if (!(Test-Path $taskProject)) {
- Write-PipelineTelemetryError -Category 'Build' -Message "Unknown task: $task" -ForegroundColor Red
+ Write-PipelineTelemetryError -Category 'Build' -Message "Unknown task: $task"
ExitWithExitCode 1
}
diff --git a/eng/common/sdl/configure-sdl-tool.ps1 b/eng/common/sdl/configure-sdl-tool.ps1
new file mode 100644
index 0000000000..4999c30708
--- /dev/null
+++ b/eng/common/sdl/configure-sdl-tool.ps1
@@ -0,0 +1,109 @@
+Param(
+ [string] $GuardianCliLocation,
+ [string] $WorkingDirectory,
+ [string] $TargetDirectory,
+ [string] $GdnFolder,
+ # The list of Guardian tools to configure. For each object in the array:
+ # - If the item is a [hashtable], it must contain these entries:
+ # - Name = The tool name as Guardian knows it.
+ # - Scenario = (Optional) Scenario-specific name for this configuration entry. It must be unique
+ # among all tool entries with the same Name.
+ # - Args = (Optional) Array of Guardian tool configuration args, like '@("Target > C:\temp")'
+ # - If the item is a [string] $v, it is treated as '@{ Name="$v" }'
+ [object[]] $ToolsList,
+ [string] $GuardianLoggerLevel='Standard',
+ # Optional: Additional params to add to any tool using CredScan.
+ [string[]] $CrScanAdditionalRunConfigParams,
+ # Optional: Additional params to add to any tool using PoliCheck.
+ [string[]] $PoliCheckAdditionalRunConfigParams
+)
+
+$ErrorActionPreference = 'Stop'
+Set-StrictMode -Version 2.0
+$disableConfigureToolsetImport = $true
+$global:LASTEXITCODE = 0
+
+try {
+ # `tools.ps1` checks $ci to perform some actions. Since the SDL
+ # scripts don't necessarily execute in the same agent that run the
+ # build.ps1/sh script this variable isn't automatically set.
+ $ci = $true
+ . $PSScriptRoot\..\tools.ps1
+
+ # Normalize tools list: all in [hashtable] form with defined values for each key.
+ $ToolsList = $ToolsList |
+ ForEach-Object {
+ if ($_ -is [string]) {
+ $_ = @{ Name = $_ }
+ }
+
+ if (-not ($_['Scenario'])) { $_.Scenario = "" }
+ if (-not ($_['Args'])) { $_.Args = @() }
+ $_
+ }
+
+ Write-Host "List of tools to configure:"
+ $ToolsList | ForEach-Object { $_ | Out-String | Write-Host }
+
+ # We store config files in the r directory of .gdn
+ $gdnConfigPath = Join-Path $GdnFolder 'r'
+ $ValidPath = Test-Path $GuardianCliLocation
+
+ if ($ValidPath -eq $False)
+ {
+ Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "Invalid Guardian CLI Location."
+ ExitWithExitCode 1
+ }
+
+ foreach ($tool in $ToolsList) {
+ # Put together the name and scenario to make a unique key.
+ $toolConfigName = $tool.Name
+ if ($tool.Scenario) {
+ $toolConfigName += "_" + $tool.Scenario
+ }
+
+ Write-Host "=== Configuring $toolConfigName..."
+
+ $gdnConfigFile = Join-Path $gdnConfigPath "$toolConfigName-configure.gdnconfig"
+
+ # For some tools, add default and automatic args.
+ if ($tool.Name -eq 'credscan') {
+ if ($targetDirectory) {
+ $tool.Args += "TargetDirectory < $TargetDirectory"
+ }
+ $tool.Args += "OutputType < pre"
+ $tool.Args += $CrScanAdditionalRunConfigParams
+ } elseif ($tool.Name -eq 'policheck') {
+ if ($targetDirectory) {
+ $tool.Args += "Target < $TargetDirectory"
+ }
+ $tool.Args += $PoliCheckAdditionalRunConfigParams
+ }
+
+ # Create variable pointing to the args array directly so we can use splat syntax later.
+ $toolArgs = $tool.Args
+
+ # Configure the tool. If args array is provided or the current tool has some default arguments
+ # defined, add "--args" and splat each element on the end. Arg format is "{Arg id} < {Value}",
+ # one per parameter. Doc page for "guardian configure":
+ # https://dev.azure.com/securitytools/SecurityIntegration/_wiki/wikis/Guardian/1395/configure
+ Exec-BlockVerbosely {
+ & $GuardianCliLocation configure `
+ --working-directory $WorkingDirectory `
+ --tool $tool.Name `
+ --output-path $gdnConfigFile `
+ --logger-level $GuardianLoggerLevel `
+ --noninteractive `
+ --force `
+ $(if ($toolArgs) { "--args" }) @toolArgs
+ Exit-IfNZEC "Sdl"
+ }
+
+ Write-Host "Created '$toolConfigName' configuration file: $gdnConfigFile"
+ }
+}
+catch {
+ Write-Host $_.ScriptStackTrace
+ Write-PipelineTelemetryError -Force -Category 'Sdl' -Message $_
+ ExitWithExitCode 1
+}
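
For reference, a tools list that the normalization step above accepts might look like the sketch below: plain strings expand to @{ Name = '<tool>' }, while hashtable entries may add a Scenario and Args in the documented "{Arg id} < {Value}" form. The scenario name and argument here are placeholders, not real Guardian settings:

$exampleToolsList = @(
    'policheck',
    @{
        Name     = 'credscan'
        Scenario = 'artifacts'
        Args     = @('ExampleArgId < ExampleValue')
    }
)
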
diff --git a/eng/common/sdl/execute-all-sdl-tools.ps1 b/eng/common/sdl/execute-all-sdl-tools.ps1
index 81b729f74a..1157151f48 100644
--- a/eng/common/sdl/execute-all-sdl-tools.ps1
+++ b/eng/common/sdl/execute-all-sdl-tools.ps1
@@ -7,8 +7,17 @@ Param(
[string] $SourceDirectory=$env:BUILD_SOURCESDIRECTORY, # Required: the directory where source files are located
[string] $ArtifactsDirectory = (Join-Path $env:BUILD_ARTIFACTSTAGINGDIRECTORY ('artifacts')), # Required: the directory where build artifacts are located
[string] $AzureDevOpsAccessToken, # Required: access token for dnceng; should be provided via KeyVault
- [string[]] $SourceToolsList, # Optional: list of SDL tools to run on source code
- [string[]] $ArtifactToolsList, # Optional: list of SDL tools to run on built artifacts
+
+ # Optional: list of SDL tools to run on source code. See 'configure-sdl-tool.ps1' for tools list
+ # format.
+ [object[]] $SourceToolsList,
+ # Optional: list of SDL tools to run on built artifacts. See 'configure-sdl-tool.ps1' for tools
+ # list format.
+ [object[]] $ArtifactToolsList,
+ # Optional: list of SDL tools to run without automatically specifying a target directory. See
+ # 'configure-sdl-tool.ps1' for tools list format.
+ [object[]] $CustomToolsList,
+
[bool] $TsaPublish=$False, # Optional: true will publish results to TSA; only set to true after onboarding to TSA; TSA is the automated framework used to upload test results as bugs.
[string] $TsaBranchName=$env:BUILD_SOURCEBRANCH, # Optional: required for TSA publish; defaults to $(Build.SourceBranchName); TSA is the automated framework used to upload test results as bugs.
[string] $TsaRepositoryName=$env:BUILD_REPOSITORY_NAME, # Optional: TSA repository name; will be generated automatically if not submitted; TSA is the automated framework used to upload test results as bugs.
@@ -32,7 +41,7 @@ try {
$ErrorActionPreference = 'Stop'
Set-StrictMode -Version 2.0
$disableConfigureToolsetImport = $true
- $LASTEXITCODE = 0
+ $global:LASTEXITCODE = 0
# `tools.ps1` checks $ci to perform some actions. Since the SDL
# scripts don't necessarily execute in the same agent that run the
@@ -63,13 +72,16 @@ try {
ExitWithExitCode 1
}
- & $(Join-Path $PSScriptRoot 'init-sdl.ps1') -GuardianCliLocation $guardianCliLocation -Repository $RepoName -BranchName $BranchName -WorkingDirectory $workingDirectory -AzureDevOpsAccessToken $AzureDevOpsAccessToken -GuardianLoggerLevel $GuardianLoggerLevel
+ Exec-BlockVerbosely {
+ & $(Join-Path $PSScriptRoot 'init-sdl.ps1') -GuardianCliLocation $guardianCliLocation -Repository $RepoName -BranchName $BranchName -WorkingDirectory $workingDirectory -AzureDevOpsAccessToken $AzureDevOpsAccessToken -GuardianLoggerLevel $GuardianLoggerLevel
+ }
$gdnFolder = Join-Path $workingDirectory '.gdn'
if ($TsaOnboard) {
if ($TsaCodebaseName -and $TsaNotificationEmail -and $TsaCodebaseAdmin -and $TsaBugAreaPath) {
- Write-Host "$guardianCliLocation tsa-onboard --codebase-name `"$TsaCodebaseName`" --notification-alias `"$TsaNotificationEmail`" --codebase-admin `"$TsaCodebaseAdmin`" --instance-url `"$TsaInstanceUrl`" --project-name `"$TsaProjectName`" --area-path `"$TsaBugAreaPath`" --iteration-path `"$TsaIterationPath`" --working-directory $workingDirectory --logger-level $GuardianLoggerLevel"
- & $guardianCliLocation tsa-onboard --codebase-name "$TsaCodebaseName" --notification-alias "$TsaNotificationEmail" --codebase-admin "$TsaCodebaseAdmin" --instance-url "$TsaInstanceUrl" --project-name "$TsaProjectName" --area-path "$TsaBugAreaPath" --iteration-path "$TsaIterationPath" --working-directory $workingDirectory --logger-level $GuardianLoggerLevel
+ Exec-BlockVerbosely {
+ & $guardianCliLocation tsa-onboard --codebase-name "$TsaCodebaseName" --notification-alias "$TsaNotificationEmail" --codebase-admin "$TsaCodebaseAdmin" --instance-url "$TsaInstanceUrl" --project-name "$TsaProjectName" --area-path "$TsaBugAreaPath" --iteration-path "$TsaIterationPath" --working-directory $workingDirectory --logger-level $GuardianLoggerLevel
+ }
if ($LASTEXITCODE -ne 0) {
Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "Guardian tsa-onboard failed with exit code $LASTEXITCODE."
ExitWithExitCode $LASTEXITCODE
@@ -80,11 +92,41 @@ try {
}
}
- if ($ArtifactToolsList -and $ArtifactToolsList.Count -gt 0) {
- & $(Join-Path $PSScriptRoot 'run-sdl.ps1') -GuardianCliLocation $guardianCliLocation -WorkingDirectory $workingDirectory -TargetDirectory $ArtifactsDirectory -GdnFolder $gdnFolder -ToolsList $ArtifactToolsList -AzureDevOpsAccessToken $AzureDevOpsAccessToken -UpdateBaseline $UpdateBaseline -GuardianLoggerLevel $GuardianLoggerLevel -CrScanAdditionalRunConfigParams $CrScanAdditionalRunConfigParams -PoliCheckAdditionalRunConfigParams $PoliCheckAdditionalRunConfigParams
+ # Configure a list of tools with a default target directory. Populates the ".gdn/r" directory.
+ function Configure-ToolsList([object[]] $tools, [string] $targetDirectory) {
+ if ($tools -and $tools.Count -gt 0) {
+ Exec-BlockVerbosely {
+ & $(Join-Path $PSScriptRoot 'configure-sdl-tool.ps1') `
+ -GuardianCliLocation $guardianCliLocation `
+ -WorkingDirectory $workingDirectory `
+ -TargetDirectory $targetDirectory `
+ -GdnFolder $gdnFolder `
+ -ToolsList $tools `
+ -AzureDevOpsAccessToken $AzureDevOpsAccessToken `
+ -GuardianLoggerLevel $GuardianLoggerLevel `
+ -CrScanAdditionalRunConfigParams $CrScanAdditionalRunConfigParams `
+ -PoliCheckAdditionalRunConfigParams $PoliCheckAdditionalRunConfigParams
+ if ($BreakOnFailure) {
+ Exit-IfNZEC "Sdl"
+ }
+ }
+ }
}
- if ($SourceToolsList -and $SourceToolsList.Count -gt 0) {
- & $(Join-Path $PSScriptRoot 'run-sdl.ps1') -GuardianCliLocation $guardianCliLocation -WorkingDirectory $workingDirectory -TargetDirectory $SourceDirectory -GdnFolder $gdnFolder -ToolsList $SourceToolsList -AzureDevOpsAccessToken $AzureDevOpsAccessToken -UpdateBaseline $UpdateBaseline -GuardianLoggerLevel $GuardianLoggerLevel -CrScanAdditionalRunConfigParams $CrScanAdditionalRunConfigParams -PoliCheckAdditionalRunConfigParams $PoliCheckAdditionalRunConfigParams
+
+ # Configure Artifact and Source tools with default Target directories.
+ Configure-ToolsList $ArtifactToolsList $ArtifactsDirectory
+ Configure-ToolsList $SourceToolsList $SourceDirectory
+ # Configure custom tools with no default Target directory.
+ Configure-ToolsList $CustomToolsList $null
+
+ # At this point, all tools are configured in the ".gdn" directory. Run them all in a single call.
+ # (If we used "run" multiple times, each run would overwrite data from earlier runs.)
+ Exec-BlockVerbosely {
+ & $(Join-Path $PSScriptRoot 'run-sdl.ps1') `
+ -GuardianCliLocation $guardianCliLocation `
+ -WorkingDirectory $workingDirectory `
+ -UpdateBaseline $UpdateBaseline `
+ -GdnFolder $gdnFolder
}
if ($TsaPublish) {
@@ -92,8 +134,9 @@ try {
if (-not $TsaRepositoryName) {
$TsaRepositoryName = "$($Repository)-$($BranchName)"
}
- Write-Host "$guardianCliLocation tsa-publish --all-tools --repository-name `"$TsaRepositoryName`" --branch-name `"$TsaBranchName`" --build-number `"$BuildNumber`" --codebase-name `"$TsaCodebaseName`" --notification-alias `"$TsaNotificationEmail`" --codebase-admin `"$TsaCodebaseAdmin`" --instance-url `"$TsaInstanceUrl`" --project-name `"$TsaProjectName`" --area-path `"$TsaBugAreaPath`" --iteration-path `"$TsaIterationPath`" --working-directory $workingDirectory --logger-level $GuardianLoggerLevel"
- & $guardianCliLocation tsa-publish --all-tools --repository-name "$TsaRepositoryName" --branch-name "$TsaBranchName" --build-number "$BuildNumber" --onboard $True --codebase-name "$TsaCodebaseName" --notification-alias "$TsaNotificationEmail" --codebase-admin "$TsaCodebaseAdmin" --instance-url "$TsaInstanceUrl" --project-name "$TsaProjectName" --area-path "$TsaBugAreaPath" --iteration-path "$TsaIterationPath" --working-directory $workingDirectory --logger-level $GuardianLoggerLevel
+ Exec-BlockVerbosely {
+ & $guardianCliLocation tsa-publish --all-tools --repository-name "$TsaRepositoryName" --branch-name "$TsaBranchName" --build-number "$BuildNumber" --onboard $True --codebase-name "$TsaCodebaseName" --notification-alias "$TsaNotificationEmail" --codebase-admin "$TsaCodebaseAdmin" --instance-url "$TsaInstanceUrl" --project-name "$TsaProjectName" --area-path "$TsaBugAreaPath" --iteration-path "$TsaIterationPath" --working-directory $workingDirectory --logger-level $GuardianLoggerLevel
+ }
if ($LASTEXITCODE -ne 0) {
Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "Guardian tsa-publish failed with exit code $LASTEXITCODE."
ExitWithExitCode $LASTEXITCODE
@@ -106,7 +149,11 @@ try {
if ($BreakOnFailure) {
Write-Host "Failing the build in case of breaking results..."
- & $guardianCliLocation break
+ Exec-BlockVerbosely {
+ & $guardianCliLocation break --working-directory $workingDirectory --logger-level $GuardianLoggerLevel
+ }
+ } else {
+ Write-Host "Letting the build pass even if there were breaking results..."
}
}
catch {
diff --git a/eng/common/sdl/extract-artifact-archives.ps1 b/eng/common/sdl/extract-artifact-archives.ps1
new file mode 100644
index 0000000000..68da4fbf25
--- /dev/null
+++ b/eng/common/sdl/extract-artifact-archives.ps1
@@ -0,0 +1,63 @@
+# This script looks for each archive file in a directory and extracts it into the target directory.
+# For example, the file "$InputPath/bin.tar.gz" extracts to "$ExtractPath/bin.tar.gz.extracted/**".
+# Uses the "tar" utility added to Windows 10 / Windows 2019 that supports tar.gz and zip.
+param(
+ # Full path to directory where archives are stored.
+ [Parameter(Mandatory=$true)][string] $InputPath,
+ # Full path to directory to extract archives into. May be the same as $InputPath.
+ [Parameter(Mandatory=$true)][string] $ExtractPath
+)
+
+$ErrorActionPreference = 'Stop'
+Set-StrictMode -Version 2.0
+
+$disableConfigureToolsetImport = $true
+
+try {
+ # `tools.ps1` checks $ci to perform some actions. Since the SDL
+ # scripts don't necessarily execute in the same agent that run the
+ # build.ps1/sh script this variable isn't automatically set.
+ $ci = $true
+ . $PSScriptRoot\..\tools.ps1
+
+ Measure-Command {
+ $jobs = @()
+
+ # Find archive files for non-Windows and Windows builds.
+ $archiveFiles = @(
+ Get-ChildItem (Join-Path $InputPath "*.tar.gz")
+ Get-ChildItem (Join-Path $InputPath "*.zip")
+ )
+
+ foreach ($targzFile in $archiveFiles) {
+ $jobs += Start-Job -ScriptBlock {
+ $file = $using:targzFile
+ $fileName = [System.IO.Path]::GetFileName($file)
+ $extractDir = Join-Path $using:ExtractPath "$fileName.extracted"
+
+ New-Item $extractDir -ItemType Directory -Force | Out-Null
+
+ Write-Host "Extracting '$file' to '$extractDir'..."
+
+ # Pipe errors to stdout to prevent PowerShell detecting them and quitting the job early.
+ # This type of quit skips the catch, so we wouldn't be able to tell which file triggered the
+ # error. Save output so it can be stored in the exception string along with context.
+ $output = tar -xf $file -C $extractDir 2>&1
+ # Handle NZEC manually rather than using Exit-IfNZEC: we are in a background job, so we
+ # don't have access to the outer scope.
+ if ($LASTEXITCODE -ne 0) {
+ throw "Error extracting '$file': non-zero exit code ($LASTEXITCODE). Output: '$output'"
+ }
+
+ Write-Host "Extracted to $extractDir"
+ }
+ }
+
+ Receive-Job $jobs -Wait
+ }
+}
+catch {
+ Write-Host $_
+ Write-PipelineTelemetryError -Force -Category 'Sdl' -Message $_
+ ExitWithExitCode 1
+}
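
A hedged usage sketch for the new archive-extraction helper; both paths are placeholders. Each archive found under -InputPath is extracted into a '<archive name>.extracted' directory beneath -ExtractPath:

.\eng\common\sdl\extract-artifact-archives.ps1 `
    -InputPath 'C:\a\_work\artifacts' `
    -ExtractPath 'C:\a\_work\artifacts'
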
diff --git a/eng/common/sdl/init-sdl.ps1 b/eng/common/sdl/init-sdl.ps1
index 1fe9271193..3ac1d92b37 100644
--- a/eng/common/sdl/init-sdl.ps1
+++ b/eng/common/sdl/init-sdl.ps1
@@ -10,7 +10,7 @@ Param(
$ErrorActionPreference = 'Stop'
Set-StrictMode -Version 2.0
$disableConfigureToolsetImport = $true
-$LASTEXITCODE = 0
+$global:LASTEXITCODE = 0
# `tools.ps1` checks $ci to perform some actions. Since the SDL
# scripts don't necessarily execute in the same agent that run the
diff --git a/eng/common/sdl/run-sdl.ps1 b/eng/common/sdl/run-sdl.ps1
index fe95ab35aa..2eac8c78f1 100644
--- a/eng/common/sdl/run-sdl.ps1
+++ b/eng/common/sdl/run-sdl.ps1
@@ -1,19 +1,15 @@
Param(
[string] $GuardianCliLocation,
[string] $WorkingDirectory,
- [string] $TargetDirectory,
[string] $GdnFolder,
- [string[]] $ToolsList,
[string] $UpdateBaseline,
- [string] $GuardianLoggerLevel='Standard',
- [string[]] $CrScanAdditionalRunConfigParams,
- [string[]] $PoliCheckAdditionalRunConfigParams
+ [string] $GuardianLoggerLevel='Standard'
)
$ErrorActionPreference = 'Stop'
Set-StrictMode -Version 2.0
$disableConfigureToolsetImport = $true
-$LASTEXITCODE = 0
+$global:LASTEXITCODE = 0
try {
# `tools.ps1` checks $ci to perform some actions. Since the SDL
@@ -23,7 +19,6 @@ try {
. $PSScriptRoot\..\tools.ps1
# We store config files in the r directory of .gdn
- Write-Host $ToolsList
$gdnConfigPath = Join-Path $GdnFolder 'r'
$ValidPath = Test-Path $GuardianCliLocation
@@ -33,37 +28,18 @@ try {
ExitWithExitCode 1
}
- $configParam = @('--config')
-
- foreach ($tool in $ToolsList) {
- $gdnConfigFile = Join-Path $gdnConfigPath "$tool-configure.gdnconfig"
- Write-Host $tool
- # We have to manually configure tools that run on source to look at the source directory only
- if ($tool -eq 'credscan') {
- Write-Host "$GuardianCliLocation configure --working-directory $WorkingDirectory --tool $tool --output-path $gdnConfigFile --logger-level $GuardianLoggerLevel --noninteractive --force --args `" TargetDirectory < $TargetDirectory `" `" OutputType < pre `" $(If ($CrScanAdditionalRunConfigParams) {$CrScanAdditionalRunConfigParams})"
- & $GuardianCliLocation configure --working-directory $WorkingDirectory --tool $tool --output-path $gdnConfigFile --logger-level $GuardianLoggerLevel --noninteractive --force --args " TargetDirectory < $TargetDirectory " "OutputType < pre" $(If ($CrScanAdditionalRunConfigParams) {$CrScanAdditionalRunConfigParams})
- if ($LASTEXITCODE -ne 0) {
- Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "Guardian configure for $tool failed with exit code $LASTEXITCODE."
- ExitWithExitCode $LASTEXITCODE
- }
- }
- if ($tool -eq 'policheck') {
- Write-Host "$GuardianCliLocation configure --working-directory $WorkingDirectory --tool $tool --output-path $gdnConfigFile --logger-level $GuardianLoggerLevel --noninteractive --force --args `" Target < $TargetDirectory `" $(If ($PoliCheckAdditionalRunConfigParams) {$PoliCheckAdditionalRunConfigParams})"
- & $GuardianCliLocation configure --working-directory $WorkingDirectory --tool $tool --output-path $gdnConfigFile --logger-level $GuardianLoggerLevel --noninteractive --force --args " Target < $TargetDirectory " $(If ($PoliCheckAdditionalRunConfigParams) {$PoliCheckAdditionalRunConfigParams})
- if ($LASTEXITCODE -ne 0) {
- Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "Guardian configure for $tool failed with exit code $LASTEXITCODE."
- ExitWithExitCode $LASTEXITCODE
- }
- }
-
- $configParam+=$gdnConfigFile
- }
-
- Write-Host "$GuardianCliLocation run --working-directory $WorkingDirectory --baseline mainbaseline --update-baseline $UpdateBaseline --logger-level $GuardianLoggerLevel $configParam"
- & $GuardianCliLocation run --working-directory $WorkingDirectory --tool $tool --baseline mainbaseline --update-baseline $UpdateBaseline --logger-level $GuardianLoggerLevel $configParam
- if ($LASTEXITCODE -ne 0) {
- Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "Guardian run for $ToolsList using $configParam failed with exit code $LASTEXITCODE."
- ExitWithExitCode $LASTEXITCODE
+ $gdnConfigFiles = Get-ChildItem $gdnConfigPath -Recurse -Include '*.gdnconfig'
+ Write-Host "Discovered Guardian config files:"
+ $gdnConfigFiles | Out-String | Write-Host
+
+ Exec-BlockVerbosely {
+ & $GuardianCliLocation run `
+ --working-directory $WorkingDirectory `
+ --baseline mainbaseline `
+ --update-baseline $UpdateBaseline `
+ --logger-level $GuardianLoggerLevel `
+ --config @gdnConfigFiles
+ Exit-IfNZEC "Sdl"
}
}
catch {
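
The rewritten run step no longer re-configures tools per run; it discovers every .gdnconfig file under the .gdn/r folder and passes the whole set to a single Guardian run via array splatting. A minimal sketch of that discovery-and-splat shape, where the folder path is a placeholder and 'guardian' stands in for $GuardianCliLocation:

$gdnConfigFiles = Get-ChildItem 'C:\w\.gdn\r' -Recurse -Include '*.gdnconfig'
& guardian run `
    --working-directory 'C:\w' `
    --baseline mainbaseline `
    --update-baseline 'false' `
    --logger-level Standard `
    --config @gdnConfigFiles
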
diff --git a/eng/common/templates/job/execute-sdl.yml b/eng/common/templates/job/execute-sdl.yml
index 4a32181fd8..69eb67849d 100644
--- a/eng/common/templates/job/execute-sdl.yml
+++ b/eng/common/templates/job/execute-sdl.yml
@@ -2,17 +2,41 @@ parameters:
enable: 'false' # Whether the SDL validation job should execute or not
overrideParameters: '' # Optional: to override values for parameters.
additionalParameters: '' # Optional: parameters that need user specific values eg: '-SourceToolsList @("abc","def") -ArtifactToolsList @("ghi","jkl")'
+ # Optional: if specified, restore and use this version of Guardian instead of the default.
+ overrideGuardianVersion: ''
+ # Optional: if true, publish the '.gdn' folder as a pipeline artifact. This can help with in-depth
+ # diagnosis of problems with specific tool configurations.
+ publishGuardianDirectoryToPipeline: false
+ # The script to run to execute all SDL tools. Use this if you want to use a script to define SDL
+ # parameters rather than relying on YAML. It may be better to use a local script, because you can
+ # reproduce results locally without piecing together a command based on the YAML.
+ executeAllSdlToolsScript: 'eng/common/sdl/execute-all-sdl-tools.ps1'
# There is some sort of bug (has been reported) in Azure DevOps where if this parameter is named
# 'continueOnError', the parameter value is not correctly picked up.
# This can also be remedied by the caller (post-build.yml) if it does not use a nested parameter
sdlContinueOnError: false # optional: determines whether to continue the build if the step errors;
- downloadArtifacts: true # optional: determines if the artifacts should be dowloaded
+ # optional: determines if build artifacts should be downloaded.
+ downloadArtifacts: true
+ # optional: determines if this job should search the directory of downloaded artifacts for
+ # 'tar.gz' and 'zip' archive files and extract them before running SDL validation tasks.
+ extractArchiveArtifacts: false
dependsOn: '' # Optional: dependencies of the job
artifactNames: '' # Optional: patterns supplied to DownloadBuildArtifacts
# Usage:
# artifactNames:
# - 'BlobArtifacts'
# - 'Artifacts_Windows_NT_Release'
+ # Optional: download a list of pipeline artifacts. 'downloadArtifacts' controls build artifacts,
+ # not pipeline artifacts, so doesn't affect the use of this parameter.
+ pipelineArtifactNames: []
+ # Optional: location and ID of the AzDO build that the build/pipeline artifacts should be
+ # downloaded from. By default, uses runtime expressions to decide based on the variables set by
+ # the 'setupMaestroVars' dependency. Overriding this parameter is necessary if SDL tasks are
+ # running without Maestro++/BAR involved, or to download artifacts from a specific existing build
+ # to iterate quickly on SDL changes.
+ AzDOProjectName: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOProjectName'] ]
+ AzDOPipelineId: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOPipelineId'] ]
+ AzDOBuildId: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOBuildId'] ]
jobs:
- job: Run_SDL
@@ -22,16 +46,29 @@ jobs:
variables:
- group: DotNet-VSTS-Bot
- name: AzDOProjectName
- value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOProjectName'] ]
+ value: ${{ parameters.AzDOProjectName }}
- name: AzDOPipelineId
- value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOPipelineId'] ]
+ value: ${{ parameters.AzDOPipelineId }}
- name: AzDOBuildId
- value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOBuildId'] ]
+ value: ${{ parameters.AzDOBuildId }}
+ # The Guardian version specified in 'eng/common/sdl/packages.config'. This value must be kept in
+ # sync with the packages.config file.
+ - name: DefaultGuardianVersion
+ value: 0.53.3
+ - name: GuardianVersion
+ value: ${{ coalesce(parameters.overrideGuardianVersion, '$(DefaultGuardianVersion)') }}
+ - name: GuardianPackagesConfigFile
+ value: $(Build.SourcesDirectory)\eng\common\sdl\packages.config
pool:
- name: Hosted VS2017
+ # To extract archives (.tar.gz, .zip), we need access to "tar", added in Windows 10/2019.
+ ${{ if eq(parameters.extractArchiveArtifacts, 'false') }}:
+ name: Hosted VS2017
+ ${{ if ne(parameters.extractArchiveArtifacts, 'false') }}:
+ vmImage: windows-2019
steps:
- checkout: self
clean: true
+
- ${{ if ne(parameters.downloadArtifacts, 'false')}}:
- ${{ if ne(parameters.artifactNames, '') }}:
- ${{ each artifactName in parameters.artifactNames }}:
@@ -59,16 +96,51 @@ jobs:
itemPattern: "**"
downloadPath: $(Build.ArtifactStagingDirectory)\artifacts
checkDownloadedFiles: true
+
+ - ${{ each artifactName in parameters.pipelineArtifactNames }}:
+ - task: DownloadPipelineArtifact@2
+ displayName: Download Pipeline Artifacts
+ inputs:
+ buildType: specific
+ buildVersionToDownload: specific
+ project: $(AzDOProjectName)
+ pipeline: $(AzDOPipelineId)
+ buildId: $(AzDOBuildId)
+ artifactName: ${{ artifactName }}
+ downloadPath: $(Build.ArtifactStagingDirectory)\artifacts
+ checkDownloadedFiles: true
+
- powershell: eng/common/sdl/extract-artifact-packages.ps1
-InputPath $(Build.ArtifactStagingDirectory)\artifacts\BlobArtifacts
-ExtractPath $(Build.ArtifactStagingDirectory)\artifacts\BlobArtifacts
displayName: Extract Blob Artifacts
continueOnError: ${{ parameters.sdlContinueOnError }}
+
- powershell: eng/common/sdl/extract-artifact-packages.ps1
-InputPath $(Build.ArtifactStagingDirectory)\artifacts\PackageArtifacts
-ExtractPath $(Build.ArtifactStagingDirectory)\artifacts\PackageArtifacts
displayName: Extract Package Artifacts
continueOnError: ${{ parameters.sdlContinueOnError }}
+
+ - ${{ if ne(parameters.extractArchiveArtifacts, 'false') }}:
+ - powershell: eng/common/sdl/extract-artifact-archives.ps1
+ -InputPath $(Build.ArtifactStagingDirectory)\artifacts
+ -ExtractPath $(Build.ArtifactStagingDirectory)\artifacts
+ displayName: Extract Archive Artifacts
+ continueOnError: ${{ parameters.sdlContinueOnError }}
+
+ - ${{ if ne(parameters.overrideGuardianVersion, '') }}:
+ - powershell: |
+ $content = Get-Content $(GuardianPackagesConfigFile)
+
+ Write-Host "packages.config content was:`n$content"
+
+ $content = $content.Replace('$(DefaultGuardianVersion)', '$(GuardianVersion)')
+ $content | Set-Content $(GuardianPackagesConfigFile)
+
+ Write-Host "packages.config content updated to:`n$content"
+ displayName: Use overridden Guardian version ${{ parameters.overrideGuardianVersion }}
+
- task: NuGetToolInstaller@1
displayName: 'Install NuGet.exe'
- task: NuGetCommand@2
@@ -79,15 +151,35 @@ jobs:
nugetConfigPath: $(Build.SourcesDirectory)\eng\common\sdl\NuGet.config
externalFeedCredentials: GuardianConnect
restoreDirectory: $(Build.SourcesDirectory)\.packages
+
- ${{ if ne(parameters.overrideParameters, '') }}:
- - powershell: eng/common/sdl/execute-all-sdl-tools.ps1 ${{ parameters.overrideParameters }}
+ - powershell: ${{ parameters.executeAllSdlToolsScript }} ${{ parameters.overrideParameters }}
displayName: Execute SDL
continueOnError: ${{ parameters.sdlContinueOnError }}
- ${{ if eq(parameters.overrideParameters, '') }}:
- - powershell: eng/common/sdl/execute-all-sdl-tools.ps1
- -GuardianPackageName Microsoft.Guardian.Cli.0.53.3
+ - powershell: ${{ parameters.executeAllSdlToolsScript }}
+ -GuardianPackageName Microsoft.Guardian.Cli.$(GuardianVersion)
-NugetPackageDirectory $(Build.SourcesDirectory)\.packages
-AzureDevOpsAccessToken $(dn-bot-dotnet-build-rw-code-rw)
${{ parameters.additionalParameters }}
displayName: Execute SDL
continueOnError: ${{ parameters.sdlContinueOnError }}
+
+ - ${{ if ne(parameters.publishGuardianDirectoryToPipeline, 'false') }}:
+ # We want to publish the Guardian results and configuration for easy diagnosis. However, the
+ # '.gdn' dir is a mix of configuration, results, extracted dependencies, and Guardian default
+ # tooling files. Some of these files are large and aren't useful during an investigation, so
+ # exclude them by simply deleting them before publishing. (As of writing, there is no documented
+ # way to selectively exclude a dir from the pipeline artifact publish task.)
+ - task: DeleteFiles@1
+ displayName: Delete Guardian dependencies to avoid uploading
+ inputs:
+ SourceFolder: $(Agent.BuildDirectory)/.gdn
+ Contents: |
+ c
+ i
+ condition: succeededOrFailed()
+ - publish: $(Agent.BuildDirectory)/.gdn
+ artifact: GuardianConfiguration
+ displayName: Publish GuardianConfiguration
+ condition: succeededOrFailed()
diff --git a/eng/common/templates/job/onelocbuild.yml b/eng/common/templates/job/onelocbuild.yml
index 6abc041cd1..e8bc77d2eb 100644
--- a/eng/common/templates/job/onelocbuild.yml
+++ b/eng/common/templates/job/onelocbuild.yml
@@ -11,11 +11,17 @@ parameters:
SourcesDirectory: $(Build.SourcesDirectory)
CreatePr: true
+ AutoCompletePr: false
+ UseLfLineEndings: true
UseCheckedInLocProjectJson: false
LanguageSet: VS_Main_Languages
LclSource: lclFilesInRepo
LclPackageId: ''
RepoType: gitHub
+ GitHubOrg: dotnet
+ MirrorRepo: ''
+ MirrorBranch: main
+ condition: ''
jobs:
- job: OneLocBuild
@@ -43,20 +49,32 @@ jobs:
filePath: $(Build.SourcesDirectory)/eng/common/generate-locproject.ps1
arguments: $(_GenerateLocProjectArguments)
displayName: Generate LocProject.json
+ condition: ${{ parameters.condition }}
- task: OneLocBuild@2
displayName: OneLocBuild
+ env:
+ SYSTEM_ACCESSTOKEN: $(System.AccessToken)
inputs:
- locProj: Localize/LocProject.json
+ locProj: eng/Localize/LocProject.json
outDir: $(Build.ArtifactStagingDirectory)
lclSource: ${{ parameters.LclSource }}
lclPackageId: ${{ parameters.LclPackageId }}
isCreatePrSelected: ${{ parameters.CreatePr }}
- repoType: ${{ parameters.RepoType }}
- gitHubPatVariable: "${{ parameters.GithubPat }}"
+ ${{ if eq(parameters.CreatePr, true) }}:
+ isAutoCompletePrSelected: ${{ parameters.AutoCompletePr }}
+ isUseLfLineEndingsSelected: ${{ parameters.UseLfLineEndings }}
packageSourceAuth: patAuth
patVariable: ${{ parameters.CeapexPat }}
- condition: always()
+ ${{ if eq(parameters.RepoType, 'gitHub') }}:
+ repoType: ${{ parameters.RepoType }}
+ gitHubPatVariable: "${{ parameters.GithubPat }}"
+ ${{ if ne(parameters.MirrorRepo, '') }}:
+ isMirrorRepoSelected: true
+ gitHubOrganization: ${{ parameters.GitHubOrg }}
+ mirrorRepo: ${{ parameters.MirrorRepo }}
+ mirrorBranch: ${{ parameters.MirrorBranch }}
+ condition: ${{ parameters.condition }}
- task: PublishBuildArtifacts@1
displayName: Publish Localization Files
@@ -64,12 +82,12 @@ jobs:
PathtoPublish: '$(Build.ArtifactStagingDirectory)/loc'
PublishLocation: Container
ArtifactName: Loc
- condition: always()
+ condition: ${{ parameters.condition }}
- task: PublishBuildArtifacts@1
displayName: Publish LocProject.json
inputs:
- PathtoPublish: '$(Build.SourcesDirectory)/Localize/'
+ PathtoPublish: '$(Build.SourcesDirectory)/eng/Localize/'
PublishLocation: Container
ArtifactName: Loc
- condition: always()
\ No newline at end of file
+ condition: ${{ parameters.condition }}
\ No newline at end of file
diff --git a/eng/common/templates/job/performance.yml b/eng/common/templates/job/performance.yml
deleted file mode 100644
index f877fd7a89..0000000000
--- a/eng/common/templates/job/performance.yml
+++ /dev/null
@@ -1,95 +0,0 @@
-parameters:
- steps: [] # optional -- any additional steps that need to happen before pulling down the performance repo and sending the performance benchmarks to helix (ie building your repo)
- variables: [] # optional -- list of additional variables to send to the template
- jobName: '' # required -- job name
- displayName: '' # optional -- display name for the job. Will use jobName if not passed
- pool: '' # required -- name of the Build pool
- container: '' # required -- name of the container
- osGroup: '' # required -- operating system for the job
- extraSetupParameters: '' # optional -- extra arguments to pass to the setup script
- frameworks: ['netcoreapp3.0'] # optional -- list of frameworks to run against
- continueOnError: 'false' # optional -- determines whether to continue the build if the step errors
- dependsOn: '' # optional -- dependencies of the job
- timeoutInMinutes: 320 # optional -- timeout for the job
- enableTelemetry: false # optional -- enable for telemetry
-
-jobs:
-- template: ../jobs/jobs.yml
- parameters:
- dependsOn: ${{ parameters.dependsOn }}
- enableTelemetry: ${{ parameters.enableTelemetry }}
- enablePublishBuildArtifacts: true
- continueOnError: ${{ parameters.continueOnError }}
-
- jobs:
- - job: '${{ parameters.jobName }}'
-
- ${{ if ne(parameters.displayName, '') }}:
- displayName: '${{ parameters.displayName }}'
- ${{ if eq(parameters.displayName, '') }}:
- displayName: '${{ parameters.jobName }}'
-
- timeoutInMinutes: ${{ parameters.timeoutInMinutes }}
-
- variables:
-
- - ${{ each variable in parameters.variables }}:
- - ${{ if ne(variable.name, '') }}:
- - name: ${{ variable.name }}
- value: ${{ variable.value }}
- - ${{ if ne(variable.group, '') }}:
- - group: ${{ variable.group }}
-
- - IsInternal: ''
- - HelixApiAccessToken: ''
- - HelixPreCommand: ''
-
- - ${{ if and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- - ${{ if eq( parameters.osGroup, 'Windows_NT') }}:
- - HelixPreCommand: 'set "PERFLAB_UPLOAD_TOKEN=$(PerfCommandUploadToken)"'
- - IsInternal: -Internal
- - ${{ if ne(parameters.osGroup, 'Windows_NT') }}:
- - HelixPreCommand: 'export PERFLAB_UPLOAD_TOKEN="$(PerfCommandUploadTokenLinux)"'
- - IsInternal: --internal
-
- - group: DotNet-HelixApi-Access
- - group: dotnet-benchview
-
- workspace:
- clean: all
- pool:
- ${{ parameters.pool }}
- container: ${{ parameters.container }}
- strategy:
- matrix:
- ${{ each framework in parameters.frameworks }}:
- ${{ framework }}:
- _Framework: ${{ framework }}
- steps:
- - checkout: self
- clean: true
- # Run all of the steps to setup repo
- - ${{ each step in parameters.steps }}:
- - ${{ step }}
- - powershell: $(Build.SourcesDirectory)\eng\common\performance\performance-setup.ps1 $(IsInternal) -Framework $(_Framework) ${{ parameters.extraSetupParameters }}
- displayName: Performance Setup (Windows)
- condition: and(succeeded(), eq(variables['Agent.Os'], 'Windows_NT'))
- continueOnError: ${{ parameters.continueOnError }}
- - script: $(Build.SourcesDirectory)/eng/common/performance/performance-setup.sh $(IsInternal) --framework $(_Framework) ${{ parameters.extraSetupParameters }}
- displayName: Performance Setup (Unix)
- condition: and(succeeded(), ne(variables['Agent.Os'], 'Windows_NT'))
- continueOnError: ${{ parameters.continueOnError }}
- - script: $(Python) $(PerformanceDirectory)/scripts/ci_setup.py $(SetupArguments)
- displayName: Run ci setup script
- # Run perf testing in helix
- - template: /eng/common/templates/steps/perf-send-to-helix.yml
- parameters:
- HelixSource: '$(HelixSourcePrefix)/$(Build.Repository.Name)/$(Build.SourceBranch)' # sources must start with pr/, official/, prodcon/, or agent/
- HelixType: 'test/performance/$(Kind)/$(_Framework)/$(Architecture)'
- HelixAccessToken: $(HelixApiAccessToken)
- HelixTargetQueues: $(Queue)
- HelixPreCommands: $(HelixPreCommand)
- Creator: $(Creator)
- WorkItemTimeout: 4:00 # 4 hours
- WorkItemDirectory: '$(WorkItemDirectory)' # WorkItemDirectory can not be empty, so we send it some docs to keep it happy
- CorrelationPayloadDirectory: '$(PayloadDirectory)' # it gets checked out to a folder with shorter path than WorkItemDirectory so we can avoid file name too long exceptions
\ No newline at end of file
diff --git a/eng/common/templates/job/publish-build-assets.yml b/eng/common/templates/job/publish-build-assets.yml
index 3b9e2524ff..fe9dfdf720 100644
--- a/eng/common/templates/job/publish-build-assets.yml
+++ b/eng/common/templates/job/publish-build-assets.yml
@@ -94,7 +94,31 @@ jobs:
PathtoPublish: '$(Build.StagingDirectory)/ReleaseConfigs.txt'
PublishLocation: Container
ArtifactName: ReleaseConfigs
-
+
+ - task: powershell@2
+ displayName: Check if SymbolPublishingExclusionsFile.txt exists
+ inputs:
+ targetType: inline
+ script: |
+ $symbolExclusionfile = "$(Build.SourcesDirectory)/eng/SymbolPublishingExclusionsFile.txt"
+ if(Test-Path -Path $symbolExclusionfile)
+ {
+ Write-Host "SymbolExclusionFile exists"
+ Write-Host "##vso[task.setvariable variable=SymbolExclusionFile]true"
+ }
+ else{
+ Write-Host "Symbols Exclusion file does not exists"
+ Write-Host "##vso[task.setvariable variable=SymbolExclusionFile]false"
+ }
+
+ - task: PublishBuildArtifacts@1
+ displayName: Publish SymbolPublishingExclusionsFile Artifact
+ condition: eq(variables['SymbolExclusionFile'], 'true')
+ inputs:
+ PathtoPublish: '$(Build.SourcesDirectory)/eng/SymbolPublishingExclusionsFile.txt'
+ PublishLocation: Container
+ ArtifactName: ReleaseConfigs
+
- ${{ if eq(parameters.enablePublishBuildArtifacts, 'true') }}:
- template: /eng/common/templates/steps/publish-logs.yml
parameters:
diff --git a/eng/common/templates/job/source-build.yml b/eng/common/templates/job/source-build.yml
index aad4146492..5023d36dcb 100644
--- a/eng/common/templates/job/source-build.yml
+++ b/eng/common/templates/job/source-build.yml
@@ -15,6 +15,9 @@ parameters:
# nonPortable: false
# Enables non-portable mode. This means a more specific RID (e.g. fedora.32-x64 rather than
# linux-x64), and compiling against distro-provided packages rather than portable ones.
+ # skipPublishValidation: false
+ # Disables publishing validation. By default, a check is performed to ensure no packages are
+ # published by source-build.
# container: ''
# A container to use. Runs in docker.
# pool: {}
diff --git a/eng/common/templates/job/source-index-stage1.yml b/eng/common/templates/job/source-index-stage1.yml
index c002a2b1b0..b58d42364b 100644
--- a/eng/common/templates/job/source-index-stage1.yml
+++ b/eng/common/templates/job/source-index-stage1.yml
@@ -1,15 +1,19 @@
parameters:
runAsPublic: false
- sourceIndexPackageVersion: 1.0.1-20210225.1
+ sourceIndexPackageVersion: 1.0.1-20210614.1
sourceIndexPackageSource: https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json
sourceIndexBuildCommand: powershell -NoLogo -NoProfile -ExecutionPolicy Bypass -Command "eng/common/build.ps1 -restore -build -binarylog -ci"
preSteps: []
binlogPath: artifacts/log/Debug/Build.binlog
pool:
vmImage: vs2017-win2016
+ condition: ''
+ dependsOn: ''
jobs:
- job: SourceIndexStage1
+ dependsOn: ${{ parameters.dependsOn }}
+ condition: ${{ parameters.condition }}
variables:
- name: SourceIndexPackageVersion
value: ${{ parameters.sourceIndexPackageVersion }}
diff --git a/eng/common/templates/phases/base.yml b/eng/common/templates/phases/base.yml
deleted file mode 100644
index 0123cf43b1..0000000000
--- a/eng/common/templates/phases/base.yml
+++ /dev/null
@@ -1,130 +0,0 @@
-parameters:
- # Optional: Clean sources before building
- clean: true
-
- # Optional: Git fetch depth
- fetchDepth: ''
-
- # Optional: name of the phase (not specifying phase name may cause name collisions)
- name: ''
- # Optional: display name of the phase
- displayName: ''
-
- # Optional: condition for the job to run
- condition: ''
-
- # Optional: dependencies of the phase
- dependsOn: ''
-
- # Required: A defined YAML queue
- queue: {}
-
- # Required: build steps
- steps: []
-
- # Optional: variables
- variables: {}
-
- # Optional: should run as a public build even in the internal project
- # if 'true', the build won't run any of the internal only steps, even if it is running in non-public projects.
- runAsPublic: false
-
- ## Telemetry variables
-
- # Optional: enable sending telemetry
- # if 'true', these "variables" must be specified in the variables object or as part of the queue matrix
- # _HelixBuildConfig - differentiate between Debug, Release, other
- # _HelixSource - Example: build/product
- # _HelixType - Example: official/dotnet/arcade/$(Build.SourceBranch)
- enableTelemetry: false
-
- # Optional: Enable installing Microbuild plugin
- # if 'true', these "variables" must be specified in the variables object or as part of the queue matrix
- # _TeamName - the name of your team
- # _SignType - 'test' or 'real'
- enableMicrobuild: false
-
-# Internal resources (telemetry, microbuild) can only be accessed from non-public projects,
-# and some (Microbuild) should only be applied to non-PR cases for internal builds.
-
-phases:
-- phase: ${{ parameters.name }}
-
- ${{ if ne(parameters.displayName, '') }}:
- displayName: ${{ parameters.displayName }}
-
- ${{ if ne(parameters.condition, '') }}:
- condition: ${{ parameters.condition }}
-
- ${{ if ne(parameters.dependsOn, '') }}:
- dependsOn: ${{ parameters.dependsOn }}
-
- queue: ${{ parameters.queue }}
-
- ${{ if ne(parameters.variables, '') }}:
- variables:
- ${{ insert }}: ${{ parameters.variables }}
-
- steps:
- - checkout: self
- clean: ${{ parameters.clean }}
- ${{ if ne(parameters.fetchDepth, '') }}:
- fetchDepth: ${{ parameters.fetchDepth }}
-
- - ${{ if eq(parameters.enableTelemetry, 'true') }}:
- - template: /eng/common/templates/steps/telemetry-start.yml
- parameters:
- buildConfig: $(_HelixBuildConfig)
- helixSource: $(_HelixSource)
- helixType: $(_HelixType)
- runAsPublic: ${{ parameters.runAsPublic }}
-
- - ${{ if eq(parameters.enableMicrobuild, 'true') }}:
- # Internal only resource, and Microbuild signing shouldn't be applied to PRs.
- - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- - task: MicroBuildSigningPlugin@2
- displayName: Install MicroBuild plugin
- inputs:
- signType: $(_SignType)
- zipSources: false
- feedSource: https://dnceng.pkgs.visualstudio.com/_packaging/MicroBuildToolset/nuget/v3/index.json
-
- env:
- TeamName: $(_TeamName)
- continueOnError: false
- condition: and(succeeded(), in(variables['_SignType'], 'real', 'test'), eq(variables['Agent.Os'], 'Windows_NT'))
-
- # Run provided build steps
- - ${{ parameters.steps }}
-
- - ${{ if eq(parameters.enableMicrobuild, 'true') }}:
- # Internal only resources
- - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- - task: MicroBuildCleanup@1
- displayName: Execute Microbuild cleanup tasks
- condition: and(always(), in(variables['_SignType'], 'real', 'test'), eq(variables['Agent.Os'], 'Windows_NT'))
- env:
- TeamName: $(_TeamName)
-
- - ${{ if eq(parameters.enableTelemetry, 'true') }}:
- - template: /eng/common/templates/steps/telemetry-end.yml
- parameters:
- helixSource: $(_HelixSource)
- helixType: $(_HelixType)
-
- - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- - task: CopyFiles@2
- displayName: Gather Asset Manifests
- inputs:
- SourceFolder: '$(Build.SourcesDirectory)/artifacts/log/$(_BuildConfig)/AssetManifest'
- TargetFolder: '$(Build.StagingDirectory)/AssetManifests'
- continueOnError: false
- condition: and(succeeded(), eq(variables['_DotNetPublishToBlobFeed'], 'true'))
- - task: PublishBuildArtifacts@1
- displayName: Push Asset Manifests
- inputs:
- PathtoPublish: '$(Build.StagingDirectory)/AssetManifests'
- PublishLocation: Container
- ArtifactName: AssetManifests
- continueOnError: false
- condition: and(succeeded(), eq(variables['_DotNetPublishToBlobFeed'], 'true'))
diff --git a/eng/common/templates/phases/publish-build-assets.yml b/eng/common/templates/phases/publish-build-assets.yml
deleted file mode 100644
index 4e51e472e2..0000000000
--- a/eng/common/templates/phases/publish-build-assets.yml
+++ /dev/null
@@ -1,52 +0,0 @@
-parameters:
- dependsOn: ''
- queue: {}
- configuration: 'Debug'
- condition: succeeded()
- continueOnError: false
- runAsPublic: false
- publishUsingPipelines: false
-phases:
- - phase: Asset_Registry_Publish
- displayName: Publish to Build Asset Registry
- dependsOn: ${{ parameters.dependsOn }}
- queue: ${{ parameters.queue }}
- variables:
- _BuildConfig: ${{ parameters.configuration }}
- steps:
- - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- - task: DownloadBuildArtifacts@0
- displayName: Download artifact
- inputs:
- artifactName: AssetManifests
- downloadPath: '$(Build.StagingDirectory)/Download'
- checkDownloadedFiles: true
- condition: ${{ parameters.condition }}
- continueOnError: ${{ parameters.continueOnError }}
- - task: AzureKeyVault@1
- inputs:
- azureSubscription: 'DotNet-Engineering-Services_KeyVault'
- KeyVaultName: EngKeyVault
- SecretsFilter: 'MaestroAccessToken'
- condition: ${{ parameters.condition }}
- continueOnError: ${{ parameters.continueOnError }}
- - task: PowerShell@2
- displayName: Publish Build Assets
- inputs:
- filePath: eng\common\sdk-task.ps1
- arguments: -task PublishBuildAssets -restore -msbuildEngine dotnet
- /p:ManifestsPath='$(Build.StagingDirectory)/Download/AssetManifests'
- /p:BuildAssetRegistryToken=$(MaestroAccessToken)
- /p:MaestroApiEndpoint=https://maestro-prod.westus2.cloudapp.azure.com
- /p:PublishUsingPipelines=${{ parameters.publishUsingPipelines }}
- /p:Configuration=$(_BuildConfig)
- condition: ${{ parameters.condition }}
- continueOnError: ${{ parameters.continueOnError }}
- - task: PublishBuildArtifacts@1
- displayName: Publish Logs to VSTS
- inputs:
- PathtoPublish: '$(Build.SourcesDirectory)/artifacts/log/$(_BuildConfig)'
- PublishLocation: Container
- ArtifactName: $(Agent.Os)_Asset_Registry_Publish
- continueOnError: true
- condition: always()
diff --git a/eng/common/templates/post-build/channels/generic-internal-channel.yml b/eng/common/templates/post-build/channels/generic-internal-channel.yml
index 58fa9a35b8..8990dfc8c8 100644
--- a/eng/common/templates/post-build/channels/generic-internal-channel.yml
+++ b/eng/common/templates/post-build/channels/generic-internal-channel.yml
@@ -40,6 +40,9 @@ stages:
pool:
vmImage: 'windows-2019'
steps:
+ - script: echo "##vso[task.logissue type=warning]Going forward, v2 Arcade publishing is no longer supported. Please read https://github.com/dotnet/arcade/blob/main/Documentation/CorePackages/Publishing.md for details, then contact dnceng if you have further questions."
+ displayName: Warn about v2 Arcade Publishing Usage
+
# This is necessary whenever we want to publish/restore to an AzDO private feed
- task: NuGetAuthenticate@0
displayName: 'Authenticate to AzDO Feeds'
@@ -110,6 +113,9 @@ stages:
pool:
vmImage: 'windows-2019'
steps:
+ - script: echo "##vso[task.logissue type=warning]Going forward, v2 Arcade publishing is no longer supported. Please read https://github.com/dotnet/arcade/blob/main/Documentation/CorePackages/Publishing.md for details, then contact dnceng if you have further questions."
+ displayName: Warn about v2 Arcade Publishing Usage
+
- task: DownloadBuildArtifacts@0
displayName: Download Build Assets
continueOnError: true
diff --git a/eng/common/templates/post-build/channels/generic-public-channel.yml b/eng/common/templates/post-build/channels/generic-public-channel.yml
index b50c0b3bdb..3220c6a4f9 100644
--- a/eng/common/templates/post-build/channels/generic-public-channel.yml
+++ b/eng/common/templates/post-build/channels/generic-public-channel.yml
@@ -42,6 +42,9 @@ stages:
pool:
vmImage: 'windows-2019'
steps:
+ - script: echo "##vso[task.logissue type=warning]Going forward, v2 Arcade publishing is no longer supported. Please read https://github.com/dotnet/arcade/blob/main/Documentation/CorePackages/Publishing.md for details, then contact dnceng if you have further questions."
+ displayName: Warn about v2 Arcade Publishing Usage
+
- task: DownloadBuildArtifacts@0
displayName: Download Build Assets
continueOnError: true
@@ -109,6 +112,9 @@ stages:
pool:
vmImage: 'windows-2019'
steps:
+ - script: echo "##vso[task.logissue type=warning]Going forward, v2 Arcade publishing is no longer supported. Please read https://github.com/dotnet/arcade/blob/main/Documentation/CorePackages/Publishing.md for details, then contact dnceng if you have further questions."
+ displayName: Warn about v2 Arcade Publishing Usage
+
- task: DownloadBuildArtifacts@0
displayName: Download Build Assets
continueOnError: true
diff --git a/eng/common/templates/steps/perf-send-to-helix.yml b/eng/common/templates/steps/perf-send-to-helix.yml
deleted file mode 100644
index 0fb786fabf..0000000000
--- a/eng/common/templates/steps/perf-send-to-helix.yml
+++ /dev/null
@@ -1,50 +0,0 @@
-# Please remember to update the documentation if you make changes to these parameters!
-parameters:
- ProjectFile: '' # required -- project file that specifies the helix workitems
- HelixSource: 'pr/default' # required -- sources must start with pr/, official/, prodcon/, or agent/
- HelixType: 'tests/default/' # required -- Helix telemetry which identifies what type of data this is; should include "test" for clarity and must end in '/'
- HelixBuild: $(Build.BuildNumber) # required -- the build number Helix will use to identify this -- automatically set to the AzDO build number
- HelixTargetQueues: '' # required -- semicolon delimited list of Helix queues to test on; see https://helix.dot.net/ for a list of queues
- HelixAccessToken: '' # required -- access token to make Helix API requests; should be provided by the appropriate variable group
- HelixPreCommands: '' # optional -- commands to run before Helix work item execution
- HelixPostCommands: '' # optional -- commands to run after Helix work item execution
- WorkItemDirectory: '' # optional -- a payload directory to zip up and send to Helix; requires WorkItemCommand; incompatible with XUnitProjects
- CorrelationPayloadDirectory: '' # optional -- a directory to zip up and send to Helix as a correlation payload
- IncludeDotNetCli: false # optional -- true will download a version of the .NET CLI onto the Helix machine as a correlation payload; requires DotNetCliPackageType and DotNetCliVersion
- DotNetCliPackageType: '' # optional -- either 'sdk', 'runtime' or 'aspnetcore-runtime'; determines whether the sdk or runtime will be sent to Helix; see https://raw.githubusercontent.com/dotnet/core/main/release-notes/releases.json
- DotNetCliVersion: '' # optional -- version of the CLI to send to Helix; based on this: https://raw.githubusercontent.com/dotnet/core/main/release-notes/releases.json
- EnableXUnitReporter: false # optional -- true enables XUnit result reporting to Mission Control
- WaitForWorkItemCompletion: true # optional -- true will make the task wait until work items have been completed and fail the build if work items fail. False is "fire and forget."
- Creator: '' # optional -- if the build is external, use this to specify who is sending the job
- DisplayNamePrefix: 'Send job to Helix' # optional -- rename the beginning of the displayName of the steps in AzDO
- condition: succeeded() # optional -- condition for step to execute; defaults to succeeded()
- continueOnError: false # optional -- determines whether to continue the build if the step errors; defaults to false
- osGroup: '' # required -- operating system for the job
-
-
-steps:
-- template: /eng/pipelines/common/templates/runtimes/send-to-helix-inner-step.yml
- parameters:
- osGroup: ${{ parameters.osGroup }}
- sendParams: $(Build.SourcesDirectory)/eng/common/performance/${{ parameters.ProjectFile }} /restore /t:Test /bl:$(Build.SourcesDirectory)/artifacts/log/$(_BuildConfig)/SendToHelix.binlog
- displayName: ${{ parameters.DisplayNamePrefix }}
- condition: ${{ parameters.condition }}
- shouldContinueOnError: ${{ parameters.continueOnError }}
- environment:
- BuildConfig: $(_BuildConfig)
- HelixSource: ${{ parameters.HelixSource }}
- HelixType: ${{ parameters.HelixType }}
- HelixBuild: ${{ parameters.HelixBuild }}
- HelixTargetQueues: ${{ parameters.HelixTargetQueues }}
- HelixAccessToken: ${{ parameters.HelixAccessToken }}
- HelixPreCommands: ${{ parameters.HelixPreCommands }}
- HelixPostCommands: ${{ parameters.HelixPostCommands }}
- WorkItemDirectory: ${{ parameters.WorkItemDirectory }}
- CorrelationPayloadDirectory: ${{ parameters.CorrelationPayloadDirectory }}
- IncludeDotNetCli: ${{ parameters.IncludeDotNetCli }}
- DotNetCliPackageType: ${{ parameters.DotNetCliPackageType }}
- DotNetCliVersion: ${{ parameters.DotNetCliVersion }}
- EnableXUnitReporter: ${{ parameters.EnableXUnitReporter }}
- WaitForWorkItemCompletion: ${{ parameters.WaitForWorkItemCompletion }}
- Creator: ${{ parameters.Creator }}
- SYSTEM_ACCESSTOKEN: $(System.AccessToken)
diff --git a/eng/common/templates/steps/source-build.yml b/eng/common/templates/steps/source-build.yml
index 8e336b7d16..ba40dc82f1 100644
--- a/eng/common/templates/steps/source-build.yml
+++ b/eng/common/templates/steps/source-build.yml
@@ -18,6 +18,35 @@ steps:
set -x
df -h
+ # If building in the internal project, the artifact feeds variable may be available (it is usually defined only when needed).
+ # In that case, call the feed setup script to add internal feeds corresponding to the public ones.
+ # In addition, pass an msbuild argument that copies the work in progress (WIP) from the repo into the inner source-build
+ # location. This is needed because SetupNugetSources.sh alters the current NuGet.config file, and those changes must be
+ # preserved in the inner build.
+ internalRestoreArgs=
+ if [ '$(dn-bot-dnceng-artifact-feeds-rw)' != '$''(dn-bot-dnceng-artifact-feeds-rw)' ]; then
+ # Temporarily work around https://github.com/dotnet/arcade/issues/7709
+ chmod +x $(Build.SourcesDirectory)/eng/common/SetupNugetSources.sh
+ $(Build.SourcesDirectory)/eng/common/SetupNugetSources.sh $(Build.SourcesDirectory)/NuGet.config $(dn-bot-dnceng-artifact-feeds-rw)
+ internalRestoreArgs='/p:CopyWipIntoInnerSourceBuildRepo=true'
+
+ # The 'Copy WIP' feature of source build uses git stash to apply changes from the original repo.
+ # This only works if there is a username/email configured, which won't be the case in most CI runs.
+ git config --get user.email
+ if [ $? -ne 0 ]; then
+ git config user.email dn-bot@microsoft.com
+ git config user.name dn-bot
+ fi
+ fi
+
+ # If building in the internal project, the internal storage variable may be available (it is usually defined only when needed).
+ # In that case, add variables that allow downloading internal runtimes when the specified versions are not found
+ # in the default public locations.
+ internalRuntimeDownloadArgs=
+ if [ '$(dotnetclimsrc-read-sas-token-base64)' != '$''(dotnetclimsrc-read-sas-token-base64)' ]; then
+ internalRuntimeDownloadArgs='/p:DotNetRuntimeSourceFeed=https://dotnetclimsrc.blob.core.windows.net/dotnet /p:DotNetRuntimeSourceFeedKey=$(dotnetclimsrc-read-sas-token-base64) --runtimesourcefeed https://dotnetclimsrc.blob.core.windows.net/dotnet --runtimesourcefeedkey $(dotnetclimsrc-read-sas-token-base64)'
+ fi
+
buildConfig=Release
# Check if AzDO substitutes in a build config from a variable, and use it if so.
if [ '$(_BuildConfig)' != '$''(_BuildConfig)' ]; then
@@ -34,10 +63,17 @@ steps:
targetRidArgs='/p:TargetRid=${{ parameters.platform.targetRID }}'
fi
+ publishArgs=
+ if [ '${{ parameters.platform.skipPublishValidation }}' != 'true' ]; then
+ publishArgs='--publish'
+ fi
+
${{ coalesce(parameters.platform.buildScript, './build.sh') }} --ci \
--configuration $buildConfig \
- --restore --build --pack --publish \
+ --restore --build --pack $publishArgs -bl \
$officialBuildArgs \
+ $internalRuntimeDownloadArgs \
+ $internalRestoreArgs \
$targetRidArgs \
/p:SourceBuildNonPortable=${{ parameters.platform.nonPortable }} \
/p:ArcadeBuildFromSource=true
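The guard used above, [ '$(dn-bot-dnceng-artifact-feeds-rw)' != '$''(dn-bot-dnceng-artifact-feeds-rw)' ], works because Azure Pipelines macro-substitutes $(variable) tokens in the script before it runs: if the variable is defined, the left-hand side becomes its value, while the right-hand side is split so the macro processor never sees a complete token and the literal text survives. The checks against $(_BuildConfig) and $(dotnetclimsrc-read-sas-token-base64) follow the same pattern. A rough PowerShell analogue for a pipeline script step (the variable name is a placeholder):

  # Sketch: detect whether an Azure Pipelines variable was substituted into this script.
  # If 'MyVariable' is undefined, the literal text "$(MyVariable)" is left in place, so it equals
  # the string assembled at run time (which the macro processor cannot substitute).
  $wasSubstituted = '$(MyVariable)' -ne ('$' + '(MyVariable)')
  if ($wasSubstituted) {
      Write-Host 'MyVariable is defined; enabling the internal-feed code path.'
  }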
diff --git a/eng/common/tools.ps1 b/eng/common/tools.ps1
index d52467eea1..5d526c74d5 100644
--- a/eng/common/tools.ps1
+++ b/eng/common/tools.ps1
@@ -42,12 +42,15 @@
[bool]$useInstalledDotNetCli = if (Test-Path variable:useInstalledDotNetCli) { $useInstalledDotNetCli } else { $true }
# Enable repos to use a particular version of the on-line dotnet-install scripts.
-# default URL: https://dot.net/v1/dotnet-install.ps1
+# default URL: https://dotnet.microsoft.com/download/dotnet/scripts/v1/dotnet-install.ps1
[string]$dotnetInstallScriptVersion = if (Test-Path variable:dotnetInstallScriptVersion) { $dotnetInstallScriptVersion } else { 'v1' }
# True to use global NuGet cache instead of restoring packages to repository-local directory.
[bool]$useGlobalNuGetCache = if (Test-Path variable:useGlobalNuGetCache) { $useGlobalNuGetCache } else { !$ci }
+# True to exclude prerelease versions of Visual Studio during the build
+[bool]$excludePrereleaseVS = if (Test-Path variable:excludePrereleaseVS) { $excludePrereleaseVS } else { $false }
+
# An array of names of processes to stop on script exit if prepareMachine is true.
$processesToStopOnExit = if (Test-Path variable:processesToStopOnExit) { $processesToStopOnExit } else { @('msbuild', 'dotnet', 'vbcscompiler') }
@@ -103,6 +106,46 @@ function Exec-Process([string]$command, [string]$commandArgs) {
}
}
+# Take the given block, print it, print what the block probably references from the current set of
+# variables using low-effort string matching, then run the block.
+#
+# This is intended to replace the pattern of manually copy-pasting a command, wrapping it in quotes,
+# and printing it using "Write-Host". The copy-paste method is more readable in build logs, but less
+# maintainable and less reliable. It is easy to make a mistake and modify the command without
+# properly updating the "Write-Host" line, resulting in misleading build logs. The probability of
+# this mistake makes the pattern hard to trust when it shows up in build logs. Finding the bug in
+# existing source code can also be difficult, because the strings are not aligned to each other and
+# the line may be 300+ columns long.
+#
+# By removing the need to maintain two copies of the command, Exec-BlockVerbosely avoids these issues.
+#
+# In Bash (or any POSIX-like shell), "set -x" prints usable verbose output automatically.
+# "Set-PSDebug" appears to be similar at first glance, but unfortunately, it isn't very useful: it
+# doesn't print any info about the variables being used by the command, which is normally the
+# interesting part to diagnose.
+function Exec-BlockVerbosely([scriptblock] $block) {
+ Write-Host "--- Running script block:"
+ $blockString = $block.ToString().Trim()
+ Write-Host $blockString
+
+ Write-Host "--- List of variables that might be used:"
+ # For each variable x in the environment, check the block for a reference to x via simple "$x" or
+ # "@x" syntax. This doesn't detect other ways to reference variables ("${x}" nor "$variable:x",
+ # among others). It only catches what this function was originally written for: simple
+ # command-line commands.
+ $variableTable = Get-Variable |
+ Where-Object {
+ $blockString.Contains("`$$($_.Name)") -or $blockString.Contains("@$($_.Name)")
+ } |
+ Format-Table -AutoSize -HideTableHeaders -Wrap |
+ Out-String
+ Write-Host $variableTable.Trim()
+
+ Write-Host "--- Executing:"
+ & $block
+ Write-Host "--- Done running script block!"
+}
+
# The createSdkLocationFile parameter enables generating a file under the toolset directory
# that records the sdk's location. This is only necessary for cmd --> powershell invocations,
# as dot sourcing isn't possible.
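A short usage sketch of the Exec-BlockVerbosely helper added above (the tool and path are placeholders): the block's text is printed, the in-scope variables it appears to reference are listed, and then the block runs.

  # Hypothetical caller: $project is detected and printed before the block executes.
  $project = 'src\Example\Example.csproj'   # placeholder path
  Exec-BlockVerbosely {
      & dotnet build $project -c Release
  }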
@@ -190,38 +233,42 @@ function InitializeDotNetCli([bool]$install, [bool]$createSdkLocationFile) {
return $global:_DotNetInstallDir = $dotnetRoot
}
+function Retry($downloadBlock, $maxRetries = 5) {
+ $retries = 1
+
+ while($true) {
+ try {
+ & $downloadBlock
+ break
+ }
+ catch {
+ Write-PipelineTelemetryError -Category 'InitializeToolset' -Message $_
+ }
+
+ if (++$retries -le $maxRetries) {
+ $delayInSeconds = [math]::Pow(2, $retries) - 1 # Exponential backoff
+ Write-Host "Retrying. Waiting for $delayInSeconds seconds before next attempt ($retries of $maxRetries)."
+ Start-Sleep -Seconds $delayInSeconds
+ }
+ else {
+ Write-PipelineTelemetryError -Category 'InitializeToolset' -Message "Unable to download file in $maxRetries attempts."
+ break
+ }
+
+ }
+}
+
function GetDotNetInstallScript([string] $dotnetRoot) {
$installScript = Join-Path $dotnetRoot 'dotnet-install.ps1'
if (!(Test-Path $installScript)) {
Create-Directory $dotnetRoot
$ProgressPreference = 'SilentlyContinue' # Don't display the console progress UI - it's a huge perf hit
+ $uri = "https://dotnet.microsoft.com/download/dotnet/scripts/$dotnetInstallScriptVersion/dotnet-install.ps1"
- $maxRetries = 5
- $retries = 1
-
- $uri = "https://dot.net/$dotnetInstallScriptVersion/dotnet-install.ps1"
-
- while($true) {
- try {
- Write-Host "GET $uri"
- Invoke-WebRequest $uri -OutFile $installScript
- break
- }
- catch {
- Write-Host "Failed to download '$uri'"
- Write-Error $_.Exception.Message -ErrorAction Continue
- }
-
- if (++$retries -le $maxRetries) {
- $delayInSeconds = [math]::Pow(2, $retries) - 1 # Exponential backoff
- Write-Host "Retrying. Waiting for $delayInSeconds seconds before next attempt ($retries of $maxRetries)."
- Start-Sleep -Seconds $delayInSeconds
- }
- else {
- throw "Unable to download file in $maxRetries attempts."
- }
-
- }
+ Retry({
+ Write-Host "GET $uri"
+ Invoke-WebRequest $uri -OutFile $installScript
+ })
}
return $installScript
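The Retry helper above waits 2^n - 1 seconds before attempt n, so with the default of five attempts the pauses are roughly 3, 7, 15, and 31 seconds; on the final failure it logs a pipeline telemetry error rather than throwing. A hedged usage sketch (the URL and destination are placeholders):

  # Sketch: wrap a flaky download in Retry with the default backoff.
  $uri = 'https://example.com/some-tool.zip'          # placeholder URL
  $destination = Join-Path $env:TEMP 'some-tool.zip'  # placeholder path
  Retry({
      Write-Host "GET $uri"
      Invoke-WebRequest $uri -OutFile $destination
  })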
@@ -305,8 +352,8 @@ function InitializeVisualStudioMSBuild([bool]$install, [object]$vsRequirements =
# If the version of msbuild is going to be xcopied,
# use this version. Version matches a package here:
- # https://dev.azure.com/dnceng/public/_packaging?_a=package&feed=dotnet-eng&package=RoslynTools.MSBuild&protocolType=NuGet&version=16.8.0-preview3&view=overview
- $defaultXCopyMSBuildVersion = '16.8.0-preview3'
+ # https://dev.azure.com/dnceng/public/_packaging?_a=package&feed=dotnet-eng&package=RoslynTools.MSBuild&protocolType=NuGet&version=16.10.0-preview2&view=overview
+ $defaultXCopyMSBuildVersion = '16.10.0-preview2'
if (!$vsRequirements) { $vsRequirements = $GlobalJson.tools.vs }
$vsMinVersionStr = if ($vsRequirements.version) { $vsRequirements.version } else { $vsMinVersionReqdStr }
@@ -371,7 +418,16 @@ function InitializeVisualStudioMSBuild([bool]$install, [object]$vsRequirements =
}
$msbuildVersionDir = if ([int]$vsMajorVersion -lt 16) { "$vsMajorVersion.0" } else { "Current" }
- return $global:_MSBuildExe = Join-Path $vsInstallDir "MSBuild\$msbuildVersionDir\Bin\msbuild.exe"
+
+ $local:BinFolder = Join-Path $vsInstallDir "MSBuild\$msbuildVersionDir\Bin"
+ $local:Prefer64bit = if (Get-Member -InputObject $vsRequirements -Name 'Prefer64bit') { $vsRequirements.Prefer64bit } else { $false }
+ if ($local:Prefer64bit -and (Test-Path(Join-Path $local:BinFolder "amd64"))) {
+ $global:_MSBuildExe = Join-Path $local:BinFolder "amd64\msbuild.exe"
+ } else {
+ $global:_MSBuildExe = Join-Path $local:BinFolder "msbuild.exe"
+ }
+
+ return $global:_MSBuildExe
}
function InitializeVisualStudioEnvironmentVariables([string] $vsInstallDir, [string] $vsMajorVersion) {
@@ -400,9 +456,13 @@ function InitializeXCopyMSBuild([string]$packageVersion, [bool]$install) {
}
Create-Directory $packageDir
+
Write-Host "Downloading $packageName $packageVersion"
$ProgressPreference = 'SilentlyContinue' # Don't display the console progress UI - it's a huge perf hit
- Invoke-WebRequest "https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng/nuget/v3/flat2/$packageName/$packageVersion/$packageName.$packageVersion.nupkg" -OutFile $packagePath
+ Retry({
+ Invoke-WebRequest "https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng/nuget/v3/flat2/$packageName/$packageVersion/$packageName.$packageVersion.nupkg" -OutFile $packagePath
+ })
+
Unzip $packagePath $packageDir
}
@@ -439,31 +499,17 @@ function LocateVisualStudio([object]$vsRequirements = $null){
if (!(Test-Path $vsWhereExe)) {
Create-Directory $vsWhereDir
Write-Host 'Downloading vswhere'
- $maxRetries = 5
- $retries = 1
-
- while($true) {
- try {
- Invoke-WebRequest "https://netcorenativeassets.blob.core.windows.net/resource-packages/external/windows/vswhere/$vswhereVersion/vswhere.exe" -OutFile $vswhereExe
- break
- }
- catch{
- Write-PipelineTelemetryError -Category 'InitializeToolset' -Message $_
- }
-
- if (++$retries -le $maxRetries) {
- $delayInSeconds = [math]::Pow(2, $retries) - 1 # Exponential backoff
- Write-Host "Retrying. Waiting for $delayInSeconds seconds before next attempt ($retries of $maxRetries)."
- Start-Sleep -Seconds $delayInSeconds
- }
- else {
- Write-PipelineTelemetryError -Category 'InitializeToolset' -Message "Unable to download file in $maxRetries attempts."
- }
- }
+ Retry({
+ Invoke-WebRequest "https://netcorenativeassets.blob.core.windows.net/resource-packages/external/windows/vswhere/$vswhereVersion/vswhere.exe" -OutFile $vswhereExe
+ })
}
if (!$vsRequirements) { $vsRequirements = $GlobalJson.tools.vs }
- $args = @('-latest', '-prerelease', '-format', 'json', '-requires', 'Microsoft.Component.MSBuild', '-products', '*')
+ $args = @('-latest', '-format', 'json', '-requires', 'Microsoft.Component.MSBuild', '-products', '*')
+
+ if (!$excludePrereleaseVS) {
+ $args += '-prerelease'
+ }
if (Get-Member -InputObject $vsRequirements -Name 'version') {
$args += '-version'
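With the change above, -prerelease is passed to vswhere only when $excludePrereleaseVS is left at its default of $false, so opting in restricts the search to released Visual Studio installs. Roughly, the two resulting invocations look like this (the vswhere path is illustrative, and any -version constraint from global.json is omitted):

  # Illustrative vswhere invocations assembled by LocateVisualStudio.
  $vsWhereExe = '.\.tools\vswhere\2.8.4\vswhere.exe'   # example path
  # default ($excludePrereleaseVS = $false): preview installs are considered
  & $vsWhereExe -latest -prerelease -format json -requires Microsoft.Component.MSBuild -products '*'
  # $excludePrereleaseVS = $true: released installs only
  & $vsWhereExe -latest -format json -requires Microsoft.Component.MSBuild -products '*'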
@@ -489,7 +535,13 @@ function LocateVisualStudio([object]$vsRequirements = $null){
function InitializeBuildTool() {
if (Test-Path variable:global:_BuildTool) {
- return $global:_BuildTool
+ # If the requested msbuild parameters do not match, clear the cached variables.
+ if($global:_BuildTool.Contains('ExcludePrereleaseVS') -and $global:_BuildTool.ExcludePrereleaseVS -ne $excludePrereleaseVS) {
+ Remove-Item variable:global:_BuildTool
+ Remove-Item variable:global:_MSBuildExe
+ } else {
+ return $global:_BuildTool
+ }
}
if (-not $msbuildEngine) {
@@ -517,7 +569,7 @@ function InitializeBuildTool() {
ExitWithExitCode 1
}
- $buildTool = @{ Path = $msbuildPath; Command = ""; Tool = "vs"; Framework = "net472" }
+ $buildTool = @{ Path = $msbuildPath; Command = ""; Tool = "vs"; Framework = "net472"; ExcludePrereleaseVS = $excludePrereleaseVS }
} else {
Write-PipelineTelemetryError -Category 'InitializeToolset' -Message "Unexpected value of -msbuildEngine: '$msbuildEngine'."
ExitWithExitCode 1
@@ -542,7 +594,7 @@ function GetDefaultMSBuildEngine() {
function GetNuGetPackageCachePath() {
if ($env:NUGET_PACKAGES -eq $null) {
- # Use local cache on CI to ensure deterministic build.
+ # Use local cache on CI to ensure deterministic build.
# Avoid using the http cache as workaround for https://github.com/NuGet/Home/issues/3116
# use global cache in dev builds to avoid cost of downloading packages.
# For directory normalization, see also: https://github.com/NuGet/Home/issues/7968
@@ -620,6 +672,17 @@ function ExitWithExitCode([int] $exitCode) {
exit $exitCode
}
+# Check if $LASTEXITCODE is a nonzero exit code (NZEC). If so, print an Azure Pipelines error for
+# diagnostics, then exit the script with that exit code.
+function Exit-IfNZEC([string] $category = "General") {
+ Write-Host "Exit code $LASTEXITCODE"
+ if ($LASTEXITCODE -ne 0) {
+ $message = "Last command failed with exit code $LASTEXITCODE."
+ Write-PipelineTelemetryError -Force -Category $category -Message $message
+ ExitWithExitCode $LASTEXITCODE
+ }
+}
+
function Stop-Processes() {
Write-Host 'Killing running build processes...'
foreach ($processName in $processesToStopOnExit) {
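Exit-IfNZEC above is intended to follow an external command: it inspects $LASTEXITCODE, logs a pipeline telemetry error in the supplied category when it is nonzero, and exits the script with that code. A minimal usage sketch (the command and category name are placeholders):

  # Sketch: fail fast after a native command, attributing the failure to a category.
  & git fetch origin           # any external command that sets $LASTEXITCODE
  Exit-IfNZEC 'Build'          # 'Build' is an illustrative category name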
@@ -699,7 +762,10 @@ function MSBuild-Core() {
}
foreach ($arg in $args) {
- if ($arg -ne $null -and $arg.Trim() -ne "") {
+ if ($null -ne $arg -and $arg.Trim() -ne "") {
+ if ($arg.EndsWith('\')) {
+ $arg = $arg + "\"
+ }
$cmdArgs += " `"$arg`""
}
}
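The trailing-backslash handling above matters because each argument is wrapped in double quotes before being appended to the command line: for a value such as C:\repo\artifacts\, the closing sequence \" would be read by the .NET/MSBuild command-line parsing rules as an escaped quote rather than the end of the argument, so doubling the final backslash keeps the quoting intact. A small sketch of the effect (paths are illustrative):

  # Sketch: why a trailing backslash is doubled before quoting an argument.
  $arg = 'C:\repo\artifacts\'            # ends in a single backslash
  "`"$arg`""                             # "C:\repo\artifacts\"  -> \" reads as an escaped quote
  if ($arg.EndsWith('\')) { $arg += '\' }
  "`"$arg`""                             # "C:\repo\artifacts\\" -> the closing quote survives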
@@ -771,7 +837,7 @@ function Get-Darc($version) {
. $PSScriptRoot\pipeline-logging-functions.ps1
-$RepoRoot = Resolve-Path (Join-Path $PSScriptRoot '..\..')
+$RepoRoot = Resolve-Path (Join-Path $PSScriptRoot '..\..\')
$EngRoot = Resolve-Path (Join-Path $PSScriptRoot '..')
$ArtifactsDir = Join-Path $RepoRoot 'artifacts'
$ToolsetDir = Join-Path $ArtifactsDir 'toolset'
diff --git a/eng/common/tools.sh b/eng/common/tools.sh
index 5fad1846e5..828119be41 100644
--- a/eng/common/tools.sh
+++ b/eng/common/tools.sh
@@ -54,7 +54,7 @@ warn_as_error=${warn_as_error:-true}
use_installed_dotnet_cli=${use_installed_dotnet_cli:-true}
# Enable repos to use a particular version of the on-line dotnet-install scripts.
-# default URL: https://dot.net/v1/dotnet-install.sh
+# default URL: https://dotnet.microsoft.com/download/dotnet/scripts/v1/dotnet-install.sh
dotnetInstallScriptVersion=${dotnetInstallScriptVersion:-'v1'}
# True to use global NuGet cache instead of restoring packages to repository-local directory.
@@ -262,7 +262,7 @@ function with_retries {
function GetDotNetInstallScript {
local root=$1
local install_script="$root/dotnet-install.sh"
- local install_script_url="https://dot.net/$dotnetInstallScriptVersion/dotnet-install.sh"
+ local install_script_url="https://dotnet.microsoft.com/download/dotnet/scripts/$dotnetInstallScriptVersion/dotnet-install.sh"
if [[ ! -a "$install_script" ]]; then
mkdir -p "$root"
@@ -485,13 +485,14 @@ _script_dir=`dirname "$_ResolvePath"`
eng_root=`cd -P "$_script_dir/.." && pwd`
repo_root=`cd -P "$_script_dir/../.." && pwd`
-artifacts_dir="$repo_root/artifacts"
+repo_root="${repo_root}/"
+artifacts_dir="${repo_root}artifacts"
toolset_dir="$artifacts_dir/toolset"
-tools_dir="$repo_root/.tools"
+tools_dir="${repo_root}.tools"
log_dir="$artifacts_dir/log/$configuration"
temp_dir="$artifacts_dir/tmp/$configuration"
-global_json_file="$repo_root/global.json"
+global_json_file="${repo_root}global.json"
# determine if global.json contains a "runtimes" entry
global_json_has_runtimes=false
if command -v jq &> /dev/null; then
@@ -504,7 +505,7 @@ fi
# HOME may not be defined in some scenarios, but it is required by NuGet
if [[ -z $HOME ]]; then
- export HOME="$repo_root/artifacts/.home/"
+ export HOME="${repo_root}artifacts/.home/"
mkdir -p "$HOME"
fi
diff --git a/eng/source-build-patches/0002-Use-package-version-properties-for-reference-version.patch b/eng/source-build-patches/0002-Use-package-version-properties-for-reference-version.patch
index de512e27d7..b72717b0a4 100644
--- a/eng/source-build-patches/0002-Use-package-version-properties-for-reference-version.patch
+++ b/eng/source-build-patches/0002-Use-package-version-properties-for-reference-version.patch
@@ -33,14 +33,14 @@ index 1ae8dd3..aaa2c4a 100644
+++ b/src/System.CommandLine/System.CommandLine.csproj
@@ -19,8 +19,8 @@
-
+
-
-
+
+
-
+
--
2.18.0
diff --git a/eng/source-build-patches/0003-Update-to-netcoreapp3.1-to-avoid-prebuilts.patch b/eng/source-build-patches/0003-Update-to-netcoreapp3.1-to-avoid-prebuilts.patch
index ae461533d8..ea2c44a94e 100644
--- a/eng/source-build-patches/0003-Update-to-netcoreapp3.1-to-avoid-prebuilts.patch
+++ b/eng/source-build-patches/0003-Update-to-netcoreapp3.1-to-avoid-prebuilts.patch
@@ -1,9 +1,9 @@
From b9a21ec1c0a77ef4e1e44840a0526545fea456de Mon Sep 17 00:00:00 2001
From: Davis Goodin
Date: Thu, 29 Oct 2020 01:48:54 -0500
-Subject: [PATCH] Update to net5.0 to avoid prebuilts
+Subject: [PATCH] Update to net6.0 to avoid prebuilts
-net5.0 avoids prebuilts (Microsoft.CSharp) but works with the
+net6.0 avoids prebuilts (Microsoft.CSharp) but works with the
rest of source-build: runtime and roslyn.
Includes a code fix for ref nullability with the new framework.
@@ -24,7 +24,7 @@ index b3a542fd..06167997 100644
true
- netstandard2.0
-+ net5.0
++ net6.0
This package includes the experimental DragonFruit app model for System.CommandLine, which allows you to create a command line application using only a Main method while getting support for complex type binding, error reporting, help, shell completions, and more.
@@ -37,7 +37,7 @@ index 1d00cff2..7342c1c5 100644
true
- netstandard2.0;netstandard2.1
-+ net5.0
++ net6.0
9
This package provides support for using System.CommandLine with Microsoft.Extensions.Hosting.
@@ -50,7 +50,7 @@ index d552286e..84026ebe 100644
true
- netstandard2.0
-+ net5.0
++ net6.0
9
This package provides support for structured command line output rendering. Write code once that renders correctly in multiple output modes, including System.Console, virtual terminal (using ANSI escape sequences), and plain text.
@@ -76,11 +76,11 @@ index aaa2c4a3..5e875a73 100644
true
System.CommandLine
- netstandard2.0
-+ net5.0
- 9
++ net6.0
+ 10.0
enable
This package includes a powerful command line parser and other tools for building command line applications, including:
-@@ -18,9 +18,4 @@
+@@ -25,9 +25,4 @@
@@ -90,6 +90,5 @@ index aaa2c4a3..5e875a73 100644
-
-
---
2.25.2
diff --git a/eng/source-build-patches/0004-Exclude-System.CommandLine.Hosting.patch b/eng/source-build-patches/0004-Exclude-System.CommandLine.Hosting.patch
index 3f86b7caf9..ea5910f9ec 100644
--- a/eng/source-build-patches/0004-Exclude-System.CommandLine.Hosting.patch
+++ b/eng/source-build-patches/0004-Exclude-System.CommandLine.Hosting.patch
@@ -13,7 +13,7 @@ index 7342c1c..e4a97a0 100644
--- a/src/System.CommandLine.Hosting/System.CommandLine.Hosting.csproj
+++ b/src/System.CommandLine.Hosting/System.CommandLine.Hosting.csproj
@@ -5,6 +5,7 @@
- net5.0
+ net6.0
9
This package provides support for using System.CommandLine with Microsoft.Extensions.Hosting.
+ true
@@ -30,6 +30,5 @@ index 7342c1c..e4a97a0 100644
---
2.25.2
diff --git a/eng/source-build-patches/0005-remove-duplicate-attributes.patch b/eng/source-build-patches/0005-remove-duplicate-attributes.patch
index 8113da026b..918b191c24 100644
--- a/eng/source-build-patches/0005-remove-duplicate-attributes.patch
+++ b/eng/source-build-patches/0005-remove-duplicate-attributes.patch
@@ -87,7 +87,7 @@ diff --git a/src/System.CommandLine/System.CommandLine.csproj b/src/System.Comma
index bc666e8..4e52e94 100644
--- a/src/System.CommandLine/System.CommandLine.csproj
+++ b/src/System.CommandLine/System.CommandLine.csproj
-@@ -20,7 +20,9 @@
+@@ -20,7 +22,9 @@
diff --git a/global.json b/global.json
index 8e9411ee92..7b65e1867a 100644
--- a/global.json
+++ b/global.json
@@ -1,12 +1,17 @@
{
+ "sdk": {
+ "version": "6.0.100-preview.6.21355.2",
+ "allowPrerelease": true,
+ "rollForward": "minor"
+ },
"tools": {
- "dotnet": "5.0.100",
+ "dotnet": "6.0.100-preview.6.21355.2",
"vs": {
- "version": "16.8"
+ "version": "16.10"
},
- "xcopy-msbuild": "16.8.0-preview2.1"
+ "xcopy-msbuild": "16.10.0-preview2"
},
"msbuild-sdks": {
- "Microsoft.DotNet.Arcade.Sdk": "6.0.0-beta.21167.3"
+ "Microsoft.DotNet.Arcade.Sdk": "6.0.0-beta.21413.4"
}
-}
+}
\ No newline at end of file
diff --git a/samples/DragonFruit/DragonFruit.csproj b/samples/DragonFruit/DragonFruit.csproj
index 2a9cb34d9c..40a81f2bdd 100644
--- a/samples/DragonFruit/DragonFruit.csproj
+++ b/samples/DragonFruit/DragonFruit.csproj
@@ -2,7 +2,7 @@
Exe
- net5.0
+ net6.0
true
diff --git a/samples/HostingPlayground/HostingPlayground.csproj b/samples/HostingPlayground/HostingPlayground.csproj
index bce0c33b0f..2514608ce3 100644
--- a/samples/HostingPlayground/HostingPlayground.csproj
+++ b/samples/HostingPlayground/HostingPlayground.csproj
@@ -2,7 +2,7 @@
Exe
- net5.0
+ net6.0
true
9
$(NoWarn);CS1591
diff --git a/samples/RenderingPlayground/RenderingPlayground.csproj b/samples/RenderingPlayground/RenderingPlayground.csproj
index 91eb00b8c5..aedc79300d 100644
--- a/samples/RenderingPlayground/RenderingPlayground.csproj
+++ b/samples/RenderingPlayground/RenderingPlayground.csproj
@@ -2,7 +2,7 @@
Exe
- net5.0
+ net6.0
true
9
diff --git a/src/System.CommandLine.Benchmarks/System.CommandLine.Benchmarks.csproj b/src/System.CommandLine.Benchmarks/System.CommandLine.Benchmarks.csproj
index 2975eac115..dba3a32450 100644
--- a/src/System.CommandLine.Benchmarks/System.CommandLine.Benchmarks.csproj
+++ b/src/System.CommandLine.Benchmarks/System.CommandLine.Benchmarks.csproj
@@ -9,8 +9,8 @@
false
- net461;net5.0;
- net5.0;
+ net461;net6.0;
+ net6.0;
9
diff --git a/src/System.CommandLine.DragonFruit.Tests/System.CommandLine.DragonFruit.Tests.csproj b/src/System.CommandLine.DragonFruit.Tests/System.CommandLine.DragonFruit.Tests.csproj
index 8994befbd7..23aa033d61 100644
--- a/src/System.CommandLine.DragonFruit.Tests/System.CommandLine.DragonFruit.Tests.csproj
+++ b/src/System.CommandLine.DragonFruit.Tests/System.CommandLine.DragonFruit.Tests.csproj
@@ -1,6 +1,6 @@
- net5.0
+ net6.0
AutoGeneratedProgram
true
diff --git a/src/System.CommandLine.Hosting.Tests/System.CommandLine.Hosting.Tests.csproj b/src/System.CommandLine.Hosting.Tests/System.CommandLine.Hosting.Tests.csproj
index 34bac83c0f..a4e99105d5 100644
--- a/src/System.CommandLine.Hosting.Tests/System.CommandLine.Hosting.Tests.csproj
+++ b/src/System.CommandLine.Hosting.Tests/System.CommandLine.Hosting.Tests.csproj
@@ -1,7 +1,7 @@
- net5.0
+ net6.0
$(TargetFrameworks);net462
9
false
@@ -18,7 +18,7 @@
-
+
diff --git a/src/System.CommandLine.Rendering.Tests/System.CommandLine.Rendering.Tests.csproj b/src/System.CommandLine.Rendering.Tests/System.CommandLine.Rendering.Tests.csproj
index c445d2df42..447e19088e 100644
--- a/src/System.CommandLine.Rendering.Tests/System.CommandLine.Rendering.Tests.csproj
+++ b/src/System.CommandLine.Rendering.Tests/System.CommandLine.Rendering.Tests.csproj
@@ -1,7 +1,7 @@
- net5.0
+ net6.0
9
false
diff --git a/src/System.CommandLine.Suggest.Tests/EndToEndTestApp/EndToEndTestApp.csproj b/src/System.CommandLine.Suggest.Tests/EndToEndTestApp/EndToEndTestApp.csproj
index 1e277af463..b02804285c 100644
--- a/src/System.CommandLine.Suggest.Tests/EndToEndTestApp/EndToEndTestApp.csproj
+++ b/src/System.CommandLine.Suggest.Tests/EndToEndTestApp/EndToEndTestApp.csproj
@@ -6,7 +6,7 @@
Exe
- net5.0
+ net6.0
9
diff --git a/src/System.CommandLine.Suggest.Tests/dotnet-suggest.Tests.csproj b/src/System.CommandLine.Suggest.Tests/dotnet-suggest.Tests.csproj
index 1eae028185..da6abedf0b 100644
--- a/src/System.CommandLine.Suggest.Tests/dotnet-suggest.Tests.csproj
+++ b/src/System.CommandLine.Suggest.Tests/dotnet-suggest.Tests.csproj
@@ -1,7 +1,7 @@
- net5.0
+ net6.0
diff --git a/src/System.CommandLine.Suggest/dotnet-suggest.csproj b/src/System.CommandLine.Suggest/dotnet-suggest.csproj
index 84e75bd1e3..ae97dfe61f 100644
--- a/src/System.CommandLine.Suggest/dotnet-suggest.csproj
+++ b/src/System.CommandLine.Suggest/dotnet-suggest.csproj
@@ -1,7 +1,7 @@
Exe
- net5.0
+ net6.0
true
true
dotnet-suggest
diff --git a/src/System.CommandLine.Tests/System.CommandLine.Tests.csproj b/src/System.CommandLine.Tests/System.CommandLine.Tests.csproj
index 09c2a2c444..8abb292c39 100644
--- a/src/System.CommandLine.Tests/System.CommandLine.Tests.csproj
+++ b/src/System.CommandLine.Tests/System.CommandLine.Tests.csproj
@@ -1,6 +1,6 @@
- net5.0
+ net6.0
$(TargetFrameworks);net462
9
false
diff --git a/src/System.CommandLine/System.CommandLine.csproj b/src/System.CommandLine/System.CommandLine.csproj
index 9d57b41cf6..f9e2f0b416 100644
--- a/src/System.CommandLine/System.CommandLine.csproj
+++ b/src/System.CommandLine/System.CommandLine.csproj
@@ -4,7 +4,7 @@
true
System.CommandLine
netstandard2.0
- 9
+ 10.0
enable
This package includes a powerful command line parser and other tools for building command line applications, including: