From fbe3b892be0f6e8765ebb674a1546b751c826ba1 Mon Sep 17 00:00:00 2001 From: Juan Hoyos <19413848+hoyosjs@users.noreply.github.com> Date: Thu, 30 Sep 2021 04:05:04 -0700 Subject: [PATCH] Merge changes to release branch (#2629) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * fixed dotnet-trace when output is not redirected (#2448) Co-authored-by: Mikelle Rogers * [main] Update dependencies from dotnet/runtime (#2435) * Fix assumption in SOS about what IXClrDataModule::GetFlags returns. * Update to lldb-included Alpine 3.13 container * Try node image for agent comms * Use container with correct metadata to node path * Remove libintl dependencies * Update dependencies from https://github.com/dotnet/runtime build 20210724.7 Microsoft.NETCore.App.Runtime.win-x64 , VS.Redist.Common.NetCore.SharedFramework.x64.6.0 From Version 6.0.0-preview.7.21361.10 -> To Version 6.0.0-rc.1.21374.7 * Disable testing against dumps on Alpine for now (until the 6.0 createdump change is in) Co-authored-by: dotnet-maestro[bot] Co-authored-by: Mike McLaughlin Co-authored-by: Juan Hoyos Co-authored-by: Juan Sebastian Hoyos Ayala * Update dependencies from https://github.com/dotnet/runtime build 20210725.2 (#2449) [main] Update dependencies from dotnet/runtime * [main] Update dependencies from dotnet/aspnetcore (#2439) [main] Update dependencies from dotnet/aspnetcore * Update dependencies from https://github.com/dotnet/runtime build 20210726.24 (#2454) [main] Update dependencies from dotnet/runtime * Update dependencies from https://github.com/dotnet/aspnetcore build 20210727.2 (#2453) [main] Update dependencies from dotnet/aspnetcore * Revert "Update `ThreadCounts` usage based on a change (#2324)" (#2452) - Depends on https://github.com/dotnet/runtime/pull/56346 - Reverted commit 3d57bee719e6db6b19dce4026bdc7c47b1c3519b from PR https://github.com/dotnet/diagnostics/pull/2324 since the relevant change to `ThreadCounts` was reverted in https://github.com/dotnet/runtime/pull/56346 * Add thread adjustment reason for cooperative blocking (#2455) * Update dependencies from https://github.com/dotnet/runtime build 20210728.2 (#2457) [main] Update dependencies from dotnet/runtime * Update dependencies from https://github.com/dotnet/aspnetcore build 20210728.1 (#2456) [main] Update dependencies from dotnet/aspnetcore * Update alpine image that includes lldb python support and enable tests on 6.0 (#2460) * Update alpine image that includes lldb python support * Enable lldb and dump tests on Alpine * Don't run the bpmd portions of the tests on Alpine * Add requiresCapPtraceContainer:true to Alpine legs * We should check against alloc_allocated instead of heap_segment_allocated for ephemeral segment (#2450) * Update dependencies from https://github.com/dotnet/aspnetcore build 20210729.1 (#2461) [main] Update dependencies from dotnet/aspnetcore * Update dependencies from https://github.com/dotnet/runtime build 20210728.9 (#2462) [main] Update dependencies from dotnet/runtime * Remove old IVTs and add new one for dotnet-monitor. (#2463) * Fix unknown type/methods in core dumps. (#2442) Fixes some of the issues in https://github.com/dotnet/diagnostics/issues/2375 The problem is that the image mapping memory service didn't convert the RVA from the loaded layout calculated from the in-memory module to the file layout (the PEReader with the downloaded image).
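For readers unfamiliar with the two layouts: translating a loaded-layout RVA into a file-layout offset is a walk over the PE section headers. The following is a minimal illustrative sketch using System.Reflection.PortableExecutable, not the actual ImageMappingMemoryService code:

```csharp
// Illustrative only: translate an RVA computed against the loaded (in-memory) image
// layout into a file offset in the file-layout image (e.g. a downloaded PE opened
// with PEReader). Not the actual ImageMappingMemoryService implementation.
using System.Collections.Immutable;
using System.IO;
using System.Reflection.PortableExecutable;

static class RvaMapper
{
    public static int? LoadedRvaToFileOffset(string imagePath, int rva)
    {
        using FileStream stream = File.OpenRead(imagePath);
        using var peReader = new PEReader(stream);

        ImmutableArray<SectionHeader> sections = peReader.PEHeaders.SectionHeaders;
        foreach (SectionHeader section in sections)
        {
            // The loaded layout places a section at VirtualAddress; the file layout
            // stores its raw data at PointerToRawData.
            if (rva >= section.VirtualAddress && rva < section.VirtualAddress + section.VirtualSize)
            {
                int offsetInSection = rva - section.VirtualAddress;
                if (offsetInSection >= section.SizeOfRawData)
                {
                    return null; // falls in the zero-filled tail of the section
                }
                return section.PointerToRawData + offsetInSection;
            }
        }
        return null; // header range or no matching section
    }
}
```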
On Windows, images (native or managed) are always in loaded layout, so return false from IModule.IsFileLayout without calling GetPEInfo() to avoid the recursion that broke getting the info about coreclr.dll. It turns out that the heap dumps generated on Windows don't have the image in-memory. Don't get the module version in GetPEInfo() to determine the layout. Cleanup. Skip relocations that span cache blocks. This happens very rarely and should not affect anything unless we get really, really unlucky. * Update dependencies from https://github.com/dotnet/aspnetcore build 20210729.10 (#2464) [main] Update dependencies from dotnet/aspnetcore * Update dependencies from https://github.com/dotnet/runtime build 20210730.1 (#2465) [main] Update dependencies from dotnet/runtime * Update dependencies from https://github.com/dotnet/aspnetcore build 20210731.1 (#2467) [main] Update dependencies from dotnet/aspnetcore * Update dependencies from https://github.com/dotnet/runtime build 20210730.13 (#2468) [main] Update dependencies from dotnet/runtime * Update dependencies from https://github.com/dotnet/aspnetcore build 20210731.3 (#2469) [main] Update dependencies from dotnet/aspnetcore * Update dependencies from https://github.com/dotnet/runtime build 20210801.1 (#2470) [main] Update dependencies from dotnet/runtime * Update dependencies from https://github.com/dotnet/runtime build 20210801.3 (#2471) [main] Update dependencies from dotnet/runtime * Correcting grammar in "stopping the trace" output (#2458) * Update dependencies from https://github.com/dotnet/runtime build 20210802.10 (#2474) [main] Update dependencies from dotnet/runtime * Update dependencies from https://github.com/dotnet/runtime build 20210803.13 (#2475) [main] Update dependencies from dotnet/runtime * Update dependencies from https://github.com/dotnet/aspnetcore build 20210804.1 (#2479) [main] Update dependencies from dotnet/aspnetcore * Update dependencies from https://github.com/dotnet/aspnetcore build 20210804.17 (#2481) [main] Update dependencies from dotnet/aspnetcore * Update dependencies from https://github.com/dotnet/runtime build 20210805.1 (#2482) [main] Update dependencies from dotnet/runtime * Prevent queueing workitems in debuggee compilation on prebuild path (#2480) * dotnet-counters support for the new System.Diagnostics.Metrics APIs (#2373) * Refactoring and additional parse error handling The logic to create the EventPipeProvider list was intermingled with the parsing logic for the command line arguments. I eliminated the intermediate List _counterList representation and convert directly from command line args -> CounterFilter (renamed CounterSet). EventPipeProviders can now be computed directly from the CounterSet with no parsing mixed in. While refactoring the parsing code I also added more error handling for bad inputs and fixed incorrect descriptions of the format in the help text. * Support System.Diagnostics.Metrics Let dotnet-counters show metrics that were collected via our new System.Diagnostics.Metrics APIs.
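As context for the System.Diagnostics.Metrics support, here is a minimal sketch of a target process publishing a Meter that dotnet-counters can now display; the meter and instrument names are invented for illustration:

```csharp
// Minimal example of the System.Diagnostics.Metrics APIs (available in .NET 6 /
// the System.Diagnostics.DiagnosticSource package). The names below are arbitrary.
using System;
using System.Diagnostics.Metrics;
using System.Threading;

class MetricsSample
{
    // dotnet-counters can include the meter name "MyCompany.MyService" in its
    // counter list to display these instruments (see the tool's help for syntax).
    private static readonly Meter s_meter = new Meter("MyCompany.MyService", "1.0.0");
    private static readonly Counter<long> s_requests = s_meter.CreateCounter<long>("requests-processed");
    private static readonly Histogram<double> s_latency = s_meter.CreateHistogram<double>("request-latency-ms");

    static void Main()
    {
        var random = new Random();
        while (true)
        {
            s_requests.Add(1);                          // monotonic counter
            s_latency.Record(random.NextDouble() * 100); // histogram measurement
            Thread.Sleep(100);
        }
    }
}
```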
* Code review feedback * Update dependencies from https://github.com/dotnet/symstore build 20210802.1 (#2484) [main] Update dependencies from dotnet/symstore * Update dependencies from https://github.com/dotnet/runtime build 20210805.12 (#2488) [main] Update dependencies from dotnet/runtime * Update dependencies from https://github.com/dotnet/aspnetcore build 20210806.1 (#2487) [main] Update dependencies from dotnet/aspnetcore * Fix dotnet-counters console alignment (#2485) Text was oddly shifting one character to the right during updates. Fixed the off-by-one error in the update text positioning. * Update dependencies from https://github.com/dotnet/runtime build 20210806.5 (#2491) [main] Update dependencies from dotnet/runtime * Update dependencies from https://github.com/dotnet/aspnetcore build 20210806.20 (#2490) [main] Update dependencies from dotnet/aspnetcore * Update dependencies from https://github.com/dotnet/aspnetcore build 20210807.1 (#2492) [main] Update dependencies from dotnet/aspnetcore * Use absolute path for unix domain socket in listen mode (#2489) * Update dependencies from https://github.com/dotnet/aspnetcore build 20210808.1 (#2493) [main] Update dependencies from dotnet/aspnetcore * minor formatting fixes (#2494) * Update dependencies from https://github.com/dotnet/symstore build 20210809.1 (#2496) [main] Update dependencies from dotnet/symstore * Update dependencies from https://github.com/dotnet/aspnetcore build 20210810.1 (#2495) [main] Update dependencies from dotnet/aspnetcore * Update dependencies from https://github.com/dotnet/aspnetcore build 20210810.16 (#2497) [main] Update dependencies from dotnet/aspnetcore * [main] Update dependencies from dotnet/runtime (#2498) * Update dependencies from https://github.com/dotnet/runtime build 20210811.2 Microsoft.NETCore.App.Runtime.win-x64 , VS.Redist.Common.NetCore.SharedFramework.x64.6.0 From Version 6.0.0-rc.1.21406.5 -> To Version 6.0.0-rc.1.21411.2 * Fix SOS test failures with new runtime Co-authored-by: dotnet-maestro[bot] Co-authored-by: Mike McLaughlin * Update dependencies from https://github.com/dotnet/runtime build 20210811.5 (#2500) [main] Update dependencies from dotnet/runtime * Update dependencies from https://github.com/dotnet/aspnetcore build 20210811.15 (#2499) [main] Update dependencies from dotnet/aspnetcore * Avoid checking for ephemeral heap segment in DumpGen (#2501) * Update dependencies from https://github.com/dotnet/aspnetcore build 20210812.9 (#2502) [main] Update dependencies from dotnet/aspnetcore * Update dependencies from https://github.com/dotnet/runtime build 20210813.5 (#2503) [main] Update dependencies from dotnet/runtime * Update dependencies from https://github.com/dotnet/aspnetcore build 20210813.12 (#2504) [main] Update dependencies from dotnet/aspnetcore * Update dependencies from https://github.com/dotnet/runtime build 20210814.1 (#2505) [main] Update dependencies from dotnet/runtime * Update dependencies from https://github.com/dotnet/runtime build 20210814.4 (#2506) [main] Update dependencies from dotnet/runtime * Update dependencies from https://github.com/dotnet/runtime build 20210815.6 (#2507) [main] Update dependencies from dotnet/runtime * Update dependencies from https://github.com/dotnet/aspnetcore build 20210817.1 (#2510) [main] Update dependencies from dotnet/aspnetcore * Update dependencies from https://github.com/dotnet/runtime build 20210817.1 (#2511) [main] Update dependencies from dotnet/runtime * Update dependencies from https://github.com/dotnet/aspnetcore 
build 20210817.14 (#2513) [main] Update dependencies from dotnet/aspnetcore * Update dependencies from https://github.com/dotnet/aspnetcore build 20210818.11 (#2516) [main] Update dependencies from dotnet/aspnetcore * Update dependencies from https://github.com/dotnet/aspnetcore build 20210819.24 (#2517) [main] Update dependencies from dotnet/aspnetcore * [main] Update dependencies from dotnet/runtime (#2514) [main] Update dependencies from dotnet/runtime * Update dependencies from https://github.com/dotnet/runtime build 20210820.29 (#2520) [main] Update dependencies from dotnet/runtime * Update dependencies from https://github.com/dotnet/aspnetcore build 20210820.26 (#2519) [main] Update dependencies from dotnet/aspnetcore * Update dependencies from https://github.com/dotnet/aspnetcore build 20210821.2 (#2521) [main] Update dependencies from dotnet/aspnetcore * Add new 'symbolicate' commands to dotnet-stack (#2436) * Initial trace event trigger implementation and tests. (#2508) * Initial trace event trigger implementation and tests. * Fix build problems with newer 6.0 SDKs/arcade (#2525) * Update dependencies from https://github.com/dotnet/installer build 20210418.6 Microsoft.Dotnet.Sdk.Internal From Version 6.0.100-preview.1.21103.13 -> To Version 6.0.100-preview.4.21218.6 * Update dependencies from https://github.com/dotnet/installer build 20210426.1 Microsoft.Dotnet.Sdk.Internal From Version 6.0.100-preview.1.21103.13 -> To Version 6.0.100-preview.5.21226.1 * Update dependencies from https://github.com/dotnet/installer build 20210502.3 Microsoft.Dotnet.Sdk.Internal From Version 6.0.100-preview.1.21103.13 -> To Version 6.0.100-preview.5.21252.3 * Update dependencies from https://github.com/dotnet/installer build 20210510.2 Microsoft.Dotnet.Sdk.Internal From Version 6.0.100-preview.1.21103.13 -> To Version 6.0.100-preview.5.21260.2 * Update dependencies from https://github.com/dotnet/installer build 20210516.3 Microsoft.Dotnet.Sdk.Internal From Version 6.0.100-preview.1.21103.13 -> To Version 6.0.100-preview.5.21266.3 * Update dependencies from https://github.com/dotnet/installer build 20210524.3 Microsoft.Dotnet.Sdk.Internal From Version 6.0.100-preview.1.21103.13 -> To Version 6.0.100-preview.6.21274.3 * Update dependencies from https://github.com/dotnet/installer build 20210530.2 Microsoft.Dotnet.Sdk.Internal From Version 6.0.100-preview.1.21103.13 -> To Version 6.0.100-preview.6.21280.2 * Update dependencies from https://github.com/dotnet/installer build 20210606.2 Microsoft.Dotnet.Sdk.Internal From Version 6.0.100-preview.1.21103.13 -> To Version 6.0.100-preview.6.21306.2 * Update dependencies from https://github.com/dotnet/installer build 20210613.2 Microsoft.Dotnet.Sdk.Internal From Version 6.0.100-preview.1.21103.13 -> To Version 6.0.100-preview.6.21313.2 * Update dependencies from https://github.com/dotnet/installer build 20210621.2 Microsoft.Dotnet.Sdk.Internal From Version 6.0.100-preview.1.21103.13 -> To Version 6.0.100-preview.7.21321.2 * Update dependencies from https://github.com/dotnet/installer build 20210627.2 Microsoft.Dotnet.Sdk.Internal From Version 6.0.100-preview.1.21103.13 -> To Version 6.0.100-preview.7.21327.2 * Update dependencies from https://github.com/dotnet/installer build 20210704.4 Microsoft.Dotnet.Sdk.Internal From Version 6.0.100-preview.1.21103.13 -> To Version 6.0.100-preview.7.21354.4 * Update dependencies from https://github.com/dotnet/installer build 20210710.1 Microsoft.Dotnet.Sdk.Internal From Version 6.0.100-preview.1.21103.13 -> To Version 
6.0.100-preview.7.21360.1 * Update dependencies from https://github.com/dotnet/installer build 20210719.3 Microsoft.Dotnet.Sdk.Internal From Version 6.0.100-preview.1.21103.13 -> To Version 6.0.100-rc.1.21369.3 * Update dependencies from https://github.com/dotnet/installer build 20210726.3 Microsoft.Dotnet.Sdk.Internal From Version 6.0.100-preview.1.21103.13 -> To Version 6.0.100-rc.1.21376.3 * Update dependencies from https://github.com/dotnet/installer build 20210801.2 Microsoft.Dotnet.Sdk.Internal From Version 6.0.100-preview.1.21103.13 -> To Version 6.0.100-rc.1.21401.2 * Update dependencies from https://github.com/dotnet/installer build 20210808.2 Microsoft.Dotnet.Sdk.Internal From Version 6.0.100-preview.1.21103.13 -> To Version 6.0.100-rc.1.21408.2 * Update dependencies from https://github.com/dotnet/installer build 20210815.2 Microsoft.Dotnet.Sdk.Internal From Version 6.0.100-preview.1.21103.13 -> To Version 6.0.100-rc.1.21415.2 * Update dependencies from https://github.com/dotnet/installer build 20210823.2 Microsoft.Dotnet.Sdk.Internal From Version 6.0.100-preview.1.21103.13 -> To Version 6.0.100-rc.2.21423.2 * Update dependencies from https://github.com/dotnet/arcade build 20210514.2 Microsoft.DotNet.Arcade.Sdk , Microsoft.DotNet.RemoteExecutor From Version 6.0.0-beta.21160.7 -> To Version 6.0.0-beta.21264.2 * Update dependencies from https://github.com/dotnet/arcade build 20210521.3 Microsoft.DotNet.Arcade.Sdk , Microsoft.DotNet.RemoteExecutor From Version 6.0.0-beta.21160.7 -> To Version 6.0.0-beta.21271.3 * Update dependencies from https://github.com/dotnet/arcade build 20210528.1 Microsoft.DotNet.Arcade.Sdk , Microsoft.DotNet.RemoteExecutor From Version 6.0.0-beta.21160.7 -> To Version 6.0.0-beta.21278.1 * Update dependencies from https://github.com/dotnet/arcade build 20210604.1 Microsoft.DotNet.Arcade.Sdk , Microsoft.DotNet.RemoteExecutor From Version 6.0.0-beta.21160.7 -> To Version 6.0.0-beta.21304.1 * Update dependencies from https://github.com/dotnet/arcade build 20210611.3 Microsoft.DotNet.Arcade.Sdk , Microsoft.DotNet.RemoteExecutor From Version 6.0.0-beta.21160.7 -> To Version 6.0.0-beta.21311.3 * Update dependencies from https://github.com/dotnet/arcade build 20210619.2 Microsoft.DotNet.Arcade.Sdk , Microsoft.DotNet.RemoteExecutor From Version 6.0.0-beta.21160.7 -> To Version 6.0.0-beta.21319.2 * Update dependencies from https://github.com/dotnet/arcade build 20210624.3 Microsoft.DotNet.Arcade.Sdk , Microsoft.DotNet.RemoteExecutor From Version 6.0.0-beta.21160.7 -> To Version 6.0.0-beta.21324.3 * Update dependencies from https://github.com/dotnet/arcade build 20210701.2 Microsoft.DotNet.Arcade.Sdk , Microsoft.DotNet.RemoteExecutor From Version 6.0.0-beta.21160.7 -> To Version 6.0.0-beta.21351.2 * Update dependencies from https://github.com/dotnet/arcade build 20210707.3 Microsoft.DotNet.Arcade.Sdk , Microsoft.DotNet.RemoteExecutor From Version 6.0.0-beta.21160.7 -> To Version 6.0.0-beta.21357.3 * Update dependencies from https://github.com/dotnet/arcade build 20210716.1 Microsoft.DotNet.Arcade.Sdk , Microsoft.DotNet.RemoteExecutor From Version 6.0.0-beta.21160.7 -> To Version 6.0.0-beta.21366.1 * Update dependencies from https://github.com/dotnet/arcade build 20210723.11 Microsoft.DotNet.Arcade.Sdk , Microsoft.DotNet.RemoteExecutor From Version 6.0.0-beta.21160.7 -> To Version 6.0.0-beta.21373.11 * Update dependencies from https://github.com/dotnet/arcade build 20210729.2 Microsoft.DotNet.Arcade.Sdk , Microsoft.DotNet.RemoteExecutor From Version 6.0.0-beta.21160.7 
-> To Version 6.0.0-beta.21379.2 * Update dependencies from https://github.com/dotnet/arcade build 20210806.6 Microsoft.DotNet.Arcade.Sdk , Microsoft.DotNet.RemoteExecutor From Version 6.0.0-beta.21160.7 -> To Version 6.0.0-beta.21406.6 * Update dependencies from https://github.com/dotnet/arcade build 20210812.1 Microsoft.DotNet.Arcade.Sdk , Microsoft.DotNet.RemoteExecutor From Version 6.0.0-beta.21160.7 -> To Version 6.0.0-beta.21412.1 * Update dependencies from https://github.com/dotnet/arcade build 20210820.4 Microsoft.DotNet.Arcade.Sdk , Microsoft.DotNet.RemoteExecutor From Version 6.0.0-beta.21160.7 -> To Version 6.0.0-beta.21420.4 * Fix build problems with newer 6.0 SDKs/arcade Co-authored-by: dotnet-maestro[bot] * Update dependencies from https://github.com/dotnet/aspnetcore build 20210825.1 (#2524) Microsoft.AspNetCore.App.Ref.Internal , Microsoft.AspNetCore.App.Ref From Version 6.0.0-rc.2.21421.2 -> To Version 6.0.0-rc.2.21425.1 Co-authored-by: dotnet-maestro[bot] * [main] Update dependencies from dotnet/runtime (#2523) * Update dependencies from https://github.com/dotnet/runtime build 20210823.19 Microsoft.NETCore.App.Runtime.win-x64 , VS.Redist.Common.NetCore.SharedFramework.x64.6.0 From Version 6.0.0-rc.2.21420.29 -> To Version 6.0.0-rc.2.21423.19 * Update dependencies from https://github.com/dotnet/runtime build 20210824.25 Microsoft.NETCore.App.Runtime.win-x64 , VS.Redist.Common.NetCore.SharedFramework.x64.6.0 From Version 6.0.0-rc.2.21420.29 -> To Version 6.0.0-rc.2.21424.25 Co-authored-by: dotnet-maestro[bot] * Update dependencies from https://github.com/dotnet/symstore build 20210823.1 (#2522) Microsoft.SymbolStore From Version 1.0.240901 -> To Version 1.0.242301 Co-authored-by: dotnet-maestro[bot] * Update dependencies from https://github.com/dotnet/aspnetcore build 20210826.1 (#2526) [main] Update dependencies from dotnet/aspnetcore * Update dependencies from https://github.com/dotnet/runtime build 20210825.6 (#2527) [main] Update dependencies from dotnet/runtime * Update dependencies from https://github.com/microsoft/clrmd build 20210826.6 (#2530) [main] Update dependencies from microsoft/clrmd * Fix minor spelling error in dotnet-dsrouter warning message. 
(#2528) * Update dependencies from https://github.com/dotnet/aspnetcore build 20210827.1 (#2529) [main] Update dependencies from dotnet/aspnetcore * Update dependencies from https://github.com/dotnet/symstore build 20210826.1 (#2531) [main] Update dependencies from dotnet/symstore * Update dependencies from https://github.com/dotnet/runtime build 20210827.2 (#2532) [main] Update dependencies from dotnet/runtime * Update dependencies from https://github.com/dotnet/runtime build 20210828.1 (#2534) [main] Update dependencies from dotnet/runtime * Update dependencies from https://github.com/dotnet/aspnetcore build 20210828.1 (#2533) [main] Update dependencies from dotnet/aspnetcore * Update dependencies from https://github.com/dotnet/aspnetcore build 20210828.15 (#2535) [main] Update dependencies from dotnet/aspnetcore * Update dependencies from https://github.com/dotnet/arcade build 20210827.6 (#2537) [main] Update dependencies from dotnet/arcade * Update dependencies from https://github.com/dotnet/installer build 20210830.3 (#2540) [main] Update dependencies from dotnet/installer * Update dependencies from https://github.com/dotnet/runtime build 20210829.8 (#2539) [main] Update dependencies from dotnet/runtime * Update dependencies from https://github.com/dotnet/aspnetcore build 20210830.3 (#2538) [main] Update dependencies from dotnet/aspnetcore * Update dependencies from https://github.com/dotnet/installer build 20210830.28 (#2543) [main] Update dependencies from dotnet/installer * Refactor IPC communication to allow for async and cancellation. (#2350) * Refactor IPC communication to allow for async and cancellation. Refactor tests to flex both non-async and async methods. * Update dependencies from https://github.com/dotnet/symstore build 20210830.1 (#2545) [main] Update dependencies from dotnet/symstore * Update dependencies from https://github.com/dotnet/runtime build 20210830.24 (#2546) [main] Update dependencies from dotnet/runtime * [main] Update dependencies from dotnet/aspnetcore (#2544) [main] Update dependencies from dotnet/aspnetcore * Update dependencies from https://github.com/dotnet/runtime build 20210831.13 (#2548) [main] Update dependencies from dotnet/runtime * Update dependencies from https://github.com/dotnet/aspnetcore build 20210902.1 (#2549) [main] Update dependencies from dotnet/aspnetcore * Update dependencies from https://github.com/dotnet/runtime build 20210902.2 (#2551) [main] Update dependencies from dotnet/runtime * Remove blob feed (#2553) * Update dependencies from https://github.com/microsoft/clrmd build 20210831.1 (#2547) Microsoft.Diagnostics.Runtime , Microsoft.Diagnostics.Runtime.Utilities From Version 2.0.242606 -> To Version 2.0.243101 Co-authored-by: dotnet-maestro[bot] * Update dependencies from https://github.com/dotnet/symstore build 20210901.1 (#2550) Microsoft.SymbolStore From Version 1.0.243001 -> To Version 1.0.245101 Co-authored-by: dotnet-maestro[bot] * Update dependencies from https://github.com/dotnet/runtime build 20210902.24 (#2556) [main] Update dependencies from dotnet/runtime * Update dependencies from https://github.com/dotnet/aspnetcore build 20210903.2 (#2555) Microsoft.AspNetCore.App.Ref.Internal , Microsoft.AspNetCore.App.Ref From Version 6.0.0-rc.2.21452.1 -> To Version 6.0.0-rc.2.21453.2 Co-authored-by: dotnet-maestro[bot] * Update dependencies from https://github.com/dotnet/runtime build 20210904.1 (#2560) Microsoft.NETCore.App.Runtime.win-x64 , VS.Redist.Common.NetCore.SharedFramework.x64.6.0 From Version 6.0.0-rc.2.21452.24 
-> To Version 6.0.0-rc.2.21454.1 Co-authored-by: dotnet-maestro[bot] * Update dependencies from https://github.com/dotnet/installer build 20210906.2 (#2562) Microsoft.Dotnet.Sdk.Internal From Version 6.0.100-rc.2.21430.28 -> To Version 6.0.100-rc.2.21456.2 Co-authored-by: dotnet-maestro[bot] * [main] Update dependencies from dotnet/aspnetcore (#2559) * Update dependencies from https://github.com/dotnet/aspnetcore build 20210903.33 Microsoft.AspNetCore.App.Ref.Internal , Microsoft.AspNetCore.App.Ref From Version 6.0.0-rc.2.21453.2 -> To Version 6.0.0-rc.2.21453.33 * Update dependencies from https://github.com/dotnet/aspnetcore build 20210905.6 Microsoft.AspNetCore.App.Ref.Internal , Microsoft.AspNetCore.App.Ref From Version 6.0.0-rc.2.21453.2 -> To Version 6.0.0-rc.2.21455.6 Co-authored-by: dotnet-maestro[bot] * [main] Update dependencies from dotnet/arcade (#2561) [main] Update dependencies from dotnet/arcade - Change netcoreapp2.1 to netcoreapp3.1 and stop testing against 2.1 - Don't host the managed SOS code on 2.1 anymore * In dotnet-trace/counters disposed of the server when we received ctrl+C while waiting for clients to connect. closes #2426 (#2476) * Disposed of the server when we received ctrl+C while waiting for clients to connect * removed deletion of files to allow the server to clean things up * Disabling EventLogsPipeUnitTests.TestLogsWildcardCategory and EventCounterTriggerTests.EventCounterTriggerWithEventPipePipelineTest (#2569) * Disabling EventLogsPipeUnitTests.TestLogsWildcardCategory and EventCounterTriggerTests.EventCounterTriggerWithEventPipePipelineTest They are constantly failing. * Only skip on Windows * Disabled EventLogsPipelineUnitTests.TestLogsAllCategoriesAllLevels for Windows also * Disable an unreliable SOS test on MacOS (https://github.com/dotnet/diagnostics/issues/1950) * Update dependencies from https://github.com/dotnet/symstore build 20210907.1 (#2566) Microsoft.SymbolStore From Version 1.0.245101 -> To Version 1.0.245701 Co-authored-by: dotnet-maestro[bot] * Update dependencies from https://github.com/dotnet/aspnetcore build 20210908.2 (#2565) Microsoft.AspNetCore.App.Ref.Internal , Microsoft.AspNetCore.App.Ref From Version 6.0.0-rc.2.21455.6 -> To Version 6.0.0-rc.2.21458.2 Co-authored-by: dotnet-maestro[bot] * Update dependencies from https://github.com/dotnet/runtime build 20210908.2 (#2567) Microsoft.NETCore.App.Runtime.win-x64 , VS.Redist.Common.NetCore.SharedFramework.x64.6.0 From Version 6.0.0-rc.2.21454.1 -> To Version 6.0.0-rc.2.21458.2 Co-authored-by: dotnet-maestro[bot] * Fix default IPC port for IPC server, TCP client mode. (#2557) * Update dependencies from https://github.com/dotnet/aspnetcore build 20210908.47 (#2570) [main] Update dependencies from dotnet/aspnetcore * Update dependencies from https://github.com/dotnet/symstore build 20210908.1 (#2571) [main] Update dependencies from dotnet/symstore * Update dependencies from https://github.com/dotnet/runtime build 20210908.16 (#2572) [main] Update dependencies from dotnet/runtime * Enable crash dump report generation in SOS tests (#2558) Enable crash dump report generation in SOS tests Load crash report json file and check some common values.
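A rough sketch of what loading the crash report JSON and checking some common values could look like in a test; the property names used here (payload, version, threads) are assumptions for illustration, not the verified createdump schema:

```csharp
// Hypothetical crash report check; the property names below are assumptions,
// adjust them to the real schema emitted by createdump --crashreport.
using System.IO;
using System.Text.Json;

static class CrashReportChecks
{
    public static void ValidateCrashReport(string crashReportPath)
    {
        using JsonDocument doc = JsonDocument.Parse(File.ReadAllText(crashReportPath));
        JsonElement root = doc.RootElement;

        // Assumed layout: a top-level "payload" object with a "version" string
        // and a "threads" array.
        JsonElement payload = root.GetProperty("payload");
        if (string.IsNullOrEmpty(payload.GetProperty("version").GetString()))
            throw new InvalidDataException("crash report has no payload version");

        if (payload.GetProperty("threads").GetArrayLength() == 0)
            throw new InvalidDataException("crash report contains no threads");
    }
}
```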
Enable Windows triage and heap dump testing Check for ctrl-c on console writelines Fix arm32/x86 sign extension problems in C++ data targets Fix dotnet-dump collect dump type and the exception display in commands Fix eeversion command when private build version like 42.42.42.42424 Add the directory of the dump to the symbol search path Remove "ChangeEngineState" message on every stop in windbg. Fix module relocation fixups: the code was using the wrong RVA; the original RVA was needed, not the translated file-layout one. Better SOS module load failure message. Remove System.Memory dependencies * Update dependencies from https://github.com/dotnet/runtime build 20210909.18 (#2575) [main] Update dependencies from dotnet/runtime * Update dependencies from https://github.com/dotnet/aspnetcore build 20210910.4 (#2574) [main] Update dependencies from dotnet/aspnetcore * Enable ProcessInfo2 in DiagnosticsClient. (#2564) Update tests to reflect when entrypoint is available. * Update dependencies from https://github.com/dotnet/runtime build 20210910.14 (#2579) [main] Update dependencies from dotnet/runtime * Update dependencies from https://github.com/dotnet/aspnetcore build 20210911.3 (#2578) [main] Update dependencies from dotnet/aspnetcore * Fix reverse named pipe server resume hang. (#2573) Running a reverse connect (--diagnostic-port) against a runtime running with nosuspend hangs/deadlocks the IPC client and the IPC server thread on the start collect -> resume command sequence. This happens because the IPC server thread writes the initial header data into the stream, but the client only waits for the session id and then issues a resume command. If the reverse server is not set up with any buffering, the IPC server thread blocks when writing into the session, the server thread can't handle the incoming resume command, and the client won't read anything from the session to unblock the server thread, since it is waiting on completion of the resume command. This is not an issue for Unix domain or TCP/IP sockets, since they allocate smaller in/out buffers by default, preventing the server from blocking. It's not an issue for the runtime's IPC named pipe listener ports, since they are allocated with 16 KB in/out buffers. It is, however, an issue for reverse servers created by IpcWindowsNamedPipeServerTransport, since those default to 0-byte in/out buffers, triggering the issue. The fix increases the default size of the in/out buffers to match the default named pipe in/out buffers used by the runtime, 16 KB (see the illustrative sketch below). This is not an issue on CI, since CI uses its own implementation of the reverse named pipe server, also using 16 KB in/out buffers. Unix domain and TCP/IP sockets are kept as is, since the runtime also uses defaults in its implementation. CI, on the other hand, uses 16 KB for Unix domain socket in/out buffers, something IpcTcpSocketServerTransport could do as well and could be adjusted if ever needed. The number of bytes written into the stream when starting up streaming is small (xxx) and should fit into the default buffer sizes used by Unix domain and TCP/IP sockets. This fix, however, only addresses the symptom of the underlying issue: the IPC server thread writes into a stream that it expects the reader to consume, or it might block and stop processing further commands. One alternative runtime fix would be to move the write of the stream init data onto the streaming thread, but since all rundown data is also written on the IPC server thread, that won't solve the complete problem, since a stop collect command could cause the same issues.
If the client doesn't have a consumer reading the data on the stopped stream while the client is stopping it in parallel, it will block. This is a much worse scenario, since the size of the data written back as part of rundown will most likely be bigger than any IPC buffer size. Changing this behaviour in the runtime requires some re-architecture of the current IPC infrastructure, moving the write into the trace session away from the server thread. * Update dependencies from https://github.com/dotnet/aspnetcore build 20210911.23 (#2580) [main] Update dependencies from dotnet/aspnetcore * Update dependencies from https://github.com/dotnet/runtime build 20210911.1 (#2581) [main] Update dependencies from dotnet/runtime * Update dependencies from https://github.com/dotnet/arcade build 20210909.5 (#2582) [main] Update dependencies from dotnet/arcade * Update dependencies from https://github.com/dotnet/aspnetcore build 20210912.1 (#2583) [main] Update dependencies from dotnet/aspnetcore * Update dependencies from https://github.com/dotnet/installer build 20210910.37 (#2584) [main] Update dependencies from dotnet/installer * altered dotnet-counters to work with inputRedirection #2329 (#2466) * altered dotnet-counters to work with inputRedirection #2329 * Altered to allow for multiple key presses and removed debugging code * Altered to allow for multiple key presses * Update dependencies from https://github.com/dotnet/runtime build 20210913.21 (#2587) [main] Update dependencies from dotnet/runtime * Update dependencies from https://github.com/dotnet/symstore build 20210913.1 (#2586) [main] Update dependencies from dotnet/symstore * Update dependencies from https://github.com/dotnet/aspnetcore build 20210913.5 (#2585) Microsoft.AspNetCore.App.Ref.Internal , Microsoft.AspNetCore.App.Ref From Version 6.0.0-rc.2.21462.1 -> To Version 6.0.0-rc.2.21463.5 Co-authored-by: dotnet-maestro[bot] * Update dependencies from https://github.com/dotnet/aspnetcore build 20210915.1 (#2588) [main] Update dependencies from dotnet/aspnetcore * Update dependencies from https://github.com/dotnet/runtime build 20210914.21 (#2589) [main] Update dependencies from dotnet/runtime * Update dependencies from https://github.com/dotnet/symstore build 20210915.1 (#2594) [main] Update dependencies from dotnet/symstore * Update dependencies from https://github.com/dotnet/aspnetcore build 20210915.22 (#2593) [main] Update dependencies from dotnet/aspnetcore * Update dependencies from https://github.com/dotnet/aspnetcore build 20210916.24 (#2597) [main] Update dependencies from dotnet/aspnetcore * Update dependencies from https://github.com/dotnet/installer build 20210916.6 (#2602) [main] Update dependencies from dotnet/installer * Update dependencies from https://github.com/dotnet/arcade build 20210916.4 (#2601) [main] Update dependencies from dotnet/arcade * Asp.net triggers (#2592) * Asp.net triggers * PR Feedback * StatusCodeRange PR Feedback * Additional PR feedback * PR feedback * Fix misc SOS bugs (#2600) Fix misc SOS bugs * Fix bug in the AddFilesFromDirectoryToTpaList issue: https://github.com/dotnet/diagnostics/issues/2596 * Added some logging to GetLineByOffset * Improve SymbolService.ParseSymbolPath support for Watson. Issue https://github.com/dotnet/diagnostics/issues/2512. Can now handle the various symbol paths that Watson can throw at us. Doesn't support actually calling the symbol server dll like in the symsrv*symaudit.dll*\\server\share syntax. The dll is ignored.
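Returning to the reverse named pipe server fix (#2573) above: this is an illustrative creation of a named pipe server with explicit 16 KB in/out buffers, rather than 0-byte defaults, using System.IO.Pipes. It is a sketch of the buffer-sizing idea, not the actual IpcWindowsNamedPipeServerTransport change:

```csharp
// Illustrative only: a reverse-server style named pipe created with explicit
// 16 KB in/out buffers, mirroring the sizes the runtime uses for its IPC named
// pipe listener ports. With 0-byte buffers the writer can block until the reader
// drains the pipe, which is the deadlock described above.
using System.IO.Pipes;

static class ReversePipeExample
{
    private const int BufferSize = 16 * 1024;

    public static NamedPipeServerStream CreateServer(string pipeName)
    {
        return new NamedPipeServerStream(
            pipeName,
            PipeDirection.InOut,
            NamedPipeServerStream.MaxAllowedServerInstances,
            PipeTransmissionMode.Byte,
            PipeOptions.Asynchronous,
            inBufferSize: BufferSize,
            outBufferSize: BufferSize);
    }
}
```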
* Minor doc updates * Better loadsymbols error message when no server is set * Update dependencies from https://github.com/dotnet/aspnetcore build 20210917.1 (#2599) [main] Update dependencies from dotnet/aspnetcore * Update dependencies from https://github.com/dotnet/symstore build 20210920.1 (#2605) [main] Update dependencies from dotnet/symstore * Update dependencies from https://github.com/dotnet/aspnetcore build 20210921.8 (#2604) [main] Update dependencies from dotnet/aspnetcore * [main] Update dependencies from dotnet/runtime (#2606) * Update dependencies from https://github.com/dotnet/runtime build 20210920.22 Microsoft.NETCore.App.Runtime.win-x64 , VS.Redist.Common.NetCore.SharedFramework.x64.6.0 From Version 6.0.0-rc.2.21464.21 -> To Version 6.0.0-rtm.21470.22 * Fix SOS bpmd tests on 6.0 runtime Co-authored-by: dotnet-maestro[bot] Co-authored-by: Mike McLaughlin * Update dependencies from https://github.com/dotnet/aspnetcore build 20210921.45 (#2609) [main] Update dependencies from dotnet/aspnetcore * Update dependencies from https://github.com/dotnet/runtime build 20210921.19 (#2610) [main] Update dependencies from dotnet/runtime * Validate ProcessInfo is same (except entrypoint) before and after resume. (#2603) * Update dependencies from https://github.com/dotnet/aspnetcore build 20210923.1 (#2612) [main] Update dependencies from dotnet/aspnetcore * Update dependencies from https://github.com/dotnet/runtime build 20210922.13 (#2613) [main] Update dependencies from dotnet/runtime * Update dependencies from https://github.com/dotnet/aspnetcore build 20210924.1 (#2614) [main] Update dependencies from dotnet/aspnetcore * Update dependencies from https://github.com/dotnet/aspnetcore build 20210924.41 (#2616) [main] Update dependencies from dotnet/aspnetcore * Disabling TestLogsAllCategoriesDefaultLevel on windows due to intermittent failures (#2617) * Update dependencies from https://github.com/dotnet/aspnetcore build 20210925.9 (#2618) [main] Update dependencies from dotnet/aspnetcore * Update dependencies from https://github.com/dotnet/arcade build 20210924.2 (#2619) [main] Update dependencies from dotnet/arcade * Update dependencies from https://github.com/dotnet/installer build 20210926.2 (#2620) [main] Update dependencies from dotnet/installer * Add Guid Serialization to support new SetStartupProfiler method (#2621) * Update dependencies from https://github.com/dotnet/aspnetcore build 20210927.30 (#2622) [main] Update dependencies from dotnet/aspnetcore * Update dependencies from https://github.com/dotnet/symstore build 20210927.1 (#2623) [main] Update dependencies from dotnet/symstore * Update dependencies from https://github.com/dotnet/runtime build 20210927.8 (#2624) [main] Update dependencies from dotnet/runtime * Update dependencies from https://github.com/dotnet/aspnetcore build 20210929.1 (#2626) [main] Update dependencies from dotnet/aspnetcore * Update dependencies from https://github.com/dotnet/runtime build 20210928.11 (#2627) [main] Update dependencies from dotnet/runtime * Add glob support for asp.net triggers (#2615) * Add glob support for asp.net triggers * PR feedback * Pr feedback Co-authored-by: mikelle-rogers <45022607+mikelle-rogers@users.noreply.github.com> Co-authored-by: Mikelle Rogers Co-authored-by: dotnet-maestro[bot] <42748379+dotnet-maestro[bot]@users.noreply.github.com> Co-authored-by: dotnet-maestro[bot] Co-authored-by: Mike McLaughlin Co-authored-by: Koundinya Veluri Co-authored-by: Andrew Au Co-authored-by: Justin Anderson Co-authored-by: Matt 
Thalman Co-authored-by: Noah Falk Co-authored-by: Filip Navara Co-authored-by: John Zabroski Co-authored-by: JongHeonChoi Co-authored-by: Johan Lorensson Co-authored-by: Matt Mitchell Co-authored-by: Wiktor Kopec Co-authored-by: Tom McDonald Co-authored-by: Patrick Fenelon --- NuGet.config | 2 - debuggees.sln | 45 +- diagnostics.yml | 13 +- documentation/FAQ.md | 2 + documentation/debugging-coredump.md | 2 + documentation/dotnet-trace-instructions.md | 14 +- documentation/lldb/linux-instructions.md | 10 +- dotnet.sh | 0 eng/Build-Native.cmd | 4 +- eng/CleanupPrivateBuild.csproj | 2 +- eng/CreateVersionFile.csproj | 2 +- eng/InstallRuntimes.proj | 8 - eng/Version.Details.xml | 40 +- eng/Versions.props | 26 +- eng/build.sh | 4 +- eng/common/SetupNugetSources.ps1 | 6 + eng/common/SetupNugetSources.sh | 24 + eng/common/build.ps1 | 2 + eng/common/build.sh | 4 + eng/common/cross/arm/sources.list.trusty | 11 - eng/common/cross/arm/trusty-lttng-2.4.patch | 71 -- eng/common/cross/arm/trusty.patch | 97 -- eng/common/cross/arm64/sources.list.trusty | 11 - eng/common/cross/arm64/tizen-fetch.sh | 2 +- eng/common/cross/build-rootfs.sh | 78 +- eng/common/cross/toolchain.cmake | 9 +- eng/common/cross/x86/sources.list.trusty | 11 - eng/common/dotnet-install.sh | 2 +- eng/common/generate-locproject.ps1 | 117 +++ eng/common/init-tools-native.sh | 2 +- eng/common/internal-feed-operations.ps1 | 6 +- eng/common/internal-feed-operations.sh | 2 +- eng/common/internal/Tools.csproj | 3 +- eng/common/msbuild.ps1 | 1 + eng/common/native/common-library.sh | 8 +- ...nd-native-compiler.sh => init-compiler.sh} | 102 +- eng/common/performance/blazor_perf.proj | 30 - eng/common/performance/crossgen_perf.proj | 110 --- eng/common/performance/microbenchmarks.proj | 144 --- eng/common/performance/performance-setup.ps1 | 139 --- eng/common/performance/performance-setup.sh | 297 ------ .../post-build/sourcelink-validation.ps1 | 91 +- eng/common/post-build/symbols-validation.ps1 | 124 +-- eng/common/sdk-task.ps1 | 11 +- eng/common/sdl/configure-sdl-tool.ps1 | 109 +++ eng/common/sdl/execute-all-sdl-tools.ps1 | 73 +- eng/common/sdl/extract-artifact-archives.ps1 | 63 ++ eng/common/sdl/init-sdl.ps1 | 2 +- eng/common/sdl/run-sdl.ps1 | 52 +- eng/common/templates/job/execute-sdl.yml | 108 ++- eng/common/templates/job/job.yml | 2 +- eng/common/templates/job/onelocbuild.yml | 93 ++ eng/common/templates/job/performance.yml | 95 -- .../templates/job/publish-build-assets.yml | 26 +- eng/common/templates/job/source-build.yml | 3 + .../templates/job/source-index-stage1.yml | 27 +- eng/common/templates/phases/base.yml | 130 --- .../templates/phases/publish-build-assets.yml | 52 -- .../channels/generic-internal-channel.yml | 6 + .../channels/generic-public-channel.yml | 6 + .../templates/steps/perf-send-to-helix.yml | 50 - eng/common/templates/steps/source-build.yml | 38 +- eng/common/tools.ps1 | 224 +++-- eng/common/tools.sh | 29 +- global.json | 4 +- .../ImageMappingMemoryService.cs | 148 +-- .../MetadataMappingMemoryService.cs | 7 +- .../Module.cs | 58 +- .../ModuleService.cs | 24 +- .../ModuleServiceFromDataReader.cs | 7 +- .../SymbolService.cs | 115 ++- .../CommandBase.cs | 4 + ...Microsoft.Diagnostics.DebugServices.csproj | 1 - .../ClrMDHelper.cs | 14 +- .../DumpConcurrentDictionaryCommand.cs | 2 +- .../DumpGen.cs | 2 +- .../AspNetTriggerSourceConfiguration.cs | 70 ++ .../HttpRequestSourceConfiguration.cs | 2 +- .../MonitoringSourceConfiguration.cs | 10 + .../Counters/CounterFilter.cs | 13 +- .../Counters/EventCounterPipeline.cs | 65 +- 
.../Counters/TraceEventExtensions.cs | 78 ++ .../EventPipeStreamProvider.cs | 19 +- ...ft.Diagnostics.Monitoring.EventPipe.csproj | 4 +- .../AspNet/AspNetRequestCountTrigger.cs | 27 + .../AspNetRequestCountTriggerSettings.cs | 14 + .../AspNet/AspNetRequestDurationTrigger.cs | 84 ++ .../AspNetRequestDurationTriggerSettings.cs | 24 + .../AspNet/AspNetRequestStatusTrigger.cs | 32 + .../AspNetRequestStatusTriggerSettings.cs | 67 ++ .../Triggers/AspNet/AspNetTrigger.cs | 133 +++ .../Triggers/AspNet/AspNetTriggerFactories.cs | 25 + .../Triggers/AspNet/AspNetTriggerSettings.cs | 73 ++ .../Triggers/AspNet/GlobMatcher.cs | 73 ++ .../EventCounter/EventCounterTrigger.cs | 91 ++ .../EventCounterTriggerFactory.cs | 21 + .../EventCounter/EventCounterTriggerImpl.cs | 92 ++ .../EventCounterTriggerSettings.cs | 95 ++ .../Triggers/ITraceEventTrigger.cs | 31 + .../Triggers/ITraceEventTriggerFactory.cs | 18 + .../Pipelines/EventPipeTriggerPipeline.cs | 95 ++ .../EventPipeTriggerPipelineSettings.cs | 26 + .../Pipelines/TraceEventTriggerPipeline.cs | 121 +++ .../Triggers/SlidingWindow.cs | 73 ++ .../Microsoft.Diagnostics.Monitoring.csproj | 2 - .../DiagnosticsClient/DiagnosticsClient.cs | 455 +++++---- .../DiagnosticsClient/EventPipeSession.cs | 132 ++- .../DiagnosticsIpc/IpcClient.cs | 94 +- .../DiagnosticsIpc/IpcCommands.cs | 11 - .../DiagnosticsIpc/IpcHeader.cs | 17 +- .../DiagnosticsIpc/IpcMessage.cs | 12 +- .../DiagnosticsIpc/IpcResponse.cs | 27 + .../DiagnosticsIpc/IpcServerTransport.cs | 2 +- .../DiagnosticsIpc/ProcessEnvironment.cs | 27 +- ...icrosoft.Diagnostics.NETCore.Client.csproj | 11 +- .../StreamExtensions.cs | 34 + .../Command/CommandProcessor.cs | 25 +- .../CliDebuggeeCompiler.cs | 2 +- .../DotNetBuildDebuggeeTestStep.cs | 10 +- .../Microsoft.Diagnostics.TestHelpers.csproj | 2 +- .../PrebuiltDebuggeeCompiler.cs | 2 +- .../SdkPrebuiltDebuggeeCompiler.cs | 2 +- .../TestConfiguration.cs | 2 +- src/SOS/SOS.Extensions/HostServices.cs | 2 +- .../ModuleServiceFromDebuggerServices.cs | 10 +- src/SOS/SOS.Hosting/SOSLibrary.cs | 4 +- .../SOS.Hosting/SymbolServiceExtensions.cs | 18 + src/SOS/SOS.Hosting/SymbolServiceWrapper.cs | 8 +- .../Unix/Debugger.Tests.Config.txt | 31 +- .../Windows/Debugger.Tests.Config.txt | 44 +- .../Debuggees/Directory.Build.props | 2 +- .../SOS.UnitTests/Debuggees/WebApp/Program.cs | 46 - .../WebApp/Properties/launchSettings.json | 27 - .../SOS.UnitTests/Debuggees/WebApp/Startup.cs | 42 - .../Debuggees/WebApp/WebApp.csproj | 14 - src/SOS/SOS.UnitTests/SOS.UnitTests.csproj | 1 + src/SOS/SOS.UnitTests/SOS.cs | 85 +- src/SOS/SOS.UnitTests/SOSRunner.cs | 77 +- .../SOS.UnitTests/Scripts/DualRuntimes.script | 5 - src/SOS/SOS.UnitTests/Scripts/GCTests.script | 10 +- .../Scripts/NestedExceptionTest.script | 3 + .../Scripts/OtherCommands.script | 10 +- .../Scripts/StackAndOtherTests.script | 16 +- src/SOS/SOS.UnitTests/Scripts/WebApp.script | 5 - src/SOS/Strike/dbgengservices.cpp | 2 - src/SOS/Strike/eeheap.cpp | 11 +- src/SOS/Strike/platform/cordebugdatatarget.h | 1 + src/SOS/Strike/platform/datatarget.cpp | 1 + src/SOS/Strike/strike.cpp | 10 +- src/SOS/Strike/symbols.cpp | 7 +- src/SOS/Strike/util.cpp | 25 +- src/SOS/Strike/util.h | 2 + src/SOS/extensions/hostcoreclr.cpp | 37 +- .../TestDebuggee/TestDebuggee.csproj | 2 +- .../ReversedServerHelpers.cs | 19 +- .../CommandLineErrorException.cs | 22 + src/Tools/dotnet-counters/CounterFilter.cs | 39 - src/Tools/dotnet-counters/CounterMonitor.cs | 869 +++++++++++++----- src/Tools/dotnet-counters/CounterPayload.cs | 118 +-- 
src/Tools/dotnet-counters/CounterProvider.cs | 5 - src/Tools/dotnet-counters/CounterSet.cs | 71 ++ .../dotnet-counters/Exporters/CSVExporter.cs | 22 +- .../Exporters/ConsoleWriter.cs | 149 ++- .../Exporters/ICounterRenderer.cs | 3 +- .../dotnet-counters/Exporters/JSONExporter.cs | 67 +- src/Tools/dotnet-counters/Program.cs | 74 +- .../dotnet-counters/dotnet-counters.csproj | 3 +- .../DiagnosticsServerRouterCommands.cs | 5 +- .../dotnet-dsrouter/dotnet-dsrouter.csproj | 3 +- src/Tools/dotnet-dump/Analyzer.cs | 3 +- src/Tools/dotnet-dump/Dumper.Windows.cs | 15 +- src/Tools/dotnet-dump/Dumper.cs | 5 + src/Tools/dotnet-dump/dotnet-dump.csproj | 3 +- src/Tools/dotnet-gcdump/dotnet-gcdump.csproj | 3 +- src/Tools/dotnet-sos/dotnet-sos.csproj | 3 +- src/Tools/dotnet-stack/Program.cs | 1 + src/Tools/dotnet-stack/Symbolicate.cs | 334 +++++++ src/Tools/dotnet-stack/dotnet-stack.csproj | 5 +- .../CommandLine/Commands/CollectCommand.cs | 4 +- src/Tools/dotnet-trace/dotnet-trace.csproj | 5 +- src/inc/xclrdata.idl | 169 ++-- src/pal/prebuilt/inc/xclrdata.h | 16 +- src/pal/src/CMakeLists.txt | 18 - src/pal/src/locale/unicode.cpp | 74 -- src/tests/EventPipeTracee/Program.cs | 20 +- .../SymbolServiceTests.cs | 116 +++ .../AspNetTriggerUnitTests.cs | 285 ++++++ .../EventCounterConstants.cs | 17 + .../EventCounterTriggerTests.cs | 495 ++++++++++ .../EventLogsPipelineUnitTests.cs | 36 +- .../GlobMatcherTests.cs | 85 ++ .../SlidingWindowTests.cs | 56 ++ ...rverHelper.cs => DiagnosticPortsHelper.cs} | 21 +- .../DiagnosticsClientApiShim.cs | 93 ++ .../DiagnosticsClientApiShimExtensions.cs | 41 + .../EventPipeSessionTests.cs | 87 +- .../GetProcessEnvironmentTests.cs | 25 +- .../GetProcessInfoTests.cs | 123 ++- .../RemoteTestExecution.cs | 2 +- .../ReversedServerTests.cs | 23 +- src/tests/dotnet-counters/CSVExporterTests.cs | 104 ++- .../dotnet-counters/CounterMonitorTests.cs | 114 ++- .../DotnetCounters.UnitTests.csproj | 2 +- .../dotnet-counters/JSONExporterTests.cs | 114 ++- src/tests/dotnet-counters/TestHelpers.cs | 50 - src/tests/dotnet-trace/ChildProcessTests.cs | 2 +- 206 files changed, 6896 insertions(+), 3308 deletions(-) mode change 100644 => 100755 dotnet.sh delete mode 100644 eng/common/cross/arm/sources.list.trusty delete mode 100644 eng/common/cross/arm/trusty-lttng-2.4.patch delete mode 100644 eng/common/cross/arm/trusty.patch delete mode 100644 eng/common/cross/arm64/sources.list.trusty delete mode 100644 eng/common/cross/x86/sources.list.trusty create mode 100644 eng/common/generate-locproject.ps1 rename eng/common/native/{find-native-compiler.sh => init-compiler.sh} (50%) delete mode 100644 eng/common/performance/blazor_perf.proj delete mode 100644 eng/common/performance/crossgen_perf.proj delete mode 100644 eng/common/performance/microbenchmarks.proj delete mode 100644 eng/common/performance/performance-setup.ps1 delete mode 100644 eng/common/performance/performance-setup.sh create mode 100644 eng/common/sdl/configure-sdl-tool.ps1 create mode 100644 eng/common/sdl/extract-artifact-archives.ps1 create mode 100644 eng/common/templates/job/onelocbuild.yml delete mode 100644 eng/common/templates/job/performance.yml delete mode 100644 eng/common/templates/phases/base.yml delete mode 100644 eng/common/templates/phases/publish-build-assets.yml delete mode 100644 eng/common/templates/steps/perf-send-to-helix.yml create mode 100644 src/Microsoft.Diagnostics.Monitoring.EventPipe/Configuration/AspNetTriggerSourceConfiguration.cs create mode 100644 
src/Microsoft.Diagnostics.Monitoring.EventPipe/Counters/TraceEventExtensions.cs create mode 100644 src/Microsoft.Diagnostics.Monitoring.EventPipe/Triggers/AspNet/AspNetRequestCountTrigger.cs create mode 100644 src/Microsoft.Diagnostics.Monitoring.EventPipe/Triggers/AspNet/AspNetRequestCountTriggerSettings.cs create mode 100644 src/Microsoft.Diagnostics.Monitoring.EventPipe/Triggers/AspNet/AspNetRequestDurationTrigger.cs create mode 100644 src/Microsoft.Diagnostics.Monitoring.EventPipe/Triggers/AspNet/AspNetRequestDurationTriggerSettings.cs create mode 100644 src/Microsoft.Diagnostics.Monitoring.EventPipe/Triggers/AspNet/AspNetRequestStatusTrigger.cs create mode 100644 src/Microsoft.Diagnostics.Monitoring.EventPipe/Triggers/AspNet/AspNetRequestStatusTriggerSettings.cs create mode 100644 src/Microsoft.Diagnostics.Monitoring.EventPipe/Triggers/AspNet/AspNetTrigger.cs create mode 100644 src/Microsoft.Diagnostics.Monitoring.EventPipe/Triggers/AspNet/AspNetTriggerFactories.cs create mode 100644 src/Microsoft.Diagnostics.Monitoring.EventPipe/Triggers/AspNet/AspNetTriggerSettings.cs create mode 100644 src/Microsoft.Diagnostics.Monitoring.EventPipe/Triggers/AspNet/GlobMatcher.cs create mode 100644 src/Microsoft.Diagnostics.Monitoring.EventPipe/Triggers/EventCounter/EventCounterTrigger.cs create mode 100644 src/Microsoft.Diagnostics.Monitoring.EventPipe/Triggers/EventCounter/EventCounterTriggerFactory.cs create mode 100644 src/Microsoft.Diagnostics.Monitoring.EventPipe/Triggers/EventCounter/EventCounterTriggerImpl.cs create mode 100644 src/Microsoft.Diagnostics.Monitoring.EventPipe/Triggers/EventCounter/EventCounterTriggerSettings.cs create mode 100644 src/Microsoft.Diagnostics.Monitoring.EventPipe/Triggers/ITraceEventTrigger.cs create mode 100644 src/Microsoft.Diagnostics.Monitoring.EventPipe/Triggers/ITraceEventTriggerFactory.cs create mode 100644 src/Microsoft.Diagnostics.Monitoring.EventPipe/Triggers/Pipelines/EventPipeTriggerPipeline.cs create mode 100644 src/Microsoft.Diagnostics.Monitoring.EventPipe/Triggers/Pipelines/EventPipeTriggerPipelineSettings.cs create mode 100644 src/Microsoft.Diagnostics.Monitoring.EventPipe/Triggers/Pipelines/TraceEventTriggerPipeline.cs create mode 100644 src/Microsoft.Diagnostics.Monitoring.EventPipe/Triggers/SlidingWindow.cs create mode 100644 src/Microsoft.Diagnostics.NETCore.Client/DiagnosticsIpc/IpcResponse.cs create mode 100644 src/Microsoft.Diagnostics.NETCore.Client/StreamExtensions.cs delete mode 100644 src/SOS/SOS.UnitTests/Debuggees/WebApp/Program.cs delete mode 100644 src/SOS/SOS.UnitTests/Debuggees/WebApp/Properties/launchSettings.json delete mode 100644 src/SOS/SOS.UnitTests/Debuggees/WebApp/Startup.cs delete mode 100644 src/SOS/SOS.UnitTests/Debuggees/WebApp/WebApp.csproj create mode 100644 src/Tools/dotnet-counters/CommandLineErrorException.cs delete mode 100644 src/Tools/dotnet-counters/CounterFilter.cs create mode 100644 src/Tools/dotnet-counters/CounterSet.cs create mode 100644 src/Tools/dotnet-stack/Symbolicate.cs create mode 100644 src/tests/Microsoft.Diagnostics.DebugServices.UnitTests/SymbolServiceTests.cs create mode 100644 src/tests/Microsoft.Diagnostics.Monitoring.EventPipe/AspNetTriggerUnitTests.cs create mode 100644 src/tests/Microsoft.Diagnostics.Monitoring.EventPipe/EventCounterConstants.cs create mode 100644 src/tests/Microsoft.Diagnostics.Monitoring.EventPipe/EventCounterTriggerTests.cs create mode 100644 src/tests/Microsoft.Diagnostics.Monitoring.EventPipe/GlobMatcherTests.cs create mode 100644 
src/tests/Microsoft.Diagnostics.Monitoring.EventPipe/SlidingWindowTests.cs rename src/tests/Microsoft.Diagnostics.NETCore.Client/{ReversedServerHelper.cs => DiagnosticPortsHelper.cs} (56%) create mode 100644 src/tests/Microsoft.Diagnostics.NETCore.Client/DiagnosticsClientApiShim.cs create mode 100644 src/tests/Microsoft.Diagnostics.NETCore.Client/DiagnosticsClientApiShimExtensions.cs delete mode 100644 src/tests/dotnet-counters/TestHelpers.cs diff --git a/NuGet.config b/NuGet.config index 05dc67aa6d..f256d81e16 100644 --- a/NuGet.config +++ b/NuGet.config @@ -13,8 +13,6 @@ - - diff --git a/debuggees.sln b/debuggees.sln index d1ab497a71..731e71af09 100644 --- a/debuggees.sln +++ b/debuggees.sln @@ -13,8 +13,6 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Debuggees", "Debuggees", "{ EndProject Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "WebApp3", "src\SOS\SOS.UnitTests\Debuggees\WebApp3\WebApp3.csproj", "{252E5845-8D4C-4306-9D8F-ED2E2F7005F6}" EndProject -Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "WebApp", "src\SOS\SOS.UnitTests\Debuggees\WebApp\WebApp.csproj", "{E7FEA82E-0E16-4868-B122-4B0BC0014E7F}" -EndProject Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "SimpleThrow", "src\SOS\SOS.UnitTests\Debuggees\SimpleThrow\SimpleThrow.csproj", "{179EF543-E30A-4428-ABA0-2E2621860173}" EndProject Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "DivZero", "src\SOS\SOS.UnitTests\Debuggees\DivZero\DivZero.csproj", "{447AC053-2E0A-4119-BD11-30A4A8E3F765}" @@ -39,7 +37,7 @@ Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "LineNums", "src\SOS\SOS.Uni EndProject Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "GCPOH", "src\SOS\SOS.UnitTests\Debuggees\GCPOH\GCPOH.csproj", "{0A34CA51-8B8C-41A1-BE24-AB2C574EA144}" EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "DotnetDumpCommands", "src\SOS\SOS.UnitTests\Debuggees\DotnetDumpCommands\DotnetDumpCommands.csproj", "{F9A69812-DC52-428D-9DB1-8B831A8FF776}" +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "DotnetDumpCommands", "src\SOS\SOS.UnitTests\Debuggees\DotnetDumpCommands\DotnetDumpCommands.csproj", "{F9A69812-DC52-428D-9DB1-8B831A8FF776}" EndProject Global GlobalSection(SolutionConfigurationPlatforms) = preSolution @@ -145,46 +143,6 @@ Global {252E5845-8D4C-4306-9D8F-ED2E2F7005F6}.RelWithDebInfo|x64.Build.0 = Release|Any CPU {252E5845-8D4C-4306-9D8F-ED2E2F7005F6}.RelWithDebInfo|x86.ActiveCfg = Release|Any CPU {252E5845-8D4C-4306-9D8F-ED2E2F7005F6}.RelWithDebInfo|x86.Build.0 = Release|Any CPU - {E7FEA82E-0E16-4868-B122-4B0BC0014E7F}.Checked|Any CPU.ActiveCfg = Debug|Any CPU - {E7FEA82E-0E16-4868-B122-4B0BC0014E7F}.Checked|Any CPU.Build.0 = Debug|Any CPU - {E7FEA82E-0E16-4868-B122-4B0BC0014E7F}.Checked|ARM.ActiveCfg = Debug|Any CPU - {E7FEA82E-0E16-4868-B122-4B0BC0014E7F}.Checked|ARM.Build.0 = Debug|Any CPU - {E7FEA82E-0E16-4868-B122-4B0BC0014E7F}.Checked|ARM64.ActiveCfg = Debug|Any CPU - {E7FEA82E-0E16-4868-B122-4B0BC0014E7F}.Checked|ARM64.Build.0 = Debug|Any CPU - {E7FEA82E-0E16-4868-B122-4B0BC0014E7F}.Checked|x64.ActiveCfg = Debug|Any CPU - {E7FEA82E-0E16-4868-B122-4B0BC0014E7F}.Checked|x64.Build.0 = Debug|Any CPU - {E7FEA82E-0E16-4868-B122-4B0BC0014E7F}.Checked|x86.ActiveCfg = Debug|Any CPU - {E7FEA82E-0E16-4868-B122-4B0BC0014E7F}.Checked|x86.Build.0 = Debug|Any CPU - {E7FEA82E-0E16-4868-B122-4B0BC0014E7F}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {E7FEA82E-0E16-4868-B122-4B0BC0014E7F}.Debug|Any CPU.Build.0 = Debug|Any CPU - {E7FEA82E-0E16-4868-B122-4B0BC0014E7F}.Debug|ARM.ActiveCfg = 
Debug|Any CPU - {E7FEA82E-0E16-4868-B122-4B0BC0014E7F}.Debug|ARM.Build.0 = Debug|Any CPU - {E7FEA82E-0E16-4868-B122-4B0BC0014E7F}.Debug|ARM64.ActiveCfg = Debug|Any CPU - {E7FEA82E-0E16-4868-B122-4B0BC0014E7F}.Debug|ARM64.Build.0 = Debug|Any CPU - {E7FEA82E-0E16-4868-B122-4B0BC0014E7F}.Debug|x64.ActiveCfg = Debug|Any CPU - {E7FEA82E-0E16-4868-B122-4B0BC0014E7F}.Debug|x64.Build.0 = Debug|Any CPU - {E7FEA82E-0E16-4868-B122-4B0BC0014E7F}.Debug|x86.ActiveCfg = Debug|Any CPU - {E7FEA82E-0E16-4868-B122-4B0BC0014E7F}.Debug|x86.Build.0 = Debug|Any CPU - {E7FEA82E-0E16-4868-B122-4B0BC0014E7F}.Release|Any CPU.ActiveCfg = Release|Any CPU - {E7FEA82E-0E16-4868-B122-4B0BC0014E7F}.Release|Any CPU.Build.0 = Release|Any CPU - {E7FEA82E-0E16-4868-B122-4B0BC0014E7F}.Release|ARM.ActiveCfg = Release|Any CPU - {E7FEA82E-0E16-4868-B122-4B0BC0014E7F}.Release|ARM.Build.0 = Release|Any CPU - {E7FEA82E-0E16-4868-B122-4B0BC0014E7F}.Release|ARM64.ActiveCfg = Release|Any CPU - {E7FEA82E-0E16-4868-B122-4B0BC0014E7F}.Release|ARM64.Build.0 = Release|Any CPU - {E7FEA82E-0E16-4868-B122-4B0BC0014E7F}.Release|x64.ActiveCfg = Release|Any CPU - {E7FEA82E-0E16-4868-B122-4B0BC0014E7F}.Release|x64.Build.0 = Release|Any CPU - {E7FEA82E-0E16-4868-B122-4B0BC0014E7F}.Release|x86.ActiveCfg = Release|Any CPU - {E7FEA82E-0E16-4868-B122-4B0BC0014E7F}.Release|x86.Build.0 = Release|Any CPU - {E7FEA82E-0E16-4868-B122-4B0BC0014E7F}.RelWithDebInfo|Any CPU.ActiveCfg = Release|Any CPU - {E7FEA82E-0E16-4868-B122-4B0BC0014E7F}.RelWithDebInfo|Any CPU.Build.0 = Release|Any CPU - {E7FEA82E-0E16-4868-B122-4B0BC0014E7F}.RelWithDebInfo|ARM.ActiveCfg = Release|Any CPU - {E7FEA82E-0E16-4868-B122-4B0BC0014E7F}.RelWithDebInfo|ARM.Build.0 = Release|Any CPU - {E7FEA82E-0E16-4868-B122-4B0BC0014E7F}.RelWithDebInfo|ARM64.ActiveCfg = Release|Any CPU - {E7FEA82E-0E16-4868-B122-4B0BC0014E7F}.RelWithDebInfo|ARM64.Build.0 = Release|Any CPU - {E7FEA82E-0E16-4868-B122-4B0BC0014E7F}.RelWithDebInfo|x64.ActiveCfg = Release|Any CPU - {E7FEA82E-0E16-4868-B122-4B0BC0014E7F}.RelWithDebInfo|x64.Build.0 = Release|Any CPU - {E7FEA82E-0E16-4868-B122-4B0BC0014E7F}.RelWithDebInfo|x86.ActiveCfg = Release|Any CPU - {E7FEA82E-0E16-4868-B122-4B0BC0014E7F}.RelWithDebInfo|x86.Build.0 = Release|Any CPU {179EF543-E30A-4428-ABA0-2E2621860173}.Checked|Any CPU.ActiveCfg = Debug|Any CPU {179EF543-E30A-4428-ABA0-2E2621860173}.Checked|Any CPU.Build.0 = Debug|Any CPU {179EF543-E30A-4428-ABA0-2E2621860173}.Checked|ARM.ActiveCfg = Debug|Any CPU @@ -714,7 +672,6 @@ Global {41638A4C-0DAF-47ED-A774-ECBBAC0315D7} = {19FAB78C-3351-4911-8F0C-8C6056401740} {C3072949-6D24-451B-A308-2F3621F858B0} = {41638A4C-0DAF-47ED-A774-ECBBAC0315D7} {252E5845-8D4C-4306-9D8F-ED2E2F7005F6} = {C3072949-6D24-451B-A308-2F3621F858B0} - {E7FEA82E-0E16-4868-B122-4B0BC0014E7F} = {C3072949-6D24-451B-A308-2F3621F858B0} {179EF543-E30A-4428-ABA0-2E2621860173} = {C3072949-6D24-451B-A308-2F3621F858B0} {447AC053-2E0A-4119-BD11-30A4A8E3F765} = {C3072949-6D24-451B-A308-2F3621F858B0} {664F46A9-3C99-489B-AAB9-4CD3A430C425} = {C3072949-6D24-451B-A308-2F3621F858B0} diff --git a/diagnostics.yml b/diagnostics.yml index 91f484de36..0ffba53fdd 100644 --- a/diagnostics.yml +++ b/diagnostics.yml @@ -140,10 +140,11 @@ stages: - template: /eng/build.yml parameters: - name: Alpine3_6 + name: Alpine3_13 osGroup: Linux - dockerImage: mcr.microsoft.com/dotnet-buildtools/prereqs:alpine-3.6-WithNode-f4d3fe3-20181220200247 + dockerImage: mcr.microsoft.com/dotnet-buildtools/prereqs:alpine-3.13-WithNode-20210728123842-ddfc481 artifactsTargetPath: 
bin/Linux-musl.x64.Release + requiresCapPtraceContainer: true strategy: matrix: Build_Release: @@ -217,9 +218,9 @@ stages: - template: /eng/build.yml parameters: - name: Alpine3_6_cross64 + name: Alpine_cross64 osGroup: Linux - dockerImage: mcr.microsoft.com/dotnet-buildtools/prereqs:ubuntu-16.04-cross-arm64-alpine-406629a-20200127195039 + dockerImage: mcr.microsoft.com/dotnet-buildtools/prereqs:ubuntu-16.04-cross-arm64-alpine-20210719121212-b2c2436 crossrootfsDir: '/crossrootfs/arm64' artifactsTargetPath: bin/Linux-musl.arm64.Release buildAndSkipTest: true @@ -339,12 +340,12 @@ stages: dependsOn: - Windows - CentOS_7 - - Alpine3_6 + - Alpine3_13 - MacOS - MacOS_cross - Linux_cross - Linux_cross64 - - Alpine3_6_cross64 + - Alpine_cross64 condition: succeeded() pool: name: NetCoreInternal-Pool diff --git a/documentation/FAQ.md b/documentation/FAQ.md index bc42c7d914..589dc107ae 100644 --- a/documentation/FAQ.md +++ b/documentation/FAQ.md @@ -1,6 +1,8 @@ Frequently Asked Questions ========================== +* `dotnet-dump analyze` running on Windows doesn't support MacOS .NET 5.0 and 6.0 core dumps. `dotnet-dump` running on MacOS does support .NET 5.0 but not 6.0 core dumps (which will be fixed in a future dotnet-dump release). MacOS .NET 6.0 core dumps generated by the runtime via [createdump](https://github.com/dotnet/runtime/blob/main/docs/design/coreclr/botr/xplat-minidump-generation.md#os-x) are supported by lldb/SOS running on MacOS. + * If SOS or dotnet-dump analyze commands display "UNKNOWN" for types or functions names, your core dump may not have all the managed state. Dumps created with gdb or gcore have this problem. Linux system generated core dumps need the `coredump_filter` for the process to be set to at least 0x3f. See [core](http://man7.org/linux/man-pages/man5/core.5.html) for more information. * If dump collection (`dotnet-dump collect` or `createdump`) doesn't work in a docker container, try adding the SYS\_TRACE capability with --cap-add=SYS\_PTRACE or --privileged. diff --git a/documentation/debugging-coredump.md b/documentation/debugging-coredump.md index 1fc9699f93..eb43cee166 100644 --- a/documentation/debugging-coredump.md +++ b/documentation/debugging-coredump.md @@ -72,6 +72,8 @@ Even if the core dump was not generated on this machine, the native and managed Follow the rest of the above Linux steps to set the symbol server and load native symbols. +NOTE: The following issue has been fixed with .NET 6.0 core dumps generated by the runtime (see [createdump](https://github.com/dotnet/runtime/blob/main/docs/design/coreclr/botr/xplat-minidump-generation.md#os-x)) with a recent version of SOS. + The MacOS lldb has a bug that prevents SOS clrstack from properly working. Because of this bug SOS can't properly match the lldb native with with the managed thread OSID displayed by `clrthreads`. The `setsostid` command is a work around for this lldb bug. This command maps the OSID from this command: ``` diff --git a/documentation/dotnet-trace-instructions.md b/documentation/dotnet-trace-instructions.md index 8d065bce3e..154996a927 100644 --- a/documentation/dotnet-trace-instructions.md +++ b/documentation/dotnet-trace-instructions.md @@ -84,14 +84,12 @@ Output File : E:\temp\gcperfsim\trace.nettrace Press or to exit... ``` -You can stop collecting the trace by pressing `` or `` key. Doing this will also exit `hello.exe`. +You can stop collecting the trace by pressing Enter or Ctrl + C key. Doing this will also exit `hello.exe`. 
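A minimal sketch of the launch-and-trace workflow this documentation change describes, assuming `hello.exe` is the sample child process referenced above (the exact command line is illustrative, not prescribed):

```
# Launch the target as a child of dotnet-trace and trace it from startup.
# --show-child-io keeps the child's stdin/stdout attached to the console;
# pressing Enter or Ctrl+C stops collection and also exits the child process.
dotnet-trace collect --show-child-io -- hello.exe
```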
-### NOTE -* Launching `hello.exe` via dotnet-trace will redirect its input/output and you will not be able to interact with it on the console by default. Use the --show-child-io switch to interact with its stdin/stdout. - -* Exiting the tool via CTRL+C or SIGTERM will safely end both the tool and the child process. - -* If the child process exits before the tool, the tool will exit as well and the trace should be safely viewable. +> [!NOTE] +> * Launching `hello.exe` via dotnet-trace will redirect its input/output and you will not be able to interact with it on the console by default. Use the `--show-child-io` switch to interact with its stdin/stdout. +> * Exiting the tool via CTRL+C or SIGTERM will safely end both the tool and the child process. +> * If the child process exits before the tool, the tool will exit as well and the trace should be safely viewable. ## Viewing the trace captured from dotnet-trace @@ -282,4 +280,4 @@ Options: The format of the output trace file. This defaults to "nettrace" on Windows and "speedscope" on other OSes. -- (for target applications running .NET 5.0 or later only) - The command to run to launch a child process and trace from startup. \ No newline at end of file + The command to run to launch a child process and trace from startup. diff --git a/documentation/lldb/linux-instructions.md b/documentation/lldb/linux-instructions.md index 75155780f9..efb15082a6 100644 --- a/documentation/lldb/linux-instructions.md +++ b/documentation/lldb/linux-instructions.md @@ -88,15 +88,9 @@ To install the lldb packages: This installs lldb version 10.0.0. -#### Alpine 3.9 #### +#### Alpine 3.9 to 3.12 #### -Currently there is no lldb that works on Alpine. - -Issue https://github.com/dotnet/diagnostics/issues/73 - -#### Alpine 3.12 #### - -lldb 10.0 is available for this Apline version. +lldb 10.0 is available and works for these Alpine versions. 
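With lldb 10.0 available on these Alpine versions, debugging a core dump with SOS follows the same pattern as on other Linux distros. A minimal sketch, assuming the dotnet-sos global tool is installed and using placeholder dump and host paths:

```
# One-time setup: install the SOS plugin and register it with lldb.
dotnet tool install -g dotnet-sos
dotnet-sos install

# Open a core dump (paths are illustrative) and inspect managed state.
lldb --core ./core.1234 ./dotnet
(lldb) clrstack
(lldb) clrthreads
```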
#### CentOS 6 #### diff --git a/dotnet.sh b/dotnet.sh old mode 100644 new mode 100755 diff --git a/eng/Build-Native.cmd b/eng/Build-Native.cmd index 1ac4fa43db..0263b30630 100644 --- a/eng/Build-Native.cmd +++ b/eng/Build-Native.cmd @@ -298,8 +298,8 @@ if %__Build% EQU 1 ( REM Copy the native SOS binaries to where these tools expect for CI & VS testing -set "__dotnet_sos=%__RootBinDir%\bin\dotnet-sos\%__BuildType%\netcoreapp2.1" -set "__dotnet_dump=%__RootBinDir%\bin\dotnet-dump\%__BuildType%\netcoreapp2.1" +set "__dotnet_sos=%__RootBinDir%\bin\dotnet-sos\%__BuildType%\netcoreapp3.1" +set "__dotnet_dump=%__RootBinDir%\bin\dotnet-dump\%__BuildType%\netcoreapp3.1" mkdir %__dotnet_sos%\win-%__BuildArch% mkdir %__dotnet_sos%\publish\win-%__BuildArch% mkdir %__dotnet_dump%\win-%__BuildArch% diff --git a/eng/CleanupPrivateBuild.csproj b/eng/CleanupPrivateBuild.csproj index 5ef592ad86..bb22950b13 100644 --- a/eng/CleanupPrivateBuild.csproj +++ b/eng/CleanupPrivateBuild.csproj @@ -1,6 +1,6 @@ - netcoreapp2.1 + netcoreapp3.1 diff --git a/eng/CreateVersionFile.csproj b/eng/CreateVersionFile.csproj index da5fd90772..e2cfe25269 100644 --- a/eng/CreateVersionFile.csproj +++ b/eng/CreateVersionFile.csproj @@ -1,7 +1,7 @@ - netcoreapp2.1 + netcoreapp3.1 diff --git a/eng/InstallRuntimes.proj b/eng/InstallRuntimes.proj index de0abb9e3b..1cc189bb56 100644 --- a/eng/InstallRuntimes.proj +++ b/eng/InstallRuntimes.proj @@ -23,7 +23,6 @@ $(MicrosoftNETCoreApp50Version) $(MicrosoftAspNetCoreApp50Version) - 5.0 version $(MicrosoftNETCoreApp31Version) $(MicrosoftAspNetCoreApp31Version) - 3.1 version - $(MicrosoftNETCoreApp21Version) $(MicrosoftAspNetCoreApp21Version) - 2.1 version From Arcade: @@ -79,7 +78,6 @@ - @@ -147,9 +145,6 @@ Outputs="$(TestConfigFileName)"> - $(MicrosoftNETCoreApp21Version) - $(MicrosoftAspNetCoreApp21Version) - $(MicrosoftNETCoreApp31Version) $(MicrosoftAspNetCoreApp31Version) @@ -170,9 +165,6 @@ $(PrivateBuildTesting) $(InternalReleaseTesting) - $(RuntimeVersion21) - $(AspNetCoreVersion21) - $(RuntimeVersion31) $(AspNetCoreVersion31) diff --git a/eng/Version.Details.xml b/eng/Version.Details.xml index d5c2b9bad6..9887ba8e87 100644 --- a/eng/Version.Details.xml +++ b/eng/Version.Details.xml @@ -1,24 +1,24 @@ - + https://github.com/dotnet/arcade - 7f13798e5f567b72ffe63205bf49839245f0f8c1 + 3ea0d860c6973f2cbadc9e895c7ec2cbdaec4ad5 - + https://github.com/dotnet/symstore - 3ed87724fe4e98c7ecc77617720591783ee2e676 + b1594bbcab10cc774c12169c694cac11e7e31bbb - + https://github.com/microsoft/clrmd - 957981f36eeccb6e9d266407df6522ca5cfbd899 + d11a6a383bc25a8c3d6b7ef057e9345b3e21cb8b - + https://github.com/microsoft/clrmd - 957981f36eeccb6e9d266407df6522ca5cfbd899 + d11a6a383bc25a8c3d6b7ef057e9345b3e21cb8b - + https://github.com/dotnet/installer - 53e0c8c7f9c65a13c17f58135557665a5a0c15b1 + 885ff3819901e75ee1782185a53a3ab4ea6deac8 https://github.com/dotnet/source-build-reference-packages @@ -27,26 +27,26 @@ - + https://github.com/dotnet/arcade - 7f13798e5f567b72ffe63205bf49839245f0f8c1 + 3ea0d860c6973f2cbadc9e895c7ec2cbdaec4ad5 - + https://github.com/dotnet/aspnetcore - 837b17847c427be12d69623cf32223c10a4ddba5 + 892230b69d93d479c50be1cabf7e868ddc905911 - + https://github.com/dotnet/aspnetcore - 837b17847c427be12d69623cf32223c10a4ddba5 + 892230b69d93d479c50be1cabf7e868ddc905911 - + https://github.com/dotnet/runtime - 98b7ed1a3b0543a31b5a0f9069cf44cb70c9230c + 9e0a252fd3c5e928973cd08bc1a94ca8dbe7cb84 - + https://github.com/dotnet/runtime - 98b7ed1a3b0543a31b5a0f9069cf44cb70c9230c + 
9e0a252fd3c5e928973cd08bc1a94ca8dbe7cb84 diff --git a/eng/Versions.props b/eng/Versions.props index a3b464b390..28550ce1fc 100644 --- a/eng/Versions.props +++ b/eng/Versions.props @@ -8,22 +8,20 @@ - 1.0.215101 + 1.0.247701 - 2.1.28 - $(MicrosoftNETCoreApp21Version) - 3.1.15 + 3.1.18 $(MicrosoftNETCoreApp31Version) - 5.0.6 + 5.0.9 $(MicrosoftNETCoreApp50Version) - 6.0.0-preview.7.21361.10 - 6.0.0-preview.7.21361.10 + 6.0.0-rtm.21478.11 + 6.0.0-rtm.21478.11 - 6.0.0-preview.7.21363.17 - 6.0.0-preview.7.21363.17 + 6.0.0-rtm.21479.1 + 6.0.0-rtm.21479.1 - 6.0.100-preview.1.21103.13 + 6.0.100-rtm.21476.2 @@ -36,8 +34,8 @@ 4.3.0 1.1.0 - 2.0.230301 - 2.0.230301 + 2.0.243101 + 2.0.243101 16.9.0-beta1.21055.5 2.0.64 2.1.1 @@ -45,12 +43,14 @@ 5.0.1 2.0.0-beta1.20468.1 2.0.0-beta1.20074.1 + 5.0.0 4.5.4 4.7.2 4.7.1 2.4.1 2.0.3 - 6.0.0-beta.21160.7 + 7.0.0-beta.21474.2 10.0.18362 + 12.0.2 diff --git a/eng/build.sh b/eng/build.sh index 28bb8c8f08..31f16ee3c5 100755 --- a/eng/build.sh +++ b/eng/build.sh @@ -491,8 +491,8 @@ fi # if [[ $__NativeBuild == true || $__Test == true ]]; then - __dotnet_sos=$__RootBinDir/bin/dotnet-sos/$__BuildType/netcoreapp2.1/publish/$__DistroRid - __dotnet_dump=$__RootBinDir/bin/dotnet-dump/$__BuildType/netcoreapp2.1/publish/$__DistroRid + __dotnet_sos=$__RootBinDir/bin/dotnet-sos/$__BuildType/netcoreapp3.1/publish/$__DistroRid + __dotnet_dump=$__RootBinDir/bin/dotnet-dump/$__BuildType/netcoreapp3.1/publish/$__DistroRid mkdir -p "$__dotnet_sos" mkdir -p "$__dotnet_dump" diff --git a/eng/common/SetupNugetSources.ps1 b/eng/common/SetupNugetSources.ps1 index a0b5fc37f4..18823840b1 100644 --- a/eng/common/SetupNugetSources.ps1 +++ b/eng/common/SetupNugetSources.ps1 @@ -158,4 +158,10 @@ if ($dotnet5Source -ne $null) { AddPackageSource -Sources $sources -SourceName "dotnet5-internal-transport" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/internal/_packaging/dotnet5-internal-transport/nuget/v2" -Creds $creds -Username $userName -Password $Password } +$dotnet6Source = $sources.SelectSingleNode("add[@key='dotnet6']") +if ($dotnet6Source -ne $null) { + AddPackageSource -Sources $sources -SourceName "dotnet6-internal" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/internal/_packaging/dotnet6-internal/nuget/v2" -Creds $creds -Username $userName -Password $Password + AddPackageSource -Sources $sources -SourceName "dotnet6-internal-transport" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/internal/_packaging/dotnet6-internal-transport/nuget/v2" -Creds $creds -Username $userName -Password $Password +} + $doc.Save($filename) diff --git a/eng/common/SetupNugetSources.sh b/eng/common/SetupNugetSources.sh index 2734601c13..ad3fb74fd2 100644 --- a/eng/common/SetupNugetSources.sh +++ b/eng/common/SetupNugetSources.sh @@ -129,6 +129,30 @@ if [ "$?" == "0" ]; then PackageSources+=('dotnet5-internal-transport') fi +# Ensure dotnet6-internal and dotnet6-internal-transport are in the packageSources if the public dotnet6 feeds are present +grep -i "" + + sed -i.bak "s|$PackageSourcesNodeFooter|$PackageSourceTemplate${NL}$PackageSourcesNodeFooter|" $ConfigFile + fi + PackageSources+=('dotnet6-internal') + + grep -i "" $ConfigFile + if [ "$?" != "0" ]; then + echo "Adding dotnet6-internal-transport to the packageSources." 
+ PackageSourcesNodeFooter="" + PackageSourceTemplate="${TB}" + + sed -i.bak "s|$PackageSourcesNodeFooter|$PackageSourceTemplate${NL}$PackageSourcesNodeFooter|" $ConfigFile + fi + PackageSources+=('dotnet6-internal-transport') +fi + # I want things split line by line PrevIFS=$IFS IFS=$'\n' diff --git a/eng/common/build.ps1 b/eng/common/build.ps1 index 94a91c0817..8943da242f 100644 --- a/eng/common/build.ps1 +++ b/eng/common/build.ps1 @@ -25,6 +25,7 @@ Param( [switch] $prepareMachine, [string] $runtimeSourceFeed = '', [string] $runtimeSourceFeedKey = '', + [switch] $excludePrereleaseVS, [switch] $help, [Parameter(ValueFromRemainingArguments=$true)][String[]]$properties ) @@ -65,6 +66,7 @@ function Print-Usage() { Write-Host " -prepareMachine Prepare machine for CI run, clean up processes after build" Write-Host " -warnAsError Sets warnaserror msbuild parameter ('true' or 'false')" Write-Host " -msbuildEngine Msbuild engine to use to run build ('dotnet', 'vs', or unspecified)." + Write-Host " -excludePrereleaseVS Set to exclude build engines in prerelease versions of Visual Studio" Write-Host "" Write-Host "Command line arguments not listed above are passed thru to msbuild." diff --git a/eng/common/build.sh b/eng/common/build.sh index 55b298f16c..bc07a1c684 100755 --- a/eng/common/build.sh +++ b/eng/common/build.sh @@ -187,6 +187,10 @@ function InitializeCustomToolset { } function Build { + + if [[ "$ci" == true ]]; then + TryLogClientIpAddress + fi InitializeToolset InitializeCustomToolset diff --git a/eng/common/cross/arm/sources.list.trusty b/eng/common/cross/arm/sources.list.trusty deleted file mode 100644 index 07d8f88d82..0000000000 --- a/eng/common/cross/arm/sources.list.trusty +++ /dev/null @@ -1,11 +0,0 @@ -deb http://ports.ubuntu.com/ubuntu-ports/ trusty main restricted universe -deb-src http://ports.ubuntu.com/ubuntu-ports/ trusty main restricted universe - -deb http://ports.ubuntu.com/ubuntu-ports/ trusty-updates main restricted universe -deb-src http://ports.ubuntu.com/ubuntu-ports/ trusty-updates main restricted universe - -deb http://ports.ubuntu.com/ubuntu-ports/ trusty-backports main restricted -deb-src http://ports.ubuntu.com/ubuntu-ports/ trusty-backports main restricted - -deb http://ports.ubuntu.com/ubuntu-ports/ trusty-security main restricted universe multiverse -deb-src http://ports.ubuntu.com/ubuntu-ports/ trusty-security main restricted universe multiverse \ No newline at end of file diff --git a/eng/common/cross/arm/trusty-lttng-2.4.patch b/eng/common/cross/arm/trusty-lttng-2.4.patch deleted file mode 100644 index 8e4dd7ae71..0000000000 --- a/eng/common/cross/arm/trusty-lttng-2.4.patch +++ /dev/null @@ -1,71 +0,0 @@ -From e72c9d7ead60e3317bd6d1fade995c07021c947b Mon Sep 17 00:00:00 2001 -From: Mathieu Desnoyers -Date: Thu, 7 May 2015 13:25:04 -0400 -Subject: [PATCH] Fix: building probe providers with C++ compiler - -Robert Daniels wrote: -> > I'm attempting to use lttng userspace tracing with a C++ application -> > on an ARM platform. I'm using GCC 4.8.4 on Linux 3.14 with the 2.6 -> > release of lttng. I've compiled lttng-modules, lttng-ust, and -> > lttng-tools and have been able to get a simple test working with C -> > code. When I attempt to run the hello.cxx test on my target it will -> > segfault. -> -> -> I spent a little time digging into this issue and finally discovered the -> cause of my segfault with ARM C++ tracepoints. -> -> There is a struct called 'lttng_event' in ust-events.h which contains an -> empty union 'u'. This was the cause of my issue. 
Under C, this empty union -> compiles to a zero byte member while under C++ it compiles to a one byte -> member, and in my case was four-byte aligned which caused my C++ code to -> have the 'cds_list_head node' offset incorrectly by four bytes. This lead -> to an incorrect linked list structure which caused my issue. -> -> Since this union is empty, I simply removed it from the struct and everything -> worked correctly. -> -> I don't know the history or purpose behind this empty union so I'd like to -> know if this is a safe fix. If it is I can submit a patch with the union -> removed. - -That's a very nice catch! - -We do not support building tracepoint probe provider with -g++ yet, as stated in lttng-ust(3): - -"- Note for C++ support: although an application instrumented with - tracepoints can be compiled with g++, tracepoint probes should be - compiled with gcc (only tested with gcc so far)." - -However, if it works fine with this fix, then I'm tempted to take it, -especially because removing the empty union does not appear to affect -the layout of struct lttng_event as seen from liblttng-ust, which must -be compiled with a C compiler, and from probe providers compiled with -a C compiler. So all we are changing is the layout of a probe provider -compiled with a C++ compiler, which is anyway buggy at the moment, -because it is not compatible with the layout expected by liblttng-ust -compiled with a C compiler. - -Reported-by: Robert Daniels -Signed-off-by: Mathieu Desnoyers ---- - include/lttng/ust-events.h | 2 -- - 1 file changed, 2 deletions(-) - -diff --git a/usr/include/lttng/ust-events.h b/usr/include/lttng/ust-events.h -index 328a875..3d7a274 100644 ---- a/usr/include/lttng/ust-events.h -+++ b/usr/include/lttng/ust-events.h -@@ -407,8 +407,6 @@ struct lttng_event { - void *_deprecated1; - struct lttng_ctx *ctx; - enum lttng_ust_instrumentation instrumentation; -- union { -- } u; - struct cds_list_head node; /* Event list in session */ - struct cds_list_head _deprecated2; - void *_deprecated3; --- -2.7.4 - diff --git a/eng/common/cross/arm/trusty.patch b/eng/common/cross/arm/trusty.patch deleted file mode 100644 index 2f2972f8eb..0000000000 --- a/eng/common/cross/arm/trusty.patch +++ /dev/null @@ -1,97 +0,0 @@ -diff -u -r a/usr/include/urcu/uatomic/generic.h b/usr/include/urcu/uatomic/generic.h ---- a/usr/include/urcu/uatomic/generic.h 2014-03-28 06:04:42.000000000 +0900 -+++ b/usr/include/urcu/uatomic/generic.h 2017-02-13 10:35:21.189927116 +0900 -@@ -65,17 +65,17 @@ - switch (len) { - #ifdef UATOMIC_HAS_ATOMIC_BYTE - case 1: -- return __sync_val_compare_and_swap_1(addr, old, _new); -+ return __sync_val_compare_and_swap_1((uint8_t *) addr, old, _new); - #endif - #ifdef UATOMIC_HAS_ATOMIC_SHORT - case 2: -- return __sync_val_compare_and_swap_2(addr, old, _new); -+ return __sync_val_compare_and_swap_2((uint16_t *) addr, old, _new); - #endif - case 4: -- return __sync_val_compare_and_swap_4(addr, old, _new); -+ return __sync_val_compare_and_swap_4((uint32_t *) addr, old, _new); - #if (CAA_BITS_PER_LONG == 64) - case 8: -- return __sync_val_compare_and_swap_8(addr, old, _new); -+ return __sync_val_compare_and_swap_8((uint64_t *) addr, old, _new); - #endif - } - _uatomic_link_error(); -@@ -100,20 +100,20 @@ - switch (len) { - #ifdef UATOMIC_HAS_ATOMIC_BYTE - case 1: -- __sync_and_and_fetch_1(addr, val); -+ __sync_and_and_fetch_1((uint8_t *) addr, val); - return; - #endif - #ifdef UATOMIC_HAS_ATOMIC_SHORT - case 2: -- __sync_and_and_fetch_2(addr, val); -+ 
__sync_and_and_fetch_2((uint16_t *) addr, val); - return; - #endif - case 4: -- __sync_and_and_fetch_4(addr, val); -+ __sync_and_and_fetch_4((uint32_t *) addr, val); - return; - #if (CAA_BITS_PER_LONG == 64) - case 8: -- __sync_and_and_fetch_8(addr, val); -+ __sync_and_and_fetch_8((uint64_t *) addr, val); - return; - #endif - } -@@ -139,20 +139,20 @@ - switch (len) { - #ifdef UATOMIC_HAS_ATOMIC_BYTE - case 1: -- __sync_or_and_fetch_1(addr, val); -+ __sync_or_and_fetch_1((uint8_t *) addr, val); - return; - #endif - #ifdef UATOMIC_HAS_ATOMIC_SHORT - case 2: -- __sync_or_and_fetch_2(addr, val); -+ __sync_or_and_fetch_2((uint16_t *) addr, val); - return; - #endif - case 4: -- __sync_or_and_fetch_4(addr, val); -+ __sync_or_and_fetch_4((uint32_t *) addr, val); - return; - #if (CAA_BITS_PER_LONG == 64) - case 8: -- __sync_or_and_fetch_8(addr, val); -+ __sync_or_and_fetch_8((uint64_t *) addr, val); - return; - #endif - } -@@ -180,17 +180,17 @@ - switch (len) { - #ifdef UATOMIC_HAS_ATOMIC_BYTE - case 1: -- return __sync_add_and_fetch_1(addr, val); -+ return __sync_add_and_fetch_1((uint8_t *) addr, val); - #endif - #ifdef UATOMIC_HAS_ATOMIC_SHORT - case 2: -- return __sync_add_and_fetch_2(addr, val); -+ return __sync_add_and_fetch_2((uint16_t *) addr, val); - #endif - case 4: -- return __sync_add_and_fetch_4(addr, val); -+ return __sync_add_and_fetch_4((uint32_t *) addr, val); - #if (CAA_BITS_PER_LONG == 64) - case 8: -- return __sync_add_and_fetch_8(addr, val); -+ return __sync_add_and_fetch_8((uint64_t *) addr, val); - #endif - } - _uatomic_link_error(); diff --git a/eng/common/cross/arm64/sources.list.trusty b/eng/common/cross/arm64/sources.list.trusty deleted file mode 100644 index 07d8f88d82..0000000000 --- a/eng/common/cross/arm64/sources.list.trusty +++ /dev/null @@ -1,11 +0,0 @@ -deb http://ports.ubuntu.com/ubuntu-ports/ trusty main restricted universe -deb-src http://ports.ubuntu.com/ubuntu-ports/ trusty main restricted universe - -deb http://ports.ubuntu.com/ubuntu-ports/ trusty-updates main restricted universe -deb-src http://ports.ubuntu.com/ubuntu-ports/ trusty-updates main restricted universe - -deb http://ports.ubuntu.com/ubuntu-ports/ trusty-backports main restricted -deb-src http://ports.ubuntu.com/ubuntu-ports/ trusty-backports main restricted - -deb http://ports.ubuntu.com/ubuntu-ports/ trusty-security main restricted universe multiverse -deb-src http://ports.ubuntu.com/ubuntu-ports/ trusty-security main restricted universe multiverse \ No newline at end of file diff --git a/eng/common/cross/arm64/tizen-fetch.sh b/eng/common/cross/arm64/tizen-fetch.sh index a48a6f51c4..16d1301f21 100644 --- a/eng/common/cross/arm64/tizen-fetch.sh +++ b/eng/common/cross/arm64/tizen-fetch.sh @@ -157,7 +157,7 @@ fetch_tizen_pkgs() Inform "Initialize arm base" fetch_tizen_pkgs_init standard base Inform "fetch common packages" -fetch_tizen_pkgs aarch64 gcc glibc glibc-devel libicu libicu-devel libatomic linux-glibc-devel +fetch_tizen_pkgs aarch64 gcc glibc glibc-devel libicu libicu-devel libatomic linux-glibc-devel keyutils keyutils-devel libkeyutils Inform "fetch coreclr packages" fetch_tizen_pkgs aarch64 lldb lldb-devel libgcc libstdc++ libstdc++-devel libunwind libunwind-devel lttng-ust-devel lttng-ust userspace-rcu-devel userspace-rcu Inform "fetch corefx packages" diff --git a/eng/common/cross/build-rootfs.sh b/eng/common/cross/build-rootfs.sh index b26622444f..6fa2c8aa55 100644 --- a/eng/common/cross/build-rootfs.sh +++ b/eng/common/cross/build-rootfs.sh @@ -6,10 +6,10 @@ usage() { echo "Usage: 
$0 [BuildArch] [CodeName] [lldbx.y] [--skipunmount] --rootfsdir ]" echo "BuildArch can be: arm(default), armel, arm64, x86" - echo "CodeName - optional, Code name for Linux, can be: trusty, xenial(default), zesty, bionic, alpine, alpine3.9 or alpine3.13. If BuildArch is armel, LinuxCodeName is jessie(default) or tizen." - echo " for FreeBSD can be: freebsd11 or freebsd12." + echo "CodeName - optional, Code name for Linux, can be: xenial(default), zesty, bionic, alpine, alpine3.13 or alpine3.14. If BuildArch is armel, LinuxCodeName is jessie(default) or tizen." + echo " for FreeBSD can be: freebsd11, freebsd12, freebsd13" echo " for illumos can be: illumos." - echo "lldbx.y - optional, LLDB version, can be: lldb3.9(default), lldb4.0, lldb5.0, lldb6.0 no-lldb. Ignored for alpine and FReeBSD" + echo "lldbx.y - optional, LLDB version, can be: lldb3.9(default), lldb4.0, lldb5.0, lldb6.0 no-lldb. Ignored for alpine and FreeBSD" echo "--skipunmount - optional, will skip the unmount of rootfs folder." echo "--use-mirror - optional, use mirror URL to fetch resources, when available." exit 1 @@ -32,10 +32,9 @@ __UbuntuPackages="build-essential" __AlpinePackages="alpine-base" __AlpinePackages+=" build-base" __AlpinePackages+=" linux-headers" -__AlpinePackagesEdgeCommunity=" lldb-dev" -__AlpinePackagesEdgeMain=" llvm10-libs" -__AlpinePackagesEdgeMain+=" python3" -__AlpinePackagesEdgeMain+=" libedit" +__AlpinePackages+=" lldb-dev" +__AlpinePackages+=" python3" +__AlpinePackages+=" libedit" # symlinks fixer __UbuntuPackages+=" symlinks" @@ -61,19 +60,25 @@ __AlpinePackages+=" krb5-dev" __AlpinePackages+=" openssl-dev" __AlpinePackages+=" zlib-dev" -__FreeBSDBase="12.1-RELEASE" +__FreeBSDBase="12.2-RELEASE" __FreeBSDPkg="1.12.0" +__FreeBSDABI="12" __FreeBSDPackages="libunwind" __FreeBSDPackages+=" icu" __FreeBSDPackages+=" libinotify" __FreeBSDPackages+=" lttng-ust" __FreeBSDPackages+=" krb5" +__FreeBSDPackages+=" terminfo-db" __IllumosPackages="icu-64.2nb2" __IllumosPackages+=" mit-krb5-1.16.2nb4" __IllumosPackages+=" openssl-1.1.1e" __IllumosPackages+=" zlib-1.2.11" +# ML.NET dependencies +__UbuntuPackages+=" libomp5" +__UbuntuPackages+=" libomp-dev" + __UseMirror=0 __UnprocessedBuildArgs= @@ -111,6 +116,8 @@ while :; do __UbuntuArch=s390x __UbuntuRepo="http://ports.ubuntu.com/ubuntu-ports/" __UbuntuPackages=$(echo ${__UbuntuPackages} | sed 's/ libunwind8-dev//') + __UbuntuPackages=$(echo ${__UbuntuPackages} | sed 's/ libomp-dev//') + __UbuntuPackages=$(echo ${__UbuntuPackages} | sed 's/ libomp5//') unset __LLDB_Package ;; x86) @@ -139,11 +146,6 @@ while :; do no-lldb) unset __LLDB_Package ;; - trusty) # Ubuntu 14.04 - if [ "$__CodeName" != "jessie" ]; then - __CodeName=trusty - fi - ;; xenial) # Ubuntu 16.04 if [ "$__CodeName" != "jessie" ]; then __CodeName=xenial @@ -183,29 +185,34 @@ while :; do __UbuntuRepo= __Tizen=tizen ;; - alpine|alpine3.9) + alpine|alpine3.13) __CodeName=alpine __UbuntuRepo= - __AlpineVersion=3.9 + __AlpineVersion=3.13 + __AlpinePackages+=" llvm10-libs" ;; - alpine3.13) + alpine3.14) __CodeName=alpine __UbuntuRepo= - __AlpineVersion=3.13 - # Alpine 3.13 has all the packages we need in the 3.13 repository - __AlpinePackages+=$__AlpinePackagesEdgeCommunity - __AlpinePackagesEdgeCommunity= - __AlpinePackages+=$__AlpinePackagesEdgeMain - __AlpinePackagesEdgeMain= + __AlpineVersion=3.14 + __AlpinePackages+=" llvm11-libs" ;; freebsd11) __FreeBSDBase="11.3-RELEASE" + __FreeBSDABI="11" ;& freebsd12) __CodeName=freebsd __BuildArch=x64 __SkipUnmount=1 ;; + freebsd13) + 
__CodeName=freebsd + __FreeBSDBase="13.0-RELEASE" + __FreeBSDABI="13" + __BuildArch=x64 + __SkipUnmount=1 + ;; illumos) __CodeName=illumos __BuildArch=x64 @@ -266,26 +273,12 @@ if [[ "$__CodeName" == "alpine" ]]; then -U --allow-untrusted --root $__RootfsDir --arch $__AlpineArch --initdb \ add $__AlpinePackages - if [[ -n "$__AlpinePackagesEdgeMain" ]]; then - $__ApkToolsDir/apk-tools-$__ApkToolsVersion/apk \ - -X http://dl-cdn.alpinelinux.org/alpine/edge/main \ - -U --allow-untrusted --root $__RootfsDir --arch $__AlpineArch --initdb \ - add $__AlpinePackagesEdgeMain - fi - - if [[ -n "$__AlpinePackagesEdgeCommunity" ]]; then - $__ApkToolsDir/apk-tools-$__ApkToolsVersion/apk \ - -X http://dl-cdn.alpinelinux.org/alpine/edge/community \ - -U --allow-untrusted --root $__RootfsDir --arch $__AlpineArch --initdb \ - add $__AlpinePackagesEdgeCommunity - fi - rm -r $__ApkToolsDir elif [[ "$__CodeName" == "freebsd" ]]; then mkdir -p $__RootfsDir/usr/local/etc + JOBS="$(getconf _NPROCESSORS_ONLN)" wget -O - https://download.freebsd.org/ftp/releases/amd64/${__FreeBSDBase}/base.txz | tar -C $__RootfsDir -Jxf - ./lib ./usr/lib ./usr/libdata ./usr/include ./usr/share/keys ./etc ./bin/freebsd-version - # For now, ask for 11 ABI even on 12. This can be revisited later. - echo "ABI = \"FreeBSD:11:amd64\"; FINGERPRINTS = \"${__RootfsDir}/usr/share/keys\"; REPOS_DIR = [\"${__RootfsDir}/etc/pkg\"]; REPO_AUTOUPDATE = NO; RUN_SCRIPTS = NO;" > ${__RootfsDir}/usr/local/etc/pkg.conf + echo "ABI = \"FreeBSD:${__FreeBSDABI}:amd64\"; FINGERPRINTS = \"${__RootfsDir}/usr/share/keys\"; REPOS_DIR = [\"${__RootfsDir}/etc/pkg\"]; REPO_AUTOUPDATE = NO; RUN_SCRIPTS = NO;" > ${__RootfsDir}/usr/local/etc/pkg.conf echo "FreeBSD: { url: "pkg+http://pkg.FreeBSD.org/\${ABI}/quarterly", mirror_type: \"srv\", signature_type: \"fingerprints\", fingerprints: \"${__RootfsDir}/usr/share/keys/pkg\", enabled: yes }" > ${__RootfsDir}/etc/pkg/FreeBSD.conf mkdir -p $__RootfsDir/tmp # get and build package manager @@ -293,7 +286,7 @@ elif [[ "$__CodeName" == "freebsd" ]]; then cd $__RootfsDir/tmp/pkg-${__FreeBSDPkg} # needed for install to succeed mkdir -p $__RootfsDir/host/etc - ./autogen.sh && ./configure --prefix=$__RootfsDir/host && make && make install + ./autogen.sh && ./configure --prefix=$__RootfsDir/host && make -j "$JOBS" && make install rm -rf $__RootfsDir/tmp/pkg-${__FreeBSDPkg} # install packages we need. 
INSTALL_AS_USER=$(whoami) $__RootfsDir/host/sbin/pkg -r $__RootfsDir -C $__RootfsDir/usr/local/etc/pkg.conf update @@ -356,13 +349,6 @@ elif [[ -n $__CodeName ]]; then umount $__RootfsDir/* || true fi - if [[ "$__BuildArch" == "arm" && "$__CodeName" == "trusty" ]]; then - pushd $__RootfsDir - patch -p1 < $__CrossDir/$__BuildArch/trusty.patch - patch -p1 < $__CrossDir/$__BuildArch/trusty-lttng-2.4.patch - popd - fi - if [[ "$__BuildArch" == "armel" && "$__CodeName" == "jessie" ]]; then pushd $__RootfsDir patch -p1 < $__CrossDir/$__BuildArch/armel.jessie.patch diff --git a/eng/common/cross/toolchain.cmake b/eng/common/cross/toolchain.cmake index fc11001aa7..6501c3a955 100644 --- a/eng/common/cross/toolchain.cmake +++ b/eng/common/cross/toolchain.cmake @@ -44,7 +44,7 @@ elseif(TARGET_ARCH_NAME STREQUAL "x86") set(TOOLCHAIN "i686-linux-gnu") elseif (CMAKE_SYSTEM_NAME STREQUAL "FreeBSD") set(CMAKE_SYSTEM_PROCESSOR "x86_64") - set(triple "x86_64-unknown-freebsd11") + set(triple "x86_64-unknown-freebsd12") elseif (ILLUMOS) set(CMAKE_SYSTEM_PROCESSOR "x86_64") set(TOOLCHAIN "x86_64-illumos") @@ -91,6 +91,9 @@ elseif(CMAKE_SYSTEM_NAME STREQUAL "FreeBSD") set(CMAKE_CXX_COMPILER_TARGET ${triple}) set(CMAKE_ASM_COMPILER_TARGET ${triple}) set(CMAKE_SYSROOT "${CROSS_ROOTFS}") + set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -fuse-ld=lld") + set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} -fuse-ld=lld") + set(CMAKE_MODULE_LINKER_FLAGS "${CMAKE_MODULE_LINKER_FLAGS} -fuse-ld=lld") elseif(ILLUMOS) set(CMAKE_SYSROOT "${CROSS_ROOTFS}") @@ -138,8 +141,8 @@ function(add_toolchain_linker_flag Flag) if (NOT Config STREQUAL "") set(CONFIG_SUFFIX "_${Config}") endif() - set("CMAKE_EXE_LINKER_FLAGS${CONFIG_SUFFIX}" "${CMAKE_EXE_LINKER_FLAGS${CONFIG_SUFFIX}} ${Flag}" PARENT_SCOPE) - set("CMAKE_SHARED_LINKER_FLAGS${CONFIG_SUFFIX}" "${CMAKE_SHARED_LINKER_FLAGS${CONFIG_SUFFIX}} ${Flag}" PARENT_SCOPE) + set("CMAKE_EXE_LINKER_FLAGS${CONFIG_SUFFIX}_INIT" "${CMAKE_EXE_LINKER_FLAGS${CONFIG_SUFFIX}_INIT} ${Flag}" PARENT_SCOPE) + set("CMAKE_SHARED_LINKER_FLAGS${CONFIG_SUFFIX}_INIT" "${CMAKE_SHARED_LINKER_FLAGS${CONFIG_SUFFIX}_INIT} ${Flag}" PARENT_SCOPE) endfunction() if(CMAKE_SYSTEM_NAME STREQUAL "Linux") diff --git a/eng/common/cross/x86/sources.list.trusty b/eng/common/cross/x86/sources.list.trusty deleted file mode 100644 index 9b3085436e..0000000000 --- a/eng/common/cross/x86/sources.list.trusty +++ /dev/null @@ -1,11 +0,0 @@ -deb http://archive.ubuntu.com/ubuntu/ trusty main restricted universe -deb-src http://archive.ubuntu.com/ubuntu/ trusty main restricted universe - -deb http://archive.ubuntu.com/ubuntu/ trusty-updates main restricted universe -deb-src http://archive.ubuntu.com/ubuntu/ trusty-updates main restricted universe - -deb http://archive.ubuntu.com/ubuntu/ trusty-backports main restricted -deb-src http://archive.ubuntu.com/ubuntu/ trusty-backports main restricted - -deb http://archive.ubuntu.com/ubuntu/ trusty-security main restricted universe multiverse -deb-src http://archive.ubuntu.com/ubuntu/ trusty-security main restricted universe multiverse diff --git a/eng/common/dotnet-install.sh b/eng/common/dotnet-install.sh index d6efeb4434..fdfeea66e7 100755 --- a/eng/common/dotnet-install.sh +++ b/eng/common/dotnet-install.sh @@ -70,7 +70,7 @@ case $cpuname in ;; esac -dotnetRoot="$repo_root/.dotnet" +dotnetRoot="${repo_root}.dotnet" if [[ $architecture != "" ]] && [[ $architecture != $buildarch ]]; then dotnetRoot="$dotnetRoot/$architecture" fi diff --git a/eng/common/generate-locproject.ps1 
b/eng/common/generate-locproject.ps1 new file mode 100644 index 0000000000..25e97ac007 --- /dev/null +++ b/eng/common/generate-locproject.ps1 @@ -0,0 +1,117 @@ +Param( + [Parameter(Mandatory=$true)][string] $SourcesDirectory, # Directory where source files live; if using a Localize directory it should live in here + [string] $LanguageSet = 'VS_Main_Languages', # Language set to be used in the LocProject.json + [switch] $UseCheckedInLocProjectJson, # When set, generates a LocProject.json and compares it to one that already exists in the repo; otherwise just generates one + [switch] $CreateNeutralXlfs # Creates neutral xlf files. Only set to false when running locally +) + +# Generates LocProject.json files for the OneLocBuild task. OneLocBuildTask is described here: +# https://ceapex.visualstudio.com/CEINTL/_wiki/wikis/CEINTL.wiki/107/Localization-with-OneLocBuild-Task + +Set-StrictMode -Version 2.0 +$ErrorActionPreference = "Stop" +. $PSScriptRoot\tools.ps1 + +Import-Module -Name (Join-Path $PSScriptRoot 'native\CommonLibrary.psm1') + +$exclusionsFilePath = "$SourcesDirectory\eng\Localize\LocExclusions.json" +$exclusions = @{ Exclusions = @() } +if (Test-Path -Path $exclusionsFilePath) +{ + $exclusions = Get-Content "$exclusionsFilePath" | ConvertFrom-Json +} + +Push-Location "$SourcesDirectory" # push location for Resolve-Path -Relative to work + +# Template files +$jsonFiles = @() +$jsonTemplateFiles = Get-ChildItem -Recurse -Path "$SourcesDirectory" | Where-Object { $_.FullName -Match "\.template\.config\\localize\\.+\.en\.json" } # .NET templating pattern +$jsonTemplateFiles | ForEach-Object { + $null = $_.Name -Match "(.+)\.[\w-]+\.json" # matches '[filename].[langcode].json + + $destinationFile = "$($_.Directory.FullName)\$($Matches.1).json" + $jsonFiles += Copy-Item "$($_.FullName)" -Destination $destinationFile -PassThru +} + +$jsonWinformsTemplateFiles = Get-ChildItem -Recurse -Path "$SourcesDirectory" | Where-Object { $_.FullName -Match "en\\strings\.json" } # current winforms pattern + +$xlfFiles = @() + +$allXlfFiles = Get-ChildItem -Recurse -Path "$SourcesDirectory\*\*.xlf" +$langXlfFiles = @() +if ($allXlfFiles) { + $null = $allXlfFiles[0].FullName -Match "\.([\w-]+)\.xlf" # matches '[langcode].xlf' + $firstLangCode = $Matches.1 + $langXlfFiles = Get-ChildItem -Recurse -Path "$SourcesDirectory\*\*.$firstLangCode.xlf" +} +$langXlfFiles | ForEach-Object { + $null = $_.Name -Match "(.+)\.[\w-]+\.xlf" # matches '[filename].[langcode].xlf + + $destinationFile = "$($_.Directory.FullName)\$($Matches.1).xlf" + $xlfFiles += Copy-Item "$($_.FullName)" -Destination $destinationFile -PassThru +} + +$locFiles = $jsonFiles + $jsonWinformsTemplateFiles + $xlfFiles + +$locJson = @{ + Projects = @( + @{ + LanguageSet = $LanguageSet + LocItems = @( + $locFiles | ForEach-Object { + $outputPath = "$(($_.DirectoryName | Resolve-Path -Relative) + "\")" + $continue = $true + foreach ($exclusion in $exclusions.Exclusions) { + if ($outputPath.Contains($exclusion)) + { + $continue = $false + } + } + $sourceFile = ($_.FullName | Resolve-Path -Relative) + if (!$CreateNeutralXlfs -and $_.Extension -eq '.xlf') { + Remove-Item -Path $sourceFile + } + if ($continue) + { + if ($_.Directory.Name -eq 'en' -and $_.Extension -eq '.json') { + return @{ + SourceFile = $sourceFile + CopyOption = "LangIDOnPath" + OutputPath = "$($_.Directory.Parent.FullName | Resolve-Path -Relative)\" + } + } + else { + return @{ + SourceFile = $sourceFile + CopyOption = "LangIDOnName" + OutputPath = $outputPath + } + } + } + } + ) 
+ } + ) +} + +$json = ConvertTo-Json $locJson -Depth 5 +Write-Host "LocProject.json generated:`n`n$json`n`n" +Pop-Location + +if (!$UseCheckedInLocProjectJson) { + New-Item "$SourcesDirectory\eng\Localize\LocProject.json" -Force # Need this to make sure the Localize directory is created + Set-Content "$SourcesDirectory\eng\Localize\LocProject.json" $json +} +else { + New-Item "$SourcesDirectory\eng\Localize\LocProject-generated.json" -Force # Need this to make sure the Localize directory is created + Set-Content "$SourcesDirectory\eng\Localize\LocProject-generated.json" $json + + if ((Get-FileHash "$SourcesDirectory\eng\Localize\LocProject-generated.json").Hash -ne (Get-FileHash "$SourcesDirectory\eng\Localize\LocProject.json").Hash) { + Write-PipelineTelemetryError -Category "OneLocBuild" -Message "Existing LocProject.json differs from generated LocProject.json. Download LocProject-generated.json and compare them." + + exit 1 + } + else { + Write-Host "Generated LocProject.json and current LocProject.json are identical." + } +} \ No newline at end of file diff --git a/eng/common/init-tools-native.sh b/eng/common/init-tools-native.sh index 5bd205b5da..3e6a8d6acf 100644 --- a/eng/common/init-tools-native.sh +++ b/eng/common/init-tools-native.sh @@ -10,7 +10,7 @@ force=false download_retries=5 retry_wait_time_seconds=30 global_json_file="$(dirname "$(dirname "${scriptroot}")")/global.json" -declare -A native_assets +declare -a native_assets . $scriptroot/pipeline-logging-functions.sh . $scriptroot/native/common-library.sh diff --git a/eng/common/internal-feed-operations.ps1 b/eng/common/internal-feed-operations.ps1 index 418c09930c..92b77347d9 100644 --- a/eng/common/internal-feed-operations.ps1 +++ b/eng/common/internal-feed-operations.ps1 @@ -45,11 +45,11 @@ function SetupCredProvider { # Then, we set the 'VSS_NUGET_EXTERNAL_FEED_ENDPOINTS' environment variable to restore from the stable # feeds successfully - $nugetConfigPath = "$RepoRoot\NuGet.config" + $nugetConfigPath = Join-Path $RepoRoot "NuGet.config" if (-Not (Test-Path -Path $nugetConfigPath)) { Write-PipelineTelemetryError -Category 'Build' -Message 'NuGet.config file not found in repo root!' - ExitWithExitCode 1 + ExitWithExitCode 1 } $endpoints = New-Object System.Collections.ArrayList @@ -85,7 +85,7 @@ function SetupCredProvider { #Workaround for https://github.com/microsoft/msbuild/issues/4430 function InstallDotNetSdkAndRestoreArcade { - $dotnetTempDir = "$RepoRoot\dotnet" + $dotnetTempDir = Join-Path $RepoRoot "dotnet" $dotnetSdkVersion="2.1.507" # After experimentation we know this version works when restoring the SDK (compared to 3.0.*) $dotnet = "$dotnetTempDir\dotnet.exe" $restoreProjPath = "$PSScriptRoot\restore.proj" diff --git a/eng/common/internal-feed-operations.sh b/eng/common/internal-feed-operations.sh index e2233e7812..9378223ba0 100644 --- a/eng/common/internal-feed-operations.sh +++ b/eng/common/internal-feed-operations.sh @@ -39,7 +39,7 @@ function SetupCredProvider { # Then, we set the 'VSS_NUGET_EXTERNAL_FEED_ENDPOINTS' environment variable to restore from the stable # feeds successfully - local nugetConfigPath="$repo_root/NuGet.config" + local nugetConfigPath="{$repo_root}NuGet.config" if [ ! "$nugetConfigPath" ]; then Write-PipelineTelemetryError -category 'Build' "NuGet.config file not found in repo's root!" 
diff --git a/eng/common/internal/Tools.csproj b/eng/common/internal/Tools.csproj index f46d5efe2e..beb9c4648e 100644 --- a/eng/common/internal/Tools.csproj +++ b/eng/common/internal/Tools.csproj @@ -1,5 +1,4 @@ - - + net472 diff --git a/eng/common/msbuild.ps1 b/eng/common/msbuild.ps1 index c640123000..eea19cd845 100644 --- a/eng/common/msbuild.ps1 +++ b/eng/common/msbuild.ps1 @@ -5,6 +5,7 @@ Param( [bool] $nodeReuse = $true, [switch] $ci, [switch] $prepareMachine, + [switch] $excludePrereleaseVS, [Parameter(ValueFromRemainingArguments=$true)][String[]]$extraArgs ) diff --git a/eng/common/native/common-library.sh b/eng/common/native/common-library.sh index bf272dcf55..080c2c283a 100644 --- a/eng/common/native/common-library.sh +++ b/eng/common/native/common-library.sh @@ -148,8 +148,12 @@ function NewScriptShim { fi if [[ ! -f $tool_file_path ]]; then - Write-PipelineTelemetryError -category 'NativeToolsBootstrap' "Specified tool file path:'$tool_file_path' does not exist" - return 1 + # try to see if the path is lower cased + tool_file_path="$(echo $tool_file_path | tr "[:upper:]" "[:lower:]")" + if [[ ! -f $tool_file_path ]]; then + Write-PipelineTelemetryError -category 'NativeToolsBootstrap' "Specified tool file path:'$tool_file_path' does not exist" + return 1 + fi fi local shim_contents=$'#!/usr/bin/env bash\n' diff --git a/eng/common/native/find-native-compiler.sh b/eng/common/native/init-compiler.sh similarity index 50% rename from eng/common/native/find-native-compiler.sh rename to eng/common/native/init-compiler.sh index 289af7eed1..1daadf32a5 100644 --- a/eng/common/native/find-native-compiler.sh +++ b/eng/common/native/init-compiler.sh @@ -1,39 +1,32 @@ #!/usr/bin/env bash # -# This file locates the native compiler with the given name and version and sets the environment variables to locate it. +# This file detects the C/C++ compiler and exports it to the CC/CXX environment variables # -source="${BASH_SOURCE[0]}" - -# resolve $SOURCE until the file is no longer a symlink -while [[ -h $source ]]; do - scriptroot="$( cd -P "$( dirname "$source" )" && pwd )" - source="$(readlink "$source")" - - # if $source was a relative symlink, we need to resolve it relative to the path where the - # symlink file was located - [[ $source != /* ]] && source="$scriptroot/$source" -done -scriptroot="$( cd -P "$( dirname "$source" )" && pwd )" - -if [ $# -lt 0 ] -then +if [[ "$#" -lt 2 ]]; then echo "Usage..." - echo "find-native-compiler.sh " + echo "init-compiler.sh " + echo "Specify the target architecture." echo "Specify the name of compiler (clang or gcc)." echo "Specify the major version of compiler." echo "Specify the minor version of compiler." exit 1 fi -. $scriptroot/../pipeline-logging-functions.sh +. 
"$( cd -P "$( dirname "$0" )" && pwd )"/../pipeline-logging-functions.sh -compiler="$1" +build_arch="$1" +compiler="$2" cxxCompiler="$compiler++" -majorVersion="$2" -minorVersion="$3" +majorVersion="$3" +minorVersion="$4" -if [ "$compiler" = "gcc" ]; then cxxCompiler="g++"; fi +# clear the existing CC and CXX from environment +CC= +CXX= +LDFLAGS= + +if [[ "$compiler" == "gcc" ]]; then cxxCompiler="g++"; fi check_version_exists() { desired_version=-1 @@ -45,47 +38,43 @@ check_version_exists() { desired_version="$1$2" elif command -v "$compiler-$1$2" > /dev/null; then desired_version="-$1$2" - elif command -v "$compiler$1" > /dev/null; then - desired_version="$1" - elif command -v "$compiler-$1" > /dev/null; then - desired_version="-$1" fi echo "$desired_version" } -if [ -z "$CLR_CC" ]; then +if [[ -z "$CLR_CC" ]]; then # Set default versions - if [ -z "$majorVersion" ]; then + if [[ -z "$majorVersion" ]]; then # note: gcc (all versions) and clang versions higher than 6 do not have minor version in file name, if it is zero. - if [ "$compiler" = "clang" ]; then versions=( 12 11 10 9 8 7 6.0 5.0 4.0 3.9 3.8 3.7 3.6 3.5 ) - elif [ "$compiler" = "gcc" ]; then versions=( 9 8 7 6 5 4.9 ); fi + if [[ "$compiler" == "clang" ]]; then versions=( 12 11 10 9 8 7 6.0 5.0 4.0 3.9 3.8 3.7 3.6 3.5 ) + elif [[ "$compiler" == "gcc" ]]; then versions=( 11 10 9 8 7 6 5 4.9 ); fi for version in "${versions[@]}"; do parts=(${version//./ }) desired_version="$(check_version_exists "${parts[0]}" "${parts[1]}")" - if [ "$desired_version" != "-1" ]; then majorVersion="${parts[0]}"; break; fi + if [[ "$desired_version" != "-1" ]]; then majorVersion="${parts[0]}"; break; fi done - if [ -z "$majorVersion" ]; then + if [[ -z "$majorVersion" ]]; then if command -v "$compiler" > /dev/null; then - if [ "$(uname)" != "Darwin" ]; then + if [[ "$(uname)" != "Darwin" ]]; then Write-PipelineTelemetryError -category "Build" -type "warning" "Specific version of $compiler not found, falling back to use the one in PATH." fi - export CC="$(command -v "$compiler")" - export CXX="$(command -v "$cxxCompiler")" + CC="$(command -v "$compiler")" + CXX="$(command -v "$cxxCompiler")" else Write-PipelineTelemetryError -category "Build" "No usable version of $compiler found." exit 1 fi else - if [ "$compiler" = "clang" ] && [ "$majorVersion" -lt 5 ]; then - if [ "$build_arch" = "arm" ] || [ "$build_arch" = "armel" ]; then + if [[ "$compiler" == "clang" && "$majorVersion" -lt 5 ]]; then + if [[ "$build_arch" == "arm" || "$build_arch" == "armel" ]]; then if command -v "$compiler" > /dev/null; then Write-PipelineTelemetryError -category "Build" -type "warning" "Found clang version $majorVersion which is not supported on arm/armel architectures, falling back to use clang from PATH." - export CC="$(command -v "$compiler")" - export CXX="$(command -v "$cxxCompiler")" + CC="$(command -v "$compiler")" + CXX="$(command -v "$cxxCompiler")" else Write-PipelineTelemetryError -category "Build" "Found clang version $majorVersion which is not supported on arm/armel architectures, and there is no clang in PATH." exit 1 @@ -95,31 +84,40 @@ if [ -z "$CLR_CC" ]; then fi else desired_version="$(check_version_exists "$majorVersion" "$minorVersion")" - if [ "$desired_version" = "-1" ]; then + if [[ "$desired_version" == "-1" ]]; then Write-PipelineTelemetryError -category "Build" "Could not find specific version of $compiler: $majorVersion $minorVersion." 
exit 1 fi fi - if [ -z "$CC" ]; then - export CC="$(command -v "$compiler$desired_version")" - export CXX="$(command -v "$cxxCompiler$desired_version")" - if [ -z "$CXX" ]; then export CXX="$(command -v "$cxxCompiler")"; fi + if [[ -z "$CC" ]]; then + CC="$(command -v "$compiler$desired_version")" + CXX="$(command -v "$cxxCompiler$desired_version")" + if [[ -z "$CXX" ]]; then CXX="$(command -v "$cxxCompiler")"; fi fi else - if [ ! -f "$CLR_CC" ]; then + if [[ ! -f "$CLR_CC" ]]; then Write-PipelineTelemetryError -category "Build" "CLR_CC is set but path '$CLR_CC' does not exist" exit 1 fi - export CC="$CLR_CC" - export CXX="$CLR_CXX" + CC="$CLR_CC" + CXX="$CLR_CXX" fi -if [ -z "$CC" ]; then - Write-PipelineTelemetryError -category "Build" "Unable to find $compiler." +if [[ -z "$CC" ]]; then + Write-PipelineTelemetryError -category "Build" "Unable to find $compiler." exit 1 fi -export CCC_CC="$CC" -export CCC_CXX="$CXX" -export SCAN_BUILD_COMMAND="$(command -v "scan-build$desired_version")" +if [[ "$compiler" == "clang" ]]; then + if command -v "lld$desired_version" > /dev/null; then + # Only lld version >= 9 can be considered stable + if [[ "$majorVersion" -ge 9 ]]; then + LDFLAGS="-fuse-ld=lld" + fi + fi +fi + +SCAN_BUILD_COMMAND="$(command -v "scan-build$desired_version")" + +export CC CXX LDFLAGS SCAN_BUILD_COMMAND diff --git a/eng/common/performance/blazor_perf.proj b/eng/common/performance/blazor_perf.proj deleted file mode 100644 index 3b25359c43..0000000000 --- a/eng/common/performance/blazor_perf.proj +++ /dev/null @@ -1,30 +0,0 @@ - - - python3 - $(HelixPreCommands);chmod +x $HELIX_WORKITEM_PAYLOAD/SOD/SizeOnDisk - - - - - %(Identity) - - - - - %HELIX_CORRELATION_PAYLOAD%\performance\src\scenarios\ - $(ScenarioDirectory)blazor\ - - - $HELIX_CORRELATION_PAYLOAD/performance/src/scenarios/ - $(ScenarioDirectory)blazor/ - - - - - $(WorkItemDirectory) - cd $(BlazorDirectory);$(Python) pre.py publish --msbuild %27/p:_TrimmerDumpDependencies=true%27 --msbuild-static AdditionalMonoLinkerOptions=%27"%24(AdditionalMonoLinkerOptions) --dump-dependencies"%27 --binlog %27./traces/blazor_publish.binlog%27 - $(Python) test.py sod --scenario-name "%(Identity)" - $(Python) post.py - - - \ No newline at end of file diff --git a/eng/common/performance/crossgen_perf.proj b/eng/common/performance/crossgen_perf.proj deleted file mode 100644 index eb8bdd9c44..0000000000 --- a/eng/common/performance/crossgen_perf.proj +++ /dev/null @@ -1,110 +0,0 @@ - - - - - %(Identity) - - - - - - py -3 - $(HelixPreCommands) - %HELIX_CORRELATION_PAYLOAD%\Core_Root - %HELIX_CORRELATION_PAYLOAD%\performance\src\scenarios\ - $(ScenarioDirectory)crossgen\ - $(ScenarioDirectory)crossgen2\ - - - python3 - $(HelixPreCommands);chmod +x $HELIX_WORKITEM_PAYLOAD/startup/Startup;chmod +x $HELIX_WORKITEM_PAYLOAD/startup/perfcollect;sudo apt update;chmod +x $HELIX_WORKITEM_PAYLOAD/SOD/SizeOnDisk - $HELIX_CORRELATION_PAYLOAD/Core_Root - $HELIX_CORRELATION_PAYLOAD/performance/src/scenarios/ - $(ScenarioDirectory)crossgen/ - $(ScenarioDirectory)crossgen2/ - - - - - - - - - - - - - - - - - - - - - - - $(WorkItemDirectory) - $(Python) $(CrossgenDirectory)test.py crossgen --core-root $(CoreRoot) --test-name %(Identity) - - - - - - $(WorkItemDirectory) - $(Python) $(Crossgen2Directory)test.py crossgen2 --core-root $(CoreRoot) --single %(Identity) - - - - - - $(WorkItemDirectory) - $(Python) $(Crossgen2Directory)test.py crossgen2 --core-root $(CoreRoot) --single %(Identity) --singlethreaded True - - - - - - $(WorkItemDirectory) - $(Python) 
$(CrossgenDirectory)pre.py crossgen --core-root $(CoreRoot) --single %(Identity) - $(Python) $(CrossgenDirectory)test.py sod --scenario-name "Crossgen %(Identity) Size" --dirs ./crossgen.out/ - $(Python) $(CrossgenDirectory)post.py - - - - - - $(WorkItemDirectory) - $(Python) $(Crossgen2Directory)pre.py crossgen2 --core-root $(CoreRoot) --single %(Identity) - $(Python) $(Crossgen2Directory)test.py sod --scenario-name "Crossgen2 %(Identity) Size" --dirs ./crossgen.out/ - $(Python) $(Crossgen2Directory)post.py - - - - - - - 4:00 - - - - 4:00 - - - 4:00 - - - $(WorkItemDirectory) - $(Python) $(Crossgen2Directory)test.py crossgen2 --core-root $(CoreRoot) --composite $(Crossgen2Directory)framework-r2r.dll.rsp - 1:00 - - - 4:00 - - - 4:00 - - - \ No newline at end of file diff --git a/eng/common/performance/microbenchmarks.proj b/eng/common/performance/microbenchmarks.proj deleted file mode 100644 index 318ca5f1b8..0000000000 --- a/eng/common/performance/microbenchmarks.proj +++ /dev/null @@ -1,144 +0,0 @@ - - - - %HELIX_CORRELATION_PAYLOAD%\performance\scripts\benchmarks_ci.py --csproj %HELIX_CORRELATION_PAYLOAD%\performance\$(TargetCsproj) - --dotnet-versions %DOTNET_VERSION% --cli-source-info args --cli-branch %PERFLAB_BRANCH% --cli-commit-sha %PERFLAB_HASH% --cli-repository https://github.com/%PERFLAB_REPO% --cli-source-timestamp %PERFLAB_BUILDTIMESTAMP% - py -3 - %HELIX_CORRELATION_PAYLOAD%\Core_Root\CoreRun.exe - %HELIX_CORRELATION_PAYLOAD%\Baseline_Core_Root\CoreRun.exe - - $(HelixPreCommands);call %HELIX_CORRELATION_PAYLOAD%\performance\tools\machine-setup.cmd;set PYTHONPATH=%HELIX_WORKITEM_PAYLOAD%\scripts%3B%HELIX_WORKITEM_PAYLOAD% - %HELIX_CORRELATION_PAYLOAD%\artifacts\BenchmarkDotNet.Artifacts - %HELIX_CORRELATION_PAYLOAD%\artifacts\BenchmarkDotNet.Artifacts_Baseline - %HELIX_CORRELATION_PAYLOAD%\performance\src\tools\ResultsComparer\ResultsComparer.csproj - %HELIX_CORRELATION_PAYLOAD%\performance\tools\dotnet\$(Architecture)\dotnet.exe - %25%25 - %HELIX_WORKITEM_ROOT%\testResults.xml - - - - $HELIX_CORRELATION_PAYLOAD - $(BaseDirectory)/performance - - - - $HELIX_WORKITEM_PAYLOAD - $(BaseDirectory) - - - - $(PerformanceDirectory)/scripts/benchmarks_ci.py --csproj $(PerformanceDirectory)/$(TargetCsproj) - --dotnet-versions $DOTNET_VERSION --cli-source-info args --cli-branch $PERFLAB_BRANCH --cli-commit-sha $PERFLAB_HASH --cli-repository https://github.com/$PERFLAB_REPO --cli-source-timestamp $PERFLAB_BUILDTIMESTAMP - python3 - $(BaseDirectory)/Core_Root/corerun - $(BaseDirectory)/Baseline_Core_Root/corerun - $(HelixPreCommands);chmod +x $(PerformanceDirectory)/tools/machine-setup.sh;. 
$(PerformanceDirectory)/tools/machine-setup.sh - $(BaseDirectory)/artifacts/BenchmarkDotNet.Artifacts - $(BaseDirectory)/artifacts/BenchmarkDotNet.Artifacts_Baseline - $(PerformanceDirectory)/src/tools/ResultsComparer/ResultsComparer.csproj - $(PerformanceDirectory)/tools/dotnet/$(Architecture)/dotnet - %25 - $HELIX_WORKITEM_ROOT/testResults.xml - - - - $(CliArguments) --wasm - - - - --corerun %HELIX_CORRELATION_PAYLOAD%\dotnet-mono\shared\Microsoft.NETCore.App\6.0.0\corerun.exe - - - --corerun $(BaseDirectory)/dotnet-mono/shared/Microsoft.NETCore.App/6.0.0/corerun - - - - --corerun $(CoreRun) - - - - --corerun $(BaselineCoreRun) - - - - $(Python) $(WorkItemCommand) --incremental no --architecture $(Architecture) -f $(_Framework) $(PerfLabArguments) - - - - $(WorkItemCommand) $(CliArguments) - - - - 2:30 - 0:15 - - - - - %(Identity) - - - - - 30 - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - false - - - - - - $(WorkItemDirectory) - $(WorkItemCommand) --bdn-artifacts $(BaselineArtifactsDirectory) --bdn-arguments="--anyCategories $(BDNCategories) $(ExtraBenchmarkDotNetArguments) $(BaselineCoreRunArgument) --partition-count $(PartitionCount) --partition-index %(HelixWorkItem.Index)" - $(WorkItemCommand) --bdn-artifacts $(ArtifactsDirectory) --bdn-arguments="--anyCategories $(BDNCategories) $(ExtraBenchmarkDotNetArguments) $(CoreRunArgument) --partition-count $(PartitionCount) --partition-index %(HelixWorkItem.Index)" - $(DotnetExe) run -f $(_Framework) -p $(ResultsComparer) --base $(BaselineArtifactsDirectory) --diff $(ArtifactsDirectory) --threshold 2$(Percent) --xml $(XMLResults);$(FinalCommand) - $(WorkItemTimeout) - - - - - - $(WorkItemDirectory) - $(WorkItemCommand) --bdn-artifacts $(BaselineArtifactsDirectory) --bdn-arguments="--anyCategories $(BDNCategories) $(ExtraBenchmarkDotNetArguments) $(BaselineCoreRunArgument)" - $(WorkItemCommand) --bdn-artifacts $(ArtifactsDirectory) --bdn-arguments="--anyCategories $(BDNCategories) $(ExtraBenchmarkDotNetArguments) $(CoreRunArgument)" - $(DotnetExe) run -f $(_Framework) -p $(ResultsComparer) --base $(BaselineArtifactsDirectory) --diff $(ArtifactsDirectory) --threshold 2$(Percent) --xml $(XMLResults) - 4:00 - - - diff --git a/eng/common/performance/performance-setup.ps1 b/eng/common/performance/performance-setup.ps1 deleted file mode 100644 index 9a64b07e69..0000000000 --- a/eng/common/performance/performance-setup.ps1 +++ /dev/null @@ -1,139 +0,0 @@ -Param( - [string] $SourceDirectory=$env:BUILD_SOURCESDIRECTORY, - [string] $CoreRootDirectory, - [string] $BaselineCoreRootDirectory, - [string] $Architecture="x64", - [string] $Framework="net5.0", - [string] $CompilationMode="Tiered", - [string] $Repository=$env:BUILD_REPOSITORY_NAME, - [string] $Branch=$env:BUILD_SOURCEBRANCH, - [string] $CommitSha=$env:BUILD_SOURCEVERSION, - [string] $BuildNumber=$env:BUILD_BUILDNUMBER, - [string] $RunCategories="Libraries Runtime", - [string] $Csproj="src\benchmarks\micro\MicroBenchmarks.csproj", - [string] $Kind="micro", - [switch] $LLVM, - [switch] $MonoInterpreter, - [switch] $MonoAOT, - [switch] $Internal, - [switch] $Compare, - [string] $MonoDotnet="", - [string] $Configurations="CompilationMode=$CompilationMode RunKind=$Kind", - [string] $LogicalMachine="" -) - -$RunFromPerformanceRepo = ($Repository -eq "dotnet/performance") -or ($Repository -eq "dotnet-performance") -$UseCoreRun = ($CoreRootDirectory -ne [string]::Empty) -$UseBaselineCoreRun = ($BaselineCoreRootDirectory -ne [string]::Empty) - -$PayloadDirectory = (Join-Path 
$SourceDirectory "Payload") -$PerformanceDirectory = (Join-Path $PayloadDirectory "performance") -$WorkItemDirectory = (Join-Path $SourceDirectory "workitem") -$ExtraBenchmarkDotNetArguments = "--iterationCount 1 --warmupCount 0 --invocationCount 1 --unrollFactor 1 --strategy ColdStart --stopOnFirstError true" -$Creator = $env:BUILD_DEFINITIONNAME -$PerfLabArguments = "" -$HelixSourcePrefix = "pr" - -$Queue = "" - -if ($Internal) { - switch ($LogicalMachine) { - "perftiger" { $Queue = "Windows.10.Amd64.19H1.Tiger.Perf" } - "perfowl" { $Queue = "Windows.10.Amd64.20H2.Owl.Perf" } - "perfsurf" { $Queue = "Windows.10.Arm64.Perf.Surf" } - Default { $Queue = "Windows.10.Amd64.19H1.Tiger.Perf" } - } - $PerfLabArguments = "--upload-to-perflab-container" - $ExtraBenchmarkDotNetArguments = "" - $Creator = "" - $HelixSourcePrefix = "official" -} -else { - $Queue = "Windows.10.Amd64.ClientRS4.DevEx.15.8.Open" -} - -if($MonoInterpreter) -{ - $ExtraBenchmarkDotNetArguments = "--category-exclusion-filter NoInterpreter" -} - -if($MonoDotnet -ne "") -{ - $Configurations += " LLVM=$LLVM MonoInterpreter=$MonoInterpreter MonoAOT=$MonoAOT" - if($ExtraBenchmarkDotNetArguments -eq "") - { - #FIX ME: We need to block these tests as they don't run on mono for now - $ExtraBenchmarkDotNetArguments = "--exclusion-filter *Perf_Image* *Perf_NamedPipeStream*" - } - else - { - #FIX ME: We need to block these tests as they don't run on mono for now - $ExtraBenchmarkDotNetArguments += " --exclusion-filter *Perf_Image* *Perf_NamedPipeStream*" - } -} - -# FIX ME: This is a workaround until we get this from the actual pipeline -$CommonSetupArguments="--channel master --queue $Queue --build-number $BuildNumber --build-configs $Configurations --architecture $Architecture" -$SetupArguments = "--repository https://github.com/$Repository --branch $Branch --get-perf-hash --commit-sha $CommitSha $CommonSetupArguments" - - -if ($RunFromPerformanceRepo) { - $SetupArguments = "--perf-hash $CommitSha $CommonSetupArguments" - - robocopy $SourceDirectory $PerformanceDirectory /E /XD $PayloadDirectory $SourceDirectory\artifacts $SourceDirectory\.git -} -else { - git clone --branch master --depth 1 --quiet https://github.com/dotnet/performance $PerformanceDirectory -} - -if($MonoDotnet -ne "") -{ - $UsingMono = "true" - $MonoDotnetPath = (Join-Path $PayloadDirectory "dotnet-mono") - Move-Item -Path $MonoDotnet -Destination $MonoDotnetPath -} - -if ($UseCoreRun) { - $NewCoreRoot = (Join-Path $PayloadDirectory "Core_Root") - Move-Item -Path $CoreRootDirectory -Destination $NewCoreRoot -} -if ($UseBaselineCoreRun) { - $NewBaselineCoreRoot = (Join-Path $PayloadDirectory "Baseline_Core_Root") - Move-Item -Path $BaselineCoreRootDirectory -Destination $NewBaselineCoreRoot -} - -$DocsDir = (Join-Path $PerformanceDirectory "docs") -robocopy $DocsDir $WorkItemDirectory - -# Set variables that we will need to have in future steps -$ci = $true - -. 
"$PSScriptRoot\..\pipeline-logging-functions.ps1" - -# Directories -Write-PipelineSetVariable -Name 'PayloadDirectory' -Value "$PayloadDirectory" -IsMultiJobVariable $false -Write-PipelineSetVariable -Name 'PerformanceDirectory' -Value "$PerformanceDirectory" -IsMultiJobVariable $false -Write-PipelineSetVariable -Name 'WorkItemDirectory' -Value "$WorkItemDirectory" -IsMultiJobVariable $false - -# Script Arguments -Write-PipelineSetVariable -Name 'Python' -Value "py -3" -IsMultiJobVariable $false -Write-PipelineSetVariable -Name 'ExtraBenchmarkDotNetArguments' -Value "$ExtraBenchmarkDotNetArguments" -IsMultiJobVariable $false -Write-PipelineSetVariable -Name 'SetupArguments' -Value "$SetupArguments" -IsMultiJobVariable $false -Write-PipelineSetVariable -Name 'PerfLabArguments' -Value "$PerfLabArguments" -IsMultiJobVariable $false -Write-PipelineSetVariable -Name 'BDNCategories' -Value "$RunCategories" -IsMultiJobVariable $false -Write-PipelineSetVariable -Name 'TargetCsproj' -Value "$Csproj" -IsMultiJobVariable $false -Write-PipelineSetVariable -Name 'Kind' -Value "$Kind" -IsMultiJobVariable $false -Write-PipelineSetVariable -Name 'Architecture' -Value "$Architecture" -IsMultiJobVariable $false -Write-PipelineSetVariable -Name 'UseCoreRun' -Value "$UseCoreRun" -IsMultiJobVariable $false -Write-PipelineSetVariable -Name 'UseBaselineCoreRun' -Value "$UseBaselineCoreRun" -IsMultiJobVariable $false -Write-PipelineSetVariable -Name 'RunFromPerfRepo' -Value "$RunFromPerformanceRepo" -IsMultiJobVariable $false -Write-PipelineSetVariable -Name 'Compare' -Value "$Compare" -IsMultiJobVariable $false -Write-PipelineSetVariable -Name 'MonoDotnet' -Value "$UsingMono" -IsMultiJobVariable $false - -# Helix Arguments -Write-PipelineSetVariable -Name 'Creator' -Value "$Creator" -IsMultiJobVariable $false -Write-PipelineSetVariable -Name 'Queue' -Value "$Queue" -IsMultiJobVariable $false -Write-PipelineSetVariable -Name 'HelixSourcePrefix' -Value "$HelixSourcePrefix" -IsMultiJobVariable $false -Write-PipelineSetVariable -Name '_BuildConfig' -Value "$Architecture.$Kind.$Framework" -IsMultiJobVariable $false - -exit 0 \ No newline at end of file diff --git a/eng/common/performance/performance-setup.sh b/eng/common/performance/performance-setup.sh deleted file mode 100644 index 33b60b5033..0000000000 --- a/eng/common/performance/performance-setup.sh +++ /dev/null @@ -1,297 +0,0 @@ -#!/usr/bin/env bash - -source_directory=$BUILD_SOURCESDIRECTORY -core_root_directory= -baseline_core_root_directory= -architecture=x64 -framework=net5.0 -compilation_mode=tiered -repository=$BUILD_REPOSITORY_NAME -branch=$BUILD_SOURCEBRANCH -commit_sha=$BUILD_SOURCEVERSION -build_number=$BUILD_BUILDNUMBER -internal=false -compare=false -mono_dotnet= -kind="micro" -llvm=false -monointerpreter=false -monoaot=false -run_categories="Libraries Runtime" -csproj="src\benchmarks\micro\MicroBenchmarks.csproj" -configurations="CompliationMode=$compilation_mode RunKind=$kind" -run_from_perf_repo=false -use_core_run=true -use_baseline_core_run=true -using_mono=false -wasm_runtime_loc= -using_wasm=false -use_latest_dotnet=false -logical_machine= - -while (($# > 0)); do - lowerI="$(echo $1 | tr "[:upper:]" "[:lower:]")" - case $lowerI in - --sourcedirectory) - source_directory=$2 - shift 2 - ;; - --corerootdirectory) - core_root_directory=$2 - shift 2 - ;; - --baselinecorerootdirectory) - baseline_core_root_directory=$2 - shift 2 - ;; - --architecture) - architecture=$2 - shift 2 - ;; - --framework) - framework=$2 - shift 2 - ;; - 
--compilationmode) - compilation_mode=$2 - shift 2 - ;; - --logicalmachine) - logical_machine=$2 - shift 2 - ;; - --repository) - repository=$2 - shift 2 - ;; - --branch) - branch=$2 - shift 2 - ;; - --commitsha) - commit_sha=$2 - shift 2 - ;; - --buildnumber) - build_number=$2 - shift 2 - ;; - --kind) - kind=$2 - configurations="CompilationMode=$compilation_mode RunKind=$kind" - shift 2 - ;; - --runcategories) - run_categories=$2 - shift 2 - ;; - --csproj) - csproj=$2 - shift 2 - ;; - --internal) - internal=true - shift 1 - ;; - --alpine) - alpine=true - shift 1 - ;; - --llvm) - llvm=true - shift 1 - ;; - --monointerpreter) - monointerpreter=true - shift 1 - ;; - --monoaot) - monoaot=true - shift 1 - ;; - --monodotnet) - mono_dotnet=$2 - shift 2 - ;; - --wasm) - wasm_runtime_loc=$2 - shift 2 - ;; - --compare) - compare=true - shift 1 - ;; - --configurations) - configurations=$2 - shift 2 - ;; - --latestdotnet) - use_latest_dotnet=true - shift 1 - ;; - *) - echo "Common settings:" - echo " --corerootdirectory Directory where Core_Root exists, if running perf testing with --corerun" - echo " --architecture Architecture of the testing being run" - echo " --configurations List of key=value pairs that will be passed to perf testing infrastructure." - echo " ex: --configurations \"CompilationMode=Tiered OptimzationLevel=PGO\"" - echo " --help Print help and exit" - echo "" - echo "Advanced settings:" - echo " --framework The framework to run, if not running in master" - echo " --compliationmode The compilation mode if not passing --configurations" - echo " --sourcedirectory The directory of the sources. Defaults to env:BUILD_SOURCESDIRECTORY" - echo " --repository The name of the repository in the / format. Defaults to env:BUILD_REPOSITORY_NAME" - echo " --branch The name of the branch. Defaults to env:BUILD_SOURCEBRANCH" - echo " --commitsha The commit sha1 to run against. Defaults to env:BUILD_SOURCEVERSION" - echo " --buildnumber The build number currently running. Defaults to env:BUILD_BUILDNUMBER" - echo " --csproj The relative path to the benchmark csproj whose tests should be run. Defaults to src\benchmarks\micro\MicroBenchmarks.csproj" - echo " --kind Related to csproj. The kind of benchmarks that should be run. Defaults to micro" - echo " --runcategories Related to csproj. Categories of benchmarks to run. Defaults to \"coreclr corefx\"" - echo " --internal If the benchmarks are running as an official job." - echo " --monodotnet Pass the path to the mono dotnet for mono performance testing." - echo " --wasm Path to the unpacked wasm runtime pack." - echo " --latestdotnet --dotnet-versions will not be specified. 
--dotnet-versions defaults to LKG version in global.json " - echo " --alpine Set for runs on Alpine" - echo "" - exit 0 - ;; - esac -done - -if [ "$repository" == "dotnet/performance" ] || [ "$repository" == "dotnet-performance" ]; then - run_from_perf_repo=true -fi - -if [ -z "$configurations" ]; then - configurations="CompilationMode=$compilation_mode" -fi - -if [ -z "$core_root_directory" ]; then - use_core_run=false -fi - -if [ -z "$baseline_core_root_directory" ]; then - use_baseline_core_run=false -fi - -payload_directory=$source_directory/Payload -performance_directory=$payload_directory/performance -workitem_directory=$source_directory/workitem -extra_benchmark_dotnet_arguments="--iterationCount 1 --warmupCount 0 --invocationCount 1 --unrollFactor 1 --strategy ColdStart --stopOnFirstError true" -perflab_arguments= -queue=Ubuntu.1804.Amd64.Open -creator=$BUILD_DEFINITIONNAME -helix_source_prefix="pr" - -if [[ "$internal" == true ]]; then - perflab_arguments="--upload-to-perflab-container" - helix_source_prefix="official" - creator= - extra_benchmark_dotnet_arguments= - - if [[ "$architecture" = "arm64" ]]; then - queue=Ubuntu.1804.Arm64.Perf - else - if [[ "$logical_machine" = "perfowl" ]]; then - queue=Ubuntu.1804.Amd64.Owl.Perf - else - queue=Ubuntu.1804.Amd64.Tiger.Perf - fi - fi - - if [[ "$alpine" = "true" ]]; then - queue=alpine.amd64.tiger.perf - fi -else - if [[ "$architecture" = "arm64" ]]; then - queue=ubuntu.1804.armarch.open - else - queue=Ubuntu.1804.Amd64.Open - fi - - if [[ "$alpine" = "true" ]]; then - queue=alpine.amd64.tiger.perf - fi -fi - -if [[ "$mono_dotnet" != "" ]] && [[ "$monointerpreter" == "false" ]]; then - configurations="$configurations LLVM=$llvm MonoInterpreter=$monointerpreter MonoAOT=$monoaot" - extra_benchmark_dotnet_arguments="$extra_benchmark_dotnet_arguments --category-exclusion-filter NoMono" -fi - -if [[ "$wasm_runtime_loc" != "" ]]; then - configurations="CompilationMode=wasm RunKind=$kind" - extra_benchmark_dotnet_arguments="$extra_benchmark_dotnet_arguments --category-exclusion-filter NoInterpreter NoWASM NoMono" -fi - -if [[ "$mono_dotnet" != "" ]] && [[ "$monointerpreter" == "true" ]]; then - configurations="$configurations LLVM=$llvm MonoInterpreter=$monointerpreter MonoAOT=$monoaot" - extra_benchmark_dotnet_arguments="$extra_benchmark_dotnet_arguments --category-exclusion-filter NoInterpreter NoMono" -fi - -common_setup_arguments="--channel master --queue $queue --build-number $build_number --build-configs $configurations --architecture $architecture" -setup_arguments="--repository https://github.com/$repository --branch $branch --get-perf-hash --commit-sha $commit_sha $common_setup_arguments" - -if [[ "$run_from_perf_repo" = true ]]; then - payload_directory= - workitem_directory=$source_directory - performance_directory=$workitem_directory - setup_arguments="--perf-hash $commit_sha $common_setup_arguments" -else - git clone --branch master --depth 1 --quiet https://github.com/dotnet/performance $performance_directory - - docs_directory=$performance_directory/docs - mv $docs_directory $workitem_directory -fi - -if [[ "$wasm_runtime_loc" != "" ]]; then - using_wasm=true - wasm_dotnet_path=$payload_directory/dotnet-wasm - mv $wasm_runtime_loc $wasm_dotnet_path - extra_benchmark_dotnet_arguments="$extra_benchmark_dotnet_arguments --wasmMainJS \$HELIX_CORRELATION_PAYLOAD/dotnet-wasm/runtime-test.js --wasmEngine /home/helixbot/.jsvu/v8 --customRuntimePack \$HELIX_CORRELATION_PAYLOAD/dotnet-wasm" -fi - -if [[ "$mono_dotnet" != "" ]]; then - 
using_mono=true - mono_dotnet_path=$payload_directory/dotnet-mono - mv $mono_dotnet $mono_dotnet_path -fi - -if [[ "$use_core_run" = true ]]; then - new_core_root=$payload_directory/Core_Root - mv $core_root_directory $new_core_root -fi - -if [[ "$use_baseline_core_run" = true ]]; then - new_baseline_core_root=$payload_directory/Baseline_Core_Root - mv $baseline_core_root_directory $new_baseline_core_root -fi - -ci=true - -_script_dir=$(pwd)/eng/common -. "$_script_dir/pipeline-logging-functions.sh" - -# Make sure all of our variables are available for future steps -Write-PipelineSetVariable -name "UseCoreRun" -value "$use_core_run" -is_multi_job_variable false -Write-PipelineSetVariable -name "UseBaselineCoreRun" -value "$use_baseline_core_run" -is_multi_job_variable false -Write-PipelineSetVariable -name "Architecture" -value "$architecture" -is_multi_job_variable false -Write-PipelineSetVariable -name "PayloadDirectory" -value "$payload_directory" -is_multi_job_variable false -Write-PipelineSetVariable -name "PerformanceDirectory" -value "$performance_directory" -is_multi_job_variable false -Write-PipelineSetVariable -name "WorkItemDirectory" -value "$workitem_directory" -is_multi_job_variable false -Write-PipelineSetVariable -name "Queue" -value "$queue" -is_multi_job_variable false -Write-PipelineSetVariable -name "SetupArguments" -value "$setup_arguments" -is_multi_job_variable false -Write-PipelineSetVariable -name "Python" -value "python3" -is_multi_job_variable false -Write-PipelineSetVariable -name "PerfLabArguments" -value "$perflab_arguments" -is_multi_job_variable false -Write-PipelineSetVariable -name "ExtraBenchmarkDotNetArguments" -value "$extra_benchmark_dotnet_arguments" -is_multi_job_variable false -Write-PipelineSetVariable -name "BDNCategories" -value "$run_categories" -is_multi_job_variable false -Write-PipelineSetVariable -name "TargetCsproj" -value "$csproj" -is_multi_job_variable false -Write-PipelineSetVariable -name "RunFromPerfRepo" -value "$run_from_perf_repo" -is_multi_job_variable false -Write-PipelineSetVariable -name "Creator" -value "$creator" -is_multi_job_variable false -Write-PipelineSetVariable -name "HelixSourcePrefix" -value "$helix_source_prefix" -is_multi_job_variable false -Write-PipelineSetVariable -name "Kind" -value "$kind" -is_multi_job_variable false -Write-PipelineSetVariable -name "_BuildConfig" -value "$architecture.$kind.$framework" -is_multi_job_variable false -Write-PipelineSetVariable -name "Compare" -value "$compare" -is_multi_job_variable false -Write-PipelineSetVariable -name "MonoDotnet" -value "$using_mono" -is_multi_job_variable false -Write-PipelineSetVariable -name "WasmDotnet" -value "$using_wasm" -is_multi_job_variable false diff --git a/eng/common/post-build/sourcelink-validation.ps1 b/eng/common/post-build/sourcelink-validation.ps1 index 1c46f7b634..e8ab29afeb 100644 --- a/eng/common/post-build/sourcelink-validation.ps1 +++ b/eng/common/post-build/sourcelink-validation.ps1 @@ -14,7 +14,10 @@ param( $global:RepoFiles = @{} # Maximum number of jobs to run in parallel -$MaxParallelJobs = 6 +$MaxParallelJobs = 16 + +$MaxRetries = 5 +$RetryWaitTimeInSeconds = 30 # Wait time between check for system load $SecondsBetweenLoadChecks = 10 @@ -29,7 +32,10 @@ $ValidatePackage = { # Ensure input file exist if (!(Test-Path $PackagePath)) { Write-Host "Input file does not exist: $PackagePath" - return 1 + return [pscustomobject]@{ + result = 1 + packagePath = $PackagePath + } } # Extensions for which we'll look for SourceLink information 
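For illustration, the retry behavior that the updated sourcelink validation applies when probing GitHub links can be sketched on its own as follows. Test-LinkWithRetry is a hypothetical helper and not part of this change; the real logic is inlined in the $ValidatePackage script block and shares the $MaxRetries and $RetryWaitTimeInSeconds settings added above.

# Illustration only: standalone sketch of the retry pattern used by $ValidatePackage.
$MaxRetries = 5
$RetryWaitTimeInSeconds = 30

function Test-LinkWithRetry([string] $Link) {
    $attempts = 0
    while ($attempts -lt $MaxRetries) {
        try {
            # Same HEAD probe the script performs for each sourcelink URL.
            $status = (Invoke-WebRequest -Uri $Link -UseBasicParsing -Method HEAD -TimeoutSec 5).StatusCode
        }
        catch {
            $status = 0
        }
        if ($status -eq 200) { return $true }
        $attempts++
        if ($attempts -lt $MaxRetries) {
            $attemptsLeft = $MaxRetries - $attempts
            Write-Warning "Download failed, $attemptsLeft attempts remaining, will retry in $RetryWaitTimeInSeconds seconds"
            Start-Sleep -Seconds $RetryWaitTimeInSeconds
        }
    }
    return $false
}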
@@ -59,7 +65,10 @@ $ValidatePackage = { # We ignore resource DLLs if ($FileName.EndsWith('.resources.dll')) { - return + return [pscustomobject]@{ + result = 0 + packagePath = $PackagePath + } } [System.IO.Compression.ZipFileExtensions]::ExtractToFile($_, $TargetFile, $true) @@ -91,36 +100,59 @@ $ValidatePackage = { $Status = 200 $Cache = $using:RepoFiles - if ( !($Cache.ContainsKey($FilePath)) ) { - try { - $Uri = $Link -as [System.URI] - - # Only GitHub links are valid - if ($Uri.AbsoluteURI -ne $null -and ($Uri.Host -match 'github' -or $Uri.Host -match 'githubusercontent')) { - $Status = (Invoke-WebRequest -Uri $Link -UseBasicParsing -Method HEAD -TimeoutSec 5).StatusCode + $attempts = 0 + + while ($attempts -lt $using:MaxRetries) { + if ( !($Cache.ContainsKey($FilePath)) ) { + try { + $Uri = $Link -as [System.URI] + + if ($Link -match "submodules") { + # Skip submodule links until sourcelink properly handles submodules + $Status = 200 + } + elseif ($Uri.AbsoluteURI -ne $null -and ($Uri.Host -match 'github' -or $Uri.Host -match 'githubusercontent')) { + # Only GitHub links are valid + $Status = (Invoke-WebRequest -Uri $Link -UseBasicParsing -Method HEAD -TimeoutSec 5).StatusCode + } + else { + # If it's not a github link, we want to break out of the loop and not retry. + $Status = 0 + $attempts = $using:MaxRetries + } } - else { + catch { + Write-Host $_ $Status = 0 } } - catch { - write-host $_ - $Status = 0 - } - } - if ($Status -ne 200) { - if ($NumFailedLinks -eq 0) { - if ($FailedFiles.Value -eq 0) { - Write-Host + if ($Status -ne 200) { + $attempts++ + + if ($attempts -lt $using:MaxRetries) + { + $attemptsLeft = $using:MaxRetries - $attempts + Write-Warning "Download failed, $attemptsLeft attempts remaining, will retry in $using:RetryWaitTimeInSeconds seconds" + Start-Sleep -Seconds $using:RetryWaitTimeInSeconds + } + else { + if ($NumFailedLinks -eq 0) { + if ($FailedFiles.Value -eq 0) { + Write-Host + } + + Write-Host "`tFile $RealPath has broken links:" + } + + Write-Host "`t`tFailed to retrieve $Link" + + $NumFailedLinks++ } - - Write-Host "`tFile $RealPath has broken links:" } - - Write-Host "`t`tFailed to retrieve $Link" - - $NumFailedLinks++ + else { + break + } } } } @@ -136,7 +168,7 @@ $ValidatePackage = { } } catch { - + Write-Host $_ } finally { $zip.Dispose() @@ -220,6 +252,7 @@ function ValidateSourceLinkLinks { # Process each NuGet package in parallel Get-ChildItem "$InputPath\*.symbols.nupkg" | ForEach-Object { + Write-Host "Starting $($_.FullName)" Start-Job -ScriptBlock $ValidatePackage -ArgumentList $_.FullName | Out-Null $NumJobs = @(Get-Job -State 'Running').Count @@ -267,6 +300,10 @@ function InstallSourcelinkCli { try { InstallSourcelinkCli + foreach ($Job in @(Get-Job)) { + Remove-Job -Id $Job.Id + } + ValidateSourceLinkLinks } catch { diff --git a/eng/common/post-build/symbols-validation.ps1 b/eng/common/post-build/symbols-validation.ps1 index 99bf28cd5c..a5af041ba7 100644 --- a/eng/common/post-build/symbols-validation.ps1 +++ b/eng/common/post-build/symbols-validation.ps1 @@ -1,13 +1,14 @@ param( - [Parameter(Mandatory=$true)][string] $InputPath, # Full path to directory where NuGet packages to be checked are stored - [Parameter(Mandatory=$true)][string] $ExtractPath, # Full path to directory where the packages will be extracted during validation - [Parameter(Mandatory=$true)][string] $DotnetSymbolVersion, # Version of dotnet symbol to use - [Parameter(Mandatory=$false)][switch] $ContinueOnError, # If we should keep checking symbols after an error - 
[Parameter(Mandatory=$false)][switch] $Clean # Clean extracted symbols directory after checking symbols + [Parameter(Mandatory = $true)][string] $InputPath, # Full path to directory where NuGet packages to be checked are stored + [Parameter(Mandatory = $true)][string] $ExtractPath, # Full path to directory where the packages will be extracted during validation + [Parameter(Mandatory = $true)][string] $DotnetSymbolVersion, # Version of dotnet symbol to use + [Parameter(Mandatory = $false)][switch] $CheckForWindowsPdbs, # If we should check for the existence of windows pdbs in addition to portable PDBs + [Parameter(Mandatory = $false)][switch] $ContinueOnError, # If we should keep checking symbols after an error + [Parameter(Mandatory = $false)][switch] $Clean # Clean extracted symbols directory after checking symbols ) # Maximum number of jobs to run in parallel -$MaxParallelJobs = 6 +$MaxParallelJobs = 16 # Max number of retries $MaxRetry = 5 @@ -19,9 +20,15 @@ $SecondsBetweenLoadChecks = 10 Set-Variable -Name "ERROR_BADEXTRACT" -Option Constant -Value -1 Set-Variable -Name "ERROR_FILEDOESNOTEXIST" -Option Constant -Value -2 +$WindowsPdbVerificationParam = "" +if ($CheckForWindowsPdbs) { + $WindowsPdbVerificationParam = "--windows-pdbs" +} + $CountMissingSymbols = { param( - [string] $PackagePath # Path to a NuGet package + [string] $PackagePath, # Path to a NuGet package + [string] $WindowsPdbVerificationParam # If we should check for the existence of windows pdbs in addition to portable PDBs ) . $using:PSScriptRoot\..\tools.ps1 @@ -34,7 +41,7 @@ $CountMissingSymbols = { if (!(Test-Path $PackagePath)) { Write-PipelineTaskError "Input file does not exist: $PackagePath" return [pscustomobject]@{ - result = $using:ERROR_FILEDOESNOTEXIST + result = $using:ERROR_FILEDOESNOTEXIST packagePath = $PackagePath } } @@ -57,24 +64,25 @@ $CountMissingSymbols = { Write-Host "Something went wrong extracting $PackagePath" Write-Host $_ return [pscustomobject]@{ - result = $using:ERROR_BADEXTRACT + result = $using:ERROR_BADEXTRACT packagePath = $PackagePath } } Get-ChildItem -Recurse $ExtractPath | - Where-Object {$RelevantExtensions -contains $_.Extension} | - ForEach-Object { - $FileName = $_.FullName - if ($FileName -Match '\\ref\\') { - Write-Host "`t Ignoring reference assembly file " $FileName - return - } + Where-Object { $RelevantExtensions -contains $_.Extension } | + ForEach-Object { + $FileName = $_.FullName + if ($FileName -Match '\\ref\\') { + Write-Host "`t Ignoring reference assembly file " $FileName + return + } - $FirstMatchingSymbolDescriptionOrDefault = { + $FirstMatchingSymbolDescriptionOrDefault = { param( - [string] $FullPath, # Full path to the module that has to be checked - [string] $TargetServerParam, # Parameter to pass to `Symbol Tool` indicating the server to lookup for symbols + [string] $FullPath, # Full path to the module that has to be checked + [string] $TargetServerParam, # Parameter to pass to `Symbol Tool` indicating the server to lookup for symbols + [string] $WindowsPdbVerificationParam, # Parameter to pass to potential check for windows-pdbs. 
[string] $SymbolsPath ) @@ -99,15 +107,16 @@ $CountMissingSymbols = { # DWARF file for a .dylib $DylibDwarf = $SymbolPath.Replace($Extension, '.dylib.dwarf') - + $dotnetSymbolExe = "$env:USERPROFILE\.dotnet\tools" $dotnetSymbolExe = Resolve-Path "$dotnetSymbolExe\dotnet-symbol.exe" $totalRetries = 0 while ($totalRetries -lt $using:MaxRetry) { + # Save the output and get diagnostic output - $output = & $dotnetSymbolExe --symbols --modules --windows-pdbs $TargetServerParam $FullPath -o $SymbolsPath --diagnostics | Out-String + $output = & $dotnetSymbolExe --symbols --modules $WindowsPdbVerificationParam $TargetServerParam $FullPath -o $SymbolsPath --diagnostics | Out-String if (Test-Path $PdbPath) { return 'PDB' @@ -124,42 +133,50 @@ $CountMissingSymbols = { elseif (Test-Path $SymbolPath) { return 'Module' } - elseif ($output.Contains("503 Service Unavailable")) { - # If we got a 503 error, we should retry. + else + { $totalRetries++ } - else { - return $null - } } return $null } - $SymbolsOnMSDL = & $FirstMatchingSymbolDescriptionOrDefault $FileName '--microsoft-symbol-server' $SymbolsPath - $SymbolsOnSymWeb = & $FirstMatchingSymbolDescriptionOrDefault $FileName '--internal-server' $SymbolsPath - - Write-Host -NoNewLine "`t Checking file " $FileName "... " + $FileGuid = New-Guid + $ExpandedSymbolsPath = Join-Path -Path $SymbolsPath -ChildPath $FileGuid + + $SymbolsOnMSDL = & $FirstMatchingSymbolDescriptionOrDefault ` + -FullPath $FileName ` + -TargetServerParam '--microsoft-symbol-server' ` + -SymbolsPath "$ExpandedSymbolsPath-msdl" ` + -WindowsPdbVerificationParam $WindowsPdbVerificationParam + $SymbolsOnSymWeb = & $FirstMatchingSymbolDescriptionOrDefault ` + -FullPath $FileName ` + -TargetServerParam '--internal-server' ` + -SymbolsPath "$ExpandedSymbolsPath-symweb" ` + -WindowsPdbVerificationParam $WindowsPdbVerificationParam + + Write-Host -NoNewLine "`t Checking file " $FileName "... " - if ($SymbolsOnMSDL -ne $null -and $SymbolsOnSymWeb -ne $null) { - Write-Host "Symbols found on MSDL ($SymbolsOnMSDL) and SymWeb ($SymbolsOnSymWeb)" + if ($SymbolsOnMSDL -ne $null -and $SymbolsOnSymWeb -ne $null) { + Write-Host "Symbols found on MSDL ($SymbolsOnMSDL) and SymWeb ($SymbolsOnSymWeb)" + } + else { + $MissingSymbols++ + + if ($SymbolsOnMSDL -eq $null -and $SymbolsOnSymWeb -eq $null) { + Write-Host 'No symbols found on MSDL or SymWeb!' } else { - $MissingSymbols++ - - if ($SymbolsOnMSDL -eq $null -and $SymbolsOnSymWeb -eq $null) { - Write-Host 'No symbols found on MSDL or SymWeb!' + if ($SymbolsOnMSDL -eq $null) { + Write-Host 'No symbols found on MSDL!' } else { - if ($SymbolsOnMSDL -eq $null) { - Write-Host 'No symbols found on MSDL!' - } - else { - Write-Host 'No symbols found on SymWeb!' - } + Write-Host 'No symbols found on SymWeb!' 
} } } + } if ($using:Clean) { Remove-Item $ExtractPath -Recurse -Force @@ -168,16 +185,16 @@ $CountMissingSymbols = { Pop-Location return [pscustomobject]@{ - result = $MissingSymbols - packagePath = $PackagePath - } + result = $MissingSymbols + packagePath = $PackagePath + } } function CheckJobResult( - $result, - $packagePath, - [ref]$DupedSymbols, - [ref]$TotalFailures) { + $result, + $packagePath, + [ref]$DupedSymbols, + [ref]$TotalFailures) { if ($result -eq $ERROR_BADEXTRACT) { Write-PipelineTelemetryError -Category 'CheckSymbols' -Message "$packagePath has duplicated symbol files" $DupedSymbols.Value++ @@ -200,6 +217,7 @@ function CheckSymbolsAvailable { Remove-Item $ExtractPath -Force -Recurse -ErrorAction SilentlyContinue } + $TotalPackages = 0 $TotalFailures = 0 $DupedSymbols = 0 @@ -222,7 +240,9 @@ function CheckSymbolsAvailable { return } - Start-Job -ScriptBlock $CountMissingSymbols -ArgumentList $FullName | Out-Null + $TotalPackages++ + + Start-Job -ScriptBlock $CountMissingSymbols -ArgumentList @($FullName,$WindowsPdbVerificationParam) | Out-Null $NumJobs = @(Get-Job -State 'Running').Count @@ -247,11 +267,11 @@ function CheckSymbolsAvailable { if ($TotalFailures -gt 0 -or $DupedSymbols -gt 0) { if ($TotalFailures -gt 0) { - Write-PipelineTelemetryError -Category 'CheckSymbols' -Message "Symbols missing for $TotalFailures packages" + Write-PipelineTelemetryError -Category 'CheckSymbols' -Message "Symbols missing for $TotalFailures/$TotalPackages packages" } if ($DupedSymbols -gt 0) { - Write-PipelineTelemetryError -Category 'CheckSymbols' -Message "$DupedSymbols packages had duplicated symbol files" + Write-PipelineTelemetryError -Category 'CheckSymbols' -Message "$DupedSymbols/$TotalPackages packages had duplicated symbol files and could not be extracted" } ExitWithExitCode 1 diff --git a/eng/common/sdk-task.ps1 b/eng/common/sdk-task.ps1 index f55c43c6f4..7ab9baac5c 100644 --- a/eng/common/sdk-task.ps1 +++ b/eng/common/sdk-task.ps1 @@ -34,7 +34,7 @@ function Print-Usage() { function Build([string]$target) { $logSuffix = if ($target -eq 'Execute') { '' } else { ".$target" } $log = Join-Path $LogDir "$task$logSuffix.binlog" - $outputPath = Join-Path $ToolsetDir "$task\\" + $outputPath = Join-Path $ToolsetDir "$task\" MSBuild $taskProject ` /bl:$log ` @@ -53,7 +53,7 @@ try { } if ($task -eq "") { - Write-PipelineTelemetryError -Category 'Build' -Message "Missing required parameter '-task '" -ForegroundColor Red + Write-PipelineTelemetryError -Category 'Build' -Message "Missing required parameter '-task '" Print-Usage ExitWithExitCode 1 } @@ -64,7 +64,7 @@ try { $GlobalJson.tools | Add-Member -Name "vs" -Value (ConvertFrom-Json "{ `"version`": `"16.5`" }") -MemberType NoteProperty } if( -not ($GlobalJson.tools.PSObject.Properties.Name -match "xcopy-msbuild" )) { - $GlobalJson.tools | Add-Member -Name "xcopy-msbuild" -Value "16.8.0-preview3" -MemberType NoteProperty + $GlobalJson.tools | Add-Member -Name "xcopy-msbuild" -Value "16.10.0-preview2" -MemberType NoteProperty } if ($GlobalJson.tools."xcopy-msbuild".Trim() -ine "none") { $xcopyMSBuildToolsFolder = InitializeXCopyMSBuild $GlobalJson.tools."xcopy-msbuild" -install $true @@ -78,11 +78,14 @@ try { $taskProject = GetSdkTaskProject $task if (!(Test-Path $taskProject)) { - Write-PipelineTelemetryError -Category 'Build' -Message "Unknown task: $task" -ForegroundColor Red + Write-PipelineTelemetryError -Category 'Build' -Message "Unknown task: $task" ExitWithExitCode 1 } if ($restore) { + if ($ci) { + Try-LogClientIpAddress + } 
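For illustration, the updated symbols-validation.ps1 shown above could be invoked as follows. The paths and the dotnet-symbol version here are placeholder values, and -CheckForWindowsPdbs is the new opt-in switch; without it only portable PDBs (and DWARF/module files) are required.

# Illustration only: sample invocation of the updated symbol check (placeholder paths/version).
.\eng\common\post-build\symbols-validation.ps1 `
    -InputPath 'artifacts\packages' `
    -ExtractPath "$env:TEMP\symbols-extract" `
    -DotnetSymbolVersion '1.0.0' `
    -CheckForWindowsPdbs `
    -Clean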
Build 'Restore' } diff --git a/eng/common/sdl/configure-sdl-tool.ps1 b/eng/common/sdl/configure-sdl-tool.ps1 new file mode 100644 index 0000000000..4999c30708 --- /dev/null +++ b/eng/common/sdl/configure-sdl-tool.ps1 @@ -0,0 +1,109 @@ +Param( + [string] $GuardianCliLocation, + [string] $WorkingDirectory, + [string] $TargetDirectory, + [string] $GdnFolder, + # The list of Guardian tools to configure. For each object in the array: + # - If the item is a [hashtable], it must contain these entries: + # - Name = The tool name as Guardian knows it. + # - Scenario = (Optional) Scenario-specific name for this configuration entry. It must be unique + # among all tool entries with the same Name. + # - Args = (Optional) Array of Guardian tool configuration args, like '@("Target > C:\temp")' + # - If the item is a [string] $v, it is treated as '@{ Name="$v" }' + [object[]] $ToolsList, + [string] $GuardianLoggerLevel='Standard', + # Optional: Additional params to add to any tool using CredScan. + [string[]] $CrScanAdditionalRunConfigParams, + # Optional: Additional params to add to any tool using PoliCheck. + [string[]] $PoliCheckAdditionalRunConfigParams +) + +$ErrorActionPreference = 'Stop' +Set-StrictMode -Version 2.0 +$disableConfigureToolsetImport = $true +$global:LASTEXITCODE = 0 + +try { + # `tools.ps1` checks $ci to perform some actions. Since the SDL + # scripts don't necessarily execute in the same agent that run the + # build.ps1/sh script this variable isn't automatically set. + $ci = $true + . $PSScriptRoot\..\tools.ps1 + + # Normalize tools list: all in [hashtable] form with defined values for each key. + $ToolsList = $ToolsList | + ForEach-Object { + if ($_ -is [string]) { + $_ = @{ Name = $_ } + } + + if (-not ($_['Scenario'])) { $_.Scenario = "" } + if (-not ($_['Args'])) { $_.Args = @() } + $_ + } + + Write-Host "List of tools to configure:" + $ToolsList | ForEach-Object { $_ | Out-String | Write-Host } + + # We store config files in the r directory of .gdn + $gdnConfigPath = Join-Path $GdnFolder 'r' + $ValidPath = Test-Path $GuardianCliLocation + + if ($ValidPath -eq $False) + { + Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "Invalid Guardian CLI Location." + ExitWithExitCode 1 + } + + foreach ($tool in $ToolsList) { + # Put together the name and scenario to make a unique key. + $toolConfigName = $tool.Name + if ($tool.Scenario) { + $toolConfigName += "_" + $tool.Scenario + } + + Write-Host "=== Configuring $toolConfigName..." + + $gdnConfigFile = Join-Path $gdnConfigPath "$toolConfigName-configure.gdnconfig" + + # For some tools, add default and automatic args. + if ($tool.Name -eq 'credscan') { + if ($targetDirectory) { + $tool.Args += "TargetDirectory < $TargetDirectory" + } + $tool.Args += "OutputType < pre" + $tool.Args += $CrScanAdditionalRunConfigParams + } elseif ($tool.Name -eq 'policheck') { + if ($targetDirectory) { + $tool.Args += "Target < $TargetDirectory" + } + $tool.Args += $PoliCheckAdditionalRunConfigParams + } + + # Create variable pointing to the args array directly so we can use splat syntax later. + $toolArgs = $tool.Args + + # Configure the tool. If args array is provided or the current tool has some default arguments + # defined, add "--args" and splat each element on the end. Arg format is "{Arg id} < {Value}", + # one per parameter. 
Doc page for "guardian configure": + # https://dev.azure.com/securitytools/SecurityIntegration/_wiki/wikis/Guardian/1395/configure + Exec-BlockVerbosely { + & $GuardianCliLocation configure ` + --working-directory $WorkingDirectory ` + --tool $tool.Name ` + --output-path $gdnConfigFile ` + --logger-level $GuardianLoggerLevel ` + --noninteractive ` + --force ` + $(if ($toolArgs) { "--args" }) @toolArgs + Exit-IfNZEC "Sdl" + } + + Write-Host "Created '$toolConfigName' configuration file: $gdnConfigFile" + } +} +catch { + Write-Host $_.ScriptStackTrace + Write-PipelineTelemetryError -Force -Category 'Sdl' -Message $_ + ExitWithExitCode 1 +} diff --git a/eng/common/sdl/execute-all-sdl-tools.ps1 b/eng/common/sdl/execute-all-sdl-tools.ps1 index 81b729f74a..1157151f48 100644 --- a/eng/common/sdl/execute-all-sdl-tools.ps1 +++ b/eng/common/sdl/execute-all-sdl-tools.ps1 @@ -7,8 +7,17 @@ Param( [string] $SourceDirectory=$env:BUILD_SOURCESDIRECTORY, # Required: the directory where source files are located [string] $ArtifactsDirectory = (Join-Path $env:BUILD_ARTIFACTSTAGINGDIRECTORY ('artifacts')), # Required: the directory where build artifacts are located [string] $AzureDevOpsAccessToken, # Required: access token for dnceng; should be provided via KeyVault - [string[]] $SourceToolsList, # Optional: list of SDL tools to run on source code - [string[]] $ArtifactToolsList, # Optional: list of SDL tools to run on built artifacts + + # Optional: list of SDL tools to run on source code. See 'configure-sdl-tool.ps1' for tools list + # format. + [object[]] $SourceToolsList, + # Optional: list of SDL tools to run on built artifacts. See 'configure-sdl-tool.ps1' for tools + # list format. + [object[]] $ArtifactToolsList, + # Optional: list of SDL tools to run without automatically specifying a target directory. See + # 'configure-sdl-tool.ps1' for tools list format. + [object[]] $CustomToolsList, + [bool] $TsaPublish=$False, # Optional: true will publish results to TSA; only set to true after onboarding to TSA; TSA is the automated framework used to upload test results as bugs. [string] $TsaBranchName=$env:BUILD_SOURCEBRANCH, # Optional: required for TSA publish; defaults to $(Build.SourceBranchName); TSA is the automated framework used to upload test results as bugs. [string] $TsaRepositoryName=$env:BUILD_REPOSITORY_NAME, # Optional: TSA repository name; will be generated automatically if not submitted; TSA is the automated framework used to upload test results as bugs. @@ -32,7 +41,7 @@ try { $ErrorActionPreference = 'Stop' Set-StrictMode -Version 2.0 $disableConfigureToolsetImport = $true - $LASTEXITCODE = 0 + $global:LASTEXITCODE = 0 # `tools.ps1` checks $ci to perform some actions. 
Since the SDL # scripts don't necessarily execute in the same agent that run the @@ -63,13 +72,16 @@ try { ExitWithExitCode 1 } - & $(Join-Path $PSScriptRoot 'init-sdl.ps1') -GuardianCliLocation $guardianCliLocation -Repository $RepoName -BranchName $BranchName -WorkingDirectory $workingDirectory -AzureDevOpsAccessToken $AzureDevOpsAccessToken -GuardianLoggerLevel $GuardianLoggerLevel + Exec-BlockVerbosely { + & $(Join-Path $PSScriptRoot 'init-sdl.ps1') -GuardianCliLocation $guardianCliLocation -Repository $RepoName -BranchName $BranchName -WorkingDirectory $workingDirectory -AzureDevOpsAccessToken $AzureDevOpsAccessToken -GuardianLoggerLevel $GuardianLoggerLevel + } $gdnFolder = Join-Path $workingDirectory '.gdn' if ($TsaOnboard) { if ($TsaCodebaseName -and $TsaNotificationEmail -and $TsaCodebaseAdmin -and $TsaBugAreaPath) { - Write-Host "$guardianCliLocation tsa-onboard --codebase-name `"$TsaCodebaseName`" --notification-alias `"$TsaNotificationEmail`" --codebase-admin `"$TsaCodebaseAdmin`" --instance-url `"$TsaInstanceUrl`" --project-name `"$TsaProjectName`" --area-path `"$TsaBugAreaPath`" --iteration-path `"$TsaIterationPath`" --working-directory $workingDirectory --logger-level $GuardianLoggerLevel" - & $guardianCliLocation tsa-onboard --codebase-name "$TsaCodebaseName" --notification-alias "$TsaNotificationEmail" --codebase-admin "$TsaCodebaseAdmin" --instance-url "$TsaInstanceUrl" --project-name "$TsaProjectName" --area-path "$TsaBugAreaPath" --iteration-path "$TsaIterationPath" --working-directory $workingDirectory --logger-level $GuardianLoggerLevel + Exec-BlockVerbosely { + & $guardianCliLocation tsa-onboard --codebase-name "$TsaCodebaseName" --notification-alias "$TsaNotificationEmail" --codebase-admin "$TsaCodebaseAdmin" --instance-url "$TsaInstanceUrl" --project-name "$TsaProjectName" --area-path "$TsaBugAreaPath" --iteration-path "$TsaIterationPath" --working-directory $workingDirectory --logger-level $GuardianLoggerLevel + } if ($LASTEXITCODE -ne 0) { Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "Guardian tsa-onboard failed with exit code $LASTEXITCODE." ExitWithExitCode $LASTEXITCODE @@ -80,11 +92,41 @@ try { } } - if ($ArtifactToolsList -and $ArtifactToolsList.Count -gt 0) { - & $(Join-Path $PSScriptRoot 'run-sdl.ps1') -GuardianCliLocation $guardianCliLocation -WorkingDirectory $workingDirectory -TargetDirectory $ArtifactsDirectory -GdnFolder $gdnFolder -ToolsList $ArtifactToolsList -AzureDevOpsAccessToken $AzureDevOpsAccessToken -UpdateBaseline $UpdateBaseline -GuardianLoggerLevel $GuardianLoggerLevel -CrScanAdditionalRunConfigParams $CrScanAdditionalRunConfigParams -PoliCheckAdditionalRunConfigParams $PoliCheckAdditionalRunConfigParams + # Configure a list of tools with a default target directory. Populates the ".gdn/r" directory. 
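For illustration, the tools-list format documented at the top of configure-sdl-tool.ps1 above accepts either plain strings or hashtables, so a caller of execute-all-sdl-tools.ps1 might build its -SourceToolsList / -ArtifactToolsList / -CustomToolsList values like this; the tool names, scenario, and args below are placeholder values.

# Illustration only: both entry shapes accepted by the tools-list parameters (placeholder values).
$ToolsList = @(
    'credscan',                                                   # string form, treated as @{ Name = 'credscan' }
    @{ Name = 'policheck'; Scenario = 'artifacts'; Args = @('Target > C:\temp') }
)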
+ function Configure-ToolsList([object[]] $tools, [string] $targetDirectory) { + if ($tools -and $tools.Count -gt 0) { + Exec-BlockVerbosely { + & $(Join-Path $PSScriptRoot 'configure-sdl-tool.ps1') ` + -GuardianCliLocation $guardianCliLocation ` + -WorkingDirectory $workingDirectory ` + -TargetDirectory $targetDirectory ` + -GdnFolder $gdnFolder ` + -ToolsList $tools ` + -AzureDevOpsAccessToken $AzureDevOpsAccessToken ` + -GuardianLoggerLevel $GuardianLoggerLevel ` + -CrScanAdditionalRunConfigParams $CrScanAdditionalRunConfigParams ` + -PoliCheckAdditionalRunConfigParams $PoliCheckAdditionalRunConfigParams + if ($BreakOnFailure) { + Exit-IfNZEC "Sdl" + } + } + } } - if ($SourceToolsList -and $SourceToolsList.Count -gt 0) { - & $(Join-Path $PSScriptRoot 'run-sdl.ps1') -GuardianCliLocation $guardianCliLocation -WorkingDirectory $workingDirectory -TargetDirectory $SourceDirectory -GdnFolder $gdnFolder -ToolsList $SourceToolsList -AzureDevOpsAccessToken $AzureDevOpsAccessToken -UpdateBaseline $UpdateBaseline -GuardianLoggerLevel $GuardianLoggerLevel -CrScanAdditionalRunConfigParams $CrScanAdditionalRunConfigParams -PoliCheckAdditionalRunConfigParams $PoliCheckAdditionalRunConfigParams + + # Configure Artifact and Source tools with default Target directories. + Configure-ToolsList $ArtifactToolsList $ArtifactsDirectory + Configure-ToolsList $SourceToolsList $SourceDirectory + # Configure custom tools with no default Target directory. + Configure-ToolsList $CustomToolsList $null + + # At this point, all tools are configured in the ".gdn" directory. Run them all in a single call. + # (If we used "run" multiple times, each run would overwrite data from earlier runs.) + Exec-BlockVerbosely { + & $(Join-Path $PSScriptRoot 'run-sdl.ps1') ` + -GuardianCliLocation $guardianCliLocation ` + -WorkingDirectory $workingDirectory ` + -UpdateBaseline $UpdateBaseline ` + -GdnFolder $gdnFolder } if ($TsaPublish) { @@ -92,8 +134,9 @@ try { if (-not $TsaRepositoryName) { $TsaRepositoryName = "$($Repository)-$($BranchName)" } - Write-Host "$guardianCliLocation tsa-publish --all-tools --repository-name `"$TsaRepositoryName`" --branch-name `"$TsaBranchName`" --build-number `"$BuildNumber`" --codebase-name `"$TsaCodebaseName`" --notification-alias `"$TsaNotificationEmail`" --codebase-admin `"$TsaCodebaseAdmin`" --instance-url `"$TsaInstanceUrl`" --project-name `"$TsaProjectName`" --area-path `"$TsaBugAreaPath`" --iteration-path `"$TsaIterationPath`" --working-directory $workingDirectory --logger-level $GuardianLoggerLevel" - & $guardianCliLocation tsa-publish --all-tools --repository-name "$TsaRepositoryName" --branch-name "$TsaBranchName" --build-number "$BuildNumber" --onboard $True --codebase-name "$TsaCodebaseName" --notification-alias "$TsaNotificationEmail" --codebase-admin "$TsaCodebaseAdmin" --instance-url "$TsaInstanceUrl" --project-name "$TsaProjectName" --area-path "$TsaBugAreaPath" --iteration-path "$TsaIterationPath" --working-directory $workingDirectory --logger-level $GuardianLoggerLevel + Exec-BlockVerbosely { + & $guardianCliLocation tsa-publish --all-tools --repository-name "$TsaRepositoryName" --branch-name "$TsaBranchName" --build-number "$BuildNumber" --onboard $True --codebase-name "$TsaCodebaseName" --notification-alias "$TsaNotificationEmail" --codebase-admin "$TsaCodebaseAdmin" --instance-url "$TsaInstanceUrl" --project-name "$TsaProjectName" --area-path "$TsaBugAreaPath" --iteration-path "$TsaIterationPath" --working-directory $workingDirectory --logger-level $GuardianLoggerLevel + } if 
($LASTEXITCODE -ne 0) { Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "Guardian tsa-publish failed with exit code $LASTEXITCODE." ExitWithExitCode $LASTEXITCODE @@ -106,7 +149,11 @@ try { if ($BreakOnFailure) { Write-Host "Failing the build in case of breaking results..." - & $guardianCliLocation break + Exec-BlockVerbosely { + & $guardianCliLocation break --working-directory $workingDirectory --logger-level $GuardianLoggerLevel + } + } else { + Write-Host "Letting the build pass even if there were breaking results..." } } catch { diff --git a/eng/common/sdl/extract-artifact-archives.ps1 b/eng/common/sdl/extract-artifact-archives.ps1 new file mode 100644 index 0000000000..68da4fbf25 --- /dev/null +++ b/eng/common/sdl/extract-artifact-archives.ps1 @@ -0,0 +1,63 @@ +# This script looks for each archive file in a directory and extracts it into the target directory. +# For example, the file "$InputPath/bin.tar.gz" extracts to "$ExtractPath/bin.tar.gz.extracted/**". +# Uses the "tar" utility added to Windows 10 / Windows 2019 that supports tar.gz and zip. +param( + # Full path to directory where archives are stored. + [Parameter(Mandatory=$true)][string] $InputPath, + # Full path to directory to extract archives into. May be the same as $InputPath. + [Parameter(Mandatory=$true)][string] $ExtractPath +) + +$ErrorActionPreference = 'Stop' +Set-StrictMode -Version 2.0 + +$disableConfigureToolsetImport = $true + +try { + # `tools.ps1` checks $ci to perform some actions. Since the SDL + # scripts don't necessarily execute in the same agent that run the + # build.ps1/sh script this variable isn't automatically set. + $ci = $true + . $PSScriptRoot\..\tools.ps1 + + Measure-Command { + $jobs = @() + + # Find archive files for non-Windows and Windows builds. + $archiveFiles = @( + Get-ChildItem (Join-Path $InputPath "*.tar.gz") + Get-ChildItem (Join-Path $InputPath "*.zip") + ) + + foreach ($targzFile in $archiveFiles) { + $jobs += Start-Job -ScriptBlock { + $file = $using:targzFile + $fileName = [System.IO.Path]::GetFileName($file) + $extractDir = Join-Path $using:ExtractPath "$fileName.extracted" + + New-Item $extractDir -ItemType Directory -Force | Out-Null + + Write-Host "Extracting '$file' to '$extractDir'..." + + # Pipe errors to stdout to prevent PowerShell detecting them and quitting the job early. + # This type of quit skips the catch, so we wouldn't be able to tell which file triggered the + # error. Save output so it can be stored in the exception string along with context. + $output = tar -xf $file -C $extractDir 2>&1 + # Handle NZEC manually rather than using Exit-IfNZEC: we are in a background job, so we + # don't have access to the outer scope. + if ($LASTEXITCODE -ne 0) { + throw "Error extracting '$file': non-zero exit code ($LASTEXITCODE). Output: '$output'" + } + + Write-Host "Extracted to $extractDir" + } + } + + Receive-Job $jobs -Wait + } +} +catch { + Write-Host $_ + Write-PipelineTelemetryError -Force -Category 'Sdl' -Message $_ + ExitWithExitCode 1 +} diff --git a/eng/common/sdl/init-sdl.ps1 b/eng/common/sdl/init-sdl.ps1 index 1fe9271193..3ac1d92b37 100644 --- a/eng/common/sdl/init-sdl.ps1 +++ b/eng/common/sdl/init-sdl.ps1 @@ -10,7 +10,7 @@ Param( $ErrorActionPreference = 'Stop' Set-StrictMode -Version 2.0 $disableConfigureToolsetImport = $true -$LASTEXITCODE = 0 +$global:LASTEXITCODE = 0 # `tools.ps1` checks $ci to perform some actions. 
Since the SDL # scripts don't necessarily execute in the same agent that run the diff --git a/eng/common/sdl/run-sdl.ps1 b/eng/common/sdl/run-sdl.ps1 index fe95ab35aa..2eac8c78f1 100644 --- a/eng/common/sdl/run-sdl.ps1 +++ b/eng/common/sdl/run-sdl.ps1 @@ -1,19 +1,15 @@ Param( [string] $GuardianCliLocation, [string] $WorkingDirectory, - [string] $TargetDirectory, [string] $GdnFolder, - [string[]] $ToolsList, [string] $UpdateBaseline, - [string] $GuardianLoggerLevel='Standard', - [string[]] $CrScanAdditionalRunConfigParams, - [string[]] $PoliCheckAdditionalRunConfigParams + [string] $GuardianLoggerLevel='Standard' ) $ErrorActionPreference = 'Stop' Set-StrictMode -Version 2.0 $disableConfigureToolsetImport = $true -$LASTEXITCODE = 0 +$global:LASTEXITCODE = 0 try { # `tools.ps1` checks $ci to perform some actions. Since the SDL @@ -23,7 +19,6 @@ try { . $PSScriptRoot\..\tools.ps1 # We store config files in the r directory of .gdn - Write-Host $ToolsList $gdnConfigPath = Join-Path $GdnFolder 'r' $ValidPath = Test-Path $GuardianCliLocation @@ -33,37 +28,18 @@ try { ExitWithExitCode 1 } - $configParam = @('--config') - - foreach ($tool in $ToolsList) { - $gdnConfigFile = Join-Path $gdnConfigPath "$tool-configure.gdnconfig" - Write-Host $tool - # We have to manually configure tools that run on source to look at the source directory only - if ($tool -eq 'credscan') { - Write-Host "$GuardianCliLocation configure --working-directory $WorkingDirectory --tool $tool --output-path $gdnConfigFile --logger-level $GuardianLoggerLevel --noninteractive --force --args `" TargetDirectory < $TargetDirectory `" `" OutputType < pre `" $(If ($CrScanAdditionalRunConfigParams) {$CrScanAdditionalRunConfigParams})" - & $GuardianCliLocation configure --working-directory $WorkingDirectory --tool $tool --output-path $gdnConfigFile --logger-level $GuardianLoggerLevel --noninteractive --force --args " TargetDirectory < $TargetDirectory " "OutputType < pre" $(If ($CrScanAdditionalRunConfigParams) {$CrScanAdditionalRunConfigParams}) - if ($LASTEXITCODE -ne 0) { - Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "Guardian configure for $tool failed with exit code $LASTEXITCODE." - ExitWithExitCode $LASTEXITCODE - } - } - if ($tool -eq 'policheck') { - Write-Host "$GuardianCliLocation configure --working-directory $WorkingDirectory --tool $tool --output-path $gdnConfigFile --logger-level $GuardianLoggerLevel --noninteractive --force --args `" Target < $TargetDirectory `" $(If ($PoliCheckAdditionalRunConfigParams) {$PoliCheckAdditionalRunConfigParams})" - & $GuardianCliLocation configure --working-directory $WorkingDirectory --tool $tool --output-path $gdnConfigFile --logger-level $GuardianLoggerLevel --noninteractive --force --args " Target < $TargetDirectory " $(If ($PoliCheckAdditionalRunConfigParams) {$PoliCheckAdditionalRunConfigParams}) - if ($LASTEXITCODE -ne 0) { - Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "Guardian configure for $tool failed with exit code $LASTEXITCODE." 
- ExitWithExitCode $LASTEXITCODE - } - } - - $configParam+=$gdnConfigFile - } - - Write-Host "$GuardianCliLocation run --working-directory $WorkingDirectory --baseline mainbaseline --update-baseline $UpdateBaseline --logger-level $GuardianLoggerLevel $configParam" - & $GuardianCliLocation run --working-directory $WorkingDirectory --tool $tool --baseline mainbaseline --update-baseline $UpdateBaseline --logger-level $GuardianLoggerLevel $configParam - if ($LASTEXITCODE -ne 0) { - Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "Guardian run for $ToolsList using $configParam failed with exit code $LASTEXITCODE." - ExitWithExitCode $LASTEXITCODE + $gdnConfigFiles = Get-ChildItem $gdnConfigPath -Recurse -Include '*.gdnconfig' + Write-Host "Discovered Guardian config files:" + $gdnConfigFiles | Out-String | Write-Host + + Exec-BlockVerbosely { + & $GuardianCliLocation run ` + --working-directory $WorkingDirectory ` + --baseline mainbaseline ` + --update-baseline $UpdateBaseline ` + --logger-level $GuardianLoggerLevel ` + --config @gdnConfigFiles + Exit-IfNZEC "Sdl" } } catch { diff --git a/eng/common/templates/job/execute-sdl.yml b/eng/common/templates/job/execute-sdl.yml index 4a32181fd8..69eb67849d 100644 --- a/eng/common/templates/job/execute-sdl.yml +++ b/eng/common/templates/job/execute-sdl.yml @@ -2,17 +2,41 @@ parameters: enable: 'false' # Whether the SDL validation job should execute or not overrideParameters: '' # Optional: to override values for parameters. additionalParameters: '' # Optional: parameters that need user specific values eg: '-SourceToolsList @("abc","def") -ArtifactToolsList @("ghi","jkl")' + # Optional: if specified, restore and use this version of Guardian instead of the default. + overrideGuardianVersion: '' + # Optional: if true, publish the '.gdn' folder as a pipeline artifact. This can help with in-depth + # diagnosis of problems with specific tool configurations. + publishGuardianDirectoryToPipeline: false + # The script to run to execute all SDL tools. Use this if you want to use a script to define SDL + # parameters rather than relying on YAML. It may be better to use a local script, because you can + # reproduce results locally without piecing together a command based on the YAML. + executeAllSdlToolsScript: 'eng/common/sdl/execute-all-sdl-tools.ps1' # There is some sort of bug (has been reported) in Azure DevOps where if this parameter is named # 'continueOnError', the parameter value is not correctly picked up. # This can also be remedied by the caller (post-build.yml) if it does not use a nested parameter sdlContinueOnError: false # optional: determines whether to continue the build if the step errors; - downloadArtifacts: true # optional: determines if the artifacts should be dowloaded + # optional: determines if build artifacts should be downloaded. + downloadArtifacts: true + # optional: determines if this job should search the directory of downloaded artifacts for + # 'tar.gz' and 'zip' archive files and extract them before running SDL validation tasks. + extractArchiveArtifacts: false dependsOn: '' # Optional: dependencies of the job artifactNames: '' # Optional: patterns supplied to DownloadBuildArtifacts # Usage: # artifactNames: # - 'BlobArtifacts' # - 'Artifacts_Windows_NT_Release' + # Optional: download a list of pipeline artifacts. 'downloadArtifacts' controls build artifacts, + # not pipeline artifacts, so doesn't affect the use of this parameter. 
+ pipelineArtifactNames: [] + # Optional: location and ID of the AzDO build that the build/pipeline artifacts should be + # downloaded from. By default, uses runtime expressions to decide based on the variables set by + # the 'setupMaestroVars' dependency. Overriding this parameter is necessary if SDL tasks are + # running without Maestro++/BAR involved, or to download artifacts from a specific existing build + # to iterate quickly on SDL changes. + AzDOProjectName: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOProjectName'] ] + AzDOPipelineId: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOPipelineId'] ] + AzDOBuildId: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOBuildId'] ] jobs: - job: Run_SDL @@ -22,16 +46,29 @@ jobs: variables: - group: DotNet-VSTS-Bot - name: AzDOProjectName - value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOProjectName'] ] + value: ${{ parameters.AzDOProjectName }} - name: AzDOPipelineId - value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOPipelineId'] ] + value: ${{ parameters.AzDOPipelineId }} - name: AzDOBuildId - value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOBuildId'] ] + value: ${{ parameters.AzDOBuildId }} + # The Guardian version specified in 'eng/common/sdl/packages.config'. This value must be kept in + # sync with the packages.config file. + - name: DefaultGuardianVersion + value: 0.53.3 + - name: GuardianVersion + value: ${{ coalesce(parameters.overrideGuardianVersion, '$(DefaultGuardianVersion)') }} + - name: GuardianPackagesConfigFile + value: $(Build.SourcesDirectory)\eng\common\sdl\packages.config pool: - name: Hosted VS2017 + # To extract archives (.tar.gz, .zip), we need access to "tar", added in Windows 10/2019. 
+ ${{ if eq(parameters.extractArchiveArtifacts, 'false') }}: + name: Hosted VS2017 + ${{ if ne(parameters.extractArchiveArtifacts, 'false') }}: + vmImage: windows-2019 steps: - checkout: self clean: true + - ${{ if ne(parameters.downloadArtifacts, 'false')}}: - ${{ if ne(parameters.artifactNames, '') }}: - ${{ each artifactName in parameters.artifactNames }}: @@ -59,16 +96,51 @@ jobs: itemPattern: "**" downloadPath: $(Build.ArtifactStagingDirectory)\artifacts checkDownloadedFiles: true + + - ${{ each artifactName in parameters.pipelineArtifactNames }}: + - task: DownloadPipelineArtifact@2 + displayName: Download Pipeline Artifacts + inputs: + buildType: specific + buildVersionToDownload: specific + project: $(AzDOProjectName) + pipeline: $(AzDOPipelineId) + buildId: $(AzDOBuildId) + artifactName: ${{ artifactName }} + downloadPath: $(Build.ArtifactStagingDirectory)\artifacts + checkDownloadedFiles: true + - powershell: eng/common/sdl/extract-artifact-packages.ps1 -InputPath $(Build.ArtifactStagingDirectory)\artifacts\BlobArtifacts -ExtractPath $(Build.ArtifactStagingDirectory)\artifacts\BlobArtifacts displayName: Extract Blob Artifacts continueOnError: ${{ parameters.sdlContinueOnError }} + - powershell: eng/common/sdl/extract-artifact-packages.ps1 -InputPath $(Build.ArtifactStagingDirectory)\artifacts\PackageArtifacts -ExtractPath $(Build.ArtifactStagingDirectory)\artifacts\PackageArtifacts displayName: Extract Package Artifacts continueOnError: ${{ parameters.sdlContinueOnError }} + + - ${{ if ne(parameters.extractArchiveArtifacts, 'false') }}: + - powershell: eng/common/sdl/extract-artifact-archives.ps1 + -InputPath $(Build.ArtifactStagingDirectory)\artifacts + -ExtractPath $(Build.ArtifactStagingDirectory)\artifacts + displayName: Extract Archive Artifacts + continueOnError: ${{ parameters.sdlContinueOnError }} + + - ${{ if ne(parameters.overrideGuardianVersion, '') }}: + - powershell: | + $content = Get-Content $(GuardianPackagesConfigFile) + + Write-Host "packages.config content was:`n$content" + + $content = $content.Replace('$(DefaultGuardianVersion)', '$(GuardianVersion)') + $content | Set-Content $(GuardianPackagesConfigFile) + + Write-Host "packages.config content updated to:`n$content" + displayName: Use overridden Guardian version ${{ parameters.overrideGuardianVersion }} + - task: NuGetToolInstaller@1 displayName: 'Install NuGet.exe' - task: NuGetCommand@2 @@ -79,15 +151,35 @@ jobs: nugetConfigPath: $(Build.SourcesDirectory)\eng\common\sdl\NuGet.config externalFeedCredentials: GuardianConnect restoreDirectory: $(Build.SourcesDirectory)\.packages + - ${{ if ne(parameters.overrideParameters, '') }}: - - powershell: eng/common/sdl/execute-all-sdl-tools.ps1 ${{ parameters.overrideParameters }} + - powershell: ${{ parameters.executeAllSdlToolsScript }} ${{ parameters.overrideParameters }} displayName: Execute SDL continueOnError: ${{ parameters.sdlContinueOnError }} - ${{ if eq(parameters.overrideParameters, '') }}: - - powershell: eng/common/sdl/execute-all-sdl-tools.ps1 - -GuardianPackageName Microsoft.Guardian.Cli.0.53.3 + - powershell: ${{ parameters.executeAllSdlToolsScript }} + -GuardianPackageName Microsoft.Guardian.Cli.$(GuardianVersion) -NugetPackageDirectory $(Build.SourcesDirectory)\.packages -AzureDevOpsAccessToken $(dn-bot-dotnet-build-rw-code-rw) ${{ parameters.additionalParameters }} displayName: Execute SDL continueOnError: ${{ parameters.sdlContinueOnError }} + + - ${{ if ne(parameters.publishGuardianDirectoryToPipeline, 'false') }}: + # We want to publish the 
Guardian results and configuration for easy diagnosis. However, the + # '.gdn' dir is a mix of configuration, results, extracted dependencies, and Guardian default + # tooling files. Some of these files are large and aren't useful during an investigation, so + # exclude them by simply deleting them before publishing. (As of writing, there is no documented + # way to selectively exclude a dir from the pipeline artifact publish task.) + - task: DeleteFiles@1 + displayName: Delete Guardian dependencies to avoid uploading + inputs: + SourceFolder: $(Agent.BuildDirectory)/.gdn + Contents: | + c + i + condition: succeededOrFailed() + - publish: $(Agent.BuildDirectory)/.gdn + artifact: GuardianConfiguration + displayName: Publish GuardianConfiguration + condition: succeededOrFailed() diff --git a/eng/common/templates/job/job.yml b/eng/common/templates/job/job.yml index 8669679348..37dceb1bab 100644 --- a/eng/common/templates/job/job.yml +++ b/eng/common/templates/job/job.yml @@ -103,7 +103,7 @@ jobs: - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: - ${{ if eq(parameters.enableMicrobuild, 'true') }}: - - task: MicroBuildSigningPlugin@2 + - task: MicroBuildSigningPlugin@3 displayName: Install MicroBuild plugin inputs: signType: $(_SignType) diff --git a/eng/common/templates/job/onelocbuild.yml b/eng/common/templates/job/onelocbuild.yml new file mode 100644 index 0000000000..e8bc77d2eb --- /dev/null +++ b/eng/common/templates/job/onelocbuild.yml @@ -0,0 +1,93 @@ +parameters: + # Optional: dependencies of the job + dependsOn: '' + + # Optional: A defined YAML pool - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#pool + pool: + vmImage: vs2017-win2016 + + CeapexPat: $(dn-bot-ceapex-package-r) # PAT for the loc AzDO instance https://dev.azure.com/ceapex + GithubPat: $(BotAccount-dotnet-bot-repo-PAT) + + SourcesDirectory: $(Build.SourcesDirectory) + CreatePr: true + AutoCompletePr: false + UseLfLineEndings: true + UseCheckedInLocProjectJson: false + LanguageSet: VS_Main_Languages + LclSource: lclFilesInRepo + LclPackageId: '' + RepoType: gitHub + GitHubOrg: dotnet + MirrorRepo: '' + MirrorBranch: main + condition: '' + +jobs: +- job: OneLocBuild + + dependsOn: ${{ parameters.dependsOn }} + + displayName: OneLocBuild + + pool: ${{ parameters.pool }} + + variables: + - group: OneLocBuildVariables # Contains the CeapexPat and GithubPat + - name: _GenerateLocProjectArguments + value: -SourcesDirectory ${{ parameters.SourcesDirectory }} + -LanguageSet "${{ parameters.LanguageSet }}" + -CreateNeutralXlfs + - ${{ if eq(parameters.UseCheckedInLocProjectJson, 'true') }}: + - name: _GenerateLocProjectArguments + value: ${{ variables._GenerateLocProjectArguments }} -UseCheckedInLocProjectJson + + + steps: + - task: Powershell@2 + inputs: + filePath: $(Build.SourcesDirectory)/eng/common/generate-locproject.ps1 + arguments: $(_GenerateLocProjectArguments) + displayName: Generate LocProject.json + condition: ${{ parameters.condition }} + + - task: OneLocBuild@2 + displayName: OneLocBuild + env: + SYSTEM_ACCESSTOKEN: $(System.AccessToken) + inputs: + locProj: eng/Localize/LocProject.json + outDir: $(Build.ArtifactStagingDirectory) + lclSource: ${{ parameters.LclSource }} + lclPackageId: ${{ parameters.LclPackageId }} + isCreatePrSelected: ${{ parameters.CreatePr }} + ${{ if eq(parameters.CreatePr, true) }}: + isAutoCompletePrSelected: ${{ parameters.AutoCompletePr }} + 
isUseLfLineEndingsSelected: ${{ parameters.UseLfLineEndings }} + packageSourceAuth: patAuth + patVariable: ${{ parameters.CeapexPat }} + ${{ if eq(parameters.RepoType, 'gitHub') }}: + repoType: ${{ parameters.RepoType }} + gitHubPatVariable: "${{ parameters.GithubPat }}" + ${{ if ne(parameters.MirrorRepo, '') }}: + isMirrorRepoSelected: true + gitHubOrganization: ${{ parameters.GitHubOrg }} + mirrorRepo: ${{ parameters.MirrorRepo }} + mirrorBranch: ${{ parameters.MirrorBranch }} + condition: ${{ parameters.condition }} + + - task: PublishBuildArtifacts@1 + displayName: Publish Localization Files + inputs: + PathtoPublish: '$(Build.ArtifactStagingDirectory)/loc' + PublishLocation: Container + ArtifactName: Loc + condition: ${{ parameters.condition }} + + - task: PublishBuildArtifacts@1 + displayName: Publish LocProject.json + inputs: + PathtoPublish: '$(Build.SourcesDirectory)/eng/Localize/' + PublishLocation: Container + ArtifactName: Loc + condition: ${{ parameters.condition }} \ No newline at end of file diff --git a/eng/common/templates/job/performance.yml b/eng/common/templates/job/performance.yml deleted file mode 100644 index f877fd7a89..0000000000 --- a/eng/common/templates/job/performance.yml +++ /dev/null @@ -1,95 +0,0 @@ -parameters: - steps: [] # optional -- any additional steps that need to happen before pulling down the performance repo and sending the performance benchmarks to helix (ie building your repo) - variables: [] # optional -- list of additional variables to send to the template - jobName: '' # required -- job name - displayName: '' # optional -- display name for the job. Will use jobName if not passed - pool: '' # required -- name of the Build pool - container: '' # required -- name of the container - osGroup: '' # required -- operating system for the job - extraSetupParameters: '' # optional -- extra arguments to pass to the setup script - frameworks: ['netcoreapp3.0'] # optional -- list of frameworks to run against - continueOnError: 'false' # optional -- determines whether to continue the build if the step errors - dependsOn: '' # optional -- dependencies of the job - timeoutInMinutes: 320 # optional -- timeout for the job - enableTelemetry: false # optional -- enable for telemetry - -jobs: -- template: ../jobs/jobs.yml - parameters: - dependsOn: ${{ parameters.dependsOn }} - enableTelemetry: ${{ parameters.enableTelemetry }} - enablePublishBuildArtifacts: true - continueOnError: ${{ parameters.continueOnError }} - - jobs: - - job: '${{ parameters.jobName }}' - - ${{ if ne(parameters.displayName, '') }}: - displayName: '${{ parameters.displayName }}' - ${{ if eq(parameters.displayName, '') }}: - displayName: '${{ parameters.jobName }}' - - timeoutInMinutes: ${{ parameters.timeoutInMinutes }} - - variables: - - - ${{ each variable in parameters.variables }}: - - ${{ if ne(variable.name, '') }}: - - name: ${{ variable.name }} - value: ${{ variable.value }} - - ${{ if ne(variable.group, '') }}: - - group: ${{ variable.group }} - - - IsInternal: '' - - HelixApiAccessToken: '' - - HelixPreCommand: '' - - - ${{ if and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: - - ${{ if eq( parameters.osGroup, 'Windows_NT') }}: - - HelixPreCommand: 'set "PERFLAB_UPLOAD_TOKEN=$(PerfCommandUploadToken)"' - - IsInternal: -Internal - - ${{ if ne(parameters.osGroup, 'Windows_NT') }}: - - HelixPreCommand: 'export PERFLAB_UPLOAD_TOKEN="$(PerfCommandUploadTokenLinux)"' - - IsInternal: --internal - - - group: DotNet-HelixApi-Access - - 
group: dotnet-benchview - - workspace: - clean: all - pool: - ${{ parameters.pool }} - container: ${{ parameters.container }} - strategy: - matrix: - ${{ each framework in parameters.frameworks }}: - ${{ framework }}: - _Framework: ${{ framework }} - steps: - - checkout: self - clean: true - # Run all of the steps to setup repo - - ${{ each step in parameters.steps }}: - - ${{ step }} - - powershell: $(Build.SourcesDirectory)\eng\common\performance\performance-setup.ps1 $(IsInternal) -Framework $(_Framework) ${{ parameters.extraSetupParameters }} - displayName: Performance Setup (Windows) - condition: and(succeeded(), eq(variables['Agent.Os'], 'Windows_NT')) - continueOnError: ${{ parameters.continueOnError }} - - script: $(Build.SourcesDirectory)/eng/common/performance/performance-setup.sh $(IsInternal) --framework $(_Framework) ${{ parameters.extraSetupParameters }} - displayName: Performance Setup (Unix) - condition: and(succeeded(), ne(variables['Agent.Os'], 'Windows_NT')) - continueOnError: ${{ parameters.continueOnError }} - - script: $(Python) $(PerformanceDirectory)/scripts/ci_setup.py $(SetupArguments) - displayName: Run ci setup script - # Run perf testing in helix - - template: /eng/common/templates/steps/perf-send-to-helix.yml - parameters: - HelixSource: '$(HelixSourcePrefix)/$(Build.Repository.Name)/$(Build.SourceBranch)' # sources must start with pr/, official/, prodcon/, or agent/ - HelixType: 'test/performance/$(Kind)/$(_Framework)/$(Architecture)' - HelixAccessToken: $(HelixApiAccessToken) - HelixTargetQueues: $(Queue) - HelixPreCommands: $(HelixPreCommand) - Creator: $(Creator) - WorkItemTimeout: 4:00 # 4 hours - WorkItemDirectory: '$(WorkItemDirectory)' # WorkItemDirectory can not be empty, so we send it some docs to keep it happy - CorrelationPayloadDirectory: '$(PayloadDirectory)' # it gets checked out to a folder with shorter path than WorkItemDirectory so we can avoid file name too long exceptions \ No newline at end of file diff --git a/eng/common/templates/job/publish-build-assets.yml b/eng/common/templates/job/publish-build-assets.yml index 3b9e2524ff..fe9dfdf720 100644 --- a/eng/common/templates/job/publish-build-assets.yml +++ b/eng/common/templates/job/publish-build-assets.yml @@ -94,7 +94,31 @@ jobs: PathtoPublish: '$(Build.StagingDirectory)/ReleaseConfigs.txt' PublishLocation: Container ArtifactName: ReleaseConfigs - + + - task: powershell@2 + displayName: Check if SymbolPublishingExclusionsFile.txt exists + inputs: + targetType: inline + script: | + $symbolExclusionfile = "$(Build.SourcesDirectory)/eng/SymbolPublishingExclusionsFile.txt" + if(Test-Path -Path $symbolExclusionfile) + { + Write-Host "SymbolExclusionFile exists" + Write-Host "##vso[task.setvariable variable=SymbolExclusionFile]true" + } + else{ + Write-Host "Symbols Exclusion file does not exists" + Write-Host "##vso[task.setvariable variable=SymbolExclusionFile]false" + } + + - task: PublishBuildArtifacts@1 + displayName: Publish SymbolPublishingExclusionsFile Artifact + condition: eq(variables['SymbolExclusionFile'], 'true') + inputs: + PathtoPublish: '$(Build.SourcesDirectory)/eng/SymbolPublishingExclusionsFile.txt' + PublishLocation: Container + ArtifactName: ReleaseConfigs + - ${{ if eq(parameters.enablePublishBuildArtifacts, 'true') }}: - template: /eng/common/templates/steps/publish-logs.yml parameters: diff --git a/eng/common/templates/job/source-build.yml b/eng/common/templates/job/source-build.yml index aad4146492..5023d36dcb 100644 --- a/eng/common/templates/job/source-build.yml +++ 
b/eng/common/templates/job/source-build.yml @@ -15,6 +15,9 @@ parameters: # nonPortable: false # Enables non-portable mode. This means a more specific RID (e.g. fedora.32-x64 rather than # linux-x64), and compiling against distro-provided packages rather than portable ones. + # skipPublishValidation: false + # Disables publishing validation. By default, a check is performed to ensure no packages are + # published by source-build. # container: '' # A container to use. Runs in docker. # pool: {} diff --git a/eng/common/templates/job/source-index-stage1.yml b/eng/common/templates/job/source-index-stage1.yml index c002a2b1b0..1cc0c29e4f 100644 --- a/eng/common/templates/job/source-index-stage1.yml +++ b/eng/common/templates/job/source-index-stage1.yml @@ -1,15 +1,19 @@ parameters: runAsPublic: false - sourceIndexPackageVersion: 1.0.1-20210225.1 + sourceIndexPackageVersion: 1.0.1-20210614.1 sourceIndexPackageSource: https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json sourceIndexBuildCommand: powershell -NoLogo -NoProfile -ExecutionPolicy Bypass -Command "eng/common/build.ps1 -restore -build -binarylog -ci" preSteps: [] binlogPath: artifacts/log/Debug/Build.binlog pool: vmImage: vs2017-win2016 + condition: '' + dependsOn: '' jobs: - job: SourceIndexStage1 + dependsOn: ${{ parameters.dependsOn }} + condition: ${{ parameters.condition }} variables: - name: SourceIndexPackageVersion value: ${{ parameters.sourceIndexPackageVersion }} @@ -30,29 +34,24 @@ jobs: inputs: packageType: sdk version: 3.1.x - - - task: UseDotNet@2 - displayName: Use .NET Core sdk - inputs: - useGlobalJson: true + installationPath: $(Agent.TempDirectory)/dotnet + workingDirectory: $(Agent.TempDirectory) - script: | - dotnet tool install BinLogToSln --version $(SourceIndexPackageVersion) --add-source $(SourceIndexPackageSource) --tool-path .source-index/tools - dotnet tool install UploadIndexStage1 --version $(SourceIndexPackageVersion) --add-source $(SourceIndexPackageSource) --tool-path .source-index/tools - echo ##vso[task.prependpath]$(Build.SourcesDirectory)/.source-index/tools + $(Agent.TempDirectory)/dotnet/dotnet tool install BinLogToSln --version $(SourceIndexPackageVersion) --add-source $(SourceIndexPackageSource) --tool-path $(Agent.TempDirectory)/.source-index/tools + $(Agent.TempDirectory)/dotnet/dotnet tool install UploadIndexStage1 --version $(SourceIndexPackageVersion) --add-source $(SourceIndexPackageSource) --tool-path $(Agent.TempDirectory)/.source-index/tools displayName: Download Tools + # Set working directory to temp directory so 'dotnet' doesn't try to use global.json and use the repo's sdk. 
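(Editor's note: the source-index changes above install the SDK and the BinLogToSln/UploadIndexStage1 tools under $(Agent.TempDirectory) and run them with that working directory so the repo's global.json does not pin the SDK used by those tools. A minimal C# sketch of why the working directory matters, assuming the usual SDK behavior of walking up from the current directory to find global.json; the class and method names are illustrative only.)

using System;
using System.IO;

static class GlobalJsonLocator
{
    // Walks up from startDirectory looking for global.json, mimicking how the
    // .NET SDK resolver decides which pinned SDK applies to a command.
    public static string? Find(string startDirectory)
    {
        var dir = new DirectoryInfo(startDirectory);
        while (dir != null)
        {
            string candidate = Path.Combine(dir.FullName, "global.json");
            if (File.Exists(candidate))
                return candidate;
            dir = dir.Parent;
        }
        return null; // no global.json found => the latest installed SDK is used
    }

    static void Main()
    {
        // From inside a repo this finds the repo's global.json; from a temp
        // directory it typically returns null, which is the effect the
        // source-index job relies on.
        Console.WriteLine(Find(Directory.GetCurrentDirectory()) ?? "<none>");
    }
}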
+ workingDirectory: $(Agent.TempDirectory) - script: ${{ parameters.sourceIndexBuildCommand }} displayName: Build Repository - - script: BinLogToSln -i $(BinlogPath) -r $(Build.SourcesDirectory) -n $(Build.Repository.Name) -o .source-index/stage1output + - script: $(Agent.TempDirectory)/.source-index/tools/BinLogToSln -i $(BinlogPath) -r $(Build.SourcesDirectory) -n $(Build.Repository.Name) -o .source-index/stage1output displayName: Process Binlog into indexable sln - env: - DOTNET_ROLL_FORWARD_ON_NO_CANDIDATE_FX: 2 - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: - - script: UploadIndexStage1 -i .source-index/stage1output -n $(Build.Repository.Name) + - script: $(Agent.TempDirectory)/.source-index/tools/UploadIndexStage1 -i .source-index/stage1output -n $(Build.Repository.Name) displayName: Upload stage1 artifacts to source index env: BLOB_CONTAINER_URL: $(source-dot-net-stage1-blob-container-url) - DOTNET_ROLL_FORWARD_ON_NO_CANDIDATE_FX: 2 diff --git a/eng/common/templates/phases/base.yml b/eng/common/templates/phases/base.yml deleted file mode 100644 index 0123cf43b1..0000000000 --- a/eng/common/templates/phases/base.yml +++ /dev/null @@ -1,130 +0,0 @@ -parameters: - # Optional: Clean sources before building - clean: true - - # Optional: Git fetch depth - fetchDepth: '' - - # Optional: name of the phase (not specifying phase name may cause name collisions) - name: '' - # Optional: display name of the phase - displayName: '' - - # Optional: condition for the job to run - condition: '' - - # Optional: dependencies of the phase - dependsOn: '' - - # Required: A defined YAML queue - queue: {} - - # Required: build steps - steps: [] - - # Optional: variables - variables: {} - - # Optional: should run as a public build even in the internal project - # if 'true', the build won't run any of the internal only steps, even if it is running in non-public projects. - runAsPublic: false - - ## Telemetry variables - - # Optional: enable sending telemetry - # if 'true', these "variables" must be specified in the variables object or as part of the queue matrix - # _HelixBuildConfig - differentiate between Debug, Release, other - # _HelixSource - Example: build/product - # _HelixType - Example: official/dotnet/arcade/$(Build.SourceBranch) - enableTelemetry: false - - # Optional: Enable installing Microbuild plugin - # if 'true', these "variables" must be specified in the variables object or as part of the queue matrix - # _TeamName - the name of your team - # _SignType - 'test' or 'real' - enableMicrobuild: false - -# Internal resources (telemetry, microbuild) can only be accessed from non-public projects, -# and some (Microbuild) should only be applied to non-PR cases for internal builds. 
- -phases: -- phase: ${{ parameters.name }} - - ${{ if ne(parameters.displayName, '') }}: - displayName: ${{ parameters.displayName }} - - ${{ if ne(parameters.condition, '') }}: - condition: ${{ parameters.condition }} - - ${{ if ne(parameters.dependsOn, '') }}: - dependsOn: ${{ parameters.dependsOn }} - - queue: ${{ parameters.queue }} - - ${{ if ne(parameters.variables, '') }}: - variables: - ${{ insert }}: ${{ parameters.variables }} - - steps: - - checkout: self - clean: ${{ parameters.clean }} - ${{ if ne(parameters.fetchDepth, '') }}: - fetchDepth: ${{ parameters.fetchDepth }} - - - ${{ if eq(parameters.enableTelemetry, 'true') }}: - - template: /eng/common/templates/steps/telemetry-start.yml - parameters: - buildConfig: $(_HelixBuildConfig) - helixSource: $(_HelixSource) - helixType: $(_HelixType) - runAsPublic: ${{ parameters.runAsPublic }} - - - ${{ if eq(parameters.enableMicrobuild, 'true') }}: - # Internal only resource, and Microbuild signing shouldn't be applied to PRs. - - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: - - task: MicroBuildSigningPlugin@2 - displayName: Install MicroBuild plugin - inputs: - signType: $(_SignType) - zipSources: false - feedSource: https://dnceng.pkgs.visualstudio.com/_packaging/MicroBuildToolset/nuget/v3/index.json - - env: - TeamName: $(_TeamName) - continueOnError: false - condition: and(succeeded(), in(variables['_SignType'], 'real', 'test'), eq(variables['Agent.Os'], 'Windows_NT')) - - # Run provided build steps - - ${{ parameters.steps }} - - - ${{ if eq(parameters.enableMicrobuild, 'true') }}: - # Internal only resources - - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: - - task: MicroBuildCleanup@1 - displayName: Execute Microbuild cleanup tasks - condition: and(always(), in(variables['_SignType'], 'real', 'test'), eq(variables['Agent.Os'], 'Windows_NT')) - env: - TeamName: $(_TeamName) - - - ${{ if eq(parameters.enableTelemetry, 'true') }}: - - template: /eng/common/templates/steps/telemetry-end.yml - parameters: - helixSource: $(_HelixSource) - helixType: $(_HelixType) - - - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: - - task: CopyFiles@2 - displayName: Gather Asset Manifests - inputs: - SourceFolder: '$(Build.SourcesDirectory)/artifacts/log/$(_BuildConfig)/AssetManifest' - TargetFolder: '$(Build.StagingDirectory)/AssetManifests' - continueOnError: false - condition: and(succeeded(), eq(variables['_DotNetPublishToBlobFeed'], 'true')) - - task: PublishBuildArtifacts@1 - displayName: Push Asset Manifests - inputs: - PathtoPublish: '$(Build.StagingDirectory)/AssetManifests' - PublishLocation: Container - ArtifactName: AssetManifests - continueOnError: false - condition: and(succeeded(), eq(variables['_DotNetPublishToBlobFeed'], 'true')) diff --git a/eng/common/templates/phases/publish-build-assets.yml b/eng/common/templates/phases/publish-build-assets.yml deleted file mode 100644 index 4e51e472e2..0000000000 --- a/eng/common/templates/phases/publish-build-assets.yml +++ /dev/null @@ -1,52 +0,0 @@ -parameters: - dependsOn: '' - queue: {} - configuration: 'Debug' - condition: succeeded() - continueOnError: false - runAsPublic: false - publishUsingPipelines: false -phases: - - phase: Asset_Registry_Publish - displayName: Publish to Build 
Asset Registry - dependsOn: ${{ parameters.dependsOn }} - queue: ${{ parameters.queue }} - variables: - _BuildConfig: ${{ parameters.configuration }} - steps: - - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: - - task: DownloadBuildArtifacts@0 - displayName: Download artifact - inputs: - artifactName: AssetManifests - downloadPath: '$(Build.StagingDirectory)/Download' - checkDownloadedFiles: true - condition: ${{ parameters.condition }} - continueOnError: ${{ parameters.continueOnError }} - - task: AzureKeyVault@1 - inputs: - azureSubscription: 'DotNet-Engineering-Services_KeyVault' - KeyVaultName: EngKeyVault - SecretsFilter: 'MaestroAccessToken' - condition: ${{ parameters.condition }} - continueOnError: ${{ parameters.continueOnError }} - - task: PowerShell@2 - displayName: Publish Build Assets - inputs: - filePath: eng\common\sdk-task.ps1 - arguments: -task PublishBuildAssets -restore -msbuildEngine dotnet - /p:ManifestsPath='$(Build.StagingDirectory)/Download/AssetManifests' - /p:BuildAssetRegistryToken=$(MaestroAccessToken) - /p:MaestroApiEndpoint=https://maestro-prod.westus2.cloudapp.azure.com - /p:PublishUsingPipelines=${{ parameters.publishUsingPipelines }} - /p:Configuration=$(_BuildConfig) - condition: ${{ parameters.condition }} - continueOnError: ${{ parameters.continueOnError }} - - task: PublishBuildArtifacts@1 - displayName: Publish Logs to VSTS - inputs: - PathtoPublish: '$(Build.SourcesDirectory)/artifacts/log/$(_BuildConfig)' - PublishLocation: Container - ArtifactName: $(Agent.Os)_Asset_Registry_Publish - continueOnError: true - condition: always() diff --git a/eng/common/templates/post-build/channels/generic-internal-channel.yml b/eng/common/templates/post-build/channels/generic-internal-channel.yml index 58fa9a35b8..8990dfc8c8 100644 --- a/eng/common/templates/post-build/channels/generic-internal-channel.yml +++ b/eng/common/templates/post-build/channels/generic-internal-channel.yml @@ -40,6 +40,9 @@ stages: pool: vmImage: 'windows-2019' steps: + - script: echo "##vso[task.logissue type=warning]Going forward, v2 Arcade publishing is no longer supported. Please read https://github.com/dotnet/arcade/blob/main/Documentation/CorePackages/Publishing.md for details, then contact dnceng if you have further questions." + displayName: Warn about v2 Arcade Publishing Usage + # This is necessary whenever we want to publish/restore to an AzDO private feed - task: NuGetAuthenticate@0 displayName: 'Authenticate to AzDO Feeds' @@ -110,6 +113,9 @@ stages: pool: vmImage: 'windows-2019' steps: + - script: echo "##vso[task.logissue type=warning]Going forward, v2 Arcade publishing is no longer supported. Please read https://github.com/dotnet/arcade/blob/main/Documentation/CorePackages/Publishing.md for details, then contact dnceng if you have further questions." 
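(Editor's note: the deprecation warning steps added above work because Azure Pipelines agents scan step output for logging commands of the form "##vso[task.logissue type=warning]...". Any process in a step can emit one; a small C# sketch follows, with an illustrative message.)

using System;

class PipelineWarning
{
    // The agent parses "##vso[...]" commands from stdout, so a plain
    // Console.WriteLine is enough to surface a build warning.
    static void EmitWarning(string message) =>
        Console.WriteLine($"##vso[task.logissue type=warning]{message}");

    static void Main() =>
        EmitWarning("v2 Arcade publishing is no longer supported; see the Publishing.md docs.");
}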
+ displayName: Warn about v2 Arcade Publishing Usage + - task: DownloadBuildArtifacts@0 displayName: Download Build Assets continueOnError: true diff --git a/eng/common/templates/post-build/channels/generic-public-channel.yml b/eng/common/templates/post-build/channels/generic-public-channel.yml index b50c0b3bdb..3220c6a4f9 100644 --- a/eng/common/templates/post-build/channels/generic-public-channel.yml +++ b/eng/common/templates/post-build/channels/generic-public-channel.yml @@ -42,6 +42,9 @@ stages: pool: vmImage: 'windows-2019' steps: + - script: echo "##vso[task.logissue type=warning]Going forward, v2 Arcade publishing is no longer supported. Please read https://github.com/dotnet/arcade/blob/main/Documentation/CorePackages/Publishing.md for details, then contact dnceng if you have further questions." + displayName: Warn about v2 Arcade Publishing Usage + - task: DownloadBuildArtifacts@0 displayName: Download Build Assets continueOnError: true @@ -109,6 +112,9 @@ stages: pool: vmImage: 'windows-2019' steps: + - script: echo "##vso[task.logissue type=warning]Going forward, v2 Arcade publishing is no longer supported. Please read https://github.com/dotnet/arcade/blob/main/Documentation/CorePackages/Publishing.md for details, then contact dnceng if you have further questions." + displayName: Warn about v2 Arcade Publishing Usage + - task: DownloadBuildArtifacts@0 displayName: Download Build Assets continueOnError: true diff --git a/eng/common/templates/steps/perf-send-to-helix.yml b/eng/common/templates/steps/perf-send-to-helix.yml deleted file mode 100644 index 3427b311a7..0000000000 --- a/eng/common/templates/steps/perf-send-to-helix.yml +++ /dev/null @@ -1,50 +0,0 @@ -# Please remember to update the documentation if you make changes to these parameters! 
-parameters: - ProjectFile: '' # required -- project file that specifies the helix workitems - HelixSource: 'pr/default' # required -- sources must start with pr/, official/, prodcon/, or agent/ - HelixType: 'tests/default/' # required -- Helix telemetry which identifies what type of data this is; should include "test" for clarity and must end in '/' - HelixBuild: $(Build.BuildNumber) # required -- the build number Helix will use to identify this -- automatically set to the AzDO build number - HelixTargetQueues: '' # required -- semicolon delimited list of Helix queues to test on; see https://helix.dot.net/ for a list of queues - HelixAccessToken: '' # required -- access token to make Helix API requests; should be provided by the appropriate variable group - HelixPreCommands: '' # optional -- commands to run before Helix work item execution - HelixPostCommands: '' # optional -- commands to run after Helix work item execution - WorkItemDirectory: '' # optional -- a payload directory to zip up and send to Helix; requires WorkItemCommand; incompatible with XUnitProjects - CorrelationPayloadDirectory: '' # optional -- a directory to zip up and send to Helix as a correlation payload - IncludeDotNetCli: false # optional -- true will download a version of the .NET CLI onto the Helix machine as a correlation payload; requires DotNetCliPackageType and DotNetCliVersion - DotNetCliPackageType: '' # optional -- either 'sdk', 'runtime' or 'aspnetcore-runtime'; determines whether the sdk or runtime will be sent to Helix; see https://raw.githubusercontent.com/dotnet/core/main/release-notes/releases.json - DotNetCliVersion: '' # optional -- version of the CLI to send to Helix; based on this: https://raw.githubusercontent.com/dotnet/core/main/release-notes/releases.json - EnableXUnitReporter: false # optional -- true enables XUnit result reporting to Mission Control - WaitForWorkItemCompletion: true # optional -- true will make the task wait until work items have been completed and fail the build if work items fail. False is "fire and forget." 
- Creator: '' # optional -- if the build is external, use this to specify who is sending the job - DisplayNamePrefix: 'Send job to Helix' # optional -- rename the beginning of the displayName of the steps in AzDO - condition: succeeded() # optional -- condition for step to execute; defaults to succeeded() - continueOnError: false # optional -- determines whether to continue the build if the step errors; defaults to false - osGroup: '' # required -- operating system for the job - - -steps: -- template: /eng/pipelines/common/templates/runtimes/send-to-helix-inner-step.yml - parameters: - osGroup: ${{ parameters.osGroup }} - sendParams: $(Build.SourcesDirectory)/eng/common/performance/${{ parameters.ProjectFile }} /restore /t:Test /bl:$(Build.SourcesDirectory)/artifacts/log/$(_BuildConfig)/SendToHelix.binlog - displayName: ${{ parameters.DisplayNamePrefix }} - condition: ${{ parameters.condition }} - continueOnError: ${{ parameters.continueOnError }} - environment: - BuildConfig: $(_BuildConfig) - HelixSource: ${{ parameters.HelixSource }} - HelixType: ${{ parameters.HelixType }} - HelixBuild: ${{ parameters.HelixBuild }} - HelixTargetQueues: ${{ parameters.HelixTargetQueues }} - HelixAccessToken: ${{ parameters.HelixAccessToken }} - HelixPreCommands: ${{ parameters.HelixPreCommands }} - HelixPostCommands: ${{ parameters.HelixPostCommands }} - WorkItemDirectory: ${{ parameters.WorkItemDirectory }} - CorrelationPayloadDirectory: ${{ parameters.CorrelationPayloadDirectory }} - IncludeDotNetCli: ${{ parameters.IncludeDotNetCli }} - DotNetCliPackageType: ${{ parameters.DotNetCliPackageType }} - DotNetCliVersion: ${{ parameters.DotNetCliVersion }} - EnableXUnitReporter: ${{ parameters.EnableXUnitReporter }} - WaitForWorkItemCompletion: ${{ parameters.WaitForWorkItemCompletion }} - Creator: ${{ parameters.Creator }} - SYSTEM_ACCESSTOKEN: $(System.AccessToken) diff --git a/eng/common/templates/steps/source-build.yml b/eng/common/templates/steps/source-build.yml index 8e336b7d16..ba40dc82f1 100644 --- a/eng/common/templates/steps/source-build.yml +++ b/eng/common/templates/steps/source-build.yml @@ -18,6 +18,35 @@ steps: set -x df -h + # If building on the internal project, the artifact feeds variable may be available (usually only if needed) + # In that case, call the feed setup script to add internal feeds corresponding to public ones. + # In addition, add an msbuild argument to copy the WIP from the repo to the target build location. + # This is because SetupNuGetSources.sh will alter the current NuGet.config file, and we need to preserve those + # changes. + $internalRestoreArgs= + if [ '$(dn-bot-dnceng-artifact-feeds-rw)' != '$''(dn-bot-dnceng-artifact-feeds-rw)' ]; then + # Temporarily work around https://github.com/dotnet/arcade/issues/7709 + chmod +x $(Build.SourcesDirectory)/eng/common/SetupNugetSources.sh + $(Build.SourcesDirectory)/eng/common/SetupNugetSources.sh $(Build.SourcesDirectory)/NuGet.config $(dn-bot-dnceng-artifact-feeds-rw) + internalRestoreArgs='/p:CopyWipIntoInnerSourceBuildRepo=true' + + # The 'Copy WIP' feature of source build uses git stash to apply changes from the original repo. + # This only works if there is a username/email configured, which won't be the case in most CI runs. + git config --get user.email + if [ $? 
-ne 0 ]; then + git config user.email dn-bot@microsoft.com + git config user.name dn-bot + fi + fi + + # If building on the internal project, the internal storage variable may be available (usually only if needed) + # In that case, add variables to allow the download of internal runtimes if the specified versions are not found + # in the default public locations. + internalRuntimeDownloadArgs= + if [ '$(dotnetclimsrc-read-sas-token-base64)' != '$''(dotnetclimsrc-read-sas-token-base64)' ]; then + internalRuntimeDownloadArgs='/p:DotNetRuntimeSourceFeed=https://dotnetclimsrc.blob.core.windows.net/dotnet /p:DotNetRuntimeSourceFeedKey=$(dotnetclimsrc-read-sas-token-base64) --runtimesourcefeed https://dotnetclimsrc.blob.core.windows.net/dotnet --runtimesourcefeedkey $(dotnetclimsrc-read-sas-token-base64)' + fi + buildConfig=Release # Check if AzDO substitutes in a build config from a variable, and use it if so. if [ '$(_BuildConfig)' != '$''(_BuildConfig)' ]; then @@ -34,10 +63,17 @@ steps: targetRidArgs='/p:TargetRid=${{ parameters.platform.targetRID }}' fi + publishArgs= + if [ '${{ parameters.platform.skipPublishValidation }}' != 'true' ]; then + publishArgs='--publish' + fi + ${{ coalesce(parameters.platform.buildScript, './build.sh') }} --ci \ --configuration $buildConfig \ - --restore --build --pack --publish \ + --restore --build --pack $publishArgs -bl \ $officialBuildArgs \ + $internalRuntimeDownloadArgs \ + $internalRestoreArgs \ $targetRidArgs \ /p:SourceBuildNonPortable=${{ parameters.platform.nonPortable }} \ /p:ArcadeBuildFromSource=true diff --git a/eng/common/tools.ps1 b/eng/common/tools.ps1 index d52467eea1..4448428994 100644 --- a/eng/common/tools.ps1 +++ b/eng/common/tools.ps1 @@ -42,12 +42,15 @@ [bool]$useInstalledDotNetCli = if (Test-Path variable:useInstalledDotNetCli) { $useInstalledDotNetCli } else { $true } # Enable repos to use a particular version of the on-line dotnet-install scripts. -# default URL: https://dot.net/v1/dotnet-install.ps1 +# default URL: https://dotnet.microsoft.com/download/dotnet/scripts/v1/dotnet-install.ps1 [string]$dotnetInstallScriptVersion = if (Test-Path variable:dotnetInstallScriptVersion) { $dotnetInstallScriptVersion } else { 'v1' } # True to use global NuGet cache instead of restoring packages to repository-local directory. [bool]$useGlobalNuGetCache = if (Test-Path variable:useGlobalNuGetCache) { $useGlobalNuGetCache } else { !$ci } +# True to exclude prerelease versions Visual Studio during build +[bool]$excludePrereleaseVS = if (Test-Path variable:excludePrereleaseVS) { $excludePrereleaseVS } else { $false } + # An array of names of processes to stop on script exit if prepareMachine is true. $processesToStopOnExit = if (Test-Path variable:processesToStopOnExit) { $processesToStopOnExit } else { @('msbuild', 'dotnet', 'vbcscompiler') } @@ -103,6 +106,46 @@ function Exec-Process([string]$command, [string]$commandArgs) { } } +# Take the given block, print it, print what the block probably references from the current set of +# variables using low-effort string matching, then run the block. +# +# This is intended to replace the pattern of manually copy-pasting a command, wrapping it in quotes, +# and printing it using "Write-Host". The copy-paste method is more readable in build logs, but less +# maintainable and less reliable. It is easy to make a mistake and modify the command without +# properly updating the "Write-Host" line, resulting in misleading build logs. 
The probability of +# this mistake makes the pattern hard to trust when it shows up in build logs. Finding the bug in +# existing source code can also be difficult, because the strings are not aligned to each other and +# the line may be 300+ columns long. +# +# By removing the need to maintain two copies of the command, Exec-BlockVerbosely avoids the issues. +# +# In Bash (or any posix-like shell), "set -x" prints usable verbose output automatically. +# "Set-PSDebug" appears to be similar at first glance, but unfortunately, it isn't very useful: it +# doesn't print any info about the variables being used by the command, which is normally the +# interesting part to diagnose. +function Exec-BlockVerbosely([scriptblock] $block) { + Write-Host "--- Running script block:" + $blockString = $block.ToString().Trim() + Write-Host $blockString + + Write-Host "--- List of variables that might be used:" + # For each variable x in the environment, check the block for a reference to x via simple "$x" or + # "@x" syntax. This doesn't detect other ways to reference variables ("${x}" nor "$variable:x", + # among others). It only catches what this function was originally written for: simple + # command-line commands. + $variableTable = Get-Variable | + Where-Object { + $blockString.Contains("`$$($_.Name)") -or $blockString.Contains("@$($_.Name)") + } | + Format-Table -AutoSize -HideTableHeaders -Wrap | + Out-String + Write-Host $variableTable.Trim() + + Write-Host "--- Executing:" + & $block + Write-Host "--- Done running script block!" +} + # createSdkLocationFile parameter enables a file being generated under the toolset directory # which writes the sdk's location into. This is only necessary for cmd --> powershell invocations # as dot sourcing isn't possible. @@ -120,6 +163,9 @@ function InitializeDotNetCli([bool]$install, [bool]$createSdkLocationFile) { # Disable telemetry on CI. if ($ci) { $env:DOTNET_CLI_TELEMETRY_OPTOUT=1 + + # In case of network error, try to log the current IP for reference + Try-LogClientIpAddress } # Source Build uses DotNetCoreSdkDir variable @@ -190,38 +236,42 @@ function InitializeDotNetCli([bool]$install, [bool]$createSdkLocationFile) { return $global:_DotNetInstallDir = $dotnetRoot } +function Retry($downloadBlock, $maxRetries = 5) { + $retries = 1 + + while($true) { + try { + & $downloadBlock + break + } + catch { + Write-PipelineTelemetryError -Category 'InitializeToolset' -Message $_ + } + + if (++$retries -le $maxRetries) { + $delayInSeconds = [math]::Pow(2, $retries) - 1 # Exponential backoff + Write-Host "Retrying. Waiting for $delayInSeconds seconds before next attempt ($retries of $maxRetries)." + Start-Sleep -Seconds $delayInSeconds + } + else { + Write-PipelineTelemetryError -Category 'InitializeToolset' -Message "Unable to download file in $maxRetries attempts." 
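(Editor's note: the Retry helper above wraps a download block in a loop whose wait grows as 2^attempt - 1 seconds. The same pattern, sketched in C# purely for illustration; the method name is mine, and this sketch throws on exhaustion where the script logs a telemetry error and stops.)

using System;
using System.Threading;

static class DownloadRetry
{
    // Runs `action` until it succeeds or maxRetries attempts have been made,
    // sleeping 2^n - 1 seconds between attempts (3s, 7s, 15s, ...), matching
    // the backoff used by the PowerShell Retry helper.
    public static void Run(Action action, int maxRetries = 5)
    {
        int retries = 1;
        while (true)
        {
            try
            {
                action();
                return;
            }
            catch (Exception ex)
            {
                Console.Error.WriteLine(ex.Message);
            }

            if (++retries > maxRetries)
                throw new InvalidOperationException($"Unable to download file in {maxRetries} attempts.");

            int delaySeconds = (int)Math.Pow(2, retries) - 1; // exponential backoff
            Console.WriteLine($"Retrying. Waiting for {delaySeconds} seconds before next attempt ({retries} of {maxRetries}).");
            Thread.Sleep(TimeSpan.FromSeconds(delaySeconds));
        }
    }
}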
+ break + } + + } +} + function GetDotNetInstallScript([string] $dotnetRoot) { $installScript = Join-Path $dotnetRoot 'dotnet-install.ps1' if (!(Test-Path $installScript)) { Create-Directory $dotnetRoot $ProgressPreference = 'SilentlyContinue' # Don't display the console progress UI - it's a huge perf hit + $uri = "https://dotnet.microsoft.com/download/dotnet/scripts/$dotnetInstallScriptVersion/dotnet-install.ps1" - $maxRetries = 5 - $retries = 1 - - $uri = "https://dot.net/$dotnetInstallScriptVersion/dotnet-install.ps1" - - while($true) { - try { - Write-Host "GET $uri" - Invoke-WebRequest $uri -OutFile $installScript - break - } - catch { - Write-Host "Failed to download '$uri'" - Write-Error $_.Exception.Message -ErrorAction Continue - } - - if (++$retries -le $maxRetries) { - $delayInSeconds = [math]::Pow(2, $retries) - 1 # Exponential backoff - Write-Host "Retrying. Waiting for $delayInSeconds seconds before next attempt ($retries of $maxRetries)." - Start-Sleep -Seconds $delayInSeconds - } - else { - throw "Unable to download file in $maxRetries attempts." - } - - } + Retry({ + Write-Host "GET $uri" + Invoke-WebRequest $uri -OutFile $installScript + }) } return $installScript @@ -305,8 +355,8 @@ function InitializeVisualStudioMSBuild([bool]$install, [object]$vsRequirements = # If the version of msbuild is going to be xcopied, # use this version. Version matches a package here: - # https://dev.azure.com/dnceng/public/_packaging?_a=package&feed=dotnet-eng&package=RoslynTools.MSBuild&protocolType=NuGet&version=16.8.0-preview3&view=overview - $defaultXCopyMSBuildVersion = '16.8.0-preview3' + # https://dev.azure.com/dnceng/public/_packaging?_a=package&feed=dotnet-eng&package=RoslynTools.MSBuild&protocolType=NuGet&version=16.10.0-preview2&view=overview + $defaultXCopyMSBuildVersion = '16.10.0-preview2' if (!$vsRequirements) { $vsRequirements = $GlobalJson.tools.vs } $vsMinVersionStr = if ($vsRequirements.version) { $vsRequirements.version } else { $vsMinVersionReqdStr } @@ -371,7 +421,16 @@ function InitializeVisualStudioMSBuild([bool]$install, [object]$vsRequirements = } $msbuildVersionDir = if ([int]$vsMajorVersion -lt 16) { "$vsMajorVersion.0" } else { "Current" } - return $global:_MSBuildExe = Join-Path $vsInstallDir "MSBuild\$msbuildVersionDir\Bin\msbuild.exe" + + $local:BinFolder = Join-Path $vsInstallDir "MSBuild\$msbuildVersionDir\Bin" + $local:Prefer64bit = if (Get-Member -InputObject $vsRequirements -Name 'Prefer64bit') { $vsRequirements.Prefer64bit } else { $false } + if ($local:Prefer64bit -and (Test-Path(Join-Path $local:BinFolder "amd64"))) { + $global:_MSBuildExe = Join-Path $local:BinFolder "amd64\msbuild.exe" + } else { + $global:_MSBuildExe = Join-Path $local:BinFolder "msbuild.exe" + } + + return $global:_MSBuildExe } function InitializeVisualStudioEnvironmentVariables([string] $vsInstallDir, [string] $vsMajorVersion) { @@ -400,9 +459,13 @@ function InitializeXCopyMSBuild([string]$packageVersion, [bool]$install) { } Create-Directory $packageDir + Write-Host "Downloading $packageName $packageVersion" $ProgressPreference = 'SilentlyContinue' # Don't display the console progress UI - it's a huge perf hit - Invoke-WebRequest "https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng/nuget/v3/flat2/$packageName/$packageVersion/$packageName.$packageVersion.nupkg" -OutFile $packagePath + Retry({ + Invoke-WebRequest "https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng/nuget/v3/flat2/$packageName/$packageVersion/$packageName.$packageVersion.nupkg" -OutFile 
$packagePath + }) + Unzip $packagePath $packageDir } @@ -439,31 +502,17 @@ function LocateVisualStudio([object]$vsRequirements = $null){ if (!(Test-Path $vsWhereExe)) { Create-Directory $vsWhereDir Write-Host 'Downloading vswhere' - $maxRetries = 5 - $retries = 1 - - while($true) { - try { - Invoke-WebRequest "https://netcorenativeassets.blob.core.windows.net/resource-packages/external/windows/vswhere/$vswhereVersion/vswhere.exe" -OutFile $vswhereExe - break - } - catch{ - Write-PipelineTelemetryError -Category 'InitializeToolset' -Message $_ - } - - if (++$retries -le $maxRetries) { - $delayInSeconds = [math]::Pow(2, $retries) - 1 # Exponential backoff - Write-Host "Retrying. Waiting for $delayInSeconds seconds before next attempt ($retries of $maxRetries)." - Start-Sleep -Seconds $delayInSeconds - } - else { - Write-PipelineTelemetryError -Category 'InitializeToolset' -Message "Unable to download file in $maxRetries attempts." - } - } + Retry({ + Invoke-WebRequest "https://netcorenativeassets.blob.core.windows.net/resource-packages/external/windows/vswhere/$vswhereVersion/vswhere.exe" -OutFile $vswhereExe + }) } if (!$vsRequirements) { $vsRequirements = $GlobalJson.tools.vs } - $args = @('-latest', '-prerelease', '-format', 'json', '-requires', 'Microsoft.Component.MSBuild', '-products', '*') + $args = @('-latest', '-format', 'json', '-requires', 'Microsoft.Component.MSBuild', '-products', '*') + + if (!$excludePrereleaseVS) { + $args += '-prerelease' + } if (Get-Member -InputObject $vsRequirements -Name 'version') { $args += '-version' @@ -489,7 +538,13 @@ function LocateVisualStudio([object]$vsRequirements = $null){ function InitializeBuildTool() { if (Test-Path variable:global:_BuildTool) { - return $global:_BuildTool + # If the requested msbuild parameters do not match, clear the cached variables. + if($global:_BuildTool.Contains('ExcludePrereleaseVS') -and $global:_BuildTool.ExcludePrereleaseVS -ne $excludePrereleaseVS) { + Remove-Item variable:global:_BuildTool + Remove-Item variable:global:_MSBuildExe + } else { + return $global:_BuildTool + } } if (-not $msbuildEngine) { @@ -517,7 +572,7 @@ function InitializeBuildTool() { ExitWithExitCode 1 } - $buildTool = @{ Path = $msbuildPath; Command = ""; Tool = "vs"; Framework = "net472" } + $buildTool = @{ Path = $msbuildPath; Command = ""; Tool = "vs"; Framework = "net472"; ExcludePrereleaseVS = $excludePrereleaseVS } } else { Write-PipelineTelemetryError -Category 'InitializeToolset' -Message "Unexpected value of -msbuildEngine: '$msbuildEngine'." ExitWithExitCode 1 @@ -542,7 +597,7 @@ function GetDefaultMSBuildEngine() { function GetNuGetPackageCachePath() { if ($env:NUGET_PACKAGES -eq $null) { - # Use local cache on CI to ensure deterministic build. + # Use local cache on CI to ensure deterministic build. # Avoid using the http cache as workaround for https://github.com/NuGet/Home/issues/3116 # use global cache in dev builds to avoid cost of downloading packages. # For directory normalization, see also: https://github.com/NuGet/Home/issues/7968 @@ -620,6 +675,17 @@ function ExitWithExitCode([int] $exitCode) { exit $exitCode } +# Check if $LASTEXITCODE is a nonzero exit code (NZEC). If so, print a Azure Pipeline error for +# diagnostics, then exit the script with the $LASTEXITCODE. +function Exit-IfNZEC([string] $category = "General") { + Write-Host "Exit code $LASTEXITCODE" + if ($LASTEXITCODE -ne 0) { + $message = "Last command failed with exit code $LASTEXITCODE." 
+ Write-PipelineTelemetryError -Force -Category $category -Message $message + ExitWithExitCode $LASTEXITCODE + } +} + function Stop-Processes() { Write-Host 'Killing running build processes...' foreach ($processName in $processesToStopOnExit) { @@ -643,6 +709,8 @@ function MSBuild() { Write-PipelineSetVariable -Name 'NUGET_PLUGIN_REQUEST_TIMEOUT_IN_SECONDS' -Value '20' } + Enable-Nuget-EnhancedRetry + $toolsetBuildProject = InitializeToolset $basePath = Split-Path -parent $toolsetBuildProject $possiblePaths = @( @@ -651,6 +719,8 @@ function MSBuild() { (Join-Path $basePath (Join-Path $buildTool.Framework 'Microsoft.DotNet.Arcade.Sdk.dll')), (Join-Path $basePath (Join-Path netcoreapp2.1 'Microsoft.DotNet.ArcadeLogging.dll')), (Join-Path $basePath (Join-Path netcoreapp2.1 'Microsoft.DotNet.Arcade.Sdk.dll')) + (Join-Path $basePath (Join-Path netcoreapp3.1 'Microsoft.DotNet.ArcadeLogging.dll')), + (Join-Path $basePath (Join-Path netcoreapp3.1 'Microsoft.DotNet.Arcade.Sdk.dll')) ) $selectedPath = $null foreach ($path in $possiblePaths) { @@ -687,6 +757,8 @@ function MSBuild-Core() { } } + Enable-Nuget-EnhancedRetry + $buildTool = InitializeBuildTool $cmdArgs = "$($buildTool.Command) /m /nologo /clp:Summary /v:$verbosity /nr:$nodeReuse /p:ContinuousIntegrationBuild=$ci" @@ -699,7 +771,10 @@ function MSBuild-Core() { } foreach ($arg in $args) { - if ($arg -ne $null -and $arg.Trim() -ne "") { + if ($null -ne $arg -and $arg.Trim() -ne "") { + if ($arg.EndsWith('\')) { + $arg = $arg + "\" + } $cmdArgs += " `"$arg`"" } } @@ -771,7 +846,7 @@ function Get-Darc($version) { . $PSScriptRoot\pipeline-logging-functions.ps1 -$RepoRoot = Resolve-Path (Join-Path $PSScriptRoot '..\..') +$RepoRoot = Resolve-Path (Join-Path $PSScriptRoot '..\..\') $EngRoot = Resolve-Path (Join-Path $PSScriptRoot '..') $ArtifactsDir = Join-Path $RepoRoot 'artifacts' $ToolsetDir = Join-Path $ArtifactsDir 'toolset' @@ -806,3 +881,36 @@ if (!$disableConfigureToolsetImport) { } } } + +function Try-LogClientIpAddress() +{ + Write-Host "Attempting to log this client's IP for Azure Package feed telemetry purposes" + try + { + $result = Invoke-WebRequest -Uri "http://co1.msedge.net/fdv2/diagnostics.aspx" -UseBasicParsing + $lines = $result.Content.Split([Environment]::NewLine) + $socketIp = $lines | Select-String -Pattern "^Socket IP:.*" + Write-Host $socketIp + $clientIp = $lines | Select-String -Pattern "^Client IP:.*" + Write-Host $clientIp + } + catch + { + Write-Host "Unable to get this machine's effective IP address for logging: $_" + } +} + +# +# If $ci flag is set, turn on (and log that we did) special environment variables for improved Nuget client retry logic. 
+# +function Enable-Nuget-EnhancedRetry() { + if ($ci) { + Write-Host "Setting NUGET enhanced retry environment variables" + $env:NUGET_ENABLE_EXPERIMENTAL_HTTP_RETRY = 'true' + $env:NUGET_EXPERIMENTAL_MAX_NETWORK_TRY_COUNT = 6 + $env:NUGET_EXPERIMENTAL_NETWORK_RETRY_DELAY_MILLISECONDS = 1000 + Write-PipelineSetVariable -Name 'NUGET_ENABLE_EXPERIMENTAL_HTTP_RETRY' -Value 'true' + Write-PipelineSetVariable -Name 'NUGET_EXPERIMENTAL_MAX_NETWORK_TRY_COUNT' -Value '6' + Write-PipelineSetVariable -Name 'NUGET_EXPERIMENTAL_NETWORK_RETRY_DELAY_MILLISECONDS' -Value '1000' + } +} diff --git a/eng/common/tools.sh b/eng/common/tools.sh index 5fad1846e5..6a4871ef72 100644 --- a/eng/common/tools.sh +++ b/eng/common/tools.sh @@ -54,7 +54,7 @@ warn_as_error=${warn_as_error:-true} use_installed_dotnet_cli=${use_installed_dotnet_cli:-true} # Enable repos to use a particular version of the on-line dotnet-install scripts. -# default URL: https://dot.net/v1/dotnet-install.sh +# default URL: https://dotnet.microsoft.com/download/dotnet/scripts/v1/dotnet-install.sh dotnetInstallScriptVersion=${dotnetInstallScriptVersion:-'v1'} # True to use global NuGet cache instead of restoring packages to repository-local directory. @@ -262,7 +262,7 @@ function with_retries { function GetDotNetInstallScript { local root=$1 local install_script="$root/dotnet-install.sh" - local install_script_url="https://dot.net/$dotnetInstallScriptVersion/dotnet-install.sh" + local install_script_url="https://dotnet.microsoft.com/download/dotnet/scripts/$dotnetInstallScriptVersion/dotnet-install.sh" if [[ ! -a "$install_script" ]]; then mkdir -p "$root" @@ -399,6 +399,13 @@ function StopProcesses { return 0 } +function TryLogClientIpAddress () { + echo 'Attempting to log this client''s IP for Azure Package feed telemetry purposes' + if command -v curl > /dev/null; then + curl -s 'http://co1.msedge.net/fdv2/diagnostics.aspx' | grep ' IP: ' || true + fi +} + function MSBuild { local args=$@ if [[ "$pipelines_log" == true ]]; then @@ -410,6 +417,13 @@ function MSBuild { export NUGET_PLUGIN_REQUEST_TIMEOUT_IN_SECONDS=20 Write-PipelineSetVariable -name "NUGET_PLUGIN_HANDSHAKE_TIMEOUT_IN_SECONDS" -value "20" Write-PipelineSetVariable -name "NUGET_PLUGIN_REQUEST_TIMEOUT_IN_SECONDS" -value "20" + + export NUGET_ENABLE_EXPERIMENTAL_HTTP_RETRY=true + export NUGET_EXPERIMENTAL_MAX_NETWORK_TRY_COUNT=6 + export NUGET_EXPERIMENTAL_NETWORK_RETRY_DELAY_MILLISECONDS=1000 + Write-PipelineSetVariable -name "NUGET_ENABLE_EXPERIMENTAL_HTTP_RETRY" -value "true" + Write-PipelineSetVariable -name "NUGET_EXPERIMENTAL_MAX_NETWORK_TRY_COUNT" -value "6" + Write-PipelineSetVariable -name "NUGET_EXPERIMENTAL_NETWORK_RETRY_DELAY_MILLISECONDS" -value "1000" fi local toolset_dir="${_InitializeToolset%/*}" @@ -420,6 +434,8 @@ function MSBuild { possiblePaths+=( "$toolset_dir/$_InitializeBuildToolFramework/Microsoft.DotNet.Arcade.Sdk.dll" ) possiblePaths+=( "$toolset_dir/netcoreapp2.1/Microsoft.DotNet.ArcadeLogging.dll" ) possiblePaths+=( "$toolset_dir/netcoreapp2.1/Microsoft.DotNet.Arcade.Sdk.dll" ) + possiblePaths+=( "$toolset_dir/netcoreapp3.1/Microsoft.DotNet.ArcadeLogging.dll" ) + possiblePaths+=( "$toolset_dir/netcoreapp3.1/Microsoft.DotNet.Arcade.Sdk.dll" ) for path in "${possiblePaths[@]}"; do if [[ -f $path ]]; then selectedPath=$path @@ -485,13 +501,14 @@ _script_dir=`dirname "$_ResolvePath"` eng_root=`cd -P "$_script_dir/.." && pwd` repo_root=`cd -P "$_script_dir/../.." 
&& pwd` -artifacts_dir="$repo_root/artifacts" +repo_root="${repo_root}/" +artifacts_dir="${repo_root}artifacts" toolset_dir="$artifacts_dir/toolset" -tools_dir="$repo_root/.tools" +tools_dir="${repo_root}.tools" log_dir="$artifacts_dir/log/$configuration" temp_dir="$artifacts_dir/tmp/$configuration" -global_json_file="$repo_root/global.json" +global_json_file="${repo_root}global.json" # determine if global.json contains a "runtimes" entry global_json_has_runtimes=false if command -v jq &> /dev/null; then @@ -504,7 +521,7 @@ fi # HOME may not be defined in some scenarios, but it is required by NuGet if [[ -z $HOME ]]; then - export HOME="$repo_root/artifacts/.home/" + export HOME="${repo_root}artifacts/.home/" mkdir -p "$HOME" fi diff --git a/global.json b/global.json index d1c8cf898d..e53205d351 100644 --- a/global.json +++ b/global.json @@ -1,6 +1,6 @@ { "tools": { - "dotnet": "6.0.100-preview.1.21103.13", + "dotnet": "6.0.100-rc.1.21430.12", "runtimes": { "dotnet/x64": [ "$(MicrosoftNETCoreApp31Version)", @@ -16,6 +16,6 @@ }, "msbuild-sdks": { "Microsoft.Build.NoTargets": "2.0.1", - "Microsoft.DotNet.Arcade.Sdk": "6.0.0-beta.21160.7" + "Microsoft.DotNet.Arcade.Sdk": "7.0.0-beta.21474.2" } } diff --git a/src/Microsoft.Diagnostics.DebugServices.Implementation/ImageMappingMemoryService.cs b/src/Microsoft.Diagnostics.DebugServices.Implementation/ImageMappingMemoryService.cs index 44ceb95512..2c90151291 100644 --- a/src/Microsoft.Diagnostics.DebugServices.Implementation/ImageMappingMemoryService.cs +++ b/src/Microsoft.Diagnostics.DebugServices.Implementation/ImageMappingMemoryService.cs @@ -2,6 +2,7 @@ // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. +using Microsoft.Diagnostics.Runtime.Utilities; using Microsoft.FileFormats; using Microsoft.FileFormats.ELF; using Microsoft.FileFormats.MachO; @@ -9,9 +10,9 @@ using System.Collections.Generic; using System.Diagnostics; using System.IO; +using System.Linq; using System.Reflection.Metadata; using System.Reflection.PortableExecutable; -using System.Runtime.InteropServices; namespace Microsoft.Diagnostics.DebugServices.Implementation { @@ -101,96 +102,113 @@ private byte[] ReadMemoryFromModule(ulong address, int bytesRequested) IModule module = _moduleService.GetModuleFromAddress(address); if (module != null) { - Trace.TraceInformation("ReadMemory: address {0:X16} size {1:X8} found module {2}", address, bytesRequested, module.FileName); - - // Recursion can happen in the extreme case where the PE, ELF or MachO headers (in the module.Services.GetService<>() calls) + // Recursion can happen in the case where the PE, ELF or MachO headers (in the module.Services.GetService<>() calls) // used to get the timestamp/filesize or build id are not in the dump. - if (!_recursionProtection.Contains(module.ImageBase)) + if (!_recursionProtection.Contains(address)) { - _recursionProtection.Add(module.ImageBase); + _recursionProtection.Add(address); try { // We found a module that contains the memory requested. Now find or download the PE image. PEReader reader = module.Services.GetService(); if (reader is not null) { - // Read the memory from the PE image. 
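(Editor's note: ReadMemoryFromModule above guards against re-entrancy, since resolving a module's PE/ELF/MachO services can itself trigger memory reads; the change keys the guard on the requested address rather than the image base. A stripped-down sketch of that guard, with hypothetical names.)

using System;
using System.Collections.Generic;

class ReentrancyGuardedReader
{
    private readonly HashSet<ulong> _inFlight = new();

    // `readCore` stands in for the real work, which may call back into this
    // method; a repeated in-flight address is treated as recursion and refused.
    public byte[]? Read(ulong address, Func<ulong, byte[]?> readCore)
    {
        if (!_inFlight.Add(address))
        {
            Console.Error.WriteLine($"Read: recursion at address {address:X16}");
            return null;
        }
        try
        {
            return readCore(address);
        }
        finally
        {
            _inFlight.Remove(address);
        }
    }
}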
int rva = (int)(address - module.ImageBase); Debug.Assert(rva >= 0); + Debug.Assert(!reader.IsLoadedImage); + Debug.Assert(reader.IsEntireImageAvailable); +#if TRACE_VERBOSE + Trace.TraceInformation($"ReadMemoryFromModule: address {address:X16} rva {rva:X8} bytesRequested {bytesRequested:X8} {module.FileName}"); +#endif + // Not reading anything in the PE's header + if (rva > reader.PEHeaders.PEHeader.SizeOfHeaders) + { + // This property can cause recursion because this PE being mapped here is read to determine the layout + if (!module.IsFileLayout.GetValueOrDefault(true)) + { + // If the PE image that we are mapping into has the "loaded" layout convert the rva to a flat/file based one. + for (int i = 0; i < reader.PEHeaders.SectionHeaders.Length; i++) + { + SectionHeader section = reader.PEHeaders.SectionHeaders[i]; + if (rva >= section.VirtualAddress && rva < (section.VirtualAddress + section.VirtualSize)) + { + rva = section.PointerToRawData + (rva - section.VirtualAddress); + break; + } + } + } + } + try { byte[] data = null; - int sizeOfHeaders = reader.PEHeaders.PEHeader.SizeOfHeaders; - if (rva < sizeOfHeaders) - { - // If the address isn't contained in one of the sections, assume that SOS is reading the PE headers directly. - Trace.TraceInformation("ReadMemory: rva {0:X8} size {1:X8} in PE Header", rva, bytesRequested); - data = reader.GetEntireImage().GetReader(rva, bytesRequested).ReadBytes(bytesRequested); - } - else + // Read the memory from the PE image found/downloaded above + PEMemoryBlock block = reader.GetEntireImage(); + if (rva < block.Length) { - PEMemoryBlock block = reader.GetSectionData(rva); - if (block.Length > 0) + int size = Math.Min(block.Length - rva, bytesRequested); + if ((rva + size) <= block.Length) { - int size = Math.Min(block.Length, bytesRequested); - data = block.GetReader().ReadBytes(size); - ApplyRelocations(module, reader, rva, data); + data = block.GetReader(rva, size).ReadBytes(size); + ApplyRelocations(module, reader, (int)(address - module.ImageBase), data); } else { - Trace.TraceError($"ReadMemory: FAILED rva {rva:X8}"); + Trace.TraceError($"ReadMemoryFromModule: FAILED address {address:X16} rva {rva:X8} {module.FileName}"); } } - + return data; } catch (Exception ex) when (ex is BadImageFormatException || ex is InvalidOperationException || ex is IOException) { - Trace.TraceError($"ReadMemory: exception {ex.Message}"); - return null; + Trace.TraceError($"ReadMemoryFromModule: exception: address {address:X16} {ex.Message} {module.FileName}"); } } - - // Find or download the ELF image, if one. - Reader virtualAddressReader = module.Services.GetService()?.VirtualAddressReader; - if (virtualAddressReader is null) + else { - // Find or download the MachO image, if one. - virtualAddressReader = module.Services.GetService()?.VirtualAddressReader; - } - if (virtualAddressReader is not null) - { - // Read the memory from the image. - ulong rva = address - module.ImageBase; - Debug.Assert(rva >= 0); - try + // Find or download the ELF image, if one. + Reader virtualAddressReader = module.Services.GetService()?.VirtualAddressReader; + if (virtualAddressReader is null) { - Trace.TraceInformation("ReadMemory: rva {0:X16} size {1:X8} in ELF or MachO file", rva, bytesRequested); - byte[] data = new byte[bytesRequested]; - uint read = virtualAddressReader.Read(rva, data, 0, (uint)bytesRequested); - if (read == 0) - { - Trace.TraceError($"ReadMemory: FAILED rva {rva:X8}"); - data = null; - } - return data; + // Find or download the MachO image, if one. 
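(Editor's note: the core of the mapping fix above is the RVA translation: the dump records addresses against the loaded, virtual layout, while the found/downloaded PE is read in flat file layout, so an RVA past the headers must be remapped through the section table before indexing into the flat image. A self-contained C# sketch of that translation using System.Reflection.PortableExecutable; the command-line handling is illustrative only.)

using System;
using System.IO;
using System.Reflection.PortableExecutable;

static class RvaTranslator
{
    // Converts a loaded-layout RVA into an offset valid in a flat/file-layout
    // image by locating the containing section and using its PointerToRawData.
    // RVAs inside the headers need no translation.
    public static int LoadedRvaToFileOffset(PEHeaders headers, int rva)
    {
        if (rva <= headers.PEHeader!.SizeOfHeaders)
            return rva;

        foreach (SectionHeader section in headers.SectionHeaders)
        {
            if (rva >= section.VirtualAddress && rva < section.VirtualAddress + section.VirtualSize)
                return section.PointerToRawData + (rva - section.VirtualAddress);
        }
        return rva; // not in any section; fall back to the raw value
    }

    static void Main(string[] args)
    {
        // Usage sketch: dotnet run -- some.dll 0x2000
        using var reader = new PEReader(File.OpenRead(args[0]));
        int rva = Convert.ToInt32(args[1], 16);
        Console.WriteLine($"file offset: 0x{LoadedRvaToFileOffset(reader.PEHeaders, rva):X}");
    }
}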
+ virtualAddressReader = module.Services.GetService()?.VirtualAddressReader; } - catch (Exception ex) when (ex is BadInputFormatException || ex is InvalidVirtualAddressException) + if (virtualAddressReader is not null) { - Trace.TraceError($"ReadMemory: ELF or MachO file exception {ex.Message}"); - return null; + // Read the memory from the image. + ulong rva = address - module.ImageBase; + Debug.Assert(rva >= 0); + try + { +#if TRACE_VERBOSE + Trace.TraceInformation($"ReadMemoryFromModule: address {address:X16} rva {rva:X16} size {bytesRequested:X8} in ELF or MachO file {module.FileName}"); +#endif + byte[] data = new byte[bytesRequested]; + uint read = virtualAddressReader.Read(rva, data, 0, (uint)bytesRequested); + if (read == 0) + { + Trace.TraceError($"ReadMemoryFromModule: FAILED address {address:X16} rva {rva:X16} {module.FileName}"); + data = null; + } + return data; + } + catch (Exception ex) when (ex is BadInputFormatException || ex is InvalidVirtualAddressException) + { + Trace.TraceError($"ReadMemoryFromModule: ELF or MachO file exception: address {address:X16} {ex.Message} {module.FileName}"); + } } } } finally { - _recursionProtection.Remove(module.ImageBase); + _recursionProtection.Remove(address); } } else { - Trace.TraceError("ReadMemory: recursion"); + Trace.TraceError($"ReadMemoryFromModule: recursion: address {address:X16} size {bytesRequested:X8} {module.FileName}"); } } return null; @@ -250,21 +268,25 @@ private void ApplyRelocations(IModule module, PEReader reader, int dataVA, byte[ break; case BaseRelocationType.ImageRelBasedHighLow: - { - uint value = BitConverter.ToUInt32(data, offset); - value += (uint)baseDelta; - byte[] source = BitConverter.GetBytes(value); - Array.Copy(source, 0, data, offset, source.Length); + if ((offset + sizeof(uint)) <= data.Length) + { + uint value = BitConverter.ToUInt32(data, offset); + value += (uint)baseDelta; + byte[] source = BitConverter.GetBytes(value); + Array.Copy(source, 0, data, offset, source.Length); + } break; - } + case BaseRelocationType.ImageRelBasedDir64: - { - ulong value = BitConverter.ToUInt64(data, offset); - value += baseDelta; - byte[] source = BitConverter.GetBytes(value); - Array.Copy(source, 0, data, offset, source.Length); + if ((offset + sizeof(ulong)) <= data.Length) + { + ulong value = BitConverter.ToUInt64(data, offset); + value += baseDelta; + byte[] source = BitConverter.GetBytes(value); + Array.Copy(source, 0, data, offset, source.Length); + } break; - } + default: Debug.Fail($"ApplyRelocations: invalid relocation type {type}"); break; diff --git a/src/Microsoft.Diagnostics.DebugServices.Implementation/MetadataMappingMemoryService.cs b/src/Microsoft.Diagnostics.DebugServices.Implementation/MetadataMappingMemoryService.cs index 5b061c76ed..543f548b25 100644 --- a/src/Microsoft.Diagnostics.DebugServices.Implementation/MetadataMappingMemoryService.cs +++ b/src/Microsoft.Diagnostics.DebugServices.Implementation/MetadataMappingMemoryService.cs @@ -3,13 +3,12 @@ // See the LICENSE file in the project root for more information. 
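(Editor's note: the ApplyRelocations change above bounds-checks each fixup before patching the buffer. This is standard PE base-relocation handling: IMAGE_REL_BASED_HIGHLOW adds the 32-bit load delta, IMAGE_REL_BASED_DIR64 adds the 64-bit delta, and anything that would run past the copied bytes is skipped. A minimal sketch of the patch step; walking the relocation table and computing the delta are assumed to happen elsewhere.)

using System;

static class RelocationPatcher
{
    // Applies one fixup at `offset` into `data`, where `baseDelta` is
    // actualImageBase - preferredImageBase. Out-of-range fixups are ignored,
    // mirroring the bounds checks added in ApplyRelocations.
    public static void Apply(byte[] data, int offset, ulong baseDelta, bool is64Bit)
    {
        if (is64Bit)
        {
            if (offset + sizeof(ulong) > data.Length) return;
            ulong value = BitConverter.ToUInt64(data, offset) + baseDelta;
            BitConverter.GetBytes(value).CopyTo(data, offset);
        }
        else
        {
            if (offset + sizeof(uint) > data.Length) return;
            uint value = BitConverter.ToUInt32(data, offset) + (uint)baseDelta;
            BitConverter.GetBytes(value).CopyTo(data, offset);
        }
    }
}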
using Microsoft.Diagnostics.Runtime; -using System.Linq; +using Microsoft.Diagnostics.Runtime.Utilities; using System; using System.Collections.Immutable; -using System.IO; -using Microsoft.Diagnostics.Runtime.Utilities; using System.Diagnostics; -using System.Diagnostics.Contracts; +using System.IO; +using System.Linq; namespace Microsoft.Diagnostics.DebugServices.Implementation { diff --git a/src/Microsoft.Diagnostics.DebugServices.Implementation/Module.cs b/src/Microsoft.Diagnostics.DebugServices.Implementation/Module.cs index 09f68ca5df..2da9ac5eb8 100644 --- a/src/Microsoft.Diagnostics.DebugServices.Implementation/Module.cs +++ b/src/Microsoft.Diagnostics.DebugServices.Implementation/Module.cs @@ -11,6 +11,7 @@ using System.Diagnostics; using System.Reflection.PortableExecutable; using System.Runtime.InteropServices; +using FileVersionInfo = Microsoft.Diagnostics.Runtime.Utilities.FileVersionInfo; namespace Microsoft.Diagnostics.DebugServices.Implementation { @@ -36,7 +37,6 @@ public enum Flags : byte private Flags _flags; private PdbFileInfo _pdbFileInfo; private ImmutableArray _buildId; - private VersionData _versionData; private PEImage _peImage; public readonly ServiceProvider ServiceProvider; @@ -105,16 +105,26 @@ public bool? IsFileLayout { get { - GetPEInfo(); - if ((_flags & Flags.IsFileLayout) != 0) + // For Windows targets we can assume that the file layout is always "loaded". The + // ImageMappingMemoryService depends on no recursion memory access for this property + // i.e. calling GetPEInfo(). + if (Target.OperatingSystem == OSPlatform.Windows) { - return true; + return false; } - if ((_flags & Flags.IsLoadedLayout) != 0) + else { - return false; + GetPEInfo(); + if ((_flags & Flags.IsFileLayout) != 0) + { + return true; + } + if ((_flags & Flags.IsLoadedLayout) != 0) + { + return false; + } + return null; } - return null; } } @@ -147,27 +157,37 @@ public ImmutableArray BuildId } } - public virtual VersionData VersionData - { - get { return _versionData; } - set { _versionData = value; } - } + public abstract VersionData VersionData { get; } public abstract string VersionString { get; } #endregion - protected void GetVersionFromVersionString() + protected VersionData GetVersion() { - GetPEInfo(); + VersionData versionData = null; - // If we can't get the version from the PE, search for version string embedded in the module data - if (_versionData is null && !IsPEImage) + PEImage peImage = GetPEInfo(); + if (peImage != null) + { + FileVersionInfo fileVersionInfo = peImage.GetFileVersionInfo(); + if (fileVersionInfo != null) + { + versionData = fileVersionInfo.VersionInfo.ToVersionData(); + } + } + else { + // If we can't get the version from the PE, search for version string embedded in the module data string versionString = VersionString; if (versionString != null) { int spaceIndex = versionString.IndexOf(' '); + if (spaceIndex < 0) + { + // It is probably a private build version that doesn't end with a space (no commit id after) + spaceIndex = versionString.Length; + } if (spaceIndex > 0) { if (versionString[spaceIndex - 1] == '.') @@ -178,7 +198,7 @@ protected void GetVersionFromVersionString() try { Version version = System.Version.Parse(versionToParse); - _versionData = new VersionData(version.Major, version.Minor, version.Build, version.Revision); + versionData = new VersionData(version.Major, version.Minor, version.Build, version.Revision); } catch (ArgumentException ex) { @@ -187,12 +207,14 @@ protected void GetVersionFromVersionString() } } } + + return 
versionData; } protected PEImage GetPEInfo() { if (InitializeValue(Flags.InitializePEInfo)) { - _peImage = ModuleService.GetPEInfo(ImageBase, ImageSize, ref _pdbFileInfo, ref _versionData, ref _flags); + _peImage = ModuleService.GetPEInfo(ImageBase, ImageSize, ref _pdbFileInfo, ref _flags); } return _peImage; } diff --git a/src/Microsoft.Diagnostics.DebugServices.Implementation/ModuleService.cs b/src/Microsoft.Diagnostics.DebugServices.Implementation/ModuleService.cs index 65772fc804..1c576e214f 100644 --- a/src/Microsoft.Diagnostics.DebugServices.Implementation/ModuleService.cs +++ b/src/Microsoft.Diagnostics.DebugServices.Implementation/ModuleService.cs @@ -16,7 +16,6 @@ using System.Reflection.PortableExecutable; using System.Runtime.InteropServices; using System.Text; -using FileVersionInfo = Microsoft.Diagnostics.Runtime.Utilities.FileVersionInfo; namespace Microsoft.Diagnostics.DebugServices.Implementation { @@ -203,10 +202,9 @@ private IModule[] GetSortedModules() /// module base address /// module size /// the pdb record or null - /// the PE version or null /// module flags /// PEImage instance or null - internal PEImage GetPEInfo(ulong address, ulong size, ref PdbFileInfo pdbFileInfo, ref VersionData versionData, ref Module.Flags flags) + internal PEImage GetPEInfo(ulong address, ulong size, ref PdbFileInfo pdbFileInfo, ref Module.Flags flags) { PEImage peImage = null; @@ -214,13 +212,13 @@ internal PEImage GetPEInfo(ulong address, ulong size, ref PdbFileInfo pdbFileInf if (Target.Host.HostType != HostType.Lldb) { // First try getting the PE info as load layout (native Windows DLLs and most managed PEs on Linux/MacOS). - peImage = GetPEInfo(isVirtual: true, address, size, ref pdbFileInfo, ref versionData, ref flags); + peImage = GetPEInfo(isVirtual: true, address: address, size: size, pdbFileInfo: ref pdbFileInfo, flags: ref flags); if (peImage == null) { if (Target.OperatingSystem != OSPlatform.Windows) { // Then try getting the PE info as file layout (some managed PEs on Linux/MacOS). - peImage = GetPEInfo(isVirtual: false, address, size, ref pdbFileInfo, ref versionData, ref flags); + peImage = GetPEInfo(isVirtual: false, address: address, size: size, pdbFileInfo: ref pdbFileInfo, flags: ref flags); } } } @@ -234,10 +232,10 @@ internal PEImage GetPEInfo(ulong address, ulong size, ref PdbFileInfo pdbFileInf /// module base address /// module size /// the pdb record or null - /// the PE version or null /// module flags + /// /// PEImage instance or null - private PEImage GetPEInfo(bool isVirtual, ulong address, ulong size, ref PdbFileInfo pdbFileInfo, ref VersionData versionData, ref Module.Flags flags) + private PEImage GetPEInfo(bool isVirtual, ulong address, ulong size, ref PdbFileInfo pdbFileInfo, ref Module.Flags flags) { Stream stream = MemoryService.CreateMemoryStream(address, size); try @@ -248,15 +246,7 @@ private PEImage GetPEInfo(bool isVirtual, ulong address, ulong size, ref PdbFile { flags |= Module.Flags.IsPEImage; flags |= peImage.IsManaged ? Module.Flags.IsManaged : Module.Flags.None; - pdbFileInfo = peImage.DefaultPdb.ToPdbFileInfo(); - if (versionData is null) - { - FileVersionInfo fileVersionInfo = peImage.GetFileVersionInfo(); - if (fileVersionInfo != null) - { - versionData = fileVersionInfo.VersionInfo.ToVersionData(); - } - } + pdbFileInfo = peImage.DefaultPdb?.ToPdbFileInfo(); flags &= ~(Module.Flags.IsLoadedLayout | Module.Flags.IsFileLayout); flags |= isVirtual ? 
Module.Flags.IsLoadedLayout : Module.Flags.IsFileLayout; return peImage; @@ -268,7 +258,7 @@ private PEImage GetPEInfo(bool isVirtual, ulong address, ulong size, ref PdbFile } catch (Exception ex) when (ex is BadImageFormatException || ex is EndOfStreamException || ex is IOException) { - Trace.TraceError($"GetPEInfo: loaded {address:X16} isVirtual {isVirtual} exception {ex.Message}"); + Trace.TraceError($"GetPEInfo: {address:X16} isVirtual {isVirtual} exception {ex.Message}"); } return null; } diff --git a/src/Microsoft.Diagnostics.DebugServices.Implementation/ModuleServiceFromDataReader.cs b/src/Microsoft.Diagnostics.DebugServices.Implementation/ModuleServiceFromDataReader.cs index 1ee7ae131d..c2c6253b9a 100644 --- a/src/Microsoft.Diagnostics.DebugServices.Implementation/ModuleServiceFromDataReader.cs +++ b/src/Microsoft.Diagnostics.DebugServices.Implementation/ModuleServiceFromDataReader.cs @@ -27,6 +27,7 @@ class ModuleFromDataReader : Module, IExportSymbols private readonly IExportReader _exportReader; private readonly ModuleInfo _moduleInfo; private readonly ulong _imageSize; + private VersionData _versionData; private string _versionString; public ModuleFromDataReader(ModuleServiceFromDataReader moduleService, IExportReader exportReader, int moduleIndex, ModuleInfo moduleInfo, ulong imageSize) @@ -65,17 +66,17 @@ public override VersionData VersionData { if (_moduleInfo.Version != EmptyVersionInfo) { - base.VersionData = _moduleInfo.Version.ToVersionData(); + _versionData = _moduleInfo.Version.ToVersionData(); } else { if (_moduleService.Target.OperatingSystem != OSPlatform.Windows) { - GetVersionFromVersionString(); + _versionData = GetVersion(); } } } - return base.VersionData; + return _versionData; } } diff --git a/src/Microsoft.Diagnostics.DebugServices.Implementation/SymbolService.cs b/src/Microsoft.Diagnostics.DebugServices.Implementation/SymbolService.cs index fffb98fca3..f014c23fc2 100644 --- a/src/Microsoft.Diagnostics.DebugServices.Implementation/SymbolService.cs +++ b/src/Microsoft.Diagnostics.DebugServices.Implementation/SymbolService.cs @@ -8,6 +8,7 @@ using Microsoft.SymbolStore.SymbolStores; using SOS; using System; +using System.Collections.Generic; using System.Collections.Immutable; using System.Diagnostics; using System.IO; @@ -27,8 +28,8 @@ public class SymbolService : ISymbolService /// /// Symbol server URLs /// - const string MsdlSymbolServer = "http://msdl.microsoft.com/download/symbols/"; - const string SymwebSymbolServer = "http://symweb.corp.microsoft.com/"; + public const string MsdlSymbolServer = "http://msdl.microsoft.com/download/symbols/"; + public const string SymwebSymbolServer = "http://symweb.corp.microsoft.com/"; private readonly IHost _host; private string _defaultSymbolCache; @@ -96,77 +97,103 @@ public bool ParseSymbolPath(string symbolPath) foreach (string path in paths.Reverse()) { - string[] parts = path.Split(new char[] { '*' }, StringSplitOptions.RemoveEmptyEntries); - - // UNC or directory paths are ignored (paths not prefixed with srv* or cache*). 
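The SymbolService change that begins here (and continues in the next hunk) makes the msdl and symweb URLs public and rewrites ParseSymbolPath to split each element on '*' without dropping empty entries, so srv*, srv**, cache* with multiple caches, and symsrv* forms can all be handled. A few illustrative inputs and the stores the rewritten parser is expected to add; the cache and directory values below are examples, not paths from the patch:

// Illustrative symbol path elements; the exact behavior is defined by ParseSymbolPath below.
string[] symbolPathExamples =
{
    "srv*",                                                             // msdl server, no cache
    "srv**http://msdl.microsoft.com/download/symbols/",                 // default cache, then server
    "srv*c:\\symcache*http://msdl.microsoft.com/download/symbols/",     // explicit cache, then server
    "cache*c:\\symcache",                                               // cache only
    "symsrv*symsrv.dll*c:\\symcache*http://msdl.microsoft.com/download/symbols/", // symsrv form; the dll name is ignored
    "c:\\my\\symbols"                                                   // plain directory path
};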
+ string[] parts = path.Split(new char[] { '*' }, StringSplitOptions.None); if (parts.Length > 0) { - string symbolServerPath = null; - string symbolCachePath = null; + List symbolCachePaths = new(); string symbolDirectoryPath = null; - bool msdl = false; + string symbolServerPath = null; + + void ParseServer(int start) + { + symbolServerPath = MsdlSymbolServer; + for (int i = start; i < parts.Length; i++) + { + if (string.IsNullOrEmpty(parts[i])) + { + // srv** means use default cache + if (i != (parts.Length - 1)) + { + symbolCachePaths.Add(DefaultSymbolCache); + } + } + else if (i < (parts.Length - 1)) + { + symbolCachePaths.Add(parts[i]); + } + else + { + symbolServerPath = parts[i]; + } + } + } switch (parts[0].ToLowerInvariant()) { + case "symsrv": + if (parts.Length <= 2) + { + return false; + } + // ignore symsrv.dll or other server dlls in parts[2] + ParseServer(2); + break; + case "srv": - switch (parts.Length) - { - case 1: - msdl = true; - symbolCachePath = DefaultSymbolCache; - break; - case 2: - symbolServerPath = parts[1]; - break; - case 3: - symbolCachePath = parts[1]; - symbolServerPath = parts[2]; - break; - default: - return false; + if (parts.Length <= 1) + { + return false; } + ParseServer(1); break; case "cache": - switch (parts.Length) - { - case 1: - symbolCachePath = DefaultSymbolCache; - break; - case 2: - symbolCachePath = parts[1]; - break; - default: - return false; + if (parts.Length <= 1) + { + return false; + } + else + { + for (int i = 1; i < parts.Length; i++) + { + if (string.IsNullOrEmpty(parts[i])) + { + if (i == 1) + { + symbolCachePaths.Add(DefaultSymbolCache); + } + } + else + { + symbolCachePaths.Add(parts[i]); + } + } } break; default: // Directory path search - switch (parts.Length) + if (parts.Length != 1) { - case 1: - symbolDirectoryPath = parts[0]; - break; - default: - return false; + return false; } + symbolDirectoryPath = parts[0]; break; } - if (msdl || symbolServerPath != null) + if (symbolServerPath != null) { - if (!AddSymbolServer(msdl, symweb: false, symbolServerPath, authToken: null, timeoutInMinutes: 0)) + if (!AddSymbolServer(msdl: false, symweb: false, symbolServerPath.Trim(), authToken: null, timeoutInMinutes: 0)) { return false; } } - if (symbolCachePath != null) + foreach (string symbolCachePath in symbolCachePaths.Reverse()) { - AddCachePath(symbolCachePath); + AddCachePath(symbolCachePath.Trim()); } if (symbolDirectoryPath != null) { - AddDirectoryPath(symbolDirectoryPath); + AddDirectoryPath(symbolDirectoryPath.Trim()); } } } diff --git a/src/Microsoft.Diagnostics.DebugServices/CommandBase.cs b/src/Microsoft.Diagnostics.DebugServices/CommandBase.cs index 3470680e4b..2898669a4a 100644 --- a/src/Microsoft.Diagnostics.DebugServices/CommandBase.cs +++ b/src/Microsoft.Diagnostics.DebugServices/CommandBase.cs @@ -38,6 +38,7 @@ protected void Write(string message) protected void WriteLine(string message) { Console.Write(message + Environment.NewLine); + Console.CancellationToken.ThrowIfCancellationRequested(); } /// @@ -48,6 +49,7 @@ protected void WriteLine(string message) protected void WriteLine(string format, params object[] args) { Console.Write(string.Format(format, args) + Environment.NewLine); + Console.CancellationToken.ThrowIfCancellationRequested(); } /// @@ -58,6 +60,7 @@ protected void WriteLine(string format, params object[] args) protected void WriteLineWarning(string format, params object[] args) { Console.WriteWarning(string.Format(format, args) + Environment.NewLine); + 
Console.CancellationToken.ThrowIfCancellationRequested(); } /// @@ -68,6 +71,7 @@ protected void WriteLineWarning(string format, params object[] args) protected void WriteLineError(string format, params object[] args) { Console.WriteError(string.Format(format, args) + Environment.NewLine); + Console.CancellationToken.ThrowIfCancellationRequested(); } /// diff --git a/src/Microsoft.Diagnostics.DebugServices/Microsoft.Diagnostics.DebugServices.csproj b/src/Microsoft.Diagnostics.DebugServices/Microsoft.Diagnostics.DebugServices.csproj index 08b9efdb04..2e64849144 100644 --- a/src/Microsoft.Diagnostics.DebugServices/Microsoft.Diagnostics.DebugServices.csproj +++ b/src/Microsoft.Diagnostics.DebugServices/Microsoft.Diagnostics.DebugServices.csproj @@ -13,7 +13,6 @@ - diff --git a/src/Microsoft.Diagnostics.ExtensionCommands/ClrMDHelper.cs b/src/Microsoft.Diagnostics.ExtensionCommands/ClrMDHelper.cs index 4f6a631cbb..40b9662a56 100644 --- a/src/Microsoft.Diagnostics.ExtensionCommands/ClrMDHelper.cs +++ b/src/Microsoft.Diagnostics.ExtensionCommands/ClrMDHelper.cs @@ -721,18 +721,12 @@ private bool TryGetSegmentMemoryRange(ClrSegment segment, GCGeneration generatio switch (generation) { case GCGeneration.Generation0: - if (segment.IsEphemeralSegment) - { - start = segment.Generation0.Start; - end = segment.Generation0.End; - } + start = segment.Generation0.Start; + end = segment.Generation0.End; return start != end; case GCGeneration.Generation1: - if (segment.IsEphemeralSegment) - { - start = segment.Generation1.Start; - end = segment.Generation1.End; - } + start = segment.Generation1.Start; + end = segment.Generation1.End; return start != end; case GCGeneration.Generation2: if (!(segment.IsLargeObjectSegment || segment.IsPinnedObjectSegment)) diff --git a/src/Microsoft.Diagnostics.ExtensionCommands/DumpConcurrentDictionaryCommand.cs b/src/Microsoft.Diagnostics.ExtensionCommands/DumpConcurrentDictionaryCommand.cs index f5b1d5e195..b3c4af85ab 100644 --- a/src/Microsoft.Diagnostics.ExtensionCommands/DumpConcurrentDictionaryCommand.cs +++ b/src/Microsoft.Diagnostics.ExtensionCommands/DumpConcurrentDictionaryCommand.cs @@ -32,7 +32,7 @@ public override void ExtensionInvoke() var heap = Runtime.Heap; var type = heap.GetObjectType(address); - if (type == null) + if (type?.Name is null) { WriteLine($"{Address:x16} is not referencing an object..."); return; diff --git a/src/Microsoft.Diagnostics.ExtensionCommands/DumpGen.cs b/src/Microsoft.Diagnostics.ExtensionCommands/DumpGen.cs index 9261b1c3af..7d341ee3a4 100644 --- a/src/Microsoft.Diagnostics.ExtensionCommands/DumpGen.cs +++ b/src/Microsoft.Diagnostics.ExtensionCommands/DumpGen.cs @@ -49,7 +49,7 @@ public IEnumerable GetInstances(ulong methodTableAddress) private static bool IsTypeNameMatching(string typeName, string typeNameFilter) { - return typeName.IndexOf(typeNameFilter, StringComparison.OrdinalIgnoreCase) >= 0; + return typeName != null && typeName.IndexOf(typeNameFilter, StringComparison.OrdinalIgnoreCase) >= 0; } } } diff --git a/src/Microsoft.Diagnostics.Monitoring.EventPipe/Configuration/AspNetTriggerSourceConfiguration.cs b/src/Microsoft.Diagnostics.Monitoring.EventPipe/Configuration/AspNetTriggerSourceConfiguration.cs new file mode 100644 index 0000000000..f27d97c2b4 --- /dev/null +++ b/src/Microsoft.Diagnostics.Monitoring.EventPipe/Configuration/AspNetTriggerSourceConfiguration.cs @@ -0,0 +1,70 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. 
+// See the LICENSE file in the project root for more information. + +using System; +using System.Collections.Generic; +using System.Diagnostics.Tracing; +using Microsoft.Diagnostics.NETCore.Client; + +namespace Microsoft.Diagnostics.Monitoring.EventPipe +{ + public sealed class AspNetTriggerSourceConfiguration : MonitoringSourceConfiguration + { + // In order to handle hung requests, we also capture metrics on a regular interval. + // This acts as a wake up timer, since we cannot rely on Activity1Stop. + private readonly bool _supportHeartbeat; + + public const int DefaultHeartbeatInterval = 10; + + public AspNetTriggerSourceConfiguration(bool supportHeartbeat = false) + { + _supportHeartbeat = supportHeartbeat; + } + + /// + /// Filter string for trigger data. Note that even though some triggers use start OR stop, + /// collecting just one causes unusual behavior in data collection. + /// + /// + /// IMPORTANT! We rely on these transformations to make sure we can access relevant data + /// by index. The order must match the data extracted in the triggers. + /// + private const string DiagnosticFilterString = + "Microsoft.AspNetCore/Microsoft.AspNetCore.Hosting.HttpRequestIn.Start@Activity1Start:-" + + "ActivityId=*Activity.Id" + + ";Request.Path" + + ";ActivityStartTime=*Activity.StartTimeUtc.Ticks" + + "\r\n" + + "Microsoft.AspNetCore/Microsoft.AspNetCore.Hosting.HttpRequestIn.Stop@Activity1Stop:-" + + "ActivityId=*Activity.Id" + + ";Request.Path" + + ";Response.StatusCode" + + ";ActivityDuration=*Activity.Duration.Ticks" + + "\r\n"; + + public override IList GetProviders() + { + if (_supportHeartbeat) + { + return new AggregateSourceConfiguration( + new AspNetTriggerSourceConfiguration(supportHeartbeat: false), + new MetricSourceConfiguration(DefaultHeartbeatInterval, new[] { MicrosoftAspNetCoreHostingEventSourceName })).GetProviders(); + + } + else + { + return new[] + { + new EventPipeProvider(DiagnosticSourceEventSource, + keywords: DiagnosticSourceEventSourceEvents | DiagnosticSourceEventSourceMessages, + eventLevel: EventLevel.Verbose, + arguments: new Dictionary + { + { "FilterAndPayloadSpecs", DiagnosticFilterString } + }) + }; + } + } + } +} diff --git a/src/Microsoft.Diagnostics.Monitoring.EventPipe/Configuration/HttpRequestSourceConfiguration.cs b/src/Microsoft.Diagnostics.Monitoring.EventPipe/Configuration/HttpRequestSourceConfiguration.cs index 595ec71a68..904e8d843f 100644 --- a/src/Microsoft.Diagnostics.Monitoring.EventPipe/Configuration/HttpRequestSourceConfiguration.cs +++ b/src/Microsoft.Diagnostics.Monitoring.EventPipe/Configuration/HttpRequestSourceConfiguration.cs @@ -57,7 +57,7 @@ public override IList GetProviders() { // Diagnostic source events new EventPipeProvider(DiagnosticSourceEventSource, - keywords: 0x1 | 0x2, + keywords: DiagnosticSourceEventSourceEvents | DiagnosticSourceEventSourceMessages, eventLevel: EventLevel.Verbose, arguments: new Dictionary { diff --git a/src/Microsoft.Diagnostics.Monitoring.EventPipe/Configuration/MonitoringSourceConfiguration.cs b/src/Microsoft.Diagnostics.Monitoring.EventPipe/Configuration/MonitoringSourceConfiguration.cs index 2bab551996..2fc5c24d80 100644 --- a/src/Microsoft.Diagnostics.Monitoring.EventPipe/Configuration/MonitoringSourceConfiguration.cs +++ b/src/Microsoft.Diagnostics.Monitoring.EventPipe/Configuration/MonitoringSourceConfiguration.cs @@ -9,6 +9,16 @@ namespace Microsoft.Diagnostics.Monitoring.EventPipe { public abstract class MonitoringSourceConfiguration { + /// + /// Indicates diagnostics messages from 
DiagnosticSourceEventSource should be included. + /// + public const long DiagnosticSourceEventSourceMessages = 0x1; + + /// + /// Indicates that all events from all diagnostic sources should be forwarded to the EventSource using the 'Event' event. + /// + public const long DiagnosticSourceEventSourceEvents = 0x2; + public const string MicrosoftExtensionsLoggingProviderName = "Microsoft-Extensions-Logging"; public const string SystemRuntimeEventSourceName = "System.Runtime"; public const string MicrosoftAspNetCoreHostingEventSourceName = "Microsoft.AspNetCore.Hosting"; diff --git a/src/Microsoft.Diagnostics.Monitoring.EventPipe/Counters/CounterFilter.cs b/src/Microsoft.Diagnostics.Monitoring.EventPipe/Counters/CounterFilter.cs index acfb408148..df2756c3e6 100644 --- a/src/Microsoft.Diagnostics.Monitoring.EventPipe/Counters/CounterFilter.cs +++ b/src/Microsoft.Diagnostics.Monitoring.EventPipe/Counters/CounterFilter.cs @@ -11,13 +11,16 @@ namespace Microsoft.Diagnostics.Monitoring.EventPipe internal sealed class CounterFilter { private Dictionary> _enabledCounters; + private int _intervalMilliseconds; - public static CounterFilter AllCounters { get; } = new CounterFilter(); + public static CounterFilter AllCounters(int counterIntervalSeconds) + => new CounterFilter(counterIntervalSeconds); - public CounterFilter() + public CounterFilter(int intervalSeconds) { //Provider names are not case sensitive, but counter names are. _enabledCounters = new Dictionary>(StringComparer.OrdinalIgnoreCase); + _intervalMilliseconds = intervalSeconds * 1000; } // Called when we want to enable all counters under a provider name. @@ -33,8 +36,12 @@ public void AddFilter(string providerName, string[] counters) public IEnumerable GetProviders() => _enabledCounters.Keys; - public bool IsIncluded(string providerName, string counterName) + public bool IsIncluded(string providerName, string counterName, int interval) { + if (_intervalMilliseconds != interval) + { + return false; + } if (_enabledCounters.Count == 0) { return true; diff --git a/src/Microsoft.Diagnostics.Monitoring.EventPipe/Counters/EventCounterPipeline.cs b/src/Microsoft.Diagnostics.Monitoring.EventPipe/Counters/EventCounterPipeline.cs index a778091235..12beff51b6 100644 --- a/src/Microsoft.Diagnostics.Monitoring.EventPipe/Counters/EventCounterPipeline.cs +++ b/src/Microsoft.Diagnostics.Monitoring.EventPipe/Counters/EventCounterPipeline.cs @@ -24,7 +24,7 @@ public EventCounterPipeline(DiagnosticsClient client, if (settings.CounterGroups.Length > 0) { - _filter = new CounterFilter(); + _filter = new CounterFilter(CounterIntervalSeconds); foreach (var counterGroup in settings.CounterGroups) { _filter.AddFilter(counterGroup.ProviderName, counterGroup.CounterNames); @@ -32,7 +32,7 @@ public EventCounterPipeline(DiagnosticsClient client, } else { - _filter = CounterFilter.AllCounters; + _filter = CounterFilter.AllCounters(CounterIntervalSeconds); } } @@ -49,56 +49,8 @@ protected override async Task OnEventSourceAvailable(EventPipeEventSource eventS { try { - // Metrics - if (traceEvent.EventName.Equals("EventCounters")) + if (traceEvent.TryGetCounterPayload(_filter, out ICounterPayload counterPayload)) { - IDictionary payloadVal = (IDictionary)(traceEvent.PayloadValue(0)); - IDictionary payloadFields = (IDictionary)(payloadVal["Payload"]); - - //Make sure we are part of the requested series. If multiple clients request metrics, all of them get the metrics. 
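CounterFilter now carries the collection interval, and IsIncluded rejects any counter payload whose advertised series interval (in milliseconds) does not match it, so clients sampling at different rates no longer see each other's data. A hedged usage sketch built only from the members shown in this hunk; the provider and counter names are examples:

// A filter built for a 5 second interval stores 5000 ms and only accepts matching payloads.
var filter = new CounterFilter(intervalSeconds: 5);
filter.AddFilter("System.Runtime", new[] { "cpu-usage" }); // provider names are case-insensitive; counter names are case-sensitive

bool accepted = filter.IsIncluded("System.Runtime", "cpu-usage", interval: 5000); // expected: true, interval and counter match
bool rejected = filter.IsIncluded("System.Runtime", "cpu-usage", interval: 1000); // expected: false, different collection interval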
- string series = payloadFields["Series"].ToString(); - if (GetInterval(series) != CounterIntervalSeconds * 1000) - { - return; - } - - string counterName = payloadFields["Name"].ToString(); - if (!_filter.IsIncluded(traceEvent.ProviderName, counterName)) - { - return; - } - - float intervalSec = (float)payloadFields["IntervalSec"]; - string displayName = payloadFields["DisplayName"].ToString(); - string displayUnits = payloadFields["DisplayUnits"].ToString(); - double value = 0; - CounterType counterType = CounterType.Metric; - - if (payloadFields["CounterType"].Equals("Mean")) - { - value = (double)payloadFields["Mean"]; - } - else if (payloadFields["CounterType"].Equals("Sum")) - { - counterType = CounterType.Rate; - value = (double)payloadFields["Increment"]; - if (string.IsNullOrEmpty(displayUnits)) - { - displayUnits = "count"; - } - //TODO Should we make these /sec like the dotnet-counters tool? - } - - // Note that dimensional data such as pod and namespace are automatically added in prometheus and azure monitor scenarios. - // We no longer added it here. - var counterPayload = new CounterPayload(traceEvent.TimeStamp, - traceEvent.ProviderName, - counterName, displayName, - displayUnits, - value, - counterType, - intervalSec); - ExecuteCounterLoggerAction((metricLogger) => metricLogger.Log(counterPayload)); } } @@ -118,17 +70,6 @@ protected override async Task OnEventSourceAvailable(EventPipeEventSource eventS ExecuteCounterLoggerAction((metricLogger) => metricLogger.PipelineStopped()); } - private static int GetInterval(string series) - { - const string comparison = "Interval="; - int interval = 0; - if (series.StartsWith(comparison, StringComparison.OrdinalIgnoreCase)) - { - int.TryParse(series.Substring(comparison.Length), out interval); - } - return interval; - } - private void ExecuteCounterLoggerAction(Action action) { foreach (ICountersLogger logger in _loggers) diff --git a/src/Microsoft.Diagnostics.Monitoring.EventPipe/Counters/TraceEventExtensions.cs b/src/Microsoft.Diagnostics.Monitoring.EventPipe/Counters/TraceEventExtensions.cs new file mode 100644 index 0000000000..edf73a89e6 --- /dev/null +++ b/src/Microsoft.Diagnostics.Monitoring.EventPipe/Counters/TraceEventExtensions.cs @@ -0,0 +1,78 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. +// See the LICENSE file in the project root for more information. + +using Microsoft.Diagnostics.Tracing; +using System; +using System.Collections.Generic; + +namespace Microsoft.Diagnostics.Monitoring.EventPipe +{ + internal static class TraceEventExtensions + { + public static bool TryGetCounterPayload(this TraceEvent traceEvent, CounterFilter filter, out ICounterPayload payload) + { + payload = null; + + if ("EventCounters".Equals(traceEvent.EventName)) + { + IDictionary payloadVal = (IDictionary)(traceEvent.PayloadValue(0)); + IDictionary payloadFields = (IDictionary)(payloadVal["Payload"]); + + //Make sure we are part of the requested series. If multiple clients request metrics, all of them get the metrics. 
+ string series = payloadFields["Series"].ToString(); + string counterName = payloadFields["Name"].ToString(); + if (!filter.IsIncluded(traceEvent.ProviderName, counterName, GetInterval(series))) + { + return false; + } + + float intervalSec = (float)payloadFields["IntervalSec"]; + string displayName = payloadFields["DisplayName"].ToString(); + string displayUnits = payloadFields["DisplayUnits"].ToString(); + double value = 0; + CounterType counterType = CounterType.Metric; + + if (payloadFields["CounterType"].Equals("Mean")) + { + value = (double)payloadFields["Mean"]; + } + else if (payloadFields["CounterType"].Equals("Sum")) + { + counterType = CounterType.Rate; + value = (double)payloadFields["Increment"]; + if (string.IsNullOrEmpty(displayUnits)) + { + displayUnits = "count"; + } + //TODO Should we make these /sec like the dotnet-counters tool? + } + + // Note that dimensional data such as pod and namespace are automatically added in prometheus and azure monitor scenarios. + // We no longer added it here. + payload = new CounterPayload( + traceEvent.TimeStamp, + traceEvent.ProviderName, + counterName, displayName, + displayUnits, + value, + counterType, + intervalSec); + return true; + } + + return false; + } + + private static int GetInterval(string series) + { + const string comparison = "Interval="; + int interval = 0; + if (series.StartsWith(comparison, StringComparison.OrdinalIgnoreCase)) + { + int.TryParse(series.Substring(comparison.Length), out interval); + } + return interval; + } + } +} diff --git a/src/Microsoft.Diagnostics.Monitoring.EventPipe/EventPipeStreamProvider.cs b/src/Microsoft.Diagnostics.Monitoring.EventPipe/EventPipeStreamProvider.cs index 545d92a142..7f9681ddfb 100644 --- a/src/Microsoft.Diagnostics.Monitoring.EventPipe/EventPipeStreamProvider.cs +++ b/src/Microsoft.Diagnostics.Monitoring.EventPipe/EventPipeStreamProvider.cs @@ -22,14 +22,14 @@ public EventPipeStreamProvider(MonitoringSourceConfiguration sourceConfig) _stopProcessingSource = new TaskCompletionSource(TaskCreationOptions.RunContinuationsAsynchronously); } - public Task ProcessEvents(DiagnosticsClient client, TimeSpan duration, CancellationToken cancellationToken) + public async Task ProcessEvents(DiagnosticsClient client, TimeSpan duration, CancellationToken cancellationToken) { cancellationToken.ThrowIfCancellationRequested(); EventPipeSession session = null; try { - session = client.StartEventPipeSession(_sourceConfig.GetProviders(), _sourceConfig.RequestRundown, _sourceConfig.BufferSizeInMB); + session = await client.StartEventPipeSessionAsync(_sourceConfig.GetProviders(), _sourceConfig.RequestRundown, _sourceConfig.BufferSizeInMB, cancellationToken).ConfigureAwait(false); } catch (EndOfStreamException e) { @@ -49,10 +49,11 @@ public Task ProcessEvents(DiagnosticsClient client, TimeSpan duration, C // Use TaskCompletionSource instead of Task.Delay with cancellation to avoid // using exceptions for normal termination of event stream. await _stopProcessingSource.Task.ConfigureAwait(false); - StopSession(session); + + await StopSessionAsync(session).ConfigureAwait(false); }); - return Task.FromResult(session.EventStream); + return session.EventStream; } public void StopProcessing() @@ -60,11 +61,13 @@ public void StopProcessing() _stopProcessingSource.TrySetResult(null); } - private static void StopSession(EventPipeSession session) + private static async Task StopSessionAsync(EventPipeSession session) { + // Cancel after a generous amount of time if process ended before command is sent. 
+ using CancellationTokenSource cancellationSource = new(IpcClient.ConnectTimeout); try { - session.Stop(); + await session.StopAsync(cancellationSource.Token).ConfigureAwait(false); } catch (EndOfStreamException) { @@ -74,6 +77,10 @@ private static void StopSession(EventPipeSession session) catch (TimeoutException) { } + // We may time out if the process ended before we sent StopTracing command. We can just exit in that case. + catch (OperationCanceledException) + { + } // On Unix platforms, we may actually get a PNSE since the pipe is gone with the process, and Runtime Client Library // does not know how to distinguish a situation where there is no pipe to begin with, or where the process has exited // before collection started and got rid of a pipe that once existed. diff --git a/src/Microsoft.Diagnostics.Monitoring.EventPipe/Microsoft.Diagnostics.Monitoring.EventPipe.csproj b/src/Microsoft.Diagnostics.Monitoring.EventPipe/Microsoft.Diagnostics.Monitoring.EventPipe.csproj index ec8330ee18..efc358643f 100644 --- a/src/Microsoft.Diagnostics.Monitoring.EventPipe/Microsoft.Diagnostics.Monitoring.EventPipe.csproj +++ b/src/Microsoft.Diagnostics.Monitoring.EventPipe/Microsoft.Diagnostics.Monitoring.EventPipe.csproj @@ -29,6 +29,7 @@ + @@ -39,8 +40,7 @@ - - + diff --git a/src/Microsoft.Diagnostics.Monitoring.EventPipe/Triggers/AspNet/AspNetRequestCountTrigger.cs b/src/Microsoft.Diagnostics.Monitoring.EventPipe/Triggers/AspNet/AspNetRequestCountTrigger.cs new file mode 100644 index 0000000000..e1453788f2 --- /dev/null +++ b/src/Microsoft.Diagnostics.Monitoring.EventPipe/Triggers/AspNet/AspNetRequestCountTrigger.cs @@ -0,0 +1,27 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. +// See the LICENSE file in the project root for more information. + +using Microsoft.Diagnostics.Tracing; +using System; +using System.Collections.Generic; +using System.Text; + +namespace Microsoft.Diagnostics.Monitoring.EventPipe.Triggers.AspNet +{ + internal sealed class AspNetRequestCountTrigger : AspNetTrigger + { + private SlidingWindow _window; + + public AspNetRequestCountTrigger(AspNetRequestCountTriggerSettings settings) : base(settings) + { + _window = new SlidingWindow(settings.SlidingWindowDuration); + } + + protected override bool ActivityStart(DateTime timestamp, string activityId) + { + _window.AddDataPoint(timestamp); + return _window.Count >= Settings.RequestCount; + } + } +} diff --git a/src/Microsoft.Diagnostics.Monitoring.EventPipe/Triggers/AspNet/AspNetRequestCountTriggerSettings.cs b/src/Microsoft.Diagnostics.Monitoring.EventPipe/Triggers/AspNet/AspNetRequestCountTriggerSettings.cs new file mode 100644 index 0000000000..b50fc728a2 --- /dev/null +++ b/src/Microsoft.Diagnostics.Monitoring.EventPipe/Triggers/AspNet/AspNetRequestCountTriggerSettings.cs @@ -0,0 +1,14 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. +// See the LICENSE file in the project root for more information. 
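The EventPipeStreamProvider change above moves session start and stop to the async client APIs and bounds the stop call with IpcClient.ConnectTimeout, so a process that exited before the StopTracing command is sent cannot hang the pipeline. A condensed sketch of that pattern; RunSessionAsync and its parameters are illustrative, using directives are omitted, and error handling is trimmed to the cancellation case the hunk adds:

async Task RunSessionAsync(DiagnosticsClient client, MonitoringSourceConfiguration config, CancellationToken token)
{
    EventPipeSession session = await client.StartEventPipeSessionAsync(
        config.GetProviders(), config.RequestRundown, config.BufferSizeInMB, token).ConfigureAwait(false);

    // ... hand session.EventStream to the event source / processing loop ...

    // Give the stop command a bounded amount of time; the target process may already be gone.
    using CancellationTokenSource stopTimeout = new(IpcClient.ConnectTimeout);
    try
    {
        await session.StopAsync(stopTimeout.Token).ConfigureAwait(false);
    }
    catch (OperationCanceledException)
    {
        // Timed out because the process ended before the command was sent; safe to ignore.
    }
}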
+ +using System; +using System.Collections.Generic; +using System.Text; + +namespace Microsoft.Diagnostics.Monitoring.EventPipe.Triggers.AspNet +{ + internal sealed class AspNetRequestCountTriggerSettings : AspNetTriggerSettings + { + } +} diff --git a/src/Microsoft.Diagnostics.Monitoring.EventPipe/Triggers/AspNet/AspNetRequestDurationTrigger.cs b/src/Microsoft.Diagnostics.Monitoring.EventPipe/Triggers/AspNet/AspNetRequestDurationTrigger.cs new file mode 100644 index 0000000000..72e479ef74 --- /dev/null +++ b/src/Microsoft.Diagnostics.Monitoring.EventPipe/Triggers/AspNet/AspNetRequestDurationTrigger.cs @@ -0,0 +1,84 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. +// See the LICENSE file in the project root for more information. + +using Microsoft.Diagnostics.Tracing; +using System; +using System.Collections.Generic; +using System.Text; + +namespace Microsoft.Diagnostics.Monitoring.EventPipe.Triggers.AspNet +{ + internal sealed class AspNetRequestDurationTrigger : AspNetTrigger + { + private readonly long _durationTicks; + + //This is adjusted due to rounding errors on event counter timestamp math. + private readonly TimeSpan _heartBeatInterval = TimeSpan.FromSeconds(AspNetTriggerSourceConfiguration.DefaultHeartbeatInterval - 1); + private SlidingWindow _window; + private Dictionary _requests = new(); + private DateTime _lastHeartbeatProcessed = DateTime.MinValue; + + public AspNetRequestDurationTrigger(AspNetRequestDurationTriggerSettings settings) : base(settings) + { + _durationTicks = Settings.RequestDuration.Ticks; + _window = new SlidingWindow(settings.SlidingWindowDuration); + } + + protected override bool ActivityStart(DateTime timestamp, string activityId) + { + _requests.Add(activityId, timestamp); + + return false; + } + + protected override bool Heartbeat(DateTime timestamp) + { + //May get additional heartbeats based on multiple counters or extra intervals. We only + //process the data periodically. + if (timestamp - _lastHeartbeatProcessed > _heartBeatInterval) + { + _lastHeartbeatProcessed = timestamp; + List requestsToRemove = new(); + + foreach (KeyValuePair request in _requests) + { + if ((timestamp - request.Value) >= Settings.RequestDuration) + { + _window.AddDataPoint(timestamp); + + //We don't want to count the request more than once, since it could still finish later. + //At this point we already deeemed it too slow. We also want to make sure we + //clear the cached requests periodically even if they don't finish. + requestsToRemove.Add(request.Key); + } + } + + foreach(string requestId in requestsToRemove) + { + _requests.Remove(requestId); + } + + return _window.Count >= Settings.RequestCount; + } + + return false; + } + + protected override bool ActivityStop(DateTime timestamp, string activityId, long durationTicks, int statusCode) + { + if (!_requests.Remove(activityId)) + { + //This request was already removed by the heartbeat. No need to evaluate duration since we don't want to double count the request. 
+ return false; + } + + if (durationTicks >= _durationTicks) + { + _window.AddDataPoint(timestamp); + } + + return _window.Count >= Settings.RequestCount; + } + } +} diff --git a/src/Microsoft.Diagnostics.Monitoring.EventPipe/Triggers/AspNet/AspNetRequestDurationTriggerSettings.cs b/src/Microsoft.Diagnostics.Monitoring.EventPipe/Triggers/AspNet/AspNetRequestDurationTriggerSettings.cs new file mode 100644 index 0000000000..56847ec4ed --- /dev/null +++ b/src/Microsoft.Diagnostics.Monitoring.EventPipe/Triggers/AspNet/AspNetRequestDurationTriggerSettings.cs @@ -0,0 +1,24 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. +// See the LICENSE file in the project root for more information. + +using System; +using System.Collections.Generic; +using System.ComponentModel.DataAnnotations; +using System.Linq; +using System.Text; + +namespace Microsoft.Diagnostics.Monitoring.EventPipe.Triggers.AspNet +{ + internal sealed class AspNetRequestDurationTriggerSettings : AspNetTriggerSettings + { + public const string RequestDuration_MaxValue = "01:00:00"; // 1 hour + public const string RequestDuration_MinValue = "00:00:00"; // No minimum + + /// + /// The minimum duration of the request to be considered slow. + /// + [Range(typeof(TimeSpan), RequestDuration_MinValue, RequestDuration_MaxValue)] + public TimeSpan RequestDuration { get; set; } + } +} diff --git a/src/Microsoft.Diagnostics.Monitoring.EventPipe/Triggers/AspNet/AspNetRequestStatusTrigger.cs b/src/Microsoft.Diagnostics.Monitoring.EventPipe/Triggers/AspNet/AspNetRequestStatusTrigger.cs new file mode 100644 index 0000000000..bb1fee5a41 --- /dev/null +++ b/src/Microsoft.Diagnostics.Monitoring.EventPipe/Triggers/AspNet/AspNetRequestStatusTrigger.cs @@ -0,0 +1,32 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. +// See the LICENSE file in the project root for more information. + +using Microsoft.Diagnostics.Tracing; +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; + +namespace Microsoft.Diagnostics.Monitoring.EventPipe.Triggers.AspNet +{ + internal sealed class AspNetRequestStatusTrigger : AspNetTrigger + { + private SlidingWindow _window; + + public AspNetRequestStatusTrigger(AspNetRequestStatusTriggerSettings settings) : base(settings) + { + _window = new SlidingWindow(settings.SlidingWindowDuration); + } + + protected override bool ActivityStop(DateTime timestamp, string activityId, long durationTicks, int statusCode) + { + if (Settings.StatusCodes.Any(r => statusCode >= r.Min && statusCode <= r.Max)) + { + _window.AddDataPoint(timestamp); + } + + return _window.Count >= Settings.RequestCount; + } + } +} diff --git a/src/Microsoft.Diagnostics.Monitoring.EventPipe/Triggers/AspNet/AspNetRequestStatusTriggerSettings.cs b/src/Microsoft.Diagnostics.Monitoring.EventPipe/Triggers/AspNet/AspNetRequestStatusTriggerSettings.cs new file mode 100644 index 0000000000..592c923fa3 --- /dev/null +++ b/src/Microsoft.Diagnostics.Monitoring.EventPipe/Triggers/AspNet/AspNetRequestStatusTriggerSettings.cs @@ -0,0 +1,67 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. +// See the LICENSE file in the project root for more information. 
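The duration trigger above counts each request at most once: either when its Activity stop event shows it ran for at least RequestDuration, or earlier, when a heartbeat notices it has already been running that long (which also covers hung requests that never stop). A hedged illustration of the settings involved, using only properties defined in these trigger settings files; the concrete values are examples:

// With these values the trigger fires once 3 requests have each taken at least 5 seconds
// (completed or still in flight) within any 1 minute sliding window.
var settings = new AspNetRequestDurationTriggerSettings
{
    RequestDuration = TimeSpan.FromSeconds(5),
    SlidingWindowDuration = TimeSpan.FromMinutes(1),
    RequestCount = 3,
    IncludePaths = new[] { "/api/**/orders" } // optional glob filter; see GlobMatcher later in this change
};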
+ +using System; +using System.Collections.Generic; +using System.ComponentModel.DataAnnotations; +using System.Linq; +using System.Text; + +namespace Microsoft.Diagnostics.Monitoring.EventPipe.Triggers.AspNet +{ + internal sealed class AspNetRequestStatusTriggerSettings : AspNetTriggerSettings + { + /// + /// Specifies the set of status codes for the trigger. + /// E.g. 200-200;400-500 + /// + [Required] + [MinLength(1)] + [CustomValidation(typeof(StatusCodeRangeValidator), nameof(StatusCodeRangeValidator.ValidateStatusCodes))] + public StatusCodeRange[] StatusCodes { get; set; } + } + + internal struct StatusCodeRange + { + public StatusCodeRange(int min) : this(min, min) { } + + public StatusCodeRange(int min, int max) + { + Min = min; + Max = max; + } + + public int Min { get; set; } + public int Max { get; set; } + } + + public static class StatusCodeRangeValidator + { + private static readonly string[] _validationMembers = new[] { nameof(AspNetRequestStatusTriggerSettings.StatusCodes)}; + + public static ValidationResult ValidateStatusCodes(object statusCodes) + { + StatusCodeRange[] statusCodeRanges = (StatusCodeRange[])statusCodes; + + Func validateStatusCode = (int statusCode) => statusCode >= 100 && statusCode < 600; + + foreach(StatusCodeRange statusCodeRange in statusCodeRanges) + { + if (statusCodeRange.Min > statusCodeRange.Max) + { + return new ValidationResult($"{nameof(StatusCodeRange.Min)} cannot be greater than {nameof(StatusCodeRange.Max)}", + _validationMembers); + } + + if (!validateStatusCode(statusCodeRange.Min) || !validateStatusCode(statusCodeRange.Max)) + { + return new ValidationResult($"Invalid status code", _validationMembers); + } + } + + return ValidationResult.Success; + } + } + +} diff --git a/src/Microsoft.Diagnostics.Monitoring.EventPipe/Triggers/AspNet/AspNetTrigger.cs b/src/Microsoft.Diagnostics.Monitoring.EventPipe/Triggers/AspNet/AspNetTrigger.cs new file mode 100644 index 0000000000..1bf45559d5 --- /dev/null +++ b/src/Microsoft.Diagnostics.Monitoring.EventPipe/Triggers/AspNet/AspNetTrigger.cs @@ -0,0 +1,133 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. +// See the LICENSE file in the project root for more information. + +using Microsoft.Diagnostics.Tracing; +using System; +using System.Collections.Generic; +using System.Collections.ObjectModel; +using System.ComponentModel.DataAnnotations; +using System.Diagnostics; +using System.Linq; + +namespace Microsoft.Diagnostics.Monitoring.EventPipe.Triggers.AspNet +{ + /// + /// Base class for all Asp.net triggers. + /// + internal abstract class AspNetTrigger : ITraceEventTrigger where TSettings : AspNetTriggerSettings + { + private const string Activity1Start = "Activity1/Start"; + private const string Activity1Stop = "Activity1/Stop"; + private static readonly Guid MicrosoftAspNetCoreHostingGuid = new Guid("{adb401e1-5296-51f8-c125-5fda75826144}"); + private static readonly Dictionary> _providerMap = new() + { + { MonitoringSourceConfiguration.DiagnosticSourceEventSource, new[]{ Activity1Start, Activity1Stop } }, + { MonitoringSourceConfiguration.MicrosoftAspNetCoreHostingEventSourceName, new[]{ "EventCounters" } } + }; + + private readonly GlobMatcher _matcher; + + protected AspNetTrigger(TSettings settings) + { + Settings = settings ?? 
throw new ArgumentNullException(nameof(settings)); + Validate(settings); + + _matcher = new GlobMatcher(settings.IncludePaths, settings.ExcludePaths); + } + + private static void Validate(TSettings settings) + { + ValidationContext context = new(settings); + Validator.ValidateObject(settings, context, validateAllProperties: true); + } + + public IReadOnlyDictionary> GetProviderEventMap() => _providerMap; + + public TSettings Settings { get; } + + protected virtual bool ActivityStart(DateTime timestamp, string activityId) => false; + + protected virtual bool ActivityStop(DateTime timestamp, string activityId, long durationTicks, int statusCode) => false; + + protected virtual bool Heartbeat(DateTime timestamp) => false; + + public bool HasSatisfiedCondition(TraceEvent traceEvent) + { + //We deconstruct the TraceEvent data to make it easy to write tests + DateTime timeStamp = traceEvent.TimeStamp; + + if (traceEvent.ProviderGuid == MicrosoftAspNetCoreHostingGuid) + { + int? statusCode = null; + long? duration = null; + AspnetTriggerEventType eventType = AspnetTriggerEventType.Start; + + System.Collections.IList arguments = (System.Collections.IList)traceEvent.PayloadValue(2); + string activityId = ExtractByIndex(arguments, 0); + string path = ExtractByIndex(arguments, 1); + + if (traceEvent.EventName == Activity1Stop) + { + statusCode = int.Parse(ExtractByIndex(arguments, 2)); + duration = long.Parse(ExtractByIndex(arguments, 3)); + eventType = AspnetTriggerEventType.Stop; + + Debug.Assert(statusCode != null, "Status code cannot be null."); + Debug.Assert(duration != null, "Duration cannot be null."); + } + + return HasSatisfiedCondition(timeStamp, eventType, activityId, path, statusCode, duration); + } + + //Heartbeat only + return HasSatisfiedCondition(timeStamp, eventType: AspnetTriggerEventType.Heartbeat, activityId: null, path: null, statusCode: null, duration: null); + + } + + /// + /// This method is to enable testing. + /// + internal bool HasSatisfiedCondition(DateTime timestamp, AspnetTriggerEventType eventType, string activityId, string path, int? statusCode, long? duration) + { + if (eventType == AspnetTriggerEventType.Heartbeat) + { + return Heartbeat(timestamp); + } + + if (!_matcher.Match(path)) + { + //No need to update counts if the path is excluded. + return false; + } + + if (eventType == AspnetTriggerEventType.Start) + { + return ActivityStart(timestamp, activityId); + } + else if (eventType == AspnetTriggerEventType.Stop) + { + return ActivityStop(timestamp, activityId, duration.Value, statusCode.Value); + } + return false; + } + + private static string ExtractByIndex(System.Collections.IList arguments, int index) + { + IEnumerable> values = (IEnumerable>)arguments[index]; + //The data is internally organized as two KeyValuePair entries, + //The first entry is { Key, "KeyValue"} + //The second is { Value, "Value"} + //e.g. + //{{ Key:"StatusCode", Value:"200" }} + return (string)values.Last().Value; + } + } + + internal enum AspnetTriggerEventType + { + Start, + Stop, + Heartbeat + } +} diff --git a/src/Microsoft.Diagnostics.Monitoring.EventPipe/Triggers/AspNet/AspNetTriggerFactories.cs b/src/Microsoft.Diagnostics.Monitoring.EventPipe/Triggers/AspNet/AspNetTriggerFactories.cs new file mode 100644 index 0000000000..300938e36d --- /dev/null +++ b/src/Microsoft.Diagnostics.Monitoring.EventPipe/Triggers/AspNet/AspNetTriggerFactories.cs @@ -0,0 +1,25 @@ +// Licensed to the .NET Foundation under one or more agreements. 
+// The .NET Foundation licenses this file to you under the MIT license. +// See the LICENSE file in the project root for more information. + +using System; +using System.Collections.Generic; +using System.Text; + +namespace Microsoft.Diagnostics.Monitoring.EventPipe.Triggers.AspNet +{ + internal sealed class AspNetRequestCountTriggerFactory : ITraceEventTriggerFactory + { + public ITraceEventTrigger Create(AspNetRequestCountTriggerSettings settings) => new AspNetRequestCountTrigger(settings); + } + + internal sealed class AspNetRequestDurationTriggerFactory : ITraceEventTriggerFactory + { + public ITraceEventTrigger Create(AspNetRequestDurationTriggerSettings settings) => new AspNetRequestDurationTrigger(settings); + } + + internal sealed class AspNetRequestStatusTriggerFactory : ITraceEventTriggerFactory + { + public ITraceEventTrigger Create(AspNetRequestStatusTriggerSettings settings) => new AspNetRequestStatusTrigger(settings); + } +} diff --git a/src/Microsoft.Diagnostics.Monitoring.EventPipe/Triggers/AspNet/AspNetTriggerSettings.cs b/src/Microsoft.Diagnostics.Monitoring.EventPipe/Triggers/AspNet/AspNetTriggerSettings.cs new file mode 100644 index 0000000000..7e088df49b --- /dev/null +++ b/src/Microsoft.Diagnostics.Monitoring.EventPipe/Triggers/AspNet/AspNetTriggerSettings.cs @@ -0,0 +1,73 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. +// See the LICENSE file in the project root for more information. + +using System; +using System.Collections.Generic; +using System.ComponentModel.DataAnnotations; +using System.Linq; +using System.Text; + +namespace Microsoft.Diagnostics.Monitoring.EventPipe.Triggers.AspNet +{ + /// + /// Base class for all Asp.net trigger settings. + /// + internal class AspNetTriggerSettings + { + public const string SlidingWindowDuration_MaxValue = "1.00:00:00"; // 1 day + public const string SlidingWindowDuration_MinValue = "00:00:01"; // 1 second + + /// + /// The sliding duration in which an Asp.net request trigger condition must occur. + /// + [Range(typeof(TimeSpan), SlidingWindowDuration_MinValue, SlidingWindowDuration_MaxValue)] + public TimeSpan SlidingWindowDuration { get; set; } + + /// + /// The amount of requests that must accumulate in the sliding window and meet the trigger condition. + /// Note that requests that do not meet the condition do NOT reset the count. + /// + [Range(1, long.MaxValue)] + public long RequestCount { get; set; } + + /// + /// List of request paths to include in the trigger condition, such as "/" and "/About". + /// + [CustomValidation(typeof(IncludesPathValidator), nameof(IncludesPathValidator.ValidatePath))] + public string[] IncludePaths { get; set; } + + /// + /// List of request paths to exclude in the trigger condition. + /// + [CustomValidation(typeof(ExcludesPathValidator), nameof(ExcludesPathValidator.ValidatePath))] + public string[] ExcludePaths { get; set; } + } + + internal static class PathValidator + { + public static ValidationResult ValidatePath(string[] paths, string[] members) + { + //While not an error, using *** or more causes confusing and unexpected matching. 
+ if (paths?.Any(p => p.IndexOf("***", StringComparison.Ordinal) >= 0) == true) + { + return new ValidationResult("Only * or **/ wildcard chararcters are allowed.", members); + } + return ValidationResult.Success; + } + } + + public static class IncludesPathValidator + { + private static readonly string[] _validationMembers = new[] { nameof(AspNetTriggerSettings.IncludePaths) }; + + public static ValidationResult ValidatePath(string[] paths) => PathValidator.ValidatePath(paths, _validationMembers); + } + + public static class ExcludesPathValidator + { + private static readonly string[] _validationMembers = new[] { nameof(AspNetTriggerSettings.ExcludePaths) }; + + public static ValidationResult ValidatePath(string[] paths) => PathValidator.ValidatePath(paths, _validationMembers); + } +} diff --git a/src/Microsoft.Diagnostics.Monitoring.EventPipe/Triggers/AspNet/GlobMatcher.cs b/src/Microsoft.Diagnostics.Monitoring.EventPipe/Triggers/AspNet/GlobMatcher.cs new file mode 100644 index 0000000000..ba647e3e72 --- /dev/null +++ b/src/Microsoft.Diagnostics.Monitoring.EventPipe/Triggers/AspNet/GlobMatcher.cs @@ -0,0 +1,73 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. +// See the LICENSE file in the project root for more information. + +using System; +using System.Collections.Generic; +using System.IO; +using System.Linq; +using System.Text; +using System.Text.RegularExpressions; + +namespace Microsoft.Diagnostics.Monitoring.EventPipe.Triggers.AspNet +{ + /// + /// Uses regular expressions to match globs. + /// + /// + /// Note there are some differences in glob patterns. Specifically **/ is supported, but ** + /// by itself is not. + /// + internal sealed class GlobMatcher + { + private readonly Regex _includeRegex; + private readonly Regex _excludeRegex; + + //We convert all **/ to a regex that matches 0 or more path segments + private const string EscapedGlobstarDirectory = @"\*\*/"; + private const string GlobstarDirectoryRegex = @"([^/]*/)*"; + + //Convert all * matches to any character other than a path separator + private const string EscapedWildcard = @"\*"; + private const string WildcardRegex = @"[^/]*"; + private const string StartRegex = "^"; + private const string EndRegex = "$"; + + private static readonly TimeSpan Timeout = TimeSpan.FromSeconds(2); + + public GlobMatcher(string[] includes, string[] excludes) + { + _includeRegex = CreateRegex(includes); + _excludeRegex = CreateRegex(excludes); + } + + private static Regex CreateRegex(string[] paths) + { + if (paths?.Length > 0) + { + return new Regex(string.Join("|", paths.Select(TransformPattern)), + RegexOptions.Compiled | RegexOptions.IgnoreCase | RegexOptions.CultureInvariant, matchTimeout: Timeout); + } + return null; + } + + private static string TransformPattern(string globPattern) => + string.Concat(StartRegex, + Regex.Escape(globPattern) + //Note the order is important to make sure globstar gets transformed first. 
+ .Replace(EscapedGlobstarDirectory, GlobstarDirectoryRegex) + .Replace(EscapedWildcard, WildcardRegex), + EndRegex); + + public bool Match(string input) + { + //Prioritize excludes over includes + if (_excludeRegex?.IsMatch(input) == true) + { + return false; + } + + return _includeRegex == null || _includeRegex.IsMatch(input); + } + } +} diff --git a/src/Microsoft.Diagnostics.Monitoring.EventPipe/Triggers/EventCounter/EventCounterTrigger.cs b/src/Microsoft.Diagnostics.Monitoring.EventPipe/Triggers/EventCounter/EventCounterTrigger.cs new file mode 100644 index 0000000000..622171d07c --- /dev/null +++ b/src/Microsoft.Diagnostics.Monitoring.EventPipe/Triggers/EventCounter/EventCounterTrigger.cs @@ -0,0 +1,91 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. +// See the LICENSE file in the project root for more information. + +using Microsoft.Diagnostics.Tracing; +using System; +using System.Collections.Concurrent; +using System.Collections.Generic; +using System.Collections.ObjectModel; +using System.ComponentModel.DataAnnotations; + +namespace Microsoft.Diagnostics.Monitoring.EventPipe.Triggers.EventCounter +{ + /// + /// Trigger that detects when the specified event source counter value is held + /// above, below, or between threshold values for a specified duration of time. + /// + internal sealed class EventCounterTrigger : + ITraceEventTrigger + { + // A cache of the list of events that are expected from the specified event provider. + // This is a mapping of event provider name to the event map returned by GetProviderEventMap. + // This allows caching of the event map between multiple instances of the trigger that + // use the same event provider as the source of counter events. + private static readonly ConcurrentDictionary>> _eventMapCache = + new ConcurrentDictionary>>(StringComparer.OrdinalIgnoreCase); + + // Only care for the EventCounters events from any of the specified providers, thus + // create a static readonly instance that is shared among all event maps. 
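GlobMatcher (completed just above) escapes each pattern and then rewrites the escaped **/ and * sequences into regex, anchors the result with ^ and $, and gives excludes priority over includes. A worked illustration of the transform plus a small usage sketch; the pattern and path strings are examples only:

// "/api/**/values" is escaped to "/api/\*\*/values" and becomes "^/api/([^/]*/)*values$",
//   matching "/api/values", "/api/v1/values", "/api/v1/eu/values", ...
// "/home/*" becomes "^/home/[^/]*$",
//   matching "/home/index" but not "/home/a/b".
var matcher = new GlobMatcher(
    includes: new[] { "/api/**/values" },
    excludes: new[] { "/api/health/values" });

bool included = matcher.Match("/api/v1/values");     // true: matches an include, not excluded
bool excluded = matcher.Match("/api/health/values"); // false: excludes win over includes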
+ private static readonly IReadOnlyCollection _eventProviderEvents = + new ReadOnlyCollection(new string[] { "EventCounters" }); + + private readonly CounterFilter _filter; + private readonly EventCounterTriggerImpl _impl; + private readonly string _providerName; + + public EventCounterTrigger(EventCounterTriggerSettings settings) + { + if (null == settings) + { + throw new ArgumentNullException(nameof(settings)); + } + + Validate(settings); + + _filter = new CounterFilter(settings.CounterIntervalSeconds); + _filter.AddFilter(settings.ProviderName, new string[] { settings.CounterName }); + + _impl = new EventCounterTriggerImpl(settings); + + _providerName = settings.ProviderName; + } + + public IReadOnlyDictionary> GetProviderEventMap() + { + return _eventMapCache.GetOrAdd(_providerName, CreateEventMapForProvider); + } + + public bool HasSatisfiedCondition(TraceEvent traceEvent) + { + // Filter to the counter of interest before forwarding to the implementation + if (traceEvent.TryGetCounterPayload(_filter, out ICounterPayload payload)) + { + return _impl.HasSatisfiedCondition(payload); + } + return false; + } + + public static MonitoringSourceConfiguration CreateConfiguration(EventCounterTriggerSettings settings) + { + Validate(settings); + + return new MetricSourceConfiguration(settings.CounterIntervalSeconds, new string[] { settings.ProviderName }); + } + + private static void Validate(EventCounterTriggerSettings settings) + { + ValidationContext context = new(settings); + Validator.ValidateObject(settings, context, validateAllProperties: true); + } + + private IReadOnlyDictionary> CreateEventMapForProvider(string providerName) + { + return new ReadOnlyDictionary>( + new Dictionary>() + { + { _providerName, _eventProviderEvents } + }); + } + } +} diff --git a/src/Microsoft.Diagnostics.Monitoring.EventPipe/Triggers/EventCounter/EventCounterTriggerFactory.cs b/src/Microsoft.Diagnostics.Monitoring.EventPipe/Triggers/EventCounter/EventCounterTriggerFactory.cs new file mode 100644 index 0000000000..f1ccb33b7b --- /dev/null +++ b/src/Microsoft.Diagnostics.Monitoring.EventPipe/Triggers/EventCounter/EventCounterTriggerFactory.cs @@ -0,0 +1,21 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. +// See the LICENSE file in the project root for more information. + +using System; +using System.Runtime; + +namespace Microsoft.Diagnostics.Monitoring.EventPipe.Triggers.EventCounter +{ + /// + /// The trigger factory for the . + /// + internal sealed class EventCounterTriggerFactory : + ITraceEventTriggerFactory + { + public ITraceEventTrigger Create(EventCounterTriggerSettings settings) + { + return new EventCounterTrigger(settings); + } + } +} diff --git a/src/Microsoft.Diagnostics.Monitoring.EventPipe/Triggers/EventCounter/EventCounterTriggerImpl.cs b/src/Microsoft.Diagnostics.Monitoring.EventPipe/Triggers/EventCounter/EventCounterTriggerImpl.cs new file mode 100644 index 0000000000..909e6452c0 --- /dev/null +++ b/src/Microsoft.Diagnostics.Monitoring.EventPipe/Triggers/EventCounter/EventCounterTriggerImpl.cs @@ -0,0 +1,92 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. +// See the LICENSE file in the project root for more information. 
+ +using System; + +namespace Microsoft.Diagnostics.Monitoring.EventPipe.Triggers.EventCounter +{ + // The core implementation of the EventCounter trigger that processes + // the trigger settings and evaluates the counter payload. Primary motivation + // for the implementation is for unit testability separate from TraceEvent. + internal sealed class EventCounterTriggerImpl + { + private readonly long _intervalTicks; + private readonly Func _valueFilter; + private readonly long _windowTicks; + + private long? _latestTicks; + private long? _targetTicks; + + public EventCounterTriggerImpl(EventCounterTriggerSettings settings) + { + if (null == settings) + { + throw new ArgumentNullException(nameof(settings)); + } + + if (settings.GreaterThan.HasValue) + { + double minValue = settings.GreaterThan.Value; + if (settings.LessThan.HasValue) + { + double maxValue = settings.LessThan.Value; + _valueFilter = value => value > minValue && value < maxValue; + } + else + { + _valueFilter = value => value > minValue; + } + } + else if (settings.LessThan.HasValue) + { + double maxValue = settings.LessThan.Value; + _valueFilter = value => value < maxValue; + } + + _intervalTicks = settings.CounterIntervalSeconds * TimeSpan.TicksPerSecond; + _windowTicks = settings.SlidingWindowDuration.Ticks; + } + + public bool HasSatisfiedCondition(ICounterPayload payload) + { + long payloadTimestampTicks = payload.Timestamp.Ticks; + long payloadIntervalTicks = (long)(payload.Interval * TimeSpan.TicksPerSecond); + + if (!_valueFilter(payload.Value)) + { + // Series was broken; reset state. + _latestTicks = null; + _targetTicks = null; + return false; + } + else if (!_targetTicks.HasValue) + { + // This is the first event in the series. Record latest and target times. + _latestTicks = payloadTimestampTicks; + // The target time should be the start of the first passing interval + the requisite time window. + // The start of the first passing interval is the payload time stamp - the interval time. + _targetTicks = payloadTimestampTicks - payloadIntervalTicks + _windowTicks; + } + else if (_latestTicks.Value + (1.5 * _intervalTicks) < payloadTimestampTicks) + { + // Detected that an event was skipped/dropped because the time between the current + // event and the previous is more that 150% of the requested interval; consecutive + // counter events should not have that large of an interval. Reset for current + // event to be first event in series. Record latest and target times. + _latestTicks = payloadTimestampTicks; + // The target time should be the start of the first passing interval + the requisite time window. + // The start of the first passing interval is the payload time stamp - the interval time. + _targetTicks = payloadTimestampTicks - payloadIntervalTicks + _windowTicks; + } + else + { + // Update latest time to the current event time. + _latestTicks = payloadTimestampTicks; + } + + // Trigger is satisfied when the latest time is larger than the target time. + return _latestTicks >= _targetTicks; + } + } +} diff --git a/src/Microsoft.Diagnostics.Monitoring.EventPipe/Triggers/EventCounter/EventCounterTriggerSettings.cs b/src/Microsoft.Diagnostics.Monitoring.EventPipe/Triggers/EventCounter/EventCounterTriggerSettings.cs new file mode 100644 index 0000000000..88331ecd5f --- /dev/null +++ b/src/Microsoft.Diagnostics.Monitoring.EventPipe/Triggers/EventCounter/EventCounterTriggerSettings.cs @@ -0,0 +1,95 @@ +// Licensed to the .NET Foundation under one or more agreements. 
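EventCounterTriggerImpl (above) treats the first passing sample as covering the interval that ended at its timestamp, so the trigger target is that interval's start plus the sliding window, and a gap of more than 1.5 intervals between samples resets the series. A small worked example of that arithmetic; the concrete times are illustrative:

// Settings: CounterIntervalSeconds = 5, SlidingWindowDuration = 20 seconds, value filter passes.
// First passing sample arrives stamped 12:00:05.
//   target = 12:00:05 - 5s (interval) + 20s (window) = 12:00:20
// Samples at 12:00:10 and 12:00:15 keep the series alive (gap <= 1.5 * 5s = 7.5s).
// The sample at 12:00:20 makes latest >= target, so HasSatisfiedCondition returns true.
// If a sample had been dropped and the next arrived at 12:00:18 (gap 8s > 7.5s), the series
// would restart with target = 12:00:18 - 5s + 20s = 12:00:33.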
+// The .NET Foundation licenses this file to you under the MIT license. +// See the LICENSE file in the project root for more information. + +using System; +using System.Collections.Generic; +using System.ComponentModel.DataAnnotations; + +namespace Microsoft.Diagnostics.Monitoring.EventPipe.Triggers.EventCounter +{ + /// + /// The settings for the . + /// + internal sealed class EventCounterTriggerSettings : + IValidatableObject + { + internal const int CounterIntervalSeconds_MaxValue = 24 * 60 * 60; // 1 day + internal const int CounterIntervalSeconds_MinValue = 1; // 1 second + + internal const string EitherGreaterThanLessThanMessage = "Either the " + nameof(GreaterThan) + " field or the " + nameof(LessThan) + " field are required."; + + internal const string GreaterThanMustBeLessThanLessThanMessage = "The " + nameof(GreaterThan) + " field must be less than the " + nameof(LessThan) + " field."; + + internal const string SlidingWindowDuration_MaxValue = "1.00:00:00"; // 1 day + internal const string SlidingWindowDuration_MinValue = "00:00:01"; // 1 second + + /// + /// The name of the event provider from which counters will be monitored. + /// + [Required] + public string ProviderName { get; set; } + + /// + /// The name of the event counter from the event provider to monitor. + /// + [Required] + public string CounterName { get; set; } + + /// + /// The lower bound threshold that the event counter value must hold for + /// the duration specified in . + /// + public double? GreaterThan { get; set; } + + /// + /// The upper bound threshold that the event counter value must hold for + /// the duration specified in . + /// + public double? LessThan { get; set; } + + /// + /// The sliding duration of time in which the event counter must maintain a value + /// above, below, or between the thresholds specified by and . + /// + [Range(typeof(TimeSpan), SlidingWindowDuration_MinValue, SlidingWindowDuration_MaxValue)] + public TimeSpan SlidingWindowDuration { get; set; } + + /// + /// The sampling interval of the event counter. + /// + [Range(CounterIntervalSeconds_MinValue, CounterIntervalSeconds_MaxValue)] + public int CounterIntervalSeconds { get; set; } + + IEnumerable IValidatableObject.Validate(ValidationContext validationContext) + { + List results = new(); + + if (!GreaterThan.HasValue && !LessThan.HasValue) + { + results.Add(new ValidationResult( + EitherGreaterThanLessThanMessage, + new[] + { + nameof(GreaterThan), + nameof(LessThan) + })); + } + else if (GreaterThan.HasValue && LessThan.HasValue) + { + if (GreaterThan.Value >= LessThan.Value) + { + results.Add(new ValidationResult( + GreaterThanMustBeLessThanLessThanMessage, + new[] + { + nameof(GreaterThan), + nameof(LessThan) + })); + } + } + + return results; + } + } +} diff --git a/src/Microsoft.Diagnostics.Monitoring.EventPipe/Triggers/ITraceEventTrigger.cs b/src/Microsoft.Diagnostics.Monitoring.EventPipe/Triggers/ITraceEventTrigger.cs new file mode 100644 index 0000000000..3222767250 --- /dev/null +++ b/src/Microsoft.Diagnostics.Monitoring.EventPipe/Triggers/ITraceEventTrigger.cs @@ -0,0 +1,31 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. +// See the LICENSE file in the project root for more information. + +using Microsoft.Diagnostics.Tracing; +using System.Collections.Generic; + +namespace Microsoft.Diagnostics.Monitoring.EventPipe.Triggers +{ + /// + /// Interface for all -based triggers. 
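EventCounterTriggerSettings above combines attribute checks ([Required], numeric [Range], and the string-encoded TimeSpan range) with IValidatableObject for the cross-field rules, and the trigger runs them through Validator.ValidateObject with validateAllProperties: true. A minimal self-contained sketch of that validation recipe; the ThresholdSettings type and its property names are hypothetical:

using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;

public sealed class ThresholdSettings : IValidatableObject
{
    [Required]
    public string CounterName { get; set; }

    [Range(1, 86400)]                                   // 1 second to 1 day
    public int IntervalSeconds { get; set; }

    [Range(typeof(TimeSpan), "00:00:01", "1.00:00:00")] // TimeSpan ranges are expressed as strings
    public TimeSpan Window { get; set; }

    public double? GreaterThan { get; set; }
    public double? LessThan { get; set; }

    // Cross-field rules that attributes cannot express; Validator only calls this
    // after the attribute-based checks have passed.
    public IEnumerable<ValidationResult> Validate(ValidationContext validationContext)
    {
        if (!GreaterThan.HasValue && !LessThan.HasValue)
        {
            yield return new ValidationResult(
                "Either GreaterThan or LessThan is required.",
                new[] { nameof(GreaterThan), nameof(LessThan) });
        }
        else if (GreaterThan.HasValue && LessThan.HasValue && GreaterThan.Value >= LessThan.Value)
        {
            yield return new ValidationResult(
                "GreaterThan must be less than LessThan.",
                new[] { nameof(GreaterThan), nameof(LessThan) });
        }
    }
}

// Throws ValidationException on the first failure, mirroring the trigger's Validate helper:
// Validator.ValidateObject(settings, new ValidationContext(settings), validateAllProperties: true);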
+ /// + internal interface ITraceEventTrigger + { + /// + /// Mapping of event providers to event names in which the trigger has an interest. + /// + /// + /// The method may return null to signify that all events can be forwarded to the trigger. + /// Each event provider entry also may have a null or empty list of event names to + /// signify that all events from the provider can be forwarded to the trigger. + /// + IReadOnlyDictionary> GetProviderEventMap(); + + /// + /// Check if the given satisfies the condition + /// described by the trigger. + /// + bool HasSatisfiedCondition(TraceEvent traceEvent); + } +} diff --git a/src/Microsoft.Diagnostics.Monitoring.EventPipe/Triggers/ITraceEventTriggerFactory.cs b/src/Microsoft.Diagnostics.Monitoring.EventPipe/Triggers/ITraceEventTriggerFactory.cs new file mode 100644 index 0000000000..51ab73a92a --- /dev/null +++ b/src/Microsoft.Diagnostics.Monitoring.EventPipe/Triggers/ITraceEventTriggerFactory.cs @@ -0,0 +1,18 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. +// See the LICENSE file in the project root for more information. + +namespace Microsoft.Diagnostics.Monitoring.EventPipe.Triggers +{ + /// + /// Interface for creating a new instance of the associated + /// trigger from the specified settings. + /// + internal interface ITraceEventTriggerFactory + { + /// + /// Creates a new instance of the associated trigger from the . + /// + ITraceEventTrigger Create(TSettings settings); + } +} diff --git a/src/Microsoft.Diagnostics.Monitoring.EventPipe/Triggers/Pipelines/EventPipeTriggerPipeline.cs b/src/Microsoft.Diagnostics.Monitoring.EventPipe/Triggers/Pipelines/EventPipeTriggerPipeline.cs new file mode 100644 index 0000000000..cec4da990e --- /dev/null +++ b/src/Microsoft.Diagnostics.Monitoring.EventPipe/Triggers/Pipelines/EventPipeTriggerPipeline.cs @@ -0,0 +1,95 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. +// See the LICENSE file in the project root for more information. + +using Microsoft.Diagnostics.NETCore.Client; +using Microsoft.Diagnostics.Tracing; +using System; +using System.Threading; +using System.Threading.Tasks; + +namespace Microsoft.Diagnostics.Monitoring.EventPipe.Triggers.Pipelines +{ + /// + /// Starts an event pipe session using the specified configuration and invokes a callback + /// when events from the session satisfy the specified trigger. + /// + /// The settings type of the trace event trigger. + internal sealed class EventPipeTriggerPipeline : + EventSourcePipeline> + { + // The callback as provided to the pipeline. Invoked when the trigger condition is satisfied. + // The trigger condition may be satisfied more than once (thus invoking the callback more than + // once) over the lifetime of the pipeline, depending on the implementation of the trigger. + private readonly Action _callback; + + /// + /// The pipeline used to monitor the trace event source from the event pipe using the trigger + /// specified in the settings of the current pipeline. + /// + private TraceEventTriggerPipeline _pipeline; + + // The trigger implementation used to detect a condition in the trace event source. 
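ITraceEventTrigger and the factory interface above are the entire trigger contract: advertise the provider/event pairs of interest (null meaning "everything"), then answer HasSatisfiedCondition per event. Both interfaces are internal, so a custom trigger has to live in this assembly or in one granted InternalsVisibleTo. A minimal sketch of a trigger that fires on every event from one provider; the sample type names are hypothetical, and the generic settings parameter on the factory is assumed from its Create(TSettings) signature:

using System;
using System.Collections.Generic;
using Microsoft.Diagnostics.Tracing;

namespace Microsoft.Diagnostics.Monitoring.EventPipe.Triggers.Samples
{
    internal sealed class SingleProviderTriggerSettings
    {
        public string ProviderName { get; set; }
    }

    internal sealed class SingleProviderTrigger : ITraceEventTrigger
    {
        private readonly string _providerName;

        public SingleProviderTrigger(SingleProviderTriggerSettings settings)
        {
            if (settings == null)
            {
                throw new ArgumentNullException(nameof(settings));
            }
            _providerName = settings.ProviderName;
        }

        // A null event list means "forward every event from this provider".
        public IReadOnlyDictionary<string, IReadOnlyCollection<string>> GetProviderEventMap() =>
            new Dictionary<string, IReadOnlyCollection<string>> { { _providerName, null } };

        // The pipeline only forwards events that passed the provider map, so any event
        // that reaches this point satisfies the condition.
        public bool HasSatisfiedCondition(TraceEvent traceEvent) => true;
    }

    internal sealed class SingleProviderTriggerFactory :
        ITraceEventTriggerFactory<SingleProviderTriggerSettings>
    {
        public ITraceEventTrigger Create(SingleProviderTriggerSettings settings) =>
            new SingleProviderTrigger(settings);
    }
}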
+ private ITraceEventTrigger _trigger; + + public EventPipeTriggerPipeline(DiagnosticsClient client, EventPipeTriggerPipelineSettings settings, Action callback) : + base(client, settings) + { + if (null == Settings.Configuration) + { + throw new ArgumentException(FormattableString.Invariant($"The {nameof(settings.Configuration)} property on the settings must not be null."), nameof(settings)); + } + + if (null == Settings.TriggerFactory) + { + throw new ArgumentException(FormattableString.Invariant($"The {nameof(settings.TriggerFactory)} property on the settings must not be null."), nameof(settings)); + } + + _callback = callback; + } + + protected override MonitoringSourceConfiguration CreateConfiguration() + { + return Settings.Configuration; + } + + protected override async Task OnEventSourceAvailable(EventPipeEventSource eventSource, Func stopSessionAsync, CancellationToken token) + { + _trigger = Settings.TriggerFactory.Create(Settings.TriggerSettings); + + _pipeline = new TraceEventTriggerPipeline(eventSource, _trigger, _callback); + + await _pipeline.RunAsync(token).ConfigureAwait(false); + } + + protected override async Task OnStop(CancellationToken token) + { + if (null != _pipeline) + { + await _pipeline.StopAsync(token).ConfigureAwait(false); + } + await base.OnStop(token); + } + + protected override async Task OnCleanup() + { + if (null != _pipeline) + { + await _pipeline.DisposeAsync().ConfigureAwait(false); + } + + // Disposal is not part of the ITraceEventTrigger interface; check the implementation + // of the trigger to see if it implements one of the disposal interfaces and call it. + if (_trigger is IAsyncDisposable asyncDisposableTrigger) + { + await asyncDisposableTrigger.DisposeAsync().ConfigureAwait(false); + } + else if (_trigger is IDisposable disposableTrigger) + { + disposableTrigger.Dispose(); + } + + await base.OnCleanup(); + } + } +} diff --git a/src/Microsoft.Diagnostics.Monitoring.EventPipe/Triggers/Pipelines/EventPipeTriggerPipelineSettings.cs b/src/Microsoft.Diagnostics.Monitoring.EventPipe/Triggers/Pipelines/EventPipeTriggerPipelineSettings.cs new file mode 100644 index 0000000000..c200c2cdc4 --- /dev/null +++ b/src/Microsoft.Diagnostics.Monitoring.EventPipe/Triggers/Pipelines/EventPipeTriggerPipelineSettings.cs @@ -0,0 +1,26 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. +// See the LICENSE file in the project root for more information. + +namespace Microsoft.Diagnostics.Monitoring.EventPipe.Triggers.Pipelines +{ + internal sealed class EventPipeTriggerPipelineSettings : + EventSourcePipelineSettings + { + /// + /// The event pipe configuration used to collect trace event information for the trigger + /// to use to determine if the trigger condition is satisfied. + /// + public MonitoringSourceConfiguration Configuration { get; set; } + + /// + /// The factory that produces the trigger instantiation. + /// + public ITraceEventTriggerFactory TriggerFactory { get; set; } + + /// + /// The settings to pass to the trigger factory when creating the trigger. 
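Because disposal is deliberately not part of ITraceEventTrigger, OnCleanup above probes the trigger for IAsyncDisposable first and only then falls back to IDisposable. That "async-preferred" disposal probe is a small reusable idiom; a standalone sketch (the helper name is made up):

using System;
using System.Threading.Tasks;

internal static class DisposalHelper
{
    // Prefer IAsyncDisposable so asynchronous cleanup is awaited rather than blocked on,
    // fall back to IDisposable, and ignore objects that implement neither.
    public static async ValueTask DisposeAnyAsync(object instance)
    {
        if (instance is IAsyncDisposable asyncDisposable)
        {
            await asyncDisposable.DisposeAsync().ConfigureAwait(false);
        }
        else if (instance is IDisposable disposable)
        {
            disposable.Dispose();
        }
    }
}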
+ /// + public TSettings TriggerSettings { get; set; } + } +} diff --git a/src/Microsoft.Diagnostics.Monitoring.EventPipe/Triggers/Pipelines/TraceEventTriggerPipeline.cs b/src/Microsoft.Diagnostics.Monitoring.EventPipe/Triggers/Pipelines/TraceEventTriggerPipeline.cs new file mode 100644 index 0000000000..a6bd87db6c --- /dev/null +++ b/src/Microsoft.Diagnostics.Monitoring.EventPipe/Triggers/Pipelines/TraceEventTriggerPipeline.cs @@ -0,0 +1,121 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. +// See the LICENSE file in the project root for more information. + +using Microsoft.Diagnostics.Tracing; +using System; +using System.Collections.Generic; +using System.Linq; +using System.Threading; +using System.Threading.Tasks; + +namespace Microsoft.Diagnostics.Monitoring.EventPipe.Triggers.Pipelines +{ + /// + /// A pipeline that detects a condition (as specified by the trigger) within the event stream + /// of the specified event source. The callback is invoked for each instance of the detected condition. + /// + internal sealed class TraceEventTriggerPipeline : Pipeline + { + // The callback as provided to the pipeline. Invoked when the trigger condition is satisfied. + // The trigger condition may be satisfied more than once (thus invoking the callback more than + // once) over the lifetime of the pipeline, depending on the implementation of the trigger. + private readonly Action _callback; + + // Completion source to help coordinate running and stopping the pipeline. + private readonly TaskCompletionSource _completionSource = + new TaskCompletionSource(TaskCreationOptions.RunContinuationsAsynchronously); + + // The source of the trace events to monitor. + private readonly TraceEventSource _eventSource; + + // The trigger implementation used to detect a condition in the trace event source. + private readonly ITraceEventTrigger _trigger; + + public TraceEventTriggerPipeline(TraceEventSource eventSource, ITraceEventTrigger trigger, Action callback) + { + _callback = callback ?? throw new ArgumentNullException(nameof(callback)); + _eventSource = eventSource ?? throw new ArgumentNullException(nameof(eventSource)); + _trigger = trigger ?? throw new ArgumentNullException(nameof(trigger)); + + IReadOnlyDictionary> providerEventMapFromTrigger = + _trigger.GetProviderEventMap(); + + if (null == providerEventMapFromTrigger) + { + // Allow all events to be forwarded to the trigger + _eventSource.Dynamic.AddCallbackForProviderEvents( + null, + TraceEventCallback); + } + else + { + // Event providers should be compared case-insensitive whereas counter names should be compared case-sensative. + // Make a copy of the provided map and change the comparers as appropriate. + IDictionary> providerEventMap = providerEventMapFromTrigger.ToDictionary( + kvp => kvp.Key, + //Accept null or empty, both indicating that any event will be accepted. + kvp => (kvp.Value == null) ? null : (kvp.Value.Count == 0) ? null : kvp.Value.ToArray().AsEnumerable(), + StringComparer.OrdinalIgnoreCase); + + // Only allow events described in the mapping to be forwarded to the trigger. + // If a provider has no events specified, then all events from that provider are forwarded. 
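Putting the new pieces together, a caller inside this assembly (or dotnet-monitor, via the IVT added in this change) wires a trigger up by pairing EventPipeTriggerPipelineSettings with a factory, trigger settings, and a callback. A hedged usage sketch: RunAsync and DisposeAsync are assumed to come from the shared Pipeline base class (its definition is outside this diff, although the nested pipeline above calls both), and Duration is assumed to be a property inherited from EventSourcePipelineSettings.

using System;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Diagnostics.NETCore.Client;
using Microsoft.Diagnostics.Monitoring.EventPipe.Triggers.EventCounter;
using Microsoft.Diagnostics.Monitoring.EventPipe.Triggers.Pipelines;

internal static class TriggerPipelineSample
{
    public static async Task WatchCpuAsync(int processId, CancellationToken token)
    {
        var triggerSettings = new EventCounterTriggerSettings
        {
            ProviderName = "System.Runtime",
            CounterName = "cpu-usage",
            GreaterThan = 80,
            CounterIntervalSeconds = 5,
            SlidingWindowDuration = TimeSpan.FromSeconds(30)
        };

        var pipelineSettings = new EventPipeTriggerPipelineSettings<EventCounterTriggerSettings>
        {
            // Duration is assumed to come from the base EventSourcePipelineSettings (outside this diff).
            Duration = Timeout.InfiniteTimeSpan,
            Configuration = EventCounterTrigger.CreateConfiguration(triggerSettings),
            TriggerFactory = new EventCounterTriggerFactory(),
            TriggerSettings = triggerSettings
        };

        var client = new DiagnosticsClient(processId);
        await using var pipeline = new EventPipeTriggerPipeline<EventCounterTriggerSettings>(
            client,
            pipelineSettings,
            traceEvent => Console.WriteLine($"CPU condition satisfied at {traceEvent.TimeStamp:O}"));

        // RunAsync is assumed from the shared Pipeline base class, mirroring the nested
        // pipeline's RunAsync call above.
        await pipeline.RunAsync(token);
    }
}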
+ _eventSource.Dynamic.AddCallbackForProviderEvents( + (string providerName, string eventName) => + { + if (!providerEventMap.TryGetValue(providerName, out IEnumerable eventNames)) + { + return EventFilterResponse.RejectProvider; + } + else if (null == eventNames) + { + return EventFilterResponse.AcceptEvent; + } + else if (!eventNames.Contains(eventName, StringComparer.Ordinal)) + { + return EventFilterResponse.RejectEvent; + } + return EventFilterResponse.AcceptEvent; + }, + TraceEventCallback); + } + } + + protected override async Task OnRun(CancellationToken token) + { + using var _ = token.Register(() => _completionSource.TrySetCanceled(token)); + + await _completionSource.Task.ConfigureAwait(false); + } + + protected override Task OnStop(CancellationToken token) + { + _completionSource.TrySetResult(null); + + return base.OnStop(token); + } + + protected override Task OnCleanup() + { + _completionSource.TrySetCanceled(); + + _eventSource.Dynamic.RemoveCallback(TraceEventCallback); + + return base.OnCleanup(); + } + + private void TraceEventCallback(TraceEvent obj) + { + // Check if processing of in-flight events should be ignored + // due to pipeline in the midst of stopping. + if (!_completionSource.Task.IsCompleted) + { + // If the trigger condition has been satified, invoke the callback + if (_trigger.HasSatisfiedCondition(obj)) + { + _callback(obj); + } + } + } + } +} diff --git a/src/Microsoft.Diagnostics.Monitoring.EventPipe/Triggers/SlidingWindow.cs b/src/Microsoft.Diagnostics.Monitoring.EventPipe/Triggers/SlidingWindow.cs new file mode 100644 index 0000000000..4ce4fc3549 --- /dev/null +++ b/src/Microsoft.Diagnostics.Monitoring.EventPipe/Triggers/SlidingWindow.cs @@ -0,0 +1,73 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. +// See the LICENSE file in the project root for more information. + +using System; +using System.Collections.Generic; +using System.Diagnostics; +using System.Text; + +namespace Microsoft.Diagnostics.Monitoring.EventPipe.Triggers +{ + internal sealed class SlidingWindow + { + //Any events that occur within this interval are merged. + private readonly TimeSpan _interval = TimeSpan.FromSeconds(1); + private readonly LinkedList<(DateTime Timestamp, int Count)> _timeData = new(); + private readonly TimeSpan _window; + + public SlidingWindow(TimeSpan slidingWindow) + { + _window = slidingWindow; + } + + public int Count { get; private set; } + + public void AddDataPoint(DateTime timestamp) + { + //ASSUMPTION! We are always expecting to get events that are equal or increasing in time. + if (_timeData.Last == null) + { + _timeData.AddLast((timestamp, 1)); + Count++; + return; + } + + (DateTime lastTimestamp, int lastCount) = _timeData.Last.Value; + + Debug.Assert(timestamp >= lastTimestamp, "Unexpected timestamp"); + + //Coalesce close points together + if (timestamp - lastTimestamp < _interval) + { + _timeData.Last.Value = (lastTimestamp, lastCount + 1); + Count++; + //No need for further processing since we can't fall out of the sliding window. 
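TraceEventTriggerPipeline above coordinates run/stop through one TaskCompletionSource created with RunContinuationsAsynchronously (so completing it never runs the awaiting continuation inline on the event-dispatch thread), a cancellation registration that cancels it, and an IsCompleted check that drops in-flight callbacks once stopping has begun. A minimal standalone sketch of that coordination shape, with made-up names:

using System;
using System.Threading;
using System.Threading.Tasks;

internal sealed class RunUntilStopped
{
    // RunContinuationsAsynchronously keeps TrySetResult/TrySetCanceled from running the
    // awaiting continuation inline on the caller's (event-dispatch) thread.
    private readonly TaskCompletionSource<object> _completion =
        new TaskCompletionSource<object>(TaskCreationOptions.RunContinuationsAsynchronously);

    public async Task RunAsync(CancellationToken token)
    {
        using var _ = token.Register(() => _completion.TrySetCanceled(token));
        await _completion.Task.ConfigureAwait(false);
    }

    public void Stop() => _completion.TrySetResult(null);

    public void OnEvent(Action work)
    {
        // Ignore in-flight events once the pipeline has started stopping.
        if (!_completion.Task.IsCompleted)
        {
            work();
        }
    }
}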
+ return; + } + + _timeData.AddLast((timestamp, 1)); + Count++; + + while (_timeData.First != null) + { + (DateTime firstTimestamp, int firstCount) = _timeData.First.Value; + if (timestamp - firstTimestamp > _window) + { + _timeData.RemoveFirst(); + Count -= firstCount; + } + else + { + break; + } + } + } + + public void Clear() + { + _timeData.Clear(); + Count = 0; + } + } +} diff --git a/src/Microsoft.Diagnostics.Monitoring/Microsoft.Diagnostics.Monitoring.csproj b/src/Microsoft.Diagnostics.Monitoring/Microsoft.Diagnostics.Monitoring.csproj index 3dcce22cc6..edbd3ef9a6 100644 --- a/src/Microsoft.Diagnostics.Monitoring/Microsoft.Diagnostics.Monitoring.csproj +++ b/src/Microsoft.Diagnostics.Monitoring/Microsoft.Diagnostics.Monitoring.csproj @@ -27,10 +27,8 @@ - - diff --git a/src/Microsoft.Diagnostics.NETCore.Client/DiagnosticsClient/DiagnosticsClient.cs b/src/Microsoft.Diagnostics.NETCore.Client/DiagnosticsClient/DiagnosticsClient.cs index 62391f376a..d77497edf8 100644 --- a/src/Microsoft.Diagnostics.NETCore.Client/DiagnosticsClient/DiagnosticsClient.cs +++ b/src/Microsoft.Diagnostics.NETCore.Client/DiagnosticsClient/DiagnosticsClient.cs @@ -7,7 +7,6 @@ using System.Globalization; using System.IO; using System.Linq; -using System.Runtime.InteropServices; using System.Text.RegularExpressions; using System.Threading; using System.Threading.Tasks; @@ -68,7 +67,7 @@ internal Task WaitForConnectionAsync(CancellationToken token) /// public EventPipeSession StartEventPipeSession(IEnumerable providers, bool requestRundown = true, int circularBufferMB = 256) { - return new EventPipeSession(_endpoint, providers, requestRundown, circularBufferMB); + return EventPipeSession.Start(_endpoint, providers, requestRundown, circularBufferMB); } /// @@ -82,7 +81,37 @@ public EventPipeSession StartEventPipeSession(IEnumerable pro /// public EventPipeSession StartEventPipeSession(EventPipeProvider provider, bool requestRundown = true, int circularBufferMB = 256) { - return new EventPipeSession(_endpoint, new[] { provider }, requestRundown, circularBufferMB); + return EventPipeSession.Start(_endpoint, new[] { provider }, requestRundown, circularBufferMB); + } + + /// + /// Start tracing the application and return an EventPipeSession object + /// + /// An IEnumerable containing the list of Providers to turn on. + /// If true, request rundown events from the runtime + /// The size of the runtime's buffer for collecting events in MB + /// The token to monitor for cancellation requests. + /// + /// An EventPipeSession object representing the EventPipe session that just started. + /// + internal Task StartEventPipeSessionAsync(IEnumerable providers, bool requestRundown, int circularBufferMB, CancellationToken token) + { + return EventPipeSession.StartAsync(_endpoint, providers, requestRundown, circularBufferMB, token); + } + + /// + /// Start tracing the application and return an EventPipeSession object + /// + /// An EventPipeProvider to turn on. + /// If true, request rundown events from the runtime + /// The size of the runtime's buffer for collecting events in MB + /// The token to monitor for cancellation requests. + /// + /// An EventPipeSession object representing the EventPipe session that just started. 
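Returning to the SlidingWindow type completed above: data points within one second of the newest node are coalesced into that node, and every time a new node is appended, nodes older than the window are evicted from the front. A short walk-through of the expected counts for a 5-second window (the usage assumes same-assembly visibility, since the type is internal):

using System;
using Microsoft.Diagnostics.Monitoring.EventPipe.Triggers;

internal static class SlidingWindowWalkthrough
{
    public static void Run()
    {
        var window = new SlidingWindow(TimeSpan.FromSeconds(5));
        DateTime t0 = DateTime.UtcNow;

        window.AddDataPoint(t0);                         // Count == 1
        window.AddDataPoint(t0.AddMilliseconds(500));    // coalesced into the first node; Count == 2
        window.AddDataPoint(t0.AddSeconds(2));           // new node; Count == 3
        window.AddDataPoint(t0.AddSeconds(10));          // both earlier nodes fall outside the window; Count == 1
        window.Clear();                                  // Count == 0
    }
}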
+ /// + internal Task StartEventPipeSessionAsync(EventPipeProvider provider, bool requestRundown, int circularBufferMB, CancellationToken token) + { + return EventPipeSession.StartAsync(_endpoint, new[] { provider }, requestRundown, circularBufferMB, token); } /// @@ -93,26 +122,23 @@ public EventPipeSession StartEventPipeSession(EventPipeProvider provider, bool r /// When set to true, display the dump generation debug log to the console. public void WriteDump(DumpType dumpType, string dumpPath, bool logDumpGeneration = false) { - if (string.IsNullOrEmpty(dumpPath)) - throw new ArgumentNullException($"{nameof(dumpPath)} required"); + IpcMessage request = CreateWriteDumpMessage(dumpType, dumpPath, logDumpGeneration); + IpcMessage response = IpcClient.SendMessage(_endpoint, request); + ValidateResponseMessage(response, nameof(WriteDump)); + } - byte[] payload = SerializePayload(dumpPath, (uint)dumpType, logDumpGeneration); - IpcMessage message = new IpcMessage(DiagnosticsServerCommandSet.Dump, (byte)DumpCommandId.GenerateCoreDump, payload); - IpcMessage response = IpcClient.SendMessage(_endpoint, message); - switch ((DiagnosticsServerResponseId)response.Header.CommandId) - { - case DiagnosticsServerResponseId.Error: - uint hr = BitConverter.ToUInt32(response.Payload, 0); - if (hr == (uint)DiagnosticsIpcError.UnknownCommand) - { - throw new UnsupportedCommandException($"Unsupported operating system: {RuntimeInformation.OSDescription}"); - } - throw new ServerErrorException($"Writing dump failed (HRESULT: 0x{hr:X8})"); - case DiagnosticsServerResponseId.OK: - return; - default: - throw new ServerErrorException($"Writing dump failed - server responded with unknown command"); - } + /// + /// Trigger a core dump generation. + /// + /// Type of the dump to be generated + /// Full path to the dump to be generated. By default it is /tmp/coredump.{pid} + /// When set to true, display the dump generation debug log to the console. + /// The token to monitor for cancellation requests. 
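The refactor above splits each client operation into a private Create*Message builder plus a shared ValidateResponseMessage, so the public synchronous method and its new internal async twin differ only in how the IpcMessage is sent. A hedged usage sketch of the dump pair; the async overload is internal, so outside this assembly it is reachable only through the dotnet-monitor IVT, and in practice a caller would pick one of the two calls rather than both:

using System;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Diagnostics.NETCore.Client;

internal static class DumpSample
{
    public static async Task CaptureAsync(int processId, string dumpPath)
    {
        var client = new DiagnosticsClient(processId);

        // Public, synchronous API: blocks until the runtime acknowledges the dump command.
        client.WriteDump(DumpType.Full, dumpPath, logDumpGeneration: false);

        // Internal async twin added in this patch: same request message, awaited send,
        // cooperative cancellation.
        using var cts = new CancellationTokenSource(TimeSpan.FromMinutes(2));
        await client.WriteDumpAsync(DumpType.Full, dumpPath, logDumpGeneration: false, cts.Token);
    }
}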
+ internal async Task WriteDumpAsync(DumpType dumpType, string dumpPath, bool logDumpGeneration, CancellationToken token) + { + IpcMessage request = CreateWriteDumpMessage(dumpType, dumpPath, logDumpGeneration); + IpcMessage response = await IpcClient.SendMessageAsync(_endpoint, request, token).ConfigureAwait(false); + ValidateResponseMessage(response, nameof(WriteDumpAsync)); } /// @@ -124,43 +150,22 @@ public void WriteDump(DumpType dumpType, string dumpPath, bool logDumpGeneration /// Additional data to be passed to the profiler public void AttachProfiler(TimeSpan attachTimeout, Guid profilerGuid, string profilerPath, byte[] additionalData = null) { - if (profilerGuid == null || profilerGuid == Guid.Empty) - { - throw new ArgumentException($"{nameof(profilerGuid)} must be a valid Guid"); - } - - if (String.IsNullOrEmpty(profilerPath)) - { - throw new ArgumentException($"{nameof(profilerPath)} must be non-null"); - } - - byte[] serializedConfiguration = SerializePayload((uint)attachTimeout.TotalSeconds, profilerGuid, profilerPath, additionalData); - var message = new IpcMessage(DiagnosticsServerCommandSet.Profiler, (byte)ProfilerCommandId.AttachProfiler, serializedConfiguration); - var response = IpcClient.SendMessage(_endpoint, message); - switch ((DiagnosticsServerResponseId)response.Header.CommandId) - { - case DiagnosticsServerResponseId.Error: - uint hr = BitConverter.ToUInt32(response.Payload, 0); - if (hr == (uint)DiagnosticsIpcError.UnknownCommand) - { - throw new UnsupportedCommandException("The target runtime does not support profiler attach"); - } - if (hr == (uint)DiagnosticsIpcError.ProfilerAlreadyActive) - { - throw new ProfilerAlreadyActiveException("The request to attach a profiler was denied because a profiler is already loaded"); - } - throw new ServerErrorException($"Profiler attach failed (HRESULT: 0x{hr:X8})"); - case DiagnosticsServerResponseId.OK: - return; - default: - throw new ServerErrorException($"Profiler attach failed - server responded with unknown command"); - } + IpcMessage request = CreateAttachProfilerMessage(attachTimeout, profilerGuid, profilerPath, additionalData); + IpcMessage response = IpcClient.SendMessage(_endpoint, request); + ValidateResponseMessage(response, nameof(AttachProfiler)); // The call to set up the pipe and send the message operates on a different timeout than attachTimeout, which is for the runtime. // We should eventually have a configurable timeout for the message passing, potentially either separately from the // runtime timeout or respect attachTimeout as one total duration. } + internal async Task AttachProfilerAsync(TimeSpan attachTimeout, Guid profilerGuid, string profilerPath, byte[] additionalData, CancellationToken token) + { + IpcMessage request = CreateAttachProfilerMessage(attachTimeout, profilerGuid, profilerPath, additionalData); + IpcMessage response = await IpcClient.SendMessageAsync(_endpoint, request, token).ConfigureAwait(false); + ValidateResponseMessage(response, nameof(AttachProfilerAsync)); + } + /// /// Set a profiler as the startup profiler. It is only valid to issue this command /// while the runtime is paused at startup. 
@@ -169,38 +174,16 @@ public void AttachProfiler(TimeSpan attachTimeout, Guid profilerGuid, string pro /// Path to the profiler to be attached public void SetStartupProfiler(Guid profilerGuid, string profilerPath) { - if (profilerGuid == null || profilerGuid == Guid.Empty) - { - throw new ArgumentException($"{nameof(profilerGuid)} must be a valid Guid"); - } - - if (String.IsNullOrEmpty(profilerPath)) - { - throw new ArgumentException($"{nameof(profilerPath)} must be non-null"); - } - - byte[] serializedConfiguration = SerializePayload(profilerGuid, profilerPath); - var message = new IpcMessage(DiagnosticsServerCommandSet.Profiler, (byte)ProfilerCommandId.StartupProfiler, serializedConfiguration); - var response = IpcClient.SendMessage(_endpoint, message); - switch ((DiagnosticsServerResponseId)response.Header.CommandId) - { - case DiagnosticsServerResponseId.Error: - uint hr = BitConverter.ToUInt32(response.Payload, 0); - if (hr == (uint)DiagnosticsIpcError.UnknownCommand) - { - throw new UnsupportedCommandException("The target runtime does not support the ProfilerStartup command."); - } - else if (hr == (uint)DiagnosticsIpcError.InvalidArgument) - { - throw new ServerErrorException("The runtime must be suspended to issue the SetStartupProfiler command."); - } + IpcMessage request = CreateSetStartupProfilerMessage(profilerGuid, profilerPath); + IpcMessage response = IpcClient.SendMessage(_endpoint, request); + ValidateResponseMessage(response, nameof(SetStartupProfiler), ValidateResponseOptions.InvalidArgumentIsRequiresSuspension); + } - throw new ServerErrorException($"Profiler startup failed (HRESULT: 0x{hr:X8})"); - case DiagnosticsServerResponseId.OK: - return; - default: - throw new ServerErrorException($"Profiler startup failed - server responded with unknown command"); - } + internal async Task SetStartupProfilerAsync(Guid profilerGuid, string profilerPath, CancellationToken token) + { + IpcMessage request = CreateSetStartupProfilerMessage(profilerGuid, profilerPath); + IpcMessage response = await IpcClient.SendMessageAsync(_endpoint, request, token).ConfigureAwait(false); + ValidateResponseMessage(response, nameof(SetStartupProfilerAsync), ValidateResponseOptions.InvalidArgumentIsRequiresSuspension); } /// @@ -208,19 +191,16 @@ public void SetStartupProfiler(Guid profilerGuid, string profilerPath) /// public void ResumeRuntime() { - IpcMessage message = new IpcMessage(DiagnosticsServerCommandSet.Process, (byte)ProcessCommandId.ResumeRuntime); - var response = IpcClient.SendMessage(_endpoint, message); - switch ((DiagnosticsServerResponseId)response.Header.CommandId) - { - case DiagnosticsServerResponseId.Error: - // Try fallback for Preview 7 and Preview 8 - ResumeRuntimeFallback(); - return; - case DiagnosticsServerResponseId.OK: - return; - default: - throw new ServerErrorException($"Resume runtime failed - server responded with unknown command"); - } + IpcMessage request = CreateResumeRuntimeMessage(); + IpcMessage response = IpcClient.SendMessage(_endpoint, request); + ValidateResponseMessage(response, nameof(ResumeRuntime)); + } + + internal async Task ResumeRuntimeAsync(CancellationToken token) + { + IpcMessage request = CreateResumeRuntimeMessage(); + IpcMessage response = await IpcClient.SendMessageAsync(_endpoint, request, token).ConfigureAwait(false); + ValidateResponseMessage(response, nameof(ResumeRuntimeAsync)); } /// @@ -230,29 +210,16 @@ public void ResumeRuntime() /// The value of the environment variable to set. 
public void SetEnvironmentVariable(string name, string value) { - if (String.IsNullOrEmpty(name)) - { - throw new ArgumentException($"{nameof(name)} must be non-null."); - } - - byte[] serializedConfiguration = SerializePayload(name, value); - var message = new IpcMessage(DiagnosticsServerCommandSet.Process, (byte)ProcessCommandId.SetEnvironmentVariable, serializedConfiguration); - var response = IpcClient.SendMessage(_endpoint, message); - switch ((DiagnosticsServerResponseId)response.Header.CommandId) - { - case DiagnosticsServerResponseId.Error: - uint hr = BitConverter.ToUInt32(response.Payload, 0); - if (hr == (uint)DiagnosticsIpcError.UnknownCommand) - { - throw new UnsupportedCommandException("The target runtime does not support the SetEnvironmentVariable command."); - } + IpcMessage request = CreateSetEnvironmentVariableMessage(name, value); + IpcMessage response = IpcClient.SendMessage(_endpoint, request); + ValidateResponseMessage(response, nameof(SetEnvironmentVariable)); + } - throw new ServerErrorException($"SetEnvironmentVariable failed (HRESULT: 0x{hr:X8})"); - case DiagnosticsServerResponseId.OK: - return; - default: - throw new ServerErrorException($"SetEnvironmentVariable failed - server responded with unknown command"); - } + internal async Task SetEnvironmentVariableAsync(string name, string value, CancellationToken token) + { + IpcMessage request = CreateSetEnvironmentVariableMessage(name, value); + IpcMessage response = await IpcClient.SendMessageAsync(_endpoint, request, token).ConfigureAwait(false); + ValidateResponseMessage(response, nameof(SetEnvironmentVariableAsync)); } /// @@ -261,21 +228,22 @@ public void SetEnvironmentVariable(string name, string value) /// A dictionary containing all of the environment variables defined in the target process. 
public Dictionary GetProcessEnvironment() { - var message = new IpcMessage(DiagnosticsServerCommandSet.Process, (byte)ProcessCommandId.GetProcessEnvironment); - Stream continuation = IpcClient.SendMessage(_endpoint, message, out IpcMessage response); - switch ((DiagnosticsServerResponseId)response.Header.CommandId) - { - case DiagnosticsServerResponseId.Error: - int hr = BitConverter.ToInt32(response.Payload, 0); - throw new ServerErrorException($"Get process environment failed (HRESULT: 0x{hr:X8})"); - case DiagnosticsServerResponseId.OK: - ProcessEnvironmentHelper helper = ProcessEnvironmentHelper.Parse(response.Payload); - Task> envTask = helper.ReadEnvironmentAsync(continuation); - envTask.Wait(); - return envTask.Result; - default: - throw new ServerErrorException($"Get process environment failed - server responded with unknown command"); - } + IpcMessage message = CreateProcessEnvironmentMessage(); + using IpcResponse response = IpcClient.SendMessageGetContinuation(_endpoint, message); + ValidateResponseMessage(response.Message, nameof(GetProcessEnvironmentAsync)); + + ProcessEnvironmentHelper helper = ProcessEnvironmentHelper.Parse(response.Message.Payload); + return helper.ReadEnvironment(response.Continuation); + } + + internal async Task> GetProcessEnvironmentAsync(CancellationToken token) + { + IpcMessage message = CreateProcessEnvironmentMessage(); + using IpcResponse response = await IpcClient.SendMessageGetContinuationAsync(_endpoint, message, token).ConfigureAwait(false); + ValidateResponseMessage(response.Message, nameof(GetProcessEnvironmentAsync)); + + ProcessEnvironmentHelper helper = ProcessEnvironmentHelper.Parse(response.Message.Payload); + return await helper.ReadEnvironmentAsync(response.Continuation, token).ConfigureAwait(false); } /// @@ -304,76 +272,46 @@ static IEnumerable GetAllPublishedProcesses() return GetAllPublishedProcesses().Distinct(); } - - // Fallback command for .NET 5 Preview 7 and Preview 8 - internal void ResumeRuntimeFallback() + internal ProcessInfo GetProcessInfo() { - IpcMessage message = new IpcMessage(DiagnosticsServerCommandSet.Server, (byte)DiagnosticServerCommandId.ResumeRuntime); - var response = IpcClient.SendMessage(_endpoint, message); - switch ((DiagnosticsServerResponseId)response.Header.CommandId) + // Attempt to get ProcessInfo v2 + ProcessInfo processInfo = TryGetProcessInfo2(); + if (null != processInfo) { - case DiagnosticsServerResponseId.Error: - var hr = BitConverter.ToUInt32(response.Payload, 0); - if (hr == (uint)DiagnosticsIpcError.UnknownCommand) - { - throw new UnsupportedCommandException($"Resume runtime command is unknown by target runtime."); - } - throw new ServerErrorException($"Resume runtime failed (HRESULT: 0x{hr:X8})"); - case DiagnosticsServerResponseId.OK: - return; - default: - throw new ServerErrorException($"Resume runtime failed - server responded with unknown command"); + return processInfo; } + + IpcMessage request = CreateProcessInfoMessage(); + using IpcResponse response = IpcClient.SendMessageGetContinuation(_endpoint, request); + return GetProcessInfoFromResponse(response, nameof(GetProcessInfo)); } - internal ProcessInfo GetProcessInfo() + internal async Task GetProcessInfoAsync(CancellationToken token) { - // RE: https://github.com/dotnet/runtime/issues/54083 - // If the GetProcessInfo2 command is sent too early, it will crash the runtime instance. - // Disable the usage of the command until that issue is fixed. 
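GetProcessInfo above now probes the richer GetProcessInfo2 command and silently falls back to the v1 command when the target runtime answers UnknownCommand (turned into a null result by ValidateResponseOptions.UnknownCommandReturnsFalse), and both process info and the environment block gained awaitable twins. A brief hedged usage sketch of that async surface; these members are internal, so the sketch assumes in-assembly or IVT access:

using System;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Diagnostics.NETCore.Client;

internal static class ProcessInspectionSample
{
    public static async Task PrintEnvironmentAsync(int processId)
    {
        var client = new DiagnosticsClient(processId);
        using var cts = new CancellationTokenSource(TimeSpan.FromSeconds(10));

        // GetProcessInfo2 is attempted first; v1 is used when the runtime is too old to know it.
        ProcessInfo info = await client.GetProcessInfoAsync(cts.Token);

        Dictionary<string, string> environment = await client.GetProcessEnvironmentAsync(cts.Token);
        foreach (KeyValuePair<string, string> pair in environment)
        {
            Console.WriteLine($"{pair.Key}={pair.Value}");
        }
    }
}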
- // Attempt to get ProcessInfo v2 - //ProcessInfo processInfo = GetProcessInfo2(); - //if (null != processInfo) - //{ - // return processInfo; - //} - - // Attempt to get ProcessInfo v1 - IpcMessage message = new IpcMessage(DiagnosticsServerCommandSet.Process, (byte)ProcessCommandId.GetProcessInfo); - var response = IpcClient.SendMessage(_endpoint, message); - switch ((DiagnosticsServerResponseId)response.Header.CommandId) + ProcessInfo processInfo = await TryGetProcessInfo2Async(token); + if (null != processInfo) { - case DiagnosticsServerResponseId.Error: - var hr = BitConverter.ToInt32(response.Payload, 0); - throw new ServerErrorException($"Get process info failed (HRESULT: 0x{hr:X8})"); - case DiagnosticsServerResponseId.OK: - return ProcessInfo.ParseV1(response.Payload); - default: - throw new ServerErrorException($"Get process info failed - server responded with unknown command"); + return processInfo; } + + IpcMessage request = CreateProcessInfoMessage(); + using IpcResponse response = await IpcClient.SendMessageGetContinuationAsync(_endpoint, request, token).ConfigureAwait(false); + return GetProcessInfoFromResponse(response, nameof(GetProcessInfoAsync)); } - private ProcessInfo GetProcessInfo2() + private ProcessInfo TryGetProcessInfo2() { - IpcMessage message = new IpcMessage(DiagnosticsServerCommandSet.Process, (byte)ProcessCommandId.GetProcessInfo2); - var response = IpcClient.SendMessage(_endpoint, message); - switch ((DiagnosticsServerResponseId)response.Header.CommandId) - { - case DiagnosticsServerResponseId.Error: - uint hr = BitConverter.ToUInt32(response.Payload, 0); - // In the case that the runtime doesn't understand the GetProcessInfo2 command, - // just break to allow fallback to try to get ProcessInfo v1. - if (hr == (uint)DiagnosticsIpcError.UnknownCommand) - { - return null; - } - throw new ServerErrorException($"GetProcessInfo2 failed (HRESULT: 0x{hr:X8})"); - case DiagnosticsServerResponseId.OK: - return ProcessInfo.ParseV2(response.Payload); - default: - throw new ServerErrorException($"Get process info failed - server responded with unknown command"); - } + IpcMessage request = CreateProcessInfo2Message(); + using IpcResponse response2 = IpcClient.SendMessageGetContinuation(_endpoint, request); + return TryGetProcessInfo2FromResponse(response2, nameof(GetProcessInfo)); + } + + private async Task TryGetProcessInfo2Async(CancellationToken token) + { + IpcMessage request = CreateProcessInfo2Message(); + using IpcResponse response2 = await IpcClient.SendMessageGetContinuationAsync(_endpoint, request, token).ConfigureAwait(false); + return TryGetProcessInfo2FromResponse(response2, nameof(GetProcessInfoAsync)); } private static byte[] SerializePayload(T arg) @@ -450,10 +388,143 @@ private static void SerializePayloadArgument(T obj, BinaryWriter writer) uint uiValue = bValue ? 
(uint)1 : 0; writer.Write(uiValue); } + else if (typeof(T) == typeof(Guid)) + { + Guid guidVal = (Guid)((object)obj); + writer.Write(guidVal.ToByteArray()); + } else { throw new ArgumentException($"Type {obj.GetType()} is not supported in SerializePayloadArgument, please add it."); } } + + private static IpcMessage CreateAttachProfilerMessage(TimeSpan attachTimeout, Guid profilerGuid, string profilerPath, byte[] additionalData) + { + if (profilerGuid == null || profilerGuid == Guid.Empty) + { + throw new ArgumentException($"{nameof(profilerGuid)} must be a valid Guid"); + } + + if (String.IsNullOrEmpty(profilerPath)) + { + throw new ArgumentException($"{nameof(profilerPath)} must be non-null"); + } + + byte[] serializedConfiguration = SerializePayload((uint)attachTimeout.TotalSeconds, profilerGuid, profilerPath, additionalData); + return new IpcMessage(DiagnosticsServerCommandSet.Profiler, (byte)ProfilerCommandId.AttachProfiler, serializedConfiguration); + } + + private static IpcMessage CreateProcessEnvironmentMessage() + { + return new IpcMessage(DiagnosticsServerCommandSet.Process, (byte)ProcessCommandId.GetProcessEnvironment); + } + + private static IpcMessage CreateProcessInfoMessage() + { + return new IpcMessage(DiagnosticsServerCommandSet.Process, (byte)ProcessCommandId.GetProcessInfo); + } + + private static IpcMessage CreateProcessInfo2Message() + { + return new IpcMessage(DiagnosticsServerCommandSet.Process, (byte)ProcessCommandId.GetProcessInfo2); + } + + private static IpcMessage CreateResumeRuntimeMessage() + { + return new IpcMessage(DiagnosticsServerCommandSet.Process, (byte)ProcessCommandId.ResumeRuntime); + } + + private static IpcMessage CreateSetEnvironmentVariableMessage(string name, string value) + { + if (String.IsNullOrEmpty(name)) + { + throw new ArgumentException($"{nameof(name)} must be non-null."); + } + + byte[] serializedConfiguration = SerializePayload(name, value); + return new IpcMessage(DiagnosticsServerCommandSet.Process, (byte)ProcessCommandId.SetEnvironmentVariable, serializedConfiguration); + } + + private static IpcMessage CreateSetStartupProfilerMessage(Guid profilerGuid, string profilerPath) + { + if (profilerGuid == null || profilerGuid == Guid.Empty) + { + throw new ArgumentException($"{nameof(profilerGuid)} must be a valid Guid"); + } + + if (String.IsNullOrEmpty(profilerPath)) + { + throw new ArgumentException($"{nameof(profilerPath)} must be non-null"); + } + + byte[] serializedConfiguration = SerializePayload(profilerGuid, profilerPath); + return new IpcMessage(DiagnosticsServerCommandSet.Profiler, (byte)ProfilerCommandId.StartupProfiler, serializedConfiguration); + } + + private static IpcMessage CreateWriteDumpMessage(DumpType dumpType, string dumpPath, bool logDumpGeneration) + { + if (string.IsNullOrEmpty(dumpPath)) + throw new ArgumentNullException($"{nameof(dumpPath)} required"); + + byte[] payload = SerializePayload(dumpPath, (uint)dumpType, logDumpGeneration); + return new IpcMessage(DiagnosticsServerCommandSet.Dump, (byte)DumpCommandId.GenerateCoreDump, payload); + } + + private static ProcessInfo GetProcessInfoFromResponse(IpcResponse response, string operationName) + { + ValidateResponseMessage(response.Message, operationName); + + return ProcessInfo.ParseV1(response.Message.Payload); + } + + private static ProcessInfo TryGetProcessInfo2FromResponse(IpcResponse response, string operationName) + { + if (!ValidateResponseMessage(response.Message, operationName, ValidateResponseOptions.UnknownCommandReturnsFalse)) + { + return null; + } 
+ + return ProcessInfo.ParseV2(response.Message.Payload); + } + + internal static bool ValidateResponseMessage(IpcMessage responseMessage, string operationName, ValidateResponseOptions options = ValidateResponseOptions.None) + { + switch ((DiagnosticsServerResponseId)responseMessage.Header.CommandId) + { + case DiagnosticsServerResponseId.Error: + uint hr = BitConverter.ToUInt32(responseMessage.Payload, 0); + switch (hr) + { + case (uint)DiagnosticsIpcError.UnknownCommand: + if (options.HasFlag(ValidateResponseOptions.UnknownCommandReturnsFalse)) + { + return false; + } + throw new UnsupportedCommandException($"{operationName} failed - Command is not supported."); + case (uint)DiagnosticsIpcError.ProfilerAlreadyActive: + throw new ProfilerAlreadyActiveException($"{operationName} failed - A profiler is already loaded."); + case (uint)DiagnosticsIpcError.InvalidArgument: + if (options.HasFlag(ValidateResponseOptions.InvalidArgumentIsRequiresSuspension)) + { + throw new ServerErrorException($"{operationName} failed - The runtime must be suspended for this command."); + } + throw new UnsupportedCommandException($"{operationName} failed - Invalid command argument."); + } + throw new ServerErrorException($"{operationName} failed - HRESULT: 0x{hr:X8}"); + case DiagnosticsServerResponseId.OK: + return true; + default: + throw new ServerErrorException($"{operationName} failed - Server responded with unknown response."); + } + } + + [Flags] + internal enum ValidateResponseOptions + { + None = 0x0, + UnknownCommandReturnsFalse = 0x1, + InvalidArgumentIsRequiresSuspension = 0x2, + } } } diff --git a/src/Microsoft.Diagnostics.NETCore.Client/DiagnosticsClient/EventPipeSession.cs b/src/Microsoft.Diagnostics.NETCore.Client/DiagnosticsClient/EventPipeSession.cs index 9f82f79418..e277b9a65f 100644 --- a/src/Microsoft.Diagnostics.NETCore.Client/DiagnosticsClient/EventPipeSession.cs +++ b/src/Microsoft.Diagnostics.NETCore.Client/DiagnosticsClient/EventPipeSession.cs @@ -6,83 +6,125 @@ using System.Collections.Generic; using System.Diagnostics; using System.IO; +using System.Threading; +using System.Threading.Tasks; namespace Microsoft.Diagnostics.NETCore.Client { public class EventPipeSession : IDisposable { - private IEnumerable _providers; - private bool _requestRundown; - private int _circularBufferMB; private long _sessionId; private IpcEndpoint _endpoint; private bool _disposedValue = false; // To detect redundant calls private bool _stopped = false; // To detect redundant calls + private readonly IpcResponse _response; - internal EventPipeSession(IpcEndpoint endpoint, IEnumerable providers, bool requestRundown, int circularBufferMB) + private EventPipeSession(IpcEndpoint endpoint, IpcResponse response, long sessionId) { _endpoint = endpoint; - _providers = providers; - _requestRundown = requestRundown; - _circularBufferMB = circularBufferMB; - - var config = new EventPipeSessionConfiguration(circularBufferMB, EventPipeSerializationFormat.NetTrace, providers, requestRundown); - var message = new IpcMessage(DiagnosticsServerCommandSet.EventPipe, (byte)EventPipeCommandId.CollectTracing2, config.SerializeV2()); - EventStream = IpcClient.SendMessage(endpoint, message, out var response); - switch ((DiagnosticsServerResponseId)response.Header.CommandId) - { - case DiagnosticsServerResponseId.OK: - _sessionId = BitConverter.ToInt64(response.Payload, 0); - break; - case DiagnosticsServerResponseId.Error: - var hr = BitConverter.ToInt32(response.Payload, 0); - throw new ServerErrorException($"EventPipe session 
start failed (HRESULT: 0x{hr:X8})"); - default: - throw new ServerErrorException($"EventPipe session start failed - Server responded with unknown command"); - } + _response = response; + _sessionId = sessionId; } - public Stream EventStream { get; } + public Stream EventStream => _response.Continuation; + + internal static EventPipeSession Start(IpcEndpoint endpoint, IEnumerable providers, bool requestRundown, int circularBufferMB) + { + IpcMessage requestMessage = CreateStartMessage(providers, requestRundown, circularBufferMB); + IpcResponse? response = IpcClient.SendMessageGetContinuation(endpoint, requestMessage); + return CreateSessionFromResponse(endpoint, ref response, nameof(Start)); + } + + internal static async Task StartAsync(IpcEndpoint endpoint, IEnumerable providers, bool requestRundown, int circularBufferMB, CancellationToken cancellationToken) + { + IpcMessage requestMessage = CreateStartMessage(providers, requestRundown, circularBufferMB); + IpcResponse? response = await IpcClient.SendMessageGetContinuationAsync(endpoint, requestMessage, cancellationToken).ConfigureAwait(false); + return CreateSessionFromResponse(endpoint, ref response, nameof(StartAsync)); + } /// /// Stops the given session /// public void Stop() { - Debug.Assert(_sessionId > 0); - - // Do not issue another Stop command if it has already been issued for this session instance. - if (_stopped) + if (TryCreateStopMessage(out IpcMessage requestMessage)) { - return; + try + { + IpcMessage response = IpcClient.SendMessage(_endpoint, requestMessage); + + DiagnosticsClient.ValidateResponseMessage(response, nameof(Stop)); + } + // On non-abrupt exits (i.e. the target process has already exited and pipe is gone, sending Stop command will fail). + catch (IOException) + { + throw new ServerNotAvailableException("Could not send Stop command. The target process may have exited."); + } } - else + } + + public async Task StopAsync(CancellationToken cancellationToken) + { + if (TryCreateStopMessage(out IpcMessage requestMessage)) { - _stopped = true; + try + { + IpcMessage response = await IpcClient.SendMessageAsync(_endpoint, requestMessage, cancellationToken).ConfigureAwait(false); + + DiagnosticsClient.ValidateResponseMessage(response, nameof(StopAsync)); + } + // On non-abrupt exits (i.e. the target process has already exited and pipe is gone, sending Stop command will fail). + catch (IOException) + { + throw new ServerNotAvailableException("Could not send Stop command. The target process may have exited."); + } } + } - byte[] payload = BitConverter.GetBytes(_sessionId); - IpcMessage response; + private static IpcMessage CreateStartMessage(IEnumerable providers, bool requestRundown, int circularBufferMB) + { + var config = new EventPipeSessionConfiguration(circularBufferMB, EventPipeSerializationFormat.NetTrace, providers, requestRundown); + return new IpcMessage(DiagnosticsServerCommandSet.EventPipe, (byte)EventPipeCommandId.CollectTracing2, config.SerializeV2()); + } + + private static EventPipeSession CreateSessionFromResponse(IpcEndpoint endpoint, ref IpcResponse? 
response, string operationName) + { try { - response = IpcClient.SendMessage(_endpoint, new IpcMessage(DiagnosticsServerCommandSet.EventPipe, (byte)EventPipeCommandId.StopTracing, payload)); + DiagnosticsClient.ValidateResponseMessage(response.Value.Message, operationName); + + long sessionId = BitConverter.ToInt64(response.Value.Message.Payload, 0); + + var session = new EventPipeSession(endpoint, response.Value, sessionId); + response = null; + return session; } - // On non-abrupt exits (i.e. the target process has already exited and pipe is gone, sending Stop command will fail). - catch (IOException) + finally { - throw new ServerNotAvailableException("Could not send Stop command. The target process may have exited."); + response?.Dispose(); } + } - switch ((DiagnosticsServerResponseId)response.Header.CommandId) + private bool TryCreateStopMessage(out IpcMessage stopMessage) + { + Debug.Assert(_sessionId > 0); + + // Do not issue another Stop command if it has already been issued for this session instance. + if (_stopped) { - case DiagnosticsServerResponseId.OK: - return; - case DiagnosticsServerResponseId.Error: - var hr = BitConverter.ToInt32(response.Payload, 0); - throw new ServerErrorException($"EventPipe session stop failed (HRESULT: 0x{hr:X8})"); - default: - throw new ServerErrorException($"EventPipe session stop failed - Server responded with unknown command"); + stopMessage = null; + return false; } + else + { + _stopped = true; + } + + byte[] payload = BitConverter.GetBytes(_sessionId); + + stopMessage = new IpcMessage(DiagnosticsServerCommandSet.EventPipe, (byte)EventPipeCommandId.StopTracing, payload); + + return true; } protected virtual void Dispose(bool disposing) @@ -101,7 +143,7 @@ protected virtual void Dispose(bool disposing) { if (disposing) { - EventStream?.Dispose(); + _response.Dispose(); } _disposedValue = true; } diff --git a/src/Microsoft.Diagnostics.NETCore.Client/DiagnosticsIpc/IpcClient.cs b/src/Microsoft.Diagnostics.NETCore.Client/DiagnosticsIpc/IpcClient.cs index f6364820dc..87efac955a 100644 --- a/src/Microsoft.Diagnostics.NETCore.Client/DiagnosticsIpc/IpcClient.cs +++ b/src/Microsoft.Diagnostics.NETCore.Client/DiagnosticsIpc/IpcClient.cs @@ -5,6 +5,7 @@ using System; using System.IO; using System.Threading; +using System.Threading.Tasks; namespace Microsoft.Diagnostics.NETCore.Client { @@ -12,52 +13,111 @@ internal class IpcClient { // The amount of time to wait for a stream to be available for consumption by the Connect method. // Normally expect the runtime to respond quickly but resource constrained machines may take longer. - private static readonly TimeSpan ConnectTimeout = TimeSpan.FromSeconds(30); + internal static readonly TimeSpan ConnectTimeout = TimeSpan.FromSeconds(30); /// - /// Sends a single DiagnosticsIpc Message to the dotnet process with PID processId. + /// Sends a single DiagnosticsIpc Message to the dotnet process associated with the . /// /// An endpoint that provides a diagnostics connection to a runtime instance. /// The DiagnosticsIpc Message to be sent - /// The response DiagnosticsIpc Message from the dotnet process + /// An that is the response message. public static IpcMessage SendMessage(IpcEndpoint endpoint, IpcMessage message) { - using (var stream = endpoint.Connect(ConnectTimeout)) + using IpcResponse response = SendMessageGetContinuation(endpoint, message); + return response.Message; + } + + /// + /// Sends a single DiagnosticsIpc Message to the dotnet process associated with the . 
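CreateSessionFromResponse above takes the IpcResponse as a nullable ref, clears it once the new EventPipeSession has adopted it, and lets the finally block dispose it only on the failure path; IpcClient's Release(ref stream) below uses the same "clear on successful hand-off" idiom. A small standalone sketch of the idiom with generic, made-up names:

using System;
using System.IO;

internal sealed class StreamOwner : IDisposable
{
    private readonly Stream _stream;

    public StreamOwner(Stream stream) => _stream = stream;

    public void Dispose() => _stream.Dispose();
}

internal static class OwnershipTransfer
{
    public static StreamOwner Adopt(ref Stream stream)
    {
        try
        {
            var owner = new StreamOwner(stream);
            stream = null;          // hand-off succeeded: the owner is now responsible for disposal
            return owner;
        }
        finally
        {
            stream?.Dispose();      // null after a successful hand-off, so this only disposes on failure
        }
    }
}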
+ /// + /// An endpoint that provides a diagnostics connection to a runtime instance. + /// The DiagnosticsIpc Message to be sent + /// An containing the response message and continuation stream. + public static IpcResponse SendMessageGetContinuation(IpcEndpoint endpoint, IpcMessage message) + { + Stream stream = null; + try { + stream = endpoint.Connect(ConnectTimeout); + Write(stream, message); - return Read(stream); + + IpcMessage response = Read(stream); + + return new IpcResponse(response, Release(ref stream)); + } + finally + { + stream?.Dispose(); } } /// - /// Sends a single DiagnosticsIpc Message to the dotnet process with PID processId - /// and returns the Stream for reuse in Optional Continuations. + /// Sends a single DiagnosticsIpc Message to the dotnet process associated with the . /// /// An endpoint that provides a diagnostics connection to a runtime instance. /// The DiagnosticsIpc Message to be sent - /// out var for response message - /// The response DiagnosticsIpc Message from the dotnet process - public static Stream SendMessage(IpcEndpoint endpoint, IpcMessage message, out IpcMessage response) + /// The token to monitor for cancellation requests. + /// An that is the response message. + public static async Task SendMessageAsync(IpcEndpoint endpoint, IpcMessage message, CancellationToken cancellationToken) { - var stream = endpoint.Connect(ConnectTimeout); - Write(stream, message); - response = Read(stream); - return stream; + using IpcResponse response = await SendMessageGetContinuationAsync(endpoint, message, cancellationToken).ConfigureAwait(false); + return response.Message; } - private static void Write(Stream stream, byte[] buffer) + /// + /// Sends a single DiagnosticsIpc Message to the dotnet process associated with the . + /// + /// An endpoint that provides a diagnostics connection to a runtime instance. + /// The DiagnosticsIpc Message to be sent + /// The token to monitor for cancellation requests. + /// An containing the response message and continuation stream. 
+ public static async Task SendMessageGetContinuationAsync(IpcEndpoint endpoint, IpcMessage message, CancellationToken cancellationToken) { - stream.Write(buffer, 0, buffer.Length); + Stream stream = null; + try + { + stream = await endpoint.ConnectAsync(cancellationToken).ConfigureAwait(false); + + await WriteAsync(stream, message, cancellationToken).ConfigureAwait(false); + + IpcMessage response = await ReadAsync(stream, cancellationToken).ConfigureAwait(false); + + return new IpcResponse(response, Release(ref stream)); + } + finally + { + stream?.Dispose(); + } } private static void Write(Stream stream, IpcMessage message) { - Write(stream, message.Serialize()); + byte[] buffer = message.Serialize(); + stream.Write(buffer, 0, buffer.Length); + } + + private static Task WriteAsync(Stream stream, IpcMessage message, CancellationToken cancellationToken) + { + byte[] buffer = message.Serialize(); + return stream.WriteAsync(buffer, 0, buffer.Length, cancellationToken); } private static IpcMessage Read(Stream stream) { return IpcMessage.Parse(stream); } + + private static Task ReadAsync(Stream stream, CancellationToken cancellationToken) + { + return IpcMessage.ParseAsync(stream, cancellationToken); + } + + private static Stream Release(ref Stream stream1) + { + Stream intermediate = stream1; + stream1 = null; + return intermediate; + } } } diff --git a/src/Microsoft.Diagnostics.NETCore.Client/DiagnosticsIpc/IpcCommands.cs b/src/Microsoft.Diagnostics.NETCore.Client/DiagnosticsIpc/IpcCommands.cs index 7b22f00c04..d76f0df0bf 100644 --- a/src/Microsoft.Diagnostics.NETCore.Client/DiagnosticsIpc/IpcCommands.cs +++ b/src/Microsoft.Diagnostics.NETCore.Client/DiagnosticsIpc/IpcCommands.cs @@ -18,17 +18,6 @@ internal enum DiagnosticsServerCommandSet : byte Server = 0xFF, } - // For .NET 5 Preview 7 and Preview 8, use this with the - // DiagnosticsServerCommandSet.Server command set. - // For .NET 5 RC and later, use ProcessCommandId.ResumeRuntime with - // the DiagnosticsServerCommandSet.Process command set. - internal enum DiagnosticServerCommandId : byte - { - // 0x00 used in DiagnosticServerResponseId - ResumeRuntime = 0x01, - // 0xFF used DiagnosticServerResponseId - }; - internal enum DiagnosticsServerResponseId : byte { OK = 0x00, diff --git a/src/Microsoft.Diagnostics.NETCore.Client/DiagnosticsIpc/IpcHeader.cs b/src/Microsoft.Diagnostics.NETCore.Client/DiagnosticsIpc/IpcHeader.cs index 3a8fe59287..6950e8527b 100644 --- a/src/Microsoft.Diagnostics.NETCore.Client/DiagnosticsIpc/IpcHeader.cs +++ b/src/Microsoft.Diagnostics.NETCore.Client/DiagnosticsIpc/IpcHeader.cs @@ -3,12 +3,11 @@ // See the LICENSE file in the project root for more information. 
using System; -using System.Collections.Generic; using System.Diagnostics; using System.IO; -using System.Net.Sockets; -using System.Runtime.InteropServices; using System.Text; +using System.Threading; +using System.Threading.Tasks; namespace Microsoft.Diagnostics.NETCore.Client { @@ -53,7 +52,7 @@ public byte[] Serialize() } } - public static IpcHeader TryParse(BinaryReader reader) + public static IpcHeader Parse(BinaryReader reader) { IpcHeader header = new IpcHeader { @@ -67,6 +66,16 @@ public static IpcHeader TryParse(BinaryReader reader) return header; } + public static async Task ParseAsync(Stream stream, CancellationToken cancellationToken) + { + byte[] buffer = await stream.ReadBytesAsync(HeaderSizeInBytes, cancellationToken).ConfigureAwait(false); + using MemoryStream bufferStream = new MemoryStream(buffer); + using BinaryReader bufferReader = new BinaryReader(bufferStream); + IpcHeader header = Parse(bufferReader); + Debug.Assert(bufferStream.Position == bufferStream.Length); + return header; + } + override public string ToString() { return $"{{ Magic={Magic}; Size={Size}; CommandSet={CommandSet}; CommandId={CommandId}; Reserved={Reserved} }}"; diff --git a/src/Microsoft.Diagnostics.NETCore.Client/DiagnosticsIpc/IpcMessage.cs b/src/Microsoft.Diagnostics.NETCore.Client/DiagnosticsIpc/IpcMessage.cs index 00b50980ac..c1acad97cf 100644 --- a/src/Microsoft.Diagnostics.NETCore.Client/DiagnosticsIpc/IpcMessage.cs +++ b/src/Microsoft.Diagnostics.NETCore.Client/DiagnosticsIpc/IpcMessage.cs @@ -7,6 +7,8 @@ using System.IO; using System.Text; using System.Runtime.InteropServices; +using System.Threading; +using System.Threading.Tasks; namespace Microsoft.Diagnostics.NETCore.Client { @@ -112,10 +114,18 @@ public static IpcMessage Parse(Stream stream) IpcMessage message = new IpcMessage(); using (var reader = new BinaryReader(stream, Encoding.UTF8, true)) { - message.Header = IpcHeader.TryParse(reader); + message.Header = IpcHeader.Parse(reader); message.Payload = reader.ReadBytes(message.Header.Size - IpcHeader.HeaderSizeInBytes); return message; } } + + public static async Task ParseAsync(Stream stream, CancellationToken cancellationToken) + { + IpcMessage message = new IpcMessage(); + message.Header = await IpcHeader.ParseAsync(stream, cancellationToken).ConfigureAwait(false); + message.Payload = await stream.ReadBytesAsync(message.Header.Size - IpcHeader.HeaderSizeInBytes, cancellationToken).ConfigureAwait(false); + return message; + } } } diff --git a/src/Microsoft.Diagnostics.NETCore.Client/DiagnosticsIpc/IpcResponse.cs b/src/Microsoft.Diagnostics.NETCore.Client/DiagnosticsIpc/IpcResponse.cs new file mode 100644 index 0000000000..02bc098bde --- /dev/null +++ b/src/Microsoft.Diagnostics.NETCore.Client/DiagnosticsIpc/IpcResponse.cs @@ -0,0 +1,27 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. +// See the LICENSE file in the project root for more information. 
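IpcHeader.ParseAsync above sidesteps the lack of an async BinaryReader by reading exactly HeaderSizeInBytes from the stream and then reusing the synchronous parser over an in-memory copy; the exact-read loop is the ReadBytesAsync extension added later in this patch. A minimal standalone sketch of that "read exactly N bytes, then parse in memory" shape (the frame format here is made up):

using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;

internal static class FramedReader
{
    public static async Task<byte[]> ReadExactlyAsync(Stream stream, int count, CancellationToken token)
    {
        byte[] buffer = new byte[count];
        int offset = 0;
        while (offset < count)
        {
            // Stream.ReadAsync may return fewer bytes than requested; keep reading until
            // the buffer is full or the sender closes the stream.
            int read = await stream.ReadAsync(buffer, offset, count - offset, token).ConfigureAwait(false);
            if (read == 0)
            {
                throw new EndOfStreamException();
            }
            offset += read;
        }
        return buffer;
    }

    // Read a fixed-size header asynchronously, then parse it with ordinary synchronous
    // BinaryReader code over a MemoryStream, as ParseAsync does for IpcHeader.
    public static async Task<ushort> ReadLengthPrefixAsync(Stream stream, CancellationToken token)
    {
        byte[] headerBytes = await ReadExactlyAsync(stream, sizeof(ushort), token).ConfigureAwait(false);
        using var bufferStream = new MemoryStream(headerBytes);
        using var reader = new BinaryReader(bufferStream);
        return reader.ReadUInt16();
    }
}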
+ +using System; +using System.IO; + +namespace Microsoft.Diagnostics.NETCore.Client +{ + internal struct IpcResponse : IDisposable + { + public readonly IpcMessage Message; + + public readonly Stream Continuation; + + public IpcResponse(IpcMessage message, Stream continuation) + { + Message = message; + Continuation = continuation; + } + + public void Dispose() + { + Continuation?.Dispose(); + } + } +} diff --git a/src/Microsoft.Diagnostics.NETCore.Client/DiagnosticsIpc/IpcServerTransport.cs b/src/Microsoft.Diagnostics.NETCore.Client/DiagnosticsIpc/IpcServerTransport.cs index 21168c7b35..7d0deac460 100644 --- a/src/Microsoft.Diagnostics.NETCore.Client/DiagnosticsIpc/IpcServerTransport.cs +++ b/src/Microsoft.Diagnostics.NETCore.Client/DiagnosticsIpc/IpcServerTransport.cs @@ -149,7 +149,7 @@ public override async Task AcceptAsync(CancellationToken token) private NamedPipeServerStream CreateNewNamedPipeServer(string pipeName, int maxInstances) { - var stream = new NamedPipeServerStream(pipeName, PipeDirection.InOut, maxInstances, PipeTransmissionMode.Byte, PipeOptions.Asynchronous); + var stream = new NamedPipeServerStream(pipeName, PipeDirection.InOut, maxInstances, PipeTransmissionMode.Byte, PipeOptions.Asynchronous, 16 * 1024, 16 * 1024); OnCreateNewServer(null); return stream; } diff --git a/src/Microsoft.Diagnostics.NETCore.Client/DiagnosticsIpc/ProcessEnvironment.cs b/src/Microsoft.Diagnostics.NETCore.Client/DiagnosticsIpc/ProcessEnvironment.cs index b3985e49c4..2b65bbe5fd 100644 --- a/src/Microsoft.Diagnostics.NETCore.Client/DiagnosticsIpc/ProcessEnvironment.cs +++ b/src/Microsoft.Diagnostics.NETCore.Client/DiagnosticsIpc/ProcessEnvironment.cs @@ -5,8 +5,6 @@ using System; using System.Collections.Generic; using System.IO; -using System.Linq; -using System.Text; using System.Threading; using System.Threading.Tasks; @@ -14,6 +12,8 @@ namespace Microsoft.Diagnostics.NETCore.Client { internal class ProcessEnvironmentHelper { + private const int CopyBufferSize = (16 << 10) /* 16KiB */; + private ProcessEnvironmentHelper() {} public static ProcessEnvironmentHelper Parse(byte[] payload) { @@ -25,18 +25,29 @@ public static ProcessEnvironmentHelper Parse(byte[] payload) return helper; } - public async Task> ReadEnvironmentAsync(Stream continuation, CancellationToken token = default(CancellationToken)) + public Dictionary ReadEnvironment(Stream continuation) { - var env = new Dictionary(); + using var memoryStream = new MemoryStream(); + continuation.CopyTo(memoryStream, CopyBufferSize); + return ReadEnvironmentCore(memoryStream); + } + public async Task> ReadEnvironmentAsync(Stream continuation, CancellationToken token = default(CancellationToken)) + { using var memoryStream = new MemoryStream(); - await continuation.CopyToAsync(memoryStream, (16 << 10) /* 16KiB */, token); - memoryStream.Seek(0, SeekOrigin.Begin); - byte[] envBlock = memoryStream.ToArray(); + await continuation.CopyToAsync(memoryStream, CopyBufferSize, token); + return ReadEnvironmentCore(memoryStream); + } + + private Dictionary ReadEnvironmentCore(MemoryStream stream) + { + stream.Seek(0, SeekOrigin.Begin); + byte[] envBlock = stream.ToArray(); if (envBlock.Length != (long)ExpectedSizeInBytes) throw new ApplicationException($"ProcessEnvironment continuation length did not match expected length. 
Expected: {ExpectedSizeInBytes} bytes, Received: {envBlock.Length} bytes"); + var env = new Dictionary(); int cursor = 0; UInt32 nElements = BitConverter.ToUInt32(envBlock, cursor); cursor += sizeof(UInt32); @@ -44,7 +55,7 @@ public static ProcessEnvironmentHelper Parse(byte[] payload) { string pair = IpcHelpers.ReadString(envBlock, ref cursor); int equalsIdx = pair.IndexOf('='); - env[pair.Substring(0,equalsIdx)] = equalsIdx != pair.Length - 1 ? pair.Substring(equalsIdx+1) : ""; + env[pair.Substring(0, equalsIdx)] = equalsIdx != pair.Length - 1 ? pair.Substring(equalsIdx + 1) : ""; } return env; diff --git a/src/Microsoft.Diagnostics.NETCore.Client/Microsoft.Diagnostics.NETCore.Client.csproj b/src/Microsoft.Diagnostics.NETCore.Client/Microsoft.Diagnostics.NETCore.Client.csproj index ffba4a8aa9..37668038de 100644 --- a/src/Microsoft.Diagnostics.NETCore.Client/Microsoft.Diagnostics.NETCore.Client.csproj +++ b/src/Microsoft.Diagnostics.NETCore.Client/Microsoft.Diagnostics.NETCore.Client.csproj @@ -1,7 +1,7 @@  Library - netstandard2.0;netcoreapp2.1 + netstandard2.0;netcoreapp3.1 Microsoft.Diagnostics.NETCore.Client .NET Core Diagnostics Client Library 0.2.0 @@ -19,15 +19,14 @@ - + - + + + - - - diff --git a/src/Microsoft.Diagnostics.NETCore.Client/StreamExtensions.cs b/src/Microsoft.Diagnostics.NETCore.Client/StreamExtensions.cs new file mode 100644 index 0000000000..3cf6f97a81 --- /dev/null +++ b/src/Microsoft.Diagnostics.NETCore.Client/StreamExtensions.cs @@ -0,0 +1,34 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. +// See the LICENSE file in the project root for more information. + +using System.IO; +using System.Threading; +using System.Threading.Tasks; + +namespace Microsoft.Diagnostics.NETCore.Client +{ + internal static class StreamExtensions + { + public static async Task ReadBytesAsync(this Stream stream, int length, CancellationToken cancellationToken) + { + byte[] buffer = new byte[length]; + + int totalRead = 0; + int remaining = length; + while (remaining > 0) + { + int read = await stream.ReadAsync(buffer, totalRead, remaining, cancellationToken); + if (0 == read) + { + throw new EndOfStreamException(); + } + + remaining -= read; + totalRead += read; + } + + return buffer; + } + } +} diff --git a/src/Microsoft.Diagnostics.Repl/Command/CommandProcessor.cs b/src/Microsoft.Diagnostics.Repl/Command/CommandProcessor.cs index cddc21a071..cce0c9a8b8 100644 --- a/src/Microsoft.Diagnostics.Repl/Command/CommandProcessor.cs +++ b/src/Microsoft.Diagnostics.Repl/Command/CommandProcessor.cs @@ -240,14 +240,20 @@ private void BuildParser() private void OnException(Exception ex, InvocationContext context) { + if (ex is TargetInvocationException) + { + ex = ex.InnerException; + } if (ex is NullReferenceException || ex is ArgumentException || ex is ArgumentNullException || ex is ArgumentOutOfRangeException || - ex is NotImplementedException) { + ex is NotImplementedException) + { context.Console.Error.WriteLine(ex.ToString()); } - else { + else + { context.Console.Error.WriteLine(ex.Message); } Trace.TraceError(ex.ToString()); @@ -368,18 +374,11 @@ internal bool InvokeHelp(Parser parser, IServiceProvider services) private void Invoke(MethodInfo methodInfo, InvocationContext context, Parser parser, IServiceProvider services) { - try - { - object instance = _factory(services); - SetProperties(context, parser, services, instance); + object instance = _factory(services); + SetProperties(context, parser, 
services, instance); - object[] arguments = BuildArguments(methodInfo, services); - methodInfo.Invoke(instance, arguments); - } - catch (TargetInvocationException ex) - { - throw ex.InnerException; - } + object[] arguments = BuildArguments(methodInfo, services); + methodInfo.Invoke(instance, arguments); } private void SetProperties(InvocationContext context, Parser parser, IServiceProvider services, object instance) diff --git a/src/Microsoft.Diagnostics.TestHelpers/CliDebuggeeCompiler.cs b/src/Microsoft.Diagnostics.TestHelpers/CliDebuggeeCompiler.cs index e51c3be9de..2b3feff338 100644 --- a/src/Microsoft.Diagnostics.TestHelpers/CliDebuggeeCompiler.cs +++ b/src/Microsoft.Diagnostics.TestHelpers/CliDebuggeeCompiler.cs @@ -51,7 +51,7 @@ private static Dictionary GetBuildProperties(TestConfiguration co protected override string GetFramework(TestConfiguration config) { - return config.BuildProjectFramework ?? "netcoreapp2.1"; + return config.BuildProjectFramework ?? "netcoreapp3.1"; } protected override string GetDebuggeeBinaryDirPath(string debuggeeProjectDirPath, string framework, string runtime) diff --git a/src/Microsoft.Diagnostics.TestHelpers/DotNetBuildDebuggeeTestStep.cs b/src/Microsoft.Diagnostics.TestHelpers/DotNetBuildDebuggeeTestStep.cs index 58f941e4cc..c8225d885e 100644 --- a/src/Microsoft.Diagnostics.TestHelpers/DotNetBuildDebuggeeTestStep.cs +++ b/src/Microsoft.Diagnostics.TestHelpers/DotNetBuildDebuggeeTestStep.cs @@ -188,10 +188,12 @@ protected async Task Restore(string extraArgs, ITestOutputHelper output) } output.WriteLine("Launching {0} {1}", DotNetToolPath, args); ProcessRunner runner = new ProcessRunner(DotNetToolPath, args). - WithWorkingDirectory(DebuggeeSolutionDirPath). - WithLog(output). - WithTimeout(TimeSpan.FromMinutes(10)). // restore can be painfully slow - WithExpectedExitCode(0); + WithEnvironmentVariable("DOTNET_MULTILEVEL_LOOKUP", "0"). + WithEnvironmentVariable("DOTNET_ROOT", Path.GetDirectoryName(DotNetToolPath)). + WithWorkingDirectory(DebuggeeSolutionDirPath). + WithLog(output). + WithTimeout(TimeSpan.FromMinutes(10)). 
// restore can be painfully slow + WithExpectedExitCode(0); if (OS.Kind != OSKind.Windows && Environment.GetEnvironmentVariable("HOME") == null) { diff --git a/src/Microsoft.Diagnostics.TestHelpers/Microsoft.Diagnostics.TestHelpers.csproj b/src/Microsoft.Diagnostics.TestHelpers/Microsoft.Diagnostics.TestHelpers.csproj index 65c16bf5d7..6f7dac27ab 100644 --- a/src/Microsoft.Diagnostics.TestHelpers/Microsoft.Diagnostics.TestHelpers.csproj +++ b/src/Microsoft.Diagnostics.TestHelpers/Microsoft.Diagnostics.TestHelpers.csproj @@ -1,6 +1,6 @@ - netcoreapp2.1 + netcoreapp3.1 true ;1591;1701 false diff --git a/src/Microsoft.Diagnostics.TestHelpers/PrebuiltDebuggeeCompiler.cs b/src/Microsoft.Diagnostics.TestHelpers/PrebuiltDebuggeeCompiler.cs index 0c6cb51f07..c7544da462 100644 --- a/src/Microsoft.Diagnostics.TestHelpers/PrebuiltDebuggeeCompiler.cs +++ b/src/Microsoft.Diagnostics.TestHelpers/PrebuiltDebuggeeCompiler.cs @@ -32,7 +32,7 @@ public PrebuiltDebuggeeCompiler(TestConfiguration config, string debuggeeName) public Task Execute(ITestOutputHelper output) { - return Task.Factory.StartNew(() => new DebuggeeConfiguration(_sourcePath, _binaryPath, _binaryExePath)); + return Task.FromResult(new DebuggeeConfiguration(_sourcePath, _binaryPath, _binaryExePath)); } } } \ No newline at end of file diff --git a/src/Microsoft.Diagnostics.TestHelpers/SdkPrebuiltDebuggeeCompiler.cs b/src/Microsoft.Diagnostics.TestHelpers/SdkPrebuiltDebuggeeCompiler.cs index 53fd4187fc..67e374a0fd 100644 --- a/src/Microsoft.Diagnostics.TestHelpers/SdkPrebuiltDebuggeeCompiler.cs +++ b/src/Microsoft.Diagnostics.TestHelpers/SdkPrebuiltDebuggeeCompiler.cs @@ -31,7 +31,7 @@ public SdkPrebuiltDebuggeeCompiler(TestConfiguration config, string debuggeeName public Task Execute(ITestOutputHelper output) { - return Task.Factory.StartNew(() => new DebuggeeConfiguration(_sourcePath, _binaryPath, _binaryExePath)); + return Task.FromResult(new DebuggeeConfiguration(_sourcePath, _binaryPath, _binaryExePath)); } } } \ No newline at end of file diff --git a/src/Microsoft.Diagnostics.TestHelpers/TestConfiguration.cs b/src/Microsoft.Diagnostics.TestHelpers/TestConfiguration.cs index c5bb7ee9a3..fb7ce7935c 100644 --- a/src/Microsoft.Diagnostics.TestHelpers/TestConfiguration.cs +++ b/src/Microsoft.Diagnostics.TestHelpers/TestConfiguration.cs @@ -612,7 +612,7 @@ public string BuildProjectMicrosoftNetCoreAppVersion } /// - /// The framework type/version used to build the debuggee like "netcoreapp2.1" or "netstandard2.0". + /// The framework type/version used to build the debuggee like "netcoreapp3.1" or "netstandard2.0". 
/// public string BuildProjectFramework { diff --git a/src/SOS/SOS.Extensions/HostServices.cs b/src/SOS/SOS.Extensions/HostServices.cs index 7424a7ae7d..fb5e4ab587 100644 --- a/src/SOS/SOS.Extensions/HostServices.cs +++ b/src/SOS/SOS.Extensions/HostServices.cs @@ -236,7 +236,7 @@ private HResult RegisterDebuggerServices( hr = DebuggerServices.GetSymbolPath(out string symbolPath); if (hr == HResult.S_OK) { - if (!_symbolService.ParseSymbolPath(symbolPath)) + if (!_symbolService.ParseSymbolPathFixDefault(symbolPath)) { Trace.TraceError("ParseSymbolPath FAILED: {0}", symbolPath); } diff --git a/src/SOS/SOS.Extensions/ModuleServiceFromDebuggerServices.cs b/src/SOS/SOS.Extensions/ModuleServiceFromDebuggerServices.cs index f3bc44177d..3ff4cb5c16 100644 --- a/src/SOS/SOS.Extensions/ModuleServiceFromDebuggerServices.cs +++ b/src/SOS/SOS.Extensions/ModuleServiceFromDebuggerServices.cs @@ -4,13 +4,10 @@ using Microsoft.Diagnostics.DebugServices; using Microsoft.Diagnostics.DebugServices.Implementation; -using Microsoft.Diagnostics.Runtime; using Microsoft.Diagnostics.Runtime.Interop; using Microsoft.Diagnostics.Runtime.Utilities; -using System; using System.Collections.Generic; using System.Diagnostics; -using System.IO; using System.Runtime.InteropServices; namespace SOS.Extensions @@ -26,6 +23,7 @@ class ModuleFromDebuggerServices : Module, IExportSymbols, IModuleSymbols private const uint InvalidTimeStamp = 0xFFFFFFFE; private readonly ModuleServiceFromDebuggerServices _moduleService; + private VersionData _versionData; private string _versionString; public ModuleFromDebuggerServices( @@ -77,17 +75,17 @@ public override VersionData VersionData int minor = (int)fileInfo.dwFileVersionMS & 0xffff; int revision = (int)fileInfo.dwFileVersionLS >> 16; int patch = (int)fileInfo.dwFileVersionLS & 0xffff; - base.VersionData = new VersionData(major, minor, revision, patch); + _versionData = new VersionData(major, minor, revision, patch); } else { if (_moduleService.Target.OperatingSystem != OSPlatform.Windows) { - GetVersionFromVersionString(); + _versionData = GetVersion(); } } } - return base.VersionData; + return _versionData; } } diff --git a/src/SOS/SOS.Hosting/SOSLibrary.cs b/src/SOS/SOS.Hosting/SOSLibrary.cs index 6d0ce843fa..10d45761bb 100644 --- a/src/SOS/SOS.Hosting/SOSLibrary.cs +++ b/src/SOS/SOS.Hosting/SOSLibrary.cs @@ -107,11 +107,11 @@ private void Initialize() // This is a workaround for the Microsoft SDK docker images. Can fail when LoadLibrary uses libdl.so to load the SOS module. if (RuntimeInformation.IsOSPlatform(OSPlatform.Linux)) { - throw new DllNotFoundException("Problem loading SOS module. Try installing libc6-dev (apt-get install libc6-dev) to work around this problem.", ex); + throw new DllNotFoundException($"Problem loading SOS module from {sosPath}. 
Try installing libc6-dev (apt-get install libc6-dev) to work around this problem.", ex); } else { - throw; + throw new DllNotFoundException($"Problem loading SOS module from {sosPath}", ex); } } if (_sosLibrary == IntPtr.Zero) diff --git a/src/SOS/SOS.Hosting/SymbolServiceExtensions.cs b/src/SOS/SOS.Hosting/SymbolServiceExtensions.cs index 21b4a629b1..285b1cace5 100644 --- a/src/SOS/SOS.Hosting/SymbolServiceExtensions.cs +++ b/src/SOS/SOS.Hosting/SymbolServiceExtensions.cs @@ -21,6 +21,24 @@ public static class SymbolServiceExtensions // HRESULT_FROM_WIN32(ERROR_INSUFFICIENT_BUFFER) const int E_INSUFFICIENT_BUFFER = unchecked((int)0x8007007a); + /// + /// Set the windows symbol path converting the default "srv*" to the cached public symbol server URL. + /// + /// The windows symbol path to translate and set + /// if false, error parsing symbol path + public static bool ParseSymbolPathFixDefault( + this ISymbolService symbolService, + string symbolPath) + { + // Translate dbgeng's default .sympath to what the public version actually does. Normally "srv*" + // means no caching and the server path depends on whether dbgeng is internal or public. + if (symbolPath.ToLowerInvariant() == "srv*") + { + symbolPath = "cache*;SRV*https://msdl.microsoft.com/download/symbols"; + } + return symbolService.ParseSymbolPath(symbolPath); + } + /// /// Metadata locator helper for the DAC. /// diff --git a/src/SOS/SOS.Hosting/SymbolServiceWrapper.cs b/src/SOS/SOS.Hosting/SymbolServiceWrapper.cs index a5cee29318..ea90410b6a 100644 --- a/src/SOS/SOS.Hosting/SymbolServiceWrapper.cs +++ b/src/SOS/SOS.Hosting/SymbolServiceWrapper.cs @@ -157,16 +157,16 @@ private bool InitializeSymbolStore( /// /// Parse the Windows sympath format /// - /// windows symbol path + /// windows symbol path /// if false, failure private bool ParseSymbolPath( IntPtr self, - string windowsSymbolPath) + string symbolPath) { - if (windowsSymbolPath == null) { + if (string.IsNullOrWhiteSpace(symbolPath)) { return false; } - return _symbolService.ParseSymbolPath(windowsSymbolPath); + return _symbolService.ParseSymbolPathFixDefault(symbolPath); } /// diff --git a/src/SOS/SOS.UnitTests/ConfigFiles/Unix/Debugger.Tests.Config.txt b/src/SOS/SOS.UnitTests/ConfigFiles/Unix/Debugger.Tests.Config.txt index 60c10ec772..716dbbee7c 100644 --- a/src/SOS/SOS.UnitTests/ConfigFiles/Unix/Debugger.Tests.Config.txt +++ b/src/SOS/SOS.UnitTests/ConfigFiles/Unix/Debugger.Tests.Config.txt @@ -24,25 +24,17 @@ true false - true - false - false - false - - true - false - false - net6.0 net5.0 netcoreapp3.1 - netcoreapp2.1 $(RepoRootDir)/src/SOS/SOS.UnitTests/Debuggees sdk.prebuilt $(RootBinDir) - $(DotNetRoot)/dotnet + + + $(RepoRootDir)/.dotnet/dotnet dotnet6=https://dnceng.pkgs.visualstudio.com/public/_packaging/dotnet6/nuget/v3/index.json; @@ -68,11 +60,6 @@ $(RuntimeVersion31) $(DotNetRoot)/shared/Microsoft.NETCore.App/$(RuntimeFrameworkVersion) - @@ -97,10 +84,6 @@ netcoreapp3.1 $(RuntimeVersion31) - + $(RepoRootDir)\.dotnet\dotnet.exe dotnet6=https://dnceng.pkgs.visualstudio.com/public/_packaging/dotnet6/nuget/v3/index.json; @@ -80,11 +81,6 @@ $(RuntimeVersion31) $(DotNetRoot)/shared/Microsoft.NETCore.App/$(RuntimeFrameworkVersion) - @@ -105,32 +101,6 @@ netcoreapp3.1 $(RuntimeVersion31) - - - - -
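
As context for the SymbolServiceExtensions hunk above, ParseSymbolPathFixDefault special-cases a bare "srv*" (dbgeng's default sympath) and expands it to an explicit cache plus the public Microsoft symbol server before delegating to the regular parser. The sketch below shows only that string translation under stated assumptions; NormalizeSymbolPath is a hypothetical helper name, not an API in this repository.

using System;

internal static class SymbolPathSketch
{
    private const string PublicSymbolServerPath = "cache*;SRV*https://msdl.microsoft.com/download/symbols";

    public static string NormalizeSymbolPath(string symbolPath)
    {
        if (string.IsNullOrWhiteSpace(symbolPath))
            throw new ArgumentException("Symbol path must be non-empty", nameof(symbolPath));

        // A bare "srv*" means "default server, no cache" in dbgeng; expand it to
        // something the public tooling can act on.
        return string.Equals(symbolPath, "srv*", StringComparison.OrdinalIgnoreCase)
            ? PublicSymbolServerPath
            : symbolPath;
    }
}

With these assumptions, NormalizeSymbolPath("srv*") yields the cached public-server path, while any explicitly configured sympath passes through unchanged, which mirrors the behavior ParseSymbolPathFixDefault layers on top of ParseSymbolPath.
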