diff --git a/cspell.json b/cspell.json index 25bb1e8f711..2eb45589309 100644 --- a/cspell.json +++ b/cspell.json @@ -62,6 +62,7 @@ "openapi", "Pids", "PKCS", + "quantile", "rclsid", "reentrancy", "REFCLSID", diff --git a/documentation/api/livemetrics-custom.md b/documentation/api/livemetrics-custom.md index 1854ce51394..ecba82ccf90 100644 --- a/documentation/api/livemetrics-custom.md +++ b/documentation/api/livemetrics-custom.md @@ -60,7 +60,9 @@ The expected content type is `application/json`. ## Examples -### Sample Request +### EventCounter + +#### Sample Request ```http POST /livemetrics?pid=21632&durationSeconds=60 HTTP/1.1 @@ -81,7 +83,7 @@ Authorization: Bearer fffffffffffffffffffffffffffffffffffffffffff= } ``` -### Sample Response +#### Sample Response ```http HTTP/1.1 200 OK @@ -113,6 +115,50 @@ Location: localhost:52323/operations/67f07e40-5cca-4709-9062-26302c484f18 } ``` +### System.Diagnostics.Metrics + +#### Sample Request + +```http +GET /livemetrics?pid=21632&durationSeconds=60 HTTP/1.1 +Host: localhost:52323 +Authorization: Bearer fffffffffffffffffffffffffffffffffffffffffff= + +{ + "includeDefaultProviders": false, + "providers": [ + { + "providerName": "CustomProvider", + "counterNames": [ + "myHistogram" + ] + } + ] +} +``` + +#### Sample Histogram Response + +```http +HTTP/1.1 200 OK +Content-Type: application/json-seq +Location: localhost:52323/operations/67f07e40-5cca-4709-9062-26302c484f18 + +{ + "timestamp": "2021-08-31T16:58:39.7514031+00:00", + "provider": "CustomProvider", + "name": "myHistogram", + "displayName": "myHistogram", + "unit": null, + "counterType": "Metric", + "value": { + "0.5": 2892, + "0.95": 4848, + "0.99": 4984 + } +} +``` + ## Supported Runtimes | Operating System | Runtime Version | diff --git a/documentation/configuration/metrics-configuration.md b/documentation/configuration/metrics-configuration.md index 783a7a0874e..8c81287c060 100644 --- a/documentation/configuration/metrics-configuration.md +++ 
b/documentation/configuration/metrics-configuration.md @@ -168,6 +168,72 @@ When `CounterNames` are not specified, all the counters associated with the `Pro [7.1+] Custom metrics support labels for metadata. Metadata cannot include commas (`,`); the inclusion of a comma in metadata will result in all metadata being removed from the custom metric. +## Limit How Many Histograms To Track (8.0+) + +For System.Diagnostics.Metrics, `dotnet monitor` allows you to set the maximum number of histograms that can be tracked. Each unique combination of provider name, histogram name, and dimension values counts as one histogram. Tracking more histograms uses more memory in the target process so this bound guards against unintentional high memory use. `MaxHistograms` has a default value of `20`. + +
+ JSON + + ```json + { + "GlobalCounter": { + "MaxHistograms": 5 + } + } + ``` +
+ +
+ Kubernetes ConfigMap + + ```yaml + GlobalCounter__MaxHistograms: "5" + ``` +
+ +
+ Kubernetes Environment Variables + + ```yaml + - name: DotnetMonitor_GlobalCounter__MaxHistograms + value: "5" + ``` +
+ +## Limit How Many Time Series To Track (8.0+) + +For System.Diagnostics.Metrics, `dotnet monitor` allows you to set the maximum number of time series that can be tracked. Each unique combination of provider name, metric name, and dimension values counts as one time series. Tracking more time series uses more memory in the target process so this bound guards against unintentional high memory use. `MaxTimeSeries` has a default value of `1000`. + +
+ JSON + + ```json + { + "GlobalCounter": { + "MaxTimeSeries": 500 + } + } + ``` +
+ +
+ Kubernetes ConfigMap + + ```yaml + GlobalCounter__MaxTimeSeries: "500" + ``` +
+ +
+ Kubernetes Environment Variables + + ```yaml + - name: DotnetMonitor_GlobalCounter__MaxTimeSeries + value: "500" + ``` +
+ ## Disable default providers In addition to enabling custom providers, `dotnet monitor` also allows you to disable collection of the default providers. You can do so via the following configuration: diff --git a/documentation/openapi.json b/documentation/openapi.json index c5413624096..189a55a4cc9 100644 --- a/documentation/openapi.json +++ b/documentation/openapi.json @@ -1461,6 +1461,9 @@ "type": "string" }, "nullable": true + }, + "metricType": { + "$ref": "#/components/schemas/MetricProviderType" } }, "additionalProperties": false @@ -1553,6 +1556,14 @@ }, "additionalProperties": false }, + "MetricProviderType": { + "enum": [ + "EventCounter", + "Meter", + "All" + ], + "type": "string" + }, "OperationError": { "type": "object", "properties": { diff --git a/documentation/schema.json b/documentation/schema.json index 90c397bd9fc..0f3a2756fba 100644 --- a/documentation/schema.json +++ b/documentation/schema.json @@ -877,6 +877,28 @@ "default": 5.0, "maximum": 86400.0, "minimum": 1.0 + }, + "MaxHistograms": { + "type": [ + "integer", + "null" + ], + "description": "The maximum number of histograms that can be tracked. Each unique combination of provider name, histogram name, and dimension values counts as one histogram. Tracking more histograms uses more memory in the target process so this bound guards against unintentional high memory use.", + "format": "int32", + "default": 20, + "maximum": 2147483647.0, + "minimum": 1.0 + }, + "MaxTimeSeries": { + "type": [ + "integer", + "null" + ], + "description": "The maximum number of time series that can be tracked. Each unique combination of provider name, metric name, and dimension values counts as one time series. 
Tracking more time series uses more memory in the target process so this bound guards against unintentional high memory use.", + "format": "int32", + "default": 1000, + "maximum": 2147483647.0, + "minimum": 1.0 } } }, @@ -1304,9 +1326,36 @@ "items": { "type": "string" } + }, + "MetricType": { + "description": "The type of metrics this provider consumes", + "default": "All", + "oneOf": [ + { + "type": "null" + }, + { + "$ref": "#/definitions/MetricProviderType" + } + ] } } }, + "MetricProviderType": { + "type": "string", + "description": "", + "x-enumFlags": true, + "x-enumNames": [ + "EventCounter", + "Meter", + "All" + ], + "enum": [ + "EventCounter", + "Meter", + "All" + ] + }, "StorageOptions": { "type": "object", "additionalProperties": false, @@ -1636,6 +1685,16 @@ "items": { "type": "string" } + }, + "MetricType": { + "oneOf": [ + { + "type": "null" + }, + { + "$ref": "#/definitions/MetricProviderType" + } + ] } } }, diff --git a/eng/Version.Details.xml b/eng/Version.Details.xml index 21788b2aefb..d0b989594ae 100644 --- a/eng/Version.Details.xml +++ b/eng/Version.Details.xml @@ -1,12 +1,12 @@ - + https://github.com/dotnet/diagnostics - ab4b95e5762fd323de7fd3bca8dd86eaaf3ecaf8 + 615b21dd10832c0faaaa93d5f177e491ce9b3a09 - + https://github.com/dotnet/diagnostics - ab4b95e5762fd323de7fd3bca8dd86eaaf3ecaf8 + 615b21dd10832c0faaaa93d5f177e491ce9b3a09 https://github.com/dotnet/command-line-api diff --git a/eng/Versions.props b/eng/Versions.props index 35525f3658b..d20339e5adb 100644 --- a/eng/Versions.props +++ b/eng/Versions.props @@ -66,8 +66,8 @@ 2.0.0-beta4.23073.1 - 6.0.0-preview.23077.1 - 6.0.0-preview.23077.1 + 6.0.0-preview.23080.1 + 6.0.0-preview.23080.1 8.0.0-preview1.23076.2 diff --git a/src/Microsoft.Diagnostics.Monitoring.Options/GlobalCounterOptions.cs b/src/Microsoft.Diagnostics.Monitoring.Options/GlobalCounterOptions.cs index 268a42c86d1..54e601769c1 100644 --- a/src/Microsoft.Diagnostics.Monitoring.Options/GlobalCounterOptions.cs +++ 
b/src/Microsoft.Diagnostics.Monitoring.Options/GlobalCounterOptions.cs @@ -18,11 +18,31 @@ public class GlobalCounterOptions [Range(IntervalMinSeconds, IntervalMaxSeconds)] [DefaultValue(GlobalCounterOptionsDefaults.IntervalSeconds)] public float? IntervalSeconds { get; set; } + + [Display( + ResourceType = typeof(OptionsDisplayStrings), + Description = nameof(OptionsDisplayStrings.DisplayAttributeDescription_MetricsOptions_MaxHistograms))] + [DefaultValue(GlobalCounterOptionsDefaults.MaxHistograms)] + [Range(1, int.MaxValue)] + public int? MaxHistograms { get; set; } + + [Display( + ResourceType = typeof(OptionsDisplayStrings), + Description = nameof(OptionsDisplayStrings.DisplayAttributeDescription_MetricsOptions_MaxTimeSeries))] + [DefaultValue(GlobalCounterOptionsDefaults.MaxTimeSeries)] + [Range(1, int.MaxValue)] + public int? MaxTimeSeries { get; set; } } internal static class GlobalCounterOptionsExtensions { public static float GetIntervalSeconds(this GlobalCounterOptions options) => options.IntervalSeconds.GetValueOrDefault(GlobalCounterOptionsDefaults.IntervalSeconds); + + public static int GetMaxHistograms(this GlobalCounterOptions options) => + options.MaxHistograms.GetValueOrDefault(GlobalCounterOptionsDefaults.MaxHistograms); + + public static int GetMaxTimeSeries(this GlobalCounterOptions options) => + options.MaxTimeSeries.GetValueOrDefault(GlobalCounterOptionsDefaults.MaxTimeSeries); } } diff --git a/src/Microsoft.Diagnostics.Monitoring.Options/GlobalCounterOptionsDefaults.cs b/src/Microsoft.Diagnostics.Monitoring.Options/GlobalCounterOptionsDefaults.cs index 13b7479a9bc..0b1ac21ea8a 100644 --- a/src/Microsoft.Diagnostics.Monitoring.Options/GlobalCounterOptionsDefaults.cs +++ b/src/Microsoft.Diagnostics.Monitoring.Options/GlobalCounterOptionsDefaults.cs @@ -6,5 +6,9 @@ namespace Microsoft.Diagnostics.Monitoring.WebApi internal static class GlobalCounterOptionsDefaults { public const float IntervalSeconds = 5.0f; + + public const int MaxHistograms = 
20; + + public const int MaxTimeSeries = 1000; } } diff --git a/src/Microsoft.Diagnostics.Monitoring.Options/MetricsOptions.cs b/src/Microsoft.Diagnostics.Monitoring.Options/MetricsOptions.cs index 39497ce6f10..d52b7f46e6f 100644 --- a/src/Microsoft.Diagnostics.Monitoring.Options/MetricsOptions.cs +++ b/src/Microsoft.Diagnostics.Monitoring.Options/MetricsOptions.cs @@ -1,6 +1,7 @@ // Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. +using System; using System.Collections.Generic; using System.ComponentModel; using System.ComponentModel.DataAnnotations; @@ -57,5 +58,19 @@ public class MetricProvider ResourceType = typeof(OptionsDisplayStrings), Description = nameof(OptionsDisplayStrings.DisplayAttributeDescription_MetricProvider_CounterNames))] public List CounterNames { get; set; } = new List(0); + + [Display( + ResourceType = typeof(OptionsDisplayStrings), + Description = nameof(OptionsDisplayStrings.DisplayAttributeDescription_MetricProvider_MetricType))] + [DefaultValue(MetricsOptionsDefaults.MetricType)] + public MetricProviderType? 
MetricType { get; set; } + } + + [Flags] + public enum MetricProviderType + { + EventCounter = 0x1, + Meter = 0x2, + All = 0xFF } } diff --git a/src/Microsoft.Diagnostics.Monitoring.Options/MetricsOptionsDefaults.cs b/src/Microsoft.Diagnostics.Monitoring.Options/MetricsOptionsDefaults.cs index a3543e33075..30edfea8001 100644 --- a/src/Microsoft.Diagnostics.Monitoring.Options/MetricsOptionsDefaults.cs +++ b/src/Microsoft.Diagnostics.Monitoring.Options/MetricsOptionsDefaults.cs @@ -10,5 +10,7 @@ internal static class MetricsOptionsDefaults public const int MetricCount = 3; public const bool IncludeDefaultProviders = true; + + public const MetricProviderType MetricType = MetricProviderType.All; } } diff --git a/src/Microsoft.Diagnostics.Monitoring.Options/OptionsDisplayStrings.Designer.cs b/src/Microsoft.Diagnostics.Monitoring.Options/OptionsDisplayStrings.Designer.cs index abc218f8c15..e0416dddc5f 100644 --- a/src/Microsoft.Diagnostics.Monitoring.Options/OptionsDisplayStrings.Designer.cs +++ b/src/Microsoft.Diagnostics.Monitoring.Options/OptionsDisplayStrings.Designer.cs @@ -1148,6 +1148,15 @@ public static string DisplayAttributeDescription_MetricProvider_CounterNames { } } + /// + /// Looks up a localized string similar to The type of metrics this provider consumes. + /// + public static string DisplayAttributeDescription_MetricProvider_MetricType { + get { + return ResourceManager.GetString("DisplayAttributeDescription_MetricProvider_MetricType", resourceCulture); + } + } + /// /// Looks up a localized string similar to The name of the custom metrics provider.. /// @@ -1184,6 +1193,24 @@ public static string DisplayAttributeDescription_MetricsOptions_IncludeDefaultPr } } + /// + /// Looks up a localized string similar to The maximum number of histograms that can be tracked. Each unique combination of provider name, histogram name, and dimension values counts as one histogram. 
Tracking more histograms uses more memory in the target process so this bound guards against unintentional high memory use.. + /// + public static string DisplayAttributeDescription_MetricsOptions_MaxHistograms { + get { + return ResourceManager.GetString("DisplayAttributeDescription_MetricsOptions_MaxHistograms", resourceCulture); + } + } + + /// + /// Looks up a localized string similar to The maximum number of time series that can be tracked. Each unique combination of provider name, metric name, and dimension values counts as one time series. Tracking more time series uses more memory in the target process so this bound guards against unintentional high memory use.. + /// + public static string DisplayAttributeDescription_MetricsOptions_MaxTimeSeries { + get { + return ResourceManager.GetString("DisplayAttributeDescription_MetricsOptions_MaxTimeSeries", resourceCulture); + } + } + /// /// Looks up a localized string similar to Amount of data points to store per metric.. /// diff --git a/src/Microsoft.Diagnostics.Monitoring.Options/OptionsDisplayStrings.resx b/src/Microsoft.Diagnostics.Monitoring.Options/OptionsDisplayStrings.resx index 8b028a26fce..d162e18422f 100644 --- a/src/Microsoft.Diagnostics.Monitoring.Options/OptionsDisplayStrings.resx +++ b/src/Microsoft.Diagnostics.Monitoring.Options/OptionsDisplayStrings.resx @@ -799,4 +799,16 @@ A mapping of event payload field names to their expected value. A subset of the payload fields may be specified. The description provided for the PayloadFilter parameter on TraceEventFilter. + + The maximum number of histograms that can be tracked. Each unique combination of provider name, histogram name, and dimension values counts as one histogram. Tracking more histograms uses more memory in the target process so this bound guards against unintentional high memory use. + The description provided for the MaxHistograms parameter on MetricsOptions. + + + The maximum number of time series that can be tracked. 
Each unique combination of provider name, metric name, and dimension values counts as one time series. Tracking more time series uses more memory in the target process so this bound guards against unintentional high memory use. + The description provided for the MaxTimeSeries parameter on MetricsOptions. + + + The type of metrics this provider consumes + The description provided for the MetricType parameter on MetricProvider. + \ No newline at end of file diff --git a/src/Microsoft.Diagnostics.Monitoring.WebApi/Controllers/DiagController.Metrics.cs b/src/Microsoft.Diagnostics.Monitoring.WebApi/Controllers/DiagController.Metrics.cs index 9ba14ee6fa1..8109e02def1 100644 --- a/src/Microsoft.Diagnostics.Monitoring.WebApi/Controllers/DiagController.Metrics.cs +++ b/src/Microsoft.Diagnostics.Monitoring.WebApi/Controllers/DiagController.Metrics.cs @@ -42,7 +42,7 @@ public Task CaptureMetrics( { ProcessKey? processKey = Utilities.GetProcessKey(pid, uid, name); - EventPipeCounterPipelineSettings settings = EventCounterSettingsFactory.CreateSettings( + MetricsPipelineSettings settings = MetricsSettingsFactory.CreateSettings( _counterOptions.CurrentValue, includeDefaults: true, durationSeconds: durationSeconds); @@ -91,7 +91,7 @@ public Task CaptureMetricsCustom( { ProcessKey? 
processKey = Utilities.GetProcessKey(pid, uid, name); - EventPipeCounterPipelineSettings settings = EventCounterSettingsFactory.CreateSettings( + MetricsPipelineSettings settings = MetricsSettingsFactory.CreateSettings( _counterOptions.CurrentValue, durationSeconds, configuration); diff --git a/src/Microsoft.Diagnostics.Monitoring.WebApi/IMetricsOperationFactory.cs b/src/Microsoft.Diagnostics.Monitoring.WebApi/IMetricsOperationFactory.cs index 392d4bbc351..dcee8724a9c 100644 --- a/src/Microsoft.Diagnostics.Monitoring.WebApi/IMetricsOperationFactory.cs +++ b/src/Microsoft.Diagnostics.Monitoring.WebApi/IMetricsOperationFactory.cs @@ -15,6 +15,6 @@ internal interface IMetricsOperationFactory /// IArtifactOperation Create( IEndpointInfo endpointInfo, - EventPipeCounterPipelineSettings settings); + MetricsPipelineSettings settings); } } diff --git a/src/Microsoft.Diagnostics.Monitoring.WebApi/LoggingExtensions.cs b/src/Microsoft.Diagnostics.Monitoring.WebApi/LoggingExtensions.cs index a4ff8bf297a..9ca3166c56b 100644 --- a/src/Microsoft.Diagnostics.Monitoring.WebApi/LoggingExtensions.cs +++ b/src/Microsoft.Diagnostics.Monitoring.WebApi/LoggingExtensions.cs @@ -99,6 +99,18 @@ internal static class LoggingExtensions logLevel: LogLevel.Warning, formatString: Strings.LogFormatString_MetricsUnprocessed); + private static readonly Action _counterEndedPayload = + LoggerMessage.Define( + eventId: new EventId(16, "CounterEndedPayload"), + logLevel: LogLevel.Warning, + formatString: Strings.LogFormatString_CounterEndedPayload); + + private static readonly Action _errorPayload = + LoggerMessage.Define( + eventId: new EventId(17, "ErrorPayload"), + logLevel: LogLevel.Warning, + formatString: Strings.LogFormatString_ErrorPayload); + public static void RequestFailed(this ILogger logger, Exception ex) { _requestFailed(logger, ex); @@ -173,5 +185,15 @@ public static void MetricsUnprocessed(this ILogger logger, int count) { _metricsUnprocessed(logger, count, null); } + + public static 
void CounterEndedPayload(this ILogger logger, string counterName) + { + _counterEndedPayload(logger, counterName, null); + } + + public static void ErrorPayload(this ILogger logger, string message) + { + _errorPayload(logger, message, null); + } } } diff --git a/src/Microsoft.Diagnostics.Monitoring.WebApi/Metrics/JsonCounterLogger.cs b/src/Microsoft.Diagnostics.Monitoring.WebApi/Metrics/JsonCounterLogger.cs index b01d754328b..fd4d9ab511f 100644 --- a/src/Microsoft.Diagnostics.Monitoring.WebApi/Metrics/JsonCounterLogger.cs +++ b/src/Microsoft.Diagnostics.Monitoring.WebApi/Metrics/JsonCounterLogger.cs @@ -6,6 +6,7 @@ using System; using System.Buffers; using System.IO; +using System.Linq; using System.Text.Json; using System.Threading.Tasks; @@ -35,34 +36,93 @@ public JsonCounterLogger(Stream stream, ILogger logger) protected override async Task SerializeAsync(ICounterPayload counter) { - await _stream.WriteAsync(JsonSequenceRecordSeparator); + if (counter is ErrorPayload errorPayload) + { + Logger.ErrorPayload(errorPayload.ErrorMessage); + return; + } + else if (counter is CounterEndedPayload) + { + Logger.CounterEndedPayload(counter.Name); + return; + } - _bufferWriter.Clear(); - using (var writer = new Utf8JsonWriter(_bufferWriter, new JsonWriterOptions { Indented = false })) + if (counter is PercentilePayload percentilePayload) { - writer.WriteStartObject(); - writer.WriteString("timestamp", counter.Timestamp); - writer.WriteString("provider", counter.Provider); - writer.WriteString("name", counter.Name); - writer.WriteString("displayName", counter.DisplayName); - writer.WriteString("unit", counter.Unit); - writer.WriteString("counterType", counter.CounterType.ToString()); - - //Some versions of .Net return invalid metric numbers. See https://github.com/dotnet/runtime/pull/46938 - writer.WriteNumber("value", double.IsNaN(counter.Value) ? 
0.0 : counter.Value); - - writer.WriteStartObject("metadata"); - foreach (var kvPair in counter.Metadata) + if (!percentilePayload.Quantiles.Any()) { - writer.WriteString(kvPair.Key, kvPair.Value); + return; } - writer.WriteEndObject(); + await _stream.WriteAsync(JsonSequenceRecordSeparator); + _bufferWriter.Clear(); - writer.WriteEndObject(); + for (int i = 0; i < percentilePayload.Quantiles.Length; i++) + { + if (i > 0) + { + _bufferWriter.Write(JsonSequenceRecordSeparator.Span); + } + Quantile quantile = percentilePayload.Quantiles[i]; + + SerializeCounterValues(counter.Timestamp, + counter.Provider, + counter.Name, + counter.DisplayName, + counter.Unit, + counter.CounterType.ToString(), + CounterUtilities.AppendPercentile(counter.Metadata, quantile.Percentage), + quantile.Value); + + if (i < percentilePayload.Quantiles.Length - 1) + { + _bufferWriter.Write(NewLineSeparator.Span); + } + } + } + else + { + await _stream.WriteAsync(JsonSequenceRecordSeparator); + _bufferWriter.Clear(); + + SerializeCounterValues(counter.Timestamp, + counter.Provider, + counter.Name, + counter.DisplayName, + counter.Unit, + counter.CounterType.ToString(), + counter.Metadata, + counter.Value); } await _stream.WriteAsync(_bufferWriter.WrittenMemory); await _stream.WriteAsync(NewLineSeparator); } + + private void SerializeCounterValues( + DateTime timestamp, + string provider, + string name, + string displayName, + string unit, + string counterType, + string tags, + double value) + { + using var writer = new Utf8JsonWriter(_bufferWriter, new JsonWriterOptions { Indented = false }); + writer.WriteStartObject(); + writer.WriteString("timestamp", timestamp); + writer.WriteString("provider", provider); + writer.WriteString("name", name); + writer.WriteString("displayName", displayName); + writer.WriteString("unit", unit); + writer.WriteString("counterType", counterType); + + writer.WriteString("tags", tags); + + //Some versions of .Net return invalid metric numbers. 
See https://github.com/dotnet/runtime/pull/46938 + writer.WriteNumber("value", double.IsNaN(value) ? 0.0 : value); + + writer.WriteEndObject(); + } } } diff --git a/src/Microsoft.Diagnostics.Monitoring.WebApi/Metrics/MetricsLogger.cs b/src/Microsoft.Diagnostics.Monitoring.WebApi/Metrics/MetricsLogger.cs index a9e5bc2b6e0..3072d7035f3 100644 --- a/src/Microsoft.Diagnostics.Monitoring.WebApi/Metrics/MetricsLogger.cs +++ b/src/Microsoft.Diagnostics.Monitoring.WebApi/Metrics/MetricsLogger.cs @@ -2,6 +2,8 @@ // The .NET Foundation licenses this file to you under the MIT license. using Microsoft.Diagnostics.Monitoring.EventPipe; +using System.Threading; +using System.Threading.Tasks; namespace Microsoft.Diagnostics.Monitoring.WebApi { @@ -19,12 +21,8 @@ public void Log(ICounterPayload metric) _store.AddMetric(metric); } - public void PipelineStarted() - { - } + public Task PipelineStarted(CancellationToken token) => Task.CompletedTask; - public void PipelineStopped() - { - } + public Task PipelineStopped(CancellationToken token) => Task.CompletedTask; } } diff --git a/src/Microsoft.Diagnostics.Monitoring.WebApi/Metrics/MetricsService.cs b/src/Microsoft.Diagnostics.Monitoring.WebApi/Metrics/MetricsService.cs index cbf3c99759b..72cf7d97805 100644 --- a/src/Microsoft.Diagnostics.Monitoring.WebApi/Metrics/MetricsService.cs +++ b/src/Microsoft.Diagnostics.Monitoring.WebApi/Metrics/MetricsService.cs @@ -17,7 +17,7 @@ namespace Microsoft.Diagnostics.Monitoring.WebApi /// internal sealed class MetricsService : BackgroundService { - private EventCounterPipeline _counterPipeline; + private MetricsPipeline _counterPipeline; private readonly IDiagnosticServices _services; private readonly MetricsStoreService _store; private IOptionsMonitor _optionsMonitor; @@ -57,9 +57,9 @@ protected override async Task ExecuteAsync(CancellationToken stoppingToken) //If metric options change, we need to cancel the existing metrics pipeline and restart with the new settings. 
using IDisposable monitorListener = _optionsMonitor.OnChange((_, _) => optionsTokenSource.SafeCancel()); - EventPipeCounterPipelineSettings counterSettings = EventCounterSettingsFactory.CreateSettings(counterOptions, options); + MetricsPipelineSettings counterSettings = MetricsSettingsFactory.CreateSettings(counterOptions, options); - _counterPipeline = new EventCounterPipeline(client, counterSettings, loggers: new[] { new MetricsLogger(_store.MetricsStore) }); + _counterPipeline = new MetricsPipeline(client, counterSettings, loggers: new[] { new MetricsLogger(_store.MetricsStore) }); using var linkedTokenSource = CancellationTokenSource.CreateLinkedTokenSource(stoppingToken, optionsTokenSource.Token); await _counterPipeline.RunAsync(linkedTokenSource.Token); diff --git a/src/Microsoft.Diagnostics.Monitoring.WebApi/Metrics/EventCounterSettingsFactory.cs b/src/Microsoft.Diagnostics.Monitoring.WebApi/Metrics/MetricsSettingsFactory.cs similarity index 67% rename from src/Microsoft.Diagnostics.Monitoring.WebApi/Metrics/EventCounterSettingsFactory.cs rename to src/Microsoft.Diagnostics.Monitoring.WebApi/Metrics/MetricsSettingsFactory.cs index d79e32dc145..4385a11487d 100644 --- a/src/Microsoft.Diagnostics.Monitoring.WebApi/Metrics/EventCounterSettingsFactory.cs +++ b/src/Microsoft.Diagnostics.Monitoring.WebApi/Metrics/MetricsSettingsFactory.cs @@ -12,49 +12,62 @@ namespace Microsoft.Diagnostics.Monitoring.WebApi /// /// Utility class to create metric settings (for both configuration and on demand metrics). 
/// - internal static class EventCounterSettingsFactory + internal static class MetricsSettingsFactory { - public static EventPipeCounterPipelineSettings CreateSettings(GlobalCounterOptions counterOptions, bool includeDefaults, + public static MetricsPipelineSettings CreateSettings(GlobalCounterOptions counterOptions, bool includeDefaults, int durationSeconds) { - return CreateSettings(includeDefaults, durationSeconds, counterOptions.GetIntervalSeconds(), () => new List(0)); + return CreateSettings(includeDefaults, + durationSeconds, + counterOptions.GetIntervalSeconds(), + counterOptions.GetMaxHistograms(), + counterOptions.GetMaxTimeSeries(), + () => new List(0)); } - public static EventPipeCounterPipelineSettings CreateSettings(GlobalCounterOptions counterOptions, MetricsOptions options) + public static MetricsPipelineSettings CreateSettings(GlobalCounterOptions counterOptions, MetricsOptions options) { return CreateSettings(options.IncludeDefaultProviders.GetValueOrDefault(MetricsOptionsDefaults.IncludeDefaultProviders), Timeout.Infinite, counterOptions.GetIntervalSeconds(), + counterOptions.GetMaxHistograms(), + counterOptions.GetMaxTimeSeries(), () => ConvertCounterGroups(options.Providers)); } - public static EventPipeCounterPipelineSettings CreateSettings(GlobalCounterOptions counterOptions, int durationSeconds, + public static MetricsPipelineSettings CreateSettings(GlobalCounterOptions counterOptions, int durationSeconds, Models.EventMetricsConfiguration configuration) { return CreateSettings(configuration.IncludeDefaultProviders, durationSeconds, counterOptions.GetIntervalSeconds(), + counterOptions.GetMaxHistograms(), + counterOptions.GetMaxTimeSeries(), () => ConvertCounterGroups(configuration.Providers)); } - private static EventPipeCounterPipelineSettings CreateSettings(bool includeDefaults, + private static MetricsPipelineSettings CreateSettings(bool includeDefaults, int durationSeconds, float counterInterval, + int maxHistograms, + int 
maxTimeSeries, Func> createCounterGroups) { List eventPipeCounterGroups = createCounterGroups(); if (includeDefaults) { - eventPipeCounterGroups.Add(new EventPipeCounterGroup { ProviderName = MonitoringSourceConfiguration.SystemRuntimeEventSourceName }); - eventPipeCounterGroups.Add(new EventPipeCounterGroup { ProviderName = MonitoringSourceConfiguration.MicrosoftAspNetCoreHostingEventSourceName }); - eventPipeCounterGroups.Add(new EventPipeCounterGroup { ProviderName = MonitoringSourceConfiguration.GrpcAspNetCoreServer }); + eventPipeCounterGroups.Add(new EventPipeCounterGroup { ProviderName = MonitoringSourceConfiguration.SystemRuntimeEventSourceName, Type = CounterGroupType.EventCounter }); + eventPipeCounterGroups.Add(new EventPipeCounterGroup { ProviderName = MonitoringSourceConfiguration.MicrosoftAspNetCoreHostingEventSourceName, Type = CounterGroupType.EventCounter }); + eventPipeCounterGroups.Add(new EventPipeCounterGroup { ProviderName = MonitoringSourceConfiguration.GrpcAspNetCoreServer, Type = CounterGroupType.EventCounter }); } - return new EventPipeCounterPipelineSettings + return new MetricsPipelineSettings { CounterGroups = eventPipeCounterGroups.ToArray(), Duration = Utilities.ConvertSecondsToTimeSpan(durationSeconds), - CounterIntervalSeconds = counterInterval + CounterIntervalSeconds = counterInterval, + MaxHistograms = maxHistograms, + MaxTimeSeries = maxTimeSeries }; } @@ -71,6 +84,9 @@ private static List ConvertCounterGroups(IList ConvertCounterGroups(IList> _allMetrics = new Dictionary>(); private readonly int _maxMetricCount; + private ILogger _logger; - public MetricsStore(int maxMetricCount) + private HashSet _observedErrorMessages = new(); + private HashSet<(string provider, string counter)> _observedEndedCounters = new(); + + public MetricsStore(ILogger logger, int maxMetricCount) { if (maxMetricCount < 1) { throw new ArgumentException(Strings.ErrorMessage_InvalidMetricCount); } _maxMetricCount = maxMetricCount; + _logger = logger; } 
public void AddMetric(ICounterPayload metric) { + if (metric is PercentilePayload payload && !payload.Quantiles.Any()) + { + // If histogram data is not generated in the monitored app, we can get Histogram events that do not contain quantiles. + // For now, we will ignore these events. + return; + } + //Do not accept CounterEnded payloads. + if (metric is CounterEndedPayload counterEnded) + { + if (_observedEndedCounters.Add((counterEnded.Provider, counterEnded.Name))) + { + _logger.CounterEndedPayload(counterEnded.Name); + } + return; + } + if (metric is ErrorPayload errorPayload) + { + if (_observedErrorMessages.Add(errorPayload.ErrorMessage)) + { + // We only show unique errors once. For example, if a rate callback throws an exception, + // we will receive an error message every 5 seconds. However, we only log the message the first time. + // Error payload information is not tied to a particular provider or counter name. + _logger.ErrorPayload(errorPayload.ErrorMessage); + } + return; + } + lock (_allMetrics) { var metricKey = new MetricKey(metric); @@ -91,22 +124,80 @@ public async Task SnapshotMetrics(Stream outputStream, CancellationToken token) foreach (var metricGroup in copy) { ICounterPayload metricInfo = metricGroup.Value.First(); + string metricName = PrometheusDataModel.GetPrometheusNormalizedName(metricInfo.Provider, metricInfo.Name, metricInfo.Unit); - string metricType = "gauge"; - var keyValuePairs = from pair in metricInfo.Metadata select PrometheusDataModel.GetPrometheusNormalizedLabel(pair.Key, pair.Value); - string metricLabels = string.Join(", ", keyValuePairs); + await WriteMetricHeader(metricInfo, writer, metricName); - //TODO Some clr metrics claim to be incrementing, but are really gauges. 
+ foreach (var metric in metricGroup.Value) + { + if (metric is PercentilePayload percentilePayload) + { + foreach (Quantile quantile in percentilePayload.Quantiles) + { + string metricValue = PrometheusDataModel.GetPrometheusNormalizedValue(metric.Unit, quantile.Value); + string metricLabels = GetMetricLabels(metric, quantile.Percentage); + await WriteMetricDetails(writer, metric, metricName, metricValue, metricLabels); + } + } + else + { + string metricValue = PrometheusDataModel.GetPrometheusNormalizedValue(metric.Unit, metric.Value); + string metricLabels = GetMetricLabels(metric, quantile: null); + await WriteMetricDetails(writer, metric, metricName, metricValue, metricLabels); + } + } + } + } + + private static string GetMetricLabels(ICounterPayload metric, double? quantile) + { + string metadata = metric.Metadata; + if (quantile.HasValue) + { + metadata = CounterUtilities.AppendPercentile(metadata, quantile.Value); + } + + char separator = IsMeter(metric) ? '=' : ':'; + var keyValuePairs = from pair in CounterUtilities.GetMetadata(metadata, separator) + select pair.Key + "=" + "\"" + pair.Value + "\""; + string metricLabels = string.Join(", ", keyValuePairs); + + return metricLabels; + } + + //HACK We should make this easier in the base api + private static bool IsMeter(ICounterPayload payload) => + payload switch + { + GaugePayload or PercentilePayload or CounterEndedPayload or RatePayload => true, + _ => false + }; + + private static async Task WriteMetricHeader(ICounterPayload metricInfo, StreamWriter writer, string metricName) + { + if ((metricInfo.EventType != EventType.Error) && (metricInfo.EventType != EventType.CounterEnded)) + { + string metricType = GetMetricType(metricInfo.EventType); await writer.WriteLineAsync(FormattableString.Invariant($"# HELP {metricName} {metricInfo.DisplayName}")); await writer.WriteLineAsync(FormattableString.Invariant($"# TYPE {metricName} {metricType}")); + } + } - foreach (var metric in metricGroup.Value) - { - string 
metricValue = PrometheusDataModel.GetPrometheusNormalizedValue(metric.Unit, metric.Value); - await WriteMetricDetails(writer, metric, metricName, metricValue, metricLabels); - } + private static string GetMetricType(EventType eventType) + { + switch (eventType) + { + case EventType.Rate: + return "counter"; + case EventType.Gauge: + return "gauge"; + case EventType.Histogram: + return "summary"; + case EventType.Error: + default: + return string.Empty; // Not sure this is how we want to do it. } } @@ -122,7 +213,10 @@ private static async Task WriteMetricDetails( { await writer.WriteAsync("{" + metricLabels + "}"); } - await writer.WriteLineAsync(FormattableString.Invariant($" {metricValue} {new DateTimeOffset(metric.Timestamp).ToUnixTimeMilliseconds()}")); + + string lineSuffix = metric is PercentilePayload ? string.Empty : FormattableString.Invariant($" {new DateTimeOffset(metric.Timestamp).ToUnixTimeMilliseconds()}"); + + await writer.WriteLineAsync(FormattableString.Invariant($" {metricValue}{lineSuffix}")); } private static bool CompareMetrics(ICounterPayload first, ICounterPayload second) diff --git a/src/Microsoft.Diagnostics.Monitoring.WebApi/Metrics/MetricsStoreService.cs b/src/Microsoft.Diagnostics.Monitoring.WebApi/Metrics/MetricsStoreService.cs index 045bb4263e8..253b358fc14 100644 --- a/src/Microsoft.Diagnostics.Monitoring.WebApi/Metrics/MetricsStoreService.cs +++ b/src/Microsoft.Diagnostics.Monitoring.WebApi/Metrics/MetricsStoreService.cs @@ -1,6 +1,7 @@ // Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. 
+using Microsoft.Extensions.Logging; using Microsoft.Extensions.Options; using System; @@ -11,9 +12,10 @@ internal sealed class MetricsStoreService : IDisposable public MetricsStore MetricsStore { get; } public MetricsStoreService( + ILogger logger, IOptions options) { - MetricsStore = new MetricsStore(options.Value.MetricCount.GetValueOrDefault(MetricsOptionsDefaults.MetricCount)); + MetricsStore = new MetricsStore(logger, options.Value.MetricCount.GetValueOrDefault(MetricsOptionsDefaults.MetricCount)); } public void Dispose() diff --git a/src/Microsoft.Diagnostics.Monitoring.WebApi/Metrics/StreamingCounterLogger.cs b/src/Microsoft.Diagnostics.Monitoring.WebApi/Metrics/StreamingCounterLogger.cs index 07abcafce02..4a6e0e56e1b 100644 --- a/src/Microsoft.Diagnostics.Monitoring.WebApi/Metrics/StreamingCounterLogger.cs +++ b/src/Microsoft.Diagnostics.Monitoring.WebApi/Metrics/StreamingCounterLogger.cs @@ -14,15 +14,14 @@ internal abstract class StreamingCounterLogger : ICountersLogger { private const int CounterBacklog = 1000; - // The amount of time to wait for serialization to finish before stopping the pipeline - private static readonly TimeSpan FinishSerializationTimeout = TimeSpan.FromSeconds(3); - private readonly Channel _channel; private readonly ChannelReader _channelReader; private readonly ChannelWriter _channelWriter; - private readonly ManualResetEvent _finishedSerialization = new(false); + private Task _processingTask; private readonly ILogger _logger; + protected ILogger Logger => _logger; + private long _dropCount; protected abstract Task SerializeAsync(ICounterPayload counter); @@ -48,12 +47,13 @@ public void Log(ICounterPayload counter) _channelWriter.TryWrite(counter); } - public void PipelineStarted() + public Task PipelineStarted(CancellationToken token) { - _ = ReadAndSerializeAsync(); + _processingTask = ReadAndSerializeAsync(token); + return Task.CompletedTask; } - public void PipelineStopped() + public async Task 
PipelineStopped(CancellationToken token) { _channelWriter.Complete(); @@ -62,10 +62,7 @@ public void PipelineStopped() _logger.MetricsDropped(_dropCount); } - if (!_finishedSerialization.WaitOne(FinishSerializationTimeout)) - { - _logger.MetricsAbandonCompletion(); - } + await _processingTask; try { @@ -85,23 +82,19 @@ private void ChannelItemDropped(ICounterPayload payload) _dropCount++; } - private async Task ReadAndSerializeAsync() + private async Task ReadAndSerializeAsync(CancellationToken token) { try { - while (await _channelReader.WaitToReadAsync()) + while (await _channelReader.WaitToReadAsync(token)) { - await SerializeAsync(await _channelReader.ReadAsync()); + await SerializeAsync(await _channelReader.ReadAsync(token)); } } - catch (Exception ex) + catch (Exception ex) when (ex is not OperationCanceledException) { _logger.MetricsWriteFailed(ex); } - finally - { - _finishedSerialization.Set(); - } } } } diff --git a/src/Microsoft.Diagnostics.Monitoring.WebApi/Models/EventMetricsConfiguration.cs b/src/Microsoft.Diagnostics.Monitoring.WebApi/Models/EventMetricsConfiguration.cs index 8e54c16f21b..62465e132b5 100644 --- a/src/Microsoft.Diagnostics.Monitoring.WebApi/Models/EventMetricsConfiguration.cs +++ b/src/Microsoft.Diagnostics.Monitoring.WebApi/Models/EventMetricsConfiguration.cs @@ -24,5 +24,8 @@ public class EventMetricsProvider [JsonPropertyName("counterNames")] public string[] CounterNames { get; set; } + + [JsonPropertyName("metricType")] + public MetricProviderType? 
MetricType { get; set; } } } diff --git a/src/Microsoft.Diagnostics.Monitoring.WebApi/Strings.Designer.cs b/src/Microsoft.Diagnostics.Monitoring.WebApi/Strings.Designer.cs index bb834028bbc..1aeb9e1bf36 100644 --- a/src/Microsoft.Diagnostics.Monitoring.WebApi/Strings.Designer.cs +++ b/src/Microsoft.Diagnostics.Monitoring.WebApi/Strings.Designer.cs @@ -267,6 +267,15 @@ internal static string ErrorMessage_ValueNotString { } } + /// + /// Looks up a localized string similar to The counter {0} ended and is no longer receiving metrics.. + /// + internal static string LogFormatString_CounterEndedPayload { + get { + return ResourceManager.GetString("LogFormatString_CounterEndedPayload", resourceCulture); + } + } + /// /// Looks up a localized string similar to Failed to determine the default process.. /// @@ -294,6 +303,15 @@ internal static string LogFormatString_EgressedArtifact { } } + /// + /// Looks up a localized string similar to {0}. + /// + internal static string LogFormatString_ErrorPayload { + get { + return ResourceManager.GetString("LogFormatString_ErrorPayload", resourceCulture); + } + } + /// /// Looks up a localized string similar to Stopped waiting for metrics to complete writing.. /// diff --git a/src/Microsoft.Diagnostics.Monitoring.WebApi/Strings.resx b/src/Microsoft.Diagnostics.Monitoring.WebApi/Strings.resx index 1371f44215c..aef63084a5d 100644 --- a/src/Microsoft.Diagnostics.Monitoring.WebApi/Strings.resx +++ b/src/Microsoft.Diagnostics.Monitoring.WebApi/Strings.resx @@ -210,6 +210,9 @@ Value must be of type string. Gets a string similar to "Value must be of type string.". + + The counter {0} ended and is no longer receiving metrics. + Failed to determine the default process. Gets the format string that is printed in the 7:DefaultProcessUnexpectedFailure event. @@ -224,6 +227,9 @@ 1 Format Parameter: 1. location: The location that the artifact was egressed to + + {0} + Stopped waiting for metrics to complete writing. 
@@ -305,4 +311,4 @@ This collection rule is temporarily throttled because the ActionCountLimit has been reached within the ActionCountSlidingWindowDuration. - \ No newline at end of file + diff --git a/src/Tests/Microsoft.Diagnostics.Monitoring.TestCommon/CounterPayload.cs b/src/Tests/Microsoft.Diagnostics.Monitoring.TestCommon/CounterPayload.cs index 067e7b50cb7..9a93e76ec17 100644 --- a/src/Tests/Microsoft.Diagnostics.Monitoring.TestCommon/CounterPayload.cs +++ b/src/Tests/Microsoft.Diagnostics.Monitoring.TestCommon/CounterPayload.cs @@ -1,6 +1,7 @@ // Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. +using System; using System.Text.Json.Serialization; namespace Microsoft.Diagnostics.Monitoring.TestCommon @@ -12,5 +13,14 @@ internal class CounterPayload [JsonPropertyName("name")] public string Name { get; set; } + + [JsonPropertyName("value")] + public double Value { get; set; } + + [JsonPropertyName("timestamp")] + public DateTime Timestamp { get; set; } + + [JsonPropertyName("tags")] + public string Metadata { get; set; } } } diff --git a/src/Tests/Microsoft.Diagnostics.Monitoring.TestCommon/LiveMetricsTestConstants.cs b/src/Tests/Microsoft.Diagnostics.Monitoring.TestCommon/LiveMetricsTestConstants.cs new file mode 100644 index 00000000000..eb2c0ad65fe --- /dev/null +++ b/src/Tests/Microsoft.Diagnostics.Monitoring.TestCommon/LiveMetricsTestConstants.cs @@ -0,0 +1,17 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. 
+ +namespace Microsoft.Diagnostics.Monitoring.TestCommon +{ + public static class LiveMetricsTestConstants + { + public const string CounterName = "test-counter"; + public const string GaugeName = "test-gauge"; + public const string HistogramName1 = "test-histogram"; + public const string HistogramName2 = "test-histogram-2"; + public const string ProviderName1 = "P1"; + public const string ProviderName2 = "P2"; + public const string MetadataKey = "key1"; + public const string MetadataValue = "value1"; + } +} diff --git a/src/Tests/Microsoft.Diagnostics.Monitoring.TestCommon/LiveMetricsTestUtilities.cs b/src/Tests/Microsoft.Diagnostics.Monitoring.TestCommon/LiveMetricsTestUtilities.cs index c456d5eabeb..c57ec68a91a 100644 --- a/src/Tests/Microsoft.Diagnostics.Monitoring.TestCommon/LiveMetricsTestUtilities.cs +++ b/src/Tests/Microsoft.Diagnostics.Monitoring.TestCommon/LiveMetricsTestUtilities.cs @@ -4,6 +4,7 @@ using Microsoft.Diagnostics.Monitoring.WebApi; using System.Collections.Generic; using System.IO; +using System.Linq; using System.Text.Json; using System.Threading.Tasks; using Xunit; @@ -15,11 +16,18 @@ internal static class LiveMetricsTestUtilities internal static async Task ValidateMetrics(IEnumerable expectedProviders, IEnumerable expectedNames, IAsyncEnumerable actualMetrics, bool strict) { - HashSet actualProviders = new(); - HashSet actualNames = new(); + List actualProviders = new(); + List actualNames = new(); + List actualMetadata = new(); - await AggregateMetrics(actualMetrics, actualProviders, actualNames); + await AggregateMetrics(actualMetrics, actualProviders, actualNames, actualMetadata); + ValidateMetrics(expectedProviders, expectedNames, actualProviders.ToHashSet(), actualNames.ToHashSet(), strict); + } + + internal static void ValidateMetrics(IEnumerable expectedProviders, IEnumerable expectedNames, + HashSet actualProviders, HashSet actualNames, bool strict) + { CompareSets(new HashSet(expectedProviders), actualProviders, strict); 
CompareSets(new HashSet(expectedNames), actualNames, strict); } @@ -41,14 +49,16 @@ private static void CompareSets(HashSet expected, HashSet actual Assert.True(matched, "Missing or unexpected elements: " + string.Join(",", expected)); } - private static async Task AggregateMetrics(IAsyncEnumerable actualMetrics, - HashSet providers, - HashSet names) + internal static async Task AggregateMetrics(IAsyncEnumerable actualMetrics, + List providers, + List names, + List metadata) { await foreach (CounterPayload counter in actualMetrics) { providers.Add(counter.Provider); names.Add(counter.Name); + metadata.Add(counter.Metadata); } } diff --git a/src/Tests/Microsoft.Diagnostics.Monitoring.TestCommon/TestAppScenarios.cs b/src/Tests/Microsoft.Diagnostics.Monitoring.TestCommon/TestAppScenarios.cs index dd863015871..13571ba30cb 100644 --- a/src/Tests/Microsoft.Diagnostics.Monitoring.TestCommon/TestAppScenarios.cs +++ b/src/Tests/Microsoft.Diagnostics.Monitoring.TestCommon/TestAppScenarios.cs @@ -90,6 +90,16 @@ public static class Commands } } + public static class Metrics + { + public const string Name = nameof(Metrics); + + public static class Commands + { + public const string Continue = nameof(Continue); + } + } + public static class SpinWait { public const string Name = nameof(SpinWait); diff --git a/src/Tests/Microsoft.Diagnostics.Monitoring.Tool.FunctionalTests/LiveMetricsTests.cs b/src/Tests/Microsoft.Diagnostics.Monitoring.Tool.FunctionalTests/LiveMetricsTests.cs index 8efa8e9f4fe..9c0e884de6e 100644 --- a/src/Tests/Microsoft.Diagnostics.Monitoring.Tool.FunctionalTests/LiveMetricsTests.cs +++ b/src/Tests/Microsoft.Diagnostics.Monitoring.Tool.FunctionalTests/LiveMetricsTests.cs @@ -8,11 +8,17 @@ using Microsoft.Diagnostics.Monitoring.Tool.FunctionalTests.Runners; using Microsoft.Diagnostics.Monitoring.WebApi; using Microsoft.Diagnostics.Monitoring.WebApi.Models; +using Microsoft.Diagnostics.Tools.Monitor; using Microsoft.Extensions.DependencyInjection; +using System; 
+using System.Collections.Generic; +using System.Linq; using System.Net.Http; +using System.Text.RegularExpressions; using System.Threading.Tasks; using Xunit; using Xunit.Abstractions; +using Constants = Microsoft.Diagnostics.Monitoring.TestCommon.LiveMetricsTestConstants; namespace Microsoft.Diagnostics.Monitoring.Tool.FunctionalTests { @@ -39,7 +45,7 @@ public Task TestDefaultMetrics() async (appRunner, apiClient) => { using ResponseStreamHolder holder = await apiClient.CaptureMetricsAsync(await appRunner.ProcessIdTask, - durationSeconds: 10); + durationSeconds: 2); var metrics = LiveMetricsTestUtilities.GetAllMetrics(holder.Stream); await LiveMetricsTestUtilities.ValidateMetrics(new[] { EventPipe.MonitoringSourceConfiguration.SystemRuntimeEventSourceName }, @@ -54,6 +60,16 @@ await LiveMetricsTestUtilities.ValidateMetrics(new[] { EventPipe.MonitoringSourc metrics, strict: false); await appRunner.SendCommandAsync(TestAppScenarios.AsyncWait.Commands.Continue); + }, + configureTool: runner => + { + runner.WriteKeyPerValueConfiguration(new RootOptions() + { + GlobalCounter = new GlobalCounterOptions() + { + IntervalSeconds = 1 + } + }); }); } @@ -69,7 +85,7 @@ public Task TestCustomMetrics() var counterNames = new[] { "cpu-usage", "working-set" }; using ResponseStreamHolder holder = await apiClient.CaptureMetricsAsync(await appRunner.ProcessIdTask, - durationSeconds: 10, + durationSeconds: 2, metricsConfiguration: new EventMetricsConfiguration { IncludeDefaultProviders = false, @@ -90,6 +106,407 @@ await LiveMetricsTestUtilities.ValidateMetrics(new[] { EventPipe.MonitoringSourc strict: true); await appRunner.SendCommandAsync(TestAppScenarios.AsyncWait.Commands.Continue); + }, + configureTool: runner => + { + runner.WriteKeyPerValueConfiguration(new RootOptions() + { + GlobalCounter = new GlobalCounterOptions() + { + IntervalSeconds = 1 + } + }); + }); + } + + [Theory] + [InlineData(MetricProviderType.All, true)] + [InlineData(MetricProviderType.Meter, false)] + 
[InlineData(MetricProviderType.EventCounter, true)] + public Task TestCustomMetrics_MetricProviderType(MetricProviderType metricType, bool expectResults) + { + return ScenarioRunner.SingleTarget(_outputHelper, + _httpClientFactory, + DiagnosticPortConnectionMode.Connect, + TestAppScenarios.AsyncWait.Name, + async (appRunner, apiClient) => + { + var counterNames = new[] { "cpu-usage", "working-set" }; + + using ResponseStreamHolder holder = await apiClient.CaptureMetricsAsync(await appRunner.ProcessIdTask, + durationSeconds: 2, + metricsConfiguration: new EventMetricsConfiguration + { + IncludeDefaultProviders = false, + Providers = new[] + { + new EventMetricsProvider + { + ProviderName = EventPipe.MonitoringSourceConfiguration.SystemRuntimeEventSourceName, + CounterNames = counterNames, + MetricType = metricType + } + } + }); + + var metrics = LiveMetricsTestUtilities.GetAllMetrics(holder.Stream); + + List actualProviders = new(); + List actualNames = new(); + List actualMetadata = new(); + + await LiveMetricsTestUtilities.AggregateMetrics(metrics, actualProviders, actualNames, actualMetadata); + + if (expectResults) + { + LiveMetricsTestUtilities.ValidateMetrics(new[] { EventPipe.MonitoringSourceConfiguration.SystemRuntimeEventSourceName }, + counterNames, + actualProviders.ToHashSet(), + actualNames.ToHashSet(), + strict: true); + } + else + { + Assert.Empty(actualProviders); + Assert.Empty(actualNames); + } + + await appRunner.SendCommandAsync(TestAppScenarios.AsyncWait.Commands.Continue); + }, + configureTool: runner => + { + runner.WriteKeyPerValueConfiguration(new RootOptions() + { + GlobalCounter = new GlobalCounterOptions() + { + IntervalSeconds = 1 + } + }); + }); + } + + [Fact] + public async Task TestSystemDiagnosticsMetrics() + { + var counterNamesP1 = new[] { Constants.CounterName, Constants.GaugeName, Constants.HistogramName1, Constants.HistogramName2 }; + var counterNamesP2 = new[] { Constants.CounterName }; + + MetricProvider p1 = new 
MetricProvider() + { + ProviderName = Constants.ProviderName1 + }; + + MetricProvider p2 = new MetricProvider() + { + ProviderName = Constants.ProviderName2 + }; + + var providers = new List() + { + p1, p2 + }; + + await ScenarioRunner.SingleTarget( + _outputHelper, + _httpClientFactory, + DiagnosticPortConnectionMode.Connect, + TestAppScenarios.Metrics.Name, + appValidate: async (runner, client) => + { + using ResponseStreamHolder holder = await client.CaptureMetricsAsync(await runner.ProcessIdTask, + durationSeconds: 2, + metricsConfiguration: new EventMetricsConfiguration + { + IncludeDefaultProviders = false, + Providers = new[] + { + new EventMetricsProvider + { + ProviderName = p1.ProviderName, + CounterNames = counterNamesP1, + }, + new EventMetricsProvider + { + ProviderName = p2.ProviderName, + CounterNames = counterNamesP2, + } + } + }); + + await runner.SendCommandAsync(TestAppScenarios.Metrics.Commands.Continue); + + var metrics = LiveMetricsTestUtilities.GetAllMetrics(holder.Stream); + + List actualProviders = new(); + List actualNames = new(); + List actualMetadata = new(); + + await LiveMetricsTestUtilities.AggregateMetrics(metrics, actualProviders, actualNames, actualMetadata); + + LiveMetricsTestUtilities.ValidateMetrics(new[] { p1.ProviderName, p2.ProviderName }, + counterNamesP1, + actualProviders.ToHashSet(), + actualNames.ToHashSet(), + strict: true); + + // NOTE: This assumes the default percentiles of 50/95/99 - if this changes, this test + // will fail and will need to be updated. 
+ Regex regex = new Regex(@"\bPercentile=(50|95|99)"); + + for (int index = 0; index < actualProviders.Count; ++index) + { + if (actualNames[index] == Constants.HistogramName1) + { + Assert.Matches(regex, actualMetadata[index]); + } + else if (actualNames[index] == Constants.HistogramName2) + { + var metadata = actualMetadata[index].Split(','); + Assert.Equal(2, metadata.Length); + Assert.Equal(FormattableString.Invariant($"{Constants.MetadataKey}={Constants.MetadataValue}"), metadata[0]); + Assert.Matches(regex, metadata[1]); + } + } + }, + configureTool: runner => + { + runner.WriteKeyPerValueConfiguration(new RootOptions() + { + Metrics = new MetricsOptions() + { + Enabled = true, + IncludeDefaultProviders = false, + Providers = providers + }, + GlobalCounter = new GlobalCounterOptions() + { + IntervalSeconds = 1 + } + }); + }); + } + + [Theory] + [InlineData(MetricProviderType.All, true)] + [InlineData(MetricProviderType.Meter, true)] + [InlineData(MetricProviderType.EventCounter, false)] + public async Task TestSystemDiagnosticsMetrics_MetricProviderType(MetricProviderType metricType, bool expectResults) + { + var counterNames = new[] { Constants.CounterName }; + + MetricProvider p1 = new MetricProvider() + { + ProviderName = Constants.ProviderName1 + }; + + var providers = new List() + { + p1 + }; + + await ScenarioRunner.SingleTarget( + _outputHelper, + _httpClientFactory, + DiagnosticPortConnectionMode.Connect, + TestAppScenarios.Metrics.Name, + appValidate: async (runner, client) => + { + using ResponseStreamHolder holder = await client.CaptureMetricsAsync(await runner.ProcessIdTask, + durationSeconds: 2, + metricsConfiguration: new EventMetricsConfiguration + { + IncludeDefaultProviders = false, + Providers = new[] + { + new EventMetricsProvider + { + ProviderName = p1.ProviderName, + CounterNames = counterNames, + MetricType = metricType + } + } + }); + + await runner.SendCommandAsync(TestAppScenarios.Metrics.Commands.Continue); + + var metrics = 
LiveMetricsTestUtilities.GetAllMetrics(holder.Stream); + + List actualProviders = new(); + List actualNames = new(); + List actualMetadata = new(); + + await LiveMetricsTestUtilities.AggregateMetrics(metrics, actualProviders, actualNames, actualMetadata); + + if (expectResults) + { + LiveMetricsTestUtilities.ValidateMetrics(new[] { p1.ProviderName }, + counterNames, + actualProviders.ToHashSet(), + actualNames.ToHashSet(), + strict: true); + } + else + { + Assert.Empty(actualProviders); + Assert.Empty(actualNames); + } + }, + configureTool: runner => + { + runner.WriteKeyPerValueConfiguration(new RootOptions() + { + Metrics = new MetricsOptions() + { + Enabled = true, + IncludeDefaultProviders = false, + Providers = providers + }, + GlobalCounter = new GlobalCounterOptions() + { + IntervalSeconds = 1 + } + }); + }); + } + + [Fact] + public async Task TestSystemDiagnosticsMetrics_MaxHistograms() + { + var counterNames = new[] { Constants.HistogramName1, Constants.HistogramName2 }; + + MetricProvider p1 = new MetricProvider() + { + ProviderName = Constants.ProviderName1 + }; + + var providers = new List() + { + p1 + }; + + await ScenarioRunner.SingleTarget( + _outputHelper, + _httpClientFactory, + DiagnosticPortConnectionMode.Connect, + TestAppScenarios.Metrics.Name, + appValidate: async (runner, client) => + { + using ResponseStreamHolder holder = await client.CaptureMetricsAsync(await runner.ProcessIdTask, + durationSeconds: 2, + metricsConfiguration: new EventMetricsConfiguration + { + IncludeDefaultProviders = false, + Providers = new[] + { + new EventMetricsProvider + { + ProviderName = p1.ProviderName, + CounterNames = counterNames + } + } + }); + + await runner.SendCommandAsync(TestAppScenarios.Metrics.Commands.Continue); + + var metrics = LiveMetricsTestUtilities.GetAllMetrics(holder.Stream); + + List actualProviders = new(); + List actualNames = new(); + List actualMetadata = new(); + + await LiveMetricsTestUtilities.AggregateMetrics(metrics, 
actualProviders, actualNames, actualMetadata); + + Assert.Contains(Constants.HistogramName1, actualNames); + Assert.DoesNotContain(Constants.HistogramName2, actualNames); + }, + configureTool: runner => + { + runner.WriteKeyPerValueConfiguration(new RootOptions() + { + Metrics = new MetricsOptions() + { + Enabled = true, + IncludeDefaultProviders = false, + Providers = providers, + }, + GlobalCounter = new GlobalCounterOptions() + { + IntervalSeconds = 1, + MaxHistograms = 1 + } + }); + }); + } + + [Fact] + public async Task TestSystemDiagnosticsMetrics_MaxTimeseries() + { + var counterNames = new[] { Constants.CounterName, Constants.GaugeName, Constants.HistogramName1, Constants.HistogramName2 }; + + const int maxTimeSeries = 3; + + MetricProvider p1 = new MetricProvider() + { + ProviderName = Constants.ProviderName1 + }; + + var providers = new List() + { + p1 + }; + + await ScenarioRunner.SingleTarget( + _outputHelper, + _httpClientFactory, + DiagnosticPortConnectionMode.Connect, + TestAppScenarios.Metrics.Name, + appValidate: async (runner, client) => + { + using ResponseStreamHolder holder = await client.CaptureMetricsAsync(await runner.ProcessIdTask, + durationSeconds: 2, + metricsConfiguration: new EventMetricsConfiguration + { + IncludeDefaultProviders = false, + Providers = new[] + { + new EventMetricsProvider + { + ProviderName = p1.ProviderName, + CounterNames = counterNames + } + } + }); + + await runner.SendCommandAsync(TestAppScenarios.Metrics.Commands.Continue); + + var metrics = LiveMetricsTestUtilities.GetAllMetrics(holder.Stream); + + List actualProviders = new(); + List actualNames = new(); + List actualMetadata = new(); + + await LiveMetricsTestUtilities.AggregateMetrics(metrics, actualProviders, actualNames, actualMetadata); + + ISet actualNamesSet = new HashSet(actualNames); + + Assert.Equal(maxTimeSeries, actualNamesSet.Count); + }, + configureTool: runner => + { + runner.WriteKeyPerValueConfiguration(new RootOptions() + { + Metrics = new 
MetricsOptions() + { + Enabled = true, + IncludeDefaultProviders = false, + Providers = providers + }, + GlobalCounter = new GlobalCounterOptions() + { + IntervalSeconds = 1, + MaxTimeSeries = maxTimeSeries + } + }); }); } } diff --git a/src/Tests/Microsoft.Diagnostics.Monitoring.Tool.FunctionalTests/MetricsTests.cs b/src/Tests/Microsoft.Diagnostics.Monitoring.Tool.FunctionalTests/MetricsTests.cs index 58eeb55438a..c17f05e27d4 100644 --- a/src/Tests/Microsoft.Diagnostics.Monitoring.Tool.FunctionalTests/MetricsTests.cs +++ b/src/Tests/Microsoft.Diagnostics.Monitoring.Tool.FunctionalTests/MetricsTests.cs @@ -46,7 +46,7 @@ public async Task DisableMetricsViaCommandLineTest() // Check that /metrics does not serve metrics var validationProblemDetailsException = await Assert.ThrowsAsync( - () => apiClient.GetMetricsAsync()); + apiClient.GetMetricsAsync); Assert.Equal(HttpStatusCode.BadRequest, validationProblemDetailsException.StatusCode); Assert.Equal(StatusCodes.Status400BadRequest, validationProblemDetailsException.Details.Status); } @@ -69,7 +69,7 @@ public async Task DisableMetricsViaEnvironmentTest() // Check that /metrics does not serve metrics var validationProblemDetailsException = await Assert.ThrowsAsync( - () => apiClient.GetMetricsAsync()); + apiClient.GetMetricsAsync); Assert.Equal(HttpStatusCode.BadRequest, validationProblemDetailsException.StatusCode); Assert.Equal(StatusCodes.Status400BadRequest, validationProblemDetailsException.Details.Status); } @@ -97,7 +97,7 @@ await toolRunner.WriteUserSettingsAsync(new RootOptions() // Check that /metrics does not serve metrics var validationProblemDetailsException = await Assert.ThrowsAsync( - () => client.GetMetricsAsync()); + client.GetMetricsAsync); Assert.Equal(HttpStatusCode.BadRequest, validationProblemDetailsException.StatusCode); Assert.Equal(StatusCodes.Status400BadRequest, validationProblemDetailsException.Details.Status); } @@ -125,7 +125,7 @@ public async Task DisableMetricsViaKeyPerFileTest() // 
Check that /metrics does not serve metrics var validationProblemDetailsException = await Assert.ThrowsAsync( - () => apiClient.GetMetricsAsync()); + apiClient.GetMetricsAsync); Assert.Equal(HttpStatusCode.BadRequest, validationProblemDetailsException.StatusCode); Assert.Equal(StatusCodes.Status400BadRequest, validationProblemDetailsException.Details.Status); } diff --git a/src/Tests/Microsoft.Diagnostics.Monitoring.Tool.UnitTestCommon/Microsoft.Diagnostics.Monitoring.Tool.UnitTestCommon.csproj b/src/Tests/Microsoft.Diagnostics.Monitoring.Tool.UnitTestCommon/Microsoft.Diagnostics.Monitoring.Tool.UnitTestCommon.csproj index 735de2288dc..4fc879dbd96 100644 --- a/src/Tests/Microsoft.Diagnostics.Monitoring.Tool.UnitTestCommon/Microsoft.Diagnostics.Monitoring.Tool.UnitTestCommon.csproj +++ b/src/Tests/Microsoft.Diagnostics.Monitoring.Tool.UnitTestCommon/Microsoft.Diagnostics.Monitoring.Tool.UnitTestCommon.csproj @@ -9,6 +9,7 @@ + diff --git a/src/Tests/Microsoft.Diagnostics.Monitoring.Tool.UnitTests/Egress/AzureBlob/AzureBlobEgressProviderTests.cs b/src/Tests/Microsoft.Diagnostics.Monitoring.Tool.UnitTests/Egress/AzureBlob/AzureBlobEgressProviderTests.cs index b8de0a7abf8..e468e185032 100644 --- a/src/Tests/Microsoft.Diagnostics.Monitoring.Tool.UnitTests/Egress/AzureBlob/AzureBlobEgressProviderTests.cs +++ b/src/Tests/Microsoft.Diagnostics.Monitoring.Tool.UnitTests/Egress/AzureBlob/AzureBlobEgressProviderTests.cs @@ -133,7 +133,7 @@ public async Task AzureBlobEgress_Supports_RestrictiveSasToken(UploadAction uplo List blobs = await GetAllBlobsAsync(containerClient); BlobItem resultingBlob = Assert.Single(blobs); - Assert.Equal($"{providerOptions.BlobPrefix}/{artifactSettings.Name}", resultingBlob.Name); + Assert.Equal(FormattableString.Invariant($"{providerOptions.BlobPrefix}/{artifactSettings.Name}"), resultingBlob.Name); } [ConditionalTheory(Timeout = TestTimeouts.EgressUnitTestTimeoutMs)] diff --git 
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.

using Microsoft.Diagnostics.Monitoring.EventPipe;
using Microsoft.Diagnostics.Monitoring.WebApi;
using Microsoft.Extensions.Logging;
using System;
using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using Xunit;
using Xunit.Abstractions;

namespace Microsoft.Diagnostics.Monitoring.Tool.UnitTests
{
    /// <summary>
    /// Verifies the Prometheus exposition text produced by <see cref="MetricsStore"/>
    /// when snapshotting histogram (summary), gauge, and counter payloads.
    /// </summary>
    public sealed class MetricsFormattingTests
    {
        private readonly ITestOutputHelper _outputHelper;

        private readonly string MeterName = "MeterName";
        private readonly string InstrumentName = "InstrumentName";
        private readonly DateTime Timestamp = DateTimeOffset.FromUnixTimeMilliseconds(10000000).DateTime;
        private readonly ILogger _logger;

        private const int MetricCount = 1;
        private const int Value1 = 1;
        private const int Value2 = 2;
        private const int Value3 = 3;
        private const int IntervalSeconds = 10;

        public MetricsFormattingTests(ITestOutputHelper outputHelper)
        {
            _outputHelper = outputHelper;

            // The factory intentionally stays undisposed for the lifetime of the test class;
            // disposing it here would tear down the providers behind _logger.
            LoggerFactory factory = new();
            _logger = factory.CreateLogger(nameof(MetricsFormattingTests));
        }

        [Fact]
        public async Task HistogramFormat_Test()
        {
            List<ICounterPayload> payload = new();

            string tags1 = "Percentile=50";
            payload.Add(new PercentilePayload(MeterName, InstrumentName, "DisplayName", string.Empty, tags1,
                new Quantile[] { new(0.5, Value1), new(0.95, Value2), new(0.99, Value3) },
                Timestamp));

            using MemoryStream stream = await GetMetrics(payload);
            List<string> lines = ReadStream(stream);

            string metricName = GetExpectedMetricName(payload[0].Name);

            const string percentile_50 = "{Percentile=\"50\"}";
            const string percentile_95 = "{Percentile=\"95\"}";
            const string percentile_99 = "{Percentile=\"99\"}";

            // A summary renders one HELP line, one TYPE line, and one sample per quantile.
            Assert.Equal(5, lines.Count);
            Assert.Equal(FormattableString.Invariant($"# HELP {metricName}{payload[0].Unit} {payload[0].DisplayName}"), lines[0]);
            Assert.Equal(FormattableString.Invariant($"# TYPE {metricName} summary"), lines[1]);
            Assert.Equal(FormattableString.Invariant($"{metricName}{percentile_50} {Value1}"), lines[2]);
            Assert.Equal(FormattableString.Invariant($"{metricName}{percentile_95} {Value2}"), lines[3]);
            Assert.Equal(FormattableString.Invariant($"{metricName}{percentile_99} {Value3}"), lines[4]);
        }

        [Fact]
        public async Task GaugeFormat_Test()
        {
            ICounterPayload payload = new GaugePayload(MeterName, InstrumentName, "DisplayName", "", null, Value1, Timestamp);

            using MemoryStream stream = await GetMetrics(new() { payload });

            List<string> lines = ReadStream(stream);

            string metricName = GetExpectedMetricName(payload.Name);

            Assert.Equal(3, lines.Count);
            Assert.Equal(FormattableString.Invariant($"# HELP {metricName}{payload.Unit} {payload.DisplayName}"), lines[0]);
            Assert.Equal(FormattableString.Invariant($"# TYPE {metricName} gauge"), lines[1]);
            Assert.Equal(FormattableString.Invariant($"{metricName} {payload.Value} {new DateTimeOffset(payload.Timestamp).ToUnixTimeMilliseconds()}"), lines[2]);
        }

        [Fact]
        public async Task CounterFormat_Test()
        {
            ICounterPayload payload = new RatePayload(MeterName, InstrumentName, "DisplayName", "", null, Value1, IntervalSeconds, Timestamp);

            using MemoryStream stream = await GetMetrics(new() { payload });

            List<string> lines = ReadStream(stream);

            string metricName = GetExpectedMetricName(payload.Name);

            Assert.Equal(3, lines.Count);
            // FormattableString.Invariant keeps these assertions culture-stable,
            // consistent with the gauge and histogram tests above.
            Assert.Equal(FormattableString.Invariant($"# HELP {metricName}{payload.Unit} {payload.DisplayName}"), lines[0]);
            Assert.Equal(FormattableString.Invariant($"# TYPE {metricName} counter"), lines[1]);
            Assert.Equal(FormattableString.Invariant($"{metricName} {payload.Value} {new DateTimeOffset(payload.Timestamp).ToUnixTimeMilliseconds()}"), lines[2]);
        }

        // Manually recreates what PrometheusDataModel.GetPrometheusNormalizedName does so the
        // expected names stay independent of the code under test.
        // NOTE(review): open question from the original author — should the tests call the real
        // normalization method instead and implicitly cover its behavior? Keeping it hard-coded
        // for now so a normalization bug cannot hide in both actual and expected values.
        private string GetExpectedMetricName(string instrumentName) =>
            FormattableString.Invariant($"{MeterName.ToLowerInvariant()}_{instrumentName}");

        /// <summary>
        /// Adds <paramref name="payloads"/> to a fresh <see cref="MetricsStore"/> and returns a
        /// stream containing the snapshotted Prometheus text. Caller owns (disposes) the stream.
        /// </summary>
        private async Task<MemoryStream> GetMetrics(List<ICounterPayload> payloads)
        {
            IMetricsStore metricsStore = new MetricsStore(_logger, MetricCount);

            foreach (ICounterPayload payload in payloads)
            {
                metricsStore.AddMetric(payload);
            }

            MemoryStream outputStream = new();
            await metricsStore.SnapshotMetrics(outputStream, CancellationToken.None);

            return outputStream;
        }

        // Rewinds the stream and returns its contents as individual lines.
        private static List<string> ReadStream(Stream stream)
        {
            List<string> lines = new();

            stream.Position = 0;
            using (StreamReader reader = new(stream))
            {
                while (!reader.EndOfStream)
                {
                    lines.Add(reader.ReadLine());
                }
            }

            return lines;
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.

using Microsoft.Diagnostics.Monitoring.TestCommon;
using System;
using System.Collections.Generic;
using System.CommandLine;
using System.CommandLine.Invocation;
using System.Diagnostics.Metrics;
using System.Linq;
using System.Threading.Tasks;
using Constants = Microsoft.Diagnostics.Monitoring.TestCommon.LiveMetricsTestConstants;

namespace Microsoft.Diagnostics.Monitoring.UnitTestApp.Scenarios
{
    /// <summary>
    /// Test-app scenario that continuously publishes System.Diagnostics.Metrics instruments
    /// (observable counter, observable gauge, two histograms, and a counter) until the
    /// host sends the Continue command.
    /// </summary>
    internal static class MetricsScenario
    {
        public static Command Command()
        {
            Command command = new(TestAppScenarios.Metrics.Name);
            command.SetHandler(ExecuteAsync);
            return command;
        }

        public static async Task ExecuteAsync(InvocationContext context)
        {
            context.ExitCode = await ScenarioHelpers.RunScenarioAsync(async logger =>
            {
                Random random = new();

                // First meter: observable instruments plus two histograms (one tagged, one not).
                Meter primaryMeter = new(Constants.ProviderName1, "1.0.0");
                _ = primaryMeter.CreateObservableCounter(Constants.CounterName, () => 1);
                _ = primaryMeter.CreateObservableGauge(Constants.GaugeName, () => random.Next(1, 100));
                Histogram<int> untaggedHistogram = primaryMeter.CreateHistogram<int>(Constants.HistogramName1);
                Histogram<int> taggedHistogram = primaryMeter.CreateHistogram<int>(Constants.HistogramName2);

                // Second meter: a plain counter incremented once per publishing pass.
                Meter secondaryMeter = new(Constants.ProviderName2, "1.0.0");
                Counter<int> passCounter = secondaryMeter.CreateCounter<int>(Constants.CounterName);

                // Single metadata tag attached to every recording on the tagged histogram.
                KeyValuePair<string, object> metadataTag = new(Constants.MetadataKey, Constants.MetadataValue);

                Task continueCommand = Task.Run(() => ScenarioHelpers.WaitForCommandAsync(TestAppScenarios.Metrics.Commands.Continue, logger));

                // Keep emitting measurements until the host signals Continue.
                while (!continueCommand.IsCompleted)
                {
                    for (int iteration = 0; iteration < 20; ++iteration)
                    {
                        untaggedHistogram.Record(random.Next(5000));
                        taggedHistogram.Record(random.Next(5000), metadataTag);
                    }

                    passCounter.Add(1);

                    await Task.Delay(100);
                }

                return 0;
            }, context.GetCancellationToken());
        }
    }
}
100644 --- a/src/Tools/dotnet-monitor/CollectionRules/Actions/CollectLiveMetricsAction.cs +++ b/src/Tools/dotnet-monitor/CollectionRules/Actions/CollectLiveMetricsAction.cs @@ -71,7 +71,7 @@ protected override async Task ExecuteCoreAsync( Providers = providers }; - EventPipeCounterPipelineSettings settings = EventCounterSettingsFactory.CreateSettings( + MetricsPipelineSettings settings = MetricsSettingsFactory.CreateSettings( _counterOptions.CurrentValue, (int)duration.TotalSeconds, configuration); diff --git a/src/Tools/dotnet-monitor/Metrics/MetricsOperation.cs b/src/Tools/dotnet-monitor/Metrics/MetricsOperation.cs index 2a7464b10bc..7242c00f36c 100644 --- a/src/Tools/dotnet-monitor/Metrics/MetricsOperation.cs +++ b/src/Tools/dotnet-monitor/Metrics/MetricsOperation.cs @@ -13,27 +13,27 @@ namespace Microsoft.Diagnostics.Tools.Monitor { - internal sealed class MetricsOperation : PipelineArtifactOperation + internal sealed class MetricsOperation : PipelineArtifactOperation { - private readonly EventPipeCounterPipelineSettings _settings; + private readonly MetricsPipelineSettings _settings; - public MetricsOperation(IEndpointInfo endpointInfo, EventPipeCounterPipelineSettings settings, ILogger logger) + public MetricsOperation(IEndpointInfo endpointInfo, MetricsPipelineSettings settings, ILogger logger) : base(logger, Utils.ArtifactType_Metrics, endpointInfo) { _settings = settings; } - protected override EventCounterPipeline CreatePipeline(Stream outputStream) + protected override MetricsPipeline CreatePipeline(Stream outputStream) { var client = new DiagnosticsClient(EndpointInfo.Endpoint); - return new EventCounterPipeline( + return new MetricsPipeline( client, _settings, loggers: new[] { new JsonCounterLogger(outputStream, Logger) }); } - protected override Task StartPipelineAsync(EventCounterPipeline pipeline, CancellationToken token) + protected override Task StartPipelineAsync(MetricsPipeline pipeline, CancellationToken token) { return 
pipeline.StartAsync(token); } diff --git a/src/Tools/dotnet-monitor/Metrics/MetricsOperationFactory.cs b/src/Tools/dotnet-monitor/Metrics/MetricsOperationFactory.cs index 3be5c3e6f05..fc297f0265c 100644 --- a/src/Tools/dotnet-monitor/Metrics/MetricsOperationFactory.cs +++ b/src/Tools/dotnet-monitor/Metrics/MetricsOperationFactory.cs @@ -16,7 +16,7 @@ public MetricsOperationFactory(ILogger logger) _logger = logger; } - public IArtifactOperation Create(IEndpointInfo endpointInfo, EventPipeCounterPipelineSettings settings) + public IArtifactOperation Create(IEndpointInfo endpointInfo, MetricsPipelineSettings settings) { return new MetricsOperation(endpointInfo, settings, _logger); }