Remove deprecated Prometheus configuration (#1140)
tomkerkhove authored Jul 2, 2020
1 parent cd43d84 commit 017bd2c
Showing 22 changed files with 26 additions and 453 deletions.
7 changes: 4 additions & 3 deletions changelog/content/experimental/unreleased.md
@@ -5,9 +5,10 @@ weight: 1
version:
---

- {{% tag added %}} New validation rule to ensure at least one resource or resource collection is configured to scrape
- {{% tag removed %}} Support for Swagger 2.0 ([deprecation notice](https://changelog.promitor.io/#swagger-2-0))
- {{% tag removed %}} Support for Prometheus legacy configuration ([deprecation notice](https://changelog.promitor.io/#prometheus-legacy-configuration))
- {{% tag removed %}} Support for Swagger UI 2.0 ([deprecation notice](https://changelog.promitor.io/#swagger-ui-2-0))
- {{% tag removed %}} Support for Swagger 2.0 ([deprecation notice](https://changelog.promitor.io/#swagger-2-0))
- {{% tag added %}} New validation rule to ensure at least one resource or resource collection is configured to scrape
- {{% tag added %}} Provide suggestions when unknown fields are found in the metrics config. [#1105](https://github.com/tomkerkhove/promitor/issues/1105).
- {{% tag added %}} Add validation to ensure the scraping schedule is a valid Cron expression. [#1103](https://github.com/tomkerkhove/promitor/issues/1103).
- Handle validation failures on startup more gracefully. [#1113](https://github.com/tomkerkhove/promitor/issues/1113).
- {{% tag changed %}} Handle validation failures on startup more gracefully. [#1113](https://github.com/tomkerkhove/promitor/issues/1113).
5 changes: 0 additions & 5 deletions config/promitor/scraper/runtime.yaml
@@ -1,10 +1,5 @@
server:
httpPort: 88
prometheus:
metricUnavailableValue: NaN
enableMetricTimestamps: true # true by default
scrapeEndpoint:
baseUriPath: /metrics-deprecated
metricSinks:
prometheusScrapingEndpoint:
metricUnavailableValue: NaN
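
With the legacy `prometheus` block deleted, the bundled scraper configuration only declares the Prometheus endpoint through the metric sink. A minimal sketch of the top of the file after this commit, covering only the lines visible in this hunk (the collapsed remainder is unchanged):

```yaml
server:
  httpPort: 88
metricSinks:
  prometheusScrapingEndpoint:
    metricUnavailableValue: NaN
```
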
25 changes: 4 additions & 21 deletions docs/configuration/v2.x/runtime.md
@@ -22,15 +22,14 @@ Here is a complete example of the runtime YAML:
server:
httpPort: 80 # Optional. Default: 80
metricSinks:
prometheusScrapingEndpoint:
metricUnavailableValue: NaN # Optional. Default: NaN
enableMetricTimestamps: false # Optional. Default: true
baseUriPath: /metrics # Optional. Default: /metrics
statsd:
host: graphite
port: 8125 # Optional. Default: 8125
metricPrefix: promitor. # Optional. Default: None
prometheus:
metricUnavailableValue: NaN # Optional. Default: NaN
enableMetricTimestamps: false # Optional. Default: true
scrapeEndpoint:
baseUriPath: /metrics # Optional. Default: /metrics
metricsConfiguration:
absolutePath: /config/metrics-declaration.yaml # Optional. Default: /config/metrics-declaration.yaml
telemetry:
@@ -111,22 +110,6 @@ metricSinks:
metricPrefix: promitor.
```

### Prometheus Scraping Endpoint - Legacy Configuration

![Availability Badge](https://img.shields.io/badge/Will%20Be%20Removed%20In-v2.0-red.svg)

For now, we still support configuring it by using the old way of configuration:

```yaml
prometheus:
metricUnavailableValue: NaN # Optional. Default: NaN
enableMetricTimestamps: false # Optional. Default: true
scrapeEndpoint:
baseUriPath: /metrics # Optional. Default: /metrics
```

However, this approach is deprecated and will be removed in 2.0, so we recommend migrating to the metric sink approach.
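
The removed legacy block maps one-to-one onto the Prometheus metric sink documented above; a minimal migration sketch, with values taken from the defaults shown in this file:

```yaml
# Legacy configuration (removed in 2.0)
prometheus:
  metricUnavailableValue: NaN
  enableMetricTimestamps: false
  scrapeEndpoint:
    baseUriPath: /metrics

# Equivalent metric sink configuration
metricSinks:
  prometheusScrapingEndpoint:
    metricUnavailableValue: NaN
    enableMetricTimestamps: false
    baseUriPath: /metrics
```
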

## Metric Configuration

Promitor will scrape the Azure Monitor metrics that are configured via a metric

@@ -2,7 +2,6 @@
using Promitor.Agents.Scraper.Configuration.Sinks;
using Promitor.Core.Scraping.Configuration.Runtime;
using Promitor.Integrations.AzureMonitor.Configuration;
using Promitor.Integrations.Sinks.Prometheus.Configuration;

namespace Promitor.Agents.Scraper.Configuration
{
@@ -11,7 +10,6 @@ public class ScraperRuntimeConfiguration : RuntimeConfiguration
public AzureMonitorConfiguration AzureMonitor { get; set; } = new AzureMonitorConfiguration();
public MetricsConfiguration MetricsConfiguration { get; set; } = new MetricsConfiguration();
public MetricSinkConfiguration MetricSinks { get; set; } = new MetricSinkConfiguration();
public PrometheusLegacyConfiguration Prometheus { get; set; }
public ResourceDiscoveryConfiguration ResourceDiscovery { get; set; }
}
}
7 changes: 0 additions & 7 deletions src/Promitor.Agents.Scraper/Docs/Open-Api.xml

Some generated files are not rendered by default.

@@ -19,26 +19,14 @@ public static IApplicationBuilder UseMetricSinks(this IApplicationBuilder app, I
var metricSinkConfiguration = configuration.GetSection("metricSinks").Get<MetricSinkConfiguration>();
if (metricSinkConfiguration?.PrometheusScrapingEndpoint != null)
{
AddPrometheusScraperMetricSink(app, metricSinkConfiguration.PrometheusScrapingEndpoint.BaseUriPath);
}

return app;
}

/// <summary>
/// Add support for exposing a prometheus scraping endpoint
/// </summary>
/// <param name="app">Application Builder</param>
/// <param name="scrapeEndpointPath">Path where the scrape endpoint will be exposed</param>
public static IApplicationBuilder AddPrometheusScraperMetricSink(this IApplicationBuilder app, string scrapeEndpointPath)
{
if (string.IsNullOrWhiteSpace(scrapeEndpointPath) == false)
{
app.UsePrometheusServer(prometheusOptions =>
if (string.IsNullOrWhiteSpace(metricSinkConfiguration.PrometheusScrapingEndpoint.BaseUriPath) == false)
{
prometheusOptions.MapPath = scrapeEndpointPath;
prometheusOptions.UseDefaultCollectors = false;
});
app.UsePrometheusServer(prometheusOptions =>
{
prometheusOptions.MapPath = metricSinkConfiguration.PrometheusScrapingEndpoint.BaseUriPath;
prometheusOptions.UseDefaultCollectors = false;
});
}
}

return app;
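
Because the removed and added lines are interleaved above, here is the resulting `UseMetricSinks` method reassembled from the added lines of this hunk (a sketch; using directives and the surrounding class are outside the hunk and unchanged):

```csharp
public static IApplicationBuilder UseMetricSinks(this IApplicationBuilder app, IConfiguration configuration)
{
    var metricSinkConfiguration = configuration.GetSection("metricSinks").Get<MetricSinkConfiguration>();
    if (metricSinkConfiguration?.PrometheusScrapingEndpoint != null)
    {
        if (string.IsNullOrWhiteSpace(metricSinkConfiguration.PrometheusScrapingEndpoint.BaseUriPath) == false)
        {
            // Expose the Prometheus scraping endpoint on the configured base URI path,
            // replacing the removed AddPrometheusScraperMetricSink helper
            app.UsePrometheusServer(prometheusOptions =>
            {
                prometheusOptions.MapPath = metricSinkConfiguration.PrometheusScrapingEndpoint.BaseUriPath;
                prometheusOptions.UseDefaultCollectors = false;
            });
        }
    }

    return app;
}
```
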

@@ -19,7 +19,6 @@
using Promitor.Core.Metrics;
using Promitor.Core.Metrics.Sinks;
using Promitor.Core.Scraping.Configuration.Runtime;
using Promitor.Core.Scraping.Interfaces;
using Promitor.Integrations.AzureMonitor.Configuration;
using Promitor.Integrations.Sinks.Prometheus;
using Promitor.Integrations.Sinks.Prometheus.Configuration;
@@ -44,7 +43,6 @@ public static IServiceCollection DefineDependencies(this IServiceCollection serv
services.AddTransient<IRuntimeMetricsCollector, RuntimeMetricsCollector>();
services.AddTransient<MetricScraperFactory>();
services.AddTransient<RuntimeValidator>();
services.AddTransient<IPrometheusMetricWriter, PrometheusMetricWriter>();
services.AddTransient<ConfigurationSerializer>();
services.AddSingleton<AzureMonitorClientFactory>();


@@ -14,7 +14,6 @@
using Promitor.Core.Scraping.Configuration.Model;
using Promitor.Core.Scraping.Configuration.Model.Metrics;
using Promitor.Core.Scraping.Factories;
using Promitor.Core.Scraping.Interfaces;
using Promitor.Integrations.AzureMonitor;
using Promitor.Integrations.AzureMonitor.Configuration;

@@ -25,7 +24,6 @@ public class ResourceCollectionScrapingJob : MetricScrapingJob, IScheduledJob
private readonly ResourceDiscoveryRepository _resourceDiscoveryRepository;
private readonly MetricDefinition _metricDefinition;
private readonly AzureMetadata _azureMetadata;
private readonly IPrometheusMetricWriter _prometheusMetricWriter;
private readonly MetricSinkWriter _metricSinkWriter;
private readonly IRuntimeMetricsCollector _runtimeMetricCollector;
private readonly AzureMonitorClientFactory _azureMonitorClientFactory;
@@ -37,7 +35,6 @@ public class ResourceCollectionScrapingJob : MetricScrapingJob, IScheduledJob

public ResourceCollectionScrapingJob(string jobName, string resourceCollectionName, AzureMetadata azureMetadata, MetricDefinition metricDefinition, ResourceDiscoveryRepository resourceDiscoveryRepository,
MetricSinkWriter metricSinkWriter,
IPrometheusMetricWriter prometheusMetricWriter,
MetricScraperFactory metricScraperFactory,
AzureMonitorClientFactory azureMonitorClientFactory, IRuntimeMetricsCollector runtimeMetricCollector, IConfiguration configuration, IOptions<AzureMonitorLoggingConfiguration> azureMonitorLoggingConfiguration, ILoggerFactory loggerFactory,
ILogger<ResourceCollectionScrapingJob> logger)
@@ -48,7 +45,6 @@ public ResourceCollectionScrapingJob(string jobName, string resourceCollectionNa
Guard.NotNull(metricDefinition, nameof(metricDefinition));
Guard.NotNull(azureMetadata, nameof(azureMetadata));
Guard.NotNullOrWhitespace(jobName, nameof(jobName));
Guard.NotNull(prometheusMetricWriter, nameof(prometheusMetricWriter));
Guard.NotNull(metricScraperFactory, nameof(metricScraperFactory));
Guard.NotNull(azureMonitorClientFactory, nameof(azureMonitorClientFactory));
Guard.NotNull(runtimeMetricCollector, nameof(runtimeMetricCollector));
@@ -62,7 +58,6 @@ public ResourceCollectionScrapingJob(string jobName, string resourceCollectionNa
_azureMetadata = azureMetadata;
_metricDefinition = metricDefinition;
_resourceDiscoveryRepository = resourceDiscoveryRepository;
_prometheusMetricWriter = prometheusMetricWriter;
_metricSinkWriter = metricSinkWriter;

_runtimeMetricCollector = runtimeMetricCollector;
@@ -117,7 +112,7 @@ private async Task ScrapeResourceAsync(IAzureResourceDefinition discoveredResour
{
var scrapingDefinition = _metricDefinition.CreateScrapeDefinition(discoveredResource, _azureMetadata);

var scraper = _metricScraperFactory.CreateScraper(scrapingDefinition.Resource.ResourceType, _metricSinkWriter, _prometheusMetricWriter, azureMonitorClient);
var scraper = _metricScraperFactory.CreateScraper(scrapingDefinition.Resource.ResourceType, _metricSinkWriter, azureMonitorClient);
await scraper.ScrapeAsync(scrapingDefinition);
}
catch (Exception exception)

@@ -8,7 +8,6 @@
using Promitor.Core.Metrics.Sinks;
using Promitor.Core.Scraping.Configuration.Model.Metrics;
using Promitor.Core.Scraping.Factories;
using Promitor.Core.Scraping.Interfaces;
using Promitor.Integrations.AzureMonitor;

namespace Promitor.Agents.Scraper.Scheduling
@@ -17,7 +16,6 @@ public class ResourceScrapingJob : MetricScrapingJob,
IScheduledJob
{
private readonly ScrapeDefinition<IAzureResourceDefinition> _metricScrapeDefinition;
private readonly IPrometheusMetricWriter _prometheusMetricWriter;
private readonly AzureMonitorClient _azureMonitorClient;
private readonly MetricSinkWriter _metricSinkWriter;

@@ -26,20 +24,17 @@ public class ResourceScrapingJob : MetricScrapingJob,
public ResourceScrapingJob(string jobName,
ScrapeDefinition<IAzureResourceDefinition> metricScrapeDefinition,
MetricSinkWriter metricSinkWriter,
IPrometheusMetricWriter prometheusMetricWriter,
MetricScraperFactory metricScraperFactory,
AzureMonitorClient azureMonitorClient,
ILogger<ResourceScrapingJob> logger)
: base(jobName, logger)
{
Guard.NotNull(metricScrapeDefinition, nameof(metricScrapeDefinition));
Guard.NotNull(prometheusMetricWriter, nameof(prometheusMetricWriter));
Guard.NotNull(metricScraperFactory, nameof(metricScraperFactory));
Guard.NotNull(azureMonitorClient, nameof(azureMonitorClient));
Guard.NotNull(metricSinkWriter, nameof(metricSinkWriter));

_metricScrapeDefinition = metricScrapeDefinition;
_prometheusMetricWriter = prometheusMetricWriter;
_metricSinkWriter = metricSinkWriter;

_metricScraperFactory = metricScraperFactory;
Expand All @@ -64,7 +59,7 @@ private async Task ScrapeMetric(ScrapeDefinition<IAzureResourceDefinition> metri
{
Logger.LogInformation("Scraping {MetricName} for resource type {ResourceType}", metricDefinitionDefinition.PrometheusMetricDefinition.Name, metricDefinitionDefinition.Resource.ResourceType);

var scraper = _metricScraperFactory.CreateScraper(metricDefinitionDefinition.Resource.ResourceType, _metricSinkWriter, _prometheusMetricWriter, _azureMonitorClient);
var scraper = _metricScraperFactory.CreateScraper(metricDefinitionDefinition.Resource.ResourceType, _metricSinkWriter, _azureMonitorClient);
await scraper.ScrapeAsync(metricDefinitionDefinition);
}
}

@@ -15,7 +15,6 @@
using Promitor.Core.Metrics.Sinks;
using Promitor.Core.Scraping.Configuration.Model;
using Promitor.Core.Scraping.Configuration.Model.Metrics;
using Promitor.Core.Scraping.Interfaces;
using Promitor.Integrations.AzureMonitor.Configuration;

// ReSharper disable once CheckNamespace
@@ -76,7 +75,6 @@ private static void ScheduleResourceScraping(IAzureResourceDefinition resource,
{
return new ResourceScrapingJob(jobName, scrapeDefinition,
metricSinkWriter,
jobServices.GetService<IPrometheusMetricWriter>(),
jobServices.GetService<MetricScraperFactory>(),
azureMonitorClient,
jobServices.GetService<ILogger<ResourceScrapingJob>>());
@@ -102,7 +100,6 @@ private static void ScheduleResourceCollectionScraping(AzureResourceCollection r
return new ResourceCollectionScrapingJob(jobName, resourceCollection.Name, azureMetadata, metricDefinition,
jobServices.GetService<ResourceDiscoveryRepository>(),
metricSinkWriter,
jobServices.GetService<IPrometheusMetricWriter>(),
jobServices.GetService<MetricScraperFactory>(),
azureMonitorClientFactory,
runtimeMetricCollector,
12 changes: 2 additions & 10 deletions src/Promitor.Agents.Scraper/Startup.cs
@@ -21,19 +21,16 @@ namespace Promitor.Agents.Scraper
public class Startup : AgentStartup
{
private const string ComponentName = "Promitor Scraper";
private readonly string _legacyPrometheusUriPath;

public Startup(IConfiguration configuration)
: base(configuration)
{
var runtimeConfiguration = configuration.Get<ScraperRuntimeConfiguration>();
_legacyPrometheusUriPath = runtimeConfiguration?.Prometheus?.ScrapeEndpoint?.BaseUriPath;
}

// This method gets called by the runtime. Use this method to add services to the container.
public void ConfigureServices(IServiceCollection services)
{
string openApiDescription = BuildOpenApiDescription(Configuration, _legacyPrometheusUriPath);
string openApiDescription = BuildOpenApiDescription(Configuration);
services.AddHttpClient("Promitor Resource Discovery", client =>
{
// Provide Promitor User-Agent
@@ -71,7 +68,6 @@ public void Configure(IApplicationBuilder app, IWebHostEnvironment env)
.UseHttpCorrelation()
.UseRouting()
.UseMetricSinks(Configuration)
.AddPrometheusScraperMetricSink(_legacyPrometheusUriPath) // Deprecated and will be gone in 2.0
.ExposeOpenApiUi()
.UseEndpoints(endpoints => endpoints.MapControllers());
UseSerilog(ComponentName, app.ApplicationServices);
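
With the deprecated `AddPrometheusScraperMetricSink` call dropped, the visible middleware chain in `Configure` reduces to the sketch below (only the lines in this hunk; any calls chained before `.UseHttpCorrelation()` sit outside the hunk and are unchanged):

```csharp
app.UseHttpCorrelation()
    .UseRouting()
    .UseMetricSinks(Configuration)
    .ExposeOpenApiUi()
    .UseEndpoints(endpoints => endpoints.MapControllers());
UseSerilog(ComponentName, app.ApplicationServices);
```
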
@@ -91,16 +87,12 @@ protected override LoggerConfiguration FilterTelemetry(LoggerConfiguration logge
return standardConfiguration;
}

private string BuildOpenApiDescription(IConfiguration configuration, string legacyPrometheusUriPath)
private string BuildOpenApiDescription(IConfiguration configuration)
{
var metricSinkConfiguration = configuration.GetSection("metricSinks").Get<MetricSinkConfiguration>();
var openApiDescriptionBuilder = new StringBuilder();
openApiDescriptionBuilder.Append("Collection of APIs to manage the Promitor Scraper.\r\n\r\n");
openApiDescriptionBuilder.AppendLine("Configured metric sinks are:\r\n");
if (string.IsNullOrWhiteSpace(legacyPrometheusUriPath) == false)
{
openApiDescriptionBuilder.AppendLine($"<li>Legacy Prometheus scrape endpoint is exposed at <a href=\"./../..{legacyPrometheusUriPath}\" target=\"_blank\">{legacyPrometheusUriPath}</a></li>");
}

if (metricSinkConfiguration != null)
{
@@ -1,5 +1,4 @@
using System;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using Promitor.Agents.Scraper.Configuration;
@@ -27,34 +26,17 @@ public PrometheusScrapingEndpointMetricSinkValidationStep(IOptions<ScraperRuntim
public ValidationResult Run()
{
var currentRuntimeConfiguration = _runtimeConfiguration.Value;
var legacyPrometheusConfiguration = currentRuntimeConfiguration?.Prometheus;
var prometheusScrapingEndpointConfiguration = currentRuntimeConfiguration?.MetricSinks?.PrometheusScrapingEndpoint;
if (prometheusScrapingEndpointConfiguration == null && legacyPrometheusConfiguration == null)
if (prometheusScrapingEndpointConfiguration == null)
{
return ValidationResult.Successful(ComponentName);
}

if (string.IsNullOrWhiteSpace(prometheusScrapingEndpointConfiguration?.BaseUriPath) == false &&
string.IsNullOrWhiteSpace(legacyPrometheusConfiguration?.ScrapeEndpoint?.BaseUriPath) == false &&
legacyPrometheusConfiguration.ScrapeEndpoint.BaseUriPath.Equals(prometheusScrapingEndpointConfiguration.BaseUriPath, StringComparison.InvariantCultureIgnoreCase))
if (string.IsNullOrWhiteSpace(prometheusScrapingEndpointConfiguration.BaseUriPath))
{
var errorMessage = "Duplicate Prometheus scraping endpoint was configured in the metric sink & legacy configuration";
var errorMessage = "No valid base URI was configured for Prometheus scraping endpoint in metric sink";
return ValidationResult.Failure(ComponentName, errorMessage);
}
else
{
if (prometheusScrapingEndpointConfiguration != null && string.IsNullOrWhiteSpace(prometheusScrapingEndpointConfiguration.BaseUriPath))
{
var errorMessage = "No valid base URI was configured for Prometheus scraping endpoint in metric sink";
return ValidationResult.Failure(ComponentName, errorMessage);
}

if (legacyPrometheusConfiguration?.ScrapeEndpoint != null && string.IsNullOrWhiteSpace(legacyPrometheusConfiguration.ScrapeEndpoint.BaseUriPath))
{
var errorMessage = "No valid base URI was configured for Prometheus scraping endpoint in legacy configuration";
return ValidationResult.Failure(ComponentName, errorMessage);
}
}

return ValidationResult.Successful(ComponentName);
}
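
With the legacy branch gone, the validation step's `Run` method, reassembled from the remaining lines of this hunk, reads roughly as follows (a sketch):

```csharp
public ValidationResult Run()
{
    var currentRuntimeConfiguration = _runtimeConfiguration.Value;
    var prometheusScrapingEndpointConfiguration = currentRuntimeConfiguration?.MetricSinks?.PrometheusScrapingEndpoint;

    // No Prometheus scraping endpoint sink configured: nothing to validate
    if (prometheusScrapingEndpointConfiguration == null)
    {
        return ValidationResult.Successful(ComponentName);
    }

    // A sink is configured, so it must declare a base URI path
    if (string.IsNullOrWhiteSpace(prometheusScrapingEndpointConfiguration.BaseUriPath))
    {
        var errorMessage = "No valid base URI was configured for Prometheus scraping endpoint in metric sink";
        return ValidationResult.Failure(ComponentName, errorMessage);
    }

    return ValidationResult.Successful(ComponentName);
}
```
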