[pkg/translator/prometheus] Switch NormalizeName FG back to alpha (open-telemetry#23229)

Disable `pkg.translator.prometheus.NormalizeName` feature gate by default

The feature gate `pkg.translator.prometheus.NormalizeName` was enabled prematurely, while translation in the prometheus receiver was still incomplete. To address this, the feature gate has been reverted to alpha status. This will remain the case until the translation on the receiver side aligns with the translation on the exporter side, or until the gate is replaced with a configuration option or removed entirely. To maintain the current behavior, enable the feature gate with the `--feature-gates=pkg.translator.prometheus.NormalizeName` command-line argument. However, please note that the translation in the prometheus receiver is subject to future changes.
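As a rough illustration of what the stage change means (a standalone sketch, not part of this commit; the gate IDs below are invented for the example), the collector's `featuregate` API treats alpha gates as off by default and beta gates as on by default:

```go
package main

import (
	"fmt"

	"go.opentelemetry.io/collector/featuregate"
)

func main() {
	reg := featuregate.NewRegistry()

	// Illustrative gates only. An alpha-stage gate is disabled unless turned
	// on explicitly (e.g. via --feature-gates); a beta-stage gate is enabled
	// unless turned off. This commit moves NormalizeName from the second
	// group back to the first.
	alphaGate := reg.MustRegister("example.alphaGate", featuregate.StageAlpha)
	betaGate := reg.MustRegister("example.betaGate", featuregate.StageBeta)

	fmt.Println(alphaGate.IsEnabled()) // false
	fmt.Println(betaGate.IsEnabled())  // true
}
```

Flipping `pkg.translator.prometheus.NormalizeName` back to `StageAlpha` therefore turns name normalization off unless the gate is enabled at startup.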
dmitryax authored and Caleb-Hurshman committed Jul 6, 2023
1 parent 5a2e810 commit 207b687
Showing 13 changed files with 180 additions and 134 deletions.
22 changes: 22 additions & 0 deletions .chloggen/set-prom-feature-gate-back-to-aplpha.yaml
@@ -0,0 +1,22 @@
# One of 'breaking', 'deprecation', 'new_component', 'enhancement', 'bug_fix'
change_type: breaking

# The name of the component, or a single word describing the area of concern, (e.g. filelogreceiver)
component: prometheusreceiver, prometheusexporter, prometheusremotewrite

# A brief description of the change. Surround your text with quotes ("") if it needs to start with a backtick (`).
note: Disable `pkg.translator.prometheus.NormalizeName` feature gate by default

# One or more tracking issues related to the change
issues: [23208]

# (Optional) One or more lines of additional information to render under the primary note.
# These lines will be padded with 2 spaces and then inserted directly into the document.
# Use pipe (|) for multiline entries.
subtext: |
The feature gate `pkg.translator.prometheus.NormalizeName` was enabled prematurely while translation
on the prometheus receiver was incomplete. To address this, the feature gate has been reverted back to alpha status.
This will remain the case until the translation on the receiver side aligns with the translation on the exporter side,
or until it is replaced with a configuration option or completely removed. To maintain the current behavior, you can
enable the feature gate using the `--feature-gates=pkg.translator.prometheus.NormalizeName` command argument.
However, please note that the translation in the prometheus receiver is subject to future changes.
3 changes: 1 addition & 2 deletions exporter/prometheusexporter/collector_test.go
@@ -448,6 +448,7 @@ func TestCollectMetrics(t *testing.T) {
continue
}

require.Contains(t, m.Desc().String(), "fqName: \"test_space_test_metric\"")
require.Regexp(t, `variableLabels: \[.*label_1.+label_2.+job.+instance.*\]`, m.Desc().String())

pbMetric := io_prometheus_client.Metric{}
@@ -466,13 +467,11 @@ func TestCollectMetrics(t *testing.T) {

switch tt.metricType {
case prometheus.CounterValue:
require.Contains(t, m.Desc().String(), "fqName: \"test_space_test_metric_total\"")
require.Equal(t, tt.value, *pbMetric.Counter.Value)
require.Nil(t, pbMetric.Gauge)
require.Nil(t, pbMetric.Histogram)
require.Nil(t, pbMetric.Summary)
case prometheus.GaugeValue:
require.Contains(t, m.Desc().String(), "fqName: \"test_space_test_metric\"")
require.Equal(t, tt.value, *pbMetric.Gauge.Value)
require.Nil(t, pbMetric.Counter)
require.Nil(t, pbMetric.Histogram)
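The assertion moved above now checks the same un-suffixed `fqName` for every metric type. For reference, a self-contained sketch of the descriptor string that assertion matches, using the Prometheus client library the exporter builds on (help text and printed format are approximate):

```go
package main

import (
	"fmt"

	"github.com/prometheus/client_golang/prometheus"
)

func main() {
	// With normalization off by default, a monotonic sum keeps the translated
	// name as-is; no "_total" suffix is appended before the Desc is built.
	desc := prometheus.NewDesc(
		"test_space_test_metric",
		"test help text", // help string is arbitrary for this sketch
		[]string{"label_1", "label_2", "job", "instance"},
		nil,
	)
	// Prints something like:
	// Desc{fqName: "test_space_test_metric", help: "test help text",
	//      constLabels: {}, variableLabels: [label_1 label_2 job instance]}
	fmt.Println(desc.String())
}
```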
72 changes: 36 additions & 36 deletions exporter/prometheusexporter/prometheus_test.go
@@ -160,10 +160,10 @@ func TestPrometheusExporter_WithTLS(t *testing.T) {
_ = rsp.Body.Close()

want := []string{
`# HELP test_counter_int_total`,
`# TYPE test_counter_int_total counter`,
`test_counter_int_total{code2="one2",foo2="bar2",label_1="label-value-1",resource_attr="resource-attr-val-1"} 123 1581452773000`,
`test_counter_int_total{code2="one2",foo2="bar2",label_2="label-value-2",resource_attr="resource-attr-val-1"} 456 1581452773000`,
`# HELP test_counter_int`,
`# TYPE test_counter_int counter`,
`test_counter_int{code2="one2",foo2="bar2",label_1="label-value-1",resource_attr="resource-attr-val-1"} 123 1581452773000`,
`test_counter_int{code2="one2",foo2="bar2",label_2="label-value-2",resource_attr="resource-attr-val-1"} 456 1581452773000`,
}

for _, w := range want {
@@ -220,18 +220,18 @@ func TestPrometheusExporter_endToEndMultipleTargets(t *testing.T) {
blob, _ := io.ReadAll(res.Body)
_ = res.Body.Close()
want := []string{
`# HELP test_metric_1_this_one_there_where_total Extra ones`,
`# TYPE test_metric_1_this_one_there_where_total counter`,
fmt.Sprintf(`test_metric_1_this_one_there_where_total{arch="x86",code1="one1",foo1="bar1",instance="localhost:8080",job="cpu-exporter",os="windows"} %v`, 99+128),
fmt.Sprintf(`test_metric_1_this_one_there_where_total{arch="x86",code1="one1",foo1="bar1",instance="localhost:8080",job="cpu-exporter",os="linux"} %v`, 100+128),
fmt.Sprintf(`test_metric_1_this_one_there_where_total{arch="x86",code1="one1",foo1="bar1",instance="localhost:8081",job="cpu-exporter",os="windows"} %v`, 99+128),
fmt.Sprintf(`test_metric_1_this_one_there_where_total{arch="x86",code1="one1",foo1="bar1",instance="localhost:8081",job="cpu-exporter",os="linux"} %v`, 100+128),
`# HELP test_metric_2_this_one_there_where_total Extra ones`,
`# TYPE test_metric_2_this_one_there_where_total counter`,
fmt.Sprintf(`test_metric_2_this_one_there_where_total{arch="x86",code1="one1",foo1="bar1",instance="localhost:8080",job="cpu-exporter",os="windows"} %v`, 99+delta),
fmt.Sprintf(`test_metric_2_this_one_there_where_total{arch="x86",code1="one1",foo1="bar1",instance="localhost:8080",job="cpu-exporter",os="linux"} %v`, 100+delta),
fmt.Sprintf(`test_metric_2_this_one_there_where_total{arch="x86",code1="one1",foo1="bar1",instance="localhost:8081",job="cpu-exporter",os="windows"} %v`, 99+delta),
fmt.Sprintf(`test_metric_2_this_one_there_where_total{arch="x86",code1="one1",foo1="bar1",instance="localhost:8081",job="cpu-exporter",os="linux"} %v`, 100+delta),
`# HELP test_metric_1_this_one_there_where Extra ones`,
`# TYPE test_metric_1_this_one_there_where counter`,
fmt.Sprintf(`test_metric_1_this_one_there_where{arch="x86",code1="one1",foo1="bar1",instance="localhost:8080",job="cpu-exporter",os="windows"} %v`, 99+128),
fmt.Sprintf(`test_metric_1_this_one_there_where{arch="x86",code1="one1",foo1="bar1",instance="localhost:8080",job="cpu-exporter",os="linux"} %v`, 100+128),
fmt.Sprintf(`test_metric_1_this_one_there_where{arch="x86",code1="one1",foo1="bar1",instance="localhost:8081",job="cpu-exporter",os="windows"} %v`, 99+128),
fmt.Sprintf(`test_metric_1_this_one_there_where{arch="x86",code1="one1",foo1="bar1",instance="localhost:8081",job="cpu-exporter",os="linux"} %v`, 100+128),
`# HELP test_metric_2_this_one_there_where Extra ones`,
`# TYPE test_metric_2_this_one_there_where counter`,
fmt.Sprintf(`test_metric_2_this_one_there_where{arch="x86",code1="one1",foo1="bar1",instance="localhost:8080",job="cpu-exporter",os="windows"} %v`, 99+delta),
fmt.Sprintf(`test_metric_2_this_one_there_where{arch="x86",code1="one1",foo1="bar1",instance="localhost:8080",job="cpu-exporter",os="linux"} %v`, 100+delta),
fmt.Sprintf(`test_metric_2_this_one_there_where{arch="x86",code1="one1",foo1="bar1",instance="localhost:8081",job="cpu-exporter",os="windows"} %v`, 99+delta),
fmt.Sprintf(`test_metric_2_this_one_there_where{arch="x86",code1="one1",foo1="bar1",instance="localhost:8081",job="cpu-exporter",os="linux"} %v`, 100+delta),
}

for _, w := range want {
@@ -300,14 +300,14 @@ func TestPrometheusExporter_endToEnd(t *testing.T) {
blob, _ := io.ReadAll(res.Body)
_ = res.Body.Close()
want := []string{
`# HELP test_metric_1_this_one_there_where_total Extra ones`,
`# TYPE test_metric_1_this_one_there_where_total counter`,
fmt.Sprintf(`test_metric_1_this_one_there_where_total{arch="x86",code1="one1",foo1="bar1",instance="localhost:8080",job="cpu-exporter",os="windows"} %v`, 99+128),
fmt.Sprintf(`test_metric_1_this_one_there_where_total{arch="x86",code1="one1",foo1="bar1",instance="localhost:8080",job="cpu-exporter",os="linux"} %v`, 100+128),
`# HELP test_metric_2_this_one_there_where_total Extra ones`,
`# TYPE test_metric_2_this_one_there_where_total counter`,
fmt.Sprintf(`test_metric_2_this_one_there_where_total{arch="x86",code1="one1",foo1="bar1",instance="localhost:8080",job="cpu-exporter",os="windows"} %v`, 99+delta),
fmt.Sprintf(`test_metric_2_this_one_there_where_total{arch="x86",code1="one1",foo1="bar1",instance="localhost:8080",job="cpu-exporter",os="linux"} %v`, 100+delta),
`# HELP test_metric_1_this_one_there_where Extra ones`,
`# TYPE test_metric_1_this_one_there_where counter`,
fmt.Sprintf(`test_metric_1_this_one_there_where{arch="x86",code1="one1",foo1="bar1",instance="localhost:8080",job="cpu-exporter",os="windows"} %v`, 99+128),
fmt.Sprintf(`test_metric_1_this_one_there_where{arch="x86",code1="one1",foo1="bar1",instance="localhost:8080",job="cpu-exporter",os="linux"} %v`, 100+128),
`# HELP test_metric_2_this_one_there_where Extra ones`,
`# TYPE test_metric_2_this_one_there_where counter`,
fmt.Sprintf(`test_metric_2_this_one_there_where{arch="x86",code1="one1",foo1="bar1",instance="localhost:8080",job="cpu-exporter",os="windows"} %v`, 99+delta),
fmt.Sprintf(`test_metric_2_this_one_there_where{arch="x86",code1="one1",foo1="bar1",instance="localhost:8080",job="cpu-exporter",os="linux"} %v`, 100+delta),
}

for _, w := range want {
@@ -377,14 +377,14 @@ func TestPrometheusExporter_endToEndWithTimestamps(t *testing.T) {
blob, _ := io.ReadAll(res.Body)
_ = res.Body.Close()
want := []string{
`# HELP test_metric_1_this_one_there_where_total Extra ones`,
`# TYPE test_metric_1_this_one_there_where_total counter`,
fmt.Sprintf(`test_metric_1_this_one_there_where_total{arch="x86",code2="one2",foo2="bar2",instance="localhost:8080",job="node-exporter",os="windows"} %v %v`, 99+128, 1543160298100+128000),
fmt.Sprintf(`test_metric_1_this_one_there_where_total{arch="x86",code2="one2",foo2="bar2",instance="localhost:8080",job="node-exporter",os="linux"} %v %v`, 100+128, 1543160298100),
`# HELP test_metric_2_this_one_there_where_total Extra ones`,
`# TYPE test_metric_2_this_one_there_where_total counter`,
fmt.Sprintf(`test_metric_2_this_one_there_where_total{arch="x86",code2="one2",foo2="bar2",instance="localhost:8080",job="node-exporter",os="windows"} %v %v`, 99+delta, 1543160298100+delta*1000),
fmt.Sprintf(`test_metric_2_this_one_there_where_total{arch="x86",code2="one2",foo2="bar2",instance="localhost:8080",job="node-exporter",os="linux"} %v %v`, 100+delta, 1543160298100),
`# HELP test_metric_1_this_one_there_where Extra ones`,
`# TYPE test_metric_1_this_one_there_where counter`,
fmt.Sprintf(`test_metric_1_this_one_there_where{arch="x86",code2="one2",foo2="bar2",instance="localhost:8080",job="node-exporter",os="windows"} %v %v`, 99+128, 1543160298100+128000),
fmt.Sprintf(`test_metric_1_this_one_there_where{arch="x86",code2="one2",foo2="bar2",instance="localhost:8080",job="node-exporter",os="linux"} %v %v`, 100+128, 1543160298100),
`# HELP test_metric_2_this_one_there_where Extra ones`,
`# TYPE test_metric_2_this_one_there_where counter`,
fmt.Sprintf(`test_metric_2_this_one_there_where{arch="x86",code2="one2",foo2="bar2",instance="localhost:8080",job="node-exporter",os="windows"} %v %v`, 99+delta, 1543160298100+delta*1000),
fmt.Sprintf(`test_metric_2_this_one_there_where{arch="x86",code2="one2",foo2="bar2",instance="localhost:8080",job="node-exporter",os="linux"} %v %v`, 100+delta, 1543160298100),
}

for _, w := range want {
@@ -457,10 +457,10 @@ func TestPrometheusExporter_endToEndWithResource(t *testing.T) {
_ = rsp.Body.Close()

want := []string{
`# HELP test_counter_int_total`,
`# TYPE test_counter_int_total counter`,
`test_counter_int_total{code2="one2",foo2="bar2",label_1="label-value-1",resource_attr="resource-attr-val-1"} 123 1581452773000`,
`test_counter_int_total{code2="one2",foo2="bar2",label_2="label-value-2",resource_attr="resource-attr-val-1"} 456 1581452773000`,
`# HELP test_counter_int`,
`# TYPE test_counter_int counter`,
`test_counter_int{code2="one2",foo2="bar2",label_1="label-value-1",resource_attr="resource-attr-val-1"} 123 1581452773000`,
`test_counter_int{code2="one2",foo2="bar2",label_2="label-value-2",resource_attr="resource-attr-val-1"} 456 1581452773000`,
}

for _, w := range want {
4 changes: 2 additions & 2 deletions pkg/translator/prometheus/README.md
@@ -12,10 +12,10 @@

> **Warning**
>
> This feature can be disabled with [feature gate](https://github.com/open-telemetry/opentelemetry-collector/tree/main/featuregate) `pkg.translator.prometheus.NormalizeName`. It is enabled by default (beta stage).
> This feature can be enabled with [feature gate](https://github.com/open-telemetry/opentelemetry-collector/tree/main/featuregate) `pkg.translator.prometheus.NormalizeName`. It is disabled by default (alpha stage).
>
> ```shell-session
> $ otelcol --config=config.yaml --feature-gates=-pkg.translator.prometheus.NormalizeName
> $ otelcol --config=config.yaml --feature-gates=pkg.translator.prometheus.NormalizeName
> ```
#### List of transformations to convert OpenTelemetry metrics to Prometheus metrics
2 changes: 1 addition & 1 deletion pkg/translator/prometheus/normalize_name.go
@@ -73,7 +73,7 @@ var perUnitMap = map[string]string{

var normalizeNameGate = featuregate.GlobalRegistry().MustRegister(
"pkg.translator.prometheus.NormalizeName",
featuregate.StageBeta,
featuregate.StageAlpha,
featuregate.WithRegisterDescription("Controls whether metrics names are automatically normalized to follow Prometheus naming convention"),
featuregate.WithRegisterReferenceURL("https://github.com/open-telemetry/opentelemetry-collector-contrib/issues/8950"),
)
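For a usage illustration (not part of this diff), here is a minimal sketch of how a caller outside the package would pick up the new default through the global registry; the import path is assumed from the repository layout:

```go
package main

import (
	"fmt"

	"go.opentelemetry.io/collector/featuregate"
	"go.opentelemetry.io/collector/pdata/pmetric"

	prometheustranslator "github.com/open-telemetry/opentelemetry-collector-contrib/pkg/translator/prometheus"
)

func main() {
	// The normalizer reads the gate from the registry it is given. With the
	// gate back at alpha, suffix trimming is off unless the collector is
	// started with --feature-gates=pkg.translator.prometheus.NormalizeName.
	normalizer := prometheustranslator.NewNormalizer(featuregate.GlobalRegistry())

	name := normalizer.TrimPromSuffixes(
		"active_directory_ds_replication_network_io_bytes_total",
		pmetric.MetricTypeSum,
		"bytes",
	)
	// Printed unchanged by default; with the gate enabled it would be trimmed
	// to "active_directory_ds_replication_network_io" (see the tests below).
	fmt.Println(name)
}
```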
10 changes: 5 additions & 5 deletions pkg/translator/prometheus/normalize_name_test.go
@@ -139,7 +139,10 @@ func TestOtelReceivers(t *testing.T) {
}

func TestTrimPromSuffixes(t *testing.T) {
normalizer := NewNormalizer(featuregate.NewRegistry())
registry := featuregate.NewRegistry()
_, err := registry.Register(normalizeNameGate.ID(), featuregate.StageBeta)
require.NoError(t, err)
normalizer := NewNormalizer(registry)

assert.Equal(t, "active_directory_ds_replication_network_io", normalizer.TrimPromSuffixes("active_directory_ds_replication_network_io_bytes_total", pmetric.MetricTypeSum, "bytes"))
assert.Equal(t, "active_directory_ds_name_cache_hit_rate", normalizer.TrimPromSuffixes("active_directory_ds_name_cache_hit_rate_percent", pmetric.MetricTypeGauge, "percent"))
@@ -172,10 +175,7 @@ func TestTrimPromSuffixes(t *testing.T) {
}

func TestTrimPromSuffixesWithFeatureGateDisabled(t *testing.T) {
registry := featuregate.NewRegistry()
_, err := registry.Register(normalizeNameGate.ID(), featuregate.StageAlpha)
require.NoError(t, err)
normalizer := NewNormalizer(registry)
normalizer := NewNormalizer(featuregate.NewRegistry())

assert.Equal(t, "apache_current_connections", normalizer.TrimPromSuffixes("apache_current_connections", pmetric.MetricTypeGauge, "connections"))
assert.Equal(t, "apache_requests_total", normalizer.TrimPromSuffixes("apache_requests_total", pmetric.MetricTypeSum, "1"))
@@ -147,10 +147,10 @@ func TestAddSingleSumNumberDataPoint(t *testing.T) {
},
want: func() map[string]*prompb.TimeSeries {
labels := []prompb.Label{
{Name: model.MetricNameLabel, Value: "test_sum_total"},
{Name: model.MetricNameLabel, Value: "test_sum"},
}
createdLabels := []prompb.Label{
{Name: model.MetricNameLabel, Value: "test_sum_total" + createdSuffix},
{Name: model.MetricNameLabel, Value: "test_sum" + createdSuffix},
}
return map[string]*prompb.TimeSeries{
timeSeriesSignature(pmetric.MetricTypeSum.String(), &labels): {
@@ -183,7 +183,7 @@ func TestAddSingleSumNumberDataPoint(t *testing.T) {
},
want: func() map[string]*prompb.TimeSeries {
labels := []prompb.Label{
{Name: model.MetricNameLabel, Value: "test_sum_total"},
{Name: model.MetricNameLabel, Value: "test_sum"},
}
return map[string]*prompb.TimeSeries{
timeSeriesSignature(pmetric.MetricTypeSum.String(), &labels): {
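For reference, a small standalone sketch of the series labels the updated `TestAddSingleSumNumberDataPoint` expectation builds, using the `prompb` and `model` packages already imported by this test:

```go
package main

import (
	"fmt"

	"github.com/prometheus/common/model"
	"github.com/prometheus/prometheus/prompb"
)

func main() {
	// With the NormalizeName gate disabled, the remote-write translation keeps
	// the OTel sum name "test_sum" instead of the suffixed "test_sum_total".
	labels := []prompb.Label{
		{Name: model.MetricNameLabel, Value: "test_sum"},
	}
	ts := prompb.TimeSeries{Labels: labels}
	fmt.Printf("%s=%s\n", ts.Labels[0].Name, ts.Labels[0].Value) // __name__=test_sum
}
```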
39 changes: 25 additions & 14 deletions receiver/prometheusreceiver/metrics_receiver_helper_test.go
@@ -108,6 +108,7 @@ type testData struct {
pages []mockPrometheusResponse
attributes pcommon.Map
validateScrapes bool
normalizedName bool
validateFunc func(t *testing.T, td *testData, result []pmetric.ResourceMetrics)
}

@@ -222,13 +223,13 @@ func metricsCount(resourceMetric pmetric.ResourceMetrics) int {
return metricsCount
}

func getValidScrapes(t *testing.T, rms []pmetric.ResourceMetrics) []pmetric.ResourceMetrics {
func getValidScrapes(t *testing.T, rms []pmetric.ResourceMetrics, normalizedNames bool) []pmetric.ResourceMetrics {
var out []pmetric.ResourceMetrics
// rms will include failed scrapes and scrapes that received no metrics but have internal scrape metrics, filter those out
for i := 0; i < len(rms); i++ {
allMetrics := getMetrics(rms[i])
if expectedScrapeMetricCount < len(allMetrics) && countScrapeMetrics(allMetrics) == expectedScrapeMetricCount {
if isFirstFailedScrape(allMetrics) {
if expectedScrapeMetricCount < len(allMetrics) && countScrapeMetrics(allMetrics, normalizedNames) == expectedScrapeMetricCount {
if isFirstFailedScrape(allMetrics, normalizedNames) {
continue
}
assertUp(t, 1, allMetrics)
@@ -240,7 +241,7 @@ func getValidScrapes(t *testing.T, rms []pmetric.ResourceMetrics) []pmetric.Reso
return out
}

func isFirstFailedScrape(metrics []pmetric.Metric) bool {
func isFirstFailedScrape(metrics []pmetric.Metric, normalizedNames bool) bool {
for _, m := range metrics {
if m.Name() == "up" {
if m.Gauge().DataPoints().At(0).DoubleValue() == 1 { // assumed up will not have multiple datapoints
Expand All @@ -250,7 +251,7 @@ func isFirstFailedScrape(metrics []pmetric.Metric) bool {
}

for _, m := range metrics {
if isDefaultMetrics(m) {
if isDefaultMetrics(m, normalizedNames) {
continue
}

@@ -294,37 +295,43 @@ func assertUp(t *testing.T, expected float64, metrics []pmetric.Metric) {
t.Error("No 'up' metric found")
}

func countScrapeMetricsRM(got pmetric.ResourceMetrics) int {
func countScrapeMetricsRM(got pmetric.ResourceMetrics, normalizedNames bool) int {
n := 0
ilms := got.ScopeMetrics()
for j := 0; j < ilms.Len(); j++ {
ilm := ilms.At(j)
for i := 0; i < ilm.Metrics().Len(); i++ {
if isDefaultMetrics(ilm.Metrics().At(i)) {
if isDefaultMetrics(ilm.Metrics().At(i), normalizedNames) {
n++
}
}
}
return n
}

func countScrapeMetrics(metrics []pmetric.Metric) int {
func countScrapeMetrics(metrics []pmetric.Metric, normalizedNames bool) int {
n := 0
for _, m := range metrics {
if isDefaultMetrics(m) {
if isDefaultMetrics(m, normalizedNames) {
n++
}
}
return n
}

func isDefaultMetrics(m pmetric.Metric) bool {
func isDefaultMetrics(m pmetric.Metric, normalizedNames bool) bool {
switch m.Name() {
case "up", "scrape_samples_scraped", "scrape_samples_post_metric_relabeling", "scrape_series_added", "scrape_duration":
case "up", "scrape_samples_scraped", "scrape_samples_post_metric_relabeling", "scrape_series_added":
return true

// if normalizedNames is true, we expect unit `_seconds` to be trimmed.
case "scrape_duration_seconds":
return !normalizedNames
case "scrape_duration":
return normalizedNames
default:
return false
}
return false
}

type metricTypeComparator func(*testing.T, pmetric.Metric)
@@ -341,8 +348,12 @@ type dataPointExpectation struct {
type testExpectation func(*testing.T, pmetric.ResourceMetrics)

func doCompare(t *testing.T, name string, want pcommon.Map, got pmetric.ResourceMetrics, expectations []testExpectation) {
doCompareNormalized(t, name, want, got, expectations, false)
}

func doCompareNormalized(t *testing.T, name string, want pcommon.Map, got pmetric.ResourceMetrics, expectations []testExpectation, normalizedNames bool) {
t.Run(name, func(t *testing.T) {
assert.Equal(t, expectedScrapeMetricCount, countScrapeMetricsRM(got))
assert.Equal(t, expectedScrapeMetricCount, countScrapeMetricsRM(got, normalizedNames))
assert.Equal(t, want.Len(), got.Resource().Attributes().Len())
for k, v := range want.AsRaw() {
val, ok := got.Resource().Attributes().Get(k)
@@ -617,7 +628,7 @@ func testComponent(t *testing.T, targets []*testData, useStartTimeMetric bool, s
}
scrapes := pResults[name]
if !target.validateScrapes {
scrapes = getValidScrapes(t, pResults[name])
scrapes = getValidScrapes(t, pResults[name], target.normalizedName)
}
target.validateFunc(t, target, scrapes)
})
