Skip to content

Commit

Permalink
Revert "[receiver/prometheus] Enable pkg.translator.prometheus.Normal…
Browse files Browse the repository at this point in the history
…izeName by default (open-telemetry#20519)"

This reverts commit 8b0fb2f
  • Loading branch information
sky333999 committed Jun 5, 2023
1 parent 8221b79 commit 2bd9389
Show file tree
Hide file tree
Showing 12 changed files with 158 additions and 134 deletions.
3 changes: 1 addition & 2 deletions exporter/prometheusexporter/collector_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -464,6 +464,7 @@ func TestCollectMetrics(t *testing.T) {
pbMetric := io_prometheus_client.Metric{}
require.NoError(t, m.Write(&pbMetric))

require.Contains(t, m.Desc().String(), "fqName: \"test_space_test_metric\"")
labelsKeys := map[string]string{"label_1": "1", "label_2": "2", "job": "prod/testapp", "instance": "localhost:9090"}
for _, l := range pbMetric.Label {
require.Equal(t, labelsKeys[*l.Name], *l.Value)
Expand All @@ -477,13 +478,11 @@ func TestCollectMetrics(t *testing.T) {

switch tt.metricType {
case prometheus.CounterValue:
require.Contains(t, m.Desc().String(), "fqName: \"test_space_test_metric_total\"")
require.Equal(t, tt.value, *pbMetric.Counter.Value)
require.Nil(t, pbMetric.Gauge)
require.Nil(t, pbMetric.Histogram)
require.Nil(t, pbMetric.Summary)
case prometheus.GaugeValue:
require.Contains(t, m.Desc().String(), "fqName: \"test_space_test_metric\"")
require.Equal(t, tt.value, *pbMetric.Gauge.Value)
require.Nil(t, pbMetric.Counter)
require.Nil(t, pbMetric.Histogram)
Expand Down
72 changes: 36 additions & 36 deletions exporter/prometheusexporter/prometheus_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -172,10 +172,10 @@ func TestPrometheusExporter_WithTLS(t *testing.T) {
_ = rsp.Body.Close()

want := []string{
`# HELP test_counter_int_total`,
`# TYPE test_counter_int_total counter`,
`test_counter_int_total{code2="one2",foo2="bar2",label_1="label-value-1",resource_attr="resource-attr-val-1"} 123 1581452773000`,
`test_counter_int_total{code2="one2",foo2="bar2",label_2="label-value-2",resource_attr="resource-attr-val-1"} 456 1581452773000`,
`# HELP test_counter_int`,
`# TYPE test_counter_int counter`,
`test_counter_int{code2="one2",foo2="bar2",label_1="label-value-1",resource_attr="resource-attr-val-1"} 123 1581452773000`,
`test_counter_int{code2="one2",foo2="bar2",label_2="label-value-2",resource_attr="resource-attr-val-1"} 456 1581452773000`,
}

for _, w := range want {
Expand Down Expand Up @@ -232,18 +232,18 @@ func TestPrometheusExporter_endToEndMultipleTargets(t *testing.T) {
blob, _ := io.ReadAll(res.Body)
_ = res.Body.Close()
want := []string{
`# HELP test_metric_1_this_one_there_where_total Extra ones`,
`# TYPE test_metric_1_this_one_there_where_total counter`,
fmt.Sprintf(`test_metric_1_this_one_there_where_total{arch="x86",code1="one1",foo1="bar1",instance="localhost:8080",job="cpu-exporter",os="windows"} %v`, 99+128),
fmt.Sprintf(`test_metric_1_this_one_there_where_total{arch="x86",code1="one1",foo1="bar1",instance="localhost:8080",job="cpu-exporter",os="linux"} %v`, 100+128),
fmt.Sprintf(`test_metric_1_this_one_there_where_total{arch="x86",code1="one1",foo1="bar1",instance="localhost:8081",job="cpu-exporter",os="windows"} %v`, 99+128),
fmt.Sprintf(`test_metric_1_this_one_there_where_total{arch="x86",code1="one1",foo1="bar1",instance="localhost:8081",job="cpu-exporter",os="linux"} %v`, 100+128),
`# HELP test_metric_2_this_one_there_where_total Extra ones`,
`# TYPE test_metric_2_this_one_there_where_total counter`,
fmt.Sprintf(`test_metric_2_this_one_there_where_total{arch="x86",code1="one1",foo1="bar1",instance="localhost:8080",job="cpu-exporter",os="windows"} %v`, 99+delta),
fmt.Sprintf(`test_metric_2_this_one_there_where_total{arch="x86",code1="one1",foo1="bar1",instance="localhost:8080",job="cpu-exporter",os="linux"} %v`, 100+delta),
fmt.Sprintf(`test_metric_2_this_one_there_where_total{arch="x86",code1="one1",foo1="bar1",instance="localhost:8081",job="cpu-exporter",os="windows"} %v`, 99+delta),
fmt.Sprintf(`test_metric_2_this_one_there_where_total{arch="x86",code1="one1",foo1="bar1",instance="localhost:8081",job="cpu-exporter",os="linux"} %v`, 100+delta),
`# HELP test_metric_1_this_one_there_where Extra ones`,
`# TYPE test_metric_1_this_one_there_where counter`,
fmt.Sprintf(`test_metric_1_this_one_there_where{arch="x86",code1="one1",foo1="bar1",instance="localhost:8080",job="cpu-exporter",os="windows"} %v`, 99+128),
fmt.Sprintf(`test_metric_1_this_one_there_where{arch="x86",code1="one1",foo1="bar1",instance="localhost:8080",job="cpu-exporter",os="linux"} %v`, 100+128),
fmt.Sprintf(`test_metric_1_this_one_there_where{arch="x86",code1="one1",foo1="bar1",instance="localhost:8081",job="cpu-exporter",os="windows"} %v`, 99+128),
fmt.Sprintf(`test_metric_1_this_one_there_where{arch="x86",code1="one1",foo1="bar1",instance="localhost:8081",job="cpu-exporter",os="linux"} %v`, 100+128),
`# HELP test_metric_2_this_one_there_where Extra ones`,
`# TYPE test_metric_2_this_one_there_where counter`,
fmt.Sprintf(`test_metric_2_this_one_there_where{arch="x86",code1="one1",foo1="bar1",instance="localhost:8080",job="cpu-exporter",os="windows"} %v`, 99+delta),
fmt.Sprintf(`test_metric_2_this_one_there_where{arch="x86",code1="one1",foo1="bar1",instance="localhost:8080",job="cpu-exporter",os="linux"} %v`, 100+delta),
fmt.Sprintf(`test_metric_2_this_one_there_where{arch="x86",code1="one1",foo1="bar1",instance="localhost:8081",job="cpu-exporter",os="windows"} %v`, 99+delta),
fmt.Sprintf(`test_metric_2_this_one_there_where{arch="x86",code1="one1",foo1="bar1",instance="localhost:8081",job="cpu-exporter",os="linux"} %v`, 100+delta),
}

for _, w := range want {
Expand Down Expand Up @@ -312,14 +312,14 @@ func TestPrometheusExporter_endToEnd(t *testing.T) {
blob, _ := io.ReadAll(res.Body)
_ = res.Body.Close()
want := []string{
`# HELP test_metric_1_this_one_there_where_total Extra ones`,
`# TYPE test_metric_1_this_one_there_where_total counter`,
fmt.Sprintf(`test_metric_1_this_one_there_where_total{arch="x86",code1="one1",foo1="bar1",instance="localhost:8080",job="cpu-exporter",os="windows"} %v`, 99+128),
fmt.Sprintf(`test_metric_1_this_one_there_where_total{arch="x86",code1="one1",foo1="bar1",instance="localhost:8080",job="cpu-exporter",os="linux"} %v`, 100+128),
`# HELP test_metric_2_this_one_there_where_total Extra ones`,
`# TYPE test_metric_2_this_one_there_where_total counter`,
fmt.Sprintf(`test_metric_2_this_one_there_where_total{arch="x86",code1="one1",foo1="bar1",instance="localhost:8080",job="cpu-exporter",os="windows"} %v`, 99+delta),
fmt.Sprintf(`test_metric_2_this_one_there_where_total{arch="x86",code1="one1",foo1="bar1",instance="localhost:8080",job="cpu-exporter",os="linux"} %v`, 100+delta),
`# HELP test_metric_1_this_one_there_where Extra ones`,
`# TYPE test_metric_1_this_one_there_where counter`,
fmt.Sprintf(`test_metric_1_this_one_there_where{arch="x86",code1="one1",foo1="bar1",instance="localhost:8080",job="cpu-exporter",os="windows"} %v`, 99+128),
fmt.Sprintf(`test_metric_1_this_one_there_where{arch="x86",code1="one1",foo1="bar1",instance="localhost:8080",job="cpu-exporter",os="linux"} %v`, 100+128),
`# HELP test_metric_2_this_one_there_where Extra ones`,
`# TYPE test_metric_2_this_one_there_where counter`,
fmt.Sprintf(`test_metric_2_this_one_there_where{arch="x86",code1="one1",foo1="bar1",instance="localhost:8080",job="cpu-exporter",os="windows"} %v`, 99+delta),
fmt.Sprintf(`test_metric_2_this_one_there_where{arch="x86",code1="one1",foo1="bar1",instance="localhost:8080",job="cpu-exporter",os="linux"} %v`, 100+delta),
}

for _, w := range want {
Expand Down Expand Up @@ -389,14 +389,14 @@ func TestPrometheusExporter_endToEndWithTimestamps(t *testing.T) {
blob, _ := io.ReadAll(res.Body)
_ = res.Body.Close()
want := []string{
`# HELP test_metric_1_this_one_there_where_total Extra ones`,
`# TYPE test_metric_1_this_one_there_where_total counter`,
fmt.Sprintf(`test_metric_1_this_one_there_where_total{arch="x86",code2="one2",foo2="bar2",instance="localhost:8080",job="node-exporter",os="windows"} %v %v`, 99+128, 1543160298100+128000),
fmt.Sprintf(`test_metric_1_this_one_there_where_total{arch="x86",code2="one2",foo2="bar2",instance="localhost:8080",job="node-exporter",os="linux"} %v %v`, 100+128, 1543160298100),
`# HELP test_metric_2_this_one_there_where_total Extra ones`,
`# TYPE test_metric_2_this_one_there_where_total counter`,
fmt.Sprintf(`test_metric_2_this_one_there_where_total{arch="x86",code2="one2",foo2="bar2",instance="localhost:8080",job="node-exporter",os="windows"} %v %v`, 99+delta, 1543160298100+delta*1000),
fmt.Sprintf(`test_metric_2_this_one_there_where_total{arch="x86",code2="one2",foo2="bar2",instance="localhost:8080",job="node-exporter",os="linux"} %v %v`, 100+delta, 1543160298100),
`# HELP test_metric_1_this_one_there_where Extra ones`,
`# TYPE test_metric_1_this_one_there_where counter`,
fmt.Sprintf(`test_metric_1_this_one_there_where{arch="x86",code2="one2",foo2="bar2",instance="localhost:8080",job="node-exporter",os="windows"} %v %v`, 99+128, 1543160298100+128000),
fmt.Sprintf(`test_metric_1_this_one_there_where{arch="x86",code2="one2",foo2="bar2",instance="localhost:8080",job="node-exporter",os="linux"} %v %v`, 100+128, 1543160298100),
`# HELP test_metric_2_this_one_there_where Extra ones`,
`# TYPE test_metric_2_this_one_there_where counter`,
fmt.Sprintf(`test_metric_2_this_one_there_where{arch="x86",code2="one2",foo2="bar2",instance="localhost:8080",job="node-exporter",os="windows"} %v %v`, 99+delta, 1543160298100+delta*1000),
fmt.Sprintf(`test_metric_2_this_one_there_where{arch="x86",code2="one2",foo2="bar2",instance="localhost:8080",job="node-exporter",os="linux"} %v %v`, 100+delta, 1543160298100),
}

for _, w := range want {
Expand Down Expand Up @@ -469,10 +469,10 @@ func TestPrometheusExporter_endToEndWithResource(t *testing.T) {
_ = rsp.Body.Close()

want := []string{
`# HELP test_counter_int_total`,
`# TYPE test_counter_int_total counter`,
`test_counter_int_total{code2="one2",foo2="bar2",label_1="label-value-1",resource_attr="resource-attr-val-1"} 123 1581452773000`,
`test_counter_int_total{code2="one2",foo2="bar2",label_2="label-value-2",resource_attr="resource-attr-val-1"} 456 1581452773000`,
`# HELP test_counter_int`,
`# TYPE test_counter_int counter`,
`test_counter_int{code2="one2",foo2="bar2",label_1="label-value-1",resource_attr="resource-attr-val-1"} 123 1581452773000`,
`test_counter_int{code2="one2",foo2="bar2",label_2="label-value-2",resource_attr="resource-attr-val-1"} 456 1581452773000`,
}

for _, w := range want {
Expand Down
4 changes: 2 additions & 2 deletions pkg/translator/prometheus/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -11,10 +11,10 @@

> **Warning**
>
> This feature can be disabled with [feature gate](https://github.com/open-telemetry/opentelemetry-collector/tree/main/featuregate) `pkg.translator.prometheus.NormalizeName`. It is enabled by default (beta stage).
> This feature must be enabled with [feature gate](https://github.com/open-telemetry/opentelemetry-collector/tree/main/featuregate) `pkg.translator.prometheus.NormalizeName`. It is disabled by default (alpha stage).
>
> ```shell-session
> $ otelcol --config=config.yaml --feature-gates=-pkg.translator.prometheus.NormalizeName
> $ otelcol --config=config.yaml --feature-gates=pkg.translator.prometheus.NormalizeName
> ```
List of transformations performed on OpenTelemetry metrics names:
Expand Down
2 changes: 1 addition & 1 deletion pkg/translator/prometheus/normalize_name.go
Original file line number Diff line number Diff line change
Expand Up @@ -84,7 +84,7 @@ var perUnitMap = map[string]string{

var normalizeNameGate = featuregate.GlobalRegistry().MustRegister(
"pkg.translator.prometheus.NormalizeName",
featuregate.StageBeta,
featuregate.StageAlpha,
featuregate.WithRegisterDescription("Controls whether metrics names are automatically normalized to follow Prometheus naming convention"),
featuregate.WithRegisterReferenceURL("https://github.com/open-telemetry/opentelemetry-collector-contrib/issues/8950"),
)
Expand Down
10 changes: 5 additions & 5 deletions pkg/translator/prometheus/normalize_name_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -150,7 +150,10 @@ func TestOtelReceivers(t *testing.T) {
}

func TestTrimPromSuffixes(t *testing.T) {
normalizer := NewNormalizer(featuregate.NewRegistry())
registry := featuregate.NewRegistry()
_, err := registry.Register(normalizeNameGate.ID(), featuregate.StageBeta)
require.NoError(t, err)
normalizer := NewNormalizer(registry)

assert.Equal(t, "active_directory_ds_replication_network_io", normalizer.TrimPromSuffixes("active_directory_ds_replication_network_io_bytes_total", pmetric.MetricTypeSum, "bytes"))
assert.Equal(t, "active_directory_ds_name_cache_hit_rate", normalizer.TrimPromSuffixes("active_directory_ds_name_cache_hit_rate_percent", pmetric.MetricTypeGauge, "percent"))
Expand Down Expand Up @@ -183,10 +186,7 @@ func TestTrimPromSuffixes(t *testing.T) {
}

func TestTrimPromSuffixesWithFeatureGateDisabled(t *testing.T) {
registry := featuregate.NewRegistry()
_, err := registry.Register(normalizeNameGate.ID(), featuregate.StageAlpha)
require.NoError(t, err)
normalizer := NewNormalizer(registry)
normalizer := NewNormalizer(featuregate.NewRegistry())

assert.Equal(t, "apache_current_connections", normalizer.TrimPromSuffixes("apache_current_connections", pmetric.MetricTypeGauge, "connections"))
assert.Equal(t, "apache_requests_total", normalizer.TrimPromSuffixes("apache_requests_total", pmetric.MetricTypeSum, "1"))
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -158,10 +158,10 @@ func TestAddSingleSumNumberDataPoint(t *testing.T) {
},
want: func() map[string]*prompb.TimeSeries {
labels := []prompb.Label{
{Name: model.MetricNameLabel, Value: "test_sum_total"},
{Name: model.MetricNameLabel, Value: "test_sum"},
}
createdLabels := []prompb.Label{
{Name: model.MetricNameLabel, Value: "test_sum_total" + createdSuffix},
{Name: model.MetricNameLabel, Value: "test_sum" + createdSuffix},
}
return map[string]*prompb.TimeSeries{
timeSeriesSignature(pmetric.MetricTypeSum.String(), &labels): {
Expand Down Expand Up @@ -194,7 +194,7 @@ func TestAddSingleSumNumberDataPoint(t *testing.T) {
},
want: func() map[string]*prompb.TimeSeries {
labels := []prompb.Label{
{Name: model.MetricNameLabel, Value: "test_sum_total"},
{Name: model.MetricNameLabel, Value: "test_sum"},
}
return map[string]*prompb.TimeSeries{
timeSeriesSignature(pmetric.MetricTypeSum.String(), &labels): {
Expand Down
39 changes: 25 additions & 14 deletions receiver/prometheusreceiver/metrics_receiver_helper_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -119,6 +119,7 @@ type testData struct {
pages []mockPrometheusResponse
attributes pcommon.Map
validateScrapes bool
normalizedName bool
validateFunc func(t *testing.T, td *testData, result []pmetric.ResourceMetrics)
}

Expand Down Expand Up @@ -233,13 +234,13 @@ func metricsCount(resourceMetric pmetric.ResourceMetrics) int {
return metricsCount
}

func getValidScrapes(t *testing.T, rms []pmetric.ResourceMetrics) []pmetric.ResourceMetrics {
func getValidScrapes(t *testing.T, rms []pmetric.ResourceMetrics, normalizedNames bool) []pmetric.ResourceMetrics {
var out []pmetric.ResourceMetrics
// rms will include failed scrapes and scrapes that received no metrics but have internal scrape metrics, filter those out
for i := 0; i < len(rms); i++ {
allMetrics := getMetrics(rms[i])
if expectedScrapeMetricCount < len(allMetrics) && countScrapeMetrics(allMetrics) == expectedScrapeMetricCount {
if isFirstFailedScrape(allMetrics) {
if expectedScrapeMetricCount < len(allMetrics) && countScrapeMetrics(allMetrics, normalizedNames) == expectedScrapeMetricCount {
if isFirstFailedScrape(allMetrics, normalizedNames) {
continue
}
assertUp(t, 1, allMetrics)
Expand All @@ -251,7 +252,7 @@ func getValidScrapes(t *testing.T, rms []pmetric.ResourceMetrics) []pmetric.Reso
return out
}

func isFirstFailedScrape(metrics []pmetric.Metric) bool {
func isFirstFailedScrape(metrics []pmetric.Metric, normalizedNames bool) bool {
for _, m := range metrics {
if m.Name() == "up" {
if m.Gauge().DataPoints().At(0).DoubleValue() == 1 { // assumed up will not have multiple datapoints
Expand All @@ -261,7 +262,7 @@ func isFirstFailedScrape(metrics []pmetric.Metric) bool {
}

for _, m := range metrics {
if isDefaultMetrics(m) {
if isDefaultMetrics(m, normalizedNames) {
continue
}

Expand Down Expand Up @@ -305,37 +306,43 @@ func assertUp(t *testing.T, expected float64, metrics []pmetric.Metric) {
t.Error("No 'up' metric found")
}

func countScrapeMetricsRM(got pmetric.ResourceMetrics) int {
func countScrapeMetricsRM(got pmetric.ResourceMetrics, normalizedNames bool) int {
n := 0
ilms := got.ScopeMetrics()
for j := 0; j < ilms.Len(); j++ {
ilm := ilms.At(j)
for i := 0; i < ilm.Metrics().Len(); i++ {
if isDefaultMetrics(ilm.Metrics().At(i)) {
if isDefaultMetrics(ilm.Metrics().At(i), normalizedNames) {
n++
}
}
}
return n
}

func countScrapeMetrics(metrics []pmetric.Metric) int {
func countScrapeMetrics(metrics []pmetric.Metric, normalizedNames bool) int {
n := 0
for _, m := range metrics {
if isDefaultMetrics(m) {
if isDefaultMetrics(m, normalizedNames) {
n++
}
}
return n
}

func isDefaultMetrics(m pmetric.Metric) bool {
func isDefaultMetrics(m pmetric.Metric, normalizedNames bool) bool {
switch m.Name() {
case "up", "scrape_samples_scraped", "scrape_samples_post_metric_relabeling", "scrape_series_added", "scrape_duration":
case "up", "scrape_samples_scraped", "scrape_samples_post_metric_relabeling", "scrape_series_added":
return true

// if normalizedNames is true, we expect unit `_seconds` to be trimmed.
case "scrape_duration_seconds":
return !normalizedNames
case "scrape_duration":
return normalizedNames
default:
return false
}
return false
}

type metricTypeComparator func(*testing.T, pmetric.Metric)
Expand All @@ -352,8 +359,12 @@ type dataPointExpectation struct {
type testExpectation func(*testing.T, pmetric.ResourceMetrics)

func doCompare(t *testing.T, name string, want pcommon.Map, got pmetric.ResourceMetrics, expectations []testExpectation) {
doCompareNormalized(t, name, want, got, expectations, false)
}

func doCompareNormalized(t *testing.T, name string, want pcommon.Map, got pmetric.ResourceMetrics, expectations []testExpectation, normalizedNames bool) {
t.Run(name, func(t *testing.T) {
assert.Equal(t, expectedScrapeMetricCount, countScrapeMetricsRM(got))
assert.Equal(t, expectedScrapeMetricCount, countScrapeMetricsRM(got, normalizedNames))
assert.Equal(t, want.Len(), got.Resource().Attributes().Len())
for k, v := range want.AsRaw() {
val, ok := got.Resource().Attributes().Get(k)
Expand Down Expand Up @@ -628,7 +639,7 @@ func testComponent(t *testing.T, targets []*testData, useStartTimeMetric bool, s
}
scrapes := pResults[name]
if !target.validateScrapes {
scrapes = getValidScrapes(t, pResults[name])
scrapes = getValidScrapes(t, pResults[name], target.normalizedName)
}
target.validateFunc(t, target, scrapes)
})
Expand Down
Loading

0 comments on commit 2bd9389

Please sign in to comment.