diff --git a/exporter/carbonexporter/exporter.go b/exporter/carbonexporter/exporter.go index e563ed88cd6f..5267d07c17ff 100644 --- a/exporter/carbonexporter/exporter.go +++ b/exporter/carbonexporter/exporter.go @@ -21,12 +21,9 @@ import ( "sync" "time" - agentmetricspb "github.com/census-instrumentation/opencensus-proto/gen-go/agent/metrics/v1" "go.opentelemetry.io/collector/component" "go.opentelemetry.io/collector/exporter/exporterhelper" "go.opentelemetry.io/collector/pdata/pmetric" - - internaldata "github.com/open-telemetry/opentelemetry-collector-contrib/pkg/translator/opencensus" ) // newCarbonExporter returns a new Carbon exporter. @@ -62,14 +59,7 @@ type carbonSender struct { } func (cs *carbonSender) pushMetricsData(_ context.Context, md pmetric.Metrics) error { - rms := md.ResourceMetrics() - mds := make([]*agentmetricspb.ExportMetricsServiceRequest, 0, rms.Len()) - for i := 0; i < rms.Len(); i++ { - emsr := &agentmetricspb.ExportMetricsServiceRequest{} - emsr.Node, emsr.Resource, emsr.Metrics = internaldata.ResourceMetricsToOC(rms.At(i)) - mds = append(mds, emsr) - } - lines, _, _ := metricDataToPlaintext(mds) + lines := metricDataToPlaintext(md) if _, err := cs.connPool.Write([]byte(lines)); err != nil { // Use the sum of converted and dropped since the write failed for all. 
diff --git a/exporter/carbonexporter/exporter_test.go b/exporter/carbonexporter/exporter_test.go index a8657d6d16ee..b94c88229e27 100644 --- a/exporter/carbonexporter/exporter_test.go +++ b/exporter/carbonexporter/exporter_test.go @@ -27,20 +27,16 @@ import ( "testing" "time" - commonpb "github.com/census-instrumentation/opencensus-proto/gen-go/agent/common/v1" - metricspb "github.com/census-instrumentation/opencensus-proto/gen-go/metrics/v1" - resourcepb "github.com/census-instrumentation/opencensus-proto/gen-go/resource/v1" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" "go.opentelemetry.io/collector/component/componenttest" "go.opentelemetry.io/collector/config" + "go.opentelemetry.io/collector/pdata/pcommon" "go.opentelemetry.io/collector/pdata/pmetric" + conventions "go.opentelemetry.io/collector/semconv/v1.9.0" "go.uber.org/atomic" - "google.golang.org/protobuf/types/known/timestamppb" "github.com/open-telemetry/opentelemetry-collector-contrib/internal/common/testutil" - "github.com/open-telemetry/opentelemetry-collector-contrib/internal/coreinternal/metricstestutil/ocmetricstestutil" - internaldata "github.com/open-telemetry/opentelemetry-collector-contrib/pkg/translator/opencensus" ) func TestNew(t *testing.T) { @@ -87,16 +83,14 @@ func TestNew(t *testing.T) { func TestConsumeMetricsData(t *testing.T) { t.Skip("skipping flaky test, see https://github.com/open-telemetry/opentelemetry-collector-contrib/issues/396") - smallBatch := internaldata.OCToMetrics(nil, nil, []*metricspb.Metric{ - ocmetricstestutil.Gauge( - "test_gauge", - []string{"k0", "k1"}, - ocmetricstestutil.Timeseries( - time.Now(), - []string{"v0", "v1"}, - ocmetricstestutil.Double(time.Now(), 123))), - }) - + smallBatch := pmetric.NewMetrics() + m := smallBatch.ResourceMetrics().AppendEmpty().ScopeMetrics().AppendEmpty().Metrics().AppendEmpty() + m.SetName("test_gauge") + dp := m.SetEmptyGauge().DataPoints().AppendEmpty() + dp.Attributes().PutString("k0", "v0") + 
dp.Attributes().PutString("k1", "v1") + dp.SetTimestamp(pcommon.NewTimestampFromTime(time.Now())) + dp.SetDoubleValue(123) largeBatch := generateLargeBatch() tests := []struct { @@ -284,29 +278,21 @@ func Test_connPool_Concurrency(t *testing.T) { } func generateLargeBatch() pmetric.Metrics { - var metrics []*metricspb.Metric ts := time.Now() + metrics := pmetric.NewMetrics() + rm := metrics.ResourceMetrics().AppendEmpty() + rm.Resource().Attributes().PutString(conventions.AttributeServiceName, "test_carbon") + ms := rm.ScopeMetrics().AppendEmpty().Metrics() + for i := 0; i < 65000; i++ { - metrics = append(metrics, - ocmetricstestutil.Gauge( - "test_"+strconv.Itoa(i), - []string{"k0", "k1"}, - ocmetricstestutil.Timeseries( - time.Now(), - []string{"v0", "v1"}, - &metricspb.Point{ - Timestamp: timestamppb.New(ts), - Value: &metricspb.Point_Int64Value{Int64Value: int64(i)}, - }, - ), - ), - ) + m := ms.AppendEmpty() + m.SetName("test_" + strconv.Itoa(i)) + dp := m.SetEmptyGauge().DataPoints().AppendEmpty() + dp.Attributes().PutString("k0", "v0") + dp.Attributes().PutString("k1", "v1") + dp.SetTimestamp(pcommon.NewTimestampFromTime(ts)) + dp.SetIntValue(int64(i)) } - return internaldata.OCToMetrics( - &commonpb.Node{ - ServiceInfo: &commonpb.ServiceInfo{Name: "test_carbon"}, - }, - &resourcepb.Resource{Type: "test"}, - metrics) + return metrics } diff --git a/exporter/carbonexporter/go.mod b/exporter/carbonexporter/go.mod index 248bb9774917..f241dfcc00e0 100644 --- a/exporter/carbonexporter/go.mod +++ b/exporter/carbonexporter/go.mod @@ -3,25 +3,21 @@ module github.com/open-telemetry/opentelemetry-collector-contrib/exporter/carbon go 1.18 require ( - github.com/census-instrumentation/opencensus-proto v0.4.1 github.com/open-telemetry/opentelemetry-collector-contrib/internal/common v0.61.0 - github.com/open-telemetry/opentelemetry-collector-contrib/internal/coreinternal v0.61.0 - github.com/open-telemetry/opentelemetry-collector-contrib/pkg/translator/opencensus v0.61.0 
github.com/stretchr/testify v1.8.0 go.opentelemetry.io/collector v0.61.0 go.opentelemetry.io/collector/pdata v0.61.0 + go.opentelemetry.io/collector/semconv v0.61.0 go.uber.org/atomic v1.10.0 - google.golang.org/protobuf v1.28.1 - ) require ( github.com/cenkalti/backoff/v4 v4.1.3 // indirect github.com/davecgh/go-spew v1.1.1 // indirect + github.com/fsnotify/fsnotify v1.5.4 // indirect github.com/gogo/protobuf v1.3.2 // indirect - github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect github.com/golang/protobuf v1.5.2 // indirect - github.com/grpc-ecosystem/grpc-gateway/v2 v2.11.3 // indirect + github.com/google/go-cmp v0.5.9 // indirect github.com/json-iterator/go v1.1.12 // indirect github.com/knadh/koanf v1.4.3 // indirect github.com/mitchellh/copystructure v1.2.0 // indirect @@ -29,9 +25,9 @@ require ( github.com/mitchellh/reflectwalk v1.0.2 // indirect github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect github.com/modern-go/reflect2 v1.0.2 // indirect + github.com/pelletier/go-toml v1.9.4 // indirect github.com/pmezard/go-difflib v1.0.0 // indirect go.opencensus.io v0.23.0 // indirect - go.opentelemetry.io/collector/semconv v0.61.0 // indirect go.opentelemetry.io/otel v1.10.0 // indirect go.opentelemetry.io/otel/metric v0.32.1 // indirect go.opentelemetry.io/otel/trace v1.10.0 // indirect @@ -42,11 +38,8 @@ require ( golang.org/x/text v0.3.7 // indirect google.golang.org/genproto v0.0.0-20220822174746-9e6da59bd2fc // indirect google.golang.org/grpc v1.49.0 // indirect + google.golang.org/protobuf v1.28.1 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect ) -replace github.com/open-telemetry/opentelemetry-collector-contrib/pkg/translator/opencensus => ../../pkg/translator/opencensus - replace github.com/open-telemetry/opentelemetry-collector-contrib/internal/common => ../../internal/common - -replace github.com/open-telemetry/opentelemetry-collector-contrib/internal/coreinternal => ../../internal/coreinternal diff 
--git a/exporter/carbonexporter/go.sum b/exporter/carbonexporter/go.sum index 86801630a628..379c6081cadf 100644 --- a/exporter/carbonexporter/go.sum +++ b/exporter/carbonexporter/go.sum @@ -29,8 +29,6 @@ github.com/bgentry/speakeasy v0.1.0/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kB github.com/cenkalti/backoff/v4 v4.1.3 h1:cFAlzYUlVYDysBEH2T5hyJZMh3+5+WCBvSnK6Q8UtC4= github.com/cenkalti/backoff/v4 v4.1.3/go.mod h1:scbssz8iZGpm3xbr14ovlUdkxfGXNInqkPWOWmG2CLw= github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= -github.com/census-instrumentation/opencensus-proto v0.4.1 h1:iKLQ0xPNFxR/2hzXZMrBo8f1j86j5WHzznCCQxV/b8g= -github.com/census-instrumentation/opencensus-proto v0.4.1/go.mod h1:4T9NM4+4Vw91VeyqjLS6ao50K5bOcLKN6Q42XnYaRYw= github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc= @@ -51,6 +49,7 @@ github.com/fatih/color v1.9.0/go.mod h1:eQcE1qtQxscV5RaZvpXrrb8Drkc3/DdQ+uUYCNjL github.com/fatih/structs v1.1.0/go.mod h1:9NiDSp5zOcgEDl+j00MP/WkGVPOlPRLejGD8Ga6PJ7M= github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ= github.com/fsnotify/fsnotify v1.5.4 h1:jRbGcIw6P2Meqdwuo0H1p6JVLbL5DHKAKlYndzMwVZI= +github.com/fsnotify/fsnotify v1.5.4/go.mod h1:OVB6XrOHzAwXMpEM7uPOzcehqUV2UqJxmVXmkdnm1bU= github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= github.com/go-kit/kit v0.9.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= @@ -68,10 +67,8 @@ github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7a github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q= 
github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= -github.com/golang/glog v1.0.0 h1:nfP3RFugxnNRyKgeWd4oI1nYvXpxrx8ck8ZrcizshdQ= github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da h1:oI5xCqsCo564l8iNU+DwB5epxmsaqB+rhGL0m5jtYqE= -github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= @@ -101,12 +98,11 @@ github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/ github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.7/go.mod h1:n+brtR0CgQNWTVd5ZUFpTBC8YFBDLK/h/bpaJ8/DtOE= github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38= +github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0/go.mod h1:8NvIoxWQoOIhqOTXgfV/d3M/q6VIi02HzZEHgUlZvzk= github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFbcEtjT1g+wF4CSlocrBnw= -github.com/grpc-ecosystem/grpc-gateway/v2 v2.11.3 h1:lLT7ZLSzGLI08vc9cpd+tYmNWjdKDqyr/2L+f6U12Fk= -github.com/grpc-ecosystem/grpc-gateway/v2 v2.11.3/go.mod h1:o//XUCC/F+yRGJoPO/VU0GSB0f8Nhgmxx0VIRUvaC0w= github.com/hashicorp/consul/api v1.13.0/go.mod 
h1:ZlVrynguJKcYr54zGaDbaL3fOvKC9m72FhPvA8T35KQ= github.com/hashicorp/consul/sdk v0.8.0/go.mod h1:GBvyrGALthsZObzUGsfgHZQDXjg4lOjagTIwIR1vPms= github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= @@ -210,6 +206,7 @@ github.com/pascaldekloe/goe v0.0.0-20180627143212-57f6aae5913c/go.mod h1:lzWF7FI github.com/pascaldekloe/goe v0.1.0/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc= github.com/pelletier/go-toml v1.7.0/go.mod h1:vwGMzjaWMwyfHwgIBhI2YUM4fB6nL6lVAvS1LBMMhTE= github.com/pelletier/go-toml v1.9.4 h1:tjENF6MfZAg8e4ZmZTeWaWiT2vXtsoO6+iuOjFhECwM= +github.com/pelletier/go-toml v1.9.4/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c= github.com/pierrec/lz4 v2.0.5+incompatible/go.mod h1:pdkljMzZIN41W+lC3N2tnIh5sFi+IEE17M5jbnwPHcY= github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= @@ -364,6 +361,7 @@ golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20210403161142-5e06dd20ab57/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210603081109-ebe580a85c40/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220412211240-33da011f77ad/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220808155132-1c4a2a72c664 h1:v1W7bwXHsnLLloWYTVEdvGvA7BHMeBYsPcF0GLDxIRs= golang.org/x/sys v0.0.0-20220808155132-1c4a2a72c664/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= diff --git a/exporter/carbonexporter/metricdata_to_plaintext.go b/exporter/carbonexporter/metricdata_to_plaintext.go index 55bfff745b6a..2a92da7fdf13 100644 --- 
a/exporter/carbonexporter/metricdata_to_plaintext.go +++ b/exporter/carbonexporter/metricdata_to_plaintext.go @@ -15,12 +15,11 @@ package carbonexporter // import "github.com/open-telemetry/opentelemetry-collector-contrib/exporter/carbonexporter" import ( - "fmt" "strconv" "strings" - agentmetricspb "github.com/census-instrumentation/opencensus-proto/gen-go/agent/metrics/v1" - metricspb "github.com/census-instrumentation/opencensus-proto/gen-go/metrics/v1" + "go.opentelemetry.io/collector/pdata/pcommon" + "go.opentelemetry.io/collector/pdata/pmetric" ) const ( @@ -81,82 +80,55 @@ const ( // a single Carbon metric. // - number of time series successfully converted to carbon. // - number of time series that could not be converted to Carbon. -func metricDataToPlaintext(mds []*agentmetricspb.ExportMetricsServiceRequest) (string, int, int) { - if len(mds) == 0 { - return "", 0, 0 +func metricDataToPlaintext(md pmetric.Metrics) string { + if md.DataPointCount() == 0 { + return "" } - var sb strings.Builder - numTimeseriesDropped := 0 - totalTimeseries := 0 - - for _, md := range mds { - for _, metric := range md.Metrics { - totalTimeseries++ - descriptor := metric.MetricDescriptor - name := descriptor.GetName() - if name == "" { - numTimeseriesDropped += len(metric.Timeseries) - // TODO: observability for this, debug logging. 
- continue - } - - tagKeys := buildSanitizedTagKeys(metric.MetricDescriptor.LabelKeys) - for _, ts := range metric.Timeseries { - if len(tagKeys) != len(ts.LabelValues) { - numTimeseriesDropped++ - // TODO: observability with debug, something like the message below: - // "inconsistent number of labelKeys(%d) and labelValues(%d) for metric %q", - // len(tagKeys), - // len(labelValues), - // name) + var sb strings.Builder + for i := 0; i < md.ResourceMetrics().Len(); i++ { + rm := md.ResourceMetrics().At(i) + for j := 0; j < rm.ScopeMetrics().Len(); j++ { + sm := rm.ScopeMetrics().At(j) + for k := 0; k < sm.Metrics().Len(); k++ { + metric := sm.Metrics().At(k) + if metric.Name() == "" { + // TODO: log error info continue } - - // From this point on all code below is safe to assume that - // len(tagKeys) is equal to len(labelValues). - - for _, point := range ts.Points { - timestampStr := formatInt64(point.GetTimestamp().GetSeconds()) - - switch pv := point.Value.(type) { - - case *metricspb.Point_Int64Value: - path := buildPath(name, tagKeys, ts.LabelValues) - valueStr := formatInt64(pv.Int64Value) - sb.WriteString(buildLine(path, valueStr, timestampStr)) - - case *metricspb.Point_DoubleValue: - path := buildPath(name, tagKeys, ts.LabelValues) - valueStr := formatFloatForValue(pv.DoubleValue) - sb.WriteString(buildLine(path, valueStr, timestampStr)) - - case *metricspb.Point_DistributionValue: - err := buildDistributionIntoBuilder( - &sb, name, tagKeys, ts.LabelValues, timestampStr, pv.DistributionValue) - if err != nil { - // TODO: log error info - numTimeseriesDropped++ - } - - case *metricspb.Point_SummaryValue: - err := buildSummaryIntoBuilder( - &sb, name, tagKeys, ts.LabelValues, timestampStr, pv.SummaryValue) - if err != nil { - // TODO: log error info - numTimeseriesDropped++ - } - } + switch metric.Type() { + case pmetric.MetricTypeGauge: + formatNumberDataPoints(&sb, metric.Name(), metric.Gauge().DataPoints()) + case pmetric.MetricTypeSum: + 
formatNumberDataPoints(&sb, metric.Name(), metric.Sum().DataPoints()) + case pmetric.MetricTypeHistogram: + formatHistogramDataPoints(&sb, metric.Name(), metric.Histogram().DataPoints()) + case pmetric.MetricTypeSummary: + formatSummaryDataPoints(&sb, metric.Name(), metric.Summary().DataPoints()) } } } } - return sb.String(), totalTimeseries - numTimeseriesDropped, numTimeseriesDropped + return sb.String() } -// buildDistributionIntoBuilder transforms a metric distribution into a series +func formatNumberDataPoints(sb *strings.Builder, metricName string, dps pmetric.NumberDataPointSlice) { + for i := 0; i < dps.Len(); i++ { + dp := dps.At(i) + var valueStr string + switch dp.ValueType() { + case pmetric.NumberDataPointValueTypeInt: + valueStr = formatInt64(dp.IntValue()) + case pmetric.NumberDataPointValueTypeDouble: + valueStr = formatFloatForValue(dp.DoubleValue()) + } + sb.WriteString(buildLine(buildPath(metricName, dp.Attributes()), valueStr, formatTimestamp(dp.Timestamp()))) + } +} + +// formatHistogramDataPoints transforms a slice of histogram data points into a series // of Carbon metrics and injects them into the string builder. // // Carbon doesn't have direct support to distribution metrics they will be @@ -170,49 +142,35 @@ func metricDataToPlaintext(mds []*agentmetricspb.ExportMetricsServiceRequest) (s // and will include a dimension "upper_bound" that specifies the maximum value in // that bucket. This metric specifies the number of events with a value that is // less than or equal to the upper bound. 
-func buildDistributionIntoBuilder( +func formatHistogramDataPoints( sb *strings.Builder, metricName string, - tagKeys []string, - labelValues []*metricspb.LabelValue, - timestampStr string, - distributionValue *metricspb.DistributionValue, -) error { - buildCountAndSumIntoBuilder( - sb, - metricName, - tagKeys, - labelValues, - distributionValue.GetCount(), - distributionValue.GetSum(), - timestampStr) - - explicitBuckets := distributionValue.BucketOptions.GetExplicit() - if explicitBuckets == nil { - return fmt.Errorf( - "unknown bucket options type for metric %q", - metricName) - } + dps pmetric.HistogramDataPointSlice, +) { + for i := 0; i < dps.Len(); i++ { + dp := dps.At(i) - bounds := explicitBuckets.Bounds - carbonBounds := make([]string, len(bounds)+1) - for i := 0; i < len(bounds); i++ { - carbonBounds[i] = formatFloatForLabel(bounds[i]) - } - carbonBounds[len(carbonBounds)-1] = infinityCarbonValue - - bucketPath := buildPath(metricName+distributionBucketSuffix, tagKeys, labelValues) - for i, bucket := range distributionValue.Buckets { - sb.WriteString(buildLine( - bucketPath+distributionUpperBoundTagBeforeValue+carbonBounds[i], - formatInt64(bucket.Count), - timestampStr)) - } + timestampStr := formatTimestamp(dp.Timestamp()) + formatCountAndSum(sb, metricName, dp.Attributes(), dp.Count(), dp.Sum(), timestampStr) + if dp.ExplicitBounds().Len() == 0 { + continue + } - return nil + bounds := dp.ExplicitBounds().AsRaw() + carbonBounds := make([]string, len(bounds)+1) + for i := 0; i < len(bounds); i++ { + carbonBounds[i] = formatFloatForLabel(bounds[i]) + } + carbonBounds[len(carbonBounds)-1] = infinityCarbonValue + + bucketPath := buildPath(metricName+distributionBucketSuffix, dp.Attributes()) + for j := 0; j < dp.BucketCounts().Len(); j++ { + sb.WriteString(buildLine(bucketPath+distributionUpperBoundTagBeforeValue+carbonBounds[j], formatUint64(dp.BucketCounts().At(j)), timestampStr)) + } + } } -// buildSummaryIntoBuilder transforms a metric summary into a 
series +// formatSummaryDataPoints transforms a slice of summary data points into a series // of Carbon metrics and injects them into the string builder. // // Carbon doesn't have direct support to summary metrics they will be @@ -224,39 +182,29 @@ func buildDistributionIntoBuilder( // // 3. Each quantile is represented by a metric named ".quantile" // and will include a tag key "quantile" that specifies the quantile value. -func buildSummaryIntoBuilder( +func formatSummaryDataPoints( sb *strings.Builder, metricName string, - tagKeys []string, - labelValues []*metricspb.LabelValue, - timestampStr string, - summaryValue *metricspb.SummaryValue, -) error { - buildCountAndSumIntoBuilder( - sb, - metricName, - tagKeys, - labelValues, - summaryValue.GetCount().GetValue(), - summaryValue.GetSum().GetValue(), - timestampStr) - - percentiles := summaryValue.GetSnapshot().GetPercentileValues() - if percentiles == nil { - return fmt.Errorf( - "unknown percentiles values for summary metric %q", - metricName) - } + dps pmetric.SummaryDataPointSlice, +) { + for i := 0; i < dps.Len(); i++ { + dp := dps.At(i) - quantilePath := buildPath(metricName+summaryQuantileSuffix, tagKeys, labelValues) - for _, quantile := range percentiles { - sb.WriteString(buildLine( - quantilePath+summaryQuantileTagBeforeValue+formatFloatForLabel(quantile.GetPercentile()), - formatFloatForValue(quantile.GetValue()), - timestampStr)) - } + timestampStr := formatTimestamp(dp.Timestamp()) + formatCountAndSum(sb, metricName, dp.Attributes(), dp.Count(), dp.Sum(), timestampStr) + + if dp.QuantileValues().Len() == 0 { + continue + } - return nil + quantilePath := buildPath(metricName+summaryQuantileSuffix, dp.Attributes()) + for j := 0; j < dp.QuantileValues().Len(); j++ { + sb.WriteString(buildLine( + quantilePath+summaryQuantileTagBeforeValue+formatFloatForLabel(dp.QuantileValues().At(j).Quantile()*100), + formatFloatForValue(dp.QuantileValues().At(j).Value()), + timestampStr)) + } + } } // Carbon doesn't 
have direct support to distribution or summary metrics in both @@ -266,78 +214,45 @@ func buildSummaryIntoBuilder( // 1. The total count will be represented by a metric named ".count". // // 2. The total sum will be represented by a metruc with the original "". -func buildCountAndSumIntoBuilder( +func formatCountAndSum( sb *strings.Builder, metricName string, - tagKeys []string, - labelValues []*metricspb.LabelValue, - count int64, + attributes pcommon.Map, + count uint64, sum float64, timestampStr string, ) { // Build count and sum metrics. - countPath := buildPath(metricName+countSuffix, tagKeys, labelValues) - valueStr := formatInt64(count) + countPath := buildPath(metricName+countSuffix, attributes) + valueStr := formatUint64(count) sb.WriteString(buildLine(countPath, valueStr, timestampStr)) - sumPath := buildPath(metricName, tagKeys, labelValues) + sumPath := buildPath(metricName, attributes) valueStr = formatFloatForValue(sum) sb.WriteString(buildLine(sumPath, valueStr, timestampStr)) } -// buildPath is used to build the per description above. It -// assumes that the caller code already checked that len(tagKeys) is equal to -// len(labelValues) and as such cannot fail to build the path. -func buildPath( - name string, - tagKeys []string, - labelValues []*metricspb.LabelValue, -) string { - - if len(tagKeys) == 0 { +// buildPath is used to build the per description above. +func buildPath(name string, attributes pcommon.Map) string { + if attributes.Len() == 0 { return name } var sb strings.Builder sb.WriteString(name) - for i, label := range labelValues { - value := label.Value - - switch value { - case "": - // Per Carbon the value must have length > 1 so put a place holder. 
- if label.HasValue { - value = tagValueEmptyPlaceholder - } else { - value = tagValueNotSetPlaceholder - } - default: - value = sanitizeTagValue(value) + attributes.Range(func(k string, v pcommon.Value) bool { + value := v.AsString() + if value == "" { + value = tagValueEmptyPlaceholder } - - sb.WriteString(tagPrefix + tagKeys[i] + tagKeyValueSeparator + value) - } + sb.WriteString(tagPrefix + sanitizeTagKey(k) + tagKeyValueSeparator + value) + return true + }) return sb.String() } -// buildSanitizedTagKeys builds an slice with the sanitized label keys to be -// used as tag keys on the Carbon metric. -func buildSanitizedTagKeys(labelKeys []*metricspb.LabelKey) []string { - if len(labelKeys) == 0 { - return nil - } - - tagKeys := make([]string, 0, len(labelKeys)) - for _, labelKey := range labelKeys { - tagKey := sanitizeTagKey(labelKey.Key) - tagKeys = append(tagKeys, tagKey) - } - - return tagKeys -} - // buildLine builds a single Carbon metric textual line, ie.: it already adds // a new-line character at the end of the string. 
func buildLine(path, value, timestamp string) string { @@ -385,6 +300,14 @@ func formatFloatForValue(f float64) string { return strconv.FormatFloat(f, 'f', -1, 64) } +func formatUint64(i uint64) string { + return strconv.FormatUint(i, 10) +} + func formatInt64(i int64) string { return strconv.FormatInt(i, 10) } + +func formatTimestamp(timestamp pcommon.Timestamp) string { + return formatUint64(uint64(timestamp) / 1e9) +} diff --git a/exporter/carbonexporter/metricdata_to_plaintext_test.go b/exporter/carbonexporter/metricdata_to_plaintext_test.go index d90042a1b398..e66b90560838 100644 --- a/exporter/carbonexporter/metricdata_to_plaintext_test.go +++ b/exporter/carbonexporter/metricdata_to_plaintext_test.go @@ -20,16 +20,12 @@ import ( "testing" "time" - agentmetricspb "github.com/census-instrumentation/opencensus-proto/gen-go/agent/metrics/v1" - metricspb "github.com/census-instrumentation/opencensus-proto/gen-go/metrics/v1" "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" - "google.golang.org/protobuf/types/known/timestamppb" - - "github.com/open-telemetry/opentelemetry-collector-contrib/internal/coreinternal/metricstestutil/ocmetricstestutil" + "go.opentelemetry.io/collector/pdata/pcommon" + "go.opentelemetry.io/collector/pdata/pmetric" ) -func Test_sanitizeTagKey(t *testing.T) { +func TestSanitizeTagKey(t *testing.T) { tests := []struct { name string key string @@ -54,7 +50,7 @@ func Test_sanitizeTagKey(t *testing.T) { } } -func Test_sanitizeTagValue(t *testing.T) { +func TestSanitizeTagValue(t *testing.T) { tests := []struct { name string value string @@ -84,65 +80,49 @@ func Test_sanitizeTagValue(t *testing.T) { } } -func Test_buildPath(t *testing.T) { - type args struct { - name string - tagKeys []string - labelValues []*metricspb.LabelValue - } +func TestBuildPath(t *testing.T) { tests := []struct { - name string - args args - want string + name string + attributes pcommon.Map + want string }{ { name: "happy_path", - args: args{ - 
name: "happy.path", - tagKeys: []string{"key0"}, - labelValues: []*metricspb.LabelValue{ - {Value: "val0", HasValue: true}, - }, - }, - want: "happy.path;key0=val0", + attributes: func() pcommon.Map { + attr := pcommon.NewMap() + attr.FromRaw(map[string]interface{}{"key0": "val0"}) + return attr + }(), + want: "happy_path;key0=val0", }, { - name: "emoty_value", - args: args{ - name: "t", - tagKeys: []string{"k0", "k1"}, - labelValues: []*metricspb.LabelValue{ - {Value: "", HasValue: true}, - {Value: "v1", HasValue: true}, - }, - }, - want: "t;k0=" + tagValueEmptyPlaceholder + ";k1=v1", + name: "empty_value", + attributes: func() pcommon.Map { + attr := pcommon.NewMap() + attr.FromRaw(map[string]interface{}{"k0": "", "k1": "v1"}) + return attr + }(), + want: "empty_value;k0=" + tagValueEmptyPlaceholder + ";k1=v1", }, { - name: "not_set_value", - args: args{ - name: "t", - tagKeys: []string{"k0", "k1"}, - labelValues: []*metricspb.LabelValue{ - {Value: "v0", HasValue: true}, - {Value: "", HasValue: false}, - }, - }, - want: "t;k0=v0;k1=" + tagValueNotSetPlaceholder, + name: "int_value", + attributes: func() pcommon.Map { + attr := pcommon.NewMap() + attr.FromRaw(map[string]interface{}{"k": 1}) + return attr + }(), + want: "int_value;k=1", }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - got := buildPath(tt.args.name, tt.args.tagKeys, tt.args.labelValues) + got := buildPath(tt.name, tt.attributes) assert.Equal(t, tt.want, got) }) } } -func Test_metricDataToPlaintext(t *testing.T) { - - keys := []string{"k0", "k1"} - values := []string{"v0", "v1"} +func TestToPlaintext(t *testing.T) { expectedTagsStr := ";k0=v0;k1=v1" unixSecs := int64(1574092046) @@ -152,62 +132,51 @@ func Test_metricDataToPlaintext(t *testing.T) { doubleVal := 1234.5678 expectedDobuleValStr := strconv.FormatFloat(doubleVal, 'g', -1, 64) - doublePt := ocmetricstestutil.Double(tsUnix, doubleVal) int64Val := int64(123) expectedInt64ValStr := "123" - int64Pt := &metricspb.Point{ - 
Timestamp: timestamppb.New(tsUnix), - Value: &metricspb.Point_Int64Value{Int64Value: int64Val}, - } + distributionCount := uint64(16) + distributionSum := float64(34.56) distributionBounds := []float64{1.5, 2, 4} - distributionCounts := []int64{4, 2, 3, 7} - distributionTimeSeries := ocmetricstestutil.Timeseries( - tsUnix, - values, - ocmetricstestutil.DistPt(tsUnix, distributionBounds, distributionCounts)) - distributionPoints := distributionTimeSeries.GetPoints() - require.Equal(t, 1, len(distributionPoints)) - distribubionPoint := distributionPoints[0].Value.(*metricspb.Point_DistributionValue) - distributionValue := distribubionPoint.DistributionValue - - summaryTimeSeries := ocmetricstestutil.Timeseries( - tsUnix, - values, - ocmetricstestutil.SummPt( - tsUnix, - 11, - 111, - []float64{90, 95, 99, 99.9}, - []float64{100, 6, 4, 1})) - summaryPoints := summaryTimeSeries.GetPoints() - require.Equal(t, 1, len(summaryPoints)) - summarySnapshot := summaryPoints[0].GetSummaryValue().GetSnapshot() + distributionCounts := []uint64{4, 2, 3, 7} + summaryCount := uint64(11) + summarySum := float64(111) + summaryQuantiles := []float64{90, 95, 99, 99.9} + summaryQuantileValues := []float64{100, 6, 4, 1} tests := []struct { name string - metricsDataFn func() []*agentmetricspb.ExportMetricsServiceRequest + metricsDataFn func() pmetric.Metrics wantLines []string wantNumConvertedTimeseries int wantNumDroppedTimeseries int }{ { name: "no_dims", - metricsDataFn: func() []*agentmetricspb.ExportMetricsServiceRequest { - return []*agentmetricspb.ExportMetricsServiceRequest{ - { - Metrics: []*metricspb.Metric{ - ocmetricstestutil.Gauge("gauge_double_no_dims", nil, ocmetricstestutil.Timeseries(tsUnix, nil, doublePt)), - ocmetricstestutil.GaugeInt("gauge_int_no_dims", nil, ocmetricstestutil.Timeseries(tsUnix, nil, int64Pt)), - }, - }, - { - Metrics: []*metricspb.Metric{ - ocmetricstestutil.Cumulative("cumulative_double_no_dims", nil, ocmetricstestutil.Timeseries(tsUnix, nil, 
doublePt)), - ocmetricstestutil.CumulativeInt("cumulative_int_no_dims", nil, ocmetricstestutil.Timeseries(tsUnix, nil, int64Pt)), - }, - }, - } + metricsDataFn: func() pmetric.Metrics { + md := pmetric.NewMetrics() + ms := md.ResourceMetrics().AppendEmpty().ScopeMetrics().AppendEmpty().Metrics() + ms.AppendEmpty().SetName("gauge_double_no_dims") + dps1 := ms.At(0).SetEmptyGauge().DataPoints() + dps1.AppendEmpty().SetTimestamp(pcommon.NewTimestampFromTime(tsUnix)) + dps1.At(0).SetDoubleValue(doubleVal) + ms.AppendEmpty().SetName("gauge_int_no_dims") + dps2 := ms.At(1).SetEmptyGauge().DataPoints() + dps2.AppendEmpty().SetTimestamp(pcommon.NewTimestampFromTime(tsUnix)) + dps2.At(0).SetIntValue(int64Val) + + ms.AppendEmpty().SetName("cumulative_double_no_dims") + ms.At(2).SetEmptySum().SetIsMonotonic(true) + dps3 := ms.At(2).Sum().DataPoints() + dps3.AppendEmpty().SetTimestamp(pcommon.NewTimestampFromTime(tsUnix)) + dps3.At(0).SetDoubleValue(doubleVal) + ms.AppendEmpty().SetName("cumulative_int_no_dims") + ms.At(3).SetEmptySum().SetIsMonotonic(true) + dps4 := ms.At(3).Sum().DataPoints() + dps4.AppendEmpty().SetTimestamp(pcommon.NewTimestampFromTime(tsUnix)) + dps4.At(0).SetIntValue(int64Val) + return md + }, wantLines: []string{ "gauge_double_no_dims " + expectedDobuleValStr + " " + expectedUnixSecsStr, @@ -219,17 +188,37 @@ func Test_metricDataToPlaintext(t *testing.T) { }, { name: "with_dims", - metricsDataFn: func() []*agentmetricspb.ExportMetricsServiceRequest { - return []*agentmetricspb.ExportMetricsServiceRequest{ - { - Metrics: []*metricspb.Metric{ - ocmetricstestutil.Gauge("gauge_double_with_dims", keys, ocmetricstestutil.Timeseries(tsUnix, values, doublePt)), - ocmetricstestutil.GaugeInt("gauge_int_with_dims", keys, ocmetricstestutil.Timeseries(tsUnix, values, int64Pt)), - ocmetricstestutil.Cumulative("cumulative_double_with_dims", keys, ocmetricstestutil.Timeseries(tsUnix, values, doublePt)), - ocmetricstestutil.CumulativeInt("cumulative_int_with_dims", 
keys, ocmetricstestutil.Timeseries(tsUnix, values, int64Pt)), - }, - }, - } + metricsDataFn: func() pmetric.Metrics { + md := pmetric.NewMetrics() + ms := md.ResourceMetrics().AppendEmpty().ScopeMetrics().AppendEmpty().Metrics() + ms.AppendEmpty().SetName("gauge_double_with_dims") + dps1 := ms.At(0).SetEmptyGauge().DataPoints() + dps1.AppendEmpty().SetTimestamp(pcommon.NewTimestampFromTime(tsUnix)) + dps1.At(0).Attributes().FromRaw(map[string]interface{}{"k0": "v0", "k1": "v1"}) + dps1.At(0).Attributes().Sort() // ensures result order + dps1.At(0).SetDoubleValue(doubleVal) + ms.AppendEmpty().SetName("gauge_int_with_dims") + dps2 := ms.At(1).SetEmptyGauge().DataPoints() + dps2.AppendEmpty().SetTimestamp(pcommon.NewTimestampFromTime(tsUnix)) + dps2.At(0).Attributes().FromRaw(map[string]interface{}{"k0": "v0", "k1": "v1"}) + dps2.At(0).Attributes().Sort() // ensures result order + dps2.At(0).SetIntValue(int64Val) + + ms.AppendEmpty().SetName("cumulative_double_with_dims") + ms.At(2).SetEmptySum().SetIsMonotonic(true) + dps3 := ms.At(2).Sum().DataPoints() + dps3.AppendEmpty().SetTimestamp(pcommon.NewTimestampFromTime(tsUnix)) + dps3.At(0).Attributes().FromRaw(map[string]interface{}{"k0": "v0", "k1": "v1"}) + dps3.At(0).Attributes().Sort() // ensures result order + dps3.At(0).SetDoubleValue(doubleVal) + ms.AppendEmpty().SetName("cumulative_int_with_dims") + ms.At(3).SetEmptySum().SetIsMonotonic(true) + dps4 := ms.At(3).Sum().DataPoints() + dps4.AppendEmpty().SetTimestamp(pcommon.NewTimestampFromTime(tsUnix)) + dps4.At(0).Attributes().FromRaw(map[string]interface{}{"k0": "v0", "k1": "v1"}) + dps4.At(0).Attributes().Sort() // ensures result order + dps4.At(0).SetIntValue(int64Val) + return md }, wantLines: []string{ "gauge_double_with_dims" + expectedTagsStr + " " + expectedDobuleValStr + " " + expectedUnixSecsStr, @@ -241,47 +230,60 @@ func Test_metricDataToPlaintext(t *testing.T) { }, { name: "distributions", - metricsDataFn: func() 
[]*agentmetricspb.ExportMetricsServiceRequest { - return []*agentmetricspb.ExportMetricsServiceRequest{ - { - Metrics: []*metricspb.Metric{ - ocmetricstestutil.GaugeDist("distrib", keys, distributionTimeSeries), - }, - }, - } + metricsDataFn: func() pmetric.Metrics { + md := pmetric.NewMetrics() + ms := md.ResourceMetrics().AppendEmpty().ScopeMetrics().AppendEmpty().Metrics() + ms.AppendEmpty().SetName("distrib") + ms.At(0).SetEmptyHistogram().SetAggregationTemporality(pmetric.MetricAggregationTemporalityCumulative) + dp := ms.At(0).Histogram().DataPoints().AppendEmpty() + dp.SetTimestamp(pcommon.NewTimestampFromTime(tsUnix)) + dp.Attributes().FromRaw(map[string]interface{}{"k0": "v0", "k1": "v1"}) + dp.Attributes().Sort() // ensures result order + dp.SetCount(distributionCount) + dp.SetSum(distributionSum) + dp.ExplicitBounds().FromRaw(distributionBounds) + dp.BucketCounts().FromRaw(distributionCounts) + return md }, wantLines: expectedDistributionLines( "distrib", expectedTagsStr, expectedUnixSecsStr, - distributionValue.Sum, - distributionValue.Count, + distributionSum, + distributionCount, distributionBounds, distributionCounts), wantNumConvertedTimeseries: 1, }, { name: "summary", - metricsDataFn: func() []*agentmetricspb.ExportMetricsServiceRequest { - return []*agentmetricspb.ExportMetricsServiceRequest{ - { - Metrics: []*metricspb.Metric{ - ocmetricstestutil.Summary("summary", keys, summaryTimeSeries), - }, - }, + metricsDataFn: func() pmetric.Metrics { + md := pmetric.NewMetrics() + ms := md.ResourceMetrics().AppendEmpty().ScopeMetrics().AppendEmpty().Metrics() + ms.AppendEmpty().SetName("summary") + dp := ms.At(0).SetEmptySummary().DataPoints().AppendEmpty() + dp.SetTimestamp(pcommon.NewTimestampFromTime(tsUnix)) + dp.Attributes().FromRaw(map[string]interface{}{"k0": "v0", "k1": "v1"}) + dp.Attributes().Sort() // ensures result order + dp.SetCount(summaryCount) + dp.SetSum(summarySum) + for i := range summaryQuantiles { + qv := 
dp.QuantileValues().AppendEmpty() + qv.SetQuantile(summaryQuantiles[i] / 100) + qv.SetValue(summaryQuantileValues[i]) } + return md }, wantLines: expectedSummaryLines( "summary", expectedTagsStr, expectedUnixSecsStr, - summaryPoints[0].GetSummaryValue().GetSum().Value, - summaryPoints[0].GetSummaryValue().GetCount().Value, - summarySnapshot.PercentileValues), + summarySum, + summaryCount, + summaryQuantiles, + summaryQuantileValues), wantNumConvertedTimeseries: 1, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - gotLines, gotNunConvertedTimeseries, gotNumDroppedTimeseries := metricDataToPlaintext(tt.metricsDataFn()) - assert.Equal(t, tt.wantNumConvertedTimeseries, gotNunConvertedTimeseries) - assert.Equal(t, tt.wantNumDroppedTimeseries, gotNumDroppedTimeseries) + gotLines := metricDataToPlaintext(tt.metricsDataFn()) got := strings.Split(gotLines, "\n") got = got[:len(got)-1] assert.Equal(t, tt.wantLines, got) @@ -292,21 +294,21 @@ func Test_metricDataToPlaintext(t *testing.T) { func expectedDistributionLines( metricName, tags, timestampStr string, sum float64, - count int64, + count uint64, bounds []float64, - counts []int64, + counts []uint64, ) []string { lines := []string{ - metricName + ".count" + tags + " " + formatInt64(count) + " " + timestampStr, + metricName + ".count" + tags + " " + formatInt64(int64(count)) + " " + timestampStr, metricName + tags + " " + formatFloatForLabel(sum) + " " + timestampStr, } for i, bound := range bounds { lines = append(lines, - metricName+".bucket"+tags+";upper_bound="+formatFloatForLabel(bound)+" "+formatInt64(counts[i])+" "+timestampStr) + metricName+".bucket"+tags+";upper_bound="+formatFloatForLabel(bound)+" "+formatInt64(int64(counts[i]))+" "+timestampStr) } lines = append(lines, - metricName+".bucket"+tags+";upper_bound=inf "+formatInt64(counts[len(bounds)])+" "+timestampStr) + metricName+".bucket"+tags+";upper_bound=inf "+formatInt64(int64(counts[len(bounds)]))+" "+timestampStr) return lines } @@ 
-314,17 +316,18 @@ func expectedDistributionLines( func expectedSummaryLines( metricName, tags, timestampStr string, sum float64, - count int64, - percentiles []*metricspb.SummaryValue_Snapshot_ValueAtPercentile, + count uint64, + summaryQuantiles []float64, + summaryQuantileValues []float64, ) []string { lines := []string{ - metricName + ".count" + tags + " " + formatInt64(count) + " " + timestampStr, + metricName + ".count" + tags + " " + formatInt64(int64(count)) + " " + timestampStr, metricName + tags + " " + formatFloatForValue(sum) + " " + timestampStr, } - for _, p := range percentiles { + for i := range summaryQuantiles { lines = append(lines, - metricName+".quantile"+tags+";quantile="+formatFloatForLabel(p.Percentile)+" "+formatFloatForValue(p.Value)+" "+timestampStr) + metricName+".quantile"+tags+";quantile="+formatFloatForLabel(summaryQuantiles[i])+" "+formatFloatForValue(summaryQuantileValues[i])+" "+timestampStr) } return lines diff --git a/unreleased/carbonexporter-oc.yaml b/unreleased/carbonexporter-oc.yaml new file mode 100644 index 000000000000..e233d9a27df7 --- /dev/null +++ b/unreleased/carbonexporter-oc.yaml @@ -0,0 +1,11 @@ +# One of 'breaking', 'deprecation', 'new_component', 'enhancement', 'bug_fix' +change_type: enhancement + +# The name of the component, or a single word describing the area of concern, (e.g. filelogreceiver) +component: carbonexporter + +# A brief description of the change. Surround your text with quotes ("") if it needs to start with a backtick (`). +note: Change carbonexporter to use pdata, remove dependency on opencensus + +# One or more tracking issues related to the change +issues: [14589]