Pass metric labels when transforming to gaugeArray #1570

Merged
8 commits, merged Feb 28, 2021
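In short, this change threads the record's labels through to the data points emitted when a GaugeArray aggregation is translated to OTLP; previously the Labels field was hard-coded to nil. A rough sketch of the observable effect, a fragment rather than a complete file, where metricpb and commonpb stand for the OTLP protobuf packages the transform package already imports and the label and value are illustrative:

    // Illustrative fragment (not taken from the diff): the effect on an
    // exported gauge data point.
    // Before this change every point was emitted without labels:
    before := &metricpb.IntDataPoint{Labels: nil, Value: 100}
    // After: the record's labels, e.g. {"one": "1"}, are carried through.
    after := &metricpb.IntDataPoint{
    	Labels: []*commonpb.StringKeyValue{{Key: "one", Value: "1"}},
    	Value:  100,
    }
    _, _ = before, after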
1 change: 1 addition & 0 deletions CHANGELOG.md
@@ -50,6 +50,7 @@ This project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
- Windows build of Jaeger tests now compiles with OS specific functions (#1576). (#1577)
- The sequential timing check of timestamps of go.opentelemetry.io/otel/sdk/metric/aggregator/lastvalue is now set up explicitly to be sequential (#1578). (#1579)
- Validate tracestate header keys with vendors according to the W3C TraceContext specification (#1475). (#1581)
- The OTLP exporter now includes the record's labels when translating a GaugeArray to OTLP data points (#1563). (#1570)

## [0.17.0] - 2021-02-12

7 changes: 5 additions & 2 deletions exporters/otlp/internal/transform/metric.go
@@ -308,18 +308,21 @@ func Record(exportSelector export.ExportKindSelector, r export.Record) (*metricpb.Metric, error) {

func gaugeArray(record export.Record, points []aggregation.Point) (*metricpb.Metric, error) {
desc := record.Descriptor()
labels := record.Labels()
m := &metricpb.Metric{
Name: desc.Name(),
Description: desc.Description(),
Unit: string(desc.Unit()),
}

pbLabels := stringKeyValues(labels.Iter())

switch nk := desc.NumberKind(); nk {
case number.Int64Kind:
var pts []*metricpb.IntDataPoint
for _, s := range points {
pts = append(pts, &metricpb.IntDataPoint{
Labels: nil,
Labels: pbLabels,
StartTimeUnixNano: toNanos(record.StartTime()),
TimeUnixNano: toNanos(record.EndTime()),
Value: s.Number.CoerceToInt64(nk),
@@ -335,7 +338,7 @@ func gaugeArray(record export.Record, points []aggregation.Point) (*metricpb.Metric, error) {
var pts []*metricpb.DoubleDataPoint
for _, s := range points {
pts = append(pts, &metricpb.DoubleDataPoint{
Labels: nil,
Labels: pbLabels,
StartTimeUnixNano: toNanos(record.StartTime()),
TimeUnixNano: toNanos(record.EndTime()),
Value: s.Number.CoerceToFloat64(nk),
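The new code path relies on the package's existing stringKeyValues helper to turn the record's attribute iterator into OTLP string key/values. For orientation, a minimal sketch of such a helper, assuming an attribute.Iterator with Len/Next/Attribute and the commonpb.StringKeyValue message; this is an illustration of the shape the diff depends on, not the exact source of the helper:

    // Sketch of a stringKeyValues-style conversion: attribute iterator ->
    // []*commonpb.StringKeyValue, the label representation used by the
    // data points above. The real helper in this package may differ in detail.
    func stringKeyValuesSketch(iter attribute.Iterator) []*commonpb.StringKeyValue {
    	l := iter.Len()
    	if l == 0 {
    		return nil
    	}
    	result := make([]*commonpb.StringKeyValue, 0, l)
    	for iter.Next() {
    		kv := iter.Attribute()
    		result = append(result, &commonpb.StringKeyValue{
    			Key:   string(kv.Key),
    			Value: kv.Value.Emit(),
    		})
    	}
    	return result
    }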
48 changes: 42 additions & 6 deletions exporters/otlp/internal/transform/metric_test.go
@@ -123,7 +123,7 @@ func TestMinMaxSumCountValue(t *testing.T) {

func TestMinMaxSumCountDatapoints(t *testing.T) {
desc := metric.NewDescriptor("", metric.ValueRecorderInstrumentKind, number.Int64Kind)
labels := attribute.NewSet()
labels := attribute.NewSet(attribute.String("one", "1"))
mmsc, ckpt := metrictest.Unslice2(minmaxsumcount.New(2, &desc))

assert.NoError(t, mmsc.Update(context.Background(), 1, &desc))
@@ -137,6 +137,12 @@ func TestMinMaxSumCountDatapoints(t *testing.T) {
BucketCounts: []uint64{1, 10},
StartTimeUnixNano: uint64(intervalStart.UnixNano()),
TimeUnixNano: uint64(intervalEnd.UnixNano()),
Labels: []*commonpb.StringKeyValue{
{
Key: "one",
Value: "1",
},
},
},
}
record := export.NewRecord(&desc, &labels, nil, ckpt.Aggregation(), intervalStart, intervalEnd)
@@ -162,7 +168,7 @@ func TestMinMaxSumCountPropagatesErrors(t *testing.T) {

func TestSumIntDataPoints(t *testing.T) {
desc := metric.NewDescriptor("", metric.ValueRecorderInstrumentKind, number.Int64Kind)
labels := attribute.NewSet()
labels := attribute.NewSet(attribute.String("one", "1"))
s, ckpt := metrictest.Unslice2(sumAgg.New(2))
assert.NoError(t, s.Update(context.Background(), number.Number(1), &desc))
require.NoError(t, s.SynchronizedMove(ckpt, &desc))
@@ -182,6 +188,12 @@ func TestSumIntDataPoints(t *testing.T) {
Value: 1,
StartTimeUnixNano: uint64(intervalStart.UnixNano()),
TimeUnixNano: uint64(intervalEnd.UnixNano()),
Labels: []*commonpb.StringKeyValue{
{
Key: "one",
Value: "1",
},
},
}}}, m.GetIntSum())
assert.Nil(t, m.GetDoubleGauge())
assert.Nil(t, m.GetDoubleHistogram())
@@ -190,7 +202,7 @@ func TestSumIntDataPoints(t *testing.T) {

func TestSumFloatDataPoints(t *testing.T) {
desc := metric.NewDescriptor("", metric.ValueRecorderInstrumentKind, number.Float64Kind)
labels := attribute.NewSet()
labels := attribute.NewSet(attribute.String("one", "1"))
s, ckpt := metrictest.Unslice2(sumAgg.New(2))
assert.NoError(t, s.Update(context.Background(), number.NewFloat64Number(1), &desc))
require.NoError(t, s.SynchronizedMove(ckpt, &desc))
@@ -213,13 +225,19 @@ func TestSumFloatDataPoints(t *testing.T) {
Value: 1,
StartTimeUnixNano: uint64(intervalStart.UnixNano()),
TimeUnixNano: uint64(intervalEnd.UnixNano()),
Labels: []*commonpb.StringKeyValue{
{
Key: "one",
Value: "1",
},
},
}}}, m.GetDoubleSum())
}
}

func TestLastValueIntDataPoints(t *testing.T) {
desc := metric.NewDescriptor("", metric.ValueRecorderInstrumentKind, number.Int64Kind)
labels := attribute.NewSet()
labels := attribute.NewSet(attribute.String("one", "1"))
s, ckpt := metrictest.Unslice2(lvAgg.New(2))
assert.NoError(t, s.Update(context.Background(), number.Number(100), &desc))
require.NoError(t, s.SynchronizedMove(ckpt, &desc))
@@ -234,6 +252,12 @@ func TestLastValueIntDataPoints(t *testing.T) {
Value: 100,
StartTimeUnixNano: 0,
TimeUnixNano: uint64(timestamp.UnixNano()),
Labels: []*commonpb.StringKeyValue{
{
Key: "one",
Value: "1",
},
},
}}, m.GetIntGauge().DataPoints)
assert.Nil(t, m.GetIntHistogram())
assert.Nil(t, m.GetIntSum())
@@ -245,7 +269,7 @@ func TestLastValueIntDataPoints(t *testing.T) {

func TestExactIntDataPoints(t *testing.T) {
desc := metric.NewDescriptor("", metric.ValueRecorderInstrumentKind, number.Int64Kind)
labels := attribute.NewSet()
labels := attribute.NewSet(attribute.String("one", "1"))
e, ckpt := metrictest.Unslice2(arrAgg.New(2))
assert.NoError(t, e.Update(context.Background(), number.Number(100), &desc))
require.NoError(t, e.SynchronizedMove(ckpt, &desc))
@@ -260,6 +284,12 @@ func TestExactIntDataPoints(t *testing.T) {
Value: 100,
StartTimeUnixNano: toNanos(intervalStart),
TimeUnixNano: toNanos(intervalEnd),
Labels: []*commonpb.StringKeyValue{
{
Key: "one",
Value: "1",
},
},
}}, m.GetIntGauge().DataPoints)
assert.Nil(t, m.GetIntHistogram())
assert.Nil(t, m.GetIntSum())
@@ -271,7 +301,7 @@ func TestExactIntDataPoints(t *testing.T) {

func TestExactFloatDataPoints(t *testing.T) {
desc := metric.NewDescriptor("", metric.ValueRecorderInstrumentKind, number.Float64Kind)
labels := attribute.NewSet()
labels := attribute.NewSet(attribute.String("one", "1"))
e, ckpt := metrictest.Unslice2(arrAgg.New(2))
assert.NoError(t, e.Update(context.Background(), number.NewFloat64Number(100), &desc))
require.NoError(t, e.SynchronizedMove(ckpt, &desc))
@@ -286,6 +316,12 @@ func TestExactFloatDataPoints(t *testing.T) {
Value: 100,
StartTimeUnixNano: toNanos(intervalStart),
TimeUnixNano: toNanos(intervalEnd),
Labels: []*commonpb.StringKeyValue{
{
Key: "one",
Value: "1",
},
},
}}, m.GetDoubleGauge().DataPoints)
assert.Nil(t, m.GetIntHistogram())
assert.Nil(t, m.GetIntSum())
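For completeness, a rough usage sketch in the style of the tests above, showing how a labeled record flows through the transform and onto the gauge data points. The shared fixtures (desc, intervalStart, intervalEnd) and the export kind selector are assumed to come from the surrounding test setup, and the invocation of Record mirrors its signature in the hunk header above rather than the exact test code:

    // Build a labeled record from an exact (array) aggregation, transform it,
    // and check the labels on the resulting gauge data points.
    labels := attribute.NewSet(attribute.String("one", "1"))
    e, ckpt := metrictest.Unslice2(arrAgg.New(2))
    require.NoError(t, e.Update(context.Background(), number.Number(100), &desc))
    require.NoError(t, e.SynchronizedMove(ckpt, &desc))
    record := export.NewRecord(&desc, &labels, nil, ckpt.Aggregation(), intervalStart, intervalEnd)

    m, err := Record(exportSelector, record)
    require.NoError(t, err)
    for _, dp := range m.GetIntGauge().DataPoints {
    	// After this PR each point carries the record's labels.
    	assert.Equal(t, "one", dp.Labels[0].Key)
    	assert.Equal(t, "1", dp.Labels[0].Value)
    }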