From 76f5ac60776460c3115d3403423842d0a1f2683e Mon Sep 17 00:00:00 2001 From: Tyler Yahn Date: Mon, 11 Jul 2022 15:34:24 -0700 Subject: [PATCH 1/7] Add last-value aggregator --- sdk/metric/internal/aggregation.go | 2 +- sdk/metric/internal/aggregator_test.go | 131 +++++++++++++++++++++++++ sdk/metric/internal/lastvalue.go | 40 ++++++-- sdk/metric/internal/lastvalue_test.go | 35 +++++++ 4 files changed, 201 insertions(+), 7 deletions(-) create mode 100644 sdk/metric/internal/aggregator_test.go create mode 100644 sdk/metric/internal/lastvalue_test.go diff --git a/sdk/metric/internal/aggregation.go b/sdk/metric/internal/aggregation.go index f161d2debbf..be6d91be009 100644 --- a/sdk/metric/internal/aggregation.go +++ b/sdk/metric/internal/aggregation.go @@ -30,7 +30,7 @@ type Aggregation struct { // Timestamp defines the time the last measurement was made. If zero, no // measurements were made for this time span. The time is represented as a // unix timestamp with nanosecond precision. - Timestamp uint64 + Timestamp int64 // Attributes are the unique dimensions Value describes. Attributes attribute.Set diff --git a/sdk/metric/internal/aggregator_test.go b/sdk/metric/internal/aggregator_test.go new file mode 100644 index 00000000000..3d0706f23e8 --- /dev/null +++ b/sdk/metric/internal/aggregator_test.go @@ -0,0 +1,131 @@ +// Copyright The OpenTelemetry Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +//go:build go1.18 +// +build go1.18 + +package internal // import "go.opentelemetry.io/otel/sdk/metric/internal" + +import ( + "sync" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "go.opentelemetry.io/otel/attribute" +) + +const ( + defaultGoroutines = 5 + defaultMeasurements = 30 + defaultCycles = 3 +) + +var ( + alice = attribute.NewSet(attribute.String("user", "alice"), attribute.Bool("admin", true)) + bob = attribute.NewSet(attribute.String("user", "bob"), attribute.Bool("admin", false)) + carol = attribute.NewSet(attribute.String("user", "carol"), attribute.Bool("admin", false)) +) + +// setMap maps attribute sets to a number. +type setMap[N int64 | float64] map[attribute.Set]N + +// expectFunc returns a function that will return an setMap of expected +// values of a cycle that contains m measurements (total across all +// goroutines). Each call advances the cycle. +type expectFunc[N int64 | float64] func(increments setMap[N]) func(m int) setMap[N] + +// testAggregator tests aggregator a produces the expecter defined values +// using an aggregatorTester. +func testAggregator[N int64 | float64](a Aggregator[N], expecter expectFunc[N]) func(*testing.T) { + return (&aggregatorTester[N]{ + GoroutineN: defaultGoroutines, + MeasurementN: defaultMeasurements, + CycleN: defaultCycles, + }).Run(a, expecter) +} + +// aggregatorTester runs an acceptance test on an Aggregator. It will ask an +// Aggregator to aggregate a set of values as if they were real measurements +// made MeasurementN number of times. 
This will be done in GoroutineN number +// of different goroutines. After the Aggregator has been asked to aggregate +// all these measurements, it is validated using a passed expecterFunc. This +// set of operation is a signle cycle, and the the aggregatorTester will run +// CycleN number of cycles. +type aggregatorTester[N int64 | float64] struct { + // GoroutineN is the number of goroutines aggregatorTester will use to run + // the test with. + GoroutineN int + // MeasurementN is the number of measurements that are made each cycle a + // goroutine runs the test. + MeasurementN int + // CycleN is the number of times a goroutine will make a set of + // measurements. + CycleN int +} + +func (at *aggregatorTester[N]) Run(a Aggregator[N], expecter expectFunc[N]) func(*testing.T) { + increments := map[attribute.Set]N{alice: 1, bob: -1, carol: 2} + f := expecter(increments) + m := at.MeasurementN * at.GoroutineN + return func(t *testing.T) { + for i := 0; i < at.CycleN; i++ { + var wg sync.WaitGroup + wg.Add(at.GoroutineN) + for i := 0; i < at.GoroutineN; i++ { + go func() { + defer wg.Done() + for j := 0; j < at.MeasurementN; j++ { + for attrs, n := range increments { + a.Aggregate(n, attrs) + } + } + }() + } + wg.Wait() + + assertSetMap(t, f(m), aggregationsToMap[N](a.Aggregations())) + } + } +} + +func aggregationsToMap[N int64 | float64](a []Aggregation) setMap[N] { + m := make(setMap[N]) + for _, a := range a { + m[a.Attributes] = a.Value.(SingleValue[N]).Value + } + return m +} + +// assertSetMap asserts expected equals actual. The testify assert.Equal +// function does not give clear error messages for maps, this attempts to do +// so. +func assertSetMap[N int64 | float64](t *testing.T, expected, actual setMap[N]) { + extra := make(map[attribute.Set]struct{}) + for attr := range actual { + extra[attr] = struct{}{} + } + + for attr, v := range expected { + name := attr.Encoded(attribute.DefaultEncoder()) + t.Run(name, func(t *testing.T) { + require.Contains(t, actual, attr) + delete(extra, attr) + assert.Equal(t, v, actual[attr]) + }) + } + + assert.Lenf(t, extra, 0, "unknown values added: %v", extra) +} diff --git a/sdk/metric/internal/lastvalue.go b/sdk/metric/internal/lastvalue.go index 986a2313ad0..224c1909a40 100644 --- a/sdk/metric/internal/lastvalue.go +++ b/sdk/metric/internal/lastvalue.go @@ -17,24 +17,52 @@ package internal // import "go.opentelemetry.io/otel/sdk/metric/internal" -import "go.opentelemetry.io/otel/attribute" +import ( + "sync" + "time" + + "go.opentelemetry.io/otel/attribute" +) + +// datapoint is timestamped measurement data. +type datapoint[N int64 | float64] struct { + timestamp int64 + value N +} // lastValue summarizes a set of measurements as the last one made. type lastValue[N int64 | float64] struct { - // TODO(#2971): implement. + sync.Mutex + + values map[attribute.Set]datapoint[N] } // NewLastValue returns an Aggregator that summarizes a set of measurements as // the last one made. func NewLastValue[N int64 | float64]() Aggregator[N] { - return &lastValue[N]{} + return &lastValue[N]{values: make(map[attribute.Set]datapoint[N])} } func (s *lastValue[N]) Aggregate(value N, attr attribute.Set) { - // TODO(#2971): implement. + d := datapoint[N]{timestamp: time.Now().UnixNano(), value: value} + s.Lock() + s.values[attr] = d + s.Unlock() } func (s *lastValue[N]) Aggregations() []Aggregation { - // TODO(#2971): implement. 
- return nil + s.Lock() + defer s.Unlock() + + aggs := make([]Aggregation, 0, len(s.values)) + for a, v := range s.values { + aggs = append(aggs, Aggregation{ + Timestamp: v.timestamp, + Attributes: a, + Value: SingleValue[N]{Value: v.value}, + }) + // Do not report stale values. + delete(s.values, a) + } + return aggs } diff --git a/sdk/metric/internal/lastvalue_test.go b/sdk/metric/internal/lastvalue_test.go new file mode 100644 index 00000000000..88078dfee2f --- /dev/null +++ b/sdk/metric/internal/lastvalue_test.go @@ -0,0 +1,35 @@ +// Copyright The OpenTelemetry Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +//go:build go1.18 +// +build go1.18 + +package internal // import "go.opentelemetry.io/otel/sdk/metric/internal" + +import "testing" + +func TestLastValue(t *testing.T) { + t.Run("Int64", testAggregator(NewLastValue[int64](), lastValueExpecter[int64])) + t.Run("Float64", testAggregator(NewLastValue[float64](), lastValueExpecter[float64])) +} + +func lastValueExpecter[N int64 | float64](incr setMap[N]) func(int) setMap[N] { + expect := make(setMap[N], len(incr)) + for actor, incr := range incr { + expect[actor] = incr + } + return func(int) setMap[N] { + return expect + } +} From 2c6ba1977e90a086fc0c0238d82fbb139dbc5dc0 Mon Sep 17 00:00:00 2001 From: Tyler Yahn Date: Mon, 11 Jul 2022 15:38:25 -0700 Subject: [PATCH 2/7] Add test of last-value reset of unseen attrs --- sdk/metric/internal/lastvalue_test.go | 25 +++++++++++++++++++++++++ 1 file changed, 25 insertions(+) diff --git a/sdk/metric/internal/lastvalue_test.go b/sdk/metric/internal/lastvalue_test.go index 88078dfee2f..f125e698175 100644 --- a/sdk/metric/internal/lastvalue_test.go +++ b/sdk/metric/internal/lastvalue_test.go @@ -33,3 +33,28 @@ func lastValueExpecter[N int64 | float64](incr setMap[N]) func(int) setMap[N] { return expect } } + +func testLastValueReset[N int64 | float64](a Aggregator[N]) func(*testing.T) { + return func(t *testing.T) { + expect := make(setMap[N]) + assertSetMap(t, expect, aggregationsToMap[N](a.Aggregations())) + + a.Aggregate(1, alice) + expect[alice] = 1 + assertSetMap(t, expect, aggregationsToMap[N](a.Aggregations())) + + // The attr set should be forgotten once Aggregations is called. + delete(expect, alice) + assertSetMap(t, expect, aggregationsToMap[N](a.Aggregations())) + + // Aggregating another set should not affect the original (alice). 
+ a.Aggregate(1, bob) + expect[bob] = 1 + assertSetMap(t, expect, aggregationsToMap[N](a.Aggregations())) + } +} + +func TestLastValueReset(t *testing.T) { + t.Run("Int64", testLastValueReset(NewLastValue[int64]())) + t.Run("Float64", testLastValueReset(NewLastValue[float64]())) +} From b6d9cf2750973b2a897f11ab449ab517c7e361c2 Mon Sep 17 00:00:00 2001 From: Tyler Yahn Date: Mon, 11 Jul 2022 15:44:24 -0700 Subject: [PATCH 3/7] Add benchmark --- sdk/metric/internal/aggregator_test.go | 52 ++++++++++++++++++++++++++ sdk/metric/internal/lastvalue_test.go | 5 +++ 2 files changed, 57 insertions(+) diff --git a/sdk/metric/internal/aggregator_test.go b/sdk/metric/internal/aggregator_test.go index 3d0706f23e8..98a152e05c2 100644 --- a/sdk/metric/internal/aggregator_test.go +++ b/sdk/metric/internal/aggregator_test.go @@ -18,6 +18,7 @@ package internal // import "go.opentelemetry.io/otel/sdk/metric/internal" import ( + "strconv" "sync" "testing" @@ -129,3 +130,54 @@ func assertSetMap[N int64 | float64](t *testing.T, expected, actual setMap[N]) { assert.Lenf(t, extra, 0, "unknown values added: %v", extra) } + +var bmarkResults []Aggregation + +func benchmarkAggregatorN[N int64 | float64](b *testing.B, factory func() Aggregator[N], count int) { + attrs := make([]attribute.Set, count) + for i := range attrs { + attrs[i] = attribute.NewSet(attribute.Int("value", i)) + } + + b.Run("Aggregate", func(b *testing.B) { + agg := factory() + b.ReportAllocs() + b.ResetTimer() + + for n := 0; n < b.N; n++ { + for _, attr := range attrs { + agg.Aggregate(1, attr) + } + } + assert.Len(b, agg.Aggregations(), count) + }) + + b.Run("Aggregations", func(b *testing.B) { + aggs := make([]Aggregator[N], b.N) + for n := range aggs { + a := factory() + for _, attr := range attrs { + a.Aggregate(1, attr) + } + aggs[n] = a + } + + b.ReportAllocs() + b.ResetTimer() + + for n := 0; n < b.N; n++ { + bmarkResults = aggs[n].Aggregations() + } + }) +} + +func benchmarkAggregator[N int64 | float64](factory func() Aggregator[N]) func(*testing.B) { + counts := []int{1, 10, 100} + return func(b *testing.B) { + for _, n := range counts { + b.Run(strconv.Itoa(n), func(b *testing.B) { + benchmarkAggregatorN(b, factory, n) + }) + } + } +} diff --git a/sdk/metric/internal/lastvalue_test.go b/sdk/metric/internal/lastvalue_test.go index f125e698175..d2b9a96eddc 100644 --- a/sdk/metric/internal/lastvalue_test.go +++ b/sdk/metric/internal/lastvalue_test.go @@ -58,3 +58,8 @@ func TestLastValueReset(t *testing.T) { t.Run("Int64", testLastValueReset(NewLastValue[int64]())) t.Run("Float64", testLastValueReset(NewLastValue[float64]())) } + +func BenchmarkLastValue(b *testing.B) { + b.Run("Int64", benchmarkAggregator(NewLastValue[int64])) + b.Run("Float64", benchmarkAggregator(NewLastValue[float64])) +} From d483c2a207c2869aad815a423d8bb0b115f2a7e8 Mon Sep 17 00:00:00 2001 From: Tyler Yahn Date: Thu, 21 Jul 2022 14:59:59 -0700 Subject: [PATCH 4/7] Use generic DataPoint value --- sdk/metric/metricdata/data.go | 32 ++--- .../metricdata/metricdatatest/assertion.go | 44 +++--- .../metricdatatest/assertion_test.go | 126 ++++++++++-------- .../metricdata/metricdatatest/comparisons.go | 92 ++++--------- 4 files changed, 124 insertions(+), 170 deletions(-) diff --git a/sdk/metric/metricdata/data.go b/sdk/metric/metricdata/data.go index ac9eb54c55a..b770b702346 100644 --- a/sdk/metric/metricdata/data.go +++ b/sdk/metric/metricdata/data.go @@ -65,17 +65,17 @@ type Aggregation interface { } // Gauge represents a measurement of the current value of an 
instrument. -type Gauge struct { +type Gauge[N int64 | float64] struct { // DataPoints reprents individual aggregated measurements with unique Attributes. - DataPoints []DataPoint + DataPoints []DataPoint[N] } -func (Gauge) privateAggregation() {} +func (Gauge[N]) privateAggregation() {} // Sum represents the sum of all measurements of values from an instrument. -type Sum struct { +type Sum[N int64 | float64] struct { // DataPoints reprents individual aggregated measurements with unique Attributes. - DataPoints []DataPoint + DataPoints []DataPoint[N] // Temporality describes if the aggregation is reported as the change from the // last report time, or the cumulative changes since a fixed start time. Temporality Temporality @@ -83,10 +83,10 @@ type Sum struct { IsMonotonic bool } -func (Sum) privateAggregation() {} +func (Sum[N]) privateAggregation() {} // DataPoint is a single data point in a timeseries. -type DataPoint struct { +type DataPoint[N int64 | float64] struct { // Attributes is the set of key value pairs that uniquely identify the // timeseries. Attributes attribute.Set @@ -95,25 +95,9 @@ type DataPoint struct { // Time is the time when the timeseries was recorded. (optional) Time time.Time // Value is the value of this data point. - Value Value + Value N } -// Value is a int64 or float64. All Values created by the sdk will be either -// Int64 or Float64. -type Value interface { - privateValue() -} - -// Int64 is a container for an int64 value. -type Int64 int64 - -func (Int64) privateValue() {} - -// Float64 is a container for a float64 value. -type Float64 float64 - -func (Float64) privateValue() {} - // Histogram represents the histogram of all measurements of values from an instrument. type Histogram struct { // DataPoints reprents individual aggregated measurements with unique Attributes. diff --git a/sdk/metric/metricdata/metricdatatest/assertion.go b/sdk/metric/metricdata/metricdatatest/assertion.go index 9d32886904d..c66409f4814 100644 --- a/sdk/metric/metricdata/metricdatatest/assertion.go +++ b/sdk/metric/metricdata/metricdatatest/assertion.go @@ -28,7 +28,17 @@ import ( // Datatypes are the concrete data-types the metricdata package provides. type Datatypes interface { - metricdata.DataPoint | metricdata.Float64 | metricdata.Gauge | metricdata.Histogram | metricdata.HistogramDataPoint | metricdata.Int64 | metricdata.Metrics | metricdata.ResourceMetrics | metricdata.ScopeMetrics | metricdata.Sum + metricdata.DataPoint[float64] | + metricdata.DataPoint[int64] | + metricdata.Gauge[float64] | + metricdata.Gauge[int64] | + metricdata.Histogram | + metricdata.HistogramDataPoint | + metricdata.Metrics | + metricdata.ResourceMetrics | + metricdata.ScopeMetrics | + metricdata.Sum[float64] | + metricdata.Sum[int64] // Interface types are not allowed in union types, therefore the // Aggregation and Value type from metricdata are not included here. 
@@ -44,26 +54,28 @@ func AssertEqual[T Datatypes](t *testing.T, expected, actual T) bool { var r []string switch e := interface{}(expected).(type) { - case metricdata.DataPoint: - r = equalDataPoints(e, aIface.(metricdata.DataPoint)) - case metricdata.Float64: - r = equalFloat64(e, aIface.(metricdata.Float64)) - case metricdata.Gauge: - r = equalGauges(e, aIface.(metricdata.Gauge)) + case metricdata.DataPoint[int64]: + r = equalDataPoints(e, aIface.(metricdata.DataPoint[int64])) + case metricdata.DataPoint[float64]: + r = equalDataPoints(e, aIface.(metricdata.DataPoint[float64])) + case metricdata.Gauge[int64]: + r = equalGauges(e, aIface.(metricdata.Gauge[int64])) + case metricdata.Gauge[float64]: + r = equalGauges(e, aIface.(metricdata.Gauge[float64])) case metricdata.Histogram: r = equalHistograms(e, aIface.(metricdata.Histogram)) case metricdata.HistogramDataPoint: r = equalHistogramDataPoints(e, aIface.(metricdata.HistogramDataPoint)) - case metricdata.Int64: - r = equalInt64(e, aIface.(metricdata.Int64)) case metricdata.Metrics: r = equalMetrics(e, aIface.(metricdata.Metrics)) case metricdata.ResourceMetrics: r = equalResourceMetrics(e, aIface.(metricdata.ResourceMetrics)) case metricdata.ScopeMetrics: r = equalScopeMetrics(e, aIface.(metricdata.ScopeMetrics)) - case metricdata.Sum: - r = equalSums(e, aIface.(metricdata.Sum)) + case metricdata.Sum[int64]: + r = equalSums(e, aIface.(metricdata.Sum[int64])) + case metricdata.Sum[float64]: + r = equalSums(e, aIface.(metricdata.Sum[float64])) default: // We control all types passed to this, panic to signal developers // early they changed things in an incompatible way. @@ -86,13 +98,3 @@ func AssertAggregationsEqual(t *testing.T, expected, actual metricdata.Aggregati } return true } - -// AssertValuesEqual asserts that two Values are equal. -func AssertValuesEqual(t *testing.T, expected, actual metricdata.Value) bool { - t.Helper() - if r := equalValues(expected, actual); len(r) > 0 { - t.Error(r) - return false - } - return true -} diff --git a/sdk/metric/metricdata/metricdatatest/assertion_test.go b/sdk/metric/metricdata/metricdatatest/assertion_test.go index 79d639ba2c3..79657dd15fb 100644 --- a/sdk/metric/metricdata/metricdatatest/assertion_test.go +++ b/sdk/metric/metricdata/metricdatatest/assertion_test.go @@ -34,28 +34,34 @@ var ( attrA = attribute.NewSet(attribute.Bool("A", true)) attrB = attribute.NewSet(attribute.Bool("B", true)) - float64A = metricdata.Float64(-1.0) - float64B = metricdata.Float64(2.0) - - int64A = metricdata.Int64(-1) - int64B = metricdata.Int64(2) - startA = time.Now() startB = startA.Add(time.Millisecond) endA = startA.Add(time.Second) endB = startB.Add(time.Second) - dataPointsA = metricdata.DataPoint{ + dataPointInt64A = metricdata.DataPoint[int64]{ Attributes: attrA, StartTime: startA, Time: endA, - Value: int64A, + Value: -1, + } + dataPointFloat64A = metricdata.DataPoint[float64]{ + Attributes: attrA, + StartTime: startA, + Time: endA, + Value: -1.0, + } + dataPointInt64B = metricdata.DataPoint[int64]{ + Attributes: attrB, + StartTime: startB, + Time: endB, + Value: 2, } - dataPointsB = metricdata.DataPoint{ + dataPointFloat64B = metricdata.DataPoint[float64]{ Attributes: attrB, StartTime: startB, Time: endB, - Value: float64B, + Value: 2.0, } max, min = 99.0, 3. 
@@ -80,18 +86,38 @@ var ( Sum: 3, } - gaugeA = metricdata.Gauge{DataPoints: []metricdata.DataPoint{dataPointsA}} - gaugeB = metricdata.Gauge{DataPoints: []metricdata.DataPoint{dataPointsB}} + gaugeInt64A = metricdata.Gauge[int64]{ + DataPoints: []metricdata.DataPoint[int64]{dataPointInt64A}, + } + gaugeFloat64A = metricdata.Gauge[float64]{ + DataPoints: []metricdata.DataPoint[float64]{dataPointFloat64A}, + } + gaugeInt64B = metricdata.Gauge[int64]{ + DataPoints: []metricdata.DataPoint[int64]{dataPointInt64B}, + } + gaugeFloat64B = metricdata.Gauge[float64]{ + DataPoints: []metricdata.DataPoint[float64]{dataPointFloat64B}, + } - sumA = metricdata.Sum{ + sumInt64A = metricdata.Sum[int64]{ Temporality: metricdata.CumulativeTemporality, IsMonotonic: true, - DataPoints: []metricdata.DataPoint{dataPointsA}, + DataPoints: []metricdata.DataPoint[int64]{dataPointInt64A}, } - sumB = metricdata.Sum{ - Temporality: metricdata.DeltaTemporality, - IsMonotonic: false, - DataPoints: []metricdata.DataPoint{dataPointsB}, + sumFloat64A = metricdata.Sum[float64]{ + Temporality: metricdata.CumulativeTemporality, + IsMonotonic: true, + DataPoints: []metricdata.DataPoint[float64]{dataPointFloat64A}, + } + sumInt64B = metricdata.Sum[int64]{ + Temporality: metricdata.CumulativeTemporality, + IsMonotonic: true, + DataPoints: []metricdata.DataPoint[int64]{dataPointInt64B}, + } + sumFloat64B = metricdata.Sum[float64]{ + Temporality: metricdata.CumulativeTemporality, + IsMonotonic: true, + DataPoints: []metricdata.DataPoint[float64]{dataPointFloat64B}, } histogramA = metricdata.Histogram{ @@ -107,13 +133,13 @@ var ( Name: "A", Description: "A desc", Unit: unit.Dimensionless, - Data: sumA, + Data: sumInt64A, } metricsB = metricdata.Metrics{ Name: "B", Description: "B desc", Unit: unit.Bytes, - Data: gaugeB, + Data: gaugeFloat64B, } scopeMetricsA = metricdata.ScopeMetrics{ @@ -152,12 +178,13 @@ func TestAssertEqual(t *testing.T) { t.Run("ScopeMetrics", testDatatype(scopeMetricsA, scopeMetricsB, equalScopeMetrics)) t.Run("Metrics", testDatatype(metricsA, metricsB, equalMetrics)) t.Run("Histogram", testDatatype(histogramA, histogramB, equalHistograms)) - t.Run("Sum", testDatatype(sumA, sumB, equalSums)) - t.Run("Gauge", testDatatype(gaugeA, gaugeB, equalGauges)) + t.Run("SumInt64", testDatatype(sumInt64A, sumInt64B, equalSums[int64])) + t.Run("SumFloat64", testDatatype(sumFloat64A, sumFloat64B, equalSums[float64])) + t.Run("GaugeInt64", testDatatype(gaugeInt64A, gaugeInt64B, equalGauges[int64])) + t.Run("GaugeFloat64", testDatatype(gaugeFloat64A, gaugeFloat64B, equalGauges[float64])) t.Run("HistogramDataPoint", testDatatype(histogramDataPointA, histogramDataPointB, equalHistogramDataPoints)) - t.Run("DataPoint", testDatatype(dataPointsA, dataPointsB, equalDataPoints)) - t.Run("Int64", testDatatype(int64A, int64B, equalInt64)) - t.Run("Float64", testDatatype(float64A, float64B, equalFloat64)) + t.Run("DataPointInt64", testDatatype(dataPointInt64A, dataPointInt64B, equalDataPoints[int64])) + t.Run("DataPointFloat64", testDatatype(dataPointFloat64A, dataPointFloat64B, equalDataPoints[float64])) } type unknownAggregation struct { @@ -166,50 +193,33 @@ type unknownAggregation struct { func TestAssertAggregationsEqual(t *testing.T) { AssertAggregationsEqual(t, nil, nil) - AssertAggregationsEqual(t, sumA, sumA) - AssertAggregationsEqual(t, gaugeA, gaugeA) + AssertAggregationsEqual(t, sumInt64A, sumInt64A) + AssertAggregationsEqual(t, sumFloat64A, sumFloat64A) + AssertAggregationsEqual(t, gaugeInt64A, gaugeInt64A) + 
AssertAggregationsEqual(t, gaugeFloat64A, gaugeFloat64A) AssertAggregationsEqual(t, histogramA, histogramA) - r := equalAggregations(sumA, nil) + r := equalAggregations(sumInt64A, nil) assert.Len(t, r, 1, "should return nil comparison mismatch only") - r = equalAggregations(sumA, gaugeA) + r = equalAggregations(sumInt64A, gaugeInt64A) assert.Len(t, r, 1, "should return with type mismatch only") r = equalAggregations(unknownAggregation{}, unknownAggregation{}) assert.Len(t, r, 1, "should return with unknown aggregation only") - r = equalAggregations(sumA, sumB) - assert.Greaterf(t, len(r), 0, "%v == %v", sumA, sumB) - - r = equalAggregations(gaugeA, gaugeB) - assert.Greaterf(t, len(r), 0, "%v == %v", gaugeA, gaugeB) - - r = equalAggregations(histogramA, histogramB) - assert.Greaterf(t, len(r), 0, "%v == %v", histogramA, histogramB) -} - -type unknownValue struct { - metricdata.Value -} + r = equalAggregations(sumInt64A, sumInt64B) + assert.Greaterf(t, len(r), 0, "%v == %v", sumInt64A, sumInt64B) -func TestAssertValuesEqual(t *testing.T) { - AssertValuesEqual(t, nil, nil) - AssertValuesEqual(t, int64A, int64A) - AssertValuesEqual(t, float64A, float64A) + r = equalAggregations(sumFloat64A, sumFloat64B) + assert.Greaterf(t, len(r), 0, "%v == %v", sumFloat64A, sumFloat64B) - r := equalValues(int64A, nil) - assert.Len(t, r, 1, "should return nil comparison mismatch only") + r = equalAggregations(gaugeInt64A, gaugeInt64B) + assert.Greaterf(t, len(r), 0, "%v == %v", gaugeInt64A, gaugeInt64B) - r = equalValues(int64A, float64A) - assert.Len(t, r, 1, "should return with type mismatch only") + r = equalAggregations(gaugeFloat64A, gaugeFloat64B) + assert.Greaterf(t, len(r), 0, "%v == %v", gaugeFloat64A, gaugeFloat64B) - r = equalValues(unknownValue{}, unknownValue{}) - assert.Len(t, r, 1, "should return with unknown value only") - - r = equalValues(int64A, int64B) - assert.Greaterf(t, len(r), 0, "%v == %v", int64A, int64B) - - r = equalValues(float64A, float64B) - assert.Greaterf(t, len(r), 0, "%v == %v", float64A, float64B) + r = equalAggregations(histogramA, histogramB) + assert.Greaterf(t, len(r), 0, "%v == %v", histogramA, histogramB) } diff --git a/sdk/metric/metricdata/metricdatatest/comparisons.go b/sdk/metric/metricdata/metricdatatest/comparisons.go index 78e36169461..171e9a50749 100644 --- a/sdk/metric/metricdata/metricdatatest/comparisons.go +++ b/sdk/metric/metricdata/metricdatatest/comparisons.go @@ -110,16 +110,28 @@ func equalAggregations(a, b metricdata.Aggregation) (reasons []string) { } switch v := a.(type) { - case metricdata.Gauge: - r := equalGauges(v, b.(metricdata.Gauge)) + case metricdata.Gauge[int64]: + r := equalGauges(v, b.(metricdata.Gauge[int64])) if len(r) > 0 { - reasons = append(reasons, "Gauge not equal:") + reasons = append(reasons, "Gauge[int64] not equal:") reasons = append(reasons, r...) } - case metricdata.Sum: - r := equalSums(v, b.(metricdata.Sum)) + case metricdata.Gauge[float64]: + r := equalGauges(v, b.(metricdata.Gauge[float64])) if len(r) > 0 { - reasons = append(reasons, "Sum not equal:") + reasons = append(reasons, "Gauge[float64] not equal:") + reasons = append(reasons, r...) + } + case metricdata.Sum[int64]: + r := equalSums(v, b.(metricdata.Sum[int64])) + if len(r) > 0 { + reasons = append(reasons, "Sum[int64] not equal:") + reasons = append(reasons, r...) + } + case metricdata.Sum[float64]: + r := equalSums(v, b.(metricdata.Sum[float64])) + if len(r) > 0 { + reasons = append(reasons, "Sum[float64] not equal:") reasons = append(reasons, r...) 
} case metricdata.Histogram: @@ -139,11 +151,11 @@ func equalAggregations(a, b metricdata.Aggregation) (reasons []string) { // // The DataPoints each Gauge contains are compared based on containing the // same DataPoints, not the order they are stored in. -func equalGauges(a, b metricdata.Gauge) (reasons []string) { +func equalGauges[N int64 | float64](a, b metricdata.Gauge[N]) (reasons []string) { r := compareDiff(diffSlices( a.DataPoints, b.DataPoints, - func(a, b metricdata.DataPoint) bool { + func(a, b metricdata.DataPoint[N]) bool { r := equalDataPoints(a, b) return len(r) == 0 }, @@ -159,7 +171,7 @@ func equalGauges(a, b metricdata.Gauge) (reasons []string) { // // The DataPoints each Sum contains are compared based on containing the same // DataPoints, not the order they are stored in. -func equalSums(a, b metricdata.Sum) (reasons []string) { +func equalSums[N int64 | float64](a, b metricdata.Sum[N]) (reasons []string) { if a.Temporality != b.Temporality { reasons = append(reasons, notEqualStr("Temporality", a.Temporality, b.Temporality)) } @@ -170,7 +182,7 @@ func equalSums(a, b metricdata.Sum) (reasons []string) { r := compareDiff(diffSlices( a.DataPoints, b.DataPoints, - func(a, b metricdata.DataPoint) bool { + func(a, b metricdata.DataPoint[N]) bool { r := equalDataPoints(a, b) return len(r) == 0 }, @@ -207,7 +219,7 @@ func equalHistograms(a, b metricdata.Histogram) (reasons []string) { // equalDataPoints returns reasons DataPoints are not equal. If they are // equal, the returned reasons will be empty. -func equalDataPoints(a, b metricdata.DataPoint) (reasons []string) { +func equalDataPoints[N int64 | float64](a, b metricdata.DataPoint[N]) (reasons []string) { if !a.Attributes.Equals(&b.Attributes) { reasons = append(reasons, notEqualStr( "Attributes", @@ -222,10 +234,8 @@ func equalDataPoints(a, b metricdata.DataPoint) (reasons []string) { reasons = append(reasons, notEqualStr("Time", a.Time.UnixNano(), b.Time.UnixNano())) } - r := equalValues(a.Value, b.Value) - if len(r) > 0 { - reasons = append(reasons, "DataPoint Value not equal:") - reasons = append(reasons, r...) + if a.Value != b.Value { + reasons = append(reasons, notEqualStr("Value", a.Value, b.Value)) } return reasons } @@ -267,58 +277,6 @@ func equalHistogramDataPoints(a, b metricdata.HistogramDataPoint) (reasons []str return reasons } -// equalValues returns reasons Values are not equal. If they are equal, the -// returned reasons will be empty. -func equalValues(a, b metricdata.Value) (reasons []string) { - if a == nil || b == nil { - if a != b { - return []string{notEqualStr("Values", a, b)} - } - return reasons - } - - if reflect.TypeOf(a) != reflect.TypeOf(b) { - return []string{fmt.Sprintf("Value types not equal:\nexpected: %T\nactual: %T", a, b)} - } - - switch v := a.(type) { - case metricdata.Int64: - r := equalInt64(v, b.(metricdata.Int64)) - if len(r) > 0 { - reasons = append(reasons, "Int64 not equal:") - reasons = append(reasons, r...) - } - case metricdata.Float64: - r := equalFloat64(v, b.(metricdata.Float64)) - if len(r) > 0 { - reasons = append(reasons, "Float64 not equal:") - reasons = append(reasons, r...) - } - default: - reasons = append(reasons, fmt.Sprintf("Value of unknown types %T", a)) - } - - return reasons -} - -// equalFloat64 returns reasons Float64s are not equal. If they are equal, the -// returned reasons will be empty. 
-func equalFloat64(a, b metricdata.Float64) (reasons []string) { - if a != b { - reasons = append(reasons, notEqualStr("Float64 value", a, b)) - } - return reasons -} - -// equalInt64 returns reasons Int64s are not equal. If they are equal, the -// returned reasons will be empty. -func equalInt64(a, b metricdata.Int64) (reasons []string) { - if a != b { - reasons = append(reasons, notEqualStr("Int64 value", a, b)) - } - return reasons -} - func notEqualStr(prefix string, expected, actual interface{}) string { return fmt.Sprintf("%s not equal:\nexpected: %v\nactual: %v", prefix, expected, actual) } From 5f72d60d1f1dccc0261eb49a43f2314d1fc4f9cf Mon Sep 17 00:00:00 2001 From: Tyler Yahn Date: Fri, 22 Jul 2022 07:22:43 -0700 Subject: [PATCH 5/7] Fix assertion_fail_test.go --- .../metricdatatest/assertion_fail_test.go | 30 ++++++++----------- 1 file changed, 13 insertions(+), 17 deletions(-) diff --git a/sdk/metric/metricdata/metricdatatest/assertion_fail_test.go b/sdk/metric/metricdata/metricdatatest/assertion_fail_test.go index 228bfbddd8c..fffbe6421be 100644 --- a/sdk/metric/metricdata/metricdatatest/assertion_fail_test.go +++ b/sdk/metric/metricdata/metricdatatest/assertion_fail_test.go @@ -37,27 +37,23 @@ func TestFailAssertEqual(t *testing.T) { t.Run("ScopeMetrics", testFailDatatype(scopeMetricsA, scopeMetricsB)) t.Run("Metrics", testFailDatatype(metricsA, metricsB)) t.Run("Histogram", testFailDatatype(histogramA, histogramB)) - t.Run("Sum", testFailDatatype(sumA, sumB)) - t.Run("Gauge", testFailDatatype(gaugeA, gaugeB)) + t.Run("SumInt64", testFailDatatype(sumInt64A, sumInt64B)) + t.Run("SumFloat64", testFailDatatype(sumFloat64A, sumFloat64B)) + t.Run("GaugeInt64", testFailDatatype(gaugeInt64A, gaugeInt64B)) + t.Run("GaugeFloat64", testFailDatatype(gaugeFloat64A, gaugeFloat64B)) t.Run("HistogramDataPoint", testFailDatatype(histogramDataPointA, histogramDataPointB)) - t.Run("DataPoint", testFailDatatype(dataPointsA, dataPointsB)) - t.Run("Int64", testFailDatatype(int64A, int64B)) - t.Run("Float64", testFailDatatype(float64A, float64B)) + t.Run("DataPointInt64", testFailDatatype(dataPointInt64A, dataPointInt64B)) + t.Run("DataPointFloat64", testFailDatatype(dataPointFloat64A, dataPointFloat64B)) + } func TestFailAssertAggregationsEqual(t *testing.T) { - AssertAggregationsEqual(t, sumA, nil) - AssertAggregationsEqual(t, sumA, gaugeA) + AssertAggregationsEqual(t, sumInt64A, nil) + AssertAggregationsEqual(t, sumFloat64A, gaugeFloat64A) AssertAggregationsEqual(t, unknownAggregation{}, unknownAggregation{}) - AssertAggregationsEqual(t, sumA, sumB) - AssertAggregationsEqual(t, gaugeA, gaugeB) + AssertAggregationsEqual(t, sumInt64A, sumInt64B) + AssertAggregationsEqual(t, sumFloat64A, sumFloat64B) + AssertAggregationsEqual(t, gaugeInt64A, gaugeInt64B) + AssertAggregationsEqual(t, gaugeFloat64A, gaugeFloat64B) AssertAggregationsEqual(t, histogramA, histogramB) } - -func TestFailAssertValuesEqual(t *testing.T) { - AssertValuesEqual(t, int64A, nil) - AssertValuesEqual(t, int64A, float64A) - AssertValuesEqual(t, unknownValue{}, unknownValue{}) - AssertValuesEqual(t, int64A, int64B) - AssertValuesEqual(t, float64A, float64B) -} From 67377cc70882373866a7e994d8cbd8353755f0c1 Mon Sep 17 00:00:00 2001 From: Tyler Yahn Date: Tue, 26 Jul 2022 10:52:40 -0700 Subject: [PATCH 6/7] Fix tests --- sdk/metric/internal/aggregator_test.go | 86 +++++++++----------------- sdk/metric/internal/lastvalue.go | 6 +- sdk/metric/internal/lastvalue_test.go | 78 +++++++++++++++-------- 3 files changed, 86 insertions(+), 84 
deletions(-) diff --git a/sdk/metric/internal/aggregator_test.go b/sdk/metric/internal/aggregator_test.go index 98a152e05c2..3f5cd18a8a4 100644 --- a/sdk/metric/internal/aggregator_test.go +++ b/sdk/metric/internal/aggregator_test.go @@ -21,11 +21,11 @@ import ( "strconv" "sync" "testing" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" + "time" "go.opentelemetry.io/otel/attribute" + "go.opentelemetry.io/otel/sdk/metric/metricdata" + "go.opentelemetry.io/otel/sdk/metric/metricdata/metricdatatest" ) const ( @@ -38,25 +38,28 @@ var ( alice = attribute.NewSet(attribute.String("user", "alice"), attribute.Bool("admin", true)) bob = attribute.NewSet(attribute.String("user", "bob"), attribute.Bool("admin", false)) carol = attribute.NewSet(attribute.String("user", "carol"), attribute.Bool("admin", false)) + + monoIncr = setMap{alice: 1, bob: 10, carol: 2} + nonMonoIncr = setMap{alice: 1, bob: -1, carol: 2} + + // Sat Jan 01 2000 00:00:00 GMT+0000. + staticTime = time.Unix(946684800, 0) + staticNowFunc = func() time.Time { return staticTime } + // Pass to t.Cleanup to override the now function with staticNowFunc and + // revert once the test completes. E.g. t.Cleanup(mockTime(now)) + mockTime = func(orig func() time.Time) (cleanup func()) { + now = staticNowFunc + return func() { now = orig } + } ) // setMap maps attribute sets to a number. -type setMap[N int64 | float64] map[attribute.Set]N - -// expectFunc returns a function that will return an setMap of expected -// values of a cycle that contains m measurements (total across all -// goroutines). Each call advances the cycle. -type expectFunc[N int64 | float64] func(increments setMap[N]) func(m int) setMap[N] - -// testAggregator tests aggregator a produces the expecter defined values -// using an aggregatorTester. -func testAggregator[N int64 | float64](a Aggregator[N], expecter expectFunc[N]) func(*testing.T) { - return (&aggregatorTester[N]{ - GoroutineN: defaultGoroutines, - MeasurementN: defaultMeasurements, - CycleN: defaultCycles, - }).Run(a, expecter) -} +type setMap map[attribute.Set]int + +// expectFunc is a function that returns an Aggregation of expected values for +// a cycle that contains m measurements (total across all goroutines). Each +// call advances the cycle. +type expectFunc func(m int) metricdata.Aggregation // aggregatorTester runs an acceptance test on an Aggregator. 
It will ask an // Aggregator to aggregate a set of values as if they were real measurements @@ -77,9 +80,7 @@ type aggregatorTester[N int64 | float64] struct { CycleN int } -func (at *aggregatorTester[N]) Run(a Aggregator[N], expecter expectFunc[N]) func(*testing.T) { - increments := map[attribute.Set]N{alice: 1, bob: -1, carol: 2} - f := expecter(increments) +func (at *aggregatorTester[N]) Run(a Aggregator[N], incr setMap, eFunc expectFunc) func(*testing.T) { m := at.MeasurementN * at.GoroutineN return func(t *testing.T) { for i := 0; i < at.CycleN; i++ { @@ -89,49 +90,20 @@ func (at *aggregatorTester[N]) Run(a Aggregator[N], expecter expectFunc[N]) func go func() { defer wg.Done() for j := 0; j < at.MeasurementN; j++ { - for attrs, n := range increments { - a.Aggregate(n, attrs) + for attrs, n := range incr { + a.Aggregate(N(n), attrs) } } }() } wg.Wait() - assertSetMap(t, f(m), aggregationsToMap[N](a.Aggregations())) + metricdatatest.AssertAggregationsEqual(t, eFunc(m), a.Aggregation()) } } } -func aggregationsToMap[N int64 | float64](a []Aggregation) setMap[N] { - m := make(setMap[N]) - for _, a := range a { - m[a.Attributes] = a.Value.(SingleValue[N]).Value - } - return m -} - -// assertSetMap asserts expected equals actual. The testify assert.Equal -// function does not give clear error messages for maps, this attempts to do -// so. -func assertSetMap[N int64 | float64](t *testing.T, expected, actual setMap[N]) { - extra := make(map[attribute.Set]struct{}) - for attr := range actual { - extra[attr] = struct{}{} - } - - for attr, v := range expected { - name := attr.Encoded(attribute.DefaultEncoder()) - t.Run(name, func(t *testing.T) { - require.Contains(t, actual, attr) - delete(extra, attr) - assert.Equal(t, v, actual[attr]) - }) - } - - assert.Lenf(t, extra, 0, "unknown values added: %v", extra) -} - -var bmarkResults []Aggregation +var bmarkResults metricdata.Aggregation func benchmarkAggregatorN[N int64 | float64](b *testing.B, factory func() Aggregator[N], count int) { attrs := make([]attribute.Set, count) @@ -149,7 +121,7 @@ func benchmarkAggregatorN[N int64 | float64](b *testing.B, factory func() Aggreg agg.Aggregate(1, attr) } } - assert.Len(b, agg.Aggregations(), count) + bmarkResults = agg.Aggregation() }) b.Run("Aggregations", func(b *testing.B) { @@ -166,7 +138,7 @@ func benchmarkAggregatorN[N int64 | float64](b *testing.B, factory func() Aggreg b.ResetTimer() for n := 0; n < b.N; n++ { - bmarkResults = aggs[n].Aggregations() + bmarkResults = aggs[n].Aggregation() } }) } diff --git a/sdk/metric/internal/lastvalue.go b/sdk/metric/internal/lastvalue.go index 2aa7dd80113..1cfeeb59a91 100644 --- a/sdk/metric/internal/lastvalue.go +++ b/sdk/metric/internal/lastvalue.go @@ -25,6 +25,10 @@ import ( "go.opentelemetry.io/otel/sdk/metric/metricdata" ) +// now is used to return the current local time while allowing tests to +// override the the default time.Now function. +var now = time.Now + // datapoint is timestamped measurement data. 
type datapoint[N int64 | float64] struct { timestamp time.Time @@ -45,7 +49,7 @@ func NewLastValue[N int64 | float64]() Aggregator[N] { } func (s *lastValue[N]) Aggregate(value N, attr attribute.Set) { - d := datapoint[N]{timestamp: time.Now(), value: value} + d := datapoint[N]{timestamp: now(), value: value} s.Lock() s.values[attr] = d s.Unlock() diff --git a/sdk/metric/internal/lastvalue_test.go b/sdk/metric/internal/lastvalue_test.go index d2b9a96eddc..41b75877fe3 100644 --- a/sdk/metric/internal/lastvalue_test.go +++ b/sdk/metric/internal/lastvalue_test.go @@ -17,46 +17,72 @@ package internal // import "go.opentelemetry.io/otel/sdk/metric/internal" -import "testing" +import ( + "testing" + + "go.opentelemetry.io/otel/sdk/metric/metricdata" + "go.opentelemetry.io/otel/sdk/metric/metricdata/metricdatatest" +) func TestLastValue(t *testing.T) { - t.Run("Int64", testAggregator(NewLastValue[int64](), lastValueExpecter[int64])) - t.Run("Float64", testAggregator(NewLastValue[float64](), lastValueExpecter[float64])) + t.Cleanup(mockTime(now)) + + t.Run("Int64", testLastValue[int64]()) + t.Run("Float64", testLastValue[float64]()) } -func lastValueExpecter[N int64 | float64](incr setMap[N]) func(int) setMap[N] { - expect := make(setMap[N], len(incr)) - for actor, incr := range incr { - expect[actor] = incr +func testLastValue[N int64 | float64]() func(*testing.T) { + tester := &aggregatorTester[N]{ + GoroutineN: defaultGoroutines, + MeasurementN: defaultMeasurements, + CycleN: defaultCycles, } - return func(int) setMap[N] { - return expect + + eFunc := func(increments setMap) expectFunc { + data := make([]metricdata.DataPoint[N], 0, len(increments)) + for a, v := range increments { + point := metricdata.DataPoint[N]{Attributes: a, Time: now(), Value: N(v)} + data = append(data, point) + } + gauge := metricdata.Gauge[N]{DataPoints: data} + return func(int) metricdata.Aggregation { return gauge } } + incr := monoIncr + return tester.Run(NewLastValue[N](), incr, eFunc(incr)) } -func testLastValueReset[N int64 | float64](a Aggregator[N]) func(*testing.T) { - return func(t *testing.T) { - expect := make(setMap[N]) - assertSetMap(t, expect, aggregationsToMap[N](a.Aggregations())) +func testLastValueReset[N int64 | float64](t *testing.T) { + t.Cleanup(mockTime(now)) - a.Aggregate(1, alice) - expect[alice] = 1 - assertSetMap(t, expect, aggregationsToMap[N](a.Aggregations())) + a := NewLastValue[N]() + expect := metricdata.Gauge[N]{} + metricdatatest.AssertAggregationsEqual(t, expect, a.Aggregation()) - // The attr set should be forgotten once Aggregations is called. - delete(expect, alice) - assertSetMap(t, expect, aggregationsToMap[N](a.Aggregations())) + a.Aggregate(1, alice) + expect.DataPoints = []metricdata.DataPoint[N]{{ + Attributes: alice, + Time: now(), + Value: 1, + }} + metricdatatest.AssertAggregationsEqual(t, expect, a.Aggregation()) - // Aggregating another set should not affect the original (alice). - a.Aggregate(1, bob) - expect[bob] = 1 - assertSetMap(t, expect, aggregationsToMap[N](a.Aggregations())) - } + // The attr set should be forgotten once Aggregations is called. + expect.DataPoints = nil + metricdatatest.AssertAggregationsEqual(t, expect, a.Aggregation()) + + // Aggregating another set should not affect the original (alice). 
+ a.Aggregate(1, bob) + expect.DataPoints = []metricdata.DataPoint[N]{{ + Attributes: bob, + Time: now(), + Value: 1, + }} + metricdatatest.AssertAggregationsEqual(t, expect, a.Aggregation()) } func TestLastValueReset(t *testing.T) { - t.Run("Int64", testLastValueReset(NewLastValue[int64]())) - t.Run("Float64", testLastValueReset(NewLastValue[float64]())) + t.Run("Int64", testLastValueReset[int64]) + t.Run("Float64", testLastValueReset[float64]) } func BenchmarkLastValue(b *testing.B) { From 3d36ede86026578a04a3a4fb0b5f457ade49a3fd Mon Sep 17 00:00:00 2001 From: Tyler Yahn Date: Tue, 26 Jul 2022 10:59:48 -0700 Subject: [PATCH 7/7] Remove unused test increment values --- sdk/metric/internal/aggregator_test.go | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/sdk/metric/internal/aggregator_test.go b/sdk/metric/internal/aggregator_test.go index 3f5cd18a8a4..645352153c1 100644 --- a/sdk/metric/internal/aggregator_test.go +++ b/sdk/metric/internal/aggregator_test.go @@ -39,14 +39,13 @@ var ( bob = attribute.NewSet(attribute.String("user", "bob"), attribute.Bool("admin", false)) carol = attribute.NewSet(attribute.String("user", "carol"), attribute.Bool("admin", false)) - monoIncr = setMap{alice: 1, bob: 10, carol: 2} - nonMonoIncr = setMap{alice: 1, bob: -1, carol: 2} + monoIncr = setMap{alice: 1, bob: 10, carol: 2} // Sat Jan 01 2000 00:00:00 GMT+0000. staticTime = time.Unix(946684800, 0) staticNowFunc = func() time.Time { return staticTime } // Pass to t.Cleanup to override the now function with staticNowFunc and - // revert once the test completes. E.g. t.Cleanup(mockTime(now)) + // revert once the test completes. E.g. t.Cleanup(mockTime(now)). mockTime = func(orig func() time.Time) (cleanup func()) { now = staticNowFunc return func() { now = orig }
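
---

Editor's note: the core idea of this series is a mutex-guarded map keyed by attribute.Set that keeps only the most recent measurement per set, forgets every set once it is collected, and reads the clock through an overridable package-level now variable so tests can pin timestamps. The sketch below is a minimal, self-contained approximation of that pattern, not the SDK's internal code: the Collect method returning a plain map is a simplified stand-in for the patch's Aggregation()/Aggregations() methods, which return metricdata values, and the main function is purely illustrative.

// Package main sketches the last-value aggregator pattern from this patch
// series: keep one timestamped datapoint per attribute set, drop each set
// after it is collected so stale values are never re-reported.
package main

import (
	"fmt"
	"sync"
	"time"

	"go.opentelemetry.io/otel/attribute"
)

// now is a package-level clock so tests can substitute a fixed time,
// mirroring the mockTime helper added in aggregator_test.go.
var now = time.Now

// datapoint pairs a measurement with the time it was made.
type datapoint[N int64 | float64] struct {
	timestamp time.Time
	value     N
}

// lastValue keeps the most recent datapoint per attribute set.
type lastValue[N int64 | float64] struct {
	sync.Mutex
	values map[attribute.Set]datapoint[N]
}

func newLastValue[N int64 | float64]() *lastValue[N] {
	return &lastValue[N]{values: make(map[attribute.Set]datapoint[N])}
}

// Aggregate records value for attr, overwriting any earlier measurement made
// with the same attribute set.
func (lv *lastValue[N]) Aggregate(value N, attr attribute.Set) {
	d := datapoint[N]{timestamp: now(), value: value}
	lv.Lock()
	lv.values[attr] = d
	lv.Unlock()
}

// Collect drains the stored datapoints. Deleting each entry as it is read is
// what gives the reset behavior TestLastValueReset checks: a set measured in
// one cycle does not appear in the next unless it is measured again.
// (Simplified stand-in for the patch's Aggregation() method.)
func (lv *lastValue[N]) Collect() map[attribute.Set]datapoint[N] {
	lv.Lock()
	defer lv.Unlock()
	out := make(map[attribute.Set]datapoint[N], len(lv.values))
	for attr, d := range lv.values {
		out[attr] = d
		delete(lv.values, attr)
	}
	return out
}

func main() {
	lv := newLastValue[int64]()
	alice := attribute.NewSet(attribute.String("user", "alice"))

	lv.Aggregate(1, alice)
	lv.Aggregate(5, alice) // last value wins for the same attribute set

	for attr, d := range lv.Collect() {
		fmt.Printf("%s = %d at %s\n",
			attr.Encoded(attribute.DefaultEncoder()), d.value,
			d.timestamp.Format(time.RFC3339))
	}

	// A second collection is empty: alice was forgotten by the first
	// Collect, the same "do not report stale values" choice the patch makes.
	fmt.Println("second collection size:", len(lv.Collect()))
}

Keying the map on attribute.Set works because Set is a comparable value type, which is also why the acceptance test and benchmarks in this series can use sets such as alice and bob directly as map keys.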