Split out metric test utils from metrics_adjuster_test (#395)
* Split out metric test utils from metrics_adjuster_test and move to testutils

* Reorder import of metrics testutils in metrics_adjuster_test

* Move new metrics test utils to their own package

* Rename some utility functions for clarity
dinooliva authored and Paulo Janotti committed Oct 17, 2019
1 parent 7486dae commit 0ce5c5d
Showing 3 changed files with 442 additions and 212 deletions.
internal/metricstestutils/metrics_testutils.go (159 additions, 0 deletions)
@@ -0,0 +1,159 @@
// Copyright 2019 OpenTelemetry Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package metricstestutils

import (
"time"

metricspb "github.com/census-instrumentation/opencensus-proto/gen-go/metrics/v1"
timestamppb "github.com/golang/protobuf/ptypes/timestamp"
wrapperspb "github.com/golang/protobuf/ptypes/wrappers"
)

// Gauge creates a gauge metric of type double.
func Gauge(name string, keys []string, timeseries ...*metricspb.TimeSeries) *metricspb.Metric {
return metric(metricspb.MetricDescriptor_GAUGE_DOUBLE, name, keys, timeseries)
}

// GaugeInt creates a gauge metric of type int64.
func GaugeInt(name string, keys []string, timeseries ...*metricspb.TimeSeries) *metricspb.Metric {
return metric(metricspb.MetricDescriptor_GAUGE_INT64, name, keys, timeseries)
}

// GaugeDist creates a gauge distribution metric.
func GaugeDist(name string, keys []string, timeseries ...*metricspb.TimeSeries) *metricspb.Metric {
return metric(metricspb.MetricDescriptor_GAUGE_DISTRIBUTION, name, keys, timeseries)
}

// Cumulative creates a cumulative metric of type double.
func Cumulative(name string, keys []string, timeseries ...*metricspb.TimeSeries) *metricspb.Metric {
return metric(metricspb.MetricDescriptor_CUMULATIVE_DOUBLE, name, keys, timeseries)
}

// CumulativeInt creates a cumulative metric of type int64.
func CumulativeInt(name string, keys []string, timeseries ...*metricspb.TimeSeries) *metricspb.Metric {
return metric(metricspb.MetricDescriptor_CUMULATIVE_INT64, name, keys, timeseries)
}

// CumulativeDist creates a cumulative distribution metric.
func CumulativeDist(name string, keys []string, timeseries ...*metricspb.TimeSeries) *metricspb.Metric {
return metric(metricspb.MetricDescriptor_CUMULATIVE_DISTRIBUTION, name, keys, timeseries)
}

// Summary creates a summary metric.
func Summary(name string, keys []string, timeseries ...*metricspb.TimeSeries) *metricspb.Metric {
return metric(metricspb.MetricDescriptor_SUMMARY, name, keys, timeseries)
}

// Timeseries creates a timeseries. It takes the start timestamp, a sequence of label values (associated
// with the label keys in the overall metric), and the data point of the timeseries.
func Timeseries(sts time.Time, vals []string, point *metricspb.Point) *metricspb.TimeSeries {
return &metricspb.TimeSeries{
StartTimestamp: Timestamp(sts),
Points: []*metricspb.Point{point},
LabelValues: toVals(vals),
}
}

// Double creates a double point.
func Double(ts time.Time, value float64) *metricspb.Point {
return &metricspb.Point{Timestamp: Timestamp(ts), Value: &metricspb.Point_DoubleValue{DoubleValue: value}}
}

// DistPt creates a distribution point. It takes the timestamp, the bucket boundaries for the distribution,
// and the counts for the individual buckets as input.
func DistPt(ts time.Time, bounds []float64, counts []int64) *metricspb.Point {
var count int64
var sum float64
buckets := make([]*metricspb.DistributionValue_Bucket, len(counts))

for i, bcount := range counts {
count += bcount
buckets[i] = &metricspb.DistributionValue_Bucket{Count: bcount}
// create a sum based on lower bucket bounds
// e.g. for bounds = {0.1, 0.2, 0.4} and counts = {2, 3, 7, 9}
// sum = 0*2 + 0.1*3 + 0.2*7 + 0.4*9
if i > 0 {
sum += float64(bcount) * bounds[i-1]
}
}
distrValue := &metricspb.DistributionValue{
BucketOptions: &metricspb.DistributionValue_BucketOptions{
Type: &metricspb.DistributionValue_BucketOptions_Explicit_{
Explicit: &metricspb.DistributionValue_BucketOptions_Explicit{
Bounds: bounds,
},
},
},
Count: count,
Sum: sum,
Buckets: buckets,
// There's no way to compute SumOfSquaredDeviation from Prometheus data.
}
return &metricspb.Point{Timestamp: Timestamp(ts), Value: &metricspb.Point_DistributionValue{DistributionValue: distrValue}}
}

// SummPt creates a summary point. It takes the timestamp, the total count and sum, and parallel slices
// of percentiles and their values as input.
func SummPt(ts time.Time, count int64, sum float64, percent, vals []float64) *metricspb.Point {
percentiles := make([]*metricspb.SummaryValue_Snapshot_ValueAtPercentile, len(percent))
for i := 0; i < len(percent); i++ {
percentiles[i] = &metricspb.SummaryValue_Snapshot_ValueAtPercentile{Percentile: percent[i], Value: vals[i]}
}
summaryValue := &metricspb.SummaryValue{
Sum: &wrapperspb.DoubleValue{Value: sum},
Count: &wrapperspb.Int64Value{Value: count},
Snapshot: &metricspb.SummaryValue_Snapshot{
PercentileValues: percentiles,
},
}
return &metricspb.Point{Timestamp: Timestamp(ts), Value: &metricspb.Point_SummaryValue{SummaryValue: summaryValue}}
}

// Timestamp converts a time.Time into a protobuf timestamp.
func Timestamp(ts time.Time) *timestamppb.Timestamp {
return &timestamppb.Timestamp{
Seconds: ts.Unix(),
Nanos: int32(ts.Nanosecond()),
}
}

func metric(ty metricspb.MetricDescriptor_Type, name string, keys []string, timeseries []*metricspb.TimeSeries) *metricspb.Metric {
return &metricspb.Metric{
MetricDescriptor: &metricspb.MetricDescriptor{
Name: name,
Description: "metrics description",
Unit: "",
Type: ty,
LabelKeys: toKeys(keys),
},
Timeseries: timeseries,
}
}

func toKeys(keys []string) []*metricspb.LabelKey {
res := make([]*metricspb.LabelKey, 0, len(keys))
for _, key := range keys {
res = append(res, &metricspb.LabelKey{Key: key, Description: "description: " + key})
}
return res
}

func toVals(vals []string) []*metricspb.LabelValue {
res := make([]*metricspb.LabelValue, 0, len(vals))
for _, val := range vals {
res = append(res, &metricspb.LabelValue{Value: val, HasValue: true})
}
return res
}
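
To show how these helpers compose, here is a minimal sketch; the metric name, label keys, and values are illustrative and not part of the commit. It builds a gauge distribution metric and spells out the count and sum that DistPt derives from the lower bucket bounds:

package metricstestutils

import (
	"fmt"
	"time"
)

// DemoDistPt is an illustrative helper, not code from this commit.
func DemoDistPt() {
	ts := time.Unix(0, 3000000)
	bounds := []float64{0.1, 0.2, 0.4}
	counts := []int64{2, 3, 7, 9}

	// DistPt accumulates count = 2+3+7+9 = 21 and
	// sum = 0.1*3 + 0.2*7 + 0.4*9 ≈ 5.3 (the first bucket has no lower bound).
	pt := DistPt(ts, bounds, counts)
	dv := pt.GetDistributionValue()
	fmt.Println(dv.Count, dv.Sum)

	// The point is then wrapped into a timeseries and a metric descriptor.
	_ = GaugeDist("latency_ms", []string{"host"}, Timeseries(ts, []string{"h1"}, pt))
}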
internal/metricstestutils/metrics_testutils_test.go (180 additions, 0 deletions)
@@ -0,0 +1,180 @@
// Copyright 2019 OpenTelemetry Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package metricstestutils

import (
"testing"
"time"

metricspb "github.com/census-instrumentation/opencensus-proto/gen-go/metrics/v1"
timestamppb "github.com/golang/protobuf/ptypes/timestamp"
wrapperspb "github.com/golang/protobuf/ptypes/wrappers"
"github.com/stretchr/testify/assert"
)

func TestMetricsTestUtils(t *testing.T) {
op1 := "op1"
op2 := "op2"
k1k2 := []string{"k1", "k2"}
v1v2 := []string{"v1", "v2"}
v10v20 := []string{"v10", "v20"}
bounds0 := []float64{1}
percent0 := []float64{10}
t1Ms := time.Unix(0, 1000000)
t3Ms := time.Unix(0, 3000000)
t5Ms := time.Unix(0, 5000000)

k1k2Labels := []*metricspb.LabelKey{
{Key: "k1", Description: "description: k1"},
{Key: "k2", Description: "description: k2"},
}

v1v2Values := []*metricspb.LabelValue{
{Value: "v1", HasValue: true},
{Value: "v2", HasValue: true},
}

v10v20Values := []*metricspb.LabelValue{
{Value: "v10", HasValue: true},
{Value: "v20", HasValue: true},
}

ts1Ms := &timestamppb.Timestamp{Seconds: 0, Nanos: 1000000}
ts3Ms := &timestamppb.Timestamp{Seconds: 0, Nanos: 3000000}
ts5Ms := &timestamppb.Timestamp{Seconds: 0, Nanos: 5000000}

d44 := &metricspb.Point_DoubleValue{DoubleValue: 44}
d65 := &metricspb.Point_DoubleValue{DoubleValue: 65}
d90 := &metricspb.Point_DoubleValue{DoubleValue: 90}

dist := &metricspb.Point_DistributionValue{
DistributionValue: &metricspb.DistributionValue{
BucketOptions: &metricspb.DistributionValue_BucketOptions{
Type: &metricspb.DistributionValue_BucketOptions_Explicit_{
Explicit: &metricspb.DistributionValue_BucketOptions_Explicit{
Bounds: []float64{1},
},
},
},
Count: 2,
Sum: 0,
Buckets: []*metricspb.DistributionValue_Bucket{{Count: 2}},
},
}

summ := &metricspb.Point_SummaryValue{
SummaryValue: &metricspb.SummaryValue{
Sum: &wrapperspb.DoubleValue{Value: 40},
Count: &wrapperspb.Int64Value{Value: 10},
Snapshot: &metricspb.SummaryValue_Snapshot{
PercentileValues: []*metricspb.SummaryValue_Snapshot_ValueAtPercentile{
{Percentile: 10, Value: 1},
},
},
},
}

got := []*metricspb.Metric{
Gauge(op1, k1k2, Timeseries(t1Ms, v1v2, Double(t1Ms, 44))),
GaugeDist(op2, k1k2, Timeseries(t3Ms, v1v2, DistPt(t1Ms, bounds0, []int64{2}))),
Cumulative(op1, k1k2, Timeseries(t5Ms, v1v2, Double(t5Ms, 90)), Timeseries(t5Ms, v10v20, Double(t5Ms, 65))),
CumulativeDist(op2, k1k2, Timeseries(t1Ms, v1v2, DistPt(t1Ms, bounds0, []int64{2}))),
Summary(op1, k1k2, Timeseries(t1Ms, v1v2, SummPt(t1Ms, 10, 40, percent0, []float64{1, 5}))),
}

want := []*metricspb.Metric{
{
MetricDescriptor: &metricspb.MetricDescriptor{
Name: op1,
Description: "metrics description",
Type: metricspb.MetricDescriptor_GAUGE_DOUBLE,
LabelKeys: k1k2Labels,
},
Timeseries: []*metricspb.TimeSeries{
{
StartTimestamp: ts1Ms,
LabelValues: v1v2Values,
Points: []*metricspb.Point{{Timestamp: ts1Ms, Value: d44}},
},
},
},
{
MetricDescriptor: &metricspb.MetricDescriptor{
Name: op2,
Description: "metrics description",
Type: metricspb.MetricDescriptor_GAUGE_DISTRIBUTION,
LabelKeys: k1k2Labels,
},
Timeseries: []*metricspb.TimeSeries{
{
StartTimestamp: ts3Ms,
LabelValues: v1v2Values,
Points: []*metricspb.Point{{Timestamp: ts1Ms, Value: dist}},
},
},
},
{
MetricDescriptor: &metricspb.MetricDescriptor{
Name: op1,
Description: "metrics description",
Type: metricspb.MetricDescriptor_CUMULATIVE_DOUBLE,
LabelKeys: k1k2Labels,
},
Timeseries: []*metricspb.TimeSeries{
{
StartTimestamp: ts5Ms,
LabelValues: v1v2Values,
Points: []*metricspb.Point{{Timestamp: ts5Ms, Value: d90}},
},
{
StartTimestamp: ts5Ms,
LabelValues: v10v20Values,
Points: []*metricspb.Point{{Timestamp: ts5Ms, Value: d65}},
},
},
},
{
MetricDescriptor: &metricspb.MetricDescriptor{
Name: op2,
Description: "metrics description",
Type: metricspb.MetricDescriptor_CUMULATIVE_DISTRIBUTION,
LabelKeys: k1k2Labels,
},
Timeseries: []*metricspb.TimeSeries{
{
StartTimestamp: ts1Ms,
LabelValues: v1v2Values,
Points: []*metricspb.Point{{Timestamp: ts1Ms, Value: dist}},
},
},
},
{
MetricDescriptor: &metricspb.MetricDescriptor{
Name: op1,
Description: "metrics description",
Type: metricspb.MetricDescriptor_SUMMARY,
LabelKeys: k1k2Labels,
},
Timeseries: []*metricspb.TimeSeries{
{
StartTimestamp: ts1Ms,
LabelValues: v1v2Values,
Points: []*metricspb.Point{{Timestamp: ts1Ms, Value: summ}},
},
},
},
}
assert.Equalf(t, want, got, "got %v, want %v", got, want)
}
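
The commit message also mentions reordering the metricstestutils import in metrics_adjuster_test, whose diff is not shown above. A consuming test would look roughly like the sketch below; the package name, import path, alias, and metric values are assumptions, not taken from the commit:

package metricsadjuster

import (
	"testing"
	"time"

	// Import path is an assumption; it depends on the repository's module name.
	mtu "github.com/open-telemetry/opentelemetry-collector/internal/metricstestutils"
)

// TestAdjusterFixture is an illustrative sketch, not code from this commit.
func TestAdjusterFixture(t *testing.T) {
	start := time.Unix(0, 1000000)
	// Build a cumulative double metric with one timeseries and one point,
	// the kind of fixture a test would construct with these helpers.
	metric := mtu.Cumulative("ops", []string{"host"}, mtu.Timeseries(start, []string{"h1"}, mtu.Double(start, 7)))
	if metric.GetMetricDescriptor().GetName() != "ops" {
		t.Fatal("unexpected metric name")
	}
}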