
SFx exporter: Implement summary type (#2998)
* Implement summary type

Prometheus's Summary type was previously not implemented. This change
ports the SFx Smart Agent's implementation to the SFx exporter. It has
been end-to-end tested using a Prometheus server as a source of metrics
sent to an SFx backend.

* Address PR feedback: use minimum precision when formatting quantile dimension values
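For context, the change below turns each summary data point into a count, a sum, and one point per quantile. The following is purely an illustrative sketch, not the exporter's code: the metric name "api_latency" and the quantile set are invented, and plain strings stand in for the pdata/sfxpb types; it only shows the data-point names and SFx metric types the new conversion emits.

package main

import "fmt"

func main() {
	// Hypothetical summary metric "api_latency" with two quantiles.
	type point struct{ name, sfxType, value string }
	pts := []point{
		{"api_latency_count", "CUMULATIVE_COUNTER", "summary count"},
		{"api_latency", "CUMULATIVE_COUNTER", "summary sum"},
		{"api_latency_quantile", "GAUGE", `0.5-quantile value, dimension quantile="0.5"`},
		{"api_latency_quantile", "GAUGE", `0.9-quantile value, dimension quantile="0.9"`},
	}
	for _, p := range pts {
		fmt.Printf("%-22s %-20s %s\n", p.name, p.sfxType, p.value)
	}
}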
pmcollins authored Apr 6, 2021
1 parent b003b68 commit cdbccd0
Showing 2 changed files with 219 additions and 1 deletion.
56 changes: 56 additions & 0 deletions exporter/signalfxexporter/translation/converter.go
@@ -111,6 +111,8 @@ func (c *MetricsConverter) metricToSfxDataPoints(metric pdata.Metric, extraDimen
		dps = convertIntHistogram(metric.IntHistogram().DataPoints(), basePoint, extraDimensions)
	case pdata.MetricDataTypeHistogram:
		dps = convertHistogram(metric.Histogram().DataPoints(), basePoint, extraDimensions)
	case pdata.MetricDataTypeSummary:
		dps = convertSummaryDataPoints(metric.Summary().DataPoints(), metric.Name(), extraDimensions)
	}

	if c.metricTranslator != nil {
@@ -152,6 +154,60 @@ func labelsToDimensions(labels pdata.StringMap, extraDims []*sfxpb.Dimension) []
	return dimensions
}

// convertSummaryDataPoints maps each summary data point to SFx data points:
// the count becomes a cumulative counter named "<name>_count", the sum a
// cumulative counter named "<name>", and each quantile a gauge named
// "<name>_quantile" carrying a "quantile" dimension.
func convertSummaryDataPoints(
	in pdata.SummaryDataPointSlice,
	name string,
	extraDims []*sfxpb.Dimension,
) []*sfxpb.DataPoint {
	out := make([]*sfxpb.DataPoint, 0, in.Len())

	for i := 0; i < in.Len(); i++ {
		inDp := in.At(i)

		dims := labelsToDimensions(inDp.LabelsMap(), extraDims)
		ts := timestampToSignalFx(inDp.Timestamp())

		countPt := sfxpb.DataPoint{
			Metric:     name + "_count",
			Timestamp:  ts,
			Dimensions: dims,
			MetricType: &sfxMetricTypeCumulativeCounter,
		}
		c := int64(inDp.Count())
		countPt.Value.IntValue = &c
		out = append(out, &countPt)

		sumPt := sfxpb.DataPoint{
			Metric:     name,
			Timestamp:  ts,
			Dimensions: dims,
			MetricType: &sfxMetricTypeCumulativeCounter,
		}
		sum := inDp.Sum()
		sumPt.Value.DoubleValue = &sum
		out = append(out, &sumPt)

		qvs := inDp.QuantileValues()
		for j := 0; j < qvs.Len(); j++ {
			qPt := sfxpb.DataPoint{
				Metric:     name + "_quantile",
				Timestamp:  ts,
				MetricType: &sfxMetricTypeGauge,
			}
			qv := qvs.At(j)
			qdim := sfxpb.Dimension{
				Key:   "quantile",
				Value: strconv.FormatFloat(qv.Quantile(), 'f', -1, 64),
			}
			qPt.Dimensions = append(dims, &qdim)
			v := qv.Value()
			qPt.Value.DoubleValue = &v
			out = append(out, &qPt)
		}
	}
	return out
}
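The `-1` precision passed to strconv.FormatFloat above is the "minimum precision" mentioned in the commit message: it produces the shortest decimal string that parses back to the same float64, keeping the quantile dimension values compact. A minimal standalone sketch (the quantile values are illustrative):

package main

import (
	"fmt"
	"strconv"
)

func main() {
	// Precision -1 yields the shortest representation that round-trips
	// to the same value, so "0.5" rather than "0.500000".
	for _, q := range []float64{0.25, 0.5, 0.75, 1} {
		fmt.Println(strconv.FormatFloat(q, 'f', -1, 64)) // 0.25, 0.5, 0.75, 1
	}
}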

func convertIntDatapoints(in pdata.IntDataPointSlice, basePoint *sfxpb.DataPoint, extraDims []*sfxpb.Dimension) []*sfxpb.DataPoint {
	out := make([]*sfxpb.DataPoint, 0, in.Len())

164 changes: 163 additions & 1 deletion exporter/signalfxexporter/translation/converter_test.go
@@ -97,6 +97,27 @@ func Test_MetricDataToSignalFxV2(t *testing.T) {
	histDPNoBuckets.SetTimestamp(ts)
	labels.CopyTo(histDPNoBuckets.LabelsMap())

	summaryDP := pdata.NewSummaryDataPoint()
	summaryDP.SetTimestamp(ts)
	const summarySumVal = 123.4
	summaryDP.SetSum(summarySumVal)
	const summaryCountVal = 111
	summaryDP.SetCount(summaryCountVal)
	qvs := summaryDP.QuantileValues()
	qvs.Resize(4)
	for i := 0; i < qvs.Len(); i++ {
		qv := qvs.At(i)
		qv.SetQuantile(0.25 * float64(i+1))
		qv.SetValue(float64(i))
	}
	labels.CopyTo(summaryDP.LabelsMap())

	emptySummaryDP := pdata.NewSummaryDataPoint()
	emptySummaryDP.SetTimestamp(ts)
	emptySummaryDP.SetSum(summarySumVal)
	emptySummaryDP.SetCount(summaryCountVal)
	labels.CopyTo(emptySummaryDP.LabelsMap())

	tests := []struct {
		name          string
		metricsDataFn func() pdata.ResourceMetrics
@@ -504,6 +525,44 @@ func Test_MetricDataToSignalFxV2(t *testing.T) {
			},
			wantSfxDataPoints: expectedFromIntHistogram("no_bucket_histo", tsMSecs, labelMap, histDPNoBuckets, false),
		},
		{
			name: "summaries",
			metricsDataFn: func() pdata.ResourceMetrics {
				out := pdata.NewResourceMetrics()
				out.InstrumentationLibraryMetrics().Resize(1)
				ilm := out.InstrumentationLibraryMetrics().At(0)
				ilm.Metrics().Resize(1)

				{
					m := ilm.Metrics().At(0)
					m.SetName("summary")
					m.SetDataType(pdata.MetricDataTypeSummary)
					m.Summary().DataPoints().Append(summaryDP)
				}

				return out
			},
			wantSfxDataPoints: expectedFromSummary("summary", tsMSecs, labelMap, summaryCountVal, summarySumVal),
		},
		{
			name: "empty_summary",
			metricsDataFn: func() pdata.ResourceMetrics {
				out := pdata.NewResourceMetrics()
				out.InstrumentationLibraryMetrics().Resize(1)
				ilm := out.InstrumentationLibraryMetrics().At(0)
				ilm.Metrics().Resize(1)

				{
					m := ilm.Metrics().At(0)
					m.SetName("empty_summary")
					m.SetDataType(pdata.MetricDataTypeSummary)
					m.Summary().DataPoints().Append(emptySummaryDP)
				}

				return out
			},
			wantSfxDataPoints: expectedFromEmptySummary("empty_summary", tsMSecs, labelMap, summaryCountVal, summarySumVal),
		},
		{
			name: "with_exclude_metrics_filter",
			metricsDataFn: func() pdata.ResourceMetrics {
@@ -642,7 +701,8 @@ func Test_MetricDataToSignalFxV2(t *testing.T) {
		t.Run(tt.name, func(t *testing.T) {
			c, err := NewMetricsConverter(logger, nil, tt.excludeMetrics, tt.includeMetrics, "")
			require.NoError(t, err)
			gotSfxDataPoints := c.MetricDataToSignalFxV2(tt.metricsDataFn())
			md := tt.metricsDataFn()
			gotSfxDataPoints := c.MetricDataToSignalFxV2(md)
			// Sort SFx dimensions since they are built from maps and the order
			// of those is not deterministic.
			sortDimensions(tt.wantSfxDataPoints)
@@ -878,6 +938,33 @@ func expectedFromHistogram(
	return dps
}

func expectedFromSummary(name string, ts int64, labelMap map[string]string, count int64, sumVal float64) []*sfxpb.DataPoint {
	countName := name + "_count"
	countPt := int64SFxDataPoint(countName, ts, &sfxMetricTypeCumulativeCounter, labelMap, count)
	sumPt := doubleSFxDataPoint(name, ts, &sfxMetricTypeCumulativeCounter, labelMap, sumVal)
	out := []*sfxpb.DataPoint{countPt, sumPt}
	quantileDimVals := []string{"0.25", "0.5", "0.75", "1"}
	for i := 0; i < 4; i++ {
		qDims := map[string]string{"quantile": quantileDimVals[i]}
		qPt := doubleSFxDataPoint(
			name+"_quantile",
			ts,
			&sfxMetricTypeGauge,
			util.MergeStringMaps(labelMap, qDims),
			float64(i),
		)
		out = append(out, qPt)
	}
	return out
}

func expectedFromEmptySummary(name string, ts int64, labelMap map[string]string, count int64, sumVal float64) []*sfxpb.DataPoint {
	countName := name + "_count"
	countPt := int64SFxDataPoint(countName, ts, &sfxMetricTypeCumulativeCounter, labelMap, count)
	sumPt := doubleSFxDataPoint(name, ts, &sfxMetricTypeCumulativeCounter, labelMap, sumVal)
	return []*sfxpb.DataPoint{countPt, sumPt}
}

func mergeDPs(dps ...[]*sfxpb.DataPoint) []*sfxpb.DataPoint {
	var out []*sfxpb.DataPoint
	for i := range dps {
@@ -970,3 +1057,78 @@ func TestMetricsConverter_ConvertDimension(t *testing.T) {
		})
	}
}

func TestConvertSummary(t *testing.T) {
	extraDims := []*sfxpb.Dimension{{
		Key:   "dim1",
		Value: "val1",
	}}
	summarys := pdata.NewSummaryDataPointSlice()
	summarys.Resize(1)
	summary := summarys.At(0)
	const count = 42
	summary.SetCount(count)
	const sum = 10.0
	summary.SetSum(sum)
	const startTime = 55 * 1e6
	summary.SetStartTimestamp(pdata.Timestamp(startTime))
	timestamp := 111 * 1e6
	summary.SetTimestamp(pdata.Timestamp(timestamp))
	qvs := summary.QuantileValues()
	qvs.Resize(4)
	for i := 0; i < qvs.Len(); i++ {
		qv := qvs.At(i)
		qv.SetQuantile(0.25 * float64(i+1))
		qv.SetValue(float64(i))
	}
	dps := convertSummaryDataPoints(summarys, "metric_name", extraDims)

	pt := dps[0]
	assert.Equal(t, sfxpb.MetricType_CUMULATIVE_COUNTER, *pt.MetricType)
	assert.Equal(t, int64(111), pt.Timestamp)
	assert.Equal(t, "metric_name_count", pt.Metric)
	assert.Equal(t, int64(count), pt.Value.GetIntValue())
	assert.Equal(t, 1, len(pt.Dimensions))
	assertHasExtraDim(t, pt)

	pt = dps[1]
	assert.Equal(t, sfxpb.MetricType_CUMULATIVE_COUNTER, *pt.MetricType)
	assert.Equal(t, int64(111), pt.Timestamp)
	assert.Equal(t, "metric_name", pt.Metric)
	assert.Equal(t, sum, pt.Value.GetDoubleValue())
	assert.Equal(t, 1, len(pt.Dimensions))
	assertHasExtraDim(t, pt)

	pt = dps[2]
	assert.Equal(t, sfxpb.MetricType_GAUGE, *pt.MetricType)
	assert.Equal(t, int64(111), pt.Timestamp)
	assert.Equal(t, "metric_name_quantile", pt.Metric)
	assert.Equal(t, 0.0, pt.Value.GetDoubleValue())
	assert.Equal(t, 2, len(pt.Dimensions))
	dim := pt.Dimensions[1]
	assert.Equal(t, "quantile", dim.Key)

	for i := 0; i < 4; i++ {
		pt = dps[i+2]
		assert.Equal(t, sfxpb.MetricType_GAUGE, *pt.MetricType)
		assert.Equal(t, int64(111), pt.Timestamp)
		assert.Equal(t, "metric_name_quantile", pt.Metric)
		assert.EqualValues(t, i, pt.Value.GetDoubleValue())
		assert.Equal(t, 2, len(pt.Dimensions))
		dim = pt.Dimensions[1]
		assert.Equal(t, "quantile", dim.Key)
	}

	assert.Equal(t, "0.25", dps[2].Dimensions[1].Value)
	assert.Equal(t, "0.5", dps[3].Dimensions[1].Value)
	assert.Equal(t, "0.75", dps[4].Dimensions[1].Value)
assert.Equal(t, "1", dps[5].Dimensions[1].Value)

println()
}

func assertHasExtraDim(t *testing.T, pt *sfxpb.DataPoint) {
	extraDim := pt.Dimensions[0]
	assert.Equal(t, "dim1", extraDim.Key)
	assert.Equal(t, "val1", extraDim.Value)
}
