Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account-related emails.

Already on GitHub? Sign in to your account

[coordinator] Treat Prometheus TagName/Value as []byte instead of String #1004

Merged
merged 6 commits into from
Oct 3, 2018
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion src/cmd/services/m3coordinator/downsample/downsampler.go
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@ type Downsampler interface {
// MetricsAppender is a metrics appender that can build a samples
// appender, only valid to use with a single caller at a time.
type MetricsAppender interface {
AddTag(name, value string)
AddTag(name, value []byte)
SamplesAppender() (SamplesAppender, error)
Reset()
Finalize()
Expand Down
22 changes: 16 additions & 6 deletions src/cmd/services/m3coordinator/downsample/downsampler_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@
package downsample

import (
"bytes"
"testing"
"time"

Expand Down Expand Up @@ -151,7 +152,7 @@ func testDownsamplerAggregation(
for _, metric := range testCounterMetrics {
appender.Reset()
for name, value := range metric.tags {
appender.AddTag(name, value)
appender.AddTag([]byte(name), []byte(value))
}

samplesAppender, err := appender.SamplesAppender()
Expand All @@ -165,7 +166,7 @@ func testDownsamplerAggregation(
for _, metric := range testGaugeMetrics {
appender.Reset()
for name, value := range metric.tags {
appender.AddTag(name, value)
appender.AddTag([]byte(name), []byte(value))
}

samplesAppender, err := appender.SamplesAppender()
Expand All @@ -192,18 +193,27 @@ func testDownsamplerAggregation(
writes := testDownsampler.storage.Writes()
for _, metric := range testCounterMetrics {
write := mustFindWrite(t, writes, metric.tags["__name__"])
assert.Equal(t, metric.tags, write.Tags.StringMap())
assert.Equal(t, metric.tags, tagsToStringMap(write.Tags))
require.Equal(t, 1, len(write.Datapoints))
assert.Equal(t, float64(metric.expected), write.Datapoints[0].Value)
}
for _, metric := range testGaugeMetrics {
write := mustFindWrite(t, writes, metric.tags["__name__"])
assert.Equal(t, metric.tags, write.Tags.StringMap())
assert.Equal(t, metric.tags, tagsToStringMap(write.Tags))
require.Equal(t, 1, len(write.Datapoints))
assert.Equal(t, float64(metric.expected), write.Datapoints[0].Value)
}
}

// tagsToStringMap converts a models.Tags collection into a plain
// map[string]string, decoding each tag's []byte name and value to
// strings. If the same tag name occurs more than once, the last
// occurrence wins (standard map-assignment semantics).
func tagsToStringMap(tags models.Tags) map[string]string {
	result := make(map[string]string, len(tags))
	for _, tag := range tags {
		result[string(tag.Name)] = string(tag.Value)
	}
	return result
}

type testDownsampler struct {
opts DownsamplerOptions
downsampler Downsampler
Expand Down Expand Up @@ -292,7 +302,7 @@ func newTestID(t *testing.T, tags map[string]string) id.ID {

tagsIter := newTags()
for name, value := range tags {
tagsIter.append(name, value)
tagsIter.append([]byte(name), []byte(value))
}

tagEncoder := tagEncoderPool.Get()
Expand All @@ -317,7 +327,7 @@ func mustFindWrite(t *testing.T, writes []*storage.WriteQuery, name string) *sto
var write *storage.WriteQuery
for _, w := range writes {
if t, ok := w.Tags.Get(models.MetricName); ok {
if t == name {
if bytes.Equal(t, []byte(name)) {
write = w
break
}
Expand Down
12 changes: 6 additions & 6 deletions src/cmd/services/m3coordinator/downsample/flush_handler.go
Original file line number Diff line number Diff line change
Expand Up @@ -37,8 +37,8 @@ import (
"github.com/uber-go/tally"
)

const (
aggregationSuffixTag = "agg"
var (
aggregationSuffixTag = []byte("agg")
)

type downsamplerFlushHandler struct {
Expand Down Expand Up @@ -117,14 +117,14 @@ func (w *downsamplerFlushHandlerWriter) Write(
}

// Add extra tag since we may need to add an aggregation suffix tag
tags := make(models.Tags, 0, expected+1)
tags := make(models.Tags, 0, expected)
for iter.Next() {
name, value := iter.Current()
tags = append(tags, models.Tag{Name: string(name), Value: string(value)})
tags = append(tags, models.Tag{Name: name, Value: value})
}

if len(chunkSuffix) != 0 {
tags = append(tags, models.Tag{Name: aggregationSuffixTag, Value: string(chunkSuffix)})
tags = append(tags, models.Tag{Name: aggregationSuffixTag, Value: chunkSuffix})
}

err := iter.Err()
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -48,7 +48,7 @@ type metricsAppenderOptions struct {
encodedTagsIteratorPool *encodedTagsIteratorPool
}

func (a *metricsAppender) AddTag(name, value string) {
// AddTag records a single tag name/value pair on the appender by
// delegating to the underlying tags accumulator. Per the
// MetricsAppender interface contract, the appender is only valid for
// use by a single caller at a time. NOTE(review): the byte slices are
// retained, not copied — callers presumably must not mutate them
// afterwards; confirm against the tags accumulator's ownership rules.
func (a *metricsAppender) AddTag(name, value []byte) {
a.tags.append(name, value)
}

Expand Down
13 changes: 7 additions & 6 deletions src/cmd/services/m3coordinator/downsample/tags.go
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@
package downsample

import (
"bytes"
"sort"

"github.com/m3db/m3x/ident"
Expand All @@ -31,8 +32,8 @@ const (
)

type tags struct {
names []string
values []string
names [][]byte
values [][]byte
idx int
nameBuf []byte
valueBuf []byte
Expand All @@ -46,13 +47,13 @@ var (

func newTags() *tags {
return &tags{
names: make([]string, 0, initAllocTagsSliceCapacity),
values: make([]string, 0, initAllocTagsSliceCapacity),
names: make([][]byte, 0, initAllocTagsSliceCapacity),
values: make([][]byte, 0, initAllocTagsSliceCapacity),
idx: -1,
}
}

func (t *tags) append(name, value string) {
func (t *tags) append(name, value []byte) {
t.names = append(t.names, name)
t.values = append(t.values, value)
}
Expand All @@ -67,7 +68,7 @@ func (t *tags) Swap(i, j int) {
}

func (t *tags) Less(i, j int) bool {
return t.names[i] < t.names[j]
return bytes.Compare(t.names[i], t.names[j]) == -1
}

func (t *tags) Next() bool {
Expand Down
7 changes: 6 additions & 1 deletion src/query/api/v1/handler/json/write.go
Original file line number Diff line number Diff line change
Expand Up @@ -90,8 +90,13 @@ func newStorageWriteQuery(req *WriteQuery) (*storage.WriteQuery, error) {
return nil, err
}

tags := make(models.Tags, 0, len(req.Tags))
for n, v := range req.Tags {
tags = tags.AddTag(models.Tag{Name: []byte(n), Value: []byte(v)})
}

return &storage.WriteQuery{
Tags: models.FromMap(req.Tags),
Tags: tags,
Datapoints: ts.Datapoints{
{
Timestamp: parsedTime,
Expand Down
4 changes: 2 additions & 2 deletions src/query/api/v1/handler/prometheus/native/common.go
Original file line number Diff line number Diff line change
Expand Up @@ -174,8 +174,8 @@ func renderResultsJSON(w io.Writer, series []*ts.Series, params models.RequestPa
jw.BeginObjectField("metric")
jw.BeginObject()
for _, t := range s.Tags {
jw.BeginObjectField(t.Name)
jw.WriteString(t.Value)
jw.BeginObjectField(string(t.Name))
jw.WriteString(string(t.Value))
}
jw.EndObject()

Expand Down
8 changes: 4 additions & 4 deletions src/query/api/v1/handler/prometheus/native/common_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -111,12 +111,12 @@ func TestRenderResultsJSON(t *testing.T) {
params := models.RequestParams{}
series := []*ts.Series{
ts.NewSeries("foo", ts.NewFixedStepValues(10*time.Second, 2, 1, start), models.Tags{
models.Tag{Name: "bar", Value: "baz"},
models.Tag{Name: "qux", Value: "qaz"},
models.Tag{Name: []byte("bar"), Value: []byte("baz")},
models.Tag{Name: []byte("qux"), Value: []byte("qaz")},
}),
ts.NewSeries("bar", ts.NewFixedStepValues(10*time.Second, 2, 2, start), models.Tags{
models.Tag{Name: "baz", Value: "bar"},
models.Tag{Name: "qaz", Value: "qux"},
models.Tag{Name: []byte("baz"), Value: []byte("bar")},
models.Tag{Name: []byte("qaz"), Value: []byte("qux")},
}),
}

Expand Down
4 changes: 2 additions & 2 deletions src/query/api/v1/handler/prometheus/remote/test/read.go
Original file line number Diff line number Diff line change
Expand Up @@ -43,8 +43,8 @@ func GeneratePromReadRequest() *prompb.ReadRequest {
EndTimestampMs: time.Now().UnixNano() / int64(time.Millisecond),
Matchers: []*prompb.LabelMatcher{
&prompb.LabelMatcher{
Name: "__name__",
Value: "first",
Name: []byte("__name__"),
Value: []byte("first"),
Type: prompb.LabelMatcher_EQ,
},
},
Expand Down
12 changes: 6 additions & 6 deletions src/query/api/v1/handler/prometheus/remote/test/write.go
Original file line number Diff line number Diff line change
Expand Up @@ -39,9 +39,9 @@ func GeneratePromWriteRequest() *prompb.WriteRequest {
req := &prompb.WriteRequest{
Timeseries: []*prompb.TimeSeries{{
Labels: []*prompb.Label{
{Name: "__name__", Value: "first"},
{Name: "foo", Value: "bar"},
{Name: "biz", Value: "baz"},
{Name: []byte("__name__"), Value: []byte("first")},
{Name: []byte("foo"), Value: []byte("bar")},
{Name: []byte("biz"), Value: []byte("baz")},
},
Samples: []*prompb.Sample{
{Value: 1.0, Timestamp: time.Now().UnixNano() / int64(time.Millisecond)},
Expand All @@ -50,9 +50,9 @@ func GeneratePromWriteRequest() *prompb.WriteRequest {
},
{
Labels: []*prompb.Label{
{Name: "__name__", Value: "second"},
{Name: "foo", Value: "qux"},
{Name: "bar", Value: "baz"},
{Name: []byte("__name__"), Value: []byte("second")},
{Name: []byte("foo"), Value: []byte("qux")},
{Name: []byte("bar"), Value: []byte("baz")},
},
Samples: []*prompb.Sample{
{Value: 3.0, Timestamp: time.Now().UnixNano() / int64(time.Millisecond)},
Expand Down
10 changes: 5 additions & 5 deletions src/query/api/v1/handler/search_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -52,13 +52,13 @@ func generateSearchReq() *storage.FetchQuery {
matchers := models.Matchers{
{
Type: models.MatchEqual,
Name: "foo",
Value: "bar",
Name: []byte("foo"),
Value: []byte("bar"),
},
{
Type: models.MatchEqual,
Name: "biz",
Value: "baz",
Name: []byte("biz"),
Value: []byte("baz"),
},
}
return &storage.FetchQuery{
Expand Down Expand Up @@ -110,7 +110,7 @@ func TestSearchResponse(t *testing.T) {
require.NoError(t, err)

assert.Equal(t, testID, results.Metrics[0].ID)
assert.Equal(t, models.Tags{{Name: "foo", Value: "bar"}}, results.Metrics[0].Tags)
assert.Equal(t, models.Tags{{Name: []byte("foo"), Value: []byte("bar")}}, results.Metrics[0].Tags)
}

func TestSearchEndpoint(t *testing.T) {
Expand Down
8 changes: 7 additions & 1 deletion src/query/benchmark/benchmarker/main/convert_to_prom.go
Original file line number Diff line number Diff line change
Expand Up @@ -161,7 +161,13 @@ func marshalTSDBToProm(opentsdb string) (*prompb.TimeSeries, error) {
if err := json.Unmarshal(data, &m); err != nil {
return nil, err
}
labels := storage.TagsToPromLabels(models.FromMap(m.Tags))

tags := models.Tags{}
for n, v := range m.Tags {
tags = tags.AddTag(models.Tag{Name: []byte(n), Value: []byte(v)})
}

labels := storage.TagsToPromLabels(tags)
samples := metricsPointsToSamples(m.Value, m.Time)
return &prompb.TimeSeries{
Labels: labels,
Expand Down
7 changes: 6 additions & 1 deletion src/query/benchmark/common/parse_json.go
Original file line number Diff line number Diff line change
Expand Up @@ -170,7 +170,12 @@ func id(lowerCaseTags map[string]string, name string) string {
}

func metricsToPromTS(m Metrics) *prompb.TimeSeries {
labels := storage.TagsToPromLabels(models.FromMap(m.Tags))
tags := models.Tags{}
for n, v := range m.Tags {
tags = tags.AddTag(models.Tag{Name: []byte(n), Value: []byte(v)})
}

labels := storage.TagsToPromLabels(tags)
samples := metricsPointsToSamples(m.Value, m.Time)
return &prompb.TimeSeries{
Labels: labels,
Expand Down
4 changes: 2 additions & 2 deletions src/query/benchmark/read/main/read_benchmark.go
Original file line number Diff line number Diff line change
Expand Up @@ -205,8 +205,8 @@ func generateMatchers() []*prompb.LabelMatcher {
for i, id := range ids {
matchers[i] = &prompb.LabelMatcher{
Type: prompb.LabelMatcher_EQ,
Name: "eq",
Value: id,
Name: []byte("eq"),
Value: []byte(id),
}
}
return matchers
Expand Down
2 changes: 1 addition & 1 deletion src/query/functions/aggregation/base.go
Original file line number Diff line number Diff line change
Expand Up @@ -44,7 +44,7 @@ var aggregationFunctions = map[string]aggregationFn{
// NodeParams contains additional parameters required for aggregation ops
type NodeParams struct {
// MatchingTags is the set of tags by which the aggregation groups output series
MatchingTags []string
MatchingTags [][]byte
// Without indicates if series should use only the MatchingTags or if MatchingTags
// should be excluded from grouping
Without bool
Expand Down
Loading