diff --git a/src/cmd/services/m3coordinator/downsample/downsampler.go b/src/cmd/services/m3coordinator/downsample/downsampler.go index 483096c223..30be27f08b 100644 --- a/src/cmd/services/m3coordinator/downsample/downsampler.go +++ b/src/cmd/services/m3coordinator/downsample/downsampler.go @@ -28,7 +28,7 @@ type Downsampler interface { // MetricsAppender is a metrics appender that can build a samples // appender, only valid to use with a single caller at a time. type MetricsAppender interface { - AddTag(name, value string) + AddTag(name, value []byte) SamplesAppender() (SamplesAppender, error) Reset() Finalize() diff --git a/src/cmd/services/m3coordinator/downsample/downsampler_test.go b/src/cmd/services/m3coordinator/downsample/downsampler_test.go index bf2d64f24d..0da1e4d097 100644 --- a/src/cmd/services/m3coordinator/downsample/downsampler_test.go +++ b/src/cmd/services/m3coordinator/downsample/downsampler_test.go @@ -21,6 +21,7 @@ package downsample import ( + "bytes" "testing" "time" @@ -151,7 +152,7 @@ func testDownsamplerAggregation( for _, metric := range testCounterMetrics { appender.Reset() for name, value := range metric.tags { - appender.AddTag(name, value) + appender.AddTag([]byte(name), []byte(value)) } samplesAppender, err := appender.SamplesAppender() @@ -165,7 +166,7 @@ func testDownsamplerAggregation( for _, metric := range testGaugeMetrics { appender.Reset() for name, value := range metric.tags { - appender.AddTag(name, value) + appender.AddTag([]byte(name), []byte(value)) } samplesAppender, err := appender.SamplesAppender() @@ -192,18 +193,27 @@ func testDownsamplerAggregation( writes := testDownsampler.storage.Writes() for _, metric := range testCounterMetrics { write := mustFindWrite(t, writes, metric.tags["__name__"]) - assert.Equal(t, metric.tags, write.Tags.StringMap()) + assert.Equal(t, metric.tags, tagsToStringMap(write.Tags)) require.Equal(t, 1, len(write.Datapoints)) assert.Equal(t, float64(metric.expected), write.Datapoints[0].Value) } for _, metric := range testGaugeMetrics { write := mustFindWrite(t, writes, metric.tags["__name__"]) - assert.Equal(t, metric.tags, write.Tags.StringMap()) + assert.Equal(t, metric.tags, tagsToStringMap(write.Tags)) require.Equal(t, 1, len(write.Datapoints)) assert.Equal(t, float64(metric.expected), write.Datapoints[0].Value) } } +func tagsToStringMap(tags models.Tags) map[string]string { + stringMap := make(map[string]string, len(tags)) + for _, t := range tags { + stringMap[string(t.Name)] = string(t.Value) + } + + return stringMap +} + type testDownsampler struct { opts DownsamplerOptions downsampler Downsampler @@ -292,7 +302,7 @@ func newTestID(t *testing.T, tags map[string]string) id.ID { tagsIter := newTags() for name, value := range tags { - tagsIter.append(name, value) + tagsIter.append([]byte(name), []byte(value)) } tagEncoder := tagEncoderPool.Get() @@ -317,7 +327,7 @@ func mustFindWrite(t *testing.T, writes []*storage.WriteQuery, name string) *sto var write *storage.WriteQuery for _, w := range writes { if t, ok := w.Tags.Get(models.MetricName); ok { - if t == name { + if bytes.Equal(t, []byte(name)) { write = w break } diff --git a/src/cmd/services/m3coordinator/downsample/flush_handler.go b/src/cmd/services/m3coordinator/downsample/flush_handler.go index 4b9523d251..a770cc67e5 100644 --- a/src/cmd/services/m3coordinator/downsample/flush_handler.go +++ b/src/cmd/services/m3coordinator/downsample/flush_handler.go @@ -37,8 +37,8 @@ import ( "github.com/uber-go/tally" ) -const ( - aggregationSuffixTag = "agg" +var ( + 
aggregationSuffixTag = []byte("agg") ) type downsamplerFlushHandler struct { @@ -117,14 +117,14 @@ func (w *downsamplerFlushHandlerWriter) Write( } // Add extra tag since we may need to add an aggregation suffix tag - tags := make(models.Tags, 0, expected+1) + tags := make(models.Tags, 0, expected) for iter.Next() { name, value := iter.Current() - tags = append(tags, models.Tag{Name: string(name), Value: string(value)}) + tags = append(tags, models.Tag{Name: name, Value: value}) } - + if len(chunkSuffix) != 0 { - tags = append(tags, models.Tag{Name: aggregationSuffixTag, Value: string(chunkSuffix)}) + tags = append(tags, models.Tag{Name: aggregationSuffixTag, Value: chunkSuffix}) } err := iter.Err() diff --git a/src/cmd/services/m3coordinator/downsample/metrics_appender.go b/src/cmd/services/m3coordinator/downsample/metrics_appender.go index 8657c7cf6f..17ad957e5e 100644 --- a/src/cmd/services/m3coordinator/downsample/metrics_appender.go +++ b/src/cmd/services/m3coordinator/downsample/metrics_appender.go @@ -48,7 +48,7 @@ type metricsAppenderOptions struct { encodedTagsIteratorPool *encodedTagsIteratorPool } -func (a *metricsAppender) AddTag(name, value string) { +func (a *metricsAppender) AddTag(name, value []byte) { a.tags.append(name, value) } diff --git a/src/cmd/services/m3coordinator/downsample/tags.go b/src/cmd/services/m3coordinator/downsample/tags.go index 390e9ef0b5..b0444952ac 100644 --- a/src/cmd/services/m3coordinator/downsample/tags.go +++ b/src/cmd/services/m3coordinator/downsample/tags.go @@ -21,6 +21,7 @@ package downsample import ( + "bytes" "sort" "github.com/m3db/m3x/ident" @@ -31,8 +32,8 @@ const ( ) type tags struct { - names []string - values []string + names [][]byte + values [][]byte idx int nameBuf []byte valueBuf []byte @@ -46,13 +47,13 @@ var ( func newTags() *tags { return &tags{ - names: make([]string, 0, initAllocTagsSliceCapacity), - values: make([]string, 0, initAllocTagsSliceCapacity), + names: make([][]byte, 0, initAllocTagsSliceCapacity), + values: make([][]byte, 0, initAllocTagsSliceCapacity), idx: -1, } } -func (t *tags) append(name, value string) { +func (t *tags) append(name, value []byte) { t.names = append(t.names, name) t.values = append(t.values, value) } @@ -67,7 +68,7 @@ func (t *tags) Swap(i, j int) { } func (t *tags) Less(i, j int) bool { - return t.names[i] < t.names[j] + return bytes.Compare(t.names[i], t.names[j]) == -1 } func (t *tags) Next() bool { diff --git a/src/query/api/v1/handler/json/write.go b/src/query/api/v1/handler/json/write.go index d3b4eff69b..3d5ab3d43d 100644 --- a/src/query/api/v1/handler/json/write.go +++ b/src/query/api/v1/handler/json/write.go @@ -90,8 +90,13 @@ func newStorageWriteQuery(req *WriteQuery) (*storage.WriteQuery, error) { return nil, err } + tags := make(models.Tags, 0, len(req.Tags)) + for n, v := range req.Tags { + tags = tags.AddTag(models.Tag{Name: []byte(n), Value: []byte(v)}) + } + return &storage.WriteQuery{ - Tags: models.FromMap(req.Tags), + Tags: tags, Datapoints: ts.Datapoints{ { Timestamp: parsedTime, diff --git a/src/query/api/v1/handler/prometheus/native/common.go b/src/query/api/v1/handler/prometheus/native/common.go index 0fee5cbb19..897f18197e 100644 --- a/src/query/api/v1/handler/prometheus/native/common.go +++ b/src/query/api/v1/handler/prometheus/native/common.go @@ -174,8 +174,8 @@ func renderResultsJSON(w io.Writer, series []*ts.Series, params models.RequestPa jw.BeginObjectField("metric") jw.BeginObject() for _, t := range s.Tags { - jw.BeginObjectField(t.Name) - 
jw.WriteString(t.Value) + jw.BeginObjectField(string(t.Name)) + jw.WriteString(string(t.Value)) } jw.EndObject() diff --git a/src/query/api/v1/handler/prometheus/native/common_test.go b/src/query/api/v1/handler/prometheus/native/common_test.go index 8a02eeec95..9444423c05 100644 --- a/src/query/api/v1/handler/prometheus/native/common_test.go +++ b/src/query/api/v1/handler/prometheus/native/common_test.go @@ -111,12 +111,12 @@ func TestRenderResultsJSON(t *testing.T) { params := models.RequestParams{} series := []*ts.Series{ ts.NewSeries("foo", ts.NewFixedStepValues(10*time.Second, 2, 1, start), models.Tags{ - models.Tag{Name: "bar", Value: "baz"}, - models.Tag{Name: "qux", Value: "qaz"}, + models.Tag{Name: []byte("bar"), Value: []byte("baz")}, + models.Tag{Name: []byte("qux"), Value: []byte("qaz")}, }), ts.NewSeries("bar", ts.NewFixedStepValues(10*time.Second, 2, 2, start), models.Tags{ - models.Tag{Name: "baz", Value: "bar"}, - models.Tag{Name: "qaz", Value: "qux"}, + models.Tag{Name: []byte("baz"), Value: []byte("bar")}, + models.Tag{Name: []byte("qaz"), Value: []byte("qux")}, }), } diff --git a/src/query/api/v1/handler/prometheus/remote/test/read.go b/src/query/api/v1/handler/prometheus/remote/test/read.go index 7f779b0064..98eec49b6d 100644 --- a/src/query/api/v1/handler/prometheus/remote/test/read.go +++ b/src/query/api/v1/handler/prometheus/remote/test/read.go @@ -43,8 +43,8 @@ func GeneratePromReadRequest() *prompb.ReadRequest { EndTimestampMs: time.Now().UnixNano() / int64(time.Millisecond), Matchers: []*prompb.LabelMatcher{ &prompb.LabelMatcher{ - Name: "__name__", - Value: "first", + Name: []byte("__name__"), + Value: []byte("first"), Type: prompb.LabelMatcher_EQ, }, }, diff --git a/src/query/api/v1/handler/prometheus/remote/test/write.go b/src/query/api/v1/handler/prometheus/remote/test/write.go index e5531b8f38..8b62cfd061 100644 --- a/src/query/api/v1/handler/prometheus/remote/test/write.go +++ b/src/query/api/v1/handler/prometheus/remote/test/write.go @@ -39,9 +39,9 @@ func GeneratePromWriteRequest() *prompb.WriteRequest { req := &prompb.WriteRequest{ Timeseries: []*prompb.TimeSeries{{ Labels: []*prompb.Label{ - {Name: "__name__", Value: "first"}, - {Name: "foo", Value: "bar"}, - {Name: "biz", Value: "baz"}, + {Name: []byte("__name__"), Value: []byte("first")}, + {Name: []byte("foo"), Value: []byte("bar")}, + {Name: []byte("biz"), Value: []byte("baz")}, }, Samples: []*prompb.Sample{ {Value: 1.0, Timestamp: time.Now().UnixNano() / int64(time.Millisecond)}, @@ -50,9 +50,9 @@ func GeneratePromWriteRequest() *prompb.WriteRequest { }, { Labels: []*prompb.Label{ - {Name: "__name__", Value: "second"}, - {Name: "foo", Value: "qux"}, - {Name: "bar", Value: "baz"}, + {Name: []byte("__name__"), Value: []byte("second")}, + {Name: []byte("foo"), Value: []byte("qux")}, + {Name: []byte("bar"), Value: []byte("baz")}, }, Samples: []*prompb.Sample{ {Value: 3.0, Timestamp: time.Now().UnixNano() / int64(time.Millisecond)}, diff --git a/src/query/api/v1/handler/search_test.go b/src/query/api/v1/handler/search_test.go index 1c0399fa51..6309761896 100644 --- a/src/query/api/v1/handler/search_test.go +++ b/src/query/api/v1/handler/search_test.go @@ -52,13 +52,13 @@ func generateSearchReq() *storage.FetchQuery { matchers := models.Matchers{ { Type: models.MatchEqual, - Name: "foo", - Value: "bar", + Name: []byte("foo"), + Value: []byte("bar"), }, { Type: models.MatchEqual, - Name: "biz", - Value: "baz", + Name: []byte("biz"), + Value: []byte("baz"), }, } return &storage.FetchQuery{ @@ -110,7 +110,7 
@@ func TestSearchResponse(t *testing.T) { require.NoError(t, err) assert.Equal(t, testID, results.Metrics[0].ID) - assert.Equal(t, models.Tags{{Name: "foo", Value: "bar"}}, results.Metrics[0].Tags) + assert.Equal(t, models.Tags{{Name: []byte("foo"), Value: []byte("bar")}}, results.Metrics[0].Tags) } func TestSearchEndpoint(t *testing.T) { diff --git a/src/query/benchmark/benchmarker/main/convert_to_prom.go b/src/query/benchmark/benchmarker/main/convert_to_prom.go index c2498144b8..988d68d2a7 100644 --- a/src/query/benchmark/benchmarker/main/convert_to_prom.go +++ b/src/query/benchmark/benchmarker/main/convert_to_prom.go @@ -161,7 +161,13 @@ func marshalTSDBToProm(opentsdb string) (*prompb.TimeSeries, error) { if err := json.Unmarshal(data, &m); err != nil { return nil, err } - labels := storage.TagsToPromLabels(models.FromMap(m.Tags)) + + tags := models.Tags{} + for n, v := range m.Tags { + tags = tags.AddTag(models.Tag{Name: []byte(n), Value: []byte(v)}) + } + + labels := storage.TagsToPromLabels(tags) samples := metricsPointsToSamples(m.Value, m.Time) return &prompb.TimeSeries{ Labels: labels, diff --git a/src/query/benchmark/common/parse_json.go b/src/query/benchmark/common/parse_json.go index fd98cf09a0..fa0555534e 100644 --- a/src/query/benchmark/common/parse_json.go +++ b/src/query/benchmark/common/parse_json.go @@ -170,7 +170,12 @@ func id(lowerCaseTags map[string]string, name string) string { } func metricsToPromTS(m Metrics) *prompb.TimeSeries { - labels := storage.TagsToPromLabels(models.FromMap(m.Tags)) + tags := models.Tags{} + for n, v := range m.Tags { + tags = tags.AddTag(models.Tag{Name: []byte(n), Value: []byte(v)}) + } + + labels := storage.TagsToPromLabels(tags) samples := metricsPointsToSamples(m.Value, m.Time) return &prompb.TimeSeries{ Labels: labels, diff --git a/src/query/benchmark/read/main/read_benchmark.go b/src/query/benchmark/read/main/read_benchmark.go index 775e30f37c..c40a13c41e 100644 --- a/src/query/benchmark/read/main/read_benchmark.go +++ b/src/query/benchmark/read/main/read_benchmark.go @@ -205,8 +205,8 @@ func generateMatchers() []*prompb.LabelMatcher { for i, id := range ids { matchers[i] = &prompb.LabelMatcher{ Type: prompb.LabelMatcher_EQ, - Name: "eq", - Value: id, + Name: []byte("eq"), + Value: []byte(id), } } return matchers diff --git a/src/query/functions/aggregation/base.go b/src/query/functions/aggregation/base.go index 129c6d2f81..58518163ad 100644 --- a/src/query/functions/aggregation/base.go +++ b/src/query/functions/aggregation/base.go @@ -44,7 +44,7 @@ var aggregationFunctions = map[string]aggregationFn{ // NodeParams contains additional parameters required for aggregation ops type NodeParams struct { // MatchingTags is the set of tags by which the aggregation groups output series - MatchingTags []string + MatchingTags [][]byte // Without indicates if series should use only the MatchingTags or if MatchingTags // should be excluded from grouping Without bool diff --git a/src/query/functions/aggregation/base_test.go b/src/query/functions/aggregation/base_test.go index 2cf3b9e548..4a2de3ea21 100644 --- a/src/query/functions/aggregation/base_test.go +++ b/src/query/functions/aggregation/base_test.go @@ -38,12 +38,12 @@ import ( var ( seriesMetas = []block.SeriesMeta{ - {Tags: models.FromMap(map[string]string{"a": "1", "d": "4"})}, - {Tags: models.FromMap(map[string]string{"a": "1", "d": "4"})}, - {Tags: models.FromMap(map[string]string{"a": "1", "b": "2", "d": "4"})}, - {Tags: models.FromMap(map[string]string{"a": "2", "b": "2", "d": 
"4"})}, - {Tags: models.FromMap(map[string]string{"b": "2", "d": "4"})}, - {Tags: models.FromMap(map[string]string{"c": "3", "d": "4"})}, + {Tags: test.StringTagsToTags(test.StringTags{{"a", "1"}, {"d", "4"}})}, + {Tags: test.StringTagsToTags(test.StringTags{{"a", "1"}, {"d", "4"}})}, + {Tags: test.StringTagsToTags(test.StringTags{{"a", "1"}, {"b", "2"}, {"d", "4"}})}, + {Tags: test.StringTagsToTags(test.StringTags{{"a", "2"}, {"b", "2"}, {"d", "4"}})}, + {Tags: test.StringTagsToTags(test.StringTags{{"b", "2"}, {"d", "4"}})}, + {Tags: test.StringTagsToTags(test.StringTags{{"c", "3"}, {"d", "4"}})}, } v = [][]float64{ {0, math.NaN(), 2, 3, 4}, @@ -72,7 +72,7 @@ func processAggregationOp(t *testing.T, op parser.Params) *executor.SinkNode { func TestFunctionFilteringWithA(t *testing.T) { op, err := NewAggregationOp(StandardDeviationType, NodeParams{ - MatchingTags: []string{"a"}, Without: false, + MatchingTags: [][]byte{[]byte("a")}, Without: false, }) require.NoError(t, err) sink := processAggregationOp(t, op) @@ -86,11 +86,11 @@ func TestFunctionFilteringWithA(t *testing.T) { } expectedMetas := []block.SeriesMeta{ - {Name: StandardDeviationType, Tags: models.FromMap(map[string]string{"a": "1"})}, - {Name: StandardDeviationType, Tags: models.FromMap(map[string]string{"a": "2"})}, - {Name: StandardDeviationType, Tags: models.FromMap(map[string]string{})}, + {Name: StandardDeviationType, Tags: models.Tags{{Name: []byte("a"), Value: []byte("1")}}}, + {Name: StandardDeviationType, Tags: models.Tags{{Name: []byte("a"), Value: []byte("2")}}}, + {Name: StandardDeviationType, Tags: models.Tags{}}, } - expectedMetaTags := models.FromMap(map[string]string{}) + expectedMetaTags := models.Tags{} test.CompareValues(t, sink.Metas, expectedMetas, sink.Values, expected) assert.Equal(t, bounds, sink.Meta.Bounds) @@ -99,7 +99,7 @@ func TestFunctionFilteringWithA(t *testing.T) { func TestFunctionFilteringWithoutA(t *testing.T) { op, err := NewAggregationOp(StandardDeviationType, NodeParams{ - MatchingTags: []string{"a"}, Without: true, + MatchingTags: [][]byte{[]byte("a")}, Without: true, }) require.NoError(t, err) sink := processAggregationOp(t, op) @@ -113,12 +113,12 @@ func TestFunctionFilteringWithoutA(t *testing.T) { } expectedMetas := []block.SeriesMeta{ - {Name: StandardDeviationType, Tags: models.FromMap(map[string]string{})}, - {Name: StandardDeviationType, Tags: models.FromMap(map[string]string{"b": "2"})}, - {Name: StandardDeviationType, Tags: models.FromMap(map[string]string{"c": "3"})}, + {Name: StandardDeviationType, Tags: models.Tags{}}, + {Name: StandardDeviationType, Tags: models.Tags{{Name: []byte("b"), Value: []byte("2")}}}, + {Name: StandardDeviationType, Tags: models.Tags{{Name: []byte("c"), Value: []byte("3")}}}, } - expectedMetaTags := models.FromMap(map[string]string{"d": "4"}) + expectedMetaTags := models.Tags{{Name: []byte("d"), Value: []byte("4")}} test.CompareValues(t, sink.Metas, expectedMetas, sink.Values, expected) assert.Equal(t, bounds, sink.Meta.Bounds) assert.Equal(t, expectedMetaTags, sink.Meta.Tags) @@ -126,7 +126,7 @@ func TestFunctionFilteringWithoutA(t *testing.T) { func TestFunctionFilteringWithD(t *testing.T) { op, err := NewAggregationOp(StandardDeviationType, NodeParams{ - MatchingTags: []string{"d"}, Without: false, + MatchingTags: [][]byte{[]byte("d")}, Without: false, }) require.NoError(t, err) sink := processAggregationOp(t, op) @@ -136,10 +136,10 @@ func TestFunctionFilteringWithD(t *testing.T) { } expectedMetas := []block.SeriesMeta{ - {Name: 
StandardDeviationType, Tags: models.FromMap(map[string]string{})}, + {Name: StandardDeviationType, Tags: models.Tags{}}, } - expectedMetaTags := models.FromMap(map[string]string{"d": "4"}) + expectedMetaTags := models.Tags{{Name: []byte("d"), Value: []byte("4")}} test.CompareValues(t, sink.Metas, expectedMetas, sink.Values, expected) assert.Equal(t, bounds, sink.Meta.Bounds) assert.Equal(t, expectedMetaTags, sink.Meta.Tags) @@ -147,7 +147,7 @@ func TestFunctionFilteringWithD(t *testing.T) { func TestFunctionFilteringWithoutD(t *testing.T) { op, err := NewAggregationOp(StandardDeviationType, NodeParams{ - MatchingTags: []string{"d"}, Without: true, + MatchingTags: [][]byte{[]byte("d")}, Without: true, }) require.NoError(t, err) sink := processAggregationOp(t, op) @@ -166,11 +166,11 @@ func TestFunctionFilteringWithoutD(t *testing.T) { } expectedMetas := []block.SeriesMeta{ - {Name: StandardDeviationType, Tags: models.FromMap(map[string]string{"a": "1"})}, - {Name: StandardDeviationType, Tags: models.FromMap(map[string]string{"a": "1", "b": "2"})}, - {Name: StandardDeviationType, Tags: models.FromMap(map[string]string{"a": "2", "b": "2"})}, - {Name: StandardDeviationType, Tags: models.FromMap(map[string]string{"b": "2"})}, - {Name: StandardDeviationType, Tags: models.FromMap(map[string]string{"c": "3"})}, + {Name: StandardDeviationType, Tags: test.StringTagsToTags(test.StringTags{{"a", "1"}})}, + {Name: StandardDeviationType, Tags: test.StringTagsToTags(test.StringTags{{"a", "1"}, {"b", "2"}})}, + {Name: StandardDeviationType, Tags: test.StringTagsToTags(test.StringTags{{"a", "2"}, {"b", "2"}})}, + {Name: StandardDeviationType, Tags: test.StringTagsToTags(test.StringTags{{"b", "2"}})}, + {Name: StandardDeviationType, Tags: test.StringTagsToTags(test.StringTags{{"c", "3"}})}, } expectedMetaTags := models.Tags{} diff --git a/src/query/functions/aggregation/count_values.go b/src/query/functions/aggregation/count_values.go index 78e1a0fcf1..1a117832c9 100644 --- a/src/query/functions/aggregation/count_values.go +++ b/src/query/functions/aggregation/count_values.go @@ -194,8 +194,8 @@ func (n *countValuesNode) Process(ID parser.NodeID, b block.Block) error { blockMetas[v+previousBucketBlockIndex] = block.SeriesMeta{ Name: n.op.OpType(), Tags: metas[bucketIndex].Tags.Clone().AddTag(models.Tag{ - Name: n.op.params.StringParameter, - Value: utils.FormatFloat(k), + Name: []byte(n.op.params.StringParameter), + Value: utils.FormatFloatToBytes(k), }), } } diff --git a/src/query/functions/aggregation/count_values_test.go b/src/query/functions/aggregation/count_values_test.go index 0dcfc2a2d8..6ef2a3cd3e 100644 --- a/src/query/functions/aggregation/count_values_test.go +++ b/src/query/functions/aggregation/count_values_test.go @@ -90,10 +90,10 @@ func processCountValuesOp( var ( simpleMetas = []block.SeriesMeta{ - {Tags: models.Tags{{Name: "a", Value: "1"}, {Name: "b", Value: "2"}}}, - {Tags: models.Tags{{Name: "a", Value: "1"}, {Name: "b", Value: "2"}}}, - {Tags: models.Tags{{Name: "a", Value: "1"}, {Name: "b", Value: "3"}}}, - {Tags: models.Tags{{Name: "a", Value: "1"}, {Name: "b", Value: "3"}}}, + {Tags: models.Tags{{Name: []byte("a"), Value: []byte("1")}, {Name: []byte("b"), Value: []byte("2")}}}, + {Tags: models.Tags{{Name: []byte("a"), Value: []byte("1")}, {Name: []byte("b"), Value: []byte("2")}}}, + {Tags: models.Tags{{Name: []byte("a"), Value: []byte("1")}, {Name: []byte("b"), Value: []byte("3")}}}, + {Tags: models.Tags{{Name: []byte("a"), Value: []byte("1")}, {Name: []byte("b"), Value: 
[]byte("3")}}}, } simpleVals = [][]float64{ @@ -117,14 +117,14 @@ func TestSimpleProcessCountValuesFunctionUnfiltered(t *testing.T) { // Double check expected tags is the same length as expected values require.Equal(t, len(expectedTags), len(expected)) assert.Equal(t, bounds, sink.Meta.Bounds) - assert.Equal(t, models.Tags{{Name: tagName, Value: "0"}}, sink.Meta.Tags) + assert.Equal(t, models.Tags{{Name: []byte(tagName), Value: []byte("0")}}, sink.Meta.Tags) test.CompareValues(t, sink.Metas, tagsToSeriesMeta(expectedTags), sink.Values, expected) } func TestSimpleProcessCountValuesFunctionFilteringWithoutA(t *testing.T) { tagName := "tag-name" op, err := NewCountValuesOp(CountValuesType, NodeParams{ - MatchingTags: []string{"a"}, Without: true, StringParameter: tagName, + MatchingTags: [][]byte{[]byte("a")}, Without: true, StringParameter: tagName, }) require.NoError(t, err) sink := processCountValuesOp(t, op, simpleMetas, simpleVals) @@ -133,28 +133,28 @@ func TestSimpleProcessCountValuesFunctionFilteringWithoutA(t *testing.T) { {math.NaN(), 2, 2, 2, 2}, } expectedTags := []models.Tags{ - {{Name: "b", Value: "2"}}, - {{Name: "b", Value: "3"}}, + {{Name: []byte("b"), Value: []byte("2")}}, + {{Name: []byte("b"), Value: []byte("3")}}, } // Double check expected tags is the same length as expected values require.Equal(t, len(expectedTags), len(expected)) assert.Equal(t, bounds, sink.Meta.Bounds) - assert.Equal(t, models.Tags{{Name: tagName, Value: "0"}}, sink.Meta.Tags) + assert.Equal(t, models.Tags{{Name: []byte(tagName), Value: []byte("0")}}, sink.Meta.Tags) test.CompareValues(t, sink.Metas, tagsToSeriesMeta(expectedTags), sink.Values, expected) } func TestCustomProcessCountValuesFunctionFilteringWithoutA(t *testing.T) { tagName := "tag-name" op, err := NewCountValuesOp(CountValuesType, NodeParams{ - MatchingTags: []string{"a"}, Without: true, StringParameter: tagName, + MatchingTags: [][]byte{[]byte("a")}, Without: true, StringParameter: tagName, }) require.NoError(t, err) ms := []block.SeriesMeta{ - {Tags: models.Tags{{Name: "a", Value: "1"}, {Name: "b", Value: "2"}}}, - {Tags: models.Tags{{Name: "a", Value: "1"}, {Name: "b", Value: "2"}}}, - {Tags: models.Tags{{Name: "a", Value: "1"}, {Name: "b", Value: "3"}}}, - {Tags: models.Tags{{Name: "a", Value: "1"}, {Name: "b", Value: "3"}}}, + {Tags: models.Tags{{Name: []byte("a"), Value: []byte("1")}, {Name: []byte("b"), Value: []byte("2")}}}, + {Tags: models.Tags{{Name: []byte("a"), Value: []byte("1")}, {Name: []byte("b"), Value: []byte("2")}}}, + {Tags: models.Tags{{Name: []byte("a"), Value: []byte("1")}, {Name: []byte("b"), Value: []byte("3")}}}, + {Tags: models.Tags{{Name: []byte("a"), Value: []byte("1")}, {Name: []byte("b"), Value: []byte("3")}}}, } vs := [][]float64{ @@ -178,15 +178,15 @@ func TestCustomProcessCountValuesFunctionFilteringWithoutA(t *testing.T) { } expectedTags := []models.Tags{ - {{Name: "b", Value: "2"}, {Name: tagName, Value: "0"}}, - {{Name: "b", Value: "2"}, {Name: tagName, Value: "1"}}, - {{Name: "b", Value: "2"}, {Name: tagName, Value: "2"}}, - {{Name: "b", Value: "2"}, {Name: tagName, Value: "3"}}, - - {{Name: "b", Value: "3"}, {Name: tagName, Value: "0"}}, - {{Name: "b", Value: "3"}, {Name: tagName, Value: "1"}}, - {{Name: "b", Value: "3"}, {Name: tagName, Value: "2"}}, - {{Name: "b", Value: "3"}, {Name: tagName, Value: "3"}}, + {{Name: []byte("b"), Value: []byte("2")}, {Name: []byte(tagName), Value: []byte("0")}}, + {{Name: []byte("b"), Value: []byte("2")}, {Name: []byte(tagName), Value: []byte("1")}}, + {{Name: 
[]byte("b"), Value: []byte("2")}, {Name: []byte(tagName), Value: []byte("2")}}, + {{Name: []byte("b"), Value: []byte("2")}, {Name: []byte(tagName), Value: []byte("3")}}, + + {{Name: []byte("b"), Value: []byte("3")}, {Name: []byte(tagName), Value: []byte("0")}}, + {{Name: []byte("b"), Value: []byte("3")}, {Name: []byte(tagName), Value: []byte("1")}}, + {{Name: []byte("b"), Value: []byte("3")}, {Name: []byte(tagName), Value: []byte("2")}}, + {{Name: []byte("b"), Value: []byte("3")}, {Name: []byte(tagName), Value: []byte("3")}}, } // Double check expected tags is the same length as expected values @@ -199,7 +199,7 @@ func TestCustomProcessCountValuesFunctionFilteringWithoutA(t *testing.T) { func TestSimpleProcessCountValuesFunctionFilteringWithA(t *testing.T) { tagName := "0_tag-name" op, err := NewCountValuesOp(CountValuesType, NodeParams{ - MatchingTags: []string{"a"}, Without: false, StringParameter: tagName, + MatchingTags: [][]byte{[]byte("a")}, Without: false, StringParameter: tagName, }) require.NoError(t, err) sink := processCountValuesOp(t, op, simpleMetas, simpleVals) @@ -209,14 +209,15 @@ func TestSimpleProcessCountValuesFunctionFilteringWithA(t *testing.T) { // Double check expected tags is the same length as expected values require.Equal(t, len(expectedTags), len(expected)) assert.Equal(t, bounds, sink.Meta.Bounds) - assert.Equal(t, models.Tags{{Name: tagName, Value: "0"}, {Name: "a", Value: "1"}}, sink.Meta.Tags) + assert.Equal(t, models.Tags{{Name: []byte(tagName), Value: []byte("0")}, + {Name: []byte("a"), Value: []byte("1")}}, sink.Meta.Tags) test.CompareValues(t, sink.Metas, tagsToSeriesMeta(expectedTags), sink.Values, expected) } func TestProcessCountValuesFunctionFilteringWithoutA(t *testing.T) { tagName := "tag-name" op, err := NewCountValuesOp(CountValuesType, NodeParams{ - MatchingTags: []string{"a"}, Without: true, StringParameter: tagName, + MatchingTags: [][]byte{[]byte("a")}, Without: true, StringParameter: tagName, }) require.NoError(t, err) sink := processCountValuesOp(t, op, seriesMetas, v) @@ -258,42 +259,42 @@ func TestProcessCountValuesFunctionFilteringWithoutA(t *testing.T) { expectedTags := []models.Tags{ // No shared values between series 1 and 2, but two NaNs - {{Name: tagName, Value: "0"}}, - {{Name: tagName, Value: "6"}}, - {{Name: tagName, Value: "2"}}, - {{Name: tagName, Value: "7"}}, - {{Name: tagName, Value: "3"}}, - {{Name: tagName, Value: "8"}}, - {{Name: tagName, Value: "4"}}, - {{Name: tagName, Value: "9"}}, + {{Name: []byte(tagName), Value: []byte("0")}}, + {{Name: []byte(tagName), Value: []byte("6")}}, + {{Name: []byte(tagName), Value: []byte("2")}}, + {{Name: []byte(tagName), Value: []byte("7")}}, + {{Name: []byte(tagName), Value: []byte("3")}}, + {{Name: []byte(tagName), Value: []byte("8")}}, + {{Name: []byte(tagName), Value: []byte("4")}}, + {{Name: []byte(tagName), Value: []byte("9")}}, // One shared value between series 3, 4 and 5, - {{Name: "b", Value: "2"}, {Name: tagName, Value: "10"}}, - {{Name: "b", Value: "2"}, {Name: tagName, Value: "50"}}, - {{Name: "b", Value: "2"}, {Name: tagName, Value: "100"}}, - {{Name: "b", Value: "2"}, {Name: tagName, Value: "20"}}, - {{Name: "b", Value: "2"}, {Name: tagName, Value: "60"}}, - {{Name: "b", Value: "2"}, {Name: tagName, Value: "200"}}, - {{Name: "b", Value: "2"}, {Name: tagName, Value: "30"}}, - {{Name: "b", Value: "2"}, {Name: tagName, Value: "70"}}, - {{Name: "b", Value: "2"}, {Name: tagName, Value: "300"}}, - {{Name: "b", Value: "2"}, {Name: tagName, Value: "40"}}, - {{Name: "b", Value: 
"2"}, {Name: tagName, Value: "80"}}, - {{Name: "b", Value: "2"}, {Name: tagName, Value: "400"}}, - {{Name: "b", Value: "2"}, {Name: tagName, Value: "90"}}, - {{Name: "b", Value: "2"}, {Name: tagName, Value: "500"}}, + {{Name: []byte("b"), Value: []byte("2")}, {Name: []byte(tagName), Value: []byte("10")}}, + {{Name: []byte("b"), Value: []byte("2")}, {Name: []byte(tagName), Value: []byte("50")}}, + {{Name: []byte("b"), Value: []byte("2")}, {Name: []byte(tagName), Value: []byte("100")}}, + {{Name: []byte("b"), Value: []byte("2")}, {Name: []byte(tagName), Value: []byte("20")}}, + {{Name: []byte("b"), Value: []byte("2")}, {Name: []byte(tagName), Value: []byte("60")}}, + {{Name: []byte("b"), Value: []byte("2")}, {Name: []byte(tagName), Value: []byte("200")}}, + {{Name: []byte("b"), Value: []byte("2")}, {Name: []byte(tagName), Value: []byte("30")}}, + {{Name: []byte("b"), Value: []byte("2")}, {Name: []byte(tagName), Value: []byte("70")}}, + {{Name: []byte("b"), Value: []byte("2")}, {Name: []byte(tagName), Value: []byte("300")}}, + {{Name: []byte("b"), Value: []byte("2")}, {Name: []byte(tagName), Value: []byte("40")}}, + {{Name: []byte("b"), Value: []byte("2")}, {Name: []byte(tagName), Value: []byte("80")}}, + {{Name: []byte("b"), Value: []byte("2")}, {Name: []byte(tagName), Value: []byte("400")}}, + {{Name: []byte("b"), Value: []byte("2")}, {Name: []byte(tagName), Value: []byte("90")}}, + {{Name: []byte("b"), Value: []byte("2")}, {Name: []byte(tagName), Value: []byte("500")}}, // No shared values in series 6 - {{Name: "c", Value: "3"}, {Name: tagName, Value: "600"}}, - {{Name: "c", Value: "3"}, {Name: tagName, Value: "700"}}, - {{Name: "c", Value: "3"}, {Name: tagName, Value: "800"}}, - {{Name: "c", Value: "3"}, {Name: tagName, Value: "900"}}, - {{Name: "c", Value: "3"}, {Name: tagName, Value: "1000"}}, + {{Name: []byte("c"), Value: []byte("3")}, {Name: []byte(tagName), Value: []byte("600")}}, + {{Name: []byte("c"), Value: []byte("3")}, {Name: []byte(tagName), Value: []byte("700")}}, + {{Name: []byte("c"), Value: []byte("3")}, {Name: []byte(tagName), Value: []byte("800")}}, + {{Name: []byte("c"), Value: []byte("3")}, {Name: []byte(tagName), Value: []byte("900")}}, + {{Name: []byte("c"), Value: []byte("3")}, {Name: []byte(tagName), Value: []byte("1000")}}, } // Double check expected tags is the same length as expected values require.Equal(t, len(expectedTags), len(expected)) assert.Equal(t, bounds, sink.Meta.Bounds) - assert.Equal(t, models.Tags{{Name: "d", Value: "4"}}, sink.Meta.Tags) + assert.Equal(t, models.Tags{{Name: []byte("d"), Value: []byte("4")}}, sink.Meta.Tags) test.CompareValues(t, sink.Metas, tagsToSeriesMeta(expectedTags), sink.Values, expected) } diff --git a/src/query/functions/aggregation/quantile_test.go b/src/query/functions/aggregation/quantile_test.go index c75f7ae251..9f6e1dae88 100644 --- a/src/query/functions/aggregation/quantile_test.go +++ b/src/query/functions/aggregation/quantile_test.go @@ -133,7 +133,7 @@ func TestQuantileCreationFn(t *testing.T) { func TestQuantileFunctionFilteringWithoutA(t *testing.T) { op, err := NewAggregationOp(QuantileType, NodeParams{ - MatchingTags: []string{"a"}, Without: true, Parameter: 0.6, + MatchingTags: [][]byte{[]byte("a")}, Without: true, Parameter: 0.6, }) require.NoError(t, err) sink := processAggregationOp(t, op) @@ -148,10 +148,10 @@ func TestQuantileFunctionFilteringWithoutA(t *testing.T) { expectedMetas := []block.SeriesMeta{ {Name: QuantileType, Tags: models.Tags{}}, - {Name: QuantileType, Tags: models.Tags{{Name: "b", Value: 
"2"}}}, - {Name: QuantileType, Tags: models.Tags{{Name: "c", Value: "3"}}}, + {Name: QuantileType, Tags: models.Tags{{Name: []byte("b"), Value: []byte("2")}}}, + {Name: QuantileType, Tags: models.Tags{{Name: []byte("c"), Value: []byte("3")}}}, } - expectedMetaTags := models.Tags{{Name: "d", Value: "4"}} + expectedMetaTags := models.Tags{{Name: []byte("d"), Value: []byte("4")}} test.CompareValues(t, sink.Metas, expectedMetas, sink.Values, expected) assert.Equal(t, bounds, sink.Meta.Bounds) diff --git a/src/query/functions/aggregation/take_test.go b/src/query/functions/aggregation/take_test.go index 277750fb5b..801cee10a4 100644 --- a/src/query/functions/aggregation/take_test.go +++ b/src/query/functions/aggregation/take_test.go @@ -69,7 +69,7 @@ func processTakeOp(t *testing.T, op parser.Params) *executor.SinkNode { func TestTakeBottomFunctionFilteringWithoutA(t *testing.T) { op, err := NewTakeOp(BottomKType, NodeParams{ - MatchingTags: []string{"a"}, Without: true, Parameter: 1, + MatchingTags: [][]byte{[]byte("a")}, Without: true, Parameter: 1, }) require.NoError(t, err) sink := processTakeOp(t, op) @@ -93,7 +93,7 @@ func TestTakeBottomFunctionFilteringWithoutA(t *testing.T) { func TestTakeTopFunctionFilteringWithoutA(t *testing.T) { op, err := NewTakeOp(TopKType, NodeParams{ - MatchingTags: []string{"a"}, Without: true, Parameter: 1, + MatchingTags: [][]byte{[]byte("a")}, Without: true, Parameter: 1, }) require.NoError(t, err) sink := processTakeOp(t, op) @@ -117,7 +117,7 @@ func TestTakeTopFunctionFilteringWithoutA(t *testing.T) { func TestTakeTopFunctionFilteringWithoutALessThanOne(t *testing.T) { op, err := NewTakeOp(TopKType, NodeParams{ - MatchingTags: []string{"a"}, Without: true, Parameter: -1, + MatchingTags: [][]byte{[]byte("a")}, Without: true, Parameter: -1, }) require.NoError(t, err) sink := processTakeOp(t, op) diff --git a/src/query/functions/binary/common.go b/src/query/functions/binary/common.go index 6e4194974a..3bd1f49fd3 100644 --- a/src/query/functions/binary/common.go +++ b/src/query/functions/binary/common.go @@ -21,6 +21,7 @@ package binary import ( + "bytes" "errors" "github.com/m3db/m3/src/query/block" @@ -49,7 +50,7 @@ type VectorMatching struct { Card VectorMatchCardinality // MatchingLabels contains the labels which define equality of a pair of // elements from the Vectors. - MatchingLabels []string + MatchingLabels [][]byte // On includes the given label names from matching, // rather than excluding them. On bool @@ -60,7 +61,7 @@ type VectorMatching struct { // HashFunc returns a function that calculates the signature for a metric // ignoring the provided labels. If on, then the given labels are only used instead. -func HashFunc(on bool, names ...string) func(models.Tags) uint64 { +func HashFunc(on bool, names ...[]byte) func(models.Tags) uint64 { if on { return func(tags models.Tags) uint64 { return tags.IDWithKeys(names...) 
} } @@ -78,6 +79,15 @@ var ( errNoMatching = errors.New("vector matching parameters must be provided for binary operations between series") ) +func tagMap(t models.Tags) map[string]models.Tag { + m := make(map[string]models.Tag, len(t)) + for _, tag := range t { + m[string(tag.Name)] = tag + } + + return m +} + func combineMetaAndSeriesMeta( meta, otherMeta block.Metadata, seriesMeta, otherSeriesMeta []block.SeriesMeta, @@ -91,26 +101,27 @@ func combineMetaAndSeriesMeta( // NB (arnikola): mutating tags in `meta` to avoid allocations leftTags := meta.Tags - otherTags := otherMeta.Tags.TagMap() + otherTags := tagMap(otherMeta.Tags) metaTagsToAdd := make(models.Tags, 0, len(leftTags)) otherMetaTagsToAdd := make(models.Tags, 0, len(otherTags)) tags := models.EmptyTags() for _, t := range leftTags { - if otherTag, ok := otherTags[t.Name]; ok { - if t.Value != otherTag.Value { + name := string(t.Name) + if otherTag, ok := otherTags[name]; ok { + if bytes.Equal(t.Value, otherTag.Value) { + tags = append(tags, t) + } else { // If both metas have the same common tag with different // values explicitly add it to each seriesMeta. metaTagsToAdd = append(metaTagsToAdd, t) otherMetaTagsToAdd = append(otherMetaTagsToAdd, otherTag) - } else { - tags = append(tags, t) } // NB(arnikola): delete common tag from otherTags as it // has already been handled - delete(otherTags, t.Name) + delete(otherTags, name) } else { // Tag does not exist on otherMeta explicitly add it to each seriesMeta metaTagsToAdd = append(metaTagsToAdd, t) diff --git a/src/query/functions/binary/common_test.go b/src/query/functions/binary/common_test.go index a7150e994e..7a35f97a06 100644 --- a/src/query/functions/binary/common_test.go +++ b/src/query/functions/binary/common_test.go @@ -27,6 +27,7 @@ import ( "github.com/m3db/m3/src/query/block" "github.com/m3db/m3/src/query/models" + "github.com/m3db/m3/src/query/test" "github.com/golang/mock/gomock" "github.com/stretchr/testify/assert" @@ -123,98 +124,106 @@ func TestAddAtIndicesErrors(t *testing.T) { var combineMetaAndSeriesMetaTests = []struct { name string - tags, otherTags, expectedTags models.Tags - seriesTags, otherSeriesTags models.Tags - expectedSeriesTags, expectedOtherSeriesTags models.Tags + tags, otherTags, expectedTags test.StringTags + seriesTags, otherSeriesTags test.StringTags + expectedSeriesTags, expectedOtherSeriesTags test.StringTags }{ { "no right tags", - models.Tags{{"a", "b"}}, - models.Tags{}, - models.Tags{}, - - models.Tags{{"c", "d"}}, - models.Tags{{"1", "2"}}, - models.Tags{{"a", "b"}, {"c", "d"}}, - models.Tags{{"1", "2"}}, + test.StringTags{{"a", "b"}}, + test.StringTags{}, + test.StringTags{}, + + test.StringTags{{"c", "d"}}, + test.StringTags{{"1", "2"}}, + test.StringTags{{"a", "b"}, {"c", "d"}}, + test.StringTags{{"1", "2"}}, }, { "no left tags", - models.Tags{}, - models.Tags{{"a", "b"}}, - models.Tags{}, - - models.Tags{}, - models.Tags{}, - models.Tags{}, - models.Tags{{"a", "b"}}, + test.StringTags{}, + test.StringTags{{"a", "b"}}, + test.StringTags{}, + + test.StringTags{}, + test.StringTags{}, + test.StringTags{}, + test.StringTags{{"a", "b"}}, }, { "same tags", - models.Tags{{"a", "b"}}, - models.Tags{{"a", "b"}}, - models.Tags{{"a", "b"}}, - - models.Tags{{"a", "b"}, {"c", "d"}}, - models.Tags{}, - models.Tags{{"a", "b"}, {"c", "d"}}, - models.Tags{}, + test.StringTags{{"a", "b"}}, + test.StringTags{{"a", "b"}}, + test.StringTags{{"a", "b"}}, + + test.StringTags{{"a", "b"}, {"c", "d"}}, + test.StringTags{}, + test.StringTags{{"a", "b"}, {"c", 
"d"}}, + test.StringTags{}, }, { "different tags", - models.Tags{{"a", "b"}}, - models.Tags{{"c", "d"}}, - models.Tags{}, - - models.Tags{{"1", "2"}}, - models.Tags{{"3", "4"}}, - models.Tags{{"1", "2"}, {"a", "b"}}, - models.Tags{{"3", "4"}, {"c", "d"}}, + test.StringTags{{"a", "b"}}, + test.StringTags{{"c", "d"}}, + test.StringTags{}, + + test.StringTags{{"1", "2"}}, + test.StringTags{{"3", "4"}}, + test.StringTags{{"1", "2"}, {"a", "b"}}, + test.StringTags{{"3", "4"}, {"c", "d"}}, }, { "conflicting tags", - models.Tags{{"a", "b"}}, - models.Tags{{"a", "*b"}}, - models.Tags{}, - - models.Tags{{"1", "2"}}, - models.Tags{{"3", "4"}}, - models.Tags{{"1", "2"}, {"a", "b"}}, - models.Tags{{"3", "4"}, {"a", "*b"}}, + test.StringTags{{"a", "b"}}, + test.StringTags{{"a", "*b"}}, + test.StringTags{}, + + test.StringTags{{"1", "2"}}, + test.StringTags{{"3", "4"}}, + test.StringTags{{"1", "2"}, {"a", "b"}}, + test.StringTags{{"3", "4"}, {"a", "*b"}}, }, { "mixed tags", - models.Tags{{"a", "b"}, {"c", "d"}, {"e", "f"}}, - models.Tags{{"a", "b"}, {"c", "*d"}, {"g", "h"}}, - models.Tags{{"a", "b"}}, - - models.Tags{{"1", "2"}}, - models.Tags{{"3", "4"}}, - models.Tags{{"1", "2"}, {"c", "d"}, {"e", "f"}}, - models.Tags{{"3", "4"}, {"c", "*d"}, {"g", "h"}}, + test.StringTags{{"a", "b"}, {"c", "d"}, {"e", "f"}}, + test.StringTags{{"a", "b"}, {"c", "*d"}, {"g", "h"}}, + test.StringTags{{"a", "b"}}, + + test.StringTags{{"1", "2"}}, + test.StringTags{{"3", "4"}}, + test.StringTags{{"1", "2"}, {"c", "d"}, {"e", "f"}}, + test.StringTags{{"3", "4"}, {"c", "*d"}, {"g", "h"}}, }, } func TestCombineMetaAndSeriesMeta(t *testing.T) { for _, tt := range combineMetaAndSeriesMetaTests { t.Run(tt.name, func(t *testing.T) { - meta, otherMeta := block.Metadata{Tags: tt.tags}, block.Metadata{Tags: tt.otherTags} + tags := test.StringTagsToTags(tt.tags) + otherTags := test.StringTagsToTags(tt.otherTags) + seriesTags := test.StringTagsToTags(tt.seriesTags) + expectedTags := test.StringTagsToTags(tt.expectedTags) + otherSeriesTags := test.StringTagsToTags(tt.otherSeriesTags) + expectedSeriesTags := test.StringTagsToTags(tt.expectedSeriesTags) + expectedOtherSeriesTags := test.StringTagsToTags(tt.expectedOtherSeriesTags) - metas := []block.SeriesMeta{{Tags: tt.seriesTags}, {Tags: tt.seriesTags}} - otherMetas := []block.SeriesMeta{{Tags: tt.otherSeriesTags}} + meta, otherMeta := block.Metadata{Tags: tags}, block.Metadata{Tags: otherTags} + + metas := []block.SeriesMeta{{Tags: seriesTags}, {Tags: seriesTags}} + otherMetas := []block.SeriesMeta{{Tags: otherSeriesTags}} meta, seriesMeta, otherSeriesMeta, err := combineMetaAndSeriesMeta(meta, otherMeta, metas, otherMetas) require.NoError(t, err) - assert.Equal(t, tt.expectedTags, meta.Tags) + assert.Equal(t, expectedTags, meta.Tags) require.Equal(t, 2, len(seriesMeta)) for _, meta := range seriesMeta { - assert.Equal(t, tt.expectedSeriesTags, meta.Tags) + assert.Equal(t, expectedSeriesTags, meta.Tags) } require.Equal(t, 1, len(otherSeriesMeta)) for _, otherMeta := range otherSeriesMeta { - assert.Equal(t, tt.expectedOtherSeriesTags, otherMeta.Tags) + assert.Equal(t, expectedOtherSeriesTags, otherMeta.Tags) } }) } diff --git a/src/query/functions/binary/or_test.go b/src/query/functions/binary/or_test.go index c197dfdc51..2a3fd774f9 100644 --- a/src/query/functions/binary/or_test.go +++ b/src/query/functions/binary/or_test.go @@ -101,7 +101,7 @@ func generateMetaDataWithTagsInRange(fromRange, toRange int) []block.SeriesMeta meta := make([]block.SeriesMeta, length) for i := 0; i < 
length; i++ { strIdx := fmt.Sprint(fromRange + i) - tags := models.Tags{{strIdx, strIdx}} + tags := models.Tags{{Name: []byte(strIdx), Value: []byte(strIdx)}} meta[i] = block.SeriesMeta{ Tags: tags, Name: strIdx, @@ -290,8 +290,8 @@ func TestOrsBoundsError(t *testing.T) { func createSeriesMeta() []block.SeriesMeta { return []block.SeriesMeta{ - {Tags: models.Tags{{"foo", "bar"}}}, - {Tags: models.Tags{{"baz", "qux"}}}, + {Tags: models.Tags{{Name: []byte("foo"), Value: []byte("bar")}}}, + {Tags: models.Tags{{Name: []byte("baz"), Value: []byte("qux")}}}, } } @@ -315,9 +315,10 @@ func TestOrCombinedMetadata(t *testing.T) { StepSize: time.Minute, } + strTags := test.StringTags{{"a", "b"}, {"c", "d"}, {"e", "f"}} lhsMeta := block.Metadata{ Bounds: bounds, - Tags: models.Tags{{"a", "b"}, {"c", "d"}, {"e", "f"}}, + Tags: test.StringTagsToTags(strTags), } lSeriesMeta := createSeriesMeta() @@ -329,9 +330,10 @@ func TestOrCombinedMetadata(t *testing.T) { err = node.Process(parser.NodeID(0), lhs) require.NoError(t, err) + strTags = test.StringTags{{"a", "b"}, {"c", "*d"}, {"g", "h"}} rhsMeta := block.Metadata{ Bounds: bounds, - Tags: models.Tags{{"a", "b"}, {"c", "*d"}, {"g", "h"}}, + Tags: test.StringTagsToTags(strTags), } // NB (arnikola): since common tags for the series differ, @@ -349,13 +351,19 @@ func TestOrCombinedMetadata(t *testing.T) { test.EqualsWithNans(t, [][]float64{{1, 2}, {10, 20}, {3, 4}, {30, 40}}, sink.Values) assert.Equal(t, sink.Meta.Bounds, bounds) - assert.Equal(t, sink.Meta.Tags, models.Tags{{"a", "b"}}) + assert.Equal(t, sink.Meta.Tags, models.Tags{{Name: []byte("a"), Value: []byte("b")}}) - expectedMetas := []block.SeriesMeta{ - {Tags: models.Tags{{"c", "d"}, {"e", "f"}, {"foo", "bar"}}}, - {Tags: models.Tags{{"baz", "qux"}, {"c", "d"}, {"e", "f"}}}, - {Tags: models.Tags{{"c", "*d"}, {"foo", "bar"}, {"g", "h"}}}, - {Tags: models.Tags{{"baz", "qux"}, {"c", "*d"}, {"g", "h"}}}, + stringTags := []test.StringTags{ + {{"c", "d"}, {"e", "f"}, {"foo", "bar"}}, + {{"baz", "qux"}, {"c", "d"}, {"e", "f"}}, + {{"c", "*d"}, {"foo", "bar"}, {"g", "h"}}, + {{"baz", "qux"}, {"c", "*d"}, {"g", "h"}}, + } + + tags := test.StringTagsSliceToTagSlice(stringTags) + expectedMetas := make([]block.SeriesMeta, len(tags)) + for i, t := range tags { + expectedMetas[i] = block.SeriesMeta{Tags: t} } assert.Equal(t, expectedMetas, sink.Metas) diff --git a/src/query/functions/utils/group.go b/src/query/functions/utils/group.go index c903b61d99..abee3978d0 100644 --- a/src/query/functions/utils/group.go +++ b/src/query/functions/utils/group.go @@ -25,23 +25,23 @@ import ( "github.com/m3db/m3/src/query/models" ) -type withKeysID func(tags models.Tags, matchingTags []string) uint64 +type withKeysID func(tags models.Tags, matchingTags [][]byte) uint64 -func includeKeysID(tags models.Tags, matchingTags []string) uint64 { +func includeKeysID(tags models.Tags, matchingTags [][]byte) uint64 { return tags.IDWithKeys(matchingTags...) } -func excludeKeysID(tags models.Tags, matchingTags []string) uint64 { +func excludeKeysID(tags models.Tags, matchingTags [][]byte) uint64 { return tags.IDWithExcludes(matchingTags...) 
} -type withKeysTags func(tags models.Tags, matchingTags []string) models.Tags +type withKeysTags func(tags models.Tags, matchingTags [][]byte) models.Tags -func includeKeysTags(tags models.Tags, matchingTags []string) models.Tags { +func includeKeysTags(tags models.Tags, matchingTags [][]byte) models.Tags { return tags.TagsWithKeys(matchingTags) } -func excludeKeysTags(tags models.Tags, matchingTags []string) models.Tags { +func excludeKeysTags(tags models.Tags, matchingTags [][]byte) models.Tags { return tags.TagsWithoutKeys(matchingTags) } @@ -50,7 +50,7 @@ func excludeKeysTags(tags models.Tags, matchingTags []string) models.Tags { // and a list of corresponding buckets which signify which // series are mapped to which grouped outputs func GroupSeries( - matchingTags []string, + matchingTags [][]byte, without bool, opName string, metas []block.SeriesMeta, diff --git a/src/query/functions/utils/group_test.go b/src/query/functions/utils/group_test.go index 35a5f748f3..8745d4342f 100644 --- a/src/query/functions/utils/group_test.go +++ b/src/query/functions/utils/group_test.go @@ -84,7 +84,7 @@ var collectTest = []struct { }), }, { - "oneMatching", + "one matching", []string{"a"}, multiTagsFromMaps([]map[string]string{ {"a": "1"}, @@ -245,8 +245,12 @@ func testCollect(t *testing.T, without bool) { metas[i] = block.SeriesMeta{Tags: tagList} } - buckets, collected := GroupSeries(tt.matching, without, name, metas) + match := make([][]byte, len(tt.matching)) + for i, m := range tt.matching { + match[i] = []byte(m) + } + buckets, collected := GroupSeries(match, without, name, metas) expectedTags := tt.withTagsExpectedTags expectedIndicies := tt.withTagsExpectedIndices if without { @@ -278,7 +282,10 @@ func TestCollectWithoutTags(t *testing.T) { func multiTagsFromMaps(tagMaps []map[string]string) []models.Tags { tags := make([]models.Tags, len(tagMaps)) for i, m := range tagMaps { - tags[i] = models.FromMap(m) + tags[i] = models.Tags{} + for n, v := range m { + tags[i] = tags[i].AddTag(models.Tag{Name: []byte(n), Value: []byte(v)}) + } } return tags diff --git a/src/query/functions/utils/metadata.go b/src/query/functions/utils/metadata.go index a610e06142..d35c4f331b 100644 --- a/src/query/functions/utils/metadata.go +++ b/src/query/functions/utils/metadata.go @@ -21,6 +21,8 @@ package utils import ( + "bytes" + "github.com/m3db/m3/src/query/block" "github.com/m3db/m3/src/query/models" ) @@ -45,8 +47,8 @@ func DedupeMetadata( return models.EmptyTags(), seriesMeta } - commonKeys := make([]string, 0, len(seriesMeta[0].Tags)) - commonTags := make(map[string]string, len(seriesMeta[0].Tags)) + commonKeys := make([][]byte, 0, len(seriesMeta[0].Tags)) + commonTags := make(map[string][]byte, len(seriesMeta[0].Tags)) // For each tag in the first series, read through list of seriesMetas; // if key not found or value differs, this is not a shared tag var distinct bool @@ -54,7 +56,7 @@ func DedupeMetadata( distinct = false for _, metas := range seriesMeta[1:] { if val, ok := metas.Tags.Get(t.Name); ok { - if val != t.Value { + if !bytes.Equal(val, t.Value) { distinct = true break } @@ -67,7 +69,7 @@ func DedupeMetadata( if !distinct { // This is a shared tag; add it to shared meta commonKeys = append(commonKeys, t.Name) - commonTags[t.Name] = t.Value + commonTags[string(t.Name)] = t.Value } } @@ -75,5 +77,10 @@ func DedupeMetadata( seriesMeta[i].Tags = meta.Tags.TagsWithoutKeys(commonKeys) } - return models.FromMap(commonTags), seriesMeta + tags := make(models.Tags, 0, len(commonTags)) + for n, v := range 
commonTags { + tags = tags.AddTag(models.Tag{Name: []byte(n), Value: v}) + } + + return tags, seriesMeta } diff --git a/src/query/functions/utils/metadata_test.go b/src/query/functions/utils/metadata_test.go index 80d7031361..cb1182e268 100644 --- a/src/query/functions/utils/metadata_test.go +++ b/src/query/functions/utils/metadata_test.go @@ -25,21 +25,34 @@ import ( "github.com/m3db/m3/src/query/block" "github.com/m3db/m3/src/query/models" + "github.com/m3db/m3/src/query/test" "github.com/stretchr/testify/assert" ) func TestFlattenMetadata(t *testing.T) { - meta := block.Metadata{Tags: models.Tags{{"a", "b"}, {"c", "d"}}} + meta := block.Metadata{Tags: models.Tags{ + {Name: []byte("a"), Value: []byte("b")}, + {Name: []byte("c"), Value: []byte("d")}, + }} + seriesMetas := []block.SeriesMeta{ - {Name: "foo", Tags: models.Tags{{"e", "f"}}}, - {Name: "bar", Tags: models.Tags{{"g", "h"}}}, + {Name: "foo", Tags: models.Tags{{Name: []byte("e"), Value: []byte("f")}}}, + {Name: "bar", Tags: models.Tags{{Name: []byte("g"), Value: []byte("h")}}}, } flattened := FlattenMetadata(meta, seriesMetas) expected := []block.SeriesMeta{ - {Name: "foo", Tags: models.Tags{{"a", "b"}, {"c", "d"}, {"e", "f"}}}, - {Name: "bar", Tags: models.Tags{{"a", "b"}, {"c", "d"}, {"g", "h"}}}, + {Name: "foo", Tags: models.Tags{ + {Name: []byte("a"), Value: []byte("b")}, + {Name: []byte("c"), Value: []byte("d")}, + {Name: []byte("e"), Value: []byte("f")}, + }}, + {Name: "bar", Tags: models.Tags{ + {Name: []byte("a"), Value: []byte("b")}, + {Name: []byte("c"), Value: []byte("d")}, + {Name: []byte("g"), Value: []byte("h")}, + }}, } assert.Equal(t, expected, flattened) @@ -47,47 +60,47 @@ func TestFlattenMetadata(t *testing.T) { var dedupeMetadataTests = []struct { name string - metaTags []models.Tags - expectedCommon models.Tags - expectedSeriesTags []models.Tags + metaTags []test.StringTags + expectedCommon test.StringTags + expectedSeriesTags []test.StringTags }{ { "empty metas", - []models.Tags{}, - models.Tags{}, - []models.Tags{}, + []test.StringTags{}, + test.StringTags{}, + []test.StringTags{}, }, { "single metas", - []models.Tags{{{"a", "b"}, {"c", "d"}}}, - models.Tags{{"a", "b"}, {"c", "d"}}, - []models.Tags{{}}, + []test.StringTags{{{"a", "b"}, {"c", "d"}}}, + test.StringTags{{"a", "b"}, {"c", "d"}}, + []test.StringTags{{}}, }, { "one common tag, longer first", - []models.Tags{{{"a", "b"}, {"c", "d"}}, {{"a", "b"}}}, - models.Tags{{"a", "b"}}, - []models.Tags{{{"c", "d"}}, {}}, + []test.StringTags{{{"a", "b"}, {"c", "d"}}, {{"a", "b"}}}, + test.StringTags{{"a", "b"}}, + []test.StringTags{{{"c", "d"}}, {}}, }, { "one common tag, longer second", - []models.Tags{{{"a", "b"}}, {{"a", "b"}, {"c", "d"}}}, - models.Tags{{"a", "b"}}, - []models.Tags{{}, {{"c", "d"}}}, + []test.StringTags{{{"a", "b"}}, {{"a", "b"}, {"c", "d"}}}, + test.StringTags{{"a", "b"}}, + []test.StringTags{{}, {{"c", "d"}}}, }, { "two common tags", - []models.Tags{{{"a", "b"}, {"c", "d"}}, {{"a", "b"}, + []test.StringTags{{{"a", "b"}, {"c", "d"}}, {{"a", "b"}, {"c", "d"}}, {{"a", "b"}, {"c", "d"}}}, - models.Tags{{"a", "b"}, {"c", "d"}}, - []models.Tags{{}, {}, {}}, + test.StringTags{{"a", "b"}, {"c", "d"}}, + []test.StringTags{{}, {}, {}}, }, { "no common tags in one series", - []models.Tags{{{"a", "b"}, {"c", "d"}}, {{"a", "b"}, {"c", "d"}}, + []test.StringTags{{{"a", "b"}, {"c", "d"}}, {{"a", "b"}, {"c", "d"}}, {{"a", "b*"}, {"c*", "d"}}}, - models.Tags{}, - []models.Tags{{{"a", "b"}, {"c", "d"}}, {{"a", "b"}, + test.StringTags{}, + 
[]test.StringTags{{{"a", "b"}, {"c", "d"}}, {{"a", "b"}, {"c", "d"}}, {{"a", "b*"}, {"c*", "d"}}}, }, } @@ -98,18 +111,22 @@ func TestDedupeMetadata(t *testing.T) { metaTags := tt.metaTags numSeries := len(metaTags) seriesMetas := make([]block.SeriesMeta, numSeries) - for i, tags := range metaTags { + for i, stringTags := range metaTags { + tags := test.StringTagsToTags(stringTags) seriesMetas[i] = block.SeriesMeta{Tags: tags} } actual, actualSeriesMetas := DedupeMetadata(seriesMetas) - assert.Equal(t, tt.expectedCommon, actual) + exCommon := test.StringTagsToTags(tt.expectedCommon) + assert.Equal(t, exCommon, actual) actualTags := make([]models.Tags, numSeries) for i, metas := range actualSeriesMetas { actualTags[i] = metas.Tags } - assert.Equal(t, tt.expectedSeriesTags, actualTags) + + exSeriesTags := test.StringTagsSliceToTagSlice(tt.expectedSeriesTags) + assert.Equal(t, exSeriesTags, actualTags) }) } } diff --git a/src/query/functions/utils/strformat.go b/src/query/functions/utils/strformat.go index 11509786d0..59fcfc3758 100644 --- a/src/query/functions/utils/strformat.go +++ b/src/query/functions/utils/strformat.go @@ -24,9 +24,16 @@ import ( "strconv" ) -// FormatFloat applies all shared tags from Metadata to each SeriesMeta +// FormatFloat converts float values to formatted strings func FormatFloat( value float64, ) string { return strconv.FormatFloat(value, 'f', -1, 64) } + +// FormatFloatToBytes converts float values to formatted byte arrays +func FormatFloatToBytes( + value float64, +) []byte { + return strconv.AppendFloat([]byte{}, value, 'f', -1, 64) +} diff --git a/src/query/functions/utils/strformat_test.go b/src/query/functions/utils/strformat_test.go index a6270862cb..5872aaf73e 100644 --- a/src/query/functions/utils/strformat_test.go +++ b/src/query/functions/utils/strformat_test.go @@ -35,3 +35,12 @@ func TestValueToProm(t *testing.T) { assert.Equal(t, FormatFloat(math.Inf(-1)), "-Inf") assert.Equal(t, FormatFloat(0.0119311), "0.0119311") } + +func TestValueToPromBytes(t *testing.T) { + assert.Equal(t, FormatFloatToBytes(1.0), []byte("1")) + assert.Equal(t, FormatFloatToBytes(1.2), []byte("1.2")) + assert.Equal(t, FormatFloatToBytes(math.NaN()), []byte("NaN")) + assert.Equal(t, FormatFloatToBytes(math.Inf(1)), []byte("+Inf")) + assert.Equal(t, FormatFloatToBytes(math.Inf(-1)), []byte("-Inf")) + assert.Equal(t, FormatFloatToBytes(0.0119311), []byte("0.0119311")) +} diff --git a/src/query/generated/proto/prompb/types.pb.go b/src/query/generated/proto/prompb/types.pb.go index 24ad516d0c..538b531ad2 100644 --- a/src/query/generated/proto/prompb/types.pb.go +++ b/src/query/generated/proto/prompb/types.pb.go @@ -113,8 +113,8 @@ func (m *TimeSeries) GetSamples() []*Sample { } type Label struct { - Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` - Value string `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"` + Name []byte `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + Value []byte `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"` } func (m *Label) Reset() { *m = Label{} } @@ -122,18 +122,18 @@ func (m *Label) String() string { return proto.CompactTextString(m) } func (*Label) ProtoMessage() {} func (*Label) Descriptor() ([]byte, []int) { return fileDescriptorTypes, []int{2} } -func (m *Label) GetName() string { +func (m *Label) GetName() []byte { if m != nil { return m.Name } - return "" + return nil } -func (m *Label) GetValue() string { +func (m *Label) GetValue() []byte { if m != nil { return 
m.Value } - return "" + return nil } type Labels struct { @@ -155,8 +155,8 @@ func (m *Labels) GetLabels() []Label { // Matcher specifies a rule, which can match or set of labels or not. type LabelMatcher struct { Type LabelMatcher_Type `protobuf:"varint,1,opt,name=type,proto3,enum=prometheus.LabelMatcher_Type" json:"type,omitempty"` - Name string `protobuf:"bytes,2,opt,name=name,proto3" json:"name,omitempty"` - Value string `protobuf:"bytes,3,opt,name=value,proto3" json:"value,omitempty"` + Name []byte `protobuf:"bytes,2,opt,name=name,proto3" json:"name,omitempty"` + Value []byte `protobuf:"bytes,3,opt,name=value,proto3" json:"value,omitempty"` } func (m *LabelMatcher) Reset() { *m = LabelMatcher{} } @@ -171,18 +171,18 @@ func (m *LabelMatcher) GetType() LabelMatcher_Type { return LabelMatcher_EQ } -func (m *LabelMatcher) GetName() string { +func (m *LabelMatcher) GetName() []byte { if m != nil { return m.Name } - return "" + return nil } -func (m *LabelMatcher) GetValue() string { +func (m *LabelMatcher) GetValue() []byte { if m != nil { return m.Value } - return "" + return nil } func init() { @@ -679,7 +679,7 @@ func (m *Label) Unmarshal(dAtA []byte) error { if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field Name", wireType) } - var stringLen uint64 + var byteLen int for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowTypes @@ -689,26 +689,28 @@ func (m *Label) Unmarshal(dAtA []byte) error { } b := dAtA[iNdEx] iNdEx++ - stringLen |= (uint64(b) & 0x7F) << shift + byteLen |= (int(b) & 0x7F) << shift if b < 0x80 { break } } - intStringLen := int(stringLen) - if intStringLen < 0 { + if byteLen < 0 { return ErrInvalidLengthTypes } - postIndex := iNdEx + intStringLen + postIndex := iNdEx + byteLen if postIndex > l { return io.ErrUnexpectedEOF } - m.Name = string(dAtA[iNdEx:postIndex]) + m.Name = append(m.Name[:0], dAtA[iNdEx:postIndex]...) + if m.Name == nil { + m.Name = []byte{} + } iNdEx = postIndex case 2: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field Value", wireType) } - var stringLen uint64 + var byteLen int for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowTypes @@ -718,20 +720,22 @@ func (m *Label) Unmarshal(dAtA []byte) error { } b := dAtA[iNdEx] iNdEx++ - stringLen |= (uint64(b) & 0x7F) << shift + byteLen |= (int(b) & 0x7F) << shift if b < 0x80 { break } } - intStringLen := int(stringLen) - if intStringLen < 0 { + if byteLen < 0 { return ErrInvalidLengthTypes } - postIndex := iNdEx + intStringLen + postIndex := iNdEx + byteLen if postIndex > l { return io.ErrUnexpectedEOF } - m.Value = string(dAtA[iNdEx:postIndex]) + m.Value = append(m.Value[:0], dAtA[iNdEx:postIndex]...) 
+ if m.Value == nil { + m.Value = []byte{} + } iNdEx = postIndex default: iNdEx = preIndex @@ -887,7 +891,7 @@ func (m *LabelMatcher) Unmarshal(dAtA []byte) error { if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field Name", wireType) } - var stringLen uint64 + var byteLen int for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowTypes @@ -897,26 +901,28 @@ func (m *LabelMatcher) Unmarshal(dAtA []byte) error { } b := dAtA[iNdEx] iNdEx++ - stringLen |= (uint64(b) & 0x7F) << shift + byteLen |= (int(b) & 0x7F) << shift if b < 0x80 { break } } - intStringLen := int(stringLen) - if intStringLen < 0 { + if byteLen < 0 { return ErrInvalidLengthTypes } - postIndex := iNdEx + intStringLen + postIndex := iNdEx + byteLen if postIndex > l { return io.ErrUnexpectedEOF } - m.Name = string(dAtA[iNdEx:postIndex]) + m.Name = append(m.Name[:0], dAtA[iNdEx:postIndex]...) + if m.Name == nil { + m.Name = []byte{} + } iNdEx = postIndex case 3: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field Value", wireType) } - var stringLen uint64 + var byteLen int for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowTypes @@ -926,20 +932,22 @@ func (m *LabelMatcher) Unmarshal(dAtA []byte) error { } b := dAtA[iNdEx] iNdEx++ - stringLen |= (uint64(b) & 0x7F) << shift + byteLen |= (int(b) & 0x7F) << shift if b < 0x80 { break } } - intStringLen := int(stringLen) - if intStringLen < 0 { + if byteLen < 0 { return ErrInvalidLengthTypes } - postIndex := iNdEx + intStringLen + postIndex := iNdEx + byteLen if postIndex > l { return io.ErrUnexpectedEOF } - m.Value = string(dAtA[iNdEx:postIndex]) + m.Value = append(m.Value[:0], dAtA[iNdEx:postIndex]...) + if m.Value == nil { + m.Value = []byte{} + } iNdEx = postIndex default: iNdEx = preIndex @@ -1074,7 +1082,7 @@ func init() { var fileDescriptorTypes = []byte{ // 368 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x92, 0xc1, 0x8a, 0xdb, 0x30, - 0x10, 0x86, 0x23, 0xdb, 0x71, 0xc8, 0xb4, 0x14, 0x57, 0xf4, 0x60, 0x4a, 0xeb, 0x06, 0x9f, 0x5c, + 0x10, 0x86, 0x23, 0xdb, 0x71, 0xe8, 0x34, 0x14, 0x57, 0xf4, 0x60, 0x4a, 0xeb, 0x06, 0x9f, 0x5c, 0x68, 0x2d, 0x92, 0x9c, 0x0a, 0x85, 0x42, 0xc0, 0xb7, 0xb4, 0x10, 0x25, 0xa7, 0xde, 0xec, 0x64, 0xea, 0x18, 0xac, 0xd8, 0xb5, 0xe4, 0x42, 0xde, 0xa2, 0x97, 0xbe, 0x53, 0x8e, 0x7d, 0x82, 0x65, 0xc9, 0xbe, 0xc8, 0x62, 0x29, 0xd9, 0x04, 0x76, 0x61, 0x2f, 0x66, 0xe6, 0xf7, 0xff, 0xcf, 0x7c, @@ -1083,17 +1091,17 @@ var fileDescriptorTypes = []byte{ 0x4d, 0xa5, 0xaa, 0xee, 0x2b, 0xea, 0x8c, 0xa9, 0x7d, 0x8d, 0x32, 0xd6, 0x12, 0x85, 0x4e, 0x43, 0xb5, 0xc5, 0x56, 0xbe, 0xfd, 0x7c, 0x35, 0x2c, 0xaf, 0xf2, 0xca, 0xa4, 0xb2, 0xf6, 0x97, 0xee, 0xcc, 0x88, 0xae, 0x32, 0xd1, 0xf0, 0x2b, 0xb8, 0xcb, 0x54, 0xd4, 0x25, 0xd2, 0x37, 0xd0, 0xff, - 0x93, 0x96, 0x2d, 0xfa, 0x64, 0x44, 0x22, 0xc2, 0x4d, 0x43, 0xdf, 0xc1, 0x50, 0x15, 0x02, 0xa5, - 0x4a, 0x45, 0xed, 0x5b, 0x23, 0x12, 0xd9, 0xfc, 0x22, 0x84, 0x08, 0xb0, 0x2a, 0x04, 0x2e, 0xb1, - 0x29, 0x50, 0xd2, 0x8f, 0xe0, 0x96, 0x69, 0x86, 0xa5, 0xf4, 0xc9, 0xc8, 0x8e, 0x5e, 0x4c, 0x5e, - 0xc7, 0x17, 0xae, 0x78, 0xde, 0xfd, 0xe1, 0x27, 0x03, 0xfd, 0x04, 0x03, 0xa9, 0xd7, 0x4a, 0xdf, - 0xd2, 0x5e, 0x7a, 0xed, 0x35, 0x44, 0xfc, 0x6c, 0x09, 0xc7, 0xd0, 0xd7, 0x71, 0x4a, 0xc1, 0xd9, - 0xa5, 0xc2, 0x20, 0x0e, 0xb9, 0xae, 0x2f, 0xdc, 0x96, 0x16, 0x4d, 0x13, 0x7e, 0x01, 0x77, 0x6e, - 0x56, 0xb1, 0x67, 0xa9, 0x66, 0xce, 0xe1, 0xe6, 0x43, 0xef, 0xcc, 0x16, 0xfe, 0x23, 0xf0, 0x52, + 0x93, 
0x96, 0x2d, 0xfa, 0x64, 0x44, 0x22, 0xc2, 0x4d, 0x43, 0xdf, 0xc1, 0x0b, 0x55, 0x08, 0x94, + 0x2a, 0x15, 0xb5, 0x6f, 0x8d, 0x48, 0x64, 0xf3, 0x8b, 0x10, 0x22, 0xc0, 0xaa, 0x10, 0xb8, 0xc4, + 0xa6, 0x40, 0x49, 0x3f, 0x82, 0x5b, 0xa6, 0x19, 0x96, 0xd2, 0x27, 0x23, 0x3b, 0x7a, 0x39, 0x79, + 0x1d, 0x5f, 0xb8, 0xe2, 0x79, 0xf7, 0x87, 0x9f, 0x0c, 0xf4, 0x13, 0x0c, 0xa4, 0x5e, 0x2b, 0x7d, + 0x4b, 0x7b, 0xe9, 0xb5, 0xd7, 0x10, 0xf1, 0xb3, 0x25, 0x1c, 0x43, 0x5f, 0xc7, 0x29, 0x05, 0x67, + 0x97, 0x0a, 0x83, 0x38, 0xe4, 0xba, 0xbe, 0x70, 0x5b, 0x5a, 0x34, 0x4d, 0xf8, 0x05, 0xdc, 0xb9, + 0x59, 0xc5, 0x9e, 0xa5, 0x9a, 0x39, 0x87, 0x9b, 0x0f, 0xbd, 0x33, 0x5b, 0xf8, 0x8f, 0xc0, 0x50, 0xeb, 0xdf, 0x53, 0xb5, 0xde, 0x62, 0x43, 0xc7, 0xe0, 0x74, 0xa7, 0xad, 0xb7, 0xbe, 0x9a, 0xbc, 0x7f, 0x94, 0x3f, 0xf9, 0xe2, 0xd5, 0xbe, 0x46, 0xae, 0xad, 0x0f, 0xa0, 0xd6, 0x53, 0xa0, 0xf6, 0x35, 0x68, 0x04, 0x4e, 0x97, 0xa3, 0x2e, 0x58, 0xc9, 0xc2, 0xeb, 0xd1, 0x01, 0xd8, 0x3f, 0x92, 0x85, 0x47, 0x3a, 0x81, 0x27, 0x9e, 0xa5, 0x05, 0x9e, 0x78, 0xf6, 0xcc, 0x3f, 0x1c, 0x03, 0xf2, 0xff, 0x18, 0x90, 0xdb, 0x63, 0x40, 0xfe, 0xde, 0x05, 0xbd, 0x9f, 0xae, 0x79, 0x0b, 0x99, 0xab, - 0xef, 0x72, 0x7a, 0x1f, 0x00, 0x00, 0xff, 0xff, 0x14, 0xa2, 0xaa, 0x9a, 0x49, 0x02, 0x00, 0x00, + 0xef, 0x72, 0x7a, 0x1f, 0x00, 0x00, 0xff, 0xff, 0x6f, 0x4a, 0x7b, 0x68, 0x49, 0x02, 0x00, 0x00, } diff --git a/src/query/generated/proto/prompb/types.proto b/src/query/generated/proto/prompb/types.proto index 9dec5ea6f5..608b7cba9a 100644 --- a/src/query/generated/proto/prompb/types.proto +++ b/src/query/generated/proto/prompb/types.proto @@ -17,8 +17,8 @@ message TimeSeries { } message Label { - string name = 1; - string value = 2; + bytes name = 1; + bytes value = 2; } message Labels { @@ -34,6 +34,6 @@ message LabelMatcher { NRE = 3; } Type type = 1; - string name = 2; - string value = 3; + bytes name = 2; + bytes value = 3; } diff --git a/src/query/models/tag.go b/src/query/models/tag.go index 1c6aa3d303..5526891599 100644 --- a/src/query/models/tag.go +++ b/src/query/models/tag.go @@ -21,6 +21,7 @@ package models import ( + "bytes" "fmt" "hash/fnv" "regexp" @@ -29,25 +30,29 @@ import ( ) const ( - // MetricName is an internal name used to denote the name of the metric. - // TODO: Get these from the storage - MetricName = "__name__" - // Separators for tags sep = byte(',') eq = byte('=') ) +var ( + // MetricName is an internal name used to denote the name of the metric. + // TODO: Get these from the storage + MetricName = []byte("__name__") +) + // Tags is a list of key/value metric tag pairs type Tags []Tag -func (t Tags) Len() int { return len(t) } -func (t Tags) Swap(i, j int) { t[i], t[j] = t[j], t[i] } -func (t Tags) Less(i, j int) bool { return t[i].Name < t[j].Name } +func (t Tags) Len() int { return len(t) } +func (t Tags) Swap(i, j int) { t[i], t[j] = t[j], t[i] } +func (t Tags) Less(i, j int) bool { + return bytes.Compare(t[i].Name, t[j].Name) == -1 +} // Tag is a key/value metric tag pair type Tag struct { - Name, Value string + Name, Value []byte } // Metric is the individual metric that gets returned from the search endpoint @@ -84,23 +89,24 @@ func (m MatchType) String() string { } // Matcher models the matching of a label. +// NB: when serialized to JSON these will be base64'd type Matcher struct { Type MatchType `json:"type"` - Name string `json:"name"` - Value string `json:"value"` + Name []byte `json:"name"` + Value []byte `json:"value"` re *regexp.Regexp } // NewMatcher returns a matcher object. 
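Since the Matcher struct above now carries []byte name/value fields and notes that they are base64'd when serialized to JSON, here is a minimal caller-side sketch of the byte-based tag and matcher API, assuming only the types and signatures visible around this hunk (the literal tag values such as "requests" are made up for illustration):

package main

import (
	"encoding/json"
	"fmt"
	"sort"

	"github.com/m3db/m3/src/query/models"
)

func main() {
	// Tag names and values are byte slices; Tags sorts via bytes.Compare in Less.
	tags := models.Tags{
		{Name: []byte("t2"), Value: []byte("v2")},
		{Name: models.MetricName, Value: []byte("requests")},
	}
	sort.Sort(tags) // "__name__" sorts before "t2"

	// NewMatcher now takes raw bytes for both name and value.
	m, err := models.NewMatcher(models.MatchEqual, []byte("foo"), []byte("bar"))
	if err != nil {
		panic(err)
	}

	// encoding/json renders []byte fields as base64 strings, which is what
	// the NB on the Matcher struct refers to.
	out, _ := json.Marshal(m)
	fmt.Println(len(tags), string(out)) // name/value appear as "Zm9v"/"YmFy"
}

The base64 behaviour comes from encoding/json's default handling of []byte, not from anything Matcher-specific.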
-func NewMatcher(t MatchType, n, v string) (*Matcher, error) { +func NewMatcher(t MatchType, n, v []byte) (*Matcher, error) { m := &Matcher{ Type: t, Name: n, Value: v, } if t == MatchRegexp || t == MatchNotRegexp { - re, err := regexp.Compile("^(?:" + v + ")$") + re, err := regexp.Compile("^(?:" + string(v) + ")$") if err != nil { return nil, err } @@ -114,17 +120,18 @@ func (m *Matcher) String() string { } // Matches returns whether the matcher matches the given string value. -func (m *Matcher) Matches(s string) bool { +func (m *Matcher) Matches(s []byte) bool { switch m.Type { case MatchEqual: - return s == m.Value + return bytes.Equal(s, m.Value) case MatchNotEqual: - return s != m.Value + return !bytes.Equal(s, m.Value) case MatchRegexp: - return m.re.MatchString(s) + return m.re.MatchString(string(s)) case MatchNotRegexp: - return !m.re.MatchString(s) + return !m.re.MatchString(string(s)) } + panic("labels.Matcher.Matches: invalid match type") } @@ -155,9 +162,9 @@ func (t Tags) ID() string { strBuilder.Grow(idLen) for _, tag := range t { - strBuilder.WriteString(tag.Name) + strBuilder.Write(tag.Name) strBuilder.WriteByte(eq) - strBuilder.WriteString(tag.Value) + strBuilder.Write(tag.Value) strBuilder.WriteByte(sep) } @@ -189,17 +196,17 @@ func (t Tags) IDLen() int { } // IDWithExcludes returns a string representation of the tags excluding some tag keys -func (t Tags) IDWithExcludes(excludeKeys ...string) uint64 { +func (t Tags) IDWithExcludes(excludeKeys ...[]byte) uint64 { b := make([]byte, 0, len(t)) for _, tag := range t { // Always exclude the metric name by default - if tag.Name == MetricName { + if bytes.Equal(tag.Name, MetricName) { continue } found := false for _, n := range excludeKeys { - if n == tag.Name { + if bytes.Equal(n, tag.Name) { found = true break } @@ -221,12 +228,12 @@ func (t Tags) IDWithExcludes(excludeKeys ...string) uint64 { return h.Sum64() } -func (t Tags) tagSubset(keys []string, include bool) Tags { +func (t Tags) tagSubset(keys [][]byte, include bool) Tags { tags := make(Tags, 0, len(t)) for _, tag := range t { found := false for _, k := range keys { - if tag.Name == k { + if bytes.Equal(tag.Name, k) { found = true break } @@ -241,16 +248,16 @@ func (t Tags) tagSubset(keys []string, include bool) Tags { } // TagsWithoutKeys returns only the tags which do not have the given keys -func (t Tags) TagsWithoutKeys(excludeKeys []string) Tags { +func (t Tags) TagsWithoutKeys(excludeKeys [][]byte) Tags { return t.tagSubset(excludeKeys, false) } // IDWithKeys returns a string representation of the tags only including the given keys -func (t Tags) IDWithKeys(includeKeys ...string) uint64 { +func (t Tags) IDWithKeys(includeKeys ...[]byte) uint64 { b := make([]byte, 0, len(t)) for _, tag := range t { for _, k := range includeKeys { - if tag.Name == k { + if bytes.Equal(tag.Name, k) { b = append(b, tag.Name...) b = append(b, eq) b = append(b, tag.Value...) @@ -266,54 +273,24 @@ func (t Tags) IDWithKeys(includeKeys ...string) uint64 { } // TagsWithKeys returns only the tags which have the given keys -func (t Tags) TagsWithKeys(includeKeys []string) Tags { +func (t Tags) TagsWithKeys(includeKeys [][]byte) Tags { return t.tagSubset(includeKeys, true) } // WithoutName copies the tags excluding the name tag func (t Tags) WithoutName() Tags { - return t.TagsWithoutKeys([]string{MetricName}) + return t.TagsWithoutKeys([][]byte{MetricName}) } // Get returns the value for the tag with the given name. 
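The key-based helpers above now take [][]byte instead of []string. A short sketch of how a caller filters and reads tags with the byte-based API, assuming only the signatures shown in this file (the job/instance values are illustrative):

package main

import (
	"fmt"

	"github.com/m3db/m3/src/query/models"
)

func main() {
	tags := models.Tags{
		{Name: models.MetricName, Value: []byte("up")},
		{Name: []byte("job"), Value: []byte("node")},
		{Name: []byte("instance"), Value: []byte("a:9100")},
	}

	// Key sets are [][]byte now; comparisons happen via bytes.Equal internally.
	kept := tags.TagsWithKeys([][]byte{[]byte("job")})
	dropped := tags.TagsWithoutKeys([][]byte{[]byte("instance"), models.MetricName})

	// Get returns the raw value bytes plus an ok flag.
	name, ok := tags.Get(models.MetricName)

	fmt.Println(len(kept), len(dropped), string(name), ok) // 1 1 up true
}

The switch to [][]byte keys avoids converting decoded tag bytes back into strings just to compare them, which is the main point of this change.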
-func (t Tags) Get(key string) (string, bool) { +func (t Tags) Get(key []byte) ([]byte, bool) { for _, tag := range t { - if tag.Name == key { + if bytes.Equal(tag.Name, key) { return tag.Value, true } } - return "", false -} - -// FromMap returns new sorted tags from the given map. -func FromMap(m map[string]string) Tags { - l := make(Tags, 0, len(m)) - for k, v := range m { - l = append(l, Tag{Name: k, Value: v}) - } - - return Normalize(l) -} - -// TagMap returns a tag map of the tags. -func (t Tags) TagMap() map[string]Tag { - m := make(map[string]Tag, len(t)) - for _, tag := range t { - m[tag.Name] = tag - } - - return m -} - -// StringMap returns a string map of the tags. -func (t Tags) StringMap() map[string]string { - m := make(map[string]string, len(t)) - for _, tag := range t { - m[tag.Name] = tag.Value - } - - return m + return nil, false } // Clone returns a copy of the tags diff --git a/src/query/models/tag_test.go b/src/query/models/tag_test.go index ee25f371d5..8c97263172 100644 --- a/src/query/models/tag_test.go +++ b/src/query/models/tag_test.go @@ -29,7 +29,7 @@ import ( ) func mustNewMatcher(t *testing.T, mType MatchType, value string) *Matcher { - m, err := NewMatcher(mType, "", value) + m, err := NewMatcher(mType, []byte{}, []byte(value)) if err != nil { t.Fatal(err) } @@ -95,9 +95,7 @@ func TestMatcher(t *testing.T) { } for _, test := range tests { - if test.matcher.Matches(test.value) != test.match { - t.Fatalf("Unexpected match result for matcher %v and value %q; want %v, got %v", test.matcher, test.value, test.match, !test.match) - } + assert.Equal(t, test.match, test.matcher.Matches([]byte(test.value))) } } @@ -106,9 +104,12 @@ func TestMatchType(t *testing.T) { } func createTags(withName bool) Tags { - tags := Tags{{"t1", "v1"}, {"t2", "v2"}} + tags := Tags{ + {Name: []byte("t1"), Value: []byte("v1")}, + {Name: []byte("t2"), Value: []byte("v2")}, + } if withName { - tags = append(tags, Tag{Name: MetricName, Value: "v0"}) + tags = append(tags, Tag{Name: MetricName, Value: []byte("v0")}) } return tags } @@ -142,15 +143,15 @@ func TestIDWithKeys(t *testing.T) { h := fnv.New64a() h.Write(b) - idWithKeys := tags.IDWithKeys("t1", "t2", MetricName) + idWithKeys := tags.IDWithKeys([]byte("t1"), []byte("t2"), MetricName) assert.Equal(t, h.Sum64(), idWithKeys) } func TestTagsWithKeys(t *testing.T) { tags := createTags(true) - tagsWithKeys := tags.TagsWithKeys([]string{"t1"}) - assert.Equal(t, Tags{{"t1", "v1"}}, tagsWithKeys) + tagsWithKeys := tags.TagsWithKeys([][]byte{[]byte("t1")}) + assert.Equal(t, Tags{{Name: []byte("t1"), Value: []byte("v1")}}, tagsWithKeys) } func TestIDWithExcludes(t *testing.T) { @@ -160,34 +161,48 @@ func TestIDWithExcludes(t *testing.T) { h := fnv.New64a() h.Write(b) - idWithExcludes := tags.IDWithExcludes("t1") + idWithExcludes := tags.IDWithExcludes([]byte("t1")) assert.Equal(t, h.Sum64(), idWithExcludes) } func TestTagsWithExcludes(t *testing.T) { tags := createTags(true) - tagsWithoutKeys := tags.TagsWithoutKeys([]string{"t1", MetricName}) - assert.Equal(t, Tags{{"t2", "v2"}}, tagsWithoutKeys) + tagsWithoutKeys := tags.TagsWithoutKeys([][]byte{[]byte("t1"), MetricName}) + assert.Equal(t, Tags{{Name: []byte("t2"), Value: []byte("v2")}}, tagsWithoutKeys) } func TestTagsWithExcludesCustom(t *testing.T) { - tags := Tags{{"a", "1"}, {"b", "2"}, {"c", "3"}, {MetricName, "foo"}} - tagsWithoutKeys := tags.TagsWithoutKeys([]string{"a", "c", MetricName}) - assert.Equal(t, Tags{{"b", "2"}}, tagsWithoutKeys) + tags := Tags{ + {Name: []byte("a"), Value: 
[]byte("1")}, + {Name: []byte("b"), Value: []byte("2")}, + {Name: []byte("c"), Value: []byte("3")}, + {Name: MetricName, Value: []byte("foo")}, + } + tagsWithoutKeys := tags.TagsWithoutKeys([][]byte{[]byte("a"), []byte("c"), MetricName}) + assert.Equal(t, Tags{{Name: []byte("b"), Value: []byte("2")}}, tagsWithoutKeys) } func TestAddTags(t *testing.T) { tags := make(Tags, 0, 4) - tagToAdd := Tag{"z", "3"} + tagToAdd := Tag{Name: []byte("x"), Value: []byte("3")} tags = tags.AddTag(tagToAdd) assert.Equal(t, Tags{tagToAdd}, tags) - tagsToAdd := Tags{{"a", "1"}, {"b", "2"}, {"c", "3"}} + tagsToAdd := Tags{ + {Name: []byte("a"), Value: []byte("1")}, + {Name: []byte("b"), Value: []byte("2")}, + {Name: []byte("z"), Value: []byte("4")}, + } tags = tags.Add(tagsToAdd) - expected := Tags{{"a", "1"}, {"b", "2"}, {"c", "3"}, {"z", "3"}} + expected := Tags{ + {Name: []byte("a"), Value: []byte("1")}, + {Name: []byte("b"), Value: []byte("2")}, + {Name: []byte("x"), Value: []byte("3")}, + {Name: []byte("z"), Value: []byte("4")}, + } assert.Equal(t, expected, tags) } diff --git a/src/query/parser/promql/parse.go b/src/query/parser/promql/parse.go index f09bd79fb9..19c5e867a6 100644 --- a/src/query/parser/promql/parse.go +++ b/src/query/parser/promql/parse.go @@ -23,7 +23,6 @@ package promql import ( "fmt" - "github.com/m3db/m3/src/query/errors" "github.com/m3db/m3/src/query/parser" pql "github.com/prometheus/prometheus/promql" @@ -200,7 +199,4 @@ func (p *parseState) walk(node pql.Node) error { default: return fmt.Errorf("promql.Walk: unhandled node type %T, %v", node, node) } - - // TODO: This should go away once all cases have been implemented - return errors.ErrNotImplemented } diff --git a/src/query/parser/promql/types.go b/src/query/parser/promql/types.go index 8f80c4a684..9f7ee865fb 100644 --- a/src/query/parser/promql/types.go +++ b/src/query/parser/promql/types.go @@ -63,9 +63,13 @@ func NewSelectorFromMatrix(n *promql.MatrixSelector) (parser.Params, error) { // NewAggregationOperator creates a new aggregation operator based on the type func NewAggregationOperator(expr *promql.AggregateExpr) (parser.Params, error) { opType := expr.Op + byteMatchers := make([][]byte, len(expr.Grouping)) + for i, grouping := range expr.Grouping { + byteMatchers[i] = []byte(grouping) + } nodeInformation := aggregation.NodeParams{ - MatchingTags: expr.Grouping, + MatchingTags: byteMatchers, Without: expr.Without, } @@ -229,7 +233,7 @@ func labelMatchersToModelMatcher(lMatchers []*labels.Matcher) (models.Matchers, return nil, err } - match, err := models.NewMatcher(modelType, m.Name, m.Value) + match, err := models.NewMatcher(modelType, []byte(m.Name), []byte(m.Value)) if err != nil { return nil, err } @@ -278,9 +282,15 @@ func promMatchingToM3(vectorMatching *promql.VectorMatching) *binary.VectorMatch if vectorMatching == nil { return nil } + + byteMatchers := make([][]byte, len(vectorMatching.MatchingLabels)) + for i, label := range vectorMatching.MatchingLabels { + byteMatchers[i] = []byte(label) + } + return &binary.VectorMatching{ Card: promVectorCardinalityToM3(vectorMatching.Card), - MatchingLabels: vectorMatching.MatchingLabels, + MatchingLabels: byteMatchers, On: vectorMatching.On, Include: vectorMatching.Include, } diff --git a/src/query/storage/converter_test.go b/src/query/storage/converter_test.go index 8c6fe5ea2d..b9889a61df 100644 --- a/src/query/storage/converter_test.go +++ b/src/query/storage/converter_test.go @@ -51,7 +51,7 @@ func verifyExpandSeries(t *testing.T, ctrl *gomock.Controller, num int, 
pools po require.NotNil(t, results) require.NotNil(t, results.SeriesList) require.Len(t, results.SeriesList, num) - expectedTags := models.Tags{{Name: testTags.Name.String(), Value: testTags.Value.String()}} + expectedTags := models.Tags{{Name: testTags.Name.Bytes(), Value: testTags.Value.Bytes()}} for i := 0; i < num; i++ { series := results.SeriesList[i] require.NotNil(t, series) @@ -141,6 +141,11 @@ func TestFailingExpandSeriesValidPools(t *testing.T) { require.EqualError(t, err, "error") } +var ( + name = []byte("foo") + value = []byte("bar") +) + func TestPromReadQueryToM3(t *testing.T) { tests := []struct { name string @@ -151,54 +156,54 @@ func TestPromReadQueryToM3(t *testing.T) { { name: "single exact match", matchers: []*prompb.LabelMatcher{ - &prompb.LabelMatcher{Type: prompb.LabelMatcher_EQ, Name: "foo", Value: "bar"}, + &prompb.LabelMatcher{Type: prompb.LabelMatcher_EQ, Name: name, Value: value}, }, expected: []*models.Matcher{ - &models.Matcher{Type: models.MatchEqual, Name: "foo", Value: "bar"}, + &models.Matcher{Type: models.MatchEqual, Name: name, Value: value}, }, }, { name: "single exact match negated", matchers: []*prompb.LabelMatcher{ - &prompb.LabelMatcher{Type: prompb.LabelMatcher_NEQ, Name: "foo", Value: "bar"}, + &prompb.LabelMatcher{Type: prompb.LabelMatcher_NEQ, Name: name, Value: value}, }, expected: []*models.Matcher{ - &models.Matcher{Type: models.MatchNotEqual, Name: "foo", Value: "bar"}, + &models.Matcher{Type: models.MatchNotEqual, Name: name, Value: value}, }, }, { name: "single regexp match", matchers: []*prompb.LabelMatcher{ - &prompb.LabelMatcher{Type: prompb.LabelMatcher_RE, Name: "foo", Value: "bar"}, + &prompb.LabelMatcher{Type: prompb.LabelMatcher_RE, Name: name, Value: value}, }, expected: []*models.Matcher{ - &models.Matcher{Type: models.MatchRegexp, Name: "foo", Value: "bar"}, + &models.Matcher{Type: models.MatchRegexp, Name: name, Value: value}, }, }, { name: "single regexp match negated", matchers: []*prompb.LabelMatcher{ - &prompb.LabelMatcher{Type: prompb.LabelMatcher_NRE, Name: "foo", Value: "bar"}, + &prompb.LabelMatcher{Type: prompb.LabelMatcher_NRE, Name: name, Value: value}, }, expected: []*models.Matcher{ - &models.Matcher{Type: models.MatchNotRegexp, Name: "foo", Value: "bar"}, + &models.Matcher{Type: models.MatchNotRegexp, Name: name, Value: value}, }, }, { name: "mixed exact match and regexp match", matchers: []*prompb.LabelMatcher{ - &prompb.LabelMatcher{Type: prompb.LabelMatcher_EQ, Name: "foo", Value: "bar"}, - &prompb.LabelMatcher{Type: prompb.LabelMatcher_RE, Name: "baz", Value: "qux"}, + &prompb.LabelMatcher{Type: prompb.LabelMatcher_EQ, Name: name, Value: value}, + &prompb.LabelMatcher{Type: prompb.LabelMatcher_RE, Name: []byte("baz"), Value: []byte("qux")}, }, expected: []*models.Matcher{ - &models.Matcher{Type: models.MatchEqual, Name: "foo", Value: "bar"}, - &models.Matcher{Type: models.MatchRegexp, Name: "baz", Value: "qux"}, + &models.Matcher{Type: models.MatchEqual, Name: name, Value: value}, + &models.Matcher{Type: models.MatchRegexp, Name: []byte("baz"), Value: []byte("qux")}, }, }, { name: "unrecognized matcher type", matchers: []*prompb.LabelMatcher{ - &prompb.LabelMatcher{Type: prompb.LabelMatcher_Type(math.MaxInt32), Name: "foo", Value: "bar"}, + &prompb.LabelMatcher{Type: prompb.LabelMatcher_Type(math.MaxInt32), Name: name, Value: value}, }, expectError: true, }, @@ -258,8 +263,8 @@ func BenchmarkFetchResultToPromResult(b *testing.B) { tags := make(models.Tags, 0, numTagsPerSeries) for i := 0; i < 
numTagsPerSeries; i++ { tags = append(tags, models.Tag{ - Name: fmt.Sprintf("name-%d", i), - Value: fmt.Sprintf("value-%d", i), + Name: []byte(fmt.Sprintf("name-%d", i)), + Value: []byte(fmt.Sprintf("value-%d", i)), }) } diff --git a/src/query/storage/index.go b/src/query/storage/index.go index a5971c3e82..5aa88edd21 100644 --- a/src/query/storage/index.go +++ b/src/query/storage/index.go @@ -50,8 +50,8 @@ func FromIdentTagIteratorToTags(identTags ident.TagIterator) (models.Tags, error for identTags.Next() { identTag := identTags.Current() tags = append(tags, models.Tag{ - Name: identTag.Name.String(), - Value: identTag.Value.String(), + Name: identTag.Name.Bytes(), + Value: identTag.Value.Bytes(), }) } @@ -64,10 +64,13 @@ func FromIdentTagIteratorToTags(identTags ident.TagIterator) (models.Tags, error // TagsToIdentTagIterator converts coordinator tags to ident tags func TagsToIdentTagIterator(tags models.Tags) ident.TagIterator { + //TODO get a tags and tag iterator from an ident.Pool here rather than allocing them here identTags := make([]ident.Tag, 0, len(tags)) - for _, t := range tags { - identTags = append(identTags, ident.StringTag(t.Name, t.Value)) + identTags = append(identTags, ident.Tag{ + Name: ident.BytesID(t.Name), + Value: ident.BytesID(t.Value), + }) } return ident.NewTagsIterator(ident.NewTags(identTags...)) @@ -106,7 +109,7 @@ func matcherToQuery(matcher *models.Matcher) (idx.Query, error) { negate = true fallthrough case models.MatchRegexp: - query, err := idx.NewRegexpQuery([]byte(matcher.Name), []byte(matcher.Value)) + query, err := idx.NewRegexpQuery(matcher.Name, matcher.Value) if err != nil { return idx.Query{}, err } @@ -120,7 +123,7 @@ func matcherToQuery(matcher *models.Matcher) (idx.Query, error) { negate = true fallthrough case models.MatchEqual: - query := idx.NewTermQuery([]byte(matcher.Name), []byte(matcher.Value)) + query := idx.NewTermQuery(matcher.Name, matcher.Value) if negate { query = idx.NewNegationQuery(query) } diff --git a/src/query/storage/index_test.go b/src/query/storage/index_test.go index 909c23d7ef..decff2a598 100644 --- a/src/query/storage/index_test.go +++ b/src/query/storage/index_test.go @@ -33,8 +33,11 @@ import ( var ( testID = ident.StringID("test_id") - testTags = models.Tags{{Name: "t1", Value: "v1"}, {Name: "t2", Value: "v2"}} - now = time.Now() + testTags = models.Tags{ + {Name: []byte("t1"), Value: []byte("v1")}, + {Name: []byte("t2"), Value: []byte("v2")}, + } + now = time.Now() ) func TestTagsToIdentTagIterator(t *testing.T) { @@ -44,8 +47,8 @@ func TestTagsToIdentTagIterator(t *testing.T) { tags := make(models.Tags, len(testTags)) for i := 0; tagIter.Next(); i++ { tags[i] = models.Tag{ - Name: tagIter.Current().Name.String(), - Value: tagIter.Current().Value.String(), + Name: tagIter.Current().Name.Bytes(), + Value: tagIter.Current().Value.Bytes(), } } @@ -81,8 +84,8 @@ func TestFetchQueryToM3Query(t *testing.T) { matchers: models.Matchers{ { Type: models.MatchEqual, - Name: "t1", - Value: "v1", + Name: []byte("t1"), + Value: []byte("v1"), }, }, }, @@ -92,8 +95,8 @@ func TestFetchQueryToM3Query(t *testing.T) { matchers: models.Matchers{ { Type: models.MatchNotEqual, - Name: "t1", - Value: "v1", + Name: []byte("t1"), + Value: []byte("v1"), }, }, }, @@ -103,8 +106,8 @@ func TestFetchQueryToM3Query(t *testing.T) { matchers: models.Matchers{ { Type: models.MatchRegexp, - Name: "t1", - Value: "v1", + Name: []byte("t1"), + Value: []byte("v1"), }, }, }, @@ -114,8 +117,8 @@ func TestFetchQueryToM3Query(t *testing.T) { matchers: 
models.Matchers{ { Type: models.MatchNotRegexp, - Name: "t1", - Value: "v1", + Name: []byte("t1"), + Value: []byte("v1"), }, }, }, diff --git a/src/query/storage/m3/storage_test.go b/src/query/storage/m3/storage_test.go index 9939256062..c6fe91ac8e 100644 --- a/src/query/storage/m3/storage_test.go +++ b/src/query/storage/m3/storage_test.go @@ -95,13 +95,13 @@ func newFetchReq() *storage.FetchQuery { matchers := models.Matchers{ { Type: models.MatchEqual, - Name: "foo", - Value: "bar", + Name: []byte("foo"), + Value: []byte("bar"), }, { Type: models.MatchEqual, - Name: "biz", - Value: "baz", + Name: []byte("biz"), + Value: []byte("baz"), }, } return &storage.FetchQuery{ @@ -112,7 +112,10 @@ func newFetchReq() *storage.FetchQuery { } func newWriteQuery() *storage.WriteQuery { - tags := map[string]string{"foo": "bar", "biz": "baz"} + tags := models.Tags{ + {Name: []byte("foo"), Value: []byte("bar")}, + {Name: []byte("biz"), Value: []byte("baz")}, + } datapoints := ts.Datapoints{{ Timestamp: time.Now(), Value: 1.0, @@ -122,7 +125,7 @@ func newWriteQuery() *storage.WriteQuery { Value: 2.0, }} return &storage.WriteQuery{ - Tags: models.FromMap(tags), + Tags: tags, Unit: xtime.Millisecond, Datapoints: datapoints, } @@ -209,7 +212,7 @@ func TestLocalWriteAggregatedSuccess(t *testing.T) { } func TestLocalRead(t *testing.T) { - ctrl := gomock.NewController(xtest.Reporter{t}) + ctrl := gomock.NewController(xtest.Reporter{T: t}) defer ctrl.Finish() store, sessions := setup(t, ctrl) testTags := seriesiter.GenerateTag() @@ -240,13 +243,12 @@ func TestLocalRead(t *testing.T) { searchReq := newFetchReq() results, err := store.Fetch(context.TODO(), searchReq, &storage.FetchOptions{Limit: 100}) assert.NoError(t, err) - tags := make(map[string]string, 1) - tags[testTags.Name.String()] = testTags.Value.String() + tags := models.Tags{{Name: testTags.Name.Bytes(), Value: testTags.Value.Bytes()}} require.NotNil(t, results) require.NotNil(t, results.SeriesList) require.Len(t, results.SeriesList, 1) require.NotNil(t, results.SeriesList[0]) - assert.Equal(t, models.FromMap(tags), results.SeriesList[0].Tags) + assert.Equal(t, tags, results.SeriesList[0].Tags) } func TestLocalReadNoClustersForTimeRangeError(t *testing.T) { @@ -362,7 +364,7 @@ func TestLocalSearchSuccess(t *testing.T) { assert.Equal(t, expected.id, actual.ID) assert.Equal(t, models.Tags{{ - Name: expected.tagName, Value: expected.tagValue, + Name: []byte(expected.tagName), Value: []byte(expected.tagValue), }}, actual.Tags) } } diff --git a/src/query/test/block.go b/src/query/test/block.go index a1e0da83ee..a969bdc544 100644 --- a/src/query/test/block.go +++ b/src/query/test/block.go @@ -100,13 +100,14 @@ func NewMultiUnconsolidatedBlocksFromValues(bounds models.Bounds, seriesValues [ func NewSeriesMeta(tagPrefix string, count int) []block.SeriesMeta { seriesMeta := make([]block.SeriesMeta, count) for i := range seriesMeta { - tags := make(map[string]string) - t := fmt.Sprintf("%s%d", tagPrefix, i) - tags[models.MetricName] = t - tags[t] = t + tags := models.Tags{} + st := fmt.Sprintf("%s%d", tagPrefix, i) + t := []byte(st) + tags = tags.AddTag(models.Tag{Name: models.MetricName, Value: t}) + tags = tags.AddTag(models.Tag{Name: t, Value: t}) seriesMeta[i] = block.SeriesMeta{ - Name: t, - Tags: models.FromMap(tags), + Name: st, + Tags: tags, } } return seriesMeta diff --git a/src/query/test/util.go b/src/query/test/comparison.go similarity index 100% rename from src/query/test/util.go rename to src/query/test/comparison.go diff --git 
a/src/query/test/handler.go b/src/query/test/handler.go index 8517503ae6..e417de07a5 100644 --- a/src/query/test/handler.go +++ b/src/query/test/handler.go @@ -35,7 +35,7 @@ func GeneratePromReadRequest() *prompb.ReadRequest { req := &prompb.ReadRequest{ Queries: []*prompb.Query{{ Matchers: []*prompb.LabelMatcher{ - {Type: prompb.LabelMatcher_EQ, Name: "eq", Value: "a"}, + {Type: prompb.LabelMatcher_EQ, Name: []byte("eq"), Value: []byte("a")}, }, StartTimestampMs: time.Now().Add(-1*time.Hour*24).UnixNano() / int64(time.Millisecond), EndTimestampMs: time.Now().UnixNano() / int64(time.Millisecond), diff --git a/src/query/test/mock_pools.go b/src/query/test/mock_pools.go new file mode 100644 index 0000000000..0c26428ec2 --- /dev/null +++ b/src/query/test/mock_pools.go @@ -0,0 +1,123 @@ +// Copyright (c) 2018 Uber Technologies, Inc. +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. 
+ +package test + +import ( + "sync" + + "github.com/m3db/m3/src/dbnode/encoding" + "github.com/m3db/m3/src/dbnode/serialize" + "github.com/m3db/m3/src/dbnode/x/xpool" + "github.com/m3db/m3/src/query/pools" + "github.com/m3db/m3x/ident" + "github.com/m3db/m3x/pool" +) + +var ( + buckets = []pool.Bucket{{Capacity: 100, Count: 100}} + poolOpts = pool.NewObjectPoolOptions().SetSize(1) + mu sync.Mutex +) + +// MakeMockPoolWrapper builds a pool wrapper wrapping a mock iterator +func MakeMockPoolWrapper() *pools.PoolWrapper { + return pools.NewPoolsWrapper(MakeMockIteratorPool()) +} + +// MakeMockIteratorPool builds a mock iterator pool +func MakeMockIteratorPool() *MockIteratorPool { + return &MockIteratorPool{} +} + +// MockIteratorPool is an iterator pool used for testing +type MockIteratorPool struct { + MriPoolUsed, SiPoolUsed, MsiPoolUsed, MriaPoolUsed, + CbwPoolUsed, IdentPoolUsed, EncodePoolUsed, DecodePoolUsed bool +} + +// MultiReaderIterator exposes the session's MultiReaderIteratorPool +func (ip *MockIteratorPool) MultiReaderIterator() encoding.MultiReaderIteratorPool { + ip.MriPoolUsed = true + mriPool := encoding.NewMultiReaderIteratorPool(nil) + mriPool.Init(testIterAlloc) + return mriPool +} + +// MutableSeriesIterators exposes the session's MutableSeriesIteratorsPool +func (ip *MockIteratorPool) MutableSeriesIterators() encoding.MutableSeriesIteratorsPool { + ip.MsiPoolUsed = true + msiPool := encoding.NewMutableSeriesIteratorsPool(buckets) + msiPool.Init() + return msiPool +} + +// SeriesIterator exposes the session's SeriesIteratorPool +func (ip *MockIteratorPool) SeriesIterator() encoding.SeriesIteratorPool { + ip.SiPoolUsed = true + siPool := encoding.NewSeriesIteratorPool(nil) + siPool.Init() + return siPool +} + +// MultiReaderIteratorArray exposes the session's MultiReaderIteratorArrayPool +func (ip *MockIteratorPool) MultiReaderIteratorArray() encoding.MultiReaderIteratorArrayPool { + ip.MriaPoolUsed = true + mriaPool := encoding.NewMultiReaderIteratorArrayPool(nil) + mriaPool.Init() + return mriaPool +} + +// CheckedBytesWrapper exposes the session's CheckedBytesWrapperPool +func (ip *MockIteratorPool) CheckedBytesWrapper() xpool.CheckedBytesWrapperPool { + ip.CbwPoolUsed = true + cbwPool := xpool.NewCheckedBytesWrapperPool(nil) + cbwPool.Init() + return cbwPool +} + +// ID exposes the session's identity pool +func (ip *MockIteratorPool) ID() ident.Pool { + ip.IdentPoolUsed = true + bytesPool := pool.NewCheckedBytesPool(buckets, nil, + func(sizes []pool.Bucket) pool.BytesPool { + return pool.NewBytesPool(sizes, nil) + }) + bytesPool.Init() + return ident.NewPool(bytesPool, ident.PoolOptions{}) +} + +// TagDecoder exposes the session's tag decoder pool +func (ip *MockIteratorPool) TagDecoder() serialize.TagDecoderPool { + ip.DecodePoolUsed = true + decoderPool := serialize.NewTagDecoderPool(serialize.NewTagDecoderOptions(), poolOpts) + decoderPool.Init() + return decoderPool +} + +// TagEncoder exposes the session's tag encoder pool +func (ip *MockIteratorPool) TagEncoder() serialize.TagEncoderPool { + mu.Lock() + ip.EncodePoolUsed = true + encoderPool := serialize.NewTagEncoderPool(serialize.NewTagEncoderOptions(), poolOpts) + encoderPool.Init() + mu.Unlock() + return encoderPool +} diff --git a/src/query/test/tag_utils.go b/src/query/test/tag_utils.go new file mode 100644 index 0000000000..e6b09d3890 --- /dev/null +++ b/src/query/test/tag_utils.go @@ -0,0 +1,52 @@ +// Copyright (c) 2018 Uber Technologies, Inc. 
+// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. + +package test + +import "github.com/m3db/m3/src/query/models" + +// StringTagsToTags converts string tags to tags +func StringTagsToTags(s StringTags) models.Tags { + tags := models.Tags{} + for _, t := range s { + tags = tags.AddTag(models.Tag{Name: []byte(t.N), Value: []byte(t.V)}) + } + + return tags +} + +// StringTagsSliceToTagSlice converts a slice of string tags to a slice of tags +func StringTagsSliceToTagSlice(s []StringTags) []models.Tags { + tags := make([]models.Tags, len(s)) + + for i, stringTags := range s { + tags[i] = StringTagsToTags(stringTags) + } + + return tags +} + +// StringTags is a slice of string tags +type StringTags []StringTag + +// StringTag is a tag containing string key value pairs +type StringTag struct { + N, V string +} diff --git a/src/query/ts/m3db/convert_test.go b/src/query/ts/m3db/convert_test.go index 79d44ccc39..84ae736f34 100644 --- a/src/query/ts/m3db/convert_test.go +++ b/src/query/ts/m3db/convert_test.go @@ -93,5 +93,8 @@ func TestConversion(t *testing.T) { func checkTags(t *testing.T, tags ident.TagIterator) { convertedTags, err := storage.FromIdentTagIteratorToTags(tags) require.NoError(t, err) - assert.Equal(t, models.Tags{{"baz", testTags["baz"]}, {"foo", testTags["foo"]}}, convertedTags) + assert.Equal(t, models.Tags{ + {Name: []byte("baz"), Value: []byte(testTags["baz"])}, + {Name: []byte("foo"), Value: []byte(testTags["foo"])}, + }, convertedTags) } diff --git a/src/query/ts/m3db/storage/local_test.go b/src/query/ts/m3db/storage/local_test.go index edfd690f7b..cd08f20f66 100644 --- a/src/query/ts/m3db/storage/local_test.go +++ b/src/query/ts/m3db/storage/local_test.go @@ -77,13 +77,13 @@ func newFetchReq() *storage.FetchQuery { matchers := models.Matchers{ { Type: models.MatchEqual, - Name: "foo", - Value: "bar", + Name: []byte("foo"), + Value: []byte("bar"), }, { Type: models.MatchEqual, - Name: "biz", - Value: "baz", + Name: []byte("biz"), + Value: []byte("baz"), }, } return &storage.FetchQuery{ @@ -112,7 +112,10 @@ func TestLocalRead(t *testing.T) { assert.Equal(t, "namespace", blocks.Namespace.String()) blockTags, err := storage.FromIdentTagIteratorToTags(blocks.Tags) require.NoError(t, err) - assert.Equal(t, models.Tags{{"baz", "qux"}, {"foo", "bar"}}, blockTags) + assert.Equal(t, models.Tags{ + {Name: []byte("baz"), Value: []byte("qux")}, + {Name: []byte("foo"), Value: []byte("bar")}, + }, blockTags) } } } diff --git a/src/query/ts/series_test.go 
b/src/query/ts/series_test.go index f083517794..b020d7b23c 100644 --- a/src/query/ts/series_test.go +++ b/src/query/ts/series_test.go @@ -30,7 +30,10 @@ import ( ) func TestCreateNewSeries(t *testing.T) { - tags := models.Tags{{"foo", "bar"}, {"biz", "baz"}} + tags := models.Tags{ + {Name: []byte("foo"), Value: []byte("bar")}, + {Name: []byte("biz"), Value: []byte("baz")}, + } values := NewFixedStepValues(1000, 10000, 1, time.Now()) series := NewSeries("metrics", values, tags) diff --git a/src/query/tsdb/remote/codecs.go b/src/query/tsdb/remote/codecs.go index 664a3f9d42..d76e464b91 100644 --- a/src/query/tsdb/remote/codecs.go +++ b/src/query/tsdb/remote/codecs.go @@ -51,8 +51,8 @@ func encodeTags(tags models.Tags) []*rpc.Tag { encodedTags := make([]*rpc.Tag, 0, len(tags)) for _, t := range tags { encodedTags = append(encodedTags, &rpc.Tag{ - Name: []byte(t.Name), - Value: []byte(t.Value), + Name: t.Name, + Value: t.Value, }) } @@ -111,7 +111,7 @@ func DecodeDecompressedFetchResult( func decodeTags(tags []*rpc.Tag) models.Tags { modelTags := make(models.Tags, len(tags)) for i, t := range tags { - modelTags[i] = models.Tag{Name: string(t.GetName()), Value: string(t.GetValue())} + modelTags[i] = models.Tag{Name: t.GetName(), Value: t.GetValue()} } return modelTags @@ -166,8 +166,8 @@ func encodeTagMatchers(modelMatchers models.Matchers) (*rpc.TagMatchers, error) } matchers[i] = &rpc.TagMatcher{ - Name: []byte(matcher.Name), - Value: []byte(matcher.Value), + Name: matcher.Name, + Value: matcher.Value, Type: t, } } @@ -254,7 +254,7 @@ func decodeTagMatchers(rpcMatchers *rpc.TagMatchers) (models.Matchers, error) { matchers := make([]*models.Matcher, len(tagMatchers)) for i, matcher := range tagMatchers { matchType, name, value := models.MatchType(matcher.GetType()), matcher.GetName(), matcher.GetValue() - mMatcher, err := models.NewMatcher(matchType, string(name), string(value)) + mMatcher, err := models.NewMatcher(matchType, name, value) if err != nil { return matchers, err } diff --git a/src/query/tsdb/remote/codecs_test.go b/src/query/tsdb/remote/codecs_test.go index c602bc7d06..6c06c481ab 100644 --- a/src/query/tsdb/remote/codecs_test.go +++ b/src/query/tsdb/remote/codecs_test.go @@ -56,8 +56,10 @@ var ( time1 = "2093-02-06T11:54:48+07:00" - tags0 = models.Tags{{Name: "a", Value: "b"}, {Name: "c", Value: "d"}} - tags1 = models.Tags{{Name: "e", Value: "f"}, {Name: "g", Value: "h"}} + tags0 = models.Tags{{Name: []byte("a"), Value: []byte("b")}, + {Name: []byte("c"), Value: []byte("d")}} + tags1 = models.Tags{{Name: []byte("e"), Value: []byte("f")}, + {Name: []byte("g"), Value: []byte("h")}} ) func parseTimes(t *testing.T) (time.Time, time.Time) { @@ -146,9 +148,9 @@ func readQueriesAreEqual(t *testing.T, this, other *storage.FetchQuery) { } func createStorageFetchQuery(t *testing.T) (*storage.FetchQuery, time.Time, time.Time) { - m0, err := models.NewMatcher(models.MatchRegexp, string(name0), string(val0)) + m0, err := models.NewMatcher(models.MatchRegexp, name0, val0) require.Nil(t, err) - m1, err := models.NewMatcher(models.MatchEqual, string(name1), string(val1)) + m1, err := models.NewMatcher(models.MatchEqual, name1, val1) require.Nil(t, err) start, end := parseTimes(t)
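One consequence of encodeTags and decodeTags passing tag bytes straight through, instead of converting via []byte(...) and string(...), is that the RPC structs now alias the model tags' backing arrays. A self-contained sketch of that trade-off, using hypothetical stand-in types since the generated rpc.Tag definition is outside this diff:

package main

import "fmt"

// rpcTag and modelTag are stand-ins for the generated rpc.Tag and models.Tag
// types; only the byte-slice fields matter for this illustration.
type rpcTag struct{ Name, Value []byte }
type modelTag struct{ Name, Value []byte }

func main() {
	mt := modelTag{Name: []byte("foo"), Value: []byte("bar")}

	// Old behaviour: converting between string and []byte copies the data.
	copied := rpcTag{Name: []byte(string(mt.Name)), Value: []byte(string(mt.Value))}

	// New behaviour: the slices share one backing array, with no copy.
	shared := rpcTag{Name: mt.Name, Value: mt.Value}

	mt.Name[0] = 'F' // mutation is visible through the shared tag only
	fmt.Println(string(copied.Name), string(shared.Name)) // foo Foo
}

The zero-copy path is cheaper, but callers that reuse or pool the model tags should be aware the encoded request still points at the same bytes.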