Make it possible to update all tests in one pass
Signed-off-by: Andres Taylor <[email protected]>
systay committed Feb 28, 2020
1 parent a9d7156 commit 77cc35e
Showing 10 changed files with 652 additions and 244 deletions.
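
The core idea of the change is a golden-file workflow: every test case's current output is collected while a file runs, and if any case fails, a complete regenerated expectation file is written to a temporary directory so all of testdata/ can be refreshed with a single copy. The sketch below illustrates that pattern in isolation; it is not code from this commit, and every name in it (runGoldenFile, produce, upper_cases.txt) is hypothetical.

package golden_test

import (
	"fmt"
	"io/ioutil"
	"os"
	"path/filepath"
	"strings"
	"testing"
)

type goldenCase struct {
	input string
	want  string
}

// runGoldenFile checks every case and, when anything mismatches, writes a
// complete regenerated expectation file into tempDir so the checked-in file
// can be replaced in one step.
func runGoldenFile(t *testing.T, filename, tempDir string, cases []goldenCase, produce func(string) string) {
	t.Run(filename, func(t *testing.T) {
		regenerated := &strings.Builder{}
		fail := false
		for _, tc := range cases {
			got := produce(tc.input)
			if got != tc.want {
				fail = true
				t.Errorf("%q: got %q, want %q", tc.input, got, tc.want)
			}
			// Record the current output for every case, not just the failing
			// ones, so the regenerated file is complete.
			fmt.Fprintf(regenerated, "%q\n%s\n\n", tc.input, got)
		}
		if fail && tempDir != "" {
			gotFile := filepath.Join(tempDir, filename)
			if err := ioutil.WriteFile(gotFile, []byte(strings.TrimSpace(regenerated.String())+"\n"), 0644); err != nil {
				t.Fatal(err)
			}
			fmt.Printf("regenerated expectations written to %s; copy them over testdata/ if they are correct\n", gotFile)
		}
	})
}

func TestGoldenSketch(t *testing.T) {
	tempDir, err := ioutil.TempDir("", "golden_sketch")
	if err != nil {
		t.Fatal(err)
	}
	defer os.RemoveAll(tempDir)

	cases := []goldenCase{{"hello", "HELLO"}, {"world", "WORLD"}}
	runGoldenFile(t, "upper_cases.txt", tempDir, cases, strings.ToUpper)
}

With this shape, a failing run prints the temp-dir path and a single cp of its contents over testdata/ updates every expectation at once, which is what the new testFile in the diff below does for the planner test files.
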
111 changes: 59 additions & 52 deletions go/vt/vtgate/planbuilder/plan_test.go
@@ -18,14 +18,17 @@ package planbuilder

import (
"bufio"
"bytes"
"encoding/json"
"fmt"
"io"
"io/ioutil"
"os"
"strings"
"testing"

"github.com/google/go-cmp/cmp"
"github.com/stretchr/testify/require"

"vitess.io/vitess/go/sqltypes"
"vitess.io/vitess/go/vt/key"
"vitess.io/vitess/go/vt/sqlparser"
@@ -140,29 +143,30 @@ func init() {

func TestPlan(t *testing.T) {
vschema := loadSchema(t, "schema_test.json")

testOutputTempDir, err := ioutil.TempDir("", "plan_test")
require.NoError(t, err)
// You will notice that some tests expect user.Id instead of user.id.
// This is because we now pre-create vindex columns in the symbol
// table, which come from vschema. In the test vschema,
// the column is named as Id. This is to make sure that
// column names are case-preserved, but treated as
// case-insensitive even if they come from the vschema.
testFile(t, "aggr_cases.txt", vschema)
testFile(t, "dml_cases.txt", vschema)
testFile(t, "from_cases.txt", vschema)
testFile(t, "filter_cases.txt", vschema)
testFile(t, "postprocess_cases.txt", vschema)
testFile(t, "select_cases.txt", vschema)
testFile(t, "symtab_cases.txt", vschema)
testFile(t, "unsupported_cases.txt", vschema)
testFile(t, "vindex_func_cases.txt", vschema)
testFile(t, "wireup_cases.txt", vschema)
testFile(t, "memory_sort_cases.txt", vschema)
testFile(t, "aggr_cases.txt", testOutputTempDir, vschema)
testFile(t, "dml_cases.txt", testOutputTempDir, vschema)
testFile(t, "from_cases.txt", testOutputTempDir, vschema)
testFile(t, "filter_cases.txt", testOutputTempDir, vschema)
testFile(t, "postprocess_cases.txt", testOutputTempDir, vschema)
testFile(t, "select_cases.txt", testOutputTempDir, vschema)
testFile(t, "symtab_cases.txt", testOutputTempDir, vschema)
testFile(t, "unsupported_cases.txt", testOutputTempDir, vschema)
testFile(t, "vindex_func_cases.txt", testOutputTempDir, vschema)
testFile(t, "wireup_cases.txt", testOutputTempDir, vschema)
testFile(t, "memory_sort_cases.txt", testOutputTempDir, vschema)
}

func TestOne(t *testing.T) {
vschema := loadSchema(t, "schema_test.json")
testFile(t, "onecase.txt", vschema)
testFile(t, "onecase.txt", "", vschema)
}

func loadSchema(t *testing.T, filename string) *vindexes.VSchema {
@@ -225,35 +229,48 @@ type testPlan struct {
Instructions engine.Primitive `json:",omitempty"`
}

func testFile(t *testing.T, filename string, vschema *vindexes.VSchema) {
for tcase := range iterateExecFile(filename) {
t.Run(tcase.comments, func(t *testing.T) {
plan, err := Build(tcase.input, &vschemaWrapper{
v: vschema,
})
var out string
if err != nil {
out = err.Error()
} else {
bout, _ := json.Marshal(testPlan{
Original: plan.Original,
Instructions: plan.Instructions,
func testFile(t *testing.T, filename, tempDir string, vschema *vindexes.VSchema) {
t.Run(filename, func(t *testing.T) {
expected := &strings.Builder{}
fail := false
for tcase := range iterateExecFile(filename) {
t.Run(tcase.comments, func(t *testing.T) {
plan, err := Build(tcase.input, &vschemaWrapper{
v: vschema,
})
out = string(bout)
}
if out != tcase.output {
t.Errorf("File: %s, Line:%v\n got:\n%s, \nwant:\n%s", filename, tcase.lineno, out, tcase.output)
// Uncomment these lines to re-generate input files

out := getPlanOrErrorOutput(err, plan)

if out != tcase.output {
fail = true
t.Errorf("File: %s, Line: %v\n %s", filename, tcase.lineno, cmp.Diff(out, tcase.output))
}

if err != nil {
out = fmt.Sprintf("\"%s\"", out)
} else {
bout, _ := json.MarshalIndent(plan, "", " ")
out = string(bout)
out = `"` + out + `"`
}
fmt.Printf("%s\"%s\"\n%s\n\n", tcase.comments, tcase.input, out)
}
})

expected.WriteString(fmt.Sprintf("%s\"%s\"\n%s\n\n", tcase.comments, tcase.input, out))

})
}
if fail && tempDir != "" {
gotFile := fmt.Sprintf("%s/%s", tempDir, filename)
ioutil.WriteFile(gotFile, []byte(strings.TrimSpace(expected.String())+"\n"), 0644)
fmt.Println(fmt.Sprintf("Errors found in plantests. If the output is correct, run `cp %s/* testdata/` to update test expectations", tempDir))
}
})
}

func getPlanOrErrorOutput(err error, plan *engine.Plan) string {
if err != nil {
return err.Error()
}
bout, _ := json.MarshalIndent(testPlan{
Original: plan.Original,
Instructions: plan.Instructions,
}, "", " ")
return string(bout)
}

type testCase struct {
@@ -281,8 +298,7 @@ func iterateExecFile(name string) (testCaseIterator chan testCase) {
binput, err := r.ReadBytes('\n')
if err != nil {
if err != io.EOF {
fmt.Printf("Line: %d\n", lineno)
panic(fmt.Errorf("error reading file %s: %s", name, err.Error()))
panic(fmt.Errorf("error reading file %s: line %d: %s", name, lineno, err.Error()))
}
break
}
@@ -297,28 +313,19 @@ func iterateExecFile(name string) (testCaseIterator chan testCase) {
}
err = json.Unmarshal(binput, &input)
if err != nil {
fmt.Printf("Line: %d, input: %s\n", lineno, binput)
panic(err)
panic(fmt.Sprintf("Line: %d, input: %s, error: %v\n", lineno, binput, err))
}
input = strings.Trim(input, "\"")
var output []byte
for {
l, err := r.ReadBytes('\n')
lineno++
if err != nil {
fmt.Printf("Line: %d\n", lineno)
panic(fmt.Errorf("error reading file %s: %s", name, err.Error()))
panic(fmt.Sprintf("error reading file %s line# %d: %s", name, lineno, err.Error()))
}
output = append(output, l...)
if l[0] == '}' {
output = output[:len(output)-1]
b := bytes.NewBuffer(make([]byte, 0, 64))
err := json.Compact(b, output)
if err == nil {
output = b.Bytes()
} else {
panic("Invalid JSON " + string(output) + err.Error())
}
break
}
if l[0] == '"' {
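
plan_test.go now reports mismatches through cmp.Diff from github.com/google/go-cmp/cmp instead of printing the full got and want blobs. Below is a standalone illustration of what that call produces; the plan struct here is a simplified stand-in for the real engine.Plan, not code from this commit.

package main

import (
	"fmt"

	"github.com/google/go-cmp/cmp"
)

// plan is a simplified stand-in for the real engine.Plan.
type plan struct {
	Original     string
	Instructions []string
}

func main() {
	want := plan{Original: "select col from unsharded", Instructions: []string{"Route"}}
	got := plan{Original: "select col from unsharded", Instructions: []string{"Route", "Aggregate"}}

	// cmp.Diff returns "" for equal values; otherwise it returns a compact,
	// line-oriented report of exactly which fields differ, which is easier
	// to scan than two full JSON dumps printed side by side.
	fmt.Println(cmp.Diff(want, got))
}
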
39 changes: 22 additions & 17 deletions go/vt/vtgate/planbuilder/testdata/aggr_cases.txt
@@ -1,5 +1,5 @@
# Test cases in this file follow the code in ordered_aggregate.go.

#
# Aggregate on unsharded
"select count(*), col from unsharded"
{
@@ -29,7 +29,9 @@
"Query": "select count(*), col from user where id = 1",
"FieldQuery": "select count(*), col from user where 1 != 1",
"Vindex": "user_index",
"Values": [1],
"Values": [
1
],
"Table": "user"
}
}
@@ -532,7 +534,9 @@
"Query": "select id, count(*) as c from user group by id having id = 1 and c = 10",
"FieldQuery": "select id, count(*) as c from user where 1 != 1 group by id",
"Vindex": "user_index",
"Values": [1],
"Values": [
1
],
"Table": "user"
}
}
@@ -1037,7 +1041,6 @@
}
}


# scatter aggregate group by invalid column number
"select col from user group by 2"
"column number out of range: 2"
@@ -1341,19 +1344,21 @@
# Group by with collate operator
"select user.col1 as a from user where user.id = 5 group by a collate utf8_general_ci"
{
"Original":"select user.col1 as a from user where user.id = 5 group by a collate utf8_general_ci",
"Instructions":{
"Opcode":"SelectEqualUnique",
"Keyspace":{
"Name":"user",
"Sharded":true
},
"Query":"select user.col1 as a from user where user.id = 5 group by a collate utf8_general_ci",
"FieldQuery":"select user.col1 as a from user where 1 != 1 group by a collate utf8_general_ci",
"Vindex":"user_index",
"Values":[5],
"Table": "user"
}
"Original": "select user.col1 as a from user where user.id = 5 group by a collate utf8_general_ci",
"Instructions": {
"Opcode": "SelectEqualUnique",
"Keyspace": {
"Name": "user",
"Sharded": true
},
"Query": "select user.col1 as a from user where user.id = 5 group by a collate utf8_general_ci",
"FieldQuery": "select user.col1 as a from user where 1 != 1 group by a collate utf8_general_ci",
"Vindex": "user_index",
"Values": [
5
],
"Table": "user"
}
}

# routing rules for aggregates where sharded and unsharded match. Unsharded always wins.
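
The churn in aggr_cases.txt (and the other testdata files) is largely cosmetic: expectations are now regenerated with json.MarshalIndent, so single-line values such as "Values": [1] become indented multi-line arrays. A small standalone comparison of the two encodings, using a simplified stand-in struct rather than the real route primitive:

package main

import (
	"encoding/json"
	"fmt"
)

// route is a simplified stand-in for the serialized plan primitive.
type route struct {
	Opcode string
	Values []int
	Table  string
}

func main() {
	r := route{Opcode: "SelectEqualUnique", Values: []int{1}, Table: "user"}

	compact, _ := json.Marshal(r)
	indented, _ := json.MarshalIndent(r, "", "  ")

	// json.Marshal keeps everything on one line ("Values":[1]), while
	// json.MarshalIndent spreads arrays and objects across lines, which is
	// why the regenerated testdata now shows "Values" as a multi-line array.
	fmt.Println(string(compact))
	fmt.Println(string(indented))
}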