Skip to content

Commit

Permalink
Merge pull request cockroachdb#21021 from RaduBerinde/index-cols
Browse files Browse the repository at this point in the history
opt: support arbitrary index columns
  • Loading branch information
RaduBerinde authored Dec 23, 2017
2 parents ed755fc + a48fb4a commit ed342d1
Show file tree
Hide file tree
Showing 5 changed files with 234 additions and 170 deletions.
18 changes: 8 additions & 10 deletions pkg/sql/opt/index_constraints.go
Original file line number Diff line number Diff line change
Expand Up @@ -170,7 +170,7 @@ func (c *indexConstraintCalc) makeSpansForTupleInequality(
prefixLen := 0
dir := c.colInfos[offset].Direction
for i := range lhs.children {
if !isIndexedVar(lhs.children[i], offset+i) {
if !c.isIndexColumn(lhs.children[i], offset+i) {
// Variable doesn't refer to the right column.
break
}
Expand Down Expand Up @@ -269,7 +269,7 @@ func (c *indexConstraintCalc) makeSpansForTupleIn(offset int, e *expr) (LogicalS
Outer:
for i := offset; i < len(c.colInfos); i++ {
for j := range lhs.children {
if isIndexedVar(lhs.children[j], i) {
if c.isIndexColumn(lhs.children[j], i) {
tuplePos = append(tuplePos, j)
continue Outer
}
Expand Down Expand Up @@ -331,7 +331,7 @@ func (c *indexConstraintCalc) makeSpansForExpr(offset int, e *expr) (LogicalSpan
}
// Check for an operation where the left-hand side is an
// indexed var for this column.
if isIndexedVar(e.children[0], offset) {
if c.isIndexColumn(e.children[0], offset) {
return c.makeSpansForSingleColumn(offset, e.op, e.children[1])
}
// Check for tuple operations.
Expand All @@ -348,7 +348,7 @@ func (c *indexConstraintCalc) makeSpansForExpr(offset int, e *expr) (LogicalSpan

// Last resort: for conditions like a > b, our column can appear on the right
// side. We can deduce a not-null constraint from such conditions.
if c.colInfos[offset].Nullable && isIndexedVar(e.children[1], offset) {
if c.colInfos[offset].Nullable && c.isIndexColumn(e.children[1], offset) {
switch e.op {
case eqOp, ltOp, leOp, gtOp, geOp, neOp:
return LogicalSpans{c.makeNotNullSpan(offset)}, true
Expand Down Expand Up @@ -517,20 +517,18 @@ func (c *indexConstraintCalc) calcOffset(offset int) LogicalSpans {
// IndexColumnInfo encompasses the information for index columns, needed for
// index constraints.
type IndexColumnInfo struct {
	// VarIdx identifies the indexed var that corresponds to this column.
	VarIdx int
	// Typ is the datum type of the column.
	Typ types.T
	// Direction is the ordering direction of the column within the index.
	Direction encoding.Direction
	// Nullable should be set to false if this column cannot store NULLs; used
	// to keep the spans simple, e.g. [ - /5] instead of (/NULL - /5].
	Nullable bool
}

// MakeIndexConstraints generates constraints from a scalar boolean filter
// expression. See LogicalSpans for more information on how constraints are
// represented.
//
// TODO(radu): for now we assume the index columns are always columns
// @1, @2, @3, etc. Eventually we will need to pass in a list of column
// indices.
func MakeIndexConstraints(
filter *expr, colInfos []IndexColumnInfo, evalCtx *tree.EvalContext,
) LogicalSpans {
Expand Down
6 changes: 6 additions & 0 deletions pkg/sql/opt/index_constraints_spans.go
Original file line number Diff line number Diff line change
Expand Up @@ -141,6 +141,12 @@ type indexConstraintCtx struct {
evalCtx *tree.EvalContext
}

// isIndexColumn returns true if e is the indexed var that corresponds to
// index column <index>, i.e. the var identified by that column's VarIdx.
func (c *indexConstraintCtx) isIndexColumn(e *expr, index int) bool {
	return isIndexedVar(e, c.colInfos[index].VarIdx)
}

// compareKeyVals compares two lists of values for a sequence of index columns
// (namely <offset>, <offset+1>, ...). The directions of the index columns are
// taken into account.
Expand Down
63 changes: 48 additions & 15 deletions pkg/sql/opt/opt_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -48,9 +48,15 @@ package opt
// If present, build-scalar must have been an earlier command.
//
// The supported arguments are:
// - columns=(<type> [ascending|asc|descending|desc], ...)
//
// Sets the types of index var columns, and optionally direction.
// - vars=(<type>, ...)
//
// Sets the types for the index vars in the expression.
//
// - index=(@<index> [ascending|asc|descending|desc] [not null], ...)
//
// Information for the index (used by index-constraints). Each column of the
// index refers to an index var.

import (
"bufio"
Expand All @@ -62,6 +68,7 @@ import (
"os"
"path/filepath"
"regexp"
"strconv"
"strings"
"testing"

Expand Down Expand Up @@ -274,6 +281,7 @@ func TestOpt(t *testing.T) {
t.Run(filepath.Base(path), func(t *testing.T) {
runTest(t, path, func(d *testdata) string {
var e *expr
var varTypes []types.T
var colInfos []IndexColumnInfo
var typedExpr tree.TypedExpr

Expand All @@ -287,18 +295,26 @@ func TestOpt(t *testing.T) {
if len(val) > 2 && val[0] == '(' && val[len(val)-1] == ')' {
val = val[1 : len(val)-1]
}
vals := strings.Split(val, ",")
switch key {
case "columns":
case "vars":
varTypes, err = parseTypes(vals)
if err != nil {
d.fatalf(t, "%v", err)
}
case "index":
if varTypes == nil {
d.fatalf(t, "vars must precede index")
}
var err error
colInfos, err = parseColumns(strings.Split(val, ","))
colInfos, err = parseIndexColumns(varTypes, vals)
if err != nil {
d.fatalf(t, "%v", err)
}
default:
d.fatalf(t, "unknown argument: %s", key)
}
}

buildScalarFn := func() {
defer func() {
if r := recover(); r != nil {
Expand All @@ -310,7 +326,7 @@ func TestOpt(t *testing.T) {

evalCtx := tree.MakeTestingEvalContext()
var err error
typedExpr, err = parseScalarExpr(d.sql, colInfos)
typedExpr, err = parseScalarExpr(d.sql, varTypes)
if err != nil {
d.fatalf(t, "%v", err)
}
Expand Down Expand Up @@ -359,18 +375,38 @@ func parseType(typeStr string) (types.T, error) {
return coltypes.CastTargetToDatumType(colType), nil
}

// parseTypes parses a list of type name strings (e.g. "int", "string") into
// the corresponding datum types, preserving order. It returns an error if any
// entry fails to parse.
func parseTypes(colStrs []string) ([]types.T, error) {
	res := make([]types.T, len(colStrs))
	for i, s := range colStrs {
		var err error
		res[i], err = parseType(s)
		if err != nil {
			return nil, err
		}
	}
	return res, nil
}

// parseIndexColumns parses descriptions of index columns; each
// string corresponds to an index column and is of the form:
// @<index> [ascending|asc|descending|desc] [not null]
func parseColumns(colStrs []string) ([]IndexColumnInfo, error) {
func parseIndexColumns(indexVarTypes []types.T, colStrs []string) ([]IndexColumnInfo, error) {
res := make([]IndexColumnInfo, len(colStrs))
for i := range colStrs {
fields := strings.Fields(colStrs[i])
var err error
res[i].Typ, err = parseType(fields[0])
if fields[0][0] != '@' {
return nil, fmt.Errorf("index column must start with @<index>")
}
idx, err := strconv.Atoi(fields[0][1:])
if err != nil {
return nil, err
}
if idx < 1 || idx > len(indexVarTypes) {
return nil, fmt.Errorf("invalid index var @%d", idx)
}
res[i].VarIdx = idx - 1
res[i].Typ = indexVarTypes[res[i].VarIdx]
res[i].Direction = encoding.Ascending
res[i].Nullable = true
fields = fields[1:]
Expand Down Expand Up @@ -418,17 +454,14 @@ func (*indexedVars) IndexedVarNodeFormatter(idx int) tree.NodeFormatter {
panic("unimplemented")
}

func parseScalarExpr(sql string, indexVarCols []IndexColumnInfo) (tree.TypedExpr, error) {
func parseScalarExpr(sql string, varTypes []types.T) (tree.TypedExpr, error) {
expr, err := parser.ParseExpr(sql)
if err != nil {
return nil, err
}

// Set up an indexed var helper so we can type-check the expression.
iv := &indexedVars{types: make([]types.T, len(indexVarCols))}
for i, colInfo := range indexVarCols {
iv.types[i] = colInfo.Typ
}
iv := &indexedVars{types: varTypes}

sema := tree.MakeSemaContext(false /* privileged */)
iVarHelper := tree.MakeIndexedVarHelper(iv, len(iv.types))
Expand Down
Loading

0 comments on commit ed342d1

Please sign in to comment.