101873: opt: do not add derived FK equalities to lookup join ON filters r=mgartner a=mgartner

This commit fixes a minor bug, introduced in #90599, that can
unnecessarily add derived FK equality filters to a lookup join's ON
condition. The bug is minor because it does not cause incorrect results.
It only adds a bit of extra work to evaluate the equality. This commit
ensures that the derived FK filters are only used as equality columns in
the lookup join.

Fixes #101844

Release note: None


101908: util/intsets: fix test-only Fast r=mgartner a=mgartner

In #90009, `x/tools/Sparse` was replaced by a new `Sparse` integer set
library. In that commit, we forgot to update the test-only implementation
of `Fast` to use the new library correctly. This was not caught earlier
because CI and roachtests are not run with the `fast_int_set_small` and
`fast_int_set_large` build tags.

Epic: None

Release note: None
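
As a hedged illustration of why this slipped through: the test-only implementation is compiled only when one of those tags is set, so a default `go test` or CI build never type-checks it. The build-constraint sketch below is an assumption inferred from the tag names and the file touched in this commit, not a quote of the real file header:

```go
//go:build fast_int_set_small || fast_int_set_large

// Under this (assumed) constraint, fast_testonly.go would be excluded from
// default builds. Only an invocation such as
//
//	go test -tags fast_int_set_small ./pkg/util/intsets
//
// compiles it, which is why the leftover Insert/Has/IsEmpty calls from the old
// x/tools-based Sparse never failed CI.
package intsets
```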


Co-authored-by: Marcus Gartner <[email protected]>
craig[bot] and mgartner committed Apr 20, 2023
3 parents d9ec744 + f4ed83e + 216c77b commit c44424a
Showing 4 changed files with 77 additions and 34 deletions.
pkg/sql/opt/lookupjoin/constraint_builder.go (19 changes: 10 additions & 9 deletions)
@@ -145,26 +145,27 @@ func (b *ConstraintBuilder) Init(
func (b *ConstraintBuilder) Build(
index cat.Index, onFilters, optionalFilters, derivedFkOnFilters memo.FiltersExpr,
) (_ Constraint, foundEqualityCols bool) {
- onFilters = append(onFilters, derivedFkOnFilters...)
+ // Combine the ON and derived FK filters which can contain equality
+ // conditions.
+ allFilters := make(memo.FiltersExpr, 0, len(onFilters)+len(derivedFkOnFilters)+len(optionalFilters))
+ allFilters = append(allFilters, onFilters...)
+ allFilters = append(allFilters, derivedFkOnFilters...)

- // Extract the equality columns from onFilters. We cannot use the results of
- // the extraction in Init because onFilters may be reduced by the caller
- // after Init due to partial index implication. If the filters are reduced,
- // eqFilterOrds calculated during Init would no longer be valid because the
- // ordinals of the filters will have changed.
+ // Extract the equality columns from the ON and derived FK filters.
leftEq, rightEq, eqFilterOrds :=
- memo.ExtractJoinEqualityColumnsWithFilterOrds(b.leftCols, b.rightCols, onFilters)
+ memo.ExtractJoinEqualityColumnsWithFilterOrds(b.leftCols, b.rightCols, allFilters)
rightEqSet := rightEq.ToSet()

- // Retrieve the inequality columns from onFilters.
+ // Retrieve the inequality columns from the ON and derived FK filters.
var rightCmp opt.ColList
var inequalityFilterOrds []int
if b.evalCtx.SessionData().VariableInequalityLookupJoinEnabled {
rightCmp, inequalityFilterOrds =
memo.ExtractJoinInequalityRightColumnsWithFilterOrds(b.leftCols, b.rightCols, onFilters)
}

- allFilters := append(onFilters, optionalFilters...)
+ // Add the optional filters.
+ allFilters = append(allFilters, optionalFilters...)

// Check if the first column in the index either:
//
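To make the intent of the change above concrete: the ON and derived FK filters are now combined into a fresh, pre-sized allFilters slice that feeds the equality-column extraction, while onFilters itself is left untouched, so the derived equalities can select lookup key columns without ever landing in the join's ON condition. Below is a minimal stand-alone sketch of that split; the Filter type and buildLookupJoin helper are hypothetical illustrations, not the optimizer's real memo or lookupjoin APIs:

```go
package main

import "fmt"

// Filter is a hypothetical stand-in for an optimizer filter expression.
type Filter struct {
	Expr       string
	IsEquality bool
}

// buildLookupJoin mimics the shape of the fix: equality key columns are chosen
// from the ON filters plus the derived FK filters, but the residual ON
// condition is assembled from the original ON filters only, so derived
// equalities are never re-evaluated per looked-up row.
func buildLookupJoin(on, derivedFK []Filter) (keyCols, residualON []string) {
	combined := make([]Filter, 0, len(on)+len(derivedFK))
	combined = append(combined, on...)
	combined = append(combined, derivedFK...)

	usedAsKey := make(map[string]bool)
	for _, f := range combined {
		if f.IsEquality {
			keyCols = append(keyCols, f.Expr)
			usedAsKey[f.Expr] = true
		}
	}
	for _, f := range on {
		if !usedAsKey[f.Expr] {
			residualON = append(residualON, f.Expr)
		}
	}
	return keyCols, residualON
}

func main() {
	on := []Filter{
		{Expr: "c.p_id = p.id", IsEquality: true},
		{Expr: "c.id > 100"},
	}
	derived := []Filter{{Expr: "c.r = p.r", IsEquality: true}} // derived FK equality
	keys, residual := buildLookupJoin(on, derived)
	fmt.Println("key columns:", keys)     // [c.p_id = p.id c.r = p.r]
	fmt.Println("residual ON:", residual) // [c.id > 100], no derived filter
}
```

The regression test added below checks the same property on the real plan: the derived c.r = p.r filter must not reappear in the lookup join's ON condition.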
pkg/sql/opt/xform/testdata/rules/join (48 changes: 48 additions & 0 deletions)
@@ -4618,6 +4618,54 @@ inner-join (lookup t63735)
└── filters
└── y:7 = 15 [outer=(7), constraints=(/7: [/15 - /15]; tight), fd=()-->(7)]

# Regression test for #101844. Derived FK equalities should not be added to the
# ON filters of a lookup join.
exec-ddl
CREATE TABLE p101844 (
r INT,
id INT,
PRIMARY KEY (r, id),
UNIQUE WITHOUT INDEX (id)
)
----

exec-ddl
CREATE TABLE c101844 (
r INT,
p_id INT,
id INT,
PRIMARY KEY (r, p_id, id),
UNIQUE INDEX c_p_id_id_key (p_id, id),
FOREIGN KEY (r, p_id) REFERENCES p101844 (r, id)
)
----

# The derived c.r = p.r filters should not be added to the lookup join ON
# condition if they aren't used as equality columns.
opt
SELECT *
FROM p101844 p LEFT LOOKUP JOIN c101844@c_p_id_id_key c
ON c.p_id = p.id AND c.id = 1234
----
left-join (lookup c101844@c_p_id_id_key [as=c])
├── columns: r:1!null id:2!null r:5 p_id:6 id:7
├── flags: force lookup join (into right side)
├── key columns: [2 10] = [6 7]
├── lookup columns are key
├── key: (2)
├── fd: (2)-->(1,5-7), (6)-->(5)
├── project
│ ├── columns: "lookup_join_const_col_@7":10!null p.r:1!null p.id:2!null
│ ├── key: (2)
│ ├── fd: ()-->(10), (2)-->(1)
│ ├── scan p101844 [as=p]
│ │ ├── columns: p.r:1!null p.id:2!null
│ │ ├── key: (2)
│ │ └── fd: (2)-->(1)
│ └── projections
│ └── 1234 [as="lookup_join_const_col_@7":10]
└── filters (true)


# --------------------------------------------------
# GenerateLookupJoinsWithVirtualCols
pkg/sql/opt/xform/testdata/rules/limit (8 changes: 2 additions & 6 deletions)
@@ -2796,9 +2796,7 @@ project
│ │ │ │ └── fd: ()-->(1,2), (5)-->(3,4,9)
│ │ │ └── filters
│ │ │ ├── t2.col2:20 = 1 [outer=(20), constraints=(/20: [/1 - /1]; tight), fd=()-->(20)]
- │ │ │ ├── t2.col1:19 = 1 [outer=(19), constraints=(/19: [/1 - /1]; tight), fd=()-->(19)]
- │ │ │ ├── t2.col3:21 = t1.col3:3 [outer=(3,21), constraints=(/3: (/NULL - ]; /21: (/NULL - ]), fd=(3)==(21), (21)==(3)]
- │ │ │ └── t2.col4:22 = t1.col4:4 [outer=(4,22), constraints=(/4: (/NULL - ]; /22: (/NULL - ]), fd=(4)==(22), (22)==(4)]
+ │ │ │ └── t2.col1:19 = 1 [outer=(19), constraints=(/19: [/1 - /1]; tight), fd=()-->(19)]
│ │ └── aggregations
│ │ ├── array-agg [as=array_agg:27, outer=(24)]
│ │ │ └── t2.col6:24
@@ -2953,9 +2951,7 @@ limit
│ │ │ └── fd: ()-->(1,2), (5)-->(3,4,9)
│ │ └── filters
│ │ ├── t2.col2:20 = 1 [outer=(20), constraints=(/20: [/1 - /1]; tight), fd=()-->(20)]
- │ │ ├── t2.col1:19 = 1 [outer=(19), constraints=(/19: [/1 - /1]; tight), fd=()-->(19)]
- │ │ ├── t2.col3:21 = t1.col3:3 [outer=(3,21), constraints=(/3: (/NULL - ]; /21: (/NULL - ]), fd=(3)==(21), (21)==(3)]
- │ │ └── t2.col4:22 = t1.col4:4 [outer=(4,22), constraints=(/4: (/NULL - ]; /22: (/NULL - ]), fd=(4)==(22), (22)==(4)]
+ │ │ └── t2.col1:19 = 1 [outer=(19), constraints=(/19: [/1 - /1]; tight), fd=()-->(19)]
│ └── aggregations
│ └── first-agg [as=t2.col2:20, outer=(20)]
│ └── t2.col2:20
pkg/util/intsets/fast_testonly.go (36 changes: 17 additions & 19 deletions)
@@ -56,7 +56,7 @@ func (s *Fast) prepareForMutation() {
// Add adds a value to the set. No-op if the value is already in the set.
func (s *Fast) Add(i int) {
s.prepareForMutation()
- s.s.Insert(i)
+ s.s.Add(i)
}

// AddRange adds values 'from' up to 'to' (inclusively) to the set.
@@ -66,7 +66,7 @@ func (s *Fast) Add(i int) {
func (s *Fast) AddRange(from, to int) {
s.prepareForMutation()
for i := from; i <= to; i++ {
- s.s.Insert(i)
+ s.s.Add(i)
}
}

@@ -78,12 +78,12 @@ func (s *Fast) Remove(i int) {

// Contains returns true if the set contains the value.
func (s Fast) Contains(i int) bool {
- return s.s != nil && s.s.Has(i)
+ return s.s != nil && s.s.Contains(i)
}

// Empty returns true if the set is empty.
func (s Fast) Empty() bool {
- return s.s == nil || s.s.IsEmpty()
+ return s.s == nil || s.s.Empty()
}

// Len returns the number of the elements in the set.
@@ -119,7 +119,11 @@ func (s Fast) Ordered() []int {
if s.Empty() {
return nil
}
- return s.s.AppendTo([]int(nil))
+ result := make([]int, 0, s.Len())
+ s.ForEach(func(i int) {
+ result = append(result, i)
+ })
+ return result
}

// Copy returns a copy of s which can be modified independently.
@@ -213,16 +217,6 @@ func (s Fast) SubsetOf(rhs Fast) bool {
return s.s.SubsetOf(rhs.s)
}

- // Shift generates a new set which contains elements i+delta for elements i in
- // the original set.
- func (s *Fast) Shift(delta int) Fast {
- n := &Sparse{}
- s.ForEach(func(i int) {
- n.Insert(i + delta)
- })
- return Fast{s: n}
- }

// Encode the set and write it to a bytes.Buffer using binary.varint byte
// encoding.
//
@@ -243,7 +237,14 @@ func (s *Fast) Encode(buf *bytes.Buffer) error {
//gcassert:noescape
tmp := make([]byte, binary.MaxVarintLen64+1)

- if s.s == nil || s.s.Max() < 64 {
+ max := MinInt
+ s.ForEach(func(i int) {
+ if i > max {
+ max = i
+ }
+ })
+
+ if s.s == nil || max < 64 {
n := binary.PutUvarint(tmp, 0)
var bitmap uint64
for i, ok := s.Next(0); ok; i, ok = s.Next(i + 1) {
@@ -296,6 +297,3 @@ func (s *Fast) Decode(br io.ByteReader) error {
}
return nil
}

- // This is defined to allow the test to build.
- const smallCutoff = 10
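
For orientation, the renames the test-only Fast had missed map the old x/tools sparse-set methods onto the new library: Insert is now Add, Has is Contains, and IsEmpty is Empty. The short sketch below uses only methods that appear in the diff above; it assumes it is built inside the cockroachdb/cockroach module so that the import path resolves:

```go
package main

import (
	"fmt"

	"github.com/cockroachdb/cockroach/pkg/util/intsets"
)

func main() {
	// A zero-value Sparse serves as an empty set (the test-only Fast
	// allocates one with &Sparse{} before mutating it).
	var s intsets.Sparse
	fmt.Println(s.Empty()) // true

	// Old x/tools API: Insert / Has      / IsEmpty.
	// New intsets API: Add    / Contains / Empty.
	s.Add(1)
	s.Add(200)
	fmt.Println(s.Contains(1), s.Contains(2)) // true false
	fmt.Println(s.Empty())                    // false
}
```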
