Merge pull request #43072 from cockroachdb/revert-42934-backport19.2-42616

Revert "release-19.2: colexec: skip over unneeded columns with unknown types in cfetcher"
asubiotto authored Dec 10, 2019
2 parents c85d435 + 834163c commit 36db1f3
Showing 8 changed files with 40 additions and 190 deletions.
7 changes: 2 additions & 5 deletions pkg/col/coldata/batch.go
@@ -64,8 +64,7 @@ func BatchSize() uint16 {
 	return batchSize
 }
 
-// NewMemBatch allocates a new in-memory Batch. A coltypes.Unknown type
-// will create a placeholder Vec that may not be accessed.
+// NewMemBatch allocates a new in-memory Batch.
 // TODO(jordan): pool these allocations.
 func NewMemBatch(types []coltypes.T) Batch {
 	return NewMemBatchWithSize(types, int(BatchSize()))
@@ -180,9 +179,7 @@ func (m *MemBatch) Reset(types []coltypes.T, length int) {
 func (m *MemBatch) ResetInternalBatch() {
 	m.SetSelection(false)
 	for _, v := range m.b {
-		if v.Type() != coltypes.Unhandled {
-			v.Nulls().UnsetNulls()
-		}
+		v.Nulls().UnsetNulls()
 		if v.Type() == coltypes.Bytes {
 			v.Bytes().Reset()
 		}
97 changes: 0 additions & 97 deletions pkg/col/coldata/unknown_vec.go

This file was deleted.
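The deleted pkg/col/coldata/unknown_vec.go is not rendered on this page. For context only: per the NewMemBatch comment and the `return unknown{}` case removed from vec.go below, it implemented a placeholder Vec for columns whose types the vectorized engine cannot handle and which must never be accessed. The following is a hypothetical, heavily reduced Go sketch of that placeholder pattern, not the contents of the deleted file; the MiniVec interface and every name in it are invented for illustration.

    package main

    import "fmt"

    // MiniVec is an invented stand-in for the real coldata.Vec interface,
    // shrunk to two methods purely to illustrate the placeholder pattern.
    type MiniVec interface {
    	Int64() []int64
    	Length() int
    }

    // unknownVec satisfies MiniVec but panics on any access; it exists only so
    // that a batch can carry a column of a type the engine does not handle.
    type unknownVec struct{}

    func (unknownVec) Int64() []int64 { panic("unknown vec: unhandled-type column must not be accessed") }
    func (unknownVec) Length() int    { panic("unknown vec: unhandled-type column must not be accessed") }

    func main() {
    	vecs := []MiniVec{unknownVec{}}
    	// Touching the placeholder is a programming error and panics.
    	defer func() { fmt.Println("recovered:", recover()) }()
    	_ = vecs[0].Length()
    }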

2 changes: 0 additions & 2 deletions pkg/col/coldata/vec.go
@@ -156,8 +156,6 @@ func NewMemColumn(t coltypes.T, n int) Vec {
 		return &memColumn{t: t, col: make([]apd.Decimal, n), nulls: nulls}
 	case coltypes.Timestamp:
 		return &memColumn{t: t, col: make([]time.Time, n), nulls: nulls}
-	case coltypes.Unhandled:
-		return unknown{}
 	default:
 		panic(fmt.Sprintf("unhandled type %s", t))
 	}
14 changes: 7 additions & 7 deletions pkg/sql/colexec/cfetcher.go
@@ -57,6 +57,9 @@ type cTableInfo struct {
 	// schema changes.
 	cols []sqlbase.ColumnDescriptor
 
+	// The exec types corresponding to the table columns in cols.
+	typs []coltypes.T
+
 	// The ordered list of ColumnIDs that are required.
 	neededColsList []int
 
@@ -264,9 +267,7 @@ func (rf *cFetcher) Init(
 	typs := make([]coltypes.T, len(colDescriptors))
 	for i := range typs {
 		typs[i] = typeconv.FromColumnType(&colDescriptors[i].Type)
-		if typs[i] == coltypes.Unhandled && tableArgs.ValNeededForCol.Contains(i) {
-			// Only return an error if the type is unhandled and needed. If not needed,
-			// a placeholder Vec will be created.
+		if typs[i] == coltypes.Unhandled {
 			return errors.Errorf("unhandled type %+v", &colDescriptors[i].Type)
 		}
 	}
@@ -277,6 +278,7 @@ func (rf *cFetcher) Init(
 		index: tableArgs.Index,
 		isSecondaryIndex: tableArgs.IsSecondaryIndex,
 		cols: colDescriptors,
+		typs: typs,
 
 		// These slice fields might get re-allocated below, so reslice them from
 		// the old table here in case they've got enough capacity already.
@@ -592,10 +594,8 @@ func (rf *cFetcher) NextBatch(ctx context.Context) (coldata.Batch, error) {
 			rf.machine.state[0] = stateDecodeFirstKVOfRow
 
 		case stateResetBatch:
-			for _, colvec := range rf.machine.colvecs {
-				if colvec.Type() != coltypes.Unhandled {
-					colvec.Nulls().UnsetNulls()
-				}
+			for i := range rf.machine.colvecs {
+				rf.machine.colvecs[i].Nulls().UnsetNulls()
 			}
 			rf.machine.batch.ResetInternalBatch()
 			rf.shiftState()
2 changes: 0 additions & 2 deletions pkg/sql/colexec/mem_estimation.go
@@ -66,8 +66,6 @@ func EstimateBatchSizeBytes(vecTypes []coltypes.T, batchLength int) int {
 			// significantly overestimate.
 			// TODO(yuzefovich): figure out whether the caching does take place.
 			acc += sizeOfTime
-		case coltypes.Unhandled:
-			// Placeholder coldata.Vecs of unknown types are allowed.
 		default:
 			execerror.VectorizedInternalPanic(fmt.Sprintf("unhandled type %s", t))
 		}
46 changes: 0 additions & 46 deletions pkg/sql/logictest/testdata/logic_test/vectorize_types
@@ -55,53 +55,7 @@ SELECT * FROM all_types ORDER BY 1
 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL
 false 123 2019-10-22 00:00:00 +0000 +0000 1.23 123 123 123 123 1.23 123 63616665-6630-3064-6465-616462656562 2001-01-18 01:00:00.001 +0000 +0000
 
-statement ok
-CREATE TABLE skip_unneeded_cols (
-  _id UUID,
-  _id2 INT8,
-  _float FLOAT8,
-  _unsupported1 INT ARRAY,
-  _bool BOOL,
-  _unsupported2 INT ARRAY,
-  _bool2 BOOL,
-  PRIMARY KEY(_id, _id2)
-)
-
-statement ok
-INSERT INTO skip_unneeded_cols VALUES ('63616665-6630-3064-6465-616462656562', 1, '1.2', NULL, true, NULL, false)
-
-statement ok
-SET vectorize=experimental_always
-
-statement error pq: unable to vectorize execution plan: unhandled type int\[\]
-SELECT _unsupported1 FROM skip_unneeded_cols
-
-query IBB
-SELECT _id2, _bool, _bool2 FROM skip_unneeded_cols
-----
-1 true false
-
-statement ok
-RESET vectorize
-
-# This query uses a builtin that returns currently unsupported type
-# (TimestampTZ). We're only interested in not getting an error. See #42871.
-statement ok
-SELECT experimental_strptime(_string, _string) IS NULL FROM all_types
-
-statement ok
-CREATE TABLE unsupported_type (id INT PRIMARY KEY, unsupported INT ARRAY)
-
-statement ok
-INSERT INTO unsupported_type (id) SELECT * FROM generate_series(1, 2000)
-
-statement ok
-SET vectorize=experimental_always
-
-# This query makes sure that CFetcher when reading from a table with an
-# unhandled type (that isn't needed) is reset correctly between batches.
-statement ok
-SELECT id FROM unsupported_type LIMIT 1 OFFSET 1100
-
 statement ok
 RESET vectorize
