diff --git a/pkg/ccl/importccl/read_import_avro.go b/pkg/ccl/importccl/read_import_avro.go index 9f5b5a1ca4fe..4f02d3d8f717 100644 --- a/pkg/ccl/importccl/read_import_avro.go +++ b/pkg/ccl/importccl/read_import_avro.go @@ -210,7 +210,7 @@ func (a *avroConsumer) FillDatums( // Set any nil datums to DNull (in case native // record didn't have the value set at all) for i := range conv.Datums { - if _, isTargetCol := conv.IsTargetCol[i]; isTargetCol && conv.Datums[i] == nil { + if conv.TargetColOrds.Contains(i) && conv.Datums[i] == nil { if a.strict { return fmt.Errorf("field %s was not set in the avro import", conv.VisibleCols[i].Name) } diff --git a/pkg/ccl/importccl/read_import_csv.go b/pkg/ccl/importccl/read_import_csv.go index 8fe264cd119c..bcb2785b1293 100644 --- a/pkg/ccl/importccl/read_import_csv.go +++ b/pkg/ccl/importccl/read_import_csv.go @@ -180,7 +180,7 @@ func (c *csvRowConsumer) FillDatums( for i, field := range record { // Skip over record entries corresponding to columns not in the target // columns specified by the user. 
- if _, ok := conv.IsTargetCol[i]; !ok { + if !conv.TargetColOrds.Contains(i) { continue } diff --git a/pkg/ccl/importccl/read_import_pgdump.go b/pkg/ccl/importccl/read_import_pgdump.go index 851eb573a105..84c698311b88 100644 --- a/pkg/ccl/importccl/read_import_pgdump.go +++ b/pkg/ccl/importccl/read_import_pgdump.go @@ -31,6 +31,7 @@ import ( "github.com/cockroachdb/cockroach/pkg/sql/rowenc" "github.com/cockroachdb/cockroach/pkg/sql/sem/tree" "github.com/cockroachdb/cockroach/pkg/storage/cloud" + "github.com/cockroachdb/cockroach/pkg/util" "github.com/cockroachdb/cockroach/pkg/util/ctxgroup" "github.com/cockroachdb/cockroach/pkg/util/errorutil/unimplemented" "github.com/cockroachdb/cockroach/pkg/util/hlc" @@ -672,21 +673,21 @@ func (m *pgDumpReader) readFile( var targetColMapIdx []int if len(i.Columns) != 0 { targetColMapIdx = make([]int, len(i.Columns)) - conv.IsTargetCol = make(map[int]struct{}, len(i.Columns)) + conv.TargetColOrds = util.FastIntSet{} for j := range i.Columns { colName := string(i.Columns[j]) idx, ok := m.colMap[conv][colName] if !ok { return errors.Newf("targeted column %q not found", colName) } - conv.IsTargetCol[idx] = struct{}{} + conv.TargetColOrds.Add(idx) targetColMapIdx[j] = idx } // For any missing columns, fill those to NULL. // These will get filled in with the correct default / computed expression - // provided conv.IsTargetCol is not set for the given column index. + // provided conv.TargetColOrds does not contain the given column index. 
for idx := range conv.VisibleCols { - if _, ok := conv.IsTargetCol[idx]; !ok { + if !conv.TargetColOrds.Contains(idx) { conv.Datums[idx] = tree.DNull } } @@ -735,14 +736,14 @@ func (m *pgDumpReader) readFile( var targetColMapIdx []int if conv != nil { targetColMapIdx = make([]int, len(i.Columns)) - conv.IsTargetCol = make(map[int]struct{}, len(i.Columns)) + conv.TargetColOrds = util.FastIntSet{} for j := range i.Columns { colName := string(i.Columns[j]) idx, ok := m.colMap[conv][colName] if !ok { return errors.Newf("targeted column %q not found", colName) } - conv.IsTargetCol[idx] = struct{}{} + conv.TargetColOrds.Add(idx) targetColMapIdx[j] = idx } } @@ -768,7 +769,7 @@ func (m *pgDumpReader) readFile( } switch row := row.(type) { case copyData: - if expected, got := len(conv.IsTargetCol), len(row); expected != got { + if expected, got := conv.TargetColOrds.Len(), len(row); expected != got { return makeRowErr("", count, pgcode.Syntax, "expected %d values, got %d", expected, got) } diff --git a/pkg/sql/backfill/backfill.go b/pkg/sql/backfill/backfill.go index 4af77adfa2db..0f8e953c053c 100644 --- a/pkg/sql/backfill/backfill.go +++ b/pkg/sql/backfill/backfill.go @@ -465,7 +465,7 @@ func (ib *IndexBackfiller) InitForDistributedUse( evalCtx := flowCtx.NewEvalCtx() var predicates map[descpb.IndexID]tree.TypedExpr - var predicateRefColIDs schemaexpr.TableColSet + var predicateRefColIDs catalog.TableColSet // Install type metadata in the target descriptors, as well as resolve any // user defined types in partial index predicate expressions. 
diff --git a/pkg/sql/catalog/BUILD.bazel b/pkg/sql/catalog/BUILD.bazel index 89c9eba80151..702dccaba4af 100644 --- a/pkg/sql/catalog/BUILD.bazel +++ b/pkg/sql/catalog/BUILD.bazel @@ -9,6 +9,7 @@ go_library( "descriptor.go", "errors.go", "table_col_map.go", + "table_col_set.go", ], importpath = "github.com/cockroachdb/cockroach/pkg/sql/catalog", visibility = ["//visibility:public"], @@ -34,6 +35,7 @@ go_test( srcs = [ "dep_test.go", "descriptor_test.go", + "table_col_set_test.go", ], embed = [":catalog"], deps = [ @@ -42,6 +44,7 @@ go_test( "//pkg/sql/catalog/schemadesc", "//pkg/sql/catalog/tabledesc", "//pkg/testutils/buildutil", + "//pkg/util", "//pkg/util/leaktest", "//vendor/github.com/cockroachdb/redact", "//vendor/github.com/stretchr/testify/require", diff --git a/pkg/sql/catalog/colinfo/column_resolver.go b/pkg/sql/catalog/colinfo/column_resolver.go index 7e413014b66b..e82b4599d517 100644 --- a/pkg/sql/catalog/colinfo/column_resolver.go +++ b/pkg/sql/catalog/colinfo/column_resolver.go @@ -42,8 +42,8 @@ func ProcessTargetColumns( return nil, nil } + var colIDSet catalog.TableColSet cols := make([]descpb.ColumnDescriptor, len(nameList)) - colIDSet := make(map[descpb.ColumnID]struct{}, len(nameList)) for i, colName := range nameList { var col *descpb.ColumnDescriptor var err error @@ -56,11 +56,11 @@ func ProcessTargetColumns( return nil, err } - if _, ok := colIDSet[col.ID]; ok { + if colIDSet.Contains(col.ID) { return nil, pgerror.Newf(pgcode.Syntax, "multiple assignments to the same column %q", &nameList[i]) } - colIDSet[col.ID] = struct{}{} + colIDSet.Add(col.ID) cols[i] = *col } diff --git a/pkg/sql/catalog/schemaexpr/BUILD.bazel b/pkg/sql/catalog/schemaexpr/BUILD.bazel index c07629801a54..231387bc060a 100644 --- a/pkg/sql/catalog/schemaexpr/BUILD.bazel +++ b/pkg/sql/catalog/schemaexpr/BUILD.bazel @@ -13,7 +13,6 @@ go_library( "expr_filter.go", "partial_index.go", "select_name_resolution.go", - "table_col_set.go", ], importpath = 
"github.com/cockroachdb/cockroach/pkg/sql/catalog/schemaexpr", visibility = ["//visibility:public"], @@ -28,7 +27,6 @@ go_library( "//pkg/sql/sem/tree", "//pkg/sql/sessiondata", "//pkg/sql/types", - "//pkg/util", "//vendor/github.com/cockroachdb/errors", ], ) @@ -40,7 +38,6 @@ go_test( "column_test.go", "expr_test.go", "partial_index_test.go", - "table_col_set_test.go", "testutils_test.go", ], embed = [":schemaexpr"], @@ -53,6 +50,5 @@ go_test( "//pkg/sql/sem/builtins", "//pkg/sql/sem/tree", "//pkg/sql/types", - "//pkg/util", ], ) diff --git a/pkg/sql/catalog/schemaexpr/column.go b/pkg/sql/catalog/schemaexpr/column.go index 2854ac8bff8d..d812fa8dc80a 100644 --- a/pkg/sql/catalog/schemaexpr/column.go +++ b/pkg/sql/catalog/schemaexpr/column.go @@ -234,8 +234,8 @@ func (d *dummyColumn) ResolvedType() *types.T { // descriptor, replaceColumnVars errs with pgcode.UndefinedColumn. func replaceColumnVars( desc catalog.TableDescriptor, rootExpr tree.Expr, -) (tree.Expr, TableColSet, error) { - var colIDs TableColSet +) (tree.Expr, catalog.TableColSet, error) { + var colIDs catalog.TableColSet newExpr, err := tree.SimpleVisit(rootExpr, func(expr tree.Expr) (recurse bool, newExpr tree.Expr, err error) { vBase, ok := expr.(tree.VarName) diff --git a/pkg/sql/catalog/schemaexpr/computed_column.go b/pkg/sql/catalog/schemaexpr/computed_column.go index 9f27cbd912e6..37e51677025e 100644 --- a/pkg/sql/catalog/schemaexpr/computed_column.go +++ b/pkg/sql/catalog/schemaexpr/computed_column.go @@ -64,7 +64,7 @@ func (v *ComputedColumnValidator) Validate(d *tree.ColumnTableDef) error { ) } - var depColIDs TableColSet + var depColIDs catalog.TableColSet // First, check that no column in the expression is a computed column. 
err := iterColDescriptors(v.desc, d.Computed.Expr, func(c *descpb.ColumnDescriptor) error { if c.IsComputed() { diff --git a/pkg/sql/catalog/schemaexpr/default_exprs.go b/pkg/sql/catalog/schemaexpr/default_exprs.go index f525c467ab56..3be92c6913a1 100644 --- a/pkg/sql/catalog/schemaexpr/default_exprs.go +++ b/pkg/sql/catalog/schemaexpr/default_exprs.go @@ -90,9 +90,9 @@ func ProcessColumnSet( tableDesc catalog.TableDescriptor, inSet func(*descpb.ColumnDescriptor) bool, ) []descpb.ColumnDescriptor { - colIDSet := make(map[descpb.ColumnID]struct{}, len(cols)) + var colIDSet catalog.TableColSet for i := range cols { - colIDSet[cols[i].ID] = struct{}{} + colIDSet.Add(cols[i].ID) } // Add all public or columns in DELETE_AND_WRITE_ONLY state @@ -101,8 +101,8 @@ func ProcessColumnSet( for i := range writable { col := &writable[i] if inSet(col) { - if _, ok := colIDSet[col.ID]; !ok { - colIDSet[col.ID] = struct{}{} + if !colIDSet.Contains(col.ID) { + colIDSet.Add(col.ID) cols = append(cols, *col) } } diff --git a/pkg/sql/catalog/schemaexpr/expr.go b/pkg/sql/catalog/schemaexpr/expr.go index edc46b120727..ef29264a24ca 100644 --- a/pkg/sql/catalog/schemaexpr/expr.go +++ b/pkg/sql/catalog/schemaexpr/expr.go @@ -43,8 +43,8 @@ func DequalifyAndValidateExpr( semaCtx *tree.SemaContext, maxVolatility tree.Volatility, tn *tree.TableName, -) (string, TableColSet, error) { - var colIDs TableColSet +) (string, catalog.TableColSet, error) { + var colIDs catalog.TableColSet sourceInfo := colinfo.NewSourceInfoForSingleTable( *tn, colinfo.ResultColumnsFromColDescs( desc.GetID(), @@ -80,8 +80,10 @@ func DequalifyAndValidateExpr( } // ExtractColumnIDs returns the set of column IDs within the given expression. 
-func ExtractColumnIDs(desc catalog.TableDescriptor, rootExpr tree.Expr) (TableColSet, error) { - var colIDs TableColSet +func ExtractColumnIDs( + desc catalog.TableDescriptor, rootExpr tree.Expr, +) (catalog.TableColSet, error) { + var colIDs catalog.TableColSet _, err := tree.SimpleVisit(rootExpr, func(expr tree.Expr) (recurse bool, newExpr tree.Expr, err error) { vBase, ok := expr.(tree.VarName) diff --git a/pkg/sql/catalog/schemaexpr/partial_index.go b/pkg/sql/catalog/schemaexpr/partial_index.go index fcf6e11f8a50..5ae1166de0dd 100644 --- a/pkg/sql/catalog/schemaexpr/partial_index.go +++ b/pkg/sql/catalog/schemaexpr/partial_index.go @@ -92,7 +92,7 @@ func MakePartialIndexExprs( tableDesc catalog.TableDescriptor, evalCtx *tree.EvalContext, semaCtx *tree.SemaContext, -) (_ map[descpb.IndexID]tree.TypedExpr, refColIDs TableColSet, _ error) { +) (_ map[descpb.IndexID]tree.TypedExpr, refColIDs catalog.TableColSet, _ error) { // If none of the indexes are partial indexes, return early. partialIndexCount := 0 for i := range indexes { diff --git a/pkg/sql/catalog/schemaexpr/table_col_set.go b/pkg/sql/catalog/table_col_set.go similarity index 88% rename from pkg/sql/catalog/schemaexpr/table_col_set.go rename to pkg/sql/catalog/table_col_set.go index c236c109cb1e..661d3cfc5819 100644 --- a/pkg/sql/catalog/schemaexpr/table_col_set.go +++ b/pkg/sql/catalog/table_col_set.go @@ -8,7 +8,7 @@ // by the Apache License, Version 2.0, included in the file // licenses/APL.txt. -package schemaexpr +package catalog import ( "github.com/cockroachdb/cockroach/pkg/sql/catalog/descpb" @@ -41,14 +41,18 @@ func (s TableColSet) Empty() bool { return s.set.Empty() } // Len returns the number of the columns in the set. func (s TableColSet) Len() int { return s.set.Len() } +// Next returns the first value in the set which is >= startVal. If there is no +// value, the second return value is false. 
+func (s TableColSet) Next(startVal descpb.ColumnID) (descpb.ColumnID, bool) { + c, ok := s.set.Next(int(startVal)) + return descpb.ColumnID(c), ok +} + // ForEach calls a function for each column in the set (in increasing order). func (s TableColSet) ForEach(f func(col descpb.ColumnID)) { s.set.ForEach(func(i int) { f(descpb.ColumnID(i)) }) } -// UnionWith adds all the columns from rhs to this set. -func (s *TableColSet) UnionWith(rhs TableColSet) { s.set.UnionWith(rhs.set) } - // Ordered returns a slice with all the descpb.ColumnIDs in the set, in // increasing order. func (s TableColSet) Ordered() []descpb.ColumnID { @@ -62,6 +66,9 @@ func (s TableColSet) Ordered() []descpb.ColumnID { return result } +// UnionWith adds all the columns from rhs to this set. +func (s *TableColSet) UnionWith(rhs TableColSet) { s.set.UnionWith(rhs.set) } + // String returns a list representation of elements. Sequential runs of positive // numbers are shown as ranges. For example, for the set {1, 2, 3 5, 6, 10}, // the output is "(1-3,5,6,10)". diff --git a/pkg/sql/catalog/schemaexpr/table_col_set_test.go b/pkg/sql/catalog/table_col_set_test.go similarity index 98% rename from pkg/sql/catalog/schemaexpr/table_col_set_test.go rename to pkg/sql/catalog/table_col_set_test.go index dc08f20a1718..6087fc6d545d 100644 --- a/pkg/sql/catalog/schemaexpr/table_col_set_test.go +++ b/pkg/sql/catalog/table_col_set_test.go @@ -8,7 +8,7 @@ // by the Apache License, Version 2.0, included in the file // licenses/APL.txt. 
-package schemaexpr +package catalog import ( "testing" diff --git a/pkg/sql/catalog/tabledesc/structured.go b/pkg/sql/catalog/tabledesc/structured.go index 64258d158cc2..fb5677bed62a 100644 --- a/pkg/sql/catalog/tabledesc/structured.go +++ b/pkg/sql/catalog/tabledesc/structured.go @@ -853,9 +853,9 @@ func maybeUpgradeToFamilyFormatVersion(desc *descpb.TableDescriptor) bool { return false } - primaryIndexColumnIds := make(map[descpb.ColumnID]struct{}, len(desc.PrimaryIndex.ColumnIDs)) + var primaryIndexColumnIDs catalog.TableColSet for _, colID := range desc.PrimaryIndex.ColumnIDs { - primaryIndexColumnIds[colID] = struct{}{} + primaryIndexColumnIDs.Add(colID) } desc.Families = []descpb.ColumnFamilyDescriptor{ @@ -863,7 +863,7 @@ func maybeUpgradeToFamilyFormatVersion(desc *descpb.TableDescriptor) bool { } desc.NextFamilyID = desc.Families[0].ID + 1 addFamilyForCol := func(col *descpb.ColumnDescriptor) { - if _, ok := primaryIndexColumnIds[col.ID]; ok { + if primaryIndexColumnIDs.Contains(col.ID) { desc.Families[0].ColumnNames = append(desc.Families[0].ColumnNames, col.Name) desc.Families[0].ColumnIDs = append(desc.Families[0].ColumnIDs, col.ID) return @@ -1172,11 +1172,11 @@ func (desc *Mutable) allocateIndexIDs(columnNames map[string]descpb.ColumnID) er allocateIndexName(desc, index) } - isCompositeColumn := make(map[descpb.ColumnID]struct{}) + var compositeColIDs catalog.TableColSet for i := range desc.Columns { col := &desc.Columns[i] if colinfo.HasCompositeKeyEncoding(col.Type) { - isCompositeColumn[col.ID] = struct{}{} + compositeColIDs.Add(col.ID) } } @@ -1240,12 +1240,12 @@ func (desc *Mutable) allocateIndexIDs(columnNames map[string]descpb.ColumnID) er index.CompositeColumnIDs = nil for _, colID := range index.ColumnIDs { - if _, ok := isCompositeColumn[colID]; ok { + if compositeColIDs.Contains(colID) { index.CompositeColumnIDs = append(index.CompositeColumnIDs, colID) } } for _, colID := range index.ExtraColumnIDs { - if _, ok := 
isCompositeColumn[colID]; ok { + if compositeColIDs.Contains(colID) { index.CompositeColumnIDs = append(index.CompositeColumnIDs, colID) } } @@ -1263,7 +1263,7 @@ func (desc *Mutable) allocateColumnFamilyIDs(columnNames map[string]descpb.Colum desc.NextFamilyID = 1 } - columnsInFamilies := make(map[descpb.ColumnID]struct{}, len(desc.Columns)) + var columnsInFamilies catalog.TableColSet for i := range desc.Families { family := &desc.Families[i] if family.ID == 0 && i != 0 { @@ -1278,22 +1278,22 @@ func (desc *Mutable) allocateColumnFamilyIDs(columnNames map[string]descpb.Colum if family.ColumnIDs[j] == 0 { family.ColumnIDs[j] = columnNames[colName] } - columnsInFamilies[family.ColumnIDs[j]] = struct{}{} + columnsInFamilies.Add(family.ColumnIDs[j]) } desc.Families[i] = *family } - primaryIndexColIDs := make(map[descpb.ColumnID]struct{}, len(desc.PrimaryIndex.ColumnIDs)) + var primaryIndexColIDs catalog.TableColSet for _, colID := range desc.PrimaryIndex.ColumnIDs { - primaryIndexColIDs[colID] = struct{}{} + primaryIndexColIDs.Add(colID) } ensureColumnInFamily := func(col *descpb.ColumnDescriptor) { - if _, ok := columnsInFamilies[col.ID]; ok { + if columnsInFamilies.Contains(col.ID) { return } - if _, ok := primaryIndexColIDs[col.ID]; ok { + if primaryIndexColIDs.Contains(col.ID) { // Primary index columns are required to be assigned to family 0. 
desc.Families[0].ColumnNames = append(desc.Families[0].ColumnNames, col.Name) desc.Families[0].ColumnIDs = append(desc.Families[0].ColumnIDs, col.ID) @@ -1347,7 +1347,7 @@ func (desc *Mutable) allocateColumnFamilyIDs(columnNames map[string]descpb.Colum if family.DefaultColumnID == 0 { defaultColumnID := descpb.ColumnID(0) for _, colID := range family.ColumnIDs { - if _, ok := primaryIndexColIDs[colID]; !ok { + if !primaryIndexColIDs.Contains(colID) { if defaultColumnID == 0 { defaultColumnID = colID } else { @@ -2056,7 +2056,7 @@ func (desc *Immutable) validateTableIndexes(columnNames map[string]descpb.Column return fmt.Errorf("index %q must contain at least 1 column", index.Name) } - validateIndexDup := make(map[descpb.ColumnID]struct{}) + var validateIndexDup catalog.TableColSet for i, name := range index.ColumnNames { colID, ok := columnNames[name] if !ok { @@ -2066,10 +2066,10 @@ func (desc *Immutable) validateTableIndexes(columnNames map[string]descpb.Column return fmt.Errorf("index %q column %q should have ID %d, but found ID %d", index.Name, name, colID, index.ColumnIDs[i]) } - if _, ok := validateIndexDup[colID]; ok { + if validateIndexDup.Contains(colID) { return fmt.Errorf("index %q contains duplicate column %q", index.Name, name) } - validateIndexDup[colID] = struct{}{} + validateIndexDup.Add(colID) } if index.IsSharded() { if err := desc.ensureShardedIndexNotComputed(index); err != nil { @@ -3639,7 +3639,7 @@ func (desc *Immutable) ColumnsUsed( "could not parse check constraint %s", cc.Expr) } - colIDsUsed := make(map[descpb.ColumnID]struct{}) + var colIDsUsed catalog.TableColSet visitFn := func(expr tree.Expr) (recurse bool, newExpr tree.Expr, err error) { if vBase, ok := expr.(tree.VarName); ok { v, err := vBase.NormalizeVarName() @@ -3653,7 +3653,7 @@ func (desc *Immutable) ColumnsUsed( "column %q not found for constraint %q", c.ColumnName, parsed.String()) } - colIDsUsed[col.ID] = struct{}{} + colIDsUsed.Add(col.ID) } return false, v, nil } @@ 
-3663,8 +3663,8 @@ func (desc *Immutable) ColumnsUsed( return nil, err } - cc.ColumnIDs = make([]descpb.ColumnID, 0, len(colIDsUsed)) - for colID := range colIDsUsed { + cc.ColumnIDs = make([]descpb.ColumnID, 0, colIDsUsed.Len()) + for colID, ok := colIDsUsed.Next(0); ok; colID, ok = colIDsUsed.Next(colID + 1) { cc.ColumnIDs = append(cc.ColumnIDs, colID) } sort.Sort(descpb.ColumnIDs(cc.ColumnIDs)) diff --git a/pkg/sql/create_table.go b/pkg/sql/create_table.go index 9a769574b221..e4f328dd3205 100644 --- a/pkg/sql/create_table.go +++ b/pkg/sql/create_table.go @@ -608,8 +608,8 @@ func ResolveFK( validationBehavior tree.ValidationBehavior, evalCtx *tree.EvalContext, ) error { + var originColSet catalog.TableColSet originCols := make([]*descpb.ColumnDescriptor, len(d.FromCols)) - originColMap := make(map[descpb.ColumnID]struct{}, len(d.FromCols)) for i, col := range d.FromCols { col, err := tbl.FindActiveOrNewColumnByName(col) if err != nil { @@ -619,11 +619,11 @@ func ResolveFK( return err } // Ensure that the origin columns don't have duplicates. 
- if _, ok := originColMap[col.ID]; ok { + if originColSet.Contains(col.ID) { return pgerror.Newf(pgcode.InvalidForeignKey, "foreign key contains duplicate column %q", col.Name) } - originColMap[col.ID] = struct{}{} + originColSet.Add(col.ID) originCols[i] = col } diff --git a/pkg/sql/distsql_plan_stats.go b/pkg/sql/distsql_plan_stats.go index 580d205181ae..4316aed02eea 100644 --- a/pkg/sql/distsql_plan_stats.go +++ b/pkg/sql/distsql_plan_stats.go @@ -18,6 +18,7 @@ import ( "github.com/cockroachdb/cockroach/pkg/jobs/jobspb" "github.com/cockroachdb/cockroach/pkg/kv" "github.com/cockroachdb/cockroach/pkg/settings" + "github.com/cockroachdb/cockroach/pkg/sql/catalog" "github.com/cockroachdb/cockroach/pkg/sql/catalog/descpb" "github.com/cockroachdb/cockroach/pkg/sql/catalog/tabledesc" "github.com/cockroachdb/cockroach/pkg/sql/execinfrapb" @@ -25,7 +26,6 @@ import ( "github.com/cockroachdb/cockroach/pkg/sql/span" "github.com/cockroachdb/cockroach/pkg/sql/stats" "github.com/cockroachdb/cockroach/pkg/sql/types" - "github.com/cockroachdb/cockroach/pkg/util" "github.com/cockroachdb/cockroach/pkg/util/hlc" "github.com/cockroachdb/errors" "github.com/cockroachdb/logtags" @@ -64,11 +64,11 @@ func (dsp *DistSQLPlanner) createStatsPlan( // Calculate the set of columns we need to scan. 
var colCfg scanColumnsConfig - var tableColSet util.FastIntSet + var tableColSet catalog.TableColSet for _, s := range reqStats { for _, c := range s.columns { - if !tableColSet.Contains(int(c)) { - tableColSet.Add(int(c)) + if !tableColSet.Contains(c) { + tableColSet.Add(c) colCfg.wantedColumns = append(colCfg.wantedColumns, tree.ColumnID(c)) } } diff --git a/pkg/sql/row/fetcher.go b/pkg/sql/row/fetcher.go index 6a48c41ebc06..2da2051b172e 100644 --- a/pkg/sql/row/fetcher.go +++ b/pkg/sql/row/fetcher.go @@ -1397,7 +1397,7 @@ func (rf *Fetcher) checkPrimaryIndexDatumEncodings(ctx context.Context) error { continue } - if skip, err := rh.skipColumnInPK(colID, familyID, rowVal.Datum); err != nil { + if skip, err := rh.skipColumnInPK(colID, rowVal.Datum); err != nil { return errors.NewAssertionErrorWithWrappedErrf(err, "unable to determine skip") } else if skip { continue diff --git a/pkg/sql/row/helper.go b/pkg/sql/row/helper.go index 4090c7074b59..b9a40fc5dc9a 100644 --- a/pkg/sql/row/helper.go +++ b/pkg/sql/row/helper.go @@ -38,7 +38,7 @@ type rowHelper struct { // Computed and cached. primaryIndexKeyPrefix []byte - primaryIndexCols map[descpb.ColumnID]struct{} + primaryIndexCols catalog.TableColSet sortedColumnFamilies map[descpb.FamilyID][]descpb.ColumnID } @@ -135,16 +135,13 @@ func (rh *rowHelper) encodeSecondaryIndexes( // datums are considered too, so a composite datum in a PK will return false. // TODO(dan): This logic is common and being moved into TableDescriptor (see // #6233). Once it is, use the shared one. 
-func (rh *rowHelper) skipColumnInPK( - colID descpb.ColumnID, family descpb.FamilyID, value tree.Datum, -) (bool, error) { - if rh.primaryIndexCols == nil { - rh.primaryIndexCols = make(map[descpb.ColumnID]struct{}) +func (rh *rowHelper) skipColumnInPK(colID descpb.ColumnID, value tree.Datum) (bool, error) { + if rh.primaryIndexCols.Empty() { for _, colID := range rh.TableDesc.GetPrimaryIndex().ColumnIDs { - rh.primaryIndexCols[colID] = struct{}{} + rh.primaryIndexCols.Add(colID) } } - if _, ok := rh.primaryIndexCols[colID]; !ok { + if !rh.primaryIndexCols.Contains(colID) { return false, nil } if cdatum, ok := value.(tree.CompositeDatum); ok { diff --git a/pkg/sql/row/row_converter.go b/pkg/sql/row/row_converter.go index 49b8a023c9d0..9d41e234fd1f 100644 --- a/pkg/sql/row/row_converter.go +++ b/pkg/sql/row/row_converter.go @@ -14,6 +14,7 @@ import ( "context" "github.com/cockroachdb/cockroach/pkg/roachpb" + "github.com/cockroachdb/cockroach/pkg/sql/catalog" "github.com/cockroachdb/cockroach/pkg/sql/catalog/colinfo" "github.com/cockroachdb/cockroach/pkg/sql/catalog/descpb" "github.com/cockroachdb/cockroach/pkg/sql/catalog/schemaexpr" @@ -207,7 +208,7 @@ type DatumRowConverter struct { // Tracks which column indices in the set of visible columns are part of the // user specified target columns. This can be used before populating Datums // to filter out unwanted column data. - IsTargetCol map[int]struct{} + TargetColOrds util.FastIntSet // The rest of these are derived from tableDesc, just cached here. 
ri Inserter @@ -266,17 +267,10 @@ func NewDatumRowConverter( targetColDescriptors = tableDesc.VisibleColumns() } - isTargetColID := make(map[descpb.ColumnID]struct{}) - for _, col := range targetColDescriptors { - isTargetColID[col.ID] = struct{}{} - } - - c.IsTargetCol = make(map[int]struct{}) + var targetColIDs catalog.TableColSet for i, col := range targetColDescriptors { - if _, ok := isTargetColID[col.ID]; !ok { - continue - } - c.IsTargetCol[i] = struct{}{} + c.TargetColOrds.Add(i) + targetColIDs.Add(col.ID) } var txCtx transform.ExprTransformContext @@ -319,10 +313,6 @@ func NewDatumRowConverter( // In addition, check for non-targeted columns with non-null DEFAULT expressions. // If the DEFAULT expression is immutable, we can store it in the cache so that it // doesn't have to be reevaluated for every row. - isTargetCol := func(col *descpb.ColumnDescriptor) bool { - _, ok := isTargetColID[col.ID] - return ok - } annot := make(tree.Annotations, 1) annot.Set(cellInfoAddr, &cellInfoAnnotation{uniqueRowIDInstance: 0}) c.EvalCtx.Annotations = &annot @@ -352,11 +342,11 @@ func NewDatumRowConverter( } } } - if !isTargetCol(col) { + if !targetColIDs.Contains(col.ID) { c.Datums = append(c.Datums, nil) } } - if col.IsComputed() && !isTargetCol(col) { + if col.IsComputed() && !targetColIDs.Contains(col.ID) { c.Datums = append(c.Datums, nil) } } @@ -400,10 +390,6 @@ const rowIDBits = 64 - builtins.NodeIDBits // Row inserts kv operations into the current kv batch, and triggers a SendBatch // if necessary. 
func (c *DatumRowConverter) Row(ctx context.Context, sourceID int32, rowIndex int64) error { - isTargetCol := func(i int) bool { - _, ok := c.IsTargetCol[i] - return ok - } getCellInfoAnnotation(c.EvalCtx.Annotations).Reset(sourceID, rowIndex) for i := range c.cols { col := &c.cols[i] @@ -415,7 +401,7 @@ func (c *DatumRowConverter) Row(ctx context.Context, sourceID int32, rowIndex in // TODO (anzoteh96): Optimize this part of code when there's no expression // involving random(), gen_random_uuid(), or anything like that. datum, err := c.defaultCache[i].Eval(c.EvalCtx) - if !isTargetCol(i) { + if !c.TargetColOrds.Contains(i) { if err != nil { return errors.Wrapf( err, "error evaluating default expression %q", *col.DefaultExpr) diff --git a/pkg/sql/row/updater.go b/pkg/sql/row/updater.go index 497dc558f0b5..a66d5880a6e0 100644 --- a/pkg/sql/row/updater.go +++ b/pkg/sql/row/updater.go @@ -91,14 +91,14 @@ func MakeUpdater( ) (Updater, error) { updateColIDtoRowIndex := ColIDtoRowIndexFromCols(updateCols) - primaryIndexCols := make(map[descpb.ColumnID]struct{}, len(tableDesc.PrimaryIndex.ColumnIDs)) + var primaryIndexCols catalog.TableColSet for _, colID := range tableDesc.PrimaryIndex.ColumnIDs { - primaryIndexCols[colID] = struct{}{} + primaryIndexCols.Add(colID) } var primaryKeyColChange bool for _, c := range updateCols { - if _, ok := primaryIndexCols[c.ID]; ok { + if primaryIndexCols.Contains(c.ID) { primaryKeyColChange = true break } diff --git a/pkg/sql/row/writer.go b/pkg/sql/row/writer.go index bd98a05885eb..66757a5a1d45 100644 --- a/pkg/sql/row/writer.go +++ b/pkg/sql/row/writer.go @@ -174,7 +174,7 @@ func prepareInsertOrUpdateBatch( continue } - if skip, err := helper.skipColumnInPK(colID, family.ID, values[idx]); err != nil { + if skip, err := helper.skipColumnInPK(colID, values[idx]); err != nil { return nil, err } else if skip { continue diff --git a/pkg/sql/rowenc/index_encoding.go b/pkg/sql/rowenc/index_encoding.go index 276b4be8e581..a92c4f8b4488 100644 
--- a/pkg/sql/rowenc/index_encoding.go +++ b/pkg/sql/rowenc/index_encoding.go @@ -1003,9 +1003,9 @@ func EncodePrimaryIndex( return nil, err } // This information should be precomputed on the table descriptor. - indexedColumns := map[descpb.ColumnID]struct{}{} + var indexedColumns catalog.TableColSet for _, colID := range index.ColumnIDs { - indexedColumns[colID] = struct{}{} + indexedColumns.Add(colID) } var entryValue []byte indexEntries := make([]IndexEntry, 0, tableDesc.NumFamilies()) @@ -1041,7 +1041,7 @@ func EncodePrimaryIndex( } for _, colID := range family.ColumnIDs { - if _, ok := indexedColumns[colID]; !ok { + if !indexedColumns.Contains(colID) { columnsToEncode = append(columnsToEncode, valueEncodedColumn{id: colID}) continue }