Merge #96648 #96983
96648: schemachanger: implement `ALTER TABLE .. VALIDATE CONSTRAINT ..` r=Xiang-Gu a=Xiang-Gu

The main idea is to drop the unvalidated element and add a vanilla counterpart in the builder state.

Commit 2 makes changes that allow both the legacy and the declarative schema changer to handle
this statement correctly: `ALTER TABLE .. ADD/DROP CONSTRAINT .. [NOT VALID]; VALIDATE CONSTRAINT ..;`,
so this PR warrants a release note.

Epic: None
Release note (bug fix): Allow `ALTER TABLE .. ADD/DROP CONSTRAINT .. [NOT VALID]; VALIDATE CONSTRAINT ..;` to behave consistently with Postgres. Previously, the VALIDATE CONSTRAINT part would fail and cause the whole statement to fail.
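
For example, the following minimal sketch (mirroring the new logic test added in this PR; the table and constraint names are illustrative) shows the now-consistent behavior:

CREATE TABLE t (i INT PRIMARY KEY);

-- Adding a NOT VALID constraint and validating it in the same statement now succeeds.
ALTER TABLE t ADD CONSTRAINT check_i CHECK (i > 0) NOT VALID, VALIDATE CONSTRAINT check_i;

-- Dropping a constraint and validating it in the same statement now fails with
-- "constraint ... does not exist", matching Postgres.
ALTER TABLE t DROP CONSTRAINT check_i, VALIDATE CONSTRAINT check_i;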

96983: backupccl: reassign functions for rewritten schemas r=chengxiong-ruan a=chengxiong-ruan

In #96911, the new function signatures were not assigned to the rewritten schema. This PR fixes that and adds more tests to make sure the functions in the schema descriptor look correct.

Epic: None
Release note: None
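
One way to check this, mirroring the new test data below, is to inspect the rewritten schema descriptor's functions field after a restore (the descriptor id is cluster-specific and must first be looked up in system.namespace):

SELECT crdb_internal.pb_to_json(
  'cockroach.sql.sqlbase.Descriptor',
  descriptor,
  false
)->'schema'->>'functions'
FROM system.descriptor
WHERE id = 112;  -- illustrative; use the restored schema's descriptor id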

Co-authored-by: Xiang Gu <[email protected]>
Co-authored-by: Chengxiong Ruan <[email protected]>
3 people committed Feb 10, 2023
3 parents 8c2060c + 9266fdc + 038d7d0 commit c2bb35d
Showing 27 changed files with 1,056 additions and 41 deletions.
72 changes: 68 additions & 4 deletions pkg/ccl/backupccl/testdata/backup-restore/user-defined-functions
@@ -299,22 +299,86 @@ new-cluster name=s3

exec-sql cluster=s3
CREATE DATABASE db1;
CREATE TABLE t(a INT PRIMARY KEY);
CREATE FUNCTION f() RETURNS INT LANGUAGE SQL AS $$ SELECT 1 $$;
CREATE SCHEMA sc1;
CREATE TABLE sc1.t(a INT PRIMARY KEY);
CREATE FUNCTION sc1.f() RETURNS INT LANGUAGE SQL AS $$ SELECT 1 $$;
----

# Make sure the original schema has function signatures
query-sql
WITH db_id AS (
SELECT id FROM system.namespace WHERE name = 'defaultdb'
),
schema_id AS (
SELECT ns.id
FROM system.namespace AS ns
JOIN db_id ON ns."parentID" = db_id.id
WHERE ns.name = 'sc1'
)
SELECT id FROM schema_id;
----
109

query-sql
WITH to_json AS (
SELECT
id,
crdb_internal.pb_to_json(
'cockroach.sql.sqlbase.Descriptor',
descriptor,
false
) AS d
FROM
system.descriptor
WHERE id = 109
)
SELECT d->'schema'->>'functions'::string FROM to_json;
----
{"f": {"signatures": [{"id": 111, "returnType": {"family": "IntFamily", "oid": 20, "width": 64}}]}}

exec-sql
BACKUP TABLE t INTO 'nodelocal://0/test/'
BACKUP TABLE sc1.t INTO 'nodelocal://0/test/'
----

exec-sql
RESTORE TABLE t FROM LATEST IN 'nodelocal://0/test/' WITH into_db = 'db1';
RESTORE TABLE sc1.t FROM LATEST IN 'nodelocal://0/test/' WITH into_db = 'db1';
----

exec-sql
USE db1;
----

query-sql
WITH db_id AS (
SELECT id FROM system.namespace WHERE name = 'db1'
),
schema_id AS (
SELECT ns.id
FROM system.namespace AS ns
JOIN db_id ON ns."parentID" = db_id.id
WHERE ns.name = 'sc1'
)
SELECT id FROM schema_id;
----
112

query-sql
WITH to_json AS (
SELECT
id,
crdb_internal.pb_to_json(
'cockroach.sql.sqlbase.Descriptor',
descriptor,
false
) AS d
FROM
system.descriptor
WHERE id = 112
)
SELECT d->'schema'->>'functions'::string FROM to_json;
----
<nil>

# Make sure the proper error message is returned when trying to resolve the
# function from the restore target db.
query-sql
5 changes: 5 additions & 0 deletions pkg/ccl/schemachangerccl/backup_base_generated_test.go


12 changes: 4 additions & 8 deletions pkg/sql/alter_table.go
@@ -504,8 +504,7 @@ func (n *alterTableNode) startExec(params runParams) error {
if t.IfExists {
continue
}
return pgerror.Newf(pgcode.UndefinedObject,
"constraint %q of relation %q does not exist", t.Constraint, n.tableDesc.Name)
return sqlerrors.NewUndefinedConstraintError(string(t.Constraint), n.tableDesc.Name)
}
if err := n.tableDesc.DropConstraint(c, func(backRef catalog.ForeignKeyConstraint) error {
return params.p.removeFKBackReference(params.ctx, n.tableDesc, backRef.ForeignKeyDesc())
@@ -521,8 +520,7 @@ func (n *alterTableNode) startExec(params runParams) error {
name := string(t.Constraint)
c := catalog.FindConstraintByName(n.tableDesc, name)
if c == nil {
return pgerror.Newf(pgcode.UndefinedObject,
"constraint %q of relation %q does not exist", t.Constraint, n.tableDesc.Name)
return sqlerrors.NewUndefinedConstraintError(string(t.Constraint), n.tableDesc.Name)
}
switch c.GetConstraintValidity() {
case descpb.ConstraintValidity_Validated:
@@ -532,8 +530,7 @@ func (n *alterTableNode) startExec(params runParams) error {
return pgerror.Newf(pgcode.ObjectNotInPrerequisiteState,
"constraint %q in the middle of being added, try again later", t.Constraint)
case descpb.ConstraintValidity_Dropping:
return pgerror.Newf(pgcode.ObjectNotInPrerequisiteState,
"constraint %q in the middle of being dropped", t.Constraint)
return sqlerrors.NewUndefinedConstraintError(string(t.Constraint), n.tableDesc.Name)
}
if ck := c.AsCheck(); ck != nil {
if err := validateCheckInTxn(
@@ -745,8 +742,7 @@ func (n *alterTableNode) startExec(params runParams) error {
case *tree.AlterTableRenameConstraint:
constraint := catalog.FindConstraintByName(n.tableDesc, string(t.Constraint))
if constraint == nil {
return pgerror.Newf(pgcode.UndefinedObject,
"constraint %q of relation %q does not exist", tree.ErrString(&t.Constraint), n.tableDesc.Name)
return sqlerrors.NewUndefinedConstraintError(tree.ErrString(&t.Constraint), n.tableDesc.Name)
}
if t.Constraint == t.NewName {
// Nothing to do.
14 changes: 6 additions & 8 deletions pkg/sql/backfill.go
@@ -2667,10 +2667,9 @@ func getTargetTablesAndFk(
if srcTable.Version > srcTable.ClusterVersion().Version {
syntheticTable = srcTable
}
for i := range srcTable.OutboundFKs {
def := &srcTable.OutboundFKs[i]
if def.Name == fkName {
fk = def
for _, outboundFK := range srcTable.OutboundForeignKeys() {
if outboundFK.GetName() == fkName {
fk = outboundFK.ForeignKeyDesc()
break
}
}
@@ -2741,10 +2740,9 @@ func validateUniqueWithoutIndexConstraintInTxn(
syntheticDescs = append(syntheticDescs, tableDesc)
}
var uc *descpb.UniqueWithoutIndexConstraint
for i := range tableDesc.UniqueWithoutIndexConstraints {
def := &tableDesc.UniqueWithoutIndexConstraints[i]
if def.Name == constraintName {
uc = def
for _, uwi := range tableDesc.UniqueConstraintsWithoutIndex() {
if uwi.GetName() == constraintName {
uc = uwi.UniqueWithoutIndexDesc()
break
}
}
1 change: 1 addition & 0 deletions pkg/sql/catalog/rewrite/rewrite.go
@@ -566,6 +566,7 @@ func SchemaDescs(schemas []*schemadesc.Mutable, descriptorRewrites jobspb.DescRe
}
}
}
sc.Functions = newFns

if err := rewriteSchemaChangerState(sc, descriptorRewrites); err != nil {
return err
16 changes: 13 additions & 3 deletions pkg/sql/catalog/tabledesc/structured.go
@@ -1391,9 +1391,19 @@ func (desc *Mutable) MakeMutationComplete(m descpb.DescriptorMutation) error {
break
}
}
case descpb.ConstraintValidity_Unvalidated:
case descpb.ConstraintValidity_Unvalidated, descpb.ConstraintValidity_Validated:
// add the constraint to the list of check constraints on the table
// descriptor.
//
// The "validated" validity seems strange -- why would we ever complete
// a constraint mutation whose validity is already "validated"?
// This happens because of how the legacy schema changer is implemented and
// occurs in the following case:
// `ALTER TABLE .. ADD CONSTRAINT .. NOT VALID, VALIDATE CONSTRAINT ..`
// where `VALIDATE CONSTRAINT` changes the constraint mutation's validity
// to "validated" before the job for `ADD CONSTRAINT .. NOT VALID` gets
// around to marking the constraint mutation as completed.
// The same is true for FK and UWI constraints.
desc.Checks = append(desc.Checks, &t.Constraint.Check)
default:
return errors.AssertionFailedf("invalid constraint validity state: %d", t.Constraint.Check.Validity)
@@ -1409,7 +1419,7 @@ func (desc *Mutable) MakeMutationComplete(m descpb.DescriptorMutation) error {
break
}
}
case descpb.ConstraintValidity_Unvalidated:
case descpb.ConstraintValidity_Unvalidated, descpb.ConstraintValidity_Validated:
// Takes care of adding the Foreign Key to the table index. Adding the
// backreference to the referenced table index must be taken care of
// in another call.
@@ -1427,7 +1437,7 @@ func (desc *Mutable) MakeMutationComplete(m descpb.DescriptorMutation) error {
break
}
}
case descpb.ConstraintValidity_Unvalidated:
case descpb.ConstraintValidity_Unvalidated, descpb.ConstraintValidity_Validated:
// add the constraint to the list of unique without index constraints
// on the table descriptor.
desc.UniqueWithoutIndexConstraints = append(
6 changes: 2 additions & 4 deletions pkg/sql/comment_on_constraint.go
@@ -15,10 +15,9 @@ import (

"github.com/cockroachdb/cockroach/pkg/sql/catalog"
"github.com/cockroachdb/cockroach/pkg/sql/catalog/catalogkeys"
"github.com/cockroachdb/cockroach/pkg/sql/pgwire/pgcode"
"github.com/cockroachdb/cockroach/pkg/sql/pgwire/pgerror"
"github.com/cockroachdb/cockroach/pkg/sql/privilege"
"github.com/cockroachdb/cockroach/pkg/sql/sem/tree"
"github.com/cockroachdb/cockroach/pkg/sql/sqlerrors"
"github.com/cockroachdb/cockroach/pkg/util/log/eventpb"
)

@@ -60,8 +59,7 @@ func (n *commentOnConstraintNode) startExec(params runParams) error {
constraintName := string(n.n.Constraint)
constraint := catalog.FindConstraintByName(n.tableDesc, constraintName)
if constraint == nil {
return pgerror.Newf(pgcode.UndefinedObject,
"constraint %q of relation %q does not exist", constraintName, n.tableDesc.GetName())
return sqlerrors.NewUndefinedConstraintError(constraintName, n.tableDesc.GetName())
}

var err error
41 changes: 39 additions & 2 deletions pkg/sql/logictest/testdata/logic_test/alter_table
@@ -1571,7 +1571,7 @@ ALTER TABLE unique_without_index ADD CONSTRAINT my_unique_e UNIQUE WITHOUT INDEX
ALTER TABLE unique_without_index ADD CONSTRAINT my_unique_e2 UNIQUE WITHOUT INDEX (e) NOT VALID

# Trying to validate one of the constraints will fail.
statement error pgcode 23505 pq: could not create unique constraint "my_unique_e"\nDETAIL: Key \(e\)=\(1\) is duplicated\.
statement error pgcode 23505 pq: could not create unique constraint ".*"\nDETAIL: Key \(e\)=\(1\) is duplicated\.
ALTER TABLE unique_without_index VALIDATE CONSTRAINT my_unique_e

# But after we delete a row, validation should succeed.
@@ -1632,7 +1632,7 @@ statement ok
ALTER TABLE unique_without_index_partial ADD CONSTRAINT uniq_a_1 UNIQUE WITHOUT INDEX (a) WHERE b > 0 OR c > 0 NOT VALID

# Trying to validate the constraint will fail.
statement error pgcode 23505 pq: could not create unique constraint "uniq_a_1"\nDETAIL: Key \(a\)=\(1\) is duplicated\.
statement error pgcode 23505 pq: could not create unique constraint ".*"\nDETAIL: Key \(a\)=\(1\) is duplicated\.
ALTER TABLE unique_without_index_partial VALIDATE CONSTRAINT uniq_a_1

# But after we delete a row, validation should succeed.
@@ -2903,3 +2903,40 @@ t_96115 t_96115_pkey PRIMARY KEY true

statement ok
DROP TABLE t_96115;

# This subtest tests the behavior of adding/dropping a constraint and validating
# the constraint in one ALTER TABLE statement.

subtest 96648

statement ok
CREATE TABLE t_96648 (i INT PRIMARY KEY);

statement error pq: constraint "check_i" in the middle of being added, try again later
ALTER TABLE t_96648 ADD CONSTRAINT check_i CHECK (i > 0), VALIDATE CONSTRAINT check_i;

statement ok
ALTER TABLE t_96648 ADD CONSTRAINT check_i CHECK (i > 0);

# Ensure check "check_i" is successfully added.
statement error pq: failed to satisfy CHECK constraint \(i > 0:::INT8\)
INSERT INTO t_96648 VALUES (0);

statement ok
ALTER TABLE t_96648 ADD CONSTRAINT check_i1 CHECK (i > 10) NOT VALID, VALIDATE CONSTRAINT check_i1;

# Ensure check "check_i2" is successfully added.
statement error pq: failed to satisfy CHECK constraint \(i > 10:::INT8\)
INSERT INTO t_96648 VALUES (5)

statement error pgcode 42704 constraint "check_i" of relation "t_96648" does not exist
ALTER TABLE t_96648 DROP CONSTRAINT check_i, VALIDATE CONSTRAINT check_i;

statement error pgcode 42704 constraint "check_i1" of relation "t_96648" does not exist
ALTER TABLE t_96648 DROP CONSTRAINT check_i1, VALIDATE CONSTRAINT check_i1;

statement ok
ALTER TABLE t_96648 DROP CONSTRAINT check_i, DROP CONSTRAINT check_i1;

statement ok
DROP TABLE t_96648
77 changes: 77 additions & 0 deletions pkg/sql/opt/exec/execbuilder/testdata/unique
@@ -1775,6 +1775,13 @@ ALTER TABLE uniq_overlaps_pk VALIDATE CONSTRAINT unique_a

# Same test as the previous, but now that the constraint has been validated, it
# can be treated as a key. This allows the joins to be more efficient.
#
# The implementation of VALIDATE CONSTRAINT differs slightly between the
# legacy and the declarative schema changer, which causes the output of the
# following EXPLAIN UPDATE to differ slightly as well: the ordering of the
# three same-level constraint checks changes. We therefore run the same
# EXPLAIN UPDATE twice with slightly different expected output.
skipif config local
query T
EXPLAIN UPDATE uniq_overlaps_pk SET a = 1, b = 2, c = 3, d = 4 WHERE a = 5
----
@@ -1844,6 +1851,76 @@ vectorized: true
└── • scan buffer
label: buffer 1

onlyif config local
query T
EXPLAIN UPDATE uniq_overlaps_pk SET a = 1, b = 2, c = 3, d = 4 WHERE a = 5
----
distribution: local
vectorized: true
·
• root
├── • update
│ │ table: uniq_overlaps_pk
│ │ set: a, b, c, d
│ │
│ └── • buffer
│ │ label: buffer 1
│ │
│ └── • render
│ │
│ └── • scan
│ missing stats
│ table: uniq_overlaps_pk@uniq_overlaps_pk_pkey
│ spans: [/5 - /5]
│ locking strength: for update
├── • constraint-check
│ │
│ └── • error if rows
│ │
│ └── • hash join (right semi)
│ │ equality: (b, c) = (b_new, c_new)
│ │ right cols are key
│ │ pred: a_new != a
│ │
│ ├── • scan
│ │ missing stats
│ │ table: uniq_overlaps_pk@uniq_overlaps_pk_pkey
│ │ spans: FULL SCAN
│ │
│ └── • scan buffer
│ label: buffer 1
├── • constraint-check
│ │
│ └── • error if rows
│ │
│ └── • hash join (right semi)
│ │ equality: (c, d) = (c_new, d_new)
│ │ right cols are key
│ │ pred: (a_new != a) OR (b_new != b)
│ │
│ ├── • scan
│ │ missing stats
│ │ table: uniq_overlaps_pk@uniq_overlaps_pk_pkey
│ │ spans: FULL SCAN
│ │
│ └── • scan buffer
│ label: buffer 1
└── • constraint-check
└── • error if rows
└── • lookup join (semi)
│ table: uniq_overlaps_pk@uniq_overlaps_pk_pkey
│ equality: (a_new) = (a)
│ pred: b_new != b
└── • scan buffer
label: buffer 1

# Update with non-constant input.
# No need to add a check for b,c since those columns weren't updated.
# Add inequality filters for the hidden primary key column.
3 changes: 2 additions & 1 deletion pkg/sql/schema_changer.go
@@ -1413,7 +1413,8 @@ func (sc *SchemaChanger) done(ctx context.Context) error {
}
}
if fk := m.AsForeignKey(); fk != nil && fk.Adding() &&
fk.GetConstraintValidity() == descpb.ConstraintValidity_Unvalidated {
(fk.GetConstraintValidity() == descpb.ConstraintValidity_Unvalidated ||
fk.GetConstraintValidity() == descpb.ConstraintValidity_Validated) {
// Add backreference on the referenced table (which could be the same table)
backrefTable, err := txn.Descriptors().MutableByID(txn.KV()).Table(ctx, fk.GetReferencedTableID())
if err != nil {