diff --git a/DEPS.bzl b/DEPS.bzl index 030a95d1f2fe..114db13e156b 100644 --- a/DEPS.bzl +++ b/DEPS.bzl @@ -2103,6 +2103,16 @@ def go_deps(): "https://storage.googleapis.com/cockroach-godeps/gomod/github.com/dustin/go-humanize/com_github_dustin_go_humanize-v1.0.0.zip", ], ) + go_repository( + name = "com_github_dvyukov_go_fuzz", + build_file_proto_mode = "disable_global", + importpath = "github.com/dvyukov/go-fuzz", + sha256 = "0a4c4bc0a550c729115d74f6a636e5802894b33bc50aa8af99c4a70196d5990b", + strip_prefix = "github.com/dvyukov/go-fuzz@v0.0.0-20210103155950-6a8e9d1f2415", + urls = [ + "https://storage.googleapis.com/cockroach-godeps/gomod/github.com/dvyukov/go-fuzz/com_github_dvyukov_go_fuzz-v0.0.0-20210103155950-6a8e9d1f2415.zip", + ], + ) go_repository( name = "com_github_eapache_go_resiliency", build_file_proto_mode = "disable_global", @@ -2173,6 +2183,16 @@ def go_deps(): "https://storage.googleapis.com/cockroach-godeps/gomod/github.com/elastic/gosigar/com_github_elastic_gosigar-v0.14.1.zip", ], ) + go_repository( + name = "com_github_elazarl_go_bindata_assetfs", + build_file_proto_mode = "disable_global", + importpath = "github.com/elazarl/go-bindata-assetfs", + sha256 = "ee91e4dedf0efd24ddf201e8f8b62f0b79a64efd0d205b30bcd9fa95f905cd15", + strip_prefix = "github.com/elazarl/go-bindata-assetfs@v1.0.1", + urls = [ + "https://storage.googleapis.com/cockroach-godeps/gomod/github.com/elazarl/go-bindata-assetfs/com_github_elazarl_go_bindata_assetfs-v1.0.1.zip", + ], + ) go_repository( name = "com_github_elazarl_goproxy", build_file_proto_mode = "disable_global", @@ -4107,6 +4127,16 @@ def go_deps(): "https://storage.googleapis.com/cockroach-godeps/gomod/github.com/influxdata/usage-client/com_github_influxdata_usage_client-v0.0.0-20160829180054-6d3895376368.zip", ], ) + go_repository( + name = "com_github_irfansharif_recorder", + build_file_proto_mode = "disable_global", + importpath = "github.com/irfansharif/recorder", + sha256 = "4a2f085d5339eba18558059c51110de1ff6d9ab8389ece8818fd2f62b7b2e7ab", + strip_prefix = "github.com/irfansharif/recorder@v0.0.0-20211218081646-a21b46510fd6", + urls = [ + "https://storage.googleapis.com/cockroach-godeps/gomod/github.com/irfansharif/recorder/com_github_irfansharif_recorder-v0.0.0-20211218081646-a21b46510fd6.zip", + ], + ) go_repository( name = "com_github_iris_contrib_blackfriday", build_file_proto_mode = "disable_global", @@ -4567,6 +4597,16 @@ def go_deps(): "https://storage.googleapis.com/cockroach-godeps/gomod/github.com/julienschmidt/httprouter/com_github_julienschmidt_httprouter-v1.3.0.zip", ], ) + go_repository( + name = "com_github_julusian_godocdown", + build_file_proto_mode = "disable_global", + importpath = "github.com/Julusian/godocdown", + sha256 = "1bd26f1d29b20d40b3eb0a5678691a2e6e153c473efe079b8b1bbd97a7cc1f57", + strip_prefix = "github.com/Julusian/godocdown@v0.0.0-20170816220326-6d19f8ff2df8", + urls = [ + "https://storage.googleapis.com/cockroach-godeps/gomod/github.com/Julusian/godocdown/com_github_julusian_godocdown-v0.0.0-20170816220326-6d19f8ff2df8.zip", + ], + ) go_repository( name = "com_github_jung_kurt_gofpdf", build_file_proto_mode = "disable_global", @@ -6603,6 +6643,16 @@ def go_deps(): "https://storage.googleapis.com/cockroach-godeps/gomod/github.com/retailnext/hllpp/com_github_retailnext_hllpp-v1.0.1-0.20180308014038-101a6d2f8b52.zip", ], ) + go_repository( + name = "com_github_robertkrimen_godocdown", + build_file_proto_mode = "disable_global", + importpath = "github.com/robertkrimen/godocdown", + sha256 = 
"789ed4a63a797e0dbac7c358eafa8fec4c9885f67ee61da941af4bad2d8c3b55", + strip_prefix = "github.com/robertkrimen/godocdown@v0.0.0-20130622164427-0bfa04905481", + urls = [ + "https://storage.googleapis.com/cockroach-godeps/gomod/github.com/robertkrimen/godocdown/com_github_robertkrimen_godocdown-v0.0.0-20130622164427-0bfa04905481.zip", + ], + ) go_repository( name = "com_github_robfig_cron_v3", build_file_proto_mode = "disable_global", @@ -7093,6 +7143,16 @@ def go_deps(): "https://storage.googleapis.com/cockroach-godeps/gomod/github.com/stefanberger/go-pkcs11uri/com_github_stefanberger_go_pkcs11uri-v0.0.0-20201008174630-78d3cae3a980.zip", ], ) + go_repository( + name = "com_github_stephens2424_writerset", + build_file_proto_mode = "disable_global", + importpath = "github.com/stephens2424/writerset", + sha256 = "a5444ddf04cda5666c4511e5ca793a80372d560376c4193a1fa2e2294d0760dc", + strip_prefix = "github.com/stephens2424/writerset@v1.0.2", + urls = [ + "https://storage.googleapis.com/cockroach-godeps/gomod/github.com/stephens2424/writerset/com_github_stephens2424_writerset-v1.0.2.zip", + ], + ) go_repository( name = "com_github_stoewer_go_strcase", build_file_proto_mode = "disable_global", diff --git a/dev b/dev index 9c0955b0a165..728fd677b6c7 100755 --- a/dev +++ b/dev @@ -3,7 +3,7 @@ set -euo pipefail # Bump this counter to force rebuilding `dev` on all machines. -DEV_VERSION=12 +DEV_VERSION=13 THIS_DIR=$(cd "$(dirname "$0")" && pwd) BINARY_DIR=$THIS_DIR/bin/dev-versions diff --git a/docs/generated/redact_safe.md b/docs/generated/redact_safe.md index ccc3e6228990..bbb781069e10 100644 --- a/docs/generated/redact_safe.md +++ b/docs/generated/redact_safe.md @@ -34,6 +34,7 @@ pkg/sql/catalog/descpb/structured.go | `DescriptorVersion` pkg/sql/catalog/descpb/structured.go | `IndexDescriptorVersion` pkg/sql/catalog/descpb/structured.go | `MutationID` pkg/sql/sem/catid/ids.go | `ColumnID` +pkg/sql/sem/catid/ids.go | `ConstraintID` pkg/sql/sem/catid/ids.go | `DescID` pkg/sql/sem/catid/ids.go | `FamilyID` pkg/sql/sem/catid/ids.go | `IndexID` diff --git a/go.mod b/go.mod index f44c1da5d695..226054889ef9 100644 --- a/go.mod +++ b/go.mod @@ -82,6 +82,7 @@ require ( github.com/gorilla/mux v1.8.0 github.com/goware/modvendor v0.5.0 github.com/grpc-ecosystem/grpc-gateway v1.16.0 + github.com/irfansharif/recorder v0.0.0-20211218081646-a21b46510fd6 github.com/jackc/pgconn v1.10.0 github.com/jackc/pgproto3/v2 v2.1.1 github.com/jackc/pgtype v1.8.1 diff --git a/go.sum b/go.sum index c02982bba372..76513dacaee1 100644 --- a/go.sum +++ b/go.sum @@ -165,6 +165,7 @@ github.com/GeertJohan/go.rice v1.0.0/go.mod h1:eH6gbSOAUv07dQuZVnBmoDP8mgsM1rtix github.com/HdrHistogram/hdrhistogram-go v1.1.0/go.mod h1:yDgFjdqOqDEKOvasDdhWNXYg9BVp4O+o5f6V/ehm6Oo= github.com/Joker/hpp v1.0.0/go.mod h1:8x5n+M1Hp5hC0g8okX3sR3vFQwynaX/UgSOM9MeBKzY= github.com/Joker/jade v1.0.1-0.20190614124447-d475f43051e7/go.mod h1:6E6s8o2AE4KhCrqr6GRJjdC/gNfTdxkIXvuGZZda2VM= +github.com/Julusian/godocdown v0.0.0-20170816220326-6d19f8ff2df8/go.mod h1:INZr5t32rG59/5xeltqoCJoNY7e5x/3xoY9WSWVWg74= github.com/Knetic/govaluate v3.0.1-0.20171022003610-9aa49832a739+incompatible/go.mod h1:r7JcOSlj0wfOMncg0iLm8Leh48TZaKVeNIfJntJ2wa0= github.com/Masterminds/glide v0.13.2/go.mod h1:STyF5vcenH/rUqTEv+/hBXlSTo7KYwg2oc2f4tzPWic= github.com/Masterminds/goutils v1.1.0 h1:zukEsf/1JZwCMgHiK3GZftabmxiCw4apj3a28RPBiVg= @@ -637,6 +638,7 @@ github.com/docopt/docopt-go v0.0.0-20180111231733-ee0de3bc6815/go.mod h1:WwZ+bS3 github.com/dustin/go-humanize 
v0.0.0-20171111073723-bb3d318650d4/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk= github.com/dustin/go-humanize v1.0.0 h1:VSnTsYCnlFHaM2/igO1h6X3HA71jcobQuxemgkq4zYo= github.com/dustin/go-humanize v1.0.0/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk= +github.com/dvyukov/go-fuzz v0.0.0-20210103155950-6a8e9d1f2415/go.mod h1:11Gm+ccJnvAhCNLlf5+cS9KjtbaD5I5zaZpFMsTHWTw= github.com/eapache/go-resiliency v1.1.0/go.mod h1:kFI+JgMyC7bLPUVY133qvEBtVayf5mFgVsvEsIPBvNs= github.com/eapache/go-resiliency v1.2.0 h1:v7g92e/KSN71Rq7vSThKaWIq68fL4YHvWyiUKorFR1Q= github.com/eapache/go-resiliency v1.2.0/go.mod h1:kFI+JgMyC7bLPUVY133qvEBtVayf5mFgVsvEsIPBvNs= @@ -650,6 +652,7 @@ github.com/edsrzf/mmap-go v1.0.0/go.mod h1:YO35OhQPt3KJa3ryjFM5Bs14WD66h8eGKpfaB github.com/eknkc/amber v0.0.0-20171010120322-cdade1c07385/go.mod h1:0vRUJqYpeSZifjYj7uP3BG/gKcuzL9xWVV/Y+cK33KM= github.com/elastic/gosigar v0.14.1 h1:T0aQ7n/n2ZA9W7DmAnj60v+qzqKERdBgJBO1CG2W6rc= github.com/elastic/gosigar v0.14.1/go.mod h1:iXRIGg2tLnu7LBdpqzyQfGDEidKCfWcCMS0WKyPWoMs= +github.com/elazarl/go-bindata-assetfs v1.0.1/go.mod h1:v+YaWX3bdea5J/mo8dSETolEo7R71Vk1u8bnjau5yw4= github.com/elazarl/goproxy v0.0.0-20170405201442-c4fc26588b6e/go.mod h1:/Zj4wYkgs4iZTTu3o/KG3Itv/qCCa8VVMlb3i9OVuzc= github.com/elazarl/goproxy v0.0.0-20180725130230-947c36da3153/go.mod h1:/Zj4wYkgs4iZTTu3o/KG3Itv/qCCa8VVMlb3i9OVuzc= github.com/emicklei/dot v0.15.0 h1:XDBW0Xco1QNyRb33cqLe10cT04yMWL1XpCZfa98Q6Og= @@ -1229,6 +1232,8 @@ github.com/influxdata/roaring v0.4.13-0.20180809181101-fc520f41fab6/go.mod h1:bS github.com/influxdata/tdigest v0.0.0-20181121200506-bf2b5ad3c0a9/go.mod h1:Js0mqiSBE6Ffsg94weZZ2c+v/ciT8QRHFOap7EKDrR0= github.com/influxdata/tdigest v0.0.2-0.20210216194612-fc98d27c9e8b/go.mod h1:Z0kXnxzbTC2qrx4NaIzYkE1k66+6oEDQTvL95hQFh5Y= github.com/influxdata/usage-client v0.0.0-20160829180054-6d3895376368/go.mod h1:Wbbw6tYNvwa5dlB6304Sd+82Z3f7PmVZHVKU637d4po= +github.com/irfansharif/recorder v0.0.0-20211218081646-a21b46510fd6 h1:fwL5Wt/OpP14udrdhV+INmmRES4GWoPWqHamttadwKU= +github.com/irfansharif/recorder v0.0.0-20211218081646-a21b46510fd6/go.mod h1:0vDkLIc8rDX+zYp5wX/DG5MAWaHBAqmtXH/SE54vhpY= github.com/iris-contrib/blackfriday v2.0.0+incompatible/go.mod h1:UzZ2bDEoaSGPbkg6SAB4att1aAwTmVIx/5gCVqeyUdI= github.com/iris-contrib/go.uuid v2.0.0+incompatible/go.mod h1:iz2lgM/1UnEf1kP0L/+fafWORmlnuysV2EMP8MW+qe0= github.com/iris-contrib/i18n v0.0.0-20171121225848-987a633949d0/go.mod h1:pMCz62A0xJL6I+umB2YTlFRwWXaDFA0jy+5HzGiJjqI= @@ -1841,6 +1846,7 @@ github.com/rcrowley/go-metrics v0.0.0-20201227073835-cf1acfcdf475 h1:N/ElC8H3+5X github.com/rcrowley/go-metrics v0.0.0-20201227073835-cf1acfcdf475/go.mod h1:bCqnVzQkZxMG4s8nGwiZ5l3QUCyqpo9Y+/ZMZ9VjZe4= github.com/remyoudompheng/bigfft v0.0.0-20170806203942-52369c62f446/go.mod h1:uYEyJGbgTkfkS4+E/PavXkNJcbFIpEtjt2B0KDQ5+9M= github.com/retailnext/hllpp v1.0.1-0.20180308014038-101a6d2f8b52/go.mod h1:RDpi1RftBQPUCDRw6SmxeaREsAaRKnOclghuzp/WRzc= +github.com/robertkrimen/godocdown v0.0.0-20130622164427-0bfa04905481/go.mod h1:C9WhFzY47SzYBIvzFqSvHIR6ROgDo4TtdTuRaOMjF/s= github.com/robfig/cron/v3 v3.0.1 h1:WdRxkvbJztn8LMz/QEvLN5sBU+xKpSqwwUO1Pjr4qDs= github.com/robfig/cron/v3 v3.0.1/go.mod h1:eQICP3HwyT7UooqI/z+Ov+PtYAWygg1TEWWzGIFLtro= github.com/rogpeppe/fastuuid v0.0.0-20150106093220-6724a57986af/go.mod h1:XWv6SoW27p1b0cqNHllgS5HIMJraePCO15w5zCzIWYg= @@ -1952,6 +1958,7 @@ github.com/spf13/viper v1.7.1/go.mod h1:8WkrPz2fc9jxqZNCJI/76HCieCp4Q8HaLFoCha5q github.com/spf13/viper v1.8.1 
h1:Kq1fyeebqsBfbjZj4EL7gj2IO0mMaiyjYUWcUsl2O44= github.com/spf13/viper v1.8.1/go.mod h1:o0Pch8wJ9BVSWGQMbra6iw0oQ5oktSIBaujf1rJH9Ns= github.com/stefanberger/go-pkcs11uri v0.0.0-20201008174630-78d3cae3a980/go.mod h1:AO3tvPzVZ/ayst6UlUKUv6rcPQInYe3IknH3jYhAKu8= +github.com/stephens2424/writerset v1.0.2/go.mod h1:aS2JhsMn6eA7e82oNmW4rfsgAOp9COBTTl8mzkwADnc= github.com/stoewer/go-strcase v1.2.0/go.mod h1:IBiWB2sKIp3wVVQ3Y035++gc+knqhUQag1KpM8ahLw8= github.com/streadway/amqp v0.0.0-20190404075320-75d898a42a94/go.mod h1:AZpEONHx3DKn8O/DFsRAY58/XVQiIPMTMB1SddzLXVw= github.com/streadway/amqp v0.0.0-20190827072141-edfb9018d271/go.mod h1:AZpEONHx3DKn8O/DFsRAY58/XVQiIPMTMB1SddzLXVw= @@ -2507,6 +2514,7 @@ golang.org/x/sys v0.0.0-20210124154548-22da62e12c0c/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20210220050731-9a76102bfb43/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210303074136-134d130e1a04/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210305230114-8fe3ee5dd75b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210309074719-68d13333faf2/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210315160823-c6e025ad8005/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210320140829-1e4c9ba3b0c4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210324051608-47abb6519492/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= diff --git a/pkg/cli/doctor_test.go b/pkg/cli/doctor_test.go index 269f0aaea8f5..2ec01e99ee28 100644 --- a/pkg/cli/doctor_test.go +++ b/pkg/cli/doctor_test.go @@ -57,7 +57,7 @@ func TestDoctorZipDir(t *testing.T) { defer c.Cleanup() t.Run("examine", func(t *testing.T) { - out, err := c.RunWithCapture("debug doctor examine zipdir testdata/doctor/debugzip 21.1") + out, err := c.RunWithCapture("debug doctor examine zipdir testdata/doctor/debugzip 21.1-52") if err != nil { t.Fatal(err) } @@ -81,7 +81,7 @@ func TestDoctorZipDir(t *testing.T) { }) t.Run("deprecated doctor zipdir with verbose", func(t *testing.T) { - out, err := c.RunWithCapture("debug doctor zipdir testdata/doctor/debugzip --verbose") + out, err := c.RunWithCapture("debug doctor zipdir testdata/doctor/debugzip 21.11-52 --verbose") if err != nil { t.Fatal(err) } diff --git a/pkg/cli/testdata/doctor/test_examine_zipdir b/pkg/cli/testdata/doctor/test_examine_zipdir index 54305933a222..2d54d445e6f5 100644 --- a/pkg/cli/testdata/doctor/test_examine_zipdir +++ b/pkg/cli/testdata/doctor/test_examine_zipdir @@ -1,6 +1,6 @@ debug doctor examine zipdir testdata/doctor/debugzip ---- -debug doctor examine zipdir testdata/doctor/debugzip 21.1 +debug doctor examine zipdir testdata/doctor/debugzip 21.1-52 WARNING: errors occurred during the production of system.jobs.txt, contents may be missing or incomplete. Examining 37 descriptors and 42 namespace entries... 
ParentID 52, ParentSchemaID 29: relation "users" (53): referenced database ID 52: referenced descriptor not found diff --git a/pkg/cli/testdata/doctor/test_examine_zipdir_verbose b/pkg/cli/testdata/doctor/test_examine_zipdir_verbose index 3e864a3243ef..f7a362b135c1 100644 --- a/pkg/cli/testdata/doctor/test_examine_zipdir_verbose +++ b/pkg/cli/testdata/doctor/test_examine_zipdir_verbose @@ -1,6 +1,6 @@ debug doctor zipdir --verbose ---- -debug doctor zipdir testdata/doctor/debugzip --verbose +debug doctor zipdir testdata/doctor/debugzip 21.11-52 --verbose reading testdata/doctor/debugzip/system.descriptor.txt reading testdata/doctor/debugzip/system.namespace.txt WARNING: errors occurred during the production of system.jobs.txt, contents may be missing or incomplete. @@ -39,15 +39,16 @@ Examining 37 descriptors and 42 namespace entries... ParentID 0, ParentSchemaID 0: database "postgres" (51): processed ParentID 52, ParentSchemaID 29: relation "users" (53): referenced database ID 52: referenced descriptor not found ParentID 52, ParentSchemaID 29: relation "users" (53): processed - ParentID 52, ParentSchemaID 29: relation "vehicles" (54): referenced database ID 52: referenced descriptor not found + ParentID 52, ParentSchemaID 29: relation "vehicles" (54): constraint id was missing for constraint: FOREIGN KEY with name "fk_city_ref_users" ParentID 52, ParentSchemaID 29: relation "vehicles" (54): processed - ParentID 52, ParentSchemaID 29: relation "rides" (55): referenced database ID 52: referenced descriptor not found + ParentID 52, ParentSchemaID 29: relation "rides" (55): constraint id was missing for constraint: FOREIGN KEY with name "fk_city_ref_users" + ParentID 52, ParentSchemaID 29: relation "rides" (55): constraint id was missing for constraint: FOREIGN KEY with name "fk_vehicle_city_ref_vehicles" ParentID 52, ParentSchemaID 29: relation "rides" (55): processed - ParentID 52, ParentSchemaID 29: relation "vehicle_location_histories" (56): referenced database ID 52: referenced descriptor not found + ParentID 52, ParentSchemaID 29: relation "vehicle_location_histories" (56): constraint id was missing for constraint: FOREIGN KEY with name "fk_city_ref_rides" ParentID 52, ParentSchemaID 29: relation "vehicle_location_histories" (56): processed ParentID 52, ParentSchemaID 29: relation "promo_codes" (57): referenced database ID 52: referenced descriptor not found ParentID 52, ParentSchemaID 29: relation "promo_codes" (57): processed - ParentID 52, ParentSchemaID 29: relation "user_promo_codes" (58): referenced database ID 52: referenced descriptor not found + ParentID 52, ParentSchemaID 29: relation "user_promo_codes" (58): constraint id was missing for constraint: FOREIGN KEY with name "fk_city_ref_users" ParentID 52, ParentSchemaID 29: relation "user_promo_codes" (58): processed ParentID 0, ParentSchemaID 0: namespace entry "defaultdb" (50): processed ParentID 0, ParentSchemaID 0: namespace entry "movr" (52): descriptor not found diff --git a/pkg/cli/testdata/doctor/test_recreate_zipdir b/pkg/cli/testdata/doctor/test_recreate_zipdir index ad063c61522c..2c0249ed2b11 100644 --- a/pkg/cli/testdata/doctor/test_recreate_zipdir +++ b/pkg/cli/testdata/doctor/test_recreate_zipdir @@ -12,16 +12,16 @@ SELECT crdb_internal.unsafe_upsert_descriptor(50, decode('12380a0964656661756c74 SELECT crdb_internal.unsafe_upsert_namespace_entry(0, 0, 'defaultdb', 50, true); SELECT crdb_internal.unsafe_upsert_descriptor(51, 
decode('12370a08706f73746772657310331a210a0b0a0561646d696e100218000a0a0a04726f6f74100218001204726f6f7418012200280140004a00', 'hex'), true); SELECT crdb_internal.unsafe_upsert_namespace_entry(0, 0, 'postgres', 51, true); -SELECT crdb_internal.unsafe_upsert_descriptor(53, decode('0aee040a0575736572731835203428013a0042280a02696410011a0d080e100018003000508617600020003000680070007800800100880100980100422a0a046369747910021a0d0807100018003007509308600020003000680070007800800100880100980100422a0a046e616d6510031a0d0807100018003007509308600020013000680070007800800100880100980100422d0a076164647265737310041a0d080710001800300750930860002001300068007000780080010088010098010042310a0b6372656469745f6361726410051a0d08071000180030075093086000200130006800700078008001008801009801004806525a0a077072696d617279100118012204636974792202696430023001400040004a10080010001a00200028003000380040005a007a0408002000800100880100900101980100a20106080012001800a80100b20100ba0100c0010060026a210a0b0a0561646d696e100218000a0a0a04726f6f74100218001204726f6f741801800101880103980100b2013d0a077072696d61727910001a0269641a04636974791a046e616d651a07616464726573731a0b6372656469745f63617264200120022003200420052800b80101c20100e80100f2010408001200f801008002009202009a0200aa02270836100210041802180120352a11666b5f636974795f7265665f75736572733002380040004800aa02270837100210041802180120352a11666b5f636974795f7265665f75736572733002380040004800aa0227083a100110021802180120352a11666b5f636974795f7265665f75736572733002380040004800b20200b80200c0021dc80200e00200f00200800300', 'hex'), true); +SELECT crdb_internal.unsafe_upsert_descriptor(53, decode('0afa040a0575736572731835203428013a0042280a02696410011a0d080e100018003000508617600020003000680070007800800100880100980100422a0a046369747910021a0d0807100018003007509308600020003000680070007800800100880100980100422a0a046e616d6510031a0d0807100018003007509308600020013000680070007800800100880100980100422d0a076164647265737310041a0d080710001800300750930860002001300068007000780080010088010098010042310a0b6372656469745f6361726410051a0d08071000180030075093086000200130006800700078008001008801009801004806525d0a077072696d617279100118012204636974792202696430023001400040004a10080010001a00200028003000380040005a007a0408002000800100880100900101980100a20106080012001800a80100b20100ba0100c0010088020060026a210a0b0a0561646d696e100218000a0a0a04726f6f74100218001204726f6f741801800101880103980100b2013d0a077072696d61727910001a0269641a04636974791a046e616d651a07616464726573731a0b6372656469745f63617264200120022003200420052800b80101c20100e80100f2010408001200f801008002009202009a0200aa02290836100210041802180120352a11666b5f636974795f7265665f757365727330023800400048007000aa02290837100210041802180120352a11666b5f636974795f7265665f757365727330023800400048007000aa0229083a100110021802180120352a11666b5f636974795f7265665f757365727330023800400048007000b20200b80200c0021dc80200e00200f00200800300880300', 'hex'), true); SELECT crdb_internal.unsafe_upsert_namespace_entry(52, 29, 'users', 53, true); -SELECT crdb_internal.unsafe_upsert_descriptor(54, 
decode('0a91070a0876656869636c65731836203428013a0042280a02696410011a0d080e100018003000508617600020003000680070007800800100880100980100422a0a046369747910021a0d0807100018003007509308600020003000680070007800800100880100980100422a0a047479706510031a0d0807100018003007509308600020013000680070007800800100880100980100422e0a086f776e65725f696410041a0d080e10001800300050861760002001300068007000780080010088010098010042330a0d6372656174696f6e5f74696d6510051a0d080510001800300050da08600020013000680070007800800100880100980100422c0a0673746174757310061a0d080710001800300750930860002001300068007000780080010088010098010042360a1063757272656e745f6c6f636174696f6e10071a0d080710001800300750930860002001300068007000780080010088010098010042290a0365787410081a0d081210001800300050da1d6000200130006800700078008001008801009801004809525a0a077072696d617279100118012204636974792202696430023001400040004a10080010001a00200028003000380040005a007a0408002000800100880100900101980100a20106080012001800a80100b20100ba0100c001005a80010a2576656869636c65735f6175746f5f696e6465785f666b5f636974795f7265665f75736572731002180022046369747922086f776e65725f6964300230043801400040004a10080010001a00200028003000380040005a007a0408002000800100880100900101980100a20106080012001800a80100b20100ba0100c0010060036a210a0b0a0561646d696e100218000a0a0a04726f6f74100218001204726f6f741801800102880103980100b201650a077072696d61727910001a0269641a04636974791a04747970651a086f776e65725f69641a0d6372656174696f6e5f74696d651a067374617475731a1063757272656e745f6c6f636174696f6e1a03657874200120022003200420052006200720082800b80101c20100e80100f2010408001200f801008002009202009a0200a202270836100210041802180120352a11666b5f636974795f7265665f75736572733000380040004800aa02320837100310051802180120362a1c666b5f76656869636c655f636974795f7265665f76656869636c65733002380040004800b20200b80200c0021dc80200e00200f00200800300', 'hex'), true); +SELECT crdb_internal.unsafe_upsert_descriptor(54, 
decode('0a9e070a0876656869636c65731836203428013a0042280a02696410011a0d080e100018003000508617600020003000680070007800800100880100980100422a0a046369747910021a0d0807100018003007509308600020003000680070007800800100880100980100422a0a047479706510031a0d0807100018003007509308600020013000680070007800800100880100980100422e0a086f776e65725f696410041a0d080e10001800300050861760002001300068007000780080010088010098010042330a0d6372656174696f6e5f74696d6510051a0d080510001800300050da08600020013000680070007800800100880100980100422c0a0673746174757310061a0d080710001800300750930860002001300068007000780080010088010098010042360a1063757272656e745f6c6f636174696f6e10071a0d080710001800300750930860002001300068007000780080010088010098010042290a0365787410081a0d081210001800300050da1d6000200130006800700078008001008801009801004809525d0a077072696d617279100118012204636974792202696430023001400040004a10080010001a00200028003000380040005a007a0408002000800100880100900101980100a20106080012001800a80100b20100ba0100c001008802005a83010a2576656869636c65735f6175746f5f696e6465785f666b5f636974795f7265665f75736572731002180022046369747922086f776e65725f6964300230043801400040004a10080010001a00200028003000380040005a007a0408002000800100880100900101980100a20106080012001800a80100b20100ba0100c0010088020060036a210a0b0a0561646d696e100218000a0a0a04726f6f74100218001204726f6f741801800102880103980100b201650a077072696d61727910001a0269641a04636974791a04747970651a086f776e65725f69641a0d6372656174696f6e5f74696d651a067374617475731a1063757272656e745f6c6f636174696f6e1a03657874200120022003200420052006200720082800b80101c20100e80100f2010408001200f801008002009202009a0200a202290836100210041802180120352a11666b5f636974795f7265665f757365727330003800400048007000aa02340837100310051802180120362a1c666b5f76656869636c655f636974795f7265665f76656869636c657330023800400048007000b20200b80200c0021dc80200e00200f00200800300880300', 'hex'), true); SELECT crdb_internal.unsafe_upsert_namespace_entry(52, 29, 'vehicles', 54, true); -SELECT crdb_internal.unsafe_upsert_descriptor(55, 
decode('0a990a0a0572696465731837203428013a0042280a02696410011a0d080e100018003000508617600020003000680070007800800100880100980100422a0a046369747910021a0d080710001800300750930860002000300068007000780080010088010098010042320a0c76656869636c655f6369747910031a0d0807100018003007509308600020013000680070007800800100880100980100422e0a0872696465725f696410041a0d080e10001800300050861760002001300068007000780080010088010098010042300a0a76656869636c655f696410051a0d080e10001800300050861760002001300068007000780080010088010098010042330a0d73746172745f6164647265737310061a0d080710001800300750930860002001300068007000780080010088010098010042310a0b656e645f6164647265737310071a0d080710001800300750930860002001300068007000780080010088010098010042300a0a73746172745f74696d6510081a0d080510001800300050da08600020013000680070007800800100880100980100422e0a08656e645f74696d6510091a0d080510001800300050da08600020013000680070007800800100880100980100422d0a07726576656e7565100a1a0d08031002180a300050a40d600020013000680070007800800100880100980100480b525a0a077072696d617279100118012204636974792202696430023001400040004a10080010001a00200028003000380040005a007a0408002000800100880100900101980100a20106080012001800a80100b20100ba0100c001005a7d0a2272696465735f6175746f5f696e6465785f666b5f636974795f7265665f757365727310021800220463697479220872696465725f6964300230043801400040004a10080010001a00200028003000380040005a007a0408002000800100880100900101980100a20106080012001800a80100b20100ba0100c001005a94010a2d72696465735f6175746f5f696e6465785f666b5f76656869636c655f636974795f7265665f76656869636c657310031800220c76656869636c655f63697479220a76656869636c655f69643003300538023801400040004a10080010001a00200028003000380040005a007a0408002000800100880100900101980100a20106080012001800a80100b20100ba0100c0010060046a210a0b0a0561646d696e100218000a0a0a04726f6f74100218001204726f6f741801800103880103980100a201380a1376656869636c655f63697479203d20636974791217636865636b5f76656869636c655f636974795f6369747918002802280330003800b2018a010a077072696d61727910001a0269641a04636974791a0c76656869636c655f636974791a0872696465725f69641a0a76656869636c655f69641a0d73746172745f616464726573731a0b656e645f616464726573731a0a73746172745f74696d651a08656e645f74696d651a07726576656e7565200120022003200420052006200720082009200a2800b80101c20100e80100f2010408001200f801008002009202009a0200a202270837100210041802180120352a11666b5f636974795f7265665f75736572733000380040004800a202320837100310051802180120362a1c666b5f76656869636c655f636974795f7265665f76656869636c65733000380040004800aa02270838100110021802180120372a11666b5f636974795f7265665f72696465733002380040004800b20200b80200c0021dc80200e00200f00200800300', 'hex'), true); +SELECT crdb_internal.unsafe_upsert_descriptor(55, 
decode('0aae0a0a0572696465731837203428013a0042280a02696410011a0d080e100018003000508617600020003000680070007800800100880100980100422a0a046369747910021a0d080710001800300750930860002000300068007000780080010088010098010042320a0c76656869636c655f6369747910031a0d0807100018003007509308600020013000680070007800800100880100980100422e0a0872696465725f696410041a0d080e10001800300050861760002001300068007000780080010088010098010042300a0a76656869636c655f696410051a0d080e10001800300050861760002001300068007000780080010088010098010042330a0d73746172745f6164647265737310061a0d080710001800300750930860002001300068007000780080010088010098010042310a0b656e645f6164647265737310071a0d080710001800300750930860002001300068007000780080010088010098010042300a0a73746172745f74696d6510081a0d080510001800300050da08600020013000680070007800800100880100980100422e0a08656e645f74696d6510091a0d080510001800300050da08600020013000680070007800800100880100980100422d0a07726576656e7565100a1a0d08031002180a300050a40d600020013000680070007800800100880100980100480b525d0a077072696d617279100118012204636974792202696430023001400040004a10080010001a00200028003000380040005a007a0408002000800100880100900101980100a20106080012001800a80100b20100ba0100c001008802005a80010a2272696465735f6175746f5f696e6465785f666b5f636974795f7265665f757365727310021800220463697479220872696465725f6964300230043801400040004a10080010001a00200028003000380040005a007a0408002000800100880100900101980100a20106080012001800a80100b20100ba0100c001008802005a97010a2d72696465735f6175746f5f696e6465785f666b5f76656869636c655f636974795f7265665f76656869636c657310031800220c76656869636c655f63697479220a76656869636c655f69643003300538023801400040004a10080010001a00200028003000380040005a007a0408002000800100880100900101980100a20106080012001800a80100b20100ba0100c0010088020060046a210a0b0a0561646d696e100218000a0a0a04726f6f74100218001204726f6f741801800103880103980100a2013a0a1376656869636c655f63697479203d20636974791217636865636b5f76656869636c655f636974795f63697479180028022803300038004000b2018a010a077072696d61727910001a0269641a04636974791a0c76656869636c655f636974791a0872696465725f69641a0a76656869636c655f69641a0d73746172745f616464726573731a0b656e645f616464726573731a0a73746172745f74696d651a08656e645f74696d651a07726576656e7565200120022003200420052006200720082009200a2800b80101c20100e80100f2010408001200f801008002009202009a0200a202290837100210041802180120352a11666b5f636974795f7265665f757365727330003800400048007000a202340837100310051802180120362a1c666b5f76656869636c655f636974795f7265665f76656869636c657330003800400048007000aa02290838100110021802180120372a11666b5f636974795f7265665f726964657330023800400048007000b20200b80200c0021dc80200e00200f00200800300880300', 'hex'), true); SELECT crdb_internal.unsafe_upsert_namespace_entry(52, 29, 'rides', 55, true); -SELECT crdb_internal.unsafe_upsert_descriptor(56, 
decode('0ac1040a1a76656869636c655f6c6f636174696f6e5f686973746f726965731838203428013a00422a0a046369747910011a0d0807100018003007509308600020003000680070007800800100880100980100422d0a07726964655f696410021a0d080e100018003000508617600020003000680070007800800100880100980100422f0a0974696d657374616d7010031a0d080510001800300050da0860002000300068007000780080010088010098010042290a036c617410041a0d080210401800300050bd05600020013000680070007800800100880100980100422a0a046c6f6e6710051a0d080210401800300050bd056000200130006800700078008001008801009801004806526e0a077072696d617279100118012204636974792207726964655f6964220974696d657374616d703001300230034000400040004a10080010001a00200028003000380040005a007a0408002000800100880100900101980100a20106080012001800a80100b20100ba0100c0010060026a210a0b0a0561646d696e100218000a0a0a04726f6f74100218001204726f6f741801800102880103980100b2013c0a077072696d61727910001a04636974791a07726964655f69641a0974696d657374616d701a036c61741a046c6f6e67200120022003200420052800b80101c20100e80100f2010408001200f801008002009202009a0200a202270838100110021802180120372a11666b5f636974795f7265665f72696465733000380040004800b20200b80200c0021dc80200e00200f00200800300', 'hex'), true); +SELECT crdb_internal.unsafe_upsert_descriptor(56, decode('0ac9040a1a76656869636c655f6c6f636174696f6e5f686973746f726965731838203428013a00422a0a046369747910011a0d0807100018003007509308600020003000680070007800800100880100980100422d0a07726964655f696410021a0d080e100018003000508617600020003000680070007800800100880100980100422f0a0974696d657374616d7010031a0d080510001800300050da0860002000300068007000780080010088010098010042290a036c617410041a0d080210401800300050bd05600020013000680070007800800100880100980100422a0a046c6f6e6710051a0d080210401800300050bd05600020013000680070007800800100880100980100480652710a077072696d617279100118012204636974792207726964655f6964220974696d657374616d703001300230034000400040004a10080010001a00200028003000380040005a007a0408002000800100880100900101980100a20106080012001800a80100b20100ba0100c0010088020060026a210a0b0a0561646d696e100218000a0a0a04726f6f74100218001204726f6f741801800102880103980100b2013c0a077072696d61727910001a04636974791a07726964655f69641a0974696d657374616d701a036c61741a046c6f6e67200120022003200420052800b80101c20100e80100f2010408001200f801008002009202009a0200a202290838100110021802180120372a11666b5f636974795f7265665f726964657330003800400048007000b20200b80200c0021dc80200e00200f00200800300880300', 'hex'), true); SELECT crdb_internal.unsafe_upsert_namespace_entry(52, 29, 'vehicle_location_histories', 56, true); -SELECT crdb_internal.unsafe_upsert_descriptor(57, 
decode('0a96040a0b70726f6d6f5f636f6465731839203428013a00422a0a04636f646510011a0d080710001800300750930860002000300068007000780080010088010098010042310a0b6465736372697074696f6e10021a0d080710001800300750930860002001300068007000780080010088010098010042330a0d6372656174696f6e5f74696d6510031a0d080510001800300050da0860002001300068007000780080010088010098010042350a0f65787069726174696f6e5f74696d6510041a0d080510001800300050da08600020013000680070007800800100880100980100422b0a0572756c657310051a0d081210001800300050da1d600020013000680070007800800100880100980100480652520a077072696d617279100118012204636f6465300140004a10080010001a00200028003000380040005a007a0408002000800100880100900101980100a20106080012001800a80100b20100ba0100c0010060026a210a0b0a0561646d696e100218000a0a0a04726f6f74100218001204726f6f741801800101880103980100b201510a077072696d61727910001a04636f64651a0b6465736372697074696f6e1a0d6372656174696f6e5f74696d651a0f65787069726174696f6e5f74696d651a0572756c6573200120022003200420052800b80101c20100e80100f2010408001200f801008002009202009a0200b20200b80200c0021dc80200e00200f00200800300', 'hex'), true); +SELECT crdb_internal.unsafe_upsert_descriptor(57, decode('0a9c040a0b70726f6d6f5f636f6465731839203428013a00422a0a04636f646510011a0d080710001800300750930860002000300068007000780080010088010098010042310a0b6465736372697074696f6e10021a0d080710001800300750930860002001300068007000780080010088010098010042330a0d6372656174696f6e5f74696d6510031a0d080510001800300050da0860002001300068007000780080010088010098010042350a0f65787069726174696f6e5f74696d6510041a0d080510001800300050da08600020013000680070007800800100880100980100422b0a0572756c657310051a0d081210001800300050da1d600020013000680070007800800100880100980100480652550a077072696d617279100118012204636f6465300140004a10080010001a00200028003000380040005a007a0408002000800100880100900101980100a20106080012001800a80100b20100ba0100c0010088020060026a210a0b0a0561646d696e100218000a0a0a04726f6f74100218001204726f6f741801800101880103980100b201510a077072696d61727910001a04636f64651a0b6465736372697074696f6e1a0d6372656174696f6e5f74696d651a0f65787069726174696f6e5f74696d651a0572756c6573200120022003200420052800b80101c20100e80100f2010408001200f801008002009202009a0200b20200b80200c0021dc80200e00200f00200800300880300', 'hex'), true); SELECT crdb_internal.unsafe_upsert_namespace_entry(52, 29, 'promo_codes', 57, true); -SELECT crdb_internal.unsafe_upsert_descriptor(58, 
decode('0ac1040a10757365725f70726f6d6f5f636f646573183a203428013a00422a0a046369747910011a0d0807100018003007509308600020003000680070007800800100880100980100422d0a07757365725f696410021a0d080e100018003000508617600020003000680070007800800100880100980100422a0a04636f646510031a0d0807100018003007509308600020003000680070007800800100880100980100422f0a0974696d657374616d7010041a0d080510001800300050da0860002001300068007000780080010088010098010042300a0b75736167655f636f756e7410051a0c08011040180030005014600020013000680070007800800100880100980100480652690a077072696d617279100118012204636974792207757365725f69642204636f64653001300230034000400040004a10080010001a00200028003000380040005a007a0408002000800100880100900101980100a20106080012001800a80100b20100ba0100c0010060026a210a0b0a0561646d696e100218000a0a0a04726f6f74100218001204726f6f741801800102880103980100b201440a077072696d61727910001a04636974791a07757365725f69641a04636f64651a0974696d657374616d701a0b75736167655f636f756e74200120022003200420052800b80101c20100e80100f2010408001200f801008002009202009a0200a20227083a100110021802180120352a11666b5f636974795f7265665f75736572733000380040004800b20200b80200c0021dc80200e00200f00200800300', 'hex'), true); +SELECT crdb_internal.unsafe_upsert_descriptor(58, decode('0ac9040a10757365725f70726f6d6f5f636f646573183a203428013a00422a0a046369747910011a0d0807100018003007509308600020003000680070007800800100880100980100422d0a07757365725f696410021a0d080e100018003000508617600020003000680070007800800100880100980100422a0a04636f646510031a0d0807100018003007509308600020003000680070007800800100880100980100422f0a0974696d657374616d7010041a0d080510001800300050da0860002001300068007000780080010088010098010042300a0b75736167655f636f756e7410051a0c080110401800300050146000200130006800700078008001008801009801004806526c0a077072696d617279100118012204636974792207757365725f69642204636f64653001300230034000400040004a10080010001a00200028003000380040005a007a0408002000800100880100900101980100a20106080012001800a80100b20100ba0100c0010088020060026a210a0b0a0561646d696e100218000a0a0a04726f6f74100218001204726f6f741801800102880103980100b201440a077072696d61727910001a04636974791a07757365725f69641a04636f64651a0974696d657374616d701a0b75736167655f636f756e74200120022003200420052800b80101c20100e80100f2010408001200f801008002009202009a0200a20229083a100110021802180120352a11666b5f636974795f7265665f757365727330003800400048007000b20200b80200c0021dc80200e00200f00200800300880300', 'hex'), true); SELECT crdb_internal.unsafe_upsert_namespace_entry(52, 29, 'user_promo_codes', 58, true); COMMIT; diff --git a/pkg/cmd/dev/BUILD.bazel b/pkg/cmd/dev/BUILD.bazel index f9f7fd8edb3f..90592d536b55 100644 --- a/pkg/cmd/dev/BUILD.bazel +++ b/pkg/cmd/dev/BUILD.bazel @@ -42,16 +42,17 @@ go_test( name = "dev_test", srcs = [ "datadriven_test.go", - "dev_test.go", + "recorderdriven_test.go", ], data = glob(["testdata/**"]), embed = [":dev_lib"], deps = [ "//pkg/cmd/dev/io/exec", "//pkg/cmd/dev/io/os", - "//pkg/cmd/dev/recording", "//pkg/testutils", + "@com_github_alessio_shellescape//:shellescape", "@com_github_cockroachdb_datadriven//:datadriven", + "@com_github_irfansharif_recorder//:recorder", "@com_github_stretchr_testify//require", ], ) diff --git a/pkg/cmd/dev/bench.go b/pkg/cmd/dev/bench.go index 1dfd42bad544..35f5fc54d98f 100644 --- a/pkg/cmd/dev/bench.go +++ b/pkg/cmd/dev/bench.go @@ -12,8 +12,6 @@ package main import ( "fmt" - "path/filepath" - "sort" "strings" "github.com/spf13/cobra" @@ -42,6 +40,7 @@ func makeBenchCmd(runE func(cmd *cobra.Command, args []string) error) *cobra.Com 
benchCmd.Flags().BoolP(vFlag, "v", false, "show benchmark process output") benchCmd.Flags().BoolP(showLogsFlag, "", false, "show crdb logs in-line") benchCmd.Flags().Int(countFlag, 1, "run benchmark n times") + benchCmd.Flags().Bool(ignoreCacheFlag, false, "ignore cached benchmark runs") // We use a string flag for benchtime instead of a duration; the go test // runner accepts input of the form "Nx" to run the benchmark N times (see // `go help testflag`). @@ -55,14 +54,15 @@ func (d *dev) bench(cmd *cobra.Command, commandLine []string) error { pkgs, additionalBazelArgs := splitArgsAtDash(cmd, commandLine) ctx := cmd.Context() var ( - filter = mustGetFlagString(cmd, filterFlag) - timeout = mustGetFlagDuration(cmd, timeoutFlag) - short = mustGetFlagBool(cmd, shortFlag) - showLogs = mustGetFlagBool(cmd, showLogsFlag) - verbose = mustGetFlagBool(cmd, vFlag) - count = mustGetFlagInt(cmd, countFlag) - benchTime = mustGetFlagString(cmd, benchTimeFlag) - benchMem = mustGetFlagBool(cmd, benchMemFlag) + filter = mustGetFlagString(cmd, filterFlag) + ignoreCache = mustGetFlagBool(cmd, ignoreCacheFlag) + timeout = mustGetFlagDuration(cmd, timeoutFlag) + short = mustGetFlagBool(cmd, shortFlag) + showLogs = mustGetFlagBool(cmd, showLogsFlag) + verbose = mustGetFlagBool(cmd, vFlag) + count = mustGetFlagInt(cmd, countFlag) + benchTime = mustGetFlagString(cmd, benchTimeFlag) + benchMem = mustGetFlagBool(cmd, benchMemFlag) ) // Enumerate all benches to run. @@ -70,86 +70,76 @@ func (d *dev) bench(cmd *cobra.Command, commandLine []string) error { // Empty `dev bench` does the same thing as `dev bench pkg/...` pkgs = append(pkgs, "pkg/...") } - benchesMap := make(map[string]bool) + + var args []string + args = append(args, "test") + args = append(args, mustGetRemoteCacheArgs(remoteCacheAddr)...) + if numCPUs != 0 { + args = append(args, fmt.Sprintf("--local_cpu_resources=%d", numCPUs)) + } + if timeout > 0 { + args = append(args, fmt.Sprintf("--test_timeout=%d", int(timeout.Seconds()))) + } + + var testTargets []string for _, pkg := range pkgs { - dir, isRecursive, tag, err := d.parsePkg(pkg) - if err != nil { - return err + pkg = strings.TrimPrefix(pkg, "//") + pkg = strings.TrimPrefix(pkg, "./") + pkg = strings.TrimRight(pkg, "/") + + if !strings.HasPrefix(pkg, "pkg/") { + return fmt.Errorf("malformed package %q, expecting %q", pkg, "pkg/{...}") } - if isRecursive { - // Use `git grep` to find all Go files that contain benchmark tests. - out, err := d.exec.CommandContextSilent(ctx, "git", "grep", "-l", "^func Benchmark", "--", dir+"/*_test.go") - if err != nil { - return err - } - files := strings.Split(strings.TrimSpace(string(out)), "\n") - for _, file := range files { - dir, _ = filepath.Split(file) - dir = strings.TrimSuffix(dir, "/") - benchesMap[dir] = true - } - } else if tag != "" { - return fmt.Errorf("malformed package %q, tags not supported in 'bench' command", pkg) + + var target string + if strings.Contains(pkg, ":") { + // For parity with bazel, we allow specifying named build targets. + target = pkg } else { - benchesMap[dir] = true + target = fmt.Sprintf("%s:all", pkg) } + testTargets = append(testTargets, target) } - // De-duplicate and sort the list of benches to run. - var benches []string - for pkg := range benchesMap { - benches = append(benches, pkg) + + args = append(args, testTargets...) 
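+	// NB (editor-added, illustrative only): at this point testTargets holds
+	// bazel labels derived from the positional arguments. For example (with a
+	// hypothetical named target "mytest"):
+	//
+	//   dev bench pkg/sql         =>  bazel test pkg/sql:all ...
+	//   dev bench pkg/sql:mytest  =>  bazel test pkg/sql:mytest ...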
+ if ignoreCache { + args = append(args, "--nocache_test_results") } - sort.Slice(benches, func(i, j int) bool { return benches[i] < benches[j] }) - var argsBase []string - // NOTE the --config=test here. It's very important we compile the test binary with the - // appropriate stuff (gotags, etc.) - argsBase = append(argsBase, "run", "--config=test", "--test_sharding_strategy=disabled") - argsBase = append(argsBase, mustGetRemoteCacheArgs(remoteCacheAddr)...) - if numCPUs != 0 { - argsBase = append(argsBase, fmt.Sprintf("--local_cpu_resources=%d", numCPUs)) + if filter == "" { + args = append(args, "--test_arg", "-test.bench=.") + } else { + args = append(args, "--test_arg", fmt.Sprintf("-test.bench=%s", filter)) + } + if short { + args = append(args, "--test_arg", "-test.short") } if verbose { - argsBase = append(argsBase, "--test_arg", "-test.v") + args = append(args, "--test_arg", "-test.v") } if showLogs { - argsBase = append(argsBase, "--test_arg", "-show-logs") + args = append(args, "--test_arg", "-show-logs") } if count != 1 { - argsBase = append(argsBase, "--test_arg", fmt.Sprintf("-test.count=%d", count)) + args = append(args, "--test_arg", fmt.Sprintf("-test.count=%d", count)) } if benchTime != "" { - argsBase = append(argsBase, "--test_arg", fmt.Sprintf("-test.benchtime=%s", benchTime)) + args = append(args, "--test_arg", fmt.Sprintf("-test.benchtime=%s", benchTime)) } if benchMem { - argsBase = append(argsBase, "--test_arg", "-test.benchmem") + args = append(args, "--test_arg", "-test.benchmem") } - for _, bench := range benches { - args := make([]string, len(argsBase)) - copy(args, argsBase) - base := filepath.Base(bench) - target := "//" + bench + ":" + base + "_test" - args = append(args, target) - args = append(args, additionalBazelArgs...) - args = append(args, "--", "-test.run=-") - if filter == "" { - args = append(args, "-test.bench=.") - } else { - args = append(args, "-test.bench="+filter) - } - if timeout > 0 { - args = append(args, fmt.Sprintf("-test.timeout=%s", timeout.String())) - } - if short { - args = append(args, "-test.short", "-test.benchtime=1ns") - } - logCommand("bazel", args...) - err := d.exec.CommandContextInheritingStdStreams(ctx, "bazel", args...) - if err != nil { - return err + { // Handle test output flags. + testOutputArgs := []string{"--test_output", "errors"} + if verbose || showLogs { + testOutputArgs = []string{"--test_output", "all"} } + args = append(args, testOutputArgs...) } - return nil + args = append(args, additionalBazelArgs...) + + logCommand("bazel", args...) + return d.exec.CommandContextInheritingStdStreams(ctx, "bazel", args...) } diff --git a/pkg/cmd/dev/builder.go b/pkg/cmd/dev/builder.go index 5c27eb07fe5b..fed94f379ce3 100644 --- a/pkg/cmd/dev/builder.go +++ b/pkg/cmd/dev/builder.go @@ -41,7 +41,7 @@ func makeBuilderCmd(runE func(cmd *cobra.Command, args []string) error) *cobra.C func (d *dev) builder(cmd *cobra.Command, extraArgs []string) error { ctx := cmd.Context() volume := mustGetFlagString(cmd, volumeFlag) - args, err := d.getDockerRunArgs(ctx, volume, true) + args, err := d.getDockerRunArgs(ctx, volume, false) args = append(args, extraArgs...) 
if err != nil {
		return err
diff --git a/pkg/cmd/dev/datadriven_test.go b/pkg/cmd/dev/datadriven_test.go
index 02bb7d88a83c..46a33a25226d 100644
--- a/pkg/cmd/dev/datadriven_test.go
+++ b/pkg/cmd/dev/datadriven_test.go
@@ -16,99 +16,104 @@ import (
 	"io"
 	"io/ioutil"
 	"log"
-	stdos "os"
-	"path/filepath"
-	"strings"
 	"testing"
 
+	"github.com/alessio/shellescape"
 	"github.com/cockroachdb/cockroach/pkg/cmd/dev/io/exec"
 	"github.com/cockroachdb/cockroach/pkg/cmd/dev/io/os"
-	"github.com/cockroachdb/cockroach/pkg/cmd/dev/recording"
 	"github.com/cockroachdb/cockroach/pkg/testutils"
 	"github.com/cockroachdb/datadriven"
 	"github.com/stretchr/testify/require"
 )
 
-// TestDataDriven makes use of datadriven to play back all operations executed
-// by individual `dev` invocations. The testcases are defined under testdata/*,
-// where each test file corresponds to a recording capture found in
-// testdata/recording/*.
+const (
+	crdbCheckoutPlaceholder = "crdb-checkout"
+	sandboxPlaceholder      = "sandbox"
+)
+
+// TestDataDriven makes use of datadriven to capture all operations executed by
+// individual dev invocations. The testcases are defined under
+// testdata/datadriven/*.
 //
 // DataDriven divvies up these files as subtests, so individual "files" are
 // runnable through:
 //
-//     go test -run TestDataDriven/<fname>
+//     dev test pkg/cmd/dev -f TestDataDriven/<fname> [--rewrite]
+// OR  go test ./pkg/cmd/dev -run TestDataDriven/<fname> [-rewrite]
 //
-// Recordings are used to mock out "system" behavior. During these test runs
-// (unless -record is specified), attempts to shell out to `bazel` or perform
-// other OS operations are intercepted and responses are constructed using
-// recorded data.
+// NB: See the commentary on TestRecorderDriven for how the two compare.
+// TestDataDriven is well suited for exercising flows that don't depend on
+// reading external state in order to function (e.g. simply translating a
+// `dev test <pkg>` invocation to its corresponding bazel one). It's not well
+// suited for flows that do (e.g. reading the list of go files in the
+// bazel-generated sandbox and copying them over one by one).
 func TestDataDriven(t *testing.T) {
 	verbose := testing.Verbose()
-	testdata := testutils.TestDataPath(t)
+	testdata := testutils.TestDataPath(t, "datadriven")
 	datadriven.Walk(t, testdata, func(t *testing.T, path string) {
-		if strings.HasPrefix(path, filepath.Join(testdata, "recording")) {
-			return
-		}
-
-		dir, file := filepath.Split(path)
-		recordingPath := filepath.Join(dir, "recording", file) // path to the recording, if any
-
 		// We'll match against printed logs for datadriven.
 		var logger io.ReadWriter = bytes.NewBufferString("")
-		var exopts []exec.Option
-		var osopts []os.Option
-
-		exopts = append(exopts, exec.WithLogger(log.New(logger, "", 0)))
-		osopts = append(osopts, os.WithLogger(log.New(logger, "", 0)))
-
-		if !verbose {
-			// Suppress all internal output unless told otherwise.
-		exopts = append(exopts, exec.WithStdOutErr(ioutil.Discard, ioutil.Discard))
+		execOpts := []exec.Option{
+			exec.WithLogger(log.New(logger, "", 0)),
+			exec.WithDryrun(),
+			exec.WithIntercept(workspaceCmd(), crdbCheckoutPlaceholder),
+			exec.WithIntercept(bazelbinCmd(), sandboxPlaceholder),
+		}
+		osOpts := []os.Option{
+			os.WithLogger(log.New(logger, "", 0)),
+			os.WithDryrun(),
+		}
 
-		frecording, err := stdos.OpenFile(recordingPath, stdos.O_RDONLY, 0600)
-		require.NoError(t, err)
-		defer func() {
-			require.NoError(t, frecording.Close())
-		}()
+		if !verbose { // suppress all internal output unless told otherwise
+			execOpts = append(execOpts, exec.WithStdOutErr(ioutil.Discard, ioutil.Discard))
+		}
 
-		r := recording.WithReplayFrom(frecording, recordingPath)
-		exopts = append(exopts, exec.WithRecording(r))
-		osopts = append(osopts, os.WithRecording(r))
+		devExec := exec.New(execOpts...)
+		devOS := os.New(osOpts...)
 
-		devExec := exec.New(exopts...)
-		devOS := os.New(osopts...)
+		// TODO(irfansharif): Because these tests are run in dry-run mode, a
+		// test for a mixed-io command (see top-level test comment) that's
+		// added "accidentally" may show up as a test failure in which the
+		// output of a successful shell-out attempt comes back empty, maybe
+		// resulting in NPEs. We could catch these panics/errors here and
+		// suggest a more informative error to test authors.
 		datadriven.RunTest(t, path, func(t *testing.T, d *datadriven.TestData) string {
 			dev := makeDevCmd()
-			dev.exec = devExec
-			dev.os = devOS
+			dev.exec, dev.os = devExec, devOS
+			dev.knobs.skipDoctorCheck = true
+			dev.knobs.devBinOverride = "dev"
+			dev.log = log.New(logger, "", 0)
 
 			if !verbose {
 				dev.cli.SetErr(ioutil.Discard)
 				dev.cli.SetOut(ioutil.Discard)
 			}
 
-			switch d.Cmd {
-			case "dev":
-				var args []string
-				for _, cmdArg := range d.CmdArgs {
-					args = append(args, cmdArg.Key)
-					if len(cmdArg.Vals) != 0 {
-						args = append(args, cmdArg.Vals[0])
-					}
+			require.Equalf(t, d.Cmd, "dev", "unknown command: %s", d.Cmd)
+			var args []string
+			for _, cmdArg := range d.CmdArgs {
+				args = append(args, cmdArg.Key)
+				if len(cmdArg.Vals) != 0 {
+					args = append(args, cmdArg.Vals[0])
 				}
-				dev.cli.SetArgs(args)
-				require.NoError(t, dev.cli.Execute())
-
-				logs, err := ioutil.ReadAll(logger)
-				require.NoError(t, err)
-
-				return string(logs)
-			default:
-				return fmt.Sprintf("unknown command: %s", d.Cmd)
 			}
+			dev.cli.SetArgs(args)
+			if err := dev.cli.Execute(); err != nil {
+				return fmt.Sprintf("err: %s", err)
+			}
+
+			logs, err := ioutil.ReadAll(logger)
+			require.NoError(t, err)
+			return string(logs)
 		})
 	})
 }
+
+func workspaceCmd() string {
+	return fmt.Sprintf("bazel %s", shellescape.QuoteCommand([]string{"info", "workspace", "--color=no"}))
+}
+
+func bazelbinCmd() string {
+	return fmt.Sprintf("bazel %s", shellescape.QuoteCommand([]string{"info", "bazel-bin", "--color=no"}))
+}
diff --git a/pkg/cmd/dev/dev.go b/pkg/cmd/dev/dev.go
index f4c32a104794..e870bd9be0e1 100644
--- a/pkg/cmd/dev/dev.go
+++ b/pkg/cmd/dev/dev.go
@@ -25,6 +25,11 @@ type dev struct {
 	cli  *cobra.Command
 	os   *os.OS
 	exec *exec.Exec
+
+	knobs struct { // testing knobs
+		skipDoctorCheck bool
+		devBinOverride  string
+	}
 }
 
 func makeDevCmd() *dev {
@@ -129,7 +134,7 @@ Typical usage:
 	ret.cli.PersistentFlags().BoolVar(&debugVar, "debug", false, "enable debug logging for dev")
 	ret.cli.PersistentPreRunE = func(cmd *cobra.Command, args []string) error {
 		isDoctor := cmd.Name() == "doctor"
-		if !isTesting && !isDoctor {
+		if !isDoctor {
 			if err := ret.checkDoctorStatus(cmd.Context()); err != nil {
return err } diff --git a/pkg/cmd/dev/dev_test.go b/pkg/cmd/dev/dev_test.go deleted file mode 100644 index 65882d7df1c1..000000000000 --- a/pkg/cmd/dev/dev_test.go +++ /dev/null @@ -1,62 +0,0 @@ -// Copyright 2021 The Cockroach Authors. -// -// Use of this software is governed by the Business Source License -// included in the file licenses/BSL.txt. -// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. - -package main - -import ( - "bytes" - "io" - "log" - "strings" - "testing" - - "github.com/cockroachdb/cockroach/pkg/cmd/dev/io/exec" - "github.com/cockroachdb/cockroach/pkg/cmd/dev/io/os" - "github.com/cockroachdb/cockroach/pkg/cmd/dev/recording" - "github.com/stretchr/testify/require" -) - -func init() { - isTesting = true -} - -func TestSetupPath(t *testing.T) { - rec := `getenv PATH ----- -/usr/local/opt/ccache/libexec:/usr/local/opt/make/libexec/gnubin:/usr/local/bin:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/go/bin:/Library/Apple/usr/bin - -which cc ----- -/usr/local/opt/ccache/libexec/cc - -readlink /usr/local/opt/ccache/libexec/cc ----- -../bin/ccache - -export PATH=/usr/local/opt/make/libexec/gnubin:/usr/local/bin:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/go/bin:/Library/Apple/usr/bin ----- - -` - r := recording.WithReplayFrom(strings.NewReader(rec), "TestSetupPath") - var logger io.ReadWriter = bytes.NewBufferString("") - var exopts []exec.Option - exopts = append(exopts, exec.WithRecording(r)) - exopts = append(exopts, exec.WithLogger(log.New(logger, "", 0))) - var osopts []os.Option - osopts = append(osopts, os.WithRecording(r)) - osopts = append(osopts, os.WithLogger(log.New(logger, "", 0))) - devExec := exec.New(exopts...) - devOS := os.New(osopts...) 
- dev := makeDevCmd() - dev.exec = devExec - dev.os = devOS - - require.NoError(t, setupPath(dev)) -} diff --git a/pkg/cmd/dev/doctor.go b/pkg/cmd/dev/doctor.go index 1e99d379f742..e5c25047682a 100644 --- a/pkg/cmd/dev/doctor.go +++ b/pkg/cmd/dev/doctor.go @@ -38,6 +38,10 @@ const ( ) func (d *dev) checkDoctorStatus(ctx context.Context) error { + if d.knobs.skipDoctorCheck { + return nil + } + dir, err := d.getWorkspace(ctx) if err != nil { return err @@ -230,12 +234,13 @@ Please add one of the following to your %s/.bazelrc.user:`, workspace) } } - if success { - if err := d.writeDoctorStatus(ctx, d.exec); err != nil { - return err - } - log.Println("You are ready to build :)") - return nil + if !success { + return errors.New("please address the errors described above and try again") } - return errors.New("please address the errors described above and try again") + + if err := d.writeDoctorStatus(ctx, d.exec); err != nil { + return err + } + log.Println("You are ready to build :)") + return nil } diff --git a/pkg/cmd/dev/generate.go b/pkg/cmd/dev/generate.go index 6401407db1b2..8e72452595c4 100644 --- a/pkg/cmd/dev/generate.go +++ b/pkg/cmd/dev/generate.go @@ -90,10 +90,14 @@ func (d *dev) generateBazel(cmd *cobra.Command) error { executable := filepath.Join(workspace, "build", "bazelutil", "bazel-generate.sh") env := os.Environ() if mirror { - env = append(env, "COCKROACH_BAZEL_CAN_MIRROR=1") + envvar := "COCKROACH_BAZEL_CAN_MIRROR=1" + d.log.Printf("export %s", envvar) + env = append(env, envvar) } if force { - env = append(env, "COCKROACH_BAZEL_FORCE_GENERATE=1") + envvar := "COCKROACH_BAZEL_FORCE_GENERATE=1" + d.log.Printf("export %s", envvar) + env = append(env, envvar) } return d.exec.CommandContextWithEnv(ctx, env, executable) } diff --git a/pkg/cmd/dev/io/exec/BUILD.bazel b/pkg/cmd/dev/io/exec/BUILD.bazel index 548614278048..a91324f32200 100644 --- a/pkg/cmd/dev/io/exec/BUILD.bazel +++ b/pkg/cmd/dev/io/exec/BUILD.bazel @@ -6,7 +6,7 @@ go_library( importpath = "github.com/cockroachdb/cockroach/pkg/cmd/dev/io/exec", visibility = ["//visibility:public"], deps = [ - "//pkg/cmd/dev/recording", "@com_github_alessio_shellescape//:shellescape", + "@com_github_irfansharif_recorder//:recorder", ], ) diff --git a/pkg/cmd/dev/io/exec/exec.go b/pkg/cmd/dev/io/exec/exec.go index 5c68687dc603..40bfb9db9fb2 100644 --- a/pkg/cmd/dev/io/exec/exec.go +++ b/pkg/cmd/dev/io/exec/exec.go @@ -21,17 +21,31 @@ import ( "strings" "github.com/alessio/shellescape" - "github.com/cockroachdb/cockroach/pkg/cmd/dev/recording" + "github.com/irfansharif/recorder" ) -// Exec is a convenience wrapper around the stdlib os/exec package. It lets us -// mock all instances where we shell out for tests. +// Exec is a convenience wrapper around the stdlib os/exec package. It lets us: +// +// (a) mock all instances where we shell out, for tests, and +// (b) capture all instances of shelling out that take place during execution +// +// We achieve (a) by embedding a Recorder, and either replaying from it if +// configured to do so, or "doing the real thing" and recording the fact into +// the Recorder for future playback. +// +// For (b), each operation is logged (if configured to do so). These messages +// can be captured by the caller and compared against what is expected. 
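+//
+// A usage sketch (editor-added for illustration; it uses only the options
+// defined below in this file, and the "which cc" command/output pair is
+// hypothetical):
+//
+//	e := exec.New(
+//		exec.WithIntercept("which cc", "/usr/bin/cc"), // canned response
+//		exec.WithDryrun(),                             // all other commands become no-ops
+//	)
+//	path, _ := e.LookPath("cc") // logs "which cc"; returns "/usr/bin/cc"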
type Exec struct { dir string logger *log.Logger stdout, stderr io.Writer blocking bool - *recording.Recording + *recorder.Recorder + + knobs struct { // testing knobs + dryrun bool + intercept map[string]string // maps commands to outputs + } } // New returns a new Exec with the given options. @@ -76,10 +90,10 @@ func WithStdOutErr(stdout, stderr io.Writer) func(e *Exec) { } } -// WithRecording configures Exec to use the provided recording. -func WithRecording(r *recording.Recording) func(e *Exec) { +// WithRecorder configures Exec to use the provided recorder. +func WithRecorder(r *recorder.Recorder) func(e *Exec) { return func(e *Exec) { - e.Recording = r + e.Recorder = r } } @@ -91,8 +105,43 @@ func (e *Exec) AsNonBlocking() *Exec { return &out } +// WithWorkingDir configures Exec to use the provided working directory. +func WithWorkingDir(dir string) func(e *Exec) { + return func(e *Exec) { + e.dir = dir + } +} + +// WithDryrun configures Exec to run in dryrun mode. +func WithDryrun() func(e *Exec) { + return func(e *Exec) { + e.knobs.dryrun = true + } +} + +// WithIntercept configures Exec to intercept the given command and return the +// given output instead. +func WithIntercept(command, output string) func(e *Exec) { + return func(e *Exec) { + if e.knobs.intercept == nil { + e.knobs.intercept = make(map[string]string) + } + e.knobs.intercept[command] = output + } +} + +// LookPath wraps around exec.LookPath, which searches for an executable named +// file in the directories named by the PATH environment variable. +func (e *Exec) LookPath(file string) (string, error) { + command := fmt.Sprintf("which %s", file) + e.logger.Print(command) + return e.Next(command, func() (string, error) { + return exec.LookPath(file) + }) +} + // CommandContextSilent is like CommandContext, but does not take over -// stdout/stderr. It's to be used for "internal" operations, and always blocks. +// stdout/stderr. It's used for "internal" operations, and always blocks. func (e *Exec) CommandContextSilent( ctx context.Context, name string, args ...string, ) ([]byte, error) { @@ -135,8 +184,7 @@ func (e *Exec) commandContextInheritingStdStreamsImpl( } e.logger.Print(command) - if e.Recording == nil { - // Do the real thing. + _, err := e.Next(command, func() (string, error) { cmd := exec.CommandContext(ctx, name, args...) cmd.Stdin = os.Stdin cmd.Stdout = e.stdout @@ -145,40 +193,19 @@ func (e *Exec) commandContextInheritingStdStreamsImpl( cmd.Env = env if err := cmd.Start(); err != nil { - return err + return "", err } if e.blocking { if err := cmd.Wait(); err != nil { - return err + return "", err } } - return nil - } + return "", nil + }) - _, err := e.replay(command) return err } -// LookPath wraps around exec.LookPath, which searches for an executable named -// file in the directories named by the PATH environment variable. -func (e *Exec) LookPath(path string) (string, error) { - command := fmt.Sprintf("which %s", path) - e.logger.Print(command) - - if e.Recording == nil { - // Do the real thing. - var err error - fullPath, err := exec.LookPath(path) - if err != nil { - return "", err - } - return fullPath, nil - } - - ret, err := e.replay(command) - return ret, err -} - func (e *Exec) commandContextImpl( ctx context.Context, stdin io.Reader, silent bool, name string, args ...string, ) ([]byte, error) { @@ -190,10 +217,9 @@ func (e *Exec) commandContextImpl( } e.logger.Print(command) - var buffer bytes.Buffer - if e.Recording == nil { - // Do the real thing. 
+	output, err := e.Next(command, func() (string, error) {
 		cmd := exec.CommandContext(ctx, name, args...)
+		var buffer bytes.Buffer
 		if silent {
 			cmd.Stdout = &buffer
 			cmd.Stderr = nil
 		}
@@ -207,37 +233,29 @@
 		cmd.Dir = e.dir
 
 		if err := cmd.Start(); err != nil {
-			return nil, err
+			return "", err
 		}
 		if err := cmd.Wait(); err != nil {
-			return nil, err
+			return "", err
 		}
-		return buffer.Bytes(), nil
-	}
-
-	output, err := e.replay(command)
+		return buffer.String(), nil
+	})
 	if err != nil {
 		return nil, err
 	}
-
 	return []byte(output), nil
 }
 
-// replay replays the specified command, erroring out if it's mismatched with
-// what the recording plays back next. It returns the recorded output.
-func (e *Exec) replay(command string) (output string, err error) {
-	found, err := e.Recording.Next(func(op recording.Operation) error {
-		if op.Command != command {
-			return fmt.Errorf("expected %q, got %q", op.Command, command)
-		}
-		output = op.Output
-		return nil
-	})
-	if err != nil {
-		return "", err
+// Next is a thin interceptor for all exec activity, running each command
+// through the testing knobs first.
+func (e *Exec) Next(command string, f func() (output string, err error)) (string, error) {
+	if e.knobs.dryrun {
+		return "", nil
 	}
-	if !found {
-		return "", fmt.Errorf("recording for %q not found", command)
+	if e.knobs.intercept != nil {
+		if output, ok := e.knobs.intercept[command]; ok {
+			return output, nil
+		}
 	}
-	return output, nil
+	return e.Recorder.Next(command, f)
 }
diff --git a/pkg/cmd/dev/io/os/BUILD.bazel b/pkg/cmd/dev/io/os/BUILD.bazel
index 44d20af907f5..0ddfe1abaea4 100644
--- a/pkg/cmd/dev/io/os/BUILD.bazel
+++ b/pkg/cmd/dev/io/os/BUILD.bazel
@@ -6,6 +6,6 @@ go_library(
     importpath = "github.com/cockroachdb/cockroach/pkg/cmd/dev/io/os",
     visibility = ["//visibility:public"],
     deps = [
-        "//pkg/cmd/dev/recording",
+        "@com_github_irfansharif_recorder//:recorder",
     ],
 )
diff --git a/pkg/cmd/dev/io/os/os.go b/pkg/cmd/dev/io/os/os.go
index 526a34358a78..cd3da9f83361 100644
--- a/pkg/cmd/dev/io/os/os.go
+++ b/pkg/cmd/dev/io/os/os.go
@@ -19,16 +19,31 @@ import (
 	"os"
 	"os/user"
 	"path/filepath"
+	"strconv"
 	"strings"
 
-	"github.com/cockroachdb/cockroach/pkg/cmd/dev/recording"
+	"github.com/irfansharif/recorder"
 )
 
-// OS is a convenience wrapper around the stdlib os package. It lets us
-// mock operating system calls in tests.
+// OS is a convenience wrapper around the stdlib os package. It lets us:
+//
+// (a) mock operating system calls in tests, and
+// (b) capture the set of calls that take place during execution
+//
+// We achieve (a) by embedding a Recorder, and either replaying from it if
+// configured to do so, or "doing the real thing" and recording the fact into
+// the Recorder for future playback.
+//
+// For (b), each operation is logged (if configured to do so). These messages
+// can be captured by the caller and compared against what is expected.
 type OS struct {
+	dir    string
 	logger *log.Logger
-	*recording.Recording
+	*recorder.Recorder
+
+	knobs struct { // testing knobs
+		dryrun bool
+	}
 }
 
 // New constructs a new OS handle, configured with the provided options.
@@ -62,10 +77,24 @@ func WithLogger(logger *log.Logger) func(o *OS) {
 	}
 }
 
-// WithRecording configures OS to use the provided recording.
-func WithRecording(r *recording.Recording) func(o *OS) {
+// WithRecorder configures OS to use the provided recorder.
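+// The same Recorder instance can be shared between an OS and an Exec handle
+// (the recorder-driven test harness below does exactly that), so exec and os
+// operations interleave within a single recording.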
+func WithRecorder(r *recorder.Recorder) func(o *OS) { return func(o *OS) { - o.Recording = r + o.Recorder = r + } +} + +// WithWorkingDir configures OS to use the provided working directory. +func WithWorkingDir(dir string) func(o *OS) { + return func(o *OS) { + o.dir = dir + } +} + +// WithDryrun configures OS to run in dryrun mode. +func WithDryrun() func(e *OS) { + return func(e *OS) { + e.knobs.dryrun = true } } @@ -75,15 +104,9 @@ func (o *OS) MkdirAll(path string) error { command := fmt.Sprintf("mkdir %s", path) o.logger.Print(command) - if o.Recording == nil { - // Do the real thing. - if err := os.MkdirAll(path, 0755); err != nil { - return err - } - return nil - } - - _, err := o.replay(command) + _, err := o.Next(command, func() (output string, err error) { + return "", os.MkdirAll(path, 0755) + }) return err } @@ -92,15 +115,12 @@ func (o *OS) Remove(path string) error { command := fmt.Sprintf("rm %s", path) o.logger.Print(command) - if o.Recording == nil { - // Do the real thing. + _, err := o.Next(command, func() (output string, err error) { if err := os.Remove(path); err != nil && !os.IsNotExist(err) { - return err + return "", err } - return nil - } - - _, err := o.replay(command) + return "", nil + }) return err } @@ -110,15 +130,9 @@ func (o *OS) Symlink(to, from string) error { command := fmt.Sprintf("ln -s %s %s", to, from) o.logger.Print(command) - if o.Recording == nil { - // Do the real thing. - if err := os.Symlink(to, from); err != nil { - return err - } - return nil - } - - _, err := o.replay(command) + _, err := o.Next(command, func() (output string, err error) { + return "", os.Symlink(to, from) + }) return err } @@ -128,13 +142,13 @@ func (o OS) Getenv(key string) string { command := fmt.Sprintf("getenv %s", key) o.logger.Print(command) - if o.Recording == nil { - // Do the real thing. - return os.Getenv(key) + output, err := o.Next(command, func() (output string, err error) { + return os.Getenv(key), nil + }) + if err != nil { + log.Fatalf("%v", err) } - - ret, _ := o.replay(command) - return ret + return output } // Setenv wraps around os.Setenv, which sets the value of the environment @@ -143,12 +157,9 @@ func (o *OS) Setenv(key, value string) error { command := fmt.Sprintf("export %s=%s", key, value) o.logger.Print(command) - if o.Recording == nil { - // Do the real thing. - return os.Setenv(key, value) - } - - _, err := o.replay(command) + _, err := o.Next(command, func() (output string, err error) { + return "", os.Setenv(key, value) + }) return err } @@ -158,13 +169,9 @@ func (o *OS) Readlink(filename string) (string, error) { command := fmt.Sprintf("readlink %s", filename) o.logger.Print(command) - if o.Recording == nil { - // Do the real thing. + return o.Next(command, func() (output string, err error) { return os.Readlink(filename) - } - - ret, err := o.replay(command) - return ret, err + }) } // IsDir wraps around os.Stat, which returns the os.FileInfo of the named @@ -174,17 +181,18 @@ func (o *OS) IsDir(dirname string) (bool, error) { command := fmt.Sprintf("find %s -type d", dirname) o.logger.Print(command) - if o.Recording == nil { + output, err := o.Next(command, func() (output string, err error) { // Do the real thing. 
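+		// The recorder traffics in strings only, so the boolean result is
+		// stringified here via strconv.FormatBool and parsed back with
+		// strconv.ParseBool once Next returns.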
 		stat, err := os.Stat(dirname)
 		if err != nil {
-			return false, err
+			return "", err
 		}
-		return stat.IsDir(), nil
+		return strconv.FormatBool(stat.IsDir()), nil
+	})
+	if err != nil {
+		return false, err
 	}
-
-	res, err := o.replay(command)
-	return err == nil && res != "", err
+	return strconv.ParseBool(strings.TrimSpace(output))
 }
 
 // ReadFile wraps around ioutil.ReadFile, reading a file from disk and
@@ -193,31 +201,24 @@ func (o *OS) ReadFile(filename string) (string, error) {
 	command := fmt.Sprintf("cat %s", filename)
 	o.logger.Print(command)
 
-	if o.Recording == nil {
-		// Do the real thing.
+	return o.Next(command, func() (output string, err error) {
 		buf, err := ioutil.ReadFile(filename)
 		if err != nil {
 			return "", err
 		}
 		return string(buf), nil
-	}
-
-	ret, err := o.replay(command)
-	return ret, err
+	})
 }
 
 // WriteFile wraps around ioutil.WriteFile, writing the given contents to
 // the given file on disk.
 func (o *OS) WriteFile(filename, contents string) error {
-	command := fmt.Sprintf("echo %s > %s", strings.TrimSpace(contents), filename)
+	echoContents := strings.TrimSpace(contents)
+	if len(echoContents) > 10 {
+		echoContents = echoContents[:10] // log only a short prefix of the contents
+	}
+	command := fmt.Sprintf("echo %q > %s", echoContents, filename)
 	o.logger.Print(command)
 
-	if o.Recording == nil {
-		// Do the real thing.
-		return ioutil.WriteFile(filename, []byte(contents), 0666)
-	}
-
-	_, err := o.replay(command)
+	_, err := o.Next(command, func() (output string, err error) {
+		return "", ioutil.WriteFile(filename, []byte(contents), 0666)
+	})
 	return err
 }
 
@@ -231,42 +232,39 @@ func (o *OS) CopyFile(src, dst string) error {
 	command := fmt.Sprintf("cp %s %s", src, dst)
 	o.logger.Print(command)
 
-	if o.Recording == nil {
-		// Do the real thing.
+	_, err := o.Next(command, func() (output string, err error) {
 		srcFile, err := os.Open(src)
 		if err != nil {
-			return err
+			return "", err
 		}
 		defer func() { _ = srcFile.Close() }()
 		originalDstFile, err := os.Open(dst)
 		if err != nil && !os.IsNotExist(err) {
-			return err
+			return "", err
 		} else if err == nil {
 			defer func() { _ = originalDstFile.Close() }()
 			srcInfo, err := srcFile.Stat()
 			if err != nil {
-				return err
+				return "", err
 			}
 			dstInfo, err := originalDstFile.Stat()
 			if err != nil {
-				return err
+				return "", err
 			}
 			// If src points to the same file as dst, there's
 			// nothing to be done.
 			if os.SameFile(srcInfo, dstInfo) {
-				return nil
+				return "", nil
 			}
 		}
 		dstFile, err := os.Create(dst)
 		if err != nil {
-			return err
+			return "", err
 		}
 		defer func() { _ = dstFile.Close() }()
 		_, err = io.Copy(dstFile, srcFile)
-		return err
-	}
-
-	_, err := o.replay(command)
+		return "", err
+	})
 	return err
 }
 
@@ -276,12 +274,11 @@ func (o *OS) ListFilesWithSuffix(root, suffix string) ([]string, error) {
 	command := fmt.Sprintf("find %s -name *%s", root, suffix)
 	o.logger.Print(command)
 
-	var ret []string
-	if o.Recording == nil {
-		// Do the real thing.
-		err := filepath.Walk(root, func(path string, info fs.FileInfo, err error) error {
-			// If there's an error walking the tree, throw it away -- there's nothing
-			// interesting we can do with it.
+	output, err := o.Next(command, func() (output string, err error) {
+		var ret []string
+		if err := filepath.Walk(root, func(path string, info fs.FileInfo, err error) error {
+			// If there's an error walking the tree, throw it away -- there's
+			// nothing interesting we can do with it.
 			if err != nil || info.IsDir() { //nolint:returnerrcheck
 				return nil
@@ -290,18 +287,16 @@ func (o *OS) ListFilesWithSuffix(root, suffix string) ([]string, error) {
 				ret = append(ret, path)
 			}
 			return nil
-		})
-		if err != nil {
-			return nil, err
+		}); err != nil {
+			return "", err
 		}
-		return ret, nil
-	}
 
-	lines, err := o.replay(command)
+		return fmt.Sprintf("%s\n", strings.Join(ret, "\n")), nil
+	})
 	if err != nil {
 		return nil, err
 	}
-	return strings.Split(strings.TrimSpace(lines), "\n"), nil
+	return strings.Split(strings.TrimSpace(output), "\n"), nil
 }
 
 // CurrentUserAndGroup returns the user and effective group.
@@ -309,41 +304,26 @@ func (o *OS) CurrentUserAndGroup() (uid string, gid string, err error) {
 	command := "id"
 	o.logger.Print(command)
 
-	if o.Recording == nil {
-		// Do the real thing.
-		var currentUser *user.User
-		currentUser, err = user.Current()
+	output, err := o.Next(command, func() (output string, err error) {
+		current, err := user.Current()
 		if err != nil {
-			return
+			return "", err
 		}
-		uid = currentUser.Uid
-		gid = currentUser.Gid
-		return
-	}
-
-	output, err := o.replay(command)
+		return fmt.Sprintf("%s:%s", current.Uid, current.Gid), nil
+	})
 	if err != nil {
-		return
+		return "", "", err
 	}
+
 	ids := strings.Split(strings.TrimSpace(output), ":")
 	return ids[0], ids[1], nil
 }
 
-// replay replays the specified command, erroring out if it's mismatched with
-// what the recording plays back next. It returns the recorded output.
-func (o *OS) replay(command string) (output string, err error) {
-	found, err := o.Recording.Next(func(op recording.Operation) error {
-		if op.Command != command {
-			return fmt.Errorf("expected %q, got %q", op.Command, command)
-		}
-		output = op.Output
-		return nil
-	})
-	if err != nil {
-		return "", err
-	}
-	if !found {
-		return "", fmt.Errorf("recording for %q not found", command)
+// Next is a thin interceptor for all os activity, running each command
+// through the testing knobs first.
+func (o *OS) Next(command string, f func() (output string, err error)) (string, error) {
+	if o.knobs.dryrun {
+		return "", nil
 	}
-	return output, nil
+	return o.Recorder.Next(command, f)
 }
diff --git a/pkg/cmd/dev/lint.go b/pkg/cmd/dev/lint.go
index 6ef7b6ce97aa..bc49becb3d38 100644
--- a/pkg/cmd/dev/lint.go
+++ b/pkg/cmd/dev/lint.go
@@ -63,13 +63,18 @@ func (d *dev) lint(cmd *cobra.Command, commandLine []string) error {
 		args = append(args, "-test.run", fmt.Sprintf("Lint/%s", filter))
 	}
 	logCommand("bazel", args...)
-	if len(pkgs) > 0 {
+	if len(pkgs) > 1 {
+		return fmt.Errorf("can only lint a single package (found %s)", strings.Join(pkgs, ", "))
+	}
+	if len(pkgs) == 1 {
 		pkg := strings.TrimRight(pkgs[0], "/")
 		if !strings.HasPrefix(pkg, "./") {
 			pkg = "./" + pkg
 		}
 		env := os.Environ()
-		env = append(env, fmt.Sprintf("PKG=%s", pkg))
+		envvar := fmt.Sprintf("PKG=%s", pkg)
+		d.log.Printf("export %s", envvar)
+		env = append(env, envvar)
 		return d.exec.CommandContextWithEnv(ctx, env, "bazel", args...)
 	}
 	return d.exec.CommandContextInheritingStdStreams(ctx, "bazel", args...)
diff --git a/pkg/cmd/dev/recorderdriven_test.go b/pkg/cmd/dev/recorderdriven_test.go
new file mode 100644
index 000000000000..84ab64e0f0cf
--- /dev/null
+++ b/pkg/cmd/dev/recorderdriven_test.go
@@ -0,0 +1,202 @@
+// Copyright 2022 The Cockroach Authors.
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package main
+
+import (
+	"bytes"
+	"flag"
+	"fmt"
+	"io"
+	"io/ioutil"
+	"log"
+	stdos "os"
+	stdexec "os/exec"
+	"strings"
+	"testing"
+
+	"github.com/cockroachdb/cockroach/pkg/cmd/dev/io/exec"
+	"github.com/cockroachdb/cockroach/pkg/cmd/dev/io/os"
+	"github.com/cockroachdb/cockroach/pkg/testutils"
+	"github.com/cockroachdb/datadriven"
+	"github.com/irfansharif/recorder"
+	"github.com/stretchr/testify/require"
+)
+
+// TestRecorderDriven makes use of datadriven to record (if --rewrite is
+// specified) or play back (if --rewrite is omitted) all operations executed
+// by individual `dev` invocations. The testcases are defined under
+// testdata/recorderdriven/*; each test file corresponds to a captured
+// recording found in testdata/recorderdriven/*.rec.
+//
+// DataDriven divvies up these files as subtests, so individual "files" are
+// runnable through:
+//
+//     dev test pkg/cmd/dev -f TestRecorderDriven/<fname>
+//     OR  go test ./pkg/cmd/dev -run TestRecorderDriven/<fname>
+//
+// Recordings are used to mock out "system" behavior. When --rewrite is
+// specified, attempts to shell out to bazel or perform other OS operations
+// (like creating, removing, symlinking filepaths) are intercepted and system
+// responses are recorded for future playback. To update the test files with
+// new capture data, try:
+//
+//     go test ./pkg/cmd/dev -run TestRecorderDriven/<fname> -rewrite
+//
+// NB: This test is worth contrasting with TestDataDriven, where all
+// operations are run in "dry-run" mode when --rewrite is specified. Here
+// we'll actually shell out (and take time proportional to running the actual
+// commands). In dry-run mode (TestDataDriven) all exec and os commands return
+// successfully, with no error and an empty response. This makes it suitable
+// for testing workflows that don't make use of external state to execute
+// actions (like reading the set of targets from a file, for example, or
+// hoisting files from a sandbox by searching through the file system
+// directly).
+//
+// TODO(irfansharif): When --rewrite-ing, because these tests shell out to the
+// actual host system, they're difficult to run under bazel/dev (currently
+// disallowed). Probably these tests should be ripped out entirely. Dev's
+// currently in the business of doing a lot of interactive I/O with the host
+// system, instead of pushing it all down into bazel rules. The recorder tests
+// are the few remaining examples of this. As we push more things down into
+// bazel rules, we should re-evaluate whether this harness provides much
+// value. Dev commands that still require writing a TestRecorderDriven test
+// are probably worth rewriting.
+//
+func TestRecorderDriven(t *testing.T) {
+	rewriting := false
+	if f := flag.Lookup("rewrite"); f != nil && f.Value.String() == "true" {
+		rewriting = true
+	}
+	if rewriting {
+		t.Fatalf("not supported under bazel") // needs to shell out to bazel itself
+	}
+
+	verbose := testing.Verbose()
+	testdata := testutils.TestDataPath(t, "recorderdriven")
+	datadriven.Walk(t, testdata, func(t *testing.T, path string) {
+		if strings.HasSuffix(path, ".rec") {
+			return
+		}
+
+		recordingPath := fmt.Sprintf("%s.rec", path)
+
+		// We'll match against printed logs for datadriven.
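+		// (Both handles below are configured to log into this buffer; the
+		// accumulated log lines are what each datadriven case returns and
+		// diffs against the expected output in the test file.)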
+ var logger io.ReadWriter = bytes.NewBufferString("") + var recording io.ReadWriter + var rec *recorder.Recorder + + execOpts := []exec.Option{exec.WithLogger(log.New(logger, "", 0))} + osOpts := []os.Option{os.WithLogger(log.New(logger, "", 0))} + + if !verbose { + // Suppress all internal output unless told otherwise. + execOpts = append(execOpts, exec.WithStdOutErr(ioutil.Discard, ioutil.Discard)) + } + + if rewriting { + workspaceResult := workspace(t) + bazelbinResult := bazelbin(t) + execOpts = append(execOpts, + exec.WithWorkingDir(workspaceResult), + exec.WithIntercept(workspaceCmd(), workspaceResult), + exec.WithIntercept(bazelbinCmd(), bazelbinResult), + ) + osOpts = append(osOpts, os.WithWorkingDir(workspaceResult)) + + recording = bytes.NewBufferString("") + rec = recorder.New(recorder.WithRecording(recording)) // the thing to record into + } else { + execOpts = append(execOpts, + exec.WithIntercept(workspaceCmd(), crdbCheckoutPlaceholder), + exec.WithIntercept(bazelbinCmd(), sandboxPlaceholder), + ) + + frecording, err := stdos.OpenFile(recordingPath, stdos.O_RDONLY, 0600) + require.NoError(t, err) + defer func() { require.NoError(t, frecording.Close()) }() + rec = recorder.New(recorder.WithReplay(frecording, recordingPath)) // the recording we're playing back from + } + + require.NotNil(t, rec) + execOpts = append(execOpts, exec.WithRecorder(rec)) + osOpts = append(osOpts, os.WithRecorder(rec)) + + devExec := exec.New(execOpts...) + devOS := os.New(osOpts...) + + datadriven.RunTest(t, path, func(t *testing.T, d *datadriven.TestData) string { + dev := makeDevCmd() + dev.exec, dev.os = devExec, devOS + dev.knobs.skipDoctorCheck = true + dev.knobs.devBinOverride = "dev" + + if !verbose { + dev.cli.SetErr(ioutil.Discard) + dev.cli.SetOut(ioutil.Discard) + } + + require.Equalf(t, d.Cmd, "dev", "unknown command: %s", d.Cmd) + var args []string + for _, cmdArg := range d.CmdArgs { + args = append(args, cmdArg.Key) + if len(cmdArg.Vals) != 0 { + args = append(args, cmdArg.Vals[0]) + } + } + dev.cli.SetArgs(args) + if err := dev.cli.Execute(); err != nil { + return fmt.Sprintf("err: %s", err) + } + + logs, err := ioutil.ReadAll(logger) + require.NoError(t, err) + if rewriting { + logs = anonymize(t, logs) + } + return string(logs) + }) + + if rewriting { + recording, err := ioutil.ReadAll(recording) + require.NoError(t, err) + + frecording, err := stdos.OpenFile(recordingPath, stdos.O_CREATE|stdos.O_WRONLY|stdos.O_TRUNC|stdos.O_SYNC, 0600) + require.NoError(t, err) + defer func() { require.NoError(t, frecording.Close()) }() + + recording = anonymize(t, recording) + _, err = frecording.Write(recording) + require.NoError(t, err) + } + }) +} + +func anonymize(t *testing.T, input []byte) []byte { + output := bytes.ReplaceAll(input, []byte(workspace(t)), []byte(crdbCheckoutPlaceholder)) + return bytes.ReplaceAll(output, []byte(bazelbin(t)), []byte(sandboxPlaceholder)) +} + +func workspace(t *testing.T) string { + cmd := stdexec.Command("bazel", "info", "workspace") + var stdout, stderr bytes.Buffer + cmd.Stdout, cmd.Stderr = &stdout, &stderr + require.NoError(t, cmd.Start()) + require.NoError(t, cmd.Wait(), stderr.String()) + return strings.TrimSpace(stdout.String()) +} + +func bazelbin(t *testing.T) string { + cmd := stdexec.Command("bazel", "info", "bazel-bin") + var stdout, stderr bytes.Buffer + cmd.Stdout, cmd.Stderr = &stdout, &stderr + require.NoError(t, cmd.Start()) + require.NoError(t, cmd.Wait(), stderr.String()) + return strings.TrimSpace(stdout.String()) +} diff --git 
a/pkg/cmd/dev/recording/BUILD.bazel b/pkg/cmd/dev/recording/BUILD.bazel deleted file mode 100644 index 19a47a095bb1..000000000000 --- a/pkg/cmd/dev/recording/BUILD.bazel +++ /dev/null @@ -1,12 +0,0 @@ -load("@io_bazel_rules_go//go:def.bzl", "go_library") - -go_library( - name = "recording", - srcs = [ - "operation.go", - "recording.go", - "scanner.go", - ], - importpath = "github.com/cockroachdb/cockroach/pkg/cmd/dev/recording", - visibility = ["//visibility:public"], -) diff --git a/pkg/cmd/dev/recording/operation.go b/pkg/cmd/dev/recording/operation.go deleted file mode 100644 index a14d775050e3..000000000000 --- a/pkg/cmd/dev/recording/operation.go +++ /dev/null @@ -1,68 +0,0 @@ -// Copyright 2021 The Cockroach Authors. -// -// Use of this software is governed by the Business Source License -// included in the file licenses/BSL.txt. -// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. - -package recording - -import "strings" - -// Operation represents the base unit of what can be recorded. It consists of a -// command and an expected output. -// -// The printed form of the command is defined by the following grammar: -// -// # comment -// \ -// -// ---- -// -// -// By default cannot contain blank lines. This alternative syntax -// allows the use of blank lines. -// -// -// ---- -// ---- -// -// -// -// ---- -// ---- -type Operation struct { - Command string // - Output string // -} - -// String returns a printable form for the given Operation. See type-level -// comment to understand the grammar we're constructing against. -func (o *Operation) String() string { - var sb strings.Builder - sb.WriteString(o.Command) - sb.WriteString("\n") - - sb.WriteString("----\n") - - multiline := strings.ContainsAny(strings.TrimRight(o.Output, "\n"), "\n") - if multiline { - sb.WriteString("----\n") - } - - sb.WriteString(o.Output) - if o.Output != "" && !strings.HasSuffix(o.Output, "\n") { - sb.WriteString("\n") - } - - if multiline { - sb.WriteString("----\n") - sb.WriteString("----\n") - } - - sb.WriteString("\n") - return sb.String() -} diff --git a/pkg/cmd/dev/recording/recording.go b/pkg/cmd/dev/recording/recording.go deleted file mode 100644 index 3cc87368ae46..000000000000 --- a/pkg/cmd/dev/recording/recording.go +++ /dev/null @@ -1,209 +0,0 @@ -// Copyright 2021 The Cockroach Authors. -// -// Use of this software is governed by the Business Source License -// included in the file licenses/BSL.txt. -// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. - -package recording - -import ( - "bytes" - "fmt" - "io" - "strings" -) - -// Recording can be used to play back a set of operations (defined only by a -// "command" and an "expected output"). It provides a handy way to mock out the -// components being recorded. -type Recording struct { - // scanner is where we're replaying the recording from. op is the - // scratch space used to parse out the current operation being read. - scanner *scanner - op Operation -} - -// WithReplayFrom is used to configure a Recording to play back from the given -// reader. The provided name is used only for diagnostic purposes, it's -// typically the name of the file being read. 
-func WithReplayFrom(r io.Reader, name string) *Recording { - re := &Recording{} - re.scanner = newScanner(r, name) - return re -} - -// Next is used to step through the next operation found in the recording, if -// any. -func (r *Recording) Next(f func(Operation) error) (found bool, err error) { - parsed, err := r.parseOperation() - if err != nil { - return false, err - } - - if !parsed { - return false, nil - } - - if err := f(r.op); err != nil { - return false, fmt.Errorf("%s: %w", r.scanner.pos(), err) - } - return true, nil -} - -// parseOperation parses out the next Operation from the internal scanner. See -// type-level comment on Operation to understand the grammar we're parsing -// against. -func (r *Recording) parseOperation() (parsed bool, err error) { - for r.scanner.Scan() { - r.op = Operation{} - line := r.scanner.Text() - - line = strings.TrimSpace(line) - if strings.HasPrefix(line, "#") { - // Skip comment lines. - continue - } - - // Support wrapping command directive lines using "\". - for strings.HasSuffix(line, `\`) && r.scanner.Scan() { - nextLine := r.scanner.Text() - line = strings.TrimSuffix(line, `\`) - line = strings.TrimSpace(line) - line = fmt.Sprintf("%s %s", line, strings.TrimSpace(nextLine)) - } - - command, err := r.parseCommand(line) - if err != nil { - return false, err - } - if command == "" { - // Nothing to do here. - continue - } - r.op.Command = command - - if err := r.parseSeparator(); err != nil { - return false, err - } - - if err := r.parseOutput(); err != nil { - return false, err - } - - return true, nil - } - return false, nil -} - -// parseCommand parses a line and returns it if parsed correctly. See -// type-level comment on Operation to understand the grammar we're parsing -// against. -func (r *Recording) parseCommand(line string) (cmd string, err error) { - line = strings.TrimSpace(line) - if line == "" { - return "", nil - } - - origLine := line - cmd = strings.TrimSpace(line) - if cmd == "" { - column := len(origLine) - len(line) + 1 - return "", fmt.Errorf("%s: cannot parse command at col %d: %s", r.scanner.pos(), column, origLine) - } - return cmd, nil -} - -// parseSeparator parses a separator ('----'), erroring out if it's not parsed -// correctly. See type-level comment on Operation to understand the grammar -// we're parsing against. -func (r *Recording) parseSeparator() error { - if !r.scanner.Scan() { - return fmt.Errorf("%s: expected to find separator after command", r.scanner.pos()) - } - line := r.scanner.Text() - if line != "----" { - return fmt.Errorf("%s: expected to find separator after command, found %q instead", r.scanner.pos(), line) - } - return nil -} - -// parseOutput parses an . See type-level comment on Operation to -// understand the grammar we're parsing against. -func (r *Recording) parseOutput() error { - var buf bytes.Buffer - var line string - - var allowBlankLines bool - if r.scanner.Scan() { - line = r.scanner.Text() - if line == "----" { - allowBlankLines = true - } - } - - if !allowBlankLines { - // Terminate on first blank line. - for { - if strings.TrimSpace(line) == "" { - break - } - - if _, err := fmt.Fprintln(&buf, line); err != nil { - return err - } - - if !r.scanner.Scan() { - break - } - - line = r.scanner.Text() - } - r.op.Output = buf.String() - return nil - } - - // Look for two successive lines of "----" before terminating. - for r.scanner.Scan() { - line = r.scanner.Text() - if line != "----" { - // We just picked up a regular line that's part of the command - // output. 
- if _, err := fmt.Fprintln(&buf, line); err != nil { - return err - } - - continue - } - - // We picked up a separator. We could either be part of the - // command output, or it was actually intended by the user as a - // separator. Let's check to see if we can parse a second one. - if err := r.parseSeparator(); err == nil { - // We just saw the second separator, the output portion is done. - // Read the following blank line. - if r.scanner.Scan() && r.scanner.Text() != "" { - return fmt.Errorf("%s: non-blank line after end of double ---- separator section", r.scanner.pos()) - } - break - } - - // The separator we saw was part of the command output. - // Let's collect both lines (the first separator, and the - // new one). - if _, err := fmt.Fprintln(&buf, line); err != nil { - return err - } - - line2 := r.scanner.Text() - if _, err := fmt.Fprintln(&buf, line2); err != nil { - return err - } - } - - r.op.Output = buf.String() - return nil -} diff --git a/pkg/cmd/dev/recording/scanner.go b/pkg/cmd/dev/recording/scanner.go deleted file mode 100644 index 06d2c8c22764..000000000000 --- a/pkg/cmd/dev/recording/scanner.go +++ /dev/null @@ -1,51 +0,0 @@ -// Copyright 2021 The Cockroach Authors. -// -// Use of this software is governed by the Business Source License -// included in the file licenses/BSL.txt. -// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. - -package recording - -import ( - "bufio" - "fmt" - "io" -) - -// scanner is a convenience wrapper around a bufio.Scanner that keeps track of -// the currently read line number (and an associated name for the reader - -// typically a file name). -type scanner struct { - *bufio.Scanner - line int - name string -} - -func newScanner(r io.Reader, name string) *scanner { - bufioScanner := bufio.NewScanner(r) - // We use a large max-token-size to account for lines in the output that far - // exceed the default bufio scanner token size. - bufioScanner.Buffer(make([]byte, 100), 10*bufio.MaxScanTokenSize) - return &scanner{ - Scanner: bufioScanner, - name: name, - } -} - -func (s *scanner) Scan() bool { - ok := s.Scanner.Scan() - if ok { - s.line++ - } - return ok -} - -// pos is a file:line prefix for the input file, suitable for inclusion in logs -// and error messages. -func (s *scanner) pos() string { - return fmt.Sprintf("%s:%d", s.name, s.line) -} diff --git a/pkg/cmd/dev/test.go b/pkg/cmd/dev/test.go index 9feb1980ffb7..8a81f7c38932 100644 --- a/pkg/cmd/dev/test.go +++ b/pkg/cmd/dev/test.go @@ -11,15 +11,11 @@ package main import ( - "context" - "errors" "fmt" - "os/exec" "path/filepath" "strings" "time" - "github.com/alessio/shellescape" "github.com/spf13/cobra" ) @@ -98,6 +94,12 @@ func (d *dev) test(cmd *cobra.Command, commandLine []string) error { vModule = mustGetFlagString(cmd, vModuleFlag) ) + // Enumerate all tests to run. + if len(pkgs) == 0 { + // Empty `dev test` does the same thing as `dev test pkg/...` + pkgs = append(pkgs, "pkg/...") + } + var args []string args = append(args, "test") args = append(args, mustGetRemoteCacheArgs(remoteCacheAddr)...) 
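The next hunk drops the bazel-query-based test enumeration in favor of constructing target labels directly. A standalone sketch of that normalization, mirroring the loop added below (the helper name normalizeTestTarget is invented for illustration, and it assumes the strings and fmt imports already present in test.go):

    // normalizeTestTarget maps a user-supplied package spec to a bazel label:
    //
    //   pkg/util/tracing        -> pkg/util/tracing:all
    //   pkg/util/tracing/...    -> pkg/util/tracing/...:all
    //   pkg/roachpb:string_test -> pkg/roachpb:string_test (named targets pass through)
    func normalizeTestTarget(pkg string) (string, error) {
    	pkg = strings.TrimPrefix(pkg, "//")
    	pkg = strings.TrimPrefix(pkg, "./")
    	pkg = strings.TrimRight(pkg, "/")
    	if !strings.HasPrefix(pkg, "pkg/") {
    		return "", fmt.Errorf("malformed package %q, expecting %q", pkg, "pkg/{...}")
    	}
    	if strings.Contains(pkg, ":") {
    		// For parity with bazel, named build targets are allowed as-is.
    		return pkg, nil
    	}
    	return fmt.Sprintf("%s:all", pkg), nil
    }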
@@ -112,54 +114,25 @@ func (d *dev) test(cmd *cobra.Command, commandLine []string) error { var testTargets []string for _, pkg := range pkgs { - dir, isRecursive, tag, err := d.parsePkg(pkg) - if err != nil { - return err + pkg = strings.TrimPrefix(pkg, "//") + pkg = strings.TrimPrefix(pkg, "./") + pkg = strings.TrimRight(pkg, "/") + + if !strings.HasPrefix(pkg, "pkg/") { + return fmt.Errorf("malformed package %q, expecting %q", pkg, "pkg/{...}") } - querySuffix := "" - if isRecursive { - // Similar to `go test`, we implement `...` expansion to allow - // callers to use the following pattern to test all packages under a - // named one: - // - // dev test pkg/util/... -v - // - // NB: We'll want to filter for just the go_test targets here. Not - // doing so prompts bazel to try and build all named targets. This - // is undesirable for the various `*_proto` targets seeing as how - // they're not buildable in isolation. This is because we often - // attach methods to proto types in hand-written files, files that - // are not picked up by the proto bazel targets[1]. Regular bazel - // compilation is still fine seeing as how the top-level go_library - // targets both embeds the proto target, and sources the - // hand-written file. But the proto target in isolation may not be - // buildable because without those additional methods, those types - // may fail to satisfy required interfaces. - // - // So, blinding selecting for all targets won't work, and we'll want - // to filter things out first. - // - // [1]: pkg/rpc/heartbeat.proto is one example of this pattern, - // where we define `Stringer` separately for the `RemoteOffset` - // type. - querySuffix = "/..." + + var target string + if strings.Contains(pkg, ":") { + // For parity with bazel, we allow specifying named build targets. + target = pkg } else { - if tag == "" { - tag = "all" - } - querySuffix = ":" + tag + target = fmt.Sprintf("%s:all", pkg) } - query := fmt.Sprintf("kind(go_test, //%s%s)", dir, querySuffix) - out, err := d.getQueryOutput(ctx, query) - if err != nil { - return err - } - tests := strings.Split(strings.TrimSpace(string(out)), "\n") - testTargets = append(testTargets, tests...) + testTargets = append(testTargets, target) } args = append(args, testTargets...) - if ignoreCache { args = append(args, "--nocache_test_results") } @@ -213,7 +186,7 @@ func (d *dev) test(cmd *cobra.Command, commandLine []string) error { // NB: Run with -bazel, which propagates `TEST_TMPDIR` to `TMPDIR`, // and -shardable-artifacts set such that we can merge the XML output // files. - fmt.Sprintf("%s -bazel -shardable-artifacts 'XML_OUTPUT_FILE=%s merge-test-xmls' %s", stressTarget, getDevBin(), strings.Join(stressCmdArgs, " "))) + fmt.Sprintf("%s -bazel -shardable-artifacts 'XML_OUTPUT_FILE=%s merge-test-xmls' %s", stressTarget, d.getDevBin(), strings.Join(stressCmdArgs, " "))) } if filter != "" { @@ -254,6 +227,12 @@ func (d *dev) test(cmd *cobra.Command, commandLine []string) error { logCommand("bazel", args...) return d.exec.CommandContextInheritingStdStreams(ctx, "bazel", args...) + + // TODO(irfansharif): Both here and in `dev bench`, if the command is + // unsuccessful we could explicitly check for "missing package" errors. The + // situation is not so bad currently however: + // + // [...] 
while parsing 'pkg/f:all': no such package 'pkg/f' } func getDirectoryFromTarget(target string) string { @@ -264,25 +243,3 @@ func getDirectoryFromTarget(target string) string { } return target[:colon] } - -// getQueryOutput runs `bazel query` w/ the given arguments, but returns -// a more informative error if the query fails. -func (d *dev) getQueryOutput(ctx context.Context, args ...string) ([]byte, error) { - queryArgs := []string{"query"} - queryArgs = append(queryArgs, args...) - stdoutBytes, err := d.exec.CommandContextSilent(ctx, "bazel", queryArgs...) - if err == nil { - return stdoutBytes, err - } - var cmderr *exec.ExitError - var stdout, stderr string - if len(stdoutBytes) > 0 { - stdout = fmt.Sprintf("stdout: \"%s\" ", string(stdoutBytes)) - } - if errors.As(err, &cmderr) && len(cmderr.Stderr) > 0 { - stderr = fmt.Sprintf("stderr: \"%s\" ", strings.TrimSpace(string(cmderr.Stderr))) - } - return nil, fmt.Errorf("failed to run `bazel %s` %s%s(%w)", - shellescape.QuoteCommand(queryArgs), stdout, stderr, err) - -} diff --git a/pkg/cmd/dev/testdata/bench.txt b/pkg/cmd/dev/testdata/bench.txt deleted file mode 100644 index 66c9bec5f577..000000000000 --- a/pkg/cmd/dev/testdata/bench.txt +++ /dev/null @@ -1,11 +0,0 @@ -dev bench pkg/util/... ----- -find pkg/util -type d -git grep -l '^func Benchmark' -- 'pkg/util/*_test.go' -bazel run --config=test --test_sharding_strategy=disabled //pkg/util:util_test -- -test.run=- -test.bench=. -bazel run --config=test --test_sharding_strategy=disabled //pkg/util/uuid:uuid_test -- -test.run=- -test.bench=. - -dev bench pkg/sql/parser --filter=BenchmarkParse ----- -find pkg/sql/parser -type d -bazel run --config=test --test_sharding_strategy=disabled //pkg/sql/parser:parser_test -- -test.run=- -test.bench=BenchmarkParse diff --git a/pkg/cmd/dev/testdata/build.txt b/pkg/cmd/dev/testdata/build.txt deleted file mode 100644 index 6feecd979c7a..000000000000 --- a/pkg/cmd/dev/testdata/build.txt +++ /dev/null @@ -1,90 +0,0 @@ -dev build cockroach-short --skip-generate ----- -bazel build //pkg/cmd/cockroach-short:cockroach-short -bazel info workspace --color=no -mkdir go/src/github.com/cockroachdb/cockroach/bin -bazel info bazel-bin --color=no -rm go/src/github.com/cockroachdb/cockroach/cockroach-short -ln -s /private/var/tmp/_bazel/99e666e4e674209ecdb66b46371278df/execroot/cockroach/bazel-out/darwin-fastbuild/bin/pkg/cmd/cockroach-short/cockroach-short_/cockroach-short go/src/github.com/cockroachdb/cockroach/cockroach-short - -dev build cockroach-short --cpus=12 --skip-generate ----- -bazel build --local_cpu_resources=12 //pkg/cmd/cockroach-short:cockroach-short -mkdir go/src/github.com/cockroachdb/cockroach/bin -bazel info bazel-bin --color=no -rm go/src/github.com/cockroachdb/cockroach/cockroach-short -ln -s /private/var/tmp/_bazel/99e666e4e674209ecdb66b46371278df/execroot/cockroach/bazel-out/darwin-fastbuild/bin/pkg/cmd/cockroach-short/cockroach-short_/cockroach-short go/src/github.com/cockroachdb/cockroach/cockroach-short - -dev build --debug short --skip-generate ----- -bazel build //pkg/cmd/cockroach-short:cockroach-short -mkdir go/src/github.com/cockroachdb/cockroach/bin -bazel info bazel-bin --color=no -rm go/src/github.com/cockroachdb/cockroach/cockroach-short -ln -s /private/var/tmp/_bazel/99e666e4e674209ecdb66b46371278df/execroot/cockroach/bazel-out/darwin-fastbuild/bin/pkg/cmd/cockroach-short/cockroach-short_/cockroach-short go/src/github.com/cockroachdb/cockroach/cockroach-short - -dev build cockroach-short --remote-cache 127.0.0.1:9090 
--skip-generate ----- -bazel build --remote_local_fallback --remote_cache=grpc://127.0.0.1:9090 --experimental_remote_downloader=grpc://127.0.0.1:9090 //pkg/cmd/cockroach-short:cockroach-short -mkdir go/src/github.com/cockroachdb/cockroach/bin -bazel info bazel-bin --color=no -rm go/src/github.com/cockroachdb/cockroach/cockroach-short -ln -s /private/var/tmp/_bazel/99e666e4e674209ecdb66b46371278df/execroot/cockroach/bazel-out/darwin-fastbuild/bin/pkg/cmd/cockroach-short/cockroach-short_/cockroach-short go/src/github.com/cockroachdb/cockroach/cockroach-short - -dev build cockroach-short ----- -bazel build //pkg/cmd/cockroach-short:cockroach-short //:go_path -mkdir go/src/github.com/cockroachdb/cockroach/bin -bazel info bazel-bin --color=no -rm go/src/github.com/cockroachdb/cockroach/cockroach-short -ln -s /private/var/tmp/_bazel/99e666e4e674209ecdb66b46371278df/execroot/cockroach/bazel-out/darwin-fastbuild/bin/pkg/cmd/cockroach-short/cockroach-short_/cockroach-short go/src/github.com/cockroachdb/cockroach/cockroach-short -git status --ignored --short go/src/github.com/cockroachdb/cockroach/pkg -rm pkg/file_to_delete.go -find /private/var/tmp/_bazel/99e666e4e674209ecdb66b46371278df/execroot/cockroach/bazel-out/darwin-fastbuild/bin/go_path/src/github.com/cockroachdb/cockroach -name *.go -cat go/src/github.com/cockroachdb/cockroach/build/bazelutil/checked_in_genfiles.txt -cp /private/var/tmp/_bazel/99e666e4e674209ecdb66b46371278df/execroot/cockroach/bazel-out/darwin-fastbuild/bin/go_path/src/github.com/cockroachdb/cockroach/pkg/kv/kvserver/storage_services.pb.go go/src/github.com/cockroachdb/cockroach/pkg/kv/kvserver/storage_services.pb.go -cp /private/var/tmp/_bazel/99e666e4e674209ecdb66b46371278df/execroot/cockroach/bazel-out/darwin-fastbuild/bin/go_path/src/github.com/cockroachdb/cockroach/pkg/roachpb/batch_generated-gen.go go/src/github.com/cockroachdb/cockroach/pkg/roachpb/batch_generated.go -cp /private/var/tmp/_bazel/99e666e4e674209ecdb66b46371278df/execroot/cockroach/bazel-out/darwin-fastbuild/bin/go_path/src/github.com/cockroachdb/cockroach/pkg/sql/opt/optgen/lang/expr-gen.og.go go/src/github.com/cockroachdb/cockroach/pkg/sql/opt/optgen/lang/expr.og.go -cp /private/var/tmp/_bazel/99e666e4e674209ecdb66b46371278df/execroot/cockroach/bazel-out/darwin-fastbuild/bin/go_path/src/github.com/cockroachdb/cockroach/pkg/sql/opt/optgen/lang/operator-gen.og.go go/src/github.com/cockroachdb/cockroach/pkg/sql/opt/optgen/lang/operator.og.go - -dev build short --skip-generate -- -s ----- -bazel build //pkg/cmd/cockroach-short:cockroach-short -s -mkdir go/src/github.com/cockroachdb/cockroach/bin -bazel info bazel-bin --color=no -rm go/src/github.com/cockroachdb/cockroach/cockroach-short -ln -s /private/var/tmp/_bazel/99e666e4e674209ecdb66b46371278df/execroot/cockroach/bazel-out/darwin-fastbuild/bin/pkg/cmd/cockroach-short/cockroach-short_/cockroach-short go/src/github.com/cockroachdb/cockroach/cockroach-short - -dev build --skip-generate -- --verbose_failures --sandbox_debug ----- -bazel run @nodejs//:yarn -- --check-files --cwd pkg/ui --offline -bazel build //pkg/cmd/cockroach:cockroach --config=with_ui --verbose_failures --sandbox_debug -mkdir go/src/github.com/cockroachdb/cockroach/bin -bazel info bazel-bin --color=no -rm go/src/github.com/cockroachdb/cockroach/cockroach -ln -s /private/var/tmp/_bazel/99e666e4e674209ecdb66b46371278df/execroot/cockroach/bazel-out/darwin-fastbuild/bin/pkg/cmd/cockroach/cockroach_/cockroach go/src/github.com/cockroachdb/cockroach/cockroach - -dev build 
@com_github_cockroachdb_stress//:stress --skip-generate ----- -bazel query @com_github_cockroachdb_stress//:stress --output=label_kind -bazel build @com_github_cockroachdb_stress//:stress -mkdir go/src/github.com/cockroachdb/cockroach/bin -bazel info bazel-bin --color=no -rm go/src/github.com/cockroachdb/cockroach/bin/stress -ln -s /private/var/tmp/_bazel/99e666e4e674209ecdb66b46371278df/execroot/cockroach/bazel-out/darwin-fastbuild/bin/external/com_github_cockroachdb_stress/stress_/stress go/src/github.com/cockroachdb/cockroach/bin/stress - -dev build pkg/roachpb:roachpb_test --skip-generate ----- -bazel query pkg/roachpb:roachpb_test --output=label_kind -bazel build //pkg/roachpb:roachpb_test --config=test -mkdir go/src/github.com/cockroachdb/cockroach/bin -bazel info bazel-bin --color=no - -dev build pkg/foo/... --skip-generate ----- -bazel query pkg/foo/... --output=label_kind -bazel build //pkg/foo:bar //pkg/foo:baz --config=test -mkdir go/src/github.com/cockroachdb/cockroach/bin -bazel info bazel-bin --color=no -rm go/src/github.com/cockroachdb/cockroach/bin/bar -ln -s /private/var/tmp/_bazel/99e666e4e674209ecdb66b46371278df/execroot/cockroach/bazel-out/darwin-fastbuild/bin/pkg/foo/bar_/bar go/src/github.com/cockroachdb/cockroach/bin/bar diff --git a/pkg/cmd/dev/testdata/builder.txt b/pkg/cmd/dev/testdata/builder.txt deleted file mode 100644 index af382ffcf39f..000000000000 --- a/pkg/cmd/dev/testdata/builder.txt +++ /dev/null @@ -1,15 +0,0 @@ -dev builder ----- -id -cat go/src/github.com/cockroachdb/cockroach/build/teamcity-bazel-support.sh -docker volume inspect bzlhome -mkdir go/src/github.com/cockroachdb/cockroach/artifacts -docker run --rm -it -v go/src/github.com/cockroachdb/cockroach:/cockroach --workdir=/cockroach -v go/src/github.com/cockroachdb/cockroach/artifacts:/artifacts -v bzlhome:/home/roach:delegated -u 1001:1002 mock_bazel_image:1234 - -dev builder echo hi ----- -id -cat go/src/github.com/cockroachdb/cockroach/build/teamcity-bazel-support.sh -docker volume inspect bzlhome -mkdir go/src/github.com/cockroachdb/cockroach/artifacts -docker run --rm -it -v go/src/github.com/cockroachdb/cockroach:/cockroach --workdir=/cockroach -v go/src/github.com/cockroachdb/cockroach/artifacts:/artifacts -v bzlhome:/home/roach:delegated -u 1001:1002 mock_bazel_image:1234 echo hi diff --git a/pkg/cmd/dev/testdata/datadriven/bench b/pkg/cmd/dev/testdata/datadriven/bench new file mode 100644 index 000000000000..778085f057bb --- /dev/null +++ b/pkg/cmd/dev/testdata/datadriven/bench @@ -0,0 +1,15 @@ +dev bench pkg/spanconfig/... +---- +bazel test pkg/spanconfig/...:all --test_arg -test.bench=. 
--test_output errors + +dev bench pkg/sql/parser --filter=BenchmarkParse +---- +bazel test pkg/sql/parser:all --test_arg -test.bench=BenchmarkParse --test_output errors + +dev bench pkg/bench -f=BenchmarkTracing/1node/scan/trace=off --count=2 --bench-time=10x --bench-mem +---- +bazel test pkg/bench:all --test_arg -test.bench=BenchmarkTracing/1node/scan/trace=off --test_arg -test.count=2 --test_arg -test.benchtime=10x --test_arg -test.benchmem --test_output errors + +dev bench pkg/spanconfig/spanconfigkvsubscriber -f=BenchmarkSpanConfigDecoder --cpus=10 --ignore-cache -v --timeout=50s +---- +bazel test --local_cpu_resources=10 --test_timeout=50 pkg/spanconfig/spanconfigkvsubscriber:all --nocache_test_results --test_arg -test.bench=BenchmarkSpanConfigDecoder --test_arg -test.v --test_output all diff --git a/pkg/cmd/dev/testdata/datadriven/compose b/pkg/cmd/dev/testdata/datadriven/compose new file mode 100644 index 000000000000..4b04dd079efd --- /dev/null +++ b/pkg/cmd/dev/testdata/datadriven/compose @@ -0,0 +1,7 @@ +dev compose +---- +bazel run //pkg/compose:compose_test --config=test + +dev compose --cpus 12 --short --timeout 1m -f TestComposeCompare +---- +bazel run //pkg/compose:compose_test --config=test --local_cpu_resources=12 --test_filter=TestComposeCompare --test_arg -test.short --test_timeout=60 diff --git a/pkg/cmd/dev/testdata/datadriven/dev-build b/pkg/cmd/dev/testdata/datadriven/dev-build new file mode 100644 index 000000000000..ab478ffe4598 --- /dev/null +++ b/pkg/cmd/dev/testdata/datadriven/dev-build @@ -0,0 +1,54 @@ +dev build cockroach-short --skip-generate +---- +bazel build //pkg/cmd/cockroach-short:cockroach-short +bazel info workspace --color=no +mkdir bin +bazel info bazel-bin --color=no +rm cockroach-short +ln -s pkg/cmd/cockroach-short/cockroach-short_/cockroach-short cockroach-short + +dev build cockroach-short --cpus=12 --skip-generate +---- +bazel build --local_cpu_resources=12 //pkg/cmd/cockroach-short:cockroach-short +bazel info workspace --color=no +mkdir bin +bazel info bazel-bin --color=no +rm cockroach-short +ln -s pkg/cmd/cockroach-short/cockroach-short_/cockroach-short cockroach-short + +dev build --debug short --skip-generate +---- +bazel build //pkg/cmd/cockroach-short:cockroach-short +bazel info workspace --color=no +mkdir bin +bazel info bazel-bin --color=no +rm cockroach-short +ln -s pkg/cmd/cockroach-short/cockroach-short_/cockroach-short cockroach-short + +dev build short --skip-generate -- -s +---- +bazel build //pkg/cmd/cockroach-short:cockroach-short -s +bazel info workspace --color=no +mkdir bin +bazel info bazel-bin --color=no +rm cockroach-short +ln -s pkg/cmd/cockroach-short/cockroach-short_/cockroach-short cockroach-short + +dev build --skip-generate -- --verbose_failures --sandbox_debug +---- +bazel run @nodejs//:yarn -- --check-files --cwd pkg/ui --offline +bazel build //pkg/cmd/cockroach:cockroach --config=with_ui --verbose_failures --sandbox_debug +bazel info workspace --color=no +mkdir bin +bazel info bazel-bin --color=no +rm cockroach +ln -s pkg/cmd/cockroach/cockroach_/cockroach cockroach + +dev build stress --skip-generate +---- +bazel build @com_github_cockroachdb_stress//:stress +bazel info workspace --color=no +mkdir bin +bazel info bazel-bin --color=no +rm bin/stress +ln -s external/com_github_cockroachdb_stress/stress_/stress bin/stress diff --git a/pkg/cmd/dev/testdata/datadriven/generate b/pkg/cmd/dev/testdata/datadriven/generate new file mode 100644 index 000000000000..86f279c4e80f --- /dev/null +++ 
b/pkg/cmd/dev/testdata/datadriven/generate @@ -0,0 +1,15 @@ +dev gen protobuf +---- +bazel run //pkg/gen:go_proto + +dev gen bazel +---- +bazel info workspace --color=no +build/bazelutil/bazel-generate.sh + +dev generate bazel --mirror --force +---- +bazel info workspace --color=no +export COCKROACH_BAZEL_CAN_MIRROR=1 +export COCKROACH_BAZEL_FORCE_GENERATE=1 +build/bazelutil/bazel-generate.sh diff --git a/pkg/cmd/dev/testdata/datadriven/lint b/pkg/cmd/dev/testdata/datadriven/lint new file mode 100644 index 000000000000..172eedae41d1 --- /dev/null +++ b/pkg/cmd/dev/testdata/datadriven/lint @@ -0,0 +1,20 @@ +dev lint +---- +bazel run --config=test //build/bazelutil:lint -- -test.v + +dev lint --short --timeout=5m +---- +bazel run --config=test //build/bazelutil:lint -- -test.v -test.short -test.timeout 5m0s + +dev lint pkg/cmd/dev +---- +export PKG=./pkg/cmd/dev +bazel run --config=test //build/bazelutil:lint -- -test.v + +dev lint -f TestLowercaseFunctionNames --cpus 4 +---- +bazel run --config=test //build/bazelutil:lint --local_cpu_resources=4 -- -test.v -test.run Lint/TestLowercaseFunctionNames + +dev lint pkg/cmd/dev pkg/spanconfig +---- +err: can only lint a single package (found pkg/cmd/dev, pkg/spanconfig) diff --git a/pkg/cmd/dev/testdata/datadriven/test b/pkg/cmd/dev/testdata/datadriven/test new file mode 100644 index 000000000000..e6b49b48867d --- /dev/null +++ b/pkg/cmd/dev/testdata/datadriven/test @@ -0,0 +1,68 @@ +dev test pkg/util/tracing +---- +bazel test pkg/util/tracing:all --test_env=GOTRACEBACK=all --test_output errors + +dev test pkg/util/tracing/... +---- +bazel test pkg/util/tracing/...:all --test_env=GOTRACEBACK=all --test_output errors + +dev test pkg/util/tracing -f TestStartChild* +---- +bazel test pkg/util/tracing:all --test_env=GOTRACEBACK=all '--test_filter=TestStartChild*' --test_output errors + +dev test pkg/util/tracing -f TestStartChild* -v --show-logs +---- +bazel test pkg/util/tracing:all --test_env=GOTRACEBACK=all '--test_filter=TestStartChild*' --test_arg -test.v --test_arg -show-logs --test_output all + +dev test pkg/util/tracing -f TestStartChild* --remote-cache 127.0.0.1:9092 +---- +bazel test --remote_local_fallback --remote_cache=grpc://127.0.0.1:9092 --experimental_remote_downloader=grpc://127.0.0.1:9092 pkg/util/tracing:all --test_env=GOTRACEBACK=all '--test_filter=TestStartChild*' --test_output errors + +dev test pkg/util/tracing -f TestStartChild* --ignore-cache +---- +bazel test pkg/util/tracing:all --nocache_test_results --test_env=GOTRACEBACK=all '--test_filter=TestStartChild*' --test_output errors + +dev test --stress pkg/util/tracing --filter TestStartChild* --cpus=12 --timeout=25s +---- +bazel test --local_cpu_resources=12 --test_sharding_strategy=disabled pkg/util/tracing:all --test_env=GOTRACEBACK=all --test_timeout=85 --run_under '@com_github_cockroachdb_stress//:stress -bazel -shardable-artifacts '"'"'XML_OUTPUT_FILE=dev merge-test-xmls'"'"' -maxtime=25s -p=12 ' '--test_filter=TestStartChild*' --test_output streamed + +dev test //pkg/testutils --timeout=10s +---- +bazel test pkg/testutils:all --test_env=GOTRACEBACK=all --test_timeout=10 --test_output errors + +dev test pkg/util/tracing -- -s +---- +bazel test pkg/util/tracing:all --test_env=GOTRACEBACK=all --test_output errors -s + +dev test ./pkg/roachpb +---- +bazel test pkg/roachpb:all --test_env=GOTRACEBACK=all --test_output errors + +dev test pkg/roachpb:string_test +---- +bazel test pkg/roachpb:string_test --test_env=GOTRACEBACK=all --test_output errors + +dev test 
//pkg/testutils +---- +bazel test pkg/testutils:all --test_env=GOTRACEBACK=all --test_output errors + +dev test //pkg/testutils pkg/util/limit +---- +bazel test pkg/testutils:all pkg/util/limit:all --test_env=GOTRACEBACK=all --test_output errors + +dev test pkg/spanconfig --count 5 --race +---- +bazel test --config=race pkg/spanconfig:all --test_env=GOTRACEBACK=all --test_arg -test.count=5 --test_output errors + +dev test pkg/cmd/dev -f TestDataDriven/test --rewrite -v +---- +bazel info workspace --color=no +bazel test pkg/cmd/dev:all --test_env=GOTRACEBACK=all --test_env=COCKROACH_WORKSPACE= --test_arg -rewrite --sandbox_writable_path=pkg/cmd/dev --test_filter=TestDataDriven/test --test_arg -test.v --test_output all + +dev test pkg/server -f=TestSpanStatsResponse -v --count=5 --vmodule=raft=1 +---- +bazel test pkg/server:all --test_env=GOTRACEBACK=all --test_filter=TestSpanStatsResponse --test_arg -test.v --test_arg -test.count=5 --test_arg -vmodule=raft=1 --test_output all + +dev test --short +---- +bazel test pkg/...:all --test_env=GOTRACEBACK=all --test_arg -test.short --test_output errors diff --git a/pkg/cmd/dev/testdata/datadriven/testlogic b/pkg/cmd/dev/testdata/datadriven/testlogic new file mode 100644 index 000000000000..9b105b9a956d --- /dev/null +++ b/pkg/cmd/dev/testdata/datadriven/testlogic @@ -0,0 +1,27 @@ +dev testlogic +---- +bazel test --test_env=GOTRACEBACK=all --test_output errors //pkg/sql/logictest:logictest_test --test_filter TestLogic/// +bazel test --test_env=GOTRACEBACK=all --test_output errors //pkg/ccl/logictestccl:logictestccl_test --test_filter 'Test(CCL|Tenant)Logic///' +bazel test --test_env=GOTRACEBACK=all --test_output errors //pkg/sql/opt/exec/execbuilder:execbuilder_test --test_filter TestExecBuild/// + +dev testlogic ccl +---- +bazel test --test_env=GOTRACEBACK=all --test_output errors //pkg/ccl/logictestccl:logictestccl_test --test_filter 'Test(CCL|Tenant)Logic///' + +dev testlogic ccl opt +---- +bazel test --test_env=GOTRACEBACK=all --test_output errors //pkg/ccl/logictestccl:logictestccl_test --test_filter 'Test(CCL|Tenant)Logic///' +bazel test --test_env=GOTRACEBACK=all --test_output errors //pkg/sql/opt/exec/execbuilder:execbuilder_test --test_filter TestExecBuild/// + +dev testlogic base --ignore-cache +---- +bazel test --test_env=GOTRACEBACK=all --nocache_test_results --test_output errors //pkg/sql/logictest:logictest_test --test_filter TestLogic/// + +dev testlogic base --files=prepare|fk --subtests=20042 --config=local +---- +bazel test --test_env=GOTRACEBACK=all --test_arg -show-sql --test_arg -config --test_arg local --test_output errors //pkg/sql/logictest:logictest_test --test_filter 'TestLogic/^local$/^prepare|fk$/20042' + +dev testlogic base --files=auto_span_config_reconciliation --config=local -v --show-logs --timeout=50s --rewrite +---- +bazel info workspace --color=no +bazel test --test_env=GOTRACEBACK=all --test_arg -test.v --test_arg -show-logs --test_timeout=50 --test_arg -show-sql --test_arg -config --test_arg local --test_output all --test_env=COCKROACH_WORKSPACE= --test_arg -rewrite --sandbox_writable_path=pkg/sql/logictest //pkg/sql/logictest:logictest_test --test_filter 'TestLogic/^local$/^auto_span_config_reconciliation$/' diff --git a/pkg/cmd/dev/testdata/datadriven/ui b/pkg/cmd/dev/testdata/datadriven/ui new file mode 100644 index 000000000000..51c5553b45f0 --- /dev/null +++ b/pkg/cmd/dev/testdata/datadriven/ui @@ -0,0 +1,39 @@ +dev ui watch +---- +bazel build //pkg/ui/workspaces/db-console/src/js:crdb-protobuf-client 
//pkg/ui/workspaces/db-console/ccl/src/js:crdb-protobuf-client-ccl +which yarn +bazel info workspace --color=no +yarn --silent --cwd pkg/ui/workspaces/cluster-ui build:watch +yarn --silent --cwd pkg/ui/workspaces/db-console webpack-dev-server --config webpack.app.js --mode development --env.WEBPACK_SERVE --env.dist=ccl --env.target=http://localhost:8080 --port 3000 + +dev ui watch --oss +---- +bazel build //pkg/ui/workspaces/db-console/src/js:crdb-protobuf-client +which yarn +bazel info workspace --color=no +yarn --silent --cwd pkg/ui/workspaces/cluster-ui build:watch +yarn --silent --cwd pkg/ui/workspaces/db-console webpack-dev-server --config webpack.app.js --mode development --env.WEBPACK_SERVE --env.dist=oss --env.target=http://localhost:8080 --port 3000 + +dev ui watch --secure +---- +bazel build //pkg/ui/workspaces/db-console/src/js:crdb-protobuf-client //pkg/ui/workspaces/db-console/ccl/src/js:crdb-protobuf-client-ccl +which yarn +bazel info workspace --color=no +yarn --silent --cwd pkg/ui/workspaces/cluster-ui build:watch +yarn --silent --cwd pkg/ui/workspaces/db-console webpack-dev-server --config webpack.app.js --mode development --env.WEBPACK_SERVE --env.dist=ccl --env.target=http://localhost:8080 --port 3000 --https + +dev ui watch --db http://example.crdb.io:4848 +---- +bazel build //pkg/ui/workspaces/db-console/src/js:crdb-protobuf-client //pkg/ui/workspaces/db-console/ccl/src/js:crdb-protobuf-client-ccl +which yarn +bazel info workspace --color=no +yarn --silent --cwd pkg/ui/workspaces/cluster-ui build:watch +yarn --silent --cwd pkg/ui/workspaces/db-console webpack-dev-server --config webpack.app.js --mode development --env.WEBPACK_SERVE --env.dist=ccl --env.target=http://example.crdb.io:4848 --port 3000 + +dev ui watch --port 12345 +---- +bazel build //pkg/ui/workspaces/db-console/src/js:crdb-protobuf-client //pkg/ui/workspaces/db-console/ccl/src/js:crdb-protobuf-client-ccl +which yarn +bazel info workspace --color=no +yarn --silent --cwd pkg/ui/workspaces/cluster-ui build:watch +yarn --silent --cwd pkg/ui/workspaces/db-console webpack-dev-server --config webpack.app.js --mode development --env.WEBPACK_SERVE --env.dist=ccl --env.target=http://localhost:8080 --port 12345 diff --git a/pkg/cmd/dev/testdata/generate.txt b/pkg/cmd/dev/testdata/generate.txt deleted file mode 100644 index 5ca41f0e7472..000000000000 --- a/pkg/cmd/dev/testdata/generate.txt +++ /dev/null @@ -1,31 +0,0 @@ -dev gen bazel ----- -go/src/github.com/cockroachdb/cockroach/build/bazelutil/bazel-generate.sh - -dev gen docs ----- -cat go/src/github.com/cockroachdb/cockroach/docs/generated/bazel_targets.txt -bazel build //docs/generated:gen-logging-md //docs/generated/sql -bazel info bazel-bin --color=no -bazel query --output=xml //docs/generated:gen-logging-md -cp /private/var/tmp/_bazel/99e666e4e674209ecdb66b46371278df/execroot/cockroach/bazel-out/darwin-fastbuild/bin/docs/generated/logging.md go/src/github.com/cockroachdb/cockroach/docs/generated/logging.md -bazel query --output=xml //docs/generated/sql -cp /private/var/tmp/_bazel/99e666e4e674209ecdb66b46371278df/execroot/cockroach/bazel-out/darwin-fastbuild/bin/docs/generated/sql/aggregates.md go/src/github.com/cockroachdb/cockroach/docs/generated/sql/aggregates.md -cp /private/var/tmp/_bazel/99e666e4e674209ecdb66b46371278df/execroot/cockroach/bazel-out/darwin-fastbuild/bin/docs/generated/sql/functions.md go/src/github.com/cockroachdb/cockroach/docs/generated/sql/functions.md -cp 
/private/var/tmp/_bazel/99e666e4e674209ecdb66b46371278df/execroot/cockroach/bazel-out/darwin-fastbuild/bin/docs/generated/sql/operators.md go/src/github.com/cockroachdb/cockroach/docs/generated/sql/operators.md -cp /private/var/tmp/_bazel/99e666e4e674209ecdb66b46371278df/execroot/cockroach/bazel-out/darwin-fastbuild/bin/docs/generated/sql/window_functions.md go/src/github.com/cockroachdb/cockroach/docs/generated/sql/window_functions.md -go/src/github.com/cockroachdb/cockroach/build/bazelutil/generate_redact_safe.sh -echo MOCK_REDACT_SAFE_OUTPUT > go/src/github.com/cockroachdb/cockroach/docs/generated/redact_safe.md - -dev gen go ----- -bazel build //:go_path --show_result=0 -bazel info bazel-bin --color=no -git status --ignored --short go/src/github.com/cockroachdb/cockroach/pkg -rm pkg/file_to_delete.go -find /private/var/tmp/_bazel/99e666e4e674209ecdb66b46371278df/execroot/cockroach/bazel-out/darwin-fastbuild/bin/go_path/src/github.com/cockroachdb/cockroach -name *.go -cat go/src/github.com/cockroachdb/cockroach/build/bazelutil/checked_in_genfiles.txt -cp /private/var/tmp/_bazel/99e666e4e674209ecdb66b46371278df/execroot/cockroach/bazel-out/darwin-fastbuild/bin/go_path/src/github.com/cockroachdb/cockroach/pkg/kv/kvserver/storage_services.pb.go go/src/github.com/cockroachdb/cockroach/pkg/kv/kvserver/storage_services.pb.go -cp /private/var/tmp/_bazel/99e666e4e674209ecdb66b46371278df/execroot/cockroach/bazel-out/darwin-fastbuild/bin/go_path/src/github.com/cockroachdb/cockroach/pkg/roachpb/batch_generated-gen.go go/src/github.com/cockroachdb/cockroach/pkg/roachpb/batch_generated.go -cp /private/var/tmp/_bazel/99e666e4e674209ecdb66b46371278df/execroot/cockroach/bazel-out/darwin-fastbuild/bin/go_path/src/github.com/cockroachdb/cockroach/pkg/sql/opt/optgen/lang/expr-gen.og.go go/src/github.com/cockroachdb/cockroach/pkg/sql/opt/optgen/lang/expr.og.go -cp /private/var/tmp/_bazel/99e666e4e674209ecdb66b46371278df/execroot/cockroach/bazel-out/darwin-fastbuild/bin/go_path/src/github.com/cockroachdb/cockroach/pkg/sql/opt/optgen/lang/operator-gen.og.go go/src/github.com/cockroachdb/cockroach/pkg/sql/opt/optgen/lang/operator.og.go diff --git a/pkg/cmd/dev/testdata/lint.txt b/pkg/cmd/dev/testdata/lint.txt deleted file mode 100644 index 3fc96a5483f5..000000000000 --- a/pkg/cmd/dev/testdata/lint.txt +++ /dev/null @@ -1,7 +0,0 @@ -dev lint ----- -bazel run --config=test //build/bazelutil:lint -- -test.v - -dev lint --short --timeout=5m ----- -bazel run --config=test //build/bazelutil:lint -- -test.v -test.short -test.timeout 5m0s diff --git a/pkg/cmd/dev/testdata/logic.txt b/pkg/cmd/dev/testdata/logic.txt deleted file mode 100644 index cacfb9cd4e96..000000000000 --- a/pkg/cmd/dev/testdata/logic.txt +++ /dev/null @@ -1,13 +0,0 @@ -dev testlogic ----- -bazel test --test_env=GOTRACEBACK=all --test_output errors //pkg/sql/logictest:logictest_test --test_filter TestLogic/// -bazel test --test_env=GOTRACEBACK=all --test_output errors //pkg/ccl/logictestccl:logictestccl_test --test_filter 'Test(CCL|Tenant)Logic///' -bazel test --test_env=GOTRACEBACK=all --test_output errors //pkg/sql/opt/exec/execbuilder:execbuilder_test --test_filter TestExecBuild/// - -dev testlogic base --files=prepare|fk --subtests=20042 --config=local ----- -bazel test --test_env=GOTRACEBACK=all --test_arg -show-sql --test_arg -config --test_arg local --test_output errors //pkg/sql/logictest:logictest_test --test_filter 'TestLogic/^local$/^prepare|fk$/20042' - -dev testlogic base --files=auto_span_config_reconciliation_job --config=local -v 
--show-logs --timeout=50s --rewrite ----- -bazel test --test_env=GOTRACEBACK=all --test_arg -test.v --test_arg -show-logs --test_timeout=50 --test_arg -show-sql --test_arg -config --test_arg local --test_output all --test_env=COCKROACH_WORKSPACE=go/src/github.com/cockroachdb/cockroach --test_arg -rewrite --sandbox_writable_path=go/src/github.com/cockroachdb/cockroach/pkg/sql/logictest //pkg/sql/logictest:logictest_test --test_filter 'TestLogic/^local$/^auto_span_config_reconciliation_job$/' diff --git a/pkg/cmd/dev/testdata/recorderdriven/builder b/pkg/cmd/dev/testdata/recorderdriven/builder new file mode 100644 index 000000000000..cdf77030a072 --- /dev/null +++ b/pkg/cmd/dev/testdata/recorderdriven/builder @@ -0,0 +1,19 @@ +dev builder +---- +which docker +id +bazel info workspace --color=no +cat crdb-checkout/build/teamcity-bazel-support.sh +docker volume inspect bzlhome +mkdir crdb-checkout/artifacts +docker run --rm -i -v crdb-checkout:/cockroach --workdir=/cockroach -v crdb-checkout/artifacts:/artifacts -v bzlhome:/home/roach:delegated -u 503:503 cockroachdb/bazel:20220121-121551 + +dev builder echo hi +---- +which docker +id +bazel info workspace --color=no +cat crdb-checkout/build/teamcity-bazel-support.sh +docker volume inspect bzlhome +mkdir crdb-checkout/artifacts +docker run --rm -i -v crdb-checkout:/cockroach --workdir=/cockroach -v crdb-checkout/artifacts:/artifacts -v bzlhome:/home/roach:delegated -u 503:503 cockroachdb/bazel:20220121-121551 echo hi diff --git a/pkg/cmd/dev/testdata/recorderdriven/builder.rec b/pkg/cmd/dev/testdata/recorderdriven/builder.rec new file mode 100644 index 000000000000..0c41ef19354d --- /dev/null +++ b/pkg/cmd/dev/testdata/recorderdriven/builder.rec @@ -0,0 +1,278 @@ +which docker +---- +/usr/local/bin/docker + +id +---- +503:20 + +cat crdb-checkout/build/teamcity-bazel-support.sh +---- +---- +# FYI: You can run `./dev builder` to run this Docker image. :) +# `dev` depends on this variable! Don't change the name or format unless you +# also update `dev` accordingly. +BAZEL_IMAGE=cockroachdb/bazel:20220121-121551 + +# Call `run_bazel $NAME_OF_SCRIPT` to start an appropriately-configured Docker +# container with the `cockroachdb/bazel` image running the given script. +# BAZEL_SUPPORT_EXTRA_DOCKER_ARGS will be passed on to `docker run` unchanged. +run_bazel() { + if [ -z "${root:-}" ] + then + echo '$root is not set; please source teamcity-support.sh' + exit 1 + fi + + # Set up volumes. + # TeamCity uses git alternates, so make sure we mount the path to the real + # git objects. + teamcity_alternates="/home/agent/system/git" + vols="--volume ${teamcity_alternates}:${teamcity_alternates}:ro" + artifacts_dir=$root/artifacts + mkdir -p "$artifacts_dir" + vols="${vols} --volume ${artifacts_dir}:/artifacts" + cache=/home/agent/.bzlhome + mkdir -p $cache + vols="${vols} --volume ${root}:/go/src/github.com/cockroachdb/cockroach" + vols="${vols} --volume ${cache}:/home/roach" + + docker run -i ${tty-} --rm --init \ + -u "$(id -u):$(id -g)" \ + --workdir="/go/src/github.com/cockroachdb/cockroach" \ + ${BAZEL_SUPPORT_EXTRA_DOCKER_ARGS:+$BAZEL_SUPPORT_EXTRA_DOCKER_ARGS} \ + ${vols} \ + $BAZEL_IMAGE "$@" +} + +# local copy of tc_release_branch from teamcity-support.sh to avoid imports. +_tc_release_branch() { + [[ "$TC_BUILD_BRANCH" == master || "$TC_BUILD_BRANCH" == release-* || "$TC_BUILD_BRANCH" == provisional_* ]] +} + +# process_test_json processes logs and submits failures to GitHub +# Requires GITHUB_API_TOKEN set for the release branches. 
+# Accepts 5 arguments: +# testfilter: path to the `testfilter` executable, usually +# `$BAZEL_BIN/pkg/cmd/testfilter/testfilter_/testfilter` +# github_post: path to the `github-post` executable, usually +# `$BAZEL_BIN/pkg/cmd/github-post/github-post_/github-post` +# artifacts_dir: usually `/artifacts` +# test_json: path to test's JSON output, usually generated by `rules_go`'s and +# `GO_TEST_JSON_OUTPUT_FILE`. +# create_tarball: whether to create a tarball with full logs. If the test's +# exit code is passed, the tarball is generated on failures. +process_test_json() { + local testfilter=$1 + local github_post=$2 + local artifacts_dir=$3 + local test_json=$4 + local create_tarball=$5 + + $testfilter -mode=strip < "$test_json" | $testfilter -mode=omit | $testfilter -mode=convert > "$artifacts_dir"/failures.txt + failures_size=$(stat --format=%s "$artifacts_dir"/failures.txt) + if [ $failures_size = 0 ]; then + rm -f "$artifacts_dir"/failures.txt + fi + + if _tc_release_branch; then + if [ -z "${GITHUB_API_TOKEN-}" ]; then + # GITHUB_API_TOKEN must be in the env or github-post will barf if it's + # ever asked to post, so enforce that on all runs. + # The way this env var is made available here is quite tricky. The build + # calling this method is usually a build that is invoked from PRs, so it + # can't have secrets available to it (for the PR could modify + # build/teamcity-* to leak the secret). Instead, we provide the secrets + # to a higher-level job (Publish Bleeding Edge) and use TeamCity magic to + # pass that env var through when it's there. This means we won't have the + # env var on PR builds, but we'll have it for builds that are triggered + # from the release branches. + echo "GITHUB_API_TOKEN must be set" + exit 1 + else + $github_post < "$test_json" + fi + fi + + if [ "$create_tarball" -ne 0 ]; then + # Keep the debug file around for failed builds. Compress it to avoid + # clogging the agents with stuff we'll hopefully rarely ever need to + # look at. + # If the process failed, also save the full human-readable output. This is + # helpful in cases in which tests timed out, where it's difficult to blame + # the failure on any particular test. It's also a good alternative to poking + # around in test.json.txt itself when anything else we don't handle well happens, + # whatever that may be. + $testfilter -mode=convert < "$test_json" > "$artifacts_dir"/full_output.txt + (cd "$artifacts_dir" && tar --strip-components 1 -czf full_output.tgz full_output.txt $(basename $test_json)) + rm -rf "$artifacts_dir"/full_output.txt + fi + + # Some unit tests test automatic ballast creation. These ballasts can be + # larger than the maximum artifact size. Remove any artifacts with the + # EMERGENCY_BALLAST filename. + find "$artifacts_dir" -name "EMERGENCY_BALLAST" -delete +} + +---- +---- + +docker volume inspect bzlhome +---- +[ + { + "CreatedAt": "2022-02-04T03:10:06Z", + "Driver": "local", + "Labels": {}, + "Mountpoint": "/var/lib/docker/volumes/bzlhome/_data", + "Name": "bzlhome", + "Options": {}, + "Scope": "local" + } +] + +mkdir crdb-checkout/artifacts +---- + +docker run --rm -i -v crdb-checkout:/cockroach --workdir=/cockroach -v crdb-checkout/artifacts:/artifacts -v bzlhome:/home/roach:delegated -u 503:503 cockroachdb/bazel:20220121-121551 +---- + +which docker +---- +/usr/local/bin/docker + +id +---- +503:20 + +cat crdb-checkout/build/teamcity-bazel-support.sh +---- +---- +# FYI: You can run `./dev builder` to run this Docker image. :) +# `dev` depends on this variable! 
Don't change the name or format unless you +# also update `dev` accordingly. +BAZEL_IMAGE=cockroachdb/bazel:20220121-121551 + +# Call `run_bazel $NAME_OF_SCRIPT` to start an appropriately-configured Docker +# container with the `cockroachdb/bazel` image running the given script. +# BAZEL_SUPPORT_EXTRA_DOCKER_ARGS will be passed on to `docker run` unchanged. +run_bazel() { + if [ -z "${root:-}" ] + then + echo '$root is not set; please source teamcity-support.sh' + exit 1 + fi + + # Set up volumes. + # TeamCity uses git alternates, so make sure we mount the path to the real + # git objects. + teamcity_alternates="/home/agent/system/git" + vols="--volume ${teamcity_alternates}:${teamcity_alternates}:ro" + artifacts_dir=$root/artifacts + mkdir -p "$artifacts_dir" + vols="${vols} --volume ${artifacts_dir}:/artifacts" + cache=/home/agent/.bzlhome + mkdir -p $cache + vols="${vols} --volume ${root}:/go/src/github.com/cockroachdb/cockroach" + vols="${vols} --volume ${cache}:/home/roach" + + docker run -i ${tty-} --rm --init \ + -u "$(id -u):$(id -g)" \ + --workdir="/go/src/github.com/cockroachdb/cockroach" \ + ${BAZEL_SUPPORT_EXTRA_DOCKER_ARGS:+$BAZEL_SUPPORT_EXTRA_DOCKER_ARGS} \ + ${vols} \ + $BAZEL_IMAGE "$@" +} + +# local copy of tc_release_branch from teamcity-support.sh to avoid imports. +_tc_release_branch() { + [[ "$TC_BUILD_BRANCH" == master || "$TC_BUILD_BRANCH" == release-* || "$TC_BUILD_BRANCH" == provisional_* ]] +} + +# process_test_json processes logs and submits failures to GitHub +# Requires GITHUB_API_TOKEN set for the release branches. +# Accepts 5 arguments: +# testfilter: path to the `testfilter` executable, usually +# `$BAZEL_BIN/pkg/cmd/testfilter/testfilter_/testfilter` +# github_post: path to the `github-post` executable, usually +# `$BAZEL_BIN/pkg/cmd/github-post/github-post_/github-post` +# artifacts_dir: usually `/artifacts` +# test_json: path to test's JSON output, usually generated by `rules_go`'s and +# `GO_TEST_JSON_OUTPUT_FILE`. +# create_tarball: whether to create a tarball with full logs. If the test's +# exit code is passed, the tarball is generated on failures. +process_test_json() { + local testfilter=$1 + local github_post=$2 + local artifacts_dir=$3 + local test_json=$4 + local create_tarball=$5 + + $testfilter -mode=strip < "$test_json" | $testfilter -mode=omit | $testfilter -mode=convert > "$artifacts_dir"/failures.txt + failures_size=$(stat --format=%s "$artifacts_dir"/failures.txt) + if [ $failures_size = 0 ]; then + rm -f "$artifacts_dir"/failures.txt + fi + + if _tc_release_branch; then + if [ -z "${GITHUB_API_TOKEN-}" ]; then + # GITHUB_API_TOKEN must be in the env or github-post will barf if it's + # ever asked to post, so enforce that on all runs. + # The way this env var is made available here is quite tricky. The build + # calling this method is usually a build that is invoked from PRs, so it + # can't have secrets available to it (for the PR could modify + # build/teamcity-* to leak the secret). Instead, we provide the secrets + # to a higher-level job (Publish Bleeding Edge) and use TeamCity magic to + # pass that env var through when it's there. This means we won't have the + # env var on PR builds, but we'll have it for builds that are triggered + # from the release branches. + echo "GITHUB_API_TOKEN must be set" + exit 1 + else + $github_post < "$test_json" + fi + fi + + if [ "$create_tarball" -ne 0 ]; then + # Keep the debug file around for failed builds. 
Compress it to avoid + # clogging the agents with stuff we'll hopefully rarely ever need to + # look at. + # If the process failed, also save the full human-readable output. This is + # helpful in cases in which tests timed out, where it's difficult to blame + # the failure on any particular test. It's also a good alternative to poking + # around in test.json.txt itself when anything else we don't handle well happens, + # whatever that may be. + $testfilter -mode=convert < "$test_json" > "$artifacts_dir"/full_output.txt + (cd "$artifacts_dir" && tar --strip-components 1 -czf full_output.tgz full_output.txt $(basename $test_json)) + rm -rf "$artifacts_dir"/full_output.txt + fi + + # Some unit tests test automatic ballast creation. These ballasts can be + # larger than the maximum artifact size. Remove any artifacts with the + # EMERGENCY_BALLAST filename. + find "$artifacts_dir" -name "EMERGENCY_BALLAST" -delete +} + +---- +---- + +docker volume inspect bzlhome +---- +[ + { + "CreatedAt": "2022-02-04T03:10:06Z", + "Driver": "local", + "Labels": {}, + "Mountpoint": "/var/lib/docker/volumes/bzlhome/_data", + "Name": "bzlhome", + "Options": {}, + "Scope": "local" + } +] + +mkdir crdb-checkout/artifacts +---- + +docker run --rm -i -v crdb-checkout:/cockroach --workdir=/cockroach -v crdb-checkout/artifacts:/artifacts -v bzlhome:/home/roach:delegated -u 503:503 cockroachdb/bazel:20220121-121551 echo hi +---- + diff --git a/pkg/cmd/dev/testdata/recorderdriven/dev-build b/pkg/cmd/dev/testdata/recorderdriven/dev-build new file mode 100644 index 000000000000..43089f074760 --- /dev/null +++ b/pkg/cmd/dev/testdata/recorderdriven/dev-build @@ -0,0 +1,15 @@ +dev build pkg/roachpb:roachpb_test --skip-generate +---- +bazel query pkg/roachpb:roachpb_test --output=label_kind +bazel build //pkg/roachpb:roachpb_test --config=test +bazel info workspace --color=no +mkdir crdb-checkout/bin +bazel info bazel-bin --color=no + +# TODO(irfansharif): This test case is skipped -- it's too verbose given it +# scans through the sandbox for each generated file and copies them over +# one-by-one manually. Probably we want to push the logic down into bazel +# itself the same way we do now for protobufs. +# +# dev build cockroach-short +# ---- diff --git a/pkg/cmd/dev/testdata/recorderdriven/dev-build.rec b/pkg/cmd/dev/testdata/recorderdriven/dev-build.rec new file mode 100644 index 000000000000..65673788e72f --- /dev/null +++ b/pkg/cmd/dev/testdata/recorderdriven/dev-build.rec @@ -0,0 +1,10 @@ +bazel query pkg/roachpb:roachpb_test --output=label_kind +---- +go_test rule //pkg/roachpb:roachpb_test + +bazel build //pkg/roachpb:roachpb_test --config=test +---- + +mkdir crdb-checkout/bin +---- + diff --git a/pkg/cmd/dev/testdata/recorderdriven/generate b/pkg/cmd/dev/testdata/recorderdriven/generate new file mode 100644 index 000000000000..c6bd0add0448 --- /dev/null +++ b/pkg/cmd/dev/testdata/recorderdriven/generate @@ -0,0 +1,15 @@ +# TODO(irfansharif): This test case is skipped -- it's too verbose given it +# scans through the sandbox for each generated file and copies them over +# one-by-one manually. +# +# dev gen docs +# ---- + +# TODO(irfansharif): This test case is skipped -- it's too verbose given it +# scans through the sandbox for each generated file and copies them over +# one-by-one manually. Probably for this and above, we want to push the logic +# down into bazel itself the same way we do now for protobufs. Alternatively, +# stop checking in these generated doc files. 
+# +# dev gen go +# ---- diff --git a/pkg/cmd/dev/testdata/recorderdriven/generate.rec b/pkg/cmd/dev/testdata/recorderdriven/generate.rec new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/pkg/cmd/dev/testdata/recording/bench.txt b/pkg/cmd/dev/testdata/recording/bench.txt deleted file mode 100644 index cc43393b5358..000000000000 --- a/pkg/cmd/dev/testdata/recording/bench.txt +++ /dev/null @@ -1,24 +0,0 @@ -find pkg/util -type d ----- -pkg/util -pkg/util/uuid - -git grep -l '^func Benchmark' -- 'pkg/util/*_test.go' ----- -pkg/util/topk_test.go -pkg/util/uuid/benchmark_fast_test.go -pkg/util/uuid/codec_test.go -pkg/util/uuid/generator_test.go - -bazel run --config=test --test_sharding_strategy=disabled //pkg/util:util_test -- -test.run=- -test.bench=. ----- - -bazel run --config=test --test_sharding_strategy=disabled //pkg/util/uuid:uuid_test -- -test.run=- -test.bench=. ----- - -find pkg/sql/parser -type d ----- -pkg/sql/parser - -bazel run --config=test --test_sharding_strategy=disabled //pkg/sql/parser:parser_test -- -test.run=- -test.bench=BenchmarkParse ----- diff --git a/pkg/cmd/dev/testdata/recording/build.txt b/pkg/cmd/dev/testdata/recording/build.txt deleted file mode 100644 index c2a9d09667f4..000000000000 --- a/pkg/cmd/dev/testdata/recording/build.txt +++ /dev/null @@ -1,223 +0,0 @@ -bazel build //pkg/cmd/cockroach-short:cockroach-short ----- - -bazel info workspace --color=no ----- -go/src/github.com/cockroachdb/cockroach - -mkdir go/src/github.com/cockroachdb/cockroach/bin ----- - -bazel info bazel-bin --color=no ----- -/private/var/tmp/_bazel/99e666e4e674209ecdb66b46371278df/execroot/cockroach/bazel-out/darwin-fastbuild/bin - -rm go/src/github.com/cockroachdb/cockroach/cockroach-short ----- - -ln -s /private/var/tmp/_bazel/99e666e4e674209ecdb66b46371278df/execroot/cockroach/bazel-out/darwin-fastbuild/bin/pkg/cmd/cockroach-short/cockroach-short_/cockroach-short go/src/github.com/cockroachdb/cockroach/cockroach-short ----- - -bazel build --local_cpu_resources=12 //pkg/cmd/cockroach-short:cockroach-short ----- - -mkdir go/src/github.com/cockroachdb/cockroach/bin ----- - -bazel info bazel-bin --color=no ----- -/private/var/tmp/_bazel/99e666e4e674209ecdb66b46371278df/execroot/cockroach/bazel-out/darwin-fastbuild/bin - -rm go/src/github.com/cockroachdb/cockroach/cockroach-short ----- - -ln -s /private/var/tmp/_bazel/99e666e4e674209ecdb66b46371278df/execroot/cockroach/bazel-out/darwin-fastbuild/bin/pkg/cmd/cockroach-short/cockroach-short_/cockroach-short go/src/github.com/cockroachdb/cockroach/cockroach-short ----- - -bazel build //pkg/cmd/cockroach-short:cockroach-short ----- - -mkdir go/src/github.com/cockroachdb/cockroach/bin ----- - -bazel info bazel-bin --color=no ----- -/private/var/tmp/_bazel/99e666e4e674209ecdb66b46371278df/execroot/cockroach/bazel-out/darwin-fastbuild/bin - -rm go/src/github.com/cockroachdb/cockroach/cockroach-short ----- - -ln -s /private/var/tmp/_bazel/99e666e4e674209ecdb66b46371278df/execroot/cockroach/bazel-out/darwin-fastbuild/bin/pkg/cmd/cockroach-short/cockroach-short_/cockroach-short go/src/github.com/cockroachdb/cockroach/cockroach-short ----- - -bazel build --remote_local_fallback --remote_cache=grpc://127.0.0.1:9090 --experimental_remote_downloader=grpc://127.0.0.1:9090 //pkg/cmd/cockroach-short:cockroach-short ----- - -mkdir go/src/github.com/cockroachdb/cockroach/bin ----- - -bazel info bazel-bin --color=no ----- -/private/var/tmp/_bazel/99e666e4e674209ecdb66b46371278df/execroot/cockroach/bazel-out/darwin-fastbuild/bin - -rm 
go/src/github.com/cockroachdb/cockroach/cockroach-short ----- - -ln -s /private/var/tmp/_bazel/99e666e4e674209ecdb66b46371278df/execroot/cockroach/bazel-out/darwin-fastbuild/bin/pkg/cmd/cockroach-short/cockroach-short_/cockroach-short go/src/github.com/cockroachdb/cockroach/cockroach-short ----- - -bazel build //pkg/cmd/cockroach-short:cockroach-short //:go_path ----- - -mkdir go/src/github.com/cockroachdb/cockroach/bin ----- - -bazel info bazel-bin --color=no ----- -/private/var/tmp/_bazel/99e666e4e674209ecdb66b46371278df/execroot/cockroach/bazel-out/darwin-fastbuild/bin - -rm go/src/github.com/cockroachdb/cockroach/cockroach-short ----- - -ln -s /private/var/tmp/_bazel/99e666e4e674209ecdb66b46371278df/execroot/cockroach/bazel-out/darwin-fastbuild/bin/pkg/cmd/cockroach-short/cockroach-short_/cockroach-short go/src/github.com/cockroachdb/cockroach/cockroach-short ----- - -git status --ignored --short go/src/github.com/cockroachdb/cockroach/pkg ----- ----- - M pkg/some_modified_file.go -?? pkg/some_unknown_file.go -!! pkg/file_to_delete.go -!! pkg/zcgo_flags_file_to_ignore.go -!! pkg/ui/node_modules/ ----- ----- - -rm pkg/file_to_delete.go ----- - -find /private/var/tmp/_bazel/99e666e4e674209ecdb66b46371278df/execroot/cockroach/bazel-out/darwin-fastbuild/bin/go_path/src/github.com/cockroachdb/cockroach -name *.go ----- ----- -/private/var/tmp/_bazel/99e666e4e674209ecdb66b46371278df/execroot/cockroach/bazel-out/darwin-fastbuild/bin/go_path/src/github.com/cockroachdb/cockroach/pkg/kv/kvserver/storage_services.pb.go -/private/var/tmp/_bazel/99e666e4e674209ecdb66b46371278df/execroot/cockroach/bazel-out/darwin-fastbuild/bin/go_path/src/github.com/cockroachdb/cockroach/pkg/roachpb/batch_generated-gen.go -/private/var/tmp/_bazel/99e666e4e674209ecdb66b46371278df/execroot/cockroach/bazel-out/darwin-fastbuild/bin/go_path/src/github.com/cockroachdb/cockroach/pkg/sql/opt/optgen/lang/expr-gen.og.go -/private/var/tmp/_bazel/99e666e4e674209ecdb66b46371278df/execroot/cockroach/bazel-out/darwin-fastbuild/bin/go_path/src/github.com/cockroachdb/cockroach/pkg/sql/opt/optgen/lang/operator-gen.og.go ----- ----- - -cat go/src/github.com/cockroachdb/cockroach/build/bazelutil/checked_in_genfiles.txt ----- ----- -# Comment -//pkg/roachpb:gen-batch-generated|batch_generated-gen.go|batch_generated.go -//pkg/sql/opt/optgen/lang:gen-expr|expr-gen.og.go|expr.og.go -//pkg/sql/opt/optgen/lang:gen-operator|operator-gen.og.go|operator.og.go ----- ----- - -cp /private/var/tmp/_bazel/99e666e4e674209ecdb66b46371278df/execroot/cockroach/bazel-out/darwin-fastbuild/bin/go_path/src/github.com/cockroachdb/cockroach/pkg/kv/kvserver/storage_services.pb.go go/src/github.com/cockroachdb/cockroach/pkg/kv/kvserver/storage_services.pb.go ----- - -cp /private/var/tmp/_bazel/99e666e4e674209ecdb66b46371278df/execroot/cockroach/bazel-out/darwin-fastbuild/bin/go_path/src/github.com/cockroachdb/cockroach/pkg/roachpb/batch_generated-gen.go go/src/github.com/cockroachdb/cockroach/pkg/roachpb/batch_generated.go ----- - -cp /private/var/tmp/_bazel/99e666e4e674209ecdb66b46371278df/execroot/cockroach/bazel-out/darwin-fastbuild/bin/go_path/src/github.com/cockroachdb/cockroach/pkg/sql/opt/optgen/lang/expr-gen.og.go go/src/github.com/cockroachdb/cockroach/pkg/sql/opt/optgen/lang/expr.og.go ----- - -cp /private/var/tmp/_bazel/99e666e4e674209ecdb66b46371278df/execroot/cockroach/bazel-out/darwin-fastbuild/bin/go_path/src/github.com/cockroachdb/cockroach/pkg/sql/opt/optgen/lang/operator-gen.og.go 
go/src/github.com/cockroachdb/cockroach/pkg/sql/opt/optgen/lang/operator.og.go ----- - -bazel build //pkg/cmd/cockroach-short:cockroach-short -s ----- - -mkdir go/src/github.com/cockroachdb/cockroach/bin ----- - -bazel info bazel-bin --color=no ----- -/private/var/tmp/_bazel/99e666e4e674209ecdb66b46371278df/execroot/cockroach/bazel-out/darwin-fastbuild/bin - -rm go/src/github.com/cockroachdb/cockroach/cockroach-short ----- - -ln -s /private/var/tmp/_bazel/99e666e4e674209ecdb66b46371278df/execroot/cockroach/bazel-out/darwin-fastbuild/bin/pkg/cmd/cockroach-short/cockroach-short_/cockroach-short go/src/github.com/cockroachdb/cockroach/cockroach-short ----- - -bazel run @nodejs//:yarn -- --check-files --cwd pkg/ui --offline ----- - -bazel build //pkg/cmd/cockroach:cockroach --config=with_ui --verbose_failures --sandbox_debug ----- - -mkdir go/src/github.com/cockroachdb/cockroach/bin ----- - -bazel info bazel-bin --color=no ----- -/private/var/tmp/_bazel/99e666e4e674209ecdb66b46371278df/execroot/cockroach/bazel-out/darwin-fastbuild/bin - -rm go/src/github.com/cockroachdb/cockroach/cockroach ----- - -ln -s /private/var/tmp/_bazel/99e666e4e674209ecdb66b46371278df/execroot/cockroach/bazel-out/darwin-fastbuild/bin/pkg/cmd/cockroach/cockroach_/cockroach go/src/github.com/cockroachdb/cockroach/cockroach ----- - -bazel query @com_github_cockroachdb_stress//:stress --output=label_kind ----- -go_binary rule @com_github_cockroachdb_stress//:stress - -bazel build @com_github_cockroachdb_stress//:stress ----- - -mkdir go/src/github.com/cockroachdb/cockroach/bin ----- - -bazel info bazel-bin --color=no ----- -/private/var/tmp/_bazel/99e666e4e674209ecdb66b46371278df/execroot/cockroach/bazel-out/darwin-fastbuild/bin - -rm go/src/github.com/cockroachdb/cockroach/bin/stress ----- - -ln -s /private/var/tmp/_bazel/99e666e4e674209ecdb66b46371278df/execroot/cockroach/bazel-out/darwin-fastbuild/bin/external/com_github_cockroachdb_stress/stress_/stress go/src/github.com/cockroachdb/cockroach/bin/stress ----- - -bazel query pkg/roachpb:roachpb_test --output=label_kind ----- -go_test rule //pkg/roachpb:roachpb_test - -bazel build //pkg/roachpb:roachpb_test --config=test ----- - -mkdir go/src/github.com/cockroachdb/cockroach/bin ----- - -bazel info bazel-bin --color=no ----- -/private/var/tmp/_bazel/99e666e4e674209ecdb66b46371278df/execroot/cockroach/bazel-out/darwin-fastbuild/bin - -bazel query pkg/foo/... 
--output=label_kind ----- ----- -go_binary rule //pkg/foo:bar -go_test rule //pkg/foo:baz -go_proto_library rule //pkg/foo:bar_proto_library ----- ----- - -bazel build //pkg/foo:bar //pkg/foo:baz --config=test ----- - -mkdir go/src/github.com/cockroachdb/cockroach/bin ----- - -bazel info bazel-bin --color=no ----- -/private/var/tmp/_bazel/99e666e4e674209ecdb66b46371278df/execroot/cockroach/bazel-out/darwin-fastbuild/bin - -rm go/src/github.com/cockroachdb/cockroach/bin/bar ----- - -ln -s /private/var/tmp/_bazel/99e666e4e674209ecdb66b46371278df/execroot/cockroach/bazel-out/darwin-fastbuild/bin/pkg/foo/bar_/bar go/src/github.com/cockroachdb/cockroach/bin/bar ----- diff --git a/pkg/cmd/dev/testdata/recording/builder.txt b/pkg/cmd/dev/testdata/recording/builder.txt deleted file mode 100644 index 4739fb3f9ae1..000000000000 --- a/pkg/cmd/dev/testdata/recording/builder.txt +++ /dev/null @@ -1,33 +0,0 @@ -id ----- -1001:1002 - -cat go/src/github.com/cockroachdb/cockroach/build/teamcity-bazel-support.sh ----- -BAZEL_IMAGE=mock_bazel_image:1234 - -docker volume inspect bzlhome ----- - -mkdir go/src/github.com/cockroachdb/cockroach/artifacts ----- - -docker run --rm -it -v go/src/github.com/cockroachdb/cockroach:/cockroach --workdir=/cockroach -v go/src/github.com/cockroachdb/cockroach/artifacts:/artifacts -v bzlhome:/home/roach:delegated -u 1001:1002 mock_bazel_image:1234 ----- - -id ----- -1001:1002 - -cat go/src/github.com/cockroachdb/cockroach/build/teamcity-bazel-support.sh ----- -BAZEL_IMAGE=mock_bazel_image:1234 - -docker volume inspect bzlhome ----- - -mkdir go/src/github.com/cockroachdb/cockroach/artifacts ----- - -docker run --rm -it -v go/src/github.com/cockroachdb/cockroach:/cockroach --workdir=/cockroach -v go/src/github.com/cockroachdb/cockroach/artifacts:/artifacts -v bzlhome:/home/roach:delegated -u 1001:1002 mock_bazel_image:1234 echo hi ----- diff --git a/pkg/cmd/dev/testdata/recording/generate.txt b/pkg/cmd/dev/testdata/recording/generate.txt deleted file mode 100644 index 24222cb86aa1..000000000000 --- a/pkg/cmd/dev/testdata/recording/generate.txt +++ /dev/null @@ -1,149 +0,0 @@ -go/src/github.com/cockroachdb/cockroach/build/bazelutil/bazel-generate.sh ----- - -cat go/src/github.com/cockroachdb/cockroach/docs/generated/bazel_targets.txt ----- ----- -This line is ignored. 
- -//docs/generated:gen-logging-md -//docs/generated/sql ----- ----- - -bazel build //docs/generated:gen-logging-md //docs/generated/sql ----- - -bazel info bazel-bin --color=no ----- -/private/var/tmp/_bazel/99e666e4e674209ecdb66b46371278df/execroot/cockroach/bazel-out/darwin-fastbuild/bin - -bazel query --output=xml //docs/generated:gen-logging-md ----- ----- - - - - - - - - - - - - - - - - - - ----- ----- - -cp /private/var/tmp/_bazel/99e666e4e674209ecdb66b46371278df/execroot/cockroach/bazel-out/darwin-fastbuild/bin/docs/generated/logging.md go/src/github.com/cockroachdb/cockroach/docs/generated/logging.md ----- - -bazel query --output=xml //docs/generated/sql ----- ----- - - - - - - - - - - - - - - - - - - - - - ----- ----- - -cp /private/var/tmp/_bazel/99e666e4e674209ecdb66b46371278df/execroot/cockroach/bazel-out/darwin-fastbuild/bin/docs/generated/sql/aggregates.md go/src/github.com/cockroachdb/cockroach/docs/generated/sql/aggregates.md ----- - -cp /private/var/tmp/_bazel/99e666e4e674209ecdb66b46371278df/execroot/cockroach/bazel-out/darwin-fastbuild/bin/docs/generated/sql/functions.md go/src/github.com/cockroachdb/cockroach/docs/generated/sql/functions.md ----- - -cp /private/var/tmp/_bazel/99e666e4e674209ecdb66b46371278df/execroot/cockroach/bazel-out/darwin-fastbuild/bin/docs/generated/sql/operators.md go/src/github.com/cockroachdb/cockroach/docs/generated/sql/operators.md ----- - -cp /private/var/tmp/_bazel/99e666e4e674209ecdb66b46371278df/execroot/cockroach/bazel-out/darwin-fastbuild/bin/docs/generated/sql/window_functions.md go/src/github.com/cockroachdb/cockroach/docs/generated/sql/window_functions.md ----- - -go/src/github.com/cockroachdb/cockroach/build/bazelutil/generate_redact_safe.sh ----- -MOCK_REDACT_SAFE_OUTPUT - -echo MOCK_REDACT_SAFE_OUTPUT > go/src/github.com/cockroachdb/cockroach/docs/generated/redact_safe.md ----- - -bazel build //:go_path --show_result=0 ----- - -bazel info bazel-bin --color=no ----- -/private/var/tmp/_bazel/99e666e4e674209ecdb66b46371278df/execroot/cockroach/bazel-out/darwin-fastbuild/bin - - -git status --ignored --short go/src/github.com/cockroachdb/cockroach/pkg ----- ----- - M pkg/some_modified_file.go -?? pkg/some_unknown_file.go -!! pkg/file_to_delete.go -!! pkg/zcgo_flags_file_to_ignore.go -!! 
pkg/ui/node_modules/ ----- ----- - -rm pkg/file_to_delete.go ----- - -find /private/var/tmp/_bazel/99e666e4e674209ecdb66b46371278df/execroot/cockroach/bazel-out/darwin-fastbuild/bin/go_path/src/github.com/cockroachdb/cockroach -name *.go ----- ----- -/private/var/tmp/_bazel/99e666e4e674209ecdb66b46371278df/execroot/cockroach/bazel-out/darwin-fastbuild/bin/go_path/src/github.com/cockroachdb/cockroach/pkg/kv/kvserver/storage_services.pb.go -/private/var/tmp/_bazel/99e666e4e674209ecdb66b46371278df/execroot/cockroach/bazel-out/darwin-fastbuild/bin/go_path/src/github.com/cockroachdb/cockroach/pkg/roachpb/batch_generated-gen.go -/private/var/tmp/_bazel/99e666e4e674209ecdb66b46371278df/execroot/cockroach/bazel-out/darwin-fastbuild/bin/go_path/src/github.com/cockroachdb/cockroach/pkg/sql/opt/optgen/lang/expr-gen.og.go -/private/var/tmp/_bazel/99e666e4e674209ecdb66b46371278df/execroot/cockroach/bazel-out/darwin-fastbuild/bin/go_path/src/github.com/cockroachdb/cockroach/pkg/sql/opt/optgen/lang/operator-gen.og.go ----- ----- - -cat go/src/github.com/cockroachdb/cockroach/build/bazelutil/checked_in_genfiles.txt ----- ----- -# Comment -//pkg/roachpb:gen-batch-generated|batch_generated-gen.go|batch_generated.go -//pkg/sql/opt/optgen/lang:gen-expr|expr-gen.og.go|expr.og.go -//pkg/sql/opt/optgen/lang:gen-operator|operator-gen.og.go|operator.og.go ----- ----- - -cp /private/var/tmp/_bazel/99e666e4e674209ecdb66b46371278df/execroot/cockroach/bazel-out/darwin-fastbuild/bin/go_path/src/github.com/cockroachdb/cockroach/pkg/kv/kvserver/storage_services.pb.go go/src/github.com/cockroachdb/cockroach/pkg/kv/kvserver/storage_services.pb.go ----- - -cp /private/var/tmp/_bazel/99e666e4e674209ecdb66b46371278df/execroot/cockroach/bazel-out/darwin-fastbuild/bin/go_path/src/github.com/cockroachdb/cockroach/pkg/roachpb/batch_generated-gen.go go/src/github.com/cockroachdb/cockroach/pkg/roachpb/batch_generated.go ----- - -cp /private/var/tmp/_bazel/99e666e4e674209ecdb66b46371278df/execroot/cockroach/bazel-out/darwin-fastbuild/bin/go_path/src/github.com/cockroachdb/cockroach/pkg/sql/opt/optgen/lang/expr-gen.og.go go/src/github.com/cockroachdb/cockroach/pkg/sql/opt/optgen/lang/expr.og.go ----- - -cp /private/var/tmp/_bazel/99e666e4e674209ecdb66b46371278df/execroot/cockroach/bazel-out/darwin-fastbuild/bin/go_path/src/github.com/cockroachdb/cockroach/pkg/sql/opt/optgen/lang/operator-gen.og.go go/src/github.com/cockroachdb/cockroach/pkg/sql/opt/optgen/lang/operator.og.go ----- diff --git a/pkg/cmd/dev/testdata/recording/lint.txt b/pkg/cmd/dev/testdata/recording/lint.txt deleted file mode 100644 index 1f53e2e199f3..000000000000 --- a/pkg/cmd/dev/testdata/recording/lint.txt +++ /dev/null @@ -1,5 +0,0 @@ -bazel run --config=test //build/bazelutil:lint -- -test.v ----- - -bazel run --config=test //build/bazelutil:lint -- -test.v -test.short -test.timeout 5m0s ----- diff --git a/pkg/cmd/dev/testdata/recording/logic.txt b/pkg/cmd/dev/testdata/recording/logic.txt deleted file mode 100644 index e19a7b132797..000000000000 --- a/pkg/cmd/dev/testdata/recording/logic.txt +++ /dev/null @@ -1,14 +0,0 @@ -bazel test --test_env=GOTRACEBACK=all --test_output errors //pkg/sql/logictest:logictest_test --test_filter TestLogic/// ----- - -bazel test --test_env=GOTRACEBACK=all --test_output errors //pkg/ccl/logictestccl:logictestccl_test --test_filter 'Test(CCL|Tenant)Logic///' ----- - -bazel test --test_env=GOTRACEBACK=all --test_output errors //pkg/sql/opt/exec/execbuilder:execbuilder_test --test_filter TestExecBuild/// ----- - -bazel test 
--test_env=GOTRACEBACK=all --test_arg -show-sql --test_arg -config --test_arg local --test_output errors //pkg/sql/logictest:logictest_test --test_filter 'TestLogic/^local$/^prepare|fk$/20042' ----- - -bazel test --test_env=GOTRACEBACK=all --test_arg -test.v --test_arg -show-logs --test_timeout=50 --test_arg -show-sql --test_arg -config --test_arg local --test_output all --test_env=COCKROACH_WORKSPACE=go/src/github.com/cockroachdb/cockroach --test_arg -rewrite --sandbox_writable_path=go/src/github.com/cockroachdb/cockroach/pkg/sql/logictest //pkg/sql/logictest:logictest_test --test_filter 'TestLogic/^local$/^auto_span_config_reconciliation_job$/' ----- diff --git a/pkg/cmd/dev/testdata/recording/test.txt b/pkg/cmd/dev/testdata/recording/test.txt deleted file mode 100644 index 884f6cbc3985..000000000000 --- a/pkg/cmd/dev/testdata/recording/test.txt +++ /dev/null @@ -1,268 +0,0 @@ -find pkg/util/tracing -type d ----- -pkg/util/tracing - -bazel query 'kind(go_test, //pkg/util/tracing:all)' ----- -//pkg/util/tracing:tracing_test - -bazel test //pkg/util/tracing:tracing_test --test_env=GOTRACEBACK=all --test_output errors ----- ----- -//pkg/util/tracing:tracing_test PASSED in 0.2s - -Executed 1 out of 1 test: 1 test passes. ----- ----- - -find pkg/util/tracing -type d ----- -pkg/util/tracing - -bazel query 'kind(go_test, //pkg/util/tracing/...)' ----- -//pkg/util/tracing:tracing_test - -bazel test //pkg/util/tracing:tracing_test --test_env=GOTRACEBACK=all --test_output errors ----- ----- -//pkg/util/tracing:tracing_test (cached) PASSED in 0.2s - -Executed 0 out of 1 test: 1 test passes. ----- ----- - -find pkg/util/tracing -type d ----- -pkg/util/tracing - -bazel query 'kind(go_test, //pkg/util/tracing:all)' ----- -//pkg/util/tracing:tracing_test - -bazel test //pkg/util/tracing:tracing_test --test_env=GOTRACEBACK=all '--test_filter=TestStartChild*' --test_output errors ----- ----- -//pkg/util/tracing:tracing_test PASSED in 0.1s - -Executed 1 out of 1 test: 1 test passes. ----- ----- - -find pkg/util/tracing -type d ----- -pkg/util/tracing - -bazel query 'kind(go_test, //pkg/util/tracing:all)' ----- -//pkg/util/tracing:tracing_test - -bazel test //pkg/util/tracing:tracing_test --test_env=GOTRACEBACK=all '--test_filter=TestStartChild*' --test_arg -test.v --test_arg -show-logs --test_output all ----- ----- -==================== Test output for //pkg/util/tracing:tracing_test: -testing: warning: no tests to run -PASS -================================================================================ -//pkg/util/tracing:tracing_test PASSED in 0.1s - -Executed 1 out of 1 test: 1 test passes. ----- ----- - -find pkg/util/tracing -type d ----- -pkg/util/tracing - -bazel query 'kind(go_test, //pkg/util/tracing:all)' ----- -//pkg/util/tracing:tracing_test - -bazel test --remote_local_fallback --remote_cache=grpc://127.0.0.1:9092 --experimental_remote_downloader=grpc://127.0.0.1:9092 //pkg/util/tracing:tracing_test --test_env=GOTRACEBACK=all '--test_filter=TestStartChild*' --test_output errors ----- ----- -//pkg/util/tracing:tracing_test (cached) PASSED in 0.0s - -Executed 0 out of 1 test: 1 test passes. 
----- ----- - -find pkg/util/tracing -type d ----- -pkg/util/tracing - -bazel query 'kind(go_test, //pkg/util/tracing:all)' ----- -//pkg/util/tracing:tracing_test - -bazel test //pkg/util/tracing:tracing_test --nocache_test_results --test_env=GOTRACEBACK=all '--test_filter=TestStartChild*' --test_output errors ----- ----- -//pkg/util/tracing:tracing_test PASSED in 0.1s - -Executed 1 out of 1 test: 1 test passes. ----- ----- - -find pkg/util/tracing -type d ----- -pkg/util/tracing - -bazel query 'kind(go_test, //pkg/util/tracing:all)' ----- -//pkg/util/tracing:tracing_test - -bazel test --test_sharding_strategy=disabled //pkg/util/tracing:tracing_test --test_env=GOTRACEBACK=all --test_timeout=86400 --run_under '@com_github_cockroachdb_stress//:stress -bazel -shardable-artifacts '"'"'XML_OUTPUT_FILE=dev merge-test-xmls'"'"' ' '--test_filter=TestStartChild*' --test_output streamed ----- ----- -//pkg/util/tracing:tracing_test PASSED in 12.3s - -Executed 1 out of 1 test: 1 test passes. ----- ----- - -find pkg/util/tracing -type d ----- -pkg/util/tracing - -bazel query 'kind(go_test, //pkg/util/tracing:all)' ----- -//pkg/util/tracing:tracing_test - -bazel test --local_cpu_resources=12 --test_sharding_strategy=disabled //pkg/util/tracing:tracing_test --test_env=GOTRACEBACK=all --test_timeout=86400 --run_under '@com_github_cockroachdb_stress//:stress -bazel -shardable-artifacts '"'"'XML_OUTPUT_FILE=dev merge-test-xmls'"'"' -p=12 ' '--test_filter=TestStartChild*' --test_output streamed ----- ----- -//pkg/util/tracing:tracing_test PASSED in 12.3s - -Executed 1 out of 1 test: 1 test passes. ----- ----- - -find pkg/util/tracing -type d ----- -pkg/util/tracing - -bazel query 'kind(go_test, //pkg/util/tracing:all)' ----- -//pkg/util/tracing:tracing_test - -bazel test --test_sharding_strategy=disabled //pkg/util/tracing:tracing_test --test_env=GOTRACEBACK=all --test_timeout=70 --run_under '@com_github_cockroachdb_stress//:stress -bazel -shardable-artifacts '"'"'XML_OUTPUT_FILE=dev merge-test-xmls'"'"' -maxtime=10s ' '--test_filter=TestStartChild*' --test_arg -test.v --test_output streamed ----- ----- -==================== Test output for //pkg/util/tracing:tracing_test: -232 runs so far, 0 failures, over 5s -528 runs so far, 0 failures, over 10s -528 runs completed, 0 failures, over 10s -SUCCESS -================================================================================ -//pkg/util/tracing:tracing_test PASSED in 10.1s - -Executed 1 out of 1 test: 1 test passes. ----- ----- - -find pkg/testutils -type d ----- -pkg/testutils - -bazel query 'kind(go_test, //pkg/testutils:all)' ----- -//pkg/testutils:testutils_test - -bazel test //pkg/testutils:testutils_test --test_env=GOTRACEBACK=all --test_timeout=10 --test_output errors ----- ----- -Loading: -Loading: 0 packages loaded -INFO: Build option --test_timeout has changed, discarding analysis cache. -Analyzing: target //pkg/testutils:testutils_test (0 packages loaded, 0 targets configured) -INFO: Analyzed target //pkg/testutils:testutils_test (0 packages loaded, 11870 targets configured). -INFO: Found 1 test target... -[0 / 2] [Prepa] BazelWorkspaceStatusAction stable-status.txt -[1,220 / 1,221] GoLink pkg/testutils/testutils_test_/testutils_test; 0s darwin-sandbox -[1,221 / 1,222] Testing //pkg/testutils:testutils_test; 0s darwin-sandbox -Target //pkg/testutils:testutils_test up-to-date: - _bazel/bin/pkg/testutils/testutils_test_/testutils_test -INFO: Elapsed time: 4.336s, Critical Path: 2.79s -INFO: 3 processes: 1 internal, 2 darwin-sandbox. 
-INFO: Build completed successfully, 3 total actions -//pkg/testutils:testutils_test PASSED in 0.8s - -Executed 1 out of 1 test: 1 test passes. -INFO: Build completed successfully, 3 total actions - ----- ----- - -find pkg/util/tracing -type d ----- -pkg/util/tracing - -bazel query 'kind(go_test, //pkg/util/tracing:all)' ----- -//pkg/util/tracing:tracing_test - -bazel test //pkg/util/tracing:tracing_test --test_env=GOTRACEBACK=all --test_output errors -s ----- ----- -//pkg/util/tracing:tracing_test PASSED in 0.2s - -Executed 1 out of 1 test: 1 test passes. ----- ----- - -find pkg/roachpb -type d ----- -pkg/roachpb - -bazel query 'kind(go_test, //pkg/roachpb:all)' ----- ----- -//pkg/roachpb:roachpb_test -//pkg/roachpb:string_test ----- ----- - -bazel test //pkg/roachpb:roachpb_test //pkg/roachpb:string_test --test_env=GOTRACEBACK=all --test_output errors ----- - -find pkg/roachpb -type d ----- -pkg/roachpb - -bazel query 'kind(go_test, //pkg/roachpb:string_test)' ----- -//pkg/roachpb:string_test - -bazel test //pkg/roachpb:string_test --test_env=GOTRACEBACK=all --test_output errors ----- - -find pkg/testutils -type d ----- -pkg/testutils - -bazel query 'kind(go_test, //pkg/testutils:all)' ----- -//pkg/testutils:testutils_test - -bazel test //pkg/testutils:testutils_test --test_env=GOTRACEBACK=all --test_env=COCKROACH_WORKSPACE=go/src/github.com/cockroachdb/cockroach --test_arg -rewrite --sandbox_writable_path=go/src/github.com/cockroachdb/cockroach/pkg/testutils --test_output errors ----- - -find pkg/testutils -type d ----- -pkg/testutils - -bazel query 'kind(go_test, //pkg/testutils:all)' ----- -//pkg/testutils:testutils_test - -find pkg/other/test -type d ----- -pkg/other/test - -bazel query 'kind(go_test, //pkg/other/test:all)' ----- -//pkg/other/test:test_test - -bazel test //pkg/testutils:testutils_test //pkg/other/test:test_test --test_env=GOTRACEBACK=all --test_env=COCKROACH_WORKSPACE=go/src/github.com/cockroachdb/cockroach --test_arg -rewrite --sandbox_writable_path=go/src/github.com/cockroachdb/cockroach/pkg/testutils --sandbox_writable_path=go/src/github.com/cockroachdb/cockroach/pkg/other/test --test_output errors ----- diff --git a/pkg/cmd/dev/testdata/recording/ui.txt b/pkg/cmd/dev/testdata/recording/ui.txt deleted file mode 100644 index 04704e30e230..000000000000 --- a/pkg/cmd/dev/testdata/recording/ui.txt +++ /dev/null @@ -1,48 +0,0 @@ -bazel build //pkg/ui/workspaces/db-console/src/js:crdb-protobuf-client //pkg/ui/workspaces/db-console/ccl/src/js:crdb-protobuf-client-ccl ----- - -yarn --silent --cwd go/src/github.com/cockroachdb/cockroach/pkg/ui/workspaces/cluster-ui build:watch ----- - -yarn --silent --cwd go/src/github.com/cockroachdb/cockroach/pkg/ui/workspaces/db-console webpack-dev-server --config webpack.app.js --mode development --env.WEBPACK_SERVE --env.dist=ccl --env.target=http://localhost:8080 --port 3000 ----- - - -bazel build //pkg/ui/workspaces/db-console/src/js:crdb-protobuf-client ----- - -yarn --silent --cwd go/src/github.com/cockroachdb/cockroach/pkg/ui/workspaces/cluster-ui build:watch ----- - -yarn --silent --cwd go/src/github.com/cockroachdb/cockroach/pkg/ui/workspaces/db-console webpack-dev-server --config webpack.app.js --mode development --env.WEBPACK_SERVE --env.dist=oss --env.target=http://localhost:8080 --port 3000 ----- - - -bazel build //pkg/ui/workspaces/db-console/src/js:crdb-protobuf-client //pkg/ui/workspaces/db-console/ccl/src/js:crdb-protobuf-client-ccl ----- - -yarn --silent --cwd 
go/src/github.com/cockroachdb/cockroach/pkg/ui/workspaces/cluster-ui build:watch ----- - -yarn --silent --cwd go/src/github.com/cockroachdb/cockroach/pkg/ui/workspaces/db-console webpack-dev-server --config webpack.app.js --mode development --env.WEBPACK_SERVE --env.dist=ccl --env.target=http://localhost:8080 --port 3000 --https ----- - - -bazel build //pkg/ui/workspaces/db-console/src/js:crdb-protobuf-client //pkg/ui/workspaces/db-console/ccl/src/js:crdb-protobuf-client-ccl ----- - -yarn --silent --cwd go/src/github.com/cockroachdb/cockroach/pkg/ui/workspaces/cluster-ui build:watch ----- - -yarn --silent --cwd go/src/github.com/cockroachdb/cockroach/pkg/ui/workspaces/db-console webpack-dev-server --config webpack.app.js --mode development --env.WEBPACK_SERVE --env.dist=ccl --env.target=http://example.crdb.io:4848 --port 3000 ----- - - -bazel build //pkg/ui/workspaces/db-console/src/js:crdb-protobuf-client //pkg/ui/workspaces/db-console/ccl/src/js:crdb-protobuf-client-ccl ----- - -yarn --silent --cwd go/src/github.com/cockroachdb/cockroach/pkg/ui/workspaces/cluster-ui build:watch ----- - -yarn --silent --cwd go/src/github.com/cockroachdb/cockroach/pkg/ui/workspaces/db-console webpack-dev-server --config webpack.app.js --mode development --env.WEBPACK_SERVE --env.dist=ccl --env.target=http://localhost:8080 --port 12345 ----- diff --git a/pkg/cmd/dev/testdata/test.txt b/pkg/cmd/dev/testdata/test.txt deleted file mode 100644 index 16f78ef0c326..000000000000 --- a/pkg/cmd/dev/testdata/test.txt +++ /dev/null @@ -1,91 +0,0 @@ -dev test pkg/util/tracing ----- -find pkg/util/tracing -type d -bazel query 'kind(go_test, //pkg/util/tracing:all)' -bazel test //pkg/util/tracing:tracing_test --test_env=GOTRACEBACK=all --test_output errors - -dev test pkg/util/tracing/... 
----- -find pkg/util/tracing -type d -bazel query 'kind(go_test, //pkg/util/tracing/...)' -bazel test //pkg/util/tracing:tracing_test --test_env=GOTRACEBACK=all --test_output errors - -dev test pkg/util/tracing -f TestStartChild* ----- -find pkg/util/tracing -type d -bazel query 'kind(go_test, //pkg/util/tracing:all)' -bazel test //pkg/util/tracing:tracing_test --test_env=GOTRACEBACK=all '--test_filter=TestStartChild*' --test_output errors - -dev test pkg/util/tracing -f TestStartChild* -v --show-logs ----- -find pkg/util/tracing -type d -bazel query 'kind(go_test, //pkg/util/tracing:all)' -bazel test //pkg/util/tracing:tracing_test --test_env=GOTRACEBACK=all '--test_filter=TestStartChild*' --test_arg -test.v --test_arg -show-logs --test_output all - -dev test pkg/util/tracing -f TestStartChild* --remote-cache 127.0.0.1:9092 ----- -find pkg/util/tracing -type d -bazel query 'kind(go_test, //pkg/util/tracing:all)' -bazel test --remote_local_fallback --remote_cache=grpc://127.0.0.1:9092 --experimental_remote_downloader=grpc://127.0.0.1:9092 //pkg/util/tracing:tracing_test --test_env=GOTRACEBACK=all '--test_filter=TestStartChild*' --test_output errors - -dev test pkg/util/tracing -f TestStartChild* --ignore-cache ----- -find pkg/util/tracing -type d -bazel query 'kind(go_test, //pkg/util/tracing:all)' -bazel test //pkg/util/tracing:tracing_test --nocache_test_results --test_env=GOTRACEBACK=all '--test_filter=TestStartChild*' --test_output errors - -dev test --stress pkg/util/tracing --filter TestStartChild* ----- -find pkg/util/tracing -type d -bazel query 'kind(go_test, //pkg/util/tracing:all)' -bazel test --test_sharding_strategy=disabled //pkg/util/tracing:tracing_test --test_env=GOTRACEBACK=all --test_timeout=86400 --run_under '@com_github_cockroachdb_stress//:stress -bazel -shardable-artifacts '"'"'XML_OUTPUT_FILE=dev merge-test-xmls'"'"' ' '--test_filter=TestStartChild*' --test_output streamed - -dev test --stress pkg/util/tracing --filter TestStartChild* --cpus=12 ----- -find pkg/util/tracing -type d -bazel query 'kind(go_test, //pkg/util/tracing:all)' -bazel test --local_cpu_resources=12 --test_sharding_strategy=disabled //pkg/util/tracing:tracing_test --test_env=GOTRACEBACK=all --test_timeout=86400 --run_under '@com_github_cockroachdb_stress//:stress -bazel -shardable-artifacts '"'"'XML_OUTPUT_FILE=dev merge-test-xmls'"'"' -p=12 ' '--test_filter=TestStartChild*' --test_output streamed - -dev test --stress pkg/util/tracing --filter TestStartChild* --timeout=10s -v ----- -find pkg/util/tracing -type d -bazel query 'kind(go_test, //pkg/util/tracing:all)' -bazel test --test_sharding_strategy=disabled //pkg/util/tracing:tracing_test --test_env=GOTRACEBACK=all --test_timeout=70 --run_under '@com_github_cockroachdb_stress//:stress -bazel -shardable-artifacts '"'"'XML_OUTPUT_FILE=dev merge-test-xmls'"'"' -maxtime=10s ' '--test_filter=TestStartChild*' --test_arg -test.v --test_output streamed - -dev test //pkg/testutils --timeout=10s ----- -find pkg/testutils -type d -bazel query 'kind(go_test, //pkg/testutils:all)' -bazel test //pkg/testutils:testutils_test --test_env=GOTRACEBACK=all --test_timeout=10 --test_output errors - -dev test pkg/util/tracing -- -s ----- -find pkg/util/tracing -type d -bazel query 'kind(go_test, //pkg/util/tracing:all)' -bazel test //pkg/util/tracing:tracing_test --test_env=GOTRACEBACK=all --test_output errors -s - -dev test ./pkg/roachpb ----- -find pkg/roachpb -type d -bazel query 'kind(go_test, //pkg/roachpb:all)' -bazel test //pkg/roachpb:roachpb_test 
//pkg/roachpb:string_test --test_env=GOTRACEBACK=all --test_output errors - -dev test pkg/roachpb:string_test ----- -find pkg/roachpb -type d -bazel query 'kind(go_test, //pkg/roachpb:string_test)' -bazel test //pkg/roachpb:string_test --test_env=GOTRACEBACK=all --test_output errors - -dev test //pkg/testutils --rewrite ----- -find pkg/testutils -type d -bazel query 'kind(go_test, //pkg/testutils:all)' -bazel test //pkg/testutils:testutils_test --test_env=GOTRACEBACK=all --test_env=COCKROACH_WORKSPACE=go/src/github.com/cockroachdb/cockroach --test_arg -rewrite --sandbox_writable_path=go/src/github.com/cockroachdb/cockroach/pkg/testutils --test_output errors - -dev test //pkg/testutils pkg/other/test --rewrite ----- -find pkg/testutils -type d -bazel query 'kind(go_test, //pkg/testutils:all)' -find pkg/other/test -type d -bazel query 'kind(go_test, //pkg/other/test:all)' -bazel test //pkg/testutils:testutils_test //pkg/other/test:test_test --test_env=GOTRACEBACK=all --test_env=COCKROACH_WORKSPACE=go/src/github.com/cockroachdb/cockroach --test_arg -rewrite --sandbox_writable_path=go/src/github.com/cockroachdb/cockroach/pkg/testutils --sandbox_writable_path=go/src/github.com/cockroachdb/cockroach/pkg/other/test --test_output errors diff --git a/pkg/cmd/dev/testdata/ui.txt b/pkg/cmd/dev/testdata/ui.txt deleted file mode 100644 index 4ba163682570..000000000000 --- a/pkg/cmd/dev/testdata/ui.txt +++ /dev/null @@ -1,29 +0,0 @@ -dev ui watch ----- -bazel build //pkg/ui/workspaces/db-console/src/js:crdb-protobuf-client //pkg/ui/workspaces/db-console/ccl/src/js:crdb-protobuf-client-ccl -yarn --silent --cwd go/src/github.com/cockroachdb/cockroach/pkg/ui/workspaces/cluster-ui build:watch -yarn --silent --cwd go/src/github.com/cockroachdb/cockroach/pkg/ui/workspaces/db-console webpack-dev-server --config webpack.app.js --mode development --env.WEBPACK_SERVE --env.dist=ccl --env.target=http://localhost:8080 --port 3000 - -dev ui watch --oss ----- -bazel build //pkg/ui/workspaces/db-console/src/js:crdb-protobuf-client -yarn --silent --cwd go/src/github.com/cockroachdb/cockroach/pkg/ui/workspaces/cluster-ui build:watch -yarn --silent --cwd go/src/github.com/cockroachdb/cockroach/pkg/ui/workspaces/db-console webpack-dev-server --config webpack.app.js --mode development --env.WEBPACK_SERVE --env.dist=oss --env.target=http://localhost:8080 --port 3000 - -dev ui watch --secure ----- -bazel build //pkg/ui/workspaces/db-console/src/js:crdb-protobuf-client //pkg/ui/workspaces/db-console/ccl/src/js:crdb-protobuf-client-ccl -yarn --silent --cwd go/src/github.com/cockroachdb/cockroach/pkg/ui/workspaces/cluster-ui build:watch -yarn --silent --cwd go/src/github.com/cockroachdb/cockroach/pkg/ui/workspaces/db-console webpack-dev-server --config webpack.app.js --mode development --env.WEBPACK_SERVE --env.dist=ccl --env.target=http://localhost:8080 --port 3000 --https - -dev ui watch --db http://example.crdb.io:4848 ----- -bazel build //pkg/ui/workspaces/db-console/src/js:crdb-protobuf-client //pkg/ui/workspaces/db-console/ccl/src/js:crdb-protobuf-client-ccl -yarn --silent --cwd go/src/github.com/cockroachdb/cockroach/pkg/ui/workspaces/cluster-ui build:watch -yarn --silent --cwd go/src/github.com/cockroachdb/cockroach/pkg/ui/workspaces/db-console webpack-dev-server --config webpack.app.js --mode development --env.WEBPACK_SERVE --env.dist=ccl --env.target=http://example.crdb.io:4848 --port 3000 - -dev ui watch --port 12345 ----- -bazel build //pkg/ui/workspaces/db-console/src/js:crdb-protobuf-client 
//pkg/ui/workspaces/db-console/ccl/src/js:crdb-protobuf-client-ccl -yarn --silent --cwd go/src/github.com/cockroachdb/cockroach/pkg/ui/workspaces/cluster-ui build:watch -yarn --silent --cwd go/src/github.com/cockroachdb/cockroach/pkg/ui/workspaces/db-console webpack-dev-server --config webpack.app.js --mode development --env.WEBPACK_SERVE --env.dist=ccl --env.target=http://localhost:8080 --port 12345 diff --git a/pkg/cmd/dev/ui.go b/pkg/cmd/dev/ui.go index 7e85044b51a5..fc3c56eac456 100644 --- a/pkg/cmd/dev/ui.go +++ b/pkg/cmd/dev/ui.go @@ -174,11 +174,6 @@ Replaces 'make ui-watch'.`, return err } - // Wait for OS signals to cancel if we're not in test-mode - if !isTesting { - <-ctx.Done() - } - return nil }, } diff --git a/pkg/cmd/dev/util.go b/pkg/cmd/dev/util.go index 71beb608abe8..564c1aac04a3 100644 --- a/pkg/cmd/dev/util.go +++ b/pkg/cmd/dev/util.go @@ -36,9 +36,6 @@ var ( // Shared flags. remoteCacheAddr string numCPUs int - - // To be turned on for tests. Turns off some deeper checks for reproducibility. - isTesting bool ) func mustGetFlagString(cmd *cobra.Command, name string) string { @@ -112,31 +109,26 @@ func (d *dev) getBazelInfo(ctx context.Context, key string) (string, error) { } -var workspace string - func (d *dev) getWorkspace(ctx context.Context) (string, error) { - if workspace == "" { - if _, err := os.Stat("WORKSPACE"); err == nil { - w, err := os.Getwd() - if err != nil { - return "", err - } - workspace = w - } else { - w, err := d.getBazelInfo(ctx, "workspace") - if err != nil { - return "", err - } - workspace = w - } + if _, err := os.Stat("WORKSPACE"); err == nil { + return os.Getwd() } - return workspace, nil + + return d.getBazelInfo(ctx, "workspace") } func (d *dev) getBazelBin(ctx context.Context) (string, error) { return d.getBazelInfo(ctx, "bazel-bin") } +// getDevBin returns the path to the running dev executable. +func (d *dev) getDevBin() string { + if d.knobs.devBinOverride != "" { + return d.knobs.devBinOverride + } + return os.Args[0] +} + func addCommonBuildFlags(cmd *cobra.Command) { cmd.Flags().IntVar(&numCPUs, "cpus", 0, "cap the number of cpu cores used") // This points to the grpc endpoint of a running `buchr/bazel-remote` @@ -154,10 +146,8 @@ func addCommonTestFlags(cmd *cobra.Command) { } func (d *dev) ensureBinaryInPath(bin string) error { - if !isTesting { - if _, err := d.exec.LookPath(bin); err != nil { - return fmt.Errorf("could not find %s in PATH", bin) - } + if _, err := d.exec.LookPath(bin); err != nil { + return fmt.Errorf("could not find %s in PATH", bin) } return nil } @@ -238,66 +228,9 @@ func splitArgsAtDash(cmd *cobra.Command, args []string) (before, after []string) return } -// parsePkg decomposes and validates a "pkg/.*" argument passed to the test or -// the bench commands. -func (d *dev) parsePkg(pkg string) (dir string, isRecursive bool, tag string, _ error) { - dir = pkg - - // Trim left. - dir = strings.TrimPrefix(dir, "//") - dir = strings.TrimPrefix(dir, "./") - if !strings.HasPrefix(dir, "pkg/") { - return "", false, "", fmt.Errorf( - "malformed package %q, expecting %q", pkg, "pkg/{...}") - } - - // Trim right. - dir = strings.TrimRight(dir, "/") - { - parts := strings.Split(dir, ":") - switch len(parts) { - case 0: - return "", false, "", fmt.Errorf( - "malformed package %q, expecting %q", pkg, "pkg/{...}") - case 1: - break - case 2: - dir = parts[0] - tag = parts[1] - default: - return "", false, "", fmt.Errorf( - "malformed package %q, expected at most one ':'", pkg) - } - } - const recursiveSuffix = "/..." 
- isRecursive = strings.HasSuffix(dir, recursiveSuffix) - if isRecursive { - dir = dir[:len(dir)-len(recursiveSuffix)] - if tag != "" { - return "", false, "", fmt.Errorf( - "malformed package %q, cannot end in %q and be followed by a tag", pkg, recursiveSuffix) - } - } - - // Check directory existence. - if ok, err := d.os.IsDir(dir); err != nil || !ok { - return "", false, "", fmt.Errorf( - "malformed package %q, %q is not an existing directory", pkg, dir) - } - return dir, isRecursive, tag, nil -} - func logCommand(cmd string, args ...string) { var fullArgs []string fullArgs = append(fullArgs, cmd) fullArgs = append(fullArgs, args...) log.Printf("$ %s", shellescape.QuoteCommand(fullArgs)) } - -// getDevBin returns the path to the running dev executable. -func getDevBin() string { - if isTesting { - return "dev" - } - return os.Args[0] -} diff --git a/pkg/kv/kvclient/kvstreamer/streamer.go b/pkg/kv/kvclient/kvstreamer/streamer.go index e32244f5118b..0cdca45eca07 100644 --- a/pkg/kv/kvclient/kvstreamer/streamer.go +++ b/pkg/kv/kvclient/kvstreamer/streamer.go @@ -215,10 +215,6 @@ type Streamer struct { enqueueKeys []int - // waitForResults is used to block GetResults() call until some results are - // available. - waitForResults chan struct{} - mu struct { // If the budget's mutex also needs to be locked, the budget's mutex // must be acquired first. @@ -261,6 +257,10 @@ type Streamer struct { // by GetResults() to the caller which the caller hasn't processed yet. numUnreleasedResults int + // hasResults is used by the client's goroutine to block until there are + // some results to be picked up. + hasResults *sync.Cond + // results are the results of already completed requests that haven't // been returned by GetResults() yet. results []Result @@ -321,6 +321,7 @@ func NewStreamer( budget: newBudget(acc, limitBytes), } s.mu.hasWork = sync.NewCond(&s.mu.Mutex) + s.mu.hasResults = sync.NewCond(&s.mu.Mutex) s.coordinator = workerCoordinator{ s: s, txn: txn, @@ -362,7 +363,6 @@ func (s *Streamer) Init(mode OperationMode, hints Hints, maxKeysPerRow int) { } s.hints = hints s.maxKeysPerRow = int32(maxKeysPerRow) - s.waitForResults = make(chan struct{}, 1) } // Enqueue dispatches multiple requests for execution. Results are delivered @@ -583,45 +583,23 @@ func (s *Streamer) enqueueMemoryAccountingLocked( // result slice is returned once all enqueued requests have been responded to. func (s *Streamer) GetResults(ctx context.Context) ([]Result, error) { s.mu.Lock() - results := s.mu.results - err := s.mu.err - s.mu.results = nil - allComplete := s.mu.numCompleteRequests == s.mu.numEnqueuedRequests - // Non-blockingly clear the waitForResults channel in case we've just picked - // up some results. We do so while holding the mutex so that new results - // aren't appended. - select { - case <-s.waitForResults: - default: - } - s.mu.Unlock() - - if len(results) > 0 || allComplete || err != nil { - return results, err - } - - select { - case <-ctx.Done(): - return nil, ctx.Err() - case <-s.waitForResults: - s.mu.Lock() - results = s.mu.results - err = s.mu.err + defer s.mu.Unlock() + for { + results := s.mu.results s.mu.results = nil - s.mu.Unlock() - return results, err - } -} - -// notifyGetResultsLocked non-blockingly sends a message on waitForResults -// channel. This method should be called only while holding the lock of s.mu so -// that other results couldn't be appended which would cause us to miss the -// notification about that. 
-func (s *Streamer) notifyGetResultsLocked() { - s.mu.AssertHeld() - select { - case s.waitForResults <- struct{}{}: - default: + allComplete := s.mu.numCompleteRequests == s.mu.numEnqueuedRequests + if len(results) > 0 || allComplete || s.mu.err != nil { + return results, s.mu.err + } + s.mu.hasResults.Wait() + // Check whether the Streamer has been canceled or closed while we were + // waiting for the results. + if err := ctx.Err(); err != nil { + // No need to use setErrorLocked here because the current goroutine + // is the only one blocking on hasResults condition variable. + s.mu.err = err + return nil, err + } } } @@ -642,7 +620,7 @@ func (s *Streamer) setErrorLocked(err error) { if s.mu.err == nil { s.mu.err = err } - s.notifyGetResultsLocked() + s.mu.hasResults.Signal() } // Close cancels all in-flight operations and releases all of the resources of @@ -655,6 +633,11 @@ func (s *Streamer) Close() { s.mu.done = true // Unblock the coordinator in case it is waiting for more work. s.mu.hasWork.Signal() + // Note that only the client's goroutine can be blocked waiting for the + // results, and Close() is called only by the same goroutine, so + // signaling the hasResults condition variable isn't necessary. However, we + // choose to be safe and do it anyway. + s.mu.hasResults.Signal() s.mu.Unlock() // Unblock the coordinator in case it is waiting for the budget. s.budget.mu.waitForBudget.Signal() @@ -1265,23 +1248,36 @@ func (w *workerCoordinator) processSingleRangeResults( resumeReqIdx++ } else { // This Get was completed. - result := Result{ - GetResp: get, - // This currently only works because all requests - // are unique. - EnqueueKeysSatisfied: []int{enqueueKey}, - position: req.positions[i], + if get.Value != nil { + // Create a Result only for non-empty Get responses. + result := Result{ + GetResp: get, + // This currently only works because all requests + // are unique. + EnqueueKeysSatisfied: []int{enqueueKey}, + position: req.positions[i], + } + result.memoryTok.streamer = w.s + result.memoryTok.toRelease = getResponseSize(get) + memoryTokensBytes += result.memoryTok.toRelease + results = append(results, result) } - result.memoryTok.streamer = w.s - result.memoryTok.toRelease = getResponseSize(get) - memoryTokensBytes += result.memoryTok.toRelease - results = append(results, result) + // Note that we count this Get response as complete regardless + // of whether it is empty or not. numCompleteGetResponses++ } case *roachpb.ScanRequest: scan := reply.(*roachpb.ScanResponse) - if len(scan.Rows) > 0 || len(scan.BatchResponses) > 0 { + if len(scan.Rows) > 0 || len(scan.BatchResponses) > 0 || scan.ResumeSpan == nil { + // Only the last part of the conditional is true whenever we + // received an empty response for the Scan request (i.e. there + // was no data in the span to scan). In such a scenario we still + // create a Result with no data that the client will skip over + // (this approach makes it easier to support Scans that span + // multiple ranges and the last range has no data in it - we + // want to be able to set Complete field on such an empty + // Result). result := Result{ // This currently only works because all requests // are unique. @@ -1321,13 +1317,9 @@ func (w *workerCoordinator) processSingleRangeResults( } } - // If we have any results, finalize them.
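The GetResults rewrite in the hunks above swaps the one-shot waitForResults channel for a sync.Cond tied to s.mu: the caller re-checks its predicate in a loop around Wait(), and every producer that changes the predicate signals while holding the same mutex. Below is a minimal sketch of that pattern using a hypothetical queue type, not the Streamer API; note that, unlike a channel receive, Cond.Wait() cannot select on ctx.Done(), which is why the new code re-checks ctx.Err() after waking.

```go
// Condition-variable wait loop, sketched with a toy queue type (not the
// Streamer API). Wait() atomically unlocks the mutex while sleeping and
// relocks it on wakeup, so the predicate must be re-checked in a loop.
package main

import (
	"fmt"
	"sync"
)

type queue struct {
	mu         sync.Mutex
	hasResults *sync.Cond
	items      []int
}

func newQueue() *queue {
	q := &queue{}
	q.hasResults = sync.NewCond(&q.mu)
	return q
}

// push appends an item and signals a blocked consumer while holding the
// mutex, mirroring how finalizeSingleRangeResults signals under s.mu.
func (q *queue) push(v int) {
	q.mu.Lock()
	defer q.mu.Unlock()
	q.items = append(q.items, v)
	q.hasResults.Signal()
}

// pop blocks until at least one item is available, then drains the buffer.
func (q *queue) pop() []int {
	q.mu.Lock()
	defer q.mu.Unlock()
	for len(q.items) == 0 {
		q.hasResults.Wait()
	}
	out := q.items
	q.items = nil
	return out
}

func main() {
	q := newQueue()
	go q.push(42)
	fmt.Println(q.pop()) // [42]
}
```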
- if len(results) > 0 { - w.finalizeSingleRangeResults( - results, memoryFootprintBytes, hasNonEmptyScanResponse, - numCompleteGetResponses, - ) - } + w.finalizeSingleRangeResults( + results, memoryFootprintBytes, hasNonEmptyScanResponse, numCompleteGetResponses, + ) // If we have any incomplete requests, add them back into the work // pool. @@ -1340,8 +1332,6 @@ func (w *workerCoordinator) processSingleRangeResults( // singleRangeBatch. By "finalization" we mean setting Complete field of // ScanResp to correct value for all scan responses, updating the estimate of an // average response size, and telling the Streamer about these results. -// -// This method assumes that results has length greater than zero. func (w *workerCoordinator) finalizeSingleRangeResults( results []Result, actualMemoryReservation int64, @@ -1389,9 +1379,15 @@ func (w *workerCoordinator) finalizeSingleRangeResults( w.s.mu.avgResponseEstimator.update(actualMemoryReservation, int64(len(results))) w.s.mu.numCompleteRequests += numCompleteResponses w.s.mu.numUnreleasedResults += len(results) - // Store the results and non-blockingly notify the Streamer about them. w.s.mu.results = append(w.s.mu.results, results...) - w.s.notifyGetResultsLocked() + if len(results) > 0 || numCompleteResponses > 0 { + // We want to signal the condition variable when either we have some + // results to return to the client or we received some empty responses. + // The latter is needed so that the client doesn't block forever + // thinking there are more requests in flight when, in fact, all + // responses have already come back empty. + w.s.mu.hasResults.Signal() + } } var zeroIntSlice []int diff --git a/pkg/kv/kvclient/kvstreamer/streamer_test.go b/pkg/kv/kvclient/kvstreamer/streamer_test.go index bc65005148d0..7c1f6a74be08 100644 --- a/pkg/kv/kvclient/kvstreamer/streamer_test.go +++ b/pkg/kv/kvclient/kvstreamer/streamer_test.go @@ -38,10 +38,11 @@ import ( func getStreamer( ctx context.Context, s serverutils.TestServerInterface, limitBytes int64, acc *mon.BoundAccount, ) *Streamer { + rootTxn := kv.NewTxn(ctx, s.DB(), s.NodeID()) return NewStreamer( s.DistSenderI().(*kvcoord.DistSender), s.Stopper(), - kv.NewTxn(ctx, s.DB(), s.NodeID()), + kv.NewLeafTxn(ctx, s.DB(), s.NodeID(), rootTxn.GetLeafTxnInputState(ctx)), cluster.MakeTestingClusterSettings(), lock.WaitPolicy(0), limitBytes, @@ -91,10 +92,12 @@ func TestStreamerLimitations(t *testing.T) { streamer := getStreamer() defer streamer.Close() streamer.Init(OutOfOrder, Hints{UniqueRequests: true}, 1 /* maxKeysPerRow */) - get := roachpb.NewGet(roachpb.Key("key"), false /* forUpdate */) + // Use a Scan request for this test case because Gets of non-existent + // keys aren't added to the results. + scan := roachpb.NewScan(roachpb.Key("key"), roachpb.Key("key1"), false /* forUpdate */) reqs := []roachpb.RequestUnion{{ - Value: &roachpb.RequestUnion_Get{ - Get: get.(*roachpb.GetRequest), + Value: &roachpb.RequestUnion_Scan{ + Scan: scan.(*roachpb.ScanRequest), }, }} require.NoError(t, streamer.Enqueue(ctx, reqs, nil /* enqueueKeys */)) @@ -422,3 +425,90 @@ func TestStreamerWideRows(t *testing.T) { }) } } + +// TestStreamerEmptyScans verifies that the Streamer behaves correctly when +// Scan requests return empty responses. +func TestStreamerEmptyScans(t *testing.T) { + defer leaktest.AfterTest(t)() + defer log.Scope(t).Close(t) + + // Start a cluster with large --max-sql-memory parameter so that the + // Streamer isn't hitting the root budget exceeded error. 
+ const rootPoolSize = 1 << 30 /* 1GiB */ + s, db, _ := serverutils.StartServer(t, base.TestServerArgs{ + SQLMemoryPoolSize: rootPoolSize, + }) + ctx := context.Background() + defer s.Stopper().Stop(ctx) + + // Create a dummy table for which we know the encoding of valid keys. + // Although not strictly necessary, we set up two column families since with + // a single family in production a Get request would have been used. + _, err := db.Exec("CREATE TABLE t (pk INT PRIMARY KEY, k INT, blob STRING, INDEX (k), FAMILY (pk, k), FAMILY (blob))") + require.NoError(t, err) + + // Split the table into 5 ranges and populate the range cache. + for pk := 1; pk < 5; pk++ { + _, err = db.Exec(fmt.Sprintf("ALTER TABLE t SPLIT AT VALUES(%d)", pk)) + require.NoError(t, err) + } + _, err = db.Exec("SELECT count(*) from t") + require.NoError(t, err) + + makeScanRequest := func(start, end int) roachpb.RequestUnion { + var res roachpb.RequestUnion + var scan roachpb.ScanRequest + var union roachpb.RequestUnion_Scan + makeKey := func(pk int) []byte { + // These numbers essentially make a key like '/t/primary/pk'. + return []byte{240, 137, byte(136 + pk)} + } + scan.Key = makeKey(start) + scan.EndKey = makeKey(end) + union.Scan = &scan + res.Value = &union + return res + } + + getStreamer := func() *Streamer { + s := getStreamer(ctx, s, math.MaxInt64, nil /* acc */) + // There are two column families in the table. + s.Init(OutOfOrder, Hints{UniqueRequests: true}, 2 /* maxKeysPerRow */) + return s + } + + t.Run("scan single range", func(t *testing.T) { + streamer := getStreamer() + defer streamer.Close() + + // Scan the row with pk=0. + reqs := make([]roachpb.RequestUnion, 1) + reqs[0] = makeScanRequest(0, 1) + require.NoError(t, streamer.Enqueue(ctx, reqs, nil /* enqueueKeys */)) + results, err := streamer.GetResults(ctx) + require.NoError(t, err) + // We expect a single empty Scan response. + require.Equal(t, 1, len(results)) + }) + + t.Run("scan multiple ranges", func(t *testing.T) { + streamer := getStreamer() + defer streamer.Close() + + // Scan the rows with pk in range [1, 4). + reqs := make([]roachpb.RequestUnion, 1) + reqs[0] = makeScanRequest(1, 4) + require.NoError(t, streamer.Enqueue(ctx, reqs, nil /* enqueueKeys */)) + // We expect an empty response for each range. + var numResults int + for { + results, err := streamer.GetResults(ctx) + require.NoError(t, err) + numResults += len(results) + if len(results) == 0 { + break + } + } + require.Equal(t, 3, numResults) + }) +} diff --git a/pkg/migration/migrations/BUILD.bazel b/pkg/migration/migrations/BUILD.bazel index 1cfbdcd51ae7..4acee5b63c86 100644 --- a/pkg/migration/migrations/BUILD.bazel +++ b/pkg/migration/migrations/BUILD.bazel @@ -67,6 +67,7 @@ go_test( "builtins_test.go", "comment_on_index_migration_external_test.go", "descriptor_utils_test.go", + "ensure_constraint_id_test.go", "ensure_no_draining_names_external_test.go", "grant_option_migration_external_test.go", "helpers_test.go", @@ -95,6 +96,7 @@ go_test( "//pkg/spanconfig", "//pkg/sql", "//pkg/sql/catalog", + "//pkg/sql/catalog/catalogkeys", "//pkg/sql/catalog/descpb", "//pkg/sql/catalog/descs", "//pkg/sql/catalog/desctestutils", diff --git a/pkg/migration/migrations/ensure_constraint_id_test.go b/pkg/migration/migrations/ensure_constraint_id_test.go new file mode 100644 index 000000000000..9c4c063e6e16 --- /dev/null +++ b/pkg/migration/migrations/ensure_constraint_id_test.go @@ -0,0 +1,133 @@ +// Copyright 2021 The Cockroach Authors. 
+// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package migrations_test + +import ( + "context" + "fmt" + "testing" + + "github.com/cockroachdb/cockroach/pkg/base" + "github.com/cockroachdb/cockroach/pkg/clusterversion" + "github.com/cockroachdb/cockroach/pkg/keys" + "github.com/cockroachdb/cockroach/pkg/server" + "github.com/cockroachdb/cockroach/pkg/sql/catalog/catalogkeys" + "github.com/cockroachdb/cockroach/pkg/sql/catalog/descpb" + "github.com/cockroachdb/cockroach/pkg/sql/catalog/desctestutils" + "github.com/cockroachdb/cockroach/pkg/sql/catalog/tabledesc" + "github.com/cockroachdb/cockroach/pkg/testutils/sqlutils" + "github.com/cockroachdb/cockroach/pkg/testutils/testcluster" + "github.com/cockroachdb/cockroach/pkg/util/leaktest" + "github.com/cockroachdb/cockroach/pkg/util/log" + "github.com/stretchr/testify/require" +) + +// TestEnsureConstraintIDs tests that constraint IDs are added as expected. +func TestEnsureConstraintIDs(t *testing.T) { + defer leaktest.AfterTest(t)() + defer log.Scope(t).Close(t) + + // Start off with the version that did not support + // constraint IDs. + clusterArgs := base.TestClusterArgs{ + ServerArgs: base.TestServerArgs{ + Knobs: base.TestingKnobs{ + Server: &server.TestingKnobs{ + DisableAutomaticVersionUpgrade: 1, + BinaryVersionOverride: clusterversion.ByKey( + tabledesc.ConstraintIDsAddedToTableDescsVersion - 1), + }, + }, + }, + } + c := keys.SystemSQLCodec + ctx := context.Background() + tc := testcluster.StartTestCluster(t, 1, clusterArgs) + s := tc.Server(0) + defer tc.Stopper().Stop(ctx) + sqlDB := tc.ServerConn(0) + tdb := sqlutils.MakeSQLRunner(sqlDB) + // Create table with a primary key constraint. + tdb.Exec(t, "CREATE TABLE t(name int primary key)") + // Validate that comments on constraints are blocked. + tdb.ExpectErr(t, + "pq: cannot comment on constraint", + "COMMENT ON CONSTRAINT \"t_pkey\" ON t IS 'primary_comment'") + // Validate that we have a constraint ID due to post-deserialization logic. + + desc := desctestutils.TestingGetMutableExistingTableDescriptor(s.DB(), c, "defaultdb", "t") + desc.PrimaryIndex.ConstraintID = 0 + require.NoError(t, s.DB().Put( + context.Background(), + catalogkeys.MakeDescMetadataKey(keys.SystemSQLCodec, desc.GetID()), + desc.DescriptorProto(), + )) + // Validate that the post-deserialization logic will recompute the constraint IDs + // if they are missing. + desc = desctestutils.TestingGetMutableExistingTableDescriptor(s.DB(), c, "defaultdb", "t") + require.Equal(t, desc.PrimaryIndex.ConstraintID, descpb.ConstraintID(2)) + // If we set both the constraint ID / next value to 0, then we will have + // it assigned from scratch. + desc.PrimaryIndex.ConstraintID = 0 + desc.NextConstraintID = 0 + require.NoError(t, s.DB().Put( + context.Background(), + catalogkeys.MakeDescMetadataKey(keys.SystemSQLCodec, desc.GetID()), + desc.DescriptorProto(), + )) + // Validate that the descriptor is invalid, since the constraint IDs + // are missing.
+ tdb.CheckQueryResults(t, + `SELECT strpos(desc_json, 'constraintId') > 0, + strpos(desc_json, 'nextConstraintId') > 0 + FROM ( + SELECT jsonb_pretty( + crdb_internal.pb_to_json( + 'cockroach.sql.sqlbase.Descriptor', + descriptor, + false + ) + ) AS desc_json + FROM system.descriptor + WHERE id = `+ + fmt.Sprintf("%d", desc.GetID())+ + `);`, + [][]string{{"false", "false"}}, + ) + // Migrate to the new cluster version. + tdb.Exec(t, `SET CLUSTER SETTING version = $1`, + clusterversion.ByKey(tabledesc.ConstraintIDsAddedToTableDescsVersion).String()) + tdb.CheckQueryResultsRetry(t, "SHOW CLUSTER SETTING version", + [][]string{{clusterversion.ByKey(tabledesc.ConstraintIDsAddedToTableDescsVersion).String()}}) + // Validate that the constraint IDs are now populated. + tdb.CheckQueryResults(t, + `SELECT strpos(desc_json, 'constraintId') > 0, + strpos(desc_json, 'nextConstraintId') > 0 + FROM ( + SELECT jsonb_pretty( + crdb_internal.pb_to_json( + 'cockroach.sql.sqlbase.Descriptor', + descriptor, + false + ) + ) AS desc_json + FROM system.descriptor + WHERE id = `+ + fmt.Sprintf("%d", desc.GetID())+ + `);`, + [][]string{{"true", "true"}}, + ) + // Validate that we can comment on constraints. + tdb.Exec(t, + "COMMENT ON CONSTRAINT \"t_pkey\" ON t IS 'primary_comment'") +} diff --git a/pkg/migration/migrations/remove_invalid_database_privileges.go b/pkg/migration/migrations/remove_invalid_database_privileges.go index 06483447ba92..d251a761c14d 100644 --- a/pkg/migration/migrations/remove_invalid_database_privileges.go +++ b/pkg/migration/migrations/remove_invalid_database_privileges.go @@ -32,7 +32,8 @@ type descIDAndVersion struct { } // runRemoveInvalidDatabasePrivileges calls RunPostDeserializationChanges on -// every database descriptor. +// every database descriptor. It also calls RunPostDeserializationChanges on +// all table descriptors to add constraint IDs. // This migration is done to convert invalid privileges on the // database to default privileges. func runRemoveInvalidDatabasePrivileges( @@ -143,9 +144,9 @@ func descriptorUpgradeMigration( if err != nil { return err } - // If the descriptor is not a database descriptor, we can skip it. - _, databaseDesc, _, _ := descpb.FromDescriptorWithMVCCTimestamp(&desc, ts) - if databaseDesc == nil { + // If the descriptor is not a database or table descriptor, we can skip it.
+ tableDesc, databaseDesc, _, _ := descpb.FromDescriptorWithMVCCTimestamp(&desc, ts) + if databaseDesc == nil && tableDesc == nil { continue } ids = append(ids, id) diff --git a/pkg/server/server_sql.go b/pkg/server/server_sql.go index 11b42aa49735..c957bffaac8c 100644 --- a/pkg/server/server_sql.go +++ b/pkg/server/server_sql.go @@ -811,9 +811,9 @@ func newSQLServer(ctx context.Context, cfg sqlServerArgs) (*SQLServer, error) { sql.ValidateInvertedIndexes, sql.NewFakeSessionData, ) + execCfg.DescMetadaUpdaterFactory = descmetadata.NewMetadataUpdaterFactory( ieFactory, - sql.MakeConstraintOidBuilder, collectionFactory, &execCfg.Settings.SV, ) diff --git a/pkg/sql/BUILD.bazel b/pkg/sql/BUILD.bazel index be33bc7e53c9..3ffba48b4098 100644 --- a/pkg/sql/BUILD.bazel +++ b/pkg/sql/BUILD.bazel @@ -314,7 +314,6 @@ go_library( "//pkg/sql/contention/txnidcache", "//pkg/sql/covering", "//pkg/sql/delegate", - "//pkg/sql/descmetadata", "//pkg/sql/distsql", "//pkg/sql/enum", "//pkg/sql/execinfra", diff --git a/pkg/sql/alter_primary_key.go b/pkg/sql/alter_primary_key.go index 4619060f5f66..351bbd37964d 100644 --- a/pkg/sql/alter_primary_key.go +++ b/pkg/sql/alter_primary_key.go @@ -179,7 +179,9 @@ func (p *planner) AlterPrimaryKey( EncodingType: descpb.PrimaryIndexEncoding, Type: descpb.IndexDescriptor_FORWARD, Version: descpb.LatestNonPrimaryIndexDescriptorVersion, + ConstraintID: tableDesc.GetNextConstraintID(), } + tableDesc.NextConstraintID++ // If the new index is requested to be sharded, set up the index descriptor // to be sharded, and add the new shard column if it is missing. diff --git a/pkg/sql/alter_table.go b/pkg/sql/alter_table.go index ba03156ab12e..591124956e6b 100644 --- a/pkg/sql/alter_table.go +++ b/pkg/sql/alter_table.go @@ -1214,8 +1214,9 @@ func applyColumnMutation( for k := range info { inuseNames[k] = struct{}{} } - check := tabledesc.MakeNotNullCheckConstraint(col.GetName(), col.GetID(), inuseNames, descpb.ConstraintValidity_Validating) + check := tabledesc.MakeNotNullCheckConstraint(col.GetName(), col.GetID(), tableDesc.GetNextConstraintID(), inuseNames, descpb.ConstraintValidity_Validating) tableDesc.AddNotNullMutation(check, descpb.DescriptorMutation_ADD) + tableDesc.NextConstraintID++ case *tree.AlterTableDropNotNull: if col.IsNullable() { @@ -1253,8 +1254,9 @@ func applyColumnMutation( // Add a check constraint equivalent to the non-null constraint and drop // it in the schema changer. - check := tabledesc.MakeNotNullCheckConstraint(col.GetName(), col.GetID(), inuseNames, descpb.ConstraintValidity_Dropping) + check := tabledesc.MakeNotNullCheckConstraint(col.GetName(), col.GetID(), tableDesc.GetNextConstraintID(), inuseNames, descpb.ConstraintValidity_Dropping) tableDesc.Checks = append(tableDesc.Checks, check) + tableDesc.NextConstraintID++ tableDesc.AddNotNullMutation(check, descpb.DescriptorMutation_DROP) case *tree.AlterTableDropStored: diff --git a/pkg/sql/catalog/descpb/constraint.go b/pkg/sql/catalog/descpb/constraint.go index 8a825b97afc7..2499b07dba93 100644 --- a/pkg/sql/catalog/descpb/constraint.go +++ b/pkg/sql/catalog/descpb/constraint.go @@ -87,10 +87,11 @@ const ( // TODO(ajwerner): Lift this up a level of abstraction next to the // Immutable and have it store those for the ReferencedTable. type ConstraintDetail struct { - Kind ConstraintType - Columns []string - Details string - Unvalidated bool + Kind ConstraintType + ConstraintID ConstraintID + Columns []string + Details string + Unvalidated bool // Only populated for PK and Unique Constraints with an index. 
Index *IndexDescriptor diff --git a/pkg/sql/catalog/descpb/structured.go b/pkg/sql/catalog/descpb/structured.go index daa33d5fa900..b5a61ed108f3 100644 --- a/pkg/sql/catalog/descpb/structured.go +++ b/pkg/sql/catalog/descpb/structured.go @@ -74,6 +74,9 @@ type FamilyID = catid.FamilyID // IndexID is a custom type for IndexDescriptor IDs. type IndexID = catid.IndexID +// ConstraintID is a custom type for TableDescriptor constraint IDs. +type ConstraintID = catid.ConstraintID + // DescriptorVersion is a custom type for TableDescriptor Versions. type DescriptorVersion uint64 diff --git a/pkg/sql/catalog/descpb/structured.proto b/pkg/sql/catalog/descpb/structured.proto index 212ccb784a48..ae60b9f7e5be 100644 --- a/pkg/sql/catalog/descpb/structured.proto +++ b/pkg/sql/catalog/descpb/structured.proto @@ -93,6 +93,11 @@ message ForeignKeyConstraint { // These fields were used for foreign keys until 20.1. reserved 10, 11, 12, 13; + + // Used within the table descriptor to uniquely identify individual + // constraints. + optional uint32 constraint_id = 14 [(gogoproto.customname) = "ConstraintID", + (gogoproto.casttype) = "ConstraintID", (gogoproto.nullable) = false]; } // UniqueWithoutIndexConstraint is the representation of a unique constraint @@ -111,6 +116,11 @@ message UniqueWithoutIndexConstraint { // unique constraint with Predicate as the expression. Columns are referred to // in the expression by their name. optional string predicate = 5 [(gogoproto.nullable) = false]; + + // Used within the table descriptor to uniquely identify individual + // constraints. + optional uint32 constraint_id = 6 [(gogoproto.customname) = "ConstraintID", + (gogoproto.casttype) = "ConstraintID", (gogoproto.nullable) = false]; } message ColumnDescriptor { @@ -522,6 +532,12 @@ message IndexDescriptor { // require more data being stored in each deleted entry and further complicate // the merge process. See #75720 for further details. optional bool use_delete_preserving_encoding = 24 [(gogoproto.nullable) = false]; + + // Used within the table descriptor to uniquely identify individual + // constraints; it is only set for primary keys and unique secondary + // indexes. + optional uint32 constraint_id = 33 [(gogoproto.customname) = "ConstraintID", + (gogoproto.casttype) = "ConstraintID", (gogoproto.nullable) = false]; } // ConstraintToUpdate represents a constraint to be added to the table and @@ -973,6 +989,10 @@ message TableDescriptor { // of SHOW CREATE TABLE. We no longer show them in order to make the output // round-trippable, but we still set this field for now. See #68031. optional bool hidden = 7 [(gogoproto.nullable) = false]; + // Used within the table descriptor to uniquely identify individual + // constraints. + optional uint32 constraint_id = 8 [(gogoproto.customname) = "ConstraintID", + (gogoproto.casttype) = "ConstraintID", (gogoproto.nullable) = false]; } repeated CheckConstraint checks = 20; @@ -1214,7 +1234,9 @@ message TableDescriptor { // such a table will result in an empty table on the restoring cluster. optional bool exclude_data_from_backup = 48 [(gogoproto.nullable) = false]; - // Next ID: 48 + // Constraint ID for the next constraint. + optional uint32 next_constraint_id = 49 [(gogoproto.nullable) = false, + (gogoproto.customname) = "NextConstraintID", (gogoproto.casttype) = "ConstraintID"]; } // SurvivalGoal is the survival goal for a database.
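The proto changes above introduce per-constraint constraint_id fields plus a table-level next_constraint_id counter. A minimal sketch of the allocate-then-bump pattern the Go changes below rely on, using a simplified stand-in type rather than the real descpb API; a zero counter is treated as "unset", as on descriptors written before this version:

```go
// Allocate-then-bump constraint ID assignment, with a simplified stand-in
// for the descpb table descriptor (illustrative only).
package main

import "fmt"

type ConstraintID uint32

type tableDescriptor struct {
	// NextConstraintID is zero on descriptors written before constraint
	// IDs existed; initIDs-style code lazily seeds it to 1.
	NextConstraintID ConstraintID
}

// nextConstraintID returns the next free ID and advances the counter,
// matching the GetNextConstraintID()-then-increment sites in this diff.
func (d *tableDescriptor) nextConstraintID() ConstraintID {
	if d.NextConstraintID == 0 {
		d.NextConstraintID = 1
	}
	id := d.NextConstraintID
	d.NextConstraintID++
	return id
}

func main() {
	var tbl tableDescriptor
	pkID := tbl.nextConstraintID()
	fkID := tbl.nextConstraintID()
	fmt.Println(pkID, fkID, tbl.NextConstraintID) // 1 2 3
}
```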
diff --git a/pkg/sql/catalog/descriptor.go b/pkg/sql/catalog/descriptor.go index 1cba4296243d..79a9a8d4e580 100644 --- a/pkg/sql/catalog/descriptor.go +++ b/pkg/sql/catalog/descriptor.go @@ -512,6 +512,9 @@ type TableDescriptor interface { // GetNextColumnID returns the next unused column ID for this table. Column // IDs are unique per table, but not unique globally. GetNextColumnID() descpb.ColumnID + // GetNextConstraintID returns the next unused constraint ID for this table. + // Constraint IDs are unique per table, but not unique globally. + GetNextConstraintID() descpb.ConstraintID // CheckConstraintUsesColumn returns whether the check constraint uses the // specified column. CheckConstraintUsesColumn(cc *descpb.TableDescriptor_CheckConstraint, colID descpb.ColumnID) (bool, error) diff --git a/pkg/sql/catalog/descs/collection_test.go b/pkg/sql/catalog/descs/collection_test.go index 95977ea3fd85..2dec131b4034 100644 --- a/pkg/sql/catalog/descs/collection_test.go +++ b/pkg/sql/catalog/descs/collection_test.go @@ -98,13 +98,15 @@ func TestCollectionWriteDescToBatch(t *testing.T) { KeyColumnDirections: []descpb.IndexDescriptor_Direction{descpb.IndexDescriptor_ASC}, EncodingType: descpb.PrimaryIndexEncoding, Version: descpb.PrimaryIndexWithStoredColumnsVersion, + ConstraintID: 1, }, - Privileges: descpb.NewBasePrivilegeDescriptor(security.AdminRoleName()), - NextColumnID: 2, - NextFamilyID: 1, - NextIndexID: 2, - NextMutationID: 1, - FormatVersion: descpb.InterleavedFormatVersion, + Privileges: descpb.NewBasePrivilegeDescriptor(security.AdminRoleName()), + NextColumnID: 2, + NextConstraintID: 2, + NextFamilyID: 1, + NextIndexID: 2, + NextMutationID: 1, + FormatVersion: descpb.InterleavedFormatVersion, }).BuildCreatedMutableTable() b := txn.NewBatch() diff --git a/pkg/sql/catalog/schemaexpr/check_constraint.go b/pkg/sql/catalog/schemaexpr/check_constraint.go index d2ad24e83c48..b05cf877d08b 100644 --- a/pkg/sql/catalog/schemaexpr/check_constraint.go +++ b/pkg/sql/catalog/schemaexpr/check_constraint.go @@ -102,12 +102,14 @@ func (b *CheckConstraintBuilder) Build( if err != nil { return nil, err } - + constraintID := b.desc.TableDesc().GetNextConstraintID() + b.desc.TableDesc().NextConstraintID++ return &descpb.TableDescriptor_CheckConstraint{ - Expr: expr, - Name: name, - ColumnIDs: colIDs.Ordered(), - Hidden: c.Hidden, + Expr: expr, + Name: name, + ColumnIDs: colIDs.Ordered(), + Hidden: c.Hidden, + ConstraintID: constraintID, }, nil } diff --git a/pkg/sql/catalog/systemschema/system.go b/pkg/sql/catalog/systemschema/system.go index d9c1b24aba47..1ae19f7cefcb 100644 --- a/pkg/sql/catalog/systemschema/system.go +++ b/pkg/sql/catalog/systemschema/system.go @@ -727,7 +727,10 @@ func systemTable( Indexes: indexes[1:], FormatVersion: descpb.InterleavedFormatVersion, NextMutationID: 1, + NextConstraintID: 1, } + tbl.PrimaryIndex.ConstraintID = tbl.NextConstraintID + tbl.NextConstraintID++ for _, col := range columns { if tbl.NextColumnID <= col.ID { tbl.NextColumnID = col.ID + 1 @@ -738,10 +741,15 @@ func systemTable( tbl.NextFamilyID = fam.ID + 1 } } - for _, idx := range indexes { + for i, idx := range indexes { if tbl.NextIndexID <= idx.ID { tbl.NextIndexID = idx.ID + 1 } + // Only assign constraint IDs to unique non-primary indexes.
+ if idx.Unique && i > 1 { + tbl.Indexes[i-1].ConstraintID = tbl.NextConstraintID + tbl.NextConstraintID++ + } } return tbl } @@ -952,6 +960,11 @@ var ( tbl.NextFamilyID = 0 tbl.NextIndexID = 0 tbl.NextMutationID = 0 + // Sequences never exposed their internal constraints, + // so all IDs will be left at zero. CREATE SEQUENCE has + // the same behaviour. + tbl.NextConstraintID = 0 + tbl.PrimaryIndex.ConstraintID = 0 }, ) diff --git a/pkg/sql/catalog/table_elements.go b/pkg/sql/catalog/table_elements.go index 6d9c8ed23648..b9f4bbda105d 100644 --- a/pkg/sql/catalog/table_elements.go +++ b/pkg/sql/catalog/table_elements.go @@ -127,6 +127,7 @@ type Index interface { // The remaining methods operate on the underlying descpb.IndexDescriptor object. GetID() descpb.IndexID + GetConstraintID() descpb.ConstraintID GetName() string IsPartial() bool IsUnique() bool @@ -386,6 +387,9 @@ type ConstraintToUpdate interface { // UniqueWithoutIndex returns the underlying unique without index constraint, if // there is one. UniqueWithoutIndex() descpb.UniqueWithoutIndexConstraint + + // GetConstraintID returns the ID for the constraint. + GetConstraintID() descpb.ConstraintID } // PrimaryKeySwap is an interface around a primary key swap mutation. diff --git a/pkg/sql/catalog/tabledesc/BUILD.bazel b/pkg/sql/catalog/tabledesc/BUILD.bazel index ae73fd190ad4..ed810895514c 100644 --- a/pkg/sql/catalog/tabledesc/BUILD.bazel +++ b/pkg/sql/catalog/tabledesc/BUILD.bazel @@ -89,6 +89,7 @@ go_test( "//pkg/sql/catalog/internal/validate", "//pkg/sql/catalog/nstree", "//pkg/sql/catalog/typedesc", + "//pkg/sql/privilege", "//pkg/sql/types", "//pkg/testutils", "//pkg/testutils/serverutils", diff --git a/pkg/sql/catalog/tabledesc/helpers_test.go b/pkg/sql/catalog/tabledesc/helpers_test.go index cb4d075d0c4a..2606cf9b023d 100644 --- a/pkg/sql/catalog/tabledesc/helpers_test.go +++ b/pkg/sql/catalog/tabledesc/helpers_test.go @@ -11,6 +11,7 @@ package tabledesc import ( + "github.com/cockroachdb/cockroach/pkg/clusterversion" "github.com/cockroachdb/cockroach/pkg/sql/catalog" "github.com/cockroachdb/cockroach/pkg/sql/catalog/descpb" "github.com/cockroachdb/errors" @@ -24,6 +25,38 @@ func ValidatePartitioning(immI catalog.TableDescriptor) error { return imm.validatePartitioning() } +// constraintValidationErrorAccumulator implements catalog.ValidationErrorAccumulator +type constraintValidationErrorAccumulator struct { + Errors []error +} + +// Report implements catalog.ValidationErrorAccumulator +func (cea *constraintValidationErrorAccumulator) Report(err error) { + cea.Errors = append(cea.Errors, err) +} + +// IsActive implements catalog.ValidationErrorAccumulator +func (cea *constraintValidationErrorAccumulator) IsActive(version clusterversion.Key) bool { + return true +} + +func ValidateConstraints(immI catalog.TableDescriptor) error { + imm, ok := immI.(*immutable) + if !ok { + return errors.Errorf("expected immutable descriptor") + } + cea := &constraintValidationErrorAccumulator{} + imm.validateConstraintIDs(cea) + if cea.Errors == nil { + return nil + } + if len(cea.Errors) > 1 { + return errors.AssertionFailedf("expected only a single error inside "+ + "validate constraint %q", cea.Errors) + } + return cea.Errors[0] +} + func GetPostDeserializationChanges( immI catalog.TableDescriptor, ) (PostDeserializationTableDescriptorChanges, error) { diff --git a/pkg/sql/catalog/tabledesc/index.go b/pkg/sql/catalog/tabledesc/index.go index 9545ece32832..35bef93eb28b 100644 --- a/pkg/sql/catalog/tabledesc/index.go +++ 
b/pkg/sql/catalog/tabledesc/index.go @@ -69,6 +69,11 @@ func (w index) GetID() descpb.IndexID { return w.desc.ID } +// GetConstraintID returns the constraint ID. +func (w index) GetConstraintID() descpb.ConstraintID { + return w.desc.ConstraintID +} + // GetName returns the index name. func (w index) GetName() string { return w.desc.Name diff --git a/pkg/sql/catalog/tabledesc/mutation.go b/pkg/sql/catalog/tabledesc/mutation.go index 27cbc06070e7..60c6da246b0f 100644 --- a/pkg/sql/catalog/tabledesc/mutation.go +++ b/pkg/sql/catalog/tabledesc/mutation.go @@ -143,6 +143,21 @@ func (c constraintToUpdate) UniqueWithoutIndex() descpb.UniqueWithoutIndexConstr return c.desc.UniqueWithoutIndexConstraint } +// GetConstraintID returns the ID for the constraint. +func (c constraintToUpdate) GetConstraintID() descpb.ConstraintID { + switch c.desc.ConstraintType { + case descpb.ConstraintToUpdate_CHECK: + return c.desc.Check.ConstraintID + case descpb.ConstraintToUpdate_FOREIGN_KEY: + return c.ForeignKey().ConstraintID + case descpb.ConstraintToUpdate_NOT_NULL: + return 0 + case descpb.ConstraintToUpdate_UNIQUE_WITHOUT_INDEX: + return c.UniqueWithoutIndex().ConstraintID + } + panic("unknown constraint type") +} + // primaryKeySwap implements the catalog.PrimaryKeySwap interface. type primaryKeySwap struct { maybeMutation diff --git a/pkg/sql/catalog/tabledesc/safe_format_test.go b/pkg/sql/catalog/tabledesc/safe_format_test.go index f6147a3836a1..16c2f32903ac 100644 --- a/pkg/sql/catalog/tabledesc/safe_format_test.go +++ b/pkg/sql/catalog/tabledesc/safe_format_test.go @@ -107,17 +107,19 @@ func TestSafeMessage(t *testing.T) { // Add check constraints, unique without index constraints, foreign key // constraints and various mutations. mutable.Checks = append(mutable.Checks, &descpb.TableDescriptor_CheckConstraint{ - Name: "check", - Expr: "j > 0", - Validity: descpb.ConstraintValidity_Validated, - ColumnIDs: []descpb.ColumnID{2}, + Name: "check", + Expr: "j > 0", + Validity: descpb.ConstraintValidity_Validated, + ColumnIDs: []descpb.ColumnID{2}, + ConstraintID: 1, }) mutable.UniqueWithoutIndexConstraints = append( mutable.UniqueWithoutIndexConstraints, descpb.UniqueWithoutIndexConstraint{ - Name: "unique", - TableID: 112, - Validity: descpb.ConstraintValidity_Validated, - ColumnIDs: []descpb.ColumnID{2}, + Name: "unique", + TableID: 112, + Validity: descpb.ConstraintValidity_Validated, + ColumnIDs: []descpb.ColumnID{2}, + ConstraintID: 2, }, ) mutable.InboundFKs = append(mutable.InboundFKs, descpb.ForeignKeyConstraint{ @@ -129,6 +131,7 @@ func TestSafeMessage(t *testing.T) { Validity: descpb.ConstraintValidity_Validated, OnDelete: catpb.ForeignKeyAction_CASCADE, Match: descpb.ForeignKeyReference_PARTIAL, + ConstraintID: 3, }) mutable.OutboundFKs = append(mutable.OutboundFKs, descpb.ForeignKeyConstraint{ Name: "outbound_fk", @@ -139,6 +142,7 @@ func TestSafeMessage(t *testing.T) { Validity: descpb.ConstraintValidity_Validated, OnDelete: catpb.ForeignKeyAction_SET_DEFAULT, Match: descpb.ForeignKeyReference_SIMPLE, + ConstraintID: 4, }) mutable.Mutations = append(mutable.Mutations, descpb.DescriptorMutation{ @@ -154,7 +158,8 @@ func TestSafeMessage(t *testing.T) { ReferencedTableID: 2, ReferencedColumnIDs: []descpb.ColumnID{3}, Validity: descpb.ConstraintValidity_Unvalidated, OnDelete: catpb.ForeignKeyAction_SET_NULL, - Match: descpb.ForeignKeyReference_FULL, + Match: descpb.ForeignKeyReference_FULL, + ConstraintID: 5, }, }, }, @@ -185,6 +190,7 @@ func TestSafeMessage(t *testing.T) { Validity: 
descpb.ConstraintValidity_Unvalidated, ColumnIDs: []descpb.ColumnID{2}, IsNonNullConstraint: true, + ConstraintID: 6, }, NotNullColumn: 2, }, @@ -226,6 +232,7 @@ func TestSafeMessage(t *testing.T) { JobID: 1234, }, ) + mutable.PrimaryIndex.ConstraintID = 7 mutable.PrimaryIndex.StoreColumnIDs = append(mutable.PrimaryIndex.StoreColumnIDs, 5) mutable.PrimaryIndex.StoreColumnNames = append(mutable.PrimaryIndex.StoreColumnNames, "c") mutable.NextColumnID = 6 diff --git a/pkg/sql/catalog/tabledesc/structured.go b/pkg/sql/catalog/tabledesc/structured.go index 02d860c43ac4..a893c1d07dd4 100644 --- a/pkg/sql/catalog/tabledesc/structured.go +++ b/pkg/sql/catalog/tabledesc/structured.go @@ -103,6 +103,10 @@ type PostDeserializationTableDescriptorChanges struct { // has redundant IDs in its DependsOn, DependsOnTypes and DependedOnBy // references. RemovedDuplicateIDsInRefs bool + + // AddedConstraintIDs indicates that table descriptors had constraint ID + // added. + AddedConstraintIDs bool } // DescriptorType returns the type of this descriptor. @@ -606,6 +610,9 @@ func (desc *Mutable) initIDs() { if desc.NextMutationID == descpb.InvalidMutationID { desc.NextMutationID = 1 } + if desc.NextConstraintID == 0 { + desc.NextConstraintID = 1 + } } // MaybeFillColumnID assigns a column ID to the given column if the said column has an ID @@ -712,6 +719,9 @@ func (desc *Mutable) allocateIndexIDs(columnNames map[string]descpb.ColumnID) er if desc.NextIndexID == 0 { desc.NextIndexID = 1 } + if desc.NextConstraintID == 0 { + desc.NextConstraintID = 1 + } // Assign names to unnamed indexes. err := catalog.ForEachNonPrimaryIndex(desc, func(idx catalog.Index) error { @@ -722,6 +732,10 @@ func (desc *Mutable) allocateIndexIDs(columnNames map[string]descpb.ColumnID) er } idx.IndexDesc().Name = name } + if idx.GetConstraintID() == 0 && idx.IsUnique() { + idx.IndexDesc().ConstraintID = desc.NextConstraintID + desc.NextConstraintID++ + } return nil }) if err != nil { @@ -742,6 +756,10 @@ func (desc *Mutable) allocateIndexIDs(columnNames map[string]descpb.ColumnID) er if !idx.Primary() { maybeUpgradeSecondaryIndexFormatVersion(idx.IndexDesc()) } + if idx.Primary() && idx.GetConstraintID() == 0 { + idx.IndexDesc().ConstraintID = desc.NextConstraintID + desc.NextConstraintID++ + } if idx.GetID() == 0 { idx.IndexDesc().ID = desc.NextIndexID desc.NextIndexID++ @@ -1942,6 +1960,7 @@ func (desc *Mutable) AddUniqueWithoutIndexMutation( func MakeNotNullCheckConstraint( colName string, colID descpb.ColumnID, + constraintID descpb.ConstraintID, inuseNames map[string]struct{}, validity descpb.ConstraintValidity, ) *descpb.TableDescriptor_CheckConstraint { @@ -1973,6 +1992,7 @@ func MakeNotNullCheckConstraint( Validity: validity, ColumnIDs: []descpb.ColumnID{colID}, IsNonNullConstraint: true, + ConstraintID: constraintID, } } diff --git a/pkg/sql/catalog/tabledesc/structured_test.go b/pkg/sql/catalog/tabledesc/structured_test.go index 8c73121e667c..36328f3611ec 100644 --- a/pkg/sql/catalog/tabledesc/structured_test.go +++ b/pkg/sql/catalog/tabledesc/structured_test.go @@ -114,7 +114,9 @@ func TestAllocateIDs(t *testing.T) { StoreColumnIDs: descpb.ColumnIDs{3}, StoreColumnNames: []string{"c"}, EncodingType: descpb.PrimaryIndexEncoding, - Version: descpb.LatestPrimaryIndexDescriptorVersion}, + Version: descpb.LatestPrimaryIndexDescriptorVersion, + ConstraintID: 1, + }, Indexes: []descpb.IndexDescriptor{ {ID: 2, Name: "d", KeyColumnIDs: []descpb.ColumnID{2, 1}, KeyColumnNames: []string{"b", "a"}, KeyColumnDirections: 
[]descpb.IndexDescriptor_Direction{descpb.IndexDescriptor_ASC, @@ -129,12 +131,13 @@ func TestAllocateIDs(t *testing.T) { EncodingType: descpb.PrimaryIndexEncoding, Version: descpb.LatestNonPrimaryIndexDescriptorVersion}, }, - Privileges: descpb.NewBasePrivilegeDescriptor(security.AdminRoleName()), - NextColumnID: 4, - NextFamilyID: 1, - NextIndexID: 5, - NextMutationID: 1, - FormatVersion: descpb.InterleavedFormatVersion, + Privileges: descpb.NewBasePrivilegeDescriptor(security.AdminRoleName()), + NextColumnID: 4, + NextFamilyID: 1, + NextIndexID: 5, + NextMutationID: 1, + NextConstraintID: 2, + FormatVersion: descpb.InterleavedFormatVersion, }).BuildCreatedMutableTable() if !reflect.DeepEqual(expected, desc) { a, _ := json.MarshalIndent(expected, "", " ") @@ -347,13 +350,14 @@ func TestMaybeUpgradeIndexFormatVersion(t *testing.T) { }, }, upgraded: &descpb.TableDescriptor{ - FormatVersion: descpb.InterleavedFormatVersion, - ID: 51, - Name: "tbl", - ParentID: 52, - NextColumnID: 3, - NextFamilyID: 1, - NextIndexID: 2, + FormatVersion: descpb.InterleavedFormatVersion, + ID: 51, + Name: "tbl", + ParentID: 52, + NextColumnID: 3, + NextFamilyID: 1, + NextIndexID: 2, + NextConstraintID: 2, Columns: []descpb.ColumnDescriptor{ {ID: 1, Name: "foo"}, {ID: 2, Name: "bar"}, @@ -374,6 +378,7 @@ func TestMaybeUpgradeIndexFormatVersion(t *testing.T) { KeyColumnDirections: []descpb.IndexDescriptor_Direction{descpb.IndexDescriptor_ASC, descpb.IndexDescriptor_ASC}, EncodingType: descpb.PrimaryIndexEncoding, Version: descpb.LatestPrimaryIndexDescriptorVersion, + ConstraintID: 1, }, Privileges: descpb.NewBasePrivilegeDescriptor(security.RootUserName()), }, @@ -381,13 +386,14 @@ func TestMaybeUpgradeIndexFormatVersion(t *testing.T) { { // 2 // This test case is defined to be a no-op. desc: descpb.TableDescriptor{ - FormatVersion: descpb.InterleavedFormatVersion, - ID: 51, - Name: "tbl", - ParentID: 52, - NextColumnID: 3, - NextFamilyID: 1, - NextIndexID: 3, + FormatVersion: descpb.InterleavedFormatVersion, + ID: 51, + Name: "tbl", + ParentID: 52, + NextColumnID: 3, + NextFamilyID: 1, + NextIndexID: 3, + NextConstraintID: 2, Columns: []descpb.ColumnDescriptor{ {ID: 1, Name: "foo"}, {ID: 2, Name: "bar"}, @@ -408,6 +414,7 @@ func TestMaybeUpgradeIndexFormatVersion(t *testing.T) { KeyColumnDirections: []descpb.IndexDescriptor_Direction{descpb.IndexDescriptor_ASC, descpb.IndexDescriptor_ASC}, EncodingType: descpb.PrimaryIndexEncoding, Version: descpb.PrimaryIndexWithStoredColumnsVersion, + ConstraintID: 1, }, Indexes: []descpb.IndexDescriptor{ { @@ -428,12 +435,13 @@ func TestMaybeUpgradeIndexFormatVersion(t *testing.T) { // In this case we expect validation to fail owing to a violation of // assumptions for this secondary index's descriptor format version. 
desc: descpb.TableDescriptor{ - FormatVersion: descpb.BaseFormatVersion, - ID: 51, - Name: "tbl", - ParentID: 52, - NextColumnID: 3, - NextIndexID: 3, + FormatVersion: descpb.BaseFormatVersion, + ID: 51, + Name: "tbl", + ParentID: 52, + NextColumnID: 3, + NextIndexID: 3, + NextConstraintID: 2, Columns: []descpb.ColumnDescriptor{ {ID: 1, Name: "foo"}, {ID: 2, Name: "bar"}, @@ -445,6 +453,7 @@ func TestMaybeUpgradeIndexFormatVersion(t *testing.T) { KeyColumnNames: []string{"foo", "bar"}, KeyColumnDirections: []descpb.IndexDescriptor_Direction{descpb.IndexDescriptor_ASC, descpb.IndexDescriptor_ASC}, Version: descpb.PrimaryIndexWithStoredColumnsVersion, + ConstraintID: 1, }, Indexes: []descpb.IndexDescriptor{ { @@ -464,12 +473,13 @@ func TestMaybeUpgradeIndexFormatVersion(t *testing.T) { // This test case is much like the first but more complex and with more // indexes. All three should be upgraded to the latest format version. desc: descpb.TableDescriptor{ - FormatVersion: descpb.BaseFormatVersion, - ID: 51, - Name: "tbl", - ParentID: 52, - NextColumnID: 3, - NextIndexID: 4, + FormatVersion: descpb.BaseFormatVersion, + ID: 51, + Name: "tbl", + ParentID: 52, + NextColumnID: 3, + NextIndexID: 4, + NextConstraintID: 2, Columns: []descpb.ColumnDescriptor{ {ID: 1, Name: "foo"}, {ID: 2, Name: "bar"}, @@ -481,6 +491,7 @@ func TestMaybeUpgradeIndexFormatVersion(t *testing.T) { KeyColumnNames: []string{"foo", "bar"}, KeyColumnDirections: []descpb.IndexDescriptor_Direction{descpb.IndexDescriptor_ASC, descpb.IndexDescriptor_ASC}, Version: descpb.PrimaryIndexWithStoredColumnsVersion, + ConstraintID: 1, }, Indexes: []descpb.IndexDescriptor{ { @@ -502,13 +513,14 @@ func TestMaybeUpgradeIndexFormatVersion(t *testing.T) { }, }, upgraded: &descpb.TableDescriptor{ - FormatVersion: descpb.InterleavedFormatVersion, - ID: 51, - Name: "tbl", - ParentID: 52, - NextColumnID: 3, - NextFamilyID: 1, - NextIndexID: 4, + FormatVersion: descpb.InterleavedFormatVersion, + ID: 51, + Name: "tbl", + ParentID: 52, + NextColumnID: 3, + NextFamilyID: 1, + NextIndexID: 4, + NextConstraintID: 2, Columns: []descpb.ColumnDescriptor{ {ID: 1, Name: "foo"}, {ID: 2, Name: "bar"}, @@ -529,6 +541,7 @@ func TestMaybeUpgradeIndexFormatVersion(t *testing.T) { KeyColumnDirections: []descpb.IndexDescriptor_Direction{descpb.IndexDescriptor_ASC, descpb.IndexDescriptor_ASC}, EncodingType: descpb.PrimaryIndexEncoding, Version: descpb.LatestPrimaryIndexDescriptorVersion, + ConstraintID: 1, }, Indexes: []descpb.IndexDescriptor{ { @@ -595,8 +608,9 @@ func TestUnvalidateConstraints(t *testing.T) { ctx := context.Background() desc := NewBuilder(&descpb.TableDescriptor{ - Name: "test", - ParentID: descpb.ID(1), + Name: "test", + ParentID: descpb.ID(1), + NextConstraintID: 2, Columns: []descpb.ColumnDescriptor{ {Name: "a", Type: types.Int}, {Name: "b", Type: types.Int}, @@ -609,6 +623,7 @@ func TestUnvalidateConstraints(t *testing.T) { Name: "fk", ReferencedTableID: descpb.ID(1), Validity: descpb.ConstraintValidity_Validated, + ConstraintID: 1, }, }, }).BuildCreatedMutableTable() diff --git a/pkg/sql/catalog/tabledesc/table.go b/pkg/sql/catalog/tabledesc/table.go index d7312159523a..a90724d15b6d 100644 --- a/pkg/sql/catalog/tabledesc/table.go +++ b/pkg/sql/catalog/tabledesc/table.go @@ -339,7 +339,10 @@ func (desc *wrapper) collectConstraintInfo( indexName = mutation.GetPrimaryKeySwap().NewPrimaryIndexName } } - detail := descpb.ConstraintDetail{Kind: descpb.ConstraintTypePK} + detail := descpb.ConstraintDetail{ + Kind: descpb.ConstraintTypePK, + 
ConstraintID: index.ConstraintID, + } detail.Columns = index.KeyColumnNames detail.Index = index info[indexName] = detail @@ -348,7 +351,10 @@ return nil, pgerror.Newf(pgcode.DuplicateObject, "duplicate constraint name: %q", index.Name) } - detail := descpb.ConstraintDetail{Kind: descpb.ConstraintTypeUnique} + detail := descpb.ConstraintDetail{ + Kind: descpb.ConstraintTypeUnique, + ConstraintID: index.ConstraintID, + } detail.Columns = index.KeyColumnNames detail.Index = index info[index.Name] = detail @@ -362,7 +368,10 @@ return nil, pgerror.Newf(pgcode.DuplicateObject, "duplicate constraint name: %q", uc.Name) } - detail := descpb.ConstraintDetail{Kind: descpb.ConstraintTypeUnique} + detail := descpb.ConstraintDetail{ + Kind: descpb.ConstraintTypeUnique, + ConstraintID: uc.ConstraintID, + } // Constraints in the Validating state are considered Unvalidated for this // purpose. detail.Unvalidated = uc.Validity != descpb.ConstraintValidity_Validated @@ -381,7 +390,10 @@ return nil, pgerror.Newf(pgcode.DuplicateObject, "duplicate constraint name: %q", fk.Name) } - detail := descpb.ConstraintDetail{Kind: descpb.ConstraintTypeFK} + detail := descpb.ConstraintDetail{ + Kind: descpb.ConstraintTypeFK, + ConstraintID: fk.ConstraintID, + } // Constraints in the Validating state are considered Unvalidated for this // purpose. detail.Unvalidated = fk.Validity != descpb.ConstraintValidity_Validated @@ -414,7 +426,10 @@ return nil, pgerror.Newf(pgcode.DuplicateObject, "duplicate constraint name: %q", c.Name) } - detail := descpb.ConstraintDetail{Kind: descpb.ConstraintTypeCheck} + detail := descpb.ConstraintDetail{ + Kind: descpb.ConstraintTypeCheck, + ConstraintID: c.ConstraintID, + } // Constraints in the Validating state are considered Unvalidated for this // purpose. detail.Unvalidated = c.Validity != descpb.ConstraintValidity_Validated diff --git a/pkg/sql/catalog/tabledesc/table_desc.go b/pkg/sql/catalog/tabledesc/table_desc.go index 1f58a60e8062..1640ae61d494 100644 --- a/pkg/sql/catalog/tabledesc/table_desc.go +++ b/pkg/sql/catalog/tabledesc/table_desc.go @@ -12,6 +12,7 @@ package tabledesc import ( + "github.com/cockroachdb/cockroach/pkg/clusterversion" "github.com/cockroachdb/cockroach/pkg/sql/catalog" "github.com/cockroachdb/cockroach/pkg/sql/catalog/colinfo" "github.com/cockroachdb/cockroach/pkg/sql/catalog/descpb" @@ -26,6 +27,10 @@ var _ catalog.TableDescriptor = (*Mutable)(nil) var _ catalog.MutableDescriptor = (*Mutable)(nil) var _ catalog.TableDescriptor = (*wrapper)(nil) +// ConstraintIDsAddedToTableDescsVersion is the cluster version at which +// constraint IDs were added to table descriptors. +const ConstraintIDsAddedToTableDescsVersion = clusterversion.RemoveIncompatibleDatabasePrivileges + // wrapper is the base implementation of the catalog.Descriptor // interface, which is overloaded by immutable and Mutable. type wrapper struct { @@ -60,7 +65,8 @@ func (desc *wrapper) HasPostDeserializationChanges() bool { desc.postDeserializationChanges.UpgradedFormatVersion || desc.postDeserializationChanges.UpgradedIndexFormatVersion || desc.postDeserializationChanges.UpgradedNamespaceName || - desc.postDeserializationChanges.UpgradedPrivileges + desc.postDeserializationChanges.UpgradedPrivileges || + desc.postDeserializationChanges.AddedConstraintIDs } // ActiveChecks implements the TableDescriptor interface.
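ConstraintIDsAddedToTableDescsVersion gates the new validation on a cluster version, while the table_desc_builder.go changes below backfill IDs when an old descriptor is deserialized. A rough sketch of that maybe-fixup pattern with toy types (the real code tracks many more change flags); as in maybeAddConstraintIDs, hasChanged is derived by comparing the counter before and after:

```go
// Post-deserialization backfill, sketched with toy types: each maybe* fixup
// reports whether it mutated the descriptor, and the change flags feed a
// HasPostDeserializationChanges-style aggregate (illustrative only).
package main

import "fmt"

type desc struct {
	nextConstraintID uint32
	pkConstraintID   uint32
}

// maybeAddConstraintIDs assigns an ID to any constraint still at zero and
// reports a change by comparing the counter before and after.
func maybeAddConstraintIDs(d *desc) bool {
	initial := d.nextConstraintID
	if d.nextConstraintID == 0 {
		d.nextConstraintID = 1
	}
	if d.pkConstraintID == 0 {
		d.pkConstraintID = d.nextConstraintID
		d.nextConstraintID++
	}
	return d.nextConstraintID != initial
}

func main() {
	old := &desc{} // as if read from a pre-upgrade cluster version
	changed := maybeAddConstraintIDs(old)
	fmt.Println(changed, old.pkConstraintID) // true 1
	// A second pass is a no-op, so the descriptor is not rewritten again.
	fmt.Println(maybeAddConstraintIDs(old)) // false
}
```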
diff --git a/pkg/sql/catalog/tabledesc/table_desc_builder.go b/pkg/sql/catalog/tabledesc/table_desc_builder.go index 7ebacb950e4f..31487e7f7f0c 100644 --- a/pkg/sql/catalog/tabledesc/table_desc_builder.go +++ b/pkg/sql/catalog/tabledesc/table_desc_builder.go @@ -224,6 +224,7 @@ func maybeFillInDescriptor( addedGrantOptions := catprivilege.MaybeUpdateGrantOptions(desc.Privileges) changes.UpgradedPrivileges = fixedPrivileges || addedGrantOptions changes.RemovedDuplicateIDsInRefs = maybeRemoveDuplicateIDsInRefs(desc) + changes.AddedConstraintIDs = maybeAddConstraintIDs(desc) return changes } @@ -354,7 +355,9 @@ func maybeUpgradeForeignKeyRepOnIndex( OnDelete: ref.OnDelete, OnUpdate: ref.OnUpdate, Match: ref.Match, + ConstraintID: desc.GetNextConstraintID(), } + desc.NextConstraintID++ desc.OutboundFKs = append(desc.OutboundFKs, outFK) } changed = true @@ -412,6 +415,7 @@ func maybeUpgradeForeignKeyRepOnIndex( OnDelete: forwardFK.OnDelete, OnUpdate: forwardFK.OnUpdate, Match: forwardFK.Match, + ConstraintID: desc.GetNextConstraintID(), } } else { // We have an old (not upgraded yet) table, with a matching forward @@ -427,8 +431,10 @@ OnDelete: originIndex.ForeignKey.OnDelete, OnUpdate: originIndex.ForeignKey.OnUpdate, Match: originIndex.ForeignKey.Match, + ConstraintID: desc.GetNextConstraintID(), } } + desc.NextConstraintID++ desc.InboundFKs = append(desc.InboundFKs, inFK) } changed = true @@ -650,3 +656,86 @@ func cleanedIDs(input []descpb.ID) []descpb.ID { } return s } + +// maybeAddConstraintIDs ensures that all constraints have an ID associated with +// them. +func maybeAddConstraintIDs(desc *descpb.TableDescriptor) (hasChanged bool) { + // Only assign constraint IDs to physical tables. + if !desc.IsTable() { + return false + } + initialConstraintID := desc.NextConstraintID + // Maps index IDs to indexes for those which have + // a constraint ID assigned. + constraintIndexes := make(map[descpb.IndexID]*descpb.IndexDescriptor) + if desc.NextConstraintID == 0 { + desc.NextConstraintID = 1 + } + nextConstraintID := func() descpb.ConstraintID { + id := desc.GetNextConstraintID() + desc.NextConstraintID++ + return id + } + // Loop over all constraints and assign constraint IDs. + if desc.PrimaryIndex.ConstraintID == 0 { + desc.PrimaryIndex.ConstraintID = nextConstraintID() + constraintIndexes[desc.PrimaryIndex.ID] = &desc.PrimaryIndex + } + for i := range desc.Indexes { + idx := &desc.Indexes[i] + if idx.Unique && idx.ConstraintID == 0 { + idx.ConstraintID = nextConstraintID() + constraintIndexes[idx.ID] = idx + } + } + for i := range desc.Checks { + check := desc.Checks[i] + if check.ConstraintID == 0 { + check.ConstraintID = nextConstraintID() + } + } + for i := range desc.InboundFKs { + fk := &desc.InboundFKs[i] + if fk.ConstraintID == 0 { + fk.ConstraintID = nextConstraintID() + } + } + for i := range desc.OutboundFKs { + fk := desc.OutboundFKs[i] + if fk.ConstraintID == 0 { + fk.ConstraintID = nextConstraintID() + } + } + for i := range desc.UniqueWithoutIndexConstraints { + unique := desc.UniqueWithoutIndexConstraints[i] + if unique.ConstraintID == 0 { + unique.ConstraintID = nextConstraintID() + } + } + // Update mutations to add the constraint ID. In the case of a PK swap + // we may need to maintain the same constraint ID.
+ for _, mutation := range desc.GetMutations() { + if idx := mutation.GetIndex(); idx != nil && + mutation.Direction == descpb.DescriptorMutation_ADD && + idx.Unique { + idx.ConstraintID = nextConstraintID() + constraintIndexes[idx.ID] = idx + } else if pkSwap := mutation.GetPrimaryKeySwap(); pkSwap != nil { + for idx := range pkSwap.NewIndexes { + oldIdx, firstOk := constraintIndexes[pkSwap.OldIndexes[idx]] + newIdx := constraintIndexes[pkSwap.NewIndexes[idx]] + if !firstOk { + continue + } + newIdx.ConstraintID = oldIdx.ConstraintID + } + } else if constraint := mutation.GetConstraint(); constraint != nil { + nextID := nextConstraintID() + constraint.UniqueWithoutIndexConstraint.ConstraintID = nextID + constraint.ForeignKey.ConstraintID = nextID + constraint.Check.ConstraintID = nextID + } + + } + return desc.NextConstraintID != initialConstraintID +} diff --git a/pkg/sql/catalog/tabledesc/validate.go b/pkg/sql/catalog/tabledesc/validate.go index 6448d3644073..e6e6a91a1966 100644 --- a/pkg/sql/catalog/tabledesc/validate.go +++ b/pkg/sql/catalog/tabledesc/validate.go @@ -11,6 +11,8 @@ package tabledesc import ( + "sort" + "github.com/cockroachdb/cockroach/pkg/keys" "github.com/cockroachdb/cockroach/pkg/roachpb" "github.com/cockroachdb/cockroach/pkg/sql/catalog" @@ -536,6 +538,7 @@ func (desc *wrapper) ValidateSelf(vea catalog.ValidationErrorAccumulator) { if hasErrs { return } + desc.validateConstraintIDs(vea) } // Ensure that mutations cannot be queued if a primary key change or @@ -645,6 +648,36 @@ func ValidateOnUpdate(desc catalog.TableDescriptor, errReportFn func(err error)) }) } +func (desc *wrapper) validateConstraintIDs(vea catalog.ValidationErrorAccumulator) { + if !vea.IsActive(ConstraintIDsAddedToTableDescsVersion) { + return + } + if !desc.IsTable() { + return + } + constraints, err := desc.GetConstraintInfo() + if err != nil { + vea.Report(err) + return + } + // Sort the names to get deterministic behaviour, since + // constraints are stored in a map. 
+ orderedNames := make([]string, 0, len(constraints)) + for name := range constraints { + orderedNames = append(orderedNames, name) + } + sort.Strings(orderedNames) + for _, name := range orderedNames { + constraint := constraints[name] + if constraint.ConstraintID == 0 { + vea.Report(errors.AssertionFailedf("constraint id was missing for constraint: %s with name %q", + constraint.Kind, + name)) + + } + } +} + func (desc *wrapper) validateColumns( columnNames map[string]descpb.ColumnID, columnIDs map[descpb.ColumnID]*descpb.ColumnDescriptor, ) error { diff --git a/pkg/sql/catalog/tabledesc/validate_test.go b/pkg/sql/catalog/tabledesc/validate_test.go index 5ad8c83061f7..9ffcd7d29441 100644 --- a/pkg/sql/catalog/tabledesc/validate_test.go +++ b/pkg/sql/catalog/tabledesc/validate_test.go @@ -27,6 +27,7 @@ import ( "github.com/cockroachdb/cockroach/pkg/sql/catalog/internal/validate" "github.com/cockroachdb/cockroach/pkg/sql/catalog/nstree" "github.com/cockroachdb/cockroach/pkg/sql/catalog/typedesc" + "github.com/cockroachdb/cockroach/pkg/sql/privilege" "github.com/cockroachdb/cockroach/pkg/sql/types" "github.com/cockroachdb/cockroach/pkg/testutils" "github.com/cockroachdb/cockroach/pkg/util/leaktest" @@ -128,6 +129,7 @@ var validationMap = []struct { "NewSchemaChangeJobID": {status: iSolemnlySwearThisFieldIsValidated}, "RowLevelTTL": {status: iSolemnlySwearThisFieldIsValidated}, "ExcludeDataFromBackup": {status: thisFieldReferencesNoObjects}, + "NextConstraintID": {status: iSolemnlySwearThisFieldIsValidated}, }, }, { @@ -167,6 +169,7 @@ var validationMap = []struct { "GeoConfig": {status: thisFieldReferencesNoObjects}, "Predicate": {status: iSolemnlySwearThisFieldIsValidated}, "UseDeletePreservingEncoding": {status: thisFieldReferencesNoObjects}, + "ConstraintID": {status: iSolemnlySwearThisFieldIsValidated}, }, }, { @@ -215,16 +218,18 @@ var validationMap = []struct { "OnDelete": {status: thisFieldReferencesNoObjects}, "OnUpdate": {status: thisFieldReferencesNoObjects}, "Match": {status: thisFieldReferencesNoObjects}, + "ConstraintID": {status: iSolemnlySwearThisFieldIsValidated}, }, }, { obj: descpb.UniqueWithoutIndexConstraint{}, fieldMap: map[string]validationStatusInfo{ - "TableID": {status: iSolemnlySwearThisFieldIsValidated}, - "ColumnIDs": {status: iSolemnlySwearThisFieldIsValidated}, - "Name": {status: thisFieldReferencesNoObjects}, - "Validity": {status: thisFieldReferencesNoObjects}, - "Predicate": {status: iSolemnlySwearThisFieldIsValidated}, + "TableID": {status: iSolemnlySwearThisFieldIsValidated}, + "ColumnIDs": {status: iSolemnlySwearThisFieldIsValidated}, + "Name": {status: thisFieldReferencesNoObjects}, + "Validity": {status: thisFieldReferencesNoObjects}, + "Predicate": {status: iSolemnlySwearThisFieldIsValidated}, + "ConstraintID": {status: iSolemnlySwearThisFieldIsValidated}, }, }, { @@ -982,10 +987,12 @@ func TestValidateTableDesc(t *testing.T) { EncodingType: descpb.PrimaryIndexEncoding, Version: descpb.LatestPrimaryIndexDescriptorVersion, ForeignKey: descpb.ForeignKeyReference{Table: 123, Index: 456}, + ConstraintID: 1, }, - NextColumnID: 2, - NextFamilyID: 1, - NextIndexID: 2, + NextColumnID: 2, + NextFamilyID: 1, + NextIndexID: 2, + NextConstraintID: 2, }}, {`at least one of LIST or RANGE partitioning must be used`, // Verify that validatePartitioning is hooked up. 
The rest of these @@ -1176,6 +1183,7 @@ func TestValidateTableDesc(t *testing.T) { KeyColumnDirections: []descpb.IndexDescriptor_Direction{descpb.IndexDescriptor_ASC}, Version: descpb.PrimaryIndexWithStoredColumnsVersion, EncodingType: descpb.PrimaryIndexEncoding, + ConstraintID: 1, }, Indexes: []descpb.IndexDescriptor{ {ID: 2, Name: "sec", KeyColumnIDs: []descpb.ColumnID{2}, @@ -1190,9 +1198,10 @@ func TestValidateTableDesc(t *testing.T) { ColumnNames: []string{"c1", "c2"}, }, }, - NextColumnID: 4, - NextFamilyID: 1, - NextIndexID: 3, + NextColumnID: 4, + NextFamilyID: 1, + NextIndexID: 3, + NextConstraintID: 2, }}, {"", descpb.TableDescriptor{ @@ -1214,6 +1223,7 @@ func TestValidateTableDesc(t *testing.T) { KeyColumnDirections: []descpb.IndexDescriptor_Direction{descpb.IndexDescriptor_ASC}, Version: descpb.PrimaryIndexWithStoredColumnsVersion, EncodingType: descpb.PrimaryIndexEncoding, + ConstraintID: 1, }, Indexes: []descpb.IndexDescriptor{ {ID: 2, Name: "sec", KeyColumnIDs: []descpb.ColumnID{2}, @@ -1240,6 +1250,7 @@ func TestValidateTableDesc(t *testing.T) { KeyColumnDirections: []descpb.IndexDescriptor_Direction{descpb.IndexDescriptor_ASC}, Version: descpb.PrimaryIndexWithStoredColumnsVersion, EncodingType: descpb.PrimaryIndexEncoding, + ConstraintID: 1, }, }, Direction: descpb.DescriptorMutation_ADD, @@ -1270,10 +1281,11 @@ func TestValidateTableDesc(t *testing.T) { State: descpb.DescriptorMutation_DELETE_ONLY, }, }, - NextColumnID: 4, - NextFamilyID: 1, - NextIndexID: 5, - Privileges: descpb.NewBasePrivilegeDescriptor(security.AdminRoleName()), + NextColumnID: 4, + NextFamilyID: 1, + NextIndexID: 5, + NextConstraintID: 2, + Privileges: descpb.NewBasePrivilegeDescriptor(security.AdminRoleName()), }}, {`index "sec" cannot store virtual column "c3"`, descpb.TableDescriptor{ @@ -2454,3 +2466,118 @@ func TestValidatePartitioning(t *testing.T) { }) } } + +func TestValidateConstraintID(t *testing.T) { + defer leaktest.AfterTest(t)() + + tests := []struct { + err string + desc descpb.TableDescriptor + }{ + {`constraint id was missing for constraint: PRIMARY KEY with name \"primary\"`, + descpb.TableDescriptor{ + ID: 2, + ParentID: 1, + Name: "foo", + FormatVersion: descpb.InterleavedFormatVersion, + Columns: []descpb.ColumnDescriptor{ + {ID: 1, Name: "bar"}, + }, + Families: []descpb.ColumnFamilyDescriptor{ + {ID: 0, Name: "primary", ColumnIDs: []descpb.ColumnID{1}, ColumnNames: []string{"bar"}}, + }, + PrimaryIndex: descpb.IndexDescriptor{ + ID: 1, Name: "primary", KeyColumnIDs: []descpb.ColumnID{1}, KeyColumnNames: []string{"bar"}, + KeyColumnDirections: []descpb.IndexDescriptor_Direction{descpb.IndexDescriptor_ASC}}, + NextColumnID: 2, + NextFamilyID: 1, + Privileges: descpb.NewPrivilegeDescriptor( + security.PublicRoleName(), + privilege.SchemaPrivileges, + privilege.List{}, + security.RootUserName()), + }}, + {`constraint id was missing for constraint: UNIQUE with name \"secondary\"`, + descpb.TableDescriptor{ + ID: 2, + ParentID: 1, + Name: "foo", + FormatVersion: descpb.InterleavedFormatVersion, + Columns: []descpb.ColumnDescriptor{ + {ID: 1, Name: "bar"}, + }, + Families: []descpb.ColumnFamilyDescriptor{ + {ID: 0, Name: "primary", ColumnIDs: []descpb.ColumnID{1}, ColumnNames: []string{"bar"}}, + }, + Indexes: []descpb.IndexDescriptor{ + { + ID: 1, Name: "secondary", KeyColumnIDs: []descpb.ColumnID{1}, KeyColumnNames: []string{"bar"}, + KeyColumnDirections: []descpb.IndexDescriptor_Direction{descpb.IndexDescriptor_ASC}, + Unique: true, + }, + }, + NextColumnID: 2, + NextFamilyID: 1, 
+ Privileges: descpb.NewPrivilegeDescriptor( + security.PublicRoleName(), + privilege.SchemaPrivileges, + privilege.List{}, + security.RootUserName()), + }}, + {`constraint id was missing for constraint: UNIQUE with name \"bad\"`, + descpb.TableDescriptor{ + ID: 2, + ParentID: 1, + Name: "foo", + FormatVersion: descpb.InterleavedFormatVersion, + Columns: []descpb.ColumnDescriptor{ + {ID: 1, Name: "bar"}, + }, + Families: []descpb.ColumnFamilyDescriptor{ + {ID: 0, Name: "primary", ColumnIDs: []descpb.ColumnID{1}, ColumnNames: []string{"bar"}}, + }, + UniqueWithoutIndexConstraints: []descpb.UniqueWithoutIndexConstraint{ + {Name: "bad"}, + }, + NextColumnID: 2, + NextFamilyID: 1, + Privileges: descpb.NewPrivilegeDescriptor( + security.PublicRoleName(), + privilege.SchemaPrivileges, + privilege.List{}, + security.RootUserName()), + }}, + {`constraint id was missing for constraint: CHECK with name \"bad\"`, + descpb.TableDescriptor{ + ID: 2, + ParentID: 1, + Name: "foo", + FormatVersion: descpb.InterleavedFormatVersion, + Columns: []descpb.ColumnDescriptor{ + {ID: 1, Name: "bar"}, + }, + Families: []descpb.ColumnFamilyDescriptor{ + {ID: 0, Name: "primary", ColumnIDs: []descpb.ColumnID{1}, ColumnNames: []string{"bar"}}, + }, + Checks: []*descpb.TableDescriptor_CheckConstraint{ + {Name: "bad"}, + }, + NextColumnID: 2, + NextFamilyID: 1, + Privileges: descpb.NewPrivilegeDescriptor( + security.PublicRoleName(), + privilege.SchemaPrivileges, + privilege.List{}, + security.RootUserName()), + }}, + } + for i, test := range tests { + t.Run(test.err, func(t *testing.T) { + desc := NewBuilder(&test.desc).BuildImmutableTable() + err := ValidateConstraints(desc) + if !testutils.IsError(err, test.err) { + t.Errorf(`%d: got "%v" expected "%v"`, i, err, test.err) + } + }) + } +} diff --git a/pkg/sql/comment_on_constraint.go b/pkg/sql/comment_on_constraint.go index 3c99f5a39c03..9094ec727d1d 100644 --- a/pkg/sql/comment_on_constraint.go +++ b/pkg/sql/comment_on_constraint.go @@ -13,9 +13,8 @@ package sql import ( "context" - "github.com/cockroachdb/cockroach/pkg/keys" "github.com/cockroachdb/cockroach/pkg/sql/catalog" - "github.com/cockroachdb/cockroach/pkg/sql/catalog/descpb" + "github.com/cockroachdb/cockroach/pkg/sql/catalog/tabledesc" "github.com/cockroachdb/cockroach/pkg/sql/pgwire/pgcode" "github.com/cockroachdb/cockroach/pkg/sql/pgwire/pgerror" "github.com/cockroachdb/cockroach/pkg/sql/privilege" @@ -27,7 +26,6 @@ import ( type commentOnConstraintNode struct { n *tree.CommentOnConstraint tableDesc catalog.TableDescriptor - oid *tree.DOid metadataUpdater scexec.DescriptorMetadataUpdater } @@ -36,6 +34,10 @@ type commentOnConstraintNode struct { func (p *planner) CommentOnConstraint( ctx context.Context, n *tree.CommentOnConstraint, ) (planNode, error) { + // Block comments on constraint until cluster is updated. 
+ if !p.ExecCfg().Settings.Version.IsActive(ctx, tabledesc.ConstraintIDsAddedToTableDescsVersion) { + return nil, pgerror.Newf(pgcode.FeatureNotSupported, "cannot comment on constraint") + } if err := checkSchemaChangeEnabled( ctx, p.ExecCfg(), @@ -69,12 +71,6 @@ func (n *commentOnConstraintNode) startExec(params runParams) error { if err != nil { return err } - schema, err := params.p.Descriptors().GetImmutableSchemaByID( - params.ctx, params.extendedEvalCtx.Txn, n.tableDesc.GetParentSchemaID(), tree.SchemaLookupFlags{Required: true}, - ) - if err != nil { - return err - } constraintName := string(n.n.Constraint) constraint, ok := info[constraintName] @@ -82,40 +78,21 @@ func (n *commentOnConstraintNode) startExec(params runParams) error { return pgerror.Newf(pgcode.UndefinedObject, "constraint %q of relation %q does not exist", constraintName, n.tableDesc.GetName()) } - - hasher := makeOidHasher() - switch kind := constraint.Kind; kind { - case descpb.ConstraintTypePK: - constraintDesc := constraint.Index - n.oid = hasher.PrimaryKeyConstraintOid(n.tableDesc.GetParentID(), schema.GetName(), n.tableDesc.GetID(), constraintDesc) - case descpb.ConstraintTypeFK: - constraintDesc := constraint.FK - n.oid = hasher.ForeignKeyConstraintOid(n.tableDesc.GetParentID(), schema.GetName(), n.tableDesc.GetID(), constraintDesc) - case descpb.ConstraintTypeUnique: - constraintDesc := constraint.Index.ID - n.oid = hasher.UniqueConstraintOid(n.tableDesc.GetParentID(), schema.GetName(), n.tableDesc.GetID(), constraintDesc) - case descpb.ConstraintTypeCheck: - constraintDesc := constraint.CheckConstraint - n.oid = hasher.CheckConstraintOid(n.tableDesc.GetParentID(), schema.GetName(), n.tableDesc.GetID(), constraintDesc) - - } // Setting the comment to NULL is the // equivalent of deleting the comment. 
if n.n.Comment != nil { - err := n.metadataUpdater.UpsertDescriptorComment( - int64(n.oid.DInt), - 0, - keys.ConstraintCommentType, + err := n.metadataUpdater.UpsertConstraintComment( + n.tableDesc, + constraint.ConstraintID, *n.n.Comment, ) if err != nil { return err } } else { - err := n.metadataUpdater.DeleteDescriptorComment( - int64(n.oid.DInt), - 0, - keys.ConstraintCommentType, + err := n.metadataUpdater.DeleteConstraintComment( + n.tableDesc, + constraint.ConstraintID, ) if err != nil { return err diff --git a/pkg/sql/commenter/BUILD.bazel b/pkg/sql/commenter/BUILD.bazel new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/pkg/sql/create_table.go b/pkg/sql/create_table.go index d9785723d9da..04eca6a63b84 100644 --- a/pkg/sql/create_table.go +++ b/pkg/sql/create_table.go @@ -764,13 +764,14 @@ func ResolveUniqueWithoutIndexConstraint( } uc := descpb.UniqueWithoutIndexConstraint{ - Name: constraintName, - TableID: tbl.ID, - ColumnIDs: columnIDs, - Predicate: predicate, - Validity: validity, - } - + Name: constraintName, + TableID: tbl.ID, + ColumnIDs: columnIDs, + Predicate: predicate, + Validity: validity, + ConstraintID: tbl.NextConstraintID, + } + tbl.NextConstraintID++ if ts == NewTable { tbl.UniqueWithoutIndexConstraints = append(tbl.UniqueWithoutIndexConstraints, uc) } else { @@ -1023,8 +1024,9 @@ func ResolveFK( OnDelete: descpb.ForeignKeyReferenceActionValue[d.Actions.Delete], OnUpdate: descpb.ForeignKeyReferenceActionValue[d.Actions.Update], Match: descpb.CompositeKeyMatchMethodValue[d.Match], + ConstraintID: tbl.NextConstraintID, } - + tbl.NextConstraintID++ if ts == NewTable { tbl.OutboundFKs = append(tbl.OutboundFKs, ref) target.InboundFKs = append(target.InboundFKs, ref) diff --git a/pkg/sql/descmetadata/BUILD.bazel b/pkg/sql/descmetadata/BUILD.bazel index 264d7ec8f4b6..7714a17da1ae 100644 --- a/pkg/sql/descmetadata/BUILD.bazel +++ b/pkg/sql/descmetadata/BUILD.bazel @@ -39,7 +39,6 @@ go_library( "//pkg/sql/catalog/descpb", "//pkg/sql/catalog/descs", "//pkg/sql/schemachanger/scexec", - "//pkg/sql/schemachanger/scpb", "//pkg/sql/sem/tree", "//pkg/sql/sessiondata", "//pkg/sql/sessiondatapb", diff --git a/pkg/sql/descmetadata/metadata_updater.go b/pkg/sql/descmetadata/metadata_updater.go index 09a5e3c38cbc..1b8f2f19ca63 100644 --- a/pkg/sql/descmetadata/metadata_updater.go +++ b/pkg/sql/descmetadata/metadata_updater.go @@ -20,43 +20,17 @@ import ( "github.com/cockroachdb/cockroach/pkg/sql/catalog" "github.com/cockroachdb/cockroach/pkg/sql/catalog/descpb" "github.com/cockroachdb/cockroach/pkg/sql/catalog/descs" - "github.com/cockroachdb/cockroach/pkg/sql/schemachanger/scpb" "github.com/cockroachdb/cockroach/pkg/sql/sem/tree" "github.com/cockroachdb/cockroach/pkg/sql/sessiondata" "github.com/cockroachdb/cockroach/pkg/sql/sessioninit" "github.com/cockroachdb/cockroach/pkg/sql/sqlutil" ) -// ConstraintOidBuilder constructs an OID based on constraint information. -type ConstraintOidBuilder interface { - // ForeignKeyConstraintOid generates a foreign key OID. - ForeignKeyConstraintOid( - dbID descpb.ID, scName string, tableID descpb.ID, fk *descpb.ForeignKeyConstraint, - ) *tree.DOid - // UniqueWithoutIndexConstraintOid generates a unique without index constraint OID. - UniqueWithoutIndexConstraintOid( - dbID descpb.ID, scName string, tableID descpb.ID, uc *descpb.UniqueWithoutIndexConstraint, - ) *tree.DOid - // UniqueConstraintOid generates a unique with index constraint OID. 
-	UniqueConstraintOid(
-		dbID descpb.ID, scName string, tableID descpb.ID, indexID descpb.IndexID,
-	) *tree.DOid
-	// PrimaryKeyConstraintOid generates a primary key constraint OID.
-	PrimaryKeyConstraintOid(
-		dbID descpb.ID, scName string, tableID descpb.ID, pkey *descpb.IndexDescriptor,
-	) *tree.DOid
-	// CheckConstraintOid generates check constraint OID.
-	CheckConstraintOid(
-		dbID descpb.ID, scName string, tableID descpb.ID, check *descpb.TableDescriptor_CheckConstraint,
-	) *tree.DOid
-}
-
-// metadataUpdater which implements scexec.DescriptorMetadataUpdater that is used to update
-// metaadata such as comments on different schema objects.
+// metadataUpdater implements scexec.DescriptorMetadataUpdater and is used to
+// update comments on different schema objects.
 type metadataUpdater struct {
 	txn               *kv.Txn
 	ie                sqlutil.InternalExecutor
-	oidBuilder        ConstraintOidBuilder
 	collectionFactory *descs.CollectionFactory
 	cacheEnabled      bool
 }
@@ -95,90 +69,18 @@ func (mu metadataUpdater) DeleteDescriptorComment(
 	return err
 }
 
-func (mu metadataUpdater) oidFromConstraint(
-	desc catalog.TableDescriptor,
-	schemaName string,
-	constraintName string,
-	constraintType scpb.ConstraintType,
-) *tree.DOid {
-	switch constraintType {
-	case scpb.ConstraintType_FK:
-		for _, fk := range desc.AllActiveAndInactiveForeignKeys() {
-			if fk.Name == constraintName {
-				return mu.oidBuilder.ForeignKeyConstraintOid(
-					desc.GetParentID(),
-					schemaName,
-					desc.GetID(),
-					fk,
-				)
-			}
-		}
-	case scpb.ConstraintType_PrimaryKey:
-		for _, idx := range desc.AllIndexes() {
-			if idx.GetName() == constraintName {
-				mu.oidBuilder.UniqueConstraintOid(
-					desc.GetParentID(),
-					schemaName,
-					desc.GetID(),
-					idx.GetID(),
-				)
-			}
-		}
-	case scpb.ConstraintType_UniqueWithoutIndex:
-		for _, unique := range desc.GetUniqueWithoutIndexConstraints() {
-			if unique.GetName() == constraintName {
-				return mu.oidBuilder.UniqueWithoutIndexConstraintOid(
-					desc.GetParentID(),
-					schemaName,
-					desc.GetID(),
-					&unique,
-				)
-			}
-		}
-	case scpb.ConstraintType_Check:
-		for _, check := range desc.GetChecks() {
-			if check.Name == constraintName {
-				return mu.oidBuilder.CheckConstraintOid(
-					desc.GetParentID(),
-					schemaName,
-					desc.GetID(),
-					check,
-				)
-			}
-		}
-	}
-	return nil
-}
-
 // UpsertConstraintComment implements scexec.DescriptorMetadataUpdater.
 func (mu metadataUpdater) UpsertConstraintComment(
-	desc catalog.TableDescriptor,
-	schemaName string,
-	constraintName string,
-	constraintType scpb.ConstraintType,
-	comment string,
+	desc catalog.TableDescriptor, constraintID descpb.ConstraintID, comment string,
 ) error {
-	oid := mu.oidFromConstraint(desc, schemaName, constraintName, constraintType)
-	// Constraint was not found.
-	if oid == nil {
-		return nil
-	}
-	return mu.UpsertDescriptorComment(int64(oid.DInt), 0, keys.ConstraintCommentType, comment)
+	return mu.UpsertDescriptorComment(int64(desc.GetID()), int64(constraintID), keys.ConstraintCommentType, comment)
 }
 
 // DeleteConstraintComment implements scexec.DescriptorMetadataUpdater.
 func (mu metadataUpdater) DeleteConstraintComment(
-	desc catalog.TableDescriptor,
-	schemaName string,
-	constraintName string,
-	constraintType scpb.ConstraintType,
+	desc catalog.TableDescriptor, constraintID descpb.ConstraintID,
 ) error {
-	oid := mu.oidFromConstraint(desc, schemaName, constraintName, constraintType)
-	// Constraint was not found.
- if oid == nil { - return nil - } - return mu.DeleteDescriptorComment(int64(oid.DInt), 0, keys.ConstraintCommentType) + return mu.DeleteDescriptorComment(int64(desc.GetID()), int64(constraintID), keys.ConstraintCommentType) } // DeleteDatabaseRoleSettings implement scexec.DescriptorMetaDataUpdater. diff --git a/pkg/sql/descmetadata/metadata_updater_factory.go b/pkg/sql/descmetadata/metadata_updater_factory.go index a8ab68ef0f35..19b7fb78a573 100644 --- a/pkg/sql/descmetadata/metadata_updater_factory.go +++ b/pkg/sql/descmetadata/metadata_updater_factory.go @@ -23,30 +23,24 @@ import ( "github.com/cockroachdb/cockroach/pkg/sql/sqlutil" ) -// MakeConstraintOidBuilderFn creates a ConstraintOidBuilder. -type MakeConstraintOidBuilderFn func() ConstraintOidBuilder - // MetadataUpdaterFactory used to construct a commenter.DescriptorMetadataUpdater, which // can be used to update comments on schema objects. type MetadataUpdaterFactory struct { - ieFactory sqlutil.SessionBoundInternalExecutorFactory - makeConstraintOidBuilder MakeConstraintOidBuilderFn - collectionFactory *descs.CollectionFactory - settings *settings.Values + ieFactory sqlutil.SessionBoundInternalExecutorFactory + collectionFactory *descs.CollectionFactory + settings *settings.Values } // NewMetadataUpdaterFactory creates a new comment updater factory. func NewMetadataUpdaterFactory( ieFactory sqlutil.SessionBoundInternalExecutorFactory, - makeConstraintOidBuilder MakeConstraintOidBuilderFn, collectionFactory *descs.CollectionFactory, settings *settings.Values, ) scexec.DescriptorMetadataUpdaterFactory { return MetadataUpdaterFactory{ - ieFactory: ieFactory, - makeConstraintOidBuilder: makeConstraintOidBuilder, - collectionFactory: collectionFactory, - settings: settings, + ieFactory: ieFactory, + collectionFactory: collectionFactory, + settings: settings, } } @@ -65,7 +59,6 @@ func (mf MetadataUpdaterFactory) NewMetadataUpdater( return metadataUpdater{ txn: txn, ie: mf.ieFactory(ctx, modifiedSessionData), - oidBuilder: mf.makeConstraintOidBuilder(), collectionFactory: mf.collectionFactory, cacheEnabled: sessioninit.CacheEnabled.Get(mf.settings), } diff --git a/pkg/sql/doctor/doctor_test.go b/pkg/sql/doctor/doctor_test.go index fe2fad3cf798..4fe917ccc158 100644 --- a/pkg/sql/doctor/doctor_test.go +++ b/pkg/sql/doctor/doctor_test.go @@ -42,7 +42,8 @@ var validTableDesc = &descpb.Descriptor{ Columns: []descpb.ColumnDescriptor{ {Name: "col", ID: 1, Type: types.Int}, }, - NextColumnID: 2, + NextColumnID: 2, + NextConstraintID: 2, Families: []descpb.ColumnFamilyDescriptor{ {ID: 0, Name: "f", ColumnNames: []string{"col"}, ColumnIDs: []descpb.ColumnID{1}, DefaultColumnID: 1}, }, @@ -56,6 +57,7 @@ var validTableDesc = &descpb.Descriptor{ KeyColumnIDs: []descpb.ColumnID{1}, Version: descpb.PrimaryIndexWithStoredColumnsVersion, EncodingType: descpb.PrimaryIndexEncoding, + ConstraintID: 1, }, NextIndexID: 2, Privileges: descpb.NewCustomSuperuserPrivilegeDescriptor( diff --git a/pkg/sql/indexbackfiller_test.go b/pkg/sql/indexbackfiller_test.go index 1e9556bb0bd4..7d43cd3a4a35 100644 --- a/pkg/sql/indexbackfiller_test.go +++ b/pkg/sql/indexbackfiller_test.go @@ -229,10 +229,11 @@ INSERT INTO foo VALUES (1, 2), (2, 3), (3, 4); }, setupDesc: func(t *testing.T, mut *tabledesc.Mutable) { indexToBackfill := descpb.IndexDescriptor{ - Name: "virtual_column_backed_index", - ID: mut.NextIndexID, - Unique: true, - Version: descpb.LatestNonPrimaryIndexDescriptorVersion, + Name: "virtual_column_backed_index", + ID: mut.NextIndexID, + ConstraintID: 
mut.NextConstraintID, + Unique: true, + Version: descpb.LatestNonPrimaryIndexDescriptorVersion, KeyColumnNames: []string{ mut.Columns[2].Name, }, @@ -249,6 +250,7 @@ INSERT INTO foo VALUES (1, 2), (2, 3), (3, 4); EncodingType: descpb.SecondaryIndexEncoding, } mut.NextIndexID++ + mut.NextConstraintID++ require.NoError(t, mut.AddIndexMutation( &indexToBackfill, descpb.DescriptorMutation_ADD, )) @@ -306,10 +308,11 @@ INSERT INTO foo VALUES (1), (10), (100); computedColumnNotInPrimaryIndex.Name) indexToBackfill := descpb.IndexDescriptor{ - Name: "new_primary_index", - ID: mut.NextIndexID, - Unique: true, - Version: descpb.LatestNonPrimaryIndexDescriptorVersion, + Name: "new_primary_index", + ID: mut.NextIndexID, + ConstraintID: mut.NextConstraintID, + Unique: true, + Version: descpb.LatestNonPrimaryIndexDescriptorVersion, KeyColumnNames: []string{ mut.Columns[0].Name, }, @@ -332,6 +335,7 @@ INSERT INTO foo VALUES (1), (10), (100); EncodingType: descpb.PrimaryIndexEncoding, } mut.NextIndexID++ + mut.NextConstraintID++ require.NoError(t, mut.AddIndexMutation( &indexToBackfill, descpb.DescriptorMutation_ADD, )) diff --git a/pkg/sql/mem_limit_test.go b/pkg/sql/mem_limit_test.go index 46ecc22da974..095113637e8a 100644 --- a/pkg/sql/mem_limit_test.go +++ b/pkg/sql/mem_limit_test.go @@ -175,10 +175,6 @@ func TestStreamerTightBudget(t *testing.T) { _, err = db.Exec(fmt.Sprintf("SET distsql_workmem = '%dB'", blobSize)) require.NoError(t, err) - // TODO(yuzefovich): remove this once the streamer is enabled by default. - _, err = db.Exec("SET CLUSTER SETTING sql.distsql.use_streamer.enabled = true;") - require.NoError(t, err) - // Perform an index join to read the blobs. query := "EXPLAIN ANALYZE SELECT sum(length(blob)) FROM t@t_k_idx WHERE k = 1" maximumMemoryUsageRegex := regexp.MustCompile(`maximum memory usage: (\d+\.\d+) MiB`) diff --git a/pkg/sql/opt/exec/execbuilder/testdata/show_trace_nonmetamorphic b/pkg/sql/opt/exec/execbuilder/testdata/show_trace_nonmetamorphic index d7dbc6774fff..985016559a29 100644 --- a/pkg/sql/opt/exec/execbuilder/testdata/show_trace_nonmetamorphic +++ b/pkg/sql/opt/exec/execbuilder/testdata/show_trace_nonmetamorphic @@ -40,7 +40,7 @@ WHERE message NOT LIKE '%Z/%' AND operation != 'dist sender send' ---- batch flow coordinator CPut /NamespaceTable/30/1/106/107/"kv"/4/1 -> 108 -batch flow coordinator CPut /Table/3/1/108/2/1 -> table: parent_id:106 unexposed_parent_schema_id:107 columns: nullable:false hidden:false inaccessible:false generated_as_identity_type:NOT_IDENTITY_COLUMN virtual:false pg_attribute_num:0 alter_column_type_in_progress:false system_column_kind:NONE > columns: nullable:true hidden:false inaccessible:false generated_as_identity_type:NOT_IDENTITY_COLUMN virtual:false pg_attribute_num:0 alter_column_type_in_progress:false system_column_kind:NONE > next_column_id:3 families: next_family_id:1 primary_index: interleave:<> partitioning: type:FORWARD created_explicitly:false encoding_type:1 sharded: disabled:false geo_config:<> predicate:"" use_delete_preserving_encoding:false > next_index_id:2 privileges: users: users: owner_proto:"root" version:2 > next_mutation_id:1 format_version:3 state:PUBLIC offline_reason:"" view_query:"" is_materialized_view:false new_schema_change_job_id:0 drop_time:0 replacement_of: > audit_mode:DISABLED drop_job_id:0 create_query:"" create_as_of_time:<> temporary:false partition_all_by:false exclude_data_from_backup:false > +batch flow coordinator CPut /Table/3/1/108/2/1 -> table: parent_id:106 unexposed_parent_schema_id:107 
columns: nullable:false hidden:false inaccessible:false generated_as_identity_type:NOT_IDENTITY_COLUMN virtual:false pg_attribute_num:0 alter_column_type_in_progress:false system_column_kind:NONE > columns: nullable:true hidden:false inaccessible:false generated_as_identity_type:NOT_IDENTITY_COLUMN virtual:false pg_attribute_num:0 alter_column_type_in_progress:false system_column_kind:NONE > next_column_id:3 families: next_family_id:1 primary_index: interleave:<> partitioning: type:FORWARD created_explicitly:false encoding_type:1 sharded: disabled:false geo_config:<> predicate:"" use_delete_preserving_encoding:false constraint_id:1 > next_index_id:2 privileges: users: users: owner_proto:"root" version:2 > next_mutation_id:1 format_version:3 state:PUBLIC offline_reason:"" view_query:"" is_materialized_view:false new_schema_change_job_id:0 drop_time:0 replacement_of: > audit_mode:DISABLED drop_job_id:0 create_query:"" create_as_of_time:<> temporary:false partition_all_by:false exclude_data_from_backup:false next_constraint_id:2 > sql query rows affected: 0 # We avoid using the full trace output, because that would make the @@ -66,7 +66,7 @@ WHERE message NOT LIKE '%Z/%' AND message NOT LIKE 'querying next range at%' AND tag NOT LIKE '%IndexBackfiller%' AND operation != 'dist sender send' ---- -batch flow coordinator Put /Table/3/1/108/2/1 -> table: parent_id:106 unexposed_parent_schema_id:107 columns: nullable:false hidden:false inaccessible:false generated_as_identity_type:NOT_IDENTITY_COLUMN virtual:false pg_attribute_num:0 alter_column_type_in_progress:false system_column_kind:NONE > columns: nullable:true hidden:false inaccessible:false generated_as_identity_type:NOT_IDENTITY_COLUMN virtual:false pg_attribute_num:0 alter_column_type_in_progress:false system_column_kind:NONE > next_column_id:3 families: next_family_id:1 primary_index: interleave:<> partitioning: type:FORWARD created_explicitly:false encoding_type:1 sharded: disabled:false geo_config:<> predicate:"" use_delete_preserving_encoding:false > next_index_id:3 privileges: users: users: owner_proto:"root" version:2 > mutations: interleave:<> partitioning: type:FORWARD created_explicitly:true encoding_type:0 sharded: disabled:false geo_config:<> predicate:"" use_delete_preserving_encoding:false > state:DELETE_ONLY direction:ADD mutation_id:1 rollback:false > next_mutation_id:2 format_version:3 state:PUBLIC offline_reason:"" view_query:"" is_materialized_view:false mutationJobs:<...> new_schema_change_job_id:0 drop_time:0 replacement_of: > audit_mode:DISABLED drop_job_id:0 create_query:"" create_as_of_time:<...> temporary:false partition_all_by:false exclude_data_from_backup:false > +batch flow coordinator Put /Table/3/1/108/2/1 -> table: parent_id:106 unexposed_parent_schema_id:107 columns: nullable:false hidden:false inaccessible:false generated_as_identity_type:NOT_IDENTITY_COLUMN virtual:false pg_attribute_num:0 alter_column_type_in_progress:false system_column_kind:NONE > columns: nullable:true hidden:false inaccessible:false generated_as_identity_type:NOT_IDENTITY_COLUMN virtual:false pg_attribute_num:0 alter_column_type_in_progress:false system_column_kind:NONE > next_column_id:3 families: next_family_id:1 primary_index: interleave:<> partitioning: type:FORWARD created_explicitly:false encoding_type:1 sharded: disabled:false geo_config:<> predicate:"" use_delete_preserving_encoding:false constraint_id:1 > next_index_id:3 privileges: users: users: owner_proto:"root" version:2 > mutations: interleave:<> partitioning: 
type:FORWARD created_explicitly:true encoding_type:0 sharded: disabled:false geo_config:<> predicate:"" use_delete_preserving_encoding:false constraint_id:2 > state:DELETE_ONLY direction:ADD mutation_id:1 rollback:false > next_mutation_id:2 format_version:3 state:PUBLIC offline_reason:"" view_query:"" is_materialized_view:false mutationJobs:<...> new_schema_change_job_id:0 drop_time:0 replacement_of: > audit_mode:DISABLED drop_job_id:0 create_query:"" create_as_of_time:<...> temporary:false partition_all_by:false exclude_data_from_backup:false next_constraint_id:3 > sql query rows affected: 0 statement ok @@ -121,7 +121,7 @@ WHERE message NOT LIKE '%Z/%' AND operation != 'dist sender send' ---- batch flow coordinator CPut /NamespaceTable/30/1/106/107/"kv2"/4/1 -> 109 -batch flow coordinator CPut /Table/3/1/109/2/1 -> table: parent_id:106 unexposed_parent_schema_id:107 columns: nullable:true hidden:false inaccessible:false generated_as_identity_type:NOT_IDENTITY_COLUMN virtual:false pg_attribute_num:0 alter_column_type_in_progress:false system_column_kind:NONE > columns: nullable:true hidden:false inaccessible:false generated_as_identity_type:NOT_IDENTITY_COLUMN virtual:false pg_attribute_num:0 alter_column_type_in_progress:false system_column_kind:NONE > columns: nullable:false default_expr:"unique_rowid()" hidden:true inaccessible:false generated_as_identity_type:NOT_IDENTITY_COLUMN virtual:false pg_attribute_num:0 alter_column_type_in_progress:false system_column_kind:NONE > next_column_id:4 families: next_family_id:1 primary_index: interleave:<> partitioning: type:FORWARD created_explicitly:false encoding_type:1 sharded: disabled:false geo_config:<> predicate:"" use_delete_preserving_encoding:false > next_index_id:2 privileges: users: users: owner_proto:"root" version:2 > next_mutation_id:1 format_version:3 state:ADD offline_reason:"" view_query:"" is_materialized_view:false new_schema_change_job_id:0 drop_time:0 replacement_of: > audit_mode:DISABLED drop_job_id:0 create_query:"TABLE t.public.kv" create_as_of_time:<> temporary:false partition_all_by:false exclude_data_from_backup:false > +batch flow coordinator CPut /Table/3/1/109/2/1 -> table: parent_id:106 unexposed_parent_schema_id:107 columns: nullable:true hidden:false inaccessible:false generated_as_identity_type:NOT_IDENTITY_COLUMN virtual:false pg_attribute_num:0 alter_column_type_in_progress:false system_column_kind:NONE > columns: nullable:true hidden:false inaccessible:false generated_as_identity_type:NOT_IDENTITY_COLUMN virtual:false pg_attribute_num:0 alter_column_type_in_progress:false system_column_kind:NONE > columns: nullable:false default_expr:"unique_rowid()" hidden:true inaccessible:false generated_as_identity_type:NOT_IDENTITY_COLUMN virtual:false pg_attribute_num:0 alter_column_type_in_progress:false system_column_kind:NONE > next_column_id:4 families: next_family_id:1 primary_index: interleave:<> partitioning: type:FORWARD created_explicitly:false encoding_type:1 sharded: disabled:false geo_config:<> predicate:"" use_delete_preserving_encoding:false constraint_id:1 > next_index_id:2 privileges: users: users: owner_proto:"root" version:2 > next_mutation_id:1 format_version:3 state:ADD offline_reason:"" view_query:"" is_materialized_view:false new_schema_change_job_id:0 drop_time:0 replacement_of: > audit_mode:DISABLED drop_job_id:0 create_query:"TABLE t.public.kv" create_as_of_time:<> temporary:false partition_all_by:false exclude_data_from_backup:false next_constraint_id:2 > sql query rows affected: 0 statement ok 
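
The traces above are the observable effect of constraint-ID initialization at table-creation time: a fresh table serializes constraint_id:1 on its primary index and next_constraint_id:2 at the table level. A minimal Go sketch of that invariant, using illustrative types rather than the real descpb definitions:

package main

import "fmt"

// Illustrative stand-in for the descriptor fields in the trace above; the
// real definitions live in pkg/sql/catalog/descpb. A freshly created table
// hands ID 1 to its primary-key constraint and records 2 as the next ID to
// allocate, which is exactly the "constraint_id:1 ... next_constraint_id:2"
// pair serialized in the CPut above.
type indexDesc struct{ ConstraintID uint32 }

type tableDesc struct {
	PrimaryIndex     indexDesc
	NextConstraintID uint32
}

func newTable() tableDesc {
	td := tableDesc{NextConstraintID: 1}
	td.PrimaryIndex.ConstraintID = td.NextConstraintID
	td.NextConstraintID++
	return td
}

func main() {
	td := newTable()
	fmt.Printf("constraint_id:%d next_constraint_id:%d\n",
		td.PrimaryIndex.ConstraintID, td.NextConstraintID)
}

The later hunks show the same counter advancing (next_constraint_id:3, then :5) as unique index mutations allocate further IDs.
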
@@ -170,7 +170,7 @@ WHERE message NOT LIKE '%Z/%' AND message NOT LIKE 'querying next range at%' AND operation != 'dist sender send' ---- batch flow coordinator Del /NamespaceTable/30/1/106/107/"kv2"/4/1 -batch flow coordinator Put /Table/3/1/109/2/1 -> table: parent_id:106 unexposed_parent_schema_id:107 columns: nullable:true hidden:false inaccessible:false generated_as_identity_type:NOT_IDENTITY_COLUMN virtual:false pg_attribute_num:0 alter_column_type_in_progress:false system_column_kind:NONE > columns: nullable:true hidden:false inaccessible:false generated_as_identity_type:NOT_IDENTITY_COLUMN virtual:false pg_attribute_num:0 alter_column_type_in_progress:false system_column_kind:NONE > columns: nullable:false default_expr:"unique_rowid()" hidden:true inaccessible:false generated_as_identity_type:NOT_IDENTITY_COLUMN virtual:false pg_attribute_num:0 alter_column_type_in_progress:false system_column_kind:NONE > next_column_id:4 families: next_family_id:1 primary_index: interleave:<> partitioning: type:FORWARD created_explicitly:false encoding_type:1 sharded: disabled:false geo_config:<> predicate:"" use_delete_preserving_encoding:false > next_index_id:2 privileges: users: users: owner_proto:"root" version:2 > next_mutation_id:1 format_version:3 state:DROP offline_reason:"" view_query:"" is_materialized_view:false new_schema_change_job_id:0 drop_time:... replacement_of: > audit_mode:DISABLED drop_job_id:0 create_query:"TABLE t.public.kv" create_as_of_time:<...> temporary:false partition_all_by:false exclude_data_from_backup:false > +batch flow coordinator Put /Table/3/1/109/2/1 -> table: parent_id:106 unexposed_parent_schema_id:107 columns: nullable:true hidden:false inaccessible:false generated_as_identity_type:NOT_IDENTITY_COLUMN virtual:false pg_attribute_num:0 alter_column_type_in_progress:false system_column_kind:NONE > columns: nullable:true hidden:false inaccessible:false generated_as_identity_type:NOT_IDENTITY_COLUMN virtual:false pg_attribute_num:0 alter_column_type_in_progress:false system_column_kind:NONE > columns: nullable:false default_expr:"unique_rowid()" hidden:true inaccessible:false generated_as_identity_type:NOT_IDENTITY_COLUMN virtual:false pg_attribute_num:0 alter_column_type_in_progress:false system_column_kind:NONE > next_column_id:4 families: next_family_id:1 primary_index: interleave:<> partitioning: type:FORWARD created_explicitly:false encoding_type:1 sharded: disabled:false geo_config:<> predicate:"" use_delete_preserving_encoding:false constraint_id:1 > next_index_id:2 privileges: users: users: owner_proto:"root" version:2 > next_mutation_id:1 format_version:3 state:DROP offline_reason:"" view_query:"" is_materialized_view:false new_schema_change_job_id:0 drop_time:... 
replacement_of: > audit_mode:DISABLED drop_job_id:0 create_query:"TABLE t.public.kv" create_as_of_time:<...> temporary:false partition_all_by:false exclude_data_from_backup:false next_constraint_id:2 > sql query rows affected: 0 statement ok @@ -204,7 +204,7 @@ WHERE message NOT LIKE '%Z/%' AND message NOT LIKE 'querying next range at%' AND tag NOT LIKE '%IndexBackfiller%' AND operation != 'dist sender send' ---- -batch flow coordinator Put /Table/3/1/108/2/1 -> table: parent_id:106 unexposed_parent_schema_id:107 columns: nullable:false hidden:false inaccessible:false generated_as_identity_type:NOT_IDENTITY_COLUMN virtual:false pg_attribute_num:0 alter_column_type_in_progress:false system_column_kind:NONE > columns: nullable:true hidden:false inaccessible:false generated_as_identity_type:NOT_IDENTITY_COLUMN virtual:false pg_attribute_num:0 alter_column_type_in_progress:false system_column_kind:NONE > next_column_id:3 families: next_family_id:1 primary_index: interleave:<> partitioning: type:FORWARD created_explicitly:false encoding_type:1 sharded: disabled:false geo_config:<> predicate:"" use_delete_preserving_encoding:false > next_index_id:3 privileges: users: users: owner_proto:"root" version:2 > mutations: interleave:<> partitioning: type:FORWARD created_explicitly:true encoding_type:0 sharded: disabled:false geo_config:<> predicate:"" use_delete_preserving_encoding:false > state:DELETE_AND_WRITE_ONLY direction:DROP mutation_id:2 rollback:false > next_mutation_id:3 format_version:3 state:PUBLIC offline_reason:"" view_query:"" is_materialized_view:false mutationJobs:<...> new_schema_change_job_id:0 drop_time:0 replacement_of: > audit_mode:DISABLED drop_job_id:0 create_query:"" create_as_of_time:<...> temporary:false partition_all_by:false exclude_data_from_backup:false > +batch flow coordinator Put /Table/3/1/108/2/1 -> table: parent_id:106 unexposed_parent_schema_id:107 columns: nullable:false hidden:false inaccessible:false generated_as_identity_type:NOT_IDENTITY_COLUMN virtual:false pg_attribute_num:0 alter_column_type_in_progress:false system_column_kind:NONE > columns: nullable:true hidden:false inaccessible:false generated_as_identity_type:NOT_IDENTITY_COLUMN virtual:false pg_attribute_num:0 alter_column_type_in_progress:false system_column_kind:NONE > next_column_id:3 families: next_family_id:1 primary_index: interleave:<> partitioning: type:FORWARD created_explicitly:false encoding_type:1 sharded: disabled:false geo_config:<> predicate:"" use_delete_preserving_encoding:false constraint_id:1 > next_index_id:3 privileges: users: users: owner_proto:"root" version:2 > mutations: interleave:<> partitioning: type:FORWARD created_explicitly:true encoding_type:0 sharded: disabled:false geo_config:<> predicate:"" use_delete_preserving_encoding:false constraint_id:4 > state:DELETE_AND_WRITE_ONLY direction:DROP mutation_id:2 rollback:false > next_mutation_id:3 format_version:3 state:PUBLIC offline_reason:"" view_query:"" is_materialized_view:false mutationJobs:<...> new_schema_change_job_id:0 drop_time:0 replacement_of: > audit_mode:DISABLED drop_job_id:0 create_query:"" create_as_of_time:<...> temporary:false partition_all_by:false exclude_data_from_backup:false next_constraint_id:5 > sql query rows affected: 0 statement ok @@ -222,7 +222,7 @@ WHERE message NOT LIKE '%Z/%' AND message NOT LIKE 'querying next range at%' AND operation != 'dist sender send' ---- batch flow coordinator Del /NamespaceTable/30/1/106/107/"kv"/4/1 -batch flow coordinator Put /Table/3/1/108/2/1 -> table: 
parent_id:106 unexposed_parent_schema_id:107 columns: nullable:false hidden:false inaccessible:false generated_as_identity_type:NOT_IDENTITY_COLUMN virtual:false pg_attribute_num:0 alter_column_type_in_progress:false system_column_kind:NONE > columns: nullable:true hidden:false inaccessible:false generated_as_identity_type:NOT_IDENTITY_COLUMN virtual:false pg_attribute_num:0 alter_column_type_in_progress:false system_column_kind:NONE > next_column_id:3 families: next_family_id:1 primary_index: interleave:<> partitioning: type:FORWARD created_explicitly:false encoding_type:1 sharded: disabled:false geo_config:<> predicate:"" use_delete_preserving_encoding:false > next_index_id:3 privileges: users: users: owner_proto:"root" version:2 > next_mutation_id:3 format_version:3 state:DROP offline_reason:"" view_query:"" is_materialized_view:false new_schema_change_job_id:0 drop_time:... replacement_of: > audit_mode:DISABLED drop_job_id:0 create_query:"" create_as_of_time:<...> temporary:false partition_all_by:false exclude_data_from_backup:false > +batch flow coordinator Put /Table/3/1/108/2/1 -> table: parent_id:106 unexposed_parent_schema_id:107 columns: nullable:false hidden:false inaccessible:false generated_as_identity_type:NOT_IDENTITY_COLUMN virtual:false pg_attribute_num:0 alter_column_type_in_progress:false system_column_kind:NONE > columns: nullable:true hidden:false inaccessible:false generated_as_identity_type:NOT_IDENTITY_COLUMN virtual:false pg_attribute_num:0 alter_column_type_in_progress:false system_column_kind:NONE > next_column_id:3 families: next_family_id:1 primary_index: interleave:<> partitioning: type:FORWARD created_explicitly:false encoding_type:1 sharded: disabled:false geo_config:<> predicate:"" use_delete_preserving_encoding:false constraint_id:1 > next_index_id:3 privileges: users: users: owner_proto:"root" version:2 > next_mutation_id:3 format_version:3 state:DROP offline_reason:"" view_query:"" is_materialized_view:false new_schema_change_job_id:0 drop_time:... 
replacement_of: > audit_mode:DISABLED drop_job_id:0 create_query:"" create_as_of_time:<...> temporary:false partition_all_by:false exclude_data_from_backup:false next_constraint_id:5 > sql query rows affected: 0 # Check that session tracing does not inhibit the fast path for inserts & diff --git a/pkg/sql/pg_catalog.go b/pkg/sql/pg_catalog.go index 28473cfd268b..349f1bfee906 100644 --- a/pkg/sql/pg_catalog.go +++ b/pkg/sql/pg_catalog.go @@ -32,7 +32,6 @@ import ( "github.com/cockroachdb/cockroach/pkg/sql/catalog/schemaexpr" "github.com/cockroachdb/cockroach/pkg/sql/catalog/tabledesc" "github.com/cockroachdb/cockroach/pkg/sql/catalog/typedesc" - "github.com/cockroachdb/cockroach/pkg/sql/descmetadata" "github.com/cockroachdb/cockroach/pkg/sql/pgwire/pgcode" "github.com/cockroachdb/cockroach/pkg/sql/pgwire/pgerror" "github.com/cockroachdb/cockroach/pkg/sql/privilege" @@ -1541,7 +1540,36 @@ https://www.postgresql.org/docs/9.5/catalog-pg-description.html`, objID = tree.NewDOid(tree.MustBeDInt(objID)) classOid = tree.NewDOid(catconstants.PgCatalogClassTableID) case keys.ConstraintCommentType: - objID = tree.NewDOid(tree.MustBeDInt(objID)) + tableDesc, err := p.Descriptors().GetImmutableTableByID( + ctx, + p.txn, + descpb.ID(tree.MustBeDInt(objID)), + tree.ObjectLookupFlagsWithRequiredTableKind(tree.ResolveRequireTableDesc)) + if err != nil { + return err + } + schema, err := p.Descriptors().GetImmutableSchemaByID( + ctx, + p.txn, + tableDesc.GetParentSchemaID(), + tree.CommonLookupFlags{ + Required: true, + }) + if err != nil { + return err + } + constraints, err := tableDesc.GetConstraintInfo() + if err != nil { + return err + } + var constraint descpb.ConstraintDetail + for _, constraintToCheck := range constraints { + if constraintToCheck.ConstraintID == descpb.ConstraintID(tree.MustBeDInt(objSubID)) { + constraint = constraintToCheck + break + } + } + objID = getOIDFromConstraint(constraint, dbContext.GetID(), schema.GetName(), tableDesc) objSubID = tree.DZero classOid = tree.NewDOid(catconstants.PgCatalogConstraintTableID) case keys.IndexCommentType: @@ -1563,6 +1591,55 @@ https://www.postgresql.org/docs/9.5/catalog-pg-description.html`, }, } +func getOIDFromConstraint( + constraint descpb.ConstraintDetail, + dbID descpb.ID, + schemaName string, + tableDesc catalog.TableDescriptor, +) *tree.DOid { + hasher := makeOidHasher() + tableID := tableDesc.GetID() + var oid *tree.DOid + if constraint.CheckConstraint != nil { + oid = hasher.CheckConstraintOid( + dbID, + schemaName, + tableID, + constraint.CheckConstraint) + } else if constraint.FK != nil { + oid = hasher.ForeignKeyConstraintOid( + dbID, + schemaName, + tableID, + constraint.FK, + ) + } else if constraint.UniqueWithoutIndexConstraint != nil { + oid = hasher.UniqueWithoutIndexConstraintOid( + dbID, + schemaName, + tableID, + constraint.UniqueWithoutIndexConstraint, + ) + } else if constraint.Index != nil { + if constraint.Index.ID == tableDesc.GetPrimaryIndexID() { + oid = hasher.PrimaryKeyConstraintOid( + dbID, + schemaName, + tableID, + constraint.Index, + ) + } else { + oid = hasher.UniqueConstraintOid( + dbID, + schemaName, + tableID, + constraint.Index.ID, + ) + } + } + return oid +} + var pgCatalogSharedDescriptionTable = virtualSchemaTable{ comment: `shared object comments https://www.postgresql.org/docs/9.5/catalog-pg-shdescription.html`, @@ -4377,8 +4454,3 @@ func stringOid(s string) *tree.DOid { h.writeStr(s) return h.getOid() } - -// MakeConstraintOidBuilder constructs an OID builder. 
-func MakeConstraintOidBuilder() descmetadata.ConstraintOidBuilder { - return makeOidHasher() -} diff --git a/pkg/sql/row/kv_batch_fetcher.go b/pkg/sql/row/kv_batch_fetcher.go index ea1e3a5be633..9d5675b90346 100644 --- a/pkg/sql/row/kv_batch_fetcher.go +++ b/pkg/sql/row/kv_batch_fetcher.go @@ -467,11 +467,16 @@ func (f *txnKVFetcher) nextBatch( if len(t.BatchResponses) > 0 { batchResp, f.remainingBatches = popBatch(t.BatchResponses) } + // Note that t.Rows and batchResp might be nil when the ScanResponse + // is empty, and the caller (the KVFetcher) will skip over it. return true, t.Rows, batchResp, nil case *roachpb.ReverseScanResponse: if len(t.BatchResponses) > 0 { batchResp, f.remainingBatches = popBatch(t.BatchResponses) } + // Note that t.Rows and batchResp might be nil when the + // ReverseScanResponse is empty, and the caller (the KVFetcher) will + // skip over it. return true, t.Rows, batchResp, nil case *roachpb.GetResponse: if t.IntentValue != nil { diff --git a/pkg/sql/row/kv_batch_streamer.go b/pkg/sql/row/kv_batch_streamer.go index 0aae5582dbd4..0aeb1cf5fe32 100644 --- a/pkg/sql/row/kv_batch_streamer.go +++ b/pkg/sql/row/kv_batch_streamer.go @@ -38,7 +38,7 @@ var useStreamerEnabled = settings.RegisterBoolSetting( "determines whether the usage of the Streamer API is allowed. "+ "Enabling this will increase the speed of lookup/index joins "+ "while adhering to memory limits.", - false, + true, ) // TxnKVStreamer handles retrieval of key/values. @@ -46,10 +46,6 @@ type TxnKVStreamer struct { streamer *kvstreamer.Streamer spans roachpb.Spans - // numOutstandingRequests tracks the number of requests that haven't been - // fully responded to yet. - numOutstandingRequests int - // getResponseScratch is reused to return the result of Get requests. getResponseScratch [1]roachpb.KeyValue @@ -82,9 +78,8 @@ func NewTxnKVStreamer( return nil, err } return &TxnKVStreamer{ - streamer: streamer, - spans: spans, - numOutstandingRequests: len(spans), + streamer: streamer, + spans: spans, }, nil } @@ -93,44 +88,30 @@ func NewTxnKVStreamer( // GetResponses). func (f *TxnKVStreamer) proceedWithLastResult( ctx context.Context, -) (skip bool, kvs []roachpb.KeyValue, batchResp []byte, err error) { +) (kvs []roachpb.KeyValue, batchResp []byte, err error) { result := f.lastResultState.Result if get := result.GetResp; get != nil { if get.IntentValue != nil { - return false, nil, nil, errors.AssertionFailedf( + return nil, nil, errors.AssertionFailedf( "unexpectedly got an IntentValue back from a SQL GetRequest %v", *get.IntentValue, ) } - if get.Value == nil { - // Nothing found in this particular response, so we skip it. - f.releaseLastResult(ctx) - return true, nil, nil, nil - } pos := result.EnqueueKeysSatisfied[f.lastResultState.numEmitted] origSpan := f.spans[pos] f.lastResultState.numEmitted++ - f.numOutstandingRequests-- f.getResponseScratch[0] = roachpb.KeyValue{Key: origSpan.Key, Value: *get.Value} - return false, f.getResponseScratch[:], nil, nil + return f.getResponseScratch[:], nil, nil } scan := result.ScanResp if len(scan.BatchResponses) > 0 { batchResp, f.lastResultState.remainingBatches = scan.BatchResponses[0], scan.BatchResponses[1:] } if len(f.lastResultState.remainingBatches) == 0 { - f.processedScanResponse() - } - return false, scan.Rows, batchResp, nil -} - -// processedScanResponse updates the lastResultState before emitting the last -// part of the ScanResponse. This method should be called for each request that -// the ScanResponse satisfies. 
-func (f *TxnKVStreamer) processedScanResponse() { - f.lastResultState.numEmitted++ - if f.lastResultState.ScanResp.Complete { - f.numOutstandingRequests-- + f.lastResultState.numEmitted++ } + // Note that scan.Rows and batchResp might be nil when the ScanResponse is + // empty, and the caller (the KVFetcher) will skip over it. + return scan.Rows, batchResp, nil } func (f *TxnKVStreamer) releaseLastResult(ctx context.Context) { @@ -143,17 +124,11 @@ func (f *TxnKVStreamer) releaseLastResult(ctx context.Context) { func (f *TxnKVStreamer) nextBatch( ctx context.Context, ) (ok bool, kvs []roachpb.KeyValue, batchResp []byte, err error) { - if f.numOutstandingRequests == 0 { - // All requests have already been responded to. - f.releaseLastResult(ctx) - return false, nil, nil, nil - } - // Check whether there are more batches in the current ScanResponse. if len(f.lastResultState.remainingBatches) > 0 { batchResp, f.lastResultState.remainingBatches = f.lastResultState.remainingBatches[0], f.lastResultState.remainingBatches[1:] if len(f.lastResultState.remainingBatches) == 0 { - f.processedScanResponse() + f.lastResultState.numEmitted++ } return true, nil, batchResp, nil } @@ -162,7 +137,7 @@ func (f *TxnKVStreamer) nextBatch( if f.lastResultState.numEmitted < len(f.lastResultState.EnqueueKeysSatisfied) { // Note that we should never get an error here since we're processing // the same result again. - _, kvs, batchResp, err = f.proceedWithLastResult(ctx) + kvs, batchResp, err = f.proceedWithLastResult(ctx) return true, kvs, batchResp, err } @@ -172,7 +147,7 @@ func (f *TxnKVStreamer) nextBatch( } // Process the next result we have already received from the streamer. - for len(f.results) > 0 { + if len(f.results) > 0 { // Peel off the next result and set it into lastResultState. f.lastResultState.Result = f.results[0] f.lastResultState.numEmitted = 0 @@ -181,15 +156,8 @@ func (f *TxnKVStreamer) nextBatch( // the next iteration. f.results[0] = kvstreamer.Result{} f.results = f.results[1:] - var skip bool - skip, kvs, batchResp, err = f.proceedWithLastResult(ctx) - if err != nil { - return false, nil, nil, err - } - if skip { - continue - } - return true, kvs, batchResp, nil + kvs, batchResp, err = f.proceedWithLastResult(ctx) + return true, kvs, batchResp, err } // Get more results from the streamer. 
This call will block until some diff --git a/pkg/sql/schemachanger/scbuild/internal/scbuildstmt/common_relation.go b/pkg/sql/schemachanger/scbuild/internal/scbuildstmt/common_relation.go index 51f5b6c3a84a..4172c8c6d1d1 100644 --- a/pkg/sql/schemachanger/scbuild/internal/scbuildstmt/common_relation.go +++ b/pkg/sql/schemachanger/scbuild/internal/scbuildstmt/common_relation.go @@ -411,10 +411,9 @@ func decomposeTableDescToElements( enqueue(b, targetStatus, indexName) if targetStatus == scpb.Status_ABSENT { enqueue(b, targetStatus, &scpb.ConstraintComment{ - ConstraintType: scpb.ConstraintType_PrimaryKey, - ConstraintName: index.GetName(), - TableID: tbl.GetID(), - Comment: scpb.PlaceHolderComment, + ConstraintID: index.GetConstraintID(), + TableID: tbl.GetID(), + Comment: scpb.PlaceHolderComment, }) } @@ -424,10 +423,9 @@ func decomposeTableDescToElements( enqueue(b, targetStatus, indexName) if targetStatus == scpb.Status_ABSENT && secondaryIndex.Unique { enqueue(b, targetStatus, &scpb.ConstraintComment{ - ConstraintType: scpb.ConstraintType_PrimaryKey, - ConstraintName: index.GetName(), - TableID: tbl.GetID(), - Comment: scpb.PlaceHolderComment, + ConstraintID: index.GetConstraintID(), + TableID: tbl.GetID(), + Comment: scpb.PlaceHolderComment, }) } } @@ -493,10 +491,9 @@ func decomposeTableDescToElements( }) if targetStatus == scpb.Status_ABSENT { enqueue(b, targetStatus, &scpb.ConstraintComment{ - ConstraintType: scpb.ConstraintType_UniqueWithoutIndex, - ConstraintName: constraint.Name, - TableID: tbl.GetID(), - Comment: scpb.PlaceHolderComment, + ConstraintID: constraint.ConstraintID, + TableID: tbl.GetID(), + Comment: scpb.PlaceHolderComment, }) } } @@ -527,10 +524,9 @@ func decomposeTableDescToElements( }) if targetStatus == scpb.Status_ABSENT { enqueue(b, targetStatus, &scpb.ConstraintComment{ - ConstraintType: scpb.ConstraintType_Check, - ConstraintName: constraint.Name, - TableID: tbl.GetID(), - Comment: scpb.PlaceHolderComment, + ConstraintID: constraint.ConstraintID, + TableID: tbl.GetID(), + Comment: scpb.PlaceHolderComment, }) } } @@ -538,10 +534,9 @@ func decomposeTableDescToElements( for _, fk := range tbl.AllActiveAndInactiveForeignKeys() { if targetStatus == scpb.Status_ABSENT { enqueue(b, targetStatus, &scpb.ConstraintComment{ - ConstraintType: scpb.ConstraintType_FK, - ConstraintName: fk.Name, - TableID: tbl.GetID(), - Comment: scpb.PlaceHolderComment, + ConstraintID: fk.ConstraintID, + TableID: tbl.GetID(), + Comment: scpb.PlaceHolderComment, }) } } diff --git a/pkg/sql/schemachanger/scbuild/testdata/drop_database b/pkg/sql/schemachanger/scbuild/testdata/drop_database index 674768ad9b0c..623c5285013e 100644 --- a/pkg/sql/schemachanger/scbuild/testdata/drop_database +++ b/pkg/sql/schemachanger/scbuild/testdata/drop_database @@ -188,19 +188,17 @@ DROP DATABASE db1 CASCADE columnId: 3 name: val tableId: 110 -- [[ConstraintComment:{DescID: 109, ConstraintType: PrimaryKey, Name: t1_pkey}, ABSENT], PUBLIC] +- [[ConstraintComment:{DescID: 109, ConstraintID: 1}, ABSENT], PUBLIC] details: comment: TODO(fqazi) Comments are not currently fetched from system.comments when doing decomposition - constraintName: t1_pkey - constraintType: PrimaryKey + constraintId: 1 tableId: 109 -- [[ConstraintComment:{DescID: 110, ConstraintType: PrimaryKey, Name: t1_pkey}, ABSENT], PUBLIC] +- [[ConstraintComment:{DescID: 110, ConstraintID: 1}, ABSENT], PUBLIC] details: comment: TODO(fqazi) Comments are not currently fetched from system.comments when doing decomposition - constraintName: t1_pkey - 
constraintType: PrimaryKey
+      constraintId: 1
       tableId: 110
 - [[Database:{DescID: 104}, ABSENT], PUBLIC]
   details:
diff --git a/pkg/sql/schemachanger/scbuild/testdata/drop_schema b/pkg/sql/schemachanger/scbuild/testdata/drop_schema
index fde10027148c..d23cfb6e3dda 100644
--- a/pkg/sql/schemachanger/scbuild/testdata/drop_schema
+++ b/pkg/sql/schemachanger/scbuild/testdata/drop_schema
@@ -114,12 +114,11 @@ DROP SCHEMA defaultdb.SC1 CASCADE
       columnId: 3
       name: val
       tableId: 107
-- [[ConstraintComment:{DescID: 107, ConstraintType: PrimaryKey, Name: t1_pkey}, ABSENT], PUBLIC]
+- [[ConstraintComment:{DescID: 107, ConstraintID: 1}, ABSENT], PUBLIC]
   details:
     comment: TODO(fqazi) Comments are not currently fetched from system.comments when doing decomposition
-    constraintName: t1_pkey
-    constraintType: PrimaryKey
+    constraintId: 1
     tableId: 107
 - [[DatabaseSchemaEntry:{DescID: 100, ReferencedDescID: 104}, ABSENT], PUBLIC]
   details:
diff --git a/pkg/sql/schemachanger/scbuild/testdata/drop_table b/pkg/sql/schemachanger/scbuild/testdata/drop_table
index fb41ac1c63a9..cdb29eb41c07 100644
--- a/pkg/sql/schemachanger/scbuild/testdata/drop_table
+++ b/pkg/sql/schemachanger/scbuild/testdata/drop_table
@@ -197,26 +197,23 @@ DROP TABLE defaultdb.shipments CASCADE;
       columnId: 6
       tableId: 109
       typeId: 108
-- [[ConstraintComment:{DescID: 109, ConstraintType: FK, Name: fk_customers}, ABSENT], PUBLIC]
+- [[ConstraintComment:{DescID: 109, ConstraintID: 1}, ABSENT], PUBLIC]
   details:
     comment: TODO(fqazi) Comments are not currently fetched from system.comments when doing decomposition
-    constraintName: fk_customers
-    constraintType: FK
+    constraintId: 1
     tableId: 109
-- [[ConstraintComment:{DescID: 109, ConstraintType: FK, Name: fk_orders}, ABSENT], PUBLIC]
+- [[ConstraintComment:{DescID: 109, ConstraintID: 2}, ABSENT], PUBLIC]
   details:
     comment: TODO(fqazi) Comments are not currently fetched from system.comments when doing decomposition
-    constraintName: fk_orders
-    constraintType: FK
+    constraintId: 2
     tableId: 109
-- [[ConstraintComment:{DescID: 109, ConstraintType: PrimaryKey, Name: shipments_pkey}, ABSENT], PUBLIC]
+- [[ConstraintComment:{DescID: 109, ConstraintID: 3}, ABSENT], PUBLIC]
   details:
     comment: TODO(fqazi) Comments are not currently fetched from system.comments when doing decomposition
-    constraintName: shipments_pkey
-    constraintType: PrimaryKey
+    constraintId: 3
     tableId: 109
 - [[DefaultExpression:{DescID: 109, ColumnID: 1}, ABSENT], PUBLIC]
   details:
diff --git a/pkg/sql/schemachanger/scdeps/sctestdeps/test_deps.go b/pkg/sql/schemachanger/scdeps/sctestdeps/test_deps.go
index 49a186b5672d..cd9fc7fcfd06 100644
--- a/pkg/sql/schemachanger/scdeps/sctestdeps/test_deps.go
+++ b/pkg/sql/schemachanger/scdeps/sctestdeps/test_deps.go
@@ -850,26 +850,19 @@ func (s *TestState) DeleteDescriptorComment(
 
 //UpsertConstraintComment implements scexec.DescriptorMetaDataUpdater.
 func (s *TestState) UpsertConstraintComment(
-	desc catalog.TableDescriptor,
-	_ string,
-	constraintName string,
-	constraintType scpb.ConstraintType,
-	comment string,
+	desc catalog.TableDescriptor, constraintID descpb.ConstraintID, comment string,
 ) error {
-	s.LogSideEffectf("upsert comment %s for constraint on #%d, name: %s, type: %s"+
-		comment, desc.GetID(), constraintName, constraintType)
+	s.LogSideEffectf("upsert comment %s for constraint on #%d, constraint id: %d",
+		comment, desc.GetID(), constraintID)
 	return nil
 }
 
 //DeleteConstraintComment implements scexec.DescriptorMetaDataUpdater.
func (s *TestState) DeleteConstraintComment( - desc catalog.TableDescriptor, - schemaName string, - constraintName string, - constraintType scpb.ConstraintType, + desc catalog.TableDescriptor, constraintID descpb.ConstraintID, ) error { - s.LogSideEffectf("delete comment for constraint on #%d, name: %s, type: %s", - desc.GetID(), constraintName, constraintType) + s.LogSideEffectf("delete comment for constraint on #%d, constraint id: %d", + desc.GetID(), constraintID) return nil } diff --git a/pkg/sql/schemachanger/scexec/dependencies.go b/pkg/sql/schemachanger/scexec/dependencies.go index 4d6cd25ca26a..9dccfc3c1a49 100644 --- a/pkg/sql/schemachanger/scexec/dependencies.go +++ b/pkg/sql/schemachanger/scexec/dependencies.go @@ -22,7 +22,6 @@ import ( "github.com/cockroachdb/cockroach/pkg/sql/catalog" "github.com/cockroachdb/cockroach/pkg/sql/catalog/descpb" "github.com/cockroachdb/cockroach/pkg/sql/schemachanger/scexec/scmutationexec" - "github.com/cockroachdb/cockroach/pkg/sql/schemachanger/scpb" "github.com/cockroachdb/cockroach/pkg/sql/sessiondata" "github.com/cockroachdb/cockroach/pkg/util/hlc" "github.com/cockroachdb/cockroach/pkg/util/log/eventpb" @@ -253,10 +252,10 @@ type DescriptorMetadataUpdater interface { DeleteDescriptorComment(id int64, subID int64, commentType keys.CommentType) error //UpsertConstraintComment upserts a comment associated with a constraint. - UpsertConstraintComment(desc catalog.TableDescriptor, schemaName string, constraintName string, constraintType scpb.ConstraintType, comment string) error + UpsertConstraintComment(desc catalog.TableDescriptor, constraintID descpb.ConstraintID, comment string) error //DeleteConstraintComment deletes a comment associated with a constraint. - DeleteConstraintComment(desc catalog.TableDescriptor, schemaName string, constraintName string, constraintType scpb.ConstraintType) error + DeleteConstraintComment(desc catalog.TableDescriptor, constraintID descpb.ConstraintID) error // DeleteDatabaseRoleSettings deletes role settings associated with a database. 
DeleteDatabaseRoleSettings(ctx context.Context, database catalog.DatabaseDescriptor) error diff --git a/pkg/sql/schemachanger/scexec/exec_mutation.go b/pkg/sql/schemachanger/scexec/exec_mutation.go index 24b22e3aa429..bc6ec158f357 100644 --- a/pkg/sql/schemachanger/scexec/exec_mutation.go +++ b/pkg/sql/schemachanger/scexec/exec_mutation.go @@ -155,12 +155,12 @@ func executeDescriptorMutationOps(ctx context.Context, deps Dependencies, ops [] for _, comment := range mvs.constraintCommentsToUpdate { if len(comment.comment) > 0 { if err := commentUpdater.UpsertConstraintComment( - comment.tbl, comment.schemaName, comment.constraintName, comment.constraintType, comment.comment); err != nil { + comment.tbl, comment.constraintID, comment.comment); err != nil { return err } } else { if err := commentUpdater.DeleteConstraintComment( - comment.tbl, comment.schemaName, comment.constraintName, comment.constraintType); err != nil { + comment.tbl, comment.constraintID); err != nil { return err } } @@ -293,11 +293,9 @@ type mutationVisitorState struct { } type constraintCommentToUpdate struct { - tbl catalog.TableDescriptor - schemaName string - constraintName string - constraintType scpb.ConstraintType - comment string + tbl catalog.TableDescriptor + constraintID descpb.ConstraintID + comment string } type commentToUpdate struct { @@ -383,21 +381,12 @@ func (mvs *mutationVisitorState) DeleteComment( } func (mvs *mutationVisitorState) DeleteConstraintComment( - ctx context.Context, - tbl catalog.TableDescriptor, - constraintName string, - constraintType scpb.ConstraintType, + ctx context.Context, tbl catalog.TableDescriptor, constraintID descpb.ConstraintID, ) error { - schema, err := mvs.c.MustReadImmutableDescriptor(ctx, tbl.GetParentSchemaID()) - if err != nil { - return err - } mvs.constraintCommentsToUpdate = append(mvs.constraintCommentsToUpdate, constraintCommentToUpdate{ - tbl: tbl, - schemaName: schema.GetName(), - constraintName: constraintName, - constraintType: constraintType, + tbl: tbl, + constraintID: constraintID, }) return nil } diff --git a/pkg/sql/schemachanger/scexec/executor_external_test.go b/pkg/sql/schemachanger/scexec/executor_external_test.go index 2d8e588b1aa4..7c879859cc42 100644 --- a/pkg/sql/schemachanger/scexec/executor_external_test.go +++ b/pkg/sql/schemachanger/scexec/executor_external_test.go @@ -178,6 +178,7 @@ CREATE TABLE db.t ( KeyColumnDirections: []descpb.IndexDescriptor_Direction{ descpb.IndexDescriptor_ASC, }, + ConstraintID: 2, } for _, tc := range []testCase{ { @@ -186,6 +187,7 @@ CREATE TABLE db.t ( exp: makeTable(func(mutable *tabledesc.Mutable) { mutable.MaybeIncrementVersion() mutable.NextIndexID++ + mutable.NextConstraintID++ mutable.Mutations = append(mutable.Mutations, descpb.DescriptorMutation{ Descriptor_: &descpb.DescriptorMutation_Index{ Index: &indexToAdd, @@ -541,21 +543,14 @@ func (noopMetadataUpdater) DeleteDescriptorComment( //UpsertConstraintComment implements scexec.DescriptorMetadataUpdater func (noopMetadataUpdater) UpsertConstraintComment( - desc catalog.TableDescriptor, - schemaName string, - constraintName string, - constraintType scpb.ConstraintType, - comment string, + desc catalog.TableDescriptor, constraintID descpb.ConstraintID, comment string, ) error { return nil } //DeleteConstraintComment implements scexec.DescriptorMetadataUpdater func (noopMetadataUpdater) DeleteConstraintComment( - desc catalog.TableDescriptor, - schemaName string, - constraintName string, - constraintType scpb.ConstraintType, + desc catalog.TableDescriptor, 
constraintID descpb.ConstraintID, ) error { return nil } diff --git a/pkg/sql/schemachanger/scexec/scmutationexec/scmutationexec.go b/pkg/sql/schemachanger/scexec/scmutationexec/scmutationexec.go index fffdd1a071b0..7c68d756e24f 100644 --- a/pkg/sql/schemachanger/scexec/scmutationexec/scmutationexec.go +++ b/pkg/sql/schemachanger/scexec/scmutationexec/scmutationexec.go @@ -84,8 +84,7 @@ type MutationVisitorStateUpdater interface { DeleteConstraintComment( ctx context.Context, tbl catalog.TableDescriptor, - constraintName string, - constraintType scpb.ConstraintType, + constraintID descpb.ConstraintID, ) error // DeleteDatabaseRoleSettings removes a database role setting @@ -768,10 +767,12 @@ func (m *visitor) MakeAddedIndexDeleteOnly( CompositeColumnIDs: op.CompositeColumnIDs, CreatedExplicitly: true, EncodingType: encodingType, + ConstraintID: tbl.GetNextConstraintID(), } if op.ShardedDescriptor != nil { idx.Sharded = *op.ShardedDescriptor } + tbl.NextConstraintID++ return enqueueAddIndexMutation(tbl, idx) } @@ -1061,7 +1062,7 @@ func (m *visitor) RemoveConstraintComment( if err != nil { return err } - return m.s.DeleteConstraintComment(ctx, tbl.(catalog.TableDescriptor), op.ConstraintName, op.ConstraintType) + return m.s.DeleteConstraintComment(ctx, tbl.(catalog.TableDescriptor), op.ConstraintID) } func (m *visitor) RemoveDatabaseRoleSettings( diff --git a/pkg/sql/schemachanger/scop/mutation.go b/pkg/sql/schemachanger/scop/mutation.go index 6610b62a2baf..e0be9834ab32 100644 --- a/pkg/sql/schemachanger/scop/mutation.go +++ b/pkg/sql/schemachanger/scop/mutation.go @@ -406,9 +406,8 @@ type RemoveColumnComment struct { // constraint. type RemoveConstraintComment struct { mutationOp - TableID descpb.ID - ConstraintType scpb.ConstraintType - ConstraintName string + TableID descpb.ID + ConstraintID descpb.ConstraintID } // RemoveDatabaseRoleSettings is used to delete a role setting for a database. 
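Illustration, not part of the diff: with this change a constraint comment is addressed by (TableID, ConstraintID) alone, so DeleteConstraintComment no longer needs to call MustReadImmutableDescriptor to recover the parent schema name. A minimal Go sketch of the narrowed shape, using stand-in types rather than the real catalog/descpb ones:

package sketch

// ConstraintID stands in for descpb.ConstraintID (a uint32).
type ConstraintID uint32

// TableDescriptor stands in for catalog.TableDescriptor; only identity
// matters for this sketch.
type TableDescriptor interface {
	GetID() uint32
}

// MetadataUpdater is the narrowed interface after this diff: the old
// (schemaName, constraintName, constraintType) triple is replaced by a
// single ConstraintID.
type MetadataUpdater interface {
	UpsertConstraintComment(tbl TableDescriptor, id ConstraintID, comment string) error
	DeleteConstraintComment(tbl TableDescriptor, id ConstraintID) error
}

// applyConstraintComment mirrors the branch in exec_mutation.go above:
// a non-empty comment is upserted, an empty one deletes the row.
func applyConstraintComment(
	u MetadataUpdater, tbl TableDescriptor, id ConstraintID, comment string,
) error {
	if len(comment) > 0 {
		return u.UpsertConstraintComment(tbl, id, comment)
	}
	return u.DeleteConstraintComment(tbl, id)
}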
diff --git a/pkg/sql/schemachanger/scpb/elements.proto b/pkg/sql/schemachanger/scpb/elements.proto index 7b7f8dd0d29b..6e5ba03da2ff 100644 --- a/pkg/sql/schemachanger/scpb/elements.proto +++ b/pkg/sql/schemachanger/scpb/elements.proto @@ -403,9 +403,8 @@ message ColumnComment { message ConstraintComment { option (gogoproto.equal) = true; uint32 table_id = 1 [(gogoproto.customname) = "TableID", (gogoproto.casttype) = "github.com/cockroachdb/cockroach/pkg/sql/catalog/descpb.ID"]; - string constraint_name = 2; - ConstraintType constraint_type = 3; - string comment = 4; + uint32 constraint_id = 2 [(gogoproto.customname) = "ConstraintID", (gogoproto.casttype) = "github.com/cockroachdb/cockroach/pkg/sql/catalog/descpb.ConstraintID"]; + string comment = 3; } message DatabaseRoleSetting { diff --git a/pkg/sql/schemachanger/scpb/uml/table.puml b/pkg/sql/schemachanger/scpb/uml/table.puml index 6b1710d0b15e..878d21b8953d 100644 --- a/pkg/sql/schemachanger/scpb/uml/table.puml +++ b/pkg/sql/schemachanger/scpb/uml/table.puml @@ -258,8 +258,7 @@ ColumnComment : Comment object ConstraintComment ConstraintComment : TableID -ConstraintComment : ConstraintName -ConstraintComment : ConstraintType +ConstraintComment : ConstraintID ConstraintComment : Comment object DatabaseRoleSetting diff --git a/pkg/sql/schemachanger/scplan/internal/opgen/opgen_constraint_comment.go b/pkg/sql/schemachanger/scplan/internal/opgen/opgen_constraint_comment.go index 9a4ebcf5dd4d..1a568cda9049 100644 --- a/pkg/sql/schemachanger/scplan/internal/opgen/opgen_constraint_comment.go +++ b/pkg/sql/schemachanger/scplan/internal/opgen/opgen_constraint_comment.go @@ -30,9 +30,8 @@ func init() { to(scpb.Status_ABSENT, emit(func(this *scpb.ConstraintComment) scop.Op { return &scop.RemoveConstraintComment{ - TableID: this.TableID, - ConstraintType: this.ConstraintType, - ConstraintName: this.ConstraintName, + TableID: this.TableID, + ConstraintID: this.ConstraintID, } }), ), diff --git a/pkg/sql/schemachanger/scplan/testdata/drop_database b/pkg/sql/schemachanger/scplan/testdata/drop_database index 1c767eb80bc1..57e80324aace 100644 --- a/pkg/sql/schemachanger/scplan/testdata/drop_database +++ b/pkg/sql/schemachanger/scplan/testdata/drop_database @@ -62,7 +62,7 @@ StatementPhase stage 1 of 1 with 37 MutationType ops [[Column:{DescID: 110, ColumnID: 3}, ABSENT], PUBLIC] -> DELETE_AND_WRITE_ONLY [[ColumnComment:{DescID: 110, ColumnID: 3}, ABSENT], PUBLIC] -> ABSENT [[PrimaryIndex:{DescID: 110, IndexID: 1}, ABSENT], PUBLIC] -> VALIDATED - [[ConstraintComment:{DescID: 110, ConstraintType: PrimaryKey, Name: t1_pkey}, ABSENT], PUBLIC] -> ABSENT + [[ConstraintComment:{DescID: 110, ConstraintID: 1}, ABSENT], PUBLIC] -> ABSENT [[IndexComment:{DescID: 110, IndexID: 1}, ABSENT], PUBLIC] -> ABSENT [[Schema:{DescID: 105}, ABSENT], PUBLIC] -> TXN_DROPPED [[SchemaComment:{DescID: 105}, ABSENT], PUBLIC] -> ABSENT @@ -77,7 +77,7 @@ StatementPhase stage 1 of 1 with 37 MutationType ops [[Column:{DescID: 109, ColumnID: 3}, ABSENT], PUBLIC] -> DELETE_AND_WRITE_ONLY [[ColumnComment:{DescID: 109, ColumnID: 3}, ABSENT], PUBLIC] -> ABSENT [[PrimaryIndex:{DescID: 109, IndexID: 1}, ABSENT], PUBLIC] -> VALIDATED - [[ConstraintComment:{DescID: 109, ConstraintType: PrimaryKey, Name: t1_pkey}, ABSENT], PUBLIC] -> ABSENT + [[ConstraintComment:{DescID: 109, ConstraintID: 1}, ABSENT], PUBLIC] -> ABSENT [[IndexComment:{DescID: 109, IndexID: 1}, ABSENT], PUBLIC] -> ABSENT [[View:{DescID: 111}, ABSENT], PUBLIC] -> TXN_DROPPED [[TableComment:{DescID: 111}, ABSENT], PUBLIC] -> ABSENT 
@@ -115,8 +115,7 @@ StatementPhase stage 1 of 1 with 37 MutationType ops ColumnID: 3 TableID: 110 *scop.RemoveConstraintComment - ConstraintName: t1_pkey - ConstraintType: 4 + ConstraintID: 1 TableID: 110 *scop.RemoveIndexComment IndexID: 1 @@ -143,8 +142,7 @@ StatementPhase stage 1 of 1 with 37 MutationType ops ColumnID: 3 TableID: 109 *scop.RemoveConstraintComment - ConstraintName: t1_pkey - ConstraintType: 4 + ConstraintID: 1 TableID: 109 *scop.RemoveIndexComment IndexID: 1 diff --git a/pkg/sql/schemachanger/scplan/testdata/drop_schema b/pkg/sql/schemachanger/scplan/testdata/drop_schema index 077d7ec0f618..95034edab7d9 100644 --- a/pkg/sql/schemachanger/scplan/testdata/drop_schema +++ b/pkg/sql/schemachanger/scplan/testdata/drop_schema @@ -406,7 +406,7 @@ StatementPhase stage 1 of 1 with 23 MutationType ops [[Column:{DescID: 106, ColumnID: 3}, ABSENT], PUBLIC] -> DELETE_AND_WRITE_ONLY [[ColumnComment:{DescID: 106, ColumnID: 3}, ABSENT], PUBLIC] -> ABSENT [[PrimaryIndex:{DescID: 106, IndexID: 1}, ABSENT], PUBLIC] -> VALIDATED - [[ConstraintComment:{DescID: 106, ConstraintType: PrimaryKey, Name: t1_pkey}, ABSENT], PUBLIC] -> ABSENT + [[ConstraintComment:{DescID: 106, ConstraintID: 1}, ABSENT], PUBLIC] -> ABSENT [[IndexComment:{DescID: 106, IndexID: 1}, ABSENT], PUBLIC] -> ABSENT [[View:{DescID: 107}, ABSENT], PUBLIC] -> TXN_DROPPED [[TableComment:{DescID: 107}, ABSENT], PUBLIC] -> ABSENT @@ -441,8 +441,7 @@ StatementPhase stage 1 of 1 with 23 MutationType ops ColumnID: 3 TableID: 106 *scop.RemoveConstraintComment - ConstraintName: t1_pkey - ConstraintType: 4 + ConstraintID: 1 TableID: 106 *scop.RemoveIndexComment IndexID: 1 diff --git a/pkg/sql/schemachanger/scplan/testdata/drop_table b/pkg/sql/schemachanger/scplan/testdata/drop_table index 919bbf88bda5..16471b40bfd4 100644 --- a/pkg/sql/schemachanger/scplan/testdata/drop_table +++ b/pkg/sql/schemachanger/scplan/testdata/drop_table @@ -53,10 +53,10 @@ StatementPhase stage 1 of 1 with 15 MutationType ops [[Column:{DescID: 107, ColumnID: 5}, ABSENT], PUBLIC] -> DELETE_AND_WRITE_ONLY [[ColumnComment:{DescID: 107, ColumnID: 5}, ABSENT], PUBLIC] -> ABSENT [[PrimaryIndex:{DescID: 107, IndexID: 1}, ABSENT], PUBLIC] -> VALIDATED - [[ConstraintComment:{DescID: 107, ConstraintType: PrimaryKey, Name: shipments_pkey}, ABSENT], PUBLIC] -> ABSENT + [[ConstraintComment:{DescID: 107, ConstraintID: 1}, ABSENT], PUBLIC] -> ABSENT [[IndexComment:{DescID: 107, IndexID: 1}, ABSENT], PUBLIC] -> ABSENT - [[ConstraintComment:{DescID: 107, ConstraintType: FK, Name: fk_customers}, ABSENT], PUBLIC] -> ABSENT - [[ConstraintComment:{DescID: 107, ConstraintType: FK, Name: fk_orders}, ABSENT], PUBLIC] -> ABSENT + [[ConstraintComment:{DescID: 107, ConstraintID: 2}, ABSENT], PUBLIC] -> ABSENT + [[ConstraintComment:{DescID: 107, ConstraintID: 3}, ABSENT], PUBLIC] -> ABSENT [[View:{DescID: 109}, ABSENT], PUBLIC] -> TXN_DROPPED [[TableComment:{DescID: 109}, ABSENT], PUBLIC] -> ABSENT [[Sequence:{DescID: 108}, ABSENT], PUBLIC] -> TXN_DROPPED @@ -82,19 +82,16 @@ StatementPhase stage 1 of 1 with 15 MutationType ops ColumnID: 5 TableID: 107 *scop.RemoveConstraintComment - ConstraintName: shipments_pkey - ConstraintType: 4 + ConstraintID: 1 TableID: 107 *scop.RemoveIndexComment IndexID: 1 TableID: 107 *scop.RemoveConstraintComment - ConstraintName: fk_customers - ConstraintType: 3 + ConstraintID: 2 TableID: 107 *scop.RemoveConstraintComment - ConstraintName: fk_orders - ConstraintType: 3 + ConstraintID: 3 TableID: 107 *scop.MarkDescriptorAsDroppedSynthetically DescID: 109 diff 
--git a/pkg/sql/schemachanger/screl/attr.go b/pkg/sql/schemachanger/screl/attr.go index 8ca5bed4efa4..14a63aef8e42 100644 --- a/pkg/sql/schemachanger/screl/attr.go +++ b/pkg/sql/schemachanger/screl/attr.go @@ -70,6 +70,8 @@ const ( ConstraintType // ConstraintOrdinal is the ordinal of the constraints ConstraintOrdinal + // ConstraintID is the ID of a constraint + ConstraintID // RoleName is the name of a roleset element. RoleName ) @@ -242,8 +244,7 @@ var Schema = rel.MustSchema("screl", ), rel.EntityMapping(t((*scpb.ConstraintComment)(nil)), rel.EntityAttr(DescID, "TableID"), - rel.EntityAttr(Name, "ConstraintName"), - rel.EntityAttr(ConstraintType, "ConstraintType"), + rel.EntityAttr(ConstraintID, "ConstraintID"), ), rel.EntityMapping(t((*scpb.DatabaseRoleSetting)(nil)), rel.EntityAttr(DescID, "DatabaseID"), diff --git a/pkg/sql/schemachanger/screl/attr_string.go b/pkg/sql/schemachanger/screl/attr_string.go index 9c2d863eb10d..1fc6f2969193 100644 --- a/pkg/sql/schemachanger/screl/attr_string.go +++ b/pkg/sql/schemachanger/screl/attr_string.go @@ -20,12 +20,13 @@ func _() { _ = x[Username-10] _ = x[ConstraintType-11] _ = x[ConstraintOrdinal-12] - _ = x[RoleName-13] + _ = x[ConstraintID-13] + _ = x[RoleName-14] } -const _Attr_name = "DescIDReferencedDescIDColumnIDNameIndexIDTargetStatusCurrentStatusElementTargetUsernameConstraintTypeConstraintOrdinalRoleName" +const _Attr_name = "DescIDReferencedDescIDColumnIDNameIndexIDTargetStatusCurrentStatusElementTargetUsernameConstraintTypeConstraintOrdinalConstraintIDRoleName" -var _Attr_index = [...]uint8{0, 6, 22, 30, 34, 41, 53, 66, 73, 79, 87, 101, 118, 126} +var _Attr_index = [...]uint8{0, 6, 22, 30, 34, 41, 53, 66, 73, 79, 87, 101, 118, 130, 138} func (i Attr) String() string { i -= 1 diff --git a/pkg/sql/schemachanger/screl/compare.go b/pkg/sql/schemachanger/screl/compare.go index 1fe12f2a3bde..a66657b39f46 100644 --- a/pkg/sql/schemachanger/screl/compare.go +++ b/pkg/sql/schemachanger/screl/compare.go @@ -21,6 +21,7 @@ var equalityAttrs = []rel.Attr{ DescID, ReferencedDescID, ColumnID, + ConstraintID, ConstraintType, ConstraintOrdinal, Name, diff --git a/pkg/sql/schemachanger/testdata/alter_table_add_column b/pkg/sql/schemachanger/testdata/alter_table_add_column index 14b7bc298723..6ec1ff32ad35 100644 --- a/pkg/sql/schemachanger/testdata/alter_table_add_column +++ b/pkg/sql/schemachanger/testdata/alter_table_add_column @@ -46,6 +46,7 @@ upsert descriptor #106 + state: DELETE_ONLY + - direction: ADD + index: + + constraintId: 2 + createdExplicitly: true + encodingType: 1 + foreignKey: {} @@ -71,8 +72,10 @@ upsert descriptor #106 + state: DELETE_ONLY name: tbl - nextColumnId: 2 + - nextConstraintId: 2 + newSchemaChangeJobId: "1" + nextColumnId: 3 + + nextConstraintId: 3 nextFamilyId: 1 - nextIndexId: 2 + nextIndexId: 3 @@ -160,7 +163,9 @@ upsert descriptor #106 - - direction: ADD + - direction: DROP index: + - constraintId: 2 - createdExplicitly: true + + constraintId: 1 encodingType: 1 foreignKey: {} geoConfig: {} @@ -184,6 +189,8 @@ upsert descriptor #106 ... parentId: 104 primaryIndex: + - constraintId: 1 + + constraintId: 2 + createdExplicitly: true encodingType: 1 foreignKey: {} @@ -237,6 +244,7 @@ upsert descriptor #106 - mutations: - - direction: DROP - index: + - constraintId: 1 - encodingType: 1 - foreignKey: {} - geoConfig: {} @@ -259,7 +267,7 @@ upsert descriptor #106 name: tbl - newSchemaChangeJobId: "1" nextColumnId: 3 - nextFamilyId: 1 + nextConstraintId: 3 ... 
time: {} unexposedParentSchemaId: 105 diff --git a/pkg/sql/schemachanger/testdata/drop b/pkg/sql/schemachanger/testdata/drop index 82574a70d3c9..3f4b2f0d2050 100644 --- a/pkg/sql/schemachanger/testdata/drop +++ b/pkg/sql/schemachanger/testdata/drop @@ -70,7 +70,7 @@ delete comment for descriptor #108 of type TableCommentType delete comment for descriptor #108 of type ColumnCommentType delete comment for descriptor #108 of type ColumnCommentType delete comment for descriptor #108 of type IndexCommentType -delete comment for constraint on #108, name: t_pkey, type: PrimaryKey +delete comment for constraint on #108, constraint id: 1 # end StatementPhase # begin PreCommitPhase ## PreCommitPhase stage 1 of 1 with 4 MutationType ops @@ -83,7 +83,7 @@ upsert descriptor #108 name: t + newSchemaChangeJobId: "1" nextColumnId: 4 - nextFamilyId: 1 + nextConstraintId: 2 ... replacementOf: time: {} @@ -107,7 +107,7 @@ upsert descriptor #108 name: t - newSchemaChangeJobId: "1" nextColumnId: 4 - nextFamilyId: 1 + nextConstraintId: 2 ... state: DROP unexposedParentSchemaId: 107 @@ -288,8 +288,8 @@ delete comment for descriptor #121 of type TableCommentType delete comment for descriptor #124 of type TableCommentType delete comment for descriptor #113 of type SchemaCommentType delete comment for descriptor #111 of type DatabaseCommentType -delete comment for constraint on #117, name: t1_pkey, type: PrimaryKey -delete comment for constraint on #116, name: t1_pkey, type: PrimaryKey +delete comment for constraint on #117, constraint id: 1 +delete comment for constraint on #116, constraint id: 1 delete role settings for database on #111 # end StatementPhase # begin PreCommitPhase @@ -409,7 +409,7 @@ upsert descriptor #116 name: t1 + newSchemaChangeJobId: "1" nextColumnId: 4 - nextFamilyId: 1 + nextConstraintId: 2 ... replacementOf: time: {} @@ -438,7 +438,7 @@ upsert descriptor #117 name: t1 + newSchemaChangeJobId: "1" nextColumnId: 4 - nextFamilyId: 1 + nextConstraintId: 2 ... replacementOf: time: {} @@ -465,7 +465,7 @@ upsert descriptor #118 name: v1 + newSchemaChangeJobId: "1" nextColumnId: 2 - nextMutationId: 1 + nextConstraintId: 1 ... replacementOf: time: {} @@ -495,7 +495,7 @@ upsert descriptor #119 name: v2 + newSchemaChangeJobId: "1" nextColumnId: 3 - nextMutationId: 1 + nextConstraintId: 1 ... replacementOf: time: {} @@ -517,7 +517,7 @@ upsert descriptor #120 name: v3 + newSchemaChangeJobId: "1" nextColumnId: 3 - nextMutationId: 1 + nextConstraintId: 1 ... replacementOf: time: {} @@ -543,7 +543,7 @@ upsert descriptor #121 name: v4 + newSchemaChangeJobId: "1" nextColumnId: 3 - nextMutationId: 1 + nextConstraintId: 1 ... replacementOf: time: {} @@ -585,7 +585,7 @@ upsert descriptor #124 name: v5 + newSchemaChangeJobId: "1" nextColumnId: 4 - nextMutationId: 1 + nextConstraintId: 1 ... replacementOf: time: {} @@ -644,7 +644,7 @@ upsert descriptor #116 name: t1 - newSchemaChangeJobId: "1" nextColumnId: 4 - nextFamilyId: 1 + nextConstraintId: 2 ... state: DROP unexposedParentSchemaId: 113 @@ -656,7 +656,7 @@ upsert descriptor #117 name: t1 - newSchemaChangeJobId: "1" nextColumnId: 4 - nextFamilyId: 1 + nextConstraintId: 2 ... state: DROP unexposedParentSchemaId: 112 @@ -668,7 +668,7 @@ upsert descriptor #118 name: v1 - newSchemaChangeJobId: "1" nextColumnId: 2 - nextMutationId: 1 + nextConstraintId: 1 ... state: DROP unexposedParentSchemaId: 113 @@ -681,7 +681,7 @@ upsert descriptor #119 name: v2 - newSchemaChangeJobId: "1" nextColumnId: 3 - nextMutationId: 1 + nextConstraintId: 1 ... 
state: DROP unexposedParentSchemaId: 113 @@ -694,7 +694,7 @@ upsert descriptor #120 name: v3 - newSchemaChangeJobId: "1" nextColumnId: 3 - nextMutationId: 1 + nextConstraintId: 1 ... state: DROP unexposedParentSchemaId: 113 @@ -707,7 +707,7 @@ upsert descriptor #121 name: v4 - newSchemaChangeJobId: "1" nextColumnId: 3 - nextMutationId: 1 + nextConstraintId: 1 ... state: DROP unexposedParentSchemaId: 113 @@ -720,7 +720,7 @@ upsert descriptor #124 name: v5 - newSchemaChangeJobId: "1" nextColumnId: 4 - nextMutationId: 1 + nextConstraintId: 1 ... state: DROP unexposedParentSchemaId: 113 diff --git a/pkg/sql/schemachanger/testdata/index b/pkg/sql/schemachanger/testdata/index index 35fa41b5141e..08935b6ba8b6 100644 --- a/pkg/sql/schemachanger/testdata/index +++ b/pkg/sql/schemachanger/testdata/index @@ -22,6 +22,7 @@ upsert descriptor #104 + mutations: + - direction: ADD + index: + + constraintId: 2 + createdExplicitly: true + foreignKey: {} + geoConfig: {} @@ -46,6 +47,8 @@ upsert descriptor #104 name: t + newSchemaChangeJobId: "1" nextColumnId: 3 + - nextConstraintId: 2 + + nextConstraintId: 3 nextFamilyId: 1 - nextIndexId: 2 + nextIndexId: 3 @@ -92,7 +95,8 @@ upsert descriptor #104 formatVersion: 3 id: 104 + indexes: - + - createdExplicitly: true + + - constraintId: 2 + + createdExplicitly: true + foreignKey: {} + geoConfig: {} + id: 2 @@ -115,6 +119,7 @@ upsert descriptor #104 - mutations: - - direction: ADD - index: + - constraintId: 2 - createdExplicitly: true - foreignKey: {} - geoConfig: {} @@ -139,7 +144,7 @@ upsert descriptor #104 name: t - newSchemaChangeJobId: "1" nextColumnId: 3 - nextFamilyId: 1 + nextConstraintId: 3 ... time: {} unexposedParentSchemaId: 101 diff --git a/pkg/sql/sem/catid/ids.go b/pkg/sql/sem/catid/ids.go index ef1c3f95a19b..337d6a1ad1e0 100644 --- a/pkg/sql/sem/catid/ids.go +++ b/pkg/sql/sem/catid/ids.go @@ -34,3 +34,9 @@ type IndexID uint32 // SafeValue implements the redact.SafeValue interface. func (IndexID) SafeValue() {} + +// ConstraintID is a custom type for TableDescriptor constraint IDs. +type ConstraintID uint32 + +// SafeValue implements the redact.SafeValue interface. 
+func (ConstraintID) SafeValue() {} diff --git a/pkg/sql/table_test.go b/pkg/sql/table_test.go index 89a025e93c75..ef757e84a123 100644 --- a/pkg/sql/table_test.go +++ b/pkg/sql/table_test.go @@ -223,6 +223,7 @@ func TestMakeTableDescIndexes(t *testing.T) { KeyColumnDirections: []descpb.IndexDescriptor_Direction{descpb.IndexDescriptor_ASC}, EncodingType: descpb.PrimaryIndexEncoding, Version: descpb.LatestPrimaryIndexDescriptorVersion, + ConstraintID: 1, }, []descpb.IndexDescriptor{}, }, @@ -239,6 +240,7 @@ func TestMakeTableDescIndexes(t *testing.T) { StoreColumnIDs: []descpb.ColumnID{1}, EncodingType: descpb.PrimaryIndexEncoding, Version: descpb.LatestPrimaryIndexDescriptorVersion, + ConstraintID: 2, }, []descpb.IndexDescriptor{ { @@ -250,6 +252,7 @@ func TestMakeTableDescIndexes(t *testing.T) { KeySuffixColumnIDs: []descpb.ColumnID{2}, KeyColumnDirections: []descpb.IndexDescriptor_Direction{descpb.IndexDescriptor_ASC}, Version: descpb.LatestNonPrimaryIndexDescriptorVersion, + ConstraintID: 1, }, }, }, @@ -264,6 +267,7 @@ func TestMakeTableDescIndexes(t *testing.T) { KeyColumnDirections: []descpb.IndexDescriptor_Direction{descpb.IndexDescriptor_ASC, descpb.IndexDescriptor_ASC}, EncodingType: descpb.PrimaryIndexEncoding, Version: descpb.LatestPrimaryIndexDescriptorVersion, + ConstraintID: 1, }, []descpb.IndexDescriptor{}, }, @@ -278,6 +282,7 @@ func TestMakeTableDescIndexes(t *testing.T) { KeyColumnDirections: []descpb.IndexDescriptor_Direction{descpb.IndexDescriptor_ASC, descpb.IndexDescriptor_ASC}, EncodingType: descpb.PrimaryIndexEncoding, Version: descpb.LatestPrimaryIndexDescriptorVersion, + ConstraintID: 2, }, []descpb.IndexDescriptor{ { @@ -289,6 +294,7 @@ func TestMakeTableDescIndexes(t *testing.T) { KeySuffixColumnIDs: []descpb.ColumnID{1}, KeyColumnDirections: []descpb.IndexDescriptor_Direction{descpb.IndexDescriptor_ASC}, Version: descpb.LatestNonPrimaryIndexDescriptorVersion, + ConstraintID: 1, }, }, }, @@ -303,6 +309,7 @@ func TestMakeTableDescIndexes(t *testing.T) { KeyColumnDirections: []descpb.IndexDescriptor_Direction{descpb.IndexDescriptor_ASC, descpb.IndexDescriptor_ASC}, EncodingType: descpb.PrimaryIndexEncoding, Version: descpb.LatestPrimaryIndexDescriptorVersion, + ConstraintID: 1, }, []descpb.IndexDescriptor{}, }, @@ -345,9 +352,10 @@ func TestMakeTableDescUniqueConstraints(t *testing.T) { "a INT UNIQUE WITHOUT INDEX, b INT PRIMARY KEY", []descpb.UniqueWithoutIndexConstraint{ { - TableID: 100, - ColumnIDs: []descpb.ColumnID{1}, - Name: "unique_a", + TableID: 100, + ColumnIDs: []descpb.ColumnID{1}, + Name: "unique_a", + ConstraintID: 2, }, }, }, @@ -355,9 +363,10 @@ func TestMakeTableDescUniqueConstraints(t *testing.T) { "a INT, b INT, CONSTRAINT c UNIQUE WITHOUT INDEX (b), UNIQUE (a, b)", []descpb.UniqueWithoutIndexConstraint{ { - TableID: 100, - ColumnIDs: []descpb.ColumnID{2}, - Name: "c", + TableID: 100, + ColumnIDs: []descpb.ColumnID{2}, + Name: "c", + ConstraintID: 3, }, }, }, @@ -365,14 +374,16 @@ func TestMakeTableDescUniqueConstraints(t *testing.T) { "a INT, b INT, c INT, UNIQUE WITHOUT INDEX (a, b), UNIQUE WITHOUT INDEX (c)", []descpb.UniqueWithoutIndexConstraint{ { - TableID: 100, - ColumnIDs: []descpb.ColumnID{1, 2}, - Name: "unique_a_b", + TableID: 100, + ColumnIDs: []descpb.ColumnID{1, 2}, + Name: "unique_a_b", + ConstraintID: 2, }, { - TableID: 100, - ColumnIDs: []descpb.ColumnID{3}, - Name: "unique_c", + TableID: 100, + ColumnIDs: []descpb.ColumnID{3}, + Name: "unique_c", + ConstraintID: 3, }, }, }, diff --git a/pkg/sql/tests/repair_test.go 
b/pkg/sql/tests/repair_test.go index a6a37da2294e..07a6f89b173b 100644 --- a/pkg/sql/tests/repair_test.go +++ b/pkg/sql/tests/repair_test.go @@ -685,6 +685,7 @@ var repairedDescriptor = `'{ "nextFamilyId": 1, "nextIndexId": 2, "nextMutationId": 1, + "nextConstraintId": 2, "parentId": $firstDatabaseID, "primaryIndex": { "encodingType": 1, @@ -695,7 +696,8 @@ var repairedDescriptor = `'{ "name": "primary", "type": "FORWARD", "unique": true, - "version": 4 + "version": 4, + "constraintId": 1 }, "privileges": { "owner_proto": "root", @@ -804,6 +806,7 @@ SELECT crdb_internal.unsafe_upsert_descriptor($firstTableID, crdb_internal.json_ "nextFamilyId": 1, "nextIndexId": 2, "nextMutationId": 1, + "nextConstraintId": 2, "parentId": $firstDatabaseID, "primaryIndex": { "encodingType": 1, @@ -814,7 +817,8 @@ SELECT crdb_internal.unsafe_upsert_descriptor($firstTableID, crdb_internal.json_ "name": "primary", "type": "FORWARD", "unique": true, - "version": 4 + "version": 4, + "constraintId": 1 }, "privileges": { "owner_proto": "admin", diff --git a/pkg/testutils/lint/lint_test.go b/pkg/testutils/lint/lint_test.go index a204987ea9f4..987000d9c0ce 100644 --- a/pkg/testutils/lint/lint_test.go +++ b/pkg/testutils/lint/lint_test.go @@ -2192,6 +2192,7 @@ func TestLint(t *testing.T) { }) t.Run("CODEOWNERS", func(t *testing.T) { + skip.UnderBazel(t, "doesn't work under bazel") co, err := codeowners.DefaultLoadCodeOwners() require.NoError(t, err) const verbose = false diff --git a/pkg/testutils/skip/skip.go b/pkg/testutils/skip/skip.go index 6be83ff2bb5f..932fc7e308f8 100644 --- a/pkg/testutils/skip/skip.go +++ b/pkg/testutils/skip/skip.go @@ -89,6 +89,14 @@ func UnderRaceWithIssue(t SkippableTest, githubIssueID int, args ...interface{}) } } +// UnderBazel skips this test if run under bazel. +func UnderBazel(t SkippableTest, args ...interface{}) { + t.Helper() + if bazel.BuiltWithBazel() { + t.Skip(append([]interface{}{"disabled under bazel"}, args...)) + } +} + // UnderBazelWithIssue skips this test if we are building inside bazel, // logging the given issue ID as the reason. func UnderBazelWithIssue(t SkippableTest, githubIssueID int, args ...interface{}) { diff --git a/vendor b/vendor index b983eaeec06c..1474933a93ac 160000 --- a/vendor +++ b/vendor @@ -1 +1 @@ -Subproject commit b983eaeec06c55f94be2047b53c8528dc617e6d7 +Subproject commit 1474933a93ac2c005a5c8455b5a0bac6a584933b
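Context, not part of the diff: the reason ConstraintID implements redact.SafeValue (pkg/sql/sem/catid/ids.go above, mirrored in docs/generated/redact_safe.md) is that the numeric ID survives log redaction, whereas user-chosen constraint names are scrubbed. A hedged standalone sketch using the cockroachdb/redact package, with a local mirror of the type:

package main

import (
	"fmt"

	"github.com/cockroachdb/redact"
)

// ConstraintID mirrors catid.ConstraintID for this sketch.
type ConstraintID uint32

// SafeValue marks the type as safe to log unredacted.
func (ConstraintID) SafeValue() {}

func main() {
	name := "fk_customers" // user-supplied, treated as unsafe
	id := ConstraintID(2)  // marked safe via SafeValue

	s := redact.Sprintf("drop comment for constraint %s (id %d)", name, id)
	// After redaction the name is replaced, the ID is preserved,
	// e.g.: drop comment for constraint ‹×› (id 2)
	fmt.Println(s.Redact())
}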
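The new skip.UnderBazel helper follows the same pattern as the package's other skip helpers, with the extra args folded into the skip message. A minimal usage sketch, assuming only what the diff shows:

package lint_test

import (
	"testing"

	"github.com/cockroachdb/cockroach/pkg/testutils/skip"
)

func TestCodeOwnersSketch(t *testing.T) {
	// Skipped when the test binary was built with Bazel; runs under `go test`.
	skip.UnderBazel(t, "doesn't work under bazel")
}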