diff --git a/pkg/ccl/importccl/import_processor_test.go b/pkg/ccl/importccl/import_processor_test.go index 1d58012becd3..7281deec0cfb 100644 --- a/pkg/ccl/importccl/import_processor_test.go +++ b/pkg/ccl/importccl/import_processor_test.go @@ -116,8 +116,7 @@ func TestConverterFlushesBatches(t *testing.T) { } kvCh := make(chan row.KVBatch, batchSize) - conv, err := makeInputConverter(ctx, converterSpec, &evalCtx, kvCh, - nil /* seqChunkProvider */) + conv, err := makeInputConverter(ctx, converterSpec, &evalCtx, kvCh, nil /* seqChunkProvider */) if err != nil { t.Fatalf("makeInputConverter() error = %v", err) } @@ -953,7 +952,8 @@ func pgDumpFormat() roachpb.IOFileFormat { return roachpb.IOFileFormat{ Format: roachpb.IOFileFormat_PgDump, PgDump: roachpb.PgDumpOptions{ - MaxRowSize: 64 * 1024, + MaxRowSize: 64 * 1024, + IgnoreUnsupported: true, }, } } diff --git a/pkg/ccl/importccl/import_stmt.go b/pkg/ccl/importccl/import_stmt.go index a36faf60ffeb..90e921578262 100644 --- a/pkg/ccl/importccl/import_stmt.go +++ b/pkg/ccl/importccl/import_stmt.go @@ -9,6 +9,7 @@ package importccl import ( + "bytes" "context" "fmt" "io/ioutil" @@ -99,6 +100,12 @@ const ( avroSchema = "schema" avroSchemaURI = "schema_uri" + pgDumpIgnoreAllUnsupported = "ignore_unsupported" + pgDumpIgnoreShuntFileDest = "ignored_stmt_log" + pgDumpUnsupportedSchemaStmtLog = "unsupported_schema_stmts" + pgDumpUnsupportedDataStmtLog = "unsupported_data-_stmts" + pgDumpMaxLoggedStmts = 10 + // RunningStatusImportBundleParseSchema indicates to the user that a bundle format // schema is being parsed runningStatusImportBundleParseSchema jobs.RunningStatus = "parsing schema on Import Bundle" @@ -133,6 +140,9 @@ var importOptionExpectValues = map[string]sql.KVStringOptValidate{ avroRecordsSeparatedBy: sql.KVStringOptRequireValue, avroBinRecords: sql.KVStringOptRequireNoValue, avroJSONRecords: sql.KVStringOptRequireNoValue, + + pgDumpIgnoreAllUnsupported: sql.KVStringOptRequireNoValue, + pgDumpIgnoreShuntFileDest: sql.KVStringOptRequireValue, } func makeStringSet(opts ...string) map[string]struct{} { @@ -162,7 +172,8 @@ var mysqlOutAllowedOptions = makeStringSet( ) var mysqlDumpAllowedOptions = makeStringSet(importOptionSkipFKs, csvRowLimit) var pgCopyAllowedOptions = makeStringSet(pgCopyDelimiter, pgCopyNull, optMaxRowSize) -var pgDumpAllowedOptions = makeStringSet(optMaxRowSize, importOptionSkipFKs, csvRowLimit) +var pgDumpAllowedOptions = makeStringSet(optMaxRowSize, importOptionSkipFKs, csvRowLimit, + pgDumpIgnoreAllUnsupported, pgDumpIgnoreShuntFileDest) // DROP is required because the target table needs to be take offline during // IMPORT INTO. @@ -607,6 +618,16 @@ func importPlanHook( maxRowSize = int32(sz) } format.PgDump.MaxRowSize = maxRowSize + if _, ok := opts[pgDumpIgnoreAllUnsupported]; ok { + format.PgDump.IgnoreUnsupported = true + } + + if dest, ok := opts[pgDumpIgnoreShuntFileDest]; ok { + if !format.PgDump.IgnoreUnsupported { + return errors.New("cannot log unsupported PGDUMP stmts without `ignore_unsupported` option") + } + format.PgDump.IgnoreUnsupportedLog = dest + } if override, ok := opts[csvRowLimit]; ok { rowLimit, err := strconv.Atoi(override) @@ -1249,6 +1270,102 @@ func (r *importResumer) ReportResults(ctx context.Context, resultsCh chan<- tree } } +type loggerKind int + +const ( + schemaParsing loggerKind = iota + dataIngestion +) + +// unsupportedStmtLogger is responsible for handling unsupported PGDUMP SQL +// statements seen during the import. 
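+//
+// Illustrative usage only (ctx, user, and extStorage are placeholders supplied
+// by the import job, not names defined in this change):
+//
+//	l := makeUnsupportedStmtLogger(true /* ignoreUnsupported */,
+//		"userfile:///ignore.log", schemaParsing, extStorage)
+//	l.log("create trigger", true /* isParseError */)
+//	if err := l.flush(ctx, user); err != nil {
+//		return err
+//	}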
+type unsupportedStmtLogger struct { + // Values are initialized based on the options specified in the IMPORT PGDUMP + // stmt. + ignoreUnsupported bool + ignoreUnsupportedLogDest string + externalStorage cloud.ExternalStorageFactory + + // logBuffer holds the string to be flushed to the ignoreUnsupportedLogDest. + logBuffer *bytes.Buffer + numIgnoredStmts int + + loggerType loggerKind +} + +func makeUnsupportedStmtLogger( + ignoreUnsupported bool, + unsupportedLogDest string, + loggerType loggerKind, + externalStorage cloud.ExternalStorageFactory, +) *unsupportedStmtLogger { + l := &unsupportedStmtLogger{ + ignoreUnsupported: ignoreUnsupported, + ignoreUnsupportedLogDest: unsupportedLogDest, + loggerType: loggerType, + logBuffer: new(bytes.Buffer), + externalStorage: externalStorage, + } + header := "Unsupported statements during schema parse phase:\n\n" + if loggerType == dataIngestion { + header = "Unsupported statements during data ingestion phase:\n\n" + } + l.logBuffer.WriteString(header) + return l +} + +func (u *unsupportedStmtLogger) log(logLine string, isParseError bool) { + // We have already logged parse errors during the schema ingestion phase, so + // skip them to avoid duplicate entries. + skipLoggingParseErr := isParseError && u.loggerType == dataIngestion + if u.ignoreUnsupportedLogDest == "" || skipLoggingParseErr { + return + } + + if u.numIgnoredStmts < pgDumpMaxLoggedStmts { + if isParseError { + logLine = fmt.Sprintf("%s: could not be parsed\n", logLine) + } else { + logLine = fmt.Sprintf("%s: unsupported by IMPORT\n", logLine) + } + u.logBuffer.Write([]byte(logLine)) + } + u.numIgnoredStmts++ +} + +func (u *unsupportedStmtLogger) flush(ctx context.Context, user security.SQLUsername) error { + if u.ignoreUnsupportedLogDest == "" { + return nil + } + + numLoggedStmts := pgDumpMaxLoggedStmts + if u.numIgnoredStmts < pgDumpMaxLoggedStmts { + numLoggedStmts = u.numIgnoredStmts + } + u.logBuffer.WriteString(fmt.Sprintf("\nLogging %d out of %d ignored statements.\n", + numLoggedStmts, u.numIgnoredStmts)) + + conf, err := cloudimpl.ExternalStorageConfFromURI(u.ignoreUnsupportedLogDest, user) + if err != nil { + return errors.Wrap(err, "failed to log unsupported stmts during IMPORT PGDUMP") + } + var s cloud.ExternalStorage + if s, err = u.externalStorage(ctx, conf); err != nil { + return errors.New("failed to log unsupported stmts during IMPORT PGDUMP") + } + defer s.Close() + + logFileName := pgDumpUnsupportedSchemaStmtLog + if u.loggerType == dataIngestion { + logFileName = pgDumpUnsupportedDataStmtLog + } + err = s.WriteFile(ctx, logFileName, bytes.NewReader(u.logBuffer.Bytes())) + if err != nil { + return errors.Wrap(err, "failed to log unsupported stmts to log during IMPORT PGDUMP") + } + return nil +} + // parseAndCreateBundleTableDescs parses and creates the table // descriptors for bundle formats. func parseAndCreateBundleTableDescs( @@ -1298,7 +1415,19 @@ func parseAndCreateBundleTableDescs( tableDescs, err = readMysqlCreateTable(ctx, reader, evalCtx, p, defaultCSVTableID, parentID, tableName, fks, seqVals, owner, walltime) case roachpb.IOFileFormat_PgDump: evalCtx := &p.ExtendedEvalContext().EvalContext - tableDescs, err = readPostgresCreateTable(ctx, reader, evalCtx, p, tableName, parentID, walltime, fks, int(format.PgDump.MaxRowSize), owner) + + // Setup a logger to handle unsupported DDL statements in the PGDUMP file. 
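+		// Both options come from the IMPORT statement itself, e.g. (URIs are
+		// placeholders):
+		//   IMPORT PGDUMP ('userfile:///dump.sql')
+		//     WITH ignore_unsupported, ignored_stmt_log='userfile:///ignore.log';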
+ unsupportedStmtLogger := makeUnsupportedStmtLogger(format.PgDump.IgnoreUnsupported, + format.PgDump.IgnoreUnsupportedLog, schemaParsing, p.ExecCfg().DistSQLSrv.ExternalStorage) + + tableDescs, err = readPostgresCreateTable(ctx, reader, evalCtx, p, tableName, parentID, + walltime, fks, int(format.PgDump.MaxRowSize), owner, unsupportedStmtLogger) + + logErr := unsupportedStmtLogger.flush(ctx, p.User()) + if logErr != nil { + return nil, logErr + } + default: return tableDescs, errors.Errorf("non-bundle format %q does not support reading schemas", format.Format.String()) } diff --git a/pkg/ccl/importccl/import_stmt_test.go b/pkg/ccl/importccl/import_stmt_test.go index 097aad652c98..b6c43de4eaed 100644 --- a/pkg/ccl/importccl/import_stmt_test.go +++ b/pkg/ccl/importccl/import_stmt_test.go @@ -818,6 +818,7 @@ END; name: "fk", typ: "PGDUMP", data: testPgdumpFk, + with: "WITH ignore_unsupported", query: map[string][][]string{ getTablesQuery: { {"public", "cities", "table"}, @@ -896,7 +897,7 @@ END; name: "fk-skip", typ: "PGDUMP", data: testPgdumpFk, - with: `WITH skip_foreign_keys`, + with: `WITH skip_foreign_keys, ignore_unsupported`, query: map[string][][]string{ getTablesQuery: { {"public", "cities", "table"}, @@ -911,13 +912,14 @@ END; name: "fk unreferenced", typ: "TABLE weather FROM PGDUMP", data: testPgdumpFk, + with: "WITH ignore_unsupported", err: `table "cities" not found`, }, { name: "fk unreferenced skipped", typ: "TABLE weather FROM PGDUMP", data: testPgdumpFk, - with: `WITH skip_foreign_keys`, + with: `WITH skip_foreign_keys, ignore_unsupported`, query: map[string][][]string{ getTablesQuery: {{"public", "weather", "table"}}, }, @@ -936,6 +938,7 @@ END; { name: "sequence", typ: "PGDUMP", + with: "WITH ignore_unsupported", data: ` CREATE TABLE t (a INT8); CREATE SEQUENCE public.i_seq @@ -964,6 +967,7 @@ END; INSERT INTO "bob" ("c", "b") VALUES (3, 2); COMMIT `, + with: `WITH ignore_unsupported`, query: map[string][][]string{ `SELECT * FROM bob`: { {"1", "NULL", "2"}, @@ -1020,29 +1024,6 @@ END; data: "create table s.t (i INT8)", err: `non-public schemas unsupported: s`, }, - { - name: "various create ignores", - typ: "PGDUMP", - data: ` - CREATE TRIGGER conditions_set_updated_at BEFORE UPDATE ON conditions FOR EACH ROW EXECUTE PROCEDURE set_updated_at(); - REVOKE ALL ON SEQUENCE knex_migrations_id_seq FROM PUBLIC; - REVOKE ALL ON SEQUENCE knex_migrations_id_seq FROM database; - GRANT ALL ON SEQUENCE knex_migrations_id_seq TO database; - GRANT SELECT ON SEQUENCE knex_migrations_id_seq TO opentrials_readonly; - - CREATE FUNCTION public.isnumeric(text) RETURNS boolean - LANGUAGE sql - AS $_$ - SELECT $1 ~ '^[0-9]+$' - $_$; - ALTER FUNCTION public.isnumeric(text) OWNER TO roland; - - CREATE TABLE t (i INT8); - `, - query: map[string][][]string{ - getTablesQuery: {{"public", "t", "table"}}, - }, - }, { name: "many tables", typ: "PGDUMP", @@ -1599,7 +1580,8 @@ func TestImportRowLimit(t *testing.T) { expectedRowLimit := 4 // Import a single table `second` and verify number of rows imported. - importQuery := fmt.Sprintf(`IMPORT TABLE second FROM PGDUMP ($1) WITH row_limit="%d"`, expectedRowLimit) + importQuery := fmt.Sprintf(`IMPORT TABLE second FROM PGDUMP ($1) WITH row_limit="%d",ignore_unsupported`, + expectedRowLimit) sqlDB.Exec(t, importQuery, second...) var numRows int @@ -1610,7 +1592,7 @@ func TestImportRowLimit(t *testing.T) { // Import multiple tables including `simple` and `second`. 
expectedRowLimit = 3 - importQuery = fmt.Sprintf(`IMPORT PGDUMP ($1) WITH row_limit="%d"`, expectedRowLimit) + importQuery = fmt.Sprintf(`IMPORT PGDUMP ($1) WITH row_limit="%d",ignore_unsupported`, expectedRowLimit) sqlDB.Exec(t, importQuery, multitable...) sqlDB.QueryRow(t, "SELECT count(*) FROM second").Scan(&numRows) require.Equal(t, expectedRowLimit, numRows) @@ -5533,14 +5515,14 @@ func TestImportPgDump(t *testing.T) { CONSTRAINT simple_pkey PRIMARY KEY (i), UNIQUE INDEX simple_b_s_idx (b, s), INDEX simple_s_idx (s) - ) PGDUMP DATA ($1)`, + ) PGDUMP DATA ($1) WITH ignore_unsupported`, simple, }, - {`single table dump`, expectSimple, `IMPORT TABLE simple FROM PGDUMP ($1)`, simple}, - {`second table dump`, expectSecond, `IMPORT TABLE second FROM PGDUMP ($1)`, second}, - {`simple from multi`, expectSimple, `IMPORT TABLE simple FROM PGDUMP ($1)`, multitable}, - {`second from multi`, expectSecond, `IMPORT TABLE second FROM PGDUMP ($1)`, multitable}, - {`all from multi`, expectAll, `IMPORT PGDUMP ($1)`, multitable}, + {`single table dump`, expectSimple, `IMPORT TABLE simple FROM PGDUMP ($1) WITH ignore_unsupported`, simple}, + {`second table dump`, expectSecond, `IMPORT TABLE second FROM PGDUMP ($1) WITH ignore_unsupported`, second}, + {`simple from multi`, expectSimple, `IMPORT TABLE simple FROM PGDUMP ($1) WITH ignore_unsupported`, multitable}, + {`second from multi`, expectSecond, `IMPORT TABLE second FROM PGDUMP ($1) WITH ignore_unsupported`, multitable}, + {`all from multi`, expectAll, `IMPORT PGDUMP ($1) WITH ignore_unsupported`, multitable}, } { t.Run(c.name, func(t *testing.T) { sqlDB.Exec(t, `DROP TABLE IF EXISTS simple, second`) @@ -5680,6 +5662,137 @@ func TestImportPgDump(t *testing.T) { }) } +func TestImportPgDumpIgnoredStmts(t *testing.T) { + defer leaktest.AfterTest(t)() + defer log.Scope(t).Close(t) + + ctx := context.Background() + tc := testcluster.StartTestCluster(t, 1 /* nodes */, base.TestClusterArgs{}) + defer tc.Stopper().Stop(ctx) + conn := tc.Conns[0] + sqlDB := sqlutils.MakeSQLRunner(conn) + + data := ` + -- Statements that CRDB cannot parse. + CREATE TRIGGER conditions_set_updated_at BEFORE UPDATE ON conditions FOR EACH ROW EXECUTE PROCEDURE set_updated_at(); + + REVOKE ALL ON SEQUENCE knex_migrations_id_seq FROM PUBLIC; + REVOKE ALL ON SEQUENCE knex_migrations_id_seq FROM database; + + GRANT ALL ON SEQUENCE knex_migrations_id_seq TO database; + GRANT SELECT ON SEQUENCE knex_migrations_id_seq TO opentrials_readonly; + + COMMENT ON EXTENSION plpgsql IS 'PL/pgSQL procedural language'; + CREATE EXTENSION IF NOT EXISTS plpgsql WITH SCHEMA pg_catalog; + + -- Valid statement. + CREATE TABLE foo (id INT); + + CREATE FUNCTION public.isnumeric(text) RETURNS boolean + LANGUAGE sql + AS $_$ + SELECT $1 ~ '^[0-9]+$' + $_$; + ALTER FUNCTION public.isnumeric(text) OWNER TO roland; + + -- Valid statements. + INSERT INTO foo VALUES (1), (2), (3); + CREATE TABLE t (i INT8); + + -- Statements that CRDB can parse, but IMPORT does not support. + -- These are processed during the schema pass of IMPORT. + COMMENT ON TABLE t IS 'This should be skipped'; + COMMENT ON DATABASE t IS 'This should be skipped'; + COMMENT ON COLUMN t IS 'This should be skipped'; + + + -- Statements that CRDB can parse, but IMPORT does not support. + -- These are processed during the data ingestion pass of IMPORT. 
+ SELECT pg_catalog.set_config('search_path', '', false); + DELETE FROM geometry_columns WHERE f_table_name = 'nyc_census_blocks' AND f_table_schema = 'public'; + ` + + srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + if r.Method == "GET" { + _, _ = w.Write([]byte(data)) + } + })) + defer srv.Close() + t.Run("ignore-unsupported", func(t *testing.T) { + sqlDB.Exec(t, "CREATE DATABASE foo; USE foo;") + sqlDB.Exec(t, "IMPORT PGDUMP ($1) WITH ignore_unsupported", srv.URL) + // Check that statements which are not expected to be ignored, are still + // processed. + sqlDB.CheckQueryResults(t, "SELECT * FROM foo", [][]string{{"1"}, {"2"}, {"3"}}) + }) + + t.Run("dont-ignore-unsupported", func(t *testing.T) { + sqlDB.Exec(t, "CREATE DATABASE foo1; USE foo1;") + sqlDB.ExpectErr(t, "syntax error", "IMPORT PGDUMP ($1)", srv.URL) + }) + + t.Run("require-both-unsupported-options", func(t *testing.T) { + sqlDB.Exec(t, "CREATE DATABASE foo2; USE foo2;") + ignoredLog := `userfile:///ignore.log` + sqlDB.ExpectErr(t, "cannot log unsupported PGDUMP stmts without `ignore_unsupported` option", + "IMPORT PGDUMP ($1) WITH ignored_stmt_log=$2", srv.URL, ignoredLog) + }) + + t.Run("log-unsupported-stmts", func(t *testing.T) { + sqlDB.Exec(t, "CREATE DATABASE foo3; USE foo3;") + ignoredLog := `userfile:///ignore.log` + sqlDB.Exec(t, "IMPORT PGDUMP ($1) WITH ignore_unsupported, ignored_stmt_log=$2", + srv.URL, ignoredLog) + // Check that statements which are not expected to be ignored, are still + // processed. + sqlDB.CheckQueryResults(t, "SELECT * FROM foo", [][]string{{"1"}, {"2"}, {"3"}}) + + // Read the unsupported log and verify its contents. + store, err := cloudimpl.ExternalStorageFromURI(ctx, ignoredLog, + base.ExternalIODirConfig{}, + tc.Servers[0].ClusterSettings(), + blobs.TestEmptyBlobClientFactory, + security.RootUserName(), + tc.Servers[0].InternalExecutor().(*sql.InternalExecutor), tc.Servers[0].DB()) + require.NoError(t, err) + defer store.Close() + content, err := store.ReadFile(ctx, pgDumpUnsupportedSchemaStmtLog) + require.NoError(t, err) + descBytes, err := ioutil.ReadAll(content) + require.NoError(t, err) + expectedSchemaLog := `Unsupported statements during schema parse phase: + +create trigger: could not be parsed +revoke privileges on sequence: could not be parsed +revoke privileges on sequence: could not be parsed +grant privileges on sequence: could not be parsed +grant privileges on sequence: could not be parsed +comment on extension: could not be parsed +create extension if not exists with: could not be parsed +create function: could not be parsed +alter function: could not be parsed +COMMENT ON TABLE t IS 'This should be skipped': unsupported by IMPORT + +Logging 10 out of 13 ignored statements. +` + require.Equal(t, []byte(expectedSchemaLog), descBytes) + + expectedDataLog := `Unsupported statements during data ingestion phase: + +unsupported 3 fn args in select: ['search_path' '' false]: unsupported by IMPORT +unsupported *tree.Delete statement: DELETE FROM geometry_columns WHERE (f_table_name = 'nyc_census_blocks') AND (f_table_schema = 'public'): unsupported by IMPORT + +Logging 2 out of 2 ignored statements. +` + + content, err = store.ReadFile(ctx, pgDumpUnsupportedDataStmtLog) + require.NoError(t, err) + descBytes, err = ioutil.ReadAll(content) + require.NoError(t, err) + require.Equal(t, []byte(expectedDataLog), descBytes) + }) +} + // TestImportPgDumpGeo tests that a file with SQLFn classes can be // imported. 
These are functions like AddGeometryColumn which create and // execute SQL when called (!). They are, for example, used by shp2pgsql @@ -5700,7 +5813,7 @@ func TestImportPgDumpGeo(t *testing.T) { sqlDB := sqlutils.MakeSQLRunner(conn) sqlDB.Exec(t, `CREATE DATABASE importdb; SET DATABASE = importdb`) - sqlDB.Exec(t, "IMPORT PGDUMP 'nodelocal://0/geo_shp2pgsql.sql'") + sqlDB.Exec(t, "IMPORT PGDUMP 'nodelocal://0/geo_shp2pgsql.sql' WITH ignore_unsupported") sqlDB.Exec(t, `CREATE DATABASE execdb; SET DATABASE = execdb`) geoSQL, err := ioutil.ReadFile(filepath.Join(baseDir, "geo_shp2pgsql.sql")) @@ -5743,7 +5856,7 @@ func TestImportPgDumpGeo(t *testing.T) { sqlDB := sqlutils.MakeSQLRunner(conn) sqlDB.Exec(t, `CREATE DATABASE importdb; SET DATABASE = importdb`) - sqlDB.Exec(t, "IMPORT PGDUMP 'nodelocal://0/geo_ogr2ogr.sql'") + sqlDB.Exec(t, "IMPORT PGDUMP 'nodelocal://0/geo_ogr2ogr.sql' WITH ignore_unsupported") sqlDB.Exec(t, `CREATE DATABASE execdb; SET DATABASE = execdb`) geoSQL, err := ioutil.ReadFile(filepath.Join(baseDir, "geo_ogr2ogr.sql")) @@ -5860,7 +5973,7 @@ func TestImportCockroachDump(t *testing.T) { conn := tc.Conns[0] sqlDB := sqlutils.MakeSQLRunner(conn) - sqlDB.Exec(t, "IMPORT PGDUMP ($1)", "nodelocal://0/cockroachdump/dump.sql") + sqlDB.Exec(t, "IMPORT PGDUMP ($1) WITH ignore_unsupported", "nodelocal://0/cockroachdump/dump.sql") sqlDB.CheckQueryResults(t, "SELECT * FROM t ORDER BY i", [][]string{ {"1", "test"}, {"2", "other"}, @@ -5916,7 +6029,7 @@ func TestCreateStatsAfterImport(t *testing.T) { sqlDB.Exec(t, `SET CLUSTER SETTING sql.stats.automatic_collection.enabled=true`) - sqlDB.Exec(t, "IMPORT PGDUMP ($1)", "nodelocal://0/cockroachdump/dump.sql") + sqlDB.Exec(t, "IMPORT PGDUMP ($1) WITH ignore_unsupported", "nodelocal://0/cockroachdump/dump.sql") // Verify that statistics have been created. sqlDB.CheckQueryResultsRetry(t, diff --git a/pkg/ccl/importccl/read_import_pgdump.go b/pkg/ccl/importccl/read_import_pgdump.go index 96cbc210d1b4..07af37520c87 100644 --- a/pkg/ccl/importccl/read_import_pgdump.go +++ b/pkg/ccl/importccl/read_import_pgdump.go @@ -11,6 +11,7 @@ package importccl import ( "bufio" "context" + "fmt" "io" "regexp" "strings" @@ -42,16 +43,20 @@ import ( ) type postgreStream struct { - s *bufio.Scanner - copy *postgreStreamCopy + ctx context.Context + s *bufio.Scanner + copy *postgreStreamCopy + unsupportedStmtLogger *unsupportedStmtLogger } // newPostgreStream returns a struct that can stream statements from an // io.Reader. -func newPostgreStream(r io.Reader, max int) *postgreStream { +func newPostgreStream( + ctx context.Context, r io.Reader, max int, unsupportedStmtLogger *unsupportedStmtLogger, +) *postgreStream { s := bufio.NewScanner(r) s.Buffer(nil, max) - p := &postgreStream{s: s} + p := &postgreStream{ctx: ctx, s: s, unsupportedStmtLogger: unsupportedStmtLogger} s.Split(p.split) return p } @@ -95,14 +100,19 @@ func (p *postgreStream) Next() (interface{}, error) { for p.s.Scan() { t := p.s.Text() - // Regardless if we can parse the statement, check that it's not something - // we want to ignore. - if isIgnoredStatement(t) { - continue - } + skipOverComments(t) stmts, err := parser.Parse(t) if err != nil { + // There are some statements that CRDB is unable to parse. If the user has + // indicated that they want to skip these stmts during the IMPORT, then do + // so here. 
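+			// The parser surfaces these failures as *tree.UnsupportedError (see the
+			// pkg/sql/parser/lexer.go changes in this patch), carrying the name of
+			// the unsupported feature so it can be recorded in the log.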
+ if p.unsupportedStmtLogger.ignoreUnsupported && errors.HasType(err, (*tree.UnsupportedError)(nil)) { + if unsupportedErr := (*tree.UnsupportedError)(nil); errors.As(err, &unsupportedErr) { + p.unsupportedStmtLogger.log(unsupportedErr.FeatureName, true /* isParseError */) + } + continue + } return nil, err } switch len(stmts) { @@ -145,20 +155,10 @@ func (p *postgreStream) Next() (interface{}, error) { } var ( - ignoreComments = regexp.MustCompile(`^\s*(--.*)`) - ignoreStatements = []*regexp.Regexp{ - regexp.MustCompile("(?i)^alter function"), - regexp.MustCompile("(?i)^alter sequence .* owned by"), - regexp.MustCompile("(?i)^comment on"), - regexp.MustCompile("(?i)^create extension"), - regexp.MustCompile("(?i)^create function"), - regexp.MustCompile("(?i)^create trigger"), - regexp.MustCompile("(?i)^grant .* on sequence"), - regexp.MustCompile("(?i)^revoke .* on sequence"), - } + ignoreComments = regexp.MustCompile(`^\s*(--.*)`) ) -func isIgnoredStatement(s string) bool { +func skipOverComments(s string) { // Look for the first line with no whitespace or comments. for { m := ignoreComments.FindStringIndex(s) @@ -167,13 +167,6 @@ func isIgnoredStatement(s string) bool { } s = s[m[1]:] } - s = strings.TrimSpace(s) - for _, re := range ignoreStatements { - if re.MatchString(s) { - return true - } - } - return false } type regclassRewriter struct{} @@ -229,6 +222,7 @@ func readPostgresCreateTable( fks fkHandler, max int, owner security.SQLUsername, + unsupportedStmtLogger *unsupportedStmtLogger, ) ([]*tabledesc.Mutable, error) { // Modify the CreateTable stmt with the various index additions. We do this // instead of creating a full table descriptor first and adding indexes @@ -239,7 +233,7 @@ func readPostgresCreateTable( createTbl := make(map[string]*tree.CreateTable) createSeq := make(map[string]*tree.CreateSequence) tableFKs := make(map[string][]*tree.ForeignKeyConstraintTableDef) - ps := newPostgreStream(input, max) + ps := newPostgreStream(ctx, input, max, unsupportedStmtLogger) for { stmt, err := ps.Next() if err == io.EOF { @@ -310,7 +304,8 @@ func readPostgresCreateTable( if err != nil { return nil, errors.Wrap(err, "postgres parse error") } - if err := readPostgresStmt(ctx, evalCtx, match, fks, createTbl, createSeq, tableFKs, stmt, p, parentID); err != nil { + if err := readPostgresStmt(ctx, evalCtx, match, fks, createTbl, createSeq, tableFKs, stmt, p, + parentID, unsupportedStmtLogger); err != nil { return nil, err } } @@ -327,7 +322,9 @@ func readPostgresStmt( stmt interface{}, p sql.JobExecContext, parentID descpb.ID, + unsupportedStmtLogger *unsupportedStmtLogger, ) error { + ignoreUnsupportedStmts := unsupportedStmtLogger.ignoreUnsupported switch stmt := stmt.(type) { case *tree.CreateTable: name, err := getTableName(&stmt.Table) @@ -402,6 +399,10 @@ func readPostgresStmt( } case *tree.AlterTableAddColumn: if cmd.IfNotExists { + if ignoreUnsupportedStmts { + unsupportedStmtLogger.log(stmt.String(), false /* isParseError */) + continue + } return errors.Errorf("unsupported statement: %s", stmt) } create.Defs = append(create.Defs, cmd.ColumnDef) @@ -422,14 +423,20 @@ func readPostgresStmt( if !found { return colinfo.NewUndefinedColumnError(cmd.Column.String()) } - case *tree.AlterTableValidateConstraint: - // ignore default: + if ignoreUnsupportedStmts { + unsupportedStmtLogger.log(stmt.String(), false /* isParseError */) + continue + } return errors.Errorf("unsupported statement: %s", stmt) } } case *tree.AlterTableOwner: - // ignore + if ignoreUnsupportedStmts { + 
unsupportedStmtLogger.log(stmt.String(), false /* isParseError */) + return nil + } + return errors.Errorf("unsupported statement: %s", stmt) case *tree.CreateSequence: name, err := getTableName(&stmt.Name) if err != nil { @@ -438,6 +445,12 @@ func readPostgresStmt( if match == "" || match == name { createSeq[name] = stmt } + case *tree.AlterSequence: + if ignoreUnsupportedStmts { + unsupportedStmtLogger.log(stmt.String(), false /* isParseError */) + return nil + } + return errors.Errorf("unsupported %T statement: %s", stmt, stmt) // Some SELECT statements mutate schema. Search for those here. case *tree.Select: switch sel := stmt.Select.(type) { @@ -461,12 +474,13 @@ func readPostgresStmt( // Search for a SQLFn, which returns a SQL string to execute. fn := ov.SQLFn if fn == nil { - switch f := expr.Func.String(); f { - case "set_config", "setval": + err := errors.Errorf("unsupported function call: %s in stmt: %s", + expr.Func.String(), stmt.String()) + if ignoreUnsupportedStmts { + unsupportedStmtLogger.log(err.Error(), false /* isParseError */) continue - default: - return errors.Errorf("unsupported function call: %s", expr.Func.String()) } + return err } // Attempt to convert all func exprs to datums. datums := make(tree.Datums, len(expr.Exprs)) @@ -493,7 +507,8 @@ func readPostgresStmt( for _, fnStmt := range fnStmts { switch ast := fnStmt.AST.(type) { case *tree.AlterTable: - if err := readPostgresStmt(ctx, evalCtx, match, fks, createTbl, createSeq, tableFKs, ast, p, parentID); err != nil { + if err := readPostgresStmt(ctx, evalCtx, match, fks, createTbl, createSeq, + tableFKs, ast, p, parentID, unsupportedStmtLogger); err != nil { return err } default: @@ -502,11 +517,21 @@ func readPostgresStmt( } } default: - return errors.Errorf("unsupported %T SELECT expr: %s", expr, expr) + err := errors.Errorf("unsupported %T SELECT expr: %s", expr, expr) + if ignoreUnsupportedStmts { + unsupportedStmtLogger.log(err.Error(), false /* isParseError */) + continue + } + return err } } default: - return errors.Errorf("unsupported %T SELECT: %s", sel, sel) + err := errors.Errorf("unsupported %T SELECT %s", sel, sel) + if ignoreUnsupportedStmts { + unsupportedStmtLogger.log(err.Error(), false /* isParseError */) + return nil + } + return err } case *tree.DropTable: names := stmt.Names @@ -535,17 +560,33 @@ func readPostgresStmt( } } case *tree.BeginTransaction, *tree.CommitTransaction: - // ignore txns. - case *tree.SetVar, *tree.Insert, *tree.CopyFrom, copyData, *tree.Delete: - // ignore SETs and DMLs. - case *tree.Analyze: - // ANALYZE is syntatictic sugar for CreateStatistics. It can be ignored because - // the auto stats stuff will pick up the changes and run if needed. + // Ignore transaction statements as they have no meaning during an IMPORT. + // TODO(during review): Should we guard these statements under the + // ignore_unsupported flag as well? + case *tree.Insert, *tree.CopyFrom, *tree.Delete, copyData: + // handled during the data ingestion pass. + case *tree.CreateExtension, *tree.CommentOnDatabase, *tree.CommentOnTable, + *tree.CommentOnIndex, *tree.CommentOnColumn, *tree.SetVar, *tree.Analyze: + // These are the statements that can be parsed by CRDB but are not + // supported, or are not required to be processed, during an IMPORT. + // - ignore txns. + // - ignore SETs and DMLs. + // - ANALYZE is syntactic sugar for CreateStatistics. It can be ignored + // because the auto stats stuff will pick up the changes and run if needed. 
+ if ignoreUnsupportedStmts { + unsupportedStmtLogger.log(fmt.Sprintf("%s", stmt), false /* isParseError */) + return nil + } + return errors.Errorf("unsupported %T statement: %s", stmt, stmt) case error: if !errors.Is(stmt, errCopyDone) { return stmt } default: + if ignoreUnsupportedStmts { + unsupportedStmtLogger.log(fmt.Sprintf("%s", stmt), false /* isParseError */) + return nil + } return errors.Errorf("unsupported %T statement: %s", stmt, stmt) } return nil @@ -575,14 +616,15 @@ func getTableName2(u *tree.UnresolvedObjectName) (string, error) { } type pgDumpReader struct { - tableDescs map[string]catalog.TableDescriptor - tables map[string]*row.DatumRowConverter - descs map[string]*execinfrapb.ReadImportDataSpec_ImportTable - kvCh chan row.KVBatch - opts roachpb.PgDumpOptions - walltime int64 - colMap map[*row.DatumRowConverter](map[string]int) - evalCtx *tree.EvalContext + tableDescs map[string]catalog.TableDescriptor + tables map[string]*row.DatumRowConverter + descs map[string]*execinfrapb.ReadImportDataSpec_ImportTable + kvCh chan row.KVBatch + opts roachpb.PgDumpOptions + walltime int64 + colMap map[*row.DatumRowConverter](map[string]int) + unsupportedStmtLogger *unsupportedStmtLogger + evalCtx *tree.EvalContext } var _ inputConverter = &pgDumpReader{} @@ -646,7 +688,16 @@ func (m *pgDumpReader) readFiles( makeExternalStorage cloud.ExternalStorageFactory, user security.SQLUsername, ) error { - return readInputFiles(ctx, dataFiles, resumePos, format, m.readFile, makeExternalStorage, user) + // Setup logger to handle unsupported DML statements seen in the PGDUMP file. + m.unsupportedStmtLogger = makeUnsupportedStmtLogger(format.PgDump.IgnoreUnsupported, + format.PgDump.IgnoreUnsupportedLog, dataIngestion, makeExternalStorage) + + err := readInputFiles(ctx, dataFiles, resumePos, format, m.readFile, makeExternalStorage, user) + if err != nil { + return err + } + + return m.unsupportedStmtLogger.flush(ctx, user) } func (m *pgDumpReader) readFile( @@ -655,7 +706,7 @@ func (m *pgDumpReader) readFile( tableNameToRowsProcessed := make(map[string]int64) var inserts, count int64 rowLimit := m.opts.RowLimit - ps := newPostgreStream(input, int(m.opts.MaxRowSize)) + ps := newPostgreStream(ctx, input, int(m.opts.MaxRowSize), m.unsupportedStmtLogger) semaCtx := tree.MakeSemaContext() for _, conv := range m.tables { conv.KvBatch.Source = inputIdx @@ -700,6 +751,12 @@ func (m *pgDumpReader) readFile( timestamp := timestampAfterEpoch(m.walltime) values, ok := i.Rows.Select.(*tree.ValuesClause) if !ok { + if m.unsupportedStmtLogger.ignoreUnsupported { + logLine := fmt.Sprintf("%s: unsupported by IMPORT\n", + i.Rows.Select.String()) + m.unsupportedStmtLogger.log(logLine, false /* isParseError */) + continue + } return errors.Errorf("unsupported: %s", i.Rows.Select) } inserts++ @@ -846,22 +903,47 @@ func (m *pgDumpReader) readFile( // by pg_dump, and thus if it isn't, we don't try to figure out what to do. 
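+			// For example, pg_dump emits statements such as
+			//   SELECT pg_catalog.setval('public.a_seq', 10, true);
+			// which are handled below, while anything unrecognized is either logged
+			// or rejected depending on ignore_unsupported.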
sc, ok := i.Select.(*tree.SelectClause) if !ok { - return errors.Errorf("unsupported %T Select: %v", i.Select, i.Select) + err := errors.Errorf("unsupported %T Select: %v", i.Select, i.Select) + if m.unsupportedStmtLogger.ignoreUnsupported { + m.unsupportedStmtLogger.log(err.Error(), false /* isParseError */) + continue + } + return err } if len(sc.Exprs) != 1 { - return errors.Errorf("unsupported %d select args: %v", len(sc.Exprs), sc.Exprs) + err := errors.Errorf("unsupported %d select args: %v", len(sc.Exprs), sc.Exprs) + if m.unsupportedStmtLogger.ignoreUnsupported { + m.unsupportedStmtLogger.log(err.Error(), false /* isParseError */) + continue + } + return err } fn, ok := sc.Exprs[0].Expr.(*tree.FuncExpr) if !ok { - return errors.Errorf("unsupported select arg %T: %v", sc.Exprs[0].Expr, sc.Exprs[0].Expr) + err := errors.Errorf("unsupported select arg %T: %v", sc.Exprs[0].Expr, sc.Exprs[0].Expr) + if m.unsupportedStmtLogger.ignoreUnsupported { + m.unsupportedStmtLogger.log(err.Error(), false /* isParseError */) + continue + } + return err } switch funcName := strings.ToLower(fn.Func.String()); funcName { case "search_path", "pg_catalog.set_config": - continue + err := errors.Errorf("unsupported %d fn args in select: %v", len(fn.Exprs), fn.Exprs) + if m.unsupportedStmtLogger.ignoreUnsupported { + m.unsupportedStmtLogger.log(err.Error(), false /* isParseError */) + continue + } + return err case "setval", "pg_catalog.setval": if args := len(fn.Exprs); args < 2 || args > 3 { - return errors.Errorf("unsupported %d fn args: %v", len(fn.Exprs), fn.Exprs) + err := errors.Errorf("unsupported %d fn args in select: %v", len(fn.Exprs), fn.Exprs) + if m.unsupportedStmtLogger.ignoreUnsupported { + m.unsupportedStmtLogger.log(err.Error(), false /* isParseError */) + continue + } + return err } seqname, ok := fn.Exprs[0].(*tree.StrVal) if !ok { @@ -877,7 +959,12 @@ func (m *pgDumpReader) readFile( } seqval, ok := fn.Exprs[1].(*tree.NumVal) if !ok { - return errors.Errorf("unsupported setval %T arg: %v", fn.Exprs[1], fn.Exprs[1]) + err := errors.Errorf("unsupported setval %T arg: %v", fn.Exprs[1], fn.Exprs[1]) + if m.unsupportedStmtLogger.ignoreUnsupported { + m.unsupportedStmtLogger.log(err.Error(), false /* isParseError */) + continue + } + return err } val, err := seqval.AsInt64() if err != nil { @@ -887,7 +974,12 @@ func (m *pgDumpReader) readFile( if len(fn.Exprs) == 3 { called, ok := fn.Exprs[2].(*tree.DBool) if !ok { - return errors.Errorf("unsupported setval %T arg: %v", fn.Exprs[2], fn.Exprs[2]) + err := errors.Errorf("unsupported setval %T arg: %v", fn.Exprs[2], fn.Exprs[2]) + if m.unsupportedStmtLogger.ignoreUnsupported { + m.unsupportedStmtLogger.log(err.Error(), false /* isParseError */) + continue + } + return err } isCalled = bool(*called) } @@ -911,25 +1003,28 @@ func (m *pgDumpReader) readFile( case "addgeometrycolumn": // handled during schema extraction. default: - return errors.Errorf("unsupported function: %s", funcName) + err := errors.Errorf("unsupported function %s in stmt %s", funcName, i.Select.String()) + if m.unsupportedStmtLogger.ignoreUnsupported { + m.unsupportedStmtLogger.log(err.Error(), false /* isParseError */) + continue + } + return err } + case *tree.CreateExtension, *tree.CommentOnDatabase, *tree.CommentOnTable, + *tree.CommentOnIndex, *tree.CommentOnColumn, *tree.AlterSequence: + // handled during schema extraction. case *tree.SetVar, *tree.BeginTransaction, *tree.CommitTransaction, *tree.Analyze: - // ignored. 
- case *tree.CreateTable, *tree.AlterTable, *tree.AlterTableOwner, *tree.CreateIndex, *tree.CreateSequence, *tree.DropTable: // handled during schema extraction. - case *tree.Delete: - switch stmt := i.Table.(type) { - case *tree.AliasedTableExpr: - // ogr2ogr has `DELETE FROM geometry_columns / geography_columns ...` statements. - // We're not planning to support this functionality in CRDB, so it is safe to ignore it when countered in PGDUMP. - if tn, ok := stmt.Expr.(*tree.TableName); !(ok && (tn.Table() == "geometry_columns" || tn.Table() == "geography_columns")) { - return errors.Errorf("unsupported DELETE FROM %T statement: %s", stmt, stmt) - } - default: - return errors.Errorf("unsupported %T statement: %s", i, i) - } + case *tree.CreateTable, *tree.AlterTable, *tree.AlterTableOwner, *tree.CreateIndex, + *tree.CreateSequence, *tree.DropTable: + // handled during schema extraction. default: - return errors.Errorf("unsupported %T statement: %v", i, i) + err := errors.Errorf("unsupported %T statement: %v", i, i) + if m.unsupportedStmtLogger.ignoreUnsupported { + m.unsupportedStmtLogger.log(err.Error(), false /* isParseError */) + continue + } + return err } } for _, conv := range m.tables { diff --git a/pkg/ccl/importccl/read_import_pgdump_test.go b/pkg/ccl/importccl/read_import_pgdump_test.go index b883810426ae..0e42cd9b23bf 100644 --- a/pkg/ccl/importccl/read_import_pgdump_test.go +++ b/pkg/ccl/importccl/read_import_pgdump_test.go @@ -39,7 +39,7 @@ select '123456789012345678901234567890123456789012345678901234567890123456789012 -- ` - p := newPostgreStream(strings.NewReader(sql), defaultScanBuffer) + p := newPostgreStream(context.Background(), strings.NewReader(sql), defaultScanBuffer, nil /* unsupportedStmtLogger */) var sb strings.Builder for { s, err := p.Next() @@ -121,7 +121,7 @@ COPY public.t (s) FROM stdin; -- ` - p := newPostgreStream(strings.NewReader(sql), defaultScanBuffer) + p := newPostgreStream(context.Background(), strings.NewReader(sql), defaultScanBuffer, nil /* unsupportedStmtLogger */) var sb strings.Builder for { s, err := p.Next() diff --git a/pkg/ccl/importccl/testdata/pgdump/simple.sql b/pkg/ccl/importccl/testdata/pgdump/simple.sql index e117fea0017e..1d49cfa0336a 100644 --- a/pkg/ccl/importccl/testdata/pgdump/simple.sql +++ b/pkg/ccl/importccl/testdata/pgdump/simple.sql @@ -19,6 +19,20 @@ SET default_tablespace = ''; SET default_with_oids = false; + +-- +-- Name: simple; Type: EXTENSION; Schema: -; Owner: +-- + +CREATE EXTENSION IF NOT EXISTS simple WITH SCHEMA pg_catalog; + + +-- +-- Name: EXTENSION plpgsql; Type: COMMENT; Schema: -; Owner: +-- + +COMMENT ON EXTENSION simple IS 'simple extension'; + -- -- Name: simple; Type: TABLE; Schema: public; Owner: postgres -- diff --git a/pkg/roachpb/io-formats.pb.go b/pkg/roachpb/io-formats.pb.go index 45b572a6e788..a8a9295af5da 100644 --- a/pkg/roachpb/io-formats.pb.go +++ b/pkg/roachpb/io-formats.pb.go @@ -68,7 +68,7 @@ func (x *IOFileFormat_FileFormat) UnmarshalJSON(data []byte) error { return nil } func (IOFileFormat_FileFormat) EnumDescriptor() ([]byte, []int) { - return fileDescriptor_io_formats_cf12087409a41794, []int{0, 0} + return fileDescriptor_io_formats_b77b03f938e9e2ae, []int{0, 0} } type IOFileFormat_Compression int32 @@ -110,7 +110,7 @@ func (x *IOFileFormat_Compression) UnmarshalJSON(data []byte) error { return nil } func (IOFileFormat_Compression) EnumDescriptor() ([]byte, []int) { - return fileDescriptor_io_formats_cf12087409a41794, []int{0, 1} + return fileDescriptor_io_formats_b77b03f938e9e2ae, 
[]int{0, 1} } type MySQLOutfileOptions_Enclose int32 @@ -149,7 +149,7 @@ func (x *MySQLOutfileOptions_Enclose) UnmarshalJSON(data []byte) error { return nil } func (MySQLOutfileOptions_Enclose) EnumDescriptor() ([]byte, []int) { - return fileDescriptor_io_formats_cf12087409a41794, []int{2, 0} + return fileDescriptor_io_formats_b77b03f938e9e2ae, []int{2, 0} } type AvroOptions_Format int32 @@ -191,7 +191,7 @@ func (x *AvroOptions_Format) UnmarshalJSON(data []byte) error { return nil } func (AvroOptions_Format) EnumDescriptor() ([]byte, []int) { - return fileDescriptor_io_formats_cf12087409a41794, []int{6, 0} + return fileDescriptor_io_formats_b77b03f938e9e2ae, []int{6, 0} } type IOFileFormat struct { @@ -211,7 +211,7 @@ func (m *IOFileFormat) Reset() { *m = IOFileFormat{} } func (m *IOFileFormat) String() string { return proto.CompactTextString(m) } func (*IOFileFormat) ProtoMessage() {} func (*IOFileFormat) Descriptor() ([]byte, []int) { - return fileDescriptor_io_formats_cf12087409a41794, []int{0} + return fileDescriptor_io_formats_b77b03f938e9e2ae, []int{0} } func (m *IOFileFormat) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -258,7 +258,7 @@ func (m *CSVOptions) Reset() { *m = CSVOptions{} } func (m *CSVOptions) String() string { return proto.CompactTextString(m) } func (*CSVOptions) ProtoMessage() {} func (*CSVOptions) Descriptor() ([]byte, []int) { - return fileDescriptor_io_formats_cf12087409a41794, []int{1} + return fileDescriptor_io_formats_b77b03f938e9e2ae, []int{1} } func (m *CSVOptions) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -309,7 +309,7 @@ func (m *MySQLOutfileOptions) Reset() { *m = MySQLOutfileOptions{} } func (m *MySQLOutfileOptions) String() string { return proto.CompactTextString(m) } func (*MySQLOutfileOptions) ProtoMessage() {} func (*MySQLOutfileOptions) Descriptor() ([]byte, []int) { - return fileDescriptor_io_formats_cf12087409a41794, []int{2} + return fileDescriptor_io_formats_b77b03f938e9e2ae, []int{2} } func (m *MySQLOutfileOptions) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -348,7 +348,7 @@ func (m *PgCopyOptions) Reset() { *m = PgCopyOptions{} } func (m *PgCopyOptions) String() string { return proto.CompactTextString(m) } func (*PgCopyOptions) ProtoMessage() {} func (*PgCopyOptions) Descriptor() ([]byte, []int) { - return fileDescriptor_io_formats_cf12087409a41794, []int{3} + return fileDescriptor_io_formats_b77b03f938e9e2ae, []int{3} } func (m *PgCopyOptions) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -380,13 +380,21 @@ type PgDumpOptions struct { // Indicates the number of rows to import per table. // Must be a non-zero positive number. RowLimit int64 `protobuf:"varint,2,opt,name=row_limit,json=rowLimit" json:"row_limit"` + // Indicates if all unparseable and parseable, but unimplemented PGDUMP stmts + // should be ignored during IMPORT. + IgnoreUnsupported bool `protobuf:"varint,3,opt,name=ignore_unsupported,json=ignoreUnsupported" json:"ignore_unsupported"` + // Points to the destination where unsupported statements during a PGDUMP + // import should be logged. This can only be used when ignore_unsupported is + // specified, otherwise the IMPORT errors out on encountering an unsupported + // stmt. 
+ IgnoreUnsupportedLog string `protobuf:"bytes,4,opt,name=ignore_unsupported_log,json=ignoreUnsupportedLog" json:"ignore_unsupported_log"` } func (m *PgDumpOptions) Reset() { *m = PgDumpOptions{} } func (m *PgDumpOptions) String() string { return proto.CompactTextString(m) } func (*PgDumpOptions) ProtoMessage() {} func (*PgDumpOptions) Descriptor() ([]byte, []int) { - return fileDescriptor_io_formats_cf12087409a41794, []int{4} + return fileDescriptor_io_formats_b77b03f938e9e2ae, []int{4} } func (m *PgDumpOptions) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -421,7 +429,7 @@ func (m *MysqldumpOptions) Reset() { *m = MysqldumpOptions{} } func (m *MysqldumpOptions) String() string { return proto.CompactTextString(m) } func (*MysqldumpOptions) ProtoMessage() {} func (*MysqldumpOptions) Descriptor() ([]byte, []int) { - return fileDescriptor_io_formats_cf12087409a41794, []int{5} + return fileDescriptor_io_formats_b77b03f938e9e2ae, []int{5} } func (m *MysqldumpOptions) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -464,7 +472,7 @@ func (m *AvroOptions) Reset() { *m = AvroOptions{} } func (m *AvroOptions) String() string { return proto.CompactTextString(m) } func (*AvroOptions) ProtoMessage() {} func (*AvroOptions) Descriptor() ([]byte, []int) { - return fileDescriptor_io_formats_cf12087409a41794, []int{6} + return fileDescriptor_io_formats_b77b03f938e9e2ae, []int{6} } func (m *AvroOptions) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -728,6 +736,18 @@ func (m *PgDumpOptions) MarshalTo(dAtA []byte) (int, error) { dAtA[i] = 0x10 i++ i = encodeVarintIoFormats(dAtA, i, uint64(m.RowLimit)) + dAtA[i] = 0x18 + i++ + if m.IgnoreUnsupported { + dAtA[i] = 1 + } else { + dAtA[i] = 0 + } + i++ + dAtA[i] = 0x22 + i++ + i = encodeVarintIoFormats(dAtA, i, uint64(len(m.IgnoreUnsupportedLog))) + i += copy(dAtA[i:], m.IgnoreUnsupportedLog) return i, nil } @@ -887,6 +907,9 @@ func (m *PgDumpOptions) Size() (n int) { _ = l n += 1 + sovIoFormats(uint64(m.MaxRowSize)) n += 1 + sovIoFormats(uint64(m.RowLimit)) + n += 2 + l = len(m.IgnoreUnsupportedLog) + n += 1 + l + sovIoFormats(uint64(l)) return n } @@ -1810,6 +1833,55 @@ func (m *PgDumpOptions) Unmarshal(dAtA []byte) error { break } } + case 3: + if wireType != 0 { + return fmt.Errorf("proto: wrong wireType = %d for field IgnoreUnsupported", wireType) + } + var v int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowIoFormats + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + v |= (int(b) & 0x7F) << shift + if b < 0x80 { + break + } + } + m.IgnoreUnsupported = bool(v != 0) + case 4: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field IgnoreUnsupportedLog", wireType) + } + var stringLen uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowIoFormats + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + stringLen |= (uint64(b) & 0x7F) << shift + if b < 0x80 { + break + } + } + intStringLen := int(stringLen) + if intStringLen < 0 { + return ErrInvalidLengthIoFormats + } + postIndex := iNdEx + intStringLen + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.IgnoreUnsupportedLog = string(dAtA[iNdEx:postIndex]) + iNdEx = postIndex default: iNdEx = preIndex skippy, err := skipIoFormats(dAtA[iNdEx:]) @@ -2181,68 +2253,71 @@ var ( ) func init() { - proto.RegisterFile("roachpb/io-formats.proto", fileDescriptor_io_formats_cf12087409a41794) -} - -var 
fileDescriptor_io_formats_cf12087409a41794 = []byte{ - // 938 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x55, 0x5f, 0x6f, 0xe3, 0x44, - 0x10, 0xb7, 0xf3, 0xcf, 0xf1, 0x24, 0x69, 0x97, 0x85, 0x07, 0xeb, 0x04, 0x26, 0xe4, 0x38, 0xd4, - 0x03, 0xce, 0x95, 0x2a, 0x2a, 0xf1, 0x86, 0xae, 0xb9, 0x96, 0xeb, 0xe9, 0x9a, 0x70, 0x89, 0x38, - 0x21, 0x5e, 0x2c, 0x63, 0x6f, 0x53, 0x53, 0xdb, 0xeb, 0x7a, 0x9d, 0xe4, 0x72, 0x9f, 0x82, 0xcf, - 0xc4, 0x53, 0x1f, 0xef, 0x8d, 0x93, 0x90, 0x10, 0xb4, 0xdf, 0x03, 0xa1, 0x5d, 0xaf, 0x13, 0xbb, - 0x35, 0x77, 0x7d, 0x1b, 0xcd, 0x6f, 0x7e, 0xb3, 0x33, 0xf3, 0x9b, 0xdd, 0x05, 0x23, 0xa1, 0x8e, - 0x7b, 0x16, 0xff, 0xb2, 0xeb, 0xd3, 0x47, 0xa7, 0x34, 0x09, 0x9d, 0x94, 0x59, 0x71, 0x42, 0x53, - 0x8a, 0x3f, 0x70, 0xa9, 0x7b, 0x2e, 0x50, 0x4b, 0xc6, 0xdc, 0xfb, 0x68, 0x46, 0x67, 0x54, 0xa0, - 0xbb, 0xdc, 0xca, 0x02, 0x07, 0xff, 0x36, 0xa1, 0x7b, 0x3c, 0x3e, 0xf2, 0x03, 0x72, 0x24, 0x12, - 0xe0, 0xa7, 0xd0, 0xca, 0x52, 0x19, 0x6a, 0x5f, 0xdd, 0xd9, 0xda, 0xfb, 0xd2, 0xba, 0x95, 0xca, - 0x2a, 0x12, 0xac, 0x8d, 0x79, 0xd0, 0xb8, 0xfc, 0xeb, 0x53, 0x65, 0x22, 0xf9, 0x78, 0x1f, 0xea, - 0x2e, 0x5b, 0x18, 0xb5, 0xbe, 0xba, 0xd3, 0xd9, 0xfb, 0xa4, 0x22, 0xcd, 0x70, 0xfa, 0x72, 0x1c, - 0xa7, 0x3e, 0x8d, 0x98, 0x64, 0xf2, 0x78, 0x7c, 0x0c, 0x7a, 0xb8, 0x62, 0x17, 0x81, 0x4d, 0xe7, - 0xa9, 0x51, 0x17, 0xe4, 0x2f, 0x2a, 0xc8, 0x27, 0xab, 0xe9, 0x8b, 0xe7, 0xe3, 0x79, 0x7a, 0xea, - 0x07, 0xa4, 0x9c, 0xa5, 0x2d, 0xe8, 0xe3, 0x79, 0x8a, 0xbf, 0x03, 0x2d, 0x9e, 0xd9, 0x2e, 0x8d, - 0x57, 0x46, 0x43, 0x24, 0xea, 0x57, 0x24, 0xfa, 0x61, 0x36, 0xa4, 0xf1, 0xaa, 0x9c, 0xa2, 0x15, - 0x0b, 0x27, 0x9e, 0x42, 0xc7, 0xa5, 0x61, 0x9c, 0x10, 0xc6, 0x7c, 0x1a, 0x19, 0x4d, 0x31, 0x91, - 0xaf, 0xde, 0x37, 0x91, 0xe1, 0x86, 0x22, 0xf3, 0x15, 0xb3, 0xc8, 0xaa, 0xbc, 0x79, 0x18, 0x1b, - 0xad, 0x77, 0x54, 0xf5, 0x64, 0x1e, 0xc6, 0xb7, 0xaa, 0xe2, 0x4e, 0xfc, 0x10, 0x7a, 0xcc, 0x59, - 0x10, 0x3b, 0x21, 0xbf, 0x12, 0x37, 0x25, 0x9e, 0xa1, 0xf5, 0xd5, 0x9d, 0xb6, 0x0c, 0xea, 0x72, - 0x68, 0x22, 0x11, 0xfc, 0x2d, 0x34, 0x9c, 0x45, 0x42, 0x8d, 0xb6, 0x38, 0xc8, 0xac, 0x38, 0xe8, - 0xf1, 0x22, 0xa1, 0xe5, 0x63, 0x04, 0x03, 0x3f, 0x05, 0xc8, 0x64, 0x10, 0x85, 0xea, 0x82, 0x7f, - 0xbf, 0x52, 0x07, 0x76, 0x11, 0x78, 0xb7, 0x6a, 0xcd, 0x34, 0xe4, 0xe5, 0x0e, 0x08, 0x40, 0x61, - 0xbf, 0x3a, 0xa0, 0xfd, 0x18, 0x9d, 0x47, 0x74, 0x19, 0x21, 0x05, 0x6b, 0x50, 0x1f, 0x4e, 0x5f, - 0x22, 0x15, 0x23, 0xe8, 0x9e, 0x48, 0xd5, 0xb8, 0xa0, 0xa8, 0x86, 0x7b, 0xa0, 0xaf, 0x53, 0xa3, - 0x3a, 0x06, 0x68, 0x65, 0x42, 0xa1, 0x46, 0x66, 0xf3, 0xd4, 0xa8, 0x89, 0xdb, 0xd0, 0xe0, 0x1d, - 0xa0, 0xd6, 0x60, 0x1f, 0x3a, 0x85, 0xc1, 0x0b, 0x60, 0x9e, 0x52, 0xa4, 0x70, 0x6b, 0x44, 0x23, - 0x82, 0x54, 0x6e, 0x7d, 0xff, 0xda, 0x8f, 0x51, 0x8d, 0x5b, 0x07, 0xdc, 0xaa, 0x0f, 0xfe, 0x54, - 0x01, 0x36, 0x8b, 0x88, 0xef, 0x41, 0xd3, 0xa5, 0x61, 0xe8, 0x88, 0xed, 0x6f, 0xca, 0x66, 0x32, - 0x17, 0x36, 0x41, 0xe3, 0x06, 0x89, 0x52, 0xb1, 0xd4, 0x39, 0x9a, 0x3b, 0xb9, 0x2e, 0xd1, 0x3c, - 0x08, 0x6c, 0x12, 0xb9, 0xd4, 0xf3, 0xa3, 0x99, 0xd8, 0x5e, 0x5d, 0x44, 0xa9, 0x93, 0x2e, 0x87, - 0x0e, 0x25, 0x82, 0x0d, 0x68, 0xb0, 0x73, 0x3f, 0x16, 0x6b, 0xd9, 0xcb, 0xe7, 0xce, 0x3d, 0x42, - 0xdc, 0x34, 0xf1, 0xdd, 0xd4, 0xbe, 0x98, 0xd3, 0x94, 0x30, 0xb1, 0x74, 0x1b, 0x71, 0x05, 0xf4, - 0x42, 0x20, 0xf8, 0x33, 0xd0, 0x13, 0xba, 0xb4, 0x03, 0x3f, 0xf4, 0x53, 0xb1, 0x4a, 0xf5, 0xfc, - 0x06, 0x24, 0x74, 0xf9, 0x9c, 0x7b, 0x07, 0xbf, 0xd7, 0xe1, 0xc3, 0x8a, 0x9b, 0xc2, 0x4f, 0xe1, - 0x54, 0x46, 0x62, 
0x27, 0x71, 0x52, 0x9a, 0x94, 0xda, 0xed, 0x26, 0x74, 0x39, 0xcd, 0x11, 0xfc, - 0x08, 0xb6, 0x4f, 0x7d, 0x12, 0x78, 0x85, 0xe0, 0x62, 0xf7, 0x5b, 0x02, 0xdc, 0x84, 0x8f, 0x40, - 0x23, 0x91, 0x1b, 0x50, 0x46, 0x44, 0xfb, 0x5b, 0x7b, 0xd6, 0xdd, 0x2e, 0xaf, 0x75, 0x98, 0xb1, - 0xf2, 0xa1, 0xca, 0x24, 0xb8, 0x0f, 0x6d, 0x69, 0x26, 0x62, 0x5a, 0xf9, 0xb9, 0x6b, 0x2f, 0xbe, - 0x0f, 0x70, 0xe6, 0x30, 0x9b, 0x30, 0xd7, 0x89, 0x49, 0x69, 0x5c, 0xfa, 0x99, 0xc3, 0x0e, 0x85, - 0x1b, 0x7f, 0x0c, 0x2d, 0x19, 0xd0, 0x2a, 0x24, 0x91, 0xbe, 0xb5, 0x1c, 0x5a, 0x95, 0x1c, 0x65, - 0x4d, 0xdb, 0xff, 0xab, 0x69, 0x49, 0x0e, 0xa8, 0x94, 0xc3, 0x02, 0x4d, 0xb6, 0x89, 0x75, 0x68, - 0x8e, 0xc8, 0x82, 0x24, 0x48, 0xe1, 0xfb, 0xfc, 0x38, 0x58, 0x3a, 0x2b, 0x86, 0x54, 0xdc, 0x85, - 0x76, 0x36, 0x10, 0x27, 0x40, 0xb5, 0x67, 0x8d, 0xb6, 0x8e, 0x60, 0xc0, 0xa0, 0x57, 0x7a, 0xa4, - 0xf0, 0x00, 0x74, 0x8f, 0x88, 0x73, 0x48, 0x59, 0xb9, 0x8d, 0x9b, 0xb7, 0xc4, 0xab, 0x13, 0x5a, - 0xe9, 0x79, 0x4b, 0xdc, 0x83, 0x3f, 0x07, 0x08, 0x9d, 0x57, 0x13, 0xba, 0x9c, 0xfa, 0xaf, 0x33, - 0x91, 0x72, 0x7a, 0xc1, 0x3f, 0xf8, 0x89, 0x1f, 0x5a, 0x78, 0x83, 0x6e, 0xd0, 0xd4, 0x6a, 0x5a, - 0x79, 0x08, 0xb5, 0xca, 0x21, 0xec, 0x03, 0xba, 0xf9, 0x68, 0x94, 0x69, 0x6a, 0x25, 0xed, 0x8f, - 0x1a, 0x74, 0x0a, 0x8f, 0x15, 0x1e, 0xde, 0xf8, 0xa8, 0x1e, 0xbc, 0xfb, 0x71, 0xb3, 0x2a, 0xff, - 0xa8, 0x07, 0xd0, 0x91, 0xb7, 0x2d, 0xa4, 0x1e, 0x11, 0x05, 0xe7, 0xcb, 0x03, 0x19, 0x70, 0x42, - 0x3d, 0xc2, 0x7b, 0x67, 0xee, 0x19, 0x09, 0x9d, 0x67, 0xd3, 0xf1, 0xa8, 0x70, 0xad, 0x79, 0xd4, - 0xda, 0x8f, 0xbf, 0x86, 0xed, 0xd0, 0x79, 0x65, 0x27, 0xc4, 0xa5, 0x89, 0x67, 0x33, 0x3e, 0xa6, - 0xe2, 0xc6, 0xf6, 0xf8, 0x98, 0x04, 0x26, 0x26, 0xb5, 0x0b, 0x28, 0x8f, 0x5c, 0x5f, 0xac, 0x66, - 0x21, 0x7c, 0x3b, 0x43, 0x37, 0x37, 0xeb, 0x0e, 0xd7, 0xfd, 0x1b, 0x68, 0xc9, 0x67, 0x56, 0x83, - 0xfa, 0x78, 0x78, 0x84, 0x14, 0xbc, 0x0d, 0x9d, 0x83, 0xe3, 0x91, 0x3d, 0x39, 0x1c, 0x8e, 0x27, - 0x4f, 0xa6, 0xd9, 0x53, 0xcb, 0xab, 0x5d, 0x7b, 0x6a, 0x07, 0x0f, 0x2f, 0xff, 0x31, 0x95, 0xcb, - 0x2b, 0x53, 0x7d, 0x73, 0x65, 0xaa, 0x6f, 0xaf, 0x4c, 0xf5, 0xef, 0x2b, 0x53, 0xfd, 0xed, 0xda, - 0x54, 0xde, 0x5c, 0x9b, 0xca, 0xdb, 0x6b, 0x53, 0xf9, 0x59, 0x93, 0x03, 0xfd, 0x2f, 0x00, 0x00, - 0xff, 0xff, 0xf9, 0x03, 0xbb, 0x68, 0x72, 0x08, 0x00, 0x00, + proto.RegisterFile("roachpb/io-formats.proto", fileDescriptor_io_formats_b77b03f938e9e2ae) +} + +var fileDescriptor_io_formats_b77b03f938e9e2ae = []byte{ + // 986 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x55, 0xcf, 0x6e, 0xdb, 0xc6, + 0x13, 0x16, 0xf5, 0x8f, 0xe2, 0x48, 0xb2, 0x99, 0xfd, 0x05, 0x3f, 0x10, 0x41, 0xcb, 0xaa, 0x4a, + 0x53, 0x38, 0x6d, 0x23, 0x03, 0x6e, 0x0d, 0x14, 0xbd, 0x14, 0xb1, 0x62, 0x37, 0x0e, 0x6c, 0xa9, + 0x91, 0x90, 0x1c, 0x7a, 0x21, 0x58, 0x72, 0x2d, 0xb3, 0x26, 0xb9, 0xf4, 0x2e, 0x29, 0x45, 0x79, + 0x8a, 0x3e, 0x53, 0x4f, 0xee, 0x2d, 0xb7, 0x06, 0x28, 0x50, 0xb4, 0xf6, 0x7b, 0x14, 0xc5, 0x2e, + 0x97, 0x12, 0x69, 0xb1, 0x69, 0x6e, 0x83, 0xf9, 0xe6, 0x1b, 0xce, 0xcc, 0x37, 0x3b, 0x04, 0x83, + 0x12, 0xdb, 0x39, 0x8f, 0x7e, 0xdc, 0xf5, 0xc8, 0xa3, 0x33, 0x42, 0x03, 0x3b, 0x66, 0x83, 0x88, + 0x92, 0x98, 0xa0, 0x3b, 0x0e, 0x71, 0x2e, 0x04, 0x3a, 0x90, 0x31, 0xf7, 0xee, 0xce, 0xc8, 0x8c, + 0x08, 0x74, 0x97, 0x5b, 0x69, 0x60, 0xff, 0xef, 0x06, 0x74, 0x8e, 0xc7, 0x47, 0x9e, 0x8f, 0x8f, + 0x44, 0x02, 0xf4, 0x14, 0x9a, 0x69, 0x2a, 0x43, 0xe9, 0x29, 0x3b, 0x5b, 0x7b, 0x9f, 0x0d, 0x36, + 0x52, 0x0d, 0xf2, 0x84, 0xc1, 0xda, 0x3c, 0xa8, 0x5f, 0xfd, 0xf1, 0x51, 
0x65, 0x22, 0xf9, 0x68, + 0x1f, 0x6a, 0x0e, 0x9b, 0x1b, 0xd5, 0x9e, 0xb2, 0xd3, 0xde, 0xfb, 0xb0, 0x24, 0xcd, 0x70, 0xfa, + 0x72, 0x1c, 0xc5, 0x1e, 0x09, 0x99, 0x64, 0xf2, 0x78, 0x74, 0x0c, 0x5a, 0xb0, 0x64, 0x97, 0xbe, + 0x45, 0x92, 0xd8, 0xa8, 0x09, 0xf2, 0xa7, 0x25, 0xe4, 0xd3, 0xe5, 0xf4, 0xf9, 0xc9, 0x38, 0x89, + 0xcf, 0x3c, 0x1f, 0x17, 0xb3, 0xb4, 0x04, 0x7d, 0x9c, 0xc4, 0xe8, 0x5b, 0x50, 0xa3, 0x99, 0xe5, + 0x90, 0x68, 0x69, 0xd4, 0x45, 0xa2, 0x5e, 0x49, 0xa2, 0xef, 0x67, 0x43, 0x12, 0x2d, 0x8b, 0x29, + 0x9a, 0x91, 0x70, 0xa2, 0x29, 0xb4, 0x1d, 0x12, 0x44, 0x14, 0x33, 0xe6, 0x91, 0xd0, 0x68, 0x88, + 0x89, 0x7c, 0xfe, 0x5f, 0x13, 0x19, 0xae, 0x29, 0x32, 0x5f, 0x3e, 0x8b, 0xac, 0xca, 0x4d, 0x82, + 0xc8, 0x68, 0xbe, 0xa3, 0xaa, 0x27, 0x49, 0x10, 0x6d, 0x54, 0xc5, 0x9d, 0xe8, 0x21, 0x74, 0x99, + 0x3d, 0xc7, 0x16, 0xc5, 0x3f, 0x61, 0x27, 0xc6, 0xae, 0xa1, 0xf6, 0x94, 0x9d, 0x96, 0x0c, 0xea, + 0x70, 0x68, 0x22, 0x11, 0xf4, 0x35, 0xd4, 0xed, 0x39, 0x25, 0x46, 0x4b, 0x7c, 0xc8, 0x2c, 0xf9, + 0xd0, 0xe3, 0x39, 0x25, 0xc5, 0xcf, 0x08, 0x06, 0x7a, 0x0a, 0x90, 0xca, 0x20, 0x0a, 0xd5, 0x04, + 0xff, 0x7e, 0xa9, 0x0e, 0xec, 0xd2, 0x77, 0x37, 0x6a, 0x4d, 0x35, 0xe4, 0xe5, 0xf6, 0x31, 0x40, + 0x6e, 0xbf, 0xda, 0xa0, 0xbe, 0x08, 0x2f, 0x42, 0xb2, 0x08, 0xf5, 0x0a, 0x52, 0xa1, 0x36, 0x9c, + 0xbe, 0xd4, 0x15, 0xa4, 0x43, 0xe7, 0x54, 0xaa, 0xc6, 0x05, 0xd5, 0xab, 0xa8, 0x0b, 0xda, 0x2a, + 0xb5, 0x5e, 0x43, 0x00, 0xcd, 0x54, 0x28, 0xbd, 0x9e, 0xda, 0x3c, 0xb5, 0xde, 0x40, 0x2d, 0xa8, + 0xf3, 0x0e, 0xf4, 0x66, 0x7f, 0x1f, 0xda, 0xb9, 0xc1, 0x0b, 0x20, 0x89, 0x89, 0x5e, 0xe1, 0xd6, + 0x88, 0x84, 0x58, 0x57, 0xb8, 0xf5, 0xdd, 0x6b, 0x2f, 0xd2, 0xab, 0xdc, 0x3a, 0xe0, 0x56, 0xad, + 0xff, 0xbb, 0x02, 0xb0, 0x5e, 0x44, 0x74, 0x0f, 0x1a, 0x0e, 0x09, 0x02, 0x5b, 0x6c, 0x7f, 0x43, + 0x36, 0x93, 0xba, 0x90, 0x09, 0x2a, 0x37, 0x70, 0x18, 0x8b, 0xa5, 0xce, 0xd0, 0xcc, 0xc9, 0x75, + 0x09, 0x13, 0xdf, 0xb7, 0x70, 0xe8, 0x10, 0xd7, 0x0b, 0x67, 0x62, 0x7b, 0x35, 0x11, 0xa5, 0x4c, + 0x3a, 0x1c, 0x3a, 0x94, 0x08, 0x32, 0xa0, 0xce, 0x2e, 0xbc, 0x48, 0xac, 0x65, 0x37, 0x9b, 0x3b, + 0xf7, 0x08, 0x71, 0x63, 0xea, 0x39, 0xb1, 0x75, 0x99, 0x90, 0x18, 0x33, 0xb1, 0x74, 0x6b, 0x71, + 0x05, 0xf4, 0x5c, 0x20, 0xe8, 0x63, 0xd0, 0x28, 0x59, 0x58, 0xbe, 0x17, 0x78, 0xb1, 0x58, 0xa5, + 0x5a, 0xf6, 0x02, 0x28, 0x59, 0x9c, 0x70, 0x6f, 0xff, 0x97, 0x1a, 0xfc, 0xaf, 0xe4, 0xa5, 0xf0, + 0xaf, 0x70, 0x2a, 0xc3, 0x91, 0x4d, 0xed, 0x98, 0xd0, 0x42, 0xbb, 0x1d, 0x4a, 0x16, 0xd3, 0x0c, + 0x41, 0x8f, 0x60, 0xfb, 0xcc, 0xc3, 0xbe, 0x9b, 0x0b, 0xce, 0x77, 0xbf, 0x25, 0xc0, 0x75, 0xf8, + 0x08, 0x54, 0x1c, 0x3a, 0x3e, 0x61, 0x58, 0xb4, 0xbf, 0xb5, 0x37, 0x78, 0xbf, 0xc7, 0x3b, 0x38, + 0x4c, 0x59, 0xd9, 0x50, 0x65, 0x12, 0xd4, 0x83, 0x96, 0x34, 0xa9, 0x98, 0x56, 0xf6, 0xdd, 0x95, + 0x17, 0xdd, 0x07, 0x38, 0xb7, 0x99, 0x85, 0x99, 0x63, 0x47, 0xb8, 0x30, 0x2e, 0xed, 0xdc, 0x66, + 0x87, 0xc2, 0x8d, 0x3e, 0x80, 0xa6, 0x0c, 0x68, 0xe6, 0x92, 0x48, 0xdf, 0x4a, 0x0e, 0xb5, 0x4c, + 0x8e, 0xa2, 0xa6, 0xad, 0x7f, 0xd5, 0xb4, 0x20, 0x07, 0x94, 0xca, 0x31, 0x00, 0x55, 0xb6, 0x89, + 0x34, 0x68, 0x8c, 0xf0, 0x1c, 0x53, 0xbd, 0xc2, 0xf7, 0xf9, 0xb1, 0xbf, 0xb0, 0x97, 0x4c, 0x57, + 0x50, 0x07, 0x5a, 0xe9, 0x40, 0x6c, 0x5f, 0xaf, 0x3e, 0xab, 0xb7, 0x34, 0x1d, 0xfa, 0x0c, 0xba, + 0x85, 0x23, 0x85, 0xfa, 0xa0, 0xb9, 0x58, 0x7c, 0x07, 0x17, 0x95, 0x5b, 0xbb, 0x79, 0x4b, 0xbc, + 0x3a, 0xa1, 0x95, 0x96, 0xb5, 0xc4, 0x3d, 0xe8, 0x13, 0x80, 0xc0, 0x7e, 0x35, 0x21, 0x8b, 0xa9, + 0xf7, 0x3a, 0x15, 0x29, 0xa3, 0xe7, 0xfc, 0xfd, 0x5f, 0x15, 0xfe, 0xd5, 0xdc, 0x11, 0xba, 0xc5, + 
0x53, 0xca, 0x79, 0xc5, 0x29, 0x54, 0xcb, 0xa6, 0x80, 0xbe, 0x04, 0xe4, 0xcd, 0x42, 0x42, 0xb1, + 0x95, 0x84, 0x2c, 0x89, 0x22, 0x42, 0xf9, 0x11, 0xab, 0xe5, 0x84, 0xbb, 0x93, 0xe2, 0x2f, 0xd6, + 0x30, 0xfa, 0x06, 0xfe, 0xbf, 0x49, 0xb2, 0x7c, 0x32, 0x13, 0x5b, 0x91, 0x75, 0x78, 0x77, 0x83, + 0x78, 0x42, 0x66, 0xfd, 0x7d, 0xd0, 0x6f, 0x9f, 0xa9, 0x62, 0x9d, 0x4a, 0xa9, 0x5a, 0xbf, 0x55, + 0xa1, 0x9d, 0x3b, 0x8f, 0x68, 0x78, 0xeb, 0xd7, 0xf8, 0xe0, 0xdd, 0xe7, 0x74, 0x50, 0xfa, 0x57, + 0x7c, 0x00, 0x6d, 0xf9, 0xbe, 0x03, 0xe2, 0x62, 0x31, 0xa1, 0xac, 0x6b, 0x48, 0x81, 0x53, 0xe2, + 0x62, 0x3e, 0x6c, 0xe6, 0x9c, 0xe3, 0xc0, 0x7e, 0x36, 0x1d, 0x8f, 0x72, 0x87, 0x84, 0x47, 0xad, + 0xfc, 0xe8, 0x0b, 0xd8, 0x0e, 0xec, 0x57, 0x16, 0xc5, 0x0e, 0xa1, 0xae, 0xc5, 0xb8, 0x2e, 0xf9, + 0x37, 0xd2, 0xe5, 0xba, 0x08, 0x4c, 0x48, 0xb3, 0x0b, 0x7a, 0x16, 0xb9, 0x7a, 0xca, 0x8d, 0x5c, + 0xf8, 0x76, 0x8a, 0xae, 0xdf, 0xf2, 0x7b, 0x1c, 0x98, 0xaf, 0xa0, 0x29, 0x0f, 0xbb, 0x0a, 0xb5, + 0xf1, 0xf0, 0x48, 0xaf, 0xa0, 0x6d, 0x68, 0x1f, 0x1c, 0x8f, 0xac, 0xc9, 0xe1, 0x70, 0x3c, 0x79, + 0x32, 0x4d, 0x8f, 0x3b, 0xaf, 0x76, 0xe5, 0xa9, 0x1e, 0x3c, 0xbc, 0xfa, 0xcb, 0xac, 0x5c, 0x5d, + 0x9b, 0xca, 0x9b, 0x6b, 0x53, 0x79, 0x7b, 0x6d, 0x2a, 0x7f, 0x5e, 0x9b, 0xca, 0xcf, 0x37, 0x66, + 0xe5, 0xcd, 0x8d, 0x59, 0x79, 0x7b, 0x63, 0x56, 0x7e, 0x50, 0xe5, 0x40, 0xff, 0x09, 0x00, 0x00, + 0xff, 0xff, 0x17, 0x19, 0x54, 0x15, 0xe4, 0x08, 0x00, 0x00, } diff --git a/pkg/roachpb/io-formats.proto b/pkg/roachpb/io-formats.proto index b61cf85c9123..e7291619c67b 100644 --- a/pkg/roachpb/io-formats.proto +++ b/pkg/roachpb/io-formats.proto @@ -110,6 +110,14 @@ message PgDumpOptions { // Indicates the number of rows to import per table. // Must be a non-zero positive number. optional int64 row_limit = 2 [(gogoproto.nullable) = false]; + // Indicates if all unparseable and parseable, but unimplemented PGDUMP stmts + // should be ignored during IMPORT. + optional bool ignore_unsupported = 3 [(gogoproto.nullable) = false]; + // Points to the destination where unsupported statements during a PGDUMP + // import should be logged. This can only be used when ignore_unsupported is + // specified, otherwise the IMPORT errors out on encountering an unsupported + // stmt. + optional string ignore_unsupported_log = 4 [(gogoproto.nullable) = false]; } message MysqldumpOptions { diff --git a/pkg/sql/parser/lexer.go b/pkg/sql/parser/lexer.go index 62f55940d1f2..73870c7b8991 100644 --- a/pkg/sql/parser/lexer.go +++ b/pkg/sql/parser/lexer.go @@ -153,36 +153,52 @@ func (l *lexer) UpdateNumPlaceholders(p *tree.Placeholder) { } } -// Unimplemented wraps Error, setting lastUnimplementedError. -func (l *lexer) Unimplemented(feature string) { - l.lastError = unimp.New(feature, "this syntax") +// PurposelyUnimplemented wraps Error, setting lastUnimplementedError. +func (l *lexer) PurposelyUnimplemented(feature string, reason string) { + // We purposely do not use unimp here, as it appends hints to suggest that + // the error may be actively tracked as a bug. + l.lastError = errors.WithHint( + errors.WithTelemetry( + pgerror.Newf(pgcode.Syntax, "unimplemented: this syntax"), + fmt.Sprintf("sql.purposely_unimplemented.%s", feature), + ), + reason, + ) l.populateErrorDetails() + l.lastError = &tree.UnsupportedError{ + Err: l.lastError, + FeatureName: feature, + } } // UnimplementedWithIssue wraps Error, setting lastUnimplementedError. 
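+// The resulting error is additionally wrapped in *tree.UnsupportedError (see
+// below) so that callers such as IMPORT PGDUMP can detect it and, when
+// ignore_unsupported is set, skip the offending statement.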
diff --git a/pkg/sql/parser/lexer.go b/pkg/sql/parser/lexer.go
index 62f55940d1f2..73870c7b8991 100644
--- a/pkg/sql/parser/lexer.go
+++ b/pkg/sql/parser/lexer.go
@@ -153,36 +153,52 @@ func (l *lexer) UpdateNumPlaceholders(p *tree.Placeholder) {
   }
 }
 
-// Unimplemented wraps Error, setting lastUnimplementedError.
-func (l *lexer) Unimplemented(feature string) {
-  l.lastError = unimp.New(feature, "this syntax")
+// PurposelyUnimplemented wraps Error, setting lastUnimplementedError.
+func (l *lexer) PurposelyUnimplemented(feature string, reason string) {
+  // We purposely do not use unimp here, as it appends hints to suggest that
+  // the error may be actively tracked as a bug.
+  l.lastError = errors.WithHint(
+    errors.WithTelemetry(
+      pgerror.Newf(pgcode.Syntax, "unimplemented: this syntax"),
+      fmt.Sprintf("sql.purposely_unimplemented.%s", feature),
+    ),
+    reason,
+  )
   l.populateErrorDetails()
+  l.lastError = &tree.UnsupportedError{
+    Err:         l.lastError,
+    FeatureName: feature,
+  }
 }
 
 // UnimplementedWithIssue wraps Error, setting lastUnimplementedError.
 func (l *lexer) UnimplementedWithIssue(issue int) {
   l.lastError = unimp.NewWithIssue(issue, "this syntax")
   l.populateErrorDetails()
+  l.lastError = &tree.UnsupportedError{
+    Err:         l.lastError,
+    FeatureName: fmt.Sprintf("https://github.com/cockroachdb/cockroach/issues/%d", issue),
+  }
 }
 
 // UnimplementedWithIssueDetail wraps Error, setting lastUnimplementedError.
 func (l *lexer) UnimplementedWithIssueDetail(issue int, detail string) {
   l.lastError = unimp.NewWithIssueDetail(issue, detail, "this syntax")
   l.populateErrorDetails()
+  l.lastError = &tree.UnsupportedError{
+    Err:         l.lastError,
+    FeatureName: detail,
+  }
 }
 
-// PurposelyUnimplemented wraps Error, setting lastUnimplementedError.
-func (l *lexer) PurposelyUnimplemented(feature string, reason string) {
-  // We purposely do not use unimp here, as it appends hints to suggest that
-  // the error may be actively tracked as a bug.
-  l.lastError = errors.WithHint(
-    errors.WithTelemetry(
-      pgerror.Newf(pgcode.Syntax, "unimplemented: this syntax"),
-      fmt.Sprintf("sql.purposely_unimplemented.%s", feature),
-    ),
-    reason,
-  )
+// Unimplemented wraps Error, setting lastUnimplementedError.
+func (l *lexer) Unimplemented(feature string) {
+  l.lastError = unimp.New(feature, "this syntax")
   l.populateErrorDetails()
+  l.lastError = &tree.UnsupportedError{
+    Err:         l.lastError,
+    FeatureName: feature,
+  }
 }
 
 // setErr is called from parsing action rules to register an error observed
diff --git a/pkg/sql/parser/parse_test.go b/pkg/sql/parser/parse_test.go
index e419b870b4dd..2fa5a38fd1fb 100644
--- a/pkg/sql/parser/parse_test.go
+++ b/pkg/sql/parser/parse_test.go
@@ -3192,9 +3192,12 @@ func TestUnimplementedSyntax(t *testing.T) {
   {`ALTER TABLE a ADD CONSTRAINT foo EXCLUDE USING gist (bar WITH =)`, 46657, `add constraint exclude using`, ``},
   {`ALTER TABLE a INHERITS b`, 22456, `alter table inherits`, ``},
   {`ALTER TABLE a NO INHERITS b`, 22456, `alter table no inherits`, ``},
+  {`ALTER FUNCTION public.isnumeric(text) OWNER TO bob`, 0, `alter function`, ``},
   {`CREATE ACCESS METHOD a`, 0, `create access method`, ``},
+  {`COMMENT ON EXTENSION plpgsql IS 'PL/pgSQL procedural language'`, 0, `comment on extension`, ``},
+  {`COPY x FROM STDIN WHERE a = b`, 54580, ``, ``},
   {`CREATE AGGREGATE a`, 0, `create aggregate`, ``},
@@ -3202,6 +3205,7 @@ func TestUnimplementedSyntax(t *testing.T) {
   {`CREATE CONSTRAINT TRIGGER a`, 28296, `create constraint`, ``},
   {`CREATE CONVERSION a`, 0, `create conversion`, ``},
   {`CREATE DEFAULT CONVERSION a`, 0, `create def conv`, ``},
+  {`CREATE EXTENSION IF NOT EXISTS plpgsql WITH SCHEMA pg_catalog`, 0, `create extension if not exists with`, ``},
   {`CREATE FOREIGN DATA WRAPPER a`, 0, `create fdw`, ``},
   {`CREATE FOREIGN TABLE a`, 0, `create foreign table`, ``},
   {`CREATE FUNCTION a`, 17511, `create`, ``},
@@ -3240,6 +3244,9 @@ func TestUnimplementedSyntax(t *testing.T) {
   {`DISCARD TEMP`, 0, `discard temp`, ``},
   {`DISCARD TEMPORARY`, 0, `discard temp`, ``},
 
+  {`GRANT ALL ON SEQUENCE`, 0, `grant privileges on sequence`, ``},
+  {`REVOKE ALL ON SEQUENCE`, 0, `revoke privileges on sequence`, ``},
+
   {`SET CONSTRAINTS foo`, 0, `set constraints`, ``},
   {`SET LOCAL foo = bar`, 32562, ``, ``},
   {`SET foo FROM CURRENT`, 0, `set from current`, ``},
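Each of the lexer helpers above now leaves a *tree.UnsupportedError at the top of the error chain: Unimplemented and PurposelyUnimplemented record the feature string, UnimplementedWithIssue records the GitHub issue URL, and UnimplementedWithIssueDetail records the detail string, matching the strings exercised by the new test cases. A short sketch of how a caller can read that back (illustrative only, not code from this patch; it assumes the exported parser.Parse API and cockroachdb/errors):

package main

import (
  "fmt"

  "github.com/cockroachdb/cockroach/pkg/sql/parser"
  "github.com/cockroachdb/cockroach/pkg/sql/sem/tree"
  "github.com/cockroachdb/errors"
)

func main() {
  // Parsing an unsupported statement still fails, but the feature name now
  // rides along on the error instead of being lost in a bare syntax error.
  _, err := parser.Parse(`COMMENT ON EXTENSION plpgsql IS 'PL/pgSQL procedural language'`)
  var unsupported *tree.UnsupportedError
  if errors.As(err, &unsupported) {
    fmt.Println(unsupported.FeatureName) // expected: comment on extension
  }
}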
diff --git a/pkg/sql/parser/sql.y b/pkg/sql/parser/sql.y
index 38cca28b799e..f49b782d7443 100644
--- a/pkg/sql/parser/sql.y
+++ b/pkg/sql/parser/sql.y
@@ -70,6 +70,8 @@ func unimplementedWithIssueDetail(sqllex sqlLexer, issue int, detail string) int
   sqllex.(*lexer).UnimplementedWithIssueDetail(issue, detail)
   return 1
 }
+
+
 %}
 
 %{
@@ -726,6 +728,7 @@ func (u *sqlSymUnion) objectNamePrefixList() tree.ObjectNamePrefixList {
 %type <tree.Statement> stmt_block
 %type <tree.Statement> stmt
+
 %type <tree.Statement> alter_stmt
 %type <tree.Statement> alter_ddl_stmt
 %type <tree.Statement> alter_table_stmt
@@ -738,6 +741,7 @@ func (u *sqlSymUnion) objectNamePrefixList() tree.ObjectNamePrefixList {
 %type <tree.Statement> alter_role_stmt
 %type <tree.Statement> alter_type_stmt
 %type <tree.Statement> alter_schema_stmt
+%type <tree.Statement> alter_unsupported_stmt
 
 // ALTER RANGE
 %type <tree.Statement> alter_zone_range_stmt
@@ -1328,6 +1332,7 @@ stmt:
 alter_stmt:
   alter_ddl_stmt // help texts in sub-rule
 | alter_role_stmt // EXTEND WITH HELP: ALTER ROLE
+| alter_unsupported_stmt
 | ALTER error // SHOW HELP: ALTER
 
 alter_ddl_stmt:
@@ -2719,6 +2724,13 @@ import_format:
     $$ = strings.ToUpper($1)
   }
 
+alter_unsupported_stmt:
+  ALTER FUNCTION error
+  {
+    return unimplemented(sqllex, "alter function")
+  }
+
+
 // %Help: IMPORT - load data from file in a distributed manner
 // %Category: CCL
 // %Text:
@@ -3086,6 +3098,7 @@ comment_stmt:
   {
     $$.val = &tree.CommentOnIndex{Index: $4.tableIndexName(), Comment: $6.strPtr()}
   }
+| COMMENT ON EXTENSION error { return unimplemented(sqllex, "comment on extension") }
 
 comment_text:
   SCONST
@@ -3125,6 +3138,7 @@ create_extension_stmt:
 | CREATE EXTENSION name
   { $$.val = &tree.CreateExtension{Name: $3} }
+| CREATE EXTENSION IF NOT EXISTS name WITH error { return unimplemented(sqllex, "create extension if not exists with") }
 | CREATE EXTENSION error // SHOW HELP: CREATE EXTENSION
 
 create_unsupported:
@@ -3146,7 +3160,7 @@ create_unsupported:
 | CREATE SUBSCRIPTION error { return unimplemented(sqllex, "create subscription") }
 | CREATE TABLESPACE error { return unimplementedWithIssueDetail(sqllex, 54113, "create tablespace") }
 | CREATE TEXT error { return unimplementedWithIssueDetail(sqllex, 7821, "create text") }
-| CREATE TRIGGER error { return unimplementedWithIssueDetail(sqllex, 28296, "create") }
+| CREATE TRIGGER error { return unimplementedWithIssueDetail(sqllex, 28296, "create trigger") }
 
 opt_or_replace:
   OR REPLACE {}
@@ -3884,6 +3898,10 @@ grant_stmt:
       Grantees: $7.nameList(),
     }
   }
+| GRANT privileges ON SEQUENCE error
+  {
+    return unimplemented(sqllex, "grant privileges on sequence")
+  }
 | GRANT error // SHOW HELP: GRANT
 
 // %Help: REVOKE - remove access privileges and role memberships
@@ -3931,6 +3949,10 @@ revoke_stmt:
       Grantees: $7.nameList(),
     }
  }
+| REVOKE privileges ON SEQUENCE error
+  {
+    return unimplemented(sqllex, "revoke privileges on sequence")
+  }
 | REVOKE error // SHOW HELP: REVOKE
 
 // ALL can either be by itself, or with the optional PRIVILEGES keyword (which no-ops)
diff --git a/pkg/sql/sem/tree/BUILD.bazel b/pkg/sql/sem/tree/BUILD.bazel
index a11b2beef9ee..bf56e8d61a46 100644
--- a/pkg/sql/sem/tree/BUILD.bazel
+++ b/pkg/sql/sem/tree/BUILD.bazel
@@ -87,6 +87,7 @@ go_library(
         "type_check.go",
         "type_name.go",
         "union.go",
+        "unsupported_error.go",
         "update.go",
         "values.go",
         "var_name.go",
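These grammar productions exist so that DDL commonly emitted by pg_dump fails with a recognizable unimplemented error rather than a plain syntax error. A rough sketch of the consuming side, reusing the imports from the previous example (the helper name and signature here are hypothetical, not the actual importccl code):

// handleDumpStatement decides whether a failed statement from a PGDUMP file
// can be skipped when the ignore_unsupported option is set.
func handleDumpStatement(stmtText string, ignoreUnsupported bool, logIgnored func(string)) error {
  _, err := parser.ParseOne(stmtText)
  if err == nil {
    return nil // supported statement; handled by the normal import path
  }
  var unsupported *tree.UnsupportedError
  if ignoreUnsupported && errors.As(err, &unsupported) {
    // Record the statement (e.g. for the configured log destination) and
    // carry on instead of aborting the whole IMPORT.
    logIgnored(fmt.Sprintf("%s: unsupported by IMPORT", unsupported.FeatureName))
    return nil
  }
  return err
}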
diff --git a/pkg/sql/sem/tree/unsupported_error.go b/pkg/sql/sem/tree/unsupported_error.go
new file mode 100644
index 000000000000..ac91e1efc7fd
--- /dev/null
+++ b/pkg/sql/sem/tree/unsupported_error.go
@@ -0,0 +1,31 @@
+// Copyright 2020 The Cockroach Authors.
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package tree
+
+var _ error = &UnsupportedError{}
+
+// UnsupportedError is an error object which is returned by some unimplemented SQL
+// statements. It is currently only used to skip over PGDUMP statements during
+// an import.
+type UnsupportedError struct {
+  Err         error
+  FeatureName string
+}
+
+func (u *UnsupportedError) Error() string {
+  return u.Err.Error()
+}
+
+// Cause implements causer.
+func (u *UnsupportedError) Cause() error { return u.Err }
+
+// Unwrap implements wrapper.
+func (u *UnsupportedError) Unwrap() error { return u.Err }
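Implementing both Cause and Unwrap keeps the wrapper transparent to the standard library errors package as well as to cause-style unwrapping, so codes and hints on the wrapped error stay reachable; only FeatureName rides on top. A small illustrative check, assumed usage rather than part of the patch:

package main

import (
  "errors"
  "fmt"

  "github.com/cockroachdb/cockroach/pkg/sql/sem/tree"
)

func main() {
  base := errors.New("unimplemented: this syntax")
  wrapped := &tree.UnsupportedError{Err: base, FeatureName: "alter function"}

  fmt.Println(wrapped.Error())                // unimplemented: this syntax
  fmt.Println(errors.Is(wrapped, base))       // true, found via Unwrap
  fmt.Println(errors.Unwrap(wrapped) == base) // true
}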