From 371dbd62d7c16e27b8e5c5f627c9e22315891b98 Mon Sep 17 00:00:00 2001
From: tiancaiamao
Date: Fri, 18 Aug 2023 11:47:01 +0800
Subject: [PATCH 1/3] This is an automated cherry-pick of #46127

Signed-off-by: ti-chi-bot
---
 br/pkg/backup/client.go   | 14 ++++++++
 br/tests/br_autoid/run.sh | 51 ++++++++++++++++++++++++++
 br/tests/run_group.sh     | 76 +++++++++++++++++++++++++++++++++++++++
 ddl/ddl_api.go            |  6 ++++
 parser/model/model.go     | 31 +++++++++++-----
 5 files changed, 170 insertions(+), 8 deletions(-)
 create mode 100644 br/tests/br_autoid/run.sh
 create mode 100755 br/tests/run_group.sh

diff --git a/br/pkg/backup/client.go b/br/pkg/backup/client.go
index 3c0837a4621d8..8c948308869a3 100644
--- a/br/pkg/backup/client.go
+++ b/br/pkg/backup/client.go
@@ -578,6 +578,20 @@ func BuildBackupRangeAndSchema(
         default:
             if tableInfo.SepAutoInc() {
                 globalAutoID, err = autoIDAccess.IncrementID(tableInfo.Version).Get()
+                // For a nonclustered table with an auto_increment column, both auto_increment_id and _tidb_rowid are required.
+                // See also https://github.com/pingcap/tidb/issues/46093
+                if rowID, err1 := autoIDAccess.RowID().Get(); err1 == nil {
+                    tableInfo.AutoIncIDExtra = rowID + 1
+                } else {
+                    // It is possible that the rowid meta key does not exist (i.e. the table has auto_increment_id but no _tidb_rowid),
+                    // so err1 != nil might be expected.
+                    if globalAutoID == 0 {
+                        // When both auto_increment_id and _tidb_rowid are missing, something must be wrong.
+                        return errors.Trace(err1)
+                    }
+                    // Print a warning in other cases; should it be an INFO log?
+                    log.Warn("get rowid error", zap.Error(err1))
+                }
             } else {
                 globalAutoID, err = idAlloc.NextGlobalAutoID()
             }
diff --git a/br/tests/br_autoid/run.sh b/br/tests/br_autoid/run.sh
new file mode 100644
index 0000000000000..af0ee46c7f582
--- /dev/null
+++ b/br/tests/br_autoid/run.sh
@@ -0,0 +1,51 @@
+#!/bin/sh
+#
+# Copyright 2023 PingCAP, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+set -eu
+DB="$TEST_NAME"
+
+run_sql "create database if not exists ${DB}"
+run_sql "create table $DB.issue46093 (a int primary key nonclustered auto_increment, b int) auto_id_cache = 1;"
+run_sql "insert into $DB.issue46093 (b) values (1), (2), (3);"
+run_sql "show table $DB.issue46093 next_row_id;"
+check_contains "NEXT_GLOBAL_ROW_ID: 30001"
+check_contains "NEXT_GLOBAL_ROW_ID: 4"
+
+run_sql "backup table $DB.issue46093 to 'local://$TEST_DIR/$DB'";
+run_sql "drop table $DB.issue46093;"
+run_sql "restore table $DB.issue46093 from 'local://$TEST_DIR/$DB';"
+
+run_sql "show table $DB.issue46093 next_row_id;"
+check_contains "NEXT_GLOBAL_ROW_ID: 30001"
+check_contains "NEXT_GLOBAL_ROW_ID: 4001"
+run_sql "insert into $DB.issue46093 (b) values (4), (5), (6);"
+run_sql "insert into $DB.issue46093 (b) values (7), (8), (9);"
+run_sql "select * from $DB.issue46093;"
+check_contains "a: 1"
+check_contains "a: 2"
+check_contains "a: 3"
+check_contains "a: 4001"
+check_contains "a: 4002"
+check_contains "a: 4003"
+check_contains "a: 4004"
+check_contains "a: 4005"
+check_contains "a: 4006"
+check_contains "b: 4"
+check_contains "b: 5"
+check_contains "b: 6"
+check_contains "b: 7"
+check_contains "b: 8"
+check_contains "b: 9"
diff --git a/br/tests/run_group.sh b/br/tests/run_group.sh
new file mode 100755
index 0000000000000..58fe387d4be6a
--- /dev/null
+++ b/br/tests/run_group.sh
@@ -0,0 +1,76 @@
+#!/usr/bin/env bash
+
+# This script splits the integration tests into groups to support parallel test execution.
+# All the integration tests are located in the br/tests directory; only the directories
+# containing run.sh are considered integration tests. The script reports an error if any case is not assigned to a group.
+
+set -eo pipefail
+
+# Step 1
+CUR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
+group=$1
+export COV_DIR="/tmp/group_cover"
+rm -rf "$COV_DIR"
+mkdir "$COV_DIR"
+
+# Define groups
+# Note: If a new group is added, its name must also be added to the CI configuration:
+# * https://github.com/PingCAP-QE/ci/blob/main/pipelines/pingcap/tidb/latest/pull_br_integration_test.groovy
+# Each group should take roughly the same amount of time to run, which reduces the CI waiting time:
+# put multiple light tests together and give heavy tests their own group.
+declare -A groups +groups=( + ["G00"]="br_300_small_tables br_backup_empty br_backup_version br_cache_table br_case_sensitive br_charset_gbk br_check_new_collocation_enable" + ["G01"]="br_autoid br_crypter2 br_db br_db_online br_db_online_newkv br_db_skip br_debug_meta br_ebs br_foreign_key br_full" + ["G02"]="br_full_cluster_restore br_full_ddl br_full_index br_gcs br_history" + ["G03"]='br_incompatible_tidb_config br_incremental br_incremental_ddl br_incremental_index' + ["G04"]='br_incremental_only_ddl br_incremental_same_table br_insert_after_restore br_key_locked br_log_test br_move_backup br_mv_index br_other br_partition_add_index' + ["G05"]='br_range br_rawkv br_replica_read br_restore_TDE_enable br_restore_log_task_enable br_s3 br_shuffle_leader br_shuffle_region br_single_table' + ["G06"]='br_skip_checksum br_small_batch_size br_split_region_fail br_systables br_table_filter br_txn' + ["G07"]='br_clustered_index br_crypter br_table_partition br_tidb_placement_policy br_tiflash br_tikv_outage' + ["G08"]='br_tikv_outage2 br_ttl br_views_and_sequences br_z_gc_safepoint lightning_add_index lightning_alter_random lightning_auto_columns' + ["G09"]='lightning_auto_random_default lightning_bom_file lightning_character_sets lightning_check_partial_imported lightning_checkpoint lightning_checkpoint_chunks lightning_checkpoint_columns lightning_checkpoint_dirty_tableid' + ["G10"]='lightning_checkpoint_engines lightning_checkpoint_engines_order lightning_checkpoint_error_destroy lightning_checkpoint_parquet lightning_checkpoint_timestamp lightning_checksum_mismatch lightning_cmdline_override lightning_column_permutation lightning_common_handle' + ["G11"]='lightning_compress lightning_concurrent-restore lightning_config_max_error lightning_config_skip_csv_header lightning_csv lightning_default-columns lightning_disable_scheduler_by_key_range lightning_disk_quota lightning_distributed_import' + ["G12"]='lightning_drop_other_tables_halfway lightning_duplicate_detection lightning_duplicate_detection_new lightning_duplicate_resolution lightning_duplicate_resolution_incremental lightning_error_summary lightning_examples lightning_exotic_filenames lightning_extend_routes lightning_fail_fast' + ["G13"]='lightning_fail_fast_on_nonretry_err lightning_file_routing lightning_foreign_key lightning_gcs lightning_generated_columns lightning_ignore_columns lightning_import_compress lightning_incremental lightning_issue_282' + ["G14"]='lightning_issue_40657 lightning_issue_410 lightning_issue_519 lightning_local_backend lightning_max_incr lightning_max_random lightning_multi_valued_index lightning_new_collation lightning_no_schema' + ["G15"]='lightning_parquet lightning_partition_incremental lightning_partitioned-table lightning_record_network lightning_reload_cert lightning_restore lightning_routes lightning_routes_panic lightning_row-format-v2 lightning_s3' + ["G16"]='lightning_shard_rowid lightning_source_linkfile lightning_sqlmode lightning_tidb_duplicate_data lightning_tidb_rowid lightning_tiflash lightning_tikv_multi_rocksdb lightning_too_many_columns lightning_tool_135' + ["G17"]='lightning_tool_1420 lightning_tool_1472 lightning_tool_241 lightning_ttl lightning_unused_config_keys lightning_various_types lightning_view lightning_write_batch lightning_write_limit' +) + +# Get other cases not in groups, to avoid missing any case +others=() +for script in "$CUR"/*/run.sh; do + test_name="$(basename "$(dirname "$script")")" + # shellcheck disable=SC2076 + if [[ ! 
" ${groups[*]} " =~ " ${test_name} " ]]; then + others=("${others[@]} ${test_name}") + fi +done + +if [[ "$group" == "others" ]]; then + if [[ -z $others ]]; then + echo "All br&lightning integration test cases have been added to groups" + exit 0 + fi + echo "Error: "$others" is not added to any group in br/tests/run_group.sh" + exit 1 +elif [[ " ${!groups[*]} " =~ " ${group} " ]]; then + test_names="${groups[${group}]}" + # Run test cases + if [[ -n $test_names ]]; then + echo "" + echo "Run cases: ${test_names}" + for case_name in $test_names; do + echo "Run cases: ${case_name}" + rm -rf /tmp/backup_restore_test + mkdir -p /tmp/backup_restore_test + TEST_NAME=${case_name} ${CUR}/run.sh + done + fi +else + echo "Error: invalid group name: ${group}" + exit 1 +fi diff --git a/ddl/ddl_api.go b/ddl/ddl_api.go index 3c252a5d1ee25..1df222bd09712 100644 --- a/ddl/ddl_api.go +++ b/ddl/ddl_api.go @@ -2504,6 +2504,12 @@ func (d *ddl) createTableWithInfoPost( return errors.Trace(err) } } + // For issue https://github.com/pingcap/tidb/issues/46093 + if tbInfo.AutoIncIDExtra != 0 { + if err = d.handleAutoIncID(tbInfo, schemaID, tbInfo.AutoIncIDExtra-1, autoid.RowIDAllocType); err != nil { + return errors.Trace(err) + } + } if tbInfo.AutoRandID > 1 { // Default tableAutoRandID base is 0. // If the first ID is expected to greater than 1, we need to do rebase. diff --git a/parser/model/model.go b/parser/model/model.go index 371e7c44dfc88..8a4b611eb8ca5 100644 --- a/parser/model/model.go +++ b/parser/model/model.go @@ -489,14 +489,29 @@ type TableInfo struct { // 1 for the clustered index created > 5.0.0 RC. CommonHandleVersion uint16 `json:"common_handle_version"` - Comment string `json:"comment"` - AutoIncID int64 `json:"auto_inc_id"` - AutoIdCache int64 `json:"auto_id_cache"` //nolint:revive - AutoRandID int64 `json:"auto_rand_id"` - MaxColumnID int64 `json:"max_col_id"` - MaxIndexID int64 `json:"max_idx_id"` - MaxForeignKeyID int64 `json:"max_fk_id"` - MaxConstraintID int64 `json:"max_cst_id"` + Comment string `json:"comment"` + AutoIncID int64 `json:"auto_inc_id"` + + // Only used by BR when: + // 1. SepAutoInc() is true + // 2. The table is nonclustered and has auto_increment column. + // In that case, both auto_increment_id and tidb_rowid need to be backup & recover. + // See also https://github.com/pingcap/tidb/issues/46093 + // + // It should have been named TiDBRowID, but for historial reasons, we do not use separate meta key for _tidb_rowid and auto_increment_id, + // and field `AutoIncID` is used to serve both _tidb_rowid and auto_increment_id. + // If we introduce a TiDBRowID here, it could make furthur misunderstanding: + // in most cases, AutoIncID is _tidb_rowid and TiDBRowID is null + // but in some cases, AutoIncID is auto_increment_id and TiDBRowID is _tidb_rowid + // So let's just use another name AutoIncIDExtra to avoid misconception. + AutoIncIDExtra int64 `json:"auto_inc_id_extra,omitempty"` + + AutoIdCache int64 `json:"auto_id_cache"` //nolint:revive + AutoRandID int64 `json:"auto_rand_id"` + MaxColumnID int64 `json:"max_col_id"` + MaxIndexID int64 `json:"max_idx_id"` + MaxForeignKeyID int64 `json:"max_fk_id"` + MaxConstraintID int64 `json:"max_cst_id"` // UpdateTS is used to record the timestamp of updating the table's schema information. // These changing schema operations don't include 'truncate table' and 'rename table'. 
     UpdateTS uint64 `json:"update_timestamp"`

From 7e21e7c556267f0ffa2ede2de6996169da4a66d2 Mon Sep 17 00:00:00 2001
From: tiancaiamao
Date: Fri, 18 Aug 2023 11:58:09 +0800
Subject: [PATCH 2/3] resolve conflict

---
 br/pkg/backup/client.go | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/br/pkg/backup/client.go b/br/pkg/backup/client.go
index 8c948308869a3..c69e4283b386b 100644
--- a/br/pkg/backup/client.go
+++ b/br/pkg/backup/client.go
@@ -587,7 +587,7 @@ func BuildBackupRangeAndSchema(
                     // so err1 != nil might be expected.
                     if globalAutoID == 0 {
                         // When both auto_increment_id and _tidb_rowid are missing, something must be wrong.
-                        return errors.Trace(err1)
+                        return nil, nil, nil, errors.Trace(err1)
                     }
                     // Print a warning in other cases; should it be an INFO log?
                     log.Warn("get rowid error", zap.Error(err1))

From a786a4d6e61523cfc0b4a6497e0e44fff6d63536 Mon Sep 17 00:00:00 2001
From: tiancaiamao
Date: Fri, 18 Aug 2023 13:30:40 +0800
Subject: [PATCH 3/3] address comment

---
 br/tests/run_group.sh | 76 ------------------------------------------
 1 file changed, 76 deletions(-)
 delete mode 100755 br/tests/run_group.sh

diff --git a/br/tests/run_group.sh b/br/tests/run_group.sh
deleted file mode 100755
index 58fe387d4be6a..0000000000000
--- a/br/tests/run_group.sh
+++ /dev/null
@@ -1,76 +0,0 @@
-#!/usr/bin/env bash
-
-# This script splits the integration tests into groups to support parallel test execution.
-# All the integration tests are located in the br/tests directory; only the directories
-# containing run.sh are considered integration tests. The script reports an error if any case is not assigned to a group.
-
-set -eo pipefail
-
-# Step 1
-CUR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
-group=$1
-export COV_DIR="/tmp/group_cover"
-rm -rf "$COV_DIR"
-mkdir "$COV_DIR"
-
-# Define groups
-# Note: If a new group is added, its name must also be added to the CI configuration:
-# * https://github.com/PingCAP-QE/ci/blob/main/pipelines/pingcap/tidb/latest/pull_br_integration_test.groovy
-# Each group should take roughly the same amount of time to run, which reduces the CI waiting time:
-# put multiple light tests together and give heavy tests their own group.
-declare -A groups -groups=( - ["G00"]="br_300_small_tables br_backup_empty br_backup_version br_cache_table br_case_sensitive br_charset_gbk br_check_new_collocation_enable" - ["G01"]="br_autoid br_crypter2 br_db br_db_online br_db_online_newkv br_db_skip br_debug_meta br_ebs br_foreign_key br_full" - ["G02"]="br_full_cluster_restore br_full_ddl br_full_index br_gcs br_history" - ["G03"]='br_incompatible_tidb_config br_incremental br_incremental_ddl br_incremental_index' - ["G04"]='br_incremental_only_ddl br_incremental_same_table br_insert_after_restore br_key_locked br_log_test br_move_backup br_mv_index br_other br_partition_add_index' - ["G05"]='br_range br_rawkv br_replica_read br_restore_TDE_enable br_restore_log_task_enable br_s3 br_shuffle_leader br_shuffle_region br_single_table' - ["G06"]='br_skip_checksum br_small_batch_size br_split_region_fail br_systables br_table_filter br_txn' - ["G07"]='br_clustered_index br_crypter br_table_partition br_tidb_placement_policy br_tiflash br_tikv_outage' - ["G08"]='br_tikv_outage2 br_ttl br_views_and_sequences br_z_gc_safepoint lightning_add_index lightning_alter_random lightning_auto_columns' - ["G09"]='lightning_auto_random_default lightning_bom_file lightning_character_sets lightning_check_partial_imported lightning_checkpoint lightning_checkpoint_chunks lightning_checkpoint_columns lightning_checkpoint_dirty_tableid' - ["G10"]='lightning_checkpoint_engines lightning_checkpoint_engines_order lightning_checkpoint_error_destroy lightning_checkpoint_parquet lightning_checkpoint_timestamp lightning_checksum_mismatch lightning_cmdline_override lightning_column_permutation lightning_common_handle' - ["G11"]='lightning_compress lightning_concurrent-restore lightning_config_max_error lightning_config_skip_csv_header lightning_csv lightning_default-columns lightning_disable_scheduler_by_key_range lightning_disk_quota lightning_distributed_import' - ["G12"]='lightning_drop_other_tables_halfway lightning_duplicate_detection lightning_duplicate_detection_new lightning_duplicate_resolution lightning_duplicate_resolution_incremental lightning_error_summary lightning_examples lightning_exotic_filenames lightning_extend_routes lightning_fail_fast' - ["G13"]='lightning_fail_fast_on_nonretry_err lightning_file_routing lightning_foreign_key lightning_gcs lightning_generated_columns lightning_ignore_columns lightning_import_compress lightning_incremental lightning_issue_282' - ["G14"]='lightning_issue_40657 lightning_issue_410 lightning_issue_519 lightning_local_backend lightning_max_incr lightning_max_random lightning_multi_valued_index lightning_new_collation lightning_no_schema' - ["G15"]='lightning_parquet lightning_partition_incremental lightning_partitioned-table lightning_record_network lightning_reload_cert lightning_restore lightning_routes lightning_routes_panic lightning_row-format-v2 lightning_s3' - ["G16"]='lightning_shard_rowid lightning_source_linkfile lightning_sqlmode lightning_tidb_duplicate_data lightning_tidb_rowid lightning_tiflash lightning_tikv_multi_rocksdb lightning_too_many_columns lightning_tool_135' - ["G17"]='lightning_tool_1420 lightning_tool_1472 lightning_tool_241 lightning_ttl lightning_unused_config_keys lightning_various_types lightning_view lightning_write_batch lightning_write_limit' -) - -# Get other cases not in groups, to avoid missing any case -others=() -for script in "$CUR"/*/run.sh; do - test_name="$(basename "$(dirname "$script")")" - # shellcheck disable=SC2076 - if [[ ! 
" ${groups[*]} " =~ " ${test_name} " ]]; then - others=("${others[@]} ${test_name}") - fi -done - -if [[ "$group" == "others" ]]; then - if [[ -z $others ]]; then - echo "All br&lightning integration test cases have been added to groups" - exit 0 - fi - echo "Error: "$others" is not added to any group in br/tests/run_group.sh" - exit 1 -elif [[ " ${!groups[*]} " =~ " ${group} " ]]; then - test_names="${groups[${group}]}" - # Run test cases - if [[ -n $test_names ]]; then - echo "" - echo "Run cases: ${test_names}" - for case_name in $test_names; do - echo "Run cases: ${case_name}" - rm -rf /tmp/backup_restore_test - mkdir -p /tmp/backup_restore_test - TEST_NAME=${case_name} ${CUR}/run.sh - done - fi -else - echo "Error: invalid group name: ${group}" - exit 1 -fi