From 7080bca11007ac2018c5aac21ad42ac074c8716b Mon Sep 17 00:00:00 2001
From: Ti Chi Robot
Date: Wed, 10 May 2023 18:24:08 +0800
Subject: [PATCH] makefile(all): add new cases to run_group and check cases in
 makefile (#8794) (#8924)

close pingcap/tiflow#8932
---
 Makefile | 11 ++-
 dm/checker/checker.go | 9 +-
 dm/pkg/checker/lightning.go | 19 +++-
 dm/pkg/checker/lightning_test.go | 8 +-
 .../conf/diff_config1.toml | 35 ++++++++
 .../conf/diff_config2.toml | 35 ++++++++
 .../conf/diff_config3.toml | 29 +++++++
 .../conf/diff_config4.toml | 29 +++++++
 .../lightning_load_task/conf/dm-master.toml | 6 ++
 .../conf/dm-task-standalone.yaml | 41 +++++++++
 .../lightning_load_task/conf/dm-task.yaml | 1 -
 .../conf/dm-task2-standalone.yaml | 41 +++++++++
 .../lightning_load_task/conf/dm-worker1.toml | 2 +
 .../lightning_load_task/conf/dm-worker2.toml | 2 +
 .../lightning_load_task/conf/dm-worker3.toml | 2 +
 .../lightning_load_task/conf/source1.yaml | 13 +++
 .../lightning_load_task/conf/source2.yaml | 9 ++
 .../data/db1.increment.sql | 11 +++
 .../lightning_load_task/data/db1.prepare.sql | 17 ++++
 .../data/db2.increment.sql | 8 ++
 .../lightning_load_task/data/db2.prepare.sql | 17 ++++
 dm/tests/lightning_load_task/run.sh | 36 ++++----
 dm/tests/others_integration_1.txt | 10 ---
 dm/tests/others_integration_2.txt | 12 ---
 dm/tests/others_integration_3.txt | 11 ---
 dm/tests/run.sh | 11 ---
 dm/tests/run_group.sh | 28 +++---
 engine/test/integration_tests/run_group.sh | 27 +++---
 go.mod | 20 ++---
 go.sum | 41 ++++-----
 tests/integration_tests/api_v2/run.sh | 8 +-
 tests/integration_tests/bank/run.sh | 6 +-
 .../batch_update_to_no_batch/run.sh | 2 +-
 tests/integration_tests/bdr_mode/run.sh | 2 +-
 .../canal_json_adapter_compatibility/run.sh | 2 +-
 .../integration_tests/canal_json_basic/run.sh | 2 +-
 .../canal_json_storage_basic/run.sh | 4 +-
 .../canal_json_storage_partition_table/run.sh | 4 +-
 tests/integration_tests/cdc/cdc.go | 2 +-
 .../{ => cdc}/dailytest/case.go | 0
 .../{ => cdc}/dailytest/dailytest.go | 0
 .../{ => cdc}/dailytest/data.go | 0
 .../{ => cdc}/dailytest/db.go | 0
 .../{ => cdc}/dailytest/exector.go | 0
 .../{ => cdc}/dailytest/job.go | 0
 .../{ => cdc}/dailytest/parser.go | 0
 .../{ => cdc}/dailytest/rand.go | 0
 .../changefeed_fast_fail/run.sh | 2 +-
 .../run.sh | 15 ++--
 .../integration_tests/clustered_index/run.sh | 4 +-
 .../consistent_replicate_ddl/run.sh | 2 +-
 .../consistent_replicate_gbk/run.sh | 2 +-
 .../consistent_replicate_nfs/run.sh | 2 +-
 .../consistent_replicate_storage_file/run.sh | 2 +-
 .../consistent_replicate_storage_s3/run.sh | 2 +-
 .../csv_storage_basic/run.sh | 4 +-
 .../csv_storage_multi_tables_ddl/run.sh | 4 +-
 .../csv_storage_partition_table/run.sh | 4 +-
 tests/integration_tests/ddl_reentrant/run.sh | 2 +-
 tests/integration_tests/http_api/run.sh | 4 +-
 tests/integration_tests/http_api_tls/run.sh | 4 +-
 .../kafka_big_messages/run.sh | 2 +-
 .../kafka_big_messages_v2/run.sh | 2 +-
 .../conf/changefeed.toml | 0
 .../conf/diff_config.toml | 2 +-
 .../conf/workload | 0
 .../{big_txn_v2 => kafka_big_txn_v2}/run.sh | 3 +-
 .../kafka_compression/run.sh | 2 +-
 tests/integration_tests/kafka_messages/run.sh | 22 +++--
 .../kafka_sink_error_resume/run.sh | 2 +-
 .../kill_owner_with_ddl/run.sh | 2 +-
 .../multi_cdc_cluster/run.sh | 2 +-
 .../integration_tests/multi_changefeed/run.sh | 2 +-
 .../multi_tables_ddl_v2/run.sh | 3 +-
 tests/integration_tests/multi_topics/run.sh | 2 +-
 .../integration_tests/multi_topics_v2/run.sh | 3 +-
 .../owner_remove_table_error/run.sh | 2 +-
 .../processor_etcd_worker_delay/run.sh | 2 +-
 .../processor_resolved_ts_fallback/run.sh | 2 +-
 tests/integration_tests/run_group.sh | 85 ++++++++++++-------
 tests/integration_tests/sequence/run.sh | 2 +-
 tests/integration_tests/sink_hang/run.sh | 2 +-
 82 files changed, 518 insertions(+), 252 deletions(-)
 create mode 100644 dm/tests/lightning_load_task/conf/diff_config1.toml
 create mode 100644 dm/tests/lightning_load_task/conf/diff_config2.toml
 create mode 100644 dm/tests/lightning_load_task/conf/diff_config3.toml
 create mode 100644 dm/tests/lightning_load_task/conf/diff_config4.toml
 create mode 100644 dm/tests/lightning_load_task/conf/dm-master.toml
 create mode 100644 dm/tests/lightning_load_task/conf/dm-task-standalone.yaml
 create mode 100644 dm/tests/lightning_load_task/conf/dm-task2-standalone.yaml
 create mode 100644 dm/tests/lightning_load_task/conf/dm-worker1.toml
 create mode 100644 dm/tests/lightning_load_task/conf/dm-worker2.toml
 create mode 100644 dm/tests/lightning_load_task/conf/dm-worker3.toml
 create mode 100644 dm/tests/lightning_load_task/conf/source1.yaml
 create mode 100644 dm/tests/lightning_load_task/conf/source2.yaml
 create mode 100644 dm/tests/lightning_load_task/data/db1.increment.sql
 create mode 100644 dm/tests/lightning_load_task/data/db1.prepare.sql
 create mode 100644 dm/tests/lightning_load_task/data/db2.increment.sql
 create mode 100644 dm/tests/lightning_load_task/data/db2.prepare.sql
 delete mode 100644 dm/tests/others_integration_1.txt
 delete mode 100644 dm/tests/others_integration_2.txt
 delete mode 100644 dm/tests/others_integration_3.txt
 mode change 100644 => 100755 engine/test/integration_tests/run_group.sh
 rename tests/integration_tests/{ => cdc}/dailytest/case.go (100%)
 rename tests/integration_tests/{ => cdc}/dailytest/dailytest.go (100%)
 rename tests/integration_tests/{ => cdc}/dailytest/data.go (100%)
 rename tests/integration_tests/{ => cdc}/dailytest/db.go (100%)
 rename tests/integration_tests/{ => cdc}/dailytest/exector.go (100%)
 rename tests/integration_tests/{ => cdc}/dailytest/job.go (100%)
 rename tests/integration_tests/{ => cdc}/dailytest/parser.go (100%)
 rename tests/integration_tests/{ => cdc}/dailytest/rand.go (100%)
 rename tests/integration_tests/{big_txn_v2 => kafka_big_txn_v2}/conf/changefeed.toml (100%)
 rename tests/integration_tests/{big_txn_v2 => kafka_big_txn_v2}/conf/diff_config.toml (85%)
 rename tests/integration_tests/{big_txn_v2 => kafka_big_txn_v2}/conf/workload (100%)
 rename tests/integration_tests/{big_txn_v2 => kafka_big_txn_v2}/run.sh (96%)
 mode change 100644 => 100755 tests/integration_tests/run_group.sh

diff --git a/Makefile b/Makefile
index de8729e2432..6050e503a41 100644
--- a/Makefile
+++ b/Makefile
@@ -311,8 +311,8 @@ check-static: tools/bin/golangci-lint
 	cd dm && ../tools/bin/golangci-lint run --timeout 10m0s

 check: check-copyright generate_mock go-generate fmt check-static tidy terror_check errdoc \
-	check-merge-conflicts check-ticdc-dashboard check-diff-line-width \
-	swagger-spec check-makefiles check_engine_integration_test
+	check-merge-conflicts check-ticdc-dashboard check-diff-line-width swagger-spec check-makefiles \
+	check_cdc_integration_test check_dm_integration_test check_engine_integration_test
 	@git --no-pager diff --exit-code || (echo "Please add changed files!" && false)

 fast_check: check-copyright fmt check-static tidy terror_check errdoc \
@@ -537,6 +537,13 @@ check_third_party_binary_for_engine:

 check_engine_integration_test:
 	./engine/test/utils/check_case.sh
+	./engine/test/integration_tests/run_group.sh "check others"
+
+check_dm_integration_test:
+	./dm/tests/run_group.sh "check others"
+
+check_cdc_integration_test:
+	./tests/integration_tests/run_group.sh check "others"

 bin/mc:
 	./scripts/download-mc.sh
diff --git a/dm/checker/checker.go b/dm/checker/checker.go
index c6f87ef97db..8386212bb46 100644
--- a/dm/checker/checker.go
+++ b/dm/checker/checker.go
@@ -28,6 +28,7 @@ import (
 	"github.com/pingcap/tidb/br/pkg/lightning/importer"
 	"github.com/pingcap/tidb/br/pkg/lightning/importer/opts"
 	"github.com/pingcap/tidb/br/pkg/lightning/mydump"
+	"github.com/pingcap/tidb/br/pkg/lightning/precheck"
 	"github.com/pingcap/tidb/dumpling/export"
 	"github.com/pingcap/tidb/parser/mysql"
 	"github.com/pingcap/tidb/types"
@@ -477,28 +478,28 @@ func (c *Checker) Init(ctx context.Context) (err error) {
 			info.totalDataSize.Load(), targetInfoGetter))
 	}
 	if _, ok := c.checkingItems[config.LightningEmptyRegionChecking]; ok {
-		lChecker, err := builder.BuildPrecheckItem(importer.CheckTargetClusterEmptyRegion)
+		lChecker, err := builder.BuildPrecheckItem(precheck.CheckTargetClusterEmptyRegion)
 		if err != nil {
 			return err
 		}
 		c.checkList = append(c.checkList, checker.NewLightningEmptyRegionChecker(lChecker))
 	}
 	if _, ok := c.checkingItems[config.LightningRegionDistributionChecking]; ok {
-		lChecker, err := builder.BuildPrecheckItem(importer.CheckTargetClusterRegionDist)
+		lChecker, err := builder.BuildPrecheckItem(precheck.CheckTargetClusterRegionDist)
 		if err != nil {
 			return err
 		}
 		c.checkList = append(c.checkList, checker.NewLightningRegionDistributionChecker(lChecker))
 	}
 	if _, ok := c.checkingItems[config.LightningDownstreamVersionChecking]; ok {
-		lChecker, err := builder.BuildPrecheckItem(importer.CheckTargetClusterVersion)
+		lChecker, err := builder.BuildPrecheckItem(precheck.CheckTargetClusterVersion)
 		if err != nil {
 			return err
 		}
 		c.checkList = append(c.checkList, checker.NewLightningClusterVersionChecker(lChecker))
 	}
 	if _, ok := c.checkingItems[config.LightningMutexFeatureChecking]; ok {
-		lChecker, err := builder.BuildPrecheckItem(importer.CheckTargetUsingCDCPITR)
+		lChecker, err := builder.BuildPrecheckItem(precheck.CheckTargetUsingCDCPITR)
 		if err != nil {
 			return err
 		}
diff --git a/dm/pkg/checker/lightning.go b/dm/pkg/checker/lightning.go
index 9b8a1addb74..ae249a5f393 100644
--- a/dm/pkg/checker/lightning.go
+++ b/dm/pkg/checker/lightning.go
@@ -19,13 +19,14 @@ import (

 	"github.com/docker/go-units"
 	"github.com/pingcap/tidb/br/pkg/lightning/importer"
+	"github.com/pingcap/tidb/br/pkg/lightning/precheck"
 	"github.com/pingcap/tiflow/dm/pkg/log"
 )

 func convertLightningPrecheck(
 	ctx context.Context,
 	dmResult *Result,
-	lightningPrechecker importer.PrecheckItem,
+	lightningPrechecker precheck.Checker,
 	failLevel State,
 	instruction string,
 ) {
@@ -45,11 +46,11 @@ func convertLightningPrecheck(

 // LightningEmptyRegionChecker checks whether there are too many empty regions in the cluster.
 type LightningEmptyRegionChecker struct {
-	inner importer.PrecheckItem
+	inner precheck.Checker
 }

 // NewLightningEmptyRegionChecker creates a new LightningEmptyRegionChecker.
-func NewLightningEmptyRegionChecker(lightningChecker importer.PrecheckItem) RealChecker {
+func NewLightningEmptyRegionChecker(lightningChecker precheck.Checker) RealChecker {
 	return &LightningEmptyRegionChecker{inner: lightningChecker}
 }
@@ -77,11 +78,11 @@ func (c *LightningEmptyRegionChecker) Check(ctx context.Context) *Result {

 // LightningRegionDistributionChecker checks whether the region distribution is balanced.
 type LightningRegionDistributionChecker struct {
-	inner importer.PrecheckItem
+	inner precheck.Checker
 }

 // NewLightningRegionDistributionChecker creates a new LightningRegionDistributionChecker.
-func NewLightningRegionDistributionChecker(lightningChecker importer.PrecheckItem) RealChecker {
+func NewLightningRegionDistributionChecker(lightningChecker precheck.Checker) RealChecker {
 	return &LightningRegionDistributionChecker{inner: lightningChecker}
 }
@@ -109,11 +110,11 @@ func (c *LightningRegionDistributionChecker) Check(ctx context.Context) *Result

 // LightningClusterVersionChecker checks whether the cluster version is compatible with Lightning.
 type LightningClusterVersionChecker struct {
-	inner importer.PrecheckItem
+	inner precheck.Checker
 }

 // NewLightningClusterVersionChecker creates a new LightningClusterVersionChecker.
-func NewLightningClusterVersionChecker(lightningChecker importer.PrecheckItem) RealChecker {
+func NewLightningClusterVersionChecker(lightningChecker precheck.Checker) RealChecker {
 	return &LightningClusterVersionChecker{inner: lightningChecker}
 }
@@ -207,11 +208,11 @@ func (c *LightningFreeSpaceChecker) Check(ctx context.Context) *Result {

 // LightningCDCPiTRChecker checks whether the cluster has running CDC PiTR tasks.
 type LightningCDCPiTRChecker struct {
-	inner importer.PrecheckItem
+	inner precheck.Checker
 }

 // NewLightningCDCPiTRChecker creates a new LightningCDCPiTRChecker.
-func NewLightningCDCPiTRChecker(lightningChecker importer.PrecheckItem) RealChecker {
+func NewLightningCDCPiTRChecker(lightningChecker precheck.Checker) RealChecker {
 	c, ok := lightningChecker.(*importer.CDCPITRCheckItem)
 	if ok {
 		c.Instruction = "physical import mode is not compatible with them. Please switch to logical import mode then try again."
diff --git a/dm/pkg/checker/lightning_test.go b/dm/pkg/checker/lightning_test.go
index 5a94a7191e8..21b317f5ff4 100644
--- a/dm/pkg/checker/lightning_test.go
+++ b/dm/pkg/checker/lightning_test.go
@@ -17,7 +17,7 @@ import (
 	"context"
 	"testing"

-	"github.com/pingcap/tidb/br/pkg/lightning/importer"
+	"github.com/pingcap/tidb/br/pkg/lightning/precheck"
 	"github.com/pingcap/tiflow/pkg/errors"
 	"github.com/stretchr/testify/require"
 )
@@ -28,17 +28,17 @@ type mockPrecheckItem struct {
 	msg string
 }

-func (m mockPrecheckItem) Check(ctx context.Context) (*importer.CheckResult, error) {
+func (m mockPrecheckItem) Check(ctx context.Context) (*precheck.CheckResult, error) {
 	if m.err != nil {
 		return nil, m.err
 	}
-	return &importer.CheckResult{
+	return &precheck.CheckResult{
 		Passed:  m.pass,
 		Message: m.msg,
 	}, nil
 }

-func (m mockPrecheckItem) GetCheckItemID() importer.CheckItemID {
+func (m mockPrecheckItem) GetCheckItemID() precheck.CheckItemID {
 	return "mock"
 }

diff --git a/dm/tests/lightning_load_task/conf/diff_config1.toml b/dm/tests/lightning_load_task/conf/diff_config1.toml
new file mode 100644
index 00000000000..c1b51cdbde0
--- /dev/null
+++ b/dm/tests/lightning_load_task/conf/diff_config1.toml
@@ -0,0 +1,35 @@
+# diff Configuration.
+
+check-thread-count = 4
+
+export-fix-sql = true
+
+check-struct-only = false
+
+[task]
+    output-dir = "/tmp/ticdc_dm_test/output"
+
+    source-instances = ["mysql1", "mysql2"]
+
+    target-instance = "tidb0"
+
+    target-check-tables = ["load_task1.t?*"]
+
+[data-sources]
+[data-sources.mysql1]
+host = "127.0.0.1"
+port = 3306
+user = "root"
+password = "123456"
+
+[data-sources.mysql2]
+host = "127.0.0.1"
+port = 3307
+user = "root"
+password = "123456"
+
+[data-sources.tidb0]
+host = "127.0.0.1"
+port = 4000
+user = "test"
+password = "123456"
diff --git a/dm/tests/lightning_load_task/conf/diff_config2.toml b/dm/tests/lightning_load_task/conf/diff_config2.toml
new file mode 100644
index 00000000000..ec0038ccd61
--- /dev/null
+++ b/dm/tests/lightning_load_task/conf/diff_config2.toml
@@ -0,0 +1,35 @@
+# diff Configuration.
+
+check-thread-count = 4
+
+export-fix-sql = true
+
+check-struct-only = false
+
+[task]
+    output-dir = "/tmp/ticdc_dm_test/output"
+
+    source-instances = ["mysql1", "mysql2"]
+
+    target-instance = "tidb0"
+
+    target-check-tables = ["load_task2.t?*"]
+
+[data-sources]
+[data-sources.mysql1]
+host = "127.0.0.1"
+port = 3306
+user = "root"
+password = "123456"
+
+[data-sources.mysql2]
+host = "127.0.0.1"
+port = 3307
+user = "root"
+password = "123456"
+
+[data-sources.tidb0]
+host = "127.0.0.1"
+port = 4000
+user = "test"
+password = "123456"
diff --git a/dm/tests/lightning_load_task/conf/diff_config3.toml b/dm/tests/lightning_load_task/conf/diff_config3.toml
new file mode 100644
index 00000000000..3dc14b45ba5
--- /dev/null
+++ b/dm/tests/lightning_load_task/conf/diff_config3.toml
@@ -0,0 +1,29 @@
+# diff Configuration.
+
+check-thread-count = 4
+
+export-fix-sql = true
+
+check-struct-only = false
+
+[task]
+    output-dir = "/tmp/ticdc_dm_test/output"
+
+    source-instances = ["mysql1"]
+
+    target-instance = "tidb0"
+
+    target-check-tables = ["load_task3.t?*"]
+
+[data-sources]
+[data-sources.mysql1]
+host = "127.0.0.1"
+port = 3307
+user = "root"
+password = "123456"
+
+[data-sources.tidb0]
+host = "127.0.0.1"
+port = 4000
+user = "test"
+password = "123456"
diff --git a/dm/tests/lightning_load_task/conf/diff_config4.toml b/dm/tests/lightning_load_task/conf/diff_config4.toml
new file mode 100644
index 00000000000..33f675f7e1c
--- /dev/null
+++ b/dm/tests/lightning_load_task/conf/diff_config4.toml
@@ -0,0 +1,29 @@
+# diff Configuration.
+
+check-thread-count = 4
+
+export-fix-sql = true
+
+check-struct-only = false
+
+[task]
+    output-dir = "/tmp/ticdc_dm_test/output"
+
+    source-instances = ["mysql1"]
+
+    target-instance = "tidb0"
+
+    target-check-tables = ["load_task4.t?*"]
+
+[data-sources]
+[data-sources.mysql1]
+host = "127.0.0.1"
+port = 3306
+user = "root"
+password = "123456"
+
+[data-sources.tidb0]
+host = "127.0.0.1"
+port = 4000
+user = "test"
+password = "123456"
diff --git a/dm/tests/lightning_load_task/conf/dm-master.toml b/dm/tests/lightning_load_task/conf/dm-master.toml
new file mode 100644
index 00000000000..53a294e7d07
--- /dev/null
+++ b/dm/tests/lightning_load_task/conf/dm-master.toml
@@ -0,0 +1,6 @@
+# Master Configuration.
+master-addr = ":8261"
+advertise-addr = "127.0.0.1:8261"
+
+rpc-timeout = "30s"
+auto-compaction-retention = "3s"
diff --git a/dm/tests/lightning_load_task/conf/dm-task-standalone.yaml b/dm/tests/lightning_load_task/conf/dm-task-standalone.yaml
new file mode 100644
index 00000000000..0d293423e43
--- /dev/null
+++ b/dm/tests/lightning_load_task/conf/dm-task-standalone.yaml
@@ -0,0 +1,41 @@
+---
+name: load_task1
+task-mode: all
+is-sharding: false
+meta-schema: "dm_meta"
+heartbeat-update-interval: 1
+heartbeat-report-interval: 1
+
+target-database:
+  host: "127.0.0.1"
+  port: 4000
+  user: "test"
+  password: "/Q7B9DizNLLTTfiZHv9WoEAKamfpIUs="
+
+mysql-instances:
+  - source-id: "mysql-replica-01"
+    block-allow-list: "instance"
+    mydumper-config-name: "global"
+    loader-config-name: "global"
+    syncer-config-name: "global"
+
+block-allow-list:
+  instance:
+    do-dbs: ["load_task1"]
+
+mydumpers:
+  global:
+    threads: 4
+    chunk-filesize: 64
+    skip-tz-utc: true
+    extra-args: ""
+
+loaders:
+  global:
+    pool-size: 16
+    dir: "./dumped_data"
+
+syncers:
+  global:
+    worker-count: 16
+    batch: 100
diff --git a/dm/tests/lightning_load_task/conf/dm-task.yaml b/dm/tests/lightning_load_task/conf/dm-task.yaml
index a8f2e93d964..32cacf0379a 100644
--- a/dm/tests/lightning_load_task/conf/dm-task.yaml
+++ b/dm/tests/lightning_load_task/conf/dm-task.yaml
@@ -40,7 +40,6 @@ loaders:
   global:
     pool-size: 16
     dir: "./dumped_data"
-    import-mode: "sql"

 syncers:
   global:
diff --git a/dm/tests/lightning_load_task/conf/dm-task2-standalone.yaml b/dm/tests/lightning_load_task/conf/dm-task2-standalone.yaml
new file mode 100644
index 00000000000..bc98e4efac3
--- /dev/null
+++ b/dm/tests/lightning_load_task/conf/dm-task2-standalone.yaml
@@ -0,0 +1,41 @@
+---
+name: load_task2
+task-mode: all
+is-sharding: false
+meta-schema: "dm_meta"
+heartbeat-update-interval: 1
+heartbeat-report-interval: 1
+
+target-database:
+  host: "127.0.0.1"
+  port: 4000
+  user: "test"
+  password: "/Q7B9DizNLLTTfiZHv9WoEAKamfpIUs="
+
+mysql-instances:
+  - source-id: "mysql-replica-01"
+    block-allow-list: "instance"
+    mydumper-config-name: "global"
+    loader-config-name: "global"
+    syncer-config-name: "global"
+
+block-allow-list:
+  instance:
+    do-dbs: ["load_task2"]
+
+mydumpers:
+  global:
+    threads: 4
+    chunk-filesize: 64
+    skip-tz-utc: true
+    extra-args: ""
+
+loaders:
+  global:
+    pool-size: 16
+    dir: "./dumped_data"
+
+syncers:
+  global:
+    worker-count: 16
+    batch: 100
diff --git a/dm/tests/lightning_load_task/conf/dm-worker1.toml b/dm/tests/lightning_load_task/conf/dm-worker1.toml
new file mode 100644
index 00000000000..7a72ea72bf8
--- /dev/null
+++ b/dm/tests/lightning_load_task/conf/dm-worker1.toml
@@ -0,0 +1,2 @@
+name = "worker1"
+join = "127.0.0.1:8261"
diff --git a/dm/tests/lightning_load_task/conf/dm-worker2.toml b/dm/tests/lightning_load_task/conf/dm-worker2.toml
new file mode 100644
index 00000000000..010e21c73eb
--- /dev/null
+++ b/dm/tests/lightning_load_task/conf/dm-worker2.toml
@@ -0,0 +1,2 @@
+name = "worker2"
+join = "127.0.0.1:8261"
diff --git a/dm/tests/lightning_load_task/conf/dm-worker3.toml b/dm/tests/lightning_load_task/conf/dm-worker3.toml
new file mode 100644
index 00000000000..ab7e1b9cb32
--- /dev/null
+++ b/dm/tests/lightning_load_task/conf/dm-worker3.toml
@@ -0,0 +1,2 @@
+name = "worker3"
+join = "127.0.0.1:8261"
diff --git a/dm/tests/lightning_load_task/conf/source1.yaml b/dm/tests/lightning_load_task/conf/source1.yaml
new file mode 100644
index 00000000000..c2b659d3fba
--- /dev/null
+++ b/dm/tests/lightning_load_task/conf/source1.yaml
@@ -0,0 +1,13 @@
+source-id: mysql-replica-01
+flavor: ''
+enable-gtid: true
+enable-relay: false
+from:
+  host: 127.0.0.1
+  user: root
+  password: /Q7B9DizNLLTTfiZHv9WoEAKamfpIUs=
+  port: 3306
+checker:
+  check-enable: true
+  backoff-rollback: 5m
+  backoff-max: 5m
diff --git a/dm/tests/lightning_load_task/conf/source2.yaml b/dm/tests/lightning_load_task/conf/source2.yaml
new file mode 100644
index 00000000000..fb1985ca354
--- /dev/null
+++ b/dm/tests/lightning_load_task/conf/source2.yaml
@@ -0,0 +1,9 @@
+source-id: mysql-replica-02
+flavor: ''
+enable-gtid: false
+enable-relay: false
+from:
+  host: 127.0.0.1
+  user: root
+  password: /Q7B9DizNLLTTfiZHv9WoEAKamfpIUs=
+  port: 3307
diff --git a/dm/tests/lightning_load_task/data/db1.increment.sql b/dm/tests/lightning_load_task/data/db1.increment.sql
new file mode 100644
index 00000000000..8db36ca7875
--- /dev/null
+++ b/dm/tests/lightning_load_task/data/db1.increment.sql
@@ -0,0 +1,11 @@
+use load_task1;
+insert into t1 (id, name) values (3, 'Eddard Stark');
+insert into t1 (id, name) values (4, 'haha');
+
+use load_task2;
+insert into t1 (id, name) values (3, 'Eddard Stark');
+insert into t1 (id, name) values (4, 'haha');
+
+use load_task4;
+insert into t1 (id, name) values (3, 'Eddard Stark');
+insert into t1 (id, name) values (4, 'haha');
\ No newline at end of file
diff --git a/dm/tests/lightning_load_task/data/db1.prepare.sql b/dm/tests/lightning_load_task/data/db1.prepare.sql
new file mode 100644
index 00000000000..5a6eec15812
--- /dev/null
+++ b/dm/tests/lightning_load_task/data/db1.prepare.sql
@@ -0,0 +1,17 @@
+drop database if exists `load_task1`;
+create database `load_task1`;
+use `load_task1`;
+create table t1 (id int, name varchar(20), primary key(`id`));
+insert into t1 (id, name) values (1, 'arya'), (2, 'catelyn');
+
+drop database if exists `load_task2`;
+create database `load_task2`;
+use `load_task2`;
+create table t1 (id int, name varchar(20), primary key(`id`));
+insert into t1 (id, name) values (1, 'arya'), (2, 'catelyn');
+
+drop database if exists `load_task4`;
+create database `load_task4`;
+use `load_task4`;
+create table t1 (id int, name varchar(20), primary key(`id`));
+insert into t1 (id, name) values (1, 'arya'), (2, 'catelyn');
diff --git a/dm/tests/lightning_load_task/data/db2.increment.sql b/dm/tests/lightning_load_task/data/db2.increment.sql
new file mode 100644
index 00000000000..45a9dca6778
--- /dev/null
+++ b/dm/tests/lightning_load_task/data/db2.increment.sql
@@ -0,0 +1,8 @@
+use load_task1;
+delete from t2 where name = 'Sansa';
+
+use load_task2;
+delete from t2 where name = 'Sansa';
+
+use load_task3;
+delete from t2 where name = 'Sansa';
diff --git a/dm/tests/lightning_load_task/data/db2.prepare.sql b/dm/tests/lightning_load_task/data/db2.prepare.sql
new file mode 100644
index 00000000000..d23bd3500f0
--- /dev/null
+++ b/dm/tests/lightning_load_task/data/db2.prepare.sql
@@ -0,0 +1,17 @@
+drop database if exists `load_task1`;
+create database `load_task1`;
+use `load_task1`;
+create table t2 (id int auto_increment, name varchar(20), primary key (`id`));
+insert into t2 (name) values ('Arya'), ('Bran'), ('Sansa');
+
+drop database if exists `load_task2`;
+create database `load_task2`;
+use `load_task2`;
+create table t2 (id int auto_increment, name varchar(20), primary key (`id`));
+insert into t2 (name) values ('Arya'), ('Bran'), ('Sansa');
+
+drop database if exists `load_task3`;
+create database `load_task3`;
+use `load_task3`;
+create table t2 (id int auto_increment, name varchar(20), primary key (`id`));
+insert into t2 (name) values ('Arya'), ('Bran'), ('Sansa');
diff --git a/dm/tests/lightning_load_task/run.sh b/dm/tests/lightning_load_task/run.sh
index 471d7f947ef..687ad257adf 100755
--- a/dm/tests/lightning_load_task/run.sh
+++ b/dm/tests/lightning_load_task/run.sh
@@ -34,7 +34,7 @@ function test_worker_restart() {

 	# worker1 online
 	export GO_FAILPOINTS="github.com/pingcap/tiflow/dm/loader/LoadDataSlowDownByTask=return(\"load_task1\")"
-	run_dm_worker $WORK_DIR/worker1 $WORKER1_PORT $CONF_DIR/dm-worker1.toml
+	run_dm_worker $WORK_DIR/worker1 $WORKER1_PORT $cur/conf/dm-worker1.toml
 	check_rpc_alive $cur/../bin/check_worker_online 127.0.0.1:$WORKER1_PORT

 	# transfer to worker1
@@ -83,7 +83,7 @@ function test_transfer_two_sources() {

 	# worker2 online
 	export GO_FAILPOINTS="github.com/pingcap/tiflow/dm/loader/LoadDataSlowDown=sleep(15000)"
-	run_dm_worker $WORK_DIR/worker2 $WORKER2_PORT $CONF_DIR/dm-worker2.toml
+	run_dm_worker $WORK_DIR/worker2 $WORKER2_PORT $cur/conf/dm-worker2.toml
 	check_rpc_alive $cur/../bin/check_worker_online 127.0.0.1:$WORKER2_PORT

 	# worker2 free since (worker3, source2) has load task(load_task3)
@@ -111,7 +111,7 @@ function test_transfer_two_sources() {

 	# worker1 online
 	export GO_FAILPOINTS=""
-	run_dm_worker $WORK_DIR/worker1 $WORKER1_PORT $CONF_DIR/dm-worker1.toml
+	run_dm_worker $WORK_DIR/worker1 $WORKER1_PORT $cur/conf/dm-worker1.toml
 	check_rpc_alive $cur/../bin/check_worker_online 127.0.0.1:$WORKER1_PORT

 	# worker1 free since (worker2, source1) has load task(load_task4)
@@ -157,7 +157,7 @@ function test_transfer_two_sources() {

 	# worker3 online
 	export GO_FAILPOINTS=""
-	run_dm_worker $WORK_DIR/worker3 $WORKER3_PORT $CONF_DIR/dm-worker3.toml
+	run_dm_worker $WORK_DIR/worker3 $WORKER3_PORT $cur/conf/dm-worker3.toml
 	check_rpc_alive $cur/../bin/check_worker_online 127.0.0.1:$WORKER3_PORT

 	# source2 is bound to worker3 since load_task3
@@ -174,34 +174,34 @@

 function run() {
 	echo "import prepare data"
-	run_sql_file $DATA_DIR/db1.prepare.sql $MYSQL_HOST1 $MYSQL_PORT1 $MYSQL_PASSWORD1
+	run_sql_file $cur/data/db1.prepare.sql $MYSQL_HOST1 $MYSQL_PORT1 $MYSQL_PASSWORD1
 	check_contains 'Query OK, 2 rows affected'
-	run_sql_file $DATA_DIR/db2.prepare.sql $MYSQL_HOST2 $MYSQL_PORT2 $MYSQL_PASSWORD2
+	run_sql_file $cur/data/db2.prepare.sql $MYSQL_HOST2 $MYSQL_PORT2 $MYSQL_PASSWORD2
 	check_contains 'Query OK, 3 rows affected'

 	echo "start DM master, workers and sources"
-	run_dm_master $WORK_DIR/master $MASTER_PORT1 $CONF_DIR/dm-master.toml
+	run_dm_master $WORK_DIR/master $MASTER_PORT1 $cur/conf/dm-master.toml
 	check_rpc_alive $cur/../bin/check_master_online 127.0.0.1:$MASTER_PORT1

 	# worker1 loading load_task1
 	export GO_FAILPOINTS="github.com/pingcap/tiflow/dm/loader/LoadDataSlowDownByTask=return(\"load_task1\")"
-	run_dm_worker $WORK_DIR/worker1 $WORKER1_PORT $CONF_DIR/dm-worker1.toml
+	run_dm_worker $WORK_DIR/worker1 $WORKER1_PORT $cur/conf/dm-worker1.toml
 	check_rpc_alive $cur/../bin/check_worker_online 127.0.0.1:$WORKER1_PORT
-	cp $CONF_DIR/source1.yaml $WORK_DIR/source1.yaml
+	cp $cur/conf/source1.yaml $WORK_DIR/source1.yaml
 	sed -i "/relay-binlog-name/i\relay-dir: $WORK_DIR/worker1/relay_log" $WORK_DIR/source1.yaml
 	dmctl_operate_source create $WORK_DIR/source1.yaml $SOURCE_ID1

 	# worker2 loading load_task2
 	export GO_FAILPOINTS="github.com/pingcap/tiflow/dm/loader/LoadDataSlowDownByTask=return(\"load_task2\")"
-	run_dm_worker $WORK_DIR/worker2 $WORKER2_PORT $CONF_DIR/dm-worker2.toml
+	run_dm_worker $WORK_DIR/worker2 $WORKER2_PORT $cur/conf/dm-worker2.toml
 	check_rpc_alive $cur/../bin/check_worker_online 127.0.0.1:$WORKER2_PORT
-	cp $CONF_DIR/source2.yaml $WORK_DIR/source2.yaml
+	cp $cur/conf/source2.yaml $WORK_DIR/source2.yaml
 	sed -i "/relay-binlog-name/i\relay-dir: $WORK_DIR/worker2/relay_log" $WORK_DIR/source2.yaml
 	dmctl_operate_source create $WORK_DIR/source2.yaml $SOURCE_ID2

 	# worker3 loading load_task3
 	export GO_FAILPOINTS="github.com/pingcap/tiflow/dm/loader/LoadDataSlowDownByTask=return(\"load_task3\")"
-	run_dm_worker $WORK_DIR/worker3 $WORKER3_PORT $CONF_DIR/dm-worker3.toml
+	run_dm_worker $WORK_DIR/worker3 $WORKER3_PORT $cur/conf/dm-worker3.toml
 	check_rpc_alive $cur/../bin/check_worker_online 127.0.0.1:$WORKER3_PORT

 	echo "start DM task"
@@ -223,12 +223,12 @@ function run() {

 	test_transfer_two_sources

-	run_sql_file $DATA_DIR/db1.increment.sql $MYSQL_HOST1 $MYSQL_PORT1 $MYSQL_PASSWORD1
-	run_sql_file $DATA_DIR/db2.increment.sql $MYSQL_HOST2 $MYSQL_PORT2 $MYSQL_PASSWORD2
-	check_sync_diff $WORK_DIR $CONF_DIR/diff_config1.toml
-	check_sync_diff $WORK_DIR $CONF_DIR/diff_config2.toml
-	check_sync_diff $WORK_DIR $CONF_DIR/diff_config3.toml
-	check_sync_diff $WORK_DIR $CONF_DIR/diff_config4.toml
+	run_sql_file $cur/data/db1.increment.sql $MYSQL_HOST1 $MYSQL_PORT1 $MYSQL_PASSWORD1
+	run_sql_file $cur/data/db2.increment.sql $MYSQL_HOST2 $MYSQL_PORT2 $MYSQL_PASSWORD2
+	check_sync_diff $WORK_DIR $cur/conf/diff_config1.toml
+	check_sync_diff $WORK_DIR $cur/conf/diff_config2.toml
+	check_sync_diff $WORK_DIR $cur/conf/diff_config3.toml
+	check_sync_diff $WORK_DIR $cur/conf/diff_config4.toml
 }

 cleanup_data load_task1
diff --git a/dm/tests/others_integration_1.txt b/dm/tests/others_integration_1.txt
deleted file mode 100644
index 087ab335e08..00000000000
--- a/dm/tests/others_integration_1.txt
+++ /dev/null
@@ -1,10 +0,0 @@
-adjust_gtid
-check_task
-validator_basic
-validator_checkpoint
-drop_column_with_index
-downstream_diff_index
-full_mode
-sequence_sharding_optimistic
-sequence_sharding_removemeta
-gtid
diff --git a/dm/tests/others_integration_2.txt b/dm/tests/others_integration_2.txt
deleted file mode 100644
index fbd5f0ef558..00000000000
--- a/dm/tests/others_integration_2.txt
+++ /dev/null
@@ -1,12 +0,0 @@
-foreign_key
-downstream_more_column
-expression_filter
-fake_rotate_event
-metrics
-case_sensitive
-sql_mode
-http_proxies
-openapi
-duplicate_event
-binlog_parse
-only_dml
diff --git a/dm/tests/others_integration_3.txt b/dm/tests/others_integration_3.txt
deleted file mode 100644
index a3ff49671ee..00000000000
--- a/dm/tests/others_integration_3.txt
+++ /dev/null
@@ -1,11 +0,0 @@
-checkpoint_transaction
-lightning_mode
-slow_relay_writer
-sync_collation
-s3_dumpling_lightning
-async_checkpoint_flush
-tracker_ignored_ddl
-extend_column
-shardddl_optimistic
-gbk
-new_collation_off
diff --git a/dm/tests/run.sh b/dm/tests/run.sh
index 2924662ce35..e7f1b0d2f33 100755
--- a/dm/tests/run.sh
+++ b/dm/tests/run.sh
@@ -74,17 +74,6 @@ if [ "$test_case" == "*" ]; then
 	should_run=1
 elif [ "$test_case" == "compatibility" ]; then
 	should_run=1
-elif [ "$test_case" == "others" ]; then
-	test_case=$(cat $CUR/others_integration_1.txt)
-	should_run=1
-elif [ "$test_case" == "others_2" ]; then
-	test_case=$(cat $CUR/others_integration_2.txt)
-	should_run=1
-elif [ "$test_case" == "others_3" ]; then
-	test_case=$(cat $CUR/others_integration_3.txt)
-	if [ ! -z "$test_case" ]; then
-z "$test_case" ]; then - should_run=1 - fi else exist_case="" for one_case in $test_case; do diff --git a/dm/tests/run_group.sh b/dm/tests/run_group.sh index e58207b278f..4e2be79e1de 100755 --- a/dm/tests/run_group.sh +++ b/dm/tests/run_group.sh @@ -22,9 +22,10 @@ groups=( ["G06"]="relay_interrupt safe_mode sequence_safe_mode" ["G07"]="shardddl1 shardddl1_1 shardddl2 shardddl2_1" ["G08"]="shardddl3 shardddl3_1 shardddl4 shardddl4_1 sharding sequence_sharding" - ["G09"]="others others_2 others_3" + ["G09"]="import_v10x sharding2 ha" ["G10"]="start_task print_status http_apis new_relay all_mode" - ["G11"]="import_v10x sharding2 ha" + # `others others_2 others_3` tests of old pipeline + ["G11"]="adjust_gtid async_checkpoint_flush binlog_parse case_sensitive checkpoint_transaction check_task dm_syncer downstream_diff_index downstream_more_column drop_column_with_index duplicate_event expression_filter extend_column fake_rotate_event foreign_key full_mode gbk gtid ha_cases http_proxies lightning_load_task lightning_mode metrics new_collation_off only_dml openapi s3_dumpling_lightning sequence_sharding_optimistic sequence_sharding_removemeta shardddl_optimistic slow_relay_writer sql_mode sync_collation tracker_ignored_ddl validator_basic validator_checkpoint" ["TLS_GROUP"]="tls" ) @@ -38,20 +39,21 @@ for script in "$CUR"/*/run.sh; do fi done -# Get test names -test_names="" -# shellcheck disable=SC2076 -if [[ "$group" == "others" ]]; then - test_names="${others[*]}" +if [[ "$group" == "check others" ]]; then + if [[ -z $others ]]; then + echo "All engine integration test cases are added to groups" + exit 0 + fi + echo "Error: "$others" is not added to any group in dm/tests/run_group.sh" + exit 1 elif [[ " ${!groups[*]} " =~ " ${group} " ]]; then test_names="${groups[${group}]}" + # Run test cases + if [[ -n $test_names ]]; then + echo "Run cases: ${test_names}" + "${CUR}"/run.sh "${test_names}" + fi else echo "Error: invalid group name: ${group}" exit 1 fi - -# Run test cases -if [[ -n $test_names ]]; then - echo "Run cases: ${test_names}" - "${CUR}"/run.sh "${test_names}" -fi diff --git a/engine/test/integration_tests/run_group.sh b/engine/test/integration_tests/run_group.sh old mode 100644 new mode 100755 index aa23af44270..0ace75b471a --- a/engine/test/integration_tests/run_group.sh +++ b/engine/test/integration_tests/run_group.sh @@ -30,22 +30,23 @@ for script in "$CUR"/*/run.sh; do fi done -# Get test names -test_names="" -# shellcheck disable=SC2076 -if [[ "$group" == "others" ]]; then - test_names="${others[*]}" +if [[ "$group" == "check others" ]]; then + if [[ -z $others ]]; then + echo "All engine integration test cases are added to groups" + exit 0 + fi + echo "Error: "$others" is not added to any group in engine/test/integration_tests/run_group.sh" + exit 1 elif [[ " ${!groups[*]} " =~ " ${group} " ]]; then test_names="${groups[${group}]}" + # Run test cases + if [[ -n $test_names ]]; then + echo "Run cases: ${test_names}" + mkdir -p /tmp/tiflow_engine_test + "${CUR}"/run.sh "${test_names}" 2>&1 | tee /tmp/tiflow_engine_test/engine_it.log + ./engine/test/utils/check_log.sh + fi else echo "Error: invalid group name: ${group}" exit 1 fi - -# Run test cases -if [[ -n $test_names ]]; then - echo "Run cases: ${test_names}" - mkdir -p /tmp/tiflow_engine_test - "${CUR}"/run.sh "${test_names}" 2>&1 | tee /tmp/tiflow_engine_test/engine_it.log - ./engine/test/utils/check_log.sh -fi diff --git a/go.mod b/go.mod index 0c9b60b4647..75d58541a95 100644 --- a/go.mod +++ b/go.mod @@ -59,12 +59,12 @@ 
 	github.com/pingcap/check v0.0.0-20211026125417-57bd13f7b5f0
 	github.com/pingcap/errors v0.11.5-0.20221009092201-b66cddb77c32
 	github.com/pingcap/failpoint v0.0.0-20220801062533-2eaa32854a6c
-	github.com/pingcap/kvproto v0.0.0-20230407040905-68d0eebd564a
+	github.com/pingcap/kvproto v0.0.0-20230419072653-dc3cd8784a19
 	github.com/pingcap/log v1.1.1-0.20230317032135-a0d097d16e22
-	github.com/pingcap/tidb v1.1.0-beta.0.20230418111328-47e7432054a1
-	github.com/pingcap/tidb-tools v6.5.1-0.20230208065359-62b90e1e24a7+incompatible
-	github.com/pingcap/tidb/parser v0.0.0-20230417161919-627110332165
-	github.com/prometheus/client_golang v1.14.0
+	github.com/pingcap/tidb v1.1.0-beta.0.20230420065519-eb77d3928398
+	github.com/pingcap/tidb-tools v7.0.0+incompatible
+	github.com/pingcap/tidb/parser v0.0.0-20230420065519-eb77d3928398
+	github.com/prometheus/client_golang v1.15.0
 	github.com/prometheus/client_model v0.3.0
 	github.com/r3labs/diff v1.1.0
 	github.com/rcrowley/go-metrics v0.0.0-20201227073835-cf1acfcdf475
@@ -79,9 +79,9 @@ require (
 	github.com/swaggo/gin-swagger v1.2.0
 	github.com/swaggo/swag v1.8.3
 	github.com/syndtr/goleveldb v1.0.1-0.20210305035536-64b5b1c73954
-	github.com/tikv/client-go/v2 v2.0.8-0.20230417065328-92db9f7b151f
+	github.com/tikv/client-go/v2 v2.0.8-0.20230419123920-35c1ee47c4f9
 	github.com/tikv/pd v1.1.0-beta.0.20230203015356-248b3f0be132
-	github.com/tikv/pd/client v0.0.0-20230329114254-1948c247c2b1
+	github.com/tikv/pd/client v0.0.0-20230419153320-f1d1a80feb95
 	github.com/tinylib/msgp v1.1.6
 	github.com/uber-go/atomic v1.4.0
 	github.com/vmihailenco/msgpack/v5 v5.3.5
@@ -105,7 +105,7 @@
 	golang.org/x/sys v0.7.0
 	golang.org/x/text v0.9.0
 	golang.org/x/time v0.3.0
-	google.golang.org/genproto v0.0.0-20230403163135-c38d8f061ccd
+	google.golang.org/genproto v0.0.0-20230410155749-daa745c078e1
 	google.golang.org/grpc v1.54.0
 	google.golang.org/protobuf v1.30.0
 	gopkg.in/yaml.v2 v2.4.0
@@ -199,7 +199,7 @@
 	github.com/json-iterator/go v1.1.12 // indirect
 	github.com/klauspost/compress v1.15.13 // indirect
 	github.com/klauspost/cpuid v1.3.1 // indirect
-	github.com/kr/pretty v0.3.0 // indirect
+	github.com/kr/pretty v0.3.1 // indirect
 	github.com/kr/text v0.2.0 // indirect
 	github.com/labstack/echo/v4 v4.2.1 // indirect
 	github.com/leodido/go-urn v1.2.1 // indirect
@@ -236,7 +236,7 @@
 	github.com/pkg/errors v0.9.1 // indirect
 	github.com/pmezard/go-difflib v1.0.0 // indirect
 	github.com/power-devops/perfstat v0.0.0-20221212215047-62379fc7944b // indirect
-	github.com/prometheus/common v0.39.0 // indirect
+	github.com/prometheus/common v0.42.0 // indirect
 	github.com/prometheus/procfs v0.9.0 // indirect
 	github.com/remyoudompheng/bigfft v0.0.0-20220927061507-ef77025ab5aa // indirect
 	github.com/rivo/uniseg v0.4.4 // indirect
diff --git a/go.sum b/go.sum
index b13a048f87c..47b30c40b73 100644
--- a/go.sum
+++ b/go.sum
@@ -120,8 +120,8 @@ github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuy
 github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
 github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0=
 github.com/alecthomas/units v0.0.0-20190717042225-c3de453c63f4/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0=
-github.com/alecthomas/units v0.0.0-20190924025748-f65c72e2690d h1:UQZhZ2O0vMHr2cI+DC1Mbh0TJxzA3RcLoMsFw+aXw7E=
 github.com/alecthomas/units v0.0.0-20190924025748-f65c72e2690d/go.mod h1:rBZYJk541a8SKzHPHnH3zbiI+7dagKZ0cgpgrD7Fyho=
+github.com/alecthomas/units v0.0.0-20211218093645-b94a6e3cc137 h1:s6gZFSlWYmbqAuRjVTiNNhvNRfY2Wxp9nhfyel4rklc=
 github.com/aliyun/alibaba-cloud-sdk-go v1.61.1581 h1:Q/yk4z/cHUVZfgTqtD09qeYBxHwshQAjVRX73qs8UH0=
 github.com/aliyun/alibaba-cloud-sdk-go v1.61.1581/go.mod h1:RcDobYh8k5VP6TNybz9m++gL3ijVI5wueVr0EM10VsU=
 github.com/andres-erbsen/clock v0.0.0-20160526145045-9e14626cd129 h1:MzBOUgng9orim59UnfUTLRjMpd09C5uEVQ6RPGeCaVI=
@@ -706,8 +706,9 @@ github.com/konsorten/go-windows-terminal-sequences v1.0.3/go.mod h1:T0+1ngSBFLxv
 github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc=
 github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
 github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI=
-github.com/kr/pretty v0.3.0 h1:WgNl7dwNpEZ6jJ9k1snq4pZsg7DOEN8hP9Xw0Tsjwk0=
 github.com/kr/pretty v0.3.0/go.mod h1:640gp4NfQd8pI5XOwp5fnNeVWj67G7CFk/SaSQn7NBk=
+github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE=
+github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk=
 github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
 github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
 github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
@@ -895,8 +896,8 @@ github.com/pingcap/kvproto v0.0.0-20220302110454-c696585a961b/go.mod h1:IOdRDPLy
 github.com/pingcap/kvproto v0.0.0-20220304032058-ccd676426a27/go.mod h1:IOdRDPLyda8GX2hE/jO7gqaCV/PNFh8BZQCQZXfIOqI=
 github.com/pingcap/kvproto v0.0.0-20220328072018-6e75c12dbd73/go.mod h1:IOdRDPLyda8GX2hE/jO7gqaCV/PNFh8BZQCQZXfIOqI=
 github.com/pingcap/kvproto v0.0.0-20220429093005-2839fa5a1ed6/go.mod h1:IOdRDPLyda8GX2hE/jO7gqaCV/PNFh8BZQCQZXfIOqI=
-github.com/pingcap/kvproto v0.0.0-20230407040905-68d0eebd564a h1:PWkMSJSDaOuLNKCV84K3tQ9stZuZPN8E148jRPD9TcA=
-github.com/pingcap/kvproto v0.0.0-20230407040905-68d0eebd564a/go.mod h1:guCyM5N+o+ru0TsoZ1hi9lDjUMs2sIBjW3ARTEpVbnk=
+github.com/pingcap/kvproto v0.0.0-20230419072653-dc3cd8784a19 h1:aGROoQpU8Sx9MhCspeSrDXpNkW1pcG+EWdMYxg4d5uo=
+github.com/pingcap/kvproto v0.0.0-20230419072653-dc3cd8784a19/go.mod h1:guCyM5N+o+ru0TsoZ1hi9lDjUMs2sIBjW3ARTEpVbnk=
 github.com/pingcap/log v0.0.0-20191012051959-b742a5d432e9/go.mod h1:4rbK1p9ILyIfb6hU7OG2CiWSqMXnp3JMbiaVJ6mvoY8=
 github.com/pingcap/log v0.0.0-20200511115504-543df19646ad/go.mod h1:4rbK1p9ILyIfb6hU7OG2CiWSqMXnp3JMbiaVJ6mvoY8=
 github.com/pingcap/log v0.0.0-20210625125904-98ed8e2eb1c7/go.mod h1:8AanEdAHATuRurdGxZXBz0At+9avep+ub7U1AGYLIMM=
@@ -908,15 +909,15 @@ github.com/pingcap/sysutil v0.0.0-20220114020952-ea68d2dbf5b4/go.mod h1:sDCsM39c
 github.com/pingcap/sysutil v1.0.1-0.20230407040306-fb007c5aff21 h1:QV6jqlfOkh8hqvEAgwBZa+4bSgO0EeKC7s5c6Luam2I=
 github.com/pingcap/sysutil v1.0.1-0.20230407040306-fb007c5aff21/go.mod h1:QYnjfA95ZaMefyl1NO8oPtKeb8pYUdnDVhQgf+qdpjM=
 github.com/pingcap/tidb v1.1.0-beta.0.20220511160835-98c31070d958/go.mod h1:luW4sIZoLHY3bCWuKqyqk2QgMvF+/M7nWOXf/me0+fY=
-github.com/pingcap/tidb v1.1.0-beta.0.20230418111328-47e7432054a1 h1:7yuiJQ2iRU4Qc+MPUrRx7lWhw/cekzu4A8triPWghiI=
-github.com/pingcap/tidb v1.1.0-beta.0.20230418111328-47e7432054a1/go.mod h1:coCCXjP3wKEvEHAFAvyYDftSMEt+2abglH8K7R41u/8=
-github.com/pingcap/tidb-tools v6.5.1-0.20230208065359-62b90e1e24a7+incompatible h1:OT1Mrhe5UQInwiO+vGjbtd5Ej4r1ECjmeN4oaTdPlbE=
-github.com/pingcap/tidb-tools v6.5.1-0.20230208065359-62b90e1e24a7+incompatible/go.mod h1:XGdcy9+yqlDSEMTpOXnwf3hiTeqrV6MN/u1se9N8yIM=
+github.com/pingcap/tidb v1.1.0-beta.0.20230420065519-eb77d3928398 h1:sv/Sj6LAZRgZThCnbvZgIJuk2WBxCXbOEJnlr7Ng/Zw=
+github.com/pingcap/tidb v1.1.0-beta.0.20230420065519-eb77d3928398/go.mod h1:qLNiZTfvdd77/tRcUWs29ieRsNQg2lFeO9F5AWnS7hw=
+github.com/pingcap/tidb-tools v7.0.0+incompatible h1:CHjAva2ON13HZAB0HRNI69fC/1AzfQBkzDE31Rh6NSg=
+github.com/pingcap/tidb-tools v7.0.0+incompatible/go.mod h1:XGdcy9+yqlDSEMTpOXnwf3hiTeqrV6MN/u1se9N8yIM=
 github.com/pingcap/tidb/parser v0.0.0-20211011031125-9b13dc409c5e/go.mod h1:e1MGCA9Sg3T8jid8PKAEq5eYVuMMCq4n8gJ+Kqp4Plg=
 github.com/pingcap/tidb/parser v0.0.0-20220511160835-98c31070d958/go.mod h1:ElJiub4lRy6UZDb+0JHDkGEdr6aOli+ykhyej7VCLoI=
 github.com/pingcap/tidb/parser v0.0.0-20221126021158-6b02a5d8ba7d/go.mod h1:ElJiub4lRy6UZDb+0JHDkGEdr6aOli+ykhyej7VCLoI=
-github.com/pingcap/tidb/parser v0.0.0-20230417161919-627110332165 h1:Rtym1QmDOvMaW0jHpOJLpiv9nh/5OhkFicds1oc5Mp8=
-github.com/pingcap/tidb/parser v0.0.0-20230417161919-627110332165/go.mod h1:R0xUtp5gJK/Xtb+PIvR3Wh/Ayvmorwk0nzT4p3HLZJk=
+github.com/pingcap/tidb/parser v0.0.0-20230420065519-eb77d3928398 h1:QrQU2/pj2pd3uWTVTzwcymp9K2kWAg0uiNhqu//VhEM=
+github.com/pingcap/tidb/parser v0.0.0-20230420065519-eb77d3928398/go.mod h1:R0xUtp5gJK/Xtb+PIvR3Wh/Ayvmorwk0nzT4p3HLZJk=
 github.com/pingcap/tipb v0.0.0-20220215045658-d12dec7a7609/go.mod h1:A7mrd7WHBl1o63LE2bIBGEJMTNWXqhgmYiOvMLxozfs=
 github.com/pingcap/tipb v0.0.0-20230310043643-5362260ee6f7 h1:CeeMOq1aHPAhXrw4eYXtQRyWOFlbfqK1+3f9Iop4IfU=
 github.com/pingcap/tipb v0.0.0-20230310043643-5362260ee6f7/go.mod h1:A7mrd7WHBl1o63LE2bIBGEJMTNWXqhgmYiOvMLxozfs=
@@ -943,8 +944,8 @@ github.com/prometheus/client_golang v1.7.1/go.mod h1:PY5Wy2awLA44sXw4AOSfFBetzPP
 github.com/prometheus/client_golang v1.11.0/go.mod h1:Z6t4BnS23TR94PD6BsDNk8yVqroYurpAkEiz0P2BEV0=
 github.com/prometheus/client_golang v1.11.1/go.mod h1:Z6t4BnS23TR94PD6BsDNk8yVqroYurpAkEiz0P2BEV0=
 github.com/prometheus/client_golang v1.12.1/go.mod h1:3Z9XVyYiZYEO+YQWt3RD2R3jrbd179Rt297l4aS6nDY=
-github.com/prometheus/client_golang v1.14.0 h1:nJdhIvne2eSX/XRAFV9PcvFFRbrjbcTUj0VP62TMhnw=
-github.com/prometheus/client_golang v1.14.0/go.mod h1:8vpkKitgIVNcqrRBWh1C4TIUQgYNtG/XQE4E/Zae36Y=
+github.com/prometheus/client_golang v1.15.0 h1:5fCgGYogn0hFdhyhLbw7hEsWxufKtY9klyvdNfFlFhM=
+github.com/prometheus/client_golang v1.15.0/go.mod h1:e9yaBhRPU2pPNsZwE+JdQl0KEt1N9XgF6zxWmaC0xOk=
 github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo=
 github.com/prometheus/client_model v0.0.0-20190129233127-fd36f4220a90/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
 github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
@@ -958,8 +959,8 @@ github.com/prometheus/common v0.4.1/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y8
 github.com/prometheus/common v0.10.0/go.mod h1:Tlit/dnDKsSWFlCLTWaA1cyBgKHSMdTB80sz/V91rCo=
 github.com/prometheus/common v0.26.0/go.mod h1:M7rCNAaPfAosfx8veZJCuw84e35h3Cfd9VFqTh1DIvc=
 github.com/prometheus/common v0.32.1/go.mod h1:vu+V0TpY+O6vW9J44gczi3Ap/oXXR10b+M/gUGO4Hls=
-github.com/prometheus/common v0.39.0 h1:oOyhkDq05hPZKItWVBkJ6g6AtGxi+fy7F4JvUV8uhsI=
-github.com/prometheus/common v0.39.0/go.mod h1:6XBZ7lYdLCbkAVhwRsWTZn+IN5AB9F/NXd5w0BbEX0Y=
+github.com/prometheus/common v0.42.0 h1:EKsfXEYo4JpWMHH5cg+KOUWeuJSov1Id8zGR8eeI1YM=
+github.com/prometheus/common v0.42.0/go.mod h1:xBwqVerjNdUDjgODMpudtOMwlOwf2SaTr1yjz4b7Zbc=
 github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk=
 github.com/prometheus/procfs v0.0.0-20190507164030-5867b95ac084/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA=
 github.com/prometheus/procfs v0.0.2/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA=
@@ -1086,13 +1087,13 @@ github.com/tiancaiamao/appdash v0.0.0-20181126055449-889f96f722a2/go.mod h1:2PfK
 github.com/tiancaiamao/gp v0.0.0-20221230034425-4025bc8a4d4a h1:J/YdBZ46WKpXsxsW93SG+q0F8KI+yFrcIDT4c/RNoc4=
 github.com/tiancaiamao/gp v0.0.0-20221230034425-4025bc8a4d4a/go.mod h1:h4xBhSNtOeEosLJ4P7JyKXX7Cabg7AVkWCK5gV2vOrM=
 github.com/tikv/client-go/v2 v2.0.1-0.20220510032238-ff5e35ac2869/go.mod h1:0scaG+seu7L56apm+Gjz9vckyO7ABIzM6T7n00mrIXs=
-github.com/tikv/client-go/v2 v2.0.8-0.20230417065328-92db9f7b151f h1:pfDrSVAnfkk2EkrOc0iOmtA4n8F6TL9oEAK8R/enC50=
-github.com/tikv/client-go/v2 v2.0.8-0.20230417065328-92db9f7b151f/go.mod h1:Dkqcv2dYoCOiNMiRgnEhpTa04dUaF9E3rbcz4rXxf3U=
+github.com/tikv/client-go/v2 v2.0.8-0.20230419123920-35c1ee47c4f9 h1:b6sIFrhNA8R5Q+geziSnAenDphJ+BZtHMkGgHyj472I=
+github.com/tikv/client-go/v2 v2.0.8-0.20230419123920-35c1ee47c4f9/go.mod h1:JhXnkzPnxpgXj9Tww4P6sayf9TtsnB9XrtS71jfzq+U=
 github.com/tikv/pd v1.1.0-beta.0.20230203015356-248b3f0be132 h1:vCVu7LxFou5WuaY6jHDMHKVeJTtwr5o2i1xWgGAdDo4=
 github.com/tikv/pd v1.1.0-beta.0.20230203015356-248b3f0be132/go.mod h1:jb9oq6rN4U0U3FZdvqWlpi9rZzFJxiOlvZ3aj5BTpg8=
 github.com/tikv/pd/client v0.0.0-20220307081149-841fa61e9710/go.mod h1:AtvppPwkiyUgQlR1W9qSqfTB+OsOIu19jDCOxOsPkmU=
-github.com/tikv/pd/client v0.0.0-20230329114254-1948c247c2b1 h1:bzlSSzw+6qTwPs8pMcPI1bt27TAOhSdAEwdPCz6eBlg=
-github.com/tikv/pd/client v0.0.0-20230329114254-1948c247c2b1/go.mod h1:3cTcfo8GRA2H/uSttqA3LvMfMSHVBJaXk3IgkFXFVxo=
+github.com/tikv/pd/client v0.0.0-20230419153320-f1d1a80feb95 h1:177X/S43/qjxDyFq9CBB4Nts0nwLvjJFXzoav2XCUSA=
+github.com/tikv/pd/client v0.0.0-20230419153320-f1d1a80feb95/go.mod h1:5vgcvO020ZCdMZkTrRdS/wFZQUab82BSfKE38T61ro0=
 github.com/tinylib/msgp v1.1.6 h1:i+SbKraHhnrf9M5MYmvQhFnbLhAXSDWF8WWsuyRdocw=
 github.com/tinylib/msgp v1.1.6/go.mod h1:75BAfg2hauQhs3qedfdDZmWAPcFMAvJE5b9rGOMufyw=
 github.com/tklauser/go-sysconf v0.3.9/go.mod h1:11DU/5sG7UexIrp/O6g35hrWzu0JxlwQ3LSFUzyeuhs=
@@ -1765,8 +1766,8 @@ google.golang.org/genproto v0.0.0-20220114231437-d2e6a121cae0/go.mod h1:5CzLGKJ6
 google.golang.org/genproto v0.0.0-20220201184016-50beb8ab5c44/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc=
 google.golang.org/genproto v0.0.0-20220211171837-173942840c17/go.mod h1:kGP+zUP2Ddo0ayMi4YuN7C3WZyJvGLZRh8Z5wnAqvEI=
 google.golang.org/genproto v0.0.0-20220216160803-4663080d8bc8/go.mod h1:kGP+zUP2Ddo0ayMi4YuN7C3WZyJvGLZRh8Z5wnAqvEI=
-google.golang.org/genproto v0.0.0-20230403163135-c38d8f061ccd h1:sLpv7bNL1AsX3fdnWh9WVh7ejIzXdOc1RRHGeAmeStU=
-google.golang.org/genproto v0.0.0-20230403163135-c38d8f061ccd/go.mod h1:UUQDJDOlWu4KYeJZffbWgBkS1YFobzKbLVfK69pe0Ak=
+google.golang.org/genproto v0.0.0-20230410155749-daa745c078e1 h1:KpwkzHKEF7B9Zxg18WzOa7djJ+Ha5DzthMyZYQfEn2A=
+google.golang.org/genproto v0.0.0-20230410155749-daa745c078e1/go.mod h1:nKE/iIaLqn2bQwXBg8f1g2Ylh6r5MN5CmZvuzZCgsCU=
 google.golang.org/grpc v0.0.0-20180607172857-7a6a684ca69e/go.mod h1:yo6s7OP7yaDglbqo1J04qKzAhqBH6lvTonzMVmEdcZw=
 google.golang.org/grpc v1.8.0/go.mod h1:yo6s7OP7yaDglbqo1J04qKzAhqBH6lvTonzMVmEdcZw=
 google.golang.org/grpc v1.12.0/go.mod h1:yo6s7OP7yaDglbqo1J04qKzAhqBH6lvTonzMVmEdcZw=
diff --git a/tests/integration_tests/api_v2/run.sh b/tests/integration_tests/api_v2/run.sh
index 4fd76f36a50..865ee8ae66b 100644
--- a/tests/integration_tests/api_v2/run.sh
+++ b/tests/integration_tests/api_v2/run.sh
@@ -9,10 +9,6 @@ CDC_BINARY=cdc.test
 SINK_TYPE=$1

 function prepare() {
-	if [ "$SINK_TYPE" == "kafka" ]; then
-		return
-	fi
-
 	rm -rf $WORK_DIR && mkdir -p $WORK_DIR

 	start_tidb_cluster --workdir $WORK_DIR
@@ -29,8 +25,8 @@ function prepare() {
 }

 trap stop_tidb_cluster EXIT
-# kafka is not supported yet.
-if [ "$SINK_TYPE" != "kafka" ]; then
+# kafka and storage are not supported yet.
+if [ "$SINK_TYPE" == "mysql" ]; then
 	prepare $*

 	cd "$(dirname "$0")"
diff --git a/tests/integration_tests/bank/run.sh b/tests/integration_tests/bank/run.sh
index 9aa463a0030..eb213c25ae4 100644
--- a/tests/integration_tests/bank/run.sh
+++ b/tests/integration_tests/bank/run.sh
@@ -9,10 +9,6 @@ CDC_BINARY=cdc.test
 SINK_TYPE=$1

 function prepare() {
-	if [ "$SINK_TYPE" == "kafka" ]; then
-		return
-	fi
-
 	rm -rf $WORK_DIR && mkdir -p $WORK_DIR

 	start_tidb_cluster --workdir $WORK_DIR
@@ -30,7 +26,7 @@ function prepare() {
 trap stop_tidb_cluster EXIT
 # kafka is not supported yet.
-if [ "$SINK_TYPE" != "kafka" ]; then
+if [ "$SINK_TYPE" == "mysql" ]; then
 	prepare $*

 	cd "$(dirname "$0")"
diff --git a/tests/integration_tests/batch_update_to_no_batch/run.sh b/tests/integration_tests/batch_update_to_no_batch/run.sh
index ee5865267fa..15a9b26f340 100644
--- a/tests/integration_tests/batch_update_to_no_batch/run.sh
+++ b/tests/integration_tests/batch_update_to_no_batch/run.sh
@@ -14,7 +14,7 @@ SINK_TYPE=$1
 # 3. cdc can switch from batch mode to no-batch mode and vice versa and works well
 function run() {
 	# batch mode only supports mysql sink
-	if [ "$SINK_TYPE" == "kafka" ]; then
+	if [ "$SINK_TYPE" != "mysql" ]; then
 		return
 	fi
diff --git a/tests/integration_tests/bdr_mode/run.sh b/tests/integration_tests/bdr_mode/run.sh
index 2c77256ec75..25007391f56 100644
--- a/tests/integration_tests/bdr_mode/run.sh
+++ b/tests/integration_tests/bdr_mode/run.sh
@@ -10,7 +10,7 @@ SINK_TYPE=$1

 function run() {
 	# BDR mode only supports mysql sink
-	if [ "$SINK_TYPE" == "kafka" ]; then
+	if [ "$SINK_TYPE" != "mysql" ]; then
 		return
 	fi
diff --git a/tests/integration_tests/canal_json_adapter_compatibility/run.sh b/tests/integration_tests/canal_json_adapter_compatibility/run.sh
index 877adee988d..f9c45af1dc3 100644
--- a/tests/integration_tests/canal_json_adapter_compatibility/run.sh
+++ b/tests/integration_tests/canal_json_adapter_compatibility/run.sh
@@ -11,7 +11,7 @@ SINK_TYPE=$1
 # use canal-adapter to sync data from kafka to mysql,
 # make sure that `canal-json` output can be consumed by the canal-adapter.
 function run() {
-	if [ "$SINK_TYPE" == "mysql" ]; then
+	if [ "$SINK_TYPE" != "kafka" ]; then
 		return
 	fi
diff --git a/tests/integration_tests/canal_json_basic/run.sh b/tests/integration_tests/canal_json_basic/run.sh
index 56affd90ece..9551a4b7e51 100644
--- a/tests/integration_tests/canal_json_basic/run.sh
+++ b/tests/integration_tests/canal_json_basic/run.sh
@@ -10,7 +10,7 @@ SINK_TYPE=$1

 # use kafka-consumer with canal-json decoder to sync data from kafka to mysql
 function run() {
-	if [ "$SINK_TYPE" == "mysql" ]; then
+	if [ "$SINK_TYPE" != "kafka" ]; then
 		return
 	fi
diff --git a/tests/integration_tests/canal_json_storage_basic/run.sh b/tests/integration_tests/canal_json_storage_basic/run.sh
index 2f3f6c50564..2be757962d0 100644
--- a/tests/integration_tests/canal_json_storage_basic/run.sh
+++ b/tests/integration_tests/canal_json_storage_basic/run.sh
@@ -9,9 +9,7 @@ CDC_BINARY=cdc.test
 SINK_TYPE=$1

 function run() {
-	# Now, we run the storage tests in mysql sink tests.
-	# It's a temporary solution, we will move it to a new test pipeline later.
-	if [ "$SINK_TYPE" != "mysql" ]; then
+	if [ "$SINK_TYPE" != "storage" ]; then
 		return
 	fi
diff --git a/tests/integration_tests/canal_json_storage_partition_table/run.sh b/tests/integration_tests/canal_json_storage_partition_table/run.sh
index 82e97bbf31f..be3cac83c86 100644
--- a/tests/integration_tests/canal_json_storage_partition_table/run.sh
+++ b/tests/integration_tests/canal_json_storage_partition_table/run.sh
@@ -9,9 +9,7 @@ CDC_BINARY=cdc.test
 SINK_TYPE=$1

 function run() {
-	# Now, we run the storage tests in mysql sink tests.
-	# It's a temporary solution, we will move it to a new test pipeline later.
-	if [ "$SINK_TYPE" != "mysql" ]; then
+	if [ "$SINK_TYPE" != "storage" ]; then
 		return
 	fi
diff --git a/tests/integration_tests/cdc/cdc.go b/tests/integration_tests/cdc/cdc.go
index 6b7f56cb36f..5e224609302 100644
--- a/tests/integration_tests/cdc/cdc.go
+++ b/tests/integration_tests/cdc/cdc.go
@@ -20,7 +20,7 @@ import (
 	_ "github.com/go-sql-driver/mysql"
 	"github.com/pingcap/errors"
 	"github.com/pingcap/log"
-	"github.com/pingcap/tiflow/tests/integration_tests/dailytest"
+	"github.com/pingcap/tiflow/tests/integration_tests/cdc/dailytest"
 	"github.com/pingcap/tiflow/tests/integration_tests/util"
 )
diff --git a/tests/integration_tests/dailytest/case.go b/tests/integration_tests/cdc/dailytest/case.go
similarity index 100%
rename from tests/integration_tests/dailytest/case.go
rename to tests/integration_tests/cdc/dailytest/case.go
diff --git a/tests/integration_tests/dailytest/dailytest.go b/tests/integration_tests/cdc/dailytest/dailytest.go
similarity index 100%
rename from tests/integration_tests/dailytest/dailytest.go
rename to tests/integration_tests/cdc/dailytest/dailytest.go
diff --git a/tests/integration_tests/dailytest/data.go b/tests/integration_tests/cdc/dailytest/data.go
similarity index 100%
rename from tests/integration_tests/dailytest/data.go
rename to tests/integration_tests/cdc/dailytest/data.go
diff --git a/tests/integration_tests/dailytest/db.go b/tests/integration_tests/cdc/dailytest/db.go
similarity index 100%
rename from tests/integration_tests/dailytest/db.go
rename to tests/integration_tests/cdc/dailytest/db.go
diff --git a/tests/integration_tests/dailytest/exector.go b/tests/integration_tests/cdc/dailytest/exector.go
similarity index 100%
rename from tests/integration_tests/dailytest/exector.go
rename to tests/integration_tests/cdc/dailytest/exector.go
diff --git a/tests/integration_tests/dailytest/job.go b/tests/integration_tests/cdc/dailytest/job.go
similarity index 100%
rename from tests/integration_tests/dailytest/job.go
rename to tests/integration_tests/cdc/dailytest/job.go
diff --git a/tests/integration_tests/dailytest/parser.go b/tests/integration_tests/cdc/dailytest/parser.go
similarity index 100%
rename from tests/integration_tests/dailytest/parser.go
rename to tests/integration_tests/cdc/dailytest/parser.go
diff --git a/tests/integration_tests/dailytest/rand.go b/tests/integration_tests/cdc/dailytest/rand.go
similarity index 100%
rename from tests/integration_tests/dailytest/rand.go
rename to tests/integration_tests/cdc/dailytest/rand.go
diff --git a/tests/integration_tests/changefeed_fast_fail/run.sh b/tests/integration_tests/changefeed_fast_fail/run.sh
index 2e0ff878b47..881ee79ff25 100644
--- a/tests/integration_tests/changefeed_fast_fail/run.sh
+++ b/tests/integration_tests/changefeed_fast_fail/run.sh
@@ -12,7 +12,7 @@ MAX_RETRIES=20
 function run() {
 	# it is no need to test kafka
 	# the logic are all the same
-	if [ "$SINK_TYPE" == "kafka" ]; then
+	if [ "$SINK_TYPE" != "mysql" ]; then
 		return
 	fi
diff --git a/tests/integration_tests/changefeed_resume_with_checkpoint_ts/run.sh b/tests/integration_tests/changefeed_resume_with_checkpoint_ts/run.sh
index d8508c9caf2..378e218142b 100644
--- a/tests/integration_tests/changefeed_resume_with_checkpoint_ts/run.sh
+++ b/tests/integration_tests/changefeed_resume_with_checkpoint_ts/run.sh
@@ -10,11 +10,6 @@ SINK_TYPE=$1
 MAX_RETRIES=20

 function prepare() {
-	if [ "$SINK_TYPE" == "kafka" ]; then
-		echo "[$(date)] <<<<<< run test case $TEST_NAME success! >>>>>>"
-		exit 0
-	fi
-
 	rm -rf $WORK_DIR && mkdir -p $WORK_DIR
 	start_tidb_cluster --workdir $WORK_DIR
 	cd $WORK_DIR
@@ -132,9 +127,11 @@ function resume_changefeed_in_failed_state() {
 }

 trap stop_tidb_cluster EXIT
-prepare
-resume_changefeed_in_stopped_state $*
-resume_changefeed_in_failed_state $*
-check_logs $WORK_DIR
+if [ "$SINK_TYPE" == "mysql" ]; then
+	prepare
+	resume_changefeed_in_stopped_state $*
+	resume_changefeed_in_failed_state $*
+	check_logs $WORK_DIR
+fi

 echo "[$(date)] <<<<<< run test case $TEST_NAME success! >>>>>>"
diff --git a/tests/integration_tests/clustered_index/run.sh b/tests/integration_tests/clustered_index/run.sh
index afde9e62284..ca3b9db047c 100755
--- a/tests/integration_tests/clustered_index/run.sh
+++ b/tests/integration_tests/clustered_index/run.sh
@@ -46,8 +46,8 @@ function run() {

 # kafka is not supported yet.
 # ref to issue: https://github.com/pingcap/tiflow/issues/3421
-if [ "$SINK_TYPE" = "kafka" ]; then
-	echo "[$(date)] <<<<<< skip test case $TEST_NAME for kafka! >>>>>>"
+if [ "$SINK_TYPE" != "mysql" ]; then
+	echo "[$(date)] <<<<<< skip test case $TEST_NAME for $SINK_TYPE! >>>>>>"
>>>>>>" exit 0 fi trap stop_tidb_cluster EXIT diff --git a/tests/integration_tests/consistent_replicate_ddl/run.sh b/tests/integration_tests/consistent_replicate_ddl/run.sh index 6000c481202..c144c623819 100644 --- a/tests/integration_tests/consistent_replicate_ddl/run.sh +++ b/tests/integration_tests/consistent_replicate_ddl/run.sh @@ -19,7 +19,7 @@ stop() { function run() { # we only support eventually consistent replication with MySQL sink - if [ "$SINK_TYPE" == "kafka" ]; then + if [ "$SINK_TYPE" != "mysql" ]; then return fi diff --git a/tests/integration_tests/consistent_replicate_gbk/run.sh b/tests/integration_tests/consistent_replicate_gbk/run.sh index 2054f741116..1359e3abc7c 100644 --- a/tests/integration_tests/consistent_replicate_gbk/run.sh +++ b/tests/integration_tests/consistent_replicate_gbk/run.sh @@ -43,7 +43,7 @@ s3cmd --access_key=$MINIO_ACCESS_KEY --secret_key=$MINIO_SECRET_KEY --host=$S3_E function run() { # we only support eventually consistent replication with MySQL sink - if [ "$SINK_TYPE" == "kafka" ]; then + if [ "$SINK_TYPE" != "mysql" ]; then return fi diff --git a/tests/integration_tests/consistent_replicate_nfs/run.sh b/tests/integration_tests/consistent_replicate_nfs/run.sh index 35579801da9..f2b9a6f6612 100644 --- a/tests/integration_tests/consistent_replicate_nfs/run.sh +++ b/tests/integration_tests/consistent_replicate_nfs/run.sh @@ -16,7 +16,7 @@ stop() { function run() { # we only support eventually consistent replication with MySQL sink - if [ "$SINK_TYPE" == "kafka" ]; then + if [ "$SINK_TYPE" != "mysql" ]; then return fi diff --git a/tests/integration_tests/consistent_replicate_storage_file/run.sh b/tests/integration_tests/consistent_replicate_storage_file/run.sh index 73ebab005fa..5e03d111e43 100644 --- a/tests/integration_tests/consistent_replicate_storage_file/run.sh +++ b/tests/integration_tests/consistent_replicate_storage_file/run.sh @@ -19,7 +19,7 @@ stop() { function run() { # we only support eventually consistent replication with MySQL sink - if [ "$SINK_TYPE" == "kafka" ]; then + if [ "$SINK_TYPE" != "mysql" ]; then return fi diff --git a/tests/integration_tests/consistent_replicate_storage_s3/run.sh b/tests/integration_tests/consistent_replicate_storage_s3/run.sh index 9ce5048b9dc..4ec5d429f26 100644 --- a/tests/integration_tests/consistent_replicate_storage_s3/run.sh +++ b/tests/integration_tests/consistent_replicate_storage_s3/run.sh @@ -45,7 +45,7 @@ s3cmd --access_key=$MINIO_ACCESS_KEY --secret_key=$MINIO_SECRET_KEY --host=$S3_E function run() { # we only support eventually consistent replication with MySQL sink - if [ "$SINK_TYPE" == "kafka" ]; then + if [ "$SINK_TYPE" != "mysql" ]; then return fi diff --git a/tests/integration_tests/csv_storage_basic/run.sh b/tests/integration_tests/csv_storage_basic/run.sh index 0dc534a894a..90e7f5849c1 100644 --- a/tests/integration_tests/csv_storage_basic/run.sh +++ b/tests/integration_tests/csv_storage_basic/run.sh @@ -9,9 +9,7 @@ CDC_BINARY=cdc.test SINK_TYPE=$1 function run() { - # Now, we run the storage tests in mysql sink tests. - # It's a temporary solution, we will move it to a new test pipeline later. 
- if [ "$SINK_TYPE" != "mysql" ]; then + if [ "$SINK_TYPE" != "storage" ]; then return fi diff --git a/tests/integration_tests/csv_storage_multi_tables_ddl/run.sh b/tests/integration_tests/csv_storage_multi_tables_ddl/run.sh index 62686f15a20..ec75a5d761f 100755 --- a/tests/integration_tests/csv_storage_multi_tables_ddl/run.sh +++ b/tests/integration_tests/csv_storage_multi_tables_ddl/run.sh @@ -11,7 +11,7 @@ SINK_TYPE=$1 function run() { # Now, we run the storage tests in mysql sink tests. # It's a temporary solution, we will move it to a new test pipeline later. - if [ "$SINK_TYPE" != "mysql" ]; then + if [ "$SINK_TYPE" != "storage" ]; then return fi @@ -58,7 +58,7 @@ function run() { check_changefeed_state "http://${UP_PD_HOST_1}:${UP_PD_PORT_1}" $cf_normal "normal" "null" "" check_changefeed_state "http://${UP_PD_HOST_1}:${UP_PD_PORT_1}" $cf_err1 "normal" "null" "" - check_changefeed_state "http://${UP_PD_HOST_1}:${UP_PD_PORT_1}" $cf_err2 "error" "ErrSyncRenameTableFailed" "" + check_changefeed_state "http://${UP_PD_HOST_1}:${UP_PD_PORT_1}" $cf_err2 "failed" "ErrSyncRenameTableFailed" "" check_sync_diff $WORK_DIR $CUR/conf/diff_config.toml 100 diff --git a/tests/integration_tests/csv_storage_partition_table/run.sh b/tests/integration_tests/csv_storage_partition_table/run.sh index 155d986aa74..c5acb21a83a 100644 --- a/tests/integration_tests/csv_storage_partition_table/run.sh +++ b/tests/integration_tests/csv_storage_partition_table/run.sh @@ -9,9 +9,7 @@ CDC_BINARY=cdc.test SINK_TYPE=$1 function run() { - # Now, we run the storage tests in mysql sink tests. - # It's a temporary solution, we will move it to a new test pipeline later. - if [ "$SINK_TYPE" != "mysql" ]; then + if [ "$SINK_TYPE" != "storage" ]; then return fi diff --git a/tests/integration_tests/ddl_reentrant/run.sh b/tests/integration_tests/ddl_reentrant/run.sh index 44410a72e31..a719c36604a 100644 --- a/tests/integration_tests/ddl_reentrant/run.sh +++ b/tests/integration_tests/ddl_reentrant/run.sh @@ -119,7 +119,7 @@ function ddl_test() { function run() { # don't test kafka in this case - if [ "$SINK_TYPE" == "kafka" ]; then + if [ "$SINK_TYPE" != "mysql" ]; then return fi diff --git a/tests/integration_tests/http_api/run.sh b/tests/integration_tests/http_api/run.sh index e202b502370..c95795971f3 100644 --- a/tests/integration_tests/http_api/run.sh +++ b/tests/integration_tests/http_api/run.sh @@ -10,8 +10,8 @@ SINK_TYPE=$1 MAX_RETRIES=50 function run() { - # mysql and kafka are the same - if [ "$SINK_TYPE" == "kafka" ]; then + # storage and kafka are the same as mysql + if [ "$SINK_TYPE" != "mysql" ]; then return fi diff --git a/tests/integration_tests/http_api_tls/run.sh b/tests/integration_tests/http_api_tls/run.sh index e16d4c69463..5a1aec78f8d 100644 --- a/tests/integration_tests/http_api_tls/run.sh +++ b/tests/integration_tests/http_api_tls/run.sh @@ -11,8 +11,8 @@ TLS_DIR=$(cd $CUR/../_certificates && pwd) MAX_RETRIES=20 function run() { - # mysql and kafka are the same - if [ "$SINK_TYPE" == "kafka" ]; then + # storage and kafka are the same as mysql + if [ "$SINK_TYPE" != "mysql" ]; then return fi diff --git a/tests/integration_tests/kafka_big_messages/run.sh b/tests/integration_tests/kafka_big_messages/run.sh index 40698eb0466..d0904360006 100755 --- a/tests/integration_tests/kafka_big_messages/run.sh +++ b/tests/integration_tests/kafka_big_messages/run.sh @@ -10,7 +10,7 @@ SINK_TYPE=$1 function run() { # test kafka sink only in this case - if [ "$SINK_TYPE" == "mysql" ]; then + if [ "$SINK_TYPE" != "kafka" ]; 
diff --git a/tests/integration_tests/kafka_big_messages/run.sh b/tests/integration_tests/kafka_big_messages/run.sh
index 40698eb0466..d0904360006 100755
--- a/tests/integration_tests/kafka_big_messages/run.sh
+++ b/tests/integration_tests/kafka_big_messages/run.sh
@@ -10,7 +10,7 @@ SINK_TYPE=$1
 
 function run() {
 	# test kafka sink only in this case
-	if [ "$SINK_TYPE" == "mysql" ]; then
+	if [ "$SINK_TYPE" != "kafka" ]; then
 		return
 	fi
 	rm -rf $WORK_DIR && mkdir -p $WORK_DIR
diff --git a/tests/integration_tests/kafka_big_messages_v2/run.sh b/tests/integration_tests/kafka_big_messages_v2/run.sh
index 60da2498fc8..d1f861fdeee 100755
--- a/tests/integration_tests/kafka_big_messages_v2/run.sh
+++ b/tests/integration_tests/kafka_big_messages_v2/run.sh
@@ -10,7 +10,7 @@ SINK_TYPE=$1
 
 function run() {
 	# test kafka sink only in this case
-	if [ "$SINK_TYPE" == "mysql" ]; then
+	if [ "$SINK_TYPE" != "kafka" ]; then
 		return
 	fi
 	rm -rf $WORK_DIR && mkdir -p $WORK_DIR
diff --git a/tests/integration_tests/big_txn_v2/conf/changefeed.toml b/tests/integration_tests/kafka_big_txn_v2/conf/changefeed.toml
similarity index 100%
rename from tests/integration_tests/big_txn_v2/conf/changefeed.toml
rename to tests/integration_tests/kafka_big_txn_v2/conf/changefeed.toml
diff --git a/tests/integration_tests/big_txn_v2/conf/diff_config.toml b/tests/integration_tests/kafka_big_txn_v2/conf/diff_config.toml
similarity index 85%
rename from tests/integration_tests/big_txn_v2/conf/diff_config.toml
rename to tests/integration_tests/kafka_big_txn_v2/conf/diff_config.toml
index 367c21817c8..71521d8b006 100644
--- a/tests/integration_tests/big_txn_v2/conf/diff_config.toml
+++ b/tests/integration_tests/kafka_big_txn_v2/conf/diff_config.toml
@@ -7,7 +7,7 @@ export-fix-sql = true
 check-struct-only = false
 
 [task]
-    output-dir = "/tmp/tidb_cdc_test/big_txn/sync_diff/output"
+    output-dir = "/tmp/tidb_cdc_test/kafka_big_txn_v2/sync_diff/output"
 
     source-instances = ["tidb"]
diff --git a/tests/integration_tests/big_txn_v2/conf/workload b/tests/integration_tests/kafka_big_txn_v2/conf/workload
similarity index 100%
rename from tests/integration_tests/big_txn_v2/conf/workload
rename to tests/integration_tests/kafka_big_txn_v2/conf/workload
diff --git a/tests/integration_tests/big_txn_v2/run.sh b/tests/integration_tests/kafka_big_txn_v2/run.sh
similarity index 96%
rename from tests/integration_tests/big_txn_v2/run.sh
rename to tests/integration_tests/kafka_big_txn_v2/run.sh
index 63142119583..bd449ff6ccb 100755
--- a/tests/integration_tests/big_txn_v2/run.sh
+++ b/tests/integration_tests/kafka_big_txn_v2/run.sh
@@ -12,7 +12,8 @@ CDC_COUNT=3
 DB_COUNT=4
 
 function run() {
-	if [ "$SINK_TYPE" == "mysql" ]; then
+	# test kafka sink only in this case
+	if [ "$SINK_TYPE" != "kafka" ]; then
 		return
 	fi
 	rm -rf $WORK_DIR && mkdir -p $WORK_DIR
diff --git a/tests/integration_tests/kafka_compression/run.sh b/tests/integration_tests/kafka_compression/run.sh
index ed779827eff..64f5544781e 100644
--- a/tests/integration_tests/kafka_compression/run.sh
+++ b/tests/integration_tests/kafka_compression/run.sh
@@ -30,7 +30,7 @@ function test_compression() {
 }
 
 function run() {
-	if [ "$SINK_TYPE" == "mysql" ]; then
+	if [ "$SINK_TYPE" != "kafka" ]; then
 		return
 	fi
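
The big_txn_v2 -> kafka_big_txn_v2 rename also fixes its sync-diff output-dir, which still pointed at big_txn. A throwaway check along these lines (illustrative, not part of this patch) can catch other configs whose output-dir drifted from the case name:

#!/bin/bash
# Flag diff_config*.toml files whose output-dir does not mention the
# directory name of the case they belong to.
for conf in tests/integration_tests/*/conf/diff_config*.toml; do
	case_dir=$(basename "$(dirname "$(dirname "$conf")")")
	grep -q "output-dir.*/${case_dir}/" "$conf" || echo "check: $conf"
done
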
diff --git a/tests/integration_tests/kafka_messages/run.sh b/tests/integration_tests/kafka_messages/run.sh
index f2659b79900..88562a92725 100755
--- a/tests/integration_tests/kafka_messages/run.sh
+++ b/tests/integration_tests/kafka_messages/run.sh
@@ -12,11 +12,6 @@ CDC_COUNT=3
 DB_COUNT=4
 
 function run_length_limit() {
-	# test kafka sink only in this case
-	if [ "$SINK_TYPE" == "mysql" ]; then
-		return
-	fi
-
 	rm -rf $WORK_DIR && mkdir -p $WORK_DIR
 
 	start_tidb_cluster --workdir $WORK_DIR
@@ -71,11 +66,6 @@ function run_length_limit() {
 }
 
 function run_batch_size_limit() {
-	# test kafka sink only in this case
-	if [ "$SINK_TYPE" == "mysql" ]; then
-		return
-	fi
-
 	rm -rf $WORK_DIR && mkdir -p $WORK_DIR
 
 	start_tidb_cluster --workdir $WORK_DIR
@@ -126,8 +116,17 @@ function run_batch_size_limit() {
 	stop_tidb_cluster
 }
 
+function run() {
+	# test kafka sink only in this case
+	if [ "$SINK_TYPE" != "kafka" ]; then
+		return
+	fi
+
+	run_length_limit $*
+	run_batch_size_limit $*
+}
+
 trap stop_tidb_cluster EXIT
-run_length_limit $*
-run_batch_size_limit $*
+run $*
 check_logs $WORK_DIR
 
 echo "[$(date)] <<<<<< run test case $TEST_NAME success! >>>>>>"
diff --git a/tests/integration_tests/kafka_sink_error_resume/run.sh b/tests/integration_tests/kafka_sink_error_resume/run.sh
index 2e6326ea4ec..c5e44a3acdc 100755
--- a/tests/integration_tests/kafka_sink_error_resume/run.sh
+++ b/tests/integration_tests/kafka_sink_error_resume/run.sh
@@ -14,7 +14,7 @@ MAX_RETRIES=20
 
 function run() {
 	# test kafka sink only in this case
-	if [ "$SINK_TYPE" == "mysql" ]; then
+	if [ "$SINK_TYPE" != "kafka" ]; then
 		return
 	fi
diff --git a/tests/integration_tests/kill_owner_with_ddl/run.sh b/tests/integration_tests/kill_owner_with_ddl/run.sh
index 60c7c6a69c3..39475021a06 100755
--- a/tests/integration_tests/kill_owner_with_ddl/run.sh
+++ b/tests/integration_tests/kill_owner_with_ddl/run.sh
@@ -39,7 +39,7 @@ export -f kill_cdc_and_restart
 
 function run() {
 	# kafka is not supported yet.
-	if [ "$SINK_TYPE" == "kafka" ]; then
+	if [ "$SINK_TYPE" != "mysql" ]; then
 		return
 	fi
diff --git a/tests/integration_tests/multi_cdc_cluster/run.sh b/tests/integration_tests/multi_cdc_cluster/run.sh
index 862f3874688..8b17118462f 100644
--- a/tests/integration_tests/multi_cdc_cluster/run.sh
+++ b/tests/integration_tests/multi_cdc_cluster/run.sh
@@ -10,7 +10,7 @@ SINK_TYPE=$1
 
 function run() {
 	# test mysql sink only in this case
-	if [ "$SINK_TYPE" == "kafka" ]; then
+	if [ "$SINK_TYPE" != "mysql" ]; then
 		return
 	fi
 	rm -rf $WORK_DIR && mkdir -p $WORK_DIR
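
kafka_messages previously duplicated the sink guard inside both sub-tests; the rewrite hoists it into a single run() wrapper. Note the wrapper still has to be invoked after the trap line (the hunk above includes that call), or the case would define run() and then silently pass without testing anything. A stripped-down sketch of the shape:

#!/bin/bash
SINK_TYPE=$1

function run_length_limit() { echo "message length limit sub-test"; }
function run_batch_size_limit() { echo "batch size limit sub-test"; }

function run() {
	# One guard instead of a copy in every sub-test.
	if [ "$SINK_TYPE" != "kafka" ]; then
		return
	fi
	run_length_limit "$@"
	run_batch_size_limit "$@"
}

trap 'echo "cleanup"' EXIT
run "$@"
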
- if [ "$SINK_TYPE" == "kafka" ]; then + if [ "$SINK_TYPE" != "mysql" ]; then return fi diff --git a/tests/integration_tests/multi_tables_ddl_v2/run.sh b/tests/integration_tests/multi_tables_ddl_v2/run.sh index f0d94d24813..09fd07d89c6 100755 --- a/tests/integration_tests/multi_tables_ddl_v2/run.sh +++ b/tests/integration_tests/multi_tables_ddl_v2/run.sh @@ -9,7 +9,8 @@ CDC_BINARY=cdc.test SINK_TYPE=$1 function run() { - if [ "$SINK_TYPE" == "mysql" ]; then + # test kafka sink only in this case + if [ "$SINK_TYPE" != "kafka" ]; then return fi diff --git a/tests/integration_tests/multi_topics/run.sh b/tests/integration_tests/multi_topics/run.sh index f7257f0fbcc..fa38b2f1037 100644 --- a/tests/integration_tests/multi_topics/run.sh +++ b/tests/integration_tests/multi_topics/run.sh @@ -9,7 +9,7 @@ CDC_BINARY=cdc.test SINK_TYPE=$1 function run() { - if [ "$SINK_TYPE" == "mysql" ]; then + if [ "$SINK_TYPE" != "kafka" ]; then return fi diff --git a/tests/integration_tests/multi_topics_v2/run.sh b/tests/integration_tests/multi_topics_v2/run.sh index f7257f0fbcc..615e577e900 100644 --- a/tests/integration_tests/multi_topics_v2/run.sh +++ b/tests/integration_tests/multi_topics_v2/run.sh @@ -9,7 +9,8 @@ CDC_BINARY=cdc.test SINK_TYPE=$1 function run() { - if [ "$SINK_TYPE" == "mysql" ]; then + # test kafka sink only in this case + if [ "$SINK_TYPE" != "kafka" ]; then return fi diff --git a/tests/integration_tests/owner_remove_table_error/run.sh b/tests/integration_tests/owner_remove_table_error/run.sh index d28160059ba..22d5a21c8fa 100644 --- a/tests/integration_tests/owner_remove_table_error/run.sh +++ b/tests/integration_tests/owner_remove_table_error/run.sh @@ -14,7 +14,7 @@ MAX_RETRIES=20 function run() { # kafka is not supported yet. - if [ "$SINK_TYPE" == "kafka" ]; then + if [ "$SINK_TYPE" != "mysql" ]; then return fi diff --git a/tests/integration_tests/processor_etcd_worker_delay/run.sh b/tests/integration_tests/processor_etcd_worker_delay/run.sh index 24107fefd9d..0dacb38e5f4 100644 --- a/tests/integration_tests/processor_etcd_worker_delay/run.sh +++ b/tests/integration_tests/processor_etcd_worker_delay/run.sh @@ -14,7 +14,7 @@ MAX_RETRIES=20 function run() { # kafka is not supported yet. 
- if [ "$SINK_TYPE" == "kafka" ]; then + if [ "$SINK_TYPE" != "mysql" ]; then return fi diff --git a/tests/integration_tests/processor_resolved_ts_fallback/run.sh b/tests/integration_tests/processor_resolved_ts_fallback/run.sh index 732039c7b51..0b494ff7272 100755 --- a/tests/integration_tests/processor_resolved_ts_fallback/run.sh +++ b/tests/integration_tests/processor_resolved_ts_fallback/run.sh @@ -10,7 +10,7 @@ SINK_TYPE=$1 function run() { # TODO: kafka sink has bug with this case, remove this after bug is fixed - if [ "$SINK_TYPE" == "kafka" ]; then + if [ "$SINK_TYPE" != "mysql" ]; then return fi diff --git a/tests/integration_tests/run_group.sh b/tests/integration_tests/run_group.sh old mode 100644 new mode 100755 index 32677cc9e53..b488fea9a3e --- a/tests/integration_tests/run_group.sh +++ b/tests/integration_tests/run_group.sh @@ -7,6 +7,23 @@ CUR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd) sink_type=$1 group=$2 +# Other tests that only support mysql: batch_update_to_no_batch ddl_reentrant +# changefeed_fast_fail changefeed_resume_with_checkpoint_ts sequence +# multi_cdc_cluster capture_suicide_while_balance_table +mysql_only="bdr_mode capture_suicide_while_balance_table syncpoint" +mysql_only_http="http_api http_api_tls api_v2" +mysql_only_consistent_replicate="consistent_replicate_ddl consistent_replicate_gbk consistent_replicate_nfs consistent_replicate_storage_file consistent_replicate_storage_s3" + +# Tests that need to support kafka: bank kill_owner_with_ddl owner_remove_table_error +# owner_resign processor_etcd_worker_delay processor_resolved_ts_fallback +# multi_changefeed clustered_index sink_hang +kafka_only="kafka_big_messages kafka_compression kafka_messages kafka_sink_error_resume" +kafka_only_protocol="canal_json_adapter_compatibility canal_json_basic multi_topics" +kafka_only_v2="kafka_big_txn_v2 kafka_big_messages_v2 multi_tables_ddl_v2 multi_topics_v2" + +storage_only_csv="csv_storage_basic csv_storage_multi_tables_ddl csv_storage_partition_table" +storage_only_canal_json="canal_json_storage_basic canal_json_storage_partition_table" + # Define groups # Note: If new group is added, the group name must also be added to CI # * https://github.com/PingCAP-QE/ci/blob/main/pipelines/pingcap/tiflow/latest/pull_cdc_integration_kafka_test.groovy @@ -15,31 +32,32 @@ group=$2 # Putting multiple light tests together and heavy tests in a separate group. declare -A groups groups=( - ["G00"]='changefeed_error ddl_sequence force_replicate_table' - ["G01"]='multi_capture kafka_big_messages cdc' - ["G02"]='drop_many_tables multi_cdc_cluster processor_stop_delay' - ["G03"]='capture_suicide_while_balance_table row_format' - ["G04"]='foreign_key canal_json_basic ddl_puller_lag' + # Note: only the tests in the first three groups are running in storage sink pipeline. 
+ ["G00"]="$mysql_only $kafka_only $storage_only_csv" + ["G01"]="$mysql_only_http $kafka_only_protocol $storage_only_canal_json" + ["G02"]="$mysql_only_consistent_replicate $kafka_only_v2" + ["G03"]='row_format drop_many_tables processor_stop_delay' + ["G04"]='foreign_key ddl_puller_lag ddl_only_block_related_table' ["G05"]='partition_table changefeed_auto_stop' - ["G06"]='charset_gbk owner_remove_table_error bdr_mode' - ["G07"]='clustered_index multi_tables_ddl big_txn_v2' - ["G08"]='bank multi_source kafka_sink_error_resume' - ["G09"]='capture_suicide_while_balance_table' - ["G10"]='multi_topics_v2 consistent_replicate_storage_s3 sink_retry' - ["G11"]='consistent_replicate_storage_file kv_client_stream_reconnect consistent_replicate_gbk' - ["G12"]='http_api changefeed_fast_fail tidb_mysql_test server_config_compatibility' - ["G13"]='canal_json_adapter_compatibility resourcecontrol processor_etcd_worker_delay' - ["G14"]='batch_update_to_no_batch gc_safepoint default_value changefeed_pause_resume' - ["G15"]='cli simple cdc_server_tips changefeed_resume_with_checkpoint_ts ddl_reentrant' - ["G16"]='processor_err_chan resolve_lock move_table kafka_compression autorandom' - ["G17"]='ddl_attributes many_pk_or_uk kafka_messages capture_session_done_during_task http_api_tls' + ["G06"]='charset_gbk owner_remove_table_error ddl_manager' + ["G07"]='clustered_index multi_tables_ddl' + ["G08"]='bank multi_source multi_capture' + ["G09"]='ddl_reentrant multi_cdc_cluster' + ["G10"]='sink_retry changefeed_error ddl_sequence' + ["G11"]='kv_client_stream_reconnect cdc default_value' + ["G12"]='changefeed_fast_fail tidb_mysql_test server_config_compatibility' + ["G13"]='resourcecontrol processor_etcd_worker_delay' + ["G14"]='batch_update_to_no_batch gc_safepoint changefeed_pause_resume' + ["G15"]='cli simple cdc_server_tips changefeed_resume_with_checkpoint_ts' + ["G16"]='processor_err_chan resolve_lock move_table autorandom' + ["G17"]='ddl_attributes many_pk_or_uk capture_session_done_during_task' ["G18"]='tiflash new_ci_collation_without_old_value region_merge common_1' - ["G19"]='kafka_big_messages_v2 multi_tables_ddl_v2 split_region availability' - ["G20"]='changefeed_reconstruct http_proxies kill_owner_with_ddl savepoint' - ["G21"]='event_filter generate_column syncpoint sequence processor_resolved_ts_fallback' - ["G22"]='big_txn csv_storage_basic changefeed_finish sink_hang canal_json_storage_basic' - ["G23"]='multi_topics new_ci_collation_with_old_value batch_add_table multi_changefeed' - ["G24"]='consistent_replicate_nfs owner_resign api_v2' + ["G19"]='split_region availability kill_owner_with_ddl' + ["G20"]='changefeed_reconstruct http_proxies savepoint' + ["G21"]='event_filter generate_column sequence processor_resolved_ts_fallback' + ["G22"]='big_txn changefeed_finish sink_hang' + ["G23"]='new_ci_collation_with_old_value batch_add_table ' + ["G24"]='owner_resign force_replicate_table multi_changefeed' ) # Get other cases not in groups, to avoid missing any case @@ -52,20 +70,21 @@ for script in "$CUR"/*/run.sh; do fi done -# Get test names -test_names="" -# shellcheck disable=SC2076 if [[ "$group" == "others" ]]; then - test_names="${others[*]}" + if [[ -z $others ]]; then + echo "All CDC integration test cases are added to groups" + exit 0 + fi + echo "Error: "$others" is not added to any group in tests/integration_tests/run_group.sh" + exit 1 elif [[ " ${!groups[*]} " =~ " ${group} " ]]; then test_names="${groups[${group}]}" + # Run test cases + if [[ -n $test_names ]]; then + echo "Run cases: 
${test_names}" + "${CUR}"/run.sh "${sink_type}" "${test_names}" + fi else echo "Error: invalid group name: ${group}" exit 1 fi - -# Run test cases -if [[ -n $test_names ]]; then - echo "Run cases: ${test_names}" - "${CUR}"/run.sh "${sink_type}" "${test_names}" -fi diff --git a/tests/integration_tests/sequence/run.sh b/tests/integration_tests/sequence/run.sh index 702131d6015..ff90a243dac 100755 --- a/tests/integration_tests/sequence/run.sh +++ b/tests/integration_tests/sequence/run.sh @@ -10,7 +10,7 @@ SINK_TYPE=$1 function run() { # No need to test kafka. - if [ "$SINK_TYPE" == "kafka" ]; then + if [ "$SINK_TYPE" != "mysql" ]; then return fi diff --git a/tests/integration_tests/sink_hang/run.sh b/tests/integration_tests/sink_hang/run.sh index d00e04557c8..3c33ad17f08 100644 --- a/tests/integration_tests/sink_hang/run.sh +++ b/tests/integration_tests/sink_hang/run.sh @@ -14,7 +14,7 @@ MAX_RETRIES=20 function run() { # kafka is not supported yet. - if [ "$SINK_TYPE" == "kafka" ]; then + if [ "$SINK_TYPE" != "mysql" ]; then return fi