diff --git a/Makefile b/Makefile index 8c2a34000ac..4b99de07eac 100644 --- a/Makefile +++ b/Makefile @@ -299,9 +299,15 @@ check-static: tools/bin/golangci-lint tools/bin/golangci-lint run --timeout 10m0s --skip-dirs "^dm/","^tests/" #cd dm && ../tools/bin/golangci-lint run --timeout 10m0s +<<<<<<< HEAD check: check-copyright fmt check-static tidy terror_check errdoc \ check-merge-conflicts check-ticdc-dashboard check-diff-line-width \ swagger-spec check-makefiles check_engine_integration_test +======= +check: check-copyright generate_mock go-generate fmt check-static tidy terror_check errdoc \ + check-merge-conflicts check-ticdc-dashboard check-diff-line-width swagger-spec check-makefiles \ + check_cdc_integration_test check_dm_integration_test check_engine_integration_test +>>>>>>> 65217445e8 (makefile(all): add new cases to run_group and check cases in makefile (#8794)) @git --no-pager diff --exit-code || (echo "Please add changed files!" && false) integration_test_coverage: tools/bin/gocovmerge tools/bin/goveralls @@ -514,6 +520,13 @@ check_third_party_binary_for_engine: check_engine_integration_test: ./engine/test/utils/check_case.sh + ./engine/test/integration_tests/run_group.sh "check others" + +check_dm_integration_test: + ./dm/tests/run_group.sh "check others" + +check_cdc_integration_test: + ./tests/integration_tests/run_group.sh check "others" bin/mc: ./scripts/download-mc.sh diff --git a/dm/checker/checker.go b/dm/checker/checker.go index 07a734dec98..7b36f87dd3d 100644 --- a/dm/checker/checker.go +++ b/dm/checker/checker.go @@ -25,7 +25,15 @@ import ( "time" _ "github.com/go-sql-driver/mysql" // for mysql +<<<<<<< HEAD "github.com/pingcap/tidb/br/pkg/lightning/restore" +======= + "github.com/pingcap/tidb/br/pkg/lightning/checkpoints" + "github.com/pingcap/tidb/br/pkg/lightning/importer" + "github.com/pingcap/tidb/br/pkg/lightning/importer/opts" + "github.com/pingcap/tidb/br/pkg/lightning/mydump" + "github.com/pingcap/tidb/br/pkg/lightning/precheck" +>>>>>>> 65217445e8 (makefile(all): add new cases to run_group and check cases in makefile (#8794)) "github.com/pingcap/tidb/dumpling/export" "github.com/pingcap/tidb/parser/mysql" "github.com/pingcap/tidb/util/dbutil" @@ -355,26 +363,48 @@ func (c *Checker) Init(ctx context.Context) (err error) { return err } if _, ok := c.checkingItems[config.LightningEmptyRegionChecking]; ok { +<<<<<<< HEAD lChecker, err := builder.BuildPrecheckItem(restore.CheckTargetClusterEmptyRegion) +======= + lChecker, err := builder.BuildPrecheckItem(precheck.CheckTargetClusterEmptyRegion) +>>>>>>> 65217445e8 (makefile(all): add new cases to run_group and check cases in makefile (#8794)) if err != nil { return err } c.checkList = append(c.checkList, checker.NewLightningEmptyRegionChecker(lChecker)) } if _, ok := c.checkingItems[config.LightningRegionDistributionChecking]; ok { +<<<<<<< HEAD lChecker, err := builder.BuildPrecheckItem(restore.CheckTargetClusterRegionDist) +======= + lChecker, err := builder.BuildPrecheckItem(precheck.CheckTargetClusterRegionDist) +>>>>>>> 65217445e8 (makefile(all): add new cases to run_group and check cases in makefile (#8794)) if err != nil { return err } c.checkList = append(c.checkList, checker.NewLightningRegionDistributionChecker(lChecker)) } if _, ok := c.checkingItems[config.LightningDownstreamVersionChecking]; ok { +<<<<<<< HEAD lChecker, err := builder.BuildPrecheckItem(restore.CheckTargetClusterVersion) +======= + lChecker, err := builder.BuildPrecheckItem(precheck.CheckTargetClusterVersion) +>>>>>>> 65217445e8 
(makefile(all): add new cases to run_group and check cases in makefile (#8794)) if err != nil { return err } c.checkList = append(c.checkList, checker.NewLightningClusterVersionChecker(lChecker)) } +<<<<<<< HEAD +======= + if _, ok := c.checkingItems[config.LightningMutexFeatureChecking]; ok { + lChecker, err := builder.BuildPrecheckItem(precheck.CheckTargetUsingCDCPITR) + if err != nil { + return err + } + c.checkList = append(c.checkList, checker.NewLightningCDCPiTRChecker(lChecker)) + } +>>>>>>> 65217445e8 (makefile(all): add new cases to run_group and check cases in makefile (#8794)) } c.tctx.Logger.Info(c.displayCheckingItems()) diff --git a/dm/pkg/checker/lightning.go b/dm/pkg/checker/lightning.go index 156560d0205..49b9f60625a 100644 --- a/dm/pkg/checker/lightning.go +++ b/dm/pkg/checker/lightning.go @@ -16,13 +16,24 @@ package checker import ( "context" +<<<<<<< HEAD "github.com/pingcap/tidb/br/pkg/lightning/restore" +======= + "github.com/docker/go-units" + "github.com/pingcap/tidb/br/pkg/lightning/importer" + "github.com/pingcap/tidb/br/pkg/lightning/precheck" + "github.com/pingcap/tiflow/dm/pkg/log" +>>>>>>> 65217445e8 (makefile(all): add new cases to run_group and check cases in makefile (#8794)) ) func convertLightningPrecheck( ctx context.Context, dmResult *Result, +<<<<<<< HEAD lightningPrechecker restore.PrecheckItem, +======= + lightningPrechecker precheck.Checker, +>>>>>>> 65217445e8 (makefile(all): add new cases to run_group and check cases in makefile (#8794)) failLevel State, instruction string, ) { @@ -42,11 +53,19 @@ func convertLightningPrecheck( // LightningEmptyRegionChecker checks whether there are too many empty regions in the cluster. type LightningEmptyRegionChecker struct { +<<<<<<< HEAD inner restore.PrecheckItem } // NewLightningEmptyRegionChecker creates a new LightningEmptyRegionChecker. func NewLightningEmptyRegionChecker(lightningChecker restore.PrecheckItem) RealChecker { +======= + inner precheck.Checker +} + +// NewLightningEmptyRegionChecker creates a new LightningEmptyRegionChecker. +func NewLightningEmptyRegionChecker(lightningChecker precheck.Checker) RealChecker { +>>>>>>> 65217445e8 (makefile(all): add new cases to run_group and check cases in makefile (#8794)) return &LightningEmptyRegionChecker{inner: lightningChecker} } @@ -74,11 +93,19 @@ func (c *LightningEmptyRegionChecker) Check(ctx context.Context) *Result { // LightningRegionDistributionChecker checks whether the region distribution is balanced. type LightningRegionDistributionChecker struct { +<<<<<<< HEAD inner restore.PrecheckItem } // NewLightningRegionDistributionChecker creates a new LightningRegionDistributionChecker. func NewLightningRegionDistributionChecker(lightningChecker restore.PrecheckItem) RealChecker { +======= + inner precheck.Checker +} + +// NewLightningRegionDistributionChecker creates a new LightningRegionDistributionChecker. +func NewLightningRegionDistributionChecker(lightningChecker precheck.Checker) RealChecker { +>>>>>>> 65217445e8 (makefile(all): add new cases to run_group and check cases in makefile (#8794)) return &LightningRegionDistributionChecker{inner: lightningChecker} } @@ -106,11 +133,19 @@ func (c *LightningRegionDistributionChecker) Check(ctx context.Context) *Result // LightningClusterVersionChecker checks whether the cluster version is compatible with Lightning. type LightningClusterVersionChecker struct { +<<<<<<< HEAD inner restore.PrecheckItem } // NewLightningClusterVersionChecker creates a new LightningClusterVersionChecker. 
func NewLightningClusterVersionChecker(lightningChecker restore.PrecheckItem) RealChecker { +======= + inner precheck.Checker +} + +// NewLightningClusterVersionChecker creates a new LightningClusterVersionChecker. +func NewLightningClusterVersionChecker(lightningChecker precheck.Checker) RealChecker { +>>>>>>> 65217445e8 (makefile(all): add new cases to run_group and check cases in makefile (#8794)) return &LightningClusterVersionChecker{inner: lightningChecker} } @@ -135,3 +170,110 @@ func (c *LightningClusterVersionChecker) Check(ctx context.Context) *Result { ) return result } +<<<<<<< HEAD +======= + +// LightningFreeSpaceChecker checks whether the cluster has enough free space. +type LightningFreeSpaceChecker struct { + sourceDataSize int64 + infoGetter importer.TargetInfoGetter +} + +// NewLightningFreeSpaceChecker creates a new LightningFreeSpaceChecker. +func NewLightningFreeSpaceChecker(sourceDataSize int64, getter importer.TargetInfoGetter) RealChecker { + return &LightningFreeSpaceChecker{ + sourceDataSize: sourceDataSize, + infoGetter: getter, + } +} + +// Name implements the RealChecker interface. +func (c *LightningFreeSpaceChecker) Name() string { + return "lightning_free_space" +} + +// Check implements the RealChecker interface. +func (c *LightningFreeSpaceChecker) Check(ctx context.Context) *Result { + result := &Result{ + Name: c.Name(), + Desc: "check whether the downstream has enough free space to store the data to be migrated", + State: StateFailure, + } + storeInfo, err := c.infoGetter.GetStorageInfo(ctx) + if err != nil { + markCheckError(result, err) + return result + } + clusterAvail := uint64(0) + for _, store := range storeInfo.Stores { + clusterAvail += uint64(store.Status.Available) + } + if clusterAvail < uint64(c.sourceDataSize) { + result.State = StateFailure + result.Errors = append(result.Errors, &Error{ + Severity: StateFailure, + ShortErr: fmt.Sprintf("Downstream doesn't have enough space, available is %s, but we need %s", + units.BytesSize(float64(clusterAvail)), units.BytesSize(float64(c.sourceDataSize))), + }) + result.Instruction = "you can try to scale-out TiKV storage or TiKV instance to gain more storage space" + return result + } + + replConfig, err := c.infoGetter.GetReplicationConfig(ctx) + if err != nil { + markCheckError(result, err) + return result + } + safeSize := uint64(c.sourceDataSize) * replConfig.MaxReplicas * 2 + if clusterAvail < safeSize { + result.State = StateWarning + result.Errors = append(result.Errors, &Error{ + Severity: StateWarning, + ShortErr: fmt.Sprintf("Cluster may not have enough space, available is %s, but we need %s", + units.BytesSize(float64(clusterAvail)), units.BytesSize(float64(safeSize))), + }) + result.Instruction = "you can try to scale-out TiKV storage or TiKV instance to gain more storage space" + return result + } + result.State = StateSuccess + return result +} + +// LightningCDCPiTRChecker checks whether the cluster has running CDC PiTR tasks. +type LightningCDCPiTRChecker struct { + inner precheck.Checker +} + +// NewLightningCDCPiTRChecker creates a new LightningCDCPiTRChecker. +func NewLightningCDCPiTRChecker(lightningChecker precheck.Checker) RealChecker { + c, ok := lightningChecker.(*importer.CDCPITRCheckItem) + if ok { + c.Instruction = "physical import mode is not compatible with them. Please switch to logical import mode then try again." 
+ } else { + log.L().DPanic("lightningChecker is not CDCPITRCheckItem") + } + return &LightningCDCPiTRChecker{inner: lightningChecker} +} + +// Name implements the RealChecker interface. +func (c *LightningCDCPiTRChecker) Name() string { + return "lightning_downstream_mutex_features" +} + +// Check implements the RealChecker interface. +func (c *LightningCDCPiTRChecker) Check(ctx context.Context) *Result { + result := &Result{ + Name: c.Name(), + Desc: "check whether the downstream has tasks incompatible with physical import mode", + State: StateFailure, + } + convertLightningPrecheck( + ctx, + result, + c.inner, + StateFailure, + `you can switch to logical import mode which has no requirements on this`, + ) + return result +} +>>>>>>> 65217445e8 (makefile(all): add new cases to run_group and check cases in makefile (#8794)) diff --git a/dm/pkg/checker/lightning_test.go b/dm/pkg/checker/lightning_test.go index 1c542fd7b17..3e1740fedd9 100644 --- a/dm/pkg/checker/lightning_test.go +++ b/dm/pkg/checker/lightning_test.go @@ -17,7 +17,11 @@ import ( "context" "testing" +<<<<<<< HEAD "github.com/pingcap/tidb/br/pkg/lightning/restore" +======= + "github.com/pingcap/tidb/br/pkg/lightning/precheck" +>>>>>>> 65217445e8 (makefile(all): add new cases to run_group and check cases in makefile (#8794)) "github.com/pingcap/tiflow/pkg/errors" "github.com/stretchr/testify/require" ) @@ -28,17 +32,29 @@ type mockPrecheckItem struct { msg string } +<<<<<<< HEAD func (m mockPrecheckItem) Check(ctx context.Context) (*restore.CheckResult, error) { if m.err != nil { return nil, m.err } return &restore.CheckResult{ +======= +func (m mockPrecheckItem) Check(ctx context.Context) (*precheck.CheckResult, error) { + if m.err != nil { + return nil, m.err + } + return &precheck.CheckResult{ +>>>>>>> 65217445e8 (makefile(all): add new cases to run_group and check cases in makefile (#8794)) Passed: m.pass, Message: m.msg, }, nil } +<<<<<<< HEAD func (m mockPrecheckItem) GetCheckItemID() restore.CheckItemID { +======= +func (m mockPrecheckItem) GetCheckItemID() precheck.CheckItemID { +>>>>>>> 65217445e8 (makefile(all): add new cases to run_group and check cases in makefile (#8794)) return "mock" } diff --git a/dm/tests/lightning_load_task/conf/diff_config1.toml b/dm/tests/lightning_load_task/conf/diff_config1.toml new file mode 100644 index 00000000000..c1b51cdbde0 --- /dev/null +++ b/dm/tests/lightning_load_task/conf/diff_config1.toml @@ -0,0 +1,35 @@ +# diff Configuration. + +check-thread-count = 4 + +export-fix-sql = true + +check-struct-only = false + +[task] + output-dir = "/tmp/ticdc_dm_test/output" + + source-instances = ["mysql1", "mysql2"] + + target-instance = "tidb0" + + target-check-tables = ["load_task1.t?*"] + +[data-sources] +[data-sources.mysql1] +host = "127.0.0.1" +port = 3306 +user = "root" +password = "123456" + +[data-sources.mysql2] +host = "127.0.0.1" +port = 3307 +user = "root" +password = "123456" + +[data-sources.tidb0] +host = "127.0.0.1" +port = 4000 +user = "test" +password = "123456" diff --git a/dm/tests/lightning_load_task/conf/diff_config2.toml b/dm/tests/lightning_load_task/conf/diff_config2.toml new file mode 100644 index 00000000000..ec0038ccd61 --- /dev/null +++ b/dm/tests/lightning_load_task/conf/diff_config2.toml @@ -0,0 +1,35 @@ +# diff Configuration. 
+ +check-thread-count = 4 + +export-fix-sql = true + +check-struct-only = false + +[task] + output-dir = "/tmp/ticdc_dm_test/output" + + source-instances = ["mysql1", "mysql2"] + + target-instance = "tidb0" + + target-check-tables = ["load_task2.t?*"] + +[data-sources] +[data-sources.mysql1] +host = "127.0.0.1" +port = 3306 +user = "root" +password = "123456" + +[data-sources.mysql2] +host = "127.0.0.1" +port = 3307 +user = "root" +password = "123456" + +[data-sources.tidb0] +host = "127.0.0.1" +port = 4000 +user = "test" +password = "123456" diff --git a/dm/tests/lightning_load_task/conf/diff_config3.toml b/dm/tests/lightning_load_task/conf/diff_config3.toml new file mode 100644 index 00000000000..3dc14b45ba5 --- /dev/null +++ b/dm/tests/lightning_load_task/conf/diff_config3.toml @@ -0,0 +1,29 @@ +# diff Configuration. + +check-thread-count = 4 + +export-fix-sql = true + +check-struct-only = false + +[task] + output-dir = "/tmp/ticdc_dm_test/output" + + source-instances = ["mysql1"] + + target-instance = "tidb0" + + target-check-tables = ["load_task3.t?*"] + +[data-sources] +[data-sources.mysql1] +host = "127.0.0.1" +port = 3307 +user = "root" +password = "123456" + +[data-sources.tidb0] +host = "127.0.0.1" +port = 4000 +user = "test" +password = "123456" diff --git a/dm/tests/lightning_load_task/conf/diff_config4.toml b/dm/tests/lightning_load_task/conf/diff_config4.toml new file mode 100644 index 00000000000..33f675f7e1c --- /dev/null +++ b/dm/tests/lightning_load_task/conf/diff_config4.toml @@ -0,0 +1,29 @@ +# diff Configuration. + +check-thread-count = 4 + +export-fix-sql = true + +check-struct-only = false + +[task] + output-dir = "/tmp/ticdc_dm_test/output" + + source-instances = ["mysql1"] + + target-instance = "tidb0" + + target-check-tables = ["load_task4.t?*"] + +[data-sources] +[data-sources.mysql1] +host = "127.0.0.1" +port = 3306 +user = "root" +password = "123456" + +[data-sources.tidb0] +host = "127.0.0.1" +port = 4000 +user = "test" +password = "123456" diff --git a/dm/tests/lightning_load_task/conf/dm-master.toml b/dm/tests/lightning_load_task/conf/dm-master.toml new file mode 100644 index 00000000000..53a294e7d07 --- /dev/null +++ b/dm/tests/lightning_load_task/conf/dm-master.toml @@ -0,0 +1,6 @@ +# Master Configuration. 
+master-addr = ":8261" +advertise-addr = "127.0.0.1:8261" + +rpc-timeout = "30s" +auto-compaction-retention = "3s" diff --git a/dm/tests/lightning_load_task/conf/dm-task-standalone.yaml b/dm/tests/lightning_load_task/conf/dm-task-standalone.yaml new file mode 100644 index 00000000000..0d293423e43 --- /dev/null +++ b/dm/tests/lightning_load_task/conf/dm-task-standalone.yaml @@ -0,0 +1,41 @@ +--- +name: load_task1 +task-mode: all +is-sharding: false +meta-schema: "dm_meta" +heartbeat-update-interval: 1 +heartbeat-report-interval: 1 + +target-database: + host: "127.0.0.1" + port: 4000 + user: "test" + password: "/Q7B9DizNLLTTfiZHv9WoEAKamfpIUs=" + +mysql-instances: + - source-id: "mysql-replica-01" + block-allow-list: "instance" + mydumper-config-name: "global" + loader-config-name: "global" + syncer-config-name: "global" + +block-allow-list: + instance: + do-dbs: ["load_task1"] + +mydumpers: + global: + threads: 4 + chunk-filesize: 64 + skip-tz-utc: true + extra-args: "" + +loaders: + global: + pool-size: 16 + dir: "./dumped_data" + +syncers: + global: + worker-count: 16 + batch: 100 diff --git a/dm/tests/lightning_load_task/conf/dm-task.yaml b/dm/tests/lightning_load_task/conf/dm-task.yaml index a8f2e93d964..32cacf0379a 100644 --- a/dm/tests/lightning_load_task/conf/dm-task.yaml +++ b/dm/tests/lightning_load_task/conf/dm-task.yaml @@ -40,7 +40,6 @@ loaders: global: pool-size: 16 dir: "./dumped_data" - import-mode: "sql" syncers: global: diff --git a/dm/tests/lightning_load_task/conf/dm-task2-standalone.yaml b/dm/tests/lightning_load_task/conf/dm-task2-standalone.yaml new file mode 100644 index 00000000000..bc98e4efac3 --- /dev/null +++ b/dm/tests/lightning_load_task/conf/dm-task2-standalone.yaml @@ -0,0 +1,41 @@ +--- +name: load_task2 +task-mode: all +is-sharding: false +meta-schema: "dm_meta" +heartbeat-update-interval: 1 +heartbeat-report-interval: 1 + +target-database: + host: "127.0.0.1" + port: 4000 + user: "test" + password: "/Q7B9DizNLLTTfiZHv9WoEAKamfpIUs=" + +mysql-instances: + - source-id: "mysql-replica-01" + block-allow-list: "instance" + mydumper-config-name: "global" + loader-config-name: "global" + syncer-config-name: "global" + +block-allow-list: + instance: + do-dbs: ["load_task2"] + +mydumpers: + global: + threads: 4 + chunk-filesize: 64 + skip-tz-utc: true + extra-args: "" + +loaders: + global: + pool-size: 16 + dir: "./dumped_data" + +syncers: + global: + worker-count: 16 + batch: 100 diff --git a/dm/tests/lightning_load_task/conf/dm-worker1.toml b/dm/tests/lightning_load_task/conf/dm-worker1.toml new file mode 100644 index 00000000000..7a72ea72bf8 --- /dev/null +++ b/dm/tests/lightning_load_task/conf/dm-worker1.toml @@ -0,0 +1,2 @@ +name = "worker1" +join = "127.0.0.1:8261" diff --git a/dm/tests/lightning_load_task/conf/dm-worker2.toml b/dm/tests/lightning_load_task/conf/dm-worker2.toml new file mode 100644 index 00000000000..010e21c73eb --- /dev/null +++ b/dm/tests/lightning_load_task/conf/dm-worker2.toml @@ -0,0 +1,2 @@ +name = "worker2" +join = "127.0.0.1:8261" diff --git a/dm/tests/lightning_load_task/conf/dm-worker3.toml b/dm/tests/lightning_load_task/conf/dm-worker3.toml new file mode 100644 index 00000000000..ab7e1b9cb32 --- /dev/null +++ b/dm/tests/lightning_load_task/conf/dm-worker3.toml @@ -0,0 +1,2 @@ +name = "worker3" +join = "127.0.0.1:8261" diff --git a/dm/tests/lightning_load_task/conf/source1.yaml b/dm/tests/lightning_load_task/conf/source1.yaml new file mode 100644 index 00000000000..c2b659d3fba --- /dev/null +++ 
b/dm/tests/lightning_load_task/conf/source1.yaml @@ -0,0 +1,13 @@ +source-id: mysql-replica-01 +flavor: '' +enable-gtid: true +enable-relay: false +from: + host: 127.0.0.1 + user: root + password: /Q7B9DizNLLTTfiZHv9WoEAKamfpIUs= + port: 3306 +checker: + check-enable: true + backoff-rollback: 5m + backoff-max: 5m diff --git a/dm/tests/lightning_load_task/conf/source2.yaml b/dm/tests/lightning_load_task/conf/source2.yaml new file mode 100644 index 00000000000..fb1985ca354 --- /dev/null +++ b/dm/tests/lightning_load_task/conf/source2.yaml @@ -0,0 +1,9 @@ +source-id: mysql-replica-02 +flavor: '' +enable-gtid: false +enable-relay: false +from: + host: 127.0.0.1 + user: root + password: /Q7B9DizNLLTTfiZHv9WoEAKamfpIUs= + port: 3307 diff --git a/dm/tests/lightning_load_task/data/db1.increment.sql b/dm/tests/lightning_load_task/data/db1.increment.sql new file mode 100644 index 00000000000..8db36ca7875 --- /dev/null +++ b/dm/tests/lightning_load_task/data/db1.increment.sql @@ -0,0 +1,11 @@ +use load_task1; +insert into t1 (id, name) values (3, 'Eddard Stark'); +insert into t1 (id, name) values (4, 'haha'); + +use load_task2; +insert into t1 (id, name) values (3, 'Eddard Stark'); +insert into t1 (id, name) values (4, 'haha'); + +use load_task4; +insert into t1 (id, name) values (3, 'Eddard Stark'); +insert into t1 (id, name) values (4, 'haha'); \ No newline at end of file diff --git a/dm/tests/lightning_load_task/data/db1.prepare.sql b/dm/tests/lightning_load_task/data/db1.prepare.sql new file mode 100644 index 00000000000..5a6eec15812 --- /dev/null +++ b/dm/tests/lightning_load_task/data/db1.prepare.sql @@ -0,0 +1,17 @@ +drop database if exists `load_task1`; +create database `load_task1`; +use `load_task1`; +create table t1 (id int, name varchar(20), primary key(`id`)); +insert into t1 (id, name) values (1, 'arya'), (2, 'catelyn'); + +drop database if exists `load_task2`; +create database `load_task2`; +use `load_task2`; +create table t1 (id int, name varchar(20), primary key(`id`)); +insert into t1 (id, name) values (1, 'arya'), (2, 'catelyn'); + +drop database if exists `load_task4`; +create database `load_task4`; +use `load_task4`; +create table t1 (id int, name varchar(20), primary key(`id`)); +insert into t1 (id, name) values (1, 'arya'), (2, 'catelyn'); diff --git a/dm/tests/lightning_load_task/data/db2.increment.sql b/dm/tests/lightning_load_task/data/db2.increment.sql new file mode 100644 index 00000000000..45a9dca6778 --- /dev/null +++ b/dm/tests/lightning_load_task/data/db2.increment.sql @@ -0,0 +1,8 @@ +use load_task1; +delete from t2 where name = 'Sansa'; + +use load_task2; +delete from t2 where name = 'Sansa'; + +use load_task3; +delete from t2 where name = 'Sansa'; diff --git a/dm/tests/lightning_load_task/data/db2.prepare.sql b/dm/tests/lightning_load_task/data/db2.prepare.sql new file mode 100644 index 00000000000..d23bd3500f0 --- /dev/null +++ b/dm/tests/lightning_load_task/data/db2.prepare.sql @@ -0,0 +1,17 @@ +drop database if exists `load_task1`; +create database `load_task1`; +use `load_task1`; +create table t2 (id int auto_increment, name varchar(20), primary key (`id`)); +insert into t2 (name) values ('Arya'), ('Bran'), ('Sansa'); + +drop database if exists `load_task2`; +create database `load_task2`; +use `load_task2`; +create table t2 (id int auto_increment, name varchar(20), primary key (`id`)); +insert into t2 (name) values ('Arya'), ('Bran'), ('Sansa'); + +drop database if exists `load_task3`; +create database `load_task3`; +use `load_task3`; +create table t2 (id int 
auto_increment, name varchar(20), primary key (`id`)); +insert into t2 (name) values ('Arya'), ('Bran'), ('Sansa'); diff --git a/dm/tests/lightning_load_task/run.sh b/dm/tests/lightning_load_task/run.sh index a40eb188fe0..7c3404b1bbc 100755 --- a/dm/tests/lightning_load_task/run.sh +++ b/dm/tests/lightning_load_task/run.sh @@ -34,7 +34,7 @@ function test_worker_restart() { # worker1 online export GO_FAILPOINTS="github.com/pingcap/tiflow/dm/loader/LoadDataSlowDownByTask=return(\"load_task1\")" - run_dm_worker $WORK_DIR/worker1 $WORKER1_PORT $CONF_DIR/dm-worker1.toml + run_dm_worker $WORK_DIR/worker1 $WORKER1_PORT $cur/conf/dm-worker1.toml check_rpc_alive $cur/../bin/check_worker_online 127.0.0.1:$WORKER1_PORT # transfer to worker1 @@ -83,7 +83,7 @@ function test_transfer_two_sources() { # worker2 online export GO_FAILPOINTS="github.com/pingcap/tiflow/dm/loader/LoadDataSlowDown=sleep(15000)" - run_dm_worker $WORK_DIR/worker2 $WORKER2_PORT $CONF_DIR/dm-worker2.toml + run_dm_worker $WORK_DIR/worker2 $WORKER2_PORT $cur/conf/dm-worker2.toml check_rpc_alive $cur/../bin/check_worker_online 127.0.0.1:$WORKER2_PORT # worker2 free since (worker3, source2) has load task(load_task3) @@ -111,7 +111,7 @@ function test_transfer_two_sources() { # worker1 online export GO_FAILPOINTS="" - run_dm_worker $WORK_DIR/worker1 $WORKER1_PORT $CONF_DIR/dm-worker1.toml + run_dm_worker $WORK_DIR/worker1 $WORKER1_PORT $cur/conf/dm-worker1.toml check_rpc_alive $cur/../bin/check_worker_online 127.0.0.1:$WORKER1_PORT # worker1 free since (worker2, source1) has load task(load_task4) @@ -157,7 +157,7 @@ function test_transfer_two_sources() { # worker3 online export GO_FAILPOINTS="" - run_dm_worker $WORK_DIR/worker3 $WORKER3_PORT $CONF_DIR/dm-worker3.toml + run_dm_worker $WORK_DIR/worker3 $WORKER3_PORT $cur/conf/dm-worker3.toml check_rpc_alive $cur/../bin/check_worker_online 127.0.0.1:$WORKER3_PORT # source2 bound to worker3 since load_task3 @@ -174,34 +174,34 @@ function test_transfer_two_sources() { function run() { echo "import prepare data" - run_sql_file $DATA_DIR/db1.prepare.sql $MYSQL_HOST1 $MYSQL_PORT1 $MYSQL_PASSWORD1 + run_sql_file $cur/data/db1.prepare.sql $MYSQL_HOST1 $MYSQL_PORT1 $MYSQL_PASSWORD1 check_contains 'Query OK, 2 rows affected' - run_sql_file $DATA_DIR/db2.prepare.sql $MYSQL_HOST2 $MYSQL_PORT2 $MYSQL_PASSWORD2 + run_sql_file $cur/data/db2.prepare.sql $MYSQL_HOST2 $MYSQL_PORT2 $MYSQL_PASSWORD2 check_contains 'Query OK, 3 rows affected' echo "start DM master, workers and sources" - run_dm_master $WORK_DIR/master $MASTER_PORT1 $CONF_DIR/dm-master.toml + run_dm_master $WORK_DIR/master $MASTER_PORT1 $cur/conf/dm-master.toml check_rpc_alive $cur/../bin/check_master_online 127.0.0.1:$MASTER_PORT1 # worker1 loading load_task1 export GO_FAILPOINTS="github.com/pingcap/tiflow/dm/loader/LoadDataSlowDownByTask=return(\"load_task1\")" - run_dm_worker $WORK_DIR/worker1 $WORKER1_PORT $CONF_DIR/dm-worker1.toml + run_dm_worker $WORK_DIR/worker1 $WORKER1_PORT $cur/conf/dm-worker1.toml check_rpc_alive $cur/../bin/check_worker_online 127.0.0.1:$WORKER1_PORT - cp $CONF_DIR/source1.yaml $WORK_DIR/source1.yaml + cp $cur/conf/source1.yaml $WORK_DIR/source1.yaml sed -i "/relay-binlog-name/i\relay-dir: $WORK_DIR/worker1/relay_log" $WORK_DIR/source1.yaml dmctl_operate_source create $WORK_DIR/source1.yaml $SOURCE_ID1 # worker2 loading load_task2 export GO_FAILPOINTS="github.com/pingcap/tiflow/dm/loader/LoadDataSlowDownByTask=return(\"load_task2\")" - run_dm_worker $WORK_DIR/worker2 $WORKER2_PORT $CONF_DIR/dm-worker2.toml + 
run_dm_worker $WORK_DIR/worker2 $WORKER2_PORT $cur/conf/dm-worker2.toml check_rpc_alive $cur/../bin/check_worker_online 127.0.0.1:$WORKER2_PORT - cp $CONF_DIR/source2.yaml $WORK_DIR/source2.yaml + cp $cur/conf/source2.yaml $WORK_DIR/source2.yaml sed -i "/relay-binlog-name/i\relay-dir: $WORK_DIR/worker2/relay_log" $WORK_DIR/source2.yaml dmctl_operate_source create $WORK_DIR/source2.yaml $SOURCE_ID2 # worker3 loading load_task3 export GO_FAILPOINTS="github.com/pingcap/tiflow/dm/loader/LoadDataSlowDownByTask=return(\"load_task3\")" - run_dm_worker $WORK_DIR/worker3 $WORKER3_PORT $CONF_DIR/dm-worker3.toml + run_dm_worker $WORK_DIR/worker3 $WORKER3_PORT $cur/conf/dm-worker3.toml check_rpc_alive $cur/../bin/check_worker_online 127.0.0.1:$WORKER3_PORT echo "start DM task" @@ -223,12 +223,12 @@ function run() { test_transfer_two_sources - run_sql_file $DATA_DIR/db1.increment.sql $MYSQL_HOST1 $MYSQL_PORT1 $MYSQL_PASSWORD1 - run_sql_file $DATA_DIR/db2.increment.sql $MYSQL_HOST2 $MYSQL_PORT2 $MYSQL_PASSWORD2 - check_sync_diff $WORK_DIR $CONF_DIR/diff_config1.toml - check_sync_diff $WORK_DIR $CONF_DIR/diff_config2.toml - check_sync_diff $WORK_DIR $CONF_DIR/diff_config3.toml - check_sync_diff $WORK_DIR $CONF_DIR/diff_config4.toml + run_sql_file $cur/data/db1.increment.sql $MYSQL_HOST1 $MYSQL_PORT1 $MYSQL_PASSWORD1 + run_sql_file $cur/data/db2.increment.sql $MYSQL_HOST2 $MYSQL_PORT2 $MYSQL_PASSWORD2 + check_sync_diff $WORK_DIR $cur/conf/diff_config1.toml + check_sync_diff $WORK_DIR $cur/conf/diff_config2.toml + check_sync_diff $WORK_DIR $cur/conf/diff_config3.toml + check_sync_diff $WORK_DIR $cur/conf/diff_config4.toml } cleanup_data load_task1 diff --git a/dm/tests/run.sh b/dm/tests/run.sh index 2924662ce35..e7f1b0d2f33 100755 --- a/dm/tests/run.sh +++ b/dm/tests/run.sh @@ -74,17 +74,6 @@ if [ "$test_case" == "*" ]; then should_run=1 elif [ "$test_case" == "compatibility" ]; then should_run=1 -elif [ "$test_case" == "others" ]; then - test_case=$(cat $CUR/others_integration_1.txt) - should_run=1 -elif [ "$test_case" == "others_2" ]; then - test_case=$(cat $CUR/others_integration_2.txt) - should_run=1 -elif [ "$test_case" == "others_3" ]; then - test_case=$(cat $CUR/others_integration_3.txt) - if [ ! -z "$test_case" ]; then - should_run=1 - fi else exist_case="" for one_case in $test_case; do diff --git a/dm/tests/run_group.sh b/dm/tests/run_group.sh new file mode 100755 index 00000000000..4e2be79e1de --- /dev/null +++ b/dm/tests/run_group.sh @@ -0,0 +1,59 @@ +#!/bin/bash + +set -eo pipefail + +CUR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd) + +group=$1 + +# Define groups +# Note: If new group is added, the group name must also be added to CI +# https://github.com/PingCAP-QE/ci/blob/main/pipelines/pingcap/tiflow/latest/pull_dm_integration_test.groovy +# Each group of tests consumes as much time as possible, thus reducing CI waiting time. +# Putting multiple light tests together and heavy tests in a separate group. 
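+# Usage: run_group.sh <group name>
+#   e.g. `./dm/tests/run_group.sh G00` runs every case listed in group G00, while
+#   `./dm/tests/run_group.sh "check others"` (invoked by the Makefile's check_dm_integration_test
+#   target) only verifies that every test case directory is assigned to some group.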
+declare -A groups
+groups=(
+	["G00"]="ha_cases_1 ha_cases_2 ha_cases2"
+	["G01"]="ha_cases3 ha_cases3_1 ha_master"
+	["G02"]="handle_error handle_error_2 handle_error_3"
+	["G03"]="dmctl_advance dmctl_basic dmctl_command"
+	["G04"]="import_goroutine_leak incremental_mode initial_unit"
+	["G05"]="load_interrupt many_tables online_ddl"
+	["G06"]="relay_interrupt safe_mode sequence_safe_mode"
+	["G07"]="shardddl1 shardddl1_1 shardddl2 shardddl2_1"
+	["G08"]="shardddl3 shardddl3_1 shardddl4 shardddl4_1 sharding sequence_sharding"
+	["G09"]="import_v10x sharding2 ha"
+	["G10"]="start_task print_status http_apis new_relay all_mode"
+	# `others others_2 others_3` tests of old pipeline
+	["G11"]="adjust_gtid async_checkpoint_flush binlog_parse case_sensitive checkpoint_transaction check_task dm_syncer downstream_diff_index downstream_more_column drop_column_with_index duplicate_event expression_filter extend_column fake_rotate_event foreign_key full_mode gbk gtid ha_cases http_proxies lightning_load_task lightning_mode metrics new_collation_off only_dml openapi s3_dumpling_lightning sequence_sharding_optimistic sequence_sharding_removemeta shardddl_optimistic slow_relay_writer sql_mode sync_collation tracker_ignored_ddl validator_basic validator_checkpoint"
+	["TLS_GROUP"]="tls"
+)
+
+# Get other cases not in groups, to avoid missing any case
+others=()
+for script in "$CUR"/*/run.sh; do
+	test_name="$(basename "$(dirname "$script")")"
+	# shellcheck disable=SC2076
+	if [[ ! " ${groups[*]} " =~ " ${test_name} " ]]; then
+		others=("${others[@]} ${test_name}")
+	fi
+done
+
+if [[ "$group" == "check others" ]]; then
+	if [[ -z $others ]]; then
+		echo "All DM integration test cases are added to groups"
+		exit 0
+	fi
+	echo "Error: "$others" is not added to any group in dm/tests/run_group.sh"
+	exit 1
+elif [[ " ${!groups[*]} " =~ " ${group} " ]]; then
+	test_names="${groups[${group}]}"
+	# Run test cases
+	if [[ -n $test_names ]]; then
+		echo "Run cases: ${test_names}"
+		"${CUR}"/run.sh "${test_names}"
+	fi
+else
+	echo "Error: invalid group name: ${group}"
+	exit 1
+fi
diff --git a/engine/test/integration_tests/run_group.sh b/engine/test/integration_tests/run_group.sh
new file mode 100755
index 00000000000..0ace75b471a
--- /dev/null
+++ b/engine/test/integration_tests/run_group.sh
@@ -0,0 +1,52 @@
+#!/bin/bash
+
+set -eo pipefail
+
+CUR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
+
+group=$1
+
+# Define groups
+# Note: If new group is added, the group name must also be added to CI
+# https://github.com/PingCAP-QE/ci/blob/main/pipelines/pingcap/tiflow/latest/pull_engine_integration_test.groovy
+# Each group of tests consumes as much time as possible, thus reducing CI waiting time.
+# Putting multiple light tests together and heavy tests in a separate group.
+declare -A groups
+groups=(
+	["G00"]='dm_basic dm_case_sensitive dm_collation'
+	["G01"]='dm_full_mode dm_lightning_checkpoint dm_many_tables'
+	["G02"]="dm_many_tables_local dm_new_collation_off dm_sql_mode"
+	["G03"]="dm_tls e2e_fast_finished e2e_node_failure e2e_with_selectors"
+	["G04"]="e2e_worker_error external_resource"
+)
+
+# Get other cases not in groups, to avoid missing any case
+others=()
+for script in "$CUR"/*/run.sh; do
+	test_name="$(basename "$(dirname "$script")")"
+	# shellcheck disable=SC2076
+	if [[ ! 
" ${groups[*]} " =~ " ${test_name} " ]]; then + others=("${others[@]} ${test_name}") + fi +done + +if [[ "$group" == "check others" ]]; then + if [[ -z $others ]]; then + echo "All engine integration test cases are added to groups" + exit 0 + fi + echo "Error: "$others" is not added to any group in engine/test/integration_tests/run_group.sh" + exit 1 +elif [[ " ${!groups[*]} " =~ " ${group} " ]]; then + test_names="${groups[${group}]}" + # Run test cases + if [[ -n $test_names ]]; then + echo "Run cases: ${test_names}" + mkdir -p /tmp/tiflow_engine_test + "${CUR}"/run.sh "${test_names}" 2>&1 | tee /tmp/tiflow_engine_test/engine_it.log + ./engine/test/utils/check_log.sh + fi +else + echo "Error: invalid group name: ${group}" + exit 1 +fi diff --git a/go.mod b/go.mod index fc7e3e0fe41..0d3692301c4 100644 --- a/go.mod +++ b/go.mod @@ -56,6 +56,7 @@ require ( github.com/modern-go/reflect2 v1.0.2 github.com/phayes/freeport v0.0.0-20180830031419-95f893ade6f2 github.com/pingcap/check v0.0.0-20211026125417-57bd13f7b5f0 +<<<<<<< HEAD github.com/pingcap/errors v0.11.5-0.20220729040631-518f63d66278 github.com/pingcap/failpoint v0.0.0-20220423142525-ae43b7f4e5c3 github.com/pingcap/kvproto v0.0.0-20221130022225-6c56ac56fe5f @@ -65,6 +66,17 @@ require ( github.com/pingcap/tidb/parser v0.0.0-20230412065101-c24a1cda6d26 github.com/prometheus/client_golang v1.13.0 github.com/prometheus/client_model v0.2.0 +======= + github.com/pingcap/errors v0.11.5-0.20221009092201-b66cddb77c32 + github.com/pingcap/failpoint v0.0.0-20220801062533-2eaa32854a6c + github.com/pingcap/kvproto v0.0.0-20230419072653-dc3cd8784a19 + github.com/pingcap/log v1.1.1-0.20230317032135-a0d097d16e22 + github.com/pingcap/tidb v1.1.0-beta.0.20230420065519-eb77d3928398 + github.com/pingcap/tidb-tools v7.0.0+incompatible + github.com/pingcap/tidb/parser v0.0.0-20230420065519-eb77d3928398 + github.com/prometheus/client_golang v1.15.0 + github.com/prometheus/client_model v0.3.0 +>>>>>>> 65217445e8 (makefile(all): add new cases to run_group and check cases in makefile (#8794)) github.com/r3labs/diff v1.1.0 github.com/rcrowley/go-metrics v0.0.0-20201227073835-cf1acfcdf475 github.com/shirou/gopsutil/v3 v3.23.1 @@ -77,9 +89,15 @@ require ( github.com/swaggo/gin-swagger v1.2.0 github.com/swaggo/swag v1.6.6-0.20200529100950-7c765ddd0476 github.com/syndtr/goleveldb v1.0.1-0.20210305035536-64b5b1c73954 +<<<<<<< HEAD github.com/tikv/client-go/v2 v2.0.4-0.20230131081004-cd83d1507d70 github.com/tikv/pd v1.1.0-beta.0.20220303060546-3695d8164800 github.com/tikv/pd/client v0.0.0-20221031025758-80f0d8ca4d07 +======= + github.com/tikv/client-go/v2 v2.0.8-0.20230419123920-35c1ee47c4f9 + github.com/tikv/pd v1.1.0-beta.0.20230203015356-248b3f0be132 + github.com/tikv/pd/client v0.0.0-20230419153320-f1d1a80feb95 +>>>>>>> 65217445e8 (makefile(all): add new cases to run_group and check cases in makefile (#8794)) github.com/tinylib/msgp v1.1.6 github.com/uber-go/atomic v1.4.0 github.com/vmihailenco/msgpack/v5 v5.3.5 @@ -100,12 +118,21 @@ require ( golang.org/x/exp v0.0.0-20221023144134-a1e5550cf13e golang.org/x/net v0.2.0 golang.org/x/sync v0.1.0 +<<<<<<< HEAD golang.org/x/sys v0.4.0 golang.org/x/text v0.4.0 golang.org/x/time v0.2.0 google.golang.org/genproto v0.0.0-20221201164419-0e50fba7f41c google.golang.org/grpc v1.50.1 google.golang.org/protobuf v1.28.1 +======= + golang.org/x/sys v0.7.0 + golang.org/x/text v0.9.0 + golang.org/x/time v0.3.0 + google.golang.org/genproto v0.0.0-20230410155749-daa745c078e1 + google.golang.org/grpc v1.54.0 + 
google.golang.org/protobuf v1.30.0 +>>>>>>> 65217445e8 (makefile(all): add new cases to run_group and check cases in makefile (#8794)) gopkg.in/yaml.v2 v2.4.0 gorm.io/driver/mysql v1.3.3 gorm.io/gorm v1.23.8 @@ -195,7 +222,7 @@ require ( github.com/json-iterator/go v1.1.12 // indirect github.com/klauspost/compress v1.15.9 // indirect github.com/klauspost/cpuid v1.3.1 // indirect - github.com/kr/pretty v0.3.0 // indirect + github.com/kr/pretty v0.3.1 // indirect github.com/kr/text v0.2.0 // indirect github.com/labstack/echo/v4 v4.2.1 // indirect github.com/leodido/go-urn v1.2.1 // indirect @@ -228,9 +255,15 @@ require ( github.com/pkg/browser v0.0.0-20180916011732-0a3d74bf9ce4 // indirect github.com/pkg/errors v0.9.1 // indirect github.com/pmezard/go-difflib v1.0.0 // indirect +<<<<<<< HEAD github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c // indirect github.com/prometheus/common v0.37.0 // indirect github.com/prometheus/procfs v0.8.0 // indirect +======= + github.com/power-devops/perfstat v0.0.0-20221212215047-62379fc7944b // indirect + github.com/prometheus/common v0.42.0 // indirect + github.com/prometheus/procfs v0.9.0 // indirect +>>>>>>> 65217445e8 (makefile(all): add new cases to run_group and check cases in makefile (#8794)) github.com/remyoudompheng/bigfft v0.0.0-20220927061507-ef77025ab5aa // indirect github.com/rivo/uniseg v0.4.2 // indirect github.com/rogpeppe/go-internal v1.9.0 // indirect diff --git a/go.sum b/go.sum index d1f586c8965..511bb553f35 100644 --- a/go.sum +++ b/go.sum @@ -125,8 +125,8 @@ github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuy github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= github.com/alecthomas/units v0.0.0-20190717042225-c3de453c63f4/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= -github.com/alecthomas/units v0.0.0-20190924025748-f65c72e2690d h1:UQZhZ2O0vMHr2cI+DC1Mbh0TJxzA3RcLoMsFw+aXw7E= github.com/alecthomas/units v0.0.0-20190924025748-f65c72e2690d/go.mod h1:rBZYJk541a8SKzHPHnH3zbiI+7dagKZ0cgpgrD7Fyho= +github.com/alecthomas/units v0.0.0-20211218093645-b94a6e3cc137 h1:s6gZFSlWYmbqAuRjVTiNNhvNRfY2Wxp9nhfyel4rklc= github.com/aliyun/alibaba-cloud-sdk-go v1.61.1581 h1:Q/yk4z/cHUVZfgTqtD09qeYBxHwshQAjVRX73qs8UH0= github.com/aliyun/alibaba-cloud-sdk-go v1.61.1581/go.mod h1:RcDobYh8k5VP6TNybz9m++gL3ijVI5wueVr0EM10VsU= github.com/alvaroloes/enumer v1.1.2/go.mod h1:FxrjvuXoDAx9isTJrv4c+T410zFi0DtXIT0m65DJ+Wo= @@ -781,8 +781,9 @@ github.com/konsorten/go-windows-terminal-sequences v1.0.3/go.mod h1:T0+1ngSBFLxv github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc= github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= -github.com/kr/pretty v0.3.0 h1:WgNl7dwNpEZ6jJ9k1snq4pZsg7DOEN8hP9Xw0Tsjwk0= github.com/kr/pretty v0.3.0/go.mod h1:640gp4NfQd8pI5XOwp5fnNeVWj67G7CFk/SaSQn7NBk= +github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= +github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk= github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= github.com/kr/pty v1.1.5/go.mod h1:9r2w37qlBe7rQ6e1fg1S/9xpWHSnaqNdHD3WcMdbPDA= github.com/kr/pty v1.1.8/go.mod 
h1:O1sed60cT9XZ5uDucP5qwvh+TE3NnUj51EiZO/lmSfw= @@ -997,9 +998,14 @@ github.com/pingcap/kvproto v0.0.0-20220302110454-c696585a961b/go.mod h1:IOdRDPLy github.com/pingcap/kvproto v0.0.0-20220304032058-ccd676426a27/go.mod h1:IOdRDPLyda8GX2hE/jO7gqaCV/PNFh8BZQCQZXfIOqI= github.com/pingcap/kvproto v0.0.0-20220328072018-6e75c12dbd73/go.mod h1:IOdRDPLyda8GX2hE/jO7gqaCV/PNFh8BZQCQZXfIOqI= github.com/pingcap/kvproto v0.0.0-20220429093005-2839fa5a1ed6/go.mod h1:OYtxs0786qojVTmkVeufx93xe+jUgm56GUYRIKnmaGI= +<<<<<<< HEAD github.com/pingcap/kvproto v0.0.0-20221026112947-f8d61344b172/go.mod h1:OYtxs0786qojVTmkVeufx93xe+jUgm56GUYRIKnmaGI= github.com/pingcap/kvproto v0.0.0-20221130022225-6c56ac56fe5f h1:46ZD6xzQWJ8Jkeal/U7SqkX030Mgs8DAn6QV/9zbqOQ= github.com/pingcap/kvproto v0.0.0-20221130022225-6c56ac56fe5f/go.mod h1:OYtxs0786qojVTmkVeufx93xe+jUgm56GUYRIKnmaGI= +======= +github.com/pingcap/kvproto v0.0.0-20230419072653-dc3cd8784a19 h1:aGROoQpU8Sx9MhCspeSrDXpNkW1pcG+EWdMYxg4d5uo= +github.com/pingcap/kvproto v0.0.0-20230419072653-dc3cd8784a19/go.mod h1:guCyM5N+o+ru0TsoZ1hi9lDjUMs2sIBjW3ARTEpVbnk= +>>>>>>> 65217445e8 (makefile(all): add new cases to run_group and check cases in makefile (#8794)) github.com/pingcap/log v0.0.0-20191012051959-b742a5d432e9/go.mod h1:4rbK1p9ILyIfb6hU7OG2CiWSqMXnp3JMbiaVJ6mvoY8= github.com/pingcap/log v0.0.0-20200511115504-543df19646ad/go.mod h1:4rbK1p9ILyIfb6hU7OG2CiWSqMXnp3JMbiaVJ6mvoY8= github.com/pingcap/log v0.0.0-20210625125904-98ed8e2eb1c7/go.mod h1:8AanEdAHATuRurdGxZXBz0At+9avep+ub7U1AGYLIMM= @@ -1013,6 +1019,7 @@ github.com/pingcap/sysutil v0.0.0-20211208032423-041a72e5860d/go.mod h1:7j18ezaW github.com/pingcap/sysutil v0.0.0-20220114020952-ea68d2dbf5b4 h1:HYbcxtnkN3s5tqrZ/z3eJS4j3Db8wMphEm1q10lY/TM= github.com/pingcap/sysutil v0.0.0-20220114020952-ea68d2dbf5b4/go.mod h1:sDCsM39cGiv2vwunZkaFA917vVkqDTGSPbbV7z4Oops= github.com/pingcap/tidb v1.1.0-beta.0.20220511160835-98c31070d958/go.mod h1:luW4sIZoLHY3bCWuKqyqk2QgMvF+/M7nWOXf/me0+fY= +<<<<<<< HEAD github.com/pingcap/tidb v1.1.0-beta.0.20230412065101-c24a1cda6d26 h1:lTw8Ly3BBXx2C5SrWP5KnYRIRH89giRlaw78gJl1+DY= github.com/pingcap/tidb v1.1.0-beta.0.20230412065101-c24a1cda6d26/go.mod h1:RxVes5DVgeIFmijfzT/Q6QAG7FVY3kIWgS6I5zxiFLY= github.com/pingcap/tidb-dashboard v0.0.0-20220117082709-e8076b5c79ba/go.mod h1:4hk/3owVGWdvI9Kx6yCqqvM1T5PVgwyQNyMQxD3rwfc= @@ -1023,6 +1030,17 @@ github.com/pingcap/tidb/parser v0.0.0-20220511160835-98c31070d958/go.mod h1:ElJi github.com/pingcap/tidb/parser v0.0.0-20221126021158-6b02a5d8ba7d/go.mod h1:ElJiub4lRy6UZDb+0JHDkGEdr6aOli+ykhyej7VCLoI= github.com/pingcap/tidb/parser v0.0.0-20230412065101-c24a1cda6d26 h1:6ORFxj278f5AGRT+dOyCdKdLtOBNaAWIjQo1yHLo/Yo= github.com/pingcap/tidb/parser v0.0.0-20230412065101-c24a1cda6d26/go.mod h1:wjvp+T3/T9XYt0nKqGX3Kc1AKuyUcfno6LTc6b2A6ew= +======= +github.com/pingcap/tidb v1.1.0-beta.0.20230420065519-eb77d3928398 h1:sv/Sj6LAZRgZThCnbvZgIJuk2WBxCXbOEJnlr7Ng/Zw= +github.com/pingcap/tidb v1.1.0-beta.0.20230420065519-eb77d3928398/go.mod h1:qLNiZTfvdd77/tRcUWs29ieRsNQg2lFeO9F5AWnS7hw= +github.com/pingcap/tidb-tools v7.0.0+incompatible h1:CHjAva2ON13HZAB0HRNI69fC/1AzfQBkzDE31Rh6NSg= +github.com/pingcap/tidb-tools v7.0.0+incompatible/go.mod h1:XGdcy9+yqlDSEMTpOXnwf3hiTeqrV6MN/u1se9N8yIM= +github.com/pingcap/tidb/parser v0.0.0-20211011031125-9b13dc409c5e/go.mod h1:e1MGCA9Sg3T8jid8PKAEq5eYVuMMCq4n8gJ+Kqp4Plg= +github.com/pingcap/tidb/parser v0.0.0-20220511160835-98c31070d958/go.mod h1:ElJiub4lRy6UZDb+0JHDkGEdr6aOli+ykhyej7VCLoI= +github.com/pingcap/tidb/parser 
v0.0.0-20221126021158-6b02a5d8ba7d/go.mod h1:ElJiub4lRy6UZDb+0JHDkGEdr6aOli+ykhyej7VCLoI= +github.com/pingcap/tidb/parser v0.0.0-20230420065519-eb77d3928398 h1:QrQU2/pj2pd3uWTVTzwcymp9K2kWAg0uiNhqu//VhEM= +github.com/pingcap/tidb/parser v0.0.0-20230420065519-eb77d3928398/go.mod h1:R0xUtp5gJK/Xtb+PIvR3Wh/Ayvmorwk0nzT4p3HLZJk= +>>>>>>> 65217445e8 (makefile(all): add new cases to run_group and check cases in makefile (#8794)) github.com/pingcap/tipb v0.0.0-20220215045658-d12dec7a7609/go.mod h1:A7mrd7WHBl1o63LE2bIBGEJMTNWXqhgmYiOvMLxozfs= github.com/pingcap/tipb v0.0.0-20221123081521-2fb828910813 h1:DbmCfCbcavo0JG+gSp0ySvv1ub/c/j3hsnYzyYPzONo= github.com/pingcap/tipb v0.0.0-20221123081521-2fb828910813/go.mod h1:A7mrd7WHBl1o63LE2bIBGEJMTNWXqhgmYiOvMLxozfs= @@ -1049,8 +1067,13 @@ github.com/prometheus/client_golang v1.7.1/go.mod h1:PY5Wy2awLA44sXw4AOSfFBetzPP github.com/prometheus/client_golang v1.11.0/go.mod h1:Z6t4BnS23TR94PD6BsDNk8yVqroYurpAkEiz0P2BEV0= github.com/prometheus/client_golang v1.11.1/go.mod h1:Z6t4BnS23TR94PD6BsDNk8yVqroYurpAkEiz0P2BEV0= github.com/prometheus/client_golang v1.12.1/go.mod h1:3Z9XVyYiZYEO+YQWt3RD2R3jrbd179Rt297l4aS6nDY= +<<<<<<< HEAD github.com/prometheus/client_golang v1.13.0 h1:b71QUfeo5M8gq2+evJdTPfZhYMAU0uKPkyPJ7TPsloU= github.com/prometheus/client_golang v1.13.0/go.mod h1:vTeo+zgvILHsnnj/39Ou/1fPN5nJFOEMgftOUOmlvYQ= +======= +github.com/prometheus/client_golang v1.15.0 h1:5fCgGYogn0hFdhyhLbw7hEsWxufKtY9klyvdNfFlFhM= +github.com/prometheus/client_golang v1.15.0/go.mod h1:e9yaBhRPU2pPNsZwE+JdQl0KEt1N9XgF6zxWmaC0xOk= +>>>>>>> 65217445e8 (makefile(all): add new cases to run_group and check cases in makefile (#8794)) github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo= github.com/prometheus/client_model v0.0.0-20190129233127-fd36f4220a90/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= @@ -1064,8 +1087,13 @@ github.com/prometheus/common v0.6.0/go.mod h1:eBmuwkDJBwy6iBfxCBob6t6dR6ENT/y+J+ github.com/prometheus/common v0.10.0/go.mod h1:Tlit/dnDKsSWFlCLTWaA1cyBgKHSMdTB80sz/V91rCo= github.com/prometheus/common v0.26.0/go.mod h1:M7rCNAaPfAosfx8veZJCuw84e35h3Cfd9VFqTh1DIvc= github.com/prometheus/common v0.32.1/go.mod h1:vu+V0TpY+O6vW9J44gczi3Ap/oXXR10b+M/gUGO4Hls= +<<<<<<< HEAD github.com/prometheus/common v0.37.0 h1:ccBbHCgIiT9uSoFY0vX8H3zsNR5eLt17/RQLUvn8pXE= github.com/prometheus/common v0.37.0/go.mod h1:phzohg0JFMnBEFGxTDbfu3QyL5GI8gTQJFhYO5B3mfA= +======= +github.com/prometheus/common v0.42.0 h1:EKsfXEYo4JpWMHH5cg+KOUWeuJSov1Id8zGR8eeI1YM= +github.com/prometheus/common v0.42.0/go.mod h1:xBwqVerjNdUDjgODMpudtOMwlOwf2SaTr1yjz4b7Zbc= +>>>>>>> 65217445e8 (makefile(all): add new cases to run_group and check cases in makefile (#8794)) github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk= github.com/prometheus/procfs v0.0.0-20190507164030-5867b95ac084/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA= github.com/prometheus/procfs v0.0.2/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA= @@ -1206,6 +1234,7 @@ github.com/tidwall/gjson v1.6.0/go.mod h1:P256ACg0Mn+j1RXIDXoss50DeIABTYK1PULOJH github.com/tidwall/match v1.0.1/go.mod h1:LujAq0jyVjBy028G1WhWfIzbpQfMO8bBZ6Tyb0+pL9E= github.com/tidwall/pretty v1.0.0/go.mod h1:XNkn88O1ChpSDQmQeStsy+sBenx6DDtFZJxhVysOjyk= github.com/tikv/client-go/v2 
v2.0.1-0.20220510032238-ff5e35ac2869/go.mod h1:0scaG+seu7L56apm+Gjz9vckyO7ABIzM6T7n00mrIXs= +<<<<<<< HEAD github.com/tikv/client-go/v2 v2.0.4-0.20230131081004-cd83d1507d70 h1:AyFCezjYx4KYXuyPM5o38lYqh5UdR0OM36UEocOx+Hs= github.com/tikv/client-go/v2 v2.0.4-0.20230131081004-cd83d1507d70/go.mod h1:MDT4J9LzgS7Bj1DnEq6Gk/puy6mp8TgUC92zGEVVLLg= github.com/tikv/pd v1.1.0-beta.0.20220303060546-3695d8164800 h1:lIfIwqe1HPa0suhMpiI200nYxau+rXWXTqZxSGg1HS4= @@ -1213,6 +1242,15 @@ github.com/tikv/pd v1.1.0-beta.0.20220303060546-3695d8164800/go.mod h1:J/dj1zpEE github.com/tikv/pd/client v0.0.0-20220307081149-841fa61e9710/go.mod h1:AtvppPwkiyUgQlR1W9qSqfTB+OsOIu19jDCOxOsPkmU= github.com/tikv/pd/client v0.0.0-20221031025758-80f0d8ca4d07 h1:ckPpxKcl75mO2N6a4cJXiZH43hvcHPpqc9dh1TmH1nc= github.com/tikv/pd/client v0.0.0-20221031025758-80f0d8ca4d07/go.mod h1:CipBxPfxPUME+BImx9MUYXCnAVLS3VJUr3mnSJwh40A= +======= +github.com/tikv/client-go/v2 v2.0.8-0.20230419123920-35c1ee47c4f9 h1:b6sIFrhNA8R5Q+geziSnAenDphJ+BZtHMkGgHyj472I= +github.com/tikv/client-go/v2 v2.0.8-0.20230419123920-35c1ee47c4f9/go.mod h1:JhXnkzPnxpgXj9Tww4P6sayf9TtsnB9XrtS71jfzq+U= +github.com/tikv/pd v1.1.0-beta.0.20230203015356-248b3f0be132 h1:vCVu7LxFou5WuaY6jHDMHKVeJTtwr5o2i1xWgGAdDo4= +github.com/tikv/pd v1.1.0-beta.0.20230203015356-248b3f0be132/go.mod h1:jb9oq6rN4U0U3FZdvqWlpi9rZzFJxiOlvZ3aj5BTpg8= +github.com/tikv/pd/client v0.0.0-20220307081149-841fa61e9710/go.mod h1:AtvppPwkiyUgQlR1W9qSqfTB+OsOIu19jDCOxOsPkmU= +github.com/tikv/pd/client v0.0.0-20230419153320-f1d1a80feb95 h1:177X/S43/qjxDyFq9CBB4Nts0nwLvjJFXzoav2XCUSA= +github.com/tikv/pd/client v0.0.0-20230419153320-f1d1a80feb95/go.mod h1:5vgcvO020ZCdMZkTrRdS/wFZQUab82BSfKE38T61ro0= +>>>>>>> 65217445e8 (makefile(all): add new cases to run_group and check cases in makefile (#8794)) github.com/tinylib/msgp v1.1.6 h1:i+SbKraHhnrf9M5MYmvQhFnbLhAXSDWF8WWsuyRdocw= github.com/tinylib/msgp v1.1.6/go.mod h1:75BAfg2hauQhs3qedfdDZmWAPcFMAvJE5b9rGOMufyw= github.com/tklauser/go-sysconf v0.3.4/go.mod h1:Cl2c8ZRWfHD5IrfHo9VN+FX9kCFjIOyVklgXycLB6ek= @@ -1924,8 +1962,13 @@ google.golang.org/genproto v0.0.0-20220114231437-d2e6a121cae0/go.mod h1:5CzLGKJ6 google.golang.org/genproto v0.0.0-20220201184016-50beb8ab5c44/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= google.golang.org/genproto v0.0.0-20220211171837-173942840c17/go.mod h1:kGP+zUP2Ddo0ayMi4YuN7C3WZyJvGLZRh8Z5wnAqvEI= google.golang.org/genproto v0.0.0-20220216160803-4663080d8bc8/go.mod h1:kGP+zUP2Ddo0ayMi4YuN7C3WZyJvGLZRh8Z5wnAqvEI= +<<<<<<< HEAD google.golang.org/genproto v0.0.0-20221201164419-0e50fba7f41c h1:S34D59DS2GWOEwWNt4fYmTcFrtlOgukG2k9WsomZ7tg= google.golang.org/genproto v0.0.0-20221201164419-0e50fba7f41c/go.mod h1:rZS5c/ZVYMaOGBfO68GWtjOw/eLaZM1X6iVtgjZ+EWg= +======= +google.golang.org/genproto v0.0.0-20230410155749-daa745c078e1 h1:KpwkzHKEF7B9Zxg18WzOa7djJ+Ha5DzthMyZYQfEn2A= +google.golang.org/genproto v0.0.0-20230410155749-daa745c078e1/go.mod h1:nKE/iIaLqn2bQwXBg8f1g2Ylh6r5MN5CmZvuzZCgsCU= +>>>>>>> 65217445e8 (makefile(all): add new cases to run_group and check cases in makefile (#8794)) google.golang.org/grpc v0.0.0-20180607172857-7a6a684ca69e/go.mod h1:yo6s7OP7yaDglbqo1J04qKzAhqBH6lvTonzMVmEdcZw= google.golang.org/grpc v1.8.0/go.mod h1:yo6s7OP7yaDglbqo1J04qKzAhqBH6lvTonzMVmEdcZw= google.golang.org/grpc v1.12.0/go.mod h1:yo6s7OP7yaDglbqo1J04qKzAhqBH6lvTonzMVmEdcZw= diff --git a/tests/integration_tests/run_group.sh b/tests/integration_tests/run_group.sh new file mode 100755 index 00000000000..becd070f264 --- /dev/null +++ 
b/tests/integration_tests/run_group.sh @@ -0,0 +1,73 @@ +#!/bin/bash + +set -eo pipefail + +CUR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd) + +sink_type=$1 +group=$2 + +# Define groups +# Note: If new group is added, the group name must also be added to CI +# * https://github.com/PingCAP-QE/ci/blob/main/pipelines/pingcap/tiflow/latest/pull_cdc_integration_kafka_test.groovy +# * https://github.com/PingCAP-QE/ci/blob/main/pipelines/pingcap/tiflow/latest/pull_cdc_integration_test.groovy +# Each group of tests consumes as much time as possible, thus reducing CI waiting time. +# Putting multiple light tests together and heavy tests in a separate group. +declare -A groups +groups=( + ["G00"]='changefeed_error ddl_sequence force_replicate_table' + ["G01"]='multi_capture kafka_big_messages cdc' + ["G02"]='drop_many_tables multi_cdc_cluster processor_stop_delay' + ["G03"]='capture_suicide_while_balance_table row_format ddl_only_block_related_table ddl_manager' + ["G04"]='foreign_key canal_json_basic ddl_puller_lag owner_resign' + ["G05"]='partition_table changefeed_auto_stop' + ["G06"]='charset_gbk owner_remove_table_error bdr_mode' + ["G07"]='clustered_index multi_tables_ddl big_txn_v2' + ["G08"]='bank multi_source kafka_sink_error_resume' + ["G09"]='capture_suicide_while_balance_table' + ["G10"]='multi_topics_v2 consistent_replicate_storage_s3 sink_retry' + ["G11"]='consistent_replicate_storage_file kv_client_stream_reconnect consistent_replicate_gbk' + ["G12"]='http_api changefeed_fast_fail tidb_mysql_test server_config_compatibility' + ["G13"]='canal_json_adapter_compatibility resourcecontrol processor_etcd_worker_delay' + ["G14"]='batch_update_to_no_batch gc_safepoint default_value changefeed_pause_resume' + ["G15"]='cli simple cdc_server_tips changefeed_resume_with_checkpoint_ts ddl_reentrant' + ["G16"]='processor_err_chan resolve_lock move_table kafka_compression autorandom' + ["G17"]='ddl_attributes many_pk_or_uk kafka_messages capture_session_done_during_task http_api_tls' + ["G18"]='tiflash new_ci_collation_without_old_value region_merge common_1' + ["G19"]='kafka_big_messages_v2 multi_tables_ddl_v2 split_region availability' + ["G20"]='changefeed_reconstruct http_proxies kill_owner_with_ddl savepoint' + ["G21"]='event_filter generate_column syncpoint sequence processor_resolved_ts_fallback' + ["G22"]='big_txn csv_storage_basic changefeed_finish sink_hang canal_json_storage_basic' + ["G23"]='multi_topics new_ci_collation_with_old_value batch_add_table multi_changefeed' + ["G24"]='consistent_replicate_nfs consistent_replicate_ddl owner_resign api_v2' + ["G25"]='canal_json_storage_partition_table csv_storage_partition_table csv_storage_multi_tables_ddl' +) + +# Get other cases not in groups, to avoid missing any case +others=() +for script in "$CUR"/*/run.sh; do + test_name="$(basename "$(dirname "$script")")" + # shellcheck disable=SC2076 + if [[ ! " ${groups[*]} " =~ " ${test_name} " ]]; then + others=("${others[@]} ${test_name}") + fi +done + +if [[ "$group" == "others" ]]; then + if [[ -z $others ]]; then + echo "All CDC integration test cases are added to groups" + exit 0 + fi + echo "Error: "$others" is not added to any group in tests/integration_tests/run_group.sh" + exit 1 +elif [[ " ${!groups[*]} " =~ " ${group} " ]]; then + test_names="${groups[${group}]}" + # Run test cases + if [[ -n $test_names ]]; then + echo "Run cases: ${test_names}" + "${CUR}"/run.sh "${sink_type}" "${test_names}" + fi +else + echo "Error: invalid group name: ${group}" + exit 1 +fi
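+
+# Usage: run_group.sh <sink_type> <group name>
+#   e.g. `./tests/integration_tests/run_group.sh <sink_type> G00` runs group G00 against the given sink,
+#   while `./tests/integration_tests/run_group.sh check "others"` (invoked by the Makefile's
+#   check_cdc_integration_test target) only verifies that every test case is assigned to some group.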