roachtest/tpcc: add chaos variant for multiregion
Added a TPCC run that turns random nodes in a cluster on and off for
5-minute periods while running a multi-region workload.

Release note: None
otan committed Jun 1, 2021
1 parent b81b6d9 commit 467c077
Showing 3 changed files with 50 additions and 0 deletions.
2 changes: 2 additions & 0 deletions TEAMS.yaml
@@ -36,6 +36,8 @@ cockroachdb/geospatial:
  triage_column_id: 9487269
cockroachdb/dev-inf:
  triage_column_id: 10210759
cockroachdb/multiregion:
  triage_column_id: 11926170
cockroachdb/storage:
  triage_column_id: 6668367
cockroachdb/test-eng:
1 change: 1 addition & 0 deletions pkg/cmd/roachtest/test_registry.go
@@ -34,6 +34,7 @@ const (
	OwnerBulkIO      Owner = `bulk-io`
	OwnerCDC         Owner = `cdc`
	OwnerKV          Owner = `kv`
	OwnerMultiRegion Owner = `multiregion`
	OwnerServer      Owner = `server`
	OwnerSQLQueries  Owner = `sql-queries`
	OwnerSQLSchema   Owner = `sql-schema`
47 changes: 47 additions & 0 deletions pkg/cmd/roachtest/tpcc.go
@@ -349,6 +349,53 @@ func registerTPCC(r *testRegistry) {
		},
	})

	for _, survivalGoal := range []string{"zone", "region"} {
		zs := []string{
			"us-east1-b", "us-west1-b", "europe-west2-b",
		}
		regions := []string{
			"us-east1",
			"us-west1",
			"europe-west2",
		}
		r.Add(testSpec{
			Name:       fmt.Sprintf("tpcc/multiregion/survive=%s/chaos=true", survivalGoal),
			Owner:      OwnerMultiRegion,
			MinVersion: "v21.1.0",
			// 3 nodes per region + 1 node for the workload.
			Cluster: makeClusterSpec(10, geo(), zones(strings.Join(zs, ","))),
			Run: func(ctx context.Context, t *test, c *cluster) {
				duration := 90 * time.Minute
				partitionArgs := fmt.Sprintf(
					`--survival-goal=%s --regions=%s --partitions=%d`,
					survivalGoal,
					strings.Join(regions, ","),
					len(regions),
				)
				// TODO(#multiregion): set up the workload to run specifically for a given
				// partition on each node of the cluster, instead of one node running the
				// workload against all partitions.
				runTPCC(ctx, t, c, tpccOptions{
					Warehouses:     9,
					Duration:       duration,
					ExtraSetupArgs: partitionArgs,
					ExtraRunArgs:   `--method=simple --wait=false --tolerate-errors ` + partitionArgs,
					Chaos: func() Chaos {
						return Chaos{
							Timer: Periodic{
								Period:   300 * time.Second,
								DownTime: 300 * time.Second,
							},
							Target:       func() nodeListOption { return c.Node(1 + rand.Intn(c.spec.NodeCount-1)) },
							Stopper:      time.After(duration),
							DrainAndQuit: false,
						}
					},
					SetupType: usingInit,
				})
			},
		})
	}

	r.Add(testSpec{
		Name:  "tpcc/w=100/nodes=3/chaos=true",
		Owner: OwnerKV,
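For reference, with survivalGoal set to "region" the partitionArgs string above expands to --survival-goal=region --regions=us-east1,us-west1,europe-west2 --partitions=3. The standalone Go sketch below illustrates the on/off cadence the test configures through the Periodic chaos timer (a random node down for 5 minutes, then the cluster healthy for 5 minutes, over the 90-minute run, with the last node reserved for the workload). It is an illustration of the schedule only, not the roachtest Chaos implementation; chaosSchedule and its print statements are placeholders, and the reading of Period as up time between outages and DownTime as outage length is an assumption inferred from the field names.

package main

import (
	"fmt"
	"math/rand"
	"time"
)

// chaosSchedule mimics the cadence configured above: pick a random
// CockroachDB node (excluding the last node, which runs the workload),
// leave the cluster healthy for `period`, take the node down for
// `downTime`, and repeat until `stop` fires.
func chaosSchedule(nodeCount int, period, downTime time.Duration, stop <-chan time.Time) {
	for {
		// Nodes 1..nodeCount-1 run CockroachDB; node nodeCount runs the workload.
		target := 1 + rand.Intn(nodeCount-1)

		select {
		case <-time.After(period):
		case <-stop:
			return
		}
		fmt.Printf("stopping n%d for %s\n", target, downTime)

		select {
		case <-time.After(downTime):
		case <-stop:
			return
		}
		fmt.Printf("restarting n%d\n", target)
	}
}

func main() {
	duration := 90 * time.Minute
	// Mirrors the test settings: 10-node cluster, 5 minutes up, 5 minutes down,
	// for the 90-minute duration of the TPCC run.
	chaosSchedule(10, 300*time.Second, 300*time.Second, time.After(duration))
}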
