diff --git a/pkg/roachpb/app_stats.go b/pkg/roachpb/app_stats.go
index e5c6f02187b1..178dd898df47 100644
--- a/pkg/roachpb/app_stats.go
+++ b/pkg/roachpb/app_stats.go
@@ -164,6 +164,7 @@ func (s *StatementStatistics) Add(other *StatementStatistics) {
 	s.RowsRead.Add(other.RowsRead, s.Count, other.Count)
 	s.RowsWritten.Add(other.RowsWritten, s.Count, other.Count)
 	s.Nodes = util.CombineUniqueInt64(s.Nodes, other.Nodes)
+	s.PlanGists = util.CombineUniqueString(s.PlanGists, other.PlanGists)
 
 	s.ExecStats.Add(other.ExecStats)
 
diff --git a/pkg/roachpb/app_stats.proto b/pkg/roachpb/app_stats.proto
index ea006f3a69bf..8c9f33e9953f 100644
--- a/pkg/roachpb/app_stats.proto
+++ b/pkg/roachpb/app_stats.proto
@@ -104,6 +104,12 @@ message StatementStatistics {
   // Nodes is the ordered list of nodes ids on which the statement was executed.
   repeated int64 nodes = 24;
 
+  // plan_gists is a list of compressed plans that can be converted (lossily)
+  // back into logical plans.
+  // Each statement has only one plan gist, but the same statement fingerprint
+  // ID can correspond to more than one value.
+  repeated string plan_gists = 26;
+
   // Note: be sure to update `sql/app_stats.go` when adding/removing fields here!
 
   reserved 13, 14, 17, 18, 19, 20;
diff --git a/pkg/server/combined_statement_stats.go b/pkg/server/combined_statement_stats.go
index 8bc08b27901d..3e638f9948a7 100644
--- a/pkg/server/combined_statement_stats.go
+++ b/pkg/server/combined_statement_stats.go
@@ -68,8 +68,8 @@ func getCombinedStatementStats(
 	startTime := getTimeFromSeconds(req.Start)
 	endTime := getTimeFromSeconds(req.End)
 	limit := SQLStatsResponseMax.Get(&settings.SV)
-	whereClause, args := getFilterAndParams(startTime, endTime, limit, testingKnobs)
-	statements, err := collectCombinedStatements(ctx, ie, whereClause, args)
+	whereClause, args := getFilterAndParams(startTime, endTime)
+	statements, err := collectCombinedStatements(ctx, ie, whereClause, args, limit, testingKnobs)
 	if err != nil {
 		return nil, err
 	}
@@ -89,12 +89,9 @@ func getCombinedStatementStats(
 	return response, nil
 }
 
-func getFilterAndParams(
-	start, end *time.Time, limit int64, testingKnobs *sqlstats.TestingKnobs,
-) (string, []interface{}) {
+func getFilterAndParams(start, end *time.Time) (string, []interface{}) {
 	var args []interface{}
 	var buffer strings.Builder
-	buffer.WriteString(testingKnobs.GetAOSTClause())
 
 	// Filter out internal statements by app name.
 	buffer.WriteString(" WHERE app_name NOT LIKE '$ internal%'")
@@ -109,17 +106,16 @@ func getFilterAndParams(
 		args = append(args, *end)
 	}
 
-	// Retrieve the top rows ordered by aggregation time and service latency.
-	buffer.WriteString(fmt.Sprintf(`
-ORDER BY aggregated_ts DESC,(statistics -> 'statistics' -> 'svcLat' ->> 'mean')::float DESC
-LIMIT $%d`, len(args)+1))
-	args = append(args, limit)
-
 	return buffer.String(), args
 }
 
 func collectCombinedStatements(
-	ctx context.Context, ie *sql.InternalExecutor, whereClause string, qargs []interface{},
+	ctx context.Context,
+	ie *sql.InternalExecutor,
+	whereClause string,
+	qargs []interface{},
+	limit int64,
+	testingKnobs *sqlstats.TestingKnobs,
 ) ([]serverpb.StatementsResponse_CollectedStatementStatistics, error) {
 
 	query := fmt.Sprintf(
@@ -132,8 +128,29 @@ func collectCombinedStatements(
 			statistics,
 			sampled_plan,
 			aggregation_interval
-		FROM crdb_internal.statement_statistics
-		%s`, whereClause)
+		FROM (
+			SELECT
+				fingerprint_id,
+				transaction_fingerprint_id,
+				app_name,
+				aggregated_ts,
+				max(metadata) AS metadata,
+				crdb_internal.merge_statement_stats(array_agg(statistics)) AS statistics,
+				max(sampled_plan) AS sampled_plan,
+				aggregation_interval
+			FROM crdb_internal.statement_statistics
+			%s
+			GROUP BY
+				fingerprint_id,
+				transaction_fingerprint_id,
+				app_name,
+				aggregated_ts,
+				aggregation_interval)
+		%s
+		ORDER BY (statistics -> 'statistics' -> 'svcLat' ->> 'mean')::float DESC
+		LIMIT $%d`, whereClause, testingKnobs.GetAOSTClause(), len(qargs)+1)
+
+	qargs = append(qargs, limit)
 
 	const expectedNumDatums = 8
 
diff --git a/pkg/sql/crdb_internal.go b/pkg/sql/crdb_internal.go
index f9146d9696f6..180a4de87230 100644
--- a/pkg/sql/crdb_internal.go
+++ b/pkg/sql/crdb_internal.go
@@ -5118,10 +5118,8 @@ CREATE TABLE crdb_internal.cluster_statement_statistics (
 			transactionFingerprintID := tree.NewDBytes(
 				tree.DBytes(sqlstatsutil.EncodeUint64ToBytes(uint64(statistics.Key.TransactionFingerprintID))))
 
-			// TODO(azhng): properly update plan_hash value once we can expose it
-			// from the optimizer.
 			planHash := tree.NewDBytes(
-				tree.DBytes(sqlstatsutil.EncodeUint64ToBytes(0)))
+				tree.DBytes(sqlstatsutil.EncodeUint64ToBytes(statistics.Key.PlanHash)))
 
 			metadataJSON, err := sqlstatsutil.BuildStmtMetadataJSON(statistics)
 			if err != nil {
diff --git a/pkg/sql/executor_statement_metrics.go b/pkg/sql/executor_statement_metrics.go
index 4f4ee3ab6132..b41286fb7e42 100644
--- a/pkg/sql/executor_statement_metrics.go
+++ b/pkg/sql/executor_statement_metrics.go
@@ -163,6 +163,7 @@ func (ex *connExecutor) recordStatementSummary(
 		FullScan: flags.IsSet(planFlagContainsFullIndexScan) || flags.IsSet(planFlagContainsFullTableScan),
 		Failed:   stmtErr != nil,
 		Database: planner.SessionData().Database,
+		PlanHash: planner.instrumentation.planGist.Hash(),
 	}
 
 	// We only populate the transaction fingerprint ID field if we are in an
@@ -204,6 +205,7 @@ func (ex *connExecutor) recordStatementSummary(
 		Nodes:          getNodesFromPlanner(planner),
 		StatementType:  stmt.AST.StatementType(),
 		Plan:           planner.instrumentation.PlanForStats(ctx),
+		PlanGist:       planner.instrumentation.planGist.String(),
 		StatementError: stmtErr,
 	}
 
diff --git a/pkg/sql/instrumentation.go b/pkg/sql/instrumentation.go
index 0d68e524221d..8646bd01378a 100644
--- a/pkg/sql/instrumentation.go
+++ b/pkg/sql/instrumentation.go
@@ -296,6 +296,7 @@ func (ih *instrumentationHelper) Finish(
 			ImplicitTxn: ih.implicitTxn,
 			Database:    p.SessionData().Database,
 			Failed:      retErr != nil,
+			PlanHash:    ih.planGist.Hash(),
 		}
 		// We populate transaction fingerprint ID if this is an implicit transaction.
 		// See executor_statement_metrics.go:recordStatementSummary() for further
diff --git a/pkg/sql/instrumentation_test.go b/pkg/sql/instrumentation_test.go
index 8dd5a3fb937d..0cdc9508c8e6 100644
--- a/pkg/sql/instrumentation_test.go
+++ b/pkg/sql/instrumentation_test.go
@@ -73,6 +73,7 @@ func TestSampledStatsCollection(t *testing.T) {
 			},
 		))
 		require.NotNil(t, stats)
+		require.NotZero(t, stats.Key.PlanHash)
 
 		return stats
 	}
diff --git a/pkg/sql/sqlstats/persistedsqlstats/flush.go b/pkg/sql/sqlstats/persistedsqlstats/flush.go
index 878c031c6d2d..4d290242b6a4 100644
--- a/pkg/sql/sqlstats/persistedsqlstats/flush.go
+++ b/pkg/sql/sqlstats/persistedsqlstats/flush.go
@@ -147,7 +147,7 @@ func (s *PersistedSQLStats) doFlushSingleStmtStats(
 		serializedFingerprintID := sqlstatsutil.EncodeUint64ToBytes(uint64(scopedStats.ID))
 		serializedTransactionFingerprintID := sqlstatsutil.EncodeUint64ToBytes(uint64(scopedStats.Key.TransactionFingerprintID))
-		serializedPlanHash := sqlstatsutil.EncodeUint64ToBytes(uint64(dummyPlanHash))
+		serializedPlanHash := sqlstatsutil.EncodeUint64ToBytes(scopedStats.Key.PlanHash)
 
 		insertFn := func(ctx context.Context, txn *kv.Txn) (alreadyExists bool, err error) {
 			rowsAffected, err := s.insertStatementStats(
@@ -417,7 +417,7 @@ WHERE fingerprint_id = $2
 				"plan_hash: %d, "+
 				"node_id: %d",
 			serializedFingerprintID, serializedTransactionFingerprintID, stats.Key.App,
-			aggregatedTs, dummyPlanHash, s.cfg.SQLIDContainer.SQLInstanceID())
+			aggregatedTs, serializedPlanHash, s.cfg.SQLIDContainer.SQLInstanceID())
 	}
 
 	return nil
@@ -581,12 +581,12 @@ FOR UPDATE
 	if row == nil {
 		return errors.AssertionFailedf(
 			"statement statistics not found fingerprint_id: %s, app: %s, aggregated_ts: %s, plan_hash: %d, node_id: %d",
-			serializedFingerprintID, key.App, aggregatedTs, dummyPlanHash, s.cfg.SQLIDContainer.SQLInstanceID())
+			serializedFingerprintID, key.App, aggregatedTs, serializedPlanHash, s.cfg.SQLIDContainer.SQLInstanceID())
 	}
 
 	if len(row) != 1 {
 		return errors.AssertionFailedf("unexpectedly found %d returning columns for fingerprint_id: %s, app: %s, aggregated_ts: %s, plan_hash %d, node_id: %d",
-			len(row), serializedFingerprintID, key.App, aggregatedTs, dummyPlanHash,
+			len(row), serializedFingerprintID, key.App, aggregatedTs, serializedPlanHash,
 			s.cfg.SQLIDContainer.SQLInstanceID())
 	}
 
diff --git a/pkg/sql/sqlstats/persistedsqlstats/provider.go b/pkg/sql/sqlstats/persistedsqlstats/provider.go
index 911d00b6c8ed..3327c23902e4 100644
--- a/pkg/sql/sqlstats/persistedsqlstats/provider.go
+++ b/pkg/sql/sqlstats/persistedsqlstats/provider.go
@@ -34,14 +34,6 @@ import (
 	"github.com/cockroachdb/errors"
 )
 
-const (
-	// TODO(azhng): currently we do not have the ability to compute a hash for
-	// query plan. This is currently being worked on by the SQL Queries team.
-	// Once we are able get consistent hash value from a query plan, we should
-	// update this.
-	dummyPlanHash = int64(0)
-)
-
 // ErrConcurrentSQLStatsCompaction is reported when two sql stats compaction
 // jobs are issued concurrently. This is a sentinel error.
 var ErrConcurrentSQLStatsCompaction = errors.New("another sql stats compaction job is already running")
diff --git a/pkg/sql/sqlstats/persistedsqlstats/sqlstatsutil/json_encoding_test.go b/pkg/sql/sqlstats/persistedsqlstats/sqlstatsutil/json_encoding_test.go
index 57ed207bb6cc..d8bf2d1ae227 100644
--- a/pkg/sql/sqlstats/persistedsqlstats/sqlstatsutil/json_encoding_test.go
+++ b/pkg/sql/sqlstats/persistedsqlstats/sqlstatsutil/json_encoding_test.go
@@ -96,7 +96,8 @@ func TestSQLStatsJsonEncoding(t *testing.T) {
             "mean": {{.Float}},
             "sqDiff": {{.Float}}
           },
-          "nodes": [{{joinInts .IntArray}}]
+          "nodes": [{{joinInts .IntArray}}],
+          "planGists": [{{joinStrings .StringArray}}]
         },
         "execution_statistics": {
           "cnt": {{.Int64}},
@@ -211,6 +212,7 @@ func TestSQLStatsJsonEncoding(t *testing.T) {
             "sqDiff": {{.Float}}
           },
-          "nodes": [{{joinInts .IntArray}}]
+          "nodes": [{{joinInts .IntArray}}],
+          "planGists": [{{joinStrings .StringArray}}]
         },
         "execution_statistics": {
           "cnt": {{.Int64}},
diff --git a/pkg/sql/sqlstats/persistedsqlstats/sqlstatsutil/json_impl.go b/pkg/sql/sqlstats/persistedsqlstats/sqlstatsutil/json_impl.go
index bddd7a8ce97c..85f61c02e1f3 100644
--- a/pkg/sql/sqlstats/persistedsqlstats/sqlstatsutil/json_impl.go
+++ b/pkg/sql/sqlstats/persistedsqlstats/sqlstatsutil/json_impl.go
@@ -135,6 +135,39 @@ func (a *int64Array) encodeJSON() (json.JSON, error) {
 	return builder.Build(), nil
 }
 
+type stringArray []string
+
+func (a *stringArray) decodeJSON(js json.JSON) error {
+	arrLen := js.Len()
+	for i := 0; i < arrLen; i++ {
+		var value jsonString
+		valJSON, err := js.FetchValIdx(i)
+		if err != nil {
+			return err
+		}
+		if err := value.decodeJSON(valJSON); err != nil {
+			return err
+		}
+		*a = append(*a, string(value))
+	}
+
+	return nil
+}
+
+func (a *stringArray) encodeJSON() (json.JSON, error) {
+	builder := json.NewArrayBuilder(len(*a))
+
+	for _, value := range *a {
+		jsVal, err := (*jsonString)(&value).encodeJSON()
+		if err != nil {
+			return nil, err
+		}
+		builder.Add(jsVal)
+	}
+
+	return builder.Build(), nil
+}
+
 type stmtFingerprintIDArray []roachpb.StmtFingerprintID
 
 func (s *stmtFingerprintIDArray) decodeJSON(js json.JSON) error {
@@ -237,6 +270,7 @@ func (s *innerStmtStats) jsonFields() jsonFields {
 		{"rowsRead", (*numericStats)(&s.RowsRead)},
 		{"rowsWritten", (*numericStats)(&s.RowsWritten)},
 		{"nodes", (*int64Array)(&s.Nodes)},
+		{"planGists", (*stringArray)(&s.PlanGists)},
 	}
 }
 
diff --git a/pkg/sql/sqlstats/persistedsqlstats/sqlstatsutil/testutils.go b/pkg/sql/sqlstats/persistedsqlstats/sqlstatsutil/testutils.go
index afbc5c3a515f..41e65352c013 100644
--- a/pkg/sql/sqlstats/persistedsqlstats/sqlstatsutil/testutils.go
+++ b/pkg/sql/sqlstats/persistedsqlstats/sqlstatsutil/testutils.go
@@ -11,12 +11,13 @@
 import (
-	"html/template"
+	"fmt"
 	"math/rand"
 	"reflect"
 	"strconv"
 	"strings"
 	"testing"
+	"text/template"
 	"time"
 
 	"github.com/cockroachdb/cockroach/pkg/roachpb"
@@ -36,12 +37,13 @@ func GetRandomizedCollectedStatementStatisticsForTest(
 }
 
 type randomData struct {
-	Bool     bool
-	String   string
-	Int64    int64
-	Float    float64
-	IntArray []int64
-	Time     time.Time
+	Bool        bool
+	String      string
+	Int64       int64
+	Float       float64
+	IntArray    []int64
+	StringArray []string
+	Time        time.Time
 }
 
 var alphabet = []rune("abcdefghijklmkopqrstuvwxyz")
@@ -57,11 +59,22 @@ func genRandomData() randomData {
 	}
 	r.String = b.String()
 
+	// Generate a randomized array of length 5.
+	arrLen := 5
+	r.StringArray = make([]string, arrLen)
+	for i := 0; i < arrLen; i++ {
+		// Randomly generate a 10-character string.
+		b := strings.Builder{}
+		for j := 0; j < 10; j++ {
+			b.WriteRune(alphabet[rand.Intn(26)])
+		}
+		r.StringArray[i] = b.String()
+	}
+
 	r.Int64 = rand.Int63()
 	r.Float = rand.Float64()
 
 	// Generate a randomized array of length 5.
-	arrLen := 5
 	r.IntArray = make([]int64, arrLen)
 	for i := 0; i < arrLen; i++ {
 		r.IntArray[i] = rand.Int63()
@@ -79,6 +92,13 @@ func fillTemplate(t *testing.T, tmplStr string, data randomData) string {
 		}
 		return strings.Join(strArr, ",")
 	}
+	joinStrings := func(arr []string) string {
+		strArr := make([]string, len(arr))
+		for i, val := range arr {
+			strArr[i] = fmt.Sprintf("%q", val)
+		}
+		return strings.Join(strArr, ",")
+	}
 	stringifyTime := func(tm time.Time) string {
 		s, err := tm.MarshalText()
 		require.NoError(t, err)
@@ -88,6 +108,7 @@ func fillTemplate(t *testing.T, tmplStr string, data randomData) string {
 		New("").
 		Funcs(template.FuncMap{
 			"joinInts":      joinInts,
+			"joinStrings":   joinStrings,
 			"stringifyTime": stringifyTime,
 		}).
 		Parse(tmplStr)
@@ -133,8 +154,15 @@ func fillObject(t *testing.T, val reflect.Value, data *randomData) {
 	case reflect.Bool:
 		val.SetBool(data.Bool)
 	case reflect.Slice:
-		for _, randInt := range data.IntArray {
-			val.Set(reflect.Append(val, reflect.ValueOf(randInt)))
+		switch val.Type().String() {
+		case "[]string":
+			for _, randString := range data.StringArray {
+				val.Set(reflect.Append(val, reflect.ValueOf(randString)))
+			}
+		case "[]int64":
+			for _, randInt := range data.IntArray {
+				val.Set(reflect.Append(val, reflect.ValueOf(randInt)))
+			}
 		}
 	case reflect.Struct:
 		switch val.Type().Name() {
diff --git a/pkg/sql/sqlstats/persistedsqlstats/testdata/logical_plan_sampling_for_explicit_txn b/pkg/sql/sqlstats/persistedsqlstats/testdata/logical_plan_sampling_for_explicit_txn
index d752d06d4860..2c1bbbc8d9fa 100644
--- a/pkg/sql/sqlstats/persistedsqlstats/testdata/logical_plan_sampling_for_explicit_txn
+++ b/pkg/sql/sqlstats/persistedsqlstats/testdata/logical_plan_sampling_for_explicit_txn
@@ -28,7 +28,7 @@ SELECT 1
 # logical plan sampling.
 should-sample-logical-plan db=defaultdb implicitTxn=true fingerprint=SELECT%_
 ----
-false
+true
 
 # However, if we are to execute the same statement but under explicit
 # transaction, the plan will still need to be sampled.
@@ -48,7 +48,7 @@ COMMIT
 # collection.
 should-sample-logical-plan db=defaultdb implicitTxn=false fingerprint=SELECT%_
 ----
-false
+true
 
 # Set the time to the future and ensure we will resample the logical plan.
 set-time time=2021-09-20T15:05:01Z
@@ -67,7 +67,7 @@ SELECT 1
 
 should-sample-logical-plan db=defaultdb implicitTxn=true fingerprint=SELECT%_
 ----
-false
+true
 
 should-sample-logical-plan db=defaultdb implicitTxn=false fingerprint=SELECT%_
 ----
@@ -82,4 +82,4 @@ COMMIT
 
 should-sample-logical-plan db=defaultdb implicitTxn=false fingerprint=SELECT%_
 ----
-false
+true
diff --git a/pkg/sql/sqlstats/ssmemstorage/ss_mem_iterator.go b/pkg/sql/sqlstats/ssmemstorage/ss_mem_iterator.go
index ee1d9748b375..531e70cecf0c 100644
--- a/pkg/sql/sqlstats/ssmemstorage/ss_mem_iterator.go
+++ b/pkg/sql/sqlstats/ssmemstorage/ss_mem_iterator.go
@@ -94,6 +94,7 @@ func (s *StmtStatsIterator) Next() bool {
 			Failed:                   stmtKey.failed,
 			App:                      s.container.appName,
 			Database:                 database,
+			PlanHash:                 stmtKey.planHash,
 			TransactionFingerprintID: stmtKey.transactionFingerprintID,
 		},
 		ID: stmtFingerprintID,
diff --git a/pkg/sql/sqlstats/ssmemstorage/ss_mem_storage.go b/pkg/sql/sqlstats/ssmemstorage/ss_mem_storage.go
index fbf4a0c2e24c..00ba707b8682 100644
--- a/pkg/sql/sqlstats/ssmemstorage/ss_mem_storage.go
+++ b/pkg/sql/sqlstats/ssmemstorage/ss_mem_storage.go
@@ -49,6 +49,7 @@ type planKey struct {
 	failed      bool
 	implicitTxn bool
 	database    string
+	planHash    uint64
 }
 
 func (p planKey) size() int64 {
@@ -255,6 +256,7 @@ func NewTempContainerFromExistingStmtStats(
 				failed:      statistics[i].Key.KeyData.Failed,
 				implicitTxn: statistics[i].Key.KeyData.ImplicitTxn,
 				database:    statistics[i].Key.KeyData.Database,
+				planHash:    statistics[i].Key.KeyData.PlanHash,
 			},
 			transactionFingerprintID: statistics[i].Key.KeyData.TransactionFingerprintID,
 		}
@@ -472,6 +474,7 @@ func (s *Container) getStatsForStmt(
 	implicitTxn bool,
 	database string,
 	failed bool,
+	planHash uint64,
 	transactionFingerprintID roachpb.TransactionFingerprintID,
 	createIfNonexistent bool,
 ) (
@@ -489,6 +492,7 @@ func (s *Container) getStatsForStmt(
 			failed:      failed,
 			implicitTxn: implicitTxn,
 			database:    database,
+			planHash:    planHash,
 		},
 		transactionFingerprintID: transactionFingerprintID,
 	}
@@ -662,6 +666,7 @@ func (s *Container) MergeApplicationStatementStats(
 			failed:      statistics.Key.Failed,
 			implicitTxn: statistics.Key.ImplicitTxn,
 			database:    statistics.Key.Database,
+			planHash:    statistics.Key.PlanHash,
 		},
 		transactionFingerprintID: statistics.Key.TransactionFingerprintID,
 	}
diff --git a/pkg/sql/sqlstats/ssmemstorage/ss_mem_writer.go b/pkg/sql/sqlstats/ssmemstorage/ss_mem_writer.go
index ebcdb169c9c2..1e02cfad59e0 100644
--- a/pkg/sql/sqlstats/ssmemstorage/ss_mem_writer.go
+++ b/pkg/sql/sqlstats/ssmemstorage/ss_mem_writer.go
@@ -74,6 +74,7 @@ func (s *Container) RecordStatement(
 		key.ImplicitTxn,
 		key.Database,
 		key.Failed,
+		key.PlanHash,
 		key.TransactionFingerprintID,
 		createIfNonExistent,
 	)
@@ -123,6 +124,7 @@ func (s *Container) RecordStatement(
 	stats.mu.data.RowsWritten.Record(stats.mu.data.Count, float64(value.RowsWritten))
 	stats.mu.data.LastExecTimestamp = s.getTimeNow()
 	stats.mu.data.Nodes = util.CombineUniqueInt64(stats.mu.data.Nodes, value.Nodes)
+	stats.mu.data.PlanGists = util.CombineUniqueString(stats.mu.data.PlanGists, []string{value.PlanGist})
 	// Note that some fields derived from tracing statements (such as
 	// BytesSentOverNetwork) are not updated here because they are collected
 	// on-demand.
@@ -170,6 +172,7 @@ func (s *Container) RecordStatementExecStats(
 		key.ImplicitTxn,
 		key.Database,
 		key.Failed,
+		key.PlanHash,
 		key.TransactionFingerprintID,
 		false, /* createIfNotExists */
 	)
diff --git a/pkg/sql/sqlstats/ssprovider.go b/pkg/sql/sqlstats/ssprovider.go
index 214d81c3117a..07503e441076 100644
--- a/pkg/sql/sqlstats/ssprovider.go
+++ b/pkg/sql/sqlstats/ssprovider.go
@@ -196,6 +196,7 @@ type RecordedStmtStats struct {
 	Nodes          []int64
 	StatementType  tree.StatementType
 	Plan           *roachpb.ExplainTreePlanNode
+	PlanGist       string
 	StatementError error
 }
 
diff --git a/pkg/ui/workspaces/cluster-ui/src/statementsPage/statementsPage.fixture.ts b/pkg/ui/workspaces/cluster-ui/src/statementsPage/statementsPage.fixture.ts
index a11259319bcf..a1abf20fff9d 100644
--- a/pkg/ui/workspaces/cluster-ui/src/statementsPage/statementsPage.fixture.ts
+++ b/pkg/ui/workspaces/cluster-ui/src/statementsPage/statementsPage.fixture.ts
@@ -92,6 +92,7 @@ const statementStats: Required = {
     mean: 2,
     squared_diffs: 0.005,
   },
+  plan_gists: ["Ais="],
   exec_stats: execStats,
   last_exec_timestamp: {
     seconds: Long.fromInt(1599670292),
diff --git a/pkg/ui/workspaces/cluster-ui/src/util/appStats/appStats.spec.ts b/pkg/ui/workspaces/cluster-ui/src/util/appStats/appStats.spec.ts
index cefda7871d94..fc1306dc57a7 100644
--- a/pkg/ui/workspaces/cluster-ui/src/util/appStats/appStats.spec.ts
+++ b/pkg/ui/workspaces/cluster-ui/src/util/appStats/appStats.spec.ts
@@ -274,6 +274,7 @@ function randomStats(
       nanos: 111613000,
     },
     nodes: [Long.fromInt(1), Long.fromInt(3), Long.fromInt(4)],
+    plan_gists: ["Ais="],
   };
 }
 
diff --git a/pkg/ui/workspaces/cluster-ui/src/util/appStats/appStats.ts b/pkg/ui/workspaces/cluster-ui/src/util/appStats/appStats.ts
index 250c057f72c0..b9ecbf766b36 100644
--- a/pkg/ui/workspaces/cluster-ui/src/util/appStats/appStats.ts
+++ b/pkg/ui/workspaces/cluster-ui/src/util/appStats/appStats.ts
@@ -15,6 +15,7 @@ import {
   TimestampToNumber,
   DurationToNumber,
   uniqueLong,
+  unique,
 } from "src/util";
 
 export type StatementStatistics = protos.cockroach.sql.IStatementStatistics;
@@ -174,6 +175,7 @@ export function addStatementStats(
       ? a.last_exec_timestamp
      : b.last_exec_timestamp,
     nodes: uniqueLong([...a.nodes, ...b.nodes]),
+    plan_gists: unique([...a.plan_gists, ...b.plan_gists]),
  };
 }
 
diff --git a/pkg/ui/workspaces/db-console/src/views/statements/statements.spec.tsx b/pkg/ui/workspaces/db-console/src/views/statements/statements.spec.tsx
index 3ce9e1226020..0716f06245a1 100644
--- a/pkg/ui/workspaces/db-console/src/views/statements/statements.spec.tsx
+++ b/pkg/ui/workspaces/db-console/src/views/statements/statements.spec.tsx
@@ -497,6 +497,7 @@ function makeStats(): Required {
       nanos: 111613000,
     },
     nodes: [Long.fromInt(1), Long.fromInt(2), Long.fromInt(3)],
+    plan_gists: ["Ais="],
   };
 }
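Note on the merge semantics: both merge paths above treat the gist list as a set — `util.CombineUniqueString` on the Go side and `unique([...a.plan_gists, ...b.plan_gists])` on the TypeScript side — so a statement fingerprint keeps each distinct plan gist once, no matter how many executions produced it. The Go helper itself is not part of this diff; the following is only a minimal sketch of the assumed behavior (the real implementation in `pkg/util` may differ, for example in how it orders or de-duplicates the result):

```go
package util

// combineUniqueString is an illustrative stand-in for util.CombineUniqueString
// (not the actual CockroachDB implementation): it returns a with every element
// of b appended that a does not already contain, so each plan gist appears at
// most once after two StatementStatistics are merged.
func combineUniqueString(a, b []string) []string {
	seen := make(map[string]struct{}, len(a)+len(b))
	for _, v := range a {
		seen[v] = struct{}{}
	}
	for _, v := range b {
		if _, ok := seen[v]; !ok {
			seen[v] = struct{}{}
			a = append(a, v)
		}
	}
	return a
}
```

For example, merging `["Ais="]` with `["Ais=", "AgFA"]` (the second gist is a made-up placeholder) yields `["Ais=", "AgFA"]`, mirroring what the TypeScript `unique` call does for the UI fixtures.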