From 223bec0e915cd76c1dad705355bdaf7b6f2ece87 Mon Sep 17 00:00:00 2001 From: Akshay Joshi Date: Thu, 14 Nov 2024 15:37:46 +0530 Subject: [PATCH] cli: add timestamp as tag in tsdump datadog upload Previously, we updated the `upload_id` identifier to include a timestamp as part of the upload ID. This change adds upload year, month & day tags as part of the tsdump upload. Epic: None Part of: CRDB-44379 Release note: None --- pkg/cli/testdata/tsdump/json | 4 ++-- pkg/cli/tsdump_test.go | 6 +++++- pkg/cli/tsdump_upload.go | 29 ++++++++++++++++++++++------- pkg/cli/zip_upload.go | 7 +++---- pkg/cli/zip_upload_test.go | 2 +- 5 files changed, 33 insertions(+), 15 deletions(-) diff --git a/pkg/cli/testdata/tsdump/json b/pkg/cli/testdata/tsdump/json index 0b387b2ffd18..41d362af7c8f 100644 --- a/pkg/cli/testdata/tsdump/json +++ b/pkg/cli/testdata/tsdump/json @@ -31,7 +31,7 @@ cr.node.admission.admitted.elastic.cpu 2 1.000000 1711130560 ---- POST: https://example.com/data DD-API-KEY: api-key -Body: {"series":[{"metric":"crdb.tsdump.admission.admitted.elastic.cpu","type":0,"points":[{"timestamp":1711130470,"value":0},{"timestamp":1711130480,"value":1},{"timestamp":1711130490,"value":1},{"timestamp":1711130500,"value":1}],"resources":null,"tags":["node_id:1","cluster_type:SELF_HOSTED","job:cockroachdb","region:local","cluster_label:test-cluster","upload_id:test-cluster-1234"]}]} +Body: {"series":[{"metric":"crdb.tsdump.admission.admitted.elastic.cpu","type":0,"points":[{"timestamp":1711130470,"value":0},{"timestamp":1711130480,"value":1},{"timestamp":1711130490,"value":1},{"timestamp":1711130500,"value":1}],"resources":null,"tags":["node_id:1","cluster_type:SELF_HOSTED","cluster_label:test-cluster","upload_id:test-cluster-1234","upload_year:2024","upload_month:11","upload_day:14"]}]} POST: https://example.com/data DD-API-KEY: api-key -Body: 
{"series":[{"metric":"crdb.tsdump.admission.admitted.elastic.cpu","type":0,"points":[{"timestamp":1711130510,"value":1},{"timestamp":1711130520,"value":1},{"timestamp":1711130530,"value":1},{"timestamp":1711130540,"value":1},{"timestamp":1711130550,"value":1},{"timestamp":1711130560,"value":1}],"resources":null,"tags":["node_id:2","cluster_type:SELF_HOSTED","job:cockroachdb","region:local","cluster_label:test-cluster","upload_id:test-cluster-1234"]}]} +Body: {"series":[{"metric":"crdb.tsdump.admission.admitted.elastic.cpu","type":0,"points":[{"timestamp":1711130510,"value":1},{"timestamp":1711130520,"value":1},{"timestamp":1711130530,"value":1},{"timestamp":1711130540,"value":1},{"timestamp":1711130550,"value":1},{"timestamp":1711130560,"value":1}],"resources":null,"tags":["node_id:2","cluster_type:SELF_HOSTED","cluster_label:test-cluster","upload_id:test-cluster-1234","upload_year:2024","upload_month:11","upload_day:14"]}]} diff --git a/pkg/cli/tsdump_test.go b/pkg/cli/tsdump_test.go index b3daf87a85ee..a026e69afda5 100644 --- a/pkg/cli/tsdump_test.go +++ b/pkg/cli/tsdump_test.go @@ -17,6 +17,7 @@ import ( "strconv" "strings" "testing" + "time" "github.com/cockroachdb/cockroach/pkg/testutils" "github.com/cockroachdb/cockroach/pkg/ts/tspb" @@ -188,9 +189,12 @@ func parseDDInput(t *testing.T, input string, w *datadogWriter) { func TestTsDumpFormatsDataDriven(t *testing.T) { defer leaktest.AfterTest(t)() defer log.Scope(t).Close(t) - defer testutils.TestingHook(&newUploadID, func(cluster string) string { + defer testutils.TestingHook(&newUploadID, func(cluster string, uploadTime time.Time) string { return fmt.Sprintf("%s-1234", cluster) })() + defer testutils.TestingHook(&getCurrentTime, func() time.Time { + return time.Date(2024, 11, 14, 0, 0, 0, 0, time.UTC) + })() datadriven.Walk(t, "testdata/tsdump", func(t *testing.T, path string) { datadriven.RunTest(t, path, func(t *testing.T, d *datadriven.TestData) string { diff --git a/pkg/cli/tsdump_upload.go 
b/pkg/cli/tsdump_upload.go index d079edde9ef3..a7b78d2423ec 100644 --- a/pkg/cli/tsdump_upload.go +++ b/pkg/cli/tsdump_upload.go @@ -15,8 +15,10 @@ import ( "io" "net/http" "os" + "strconv" "strings" "sync" + "time" "github.com/cockroachdb/cockroach/pkg/base" "github.com/cockroachdb/cockroach/pkg/roachpb" @@ -50,7 +52,7 @@ var ( targetURLFormat = "https://%s/api/v2/series" datadogDashboardURLFormat = "https://us5.datadoghq.com/dashboard/bif-kwe-gx2/self-hosted-db-console-tsdump?" + - "tpl_var_cluster=%s&tpl_var_upload_id=%s&from_ts=%d&to_ts=%d" + "tpl_var_cluster=%s&tpl_var_upload_id=%s&tpl_var_upload_day=%d&tpl_var_upload_month=%d&tpl_var_upload_year=%d&from_ts=%d&to_ts=%d" zipFileSignature = []byte{0x50, 0x4B, 0x03, 0x04} ) @@ -86,7 +88,7 @@ type DatadogResp struct { Errors []string `json:"errors"` } -var newTsdumpUploadID = func() string { +var newTsdumpUploadID = func(uploadTime time.Time) string { clusterTagValue := "" if debugTimeSeriesDumpOpts.clusterLabel != "" { clusterTagValue = debugTimeSeriesDumpOpts.clusterLabel @@ -95,7 +97,7 @@ var newTsdumpUploadID = func() string { } else { clusterTagValue = fmt.Sprintf("cluster-debug-%d", timeutil.Now().Unix()) } - return newUploadID(clusterTagValue) + return newUploadID(clusterTagValue, uploadTime) } // datadogWriter can convert our metrics to Datadog format and send @@ -112,6 +114,7 @@ type datadogWriter struct { namePrefix string doRequest func(req *http.Request) error threshold int + uploadTime time.Time } func makeDatadogWriter( @@ -121,17 +124,25 @@ func makeDatadogWriter( threshold int, doRequest func(req *http.Request) error, ) *datadogWriter { + + currentTime := getCurrentTime() + return &datadogWriter{ targetURL: targetURL, - uploadID: newTsdumpUploadID(), + uploadID: newTsdumpUploadID(currentTime), init: init, apiKey: apiKey, namePrefix: "crdb.tsdump.", // Default pre-set prefix to distinguish these uploads. 
doRequest: doRequest, threshold: threshold, + uploadTime: currentTime, } } +var getCurrentTime = func() time.Time { + return timeutil.Now() +} + func doDDRequest(req *http.Request) error { resp, err := http.DefaultClient.Do(req) if err != nil { @@ -204,8 +215,6 @@ func (d *datadogWriter) emitDataDogMetrics(data []DatadogSeries) error { var tags []string // Hardcoded values tags = append(tags, "cluster_type:SELF_HOSTED") - tags = append(tags, "job:cockroachdb") - tags = append(tags, "region:local") if debugTimeSeriesDumpOpts.clusterLabel != "" { tags = append(tags, makeDDTag("cluster_label", debugTimeSeriesDumpOpts.clusterLabel)) @@ -213,6 +222,11 @@ func (d *datadogWriter) emitDataDogMetrics(data []DatadogSeries) error { tags = append(tags, makeDDTag(uploadIDTag, d.uploadID)) + year, month, day := d.uploadTime.Date() + tags = append(tags, makeDDTag("upload_year", strconv.Itoa(year))) + tags = append(tags, makeDDTag("upload_month", strconv.Itoa(int(month)))) + tags = append(tags, makeDDTag("upload_day", strconv.Itoa(day))) + for i := 0; i < len(data); i++ { data[i].Tags = append(data[i].Tags, tags...) data[i].Metric = d.namePrefix + data[i].Metric @@ -363,7 +377,8 @@ func (d *datadogWriter) upload(fileName string) error { toUnixTimestamp := timeutil.Now().UnixMilli() //create timestamp for T-30 days. 
fromUnixTimestamp := toUnixTimestamp - (30 * 24 * 60 * 60 * 1000) - dashboardLink := fmt.Sprintf(datadogDashboardURLFormat, debugTimeSeriesDumpOpts.clusterLabel, d.uploadID, fromUnixTimestamp, toUnixTimestamp) + year, month, day := d.uploadTime.Date() + dashboardLink := fmt.Sprintf(datadogDashboardURLFormat, debugTimeSeriesDumpOpts.clusterLabel, d.uploadID, day, int(month), year, fromUnixTimestamp, toUnixTimestamp) if len(errorsInDDUpload.errors) != 0 { fmt.Printf("\n%d upload errors occurred:\n%s\n", len(errorsInDDUpload.errors), strings.Join(errorsInDDUpload.errors, "\n")) diff --git a/pkg/cli/zip_upload.go b/pkg/cli/zip_upload.go index a2cb8c091118..ff2cb3d5f4cd 100644 --- a/pkg/cli/zip_upload.go +++ b/pkg/cli/zip_upload.go @@ -135,7 +135,7 @@ func runDebugZipUpload(cmd *cobra.Command, args []string) error { } // a unique ID for this upload session. This should be used to tag all the artifacts uploaded in this session - uploadID := newUploadID(debugZipUploadOpts.clusterName) + uploadID := newUploadID(debugZipUploadOpts.clusterName, timeutil.Now()) // override the list of artifacts to upload if the user has provided any artifactsToUpload := zipArtifactTypes @@ -848,9 +848,8 @@ var doUploadReq = func(req *http.Request) ([]byte, error) { // Everything is converted to lowercase and spaces are replaced with hyphens. Because, // datadog will do this anyway and we want to make sure the UUIDs match when we generate the // explore/dashboard links. 
-var newUploadID = func(cluster string) string { - currentTime := timeutil.Now() - formattedTime := currentTime.Format("20060102150405") +var newUploadID = func(cluster string, uploadTime time.Time) string { + formattedTime := uploadTime.Format("20060102150405") return strings.ToLower( strings.ReplaceAll( fmt.Sprintf("%s-%s", cluster, formattedTime), " ", "-", diff --git a/pkg/cli/zip_upload_test.go b/pkg/cli/zip_upload_test.go index e098d1ebd950..0cd50c7fadce 100644 --- a/pkg/cli/zip_upload_test.go +++ b/pkg/cli/zip_upload_test.go @@ -108,7 +108,7 @@ func setupZipDir(t *testing.T, inputs zipUploadTestContents) (string, func()) { func TestUploadZipEndToEnd(t *testing.T) { defer leaktest.AfterTest(t)() defer log.Scope(t).Close(t) - defer testutils.TestingHook(&newUploadID, func(string) string { + defer testutils.TestingHook(&newUploadID, func(string, time.Time) string { return "123" })() defer testutils.TestingHook(&newRandStr, func(l int, n bool) string {