cli: add timestamp as tag in tsdump datadog upload
Previously, we updated the `upload_id` identifier to include a timestamp as
part of the ID. This change additionally adds upload year, month & day tags as
part of the tsdump Datadog upload.

Epic: None
Part of: CRDB-44379
Release note: None
aa-joshi committed Nov 14, 2024
1 parent 39e43b8 commit 223bec0
Showing 5 changed files with 33 additions and 15 deletions.
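
For reference, a stand-alone sketch (not part of the commit) of the date-tag construction introduced here; in the actual change the tags are appended via makeDDTag inside emitDataDogMetrics (see the pkg/cli/tsdump_upload.go diff below), and the helper name used in this sketch is illustrative only:

package main

import (
	"fmt"
	"strconv"
	"time"
)

// dateTags mirrors the new tagging logic: the upload time is split into
// year, month and day and emitted as separate Datadog tags.
func dateTags(uploadTime time.Time) []string {
	year, month, day := uploadTime.Date()
	return []string{
		"upload_year:" + strconv.Itoa(year),
		"upload_month:" + strconv.Itoa(int(month)),
		"upload_day:" + strconv.Itoa(day),
	}
}

func main() {
	// For an upload on 2024-11-14 this yields the tag values asserted in
	// testdata/tsdump/json: upload_year:2024, upload_month:11, upload_day:14.
	fmt.Println(dateTags(time.Date(2024, 11, 14, 0, 0, 0, 0, time.UTC)))
}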
4 changes: 2 additions & 2 deletions pkg/cli/testdata/tsdump/json
@@ -31,7 +31,7 @@ cr.node.admission.admitted.elastic.cpu 2 1.000000 1711130560
----
POST: https://example.com/data
DD-API-KEY: api-key
Body: {"series":[{"metric":"crdb.tsdump.admission.admitted.elastic.cpu","type":0,"points":[{"timestamp":1711130470,"value":0},{"timestamp":1711130480,"value":1},{"timestamp":1711130490,"value":1},{"timestamp":1711130500,"value":1}],"resources":null,"tags":["node_id:1","cluster_type:SELF_HOSTED","job:cockroachdb","region:local","cluster_label:test-cluster","upload_id:test-cluster-1234"]}]}
Body: {"series":[{"metric":"crdb.tsdump.admission.admitted.elastic.cpu","type":0,"points":[{"timestamp":1711130470,"value":0},{"timestamp":1711130480,"value":1},{"timestamp":1711130490,"value":1},{"timestamp":1711130500,"value":1}],"resources":null,"tags":["node_id:1","cluster_type:SELF_HOSTED","cluster_label:test-cluster","upload_id:test-cluster-1234","upload_year:2024","upload_month:11","upload_day:14"]}]}
POST: https://example.com/data
DD-API-KEY: api-key
Body: {"series":[{"metric":"crdb.tsdump.admission.admitted.elastic.cpu","type":0,"points":[{"timestamp":1711130510,"value":1},{"timestamp":1711130520,"value":1},{"timestamp":1711130530,"value":1},{"timestamp":1711130540,"value":1},{"timestamp":1711130550,"value":1},{"timestamp":1711130560,"value":1}],"resources":null,"tags":["node_id:2","cluster_type:SELF_HOSTED","job:cockroachdb","region:local","cluster_label:test-cluster","upload_id:test-cluster-1234"]}]}
Body: {"series":[{"metric":"crdb.tsdump.admission.admitted.elastic.cpu","type":0,"points":[{"timestamp":1711130510,"value":1},{"timestamp":1711130520,"value":1},{"timestamp":1711130530,"value":1},{"timestamp":1711130540,"value":1},{"timestamp":1711130550,"value":1},{"timestamp":1711130560,"value":1}],"resources":null,"tags":["node_id:2","cluster_type:SELF_HOSTED","cluster_label:test-cluster","upload_id:test-cluster-1234","upload_year:2024","upload_month:11","upload_day:14"]}]}
6 changes: 5 additions & 1 deletion pkg/cli/tsdump_test.go
@@ -17,6 +17,7 @@ import (
"strconv"
"strings"
"testing"
"time"

"github.com/cockroachdb/cockroach/pkg/testutils"
"github.com/cockroachdb/cockroach/pkg/ts/tspb"
@@ -188,9 +189,12 @@ func parseDDInput(t *testing.T, input string, w *datadogWriter) {
func TestTsDumpFormatsDataDriven(t *testing.T) {
defer leaktest.AfterTest(t)()
defer log.Scope(t).Close(t)
defer testutils.TestingHook(&newUploadID, func(cluster string) string {
defer testutils.TestingHook(&newUploadID, func(cluster string, uploadTime time.Time) string {
return fmt.Sprintf("%s-1234", cluster)
})()
defer testutils.TestingHook(&getCurrentTime, func() time.Time {
return time.Date(2024, 11, 14, 0, 0, 0, 0, time.UTC)
})()

datadriven.Walk(t, "testdata/tsdump", func(t *testing.T, path string) {
datadriven.RunTest(t, path, func(t *testing.T, d *datadriven.TestData) string {
29 changes: 22 additions & 7 deletions pkg/cli/tsdump_upload.go
@@ -15,8 +15,10 @@ import (
"io"
"net/http"
"os"
"strconv"
"strings"
"sync"
"time"

"github.com/cockroachdb/cockroach/pkg/base"
"github.com/cockroachdb/cockroach/pkg/roachpb"
@@ -50,7 +52,7 @@

targetURLFormat = "https://%s/api/v2/series"
datadogDashboardURLFormat = "https://us5.datadoghq.com/dashboard/bif-kwe-gx2/self-hosted-db-console-tsdump?" +
"tpl_var_cluster=%s&tpl_var_upload_id=%s&from_ts=%d&to_ts=%d"
"tpl_var_cluster=%s&tpl_var_upload_id=%s&tpl_var_upload_day=%d&tpl_var_upload_month=%d&tpl_var_upload_year=%d&from_ts=%d&to_ts=%d"
zipFileSignature = []byte{0x50, 0x4B, 0x03, 0x04}
)

@@ -86,7 +88,7 @@ type DatadogResp struct {
Errors []string `json:"errors"`
}

var newTsdumpUploadID = func() string {
var newTsdumpUploadID = func(uploadTime time.Time) string {
clusterTagValue := ""
if debugTimeSeriesDumpOpts.clusterLabel != "" {
clusterTagValue = debugTimeSeriesDumpOpts.clusterLabel
@@ -95,7 +97,7 @@ var newTsdumpUploadID = func() string {
} else {
clusterTagValue = fmt.Sprintf("cluster-debug-%d", timeutil.Now().Unix())
}
return newUploadID(clusterTagValue)
return newUploadID(clusterTagValue, uploadTime)
}

// datadogWriter can convert our metrics to Datadog format and send
@@ -112,6 +114,7 @@ type datadogWriter struct {
namePrefix string
doRequest func(req *http.Request) error
threshold int
uploadTime time.Time
}

func makeDatadogWriter(
@@ -121,17 +124,25 @@
threshold int,
doRequest func(req *http.Request) error,
) *datadogWriter {

currentTime := getCurrentTime()

return &datadogWriter{
targetURL: targetURL,
uploadID: newTsdumpUploadID(),
uploadID: newTsdumpUploadID(currentTime),
init: init,
apiKey: apiKey,
namePrefix: "crdb.tsdump.", // Default pre-set prefix to distinguish these uploads.
doRequest: doRequest,
threshold: threshold,
uploadTime: currentTime,
}
}

var getCurrentTime = func() time.Time {
return timeutil.Now()
}

func doDDRequest(req *http.Request) error {
resp, err := http.DefaultClient.Do(req)
if err != nil {
@@ -204,15 +215,18 @@ func (d *datadogWriter) emitDataDogMetrics(data []DatadogSeries) error {
var tags []string
// Hardcoded values
tags = append(tags, "cluster_type:SELF_HOSTED")
tags = append(tags, "job:cockroachdb")
tags = append(tags, "region:local")

if debugTimeSeriesDumpOpts.clusterLabel != "" {
tags = append(tags, makeDDTag("cluster_label", debugTimeSeriesDumpOpts.clusterLabel))
}

tags = append(tags, makeDDTag(uploadIDTag, d.uploadID))

year, month, day := timeutil.Now().Date()
tags = append(tags, makeDDTag("upload_year", strconv.Itoa(year)))
tags = append(tags, makeDDTag("upload_month", strconv.Itoa(int(month))))
tags = append(tags, makeDDTag("upload_day", strconv.Itoa(day)))

for i := 0; i < len(data); i++ {
data[i].Tags = append(data[i].Tags, tags...)
data[i].Metric = d.namePrefix + data[i].Metric
@@ -363,7 +377,8 @@ func (d *datadogWriter) upload(fileName string) error {
toUnixTimestamp := timeutil.Now().UnixMilli()
//create timestamp for T-30 days.
fromUnixTimestamp := toUnixTimestamp - (30 * 24 * 60 * 60 * 1000)
dashboardLink := fmt.Sprintf(datadogDashboardURLFormat, debugTimeSeriesDumpOpts.clusterLabel, d.uploadID, fromUnixTimestamp, toUnixTimestamp)
year, month, day := d.uploadTime.Date()
dashboardLink := fmt.Sprintf(datadogDashboardURLFormat, debugTimeSeriesDumpOpts.clusterLabel, d.uploadID, day, int(month), year, fromUnixTimestamp, toUnixTimestamp)

if len(errorsInDDUpload.errors) != 0 {
fmt.Printf("\n%d upload errors occurred:\n%s\n", len(errorsInDDUpload.errors), strings.Join(errorsInDDUpload.errors, "\n"))
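
With the new format string, the dashboard link assembled in upload() now also carries the upload day/month/year template variables. A rough sketch of the resulting link (the format string is copied from datadogDashboardURLFormat above; the cluster label, upload ID and timestamps are sample values):

package main

import "fmt"

const dashboardURLFormat = "https://us5.datadoghq.com/dashboard/bif-kwe-gx2/self-hosted-db-console-tsdump?" +
	"tpl_var_cluster=%s&tpl_var_upload_id=%s&tpl_var_upload_day=%d&tpl_var_upload_month=%d&tpl_var_upload_year=%d&from_ts=%d&to_ts=%d"

func main() {
	// day, month, year come from d.uploadTime.Date(); the two timestamps span
	// the trailing 30 days in Unix milliseconds, as in upload().
	link := fmt.Sprintf(dashboardURLFormat,
		"test-cluster", "test-cluster-1234",
		14, 11, 2024,
		int64(1728864000000), int64(1731456000000),
	)
	fmt.Println(link)
}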
7 changes: 3 additions & 4 deletions pkg/cli/zip_upload.go
@@ -135,7 +135,7 @@ func runDebugZipUpload(cmd *cobra.Command, args []string) error {
}

// a unique ID for this upload session. This should be used to tag all the artifacts uploaded in this session
uploadID := newUploadID(debugZipUploadOpts.clusterName)
uploadID := newUploadID(debugZipUploadOpts.clusterName, timeutil.Now())

// override the list of artifacts to upload if the user has provided any
artifactsToUpload := zipArtifactTypes
@@ -848,9 +848,8 @@ var doUploadReq = func(req *http.Request) ([]byte, error) {
// Everything is converted to lowercase and spaces are replaced with hyphens. Because,
// datadog will do this anyway and we want to make sure the UUIDs match when we generate the
// explore/dashboard links.
var newUploadID = func(cluster string) string {
currentTime := timeutil.Now()
formattedTime := currentTime.Format("20060102150405")
var newUploadID = func(cluster string, uploadTime time.Time) string {
formattedTime := uploadTime.Format("20060102150405")
return strings.ToLower(
strings.ReplaceAll(
fmt.Sprintf("%s-%s", cluster, formattedTime), " ", "-",
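
Since `newUploadID` now takes the upload time as an argument, its output is fully determined by its inputs. A self-contained sketch of the behavior shown in the diff (the sample cluster name and time are made up):

package main

import (
	"fmt"
	"strings"
	"time"
)

// newUploadID lowercases the cluster name, replaces spaces with hyphens and
// appends the caller-supplied upload time formatted as yyyymmddhhmmss.
func newUploadID(cluster string, uploadTime time.Time) string {
	formattedTime := uploadTime.Format("20060102150405")
	return strings.ToLower(strings.ReplaceAll(
		fmt.Sprintf("%s-%s", cluster, formattedTime), " ", "-"))
}

func main() {
	t := time.Date(2024, 11, 14, 9, 30, 0, 0, time.UTC)
	fmt.Println(newUploadID("Test Cluster", t)) // test-cluster-20241114093000
}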
2 changes: 1 addition & 1 deletion pkg/cli/zip_upload_test.go
@@ -108,7 +108,7 @@ func setupZipDir(t *testing.T, inputs zipUploadTestContents) (string, func()) {
func TestUploadZipEndToEnd(t *testing.T) {
defer leaktest.AfterTest(t)()
defer log.Scope(t).Close(t)
defer testutils.TestingHook(&newUploadID, func(string) string {
defer testutils.TestingHook(&newUploadID, func(string, time.Time) string {
return "123"
})()
defer testutils.TestingHook(&newRandStr, func(l int, n bool) string {
