Commit 36f28fc
laurentsimon committed Mar 18, 2022
1 parent c581b35 commit 36f28fc
Showing 8 changed files with 33 additions and 41 deletions.
24 changes: 12 additions & 12 deletions cron/config/config.go
@@ -51,8 +51,8 @@ const (
bigqueryTableV2 string = "SCORECARD_BIGQUERY_TABLEV2"
resultDataBucketURLV2 string = "SCORECARD_DATA_BUCKET_URLV2"
// v0 raw results.
- rawBigqueryTableV0 string = "RAW_SCORECARD_BIGQUERY_TABLEV0"
- rawResultDataBucketURLV0 string = "RAW_SCORECARD_DATA_BUCKET_URLV0"
+ rawBigqueryTable string = "RAW_SCORECARD_BIGQUERY_TABLE"
+ rawResultDataBucketURL string = "RAW_SCORECARD_DATA_BUCKET_URL"
)

var (
@@ -82,8 +82,8 @@ type config struct {
ResultDataBucketURLV2 string `yaml:"result-data-bucket-url-v2"`
BigQueryTableV2 string `yaml:"bigquery-table-v2"`
// Raw results v0.
- RawResultDataBucketURLV0 string `yaml:"raw-result-data-bucket-url-v0"`
- RawBigQueryTableV0 string `yaml:"raw-bigquery-table-v0"`
+ RawResultDataBucketURL string `yaml:"raw-result-data-bucket-url"`
+ RawBigQueryTable string `yaml:"raw-bigquery-table"`
}

func getParsedConfigFromFile(byteValue []byte) (config, error) {
@@ -202,16 +202,16 @@ func GetResultDataBucketURLV2() (string, error) {
return getStringConfigValue(resultDataBucketURLV2, configYAML, "ResultDataBucketURLV2", "result-data-bucket-url-v2")
}

- // GetRawBigQueryTableV0 returns the table name to transfer cron job results.
- func GetRawBigQueryTableV0() (string, error) {
- return getStringConfigValue(rawBigqueryTableV0, configYAML,
- "RawBigQueryTableV0", "raw-bigquery-table-v0")
+ // GetRawBigQueryTable returns the table name to transfer cron job results.
+ func GetRawBigQueryTable() (string, error) {
+ return getStringConfigValue(rawBigqueryTable, configYAML,
+ "RawBigQueryTable", "raw-bigquery-table")
}

- // GetRawResultDataBucketURLV0 returns the bucketURL for storing cron job's raw results.
- func GetRawResultDataBucketURLV0() (string, error) {
- return getStringConfigValue(rawResultDataBucketURLV0, configYAML,
- "RawResultDataBucketURLV0", "raw-result-data-bucket-url-v0")
+ // GetRawResultDataBucketURL returns the bucketURL for storing cron job's raw results.
+ func GetRawResultDataBucketURL() (string, error) {
+ return getStringConfigValue(rawResultDataBucketURL, configYAML,
+ "RawResultDataBucketURL", "raw-result-data-bucket-url")
}

// GetShardSize returns the shard_size for the cron job.
8 changes: 4 additions & 4 deletions cron/config/config_test.go
@@ -40,8 +40,8 @@ const (
prodBucketV2 = "gs://ossf-scorecard-data2"
prodBigQueryTableV2 = "scorecard-v2"
// v0 raw results.
- prodRawBucketV0 = "gs://ossf-scorecard-rawdata-v0"
- prodRawBigQueryTableV0 = "scorecard-rawdata-v0"
+ prodRawBucket = "gs://ossf-scorecard-rawdata"
+ prodRawBigQueryTable = "scorecard-rawdata"
)

func getByteValueFromFile(filename string) ([]byte, error) {
@@ -78,8 +78,8 @@ func TestYAMLParsing(t *testing.T) {
// UPGRADEv2: to remove.
ResultDataBucketURLV2: prodBucketV2,
BigQueryTableV2: prodBigQueryTableV2,
- RawResultDataBucketURLV0: prodRawBucketV0,
- RawBigQueryTableV0: prodRawBigQueryTableV0,
+ RawResultDataBucketURL: prodRawBucket,
+ RawBigQueryTable: prodRawBigQueryTable,
},
},

8 changes: 4 additions & 4 deletions cron/controller/main.go
@@ -123,7 +123,7 @@ func main() {
panic(err)
}

- rawBucketV0, err := config.GetRawResultDataBucketURLV0()
+ rawBucket, err := config.GetRawResultDataBucketURL()
if err != nil {
panic(err)
}
@@ -162,12 +162,12 @@ func main() {
}

// Raw data v0.
- *metadata.ShardLoc = rawBucketV0 + "/" + data.GetBlobFilename("", t)
+ *metadata.ShardLoc = rawBucket + "/" + data.GetBlobFilename("", t)
metadataJSON, err = protojson.Marshal(&metadata)
if err != nil {
- panic(fmt.Errorf("error during protojson.Marshal raw-v0: %w", err))
+ panic(fmt.Errorf("error during protojson.Marshal raw: %w", err))
}
- err = data.WriteToBlobStore(ctx, rawBucketV0, data.GetShardMetadataFilename(t), metadataJSON)
+ err = data.WriteToBlobStore(ctx, rawBucket, data.GetShardMetadataFilename(t), metadataJSON)
if err != nil {
panic(fmt.Errorf("error writing to BlobStore raw v0: %w", err))
}
9 changes: 1 addition & 8 deletions cron/k8s/controller.release.yaml
@@ -62,15 +62,8 @@ spec:
# UPGRADEv2: to remove.
- name: SCORECARD_DATA_BUCKET_URLV2
value: "gs://ossf-scorecard-data-releasetest2"
- - name: SCORECARD_SHARD_SIZE
- value: "5"
- resources:
- limits:
- memory: 1Gi
- requests:
- memory: 1Gi
# Raw results v0.
- - name: RAW_SCORECARD_DATA_BUCKET_URLV0
+ - name: RAW_SCORECARD_DATA_BUCKET_URL
value: "gs://ossf-scorecard-rawdata-releasetest-v0"
- name: SCORECARD_SHARD_SIZE
value: "5"
4 changes: 2 additions & 2 deletions cron/k8s/raw-transfer-v0.yaml → cron/k8s/transfer-raw.yaml
@@ -34,8 +34,8 @@ spec:
requests:
memory: 1Gi
env:
- - name: RAW_SCORECARD_BIGQUERY_TABLEV0
+ - name: RAW_SCORECARD_BIGQUERY_TABLE
value: "scorecard-rawdata-v0"
- - name: RAW_SCORECARD_DATA_BUCKET_URLV0
+ - name: RAW_SCORECARD_DATA_BUCKET_URL
value: "gs://ossf-scorecard-rawdata-v0"
restartPolicy: OnFailure
4 changes: 2 additions & 2 deletions (file path not shown)
@@ -30,9 +30,9 @@ spec:
image: gcr.io/openssf/scorecard-bq-transfer:latest
imagePullPolicy: Always
env:
- - name: RAW_SCORECARD_DATA_BUCKET_URLV0
+ - name: RAW_SCORECARD_DATA_BUCKET_URL
value: "gs://ossf-scorecard-rawdata-releasetest-v0"
- - name: RAW_SCORECARD_BIGQUERY_TABLEV0
+ - name: RAW_SCORECARD_BIGQUERY_TABLE
value: "scorecard_raw-releasetest-v0"
- name: SCORECARD_COMPLETION_THRESHOLD
value: "0.9"
2 changes: 1 addition & 1 deletion cron/k8s/worker.release.yaml
@@ -37,7 +37,7 @@ spec:
# UPGRADEv2: to remove.
- name: SCORECARD_DATA_BUCKET_URLV2
value: "gs://ossf-scorecard-data-releasetest2"
- - name: RAW_SCORECARD_DATA_BUCKET_URLV0
+ - name: RAW_SCORECARD_DATA_BUCKET_URL
value: "gs://ossf-scorecard-rawdata-releasetest-v0"
- name: SCORECARD_REQUEST_SUBSCRIPTION_URL
value: "gcppubsub://projects/openssf/subscriptions/scorecard-batch-worker-releasetest"
15 changes: 7 additions & 8 deletions cron/worker/main.go
@@ -48,7 +48,7 @@ var ignoreRuntimeErrors = flag.Bool("ignoreRuntimeErrors", false, "if set to tru
// nolint: gocognit
func processRequest(ctx context.Context,
batchRequest *data.ScorecardBatchRequest,
- blacklistedChecks []string, bucketURL, bucketURL2, rawBucketURLV0 string,
+ blacklistedChecks []string, bucketURL, bucketURL2, rawBucketURL string,
checkDocs docs.Doc,
repoClient clients.RepoClient, ossFuzzRepoClient clients.RepoClient,
ciiClient clients.CIIBestPracticesClient,
@@ -69,7 +69,7 @@ func processRequest(ctx context.Context,
return fmt.Errorf("error during BlobExists: %w", err)
}

- exists3, err := data.BlobExists(ctx, rawBucketURLV0, filename)
+ exists3, err := data.BlobExists(ctx, rawBucketURL, filename)
if err != nil {
return fmt.Errorf("error during BlobExists: %w", err)
}
@@ -82,7 +82,7 @@ func processRequest(ctx context.Context,

var buffer bytes.Buffer
var buffer2 bytes.Buffer
- var rawBufferV0 bytes.Buffer
+ var rawBuffer bytes.Buffer
// TODO: run Scorecard for each repo in a separate thread.
for _, repoReq := range batchRequest.GetRepos() {
logger.Info(fmt.Sprintf("Running Scorecard for repo: %s", *repoReq.Url))
@@ -139,10 +139,9 @@ func processRequest(ctx context.Context,
}

// Raw result v0.
- if err := format.AsRawJSON(&result, &rawBufferV0); err != nil {
+ if err := format.AsRawJSON(&result, &rawBuffer); err != nil {
return fmt.Errorf("error during result.AsRawJSON: %w", err)
}

}
if err := data.WriteToBlobStore(ctx, bucketURL, filename, buffer.Bytes()); err != nil {
return fmt.Errorf("error during WriteToBlobStore: %w", err)
@@ -153,7 +152,7 @@
}

// Raw result v0.
- if err := data.WriteToBlobStore(ctx, rawBucketURLV0, filename, rawBufferV0.Bytes()); err != nil {
+ if err := data.WriteToBlobStore(ctx, rawBucketURL, filename, rawBuffer.Bytes()); err != nil {
return fmt.Errorf("error during WriteToBlobStore2: %w", err)
}

@@ -210,7 +209,7 @@ func main() {
panic(err)
}

- rawBucketURLV0, err := config.GetRawResultDataBucketURLV0()
+ rawBucketURL, err := config.GetRawResultDataBucketURL()
if err != nil {
panic(err)
}
@@ -260,7 +259,7 @@ func main() {
break
}
if err := processRequest(ctx, req, blacklistedChecks,
- bucketURL, bucketURL2, rawBucketURLV0, checkDocs,
+ bucketURL, bucketURL2, rawBucketURL, checkDocs,
repoClient, ossFuzzRepoClient, ciiClient, vulnsClient, logger); err != nil {
// TODO(log): Previously Warn. Consider logging an error here.
logger.Info(fmt.Sprintf("error processing request: %v", err))
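
For illustration, here is a minimal sketch of how a caller consumes the renamed getters after this commit. It simply mirrors the call sites updated in cron/controller/main.go and cron/worker/main.go above; the import path github.com/ossf/scorecard/v4/cron/config is an assumption, not part of this diff.

```go
package main

import (
	"fmt"

	// Assumed import path for the cron config package shown in this commit.
	"github.com/ossf/scorecard/v4/cron/config"
)

func main() {
	// Renamed from config.GetRawResultDataBucketURLV0 in this commit.
	rawBucketURL, err := config.GetRawResultDataBucketURL()
	if err != nil {
		panic(err)
	}

	// Renamed from config.GetRawBigQueryTableV0 in this commit.
	rawTable, err := config.GetRawBigQueryTable()
	if err != nil {
		panic(err)
	}

	fmt.Printf("raw results bucket: %s, BigQuery table: %s\n", rawBucketURL, rawTable)
}
```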