add linter and fix faults
psycofdj committed Apr 25, 2023
1 parent 4acd226 commit 80a716b
Showing 30 changed files with 445 additions and 379 deletions.
File renamed without changes.
22 changes: 22 additions & 0 deletions .github/workflows/linter.yml
@@ -0,0 +1,22 @@
name: linter

on: push

jobs:
  linter:
    runs-on: ubuntu-latest
    steps:
      - name: checkout
        uses: actions/checkout@v3
        with:
          fetch-depth: 0

      - name: set up go
        uses: actions/setup-go@v4
        with:
          go-version: ">=1.20"

      - name: golangci-lint
        uses: golangci/golangci-lint-action@v3
        with:
          args: --config .golangci.yml
35 changes: 35 additions & 0 deletions .golangci.yml
@@ -0,0 +1,35 @@
run:
  timeout: 30m
output:
  format: line-number
linters:
  disable-all: false
  enable:
    - bodyclose
    - depguard
    - dogsled
    #- dupl
    - errcheck
    - exportloopref
    #- funlen
    - gocognit
    - goconst
    - gocritic
    - godox
    - gofmt
    - goimports
    - gosec
    - gosimple
    - govet
    - ineffassign
    - misspell
    - nakedret
    - revive
    - staticcheck
    - stylecheck
    - typecheck
    - unconvert
    - unparam
    - unused
    - whitespace
    - reassign
12 changes: 12 additions & 0 deletions Makefile
@@ -0,0 +1,12 @@
all: test

test:
	@go test -v ./...

check:
	@golangci-lint run --config .golangci.yml

coverage:
	@go test -cover -coverprofile cover.out -v ./...
	@go tool cover -func=cover.out
	@rm -f cover.out
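
For local development the same checks are available without CI: make check runs golangci-lint with the repository's .golangci.yml (mirroring the workflow above), and make coverage prints a per-function coverage summary before removing the temporary cover.out profile.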
14 changes: 7 additions & 7 deletions collectors/raw_metrics_collector.go
@@ -2,17 +2,18 @@ package collectors

import (
"compress/gzip"
"io"
"net/http"
"strings"
"sync"
"time"

"github.com/bosh-prometheus/firehose_exporter/metrics"
"github.com/gogo/protobuf/proto"
"github.com/prometheus/client_golang/prometheus"
dto "github.com/prometheus/client_model/go"
"github.com/prometheus/common/expfmt"
log "github.com/sirupsen/logrus"
"io"
"net/http"
"strings"
"sync"
"time"
)

var gzipPool = sync.Pool{
@@ -32,7 +33,6 @@ func NewRawMetricsCollector(
pointBuffer chan []*metrics.RawMetric,
metricExpireIn time.Duration,
) *RawMetricsCollector {

return &RawMetricsCollector{
pointBuffer: pointBuffer,
metricStore: &sync.Map{},
@@ -46,7 +46,7 @@ func (c *RawMetricsCollector) Collect() {
for _, point := range points {
smapMetric, _ := c.metricStore.LoadOrStore(point.MetricName(), &sync.Map{})
point.ExpireIn(c.metricExpireIn)
smapMetric.(*sync.Map).Store(point.Id(), point)
smapMetric.(*sync.Map).Store(point.ID(), point)
}
}
}
39 changes: 20 additions & 19 deletions firehose_exporter.go
@@ -8,21 +8,19 @@ import (
"time"

"code.cloudfoundry.org/go-loggregator/v8"
"github.com/alecthomas/kingpin/v2"
"github.com/bosh-prometheus/firehose_exporter/collectors"
"github.com/bosh-prometheus/firehose_exporter/metricmaker"
"github.com/bosh-prometheus/firehose_exporter/metrics"
"github.com/bosh-prometheus/firehose_exporter/nozzle"
"github.com/prometheus/common/version"
log "github.com/sirupsen/logrus"
"github.com/alecthomas/kingpin/v2"
)

var (
retroCompatDisable = kingpin.Flag("retro_compat.disable", "Disable retro compatibility",
).Envar("FIREHOSE_EXPORTER_RETRO_COMPAT_DISABLE").Default("false").Bool()
retroCompatDisable = kingpin.Flag("retro_compat.disable", "Disable retro compatibility").Envar("FIREHOSE_EXPORTER_RETRO_COMPAT_DISABLE").Default("false").Bool()

enableRetroCompatDelta = kingpin.Flag("retro_compat.enable_delta", "Enable retro compatibility delta in counter",
).Envar("FIREHOSE_EXPORTER_RETRO_COMPAT_ENABLE_DELTA").Default("false").Bool()
enableRetroCompatDelta = kingpin.Flag("retro_compat.enable_delta", "Enable retro compatibility delta in counter").Envar("FIREHOSE_EXPORTER_RETRO_COMPAT_ENABLE_DELTA").Default("false").Bool()

loggingURL = kingpin.Flag(
"logging.url", "Cloud Foundry Logging endpoint ($FIREHOSE_EXPORTER_LOGGING_URL)",
@@ -48,7 +46,7 @@ var (
"metrics.batch_size", "Batch size for nozzle envelop buffer ($FIREHOSE_EXPORTER_METRICS_NAMESPACE)",
).Envar("FIREHOSE_EXPORTER_METRICS_BATCH_SIZE").Default("-1").Int()

metricsShardId = kingpin.Flag(
metricsShardID = kingpin.Flag(
"metrics.shard_id", "The sharding group name to use for egress from RLP ($FIREHOSE_EXPORTER_SHARD_ID)",
).Envar("FIREHOSE_EXPORTER_SHARD_ID").Default("firehose_exporter").String()

@@ -104,14 +102,11 @@ var (
"web.tls.key_file", "Path to a file that contains the TLS private key (PEM format) ($FIREHOSE_EXPORTER_WEB_TLS_KEYFILE)",
).Envar("FIREHOSE_EXPORTER_WEB_TLS_KEYFILE").ExistingFile()

enableProfiler = kingpin.Flag("profiler.enable", "Enable pprof profiling on app on /debug/pprof",
).Envar("FIREHOSE_EXPORTER_ENABLE_PROFILER").Default("false").Bool()
enableProfiler = kingpin.Flag("profiler.enable", "Enable pprof profiling on app on /debug/pprof").Envar("FIREHOSE_EXPORTER_ENABLE_PROFILER").Default("false").Bool()

logLevel = kingpin.Flag("log.level", "Only log messages with the given severity or above. Valid levels: [debug, info, warn, error, fatal]",
).Envar("FIREHOSE_EXPORTER_LOG_LEVEL").Default("info").String()
logLevel = kingpin.Flag("log.level", "Only log messages with the given severity or above. Valid levels: [debug, info, warn, error, fatal]").Envar("FIREHOSE_EXPORTER_LOG_LEVEL").Default("info").String()

logInJson = kingpin.Flag("log.in_json", "Log in json",
).Envar("FIREHOSE_EXPORTER_LOG_IN_JSON").Default("false").Bool()
logInJSON = kingpin.Flag("log.in_json", "Log in json").Envar("FIREHOSE_EXPORTER_LOG_IN_JSON").Default("false").Bool()
)

type basicAuthHandler struct {
@@ -129,7 +124,6 @@ func (h *basicAuthHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
return
}
h.handler(w, r)
return
}

func initLog() {
@@ -138,7 +132,7 @@ func initLog() {
log.Panic(err.Error())
}
log.SetLevel(logLvl)
if *logInJson {
if *logInJSON {
log.SetFormatter(&log.JSONFormatter{})
}
}
@@ -154,7 +148,6 @@ func initMetricMaker() {
} else {
metricmaker.PrependMetricConverter(metricmaker.SuffixCounterWithTotal)
}

}

func MakeStreamer() (*loggregator.EnvelopeStreamConnector, error) {
@@ -210,7 +203,7 @@ func main() {
im := metrics.NewInternalMetrics(*metricsNamespace, *metricsEnvironment)
nozz := nozzle.NewNozzle(
streamer,
*metricsShardId,
*metricsShardID,
*metricsNodeIndex,
pointBuffer,
im,
@@ -249,7 +242,7 @@ func main() {
router.Handle("/debug/vars", expvar.Handler())
}
router.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) {
w.Write([]byte(`<html>
_, _ = w.Write([]byte(`<html>
<head><title>Cloud Foundry Firehose Exporter</title></head>
<body>
<h1>Cloud Foundry Firehose Exporter</h1>
@@ -258,13 +251,21 @@ func main() {
</html>`))
})

server := &http.Server{
Addr: *listenAddress,
ReadTimeout: time.Second * 5,
ReadHeaderTimeout: time.Second * 10,
}

if *tlsCertFile != "" && *tlsKeyFile != "" {
log.Infoln("Listening TLS on", *listenAddress)
log.Fatal(http.ListenAndServeTLS(*listenAddress, *tlsCertFile, *tlsKeyFile, router))
err = server.ListenAndServeTLS(*tlsCertFile, *tlsKeyFile)
} else {
log.Infoln("Listening on", *listenAddress)
log.Fatal(http.ListenAndServe(*listenAddress, router))
err = server.ListenAndServe()
}

log.Fatal(err)
}

func prometheusHandler(collector *collectors.RawMetricsCollector) http.Handler {
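
Replacing http.ListenAndServe with an explicit *http.Server is what makes the read timeouts above possible. A minimal, self-contained sketch of the same pattern (the port and handler here are illustrative, not taken from the exporter):

package main

import (
	"log"
	"net/http"
	"time"
)

func main() {
	router := http.NewServeMux()
	router.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) {
		_, _ = w.Write([]byte("ok")) // ignore the write error, as the exporter does
	})

	// An explicit server lets us bound how long a client may take to send
	// its headers and body; the plain http.ListenAndServe helper offers no
	// such knobs, which is what linters such as gosec flag.
	server := &http.Server{
		Addr:              ":9186",
		Handler:           router,
		ReadTimeout:       5 * time.Second,
		ReadHeaderTimeout: 10 * time.Second,
	}
	log.Fatal(server.ListenAndServe())
}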
2 changes: 1 addition & 1 deletion metricmaker/converters.go
@@ -33,7 +33,7 @@ func OrderAndSanitizeLabels(metric *metrics.RawMetric) {
continue
}
if strings.Contains(label.GetName(), "-") {
label.Name = proto.String(strings.Replace(label.GetName(), "-", "_", -1))
label.Name = proto.String(strings.ReplaceAll(label.GetName(), "-", "_"))
}
labels = append(labels, label)
}
1 change: 0 additions & 1 deletion metricmaker/metricmaker_suite_test.go
@@ -11,4 +11,3 @@ func TestMetricmaker(t *testing.T) {
RegisterFailHandler(Fail)
RunSpecs(t, "Metricate Suite")
}

4 changes: 2 additions & 2 deletions metricmaker/metricmaker_test.go
@@ -105,11 +105,11 @@ var _ = Describe("MetricMaker", func() {
Message: &loggregator_v2.Envelope_Gauge{
Gauge: &loggregator_v2.Gauge{
Metrics: map[string]*loggregator_v2.GaugeValue{
"my_metric_1": &loggregator_v2.GaugeValue{
"my_metric_1": {
Unit: "bytes",
Value: 1,
},
"my_metric_2": &loggregator_v2.GaugeValue{
"my_metric_2": {
Unit: "bytes",
Value: 1,
},
8 changes: 4 additions & 4 deletions metrics/consts.go
@@ -1,8 +1,8 @@
package metrics

const (
GorouterHttpMetricName = "http"
GorouterHttpCounterMetricName = GorouterHttpMetricName + "_total"
GorouterHttpHistogramMetricName = GorouterHttpMetricName + "_duration_seconds"
GorouterHttpSummaryMetricName = GorouterHttpMetricName + "_response_size_bytes"
GorouterHTTPMetricName = "http"
GorouterHTTPCounterMetricName = GorouterHTTPMetricName + "_total"
GorouterHTTPHistogramMetricName = GorouterHTTPMetricName + "_duration_seconds"
GorouterHTTPSummaryMetricName = GorouterHTTPMetricName + "_response_size_bytes"
)
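
The renames here are identifier-only: the exposed metric names stay http_total (counter), http_duration_seconds (histogram) and http_response_size_bytes (summary), since the string values concatenated from GorouterHTTPMetricName are unchanged.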
8 changes: 4 additions & 4 deletions metrics/internal_metrics.go
@@ -20,8 +20,8 @@ type InternalMetrics struct {
LastCounterEventReceivedTimestamp prometheus.Gauge
TotalValueMetricsReceived prometheus.Counter
LastValueMetricReceivedTimestamp prometheus.Gauge
TotalHttpMetricsReceived prometheus.Counter
LastHttpMetricReceivedTimestamp prometheus.Gauge
TotalHTTPMetricsReceived prometheus.Counter
LastHTTPMetricReceivedTimestamp prometheus.Gauge
}

func NewInternalMetrics(namespace string, environment string) *InternalMetrics {
@@ -109,7 +109,7 @@ func NewInternalMetrics(namespace string, environment string) *InternalMetrics {
},
)

im.TotalHttpMetricsReceived = promauto.NewCounter(
im.TotalHTTPMetricsReceived = promauto.NewCounter(
prometheus.CounterOpts{
Namespace: namespace,
Subsystem: "",
@@ -118,7 +118,7 @@ func NewInternalMetrics(namespace string, environment string) *InternalMetrics {
ConstLabels: prometheus.Labels{"environment": environment},
},
)
im.LastHttpMetricReceivedTimestamp = promauto.NewGauge(
im.LastHTTPMetricReceivedTimestamp = promauto.NewGauge(
prometheus.GaugeOpts{
Namespace: namespace,
Subsystem: "",
6 changes: 3 additions & 3 deletions metrics/raw_metric.go
@@ -121,7 +121,7 @@ func (r *RawMetric) EstimateMetricSize() (size int) {
return size
}

func (r *RawMetric) Id() uint64 {
func (r *RawMetric) ID() uint64 {
if r.id != 0 {
return r.id
}
@@ -131,8 +131,8 @@ func (r *RawMetric) Id() uint64 {
if label.GetName() == model.MetricNameLabel {
continue
}
xxh.WriteString("$" + label.GetName() + "$" + label.GetValue())
xxh.Write(separatorByteSlice)
_, _ = xxh.WriteString("$" + label.GetName() + "$" + label.GetValue())
_, _ = xxh.Write(separatorByteSlice)
}
r.id = xxh.Sum64()
return r.id
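
The method renamed to ID() keeps the same hashing scheme: every label except the metric name is folded into an xxHash digest, so two points carrying the same label set map to the same entry in the collector's store. A standalone sketch of the idea, assuming github.com/cespare/xxhash/v2 for the digest (the actual library and separator byte are not shown in this diff, and the real method walks the metric's already-ordered label pairs rather than sorting a map):

package main

import (
	"fmt"
	"sort"

	"github.com/cespare/xxhash/v2"
)

// labelID folds label name/value pairs into a single uint64. The special
// __name__ label is skipped and a separator byte is written after each pair
// so adjacent values cannot run together and collide.
func labelID(labels map[string]string) uint64 {
	// Hash in a deterministic order so equal label sets always yield the same ID.
	names := make([]string, 0, len(labels))
	for name := range labels {
		if name == "__name__" {
			continue
		}
		names = append(names, name)
	}
	sort.Strings(names)

	xxh := xxhash.New()
	for _, name := range names {
		_, _ = xxh.WriteString("$" + name + "$" + labels[name])
		_, _ = xxh.Write([]byte{0xff})
	}
	return xxh.Sum64()
}

func main() {
	fmt.Println(labelID(map[string]string{"origin": "gorouter", "deployment": "cf"}))
}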
4 changes: 2 additions & 2 deletions metrics/raw_metric_test.go
@@ -69,8 +69,8 @@ var _ = Describe("RawMetric", func() {
}),
})

Expect(m1.Id()).To(Equal(m2.Id()))
Expect(m1.Id()).ToNot(Equal(m3.Id()))
Expect(m1.ID()).To(Equal(m2.ID()))
Expect(m1.ID()).ToNot(Equal(m3.ID()))
})
})
})
24 changes: 12 additions & 12 deletions nozzle/filter_selector.go
@@ -9,13 +9,13 @@ import (
type FilterSelectorType int32

const (
FilterSelectorType_CONTAINER_METRIC FilterSelectorType = 0
FilterSelectorType_COUNTER_EVENT FilterSelectorType = 1
FilterSelectorType_HTTP_START_STOP FilterSelectorType = 2
FilterSelectorType_VALUE_METRIC FilterSelectorType = 3
FilterSelectorTypeContainerMetric FilterSelectorType = 0
FilterSelectorTypeCounterEvent FilterSelectorType = 1
FilterSelectorTypeHTTPStartStop FilterSelectorType = 2
FilterSelectorTypeValueMetric FilterSelectorType = 3
)

var FilterSelectorType_value = map[string]int32{
var FilterSelectorTypeValue = map[string]int32{
"containermetric": 0,
"counterevent": 1,
"httpstartstop": 2,
@@ -60,7 +60,7 @@ func (f FilterSelector) ValueMetricDisabled() bool {
return f.valueMetricDisabled
}

func (f FilterSelector) HttpStartStopDisabled() bool {
func (f FilterSelector) HTTPStartStopDisabled() bool {
return f.httpStartStopDisabled
}

@@ -79,13 +79,13 @@ func (f FilterSelector) AllGaugeDisabled() bool {
func (f *FilterSelector) Filters(filterSelectorTypes ...FilterSelectorType) {
for _, filterSelectorType := range filterSelectorTypes {
switch filterSelectorType {
case FilterSelectorType_CONTAINER_METRIC:
case FilterSelectorTypeContainerMetric:
f.containerMetricDisabled = false
case FilterSelectorType_COUNTER_EVENT:
case FilterSelectorTypeCounterEvent:
f.counterEventDisabled = false
case FilterSelectorType_HTTP_START_STOP:
case FilterSelectorTypeHTTPStartStop:
f.httpStartStopDisabled = false
case FilterSelectorType_VALUE_METRIC:
case FilterSelectorTypeValueMetric:
f.valueMetricDisabled = false
}
}
@@ -94,7 +94,7 @@ func (f *FilterSelector) FiltersByNames(filterSelectorNames ...string) {
func (f *FilterSelector) FiltersByNames(filterSelectorNames ...string) {
filterSelectorTypes := make([]FilterSelectorType, 0)
for _, filterSelectorName := range filterSelectorNames {
if selectorType, ok := FilterSelectorType_value[strings.ToLower(filterSelectorName)]; ok {
if selectorType, ok := FilterSelectorTypeValue[strings.ToLower(filterSelectorName)]; ok {
filterSelectorTypes = append(filterSelectorTypes, FilterSelectorType(selectorType))
}
}
@@ -117,7 +117,7 @@ func (f *FilterSelector) ToSelectorTypes() []*loggregator_v2.Selector {
},
})
}
if !f.HttpStartStopDisabled() {
if !f.HTTPStartStopDisabled() {
selectors = append(selectors, &loggregator_v2.Selector{
Message: &loggregator_v2.Selector_Timer{
Timer: &loggregator_v2.TimerSelector{},
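
Because the lowercase keys of FilterSelectorTypeValue are unchanged, selectors configured by name (containermetric, counterevent, httpstartstop, valuemetric) keep working as before; only Go code referring to the old underscore-style constants needs to adopt the new FilterSelectorType* identifiers.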