[receiver/purefa] Add relabel and env configs to purefa (#17266)
Signed-off-by: dgoscn <[email protected]>
dgoscn authored Jan 9, 2023
1 parent 7711ef4 commit 85ce958
Showing 7 changed files with 39 additions and 6 deletions.
16 changes: 16 additions & 0 deletions .chloggen/purefa-receiver-relabel-config.yaml
@@ -0,0 +1,16 @@
# One of 'breaking', 'deprecation', 'new_component', 'enhancement', 'bug_fix'
change_type: enhancement

# The name of the component, or a single word describing the area of concern, (e.g. filelogreceiver)
component: receiver/purefareceiver

# A brief description of the change. Surround your text with quotes ("") if it needs to start with a backtick (`).
note: Add a relabel config for important default label names on FlashArray endpoints

# One or more tracking issues related to the change
issues: [14886]

# (Optional) One or more lines of additional information to render under the primary note.
# These lines will be padded with 2 spaces and then inserted directly into the document.
# Use pipe (|) for multiline entries.
subtext:
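
The changelog entry above summarizes the change: the receiver now attaches a set of default labels (env, fa_array_name, host) to the FlashArray endpoints it scrapes. As a rough illustration only, here is a minimal Go sketch of that idea, assuming the same prometheus/common module the receiver already imports; defaultLabels is a hypothetical helper name, not code from this commit.

package main

import (
	"fmt"

	"github.com/prometheus/common/model"
)

// defaultLabels builds the shared label set from the configured environment
// name and the array endpoint, mirroring the label names added in receiver.go.
func defaultLabels(env, endpoint string) model.LabelSet {
	return model.LabelSet{
		"env":           model.LabelValue(env),
		"fa_array_name": model.LabelValue(endpoint),
		"host":          model.LabelValue(endpoint),
	}
}

func main() {
	fmt.Println(defaultLabels("dev", "gse-array01"))
}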
1 change: 1 addition & 0 deletions receiver/purefareceiver/README.md
@@ -29,6 +29,7 @@ receivers:
      - address: gse-array01
        auth:
          authenticator: bearertokenauth/array01
    env: dev
    settings:
      reload_intervals:
        array: 10s
3 changes: 3 additions & 0 deletions receiver/purefareceiver/config.go
@@ -46,6 +46,9 @@ type Config struct {

	// Volumes represents the list of volumes to query
	Volumes []internal.ScraperConfig `mapstructure:"volumes"`

	// Env represents the respective environment value valid to scrape
	Env string `mapstructure:"env"`
}

type Settings struct {
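
For context on the new Env field: the mapstructure tag means a top-level env key in the receiver's YAML block is decoded into Config.Env. The sketch below shows that mapping with a simplified stand-in struct (not the receiver's full Config), using the mapstructure library the tag refers to.

package main

import (
	"fmt"

	"github.com/mitchellh/mapstructure"
)

// config is a stripped-down stand-in for the receiver's Config type,
// keeping only the field added in this commit.
type config struct {
	Env string `mapstructure:"env"`
}

func main() {
	// raw mimics the map the collector's config layer would hand over
	// after parsing the receiver's YAML block.
	raw := map[string]any{"env": "dev"}

	var cfg config
	if err := mapstructure.Decode(raw, &cfg); err != nil {
		panic(err)
	}
	fmt.Println(cfg.Env) // prints: dev
}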
4 changes: 4 additions & 0 deletions receiver/purefareceiver/internal/scraper.go
@@ -47,19 +47,22 @@ type scraper struct {
	endpoint       string
	configs        []ScraperConfig
	scrapeInterval time.Duration
	labels         model.LabelSet
}

func NewScraper(ctx context.Context,
	scraperType ScraperType,
	endpoint string,
	configs []ScraperConfig,
	scrapeInterval time.Duration,
	labels model.LabelSet,
) Scraper {
	return &scraper{
		scraperType:    scraperType,
		endpoint:       endpoint,
		configs:        configs,
		scrapeInterval: scrapeInterval,
		labels:         labels,
	}
}

@@ -98,6 +101,7 @@ func (h *scraper) ToPrometheusReceiverConfig(host component.Host, fact receiver.
					Targets: []model.LabelSet{
						{model.AddressLabel: model.LabelValue(u.Host)},
					},
					Labels: h.labels,
				},
			},
		},
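
The Labels field set above is what actually attaches the shared labels: in Prometheus static discovery, every label in a target group's Labels is merged into each of its Targets. A small sketch of that behavior, assuming the prometheus/prometheus discovery packages the scraper already builds on; the literal values are illustrative only.

package main

import (
	"fmt"

	"github.com/prometheus/common/model"
	"github.com/prometheus/prometheus/discovery"
	"github.com/prometheus/prometheus/discovery/targetgroup"
)

func main() {
	labels := model.LabelSet{"env": "dev", "fa_array_name": "gse-array01", "host": "gse-array01"}
	sd := discovery.StaticConfig{
		&targetgroup.Group{
			Targets: []model.LabelSet{
				{model.AddressLabel: "gse-array01:9490"},
			},
			Labels: labels, // shared labels, merged into every target above
		},
	}
	fmt.Printf("%d target group(s), %d shared labels\n", len(sd), len(sd[0].Labels))
}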
3 changes: 2 additions & 1 deletion receiver/purefareceiver/internal/scraper_test.go
@@ -19,6 +19,7 @@ import (
	"testing"
	"time"

	"github.com/prometheus/common/model"
	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"
	"go.opentelemetry.io/collector/component"
@@ -57,7 +58,7 @@ func TestToPrometheusConfig(t *testing.T) {
		},
	}

	scraper := NewScraper(context.Background(), "hosts", endpoint, cfgs, interval)
	scraper := NewScraper(context.Background(), "hosts", endpoint, cfgs, interval, model.LabelSet{})

	// test
	scCfgs, err := scraper.ToPrometheusReceiverConfig(host, prFactory)
17 changes: 12 additions & 5 deletions receiver/purefareceiver/receiver.go
@@ -17,6 +17,7 @@ package purefareceiver // import "github.com/open-telemetry/opentelemetry-collec
import (
	"context"

	"github.com/prometheus/common/model"
	"github.com/prometheus/prometheus/config"
	"go.opentelemetry.io/collector/component"
	"go.opentelemetry.io/collector/consumer"
@@ -48,35 +49,41 @@ func (r *purefaReceiver) Start(ctx context.Context, compHost component.Host) err
	fact := prometheusreceiver.NewFactory()
	scrapeCfgs := []*config.ScrapeConfig{}

	arrScraper := internal.NewScraper(ctx, internal.ScraperTypeArray, r.cfg.Endpoint, r.cfg.Arrays, r.cfg.Settings.ReloadIntervals.Array)
	commomLabel := model.LabelSet{
		"env":           model.LabelValue(r.cfg.Env),
		"fa_array_name": model.LabelValue(r.cfg.Endpoint),
		"host":          model.LabelValue(r.cfg.Endpoint),
	}

	arrScraper := internal.NewScraper(ctx, internal.ScraperTypeArray, r.cfg.Endpoint, r.cfg.Arrays, r.cfg.Settings.ReloadIntervals.Array, commomLabel)
	if scCfgs, err := arrScraper.ToPrometheusReceiverConfig(compHost, fact); err == nil {
		scrapeCfgs = append(scrapeCfgs, scCfgs...)
	} else {
		return err
	}

	hostScraper := internal.NewScraper(ctx, internal.ScraperTypeHost, r.cfg.Endpoint, r.cfg.Hosts, r.cfg.Settings.ReloadIntervals.Host)
	hostScraper := internal.NewScraper(ctx, internal.ScraperTypeHost, r.cfg.Endpoint, r.cfg.Hosts, r.cfg.Settings.ReloadIntervals.Host, commomLabel)
	if scCfgs, err := hostScraper.ToPrometheusReceiverConfig(compHost, fact); err == nil {
		scrapeCfgs = append(scrapeCfgs, scCfgs...)
	} else {
		return err
	}

	directoriesScraper := internal.NewScraper(ctx, internal.ScraperTypeDirectories, r.cfg.Endpoint, r.cfg.Directories, r.cfg.Settings.ReloadIntervals.Directories)
	directoriesScraper := internal.NewScraper(ctx, internal.ScraperTypeDirectories, r.cfg.Endpoint, r.cfg.Directories, r.cfg.Settings.ReloadIntervals.Directories, commomLabel)
	if scCfgs, err := directoriesScraper.ToPrometheusReceiverConfig(compHost, fact); err == nil {
		scrapeCfgs = append(scrapeCfgs, scCfgs...)
	} else {
		return err
	}

	podsScraper := internal.NewScraper(ctx, internal.ScraperTypePods, r.cfg.Endpoint, r.cfg.Pods, r.cfg.Settings.ReloadIntervals.Pods)
	podsScraper := internal.NewScraper(ctx, internal.ScraperTypePods, r.cfg.Endpoint, r.cfg.Pods, r.cfg.Settings.ReloadIntervals.Pods, commomLabel)
	if scCfgs, err := podsScraper.ToPrometheusReceiverConfig(compHost, fact); err == nil {
		scrapeCfgs = append(scrapeCfgs, scCfgs...)
	} else {
		return err
	}

	volumesScraper := internal.NewScraper(ctx, internal.ScraperTypeVolumes, r.cfg.Endpoint, r.cfg.Volumes, r.cfg.Settings.ReloadIntervals.Volumes)
	volumesScraper := internal.NewScraper(ctx, internal.ScraperTypeVolumes, r.cfg.Endpoint, r.cfg.Volumes, r.cfg.Settings.ReloadIntervals.Volumes, model.LabelSet{})
	if scCfgs, err := volumesScraper.ToPrometheusReceiverConfig(compHost, fact); err == nil {
		scrapeCfgs = append(scrapeCfgs, scCfgs...)
	} else {
1 change: 1 addition & 0 deletions receiver/purefareceiver/testdata/config.yaml
@@ -11,6 +11,7 @@ receivers:
      - address: gse-array02
        auth:
          authenticator: bearertokenauth/array02
    env: dev
    settings:
      reload_intervals:
        array: 10s
