[target-allocator] fix updating scrape configs (#1620)
* [target-allocator] fix updating scrape configs

Only take scrape configs from the most recently applied config, and only
save them if we've successfully updated.

* [target-allocator] drop unnecessary job to scrape config map

* [target-allocator] add discoverer benchmark
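
The bug being fixed: ApplyConfig accumulated entries into a long-lived jobToScrapeConfig field on the Discoverer, so a job removed from a config was never dropped from the map pushed to the updater. The fix rebuilds the map from the stored configs on every call. A minimal standalone sketch of the broken pattern versus the fix, with simplified stand-in types rather than the operator's actual structs:

package main

import "fmt"

type ScrapeConfig struct{ JobName string }
type Config struct{ ScrapeConfigs []*ScrapeConfig }

// Buggy pattern: the map lives on the struct and is only ever added to,
// so a job deleted from the source config survives forever.
type staleDiscoverer struct {
    jobToScrapeConfig map[string]*ScrapeConfig
}

func (d *staleDiscoverer) ApplyConfig(cfg *Config) {
    for _, sc := range cfg.ScrapeConfigs {
        d.jobToScrapeConfig[sc.JobName] = sc // never removes old keys
    }
}

// Fixed pattern: rebuild the map from scratch on every apply, so it
// reflects exactly the jobs present in the currently stored configs.
func applyConfig(configs []*Config) map[string]*ScrapeConfig {
    jobToScrapeConfig := make(map[string]*ScrapeConfig)
    for _, cfg := range configs {
        for _, sc := range cfg.ScrapeConfigs {
            jobToScrapeConfig[sc.JobName] = sc
        }
    }
    return jobToScrapeConfig
}

func main() {
    old := &Config{ScrapeConfigs: []*ScrapeConfig{{JobName: "a"}, {JobName: "b"}}}
    updated := &Config{ScrapeConfigs: []*ScrapeConfig{{JobName: "a"}}}

    d := &staleDiscoverer{jobToScrapeConfig: map[string]*ScrapeConfig{}}
    d.ApplyConfig(old)
    d.ApplyConfig(updated)
    fmt.Println(len(d.jobToScrapeConfig)) // 2: job "b" leaked

    fmt.Println(len(applyConfig([]*Config{updated}))) // 1: only current jobs
}

Rebuilding from the per-source config map on each call also keeps the result consistent across all event sources, not just the one being applied.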
Mikołaj Świątek authored Apr 13, 2023
1 parent 1ed268c commit 20ea43a
Showing 4 changed files with 86 additions and 10 deletions.
16 changes: 16 additions & 0 deletions .chloggen/fix_targetallocator_update-scrape-configs.yaml
@@ -0,0 +1,16 @@
+# One of 'breaking', 'deprecation', 'new_component', 'enhancement', 'bug_fix'
+change_type: bug_fix
+
+# The name of the component, or a single word describing the area of concern, (e.g. operator, target allocator, github action)
+component: target allocator
+
+# A brief description of the change. Surround your text with quotes ("") if it needs to start with a backtick (`).
+note: fix updating scrape configs
+
+# One or more tracking issues related to the change
+issues: [1415]
+
+# (Optional) One or more lines of additional information to render under the primary note.
+# These lines will be padded with 2 spaces and then inserted directly into the document.
+# Use pipe (|) for multiline entries.
+subtext:
9 changes: 4 additions & 5 deletions cmd/otel-allocator/target/discovery.go
@@ -39,7 +39,6 @@ type Discoverer struct {
 	manager              *discovery.Manager
 	close                chan struct{}
 	configsMap           map[allocatorWatcher.EventSource]*config.Config
-	jobToScrapeConfig    map[string]*config.ScrapeConfig
 	hook                 discoveryHook
 	scrapeConfigsHash    uint64
 	scrapeConfigsUpdater scrapeConfigsUpdater
@@ -59,34 +58,34 @@ func NewDiscoverer(log logr.Logger, manager *discovery.Manager, hook discoveryHook
 		manager:              manager,
 		close:                make(chan struct{}),
 		configsMap:           make(map[allocatorWatcher.EventSource]*config.Config),
-		jobToScrapeConfig:    make(map[string]*config.ScrapeConfig),
 		hook:                 hook,
 		scrapeConfigsUpdater: scrapeConfigsUpdater,
 	}
 }
 
 func (m *Discoverer) ApplyConfig(source allocatorWatcher.EventSource, cfg *config.Config) error {
 	m.configsMap[source] = cfg
+	jobToScrapeConfig := make(map[string]*config.ScrapeConfig)
 
 	discoveryCfg := make(map[string]discovery.Configs)
 	relabelCfg := make(map[string][]*relabel.Config)
 
 	for _, value := range m.configsMap {
 		for _, scrapeConfig := range value.ScrapeConfigs {
-			m.jobToScrapeConfig[scrapeConfig.JobName] = scrapeConfig
+			jobToScrapeConfig[scrapeConfig.JobName] = scrapeConfig
 			discoveryCfg[scrapeConfig.JobName] = scrapeConfig.ServiceDiscoveryConfigs
 			relabelCfg[scrapeConfig.JobName] = scrapeConfig.RelabelConfigs
 		}
 	}
 
-	hash, err := hashstructure.Hash(m.jobToScrapeConfig, nil)
+	hash, err := hashstructure.Hash(jobToScrapeConfig, nil)
 	if err != nil {
 		return err
 	}
 	// If the hash has changed, update the stored hash and send the new config.
 	// Otherwise skip updating scrape configs.
 	if m.scrapeConfigsUpdater != nil && m.scrapeConfigsHash != hash {
-		err := m.scrapeConfigsUpdater.UpdateScrapeConfigResponse(m.jobToScrapeConfig)
+		err := m.scrapeConfigsUpdater.UpdateScrapeConfigResponse(jobToScrapeConfig)
 		if err != nil {
 			return err
 		}
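
The hash comparison is what keeps no-op applies cheap: the updater is only called when the rebuilt map hashes differently from the last successfully pushed one, and (per the second part of the commit message) the stored hash only advances after the update succeeds, so a failed push is retried on the next apply. A standalone sketch of that idiom using github.com/mitchellh/hashstructure, the same library the diff calls; the syncer type and push field are simplified stand-ins, not the operator's code:

package main

import (
    "fmt"

    "github.com/mitchellh/hashstructure"
)

type syncer struct {
    lastHash uint64
    push     func(map[string]string) error // stand-in for scrapeConfigsUpdater
}

// apply pushes cfg only when its hash differs from the last successful push.
// lastHash is stored only after push succeeds, so failures are retried.
func (s *syncer) apply(cfg map[string]string) error {
    hash, err := hashstructure.Hash(cfg, nil)
    if err != nil {
        return err
    }
    if s.lastHash == hash {
        return nil // unchanged: skip the update entirely
    }
    if err := s.push(cfg); err != nil {
        return err // lastHash untouched: next apply retries
    }
    s.lastHash = hash
    return nil
}

func main() {
    s := &syncer{push: func(m map[string]string) error { fmt.Println("pushed", m); return nil }}
    cfg := map[string]string{"job": "a"}
    _ = s.apply(cfg) // pushed
    _ = s.apply(cfg) // skipped: same hash
}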
68 changes: 63 additions & 5 deletions cmd/otel-allocator/target/discovery_test.go
@@ -28,6 +28,7 @@ import (
 	"github.com/prometheus/prometheus/discovery"
 	"github.com/prometheus/prometheus/model/relabel"
 	"github.com/stretchr/testify/assert"
+	"github.com/stretchr/testify/require"
 	ctrl "sigs.k8s.io/controller-runtime"
 
 	"github.com/open-telemetry/opentelemetry-operator/cmd/otel-allocator/config"
@@ -48,7 +49,7 @@ func TestDiscovery(t *testing.T) {
 			args: args{
 				file: "./testdata/test.yaml",
 			},
-			want: []string{"prom.domain:9001", "prom.domain:9002", "prom.domain:9003", "promfile.domain:1001", "promfile.domain:3000"},
+			want: []string{"prom.domain:9001", "prom.domain:9002", "prom.domain:9003", "prom.domain:8001", "promfile.domain:1001", "promfile.domain:3000"},
 		},
 		{
 			name: "update",
@@ -58,9 +59,10 @@
 			want: []string{"prom.domain:9004", "prom.domain:9005", "promfile.domain:1001", "promfile.domain:3000"},
 		},
 	}
+	scu := &mockScrapeConfigUpdater{}
 	ctx, cancelFunc := context.WithCancel(context.Background())
 	d := discovery.NewManager(ctx, gokitlog.NewNopLogger())
-	manager := NewDiscoverer(ctrl.Log.WithName("test"), d, nil, nil)
+	manager := NewDiscoverer(ctrl.Log.WithName("test"), d, nil, scu)
 	defer close(manager.close)
 	defer cancelFunc()
@@ -91,6 +93,13 @@
 			sort.Strings(gotTargets)
 			sort.Strings(tt.want)
 			assert.Equal(t, tt.want, gotTargets)
+
+			// check the updated scrape configs
+			expectedScrapeConfigs := map[string]*promconfig.ScrapeConfig{}
+			for _, scrapeConfig := range cfg.Config.ScrapeConfigs {
+				expectedScrapeConfigs[scrapeConfig.JobName] = scrapeConfig
+			}
+			assert.Equal(t, expectedScrapeConfigs, scu.mockCfg)
 		})
 	}
 }
@@ -286,6 +295,7 @@ func TestDiscovery_ScrapeConfigHashing(t *testing.T) {
 	}
 	var (
 		lastValidHash   uint64
+		expectedConfig  map[string]*promconfig.ScrapeConfig
 		lastValidConfig map[string]*promconfig.ScrapeConfig
 	)
@@ -298,27 +308,75 @@
 		t.Run(tc.description, func(t *testing.T) {
 			err := manager.ApplyConfig(allocatorWatcher.EventSourcePrometheusCR, tc.cfg)
 			if !tc.expectErr {
+				expectedConfig = make(map[string]*promconfig.ScrapeConfig)
+				for _, value := range manager.configsMap {
+					for _, scrapeConfig := range value.ScrapeConfigs {
+						expectedConfig[scrapeConfig.JobName] = scrapeConfig
+					}
+				}
 				assert.NoError(t, err)
 				assert.NotZero(t, manager.scrapeConfigsHash)
 				// Assert that scrape configs in manager are correctly
 				// reflected in the scrape job updater.
-				assert.Equal(t, manager.jobToScrapeConfig, scu.mockCfg)
+				assert.Equal(t, expectedConfig, scu.mockCfg)
 
 				lastValidHash = manager.scrapeConfigsHash
-				lastValidConfig = manager.jobToScrapeConfig
+				lastValidConfig = expectedConfig
 			} else {
 				// In case of error, assert that we retain the last
 				// known valid config.
 				assert.Error(t, err)
 				assert.Equal(t, lastValidHash, manager.scrapeConfigsHash)
-				assert.Equal(t, lastValidConfig, manager.jobToScrapeConfig)
+				assert.Equal(t, lastValidConfig, scu.mockCfg)
 			}
 
 		})
 	}
 }

+func BenchmarkApplyScrapeConfig(b *testing.B) {
+	numConfigs := 1000
+	scrapeConfig := promconfig.ScrapeConfig{
+		JobName:         "serviceMonitor/testapp/testapp/0",
+		HonorTimestamps: true,
+		ScrapeInterval:  model.Duration(30 * time.Second),
+		ScrapeTimeout:   model.Duration(30 * time.Second),
+		MetricsPath:     "/metrics",
+		Scheme:          "http",
+		HTTPClientConfig: commonconfig.HTTPClientConfig{
+			FollowRedirects: true,
+		},
+		RelabelConfigs: []*relabel.Config{
+			{
+				SourceLabels: model.LabelNames{model.LabelName("job")},
+				Separator:    ";",
+				Regex:        relabel.MustNewRegexp("(.*)"),
+				TargetLabel:  "__tmp_prometheus_job_name",
+				Replacement:  "$$1",
+				Action:       relabel.Replace,
+			},
+		},
+	}
+	cfg := &promconfig.Config{
+		ScrapeConfigs: make([]*promconfig.ScrapeConfig, numConfigs),
+	}
+
+	for i := 0; i < numConfigs; i++ {
+		cfg.ScrapeConfigs[i] = &scrapeConfig
+	}
+
+	scu := &mockScrapeConfigUpdater{}
+	ctx := context.Background()
+	d := discovery.NewManager(ctx, gokitlog.NewNopLogger())
+	manager := NewDiscoverer(ctrl.Log.WithName("test"), d, nil, scu)
+
+	b.ResetTimer()
+	for i := 0; i < b.N; i++ {
+		err := manager.ApplyConfig(allocatorWatcher.EventSourcePrometheusCR, cfg)
+		require.NoError(b, err)
+	}
+}
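
To compare the two revisions, the benchmark can be run with standard Go tooling; assuming the repository root and the package path shown in the file listing above, something like:

go test -bench=BenchmarkApplyScrapeConfig -benchmem ./cmd/otel-allocator/target/

The -benchmem flag adds allocation counts per operation, which is relevant here since the fix changes how often the scrape-config map is rebuilt and pushed to the updater.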

 var _ scrapeConfigsUpdater = &mockScrapeConfigUpdater{}
 
 // mockScrapeConfigUpdater is a mock implementation of the scrapeConfigsUpdater.
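
The mock's definition is collapsed in the diff above. For readers following along, a hypothetical reconstruction consistent with how the tests use scu.mockCfg and with the failure cases in the hashing test; the err field and its semantics are assumptions, not the repository's verbatim code:

// Hypothetical sketch of the collapsed mock: it records the last config
// passed to UpdateScrapeConfigResponse and can be primed with an error
// to exercise the update-failure path.
type mockScrapeConfigUpdater struct {
    mockCfg map[string]*promconfig.ScrapeConfig
    err     error
}

func (m *mockScrapeConfigUpdater) UpdateScrapeConfigResponse(cfg map[string]*promconfig.ScrapeConfig) error {
    if m.err != nil {
        return m.err
    }
    m.mockCfg = cfg
    return nil
}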
3 changes: 3 additions & 0 deletions cmd/otel-allocator/target/testdata/test.yaml
@@ -12,3 +12,6 @@ config:
     - targets: ["prom.domain:9001", "prom.domain:9002", "prom.domain:9003"]
       labels:
         my: label
+  - job_name: prometheus2
+    static_configs:
+    - targets: ["prom.domain:8001"]
