Make OpenMetrics use the RequestsWrapper #5414

Merged · 15 commits · Jan 9, 2020
crio/datadog_checks/crio/crio.py (1 addition & 2 deletions)

@@ -11,11 +11,10 @@ class CrioCheck(OpenMetricsBaseCheck):

     DEFAULT_METRIC_LIMIT = 0

-    def __init__(self, name, init_config, agentConfig, instances=None):
+    def __init__(self, name, init_config, instances):
         super(CrioCheck, self).__init__(
             name,
             init_config,
-            agentConfig,
             instances,
             default_instances={
                 "crio": {
crio/tests/test_crio.py (1 addition & 1 deletion)

@@ -33,7 +33,7 @@ def test_crio(aggregator, mock_data):

     Testing crio.
     """

-    c = CrioCheck(CHECK_NAME, None, {}, [instance])
+    c = CrioCheck(CHECK_NAME, {}, [instance])
     c.check(instance)
     aggregator.assert_metric(NAMESPACE + '.operations.count')
     aggregator.assert_metric(NAMESPACE + '.operations.latency.count')
@@ -34,6 +34,14 @@ class OpenMetricsBaseCheck(OpenMetricsScraperMixin, AgentCheck):

     DEFAULT_METRIC_LIMIT = 2000

+    HTTP_CONFIG_REMAPPER = {
+        'ssl_verify': {'name': 'tls_verify'},
+        'ssl_cert': {'name': 'tls_cert'},
+        'ssl_private_key': {'name': 'tls_private_key'},
+        'ssl_ca_cert': {'name': 'tls_ca_cert'},
+        'prometheus_timeout': {'name': 'timeout'},
+    }

Review comment (Member):
> Does this mean we need to update all integrations' config examples?

Reply (Contributor Author):
> Yes, though they'd still work of course, so no rush.

     def __init__(self, *args, **kwargs):
         args = list(args)
         default_instances = kwargs.pop('default_instances', None) or {}

@@ -49,6 +57,7 @@ def __init__(self, *args, **kwargs):

         super(OpenMetricsBaseCheck, self).__init__(*args, **kwargs)
         self.config_map = {}
+        self.http_handlers = {}
         self.default_instances = default_instances
         self.default_namespace = default_namespace
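The remapper is what keeps legacy configs working: the wrapper consults it to translate each old ssl_* option into its tls_* equivalent when building its session options. A standalone sketch of the idea follows; the real translation happens inside RequestsWrapper, so the helper below is an illustration, not the library's code.

# Illustrative sketch of how a remapper like HTTP_CONFIG_REMAPPER is applied.
# The actual logic lives in datadog_checks.base.utils.http.RequestsWrapper.
def apply_remapper(instance, remapper):
    config = dict(instance)
    for legacy_name, data in remapper.items():
        new_name = data['name']
        # Honor an explicit new-style option over a legacy one.
        if legacy_name in config and new_name not in config:
            config[new_name] = config[legacy_name]
    return config


instance = {'prometheus_url': 'http://localhost:9090/metrics', 'ssl_verify': False}
remapped = apply_remapper(instance, {'ssl_verify': {'name': 'tls_verify'}})
assert remapped['tls_verify'] is False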
@@ -10,12 +10,11 @@

 import requests
 from prometheus_client.parser import text_fd_to_metric_families
 from six import PY3, iteritems, itervalues, string_types
-from urllib3.exceptions import InsecureRequestWarning

 from ...config import is_affirmative
 from ...errors import CheckException
 from ...utils.common import to_string
-from ...utils.warnings_util import disable_warnings_ctx
+from ...utils.http import RequestsWrapper
 from .. import AgentCheck

 if PY3:

@@ -288,6 +287,25 @@ def create_scraper_configuration(self, instance=None):

         if config['metadata_metric_name'] and config['metadata_label_map']:
             config['_default_metric_transformers'][config['metadata_metric_name']] = self.transform_metadata

+        # Set up the HTTP wrapper for this endpoint
+
+        # TODO: Deprecate this behavior
+        if config['ssl_verify'] is False:
+            config.setdefault('tls_ignore_warning', True)
+
+        http_handler = self.http_handlers[endpoint] = RequestsWrapper(
+            config, self.init_config, self.HTTP_CONFIG_REMAPPER, self.log
+        )
+
+        headers = http_handler.options['headers']
+
+        bearer_token = config['_bearer_token']
+        if bearer_token is not None:
+            headers['Authorization'] = 'Bearer {}'.format(bearer_token)
+
+        # TODO: Determine if we really need this
+        headers.setdefault('accept-encoding', 'gzip')

Review comment (Member), on the "TODO: Determine if we really need this" line:
> Can we determine now? What's needed to answer this question? :)

         return config

     def parse_metric_family(self, response, scraper_config):
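Two behaviors in that block are easy to miss. First, the deprecation shim: a legacy `ssl_verify: false` now implies `tls_ignore_warning: true` via `setdefault`, so insecure-request warnings stay suppressed for old configs unless the user explicitly set `tls_ignore_warning: false`. A standalone illustration (not the check's actual code path):

# Illustration of the setdefault-based shim above; values are made up.
config = {'prometheus_url': 'http://localhost:9090/metrics', 'ssl_verify': False}
if config['ssl_verify'] is False:
    config.setdefault('tls_ignore_warning', True)
assert config['tls_ignore_warning'] is True

# An explicit user choice survives, because setdefault never overwrites:
config = {'ssl_verify': False, 'tls_ignore_warning': False}
if config['ssl_verify'] is False:
    config.setdefault('tls_ignore_warning', True)
assert config['tls_ignore_warning'] is False

Second, the bearer token and the gzip accept-encoding are now baked into the wrapper's default headers once at configuration time, rather than being rebuilt on every request.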
@@ -556,53 +574,12 @@ def poll(self, scraper_config, headers=None):

             raise

     def send_request(self, endpoint, scraper_config, headers=None):
-        # Determine the headers
-        if headers is None:
-            headers = {}
-        if 'accept-encoding' not in headers:
-            headers['accept-encoding'] = 'gzip'
-        headers.update(scraper_config['extra_headers'])
-
-        # Add the bearer token to headers
-        bearer_token = scraper_config['_bearer_token']
-        if bearer_token is not None:
-            auth_header = {'Authorization': 'Bearer {}'.format(bearer_token)}
-            headers.update(auth_header)
-
-        # Determine the SSL verification settings
-        cert = None
-        if isinstance(scraper_config['ssl_cert'], string_types):
-            if isinstance(scraper_config['ssl_private_key'], string_types):
-                cert = (scraper_config['ssl_cert'], scraper_config['ssl_private_key'])
-            else:
-                cert = scraper_config['ssl_cert']
-
-        verify = scraper_config['ssl_verify']
-        # TODO: deprecate use as `ssl_verify` boolean
-        if scraper_config['ssl_ca_cert'] is False:
-            verify = False
-
-        disable_insecure_warnings = False
-        if isinstance(scraper_config['ssl_ca_cert'], string_types):
-            verify = scraper_config['ssl_ca_cert']
-        elif verify is False:
-            disable_insecure_warnings = True
-
-        # Determine the authentication settings
-        username = scraper_config['username']
-        password = scraper_config['password']
-        auth = (username, password) if username is not None and password is not None else None
-
-        with disable_warnings_ctx(InsecureRequestWarning, disable=disable_insecure_warnings):
-            return requests.get(
-                endpoint,
-                headers=headers,
-                stream=True,
-                timeout=scraper_config['prometheus_timeout'],
-                cert=cert,
-                verify=verify,
-                auth=auth,
-            )
+        # We set this up at the first run, but check it again just
+        # in case a subclass alters the expected order of calls.
+        if not self.http_handlers:
+            self.set_up_http_handlers()
+
+        return self.http_handlers[scraper_config['prometheus_url']].get(endpoint, stream=True)

     def get_hostname_for_sample(self, sample, scraper_config):
         """
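All of the plumbing deleted above (headers, client certs, verification, basic auth, timeout, warning suppression) is now owned by RequestsWrapper and driven by configuration. A rough usage sketch based on the constructor and get call shown in this diff; the instance values and logger are illustrative:

# Rough usage sketch of the wrapper, as adopted by this PR; values are illustrative.
import logging

from datadog_checks.base.utils.http import RequestsWrapper

instance = {
    'prometheus_url': 'http://localhost:9090/metrics',
    'ssl_verify': False,  # legacy name; remapped to tls_verify
    'tls_ignore_warning': True,
}
remapper = {'ssl_verify': {'name': 'tls_verify'}}

# Constructor arguments as used in the diff: (instance, init_config, remapper, logger)
http = RequestsWrapper(instance, {}, remapper, logging.getLogger(__name__))

# TLS, auth, timeout, and default headers all come from the config above.
response = http.get(instance['prometheus_url'], stream=True)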
@@ -81,7 +81,7 @@ class KubeControllerManagerCheck(KubeLeaderElectionMixin, OpenMetricsBaseCheck):

         "record_namespace": "kube-system",
     }

-    def __init__(self, name, init_config, agentConfig, instances=None):
+    def __init__(self, name, init_config, instances):
         self.QUEUE_METRICS_TRANSFORMERS = {
             '_adds': self.queue_adds,
             '_depth': self.queue_depth,

@@ -103,7 +103,6 @@ def __init__(self, name, init_config, agentConfig, instances=None):

         super(KubeControllerManagerCheck, self).__init__(
             name,
             init_config,
-            agentConfig,
             instances,
             default_instances={
                 "kube_controller_manager": {
@@ -57,14 +57,14 @@ def mock_leader():


 def test_check_metrics_with_deprecated(aggregator, mock_metrics, mock_leader):
-    c = KubeControllerManagerCheck(CHECK_NAME, None, {}, [instance])
+    c = KubeControllerManagerCheck(CHECK_NAME, {}, [instance])
     c.check(instance)

     generic_check_metrics(aggregator, True)


 def test_check_metrics_without_deprecated(aggregator, mock_metrics, mock_leader):
-    c = KubeControllerManagerCheck(CHECK_NAME, None, {}, [instance])
+    c = KubeControllerManagerCheck(CHECK_NAME, {}, [instance])
     c.check(instance2)

     generic_check_metrics(aggregator, False)
@@ -82,11 +82,10 @@ class KubeMetricsServerCheck(OpenMetricsBaseCheck):

     KUBE_METRICS_SERVER_NAMESPACE = "kube_metrics_server"

-    def __init__(self, name, init_config, agentConfig, instances=None):
+    def __init__(self, name, init_config, instances):
         super(KubeMetricsServerCheck, self).__init__(
             name,
             init_config,
-            agentConfig,
             instances,
             default_instances={
                 "kube_metrics_server": {
kube_metrics_server/tests/test_kube_metrics_server.py (1 addition & 1 deletion)

@@ -35,7 +35,7 @@ def mock_metrics():


 def test_check_metrics(aggregator, mock_metrics):
-    c = KubeMetricsServerCheck(CHECK_NAME, None, {}, [instance])
+    c = KubeMetricsServerCheck(CHECK_NAME, {}, [instance])
     c.check(instance)

     def assert_metric(name, **kwargs):
kube_proxy/datadog_checks/kube_proxy/kube_proxy.py (1 addition & 2 deletions)

@@ -7,11 +7,10 @@

 class KubeProxyCheck(OpenMetricsBaseCheck):
     DEFAULT_METRIC_LIMIT = 0

-    def __init__(self, name, init_config, agentConfig, instances=None):
+    def __init__(self, name, init_config, instances):
         super(KubeProxyCheck, self).__init__(
             name,
             init_config,
-            agentConfig,
             instances,
             default_instances={
                 "kubeproxy": {
kube_proxy/tests/test_kube_proxy.py (2 additions & 2 deletions)

@@ -51,7 +51,7 @@ def test_check_iptables(aggregator, mock_iptables):

     Testing Kube_proxy in iptables mode.
     """

-    c = KubeProxyCheck(CHECK_NAME, None, {}, [instance])
+    c = KubeProxyCheck(CHECK_NAME, {}, [instance])
     c.check(instance)
     aggregator.assert_metric(NAMESPACE + '.cpu.time')
     aggregator.assert_metric(NAMESPACE + '.mem.resident')

@@ -74,7 +74,7 @@ def test_check_userspace(aggregator, mock_userspace):

     """
     Testing Kube_proxy in userspace mode.
     """
-    c = KubeProxyCheck(CHECK_NAME, None, {}, [instance])
+    c = KubeProxyCheck(CHECK_NAME, {}, [instance])
     c.check(instance)
     aggregator.assert_metric(NAMESPACE + '.cpu.time')
     aggregator.assert_metric(NAMESPACE + '.mem.resident')
@@ -103,11 +103,10 @@ class KubeSchedulerCheck(KubeLeaderElectionMixin, OpenMetricsBaseCheck):

         "record_namespace": "kube-system",
     }

-    def __init__(self, name, init_config, agentConfig, instances=None):
+    def __init__(self, name, init_config, instances):
         super(KubeSchedulerCheck, self).__init__(
             name,
             init_config,
-            agentConfig,
             instances,
             default_instances={
                 "kube_scheduler": {
kube_scheduler/tests/test_kube_scheduler_1_13.py (1 addition & 1 deletion)

@@ -46,7 +46,7 @@ def mock_leader():


 def test_check_metrics_1_13(aggregator, mock_metrics, mock_leader):
-    c = KubeSchedulerCheck(CHECK_NAME, None, {}, [instance])
+    c = KubeSchedulerCheck(CHECK_NAME, {}, [instance])
     c.check(instance)

     def assert_metric(name, **kwargs):
kube_scheduler/tests/test_kube_scheduler_1_14.py (1 addition & 1 deletion)

@@ -46,7 +46,7 @@ def mock_leader():


 def test_check_metrics_1_14(aggregator, mock_metrics, mock_leader):
-    c = KubeSchedulerCheck(CHECK_NAME, None, {}, [instance])
+    c = KubeSchedulerCheck(CHECK_NAME, {}, [instance])
     c.check(instance)

     def assert_metric(name, **kwargs):
linkerd/datadog_checks/linkerd/linkerd.py (4 additions & 2 deletions)

@@ -15,13 +15,15 @@ class LinkerdCheck(OpenMetricsBaseCheck):

     DEFAULT_METRIC_LIMIT = 0
     HEALTH_METRIC = 'linkerd.prometheus.health'

-    def __init__(self, name, init_config, agentConfig, instances=None):
+    def __init__(self, name, init_config, instances):
         labels_mapper = {'rt': 'linkerd_router', 'client': 'linkerd_client', 'service': 'linkerd_service'}

         default_config = {
             'linkerd': {'labels_mapper': labels_mapper, 'metrics': [METRIC_MAP], 'type_overrides': TYPE_OVERRIDES}
         }
-        super(LinkerdCheck, self).__init__(name, init_config, agentConfig, instances, default_config, 'linkerd')
+        super(LinkerdCheck, self).__init__(
+            name, init_config, instances, default_instances=default_config, default_namespace='linkerd'
+        )

     def process(self, scraper_config, metric_transformers=None):
         # Override the process method to send the health metric, as service checks can be disabled.
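(Linkerd is the one check in this diff that passed `default_instances` and `default_namespace` positionally; with `agentConfig` removed from the middle of the signature, switching those to keyword arguments presumably keeps the call unambiguous and robust to future signature changes.)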
linkerd/tests/test_linkerd.py (3 additions & 3 deletions)

@@ -22,7 +22,7 @@ def test_linkerd(aggregator):

     """
     Test the full check
     """
-    check = LinkerdCheck('linkerd', None, {}, [MOCK_INSTANCE])
+    check = LinkerdCheck('linkerd', {}, [MOCK_INSTANCE])
     with requests_mock.Mocker() as metric_request:
         metric_request.get('http://fake.tld/prometheus', text=get_response('linkerd.txt'))
         check.check(MOCK_INSTANCE)

@@ -40,7 +40,7 @@ def test_linkerd(aggregator):


 def test_linkerd_v2(aggregator):
-    check = LinkerdCheck('linkerd', None, {}, [MOCK_INSTANCE])
+    check = LinkerdCheck('linkerd', {}, [MOCK_INSTANCE])
     with requests_mock.Mocker() as metric_request:
         metric_request.get('http://fake.tld/prometheus', text=get_response('linkerd_v2.txt'))
         check.check(MOCK_INSTANCE)

@@ -56,7 +56,7 @@ def test_linkerd_v2(aggregator):


 def test_openmetrics_error(monkeypatch):
-    check = LinkerdCheck('linkerd', None, {}, [MOCK_INSTANCE])
+    check = LinkerdCheck('linkerd', {}, [MOCK_INSTANCE])
     with requests_mock.Mocker() as metric_request:
         metric_request.get('http://fake.tld/prometheus', exc="Exception")
         with pytest.raises(Exception):
@@ -11,11 +11,10 @@ class NginxIngressControllerCheck(OpenMetricsBaseCheck):

     DEFAULT_METRIC_LIMIT = 0

-    def __init__(self, name, init_config, agentConfig, instances=None):
+    def __init__(self, name, init_config, instances):
         super(NginxIngressControllerCheck, self).__init__(
             name,
             init_config,
-            agentConfig,
             instances,
             default_instances={
                 "nginx_ingress": {
@@ -33,7 +33,7 @@ def test_nginx_ingress_controller(aggregator, mock_data):

     Testing nginx ingress controller.
     """

-    c = NginxIngressControllerCheck(CHECK_NAME, None, {}, [instance])
+    c = NginxIngressControllerCheck(CHECK_NAME, {}, [instance])
     c.check(instance)
     # nginx metrics
     aggregator.assert_metric(NAMESPACE + '.nginx.connections.current')
openmetrics/tests/test_integration.py (1 addition & 1 deletion)

@@ -7,7 +7,7 @@


 @pytest.mark.integration
 def test_integration(aggregator, dd_environment):
-    c = OpenMetricsCheck('openmetrics', None, {}, [dd_environment])
+    c = OpenMetricsCheck('openmetrics', {}, [dd_environment])
     c.check(dd_environment)
     aggregator.assert_metric(CHECK_NAME + '.go_memstats_mallocs_total', metric_type=aggregator.MONOTONIC_COUNT)
     assert_metrics_covered(aggregator)
openmetrics/tests/test_openmetrics.py (4 additions & 6 deletions)

@@ -20,7 +20,7 @@


 def test_openmetrics_check(aggregator):
-    c = OpenMetricsCheck('openmetrics', None, {}, [instance])
+    c = OpenMetricsCheck('openmetrics', {}, [instance])
     c.check(instance)
     aggregator.assert_metric(
         CHECK_NAME + '.renamed.metric1',

@@ -38,7 +38,7 @@ def test_openmetrics_check(aggregator):


 def test_openmetrics_check_counter_gauge(aggregator):
     instance['send_monotonic_counter'] = False
-    c = OpenMetricsCheck('openmetrics', None, {}, [instance])
+    c = OpenMetricsCheck('openmetrics', {}, [instance])
     c.check(instance)
     aggregator.assert_metric(
         CHECK_NAME + '.renamed.metric1',

@@ -64,7 +64,7 @@ def test_invalid_metric(aggregator):

         'metrics': [{'metric1': 'renamed.metric1'}, 'metric2', 'metric3'],
         'send_histograms_buckets': True,
     }
-    c = OpenMetricsCheck('openmetrics', None, {}, [bad_metric_instance])
+    c = OpenMetricsCheck('openmetrics', {}, [bad_metric_instance])
     c.check(bad_metric_instance)
     assert aggregator.metrics('metric3') == []

@@ -76,7 +76,7 @@ def test_openmetrics_wildcard(aggregator):

         'metrics': ['metric*'],
     }

-    c = OpenMetricsCheck('openmetrics', None, {}, [instance_wildcard])
+    c = OpenMetricsCheck('openmetrics', {}, [instance_wildcard])
     c.check(instance)
     aggregator.assert_metric(
         CHECK_NAME + '.metric1',

@@ -98,7 +98,6 @@ def test_openmetrics_default_instance(aggregator):

     c = OpenMetricsCheck(
         CHECK_NAME,
-        None,
         {},
         [],
         default_instances={

@@ -127,7 +126,6 @@ def test_openmetrics_default_instance(aggregator):

 def test_openmetrics_mixed_instance(aggregator):
     c = OpenMetricsCheck(
         CHECK_NAME,
-        None,
         {},
         [],
         default_instances={
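The tails of the last two tests are truncated above; for reference, a sketch of the full new-style call they exercise, with illustrative default-instance values:

# Sketch of the new-style constructor call; the default instance values here
# are illustrative, since the test bodies are truncated above.
c = OpenMetricsCheck(
    CHECK_NAME,
    {},  # init_config
    [],  # instances
    default_instances={
        'openmetrics': {
            'prometheus_url': 'http://localhost:9090/metrics',
            'namespace': 'openmetrics',
            'metrics': ['metric*'],
        }
    },
    default_namespace='openmetrics',
)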