Make OpenMetrics use the RequestsWrapper #5414

Merged: 15 commits, Jan 9, 2020
3 changes: 1 addition & 2 deletions crio/datadog_checks/crio/crio.py
@@ -11,11 +11,10 @@ class CrioCheck(OpenMetricsBaseCheck):

DEFAULT_METRIC_LIMIT = 0

def __init__(self, name, init_config, agentConfig, instances=None):
def __init__(self, name, init_config, instances):
super(CrioCheck, self).__init__(
name,
init_config,
agentConfig,
instances,
default_instances={
"crio": {
2 changes: 1 addition & 1 deletion crio/tests/test_crio.py
@@ -33,7 +33,7 @@ def test_crio(aggregator, mock_data):
Testing crio.
"""

c = CrioCheck(CHECK_NAME, None, {}, [instance])
c = CrioCheck(CHECK_NAME, {}, [instance])
c.check(instance)
aggregator.assert_metric(NAMESPACE + '.operations.count')
aggregator.assert_metric(NAMESPACE + '.operations.latency.count')
@@ -34,6 +34,14 @@ class OpenMetricsBaseCheck(OpenMetricsScraperMixin, AgentCheck):

DEFAULT_METRIC_LIMIT = 2000

HTTP_CONFIG_REMAPPER = {
'ssl_verify': {'name': 'tls_verify'},
'ssl_cert': {'name': 'tls_cert'},
'ssl_private_key': {'name': 'tls_private_key'},
'ssl_ca_cert': {'name': 'tls_ca_cert'},
'prometheus_timeout': {'name': 'timeout'},
[Review comment, Member]
Does this mean we need to update all the integrations' config examples?

[Reply, Contributor (author)]
Yes, though they'd still work of course, so there's no rush. (See the configuration remapping sketch after this file's diff.)

}

def __init__(self, *args, **kwargs):
args = list(args)
default_instances = kwargs.pop('default_instances', None) or {}
@@ -49,6 +57,7 @@ def __init__(self, *args, **kwargs):

super(OpenMetricsBaseCheck, self).__init__(*args, **kwargs)
self.config_map = {}
self.http_handlers = {}
self.default_instances = default_instances
self.default_namespace = default_namespace

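For context on the exchange above: HTTP_CONFIG_REMAPPER tells the shared RequestsWrapper how to translate the legacy OpenMetrics option names into its own tls_*/timeout options, which is why existing configs keep working. Below is a minimal, self-contained sketch of the renaming idea; remap_http_config is a hypothetical helper for illustration only, and the real translation happens inside datadog_checks.base.utils.http.RequestsWrapper.

HTTP_CONFIG_REMAPPER = {
    'ssl_verify': {'name': 'tls_verify'},
    'ssl_cert': {'name': 'tls_cert'},
    'ssl_private_key': {'name': 'tls_private_key'},
    'ssl_ca_cert': {'name': 'tls_ca_cert'},
    'prometheus_timeout': {'name': 'timeout'},
}


def remap_http_config(instance, remapper):
    # Copy the instance and fill in each new-style key from its deprecated
    # alias, unless the user already set the new key explicitly.
    remapped = dict(instance)
    for legacy_name, spec in remapper.items():
        new_name = spec['name']
        if legacy_name in remapped and new_name not in remapped:
            remapped[new_name] = remapped[legacy_name]
    return remapped


legacy_instance = {'prometheus_url': 'http://localhost:9090/metrics', 'ssl_verify': False}
print(remap_http_config(legacy_instance, HTTP_CONFIG_REMAPPER))
# The wrapper effectively sees tls_verify=False, so the legacy config behaves as before.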
@@ -10,12 +10,11 @@
import requests
from prometheus_client.parser import text_fd_to_metric_families
from six import PY3, iteritems, itervalues, string_types
from urllib3.exceptions import InsecureRequestWarning

from ...config import is_affirmative
from ...errors import CheckException
from ...utils.common import to_string
from ...utils.warnings_util import disable_warnings_ctx
from ...utils.http import RequestsWrapper
from .. import AgentCheck

if PY3:
@@ -288,8 +287,29 @@ def create_scraper_configuration(self, instance=None):
if config['metadata_metric_name'] and config['metadata_label_map']:
config['_default_metric_transformers'][config['metadata_metric_name']] = self.transform_metadata

# Set up the HTTP wrapper for this endpoint
self._set_up_http_handler(endpoint, config)

return config

def _set_up_http_handler(self, endpoint, scraper_config):
# TODO: Deprecate this behavior in Agent 8
if scraper_config['ssl_verify'] is False:
scraper_config.setdefault('tls_ignore_warning', True)

http_handler = self.http_handlers[endpoint] = RequestsWrapper(
scraper_config, self.init_config, self.HTTP_CONFIG_REMAPPER, self.log
)

headers = http_handler.options['headers']

bearer_token = scraper_config['_bearer_token']
if bearer_token is not None:
headers['Authorization'] = 'Bearer {}'.format(bearer_token)

# TODO: Determine if we really need this
[Review comment, Member]
> TODO: Determine if we really need this

Can we determine now? What's needed to answer this question? :)

headers.setdefault('accept-encoding', 'gzip')

def parse_metric_family(self, response, scraper_config):
"""
Parse the MetricFamily from a valid requests.Response object to provide a MetricFamily object (see [0])
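The new _set_up_http_handler above builds one RequestsWrapper per prometheus_url, caches it in self.http_handlers, and pre-populates the gzip accept-encoding header (plus the bearer token header when one is configured). A rough usage sketch, assuming a datadog_checks_base build that includes this change; the URL and namespace are placeholder values:

from datadog_checks.base import OpenMetricsBaseCheck

instance = {'prometheus_url': 'http://localhost:9090/metrics', 'namespace': 'example'}
check = OpenMetricsBaseCheck('openmetrics', {}, [instance])

# Creating the scraper configuration also sets up the cached HTTP handler.
config = check.create_scraper_configuration(instance)
handler = check.http_handlers[config['prometheus_url']]

# The handler carries the resolved options; per the code above, its headers
# should include the default 'accept-encoding: gzip'.
print(handler.options['headers'])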
@@ -556,53 +576,11 @@ def poll(self, scraper_config, headers=None):
raise

def send_request(self, endpoint, scraper_config, headers=None):
# Determine the headers
if headers is None:
headers = {}
if 'accept-encoding' not in headers:
headers['accept-encoding'] = 'gzip'
headers.update(scraper_config['extra_headers'])

# Add the bearer token to headers
bearer_token = scraper_config['_bearer_token']
if bearer_token is not None:
auth_header = {'Authorization': 'Bearer {}'.format(bearer_token)}
headers.update(auth_header)

# Determine the SSL verification settings
cert = None
if isinstance(scraper_config['ssl_cert'], string_types):
if isinstance(scraper_config['ssl_private_key'], string_types):
cert = (scraper_config['ssl_cert'], scraper_config['ssl_private_key'])
else:
cert = scraper_config['ssl_cert']

verify = scraper_config['ssl_verify']
# TODO: deprecate use as `ssl_verify` boolean
if scraper_config['ssl_ca_cert'] is False:
verify = False

disable_insecure_warnings = False
if isinstance(scraper_config['ssl_ca_cert'], string_types):
verify = scraper_config['ssl_ca_cert']
elif verify is False:
disable_insecure_warnings = True

# Determine the authentication settings
username = scraper_config['username']
password = scraper_config['password']
auth = (username, password) if username is not None and password is not None else None

with disable_warnings_ctx(InsecureRequestWarning, disable=disable_insecure_warnings):
return requests.get(
endpoint,
headers=headers,
stream=True,
timeout=scraper_config['prometheus_timeout'],
cert=cert,
verify=verify,
auth=auth,
)
kwargs = {}
if headers:
kwargs['headers'] = headers

return self.http_handlers[scraper_config['prometheus_url']].get(endpoint, stream=True, **kwargs)

def get_hostname_for_sample(self, sample, scraper_config):
"""
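The block removed above rebuilt headers, TLS settings, auth, and the timeout on every request; with this change those options are resolved once per endpoint by the RequestsWrapper, and send_request simply reuses the cached handler. Here is a toy stand-in showing the shape of that trade; it is not the real datadog_checks.base.utils.http.RequestsWrapper, whose option handling is richer:

import requests


class ToyRequestsWrapper(object):
    """Resolve HTTP options once at construction, reuse them for every request."""

    def __init__(self, instance):
        cert = instance.get('tls_cert')
        private_key = instance.get('tls_private_key')
        if cert and private_key:
            cert = (cert, private_key)

        username = instance.get('username')
        password = instance.get('password')

        self.options = {
            'headers': dict(instance.get('headers') or {}),
            # A CA bundle path wins over the plain boolean toggle.
            'verify': instance.get('tls_ca_cert') or instance.get('tls_verify', True),
            'cert': cert,
            'auth': (username, password) if username and password else None,
            'timeout': instance.get('timeout', 10),
        }

    def get(self, url, **options):
        # Per-call options (e.g. stream=True) are layered on top of the resolved ones.
        return requests.get(url, **dict(self.options, **options))


# handler = ToyRequestsWrapper({'tls_verify': False, 'timeout': 5})
# response = handler.get('http://localhost:9090/metrics', stream=True)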
@@ -81,7 +81,7 @@ class KubeControllerManagerCheck(KubeLeaderElectionMixin, OpenMetricsBaseCheck):
"record_namespace": "kube-system",
}

def __init__(self, name, init_config, agentConfig, instances=None):
def __init__(self, name, init_config, instances):
self.QUEUE_METRICS_TRANSFORMERS = {
'_adds': self.queue_adds,
'_depth': self.queue_depth,
@@ -103,7 +103,6 @@ def __init__(self, name, init_config, agentConfig, instances=None):
super(KubeControllerManagerCheck, self).__init__(
name,
init_config,
agentConfig,
instances,
default_instances={
"kube_controller_manager": {
@@ -57,14 +57,14 @@ def mock_leader():


def test_check_metrics_with_deprecated(aggregator, mock_metrics, mock_leader):
c = KubeControllerManagerCheck(CHECK_NAME, None, {}, [instance])
c = KubeControllerManagerCheck(CHECK_NAME, {}, [instance])
c.check(instance)

generic_check_metrics(aggregator, True)


def test_check_metrics_without_deprecated(aggregator, mock_metrics, mock_leader):
c = KubeControllerManagerCheck(CHECK_NAME, None, {}, [instance])
c = KubeControllerManagerCheck(CHECK_NAME, {}, [instance])
c.check(instance2)

generic_check_metrics(aggregator, False)
@@ -82,11 +82,10 @@ class KubeMetricsServerCheck(OpenMetricsBaseCheck):

KUBE_METRICS_SERVER_NAMESPACE = "kube_metrics_server"

def __init__(self, name, init_config, agentConfig, instances=None):
def __init__(self, name, init_config, instances):
super(KubeMetricsServerCheck, self).__init__(
name,
init_config,
agentConfig,
instances,
default_instances={
"kube_metrics_server": {
2 changes: 1 addition & 1 deletion kube_metrics_server/tests/test_kube_metrics_server.py
@@ -35,7 +35,7 @@ def mock_metrics():


def test_check_metrics(aggregator, mock_metrics):
c = KubeMetricsServerCheck(CHECK_NAME, None, {}, [instance])
c = KubeMetricsServerCheck(CHECK_NAME, {}, [instance])
c.check(instance)

def assert_metric(name, **kwargs):
3 changes: 1 addition & 2 deletions kube_proxy/datadog_checks/kube_proxy/kube_proxy.py
@@ -7,11 +7,10 @@
class KubeProxyCheck(OpenMetricsBaseCheck):
DEFAULT_METRIC_LIMIT = 0

def __init__(self, name, init_config, agentConfig, instances=None):
def __init__(self, name, init_config, instances):
super(KubeProxyCheck, self).__init__(
name,
init_config,
agentConfig,
instances,
default_instances={
"kubeproxy": {
4 changes: 2 additions & 2 deletions kube_proxy/tests/test_kube_proxy.py
@@ -51,7 +51,7 @@ def test_check_iptables(aggregator, mock_iptables):
Testing Kube_proxy in iptables mode.
"""

c = KubeProxyCheck(CHECK_NAME, None, {}, [instance])
c = KubeProxyCheck(CHECK_NAME, {}, [instance])
c.check(instance)
aggregator.assert_metric(NAMESPACE + '.cpu.time')
aggregator.assert_metric(NAMESPACE + '.mem.resident')
@@ -74,7 +74,7 @@ def test_check_userspace(aggregator, mock_userspace):
"""
Testing Kube_proxy in userspace mode.
"""
c = KubeProxyCheck(CHECK_NAME, None, {}, [instance])
c = KubeProxyCheck(CHECK_NAME, {}, [instance])
c.check(instance)
aggregator.assert_metric(NAMESPACE + '.cpu.time')
aggregator.assert_metric(NAMESPACE + '.mem.resident')
@@ -103,11 +103,10 @@ class KubeSchedulerCheck(KubeLeaderElectionMixin, OpenMetricsBaseCheck):
"record_namespace": "kube-system",
}

def __init__(self, name, init_config, agentConfig, instances=None):
def __init__(self, name, init_config, instances):
super(KubeSchedulerCheck, self).__init__(
name,
init_config,
agentConfig,
instances,
default_instances={
"kube_scheduler": {
2 changes: 1 addition & 1 deletion kube_scheduler/tests/test_kube_scheduler_1_13.py
@@ -46,7 +46,7 @@ def mock_leader():


def test_check_metrics_1_13(aggregator, mock_metrics, mock_leader):
c = KubeSchedulerCheck(CHECK_NAME, None, {}, [instance])
c = KubeSchedulerCheck(CHECK_NAME, {}, [instance])
c.check(instance)

def assert_metric(name, **kwargs):
2 changes: 1 addition & 1 deletion kube_scheduler/tests/test_kube_scheduler_1_14.py
@@ -46,7 +46,7 @@ def mock_leader():


def test_check_metrics_1_14(aggregator, mock_metrics, mock_leader):
c = KubeSchedulerCheck(CHECK_NAME, None, {}, [instance])
c = KubeSchedulerCheck(CHECK_NAME, {}, [instance])
c.check(instance)

def assert_metric(name, **kwargs):
4 changes: 2 additions & 2 deletions kubelet/datadog_checks/kubelet/kubelet.py
@@ -163,7 +163,7 @@ class KubeletCheck(CadvisorPrometheusScraperMixin, OpenMetricsBaseCheck, Cadviso

COUNTER_METRICS = {'kubelet_evictions': 'kubelet.evictions'}

def __init__(self, name, init_config, agentConfig, instances=None):
def __init__(self, name, init_config, instances):
self.NAMESPACE = 'kubernetes'
if instances is not None and len(instances) > 1:
raise Exception('Kubelet check only supports one configured instance.')
@@ -172,7 +172,7 @@ def __init__(self, name, init_config, agentConfig, instances=None):
cadvisor_instance = self._create_cadvisor_prometheus_instance(inst)
kubelet_instance = self._create_kubelet_prometheus_instance(inst)
generic_instances = [cadvisor_instance, kubelet_instance]
super(KubeletCheck, self).__init__(name, init_config, agentConfig, generic_instances)
super(KubeletCheck, self).__init__(name, init_config, generic_instances)

self.cadvisor_legacy_port = inst.get('cadvisor_port', CADVISOR_DEFAULT_PORT)
self.cadvisor_legacy_url = None
2 changes: 1 addition & 1 deletion kubelet/tests/test_cadvisor.py
@@ -57,7 +57,7 @@ def test_detect_cadvisor_port_zero():
def test_kubelet_check_cadvisor(monkeypatch, aggregator, tagger):
instance_with_tag = {"tags": ["instance:tag"], "cadvisor_port": 4194}
cadvisor_url = "http://valid:port/url"
check = KubeletCheck('kubelet', None, {}, [instance_with_tag])
check = KubeletCheck('kubelet', {}, [instance_with_tag])
monkeypatch.setattr(
check, 'retrieve_pod_list', mock.Mock(return_value=json.loads(mock_from_file('pods_list_1.2.json')))
)
18 changes: 9 additions & 9 deletions kubelet/tests/test_common.py
@@ -120,9 +120,9 @@ def test_credentials_empty():
assert creds.cert_pair() is None
assert creds.headers("https://dummy") is None

scraper = OpenMetricsBaseCheck('prometheus', {}, {})
scraper_config = scraper.create_scraper_configuration()
scraper_config['prometheus_url'] = "https://dummy"
instance = {'prometheus_url': 'https://dummy', 'namespace': 'foo'}
scraper = OpenMetricsBaseCheck('prometheus', {}, [instance])
scraper_config = scraper.create_scraper_configuration(instance)
creds.configure_scraper(scraper_config)
assert scraper_config['ssl_ca_cert'] is None
assert scraper_config['ssl_cert'] is None
@@ -138,9 +138,9 @@ def test_credentials_certificates():
assert creds.cert_pair() == ("crt", "key")
assert creds.headers("https://dummy") is None

scraper = OpenMetricsBaseCheck('prometheus', {}, {})
scraper_config = scraper.create_scraper_configuration({})
scraper_config['prometheus_url'] = "https://dummy"
instance = {'prometheus_url': 'https://dummy', 'namespace': 'foo'}
scraper = OpenMetricsBaseCheck('prometheus', {}, [instance])
scraper_config = scraper.create_scraper_configuration(instance)
creds.configure_scraper(scraper_config)
assert scraper_config['ssl_ca_cert'] == "ca_cert"
assert scraper_config['ssl_cert'] == "crt"
@@ -159,9 +159,9 @@ def test_credentials_token_noverify():
# Make sure we don't leak the token over http
assert creds.headers("http://dummy") is None

scraper = OpenMetricsBaseCheck('prometheus', {}, {})
scraper_config = scraper.create_scraper_configuration()
scraper_config['prometheus_url'] = 'https://dummy'
instance = {'prometheus_url': 'https://dummy', 'namespace': 'foo'}
scraper = OpenMetricsBaseCheck('prometheus', {}, [instance])
scraper_config = scraper.create_scraper_configuration(instance)
creds.configure_scraper(scraper_config)
assert scraper_config['ssl_ca_cert'] is False
assert scraper_config['ssl_cert'] is None
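A consequence of the constructor change repeated throughout this PR: the unused agentConfig positional argument is gone, so custom checks or tests that still pass it fail at construction time. A short sketch, assuming the crio check from this repo is installed; the endpoint is only a placeholder:

from datadog_checks.crio import CrioCheck

instance = {'prometheus_url': 'http://localhost:9090/metrics', 'namespace': 'crio'}

# New signature: name, init_config, instances
check = CrioCheck('crio', {}, [instance])

# The old four-argument call, e.g. CrioCheck('crio', {}, None, [instance]),
# now raises TypeError because __init__ accepts only three arguments besides self.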