From 67fb3c648e6d854b5f9c8732a8c73132ea0fa974 Mon Sep 17 00:00:00 2001
From: Ofek Lev
Date: Mon, 1 Feb 2021 17:46:36 -0500
Subject: [PATCH] Add support for legacy config to OpenMetricsCompatibilityScraper

---
 .../base/checks/openmetrics/v2/scraper.py     |  76 ++++++++
 .../tests/openmetrics/test_compat_scraper.py  | 184 ++++++++++++++++++
 .../tests/openmetrics/test_interface.py       |  13 +-
 .../tests/openmetrics/utils.py                |  22 ++-
 4 files changed, 283 insertions(+), 12 deletions(-)
 create mode 100644 datadog_checks_base/tests/openmetrics/test_compat_scraper.py

diff --git a/datadog_checks_base/datadog_checks/base/checks/openmetrics/v2/scraper.py b/datadog_checks_base/datadog_checks/base/checks/openmetrics/v2/scraper.py
index 5b36f2130281a..5b08b8ab6f8cd 100644
--- a/datadog_checks_base/datadog_checks/base/checks/openmetrics/v2/scraper.py
+++ b/datadog_checks_base/datadog_checks/base/checks/openmetrics/v2/scraper.py
@@ -1,8 +1,10 @@
 # (C) Datadog, Inc. 2020-present
 # All rights reserved
 # Licensed under a 3-clause BSD style license (see LICENSE)
+import fnmatch
 import inspect
 import re
+from copy import deepcopy
 from itertools import chain
 from math import isinf, isnan
 
@@ -346,3 +348,77 @@ def create_scraper(self, config):
     """
 
     SERVICE_CHECK_HEALTH = 'prometheus.health'
+
+    def __init__(self, check, config):
+        new_config = deepcopy(config)
+        new_config.setdefault('enable_health_service_check', new_config.pop('health_service_check', True))
+        new_config.setdefault('collect_histogram_buckets', new_config.pop('send_histograms_buckets', True))
+        new_config.setdefault('non_cumulative_histogram_buckets', new_config.pop('non_cumulative_buckets', False))
+        new_config.setdefault('histogram_buckets_as_distributions', new_config.pop('send_distribution_buckets', False))
+        new_config.setdefault('raw_metric_prefix', new_config.pop('prometheus_metrics_prefix', ''))
+        new_config.setdefault('hostname_label', new_config.pop('label_to_hostname', ''))
+        new_config.setdefault('rename_labels', new_config.pop('labels_mapper', {}))
+        new_config.setdefault(
+            'exclude_metrics', [fnmatch.translate(metric) for metric in new_config.pop('ignore_metrics', [])]
+        )
+
+        if 'label_to_hostname_suffix' in new_config:
+            suffix = new_config.pop('label_to_hostname_suffix')
+            new_config.setdefault('hostname_format', f'<HOSTNAME>{suffix}')
+
+        exclude_metrics_by_labels = new_config.setdefault('exclude_metrics_by_labels', {})
+        for metric, labels in new_config.pop('ignore_metrics_by_labels', {}).items():
+            if '*' in labels:
+                exclude_metrics_by_labels[metric] = True
+            else:
+                exclude_metrics_by_labels[metric] = labels
+
+        share_labels = new_config.setdefault('share_labels', {})
+        for metric, data in new_config.pop('label_joins', {}).items():
+            share_labels[metric] = {
+                'match': data.get('labels_to_match', []),
+                'labels': data.get('labels_to_get', []),
+                'values': [1],
+            }
+
+        old_metrics = new_config.pop('metrics', [])
+        type_overrides = new_config.pop('type_overrides', {})
+        metrics = new_config.setdefault('metrics', [])
+        for metric in old_metrics:
+            data = {}
+
+            if isinstance(metric, str):
+                key = fnmatch.translate(metric)
+                data[key] = {'name': metric}
+                if metric in type_overrides:
+                    data[key]['type'] = type_overrides.pop(metric)
+            else:
+                for name, new_name in metric.items():
+                    key = fnmatch.translate(name)
+                    data[key] = {'name': new_name}
+                    if name in type_overrides:
+                        data[key]['type'] = type_overrides.pop(name)
+
+            metrics.append(data)
+
+        for metric, metric_type in type_overrides.items():
+            metrics.append({fnmatch.translate(metric): {'type': metric_type}})
+
+        metadata_metric_name = new_config.pop('metadata_metric_name', '')
+        metadata_label_map = new_config.pop('metadata_label_map', {})
+        if metadata_metric_name and metadata_label_map:
+            metadata_name, label_name = metadata_label_map.popitem()
+            metrics.append({metadata_metric_name: {'name': metadata_name, 'type': 'metadata', 'label': label_name}})
+
+        bearer_token_auth = new_config.pop('bearer_token_auth', False)
+        bearer_token_path = new_config.pop('bearer_token_path', '/var/run/secrets/kubernetes.io/serviceaccount/token')
+        if bearer_token_auth:
+            new_config.setdefault(
+                'auth_token',
+                {
+                    'reader': {'type': 'file', 'path': bearer_token_path},
+                    'writer': {'type': 'header', 'name': 'Authorization', 'value': 'Bearer <TOKEN>'},
+                },
+            )
+
+        super(OpenMetricsCompatibilityScraper, self).__init__(check, new_config)
diff --git a/datadog_checks_base/tests/openmetrics/test_compat_scraper.py b/datadog_checks_base/tests/openmetrics/test_compat_scraper.py
new file mode 100644
index 0000000000000..175e68ecb2b72
--- /dev/null
+++ b/datadog_checks_base/tests/openmetrics/test_compat_scraper.py
@@ -0,0 +1,184 @@
+# (C) Datadog, Inc. 2020-present
+# All rights reserved
+# Licensed under a 3-clause BSD style license (see LICENSE)
+import pytest
+
+from ..utils import requires_py3
+from .utils import get_legacy_check
+
+pytestmark = [requires_py3, pytest.mark.openmetrics, pytest.mark.openmetrics_compat_scraper]
+
+
+class TestRawMetricPrefix:
+    def test_not_string(self, dd_run_check):
+        check = get_legacy_check({'prometheus_metrics_prefix': 9000})
+
+        with pytest.raises(Exception, match='^Setting `raw_metric_prefix` must be a string$'):
+            dd_run_check(check, extract_message=True)
+
+
+class TestHostnameLabel:
+    def test_not_string(self, dd_run_check):
+        check = get_legacy_check({'label_to_hostname': 9000})
+
+        with pytest.raises(Exception, match='^Setting `hostname_label` must be a string$'):
+            dd_run_check(check, extract_message=True)
+
+
+class TestRenameLabels:
+    def test_not_mapping(self, dd_run_check):
+        check = get_legacy_check({'labels_mapper': 9000})
+
+        with pytest.raises(Exception, match='^Setting `rename_labels` must be a mapping$'):
+            dd_run_check(check, extract_message=True)
+
+    def test_value_not_string(self, dd_run_check):
+        check = get_legacy_check({'labels_mapper': {'foo': 9000}})
+
+        with pytest.raises(Exception, match='^Value for label `foo` of setting `rename_labels` must be a string$'):
+            dd_run_check(check, extract_message=True)
+
+
+class TestExcludeMetrics:
+    def test_entry_invalid_type(self, dd_run_check):
+        check = get_legacy_check({'exclude_metrics': [9000]})
+
+        with pytest.raises(Exception, match='^Entry #1 of setting `exclude_metrics` must be a string$'):
+            dd_run_check(check, extract_message=True)
+
+
+class TestExcludeMetricsByLabels:
+    def test_value_not_string(self, dd_run_check):
+        check = get_legacy_check({'ignore_metrics_by_labels': {'foo': [9000]}})
+
+        with pytest.raises(
+            Exception, match='^Value #1 for label `foo` of setting `exclude_metrics_by_labels` must be a string$'
+        ):
+            dd_run_check(check, extract_message=True)
+
+
+class TestShareLabels:
+    def test_not_mapping(self, dd_run_check):
+        check = get_legacy_check({'share_labels': 9000})
+
+        with pytest.raises(Exception, match='^Setting `share_labels` must be a mapping$'):
+            dd_run_check(check, extract_message=True)
+
+    def test_invalid_type(self, dd_run_check):
+        check = get_legacy_check({'share_labels': {'foo': 9000}})
+
+        with pytest.raises(
+            Exception, match='^Metric `foo` of setting `share_labels` must be a mapping or set to `true`$'
+        ):
+            dd_run_check(check, extract_message=True)
+
+    def test_values_not_array(self, dd_run_check):
+        check = get_legacy_check({'share_labels': {'foo': {'values': 9000}}})
+
+        with pytest.raises(
+            Exception, match='^Option `values` for metric `foo` of setting `share_labels` must be an array$'
+        ):
+            dd_run_check(check, extract_message=True)
+
+    def test_values_entry_not_integer(self, dd_run_check):
+        check = get_legacy_check({'share_labels': {'foo': {'values': [1.0]}}})
+
+        with pytest.raises(
+            Exception,
+            match=(
+                '^Entry #1 of option `values` for metric `foo` of setting `share_labels` must represent an integer$'
+            ),
+        ):
+            dd_run_check(check, extract_message=True)
+
+    @pytest.mark.parametrize('option', ['labels', 'match'])
+    def test_option_not_array(self, dd_run_check, option):
+        check = get_legacy_check({'share_labels': {'foo': {option: 9000}}})
+
+        with pytest.raises(
+            Exception, match='^Option `{}` for metric `foo` of setting `share_labels` must be an array$'.format(option)
+        ):
+            dd_run_check(check, extract_message=True)
+
+    @pytest.mark.parametrize('option', ['labels', 'match'])
+    def test_option_entry_not_string(self, dd_run_check, option):
+        check = get_legacy_check({'share_labels': {'foo': {option: [9000]}}})
+
+        with pytest.raises(
+            Exception,
+            match=(
+                '^Entry #1 of option `{}` for metric `foo` of setting `share_labels` must be a string$'.format(option)
+            ),
+        ):
+            dd_run_check(check, extract_message=True)
+
+    def test_share_labels(self, aggregator, dd_run_check, mock_http_response):
+        mock_http_response(
+            """
+            # HELP go_memstats_alloc_bytes Number of bytes allocated and still in use.
+            # TYPE go_memstats_alloc_bytes gauge
+            go_memstats_alloc_bytes{foo="bar",baz="foo",pod="test"} 1
+            # HELP go_memstats_gc_sys_bytes Number of bytes used for garbage collection system metadata.
+            # TYPE go_memstats_gc_sys_bytes gauge
+            go_memstats_gc_sys_bytes{bar="foo",baz="foo"} 901120
+            # HELP go_memstats_free_bytes Number of bytes free and available for use.
+            # TYPE go_memstats_free_bytes gauge
+            go_memstats_free_bytes{bar="baz",baz="bar"} 6.396288e+06
+            """
+        )
+        check = get_legacy_check(
+            {
+                'metrics': ['*'],
+                'label_joins': {'go_memstats_alloc_bytes': {'labels_to_match': ['baz'], 'labels_to_get': ['pod']}},
+            }
+        )
+        dd_run_check(check)
+
+        aggregator.assert_metric(
+            'test.go_memstats_alloc_bytes',
+            1,
+            metric_type=aggregator.GAUGE,
+            tags=['endpoint:test', 'foo:bar', 'baz:foo', 'pod:test'],
+        )
+        aggregator.assert_metric(
+            'test.go_memstats_gc_sys_bytes',
+            901120,
+            metric_type=aggregator.GAUGE,
+            tags=['endpoint:test', 'bar:foo', 'baz:foo', 'pod:test'],
+        )
+        aggregator.assert_metric(
+            'test.go_memstats_free_bytes',
+            6396288,
+            metric_type=aggregator.GAUGE,
+            tags=['endpoint:test', 'bar:baz', 'baz:bar'],
+        )
+
+        aggregator.assert_all_metrics_covered()
+
+    def test_metadata(self, aggregator, datadog_agent, dd_run_check, mock_http_response):
+        mock_http_response(
+            """
+            # HELP kubernetes_build_info A metric with a constant '1' value labeled by major, minor, git version, git commit, git tree state, build date, Go version, and compiler from which Kubernetes was built, and platform on which it is running.
+            # TYPE kubernetes_build_info gauge
+            kubernetes_build_info{buildDate="2016-11-18T23:57:26Z",compiler="gc",gitCommit="3872cb93abf9482d770e651b5fe14667a6fca7e0",gitTreeState="dirty",gitVersion="v1.6.0-alpha.0.680+3872cb93abf948-dirty",goVersion="go1.7.3",major="1",minor="6+",platform="linux/amd64"} 1
+            """  # noqa: E501
+        )
+        check = get_legacy_check(
+            {'metadata_metric_name': 'kubernetes_build_info', 'metadata_label_map': {'version': 'gitVersion'}}
+        )
+        check.check_id = 'test:instance'
+        dd_run_check(check)
+
+        version_metadata = {
+            'version.major': '1',
+            'version.minor': '6',
+            'version.patch': '0',
+            'version.release': 'alpha.0.680',
+            'version.build': '3872cb93abf948-dirty',
+            'version.raw': 'v1.6.0-alpha.0.680+3872cb93abf948-dirty',
+            'version.scheme': 'semver',
+        }
+
+        datadog_agent.assert_metadata('test:instance', version_metadata)
+        datadog_agent.assert_metadata_count(len(version_metadata))
+        aggregator.assert_all_metrics_covered()
diff --git a/datadog_checks_base/tests/openmetrics/test_interface.py b/datadog_checks_base/tests/openmetrics/test_interface.py
index 38a74b80784c5..b3b6ff7152193 100644
--- a/datadog_checks_base/tests/openmetrics/test_interface.py
+++ b/datadog_checks_base/tests/openmetrics/test_interface.py
@@ -7,7 +7,7 @@
 from datadog_checks.base.constants import ServiceCheck
 
 from ..utils import requires_py3
-from .utils import get_check
+from .utils import get_check, get_legacy_check
 
 pytestmark = [requires_py3, pytest.mark.openmetrics, pytest.mark.openmetrics_interface]
 
@@ -82,15 +82,6 @@ def test_service_check_dynamic_tags(aggregator, dd_run_check, mock_http_response
 
 
 def test_scraper_override(aggregator, dd_run_check, mock_http_response):
-    # TODO: when we drop Python 2 move this up top
-    from datadog_checks.base.checks.openmetrics.v2.scraper import OpenMetricsCompatibilityScraper
-
-    class Check(OpenMetricsBaseCheckV2):
-        __NAMESPACE__ = 'test'
-
-        def create_scraper(self, config):
-            return OpenMetricsCompatibilityScraper(self, self.get_config_with_defaults(config))
-
     mock_http_response(
         """
         # HELP go_memstats_alloc_bytes Number of bytes allocated and still in use.
@@ -98,7 +89,7 @@ def create_scraper(self, config):
         go_memstats_alloc_bytes{foo="baz"} 6.396288e+06
         """
     )
-    check = Check('test', {}, [{'openmetrics_endpoint': 'test', 'metrics': ['.+']}])
+    check = get_legacy_check()
     dd_run_check(check)
 
     aggregator.assert_service_check('test.prometheus.health', ServiceCheck.OK, tags=['endpoint:test'])
diff --git a/datadog_checks_base/tests/openmetrics/utils.py b/datadog_checks_base/tests/openmetrics/utils.py
index 11d717898c598..5248e374090d0 100644
--- a/datadog_checks_base/tests/openmetrics/utils.py
+++ b/datadog_checks_base/tests/openmetrics/utils.py
@@ -3,15 +3,35 @@
 # Licensed under a 3-clause BSD style license (see LICENSE)
 from datadog_checks.base import OpenMetricsBaseCheckV2
 
+# TODO: remove `try` when we drop Python 2
+try:
+    from datadog_checks.base.checks.openmetrics.v2.scraper import OpenMetricsCompatibilityScraper
+
+    class LegacyCheck(OpenMetricsBaseCheckV2):
+        def create_scraper(self, config):
+            return OpenMetricsCompatibilityScraper(self, self.get_config_with_defaults(config))
+
+
+except Exception:
+    pass
+
 
 def get_check(instance=None, init_config=None):
+    return _get_check(OpenMetricsBaseCheckV2, instance, init_config)
+
+
+def get_legacy_check(instance=None, init_config=None):
+    return _get_check(LegacyCheck, instance, init_config)
+
+
+def _get_check(cls, instance, init_config):
     if instance is None:
         instance = {}
     if init_config is None:
        init_config = {}
 
     instance.setdefault('openmetrics_endpoint', 'test')
 
-    check = OpenMetricsBaseCheckV2('test', init_config, [instance])
+    check = cls('test', init_config, [instance])
     check.__NAMESPACE__ = 'test'
 
     return check
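Note (not part of the patch): the heart of the change is the option translation in
OpenMetricsCompatibilityScraper.__init__ above, which rewrites legacy OpenMetrics options into
their v2 equivalents before handing the config to the base scraper. The sketch below mirrors two
of those translations (glob-style `ignore_metrics` to regex-based `exclude_metrics`, and
`label_joins` to `share_labels`) using only the standard library, so the mapping can be tried
outside the Agent. The helper name `translate_legacy_config` and the sample `legacy_instance` are
invented for illustration and do not exist in the codebase.

    import fnmatch
    from copy import deepcopy


    def translate_legacy_config(config):
        # Illustrative helper mirroring part of OpenMetricsCompatibilityScraper.__init__;
        # only the `ignore_metrics` and `label_joins` translations are reproduced here.
        new_config = deepcopy(config)

        # Legacy glob patterns become the regular expressions the v2 scraper expects.
        new_config.setdefault(
            'exclude_metrics', [fnmatch.translate(metric) for metric in new_config.pop('ignore_metrics', [])]
        )

        # Legacy `label_joins` entries become `share_labels`, keyed by the source metric.
        share_labels = new_config.setdefault('share_labels', {})
        for metric, data in new_config.pop('label_joins', {}).items():
            share_labels[metric] = {
                'match': data.get('labels_to_match', []),
                'labels': data.get('labels_to_get', []),
                'values': [1],
            }

        return new_config


    legacy_instance = {
        'ignore_metrics': ['go_gc_*'],
        'label_joins': {'kube_pod_info': {'labels_to_match': ['pod'], 'labels_to_get': ['node']}},
    }
    # Prints the v2-style options derived from the legacy ones: a regex entry in
    # `exclude_metrics` and a `share_labels` mapping for `kube_pod_info`.
    print(translate_legacy_config(legacy_instance))

Because the translation lives in __init__ rather than in each integration, existing instances
written against the legacy option names keep working while scraping itself goes through the v2
code path.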