Support Python 3 (#3030)
nmuesch authored Jan 25, 2019
1 parent 6e7c3cc commit 9ef9994
Showing 4 changed files with 19 additions and 24 deletions.
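Most of the diff swaps Python 2's dict.iteritems() method, which was removed in Python 3, for six.iteritems(), which works on both interpreters. A minimal standalone sketch of that pattern (illustrative only, with made-up data, not the check's actual code):

    from six import iteritems

    job_counts = {'job_a': 3, 'job_b': 1}

    # Python 2 only: job_counts.iteritems()
    # Python 2 and 3: six.iteritems() dispatches to iteritems()/items() as appropriate
    for name, count in iteritems(job_counts):
        print(name, count)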
2 changes: 1 addition & 1 deletion .travis.yml
@@ -160,7 +160,7 @@ jobs:
- stage: test
env: CHECK=kubelet
- stage: test
- env: CHECK=kubernetes_state
+ env: CHECK=kubernetes_state PYTHON3=true
- stage: test
env: CHECK=kyototycoon PYTHON3=true
- stage: test
23 changes: 12 additions & 11 deletions kubernetes_state/datadog_checks/kubernetes_state/kubernetes_state.py
@@ -6,6 +6,7 @@
import time
from collections import defaultdict, Counter
from copy import deepcopy
+ from six import iteritems

from datadog_checks.errors import CheckException
from datadog_checks.checks.openmetrics import OpenMetricsBaseCheck
@@ -98,9 +99,9 @@ def check(self, instance):
scraper_config = self.config_map[endpoint]
self.process(scraper_config, metric_transformers=self.METRIC_TRANSFORMERS)

- for job_tags, job_count in self.job_succeeded_count.iteritems():
+ for job_tags, job_count in iteritems(self.job_succeeded_count):
self.monotonic_count(scraper_config['namespace'] + '.job.succeeded', job_count, list(job_tags))
- for job_tags, job_count in self.job_failed_count.iteritems():
+ for job_tags, job_count in iteritems(self.job_failed_count):
self.monotonic_count(scraper_config['namespace'] + '.job.failed', job_count, list(job_tags))

def _create_kubernetes_state_prometheus_instance(self, instance):
@@ -396,7 +397,7 @@ def kube_pod_status_phase(self, metric, scraper_config):
] + scraper_config['custom_tags']
status_phase_counter[tuple(sorted(tags))] += sample[self.SAMPLE_VALUE]

- for tags, count in status_phase_counter.iteritems():
+ for tags, count in iteritems(status_phase_counter):
self.gauge(metric_name, count, tags=list(tags))

def _submit_metric_kube_pod_container_status_reason(self, metric, metric_suffix, whitelisted_status_reasons,
@@ -443,7 +444,7 @@ def kube_cronjob_next_schedule_time(self, metric, scraper_config):
for sample in metric.samples:
on_schedule = int(sample[self.SAMPLE_VALUE]) - curr_time
tags = [self._format_tag(label_name, label_value, scraper_config)
- for label_name, label_value in sample[self.SAMPLE_LABELS].iteritems()]
+ for label_name, label_value in iteritems(sample[self.SAMPLE_LABELS])]
tags += scraper_config['custom_tags']
if on_schedule < 0:
message = "The service check scheduled at {} is {} seconds late".format(
@@ -457,7 +458,7 @@ def kube_job_complete(self, metric, scraper_config):
service_check_name = scraper_config['namespace'] + '.job.complete'
for sample in metric.samples:
tags = []
- for label_name, label_value in sample[self.SAMPLE_LABELS].iteritems():
+ for label_name, label_value in iteritems(sample[self.SAMPLE_LABELS]):
if label_name == 'job' or label_name == 'job_name':
trimmed_job = self._trim_job_tag(label_value)
tags.append(self._format_tag(label_name, trimmed_job, scraper_config))
@@ -469,7 +470,7 @@ def kube_job_failed(self, metric, scraper_config):
service_check_name = scraper_config['namespace'] + '.job.complete'
for sample in metric.samples:
tags = []
- for label_name, label_value in sample[self.SAMPLE_LABELS].iteritems():
+ for label_name, label_value in iteritems(sample[self.SAMPLE_LABELS]):
if label_name == 'job' or label_name == 'job_name':
trimmed_job = self._trim_job_tag(label_value)
tags.append(self._format_tag(label_name, trimmed_job, scraper_config))
@@ -480,7 +481,7 @@ def kube_job_failed(self, metric, scraper_config):
def kube_job_status_failed(self, metric, scraper_config):
for sample in metric.samples:
tags = [] + scraper_config['custom_tags']
- for label_name, label_value in sample[self.SAMPLE_LABELS].iteritems():
+ for label_name, label_value in iteritems(sample[self.SAMPLE_LABELS]):
if label_name == 'job' or label_name == 'job_name':
trimmed_job = self._trim_job_tag(label_value)
tags.append(self._format_tag(label_name, trimmed_job, scraper_config))
@@ -491,7 +492,7 @@ def kube_job_status_failed(self, metric, scraper_config):
def kube_job_status_succeeded(self, metric, scraper_config):
for sample in metric.samples:
tags = [] + scraper_config['custom_tags']
- for label_name, label_value in sample[self.SAMPLE_LABELS].iteritems():
+ for label_name, label_value in iteritems(sample[self.SAMPLE_LABELS]):
if label_name == 'job' or label_name == 'job_name':
trimmed_job = self._trim_job_tag(label_value)
tags.append(self._format_tag(label_name, trimmed_job, scraper_config))
@@ -518,7 +519,7 @@ def kube_node_status_condition(self, metric, scraper_config):
] + scraper_config['custom_tags']
by_condition_counter[tuple(sorted(tags))] += sample[self.SAMPLE_VALUE]

- for tags, count in by_condition_counter.iteritems():
+ for tags, count in iteritems(by_condition_counter):
self.gauge(metric_name, count, tags=list(tags))

def kube_node_status_ready(self, metric, scraper_config):
@@ -568,7 +569,7 @@ def kube_node_spec_unschedulable(self, metric, scraper_config):
if metric.type in METRIC_TYPES:
for sample in metric.samples:
tags = [self._format_tag(label_name, label_value, scraper_config)
- for label_name, label_value in sample[self.SAMPLE_LABELS].iteritems()]
+ for label_name, label_value in iteritems(sample[self.SAMPLE_LABELS])]
tags += scraper_config['custom_tags']
status = statuses[int(sample[self.SAMPLE_VALUE])] # value can be 0 or 1
tags.append(self._format_tag('status', status, scraper_config))
@@ -636,5 +637,5 @@ def count_objects_by_tags(self, metric, scraper_config):
] + scraper_config['custom_tags']
object_counter[tuple(sorted(tags))] += sample[self.SAMPLE_VALUE]

- for tags, count in object_counter.iteritems():
+ for tags, count in iteritems(object_counter):
self.gauge(metric_name, count, tags=list(tags))
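The loops above follow a recurring pattern in this check: per-sample tags are sorted and used as a Counter key so that equivalent tag sets aggregate into one total, which is then submitted as a gauge. A small standalone sketch of that pattern with the new iteritems() call (hypothetical sample data, not the check's actual method):

    from collections import Counter
    from six import iteritems

    # Hypothetical (tags, value) pairs as they might come from Prometheus samples.
    samples = [
        (['phase:Running', 'namespace:default'], 1.0),
        (['namespace:default', 'phase:Running'], 1.0),  # same tags, different order
        (['phase:Pending', 'namespace:default'], 1.0),
    ]

    counter = Counter()
    for tags, value in samples:
        # Sorting makes tag order irrelevant; a tuple is used because lists are not hashable.
        counter[tuple(sorted(tags))] += value

    for tags, count in iteritems(counter):
        # The check calls self.gauge(metric_name, count, tags=list(tags)) at this point.
        print(list(tags), count)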
12 changes: 3 additions & 9 deletions kubernetes_state/tests/test_kubernetes_state.py
@@ -6,8 +6,8 @@
import mock
import pytest

- from datadog_checks.stubs import aggregator as _aggregator
from datadog_checks.kubernetes_state import KubernetesState
+ from datadog_checks.utils.common import ensure_unicode


HERE = os.path.dirname(os.path.abspath(__file__))
@@ -198,19 +198,13 @@ def __init__(self, content, content_type):
self.headers = {'Content-Type': content_type}

def iter_lines(self, **_):
- for elt in self.content.split("\n"):
- yield elt
+ for elt in self.content.split(b"\n"):
+ yield ensure_unicode(elt)

def close(self):
pass


- @pytest.fixture
- def aggregator():
- _aggregator.reset()
- return _aggregator


@pytest.fixture
def instance():
return {
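The MockResponse helper in the tests above now holds its payload as bytes, as a real HTTP response body would under Python 3, so iter_lines() splits on b"\n" and passes each chunk through ensure_unicode before yielding it. A minimal sketch of why the old code broke (hypothetical payload, not the actual fixture):

    # Hypothetical Prometheus-style payload held as bytes, as on Python 3.
    content = b"kube_job_status_succeeded 1\nkube_job_status_failed 0"

    # content.split("\n") raises TypeError on Python 3: bytes can only be split on bytes.
    lines = content.split(b"\n")

    # Each chunk is still bytes, so decode it back to text before parsing
    # (the diff uses ensure_unicode from datadog_checks.utils.common for this step).
    decoded = [line.decode('utf-8') for line in lines]
    print(decoded)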
6 changes: 3 additions & 3 deletions kubernetes_state/tox.ini
@@ -1,16 +1,16 @@
[tox]
minversion = 2.0
basepython = py27
envlist = unit, flake8
envlist =
{py27,py36}-unit
flake8

[testenv]
usedevelop = true
platform = linux|darwin|win32
deps =
-e../datadog_checks_base[deps]
-rrequirements-dev.txt

[testenv:unit]
commands =
pip install -r requirements.in
pytest -v
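With the new envlist, tox generates separate py27-unit and py36-unit environments, so the same unit suite runs under both interpreters; for example, tox -e py36-unit should run only the Python 3.6 tests, while a bare tox run should cover every environment in the list (assuming tox is invoked from the kubernetes_state directory).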
