Support Python 3 #2870

Merged · 2 commits · Jan 7, 2019
2 changes: 1 addition & 1 deletion .travis.yml
@@ -165,7 +165,7 @@ jobs:
- stage: test
env: CHECK=linux_proc_extras PYTHON3=true
- stage: test
-env: CHECK=mapreduce
+env: CHECK=mapreduce PYTHON3=true
- stage: test
env: CHECK=marathon
- stage: test
23 changes: 10 additions & 13 deletions mapreduce/datadog_checks/mapreduce/mapreduce.py
@@ -40,17 +40,13 @@
mapreduce.job.reduce.task.progress The distribution of all reduce task progresses
"""

-# stdlib
-from urlparse import urljoin
-from urlparse import urlsplit
-from urlparse import urlunsplit

-# 3rd party
import requests
from requests.exceptions import Timeout, HTTPError, InvalidURL, ConnectionError
from simplejson import JSONDecodeError
+from six.moves.urllib.parse import urljoin, urlsplit, urlunsplit
+from six import iteritems

-# Project
from datadog_checks.checks import AgentCheck
from datadog_checks.config import _is_affirmative
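A quick aside on the import change: `six.moves.urllib.parse` resolves to `urlparse` on Python 2 and `urllib.parse` on Python 3, so the three separate `from urlparse import ...` lines collapse into one import that works on both interpreters. A minimal sketch (the ResourceManager URL below is a placeholder, not taken from the check):

```python
from six.moves.urllib.parse import urljoin, urlsplit, urlunsplit

# Placeholder base URL for illustration only.
base = 'http://resourcemanager:8088/ws/v1/cluster/'

# urljoin appends a relative path to the base.
print(urljoin(base, 'apps'))  # -> http://resourcemanager:8088/ws/v1/cluster/apps

# urlsplit/urlunsplit round-trip a URL through its five components.
scheme, netloc, path, query, fragment = urlsplit(base)
print(urlunsplit((scheme, netloc, '/ws/v1/cluster/info', '', '')))
```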

@@ -172,8 +168,9 @@ def check(self, instance):

# Report success after gathering all metrics from Application Master
if running_jobs:
-job_id, metrics = running_jobs.items()[0]
-am_address = self._get_url_base(metrics['tracking_url'])
+for job_id, metrics in iteritems(running_jobs):
+    am_address = self._get_url_base(metrics['tracking_url'])
+    break

self.service_check(
self.MAPREDUCE_SERVICE_CHECK,
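The two-line change above is needed because `dict.items()` returns a list on Python 2 but a non-subscriptable view on Python 3, so `running_jobs.items()[0]` raises `TypeError` there; the patch grabs the first entry by iterating once and breaking. A sketch of the failure and of an alternative one-liner, shown for comparison only (the data is made up, not from the check):

```python
from six import iteritems

# Hypothetical data shaped like the check's running_jobs dict.
running_jobs = {'job_1': {'tracking_url': 'http://am-host:8088/proxy/application_1/'}}

# Python 2: dict.items() is a list, so [0] works.
# Python 3: raises TypeError ('dict_items' object is not subscriptable).
# job_id, metrics = running_jobs.items()[0]

# What the patch does: iterate once and break.
for job_id, metrics in iteritems(running_jobs):
    break

# An equivalent one-liner that also runs on both interpreters.
job_id, metrics = next(iter(running_jobs.items()))
```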
@@ -319,7 +316,7 @@ def _mapreduce_job_metrics(self, running_apps, auth, ssl_verify, addl_tags):
"""
running_jobs = {}

-for app_id, (app_name, tracking_url) in running_apps.iteritems():
+for app_id, (app_name, tracking_url) in iteritems(running_apps):

metrics_json = self._rest_request_to_json(
tracking_url, auth, ssl_verify, self.MAPREDUCE_JOBS_PATH, self.MAPREDUCE_SERVICE_CHECK
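The remaining changes in this file follow a single pattern: every Python 2-only `dict.iteritems()` call becomes `six.iteritems(dict)`, which dispatches to `iteritems()` on Python 2 and `items()` on Python 3. A minimal sketch with made-up data:

```python
from six import iteritems

# Hypothetical running_apps mapping, shaped like the one the check builds.
running_apps = {
    'application_1': ('wordcount', 'http://am-host:8088/proxy/application_1/'),
}

# Works unchanged on Python 2 (lazy iteritems) and Python 3 (dict view).
for app_id, (app_name, tracking_url) in iteritems(running_apps):
    print(app_id, app_name, tracking_url)
```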
@@ -359,7 +356,7 @@ def _mapreduce_job_counters_metrics(self, running_jobs, auth, ssl_verify, addl_t
"""
Get custom metrics specified for each counter
"""
-for job_id, job_metrics in running_jobs.iteritems():
+for job_id, job_metrics in iteritems(running_jobs):
job_name = job_metrics['job_name']

# Check if the job_name exist in the custom metrics
@@ -419,7 +416,7 @@ def _mapreduce_task_metrics(self, running_jobs, auth, ssl_verify, addl_tags):
Get metrics for each MapReduce task
Return a dictionary of {task_id: 'tracking_url'} for each MapReduce task
"""
-for job_id, job_stats in running_jobs.iteritems():
+for job_id, job_stats in iteritems(running_jobs):

metrics_json = self._rest_request_to_json(
job_stats['tracking_url'], auth, ssl_verify, 'tasks', self.MAPREDUCE_SERVICE_CHECK, tags=addl_tags
@@ -451,7 +448,7 @@ def _set_metrics_from_json(self, metrics_json, metrics, tags):
"""
Parse the JSON response and set the metrics
"""
-for status, (metric_name, metric_type) in metrics.iteritems():
+for status, (metric_name, metric_type) in iteritems(metrics):
metric_status = metrics_json.get(status)

if metric_status is not None:
@@ -491,7 +488,7 @@ def _rest_request_to_json(self, address, auth, ssl_verify, object_path, service_

# Add kwargs as arguments
if kwargs:
-query = '&'.join(['{}={}'.format(key, value) for key, value in kwargs.iteritems()])
+query = '&'.join(['{}={}'.format(key, value) for key, value in iteritems(kwargs)])
url = urljoin(url, '?' + query)

try:
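One note on the query-string line above: the manual `'&'.join(...)` formatting is kept as-is, only the iteration becomes `iteritems(kwargs)`. If escaping of values ever mattered, `urlencode` from the same `six.moves` compat layer would be an alternative; the host and parameters below are placeholders, not necessarily what the check sends:

```python
from six.moves.urllib.parse import urlencode, urljoin

url = 'http://rm-host:8088/ws/v1/cluster/apps'
kwargs = {'states': 'RUNNING', 'applicationTypes': 'MAPREDUCE'}

# What the check does: plain string formatting of key=value pairs.
query = '&'.join('{}={}'.format(key, value) for key, value in kwargs.items())
print(urljoin(url, '?' + query))

# Alternative shown for comparison: urlencode also percent-escapes values.
print(urljoin(url, '?' + urlencode(kwargs)))
```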
15 changes: 9 additions & 6 deletions mapreduce/tests/test_mapreduce.py
@@ -3,6 +3,9 @@
# Licensed under a 3-clause BSD style license (see LICENSE)

from datadog_checks.mapreduce import MapReduceCheck

+from six import iteritems

from .common import (
INIT_CONFIG,
MR_CONFIG,
@@ -34,19 +37,19 @@ def test_check(aggregator, mocked_request):
mapreduce.check(MR_CONFIG["instances"][0])

# Check the MapReduce job metrics
-for metric, value in MAPREDUCE_JOB_METRIC_VALUES.iteritems():
+for metric, value in iteritems(MAPREDUCE_JOB_METRIC_VALUES):
aggregator.assert_metric(metric, value=value, tags=MAPREDUCE_JOB_METRIC_TAGS + CUSTOM_TAGS, count=1)

# Check the map task metrics
-for metric, value in MAPREDUCE_MAP_TASK_METRIC_VALUES.iteritems():
+for metric, value in iteritems(MAPREDUCE_MAP_TASK_METRIC_VALUES):
aggregator.assert_metric(metric, value=value, tags=MAPREDUCE_MAP_TASK_METRIC_TAGS + CUSTOM_TAGS, count=1)

# Check the reduce task metrics
-for metric, value in MAPREDUCE_REDUCE_TASK_METRIC_VALUES.iteritems():
+for metric, value in iteritems(MAPREDUCE_REDUCE_TASK_METRIC_VALUES):
aggregator.assert_metric(metric, value=value, tags=MAPREDUCE_REDUCE_TASK_METRIC_TAGS + CUSTOM_TAGS, count=1)

# Check the MapReduce job counter metrics
-for metric, attributes in MAPREDUCE_JOB_COUNTER_METRIC_VALUES_READ.iteritems():
+for metric, attributes in iteritems(MAPREDUCE_JOB_COUNTER_METRIC_VALUES_READ):
aggregator.assert_metric(
metric,
value=attributes["value"],
@@ -55,7 +58,7 @@ def test_check(aggregator, mocked_request):
)

# Check the MapReduce job counter metrics
-for metric, attributes in MAPREDUCE_JOB_COUNTER_METRIC_VALUES_WRITTEN.iteritems():
+for metric, attributes in iteritems(MAPREDUCE_JOB_COUNTER_METRIC_VALUES_WRITTEN):
aggregator.assert_metric(
metric,
value=attributes["value"],
@@ -64,7 +67,7 @@ def test_check(aggregator, mocked_request):
)

# Check the MapReduce job counter metrics
-for metric, attributes in MAPREDUCE_JOB_COUNTER_METRIC_VALUES_RECORDS.iteritems():
+for metric, attributes in iteritems(MAPREDUCE_JOB_COUNTER_METRIC_VALUES_RECORDS):
aggregator.assert_metric(
metric,
value=attributes["value"],
21 changes: 9 additions & 12 deletions mapreduce/tox.ini
@@ -2,25 +2,22 @@
minversion = 2.0
basepython = py27
envlist =
-    mapreduce
+    {py27,py36}-unit
flake8

[testenv]
usedevelop = true
platform = linux|darwin|win32

-[testenv:mapreduce]
+skip_install =
+    flake8: true
deps =
-    -e../datadog_checks_base[deps]
-    -rrequirements-dev.txt
+    unit: -e../datadog_checks_base[deps]
+    unit: -rrequirements-dev.txt
+    flake8: flake8
commands =
-    pip install -r requirements.in
-    pytest -v

-[testenv:flake8]
-skip_install = true
-deps = flake8
-commands = flake8 .
+    unit: pip install -r requirements.in
+    unit: pytest -v
+    flake8: flake8 .

[flake8]
exclude = .eggs,.tox