
Commit

Remove use of 'six' package from most tests (and haproxy and istio) (#18593)

* Remove use of 'six' package from most tests

* fix redisdb test
iliakur authored Sep 16, 2024
1 parent 333a5ec commit db6993c
Showing 50 changed files with 127 additions and 219 deletions.
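
Nearly all of the edits below follow the same few mechanical patterns: six.iteritems(d) becomes d.items(), six.moves imports become their Python 3 stdlib counterparts, and PY2/PY3 branches collapse to the Python 3 path. A minimal before/after sketch of those patterns (the helper name and sample values are illustrative, not taken from any file in this commit):

from urllib.parse import urlparse  # replaces six.moves.urllib.parse


def tags_from_mapping(mapping):
    # dict.items() replaces six.iteritems(); on Python 3 it is already a lazy view.
    return ['%s:%s' % (name, value) for name, value in mapping.items()]


print(tags_from_mapping({'env': 'prod'}))          # ['env:prod']
print(urlparse('http://localhost:8500').hostname)  # localhost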
2 changes: 1 addition & 1 deletion amazon_msk/tests/conftest.py
@@ -2,10 +2,10 @@
# All rights reserved
# Licensed under a 3-clause BSD style license (see LICENSE)
import json
from urllib.parse import urlparse

import mock
import pytest
from six.moves.urllib.parse import urlparse

from datadog_checks.dev import docker_run
from datadog_checks.dev.http import MockResponse
5 changes: 1 addition & 4 deletions clickhouse/tests/test_unit.py
@@ -4,7 +4,6 @@
import mock
import pytest
from clickhouse_driver.errors import Error, NetworkError
from six import PY3

from datadog_checks.clickhouse import ClickhouseCheck, queries

@@ -65,9 +64,7 @@ def test_error_query(instance, dd_run_check):
ids=['SystemMetrics', 'SystemEvents'],
)
def test_latest_metrics_supported(metrics, ignored_columns, metric_source_url):
# While we're here, also check key order
if PY3:
assert list(metrics) == sorted(metrics)
assert list(metrics) == sorted(metrics)

described_metrics = parse_described_metrics(metric_source_url)

4 changes: 1 addition & 3 deletions consul/tests/consul_mocks.py
@@ -3,8 +3,6 @@
# Licensed under a 3-clause BSD style license (see LICENSE)
import random

from six import iteritems

MOCK_CONFIG = {'url': 'http://localhost:8500', 'catalog_checks': True}
MOCK_CONFIG_DISABLE_SERVICE_TAG = {
'url': 'http://localhost:8500',
@@ -30,7 +28,7 @@


def mock_check(check, mocks):
for f_name, m in iteritems(mocks):
for f_name, m in mocks.items():
if not hasattr(check, f_name):
continue
else:
9 changes: 2 additions & 7 deletions datadog_checks_base/tests/test_metadata.py
@@ -8,7 +8,6 @@

import mock
import pytest
from six import PY3

from datadog_checks.base import AgentCheck, ensure_bytes, ensure_unicode

@@ -57,12 +56,8 @@ class NewAgentCheck(AgentCheck):
def test_encoding(self):
check = AgentCheck('test', {}, [{}])
check.check_id = 'test:123'
if PY3:
constructor = ensure_bytes
finalizer = ensure_unicode
else:
constructor = ensure_unicode
finalizer = ensure_bytes
constructor = ensure_bytes
finalizer = ensure_unicode

name = constructor(u'nam\u00E9')
value = constructor(u'valu\u00E9')
2 changes: 1 addition & 1 deletion datadog_checks_dev/tests/test_conditions.py
@@ -3,9 +3,9 @@
# Licensed under a 3-clause BSD style license (see LICENSE)
import os
import sys
from urllib.response import addinfourl

import pytest
from six.moves.urllib.response import addinfourl

from datadog_checks.dev.conditions import CheckCommandOutput, CheckDockerLogs, CheckEndpoints, WaitFor
from datadog_checks.dev.errors import RetryError
13 changes: 6 additions & 7 deletions disk/tests/test_unit.py
@@ -6,7 +6,6 @@

import mock
import pytest
from six import iteritems

from datadog_checks.base.utils.platform import Platform
from datadog_checks.base.utils.timeout import TimeoutException
@@ -65,10 +64,10 @@ def test_default(aggregator, gauge_metrics, rate_metrics, count_metrics, dd_run_
else:
tags = []

for name, value in iteritems(gauge_metrics):
for name, value in gauge_metrics.items():
aggregator.assert_metric(name, value=value, count=1, metric_type=aggregator.GAUGE, tags=tags)

for name, value in iteritems(rate_metrics):
for name, value in rate_metrics.items():
aggregator.assert_metric(
name,
value=value,
@@ -77,7 +76,7 @@
tags=['device:{}'.format(DEFAULT_DEVICE_NAME), 'device_name:{}'.format(DEFAULT_DEVICE_BASE_NAME)],
)

for name, value in iteritems(count_metrics):
for name, value in count_metrics.items():
aggregator.assert_metric(
name,
value=value,
@@ -110,14 +109,14 @@ def test_use_mount(aggregator, instance_basic_mount, gauge_metrics, rate_metrics
c = Disk('disk', {}, [instance_basic_mount])
dd_run_check(c)

for name, value in iteritems(gauge_metrics):
for name, value in gauge_metrics.items():
aggregator.assert_metric(
name,
value=value,
tags=['device:{}'.format(DEFAULT_MOUNT_POINT), 'device_name:{}'.format(DEFAULT_DEVICE_BASE_NAME)],
)

for name, value in chain(iteritems(rate_metrics), iteritems(count_metrics)):
for name, value in chain(rate_metrics.items(), count_metrics.items()):
aggregator.assert_metric(
name,
value=value,
@@ -155,7 +154,7 @@ def test_device_tagging(aggregator, gauge_metrics, rate_metrics, count_metrics,
'device_label:mylab',
]

for name, value in chain(iteritems(gauge_metrics), iteritems(rate_metrics), iteritems(count_metrics)):
for name, value in chain(gauge_metrics.items(), rate_metrics.items(), count_metrics.items()):
aggregator.assert_metric(
name,
value=value,
3 changes: 1 addition & 2 deletions dns_check/tests/mocks.py
@@ -3,7 +3,6 @@
# Licensed under Simplified BSD License (see LICENSE)

from dns.resolver import NXDOMAIN
from six import PY3


class MockDNSAnswer:
@@ -18,7 +17,7 @@ def __init__(self, address):
else:
items = [MockDNSAnswer.MockItem(address)]

self.items = {item: None for item in items} if PY3 else items
self.items = {item: None for item in items}

class MockItem:
def __init__(self, address):
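
The surviving line in this mock builds a dict keyed on the mock items; iterating such a dict yields the items in insertion order, so code that loops over self.items behaves the same as it would with the former plain list. A small standalone sketch of that behavior (MockItem is simplified from the mock above; the addresses are made up):

class MockItem:
    def __init__(self, address):
        self.address = address


items = [MockItem('10.0.0.1'), MockItem('10.0.0.2')]
# Keying a dict on the items preserves insertion order when iterating.
ordered = {item: None for item in items}
for item in ordered:
    print(item.address)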
7 changes: 3 additions & 4 deletions elastic/tests/test_integration.py
@@ -6,7 +6,6 @@
import pytest
import requests
from packaging import version
from six import iteritems

from datadog_checks.dev.utils import get_metadata_metrics
from datadog_checks.elastic import ESCheck
@@ -276,7 +275,7 @@ def test_node_name_as_host(dd_environment, instance_normalize_hostname, aggregat
elastic_check.check(None)
node_name = node_tags[-1].split(':')[1]

for m_name, _ in iteritems(STATS_METRICS):
for m_name in STATS_METRICS:
aggregator.assert_metric(m_name, count=1, tags=node_tags, hostname=node_name)


@@ -288,7 +287,7 @@ def test_pshard_metrics(dd_environment, aggregator):
elastic_check.check(None)

pshard_stats_metrics = pshard_stats_for_version(es_version)
for m_name, desc in iteritems(pshard_stats_metrics):
for m_name, desc in pshard_stats_metrics.items():
if desc[0] == 'gauge':
aggregator.assert_metric(m_name)

@@ -310,7 +309,7 @@ def test_detailed_index_stats(dd_environment, aggregator):
es_version = elastic_check._get_es_version()
elastic_check.check(None)
pshard_stats_metrics = pshard_stats_for_version(es_version)
for m_name, desc in iteritems(pshard_stats_metrics):
for m_name, desc in pshard_stats_metrics.items():
if desc[0] == 'gauge' and desc[1].startswith('_all.'):
aggregator.assert_metric(m_name)

7 changes: 1 addition & 6 deletions esxi/tests/ssh_tunnel.py
@@ -4,21 +4,16 @@
from __future__ import absolute_import

import os
import subprocess
from contextlib import contextmanager

import psutil
from six import PY3

from datadog_checks.dev.conditions import WaitForPortListening
from datadog_checks.dev.env import environment_run
from datadog_checks.dev.structures import LazyFunction, TempDir
from datadog_checks.dev.utils import ON_WINDOWS, find_free_port, get_ip

if PY3:
import subprocess
else:
import subprocess32 as subprocess

PID_FILE = 'ssh.pid'


3 changes: 1 addition & 2 deletions go_expvar/tests/test_integration.py
@@ -5,7 +5,6 @@
import logging

import pytest
from six import iteritems

from . import common

@@ -25,7 +24,7 @@ def test_go_expvar(check, aggregator):
aggregator.assert_metric(gauge, count=1, tags=shared_tags)
for rate in common.CHECK_RATES:
aggregator.assert_metric(rate, count=1, tags=shared_tags)
for rate, value in iteritems(CHECK_RATES_CUSTOM):
for rate, value in CHECK_RATES_CUSTOM.items():
aggregator.assert_metric(rate, count=1, value=value, tags=shared_tags)
for count in common.CHECK_COUNT:
aggregator.assert_metric(count, count=1, metric_type=3, tags=shared_tags)
5 changes: 2 additions & 3 deletions go_expvar/tests/test_unit.py
@@ -6,7 +6,6 @@
import logging

import pytest
from six import iteritems

from . import common

@@ -83,7 +82,7 @@ def test_go_expvar_mocked(go_expvar_mock, check, aggregator):
aggregator.assert_metric(
gauge.format(common.CHECK_NAME), metric_type=aggregator.GAUGE, count=1, tags=shared_tags
)
for gauge, tags in iteritems(CHECK_GAUGES_CUSTOM_MOCK):
for gauge, tags in CHECK_GAUGES_CUSTOM_MOCK.items():
aggregator.assert_metric(
gauge.format(common.CHECK_NAME), metric_type=aggregator.GAUGE, count=1, tags=shared_tags + tags
)
@@ -145,7 +144,7 @@ def test_go_expvar_mocked_namespace(go_expvar_mock, check, aggregator):

for gauge in CHECK_GAUGES:
aggregator.assert_metric(gauge.format(metric_namespace), count=1, tags=shared_tags)
for gauge, tags in iteritems(CHECK_GAUGES_CUSTOM_MOCK):
for gauge, tags in CHECK_GAUGES_CUSTOM_MOCK.items():
aggregator.assert_metric(gauge.format(metric_namespace), count=1, tags=shared_tags + tags)

for rate in CHECK_RATES:
39 changes: 16 additions & 23 deletions haproxy/datadog_checks/haproxy/legacy/haproxy.py
@@ -9,9 +9,7 @@
import socket
import time
from collections import defaultdict, namedtuple

from six import PY2, iteritems
from six.moves.urllib.parse import urlparse
from urllib.parse import urlparse

from datadog_checks.base import AgentCheck, is_affirmative, to_string
from datadog_checks.base.errors import CheckException
@@ -115,20 +113,15 @@ def _fetch_url_data(self):

@staticmethod
def _decode_response(response):
# it only needs additional decoding in py3, so skip it if it's py2
if PY2:
return response.content.splitlines()
else:
content = response.content

# If the content is a string, it can't be decoded again
# But if it's bytes, it can be decoded.
# So, check if it has the decode method
decode_fn = getattr(content, "decode", None)
if callable(decode_fn):
content = content.decode('utf-8')
content = response.content
# If the content is a string, it can't be decoded again
# But if it's bytes, it can be decoded.
# So, check if it has the decode method
decode_fn = getattr(content, "decode", None)
if callable(decode_fn):
content = content.decode('utf-8')

return content.splitlines()
return content.splitlines()

@staticmethod
def _parse_uptime(uptime):
@@ -443,7 +436,7 @@ def _tag_from_regex(self, service_name):

# match.groupdict() returns tags dictionary in the form of {'name': 'value'}
# convert it to Datadog tag LIST: ['name:value']
return ["%s:%s" % (name, value) for name, value in iteritems(match.groupdict())]
return ["%s:%s" % (name, value) for name, value in match.groupdict().items()]

@staticmethod
def _normalize_status(status):
@@ -463,7 +456,7 @@ def _process_backend_hosts_metric(self, active_tag=None):
agg_statuses = defaultdict(lambda: {status: 0 for status in Services.COLLATED_STATUSES})
active_tag = [] if active_tag is None else active_tag

for host_status, count in iteritems(self.hosts_statuses):
for host_status, count in self.hosts_statuses.items():
try:
service, back_or_front, hostname, status = host_status
except ValueError:
@@ -512,7 +505,7 @@ def _process_status_metric(
reported_statuses_dict[reported_status] = 0
statuses_counter = defaultdict(lambda: copy.copy(reported_statuses_dict))

for host_status, count in iteritems(self.hosts_statuses):
for host_status, count in self.hosts_statuses.items():
hostname = None
try:
service, _, hostname, status = host_status
@@ -555,13 +548,13 @@ def _process_status_metric(
status_key = Services.STATUS_TO_COLLATED.get(status, Services.UNAVAILABLE)
agg_statuses_counter[tuple(agg_tags)][status_key] += count

for tags, count_per_status in iteritems(statuses_counter):
for status, count in iteritems(count_per_status):
for tags, count_per_status in statuses_counter.items():
for status, count in count_per_status.items():
self.gauge('haproxy.count_per_status', count, tags=tags + ('status:%s' % status,))

# Send aggregates
for service_tags, service_agg_statuses in iteritems(agg_statuses_counter):
for status, count in iteritems(service_agg_statuses):
for service_tags, service_agg_statuses in agg_statuses_counter.items():
for status, count in service_agg_statuses.items():
self.gauge("haproxy.count_per_status", count, tags=service_tags + ('status:%s' % status,))

def _process_metrics(self, data, custom_tags=None, active_tag=None):
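
With the PY2 branch gone, _decode_response in the haproxy legacy check keeps only the decode-and-splitlines path shown above. A standalone sketch of that remaining logic, using a fake response object in place of a real HTTP response (names and sample payload are illustrative):

class FakeResponse:
    def __init__(self, content):
        self.content = content


def decode_response(response):
    content = response.content
    # Bytes carry a .decode method and need decoding; str does not,
    # so only decode when the attribute is present and callable.
    decode_fn = getattr(content, 'decode', None)
    if callable(decode_fn):
        content = content.decode('utf-8')
    return content.splitlines()


print(decode_response(FakeResponse(b'frontend,fe1\nbackend,be1')))
print(decode_response(FakeResponse('frontend,fe1\nbackend,be1')))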
3 changes: 1 addition & 2 deletions hdfs_datanode/tests/test_hdfs_datanode.py
@@ -3,7 +3,6 @@
# Licensed under a 3-clause BSD style license (see LICENSE)
import mock
import pytest
from six import iteritems

from datadog_checks.hdfs_datanode import HDFSDataNode

@@ -38,7 +37,7 @@ def test_check(aggregator, mocked_request):
HDFSDataNode.JMX_SERVICE_CHECK, status=HDFSDataNode.OK, tags=HDFS_DATANODE_METRIC_TAGS + CUSTOM_TAGS, count=1
)

for metric, value in iteritems(HDFS_DATANODE_METRICS_VALUES):
for metric, value in HDFS_DATANODE_METRICS_VALUES.items():
aggregator.assert_metric(metric, value=value, tags=HDFS_DATANODE_METRIC_TAGS + CUSTOM_TAGS, count=1)


7 changes: 3 additions & 4 deletions hdfs_namenode/tests/test_hdfs_namenode.py
@@ -3,7 +3,6 @@
# Licensed under a 3-clause BSD style license (see LICENSE)
import mock
import pytest
from six import iteritems

from datadog_checks.hdfs_namenode import HDFSNameNode

@@ -34,13 +33,13 @@ def test_check(aggregator, dd_run_check, mocked_request):
HDFSNameNode.JMX_SERVICE_CHECK, HDFSNameNode.OK, tags=HDFS_NAMESYSTEM_METRIC_TAGS + CUSTOM_TAGS, count=1
)

for metric, value in iteritems(HDFS_NAMESYSTEM_STATE_METRICS_VALUES):
for metric, value in HDFS_NAMESYSTEM_STATE_METRICS_VALUES.items():
aggregator.assert_metric(metric, value=value, tags=HDFS_NAMESYSTEM_METRIC_TAGS + CUSTOM_TAGS, count=1)

for metric, value in iteritems(HDFS_NAMESYSTEM_METRICS_VALUES):
for metric, value in HDFS_NAMESYSTEM_METRICS_VALUES.items():
aggregator.assert_metric(metric, value=value, tags=HDFS_NAMESYSTEM_METRIC_TAGS + CUSTOM_TAGS, count=1)

for metric, value in iteritems(HDFS_NAMESYSTEM_MUTUAL_METRICS_VALUES):
for metric, value in HDFS_NAMESYSTEM_MUTUAL_METRICS_VALUES.items():
aggregator.assert_metric(metric, value=value, tags=HDFS_NAMESYSTEM_METRIC_TAGS + CUSTOM_TAGS, count=2)

aggregator.assert_all_metrics_covered()
1 change: 0 additions & 1 deletion ibm_mq/tests/conftest.py
@@ -7,7 +7,6 @@
import re

import pytest
from six.moves import range

from datadog_checks.dev import docker_run
from datadog_checks.dev.conditions import CheckDockerLogs, WaitFor
(Diff for the remaining changed files not shown.)
