Skip to content

Commit

Permalink
Drop 'six' dependency from base check package
Browse files Browse the repository at this point in the history
  • Loading branch information
iliakur committed Sep 20, 2024
1 parent a47f5c6 commit e2c838c
Show file tree
Hide file tree
Showing 39 changed files with 112 additions and 244 deletions.
20 changes: 6 additions & 14 deletions datadog_checks_base/datadog_checks/base/__init__.py
Original file line number Diff line number Diff line change
@@ -1,28 +1,20 @@
# (C) Datadog, Inc. 2018-present
# All rights reserved
# Licensed under a 3-clause BSD style license (see LICENSE)
from six import PY3

if PY3:
from datadog_checks.base.agent import datadog_agent

if datadog_agent.get_config('use_boringssl'):
import urllib3.contrib.pyopenssl

urllib3.contrib.pyopenssl.inject_into_urllib3()
from datadog_checks.base.agent import datadog_agent

from .__about__ import __version__
from .checks import AgentCheck
from .checks.openmetrics import OpenMetricsBaseCheck
from .checks.openmetrics.v2.base import OpenMetricsBaseCheckV2
from .config import is_affirmative
from .errors import ConfigurationError
from .utils.common import ensure_bytes, ensure_unicode, to_native_string, to_string

# Python 3+
try:
from .checks.openmetrics.v2.base import OpenMetricsBaseCheckV2
except ImportError:
OpenMetricsBaseCheckV2 = None
if datadog_agent.get_config('use_boringssl'):
import urllib3.contrib.pyopenssl

urllib3.contrib.pyopenssl.inject_into_urllib3()

# Windows-only
try:
Expand Down
28 changes: 11 additions & 17 deletions datadog_checks_base/datadog_checks/base/checks/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,7 @@
)

import yaml
from six import PY2, binary_type, iteritems, raise_from, text_type
from pydantic import BaseModel, ValidationError

from datadog_checks.base.agent import AGENT_RUNNING, aggregator, datadog_agent

Expand Down Expand Up @@ -84,9 +84,6 @@
prof = Profiler(service='datadog-agent-integrations')
prof.start()

if not PY2:
from pydantic import BaseModel, ValidationError

if TYPE_CHECKING:
import ssl # noqa: F401

Expand Down Expand Up @@ -305,8 +302,7 @@ def __init__(self, *args, **kwargs):
# Functions that will be called exactly once (if successful) before the first check run
self.check_initializations = deque() # type: Deque[Callable[[], None]]

if not PY2:
self.check_initializations.append(self.load_configuration_models)
self.check_initializations.append(self.load_configuration_models)

self.__formatted_tags = None
self.__logs_enabled = None
Expand Down Expand Up @@ -506,11 +502,9 @@ def log_typos_in_options(self, user_config, models_config, level):

known_options = {k for k, _ in models_config} # type: Set[str]

if not PY2:

if isinstance(models_config, BaseModel):
# Also add aliases, if any
known_options.update(set(models_config.model_dump(by_alias=True)))
if isinstance(models_config, BaseModel):
# Also add aliases, if any
known_options.update(set(models_config.model_dump(by_alias=True)))

unknown_options = [option for option in user_configs.keys() if option not in known_options] # type: List[str]

Expand Down Expand Up @@ -594,7 +588,7 @@ def load_configuration_model(import_path, model_name, config, context):
)
message_lines.append(' {}'.format(error['msg']))

raise_from(ConfigurationError('\n'.join(message_lines)), None)
raise ConfigurationError('\n'.join(message_lines)) from None
else:
return config_model

Expand Down Expand Up @@ -1123,7 +1117,7 @@ def set_external_tags(self, external_tags):
new_tags = []
for hostname, source_map in external_tags:
new_tags.append((to_native_string(hostname), source_map))
for src_name, tags in iteritems(source_map):
for src_name, tags in source_map.items():
source_map[src_name] = self._normalize_tags_type(tags)
datadog_agent.set_external_tags(new_tags)
except IndexError:
Expand Down Expand Up @@ -1222,7 +1216,7 @@ def normalize(self, metric, prefix=None, fix_case=False):
prefix: A prefix to add to the normalized name, defaults to None
fix_case: A boolean, indicating whether to make sure that the metric name returned is in "snake_case"
"""
if isinstance(metric, text_type):
if isinstance(metric, str):
metric = unicodedata.normalize('NFKD', metric).encode('ascii', 'ignore')

if fix_case:
Expand All @@ -1247,7 +1241,7 @@ def normalize_tag(self, tag):
This happens for legacy reasons, when we cleaned up some characters (like '-')
which are allowed in tags.
"""
if isinstance(tag, text_type):
if isinstance(tag, str):
tag = tag.encode('utf-8', 'ignore')
tag = self.TAG_REPLACEMENT.sub(br'_', tag)
tag = self.MULTIPLE_UNDERSCORE_CLEANUP.sub(br'_', tag)
Expand Down Expand Up @@ -1345,8 +1339,8 @@ def event(self, event):
the event to be sent
"""
# Enforce types of some fields, considerably facilitates handling in go bindings downstream
for key, value in iteritems(event):
if not isinstance(value, (text_type, binary_type)):
for key, value in event.items():
if not isinstance(value, (str, bytes)):
continue

try:
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,6 @@

from prometheus_client.metrics_core import Metric
from prometheus_client.parser import _parse_sample, _replace_help_escaping
from six.moves import zip


def text_fd_to_metric_families(fd):
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -25,8 +25,7 @@
import sys
import threading
import traceback

from six.moves import queue, range
import queue

# Item pushed on the work queue to tell the worker threads to terminate
SENTINEL = "QUIT"
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,6 @@
from copy import deepcopy

import requests
from six import PY2

from ...errors import CheckException
from ...utils.tracing import traced_class
Expand Down Expand Up @@ -183,5 +182,4 @@ class StandardFields(object):
pass


if not PY2:
StandardFields.__doc__ = '\n'.join('- `{}`'.format(field) for field in STANDARD_FIELDS)
StandardFields.__doc__ = '\n'.join('- `{}`'.format(field) for field in STANDARD_FIELDS)
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,6 @@

import requests
from prometheus_client.samples import Sample
from six import PY3, iteritems, string_types

from datadog_checks.base.agent import datadog_agent

Expand All @@ -23,9 +22,6 @@
from .. import AgentCheck
from ..libs.prometheus import text_fd_to_metric_families

if PY3:
long = int


class OpenMetricsScraperMixin(object):
# pylint: disable=E1101
Expand Down Expand Up @@ -110,7 +106,7 @@ def _get_setting(name, default):
# We merge list and dictionaries from optional defaults & instance settings
metrics = default_instance.get('metrics', []) + instance.get('metrics', [])
for metric in metrics:
if isinstance(metric, string_types):
if isinstance(metric, str):
metrics_mapper[metric] = metric
else:
metrics_mapper.update(metric)
Expand Down Expand Up @@ -273,7 +269,7 @@ def _get_setting(name, default):
config['_type_override_patterns'] = {}

with_wildcards = set()
for metric, type in iteritems(config['type_overrides']):
for metric, type in config['type_overrides'].items():
if '*' in metric:
config['_type_override_patterns'][compile(translate(metric))] = type
with_wildcards.add(metric)
Expand Down Expand Up @@ -468,7 +464,7 @@ def parse_metric_family(self, response, scraper_config):
if type_override:
metric.type = type_override
elif scraper_config['_type_override_patterns']:
for pattern, new_type in iteritems(scraper_config['_type_override_patterns']):
for pattern, new_type in scraper_config['_type_override_patterns'].items():
if pattern.search(metric.name):
metric.type = new_type
break
Expand Down Expand Up @@ -518,7 +514,7 @@ def scrape_metrics(self, scraper_config):
watched['sets'] = {}
watched['keys'] = {}
watched['singles'] = set()
for key, val in iteritems(scraper_config['label_joins']):
for key, val in scraper_config['label_joins'].items():
labels = []
if 'labels_to_match' in val:
labels = val['labels_to_match']
Expand All @@ -542,7 +538,7 @@ def scrape_metrics(self, scraper_config):
# Set dry run off
scraper_config['_dry_run'] = False
# Garbage collect unused mapping and reset active labels
for metric, mapping in list(iteritems(scraper_config['_label_mapping'])):
for metric, mapping in scraper_config['_label_mapping'].items():
for key in list(mapping):
if (
metric in scraper_config['_active_label_mapping']
Expand Down Expand Up @@ -599,7 +595,7 @@ def process(self, scraper_config, metric_transformers=None):

def transform_metadata(self, metric, scraper_config):
labels = metric.samples[0][self.SAMPLE_LABELS]
for metadata_name, label_name in iteritems(scraper_config['metadata_label_map']):
for metadata_name, label_name in scraper_config['metadata_label_map'].items():
if label_name in labels:
self.set_metadata(metadata_name, labels[label_name])

Expand Down Expand Up @@ -662,7 +658,7 @@ def _store_labels(self, metric, scraper_config):
label_dict = {}

if get_all:
for label_name, label_value in iteritems(sample_labels):
for label_name, label_value in sample_labels.items():
if label_name in matching_labels:
continue
label_dict[label_name] = label_value
Expand Down Expand Up @@ -717,7 +713,7 @@ def _join_labels(self, metric, scraper_config):
sample_labels.update(label_mapping[mapping_key][mapping_value])

# Match with tuples of labels
for key, mapping_key in iteritems(keys):
for key, mapping_key in keys.items():
if mapping_key in matching_single_labels:
continue

Expand Down Expand Up @@ -806,7 +802,7 @@ def process_metric(self, metric, scraper_config, metric_transformers=None):

return
# check for wildcards in transformers
for transformer_name, transformer in iteritems(metric_transformers):
for transformer_name, transformer in metric_transformers.items():
if transformer_name.endswith('*') and metric.name.startswith(transformer_name[:-1]):
transformer(metric, scraper_config, transformer_name)

Expand Down Expand Up @@ -1058,7 +1054,7 @@ def _submit_gauges_from_histogram(self, metric_name, metric, scraper_config, hos
def _compute_bucket_hash(self, tags):
# we need the unique context for all the buckets
# hence we remove the "le" tag
return hash(frozenset(sorted((k, v) for k, v in iteritems(tags) if k != 'le')))
return hash(frozenset(sorted((k, v) for k, v in tags.items() if k != 'le')))

def _decumulate_histogram_buckets(self, metric):
"""
Expand Down Expand Up @@ -1174,7 +1170,7 @@ def _metric_tags(self, metric_name, val, sample, scraper_config, hostname=None):
custom_tags = scraper_config['custom_tags']
_tags = list(custom_tags)
_tags.extend(scraper_config['_metric_tags'])
for label_name, label_value in iteritems(sample[self.SAMPLE_LABELS]):
for label_name, label_value in sample[self.SAMPLE_LABELS].items():
if label_name not in scraper_config['exclude_labels']:
if label_name in scraper_config['include_labels'] or len(scraper_config['include_labels']) == 0:
tag_name = scraper_config['labels_mapper'].get(label_name, label_name)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,6 @@
from contextlib import contextmanager

from requests.exceptions import RequestException
from six import raise_from

from ....errors import ConfigurationError
from ....utils.tracing import traced_class
Expand Down Expand Up @@ -75,7 +74,7 @@ def check(self, _):
scraper.scrape()
except (ConnectionError, RequestException) as e:
self.log.error("There was an error scraping endpoint %s: %s", endpoint, str(e))
raise_from(type(e)("There was an error scraping endpoint {}: {}".format(endpoint, e)), None)
raise type(e)("There was an error scraping endpoint {}: {}".format(endpoint, e)) from None

def configure_scrapers(self):
"""
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -4,8 +4,6 @@
import re
from copy import deepcopy

from six import raise_from

from ....config import is_affirmative
from . import transformers

Expand Down Expand Up @@ -53,7 +51,7 @@ def __init__(self, check, config):
self.transformer_data[raw_metric_name] = self.compile_transformer(config)
except Exception as e:
error = f'Error compiling transformer for metric `{raw_metric_name}`: {e}'
raise_from(type(e)(error), None)
raise type(e)(error) from None

def get(self, metric):
metric_name = metric.name
Expand Down
Original file line number Diff line number Diff line change
@@ -1,8 +1,6 @@
# (C) Datadog, Inc. 2020-present
# All rights reserved
# Licensed under a 3-clause BSD style license (see LICENSE)
from six import raise_from

from .....constants import ServiceCheck


Expand Down Expand Up @@ -56,7 +54,7 @@ def compile_service_check_statuses(modifiers):
try:
value = int(value)
except Exception:
raise_from(TypeError(f'value `{value}` of parameter `status_map` does not represent an integer'), None)
raise TypeError(f'value `{value}` of parameter `status_map` does not represent an integer') from None

if not isinstance(status_string, str):
raise ValueError(f'status `{status_string}` for value `{value}` of parameter `status_map` is not a string')
Expand Down
Original file line number Diff line number Diff line change
@@ -1,8 +1,6 @@
# (C) Datadog, Inc. 2018-present
# All rights reserved
# Licensed under a 3-clause BSD style license (see LICENSE)
from six import string_types

from ...errors import CheckException
from ...utils.common import to_native_string
from .. import AgentCheck
Expand Down Expand Up @@ -145,7 +143,7 @@ def get_scraper(self, instance):
# We merge list and dictionaries from optional defaults & instance settings
metrics = default_instance.get("metrics", []) + instance.get("metrics", [])
for metric in metrics:
if isinstance(metric, string_types):
if isinstance(metric, str):
metrics_mapper[metric] = metric
else:
metrics_mapper.update(metric)
Expand Down
Loading

0 comments on commit e2c838c

Please sign in to comment.